index int64 | repo_name string | branch_name string | path string | content string | import_graph string |
|---|---|---|---|---|---|
73,173 | fiquinho/actor_critic | refs/heads/master | /agents/online_actor_critic.py | import logging
from pathlib import Path
import wandb
import tensorflow as tf
import numpy as np
from .base_agent import BaseActorCriticAgent, ConfigManager, Episode
logger = logging.getLogger()
class OnlineActorCriticAgent(BaseActorCriticAgent):
    """
    Implementing online actor critic algorithm from
    http://rail.eecs.berkeley.edu/deeprlcourse/static/slides/lec-6.pdf

    The agent performs one environment step per training step: the critic is
    fitted on a bootstrapped one-step target, then the actor is trained on the
    advantage estimated with the freshly updated critic.
    """

    def __init__(self, agent_path: Path, config: ConfigManager):
        """Creates the online agent and resets its environment.

        Args:
            agent_path: The output folder for the model files
            config: The configurations for this agent
        """
        super().__init__(agent_path, config)
        self.env.reset_environment()
        self.last_complete_episode = None
        self.last_complete_episode_reward = 0.
        self.finished_episode = False
        self.current_episode = self.reset_current_episode()

    @staticmethod
    def reset_current_episode() -> dict:
        """Returns a fresh, empty episode history container."""
        new_episode = {"states": [],
                       "actions": [],
                       "rewards": []}
        return new_episode

    def train_step(self, step_n: int) -> (int, bool):
        """ Make a single training step for this method.

        Fixed the return annotation: this method returns a (reward, finished)
        pair, matching the contract declared in BaseActorCriticAgent.

        Args:
            step_n: The current training step number
        Returns:
            The total reward from the last completed episode
            If the last episode finished or is still running
        """
        if self.finished_episode:
            self.env.reset_environment()
            self.finished_episode = False

        # Take an action in the environment and save to current episode history
        state = self.env.get_normalized_state()
        tf_state = tf.constant(np.array([state]), dtype=tf.float32)
        self.current_episode["states"].append(state)
        action = self.actor.produce_actions(tf_state)[0][0]
        tf_action = tf.constant(np.array([action]), dtype=tf.int32)
        self.current_episode["actions"].append(action.numpy())
        _, reward, done = self.env.environment_step(int(action))
        norm_next_state = self.env.get_normalized_state()
        tf_next_state = tf.constant(np.array([norm_next_state]), dtype=tf.float32)
        self.current_episode["rewards"].append(reward)

        # Calculate critic target and train critic.
        # Terminal states bootstrap with a next-state value of 0.
        if done:
            next_state_value = tf.constant(np.array([[0.]]), dtype=tf.float32)
        else:
            next_state_value = self.critic(tf_next_state)
        critic_target = reward + self.config.agent_config.discount * next_state_value.numpy()[0][0]
        tf_critic_target = tf.constant(np.array([critic_target]), dtype=tf.float32)
        _, critic_loss, _ = self.critic.train_step(tf_state, tf_critic_target)

        # Evaluate advantages with the freshly updated critic
        new_state_value = self.critic(tf_state)
        if done:
            new_next_state_value = tf.constant(np.array([0.]), dtype=tf.float32)
        else:
            new_next_state_value = self.critic(tf_next_state)
        advantage = reward + self.config.agent_config.discount * tf.reshape(new_next_state_value, -1) - \
            tf.reshape(new_state_value, -1)
        # Use the tf dtype directly (the original passed np.float32, which tf
        # accepts but is inconsistent with the rest of the file)
        advantage_batch = tf.constant(advantage, dtype=tf.float32)

        # Train actor
        action_probabilities = self.actor.get_probabilities(tf_state)
        _, actor_loss, log_probabilities = self.actor.train_step(
            tf_state, tf_action, advantage_batch)

        # Save metrics to WandB. Loop variables are named *_name so they do
        # not shadow the `state`/`action` tensors used above.
        if self.env.state_names is not None:
            for state_idx, state_name in enumerate(self.env.state_names):
                state_attribute_hist = tf_state[:, state_idx]
                wandb.log({f"{state_name}": wandb.Histogram(state_attribute_hist)}, step=step_n)
        if self.env.actions is not None:
            for action_idx, action_name in enumerate(self.env.actions):
                action_attribute_hist = action_probabilities[:, action_idx]
                wandb.log({f"{action_name}": wandb.Histogram(action_attribute_hist)}, step=step_n)
        wandb.log({'training_step': step_n, 'actor_loss': actor_loss,
                   'critic_loss': critic_loss, 'episode_reward': self.last_complete_episode_reward},
                  step=step_n)
        # Histogram creation can fail on degenerate data (e.g. NaN/inf);
        # log-and-continue rather than abort training
        try:
            wandb.log({"log_probabilities": wandb.Histogram(log_probabilities)}, step=step_n)
        except ValueError:
            logger.info(f"Failed to save log probabilities: {log_probabilities}")
        try:
            wandb.log({"advantages": wandb.Histogram(advantage_batch)}, step=step_n)
        except ValueError:
            logger.info(f"Failed to save advantages: {advantage_batch}")
        try:
            wandb.log({"values": wandb.Histogram(new_state_value)}, step=step_n)
        except ValueError:
            logger.info(f"Failed to save values: {new_state_value}")

        # Close the episode bookkeeping when the environment terminates
        if done:
            episode = Episode(self.current_episode["states"], self.current_episode["actions"],
                              self.current_episode["rewards"], "discrete", self.config.agent_config.discount)
            self.last_complete_episode = episode
            self.finished_episode = True
            self.last_complete_episode_reward = self.last_complete_episode.total_reward
            self.current_episode = self.reset_current_episode()

        return self.last_complete_episode_reward, self.finished_episode
| {"/animated_progress.py": ["/code_utils/__init__.py", "/agents/__init__.py"], "/models/__init__.py": ["/models/critic.py", "/models/actor.py"], "/agents/batch_actor_critic.py": ["/agents/base_agent.py"], "/agents/advantage_actor_critic.py": ["/agents/base_agent.py"], "/environments/__init__.py": ["/environments/cart_pole.py", "/environments/environments.py", "/environments/flappy_bird.py"], "/agents/__init__.py": ["/agents/base_agent.py", "/agents/batch_actor_critic.py", "/agents/online_actor_critic.py", "/agents/advantage_actor_critic.py"], "/environments/cart_pole.py": ["/environments/environments.py"], "/agents/online_actor_critic.py": ["/agents/base_agent.py"], "/train_agent.py": ["/code_utils/__init__.py", "/agents/__init__.py"], "/code_utils/config_utils.py": ["/models/__init__.py"], "/code_utils/__init__.py": ["/code_utils/config_utils.py", "/code_utils/model_utils.py", "/code_utils/logger_utils.py"], "/agents/base_agent.py": ["/models/__init__.py", "/code_utils/__init__.py", "/environments/__init__.py"], "/environments/flappy_bird.py": ["/environments/__init__.py"], "/test_agent.py": ["/code_utils/__init__.py", "/agents/__init__.py"]} |
73,174 | fiquinho/actor_critic | refs/heads/master | /train_agent.py | import argparse
import logging
import os
import sys
import shutil
import json
import time
from pathlib import Path
import wandb
import tensorflow as tf
from git import Repo
from code_utils import prepare_file_logger, prepare_stream_logger, ConfigManager
from agents import get_agent
logger = logging.getLogger()
prepare_stream_logger(logger, logging.INFO)
logging.getLogger("tensorflow").setLevel(logging.ERROR)
EXPERIMENTS_DIR = Path("experiments")
SCRIPT_DIR = Path(os.path.abspath(sys.argv[0]))
CONFIGS_DIR = Path(SCRIPT_DIR.parent, "configurations")
def main():
    """Runs a full training experiment from a JSON configuration file.

    Command line arguments select the configuration file and output folder,
    toggle TF eager mode for debugging, and control whether a previous
    experiment with the same name is overwritten. Results (logs, configs,
    checkpoints, experiment summary) are written under the experiment folder,
    and metrics are streamed to WandB.
    """
    parser = argparse.ArgumentParser(description="Train an Actor-Critic agent that plays a specific environment.",
                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    required_named = parser.add_argument_group('REQUIRED named arguments')
    required_named.add_argument("--config_file", type=str, required=True,
                                help="Configuration file for the experiment.")
    parser.add_argument("--output_dir", type=str, default=EXPERIMENTS_DIR,
                        help="Where to save the experiment files")
    parser.add_argument("--debug", action="store_true", default=False,
                        help="Activate to run Tensorflow in eager mode.")
    parser.add_argument("--replace", action="store_true", default=False,
                        help="Activate to replace old experiment with the same name in the output folder.")
    args = parser.parse_args()

    # On debug mode all functions are executed normally (eager mode)
    if args.debug:
        tf.config.run_functions_eagerly(True)

    # Get git version so the run is reproducible
    repo = Repo(search_parent_directories=True)
    sha = repo.head.object.hexsha

    # Use provided configurations file
    config_file = Path(args.config_file)
    config = ConfigManager.from_json_file(config_file)

    # Create experiment folder and handle old results
    output_dir = Path(args.output_dir)
    agent_folder = Path(output_dir, config.agent_config.name)
    deleted_old = False
    if agent_folder.exists():
        if args.replace:
            shutil.rmtree(agent_folder)
            deleted_old = True
        else:
            # Fixed message: the flag is "--replace", and a space was missing
            # between the two sentences
            raise FileExistsError(f"The experiment {agent_folder} already exists. "
                                  f"Change output folder, experiment name or use --replace "
                                  f"to overwrite.")
    agent_folder.mkdir(parents=True)

    # Save experiments configurations and start experiment log
    prepare_file_logger(logger, logging.INFO, Path(agent_folder, "experiment.log"))
    logger.info(f"Running experiment {config.agent_config.name}")
    if deleted_old:
        logger.info(f"Deleted old experiment in {agent_folder}")
    config.log_configurations(logger)
    experiment_config_file = Path(agent_folder, "configurations.json")
    logger.info(f"Saving experiment configurations to {experiment_config_file}")
    config.to_json_file(experiment_config_file)

    # Point WandB at the actual experiment folder. The previous value was
    # hard-coded to f"experiments/{name}", which ignored --output_dir.
    wandbrun = wandb.init(project=f"AC-{config.agent_config.env}",
                          name=config.agent_config.name,
                          group=config.agent_config.agent_type,
                          notes=config.agent_config.desc,
                          config=config.as_single_dict(),
                          reinit=True,
                          dir=str(agent_folder))

    # Create agent
    agent = get_agent(config.agent_config.agent_type)(agent_path=agent_folder, config=config)

    # Train the policy and time the whole run
    start_time = time.time()
    test_reward = agent.train_policy(training_config=config.training_config)
    train_time = time.time() - start_time

    # Persist a summary of the experiment next to the model files
    experiment_info = {"mean_test_reward": float(test_reward),
                       "name": config.agent_config.name,
                       "description": config.agent_config.desc,
                       "git_hash": sha,
                       "train_time": train_time}
    with open(Path(agent_folder, "experiment_information.json"), "w") as outfile:
        json.dump(experiment_info, outfile, indent=4)
    wandbrun.finish()


if __name__ == '__main__':
    main()
| {"/animated_progress.py": ["/code_utils/__init__.py", "/agents/__init__.py"], "/models/__init__.py": ["/models/critic.py", "/models/actor.py"], "/agents/batch_actor_critic.py": ["/agents/base_agent.py"], "/agents/advantage_actor_critic.py": ["/agents/base_agent.py"], "/environments/__init__.py": ["/environments/cart_pole.py", "/environments/environments.py", "/environments/flappy_bird.py"], "/agents/__init__.py": ["/agents/base_agent.py", "/agents/batch_actor_critic.py", "/agents/online_actor_critic.py", "/agents/advantage_actor_critic.py"], "/environments/cart_pole.py": ["/environments/environments.py"], "/agents/online_actor_critic.py": ["/agents/base_agent.py"], "/train_agent.py": ["/code_utils/__init__.py", "/agents/__init__.py"], "/code_utils/config_utils.py": ["/models/__init__.py"], "/code_utils/__init__.py": ["/code_utils/config_utils.py", "/code_utils/model_utils.py", "/code_utils/logger_utils.py"], "/agents/base_agent.py": ["/models/__init__.py", "/code_utils/__init__.py", "/environments/__init__.py"], "/environments/flappy_bird.py": ["/environments/__init__.py"], "/test_agent.py": ["/code_utils/__init__.py", "/agents/__init__.py"]} |
73,175 | fiquinho/actor_critic | refs/heads/master | /code_utils/config_utils.py | import os
import json
import logging
import sys
from dataclasses import dataclass
from pathlib import Path
from typing import Optional
SCRIPT_DIR = Path(os.path.abspath(sys.argv[0]))
sys.path.append(str(SCRIPT_DIR.parent.parent.parent.parent))
from models import ActorConfig, CriticConfig
@dataclass
class TrainingConfig(object):
    """Configuration values that control the training loop."""
    # Total number of calls to the agent's train_step
    train_steps: int
    # How often (in steps) the policy is evaluated/saved during training.
    # NOTE(review): base_agent compares this against None to disable periodic
    # saving — consider Optional[int]; confirm against the config files.
    save_policy_every: int
@dataclass
class BaseAgentConfig(object):
    """Configuration values shared by every agent type.

    The A2C-only fields default to None so that other agent types can simply
    omit them from their configuration files.
    """
    name: str        # Experiment/agent name (used for output folders and WandB)
    desc: str        # Free-form experiment description
    env: str         # Environment identifier
    agent_type: str  # Which agent implementation to instantiate
    discount: float  # Reward discount factor

    # A2C. Fixed annotations: these default to None, so they are Optional.
    num_processes: Optional[int] = None
    batch_env_steps: Optional[int] = None  # training_batch_size = batch_env_steps * num_processes
class ConfigManager(object):
    """Bundles the four configuration sections of an experiment.

    Holds the agent, critic, actor and training configurations parsed from a
    single JSON document, and knows how to log and (de)serialize them.
    """

    def __init__(self, config_dict: dict):
        """Builds each configuration section from its sub-dictionary."""
        self.agent_config = BaseAgentConfig(**config_dict["agent_config"])
        self.critic_config = CriticConfig(**config_dict["critic_config"])
        self.actor_config = ActorConfig(**config_dict["actor_config"])
        self.training_config = TrainingConfig(**config_dict["training_config"])

    def log_configurations(self, logger: logging.Logger):
        """Writes every configuration section to the given logger."""
        logger.info("Used configurations:")
        for section, settings in self.__dict__.items():
            logger.info(f"\t{section}: {settings}")

    def to_json_file(self, output_file: Path):
        """Serializes all sections into a single JSON file."""
        json_data = {section: settings.__dict__
                     for section, settings in self.__dict__.items()}
        with open(output_file, "w", encoding="utf8") as f:
            json.dump(json_data, f, indent=4)

    @classmethod
    def from_json_file(cls, config_file: Path):
        """Builds a ConfigManager from a JSON configuration file."""
        return cls(cls.read_json_config(config_file))

    @staticmethod
    def read_json_config(config_file: Path):
        """Parses a JSON configuration file into a plain dictionary."""
        with open(config_file, "r", encoding="utf8") as cfile:
            return json.load(cfile)

    def as_single_dict(self) -> dict:
        """Flattens every section's attributes into one dictionary.

        Later sections overwrite earlier ones on key collisions.
        """
        data = {}
        for settings in self.__dict__.values():
            data.update(settings.__dict__)
        return data
| {"/animated_progress.py": ["/code_utils/__init__.py", "/agents/__init__.py"], "/models/__init__.py": ["/models/critic.py", "/models/actor.py"], "/agents/batch_actor_critic.py": ["/agents/base_agent.py"], "/agents/advantage_actor_critic.py": ["/agents/base_agent.py"], "/environments/__init__.py": ["/environments/cart_pole.py", "/environments/environments.py", "/environments/flappy_bird.py"], "/agents/__init__.py": ["/agents/base_agent.py", "/agents/batch_actor_critic.py", "/agents/online_actor_critic.py", "/agents/advantage_actor_critic.py"], "/environments/cart_pole.py": ["/environments/environments.py"], "/agents/online_actor_critic.py": ["/agents/base_agent.py"], "/train_agent.py": ["/code_utils/__init__.py", "/agents/__init__.py"], "/code_utils/config_utils.py": ["/models/__init__.py"], "/code_utils/__init__.py": ["/code_utils/config_utils.py", "/code_utils/model_utils.py", "/code_utils/logger_utils.py"], "/agents/base_agent.py": ["/models/__init__.py", "/code_utils/__init__.py", "/environments/__init__.py"], "/environments/flappy_bird.py": ["/environments/__init__.py"], "/test_agent.py": ["/code_utils/__init__.py", "/agents/__init__.py"]} |
73,176 | fiquinho/actor_critic | refs/heads/master | /code_utils/__init__.py | from .config_utils import ConfigManager, TrainingConfig
from .model_utils import CheckpointsManager
from .logger_utils import prepare_file_logger, prepare_stream_logger
| {"/animated_progress.py": ["/code_utils/__init__.py", "/agents/__init__.py"], "/models/__init__.py": ["/models/critic.py", "/models/actor.py"], "/agents/batch_actor_critic.py": ["/agents/base_agent.py"], "/agents/advantage_actor_critic.py": ["/agents/base_agent.py"], "/environments/__init__.py": ["/environments/cart_pole.py", "/environments/environments.py", "/environments/flappy_bird.py"], "/agents/__init__.py": ["/agents/base_agent.py", "/agents/batch_actor_critic.py", "/agents/online_actor_critic.py", "/agents/advantage_actor_critic.py"], "/environments/cart_pole.py": ["/environments/environments.py"], "/agents/online_actor_critic.py": ["/agents/base_agent.py"], "/train_agent.py": ["/code_utils/__init__.py", "/agents/__init__.py"], "/code_utils/config_utils.py": ["/models/__init__.py"], "/code_utils/__init__.py": ["/code_utils/config_utils.py", "/code_utils/model_utils.py", "/code_utils/logger_utils.py"], "/agents/base_agent.py": ["/models/__init__.py", "/code_utils/__init__.py", "/environments/__init__.py"], "/environments/flappy_bird.py": ["/environments/__init__.py"], "/test_agent.py": ["/code_utils/__init__.py", "/agents/__init__.py"]} |
73,177 | fiquinho/actor_critic | refs/heads/master | /agents/base_agent.py | import logging
import time
import os
import sys
from pathlib import Path
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
from tqdm import tqdm
SCRIPT_DIR = Path(os.path.abspath(sys.argv[0]))
sys.path.append(str(SCRIPT_DIR.parent.parent.parent.parent))
from models import critic_feed_forward_model_constructor, feed_forward_discrete_policy_constructor
from code_utils import ConfigManager, CheckpointsManager, TrainingConfig
from environments import get_env, Episode, Environment
logger = logging.getLogger()
class BaseActorCriticAgent(object):
    """
    Base class for actor critic algorithms:
        - Creates a FFNN model to use as policy (actor)
        - Creates a FFNN model to use as critic
        - Has training logic

    Subclasses implement train_step; everything else (episode generation,
    the training loop, checkpointing, saving/loading, evaluation) lives here.
    """
    def __init__(self, agent_path: Path, config: ConfigManager):
        """Creates an actor critic agent that uses FFNNs to represent both.

        Args:
            agent_path: The output folder for the model files
            config: The configurations for this agent
        """
        self.env: Environment = get_env(config.agent_config.env)()
        self.agent_path = agent_path
        self.config = config
        self.models_path = Path(agent_path, "models")
        # When True, train_policy scores periodic saves with a dedicated test
        # episode instead of the training-episode reward
        self.periodic_test = False
        # Models are sized from the environment's state/action spaces
        critic_constructor = critic_feed_forward_model_constructor(self.env.state_space_n)
        self.critic = critic_constructor(self.config.critic_config)
        actor_constructor = feed_forward_discrete_policy_constructor(self.env.state_space_n, self.env.action_space_n)
        self.actor = actor_constructor(self.config.actor_config)
        self.ckpts_manager = CheckpointsManager(self.models_path, self.actor, self.critic)

    def generate_episode(self) -> Episode:
        """Plays one full episode with the current policy (no rendering).

        Returns:
            The completed Episode with its states, actions and rewards.
        """
        self.env.reset_environment()
        done = False
        states = []
        rewards = []
        actions = []
        while not done:
            state = self.env.get_normalized_state()
            tf_current_state = tf.constant(np.array([state]), dtype=tf.float32)
            action = self.actor.produce_actions(tf_current_state)[0][0]
            _, reward, done = self.env.environment_step(int(action))
            states.append(state)
            actions.append(action)
            rewards.append(reward)
        episode = Episode(states, actions, rewards, "discrete", self.config.agent_config.discount)
        return episode

    def train_step(self, step_n: int) -> (int, bool):
        """
        Make a single training step for this method

        Args:
            step_n: The current training step number
        Returns:
            The total reward from the last completed episode
            If the last episode finished or is still running
        """
        raise NotImplementedError()

    def pass_test(self, **kwargs) -> bool:
        """Standard test of agent

        Args:
            **kwargs: Must include "train_episodes_rewards", the rewards of
                the completed training episodes so far

        Returns: If the agent solved the environment
        """
        train_episodes_rewards = kwargs["train_episodes_rewards"]
        # The environment judges success on the last 20 episode rewards
        return self.env.pass_test(train_episodes_rewards[-20:])

    def train_policy(self, training_config: TrainingConfig) -> float:
        """Train the agent to solve the current environment.

        Runs train_step in a loop, checkpointing progress every 5% of the run,
        tracking the best-scoring policy, and stopping early when the
        environment's success test passes.

        Args:
            training_config: The training configurations
        Returns
            The final policy test mean reward
        """
        train_episodes_rewards = []
        start_time = time.time()
        # Save "progress" checkpoints every 5% of the training run.
        # NOTE(review): this is 0 when train_steps < 20, which would make the
        # modulo below raise ZeroDivisionError — confirm minimum train_steps.
        progress_save = int(training_config.train_steps * 0.05)
        best_step = None
        best_checkpoints = None
        best_score = float("-inf")
        for i in tqdm(range(training_config.train_steps)):
            episode_reward, finished_episode = self.train_step(i)
            self.ckpts_manager.step_checkpoints()
            if not i % progress_save:
                self.ckpts_manager.save_progress_ckpts()
                logger.info(f"Progress checkpoints saved for step {i}")

            # Log information and save policy if it improved
            if training_config.save_policy_every is not None:
                if not i % training_config.save_policy_every:
                    if self.periodic_test:
                        # Score with a dedicated evaluation episode instead of
                        # the latest training reward
                        test_episode = self.generate_episode()
                        episode_reward = test_episode.total_reward
                    train_episodes_rewards.append(episode_reward)
                    if episode_reward >= best_score:
                        best_score = episode_reward
                        best_step = i
                        best_checkpoints = self.ckpts_manager.save_ckpts()
                        logger.info(f"New best model - Reward = {episode_reward}")
                        logger.info(f"Checkpoint saved for step {i}")
                    logger.info(f"Training step N° {i} - "
                                f"Last Episode reward: {episode_reward} - "
                                f"Batch time = {time.time() - start_time} sec")
                    start_time = time.time()

            if finished_episode:
                train_episodes_rewards.append(episode_reward)
                # Early stop as soon as the environment's success test passes
                if self.pass_test(train_episodes_rewards=train_episodes_rewards):
                    logger.info("The agent trained successfully!!")
                    best_step = i
                    best_checkpoints = self.ckpts_manager.save_ckpts()
                    logger.info(f"New best model - Reward = {episode_reward}")
                    logger.info(f"Checkpoint saved for step {i}")
                    break

        # Load best checkpoint and save it.
        # NOTE(review): best_checkpoints can still be None here if no episode
        # finished and periodic saving was disabled — confirm configurations
        # rule that out.
        logger.info(f"Best model in step {best_step} - {best_checkpoints[0]}")
        self.ckpts_manager.actor.restore(best_checkpoints[0])
        test_reward = self.test_agent(episodes=100)
        logger.info(f"Best model test: {100} episodes mean reward = {test_reward}")
        self.save_agent()
        self.plot_training_info(train_episodes_rewards, self.agent_path)
        return test_reward

    def save_agent(self):
        """Save the policy neural network to files in the model path."""
        logger.info(f"Saving trained policy to {self.models_path}")
        start = time.time()
        self.actor.save(self.models_path)
        logger.info(f"Saving time {time.time() - start}")

    def load_model(self, model_dir: Path):
        """Loads a trained policy from files. If no save model is found,
        it loads the latest checkpoint available.

        Args:
            model_dir: Where the trained model is stored.
        """
        if Path(model_dir, "saved_model.pb").exists():
            self.actor = tf.keras.models.load_model(model_dir)
        else:
            self.ckpts_manager.actor.restore(self.ckpts_manager.actor_manager.latest_checkpoint)
            logger.info(f"Restored model from checkpoint {self.ckpts_manager.actor_manager.latest_checkpoint}")

    @staticmethod
    def plot_training_info(rewards: np.array, agent_folder: Path = None):
        """Plots the training reward moving average.

        Args:
            rewards: The rewards
            agent_folder: Where to save the generated plot
        """
        plt.figure(figsize=(5, 5))
        # Moving average plot
        plt.plot([i for i in range(len(rewards))], rewards)
        plt.ylabel(f"Reward")
        plt.xlabel("Episode #")
        plt.title("Rewards")
        if agent_folder is not None:
            plt.savefig(Path(agent_folder, "training_rewards.png"))

    def play_game(self, plot_game: bool = False, delay: float = None) -> Episode:
        """Plays a full episode using the current policy.

        Args:
            plot_game: If the environment should be plotted
            delay: Delay between environment steps (frames)

        Returns:
            The full played episode
        """
        self.env.reset_environment()
        done = False
        states = []
        rewards = []
        actions = []
        while not done:
            if plot_game:
                self.env.render_environment()
                # NOTE(review): the delay is applied only while rendering —
                # confirm it was not meant to throttle headless runs too
                if delay is not None:
                    time.sleep(delay)
            state = self.env.get_normalized_state()
            tf_current_state = tf.constant(np.array([state]), dtype=tf.float32)
            action = self.actor.produce_actions(tf_current_state)[0][0]
            new_state, reward, done = self.env.environment_step(int(action))
            states.append(state)
            rewards.append(reward)
            actions.append(action)
        episode = Episode(states, actions, rewards, "discrete", self.config.agent_config.discount)
        self.env.reset_environment()
        return episode

    def test_agent(self, episodes: int = 100):
        """Plays a number of episodes and returns their mean total reward.

        Args:
            episodes: How many evaluation episodes to play
        """
        total_rewards = []
        for i in range(episodes):
            episode = self.play_game(plot_game=False, delay=None)
            total_rewards.append(episode.total_reward)
        mean_reward = np.mean(total_rewards)
        return mean_reward
| {"/animated_progress.py": ["/code_utils/__init__.py", "/agents/__init__.py"], "/models/__init__.py": ["/models/critic.py", "/models/actor.py"], "/agents/batch_actor_critic.py": ["/agents/base_agent.py"], "/agents/advantage_actor_critic.py": ["/agents/base_agent.py"], "/environments/__init__.py": ["/environments/cart_pole.py", "/environments/environments.py", "/environments/flappy_bird.py"], "/agents/__init__.py": ["/agents/base_agent.py", "/agents/batch_actor_critic.py", "/agents/online_actor_critic.py", "/agents/advantage_actor_critic.py"], "/environments/cart_pole.py": ["/environments/environments.py"], "/agents/online_actor_critic.py": ["/agents/base_agent.py"], "/train_agent.py": ["/code_utils/__init__.py", "/agents/__init__.py"], "/code_utils/config_utils.py": ["/models/__init__.py"], "/code_utils/__init__.py": ["/code_utils/config_utils.py", "/code_utils/model_utils.py", "/code_utils/logger_utils.py"], "/agents/base_agent.py": ["/models/__init__.py", "/code_utils/__init__.py", "/environments/__init__.py"], "/environments/flappy_bird.py": ["/environments/__init__.py"], "/test_agent.py": ["/code_utils/__init__.py", "/agents/__init__.py"]} |
73,178 | fiquinho/actor_critic | refs/heads/master | /environments/flappy_bird.py | import os
import sys
from typing import List
from pathlib import Path
import pygame
import numpy as np
from ple.games.flappybird import FlappyBird
from ple import PLE
SCRIPT_DIR = Path(os.path.abspath(sys.argv[0]))
sys.path.append(str(SCRIPT_DIR.parent.parent))
from environments import Episode, Environment
class FlappyBirdEnvironment(Environment):
    """PLE Flappy Bird game wrapped in the project's Environment interface."""

    def __init__(self):
        """Creates the PLE game instance and registers its action/state spaces."""
        game = FlappyBird()
        self.p = PLE(game, add_noop_action=True)
        self.p.init()
        self.win_score = 10.
        game_state = self.p.getGameState()
        Environment.__init__(self, game, len(self.p.getActionSet()), len(game_state),
                             ["up", "nothing"], list(game_state.keys()))

    def reset_environment(self):
        """Restarts the game to its initial state."""
        self.p.reset_game()

    def get_state(self) -> np.array:
        """Returns the raw game state values as a numpy array."""
        return np.array(list(self.p.getGameState().values()))

    def get_normalized_state(self) -> np.array:
        """Get the current state of the environment with each
        state attribute normalized in [0, 1], ready to be fed to a NN.

        Returns:
            The current normalized state (np.array)
        """
        mins = np.array([0., -10., 0., 0., 103., 103., 0., 103.])
        maxs = np.array([410., 10., 288., 205., 308., 410., 205., 308.])
        return (self.get_state() - mins) / (maxs - mins)

    def environment_step(self, action: int) -> (np.array, int, bool):
        """Do a move in the environment.

        Args:
            action: The action to take
        Returns:
            The next state, the reward obtained by doing the action, and if the environment is terminated
        """
        reward = self.p.act(self.p.getActionSet()[action])
        # The episode ends on game over, or once the winning score is reached
        done = self.p.game_over() or self.p.score() >= self.win_score
        return self.get_state(), reward, done

    def render_environment(self):
        """Turns on screen drawing and real-time frame rate."""
        self.p.display_screen = True
        self.p.force_fps = False

    def pass_test(self, rewards: List[float]):
        """The environment counts as solved when the mean reward reaches the win score."""
        return bool(np.mean(rewards) >= self.win_score)

    def close(self):
        """Shuts down the pygame backend."""
        pygame.quit()

    def win_condition(self, episode: Episode):
        """An episode is a win when its total reward reaches the win score."""
        return bool(episode.total_reward >= self.win_score)
def main():
    """Plays one rendered episode with a fixed 'do nothing' action, printing transitions."""
    env = FlappyBirdEnvironment()
    for episode_n in range(1):
        print(f"Episode: {episode_n}")
        env.reset_environment()
        print(f"Start state = {env.get_state()}")
        done = False
        while not done:
            # action = np.random.randint(0, 2)
            action = 1
            print(f"Action = {action}")
            next_state, reward, done = env.environment_step(action)
            env.render_environment()
            print(f"New state = {next_state}")
            print(f"Reward = {reward}")
    env.close()


if __name__ == '__main__':
    main()
| {"/animated_progress.py": ["/code_utils/__init__.py", "/agents/__init__.py"], "/models/__init__.py": ["/models/critic.py", "/models/actor.py"], "/agents/batch_actor_critic.py": ["/agents/base_agent.py"], "/agents/advantage_actor_critic.py": ["/agents/base_agent.py"], "/environments/__init__.py": ["/environments/cart_pole.py", "/environments/environments.py", "/environments/flappy_bird.py"], "/agents/__init__.py": ["/agents/base_agent.py", "/agents/batch_actor_critic.py", "/agents/online_actor_critic.py", "/agents/advantage_actor_critic.py"], "/environments/cart_pole.py": ["/environments/environments.py"], "/agents/online_actor_critic.py": ["/agents/base_agent.py"], "/train_agent.py": ["/code_utils/__init__.py", "/agents/__init__.py"], "/code_utils/config_utils.py": ["/models/__init__.py"], "/code_utils/__init__.py": ["/code_utils/config_utils.py", "/code_utils/model_utils.py", "/code_utils/logger_utils.py"], "/agents/base_agent.py": ["/models/__init__.py", "/code_utils/__init__.py", "/environments/__init__.py"], "/environments/flappy_bird.py": ["/environments/__init__.py"], "/test_agent.py": ["/code_utils/__init__.py", "/agents/__init__.py"]} |
73,179 | fiquinho/actor_critic | refs/heads/master | /models/critic.py | import logging
from typing import List
import tensorflow as tf
import numpy as np
from tensorflow.keras import Model
from tensorflow.keras.layers import Dense
from dataclasses import dataclass
logger = logging.getLogger()
@dataclass
class CriticConfig(object):
    """Hyperparameters for the critic (value function) network."""
    # Width of each hidden Dense layer, in order
    layer_sizes: List[int]
    # Adam learning rate
    learning_rate: float
    hidden_activation: str = "relu"
    output_activation: str = "linear"
def critic_feed_forward_model_constructor(input_dim):
    """Creates a tf.keras.Model subclass for a Feed Forward Neural Network
    for value function approximation.

    The input dimension is baked into the tf.function input signatures, so it
    must be fixed when the class is created.

    Args:
        input_dim: The length of the state vector

    Returns:
        A class to instantiate the model object.
    """
    class Critic(Model):
        """Feed Forward Neural Network for value function approximation.
        The input size is already defined.
        """
        def __init__(self, critic_config: CriticConfig):
            """Creates a new FFNN for value function approximation. Implements all needed
            methods from tf.keras.Model.

            Args:
                critic_config: The model configurations
            """
            self.model_config = critic_config
            self.input_size = input_dim
            super(Critic, self).__init__()
            # Hidden stack as configured, then a single scalar value head
            self.hidden_layers = []
            for i in self.model_config.layer_sizes:
                self.hidden_layers.append(Dense(i, activation=self.model_config.hidden_activation,
                                                name=f"hidden_{len(self.hidden_layers)}"))
            self.value = Dense(1, activation=self.model_config.output_activation, name="value")
            self.loss_object = tf.keras.losses.MeanSquaredError()
            self.optimizer = tf.keras.optimizers.Adam(learning_rate=self.model_config.learning_rate)

        def get_config(self):
            """Used by tf.keras to load a saved model."""
            return {"layer_sizes": self.model_config.layer_sizes,
                    "learning_rate": self.model_config.learning_rate,
                    "hidden_activation": self.model_config.hidden_activation,
                    "output_activation": self.model_config.output_activation}

        @tf.function(input_signature=(tf.TensorSpec(shape=[None, input_dim], dtype=tf.float32), ))
        def call(self, inputs: tf.Tensor) -> tf.Tensor:
            """See base Class."""
            # Retrace logging helps spot accidental re-compilation of the graph
            logger.info("[Retrace] call")
            x = inputs
            for layer in self.hidden_layers:
                x = layer(x)
            value = self.value(x)
            return value

        @tf.function(input_signature=[tf.TensorSpec(shape=[None, input_dim], dtype=tf.float32),
                                      tf.TensorSpec(shape=[None], dtype=tf.float32)])
        def train_step(self, states: tf.Tensor, discounted_rewards: tf.Tensor) -> (tf.Tensor, tf.Tensor, tf.Tensor):
            """See base Class."""
            logger.info("[Retrace] train_step")
            # NOTE(review): values is shaped [None, 1] while discounted_rewards
            # is declared [None]; MeanSquaredError will broadcast the pair —
            # confirm this pairing is intended.
            with tf.GradientTape() as tape:
                values = self(states)
                loss = self.loss_object(discounted_rewards, values)
            gradients = tape.gradient(loss, self.trainable_variables)
            self.optimizer.apply_gradients(zip(gradients, self.trainable_variables))
            return values, loss, gradients

    return Critic
def test():
    """Smoke test: builds a tiny critic and runs one train step on dummy data."""
    # Eager mode + fixed seed for debuggable, repeatable runs
    tf.config.run_functions_eagerly(True)
    tf.random.set_seed(0)
    sample_config = {
        "layer_sizes": [3, 3],
        "learning_rate": 0.001,
        "hidden_activation": "relu",
        "output_activation": "linear"
    }
    model_config = CriticConfig(**sample_config)
    critic_constructor = critic_feed_forward_model_constructor(3)
    model = critic_constructor(model_config)
    state = np.array([[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]])
    # NOTE(review): train_step's input_signature declares discounted_rewards
    # as shape [None]; this passes shape (3, 1) — confirm tf accepts it here.
    discounted_rewards = np.array([[0.5], [1.], [1.]])
    values, loss, gradients = model.train_step(state, discounted_rewards)
    print(f"Values = {values}")
    print(f"loss = {loss}")
    print(f"gradients train= {gradients}")


if __name__ == '__main__':
    test()
| {"/animated_progress.py": ["/code_utils/__init__.py", "/agents/__init__.py"], "/models/__init__.py": ["/models/critic.py", "/models/actor.py"], "/agents/batch_actor_critic.py": ["/agents/base_agent.py"], "/agents/advantage_actor_critic.py": ["/agents/base_agent.py"], "/environments/__init__.py": ["/environments/cart_pole.py", "/environments/environments.py", "/environments/flappy_bird.py"], "/agents/__init__.py": ["/agents/base_agent.py", "/agents/batch_actor_critic.py", "/agents/online_actor_critic.py", "/agents/advantage_actor_critic.py"], "/environments/cart_pole.py": ["/environments/environments.py"], "/agents/online_actor_critic.py": ["/agents/base_agent.py"], "/train_agent.py": ["/code_utils/__init__.py", "/agents/__init__.py"], "/code_utils/config_utils.py": ["/models/__init__.py"], "/code_utils/__init__.py": ["/code_utils/config_utils.py", "/code_utils/model_utils.py", "/code_utils/logger_utils.py"], "/agents/base_agent.py": ["/models/__init__.py", "/code_utils/__init__.py", "/environments/__init__.py"], "/environments/flappy_bird.py": ["/environments/__init__.py"], "/test_agent.py": ["/code_utils/__init__.py", "/agents/__init__.py"]} |
73,180 | fiquinho/actor_critic | refs/heads/master | /test_agent.py | import argparse
import sys
import os
from pathlib import Path
SCRIPT_DIR = Path(os.path.abspath(sys.argv[0]))
sys.path.append(str(SCRIPT_DIR.parent.parent.parent))
from code_utils import ConfigManager
from agents import BaseActorCriticAgent
def main():
    """Evaluate a trained actor-critic agent on its environment.

    Command-line arguments:
        --experiment_dir: path to a trained agent directory (required);
            must contain ``configurations.json`` and a ``models`` subfolder.
        --episodes: number of evaluation episodes to play (default 200).
        --render_games: render the agent playing each episode.

    Prints per-episode reward/victory lines, then (when the environment
    defines a win condition) the win percentage and the mean reward.
    """
    parser = argparse.ArgumentParser(description="Test a trained agent on it's environment.",
                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    required_named = parser.add_argument_group('REQUIRED named arguments')
    required_named.add_argument("--experiment_dir", type=str, required=True,
                                help="The path to a trained agent directory.")
    parser.add_argument("--episodes", type=int, default=200,
                        help="The number of episodes to play during testing.")
    parser.add_argument("--render_games", action="store_true", default=False,
                        help="Activate to render the agent playing each episode.")
    args = parser.parse_args()

    experiment_dir = Path(args.experiment_dir)
    config_file = Path(experiment_dir, "configurations.json")
    config = ConfigManager.from_json_file(config_file)

    # Load agent and its trained model weights
    agent = BaseActorCriticAgent(agent_path=experiment_dir, config=config)
    agent.load_model(Path(experiment_dir, "models"))

    results = []
    rewards = []
    for i in range(args.episodes):
        episode = agent.play_game(plot_game=args.render_games, delay=0.001)
        win = agent.env.win_condition(episode)
        results.append(win)
        rewards.append(episode.total_reward)
        print(f"Episode = {i} - Total Reward = {episode.total_reward} - Victory = {win} - "
              f"Episode length = {len(episode)}")

    # BUG FIX: guard against --episodes 0, which previously raised
    # IndexError on results[0] and ZeroDivisionError on the mean.
    if results and results[0] is not None:
        print(f"Agent performance = {sum(results) * 100 / len(results)} % of Wins")
    if rewards:
        print(f"Mean reward = {sum(rewards) / len(rewards)}")
    agent.env.close()


if __name__ == '__main__':
    main()
| {"/animated_progress.py": ["/code_utils/__init__.py", "/agents/__init__.py"], "/models/__init__.py": ["/models/critic.py", "/models/actor.py"], "/agents/batch_actor_critic.py": ["/agents/base_agent.py"], "/agents/advantage_actor_critic.py": ["/agents/base_agent.py"], "/environments/__init__.py": ["/environments/cart_pole.py", "/environments/environments.py", "/environments/flappy_bird.py"], "/agents/__init__.py": ["/agents/base_agent.py", "/agents/batch_actor_critic.py", "/agents/online_actor_critic.py", "/agents/advantage_actor_critic.py"], "/environments/cart_pole.py": ["/environments/environments.py"], "/agents/online_actor_critic.py": ["/agents/base_agent.py"], "/train_agent.py": ["/code_utils/__init__.py", "/agents/__init__.py"], "/code_utils/config_utils.py": ["/models/__init__.py"], "/code_utils/__init__.py": ["/code_utils/config_utils.py", "/code_utils/model_utils.py", "/code_utils/logger_utils.py"], "/agents/base_agent.py": ["/models/__init__.py", "/code_utils/__init__.py", "/environments/__init__.py"], "/environments/flappy_bird.py": ["/environments/__init__.py"], "/test_agent.py": ["/code_utils/__init__.py", "/agents/__init__.py"]} |
73,187 | mobone/option_pressure | refs/heads/master | /trade_all.py | from stock import stock
import pandas as pd
from datetime import datetime, timedelta
import numpy as np
import matplotlib.pyplot as plt
if __name__ == "__main__":
    # Backtest the option-pressure signal over every ticker in tickers.csv,
    # walking each one day-by-day from 2016-10-05 to today (weekdays only).
    ticker_df = pd.read_csv('tickers.csv').values.T.tolist()[0]
    columns = ['Symbol', 'Date', 'Pressure', 'Call_Open_Int', 'Put_Open_Int', 'Call_ROI', 'Put_ROI']
    df = pd.DataFrame(columns=columns)
    while len(ticker_df) > 0:
        symbol = ticker_df.pop()
        #print(symbol, len(ticker_df))
        current_date = datetime.strptime("2016-10-05", "%Y-%m-%d")
        end_date = datetime.now()
        x = None  # previous stock object; reused so exp_dates are not re-queried
        while current_date < end_date:
            current_date = current_date + timedelta(days=1)
            if current_date.isoweekday() in range(1, 6):  # Monday..Friday
                if x is None:
                    x = stock(symbol, current_date)
                else:
                    x = stock(symbol, current_date, x.exp_dates)
                # stock only exposes .result when every lookup succeeded
                if hasattr(x, "result"):
                    df = df.append(pd.Series(x.result, index=columns), ignore_index=True)
        #print(df)
        #input()
        try:
            # Dump intermediate CSVs/plots only every 10th ticker.
            if len(ticker_df) % 10 != 0:
                continue
            print(len(df), df.corr())
            df.to_csv('data_open_int.csv')
            rounded_df = df
            rounded_df['Pressure'] = rounded_df['Pressure'].round(decimals=1)
            rounded_df_put = rounded_df[['Pressure', 'Put_ROI']]
            rounded_df_call = rounded_df[['Pressure', 'Call_ROI']]
            rounded_df_put.to_csv('rounded_df_put.csv')
            rounded_df_call.to_csv('rounded_df_call.csv')
            try:
                plt.figure()
                rounded_df_put.boxplot(by='Pressure')
                plt.savefig('plot_put.png')
                plt.clf()
                plt.close()
                plt.figure()
                rounded_df_call.boxplot(by='Pressure')
                plt.savefig('plot_call.png')
                plt.clf()
                plt.close()
            # BUG FIX: was 'except Excetion' — a NameError whenever plotting failed.
            except Exception as e:
                print(e)
            dfs = rounded_df.groupby(['Pressure'])
            """
            print('--------')
            for i in dfs:
                if len(i[1])>5:
                    print(round(float(i[0]),1), len(i[1]), i[1]['Call_ROI'].mean(), i[1]['Put_ROI'].mean())
            print('--------')
            """
        except Exception as e:
            print(e)
| {"/trade_all.py": ["/stock.py"], "/trade_one.py": ["/stock.py"], "/tests.py": ["/stock.py"]} |
73,188 | mobone/option_pressure | refs/heads/master | /trade_one.py | from stock import stock
import pandas as pd
from datetime import datetime, timedelta
import numpy as np
if __name__ == "__main__":
    # Evaluate the same ticker on two trade dates and print each result row.
    for trade_date in ('2016-10-14', '2016-10-17'):
        x = stock('SVU', datetime.strptime(trade_date, '%Y-%m-%d'))
        print(x.result)
| {"/trade_all.py": ["/stock.py"], "/trade_one.py": ["/stock.py"], "/tests.py": ["/stock.py"]} |
73,189 | mobone/option_pressure | refs/heads/master | /tests.py | #
import pytest
from stock import stock
import os
from datetime import datetime
class TestClass:
    """Integration tests for stock expiration selection.

    NOTE(review): these hit the live MySQL options database through the
    ``stock`` constructor — they are not runnable in isolation.
    """
    def test_get_exp_date(self):
        # test accuracy of months returned from db
        stock_obj = stock('SVU', datetime.strptime('2016-10-12', '%Y-%m-%d'))
        for month in ['November2016', 'October2016']:
            month_check = stock_obj.exp_dates['Expiration_Date'].str.contains(month).any()
            assert month_check == True
        # test expiration within current month
        # (exit date lands before the third Friday -> first listed expiration)
        stock_obj = stock('SVU', datetime.strptime('2016-10-07', '%Y-%m-%d'))
        assert stock_obj.expiration == 'November2016'
        # test expiration next month
        # (exit date lands on/after the third Friday -> second listed expiration)
        stock_obj = stock('SVU', datetime.strptime('2016-10-19', '%Y-%m-%d'))
        assert stock_obj.expiration == 'October2016'
| {"/trade_all.py": ["/stock.py"], "/trade_one.py": ["/stock.py"], "/tests.py": ["/stock.py"]} |
73,190 | mobone/option_pressure | refs/heads/master | /stock.py | import sys
from time import sleep
import pandas as pd
import MySQLdb
from datetime import datetime, timedelta, time
from math import ceil
import configparser
import calendar
# SQL statement templates, keyed by section name, loaded from sql_statements.cfg.
config = configparser.RawConfigParser()
config.read("sql_statements.cfg")
# Weeks starting on Sunday; used to locate each month's third Friday (expiration day).
c = calendar.Calendar(firstweekday=calendar.SUNDAY)
# NOTE(review): module-level DB connection with hard-coded credentials, opened at
# import time — importing this module fails if the database is unreachable.
read_con = MySQLdb.connect(host="192.168.1.20", user="user", passwd="cookie", db="options")
#write_con = MySQLdb.connect(host="192.168.1.20", user="user", passwd="cookie", db="option_pressure_trades")
# Column layout for persisted trades (used by stock.store_trade).
trade_columns = ['Symbol', 'Date', 'Pressure', 'Call_Interest', 'Put_Interest', 'Call_ROI', 'P']
class stock():
    """Option-pressure trade evaluator for one symbol on one entry date.

    Loads the option chain from the MySQL ``options`` database, computes a
    call/put "pressure" ratio around the at-the-money strike, and measures the
    ROI of buying both the ATM call and put at the entry Ask and selling at the
    Bid after ``hold_days`` weekdays.  On full success ``self.result`` holds
    [symbol, date, pressure, call open interest, put open interest, call ROI,
    put ROI]; on any failure the attribute is simply absent.
    """
    def __init__(self, symbol, date, exp_dates = None, trades_df = None):
        # exp_dates / trades_df may be handed over from a previous instance
        # to avoid re-querying the database on every trading day.
        self.exp_dates = exp_dates
        self.trades_df = trades_df
        self.symbol = symbol
        self.pressure = None
        self.hold_days = 5  # weekdays to hold the position before selling
        self.strike = None
        #print(symbol, date, exp_dates, trades_df)
        self.get_exp_date(date)
        try:
            self.options = self.get_options(date)
            self.get_option_pressure()
            self.get_roi(date)
        # NOTE(review): bare except deliberately swallows all DB/lookup
        # failures — callers detect them via the missing ``result`` attribute.
        except:
            return
        if hasattr(self, "call_roi") and hasattr(self, "put_roi") and self.call_interest is not None and self.put_interest is not None: # store results. will be stored in db later
            self.result = [self.symbol, date.strftime('%Y-%m-%d'), self.pressure, self.call_interest, self.put_interest, self.call_roi, self.put_roi]

    # Gets the nearest expiration date for the trade
    def get_exp_date(self, end_date):
        """Choose the expiration to trade, given the entry date.

        Steps the date forward ``hold_days`` weekdays to find the exit date,
        then picks the first DB expiration if the exit falls before the
        month's third Friday, else the second.
        """
        if self.exp_dates is None:
            sql = config['get_exp_date']['sql'].format(self.symbol, end_date.strftime("%Y-%m-%d"))
            sql = sql.replace('\n',' ')
            self.exp_dates = pd.read_sql(sql, read_con)
        if len(self.exp_dates)<2:
            # Not enough expirations to choose between; self.expiration stays unset.
            return
        # Advance hold_days weekdays (skip Saturdays/Sundays) to the exit date.
        for i in range(self.hold_days):
            end_date = end_date + timedelta(days=1)
            while end_date.isoweekday() not in range(1, 6):
                end_date = end_date + timedelta(days=1)
        self.end_date = end_date
        # Standard monthly options expire on the third Friday of the month.
        monthcal = c.monthdatescalendar(end_date.year, end_date.month)
        third_friday = [day for week in monthcal for day in week if \
                        day.weekday() == calendar.FRIDAY and \
                        day.month == end_date.month][2]
        if self.end_date<datetime.combine(third_friday, time()):
            self.expiration = self.exp_dates.iloc[0]['Expiration_Date']
        else:
            self.expiration = self.exp_dates.iloc[1]['Expiration_Date']
        #print(self.expiration)

    # Gets the options available around nearest strike price
    def get_options(self, date):
        """Return the option rows bracketing the ATM strike, or None.

        Side effects: caches the ATM call/put rows (``call_trade``,
        ``put_trade``) and their open interest on ``self``.
        """
        sql = config['get_options']['sql'].format(self.symbol, date.strftime("%Y-%m-%d"), self.expiration)
        options = pd.read_sql(sql, read_con)
        if options.empty:
            return
        self.strike = round(float(options.iloc[0]['Last_Stock_Price'])) # round the last stock price
        last_price_index = options[(options['Strike']==self.strike) & (options['Type_Option']=='C')].index[0]
        # Keep a window of rows around the ATM strike (4 before, 5 after).
        options = options.iloc[last_price_index-4:last_price_index+6]
        self.calls = options[options['Type_Option']=="C"]
        self.puts = options[options['Type_Option']=="P"]
        if len(self.calls)!=len(self.puts):
            # Unbalanced chain would skew the pressure ratio; bail out.
            return None
        self.call_trade = options[(options['Type_Option']=="C") & (options['Strike']==self.strike)].iloc[0]
        self.put_trade = options[(options['Type_Option']=="P") & (options['Strike']==self.strike)].iloc[0]
        self.call_interest = self.call_trade['Open_Int']
        self.put_interest = self.put_trade['Open_Int']
        return options

    # Returns option pressure function I created
    def get_option_pressure(self):
        """Set self.pressure = call premium / (call + put premium), in [0, 1]."""
        call_pressure = self.calls[['Ask','Bid']].sum()
        call_pressure = call_pressure['Ask'] + call_pressure['Bid']
        put_pressure = self.puts[['Ask','Bid']].sum()
        put_pressure = put_pressure['Ask'] + put_pressure['Bid']
        try:
            self.pressure = call_pressure/(call_pressure+put_pressure)
            #print(self.pressure, call_pressure, put_pressure)
            #input()
        # NOTE(review): bare except guards division by zero; pressure stays None.
        except:
            pass

    # Returns the ROI on the call and put trade
    def get_roi(self, date):
        """Set call_roi/put_roi: bought at entry Ask, sold at exit-date Bid."""
        sql_call = config['get_roi']['sql'].format(self.call_trade['Ref_Num'], self.end_date.strftime("%Y-%m-%d"))
        sql_put = config['get_roi']['sql'].format(self.put_trade['Ref_Num'], self.end_date.strftime("%Y-%m-%d"))
        df_call = pd.read_sql(sql_call, read_con)
        df_put = pd.read_sql(sql_put, read_con)
        if not df_call.empty and not df_put.empty:
            buy_price_call = float(self.call_trade['Ask'])
            sell_price_call = float(df_call.iloc[0]['Bid'])
            buy_price_put = float(self.put_trade['Ask'])
            sell_price_put = float(df_put.iloc[0]['Bid'])
            try:
                self.call_roi = (sell_price_call-buy_price_call)/buy_price_call
                self.put_roi = (sell_price_put-buy_price_put)/buy_price_put
            # NOTE(review): bare except guards a zero buy price; ROIs stay unset.
            except:
                pass

    def store_trade(self, trades_df):
        """Ensure a trades DataFrame exists (persistence not implemented yet)."""
        if trades_df is None:
            trades_df = pd.DataFrame(columns = trade_columns)
| {"/trade_all.py": ["/stock.py"], "/trade_one.py": ["/stock.py"], "/tests.py": ["/stock.py"]} |
73,200 | alexding1226/ds_final | refs/heads/master | /DataFormat.py | from typing import List
class task_item:
    """A schedulable task, ordered by (deadline_date, deadline_time, importance).

    Equality additionally compares name and id, so split pieces of the same
    task (same id) with identical deadlines compare equal.
    """
    def __init__(self, name, duration, importance, deadline_date, deadline_time, id, min_length = 1,
                 type = "None", non_consecutive_type = None) :
        self.name = name                    # str
        self.duration = duration            # int, every 30 min or 10 min
        self.importance = importance        # int 1~5, 5 is the most important
        self.deadline_date = deadline_date  # int
        self.deadline_time = deadline_time  # int
        self.type = type                    # str
        self.min_length = min_length        # int, every 30 min or 10 min
        # BUG FIX: default was a shared mutable list ([]); use None sentinel so
        # each instance gets its own list of types that can't follow this task.
        self.non_consecutive_type = [] if non_consecutive_type is None else non_consecutive_type
        self.id = id  # to identify tasks with same properties (e.g. splitted tasks)

    def copy(self):
        """Return an independent task_item with identical attributes.

        BUG FIX: the original omitted ``id``, shifting min_length/type/
        non_consecutive_type one parameter to the left in the constructor call.
        """
        return task_item(self.name, self.duration, self.importance,
                         self.deadline_date, self.deadline_time, self.id,
                         self.min_length, self.type,
                         list(self.non_consecutive_type))

    # Rich comparisons delegate to the (deadline_date, deadline_time, importance)
    # priority tuple; tasks with earlier deadlines sort first.
    def __gt__(self, other):
        return (self.deadline_date, self.deadline_time, self.importance) > \
               (other.deadline_date, other.deadline_time, other.importance)

    def __eq__(self, other: object) -> bool:
        return (self.deadline_date, self.deadline_time, self.importance, self.name, self.id) == \
               (other.deadline_date, other.deadline_time, other.importance, other.name, other.id)

    def __lt__(self, other):
        return (self.deadline_date, self.deadline_time, self.importance) < \
               (other.deadline_date, other.deadline_time, other.importance)

    def __ge__(self, other):
        return (self.deadline_date, self.deadline_time, self.importance) >= \
               (other.deadline_date, other.deadline_time, other.importance)

    def __le__(self, other):
        return (self.deadline_date, self.deadline_time, self.importance) <= \
               (other.deadline_date, other.deadline_time, other.importance)
class period_item:
    """A free time slot on a given day, ordered by (date, begin, end)."""

    def __init__(self, date, begin_time, end_time):
        self.date = date
        self.begin = begin_time
        self.end = end_time

    def _key(self):
        # Single ordering tuple shared by every comparison operator.
        return (self.date, self.begin, self.end)

    def copy(self):
        """Return a new period_item with the same date and boundaries."""
        return period_item(self.date, self.begin, self.end)

    def __gt__(self, other):
        return self._key() > other._key()

    def __eq__(self, other: object) -> bool:
        return self._key() == other._key()

    def __lt__(self, other):
        return self._key() < other._key()

    def __ge__(self, other):
        return self._key() >= other._key()

    def __le__(self, other):
        return self._key() <= other._key()
class my_list(List):
    """A list with queue-like helpers operating on the front element."""

    def __init__(self, list_item):
        super().__init__(list_item)

    def RemoveMin(self):
        # Drop the front element (assumed to be the minimum by callers).
        super().pop(0)

    def Swap(self):
        """Swap the first two elements; return False when there are fewer than two."""
        if len(self) < 2:
            return False
        self[0], self[1] = self[1], self[0]
        return True

    def Peek(self):
        """Return the front element without removing it."""
        return self[0]
if __name__ == "__main__": #testing
    # date, begin time, end time
    general_period_list = [(1,9,12),(2,13,19),(3,12,15)]
    # BUG FIX: the original 8-positional calls landed "exercise"/"academy" in
    # the `min_length` and `type` slots (the signature's `id` parameter shifted
    # everything by one). Bind by keyword so each value hits the intended
    # parameter; min_length keeps its default of 1.
    t1 = task_item("B", 3, 1, 2, 24, id=1, type="exercise", non_consecutive_type=["academy"])
    t4 = task_item("A", 3, 1, 1, 24, id=1, type="academy", non_consecutive_type=[])
    #t3 = task_item("C", 3, 2, 3, 24, id=1, type="academy", non_consecutive_type=[])
    #t2 = task_item("D", 1, 1, 4, 24, id=1, type="exercise", non_consecutive_type=[])
    #T = list([t1,t2,t3,t4])
    print(t1, t4)
    T = list([t1, t4])
    print(sorted(T))
73,201 | alexding1226/ds_final | refs/heads/master | /main.py | from calendar import month
from enum import Flag
from re import T, split
import time
import datetime
from datetime import date
from tkinter import *
from tkinter import font
from tkinter.font import ITALIC
import tkinter.ttk as ttk
from typing import Pattern
from tkcalendar import Calendar
from tkinter import messagebox
#import red_black_tree as rbt
import random
import string
import application
import notes
import schedule as sch
import DataFormat
class Task():
def __init__(self,name,duration,importance,date,type = "work",time_finished = 0) :
self.name = name
self.duration = duration
self.importance = importance
self.type = type
self.date = date
self.time_finished = time_finished
class AllTasks():
def __init__(self,tasks) :
self.tasks = dict()
for task in tasks:
self.tasks[task.name] = task
#self.rbt = rbt.RBTree()
#for task in tasks:
# self.rbt.insert(key=task.name,data=task)
def add(self,task):
self.tasks[task.name] = task
#self.rbt.insert(key=task.name,data = task)
def sort(self ,type, rev = True):
if type == "type":
a = sorted(self.tasks.items(),key = lambda task:task[1].type ,reverse = not (rev))
return a
elif type == "importance":
a = sorted(self.tasks.items(),key = lambda task:task[1].importance ,reverse = (rev))
return a
elif type == "duration":
a = sorted(self.tasks.items(),key = lambda task:task[1].duration ,reverse = (rev))
return a
elif type == "deadline":
a = sorted(self.tasks.items(),key = lambda task:task[1].date ,reverse = not (rev))
return a
elif type == "name":
a = sorted(self.tasks.keys(),reverse = not (rev))
return a
"""
def algorithm(self):
self.today = [Task("a",0.5,3,[7,20]),Task("b",2,2,[8,30])]
self.today[0].whentodo = [7,1,0]
self.today[1].whentodo = [7,1,21]
"""
def delete(self,task):
del self.tasks[task.name]
#self.rbt.delete(key=task.name)
class FinishedTasks():
def __init__(self,tasks) :
self.tasks = tasks
def add(self,task):
self.tasks.append(task)
def sort(self ,type, rev = True):
if type == "type":
self.tasks = sorted(self.tasks,key= lambda task:task.type,reverse = not (rev))
return self.tasks
elif type == "importance":
self.tasks = sorted(self.tasks,key= lambda task:task.importance,reverse = (rev))
return self.tasks
elif type == "duration":
self.tasks = sorted(self.tasks,key= lambda task:task.duration,reverse = (rev))
return self.tasks
elif type == "name":
self.tasks = sorted(self.tasks,key= lambda task:task.name,reverse = not (rev))
return self.tasks
elif type == "deadline":
self.tasks = sorted(self.tasks,key= lambda task:task.date,reverse = not (rev))
return self.tasks
def delete(self,task):
self.tasks.remove(task)
class Data():
def __init__(self,all,finished) :
self.alltasks = all
self.types = ["work","exercise","life"]
self.all_color = [["deep sky blue","light sky blue"],["deep pink","pink"],["green2","palegreen1"],
["yellow","#FFFF99"],["#9966CC","#CC99CC"],["#2376bd","#99c2e4"]]
self.cand_color = [["yellow","#FFFF99"],["#9966CC","#CC99CC"],["#2376bd","#99c2e4"]]
self.typecolor = {"work":["deep sky blue","light sky blue"],"exercise":["deep pink","pink"],"life":["green2","palegreen1"]}
self.finishtasks =finished
self.schedule = []
self.period = {"Monday":[],"Tuesday":[],"Wednesday":[],"Thursday":[],"Friday":[],"Saturday":[],"Sunday":[]}
t = open("data/tasks.txt")
tasks = t.read().splitlines()
for task in tasks:
task_items = task.split("/")
name = task_items[0]
duration = float(task_items[1])
importance = int(task_items[2])
type = task_items[3]
time_finished = float(task_items[4])
date = task_items[5].split(",")
date[0] = int(date[0])
date[1] = int(date[1])
ta = Task(name,duration,importance,date,type,time_finished)
self.add(ta)
f = open("data/finished_tasks.txt")
tasks = f.read().splitlines()
for task in tasks:
task_items = task.split("/")
name = task_items[0]
duration = float(task_items[1])
importance = int(task_items[2])
type = task_items[3]
time_finished = float(task_items[4])
date = task_items[5].split(",")
date[0] = int(date[0])
date[1] = int(date[1])
ta = Task(name,duration,importance,date,type,time_finished)
self.finishtasks.add(ta)
ty = open("data/type.txt")
types = ty.read().splitlines()
for type in types:
type_items = type.split("/")
self.types.append(type_items[0])
self.typecolor[type_items[0]] = [type_items[1],type_items[2]]
p = open("data/period.txt")
days = p.read().splitlines()
for day in days:
day_items = day.split("/")
day_items.pop()
day_name = day_items[0]
for day_period in day_items[1:]:
day_period = day_period.split(",")
day_period[0] = float(day_period[0])
day_period[1] = float(day_period[1])
self.period[day_name].append(day_period)
s = open("data/schedule.txt")
days = s.read().splitlines()
for day in days:
day_result = []
periods = day.split("/")
periods.pop()
date = periods[0].split(",")
date = [int(date[0]),int(date[1])]
day_result.append(date)
for period in periods[1:]:
period_items = period.split(",")
period_items.pop()
period_items[1] = float(period_items[1])
period_items[2] = float(period_items[2])
period_items = tuple(period_items)
day_result.append(period_items)
self.schedule.append(day_result)
self.update_schedule()
def update_schedule(self):
today = datetime.datetime.today()
today_monthday = [today.month,today.day]
if len(self.schedule) >0:
print(self.schedule)
while self.schedule[0][0] != today_monthday:
self.schedule.pop(0)
def finished(self,task):
self.alltasks.delete(task)
self.finishtasks.add(task)
def finished_part(self,task):
task_in_all = self.alltasks.tasks[task[0]]
if task_in_all.time_finished + task[2] - task[1] >= task_in_all.duration:
self.alltasks.delete(task_in_all)
self.finishtasks.add(task_in_all)
else:
task_in_all.time_finished += task[2] - task[1]
def add(self,task):
self.alltasks.add(task)
def scheduling(self):
if len(self.alltasks.tasks)>0:
days = ["Monday","Tuesday","Wednesday","Thursday","Friday","Saturday","Sunday"]
today_weekday = days[datetime.datetime.today().weekday()]
now = datetime.datetime.now()
now_hour = now.hour
now_minute = now.minute
taskslist = list(self.alltasks.tasks.values())
today = datetime.datetime.today().date()
toalgo_task = []
for task in taskslist:
deadline = date(2021,task.date[0],task.date[1])
remainday = (deadline - today).days
new_type_task = DataFormat.task_item(task.name,task.duration-task.time_finished,task.importance,remainday+1,24,1,task.type)
toalgo_task.append(new_type_task)
toalgo_period = []
temp = 0
today_period = self.period[today_weekday]
period_amount = len(today_period)
while temp < period_amount:
if today_period[temp][0] > now_hour:
toalgo_period.append(DataFormat.period_item(1,today_period[temp][0],today_period[temp][1]))
elif today_period[temp][0]//1 == now_hour:
if today_period[temp][0]%1 == 0.5 and now_minute < 30:
toalgo_period.append(DataFormat.period_item(1,today_period[temp][0],today_period[temp][1]))
temp += 1
temp = datetime.datetime.today().weekday()
for i in range(1,10):
index = int((temp+i)%7)
for period in self.period[days[index]]:
toalgo_period.append(DataFormat.period_item(i+1,period[0],period[1]))
algo =sch.schedule(toalgo_task,toalgo_period)
if algo.Detect() == 0:
schedule = algo.Schedule()
for s in schedule:
s.sort(key = lambda task:task[1])
date_for_schedule = datetime.datetime.today().date()
one_day = datetime.timedelta(days=1)
for day in schedule:
monthday = [date_for_schedule.month,date_for_schedule.day]
day.insert(0,monthday)
date_for_schedule += one_day
self.schedule = schedule
else:
self.schedule = ["expire",algo.Detect()]
else:
self.schedule = ["no task"]
def addtype(self,type):
self.types.append(type)
type_color_index = random.randint(0,len(self.cand_color)-1)
type_color = self.cand_color.pop(type_color_index)
self.typecolor[type] = type_color
if len(self.cand_color) == 0:
self.cand_color = self.all_color
def save(self):
type = open("data/type.txt","w")
for key,value in self.typecolor.items():
if key == "work" or key == "exercise" or key == "life":
pass
else:
type.write(key + "/")
type.write(value[0] + "/")
type.write(value[1] + "\n")
type.close()
t = open("data/tasks.txt","w")
for task in self.alltasks.tasks.values():
name = task.name + "/"
duration = str(task.duration) + "/"
type = task.type + "/"
importance = str(task.importance) + "/"
finishedtime = str(task.time_finished) + "/"
deadline = str(task.date[0]) + "," + str(task.date[1]) + "\n"
t.write(name + duration + importance + type + finishedtime + deadline)
t.close()
p = open("data/period.txt","w")
for day,periods in self.period.items():
p.write(day + "/")
for period in periods:
p.write(str(period[0]) + "," + str(period[1]) + "/")
p.write("\n")
p.close()
s = open("data/schedule.txt","w")
if len(self.schedule)>0:
if self.schedule == ["no task"]:
s.write("no task")
elif self.schedule[0] == "expire":
s.write("expire/" + str(self.schedule[1]))
else:
for day in self.schedule:
for period in day:
for item in period:
item = str(item)
s.write(item + ",")
s.write("/")
s.write("\n")
else:
for a in self.schedule:
s.write(a)
s.close()
f = open("data/finished_tasks.txt","w")
for task in self.finishtasks.tasks:
name = task.name + "/"
duration = str(task.duration) + "/"
type = task.type + "/"
importance = str(task.importance) + "/"
finishedtime = str(task.time_finished) + "/"
deadline = str(task.date[0]) + "," + str(task.date[1]) + "\n"
f.write(name + duration + importance + type + finishedtime + deadline)
f.close()
class main(Tk):
def __init__(self,data) :
super().__init__()
self._frame = None
self.geometry("1050x700")
self.duration = 50
self.data = data
change_frame = Frame(self)
change_frame.pack(side="left",fill="y")
self.protocol("WM_DELETE_WINDOW",self.save)
Button(change_frame,text="all tasks",command=lambda:self.switch_frame(AllTasksPage,data)).grid(column=0,row=2,pady=15,padx=15,sticky=N)
Button(change_frame,text="schedule",command=lambda:self.switch_frame(SchedulePage,data)).grid(column=0,row=3,pady=15,padx=15,sticky=N)
Button(change_frame,text = "set period",command=lambda:self.switch_frame(PeriodPage,data)).grid(column=0,row=4,pady=15,padx=15,sticky=N)
Button(change_frame,text="pomodoro",command=pomodoro).grid(column=0,row=5,pady=15,sticky=N)
Button(change_frame,text="notes",command=lambda:self.switch_frame(notes.NotesPage,data)).grid(column=0,row=6,pady=15,sticky=N)
Button(change_frame,text="為你安排行程!",command= self.scheduling).grid(column=0,row = 7,pady=15,sticky=N)
self.switch_frame(AllTasksPage,data)
def switch_frame(self, frame_class,data):
new_frame = frame_class(self,data)
if self._frame is not None:
self._frame.destroy()
self._frame = new_frame
self._frame.pack(side = "left",fill = "both")
def scheduling(self):
self.data.scheduling()
if self.data.schedule[0] == "expire":
today = datetime.datetime.today()
days = datetime.timedelta(days=self.data.schedule[1]-1)
expireday = today + days
expiredate = [expireday.month,expireday.day]
messagebox.showwarning("expire","task expire at day %d/%d"%(expiredate[0],expiredate[1]))
elif self.data.schedule[0] == "no task":
messagebox.showinfo("no task","add a task first")
def save(self):
if messagebox.askokcancel("Quit", "Do you want to quit?"):
self.data.save()
self.destroy()
def pomodoro():
global pomodoro_frame
top = Toplevel()
top.title("pomodoro")
top.geometry("500x500")
top.configure(bg="#ffbc92")
pomodoro_frame = application.PomodoroPage(top)
pomodoro_frame.pack()
#from https://stackoverflow.com/questions/16188420/tkinter-scrollbar-for-frame
class VerticalScrolledFrame(Frame):
"""A pure Tkinter scrollable frame that actually works!
* Use the 'interior' attribute to place widgets inside the scrollable frame
* Construct and pack/place/grid normally
* This frame only allows vertical scrolling
"""
def __init__(self, parent,height,width=None, *args, **kw):
Frame.__init__(self, parent, *args, **kw)
# create a canvas object and a vertical scrollbar for scrolling it
vscrollbar = Scrollbar(self, orient=VERTICAL)
vscrollbar.pack(fill=Y, side=RIGHT, expand=FALSE)
canvas = Canvas(self, bd=0, highlightthickness=0,height=height,
yscrollcommand=vscrollbar.set)
if width:
canvas["width"] = width
canvas.pack(side=LEFT, fill=BOTH, expand=TRUE)
canvas.pack_propagate(0)
vscrollbar.config(command=canvas.yview)
# reset the view
canvas.xview_moveto(0)
canvas.yview_moveto(0)
# create a frame inside the canvas which will be scrolled with it
self.interior = interior = Frame(canvas)
self.interior["width"] = 1000
interior_id = canvas.create_window(0, 0, window=interior,
anchor=NW)
# track changes to the canvas and frame width and sync them,
# also updating the scrollbar
def _configure_interior(event):
# update the scrollbars to match the size of the inner frame
size = (interior.winfo_reqwidth(), interior.winfo_reqheight())
canvas.config(scrollregion="0 0 %s %s" % size)
if interior.winfo_reqwidth() != canvas.winfo_width():
# update the canvas's width to fit the inner frame
canvas.config(width=interior.winfo_reqwidth())
interior.bind('<Configure>', _configure_interior)
def _configure_canvas(event):
if interior.winfo_reqwidth() != canvas.winfo_width():
# update the inner frame's width to fill the canvas
canvas.itemconfigure(interior_id, width=canvas.winfo_width())
canvas.bind('<Configure>', _configure_canvas)
class AllTasksPage(Frame):
def __init__(self,master,data):
Frame.__init__(self, master)
self.data = data
self.master = master
self.alltasks = data.alltasks
self.frame_config = Frame(self)
self.frame_config.grid(row=0,column=0,sticky=W)
self.frame_tasks = VerticalScrolledFrame(self,425)
self.frame_tasks["height"] = 500
self.frame_tasks["width"] = 1000
self.frame_tasks.grid_propagate(0)
self.frame_tasks.grid(row=1,column=0,rowspan=10,pady = 15)
taskrow = 0
if len(self.alltasks.tasks) == 0:
no_task_frame = Frame(self.frame_tasks.interior,width=800,height=25)
no_task_frame.grid(row=0,column=0,pady=5,sticky=W)
no_task_frame.grid_propagate(0)
no_task_label = Label(no_task_frame,text="there is no unfinished task, add one below")
no_task_label.place(anchor="c",relx=.5,rely=.5)
taskrow = 1
for task in self.alltasks.tasks.values():
color = self.data.typecolor[task.type][0]
task_frame =Frame(self.frame_tasks.interior,bg=color)
task_frame.grid(row = taskrow,column = 0, pady=5,sticky=W)
name_label=Label(task_frame,text=task.name,width=20,bg=color)
name_label.grid(row = 0,column=0,padx=20,sticky=W)
name_label.grid_propagate(0)
imp_l = Label(task_frame,text="*" * task.importance,width=5,bg=color)
imp_l.grid_propagate(0)
imp_l.grid(row=0,column=1,padx=20,sticky=W)
du_l = Label(task_frame,text=str(task.time_finished) +"/" + str(task.duration)+"hrs",width=8,bg=color)
du_l.grid(row=0,column=2,padx=20)
du_l.grid_propagate(0)
date_l = Label(task_frame,text="%d/%d"%(task.date[0],task.date[1]),width=20,bg=color)
date_l.grid_propagate(0)
date_l.grid(row=0,column=3,padx=20,sticky=W)
type_l = Label(task_frame,text=task.type,width = 10,bg=color)
type_l.grid(row=0,column=4,padx = 30,sticky=E)
type_l.grid_propagate(0)
task_button = TaskChangeButton(self.frame_tasks.interior,task,taskrow,task_frame,self.data)
task_button.grid(row = taskrow,column = 1)
finished_button = FinishButton(task_frame,task,self.data,self,master)
finished_button["bg"] = color
finished_button.grid(row=0,column=5)
delete_button = Delete_notfinishButton(task_frame,task,self.data,self.master)
delete_button["bg"] = color
delete_button.grid(row =0,column=6,padx=10)
taskrow += 1
self.finished_frame = Frame()
if len(self.data.finishtasks.tasks)>0:
self.finished_frame = Frame(self.frame_tasks.interior)
self.finished_frame.grid(row=taskrow,column=0)
task_frame =Frame(self.finished_frame)
task_frame.grid(row = 0,column = 0, pady=30,sticky=W)
name_label=Label(task_frame,text="finished tasks:")
name_label.grid(row = 0,column=0,padx=20,sticky=W)
name_label.grid_propagate(0)
finished_row = 1
for task in self.data.finishtasks.tasks:
color = self.data.typecolor[task.type][1]
task_frame =Frame(self.finished_frame,width=800,height=25,bg=color)
task_frame.grid(row = finished_row,column = 0, pady=5,sticky=W)
task_frame.grid_propagate(0)
name_label=Label(task_frame,text=task.name,width=20,bg=color)
name_label.grid(row = 0,column=0,padx=20,sticky=W)
name_label.grid_propagate(0)
imp_l = Label(task_frame,text="*" * task.importance,width=5,bg=color)
imp_l.grid_propagate(0)
imp_l.grid(row=0,column=1,padx=20,sticky=W)
du_l = Label(task_frame,text=str(task.duration)+"hrs",width=8,bg=color)
du_l.grid(row=0,column=2,padx=20)
du_l.grid_propagate(0)
date_l = Label(task_frame,text="%d/%d"%(task.date[0],task.date[1]),width=20,bg=color)
date_l.grid_propagate(0)
date_l.grid(row=0,column=3,padx=20,sticky=W)
type_l = Label(task_frame,text=task.type,width = 10,bg=color)
type_l.grid(row=0,column=4,padx = 30,sticky=W)
type_l.grid_propagate(0)
delete_button = Delete_finishButton(task_frame,task,self.data,self.master)
delete_button["bg"] = color
delete_button.grid(row =0,column=5,padx=10)
finished_row += 1
self.add_task_button = Button(self,text="add task",command=self.add_task,font=('Corbel',16))
self.add_task_button.grid(row=30,column=0,pady=30)
sort_combobox = ttk.Combobox(self.frame_config,values=["name","importance","duration","deadline","type"],state="readonly",width=10)
sort_combobox.current(0)
sort_combobox.grid(row=0,column=1,padx = 10,pady=10)
Label(self.frame_config,text="sort by:").grid(row=0,column=0)
rev = BooleanVar()
check_reverse = Checkbutton(self.frame_config,text="reverse",variable=rev,onvalue=True,offvalue=False)
check_reverse.grid(column = 2,row = 0,padx=10,pady=10)
Button(self.frame_config,text="confirm",command = lambda: self.sort(sort_combobox.get(),rev.get())).grid(column=3,row=0,padx=10,pady=10)
Label(self,text="Name").place(x=68,y=40)
Label(self,text="importance").place(x=170,y=40)
Label(self,text="time").place(x=300,y=40)
Label(self,text="deadline").place(x=430,y=40)
Label(self,text="type").place(x=605,y=40)
def sort(self, type, rev):
    """Rebuild both task lists sorted by the given key.

    Args:
        type: sort key -- "name", "importance", "duration", "deadline" or
            "type" (the combobox values).
        rev: True to reverse the sort order.
    """
    self.frame_tasks.destroy()
    self.frame_tasks = VerticalScrolledFrame(self, 425)
    self.frame_tasks.grid(row=1, column=0, pady=15)
    tasks = self.data.alltasks.sort(type, not rev)
    taskrow = 0
    # FIX: was `self.alltasks.tasks` -- the tasks container lives on
    # self.data (see the finished-task branch below), so sorting an empty
    # list raised AttributeError.
    if len(self.data.alltasks.tasks) == 0:
        no_task_frame = Frame(self.frame_tasks.interior, width=800, height=25)
        no_task_frame.grid(row=0, column=0, pady=5, sticky=W)
        no_task_frame.grid_propagate(0)
        no_task_label = Label(no_task_frame, text="there is no unfinished task, add one below")
        no_task_label.place(anchor="c", relx=.5, rely=.5)
        taskrow = 1
    for task in tasks:
        # Sorting by any key other than "name" yields (key, task) pairs.
        if type == "name":
            task = task
        else:
            task = task[1]
        color = self.data.typecolor[task.type][0]
        task_frame = Frame(self.frame_tasks.interior, bg=color)
        task_frame.grid(row=taskrow, column=0, pady=5, sticky=W)
        name_label = Label(task_frame, text=task.name, width=20, bg=color)
        name_label.grid(row=0, column=0, padx=20, sticky=W)
        name_label.grid_propagate(0)
        imp_l = Label(task_frame, text="*" * task.importance, width=5, bg=color)
        imp_l.grid_propagate(0)
        imp_l.grid(row=0, column=1, padx=20, sticky=W)
        du_l = Label(task_frame, text=str(task.time_finished) + "/" + str(task.duration) + "hrs", width=8, bg=color)
        du_l.grid(row=0, column=2, padx=20)
        du_l.grid_propagate(0)
        date_l = Label(task_frame, text="%d/%d" % (task.date[0], task.date[1]), width=20, bg=color)
        date_l.grid_propagate(0)
        date_l.grid(row=0, column=3, padx=20, sticky=W)
        type_l = Label(task_frame, text=task.type, width=10, bg=color)
        type_l.grid(row=0, column=4, padx=30, sticky=E)
        type_l.grid_propagate(0)
        task_button = TaskChangeButton(self.frame_tasks.interior, task, taskrow, task_frame, self.data)
        task_button.grid(row=taskrow, column=1)
        finished_button = FinishButton(task_frame, task, self.data, self, self.master)
        finished_button["bg"] = color
        finished_button.grid(row=0, column=5)
        delete_button = Delete_notfinishButton(task_frame, task, self.data, self.master)
        delete_button.grid(row=0, column=6, padx=10)
        taskrow += 1
    self.finished_frame = Frame()
    if len(self.data.finishtasks.tasks) > 0:
        self.finished_frame = Frame(self.frame_tasks.interior)
        self.finished_frame.grid(row=taskrow, column=0)
        task_frame = Frame(self.finished_frame)
        task_frame.grid(row=0, column=0, pady=30, sticky=W)
        name_label = Label(task_frame, text="finished tasks:")
        name_label.grid(row=0, column=0, padx=20, sticky=W)
        name_label.grid_propagate(0)
        finished_row = 1
        # NOTE(review): unlike alltasks.sort above, the result here is used
        # directly (no (key, task) unwrap) -- verify finishtasks.sort's
        # return convention.
        for task in self.data.finishtasks.sort(type, not rev):
            color = self.data.typecolor[task.type][1]
            task_frame = Frame(self.finished_frame, width=800, height=25, bg=color)
            task_frame.grid(row=finished_row, column=0, pady=5, sticky=W)
            task_frame.grid_propagate(0)
            name_label = Label(task_frame, text=task.name, width=20, bg=color)
            name_label.grid(row=0, column=0, padx=20, sticky=W)
            name_label.grid_propagate(0)
            imp_l = Label(task_frame, text="*" * task.importance, width=5, bg=color)
            imp_l.grid_propagate(0)
            imp_l.grid(row=0, column=1, padx=20, sticky=W)
            du_l = Label(task_frame, text=str(task.duration) + "hrs", width=8, bg=color)
            du_l.grid(row=0, column=2, padx=20)
            du_l.grid_propagate(0)
            date_l = Label(task_frame, text="%d/%d" % (task.date[0], task.date[1]), width=20, bg=color)
            date_l.grid_propagate(0)
            date_l.grid(row=0, column=3, padx=20, sticky=W)
            type_l = Label(task_frame, text=task.type, width=10, bg=color)
            type_l.grid(row=0, column=4, padx=30, sticky=W)
            type_l.grid_propagate(0)
            delete_button = Delete_finishButton(task_frame, task, self.data, self.master)
            delete_button["bg"] = color
            delete_button.grid(row=0, column=5, padx=10)
            finished_row += 1
    self.add_task_button = Button(self, text="add task", command=self.add_task)
    self.add_task_button.grid(row=30, column=0, pady=30)
    # Column headers over the rebuilt list.
    Label(self, text="Name").place(x=68, y=40)
    Label(self, text="importance").place(x=170, y=40)
    Label(self, text="time").place(x=300, y=40)
    Label(self, text="deadline").place(x=430, y=40)
    Label(self, text="type").place(x=605, y=40)
def add_task(self):
    """Swap the 'add task' button for an inline entry form: name, importance
    scale, duration (in 30-minute units), deadline calendar and type combo,
    plus a confirm button wired to add_confirm()."""
    self.add_task_button.destroy()
    frame_addtask = Frame(self)
    frame_addtask.grid(row=30,column=0,pady=30,sticky=W)
    e_name = Entry(frame_addtask,width=20)
    e_name.insert(0,"Name")
    e_name.grid(column=0,row=0,padx=10)
    scale_importance = Scale(frame_addtask,from_= 1 ,to=5,orient=HORIZONTAL,length=50,width=10)
    scale_importance.grid(column=1,row=0,padx=20)
    # Duration is entered in 30-minute units (see the "*30min" label below);
    # add_confirm() divides by 2 to get hours.
    e_duration = Entry(frame_addtask,width=3)
    e_duration.insert(0,1)
    e_duration.grid(column=2,row=0,padx=10)
    Label(frame_addtask,text="*30min").grid(row=0,column=3)
    now = datetime.datetime.now()
    cal = Calendar(frame_addtask,selectmode = "day",year = now.year,month = now.month,day = now.day,date_pattern = "yyyy/MM/dd")
    cal.grid(row = 0,column = 4,padx = 20)
    # Copy the known types and offer a "new" slot the user can overtype.
    types=self.data.types[:]
    types.append("new")
    combo_type = ttk.Combobox(frame_addtask,values=types,width=10)
    combo_type.current(0)
    combo_type.grid(column=5,row=0,padx=10)
    confirm_button = Button(self,text="confirm"
        ,command=lambda: self.add_confirm(frame_addtask,e_name,scale_importance,e_duration,cal,combo_type))
    confirm_button.grid(row=30,column=1,pady=30,sticky=W)
    # Kept so add_confirm() can destroy it when the form is submitted.
    self.confirm_button = confirm_button
def add_confirm(self,frame,name,imp,dur,cal,type):
    """Validate the add-task form, store the new task, and update the UI.

    Args:
        frame: the inline form frame (destroyed afterwards).
        name: Entry holding the task name.
        imp: Scale (1-5) holding the importance.
        dur: Entry holding the duration in 30-minute units.
        cal: Calendar holding the deadline.
        type: Combobox holding the task type.
    """
    # date_pattern is "yyyy/MM/dd", so ymd = [year, month, day].
    ymd = cal.get_date().split("/")
    # NOTE(review): membership test against the tasks container -- assumes it
    # supports `in` by task name; confirm against the AllTasks implementation.
    if name.get() not in self.data.alltasks.tasks:
        # Duration /2 converts 30-min units to hours; stored date is [month, day].
        task = Task(name.get(),int(dur.get())/2,imp.get(),[int(ymd[1]),int(ymd[2])],type.get())
        self.data.alltasks.add(task)
        if type.get() not in self.data.types:
            self.data.addtype(type.get())
        color = self.data.typecolor[task.type][0]
        # Tear down the form and restore the "add task" button.
        frame.destroy()
        self.confirm_button.destroy()
        self.add_task_button = Button(self,text="add task",command=self.add_task,font=('Corbel',16))
        self.add_task_button.grid(row=30,column=0,pady=30)
        # Append a row for the new task at the bottom of the unfinished list.
        task_frame =Frame(self.frame_tasks.interior,bg=color)
        task_frame.grid(row = len(self.data.alltasks.tasks)-1,column = 0, pady=5,sticky=W)
        name_label=Label(task_frame,text=task.name,width=20,bg=color)
        name_label.grid(row = 0,column=0,padx=20,sticky=W)
        name_label.grid_propagate(0)
        imp_l = Label(task_frame,text="*" * task.importance,width=5,bg=color)
        imp_l.grid_propagate(0)
        imp_l.grid(row=0,column=1,padx=20,sticky=W)
        du_l = Label(task_frame,text=str(task.time_finished) +"/" + str(task.duration)+"hrs",width=8,bg=color)
        du_l.grid(row=0,column=2,padx=20)
        du_l.grid_propagate(0)
        type_l = Label(task_frame,text=task.type,width = 10,bg=color)
        type_l.grid(row=0,column=4,padx = 30,sticky=E)
        type_l.grid_propagate(0)
        date_l = Label(task_frame,text="%d/%d"%(task.date[0],task.date[1]),width=20,bg=color)
        date_l.grid_propagate(0)
        date_l.grid(row=0,column=3,padx=20,sticky=W)
        task_button = TaskChangeButton(self.frame_tasks.interior,task,len(self.data.alltasks.tasks)-1,task_frame,self.data)
        task_button.grid(row = len(self.data.alltasks.tasks)-1,column = 1)
        finished_button = FinishButton(task_frame,task,self.data,self,self.master)
        finished_button.grid(row=0,column=5)
        finished_button["bg"] = color
        delete_button = Delete_notfinishButton(task_frame,task,self.data,self.master)
        delete_button.grid(row =0,column=6,padx=10)
        delete_button["bg"] = color
        # Re-create the finished-tasks section below the new row.
        taskrow = len(self.data.alltasks.tasks)
        self.finished_frame.destroy()
        if len(self.data.finishtasks.tasks)>0:
            self.finished_frame = Frame(self.frame_tasks.interior)
            self.finished_frame.grid(row=taskrow,column=0,sticky=W)
            task_frame =Frame(self.finished_frame)
            task_frame.grid(row = 0,column = 0, pady=30,sticky=W)
            name_label=Label(task_frame,text="finished tasks:")
            name_label.grid(row = 0,column=0,padx=20,sticky=W)
            name_label.grid_propagate(0)
            finished_row = 1
            for task in self.data.finishtasks.tasks:
                color = self.data.typecolor[task.type][1]
                task_frame =Frame(self.finished_frame,bg=color)
                task_frame.grid(row = finished_row,column = 0, pady=5,sticky=W)
                name_label=Label(task_frame,text=task.name,width=20,bg=color)
                name_label.grid(row = 0,column=0,padx=20,sticky=W)
                name_label.grid_propagate(0)
                imp_l = Label(task_frame,text="*" * task.importance,width=5,bg=color)
                imp_l.grid_propagate(0)
                imp_l.grid(row=0,column=1,padx=20,sticky=W)
                du_l = Label(task_frame,text=str(task.time_finished) +"/" + str(task.duration)+"hrs",width=8,bg=color)
                du_l.grid(row=0,column=2,padx=20)
                du_l.grid_propagate(0)
                date_l = Label(task_frame,text="%d/%d"%(task.date[0],task.date[1]),width=20,bg=color)
                date_l.grid_propagate(0)
                date_l.grid(row=0,column=3,padx=20,sticky=W)
                type_l = Label(task_frame,text=task.type,width = 10,bg=color)
                type_l.grid(row=0,column=4,padx = 30,sticky=E)
                type_l.grid_propagate(0)
                delete_button = Delete_finishButton(task_frame,task,self.data,self.master)
                delete_button["bg"] = color
                delete_button.grid(row =0,column=5,padx=10)
                finished_row += 1
    else:
        # Duplicate name: restore the UI and warn the user.
        name = name.get()
        frame.destroy()
        self.confirm_button.destroy()
        self.add_task_button = Button(self,text="add task",command=self.add_task,font=('Corbel',16))
        self.add_task_button.grid(row=30,column=0,pady=30)
        messagebox.showwarning("Error","Name \"" + name + "\" already exist")
class FinishButton(Button):
    """Button that marks its task finished and re-renders the task page."""

    def __init__(self, frame, task, data, master, grandmaster):
        """Create the button inside *frame*, bound to *task*."""
        super().__init__(frame)
        self.configure(text="finish", command=self.finish)
        self.root = frame
        self.task = task
        self.data = data
        self.master = master
        self.grandmaster = grandmaster

    def finish(self):
        """Move the task into the finished collection and refresh the page."""
        self.data.finished(self.task)
        self.grandmaster.switch_frame(AllTasksPage, self.data)
class Delete_notfinishButton(Button):
    """Button that removes an unfinished task and refreshes the page."""

    def __init__(self, frame, task, data, grandmaster):
        """Create the delete button, colored like the task's type."""
        super().__init__(frame)
        self.task = task
        self.data = data
        self.grandmaster = grandmaster
        self.configure(text="delete", command=self.delete,
                       bg=data.typecolor[task.type][0])

    def delete(self):
        """Drop the task from the unfinished list and re-render the page."""
        self.data.alltasks.delete(self.task)
        self.grandmaster.switch_frame(AllTasksPage, self.data)
class Delete_finishButton(Button):
    """Button that removes a task from the finished list and refreshes."""

    def __init__(self, frame, task, data, grandmaster):
        """Create the delete button, colored like the task's type."""
        super().__init__(frame)
        self.task = task
        self.data = data
        self.grandmaster = grandmaster
        self.configure(text="delete", command=self.delete,
                       bg=data.typecolor[task.type][0])

    def delete(self):
        """Drop the task from the finished collection and re-render the page."""
        self.data.finishtasks.delete(self.task)
        self.grandmaster.switch_frame(AllTasksPage, self.data)
class TaskChangeButton(Button):
    """Two-state button on each unfinished-task row: "change" swaps the row
    for an inline edit form; "confirm" applies the edits and rebuilds the
    row. State is toggled by rewriting its own text/command."""

    def __init__(self,frame,task,taskrow,task_frame,data):
        """Remember the row's widgets and position so the row can be
        destroyed and rebuilt in place."""
        super().__init__(frame)
        self.root = frame
        self.task = task
        self.taskrow = taskrow
        self.task_frame = task_frame
        self.data = data
        self["command"] = self.change
        self["text"] = "change"

    def change(self):
        """Replace the task row with an edit form pre-filled from the task."""
        self["text"] = "confirm"
        self["command"] = self.confirm
        self.task_frame.destroy()
        change_frame = Frame(self.root)
        change_frame.grid(column=0,row = self.taskrow,sticky=W)
        e_name = Entry(change_frame,width=20)
        e_name.insert(0,self.task.name)
        e_name.grid(column=0,row=0,rowspan=2,padx=10)
        scale_importance = Scale(change_frame,from_= 1 ,to=5,orient=HORIZONTAL,length=50)
        scale_importance.set(self.task.importance)
        scale_importance.grid(column=1,row=0,padx=20)
        # Duration is edited in 30-minute units, hence the *2 here and the
        # /2 in confirm().
        e_duration = Entry(change_frame,width=5)
        e_duration.insert(0,int(self.task.duration*2))
        e_duration.grid(column=2,row=0,rowspan=2,padx=10)
        Label(change_frame,text="*30mins").grid(column=3,row=0,rowspan=2)
        now = datetime.datetime.now()
        cal = Calendar(change_frame,selectmode = "day",year = now.year,month = self.task.date[0],day = self.task.date[1],date_pattern = "yyyy/MM/dd")
        cal.grid(column = 4,row = 0,padx = 10)
        types=self.data.types[:]
        types.append("new")
        current_type = types.index(self.task.type)
        combo_type = ttk.Combobox(change_frame,values=types,width=10)
        combo_type.current(current_type)
        combo_type.grid(column=5,row=0,padx=10)
        # Keep handles so confirm() can read the edited values.
        self.name = e_name
        self.importance = scale_importance
        self.duration = e_duration
        self.frame = change_frame
        self.cal = cal
        self.type = combo_type

    def confirm(self):
        """Apply the edit form to the task (re-inserting it so sort keys
        update), then rebuild the normal row widgets."""
        name = self.name.get()
        old_name = self.task.name
        if name not in self.data.alltasks.tasks or name == old_name:
            # Delete + re-add so the container re-indexes the task under its
            # (possibly new) name and attributes.
            self.data.alltasks.delete(self.task)
            self.task.name = name
            self.task.importance = self.importance.get()
            self.task.duration = int(self.duration.get())/2
            self.task.type = self.type.get()
            if self.task.type not in self.data.types:
                self.data.addtype(self.task.type)
            # date_pattern is "yyyy/MM/dd": keep [month, day].
            ymd = self.cal.get_date().split("/")
            self.task.date = [int(ymd[1]),int(ymd[2])]
            self.data.alltasks.add(self.task)
        else:
            messagebox.showwarning("Error","Name \"" + name + "\" already exist")
        self.frame.destroy()
        color = self.data.typecolor[self.task.type][0]
        task_frame =Frame(self.root,bg=color)
        self.task_frame = task_frame
        task_frame.grid(row = self.taskrow,column = 0, pady=5,sticky=W)
        name_label=Label(task_frame,text=self.task.name,width=20,bg=color)
        name_label.grid(row = 0,column=0,padx=20,sticky=W)
        name_label.grid_propagate(0)
        imp_l = Label(task_frame,text="*" * self.task.importance,width=5,bg=color)
        imp_l.grid_propagate(0)
        imp_l.grid(row=0,column=1,padx=20,sticky=W)
        du_l = Label(task_frame,text=str(self.task.time_finished) +"/" + str(self.task.duration)+"hrs",width=8,bg=color)
        du_l.grid(row=0,column=2,padx=20)
        du_l.grid_propagate(0)
        date_l =Label(task_frame,text="%d/%d"%(self.task.date[0],self.task.date[1]),width=20,bg=color)
        date_l.grid(row=0,column=3,padx=20,sticky=W)
        date_l.grid_propagate(0)
        type_l = Label(task_frame,text=self.task.type,width = 10,bg=color)
        type_l.grid(row=0,column=4,padx = 30,sticky=E)
        type_l.grid_propagate(0)
        # NOTE(review): climbs five .master levels to reach the object that
        # owns switch_frame -- fragile if widget nesting changes; verify.
        finished_button = FinishButton(task_frame,self.task,self.data,self,self.master.master.master.master.master)
        finished_button["bg"] = color
        finished_button.grid(row=0,column=5)
        delete_button = Delete_notfinishButton(task_frame,self.task,self.data,self.master.master.master.master.master)
        delete_button["bg"] = color
        delete_button.grid(row =0,column=6,padx=10)
        # Flip back to "change" mode for the next edit.
        self["text"] = "change"
        self["command"] = self.change
class SchedulePage(Frame):
    """Page showing today's timeline next to the full multi-day schedule."""

    def __init__(self, master, data):
        """Build the page, or show a hint label when no schedule exists yet."""
        Frame.__init__(self, master)
        self.data = data
        if self.data.schedule != []:
            for frame_cls in (TodayFrame, ScheduleFrame):
                frame_cls(self, data).pack(side="left")
        else:
            Label(self,text="push the buttom \"為你安排行程\" at left first").pack()
class TodayFrame(VerticalScrolledFrame):
    """Scrollable column rendering today's schedule (data.schedule[0]) as a
    timeline of TaskFrame blocks with RestFrame gaps in between."""

    def __init__(self,master,data):
        """Lay out today's tasks top to bottom, inserting a RestFrame for
        every gap between consecutive scheduled slots."""
        VerticalScrolledFrame.__init__(self, master,700,500)
        self.data = data
        self.master = master
        # Entry 0 of each day's list is skipped -- presumably a header
        # element; the rest are (name, begin, end) slots. TODO confirm.
        self.tasks = data.schedule[0][1:]
        now = datetime.datetime.now()
        self.now = now
        self.now_hm = [now.hour,now.minute]
        Label(self.interior,text="today:",font=('Helvetica', 20, ITALIC)).pack(side = "top",anchor=W,padx=30)
        if len(self.tasks) == 0:
            l = Label(self.interior,text="today has no work",width=50)
            l.pack_propagate(0)
            l.pack()
        if len(self.tasks)>0:
            # Track the previous slot's end; any jump before the next slot's
            # begin is rendered as a rest block.
            endtime = self.tasks[0][1]
            for task in self.tasks:
                if task[1] != endtime:
                    rest_time = task[1] - endtime
                    rest_frame = RestFrame(self.interior,data,rest_time,endtime)
                    rest_frame.pack()
                task_frame = TaskFrame(self.interior,data,task)
                task_frame.pack(pady=1)
                endtime =task[2]
class TaskFrame(Frame):
    """One scheduled slot in today's timeline: a start-time label, a colored
    canvas whose height encodes the duration, a "now" marker while the slot
    is in progress, and a "finished" checkbox once it has passed."""

    def __init__(self, master, data, task):
        """Render one schedule entry.

        Args:
            master: parent widget (the scrolled-frame interior).
            data: shared application Data object.
            task: (name, begin, end) tuple, times in hours with .5 steps; a
                4th element, when present, is the stored "finished" color.
        """
        Frame.__init__(self, master)
        self.data = data
        self.master = master
        len_hour = 50  # canvas pixels per hour
        self.task = task
        # Start minute is encoded as 3 (meaning :30) or 0 (meaning :00) so
        # it can be rendered with the "%d0" format below.
        if task[1] % 1 == 0.5:
            starttime_min = 3
        else:
            starttime_min = 0
        starttime_hour = task[1] // 1
        now = datetime.datetime.now()
        now_hour = now.hour
        now_minute = now.minute
        # Classify the slot relative to "now": not started / in progress /
        # already over.
        if now_hour < starttime_hour:
            done = False
            doing = False
        elif now_hour == starttime_hour:
            if now_minute < 30:
                if starttime_min == 3:
                    done = False
                    doing = False
                else:
                    done = False
                    doing = True
            else:
                if starttime_min == 3:
                    done = False
                    doing = True
                else:
                    # FIX: `task` is a tuple, so the original `task.duration`
                    # raised AttributeError here; the slot length is
                    # end - begin (RestFrame tests its duration the same way).
                    if task[2] - task[1] == 0.5:
                        done = True
                        doing = False
                    else:
                        done = False
                        doing = True
        else:  # now_hour > starttime_hour
            endtime = task[2]
            endtime_hour = endtime // 1
            if endtime % 1 == 0.5:
                endtime_min = 30
            else:
                endtime_min = 0
            if now_hour < endtime_hour:
                done = False
                doing = True
            elif now_hour == endtime_hour:
                if now_minute < endtime_min:
                    done = False
                    doing = True
                else:
                    done = True
                    doing = False
            else:
                done = True
                doing = False
        # Pick the display color: index 1 of typecolor once done, index 0
        # while pending, or the color stored on the tuple itself when the
        # slot was explicitly marked finished.
        if done:
            if len(task) == 3:
                color = data.typecolor[data.alltasks.tasks[task[0]].type][1]
            else:
                color = task[3]
        else:
            if len(task) == 3:
                color = data.typecolor[data.alltasks.tasks[task[0]].type][0]
            else:
                color = task[3]
        starttime = "%d:%d0" % (starttime_hour, starttime_min)
        time_frame = Frame(self)
        time_frame.grid(row=0, column=0, sticky=N)
        l = Label(time_frame, text=starttime, width=5, height=1)
        l.pack(side="top")
        l.pack_propagate(0)
        task_canvas = Canvas(self, bg=color, width=300, height=(task[2] - task[1]) * len_hour)
        task_canvas.grid(row=0, column=1)
        label_task = Label(task_canvas, text=task[0], bg=color, font=('Helvetica', 22, "bold"), width=20, height=20)
        label_task.place(anchor="c", relx=.5, rely=.5)
        if doing:
            # Hours elapsed since the slot started (start minutes use the
            # 3/0 encoding, hence * 10 to get real minutes).
            passtime = ((now_hour * 60 + now_minute) - (starttime_hour * 60 + starttime_min * 10)) / 60
            y = passtime * len_hour
            now_label = Label(task_canvas, text="now", bg=color)
            now_label.place(x=10, y=y - 10, in_=task_canvas)
            task_canvas.create_line(50, y, 400, y, width=3)
        # FIX: the original stored the IntVar *class* (`self.var = IntVar`),
        # not an instance, so the checkbox had no usable backing variable.
        self.var = IntVar()
        # NOTE(review): `task[0] == "a"` looks like a leftover debug hook --
        # it force-shows the checkbox for a task literally named "a"; verify.
        if done or task[0] == "a":
            self.check_done = Checkbutton(self, text="finished", command=self.finished, variable=self.var)
            self.check_done.grid(row=0, column=2)
            if len(task) == 4:
                # Already marked finished earlier: show it checked and locked.
                self.check_done.select()
                self.check_done.config(state=DISABLED)
        else:
            # Spacer keeping rows aligned while no checkbox is shown.
            pad_frame = Frame(self, width=71, highlightthickness=5)
            pad_frame.grid(row=0, column=2)

    def finished(self):
        """Mark this slot done: store its 'done' color in today's schedule,
        lock the checkbox, record partial progress, and re-render the page."""
        # FIX: the original read the module-level global `data` here; use the
        # Data object this frame was constructed with instead.
        color = self.data.typecolor[self.data.alltasks.tasks[self.task[0]].type][1]
        temp = 0
        for task in self.data.schedule[0][1:]:
            if task[1] == self.task[1]:
                # Extend the tuple with the color so a rebuild keeps the state.
                task = task + (color,)
                self.data.schedule[0][temp + 1] = task
                break
            temp += 1
        self.check_done.config(state=DISABLED)
        self.data.finished_part(self.task)
        # NOTE(review): climbs five .master levels to reach the app's
        # switch_frame -- fragile if widget nesting changes.
        self.master.master.master.master.master.switch_frame(SchedulePage, self.data)
class RestFrame(Frame):
    """A gap ("Rest") block in today's timeline between two scheduled slots."""

    def __init__(self,master,data,duration,starttime):
        """Render a rest block.

        Args:
            master: parent widget.
            data: shared application Data object (stored, not otherwise used).
            duration: gap length in hours (.5 steps).
            starttime: gap start in hours (.5 steps).
        """
        Frame.__init__(self,master)
        self.data = data
        len_hour = 50  # canvas pixels per hour
        self.duration = duration
        # Start minute encoded as 3 (=:30) or 0 (=:00) for the "%d0" format.
        if starttime %1 == 0.5:
            starttime_min = 3
        else:
            starttime_min = 0
        starttime_hour = starttime // 1
        now = datetime.datetime.now()
        now_hour = now.hour
        now_minute = now.minute
        # Classify the gap relative to "now" (same cascade as TaskFrame):
        # not started / in progress / already over.
        if now_hour < starttime_hour:
            done = False
            doing = False
        elif now_hour == starttime_hour:
            if now_minute <30:
                if starttime_min == 3:
                    done = False
                    doing = False
                else:
                    done = False
                    doing = True
            else:
                if starttime_min == 3:
                    done = False
                    doing = True
                else:
                    if duration == 0.5:
                        done = True
                        doing = False
                    else:
                        done = False
                        doing = True
        else: #now_hour > starttime_hour
            endtime = starttime+duration
            endtime_hour = endtime//1
            if endtime%1 == 0.5:
                endtime_min = 30
            else:
                endtime_min = 0
            if now_hour < endtime_hour:
                done = False
                doing = True
            elif now_hour == endtime_hour:
                if now_minute < endtime_min:
                    done = False
                    doing = True
                else:
                    done = True
                    doing = False
            else:
                done = True
                doing = False
        starttime = "%d:%d0"%(starttime_hour,starttime_min)
        time_frame = Frame(self)
        time_frame.grid(row=0,column=0,sticky=N+W)
        Label(time_frame,text=starttime).pack(side="top")
        task_canvas = Canvas(self,width=300,height=duration*len_hour)
        task_canvas.grid(row=0,column =1,sticky=W)
        rest_label = Label(task_canvas,text="Rest",font=('Helvetica', 22, "bold"),width=20)
        rest_label.place(anchor="c",in_=task_canvas,relx=.5,rely = .5)
        if doing:
            # Hours elapsed since the gap started (3/0 minute encoding * 10).
            passtime = ((now_hour*60 + now_minute)-(starttime_hour*60+starttime_min*10))/60
            y = passtime * len_hour
            task_canvas.create_line(50,y,400,y,width=3)
            now_label = Label(task_canvas,text="now")
            now_label.place(x=10,y=y-10,in_=task_canvas)
        # Spacer matching the checkbox column of TaskFrame rows.
        pad_frame = Frame(self,width=71,highlightthickness=5)
        pad_frame.grid(row = 0,column=2)
class ScheduleFrame(VerticalScrolledFrame):
    """Scrollable overview of the whole schedule: one DayFrame per day,
    dated starting from today."""

    def __init__(self, master, data):
        """Build the day-by-day overview from data.schedule."""
        VerticalScrolledFrame.__init__(self, master, 700)
        self.master = master
        self.data = data
        Label(self.interior, text="all:", font=('Helvetica', 20, ITALIC)).pack(anchor=W, padx=25)
        step = datetime.timedelta(days=1)
        current = datetime.date.today()
        for day in self.data.schedule:
            if day:
                # Element 0 of each day list is skipped, as elsewhere.
                DayFrame(self.interior, data, day[1:], current).pack(pady=20)
            current = current + step
class DayFrame(Frame):
    """One day's worth of scheduled slots in the overview: a month/day label
    plus a colored row per (name, begin, end) slot."""

    def __init__(self,master,data,tasks,date):
        """Render the day's slots; renders nothing when *tasks* is empty.

        Args:
            master: parent widget.
            data: shared application Data object.
            tasks: list of (name, begin, end[, color]) tuples for this day.
            date: datetime.date of this day.
        """
        Frame.__init__(self,master)
        self.master = master
        self.data = data
        if len(tasks)>0:
            date = "%d/%d"%(date.month,date.day)
            date_frame = Frame(self)
            date_frame.grid(row=0,column=0,sticky=N)
            Label(date_frame,text=date,font=('Helvetica', 13, "bold")).pack()
            tasks_frame = Frame(self)
            tasks_frame.grid(row=0,column=1)
            row = 0
            for task in tasks:
                # Times are float hours; .5 fraction means half past.
                starttime = task[1]
                start_hour = str(int(starttime//1))
                if starttime%1 == 0:
                    starttime_minute = "00"
                else:
                    starttime_minute = "30"
                endtime = task[2]
                endtime_hour = str(int(endtime//1))
                if endtime%1 == 0:
                    endtime_minute = "00"
                else:
                    endtime_minute = "30"
                time_label = Label(tasks_frame,text=start_hour+ ":" + starttime_minute + "~" + endtime_hour + ":" + endtime_minute)
                time_label.grid(row=row,column=0,sticky=W)
                # A 4th tuple element, when present, is the stored "finished"
                # color; otherwise look the color up by task type.
                if len(task) == 3:
                    color = self.data.typecolor[data.alltasks.tasks[task[0]].type][0]
                else:
                    color = task[3]
                task_name_frame = Frame(tasks_frame,bg=color,width=300,height=20)
                task_name_frame.grid(row=row,column=1,sticky=W)
                task_name_frame.grid_propagate(0)
                task_label = Label(task_name_frame,text=task[0],bg = color,font = ('Helvetica', 15, "bold"))
                task_label.place(anchor="c",relx = .5,rely = .5)
                row += 1
class PeriodPage(Frame):
    """Page listing each weekday's free periods plus a form to add more."""

    def __init__(self, master, data):
        """Render one labelled row per weekday (periods shown as "H:MM~H:MM"
        with a delete button), followed by the add-period form."""
        Frame.__init__(self, master)
        self.data = data
        self.period = self.data.period
        days = ["Monday","Tuesday","Wednesday","Thursday","Friday","Saturday","Sunday"]
        row = 0
        for day in days:
            day_frame = Frame(self)
            day_frame.grid(row=row, column=0, pady=25, sticky=W)
            day_label = Label(day_frame, text=day + ":", font=('Microsoft Sans Serif', 16), width=10)
            day_label.pack(side="left")
            day_label.pack_propagate(0)
            period_text = []
            if len(self.period[day]) > 0:
                for period in self.period[day]:
                    # Times are float hours; a .5 fraction means half past.
                    startime = period[0]
                    starthour = str(int(startime // 1))
                    if startime % 1 == 0.5:
                        startmin = "30"
                    else:
                        startmin = "00"
                    endtime = period[1]
                    endhour = str(int(endtime // 1))
                    if endtime % 1 == 0.5:
                        endmin = "30"
                    else:
                        endmin = "00"
                    period_text.append(starthour + ":" + startmin + "~" + endhour + ":" + endmin)
            period_text = " ".join(period_text)
            period_label = Label(day_frame, text=period_text, font=('Microsoft Sans Serif', 16), width=60, anchor=W)
            period_label.pack(side="left")
            period_label.pack_propagate(0)
            delete_button = DeletePeriodButton(day_frame, data, day)
            delete_button.pack(side="right")
            row += 1
        # Add-period form: weekday combobox, HH:MM ~ HH:MM, and an add button.
        add_period_frame = Frame(self)
        add_period_frame.grid(row=row, column=0, sticky=W, pady=25)
        Label(add_period_frame, text="add period:", font=('Microsoft Sans Serif', 16), fg="red").grid(row=0, column=0, sticky=W)
        day_combo = ttk.Combobox(add_period_frame, values=days, state="readonly", width=10)
        day_combo.current(0)
        day_combo.grid(row=0, column=1, padx=10)
        hour_start_entry = Entry(add_period_frame, width=5)
        hour_end_entry = Entry(add_period_frame, width=5)
        min_start_combo = ttk.Combobox(add_period_frame, values=["00", "30"], state="readonly", width=5)
        min_start_combo.current(0)
        min_end_combo = ttk.Combobox(add_period_frame, values=["00", "30"], state="readonly", width=5)
        min_end_combo.current(0)
        hour_start_entry.grid(row=0, column=2)
        Label(add_period_frame, text=":").grid(row=0, column=3)
        min_start_combo.grid(row=0, column=4)
        Label(add_period_frame, text="~").grid(row=0, column=5)
        hour_end_entry.grid(row=0, column=6)
        Label(add_period_frame, text=":").grid(row=0, column=7)
        min_end_combo.grid(row=0, column=8)
        Button(add_period_frame, text="add", command=self.add).grid(row=0, column=9)
        # Keep the form widgets so the add() callback can read them.
        self.day = day_combo
        self.starthour = hour_start_entry
        self.startmin = min_start_combo
        self.endhour = hour_end_entry
        self.endmin = min_end_combo

    def add(self):
        """Validate the form and add (or merge) a free period for the chosen
        day, then re-render the page.

        Hours must be integers in [0, 24]; minutes come from the 00/30
        comboboxes. Overlapping periods are merged afterwards.
        """
        day = self.day.get()
        try:
            starthour = int(self.starthour.get())
            endhour = int(self.endhour.get())
            if starthour > 24 or endhour > 24 or starthour < 0 or endhour < 0:
                messagebox.showwarning("Error", "Please enter integer between 0 and 24")
                return
        except ValueError:
            # FIX: narrowed from a bare `except:` so unrelated errors surface.
            messagebox.showwarning("Error", "Please enter integer")
            return
        if self.startmin.get() == "00":
            starttime = starthour
        else:
            starttime = starthour + 0.5
        if self.endmin.get() == "00":
            endtime = endhour
        else:
            endtime = endhour + 0.5
        # Extend an existing period when the new one overlaps it; otherwise
        # remember to append it as a fresh period.
        new = True
        for period in self.data.period[day]:
            p_start = period[0]
            p_end = period[1]
            if p_start <= starttime <= p_end:
                new = False
                if endtime > p_end:
                    period[1] = endtime
                else:
                    break
            elif p_start <= endtime <= p_end:
                new = False
                if starttime < p_start:
                    period[0] = starttime
                else:
                    break
        if new:
            self.data.period[day].append([starttime, endtime])
        self.data.period[day].sort()
        # FIX: merge overlapping periods without removing elements from the
        # list while iterating it -- the original mutated the list mid-loop
        # (skipping entries) and then tracked the *removed* item as the
        # previous period, leaving stale bounds. Build the merged list and
        # splice it back in place instead.
        periods = self.data.period[day]
        if len(periods) >= 2:
            merged = [periods[0]]
            for period in periods[1:]:
                if merged[-1][1] >= period[0]:
                    merged[-1][1] = max(merged[-1][1], period[1])
                else:
                    merged.append(period)
            periods[:] = merged
        self.master.switch_frame(PeriodPage, self.data)
class DeletePeriodButton(Button):
    """Button that clears every free period registered for one weekday."""

    def __init__(self,master,data,day):
        """Create the delete button for *day*'s row."""
        super().__init__(master)
        self["command"] = self.delete
        self["text"] = "delete"
        self.data = data
        self.day = day

    def delete(self):
        """Empty the day's period list and re-render the period page."""
        self.data.period[self.day] = []
        # NOTE(review): walks three .master levels to reach the object that
        # owns switch_frame -- fragile if widget nesting changes; verify.
        self.master.master.master.switch_frame(PeriodPage,self.data)
# Seed data: sample tasks -- Task(name, duration_hours, importance,
# [month, day] deadline, type) per add_confirm()'s construction.
t1 = Task("錄資結影片",1,3,[7,5],"工作")
t2 = Task("複習DS",3,4,[7,6],"工作")
t3 = Task("跑步",0.5,2,[7,7],"運動")
t4 = Task("整理演算法",2,5,[7,5],"工作")
t5 = Task("全聯買東西",1,2,[7,12],"生活")
# NOTE(review): `all` shadows the builtin all(); consider renaming.
all = AllTasks([])
f = FinishedTasks([])
data = Data(all,f)
"""
now = time.process_time()
for i in range(100000):
name = "".join(random.choice(string.ascii_letters + string.digits) for x in range(10))
duration = random.randint(1,10000)
t = Task(name,duration,1,[5,5])
data.alltasks.add(t)
end = time.process_time()
print("add by dict:%f" %(end - now))
now = time.process_time()
data.alltasks.sort("name")
end = time.process_time()
print("sort by rbt:%f" %(end - now))
now = time.process_time()
data.alltasks.sort("duration")
end = time.process_time()
print("sort by dict:%f" %(end - now))
"""
# Launch the GUI main loop.
app = main(data)
app.mainloop() | {"/main.py": ["/application.py", "/notes.py", "/schedule.py", "/DataFormat.py"], "/schedule.py": ["/DataFormat.py"]} |
73,202 | alexding1226/ds_final | refs/heads/master | /schedule.py | import DataFormat as data
# The "total time insufficient" warning should take place in the task-adding
# section: it covers the case where the summed duration of the first n tasks
# exceeds the free time available up to the n-th task's deadline.
# Current version: first try without splitting, placing tasks while honoring
# non_consecutive constraints. If that fails, exempt the task's type; if it
# still fails, split the last task and fill it into the earlier free slots.
# Both kinds of items get modified, so deep copies are required.
class schedule:
    """Greedy scheduler: places tasks (taken from a priority container) into
    free periods, and relaxes constraints via ExpirationHandling() -- first a
    type exemption, then splitting -- when a task would miss its deadline."""

    def __init__(self,data_task_list,data_period_list):
        """Keep the caller's plain lists (mutated during retries) and build
        prioritized my_list copies to schedule from."""
        self.data_task_list = data_task_list
        ##self.data_task_list.sort()
        self.data_period_list = data_period_list
        ##self.data_period_list.sort()
        self.task_list = data.my_list(data_task_list)
        # Deep-ish copy: period items are mutated (begin is advanced) while
        # scheduling, so copy each one.
        self.period_list = data.my_list([])
        for i in data_period_list:
            self.period_list.append(i.copy())
        # Retry state: 0 = normal, 1 = type-exemption retry, 2 = split retry.
        self.flag = 0
        ##self.task_list.sort()
        ##self.period_list.sort()

    def Detect(self): # insufficient time detection : # using built-in list
        """Return the first deadline date whose cumulative task time exceeds
        the free-period time available up to that date, or 0 if all fits."""
        amount_task = 0
        amount_period = 0
        task_list = self.data_task_list
        period_list = self.data_period_list
        task_list.sort()
        period_list.sort()
        current_task = 1
        current_period = 1
        while current_task <= len(task_list):
            current_date = task_list[current_task-1].deadline_date
            ##print("date",current_date)
            amount_task = amount_task + task_list[current_task-1].duration
            ##print("amout of tasks",amount_task)
            # Accumulate all free time on or before this task's deadline.
            while(True):
                if (current_period > len(period_list)) or (period_list[current_period-1].date > current_date): # which day ?
                    break
                amount_period = amount_period + period_list[current_period-1].end - period_list[current_period-1].begin
                current_period = current_period +1
            ##print("amount of periods",amount_period)
            if amount_task > amount_period:
                ##print("too much task on day",current_date)
                return current_date
            current_task = current_task + 1
        ##print("the amount of tasks is okay")
        return 0

    def CheckExpire(self,task_deadline_date, task_deadline_time, current_date, current_time):
        """True if (current_date, current_time) is strictly past the task's
        deadline; ending exactly at the deadline is allowed."""
        if current_date < task_deadline_date:
            return False
        elif current_date == task_deadline_date:
            if current_time <= task_deadline_time:
                return False
        return True

    def ExpirationHandling(self,task_item_with_problem):
        """Retry scheduling after *task_item_with_problem* missed its deadline.

        flag == 1: mark the task's type "special" (exempting it from
        non-consecutive skipping) and reschedule; if the task is already
        first, fall through to the split strategy instead.
        flag == 2: split the task into the free periods available before its
        deadline (as higher-importance "special2" pieces) and reschedule.
        Returns the new schedule, or -1 when all strategies are exhausted.
        """
        S_new = schedule(self.data_task_list,self.data_period_list)
        index = S_new.task_list.index(task_item_with_problem)
        if self.flag == 1: # type
            S_new.flag = 1
            if index >= 1:
                S_new.data_task_list[index].type = "special"
                S_new.task_list[index].type = "special" ## Take Care
                return S_new.Schedule()
            else :
                # Type exemption can't help the first task -- escalate to the
                # split strategy below (deliberate fall-through).
                self.flag = self.flag +1
                ##print("index < 1 @ expiration handling")
                ##return -1
        if self.flag == 2: # min_length # the for loop could be optimized
            S_new.flag = 0
            # Collect every free period (clipped to the deadline) the task
            # could still use.
            empty_period = []
            for i in range(len(self.period_list)):
                p = self.period_list[i]
                if (p.date < task_item_with_problem.deadline_date):
                    empty_period.append(p)
                elif p.date == task_item_with_problem.deadline_date:
                    if p.begin < task_item_with_problem.deadline_time:
                        if p.end <= task_item_with_problem.deadline_time:
                            empty_period.append(p)
                        else:
                            empty_period.append(data.period_item(p.date,p.begin,task_item_with_problem.deadline_time))
                else:
                    break
            # Find the free time before the deadline and split the task into it.
            #S_new.task_list.delete(task_item_with_problem)
            self.data_task_list.remove(task_item_with_problem)
            t = task_item_with_problem.copy()
            t.min_length = 0
            t.type = "special2"
            current_duration_left = t.duration
            for p in empty_period:
                if current_duration_left != 0:
                    p_duration = p.end - p.begin
                    # Nudge importance so later pieces sort after earlier ones.
                    t.importance = t.importance + 0.001
                    if p_duration <= current_duration_left:
                        t.duration = p_duration
                        #S_new.task_list.add(t.copy())
                        self.data_task_list.append(t.copy())
                        current_duration_left = current_duration_left - p_duration
                    else:
                        t.duration = current_duration_left
                        #S_new.task_list.add(t)
                        self.data_task_list.append(t)
                        current_duration_left = 0
                else:
                    break
            self.data_task_list.sort()
            S_new.task_list = data.my_list(self.data_task_list)
            #S_new.task_list.sort()
            return S_new.Schedule()
        else:
            print("flag > 2")
            return -1

    def Schedule(self):
        """Greedily build and return the schedule: a list indexed by date-1,
        each day holding (name, begin time, end time) tuples. On a deadline
        miss, delegates to ExpirationHandling() (which may return -1)."""
        schedule = [[]] # name, begin time, end time ### separated by date
        current_period = 1
        current_date = self.period_list.Peek().date
        current_time = self.period_list.Peek().begin
        current_progress = 0
        current_non_consecutive = []
        p_duration = self.period_list.Peek().end - current_time
        t_duration = self.task_list.Peek().duration - current_progress
        while len(self.task_list) != 0:
            # Make sure the per-day lists reach the current date.
            while len(schedule) < current_date:
                schedule.append([])
            # Skip a task whose type must not directly follow the previous
            # one; if nothing else is available, drop the constraint.
            if self.task_list.Peek().type in current_non_consecutive: ## could be optimized: try starting from the next day instead
                if self.task_list.Swap():
                    t_duration = self.task_list.Peek().duration
                else:
                    ##print("can't skip this, no other task left")
                    current_non_consecutive = []
            # Ran out of free periods -> the current task misses its deadline.
            if current_period > len(self.period_list):
                ##print("task", self.task_list.Peek().name,"expires at day", current_date,"due to delay")
                ##print("old schedule current state :",schedule)
                self.flag = self.flag + 1
                return self.ExpirationHandling(self.task_list.Peek())
            if p_duration >= t_duration:
                # The task fits entirely in the current period: place it.
                schedule[current_date-1].append((self.task_list.Peek().name, current_time, current_time + t_duration))
                ##print(schedule)
                current_time = current_time + t_duration
                self.period_list[current_period-1].begin = current_time
                if self.CheckExpire(self.task_list.Peek().deadline_date, self.task_list.Peek().deadline_time, current_date, current_time):
                    ##print("task", self.task_list.Peek().name,"expires at day", current_date)
                    ##print("old schedule current state :",schedule)
                    self.flag = self.flag + 1
                    return self.ExpirationHandling(self.task_list.Peek())
                current_progress = 0
                current_non_consecutive = self.task_list.Peek().non_consecutive_type
                self.task_list.RemoveMin()
                # Consume or shrink the period, then restart the period scan.
                if current_time == self.period_list[current_period-1].end:
                    self.period_list.pop(current_period-1)
                else:
                    self.period_list[current_period-1].begin = current_time
                current_period = 1
                if len(self.task_list) != 0:
                    t_duration = self.task_list.Peek().duration - current_progress
                else:
                    break
            else:
                # Period too short for this task: try the next period.
                current_period = current_period +1
            if current_period <= len(self.period_list):
                current_date = self.period_list[current_period-1].date
                current_time = self.period_list[current_period-1].begin
                p_duration = self.period_list[current_period-1].end - current_time
        return schedule
if __name__ == '__main__':
    # Manual smoke test: period_item(date, begin, end);
    # name, duration, importance, deadline_date, deadline_time, id
    p1 = data.period_item(1,18,21)
    p2 = data.period_item(2,5,10)
    p3 = data.period_item(5,5,6)
    p4 = data.period_item(7,5,6)
    p5 = data.period_item(7,15,17)
    p6 = data.period_item(8,4,9)
    p7 = data.period_item(8,18,21)
    p8 = data.period_item(9,5,10)
    t1 = data.task_item("a",3.5,1,2,24,1)
    t2 = data.task_item("b",4,1,4,24,1)
    # Build a scheduler over two tasks and three periods (p3-p5, p7-p8 unused).
    s = schedule([t1,t2],[p1,p2,p6])
| {"/main.py": ["/application.py", "/notes.py", "/schedule.py", "/DataFormat.py"], "/schedule.py": ["/DataFormat.py"]} |
73,203 | alexding1226/ds_final | refs/heads/master | /application.py | from tkinter import *
import tkinter
from PIL import Image, ImageTk
class PomodoroPage(Frame):
    """Pomodoro timer UI: a work countdown alternating with a short break.

    `conti` counts finished countdowns; even parity means the next phase is
    Work (full slider value, in minutes), odd means Break (10% of it).
    """

    def __init__(self, master):
        super().__init__(master)
        self.createWidgets()
        self.pack(side=TOP)
        self.master = master
        self.timer = None        # pending after() callback id, None when idle
        self.conti = 0           # number of completed countdown phases
        self._paused = False
        self.timerToStart = 25   # minutes for the next countdown

    def createWidgets(self):
        """Build the tomato canvas, phase label, duration slider and buttons."""
        self.timerVariable = StringVar()
        self.timerVariable.set(None)
        self.canvas = Canvas(self.master, bg="#ffbc92", highlightthickness=0)
        self.tomatoImg = PhotoImage(file="2tomato.png")
        # Here need to be revise for the windows xy
        self.canvas.create_image(180, 150, image=self.tomatoImg)
        # Here need to be revise for windows y+11
        self.timerLabel = self.canvas.create_text(180, 150, text="00:00", fill="white", font=('Arial', 112))
        self.type = Label(self, text="Work", bg="#ffbc92", fg="sienna", font=('Arial', 40))
        self.type.pack(fill="both")
        self.canvas.pack(side=TOP)
        self.frame1 = Frame(self, bg="#ffbc92")
        self.time = Scale(self.frame1, label='Time-period', from_=25, to=55, orient=HORIZONTAL, length=200, showvalue=0, tickinterval=10, resolution=1, bg="#ffbc92")
        self.time.grid(row=2, column=0)
        self.b1 = Button(self.frame1, text='Set', command=self.set1, bg="#ffbc92")
        self.b1.grid(row=2, column=1)
        self.frame1.pack(fill=BOTH)
        self.frame2 = Frame(self, bg="#ffbc92")
        self.startButton = Button(self.frame2, text="Start", fg="RED", activeforeground="BlanchedAlmond", width="8", height="2", font=('Arial', 11), command=self.startTime)
        self.startButton.grid(row=4, column=1, sticky="ewns")
        self.stopButton = Button(self.frame2, text="Stop", fg="green", background='BlanchedAlmond', width="8", height="2", activebackground="#ffbc92", activeforeground="#ffbc92", font=('Arial', 11), command=self.stopTime)
        self.stopButton.grid(row=4, column=2, sticky="ewns")
        self.resetButton = Button(self.frame2, text="Reset", fg="black", width="8", height="2", font=('Arial', 11), command=self.resetTime)
        self.resetButton.grid(row=4, column=3, sticky="ewns")
        self.frame2.pack(fill="both")

    def set1(self):
        """Apply the slider value as the next work duration (minutes)."""
        self.timerToStart = self.time.get()

    def startTime(self):
        """Start a new countdown, or resume a paused one."""
        self._paused = False
        if self.timer is None:
            self.countdown(self.timerToStart * 60)

    def stopTime(self):
        """Pause the running countdown (display freezes; ticks keep polling)."""
        if self.timer is not None:
            self._paused = True

    def resetTime(self):
        """Cancel the current countdown and arm the next phase, paused."""
        # BUGFIX: after_cancel(None) raises ValueError when pressed while idle;
        # only cancel when a callback is actually pending.
        if self.timer is not None:
            self.master.after_cancel(self.timer)
        self.timer = None
        self._paused = False
        if self.conti % 2 == 0:
            self.type.config(text="Work")
            self.type.pack()
            self.timerToStart = self.time.get()
        else:
            self.type.config(text="Break")
            self.type.pack()
            # Break lasts 10% of the configured work period.
            self.timerToStart = int(0.1 * (self.time.get()))
        self.countdown(self.timerToStart * 60)
        # Arm paused so the fresh time is shown until Start is pressed
        # (startTime() resumes immediately when a phase ends naturally).
        self._paused = True

    def countdown(self, timeInSeconds, start=True):
        """Tick once per second; on reaching zero, switch phase and restart.

        NOTE(review): `start`/`_starttime` record the initial duration but are
        never read elsewhere -- kept for interface compatibility.
        """
        if timeInSeconds >= 0:
            if start:
                self._starttime = timeInSeconds
            if self._paused:
                # Keep polling without decrementing so the display is frozen.
                self.timer = self.master.after(1000, self.countdown, timeInSeconds, False)
            else:
                mins, secs = divmod(timeInSeconds, 60)
                # BUGFIX: zero-pad to match the "00:00" placeholder (was e.g. "5:3").
                self.canvas.itemconfig(self.timerLabel, text=f"{mins:02d}:{secs:02d}")
                self.timer = self.master.after(1000, self.countdown, timeInSeconds - 1, False)
        else:
            self.conti += 1
            self.resetTime()
            self.startTime()
if __name__ == '__main__':
    root = Tk()
    root.title("Pomodoro Timer")
    # Here need to be revise for the windows size
    root.geometry("700x500")
    root.configure(bg="#ffbc92")
    # NOTE(review): the timer UI is placed on a Toplevel, so the (empty) root
    # window is shown as well -- confirm whether that is intended.
    top = Toplevel()
    top.geometry("700x500")
    top.configure(bg="#ffbc92")
    app =PomodoroPage(top)
    app.configure(background="#ffbc92")
    root.mainloop()
| {"/main.py": ["/application.py", "/notes.py", "/schedule.py", "/DataFormat.py"], "/schedule.py": ["/DataFormat.py"]} |
73,204 | alexding1226/ds_final | refs/heads/master | /notes.py | from tkinter import *
from tkinter import ttk
from tkinter import Button
import tkinter
class NotesPage(Frame):
    """Sticky-note board: type a note, press ENTER to add it, click 'done' to remove it."""

    def __init__(self, master, data=None, task=None):
        """
        :param master: Parent tkinter widget.
        :param data: Optional application data handle (stored, not used here).
            Defaults to None so the module's own demo entry point, which calls
            NotesPage(root), works.
        :param task: When truthy, skip building the note-entry widgets.
        """
        super().__init__(master)
        # Rotating background palette for successive notes.
        self.bgColor = ["RoyalBlue","DarkSlateBlue", "DarkMagenta", "Teal", "Indigo", "PaleVioletRed", "Crimson", "FireBrick","IndianRed", "Peru", "DarkGoldenRod", "OliveDrab", "LightSeaGreen", "CornflowerBlue", "DarkBlue", "DarkSlateBlue"]
        self["height"] = 500
        self["width"] = 800
        self.pack_propagate(0)
        self.data = data
        if not task:
            self.tasks = []
            task1 = Label(self, text="NOTES", bg="white", fg="tomato", pady=20, font=("Times",23))
            task1.pack(side=TOP, fill=X)
            self.taskCreate = Text(self, height=1, bg="DarkCyan", fg="white")
            self.taskCreate.pack(side=BOTTOM, fill=X)
            self.taskCreate.focus_set()
            self.instru = Label(self, text="Fill your note below and press ENTER. (Maximum 40 words, 16 notes)", fg="black")
            self.instru.pack(side=BOTTOM)
            self.taskCreate.bind('<Return>', self.addTask)

    def addTask(self, event):
        """<Return> handler: turn the Text content into a new note label."""
        self.newText = self.taskCreate.get(1.0,END).strip()
        if len(self.newText) > 0:
            self.newTask = Label(self, text = self.newText, pady=20)
            self.tasks.append(self.newTask)
            self.bgIdx = len(self.tasks)%len(self.bgColor)
            self.bgc = self.bgColor[self.bgIdx]
            self.newTask.configure(bg=self.bgc,fg="white",font=("Times",20))
            self.newTask.pack(side=TOP, fill=X)
            # Capture the button in its own closure so each note's 'done'
            # removes exactly that note.
            doneButton = Button(self.newTask, text = "done",command = lambda:self.removeTask(doneButton))
            doneButton.pack(side=RIGHT)
        self.taskCreate.delete(1.0, END)

    def removeTask(self, doneButton):
        """Hide a note (the done button's parent label) and forget it."""
        doneButton.pack_forget()
        doneButton.master.pack_forget()
        self.tasks.remove(doneButton.master)
if __name__ == "__main__":
    root = Tk()
    root.geometry ("1050x700")
    # NOTE(review): NotesPage.__init__ declares a `data` parameter that is not
    # supplied here -- confirm the intended call signature.
    notes = NotesPage(root)
    notes.pack()
    root.mainloop()
73,205 | SmartDeveloperHub/agora-client | refs/heads/master | /agora/client/wrapper.py | """
#-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
This file is part of the Smart Developer Hub Project:
http://www.smartdeveloperhub.org
Center for Open Middleware
http://www.centeropenmiddleware.com/
#-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
Copyright (C) 2015 Center for Open Middleware.
#-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
#-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
"""
import StringIO
from urllib import urlencode
import requests
from agora.client.execution import PlanExecutor
from rdflib import Graph
from rdflib.plugins.parsers.notation3 import BadSyntax
__author__ = "Fernando Serena"
class FountainException(Exception):
    """Raised when an Agora service request fails (non-200 HTTP response)."""
    pass
class FragmentCollector(object):
    """ Class for interacting with the Agora planner and executing the plan for a certain graph pattern.
    """
    def __init__(self, planner_uri, gp):
        """
        :param planner_uri: Base URI of the Agora planner service
        :param gp: The graph pattern to collect a fragment for
        """
        self.__planner = planner_uri
        self.__graph_pattern = gp
        # Request a search plan on initialization and extract patterns and spaces
        plan_graph = self.__get_gp_plan(self.__graph_pattern)
        self.__plan_executor = PlanExecutor(plan_graph)

    def __get_gp_plan(self, gp):
        """
        Request the planner a search plan for a given gp and returns the plan as a graph.
        :param gp: Graph pattern (URL-encoded into the request query string)
        :return: rdflib Graph with the parsed plan; empty on unparsable response
        """
        query = urlencode({'gp': gp})
        response = requests.get('{}/plan?'.format(self.__planner) + query, headers={'Accept': 'text/turtle'})
        graph = Graph()
        try:
            graph.parse(source=StringIO.StringIO(response.text), format='turtle')
        except BadSyntax:
            # An unparsable plan is treated as "no plan": return the empty graph.
            pass
        return graph

    def get_fragment(self, **kwargs):
        # Delegate to the executor; returns a complete rdflib graph.
        return self.__plan_executor.get_fragment(**kwargs)

    def get_fragment_generator(self, **kwargs):
        # Delegate to the executor; returns (triple generator, namespaces, plan graph).
        return self.__plan_executor.get_fragment_generator(**kwargs)
class Agora(object):
    """
    Wrapper class for the FragmentCollector
    """
    def __init__(self, host='localhost', port=9001):
        # Base URL of the Agora service (planner + prefixes endpoints).
        self.__host = 'http://{}:{}'.format(host, port)

    def get_fragment(self, gp, **kwargs):
        """
        Return a complete fragment for a given gp.
        :param gp: A graph pattern
        :return: An rdflib graph containing the collected fragment
        """
        collector = FragmentCollector(self.__host, gp)
        return collector.get_fragment(**kwargs)

    def get_fragment_generator(self, gp, **kwargs):
        """
        Return a fragment generator for a given gp.
        :param gp: A graph pattern
        :param kwargs: Passed through to PlanExecutor.get_fragment_generator
        :return: (triple generator, namespaces, plan graph)
        """
        collector = FragmentCollector(self.__host, gp)
        return collector.get_fragment_generator(**kwargs)

    @property
    def prefixes(self):
        """Known prefixes from the service's /prefixes endpoint.

        :raises FountainException: on a non-200 response.
        """
        response = requests.get(self.__host + '/prefixes')
        if response.status_code == 200:
            return response.json()
        raise FountainException(response.text)
| {"/agora/client/wrapper.py": ["/agora/client/execution.py"]} |
73,206 | SmartDeveloperHub/agora-client | refs/heads/master | /agora/client/execution.py | """
#-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
This file is part of the Smart Developer Hub Project:
http://www.smartdeveloperhub.org
Center for Open Middleware
http://www.centeropenmiddleware.com/
#-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
Copyright (C) 2015 Center for Open Middleware.
#-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
#-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
"""
import Queue
import StringIO
import logging
import multiprocessing
import traceback
from threading import RLock, Thread, Event
from xml.sax import SAXParseException
import gc
from _bsddb import DBNotFoundError
from datetime import datetime as dt
import requests
from agora.client.namespaces import AGORA
from concurrent.futures import ThreadPoolExecutor
from concurrent.futures import wait
from rdflib import ConjunctiveGraph, RDF, URIRef
pool = ThreadPoolExecutor(max_workers=20)
__author__ = 'Fernando Serena'
log = logging.getLogger('agora.client')
_accept_mimes = {'turtle': 'text/turtle', 'xml': 'application/rdf+xml'}
class StopException(Exception):
    """Raised internally to abort plan execution once the stop event is set."""
    pass
def chunks(l, n):
    """
    Yield successive n-sized chunks from l.
    :param l: Sequence to split
    :param n: Chunk size; a falsy or negative n yields nothing
    :return: Generator of slices of l
    """
    if not n:
        return
    for start in range(0, len(l), n):
        yield l[start:start + n]
def __extend_uri(prefixes, short):
    """
    Extend a prefixed uri with the help of a specific dictionary of prefixes
    :param prefixes: Dictionary of prefixes
    :param short: Prefixed uri to be extended
    :return: The extended uri, or `short` unchanged if no prefix matches
    """
    for prefix in prefixes:
        # BUGFIX: match the full "prefix:" token. The previous
        # startswith(prefix) check let one prefix shadow another (e.g. a
        # prefix 'fo' would capture 'foaf:name' and return it unextended).
        if short.startswith(prefix + ':'):
            return prefixes[prefix] + short[len(prefix) + 1:]
    return short
class PlanExecutor(object):
def __init__(self, plan):
    """
    :param plan: rdflib graph describing the search plan to execute
    """
    self.__plan_graph = plan
    self.__fragment = set([])          # (seed, predicate/type) pairs already yielded
    self.__uri_cache = {}
    self.__node_spaces = {}            # plan node -> set of search spaces it belongs to
    self.__node_patterns = {}          # plan node -> set of triple patterns
    self.__spaces = None               # all AGORA.SearchSpace subjects in the plan
    self.__patterns = {}               # pattern node -> dict of extracted properties
    self.__subjects_to_ignore = {}     # space -> subjects filtered out during execution
    self.__resource_queue = {}
    self.__resource_lock = RLock()
    self.__completed = False
    self.__last_success_format = None  # remembered so the winning RDF format is tried first
    self.__last_iteration_ts = dt.now()
    # Analyse the plan graph up front to build the pattern/space structures.
    self.__extract_patterns_and_spaces()
def __extract_patterns_and_spaces(self):
    """
    Analyses the search plan graph in order to build the required data structures from patterns
    and spaces.
    :return:
    """
    def __decorate_nodes(nodes, space):
        """
        Performs a backward search from a list of pattern nodes and assigns a set of search spaces
        to all encountered nodes.
        :param nodes: List of pattern nodes that belongs to a search space
        :param space: List of search space id
        :return:
        """
        for n in nodes:
            if n not in self.__node_spaces:
                self.__node_spaces[n] = set([])
            self.__node_spaces[n].add(space)
            # Walk backwards through AGORA.next predecessors so every
            # ancestor node inherits this search space.
            pred_nodes = self.__plan_graph.subjects(AGORA.next, n)
            __decorate_nodes(pred_nodes, space)

    # Extract all search spaces in the plan and build a dictionary of subjects-to-ignore per each of them.
    # Ignored subjects are those that won't be dereferenced due to a explicit graph pattern (object) filter,
    # e.g. ?s doap:name "jenkins" -> All ?s that don't match the filter will be ignored.
    self.__spaces = set(self.__plan_graph.subjects(RDF.type, AGORA.SearchSpace))
    self.__subjects_to_ignore = dict([(sp, set([])) for sp in self.__spaces])
    patterns = list(self.__plan_graph.subjects(RDF.type, AGORA.TriplePattern))
    for tp in patterns:
        # A triple pattern belongs to a UNIQUE search space
        space = list(self.__plan_graph.subjects(AGORA.definedBy, tp)).pop()
        self.__patterns[tp] = {'space': space}
        # Depending on the format of each triple pattern (either '?s a Concept' or '?s prop O'),
        # it is required to extract different properties.
        tp_pred = list(self.__plan_graph.objects(tp, predicate=AGORA.predicate)).pop()
        if tp_pred == RDF.type:  # ?s a Concept
            self.__patterns[tp]['type'] = list(self.__plan_graph.objects(tp, predicate=AGORA.object)).pop()
            try:
                check_type = list(self.__plan_graph.objects(tp, predicate=AGORA.checkType)).pop().toPython()
            except IndexError:
                # checkType is optional in the plan; default to verifying types.
                check_type = True
            self.__patterns[tp]['check'] = check_type
        else:  # ?s prop O
            self.__patterns[tp]['property'] = tp_pred
            tp_obj = list(self.__plan_graph.objects(tp, predicate=AGORA.object)).pop()
            if (tp_obj, RDF.type, AGORA.Literal) in self.__plan_graph:  # In case O is a Literal
                self.__patterns[tp]['filter_object'] = list(self.__plan_graph.objects(tp_obj, AGORA.value)).pop()
            elif isinstance(tp_obj, URIRef):
                self.__patterns[tp]['filter_object'] = tp_obj
            tp_sub = list(self.__plan_graph.objects(tp, predicate=AGORA.subject)).pop()
            if isinstance(tp_sub, URIRef):
                self.__patterns[tp]['filter_subject'] = tp_sub
        # Get all pattern nodes (those that have a byPattern properties) of the search plan and search backwards
        # in order to set the scope of each search space.
        nodes = list(self.__plan_graph.subjects(AGORA.byPattern, tp))
        for n in nodes:
            if n not in self.__node_patterns:
                self.__node_patterns[n] = set([])
            self.__node_patterns[n].add(tp)
        __decorate_nodes(nodes, space)
def get_fragment(self, **kwargs):
    """
    Return a complete fragment as a single graph.
    :param kwargs: Passed through to get_fragment_generator
    :return: ConjunctiveGraph with all collected triples and bound namespaces
    """
    gen, namespaces, plan = self.get_fragment_generator(**kwargs)
    graph = ConjunctiveGraph()
    # Explicit loops: the previous list comprehensions were used purely for
    # their side effects, which hides intent and builds throwaway lists.
    for prefix, u in namespaces:
        graph.bind(prefix, u)
    for _, s, p, o in gen:
        graph.add((s, p, o))
    return graph
def get_fragment_generator(self, on_load=None, on_seeds=None, on_plink=None, on_link=None, on_type=None,
on_type_validation=None, on_tree=None, workers=None, stop_event=None, queue_wait=None,
queue_size=100, provider=None, lazy=True):
"""
Create a fragment generator that executes the search plan.
:param on_load: Function to be called just after a new URI is dereferenced
:param on_seeds: Function to be called just after a seed of a tree is identified
:param on_plink: Function to be called when a pattern link is reached
:param on_link: Function to be called when following a property that is not of a pattern
:param on_type: Function to be called when search for a type triple
:param on_type_validation: Function to be called just after a type is validated
:param on_tree: Function to be called just before a tree is going to be explored
:param provider:
:param queue_size:
:param workers:
:param stop_event:
:param queue_wait:
:param lazy:
:return:
"""
if workers is None:
workers = multiprocessing.cpu_count()
fragment_queue = Queue.Queue(maxsize=queue_size)
workers_queue = Queue.Queue(maxsize=workers)
if stop_event is None:
stop_event = Event()
def __create_graph():
if provider is None:
return ConjunctiveGraph()
else:
return provider.create(conjunctive=True)
def __release_graph(g):
if provider is not None:
provider.release(g)
else:
g.remove((None, None, None))
g.close()
def __open_graph(gid, loader, format):
if provider is None:
content, headers = loader(gid, format)
if not isinstance(content, bool):
g = ConjunctiveGraph()
g.parse(source=content, format=format)
return g
return content
else:
return provider.create(gid=gid, loader=loader, format=format)
def __get_content(uri, format):
try:
# log.debug('[Dereference][START] {}'.format(uri))
response = requests.get(uri, headers={'Accept': _accept_mimes[format]}, timeout=30)
except requests.Timeout:
log.debug('[Dereference][TIMEOUT][GET] {}'.format(uri))
return True
except UnicodeEncodeError:
log.debug('[Dereference][ERROR][ENCODE] {}'.format(uri))
return True
except Exception:
log.debug('[Dereference][ERROR][GET] {}'.format(uri))
return True
if response.status_code == 200:
try:
return StringIO.StringIO(response.content), response.headers
except SyntaxError:
traceback.print_exc()
log.error('[Dereference][ERROR][PARSE] {}'.format(uri))
return False
except ValueError:
traceback.print_exc()
log.debug('[Dereference][ERROR][VAL] {}'.format(uri))
return False
except DBNotFoundError:
# Ignore this exception... it is raised due to a stupid problem with prefixes
return True
except SAXParseException:
traceback.print_exc()
log.error('[Dereference][ERROR][SAX] {}'.format(uri))
return False
except Exception:
traceback.print_exc()
log.error('[Dereference][ERROR] {}'.format(uri))
return True
def __dereference_uri(tg, uri):
if not isinstance(uri, URIRef):
return
uri = uri.encode('utf-8')
def treat_resource_content(parse_format):
g = __open_graph(uri, loader=__get_content, format=parse_format)
if isinstance(g, bool):
return g
try:
tg.get_context(uri).__iadd__(g)
return True
finally:
if g is not None:
__release_graph(g)
"""
Load in a tree graph the set of triples contained in uri, trying to not deference the same uri
more than once in the context of a search plan execution
:param tg: The graph to be loaded with all the triples obtained from uri
:param uri: A resource uri to be dereferenced
:return:
"""
loaded = False
for fmt in sorted(_accept_mimes.keys(), key=lambda x: x != self.__last_success_format):
loaded = treat_resource_content(fmt)
if loaded:
self.__last_success_format = fmt
break
if loaded and on_load is not None:
triples = list(tg.get_context(uri).triples((None, None, None)))
on_load(uri, triples)
def __process_link_seed(seed, tree_graph, link, next_seeds):
__check_stop()
try:
__dereference_uri(tree_graph, seed)
seed_pattern_objects = tree_graph.objects(subject=seed, predicate=link)
next_seeds.update(seed_pattern_objects)
except Exception as e:
traceback.print_exc()
log.warning(e.message)
def __process_pattern_link_seed(seed, tree_graph, pattern_link):
__check_stop()
try:
__dereference_uri(tree_graph, seed)
except:
pass
seed_pattern_objects = tree_graph.objects(subject=seed, predicate=pattern_link)
return seed_pattern_objects
def __check_stop():
if stop_event.isSet():
with self.__resource_lock:
self.__fragment.clear()
for tg in self.__resource_queue.keys():
try:
tg.remove((None, None, None))
tg.store.close()
except KeyError:
pass
tg.close()
__release_graph(tg)
self.__plan_graph = None
self.__uri_cache = None
self.__node_spaces = None
self.__node_patterns = None
self.__spaces = None
self.__patterns = None
self.__subjects_to_ignore.clear()
self.__resource_queue.clear()
gc.collect()
raise StopException()
def __put_triple_in_queue(quad):
if (dt.now() - self.__last_iteration_ts).total_seconds() > 100:
log.info('Aborted fragment collection!')
stop_event.set()
fragment_queue.put(quad, timeout=queue_wait)
def __follow_node(node, tree_graph, seed_space, seed):
"""
Recursively search for relevant triples following the current node and all its successors
:param node: Tree node to be followed
:param tree_graph:
:param seed_space:
:param seed: Collected seed for the current node
:return:
"""
def node_has_filter(x):
"""
Check if a node is a pattern node and has an object filter
"""
p_node = list(self.__plan_graph.objects(subject=x, predicate=AGORA.byPattern))
try:
p_node = p_node.pop()
return 'filter_object' in self.__patterns[p_node] or 'filter_subject' in self.__patterns[p_node]
except IndexError:
return False
try:
# Get the sorted list of current node's successors
nxt = sorted(list(self.__plan_graph.objects(node, AGORA.next)),
key=lambda x: node_has_filter(x), reverse=True)
# Per each successor...
for n in nxt:
if seed_space in self.__node_spaces[n]:
node_patterns = self.__node_patterns.get(n, [])
# In case the node is not a leaf, 'onProperty' tells which is the next link to follow
try:
link = list(self.__plan_graph.objects(subject=n, predicate=AGORA.onProperty)).pop()
except IndexError:
link = None
filter_next_seeds = set([])
next_seeds = set([])
# If the current node is a pattern node, it must search for triples to yield
for pattern in node_patterns:
pattern_space = self.__patterns[pattern].get('space', None)
if pattern_space != seed_space or seed in self.__subjects_to_ignore[pattern_space]:
continue
subject_filter = self.__patterns[pattern].get('filter_subject', None)
if subject_filter is not None and seed != subject_filter:
self.__subjects_to_ignore[pattern_space].add(seed)
continue
pattern_link = self.__patterns[pattern].get('property', None)
# If pattern is of type '?s prop O'...
if pattern_link is not None:
if (seed, pattern_link) not in self.__fragment:
obj_filter = self.__patterns[pattern].get('filter_object', None)
if on_plink is not None:
on_plink(pattern_link, [seed], pattern_space)
seed_was_filtered = True
try:
for seed_object in list(
__process_pattern_link_seed(seed, tree_graph, pattern_link)):
__check_stop()
quad = (pattern, seed, pattern_link, seed_object)
if obj_filter is None or u''.join(seed_object).encode(
'utf-8') == u''.join(obj_filter.toPython()).encode('utf-8'):
self.__fragment.add((seed, pattern_link))
__put_triple_in_queue(quad)
seed_was_filtered = False
if isinstance(obj_filter, URIRef):
filter_next_seeds.add(obj_filter)
if obj_filter is not None and seed_was_filtered:
self.__subjects_to_ignore[pattern_space].add(seed)
except AttributeError as e:
log.warning('Trying to find {} objects of {}: {}'.format(link, seed, e.message))
# If pattern is of type '?s a Concept'...
obj_type = self.__patterns[pattern].get('type', None)
if obj_type is not None:
check_type = self.__patterns[pattern].get('check', False)
if on_type is not None:
on_type(obj_type, [seed], pattern_space)
__dereference_uri(tree_graph, seed)
try:
seed_objects = list(tree_graph.objects(subject=seed, predicate=link))
for seed_object in seed_objects:
type_triple = (pattern, seed_object, RDF.type, obj_type)
# In some cases, it is necessary to verify the type of the seed
if (seed_object, obj_type) not in self.__fragment:
if check_type:
__dereference_uri(tree_graph, seed_object)
types = list(
tree_graph.objects(subject=seed_object, predicate=RDF.type))
if obj_type in types:
self.__fragment.add((seed_object, obj_type))
__put_triple_in_queue(type_triple)
else:
self.__subjects_to_ignore[pattern_space].add(seed_object)
else:
self.__fragment.add((seed_object, obj_type))
__put_triple_in_queue(type_triple)
except AttributeError as e:
log.warning('Trying to find {} objects of {}: {}'.format(link, seed, e.message))
# If the current node is not a leaf... go on finding seeds for the successors
if link is not None and seed not in self.__subjects_to_ignore[seed_space]:
if on_link is not None:
on_link(link, [seed], seed_space)
__process_link_seed(seed, tree_graph, link, next_seeds)
if filter_next_seeds:
next_seeds = set.intersection(next_seeds, filter_next_seeds)
chs = list(chunks(list(next_seeds), min(len(next_seeds), max(1, workers / 2))))
next_seeds.clear()
try:
while True:
__check_stop()
chunk = chs.pop()
threads = []
for s in chunk:
try:
workers_queue.put_nowait(s)
future = pool.submit(__follow_node, n, tree_graph, seed_space, s)
threads.append(future)
except Queue.Full:
# If all threads are busy...I'll do it myself
__follow_node(n, tree_graph, seed_space, s)
except Queue.Empty:
pass
wait(threads)
[(workers_queue.get_nowait(), workers_queue.task_done()) for _ in threads]
except (IndexError, KeyError):
pass
except Queue.Full:
stop_event.set()
except Exception as e:
traceback.print_exc()
log.error(e.message)
return
def get_fragment_triples():
"""
Iterate over all search trees and yield relevant triples
:return:
"""
def execute_plan():
for tree in trees:
if on_tree is not None:
on_tree(tree)
# Prepare an dedicated graph for the current tree and a set of type triples (?s a Concept)
# to be evaluated retrospectively
tree_graph = __create_graph()
try:
self.__resource_queue[tree_graph] = []
# Get all seeds of the current tree
seeds = list(self.__plan_graph.objects(tree, AGORA.hasSeed))
if on_seeds is not None:
on_seeds(seeds)
# Check if the tree root is a pattern node and in that case, adds a type triple to the
# respective set
root_pattern = list(self.__plan_graph.objects(tree, AGORA.byPattern))
if len(root_pattern):
pattern_node = list(
self.__plan_graph.objects(subject=tree, predicate=AGORA.byPattern)).pop()
seed_type = self.__patterns[pattern_node].get('type', None)
[type_triples.add((pattern_node, sd, seed_type)) for sd in seeds]
# Get the children of the root node and follow them recursively
nxt = list(self.__plan_graph.objects(tree, AGORA.next))
if len(nxt):
# Prepare the list of seeds to start the exploration with, taking into account all
# search spaces that were defined
s_seeds = set(seeds)
for sp in self.__spaces:
for seed in s_seeds:
__follow_node(tree, tree_graph, sp, seed)
finally:
__release_graph(tree_graph)
if lazy and found_data and len(self.__spaces) == 1:
break
self.__completed = True
def get_tree_length(x):
"""
Return the value of the Agora length property in the given tree node
:param x:
:return:
"""
length = list(self.__plan_graph.objects(subject=x, predicate=AGORA.length)).pop()
return length
# Get all search trees contained in the search plan and sort them by length. A shorter tree is going
# to be explored first.
trees = self.__plan_graph.subjects(RDF.type, AGORA.SearchTree)
trees = sorted(trees, key=lambda x: get_tree_length(x))
type_triples = set([])
thread = Thread(target=execute_plan)
thread.daemon = True
thread.start()
found_data = False
while not self.__completed or fragment_queue.not_empty:
try:
(t, s, p, o) = fragment_queue.get(timeout=1)
found_data = True
fragment_queue.task_done()
if p == RDF.type:
type_triples.add((t, s, o))
else:
yield (t, s, p, o)
except Queue.Empty:
if self.__completed:
break
self.__last_iteration_ts = dt.now()
thread.join()
# All type triples that are of subjects to ignore won't be returned (this has to be done this way
# because of generators nature)
all_ignores = {}
if self.__subjects_to_ignore.values():
all_ignores = set.intersection(*self.__subjects_to_ignore.values())
valid_type_triples = [(t, s, o) for (t, s, o) in type_triples if s not in all_ignores]
for (t, s, o) in valid_type_triples:
if on_type_validation is not None:
on_type_validation((t, s, RDF.type, o))
yield (t, s, RDF.type, o)
return get_fragment_triples(), self.__plan_graph.namespaces(), self.__plan_graph
| {"/agora/client/wrapper.py": ["/agora/client/execution.py"]} |
73,210 | k95yong/computer-security | refs/heads/master | /miller_rabin.py | import random
from exponentian import exp
Prime = 0
Composite = 1
def miller_rabin(n, s):
    """Miller-Rabin probabilistic primality test.

    :param n: Integer to test
    :param s: Number of random witness rounds
    :return: Prime if n is (probably) prime, Composite otherwise
    """
    if n < 2:
        # BUGFIX: 0, 1 and negatives are not prime; previously n == 1
        # crashed in random.randint(1, 0).
        return Composite
    if n == 2:
        return Prime
    elif n % 2 == 0:
        return Composite
    for _ in range(s):
        a = random.randint(1, n-1)
        if test(a, n):
            # `a` is a witness of compositeness.
            return Composite
    return Prime
def get_tu(n):
    """Decompose n-1 as (2**t) * u with u odd.

    :param n: Integer with n >= 2
    :return: Tuple (t, u) of ints

    BUGFIX: uses integer floor division. The original `int(nn / 2)` went
    through float division, which silently corrupts results for n-1 > 2**53;
    it also returned u == 0 for n == 2 instead of u == 1.
    """
    odd = n - 1
    t = 0
    while odd % 2 == 0:
        odd //= 2
        t += 1
    # `odd` is now the odd cofactor u of n-1.
    return t, odd
def test(a, n):
    """Single Miller-Rabin witness check: True iff `a` proves `n` composite."""
    t, u = get_tu(n)
    prev = exp(a, u, n)
    for _ in range(t):
        cur = exp(prev, 2, n)
        # A non-trivial square root of 1 modulo n proves n composite.
        if cur == 1 and prev != 1 and prev != n - 1:
            return True
        prev = cur
    # After t squarings the value must be 1 for a probable prime (Fermat).
    return prev != 1
if __name__ == "__main__":
    # Every entry below is a known prime, so each line should print "Prime"
    # given 20 rounds of random witnesses.
    primes = [7879, 7883, 7901, 7907, 7919, 7927, 7933, 7937, 7949, 7951,
              7963, 7993, 8009, 8011, 8017, 8039, 8053, 8059, 8069, 8081,
              8087, 8089, 8093, 8101, 8111, 8117, 8123, 8147, 8161, 8167]
    for p in primes:
        result = miller_rabin(p, 20)
        if result == Prime:
            print("Prime")
        elif result == Composite:
            print("Composite")
        else:
            print("Undefined")
73,211 | k95yong/computer-security | refs/heads/master | /exponentian.py | def exp(a, b, n):
c = 0
f = 1
bin_b = int_to_bin(b)
k = len(bin_b)
for i in range(k):
c = 2 * c
f = (f * f) % n
if bin_b[i] == '1':
c = c + 1
f = (f * a) % n
return f
def int_to_bin(num):
    """Return the binary digits of num as a list of single characters."""
    return [digit for digit in bin(num)[2:]]
73,212 | k95yong/computer-security | refs/heads/master | /discrete_log.py | import random
from exponentian import exp
def dlog(b, a, m):
    """Brute-force discrete logarithm: smallest cnt with b**cnt == a (mod m).

    :param b: Base
    :param a: Target residue
    :param m: Modulus
    :return: The discrete logarithm of a to base b modulo m
    :raises ValueError: when no such exponent exists (the original looped
        forever in that case)
    """
    cnt = 0
    cur = 1
    while cur != a:
        cur = (cur * b) % m
        cnt += 1
        # Residues of b**k cycle within m steps; if `a` has not appeared by
        # then, it is not a power of b modulo m.
        if cnt > m:
            raise ValueError("no discrete log of {} base {} mod {}".format(a, b, m))
    return cnt
if __name__ == "__main__":
    # Round-trip check: exp() then dlog() must recover each exponent.
    base = 3
    modulus = 65537
    expos = [
        1,
        2,
        65535
    ]
    try:
        for expo in expos:
            powed = exp(base, expo, modulus)
            logged = dlog(base, powed, modulus)
            print("({},{},{},{},{}),".format(expo, base, powed, logged, modulus))
            assert expo == logged
        print(">> All the tests passed <<")
    # BUGFIX: the bare `except:` swallowed every error (KeyboardInterrupt
    # included) and could hit unbound names; only a failed round-trip is a
    # test failure.
    except AssertionError:
        print("expo({}) and logged({}) is not equal".format(expo, logged))
73,213 | k95yong/computer-security | refs/heads/master | /binfield.py | m = 32
def get_poly_str(f):
    """Render the GF(2) polynomial f as a sum of z-powers, highest first."""
    set_bits = [i for i, bit in enumerate(reversed(bin(f)[2:])) if bit == '1']
    return " + ".join("z^{}".format(i) for i in reversed(set_bits))
def carry(a):
    """True when bit 8 of `a` is set (overflow past an 8-bit value)."""
    return bool(a & 0x100)
def bin_mul(a, b, n):
    """Multiply a and b in GF(2^8), reducing by the polynomial n.

    :param a: Field element (8-bit integer)
    :param b: Field element (8-bit integer)
    :param n: Reduction polynomial; only its low 8 bits are used for the
        reduction step (e.g. 0b100011011, the AES polynomial)
    :return: The 8-bit product a * b mod n
    """
    buf = n & 0xff  # the low 8-bit part of `n`
    # Pre-compute t[i] = a * z^i mod n by repeated doubling with reduction.
    t = [0] * 8
    t[0] = a
    for i in range(1, 8):
        t[i] = t[i-1] << 1
        if t[i] & 0x100:   # overflow past bit 7: reduce by the field polynomial
            t[i] &= 0xff
            t[i] ^= buf
    res = 0
    for i in range(8):
        # BUGFIX: the original `b & mask is not 0` is an identity comparison
        # that only worked via CPython's small-int cache (and warns on 3.8+);
        # a plain truth test expresses the intent.
        if b & (1 << i):
            res ^= t[i]
    return res
def deg(bp):
    """Degree of the GF(2) polynomial bp (index of its highest set bit).

    Relies on the module-level bit width `m`: bits at positions >= m are
    ignored, and deg(0) is reported as 0.
    """
    for i in reversed(range(m)):
        if(bp & (1 << i)) != 0:
            return i
    return 0
def bin_ext_euclid(a, b):
    """Extended Euclidean algorithm over GF(2)[z].

    :param a: Polynomial operand (bit-encoded)
    :param b: Polynomial operand (bit-encoded), typically the field modulus
    :return: (d, g, h) with d = gcd(a, b) and g*a + h*b = d in GF(2)
        arithmetic (XOR addition, shift multiplication)
    """
    u = a
    v = b
    g1 = 1
    g2 = 0
    h1 = 0
    h2 = 1
    # BUGFIX: `while u is not 0` compared identity, not value; it only worked
    # through CPython's small-int cache (and warns on 3.8+).
    while u != 0:
        j = deg(u) - deg(v)
        if j < 0:
            # Keep deg(u) >= deg(v) by swapping both operands and coefficients.
            u, v = v, u
            g1, g2 = g2, g1
            h1, h2 = h2, h1
            j = -j
        # Eliminate u's leading term: u -= v * z^j (XOR in GF(2)).
        u = u ^ (v << j)
        g1 = g1 ^ (g2 << j)
        h1 = h1 ^ (h2 << j)
    d, g, h = v, g2, h2
    return d, g, h
def bin_inv(a, n):
    """Multiplicative inverse of `a` in the binary field defined by polynomial `n`."""
    return bin_ext_euclid(a, n)[1]
if __name__ == "__main__":
    print("deg(10) = {}".format(deg(10)))
    # f(z) = z^8 + z^4 + z^3 + z + 1. f(z) is irreducible.
    print(get_poly_str(0b100011011))
    # the example on 4th slide
    print(get_poly_str(bin_mul(0b01010111, 0b10000011, 0b100011011)))
    # Inversion test
    d, g, h = bin_ext_euclid(128, 0b100011011)
    print(d, "|", get_poly_str(g), "|", get_poly_str(h))
    print(get_poly_str(bin_inv(128, 0b100011011)))
    # Sanity check: if 131 is really 128^-1, this product reduces to z^0 (= 1).
    print(get_poly_str(bin_mul(128, 131, 0b100011011)))
73,215 | shivam2298/memeshare | refs/heads/master | /stylit/accounts/models.py | from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Account(models.Model):
    """Profile attached 1:1 to a Django User, holding who that user follows."""
    user = models.OneToOneField(User, related_name="profile", on_delete=models.CASCADE)
    # CLEANUP: null=True has no effect on ManyToManyField (the relation lives
    # in a join table), so it is dropped; blank=True keeps the field optional
    # in forms/admin. No schema change results.
    following = models.ManyToManyField(User, related_name="followers", blank=True)
73,216 | shivam2298/memeshare | refs/heads/master | /stylit/gallery/models.py | from django.db import models
from django.contrib.auth.models import User
from django.db.models import signals
from .tasks import send_notification_email
# Create your models here.
class Meme(models.Model):
    """A user-posted meme: an image with an optional caption, likeable by users."""
    author = models.ForeignKey(User,on_delete=models.CASCADE)
    caption = models.CharField(max_length=40,blank=True,null=True)
    # NOTE(review): FileField rather than ImageField -- no image validation
    # is performed on upload; confirm whether that is intended.
    image = models.FileField(blank=False,null=False)
    like = models.ManyToManyField(User,blank=True,related_name='likes')
    created_at = models.DateTimeField(auto_now_add=True)

    class Meta:
        # Newest memes first by default.
        ordering = ['-created_at',]
def meme_post_save(sender, instance, signal, *args, **kwargs):
    """Queue a follower-notification email after a Meme is saved.

    Connected to Django's post_save signal for Meme below; actual mail
    delivery happens asynchronously in the Celery task.

    NOTE(review): this fires on every save, not only on creation — check
    ``kwargs.get('created')`` if notifications should go out for new
    memes only.
    """
    # Removed leftover debug print that leaked the author's email to stdout.
    send_notification_email.delay(instance.author.pk)
signals.post_save.connect(meme_post_save, sender=Meme)
class Comment(models.Model):
comment = models.CharField(max_length=140)
author = models.ForeignKey(User,on_delete=models.CASCADE)
meme = models.ForeignKey(Meme,related_name='comments',on_delete=models.CASCADE,null=True) | {"/stylit/gallery/models.py": ["/stylit/gallery/tasks.py"], "/stylit/accounts/urls.py": ["/stylit/accounts/views.py"], "/stylit/gallery/views.py": ["/stylit/gallery/models.py"], "/stylit/gallery/urls.py": ["/stylit/gallery/views.py"], "/stylit/accounts/views.py": ["/stylit/accounts/models.py"]} |
73,217 | shivam2298/memeshare | refs/heads/master | /stylit/accounts/migrations/0003_auto_20180630_1837.py | # Generated by Django 2.0.4 on 2018-06-30 18:37
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated: rename Account.follow to Account.following."""
    dependencies = [
        ('accounts', '0002_account_follow'),
    ]
    operations = [
        migrations.RenameField(
            model_name='account',
            old_name='follow',
            new_name='following',
        ),
    ]
| {"/stylit/gallery/models.py": ["/stylit/gallery/tasks.py"], "/stylit/accounts/urls.py": ["/stylit/accounts/views.py"], "/stylit/gallery/views.py": ["/stylit/gallery/models.py"], "/stylit/gallery/urls.py": ["/stylit/gallery/views.py"], "/stylit/accounts/views.py": ["/stylit/accounts/models.py"]} |
73,218 | shivam2298/memeshare | refs/heads/master | /stylit/accounts/urls.py | from django.contrib import admin
from django.urls import path
from django.conf.urls import url,include
from .views import signup,wallview,portfolioview,followview
from django.contrib.auth import views as auth_views
app_name = 'accounts'
# Account routes: registration, session management, and per-user pages.
urlpatterns = [
    url(r'^signup/$', signup, name='signup'),
    # NOTE(review): auth_views.logout/login function-based views were removed
    # in Django 2.1+ — confirm the pinned Django version still provides them.
    url(r'^logout/$', auth_views.logout, {'next_page': '/gallery/index'}, name='logout'),
    url(r'^login/$', auth_views.login, {'template_name': 'accounts/login.html'}, name='login'),
    url(r'^(?P<username>[\w\-]+)/wall/$',wallview, name = 'wallview'),
    url(r'^(?P<username>[\w\-]+)/portfolio/$',portfolioview, name = 'portfolio'),
    url(r'^(?P<username>[\w\-]+)/follow/$',followview, name = 'follow'),
] | {"/stylit/gallery/models.py": ["/stylit/gallery/tasks.py"], "/stylit/accounts/urls.py": ["/stylit/accounts/views.py"], "/stylit/gallery/views.py": ["/stylit/gallery/models.py"], "/stylit/gallery/urls.py": ["/stylit/gallery/views.py"], "/stylit/accounts/views.py": ["/stylit/accounts/models.py"]} |
73,219 | shivam2298/memeshare | refs/heads/master | /stylit/gallery/views.py | from django.shortcuts import render
from django.shortcuts import HttpResponseRedirect
import base64
from django.core.files.base import ContentFile
from .models import Meme,Comment
from django.contrib.auth.models import User
from django.shortcuts import get_object_or_404,HttpResponse,redirect
import json
from django.contrib.auth.decorators import login_required
# Create your views here.
@login_required()
def createview(request):
    """Create a Meme from a base64-encoded image submitted by the client.

    POST: expects ``image`` as a data URL (``data:<mime>;base64,<payload>``)
    plus ``filename`` and ``caption``; decodes the payload, saves it as the
    new Meme's image file owned by the requesting user, then redirects to
    the gallery index.
    GET: renders the creation form.
    """
    if request.method != 'POST':
        return render(request, 'gallery/create.html')
    # Split the data URL into its mime prefix and base64 payload.
    # (renamed from `format`, which shadowed the builtin)
    image_data = request.POST['image']
    mime_prefix, imgstr = image_data.split(';base64,')
    ext = mime_prefix.split('/')[-1]
    name = request.POST['filename'].split('.')[0]
    data = ContentFile(base64.b64decode(imgstr))
    file_name = name + '.' + ext
    meme = Meme()
    meme.author = request.user
    meme.caption = request.POST['caption']
    # save=False defers the DB write until meme.save() below.
    meme.image.save(file_name, data, save=False)
    meme.save()
    return HttpResponseRedirect('/gallery/index')
def indexview(request):
    """Render the public gallery page listing every meme (newest first)."""
    all_memes = Meme.objects.all()
    context = {'memes': all_memes}
    return render(request, 'gallery/index.html', context)
@login_required()
def likeview(request,id):
    """Toggle the requesting user's like on meme ``id``; respond with JSON.

    Returns ``{"liked": <bool>, "likes": <int>}``. ``liked`` stays True
    when no toggle happened — unauthenticated access is already blocked
    by login_required, so the inner guard is belt-and-braces only.
    """
    meme = get_object_or_404(Meme, pk=id)
    isliked = True
    if request.user.is_authenticated:
        if request.user in meme.like.all():
            meme.like.remove(request.user)
            isliked = False
        else:
            meme.like.add(request.user)
    # .count() issues COUNT(*) directly instead of .all().count().
    payload = {'liked': isliked, 'likes': meme.like.count()}
    return HttpResponse(json.dumps(payload), content_type="application/json")
@login_required()
def commentview(request,id):
    """Attach a POSTed comment to meme ``id`` and return a JSON result.

    Success: ``{"added": 1, "username": ..., "comment": ...}``; the
    ``added: 0`` branch is effectively dead because login_required
    already guards the view.
    """
    meme = get_object_or_404(Meme,pk=id)
    if request.user.is_authenticated:
        comment = Comment(author=request.user,comment=request.POST.get('comment'))
        comment.save()
        # Linking through the reverse-FK manager sets comment.meme and re-saves.
        meme.comments.add(comment)
        return HttpResponse(json.dumps({'added':1,'username':request.user.username,'comment':comment.comment}),
                            content_type="application/json")
    else:
        return HttpResponse(json.dumps({'added': 0}),
                            content_type="application/json")
@login_required()
def createcommentview(request,id):
    """Render the comment page for a single meme."""
    target_meme = get_object_or_404(Meme, pk=id)
    return render(request, 'gallery/comment.html', {'meme': target_meme})
| {"/stylit/gallery/models.py": ["/stylit/gallery/tasks.py"], "/stylit/accounts/urls.py": ["/stylit/accounts/views.py"], "/stylit/gallery/views.py": ["/stylit/gallery/models.py"], "/stylit/gallery/urls.py": ["/stylit/gallery/views.py"], "/stylit/accounts/views.py": ["/stylit/accounts/models.py"]} |
73,220 | shivam2298/memeshare | refs/heads/master | /stylit/gallery/tasks.py | from django.core.mail import send_mail
import logging
from django.contrib.auth.models import User
from django.core.mail import send_mail
from stylit.celery import app
@app.task
def send_notification_email(user_id):
    """Celery task: email every follower of ``user_id`` about a new post.

    ``user.followers`` yields Account rows (related_name on
    Account.following), hence ``follower.user.email``. One message is sent
    per follower so recipients never see each other's addresses;
    fail_silently=False lets delivery errors surface in the worker log.
    """
    user = User.objects.get(pk=user_id)
    # Removed leftover debug print that leaked follower emails to stdout.
    for follower in user.followers.all():
        send_mail(
            'A new post',
            user.username + ' created a new post ,check it out',
            'shivamrustagi@hotmail.com',
            [follower.user.email],
            fail_silently=False,
        )
| {"/stylit/gallery/models.py": ["/stylit/gallery/tasks.py"], "/stylit/accounts/urls.py": ["/stylit/accounts/views.py"], "/stylit/gallery/views.py": ["/stylit/gallery/models.py"], "/stylit/gallery/urls.py": ["/stylit/gallery/views.py"], "/stylit/accounts/views.py": ["/stylit/accounts/models.py"]} |
73,221 | shivam2298/memeshare | refs/heads/master | /stylit/gallery/migrations/0001_initial.py | # Generated by Django 2.0.4 on 2018-06-29 13:50
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('accounts', '0002_account_follow'),
]
operations = [
migrations.CreateModel(
name='Meme',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('caption', models.CharField(blank=True, max_length=40, null=True)),
('image', models.ImageField(upload_to='')),
('created_at', models.DateTimeField(auto_now_add=True)),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='accounts.Account')),
('like', models.ManyToManyField(blank=True, related_name='likes', to='accounts.Account')),
('parent', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='gallery.Meme')),
],
options={
'ordering': ['-created_at'],
},
),
]
| {"/stylit/gallery/models.py": ["/stylit/gallery/tasks.py"], "/stylit/accounts/urls.py": ["/stylit/accounts/views.py"], "/stylit/gallery/views.py": ["/stylit/gallery/models.py"], "/stylit/gallery/urls.py": ["/stylit/gallery/views.py"], "/stylit/accounts/views.py": ["/stylit/accounts/models.py"]} |
73,222 | shivam2298/memeshare | refs/heads/master | /stylit/gallery/urls.py | from django.contrib import admin
from django.urls import path
from django.conf.urls import url,include
from django.views.generic import TemplateView
from .views import createview,indexview,likeview,commentview,createcommentview
app_name = 'gallery'
urlpatterns = [
url(r'^index$',indexview , name='index'),
url(r'^create$',createview , name='create'),
url(r'^(?P<id>\d+)/like/$',likeview, name='like'),
url(r'^(?P<id>\d+)/comment/$',commentview, name='comment'),
url(r'^(?P<id>\d+)/add/comment/$',createcommentview, name='commentpage')
] | {"/stylit/gallery/models.py": ["/stylit/gallery/tasks.py"], "/stylit/accounts/urls.py": ["/stylit/accounts/views.py"], "/stylit/gallery/views.py": ["/stylit/gallery/models.py"], "/stylit/gallery/urls.py": ["/stylit/gallery/views.py"], "/stylit/accounts/views.py": ["/stylit/accounts/models.py"]} |
73,223 | shivam2298/memeshare | refs/heads/master | /stylit/gallery/migrations/0003_auto_20180629_2313.py | # Generated by Django 2.0.4 on 2018-06-29 23:13
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('gallery', '0002_auto_20180629_1355'),
]
operations = [
migrations.AlterField(
model_name='meme',
name='author',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='meme',
name='image',
field=models.FileField(upload_to=''),
),
migrations.AlterField(
model_name='meme',
name='like',
field=models.ManyToManyField(blank=True, related_name='likes', to=settings.AUTH_USER_MODEL),
),
]
| {"/stylit/gallery/models.py": ["/stylit/gallery/tasks.py"], "/stylit/accounts/urls.py": ["/stylit/accounts/views.py"], "/stylit/gallery/views.py": ["/stylit/gallery/models.py"], "/stylit/gallery/urls.py": ["/stylit/gallery/views.py"], "/stylit/accounts/views.py": ["/stylit/accounts/models.py"]} |
73,224 | shivam2298/memeshare | refs/heads/master | /stylit/gallery/migrations/0005_auto_20180705_2117.py | # Generated by Django 2.0.4 on 2018-07-05 21:17
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: make Comment.meme a nullable FK with related_name 'comments'."""
    dependencies = [
        ('gallery', '0004_comment'),
    ]
    operations = [
        migrations.AlterField(
            model_name='comment',
            name='meme',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='gallery.Meme'),
        ),
    ]
| {"/stylit/gallery/models.py": ["/stylit/gallery/tasks.py"], "/stylit/accounts/urls.py": ["/stylit/accounts/views.py"], "/stylit/gallery/views.py": ["/stylit/gallery/models.py"], "/stylit/gallery/urls.py": ["/stylit/gallery/views.py"], "/stylit/accounts/views.py": ["/stylit/accounts/models.py"]} |
73,225 | shivam2298/memeshare | refs/heads/master | /stylit/gallery/migrations/0006_remove_meme_parent.py | # Generated by Django 2.0.4 on 2018-07-19 21:58
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated: drop the unused Meme.parent self-reference."""
    dependencies = [
        ('gallery', '0005_auto_20180705_2117'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='meme',
            name='parent',
        ),
    ]
| {"/stylit/gallery/models.py": ["/stylit/gallery/tasks.py"], "/stylit/accounts/urls.py": ["/stylit/accounts/views.py"], "/stylit/gallery/views.py": ["/stylit/gallery/models.py"], "/stylit/gallery/urls.py": ["/stylit/gallery/views.py"], "/stylit/accounts/views.py": ["/stylit/accounts/models.py"]} |
73,226 | shivam2298/memeshare | refs/heads/master | /stylit/accounts/views.py | from django.shortcuts import render
from django.contrib.auth.models import User
from django.shortcuts import get_object_or_404
from django.views import View
from .models import Account
from gallery.models import Meme
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth import authenticate,logout,login
from django.shortcuts import render,redirect,HttpResponseRedirect,HttpResponse
import json
from .forms import SignUpForm
# Create your views here.
def signup(request):
    """Register a new user, create their Account profile, and log them in.

    GET renders an empty SignUpForm. A valid POST creates the auth user,
    attaches an Account row, authenticates with the raw password (so
    login() receives a backend-annotated user), logs in, and redirects to
    the gallery. An invalid POST re-renders the bound form with errors.

    Cleanup: removed the dead ``args`` dict and collapsed the duplicated
    render paths into a single return.
    """
    if request.method == 'POST':
        form = SignUpForm(request.POST)
        if form.is_valid():
            form.save()
            username = form.cleaned_data.get('username')
            raw_password = form.cleaned_data.get('password1')
            user = authenticate(username=username, password=raw_password)
            account = Account(user=user)
            account.save()
            login(request, user)
            return redirect('/gallery/index')
    else:
        form = SignUpForm()
    return render(request, 'accounts/signup.html', {'form': form})
def wallview(request,username):
    """Show ``username``'s wall: memes authored by the users they follow.

    Only the wall's owner may view it; anyone else is bounced to the
    gallery index.
    """
    wall_owner = get_object_or_404(User, username=username)
    if request.user.username != wall_owner.username:
        return HttpResponseRedirect('/gallery/index')
    followed_users = wall_owner.profile.following.all()
    feed = Meme.objects.filter(author__in=followed_users)
    return render(request, 'accounts/wall.html', {'memes': feed})
def portfolioview(request,username):
    """Display every meme authored by ``username``."""
    profile_user = get_object_or_404(User, username=username)
    authored_memes = Meme.objects.filter(author__username=username)
    context = {'memes': authored_memes, 'profile_user': profile_user}
    return render(request, 'accounts/portfolio.html', context)
def followview(request,username):
    """Toggle whether the requesting user follows ``username``; JSON reply.

    Authenticated: flips the follow relation and returns
    ``{"followed": bool, "following": int, "followers": int}`` where both
    counts refer to the *target* user. Anonymous: ``{"notlogedin": true}``
    (key spelling kept — the front-end script depends on it).
    """
    user = get_object_or_404(User, username=username)
    if request.user.is_authenticated:
        # Hoist the repeated request.user.profile lookups.
        own_profile = request.user.profile
        if user in own_profile.following.all():
            own_profile.following.remove(user)
            followed = False
        else:
            own_profile.following.add(user)
            followed = True
        payload = {
            'followed': followed,
            # .count() issues COUNT(*) directly instead of .all().count().
            'following': user.profile.following.count(),
            'followers': user.followers.count(),
        }
        return HttpResponse(json.dumps(payload), content_type="application/json")
    else:
        return HttpResponse(json.dumps({'notlogedin': True}), content_type="application/json")
| {"/stylit/gallery/models.py": ["/stylit/gallery/tasks.py"], "/stylit/accounts/urls.py": ["/stylit/accounts/views.py"], "/stylit/gallery/views.py": ["/stylit/gallery/models.py"], "/stylit/gallery/urls.py": ["/stylit/gallery/views.py"], "/stylit/accounts/views.py": ["/stylit/accounts/models.py"]} |
73,239 | cnridley/youi | refs/heads/master | /progression/apps.py | from django.apps import AppConfig
class ProgressionConfig(AppConfig):
    """App configuration for the progression (progress-tracking) app."""
    name = 'progression'
| {"/comments/views.py": ["/comments/models.py", "/Profile/views.py"], "/workouts/forms.py": ["/workouts/models.py"], "/progression/forms.py": ["/progression/models.py"], "/workouts/views.py": ["/workouts/forms.py", "/workouts/models.py"], "/progression/views.py": ["/progression/models.py", "/progression/forms.py"], "/Profile/views.py": ["/workouts/forms.py", "/workouts/models.py", "/workouts/views.py", "/progression/views.py", "/progression/models.py"], "/reviews/views.py": ["/reviews/models.py"]} |
73,240 | cnridley/youi | refs/heads/master | /Profile/migrations/0002_remove_profile_user.py | # Generated by Django 3.1.2 on 2020-11-28 14:35
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated: remove the Profile.user field."""
    dependencies = [
        ('Profile', '0001_initial'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='profile',
            name='user',
        ),
    ]
| {"/comments/views.py": ["/comments/models.py", "/Profile/views.py"], "/workouts/forms.py": ["/workouts/models.py"], "/progression/forms.py": ["/progression/models.py"], "/workouts/views.py": ["/workouts/forms.py", "/workouts/models.py"], "/progression/views.py": ["/progression/models.py", "/progression/forms.py"], "/Profile/views.py": ["/workouts/forms.py", "/workouts/models.py", "/workouts/views.py", "/progression/views.py", "/progression/models.py"], "/reviews/views.py": ["/reviews/models.py"]} |
73,241 | cnridley/youi | refs/heads/master | /comments/views.py | from django.shortcuts import render, reverse, redirect
from Profile.models import Profile
from .forms import CommentForm
from .models import comment
from Profile.views import profile
# Create your views here.
def comments(request):
    """List the current user's comments and handle new-comment submission.

    A valid POST saves the form and redirects back here; an invalid POST
    falls through so the bound form (with errors) is rendered alongside
    the existing comments.
    """
    profile_qs = Profile.objects.filter(user=request.user)
    own_comments = comment.objects.filter(user=request.user)
    form = CommentForm(request.POST) if request.method == 'POST' else CommentForm()
    if request.method == 'POST' and form.is_valid():
        form.save()
        return redirect(reverse('comments'))
    context = {
        'user': profile_qs,
        'user_comment': own_comments,
        'form': form,
    }
    return render(request, 'comments.html', context)
| {"/comments/views.py": ["/comments/models.py", "/Profile/views.py"], "/workouts/forms.py": ["/workouts/models.py"], "/progression/forms.py": ["/progression/models.py"], "/workouts/views.py": ["/workouts/forms.py", "/workouts/models.py"], "/progression/views.py": ["/progression/models.py", "/progression/forms.py"], "/Profile/views.py": ["/workouts/forms.py", "/workouts/models.py", "/workouts/views.py", "/progression/views.py", "/progression/models.py"], "/reviews/views.py": ["/reviews/models.py"]} |
73,242 | cnridley/youi | refs/heads/master | /comments/models.py | from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class comment(models.Model):
    """A free-standing comment written by a user.

    NOTE: the lowercase class name violates PEP 8 but is kept because
    other modules import it under this name.
    """
    user = models.ForeignKey(User, on_delete=models.CASCADE, null=True)
    title = models.CharField(max_length=500, null=True, blank=True)
    text = models.TextField()
    created_on = models.DateTimeField(auto_now_add=True)
    def __str__(self):
        # title is nullable; fall back to '' so __str__ always returns a str
        # (returning None raised TypeError in the admin / shell).
        return self.title or ''
| {"/comments/views.py": ["/comments/models.py", "/Profile/views.py"], "/workouts/forms.py": ["/workouts/models.py"], "/progression/forms.py": ["/progression/models.py"], "/workouts/views.py": ["/workouts/forms.py", "/workouts/models.py"], "/progression/views.py": ["/progression/models.py", "/progression/forms.py"], "/Profile/views.py": ["/workouts/forms.py", "/workouts/models.py", "/workouts/views.py", "/progression/views.py", "/progression/models.py"], "/reviews/views.py": ["/reviews/models.py"]} |
73,243 | cnridley/youi | refs/heads/master | /bag/urls.py | from django.contrib import admin
from django.urls import path, include
from . import views
urlpatterns = [
path('', views.shopping_bag, name='bag'),
path('add/<item_id>/', views.add_to_bag, name='add_to_bag'),
path('remove/<item_id>/', views.remove_from_bag, name='remove_from_bag'),
] | {"/comments/views.py": ["/comments/models.py", "/Profile/views.py"], "/workouts/forms.py": ["/workouts/models.py"], "/progression/forms.py": ["/progression/models.py"], "/workouts/views.py": ["/workouts/forms.py", "/workouts/models.py"], "/progression/views.py": ["/progression/models.py", "/progression/forms.py"], "/Profile/views.py": ["/workouts/forms.py", "/workouts/models.py", "/workouts/views.py", "/progression/views.py", "/progression/models.py"], "/reviews/views.py": ["/reviews/models.py"]} |
73,244 | cnridley/youi | refs/heads/master | /home/views.py | from django.shortcuts import render
from Profile.models import Profile
# Create your views here.
def index(request):
    """A view to return index page."""
    profiles = Profile.objects.all()
    # Context key stays 'user' because index.html reads it under that name,
    # even though the value is actually a queryset of all profiles.
    return render(request, 'index.html', {'user': profiles})
| {"/comments/views.py": ["/comments/models.py", "/Profile/views.py"], "/workouts/forms.py": ["/workouts/models.py"], "/progression/forms.py": ["/progression/models.py"], "/workouts/views.py": ["/workouts/forms.py", "/workouts/models.py"], "/progression/views.py": ["/progression/models.py", "/progression/forms.py"], "/Profile/views.py": ["/workouts/forms.py", "/workouts/models.py", "/workouts/views.py", "/progression/views.py", "/progression/models.py"], "/reviews/views.py": ["/reviews/models.py"]} |
73,245 | cnridley/youi | refs/heads/master | /progression/migrations/0004_auto_20201204_0232.py | # Generated by Django 3.1.2 on 2020-12-04 02:32
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('progression', '0003_progressionpicture_week'),
]
operations = [
migrations.AddField(
model_name='progressionpicture',
name='chest_measurement',
field=models.CharField(blank=True, max_length=250, null=True),
),
migrations.AddField(
model_name='progressionpicture',
name='waist_measurement',
field=models.CharField(blank=True, max_length=250, null=True),
),
migrations.AddField(
model_name='progressionpicture',
name='weight',
field=models.CharField(blank=True, max_length=250, null=True),
),
]
| {"/comments/views.py": ["/comments/models.py", "/Profile/views.py"], "/workouts/forms.py": ["/workouts/models.py"], "/progression/forms.py": ["/progression/models.py"], "/workouts/views.py": ["/workouts/forms.py", "/workouts/models.py"], "/progression/views.py": ["/progression/models.py", "/progression/forms.py"], "/Profile/views.py": ["/workouts/forms.py", "/workouts/models.py", "/workouts/views.py", "/progression/views.py", "/progression/models.py"], "/reviews/views.py": ["/reviews/models.py"]} |
73,246 | cnridley/youi | refs/heads/master | /Profile/urls.py | from django.contrib import admin
from django.urls import path, include
from . import views
from django.contrib.auth.models import User
urlpatterns = [
path('', views.profile, name='profile'),
] | {"/comments/views.py": ["/comments/models.py", "/Profile/views.py"], "/workouts/forms.py": ["/workouts/models.py"], "/progression/forms.py": ["/progression/models.py"], "/workouts/views.py": ["/workouts/forms.py", "/workouts/models.py"], "/progression/views.py": ["/progression/models.py", "/progression/forms.py"], "/Profile/views.py": ["/workouts/forms.py", "/workouts/models.py", "/workouts/views.py", "/progression/views.py", "/progression/models.py"], "/reviews/views.py": ["/reviews/models.py"]} |
73,247 | cnridley/youi | refs/heads/master | /workouts/forms.py | from django import forms
from .models import workouts
class WorkoutForm(forms.ModelForm):
"""form for the workouts model"""
class Meta():
model = workouts
fields = ('user', 'weeks', 'body_part', 'exercise1', 'sets1', 'reps1',
'exercise2', 'sets2', 'reps2',
'exercise3', 'sets3', 'reps3',
'exercise4', 'sets4', 'reps4',
'exercise5', 'sets5', 'reps5',
'exercise6', 'sets6', 'reps6',
'exercise7', 'sets7', 'reps7',
'exercise8', 'sets8', 'reps8',
'exercise9', 'sets9', 'reps9',
'exercise10', 'sets10', 'reps10',
'gym_or_home') | {"/comments/views.py": ["/comments/models.py", "/Profile/views.py"], "/workouts/forms.py": ["/workouts/models.py"], "/progression/forms.py": ["/progression/models.py"], "/workouts/views.py": ["/workouts/forms.py", "/workouts/models.py"], "/progression/views.py": ["/progression/models.py", "/progression/forms.py"], "/Profile/views.py": ["/workouts/forms.py", "/workouts/models.py", "/workouts/views.py", "/progression/views.py", "/progression/models.py"], "/reviews/views.py": ["/reviews/models.py"]} |
73,248 | cnridley/youi | refs/heads/master | /checkout/signals.py | """This is the way to update the order total, delivery cost, and grand_total for each order
as users add line items to it.
The basic process is that we will first create an order, then iterate through the shopping
bag and add line items to it one by one, updating the various costs along the way.
The method to update the total is already in the order model.
We just need a way to call it each time a line item is attached to the order.
To accomplish this we'll use a built-in feature of django called signals.
"""
from django.db.models.signals import post_save, post_delete
""" post means after. So this implies these signals are sent by django to the entire application
after a model instance is saved and after it's deleted respectively."""
from django.dispatch import receiver
from .models import OrderLineItem
"""receiver decorator to tell django we are recieving post_save signals frrom the OrderLineItem model"""
@receiver(post_save, sender=OrderLineItem)
def update_on_save(sender, instance, created, **kwargs):
    """
    Update order total on lineitem update/create.

    Fired by Django's post_save signal each time an OrderLineItem is
    saved; delegates the arithmetic to the order's update_total().
    """
    instance.order.update_total()
@receiver(post_delete, sender=OrderLineItem)
def update_on_delete(sender, instance, **kwargs):
    """
    Update order total on lineitem delete.

    Fired by Django's post_delete signal so removing a line item
    recomputes the owning order's totals.
    """
instance.order.update_total() | {"/comments/views.py": ["/comments/models.py", "/Profile/views.py"], "/workouts/forms.py": ["/workouts/models.py"], "/progression/forms.py": ["/progression/models.py"], "/workouts/views.py": ["/workouts/forms.py", "/workouts/models.py"], "/progression/views.py": ["/progression/models.py", "/progression/forms.py"], "/Profile/views.py": ["/workouts/forms.py", "/workouts/models.py", "/workouts/views.py", "/progression/views.py", "/progression/models.py"], "/reviews/views.py": ["/reviews/models.py"]} |
73,249 | cnridley/youi | refs/heads/master | /progression/forms.py | from django import forms
from .models import ProgressionPicture
class ProgressionForm(forms.ModelForm):
class Meta():
model = ProgressionPicture
fields = ('user', 'week', 'progression_pic', 'chest_measurement', 'waist_measurement', 'weight') | {"/comments/views.py": ["/comments/models.py", "/Profile/views.py"], "/workouts/forms.py": ["/workouts/models.py"], "/progression/forms.py": ["/progression/models.py"], "/workouts/views.py": ["/workouts/forms.py", "/workouts/models.py"], "/progression/views.py": ["/progression/models.py", "/progression/forms.py"], "/Profile/views.py": ["/workouts/forms.py", "/workouts/models.py", "/workouts/views.py", "/progression/views.py", "/progression/models.py"], "/reviews/views.py": ["/reviews/models.py"]} |
73,250 | cnridley/youi | refs/heads/master | /workouts/models.py | from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.dispatch import receiver
from Profile.models import Profile
# Create your models here.
class workouts(models.Model):
    """A single workout plan: up to ten exercise/reps/sets triples.

    NOTE: the lowercase class name violates PEP 8 but is kept because
    other modules import it under this name. The flat exercise1..10
    columns would normally be a related Exercise model — left as-is to
    avoid a schema migration.
    """
    user = models.ForeignKey(User, on_delete=models.CASCADE, null=True)
    weeks = models.CharField(max_length=250, null=True, blank=True)
    body_part = models.CharField(max_length=250, null=True, blank=True)
    # Ten optional exercise slots; all free-text so the coach can write anything.
    exercise1 = models.CharField(max_length=250, null=True, blank=True)
    reps1 = models.CharField(max_length=250, null=True, blank=True)
    sets1 = models.CharField(max_length=250, null=True, blank=True)
    exercise2 = models.CharField(max_length=250, null=True, blank=True)
    reps2 = models.CharField(max_length=250, null=True, blank=True)
    sets2 = models.CharField(max_length=250, null=True, blank=True)
    exercise3 = models.CharField(max_length=250, null=True, blank=True)
    reps3 = models.CharField(max_length=250, null=True, blank=True)
    sets3 = models.CharField(max_length=250, null=True, blank=True)
    exercise4 = models.CharField(max_length=250, null=True, blank=True)
    reps4 = models.CharField(max_length=250, null=True, blank=True)
    sets4 = models.CharField(max_length=250, null=True, blank=True)
    exercise5 = models.CharField(max_length=250, null=True, blank=True)
    reps5 = models.CharField(max_length=250, null=True, blank=True)
    sets5 = models.CharField(max_length=250, null=True, blank=True)
    exercise6 = models.CharField(max_length=250, null=True, blank=True)
    reps6 = models.CharField(max_length=250, null=True, blank=True)
    sets6 = models.CharField(max_length=250, null=True, blank=True)
    exercise7 = models.CharField(max_length=250, null=True, blank=True)
    reps7 = models.CharField(max_length=250, null=True, blank=True)
    sets7 = models.CharField(max_length=250, null=True, blank=True)
    exercise8 = models.CharField(max_length=250, null=True, blank=True)
    reps8 = models.CharField(max_length=250, null=True, blank=True)
    sets8 = models.CharField(max_length=250, null=True, blank=True)
    exercise9 = models.CharField(max_length=250, null=True, blank=True)
    reps9 = models.CharField(max_length=250, null=True, blank=True)
    sets9 = models.CharField(max_length=250, null=True, blank=True)
    exercise10 = models.CharField(max_length=250, null=True, blank=True)
    reps10 = models.CharField(max_length=250, null=True, blank=True)
    sets10 = models.CharField(max_length=250, null=True, blank=True)
    # Whether the plan targets gym equipment or home training.
    gym_or_home = models.CharField(max_length=250, null=True, blank=True)
def __str__(self):
return self.user.username | {"/comments/views.py": ["/comments/models.py", "/Profile/views.py"], "/workouts/forms.py": ["/workouts/models.py"], "/progression/forms.py": ["/progression/models.py"], "/workouts/views.py": ["/workouts/forms.py", "/workouts/models.py"], "/progression/views.py": ["/progression/models.py", "/progression/forms.py"], "/Profile/views.py": ["/workouts/forms.py", "/workouts/models.py", "/workouts/views.py", "/progression/views.py", "/progression/models.py"], "/reviews/views.py": ["/reviews/models.py"]} |
73,251 | cnridley/youi | refs/heads/master | /progression/models.py | from django.db import models
from django.contrib.auth.models import User
from Profile.models import Profile
# Create your models here.
class ProgressionPicture(models.Model):
    """Weekly progress snapshot: a photo plus body measurements."""
    user = models.ForeignKey(User, on_delete=models.CASCADE, null=True)
    week = models.CharField(max_length=250, null=True, blank=True)
    progression_pic = models.ImageField(null=True, blank=True)
    progression_url = models.URLField(null=True, blank=True)
    # Measurements stored as free text (units chosen by the user).
    chest_measurement = models.CharField(max_length=250, null=True, blank=True)
    waist_measurement = models.CharField(max_length=250, null=True, blank=True)
    weight = models.CharField(max_length=250, null=True, blank=True)
def __str__(self):
return self.user.username | {"/comments/views.py": ["/comments/models.py", "/Profile/views.py"], "/workouts/forms.py": ["/workouts/models.py"], "/progression/forms.py": ["/progression/models.py"], "/workouts/views.py": ["/workouts/forms.py", "/workouts/models.py"], "/progression/views.py": ["/progression/models.py", "/progression/forms.py"], "/Profile/views.py": ["/workouts/forms.py", "/workouts/models.py", "/workouts/views.py", "/progression/views.py", "/progression/models.py"], "/reviews/views.py": ["/reviews/models.py"]} |
73,252 | cnridley/youi | refs/heads/master | /newclient/views.py | from django.shortcuts import render, redirect
from Profile.models import Profile
from django.contrib.auth import login, authenticate
from .forms import SignUpForm
# Create your views here.
def newclient(request):
    """Sign up a new client: create the user, authenticate, log in, redirect home.

    GET renders an empty SignUpForm. A valid POST saves the new auth
    user, re-authenticates with the raw password so login() receives a
    backend-annotated user, and redirects to 'home'. An invalid POST
    re-renders the bound form with its errors.

    Fixes: removed the redundant ``user.save()`` that immediately
    followed ``refresh_from_db()`` (it wrote nothing new and re-fired
    post_save signals) and the stale "index page" docstring.
    """
    # NOTE(review): request.user may be anonymous here — confirm
    # Profile.objects.filter(user=AnonymousUser) is acceptable upstream.
    user = Profile.objects.filter(user=request.user)
    if request.method == 'POST':
        form = SignUpForm(request.POST)
        if form.is_valid():
            user = form.save()
            # Reload so rows created by post_save signals are visible.
            user.refresh_from_db()
            raw_password = form.cleaned_data.get('password1')
            user = authenticate(username=user.username, password=raw_password)
            login(request, user)
            return redirect('home')
    else:
        form = SignUpForm()
    context = {
        'user': user,
        'form': form,
    }
    return render(request, 'newclient.html', context)
| {"/comments/views.py": ["/comments/models.py", "/Profile/views.py"], "/workouts/forms.py": ["/workouts/models.py"], "/progression/forms.py": ["/progression/models.py"], "/workouts/views.py": ["/workouts/forms.py", "/workouts/models.py"], "/progression/views.py": ["/progression/models.py", "/progression/forms.py"], "/Profile/views.py": ["/workouts/forms.py", "/workouts/models.py", "/workouts/views.py", "/progression/views.py", "/progression/models.py"], "/reviews/views.py": ["/reviews/models.py"]} |
73,253 | cnridley/youi | refs/heads/master | /workouts/migrations/0004_auto_20201201_0345.py | # Generated by Django 3.1.2 on 2020-12-01 03:45
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workouts', '0003_auto_20201201_0339'),
]
operations = [
migrations.AlterField(
model_name='workouts',
name='reps1',
field=models.CharField(blank=True, max_length=250, null=True),
),
migrations.AlterField(
model_name='workouts',
name='reps10',
field=models.CharField(blank=True, max_length=250, null=True),
),
migrations.AlterField(
model_name='workouts',
name='reps2',
field=models.CharField(blank=True, max_length=250, null=True),
),
migrations.AlterField(
model_name='workouts',
name='reps3',
field=models.CharField(blank=True, max_length=250, null=True),
),
migrations.AlterField(
model_name='workouts',
name='reps4',
field=models.CharField(blank=True, max_length=250, null=True),
),
migrations.AlterField(
model_name='workouts',
name='reps5',
field=models.CharField(blank=True, max_length=250, null=True),
),
migrations.AlterField(
model_name='workouts',
name='reps6',
field=models.CharField(blank=True, max_length=250, null=True),
),
migrations.AlterField(
model_name='workouts',
name='reps7',
field=models.CharField(blank=True, max_length=250, null=True),
),
migrations.AlterField(
model_name='workouts',
name='reps8',
field=models.CharField(blank=True, max_length=250, null=True),
),
migrations.AlterField(
model_name='workouts',
name='reps9',
field=models.CharField(blank=True, max_length=250, null=True),
),
migrations.AlterField(
model_name='workouts',
name='sets1',
field=models.CharField(blank=True, max_length=250, null=True),
),
migrations.AlterField(
model_name='workouts',
name='sets10',
field=models.CharField(blank=True, max_length=250, null=True),
),
migrations.AlterField(
model_name='workouts',
name='sets2',
field=models.CharField(blank=True, max_length=250, null=True),
),
migrations.AlterField(
model_name='workouts',
name='sets3',
field=models.CharField(blank=True, max_length=250, null=True),
),
migrations.AlterField(
model_name='workouts',
name='sets4',
field=models.CharField(blank=True, max_length=250, null=True),
),
migrations.AlterField(
model_name='workouts',
name='sets5',
field=models.CharField(blank=True, max_length=250, null=True),
),
migrations.AlterField(
model_name='workouts',
name='sets6',
field=models.CharField(blank=True, max_length=250, null=True),
),
migrations.AlterField(
model_name='workouts',
name='sets7',
field=models.CharField(blank=True, max_length=250, null=True),
),
migrations.AlterField(
model_name='workouts',
name='sets8',
field=models.CharField(blank=True, max_length=250, null=True),
),
migrations.AlterField(
model_name='workouts',
name='sets9',
field=models.CharField(blank=True, max_length=250, null=True),
),
migrations.AlterField(
model_name='workouts',
name='weeks',
field=models.CharField(blank=True, max_length=250, null=True),
),
]
| {"/comments/views.py": ["/comments/models.py", "/Profile/views.py"], "/workouts/forms.py": ["/workouts/models.py"], "/progression/forms.py": ["/progression/models.py"], "/workouts/views.py": ["/workouts/forms.py", "/workouts/models.py"], "/progression/views.py": ["/progression/models.py", "/progression/forms.py"], "/Profile/views.py": ["/workouts/forms.py", "/workouts/models.py", "/workouts/views.py", "/progression/views.py", "/progression/models.py"], "/reviews/views.py": ["/reviews/models.py"]} |
73,254 | cnridley/youi | refs/heads/master | /newclient/apps.py | from django.apps import AppConfig
class NewclientConfig(AppConfig):
    """Django application configuration for the `newclient` app."""
    name = 'newclient'
| {"/comments/views.py": ["/comments/models.py", "/Profile/views.py"], "/workouts/forms.py": ["/workouts/models.py"], "/progression/forms.py": ["/progression/models.py"], "/workouts/views.py": ["/workouts/forms.py", "/workouts/models.py"], "/progression/views.py": ["/progression/models.py", "/progression/forms.py"], "/Profile/views.py": ["/workouts/forms.py", "/workouts/models.py", "/workouts/views.py", "/progression/views.py", "/progression/models.py"], "/reviews/views.py": ["/reviews/models.py"]} |
73,255 | cnridley/youi | refs/heads/master | /workouts/views.py | from django.shortcuts import render, reverse, redirect, get_object_or_404
from .forms import WorkoutForm
from Profile.models import Profile
from .models import workouts
from django.contrib import messages
from django.contrib.auth.decorators import login_required
# Create your views here.
def workouts_page(request):
    """Show the current user's workouts and handle new-workout submissions.

    GET renders an empty WorkoutForm; a valid POST saves the form and
    redirects back to this page (post/redirect/get).
    """
    profile_qs = Profile.objects.filter(user=request.user)
    workout_qs = workouts.objects.filter(user=request.user)
    if request.method == 'POST':
        form = WorkoutForm(request.POST)
        if form.is_valid():
            form.save()
            return redirect(reverse('workouts_page'))
    else:
        form = WorkoutForm()
    return render(request, 'workouts.html', {
        'user': profile_qs,
        'workout': workout_qs,
        'form': form,
    })
@login_required
def edit_workout(request, workout_id):
    """Let a superuser edit an existing workout; others are bounced to Profile."""
    if not request.user.is_superuser:
        messages.info(request, 'Sorry, you can not do that.')
        return redirect(reverse('Profile'))
    workout = get_object_or_404(workouts, pk=workout_id)
    if request.method != 'POST':
        form = WorkoutForm(instance=workout)
    else:
        form = WorkoutForm(request.POST, request.FILES, instance=workout)
        if form.is_valid():
            form.save()
            messages.info(request, 'Successfully updated workout!')
            return redirect(reverse('workouts_page'))
        messages.error(request, 'Failed to update picture. Please ensure the form is valid.')
    return render(request, 'edit_workout.html', {
        'form': form,
        'workout': workout,
    })
@login_required
def delete_workout(request, workouts_id):
    """Delete a workout and return to the workouts page.

    Fixes:
    - reverse() previously targeted the URL name 'workouts', which is not
      defined in workouts/urls.py (names are 'workouts_page', 'edit_workout',
      'delete_workout') and would raise NoReverseMatch; use 'workouts_page'.
    - The flash message said 'Picture deleted!' (copy-pasted from the
      progression app); it now refers to a workout.
    """
    if not request.user.is_authenticated:
        # Unreachable under @login_required; kept as defence in depth.
        messages.info(request, 'Sorry, you can not do that!')
        return redirect(reverse('workouts_page'))
    workout = get_object_or_404(workouts, pk=workouts_id)
    workout.delete()
    messages.info(request, 'Workout deleted!')
    return redirect(reverse('workouts_page'))
73,256 | cnridley/youi | refs/heads/master | /bag/views.py | from django.shortcuts import render, redirect, reverse, HttpResponse
from django.contrib import messages
from products.models import Product
from Profile.models import Profile
# Create your views here.
def shopping_bag(request):
    """A view to show the shopping bag"""
    profile_qs = Profile.objects.filter(user=request.user)
    return render(request, 'bag.html', {'user': profile_qs})
def add_to_bag(request, item_id):
    """ Add a quantity of the specified product to the shopping bag """
    product = Product.objects.get(pk=item_id)
    quantity = int(request.POST.get('quantity'))
    redirect_url = request.POST.get('redirect_url')
    size = request.POST['product_size'] if 'product_size' in request.POST else None
    bag = request.session.get('bag', {})
    if size is None:
        # Products without sizes store a bare quantity per item id.
        if item_id in bag:
            bag[item_id] += quantity
        else:
            bag[item_id] = quantity
        messages.success(request, f'Added {product.name} to your bag')
    else:
        # Sized products keep a per-size quantity dict under 'items_by_size'.
        if item_id not in bag:
            bag[item_id] = {'items_by_size': {size: quantity}}
            messages.success(request, f'Added size {size.upper()} {product.name} to your bag')
        elif size in bag[item_id]['items_by_size']:
            bag[item_id]['items_by_size'][size] += quantity
            messages.success(request, f'Added {product.name} to your bag')
        else:
            bag[item_id]['items_by_size'][size] = quantity
            messages.success(request, f'Added size {size.upper()} {product.name} to your bag')
    request.session['bag'] = bag
    return redirect(redirect_url)
def remove_from_bag(request, item_id):
    """Remove the item from the shopping bag"""
    try:
        size = request.POST['product_size'] if 'product_size' in request.POST else None
        cart = request.session.get('bag', {})
        if size:
            del cart[item_id]['items_by_size'][size]
            # Drop the item entirely once its last size is gone.
            if not cart[item_id]['items_by_size']:
                cart.pop(item_id)
        else:
            cart.pop(item_id)
        request.session['bag'] = cart
        return HttpResponse(status=200)
    except Exception:
        # Best-effort endpoint called from JS; any failure maps to a 500.
        return HttpResponse(status=500)
73,257 | cnridley/youi | refs/heads/master | /progression/views.py | from django.shortcuts import render, reverse, redirect, get_object_or_404
from .models import ProgressionPicture
from Profile.models import Profile
from .forms import ProgressionForm
from django.contrib import messages
from django.contrib.auth.decorators import login_required
# Create your views here.
def progression_pictures(request):
    """Show the user's progression pictures and accept new uploads."""
    profile_qs = Profile.objects.filter(user=request.user)
    pictures = ProgressionPicture.objects.filter(user=request.user)
    if request.method == 'POST':
        form = ProgressionForm(request.POST, request.FILES)
        if form.is_valid():
            form.save()
            return redirect(reverse('progression_pictures'))
    else:
        form = ProgressionForm()
    return render(request, 'progression_pics.html', {
        'user': profile_qs,
        'progression': pictures,
        'form': form,
    })
@login_required
def delete_picture(request, progressionpicture_id):
    """Delete one of the user's progression pictures."""
    if not request.user.is_authenticated:
        # Redundant under @login_required, preserved from the original flow.
        messages.info(request, 'Sorry, you can not do that!')
        return redirect(reverse('Profile'))
    target = get_object_or_404(ProgressionPicture, pk=progressionpicture_id)
    target.delete()
    messages.info(request, 'Picture deleted!')
    return redirect(reverse('progression_pictures'))
@login_required
def edit_picture(request, progression_id):
    """Edit one of the user's progression pictures."""
    if not request.user.is_authenticated:
        messages.info(request, 'Sorry, you can not do that.')
        return redirect(reverse('Profile'))
    progression = get_object_or_404(ProgressionPicture, pk=progression_id)
    if request.method != 'POST':
        form = ProgressionForm(instance=progression)
    else:
        form = ProgressionForm(request.POST, request.FILES, instance=progression)
        if form.is_valid():
            form.save()
            messages.info(request, 'Successfully updated picture!')
            return redirect(reverse('progression_pictures'))
        messages.error(request, 'Failed to update picture. Please ensure the form is valid.')
    return render(request, 'edit_picture.html', {
        'form': form,
        'progression': progression,
    })
| {"/comments/views.py": ["/comments/models.py", "/Profile/views.py"], "/workouts/forms.py": ["/workouts/models.py"], "/progression/forms.py": ["/progression/models.py"], "/workouts/views.py": ["/workouts/forms.py", "/workouts/models.py"], "/progression/views.py": ["/progression/models.py", "/progression/forms.py"], "/Profile/views.py": ["/workouts/forms.py", "/workouts/models.py", "/workouts/views.py", "/progression/views.py", "/progression/models.py"], "/reviews/views.py": ["/reviews/models.py"]} |
73,258 | cnridley/youi | refs/heads/master | /Profile/views.py | from django.shortcuts import render, reverse
from .models import Profile
from workouts.forms import WorkoutForm
from workouts.models import workouts
from workouts.views import workouts_page
from progression.views import progression_pictures
from progression.models import ProgressionPicture
def profile(request):
    """ Display the user's profile. """
    context = {'user': Profile.objects.filter(user=request.user)}
    return render(request, 'Profile.html', context)
73,259 | cnridley/youi | refs/heads/master | /reviews/migrations/0001_initial.py | # Generated by Django 3.1.2 on 2020-12-05 15:28
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated initial migration for the reviews app.

    Creates the Reviews table. NOTE(review): the rating choices here
    (Poor..Amazing) differ from the current model's ('1'..'5'); choices
    are display-only so no follow-up migration is strictly required.
    """
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Reviews',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, max_length=50, null=True)),
                ('title', models.CharField(blank=True, max_length=500, null=True)),
                ('rating', models.IntegerField(blank=True, choices=[(1, 'Poor'), (2, 'Average'), (3, 'Good'), (4, 'Great'), (5, 'Amazing')], default=3, null=True)),
                ('text', models.TextField()),
                ('created_on', models.DateTimeField(auto_now_add=True)),
            ],
        ),
    ]
| {"/comments/views.py": ["/comments/models.py", "/Profile/views.py"], "/workouts/forms.py": ["/workouts/models.py"], "/progression/forms.py": ["/progression/models.py"], "/workouts/views.py": ["/workouts/forms.py", "/workouts/models.py"], "/progression/views.py": ["/progression/models.py", "/progression/forms.py"], "/Profile/views.py": ["/workouts/forms.py", "/workouts/models.py", "/workouts/views.py", "/progression/views.py", "/progression/models.py"], "/reviews/views.py": ["/reviews/models.py"]} |
73,260 | cnridley/youi | refs/heads/master | /workouts/urls.py | from django.contrib import admin
from django.urls import path, include
from . import views
# Routes for the workouts app: list/create, edit, and delete views.
urlpatterns = [
    path('', views.workouts_page, name='workouts_page'),
    path('edit/<int:workout_id>/', views.edit_workout, name='edit_workout'),
    path('delete/<int:workouts_id>/', views.delete_workout, name='delete_workout'),
]
73,261 | cnridley/youi | refs/heads/master | /workouts/migrations/0002_auto_20201201_0251.py | # Generated by Django 3.1.2 on 2020-12-01 02:51
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: renames 'exercise' to 'exercise1' and adds the
    exercise2..exercise10 columns (the schema models up to ten exercises
    per workout as flat columns)."""
    dependencies = [
        ('workouts', '0001_initial'),
    ]
    operations = [
        migrations.RenameField(
            model_name='workouts',
            old_name='exercise',
            new_name='exercise1',
        ),
        migrations.AddField(
            model_name='workouts',
            name='exercise10',
            field=models.CharField(blank=True, max_length=250, null=True),
        ),
        migrations.AddField(
            model_name='workouts',
            name='exercise2',
            field=models.CharField(blank=True, max_length=250, null=True),
        ),
        migrations.AddField(
            model_name='workouts',
            name='exercise3',
            field=models.CharField(blank=True, max_length=250, null=True),
        ),
        migrations.AddField(
            model_name='workouts',
            name='exercise4',
            field=models.CharField(blank=True, max_length=250, null=True),
        ),
        migrations.AddField(
            model_name='workouts',
            name='exercise5',
            field=models.CharField(blank=True, max_length=250, null=True),
        ),
        migrations.AddField(
            model_name='workouts',
            name='exercise6',
            field=models.CharField(blank=True, max_length=250, null=True),
        ),
        migrations.AddField(
            model_name='workouts',
            name='exercise7',
            field=models.CharField(blank=True, max_length=250, null=True),
        ),
        migrations.AddField(
            model_name='workouts',
            name='exercise8',
            field=models.CharField(blank=True, max_length=250, null=True),
        ),
        migrations.AddField(
            model_name='workouts',
            name='exercise9',
            field=models.CharField(blank=True, max_length=250, null=True),
        ),
    ]
| {"/comments/views.py": ["/comments/models.py", "/Profile/views.py"], "/workouts/forms.py": ["/workouts/models.py"], "/progression/forms.py": ["/progression/models.py"], "/workouts/views.py": ["/workouts/forms.py", "/workouts/models.py"], "/progression/views.py": ["/progression/models.py", "/progression/forms.py"], "/Profile/views.py": ["/workouts/forms.py", "/workouts/models.py", "/workouts/views.py", "/progression/views.py", "/progression/models.py"], "/reviews/views.py": ["/reviews/models.py"]} |
73,262 | cnridley/youi | refs/heads/master | /progression/urls.py | from django.contrib import admin
from django.urls import path, include
from . import views
# Routes for the progression app: gallery/upload, delete, and edit views.
urlpatterns = [
    path('', views.progression_pictures, name='progression_pictures'),
    path('delete/<int:progressionpicture_id>/', views.delete_picture, name='delete_picture'),
    path('edit/<int:progression_id>/', views.edit_picture, name='edit_picture'),
]
73,263 | cnridley/youi | refs/heads/master | /reviews/views.py | from django.shortcuts import render, reverse, redirect, get_object_or_404
from .models import Reviews
from Profile.models import Profile
from .forms import ReviewForm
from django.contrib.auth.decorators import login_required
from django.contrib import messages
# Create your views here.
def reviews(request):
    """List all site reviews and handle submission of a new review."""
    all_reviews = Reviews.objects.all()
    profiles = Profile.objects.all()
    if request.method == 'POST':
        form = ReviewForm(request.POST)
        if form.is_valid():
            form.save()
            return redirect(reverse('reviews'))
    else:
        form = ReviewForm()
    return render(request, 'reviews.html', {
        'review': all_reviews,
        'user': profiles,
        'form': form,
    })
@login_required
def delete_review(request, reviews_id):
    """Delete a review (store owners only), then return to the reviews page.

    Fix: the original called reverse(reviews) with the view *callable*
    (shadowed module-level name) instead of the URL name string; every
    other view in this project reverses by name, so use 'reviews' for
    consistency and to avoid breakage if the view is renamed or wrapped.
    """
    if not request.user.is_superuser:
        messages.info(request, 'Sorry, only store owners can do that.')
        return redirect(reverse('home'))
    review = get_object_or_404(Reviews, pk=reviews_id)
    review.delete()
    return redirect(reverse('reviews'))
| {"/comments/views.py": ["/comments/models.py", "/Profile/views.py"], "/workouts/forms.py": ["/workouts/models.py"], "/progression/forms.py": ["/progression/models.py"], "/workouts/views.py": ["/workouts/forms.py", "/workouts/models.py"], "/progression/views.py": ["/progression/models.py", "/progression/forms.py"], "/Profile/views.py": ["/workouts/forms.py", "/workouts/models.py", "/workouts/views.py", "/progression/views.py", "/progression/models.py"], "/reviews/views.py": ["/reviews/models.py"]} |
73,264 | cnridley/youi | refs/heads/master | /gallery/views.py | from django.shortcuts import render, reverse, redirect
from .models import Gallery
from .forms import ImageForm
from Profile.models import Profile
# Create your views here.
def gallery(request):
    """A view to return gallery page"""
    # Note: the local was renamed to avoid shadowing the view name.
    images = Gallery.objects.all()
    profiles = Profile.objects.all()
    if request.method == 'POST':
        form = ImageForm(request.POST, request.FILES)
        if form.is_valid():
            form.save()
            return redirect(reverse('gallery'))
    else:
        form = ImageForm()
    return render(request, 'gallery.html', {
        'gallery': images,
        'form': form,
        'user': profiles,
    })
| {"/comments/views.py": ["/comments/models.py", "/Profile/views.py"], "/workouts/forms.py": ["/workouts/models.py"], "/progression/forms.py": ["/progression/models.py"], "/workouts/views.py": ["/workouts/forms.py", "/workouts/models.py"], "/progression/views.py": ["/progression/models.py", "/progression/forms.py"], "/Profile/views.py": ["/workouts/forms.py", "/workouts/models.py", "/workouts/views.py", "/progression/views.py", "/progression/models.py"], "/reviews/views.py": ["/reviews/models.py"]} |
73,265 | cnridley/youi | refs/heads/master | /workouts/migrations/0003_auto_20201201_0339.py | # Generated by Django 3.1.2 on 2020-12-01 03:39
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: renames the reps/sets columns and adds
    reps2..reps9 and sets1..sets10 as DecimalField columns.

    NOTE(review): 'sets' is renamed to 'reps10' (not 'sets10') below —
    this looks like it may have been unintended; verify against the
    model history before relying on the 'reps10' column's contents.
    """
    dependencies = [
        ('workouts', '0002_auto_20201201_0251'),
    ]
    operations = [
        migrations.RenameField(
            model_name='workouts',
            old_name='reps',
            new_name='reps1',
        ),
        migrations.RenameField(
            model_name='workouts',
            old_name='sets',
            new_name='reps10',
        ),
        migrations.AddField(
            model_name='workouts',
            name='reps2',
            field=models.DecimalField(blank=True, decimal_places=0, max_digits=50, null=True),
        ),
        migrations.AddField(
            model_name='workouts',
            name='reps3',
            field=models.DecimalField(blank=True, decimal_places=0, max_digits=50, null=True),
        ),
        migrations.AddField(
            model_name='workouts',
            name='reps4',
            field=models.DecimalField(blank=True, decimal_places=0, max_digits=50, null=True),
        ),
        migrations.AddField(
            model_name='workouts',
            name='reps5',
            field=models.DecimalField(blank=True, decimal_places=0, max_digits=50, null=True),
        ),
        migrations.AddField(
            model_name='workouts',
            name='reps6',
            field=models.DecimalField(blank=True, decimal_places=0, max_digits=50, null=True),
        ),
        migrations.AddField(
            model_name='workouts',
            name='reps7',
            field=models.DecimalField(blank=True, decimal_places=0, max_digits=50, null=True),
        ),
        migrations.AddField(
            model_name='workouts',
            name='reps8',
            field=models.DecimalField(blank=True, decimal_places=0, max_digits=50, null=True),
        ),
        migrations.AddField(
            model_name='workouts',
            name='reps9',
            field=models.DecimalField(blank=True, decimal_places=0, max_digits=50, null=True),
        ),
        migrations.AddField(
            model_name='workouts',
            name='sets1',
            field=models.DecimalField(blank=True, decimal_places=0, max_digits=50, null=True),
        ),
        migrations.AddField(
            model_name='workouts',
            name='sets10',
            field=models.DecimalField(blank=True, decimal_places=0, max_digits=50, null=True),
        ),
        migrations.AddField(
            model_name='workouts',
            name='sets2',
            field=models.DecimalField(blank=True, decimal_places=0, max_digits=50, null=True),
        ),
        migrations.AddField(
            model_name='workouts',
            name='sets3',
            field=models.DecimalField(blank=True, decimal_places=0, max_digits=50, null=True),
        ),
        migrations.AddField(
            model_name='workouts',
            name='sets4',
            field=models.DecimalField(blank=True, decimal_places=0, max_digits=50, null=True),
        ),
        migrations.AddField(
            model_name='workouts',
            name='sets5',
            field=models.DecimalField(blank=True, decimal_places=0, max_digits=50, null=True),
        ),
        migrations.AddField(
            model_name='workouts',
            name='sets6',
            field=models.DecimalField(blank=True, decimal_places=0, max_digits=50, null=True),
        ),
        migrations.AddField(
            model_name='workouts',
            name='sets7',
            field=models.DecimalField(blank=True, decimal_places=0, max_digits=50, null=True),
        ),
        migrations.AddField(
            model_name='workouts',
            name='sets8',
            field=models.DecimalField(blank=True, decimal_places=0, max_digits=50, null=True),
        ),
        migrations.AddField(
            model_name='workouts',
            name='sets9',
            field=models.DecimalField(blank=True, decimal_places=0, max_digits=50, null=True),
        ),
    ]
| {"/comments/views.py": ["/comments/models.py", "/Profile/views.py"], "/workouts/forms.py": ["/workouts/models.py"], "/progression/forms.py": ["/progression/models.py"], "/workouts/views.py": ["/workouts/forms.py", "/workouts/models.py"], "/progression/views.py": ["/progression/models.py", "/progression/forms.py"], "/Profile/views.py": ["/workouts/forms.py", "/workouts/models.py", "/workouts/views.py", "/progression/views.py", "/progression/models.py"], "/reviews/views.py": ["/reviews/models.py"]} |
73,266 | cnridley/youi | refs/heads/master | /reviews/models.py | from django.db import models
# Create your models here.
class Reviews(models.Model):
    """A site review: optional reviewer name and title, 1-5 rating, body text."""
    # (stored value, display label) pairs for the rating field.
    My_Rating = (
        (1, '1'),
        (2, '2'),
        (3, '3'),
        (4, '4'),
        (5, '5'),
    )
    name = models.CharField(max_length=50, blank=True, null=True)
    title = models.CharField(max_length=500, null=True, blank=True)
    rating = models.IntegerField(choices=My_Rating, null=True, blank=True, default=3)
    text = models.TextField()
    created_on = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        # Fix: title is nullable, and __str__ must always return a str —
        # returning None raises TypeError (e.g. in the admin change list).
        return self.title or "Review {}".format(self.pk)
73,267 | cnridley/youi | refs/heads/master | /products/views.py | from django.shortcuts import render, reverse, redirect, get_object_or_404
from .models import Product
from Profile.models import Profile
from .forms import ProductForm
from django.contrib import messages
from django.contrib.auth.decorators import login_required
# Create your views here.
def all_products(request):
    """A view to show all products selling"""
    catalogue = Product.objects.all()
    profiles = Profile.objects.all()
    if request.method == 'POST':
        form = ProductForm(request.POST, request.FILES)
        if form.is_valid():
            form.save()
            return redirect(reverse('products'))
    else:
        form = ProductForm()
    return render(request, 'products.html', {
        'products': catalogue,
        'user': profiles,
        'form': form,
    })
@login_required
def edit_product(request, product_id):
    """ Edit a product in the store """
    if not request.user.is_superuser:
        messages.info(request, 'Sorry, only store owners can do that.')
        return redirect(reverse('home'))
    product = get_object_or_404(Product, pk=product_id)
    if request.method != 'POST':
        form = ProductForm(instance=product)
        messages.info(request, f'You are editing {product.name}')
    else:
        form = ProductForm(request.POST, request.FILES, instance=product)
        if form.is_valid():
            form.save()
            messages.info(request, 'Successfully updated product!')
            return redirect(reverse('products'))
        messages.error(request, 'Failed to update product. Please ensure the form is valid.')
    return render(request, 'edit_product.html', {
        'form': form,
        'product': product,
    })
@login_required
def delete_product(request, product_id):
    """ Delete a product from the store """
    if not request.user.is_superuser:
        messages.info(request, 'Sorry, only store owners can do that.')
        return redirect(reverse('home'))
    target = get_object_or_404(Product, pk=product_id)
    target.delete()
    messages.success(request, 'Product deleted!')
    return redirect(reverse('products'))
73,270 | steemblog/blog | refs/heads/master | /steem/comment.py | # -*- coding:utf-8 -*-
from beem import Steem
from beem.comment import Comment
from beem.exceptions import ContentDoesNotExistsException
from steem.settings import STEEM_HOST
from steem.markdown import SteemMarkdown
from utils.logging.logger import logger
class SteemComment:
    """Lazy wrapper around a steem post/comment.

    Can be constructed from any one of: a beem Comment object, an
    "author/permlink" string, or a full post URL; the other two
    representations are derived and cached on first access.
    """
    def __init__(self, comment=None, author_perm=None, url=None):
        # Exactly one of these is expected to be provided by the caller;
        # the others are filled in lazily by the getters below.
        self.comment = comment
        self.author_perm = author_perm
        self.url = url
    def get_author_perm(self):
        """Return "author/permlink", deriving it from the URL if needed."""
        if self.author_perm is None:
            # Last two path segments of the URL are @author and permlink.
            self.author_perm = "/".join(self.url.split("/")[-2:])
        return self.author_perm
    def get_comment(self):
        """Return the beem Comment, fetching it on first access."""
        if self.comment is None:
            self.comment = Comment(self.get_author_perm())
        return self.comment
    def get_url(self):
        """Return the full post URL on STEEM_HOST, building it if needed."""
        if self.url is None:
            if self.author_perm:
                self.url = u"{}/{}".format(STEEM_HOST, self.author_perm)
            else:
                c = self.get_comment()
                if c.authorperm:
                    self.url = u"{}/{}".format(STEEM_HOST, c.authorperm)
                else:
                    self.url = u"{}/@{}/{}".format(STEEM_HOST, c.author, c.permlink)
        return self.url
    def get_pic_url(self, regex=False):
        """Return the post's top image URL (regex-based extraction if regex=True)."""
        body = self.get_comment().body
        return SteemMarkdown(body).get_top_image(regex)
    def get_text_body(self):
        """Return the post body rendered to plain text."""
        body = self.get_comment().body
        return SteemMarkdown(body).get_rendered_text()
    def get_compatible_markdown(self):
        """Return the post body normalized to steem-compatible markdown."""
        body = self.get_comment().body
        return SteemMarkdown(body).get_steem_markdown()
    def get_tags(self):
        """Return the post's tag list; refreshes once if metadata is missing."""
        c = self.get_comment()
        if c.json_metadata and 'tags' in c.json_metadata:
            return c.json_metadata['tags']
        else:
            # Metadata may be stale/empty on first fetch; retry after refresh.
            self.refresh()
            c = self.get_comment()
            if c.json_metadata and 'tags' in c.json_metadata:
                return c.json_metadata['tags']
        return []
    def has_tag(self, tag):
        """True if the post carries the given tag."""
        return tag in self.get_tags()
    def has_tags(self, tags):
        """True if the post carries ANY of the given tags (False for empty input)."""
        if not tags or len(tags) == 0:
            return False
        for tag in tags:
            if self.has_tag(tag):
                return True
        return False
    def refresh(self):
        """Reload the comment from the chain; returns None if it was deleted."""
        c = self.get_comment()
        try:
            c.refresh()
        except ContentDoesNotExistsException:
            logger.info("Failed when refresh {} with title [{}], which is probably deleted.".format(self.get_url(), c.title))
            return None
        return c
    def log(self):
        """Log author, title, URL and creation time of the post."""
        c = self.get_comment()
        logger.info("@%s | %s | %s | %s" % (c.author, c.title, self.get_url(), c['created']))
    def is_commented_by(self, account):
        """True if `account` has replied to this post."""
        commented_by = [reply['author'] for reply in self.get_comment().get_replies()]
        return account in commented_by
    def is_upvoted_by(self, account):
        """True if `account` appears among the post's active voters."""
        voters = self.get_comment().get_curation_rewards()['active_votes'].keys()
        return account in voters
    def is_downvoted_by(self, account):
        """True if `account` has a negative-percent vote on this post."""
        has_downvoted = False
        for vote in self.get_comment().get_votes():
            if vote.voter == account and vote.percent < 0:
                has_downvoted = True
        return has_downvoted
    def is_comment(self):
        """True for a reply (has a parent author), False for a top-level post."""
        c = self.get_comment()
        return c["parent_author"] != ''
| {"/steem/comment.py": ["/steem/markdown.py"], "/blog/builder.py": ["/steem/comment.py", "/steem/markdown.py", "/blog/message.py"], "/blog/command.py": ["/blog/builder.py"]} |
73,271 | steemblog/blog | refs/heads/master | /blog/builder.py | # -*- coding:utf-8 -*-
import os
import shutil
import subprocess
import requests
import traceback
from steem.comment import SteemComment
from steem.account import SteemAccount
from steem.markdown import SteemMarkdown
from steem.settings import settings, STEEM_HOST
from data.reader import SteemReader
from utils.logging.logger import logger
from utils.system.date import get_uct_time_str
from blog.message import get_message
# GitHub organization that owns the generated blog repositories.
BLOG_ORGANIZATION = "steemblog"
# Theme images for the generated site.
BLOG_AVATAR = "https://avatars0.githubusercontent.com/u/50857551?s=200&v=4"
BLOG_FAVICON = "https://www.easyicon.net/api/resizeApi.php?id=1185564&size=32"
# Hexo configuration files regenerated by BlogBuilder.update_config().
CONFIG_FILE = "_config.yml"
CONFIG_THEME_FILE = "_config.theme.yml"
# Git branch and folder layout of the blog source repository.
SOURCE_BRANCH = "source"
HEXO_SOURCE_FOLDER = "source"
SOURCE_REPO_FOLDER = ".source"
POSTS_FOLDER = "_posts"
BLOG_CONTENT_FOLDER = "./{}/{}".format(HEXO_SOURCE_FOLDER, POSTS_FOLDER)
# CSS selector for the optional <div position="N"> ordering tag in a post body.
POSITION_TAG_SELECTOR = 'div[position]'
DEFAULT_POSITION = 9999
class BlogBuilder(SteemReader):
def __init__(self, account=None, tag=None, days=None, host="github"):
    """Create a builder that downloads posts for a steem account or tag.

    Args:
        account: steem account name to build a blog for (takes precedence
            over `tag` throughout the class).
        tag: steem tag to build a blog for when no account is given.
        days: forwarded to SteemReader to limit the age of fetched posts.
        host: hosting target, "github" (default) or "netlify".
    """
    SteemReader.__init__(self, account=account, tag=tag, days=days)
    self.host = host
    # create blog folder
    self.blog_folder = os.path.join(BLOG_CONTENT_FOLDER, self._get_subfolder())
    self.folder_created = False  # folder is created lazily by _get_content_folder()
    self.commited = False
def get_name(self):
    """Return a unique job name: 'blog-<account-or-tag>-<timestamp>'."""
    target = self.account or self.tag
    return "{}-{}-{}".format("blog", target, self._get_time_str())
def is_qualified(self, post):
    """SteemReader hook: every fetched post is included in the blog."""
    return True
def _get_subfolder(self):
    """Relative content subfolder: 'account/<name>' or 'tag/<name>' (None if neither)."""
    if self.account:
        return os.path.join("account", self.account)
    if self.tag:
        return os.path.join("tag", self.tag)
    return None
def _get_content_folder(self):
    """Return the blog content folder, creating it on first use."""
    if not self.folder_created and not os.path.exists(self.blog_folder):
        os.makedirs(self.blog_folder)
        self.folder_created = True
    return self.blog_folder
def _get_position(self, body):
    """Extract an integer ordering hint from a `<div position="N">` tag in `body`.

    Returns DEFAULT_POSITION when the tag is absent or unparsable; any
    parse/selector error is logged rather than propagated.
    """
    try:
        elements = SteemMarkdown(body).find_elements(POSITION_TAG_SELECTOR)
        if elements and len(elements) > 0:
            position = elements[0].get("position")
            return int(position)
    except:
        logger.error("Failed when getting position tag.\nError: {}".format(traceback.format_exc()))
    return DEFAULT_POSITION
def _yaml_compatible(self, item, default=None):
    """Single-quote a string for YAML front-matter (doubling embedded quotes);
    empty/None values fall back to `default` (or are returned as-is)."""
    if not item:
        return default or item
    return "'{}'".format(item.replace("'", "''"))
def _write_content(self, post):
    """Render one steem post into a dated markdown file in the content folder.

    File name pattern: <YYYY-MM-DD>_<permlink>.md, body generated from the
    "blog" message template with YAML front-matter fields.
    """
    folder = self._get_content_folder()
    c = SteemComment(comment=post)
    # retrieve necessary data from steem
    title = self._yaml_compatible(post.title, "''")
    permlink = post["permlink"]
    body = c.get_compatible_markdown()
    position = self._get_position(body)
    date_str = post.json()["created"]
    date = date_str.replace('T', ' ')
    tags = "\n".join(["- {}".format(tag) for tag in c.get_tags()])
    # NOTE(review): assumes every post has at least one tag — get_tags()
    # may return [] after a failed refresh, which would raise IndexError here.
    category = c.get_tags()[0]
    thumbnail = self._yaml_compatible(c.get_pic_url(), "")
    url = c.get_url()
    # build content with template
    template = get_message("blog", footer=True)
    content = template.format(title=title, permlink=permlink,
                              position=position, date=date,
                              tags=tags, category=category,
                              thumbnail=thumbnail, body=body, url=url)
    # write into MD files
    filename = os.path.join(folder, "{}_{}.md".format(date_str.split('T')[0], permlink))
    with open(filename, "w", encoding="utf-8") as f:
        f.write(content)
    logger.info("Download post [{}] into file {}".format(title, filename))
def download(self):
    """Fetch the latest posts (if not already fetched) and write each to disk.

    Returns the number of posts written.
    """
    if not self.posts:
        self.get_latest_posts()
    for post in self.posts:
        self._write_content(post)
    return len(self.posts)
def _get_domain(self):
    """Hosting domain for the configured host: netlify.com or github.io."""
    return "netlify.com" if self.host == "netlify" else "github.io"
def _get_blog_url(self):
    """Public URL of the generated blog for this account."""
    domain = self._get_domain()
    return "https://{}.{}/@{}".format(BLOG_ORGANIZATION, domain, self.account)
def _get_source_repo_url(self):
    """URL of this blog's content folder inside the source repository."""
    repo = self._get_repo()
    return "{}/tree/{}".format(repo, self.blog_folder)
def _get_repo(self, prefix=True):
    """'<org>/<org>.github.io' repository path, with the https prefix by default."""
    repo = "{0}/{0}.github.io".format(BLOG_ORGANIZATION)
    return "https://github.com/" + repo if prefix else repo
def update_config(self, incremental=False):
    """Regenerate the Hexo _config.yml and _config.theme.yml for self.account.

    No-op when building for a tag (self.account is None). Profile fields
    are quoted via _yaml_compatible so they embed safely in YAML.

    Args:
        incremental: rendered into the config as "true"/"false" to control
            Hexo incremental generation.
    """
    if not self.account:
        return
    domain = self._get_domain()
    organization = BLOG_ORGANIZATION
    logo = BLOG_AVATAR
    favicon = BLOG_FAVICON
    language = settings.get_env_var("LANGUAGE") or "en"
    a = SteemAccount(self.account)
    author = self.account
    name = self._yaml_compatible(a.get_profile("name"), "")
    avatar = self._yaml_compatible(a.avatar(), "")
    # about = a.get_profile("about") or ""
    location = self._yaml_compatible(a.get_profile("location"), "")
    website = self._yaml_compatible(a.get_profile("website"), "''")
    incremental = "true" if incremental else "false"
    # build config file with template
    template = get_message("config")
    config = template.format(organization=organization, domain=domain,
                             language=language, name=name, author=author,
                             incremental=incremental)
    filename = CONFIG_FILE
    with open(filename, "w", encoding="utf-8") as f:
        f.write(config)
    logger.info("{} file has been updated for the account @{}".format(filename, author))
    # build config theme file with template
    template = get_message("config.theme")
    config = template.format(organization=organization,
                             favicon=favicon, logo=logo,
                             author=author, name=name, location=location,
                             avatar=avatar, website=website)
    filename = CONFIG_THEME_FILE
    with open(filename, "w", encoding="utf-8") as f:
        f.write(config)
    logger.info("{} file has been updated for the account @{}".format(filename, author))
def _get_github_pat(self):
    """Return "TOKEN@" for embedding in clone URLs, or "" when no
    GITHUB_PAT environment variable is configured."""
    token = settings.get_env_var("GITHUB_PAT") or None
    return token + "@" if token else ""
def setup_source_repo(self):
    """Shallow-clone the source branch of the shared repo into the workspace."""
    remote = "https://{}github.com/{}.git".format(self._get_github_pat(), self._get_repo(prefix=False))
    git_clone_cmd = "git clone --depth 1 --branch {} --single-branch {} {}".format(SOURCE_BRANCH, remote, SOURCE_REPO_FOLDER)
    os.system(git_clone_cmd)
    # on `source` branch after clone
    logger.info("Cloned source repo into workspace: {}".format(SOURCE_REPO_FOLDER))
def _init_source_repo(self):
    # Create an empty git repo wired to the shared blog remote; alternative to
    # the shallow clone done by setup_source_repo (sparse checkout disabled).
    os.mkdir(SOURCE_REPO_FOLDER)
    os.chdir(SOURCE_REPO_FOLDER)
    git_init_cmds = [
        "git init",
        "git remote add origin https://{}github.com/{}.git".format(self._get_github_pat(), self._get_repo(prefix=False))
    ]
    for cmd in git_init_cmds:
        os.system(cmd)
    # run the commands below after the above, to do a sparse checkout:
    # "git config core.sparsecheckout true",
    # "echo {}/ >> .git/info/sparse-checkout".format(subfolder),
    # "git pull origin {} --depth 1".format(SOURCE_BRANCH)
    os.chdir("..")
def _sparse_checkout(self):
    """ not use sparse checkout now to keep `source` folder clean """
    # Restrict the repo's working tree to this account's posts subfolder.
    os.chdir(SOURCE_REPO_FOLDER)
    subfolder = os.path.join(POSTS_FOLDER, self._get_subfolder())
    git_sparse_checkout_cmds = [
        "git config core.sparsecheckout true",
        "echo {}/ > .git/info/sparse-checkout".format(subfolder),
        "git read-tree -mu HEAD"
    ]
    for cmd in git_sparse_checkout_cmds:
        os.system(cmd)
    os.chdir("..")
    logger.info("Sparse checkout to subfolder: {}".format(subfolder))
def _commit_source(self):
    """Stage and commit everything in the source repo.

    Returns True when `git commit` succeeded (i.e. there was something new),
    False otherwise.
    """
    os.chdir(SOURCE_REPO_FOLDER)
    os.system("git add --all *")
    res = os.system('git commit -m "Source updated: {}"'.format(get_uct_time_str()))
    os.chdir("..")
    if res != 0:
        logger.info("Failed to add new source into [{}] folder".format(SOURCE_REPO_FOLDER))
        return False
    logger.info("Commited source into [{}] folder".format(SOURCE_REPO_FOLDER))
    return True
def _diff_files(self):
    """List the file paths that changed between the last two commits of the
    source repo."""
    os.chdir(SOURCE_REPO_FOLDER)
    output = subprocess.run(['git', 'diff', 'HEAD', 'HEAD~1', '--name-only'], stdout=subprocess.PIPE).stdout.decode('utf-8')
    os.chdir("..")
    changed = [name for name in output.split("\n") if name]
    logger.info("{} different files:\n{}".format(len(changed), output))
    return changed
def _copy_files(self, src_dir, dst_dir):
    """Copy every post file from src_dir into dst_dir, creating dst_dir if
    needed. No-op when src_dir does not exist."""
    if not os.path.exists(src_dir):
        return
    if not os.path.exists(dst_dir):
        os.makedirs(dst_dir)
    for name in self.list_all_posts(src_dir):
        shutil.copyfile(os.path.join(src_dir, name), os.path.join(dst_dir, name))
def update_workspace(self):
    # Two-way sync: push freshly downloaded posts into the shared source repo,
    # then pull back anything already in the repo that is missing locally.
    source_folder = os.path.join(SOURCE_REPO_FOLDER, POSTS_FOLDER, self._get_subfolder())
    self._copy_files(self.blog_folder, source_folder)
    self._copy_files(source_folder, self.blog_folder)
    # remember whether a commit actually happened; list_new_posts() relies on it
    self.commited = self._commit_source()
def list_new_posts(self):
    """ this should be run after download completed """
    if not self.commited:
        files = []
    else:
        # self._sparse_checkout()
        files = self._diff_files()
    logger.info("{} new posts needs to build.".format(len(files)))
    return files
def list_all_posts(self, folder=None):
    """ list all the posts in the blog folder """
    folder = folder or self.blog_folder
    names = [entry for entry in os.listdir(folder)
             if os.path.isfile(os.path.join(folder, entry))]
    logger.info("{} posts in blog folder {}".format(len(names), folder))
    return names
def _blog_exists(self):
    """Probe the public blog URL; True when it responds successfully."""
    if not self.account:
        return False
    blog_url = self._get_blog_url()
    response = requests.get(blog_url)
    if not response.ok:
        logger.info("The blog [{}] doesn't exist".format(blog_url))
        return False
    logger.info("The blog [{}] already exists".format(blog_url))
    return True
def _source_repo_exists(self):
    """Probe the source repo URL.

    Best-effort: returns False on any connection failure rather than raising.
    """
    if not self.account:
        return False
    source_url = self._get_source_repo_url()
    try:
        r = requests.get(source_url)
    except Exception:
        # was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to the single call that can raise
        logger.info("Failed when try to connect to source repo [{}]".format(source_url))
        return False
    if r.ok:
        logger.info("The source repo [{}] already exists".format(source_url))
        return True
    logger.info("The source repo [{}] doesn't exist".format(source_url))
    return False
def set_smart_duration(self):
    # When both the rendered blog and its source repo already exist, only the
    # recent window (DURATION env var, default 1.5 days) needs re-downloading;
    # otherwise fetch the account's entire history (days=None).
    if not self.account:
        return
    if self._source_repo_exists() and self._blog_exists():
        self.days = settings.get_env_var("DURATION") or 1.5
        logger.info("The download duration has been set to {} days".format(self.days))
    else:
        self.days = None
        logger.info("The download duration has been expanded to the entire lifetime of the account")
| {"/steem/comment.py": ["/steem/markdown.py"], "/blog/builder.py": ["/steem/comment.py", "/steem/markdown.py", "/blog/message.py"], "/blog/command.py": ["/blog/builder.py"]} |
73,272 | steemblog/blog | refs/heads/master | /steem/markdown.py | # -*- coding:utf-8 -*-
import re
import html
from bs4 import BeautifulSoup
from markdown import markdown
REGEX_IMAGE_URL = r"https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{2,256}\.[a-z]{2,6}\b([-a-zA-Z0-9@:%_\+.~#?&//=]*)\.(jpg|jpeg|png|gif|svg)"
# Generic URL pattern (non-capturing groups so re.findall yields whole-match
# strings). Bug fix: get_links() referenced an undefined URL_REGEX name, so the
# default regex path raised NameError.
REGEX_URL = r"https?:\/\/(?:www\.)?[-a-zA-Z0-9@:%._\+~#=]{2,256}\.[a-z]{2,6}\b(?:[-a-zA-Z0-9@:%_\+.~#?&//=]*)"


class SteemMarkdown:
    """Helpers for extracting images/links from, and cleaning up,
    Steem-flavoured markdown text."""

    def __init__(self, text):
        self.text = text

    def get_top_image(self, regex=False):
        """Return the first image URL found in the text, or None.

        With regex=True, scan the raw text (markdown image tag first, then a
        bare image URL); otherwise render the markdown and use the first <img>.
        """
        if regex:
            # follow markdown format
            m = re.search(r"!\[(.*)\]\((\S+)\)", self.text)
            if m:
                return m.group(2)
            # follow url format
            m = re.search(REGEX_IMAGE_URL, self.text)
            if m:
                return m.group(0)
        else:
            links = self.get_img_links()
            if links:
                return links[0]
        return None

    def get_rendered_text(self):
        """ Converts a markdown string to plaintext """
        # md -> html -> text since BeautifulSoup can extract text cleanly;
        # local renamed so it no longer shadows the imported `html` module
        html_doc = markdown(self.text)
        # remove code snippets (bug fix: the closing tag was written "</code >"
        # with a stray space, so inline code was never stripped)
        html_doc = re.sub(r'<pre>(.*?)</pre>', ' ', html_doc)
        html_doc = re.sub(r'<code>(.*?)</code>', ' ', html_doc)
        # extract text
        soup = BeautifulSoup(html_doc, "html.parser")
        text = ''.join(soup.findAll(text=True))
        text = re.sub(REGEX_IMAGE_URL, '', text)
        return text

    def _get_valid_link(self, url):
        """Trim whitespace and a trailing ')', then unescape HTML entities."""
        url = url.strip()
        # guard the index against an empty string after stripping
        if url and url[-1] == ")":
            url = url[:-1]
        # unescape HTML chars
        return html.unescape(url)

    def _is_img_link(self, url):
        """True when `url` looks like a direct image link (by extension)."""
        return re.match(REGEX_IMAGE_URL, url) is not None

    def get_links(self, regex=True):
        """Return all hyperlinks found in the text.

        regex=True scans the raw text with REGEX_URL; regex=False renders the
        markdown and collects <a href> values.
        """
        body = self.text
        if regex:
            # bug fix: URL_REGEX was never defined anywhere in this module
            links = re.findall(REGEX_URL, body)
        else:
            # md -> html -> text since BeautifulSoup can extract text cleanly
            html_doc = markdown(body)
            # extract links
            soup = BeautifulSoup(html_doc, "html.parser")
            tags = soup.findAll("a")
            links = [tag.get("href") for tag in tags]
        if len(links) > 0:
            links = [self._get_valid_link(link) for link in links if link is not None]
        return links or []

    def get_img_links(self):
        """Return the src URLs of all images in the rendered markdown."""
        body = self.get_steem_markdown()
        # md -> html -> text since BeautifulSoup can extract text cleanly
        html_doc = markdown(body)
        # extract links
        soup = BeautifulSoup(html_doc, "html.parser")
        tags = soup.findAll("img")
        links = [tag.get("src") for tag in tags]
        if len(links) > 0:
            links = [self._get_valid_link(link) for link in links if link is not None]
        return links or []

    def get_steem_markdown(self):
        """Normalize the raw text before rendering it as markdown."""
        text = self.text
        # NOTE(review): despite the original comment ("convert the raw image
        # URL into markdown image tag"), this substitution strips bare image
        # URLs, keeping only the trailing whitespace — behavior preserved as-is
        text = re.sub(r"(?P<url>" + REGEX_IMAGE_URL + r")(?P<space>\s+)", r"\g<space>", text)
        # add an extra new line before the table head to render successfully
        text = re.sub(r"(?P<previous>[^|\s]+\s*[^|\n])\n\|", r"\g<previous>\n\n|", text)
        return text

    def find_elements(self, selector):
        """Render the markdown and return all elements matching a CSS selector."""
        body = self.get_steem_markdown()
        html_doc = markdown(body)
        soup = BeautifulSoup(html_doc, "html.parser")
        return soup.select(selector)
| {"/steem/comment.py": ["/steem/markdown.py"], "/blog/builder.py": ["/steem/comment.py", "/steem/markdown.py", "/blog/message.py"], "/blog/command.py": ["/blog/builder.py"]} |
73,273 | steemblog/blog | refs/heads/master | /blog/command.py | # -*- coding:utf-8 -*-
import os, time, random
from invoke import task
import traceback
from utils.logging.logger import logger
from steem.settings import settings
from blog.builder import BlogBuilder, SOURCE_REPO_FOLDER, HEXO_SOURCE_FOLDER
@task(help={
    'account': 'the account of the blogs to download',
    'tag': 'the tag of the blogs to download',
    'days': 'the posts in recent days to fetch',
    'host': 'the host server for the site: [github, netlify]',
    'debug': 'enable the debug mode',
    'clear': 'clean previous posts before download',
    'production': 'set production mode to download incrementally'
})
def download(ctx, account=None, tag=None, days=None, host="github", debug=False, clear=False, production=False):
    """ download the posts to local by the account """
    if debug:
        settings.set_debug_mode()
    if clear:
        clean(ctx)
    settings.set_steem_node()
    # CLI arguments win; environment variables are the fallback
    account = account or settings.get_env_var("STEEM_ACCOUNT")
    tag = tag or settings.get_env_var("STEEM_TAG")
    days = days or settings.get_env_var("DURATION")
    clean_build = settings.get_env_var("CLEAN_BUILD")
    if clean_build and clean_build.lower() == "true":
        clean_build = True
    else:
        clean_build = False
    # a clean build always re-downloads everything; otherwise production mode
    # implies an incremental update
    if clean_build:
        incremental = False
    else:
        incremental = production
    builder = BlogBuilder(account=account, tag=tag, days=days, host=host)
    if production and not clean_build:
        builder.set_smart_duration()
    builder.update_config(incremental=incremental)
    count = builder.download()
    if production:
        builder.update_workspace()
    # report either the newly committed posts (incremental) or everything
    if incremental:
        if production and count > 0:
            count = len(builder.list_new_posts())
    else:
        count = len(builder.list_all_posts())
    return count
@task(help={
})
def setup(ctx):
    """ recreate the source repo clone in the workspace """
    # bug fix: the docstring (which invoke shows as CLI help) was a copy-paste
    # of clean()'s "clean the downloaded posts" and did not describe this task
    os.system("rm -rf {}".format(SOURCE_REPO_FOLDER))
    builder = BlogBuilder(account="none")
    builder.setup_source_repo()
@task(help={
})
def clean(ctx):
    """ clean the downloaded posts """
    # remove the hexo source folder so the next download starts fresh
    os.system("rm -rf {}".format(HEXO_SOURCE_FOLDER))
def configure():
    # Give node enough heap for large hexo builds, and install the generated
    # theme config into the icarus theme folder before building.
    settings.set_env_var("NODE_OPTIONS", "--max-old-space-size=8192")
    os.system("cp -f _config.theme.yml themes/icarus/_config.yml")
@task(help={
    'debug': 'enable the debug mode',
})
def build(ctx, debug=False):
    """ build the static pages from steem posts """
    configure()
    os.system("hexo clean")
    build_cmd = "hexo generate" + (" --debug" if debug else "")
    os.system(build_cmd)
@task(help={
    'accounts': 'the accounts of the blogs to download, delimiter is comma',
    'host': 'the host server for the site: [github, netlify]',
    'debug': 'enable the debug mode',
    'production': 'set production mode to download incrementally'
})
def build_all(ctx, accounts=None, host="github", debug=False, production=False):
    """ download the posts of all the accounts, and generate pages """
    # the DEBUG env var can force debug mode even without the CLI flag
    if not debug:
        debug_setting = settings.get_env_var("DEBUG")
        if debug_setting and debug_setting.lower() == "true":
            debug = True
    accounts = accounts or settings.get_env_var("STEEM_ACCOUNTS") or []
    if accounts and len(accounts) > 0:
        if production:
            setup(ctx)
        for account in accounts.split(","):
            try:
                logger.info("Start generating pages for account @{} ...".format(account))
                clean(ctx)
                count = download(ctx, account=account, host=host, debug=debug, production=production)
                if count > 0:
                    build(ctx, debug)
            # deliberate best-effort: one failing account must not stop the
            # others. NOTE(review): this bare except also catches
            # KeyboardInterrupt/SystemExit — consider `except Exception`.
            except:
                logger.error("Failed when generating pages for account @{}.\nError: {}".format(account, traceback.format_exc()))
@task(help={
    'debug': 'enable the debug mode',
})
def test(ctx, debug=False):
    """ build and launch the blog server in local environment """
    build(ctx, debug)
    # -s serves from the static public/ folder without regenerating
    os.system("hexo server -s")
@task(help={
    "host": "the host environment to deploy the build"
})
def deploy(ctx, host="hexo"):
    """ deploy the static blog to the GitHub pages """
    logger.info("launch the deploy on [{}]".format(host))
    if host == "hexo":
        build(ctx)
        os.system("hexo deploy")
    elif host == "netlify":
        # trigger a remote build via the configured netlify build hook
        hook_id = settings.get_env_var("NETLIFY_HOOK") or None
        if not hook_id:
            logger.error("Failed: we need the hook ID to deploy")
        else:
            build_hook = "curl -X POST -d {} https://api.netlify.com/build_hooks/" + hook_id
            os.system(build_hook)
    # "github" and any other host: nothing to do here
| {"/steem/comment.py": ["/steem/markdown.py"], "/blog/builder.py": ["/steem/comment.py", "/steem/markdown.py", "/blog/message.py"], "/blog/command.py": ["/blog/builder.py"]} |
73,274 | steemblog/blog | refs/heads/master | /blog/message.py | # -*- coding:utf-8 -*-
import traceback
from utils.logging.logger import logger
def get_message(id, footer=False):
    """Return the template registered under `id`, optionally with its footer."""
    return build_message(id, footer)

def build_message(id, footer=False, message_marker=False):
    """Assemble a message: template body, optional footer, optional hidden
    HTML marker carrying the template id."""
    parts = [MESSAGES[id]]
    if footer and id in FOOTERS:
        parts.append(FOOTERS[id])
    if message_marker:
        parts.append(MESSAGE_ID.format(message_id=id))
    return "".join(parts)

# hidden HTML marker used to tag generated content with its template id
MESSAGE_ID = """
<div message_id=\"{message_id}\"></div>
"""

# template registries, populated by the module-level assignments below
MESSAGES = {}
FOOTERS = {}
MESSAGES["blog"] = """
---
title: {title}
permlink: {permlink}
catalog: true
toc_nav_num: true
toc: true
position: {position}
date: {date}
categories:
- {category}
tags:
{tags}
thumbnail: {thumbnail}
sidebar:
right:
sticky: true
widgets:
-
type: toc
position: right
---
{body}
"""
FOOTERS["blog"] = """
- - -
This page is synchronized from the post: [{title}]({url})
"""
MESSAGES['config'] = """
# Hexo Configuration
## Docs: http://hexo.io/docs/configuration.html
## Source: https://github.com/hexojs/hexo/
# Site
# add 'site_title' because 'title' is overriden by the metadata in post
site_title: {name}
title: {name}
subtitle:
description:
author: {author}
language: {language}
timezone:
# URL
## If your site is put in a subdirectory, set url as 'http://yoursite.com/child' and root as '/child/'
url: http://{organization}.{domain}
root: /@{author}/
permalink: :permlink/
permalink_defaults:
# Directory
source_dir: source
public_dir: public/@{author}
tag_dir: tags
archive_dir: archives
category_dir: categories
code_dir: downloads/code
i18n_dir: :lang
skip_render:
# Writing
new_post_name: :title.md # File name of new posts
default_layout: post
titlecase: false # Transform title into titlecase
external_link: true # Open external links in new tab
filename_case: 0
render_drafts: false
post_asset_folder: false
relative_link: false
future: true
highlight:
enable: true
line_number: true
tab_replace:
# Category & Tag
default_category: uncategorized
category_map:
tag_map:
# Date / Time format
## Hexo uses Moment.js to parse and display date
## You can customize the date format as defined in
## http://momentjs.com/docs/#/displaying/format/
date_format: YYYY-MM-DD
time_format: HH:mm:ss
# Pagination
## Set per_page to 0 to disable pagination
per_page: 6
pagination_dir: page
index_generator:
per_page: 6
order_by: position -date
archive_generator:
per_page: 20
yearly: true
monthly: true
category_generator:
per_page: 20
tag_generator:
per_page: 20
# Extensions
## Plugins: https://github.com/hexojs/hexo/wiki/Plugins
## Themes: https://github.com/hexojs/hexo/wiki/Themes
theme: icarus
# Deployment
## Docs: http://hexo.io/docs/deployment.html
deploy:
type: git
repository: https://github.com/{organization}/blog.git
branch: gh-pages
marked:
gfm: true
pedantic: false
sanitize: false
tables: true
breaks: true
smartLists: true
smartypants: true
autolink: true
githubEmojis:
className: not-gallery-item
all_minifier: false
stop_tag_plugins: true
incremental: {incremental}
"""
MESSAGES['config.theme'] = """
# Version of the Icarus theme that is currently used
version: 2.3.0
# Path or URL to the website's icon
favicon: {favicon}
# Path or URL to RSS atom.xml
rss:
# Path or URL to the website's logo to be shown on the left of the navigation bar or footer
logo: {logo}
# Open Graph metadata
# https://hexo.io/docs/helpers.html#open-graph
# open_graph:
# # Facebook App ID
# fb_app_id:
# # Facebook Admin ID
# fb_admins:
# # Twitter ID
# twitter_id:
# # Twitter site
# twitter_site:
# # Google+ profile link
# google_plus:
# Navigation bar link settings
navbar:
# Navigation bar menu links
menu:
Home: /
Archives: /archives
Categories: /categories
Tags: /tags
About: /about
# Navigation bar links to be shown on the right
# links:
# Download on GitHub:
# icon: fab fa-github
# url: 'http://github.com/ppoffice/hexo-theme-icarus'
# Footer section link settings
footer:
# Links to be shown on the right of the footer section
links:
Creative Commons:
icon: fab fa-creative-commons
url: 'https://creativecommons.org/'
Attribution 4.0 International:
icon: fab fa-creative-commons-by
url: 'https://creativecommons.org/licenses/by/4.0/'
# Download on GitHub:
# icon: fab fa-github
# url: 'http://github.com/ppoffice/hexo-theme-icarus'
# Article display settings
article:
# Code highlight theme
# https://github.com/highlightjs/highlight.js/tree/master/src/styles
highlight: atom-one-dark
# Whether to show article thumbnail images
thumbnail: true
# Whether to show estimate article reading time
readtime: true
# Search plugin settings
# http://ppoffice.github.io/hexo-theme-icarus/categories/Configuration/Search-Plugins
search:
# Name of the search plugin
type: insight
# Comment plugin settings
# http://ppoffice.github.io/hexo-theme-icarus/categories/Configuration/Comment-Plugins
# comment:
# # Name of the comment plugin
# type: disqus
# shortname: hexo-theme-icarus
# Donation entries
# http://ppoffice.github.io/hexo-theme-icarus/categories/Donation/
# donate:
# -
# # Donation entry name
# type: alipay
# # Qrcode image URL
# qrcode: /gallery/donate/alipay.png
# -
# # Donation entry name
# type: wechat
# # Qrcode image URL
# qrcode: /gallery/donate/wechat.jpg
# -
# # Donation entry name
# type: paypal
# # Paypal business ID or email address
# business: paypal@paypal.com
# # Currency code
# currency_code: USD
# -
# # Donation entry name
# type: patreon
# # URL to the Patreon page
# url: https://www.patreon.com/
# Share plugin settings
# http://ppoffice.github.io/hexo-theme-icarus/categories/Configuration/Share-Plugins
share:
# Share plugin name
type: sharethis
install_url: //platform-api.sharethis.com/js/sharethis.js#property=5ab6f60ace89f00013641890&product=inline-share-buttons
# Sidebar settings.
# Please be noted that a sidebar is only visible when it has at least one widget
sidebar:
# left sidebar settings
left:
# Whether the left sidebar is sticky when page scrolls
# http://ppoffice.github.io/hexo-theme-icarus/Configuration/Theme/make-a-sidebar-sticky-when-page-scrolls/
sticky: false
# right sidebar settings
right:
# Whether the right sidebar is sticky when page scrolls
# http://ppoffice.github.io/hexo-theme-icarus/Configuration/Theme/make-a-sidebar-sticky-when-page-scrolls/
sticky: false
# Sidebar widget settings
# http://ppoffice.github.io/hexo-theme-icarus/categories/Widgets/
widgets:
-
# Widget name
type: profile
# Where should the widget be placed, left or right
position: left
# Author name to be shown in the profile widget
author: {author}
# Title of the author to be shown in the profile widget
author_title: {name}
# Author's current location to be shown in the profile widget
location: {location}
# Path or URL to the avatar to be shown in the profile widget
avatar: {avatar}
# Email address for the Gravatar to be shown in the profile widget
gravatar:
# Path or URL for the follow button
follow_link: 'http://steemit.com/@{author}'
# Links to be shown on the bottom of the profile widget
social_links:
Github:
icon: fab fa-github
url: 'http://{organization}.github.io/@{author}'
Steem:
icon: fa fa-book
url: 'http://steemit.com/@{author}'
Website:
icon: fa fa-home
url: {website}
# Cache the widget or not, true or false
cache: true
# Render the widget as a component
component: true
# -
# Widget name
# type: links
# Where should the widget be placed, left or right
# position: left
# Links to be shown in the links widget
# links:
# Hexo: 'https://hexo.io'
# Bulma: 'https://bulma.io'
-
# Widget name
type: category
# Where should the widget be placed, left or right
position: left
# Cache the widget or not, true or false
cache: true
# Render the widget as a component
component: true
-
# Widget name
type: tagcloud
# Where should the widget be placed, left or right
position: left
# Cache the widget or not, true or false
cache: true
# Render the widget as a component
component: true
-
# Widget name
type: tag
# Where should the widget be placed, left or right
position: left
# Cache the widget or not, true or false
cache: true
# Render the widget as a component
component: true
-
# Widget name
type: toc
# Where should the widget be placed, left or right
position: right
# Cache the widget or not, true or false
cache: false
-
# Widget name
type: recent_posts
# Where should the widget be placed, left or right
position: right
# Cache the widget or not, true or false
cache: true
# Render the widget as a component
component: true
-
# Widget name
type: archive
# Where should the widget be placed, left or right
position: right
# Cache the widget or not, true or false
cache: true
# Render the widget as a component
component: true
# Other plugin settings
plugins:
# Enable page animations
animejs: true
# Enable the lightGallery and Justified Gallery plugins
# http://ppoffice.github.io/hexo-theme-icarus/Plugins/General/gallery-plugin/
gallery: true
# Enable the Outdated Browser plugin
# http://outdatedbrowser.com/
outdated-browser: true
# Enable the MathJax plugin
# http://ppoffice.github.io/hexo-theme-icarus/Plugins/General/mathjax-plugin/
mathjax: true
# Show the back to top button on mobile devices
back-to-top: true
# Google Analytics plugin settings
# http://ppoffice.github.io/hexo-theme-icarus/Plugins/General/site-analytics-plugin/#Google-Analytics
google-analytics:
# # Google Analytics tracking id
tracking_id: # UA-72437521-5
# Baidu Analytics plugin settings
# http://ppoffice.github.io/hexo-theme-icarus/Plugins/General/site-analytics-plugin/#Baidu-Analytics
baidu-analytics:
# Baidu Analytics tracking id
tracking_id:
# Hotjar user feedback plugin
# http://ppoffice.github.io/hexo-theme-icarus/Plugins/General/site-analytics-plugin/#Hotjar
hotjar:
# # Hotjar site id
site_id: # 1067642
# Show a loading progress bar at top of the page
progressbar: true
# Show the copy button in the highlighted code area
clipboard: true
# BuSuanZi site/page view counter
# https://busuanzi.ibruce.info
busuanzi: false
# CDN provider settings
# http://ppoffice.github.io/hexo-theme-icarus/Configuration/Theme/speed-up-your-site-with-custom-cdn/
providers:
# Name or URL of the JavaScript and/or stylesheet CDN provider
cdn: jsdelivr
# Name or URL of the webfont CDN provider
fontcdn: google
# Name or URL of the webfont Icon CDN provider
iconcdn: fontawesome
"""
| {"/steem/comment.py": ["/steem/markdown.py"], "/blog/builder.py": ["/steem/comment.py", "/steem/markdown.py", "/blog/message.py"], "/blog/command.py": ["/blog/builder.py"]} |
73,275 | jroitgrund/subsync | refs/heads/master | /main.py | from lib import syllables
from lib import vad_reader
from lib import moreno_recursion
from lib.subtitles.write_subs import write_srt
import argparse
import numpy
import pysrt
import subprocess
import shlex
import sys
def main():
    # CLI entry point (Python 2 script): parse flags and run subtitle sync.
    # Echo the full command line so runs can be reproduced from logs.
    print ' '.join(sys.argv)
    parser = argparse.ArgumentParser(
        description="Run synchronization with the data files in a directory")
    parser.add_argument('directory')
    parser.add_argument('-a', action='store_true')  # use automaton
    parser.add_argument('-t', action='store_true')  # use own (VAD) timing
    parser.add_argument('-f', action='store_true')  # use fMLLR
    parser.add_argument('-i', action='store_true')  # per-transcription compile
    parser.add_argument('-p', default=2, type=int)  # number of passes
    parser.add_argument('-s', default=30, type=int)  # segment seconds
    parser.add_argument('-w', default=1, type=int)  # window
    parser.add_argument('-l', default=5, type=int)  # min segment length
    args = parser.parse_args()
    directory = args.directory
    use_automaton = args.a
    use_own_time = args.t
    use_fmllr = args.f
    per_transcription_compile = args.i
    segment_seconds = args.s
    window = args.w
    min_len = args.l
    passes = args.p
    generate_subs(directory, use_own_time, passes, segment_seconds, window, min_len, use_fmllr, use_automaton, per_transcription_compile)
def generate_subs(directory, use_own_time, passes, segment_seconds, window, min_len, use_fmllr, use_automaton, per_transcription_compile):
    # Align the fluff-free subtitles against the VAD activations and write the
    # result as <directory>/generated.srt.
    activations = vad_reader.movie_activations(directory)
    # total voiced time either from the VAD segments or from times.txt
    if use_own_time:
        voiced_duration = sum(duration for start, duration, channel in vad_reader.get_segments(activations))
    else:
        voiced_duration = numpy.sum(numpy.fromfile("%s/times.txt" % directory, sep="\n", dtype=numpy.int).reshape((-1, 2))[:,1])
    scaled_activations = vad_reader.scale_activations(activations)
    subs = pysrt.open("%s/nofluff.srt" % directory, "utf-8")
    text_segments = [sub.text for sub in subs]
    # estimate each subtitle's spoken length from its syllable count
    text_lengths, syllable_duration = syllables.text_lengths(text_segments, voiced_duration)
    alignment_info = moreno_recursion.AlignmentInfo(scaled_activations, text_segments, text_lengths, directory, syllable_duration, segment_seconds, window, min_len, use_fmllr, use_automaton, per_transcription_compile)
    alignment = alignment_info.get_alignment(passes=passes)
    # alignment entries are in 100 ms units; -1 marks an unaligned segment
    write_srt((
        (text_segments[i], alignment[i] * 100, alignment[i] * 100 + text_lengths[i] * 100)
        for i in xrange(len(text_segments)) if i < len(alignment) and alignment[i] != -1
    ), "%s/generated.srt" % directory)
# standard script entry point
if __name__ == '__main__':
    main()
| {"/lib/moreno_recursion.py": ["/lib/time_align.py", "/lib/subtitles/remove_fluff.py"]} |
73,276 | jroitgrund/subsync | refs/heads/master | /lib/dataset.py | import yaml
def get_dataset():
    """Load the experiment file list from data/files.yaml."""
    with open('data/files.yaml') as data_file:
        # yaml.load() without an explicit Loader can execute arbitrary tags on
        # untrusted input and is deprecated/removed in PyYAML >= 5.1;
        # safe_load builds only plain Python objects.
        return yaml.safe_load(data_file)
| {"/lib/moreno_recursion.py": ["/lib/time_align.py", "/lib/subtitles/remove_fluff.py"]} |
73,277 | jroitgrund/subsync | refs/heads/master | /lib/time_align.py | import numpy
import sys
class ActivationsBetween:
    """Prefix-sum index over a 1-D array of per-frame voice activations.

    Gives O(1) queries for the number of active frames before, after, or
    between positions; positions outside [0, end] are clamped.
    """

    def __init__(self, activations):
        self._activations = activations
        self._end = activations.shape[0]
        self._init_before_after()

    def _init_before_after(self):
        """Precompute prefix (`before`) and suffix (`after`) activation sums."""
        before = 0
        after = self._activations.sum()
        # dtype was numpy.int — an alias of the builtin int that NumPy 1.24
        # removed; builtin int yields the identical platform-default dtype
        self._before = numpy.zeros(self._end + 1, dtype=int)
        self._after = numpy.zeros(self._end + 1, dtype=int)
        self._before[0] = before
        self._after[0] = after
        for i, activation in enumerate(self._activations):
            before += activation
            after -= activation
            self._before[i + 1] = before
            self._after[i + 1] = after

    def _pos_in_range(self, pos):
        # clamp to [0, end] so queries never index out of bounds
        return max(min(pos, self._end), 0)

    def end(self):
        """Number of frames in the underlying activation array."""
        return self._end

    def before(self, pos):
        """Active frames in [0, pos)."""
        return self._before[self._pos_in_range(pos)]

    def after(self, pos):
        """Active frames in [pos, end)."""
        return self._after[self._pos_in_range(pos)]

    def between(self, start, end):
        """Active frames in [start, end)."""
        return self.before(end) - self.before(start)
def best_partial_score_table(mobile_segments, activations_between):
    """Build the DP table for segment placement.

    Row i, column t holds the best activation coverage achievable by segments
    0..i when segment i ends no later than time t (see _scores_with_segment).
    """
    # wide enough for every segment plus slack, even when the activation track
    # is shorter than the segments demand
    size = max(activations_between.end(), sum(mobile_segments) + 20)
    # dtype was numpy.int, an alias of builtin int removed in NumPy 1.24
    partial_score_table = numpy.zeros((len(mobile_segments), size), dtype=int)
    total_length = 0
    for i, length in enumerate(mobile_segments):
        total_length += length
        _scores_with_segment(partial_score_table, i, length, activations_between, total_length)
    return partial_score_table
def _scores_with_segment(partial_score_table, index, length, activations_between, total_length):
num_positions = partial_score_table.shape[1]
best_so_far = -(sys.maxint - 1)
for highest_time in range(0, num_positions):
if highest_time < total_length:
score = -(sys.maxint - 1)
else:
start = highest_time - length + 1
score = partial_score_table[index - 1, start - 1] if start > 0 and index > 0 else 0
if score != -(sys.maxint - 1):
score += activations_between.between(start, start + length)
if score > best_so_far:
best_so_far = score
partial_score_table[index, highest_time] = best_so_far
def best_alignment(partial_score_table, mobile_segments, activations_between):
    """Backtrack through the DP table to recover each segment's start time.

    Returns an int array of start positions, with -1 for segments pushed
    before t=0.
    """
    # debug artifact kept from the original: dump the DP table to cwd
    numpy.savetxt('score_table.txt', partial_score_table, delimiter='\t')
    num_segments, num_positions = partial_score_table.shape
    alignment = numpy.zeros(num_segments, dtype=int)
    necessary_score = partial_score_table[-1, -1]
    start_time = 0
    # xrange/print-statement replaced with py2/py3-neutral equivalents
    for segment in range(num_segments - 1, -1, -1):
        # earliest time this segment can end while still achieving the score
        end_time = numpy.nonzero(partial_score_table[segment, ] == necessary_score)[0][0] + 1
        length = mobile_segments[segment]
        start_time = end_time - length
        alignment[segment] = max(-1, start_time)
        if start_time < 0:
            # bug fix: the original formatted two placeholders with a single
            # value ("% segment"), raising TypeError whenever this branch ran
            print("start time %s for segment %s" % (start_time, segment))
        necessary_score -= activations_between.between(start_time, start_time + length)
        if necessary_score == 0:
            break
    assert necessary_score == 0
    return alignment
| {"/lib/moreno_recursion.py": ["/lib/time_align.py", "/lib/subtitles/remove_fluff.py"]} |
73,278 | jroitgrund/subsync | refs/heads/master | /lib/wavdump.py | import subprocess
def wavdump(infile, outfile):
    # Dump the audio track of `infile` to a PCM WAV file via mplayer
    # (Python 2 script). Returns mplayer's shell exit status (0 on success).
    command = "mplayer -ao pcm:file=%s,fast -vc null -vo null %s" \
        % (outfile, infile)
    print command
    return subprocess.call(command, shell=True)
| {"/lib/moreno_recursion.py": ["/lib/time_align.py", "/lib/subtitles/remove_fluff.py"]} |
73,279 | jroitgrund/subsync | refs/heads/master | /lib/subtitles/write_subs.py | from lib import time_helper
# SRT timestamp layout; %f yields microseconds, trimmed to millis below
_TIME_FORMAT = "%H:%M:%S,%f"

def write_srt(subs, outfile):
    # Write an iterable of (text, start_millis, end_millis) triples as an SRT
    # file (Python 2: text is utf-8 encoded before writing).
    with open(outfile, 'w') as subfile:
        for i, (text, start, end) in enumerate(subs):
            start_time = time_helper.time_from_millis(start)
            end_time = time_helper.time_from_millis(end)
            subfile.write("%s\n" % (i + 1))
            # [:-3] trims microseconds down to the milliseconds SRT expects
            subfile.write(
                "%s --> %s\n" % (start_time.strftime(_TIME_FORMAT)[:-3],
                                 end_time.strftime(_TIME_FORMAT)[:-3]))
            subfile.write(text.encode('utf-8'))
            subfile.write("\n\n")
| {"/lib/moreno_recursion.py": ["/lib/time_align.py", "/lib/subtitles/remove_fluff.py"]} |
73,280 | jroitgrund/subsync | refs/heads/master | /prepare_data.py | from lib import cd
from lib import time_helper
from lib import vad_reader
from lib import wavdump
from lib.subtitles import remove_fluff
import os
import argparse
import pysrt
import shlex
import shutil
import subprocess
def main():
    # CLI entry point: parse arguments and prepare data for one movie.
    # bug fix: the ArgumentParser(...) call below was missing its closing
    # parenthesis, which made this whole file a SyntaxError.
    parser = argparse.ArgumentParser(
        description="Prepare data for movie and store it in outdir")
    parser.add_argument('movie')
    parser.add_argument('outdir')
    parser.add_argument('-c', action='store_true')   # use per-channel audio
    parser.add_argument('-v', action='store_true')   # use VAD-produced segments
    parser.add_argument('-s', type=int, default=25)  # min segment length
    parser.add_argument('-t', type=float, default=0.0)  # activation threshold
    args = parser.parse_args()
    movie = args.movie
    outdir = args.outdir
    use_channels = args.c
    use_vad_segments = args.v
    min_segment_length = args.s
    threshold = args.t
    prepare_data(movie, outdir, use_channels, use_vad_segments, min_segment_length, threshold)
def prepare_data(movie, outdir, use_channels, use_vad_segments, min_segment_length, threshold):
    # Full preparation pipeline for one movie (Python 2 script): dump audio,
    # run the openSMILE VAD, cut voiced segments, extract MFCC features and
    # build the fluff-free subtitle transcription, all under `outdir`.
    directory = movie.rpartition('/')[0]
    shutil.rmtree(outdir, True)
    os.mkdir(outdir)
    wav_filename = '%s/audio.wav' % outdir
    wavdump.wavdump(movie, wav_filename)
    # downmix to 16-bit signed mono for the VAD
    mono_filename = "%s/audio_mono.wav" % outdir
    command = "sox %s -e signed -b 16 %s remix -" % (wav_filename, mono_filename)
    print command
    subprocess.call(shlex.split(command))
    if (not use_channels) or use_vad_segments:
        delete_vad_output_segments()
        with cd.cd('VAD'):
            command = "./SMILExtract -C Standalone.conf -I ../%s" % mono_filename
            if not use_channels:
                command += " -csv ../%s/activations.csv" % outdir
            print command
            subprocess.call(shlex.split(command))
    os.mkdir("%s/segments" % outdir)
    if use_vad_segments:
        # copy the segments the VAD itself produced
        segments = os.listdir("VAD/output_segments")
        for segment in segments:
            command = "sox VAD/output_segments/%s %s/segments/%s" % (segment, outdir, segment)
            print command
            subprocess.call(command, shell=True)
    if use_channels:
        # run the VAD on every individual channel of the original audio
        command = "soxi %s | grep Channels | awk '{print $3}'" % wav_filename
        channels = int(subprocess.check_output(command, shell=True))
        for channel in xrange(1, channels + 1):
            channel_filename = "%s/audio_%s.wav" % (outdir, channel)
            command = "sox %s -e signed -b 16 %s remix %s" % (wav_filename, channel_filename, channel)
            print command
            subprocess.call(shlex.split(command))
            csv_filename = '%s/activations_%s.csv' % (outdir, channel)
            with cd.cd('VAD'):
                command = "./SMILExtract -C Standalone.conf -I ../%s -csv ../%s" % (channel_filename, csv_filename)
                print command
                subprocess.call(shlex.split(command))
    activations = vad_reader.movie_activations(outdir)
    rate = int(subprocess.check_output("soxi %s/audio_mono.wav | grep 'Sample Rate' | awk '{print $4}'" % outdir, shell=True))
    if use_vad_segments:
        # convert the VAD segment sample offsets/durations into time units
        subprocess.call("./get_offsets.sh %s" % outdir, shell=True)
        with open("%s/durations.txt" % outdir) as durations:
            durations = map(lambda x: int(x) / (rate / 100), durations)
        with open("%s/offsets.txt" % outdir) as offsets:
            offsets = map(lambda x: int(x) / (rate / 50), offsets)
        os.remove("%s/durations.txt" % outdir)
        os.remove("%s/offsets.txt" % outdir)
        if use_channels:
            segments = zip(offsets, durations)
            channels = vad_reader.voiciest_channel_segments(segments, activations)
            segments = [(offset / 10, duration / 10, channel) for (offset, duration), channel in zip(segments, channels)]
        else:
            segments = [(offset / 10, duration / 10) for offset, duration in zip(offsets, durations)]
    else:
        segments = vad_reader.get_segments(activations, min_segment_length, threshold)
    with open("%s/times.txt" % outdir, 'w') as times:
        times.write("".join("%s %s\n" % (segment[0], segment[1]) for segment in segments))
    if use_channels or not use_vad_segments:
        # drop the raw VAD cuts; generate_segments() re-cuts them itself
        for segment in os.listdir("%s/segments" % outdir):
            os.remove("%s/segments/%s" % (outdir, segment))
    generate_segments(outdir, segments, use_channels)
    generate_features(outdir, rate)
    no_fluff_srt = '%s/nofluff.srt' % outdir
    remove_fluff.remove_fluff("%s/subs.srt" % directory, no_fluff_srt)
    with open("%s/transcription.txt" % outdir, 'w') as transcription:
        transcription.write(" ".join(' '.join(remove_fluff.corpusize(sub.text) for sub in pysrt.open("%s/nofluff.srt" % outdir)).split()).encode('utf-8'))
    subprocess.call("echo ' (uwotm8)' >> %s/transcription.txt" % outdir, shell=True)
    delete_vad_output_segments()
def generate_segments(outdir, segments, use_channels):
    """Cut one WAV file per segment out of the source audio with ffmpeg.

    segments: iterable of (start, duration, channel) tuples in deciseconds.
    NOTE(review): some callers appear to build 2-tuples when channels are
    unused -- confirm the 3-way unpack below always receives 3-tuples.
    Python 2 module (print statement).
    """
    command = "ffmpeg -y -i %s/audio_{}.wav -acodec copy -ss {} -t {} %s/segments/seg_{}.wav" % (outdir, outdir)
    for i, (start, duration, channel) in enumerate(segments):
        # deciseconds -> "HH:MM:SS.mmm" strings for ffmpeg's -ss / -t
        start_string = time_helper.time_from_millis(start * 100).strftime("%H:%M:%S.%f")[:-3]
        duration_string = time_helper.time_from_millis(duration * 100).strftime("%H:%M:%S.%f")[:-3]
        # zero-pad to ten digits so lexicographic file order == numeric order
        seg_10_digits = "0" * (10 - len(str(i + 1))) + str(i + 1)
        # channels are 0-based internally, 1-based in the audio file names
        channel_string = (channel + 1) if use_channels else "mono"
        curr_command = command.format(channel_string, start_string, duration_string, seg_10_digits)
        print curr_command
        subprocess.call(shlex.split(curr_command))
def generate_features(outdir, rate):
    """Write a 1-based ".scp" listing of the segment WAVs and run MFCC extraction.

    Delegates the actual feature extraction to ./mfcc.sh.
    Python 2 module (print statement).
    """
    with open("%s/segments.scp" % outdir, 'w') as scp:
        segment_files = sorted(os.listdir("%s/segments" % outdir))
        # one "<index> <path>" line per segment file, in sorted order
        scp.write("".join("%s %s/segments/%s\n" % (i + 1, outdir, name) for i, name in enumerate(segment_files)))
    command = "./mfcc.sh %s %s" % (outdir, rate)
    print command
    subprocess.call(shlex.split(command))
def delete_vad_output_segments():
    """Remove every file left in VAD/output_segments (path relative to the cwd)."""
    seg_dir = "VAD/output_segments"
    for name in os.listdir(seg_dir):
        os.remove("%s/%s" % (seg_dir, name))
# Script entry point: run the full pipeline defined above.
if __name__ == '__main__':
    main()
| {"/lib/moreno_recursion.py": ["/lib/time_align.py", "/lib/subtitles/remove_fluff.py"]} |
73,281 | jroitgrund/subsync | refs/heads/master | /lib/subtitles/compare.py | import itertools
import numpy
def simple_compare(subs1, subs2):
    """Pairwise start-time drift statistics between two subtitle sequences.

    Returns a 7-tuple:
    (max diff, index of max, total, mean, #diffs > 15s, #diffs > 10s, #diffs > 5s).
    Subtitles are compared positionally; the shorter sequence bounds the pairing.
    """
    # zip() replaces the Python-2-only itertools.izip; results are identical.
    a = [_begin_diff(sub1, sub2) for sub1, sub2 in zip(subs1, subs2)]
    worst = max(a)  # computed once instead of repeatedly
    return (worst, a.index(worst), sum(a), numpy.mean(a),
            len([diff for diff in a if diff > 15]),
            len([diff for diff in a if diff > 10]),
            len([diff for diff in a if diff > 5]))
def _begin_diff(sub1, sub2):
return abs((sub1.start - sub2.start).ordinal) / 1000.0
def _time_diff(sub1, sub2):
return (abs((sub1.start - sub2.start).ordinal) + abs((sub1.end - sub2.end).ordinal)) / 1000.0
def compare(first, second):
    """Total word-position mismatch score between two subtitle files.

    Each word's occurrence spans in one file are matched against the other
    file's spans via _difference; words present in only one file contribute
    their full span lengths. Lower is better, 0 means identical layouts.
    """
    loc1 = _word_locations(first)
    loc2 = _word_locations(second)
    seen = set()
    total = 0
    for word, positions in loc1.items():
        # missing words fall back to an empty span list, same as the
        # original KeyError branch
        total += _difference(positions, loc2.get(word, []))
        seen.add(word)
    for word, positions in loc2.items():
        if word not in seen:
            total += _difference(positions, [])
    return total
def _word_locations(subs):
res = dict()
for sub in subs:
sub_info = (sub.start, sub.end)
for word in sub.text.split(' '):
try:
res[word].append(sub_info)
except KeyError:
res[word] = [sub_info]
return res
def _difference(positions1, positions2):
    """Total unmatched time between two lists of (start, end) occurrence spans.

    Greedily pairs each span of positions1 with the not-yet-used span of
    positions2 that overlaps it most (ties broken by index, then span length)
    and accumulates the symmetric difference of each pair; spans left
    unmatched on either side contribute their full length.
    """
    used_pos2 = []
    diff = 0
    for pos1 in positions1:
        try:
            # max() over an empty generator raises ValueError -> no candidate left
            match_overlap, match_index, match_len =\
                max((_overlap(pos1, pos2), index, _len(pos2))
                    for (index, pos2) in
                    enumerate(positions2) if index not in used_pos2)
            used_pos2.append(match_index)
            # len(a) + len(b) - 2*overlap == time covered by exactly one span
            diff += match_len + _len(pos1) - 2 * match_overlap
        except ValueError:
            diff += _len(pos1)
    # leftover spans in positions2 count in full
    diff += sum(_len(pos2) for (index, pos2) in
                enumerate(positions2) if index not in used_pos2)
    return diff
def _overlap(pos1, pos2):
pos1_start, pos1_end = pos1
pos2_start, pos2_end = pos2
last_start, first_end = None, None
if pos1_start >= pos2_start:
last_start = pos1_start
else:
last_start = pos2_start
if pos2_start <= pos2_start:
first_end = pos1_end
else:
first_end = pos2_end
return max(0, (first_end - last_start).ordinal)
def _len(pos):
return (pos[1] - pos[0]).ordinal
| {"/lib/moreno_recursion.py": ["/lib/time_align.py", "/lib/subtitles/remove_fluff.py"]} |
73,282 | jroitgrund/subsync | refs/heads/master | /lib/moreno_recursion.py | import itertools
from lib.time_align import ActivationsBetween, best_partial_score_table, best_alignment
from lib import asr
from lib.subtitles.remove_fluff import corpusize
class AlignmentInfo:
    """Iteratively aligns subtitle texts to voice-activation frames.

    Runs several alignment passes; after each pass, ASR-confirmed anchors pin
    down (sub, time, length) points, and the next pass only re-aligns the
    spans between anchors.
    Python 2 module (xrange, itertools.izip, integer division).
    """

    def __init__(self, activations, texts, text_lengths, directory, syllable_duration, segment_seconds, window, min_len, use_fmllr, use_automaton, per_transcription_compile):
        # activations: per-frame voice activations; frame count = total timeline
        self._activations = activations
        self._total_length = activations.shape[0]
        self._num_subs = len(texts)
        self._texts = [corpusize(text) for text in texts]
        # expected duration (frames) of each text segment
        self._text_lengths = text_lengths
        # remaining fields are pass-through configuration for asr.get_anchors
        self._directory = directory
        self._syllable_duration = syllable_duration
        self._segment_seconds = segment_seconds
        self._window = window
        self._min_len = min_len
        self._use_fmllr = use_fmllr
        self._use_automaton = use_automaton
        self._per_transcription_compile = per_transcription_compile

    def get_alignment(self, passes=5):
        """Run `passes - 1` anchor-refinement passes, then one final pass.

        Returns the start frame of each subtitle, in subtitle order.
        """
        anchors = []
        for i in xrange(1, passes):
            anchors = self.anchor_pass(anchors)
        return [start for sub_num, start in self.anchor_pass(anchors, True)]

    def anchor_pass(self, anchors=[], final_pass=False):
        """One alignment pass between fixed anchor points.

        anchors: list of (sub_index, start_time, length) fixed points.
        The mutable [] default is safe here: the parameter is only read and
        rebound, never mutated in place.
        Returns refreshed anchors from ASR, unless final_pass is set, in
        which case the full (sub, start) alignment is returned.
        """
        # sentinel anchors bracket the whole timeline
        anchors = [(-1, 0, 0)] + anchors + [(self._num_subs, self._total_length, 0)]
        alignment = []
        for (start_anchor, end_anchor) in itertools.izip(anchors, itertools.islice(anchors, 1, None)):
            start_text, start_time, start_length = start_anchor
            first_text = start_text + 1
            first_time = start_time + start_length
            end_text, end_time, _ = end_anchor
            # align only the unanchored span between this pair of anchors,
            # then shift the relative result back to absolute coordinates
            activations = self._activations[first_time:end_time]
            text_lengths = self._text_lengths[first_text:end_text]
            alignment += [(sub + first_text, start + first_time) for sub, start in get_alignment(activations, text_lengths)]
            alignment.append((end_text, end_time))
        if final_pass:
            # drop the trailing end-of-timeline sentinel
            return alignment[:-1]
        # Python 2 integer division: frames -> seconds
        anchors = asr.get_anchors(self._directory, self._total_length / 10, alignment[:-1], self._texts, self._syllable_duration, self._segment_seconds, self._window, self._min_len, self._use_fmllr, self._use_automaton, self._per_transcription_compile)
        return [(sub, time, self._text_lengths[sub]) for sub, time in anchors]
def get_alignment(activations, text_lengths):
    """Align each text segment to a start frame via dynamic programming.

    Returns a list of (segment_index, start_frame) pairs, or [] when there
    is nothing to align on either side.
    """
    if len(text_lengths) == 0 or activations.shape[0] == 0:
        return []
    between = ActivationsBetween(activations)
    score_table = best_partial_score_table(text_lengths, between)
    starts = best_alignment(score_table, text_lengths, between)
    return list(enumerate(starts))
| {"/lib/moreno_recursion.py": ["/lib/time_align.py", "/lib/subtitles/remove_fluff.py"]} |
73,283 | jroitgrund/subsync | refs/heads/master | /lib/asr.py | import numpy
import itertools
import re
from lib.syllables import SyllableCounter
from lib.subtitles import remove_fluff
import os
from os import path
import pysrt
import string
import shutil
import subprocess
import sys
def get_anchors(directory, duration_seconds, alignment, texts, syllable_duration, segment_seconds, window, min_len, use_fmllr, use_automaton, per_transcription_compile):
    """Run ASR over each segment_seconds-sized chunk of the audio and return
    de-duplicated (sub_num, time) anchors where ASR output confirms the text.

    Python 2 module (xrange, print statements, integer division).
    """
    working = "%s/data" % directory
    # start each run from a clean scratch directory
    shutil.rmtree(working, True)
    os.mkdir(working)
    false_positives = 0
    results = []
    for minute in xrange(duration_seconds / segment_seconds):
        # subtitle texts currently aligned inside this chunk, widened by
        # `window` chunks on each side
        # NOTE(review): numpy.int was removed in NumPy 1.24; this targets an
        # older NumPy.
        times = numpy.array([time for sub, time in alignment], numpy.int)
        first, last = times_in_minute(minute, times, segment_seconds, window)
        minute_texts = texts[first:last]
        # audio segments (column 0 of times.txt) starting inside this chunk
        times = numpy.fromfile("%s/times.txt" % directory, sep="\n", dtype=numpy.int).reshape((-1, 2))[:,0]
        first, last = times_in_minute(minute, times, segment_seconds)
        prepare_data(directory, minute_texts, first, last, use_fmllr, use_automaton)
        if per_transcription_compile:
            res, false_pos = compile_results(directory, min_len, numpy.fromfile("%s/times.txt" % directory, sep="\n", dtype=numpy.int).reshape((-1, 2)), True)
            results.extend(res)
            false_positives += false_pos
            try:
                # reset per-chunk ASR output before the next chunk
                os.remove("%s/transcriptions.txt" % working)
                os.remove("%s/alignments.txt" % working)
            except:
                pass
    if not per_transcription_compile:
        results, false_positives = compile_results(directory, min_len, numpy.fromfile("%s/times.txt" % directory, sep="\n", dtype=numpy.int).reshape((-1, 2)), per_transcription_compile)
    print "Total count: have %s anchors, probably %s false positives" % (len(results), false_positives)
    syllable_counter = SyllableCounter()
    # NOTE(review): `times` here is whatever the last loop iteration left
    # bound -- confirm this is the intended segment-time table.
    results = [find_alignment(expression, times[segment - 1], segment_offset, texts, syllable_counter, syllable_duration) for expression, segment, segment_offset in results]
    # keep only the first anchor found for each subtitle
    seen = {}
    unduped = []
    for sub_num, time in results:
        if sub_num in seen:
            continue
        seen[sub_num] = 1
        unduped.append((sub_num, time))
    print "\n".join("%s is at %s" % result for result in unduped)
    return unduped
def find_alignment(expression, segment_time, segment_offset, texts, syllable_counter, syllable_duration):
    """Locate `expression` in the subtitle texts and estimate its absolute time.

    The time is the segment's start plus the in-segment offset, minus the
    estimated speaking time of the text preceding the expression within its
    subtitle. Falls back to scanning concatenated suffixes when the
    expression spans subtitle boundaries.
    Python 2 module (string.find, itertools.imap, xrange, print statement).
    """
    try:
        sub_num, sub = next((i, text) for i, text in enumerate(texts) if expression in text)
    except:
        # expression crosses a subtitle boundary: search the whitespace-
        # normalized join of every suffix, longest-last so the latest
        # matching start index wins
        sub_num, sub = next((num, text) for num, text in [(i, ' '.join(itertools.imap(lambda x: ' '.join(x.split()), texts[i:])))
                            for i in xrange(len(texts) - 1, -1, -1)] if expression in text)
    # estimate how long the text before the expression takes to speak
    before = sub[:string.find(sub, expression)]
    before_time = sum(syllable_counter.count_syllables(word) for word in before.split()) * syllable_duration
    print "%s is before, and lasts %s deciseconds" % (before, before_time)
    return (sub_num, int(segment_time + segment_offset - before_time))
def prepare_data(directory, texts, first, last, use_fmllr, use_automaton):
    """Write the per-chunk corpus/transcription files, slice the feature
    lists to rows [first+1, last], and run the ./run_asr.sh pipeline.

    Python 2 module (print statement); shells out via sed/echo.
    """
    working = "%s/data" % directory
    with open("%s/corpus.txt" % working, 'w') as corpus:
        corpus.write("".join(texts))
    with open("%s/curr_transcription.txt" % working, 'w') as curr_transcription:
        curr_transcription.write(" ".join("".join(texts).split()))
    # trailing utterance id expected by the scoring tools
    subprocess.call("echo ' (uwotm8)' >> %s/curr_transcription.txt" % working, shell=True)
    for feat_suffix in ["", "_3b", "_3b_mmi"]:
        # sed line numbers are 1-based, hence first + 1
        command = "sed -n %s,%sp %s/feats%s.scp > %s/curr%s.scp" % (first + 1, last, directory, feat_suffix, working, feat_suffix)
        print command
        subprocess.call(command, shell=True)
    command = "./run_asr.sh%s%s %s" % (" -f" if use_fmllr else "", " -a" if use_automaton else "", working)
    print command
    subprocess.call(command, shell=True)
def times_in_minute(minute, times, segment_seconds, extra=0):
    """Index range into sorted `times` (deciseconds) covered by chunk `minute`.

    The window spans `segment_seconds`-sized chunk number `minute`, widened
    by `extra` chunks on each side. Returns (first, last) suitable for slicing.
    """
    window_start = (minute - extra) * segment_seconds * 10
    window_end = (minute + extra + 1) * segment_seconds * 10
    first, last = numpy.searchsorted(times, [window_start, window_end])
    return first, last
def compile_results(directory, min_len, times, per_transcription_compile):
    """Score ASR hypotheses against the reference transcription with sclite
    and extract correctly-recognized runs as candidate anchors.

    times: (N, 2) array of (start, duration) per audio segment, deciseconds.
    Returns (results, false_positives) where each result is
    (expression, segment_number, offset_within_segment_deciseconds).
    Python 2 module (print statements, lazy-map-free semantics, xrange).
    """
    working = "%s/data" % directory
    if not path.isfile("%s/transcriptions.txt" % working):
        return [], 0
    correct_trans = "%s/curr_transcription.txt" % working if per_transcription_compile else "%s/transcription.txt" % directory
    # strip the leading segment id from each hypothesis line and flatten
    subprocess.call("awk '{$1=\"\";printf(\"%%s \", $0)}' %s/transcriptions.txt > %s/hyp.txt" % (working, working), shell=True)
    subprocess.call("echo ' (uwotm8)' >> %s/hyp.txt" % working, shell=True)
    # sclite SGML line 4 carries the word-level alignment codes
    subprocess.call("sclite -r %s -h %s/hyp.txt -i rm -o stdout sgml | sed -n 4p > %s/align.sgml" % (correct_trans, working, working), shell=True)
    with open('%s/align.sgml' % working) as sgml:
        alignments = ''.join(line for line in sgml).split(":")
    # each entry is "<code>,<word-info>"; code 'C' == correctly recognized
    pairs = [(part[0], part[2]) for part in map(lambda x: x.partition(','), alignments)]
    groups = [list(group) for alignment, group in itertools.groupby(pairs, lambda x: x[0]) if alignment == 'C']
    # keep only runs of at least min_len consecutive correct words
    groups = filter(lambda x: len(x) >= min_len, groups)
    expressions = [map(lambda x: x[1].partition('"')[2].partition('"')[0], pair) for pair in groups]
    expressions = [" ".join(expression).upper() for expression in expressions]
    with open("%s/transcriptions.txt" % working) as transcriptions:
        # (segment_number, whitespace-normalized transcription) per line
        transcriptions = [(int(segment), ' '.join(transcription.split())) for segment, _, transcription in (tuple(t.partition(' ')) for t in transcriptions)]
    segment = 0
    false_positives = 0
    pairs = []
    print expressions
    for expression in expressions:
        try:
            segment = next(index for index, transcription in transcriptions if expression in transcription)
        except:
            try:
                # expression may straddle segment boundaries: search joined
                # suffixes, longest-last so the latest start index wins
                segment = next(index for index, transcription in [(transcriptions[i][0], ' '.join(' '.join(map(lambda x: x[1], transcriptions[i:])).split()))
                               for i in xrange(len(transcriptions) - 1, -1, -1)] if expression in transcription)
            except:
                print "Can't find expression %s in transcriptions %s" % (expression, transcriptions)
                sys.exit(-1)
        print "%s is correct in segment %s" % (expression, segment)
        # deciseconds -> milliseconds for comparison with pysrt ordinals
        segment_start = times[segment - 1][0] * 100
        segment_end = times[segment - 1][1] * 100 + segment_start
        try:
            # sanity-check against the subtitle overlapping this segment
            sub = next(remove_fluff.corpusize(sub.text) for sub in pysrt.open("%s/nofluff.srt" % directory) if sub.end.ordinal > segment_start and sub.start.ordinal < segment_end)
            if expression not in sub and sub not in expression:
                print "%s is probably a false positive: not in %s" % (expression, sub)
                false_positives += 1
        except:
            print "Couldn't find a sub for segment between %s and %s miliseconds" % (segment_start, segment_end)
        pairs.append((expression, segment))
    print "Have %s anchors, probably %s false positives" % (len(expressions), false_positives)
    with open("%s/alignments.txt" % working) as alignments:
        alignments = "".join(list(alignments))
    results = []
    for expression, segment in pairs:
        # regex capturing the start time of the expression's first word
        # (first 99 words only, to bound the pattern size)
        rg = "\\s*".join(r"[0-9]* [0-9]* ([0-9]*\.[0-9]*) [0-9]*\.[0-9]* %s" % word for word in expression.split()[:99])
        try:
            # seconds -> deciseconds
            in_segment = float(re.search(rg, alignments).group(1)) * 10
        except:
            print "Can't find expression %s using regex %s in alignments: %s" % (expression, rg, alignments)
            sys.exit(-1)
        result = (expression, segment, in_segment)
        results.append(result)
    return results, false_positives
| {"/lib/moreno_recursion.py": ["/lib/time_align.py", "/lib/subtitles/remove_fluff.py"]} |
73,284 | jroitgrund/subsync | refs/heads/master | /lib/time_helper.py | from datetime import time
def time_from_millis(milliseconds):
    """Convert a millisecond count into a datetime.time (hours must be < 24)."""
    total_seconds, milli = divmod(milliseconds, 1000)
    hours, remainder = divmod(total_seconds, 3600)
    minutes, seconds = divmod(remainder, 60)
    return time(hours, minutes, seconds, milli * 1000)
| {"/lib/moreno_recursion.py": ["/lib/time_align.py", "/lib/subtitles/remove_fluff.py"]} |
73,285 | jroitgrund/subsync | refs/heads/master | /lib/subtitles/anchors.py | import itertools
import random
def get_anchors(subtitles, freq):
    """Pick one random word (and its interpolated time) from every freq-th subtitle.

    Returns a list of (word, time_ms) pairs; empty when freq is 0.
    """
    if freq == 0:
        return []
    anchors = []
    for sub in itertools.islice(subtitles, None, None, freq):
        words = sub.text.split(' ')
        pos = random.randrange(0, len(words))
        begin = sub.start.ordinal
        span = (sub.end - sub.start).ordinal
        # linear interpolation of the word's position inside the subtitle
        anchor_time = begin + int(span * (pos / float(len(words))))
        if anchor_time < begin or anchor_time > sub.end.ordinal:
            raise AssertionError("this shouldn't be possible")
        anchors.append((words[pos], anchor_time))
    return anchors
| {"/lib/moreno_recursion.py": ["/lib/time_align.py", "/lib/subtitles/remove_fluff.py"]} |
73,286 | jroitgrund/subsync | refs/heads/master | /lib/subtitles/remove_fluff.py | import pysrt
import re
import string
def corpusize(sub):
    """Normalize subtitle text into corpus form.

    Sentence enders become newlines, soft punctuation becomes spaces,
    whitespace is collapsed, per-line edges are trimmed, and the result
    is uppercased (always ends with a newline).
    """
    text = re.sub(r"(\?|!|\.)", "\n", sub + "\n")
    text = re.sub(r"(,|;|:|-)", " ", text)
    text = re.sub(" +", " ", text)
    text = re.sub("\n+", "\n", text)
    text = re.sub(r"(^ *| *$)", "", text, flags=re.MULTILINE)
    return text.upper()
def rmf(line):
    """Strip speaker labels, [bracketed] noise tags, <markup>, quotes and a
    leading dash from one subtitle line, then collapse whitespace.

    Fix: the Python-2-only `string.join(...)` (removed in Python 3) is
    replaced by the equivalent `' '.join(...)` -- identical output.
    """
    line = re.sub(r"(^ *-)", "", re.sub(r"([a-zA-Z]+:)|(\[.*\])|(\<[^\<]*\>)|" + '"',
                                        "", line), flags=re.MULTILINE)
    return ' '.join(line.split())
def remove_fluff(infile, outfile):
    """De-fluff every subtitle in `infile` and save the result as UTF-8 to `outfile`."""
    cleaned = pysrt.open(infile)
    for entry in cleaned:
        entry.text = rmf(entry.text)
    cleaned.save(outfile, 'utf-8')
| {"/lib/moreno_recursion.py": ["/lib/time_align.py", "/lib/subtitles/remove_fluff.py"]} |
73,287 | jroitgrund/subsync | refs/heads/master | /lib/vad_reader.py | import csv
import glob
import numpy
import BitVector
def get_segments(activations_list, min_duration, threshold):
    """Turn per-channel activations into voiced (start, duration, channel) segments.

    Thresholds the channel-mean activation into a voiced/unvoiced bitvector,
    then walks its runs with hysteresis: only a voiced run of at least
    min_duration opens a segment, only a silent run of at least min_duration
    closes one; shorter runs of either kind just extend the open segment.
    Output units are input frames / 10 (Python 2 integer division assumed).
    """
    activations = numpy.mean(activations_list, axis=0)
    sounds = BitVector.BitVector(size=activations.shape[0])
    for i, activation in enumerate(activations):
        sounds[i] = activation > threshold
    segments = []
    current_segment_duration = 0
    current_segment_start = 0
    start = 0
    for group in sounds.runs():
        voice = group[0] == '1'
        duration = len(group)
        extending_segment = current_segment_duration > 0
        known_sound = duration >= min_duration
        if not extending_segment:
            # only a long voiced run can open a new segment
            if known_sound and voice:
                current_segment_start = start
                current_segment_duration = duration
        if extending_segment:
            # only a long silent run can close it; everything else extends
            if known_sound and not voice:
                segments.append((current_segment_start, current_segment_duration))
                current_segment_duration = 0
            else:
                current_segment_duration += duration
        start += duration
    # flush a segment still open at end of audio
    if current_segment_duration > 0:
        segments.append((current_segment_start, current_segment_duration))
    return [(start / 10, duration / 10, most_voicy_channel(start, duration, activations_list)) for start, duration in segments]
def scale_activations(mult_activations):
    """Downsample per-channel activations to one integer per 10 frames.

    Takes the elementwise max across channels, scales by 1e10 and rounds to
    integers, then sums each consecutive block of 10 frames (the final block
    may be shorter).

    Fixes: `//` replaces `/` so the bin count stays an integer on Python 3,
    `range` replaces the Python-2-only `xrange`, and `dtype=int` replaces the
    `numpy.int` alias removed in NumPy 1.24 (all behavior-identical).
    """
    activations = numpy.multiply(numpy.max(mult_activations, axis=0), 10000000000)
    activations = numpy.rint(activations).astype(int)
    n_bins = activations.shape[0] // 10 + (0 if activations.shape[0] % 10 == 0 else 1)
    scaled_activations = numpy.zeros(n_bins, dtype=int)
    for i, block_start in enumerate(range(0, activations.shape[0], 10)):
        scaled_activations[i] = activations[block_start:block_start + 10].sum()
    return scaled_activations
def activations_from_csv(infiles):
    """Load one activation track per CSV file into a (len(infiles), N) float array.

    Each CSV row is (timestamp, activation); only column 1 is read. All
    files are assumed to have the same row count as the first one.

    Fixes: the line-count generator no longer leaks an open file handle, and
    `dtype=float` replaces the `numpy.float` alias removed in NumPy 1.24
    (same dtype, no behavior change).
    """
    with open(infiles[0]) as first_file:
        data_point_count = sum(1 for _ in first_file)
    activations = numpy.zeros((len(infiles), data_point_count), dtype=float)
    for i, infile in enumerate(infiles):
        with open(infile) as csv_output:
            reader = csv.reader(csv_output)
            for j, line in enumerate(reader):
                activations[i, j] = float(line[1])
    return activations
def movie_activations(directory):
    """Load every per-channel activation CSV found in `directory` into one array."""
    pattern = "%s/activations*.csv" % directory
    return activations_from_csv(glob.glob(pattern))
def most_voicy_channel(start, duration, activations):
    """Index of the channel with the highest summed activation over
    [start, start + duration).

    Ties go to the lowest channel index (list.index returns the first max).
    Fix: `range` replaces the Python-2-only `xrange` (identical iteration).
    """
    sums = [numpy.sum(activations[i][start:start + duration])
            for i in range(activations.shape[0])]
    return sums.index(max(sums))
def voiciest_channel_segments(segments, activations):
    """Best (most voiced) channel index for each (start, duration) segment."""
    channels = []
    for segment_start, segment_duration in segments:
        channels.append(most_voicy_channel(segment_start, segment_duration, activations))
    return channels
| {"/lib/moreno_recursion.py": ["/lib/time_align.py", "/lib/subtitles/remove_fluff.py"]} |
73,288 | jroitgrund/subsync | refs/heads/master | /lib/factor_automaton.py | import argparse
def build_automaton(strings):
    """Build an AT&T-style factor-automaton transition table over the strings.

    State 3 is the hub: every word gets its own state reachable from the hub,
    with weighted epsilon self/return arcs and a 0.8 arc to the next word.
    Output columns are: from, to, input, output, weight (weight empty for
    unweighted arcs).

    Fixes: list comprehensions replace `map(...)` so `.sort()` works on
    Python 3 (Python 3's map is a lazy object with no .sort()); the unused
    `states` local is removed. Output is unchanged.
    NOTE(review): "is this the last word" is tested by VALUE
    (`word == word_list[-1]`), so a repeated final word mid-string is also
    treated as last -- preserved as-is, confirm it is intended.
    """
    word_lists = [s.split() for s in strings]
    transitions = [(0, 3, '<s>'), (1, 2, '</s>'), (2, '', ''), (3, 1, '<eps>')]
    state = 4
    for word_list in word_lists:
        for i, word in enumerate(word_list):
            transitions.append((3, state, word))
            if word == word_list[-1]:
                transitions.append((state, 3, '<eps>'))
            else:
                transitions.append((state, 3, '<eps>', 0.1))
                transitions.append((state, state, '<eps>', 0.1))
                transitions.append((state, state + 1, word_list[i + 1], 0.8))
            state += 1
    # pad every tuple to (from, to, in, weight, out); out defaults to in
    transitions = [t if len(t) >= 4 else t + ("",) for t in transitions]
    transitions = [t if len(t) >= 5 else t + (t[2],) for t in transitions]
    transitions.sort()
    return '\n'.join(['%s\t%s\t%s\t%s\t%s' % (start, to, inl, out, prob) for start, to, inl, prob, out in transitions])
def main():
    """CLI entry point for the automaton builder.

    -f : treat `corpus` as a file of strings and build one automaton over them
    -w : emit a numbered word/symbol table from the corpus file instead
    (default) : treat `corpus` itself as a single literal string
    Python 2 module (print statements).
    """
    parser = argparse.ArgumentParser(
        description="Generate automaton from corpus")
    parser.add_argument('corpus')
    parser.add_argument('-f', action='store_true')
    parser.add_argument('-w', action='store_true')
    args = parser.parse_args()
    if args.f:
        with open(args.corpus) as c:
            print build_automaton(list(c))
    elif args.w:
        with open(args.corpus) as w:
            words = ' '.join(list(w)).split()
            all_words = []
            # de-duplicate while preserving first-seen order
            [all_words.append(word) for word in words if word not in all_words]
            # reserved FST symbols appended after the corpus words
            all_words.append('<eps>')
            all_words.append('<s>')
            all_words.append('</s>')
            all_words.append('#0')
            print '\n'.join("%s %s" % (word, i) for i, word in enumerate(all_words))
    else:
        print build_automaton([args.corpus])
# Script entry point.
if __name__ == '__main__':
    main()
73,289 | jroitgrund/subsync | refs/heads/master | /lib/syllables.py |
import codecs
class SyllableCounter:
    """Syllable lookup backed by the Moby hyphenation list (data/mhyph.txt).

    Fix: the original kept a flat list and linearly scanned the ENTIRE word
    list (tens of thousands of entries) for every lookup; a dict makes
    count_syllables O(1). First occurrence of a duplicate word wins, exactly
    as the original `next()` scan did.
    """

    def __init__(self):
        # word -> syllable count; U+00A5 marks hyphenation points in mhyph.txt,
        # so syllables = separator count + 1
        self._syllables = {}
        with codecs.open('data/mhyph.txt', encoding='utf-8') as mhyph:
            for word in mhyph:
                syllables = word.count(u"\u00A5") + 1
                key = word.replace(u"\u00A5", '').replace('\n', '').lower()
                self._syllables.setdefault(key, syllables)

    def count_syllables(self, word):
        """Known syllable count for `word` (case-insensitive), else a
        round(len/3) heuristic."""
        word = word.lower()
        return self._syllables.get(word, int(round(len(word) / 3.0)))
def text_lengths(text_segments, voice_duration):
    """Apportion voice_duration across segments proportionally to syllable count.

    Returns (lengths, syllable_duration) where lengths[i] is the rounded
    duration of segment i and syllable_duration is the per-syllable time.
    """
    counter = SyllableCounter()
    per_segment = [sum(counter.count_syllables(word) for word in segment.split())
                   for segment in text_segments]
    syllable_duration = voice_duration / float(sum(per_segment))
    lengths = [int(round(count * syllable_duration)) for count in per_segment]
    return lengths, syllable_duration
| {"/lib/moreno_recursion.py": ["/lib/time_align.py", "/lib/subtitles/remove_fluff.py"]} |
73,295 | Meitie/WTC_Code_Clinics | refs/heads/main | /user_logging/__init__.py | import user_logging.login as login
import user_logging.logout as logout | {"/user_logging/__init__.py": ["/user_logging/login.py", "/user_logging/logout.py"], "/patient/__init__.py": ["/patient/insert.py", "/patient/delete.py"], "/clinician/__init__.py": ["/clinician/insert.py", "/clinician/delete.py", "/clinician/update.py"], "/clinic_calendars/__init__.py": ["/clinic_calendars/client_calendar.py"], "/api_calls/__init__.py": ["/api_calls/get_events.py"], "/user_logging/login.py": ["/api_calls/__init__.py"]} |
73,296 | Meitie/WTC_Code_Clinics | refs/heads/main | /patient/__init__.py | import patient.insert as book
import patient.delete as leave | {"/user_logging/__init__.py": ["/user_logging/login.py", "/user_logging/logout.py"], "/patient/__init__.py": ["/patient/insert.py", "/patient/delete.py"], "/clinician/__init__.py": ["/clinician/insert.py", "/clinician/delete.py", "/clinician/update.py"], "/clinic_calendars/__init__.py": ["/clinic_calendars/client_calendar.py"], "/api_calls/__init__.py": ["/api_calls/get_events.py"], "/user_logging/login.py": ["/api_calls/__init__.py"]} |
73,297 | Meitie/WTC_Code_Clinics | refs/heads/main | /clinician/update.py | def current_events(service, calander_id):
"""
Grabs the current events that are on the "code-clinics" calendar,
and saves it to the event variable.
:returns: event
"""
event = service.events().get(calendarId='teamtwotesting@gmail.com', eventId=calander_id).execute()
return event
def updaters(service, event):
    """
    Push an updated event body to the clinic calendar and report the outcome.

    Prints "Your event has been updated" on success, otherwise
    "No event with that ID was found".
    Fix: bare `except:` narrowed to `except Exception` so SystemExit and
    KeyboardInterrupt still propagate instead of being swallowed.
    """
    try:
        service.events().update(calendarId='teamtwotesting@gmail.com', eventId=event['id'], body=event, maxAttendees=2, sendUpdates='all', sendNotifications=True, alwaysIncludeEmail=True).execute()
        print("Your event has been updated")
    except Exception:
        print("No event with that ID was found")
def update_event(service, params, events, user_name):
    """
    Update the summary/description of a clinic slot owned by the current user.

    params: [event_id, new_summary, new_description] -- exactly three items
    are required for an update to proceed. Only the slot's creator may edit
    it, and the fetched event's id must match the requested one.
    """
    event = current_events(service, params[0])
    summary = event["summary"]
    description = event["description"]
    if len(params) == 3:
        summary, description = params[1], params[2]
    else:
        print("Please add what you would like to change.")
        return
    event['summary'] = summary
    event['description'] = description
    owner_email = user_name + '@student.wethinkcode.co.za'
    if event["creator"]['email'] != owner_email:
        print("This is not your slot, please choose your own slot.")
        return
    if event["id"] != params[0]:
        print("The ID's do not match, please try again.")
        return
    updaters(service, event)
73,298 | Meitie/WTC_Code_Clinics | refs/heads/main | /patient/insert.py | import datetime
def already_booked(slots, attendees, user_name):
    """
    Return True if the user has NOT yet joined this slot, False if their
    student email already appears among the attendees.

    `slots` is unused but kept for interface compatibility with callers.
    Fix: replaces the flag-plus-`== True` dance with a direct any() test
    (identical truth table).
    """
    target = user_name + '@student.wethinkcode.co.za'
    return not any(attendee["email"] == target for attendee in attendees)
def fully_booked(slots, attendees, user_name):
    """
    Return True while the slot still has room (fewer than 2 attendees),
    False when it is full.

    `slots` and `user_name` are unused but kept for interface compatibility.
    Fix: the if/else returning boolean literals collapses to one comparison.
    """
    return len(attendees) < 2
def user_pre_slotted(cc_events, user_name):
    """
    Start dateTimes of every clinic event created by `user_name`.

    The creator's username is the part of the email before '@'.
    Fix: the original built two full parallel lists (creators, start_times --
    the latter entirely unused) plus a third name list before filtering;
    a single pass produces the same result.
    """
    slots = []
    for event in cc_events:
        creator_name = event['creator']['email'].split('@')[0]
        if creator_name == user_name:
            slots.append(event['start']['dateTime'])
    return slots
def current_events(service, calander_id):
    """
    Fetch one event by id from the hard-coded clinic calendar.
    :returns: the event resource dict
    """
    request = service.events().get(calendarId='teamtwotesting@gmail.com', eventId=calander_id)
    return request.execute()
def make_datetime_from_string(string):
    """
    Parse 'YYYY-MM-DDTHH:MM:SS±HHMM' into a timezone-aware datetime.
    :return: an aware datetime.datetime
    """
    fmt = "%Y-%m-%dT%H:%M:%S%z"
    return datetime.datetime.strptime(string, fmt)
def freebusy_check(service, date, time, user_name):
    """
    Query free/busy for the student's and the clinic's calendars over the
    90-minute slot starting at `date time` (SAST, +02:00).
    Returns the raw freebusy query result.
    """
    slot_start = make_datetime_from_string(f'{date}T{time}:00+0200')
    slot_end = slot_start + datetime.timedelta(minutes=90)
    body = {
        "timeMin": slot_start.isoformat(),
        "timeMax": slot_end.isoformat(),
        "timeZone": 'Africa/Johannesburg',
        "items": [
            {"id": user_name + '@student.wethinkcode.co.za'},
            {'id': 'teamtwotesting@gmail.com'},
        ],
    }
    return service.freebusy().query(body=body).execute()
def do_you_have_meetings(service, date, time, user_name):
    """
    Return True when the student's own calendar is FREE for the 90-minute
    slot starting at `date time`, False when they already have something.

    NOTE(review): the name reads inverted relative to the return value --
    kept for interface compatibility; callers test `== False`.
    Fixes: removed the unreachable trailing `return False` after the
    if/else (both branches returned) and collapsed the flag to one
    comparison.
    """
    events = freebusy_check(service, date, time, user_name)
    calendars = events['calendars']
    student = calendars[user_name + '@student.wethinkcode.co.za']
    # clinic calendar resolved as in the original (preserves a KeyError if
    # absent) even though its busy list is not inspected here
    clinic = calendars['teamtwotesting@gmail.com']
    return student['busy'] == []
def insert_patient(service, command_params, user_name, cc_events):
    """
    Add the logged-in student as an attendee of an existing clinic slot.

    command_params[0]: id of the event to join.
    Aborts (with a printed message) when the user already joined this slot,
    when the slot has 2 attendees, or when the user's own calendar is busy
    over the slot; otherwise pushes the updated attendee list.
    """
    event = current_events(service, command_params[0])
    # '2021-01-02T08:30:00+02:00' -> date '2021-01-02', time '08:30'
    begin = event['start']["dateTime"]
    begin = begin.split("T")
    date = begin[0]
    time = begin[1][:5]
    slots = user_pre_slotted(cc_events, user_name)
    if already_booked(slots, event["attendees"], user_name) == False:
        print(f"You have already joined a slot on '{date}' at '{time}'.")
        return
    if fully_booked(slots, event["attendees"], user_name) == False:
        print(f"Sorry this event is fully booked.")
        return
    event['attendees'].append({'email': f'{user_name}@student.wethinkcode.co.za'})
    # NOTE(review): the free/busy check runs AFTER the local append; harmless
    # because the event is only pushed below, but worth confirming.
    # do_you_have_meetings returns True when the user's calendar is free.
    if do_you_have_meetings(service, date, time, user_name) == False:
        print("You already have a meeting at this time in your calendar.")
        return
    try:
        service.events().update(calendarId='teamtwotesting@gmail.com', eventId=event['id'], body=event, maxAttendees=2, sendUpdates='all', sendNotifications=True, alwaysIncludeEmail=True).execute()
        print("You have successfully joined the meeting")
    except:
        print("No event with that name was found")
    return
73,299 | Meitie/WTC_Code_Clinics | refs/heads/main | /api_calls/get_events.py | import datetime, json
def get_code_clinic_events(service, future_date):
    """
    Fetch the clinic calendar's events for the next `future_date` days,
    cache them to .calendar_ttt.json, and return them.
    """
    # Call the Calendar API; 'Z' indicates UTC time
    time_min = datetime.datetime.utcnow().isoformat() + 'Z'
    end_day = datetime.date.today() + datetime.timedelta(days=future_date)
    time_max = str(end_day) + 'T23:59:59.999999+02:00'
    response = service.events().list(calendarId='teamtwotesting@gmail.com', timeMin=time_min,
                                     timeMax=time_max, singleEvents=True,
                                     orderBy='startTime').execute()
    events = response.get('items', [])
    with open('.calendar_ttt.json', 'w+') as f:
        json.dump(events, f)
    return events
def get_user_events(service, future_date):
    """
    Fetch the user's primary-calendar events for the next `future_date` days,
    cache them to .calendar.json, and return them.
    """
    # Call the Calendar API; 'Z' indicates UTC time
    time_min = datetime.datetime.utcnow().isoformat() + 'Z'
    end_day = datetime.date.today() + datetime.timedelta(days=future_date)
    time_max = str(end_day) + 'T23:59:59.999999+02:00'
    response = service.events().list(calendarId='primary', timeMin=time_min,
                                     timeMax=time_max, singleEvents=True,
                                     orderBy='startTime').execute()
    events = response.get('items', [])
    with open('.calendar.json', 'w+') as f:
        json.dump(events, f)
    return events
73,300 | Meitie/WTC_Code_Clinics | refs/heads/main | /clinician/__init__.py | import clinician.insert as create
import clinician.delete as cancel
import clinician.update as update | {"/user_logging/__init__.py": ["/user_logging/login.py", "/user_logging/logout.py"], "/patient/__init__.py": ["/patient/insert.py", "/patient/delete.py"], "/clinician/__init__.py": ["/clinician/insert.py", "/clinician/delete.py", "/clinician/update.py"], "/clinic_calendars/__init__.py": ["/clinic_calendars/client_calendar.py"], "/api_calls/__init__.py": ["/api_calls/get_events.py"], "/user_logging/login.py": ["/api_calls/__init__.py"]} |
73,301 | Meitie/WTC_Code_Clinics | refs/heads/main | /clinic_calendars/client_calendar.py | import datetime
import statistics
import terminaltables
from terminaltables import AsciiTable
import json
# Mutable 2-D table (rows x columns) shared by the generator functions below;
# row 0 is the date header, column 0 the slot start times.
table_data = []
# Half-hour start times grouped into the eight 90-minute display rows.
full_time_list = [['08:00'], ['08:30', '09:00', '09:30'], ['10:00', '10:30', '11:00'], ['11:30', '12:00', '12:30'], ['13:00', '13:30', '14:00'], ['14:30', '15:00', '15:30'], ['16:00', '16:30', '17:00'], ['17:30']]
def find_time(x):
    """Return the 1-based table row whose slot times contain the event's
    HH:MM start, or None when no row matches.

    Fix: replaces the try/except-IndexError control flow and hand-maintained
    counter with enumerate plus a membership test (identical results).
    """
    # '2021-01-02T08:30:00+02:00' -> '08:30'
    start_hhmm = x['start']['dateTime'][:-9][11:]
    for row, slot_times in enumerate(full_time_list, start=1):
        if start_hhmm in slot_times:
            return row
    return None
def list_of_times():
    """Fill column 0 of the global table with the eight 90-minute slot start
    times (08:00 ... 17:30) and return them as a list.

    Assumes table_data already holds at least 9 rows (see
    determine_and_set_table_height); rows 1-8 receive the times.
    """
    time_list = []
    # arbitrary anchor date; only the time-of-day arithmetic matters
    time = datetime.datetime(year=2020,month=12,day=1,hour=7,minute=0)
    time_list.append('08:00')
    table_data[1][0] = time_list[0]
    for i in range(6):
        # 07:00 + 90min steps -> 08:30, 10:00, ..., 16:00
        time = time + datetime.timedelta(minutes=90)
        time_list.append(time.strftime('%H:%M'))
        table_data[i+2][0] = time_list[i+1]
    time_list.append('17:30')
    table_data[8][0] = time_list[7]
    return time_list
def creating_slot(dict):
    """Format a personal-calendar event for display in a table cell.

    Returns 'Title: ...\\nCreator: ...\\nTime: HH:MM'. The title falls back
    to 'None' when the event carries no summary.
    """
    try:
        string = 'Title: ' + dict['summary']
    except KeyError:  # events can be created without a summary
        string = 'Title: None'
    string += '\nCreator: ' + dict['creator']['email']
    # 'YYYY-MM-DDTHH:MM:SS+ZZ:ZZ' -> strip seconds/offset, keep 'HH:MM'.
    time_string = dict['start']['dateTime'][:-9][11:]
    return string + '\nTime: ' + time_string
def creating_list_of_week(i, today_date):
    """Return *i* consecutive dates starting at *today_date* as 'YYYY-MM-DD' strings."""
    return [(today_date + datetime.timedelta(days=offset)).strftime('%Y-%m-%d')
            for offset in range(i)]
def generate_days(i, r, list_week):
    """Fill the table's header row: a blank corner cell then the week's dates.

    *r* is accepted for call compatibility with generate_table() but unused.
    """
    global table_data
    table_data[0].append('')
    table_data[0].extend(list_week[day] for day in range(i))
def generate_list_of_empty_strings(i):
    """Return a list of i + 1 empty strings (one blank table row: label column + i days)."""
    return [''] * (i + 1)
def determine_and_set_table_height(dict, temp_list):
    """Append the table's rows (empty header + 8 slot rows) and collect event dates.

    Returns (0, list_of_dates); the leading zero is preserved for call
    compatibility — generate_table() threads it through as a row counter.
    """
    global table_data
    list_of_dates = [event['start']['dateTime'][:-15] for event in dict]
    table_data.append([])
    for _ in range(8):
        table_data.append(temp_list.copy())
    return 0, list_of_dates
def writing_to_table(dict, rows, list_week, list_of_dates, max, time_list):
    """Mark each event's slot in the table with an 'X'.

    `rows`, `list_of_dates`, `max` and `time_list` are unused here; they are
    kept for call compatibility with generate_table().
    """
    global table_data
    count = 1
    for x in dict:
        try:
            # Column: the event date's position inside the displayed week.
            index = list_week.index(x['start']['dateTime'][:-15])
            # Row: slot index from find_time(); None when the time is unknown.
            index2 = find_time(x)
            table_data[index2][index+1] = ' ' + 'X'
        except:
            # Bare except deliberately skips events outside the week
            # (ValueError) or with unknown times (TypeError from indexing
            # with None). NOTE(review): narrowing to (ValueError, TypeError,
            # IndexError, KeyError) would be safer — confirm before changing.
            pass
        count = count + 1
def writing_to_table_command(list_week):
    """Fill every still-empty cell with the copy-pasteable booking command.

    The command combines the column's date with the row's time label
    (column 0 of the same row), producing 'app create <date> <time>'.
    """
    global table_data
    count1 = 0
    while count1 <= 8:
        count2 = 0
        while count2 <= 8:
            if table_data[count1][count2] == '':
                # NOTE(review): when count2 == 0 this reads list_week[-1]
                # (wraps to the last date). The header corner is blanked
                # again below, but any other empty column-0 cell would get
                # a bogus command — confirm this cannot occur in practice.
                string = 'Command:\napp create ' + list_week[count2 - 1] + ' ' + table_data[count1][0]
                table_data[count1][count2] = string
            count2 = count2 + 1
        count1 = count1 + 1
    # Restore the blank corner cell overwritten by the loop above.
    table_data[0][0] = ''
def generate_table(i, dict, username):
    """Assemble the weekly booking table for *i* days of events.

    *username* is accepted for call compatibility but unused here.
    """
    global table_data
    week = creating_list_of_week(i, datetime.date.today())
    blank_row = generate_list_of_empty_strings(i)
    count, dates = determine_and_set_table_height(dict, blank_row)
    generate_days(i, count, week)
    slot_times = list_of_times()
    writing_to_table(dict, count, week, dates, count, slot_times)
    writing_to_table_command(week)
def print_table(i, events, username):
    """Build the weekly table for *events* and print it to stdout."""
    generate_table(i, events, username)
    rendered = terminaltables.SingleTable(table_data)
    rendered.inner_row_border = True
    rendered.padding_left = 0
    rendered.padding_right = 0
    print(rendered.table)
| {"/user_logging/__init__.py": ["/user_logging/login.py", "/user_logging/logout.py"], "/patient/__init__.py": ["/patient/insert.py", "/patient/delete.py"], "/clinician/__init__.py": ["/clinician/insert.py", "/clinician/delete.py", "/clinician/update.py"], "/clinic_calendars/__init__.py": ["/clinic_calendars/client_calendar.py"], "/api_calls/__init__.py": ["/api_calls/get_events.py"], "/user_logging/login.py": ["/api_calls/__init__.py"]} |
def current_events(service, calander_id):
    """Fetch and return the calendar event whose event ID is *calander_id*.

    Despite the parameter name, *calander_id* is the event ID; the calendar
    ID itself is hard-coded to the clinic's shared calendar.
    """
    fetched = service.events().get(calendarId='teamtwotesting@gmail.com', eventId=calander_id).execute()
    return fetched
def leaving(service, event):
    """Push *event* (attendee list already modified) back to the calendar.

    Prints a confirmation on success; any API failure is reported with the
    not-found message.
    """
    try:
        service.events().update(calendarId='teamtwotesting@gmail.com', eventId=event['id'], body=event, maxAttendees=2, sendUpdates='all', sendNotifications=True, alwaysIncludeEmail=True).execute()
    except:
        print("No event with that name was found.")
    else:
        print("You have left the meeting.")
def delete_event(service, params, user_name):
    """Remove the logged-in user from the booked slot whose ID is *params*.

    Fetches the event, locates the attendee entry matching the user's
    student e-mail, removes exactly that entry and pushes the change via
    leaving(). Prints an error when the user is not an attendee.

    Fix: the original incremented a counter for every non-matching attendee
    without breaking out of the loop, so ``pop(count)`` could remove the
    wrong attendee (e.g. it popped the host when the user was listed first).
    We now record the user's own index.
    """
    event = current_events(service, params)
    user_email = user_name + '@student.wethinkcode.co.za'
    user_index = None
    for position, attendee in enumerate(event['attendees']):
        if attendee['email'] == user_email:
            user_index = position
            break
    if user_index is not None:
        if event["id"] == params:
            event['attendees'].pop(user_index)
            leaving(service, event)
        else:
            # current_events() fetched by this very ID, so this branch is a
            # defensive check only.
            print("The ID's do not match, please try again.")
    else:
        print("This is not your slot, please choose your own slot.")
73,303 | Meitie/WTC_Code_Clinics | refs/heads/main | /tests/test_app.py | import unittest
from unittest.mock import patch
import io
import sys
import os
USER_PATHS = os.path.abspath(os.path.join(os.path.dirname( __file__ ), '../'))
sys.path.insert(0, USER_PATHS + "/")
from importlib.util import spec_from_loader, module_from_spec
from importlib.machinery import SourceFileLoader
spec = spec_from_loader("app", SourceFileLoader("app", USER_PATHS + '/app'))
app = module_from_spec(spec)
spec.loader.exec_module(app)
sys.modules['app'] = app
import app
class Test_Quickstart(unittest.TestCase):
    """Unit tests for the top-level `app` CLI entry-point helpers."""
    # NOTE(review): redirecting sys.stdout at class-definition time silences
    # print output for the whole interpreter, not just these tests — confirm
    # this is intentional.
    text = io.StringIO()
    sys.stdout = text
    def test_valid_action(self):
        # valid_action() must list every supported CLI command verb.
        result = app.valid_action()
        self.assertEqual(result,["create", "cancel", "update", "join", "leave","logout",'help','create_calendar','join_calendar','delete_calendar','update_calendar','leave_calendar'])
    def test_valid_command(self):
        # Known verbs validate; unknown ones are rejected.
        result1 = app.valid_command('create')
        result2 = app.valid_command('cancel')
        result3 = app.valid_command('create_calendar')
        result4 = app.valid_command('hello')
        self.assertEqual(result1,True)
        self.assertEqual(result2,True)
        self.assertEqual(result3,True)
        self.assertEqual(result4,False)
    def test_arguments(self):
        # TODO: argument handling is not covered yet.
        pass
    def test_help_func(self):
        # help_func() must return the full usage text verbatim.
        result = app.help_func()
        self.assertEqual(result,"""
These are the code-clinics commands that can be used in various situations:\n\
Please copy and paste the code in '<>' to call the functions:\n\
\nlogging in and out:\n\
login logs the user in automatically when a command is entered \
<./app login "username">
logout logs you out of the code clinics calendar \
<./app logout>
\nVolunteering commands:\n\
create Create a slot (of 3x30 minutes), to host a code-clinic \
<./app create "date" "time" "summary" "description">
update Update an existing slots description/summary \
<./app update "id" "summary" "description">
delete Deletes an individual users sessions of code clinics \
<./app delete "id">
\nBooking commands:\n\
join Join a code clinic slot (of 1x30 minutes) with a host \
<./app join "id_of_session" "description">
leave Leave a session that you are apart of \
<./app leave "id_of_session">
\nCalendar commands:\n\
voluntee_calendar See a preview of the volunteering calendar, and the available slots\
<./app "create_calendar_slot">
join_calendar See a preview of the booking calendar, and the available slots \
<./app "join_calendar_slot">
delete_calendar See all the slots you can delete \
<./app "delete_calendar_slot">
update_calendar See all the events you can update \
<./app "update_calendar_slot">
leave_calendar See all the events you can leave \
<./app "leave_calendar_slot">
""")
    def test_argument_validator(self):
        # A bare command parses to (command, []); malformed input to ('', '').
        result1 = app.argument_validator(['create_calendar'])
        result2 = app.argument_validator(['join hbsadfbahsjbd'])
        self.assertEqual(result1,('create_calendar', []))
        self.assertEqual(result2,('',''))
# Allow running this test module directly with `python`.
if __name__ == '__main__':
    unittest.main()
| {"/user_logging/__init__.py": ["/user_logging/login.py", "/user_logging/logout.py"], "/patient/__init__.py": ["/patient/insert.py", "/patient/delete.py"], "/clinician/__init__.py": ["/clinician/insert.py", "/clinician/delete.py", "/clinician/update.py"], "/clinic_calendars/__init__.py": ["/clinic_calendars/client_calendar.py"], "/api_calls/__init__.py": ["/api_calls/get_events.py"], "/user_logging/login.py": ["/api_calls/__init__.py"]} |
73,304 | Meitie/WTC_Code_Clinics | refs/heads/main | /clinic_calendars/delete_calendar.py | import datetime
import statistics
import terminaltables
from terminaltables import AsciiTable
import json
table_data = []
def list_of_times():
    """Fill column 0 with the twenty 30-minute slot times and return them.

    Slots run 08:00 through 17:30, generated by stepping 30 minutes at a
    time from a 07:30 base. Assumes the table already has 21 rows.
    """
    cursor = datetime.datetime(year=2020, month=12, day=1, hour=7, minute=30)
    time_list = []
    for row in range(20):
        cursor += datetime.timedelta(minutes=30)
        time_list.append(cursor.strftime('%H:%M'))
        table_data[row + 1][0] = time_list[row]
    return time_list
def creating_slot_clinic(username, dict):
    """Format one of the user's own, still-open clinic slots for the delete table.

    Raises Exception when the slot is not deletable by *username*: either an
    attendee other than the user is present, or there is more than one
    attendee (someone has booked the slot). Callers catch this broadly and
    simply skip the cell, so the exception type is kept as-is.
    """
    email_list = [attendee['email'] for attendee in dict['attendees']]
    user_tag = username + '@student.wethinkcode.co.za'
    for address in email_list:
        if user_tag not in address:
            raise Exception('')
    if len(email_list) != 1:
        raise Exception('')
    try:
        string = 'Title: ' + dict['summary']
    except KeyError:  # slots can be created without a summary
        string = 'Title: None'
    string += '\nCreator: ' + dict['creator']['email']
    # Parsed but not displayed; a missing 'start' key still propagates
    # KeyError to the caller, which then skips the cell (as before).
    _time_string = dict['start']['dateTime'][:-9][11:]
    command_string = '\nCommand:\n' + 'app cancel ' + dict['id']
    return string + command_string
def creating_list_of_week(i, today_date):
    """Return *i* consecutive 'YYYY-MM-DD' strings beginning at *today_date*."""
    week = []
    for day_offset in range(i):
        day = today_date + datetime.timedelta(days=day_offset)
        week.append(day.strftime('%Y-%m-%d'))
    return week
def generate_days(i, r, list_week):
    """Write the header row: a blank corner cell followed by the week's dates.

    *r* is unused; it is retained for call compatibility with generate_table().
    """
    global table_data
    table_data[0].append('')
    for date_label in list_week[:i]:
        table_data[0].append(date_label)
def generate_list_of_empty_strings(i):
    """Return a list of i + 1 empty strings (one blank table row: label column + i days)."""
    return [''] * (i + 1)
def determine_and_set_table_height(dict, temp_list):
    """Append the header row plus twenty slot rows and collect the event dates.

    Returns (0, list_of_dates); the leading zero is preserved for call
    compatibility — generate_table() threads it through as a row counter.
    """
    global table_data
    list_of_dates = [event['start']['dateTime'][:-15] for event in dict]
    table_data.append([])
    for _ in range(20):
        table_data.append(temp_list.copy())
    return 0, list_of_dates
def writing_to_table(dict, rows, list_week, list_of_dates, max, time_list, username):
    """Place the user's deletable slots into their (time row, day column) cells.

    `rows`, `list_of_dates` and `max` are unused; kept for call compatibility.
    """
    global table_data
    count = 1
    for x in dict:
        try:
            # Column: the event date's position within the displayed week.
            index = list_week.index(x['start']['dateTime'][:-15])
            # 'YYYY-MM-DDTHH:MM:SS+ZZ:ZZ' -> 'HH:MM'.
            time_string = x['start']['dateTime'][:-9]
            time_string = time_string[11:]
            # Row: the time's position in the 30-minute slot list.
            index2 = time_list.index(time_string)
            table_data[index2+1][index+1] = creating_slot_clinic(username,x)
        except:
            # Bare except deliberately skips events outside the week, with
            # unknown times, or rejected by creating_slot_clinic() (slots the
            # user cannot delete raise Exception there).
            pass
        count = count + 1
def generate_table(i, dict, username):
    """Build and print the weekly table of slots the user may delete."""
    global table_data
    list_week = creating_list_of_week(i,datetime.date.today())
    temp_list = generate_list_of_empty_strings(i)
    try:
        count,list_of_dates = determine_and_set_table_height(dict,temp_list)
    except:
        # NOTE(review): prints the literal "1" and aborts when the event list
        # is malformed — presumably a leftover debug marker; confirm whether
        # a real error message is intended.
        return print("1")
    generate_days(i,count,list_week)
    time_list = list_of_times()
    writing_to_table(dict,count,list_week,list_of_dates,count,time_list,username)
    table = terminaltables.SingleTable(table_data)
    table.inner_row_border = True
    table.padding_left = 0
    table.padding_right = 0
    print(table.table)
| {"/user_logging/__init__.py": ["/user_logging/login.py", "/user_logging/logout.py"], "/patient/__init__.py": ["/patient/insert.py", "/patient/delete.py"], "/clinician/__init__.py": ["/clinician/insert.py", "/clinician/delete.py", "/clinician/update.py"], "/clinic_calendars/__init__.py": ["/clinic_calendars/client_calendar.py"], "/api_calls/__init__.py": ["/api_calls/get_events.py"], "/user_logging/login.py": ["/api_calls/__init__.py"]} |
73,305 | Meitie/WTC_Code_Clinics | refs/heads/main | /user_logging/logout.py | import json, os
def logout():
    """Log the current user out.

    Blanks the user/role/expiry fields in .user_info.json and removes the
    cached Google token file so the next command forces a fresh login.
    """
    with open('.user_info.json', 'r+') as f:
        data = json.load(f)
    data['user'] = ''
    data['role'] = ''
    data['expire'] = ''
    with open('.user_info.json', 'w+') as f:
        json.dump(data, f)
    try:
        os.remove('token.pickle')
    except OSError:
        # Token file may not exist (user never completed a login); narrowed
        # from a bare except so programming errors are no longer swallowed.
        pass
73,306 | Meitie/WTC_Code_Clinics | refs/heads/main | /tests/test_calendar.py | import unittest
from unittest.mock import patch
import io
import sys
import os
USER_PATHS = os.path.abspath(os.path.join(os.path.dirname( __file__ ), '../'))
sys.path.insert(0, USER_PATHS + "/")
import clinic_calendars.calendar as c
import datetime
class Test_Quickstart(unittest.TestCase):
    """Unit tests for the patient-calendar helpers in clinic_calendars.calendar."""
    def test_list_week(self):
        # creating_list_of_week must yield one date string per requested day.
        result = c.creating_list_of_week(8,datetime.date.today())
        self.assertEqual(len(result),8)
    def test_empty_strings(self):
        # One extra empty string beyond the day count (the time-label column).
        result = c.generate_list_of_empty_strings(3)
        self.assertEqual(result,['','','',''])
# Allow running this test module directly with `python`.
if __name__ == '__main__':
    unittest.main()
| {"/user_logging/__init__.py": ["/user_logging/login.py", "/user_logging/logout.py"], "/patient/__init__.py": ["/patient/insert.py", "/patient/delete.py"], "/clinician/__init__.py": ["/clinician/insert.py", "/clinician/delete.py", "/clinician/update.py"], "/clinic_calendars/__init__.py": ["/clinic_calendars/client_calendar.py"], "/api_calls/__init__.py": ["/api_calls/get_events.py"], "/user_logging/login.py": ["/api_calls/__init__.py"]} |
73,307 | Meitie/WTC_Code_Clinics | refs/heads/main | /clinic_calendars/__init__.py | import clinic_calendars.calendar as patient_calendar
import clinic_calendars.client_calendar as clinician_calendar | {"/user_logging/__init__.py": ["/user_logging/login.py", "/user_logging/logout.py"], "/patient/__init__.py": ["/patient/insert.py", "/patient/delete.py"], "/clinician/__init__.py": ["/clinician/insert.py", "/clinician/delete.py", "/clinician/update.py"], "/clinic_calendars/__init__.py": ["/clinic_calendars/client_calendar.py"], "/api_calls/__init__.py": ["/api_calls/get_events.py"], "/user_logging/login.py": ["/api_calls/__init__.py"]} |
73,308 | Meitie/WTC_Code_Clinics | refs/heads/main | /api_calls/__init__.py | import api_calls.api_call as serives_maker
import api_calls.get_events as event_maker | {"/user_logging/__init__.py": ["/user_logging/login.py", "/user_logging/logout.py"], "/patient/__init__.py": ["/patient/insert.py", "/patient/delete.py"], "/clinician/__init__.py": ["/clinician/insert.py", "/clinician/delete.py", "/clinician/update.py"], "/clinic_calendars/__init__.py": ["/clinic_calendars/client_calendar.py"], "/api_calls/__init__.py": ["/api_calls/get_events.py"], "/user_logging/login.py": ["/api_calls/__init__.py"]} |
def deleted(service, eventsID):
    """Delete event *eventsID* from the shared code-clinics calendar.

    Prints a confirmation on success; any API failure (e.g. the event does
    not exist) is reported with an error message.
    """
    try:
        service.events().delete(calendarId='teamtwotesting@gmail.com',
                                eventId=eventsID).execute()
    except:
        print("That is not an existing event")
    else:
        print("Your event has been deleted")
def delete_event(delete_id, service, user_name, events):
    """Delete the clinician's own slot whose ID matches *delete_id*.

    *delete_id* arrives as a list of CLI argument tokens and is joined into
    a single ID string. The event must be present in *events* and have been
    created by the logged-in user; otherwise an error message is printed.

    Fix: the original indexed the filtered list unconditionally and crashed
    with IndexError when no event carried the requested ID; that case now
    prints the ID-mismatch message instead.
    """
    delete_id = ''.join(delete_id)
    matches = [item for item in events if item["id"] == delete_id]
    if not matches:
        print("The ID's do not match, please try again.")
        return
    target = matches[0]
    if target["creator"]['email'] == (user_name + '@student.wethinkcode.co.za'):
        # target["id"] == delete_id is guaranteed by the filter above.
        deleted(service, delete_id)
    else:
        print("This is not your slot, please choose your own slot.")
73,310 | Meitie/WTC_Code_Clinics | refs/heads/main | /user_logging/login.py | import json, datetime, sys, os
USER_PATHS = os.path.abspath(os.path.join(os.path.dirname( __file__ ), '../'))
sys.path.insert(0, USER_PATHS + "/")
from api_calls import serives_maker, event_maker
def user_name_func():
    """Prompt for and return the student's WeThinkCode username.

    The caller combines the returned name with the student e-mail domain.
    """
    # Plain literal: the original used an f-string with no placeholders.
    user_name = input("Enter your WeThinkCode Student User_Name: ")
    return user_name
def log_in_checker():
    """
    Checks the user log in info:
    Checks if the expiry time in the user_info file has expired or not.
    Checks for the user roles and sets them up if they do not exist.

    NOTE(review): this returns a (user, role) tuple on the still-valid
    session path but a bare username string on every other path — callers
    must handle both shapes; confirm which is intended.
    """
    user_name = ''
    with open('.user_info.json', 'r+') as f:
        data = json.load(f)
        if data['expire'] == '':
            # First login: stamp an 8-hour session expiry and persist it.
            data['expire'] = datetime.datetime.now() + datetime.timedelta(hours=8)
            data['expire'] = data['expire'].strftime("%Y/%m/%d, %H:%M:%S")
            with open('.user_info.json', 'w+') as f:
                json.dump(data, f)
        elif datetime.datetime.strptime(data['expire'],'%Y/%m/%d, %H:%M:%S') > datetime.datetime.now():
            # Session still valid: reuse the stored identity.
            return data['user'],data['role']
        else:
            # Session expired: drop the cached token and reset the session
            # with a fresh 8-hour expiry.
            try:
                os.remove('token.pickle')
            except:
                pass
            data['user'] = ''
            data['role'] = ''
            data['expire'] = datetime.datetime.now() + datetime.timedelta(hours=8)
            data['expire'] = data['expire'].strftime("%Y/%m/%d, %H:%M:%S")
            with open('.user_info.json', 'w+') as f:
                json.dump(data, f)
        if data['user'] == '':
            # No stored user: prompt for one and build the Google API service.
            user_name = user_name_func()
            serives_maker.creating_service()
            data['user'] = user_name
            with open('.user_info.json', 'w+') as f:
                json.dump(data, f)
        return data['user']
| {"/user_logging/__init__.py": ["/user_logging/login.py", "/user_logging/logout.py"], "/patient/__init__.py": ["/patient/insert.py", "/patient/delete.py"], "/clinician/__init__.py": ["/clinician/insert.py", "/clinician/delete.py", "/clinician/update.py"], "/clinic_calendars/__init__.py": ["/clinic_calendars/client_calendar.py"], "/api_calls/__init__.py": ["/api_calls/get_events.py"], "/user_logging/login.py": ["/api_calls/__init__.py"]} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.