index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
39,283
|
JhansiSai/Pythontrng
|
refs/heads/master
|
/simple_intrest.py
|
p=int(input("enter principal amount"))
t=int(input("enter time"))
r=int(input("enter rate"))
print((p*t*r)/100)
|
{"/module_program.py": ["/fun_default_value.py"], "/paper_ceaser_game.py": ["/package_program/__init__.py"], "/package_program/__init__.py": ["/paper_ceaser_game.py"]}
|
39,284
|
JhansiSai/Pythontrng
|
refs/heads/master
|
/package_program/compound_intrest.py
|
p=int(input("enter principal amount"))
t=int(input("enter time"))
r=int(input("enter rate"))
|
{"/module_program.py": ["/fun_default_value.py"], "/paper_ceaser_game.py": ["/package_program/__init__.py"], "/package_program/__init__.py": ["/paper_ceaser_game.py"]}
|
39,285
|
JhansiSai/Pythontrng
|
refs/heads/master
|
/list_input_with_mapfun.py
|
n=int(input("enter number of elements:"))
a=list(map(int,input("Enter the numbers").strip().split(',')))
print(type(a))
print("List is -",a)
|
{"/module_program.py": ["/fun_default_value.py"], "/paper_ceaser_game.py": ["/package_program/__init__.py"], "/package_program/__init__.py": ["/paper_ceaser_game.py"]}
|
39,286
|
JhansiSai/Pythontrng
|
refs/heads/master
|
/class_program.py
|
class Person:
def __init__(self,a,b,c):
self.name=a
self.address=b
self.place=c
def display(self):
print(self.name,self.address,self.place)
class Employee(Person):
def __init__(self,dept,sal,name,address,place):
Person.__init__(self,name,address,place)
self.dept=dept
self.sal=sal
def display1(self):
print(self.name,self.address,self.place)
p=Person("jhansi","ongole","andhra pradesh")
p.display()
e1=Employee("dd","10000","john","blr","karnataka")
e1.display1()
e2=Employee("ww","1323","sad","qweq","wq")
e2.display1()
print(getattr(e1,"place"))
print(hasattr(e1,"smd"))
print(hasattr(e1,"name"))
print(setattr(e1,"age",8))
delattr(e1,"age") #
|
{"/module_program.py": ["/fun_default_value.py"], "/paper_ceaser_game.py": ["/package_program/__init__.py"], "/package_program/__init__.py": ["/paper_ceaser_game.py"]}
|
39,287
|
JhansiSai/Pythontrng
|
refs/heads/master
|
/hello.py
|
print('hello')
t=10
type(t)
i=10
j=20
if i < j:
print(j)
else:
print(i)#this is i values
('python \n'
'is \n'
'simple \n'
'language')
|
{"/module_program.py": ["/fun_default_value.py"], "/paper_ceaser_game.py": ["/package_program/__init__.py"], "/package_program/__init__.py": ["/paper_ceaser_game.py"]}
|
39,317
|
getflox/flox
|
refs/heads/master
|
/flox/profile/command.py
|
import click
from click import Context
from floxcore.context import Flox, Prompt
from floxcore.exceptions import ProfileException
@click.group(invoke_without_command=True)
@click.pass_obj
@click.pass_context
def profile(ctx: Context, flox: Flox):
"""
Manage development profile
"""
if not ctx.invoked_subcommand:
click.secho(f"Active profile: {Prompt.colourize(flox.profile)}")
@profile.command(name="list")
@click.pass_obj
def profile_list(flox: Flox):
"""List available profiles"""
click.echo("Environments:")
for f in flox.settings.flox.stages:
click.secho(("-> " if f == flox.profile else " ") + Prompt.colourize(f))
@profile.command(name="set")
@click.argument("name")
@click.pass_obj
def set_profile(flox: Flox, name: str):
"""Change active profile"""
if name not in flox.settings.get("global").stages:
raise ProfileException('Unable to select "{}". Not allowed stage.'.format(name))
flox.local.profile = name
click.secho("Selected profile: {}".format(Prompt.colourize(name)))
|
{"/flox/__init__.py": ["/flox/click.py", "/flox/config/__init__.py"], "/flox/config/secrets.py": ["/flox/config/__init__.py"], "/flox/plugins/command.py": ["/flox/plugins/manager.py"], "/flox/config/command.py": ["/flox/config/parameters.py", "/flox/config/remotes.py", "/flox/config/secrets.py"], "/flox/config/parameters.py": ["/flox/config/__init__.py"], "/flox/cli.py": ["/flox/config/command.py", "/flox/info/command.py", "/flox/plugins/command.py", "/flox/profile/command.py", "/flox/project/command.py"]}
|
39,318
|
getflox/flox
|
refs/heads/master
|
/flox/config/__init__.py
|
import click
from loguru import logger
from schema import Optional
from floxcore.config import ParamDefinition, Configuration
from floxcore.console import info, warning, prompt
from floxcore.utils.functions import list_get
from floxcore.utils.table import BaseTable
class GlobalConfiguration(Configuration):
def parameters(self):
return (
ParamDefinition("stages", "List of the project environments", multi=True, default=["dev", "test", "prod"]),
)
def schema(self):
schemas = {Optional("stages", default=["production", "test", "integration"]): list}
def apply_diff(settings, diff):
logger.debug(f"Applying diff: {diff} on {settings}")
for action, field, changes in diff:
logger.debug(f"Processing change with action: '{action}', on field: '{field}' and changes: '{changes}'")
if action == "add":
for index, value in changes:
if field == "":
settings[index] = value
else:
settings[field].insert(index, value)
elif action == "change":
if isinstance(field, str):
if changes[1]:
settings[field] = changes[1]
else:
settings.pop(field, None)
elif changes[1] is None:
del settings[field]
else:
settings[field[0]][field[1]] = changes[1]
elif action == "remove":
for index, value in changes:
if field == "":
settings.pop(index, None)
else:
settings[field].pop(index)
return settings
def show_diff(parameters, current_settings, new_settings):
"""Show difference between existing and new configuration"""
data = [("Key", "Old value", "New value")]
for param in [p for p in parameters if
new_settings.get(p.name) != current_settings.get(p.name)]:
data.append((
param.description,
click.style(str(current_settings.get(param.name)), fg="red") if current_settings.get(
param.name) is not None else "-",
click.style(str(new_settings.get(param.name)), fg="green") if new_settings.get(
param.name) is not None else "-"
))
if len(data) == 1:
warning("No configuration changes detected")
return False
click.secho("\nNew configuration:\n", fg="green")
table = BaseTable(data)
click.echo(table.table + "\n")
return click.confirm(click.style("Save plugin settings?", fg="green"))
def configuration():
return GlobalConfiguration()
|
{"/flox/__init__.py": ["/flox/click.py", "/flox/config/__init__.py"], "/flox/config/secrets.py": ["/flox/config/__init__.py"], "/flox/plugins/command.py": ["/flox/plugins/manager.py"], "/flox/config/command.py": ["/flox/config/parameters.py", "/flox/config/remotes.py", "/flox/config/secrets.py"], "/flox/config/parameters.py": ["/flox/config/__init__.py"], "/flox/cli.py": ["/flox/config/command.py", "/flox/info/command.py", "/flox/plugins/command.py", "/flox/profile/command.py", "/flox/project/command.py"]}
|
39,319
|
getflox/flox
|
refs/heads/master
|
/flox/info/command.py
|
import click
from floxcore.context import Flox
from floxcore.utils.table import BaseTable
@click.command(name="info")
@click.pass_obj
def flox_info(flox: Flox):
"""Display project information"""
labels = dict(
id="Project ID", name="Project Name", description="Description", tags="Tags"
)
data = [(
click.style("Attribute".ljust(30), fg="green"),
click.style("Value".ljust(30), fg="green")
)]
for name, label in labels.items():
data.append((label, flox.meta.get(name)))
table = BaseTable(data)
click.echo("")
click.echo(table.table)
click.echo("")
data = [(
click.style("Plugin".ljust(30), fg="green"),
click.style("Description".ljust(30), fg="green")
)]
for name in flox.meta.features:
data.append((name, flox.plugins.get(name)))
table = BaseTable(data)
click.echo(table.table)
click.echo("")
|
{"/flox/__init__.py": ["/flox/click.py", "/flox/config/__init__.py"], "/flox/config/secrets.py": ["/flox/config/__init__.py"], "/flox/plugins/command.py": ["/flox/plugins/manager.py"], "/flox/config/command.py": ["/flox/config/parameters.py", "/flox/config/remotes.py", "/flox/config/secrets.py"], "/flox/config/parameters.py": ["/flox/config/__init__.py"], "/flox/cli.py": ["/flox/config/command.py", "/flox/info/command.py", "/flox/plugins/command.py", "/flox/profile/command.py", "/flox/project/command.py"]}
|
39,320
|
getflox/flox
|
refs/heads/master
|
/flox/plugins/manager.py
|
from typing import List
import click
import requests
from plumbum import ProcessExecutionError
from requests import HTTPError
from floxcore.context import Flox
from floxcore.exceptions import PluginException
from floxcore.plugin import PluginDefinition
def _request() -> dict:
try:
r = requests.get("https://api.github.com/search/repositories?q=org:getflox+is:public+topic:flox-plugin")
r.raise_for_status()
result = r.json()
except HTTPError as e:
raise PluginException(f'Unable to fetch list of repositories from github: "{e}".') from e
except AttributeError as e:
raise PluginException(f'Unable to parse github repositories list: "{e}"') from e
return result
def search(name: str) -> List[PluginDefinition]:
result = _request()
# @TODO handle `incomplete_results`
for item in [i for i in result.get("items", []) if name in i["name"]]:
yield PluginDefinition(item["name"], item["description"], item["html_url"])
def install(url: str) -> None:
try:
from plumbum.cmd import pip
((pip["install", url]) > click.get_binary_stream('stdout'))()
except ImportError:
raise PluginException(f'You do not have "pip" installed.')
except ProcessExecutionError as e:
raise PluginException(f'Unable to install plugin: "{e.stderr}".')
def uninstall(name: str) -> None:
try:
from plumbum.cmd import pip
((pip["uninstall", name, "-y"]) > click.get_binary_stream('stdout'))()
except ImportError:
raise PluginException(f'You do not have "pip" installed.')
def list_installed(flox: Flox) -> List[PluginDefinition]:
return flox.plugins.plugins.values()
|
{"/flox/__init__.py": ["/flox/click.py", "/flox/config/__init__.py"], "/flox/config/secrets.py": ["/flox/config/__init__.py"], "/flox/plugins/command.py": ["/flox/plugins/manager.py"], "/flox/config/command.py": ["/flox/config/parameters.py", "/flox/config/remotes.py", "/flox/config/secrets.py"], "/flox/config/parameters.py": ["/flox/config/__init__.py"], "/flox/cli.py": ["/flox/config/command.py", "/flox/info/command.py", "/flox/plugins/command.py", "/flox/profile/command.py", "/flox/project/command.py"]}
|
39,321
|
getflox/flox
|
refs/heads/master
|
/flox/project/command.py
|
import re
from os import makedirs, getcwd
from os.path import join, abspath
import click
from slugify import slugify
from floxcore.command import execute_stages
from floxcore.console import info_box
from floxcore.context import Flox
from floxcore.exceptions import PluginException
def initiate_project_structure(flox: Flox, name, description, tag, features):
if not name:
name = click.prompt("Enter project name")
if not description:
description = click.prompt("Enter project description")
name = re.sub(r"\s+", " ", name)
project_id = slugify(name)
project_dir = join(getcwd(), project_id)
target = click.prompt("Target directory", default=project_dir)
if target != project_dir:
project_dir = abspath(target)
makedirs(join(project_dir, ".flox"), exist_ok=True)
new_project = Flox(project_dir)
new_project.meta.id = project_id
new_project.meta.name = name
new_project.meta.description = description
new_project.meta.tags = tag
new_project.meta.features = features
return new_project
@click.group(invoke_without_command=True, with_plugin_selector=True, params_from=["flox_project"])
@click.option("--name", help="Project name")
@click.option("--description", help="Longer project description")
@click.option("--tag", help="Tag, only used by plugins which are able to use it.", multiple=True)
@click.pass_obj
@click.pass_context
def project(ctx, flox: Flox, name: str, description: str, tag: list, **kwargs):
"""Initialise new project with flox"""
if ctx.invoked_subcommand:
return
if flox.initiated and not click.confirm("Trying to initialise already initialised project. Are you sure you would like to proceed?"):
raise click.Abort()
features = [k.replace("with_", "") for k, v in kwargs.items() if k.startswith("with_") and v]
project_flox = flox
if not flox.initiated:
project_flox = initiate_project_structure(flox, name, description, tag, features)
execute_stages(project_flox, "project",
features=kwargs.get("scope", []) or project_flox.meta.features, **kwargs)
@project.command(name="add")
@click.argument("feature")
@click.pass_obj
def project_add(flox: Flox, feature: str):
"""Add plugin features to active project"""
if feature in flox.meta.features:
raise PluginException(
f"Plugin {feature} is already enabled for '{flox.meta.name}' project"
)
flox.meta.features.append(feature)
flox.meta._save()
execute_stages(flox, "project", options={feature: True})
@project.command(name="remove")
@click.argument("feature")
@click.pass_obj
def project_remove(flox: Flox, feature: str):
"""Remove plugin features from active project"""
if feature not in flox.meta.features:
raise PluginException(
f"Plugin {feature} is not enabled for '{flox.meta.name}' project",
extra=f"You can list installed plugins with `flox info`"
)
flox.meta.features.remove(feature)
flox.meta._save()
info_box(
message=f"'{feature}' integration has been removed from current project",
extra="Please note that flox only disabled any future plugin actions, it's your responsibility "
"to remove / modify any relevant code from your project."
)
|
{"/flox/__init__.py": ["/flox/click.py", "/flox/config/__init__.py"], "/flox/config/secrets.py": ["/flox/config/__init__.py"], "/flox/plugins/command.py": ["/flox/plugins/manager.py"], "/flox/config/command.py": ["/flox/config/parameters.py", "/flox/config/remotes.py", "/flox/config/secrets.py"], "/flox/config/parameters.py": ["/flox/config/__init__.py"], "/flox/cli.py": ["/flox/config/command.py", "/flox/info/command.py", "/flox/plugins/command.py", "/flox/profile/command.py", "/flox/project/command.py"]}
|
39,322
|
getflox/flox
|
refs/heads/master
|
/flox/__init__.py
|
import sys
from loguru import logger
import floxcore
from flox.click import patched
from flox.config import GlobalConfiguration
from floxcore.plugin import Plugin
# we use native system options to detect verbose mode as soon as possible so we can get right logging level
# even before click framework kicks in, or any logger actions are called
logger.remove()
floxcore.DEBUG = "-v" in sys.argv
logger.add(sys.stderr, level="DEBUG" if floxcore.DEBUG else "WARNING")
class GlobalPlugin(Plugin):
def configuration(self):
return GlobalConfiguration()
def plugin():
return GlobalPlugin()
|
{"/flox/__init__.py": ["/flox/click.py", "/flox/config/__init__.py"], "/flox/config/secrets.py": ["/flox/config/__init__.py"], "/flox/plugins/command.py": ["/flox/plugins/manager.py"], "/flox/config/command.py": ["/flox/config/parameters.py", "/flox/config/remotes.py", "/flox/config/secrets.py"], "/flox/config/parameters.py": ["/flox/config/__init__.py"], "/flox/cli.py": ["/flox/config/command.py", "/flox/info/command.py", "/flox/plugins/command.py", "/flox/profile/command.py", "/flox/project/command.py"]}
|
39,323
|
getflox/flox
|
refs/heads/master
|
/flox/config/secrets.py
|
from typing import List
import dictdiffer
from flox.config import prompt, show_diff, apply_diff
from floxcore.config import ParamDefinition
from floxcore.console import info
from floxcore.context import Flox
from floxcore.plugin import Plugin
from loguru import logger
def save_secrets(flox: Flox, scope: str, profile: str, settings: dict):
for name, value in settings.items():
if not value:
logger.debug(f"Ignore empty secret for {name}")
continue
logger.debug(f"Storing secret `{name}` with value `{value}` in `{scope}` scope")
flox.secrets.put(name, value, scope=scope, profile=profile)
info(f"Updated {len(settings)} secrets")
def with_secrets(flox: Flox, plugin: str, scope, profile, parameters: List[ParamDefinition]):
for param in parameters:
param.name = f"{plugin}_{param.name}"
param.default = flox.secrets.getone(param.name, profile=profile) or param.default
return parameters
def interactive_secrets(flox: Flox, name: str, plugin: Plugin, scope, profile):
configuration = plugin.configuration()
new_values = {}
secrets = with_secrets(flox, name, scope, profile, configuration.secrets())
for param in secrets:
new_values[param.name] = prompt(param)
current_settings = dict(filter(lambda y: y[1], map(lambda x: (x.name, x.default), secrets)))
diff = list(dictdiffer.diff(current_settings, new_values))
new_settings = apply_diff(current_settings.copy(), diff)
if show_diff(secrets, current_settings, new_settings):
save_secrets(flox, scope, profile, new_settings)
|
{"/flox/__init__.py": ["/flox/click.py", "/flox/config/__init__.py"], "/flox/config/secrets.py": ["/flox/config/__init__.py"], "/flox/plugins/command.py": ["/flox/plugins/manager.py"], "/flox/config/command.py": ["/flox/config/parameters.py", "/flox/config/remotes.py", "/flox/config/secrets.py"], "/flox/config/parameters.py": ["/flox/config/__init__.py"], "/flox/cli.py": ["/flox/config/command.py", "/flox/info/command.py", "/flox/plugins/command.py", "/flox/profile/command.py", "/flox/project/command.py"]}
|
39,324
|
getflox/flox
|
refs/heads/master
|
/flox/plugins/command.py
|
import re
from dataclasses import astuple
from typing import List
import click
from click import Context
from flox.plugins.manager import search, PluginDefinition, install, list_installed, uninstall
from floxcore.console import success
from floxcore.context import Flox
from floxcore.exceptions import PluginException
from floxcore.utils.table import BaseTable
class PluginsTable(BaseTable):
HEADER = list(PluginDefinition.__annotations__.keys())
@classmethod
def build(cls, plugins: List[PluginDefinition]):
return cls([PluginsTable.HEADER] + [astuple(p) for p in plugins])
def build_plugin_name(name: str):
# @TODO Add tests
if name.startswith(("https://", "http://", "git://", "git+")):
"""
Handle cases like:
- https://github.com/getflox/flox-jira
- http://github.com/getflox/flox-jira
- git://github.com/getflox/flox-jira.git
- git+https://github.com/getflox/flox-jira
- https://github.com/getflox/flox-jira.git
and combinations of those.
"""
install_name = name
if not name.endswith(".git"):
install_name = name + ".git"
if not name.startswith("git+"):
install_name = "git+" + name
elif re.match(r"\w+/\w+", name):
"""
Handle cases like:
- getflox/flox-jira
"""
install_name = f"git+https://github.com/{name}.git"
else:
"""
Handle cases like:
- flox-jira
"""
try:
plugin = next(search(name))
install_name = f'git+{plugin.url}.git'
except StopIteration:
raise PluginException(f'Plugin "{name}" not found.')
return install_name
@click.group(invoke_without_command=True)
@click.pass_context
@click.pass_obj
def plugin(flox: Flox, ctx: Context):
"""Manage plugins"""
if ctx.invoked_subcommand:
return
plugins = list_installed(flox)
if not plugins:
raise PluginException("No plugins installed.")
click.echo(PluginsTable.build(plugins).table)
@plugin.command(name="uninstall")
@click.argument("name")
def plugin_uninstall(name):
"""Uninstall installed plugin"""
uninstall(name)
success(f'Plugin {name} uninstalled.')
@plugin.command(name="install")
@click.argument("name")
def plugin_install(name: str):
"""Install plugin"""
install(build_plugin_name(name))
success(f'Plugin "{name}" installed.')
@plugin.command(name="search")
@click.argument("name")
def plugin_search(name: str):
"""Search plugin"""
result = search(name)
table = PluginsTable.build(result).table.replace(name, click.style(name, fg="red"))
click.echo(table)
|
{"/flox/__init__.py": ["/flox/click.py", "/flox/config/__init__.py"], "/flox/config/secrets.py": ["/flox/config/__init__.py"], "/flox/plugins/command.py": ["/flox/plugins/manager.py"], "/flox/config/command.py": ["/flox/config/parameters.py", "/flox/config/remotes.py", "/flox/config/secrets.py"], "/flox/config/parameters.py": ["/flox/config/__init__.py"], "/flox/cli.py": ["/flox/config/command.py", "/flox/info/command.py", "/flox/plugins/command.py", "/flox/profile/command.py", "/flox/project/command.py"]}
|
39,325
|
getflox/flox
|
refs/heads/master
|
/flox/config/command.py
|
import hashlib
from os.path import join, isdir
from shutil import rmtree
import anyconfig
import click
from click import Abort
from loguru import logger
from flox.config.parameters import interactive_parameters
from flox.config.remotes import fetch_remote
from flox.config.secrets import interactive_secrets
from floxcore import CONFIG_DIRS
from floxcore.command import execute_stages
from floxcore.console import info, warning, warning_box, success_box, success
from floxcore.context import Flox
from floxcore.exceptions import MissingPluginException, ConfigurationException
@click.group(name="config", invoke_without_command=True)
@click.option(
"--scope",
help="Save configuration to given scope",
type=click.Choice(["system", "user", "project"], case_sensitive=False),
default="project",
)
@click.option("--profile", help="Save configuration for given profile. "
"By default settings are stored for all profiles in given scope.")
@click.option("--without-secrets", help="Skip secrets configuration", default=False, is_flag=True)
@click.option("--without-parameters", help="Skip parameters configuration", default=False, is_flag=True)
@click.option("--plugin", multiple=True)
@click.pass_obj
@click.pass_context
def config(ctx, flox: Flox, scope, profile, plugin, without_secrets, without_parameters):
"""
Run configuration wizard for flox.
"""
if ctx.invoked_subcommand:
return
if not plugin:
raise ConfigurationException("You need to specify at least one plugin to be configured",
extra="Use flox config --plugin=<plugin-name>")
if not flox.initiated and scope == "project":
warning("Unable to use scope project outside of project directory. Changing scope to 'user'")
scope = "user"
for name in plugin:
if not flox.plugins.has(name):
raise MissingPluginException(name)
info(f"Starting configuration of {name} for '{scope}' scope" + (f" and '{profile}' profile" if profile else ""))
if not without_parameters:
interactive_parameters(flox, name, flox.plugins.get(name), scope, profile)
if not without_secrets:
if scope == "system" and not click.confirm(warning("Flox can't manage secrets on the system level. "
"If you like to continue all secrets would be stored at "
"user level", no_print=True)):
raise Abort
interactive_secrets(flox, name, flox.plugins.get(name), scope, profile)
execute_stages(
flox, "configuration_change", features=[name]
)
@config.command(name="show")
@click.pass_obj
def show(flox: Flox):
"""Display current configuration"""
for section, settings in flox.settings.items():
info(f"Configuration of '{section}'")
click.echo(anyconfig.dumps(settings, ac_parser="toml"))
@config.command(name="add")
@click.argument("remote")
@click.pass_obj
def remotes_add(flox: Flox, remote):
"""Add new remote configuration"""
# if flox.remotes.has(remote):
# raise ConfigurationException(f"Remote configuration '{remote}' already exists")
warning_box("Remote configuration sources are potentially dangerous, you should only add configuration "
"from trusted sources")
if not click.confirm(click.style(f"Would you still like to add {remote} as configuration source?", fg="yellow")):
raise Abort
config_type = "local"
if remote.lower().startswith(("http://", "https://")):
config_type = "remote"
elif remote.lower().startswith("git") or remote.endswith(".git"):
config_type = "git"
flox.remotes.set(remote, dict(
type=config_type,
hash=hashlib.sha256(remote.encode("UTF-8")).hexdigest()
))
fetch_remote(flox, remote)
success_box(f"Remote source '{remote}' has been added as a configuration source")
@config.command(name="remove")
@click.argument("remote")
@click.pass_obj
def remotes_remove(flox: Flox, remote):
"""Remove remote configuration"""
if not flox.remotes.has(remote):
raise ConfigurationException(f"Unable to find '{remote}' remote configuration")
remote_config_dir = join(CONFIG_DIRS.get("user"), "externals", flox.remotes.get(remote).get("hash"))
if remote_config_dir.startswith(CONFIG_DIRS.get("user")) and isdir(remote_config_dir):
logger.debug(f"Removed local cache of remote config: {remote_config_dir}")
rmtree(remote_config_dir)
flox.remotes.remove(remote)
success_box(f"Remote source '{remote}' has been removed")
@config.command(name="update-remotes")
@click.pass_obj
def remotes_update(flox: Flox):
"""Update all remote configurations"""
for source in flox.remotes.all().keys():
fetch_remote(flox, source)
success(f"Updated: {source}")
success_box(f"Remote sources updated")
|
{"/flox/__init__.py": ["/flox/click.py", "/flox/config/__init__.py"], "/flox/config/secrets.py": ["/flox/config/__init__.py"], "/flox/plugins/command.py": ["/flox/plugins/manager.py"], "/flox/config/command.py": ["/flox/config/parameters.py", "/flox/config/remotes.py", "/flox/config/secrets.py"], "/flox/config/parameters.py": ["/flox/config/__init__.py"], "/flox/cli.py": ["/flox/config/command.py", "/flox/info/command.py", "/flox/plugins/command.py", "/flox/profile/command.py", "/flox/project/command.py"]}
|
39,326
|
getflox/flox
|
refs/heads/master
|
/flox/config/remotes.py
|
from os import makedirs
from os.path import join, basename
from urllib.parse import urlparse
import requests
from loguru import logger
from floxcore import CONFIG_DIRS
from floxcore.context import Flox
from floxcore.exceptions import ConfigurationException
from floxcore.remotes import fetch_remote_git, copy_local_config
def fetch_remote_file(flox: Flox, storage, source):
logger.debug(f"Adding {source} as external storage with http requests")
sources = [source] + [source.replace(".toml", f".{f}.toml") for f in flox.settings.get("global").stages]
for s in sources:
try:
logger.debug(f"Trying to download '{s}'")
response = requests.get(s)
response.raise_for_status()
destination = join(storage, basename(urlparse(s).path))
with open(destination, "wb+") as f:
f.write(response.content)
logger.debug(f"Saved remote config into: '{destination}'")
except Exception as e:
if s == source:
raise ConfigurationException("Unable to fetch remote configuration", extra=str(e))
logger.debug(f"Failed to download external config with: {e}")
def fetch_remote(flox: Flox, remote):
source = flox.remotes.get(remote)
storage = join(CONFIG_DIRS.get("user"), "externals", str(source.get("hash")))
makedirs(storage, exist_ok=True)
if source.get("type") == "git":
return fetch_remote_git(flox, storage, remote)
elif source.get("type") == "local":
return copy_local_config(flox, storage, remote)
else:
return fetch_remote_file(flox, storage, remote)
|
{"/flox/__init__.py": ["/flox/click.py", "/flox/config/__init__.py"], "/flox/config/secrets.py": ["/flox/config/__init__.py"], "/flox/plugins/command.py": ["/flox/plugins/manager.py"], "/flox/config/command.py": ["/flox/config/parameters.py", "/flox/config/remotes.py", "/flox/config/secrets.py"], "/flox/config/parameters.py": ["/flox/config/__init__.py"], "/flox/cli.py": ["/flox/config/command.py", "/flox/info/command.py", "/flox/plugins/command.py", "/flox/profile/command.py", "/flox/project/command.py"]}
|
39,327
|
getflox/flox
|
refs/heads/master
|
/flox/click.py
|
import click
from click import ClickException, Choice
from click.exceptions import Exit, Abort
from loguru import logger
import floxcore
from floxcore.console import error_box
from floxcore.exceptions import FloxException
GLOBAL_OPTIONS = (
click.core.Option(("-v",), default=False, is_flag=True, help="Verbose mode - show debug info"),
)
def append_unique(params: list, *options):
existing = list(map(lambda x: x.name, params))
for option in options:
if option.name in existing:
continue
params.append(option)
existing.append(option.name)
class FloxCommand(click.core.Command):
def __init__(self, name, context_settings=None, callback=None, params=None, help=None, epilog=None, short_help=None,
options_metavar="[OPTIONS]", add_help_option=True, no_args_is_help=False, hidden=False,
deprecated=False, params_from=None, with_plugin_selector=False):
super().__init__(name, context_settings, callback, params, help, epilog, short_help, options_metavar,
add_help_option, no_args_is_help, hidden, deprecated)
self.with_plugin_selector = with_plugin_selector
self.params_from = params_from or []
self.params.extend(
GLOBAL_OPTIONS
)
self.global_values = {}
def invoke(self, ctx):
state = {}
if hasattr(self, "pre_invoke"):
state = self.pre_invoke(ctx.obj)
for opt in GLOBAL_OPTIONS:
self.global_values[opt.name] = ctx.params.pop(opt.name, None)
try:
result = super().invoke(ctx)
except (ClickException, Exit, Abort) as e:
raise e
except Exception as e:
if floxcore.DEBUG:
logger.exception(e)
raise FloxException(str(e))
finally:
if hasattr(self, "post_invoke"):
self.post_invoke(ctx.obj, pre_invoke_state=state)
return result
def get_params(self, ctx):
for param_name in self.params_from:
for plugin in ctx.obj.plugins.handlers(f"command_options_{param_name}").values():
for command_name, kwargs in plugin.handle(f"command_options_{param_name}"):
append_unique(self.params, click.core.Option((command_name,), **kwargs))
if self.with_plugin_selector:
self._add_plugins(ctx)
return super().get_params(ctx)
def _add_plugins(self, ctx):
if ctx.obj.initiated:
active = ctx.obj.meta.features
append_unique(
self.params,
click.core.Option(
(f"--scope",),
type=Choice(active),
show_choices=True,
multiple=True,
help="Execute stages only from given plugins",
)
)
else:
for name, plugin in ctx.obj.plugins.plugins.items():
append_unique(
self.params,
click.core.Option(
(f"--with-{name}/--without-{name}",),
default=True,
is_flag=True,
help=plugin.description,
)
)
click.core.Command = FloxCommand
click.decorators.Command = FloxCommand
click.Command = FloxCommand
ClickMultiCommand = type('MultiCommand', (FloxCommand,), dict(click.core.MultiCommand.__dict__))
class FloxMultiCommand(ClickMultiCommand):
def __init__(
self,
name=None,
invoke_without_command=False,
no_args_is_help=None,
subcommand_metavar=None,
chain=False,
result_callback=None,
**attrs
):
super().__init__(name, invoke_without_command, no_args_is_help, subcommand_metavar, chain, result_callback,
**attrs)
click.core.MultiCommand = FloxMultiCommand
click.MultiCommand = FloxMultiCommand
ClickGroup = type('Group', (click.core.MultiCommand,), dict(click.core.Group.__dict__))
class FloxGroup(ClickGroup):
def __init__(self, name=None, commands=None, **attrs):
super().__init__(name, commands, **attrs)
click.core.Group = FloxGroup
click.decorators.Group = FloxGroup
click.Group = FloxGroup
def show(self):
error_box(message=self.message)
click.ClickException.show = show
patched = True
|
{"/flox/__init__.py": ["/flox/click.py", "/flox/config/__init__.py"], "/flox/config/secrets.py": ["/flox/config/__init__.py"], "/flox/plugins/command.py": ["/flox/plugins/manager.py"], "/flox/config/command.py": ["/flox/config/parameters.py", "/flox/config/remotes.py", "/flox/config/secrets.py"], "/flox/config/parameters.py": ["/flox/config/__init__.py"], "/flox/cli.py": ["/flox/config/command.py", "/flox/info/command.py", "/flox/plugins/command.py", "/flox/profile/command.py", "/flox/project/command.py"]}
|
39,328
|
getflox/flox
|
refs/heads/master
|
/flox/config/parameters.py
|
import os
from os.path import join, isfile, dirname
from typing import Tuple
import anyconfig
import click
import dictdiffer
from flox.config import prompt, show_diff, apply_diff
from floxcore import CONFIG_DIRS
from floxcore.config import ParamDefinition, load_settings
from floxcore.console import info
from floxcore.context import Flox
from floxcore.plugin import Plugin
def with_settings(settings: dict, parameters: Tuple[ParamDefinition]) -> Tuple[ParamDefinition]:
"""Set existing configuration values as defaults for parameters"""
for param in parameters:
param.default = settings.get(param.name, param.default)
yield param
def save_settings(flox, name, scope, profile, settings, remove=None):
    """Save new configuration settings to scoped file.

    :param flox: active Flox context (supplies the project working dir).
    :param name: configuration section (plugin) name.
    :param scope: key into CONFIG_DIRS; anything else falls back to the
        project-local ``.flox`` directory.
    :param profile: optional profile name; selects ``settings.<profile>.toml``.
    :param settings: mapping of values merged into the section.
    :param remove: optional iterable of keys to delete from the section.
    """
    if remove is None:
        remove = []
    file_name = f"settings.{profile}.toml" if profile else "settings.toml"
    file_path = join(CONFIG_DIRS.get(scope, join(flox.working_dir, ".flox")), file_name)
    # Fix: read the existing file via a context manager — the previous
    # open(file_path).read() leaked the file handle.
    if isfile(file_path):
        with open(file_path) as existing:
            current = existing.read()
    else:
        current = ""
    scoped_config = anyconfig.loads(current, ignore_missing=True, ac_parser="toml")
    section = scoped_config.get(name, {})
    section.update(settings)
    for r in remove:
        section.pop(r, None)
    scoped_config[name] = section
    os.makedirs(dirname(file_path), exist_ok=True)
    with open(file_path, "w+") as f:
        anyconfig.dump(scoped_config, f, ac_parser="toml")
    info(f"Configuration saved: {click.format_filename(file_path)}")
def build_new_settings(settings, name, new_values):
    """Merge *new_values* into the current section *name* of *settings*.

    Returns (new_settings, has_changes, removals): the merged dict, whether
    anything differed, and the list of keys deleted by the diff.
    """
    # dictdiffer.diff yields ('add'|'change'|'remove', path, detail) tuples.
    dd = list(dictdiffer.diff(settings.get(name).to_dict(), new_values))
    new_settings = apply_diff(dict(settings.get(name).to_dict()), dd)
    has_changes = bool(dd)
    removals = []
    # Only the first 'remove' entry is inspected; its third element is a list
    # of (key, old_value) pairs to drop. NOTE(review): assumes dictdiffer
    # emits at most one top-level 'remove' entry here — verify.
    for removal in next(filter(lambda x: x[0] == "remove", dd), [None, None, []])[2]:
        new_settings.pop(removal[0], None)
        removals.append(removal[0])
        has_changes = True
    return new_settings, has_changes, removals
def interactive_parameters(flox: Flox, name: str, plugin: Plugin, scope, profile):
    """Prompt the user for every parameter of *plugin* and persist changes.

    Existing settings pre-fill the prompts; when the user confirms the shown
    diff, the result is saved to the file selected by *scope*/*profile*.
    """
    configuration = plugin.configuration()
    # NOTE(review): Flox.plugins is read from the class, not the *flox*
    # instance — looks like a shared plugin manager, but verify.
    settings = load_settings(Flox.plugins, flox.initiated, flox.working_dir, profile)
    new_values = {}
    for param in with_settings(settings.get(name), configuration.parameters()):
        # Skip parameters whose prerequisite answer was falsy/absent.
        if param.depends_on and not new_values.get(param.depends_on):
            continue
        # value_of lets a parameter default to another parameter's answer.
        if param.value_of:
            param.default = param.default or new_values.get(param.value_of)
        new_values[param.name] = prompt(param)
    new_settings, has_changes, removals = build_new_settings(settings, name, new_values)
    if show_diff(configuration.parameters(), settings.get(name), new_settings):
        save_settings(flox, name, scope, profile, new_settings, removals)
|
{"/flox/__init__.py": ["/flox/click.py", "/flox/config/__init__.py"], "/flox/config/secrets.py": ["/flox/config/__init__.py"], "/flox/plugins/command.py": ["/flox/plugins/manager.py"], "/flox/config/command.py": ["/flox/config/parameters.py", "/flox/config/remotes.py", "/flox/config/secrets.py"], "/flox/config/parameters.py": ["/flox/config/__init__.py"], "/flox/cli.py": ["/flox/config/command.py", "/flox/info/command.py", "/flox/plugins/command.py", "/flox/profile/command.py", "/flox/project/command.py"]}
|
39,329
|
getflox/flox
|
refs/heads/master
|
/flox/cli.py
|
from os import getcwd
import click
from click_plugins import with_plugins
from click_shell import shell
from pkg_resources import iter_entry_points
from flox.config.command import config
from flox.info.command import flox_info
from flox.plugins.command import plugin
from flox.profile.command import profile
from flox.project.command import project
from floxcore.context import Flox, locate_project_root
from floxcore.exceptions import FloxException
# Single shared Flox context handed to every command through click's obj.
instance = Flox()
CONTEXT_SETTINGS = dict(auto_envvar_prefix="FLOX", obj=instance)
@with_plugins(iter_entry_points("flox.plugin.command"))
@shell(prompt=instance.prompt, context_settings=CONTEXT_SETTINGS)
@click.pass_context
def cli(ctx):
    """
    Consistent project management and automation with flox
    """
    # Without a subcommand click-shell would drop into an interactive shell;
    # refuse that when the current directory is not an initialised project.
    if not instance.initiated and not ctx.invoked_subcommand:
        raise FloxException("Unable to load interactive shell for uninitialised project.")
cli.add_command(config)
cli.add_command(plugin)
cli.add_command(project)
# Project-scoped commands are only exposed inside an initialised project.
if locate_project_root(getcwd()):
    cli.add_command(flox_info)
    cli.add_command(profile)
    Flox.plugins.add_commands(cli)
if __name__ == "__main__":
    cli()
|
{"/flox/__init__.py": ["/flox/click.py", "/flox/config/__init__.py"], "/flox/config/secrets.py": ["/flox/config/__init__.py"], "/flox/plugins/command.py": ["/flox/plugins/manager.py"], "/flox/config/command.py": ["/flox/config/parameters.py", "/flox/config/remotes.py", "/flox/config/secrets.py"], "/flox/config/parameters.py": ["/flox/config/__init__.py"], "/flox/cli.py": ["/flox/config/command.py", "/flox/info/command.py", "/flox/plugins/command.py", "/flox/profile/command.py", "/flox/project/command.py"]}
|
39,330
|
WoTTsecurity/agent
|
refs/heads/master
|
/agent/__main__.py
|
import argparse
import asyncio
import logging
from . import run, get_device_id, get_open_ports, say_hello, get_claim_token, get_claim_url, upgrade, executor
from . import fetch_credentials, fetch_device_metadata, setup_logging
from .security_helper import patch_sshd_config
logger = logging.getLogger('agent')
def main():
    """Entry point for the wott-agent CLI: parse arguments and dispatch."""
    # Simple subcommands: name -> (handler, help text).
    actions = {
        'whoami': (get_device_id, "Print node ID."),
        'portscan': (get_open_ports, "Print open ports."),
        'test-cert': (say_hello, "Validate node certificate."),
        'claim-token': (get_claim_token, "Print claim token."),
        'claim-url': (get_claim_url, "Print claim URL."),
        'daemon': (run_daemon, "Run as daemon"),
        'node-metadata': (fetch_device_metadata, "Fetch node specific, secret metadata."),
        'credentials': (fetch_credentials, "Fetch credentials."),
    }
    # Patchable sshd_config settings: CLI name -> (description, sshd parameter).
    patches = {
        'openssh-empty-password':
            ('OpenSSH: Disable logins with empty password', 'PermitEmptyPasswords'),
        'openssh-root-login':
            ('OpenSSH: Disable root login', 'PermitRootLogin'),
        'openssh-password-auth':
            ('OpenSSH: Disable password authentication', 'PasswordAuthentication'),
        'openssh-agent-forwarding':
            ('OpenSSH: Disable agent forwarding', 'AllowAgentForwarding'),
        'openssh-protocol':
            ('\tOpenSSH: Force protocol version 2', 'Protocol'),
        'openssh-client-alive-interval':
            ('OpenSSH: Active Client Interval', 'ClientAliveInterval'),
        'openssh-client-alive-count-max':
            ('OpenSSH: Active Client Max Count', 'ClientAliveCountMax'),
        'openssh-host-based-auth':
            ('OpenSSH: Host-based Authentication', 'HostbasedAuthentication'),
        'openssh-ignore-rhosts':
            ('OpenSSH: Ignore rhosts', 'IgnoreRhosts'),
        'openssh-log-level':
            ('\tOpenSSH: Log Level', 'LogLevel'),
        'openssh-login-grace-time':
            ('OpenSSH: Login Grace Time', 'LoginGraceTime'),
        'openssh-max-auth-tries':
            ('OpenSSH: Max Auth Tries', 'MaxAuthTries'),
        'openssh-permit-user-env':
            ('OpenSSH: Permit User Environment', 'PermitUserEnvironment'),
        'openssh-x11-forwarding':
            ('OpenSSH: X11 Forwarding', 'X11Forwarding')
    }
    patch_help_string = "One of the following:\n" + "\n".join(
        ["{}\t{}".format(k, v[0]) for k, v in patches.items()])
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter,
        description="""
    Streamlined security audit for your infrastructure.
    When called without arguments, generates node certificate
    or renews it if necessary.
    """,
        prog="wott-agent")
    subparsers = parser.add_subparsers(help='Choose one of the following:', dest='action')
    # Register the simple subcommands declared in `actions` above.
    for action, desc in actions.items():
        subparsers.add_parser(action, help=desc[1],
                              formatter_class=argparse.RawTextHelpFormatter)
    parser_patch = subparsers.add_parser('patch', help='patch the system',
                                         formatter_class=argparse.RawTextHelpFormatter)
    parser_patch.add_argument('patch_name',
                              choices=patches.keys(),
                              metavar='patch_name',
                              help=patch_help_string)
    parser_upgrade = subparsers.add_parser('upgrade', help='upgrade packages',
                                           formatter_class=argparse.RawTextHelpFormatter)
    parser_upgrade.add_argument('packages', metavar='pkg', nargs='+', help='packages to upgrade')
    parser.add_argument(
        '--dev',
        required=False,
        action="store_true",
        help="Developer mode: work with locally running server.")
    parser.add_argument(
        '--debug',
        required=False,
        action="store_true",
        help="Debug mode: set log level to DEBUG.")
    args = parser.parse_args()
    level = logging.DEBUG if args.debug is True else None
    # Daemon mode logs thread names since tasks run concurrently.
    if args.action == 'daemon':
        setup_logging(level=level, log_format="%(asctime)s - %(name)s - %(threadName)s - %(levelname)s - %(message)s")
    else:
        setup_logging(level=level, daemon=False,
                      log_format="%(asctime)s - %(name)s - %(levelname)s - %(message)s")
    action = args.action
    # Dispatch. 'patch' and 'upgrade' perform their work and then ping the
    # backend so the new state is reported immediately.
    if not action:
        logger.info("start in ping mode...")
        run(ping=True, dev=args.dev)
    elif action == 'daemon':
        logger.info("start in daemon mode...")
        run_daemon(dev=args.dev)
    elif action == 'patch':
        patch_sshd_config(patches[args.patch_name][1])
        run(ping=True, dev=args.dev)
    elif action == 'upgrade':
        upgrade(args.packages)
        run(ping=True, dev=args.dev)
    else:
        # Plain informational actions: ensure cert exists, then print result.
        run(ping=False, dev=args.dev)
        print(actions[args.action][0](dev=args.dev))
# Scheduling intervals / timeouts (seconds) for the daemon's periodic tasks.
PING_INTERVAL = 60 * 60
PING_TIMEOUT = 10 * 60
CREDS_INTERVAL = 15 * 60
CREDS_TIMEOUT = 1 * 60
# secret device-specific metadata fetching time constants
DEV_MD_INTERVAL = 15 * 60
DEV_MD_TIMEOUT = 1 * 60
def run_daemon(dev):
    """Schedule the periodic ping/credentials/metadata tasks and block in the
    executor's event loop until interrupted."""
    exes = [
        executor.Executor(PING_INTERVAL, run, (True, dev, logger), timeout=PING_TIMEOUT),
        executor.Executor(CREDS_INTERVAL, fetch_credentials, (dev, logger), timeout=CREDS_TIMEOUT),
        executor.Executor(DEV_MD_INTERVAL, fetch_device_metadata, (dev, logger), timeout=DEV_MD_TIMEOUT)
    ]
    futures = [executor.schedule(exe) for exe in exes]
    def stop_exe():
        # Cancel the scheduled futures, stop the executors, stop the loop.
        logger.info('Stopping all tasks...')
        for fut in futures:
            fut.cancel()
        for exe in exes:
            exe.stop()
        asyncio.get_event_loop().stop()
        logger.info('All tasks stopped.')
    try:
        executor.spin()
        logger.info('Daemon exiting.')
    except KeyboardInterrupt:
        logger.info('Daemon was interrupted!')
        stop_exe()
if __name__ == '__main__':
    main()
|
{"/agent/__main__.py": ["/agent/__init__.py", "/agent/security_helper.py"], "/agent/security_helper.py": ["/agent/os_helper.py", "/agent/__init__.py"], "/agent/__init__.py": ["/agent/executor.py", "/agent/os_helper.py"], "/tests/test_agent.py": ["/agent/__init__.py", "/agent/journal_helper.py", "/agent/os_helper.py", "/agent/iptables_helper.py", "/agent/security_helper.py"], "/agent/os_helper.py": ["/agent/__init__.py"], "/setup.py": ["/version.py"], "/agent/iptables_helper.py": ["/agent/__init__.py"], "/tests/conftest.py": ["/agent/iptables_helper.py"]}
|
39,331
|
WoTTsecurity/agent
|
refs/heads/master
|
/agent/journal_helper.py
|
from systemd import journal
import time
def get_journal_records():
    '''
    Get syslog records as returned by
    journalctl -l SYSLOG_FACILITY=10 --priority=5 --since "1 hour ago"

    Returns a systemd journal Reader limited to the current boot,
    LOG_INFO priority, the authpriv facility (10), and the last hour.
    '''
    j = journal.Reader()
    # Only records from the current boot.
    j.this_boot()
    j.log_level(journal.LOG_INFO)
    # Seek back one hour (60**2 seconds) from now.
    last_hour = time.time() - 60**2
    j.seek_realtime(last_hour)
    # SYSLOG_FACILITY=10 is authpriv (sshd/PAM messages).
    j.add_match(SYSLOG_FACILITY=10)
    return j
def logins(entries):
    '''
    Returns the number of failed or successful login attempts per user as
    {'<user>': {'success': <N>, 'failed': '<N>'}, ...}
    Failed attempts are logged in the system journal like this:
    pam_unix(sshd:auth): authentication failure; logname= uid=0 euid=0 tty=ssh ruser= rhost=<ip> user=<user>
    PAM 1 more authentication failure; logname= uid=0 euid=0 tty=ssh ruser= rhost=<ip> user=<user>
    PAM <n> more authentication failures; logname= uid=0 euid=0 tty=ssh ruser= rhost=<ip> user=<user>
    Successful attempts are logged like this:
    pam_unix(sshd:session): session opened for user pi by (uid=0)

    An empty-string key collects failures where no user= field was present.
    '''
    MSG_AUTH_FAIL = 'pam_unix(sshd:auth): authentication failure;'
    MSG_SESSION_OPENED = 'pam_unix(sshd:session): session opened for user'
    MSG_MORE_FAILURE = 'more authentication failure'
    counters = {}

    def bucket(user):
        # Lazily create the per-user counter record.
        return counters.setdefault(user, {'failed': 0, 'success': 0})

    def user_from_suffix(message):
        # The user name (if any) is the trailing 'user=<name>' token.
        token = message.split()[-1]
        return token.split('=')[1] if token.startswith('user=') else ''

    for record in entries:
        message = record['MESSAGE']
        if message.startswith(MSG_AUTH_FAIL):
            bucket(user_from_suffix(message))['failed'] += 1
        elif message.startswith('PAM ') and MSG_MORE_FAILURE in message:
            # 'PAM <n> more authentication failures; ...' aggregates n attempts.
            bucket(user_from_suffix(message))['failed'] += int(message.split()[1])
        elif message.startswith(MSG_SESSION_OPENED):
            # '... session opened for user <name> by (uid=0)' -> third-last token.
            bucket(message.split()[-3])['success'] += 1
    return counters
def logins_last_hour():
    # Convenience wrapper: per-user login statistics for the last hour.
    return logins(get_journal_records())
|
{"/agent/__main__.py": ["/agent/__init__.py", "/agent/security_helper.py"], "/agent/security_helper.py": ["/agent/os_helper.py", "/agent/__init__.py"], "/agent/__init__.py": ["/agent/executor.py", "/agent/os_helper.py"], "/tests/test_agent.py": ["/agent/__init__.py", "/agent/journal_helper.py", "/agent/os_helper.py", "/agent/iptables_helper.py", "/agent/security_helper.py"], "/agent/os_helper.py": ["/agent/__init__.py"], "/setup.py": ["/version.py"], "/agent/iptables_helper.py": ["/agent/__init__.py"], "/tests/conftest.py": ["/agent/iptables_helper.py"]}
|
39,332
|
WoTTsecurity/agent
|
refs/heads/master
|
/agent/security_helper.py
|
import copy
import crypt
import logging
import os
import shutil
import socket
import subprocess
import time
from enum import IntEnum
from hashlib import sha256
from pathlib import Path
from socket import SocketKind
import psutil
import spwd
from sh import ErrorReturnCode_1, ErrorReturnCode_255
from .os_helper import CloudProvider, detect_cloud, is_debian, kernel_cmdline, confirmation
logger = logging.getLogger('agent')
def check_for_default_passwords(config_path):
    """
    Check if the 'pi' user current password hash is in our list of default password hashes.

    Despite the historical summary above, every local user from the shadow
    database is checked, and the list of matching usernames is returned.
    Requires permission to read /etc/shadow (i.e. root).
    """
    # pass_hashes.txt contains 'username:plaintext_password' lines.
    base_dir = Path(config_path)
    pass_hashes_file_path = base_dir.joinpath('pass_hashes.txt')  # For deb installation.
    if not pass_hashes_file_path.is_file():
        # Fall back to the copy shipped in the source tree.
        base_dir = Path(__file__).resolve().parent.parent
        pass_hashes_file_path = base_dir.joinpath('misc/pass_hashes.txt')
    with pass_hashes_file_path.open() as f:
        read_data = f.read()
    known_passwords = {}
    for username_password in read_data.splitlines():
        # maxsplit=1 keeps passwords containing ':' intact.
        username, password = username_password.split(':', maxsplit=1)
        pw = known_passwords.get(username, [])
        pw.append(password)
        known_passwords[username] = pw
    def hash_matches(pwdp, plaintext_password):
        # Re-crypt the candidate using the algorithm prefix + salt taken from
        # the shadow entry (everything up to the last '$'), then compare.
        i = pwdp.rfind('$')
        salt = pwdp[:i]
        crypted = crypt.crypt(plaintext_password, salt)
        return crypted == pwdp
    usernames = set()
    for shadow in spwd.getspall():
        encrypted_password = shadow.sp_pwdp
        for password in known_passwords.get(shadow.sp_namp, []):
            if hash_matches(encrypted_password, password):
                usernames.add(shadow.sp_namp)
    return list(usernames)
def netstat_scan():
    """
    Returns all open inet connections with their addresses and PIDs.

    Returns a 2-tuple (connections, listening): the first list holds sockets
    with a remote address, the second sockets with only a local address.
    """
    connections = psutil.net_connections(kind='inet')
    return (
        # Established/active connections (have a remote address).
        [{
            'ip_version': 4 if c.family == socket.AF_INET else 6,
            'type': 'udp' if c.type == socket.SOCK_DGRAM else 'tcp',
            'local_address': c.laddr,
            'remote_address': c.raddr,
            'status': c.status if c.type == socket.SOCK_STREAM else None,
            'pid': c.pid
        } for c in connections if c.raddr],
        # Listening/bound sockets (local address only).
        [{
            'ip_version': 4 if c.family == socket.AF_INET else 6,
            'host': c.laddr[0],
            'port': c.laddr[1],
            'proto': {SocketKind.SOCK_STREAM: 'tcp', SocketKind.SOCK_DGRAM: 'udp'}.get(c.type),
            'state': c.status if c.type == socket.SOCK_STREAM else None,
            'pid': c.pid
        } for c in connections if not c.raddr and c.laddr]
    )
def process_scan():
    """Collect pid/name/cmdline/username for every running process;
    processes running inside a docker cgroup get 'container': 'docker'."""
    collected = []
    for process in psutil.process_iter():
        try:
            details = process.as_dict(attrs=['pid', 'name', 'cmdline', 'username'])
            cpuset_path = Path('/proc/{}/cpuset'.format(details['pid']))
            if cpuset_path.exists():
                with cpuset_path.open() as cpuset_file:
                    # Docker containers expose a /docker/<id> cpuset path.
                    if cpuset_file.read().startswith('/docker/'):
                        details['container'] = 'docker'
            collected.append(details)
        except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess):
            # Processes may vanish or be unreadable mid-scan; skip them.
            pass
    return collected
def is_app_armor_enabled():
    """
    Returns a True/False if AppArmor is enabled.
    """
    try:
        import LibAppArmor
    except ImportError:
        # If Python bindings for AppArmor are not installed (if we're
        # running on Jessie where we can't build python3-apparmor package)
        # we resort to calling aa-status executable.
        try:
            from sh import aa_status
        except ImportError:
            # Neither bindings nor the executable exist: report disabled.
            return False
        # Return codes (as per aa-status(8)):
        # 0 if apparmor is enabled and policy is loaded.
        # 1 if apparmor is not enabled/loaded.
        # 2 if apparmor is enabled but no policy is loaded.
        # 3 if the apparmor control files aren't available under /sys/kernel/security/.
        # 4 if the user running the script doesn't have enough privileges to read the apparmor
        # control files.
        return aa_status(['--enabled'], _ok_code=[0, 1, 2, 3, 4]).exit_code in [0, 2]
    else:
        # Bindings available: 1 means enabled.
        return LibAppArmor.aa_is_enabled() == 1
def selinux_status():
    """
    Returns a dict as similar to:
    {'enabled': False, 'mode': 'enforcing'}

    'mode' is None when SELinux is disabled or the mode cannot be determined.
    """
    selinux_enabled = False
    selinux_mode = None
    try:
        import selinux
    except ImportError:
        # If Python bindings for SELinux are not installed (if we're
        # running on Jessie where we can't build python3-selinux package)
        # we resort to calling sestatus executable.
        try:
            from sh import sestatus
        except ImportError:
            # Neither bindings nor the executable exist: report disabled.
            return {'enabled': False}
        # Manually parse out the output for SELinux status
        # (sestatus prints 'Key:  value' lines as bytes).
        for line in sestatus().stdout.split(b'\n'):
            row = line.split(b':')
            if row[0].startswith(b'SELinux status'):
                selinux_enabled = row[1].strip() == b'enabled'
            if row[0].startswith(b'Current mode'):
                selinux_mode = row[1].strip()
        # NOTE(review): in this branch selinux_mode stays a bytes object,
        # while the bindings branch yields str — confirm consumers cope.
    else:
        if selinux.is_selinux_enabled() == 1:
            selinux_enabled = True
            selinux_mode = {-1: None, 0: 'permissive', 1: 'enforcing'}[selinux.security_getenforce()]
    return {'enabled': selinux_enabled, 'mode': selinux_mode}
# System files whose mtime/SHA256 are reported for change detection.
AUDITED_CONFIG_FILES = [
    '/etc/passwd',
    '/etc/shadow',
    '/etc/group'
]
# Chunk size used when hashing files.
BLOCK_SIZE = 64 * 1024
SSHD_CONFIG_PATH = '/etc/ssh/sshd_config'
class SshdConfigParam:
    """
    Encapsulates safe and default values for an OpenSSH parameter.
    """

    class COMPARE(IntEnum):
        """
        Supported comparisons.
        MATCH: exact match
        RANGE: inclusive integer range (min, max)
        """
        MATCH = 1
        RANGE = 2

    def __init__(self, default, safe, compare=COMPARE.MATCH):
        self.default = default
        self._safe = safe
        # Bind the predicate that decides whether a concrete value is safe,
        # and precompute the value written when patching the config file.
        if compare == self.COMPARE.MATCH:
            self.is_safe = self._match
            self._safe_value = safe
        else:
            self.is_safe = self._range
            self._safe_value = str(self._safe[1])

    def _match(self, val: str) -> bool:
        # Exact textual comparison against the single safe value.
        return self._safe == val

    def _range(self, val: str) -> bool:
        # Inclusive numeric range check; *val* arrives as text from the config.
        low, high = self._safe
        return low <= int(val) <= high

    @property
    def safe_value(self) -> str:
        """
        :return: safe value
        """
        return self._safe_value
# Audited sshd_config parameters: upstream default value and the value(s)
# considered safe. Defaults are adjusted per OpenSSH version in audit_sshd().
SSHD_CONFIG_PARAMS_INFO = {
    'PermitEmptyPasswords': SshdConfigParam('no', 'no'),
    'PermitRootLogin': SshdConfigParam('yes', 'no'),
    'PasswordAuthentication': SshdConfigParam('yes', 'no'),
    'AllowAgentForwarding': SshdConfigParam('yes', 'no'),
    'Protocol': SshdConfigParam('2', '2'),
    'ClientAliveInterval': SshdConfigParam('0', (1, 300), SshdConfigParam.COMPARE.RANGE),
    'ClientAliveCountMax': SshdConfigParam('3', (0, 3), SshdConfigParam.COMPARE.RANGE),
    'HostbasedAuthentication': SshdConfigParam('no', 'no'),
    'IgnoreRhosts': SshdConfigParam('yes', 'yes'),
    'LogLevel': SshdConfigParam('INFO', 'INFO'),
    'LoginGraceTime': SshdConfigParam('120', (1, 60), SshdConfigParam.COMPARE.RANGE),
    'MaxAuthTries': SshdConfigParam('6', (0, 4), SshdConfigParam.COMPARE.RANGE),
    'PermitUserEnvironment': SshdConfigParam('no', 'no'),
    'X11Forwarding': SshdConfigParam('no', 'no')
}
def audit_config_files():
    """
    For a predefined list of system config files (see AUDITED_CONFIG_FILES)
    get their last modified time and SHA256 hash.
    The same info regarding SSHD_CONFIG_PATH is appended (see audit_sshd below),
    :return: [{'name': ..., 'sha256': ..., 'last_modified': ...}]
    """
    def digest_sha256(path):
        # Hash the file in BLOCK_SIZE chunks to bound memory usage.
        hasher = sha256()
        with open(path, 'rb') as fh:
            for chunk in iter(lambda: fh.read(BLOCK_SIZE), b''):
                hasher.update(chunk)
        return hasher.hexdigest()

    def describe(path):
        return {
            'name': path,
            'sha256': digest_sha256(path),
            'last_modified': os.path.getmtime(path)
        }

    results = []
    for path in AUDITED_CONFIG_FILES:
        if os.path.isfile(path):
            results.append(describe(path))
    if os.path.isfile(SSHD_CONFIG_PATH):
        # sshd_config additionally carries the list of unsafe parameters.
        sshd_entry = describe(SSHD_CONFIG_PATH)
        sshd_entry['issues'] = audit_sshd()
        results.append(sshd_entry)
    return results
def audit_sshd():
    """
    Read and parse SSHD_CONFIG_PATH, detect all unsafe parameters.
    :return: a dict where key is an unsafe parameter and value is its (unsafe) value.
    """
    # Determine the installed OpenSSH version: built-in defaults differ.
    sshd_version = None
    try:
        from sh import sshd
    except ImportError:
        pass
    else:
        # 'sshd --help' exits 1 and prints e.g. 'OpenSSH_7.9p1 ...' to stderr.
        sshd_help = sshd(['--help'], _ok_code=[1]).stderr
        sshd_help_lines = sshd_help.splitlines()
        for l in sshd_help_lines:
            if l.startswith(b'OpenSSH_'):
                # First three chars after the prefix, e.g. b'7.9' -> 7.9.
                sshd_version = float(l.lstrip(b'OpenSSH_')[:3])
                break
    # Deep copy so per-version adjustments don't mutate the module constant.
    config = copy.deepcopy(SSHD_CONFIG_PARAMS_INFO)
    if sshd_version is not None and sshd_version >= 7.0:
        # According to https://www.openssh.com/releasenotes.html those things were changed in 7.0.
        del (config['Protocol'])
        config['PermitRootLogin'].default = 'prohibit-password'
    # Fill the dict with default values which will be updated with found config parameters' values.
    insecure_params = {k: config[k].default for k in config}
    with open(SSHD_CONFIG_PATH) as sshd_config:
        for line in sshd_config:
            line = line.strip()
            if not line or line[0] == '#':
                # skip empty lines and comments
                continue
            line_split = line.split(maxsplit=1)
            if len(line_split) != 2:
                # skip invalid lines
                continue
            parameter, value = line_split
            value = value.strip('"')
            if parameter in insecure_params:
                insecure_params[parameter] = value
    # Report every parameter whose effective value fails its safety check.
    issues = {}
    for param in insecure_params:
        if not config[param].is_safe(insecure_params[param]):
            issues[param] = insecure_params[param]
    return issues
def mysql_root_access():
    """Check whether a local MySQL server accepts a passwordless root login.

    Returns True when 'mysql -uroot' succeeds, False when it exits with
    code 1 (can't connect, or root password is set), and None when the
    result is unknown (client missing/not executable, or other failure).
    """
    command = ["mysql", "-uroot", "-eSHOW DATABASES;"]
    try:
        subprocess.check_call(command, timeout=5,
                              stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    except subprocess.CalledProcessError as e:
        # Non-zero exit code: can't connect to server, root password is set (code 1 in both cases), or SIGSEGV.
        if e.returncode == 1:
            return False
    except (FileNotFoundError, PermissionError):
        # Can't execute mysql client.
        pass
    else:
        return True
def cpu_vulnerabilities():
    """
    Query sysfs for CPU vulnerabilities mitigation.
    :return: A dict where
    'vendor': "Vendor ID" field returned by lscpu. Possible values: GenuineIntel, AuthenticAMD, ARM.
    'vulnerable': False if not vulnerable, True if vulnerable, None if in doubt. Present if vendor is GenuineIntel.
    'mitigations_disabled': whether any mitigation was disabled in kernel cmdline. Present if vulnerable is None.
    """
    from sh import lscpu
    os.environ['LC_ALL'] = 'en_US'  # switch language to English to be able to parse lscpu output.
    vendor_id = None
    for line in lscpu().stdout.decode().split('\n'):
        param = line.split(':', 1)
        if param and param[0] == 'Vendor ID':
            vendor_id = param[1].strip()
            break
    # TODO: switch LC_ALL back?
    res = {'vendor': vendor_id}
    if vendor_id != "GenuineIntel":
        # Not an Intel CPU, most probably not vulnerable
        return res
    sys_vulnerabilities = Path('/sys/devices/system/cpu/vulnerabilities')
    if not sys_vulnerabilities.is_dir():
        # Directory does not exist: either smth is bind-mounted over it or the kernel is too old.
        vulnerable = None
    else:
        vulnerable = False
        vulns = ['l1tf', 'meltdown', 'spectre_v1', 'spectre_v2']
        if detect_cloud() != CloudProvider.AMAZON:
            # AWS reports no mitigation for those vulnerabilities, as if they are not mitigated at all.
            # But we decided to trust AWS and assume it's not vulnerable.
            vulns += ['spec_store_bypass', 'mds']
        for name in vulns:
            status_file = sys_vulnerabilities / name
            if status_file.is_file():
                # If CPU is not prone to this vulnerability the status file will start with
                # 'Not affected' or 'Mitigation: ...'. Otherwise it will start with 'Vulnerable: ...'.
                if status_file.read_text().startswith('Vulnerable'):
                    vulnerable = True
                    break
            else:
                # Status file does not exist: smth is bind-mounted over it or the kernel is not completely patched.
                vulnerable = None
                break
    res['vulnerable'] = vulnerable
    # If we can't confidently tell if CPU is vulnerable we search cmdline for mitigation disablement params and let
    # the server do the rest.
    if vulnerable is None:
        mitigations_disabled = False
        # Kernel parameters (name -> value) that disable a mitigation;
        # '' means a bare flag with no value.
        mitigation_cmdline_params = {
            'nopti': '',
            'nospectre_v1': '',
            'nospectre_v2': '',
            'mds': 'off',
            'pti': 'off',
            'mitigations': 'off',
            'spectre_v2': 'off',
            'spectre_v2_user': 'off',
            'spec_store_bypass_disable': 'off'
        }
        cmdline = kernel_cmdline()
        for pname, pvalue in mitigation_cmdline_params.items():
            if cmdline.get(pname) == pvalue:
                mitigations_disabled = True
                break
        res['mitigations_disabled'] = mitigations_disabled
    return res
def patch_sshd_config(patch_param):
    """Rewrite SSHD_CONFIG_PATH so *patch_param* has its safe value.

    Backs up the original file, validates the result with 'sshd -t'
    (restoring the backup on failure) and reloads the sshd service.
    *patch_param* must be a key of SSHD_CONFIG_PARAMS_INFO.
    """
    from . import BACKUPS_PATH
    param_info = SSHD_CONFIG_PARAMS_INFO[patch_param]
    if not os.path.isfile(SSHD_CONFIG_PATH):
        logger.error('%s not found', SSHD_CONFIG_PATH)
        return
    try:
        from sh import sshd, service
    except ImportError:
        logger.exception('sshd or service executable not found')
        return
    # Replacement line appended/substituted into the config.
    safe_value_string = '\n# Added by wott-agent on {}\n{} {}\n'.format(
        time.ctime(), patch_param, param_info.safe_value)
    backup_filename = os.path.join(BACKUPS_PATH, 'sshd_config.' + str(int(time.time())))
    replaced = False
    with open(SSHD_CONFIG_PATH, 'r+') as sshd_config:
        safe = False
        lines = sshd_config.readlines()
        patched_lines = []
        for line in lines:
            patched_lines.append(line)
            line = line.strip()
            if not line or line[0] == '#':
                # skip empty lines and comments
                continue
            line_split = line.split(maxsplit=1)
            if len(line_split) != 2:
                continue
            param, value = line_split
            value = value.strip('"')
            if param == patch_param:
                if not param_info.is_safe(value):
                    logger.info('%s: replacing "%s" with "%s"', param, value, param_info.safe_value)
                    # Substitute the just-appended unsafe line in place.
                    patched_lines[-1] = safe_value_string
                    replaced = True
                    safe = False
                else:
                    # NOTE(review): 'safe' tracks only the last occurrence of
                    # the parameter (sshd uses the first) — confirm intended.
                    safe = True
        # Parameter absent and its built-in default is unsafe: append it.
        if not replaced and not safe and not param_info.is_safe(param_info.default):
            logger.info('%s: replacing default "%s" with "%s"', patch_param, param_info.default, param_info.safe_value)
            patched_lines.append(safe_value_string)
            replaced = True
        if replaced:
            if patch_param == 'PasswordAuthentication':
                # Interactive safety check: disabling password auth without
                # installed SSH keys locks the user out.
                if not confirmation(
                        "Warning: Before you disable password authentication, make sure that you have generated "
                        "and installed your SSH keys on this server. Failure to do so will result in that you "
                        "will be locked out. I have have my SSH key(s) installed:"):
                    return
            logger.info('Backing up %s as %s', SSHD_CONFIG_PATH, backup_filename)
            shutil.copy(SSHD_CONFIG_PATH, backup_filename)
            logger.info('Writing %s', SSHD_CONFIG_PATH)
            # Rewrite the file in place through the still-open handle.
            sshd_config.seek(0, 0)
            sshd_config.truncate()
            sshd_config.writelines(patched_lines)
        else:
            logger.info('Nothing to patch.')
            return
    # Validate the new config; restore the backup if sshd rejects it.
    try:
        sshd('-t')
    except ErrorReturnCode_255 as e:
        if e.stderr.startswith(SSHD_CONFIG_PATH.encode()):
            logger.exception('%s is invalid. Restoring from backup.', SSHD_CONFIG_PATH)
            shutil.copy(backup_filename, SSHD_CONFIG_PATH)
        else:
            logger.exception('something went wrong')
        return
    try:
        # Debian names the service 'ssh', other distros 'sshd'.
        service_name = 'ssh' if is_debian() else 'sshd'
        service([service_name, 'reload'])
    except ErrorReturnCode_1:
        logger.exception('failed to reload sshd.')
    else:
        logger.info('sshd reloaded.')
|
{"/agent/__main__.py": ["/agent/__init__.py", "/agent/security_helper.py"], "/agent/security_helper.py": ["/agent/os_helper.py", "/agent/__init__.py"], "/agent/__init__.py": ["/agent/executor.py", "/agent/os_helper.py"], "/tests/test_agent.py": ["/agent/__init__.py", "/agent/journal_helper.py", "/agent/os_helper.py", "/agent/iptables_helper.py", "/agent/security_helper.py"], "/agent/os_helper.py": ["/agent/__init__.py"], "/setup.py": ["/version.py"], "/agent/iptables_helper.py": ["/agent/__init__.py"], "/tests/conftest.py": ["/agent/iptables_helper.py"]}
|
39,333
|
WoTTsecurity/agent
|
refs/heads/master
|
/debian/usercustomize.py
|
# Fix import priority on systems with multiple python3 versions,
# e.g. on Jessie with python 3.4 and 3.5 installed.
# Puts python3 dist-packages at the end of the list, which
# effectively gives python3.5 dist-packages more priority.
import sys

_DIST_PACKAGES = '/usr/lib/python3/dist-packages'
# Fix: on interpreters where this path is not on sys.path (virtualenvs,
# non-Debian systems) the unconditional remove() raised ValueError at
# startup; only move the entry when it is actually present.
if _DIST_PACKAGES in sys.path:
    sys.path.remove(_DIST_PACKAGES)
    sys.path.append(_DIST_PACKAGES)
|
{"/agent/__main__.py": ["/agent/__init__.py", "/agent/security_helper.py"], "/agent/security_helper.py": ["/agent/os_helper.py", "/agent/__init__.py"], "/agent/__init__.py": ["/agent/executor.py", "/agent/os_helper.py"], "/tests/test_agent.py": ["/agent/__init__.py", "/agent/journal_helper.py", "/agent/os_helper.py", "/agent/iptables_helper.py", "/agent/security_helper.py"], "/agent/os_helper.py": ["/agent/__init__.py"], "/setup.py": ["/version.py"], "/agent/iptables_helper.py": ["/agent/__init__.py"], "/tests/conftest.py": ["/agent/iptables_helper.py"]}
|
39,334
|
WoTTsecurity/agent
|
refs/heads/master
|
/version.py
|
import email.utils
import textwrap
from os import getenv
def version():
    """
    Extract static version part from VERSION file,
    git HEAD commit hash and message (if git module can be imported) and
    CircleCI build number.
    :return: (str, str, str, str) — commit and msg are None when the git
        module is unavailable.
    """
    # Fix: read VERSION through a context manager — the previous
    # open('VERSION').read() leaked the file handle.
    with open('VERSION') as version_file:
        static_version = version_file.read().strip()
    try:
        import git
    except ImportError:
        commit = msg = None
    else:
        repo = git.Repo('.')
        head = repo.head.object
        commit = str(head)
        msg = head.message
    # CircleCI exposes the build number via this env var; '0' locally.
    build_number = getenv('CIRCLE_BUILD_NUM', '0')
    return static_version, commit, msg, build_number
def version_string(static_version, commit_hash, build_number):
    """
    Format a full version string version.build_number~commit_hash.
    :param static_version: manually managed (static) version part, e.g. 0.1.5
    :param commit_hash: commit hash (optional)
    :param build_number: build number (doesn't have to be a number
    :return: str
    """
    base = '{}.{}'.format(static_version, build_number)
    if commit_hash:
        # Only the first 7 characters of the hash are kept.
        return '{}~{}'.format(base, commit_hash[:7])
    return base
def write_changelog():
    """Prepend an entry for the current version to debian/changelog.

    Uses version()/version_string() for the version and the git HEAD commit
    message as the change text.
    """
    import debian.changelog
    ver, commit, msg, build_number = version()
    ver_str = version_string(ver, commit, build_number)
    # Fix: use context managers for both the read and the write — the
    # previous bare open() calls leaked the file handles, and an unclosed
    # write handle risks an unflushed changelog.
    with open('debian/changelog') as changelog_file:
        ch = debian.changelog.Changelog(changelog_file)
    ch.new_block(package='wott-agent',
                 version=ver_str,
                 distributions='stable',
                 urgency='medium',
                 author="%s <%s>" % debian.changelog.get_maintainer(),
                 date=email.utils.formatdate(None, True))
    ch.add_change(textwrap.indent(msg, ' * '))
    with open('debian/changelog', 'w') as changelog_file:
        ch.write_to_open_file(changelog_file)
|
{"/agent/__main__.py": ["/agent/__init__.py", "/agent/security_helper.py"], "/agent/security_helper.py": ["/agent/os_helper.py", "/agent/__init__.py"], "/agent/__init__.py": ["/agent/executor.py", "/agent/os_helper.py"], "/tests/test_agent.py": ["/agent/__init__.py", "/agent/journal_helper.py", "/agent/os_helper.py", "/agent/iptables_helper.py", "/agent/security_helper.py"], "/agent/os_helper.py": ["/agent/__init__.py"], "/setup.py": ["/version.py"], "/agent/iptables_helper.py": ["/agent/__init__.py"], "/tests/conftest.py": ["/agent/iptables_helper.py"]}
|
39,335
|
WoTTsecurity/agent
|
refs/heads/master
|
/agent/__init__.py
|
import configparser
import os
import datetime
import platform
import socket
import netifaces
import json
import pwd
import glob
import logging
import logging.config
from math import floor
from sys import stdout, exit
from pathlib import Path
import requests
import pkg_resources
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.x509.oid import NameOID
import pytz
from agent import iptables_helper, journal_helper, security_helper
from agent.executor import Locker
from agent.os_helper import Confinement, auto_upgrades_enabled, detect_confinement, detect_cloud, detect_installation, \
detect_raspberry_pi, get_packages, get_os_release, kernel_package_info, reboot_required, upgrade_packages
# Module-level configuration: version discovery, endpoints and on-disk paths.
CONFINEMENT = detect_confinement()
if CONFINEMENT == Confinement.SNAP:
    # Inside a snap the version is provided by snapd via the environment.
    __version__ = os.environ['SNAP_VERSION']
else:
    try:
        # Fix: get_distribution() returns a Distribution object; __version__
        # must be the version *string* (the fallback below is a string too).
        __version__ = pkg_resources.get_distribution('wott-agent').version
    except pkg_resources.DistributionNotFound:
        # Running from a source checkout: read the top-level VERSION file.
        __version__ = (Path(__file__).parents[1] / 'VERSION').read_text().strip()
WOTT_ENDPOINT = os.getenv('WOTT_ENDPOINT', 'https://api.wott.io')
# mtls/dash hosts are derived from the api host by substitution.
MTLS_ENDPOINT = WOTT_ENDPOINT.replace('api', 'mtls')
DASH_ENDPOINT = WOTT_ENDPOINT.replace('api', 'dash')
DASH_DEV_PORT = 8000
WOTT_DEV_PORT = 8001
MTLS_DEV_PORT = 8002
CONFIG_PATH = os.getenv('CONFIG_PATH', '/opt/wott')
if CONFINEMENT == Confinement.SNAP:
    Locker.LOCKDIR = CONFIG_PATH
CERT_PATH = os.getenv('CERT_PATH', os.path.join(CONFIG_PATH, 'certs'))
CREDENTIALS_PATH = os.getenv('CREDENTIALS_PATH', os.path.join(CONFIG_PATH, 'credentials'))
BACKUPS_PATH = os.path.join(CONFIG_PATH, 'backups')
CLIENT_CERT_PATH = os.path.join(CERT_PATH, 'client.crt')
CLIENT_KEY_PATH = os.path.join(CERT_PATH, 'client.key')
CA_CERT_PATH = os.path.join(CERT_PATH, 'ca.crt')
COMBINED_PEM_PATH = os.path.join(CERT_PATH, 'combined.pem')
INI_PATH = os.path.join(CONFIG_PATH, 'config.ini')
SECRET_DEV_METADATA_PATH = os.path.join(CONFIG_PATH, 'device_metadata.json')
# Create config/backup directories under a lock so parallel agent
# invocations don't race on makedirs/chmod.
with Locker('config'):
    if not os.path.isdir(CONFIG_PATH):
        os.makedirs(CONFIG_PATH)
        os.chmod(CONFIG_PATH, 0o711)
    if not os.path.isdir(BACKUPS_PATH):
        os.makedirs(BACKUPS_PATH, 0o711)
# This needs to be adjusted once we have
# changed the certificate life span from 7 days.
RENEWAL_THRESHOLD = 3
logger = logging.getLogger('agent')
def is_bootstrapping():
    """Return True when no usable client certificate exists on disk."""
    # Ensure the certificate directory exists before probing for files.
    if not os.path.isdir(CERT_PATH):
        os.makedirs(CERT_PATH)
        os.chmod(CERT_PATH, 0o711)
    cert_file = Path(CLIENT_CERT_PATH)
    if not cert_file.is_file():
        logger.warning('No certificate found on disk.')
        return True
    # An empty certificate file is treated the same as a missing one.
    if cert_file.stat().st_size == 0:
        logger.warning('Certificate found but it is broken')
        return True
    return False
def can_read_cert():
    """Exit(1) unless both the client certificate and key are readable."""
    checks = (
        (CLIENT_CERT_PATH, 'certificate'),
        (CLIENT_KEY_PATH, 'key'),
    )
    for file_path, kind in checks:
        if not os.access(file_path, os.R_OK):
            logger.error('Permission denied when trying to read the {} file.'.format(kind))
            exit(1)
def get_primary_ip():
    """Return the IPv4 address of the default-route interface, or None."""
    try:
        default_gateways = netifaces.gateways()['default']
        primary_iface = default_gateways[netifaces.AF_INET][1]
        iface_addrs = netifaces.ifaddresses(primary_iface)
        return iface_addrs[netifaces.AF_INET][0]['addr']
    except (OSError, KeyError):
        # No default route / no address info available.
        return None
def get_certificate_expiration_date():
    """
    Returns the expiration date of the certificate.
    """
    can_read_cert()
    with open(CLIENT_CERT_PATH, 'r') as cert_file:
        pem_bytes = cert_file.read().encode()
    certificate = x509.load_pem_x509_certificate(pem_bytes, default_backend())
    # not_valid_after is a naive datetime; stamp it as UTC before returning.
    return certificate.not_valid_after.replace(tzinfo=pytz.utc)
def time_for_certificate_renewal():
    """ Check if it's time for certificate renewal """
    # Renew once the expiry falls within the threshold window from now.
    now = datetime.datetime.now(datetime.timezone.utc)
    deadline = now + datetime.timedelta(days=RENEWAL_THRESHOLD)
    return deadline > get_certificate_expiration_date()
def is_certificate_expired():
    """True when the certificate's expiry timestamp lies in the past."""
    now = datetime.datetime.now(datetime.timezone.utc)
    return now > get_certificate_expiration_date()
def generate_device_id():
    """
    Device ID is generated remotely.
    """
    url = '{}/v0.2/generate-id'.format(WOTT_ENDPOINT)
    reply = requests.get(url).json()
    logger.debug("[RECEIVED] Generate Node ID: {}".format(reply))
    return reply['device_id']
def get_device_id(dev=False):
    """
    Returns the WoTT Device ID (i.e. fqdn) by reading the first subject from
    the certificate on disk.
    """
    can_read_cert()
    with open(CLIENT_CERT_PATH, 'r') as cert_file:
        certificate = x509.load_pem_x509_certificate(
            cert_file.read().encode(), default_backend()
        )
    common_names = certificate.subject.get_attributes_for_oid(NameOID.COMMON_NAME)
    return common_names[0].value
def generate_cert(device_id):
    """Create a fresh EC (SECP256R1) private key and a CSR for *device_id*.

    Returns a dict with PEM-encoded 'csr' and 'key' strings.
    """
    private_key = ec.generate_private_key(
        ec.SECP256R1(), default_backend()
    )
    subject = x509.Name([
        x509.NameAttribute(NameOID.COMMON_NAME, u'{}'.format(device_id)),
        x509.NameAttribute(NameOID.COUNTRY_NAME, u'UK'),
        x509.NameAttribute(NameOID.STATE_OR_PROVINCE_NAME, u'London'),
        x509.NameAttribute(NameOID.ORGANIZATION_NAME, u'Web of Trusted Things, Ltd'),
    ])
    # The device id also goes into the SAN so TLS hostname checks succeed.
    san = x509.SubjectAlternativeName([x509.DNSName(u'{}'.format(device_id))])
    csr = (
        x509.CertificateSigningRequestBuilder()
        .subject_name(subject)
        .add_extension(san, critical=False)
        .sign(private_key, hashes.SHA256(), default_backend())
    )
    pem_key = private_key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.PKCS8,
        encryption_algorithm=serialization.NoEncryption(),
    )
    pem_csr = csr.public_bytes(serialization.Encoding.PEM)
    return {
        'csr': pem_csr.decode(),
        'key': pem_key.decode()
    }
def get_ca_cert():
    """Download the WoTT CA bundle; return None on HTTP failure."""
    reply = requests.get('{}/v0.2/ca-bundle'.format(WOTT_ENDPOINT))
    logger.debug("[RECEIVED] Get CA Cert: {}".format(reply.status_code))
    logger.debug("[RECEIVED] Get CA Cert: {}".format(reply.content))
    if reply.ok:
        return reply.json()['ca_bundle']
    logger.error('Failed to get CA...')
    logger.error(reply.status_code)
    logger.error(reply.content)
    return None
def get_mtls_header(dev=False):
    """Fabricate the nginx mTLS headers when talking to a local dev endpoint.

    In production (dev=False) the reverse proxy sets these, so return nothing.
    """
    if not dev:
        return {}
    return {
        'SSL-CLIENT-SUBJECT-DN': 'CN=' + get_device_id(),
        'SSL-CLIENT-VERIFY': 'SUCCESS'
    }
def req_error_log(req_type, requester, response, log_on_ok=False, caller=''):
    """
    logs error of mtls_request functions
    :param req_type: 'GET', 'POST', ...
    :param requester: requester id for message, if None then request_url string used
    :param response: request response
    :param log_on_ok: if True then debug log message even if response.ok
    :param caller: caller string id
    :return: None
    """
    # Successful responses are only logged when explicitly requested.
    if response.ok and not log_on_ok:
        return
    for detail in (response.status_code, response.content):
        logger.debug("{} :: [RECEIVED] {} {}: {}".format(caller, requester, req_type, detail))
def mtls_request(method, url, dev=False, requester_name=None, log_on_ok=False, return_exception=False, **kwargs):
    """
    MTLS Request.request wrapper function.
    :param method = 'get,'put,'post','delete','patch','head','options'
    :param url: request url string (without endpoint)
    :param dev: if true use dev endpoint and dev headers
    :param requester_name: displayed requester id for error messages
    :param log_on_ok: if true then log debug message even if response.ok
    :param return_exception: if true, then returns tuple ( response, None ) or (None, RequestException)
    :return: response or None (if there was exception raised), or tuple (see above, return_exception)
    """
    try:
        # The client cert/key pair authenticates this node to the mTLS endpoint.
        r = requests.request(
            method,
            '{}/v0.2/{}'.format(MTLS_ENDPOINT, url),
            cert=(CLIENT_CERT_PATH, CLIENT_KEY_PATH),
            headers=get_mtls_header(dev=dev),
            **kwargs
        )
        if not requester_name:
            requester_name = "({})".format(url)
        # Centralized debug/error logging for the response.
        req_error_log(method.upper(), requester_name, r, log_on_ok=log_on_ok, caller='mtls_request')
        if return_exception:
            return r, None
        else:
            return r
    except requests.exceptions.RequestException as e:
        logger.exception("mtls_request :: rises exception:")
        if return_exception:
            return None, e
        else:
            return None
def try_enroll_in_operation_mode(device_id, dev):
    """If an enroll token is configured, try to claim this node automatically.

    On success (or when the node is already claimed) the token is removed from
    config.ini; when enrolling fails, the token is kept for the next run.
    """
    enroll_token = get_enroll_token()
    if enroll_token is None:
        return
    logger.info("Enroll token found. Trying to automatically enroll the node.")
    setup_endpoints(dev)
    response = mtls_request('get', 'claimed', dev=dev, requester_name="Get Node Claim Info")
    if response is None or not response.ok:
        logger.error('Did not manage to get claim info from the server.')
        return
    logger.debug("[RECEIVED] Get Node Claim Info: {}".format(response))
    claim_info = response.json()
    if claim_info['claimed']:
        logger.info('The node is already claimed. No enrolling required.')
    else:
        claim_token = claim_info['claim_token']
        if not enroll_device(enroll_token, claim_token, device_id):
            logger.error('Node enrolling failed. Will try next time.')
            return
    # Enrollment done (or unnecessary): drop the one-shot token from the config.
    logger.info("Update config...")
    config = configparser.ConfigParser()
    config.read(INI_PATH)
    config.remove_option('DEFAULT', 'enroll_token')
    with open(INI_PATH, 'w') as configfile:
        config.write(configfile)
    os.chmod(INI_PATH, 0o600)
def get_claim_token(dev=False):
    """Fetch the claim token for a not-yet-claimed node; exit on failure."""
    setup_endpoints(dev)
    can_read_cert()
    reply = mtls_request('get', 'claimed', dev=dev, requester_name="Get Node Claim Info")
    request_failed = reply is None or not reply.ok
    if request_failed:
        logger.error('Did not manage to get claim info from the server.')
        exit(2)
    logger.debug("[RECEIVED] Get Node Claim Info: {}".format(reply))
    claim_info = reply.json()
    if claim_info['claimed']:
        logger.error('The node is already claimed.')
        exit(1)
    return claim_info['claim_token']
def _config_default_option(option):
    """Read *option* from the [DEFAULT] section of the agent's config.ini.

    Returns None when the option (or the file) is missing.
    """
    config = configparser.ConfigParser()
    config.read(INI_PATH)
    return config['DEFAULT'].get(option)


def get_fallback_token():
    """Token used to re-authenticate when the certificate has expired."""
    return _config_default_option('fallback_token')


def get_ini_log_level():
    """Log level override from config.ini, if any."""
    return _config_default_option('log_level')


def get_ini_log_file():
    """Log file path from config.ini, if any."""
    return _config_default_option('log_file')


def get_enroll_token():
    """Auto-enrollment token from config.ini, if any."""
    return _config_default_option('enroll_token')
def get_claim_url(dev=False):
    """Build the dashboard URL a user visits to claim this node."""
    fields = {
        'WOTT_ENDPOINT': DASH_ENDPOINT,
        'device_id': get_device_id(),
        'claim_token': get_claim_token(dev),
    }
    return '{WOTT_ENDPOINT}/claim-device?device_id={device_id}&claim_token={claim_token}'.format(**fields)
def get_uptime():
    """
    Returns the uptime in seconds.
    """
    # The first whitespace-separated field of /proc/uptime is seconds since boot.
    with open('/proc/uptime', 'r') as proc_file:
        first_field = proc_file.readline().split()[0]
    return float(first_field)
def get_open_ports(dev=False):
    # netstat_scan() returns (connections, ports); only the ports are needed.
    # `dev` is accepted for signature parity with other collectors but unused.
    return security_helper.netstat_scan()[1]
def send_ping(dev=False):
    """Collect system telemetry and POST it to the mTLS /ping endpoint.

    First GETs /ping to learn server-side state (package-list hash and the
    blocklist to enforce), then gathers a payload whose contents depend on the
    current confinement level.
    """
    can_read_cert()
    ping = mtls_request('get', 'ping', dev=dev, requester_name="Ping", log_on_ok=True)
    if ping is None or not ping.ok:
        logger.error('Ping failed.')
        return
    ping = ping.json()
    # Base payload: sent regardless of confinement.
    payload = {
        'device_operating_system_version': platform.release(),
        'fqdn': socket.getfqdn(),
        'ipv4_address': get_primary_ip(),
        'uptime': get_uptime(),
        'agent_version': str(__version__),
        'confinement': CONFINEMENT.name,
        'cloud': detect_cloud().name,
        'installation': detect_installation().name,
        'os_release': get_os_release()
    }
    if CONFINEMENT != Confinement.SNAP:
        packages = get_packages()
        # Only re-send the full package list when the server's hash is stale.
        if ping.get('deb_packages_hash') != packages['hash']:
            payload['deb_packages'] = packages
    if CONFINEMENT in (Confinement.NONE, Confinement.SNAP):
        connections, ports = security_helper.netstat_scan()
        # The GET response doubles as the firewall blocklist to apply locally.
        blocklist = ping
        iptables_helper.block(blocklist)
        payload.update({
            'processes': security_helper.process_scan(),
            'logins': journal_helper.logins_last_hour(),
            'firewall_rules': iptables_helper.dump(),
            'scan_info': ports,
            'netstat': connections,
            'selinux_status': security_helper.selinux_status(),
            'app_armor_enabled': security_helper.is_app_armor_enabled()
        })
    if CONFINEMENT == Confinement.NONE:
        # These checks need full host access, so they are skipped under
        # snap/docker confinement.
        payload.update({
            'default_password_users': security_helper.check_for_default_passwords(CONFIG_PATH),
            'audit_files': security_helper.audit_config_files(),
            'auto_upgrades': auto_upgrades_enabled(),
            'mysql_root_access': security_helper.mysql_root_access(),
            'kernel_package': kernel_package_info(),
            'reboot_required': reboot_required(),
            'cpu': security_helper.cpu_vulnerabilities()
        })
        rpi_metadata = detect_raspberry_pi()
        if rpi_metadata['is_raspberry_pi']:
            payload.update({
                'device_manufacturer': 'Raspberry Pi',
                'device_model': rpi_metadata['hardware_model'],
            })
    logger.debug("[GATHER] POST Ping: {}".format(payload))
    ping = mtls_request('post', 'ping', json=payload, dev=dev, requester_name="Ping", log_on_ok=True)
    if ping is None or not ping.ok:
        logger.error('Ping failed.')
        return
def say_hello(dev=False):
    """GET /hello over mTLS and return the decoded JSON body.

    NOTE(review): on failure this logs the error but still falls through to
    hello.json() — the unit tests rely on the resulting exception propagating,
    so the missing early return appears intentional. Confirm before changing.
    """
    hello = mtls_request('get', 'hello', dev=dev, requester_name='Hello')
    if hello is None or not hello.ok:
        logger.error('Hello failed.')
    return hello.json()
def sign_cert(csr, device_id):
    """
    This is the function for the initial certificate generation.
    This is only valid for the first time. Future renewals require the
    existing certificate to renew.

    Returns a dict with 'crt', 'claim_token', 'fallback_token' and 'claimed'
    keys, or None when the CSR could not be signed.
    """
    payload = {
        'csr': csr,
        'device_id': device_id,
        'device_architecture': platform.machine(),
        'device_operating_system': platform.system(),
        'device_operating_system_version': platform.release(),
        'fqdn': socket.getfqdn(),
        'ipv4_address': get_primary_ip(),
    }
    crt_req = requests.post(
        '{}/v0.2/sign-csr'.format(WOTT_ENDPOINT),
        json=payload
    )
    if not crt_req.ok:
        logger.error('Failed to submit CSR...')
        req_error_log('post', 'Sign Cert', crt_req, caller='sign_cert')
        # Bail out like renew_cert()/renew_expired_cert() do instead of
        # attempting to decode the error response body below; the caller
        # (run()) already handles a falsy result.
        return
    res = crt_req.json()
    return {
        'crt': res['certificate'],
        'claim_token': res['claim_token'],
        'fallback_token': res['fallback_token'],
        'claimed': False
    }
def renew_cert(csr, device_id):
    """
    This is the renewal function. We need to use the existing certificate to
    verify ourselves in order to get a renewed certificate
    """
    logger.info('Attempting to renew certificate...')
    can_read_cert()
    payload = {
        'csr': csr,
        'device_id': device_id,
        'device_architecture': platform.machine(),
        'device_operating_system': platform.system(),
        'device_operating_system_version': platform.release(),
        'fqdn': socket.getfqdn(),
        'ipv4_address': get_primary_ip()
    }
    reply = mtls_request('post', 'sign-csr', dev=False, requester_name='Renew Cert', json=payload)
    if reply is None or not reply.ok:
        logger.error('Failed to submit CSR...')
        return None
    body = reply.json()
    return {
        'crt': body['certificate'],
        'claim_token': body['claim_token'],
        'fallback_token': body['fallback_token'],
        'claimed': body['claimed'],
    }
def renew_expired_cert(csr, device_id):
    """
    This is the renewal function. We need to use the existing certificate to
    verify ourselves in order to get a renewed certificate
    """
    logger.info('Attempting to renew expired certificate...')
    can_read_cert()
    # An expired cert can't authenticate over mTLS, so the fallback token
    # (issued at signing time) proves our identity instead.
    payload = {
        'csr': csr,
        'device_id': device_id,
        'device_architecture': platform.machine(),
        'device_operating_system': platform.system(),
        'device_operating_system_version': platform.release(),
        'fqdn': socket.getfqdn(),
        'ipv4_address': get_primary_ip(),
        'fallback_token': get_fallback_token()
    }
    reply = requests.post(
        '{}/v0.2/sign-expired-csr'.format(WOTT_ENDPOINT),
        json=payload
    )
    if not reply.ok:
        logger.error('Failed to submit CSR...')
        req_error_log('post', 'Renew expired Cert', reply)
        return None
    body = reply.json()
    return {
        'crt': body['certificate'],
        'claim_token': body['claim_token'],
        'fallback_token': body['fallback_token'],
        'claimed': body['claimed'],
    }
def setup_endpoints(dev):
    """In dev mode, repoint the module-level endpoints at localhost ports."""
    if dev:
        global WOTT_ENDPOINT, MTLS_ENDPOINT, DASH_ENDPOINT
        endpoint = os.getenv('WOTT_ENDPOINT', 'http://localhost')
        DASH_ENDPOINT = endpoint + ':' + str(DASH_DEV_PORT)
        WOTT_ENDPOINT = endpoint + ':' + str(WOTT_DEV_PORT) + '/api'
        MTLS_ENDPOINT = endpoint + ':' + str(MTLS_DEV_PORT) + '/api'
        logger.debug(
            "\nDASH_ENDPOINT: %s\nWOTT_ENDPOINT: %s\nMTLS_ENDPOINT: %s",
            DASH_ENDPOINT, WOTT_ENDPOINT, MTLS_ENDPOINT
        )
def fetch_device_metadata(dev, logger=logger):
    """Download this node's metadata over mTLS and store it as root-only JSON."""
    with Locker('dev.metadata'):
        setup_endpoints(dev)
        logger.info('Fetching node metadata...')
        can_read_cert()
        dev_md_req = mtls_request('get', 'device-metadata', dev=dev, requester_name="Fetching node metadata")
        if dev_md_req is None or not dev_md_req.ok:
            logger.error('Fetching failed.')
            return
        metadata = dev_md_req.json()
        logger.info('metadata retrieved.')
        # Refuse to write through a non-regular file (symlink attack guard).
        if os.path.exists(SECRET_DEV_METADATA_PATH) and not os.path.isfile(SECRET_DEV_METADATA_PATH):
            logger.error("Error: The filesystem object '{}' is not a file. Looks like a break-in attempt.".format(
                SECRET_DEV_METADATA_PATH
            ))
            exit(1)
        with open(SECRET_DEV_METADATA_PATH, "w") as outfile:
            json.dump(metadata, outfile, indent=4)
        os.chmod(SECRET_DEV_METADATA_PATH, 0o600)
        logger.info('metadata stored.')
def fetch_credentials(dev, logger=logger):
    """Download credential files over mTLS and write them under CREDENTIALS_PATH.

    Credentials are grouped by (linux_user, name); each group becomes
    <owner>/<name>.json owned by that user (files with no linux_user go to the
    top-level dir, owned by root).
    """
    def clear_credentials(path):
        # Remove every previously fetched *.json so server-side deletions
        # propagate to the node.
        files = glob.glob(os.path.join(path, '**/*.json'), recursive=True)
        for file in files:
            os.remove(os.path.join(path, file))
            logger.debug("remove...{}".format(file))
    with Locker('credentials'):
        setup_endpoints(dev)
        logger.info('Fetching credentials...')
        can_read_cert()
        credentials_req = mtls_request('get', 'credentials', dev=dev, requester_name="Fetch credentials")
        if credentials_req is None or not credentials_req.ok:
            logger.error('Fetching failed.')
            return
        credentials = credentials_req.json()
        logger.info('Credentials retrieved.')
        if not os.path.exists(CREDENTIALS_PATH):
            os.mkdir(CREDENTIALS_PATH, 0o711)
        else:
            os.chmod(CREDENTIALS_PATH, 0o711)
        if not os.path.isdir(CREDENTIALS_PATH):
            logger.error("There is file named as our credentials dir(%s), that's strange...", CREDENTIALS_PATH)
            exit(1)
        clear_credentials(CREDENTIALS_PATH)
        # group received credentials, by linux_user, name
        credentials_grouped = {}
        for cred in credentials:
            name = cred['name']
            owner = cred['linux_user'] if 'linux_user' in cred else ''
            if owner not in credentials_grouped:
                credentials_grouped[owner] = {}
            if name not in credentials_grouped[owner]:
                credentials_grouped[owner][name] = cred['data']
            else:
                logger.error("Duplicated owner/name combination for credentials ({}/{}). Skipped.".format(owner, name))
        root_pw = pwd.getpwnam("root")
        for owner in credentials_grouped:
            pw = root_pw  # if no owner, use 'root'
            if owner:
                try:
                    pw = pwd.getpwnam(owner)
                except KeyError:
                    # Unknown system user: skip this whole credential group.
                    logger.warning("There are credentials with wrong owner ({}). Skipped.".format(owner))
                    continue
            uid = pw.pw_uid
            gid = pw.pw_gid
            owner_path = CREDENTIALS_PATH if not owner else os.path.join(CREDENTIALS_PATH, owner)
            if owner and not os.path.isdir(owner_path):
                if os.path.exists(owner_path):
                    logger.error(
                        "There is a file with name of system user in credentials directory ({}).".format(owner_path)
                    )
                    exit(1)
                os.mkdir(owner_path, 0o700)
            os.chown(owner_path, uid, gid)  # update ownership if user existence in system changed
            for name in credentials_grouped[owner]:
                credential_file_path = os.path.join(owner_path, "{}.json".format(name))
                file_credentials = credentials_grouped[owner][name]
                logger.debug('Store credentials to {}'.format(credential_file_path))
                with open(credential_file_path, 'w') as outfile:
                    json.dump(file_credentials, outfile, indent=4)
                os.chmod(credential_file_path, 0o400)
                os.chown(credential_file_path, uid, gid)
def write_metadata(data, rewrite_file):
    """Persist *data* as metadata.json under CONFIG_PATH (world-readable).

    When rewrite_file is False an existing file is left untouched.
    """
    metadata_path = Path(CONFIG_PATH) / 'metadata.json'
    if not rewrite_file and metadata_path.is_file():
        return
    with metadata_path.open('w') as metadata_file:
        json.dump(data, metadata_file)
    metadata_path.chmod(0o644)
def _log_request_errors(req):
    """Log the status/reason of a failed request plus each field error it reports."""
    errors = req.json()
    logger.error("Code:{}, Reason:{}".format(req.status_code, req.reason))
    for key, key_errors in errors.items():
        # Normalize single messages to a list so both shapes log identically.
        if not isinstance(key_errors, list):
            key_errors = [key_errors]
        for msg in key_errors:
            logger.error("{} : {}".format(key, msg))
def enroll_device(enroll_token, claim_token, device_id):
    """
    Enroll device using enroll_token to authorize
    :param enroll_token: enroll pairing key
    :param claim_token: claim token
    :param device_id: device id
    :return: True if enrolled successfully, otherwise return False
    """
    payload = {
        'key': enroll_token,
        'claim_token': claim_token,
        'device_id': device_id
    }
    try:
        enroll_req = requests.post(
            '{}/v0.2/enroll-device'.format(WOTT_ENDPOINT),
            json=payload
        )
        if not enroll_req.ok:
            logger.error('Failed to enroll node...')
            _log_request_errors(enroll_req)
            req_error_log('post', 'Enroll by token', enroll_req, caller='enroll-device')
            return False
        else:
            logger.info('Node {} enrolled successfully.'.format(device_id))
            return True
    except requests.exceptions.RequestException:
        # Network-level failure: report False so the token is retried later.
        logger.exception("enroll_device :: rises exception:")
        return False
def run(ping=True, dev=False, logger=logger):
    """Main agent cycle: bootstrap or renew the certificate, then ping.

    Serialized via the 'ping' lock so overlapping invocations cannot race on
    the certificate and config files.
    """
    with Locker('ping'):
        setup_endpoints(dev)
        bootstrapping = is_bootstrapping()
        if bootstrapping:
            # First run: obtain a fresh device id from the server.
            device_id = generate_device_id()
            logger.info('Got WoTT ID: {}'.format(device_id))
            write_metadata({'device_id': device_id}, rewrite_file=True)
        else:
            device_id = get_device_id()
            try_enroll_in_operation_mode(device_id=device_id, dev=dev)
            write_metadata({'device_id': device_id}, rewrite_file=False)
            # Certificate still valid and not near expiry: just ping and stop.
            if not time_for_certificate_renewal() and not is_certificate_expired():
                if ping:
                    send_ping(dev=dev)
                    time_to_cert_expires = get_certificate_expiration_date() - datetime.datetime.now(
                        datetime.timezone.utc)
                    logger.info(
                        "Certificate expires in {} days and {} hours. No need for renewal. "
                        "Renewal threshold is set to {} days.".format(
                            time_to_cert_expires.days,
                            floor(time_to_cert_expires.seconds / 60 / 60),
                            RENEWAL_THRESHOLD,
                        )
                    )
                    exit(0)
                else:
                    return
        logger.info('My WoTT ID is: {}'.format(device_id))
        logger.info('Generating certificate...')
        gen_key = generate_cert(device_id)
        ca = get_ca_cert()
        if not ca:
            logger.error('Unable to retrieve CA cert. Exiting.')
            exit(1)
        logger.info('Submitting CSR...')
        enroll_token = None
        if bootstrapping:
            crt = sign_cert(gen_key['csr'], device_id)
            enroll_token = get_enroll_token()
            if enroll_token is not None:
                logger.info('Node enrollment token found...')
        elif is_certificate_expired():
            # Expired certs cannot authenticate over mTLS; use fallback flow.
            crt = renew_expired_cert(gen_key['csr'], device_id)
        else:
            crt = renew_cert(gen_key['csr'], device_id)
        if not crt:
            logger.error('Unable to sign CSR. Exiting.')
            exit(1)
        if enroll_token is None:
            # No auto-enroll token: print the manual claim URL for the user.
            logger.info('Got Claim Token: {}'.format(crt['claim_token']))
            logger.info(
                'Claim your node: {WOTT_ENDPOINT}/claim-device?device_id={device_id}&claim_token={claim_token}'.format(
                    WOTT_ENDPOINT=DASH_ENDPOINT,
                    device_id=device_id,
                    claim_token=crt['claim_token']
                )
            )
        logger.info('Writing certificate and key to disk...')
        with open(CLIENT_CERT_PATH, 'w') as f:
            f.write(crt['crt'])
        os.chmod(CLIENT_CERT_PATH, 0o644)
        with open(CA_CERT_PATH, 'w') as f:
            f.write(ca)
        os.chmod(CA_CERT_PATH, 0o644)
        with open(CLIENT_KEY_PATH, 'w') as f:
            f.write(gen_key['key'])
        os.chmod(CLIENT_KEY_PATH, 0o600)
        # combined.pem = key + cert, for consumers that need both in one file.
        with open(COMBINED_PEM_PATH, 'w') as f:
            f.write(gen_key['key'])
            f.write(crt['crt'])
        os.chmod(COMBINED_PEM_PATH, 0o600)
        send_ping(dev=dev)
        if enroll_token is not None:
            logger.info('Enroll node by token...')
            if enroll_device(enroll_token, crt['claim_token'], device_id):
                enroll_token = None
        logger.info("Writing config...")
        config = configparser.ConfigParser()
        config['DEFAULT'] = {'fallback_token': crt['fallback_token']}
        if enroll_token is not None:
            config['DEFAULT']['enroll_token'] = enroll_token  # if enroll fails, store enroll token for next run
        with open(INI_PATH, 'w') as configfile:
            config.write(configfile)
        os.chmod(INI_PATH, 0o600)
def upgrade(packages):
    """Upgrade the given OS packages via os_helper.upgrade_packages()."""
    logger.info('upgrade packages: {}'.format(packages))
    upgrade_packages(packages)
def setup_logging(level=None, log_format="%(message)s", daemon=True):
    """
    Setup logging configuration
    if there is `log_level` item in wott-agent `config.ini` it would be used as actual log level
    otherwise used value of level parameter
    """
    log_level = level if level is not None else logging.INFO
    ini_level = get_ini_log_level()
    if ini_level is not None and isinstance(ini_level, str):
        ini_level = ini_level.upper()
        if ini_level in ['CRITICAL', 'ERROR', 'WARN', 'WARNING', 'INFO', 'DEBUG', 'NOTSET']:
            if level is None:
                # config.ini only wins when the caller did not force a level.
                log_level = ini_level
    filename = get_ini_log_file()
    handlers = []
    if filename is not None and filename != 'stdout':
        file_handler = logging.FileHandler(filename=filename)
        handlers.append(file_handler)
    if filename is None or filename == 'stdout' or not daemon:
        stdout_handler = logging.StreamHandler(stdout)
        handlers.append(stdout_handler)
        if not daemon:
            # Interactive (non-daemon) runs print bare messages.
            stdout_handler.setFormatter(logging.Formatter("%(message)s"))
    logging.basicConfig(level=log_level, format=log_format, handlers=handlers)
    logging.getLogger('agent').setLevel(log_level)
    logging.getLogger('agent.iptables_helper').setLevel(log_level)
    logging.getLogger('agent.executor').setLevel(log_level)
    if log_level != logging.DEBUG:
        # The sh module is chatty; quiet it unless debugging.
        logging.getLogger('sh.command').setLevel(logging.ERROR)
|
{"/agent/__main__.py": ["/agent/__init__.py", "/agent/security_helper.py"], "/agent/security_helper.py": ["/agent/os_helper.py", "/agent/__init__.py"], "/agent/__init__.py": ["/agent/executor.py", "/agent/os_helper.py"], "/tests/test_agent.py": ["/agent/__init__.py", "/agent/journal_helper.py", "/agent/os_helper.py", "/agent/iptables_helper.py", "/agent/security_helper.py"], "/agent/os_helper.py": ["/agent/__init__.py"], "/setup.py": ["/version.py"], "/agent/iptables_helper.py": ["/agent/__init__.py"], "/tests/conftest.py": ["/agent/iptables_helper.py"]}
|
39,336
|
WoTTsecurity/agent
|
refs/heads/master
|
/tests/test_agent.py
|
import asyncio
import datetime
import json
from unittest import mock
from pathlib import Path
import time
import pytest
import freezegun
import agent
from agent.journal_helper import logins_last_hour
from agent.os_helper import detect_raspberry_pi, kernel_cmdline
from agent.iptables_helper import block_networks, block_ports, OUTPUT_CHAIN, INPUT_CHAIN
from agent.security_helper import check_for_default_passwords, selinux_status
from agent import executor
import pwd
from os import getenv
def test_detect_raspberry_pi():
    """detect_raspberry_pi() parses model and serial from device-tree files."""
    class mockPath():
        # Minimal stand-in for pathlib.Path supporting is_file()/open().
        def __init__(self, filename):
            self._filename = filename
        def is_file(self):
            return True
        def open(self):
            return mock_open(self._filename)
    def mock_open(filename, mode='r'):
        """
        This will return either a Unicode string needed for "r" mode or bytes for "rb" mode.
        The contents are still the same which is the mock sshd_config. But they are only interpreted
        by audit_sshd.
        """
        if filename == '/proc/device-tree/model':
            content = 'Raspberry Pi 3 Model B Plus Rev 1.3\x00'
        elif filename == '/proc/device-tree/serial-number':
            content = '0000000060e3b222\x00'
        else:
            raise FileNotFoundError
        file_object = mock.mock_open(read_data=content).return_value
        file_object.__iter__.return_value = content.splitlines(True)
        return file_object
    with mock.patch('agent.os_helper.Path', mockPath):
        metadata = detect_raspberry_pi()
        assert metadata['is_raspberry_pi']
        # Trailing NUL bytes from the device tree must be stripped.
        assert metadata['hardware_model'] == 'Raspberry Pi 3 Model B Plus Rev 1.3'
        assert metadata['serial_number'] == '0000000060e3b222'
def test_failed_logins():
    """logins_last_hour() aggregates per-user success/failure counts from journal records."""
    with mock.patch('agent.journal_helper.get_journal_records') as gjr:
        gjr.return_value = [
        ]
        result = logins_last_hour()
        assert result == {}
    with mock.patch('agent.journal_helper.get_journal_records') as gjr:
        gjr.return_value = [
            {'MESSAGE': 'pam_unix(sshd:auth): authentication failure; logname= uid=0 euid=0 tty=ssh ruser= rhost=10.147.17.225'}
        ]
        result = logins_last_hour()
        # A record with no "user=" field is attributed to the empty username.
        assert result == {'': {'success': 0, 'failed': 1}}
    with mock.patch('agent.journal_helper.get_journal_records') as gjr:
        gjr.return_value = [
            {'MESSAGE': 'pam_unix(sshd:auth): authentication failure; logname= uid=0 euid=0 tty=ssh ruser= rhost=10.147.17.225 user=pi'}
        ]
        result = logins_last_hour()
        assert result == {'pi': {'success': 0, 'failed': 1}}
    with mock.patch('agent.journal_helper.get_journal_records') as gjr:
        gjr.return_value = [
            {'MESSAGE': 'PAM 2 more authentication failures; logname= uid=0 euid=0 tty=ssh ruser= rhost=10.147.17.225 user=pi'},
            {'MESSAGE': 'pam_unix(sshd:auth): authentication failure; logname= uid=0 euid=0 tty=ssh ruser= rhost=10.147.17.225 user=pi'},
            {'MESSAGE': 'PAM 1 more authentication failure; logname= uid=0 euid=0 tty=ssh ruser= rhost=10.147.17.225 user=pi'},
            {'MESSAGE': 'pam_unix(sshd:session): session opened for user pi by (uid=0)'},
            {'MESSAGE': 'pam_unix(sshd:auth): authentication failure; logname= uid=0 euid=0 tty=ssh ruser= rhost=10.147.17.225'}
        ]
        result = logins_last_hour()
        # "N more ... failure(s)" lines add N to the user's failure count.
        assert result == {
            'pi': {'success': 1, 'failed': 4},
            '': {'success': 0, 'failed': 1}}
    with mock.patch('agent.journal_helper.get_journal_records') as gjr:
        gjr.return_value = [
            {'MESSAGE': 'pam_unix(sshd:auth): some other message'},
            {'MESSAGE': 'something unrelated'},
            {'MESSAGE': 'PAM and something unrelated'},
        ]
        result = logins_last_hour()
        # Unrelated messages are ignored entirely.
        assert result == {}
def test_is_bootstrapping_stat_file(tmpdir):
    """A missing client.crt means the agent must bootstrap."""
    agent.CERT_PATH = str(tmpdir)
    agent.CLIENT_CERT_PATH = str(tmpdir / 'client.crt')
    with mock.patch('agent.logger') as prn:
        assert agent.is_bootstrapping()
        assert mock.call('No certificate found on disk.') in prn.warning.mock_calls
def test_is_bootstrapping_create_dir(tmpdir):
    """is_bootstrapping() creates a missing cert dir and reports bootstrap."""
    notexistent_dir = tmpdir / 'notexistent'
    agent.CERT_PATH = str(notexistent_dir)
    agent.CLIENT_CERT_PATH = str(notexistent_dir / 'client.crt')
    with mock.patch('os.makedirs') as md, \
            mock.patch('os.chmod') as chm, \
            mock.patch('agent.logger') as prn:
        assert agent.is_bootstrapping()
        # `mock.called_with(...)` is NOT an assertion — it silently creates a
        # truthy child mock, so the old asserts could never fail. Use the real
        # assertion helpers with the arguments the code actually passes
        # (mode 0o711, string paths).
        md.assert_called_once_with(agent.CERT_PATH)
        chm.assert_called_once_with(agent.CERT_PATH, 0o711)
        assert mock.call('No certificate found on disk.') in prn.warning.mock_calls
def test_is_bootstrapping_check_filesize(tmpdir):
    """An empty (zero-byte) client.crt is treated as broken."""
    crt = tmpdir / 'client.crt'
    agent.CERT_PATH = str(tmpdir)
    agent.CLIENT_CERT_PATH = str(crt)
    with mock.patch('agent.logger') as prn:
        Path(agent.CLIENT_CERT_PATH).touch()
        assert agent.is_bootstrapping()
        assert mock.call('Certificate found but it is broken') in prn.warning.mock_calls
def test_is_bootstrapping_false_on_valid_cert(tmpdir):
    """A non-empty client.crt means no bootstrap (and nothing is printed)."""
    crt = tmpdir / 'client.crt'
    agent.CERT_PATH = str(tmpdir)
    agent.CLIENT_CERT_PATH = str(crt)
    with mock.patch('builtins.print') as prn:
        Path(agent.CLIENT_CERT_PATH).write_text('nonzero')
        assert not agent.is_bootstrapping()
        assert not prn.mock_calls
def test_can_read_cert_stat_cert(tmpdir):
    """Missing/unreadable cert file causes SystemExit with a clear message."""
    crt = tmpdir / 'client.crt'
    key = tmpdir / 'client.key'
    agent.CERT_PATH = str(tmpdir)
    agent.CLIENT_CERT_PATH = str(crt)
    agent.CLIENT_KEY_PATH = str(key)
    with mock.patch('agent.logger') as prn:
        # Path(crt).touch(mode=0o100)
        with pytest.raises(SystemExit):
            agent.can_read_cert()
        assert mock.call('Permission denied when trying to read the certificate file.') in prn.error.mock_calls
def test_can_read_cert_stat_key(tmpdir):
    """Readable cert but missing/unreadable key still causes SystemExit."""
    crt = tmpdir / 'client.crt'
    key = tmpdir / 'client.key'
    agent.CERT_PATH = str(tmpdir)
    agent.CLIENT_CERT_PATH = str(crt)
    agent.CLIENT_KEY_PATH = str(key)
    with mock.patch('agent.logger') as prn:
        Path(agent.CLIENT_CERT_PATH).touch(mode=0o600)
        # Path(agent.CLIENT_KEY_PATH).touch(mode=0o100)
        with pytest.raises(SystemExit):
            agent.can_read_cert()
        assert mock.call('Permission denied when trying to read the key file.') in prn.error.mock_calls
def test_can_read_cert_none_on_success(tmpdir):
    """Readable cert and key: can_read_cert() returns None and does not exit."""
    crt = tmpdir / 'client.crt'
    key = tmpdir / 'client.key'
    agent.CERT_PATH = str(tmpdir)
    agent.CLIENT_CERT_PATH = str(crt)
    agent.CLIENT_KEY_PATH = str(key)
    with mock.patch('agent.logger'):
        Path(agent.CLIENT_CERT_PATH).touch(mode=0o600)
        Path(agent.CLIENT_KEY_PATH).touch(mode=0o600)
        can_read = agent.can_read_cert()
        assert can_read is None
def test_get_primary_ip(netif_gateways, netif_ifaddresses):
    """get_primary_ip() picks the address of the default-gateway interface."""
    with mock.patch('netifaces.gateways') as gw, \
            mock.patch('netifaces.ifaddresses') as ifaddr:
        gw.return_value = netif_gateways
        ifaddr.return_value = netif_ifaddresses
        primary_ip = agent.get_primary_ip()
        assert primary_ip == '192.168.1.3'
def test_get_primary_ip_none_on_exception(netif_gateways_invalid, netif_ifaddresses):
    """Malformed gateway data results in None instead of an exception."""
    with mock.patch('netifaces.gateways') as gw, \
            mock.patch('netifaces.ifaddresses') as ifaddr:
        gw.return_value = netif_gateways_invalid
        ifaddr.return_value = netif_ifaddresses
        primary_ip = agent.get_primary_ip()
        assert primary_ip is None
def test_get_certificate_expiration_date(cert):
    """Expiration date is parsed from the PEM fixture certificate."""
    with mock.patch(
        'builtins.open',
        mock.mock_open(read_data=cert),
        create=True
    ):
        exp_date = agent.get_certificate_expiration_date()
        assert exp_date.date() == datetime.date(2019, 3, 19)
@freezegun.freeze_time("2019-04-04")
def test_time_for_certificate_renewal(cert):
    """Within the renewal threshold (frozen clock) renewal is requested."""
    with mock.patch(
        'builtins.open',
        mock.mock_open(read_data=cert),
        create=True
    ):
        assert agent.time_for_certificate_renewal()
@freezegun.freeze_time("2019-04-14")
def test_cert_expired(cert):
    """Past the notAfter date (frozen clock) the cert reports expired."""
    with mock.patch(
        'builtins.open',
        mock.mock_open(read_data=cert),
        create=True
    ), mock.patch('agent.can_read_cert') as cr:
        cr.return_value = True
        assert agent.is_certificate_expired()
@pytest.mark.vcr
def test_generate_device_id():
    """Server-side id generation returns a non-empty id (recorded cassette)."""
    dev_id = agent.generate_device_id()
    assert dev_id
def test_get_device_id(cert):
    """Device id is the CN of the on-disk certificate."""
    with mock.patch(
        'builtins.open',
        mock.mock_open(read_data=cert),
        create=True
    ), mock.patch('agent.can_read_cert') as cr:
        cr.return_value = True
        device_id = agent.get_device_id()
        assert device_id == '4853b630822946019393b16c5b710b9e.d.wott.local'
def test_generate_cert():  # TODO: parse key and csr
    """generate_cert() yields both PEM members."""
    cert = agent.generate_cert('4853b630822946019393b16c5b710b9e.d.wott.local')
    assert cert['key']
    assert cert['csr']
@pytest.mark.vcr
def test_get_ca_cert():
    """CA bundle download contains at least one PEM certificate."""
    ca_bundle = agent.get_ca_cert()
    assert "BEGIN CERTIFICATE" in ca_bundle
def test_get_ca_cert_none_on_fail():
    """HTTP failure yields None plus three error log lines."""
    with mock.patch('requests.get') as req, \
            mock.patch('agent.logger') as prn:
        req.return_value.ok = False
        ca_bundle = agent.get_ca_cert()
        assert ca_bundle is None
        assert mock.call('Failed to get CA...') in prn.error.mock_calls
        assert prn.error.call_count == 3
def test_get_open_ports(net_connections_fixture, netstat_result):
    """get_open_ports() returns the ports half of netstat_scan()."""
    with mock.patch('psutil.net_connections') as net_connections:
        net_connections.return_value = net_connections_fixture
        connections_ports = agent.get_open_ports()
        assert connections_ports == [netstat_result[1]]
@pytest.mark.vcr
def test_send_ping(raspberry_cpuinfo, uptime, tmpdir, cert, key, net_connections_fixture):
    """send_ping() completes quietly when all collectors are mocked out."""
    crt_path = tmpdir / 'client.crt'
    key_path = tmpdir / 'client.key'
    agent.CERT_PATH = str(tmpdir)
    agent.CLIENT_CERT_PATH = str(crt_path)
    agent.CLIENT_KEY_PATH = str(key_path)
    Path(agent.CLIENT_CERT_PATH).write_text(cert)
    Path(agent.CLIENT_KEY_PATH).write_text(key)
    with mock.patch(
        'builtins.open',
        mock.mock_open(read_data=raspberry_cpuinfo),
        create=True
    ), \
            mock.patch('socket.getfqdn') as getfqdn, \
            mock.patch('psutil.net_connections') as net_connections, \
            mock.patch('agent.iptables_helper.dump') as fr, \
            mock.patch('agent.security_helper.check_for_default_passwords') as chdf, \
            mock.patch('agent.security_helper.process_scan') as ps, \
            mock.patch('agent.iptables_helper.block_ports') as bp, \
            mock.patch('agent.iptables_helper.block_networks') as bn, \
            mock.patch('agent.journal_helper.logins_last_hour') as logins, \
            mock.patch('builtins.print') as prn, \
            mock.patch(
                'builtins.open',
                mock.mock_open(read_data=uptime),
                create=True
            ):  # noqa E213
        net_connections.return_value = net_connections_fixture[0],
        fr.return_value = {}
        chdf.return_value = False
        ps.return_value = []
        getfqdn.return_value = 'localhost'
        bp.return_value = None
        bn.return_value = None
        logins.return_value = {}
        ping = agent.send_ping()
        assert ping is None
        # Either nothing is printed, or only the single 'Ping failed.' line.
        assert prn.call_count == 0 or (prn.call_count == 1 and mock.call('Ping failed.') in prn.mock_calls)
@pytest.mark.vcr
def test_renew_cert(tmpdir, cert, key):
    """renew_expired_cert() logs a CSR submission failure per the VCR cassette."""
    crt_path = tmpdir / 'client.crt'
    key_path = tmpdir / 'client.key'
    agent.CERT_PATH = str(tmpdir)
    agent.CLIENT_CERT_PATH = str(crt_path)
    agent.CLIENT_KEY_PATH = str(key_path)
    Path(agent.CLIENT_CERT_PATH).write_text(cert)
    Path(agent.CLIENT_KEY_PATH).write_text(key)
    with mock.patch('socket.getfqdn') as getfqdn, \
            mock.patch('agent.logger') as prn:  # noqa E213
        getfqdn.return_value = 'localhost'
        res = agent.renew_expired_cert(None, None)
        assert res is None
        assert prn.info.call_count == 1
        assert prn.error.call_count == 1
        assert mock.call('Failed to submit CSR...') in prn.error.mock_calls
@pytest.mark.vcr
def test_say_hello_failed(tmpdir, invalid_cert, invalid_key):
    """say_hello() with a broken cert/key raises JSONDecodeError and logs the failure."""
    crt_path = tmpdir / 'client.crt'
    key_path = tmpdir / 'client.key'
    agent.CERT_PATH = str(tmpdir)
    agent.CLIENT_CERT_PATH = str(crt_path)
    agent.CLIENT_KEY_PATH = str(key_path)
    Path(agent.CLIENT_CERT_PATH).write_text(invalid_cert)
    Path(agent.CLIENT_KEY_PATH).write_text(invalid_key)
    with mock.patch('agent.logger') as prn:
        with pytest.raises(json.decoder.JSONDecodeError):
            _ = agent.say_hello()
        assert mock.call('Hello failed.') in prn.error.mock_calls
@pytest.mark.vcr
def test_say_hello_ok(tmpdir, cert, key):
    """say_hello() with a valid cert/key returns a payload containing 'message'."""
    crt_path = tmpdir / 'client.crt'
    key_path = tmpdir / 'client.key'
    agent.CERT_PATH = str(tmpdir)
    agent.CLIENT_CERT_PATH = str(crt_path)
    agent.CLIENT_KEY_PATH = str(key_path)
    Path(agent.CLIENT_CERT_PATH).write_text(cert)
    Path(agent.CLIENT_KEY_PATH).write_text(key)
    hello = agent.say_hello()
    assert hello['message']
def test_uptime(uptime):
    """get_uptime() parses the mocked /proc/uptime fixture to 60 seconds."""
    with mock.patch(
        'builtins.open',
        mock.mock_open(read_data=uptime),
        create=True
    ):
        up = agent.get_uptime()
        assert up == 60
def test_check_for_default_passwords_pos():
    """A shadow entry hashed from 'raspberry' is flagged for user 'pi'."""
    with mock.patch('pathlib.Path.open', mock.mock_open(read_data='pi:raspberry')),\
            mock.patch('spwd.getspall') as getspnam:
        # this is a real shadow record for password "raspberry"
        getspnam.return_value = [
            mock.Mock(
                sp_pwdp='$6$2tSrLNr4$XblkH.twWBJB.6zxbtyDM4z3Db55SOqdi3MBYPwNXF1Kv5FCGS6jCDdVNsr50kctHZk/W0u2AtyomcQ16EVZQ/',
                sp_namp='pi'
            )
        ]
        assert check_for_default_passwords('/doesntmatter/file.txt') == ['pi']
def test_check_for_default_passwords_neg():
    """A shadow entry with a different password hash is not flagged."""
    with mock.patch('pathlib.Path.open', mock.mock_open(read_data='pi:raspberry')),\
            mock.patch('spwd.getspall') as getspnam:
        # this is a real shadow record for password which is not "raspberry"
        getspnam.return_value = [
            mock.Mock(
                sp_pwdp='$6$/3W/.H6/$nncROMeVQxTEKRcjCfOwft08WPJm.JLnrlli0mutPZ737kImtHhcROgrYz7k6osr0XwuPDlwRfY.r584iQ425/',
                sp_namp='pi'
            )
        ]
        assert check_for_default_passwords('/doesntmatter/file.txt') == []
def test_audit_config_files(sshd_config):
    """audit_config_files() hashes audited files and reports insecure sshd settings."""
    def mock_open(filename, mode='r'):
        """
        This will return either a Unicode string needed for "r" mode or bytes for "rb" mode.
        The contents are still the same which is the mock sshd_config. But they are only interpreted
        by audit_sshd.
        """
        if mode != 'rb':
            content = sshd_config
        else:
            content = sshd_config.encode()
        file_object = mock.mock_open(read_data=content).return_value
        file_object.__iter__.return_value = content.splitlines(True)
        return file_object
    with mock.patch('builtins.open',
                    new=mock_open),\
            mock.patch('os.path.isfile') as isfile,\
            mock.patch('os.path.getmtime') as getmtime:
        isfile.return_value = True
        getmtime.return_value = 0
        audit = agent.security_helper.audit_config_files()
        # Every audited file sees the same mocked content, hence equal hashes.
        assert len(audit) == 4 and \
            audit[0]['sha256'] == audit[1]['sha256'] and \
            audit[0]['last_modified'] == 0 and \
            audit[3]['issues'] == {'PermitRootLogin': 'yes',
                                   'PasswordAuthentication': 'yes',
                                   'Protocol': '2,1',
                                   'AllowAgentForwarding': 'yes',
                                   'ClientAliveInterval': '0',
                                   'MaxAuthTries': '5'}
def test_block_networks(ipt_networks, ipt_rules):
    """block_networks() always replaces the whole OUTPUT chain rule set atomically."""
    rule1, rule2 = ipt_rules
    net1, net2 = ipt_networks
    # Initial state: no networks are blocked
    # Input: two networks (net1, net2)
    # Result: net1 and net2 are blocked
    with mock.patch('agent.iptc_helper.batch_add_rules') as batch_add_rules:
        block_networks([net1, net2])
        batch_add_rules.assert_has_calls([
            mock.call('filter', [rule1, rule2], chain=OUTPUT_CHAIN, ipv6=False),
            mock.call('filter', [], chain=OUTPUT_CHAIN, ipv6=True),
        ])
    # Initial state: net1 is blocked
    # Input: another network: net2
    # Result: net2 gets blocked, net1 gets unblocked
    with mock.patch('agent.iptc_helper.batch_add_rules') as batch_add_rules:
        block_networks([net2])
        batch_add_rules.assert_has_calls([
            mock.call('filter', [rule2], chain=OUTPUT_CHAIN, ipv6=False),
            mock.call('filter', [], chain=OUTPUT_CHAIN, ipv6=True)
        ])
    # Initial state: empty
    # Input: empty
    # Result: nothing happens
    with mock.patch('agent.iptc_helper.batch_add_rules') as batch_add_rules:
        block_networks([])
        batch_add_rules.assert_has_calls([
            mock.call('filter', [], chain=OUTPUT_CHAIN, ipv6=False),
            mock.call('filter', [], chain=OUTPUT_CHAIN, ipv6=True)
        ])
def test_block_ports(ipt_ports, ipt_ports_rules):
    """block_ports() installs the expected IPv4 rules on the INPUT chain."""
    with mock.patch('agent.iptc_helper.batch_add_rules') as batch_add_rules:
        block_ports(True, ipt_ports)
        batch_add_rules.assert_has_calls([
            mock.call('filter', [r for r, ipv6 in ipt_ports_rules], chain=INPUT_CHAIN, ipv6=False)
        ])
def test_fetch_credentials(tmpdir):
    """fetch_credentials() writes per-user and root credential files.

    Verifies: stale files are removed, JSON contents per owner, 0o400 file
    modes and correct chown to either root or the current user.
    """
    executor.Locker.LOCKDIR = str(tmpdir)
    agent.CREDENTIALS_PATH = str(tmpdir)
    json3_path_str = str(tmpdir / 'name3.json')
    json3_path = Path(json3_path_str)
    json3_path.write_text('nonzero')  # stale file that must be deleted
    pw = pwd.getpwnam("root")
    rt_uid = pw.pw_uid
    rt_gid = pw.pw_gid
    user = getenv('USER', 'nobody')
    pw = pwd.getpwnam(user)
    pi_uid = pw.pw_uid
    pi_gid = pw.pw_gid
    mock_resp = mock.Mock()
    mock_resp.raise_status = 200
    mock_resp.json = mock.Mock(
        return_value=[
            {'name': 'name1', 'data': {'key1': 'v1'}, 'linux_user': user},
            {'name': 'name2', 'data': {'key1': 'v21', 'key2': 'v22'}, 'linux_user': user},
            {'name': 'name2', 'data': {'key3': 'v23'}, 'linux_user': ''},
        ]
    )
    mock_resp.return_value.ok = True
    with mock.patch('agent.logger'), \
            mock.patch('agent.can_read_cert') as cr, \
            mock.patch('requests.request') as req, \
            mock.patch('os.chmod') as chm, \
            mock.patch('os.chown') as chw:
        cr.return_value = True
        req.return_value = mock_resp
        mock_resp.return_value.ok = True
        agent.fetch_credentials(False)
        assert Path.exists(tmpdir / user / 'name1.json')
        assert Path.exists(tmpdir / user / 'name2.json')
        assert Path.exists(tmpdir / 'name2.json')
        assert Path.exists(json3_path) is False
        pi_dir_path = str(tmpdir / user)
        pi_name1_path = str(tmpdir / user / 'name1.json')
        pi_name2_path = str(tmpdir / user / 'name2.json')
        rt_name2_path = str(tmpdir / 'name2.json')
        with open(pi_name1_path) as f:
            assert json.load(f) == {"key1": "v1"}
        with open(pi_name2_path) as f:
            assert json.load(f) == {"key1": "v21", "key2": "v22"}
        with open(rt_name2_path) as f:
            assert json.load(f) == {"key3": "v23"}
        chm.assert_has_calls([
            mock.call(pi_name1_path, 0o400),
            mock.call(pi_name2_path, 0o400),
            mock.call(rt_name2_path, 0o400),
        ], any_order=True)
        chw.assert_has_calls([
            mock.call(rt_name2_path, rt_uid, rt_gid),
            mock.call(pi_dir_path, pi_uid, pi_gid),
            mock.call(pi_name2_path, pi_uid, pi_gid),
            mock.call(pi_name1_path, pi_uid, pi_gid)
        ], any_order=True)
def test_fetch_credentials_no_dir(tmpdir):
    """fetch_credentials() creates the credentials directory when it is missing."""
    executor.Locker.LOCKDIR = str(tmpdir)
    agent.CREDENTIALS_PATH = str(tmpdir / 'notexist')
    file_path1 = tmpdir / 'notexist' / 'name1.json'
    file_path2 = tmpdir / 'notexist' / 'name2.json'
    mock_resp = mock.Mock()
    mock_resp.raise_status = 200
    mock_resp.json = mock.Mock(
        return_value=[
            {'name': 'name1', 'data': {'key1': 'v1'}},
            {'name': 'name2', 'data': {'key1': 'v21'}}
        ]
    )
    mock_resp.return_value.ok = True
    with mock.patch('agent.logger'), \
            mock.patch('agent.can_read_cert') as cr, \
            mock.patch('requests.request') as req:
        cr.return_value = True
        req.return_value = mock_resp
        mock_resp.return_value.ok = True
        agent.fetch_credentials(False)
        assert Path.exists(file_path1)
        assert Path.exists(file_path2)
        with open(str(file_path1)) as f:
            assert json.load(f) == {"key1": "v1"}
        with open(str(file_path2)) as f:
            assert json.load(f) == {"key1": "v21"}
def test_fetch_device_metadata(tmpdir):
    """fetch_device_metadata() overwrites the metadata file and chmods it to 0o600."""
    executor.Locker.LOCKDIR = str(tmpdir)
    json3_path_str = str(tmpdir / 'name3.json')
    json3_path = Path(json3_path_str)
    json3_path.write_text('nonzero')  # pre-existing content to be replaced
    agent.SECRET_DEV_METADATA_PATH = str(json3_path_str)
    mock_resp = mock.Mock()
    mock_resp.raise_status = 200
    mock_resp.json = mock.Mock(
        return_value={
            'manufacturer': 'Raspberry Pi',
            'device_id': '7fe5ef257a7a4ee38841a5f8bf672791.d.wott-dev.local',
            'string': 'test string value',
            'array': [1, 2, 3, 4, 5, 'penelopa'],
            'test': 'value',
            'model': 'Pi 3 Model B+'
        }
    )
    mock_resp.return_value.ok = True
    with mock.patch('agent.can_read_cert') as cr, \
            mock.patch('requests.request') as req, \
            mock.patch('agent.logger'), \
            mock.patch('os.chmod') as chm:
        cr.return_value = True
        req.return_value = mock_resp
        mock_resp.return_value.ok = True
        agent.fetch_device_metadata(False, agent.logger)
        assert Path.exists(json3_path)
        with open(json3_path_str) as f:
            assert json.load(f) == {
                'manufacturer': 'Raspberry Pi',
                'device_id': '7fe5ef257a7a4ee38841a5f8bf672791.d.wott-dev.local',
                'string': 'test string value',
                'array': [1, 2, 3, 4, 5, 'penelopa'],
                'test': 'value',
                'model': 'Pi 3 Model B+'
            }
        chm.assert_has_calls([
            mock.call(json3_path_str, 0o600),
        ])
def test_enroll_device_ok(tmpdir):
    """enroll_device() returns truthy and logs success on an HTTP 200 response."""
    executor.Locker.LOCKDIR = str(tmpdir)
    message = "Node d3d301961e6c4095b59583083bdec290.d.wott-dev.local enrolled successfully."
    mock_resp = mock.Mock()
    with mock.patch('requests.post') as req, \
            mock.patch('agent.logger') as prn:
        req.return_value = mock_resp
        req.return_value.ok = True
        req.return_value.status_code = 200
        req.return_value.content = {}
        assert agent.enroll_device(
            enroll_token="1dc99d48e67b427a9dc00b0f19003802",
            device_id="d3d301961e6c4095b59583083bdec290.d.wott-dev.local",
            claim_token="762f9d82-4e10-4d8b-826c-ac802219ec47"
        )
        assert prn.error.call_count == 0
        assert prn.info.call_count == 1
        assert mock.call(message) in prn.info.mock_calls
def test_enroll_device_nok(tmpdir):
    """enroll_device() returns falsy and logs every server-side field error on HTTP 400."""
    error_content = {
        "key": ["Pairnig-token not found"],
        "claim_token": ["Claim-token not found"]
    }
    mock_resp = mock.Mock()
    mock_resp.json = mock.Mock(return_value=error_content)
    with mock.patch('requests.post') as req, \
            mock.patch('agent.logger') as prn:
        req.return_value = mock_resp
        req.return_value.ok = False
        req.return_value.status_code = 400
        req.return_value.reason = "Bad Request"
        req.return_value.content = error_content
        assert not agent.enroll_device(
            enroll_token="1dc99d48e67b427a9dc00b0f19003802",
            device_id="d3d301961e6c4095b59583083bdec290.d.wott-dev.local",
            claim_token="762f9d82-4e10-4d8b-826c-ac802219ec47"
        )
        assert prn.error.call_count == 4
        assert mock.call('Failed to enroll node...') in prn.error.mock_calls
        assert mock.call('Code:400, Reason:Bad Request') in prn.error.mock_calls
        assert mock.call('claim_token : Claim-token not found') in prn.error.mock_calls
        assert mock.call('key : Pairnig-token not found') in prn.error.mock_calls
        assert prn.debug.call_count == 2
        assert mock.call("enroll-device :: [RECEIVED] Enroll by token post: 400") in prn.debug.mock_calls
        log_dbg_text = "enroll-device :: [RECEIVED] Enroll by token post: {}".format(error_content)
        assert mock.call(log_dbg_text) in prn.debug.mock_calls
def _mock_repr(self):
    """Stand-in ``__repr__`` for mtls_request mocks: repr is the mock's return_value."""
    return self.return_value
def test_enroll_in_operation_mode_ok(tmpdir):
    """Successful auto-enroll removes enroll_token from config.ini and logs progress."""
    executor.Locker.LOCKDIR = str(tmpdir)
    agent.INI_PATH = str(tmpdir / 'config.ini')
    with open(agent.INI_PATH, "w") as f:
        f.write("[DEFAULT]\nenroll_token = 123456\nrollback_token = 123456\n")
    mock_resp = mock.Mock()
    mock_resp.json = mock.Mock(return_value={})
    mock_mtls = mock.Mock()
    mock_mtls.json = mock.Mock(return_value={'claim_token': '3456', 'claimed': False})
    mock_mtls.return_value = "TestClaimToken"
    with mock.patch('agent.mtls_request') as mtls, \
            mock.patch('requests.post') as req, \
            mock.patch('agent.logger') as prn:
        req.return_value = mock_resp
        req.return_value.ok = True
        req.return_value.status_code = 200
        req.return_value.content = {}
        mtls.return_value = mock_mtls
        mtls.return_value.__repr__ = _mock_repr
        mtls.return_value.ok = True
        mtls.return_value.status_code = 200
        mtls.return_value.content = {}
        agent.try_enroll_in_operation_mode('deviceid000', True)
        assert mock.call.info('Enroll token found. Trying to automatically enroll the node.') in prn.method_calls
        assert mock.call.debug('\nDASH_ENDPOINT: %s\nWOTT_ENDPOINT: %s\nMTLS_ENDPOINT: %s', 'http://localhost:8000',
                               'http://localhost:8001/api', 'http://localhost:8002/api') in prn.method_calls
        assert mock.call.debug('[RECEIVED] Get Node Claim Info: TestClaimToken') in prn.method_calls
        assert mock.call.info('Node deviceid000 enrolled successfully.') in prn.method_calls
        assert mock.call.info('Update config...') in prn.method_calls
        assert len(prn.method_calls) == 5
    with open(agent.INI_PATH) as f:
        assert f.read() == "[DEFAULT]\nrollback_token = 123456\n\n"
def test_enroll_in_operation_mode_enroll_fail(tmpdir):
    """Failed auto-enroll leaves config.ini untouched and logs all error details."""
    error_content = {
        "key": ["Pairnig-token not found"],
    }
    executor.Locker.LOCKDIR = str(tmpdir)
    file_content = "[DEFAULT]\nenroll_token = 123456\nrollback_token = 123456\n"
    agent.INI_PATH = str(tmpdir / 'config.ini')
    with open(agent.INI_PATH, "w") as f:
        f.write(file_content)
    mock_resp = mock.Mock()
    mock_resp.json = mock.Mock(return_value=error_content)
    mock_mtls = mock.Mock()
    mock_mtls.json = mock.Mock(return_value={'claim_token': '3456', 'claimed': False})
    mock_mtls.return_value = "TestClaimToken"
    with mock.patch('agent.mtls_request') as mtls, \
            mock.patch('requests.post') as req, \
            mock.patch('agent.logger') as prn:
        req.return_value = mock_resp
        req.return_value.ok = False
        req.return_value.status_code = 400
        req.return_value.content = {}
        req.return_value.reason = "Bad Request"
        req.return_value.content = error_content
        mtls.return_value = mock_mtls
        mtls.return_value.__repr__ = _mock_repr
        mtls.return_value.ok = True
        mtls.return_value.status_code = 200
        mtls.return_value.content = {}
        agent.try_enroll_in_operation_mode('deviceid000', True)
        assert mock.call.info('Enroll token found. Trying to automatically enroll the node.') in prn.method_calls
        assert mock.call.debug('\nDASH_ENDPOINT: %s\nWOTT_ENDPOINT: %s\nMTLS_ENDPOINT: %s', 'http://localhost:8000',
                               'http://localhost:8001/api', 'http://localhost:8002/api') in prn.method_calls
        assert mock.call.debug('[RECEIVED] Get Node Claim Info: TestClaimToken') in prn.method_calls
        assert mock.call.error('Failed to enroll node...') in prn.method_calls
        assert mock.call.error('Code:400, Reason:Bad Request') in prn.method_calls
        assert mock.call.error('key : Pairnig-token not found') in prn.method_calls
        assert mock.call.debug('enroll-device :: [RECEIVED] Enroll by token post: 400') in prn.method_calls
        assert mock.call.debug("enroll-device :: [RECEIVED] Enroll by token post: {"
                               "'key': ['Pairnig-token not found']}") in prn.method_calls
        assert mock.call.error('Node enrolling failed. Will try next time.') in prn.method_calls
        assert len(prn.method_calls) == 9
    with open(agent.INI_PATH) as f:
        assert f.read() == file_content
def test_enroll_in_operation_mode_already_claimed(tmpdir):
    """When the node is already claimed, no enroll happens but the token is still dropped."""
    executor.Locker.LOCKDIR = str(tmpdir)
    agent.INI_PATH = str(tmpdir / 'config.ini')
    with open(agent.INI_PATH, "w") as f:
        f.write("[DEFAULT]\nenroll_token = 123456\nrollback_token = 123456\n")
    mock_mtls = mock.Mock()
    mock_mtls.json = mock.Mock(return_value={'claim_token': '3456', 'claimed': True})
    mock_mtls.return_value = "TestClaimToken"
    with mock.patch('agent.mtls_request') as mtls, \
            mock.patch('agent.logger') as prn:
        mtls.return_value = mock_mtls
        mtls.return_value.__repr__ = _mock_repr
        mtls.return_value.ok = True
        mtls.return_value.status_code = 200
        mtls.return_value.content = {}
        agent.try_enroll_in_operation_mode('deviceid000', True)
        assert mock.call.info('Enroll token found. Trying to automatically enroll the node.') in prn.method_calls
        assert mock.call.debug('\nDASH_ENDPOINT: %s\nWOTT_ENDPOINT: %s\nMTLS_ENDPOINT: %s', 'http://localhost:8000',
                               'http://localhost:8001/api', 'http://localhost:8002/api') in prn.method_calls
        assert mock.call.debug('[RECEIVED] Get Node Claim Info: TestClaimToken') in prn.method_calls
        assert mock.call.info('The node is already claimed. No enrolling required.') in prn.method_calls
        assert mock.call.info('Update config...') in prn.method_calls
        assert len(prn.method_calls) == 5
    with open(agent.INI_PATH) as f:
        assert f.read() == "[DEFAULT]\nrollback_token = 123456\n\n"
def test_enroll_in_operation_mode_no_claim_info(tmpdir):  # or server error
    """A failed claim-info request aborts enrolling and keeps config.ini unchanged."""
    executor.Locker.LOCKDIR = str(tmpdir)
    agent.INI_PATH = str(tmpdir / 'config.ini')
    file_content = "[DEFAULT]\nenroll_token = 123456\nrollback_token = 123456\n"
    with open(agent.INI_PATH, "w") as f:
        f.write(file_content)
    mock_mtls = mock.Mock()
    mock_mtls.json = mock.Mock(return_value={})
    with mock.patch('agent.mtls_request') as mtls, \
            mock.patch('agent.logger') as prn:
        mtls.return_value = mock_mtls
        mtls.return_value.__repr__ = _mock_repr
        mtls.return_value.ok = False
        mtls.return_value.status_code = 400
        mtls.return_value.content = {}
        agent.try_enroll_in_operation_mode('deviceid000', True)
        assert mock.call.info('Enroll token found. Trying to automatically enroll the node.') in prn.method_calls
        assert mock.call.debug('\nDASH_ENDPOINT: %s\nWOTT_ENDPOINT: %s\nMTLS_ENDPOINT: %s', 'http://localhost:8000',
                               'http://localhost:8001/api', 'http://localhost:8002/api') in prn.method_calls
        assert mock.call.error('Did not manage to get claim info from the server.') in prn.method_calls
        assert len(prn.method_calls) == 3
    with open(agent.INI_PATH) as f:
        assert f.read() == file_content
def test_enroll_in_operation_mode_no_token(tmpdir):  # or server error
    """Without an enroll_token in config.ini the function is a silent no-op."""
    executor.Locker.LOCKDIR = str(tmpdir)
    file_content = "[DEFAULT]\nrollback_token = 123456\n"
    agent.INI_PATH = str(tmpdir / 'config.ini')
    with open(agent.INI_PATH, "w") as f:
        f.write(file_content)
    with mock.patch('agent.logger') as prn:
        agent.try_enroll_in_operation_mode('deviceid000', True)
        assert len(prn.method_calls) == 0
    with open(agent.INI_PATH) as f:
        assert f.read() == file_content
@pytest.mark.vcr
def test_deb_package_cache(tmpdir, cert, key, raspberry_cpuinfo, net_connections_fixture, uptime):
    """
    Test the package list caching behavior.

    First ping: server has no hash yet, so deb_packages (with its hash) is sent.
    Second ping: server already returned the hash, so deb_packages is omitted.
    """
    crt_path = tmpdir / 'client.crt'
    key_path = tmpdir / 'client.key'
    agent.CERT_PATH = str(tmpdir)
    agent.CLIENT_CERT_PATH = str(crt_path)
    agent.CLIENT_KEY_PATH = str(key_path)
    Path(agent.CLIENT_CERT_PATH).write_text(cert)
    Path(agent.CLIENT_KEY_PATH).write_text(key)
    with mock.patch(
        'builtins.open',
        mock.mock_open(read_data=raspberry_cpuinfo),
        create=True
    ), \
            mock.patch('socket.getfqdn') as getfqdn, \
            mock.patch('psutil.net_connections') as net_connections, \
            mock.patch('agent.iptables_helper.dump') as fr, \
            mock.patch('agent.security_helper.check_for_default_passwords') as chdf, \
            mock.patch('agent.security_helper.process_scan') as ps, \
            mock.patch('agent.iptables_helper.block_ports') as bp, \
            mock.patch('agent.iptables_helper.block_networks') as bn, \
            mock.patch('agent.journal_helper.logins_last_hour') as logins, \
            mock.patch('apt.Cache') as aptCache, \
            mock.patch('agent.mtls_request', wraps=agent.mtls_request) as mtls, \
            mock.patch(
                'builtins.open',
                mock.mock_open(read_data=uptime),
                create=True
            ):  # noqa E213
        deb_pkg = mock.MagicMock()
        deb_pkg.installed.package.name = 'thepackage'
        deb_pkg.installed.source_name = 'thepackage'
        deb_pkg.installed.version = 'theversion'
        deb_pkg.installed.source_version = 'theversion'
        deb_pkg.installed.architecture = 'i386'
        aptCache.return_value = [deb_pkg]
        net_connections.return_value = net_connections_fixture[0],
        fr.return_value = {}
        chdf.return_value = False
        ps.return_value = []
        getfqdn.return_value = 'localhost'
        bp.return_value = None
        bn.return_value = None
        logins.return_value = {}
        # If the server doesn't have our package list yet it won't send deb_package_hash.
        # In this case send_ping should send the package list and the hash.
        agent.MTLS_ENDPOINT = 'https://mtls.wott.io'
        agent.send_ping()
        deb_packages_json = mtls.call_args[1]['json']['deb_packages']
        assert deb_packages_json['hash'] == 'e88b4875f08ede2e1068e117bdaa80ac'
        # The second time the server already knows the hash and sends it in deb_package_hash.
        # send_ping should not send deb_packages in this case.
        agent.MTLS_ENDPOINT = 'https://mtls.wott.io'
        agent.send_ping()
        deb_packages_json = mtls.call_args[1]['json']
        assert 'deb_packages' not in deb_packages_json
def _is_parallel(tmpdir, use_lock: bool, use_pairs: bool = False):
    """
    Execute two "sleepers" at once.
    :param tmpdir: temp directory where logs and locks will be stored (provided by pytest)
    :param use_lock: use executor.Locker to execute exclusively
    :param use_pairs: additionally run a second couple of tasks under a different
        lock name, to check that distinct locks do not exclude each other
    :return: whether the two tasks were seen executing in parallel (boolean value),
        or a 3-tuple of booleans (pair one, pair two, across pairs) when use_pairs is set
    """
    def _work(f: Path):
        """The actual workload: sleep and write before/after timestamps to provided file"""
        of = f.open('a+')
        of.write('{} '.format(time.time()))
        time.sleep(0.1)
        of.write('{}\n'.format(time.time()))
    def sleeper(lock: bool, f: Path, lockname: str):
        """This task will be executed by executor."""
        executor.Locker.LOCKDIR = str(tmpdir)  # can't use /var/lock in CircleCI environment
        if lock:
            with executor.Locker(lockname):
                _work(f)
        else:
            _work(f)
    def stop_exe():
        """Stop execution of tasks launched by executor."""
        for fut in futs:
            fut.cancel()
        for exe in exes:
            exe.stop()
        asyncio.get_event_loop().stop()
    def find_parallel(first_pairs, second_pairs):
        """Return True if any interval of first_pairs overlaps one of second_pairs."""
        parallel = False
        for begin1, end1 in first_pairs:
            # Find a pair in second_pairs overlapping with first_pair.
            # That means execution was overlapped (parallel).
            for begin2, end2 in second_pairs:
                if begin2 <= begin1 <= end2 or begin2 <= end1 <= end2:
                    parallel = True
                    break
            if parallel:
                break
        return parallel
    def is_parallel(timestamp_files):
        """Parse two timestamp files and decide whether their intervals overlapped."""
        # Parse timestamp files. Split them into (begin, end) tuples.
        file_time_pairs = []
        for f in timestamp_files:
            of = f.open('r')
            times = []
            for line in of.read().splitlines():
                begin, end = line.split()
                times.append((float(begin), float(end)))
            file_time_pairs.append(times)
        first_pairs, second_pairs = file_time_pairs
        return find_parallel(first_pairs, second_pairs) or find_parallel(second_pairs, first_pairs)
    # Schedule two identical tasks to executor. They will write before/after timestamps
    # to their files every 100 ms.
    test_files = [tmpdir / 'test_locker_' + str(i) for i in range(2)]
    exes = [executor.Executor(0.5, sleeper, (use_lock, test_file, 'one')) for test_file in test_files]
    # If testing independent locking, schedule another couple of tasks with another lock and another
    # set of timestamp files.
    if use_pairs:
        test_files_2 = [tmpdir / 'test_locker_2_' + str(i) for i in range(2)]
        exes += [executor.Executor(0.5, sleeper, (use_lock, test_file, 'two')) for test_file in test_files_2]
    futs = [executor.schedule(exe) for exe in exes]
    # Stop this after 3 seconds
    asyncio.get_event_loop().call_later(3, stop_exe)
    executor.spin()
    if use_lock:
        # When using Locker the tasks need some additional time to stop.
        time.sleep(3)
    if use_pairs:
        # If testing independent locking, find out:
        # - whether first couple of tasks were executed in parallel
        # - whether second couple of tasks were executed in parallel
        # - whether tasks from both couples were executed in parallel
        return is_parallel(test_files), \
            is_parallel(test_files_2), \
            is_parallel((test_files[0], test_files_2[0]))
    else:
        return is_parallel(test_files)
def test_locker(tmpdir):
    """Same-named Locker serializes the two tasks — no overlap observed."""
    assert not _is_parallel(tmpdir, True)
def test_no_locker(tmpdir):
    """Without a lock the two tasks are observed running in parallel."""
    assert _is_parallel(tmpdir, False)
def test_independent_lockers(tmpdir):
    """Different lock names exclude within a pair but not across pairs."""
    one, two, both = _is_parallel(tmpdir, True, True)
    assert (one, two, both) == (False, False, True)
def test_selinux_status():
    """selinux_status() maps libselinux enabled/enforce flags to a status dict."""
    with mock.patch('selinux.is_selinux_enabled') as selinux_enabled,\
            mock.patch('selinux.security_getenforce') as getenforce:
        selinux_enabled.return_value = 1
        getenforce.return_value = 1
        assert selinux_status() == {'enabled': True, 'mode': 'enforcing'}
        selinux_enabled.return_value = 1
        getenforce.return_value = 0
        assert selinux_status() == {'enabled': True, 'mode': 'permissive'}
        selinux_enabled.return_value = 0
        assert selinux_status() == {'enabled': False, 'mode': None}
def test_kernel_cmdline(cmdline):
    """kernel_cmdline() parses a value-less parameter to an empty string."""
    class mockPath():
        # Minimal Path stand-in: only read_text() is used by kernel_cmdline().
        def __init__(self, filename):
            self._filename = filename
        def read_text(self):
            return cmdline
    with mock.patch('agent.os_helper.Path', mockPath):
        cmdline = kernel_cmdline()
        assert cmdline['one'] == ''
|
{"/agent/__main__.py": ["/agent/__init__.py", "/agent/security_helper.py"], "/agent/security_helper.py": ["/agent/os_helper.py", "/agent/__init__.py"], "/agent/__init__.py": ["/agent/executor.py", "/agent/os_helper.py"], "/tests/test_agent.py": ["/agent/__init__.py", "/agent/journal_helper.py", "/agent/os_helper.py", "/agent/iptables_helper.py", "/agent/security_helper.py"], "/agent/os_helper.py": ["/agent/__init__.py"], "/setup.py": ["/version.py"], "/agent/iptables_helper.py": ["/agent/__init__.py"], "/tests/conftest.py": ["/agent/iptables_helper.py"]}
|
39,337
|
WoTTsecurity/agent
|
refs/heads/master
|
/agent/os_helper.py
|
import hashlib
import os
import platform
import re
from os.path import isfile
from pathlib import Path
from enum import Enum
from functools import cmp_to_key
import pkg_resources
# Splits a Debian kernel image package name (e.g. 'linux-image-4.19.0-6-amd64')
# into three groups: version prefix, ABI number and flavour suffix.
DEBIAN_KERNEL_PKG_NAME_RE = re.compile(r'(linux-image-\d+\.\d+\.\d+-)(\d+)([.-].+)')
def detect_raspberry_pi():
    """Detect Raspberry Pi hardware from the device-tree files.

    Returns:
        dict with keys 'is_raspberry_pi' (bool or None), 'hardware_model'
        (str or None) and 'serial_number' (str or None). Values stay None
        when the device-tree files are absent (non-Pi hardware or non-Linux).
    """
    metadata = {
        'is_raspberry_pi': None,
        'hardware_model': None,
        'serial_number': None
    }
    proc_model = Path('/proc/device-tree/model')
    proc_serial = Path('/proc/device-tree/serial-number')
    if proc_model.is_file():
        # read_text() closes the handle (the previous .open().read() leaked it);
        # device-tree strings are NUL-terminated, hence strip('\0').
        model = proc_model.read_text().strip('\0')
        metadata['hardware_model'] = model
        metadata['is_raspberry_pi'] = model.startswith('Raspberry Pi')
    if proc_serial.is_file():
        metadata['serial_number'] = proc_serial.read_text().strip('\0')
    return metadata
class Confinement(Enum):
    """How the agent process is confined/containerized on the host."""
    NONE = 0
    DOCKER = 1
    BALENA = 2
    SNAP = 3
class Installation(Enum):
    """How the agent was installed on the system."""
    NONE = 0
    DEB = 1
    PYTHON_PACKAGE = 2
    RPM = 3
class CloudProvider(Enum):
    """Cloud vendor the node runs on, if any."""
    NONE = 0
    AMAZON = 1
    GOOGLE = 2
    MICROSOFT = 3
def detect_confinement():
    """Detect the runtime confinement of the agent process.

    Returns:
        A Confinement enum member: SNAP, BALENA, DOCKER or NONE.
    """
    if os.getenv('SNAP'):
        return Confinement.SNAP
    # /proc/1/cgroup mentions 'docker' when PID 1 runs inside a Docker
    # container; read via a context manager so the handle is closed
    # (the previous open(...).read() leaked it).
    is_docker = False
    if isfile('/proc/1/cgroup'):
        with open('/proc/1/cgroup', 'rt') as cgroup_file:
            is_docker = 'docker' in cgroup_file.read()
    if is_docker:
        # Balena (formerly Resin) runs on top of Docker but sets its own env vars.
        if os.getenv('BALENA') or os.getenv('RESIN'):
            return Confinement.BALENA
        else:
            return Confinement.DOCKER
    return Confinement.NONE
def is_debian():
    """Return True when the distro is Debian or a Debian derivative."""
    release_info = get_os_release()
    root_distro = release_info.get('distro_root', release_info['distro'])
    return root_distro == 'debian'
def is_amazon_linux2():
    """
    Check if the node is running Amazon Linux 2.
    """
    return get_os_release().get('codename') == 'amzn2'
def detect_installation():
    """Determine how the agent was installed.

    Returns:
        An Installation enum member: DEB, RPM, PYTHON_PACKAGE or NONE.
    """
    if is_debian():  # For apt-based distros.
        import apt
        cache = apt.Cache()
        # It's a deb install when this very file belongs to the 'wott-agent' package.
        if 'wott-agent' in cache and __file__ in cache['wott-agent'].installed_files:
            return Installation.DEB
    elif is_amazon_linux2():  # For Amazon Linux 2.
        import rpm
        ts = rpm.ts()
        package_iterator = ts.dbMatch('name', 'python3-wott-agent')
        if package_iterator.count() > 0:
            package_header = next(package_iterator)
            # RPM stores file names as bytes, hence the encode().
            if __file__.encode() in package_header[rpm.RPMTAG_FILENAMES]:
                return Installation.RPM
    # Other.
    from agent import __version__
    if isinstance(__version__, pkg_resources.Distribution):
        return Installation.PYTHON_PACKAGE
    return Installation.NONE
def detect_cloud():
    """Best-effort detection of the cloud provider this node runs on.

    Uses DMI identifiers exposed under /sys: the BIOS version string for
    GCE/EC2 and the well-known chassis asset tag for Azure.

    Returns:
        A CloudProvider enum member.
    """
    bios_version = Path('/sys/devices/virtual/dmi/id/bios_version')
    if bios_version.is_file():
        bios_version = bios_version.read_text().strip()
        if bios_version == 'Google':
            return CloudProvider.GOOGLE
        elif bios_version.endswith('.amazon'):
            return CloudProvider.AMAZON
    else:
        # NOTE(review): the Azure chassis-tag check only runs when the BIOS
        # version file is *absent*; an Azure VM that does expose bios_version
        # would not be detected here — confirm this is intentional.
        chassis = Path('/sys/devices/virtual/dmi/id/chassis_asset_tag')
        if chassis.is_file() and chassis.read_text().strip() == '7783-7084-3265-9085-8269-3286-77':
            return CloudProvider.MICROSOFT
    return CloudProvider.NONE
def get_packages():
    """Collect the list of installed packages and a hash over it.

    Supports apt-based distros and Amazon Linux 2 (rpm). Returns None on
    any other platform.

    Returns:
        dict with 'hash' (md5 hex digest of the sorted (name, version) list)
        and 'packages' (list of dicts with name/version/arch/source_name/
        source_version), or None when the platform is unsupported.
    """
    if is_debian():  # For apt-based distros.
        import apt
        cache = apt.Cache()
        packages = [deb for deb in cache if deb.is_installed]
        # Calculate packages hash.
        packages_str = str(sorted((deb.installed.package.name, deb.installed.version) for deb in packages))
        packages_hash = hashlib.md5(packages_str.encode()).hexdigest()
        return {
            'hash': packages_hash,
            'packages': [{
                'name': deb.installed.package.name,
                'version': deb.installed.version,
                'arch': deb.installed.architecture,
                'source_name': deb.installed.source_name,
                'source_version': deb.installed.source_version
            } for deb in packages]
        }
    elif is_amazon_linux2():  # For Amazon Linux 2.
        import rpm
        ts = rpm.ts()
        package_iterator = ts.dbMatch()
        # All packages except for kernel.
        packages = [package_header for package_header in package_iterator if
                    package_header[rpm.RPMTAG_NAME].decode() != 'kernel']
        # Find the newest kernel package.
        package_iterator = ts.dbMatch('name', 'kernel')
        if package_iterator.count() > 0:
            kernel_package = sorted([package_header for package_header in package_iterator],
                                    key=cmp_to_key(rpm.versionCompare), reverse=True)[0]
            packages.append(kernel_package)
        # Calculate packages hash.
        packages_str = str(
            sorted((package_header[rpm.RPMTAG_NAME].decode(), package_header[rpm.RPMTAG_EVR].decode())
                   for package_header in packages))
        packages_hash = hashlib.md5(packages_str.encode()).hexdigest()
        return {
            'hash': packages_hash,
            'packages': [{
                'name': package_header[rpm.RPMTAG_NAME].decode(),
                'version': package_header[rpm.RPMTAG_EVR].decode(),
                'arch': package_header[rpm.RPMTAG_ARCH].decode() if package_header[rpm.RPMTAG_ARCH] else 'noarch',
                # Looks like there's no source name/version in the rpm package info.
                # TEMP: pass package name and version.
                'source_name': package_header[rpm.RPMTAG_NAME].decode(),
                'source_version': package_header[rpm.RPMTAG_EVR].decode()
            } for package_header in packages]
        }
    return None
def get_os_release():
    """
    Returns a dict with the following items:
    distro: Concrete distro name. Examples: raspbian, ubuntu, debian, ubuntu-core.
    version: Short, numerical version. Examples: 9, 18.04, 18.
    distro_root: The root distro (from which the distro was branched). Optional. Examples: debian.
    full_version: Longer, human-readable version. Optional. Examples (last one is from ubuntu-core):
      "9 (stretch)", "18.04.3 LTS (Bionic Beaver)", 18
    codename: Distro version codename. Optional. Examples: stretch, bionic.
    """
    os_release = Path('/etc/os-release')
    # Normally this file should be present on any Linux system starting with Jessie (and not only Debian).
    # But we may be running in some pre-2012 system...
    if not os_release.is_file():
        # platform.linux_distribution() was removed in Python 3.8; fall back
        # to generic platform info when it is unavailable.
        try:
            name, version, codename = platform.linux_distribution()
        except AttributeError:
            name, version, codename = platform.system().lower(), platform.release(), ''
        return {'distro': name, 'version': version, 'codename': codename}
    # Mapping from os-release keys to our result keys.
    PARAM_NAMES = {
        'ID': 'distro',
        'ID_LIKE': 'distro_root',
        'VERSION_ID': 'version',
        'VERSION': 'full_version',
        'VERSION_CODENAME': 'codename'
    }
    with os_release.open() as os_release_file:
        lines = os_release_file.read().splitlines()
    # Skip blank/malformed lines (no '=') instead of crashing on unpack.
    os_info = {PARAM_NAMES[param]: value.strip('"') for param, value in (
        line.split('=', 1) for line in lines if '=' in line) if param in PARAM_NAMES}
    # Set proper codename for Debian/Raspbian Jessie.
    if 'codename' not in os_info and os_info.get('distro', '') in ('debian', 'raspbian') and \
            os_info.get('version', '') == '8':
        os_info['codename'] = 'jessie'
    # Set proper codename for Amazon Linux 2.
    if 'codename' not in os_info and os_info.get('distro', '') == 'amzn' and os_info.get('version', '') == '2':
        os_info['codename'] = 'amzn2'
    return os_info
def auto_upgrades_enabled():
    """
    Checks if auto-updates are enabled on a system.

    On apt-based distros: unattended-upgrades must be on, package lists must
    be refreshed periodically, and the security origins must be allowed
    (Ubuntu's Allowed-Origins or Debian's Origins-Pattern style).
    On Amazon Linux 2: yum-cron must be installed, active, and configured to
    both download and apply updates.

    :return: boolean, or None when the platform is unsupported.
    """
    if is_debian():  # For apt-based distros.
        import apt_pkg
        apt_pkg.init_config()
        config = apt_pkg.config
        if 'Unattended-Upgrade' in config and 'APT::Periodic' in config:
            apt_periodic = config.subtree('APT::Periodic')
            unattended_upgrade = apt_periodic.get('Unattended-Upgrade')
            update_package_lists = apt_periodic.get('Update-Package-Lists')
            allowed_origins = config.subtree('Unattended-Upgrade').value_list('Allowed-Origins')  # Ubuntu
            origins_pattern = config.subtree('Unattended-Upgrade').value_list('Origins-Pattern')  # Debian
            return unattended_upgrade == '1' and \
                update_package_lists == '1' and \
                (('${distro_id}:${distro_codename}' in allowed_origins
                  and '${distro_id}:${distro_codename}-security' in allowed_origins)
                 or 'origin=Debian,codename=${distro_codename},label=Debian-Security' in origins_pattern)
        return False
    elif is_amazon_linux2():  # For Amazon Linux 2.
        # 1. check if yum-cron installed
        # 2. check if it's running
        # 3. check if it has proper values in its config file
        import rpm
        try:
            from sh import systemctl
        except ImportError:
            # No systemd - probably yum-cron is not running
            # TODO: use "service" executable which also works without systemd and on older systems
            return False
        ts = rpm.ts()
        package_iterator = ts.dbMatch('name', 'yum-cron')
        if package_iterator.count() > 0:  # Package is installed.
            result = systemctl(['is-active', 'yum-cron'], _ok_code=[0, 3]).stdout.decode().strip()
            if result == 'active':
                config = open('/etc/yum/yum-cron.conf').read()
                if '\ndownload_updates = yes' in config and '\napply_updates = yes' in config:
                    return True
        return False
    return None
def kernel_cmdline(cmdline_path=Path('/proc/cmdline')):
    """
    Parse kernel boot parameters (aka cmdline).

    :param cmdline_path: pathlib.Path of the cmdline file; defaults to
        /proc/cmdline. (Generalized so the parser can be exercised against
        arbitrary files; default preserves the original behavior.)
    :return: a dict where the key is a kernel parameter name and the value is
        its value, or an empty string if no value was provided. Surrounding
        double quotes are stripped from values.
    """
    # name[=value] pairs; values may be double-quoted and contain spaces.
    cmdline_matches = re.compile(r"([\w\-\.]+)(\=(\"[\w\W]+\"|[\w\S]+)?)?").findall(cmdline_path.read_text())
    return {name: value.strip('"') for name, _, value in cmdline_matches}
def get_kernel_deb_package(boot_image_path):
    """
    Look up the installed deb package that owns the running kernel image.

    :param boot_image_path: path of the kernel image (from BOOT_IMAGE).
    :return: an apt package instance, or None if no installed package owns it.
    """
    import apt

    class OwnsBootImage(apt.cache.Filter):
        """Matches installed packages that ship the given boot image file."""
        def apply(self, pkg):
            return pkg.is_installed and boot_image_path in pkg.installed_files

    filtered_cache = apt.cache.FilteredCache(apt.Cache())
    filtered_cache.set_filter(OwnsBootImage())
    # The first matching package (or None) — same result as collecting a list
    # and returning its head.
    return next(iter(filtered_cache), None)
def get_kernel_rpm_package(boot_image_path):
    """
    Look up the installed rpm package that owns the running kernel image.

    :param boot_image_path: path of the kernel image (from BOOT_IMAGE).
    :return: an rpm header instance, or None if no installed package owns it.
    """
    import rpm
    needle = boot_image_path.encode()
    # Walk the whole rpm database and return the first package whose file
    # list contains the boot image path.
    for package_header in rpm.ts().dbMatch():
        if needle in package_header[rpm.RPMTAG_FILENAMES]:
            return package_header
    return None
def kernel_package_info():
    """
    Return the newest installed version of the currently running kernel package's info.

    :return: dict with 'name', 'version', 'source_name', 'source_version' and
        'arch' keys, or None when the running kernel can't be determined or
        the distro is unsupported.
    """
    # BOOT_IMAGE is the kernel image path the bootloader put on the cmdline.
    boot_image_path = kernel_cmdline().get('BOOT_IMAGE')
    if boot_image_path is None:
        return None
    if is_debian():  # For apt-based distros.
        kernel_pkg = get_kernel_deb_package(boot_image_path)
        if kernel_pkg is not None:
            match = DEBIAN_KERNEL_PKG_NAME_RE.match(kernel_pkg.name)
            if match:
                name_parts = match.groups()  # E.g. ('linux-image-4.4.0-', '174', '-generic')
                # Pick the newest installed build of the same kernel flavour.
                latest_kernel_pkg = get_latest_same_kernel_deb(name_parts[0], name_parts[2])
                return {
                    'name': latest_kernel_pkg.name,
                    'version': latest_kernel_pkg.installed.version,
                    'source_name': latest_kernel_pkg.installed.source_name,
                    'source_version': latest_kernel_pkg.installed.source_version,
                    'arch': latest_kernel_pkg.installed.architecture
                }
    elif is_amazon_linux2():  # For Amazon Linux 2.
        import rpm
        ts = rpm.ts()
        package_iterator = ts.dbMatch('name', 'kernel')
        if package_iterator.count() > 0:
            # Sort installed 'kernel' packages by rpm version order, newest first.
            latest_kernel_pkg = sorted([package_header for package_header in package_iterator],
                                       key=cmp_to_key(rpm.versionCompare), reverse=True)[0]
            return {
                'name': latest_kernel_pkg[rpm.RPMTAG_NAME].decode(),
                'version': latest_kernel_pkg[rpm.RPMTAG_EVR].decode(),
                'arch': latest_kernel_pkg[rpm.RPMTAG_ARCH].decode() if latest_kernel_pkg[rpm.RPMTAG_ARCH] is not None
                else 'noarch',
                # Looks like there's no source name/version in the rpm package info.
                # TEMP: pass package name and version.
                'source_name': latest_kernel_pkg[rpm.RPMTAG_NAME].decode(),
                'source_version': latest_kernel_pkg[rpm.RPMTAG_EVR].decode()
            }
    return None
def get_latest_same_kernel_deb(name_part0, name_part2):
    """
    Among installed kernel debs whose name matches
    <name_part0><build-number><name_part2>, return the one with the highest
    build number.
    """
    import apt
    pattern = re.compile(name_part0 + r'(\d+)' + name_part2)

    class SameKernelFilter(apt.cache.Filter):
        """Matches installed packages whose name fits the build-number pattern."""
        def apply(self, pkg):
            return pkg.is_installed and pattern.match(pkg.name)

    filtered_cache = apt.cache.FilteredCache(apt.Cache())
    filtered_cache.set_filter(SameKernelFilter())
    numbered = [(int(pattern.match(deb.name).group(1)), deb) for deb in filtered_cache]
    numbered.sort(reverse=True)
    return numbered[0][1]
def reboot_required():
    """
    Check if reboot required by comparing running kernel package's version
    with the newest installed kernel package's one.

    :return: True/False, or None when undeterminable (no BOOT_IMAGE on the
        cmdline, kernel package not found, or unsupported distro).
    """
    boot_image_path = kernel_cmdline().get('BOOT_IMAGE')
    if boot_image_path is None:
        return None
    if is_debian():  # For apt-based distros.
        import apt_pkg
        apt_pkg.init()
        kernel_pkg = get_kernel_deb_package(boot_image_path)
        if kernel_pkg is not None:
            match = DEBIAN_KERNEL_PKG_NAME_RE.match(kernel_pkg.name)
            if match:
                name_parts = match.groups()  # E.g. ('linux-image-4.4.0-', '174', '-generic')
                latest_kernel_pkg = get_latest_same_kernel_deb(name_parts[0], name_parts[2])
                # True when a newer build of the running kernel is installed.
                return apt_pkg.version_compare(latest_kernel_pkg.installed.version, kernel_pkg.installed.version) > 0
    elif is_amazon_linux2():  # For Amazon Linux 2.
        import rpm
        kernel_pkg = get_kernel_rpm_package(boot_image_path)
        if kernel_pkg is not None:
            ts = rpm.ts()
            # Find the newest kernel package.
            package_iterator = ts.dbMatch('name', 'kernel')
            if package_iterator.count() > 0:
                latest_kernel_pkg = sorted([package_header for package_header in package_iterator],
                                           key=cmp_to_key(rpm.versionCompare), reverse=True)[0]
                return rpm.versionCompare(latest_kernel_pkg, kernel_pkg) > 0
    return None
def confirmation(message):
    """
    Prompt the user with *message* and return True on a yes answer.

    Bug fix: the prompt shows "[y/N]" but the original only accepted a
    lowercase 'y'; an uppercase 'Y' now also counts as yes. Anything else
    (including an empty answer) is treated as "no", per the [y/N] default.
    """
    yesno = input(message + " [y/N]")
    return yesno.strip().lower() == 'y'
def upgrade_packages(pkg_names):
    """
    Update all passed (as a list) OS packages.

    Asks for interactive confirmation before applying the upgrade.
    :param pkg_names: iterable of package names; duplicates are ignored.
    """
    unique_names = set(pkg_names)
    message = "The following packages will be upgraded:\n\t{}\nConfirm:"
    packages = []
    if is_debian():  # For apt-based distros.
        import apt
        cache = apt.cache.Cache()
        cache.update(apt.progress.text.AcquireProgress())
        cache.open()
        for pkg_name in unique_names:
            # Older versions of python3-apt don't provide full dict interface, namely .get().
            # The result of this expression will either be False or a apt.package.Package instance.
            pkg = pkg_name in cache and cache[pkg_name]
            if pkg and pkg.is_installed and pkg.is_upgradable:
                packages.append(pkg_name)
                pkg.mark_upgrade()
        if confirmation(message.format(', '.join(packages))):
            cache.commit()
    elif is_amazon_linux2():  # For Amazon Linux 2.
        import rpm
        from sh import yum  # pylint: disable=E0401
        ts = rpm.ts()
        # This will be a list like:
        # package.arch version repo
        list_updates = yum(['list', 'updates', '-q', '--color=no']).stdout
        # This will get a list of "package.arch" (first column, header skipped).
        updates = [line.split(maxsplit=1)[0] for line in list_updates.splitlines()[1:]]
        for pkg_name in unique_names:
            package_iterator = ts.dbMatch('name', pkg_name)
            for package in package_iterator:
                # Package may be installed for multiple architectures. Get them all.
                fullname = b'.'.join((package[rpm.RPMTAG_NAME], package[rpm.RPMTAG_ARCH]))
                if fullname in updates:
                    packages.append(fullname.decode())
        if confirmation(message.format(', '.join(packages))):
            yum(['update', '-y'] + packages)
|
{"/agent/__main__.py": ["/agent/__init__.py", "/agent/security_helper.py"], "/agent/security_helper.py": ["/agent/os_helper.py", "/agent/__init__.py"], "/agent/__init__.py": ["/agent/executor.py", "/agent/os_helper.py"], "/tests/test_agent.py": ["/agent/__init__.py", "/agent/journal_helper.py", "/agent/os_helper.py", "/agent/iptables_helper.py", "/agent/security_helper.py"], "/agent/os_helper.py": ["/agent/__init__.py"], "/setup.py": ["/version.py"], "/agent/iptables_helper.py": ["/agent/__init__.py"], "/tests/conftest.py": ["/agent/iptables_helper.py"]}
|
39,338
|
WoTTsecurity/agent
|
refs/heads/master
|
/agent/executor.py
|
import asyncio
import os
from concurrent.futures import ThreadPoolExecutor
import fcntl
from multiprocessing import Process
from multiprocessing import Queue
from typing import Callable
from typing import Dict
from typing import Any
import logging
logger = logging.getLogger('agent.executor')
class Executor:
    """
    Periodically runs a function in a separate process (so it can be killed
    on timeout), scheduling each run through a shared thread pool.
    """
    MAX_WORKERS = 10
    # NOTE(review): MAX_WORKERS is truthy (10), so 'or os.cpu_count()' never
    # takes effect; the pool size is effectively always MAX_WORKERS.
    processes = MAX_WORKERS or os.cpu_count()
    # Class-level pool shared by all Executor instances.
    executor = ThreadPoolExecutor(max_workers=processes)

    def __init__(self,
                 interval,
                 func, fargs,
                 timeout=None,
                 callback_timeout=None,
                 daemon=False):
        """
        Periodic process executor. Calls func and sleeps for interval,
        repeatedly. Kills the process after a timeout.
        Call schedule() to put it into asyncio loop.
        :param interval: sleep interval between calls, in seconds. If None, Executor will only execute once.
        :param func: the function to call
        :param fargs: function args (tuple) or a single arg
        :param timeout: kill the process after this many seconds
        :param callback_timeout: will be called if the process gets killed on timeout
        :param daemon: run the child process as a daemon
        """
        self.interval = interval
        # Bundled kwargs for _submit(); unpacked by _submit_unpack_kwargs().
        self.params = {'func': func, 'fn_args': fargs, "p_kwargs": {},
                       'timeout': timeout, 'callback_timeout': callback_timeout,
                       'daemon': daemon}
        self.process = None
        self.oneshot = interval is None
        self.should_stop = False

    async def start(self):
        """ start calling the process periodically """
        while not self.should_stop:
            # Hand the blocking process management off to the thread pool so
            # this coroutine only sleeps between submissions.
            self.executor.submit(self._submit_unpack_kwargs, self.params)
            if self.oneshot:
                break
            await asyncio.sleep(self.interval)

    def stop(self):
        """ terminate running process """
        self.should_stop = True
        if self.process:
            self.process.terminate()

    def _submit_unpack_kwargs(self, params):
        """ unpack the kwargs and call submit """
        return self._submit(**params)

    def _submit(self,
                func: Callable,
                fn_args: Any,
                p_kwargs: Dict,
                timeout: float,
                callback_timeout: Callable[[Any], Any],
                daemon: bool):
        """
        Submits a callable to be executed with the given arguments.
        Schedules the callable to be executed as func(*args, **kwargs) in a new
        process.
        :param func: the function to execute
        :param fn_args: the arguments to pass to the function. Can be one argument
            or a tuple of multiple args.
        :param p_kwargs: the kwargs to pass to the function
        :param timeout: after this time, the process executing the function
            will be killed if it did not finish
        :param callback_timeout: this function will be called with the same
            arguments, if the task times out.
        :param daemon: run the child process as daemon
        :return: the result of the function, or None if the process failed or
            timed out
        """
        # Normalize a single argument into a one-element tuple.
        p_args = fn_args if isinstance(fn_args, tuple) else (fn_args,)
        queue = Queue()
        logger.debug("Executor: starting {} {}".format(func.__name__, p_args))
        p = Process(target=self._process_run,
                    args=(queue, func, *p_args,), kwargs=p_kwargs)
        if daemon:
            p.daemon = True
        self.process = p
        p.start()
        # join() returns after timeout even if the child is still running.
        p.join(timeout=timeout)
        if not queue.empty():
            return queue.get()
        # No result on the queue: the child either failed or timed out.
        if callback_timeout:
            callback_timeout(*p_args, **p_kwargs)
        if p.is_alive():
            logger.debug('Executor: terminating by timeout')
            p.terminate()
            p.join()

    @staticmethod
    def _process_run(queue: Queue, func: Callable[[Any], Any] = None,
                     *args, **kwargs):
        """
        Executes the specified function as func(*args, **kwargs).
        The result is put on the shared queue for the parent to collect.
        :param func: the function to execute
        :param queue: a Queue
        """
        queue.put(func(*args, **kwargs))
def schedule(executor: Executor) -> asyncio.Future:
    """
    Put executor into the asyncio loop.

    :param executor: an Executor instance
    :return: executor.start() wrapped in a Future
    """
    return asyncio.ensure_future(executor.start())
def spin():
    """Block forever, running the default asyncio event loop."""
    asyncio.get_event_loop().run_forever()
class Locker:
    """
    Inter-process exclusive lock backed by an fcntl lock file.

    Usage:
        with Locker('certs'):
            ...critical section...
    """
    LOCKDIR = '/var/lock'
    LOCKFILE = 'wott.lock'

    def __init__(self, name=None, lockdir=None):
        """
        :param name: optional prefix so independent locks don't collide.
        :param lockdir: directory for the lock file; defaults to LOCKDIR.
            (New optional parameter — backward compatible; lets tests use a
            writable temporary directory instead of /var/lock.)
        """
        self.lockdir = lockdir if lockdir is not None else self.LOCKDIR
        if name:
            self.filename = name + '-' + self.LOCKFILE
        else:
            self.filename = self.LOCKFILE

    def __enter__(self):
        # O_CREAT so the first locker creates the file.
        self.f = os.open(os.path.join(self.lockdir, self.filename), os.O_WRONLY | os.O_CREAT)
        fcntl.lockf(self.f, fcntl.LOCK_EX)
        # Bug fix: return self so "with Locker(...) as lock:" binds the
        # instance (the original returned None).
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        fcntl.lockf(self.f, fcntl.LOCK_UN)
        os.close(self.f)
|
{"/agent/__main__.py": ["/agent/__init__.py", "/agent/security_helper.py"], "/agent/security_helper.py": ["/agent/os_helper.py", "/agent/__init__.py"], "/agent/__init__.py": ["/agent/executor.py", "/agent/os_helper.py"], "/tests/test_agent.py": ["/agent/__init__.py", "/agent/journal_helper.py", "/agent/os_helper.py", "/agent/iptables_helper.py", "/agent/security_helper.py"], "/agent/os_helper.py": ["/agent/__init__.py"], "/setup.py": ["/version.py"], "/agent/iptables_helper.py": ["/agent/__init__.py"], "/tests/conftest.py": ["/agent/iptables_helper.py"]}
|
39,339
|
WoTTsecurity/agent
|
refs/heads/master
|
/setup.py
|
from setuptools import setup, find_packages
import version
# Build the full version string from the static version, git commit hash and
# CI build number provided by the local `version` helper module.
static_version, commit_hash, _, build_number = version.version()
full_version = version.version_string(static_version, commit_hash, build_number)

setup(
    name="wott-agent",
    version=full_version,
    author="Viktor Petersson",
    author_email="v@viktopia.io",
    description="WoTT agent",
    # Ship every package except the test suite.
    packages=find_packages(exclude=('tests',)),
    include_package_data=True,
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.5',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Security',
        'Topic :: System :: Installation/Setup',
        'Topic :: System :: Networking',
        'Topic :: System :: Systems Administration',
        'Topic :: Utilities',
    ],
    # Needed at build time only (version computation uses git metadata).
    setup_requires=[
        'GitPython'
    ],
    install_requires=[
        'certifi',
        'cffi',
        'chardet',
        'cryptography',
        'idna',
        'netifaces',
        'psutil',
        'pyOpenSSL',
        'python-iptables',
        'requests',
        'sh',
        'pytz',
        'setuptools',
        'systemd-python'
    ],
    entry_points={
        'console_scripts': [
            'wott-agent = agent.__main__:main',
        ],
    }
)
|
{"/agent/__main__.py": ["/agent/__init__.py", "/agent/security_helper.py"], "/agent/security_helper.py": ["/agent/os_helper.py", "/agent/__init__.py"], "/agent/__init__.py": ["/agent/executor.py", "/agent/os_helper.py"], "/tests/test_agent.py": ["/agent/__init__.py", "/agent/journal_helper.py", "/agent/os_helper.py", "/agent/iptables_helper.py", "/agent/security_helper.py"], "/agent/os_helper.py": ["/agent/__init__.py"], "/setup.py": ["/version.py"], "/agent/iptables_helper.py": ["/agent/__init__.py"], "/tests/conftest.py": ["/agent/iptables_helper.py"]}
|
39,340
|
WoTTsecurity/agent
|
refs/heads/master
|
/agent/iptables_helper.py
|
from typing import List, Tuple
from itertools import product
import logging
import ctypes as ct
import iptc
from pkg_resources import parse_version
from sh import iptables
logger = logging.getLogger('agent.iptables_helper')

TABLE = 'filter'
DROP_CHAIN = 'WOTT_LOG_DROP'
OUTPUT_CHAIN = 'WOTT_OUTPUT'
INPUT_CHAIN = 'WOTT_INPUT'

# Fix a bug in python-iptables by monkey-patching a couple of methods.
# The bug is actually fixed in:
# https://github.com/chruss2/python-iptables/commit/282c790738a111b1ddc27b43ecb0acfab8b09024
# and the bugfix is gonna be released in the next (after 0.14.0) release of python-iptables.
# Decide whether the installed iptc/iptables version pair needs the patch:
iptables_version = iptables('--version').split(maxsplit=2)[1]
upgrade = parse_version(iptc.version.__version__) <= parse_version('0.14.0') \
    and parse_version(iptables_version) >= parse_version('1.8.0')
downgrade = parse_version(iptc.version.__version__) >= parse_version('0.15.0-dev') \
    and parse_version(iptables_version) < parse_version('1.8.0')
if upgrade or downgrade:
    def find_match(self, name):
        # Patched replacement for iptc.xtables.xtables.find_match.
        if isinstance(name, str):
            name = name.encode()
        name = self._check_extname(name)
        ext = self._get_loaded_ext(name)
        if ext is not None:
            return ext
        if downgrade:
            # Reset libxtables' cached match lists before searching again.
            iptc.xtables.xtables._xtables_matches.value = ct.c_void_p(None).value
            if iptc.xtables.xtables._xtables_pending_matches:
                iptc.xtables.xtables._xtables_pending_matches.value = ct.c_void_p(None).value
        match = iptc.xtables.xtables._xtables_find_match(name, iptc.xtables.XTF_TRY_LOAD, None)
        if not match:
            self._try_register(name)
            match = iptc.xtables.xtables._xtables_find_match(name, iptc.xtables.XTF_DONT_LOAD, None)
            if not match:
                return match
        m = ct.cast(match, ct.POINTER(self._match_struct))
        self._loaded(m[0].name, m)
        return m

    def find_target(self, name):
        # Patched replacement for iptc.xtables.xtables.find_target.
        if isinstance(name, str):
            name = name.encode()
        name = self._check_extname(name)
        ext = self._get_loaded_ext(name)
        if ext is not None:
            return ext
        if downgrade:
            # Reset libxtables' cached target lists before searching again.
            iptc.xtables.xtables._xtables_targets.value = ct.c_void_p(None).value
            if iptc.xtables.xtables._xtables_pending_targets:
                iptc.xtables.xtables._xtables_pending_targets.value = ct.c_void_p(None).value
        target = iptc.xtables.xtables._xtables_find_target(name, iptc.xtables.XTF_TRY_LOAD)
        if not target:
            self._try_register(name)
            target = iptc.xtables.xtables._xtables_find_target(name, iptc.xtables.XTF_DONT_LOAD)
            if not target:
                return target
        t = ct.cast(target, ct.POINTER(self._target_struct))
        self._loaded(t[0].name, t)
        return t

    # Install the patched methods before importing the helper that uses them.
    iptc.xtables.xtables.find_match = iptc.xtables.set_nfproto(find_match)
    iptc.xtables.xtables.find_target = iptc.xtables.set_nfproto(find_target)
    from . import iptc_helper
else:
    from . import iptc_helper
def dump():
    """
    Get all rules for all chains in all tables for both IPv4 and IPv6.

    Rules of the WOTT output chain are deliberately reported as an empty list
    (matching the original behavior).
    :return: {'v4'|'v6': {table_name: {chain_name: {'rules': [...],
        'policy': <policy, if any>}}}}
    """
    tables = {'v6': {}, 'v4': {}}
    for table_name, ipv6 in product(('filter', 'nat', 'mangle'), (False, True)):
        table = iptc_helper.dump_table(table_name, ipv6=ipv6).items()
        chains = {}
        for chain_name, chain in table:
            policy = iptc_helper.get_policy(table_name, chain_name, ipv6=ipv6)
            # Fix: the per-rule filter condition was loop-invariant (it only
            # depends on chain_name) — hoist it out of the comprehension.
            rules = {'rules': list(chain) if chain_name != OUTPUT_CHAIN else []}
            if policy:
                rules['policy'] = policy
            chains[chain_name] = rules
        tables['v6' if ipv6 else 'v4'][table_name] = chains
    return tables
def prepare():
    """
    Add INPUT_CHAIN and OUTPUT_CHAIN to TABLE if they don't exist.
    Otherwise clear (flush) them.
    """
    for ipv6 in (False, True):
        # DROP_CHAIN logs the packet and then drops it; it's only populated
        # when first created.
        if not iptc_helper.has_chain(TABLE, DROP_CHAIN, ipv6=ipv6):
            iptc_helper.add_chain(TABLE, DROP_CHAIN, ipv6=ipv6)
            iptc_helper.batch_add_rules(TABLE, [
                {'target': {'LOG': {'log-prefix': 'DROP: ', 'log-level': '3'}}},
                {'target': 'DROP'}
            ], chain=DROP_CHAIN, ipv6=ipv6)
        if not iptc_helper.has_chain(TABLE, INPUT_CHAIN, ipv6=ipv6):
            iptc_helper.add_chain(TABLE, INPUT_CHAIN, ipv6=ipv6)
        else:
            iptc_helper.flush_chain(TABLE, INPUT_CHAIN, ipv6=ipv6)
        if not iptc_helper.has_chain(TABLE, OUTPUT_CHAIN, ipv6=ipv6):
            iptc_helper.add_chain(TABLE, OUTPUT_CHAIN, ipv6=ipv6)
        else:
            iptc_helper.flush_chain(TABLE, OUTPUT_CHAIN, ipv6=ipv6)
        # Check for the first rule in OUTPUT and in INPUT and add it if missing.
        # The first rule jumps to WOTT_INPUT or WOTT_OUTPUT where we decide what to do with it.
        # If we don't block the packet it returns back to INPUT or OUTPUT and gets handled by existing rules.
        # This way we don't interfere with the filtering which was already configured on the device.
        for target_chain, chain in ((INPUT_CHAIN, 'INPUT'), (OUTPUT_CHAIN, 'OUTPUT')):
            # -I $chain -j $target_chain
            jump_to_target = {'target': target_chain}
            first_rule = iptc_helper.get_rule(TABLE, chain, 1, ipv6=ipv6)
            if jump_to_target != first_rule:
                # Another rule may have been added on top, which means our rule may be somewhere else.
                iptc_helper.delete_rule(TABLE, chain, jump_to_target, ipv6=ipv6, raise_exc=False)
                iptc_helper.add_rule(TABLE, chain, jump_to_target, 1, ipv6=ipv6)
def add_block_rules():
    """
    Adds rules which permit localhost and related/established connections
    (ipv4 and ipv6) and drops the rest of input traffic.
    """
    for ipv6 in (False, True):
        # Equivalent iptables commands:
        # -I WOTT_INPUT -i lo -j ACCEPT
        # -I WOTT_INPUT -m conntrack --ctstate RELATED,ESTABLISHED -j ACCEPT
        # -A WOTT_INPUT -j DROP
        iptc_helper.batch_add_rules(TABLE, [
            (INPUT_CHAIN, {
                'in-interface': 'lo',
                'target': 'ACCEPT'
            }, 1),
            (INPUT_CHAIN, {
                'conntrack': {'ctstate': 'RELATED,ESTABLISHED'},
                'target': 'ACCEPT'
            }, 2),
            (INPUT_CHAIN, {
                'target': 'DROP'
            }, 0)
        ], ipv6=ipv6)
def add_rules(table, chain, rules):
    """
    Insert rules into the chain for both ipv4 and ipv6.
    :param table: table name
    :param chain: chain name
    :param rules: a list of (rule, is_ipv6) pairs in iptc.easy format
    :return: None
    """
    for want_ipv6 in (False, True):
        matching = [candidate for candidate, is_ipv6 in rules if is_ipv6 == want_ipv6]
        iptc_helper.batch_add_rules(table, matching, chain=chain, ipv6=want_ipv6)
def block_ports(allow: bool, ports_data: List[Tuple[str, str, int, bool]]):
    """
    Block or allow incoming TCP/UDP packets to the ports supplied in the list.

    :param allow: True if policy is "allow by default" (which means: block the supplied ports)
    :param ports_data: list of (host, protocol, port, is_ipv6) tuples to be
        blocked or allowed (docstring fixed: this is a list, not a dict)
    :return: None
    """
    def remove_unspecified(r):
        # An unspecified destination ('0.0.0.0'/'::') means "any host":
        # drop the 'dst' key so the rule matches every destination.
        if r['dst'] in ['0.0.0.0', '::']:
            del r['dst']  # idiom fix: `del` is a statement, not a function
        return r
    rules = [(remove_unspecified({
        'protocol': proto,
        proto: {'dport': str(port)},
        'dst': host,
        'target': DROP_CHAIN if allow else 'ACCEPT'
    }), ipv6)
        for host, proto, port, ipv6 in ports_data]
    add_rules(TABLE, INPUT_CHAIN, rules)
def block_networks(network_list: List[Tuple[str, bool]]):
    """
    Block outgoing packets to the networks supplied in the list.
    :param network_list: list of (IP-or-subnet, is_ipv6) pairs; addresses are
        in dot-notation or <IP>/<mask> form
    :return: None
    """
    drop_rules = []
    for network, is_ipv6 in network_list:
        drop_rules.append(({'dst': network, 'target': DROP_CHAIN}, is_ipv6))
    add_rules(TABLE, OUTPUT_CHAIN, drop_rules)
def block(blocklist):
    """
    Apply a blocklist: set up the WOTT chains and install network/port rules.

    :param blocklist: dict with optional keys 'policy' ('allow' or 'block',
        defaults to 'allow'), 'block_networks', 'block_ports', 'allow_ports'.
    """
    policy = blocklist.get('policy', 'allow')
    try:
        prepare()
        block_networks(blocklist.get('block_networks', []))
        if policy == 'allow':
            block_ports(True, blocklist.get('block_ports', []))
        elif policy == 'block':
            block_ports(False, blocklist.get('allow_ports', []))
            add_block_rules()
        else:
            logger.error('Error: unknown policy "{}"'.format(policy))
    except iptc.IPTCError as e:
        logger.error('Error while updating iptables: %s', str(e))
        # Bug fix: logging methods require a message argument;
        # logger.debug(exc_info=True) raised a TypeError and masked the error.
        logger.debug('iptables error details', exc_info=True)
        if 'insmod' in str(e):
            logger.error('Error: failed to update iptables, try rebooting')
|
{"/agent/__main__.py": ["/agent/__init__.py", "/agent/security_helper.py"], "/agent/security_helper.py": ["/agent/os_helper.py", "/agent/__init__.py"], "/agent/__init__.py": ["/agent/executor.py", "/agent/os_helper.py"], "/tests/test_agent.py": ["/agent/__init__.py", "/agent/journal_helper.py", "/agent/os_helper.py", "/agent/iptables_helper.py", "/agent/security_helper.py"], "/agent/os_helper.py": ["/agent/__init__.py"], "/setup.py": ["/version.py"], "/agent/iptables_helper.py": ["/agent/__init__.py"], "/tests/conftest.py": ["/agent/iptables_helper.py"]}
|
39,341
|
WoTTsecurity/agent
|
refs/heads/master
|
/tests/conftest.py
|
import pytest
import socket
from unittest.mock import Mock
from agent.iptables_helper import DROP_CHAIN
RASPBERRY_FIXTURE = """
Hardware : BCM2708
Revision : 900092
Serial : 00000000ebd5f1e8
"""
INVALID_CERT = """
-----BEGIN CERTIFICATE-----
MIIC5TCCAc2gAwIBAgIJAPMjGMrzQcI/MA0GCSqGSIb3DQEBCwUAMBQxEjAQBgNV
BAMMCWxvY2FsaG9zdDAeFw0xOTAzMDUyMDE5MjRaFw0xOTA0MDQyMDE5MjRaMBQx
EjAQBgNVBAMMCWxvY2FsaG9zdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
ggEBAOgfhzltW1Bx/PLve7sk228G9FeBQmTVkEwiU1tgagvIzM8fhoeDnXoMVRf5
GPWZr4h0E4BtDRQUO7NqgW+r3RQMq4nJljTV9f8Om3Owx41BM5M5w5YH75JZzcZ1
OVBmJRPOG06I3Hk/uQjCGo1YN7ZggAdUmFQqQ03GdstqQhd6UzbV2dPphq+R2npV
oAjByawBwuxi+NJXxz20dUVkXrrxGgDUKcUn4NPsIUGf9hSHZcDMZ3XQcQQ/ykD9
i/zeVU6jGnsMOO+YZUguBlq/GKI2fzezfG7fv394oAJP9mV0T8k9ArciTigUehuv
a8sHA+vrvRXCNbpV8vEQbRh/+0sCAwEAAaM6MDgwFAYDVR0RBA0wC4IJbG9jYWxo
b3N0MAsGA1UdDwQEAwIHgDATBgNVHSUEDDAKBggrBgEFBQcDATANBgkqhkiG9w0B
AQsFAAOCAQEAL+KRDdqbbAFiMROy7eNkbMUj3Dp4S24y5QnGjFl4eSFLWu9UhBT+
FcElSbo1vKaW5DJi+XG9snyZfqEuknQlBEDTuBlOEqguGpmzYE/+T0wt9zLTByN8
N44fGr4f9ORj6Y6HJkzdlp+XCDdzHb2+3ienNle6bWlmBpbQaMVrayDxJ5yxldgJ
czUUClEc0OJDMw8PsHyYvrl+jk0JFXgDqBgAutPzSiC+pWL3H/5DO8t/NcccNNlR
2UZyh8r3qmVWo1jROR98z/J59ytNgMfYTmVI+ClUWKF5OWEOneKTf7dvic0Bqiyb
1lti7kgwF5QeRU2eEn3VC2F5JreBMpTkeA==
-----END CERTIFICATE-----
"""
INVALID_KEY = """
-----BEGIN PRIVATE KEY-----
MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQDoH4c5bVtQcfzy
73u7JNtvBvRXgUJk1ZBMIlNbYGoLyMzPH4aHg516DFUX+Rj1ma+IdBOAbQ0UFDuz
aoFvq90UDKuJyZY01fX/DptzsMeNQTOTOcOWB++SWc3GdTlQZiUTzhtOiNx5P7kI
whqNWDe2YIAHVJhUKkNNxnbLakIXelM21dnT6Yavkdp6VaAIwcmsAcLsYvjSV8c9
tHVFZF668RoA1CnFJ+DT7CFBn/YUh2XAzGd10HEEP8pA/Yv83lVOoxp7DDjvmGVI
LgZavxiiNn83s3xu379/eKACT/ZldE/JPQK3Ik4oFHobr2vLBwPr670VwjW6VfLx
EG0Yf/tLAgMBAAECggEBALPEbxJfid+UV+TA6Z823SZwSV1XgtbauqTr1Iio85fq
zAsAjEx49sWltWUaimTywAm6c7v7OKy7Or0pl9KnVFEJuvO8BjMnHRuJ8YQ4fWL9
AvdbPgj8XmKGYCH5eQi2ArMC5Qz+W1kfq6qHwM6Eaqk4tQ54SnysOnGKaUgCI+tP
XBIuWTs6OrWmJDuW6J0zNPRBZAbVEsaFaTdLtJ4kDPlmDmHHMzrLkQhvQ7oSFoEW
FtLNlWAV0uZ2PpHQbrcx1ALabH1Yz3yRcgjDYtu5oCRN6+4wJEylg1NxiQk9BP/m
amRFIuyBVpnh69ErYeLrP320nHew3NML6Xxr3dI9yVECgYEA/3oAR6rCVtjrozHG
hWq/SdRY5Cq4nvt+ocTlgZo2/qULDR8xo4HE4ABliE9/TMEysgA2erAfEvSV15mt
m/BWOHZZ1mbpAm1jbRmBMjVPGytH997LOAnBCwLLjtIjbJMrRxKws6fSO+gwRY9v
MMeiJdW2LpVgBd+AunZEBjyMYCMCgYEA6JlHM5SyCfwuZSdIue0iI3t8n8nBV7zu
mqwItZHX/h8xu/V5cT7eVLsC3yti2+EHir8V6AXJ919LlSFjZObEBwL+xtyK+HZj
uQmXN78QtnFRUO3EBlTmYCYzPGE0cNwg9t1RQS0KMs5ypQ9vYUoXwqNvp97XxsB0
d4+wMLz+lrkCgYEA1ibWhTzGmzZKkAnxd3T71E+EE/8bs2jtxXzfRbyXzO1cTiuP
2Je3CG5Mre61rwlkDYHQKRfpdGJCGPBhbw4PuFS9CdRKDhbT+WgfvI6jOQsW0NiZ
UOgcQbaeG6Jav3C+Hl20cWSD/mOr0yNg+WreqQh0JqhgTYwExEjOzMuEgDECgYBD
niugxx1q4bDrHxx5UIKYJhH4scJPK1GCDXkKr7dG3PKsXZRMY6Zmo2cWUZqPqT90
ClDn/qbUDxP96pLmhl9+WlSOoxaTXHdpF2yqfBTztMWa7UQLQysl0HUcnHWOSbAb
lANHGzzXwER7z5zlf5CguLqA5rt7v/8bst3ZjVfFoQKBgQCFepRalYYqKUYbl6Lx
y0UxgC/XRPUlsL5IANipOt8Yu2M/+RJKW1jdUJx3sUCRYBV5IpX8jqnHax+MIki5
wU3JBrpGqAAoGa/78B572+9Dmr6Bj0yAoWQ67tht87M1mQxpKv6IE4CEt8+o+5sR
I9bBs17EE1GV43TaxFaOc/oUYw==
-----END PRIVATE KEY-----
"""
CERT = """
-----BEGIN CERTIFICATE-----
MIIClzCCAj2gAwIBAgIUFXu9cEa7n79yDQWNHG9nfHHiw+kwCgYIKoZIzj0EAwIw
XzELMAkGA1UEBhMCVUsxDzANBgNVBAcTBkxvbmRvbjEjMCEGA1UEChMaV2ViIG9m
IFRydXN0ZWQgVGhpbmdzLCBMdGQxGjAYBgNVBAMTEWNhMC1jYS53b3R0LmxvY2Fs
MB4XDTE5MDMxMjEwMjQwMFoXDTE5MDMxOTEwMjQwMFowezELMAkGA1UEBhMCVUsx
DzANBgNVBAgTBkxvbmRvbjEjMCEGA1UEChMaV2ViIG9mIFRydXN0ZWQgVGhpbmdz
LCBMdGQxNjA0BgNVBAMTLTQ4NTNiNjMwODIyOTQ2MDE5MzkzYjE2YzViNzEwYjll
LmQud290dC5sb2NhbDBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABEzKyyQJ2VSw
5F90xOkHLaJmTHjJwu3C/G2fgYDMw02NbuTzjIhTCyqhHbeY8GO/ZXIZ5ASE1ACB
4OJVYrpRUVajgbowgbcwDgYDVR0PAQH/BAQDAgeAMB0GA1UdJQQWMBQGCCsGAQUF
BwMCBggrBgEFBQcDATAMBgNVHRMBAf8EAjAAMB0GA1UdDgQWBBSshZgvlzrA9p6p
EJXTRi4wgYOyITAfBgNVHSMEGDAWgBSpts1xq4g96OM2x5RvKrEUAIU3ATA4BgNV
HREEMTAvgi00ODUzYjYzMDgyMjk0NjAxOTM5M2IxNmM1YjcxMGI5ZS5kLndvdHQu
bG9jYWwwCgYIKoZIzj0EAwIDSAAwRQIgGSUuYz+Osx1FFZnIntWlb2g3dkpT1O/C
5zSuz7b/JcECIQDTa1z7edWWjwBLmFwaCR/2XXU6pt/52Fh+YUq/vwGq5A==
-----END CERTIFICATE-----
"""
KEY = """
-----BEGIN PRIVATE KEY-----
MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgGJEzRpQVxxo0jRKh
0zV00O5iyOkUajHp9ULu0vE6J3KhRANCAARMysskCdlUsORfdMTpBy2iZkx4ycLt
wvxtn4GAzMNNjW7k84yIUwsqoR23mPBjv2VyGeQEhNQAgeDiVWK6UVFW
-----END PRIVATE KEY-----
"""
@pytest.fixture
def raspberry_cpuinfo():
    """Sample /proc/cpuinfo content for a Raspberry Pi board."""
    return RASPBERRY_FIXTURE
@pytest.fixture
def netif_gateways():
    """netifaces.gateways()-style result with a single IPv4 default gateway."""
    return {
        'default': {
            2: ('192.168.1.1', 'wlo1')
        },
        2: [('192.168.1.1', 'wlo1', True)]
    }
@pytest.fixture
def netif_ifaddresses():
    """netifaces.ifaddresses()-style result with MAC (17), IPv4 (2) and IPv6 (10) entries."""
    return {
        17: [
            {
                'addr': 'aa:aa:aa:aa:aa:aa',
                'broadcast': 'ff:ff:ff:ff:ff:ff'
            }
        ],
        2: [
            {
                'addr': '192.168.1.3',
                'netmask': '255.255.255.0',
                'broadcast': '192.168.1.255'
            }
        ],
        10: [
            {
                'addr': 'fe80::1e93:cce9:0000:0000%wlo1',
                'netmask': 'ffff:ffff:ffff:ffff::/64'
            }
        ]
    }
@pytest.fixture
def netif_gateways_invalid():
    """Empty gateways dict, simulating a host with no default route."""
    return {}
@pytest.fixture
def cert():
    """Device certificate fixture (PEM text)."""
    return CERT
@pytest.fixture
def key():
    """Private key fixture (PEM text) paired with the `cert` fixture."""
    return KEY
@pytest.fixture
def invalid_cert():
    """Certificate fixture (PEM text) used for negative-path tests."""
    return INVALID_CERT
@pytest.fixture
def invalid_key():
    """Private key fixture (PEM text) used for negative-path tests."""
    return INVALID_KEY
@pytest.fixture
def gen_id():
    """Sample device-id payload ({'device_id': ...})."""
    return {"device_id": "60f4e66c1e7746c3ba8f3301d8a4d1c4.d.wott.local"}
@pytest.fixture
def uptime():
    """Uptime fixture — presumably /proc/uptime-style "uptime idle" text; verify against consumer."""
    return "60 60"
@pytest.fixture
def ipt_rules():
    """Two iptc-format rules targeting the WOTT log-and-drop chain."""
    return (
        {'dst': '10.10.10.10', 'target': DROP_CHAIN},
        {'dst': '10.20.10.20', 'target': DROP_CHAIN}
    )
@pytest.fixture
def ipt_networks():
    """(address, is_ipv6) pairs matching the `ipt_rules` fixture."""
    return (('10.10.10.10', False), ('10.20.10.20', False))
@pytest.fixture
def ipt_ports():
    """(host, proto, port, is_ipv6) tuples for port-blocking tests."""
    return [
        ('0.0.0.0', 'tcp', 80, False),
        ('::', 'tcp', 80, True),
        ('192.168.1.1', 'tcp', 80, False),
        ('fe80::adf3:7685:af9f:c151', 'tcp', 80, True)
    ]
@pytest.fixture
def ipt_ports_rules():
    """Expected iptc rules built from `ipt_ports` (unspecified hosts carry no 'dst' key)."""
    return [
        ({'protocol': 'tcp', 'tcp': {'dport': '80'}, 'target': DROP_CHAIN}, False),
        ({'protocol': 'tcp', 'tcp': {'dport': '80'}, 'dst': '192.168.1.1', 'target': DROP_CHAIN}, False),
    ]
@pytest.fixture
def net_connections_fixture():
    """psutil.net_connections()-style mocks: one connected and one listening IPv4 TCP socket."""
    return [
        Mock(family=socket.AF_INET,
             type=socket.SOCK_STREAM,
             laddr=('192.168.1.1', 1234),
             raddr=('192.168.1.2', 1234),
             status='CONNECTED',
             pid=1234),
        Mock(family=socket.AF_INET,
             type=socket.SOCK_STREAM,
             laddr=('192.168.1.1', 1234),
             raddr=(),  # no remote address: a listening socket
             status='LISTENING',
             pid=1234)
    ]
@pytest.fixture
def netstat_result():
    """Expected parsed form of `net_connections_fixture`.

    Note the two entries use different schemas: the connected socket keeps
    full address tuples, while the listening one is flattened into
    host/port/proto/state keys.
    """
    return (
        {
            'ip_version': 4,
            'type': 'tcp',
            'local_address': ('192.168.1.1', 1234),
            'remote_address': ('192.168.1.2', 1234),
            'status': 'CONNECTED',
            'pid': 1234
        },
        {
            'ip_version': 4,
            'host': '192.168.1.1',
            'port': 1234,
            'proto': 'tcp',
            'state': 'LISTENING',
            'pid': 1234
        }
    )
@pytest.fixture
def sshd_config():
    """Sample sshd_config text exercising comments, quoted values, ranges and defaults."""
    return """
# a comment
PermitEmptyPasswords no
PermitRootLogin "yes"
# Ignored with OpenSSH >= 7.0
Protocol "2,1"
# PasswordAuthentication param's default value will be checked
LoginGraceTime 60
# outside of range
MaxAuthTries 5
# inside the range
ClientAliveCountMax 1
# default: ClientAliveInterval 0
AnotherOption another value
"""
@pytest.fixture
def cmdline():
    """Kernel cmdline string with quoting, punctuation and no-value edge cases."""
    return """one t-wo= fo_ur="fix 1-2asqwe six+\\0123!@#$%^%^&*()_=" se.ven=eight,nine+ten*eleven -"""
|
{"/agent/__main__.py": ["/agent/__init__.py", "/agent/security_helper.py"], "/agent/security_helper.py": ["/agent/os_helper.py", "/agent/__init__.py"], "/agent/__init__.py": ["/agent/executor.py", "/agent/os_helper.py"], "/tests/test_agent.py": ["/agent/__init__.py", "/agent/journal_helper.py", "/agent/os_helper.py", "/agent/iptables_helper.py", "/agent/security_helper.py"], "/agent/os_helper.py": ["/agent/__init__.py"], "/setup.py": ["/version.py"], "/agent/iptables_helper.py": ["/agent/__init__.py"], "/tests/conftest.py": ["/agent/iptables_helper.py"]}
|
39,345
|
HasanB13/Django-Authentication
|
refs/heads/master
|
/users/views.py
|
from django.shortcuts import render, redirect
from django.contrib import messages
from django.contrib.auth import login, logout, get_user_model
from django.db.models import Q
from django.contrib.auth.mixins import LoginRequiredMixin
from django.views.generic.list import ListView
from .forms import UserCreationForm, UserLoginForm
MyUser = get_user_model()
# Create your views here.
def register(request, *args, **kwargs):
    """
    Sign-up view: create a new user from a POSTed UserCreationForm.

    On success, flash a message and redirect to the login page.
    Bug fix: the original unconditionally re-created an empty form after an
    invalid POST, silently discarding the validation errors; the bound form
    (with its errors) is now re-rendered instead.
    """
    if request.method == 'POST':
        form = UserCreationForm(request.POST)
        if form.is_valid():
            form.save()
            username = form.cleaned_data.get('username')
            messages.success(request, f'{username} Account created')
            return redirect('login')
    else:
        form = UserCreationForm()
    return render(request, 'users/register.html', {'form': form})
def login_user(request, *args, **kwargs):
    """
    Log in a user looked up by username OR email from the form's 'query' field.

    Bug fix: if no user matched the query, the original passed None to
    login(), which raises; a missing user now falls through to re-rendering
    the form with an error message.
    NOTE(review): no password check is visible here — presumably
    UserLoginForm.clean() authenticates the credentials; confirm in forms.py.
    """
    form = UserLoginForm(request.POST or None)
    if form.is_valid():
        query = form.cleaned_data.get('query')
        user = MyUser.objects.filter(
            Q(username=query) |
            Q(email=query)
        ).distinct().first()
        if user is not None:
            login(request, user)
            return redirect('home')
        messages.error(request, 'No account matches that username or email')
    return render(request, 'users/login.html', {'form': form})
def logout_user(request, *args, **kwargs):
    """Log the current user out and redirect to the login page."""
    logout(request)
    return redirect('login')
class AllUsers(LoginRequiredMixin, ListView):
    """Login-protected list of all users, rendered as the home page."""
    login_url = '/login/'  # where anonymous visitors are redirected
    model = MyUser
    template_name = 'users/home.html'
    context_object_name = 'users'  # template iterates over 'users'
|
{"/users/urls.py": ["/users/views.py"]}
|
39,346
|
HasanB13/Django-Authentication
|
refs/heads/master
|
/users/urls.py
|
from django.urls import path
from .views import register, login_user, logout_user, AllUsers
# URL routes for the users app: home list plus the three auth views.
urlpatterns = [
    path('', AllUsers.as_view(), name='home'),
    path('register/', register, name='register'),
    path('login/', login_user, name='login'),
    path('logout/', logout_user, name='logout')
]
|
{"/users/urls.py": ["/users/views.py"]}
|
39,374
|
MLHale/secuwear-project
|
refs/heads/master
|
/secuwear_backend/api/migrations/0007_auto_20171102_2344.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-11-02 23:44
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration for the BtleEvent model.

    Renames three packet-capture fields, removes seven obsolete ones and adds
    channelindex/company/companydata. Do not edit by hand beyond comments.
    """

    dependencies = [
        ('api', '0006_btleevent_domain'),
    ]

    operations = [
        migrations.RenameField(
            model_name='btleevent',
            old_name='accessaddress',
            new_name='advertisingdata',
        ),
        migrations.RenameField(
            model_name='btleevent',
            old_name='ppireserved',
            new_name='advertisingtype',
        ),
        migrations.RenameField(
            model_name='btleevent',
            old_name='ppiversion',
            new_name='btletype',
        ),
        migrations.RemoveField(
            model_name='btleevent',
            name='caplength',
        ),
        migrations.RemoveField(
            model_name='btleevent',
            name='epochtime',
        ),
        migrations.RemoveField(
            model_name='btleevent',
            name='highestlayer',
        ),
        migrations.RemoveField(
            model_name='btleevent',
            name='layers',
        ),
        migrations.RemoveField(
            model_name='btleevent',
            name='ppidlt',
        ),
        migrations.RemoveField(
            model_name='btleevent',
            name='ppiflags',
        ),
        migrations.RemoveField(
            model_name='btleevent',
            name='ppiheaderlength',
        ),
        migrations.AddField(
            model_name='btleevent',
            name='channelindex',
            field=models.CharField(max_length=200, null=True),
        ),
        migrations.AddField(
            model_name='btleevent',
            name='company',
            field=models.CharField(max_length=200, null=True),
        ),
        migrations.AddField(
            model_name='btleevent',
            name='companydata',
            field=models.CharField(max_length=300, null=True),
        ),
    ]
|
{"/secuwear_webapp/api/forms.py": ["/secuwear_webapp/api/models.py"], "/secuwear_webapp/api/serializers.py": ["/secuwear_webapp/api/models.py"]}
|
39,375
|
MLHale/secuwear-project
|
refs/heads/master
|
/secuwear_webapp/api/models.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.contrib import admin
# Create your models here.
class Temperature(models.Model):
    """A single temperature reading reported by the wearable sensor."""
    # Timestamp assigned automatically when the row is first saved.
    time = models.DateTimeField(auto_now_add=True)
    # Reading stored as free text rather than a numeric field.
    celsius = models.CharField(max_length=1000)
    def __str__(self):
        # Bug fix: the original defined ``__self__``, a name Python never
        # invokes, so the method was dead code; ``__str__`` is the intended
        # hook.  ``str()`` is required because ``time`` is a datetime.
        return str(self.time)
    def __unicode__(self):
        # Python 2 text representation used by the Django admin.
        return unicode(self.time)
    class Meta:
        verbose_name_plural = "temperature"
    class JSONAPIMeta:
        # Resource name used by the JSON:API renderer.
        resource_name = "temperature"
class TemperatureAdmin(admin.ModelAdmin):
    # Columns shown on the Temperature changelist in the Django admin.
    list_display = ('time','celsius')
class Barometer(models.Model):
    """A single barometer reading (pressure plus derived altitude)."""
    # Timestamp assigned automatically when the row is first saved.
    time = models.DateTimeField(auto_now_add=True)
    pressure = models.CharField(max_length=200)
    altitude = models.CharField(max_length=200)
    def __str__(self):
        # Bug fix: originally named ``__self__``, which Python never calls;
        # ``__str__`` is the intended string-representation hook.
        return self.pressure
    def __unicode__(self):
        # Python 2 text representation used by the Django admin.
        return self.pressure
    class Meta:
        verbose_name_plural = "barometer"
    class JSONAPIMeta:
        # Resource name used by the JSON:API renderer.
        resource_name = "barometer"
class BarometerAdmin(admin.ModelAdmin):
    # Columns shown on the Barometer changelist in the Django admin.
    list_display = ("time", "pressure", "altitude")
class Illuminance(models.Model):
    """A single ambient-light (illuminance) sensor reading."""
    # Timestamp assigned automatically when the row is first saved.
    time = models.DateTimeField(auto_now_add=True)
    illuminance = models.CharField(max_length=200)
    def __str__(self):
        # Bug fix: originally named ``__self__``, which Python never calls;
        # ``__str__`` is the intended string-representation hook.
        return self.illuminance
    def __unicode__(self):
        # Python 2 text representation used by the Django admin.
        return self.illuminance
    class Meta:
        verbose_name_plural = "Illuminance"
    class JSONAPIMeta:
        # Resource name used by the JSON:API renderer.
        resource_name = "Illuminance"
# NOTE(review): class name looks like a typo for "IlluminanceAdmin";
# left as-is because other modules may reference this spelling.
class IlluminaceAdmin(admin.ModelAdmin):
    # Columns shown on the Illuminance changelist in the Django admin.
    list_display = ("time", "illuminance")
'''
class Event(models.Model):
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
clocktimedifference = models.CharField(max_length=200, blank=True)
eventtype = models.CharField(max_length=200, blank=True)
event = models.CharField(max_length=200, blank=True)
codereference = models.TextField(blank=True)
domain = models.CharField(max_length=200)
#run = models.CharField(max_length=100)
def __self__(self):
return self.eventtype
def __unicode__(self):
return self.eventtype
class Meta:
verbose_name_plural = "Event"
class JSONAPIMeta:
resource_name = "Event"
class EventAdmin(admin.ModelAdmin):
list_display = ("created", "updated", "clocktimedifference", "event", "eventtype")
'''
|
{"/secuwear_webapp/api/forms.py": ["/secuwear_webapp/api/models.py"], "/secuwear_webapp/api/serializers.py": ["/secuwear_webapp/api/models.py"]}
|
39,376
|
MLHale/secuwear-project
|
refs/heads/master
|
/secuwear_webapp/api/migrations/0006_event.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2017-12-07 22:05
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration (Django 1.11.7).

    Recreates the ``Event`` model (dropped in 0005_delete_event) with
    timing, classification, and run-association columns.
    """

    dependencies = [
        ('api', '0005_delete_event'),
    ]

    operations = [
        migrations.CreateModel(
            name='Event',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('clocktimedifference', models.CharField(blank=True, max_length=200)),
                ('eventtype', models.CharField(blank=True, max_length=200)),
                ('event', models.CharField(blank=True, max_length=200)),
                ('codereference', models.TextField(blank=True)),
                ('domain', models.CharField(max_length=200)),
                # Plain CharField, not a ForeignKey to Run.
                ('run', models.CharField(max_length=100)),
            ],
            options={
                'verbose_name_plural': 'Event',
            },
        ),
    ]
|
{"/secuwear_webapp/api/forms.py": ["/secuwear_webapp/api/models.py"], "/secuwear_webapp/api/serializers.py": ["/secuwear_webapp/api/models.py"]}
|
39,377
|
MLHale/secuwear-project
|
refs/heads/master
|
/secuwear-client-ubertooth/src/pcaps.py
|
#!/usr/bin/env python
import pyshark, requests, sys, os, re, pprint, ast, json
from random import randint
import datetime
"""Pcaps Object"""
class Pcaps:
    """Parses btle pcap captures with pyshark and POSTs the extracted
    packet fields to the secuwear Django REST API (Python 2 client).
    """
    #This is the constructor for the Pcaps class.
    def __init__(self, loginUrl, username, password, btleUrl):
        """Store endpoint URLs and credentials; open a requests session.

        loginUrl -- URL of the secuwear login endpoint.
        username/password -- secuwear account credentials.
        btleUrl -- URL the parsed btle events are POSTed to.
        """
        # This is to make sure that user enters the appropriate information to parse the file.
        self.pp = pprint.PrettyPrinter(indent=4)
        # compiling a regex to ignore some files that are not required.
        self.regexPath = re.compile('.*\.docx|.*\.DS_Store')
        # Creating the url links for the django server to create requests!.
        # Under the URLS we also entered the information for the secuwear user login.
        self.loginUrl = loginUrl
        self.btleUrl = btleUrl
        self.username = username
        self.password = password
        # Requesting the session so that i can use the session object!.
        self.client = requests.session()
        # # Here we are preparing the data which is required for logging in to secuwear and sending a post request to the django server to login!
        # loginData = dict(username=self.username, password=self.password, next='/')
        # self.loginResponse = self.client.post(self.loginUrl, data=loginData, headers=dict(Referer=self.loginUrl))
    #This method uses the username and password and the url to login to the website and get the response back.
    def loginPost(self):
        """POST the stored credentials to loginUrl; returns the response
        (its cookies carry the authenticated session)."""
        loginData = dict(username=self.username, password=self.password, next='/')
        return self.client.post(self.loginUrl, data=loginData, headers=dict(Referer=self.loginUrl))
    #This is a method to get an experiment with the id from the api.
    def getExperiment(self, experiment_id, loginResponse):
        """Fetch one experiment record from the API and return parsed JSON."""
        experimentUrl='http://localhost:8000/api/experiments/'+str(experiment_id)
        return self.client.get(experimentUrl, cookies=loginResponse.cookies).json()
    #This is a method to post a new Run to the api.
    def postRun(self, loginResponse, user_id, experiment_id, run_id):
        """Interactively create a new Run (name/description prompted on
        stdin) tied to the given experiment and owner; returns the JSON
        response.  NOTE(review): the run_id parameter is unused here."""
        runUrl='http://localhost:8000/api/runs'
        experimentUrl='http://localhost:8000/api/experiments/'+str(experiment_id)
        ownerUrl='http://localhost:8000/api/users/'+str(user_id)
        #Send the urls to the experiments and owners for the relationships.
        name = raw_input('Please enter a name for the new Run: ')
        description = raw_input('Please enter a description for the new Run: ')
        runData=dict(owner=ownerUrl, experiment=experimentUrl, name=name, description=description)
        return self.client.post(runUrl, data=runData, cookies=loginResponse.cookies, headers=dict(Referer=self.btleUrl)).json()
    # This function sends a request to the database with the data from the packets of the file.
    def postData(self, walkFile, loginResponse, run_id):
        """Parse every btle packet in walkFile and POST one event per
        packet, cycling the synthetic 'domain' webapp -> mobile -> wearable
        via the counter ``i``.  run_id is the JSON response from postRun."""
        i=1
        wearablecounter = 0
        loopCounter = 1
        mobileFunctionCounter = 0
        handlenumber = 2
        handlenumberData = ''
        secondsTime = 10
        run = 'http://localhost:8000/api/runs/'+str(run_id['data']['id'])
        print 'Capturing Information from ' + os.path.abspath(walkFile)
        # Using try except just in case there are any attribute errors while parsing the data.
        try:
            # Here we are using the pyshark plugin to parse each packet and get the information.
            cap = pyshark.FileCapture(os.path.abspath(walkFile), display_filter='btle')
            # The loop below goes through an array of the packets that are captured. Then inside the loop we are gathering the information required for analysis and sending a post request to the django server.
            for pkt in cap:
                # Synthetic arrival time; seconds field advances once per
                # wearable-domain packet (see the i == 3 branch below).
                arrivalTime = '2017-06-15T09:26:'+str(secondsTime)+'.389643Z'
                # Preparing the btle data that we can send to the django server.
                if i == 1:
                    # Fabricated web-app log line with a randomized source IP.
                    webappData = 'request from 192.168.'+str(randint(0, 9))+'.1'+str(randint(1, 9))+str(randint(1, 9))+' for /api/events/'+str(loopCounter)
                    btle_data = dict( run=run, arrivaltime=arrivalTime, caplength=pkt.captured_length, domain='webapp', layers=pkt.layers, highestlayer=pkt.highest_layer, epochtime=pkt.sniff_timestamp, advertisingaddress=pkt.btle.access_address, advertisingheader=pkt.btle.advertising_header, crc=pkt.btle.crc, btledata=webappData, ppiflags=pkt.ppi.flags, ppiversion=pkt.ppi.version, ppidlt=pkt.ppi.dlt, ppiheaderlength=pkt.ppi.length, ppireserved=pkt.ppi.reserved, accessaddress='asdf' , next='/')
                    i += 1
                elif i == 2:
                    # Cycle through the five fake mobile handler names.
                    if mobileFunctionCounter == 5:
                        mobileFunctionCounter = 0
                    mobileFunctions = ['accelerometer','tempreture','orientation','heartrate','devices']
                    mobileData = 'function '+mobileFunctions[mobileFunctionCounter]+' handler executed: line'+str(loopCounter)
                    mobileFunctionCounter+=1
                    # print('This is mobile data: '+ mobileData)
                    btle_data = dict( run=run, arrivaltime=arrivalTime, caplength=pkt.captured_length, domain='mobile', layers=pkt.layers, highestlayer=pkt.highest_layer, epochtime=pkt.sniff_timestamp, advertisingaddress=pkt.btle.access_address, advertisingheader=pkt.btle.advertising_header, crc=pkt.btle.crc, btledata=mobileData, ppiflags=pkt.ppi.flags, ppiversion=pkt.ppi.version, ppidlt=pkt.ppi.dlt, ppiheaderlength=pkt.ppi.length, ppireserved=pkt.ppi.reserved, accessaddress='asdf' , next='/')
                    i += 1
                elif i == 3:
                    # Advance the fake clock, wrapping the seconds field.
                    secondsTime += 1
                    if secondsTime > 60:
                        secondsTime = 10
                    wearablecounter+=1
                    # First 10 wearable packets advertise, next 4 connect,
                    # then reads/writes alternate on a random handle.
                    if wearablecounter < 11:
                        wearableData = 'ADV_IND packet'
                    elif (wearablecounter > 10 and wearablecounter < 15):
                        wearableData = 'CONNECT packet'
                    elif wearableData == handlenumberData:
                        wearableData = 'OP Write (handle '+str(handlenumber)+')'
                    else:
                        handlenumberData = 'OP Read (handle '+str(handlenumber)+')'
                        wearableData = handlenumberData
                    #print('This is the wearable data: '+wearableData)
                    btle_data = dict( run=run, arrivaltime=arrivalTime, caplength=pkt.captured_length, domain='wearable', layers=pkt.layers, highestlayer=pkt.highest_layer, epochtime=pkt.sniff_timestamp, advertisingaddress=pkt.btle.access_address, advertisingheader=pkt.btle.advertising_header, crc=pkt.btle.crc, btledata=wearableData, ppiflags=pkt.ppi.flags, ppiversion=pkt.ppi.version, ppidlt=pkt.ppi.dlt, ppiheaderlength=pkt.ppi.length, ppireserved=pkt.ppi.reserved, accessaddress='asdf' , next='/')
                    i = 1
                # Sending the request to the django server using the python requests.
                btlePostResponse = self.client.post(self.btleUrl, data=btle_data, cookies=loginResponse.cookies, headers=dict(Referer=self.btleUrl))
                loopCounter+=1
                handlenumber = randint(1, 6)
        except AttributeError:
            print "Some Part of the packet data could not be parsed!"
        # NOTE(review): if an AttributeError fires before the first POST,
        # btlePostResponse is unbound and the next line raises — confirm.
        print 'Your request has recieved a status code of: '+str(btlePostResponse.status_code)
        # response = ast.literal_eval(btlePostResponse.text)
        # print(response['data']['result']+'\n')
    #This function is used to check which type of handle the plugin is dealing with.
    def validateInfo(self, handle_type, handle_path, loginResponse, run_id):
        """Dispatch postData over a single file or, recursively, over every
        file in a directory (skipping names matched by self.regexPath)."""
        # This is how we import the files to read them into the django server!.
        # The if statement is to differentiate between file and a folder to parse a file.
        if handle_type == 'directory':
            walkDirectory = handle_path
            print '\nLocation of the directory to be parsed (absolute): ' + os.path.abspath(walkDirectory)
            print '\n Beginning to Capture data to the Database!'
            for root, dirs, files in os.walk(walkDirectory):
                print '='*115
                for name in files:
                    m = self.regexPath.match(name)
                    if not m:
                        self.postData(os.path.join(root, name), loginResponse, run_id)
        elif handle_type == 'file':
            walkFile = handle_path
            print '\nLocation of the file to be parsed (absolute): ' + os.path.abspath(walkFile)
            print '\n Beginning to Capture data to the Database!'
            print '='*115
            self.postData(os.path.abspath(walkFile), loginResponse, run_id)
    def validatePacket(self, handle_type, handle_path, experiment_id, user_id, loginResponse, run_id):
        """Regex-parse a textual Ubertooth packet dump (handle_path holds
        the text) and POST one wearable-domain event per matched packet.
        NOTE(review): handle_type, experiment_id and user_id are unused."""
        advertisingheader=''
        channelindex = ''
        btletype = ''
        advertisingaddress = ''
        advertisingdata =''
        advertisingtype = ''
        # NOTE(review): capital-C ``Company`` is never read; the loop below
        # assigns lowercase ``company`` instead — looks like dead code.
        Company = ''
        companydata = ''
        btledata = ''
        crc = ''
        arrivaltime = ''
        run = 'http://localhost:8000/api/runs/'+str(run_id)
        # Ten capture groups, one per field posted below.
        p = re.compile('(.*)\s+.*:\s+(\S+)\s+.*:\s+(\S+)\s+\S+:\s+(.*)\s+.*:\s+(.*)\s+(.*)\s+\S+\s+(.*)\s+\S+\s+(.*)\s+\S+\s+(.*)\s+\S+\s+(.*)')
        m = p.findall(handle_path)
        for row in m:
            # print row
            advertisingheader = row[0]
            channelindex = row[1]
            btletype = row[2]
            advertisingaddress = row[3]
            advertisingdata = row[4]
            advertisingtype = row[5]
            company = row[6]
            companydata = row[7]
            btledata = row[8]
            crc = row[9]
            arrivaltime = datetime.datetime.now()
            btle_data = dict( run=run, arrivaltime=arrivaltime, advertisingheader=advertisingheader, channelindex=channelindex, btletype=btletype, advertisingaddress=advertisingaddress, advertisingdata=advertisingdata, advertisingtype=advertisingtype, company=company, companydata=companydata, btledata=btledata, crc=crc, domain='wearable')
            btlePostResponse = self.client.post(self.btleUrl, data=btle_data, cookies=loginResponse.cookies, headers=dict(Referer=self.btleUrl))
            # Reset the accumulators between packets.
            btle_data = ''
            btlePostResponse = ''
|
{"/secuwear_webapp/api/forms.py": ["/secuwear_webapp/api/models.py"], "/secuwear_webapp/api/serializers.py": ["/secuwear_webapp/api/models.py"]}
|
39,378
|
MLHale/secuwear-project
|
refs/heads/master
|
/secuwear_backend/api/admin.py
|
from django.contrib import admin
#if ENVIRONMENT == 'PROD':
# from api.models import *
#else:
from api.models import *
# Register your models here.
# Pair each API model with its ModelAdmin and register them with the
# admin site in the same order as before.
_model_admins = (
    (Experiment, ExperimentAdmin),
    (Run, RunAdmin),
    (Event, EventAdmin),
    (Profile, ProfileAdmin),
)
for _model, _admin_cls in _model_admins:
    admin.site.register(_model, _admin_cls)
#admin.site.register(BtleEvent, BtleEventAdmin)
#admin.site.register(AndroidEvent, AndroidEventAdmin)
|
{"/secuwear_webapp/api/forms.py": ["/secuwear_webapp/api/models.py"], "/secuwear_webapp/api/serializers.py": ["/secuwear_webapp/api/models.py"]}
|
39,379
|
MLHale/secuwear-project
|
refs/heads/master
|
/secuwear_backend/api/migrations/0004_auto_20170606_2005.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-06-06 20:05
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration (Django 1.11.1).

    Renames the snake_case ``btleevent`` columns to camelCase and updates
    the model ordering/verbose-name options to match.
    """

    dependencies = [
        ('api', '0003_auto_20170606_1948'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='btleevent',
            options={'ordering': ('arrivalTime',), 'verbose_name_plural': 'btleevents'},
        ),
        migrations.RenameField(
            model_name='btleevent',
            old_name='access_address',
            new_name='accessAddress',
        ),
        migrations.RenameField(
            model_name='btleevent',
            old_name='advertising_address',
            new_name='advertisingAddress',
        ),
        migrations.RenameField(
            model_name='btleevent',
            old_name='advertising_header',
            new_name='advertisingHeader',
        ),
        migrations.RenameField(
            model_name='btleevent',
            old_name='arrival_time',
            new_name='arrivalTime',
        ),
        migrations.RenameField(
            model_name='btleevent',
            old_name='data',
            new_name='btleData',
        ),
        migrations.RenameField(
            model_name='btleevent',
            old_name='cap_length',
            new_name='capLength',
        ),
        migrations.RenameField(
            model_name='btleevent',
            old_name='epoch_time',
            new_name='epochTime',
        ),
        migrations.RenameField(
            model_name='btleevent',
            old_name='highest_layer',
            new_name='highestLayer',
        ),
        migrations.RenameField(
            model_name='btleevent',
            old_name='ppi_dlt',
            new_name='ppiDlt',
        ),
        migrations.RenameField(
            model_name='btleevent',
            old_name='ppi_flags',
            new_name='ppiFlags',
        ),
        migrations.RenameField(
            model_name='btleevent',
            old_name='ppi_header_length',
            new_name='ppiHeaderLength',
        ),
        migrations.RenameField(
            model_name='btleevent',
            old_name='ppi_reserved',
            new_name='ppiReserved',
        ),
        migrations.RenameField(
            model_name='btleevent',
            old_name='ppi_version',
            new_name='ppiVersion',
        ),
    ]
|
{"/secuwear_webapp/api/forms.py": ["/secuwear_webapp/api/models.py"], "/secuwear_webapp/api/serializers.py": ["/secuwear_webapp/api/models.py"]}
|
39,380
|
MLHale/secuwear-project
|
refs/heads/master
|
/secuwear_webapp/api/views.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import render
from django.http import HttpResponseRedirect
from django.views.decorators.csrf import csrf_exempt
from django.utils.decorators import method_decorator
from .models import *
from .forms import *
from .serializers import *
#import from rest_framework
from rest_framework import viewsets
from rest_framework.permissions import *
from rest_framework.response import Response
from rest_framework import status
from rest_framework.views import APIView
import time, datetime
#httplib... pip install httplib2 to use this module
import httplib2
import urllib
#Global URL
# NOTE(review): hard-coded LAN address of the secuwear backend event
# endpoint; presumably needs to be made configurable per deployment.
URL = "http://10.12.14.145:8000/api/events"
def home(request):
    """Render the application's landing page."""
    context = {}
    return render(request, 'base.html', context)
# Create your views here.
#Temperature
class TemperatureViewSet(viewsets.ModelViewSet):
    """REST CRUD endpoint for Temperature readings; open to any caller."""
    permission_classes = (AllowAny,)
    queryset = Temperature.objects.all()
    serializer_class = TemperatureSerializer
    '''
    To POST data from MetaWear App
    '''
    '''def create(self, request, *args, **kwargs):
        print "Received request:"
        #time = request.POST.get('time')
        celsius = request.POST.get('celsius')
        #print request.outerObject
        #print time
        print celsius
        return Response(status=status.HTTP_204_NO_CONTENT)
    '''
@csrf_exempt
def temperature_list(request):
    """Render all Temperature rows, first logging this page hit as an
    event to the secuwear backend at the global URL."""
    '''
    POST to secuwear server start
    '''
    strUrl = URL
    systemTime = time.time()
    # Python 2 ``long``; millisecond epoch expected by the backend.
    systemTimeMS = long(systemTime * 1000) #converting to systemTime to MS
    datatoSecuWear = {'systemTime': systemTimeMS, 'eventtype': "call to api/temperature", 'event':"temperature_list() function triggered", 'codereference':"api/views.py: line 54", 'domain': "WebApp", 'run' :"1" }
    body = urllib.urlencode(datatoSecuWear)
    headers = {'Content-type': 'application/x-www-form-urlencoded'}
    h = httplib2.Http()
    # NOTE(review): the response is ignored; a backend failure here would
    # raise and break the page — confirm that is acceptable.
    response, content = h.request(strUrl, headers=headers, method="POST", body=body)
    '''
    POST to secuwear server end
    '''
    data = Temperature.objects.all()
    return render(request, 'temperature_list.html', {'data': data})
@csrf_exempt
def temperature_new(request):
    """Show the Temperature entry form (GET) or save a submitted reading
    and redirect to the list view (valid POST)."""
    if request.method == 'POST':
        form = TemperatureForm(request.POST)
        #print form
        if form.is_valid():
            temperature = form.save(commit=False)
            celsius = form.cleaned_data['celsius']
            print celsius
            temperature.save()
            return HttpResponseRedirect('/temperature_list')
        # Invalid POST falls through and re-renders the bound form.
    else:
        form = TemperatureForm()
    return render(request, 'temperature_edit.html', {'form': form})
#Barometer
class BarometerViewSet(viewsets.ModelViewSet):
    """REST CRUD endpoint for Barometer readings; open to any caller."""
    permission_classes = (AllowAny,)
    queryset = Barometer.objects.all()
    serializer_class = BarometerSerializer
def barometer_list(request):
    """Render all Barometer rows, logging this page hit as an event to
    the secuwear backend at the global URL."""
    data = Barometer.objects.all()
    '''
    POST to secuwear server start
    '''
    strUrl = URL
    systemTime = time.time()
    # NOTE(review): unlike temperature_list this is not wrapped in long(),
    # so a float is sent — confirm the backend accepts it.
    systemTimeMS = (systemTime * 1000) #converting to systemTime to MS
    datatoSecuWear = {'systemTime': systemTimeMS, 'eventtype': "call to api/barometer", 'event':"barometer_list() function triggered", 'codereference':"api/views.py: line 98", 'domain': "WebApp", 'run' :"1" }
    body = urllib.urlencode(datatoSecuWear)
    headers = {'Content-type': 'application/x-www-form-urlencoded'}
    h = httplib2.Http()
    response, content = h.request(strUrl, headers=headers, method="POST", body=body)
    '''
    POST to secuwear server end
    '''
    return render(request, 'barometer_list.html', {'data': data})
#Illuminance
class IlluminanceViewSet(viewsets.ModelViewSet):
    """REST CRUD endpoint for Illuminance readings; open to any caller."""
    permission_classes = (AllowAny,)
    queryset = Illuminance.objects.all()
    serializer_class = IlluminanceSerializer
def illuminance_list(request):
    """Render all Illuminance rows, logging this page hit as an event to
    the secuwear backend at the global URL."""
    data = Illuminance.objects.all()
    '''
    POST to secuwear server start
    '''
    strUrl = URL
    systemTime = time.time()
    # NOTE(review): float milliseconds (no long() cast) — confirm the
    # backend accepts it.
    systemTimeMS = (systemTime * 1000) #converting to systemTime to MS
    datatoSecuWear = {'systemTime': systemTimeMS, 'eventtype': "call to api/illuminance", 'event':"illuminance_list() function triggered", 'codereference':"api/views.py: line 122", 'domain': "WebApp", 'run' :"1" }
    body = urllib.urlencode(datatoSecuWear)
    headers = {'Content-type': 'application/x-www-form-urlencoded'}
    h = httplib2.Http()
    response, content = h.request(strUrl, headers=headers, method="POST", body=body)
    '''
    POST to secuwear server end
    '''
    return render(request, 'illuminance_list.html', {'data': data})
'''
#For events testing
class EventViewSet(viewsets.ViewSet):
permission_classes = (AllowAny,)
queryset = Event.objects.all()
serializer_class = EventSerializer
@method_decorator(csrf_exempt)
#def post(sefl, request, *args, **kwargs): posts the data and here create posts it too
def create(self, request, *args, **kwargs):
receivedTime = request.POST.get('clocktimedifference') #unicode
eventType = request.POST.get('eventtype')
event = request.POST.get('event')
codereference = request.POST.get('codereference')
domain = request.POST.get('domain')
currentTime = time.time()
rTime = (float(receivedTime)/1000)
clocktimedifference = (rTime - currentTime)
newEvent = Event(clocktimedifference=clocktimedifference, eventtype=eventType, event=event, codereference=codereference, domain=domain)
newEvent.save()
return Response(status=status.HTTP_204_NO_CONTENT)
'''
|
{"/secuwear_webapp/api/forms.py": ["/secuwear_webapp/api/models.py"], "/secuwear_webapp/api/serializers.py": ["/secuwear_webapp/api/models.py"]}
|
39,381
|
MLHale/secuwear-project
|
refs/heads/master
|
/secuwear_backend/api/migrations/0001_initial.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-06-01 20:21
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial migration (Django 1.11.1).

    Creates the core schema: BtleEvent (raw pyshark capture fields),
    Event, Experiment, Profile, and Run, plus the run foreign keys.
    """

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='BtleEvent',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('cap_length', models.IntegerField()),
                ('layers', models.TextField()),
                ('highest_layer', models.CharField(max_length=100)),
                ('epoch_time', models.FloatField()),
                ('arrival_time', models.DateTimeField(auto_now_add=True)),
                ('access_address', models.CharField(max_length=200)),
                ('advertising_address', models.CharField(max_length=300)),
                ('advertising_header', models.CharField(max_length=200)),
                ('crc', models.CharField(max_length=200)),
                ('data', models.CharField(max_length=300, null=True)),
                ('ppi_flags', models.CharField(max_length=100)),
                ('ppi_version', models.CharField(max_length=200)),
                ('ppi_dlt', models.CharField(max_length=100)),
                ('ppi_header_length', models.CharField(max_length=100)),
                ('ppi_reserved', models.CharField(max_length=200)),
            ],
            options={
                'ordering': ('arrival_time',),
                'verbose_name_plural': 'btles',
            },
        ),
        migrations.CreateModel(
            name='Event',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('clocktimedifference', models.DurationField(blank=True, null=True)),
                ('eventtype', models.CharField(blank=True, max_length=1024)),
                ('event', models.TextField(blank=True)),
                ('codereference', models.TextField(blank=True)),
                ('domain', models.CharField(blank=True, max_length=1024)),
            ],
            options={
                'verbose_name_plural': 'events',
            },
        ),
        migrations.CreateModel(
            name='Experiment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('name', models.CharField(blank=True, max_length=1024)),
                ('description', models.TextField(blank=True)),
                ('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='experiments', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name_plural': 'experiments',
            },
        ),
        migrations.CreateModel(
            name='Profile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Role flags stored as a JSON string, defaulting to subject-only.
                ('roles', models.CharField(default='{"admin": false, "researcher": false, "subject": true}', max_length=200)),
                ('gender', models.CharField(max_length=100)),
                ('age', models.IntegerField()),
                ('educationlevel', models.CharField(max_length=200)),
                ('city', models.CharField(max_length=200)),
                ('state', models.CharField(max_length=200)),
                ('ip', models.CharField(max_length=200)),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='user', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Run',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('name', models.CharField(blank=True, max_length=1024)),
                ('description', models.TextField(blank=True)),
                ('experiment', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='runs', to='api.Experiment')),
                ('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='runs', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name_plural': 'runs',
            },
        ),
        # Foreign keys added after Run exists.
        migrations.AddField(
            model_name='event',
            name='run',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='events', to='api.Run'),
        ),
        migrations.AddField(
            model_name='btleevent',
            name='run',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='btleEvents', to='api.Run'),
        ),
    ]
|
{"/secuwear_webapp/api/forms.py": ["/secuwear_webapp/api/models.py"], "/secuwear_webapp/api/serializers.py": ["/secuwear_webapp/api/models.py"]}
|
39,382
|
MLHale/secuwear-project
|
refs/heads/master
|
/secuwear_webapp/api/forms.py
|
from django import forms
from .models import *
class TemperatureForm(forms.ModelForm):
    """ModelForm for entering a Temperature reading (celsius only;
    the timestamp is auto-populated by the model)."""
    class Meta:
        model = Temperature
        fields = ('celsius',)
|
{"/secuwear_webapp/api/forms.py": ["/secuwear_webapp/api/models.py"], "/secuwear_webapp/api/serializers.py": ["/secuwear_webapp/api/models.py"]}
|
39,383
|
MLHale/secuwear-project
|
refs/heads/master
|
/secuwear_backend/api/migrations/0005_auto_20170606_2027.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-06-06 20:27
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration (Django 1.11.1).

    Reverses migration 0004's camelCase naming: renames every ``btleevent``
    column to all-lowercase and updates the model ordering option.
    """

    dependencies = [
        ('api', '0004_auto_20170606_2005'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='btleevent',
            options={'ordering': ('arrivaltime',), 'verbose_name_plural': 'btleevents'},
        ),
        migrations.RenameField(
            model_name='btleevent',
            old_name='accessAddress',
            new_name='accessaddress',
        ),
        migrations.RenameField(
            model_name='btleevent',
            old_name='advertisingAddress',
            new_name='advertisingaddress',
        ),
        migrations.RenameField(
            model_name='btleevent',
            old_name='advertisingHeader',
            new_name='advertisingheader',
        ),
        migrations.RenameField(
            model_name='btleevent',
            old_name='arrivalTime',
            new_name='arrivaltime',
        ),
        migrations.RenameField(
            model_name='btleevent',
            old_name='btleData',
            new_name='btledata',
        ),
        migrations.RenameField(
            model_name='btleevent',
            old_name='capLength',
            new_name='caplength',
        ),
        migrations.RenameField(
            model_name='btleevent',
            old_name='epochTime',
            new_name='epochtime',
        ),
        migrations.RenameField(
            model_name='btleevent',
            old_name='highestLayer',
            new_name='highestlayer',
        ),
        migrations.RenameField(
            model_name='btleevent',
            old_name='ppiDlt',
            new_name='ppidlt',
        ),
        migrations.RenameField(
            model_name='btleevent',
            old_name='ppiFlags',
            new_name='ppiflags',
        ),
        migrations.RenameField(
            model_name='btleevent',
            old_name='ppiHeaderLength',
            new_name='ppiheaderlength',
        ),
        migrations.RenameField(
            model_name='btleevent',
            old_name='ppiReserved',
            new_name='ppireserved',
        ),
        migrations.RenameField(
            model_name='btleevent',
            old_name='ppiVersion',
            new_name='ppiversion',
        ),
    ]
|
{"/secuwear_webapp/api/forms.py": ["/secuwear_webapp/api/models.py"], "/secuwear_webapp/api/serializers.py": ["/secuwear_webapp/api/models.py"]}
|
39,384
|
MLHale/secuwear-project
|
refs/heads/master
|
/secuwear_backend/secuwear_backend/localsettings.py
|
# Deployment-local settings imported by the main Django settings module.
# Set to DEV for debug and other configuration items. PROD otherwise...
ENVIRONMENT = 'DEV'
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '<insert secret key here>'
#ROOT_URLCONF = 'urls'
ROOT_URLCONF = 'secuwear_backend.urls'
WSGI_APPLICATION = 'secuwear_backend.wsgi.application'
# NOTE(review): database credentials are committed in plain text here;
# they should be moved to environment variables or an untracked file.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'secuwear',
        'USER': 'secudbadmin',
        'PASSWORD': 'secuwear',
        'HOST': 'localhost',
        'PORT': '',
    }
}
|
{"/secuwear_webapp/api/forms.py": ["/secuwear_webapp/api/models.py"], "/secuwear_webapp/api/serializers.py": ["/secuwear_webapp/api/models.py"]}
|
39,385
|
MLHale/secuwear-project
|
refs/heads/master
|
/secuwear_backend/api/urls.py
|
from django.conf.urls import include, url
#Django Rest Framework
from rest_framework import routers
from api import views
from django.views.decorators.csrf import csrf_exempt
#from rest_framework.urlpatterns import format_suffix_patterns
#REST API routes
# DRF router: one REST endpoint per registered viewset, no trailing slash.
router = routers.DefaultRouter(trailing_slash=False)
router.register(r'users', views.UserViewSet)
router.register(r'groups', views.GroupViewSet)
router.register(r'events', views.EventViewSet)
router.register(r'runs', views.RunViewSet)
router.register(r'experiments', views.ExperimentViewSet)
router.register(r'profiles', views.ProfileViewSet)
#router.register(r'btleevents', views.BtleEventViewSet)
#router.register(r'androidevents', views.AndroidEventViewSet)
#REST API
urlpatterns = [
    url(r'^session/', views.Session.as_view()),
    # Registration is CSRF-exempt so external clients can POST directly.
    url(r'^register', csrf_exempt(views.Register.as_view())),
    # Router URLs last: the r'^' prefix would otherwise shadow the above.
    url(r'^', include(router.urls)),
    #Django Rest Auth
    # url(r'^auth/', include('rest_framework.urls')),
]
|
{"/secuwear_webapp/api/forms.py": ["/secuwear_webapp/api/models.py"], "/secuwear_webapp/api/serializers.py": ["/secuwear_webapp/api/models.py"]}
|
39,386
|
MLHale/secuwear-project
|
refs/heads/master
|
/secuwear_webapp/api/serializers.py
|
from rest_framework import serializers
from .models import *
class TemperatureSerializer(serializers.HyperlinkedModelSerializer):
    """Serializes Temperature readings (timestamp + celsius)."""
    class Meta:
        model = Temperature
        fields = ('time', 'celsius')
class BarometerSerializer(serializers.HyperlinkedModelSerializer):
    """Serializes Barometer readings (timestamp, pressure, altitude)."""
    class Meta:
        model = Barometer
        fields = ('time', 'pressure', 'altitude')
class IlluminanceSerializer(serializers.HyperlinkedModelSerializer):
    """Serializes Illuminance readings (timestamp + illuminance)."""
    class Meta:
        model = Illuminance
        fields = ('time', 'illuminance')
'''
class EventSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Event
fields = ('created', 'updated', 'clocktimedifference', 'eventtype', 'event', 'codereference', 'domain', 'run')
'''
|
{"/secuwear_webapp/api/forms.py": ["/secuwear_webapp/api/models.py"], "/secuwear_webapp/api/serializers.py": ["/secuwear_webapp/api/models.py"]}
|
39,387
|
MLHale/secuwear-project
|
refs/heads/master
|
/secuwear_backend/api/serializers.py
|
from django.contrib.auth.models import User, Group
from rest_framework import serializers
from rest_framework_json_api.relations import *
#load django and webapp models
#from django.contrib.auth.models import *
from api.models import *
class UserSerializer(serializers.HyperlinkedModelSerializer):
    """Serializes Django auth users together with their experiments."""
    class Meta:
        model = User
        # SECURITY NOTE(review): 'password' is listed as a readable field,
        # so the password hash is exposed in API responses; it should be
        # write-only (e.g. extra_kwargs) — confirm before changing clients.
        fields = ('id', 'url', 'username', 'email', 'groups', 'experiments', 'password')
        #fields = ('url', 'username', 'email', 'groups', 'experiments')
class GroupSerializer(serializers.HyperlinkedModelSerializer):
    """Serializes Django auth groups (hyperlink + name)."""
    class Meta:
        model = Group
        fields = ('url', 'name')
class EventSerializer(serializers.HyperlinkedModelSerializer):
    """Serializes Event records for the events endpoint."""
    class Meta:
        model = Event
        # NOTE(review): 'data', 'size' and 'domainVal' must exist on the
        # Event model for this to load — verify against api/models.py.
        fields = ('created', 'updated', 'clocktimedifference', 'eventtype', 'event','data', 'codereference','size', 'domain','domainVal', 'run')
class RunSerializer(serializers.HyperlinkedModelSerializer):
    """Serializes Run records with their related events and btleevents."""
    class Meta:
        model = Run
        fields = ('created', 'updated', 'owner', 'name', 'description', 'experiment', 'events', 'btleevents')
class ExperimentSerializer(serializers.HyperlinkedModelSerializer):
    """Serializes Experiment records with their related runs."""
    class Meta:
        model = Experiment
        fields = ('created', 'updated', 'owner', 'name', 'description', 'runs')
class ProfileSerializer(serializers.HyperlinkedModelSerializer):
    #fields = '__all__'
    # NOTE(review): the initial migration names Profile's user relation
    # 'user', not 'owner' — confirm the model defines 'owner' or this
    # nested field will not resolve.
    owner = UserSerializer(read_only=True)
    class Meta:
        model = Profile
        fields = ( 'owner', 'roles', 'gender', 'age', 'educationlevel', 'city','ip','state')
#class BtleEventSerializer(serializers.HyperlinkedModelSerializer):
# run = RunSerializer(read_only=True)
#class Meta:
#model = BtleEvent
#fields = ('run', 'arrivaltime', 'btletype', 'advertisingaddress', 'advertisingheader', 'crc', 'btledata', 'channelindex', 'advertisingdata', 'advertisingtype', 'company', 'companydata', 'domain')
#class AndroidEventSerializer(serializers.HyperlinkedModelSerializer):
#class Meta:
#model = AndroidEvent
#fields = ('arrivaltime', 'fragment', 'setup', 'boardReady', 'request', 'response')
|
{"/secuwear_webapp/api/forms.py": ["/secuwear_webapp/api/models.py"], "/secuwear_webapp/api/serializers.py": ["/secuwear_webapp/api/models.py"]}
|
39,388
|
MLHale/secuwear-project
|
refs/heads/master
|
/secuwear_backend/api/migrations/0013_event_size.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2018-04-05 16:32
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: add the optional ``size`` CharField to Event."""
    dependencies = [
        ('api', '0012_auto_20180113_1300'),
    ]
    operations = [
        migrations.AddField(
            model_name='event',
            name='size',
            field=models.CharField(blank=True, max_length=256),
        ),
    ]
|
{"/secuwear_webapp/api/forms.py": ["/secuwear_webapp/api/models.py"], "/secuwear_webapp/api/serializers.py": ["/secuwear_webapp/api/models.py"]}
|
39,389
|
MLHale/secuwear-project
|
refs/heads/master
|
/secuwear_backend/api/migrations/0002_auto_20170606_1939.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-06-06 19:39
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: point BtleEvent.run at Run with reverse name 'btleevents'."""
    dependencies = [
        ('api', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='btleevent',
            name='run',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='btleevents', to='api.Run'),
        ),
    ]
|
{"/secuwear_webapp/api/forms.py": ["/secuwear_webapp/api/models.py"], "/secuwear_webapp/api/serializers.py": ["/secuwear_webapp/api/models.py"]}
|
39,390
|
MLHale/secuwear-project
|
refs/heads/master
|
/secuwear_backend/api/models.py
|
from __future__ import unicode_literals
from django.db import models
from django.core.validators import *
from django.contrib.auth.models import User, Group
from django.contrib import admin
import base64
# Create your models here.
class Experiment(models.Model):
    """
    An experiment owned by one User; groups one or more Runs.
    """
    created = models.DateTimeField(auto_now_add=True)  # set once on insert
    updated = models.DateTimeField(auto_now=True)      # refreshed on every save()
    name = models.CharField(max_length=1024, blank=True) #, validators=[validate_no_xss, validate_no_html])
    description = models.TextField(blank=True) #, validators=[validate_no_xss, validate_no_html])
    #users foriegn key
    #if a Experiment model has a User that is, a User contains multiple Experiments but each Experiment only has one User use the following definitions:
    owner = models.ForeignKey(User, on_delete=models.CASCADE, related_name='experiments')
    #patterns
    def __str__(self):
        # Display name used by the admin and shell.
        return str(self.name)
    class Meta:
        #This will be used by the admin interface
        verbose_name_plural = "experiments"
    class JSONAPIMeta:
        # Resource name used by the JSON:API renderer.
        resource_name = "experiments"
class ExperimentAdmin(admin.ModelAdmin):
    #This inner class indicates to the admin interface how to display a post
    #See the Django documentation for more information
    # Columns shown on the Experiment changelist page.
    list_display = ('created', 'updated', 'name', 'description', 'owner')
class Run(models.Model):
    """
    A single run of an Experiment; Events/BtleEvents attach to a Run.
    """
    created = models.DateTimeField(auto_now_add=True)  # set once on insert
    updated = models.DateTimeField(auto_now=True)      # refreshed on every save()
    name = models.CharField(max_length=1024, blank=True) #, validators=[validate_no_xss, validate_no_html])
    description = models.TextField(blank=True) #, validators=[validate_no_xss, validate_no_html])
    #experiment (fk)
    #if a Run model has a Experiment that is, an Experiment contains multiple Runs but each Run only has one Experiment use the following definitions:
    experiment = models.ForeignKey(Experiment, on_delete=models.CASCADE, related_name='runs')
    owner = models.ForeignKey(User, on_delete=models.CASCADE, related_name='runs')
    def __str__(self):
        return str(self.name)
    class Meta:
        #This will be used by the admin interface
        verbose_name_plural = "runs"
class RunAdmin(admin.ModelAdmin):
    #This inner class indicates to the admin interface how to display a post
    #See the Django documentation for more information
    # Columns shown on the Run changelist page.
    list_display = ('created', 'updated', 'owner', 'name', 'description', 'experiment')
class BtleEvent(models.Model):
    '''
    One captured Bluetooth LE advertising frame (frame/ppi/btle fields
    extracted from a pcap).
    '''
    arrivaltime = models.DateTimeField(auto_now_add=True)  # capture/ingest timestamp
    #btle
    btletype = models.CharField(max_length=200)
    channelindex = models.CharField(max_length=200, null=True)
    advertisingheader = models.CharField(max_length=200)
    advertisingaddress = models.CharField(max_length=300)
    advertisingdata = models.CharField(max_length=200)
    advertisingtype = models.CharField(max_length=200)
    company = models.CharField(max_length=200, null=True)
    companydata = models.CharField(max_length=300, null=True)
    btledata = models.CharField(max_length=300, null=True)
    crc = models.CharField(max_length=200)
    domain = models.CharField(max_length=1024, blank=True) #, validators=[validate_no_xss, validate_no_html])
    # run (fk)
    # If the btle event has a run that is, a run contains multiple btle events but each btle event has only one run use the following defenitions:
    #run = models.ForeignKey(Run, on_delete=models.CASCADE, related_name='btleevents')
    # NOTE(review): run is temporarily a CharField (see comment above); any
    # serializer relying on a 'btleevents' reverse relation will break.
    run = models.CharField(max_length=100, blank=True) #Set CharField for testing purpose
    def __str__(self):
        return str(self.arrivaltime)
    class Meta:
        #This will be used by the admin interface
        verbose_name_plural = "btleevents"
        ordering = ('arrivaltime',)
    class JSONAPIMeta:
        resource_name = "btleevents"
class BtleEventAdmin(admin.ModelAdmin):
    #This inner class indicates to the admin interface how to display a post
    #See the Django documentation for more information
    # Columns shown on the BtleEvent changelist page.
    list_display = ('id','run', 'arrivaltime', 'btletype', 'advertisingaddress', 'advertisingheader', 'crc', 'btledata', 'channelindex', 'advertisingdata', 'advertisingtype', 'company', 'companydata', 'domain' )
class Event(models.Model):
    """
    A generic instrumentation event reported by a client app, with the
    measured client/server clock difference and the originating domain.
    """
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)
    clocktimedifference = models.CharField(max_length=200, blank=True) #DecimalField(null=True, blank=True, max_digits=None, decimal_places=None)
    eventtype = models.CharField(max_length=1024, blank=True) #, validators=[validate_no_xss, validate_no_html])
    event = models.TextField(blank=True) #, validators=[validate_no_xss, validate_no_html])
    data = models.TextField(blank=True)
    codereference = models.TextField(blank=True) #, validators=[validate_no_xss, validate_no_html])
    size = models.CharField(max_length=256, blank=True)
    domain = models.CharField(max_length=1024, blank=True) #, validators=[validate_no_xss, validate_no_html])
    # Numeric code for domain (set by the view: 0=bluetooth, 1=Mobile, 2=Web).
    domainVal = models.CharField(max_length=10, blank=True)
    #runid(fk)
    #if an Event model has a Run that is, a Run contains multiple Events but each Event only has one Run use the following definitions:
    #run = models.ForeignKey(Run, on_delete=models.CASCADE, related_name='events')
    # NOTE(review): run is temporarily a CharField (see comment above).
    run = models.CharField(max_length=100, blank=True) #Set CharField for testing purpose
    def __str__(self):
        return str(self.eventtype) + "_" + str(self.domain)
    class Meta:
        #This will be used by the admin interface
        verbose_name_plural = "events"
    class JSONAPIMeta:
        resource_name = "events"
class EventAdmin(admin.ModelAdmin):
    #This inner class indicates to the admin interface how to display a post
    #See the Django documentation for more information
    # Columns shown on the Event changelist page.
    list_display = ('created', 'updated', 'run', 'clocktimedifference', 'eventtype', 'event', 'data', 'codereference', 'size', 'domain', 'domainVal')
class Profile(models.Model):
    '''
    Demographic / role information attached one-to-one to a User.
    '''
    # JSON string of role flags; defaults to a plain subject account.
    roles = models.CharField(max_length=200, blank=False, default="{\"admin\": false, \"researcher\": false, \"subject\": true}")
    gender = models.CharField(max_length=100, blank=False)
    age = models.IntegerField(blank=False)
    educationlevel = models.CharField(max_length=200, blank=False)
    city = models.CharField(max_length=200, blank=False)
    state = models.CharField(max_length=200, blank=False)
    ip = models.CharField(max_length=200, blank=False)
    # user (fk)
    # This line defines the relatinoship between the user and the profile.
    # NOTE(review): related_name='user' makes the reverse accessor
    # ``some_user.user`` -- confusing; 'profile' would be conventional.
    user = models.OneToOneField(User, on_delete=models.CASCADE, related_name='user')
    def __str__(self):
        return self.user.username
    class JSONAPIMeta:
        resource_name = "profiles"
class ProfileAdmin(admin.ModelAdmin):
    # Only the owning user is shown on the Profile changelist page.
    list_display = ('user',)
class AndroidEvent(models.Model):
    """One lifecycle/request event reported by the Android client app."""
    arrivaltime = models.DateTimeField(auto_now_add=True)  # server ingest time
    fragment = models.CharField(max_length=500) #give general names change fragments into codefile, AndroidEvent into AppEvent
    setup = models.TextField(blank=True)
    boardReady = models.CharField(max_length=500)
    request = models.TextField(blank=True)
    response = models.TextField(blank=True)
    #run = models.ForeignKey(Run, on_delete=models.CASCADE, related_name='androidevents')
    def __str__(self):
        return str(self.arrivaltime)
    class JSONAPIMeta:
        resource_name = "androidevents"
class AndroidEventAdmin(admin.ModelAdmin):
    # Columns shown on the AndroidEvent changelist page.
    list_display = ('arrivaltime', 'fragment')
|
{"/secuwear_webapp/api/forms.py": ["/secuwear_webapp/api/models.py"], "/secuwear_webapp/api/serializers.py": ["/secuwear_webapp/api/models.py"]}
|
39,391
|
MLHale/secuwear-project
|
refs/heads/master
|
/secuwear_backend/api/views.py
|
#from django.shortcuts import render
# Create your views here.
from django.contrib.auth.models import *
from django.contrib.auth import *
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
from rest_framework import viewsets
from rest_framework.views import APIView
from rest_framework.permissions import AllowAny
from rest_framework.response import Response
from rest_framework import status
from api.serializers import *
#from django.shortcuts import render_to_response
from django.template import RequestContext
from django_filters.rest_framework import DjangoFilterBackend
#Import time
import time, datetime
from django.shortcuts import *
# Import models
from django.db import models
from django.contrib.auth.models import *
from api.models import *
#REST API
from rest_framework import viewsets, filters
from django.http import Http404
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from django.contrib.auth import authenticate, login, logout
from rest_framework.permissions import *
from rest_framework.decorators import *
from rest_framework.authentication import *
import os, pyshark
#filters
#from filters.mixins import *
from api.serializers import *
from api.pagination import *
from pprint import PrettyPrinter
def home(request):
    """
    Send requests to / to the ember.js clientside app.

    NOTE(review): ``render_to_response`` with a RequestContext argument was
    removed in Django 2.0 -- replace with ``render(request, 'index.html')``
    when upgrading.
    """
    return render_to_response('index.html',
        {}, RequestContext(request))
# This is the Group view set.
class GroupViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows groups to be viewed or edited.
"""
queryset = Group.objects.all()
serializer_class = GroupSerializer
# This is a Event view set for handling request.
class EventViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows groups to be viewed or edited.
"""
permission_classes = (AllowAny,)
queryset = Event.objects.all()
serializer_class = EventSerializer
filter_backends = (DjangoFilterBackend,)
filter_fields = ('run',)
def get(self, request, format=None):
snippets = Event.objects.all()
serializer = EventSerializer(snippets, many=True)
return Response(serializer.data)
@method_decorator(csrf_exempt)
def create(self, request, *args, **kwargs): #change create to post after testing
sendTime = request.POST.get('systemTime') #Time request occurred in App
eventType = request.POST.get('eventtype')
event = request.POST.get('event')
data = request.POST.get('data')
codereference = request.POST.get('codereference')
size = request.POST.get('size')
domain = request.POST.get('domain')
currentTime = time.time() #gives current system time in seconds
currentTimeMS = currentTime * 1000 #converting time to ms
run = 59
sTime = long(sendTime)
cTime = long(currentTimeMS)
print "Received time= "+str(sendTime)
print "System time= "+str(cTime)
timeDiff = abs(cTime - sTime)
print "Difference = "+str(timeDiff)
if (domain == "bluetooth"):
domainVal = 0
elif (domain == "Mobile"):
domainVal = 1
elif (domain == "Web"):
domainVal = 2
else:
domainVal = NULL
newEvent = Event(clocktimedifference=timeDiff, eventtype=eventType, event=event, codereference=codereference, data=data, size=size ,domain=domain, domainVal=domainVal, run=run)
newEvent.save()
return Response(status=status.HTTP_204_NO_CONTENT)
# This is a Run view set for handling request.
class RunViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows groups to be viewed or edited.
"""
queryset = Run.objects.all()
serializer_class = RunSerializer
filter_backends = (DjangoFilterBackend,)
filter_fields = ('events',)
# This is a Experiment view set for handling request.
class ExperimentViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows groups to be viewed or edited.
"""
queryset = Experiment.objects.all()
serializer_class = ExperimentSerializer
filter_backends = (DjangoFilterBackend,)
filter_fields = ('owner',)
# This is a User view set for handling request.
class UserViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows users to be viewed.
"""
resource_name = 'users'
queryset = User.objects.all().order_by('-date_joined')
serializer_class = UserSerializer
# This is a Profile view set for handling request.
class ProfileViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows users to be viewed.
"""
resource_name = 'profiles'
queryset = Profile.objects.all()
serializer_class = ProfileSerializer
# This is a Btle Event view set for handling request.
'''
class BtleEventViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows Btle to be viewed.
"""
#permission_classes = (AllowAny,)
resource_name = 'btleevents'
queryset = BtleEvent.objects.all()
serializer_class = BtleEventSerializer
filter_backends = (DjangoFilterBackend,)
Sfilter_fields = ('run',)
# This is a Btle Event view set for handling request.
class AndroidEventViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows Btle to be viewed.
"""
permission_classes = (AllowAny,)
resource_name = 'androidevent'
queryset = AndroidEvent.objects.all()
serializer_class = AndroidEventSerializer
#filter_backends = (DjangoFilterBackend,)
#filter_fields = ('run',)
#def create (self, request, *args, **kwargs):
#print request.data
#return Response(status=status.HTTP_204_NO_CONTENT)
'''
# This is a Session view set for handling request.
class Session(APIView):
permission_classes = (AllowAny,)
def form_response(self, isauthenticated, userid, username, error=""):
data = {
'isauthenticated': isauthenticated,
'userid': userid,
'username': username
}
if error:
data['message'] = error
return Response(data)
def get(self, request, *args, **kwargs):
# Get the current user
if request.user.is_authenticated():
return self.form_response(True, request.user.id, request.user.username)
return self.form_response(False, None, None)
def post(self, request, *args, **kwargs):
# Login
username = request.POST.get('username')
password = request.POST.get('password')
user = authenticate(username=username, password=password)
if user is not None:
if user.is_active:
login(request, user)
return self.form_response(True, user.id, user.username)
return self.form_response(False, None, None, "Account is suspended")
return self.form_response(False, None, None, "Invalid username or password")
def delete(self, request, *args, **kwargs):
# Logout
logout(request)
return Response(status=status.HTTP_204_NO_CONTENT)
# This is a Register view set for handling request.
class Register(APIView):
permission_classes = (AllowAny,)
def post(self, request, *args, **kwargs):
# Login
username = request.POST.get('username') #you need to apply validators to these
print username
password = request.POST.get('password') #you need to apply validators to these
email = request.POST.get('email') #you need to apply validators to these
gender = request.POST.get('gender') #you need to apply validators to these
age = request.POST.get('age') #you need to apply validators to these
educationlevel = request.POST.get('educationlevel') #you need to apply validators to these
city = request.POST.get('city') #you need to apply validators to these
state = request.POST.get('state') #you need to apply validators to these
print request.POST.get('username')
if User.objects.filter(username=username).exists():
return Response({'username': 'Username is taken.', 'status': 'error'})
elif User.objects.filter(email=email).exists():
return Response({'email': 'Email is taken.', 'status': 'error'})
#especially before you pass them in here
newuser = User.objects.create_user(email=email, username=username, password=password)
newprofile = Profile(user=newuser, gender=gender, age=age, educationlevel=educationlevel, city=city, state=state)
newprofile.save()
return Response({'status': 'success', 'userid': newuser.id, 'profile': newprofile.id})
|
{"/secuwear_webapp/api/forms.py": ["/secuwear_webapp/api/models.py"], "/secuwear_webapp/api/serializers.py": ["/secuwear_webapp/api/models.py"]}
|
39,392
|
MLHale/secuwear-project
|
refs/heads/master
|
/secuwear_backend/api/migrations/0010_auto_20180112_1353.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2018-01-12 19:53
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: point Event.run at Run with reverse name 'events'."""
    dependencies = [
        ('api', '0009_auto_20171208_0454'),
    ]
    operations = [
        migrations.AlterField(
            model_name='event',
            name='run',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='events', to='api.Run'),
        ),
    ]
|
{"/secuwear_webapp/api/forms.py": ["/secuwear_webapp/api/models.py"], "/secuwear_webapp/api/serializers.py": ["/secuwear_webapp/api/models.py"]}
|
39,393
|
MLHale/secuwear-project
|
refs/heads/master
|
/secuwear_backend/api/migrations/0003_auto_20170606_1948.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-06-06 19:48
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated: set BtleEvent default ordering and plural name.

    NOTE(review): orders by 'arrival_time' while the current model field is
    named 'arrivaltime' -- presumably reconciled by a later migration; verify.
    """
    dependencies = [
        ('api', '0002_auto_20170606_1939'),
    ]
    operations = [
        migrations.AlterModelOptions(
            name='btleevent',
            options={'ordering': ('arrival_time',), 'verbose_name_plural': 'btleevents'},
        ),
    ]
|
{"/secuwear_webapp/api/forms.py": ["/secuwear_webapp/api/models.py"], "/secuwear_webapp/api/serializers.py": ["/secuwear_webapp/api/models.py"]}
|
39,394
|
MLHale/secuwear-project
|
refs/heads/master
|
/secuwear_backend/api/migrations/0015_event_domainval.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2018-08-07 22:38
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: add the optional ``domainVal`` CharField to Event."""
    dependencies = [
        ('api', '0014_event_data'),
    ]
    operations = [
        migrations.AddField(
            model_name='event',
            name='domainVal',
            field=models.CharField(blank=True, max_length=10),
        ),
    ]
|
{"/secuwear_webapp/api/forms.py": ["/secuwear_webapp/api/models.py"], "/secuwear_webapp/api/serializers.py": ["/secuwear_webapp/api/models.py"]}
|
39,395
|
MLHale/secuwear-project
|
refs/heads/master
|
/secuwear_backend/api/migrations/0008_androidevent.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2017-12-05 21:16
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: create the AndroidEvent table."""
    dependencies = [
        ('api', '0007_auto_20171102_2344'),
    ]
    operations = [
        migrations.CreateModel(
            name='AndroidEvent',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('arrivaltime', models.DateTimeField(auto_now_add=True)),
                ('fragment', models.CharField(max_length=500)),
                ('setup', models.TextField(blank=True)),
                ('boardReady', models.CharField(max_length=500)),
                ('request', models.TextField(blank=True)),
                ('response', models.TextField(blank=True)),
            ],
        ),
    ]
|
{"/secuwear_webapp/api/forms.py": ["/secuwear_webapp/api/models.py"], "/secuwear_webapp/api/serializers.py": ["/secuwear_webapp/api/models.py"]}
|
39,396
|
MLHale/secuwear-project
|
refs/heads/master
|
/secuwear-client-ubertooth/setup.py
|
#!/usr/bin/env python
# Prefer setuptools (needed for entry_points/install_requires); fall back
# to distutils on minimal installs.
try:
    from setuptools import setup
except ImportError:  # was a bare ``except:``, which would also hide real errors
    from distutils.core import setup

dependencies = ['docopt', 'termcolor']

setup(
    name='pcaps',
    version='0.0.1',
    description='This file will push data to the backend server',
    # was 'github.com\secuwear-client' -- a literal backslash in the URL
    url='github.com/secuwear-client',
    author='nadusumilli',
    author_email='nadusumilli@unomaha.edu',
    license='',
    install_requires=dependencies,
    packages=['src'],
    entry_points={
        'console_scripts': [
            'pcaps=src.main:main'
        ]
    },
    zip_safe=False
)
|
{"/secuwear_webapp/api/forms.py": ["/secuwear_webapp/api/models.py"], "/secuwear_webapp/api/serializers.py": ["/secuwear_webapp/api/models.py"]}
|
39,397
|
MLHale/secuwear-project
|
refs/heads/master
|
/secuwear-client-ubertooth/src/main.py
|
#!/usr/bin/env python
"""
Usage: pcaps <experiment_id> <handle_type> <handle_path>
Options:
-h, --help
-f, --file
-d. --directory
"""
from docopt import docopt
from pcaps import Pcaps
def main():
    """CLI entry point: push pcap-derived BTLE events to the backend.

    Parses docopt arguments, logs in, resolves the experiment's owner and
    first run from the JSON:API payload, then dispatches on <handle_type>
    ('packet' streams live; anything else posts a run and validates files).
    """
    arguments = docopt(__doc__, version='0.0.1')
    # NOTE(review): endpoint URLs and credentials are hard-coded -- move to
    # configuration before deploying.
    pcaps = Pcaps('http://localhost:8000/api/session/', 'secuwear', 'secuwear', 'http://localhost:8000/api/btleevents')
    loginResponse = pcaps.loginPost()
    experiment = pcaps.getExperiment(arguments['<experiment_id>'], loginResponse)
    user_id = experiment['data']['relationships']['owner']['data']['id']
    if experiment['data']['relationships']['runs']['data']:
        run_id = experiment['data']['relationships']['runs']['data'][0]['id']
    else:
        run_id = 1  # experiment has no runs yet; fall back to run id 1
    if not arguments['<handle_type>'] == 'packet':
        run = pcaps.postRun(loginResponse, user_id, arguments['<experiment_id>'], run_id)
        pcaps.validateInfo(arguments['<handle_type>'],arguments['<handle_path>'], loginResponse, run)
    else:
        pcaps.validatePacket(arguments['<handle_type>'],arguments['<handle_path>'], arguments['<experiment_id>'], user_id, loginResponse, run_id)
if __name__ == '__main__':
    # raise SystemExit avoids the NameError from the original
    # ``sys.exit(main())`` -- ``sys`` was never imported in this module.
    raise SystemExit(main())
|
{"/secuwear_webapp/api/forms.py": ["/secuwear_webapp/api/models.py"], "/secuwear_webapp/api/serializers.py": ["/secuwear_webapp/api/models.py"]}
|
39,398
|
MLHale/secuwear-project
|
refs/heads/master
|
/secuwear_webapp/api/urls.py
|
from django.conf.urls import url, include
from . import views
from rest_framework import routers
# DRF router: auto-generates CRUD routes for each registered viewset.
router=routers.DefaultRouter(trailing_slash=False)
router.register(r'temperature', views.TemperatureViewSet)
router.register(r'barometer', views.BarometerViewSet)
router.register(r'illuminance', views.IlluminanceViewSet)
#router.register(r'event', views.EventViewSet)
urlpatterns = [
    #url(r'^$',views.home, name='home'),
    # Template-rendered list/create views for the sensor readings.
    url(r'^temperature_list$',views.temperature_list, name='temperature_list'),
    url(r'^temperature_new$', views.temperature_new, name='temperature_new'),
    url(r'^barometer_list$', views.barometer_list, name='barometer_list'),
    url(r'^illuminance_list$', views.illuminance_list, name='illuminance_list'),
    # REST API plus the browsable-API login/logout views.
    url(r'^api/', include(router.urls)),
    url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
|
{"/secuwear_webapp/api/forms.py": ["/secuwear_webapp/api/models.py"], "/secuwear_webapp/api/serializers.py": ["/secuwear_webapp/api/models.py"]}
|
39,418
|
pertschuk/symmetrical-waffle
|
refs/heads/master
|
/model/transformers.py
|
import numpy as np
from transformers import (
AutoModelForSequenceClassification,
AutoTokenizer,
)
import torch.nn
import torch
from nboost.model.base import BaseModel
from model.bert_model import tokenization
from collections import defaultdict
class TransformersModel(BaseModel):
    """HuggingFace-transformers reranker: scores (query, passage) pairs.

    Loads a sequence-classification checkpoint from ``self.model_dir``
    (provided by BaseModel) and ranks candidate passages for a query by
    log-softmax relevance score.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.download()  # BaseModel: fetch the checkpoint if missing
        self.logger.info('Loading from checkpoint %s' % str(self.model_dir))
        self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        if self.device == torch.device("cpu"):
            self.logger.info("RUNNING ON CPU")
        else:
            self.logger.info("RUNNING ON CUDA")
            torch.cuda.synchronize(self.device)
        self.rerank_model = AutoModelForSequenceClassification.from_pretrained(
            str(self.model_dir))
        self.tokenizer = AutoTokenizer.from_pretrained(str(self.model_dir))
        self.rerank_model.to(self.device, non_blocking=True)

    def rank(self, query, choices):
        """Return (descending score order indices, raw log-probs) for *choices*.

        *query* is expected to be ``bytes`` (``encode`` calls ``.decode()``).
        NOTE(review): callers that treat the return value as just the index
        array will break -- this returns a 2-tuple.
        """
        input_ids, attention_mask, token_type_ids = self.encode(query, choices)
        with torch.no_grad():
            logits = self.rerank_model(input_ids,
                                       attention_mask=attention_mask,
                                       token_type_ids=token_type_ids)[0]
            log_probs = torch.log_softmax(logits, dim=-1)
            scores = np.squeeze(log_probs.detach().cpu().numpy())
            # Binary classifier: keep only the "relevant" column.
            if len(scores.shape) > 1 and scores.shape[1] == 2:
                scores = np.squeeze(scores[:,1])
            if len(scores) == 1:
                scores = [scores]
            return np.argsort(scores)[::-1], log_probs

    def encode(self, query, choices):
        """Tokenize query+passages into padded input/segment/mask tensors.

        Uses the legacy BERT FullTokenizer (not self.tokenizer) so features
        match the TensorFlow pipeline byte-for-byte.
        """
        self.vocab_file = str(self.model_dir.joinpath('vocab.txt'))
        tokenizer = tokenization.FullTokenizer(vocab_file=self.vocab_file, do_lower_case=True)
        query = tokenization.convert_to_unicode(query.decode())
        query_token_ids = tokenization.convert_to_bert_input(
            text=query, max_seq_length=self.max_seq_len, tokenizer=tokenizer,
            add_cls=True)
        all_features = []
        for i, choice in enumerate(choices):
            doc_text = str(choice.body)
            doc_token_id = tokenization.convert_to_bert_input(
                text=tokenization.convert_to_unicode(doc_text),
                max_seq_length=self.max_seq_len - len(query_token_ids),
                tokenizer=tokenizer,
                add_cls=False)
            query_ids = query_token_ids
            doc_ids = doc_token_id
            input_ids = query_ids + doc_ids
            # Segment 0 = query tokens, segment 1 = passage tokens.
            query_segment_id = [0] * len(query_ids)
            doc_segment_id = [1] * len(doc_ids)
            segment_ids = query_segment_id + doc_segment_id
            input_mask = [1] * len(input_ids)
            features = {
                "input_ids": input_ids,
                "segment_ids": segment_ids,
                "attention_mask": input_mask,
            }
            all_features.append(features)
            # Debug dump used by test_eq_features.py to diff against the TF
            # pipeline -- remove for production use.
            def to_tsv(name):
                return ','.join([str(f) for f in features[name]])
            with open('pt_features.txt', 'a') as tf_features:
                tf_features.write(query + '\t' + doc_text + '\t' + to_tsv('input_ids') + '\t'
                                  + to_tsv('segment_ids') + '\n')
        max_len = min(max(len(t['input_ids']) for t in all_features), self.max_seq_len)
        batches = defaultdict(list)
        for features in all_features:
            for k, v in features.items():
                # NOTE(review): pads short rows but never truncates rows longer
                # than max_len -- confirm inputs cannot exceed max_seq_len here.
                batches[k].append(v + [0] * (max_len - len(v[:max_len])))
        tensors = dict()
        for k, v in batches.items():
            tensors[k] = torch.tensor(v).to(self.device, non_blocking=True)
        return tensors['input_ids'], tensors['attention_mask'], tensors['segment_ids']

    def __exit__(self, *args):
        # Drop the model reference on context exit to free GPU memory.
        self.rerank_model = None

    def __enter__(self, *args):
        return self
|
{"/msmarco.py": ["/model/transformers.py"]}
|
39,419
|
pertschuk/symmetrical-waffle
|
refs/heads/master
|
/msmarco.py
|
import argparse
import numpy as np
from tqdm import tqdm
from nboost.types import Choice
def eval(model):
    """Compute MRR over test_set.tsv in groups of ``args.rerank_num`` rows.

    Each group of lines (query \t passage \t label) is assumed to share one
    query; groups with no positive label are skipped. Relies on the
    module-level ``args`` namespace.

    NOTE(review): shadows the ``eval`` builtin; also ``model.rank`` may
    return a (ranks, log_probs) tuple (see TransformersModel) -- confirm
    the indexing below still works for the model class used.
    """
    # load_and_cache_eval()
    qrels = []
    i = 0
    total = 0        # number of evaluated query groups
    total_mrr = 0    # running sum of reciprocal ranks
    with open('test_set.tsv', 'r') as test_set:
        eval_iterator = tqdm(test_set, desc="Evaluating")
        candidates = []
        labels = []
        queries = []
        for line in eval_iterator:
            query, passage, label = line.rstrip().split('\t')
            queries.append(query)
            candidates.append(passage)
            labels.append(int(float(label)))
            i += 1
            if i % args.rerank_num == 0:
                # Skip groups with no relevant passage at all.
                if sum(labels) == 0:
                    candidates = []
                    labels = []
                    queries = []
                    continue
                assert len(set(queries)) == 1
                total += 1
                print('ranking %s' % len(candidates))
                choices = [Choice('0', candidate.encode()) for candidate in candidates]
                ranks = model.rank(query.encode(), choices)
                # Position of the first relevant passage in the ranked order.
                top_rank = np.argmax(np.array(labels)[ranks])
                total_mrr += 1/(top_rank + 1)
                eval_iterator.set_description("Current rank: %s" % top_rank +
                    " MRR: %s" % (total_mrr / total) + "Total: %s " % len(candidates))
                candidates = []
                labels = []
                queries = []
def test_equivilency():
    """Compare TF and PyTorch model logits per test-set row.

    Drops into pdb on the first mismatch so the offending inputs can be
    inspected interactively (intentional debug behavior).
    """
    from model.transformers import TransformersModel
    from model.bert_model import BertModel
    tf_model = BertModel(model_dir=args.tf_model, batch_size=args.batch_size)
    pt_model = TransformersModel(model_dir=args.pt_model, batch_size=args.batch_size)
    with open('test_set.tsv', 'r') as test_set:
        for line in test_set:
            query, passage, label = line.rstrip().split('\t')
            choices = [Choice('0', passage)]
            _, tf_logits = tf_model.rank(query.encode(), choices)
            _, pt_logits = pt_model.rank(query.encode(), choices)
            try:
                np.testing.assert_allclose(tf_logits, pt_logits)
            except:
                import pdb
                pdb.set_trace()
def main():
    """Run MRR evaluation with the model class selected by --model_class.

    NOTE(review): imports here use the ``nboost.model`` package while
    test_equivilency imports plain ``model`` -- confirm which is current.
    """
    if args.test_eq:
        test_equivilency()
        return
    if args.model_class == 'bert_model':
        from nboost.model.bert_model import BertModel
        model = BertModel(model_dir=args.tf_model, batch_size=args.batch_size)
    else:
        from nboost.model.transformers import TransformersModel
        model = TransformersModel(model_dir=args.pt_model, batch_size=args.batch_size)
    eval(model)
if __name__ == '__main__':
    # CLI flags; ``args`` is intentionally module-global so eval()/main()
    # can read it.
    parser = argparse.ArgumentParser()
    parser.add_argument('--eval_steps', default=1000, type=int)
    parser.add_argument('--tf_model', default='bert-base-uncased-msmarco')
    parser.add_argument('--pt_model', default='pt-bert-base-uncased-msmarco')
    parser.add_argument('--batch_size', default=1, type=int)
    parser.add_argument('--max_length', default=128, type=int)
    parser.add_argument("--model_class", default='bert_model')
    parser.add_argument("--rerank_num", default=1000, type=int)  # rows per query group
    parser.add_argument('--test_eq', action='store_true')
    args = parser.parse_args()
    main()
|
{"/msmarco.py": ["/model/transformers.py"]}
|
39,420
|
pertschuk/symmetrical-waffle
|
refs/heads/master
|
/test_eq_features.py
|
def main():
    """Diff the TF and PyTorch feature dumps line by line, printing mismatches.

    The original used ``assert`` + bare ``except`` for control flow; asserts
    are stripped under ``python -O``, which would silently disable the whole
    check, so an explicit comparison is used instead.
    """
    with open('tf_features.txt') as tf_features, open('pt_features.txt') as pt_features:
        for tf_line, pt_line in zip(tf_features, pt_features):
            if tf_line != pt_line:
                print('***TF LINE***')
                print(tf_line)
                print('***PT LINE***')
                print(pt_line)


if __name__ == '__main__':
    main()
|
{"/msmarco.py": ["/model/transformers.py"]}
|
39,421
|
pertschuk/symmetrical-waffle
|
refs/heads/master
|
/create_test_set.py
|
from collections import defaultdict
RERANK_NUM = 1000


def pad_passages(passages, target=RERANK_NUM):
    """Pad *passages* in place with fake negatives until it has *target* rows.

    Each filler tuple reuses the first passage's query with a dummy text and
    label 0.0. An empty list is returned unchanged (there is no query to
    copy from), instead of raising IndexError as the original did. The
    ``target`` parameter generalizes the former hard-coded RERANK_NUM.
    """
    if not passages:
        return passages
    while len(passages) < target:
        passages.append((passages[0][0], 'FAKE PASSAGE', 0.0))
    return passages
# create dev subset of 100 queries = 100,000 to rank
def main():
    """Build test_set.dev: per query, its top-1000 passages labeled via qrels.

    Reads qrels.dev.small.tsv (qid, _, cid, _) and top1000.dev
    (qid, cid, query, passage), labels each passage 1 if (qid, cid) is a
    known relevant pair, pads each query's list to RERANK_NUM rows, and
    writes query \t passage \t label lines for the first ~100 queries.
    """
    # A set gives O(1) membership tests; the original kept a list and
    # scanned it once per top1000 line (O(n*m) overall).
    qrels = set()
    with open('./qrels.dev.small.tsv', 'r') as qrels_file:
        for line in qrels_file:
            qid, _, cid, _ = line.rstrip().split('\t')
            qrels.add((qid, cid))
    dev_set = defaultdict(list)
    with open('./top1000.dev', 'r') as top1000_dev:
        for line in top1000_dev:
            qid, cid, query, passage = line.rstrip().split('\t')
            label = 1 if (qid, cid) in qrels else 0
            dev_set[qid].append((query, passage, label))
    with open('./test_set.dev', 'w') as test_set:
        i = 0
        for qid, passages in dev_set.items():
            passages = pad_passages(passages)
            for (query, passage, label) in passages:
                test_set.write(query + '\t' + passage + '\t' + str(label) + '\n')
            # i counts emitted queries, per the module comment above.
            i += 1
            if i > 100: break


if __name__ == '__main__':
    main()
|
{"/msmarco.py": ["/model/transformers.py"]}
|
39,422
|
pertschuk/symmetrical-waffle
|
refs/heads/master
|
/get_pytorch_model.py
|
from transformers import *
import os
def main():
    """Download bert-base-uncased and save model + tokenizer to ./pt_model."""
    model = BertForSequenceClassification.from_pretrained('bert-base-uncased')
    tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
    save_dir = './pt_model'
    # exist_ok avoids the FileExistsError the original raised on re-runs.
    os.makedirs(save_dir, exist_ok=True)
    model.save_pretrained(save_dir)
    tokenizer.save_pretrained(save_dir)


if __name__ == '__main__':
    main()
|
{"/msmarco.py": ["/model/transformers.py"]}
|
39,433
|
ninadmhatre/zual
|
refs/heads/master
|
/controller/authentication.py
|
# -*- coding: utf-8 -*-
__author__ = 'ninad'
import hashlib
import datetime
import pdb
from flask import Blueprint, render_template, abort, current_app, request, flash, redirect, url_for, session, g
from flask.ext.login import login_user, logout_user, login_required
from application import User, mailer, app
from itsdangerous import URLSafeTimedSerializer
from collections import namedtuple
auth = Blueprint('auth', __name__)
chaabi = 'your_password_123' # <-- EDIT_THIS
MailInfo = namedtuple('MailInfo', 'Sender To Message Subject')
MAX_FAILED_ATTEMPTS = 2
DISABLE_LOGIN_FOR = 15 * 60
DISABLE_LOGIN = False
NOTIFIED = False
_failed_cnt = 0
_last_attempt = datetime.datetime.utcnow()
ts = URLSafeTimedSerializer(app.config['SECRET_KEY'])
email_confirm_key = 'some other secret key' # <-- EDIT_THIS
def authenticate(u, p):
    """Check credentials *u*/*p* against the single admin account.

    Only the configured ADMIN_MAIL user may log in. The supplied password
    and the stored one are both salted with SECRET_KEY and hashed with
    SHA-384; the digests are compared in constant time to avoid leaking
    timing information. Returns True only on a full match.
    """
    import hmac  # stdlib; local import keeps the module's import list unchanged
    if not (u and p):
        return False
    if u not in (current_app.config['ADMIN_MAIL'],):
        return False
    p_salt = p + current_app.config["SECRET_KEY"]
    e_salt = chaabi + current_app.config["SECRET_KEY"]
    d = hashlib.sha384()
    e = hashlib.sha384()
    d.update(p_salt.encode())
    e.update(e_salt.encode())
    # compare_digest replaces '==' to close the timing side channel.
    return hmac.compare_digest(d.hexdigest(), e.hexdigest())
def notify_admin(msg):
    """Mail the admin a one-time alert containing a signed re-enable link.

    Guarded by the module-level NOTIFIED flag so repeated failed logins
    trigger at most one mail until enable() clears the flag.
    """
    global NOTIFIED
    if NOTIFIED:
        return
    print('Sending Warning email...{0}'.format(msg))
    # Token is time-stamped; enable() accepts it for ~3 days (max_age).
    token = ts.dumps(current_app.config['ADMIN_MAIL'], salt=email_confirm_key)
    url = url_for('auth.enable', token=token, _external=True)
    data = '''<h3>Login Disabled!!</h3><br>
    <p>Please use below link in within next 72 hours to re-enable the login!</p><br>
    Enable Link: <a href="{0}">{0}</a>
    <p style="color: red; font-size: 80%">Note: In Case you fail to re-enable login, please bounce the site!
    Sorry but this is the best i could think at this moment!!!</p>
    '''.format(url)
    print('data -> {0}'.format(data))
    mail_details = MailInfo('alert@<< your_domain_com >>', current_app.config['ADMIN_MAIL'], 'Login Disabled!', data)
    mailer.send_simple_mail(mail_details)
    # Remember we alerted so the next failed attempt does not re-send.
    NOTIFIED = True
def disable_login():
    """Flip the module-level kill switch that blocks the login view."""
    global DISABLE_LOGIN
    DISABLE_LOGIN = True
def guard(stage):
    """Track failed login attempts and disable login after too many.

    On 'GET': if the failure budget is exhausted, alert the admin and
    disable login; otherwise, once the last failure is older than
    DISABLE_LOGIN_FOR seconds, reset the failure counter.
    On 'POST': record one more failed attempt.

    :param stage: 'GET' (pre-render check) or 'POST' (after a failed login)
    """
    global _failed_cnt, _last_attempt
    if stage == 'GET':
        if _failed_cnt > MAX_FAILED_ATTEMPTS:
            notify_admin('Warning: Maximum failed attempts reached!')
            disable_login()
        else:
            since_last_attempt = datetime.datetime.utcnow() - _last_attempt
            if since_last_attempt.total_seconds() > DISABLE_LOGIN_FOR:
                _last_attempt = datetime.datetime.utcnow()
                # BUG FIX: was "_failed_cnt += 0", a no-op -- the stale
                # counter must be reset once the cool-down has elapsed.
                _failed_cnt = 0
    elif stage == 'POST':
        _failed_cnt += 1
        _last_attempt = datetime.datetime.utcnow()
        print('Failed login attempt! {0}/{1} attempts...'.format(_failed_cnt, MAX_FAILED_ATTEMPTS))
@auth.route("/login/", methods=['GET', 'POST'])
def login():
    """Render the login form and authenticate POSTed credentials."""
    session.permanent = True
    # Refuse outright while the lockout switch is on.
    if DISABLE_LOGIN:
        flash('error:Login is disable because of many failed login attempts!')
        return render_template('login/login.html', disable=True)
    if request.method == 'POST':
        username = request.form['user']
        password = request.form['chaabi']
        if authenticate(username, password):
            flash("info:Login Successful!")
            login_user(User("test_user"))
            return redirect("/blog")
        # Failed attempt: record it, then fall through to re-render.
        guard('POST')
        flash("error:Invalid Username or Password!")
    guard('GET')
    return render_template('login/login.html')
@auth.route("/enable/<token>", methods=['GET', 'POST'])
def enable(token):
    """Re-enable login via the signed token mailed to the admin."""
    global DISABLE_LOGIN, NOTIFIED
    try:
        # Token expires ~3 days after notify_admin() minted it.
        email = ts.loads(token, salt=email_confirm_key, max_age=84600 * 3)
    except:
        flash('error:Invalid Enable Link!!')
        abort(404)
    if email != current_app.config['ADMIN_MAIL']:
        flash('error:Invalid Enable Link!!!')
        abort(404)
    # Clear both the lockout switch and the one-shot notification flag.
    DISABLE_LOGIN = False
    NOTIFIED = False
    flash('info:Account enabled successfully!')
    return redirect(url_for('auth.login'))
@auth.route("/logout/")
def logout():
    """Log the current user out and return to the landing page."""
    logout_user()
    flash('info:User Logged Out Successfully!')
    return redirect("/")
|
{"/controller/authentication.py": ["/application.py"], "/controller/file_io.py": ["/application.py", "/libs/Utils.py"], "/controller/dashboard.py": ["/application.py"], "/dashboard_mods/RequestParamsAddon.py": ["/libs/AddonReturnType.py"], "/dashboard_mods/RedisAddon.py": ["/libs/RedisCache.py", "/libs/AddonReturnType.py"], "/libs/QuickCache.py": ["/libs/RedisCache.py"], "/dashboard_mods/SiteLogViewerAddon.py": ["/libs/AddonReturnType.py"], "/local_mods/flask-blogging/build/lib/flask_blogging/__init__.py": ["/local_mods/flask-blogging/build/lib/flask_blogging/blogging_filters.py"], "/model/StaticAssets.py": ["/application.py"], "/dashboard_mods/ViewStatsAddon.py": ["/libs/AddonReturnType.py", "/application.py"], "/controller/admin.py": ["/application.py", "/libs/Utils.py"], "/libs/Informer.py": ["/libs/Utils.py"], "/dashboard_mods/ServerAddon.py": ["/libs/AddonReturnType.py"], "/custom_filter.py": ["/libs/Utils.py"], "/controller/api.py": ["/application.py", "/libs/Utils.py"], "/application.py": ["/custom_filter.py", "/model/StatsCollector.py", "/libs/AppLogger.py", "/libs/Mailer.py", "/libs/Informer.py", "/model/StaticAssets.py", "/controller/file_io.py", "/controller/authentication.py", "/controller/admin.py", "/controller/apps.py", "/controller/api.py"]}
|
39,434
|
ninadmhatre/zual
|
refs/heads/master
|
/model/StatsCollector.py
|
# -*- coding: utf-8 -*-
__author__ = 'Ninad'
import os
import datetime
class Stats(object):
    """Append-only, tab-separated stat logs for downloads and page views.

    Each record is "<utc timestamp>\\t<remote addr>\\t<item>\\n" appended to
    a per-category file under *stat_dir*.
    """
    def __init__(self, stat_dir):
        self.stats_dir = stat_dir
        self.download_stats = os.path.join(self.stats_dir, 'download.stats')
        self.view_stats = os.path.join(self.stats_dir, 'view.stats')
    def update_download_count(self, file_name, remote_addr):
        """Record one download of *file_name* (basename only) from *remote_addr*."""
        data = '{0}\t{1}\t{2}\n'.format(self._get_time(), remote_addr, os.path.basename(file_name))
        self._update(self.download_stats, data)
    def update_view_count(self, page, remote_addr):
        """Record one view of *page* from *remote_addr*."""
        data = '{0}\t{1}\t{2}\n'.format(self._get_time(), remote_addr, page)
        self._update(self.view_stats, data)
    def get_download_count(self):
        """Return the number of recorded downloads.

        BUG FIX: the old len(content.split('\\n')) counted the empty string
        after the trailing newline and over-reported by one; it also crashed
        when no download had been recorded yet (missing stats file).
        """
        if not os.path.isfile(self.download_stats):
            return 0
        return len(self._read(self.download_stats).splitlines())
    def get_download_stats(self):
        pass
    def get_view_count(self):
        pass
    def get_view_stats(self):
        pass
    def _get_time(self):
        # Naive UTC timestamp; string-formatted into the record.
        return datetime.datetime.utcnow()
    def _update(self, x_file, data):
        # Append one record and flush so concurrent readers see it promptly.
        with open(x_file, 'a') as f:
            f.write(data)
            f.flush()
    def _read(self, x_file):
        with open(x_file, 'r') as f:
            data = f.read()
        return data
|
{"/controller/authentication.py": ["/application.py"], "/controller/file_io.py": ["/application.py", "/libs/Utils.py"], "/controller/dashboard.py": ["/application.py"], "/dashboard_mods/RequestParamsAddon.py": ["/libs/AddonReturnType.py"], "/dashboard_mods/RedisAddon.py": ["/libs/RedisCache.py", "/libs/AddonReturnType.py"], "/libs/QuickCache.py": ["/libs/RedisCache.py"], "/dashboard_mods/SiteLogViewerAddon.py": ["/libs/AddonReturnType.py"], "/local_mods/flask-blogging/build/lib/flask_blogging/__init__.py": ["/local_mods/flask-blogging/build/lib/flask_blogging/blogging_filters.py"], "/model/StaticAssets.py": ["/application.py"], "/dashboard_mods/ViewStatsAddon.py": ["/libs/AddonReturnType.py", "/application.py"], "/controller/admin.py": ["/application.py", "/libs/Utils.py"], "/libs/Informer.py": ["/libs/Utils.py"], "/dashboard_mods/ServerAddon.py": ["/libs/AddonReturnType.py"], "/custom_filter.py": ["/libs/Utils.py"], "/controller/api.py": ["/application.py", "/libs/Utils.py"], "/application.py": ["/custom_filter.py", "/model/StatsCollector.py", "/libs/AppLogger.py", "/libs/Mailer.py", "/libs/Informer.py", "/model/StaticAssets.py", "/controller/file_io.py", "/controller/authentication.py", "/controller/admin.py", "/controller/apps.py", "/controller/api.py"]}
|
39,435
|
ninadmhatre/zual
|
refs/heads/master
|
/controller/file_io.py
|
# -*- coding: utf-8 -*-
__author__ = 'ninad'
import os
import shutil
from flask import Blueprint, render_template, abort, request, flash, send_from_directory
from flask.ext.login import login_required
from werkzeug import secure_filename
from application import app, stat
from libs.Utils import Utility
# Blueprint for image/document serving and the upload form.
fileio = Blueprint('fileio', __name__)
# Upload whitelist checked by allowed_file() below (extension only).
ALLOWED_EXTENSIONS = {'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif', 'doc', 'docx', 'xls', 'xlsx'}
@fileio.route('/img/<path:img_path>', methods=['GET'])
def get_img(img_path):
    """Serve an image from IMAGE_FOLDER when its extension is whitelisted."""
    extension = os.path.basename(img_path).split('.')[-1]
    if not extension or extension not in app.config['IMAGE_VALID_EXTS']:
        abort(404)
    return send_from_directory(app.config['IMAGE_FOLDER'], img_path)
@fileio.route('/doc/<path:doc_path>', methods=['GET'])
def get_doc(doc_path):
    """Serve a document from DOCS_FOLDER as an attachment, counting the download."""
    extension = os.path.basename(doc_path).split('.')[-1]
    if not extension or extension not in app.config['DOCS_VALID_EXTS']:
        abort(404)
    # Record the hit before streaming the file back.
    stat.update_download_count(doc_path, Utility.get_ip(request))
    return send_from_directory(app.config['DOCS_FOLDER'], doc_path, as_attachment=True)
@fileio.route('/upload', methods=['GET', 'POST'])
@login_required
def upload():
    """Handle the upload form: save into UPLOAD_DIR, then file by type."""
    if request.method == "POST":
        upload_type = request.form.get('type')
        uploaded = request.files.get('file')
        moved = False
        if uploaded and allowed_file(uploaded.filename):
            safe_name = secure_filename(uploaded.filename)
            save_as = os.path.join(app.config['UPLOAD_DIR'], safe_name)
            try:
                uploaded.save(save_as)
            except Exception as e:
                return render_template('error_code/404.html', msg='Failed to save file...[Err:{0}]'.format(e))
            # Saved to the staging dir; now route it to its final folder.
            moved = move_file(upload_type, save_as, safe_name, backup=True, linkit=True)
        if moved:
            flash('info:File uploaded Successfully!')
        else:
            flash('error:Failed To Upload file..., Try again...')
    return render_template('upload/upload.html')
def allowed_file(filename):
    """Return True when *filename* has a whitelisted extension."""
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[1]
    return extension in ALLOWED_EXTENSIONS
def __move(src, dst, backup, linkit, link_name):
    """Move *src* to *dst*, optionally backing up an existing *dst* and
    (re)creating a relative symlink *link_name* -> basename(dst).

    :return: True on success, False when the move or link update fails.
    """
    if backup and os.path.isfile(dst):
        import datetime
        now = datetime.datetime.utcnow()
        now = now.strftime('%Y%m%d%H%M%S')
        shutil.move(dst, "{0}_{1}".format(dst, now))
    try:
        shutil.move(src, dst)
    except OSError:
        return False
    result = True
    if linkit and link_name:
        # Create the link relative to dst's directory so the link target
        # stays valid across the dev & prod checkouts (original rationale).
        _old_pwd = os.path.abspath('.')
        os.chdir(os.path.dirname(dst))
        try:
            # BUG FIX: previously the symlink was only created when a stale
            # link already existed (the try/else/finally hung off the
            # islink branch), so a first-time upload produced no link and
            # left the process chdir'd into dst's directory. Now the link
            # is always (re)created and the cwd is always restored.
            if os.path.islink(link_name):
                os.remove(link_name)
            os.symlink(os.path.basename(dst), link_name)
        except OSError:
            result = False
        finally:
            os.chdir(_old_pwd)
    return result
def move_file(typ, src, name, backup=True, linkit=True):
    """Route an uploaded file to its destination folder by upload type.

    'dp' and 'resume' go to fixed folders with a well-known symlink;
    'blog' images are copied without backup or link.
    """
    link_names = {'dp': 'dp.jpg', 'resume': 'Resume.docx'}
    if typ == 'dp':
        target = os.path.join(app.config['IMAGE_FOLDER'], name)
        return __move(src, target, backup, linkit, link_names.get('dp'))
    if typ == 'resume':
        target = os.path.join(app.config['DOCS_FOLDER'], name)
        return __move(src, target, backup, linkit, link_names.get('resume'))
    if typ == 'blog':
        target = os.path.join(app.config['IMAGE_FOLDER'], name)
        return __move(src, target, backup=False, linkit=False, link_name=None)
    return False
|
{"/controller/authentication.py": ["/application.py"], "/controller/file_io.py": ["/application.py", "/libs/Utils.py"], "/controller/dashboard.py": ["/application.py"], "/dashboard_mods/RequestParamsAddon.py": ["/libs/AddonReturnType.py"], "/dashboard_mods/RedisAddon.py": ["/libs/RedisCache.py", "/libs/AddonReturnType.py"], "/libs/QuickCache.py": ["/libs/RedisCache.py"], "/dashboard_mods/SiteLogViewerAddon.py": ["/libs/AddonReturnType.py"], "/local_mods/flask-blogging/build/lib/flask_blogging/__init__.py": ["/local_mods/flask-blogging/build/lib/flask_blogging/blogging_filters.py"], "/model/StaticAssets.py": ["/application.py"], "/dashboard_mods/ViewStatsAddon.py": ["/libs/AddonReturnType.py", "/application.py"], "/controller/admin.py": ["/application.py", "/libs/Utils.py"], "/libs/Informer.py": ["/libs/Utils.py"], "/dashboard_mods/ServerAddon.py": ["/libs/AddonReturnType.py"], "/custom_filter.py": ["/libs/Utils.py"], "/controller/api.py": ["/application.py", "/libs/Utils.py"], "/application.py": ["/custom_filter.py", "/model/StatsCollector.py", "/libs/AppLogger.py", "/libs/Mailer.py", "/libs/Informer.py", "/model/StaticAssets.py", "/controller/file_io.py", "/controller/authentication.py", "/controller/admin.py", "/controller/apps.py", "/controller/api.py"]}
|
39,436
|
ninadmhatre/zual
|
refs/heads/master
|
/instance/uat.py
|
__author__ = 'ninad'
# UAT instance settings -- these override the defaults in instance/default.py.
PORT = 6000
# Keep the asset pipeline in debug mode so bundles are rebuilt per request.
ASSETS_DEBUG = True
LOG_QUERIES = False
# Let the debug toolbar pass redirects straight through.
DEBUG_TB_INTERCEPT_REDIRECTS = False
DEBUG = False
|
{"/controller/authentication.py": ["/application.py"], "/controller/file_io.py": ["/application.py", "/libs/Utils.py"], "/controller/dashboard.py": ["/application.py"], "/dashboard_mods/RequestParamsAddon.py": ["/libs/AddonReturnType.py"], "/dashboard_mods/RedisAddon.py": ["/libs/RedisCache.py", "/libs/AddonReturnType.py"], "/libs/QuickCache.py": ["/libs/RedisCache.py"], "/dashboard_mods/SiteLogViewerAddon.py": ["/libs/AddonReturnType.py"], "/local_mods/flask-blogging/build/lib/flask_blogging/__init__.py": ["/local_mods/flask-blogging/build/lib/flask_blogging/blogging_filters.py"], "/model/StaticAssets.py": ["/application.py"], "/dashboard_mods/ViewStatsAddon.py": ["/libs/AddonReturnType.py", "/application.py"], "/controller/admin.py": ["/application.py", "/libs/Utils.py"], "/libs/Informer.py": ["/libs/Utils.py"], "/dashboard_mods/ServerAddon.py": ["/libs/AddonReturnType.py"], "/custom_filter.py": ["/libs/Utils.py"], "/controller/api.py": ["/application.py", "/libs/Utils.py"], "/application.py": ["/custom_filter.py", "/model/StatsCollector.py", "/libs/AppLogger.py", "/libs/Mailer.py", "/libs/Informer.py", "/model/StaticAssets.py", "/controller/file_io.py", "/controller/authentication.py", "/controller/admin.py", "/controller/apps.py", "/controller/api.py"]}
|
39,437
|
ninadmhatre/zual
|
refs/heads/master
|
/libs/RedisCache.py
|
# -*- coding: utf-8 -*-
author = 'ninad'
import redis
class RedisCache(object):
    """Thin wrapper over a redis connection that degrades gracefully:
    when the server is unreachable every operation becomes a no-op.
    """
    def __init__(self, host='localhost', port=6379):
        self.host = host
        self.port = port
        self.conn = self._connect()
        # NOTE(review): availability is probed once at construction time; a
        # server that comes up later is never re-detected on this instance.
        self.offline = self._check_availibility()
    def _connect(self):
        return redis.Redis(host=self.host, port=self.port, encoding='utf-8')
    def _check_availibility(self):
        """Return True when the server is unreachable (i.e. offline)."""
        try:
            self.conn.ping()
        except redis.exceptions.ConnectionError:
            return True
        return False
    def set(self, key, val, timeout=86400, **kwargs):
        """Store *val* under *key* with an expiry (default 86400s).

        NOTE(review): redis-py >= 3.0 defines setex(name, time, value);
        this call passes (key, val, timeout), which matches the legacy 2.x
        Redis signature -- confirm against the pinned client version.
        """
        if self.offline:
            return
        self.conn.setex(key, val, timeout)
    def rm(self, key):
        """Delete *key*; silently skipped when offline."""
        if self.offline:
            return
        self.conn.delete(key)
    def get(self, key):
        """Return the raw stored value for *key*, or None when offline."""
        return None if self.offline else self.conn.get(key)
    def info(self):
        """Return the full server INFO dict, or None when offline."""
        return None if self.offline else self.conn.info(section='all')
|
{"/controller/authentication.py": ["/application.py"], "/controller/file_io.py": ["/application.py", "/libs/Utils.py"], "/controller/dashboard.py": ["/application.py"], "/dashboard_mods/RequestParamsAddon.py": ["/libs/AddonReturnType.py"], "/dashboard_mods/RedisAddon.py": ["/libs/RedisCache.py", "/libs/AddonReturnType.py"], "/libs/QuickCache.py": ["/libs/RedisCache.py"], "/dashboard_mods/SiteLogViewerAddon.py": ["/libs/AddonReturnType.py"], "/local_mods/flask-blogging/build/lib/flask_blogging/__init__.py": ["/local_mods/flask-blogging/build/lib/flask_blogging/blogging_filters.py"], "/model/StaticAssets.py": ["/application.py"], "/dashboard_mods/ViewStatsAddon.py": ["/libs/AddonReturnType.py", "/application.py"], "/controller/admin.py": ["/application.py", "/libs/Utils.py"], "/libs/Informer.py": ["/libs/Utils.py"], "/dashboard_mods/ServerAddon.py": ["/libs/AddonReturnType.py"], "/custom_filter.py": ["/libs/Utils.py"], "/controller/api.py": ["/application.py", "/libs/Utils.py"], "/application.py": ["/custom_filter.py", "/model/StatsCollector.py", "/libs/AppLogger.py", "/libs/Mailer.py", "/libs/Informer.py", "/model/StaticAssets.py", "/controller/file_io.py", "/controller/authentication.py", "/controller/admin.py", "/controller/apps.py", "/controller/api.py"]}
|
39,438
|
ninadmhatre/zual
|
refs/heads/master
|
/local_mods/flask-blogging/build/lib/flask_blogging/blogging_filters.py
|
import jinja2
def snippet(text, length=200):
    """Return the first *length* characters/items of *text*.

    Values without a length (None, numbers, ...) are passed through
    unchanged so the filter is safe on arbitrary template input.

    :param text: value to be trimmed
    :param length: number of leading characters to keep
    :return: the trimmed value, or *text* itself when it has no length
    """
    has_length = text is not None and hasattr(text, '__len__')
    return text[:length] if has_length else text
jinja2.filters.FILTERS['quick_look'] = snippet
|
{"/controller/authentication.py": ["/application.py"], "/controller/file_io.py": ["/application.py", "/libs/Utils.py"], "/controller/dashboard.py": ["/application.py"], "/dashboard_mods/RequestParamsAddon.py": ["/libs/AddonReturnType.py"], "/dashboard_mods/RedisAddon.py": ["/libs/RedisCache.py", "/libs/AddonReturnType.py"], "/libs/QuickCache.py": ["/libs/RedisCache.py"], "/dashboard_mods/SiteLogViewerAddon.py": ["/libs/AddonReturnType.py"], "/local_mods/flask-blogging/build/lib/flask_blogging/__init__.py": ["/local_mods/flask-blogging/build/lib/flask_blogging/blogging_filters.py"], "/model/StaticAssets.py": ["/application.py"], "/dashboard_mods/ViewStatsAddon.py": ["/libs/AddonReturnType.py", "/application.py"], "/controller/admin.py": ["/application.py", "/libs/Utils.py"], "/libs/Informer.py": ["/libs/Utils.py"], "/dashboard_mods/ServerAddon.py": ["/libs/AddonReturnType.py"], "/custom_filter.py": ["/libs/Utils.py"], "/controller/api.py": ["/application.py", "/libs/Utils.py"], "/application.py": ["/custom_filter.py", "/model/StatsCollector.py", "/libs/AppLogger.py", "/libs/Mailer.py", "/libs/Informer.py", "/model/StaticAssets.py", "/controller/file_io.py", "/controller/authentication.py", "/controller/admin.py", "/controller/apps.py", "/controller/api.py"]}
|
39,439
|
ninadmhatre/zual
|
refs/heads/master
|
/controller/dashboard.py
|
# -*- coding: utf-8 -*-
__author__ = 'ninad'
import pdb
import os
from flask import Blueprint, render_template, abort, current_app, request, flash, redirect, url_for
from flask.ext.login import login_required
from application import app, stat, blog_engine, BASE_DIR
dash = Blueprint('dash', __name__)
from addonpy.addonpy import AddonHelper, AddonLoader
# Dashboard will/should list
# 1. Resume download count
# 2. List of all posts and their read count!
# 3. It should be protected!
# 4. Status of gunicorn & redis process
# 5. Redis cache status (memory and process)
# 6. Server processes
module_list = None
def load():
    """Instantiate the addon loader and return (loader, all addon names)."""
    addon_loader = AddonLoader(verbose=True, logger=app.logger, recursive=False, lazy_load=False)
    addon_loader.set_addon_dirs([os.path.join(BASE_DIR, app.config['DASHBOARD_MODS'])])
    # Every dashboard addon must expose these three methods.
    addon_loader.set_addon_methods(['execute', 'template', 'get_result'])
    addon_loader.load_addons()
    return addon_loader, addon_loader.get_loaded_addons(list_all=True)
def run(loader, mod):
    """Execute one dashboard addon and return its rendered HTML.

    'SiteStatsAddon' is deliberately skipped (returns None).
    """
    if mod == 'SiteStatsAddon':
        return None
    instance = loader.get_instance(mod)
    instance.print_addon_info()
    instance.execute(current_app.config)
    return instance.get_result(as_html=True)
@dash.route('/dashboard', methods=['GET'])
@login_required
def dashboard():
    """Run every discovered dashboard addon and render the results page."""
    global module_list
    loader, module_list = load()
    # Map each addon name to its rendered HTML fragment.
    result = {mod: run(loader, mod) for mod in module_list}
    return render_template('dashboard/dashboard.html', result=result)
@dash.route('/reload', methods=['GET'])
@login_required
def reload():
    """Re-discover dashboard addons, then send the user to the dashboard.

    BUG FIX: load() returns a (loader, names) tuple, but the old code
    assigned the whole tuple to module_list; it also returned None, which
    Flask rejects as a view result. Unpack the tuple and redirect instead.
    """
    global module_list
    _, module_list = load()
    return redirect(url_for('dash.dashboard'))
|
{"/controller/authentication.py": ["/application.py"], "/controller/file_io.py": ["/application.py", "/libs/Utils.py"], "/controller/dashboard.py": ["/application.py"], "/dashboard_mods/RequestParamsAddon.py": ["/libs/AddonReturnType.py"], "/dashboard_mods/RedisAddon.py": ["/libs/RedisCache.py", "/libs/AddonReturnType.py"], "/libs/QuickCache.py": ["/libs/RedisCache.py"], "/dashboard_mods/SiteLogViewerAddon.py": ["/libs/AddonReturnType.py"], "/local_mods/flask-blogging/build/lib/flask_blogging/__init__.py": ["/local_mods/flask-blogging/build/lib/flask_blogging/blogging_filters.py"], "/model/StaticAssets.py": ["/application.py"], "/dashboard_mods/ViewStatsAddon.py": ["/libs/AddonReturnType.py", "/application.py"], "/controller/admin.py": ["/application.py", "/libs/Utils.py"], "/libs/Informer.py": ["/libs/Utils.py"], "/dashboard_mods/ServerAddon.py": ["/libs/AddonReturnType.py"], "/custom_filter.py": ["/libs/Utils.py"], "/controller/api.py": ["/application.py", "/libs/Utils.py"], "/application.py": ["/custom_filter.py", "/model/StatsCollector.py", "/libs/AppLogger.py", "/libs/Mailer.py", "/libs/Informer.py", "/model/StaticAssets.py", "/controller/file_io.py", "/controller/authentication.py", "/controller/admin.py", "/controller/apps.py", "/controller/api.py"]}
|
39,440
|
ninadmhatre/zual
|
refs/heads/master
|
/dashboard_mods/RequestParamsAddon.py
|
__author__ = 'Ninad Mhatre'
from addonpy.IAddonInfo import IAddonInfo
from jinja2 import Template
from libs.AddonReturnType import AddonReturnType
import json
class RequestParamsAddon(IAddonInfo, AddonReturnType):
    """Dashboard addon that pretty-prints the public attributes of the
    Flask ``request`` object passed in via execute()."""
    # Class-level defaults; execute() rebinds result on the instance.
    result = {}
    status = True
    def execute(self, *args, **kwargs):
        """Snapshot args[1] (the request object) into self.result.

        BUG FIX: the class-level dict was previously mutated in place and
        therefore shared by every instance; rebind a fresh instance dict
        before filling it.
        """
        self.result = {}
        r = args[1]
        for d in dir(r):
            # Skip private attributes and the bulky cookie/header maps.
            if d.startswith('_') or d in ('cookies', 'headers'):
                continue
            val = getattr(r, d)
            if isinstance(val, dict):
                self.result[d] = {}
                for k, v in val.items():
                    self.result[d][k] = v
            else:
                self.result[d] = val
        import pprint
        # Render once into a readable string for the dashboard panel.
        self.result = pprint.pformat(self.result, indent=2)
    def template(self):
        """Return the panel HTML (success panel, or a failure heading)."""
        html = '<h3>Failed to parse <code>request</code> object!</h3>'
        if self.result:
            html = '''<div class="col-lg-12"><div class="panel panel-success">
    <div class="panel-heading">
        <h3 class="panel-title">
            {{- name -}}
            <span class="pull-right glyphicon glyphicon-thumbs-up"></span>
        </h3>
    </div>
    <div id="requestparam" class="panel-collapse">
        <div class="panel-body">
            <div class="col-lg-12">
            <pre class="prettyprint linenums language-js">
            {{- data -}}
            </pre>
            </div>
        </div>
        <div class="panel-footer" style="font-size: 80%;">Know more about this module <a href="{{ help_url }}" target="_blank">here</a></div>
    </div>
</div></div>'''
        t = Template(html)
        return t.render(name=self.name, data=self.result, help_url=self.get_help_url())
    def get_data(self, as_html=True):
        """Return rendered HTML by default, or the raw snapshot string."""
        if as_html:
            return self.template()
        return self.result
    @property
    def name(self):
        return 'What is "request" made of?'
    @staticmethod
    def __addon__():
        return 'RequestParamsAddon'
|
{"/controller/authentication.py": ["/application.py"], "/controller/file_io.py": ["/application.py", "/libs/Utils.py"], "/controller/dashboard.py": ["/application.py"], "/dashboard_mods/RequestParamsAddon.py": ["/libs/AddonReturnType.py"], "/dashboard_mods/RedisAddon.py": ["/libs/RedisCache.py", "/libs/AddonReturnType.py"], "/libs/QuickCache.py": ["/libs/RedisCache.py"], "/dashboard_mods/SiteLogViewerAddon.py": ["/libs/AddonReturnType.py"], "/local_mods/flask-blogging/build/lib/flask_blogging/__init__.py": ["/local_mods/flask-blogging/build/lib/flask_blogging/blogging_filters.py"], "/model/StaticAssets.py": ["/application.py"], "/dashboard_mods/ViewStatsAddon.py": ["/libs/AddonReturnType.py", "/application.py"], "/controller/admin.py": ["/application.py", "/libs/Utils.py"], "/libs/Informer.py": ["/libs/Utils.py"], "/dashboard_mods/ServerAddon.py": ["/libs/AddonReturnType.py"], "/custom_filter.py": ["/libs/Utils.py"], "/controller/api.py": ["/application.py", "/libs/Utils.py"], "/application.py": ["/custom_filter.py", "/model/StatsCollector.py", "/libs/AppLogger.py", "/libs/Mailer.py", "/libs/Informer.py", "/model/StaticAssets.py", "/controller/file_io.py", "/controller/authentication.py", "/controller/admin.py", "/controller/apps.py", "/controller/api.py"]}
|
39,441
|
ninadmhatre/zual
|
refs/heads/master
|
/dashboard_mods/RedisAddon.py
|
# -*- coding: utf-8 -*-
__author__ = 'Ninad Mhatre'
from addonpy.IAddonInfo import IAddonInfo
from libs.RedisCache import RedisCache
from libs.AddonReturnType import AddonReturnType
from jinja2 import Template
# Sample Output
# {'used_memory_lua': 36864, 'config_file': '', 'uptime_in_seconds': 267,
# 'repl_backlog_first_byte_offset': 0, 'blocked_clients': 0, 'aof_rewrite_in_progress': 0,
# 'used_memory_human': '796.83K', 'migrate_cached_sockets': 0, 'instantaneous_ops_per_sec': 0,
# 'aof_last_rewrite_time_sec': -1, 'lru_clock': 16144256, 'cluster_enabled': 0, 'redis_version': '3.0.5',
# 'redis_git_dirty': 0, 'used_memory_peak_human': '796.83K',
# 'cmdstat_ping': {'usec': 5, 'calls': 1, 'usec_per_call': 5.0},
# 'latest_fork_usec': 0, 'rdb_changes_since_last_save': 0, 'instantaneous_input_kbps': 0.0,
# 'aof_last_write_status': 'ok', 'connected_slaves': 0, 'aof_last_bgrewrite_status': 'ok',
# 'total_net_input_bytes': 37, 'master_repl_offset': 0, 'used_cpu_sys': 0.37,
# 'redis_mode': 'standalone', 'hz': 10, 'rdb_last_save_time': 1458984565,
# 'rdb_last_bgsave_status': 'ok', 'rdb_current_bgsave_time_sec': -1,
# 'client_biggest_input_buf': 0, 'keyspace_misses': 0, 'total_commands_processed': 1,
# 'aof_current_rewrite_time_sec': -1, 'repl_backlog_size': 1048576, 'used_memory': 815952,
# 'sync_partial_ok': 0, 'expired_keys': 0, 'used_cpu_user': 0.13, 'repl_backlog_histlen': 0,
# 'rejected_connections': 0, 'uptime_in_days': 0, 'aof_enabled': 0, 'os': 'Linux 4.2.0-34-generic x86_64',
# 'redis_git_sha1': 0, 'connected_clients': 1, 'used_cpu_sys_children': 0.0, 'arch_bits': 64,
# 'keyspace_hits': 0, 'total_connections_received': 1, 'total_net_output_bytes': 7,
# 'used_memory_rss': 4501504, 'pubsub_channels': 0, 'redis_build_id': '33b20773abdbdb2',
# 'sync_full': 0, 'repl_backlog_active': 0, 'used_cpu_user_children': 0.0,
# 'multiplexing_api': 'epoll', 'loading': 0, 'client_longest_output_list': 0,
# 'role': 'master', 'run_id': 'bf87695d7cfa5e4537c1da8ca7c2034ffe5a1deb',
# 'aof_rewrite_scheduled': 0, 'gcc_version': '5.2.1', 'mem_allocator': 'jemalloc-3.6.0',
# 'sync_partial_err': 0, 'rdb_last_bgsave_time_sec': -1, 'process_id': 11645,
# 'used_memory_peak': 815952, 'evicted_keys': 0, 'tcp_port': 6379, 'mem_fragmentation_ratio': 5.52,
# 'pubsub_patterns': 0, 'instantaneous_output_kbps': 0.0, 'rdb_bgsave_in_progress': 0}
class RedisAddon(IAddonInfo, AddonReturnType):
    """Dashboard addon that summarizes the redis server INFO output."""
    result = None
    status = True  # Data received properly
    def execute(self, *args, **kwargs):
        """Collect a curated subset of the redis INFO dict into self.result.

        BUG FIX: the nested lookup data['db0']['keys'] raised KeyError on an
        empty database (redis omits the 'db0' section when no keys exist);
        both lookups now fall back to safe defaults via dict.get().
        """
        r_instance = RedisCache()
        data = r_instance.info()
        self.result = {}
        if not data:
            return
        keys = ('redis_version', 'redis_mode', 'uptime_in_seconds', 'used_memory_human', 'used_memory_peak_human',
                'used_cpu_sys', 'expired_keys', 'uptime_in_days',
                'connected_clients', 'arch_bits', 'total_connections_received', 'role', 'evicted_keys',
                'tcp_port', 'mem_fragmentation_ratio')
        nested_keys = (('db0', 'keys', 'total_keys'),)
        for key in keys:
            # .get() keeps one missing stat from killing the whole panel
            self.result[key] = data.get(key)
        for k in nested_keys:
            self.result[k[2]] = data.get(k[0], {}).get(k[1], 0)
    def template(self):
        """Return the panel HTML: a stats table, or a 'redis down' warning."""
        keys = ('redis_version', 'redis_mode', 'arch_bits', 'uptime_in_seconds',
                'used_memory_human', 'used_memory_peak_human',
                'used_cpu_sys', 'expired_keys', 'uptime_in_days',
                'connected_clients', 'total_connections_received', 'role', 'evicted_keys',
                'tcp_port', 'mem_fragmentation_ratio', 'total_keys')
        if self.result:
            html = '''<div class="col-lg-12"><div class="panel panel-success">
    <div class="panel-heading">
        <h3 class="panel-title">
            {{- name -}}
            <span class="pull-right glyphicon glyphicon-thumbs-up"></span>
        </h3>
    </div>
    <div id="redis" class="panel-collapse">
        <div class="panel-body">
            <div class="col-lg-8">
                <table class="table table-bordered table-responsive">
                {% for k in keys %}
                    <tr>
                        <td>{{ k|replace('_', ' ') }}</td>
                        <td>{{ data[k] }}</td>
                    </tr>
                {% endfor %}
                </table>
            </div>
        </div>
        <div class="panel-footer" style="font-size: 80%;">Know more about this module <a href="{{ help_url }}" target="_blank">here</a></div>
    </div>
</div></div>'''
        else:
            html = '''<div class="col-lg-12"><div class="panel panel-danger">
    <div class="panel-heading">
        <h3 class="panel-title">
            {{- name -}}
            <span class="pull-right glyphicon glyphicon-thumbs-down"></span>
        </h3>
    </div>
    <div id="redis" class="panel-collapse">
        <div class="panel-body">
            <div class="col-lg-12">
                <h4>Redis seems to be down! please check...</h4>
            </div>
        </div>
        <div class="panel-footer" style="font-size: 80%;">Know more about this module <a href="{{ help_url }}" target="_blank">here</a></div>
    </div>
</div></div>
'''
            self.status = False
        t = Template(html)
        return t.render(name=self.name, keys=keys, data=self.result, help_url=self.get_help_url())
    def get_data(self, as_html=True):
        """Return rendered HTML by default, or the raw stats dict."""
        if as_html:
            return self.template()
        return self.result
    @property
    def name(self):
        return self.__addon__().replace('Addon', ' Info')
    @staticmethod
    def __addon__():
        return 'RedisAddon'
|
{"/controller/authentication.py": ["/application.py"], "/controller/file_io.py": ["/application.py", "/libs/Utils.py"], "/controller/dashboard.py": ["/application.py"], "/dashboard_mods/RequestParamsAddon.py": ["/libs/AddonReturnType.py"], "/dashboard_mods/RedisAddon.py": ["/libs/RedisCache.py", "/libs/AddonReturnType.py"], "/libs/QuickCache.py": ["/libs/RedisCache.py"], "/dashboard_mods/SiteLogViewerAddon.py": ["/libs/AddonReturnType.py"], "/local_mods/flask-blogging/build/lib/flask_blogging/__init__.py": ["/local_mods/flask-blogging/build/lib/flask_blogging/blogging_filters.py"], "/model/StaticAssets.py": ["/application.py"], "/dashboard_mods/ViewStatsAddon.py": ["/libs/AddonReturnType.py", "/application.py"], "/controller/admin.py": ["/application.py", "/libs/Utils.py"], "/libs/Informer.py": ["/libs/Utils.py"], "/dashboard_mods/ServerAddon.py": ["/libs/AddonReturnType.py"], "/custom_filter.py": ["/libs/Utils.py"], "/controller/api.py": ["/application.py", "/libs/Utils.py"], "/application.py": ["/custom_filter.py", "/model/StatsCollector.py", "/libs/AppLogger.py", "/libs/Mailer.py", "/libs/Informer.py", "/model/StaticAssets.py", "/controller/file_io.py", "/controller/authentication.py", "/controller/admin.py", "/controller/apps.py", "/controller/api.py"]}
|
39,442
|
ninadmhatre/zual
|
refs/heads/master
|
/libs/AppLogger.py
|
__author__ = 'ninad'
import logging
import logging.handlers
import enum
class LoggerTypes(enum.Enum):
    """Kinds of log sinks AppLogger can be asked to configure."""
    File = 0
    Mail = 1
    Console = 2
class AppLogger(object):
    """Builds and caches a logging handler from a config dict.

    The config dict maps section names ('FILE', 'MAIL') to dicts with keys
    NAME, FILE, LEVEL, FORMAT and EXTRAS (kwargs for the rotating handler).
    """
    def __init__(self, logger_config):
        self.logger_cfg = logger_config
        self.logger = None
        self.file_logger = None
        self.mail_logger = None
        self.console_logger = None
        # Cache of the handler built by _configure_handler().
        self._handler = None
    def get_log_handler(self, logger_type=LoggerTypes.File):
        """Return the configured handler (built once, then cached)."""
        return self._configure_handler(logger_type)
    def get_stand_alone_logger(self):
        """Return the logger configured by the last get_log_handler() call."""
        return self.logger
    def _configure_handler(self, logger_type):
        """Build a TimedRotatingFileHandler from the matching config section.

        BUG FIX: the old code cached self.logger and returned it (a Logger)
        on the second call while the first call returned a Handler -- an
        inconsistent return type; it also kept dead local assignments and
        raised KeyError when the config section was absent. The handler is
        now cached and returned consistently, and missing sections yield None.
        """
        if self._handler is not None:
            return self._handler
        if logger_type == LoggerTypes.File:
            cfg = self.logger_cfg.get('FILE')
        elif logger_type == LoggerTypes.Mail:
            cfg = self.logger_cfg.get('MAIL')
        else:
            # Console logging is not configured yet.
            cfg = None
        if cfg is None:
            return None
        self.logger = logging.getLogger(cfg['NAME'])
        handler = logging.handlers.TimedRotatingFileHandler(cfg['FILE'], **cfg['EXTRAS'])
        handler.setLevel(cfg['LEVEL'])
        handler.setFormatter(logging.Formatter(cfg['FORMAT']))
        self.logger.addHandler(handler)
        self._handler = handler
        return handler
|
{"/controller/authentication.py": ["/application.py"], "/controller/file_io.py": ["/application.py", "/libs/Utils.py"], "/controller/dashboard.py": ["/application.py"], "/dashboard_mods/RequestParamsAddon.py": ["/libs/AddonReturnType.py"], "/dashboard_mods/RedisAddon.py": ["/libs/RedisCache.py", "/libs/AddonReturnType.py"], "/libs/QuickCache.py": ["/libs/RedisCache.py"], "/dashboard_mods/SiteLogViewerAddon.py": ["/libs/AddonReturnType.py"], "/local_mods/flask-blogging/build/lib/flask_blogging/__init__.py": ["/local_mods/flask-blogging/build/lib/flask_blogging/blogging_filters.py"], "/model/StaticAssets.py": ["/application.py"], "/dashboard_mods/ViewStatsAddon.py": ["/libs/AddonReturnType.py", "/application.py"], "/controller/admin.py": ["/application.py", "/libs/Utils.py"], "/libs/Informer.py": ["/libs/Utils.py"], "/dashboard_mods/ServerAddon.py": ["/libs/AddonReturnType.py"], "/custom_filter.py": ["/libs/Utils.py"], "/controller/api.py": ["/application.py", "/libs/Utils.py"], "/application.py": ["/custom_filter.py", "/model/StatsCollector.py", "/libs/AppLogger.py", "/libs/Mailer.py", "/libs/Informer.py", "/model/StaticAssets.py", "/controller/file_io.py", "/controller/authentication.py", "/controller/admin.py", "/controller/apps.py", "/controller/api.py"]}
|
39,443
|
ninadmhatre/zual
|
refs/heads/master
|
/libs/QuickCache.py
|
# -*- coding: utf-8 -*-
author = 'ninad'
import pickle
from libs.RedisCache import RedisCache
class QuickCache(RedisCache):
    """RedisCache variant that transparently pickles/unpickles values."""
    def __init__(self, host='localhost', port=6379):
        super().__init__(host, port)
    def pset(self, key, val, use_cache=True, **kwargs):
        """Pickle *val* and store it under *key*; no-op when use_cache is False."""
        if not use_cache:
            return
        self.set(key, pickle.dumps(val), **kwargs)
    def pget(self, key, use_cache=True):
        """Return the unpickled value for *key*, or None when absent/disabled."""
        if not use_cache:
            return None
        raw = self.get(key)
        return pickle.loads(raw) if raw else None
|
{"/controller/authentication.py": ["/application.py"], "/controller/file_io.py": ["/application.py", "/libs/Utils.py"], "/controller/dashboard.py": ["/application.py"], "/dashboard_mods/RequestParamsAddon.py": ["/libs/AddonReturnType.py"], "/dashboard_mods/RedisAddon.py": ["/libs/RedisCache.py", "/libs/AddonReturnType.py"], "/libs/QuickCache.py": ["/libs/RedisCache.py"], "/dashboard_mods/SiteLogViewerAddon.py": ["/libs/AddonReturnType.py"], "/local_mods/flask-blogging/build/lib/flask_blogging/__init__.py": ["/local_mods/flask-blogging/build/lib/flask_blogging/blogging_filters.py"], "/model/StaticAssets.py": ["/application.py"], "/dashboard_mods/ViewStatsAddon.py": ["/libs/AddonReturnType.py", "/application.py"], "/controller/admin.py": ["/application.py", "/libs/Utils.py"], "/libs/Informer.py": ["/libs/Utils.py"], "/dashboard_mods/ServerAddon.py": ["/libs/AddonReturnType.py"], "/custom_filter.py": ["/libs/Utils.py"], "/controller/api.py": ["/application.py", "/libs/Utils.py"], "/application.py": ["/custom_filter.py", "/model/StatsCollector.py", "/libs/AppLogger.py", "/libs/Mailer.py", "/libs/Informer.py", "/model/StaticAssets.py", "/controller/file_io.py", "/controller/authentication.py", "/controller/admin.py", "/controller/apps.py", "/controller/api.py"]}
|
39,444
|
ninadmhatre/zual
|
refs/heads/master
|
/instance/default.py
|
__author__ = 'ninad'
import logging
from datetime import timedelta
import os
# Instance configuration for the Flask application (development defaults).
# Values marked "<-- EDIT_THIS" must be customised per deployment.
DEBUG = True
LOG_QUERIES = False
SECRET_KEY = 'Secret_key' # <-- EDIT_THIS
PORT = 5000
ADMIN_MAIL = 'test.mail@yourdomain.com' # <-- EDIT_THIS
# Timed-rotating file logger; EXTRAS are kwargs for TimedRotatingFileHandler
# (rotate daily, keep 7 backups).
LOGGER = {
    'FILE': dict(FILE='logs/log.log',
                 LEVEL=logging.DEBUG,
                 NAME='web_logger',
                 HANDLER='File',
                 FORMAT='%(asctime)s %(levelname)s %(filename)s %(module)s [at %(lineno)d line] %(message)s',
                 EXTRAS=dict(when='D', interval=1, backupCount=7))
}
ASSETS_DEBUG = False
BLOGGING_URL_PREFIX = '/blog'
BLOGGING_DISQUS_SITENAME = 'Echo'
# Allowed upload extensions for images and documents.
IMAGE_VALID_EXTS = ['jpg', 'jpeg', 'png']
DOCS_VALID_EXTS = ['doc', 'docx', 'xlsx', 'xls', 'pdf']
# Sessions expire after 4 hours.
permanent_session_lifetime = timedelta(minutes=240)
SESSION_TIMEOUT = timedelta(minutes=240)
CACHE = dict(redis=dict(
    CACHE_KEY_PREFIX='site',
    CACHE_DEFAULT_TIMEOUT=60,
    CACHE_TYPE='redis',
    CACHE_REDIS_HOST='localhost',
    CACHE_REDIS_PORT=9779  # NOTE(review): non-standard Redis port -- confirm
)
)
# URL's
FACEBOOK = 'https://www.facebook.com/<< Your FB profile name >>' # <-- EDIT_THIS
GOOGLE_PLUS = 'https://plus.google.com/<< Your G+ Profile >>' # <-- EDIT_THIS
GIT_HUB = 'https://github.com/<< Your GitHub >>' # <-- EDIT_THIS
LINKED_IN = '<< linked in profile >>' # <-- EDIT_THIS
PERSONAL_EMAIL = ADMIN_MAIL
# Upload Folder
IMAGE_FOLDER = os.path.abspath('blog/img')
DOCS_FOLDER = os.path.abspath('blog/docs')
STATS_FOLDER = os.path.abspath('stats')
# Dashboard
DASHBOARD_MODS = 'dashboard_mods'
# Mail
MAIL_SERVER = ''  # <-- EDIT_THIS (Put mail server ), I am using zoho
MAIL_PORT = 465  # <-- EDIT_THIS
#MAIL_USE_TLS = True
MAIL_USE_SSL = True  # <-- EDIT_THIS
MAIL_USERNAME = 'alert@yourdomain.com'  # <-- EDIT_THIS
MAIL_PASSWORD = 'blah..blah'  # <-- EDIT_THIS
MAIL_DEFAULT_SENDER = 'alert@yourdomain.com'  # <-- EDIT_THIS
# Enable Page Counter
ENABLE_PAGE_COUNTER = True
__RELEASE__ = 'm4'
__VERSION__ = '2.5.05-0'  # Year.Month.Day-Patch Note: 2015 is 1
|
{"/controller/authentication.py": ["/application.py"], "/controller/file_io.py": ["/application.py", "/libs/Utils.py"], "/controller/dashboard.py": ["/application.py"], "/dashboard_mods/RequestParamsAddon.py": ["/libs/AddonReturnType.py"], "/dashboard_mods/RedisAddon.py": ["/libs/RedisCache.py", "/libs/AddonReturnType.py"], "/libs/QuickCache.py": ["/libs/RedisCache.py"], "/dashboard_mods/SiteLogViewerAddon.py": ["/libs/AddonReturnType.py"], "/local_mods/flask-blogging/build/lib/flask_blogging/__init__.py": ["/local_mods/flask-blogging/build/lib/flask_blogging/blogging_filters.py"], "/model/StaticAssets.py": ["/application.py"], "/dashboard_mods/ViewStatsAddon.py": ["/libs/AddonReturnType.py", "/application.py"], "/controller/admin.py": ["/application.py", "/libs/Utils.py"], "/libs/Informer.py": ["/libs/Utils.py"], "/dashboard_mods/ServerAddon.py": ["/libs/AddonReturnType.py"], "/custom_filter.py": ["/libs/Utils.py"], "/controller/api.py": ["/application.py", "/libs/Utils.py"], "/application.py": ["/custom_filter.py", "/model/StatsCollector.py", "/libs/AppLogger.py", "/libs/Mailer.py", "/libs/Informer.py", "/model/StaticAssets.py", "/controller/file_io.py", "/controller/authentication.py", "/controller/admin.py", "/controller/apps.py", "/controller/api.py"]}
|
39,445
|
ninadmhatre/zual
|
refs/heads/master
|
/libs/Utils.py
|
__author__ = 'ninad'
import os
import simplejson as json
import hashlib
from urllib import parse
class Utility(object):
    """Grab-bag of static helpers for file I/O, hashing and URL quoting."""

    @staticmethod
    def write_to_file(filename, content, as_json=False, safe=False):
        """Write ``content`` (JSON-serialised when ``as_json``) to ``filename``.

        With ``safe=True`` delegates to :meth:`safe_write_to_file`, which
        returns a ``(status, error)`` tuple instead of raising.
        """
        if safe:
            return Utility.safe_write_to_file(filename, content, as_json)
        with open(filename, 'w') as f:
            if as_json:
                f.write(json.dumps(content, indent=4))
            else:
                f.write(content)

    @staticmethod
    def read_from_file(filename, as_json=False, safe=False):
        """Read ``filename``; return a ``(content_or_data, error)`` tuple.

        Previously a missing file made this function fall through and return
        ``None``, which broke callers that unpack two values; it now returns
        ``(False, FileNotFoundError)`` instead.
        """
        if safe:
            return Utility.safe_read_from_file(filename, as_json)
        if os.path.isfile(filename):
            with open(filename, 'r') as f:
                content = f.read()  # read the whole file in one go
            if as_json:
                # The ``encoding`` kwarg was deprecated and removed from
                # json.loads in Python 3.9; decoding happens at file-read time.
                return json.loads(content), None
            return content, None
        return False, FileNotFoundError(filename)

    @staticmethod
    def safe_read_from_file(filename, as_json=False):
        """Like :meth:`read_from_file` but never raises.

        Returns ``(content_or_data, None)`` on success, ``(False, exc)`` on
        I/O or JSON-decode failure.
        """
        try:
            with open(filename, 'r+') as f:
                content = f.read()  # read the whole file in one go
            if as_json:
                # ``encoding`` kwarg dropped: removed from json.loads in 3.9.
                return json.loads(content), None
            return content, None
        except IOError as e:
            return False, e
        except ValueError as e:  # json.JSONDecodeError subclasses ValueError
            return False, e

    @staticmethod
    def safe_write_to_file(filename, content, as_json=False):
        """Write ``content`` without raising; return ``(status, error)``."""
        try:
            with open(filename, 'w+') as f:
                if as_json:
                    f.write(json.dumps(content, indent=4))
                else:
                    f.write(content)
        except IOError as e:
            return False, e
        except ValueError as e:
            return False, e
        else:
            return True, None

    @staticmethod
    def get_ip(request):
        """Best-effort client IP: prefer reverse-proxy headers, then remote_addr."""
        if 'HTTP_X_REAL_IP' in request.environ:
            return request.environ['HTTP_X_REAL_IP']
        elif 'HTTP_X_FORWARDED_FOR' in request.environ:
            # First entry of X-Forwarded-For is the original client.
            ips = request.environ['HTTP_X_FORWARDED_FOR']
            return ips.split(',')[0]
        else:
            return request.remote_addr

    @staticmethod
    def get_md5_hash(string):
        """MD5 hex digest of ``string``; None for non-str/empty input."""
        if string and isinstance(string, str):
            return hashlib.md5(string.encode('utf-8')).hexdigest()

    @staticmethod
    def get_md5_hash_of_title(string):
        """MD5 hex digest of the URL-quoted title; None for falsy input."""
        if string:
            _title = parse.quote(string)
            return hashlib.md5(_title.encode('utf-8')).hexdigest()
        return None

    @staticmethod
    def quote_string(string):
        """URL-quote ``string``; None for falsy input."""
        if string:
            return parse.quote(string)

    @staticmethod
    def unquote_string(string):
        """URL-unquote ``string``; None for falsy input."""
        if string:
            return parse.unquote(string, encoding='utf-8')
|
{"/controller/authentication.py": ["/application.py"], "/controller/file_io.py": ["/application.py", "/libs/Utils.py"], "/controller/dashboard.py": ["/application.py"], "/dashboard_mods/RequestParamsAddon.py": ["/libs/AddonReturnType.py"], "/dashboard_mods/RedisAddon.py": ["/libs/RedisCache.py", "/libs/AddonReturnType.py"], "/libs/QuickCache.py": ["/libs/RedisCache.py"], "/dashboard_mods/SiteLogViewerAddon.py": ["/libs/AddonReturnType.py"], "/local_mods/flask-blogging/build/lib/flask_blogging/__init__.py": ["/local_mods/flask-blogging/build/lib/flask_blogging/blogging_filters.py"], "/model/StaticAssets.py": ["/application.py"], "/dashboard_mods/ViewStatsAddon.py": ["/libs/AddonReturnType.py", "/application.py"], "/controller/admin.py": ["/application.py", "/libs/Utils.py"], "/libs/Informer.py": ["/libs/Utils.py"], "/dashboard_mods/ServerAddon.py": ["/libs/AddonReturnType.py"], "/custom_filter.py": ["/libs/Utils.py"], "/controller/api.py": ["/application.py", "/libs/Utils.py"], "/application.py": ["/custom_filter.py", "/model/StatsCollector.py", "/libs/AppLogger.py", "/libs/Mailer.py", "/libs/Informer.py", "/model/StaticAssets.py", "/controller/file_io.py", "/controller/authentication.py", "/controller/admin.py", "/controller/apps.py", "/controller/api.py"]}
|
39,446
|
ninadmhatre/zual
|
refs/heads/master
|
/controller/apps.py
|
# -*- coding: utf-8 -*-
__author__ = 'ninad'
from flask import Blueprint, render_template, current_app, request, jsonify
apps = Blueprint('application', __name__, url_prefix='/apps')


@apps.route('/')
def listing():
    """Render the applications listing page."""
    page = render_template('apps/listing.html')
    return page
|
{"/controller/authentication.py": ["/application.py"], "/controller/file_io.py": ["/application.py", "/libs/Utils.py"], "/controller/dashboard.py": ["/application.py"], "/dashboard_mods/RequestParamsAddon.py": ["/libs/AddonReturnType.py"], "/dashboard_mods/RedisAddon.py": ["/libs/RedisCache.py", "/libs/AddonReturnType.py"], "/libs/QuickCache.py": ["/libs/RedisCache.py"], "/dashboard_mods/SiteLogViewerAddon.py": ["/libs/AddonReturnType.py"], "/local_mods/flask-blogging/build/lib/flask_blogging/__init__.py": ["/local_mods/flask-blogging/build/lib/flask_blogging/blogging_filters.py"], "/model/StaticAssets.py": ["/application.py"], "/dashboard_mods/ViewStatsAddon.py": ["/libs/AddonReturnType.py", "/application.py"], "/controller/admin.py": ["/application.py", "/libs/Utils.py"], "/libs/Informer.py": ["/libs/Utils.py"], "/dashboard_mods/ServerAddon.py": ["/libs/AddonReturnType.py"], "/custom_filter.py": ["/libs/Utils.py"], "/controller/api.py": ["/application.py", "/libs/Utils.py"], "/application.py": ["/custom_filter.py", "/model/StatsCollector.py", "/libs/AppLogger.py", "/libs/Mailer.py", "/libs/Informer.py", "/model/StaticAssets.py", "/controller/file_io.py", "/controller/authentication.py", "/controller/admin.py", "/controller/apps.py", "/controller/api.py"]}
|
39,447
|
ninadmhatre/zual
|
refs/heads/master
|
/dashboard_mods/SiteLogViewerAddon.py
|
# -*- coding: utf-8 -*-
__author__ = 'Ninad Mhatre'
from addonpy.IAddonInfo import IAddonInfo
from jinja2 import Template
from libs.AddonReturnType import AddonReturnType
import os
class SiteLogViewerAddon(IAddonInfo, AddonReturnType):
    """Dashboard addon that loads the site log file into an HTML panel."""

    result = None
    status = True

    def execute(self, *args, **kwargs):
        """Read the configured log file into ``self.result`` (list of lines)."""
        app_config = args[0]
        log_path = os.path.join(app_config.get('BASE_DIR'),
                                app_config['LOGGER']['FILE']['FILE'])
        with open(log_path, 'r') as fh:  # avoid shadowing the config value
            self.result = fh.read().split('\n')

    def template(self):
        """Render the collected log lines inside a bootstrap panel."""
        html = '''<div class="col-lg-12">
        <div class="panel panel-success">
            <div class="panel-heading">
                <h3 class="panel-title">
                    {{- name -}}
                    <span class="pull-right glyphicon glyphicon-thumbs-up"></span>
                </h3>
            </div>
            <div id="logviewer" class="panel-collapse">
                <div class="panel-body">
                    <pre class="prettyprint language-py" style="height: 500px;">
                    {%- for line in result -%}
                    {{ line }}<br>
                    {%- endfor -%}
                    </pre>
                </div>
            </div>
        </div></div>
        '''
        return Template(html).render(name=self.name, result=self.result,
                                     help_url=self.get_help_url())

    def get_data(self, as_html=True):
        """Return rendered HTML (default) or the raw list of log lines."""
        return self.template() if as_html else self.result

    @property
    def name(self):
        return 'Site Log'

    @staticmethod
    def __addon__():
        return 'SiteLogViewerAddon'
|
{"/controller/authentication.py": ["/application.py"], "/controller/file_io.py": ["/application.py", "/libs/Utils.py"], "/controller/dashboard.py": ["/application.py"], "/dashboard_mods/RequestParamsAddon.py": ["/libs/AddonReturnType.py"], "/dashboard_mods/RedisAddon.py": ["/libs/RedisCache.py", "/libs/AddonReturnType.py"], "/libs/QuickCache.py": ["/libs/RedisCache.py"], "/dashboard_mods/SiteLogViewerAddon.py": ["/libs/AddonReturnType.py"], "/local_mods/flask-blogging/build/lib/flask_blogging/__init__.py": ["/local_mods/flask-blogging/build/lib/flask_blogging/blogging_filters.py"], "/model/StaticAssets.py": ["/application.py"], "/dashboard_mods/ViewStatsAddon.py": ["/libs/AddonReturnType.py", "/application.py"], "/controller/admin.py": ["/application.py", "/libs/Utils.py"], "/libs/Informer.py": ["/libs/Utils.py"], "/dashboard_mods/ServerAddon.py": ["/libs/AddonReturnType.py"], "/custom_filter.py": ["/libs/Utils.py"], "/controller/api.py": ["/application.py", "/libs/Utils.py"], "/application.py": ["/custom_filter.py", "/model/StatsCollector.py", "/libs/AppLogger.py", "/libs/Mailer.py", "/libs/Informer.py", "/model/StaticAssets.py", "/controller/file_io.py", "/controller/authentication.py", "/controller/admin.py", "/controller/apps.py", "/controller/api.py"]}
|
39,448
|
ninadmhatre/zual
|
refs/heads/master
|
/local_mods/flask-blogging/build/lib/flask_blogging/__init__.py
|
# Re-export the public API of the locally patched flask_blogging package.
from .engine import BloggingEngine
from .processor import PostProcessor
from .sqlastorage import SQLAStorage
from .storage import Storage
from .blogging_filters import snippet
# NOTE(review): this triple-quoted string sits AFTER the imports, so it is a
# plain expression statement, not the module docstring (__doc__ stays unset).
# Left in place; moving it would be a code change.
"""
Flask-Blogging is a Flask extension to add blog support to your
web application. This extension uses Markdown to store and then
render the webpage.
Author: Gouthaman Balaraman
Date: June 1, 2015
0.5.1.local is the local copy of my changes not ** officially **
released!
"""
__author__ = 'Gouthaman Balaraman'
__version__ = '0.5.1.local'
|
{"/controller/authentication.py": ["/application.py"], "/controller/file_io.py": ["/application.py", "/libs/Utils.py"], "/controller/dashboard.py": ["/application.py"], "/dashboard_mods/RequestParamsAddon.py": ["/libs/AddonReturnType.py"], "/dashboard_mods/RedisAddon.py": ["/libs/RedisCache.py", "/libs/AddonReturnType.py"], "/libs/QuickCache.py": ["/libs/RedisCache.py"], "/dashboard_mods/SiteLogViewerAddon.py": ["/libs/AddonReturnType.py"], "/local_mods/flask-blogging/build/lib/flask_blogging/__init__.py": ["/local_mods/flask-blogging/build/lib/flask_blogging/blogging_filters.py"], "/model/StaticAssets.py": ["/application.py"], "/dashboard_mods/ViewStatsAddon.py": ["/libs/AddonReturnType.py", "/application.py"], "/controller/admin.py": ["/application.py", "/libs/Utils.py"], "/libs/Informer.py": ["/libs/Utils.py"], "/dashboard_mods/ServerAddon.py": ["/libs/AddonReturnType.py"], "/custom_filter.py": ["/libs/Utils.py"], "/controller/api.py": ["/application.py", "/libs/Utils.py"], "/application.py": ["/custom_filter.py", "/model/StatsCollector.py", "/libs/AppLogger.py", "/libs/Mailer.py", "/libs/Informer.py", "/model/StaticAssets.py", "/controller/file_io.py", "/controller/authentication.py", "/controller/admin.py", "/controller/apps.py", "/controller/api.py"]}
|
39,449
|
ninadmhatre/zual
|
refs/heads/master
|
/model/StaticAssets.py
|
__author__ = 'ninad'
# Flask
from flask.ext.assets import Bundle, Environment
from application import app
# Static asset bundles served through Flask-Assets.
# Fix: 'js/run_prettify.js' was listed twice, duplicating it in the
# minified output bundle.
bundles = {
    'third_party_js': Bundle(
        'js/jquery-2.1.3.min.js',
        'js/jquery-ui.min.js',
        'js/bootstrap.js',
        'js/toastr.min.js',
        'js/freelancer.js',
        'js/run_prettify.js',
        'js/site_personal.js',
        'js/prettify.js',
        'js/classie.js',
        'js/socialShare.min.js',
        'js/cbpAnimatedHeader.js',
        filters='jsmin'
    ),
    'third_party_css': Bundle(
        'css/jquery-ui.min.css',
        'css/font-awesome.min.css',
        'css/bootstrap.min.css',
        'css/prettify.css',
        'css/site_personal.css',
        'css/arthref.min.css',
        'css/toastr.min.css',
        'css/freelancer.css',
        filters='cssmin'
    )
}
site_assets = Environment(app)
site_assets.register(bundles)


def get_assets():
    """Return the registered Flask-Assets environment.

    The leftover debug ``print`` ('Here from lblrsm.assets....') was removed.
    """
    return site_assets
|
{"/controller/authentication.py": ["/application.py"], "/controller/file_io.py": ["/application.py", "/libs/Utils.py"], "/controller/dashboard.py": ["/application.py"], "/dashboard_mods/RequestParamsAddon.py": ["/libs/AddonReturnType.py"], "/dashboard_mods/RedisAddon.py": ["/libs/RedisCache.py", "/libs/AddonReturnType.py"], "/libs/QuickCache.py": ["/libs/RedisCache.py"], "/dashboard_mods/SiteLogViewerAddon.py": ["/libs/AddonReturnType.py"], "/local_mods/flask-blogging/build/lib/flask_blogging/__init__.py": ["/local_mods/flask-blogging/build/lib/flask_blogging/blogging_filters.py"], "/model/StaticAssets.py": ["/application.py"], "/dashboard_mods/ViewStatsAddon.py": ["/libs/AddonReturnType.py", "/application.py"], "/controller/admin.py": ["/application.py", "/libs/Utils.py"], "/libs/Informer.py": ["/libs/Utils.py"], "/dashboard_mods/ServerAddon.py": ["/libs/AddonReturnType.py"], "/custom_filter.py": ["/libs/Utils.py"], "/controller/api.py": ["/application.py", "/libs/Utils.py"], "/application.py": ["/custom_filter.py", "/model/StatsCollector.py", "/libs/AppLogger.py", "/libs/Mailer.py", "/libs/Informer.py", "/model/StaticAssets.py", "/controller/file_io.py", "/controller/authentication.py", "/controller/admin.py", "/controller/apps.py", "/controller/api.py"]}
|
39,450
|
ninadmhatre/zual
|
refs/heads/master
|
/dashboard_mods/ViewStatsAddon.py
|
__author__ = 'ninad mhatre'
from addonpy.IAddonInfo import IAddonInfo
from libs.AddonReturnType import AddonReturnType
from jinja2 import Template
from application import page_view_stats
class ViewStatsAddon(IAddonInfo, AddonReturnType):
    """Dashboard addon that renders per-page view statistics as a table."""

    result = None   # stats dict from page_view_stats.list(); None until execute()
    status = True   # Data received properly

    def template(self):
        """Render ``self.result`` as a bootstrap panel.

        A success (green) panel with a stats table when data is present,
        otherwise a danger (red) panel; the failure branch also flips
        ``self.status`` to False.
        """
        if self.result:
            html = '''<div class="col-lg-12"><div class="panel panel-success">
            <div class="panel-heading">
                <h3 class="panel-title">
                    {{- name -}}
                    <span class="pull-right glyphicon glyphicon-thumbs-up"></span>
                </h3>
            </div>
            <div id="counter" class="panel-collapse">
                <div class="panel-body">
                    <table class="table table-bordered table-responsive">
                        <thead>
                        <tr>
                            <th>ID</th>
                            <th>Title</th>
                            <th>Views</th>
                            <th>Deleted?</th>
                            <th>Last Viewed On</th>
                        </tr>
                        </thead>
                        <tbody>
                        {% for k in data %}
                        <tr>
                            <td>{{ k }}</td>
                            <td>{{ data[k]['title']|toAscii }}</td>
                            <td>{{ data[k]['count'] }}</td>
                            <td>{{ data[k]['deleted']|toBoolean }}</td>
                            <td>{{ data[k]['last_modified_date'] }}</td>
                        </tr>
                        {% endfor %}
                        </tbody>
                    </table>
                </div>
                <div class="panel-footer" style="font-size: 80%;">Know more about this module <a href="{{ help_url }}" target="_blank">here</a></div>
            </div>
            </div></div>'''
        else:
            html = '''<div class="col-lg-12"><div class="panel panel-danger">
            <div class="panel-heading">
                <h3 class="panel-title">
                    {{- name -}}
                    <span class="pull-right glyphicon glyphicon-thumbs-down"></span>
                </h3>
            </div>
            <div id="counter" class="panel-collapse">
                <div class="panel-body">
                    <div class="col-lg-12">
                        <h4>Something wrong with collecting stats from database...</h4>
                    </div>
                </div>
                <div class="panel-footer" style="font-size: 80%;">Know more about this module <a href="{{ help_url }}" target="_blank">here</a></div>
            </div>
            </div></div>'''
            self.status = False
        t = Template(html)
        # NOTE(review): templates use custom filters toAscii/toBoolean --
        # presumably registered on the app's jinja env; confirm availability
        # in a bare jinja2 Template context.
        return t.render(name=self.name, data=self.result, help_url=self.get_help_url())

    def get_data(self, as_html=True):
        """Return rendered HTML (default) or the raw stats dict."""
        if as_html:
            return self.template()
        return self.result

    @property
    def name(self):
        # 'ViewStatsAddon' -> 'ViewStats'
        return self.__addon__().replace('Addon', '')

    def execute(self, *args, **kwargs):
        # Pull stats (including deleted pages) from the shared collector.
        self.result = page_view_stats.list(include_all=True)

    @staticmethod
    def __addon__():
        return 'ViewStatsAddon'
|
{"/controller/authentication.py": ["/application.py"], "/controller/file_io.py": ["/application.py", "/libs/Utils.py"], "/controller/dashboard.py": ["/application.py"], "/dashboard_mods/RequestParamsAddon.py": ["/libs/AddonReturnType.py"], "/dashboard_mods/RedisAddon.py": ["/libs/RedisCache.py", "/libs/AddonReturnType.py"], "/libs/QuickCache.py": ["/libs/RedisCache.py"], "/dashboard_mods/SiteLogViewerAddon.py": ["/libs/AddonReturnType.py"], "/local_mods/flask-blogging/build/lib/flask_blogging/__init__.py": ["/local_mods/flask-blogging/build/lib/flask_blogging/blogging_filters.py"], "/model/StaticAssets.py": ["/application.py"], "/dashboard_mods/ViewStatsAddon.py": ["/libs/AddonReturnType.py", "/application.py"], "/controller/admin.py": ["/application.py", "/libs/Utils.py"], "/libs/Informer.py": ["/libs/Utils.py"], "/dashboard_mods/ServerAddon.py": ["/libs/AddonReturnType.py"], "/custom_filter.py": ["/libs/Utils.py"], "/controller/api.py": ["/application.py", "/libs/Utils.py"], "/application.py": ["/custom_filter.py", "/model/StatsCollector.py", "/libs/AppLogger.py", "/libs/Mailer.py", "/libs/Informer.py", "/model/StaticAssets.py", "/controller/file_io.py", "/controller/authentication.py", "/controller/admin.py", "/controller/apps.py", "/controller/api.py"]}
|
39,451
|
ninadmhatre/zual
|
refs/heads/master
|
/controller/admin.py
|
# -*- coding: utf-8 -*-
__author__ = 'ninad'
import pdb
import os
from collections import namedtuple
import json
from flask import Blueprint, render_template, abort, current_app, request, flash, redirect, url_for
from flask.ext.login import login_required
from application import app, stat, blog_engine, BASE_DIR, mailer, custom_logger
from libs.Utils import Utility
admin = Blueprint('admin', __name__)
from addonpy.addonpy import AddonHelper, AddonLoader
from cerberus import Validator, ValidationError, SchemaError
# Lightweight record for the test-mail form fields.
MailInfo = namedtuple('MailInfo', 'Sender To Message Subject')
# Dashboard will/should list
# 1. Resume download count
# 2. List of all posts and their read count!
# 3. It should be protected!
# 4. Status of gunicorn & redis process
# 5. Redis cache status (memory and process)
# 6. Server processes
# Stand-alone logger usable outside a request context.
log = custom_logger.get_stand_alone_logger()
def load():
    """Create the dashboard addon loader and return (loader, addon_names)."""
    addon_loader = AddonLoader(verbose=True, logger=app.logger, recursive=False, lazy_load=False)
    addon_loader.set_addon_dirs([os.path.join(BASE_DIR, app.config['DASHBOARD_MODS'])])
    addon_loader.set_addon_methods(['execute', 'template', 'get_data', 'name'])
    addon_loader.load_addons()
    loaded = addon_loader.get_loaded_addons(list_all=True)
    return addon_loader, loaded
def run(ldr, mod, req):
    """Instantiate addon ``mod`` via loader ``ldr``, execute it with the app
    config and current request, and return its rendered HTML result.

    NOTE(review): this calls ``get_result`` although load() configures the
    required addon methods as ['execute', 'template', 'get_data', 'name'] and
    the shipped addons implement ``get_data`` -- presumably AddonReturnType
    provides ``get_result``; confirm, otherwise this raises AttributeError.
    """
    mod_inst = ldr.get_instance(mod)
    mod_inst.print_addon_info()  # debug dump of addon metadata
    mod_inst.execute(current_app.config, req)
    return mod_inst.get_result(as_html=True)
# Load dashboard addons once at import time; reload() refreshes them.
loader, module_list = load()
# Plain-text alert file consumed by parse_alert_data()/update_alert_message().
alerts_file = os.path.join(BASE_DIR, 'files', 'alerts.txt')
@admin.route('/dashboard', methods=['GET'])
@login_required
def dashboard():
    """Run every loaded dashboard addon and render the combined results."""
    # ``global`` dropped: the names are only read here, never rebound.
    panels = {mod: run(loader, mod, request) for mod in module_list}
    return render_template('admin/dashboard.html', result=panels)
@admin.route('/reload', methods=['GET'])
@login_required
def reload():
    """Re-scan and reload all dashboard addons.

    Fixes two defects: load() returns a (loader, addon_names) tuple, but the
    old code assigned the whole tuple to ``module_list``, breaking the
    dashboard iteration afterwards; and the view returned None, which Flask
    turns into a 500 error.
    """
    global module_list, loader
    loader, module_list = load()
    return redirect(url_for('admin.dashboard'))
@admin.route('/mail_test', methods=['GET', 'POST'])
@login_required
def mail_test():
    """Send a test e-mail built from the submitted form, then show the form."""
    if request.method == 'POST':
        details = MailInfo(
            request.form.get('from_addr'),
            request.form.get('to_addr'),
            request.form.get('body'),
            request.form.get('subject'),
        )
        current_app.logger.info('Sending Test Mail, details {0}'.format(details))
        mailer.send_simple_mail(details)
        flash('info:Mail Sent Successfully!')
    return render_template('admin/mail_test.html')
def parse_alert_data(data):
    """Parse ``key=value`` lines into a dict.

    Values may themselves contain '=' characters, so split only on the FIRST
    '='; the old ``split('=', 2)`` raised ValueError for such values, and any
    line without '=' (e.g. a stray blank) crashed the unpacking. Such lines
    are now skipped.
    """
    current = {}
    for line in data.splitlines():
        if '=' not in line:
            continue  # skip blank/malformed lines instead of crashing
        header, text = line.split('=', 1)
        # splitlines() already removed the newline, so no extra strip needed
        current[header] = text
    return current
def update_alert_message(alert_file):
    """Read the alert file and publish its content as the jinja 'alert' global."""
    content, read_err = Utility.safe_read_from_file(alert_file, as_json=False)
    if read_err:
        log.error('Failed to setup alert in jinja evn variable, Error: %s' % read_err)
        return
    parsed = parse_alert_data(content)
    if parsed:
        app.jinja_env.globals['alert'] = parsed
        log.debug('Setup new alert message!')
# Publish any persisted alert into the jinja environment at import time.
update_alert_message(alerts_file)
@admin.route('/alert', methods=['GET', 'POST'])
def alert():
    """Show the current site alert and accept a new one via POST."""
    if request.method == 'POST':
        alert_type = request.form.get('type')
        alert_text = request.form.get('data')
        is_disabled = request.form.get('disable', False) == 'on'
        serialized = 'type={0}\ntext={1}\ndisabled={2}\n'.format(alert_type, alert_text, is_disabled)
        _, write_err = Utility.safe_write_to_file(alerts_file, serialized, as_json=False)
        if write_err:
            current_app.logger.error('Failed to create new alert! Error: %s' % write_err)
            flash('error:Failed to setup alert, please check the log and try again...')
            return redirect(url_for('admin.dashboard'))
        update_alert_message(alerts_file)
    current_alert = {}
    if os.path.isfile(alerts_file):
        content, read_err = Utility.safe_read_from_file(alerts_file, as_json=False)
        if read_err:
            current_app.logger.error('Failed to read alert file! Error: %s' % read_err)
            flash('error:Failed to read alert file, please check the log and try again!!')
            return redirect(url_for('admin.dashboard'))
        current_alert = parse_alert_data(content)
    return render_template('admin/alert.html', data=current_alert)
@admin.route('/notice_manage', methods=['POST'])
@login_required
def notice_manage():
    """Toggle or replace the persisted site notice (files/alerts.json).

    Fixes: removed a leftover ``pdb.set_trace()`` breakpoint that froze the
    worker on every request; ``ok`` was unbound (NameError) when the alerts
    file could not be read; dropped the unused ``alert_stat`` form read;
    fixed the 'Falied' typo in the failure flash message.
    """
    alerts_file = os.path.join(BASE_DIR, 'files', 'alerts.json')
    action = request.form['notice_action']
    ok = False  # previously unbound when the read below failed
    data, err = Utility.safe_read_from_file(alerts_file, as_json=True)
    if not err:
        if action == 'toggle':
            data['enabled'] = not data['enabled']
            ok, err = Utility.safe_write_to_file(alerts_file, data, as_json=True)
        elif action == 'replace':
            notice = request.form['setfor']
            parser = Notice(notice)
            try:
                _, ok = parser.parse()
                if ok:
                    errors = parser.validate()
                    if errors:
                        return render_template('admin/site_message.html', error=errors, data=notice)
                    # NOTE(review): parser.result is never written back to the
                    # alerts file in this branch -- confirm whether 'replace'
                    # should persist it like manage_notice() does.
                    data = parser.result
            except Exception as e:
                return render_template('admin/site_message.html', error=e, data=notice)
    if ok:
        flash('info:Alert Disabled Successfully!')
        return redirect(url_for('admin.manage_notice'))
    else:
        flash('error:Failed To Disable Alert! Please Try Again...')
        return redirect(url_for('admin.manage_notice'))
@admin.route('/notice', methods=['GET', 'POST'])
@login_required
def manage_notice():
    """Display and update the site notice stored in files/alerts.json.

    Fix: when the alerts file could not be read on GET, the view fell
    through and returned None, which Flask turns into a 500 error; the
    error is now surfaced through the template instead.
    """
    data = {}
    alerts_file = os.path.join(BASE_DIR, 'files', 'alerts.json')
    if request.method == 'POST':
        notice = request.form['setfor']
        parser = Notice(notice)
        try:
            key_count, ok = parser.parse()
            if ok:
                errors = parser.validate()
                if errors:
                    return render_template('admin/site_message.html', error=errors, data=notice)
                data = parser.result
        except Exception as e:
            return render_template('admin/site_message.html', error=e, data=notice)
        ok, err = Utility.safe_write_to_file(alerts_file, data, as_json=True)
        if not ok:
            return render_template('admin/site_message.html', error=err)
        else:
            return render_template('admin/site_message.html', data=data)
    data, err = Utility.safe_read_from_file(alerts_file, as_json=True)
    if not err:
        return render_template('admin/site_message.html', data=data)
    # Previously fell off the end here and returned None (HTTP 500).
    return render_template('admin/site_message.html', error=err)
class Notice(object):
    """Parse and validate the '{key: "value", ...}' notice string submitted
    from the admin form into a plain dict conforming to ``self.schema``."""

    def __init__(self, notice_str):
        self.data = notice_str
        # cerberus schema that the parsed result must satisfy
        self.schema = {'text': {'type': 'string', 'minlength': 1, 'required': True},
                       'type': {'type': 'string', 'allowed': ['info', 'warn', 'error'], 'minlength': 1, 'required': True},
                       'enabled': {'type': 'boolean', 'required': True},
                       'can_remove': {'type': 'boolean', 'required': True},
                       'set_for': {'type': 'list', 'required': True},
                       'unset_for': {'type': 'list', 'required': True},
                       }
        self.result = {}

    def parse(self):
        """Parse ``self.data`` into ``self.result``.

        Returns ``(number_of_keys_parsed, complete)`` where ``complete`` is
        True when every schema key was found. Removed a leftover debug print.
        """
        flat = self.data.replace('\r\n', '').strip(' ')
        # e.g. '{text: "<h1>Heading</h1>",type: "info",enabled: "true",...}'
        flat = flat[1:-1]  # drop the surrounding braces
        for entry in flat.split(','):
            if entry == '' or ':' not in entry:
                continue
            # Split on the FIRST ':' only -- values (URLs, free text) may
            # contain ':'; the old maxsplit=2 raised ValueError for them.
            key, value = [part.strip(' ') for part in entry.split(':', 1)]
            self.result[key] = self._convert_to_py_types(value, key)
        return len(self.result), len(self.result) == len(self.schema)

    def validate(self):
        """Validate ``self.result`` against the schema; return errors or None."""
        v = Validator(self.schema)
        status = v.validate(self.result)
        if not status:
            return v.errors
        return None

    def _convert_to_py_types(self, value, key_name):
        """Convert a quoted string value to the Python type the schema expects."""
        value = value.replace('"', '')
        if key_name in ('enabled', 'can_remove'):
            return value.lower() in ('yes', 'true')
        elif key_name in ('set_for', 'unset_for'):
            # NOTE(review): parse() already split entries on ',', so list
            # values can never actually contain commas here -- confirm intent.
            if value == '':
                return []
            else:
                return value.split(',')
        else:
            return value

    def _text_to_type(self, txt_type):
        """Map a type-name prefix ('str'/'bool'/'list') to the Python type."""
        if txt_type.startswith('str'):
            return str
        elif txt_type.startswith('bool'):
            return bool
        elif txt_type.startswith('list'):
            return list
|
{"/controller/authentication.py": ["/application.py"], "/controller/file_io.py": ["/application.py", "/libs/Utils.py"], "/controller/dashboard.py": ["/application.py"], "/dashboard_mods/RequestParamsAddon.py": ["/libs/AddonReturnType.py"], "/dashboard_mods/RedisAddon.py": ["/libs/RedisCache.py", "/libs/AddonReturnType.py"], "/libs/QuickCache.py": ["/libs/RedisCache.py"], "/dashboard_mods/SiteLogViewerAddon.py": ["/libs/AddonReturnType.py"], "/local_mods/flask-blogging/build/lib/flask_blogging/__init__.py": ["/local_mods/flask-blogging/build/lib/flask_blogging/blogging_filters.py"], "/model/StaticAssets.py": ["/application.py"], "/dashboard_mods/ViewStatsAddon.py": ["/libs/AddonReturnType.py", "/application.py"], "/controller/admin.py": ["/application.py", "/libs/Utils.py"], "/libs/Informer.py": ["/libs/Utils.py"], "/dashboard_mods/ServerAddon.py": ["/libs/AddonReturnType.py"], "/custom_filter.py": ["/libs/Utils.py"], "/controller/api.py": ["/application.py", "/libs/Utils.py"], "/application.py": ["/custom_filter.py", "/model/StatsCollector.py", "/libs/AppLogger.py", "/libs/Mailer.py", "/libs/Informer.py", "/model/StaticAssets.py", "/controller/file_io.py", "/controller/authentication.py", "/controller/admin.py", "/controller/apps.py", "/controller/api.py"]}
|
39,452
|
ninadmhatre/zual
|
refs/heads/master
|
/libs/Informer.py
|
try:
from builtins import str
except ImportError:
pass
import logging
from urllib import parse
import sqlalchemy as sqla
import datetime
from libs.Utils import Utility
class BaseStorage(object):
    """Abstract interface for page-view-count storage backends.

    Fix: the original raised ``NotImplemented(...)`` -- ``NotImplemented`` is
    a constant, not an exception class, so calling it raised TypeError
    instead of the intended NotImplementedError.
    """

    def get_all_stats(self, include_all):
        """Return stats for all pages (optionally including deleted ones)."""
        raise NotImplementedError('Please Implement!')

    def update_count(self, page_id):
        """Increment the view counter for ``page_id``."""
        raise NotImplementedError('Please Implement!')

    def get_count(self, page_id):
        """Return the stats row for ``page_id``."""
        raise NotImplementedError('Please Implement!')

    def remove_page(self, page_id):
        """Soft-delete ``page_id``."""
        raise NotImplementedError('Please Implement!')

    def add_page(self, page_id, title):
        """Register a new page with an initial view count."""
        raise NotImplementedError('Please Implement!')

    def create_table(self):
        """Create (or reflect) the backing table."""
        raise NotImplementedError('Please Implement!')
class SqliteStorage(BaseStorage):
"""
Create a database and implement the ``BaseStorage`` methods to store the
information about pages and their view count!
"""
_db = None
_logger = logging.getLogger("page-view-count")
_logger.setLevel(logging.DEBUG)
def __init__(self, engine=None, metadata=None, db=None):
if db:
self._engine = db.engine
self._metadata = db.metadata
else:
if not engine:
raise ValueError('Both db and engine args cannot be none!!!')
self._engine = engine
self._metadata = metadata or sqla.MetaData()
self._metadata.reflect(bind=self._engine)
self.table_name = 'page_views'
self._count_table = None
self.create_table()
@property
def metadata(self):
return self._metadata
def add_page(self, page_id, title):
success = True
with self._engine.begin() as conn:
try:
add_post = self._count_table.insert()
post_statement = add_post.values(
page_id=page_id, title=title,
count=1, deleted=0,
last_modified_date=datetime.datetime.utcnow()
)
post_result = conn.execute(post_statement)
except Exception as e:
self._logger.exception(str(e))
success = False
return success
def get_count(self, page_id):
r = None
with self._engine.begin() as conn:
try:
post_statement = sqla.select([self._count_table]).where(
self._count_table.c.page_id == page_id
)
post_result = conn.execute(post_statement).fetchone()
if post_result:
r = dict(post_id=post_result[0], title=post_result[1],
count=post_result[2], deleted=post_result[3],
last_modified_date=post_result[4])
except Exception as e:
self._logger.exception(str(e))
r = None
return r
def get_all_stats(self, include_all=False):
r = {}
with self._engine.begin() as conn:
if include_all:
post_statement = sqla.select([self._count_table])
else:
post_statement = sqla.select([self._count_table]).where(
self._count_table.c.deleted != 1
)
try:
result = conn.execute(post_statement)
for row in result.fetchall():
r[row[0]] = dict(title=row[1],
count=row[2], deleted=row[3],
last_modified_date=row[4])
except Exception as e:
self._logger.exception(str(e))
r = None
return r
def update_count(self, page_id):
success = True
with self._engine.begin() as conn:
try:
post_statement = self._count_table.update().\
where(self._count_table.c.page_id == page_id).\
values(count=self._count_table.c.count + 1)
result = conn.execute(post_statement)
except Exception as e:
self._logger.exception(str(e))
success = False
return success
def remove_page(self, page_id):
success = True
with self._engine.begin() as conn:
try:
post_statement = self._count_table.update().\
where(self._count_table.c.page_id == page_id).\
values(deleted=1)
result = conn.execute(post_statement)
except Exception as e:
self._logger.exception(str(e))
success = False
return success
    def create_table(self):
        """Declare or reflect the page-view counter table on self._metadata.

        If the table is absent it is declared here; the physical CREATE
        TABLE happens when the caller runs metadata.create_all().  If it
        already exists, reuse the reflected Table object instead.
        """
        with self._engine.begin() as conn:
            if not conn.dialect.has_table(conn, self.table_name):
                self._count_table = sqla.Table(
                    self.table_name, self._metadata,
                    sqla.Column("page_id", sqla.String(256), primary_key=True),
                    sqla.Column("title", sqla.String(512)),
                    sqla.Column("count", sqla.Integer, default=0),
                    sqla.Column("deleted", sqla.SmallInteger, default=0),
                    sqla.Column("last_modified_date", sqla.DateTime)
                )
                self._logger.debug("Created table with table name %s" %
                                   self.table_name)
            else:
                self._count_table = self._metadata.tables[self.table_name]
                self._logger.debug("Reflecting to table with table name %s" %
                                   self.table_name)
class Informer(object):
    """Facade over a page-view storage backend with an in-memory id cache.

    :param db: storage object exposing add_page / update_count /
               get_count / get_all_stats (e.g. SqliteStorage).
    """
    def __init__(self, db):
        self.db = db
        self.cache = None
        self.refresh = True
        self._update_cache()

    def _encode_title(self, page_title):
        # URL-quote a title for safe storage/transport.
        return parse.quote(page_title)

    def _decode_title(self, encoded_title):
        return parse.unquote(encoded_title)

    def _update_cache(self):
        # Snapshot the stats table; keys of the dict are page ids.
        self.cache = self.list()
        self.refresh = False

    def insert(self, page_id, page_title):
        """Create the counter row for a page, or bump it if already known.

        :param page_id: precomputed id, or falsy to derive it from the title.
        :return: True on success, False on storage failure.
        """
        if not page_id:
            page_id = Utility.get_md5_hash_of_title(page_title)
        if self.is_existing_page(page_id):
            # BUG FIX: pass is_page_id=True.  The previous call
            # `self.update(page_id)` treated the id as a *title* and
            # re-hashed the already-hashed value, so the wrong (missing)
            # row was updated.
            return self.update(page_id, is_page_id=True)
        success = self.db.add_page(page_id, page_title)
        if success:
            # BUG FIX: refresh the cache so the very next insert of the
            # same page is routed to update() instead of a duplicate
            # (and failing) add_page().
            self._update_cache()
        return success

    def is_existing_page(self, page_id):
        return page_id in self.cache

    def update(self, page_title, is_page_id=False):
        """Increment the view counter; *page_title* may already be an id."""
        if not is_page_id:
            page_id = Utility.get_md5_hash_of_title(page_title)
        else:
            page_id = page_title
        return self.db.update_count(page_id)

    def list(self, include_all=False):
        return self.db.get_all_stats(include_all=include_all)

    def get_count(self, page_id):
        return self.db.get_count(page_id)
|
{"/controller/authentication.py": ["/application.py"], "/controller/file_io.py": ["/application.py", "/libs/Utils.py"], "/controller/dashboard.py": ["/application.py"], "/dashboard_mods/RequestParamsAddon.py": ["/libs/AddonReturnType.py"], "/dashboard_mods/RedisAddon.py": ["/libs/RedisCache.py", "/libs/AddonReturnType.py"], "/libs/QuickCache.py": ["/libs/RedisCache.py"], "/dashboard_mods/SiteLogViewerAddon.py": ["/libs/AddonReturnType.py"], "/local_mods/flask-blogging/build/lib/flask_blogging/__init__.py": ["/local_mods/flask-blogging/build/lib/flask_blogging/blogging_filters.py"], "/model/StaticAssets.py": ["/application.py"], "/dashboard_mods/ViewStatsAddon.py": ["/libs/AddonReturnType.py", "/application.py"], "/controller/admin.py": ["/application.py", "/libs/Utils.py"], "/libs/Informer.py": ["/libs/Utils.py"], "/dashboard_mods/ServerAddon.py": ["/libs/AddonReturnType.py"], "/custom_filter.py": ["/libs/Utils.py"], "/controller/api.py": ["/application.py", "/libs/Utils.py"], "/application.py": ["/custom_filter.py", "/model/StatsCollector.py", "/libs/AppLogger.py", "/libs/Mailer.py", "/libs/Informer.py", "/model/StaticAssets.py", "/controller/file_io.py", "/controller/authentication.py", "/controller/admin.py", "/controller/apps.py", "/controller/api.py"]}
|
39,453
|
ninadmhatre/zual
|
refs/heads/master
|
/instance/dev.py
|
__author__ = 'ninad'
# Development-environment overrides, selected via APP_ENVIRONMENT=dev
# (see application.py config loading).
PORT = 5000  # local dev server port
ASSETS_DEBUG = True  # serve unbundled assets for easier debugging
LOG_QUERIES = True
DEBUG_TB_INTERCEPT_REDIRECTS = False  # let debug-toolbar pass redirects through
|
{"/controller/authentication.py": ["/application.py"], "/controller/file_io.py": ["/application.py", "/libs/Utils.py"], "/controller/dashboard.py": ["/application.py"], "/dashboard_mods/RequestParamsAddon.py": ["/libs/AddonReturnType.py"], "/dashboard_mods/RedisAddon.py": ["/libs/RedisCache.py", "/libs/AddonReturnType.py"], "/libs/QuickCache.py": ["/libs/RedisCache.py"], "/dashboard_mods/SiteLogViewerAddon.py": ["/libs/AddonReturnType.py"], "/local_mods/flask-blogging/build/lib/flask_blogging/__init__.py": ["/local_mods/flask-blogging/build/lib/flask_blogging/blogging_filters.py"], "/model/StaticAssets.py": ["/application.py"], "/dashboard_mods/ViewStatsAddon.py": ["/libs/AddonReturnType.py", "/application.py"], "/controller/admin.py": ["/application.py", "/libs/Utils.py"], "/libs/Informer.py": ["/libs/Utils.py"], "/dashboard_mods/ServerAddon.py": ["/libs/AddonReturnType.py"], "/custom_filter.py": ["/libs/Utils.py"], "/controller/api.py": ["/application.py", "/libs/Utils.py"], "/application.py": ["/custom_filter.py", "/model/StatsCollector.py", "/libs/AppLogger.py", "/libs/Mailer.py", "/libs/Informer.py", "/model/StaticAssets.py", "/controller/file_io.py", "/controller/authentication.py", "/controller/admin.py", "/controller/apps.py", "/controller/api.py"]}
|
39,454
|
ninadmhatre/zual
|
refs/heads/master
|
/dashboard_mods/ServerAddon.py
|
__author__ = 'Ninad Mhatre'
from addonpy.IAddonInfo import IAddonInfo
from jinja2 import Template
import psutil as ps
from libs.AddonReturnType import AddonReturnType
from datetime import datetime
class ServerAddon(IAddonInfo, AddonReturnType):
    """Dashboard addon reporting host CPU, memory, disk and process stats.

    Data is gathered with psutil in execute() and rendered to HTML with
    an inline Jinja2 template in template().
    """
    # NOTE(review): class-level mutable state -- all instances share
    # `result`/`status` until execute() rebinds them on the instance.
    result = None
    status = True

    def _get_cpu_stats(self):
        # Processor count, cumulative CPU times, and per-core load percent.
        cpu_info = {'processors': ps.cpu_count(), 'times': ps.cpu_times(),
                    'load': ps.cpu_percent(percpu=True)}
        return cpu_info

    def _get_process_stats(self, process):
        """Build {pid: details} for each psutil.Process in *process*."""
        p_info = {}
        for p in process:
            p_info[p.pid] = {}
            _p = p_info[p.pid]
            _p['exe'] = p.exe()
            _p['user'] = p.username()
            _p['created_at'] = datetime.fromtimestamp(p.create_time()).strftime('%Y-%m-%d %H:%M:%S')
            # Fixed 3-decimal formatting to keep the HTML table tidy.
            _p['cpu_usage'] = '{0:3.3f}'.format(p.cpu_percent())
            _p['memory_usage'] = '{0:3.3f}'.format(p.memory_percent())
        return p_info

    def _search(self, name):
        # All running processes whose executable name matches *name* exactly.
        match = []
        for p in ps.process_iter():
            if p.name() == name:
                match.append(p)
        return match

    def _get_memory_stats(self):
        """Virtual and swap memory totals/usage (values in bytes)."""
        mem_info = {'virtual': {}, 'swap': {}}
        v_mem = ps.virtual_memory()
        s_mem = ps.swap_memory()
        _c = mem_info['virtual']
        _c['total'] = v_mem.total
        _c['used'] = v_mem.used
        _c['free'] = v_mem.free
        _c['used_percent'] = v_mem.percent
        _c = mem_info['swap']
        _c['total'] = s_mem.total
        _c['used'] = s_mem.used
        _c['free'] = s_mem.free
        _c['used_percent'] = s_mem.percent
        return mem_info

    def _get_disk_stats(self):
        # Usage of the root filesystem only.
        return ps.disk_usage('/')

    def execute(self, *args, **kwargs):
        """Collect all stats into self.result (insertion-ordered dict)."""
        from collections import OrderedDict
        self.result = OrderedDict()
        self.result['cpu'] = self._get_cpu_stats()
        self.result['memory'] = self._get_memory_stats()
        self.result['disk'] = self._get_disk_stats()
        # Only python processes are listed on the dashboard.
        self.result['process'] = self._get_process_stats(self._search('python'))

    def template(self):
        """Render self.result into a Bootstrap panel via an inline template.

        NOTE(review): the template references {{ help_url }} but render()
        below does not pass it, so that link renders empty -- confirm.
        """
        html = '''<div class="col-lg-12"><div class="panel panel-success">
<div class="panel-heading">
<h3 class="panel-title">
{{- name -}}
<span class="pull-right glyphicon glyphicon-thumbs-up"></span>
</h3>
</div>
<div id="server" class="panel-collapse">
<div class="panel-body">
<div class="panel panel-default">
<div class="panel-heading">
<h3 class="panel-title">CPU Info</h3>
</div>
<div class="panel-body">
<table class="table table-bordered">
<tr><td>Processors</td><td>{{ data['cpu']['processors'] }}</td></tr>
<tr><td>Server Timings</td><td>{{ data['cpu']['times'].user }}</td></tr>
<tr><td>Processors</td><td>{{ data['cpu']['load'] }}%</td></tr>
</table>
</div>
</div>
<div class="panel panel-default">
<div class="panel-heading">
<h3 class="panel-title">Memory Info</h3>
</div>
<div class="panel-body">
<table class="table table-bordered">
<thead>
<tr>
<th>Type</th>
<th>Total</th>
<th>Used</th>
<th>Free</th>
<th>Used %</th>
</tr>
</thead>
<tbody>
{% for t in data['memory']|sort %}
<tr>
<td>{{ t }}</td>
<td class="bytes">{{ data['memory'][t]['total'] }}</td>
<td class="bytes">{{ data['memory'][t]['used'] }}</td>
<td class="bytes">{{ data['memory'][t]['free'] }}</td>
<td>{{ data['memory'][t]['used_percent'] }}</td>
</tr>
{% endfor %}
</tbody>
</table>
</div>
</div>
<div class="panel panel-default">
<div class="panel-heading">
<h3 class="panel-title">Disk Usage of '/'</h3>
</div>
<div class="panel-body">
<table class="table table-bordered">
<tr><td>Total</td><td class="bytes">{{ data['disk'].total }}</td></tr>
<tr><td>Used (bytes)</td><td class="bytes">{{ data['disk'].used }}</td></tr>
<tr><td>Free</td><td class="bytes">{{ data['disk'].free }}</td></tr>
<tr><td>Used %</td><td>{{ data['disk'].percent }}</td></tr>
</table>
</div>
</div>
<div class="panel panel-default">
<div class="panel-heading">
<h3 class="panel-title">Process Info</h3>
</div>
<div class="panel-body">
<table class="table table-bordered">
<thead>
<th>PID</th>
<th>User</th>
<th>Executable</th>
<th>Created At</th>
<th>CPU Use %</th>
<th>Memory Use %</th>
</thead>
<tbody>
{% for p in data['process']|sort %}
{% set proc = data['process'][p] %}
<tr>
<td>{{ p }}</td>
<td>{{ proc['user'] }}</td>
<td>{{ proc['exe'] }}</td>
<td>{{ proc['created_at'] }}</td>
<td>{{ proc['cpu_usage'] }}</td>
<td>{{ proc['memory_usage'] }}</td>
</tr>
{% endfor %}
</tbody>
</table>
</div>
</div>
</div>
<div class="panel-footer" style="font-size: 80%;">Know more about this module <a href="{{ help_url }}" target="_blank">here</a></div>
</div>
</div></div>'''
        t = Template(html)
        return t.render(data=self.result, name=self.name)

    def get_data(self, as_html=True):
        """AddonReturnType hook: rendered HTML by default, raw dict otherwise."""
        if as_html:
            return self.template()
        return self.result

    @property
    def name(self):
        # e.g. 'ServerAddon' -> 'Server Info' (dashboard display name).
        return self.__addon__().replace('Addon', ' Info')

    @staticmethod
    def __addon__():
        return 'ServerAddon'
|
{"/controller/authentication.py": ["/application.py"], "/controller/file_io.py": ["/application.py", "/libs/Utils.py"], "/controller/dashboard.py": ["/application.py"], "/dashboard_mods/RequestParamsAddon.py": ["/libs/AddonReturnType.py"], "/dashboard_mods/RedisAddon.py": ["/libs/RedisCache.py", "/libs/AddonReturnType.py"], "/libs/QuickCache.py": ["/libs/RedisCache.py"], "/dashboard_mods/SiteLogViewerAddon.py": ["/libs/AddonReturnType.py"], "/local_mods/flask-blogging/build/lib/flask_blogging/__init__.py": ["/local_mods/flask-blogging/build/lib/flask_blogging/blogging_filters.py"], "/model/StaticAssets.py": ["/application.py"], "/dashboard_mods/ViewStatsAddon.py": ["/libs/AddonReturnType.py", "/application.py"], "/controller/admin.py": ["/application.py", "/libs/Utils.py"], "/libs/Informer.py": ["/libs/Utils.py"], "/dashboard_mods/ServerAddon.py": ["/libs/AddonReturnType.py"], "/custom_filter.py": ["/libs/Utils.py"], "/controller/api.py": ["/application.py", "/libs/Utils.py"], "/application.py": ["/custom_filter.py", "/model/StatsCollector.py", "/libs/AppLogger.py", "/libs/Mailer.py", "/libs/Informer.py", "/model/StaticAssets.py", "/controller/file_io.py", "/controller/authentication.py", "/controller/admin.py", "/controller/apps.py", "/controller/api.py"]}
|
39,455
|
ninadmhatre/zual
|
refs/heads/master
|
/custom_filter.py
|
__author__ = 'ninad'
import jinja2
import hashlib
from libs.Utils import Utility
def snippet(text, length=200):
    """Jinja filter: return at most the first *length* characters of *text*.

    Non-string values (including None) are passed through unchanged.
    """
    if not isinstance(text, str):
        # None fails the isinstance check too, so it is returned as-is.
        return text
    return text[:length]
def hash_me(text, prefix='some_text'):
    """Jinja filter: hex MD5 digest of *prefix* concatenated with *text*."""
    digest = hashlib.md5()
    digest.update((prefix + text).encode())
    return digest.hexdigest()
def toBoolean(text):
    """Jinja filter coercing an int or string flag to bool.

    Ints are truthy only when exactly 1; strings only when they spell
    'on', 'yes' or 'true' (case-insensitive).
    """
    if isinstance(text, int):
        return text == 1
    return text.lower() in {'on', 'yes', 'true'}
def toAscii(text):
    """Jinja filter: decode *text* via Utility.unquote_string.

    Presumably URL-unquoting (inverse of the quote used when titles are
    stored) -- confirm against libs/Utils.py.
    """
    return Utility.unquote_string(text)
# Register the filters globally so every jinja2 Environment (including the
# one Flask builds) picks them up without per-app wiring.
jinja2.filters.FILTERS['snippet'] = snippet
jinja2.filters.FILTERS['page_id'] = hash_me
jinja2.filters.FILTERS['toBoolean'] = toBoolean
jinja2.filters.FILTERS['toAscii'] = toAscii
# Alternative per-environment registration (kept for reference):
# env = Environment()
# env.filters['snippet'] = snippet
|
{"/controller/authentication.py": ["/application.py"], "/controller/file_io.py": ["/application.py", "/libs/Utils.py"], "/controller/dashboard.py": ["/application.py"], "/dashboard_mods/RequestParamsAddon.py": ["/libs/AddonReturnType.py"], "/dashboard_mods/RedisAddon.py": ["/libs/RedisCache.py", "/libs/AddonReturnType.py"], "/libs/QuickCache.py": ["/libs/RedisCache.py"], "/dashboard_mods/SiteLogViewerAddon.py": ["/libs/AddonReturnType.py"], "/local_mods/flask-blogging/build/lib/flask_blogging/__init__.py": ["/local_mods/flask-blogging/build/lib/flask_blogging/blogging_filters.py"], "/model/StaticAssets.py": ["/application.py"], "/dashboard_mods/ViewStatsAddon.py": ["/libs/AddonReturnType.py", "/application.py"], "/controller/admin.py": ["/application.py", "/libs/Utils.py"], "/libs/Informer.py": ["/libs/Utils.py"], "/dashboard_mods/ServerAddon.py": ["/libs/AddonReturnType.py"], "/custom_filter.py": ["/libs/Utils.py"], "/controller/api.py": ["/application.py", "/libs/Utils.py"], "/application.py": ["/custom_filter.py", "/model/StatsCollector.py", "/libs/AppLogger.py", "/libs/Mailer.py", "/libs/Informer.py", "/model/StaticAssets.py", "/controller/file_io.py", "/controller/authentication.py", "/controller/admin.py", "/controller/apps.py", "/controller/api.py"]}
|
39,456
|
ninadmhatre/zual
|
refs/heads/master
|
/model/__init__.py
|
__author__ = 'ninad'
|
{"/controller/authentication.py": ["/application.py"], "/controller/file_io.py": ["/application.py", "/libs/Utils.py"], "/controller/dashboard.py": ["/application.py"], "/dashboard_mods/RequestParamsAddon.py": ["/libs/AddonReturnType.py"], "/dashboard_mods/RedisAddon.py": ["/libs/RedisCache.py", "/libs/AddonReturnType.py"], "/libs/QuickCache.py": ["/libs/RedisCache.py"], "/dashboard_mods/SiteLogViewerAddon.py": ["/libs/AddonReturnType.py"], "/local_mods/flask-blogging/build/lib/flask_blogging/__init__.py": ["/local_mods/flask-blogging/build/lib/flask_blogging/blogging_filters.py"], "/model/StaticAssets.py": ["/application.py"], "/dashboard_mods/ViewStatsAddon.py": ["/libs/AddonReturnType.py", "/application.py"], "/controller/admin.py": ["/application.py", "/libs/Utils.py"], "/libs/Informer.py": ["/libs/Utils.py"], "/dashboard_mods/ServerAddon.py": ["/libs/AddonReturnType.py"], "/custom_filter.py": ["/libs/Utils.py"], "/controller/api.py": ["/application.py", "/libs/Utils.py"], "/application.py": ["/custom_filter.py", "/model/StatsCollector.py", "/libs/AppLogger.py", "/libs/Mailer.py", "/libs/Informer.py", "/model/StaticAssets.py", "/controller/file_io.py", "/controller/authentication.py", "/controller/admin.py", "/controller/apps.py", "/controller/api.py"]}
|
39,457
|
ninadmhatre/zual
|
refs/heads/master
|
/controller/api.py
|
# -*- coding: utf-8 -*-
__author__ = 'ninad'
from flask import Blueprint, current_app, request, jsonify
from application import page_view_stats
import simplejson as json
from libs.Utils import Utility
import time
api = Blueprint('api', __name__, url_prefix='/api')
dumb_cache = {}
@api.route('/views', methods=['GET', 'POST'])
def view_counter():
    """Record a page view (POST) and return the current count as JSON.

    Expects a JSON body with a 'page_title' key; the page id is the md5
    of the title.  Views are throttled per (page, ip) by
    update_page_count().
    """
    # BUG FIX: the original only populated `title`/`page_id` under POST
    # but the route also accepts GET, so a GET request hit the code below
    # with undefined names and raised NameError (HTTP 500).
    if request.method != 'POST':
        return jsonify(view='Invalid Parameters Passed!!')
    data = json.loads(request.data)
    if 'page_title' not in data:
        return jsonify(view='Invalid Parameters Passed!!')
    title = data['page_title']
    page_id = Utility.get_md5_hash_of_title(title)
    current_app.logger.debug('Page Title : {0} & Page Id: {1}'.format(title, page_id))
    remote_ip = Utility.get_ip(request)
    update_page_count(page_id, title, remote_ip)
    result = page_view_stats.get_count(page_id)
    # BUG FIX: get_count() returns None on a storage miss/error; indexing
    # it raised TypeError before.
    if result is None:
        return jsonify(view='Invalid Parameters Passed!!')
    return jsonify(views=result['count'])
def update_page_count(page_id, title, remote_ip):
    """Throttled page-view bump: at most one count per (page, IP) per 3 min.

    dumb_cache maps "page_id:ip" -> timestamp of the last counted view.
    NOTE(review): the cache grows without bound, is per-process only, and
    is lost on restart -- acceptable for a small site, but worth noting.
    """
    joined = '{0}:{1}'.format(page_id, remote_ip)
    if joined in dumb_cache:
        last_time = dumb_cache[joined]
        now = time.time()
        # Count again only after the 180-second window has elapsed.
        if now - last_time > 180:
            if page_view_stats.update(page_id, is_page_id=True):
                dumb_cache[joined] = now
    else:
        # First view from this IP (this process lifetime): insert/bump the
        # page row; the title is quoted before storage.
        if page_view_stats.insert(page_id, Utility.quote_string(title)):
            dumb_cache[joined] = time.time()
        else:
            current_app.logger.error('Failed to insert page_id [{0}] in page_view_counter'.format(page_id))
|
{"/controller/authentication.py": ["/application.py"], "/controller/file_io.py": ["/application.py", "/libs/Utils.py"], "/controller/dashboard.py": ["/application.py"], "/dashboard_mods/RequestParamsAddon.py": ["/libs/AddonReturnType.py"], "/dashboard_mods/RedisAddon.py": ["/libs/RedisCache.py", "/libs/AddonReturnType.py"], "/libs/QuickCache.py": ["/libs/RedisCache.py"], "/dashboard_mods/SiteLogViewerAddon.py": ["/libs/AddonReturnType.py"], "/local_mods/flask-blogging/build/lib/flask_blogging/__init__.py": ["/local_mods/flask-blogging/build/lib/flask_blogging/blogging_filters.py"], "/model/StaticAssets.py": ["/application.py"], "/dashboard_mods/ViewStatsAddon.py": ["/libs/AddonReturnType.py", "/application.py"], "/controller/admin.py": ["/application.py", "/libs/Utils.py"], "/libs/Informer.py": ["/libs/Utils.py"], "/dashboard_mods/ServerAddon.py": ["/libs/AddonReturnType.py"], "/custom_filter.py": ["/libs/Utils.py"], "/controller/api.py": ["/application.py", "/libs/Utils.py"], "/application.py": ["/custom_filter.py", "/model/StatsCollector.py", "/libs/AppLogger.py", "/libs/Mailer.py", "/libs/Informer.py", "/model/StaticAssets.py", "/controller/file_io.py", "/controller/authentication.py", "/controller/admin.py", "/controller/apps.py", "/controller/api.py"]}
|
39,458
|
ninadmhatre/zual
|
refs/heads/master
|
/application.py
|
# -*- coding: utf-8 -*-
__author__ = 'ninad'
# Core
import os
from datetime import timedelta
import custom_filter
# Flask
from flask import Flask, render_template, redirect, request, session, flash, send_from_directory, abort, url_for
from flask.ext.cache import Cache
from sqlalchemy import create_engine, MetaData
from flask.ext.login import UserMixin, LoginManager, login_user, logout_user, login_required
from flask.ext.blogging import SQLAStorage, BloggingEngine
from flask.ext.seasurf import SeaSurf
# App
from model.StatsCollector import Stats
from libs.AppLogger import AppLogger, LoggerTypes
from libs.Mailer import Mailer
from libs.Informer import SqliteStorage, Informer
# ---- Filesystem layout, rooted at this file's directory ----
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
_static_folder = os.path.join(BASE_DIR, 'static')
instance_dir = os.path.join(BASE_DIR, 'instance')
blog_posts_dir = os.path.join(BASE_DIR, 'templates', 'blog')
upload_dir = os.path.join(BASE_DIR, 'uploads')
app = Flask(__name__, instance_path=instance_dir, static_path=_static_folder, static_url_path='/static')
# Trying on Windows? Comment out __above__ line and use __below__ line!
# app = Flask(__name__, instance_path=instance_dir, static_path='/static', static_url_path='/static')
# Layered config: defaults first, then the APP_ENVIRONMENT-specific module
# (instance/dev.py unless overridden) on top.
app.config.from_object('instance.default')
app.config.from_object('instance.{0}'.format(os.environ.get('APP_ENVIRONMENT', 'dev')))
app.config['BASE_DIR'] = BASE_DIR
app.config['UPLOAD_DIR'] = upload_dir
# File-based application logging.
custom_logger = AppLogger(app.config['LOGGER'])
app.logger.addHandler(custom_logger.get_log_handler(LoggerTypes.File))
# Redis-backed cache outside DEBUG; in-process 'simple' cache under DEBUG.
if not app.config['DEBUG']:
    cache = Cache(config=app.config.get('CACHE')['redis'])
else:
    cache = Cache(config={'CACHE_TYPE': 'simple'})
mailer = Mailer(app)
cache.init_app(app)
csrf = SeaSurf(app)  # CSRF protection for forms
# Blog content lives in a local SQLite database.
engine = create_engine('sqlite:///blog.db')
meta = MetaData()
sql_storage = SQLAStorage(engine, metadata=meta)
blog_engine = BloggingEngine(app, sql_storage, cache=cache)
login_manager = LoginManager(app)
meta.create_all(bind=engine)
# Separate SQLite database for the page-view counters.
page_view_engine = create_engine('sqlite:///stats.db')
page_view_meta = MetaData()
page_view_storage = SqliteStorage(page_view_engine, metadata=page_view_meta)
page_view_meta.create_all(bind=page_view_engine)
page_view_stats = Informer(page_view_storage)
stat = Stats(app.config["STATS_FOLDER"])
app.logger.info('Starting Application')
# Login manager settings
login_manager.session_protection = "strong"
class User(UserMixin):
    """Minimal Flask-Login user: identity only, with a fixed display name."""

    def __init__(self, user_id):
        # Flask-Login expects an `id` attribute on the user object.
        self.id = user_id

    def get_name(self):
        """Return the display name (single-owner site, so it is fixed)."""
        return "Test_User"
@app.before_request
def make_session_permanent():
    """Mark the session permanent and (re)apply the configured lifetime.

    Runs before every request, so the SESSION_TIMEOUT from config acts as
    a rolling expiry window.
    """
    session.permanent = True
    app.permanent_session_lifetime = app.config['SESSION_TIMEOUT']
@login_manager.needs_refresh_handler
def refresh():
    """Send users whose login session needs re-authentication to login."""
    return redirect(url_for('auth.login'))
@login_manager.user_loader
@blog_engine.user_loader
def load_user(user_id):
    """
    Called on every request (by both Flask-Login and the blogging engine)
    to materialise the current user from the session's user id.

    :param user_id: id stored in the session cookie
    :return: a User wrapper around that id
    """
    return User(user_id)
# Initialize other modules.
# These need the `app` instance to exist first, hence the late import.
from model.StaticAssets import site_assets
site_assets.init_app(app)
@app.route('/')
def home():
    """Landing page: renders the static 'freelancer' theme template."""
    return render_template('freelancer.html')
@app.errorhandler(404)
def page_not_found(e):
    """
    Render the custom 404 page.

    Flask does not support registering app-wide error handlers from a
    blueprint, so all of them live in this module.
    :param e: the original exception
    :return: (rendered page, 404)
    """
    return render_template('error_code/404.html'), 404
@app.errorhandler(401)
def unauthorized(e):
    """
    Render the custom 401 (unauthorized) page.

    BUG FIX: this function was previously also named `page_not_found`,
    silently shadowing the 404 handler's module-level name (flake8 F811).
    Both handlers were still registered (the decorator runs at definition
    time), but the duplicate name was a latent trap; renamed for clarity.
    :param e: the original exception
    :return: (rendered page, 401)
    """
    return render_template('error_code/401.html'), 401
@app.errorhandler(500)
def internal_server_error(e):
    """
    Render the custom 500 page.

    Flask does not support registering app-wide error handlers from a
    blueprint, so all of them live in this module.
    :param e: the original exception
    :return: (rendered page, 500)
    """
    return render_template('error_code/500.html'), 500
@app.route('/test')
def test():
    """Debug endpoint: dump every attribute of the request object.

    NOTE(review): this exposes internals (headers, cookies, environ) to
    any visitor -- disable or auth-guard it outside development.
    """
    d = {}
    for p in dir(request):
        d[p] = getattr(request, p)
    return render_template('dump/dict.html', data=d)
# Route handlers are separated into blueprints; imported late because they
# import `app` (and friends) from this module.
from controller.file_io import fileio
from controller.authentication import auth
from controller.admin import admin
from controller.apps import apps
from controller.api import api
app.register_blueprint(fileio)
app.register_blueprint(auth)
app.register_blueprint(apps)
app.register_blueprint(admin)
app.register_blueprint(api)
# Dev entry point; production servers import `app` directly instead.
if __name__ == '__main__':
    app.run(port=app.config['PORT'])
|
{"/controller/authentication.py": ["/application.py"], "/controller/file_io.py": ["/application.py", "/libs/Utils.py"], "/controller/dashboard.py": ["/application.py"], "/dashboard_mods/RequestParamsAddon.py": ["/libs/AddonReturnType.py"], "/dashboard_mods/RedisAddon.py": ["/libs/RedisCache.py", "/libs/AddonReturnType.py"], "/libs/QuickCache.py": ["/libs/RedisCache.py"], "/dashboard_mods/SiteLogViewerAddon.py": ["/libs/AddonReturnType.py"], "/local_mods/flask-blogging/build/lib/flask_blogging/__init__.py": ["/local_mods/flask-blogging/build/lib/flask_blogging/blogging_filters.py"], "/model/StaticAssets.py": ["/application.py"], "/dashboard_mods/ViewStatsAddon.py": ["/libs/AddonReturnType.py", "/application.py"], "/controller/admin.py": ["/application.py", "/libs/Utils.py"], "/libs/Informer.py": ["/libs/Utils.py"], "/dashboard_mods/ServerAddon.py": ["/libs/AddonReturnType.py"], "/custom_filter.py": ["/libs/Utils.py"], "/controller/api.py": ["/application.py", "/libs/Utils.py"], "/application.py": ["/custom_filter.py", "/model/StatsCollector.py", "/libs/AppLogger.py", "/libs/Mailer.py", "/libs/Informer.py", "/model/StaticAssets.py", "/controller/file_io.py", "/controller/authentication.py", "/controller/admin.py", "/controller/apps.py", "/controller/api.py"]}
|
39,459
|
ninadmhatre/zual
|
refs/heads/master
|
/libs/Mailer.py
|
# -*- coding: utf-8 -*-
__author__ = 'ninad'
from flask_mail import Mail, Message
class Mailer(object):
    """Thin wrapper around Flask-Mail for sending HTML messages."""

    def __init__(self, app):
        self.mail = Mail()
        self.mail.init_app(app)

    def send_simple_mail(self, details):
        """Build and dispatch a Message from a details object.

        *details* must expose Subject, Sender, To (comma-separated
        addresses) and Message (HTML body) attributes.
        """
        recipients = details.To.split(',')
        message = Message(subject=details.Subject,
                          sender=details.Sender,
                          recipients=recipients,
                          html=details.Message)
        self._send(message)

    def _send(self, msg):
        # Separate hook so tests or subclasses can intercept the send.
        self.mail.send(msg)
|
{"/controller/authentication.py": ["/application.py"], "/controller/file_io.py": ["/application.py", "/libs/Utils.py"], "/controller/dashboard.py": ["/application.py"], "/dashboard_mods/RequestParamsAddon.py": ["/libs/AddonReturnType.py"], "/dashboard_mods/RedisAddon.py": ["/libs/RedisCache.py", "/libs/AddonReturnType.py"], "/libs/QuickCache.py": ["/libs/RedisCache.py"], "/dashboard_mods/SiteLogViewerAddon.py": ["/libs/AddonReturnType.py"], "/local_mods/flask-blogging/build/lib/flask_blogging/__init__.py": ["/local_mods/flask-blogging/build/lib/flask_blogging/blogging_filters.py"], "/model/StaticAssets.py": ["/application.py"], "/dashboard_mods/ViewStatsAddon.py": ["/libs/AddonReturnType.py", "/application.py"], "/controller/admin.py": ["/application.py", "/libs/Utils.py"], "/libs/Informer.py": ["/libs/Utils.py"], "/dashboard_mods/ServerAddon.py": ["/libs/AddonReturnType.py"], "/custom_filter.py": ["/libs/Utils.py"], "/controller/api.py": ["/application.py", "/libs/Utils.py"], "/application.py": ["/custom_filter.py", "/model/StatsCollector.py", "/libs/AppLogger.py", "/libs/Mailer.py", "/libs/Informer.py", "/model/StaticAssets.py", "/controller/file_io.py", "/controller/authentication.py", "/controller/admin.py", "/controller/apps.py", "/controller/api.py"]}
|
39,460
|
ninadmhatre/zual
|
refs/heads/master
|
/libs/AddonReturnType.py
|
# -*- coding: utf-8 -*-
__author__ = 'ninad'
from collections import namedtuple
# Shared result envelope every dashboard addon hands back to the caller.
ReturnType = namedtuple('AddonReturnType', 'name html status desc')


class AddonReturnType(object):
    """Mixin giving addons a uniform result envelope.

    Subclasses must provide `name`, `status`, `get_data()` and
    `get_desc()`.
    """

    def get_result(self, as_html=True):
        """Package this addon's output into a ReturnType tuple."""
        payload = self.get_data(as_html)
        return ReturnType(
            name=self.name,
            html=payload,
            status=self.status,
            desc=self.get_desc(),
        )
|
{"/controller/authentication.py": ["/application.py"], "/controller/file_io.py": ["/application.py", "/libs/Utils.py"], "/controller/dashboard.py": ["/application.py"], "/dashboard_mods/RequestParamsAddon.py": ["/libs/AddonReturnType.py"], "/dashboard_mods/RedisAddon.py": ["/libs/RedisCache.py", "/libs/AddonReturnType.py"], "/libs/QuickCache.py": ["/libs/RedisCache.py"], "/dashboard_mods/SiteLogViewerAddon.py": ["/libs/AddonReturnType.py"], "/local_mods/flask-blogging/build/lib/flask_blogging/__init__.py": ["/local_mods/flask-blogging/build/lib/flask_blogging/blogging_filters.py"], "/model/StaticAssets.py": ["/application.py"], "/dashboard_mods/ViewStatsAddon.py": ["/libs/AddonReturnType.py", "/application.py"], "/controller/admin.py": ["/application.py", "/libs/Utils.py"], "/libs/Informer.py": ["/libs/Utils.py"], "/dashboard_mods/ServerAddon.py": ["/libs/AddonReturnType.py"], "/custom_filter.py": ["/libs/Utils.py"], "/controller/api.py": ["/application.py", "/libs/Utils.py"], "/application.py": ["/custom_filter.py", "/model/StatsCollector.py", "/libs/AppLogger.py", "/libs/Mailer.py", "/libs/Informer.py", "/model/StaticAssets.py", "/controller/file_io.py", "/controller/authentication.py", "/controller/admin.py", "/controller/apps.py", "/controller/api.py"]}
|
39,461
|
Prabesh77/Django-Books
|
refs/heads/master
|
/Books_app/views.py
|
from django.shortcuts import render, redirect
from django.http import HttpResponseRedirect
from .models import addBook
# Create your views here.
def books_view(request):
    """List every stored book entry on the base template."""
    items = addBook.objects.all()
    return render(request, 'Books_app/base.html', {'all_items': items})
def addBooks(request):
    """Create a book entry from the submitted form, then redirect to the list.

    BUG FIX: `request.POST['content']` raised MultiValueDictKeyError
    (HTTP 500) whenever the field was missing (e.g. a direct GET hit);
    use .get() and skip empty submissions instead.
    """
    content = request.POST.get('content', '').strip()
    if content:
        addBook(content=content).save()
    return HttpResponseRedirect('/book/')
def deleteBooks(request, book_id):
    """Delete the book entry with primary key *book_id*, then redirect.

    NOTE(review): .get() raises addBook.DoesNotExist (HTTP 500) for an
    unknown id -- consider get_object_or_404.
    """
    delete_item = addBook.objects.get(id=book_id)
    delete_item.delete()
    return HttpResponseRedirect('/book/')
|
{"/Books_app/views.py": ["/Books_app/models.py"]}
|
39,462
|
Prabesh77/Django-Books
|
refs/heads/master
|
/Books_app/models.py
|
from django.db import models
# Create your models her
class addBook(models.Model):
    """A single book entry; its only data is a free-form text field."""
    # Free-form text of the entry (the model's only column).
    content = models.TextField()
|
{"/Books_app/views.py": ["/Books_app/models.py"]}
|
39,463
|
Prabesh77/Django-Books
|
refs/heads/master
|
/Books_app/migrations/0002_delete_book.py
|
# Generated by Django 2.1.7 on 2019-04-06 03:38
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration removing the obsolete `Book` model."""

    dependencies = [
        ('Books_app', '0001_initial'),
    ]
    operations = [
        migrations.DeleteModel(
            name='Book',
        ),
    ]
|
{"/Books_app/views.py": ["/Books_app/models.py"]}
|
39,466
|
bosefalk/WFB-simulation
|
refs/heads/master
|
/read_csv.py
|
from pandas import read_csv
def win_percent(filename='results.csv'):
    """Return the percentage of battles won per combatant.

    Generalized: the results file is now a parameter (default keeps the
    old hard-coded 'results.csv' behaviour for existing callers).

    :param filename: CSV produced by wfb_simulation.py, with a 'Winner'
                     column naming the victor of each simulated battle.
    :return: pandas Series mapping winner name -> win percentage (0-100).
             An empty file yields NaN percentages (0/0 division).
    """
    df = read_csv(filename)
    # value_counts: wins per combatant; count: total simulated battles.
    wins = df['Winner'].value_counts()
    total = df['Winner'].count()
    return wins / total * 100
|
{"/wfb_simulation.py": ["/unit_class.py", "/cc_round.py"], "/cc_round.py": ["/roll_dice.py", "/compare_characteristics.py"], "/unit_test.py": ["/roll_dice.py", "/compare_characteristics.py"], "/run.py": ["/unit_class.py", "/wfb_simulation.py"], "/flask_main.py": ["/forms.py", "/unit_class.py", "/wfb_simulation.py", "/read_csv.py"], "/sandbox.py": ["/unit_class.py", "/wfb_simulation.py"]}
|
39,467
|
bosefalk/WFB-simulation
|
refs/heads/master
|
/wfb_simulation.py
|
from unit_class import Unit
from copy import deepcopy
import csv
from cc_round import *
class Return_wfb_simulation:
    """Value object holding one battle's outcome for CSV reporting."""

    def __init__(self, winner, round, unit1_size, unit2_size):
        # `round` shadows the builtin, but the keyword name is part of
        # the caller-facing interface and is kept.
        self.unit1_size = unit1_size
        self.unit2_size = unit2_size
        self.round = round
        self.winner = winner
def wfb_simulation(unit1, unit2, runs, filename = 'results'):
    """Simulate *runs* battles between two Units, writing results to CSV.

    Each battle runs on deep copies so the input Units are never mutated.
    Per-battle results go to `<filename>.csv`; a detailed combat trace is
    written to log.txt.

    BUG FIX: the original ended with `log.close` (attribute access, not a
    call), so log.txt was never closed; both files are now managed with
    context managers.  The CSV file is also opened with newline='' per
    the csv-module convention, preventing blank rows on Windows.

    :param unit1, unit2: Unit objects to pit against each other
    :param runs: number of battles to simulate
    :param filename: output CSV name without extension
    :return: completion message (also printed)
    """
    with open("log.txt", "w+") as log, \
         open(filename + ".csv", "w", newline='') as output:
        outputwriter = csv.writer(output)
        outputwriter.writerow(["Battle", "Winner", "Rounds",
                               "Final_size_" + str(unit1.name),
                               "Final_size_" + str(unit2.name)])
        for i in range(0, runs):
            log.write('Battle simulation ' + str(i + 1) + '\n')
            # Fresh copies: close_combat mutates unit sizes as losses mount.
            unit1_copy = deepcopy(unit1)
            unit2_copy = deepcopy(unit2)
            c_result = close_combat(unit1_copy, unit2_copy, log = log)
            # winner is None only when both units were wiped out together
            # (possible only when they share the same Initiative).
            if c_result.winner is None:
                winner_label = "Both_wiped"
            else:
                winner_label = c_result.winner.name
            outputwriter.writerow([i + 1, winner_label, c_result.round,
                                   unit1_copy.models, unit2_copy.models])
    print("Finished, results printed in " + filename + ".csv")
    return "Finished, results printed in " + filename + ".csv"
|
{"/wfb_simulation.py": ["/unit_class.py", "/cc_round.py"], "/cc_round.py": ["/roll_dice.py", "/compare_characteristics.py"], "/unit_test.py": ["/roll_dice.py", "/compare_characteristics.py"], "/run.py": ["/unit_class.py", "/wfb_simulation.py"], "/flask_main.py": ["/forms.py", "/unit_class.py", "/wfb_simulation.py", "/read_csv.py"], "/sandbox.py": ["/unit_class.py", "/wfb_simulation.py"]}
|
39,468
|
bosefalk/WFB-simulation
|
refs/heads/master
|
/cc_round.py
|
from roll_dice import *
from compare_characteristics import *
def cc_attack(attacker, defender, log):
    """Resolve one side's attacks; return wounds taken by the defender.

    :param attacker, defender: Unit objects (WS/S/T/Sv/models attributes)
    :param log: open text file receiving the per-stage trace
    :return: unsaved wounds = wounds - saves

    Success thresholds come from the compare_characteristics helpers:
    to-hit (WS vs WS), to-wound (S vs T), then armour saves (S vs Sv);
    roll_dice() rolls one die per remaining model/success at each stage.
    """
    to_hit_value = to_hit(attacker.WS, defender.WS)
    hits = roll_dice(attacker.models, to_hit_value)
    to_wound_value = to_wound(attacker.S, defender.T)
    wounds = roll_dice(hits, to_wound_value)
    save_value = armour_save(attacker.S, defender.Sv)
    saves = roll_dice(wounds, save_value)
    wounds_final = wounds - saves
    # Trace every stage so individual battles can be audited in log.txt.
    log.write("Attacks: " + str(attacker.models) + '\n')
    log.write("Hits: " + str(hits) + " (" + str(to_hit_value) + "+)" + '\n')
    log.write("Wounds: " + str(wounds) + " (" + str(to_wound_value) + "+)" + '\n')
    log.write("Saves: " + str(saves) + " (" + str(save_value) + "+)" + '\n')
    log.write("Final Wounds: " + str(wounds_final) + '\n')
    return wounds_final;
# Small value object pairing a combat round's winner with a continue flag.
class Return_cc_round(object):
    def __init__(self, winner, combat_continues):
        """Store the winning Unit (or None) and whether combat goes on."""
        self.combat_continues = combat_continues
        self.winner = winner
# Calculate a round of close combat, and update the input Unit objects with remaining number of models
def cc_round(unit1, unit2, log):
    """Resolve one round of close combat between unit1 and unit2.

    The unit with the higher Initiative strikes first and its casualties
    are removed before the slower unit attacks back; equal Initiative
    means simultaneous attacks.  The loser of the round (by wounds
    caused, +1 per standard carried) takes a break test against its
    Leadership reduced by the margin of defeat.  Both Unit objects are
    mutated in place (their ``models`` counts are reduced).

    Returns a Return_cc_round whose ``winner`` is the winning Unit (or
    None on a draw / mutual destruction) and whose ``combat_continues``
    flag is False once a unit is wiped out or fails its break test.
    """
    log.write("Combat Round Starts" + '\n')
    log.write("Number of " + str(unit1.name) + ": " + str(unit1.models) + '\n')
    log.write("Number of " + str(unit2.name) + ": " + str(unit2.models) + '\n')
    # Determine who goes first, or if simultaneous
    # The slower unit removes casualties before attacking back
    if unit1.I > unit2.I:
        log.write(str(unit1.name) + " attacks first" + '\n')
        log.write(str(unit1.name) + " I: " + str(unit1.I) + " vs " + str(unit2.name) + " I: " + str(unit2.I) + '\n')
        log.write(str(unit1.name) + " attacks " + '\n')
        to_remove_unit2 = cc_attack(attacker=unit1, defender=unit2, log=log)
        unit2.models = unit2.models - to_remove_unit2
        log.write("Remaining " + str(unit2.name) + ": " + str(unit2.models) + '\n')
        if unit2.models <= 0:
            return Return_cc_round(winner=unit1, combat_continues=False)
        log.write(str(unit2.name) + " attacks " + '\n')
        to_remove_unit1 = cc_attack(attacker=unit2, defender=unit1, log=log)
        unit1.models = unit1.models - to_remove_unit1
        log.write("Remaining " + str(unit1.name) + ": " + str(unit1.models) + '\n')
        if unit1.models <= 0:
            return Return_cc_round(winner=unit2, combat_continues=False)
    if unit1.I < unit2.I:
        log.write(str(unit2.name) + " attacks first" + '\n')
        log.write(str(unit1.name) + " I: " + str(unit1.I) + " vs " + str(unit2.name) + " I: " + str(unit2.I) + '\n')
        log.write(str(unit2.name) + " attacks " + '\n')
        to_remove_unit1 = cc_attack(attacker=unit2, defender=unit1, log=log)
        unit1.models = unit1.models - to_remove_unit1
        log.write("Remaining " + str(unit1.name) + ": " + str(unit1.models) + '\n')
        if unit1.models <= 0:
            return Return_cc_round(winner=unit2, combat_continues=False)
        # Message normalized to " attacks " (was "attacks" with no spaces,
        # inconsistent with every other attack log line).
        log.write(str(unit1.name) + " attacks " + '\n')
        to_remove_unit2 = cc_attack(attacker=unit1, defender=unit2, log=log)
        unit2.models = unit2.models - to_remove_unit2
        log.write("Remaining " + str(unit2.name) + ": " + str(unit2.models) + '\n')
        if unit2.models <= 0:
            return Return_cc_round(winner=unit1, combat_continues=False)
    if unit1.I == unit2.I:
        # Simultaneous: both roll attacks before any casualties are removed.
        log.write("Simultaneous attacks" + '\n')
        log.write(str(unit1.name) + " I: " + str(unit1.I) + " vs " + str(unit2.name) + " I: " + str(unit2.I) + '\n')
        log.write(str(unit2.name) + " attacks " + '\n')
        to_remove_unit1 = cc_attack(attacker=unit2, defender=unit1, log=log)
        log.write(str(unit1.name) + " attacks " + '\n')
        to_remove_unit2 = cc_attack(attacker=unit1, defender=unit2, log=log)
        unit1.models = unit1.models - to_remove_unit1
        log.write("Remaining " + str(unit1.name) + ": " + str(unit1.models) + '\n')
        unit2.models = unit2.models - to_remove_unit2
        log.write("Remaining " + str(unit2.name) + ": " + str(unit2.models) + '\n')
        if unit2.models <= 0 and unit1.models > 0:
            return Return_cc_round(winner=unit1, combat_continues=False)
        if unit1.models <= 0 and unit2.models > 0:
            return Return_cc_round(winner=unit2, combat_continues=False)
        if unit2.models <= 0 and unit1.models <= 0:
            return Return_cc_round(winner=None, combat_continues=False)
    # Break test: combat score is wounds caused, +1 for carrying a standard.
    adj_w_unit1 = to_remove_unit2
    if unit1.standard:
        log.write(str(unit1.name) + " has Standard" + '\n')
        adj_w_unit1 = adj_w_unit1 + 1
    adj_w_unit2 = to_remove_unit1
    if unit2.standard:
        log.write(str(unit2.name) + " has Standard" + '\n')
        # BUG FIX: previously assigned to the misspelled name "adj_w_uni2",
        # so unit2's standard bonus was silently discarded.
        adj_w_unit2 = adj_w_unit2 + 1
    log.write("Combat Score: " + str(unit1.name) + " " + str(adj_w_unit1) + " vs " + str(unit2.name) + " " + str(adj_w_unit2) + '\n')
    if adj_w_unit1 > adj_w_unit2:
        loser = unit2
        winner = unit1
        loser_diff = adj_w_unit1 - adj_w_unit2
        log.write(str(unit2.name) + " lost the round by " + str(loser_diff) + '\n')
    if adj_w_unit1 < adj_w_unit2:
        loser = unit1
        winner = unit2
        loser_diff = adj_w_unit2 - adj_w_unit1
        log.write(str(unit1.name) + " lost the round by " + str(loser_diff) + '\n')
    if adj_w_unit1 == adj_w_unit2:
        # Drawn round: no break test, combat simply continues.
        log.write("Round was a draw" + '\n')
        return Return_cc_round(winner=None, combat_continues=True)
    # Loser tests against Ld reduced by the margin of defeat.
    break_value = loser.Ld - loser_diff
    break_result = ld_test(break_value)
    log.write(str(loser.name) + " break test against modified Ld " + str(break_value) + '\n')
    log.write("Roll: " + str(break_result.roll) + '\n')
    log.write("Break test result: " + break_result.result + '\n')
    if break_result.result == "Fail":
        combat_continues = False
    else:
        combat_continues = True
    log.write("Combat Round ends" + '\n')
    return Return_cc_round(winner=winner, combat_continues=combat_continues)
# Temp class for returning overall winner and number of rounds of fighting
class Return_close_combat(object):
    """Final result of a full close combat.

    winner -- the winning Unit, or None if both were wiped out
    round  -- number of combat rounds that were fought
    """

    def __init__(self, winner, round):
        self.winner = winner
        self.round = round

    def __repr__(self):
        # Added for debuggability; does not affect any existing caller.
        return (type(self).__name__ + "(winner=" + repr(self.winner)
                + ", round=" + repr(self.round) + ")")
# Continues the combat by iterating cc_round until one unit fails a break test or is wiped out
def close_combat(unit1, unit2, log):
    """Fight successive combat rounds until the combat ends.

    Repeatedly calls cc_round (which mutates both units' model counts)
    until combat_continues comes back False.  Returns a
    Return_close_combat with the overall winner (None if both units
    were destroyed) and the number of rounds fought.
    """
    cont = True
    # "rounds" instead of "round" to avoid shadowing the builtin.
    rounds = 0
    # cont tracks combat_continues from the last round; the loop exits
    # once a unit is wiped out or breaks.
    while cont:
        rounds = rounds + 1
        log.write("Combat Round " + str(rounds) + '\n')
        cc_result = cc_round(unit1, unit2, log=log)
        cont = cc_result.combat_continues
        winner = cc_result.winner
    if winner is None:
        log.write("Both units were wiped out" + '\n' + '\n' + '\n')
        return Return_close_combat(winner=None, round=rounds)
    log.write(str(winner.name) + " won the battle" + '\n' + '\n' + '\n')
    return Return_close_combat(winner=winner, round=rounds)
|
{"/wfb_simulation.py": ["/unit_class.py", "/cc_round.py"], "/cc_round.py": ["/roll_dice.py", "/compare_characteristics.py"], "/unit_test.py": ["/roll_dice.py", "/compare_characteristics.py"], "/run.py": ["/unit_class.py", "/wfb_simulation.py"], "/flask_main.py": ["/forms.py", "/unit_class.py", "/wfb_simulation.py", "/read_csv.py"], "/sandbox.py": ["/unit_class.py", "/wfb_simulation.py"]}
|
39,469
|
bosefalk/WFB-simulation
|
refs/heads/master
|
/compare_characteristics.py
|
def to_hit(WS_att, WS_def):
    """Return the D6 roll needed for the attacker to hit in close combat.

    3+ when the attacker's WS is higher, 5+ when the defender's WS is
    more than double the attacker's, 4+ otherwise.
    """
    if WS_att > WS_def:
        return 3
    if WS_def > 2 * WS_att:
        return 5
    return 4
def to_wound(S_att, T_def):
    """Return the D6 roll needed to wound.

    Base 4+ when S equals T; each point of S above T lowers the target
    by one and each point below raises it by one, clamped to 2..6.
    """
    return min(max(4 - (S_att - T_def), 2), 6)
def armour_save(S_att, save):
    """Return the D6 roll needed to pass the armour save.

    The base save is worsened (raised) by one point for every point of
    attacker Strength above 3; the result can never be better than 2+.
    """
    penalty = max(S_att - 3, 0)
    return max(save + penalty, 2)
|
{"/wfb_simulation.py": ["/unit_class.py", "/cc_round.py"], "/cc_round.py": ["/roll_dice.py", "/compare_characteristics.py"], "/unit_test.py": ["/roll_dice.py", "/compare_characteristics.py"], "/run.py": ["/unit_class.py", "/wfb_simulation.py"], "/flask_main.py": ["/forms.py", "/unit_class.py", "/wfb_simulation.py", "/read_csv.py"], "/sandbox.py": ["/unit_class.py", "/wfb_simulation.py"]}
|
39,470
|
bosefalk/WFB-simulation
|
refs/heads/master
|
/unit_test.py
|
import unittest
from roll_dice import *
import random
from statistics import mean
from compare_characteristics import *
# Tests for roll_dice
class roll_dice_test(unittest.TestCase):
    """Tests for roll_dice: return type, fixed-seed values, and that an
    easier target number passes more often on average."""

    def test_return_int(self):
        # The success count must come back as a plain integer.
        successes = roll_dice(random.randint(0, 100), random.randint(0, 10))
        self.assertTrue(type(successes) == int)

    def test_seed1(self):
        # Seed 100: 10 dice needing 4+ yield exactly 7 successes.
        random.seed(100)
        self.assertEqual(roll_dice(10, 4), 7)

    def test_seed2(self):
        # Seed 7124: 100 dice needing 2+ yield exactly 85 successes.
        random.seed(7124)
        self.assertEqual(roll_dice(100, 2), 85)

    def test_greater(self):
        # Averaged over 1000 trials of 10 dice each, a 4+ target must
        # pass more often than a 5+ target.
        passes_on_4 = [roll_dice(10, 4) for _ in range(1000)]
        passes_on_5 = [roll_dice(10, 5) for _ in range(1000)]
        self.assertTrue(mean(passes_on_4) > mean(passes_on_5))
class ld_roll_test(unittest.TestCase):
    """Tests for ld_test: result field types and fixed-seed outcomes."""

    def test_return_int(self):
        # The roll is an integer and the result is a string label.
        outcome = ld_test(random.randint(0, 20))
        self.assertTrue(type(outcome.result) == str)
        self.assertTrue(type(outcome.roll) == int)

    def test_seed1(self):
        # Seed 578 rolls a 5 against Ld 7, which passes.
        random.seed(578)
        outcome = ld_test(7)
        self.assertEqual(outcome.result, "Pass")
        self.assertEqual(outcome.roll, 5)

    def test_result_is_2(self):
        # Seed 2 produces a roll of 2, which passes even against a
        # lower (here negative) Ld value.
        random.seed(2)
        outcome = ld_test(-2)
        self.assertTrue(outcome.result == "Pass")
        self.assertTrue(outcome.roll == 2)
# Unit tests for compare characteristics
class to_hit_test(unittest.TestCase):
    """Tests for the to_hit WS comparison."""

    def test_return_int(self):
        # For any WS pairing in 1..9 the target number is an int in 3..5.
        target = to_hit(random.randint(1, 9), random.randint(1, 9))
        self.assertTrue(type(target) == int)
        self.assertTrue(3 <= target <= 5)

    def test_spot_checks(self):
        # (attacker WS, defender WS) -> expected to-hit roll
        expected = {
            (3, 3): 4, (4, 3): 3, (3, 4): 4, (3, 5): 4, (3, 6): 4,
            (3, 7): 5, (2, 4): 4, (2, 5): 5, (5, 2): 3, (3, 2): 3,
            (4, 2): 3,
        }
        for (ws_att, ws_def), roll in expected.items():
            self.assertEqual(to_hit(ws_att, ws_def), roll)
class to_wound_test(unittest.TestCase):
    """Tests for the to_wound S-vs-T comparison."""

    def test_return_int(self):
        # For any S/T pairing in 1..9 the target number is an int in 2..6.
        target = to_wound(random.randint(1, 9), random.randint(1, 9))
        self.assertTrue(type(target) == int)
        self.assertTrue(2 <= target <= 6)

    def test_spot_checks(self):
        # (attacker S, defender T) -> expected to-wound roll
        expected = {
            (3, 3): 4, (4, 3): 3, (3, 4): 5, (5, 3): 2, (3, 5): 6,
            (6, 3): 2, (3, 6): 6, (7, 3): 2, (3, 7): 6, (8, 3): 2,
            (3, 8): 6, (2, 3): 5, (4, 2): 2,
        }
        for (s_att, t_def), roll in expected.items():
            self.assertEqual(to_wound(s_att, t_def), roll)
class armour_save_test(unittest.TestCase):
    """Tests for the armour_save modifier calculation."""

    def test_return_int(self):
        # Any input combination should still produce an integer.
        target = armour_save(random.randint(0, 10), random.randint(0, 10))
        self.assertTrue(type(target) == int)

    def test_less_than_2(self):
        # A save can never be better than 2+, even from a 1+ base save.
        self.assertTrue(armour_save(3, 1) >= 2)

    def test_spot_checks(self):
        # (attacker S, base save) -> expected modified save roll
        expected = {
            (3, 5): 5, (4, 5): 6, (5, 5): 7, (2, 4): 4, (4, 2): 3,
            (5, 2): 4, (4, 1): 2, (4, -1): 2, (7, -1): 3,
        }
        for (s_att, save), roll in expected.items():
            self.assertEqual(armour_save(s_att, save), roll)
# Script entry point: discover and run all test cases in this module.
if __name__ == '__main__':
    unittest.main()
|
{"/wfb_simulation.py": ["/unit_class.py", "/cc_round.py"], "/cc_round.py": ["/roll_dice.py", "/compare_characteristics.py"], "/unit_test.py": ["/roll_dice.py", "/compare_characteristics.py"], "/run.py": ["/unit_class.py", "/wfb_simulation.py"], "/flask_main.py": ["/forms.py", "/unit_class.py", "/wfb_simulation.py", "/read_csv.py"], "/sandbox.py": ["/unit_class.py", "/wfb_simulation.py"]}
|
39,471
|
bosefalk/WFB-simulation
|
refs/heads/master
|
/unit_class.py
|
# Holds the combat characteristics of a single unit.
class Unit:
    """Combat characteristics for one unit of models.

    Attributes (all set straight from the constructor):
      name     -- display name used in combat logs
      models   -- number of models currently in the unit
      WS       -- Weapon Skill
      S        -- Strength
      T        -- Toughness
      I        -- Initiative
      Sv       -- armour save target number
      Ld       -- Leadership
      standard -- True if the unit carries a standard (default False)

    Example: orc = Unit(name="Orc", models=20, WS=3, S=4, T=3, I=2,
    Sv=6, Ld=7); then orc.WS gives the unit's weapon skill, etc.
    """

    def __init__(self, name, models, WS, S, T, I, Sv, Ld, standard=False):
        self.name = name
        self.models = models
        self.WS = WS
        self.S = S
        self.T = T
        self.I = I
        self.Sv = Sv
        self.Ld = Ld
        self.standard = standard
|
{"/wfb_simulation.py": ["/unit_class.py", "/cc_round.py"], "/cc_round.py": ["/roll_dice.py", "/compare_characteristics.py"], "/unit_test.py": ["/roll_dice.py", "/compare_characteristics.py"], "/run.py": ["/unit_class.py", "/wfb_simulation.py"], "/flask_main.py": ["/forms.py", "/unit_class.py", "/wfb_simulation.py", "/read_csv.py"], "/sandbox.py": ["/unit_class.py", "/wfb_simulation.py"]}
|
39,472
|
bosefalk/WFB-simulation
|
refs/heads/master
|
/run.py
|
from unit_class import Unit
from wfb_simulation import *
# Example driver: pit a large orc mob (with a standard) against a
# smaller but tougher, better-armoured dwarf unit, 10 simulations.
orc = Unit(name="Orc", models=60, WS=3, S=4, T=3, I=3, Sv=6, Ld=7, standard = True)
dwarf = Unit(name="Dwarf", models=20, WS=3, S=3, T=4, I=2, Sv=4, Ld=8)
wfb_simulation(orc, dwarf, 10) # Prints outcomes to result.csv
|
{"/wfb_simulation.py": ["/unit_class.py", "/cc_round.py"], "/cc_round.py": ["/roll_dice.py", "/compare_characteristics.py"], "/unit_test.py": ["/roll_dice.py", "/compare_characteristics.py"], "/run.py": ["/unit_class.py", "/wfb_simulation.py"], "/flask_main.py": ["/forms.py", "/unit_class.py", "/wfb_simulation.py", "/read_csv.py"], "/sandbox.py": ["/unit_class.py", "/wfb_simulation.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.