index int64 | repo_name string | branch_name string | path string | content string | import_graph string |
|---|---|---|---|---|---|
61,644 | p5-vbnekit/p5-python3-aiogoldsrcrcon | refs/heads/main | /setup.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import setuptools
import setuptools.command.build_py
def _make_long_description():
    """Return the package long description read from the adjacent README.md.

    Opened with an explicit UTF-8 encoding so the build does not depend on
    the platform's default locale (the README may contain non-ASCII text).
    """
    with open(os.path.join(os.path.dirname(__file__), "README.md"), "r", encoding="utf-8") as _stream:
        return _stream.read()
def _make_packages():
    """Collect distributable packages beneath ``src``.

    The bare ``p5`` namespace root is dropped: it is only a namespace
    container, not an installable package of its own.
    """
    _found = setuptools.find_namespace_packages(where = "src")
    # raises ValueError if the namespace root is unexpectedly absent
    _found.remove("p5")
    return _found
class _Generators(object):
@classmethod
def get(cls): return tuple()
class _Commands(object):
    """Factories producing customized setuptools command classes."""

    @staticmethod
    def build_py():
        """Build a ``build_py`` subclass that runs registered generators.

        ``run`` performs the stock build first and then invokes each
        generator; ``get_outputs`` extends the stock output list with the
        generated artifact paths so they are tracked by the build.
        """

        class _CustomBuildPy(setuptools.command.build_py.build_py):
            def run(self):
                # noinspection PyNoneFunctionAssignment
                _outcome = super().run()
                for _generator in _Generators.get():
                    _generator(command_interface = self)
                return _outcome

            def get_outputs(self, *args, **kwargs):
                _outputs = super().get_outputs(*args, **kwargs)
                _generated = [
                    os.path.join(self.build_lib, _entry.path)
                    for _entry in _Generators.get()
                ]
                return (type(_outputs))((*_outputs, *_generated))

        return _CustomBuildPy
setuptools.setup(
    name = "p5.aiogoldsrcrcon",
    url = "https://github.com/p5-vbnekit/p5-python3-aiogoldsrcrcon",
    license = "",
    version = "0.0.3",
    author = "Nikita Pushchin",
    author_email = "vbnekit@gmail.com",
    description = "asyncio rcon client for GoldSrc engine",
    long_description = _make_long_description(),
    long_description_content_type = "text/markdown",
    package_dir = {"": "src"},
    packages = _make_packages(),
    cmdclass = {
        "build_py": _Commands.build_py()
    },
    entry_points = {
        "console_scripts": ('p5-aiogoldsrcrcon=p5.aiogoldsrcrcon:entry_point', ),
    },
    # FIX: "asyncio" removed from install_requires. asyncio has been part of
    # the standard library since Python 3.3; the PyPI distribution of that
    # name is an obsolete backport and must not be installed as a dependency.
    install_requires = tuple(),
    setup_requires = ("wheel", )
)
| {"/src/p5/aiogoldsrcrcon/_common/module_helpers/lazy_attributes.py": ["/src/p5/aiogoldsrcrcon/_common/module_helpers/__init__.py"], "/src/p5/aiogoldsrcrcon/__init__.py": ["/src/p5/aiogoldsrcrcon/_common/__init__.py"], "/src/p5/aiogoldsrcrcon/__main__.py": ["/src/p5/aiogoldsrcrcon/__init__.py"], "/src/p5/aiogoldsrcrcon/_entry_point.py": ["/src/p5/aiogoldsrcrcon/__init__.py"]} |
61,645 | p5-vbnekit/p5-python3-aiogoldsrcrcon | refs/heads/main | /src/p5/aiogoldsrcrcon/__init__.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
asyncio rcon client for GoldSrc engine
There's 2 parts
1. python library (p5.aiogoldsrcrcon)
2. executable python script (p5-aiogoldsrcrcon[.exe]) with cli and interactive interface
"""
if "__main__" != __name__:
    # Build module state inside a throw-away function scope so helper names
    # do not leak as public attributes of the package.
    def _private():
        from . _common import module_helpers as _module_helpers_module

        class _Result(object):
            # Lazy attribute resolver:
            #   entry_point -> ._entry_point.execute
            #   Connection  -> .connection.Class
            module_getter = _module_helpers_module.lazy_attributes.make_getter(dictionary = {
                "entry_point": lambda module: getattr(module, "_entry_point").execute,
                "Connection": lambda module: module.connection.Class
            })

        return _Result

    _private = _private()

    # PEP 562 module-level __getattr__: resolve public attributes on demand.
    def __getattr__(name: str): return _private.module_getter(name = name)

    # advertise exactly the lazily-resolvable names
    __all__ = _private.module_getter.keys

    __date__ = None
    __author__ = None
    __version__ = None
    __credits__ = None
    _fields = tuple()
    __bases__ = tuple()
| {"/src/p5/aiogoldsrcrcon/_common/module_helpers/lazy_attributes.py": ["/src/p5/aiogoldsrcrcon/_common/module_helpers/__init__.py"], "/src/p5/aiogoldsrcrcon/__init__.py": ["/src/p5/aiogoldsrcrcon/_common/__init__.py"], "/src/p5/aiogoldsrcrcon/__main__.py": ["/src/p5/aiogoldsrcrcon/__init__.py"], "/src/p5/aiogoldsrcrcon/_entry_point.py": ["/src/p5/aiogoldsrcrcon/__init__.py"]} |
61,646 | p5-vbnekit/p5-python3-aiogoldsrcrcon | refs/heads/main | /src/p5/aiogoldsrcrcon/__main__.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# ``python -m p5.aiogoldsrcrcon`` entry: resolve the package's lazy
# ``entry_point`` attribute (see __init__.py) and run it.
def _private():
    from . import entry_point as _entry_point

    class _Result(object):
        entry_point = _entry_point

    return _Result

_private = _private()

# Run the CLI; drop the helper scope afterwards even if it raises.
try: _private.entry_point()
finally: del _private
| {"/src/p5/aiogoldsrcrcon/_common/module_helpers/lazy_attributes.py": ["/src/p5/aiogoldsrcrcon/_common/module_helpers/__init__.py"], "/src/p5/aiogoldsrcrcon/__init__.py": ["/src/p5/aiogoldsrcrcon/_common/__init__.py"], "/src/p5/aiogoldsrcrcon/__main__.py": ["/src/p5/aiogoldsrcrcon/__init__.py"], "/src/p5/aiogoldsrcrcon/_entry_point.py": ["/src/p5/aiogoldsrcrcon/__init__.py"]} |
61,647 | p5-vbnekit/p5-python3-aiogoldsrcrcon | refs/heads/main | /src/p5/aiogoldsrcrcon/_entry_point.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
if "__main__" != __name__:
def _private():
import io
import os
import sys
import typing
import atexit
import asyncio
import argparse
from . import connection as _connection_module
_interactive_capability = sys.stdin.isatty() and sys.stderr.isatty()
async def _coroutine(address: str, password: str, verbose: bool, transaction_timeout: typing.Optional[float], response_sink: typing.Callable):
    """Drive one interactive rcon session.

    Opens the connection, probes the server with a ``wait`` command (expected
    to produce no output), then forwards every non-empty stdin line as a
    command and hands each response to *response_sink*.

    Raises:
        ConnectionError: if the ``wait`` probe unexpectedly yields output.
    """
    async with _connection_module.make(address = address, password = password) as _connection:
        await _connection.open()
        # With no timeout the raw awaitable is used; otherwise wrap in wait_for.
        if transaction_timeout is None:
            def _execute_command(command: str): return _connection(command = command)
        else:
            assert isinstance(transaction_timeout, float) and (0 < transaction_timeout)
            def _execute_command(command: str): return asyncio.wait_for(_connection(command = command), timeout = transaction_timeout)
        # Handshake: "wait" should return None; anything else is an error.
        _response = await _execute_command(command = "wait")
        try: assert _response is None
        except AssertionError:
            # Collapse the unexpected output into a readable error message.
            _response = tuple([_response for _response in [_response.strip() for _response in _response.splitlines()] if _response])
            if 1 == len(_response): _response = f"unexpected server response: {_response[0]}"
            else:
                with io.StringIO() as _stream:
                    print(f"unexpected server response:", file = _stream)
                    for _response in _response: print(f"> {_response}", file = _stream)
                    _response = _stream.getvalue()
            raise ConnectionError(_response)
        if verbose:
            print("udp client initialized for \"{}:{}\"".format(*address), flush = True, file = sys.stderr)
            if _interactive_capability: print("type your commands here, or press Ctrl+C for exit", flush = True, file = sys.stderr)
        # Main read-eval loop: EOF on stdin terminates the session.
        while True:
            try: _command = input()
            except EOFError: break
            _command = _command.strip()
            if not _command: continue
            if verbose: print(f"sending request, command: {_command}", flush = True, file = sys.stderr)
            _response = await _execute_command(command = _command)
            response_sink(response = ("" if _response is None else _response))
            if verbose: print("request/response transaction finished", flush = True, file = sys.stderr)
def _parse_address(source: str):
assert isinstance(source, str) and bool(source)
assert source == source.strip()
source = source.split(":")
if 1 == len(source): _host, _port = source[0], 27015
else:
_host, _port = ":".join(source[:-1]), int(source[-1])
assert (0 <= _port) and (65536 > _port)
assert _host == _host.strip()
assert bool(_host)
return _host, _port
def _make_response_sink(verbose: bool, original_stdout: bool):
assert isinstance(verbose, bool)
assert isinstance(original_stdout, bool)
_actions = []
def _decorator(delegate: typing.Callable): _actions.append(delegate)
if verbose:
@_decorator
def _action(response: str):
response = tuple([response.strip() for response in response.strip().splitlines()])
if 1 == len(response): print(f"server response: {response[0]}", flush = True, file = sys.stderr)
elif response:
print(f"server response:", flush = True, file = sys.stderr)
for response in response: print(f"> {response}", flush = True, file = sys.stderr)
if not (verbose and sys.stdout.isatty()):
if original_stdout:
@_decorator
def _action(response: str):
if (response): print(response, end = "", flush = True, file = sys.stdout)
else:
@_decorator
def _action(response: str):
for response in [response.strip() for response in response.strip().splitlines()]: print(response, flush = True, file = sys.stdout)
del _decorator
def _result(response: str):
assert isinstance(response, str)
for _action in _actions: _action(response = response)
return _result
def _make_arguments_parser():
_result = argparse.ArgumentParser(
prog = f"{sys.executable} -m {__package__}" if "__main__.py" == os.path.basename(sys.argv[0]) else None,
description = "python3 asyncio rcon client for GoldSrc engine"
)
_result.add_argument("-a", "--address", type = str, help = "server address (aka rcon_address): host[:port]")
_result.add_argument("-p", "--password", type = str, help = "server password (aka rcon_password)")
_result.add_argument("-t", "--transaction-timeout", type = float, default = +0.0e+0, help = "transaction timeout in seconds, 0 for infinite")
_result.add_argument("-o", "--original-stdout", action = "store_true", help = "don't stip server responses for stdout")
_result.add_argument("-q", "--quiet", action = "store_true", help = "brief output, less interactivity and verbosity - suitable for automation")
return _result
def _execute():
    """CLI entry point body.

    Parses arguments, interactively prompts for any missing address/password,
    then runs the asynchronous client loop.  On argument errors the full help
    text is printed at interpreter exit.
    """
    _arguments_parser = _make_arguments_parser()
    _help_message = _arguments_parser.format_help()

    def _parse_arguments():
        # parse_known_args so leftovers can be reported in one error message
        _arguments, _argv = _arguments_parser.parse_known_args()
        if _argv: raise ValueError("unrecognized arguments: %s" % " ".join(_argv))
        return _arguments

    try:
        _parsed_arguments = _parse_arguments()
        _address = _parse_address(source = _parsed_arguments.address) if _parsed_arguments.address else None
        _password = _parsed_arguments.password
        _quiet = _parsed_arguments.quiet
        _original_stdout = _parsed_arguments.original_stdout
        _transaction_timeout = _parsed_arguments.transaction_timeout
        assert isinstance(_transaction_timeout, float)
        # 0 means "no timeout"; anything else must be strictly positive
        if 0 == _transaction_timeout: _transaction_timeout = None
        else: assert 0 < _transaction_timeout
    except BaseException as _exception:
        # Any failure except a clean --help exit schedules help text at exit.
        if not (isinstance(_exception, SystemExit) and (0 == _exception.code)):
            def _at_exit_handler(): print(_help_message, flush = True, file = sys.stderr)
            atexit.register(_at_exit_handler)
            del _at_exit_handler
        raise
    del _arguments_parser, _parsed_arguments
    _verbose = not _quiet
    if _verbose and _interactive_capability:
        # No arguments at all: show help up front for interactive users.
        if not (1 < len(sys.argv)): print(_help_message, flush = True, file = sys.stderr)
        _exit_without_waiting = False

        def _at_exit_handler():
            # "press any key" pause so a double-clicked console stays readable
            if _exit_without_waiting: return
            _prompt_state = False
            try:
                if "nt" == os.name:
                    import msvcrt
                    _prompt_state = True
                    print("press any key for exit", end = "", flush = True, file = sys.stderr)
                    msvcrt.getch()
                    return
                # POSIX: disable canonical mode/echo to read one key silently
                import termios
                _descriptor = sys.stdin.fileno()
                try:
                    _old_tty_attributes = termios.tcgetattr(_descriptor)
                    _new_tty_attributes = termios.tcgetattr(_descriptor)
                    _new_tty_attributes[3] = _new_tty_attributes[3] & ~termios.ICANON & ~termios.ECHO
                    termios.tcsetattr(_descriptor, termios.TCSANOW, _new_tty_attributes)
                except termios.error: return
                _prompt_state = True
                print("press any key for exit", end = "", flush = True, file = sys.stderr)
                try: sys.stdin.read(1)
                except IOError: pass
                finally: termios.tcsetattr(_descriptor, termios.TCSAFLUSH, _old_tty_attributes)
            finally:
                if _prompt_state: print("", flush = True, file = sys.stderr)

        atexit.register(_at_exit_handler)
        del _at_exit_handler
    try:
        if _address is None:
            # Missing --address: prompt on a tty, otherwise read from piped stdin.
            if _interactive_capability: print("server address is not specified with cli interface (--address), enter it: ", end = "", flush = True, file = sys.stderr)
            else: print("server address is not specified with cli interface (--address) and will be obtained from pipe stdin", flush = True, file = sys.stderr)
            try: _address = input()
            except BaseException:
                if _interactive_capability: print("", flush = True, file = sys.stderr)
                raise
            if _address: _address = _address.strip()
            else:
                _address = "localhost:27015"
                print(f"using default server address: {_address}", flush = True, file = sys.stderr)
            _address = _parse_address(source = _address)
        if _password is None:
            if _interactive_capability:
                # getpass keeps the password off the terminal echo
                import getpass
                print("server password is not specified with cli interface (--password), enter it: ", end = "", flush = True, file = sys.stderr)
                try: _password = getpass.getpass(prompt = "")
                except BaseException:
                    print("", flush = True, file = sys.stderr)
                    raise
            else:
                print("server password is not specified with cli interface (--password) and will be obtained from pipe stdin", flush = True, file = sys.stderr)
                _password = input()
        asyncio.get_event_loop().run_until_complete(_coroutine(
            address = _address, password = _password, verbose = _verbose, transaction_timeout = _transaction_timeout,
            response_sink = _make_response_sink(verbose = _verbose, original_stdout = _original_stdout)
        ))
    # Ctrl+C is a normal way out: skip the "press any key" pause
    except KeyboardInterrupt: _exit_without_waiting = True
# Tail of the enclosing ``_private()`` scope: expose only ``execute`` as the
# module's public attribute, then discard the helper namespace.
class _Result(object):
    execute = _execute
return _Result  # returned from the enclosing _private() function
_private = _private()
try: execute = _private.execute
finally: del _private
| {"/src/p5/aiogoldsrcrcon/_common/module_helpers/lazy_attributes.py": ["/src/p5/aiogoldsrcrcon/_common/module_helpers/__init__.py"], "/src/p5/aiogoldsrcrcon/__init__.py": ["/src/p5/aiogoldsrcrcon/_common/__init__.py"], "/src/p5/aiogoldsrcrcon/__main__.py": ["/src/p5/aiogoldsrcrcon/__init__.py"], "/src/p5/aiogoldsrcrcon/_entry_point.py": ["/src/p5/aiogoldsrcrcon/__init__.py"]} |
61,648 | p5-vbnekit/p5-python3-aiogoldsrcrcon | refs/heads/main | /src/p5/aiogoldsrcrcon/_common/__init__.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
if "__main__" != __name__:
    # Wire up lazy submodule access for this helper package.
    def _private():
        from . import module_helpers as _module_helpers_module

        class _Result(object):
            # default getter: resolves attribute names to sibling submodules
            module_getter = _module_helpers_module.lazy_attributes.make_getter()

        return _Result

    _private = _private()

    # PEP 562 module __getattr__: delegate lookups to the lazy getter.
    def __getattr__(name: str): return _private.module_getter(name = name)
| {"/src/p5/aiogoldsrcrcon/_common/module_helpers/lazy_attributes.py": ["/src/p5/aiogoldsrcrcon/_common/module_helpers/__init__.py"], "/src/p5/aiogoldsrcrcon/__init__.py": ["/src/p5/aiogoldsrcrcon/_common/__init__.py"], "/src/p5/aiogoldsrcrcon/__main__.py": ["/src/p5/aiogoldsrcrcon/__init__.py"], "/src/p5/aiogoldsrcrcon/_entry_point.py": ["/src/p5/aiogoldsrcrcon/__init__.py"]} |
61,649 | p5-vbnekit/p5-python3-aiogoldsrcrcon | refs/heads/main | /src/p5/aiogoldsrcrcon/_common/module_helpers/__init__.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
if "__main__" != __name__:
    # Bootstrap: lazy_attributes provides the getter used by every other
    # package __init__ in this project.
    def _private():
        from . import lazy_attributes as _lazy_attributes_module

        class _Result(object):
            module_getter = _lazy_attributes_module.make_getter()

        return _Result

    _private = _private()

    # PEP 562 module __getattr__: delegate lookups to the lazy getter.
    def __getattr__(name: str): return _private.module_getter(name = name)
| {"/src/p5/aiogoldsrcrcon/_common/module_helpers/lazy_attributes.py": ["/src/p5/aiogoldsrcrcon/_common/module_helpers/__init__.py"], "/src/p5/aiogoldsrcrcon/__init__.py": ["/src/p5/aiogoldsrcrcon/_common/__init__.py"], "/src/p5/aiogoldsrcrcon/__main__.py": ["/src/p5/aiogoldsrcrcon/__init__.py"], "/src/p5/aiogoldsrcrcon/_entry_point.py": ["/src/p5/aiogoldsrcrcon/__init__.py"]} |
61,650 | p5-vbnekit/p5-python3-aiogoldsrcrcon | refs/heads/main | /example.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import asyncio
import p5.aiogoldsrcrcon
async def _coroutine():
    """Open an rcon connection, run ``status`` once, and print the reply."""
    async with p5.aiogoldsrcrcon.Connection(address = ("hlds.host.address.example.com", 27015), password = "super-secret-rcon-password") as _connection:
        await _connection.open()
        _response = await _connection.execute(command = "status")
        print(_response.strip())

# Bound the whole demo to 3 seconds so a dead host cannot hang the script.
asyncio.get_event_loop().run_until_complete(asyncio.wait_for(_coroutine(), timeout = 3))
| {"/src/p5/aiogoldsrcrcon/_common/module_helpers/lazy_attributes.py": ["/src/p5/aiogoldsrcrcon/_common/module_helpers/__init__.py"], "/src/p5/aiogoldsrcrcon/__init__.py": ["/src/p5/aiogoldsrcrcon/_common/__init__.py"], "/src/p5/aiogoldsrcrcon/__main__.py": ["/src/p5/aiogoldsrcrcon/__init__.py"], "/src/p5/aiogoldsrcrcon/_entry_point.py": ["/src/p5/aiogoldsrcrcon/__init__.py"]} |
61,651 | p5-vbnekit/p5-python3-aiogoldsrcrcon | refs/heads/main | /src/p5/aiogoldsrcrcon/_common/module_helpers/import_module.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
if "__main__" != __name__:
    def _private():
        import sys
        import importlib

        _this_module = sys.modules[__name__]

        # Thin wrapper around importlib so the module exposes one entry point.
        def _execute(*args, **kwargs): return importlib.import_module(*args, **kwargs)

        class _Callable(_this_module.__class__):
            def __call__(self, *args, **kwargs): return _execute(*args, **kwargs)

        # Swap the module's class so ``module(...)`` forwards to import_module.
        _this_module.__class__ = _Callable

        class _Result(object):
            execute = _execute

        return _Result

    _private = _private()
    execute = _private.execute
    del _private
| {"/src/p5/aiogoldsrcrcon/_common/module_helpers/lazy_attributes.py": ["/src/p5/aiogoldsrcrcon/_common/module_helpers/__init__.py"], "/src/p5/aiogoldsrcrcon/__init__.py": ["/src/p5/aiogoldsrcrcon/_common/__init__.py"], "/src/p5/aiogoldsrcrcon/__main__.py": ["/src/p5/aiogoldsrcrcon/__init__.py"], "/src/p5/aiogoldsrcrcon/_entry_point.py": ["/src/p5/aiogoldsrcrcon/__init__.py"]} |
61,664 | cloudmesh/cloudmesh-nn | refs/heads/main | /service/service.py | from flask import jsonify
import connexion
import os
from pathlib import Path
from cloudmesh.nn.spec import specification_dir
print(specification_dir)
# Create the application instance
app = connexion.App(__name__, specification_dir=specification_dir)
# Read the yaml file to configure the endpoints
app.add_api("nn.yaml")
# create a URL route in our application for "/"
@app.route("/")
def home():
    """Root endpoint: return a small JSON banner identifying the service."""
    return jsonify({"msg": "Cloudmesh Machine Learning Service"})
if __name__ == "__main__":
    # Local development server only; debug=True must not reach production.
    app.run(host="127.0.0.1",
            port=8080,
            debug=True)
class Manager(object):
    """Placeholder manager that traces its own lifecycle to stdout."""

    def __init__(self):
        # announce construction with the concrete class name
        print("init {name}".format(name=type(self).__name__))

    def list(self, parameter):
        """Echo the requested listing parameter."""
        print("list", parameter)
| {"/service/service.py": ["/cloudmesh/nn/spec/__init__.py"], "/cloudmesh/nn/service/partition.py": ["/cloudmesh/nn/service/__init__.py"], "/cloudmesh/nn/command/nn.py": ["/service/__init__.py"], "/cloudmesh/nn/service/control.py": ["/cloudmesh/nn/service/data.py"], "/cloudmesh/nn/service/data.py": ["/cloudmesh/nn/service/__init__.py"]} |
61,665 | cloudmesh/cloudmesh-nn | refs/heads/main | /cloudmesh/nn/service/train.py | import requests
import numpy as np
from sklearn.externals.joblib import Memory
from sklearn.datasets import load_svmlight_file
from sklearn.svm import SVC
from os import listdir
from flask import Flask, request
from flask import jsonify
def get_data(filename):
    """Load an svmlight-format file; return the (features, labels) pair."""
    _loaded = load_svmlight_file(filename)
    return _loaded[0], _loaded[1]
def gettraindata():
    """Load the bundled training split.

    NOTE(review): the loaded arrays are discarded and a literal string is
    returned instead -- this looks like a placeholder endpoint; confirm
    whether the arrays should be serialized into the response.
    """
    Xtrain, ytrain = get_data("data/testnew_train_25.0")
    return "Return Xtrain and Ytrain arrays"
| {"/service/service.py": ["/cloudmesh/nn/spec/__init__.py"], "/cloudmesh/nn/service/partition.py": ["/cloudmesh/nn/service/__init__.py"], "/cloudmesh/nn/command/nn.py": ["/service/__init__.py"], "/cloudmesh/nn/service/control.py": ["/cloudmesh/nn/service/data.py"], "/cloudmesh/nn/service/data.py": ["/cloudmesh/nn/service/__init__.py"]} |
61,666 | cloudmesh/cloudmesh-nn | refs/heads/main | /cloudmesh/nn/service/svm.py | import requests
import numpy as np
from sklearn.externals.joblib import Memory
from sklearn.datasets import load_svmlight_file
from sklearn.svm import SVC
from os import listdir
from flask import Flask, request
from flask import jsonify
def get_data(filename):
    """Read an svmlight file and yield the (sample matrix, label vector) pair."""
    _features, _labels = load_svmlight_file(filename)
    return _features, _labels
def svm():
    """Train a linear SVC on the bundled iris split and report test accuracy.

    Prints one line per test sample plus the final accuracy, and returns the
    accuracy as a human-readable string.
    """
    Xtrain, ytrain = get_data("data/iris.scale_train")
    Xtest, ytest = get_data("data/iris.scale_test")
    clf = SVC(gamma=0.001, C=100, kernel='linear')
    clf.fit(Xtrain, ytrain)
    test_size = Xtest.shape[0]
    # FIX: local variable typo corrected ("accuarcy_holder" -> "accuracy_holder")
    accuracy_holder = []
    for i in range(0, test_size):
        prediction = clf.predict(Xtest[i])
        print("Prediction from SVM: " + str(
            prediction) + ", Expected Label : " + str(ytest[i]))
        accuracy_holder.append(prediction == ytest[i])
    correct_predictions = sum(accuracy_holder)
    print(correct_predictions)
    total_samples = test_size
    accuracy = float(float(correct_predictions) / float(total_samples)) * 100
    print("Prediction Accuracy: " + str(accuracy))
    return "Prediction Accuracy: " + str(accuracy)
| {"/service/service.py": ["/cloudmesh/nn/spec/__init__.py"], "/cloudmesh/nn/service/partition.py": ["/cloudmesh/nn/service/__init__.py"], "/cloudmesh/nn/command/nn.py": ["/service/__init__.py"], "/cloudmesh/nn/service/control.py": ["/cloudmesh/nn/service/data.py"], "/cloudmesh/nn/service/data.py": ["/cloudmesh/nn/service/__init__.py"]} |
61,667 | cloudmesh/cloudmesh-nn | refs/heads/main | /cloudmesh/nn/service/partition.py | import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from scipy.spatial import distance
import requests
from flask import Flask, request, jsonify, send_file, make_response
import random
from numpy.random import permutation
from sklearn.neighbors import KNeighborsRegressor
import math
import io
from cloudmesh.nn.service import code_dir
def data_selection(filename, player_selection):
    """Return the five players statistically closest to *player_selection*.

    Closeness is Euclidean distance over the z-score-normalized numeric
    columns of the CSV at *filename* (must contain a "Player" column).
    """
    data_dir = code_dir + '/data/'
    # NOTE(review): ``file`` is built but never used -- the open() below uses
    # the raw ``filename`` argument, and the caller ``selection`` already
    # passes a full path.  Confirm and remove these two dead lines.
    file = data_dir + filename
    with open(filename, 'r') as csvfile:
        my_file = pd.read_csv(csvfile)
    nfl = my_file
    nfl_numeric = nfl.select_dtypes(include=[np.number])
    # z-score normalize; NaNs become 0 so they do not poison the distances
    nfl_normalized = (nfl_numeric - nfl_numeric.mean()) / nfl_numeric.std()
    nfl_normalized.fillna(0, inplace=True)
    player_normalized = nfl_normalized[nfl["Player"] == str(player_selection)]
    euclidean_distances = nfl_normalized.apply(
        lambda row: distance.euclidean(row, player_normalized), axis=1)
    distance_frame = pd.DataFrame(
        data={"dist": euclidean_distances, "idx": euclidean_distances.index})
    distance_frame.sort_values("dist", inplace=True)
    # index 0 is the selected player himself; indices 1..5 are the neighbors
    second_smallest = distance_frame.iloc[1]["idx"]
    five_smallest = [distance_frame.iloc[1]["idx"],
                     distance_frame.iloc[2]["idx"],
                     distance_frame.iloc[3]["idx"],
                     distance_frame.iloc[4]["idx"],
                     distance_frame.iloc[5]["idx"]]
    lst = []
    i = 0
    for i in range(5):
        players = (nfl.iloc[int(five_smallest[i])]["Player"])
        lst.append(players)
    return lst
def selection(filename, player_selection):
    """Resolve *filename* inside the bundled ``data/`` directory, then delegate."""
    target = str(player_selection)
    csv_path = code_dir + '/data/' + filename
    return data_selection(csv_path, target)
def nfl_knn(filename):
    """Fit a 7-neighbor KNN regressor predicting "Forty" from ``data/<filename>``.

    NOTE(review): ``mse`` and the scatter plot are computed but discarded and
    the function returns None -- looks like work in progress; confirm intent.
    """
    my_file = pd.read_csv('data/' + filename)
    nfl = my_file
    nfl_numeric = nfl.select_dtypes(include=[np.number])
    nfl_normalized = (nfl_numeric - nfl_numeric.mean()) / nfl_numeric.std()
    nfl_normalized.fillna(0, inplace=True)
    # Randomly shuffle the index of nba.
    random_indices = permutation(nfl_normalized.index)
    # Set a cutoff for how many items we want in the test set (in this case
    # 1/3 of the items)
    test_cutoff = math.floor(len(nfl_normalized) / 3)
    # Generate the test set by taking the first 1/3 of the randomly shuffled
    # indices.
    test = nfl_normalized.loc[random_indices[1:test_cutoff]]
    # Generate the train set with the rest of the data.
    train = nfl_normalized.loc[random_indices[test_cutoff:]]
    # Use sklearn
    nfl_normalized.fillna(0, inplace=True)
    distance_columns = nfl_normalized.head(0)
    predict = "Forty"
    y_columns = predict
    distance_columns.drop([predict], axis=1, inplace=True)
    x_columns = distance_columns
    # NOTE(review): this shuffle/split repeats the block above with a fresh
    # permutation, silently discarding the first split -- deduplicate.
    random_indices = permutation(nfl_normalized.index)
    test_cutoff = math.floor(len(nfl_normalized) / 3)
    test = nfl_normalized.loc[random_indices[1:test_cutoff]]
    train = nfl_normalized.loc[random_indices[test_cutoff:]]
    knn = KNeighborsRegressor(n_neighbors=7)
    x_train = np.nan_to_num(train[x_columns])
    y_train = np.nan_to_num(train[y_columns])
    x_test = np.nan_to_num(test[x_columns])
    y_test = np.nan_to_num(test[y_columns])
    knn.fit(x_train, y_train)
    predictions = knn.predict(x_test)
    # predictions
    actual = y_test
    mse = (((predictions - actual) ** 2).sum()) / len(predictions)
    img = plt.scatter(predictions, actual)
    return
def nfl_knn_results(filename):
    """Fit a 12-neighbor KNN regressor for "Forty" and return a PNG boxplot.

    Same pipeline as ``nfl_knn`` but renders predictions vs. actuals as a
    boxplot into an in-memory PNG buffer, rewound to position 0 for sending.
    """
    my_file = pd.read_csv('data/' + filename)
    nfl = my_file
    nfl_numeric = nfl.select_dtypes(include=[np.number])
    nfl_normalized = (nfl_numeric - nfl_numeric.mean()) / nfl_numeric.std()
    nfl_normalized.fillna(0, inplace=True)
    # Randomly shuffle the index of nba.
    random_indices = permutation(nfl_normalized.index)
    # Set a cutoff for how many items we want in the test set (in this case
    # 1/3 of the items)
    test_cutoff = math.floor(len(nfl_normalized) / 3)
    # Generate the test set by taking the first 1/3 of the randomly shuffled
    # indices.
    test = nfl_normalized.loc[random_indices[1:test_cutoff]]
    # Generate the train set with the rest of the data.
    train = nfl_normalized.loc[random_indices[test_cutoff:]]
    # Use sklearn
    nfl_normalized.fillna(0, inplace=True)
    distance_columns = nfl_normalized.head(0)
    predict = "Forty"
    y_columns = predict
    distance_columns.drop([predict], axis=1, inplace=True)
    x_columns = distance_columns
    # NOTE(review): duplicate shuffle/split -- the first one above is unused.
    random_indices = permutation(nfl_normalized.index)
    test_cutoff = math.floor(len(nfl_normalized) / 3)
    test = nfl_normalized.loc[random_indices[1:test_cutoff]]
    train = nfl_normalized.loc[random_indices[test_cutoff:]]
    knn = KNeighborsRegressor(n_neighbors=12)
    x_train = np.nan_to_num(train[x_columns])
    y_train = np.nan_to_num(train[y_columns])
    x_test = np.nan_to_num(test[x_columns])
    y_test = np.nan_to_num(test[y_columns])
    knn.fit(x_train, y_train)
    predictions = knn.predict(x_test)
    # predictions
    actual = y_test
    # NOTE(review): mse is computed but unused here as well.
    mse = (((predictions - actual) ** 2).sum()) / len(predictions)
    new = [predictions, actual]
    plt.boxplot(new)
    bytes_image = io.BytesIO()
    # bytes_image
    plt.savefig(bytes_image, format='png')
    bytes_image.seek(0)
    return bytes_image
def nfl_knn_results_boxplot(filename):
    """Serve the KNN prediction-vs-actual boxplot for *filename* as a PNG."""
    plot_bytes = nfl_knn_results(filename)
    response = send_file(plot_bytes,
                         attachment_filename='plot.png',
                         mimetype='image/png')
    return response
| {"/service/service.py": ["/cloudmesh/nn/spec/__init__.py"], "/cloudmesh/nn/service/partition.py": ["/cloudmesh/nn/service/__init__.py"], "/cloudmesh/nn/command/nn.py": ["/service/__init__.py"], "/cloudmesh/nn/service/control.py": ["/cloudmesh/nn/service/data.py"], "/cloudmesh/nn/service/data.py": ["/cloudmesh/nn/service/__init__.py"]} |
61,668 | cloudmesh/cloudmesh-nn | refs/heads/main | /cloudmesh/nn/service/__init__.py | import os
code_dir = os.path.dirname(__file__)
| {"/service/service.py": ["/cloudmesh/nn/spec/__init__.py"], "/cloudmesh/nn/service/partition.py": ["/cloudmesh/nn/service/__init__.py"], "/cloudmesh/nn/command/nn.py": ["/service/__init__.py"], "/cloudmesh/nn/service/control.py": ["/cloudmesh/nn/service/data.py"], "/cloudmesh/nn/service/data.py": ["/cloudmesh/nn/service/__init__.py"]} |
61,669 | cloudmesh/cloudmesh-nn | refs/heads/main | /cloudmesh/nn/spec/__init__.py | import os
specification_dir = os.path.dirname(__file__)
| {"/service/service.py": ["/cloudmesh/nn/spec/__init__.py"], "/cloudmesh/nn/service/partition.py": ["/cloudmesh/nn/service/__init__.py"], "/cloudmesh/nn/command/nn.py": ["/service/__init__.py"], "/cloudmesh/nn/service/control.py": ["/cloudmesh/nn/service/data.py"], "/cloudmesh/nn/service/data.py": ["/cloudmesh/nn/service/__init__.py"]} |
61,670 | cloudmesh/cloudmesh-nn | refs/heads/main | /cloudmesh/nn/command/nn.py | from cloudmesh.shell.command import command
from cloudmesh.shell.command import PluginCommand
from cloudmesh.nn.api.manager import Manager
from cloudmesh.common.console import Console
from cloudmesh.common.util import path_expand
from pprint import pprint
import os
from service import server_dir
class NnCommand(PluginCommand):
    # Cloudmesh shell plugin.  The docstring of do_nn is parsed by docopt via
    # the @command decorator, so its Usage section IS the CLI grammar -- do
    # not reword it casually.

    # noinspection PyUnusedLocal
    @command
    def do_nn(self, args, arguments):
        """
        ::

            Usage:
                nn start
                nn stop

            This command does some useful things.

            Arguments:
                FILE   a file name

            Options:
                -f      specify the file

        """
        m = Manager()
        if arguments.start:
            print("Cloudmesh ML/AI server starting")
            my_path = os.getcwd()
            print(server_dir)
            os.chdir(server_dir)
            # Blocks here until the Flask/connexion service process exits.
            os.system("python service.py")
            # NOTE(review): FILE is not part of the Usage grammar above, so
            # ``arguments.FILE`` is presumably always empty -- confirm.
            m.list(path_expand(arguments.FILE))
        elif arguments.stop:
            print("option b")
            m.list("just calling list without parameter")
        Console.error("This is just a sample")
        return ""
| {"/service/service.py": ["/cloudmesh/nn/spec/__init__.py"], "/cloudmesh/nn/service/partition.py": ["/cloudmesh/nn/service/__init__.py"], "/cloudmesh/nn/command/nn.py": ["/service/__init__.py"], "/cloudmesh/nn/service/control.py": ["/cloudmesh/nn/service/data.py"], "/cloudmesh/nn/service/data.py": ["/cloudmesh/nn/service/__init__.py"]} |
61,671 | cloudmesh/cloudmesh-nn | refs/heads/main | /cloudmesh/nn/service/nfl_2019.py | import sys
import os
import pandas
import numpy as np
import matplotlib.pyplot as plt
from scipy.spatial import distance
# prompt user for file name to read, assuming this is a csv
pwd = os.getcwd()
pwd = str(pwd) + "/"
file_name = input("Enter the name of the file including the extension: ")
file_path = str(pwd) + str(file_name)
print(file_path)
with open(str(file_path), 'r') as csvfile:
    my_file = pandas.read_csv(csvfile)
player_selection = input("Enter the player name you want to explore: ")
nfl = my_file
# Manipulate file for the nfl example
nfl_numeric = nfl.select_dtypes(include=[np.number])
# z-score normalize the numeric columns and zero-fill NaNs before distances
nfl_normalized = (nfl_numeric - nfl_numeric.mean()) / nfl_numeric.std()
nfl_normalized.fillna(0, inplace=True)
player_normalized = nfl_normalized[nfl["Player"] == str(player_selection)]
euclidean_distances = nfl_normalized.apply(
    lambda row: distance.euclidean(row, player_normalized), axis=1)
def euclidean_distance(row, selected_player):
    """Row-wise Euclidean distance between *row* and *selected_player*.

    Expects 2-D numeric inputs: squares the element-wise differences, sums
    across columns (axis=1) and takes the square root per row.
    """
    delta = row - selected_player
    return ((delta ** 2).sum(axis=1)) ** 0.5
# NOTE(review): computed but never used -- the script relies on the
# scipy-based ``euclidean_distances`` computed above instead.  Confirm, drop.
nfl_dist = euclidean_distance(nfl_normalized, player_normalized)
# Create a new dataframe with distances.
distance_frame = pandas.DataFrame(
    data={"dist": euclidean_distances, "idx": euclidean_distances.index})
distance_frame.sort_values("dist", inplace=True)
# index 0 is the selected player himself; 1..5 are the nearest neighbors
second_smallest = distance_frame.iloc[1]["idx"]
five_smallest = [distance_frame.iloc[1]["idx"], distance_frame.iloc[2]["idx"],
                 distance_frame.iloc[3]["idx"],
                 distance_frame.iloc[4]["idx"], distance_frame.iloc[5]["idx"]]
# NOTE(review): ``lst`` is pre-allocated as an array but then rebound to a
# single value on every iteration -- likely meant ``lst[i] = ...``; confirm.
lst = np.zeros(5)
i = 0
for i in range(5):
    lst = (nfl.iloc[int(five_smallest[i])]["Player"])
    print(i, lst)
"""
def euclidean_distance(row, selected_player):
diff = row - selected_player
squares = diff ** 2
sum_squares = squares.sum(axis=1)
sqrt_squares = sum_squares ** 0.5
return sqrt_squares
# look at it to make sure we have a real data set
# We need to extract only the numeric volumns for obvious reasons
nfl_numeric = nfl.select_dtypes(include=[np.number])
distance_columns = list(nfl_numeric.head(0))
selected_player = nfl_numeric[nfl["Player"] == "Julio Jones"].iloc[0]
# Test box plot
# Normalize all of the numeric columns
nfl_normalized = (nfl_numeric - nfl_numeric.mean()) / nfl_numeric.std()
# Fill in NA values in nfl_normalized
nfl_normalized.fillna(0, inplace=True)
# Find the normalized vector for lebron james.
brady_normalized = nfl_normalized[nfl["Player"] == "Julio Jones"]
# Find the distance between lebron james and everyone else.
euclidean_distances = nfl_normalized.apply(lambda row: distance.euclidean(row, brady_normalized), axis=1)
# Create a new dataframe with distances.
distance_frame = pandas.DataFrame(data={"dist": euclidean_distances, "idx": euclidean_distances.index})
distance_frame.sort_values("dist", inplace=True)
# Find the most similar player to lebron (the lowest distance to lebron is lebron,
# the second smallest is the most similar non-lebron player)
second_smallest = distance_frame.iloc[1]["idx"]
five_smallest = [distance_frame.iloc[1]["idx"], distance_frame.iloc[2]["idx"], distance_frame.iloc[3]["idx"],
distance_frame.iloc[4]["idx"], distance_frame.iloc[5]["idx"]]
lst = np.zeros(5)
i=0
for i in range(5):
lst = (nfl.iloc[int(five_smallest[i])]["Player"])
print(i, lst)
most_similar_to_brady = nfl.iloc[int(second_smallest)]["Player"]
print("The player most similar to %s is: %s" % ("Tom Brady", most_similar_to_brady))
"""
| {"/service/service.py": ["/cloudmesh/nn/spec/__init__.py"], "/cloudmesh/nn/service/partition.py": ["/cloudmesh/nn/service/__init__.py"], "/cloudmesh/nn/command/nn.py": ["/service/__init__.py"], "/cloudmesh/nn/service/control.py": ["/cloudmesh/nn/service/data.py"], "/cloudmesh/nn/service/data.py": ["/cloudmesh/nn/service/__init__.py"]} |
61,672 | cloudmesh/cloudmesh-nn | refs/heads/main | /cloudmesh/nn/service/control.py | from flask import Flask, send_file, make_response
from cloudmesh.nn.service.data import generate_figure
def create_boxplot():
    """Serve the generated matplotlib figure as a downloadable PNG response.

    NOTE(review): ``generate_figure`` (data.py) is defined with a required
    ``filename`` parameter, so this zero-argument call looks like it would
    raise TypeError at runtime -- confirm which signature is current.
    """
    bytes_obj = generate_figure()
    return send_file(bytes_obj,
                     attachment_filename='plot.png',
                     mimetype='image/png')
| {"/service/service.py": ["/cloudmesh/nn/spec/__init__.py"], "/cloudmesh/nn/service/partition.py": ["/cloudmesh/nn/service/__init__.py"], "/cloudmesh/nn/command/nn.py": ["/service/__init__.py"], "/cloudmesh/nn/service/control.py": ["/cloudmesh/nn/service/data.py"], "/cloudmesh/nn/service/data.py": ["/cloudmesh/nn/service/__init__.py"]} |
61,673 | cloudmesh/cloudmesh-nn | refs/heads/main | /cloudmesh/nn/service/data.py | # This is the data downloading module and it also includes plotting defs
import requests
import io
import os
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.externals.joblib import Memory
from sklearn.datasets import load_svmlight_file
from sklearn.svm import SVC
from os import listdir
from flask import Flask, request, send_file, make_response
from cloudmesh.nn.service import code_dir
from pymongo import MongoClient
# url = 'https://drive.google.com/file/d/1ge5hCVEcSh57XKCh3CVY3GcnHc6WETpA/view?usp=sharing'
# url = 'https://drive.google.com/uc?export=download&id=12u9eviakwqiqsz7x8Sp1ybG9uBaF9bJV'
# url = 'https://www.csie.ntu.edu.tw/~cjlin/libsvmtools/datasets/multiclass/glass.scale'
# Obviously we should use a text file and a post to get this value and read it in here.
#url = 'https://drive.google.com/uc?export=download&id=1ge5hCVEcSh57XKCh3CVY3GcnHc6WETpA'
# This implementation allows the user to place a file in called input.txt in the dir called input
# The structure of this could be improved
def get_url():
    """Read the download URL from ``<code_dir>/input/input.txt``.

    Returns:
        str: the file's contents with trailing whitespace stripped.
    """
    input_path = code_dir + '/input/input.txt'
    # Context manager replaces the original manual open()/close() pair and
    # guarantees the handle is released even if read() raises.
    with open(input_path, "rt") as input_file:
        contents = input_file.read()
    return contents.rstrip()
def new_download(filename):
    """Download the configured URL (see get_url) into *filename*."""
    url = get_url()
    r = requests.get(url, allow_redirects=True)
    # BUG FIX: the original left the file handle unclosed; 'with' closes it
    # deterministically instead of relying on garbage collection.
    with open(filename, 'wb') as out:
        out.write(r.content)
def download_data(url, filename):
    """Download *url* into *filename* (follows redirects)."""
    r = requests.get(url, allow_redirects=True)
    # BUG FIX: close the output file deterministically (was leaked).
    with open(filename, 'wb') as out:
        out.write(r.content)
    return
def download(output):
    """Fetch the configured URL into ``<code_dir>/data/<output>``.

    Returns:
        str: a short human-readable status message.
    """
    data_dir = code_dir + '/data/'
    output_file = data_dir + output
    new_download(filename=output_file)
    # BUG FIX: the original message lacked spaces ("...Downloadedto...").
    return str(output) + " Downloaded to " + str(code_dir)
def upload(uploadfile):
    """Load a CSV from the data dir into MongoDB (db ``nfl``, collection ``players``).

    Returns None on success, or a message string when the ``nfl`` database
    already exists (in which case nothing is inserted).
    """
    client = MongoClient()
    dbname = client.list_database_names()
    if 'nfl' not in dbname:
        db = client.nfl
        # Removed unused local `players = db.items` from the original.
        df = pd.read_csv(code_dir + '/data/' + uploadfile)
        records_ = df.to_dict(orient='records')
        db.players.insert_many(records_)
        return
    else:
        return "nfl database with players collection already exist"
def generate_figure(filename):
    """Boxplot the numeric columns of a CSV in the data dir; return PNG bytes."""
    csv_path = code_dir + '/data/' + filename
    with open(csv_path, 'r') as handle:
        frame = pd.read_csv(handle)
    numeric_part = frame.select_dtypes(include=[np.number])
    numeric_part.boxplot()
    png_buffer = io.BytesIO()
    plt.savefig(png_buffer, format='png')
    png_buffer.seek(0)
    return png_buffer
def generate_figureNorm(filename):
    """Boxplot the z-score-normalised numeric columns of a CSV; return PNG bytes."""
    csv_path = code_dir + '/data/' + filename
    with open(csv_path, 'r') as handle:
        frame = pd.read_csv(handle)
    numeric_part = frame.select_dtypes(include=[np.number])
    # Standardise each column: (x - mean) / std.
    normalised = (numeric_part - numeric_part.mean()) / numeric_part.std()
    normalised.boxplot()
    png_buffer = io.BytesIO()
    plt.savefig(png_buffer, format='png')
    png_buffer.seek(0)
    return png_buffer
def create_boxplot(filename):
    """Serve the raw-data boxplot of *filename* as a PNG attachment."""
    figure_bytes = generate_figure(filename)
    return send_file(
        figure_bytes,
        attachment_filename='plot.png',
        mimetype='image/png')
def create_boxplotNorm(filename):
    """Serve the normalised boxplot of *filename* as a PNG attachment."""
    figure_bytes = generate_figureNorm(filename)
    return send_file(
        figure_bytes,
        attachment_filename='plot.png',
        mimetype='image/png')
| {"/service/service.py": ["/cloudmesh/nn/spec/__init__.py"], "/cloudmesh/nn/service/partition.py": ["/cloudmesh/nn/service/__init__.py"], "/cloudmesh/nn/command/nn.py": ["/service/__init__.py"], "/cloudmesh/nn/service/control.py": ["/cloudmesh/nn/service/data.py"], "/cloudmesh/nn/service/data.py": ["/cloudmesh/nn/service/__init__.py"]} |
61,674 | cloudmesh/cloudmesh-nn | refs/heads/main | /tests/test_nn_service.py | from cloudmesh.management.configuration.config import Config
from cloudmesh.management.debug import myself, HEADING
from pprint import pprint
# nosetests -v --nocapture tests/test_nn_service.py
class TestConfig:
    """Smoke tests for cloudmesh Config loading and dict-style access."""
    def setup(self):
        # A fresh Config is built before each test method.
        self.config = Config()
    def test_00_config(self):
        """Config loads and exposes a dict representation."""
        HEADING(myself())
        pprint(self.config.dict())
        print(self.config)
        print(type(self.config.data))
        # pprint(config.credentials('local'))
        assert self.config is not None
        # assert 'cloud' in config.cloud
    def test_10_config_print(self):
        """Config is printable without raising."""
        HEADING(myself())
        print(self.config)
        assert True is True
    def test_20_config_subscriptable(self):
        """Nested keys are reachable with chained [] access."""
        HEADING(myself())
        data = self.config["cloudmesh"]["data"]["mongo"]
        assert data is not None
| {"/service/service.py": ["/cloudmesh/nn/spec/__init__.py"], "/cloudmesh/nn/service/partition.py": ["/cloudmesh/nn/service/__init__.py"], "/cloudmesh/nn/command/nn.py": ["/service/__init__.py"], "/cloudmesh/nn/service/control.py": ["/cloudmesh/nn/service/data.py"], "/cloudmesh/nn/service/data.py": ["/cloudmesh/nn/service/__init__.py"]} |
61,675 | cloudmesh/cloudmesh-nn | refs/heads/main | /cloudmesh/nn/service/test.py | import requests
import numpy as np
from sklearn.externals.joblib import Memory
from sklearn.datasets import load_svmlight_file
from sklearn.svm import SVC
from os import listdir
from flask import Flask, request
from flask import jsonify
def get_data(filename):
    """Load an svmlight-format file and return (features, labels)."""
    features, labels = load_svmlight_file(filename)
    return features, labels
def gettestdata():
    """Load the bundled 25% test split and report success."""
    _features, _labels = get_data("data/testnew_test_25.0")
    return "Return Xtest and Ytest arrays"
| {"/service/service.py": ["/cloudmesh/nn/spec/__init__.py"], "/cloudmesh/nn/service/partition.py": ["/cloudmesh/nn/service/__init__.py"], "/cloudmesh/nn/command/nn.py": ["/service/__init__.py"], "/cloudmesh/nn/service/control.py": ["/cloudmesh/nn/service/data.py"], "/cloudmesh/nn/service/data.py": ["/cloudmesh/nn/service/__init__.py"]} |
61,676 | cloudmesh/cloudmesh-nn | refs/heads/main | /service/__init__.py | import os
server_dir = os.path.dirname(__file__)
| {"/service/service.py": ["/cloudmesh/nn/spec/__init__.py"], "/cloudmesh/nn/service/partition.py": ["/cloudmesh/nn/service/__init__.py"], "/cloudmesh/nn/command/nn.py": ["/service/__init__.py"], "/cloudmesh/nn/service/control.py": ["/cloudmesh/nn/service/data.py"], "/cloudmesh/nn/service/data.py": ["/cloudmesh/nn/service/__init__.py"]} |
61,677 | chimez/GoalSkill | refs/heads/master | /service/skill_tree/views.py | from django.shortcuts import render
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import get_object_or_404, render
from django.urls import reverse
from .models import Skill, SkillFamily, User, UserMethod, UserSkill
def index(request):
    """Home page: list the skill families owned by the hard-coded user "me"."""
    me = User.objects.get(name="me")
    context = {
        "skill_families": me.skill_family.all(),
    }
    return render(request, 'skill_tree/index.html', context)
def skill_family(request, skill_family_name):
    """List every skill of user "me" inside the named skill family."""
    me = User.objects.get(name="me")
    family = SkillFamily.objects.get(name=skill_family_name)
    user_skills = UserSkill.objects.filter(
        user=me, skill__skill_family=family)
    context = {
        "all_skill": user_skills,
        "skill_family_name": skill_family_name,
    }
    return render(request, 'skill_tree/skill_family.html', context)
def skill(request, skill_family_name, skill_name):
    """Detail page for one skill: current level, exp, and per-method estimates.

    For each method the page shows how many more repetitions are needed to
    reach the current level's exp threshold.
    """
    me = User.objects.get(name="me")
    skill = Skill.objects.get(name=skill_name)
    # Single lookup instead of the original's two identical queries for
    # skill_exp and skill_level.
    user_skill = UserSkill.objects.get(user=me, skill=skill)
    skill_exp = user_skill.skill_exp
    level = user_skill.skill_level
    methods_obj = UserMethod.objects.filter(user=me, skill=skill)
    methods = []
    for method in methods_obj:
        # Remaining exp divided by exp-per-repetition of this method.
        method_need_times = (level.need_exp - skill_exp) / method.method.exp
        methods.append({
            "method": method,
            "method_need_times": "%0.f" % method_need_times,
        })
    context = {
        "skill": skill,
        "level": level,
        "skill_exp": skill_exp,
        "methods": methods,
        "skill_family_name": skill_family_name,
        "skill_name": skill.name,
    }
    return render(request, 'skill_tree/skill.html', context)
def inc_method_times(request, skill_family_name, skill_name, user_method_id):
    """Record one repetition of a method and credit its exp to the skill."""
    me = User.objects.get(name="me")
    user_method = get_object_or_404(UserMethod, pk=user_method_id)
    user_method.method_times += 1
    user_method.save()
    user_skill = UserSkill.objects.get(user=me, skill__name=skill_name)
    user_skill.skill_exp += user_method.method.exp
    user_skill.save()
    # Redirect back to the skill detail page.
    target = reverse('skill_tree:skill', args=(skill_family_name, skill_name))
    return HttpResponseRedirect(target)
| {"/service/skill_tree_api/serializers.py": ["/service/skill_tree_api/models.py"], "/service/skill_tree_api/views.py": ["/service/skill_tree_api/models.py", "/service/skill_tree_api/serializers.py"]} |
61,678 | chimez/GoalSkill | refs/heads/master | /service/skill_tree/admin.py | from django.contrib import admin
# Register your models here.
from .models import Level, Method, MethodFamily, Skill, SkillFamily, User, UserMethod, UserSkill
# Register your models here.
# Expose every skill-tree model in the Django admin (same order as before).
for _model in (Level, Method, MethodFamily, Skill,
               SkillFamily, User, UserMethod, UserSkill):
    admin.site.register(_model)
| {"/service/skill_tree_api/serializers.py": ["/service/skill_tree_api/models.py"], "/service/skill_tree_api/views.py": ["/service/skill_tree_api/models.py", "/service/skill_tree_api/serializers.py"]} |
61,679 | chimez/GoalSkill | refs/heads/master | /service/skill_tree_api/models.py | from django.db import models
# Create your models here.
class Level(models.Model):
    """A named proficiency tier and the exp threshold required to hold it."""
    short_name = models.CharField(max_length=10, unique=True)
    name = models.CharField(max_length=30, unique=True)
    describe = models.TextField()
    need_exp = models.IntegerField()  # exp needed to occupy this level
    def __str__(self):
        return self.short_name
class MethodFamily(models.Model):
    """A named grouping of training methods."""
    name = models.CharField(max_length=50, unique=True)
    def __str__(self):
        return self.name
class Method(models.Model):
    """A single training method worth a fixed amount of exp per repetition."""
    name = models.TextField(unique=True)
    exp = models.IntegerField()  # exp granted per repetition
    method_family = models.ForeignKey(MethodFamily, on_delete=models.CASCADE)
    def __str__(self):
        return self.name
class SkillFamily(models.Model):
    """A named grouping of skills."""
    name = models.CharField(max_length=50, unique=True)
    def __str__(self):
        return self.name
class User(models.Model):
    """A user and the skill families they follow."""
    name = models.CharField(max_length=50, unique=True)
    skill_family = models.ManyToManyField(SkillFamily)
    def __str__(self):
        return self.name
class Skill(models.Model):
    """A skill belonging to one family, trainable via several method families."""
    name = models.CharField(max_length=30, unique=True)
    # presumably the depth of the skill in its tree — TODO confirm
    tier = models.IntegerField()
    skill_family = models.ForeignKey(SkillFamily, on_delete=models.CASCADE)
    method_family = models.ManyToManyField(MethodFamily)
    def __str__(self):
        return self.name
class UserSkill(models.Model):
    """Per-user progress on one skill: accumulated exp and current level."""
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    skill = models.ForeignKey(Skill, on_delete=models.CASCADE)
    skill_exp = models.IntegerField(default=0)
    skill_level = models.ForeignKey(Level, on_delete=models.CASCADE)
    def __str__(self):
        # Same output as the original str.format() version.
        return f"user:{self.user},skill:{self.skill}"
class UserMethod(models.Model):
    """Repetition counter for one (user, skill, method) combination."""
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    method = models.ForeignKey(Method, on_delete=models.CASCADE)
    skill = models.ForeignKey(Skill, on_delete=models.CASCADE)
    method_times = models.IntegerField(default=0)
    def __str__(self):
        # Same output as the original str.format() version.
        return f"user:{self.user},skill:{self.skill},method:{self.method}"
| {"/service/skill_tree_api/serializers.py": ["/service/skill_tree_api/models.py"], "/service/skill_tree_api/views.py": ["/service/skill_tree_api/models.py", "/service/skill_tree_api/serializers.py"]} |
61,680 | chimez/GoalSkill | refs/heads/master | /service/skill_tree_api/serializers.py | from rest_framework import serializers
from .models import (Level, Method, MethodFamily, Skill, SkillFamily, User,
UserMethod, UserSkill)
class LevelSerializer(serializers.ModelSerializer):
    """Full representation of a Level."""
    class Meta:
        model = Level
        fields = ('id', 'short_name', 'name', 'describe', 'need_exp')
class MethodFamilySerializer(serializers.ModelSerializer):
    """id + name of a MethodFamily."""
    class Meta:
        model = MethodFamily
        fields = ('id', 'name')
class MethodSerializer(serializers.ModelSerializer):
    """All fields of a Method."""
    class Meta:
        model = Method
        fields = '__all__'
class SkillFamilySerializer(serializers.ModelSerializer):
    """id + name of a SkillFamily."""
    class Meta:
        model = SkillFamily
        fields = ('id', 'name')
class UserSerializer(serializers.ModelSerializer):
    """A user and the ids of the skill families they follow."""
    class Meta:
        model = User
        fields = ('id', 'name', 'skill_family')
class SkillSerializer(serializers.ModelSerializer):
    """Full representation of a Skill (family/method-family by id)."""
    class Meta:
        model = Skill
        fields = ('id', 'name', 'tier', 'skill_family', 'method_family')
class UserSkillSerializer(serializers.ModelSerializer):
    """Flat representation of a UserSkill (related objects by id)."""
    class Meta:
        model = UserSkill
        fields = ('id', 'user', 'skill', 'skill_exp', 'skill_level')
class UserMethodSerializer(serializers.ModelSerializer):
    """Flat representation of a UserMethod (related objects by id)."""
    class Meta:
        model = UserMethod
        fields = ('id', 'user', 'method', 'skill', 'method_times')
# The serializers below are special-purpose serializers for specific queries.
class SkillNameSerializer(serializers.ModelSerializer):
    """Compact Skill summary (id, name, tier) for nesting."""
    class Meta:
        model = Skill
        fields = ('id', 'name', 'tier')
class LevelShortNameSerializer(serializers.ModelSerializer):
    """Compact Level summary (short_name, need_exp) for nesting."""
    class Meta:
        model = Level
        fields = ('short_name', 'need_exp')
class AllSkillSerializer(serializers.Serializer):
    """Read-only view of a UserSkill: nested skill summary, exp, level summary."""
    skill = SkillNameSerializer()
    skill_exp = serializers.IntegerField()
    skill_level = LevelShortNameSerializer()
    class Meta:
        # NOTE(review): plain Serializer ignores Meta.model — confirm whether
        # ModelSerializer was intended here.
        model = UserSkill
class SkillMethodSerializer(serializers.ModelSerializer):
    """UserMethod with the nested Method expanded plus its repeat count."""
    method = MethodSerializer()
    method_times = serializers.IntegerField()
    class Meta:
        model = UserMethod
        fields = '__all__'
| {"/service/skill_tree_api/serializers.py": ["/service/skill_tree_api/models.py"], "/service/skill_tree_api/views.py": ["/service/skill_tree_api/models.py", "/service/skill_tree_api/serializers.py"]} |
61,681 | chimez/GoalSkill | refs/heads/master | /service/skill_tree_api/urls.py | from django.conf.urls import include, url
from rest_framework.documentation import include_docs_urls
from rest_framework.routers import DefaultRouter
from rest_framework.schemas import get_schema_view
from skill_tree_api import views
schema_view = get_schema_view(title='Pastebin API')
# Extra route binding GET to the custom `all_skill` viewset action.
skill_family_detail = views.UserSkillFamilyViewSet.as_view({
    'get': 'all_skill'
})
router = DefaultRouter()
router.register(r'skill_family', views.UserSkillFamilyViewSet)
router.register(r'skills', views.SkillMethodViewSet)
urlpatterns = [
    url(r'^', include(router.urls)),
    url(r'^schema/$', schema_view),
    url(r'^docs/', include_docs_urls(title='Skill Tree API')),
    # /skill_family/<user pk>/all_skill/<skill family id>/
    url(
        r'^skill_family/(?P<pk>[0-9]+)/all_skill/(?P<skill_family_id>[0-9]+)/$',
        skill_family_detail),
]
| {"/service/skill_tree_api/serializers.py": ["/service/skill_tree_api/models.py"], "/service/skill_tree_api/views.py": ["/service/skill_tree_api/models.py", "/service/skill_tree_api/serializers.py"]} |
61,682 | chimez/GoalSkill | refs/heads/master | /service/skill_tree_api/views.py | from rest_framework import mixins, status, viewsets
from rest_framework.decorators import action
from rest_framework.response import Response
from .models import (Level, Method, MethodFamily, Skill, SkillFamily, User,
UserMethod, UserSkill)
from .serializers import (
AllSkillSerializer, LevelSerializer, MethodFamilySerializer,
MethodSerializer, SkillFamilySerializer, SkillMethodSerializer,
SkillSerializer, UserMethodSerializer, UserSerializer, UserSkillSerializer)
class UserSkillFamilyViewSet(viewsets.ReadOnlyModelViewSet):
    """
    list:
    Return id and name of every SkillFamily followed by the user "me".

    read:
    Return the skill families of the user with the given id.

    all_skill:
    Return every skill of user ``pk`` inside skill family
    ``skill_family_id`` (a SkillFamily id, not a user id).
    """
    queryset = User.objects.all()
    serializer_class = UserSerializer
    def list(self, request):
        me = User.objects.get(name="me")
        skill_families = me.skill_family.all()
        serializer = SkillFamilySerializer(skill_families, many=True)
        return Response(serializer.data)
    # BUG FIX: the first parameter was misspelled "serf"; renamed to the
    # conventional "self" (purely internal, callers are unaffected).
    def read(self, request, pk=None):
        user = User.objects.get(pk=pk)
        skill_families = user.skill_family.all()
        serializer = SkillFamilySerializer(skill_families, many=True)
        return Response(serializer.data)
    def all_skill(self, request, pk=None, skill_family_id=None):
        user = User.objects.get(pk=pk)
        skill_family = SkillFamily.objects.get(pk=skill_family_id)
        all_skill = UserSkill.objects.filter(
            user=user, skill__skill_family=skill_family)
        serializer = AllSkillSerializer(all_skill, many=True)
        return Response(serializer.data)
class SkillMethodViewSet(mixins.RetrieveModelMixin, viewsets.GenericViewSet):
    """
    read:
    Return every method (with repetition counts) of skill ``id`` for user "me".

    inc_method_times:
    Increment the repetition counter of the UserMethod with primary key ``id``
    and recompute the owning skill's exp and level.
    """
    queryset = UserMethod.objects.all()
    serializer_class = SkillMethodSerializer
    def read(self, request, pk=None):
        me = User.objects.get(name="me")
        skill = Skill.objects.get(pk=pk)
        methods = UserMethod.objects.filter(user=me, skill=skill)
        serializer = SkillMethodSerializer(methods, many=True)
        return Response(serializer.data)
    @action(detail=True)
    def inc_method_times(self, request, pk=None):
        method = UserMethod.objects.get(pk=pk)
        method.method_times += 1
        method.save()
        user_skill = UserSkill.objects.filter(user=method.user).filter(
            skill=method.skill)[0]
        # NOTE(review): exp is recomputed from this one method's count only,
        # unlike skill_tree.views.inc_method_times which accumulates exp
        # across methods — confirm which behaviour is intended.
        user_skill.skill_exp = method.method_times * method.method.exp
        user_skill.save()
        # Build exp-threshold -> Level map and pick the lowest threshold
        # strictly above the current exp as the new level.
        levels = {}
        level_exp_array = []
        for level in Level.objects.all():
            levels[level.need_exp] = level
            level_exp_array.append(level.need_exp)
        level_exp_array.sort()
        # NOTE(review): raises IndexError once exp exceeds every threshold.
        now_level_exp = list(
            filter(lambda x: x > user_skill.skill_exp, level_exp_array))[0]
        now_level = levels[now_level_exp]
        user_skill.skill_level = now_level
        user_skill.save()
        return Response(status.HTTP_200_OK)
| {"/service/skill_tree_api/serializers.py": ["/service/skill_tree_api/models.py"], "/service/skill_tree_api/views.py": ["/service/skill_tree_api/models.py", "/service/skill_tree_api/serializers.py"]} |
61,683 | chimez/GoalSkill | refs/heads/master | /service/skill_tree_api/utils.py | from skill_tree_api.models import Level, Method, Skill, User, UserMethod, UserSkill
def flush_all_userskill():
    """Ensure user "me" has a UserSkill row (starting at level F) for every skill."""
    me = User.objects.get(name="me")
    base_level = Level.objects.get(short_name="F")
    for one_skill in Skill.objects.all():
        UserSkill.objects.get_or_create(
            user=me, skill=one_skill, skill_level=base_level)
def flush_all_usermethod():
    """Ensure user "me" has a UserMethod row for every method of every skill."""
    me = User.objects.get(name="me")
    for one_skill in Skill.objects.all():
        for family in one_skill.method_family.all():
            for one_method in Method.objects.filter(method_family=family):
                UserMethod.objects.get_or_create(
                    user=me, skill=one_skill, method=one_method)
| {"/service/skill_tree_api/serializers.py": ["/service/skill_tree_api/models.py"], "/service/skill_tree_api/views.py": ["/service/skill_tree_api/models.py", "/service/skill_tree_api/serializers.py"]} |
61,684 | chimez/GoalSkill | refs/heads/master | /service/skill_tree_api/apps.py | from django.apps import AppConfig
class SkillTreeApiConfig(AppConfig):
    """Django app configuration for the skill_tree_api application."""
    name = 'skill_tree_api'
| {"/service/skill_tree_api/serializers.py": ["/service/skill_tree_api/models.py"], "/service/skill_tree_api/views.py": ["/service/skill_tree_api/models.py", "/service/skill_tree_api/serializers.py"]} |
61,685 | chimez/GoalSkill | refs/heads/master | /service/skill_tree/urls.py | from django.urls import path
from . import views
app_name = "skill_tree"
# URL scheme: index, family listing, skill detail, and the exp-increment action.
urlpatterns = [
    path('', views.index, name="index"),
    path('<str:skill_family_name>/', views.skill_family, name="skill_family"),
    path(
        '<str:skill_family_name>/<str:skill_name>/', views.skill,
        name="skill"),
    path(
        '<str:skill_family_name>/<str:skill_name>/inc_method_times/<int:user_method_id>/',
        views.inc_method_times,
        name="inc_method_times"),
]
| {"/service/skill_tree_api/serializers.py": ["/service/skill_tree_api/models.py"], "/service/skill_tree_api/views.py": ["/service/skill_tree_api/models.py", "/service/skill_tree_api/serializers.py"]} |
61,686 | victorhos/merge_requirements | refs/heads/master | /setup.py | #!/usr/bin/env python
# encoding: utf-8
from setuptools import setup
# Packaging metadata for the merge-requirements command-line tool.
setup(
    name='merge-requirements',
    version='0.6',
    keywords=['merge requirements'],
    url='https://github.com/victorhos/merge_requirements',
    license='MIT',
    author='victorhos',
    author_email='victor.hos@gmail.com',
    description='simple lib for organize two requirements.txt in a unique requirements.txt file',
    packages=['merge_requirements'],
    scripts=['scripts/merge_requirements'],
    install_requires=['packaging'],
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 2.7'
    ]
)
| {"/merge_requirements/manage_file.py": ["/merge_requirements/utils.py"]} |
61,687 | victorhos/merge_requirements | refs/heads/master | /merge_requirements/manage_file.py | #!/usr/bin/env python
# encoding: utf-8
import sys
import os
import logging
from merge_requirements.utils import remove_comments, merge_dict
CURRENT_DIRECTORY = os.getcwd()
DIR = os.path.dirname(os.path.realpath(__file__))
class ManageFile(object):
    """Parse two requirements.txt files into {package: version} dicts."""
    def __init__(self, ff, sf):
        # ff / sf: paths to the first and second requirements files.
        self.first_file = self.generate_dict_libs(ff)
        self.second_file = self.generate_dict_libs(sf)
    def open_file(self, file):
        """Return the file's text; log and re-raise on any failure."""
        try:
            # BUG FIX: the original leaked the file handle; the context
            # manager closes it even if read() raises.
            with open(file, 'r') as handle:
                return handle.read()
        except Exception as e:
            logging.error(e)
            raise
    def generate_dict_libs(self, file):
        """Turn 'pkg==ver' lines into a {pkg: ver} dict ('' when unpinned)."""
        text = remove_comments(self.open_file(file))
        lib_list = []
        for item in text.split('\n'):
            item = item.split('==')
            if len(item) == 1:
                # No '==' present: keep the package with an empty version.
                item.append('')
            lib_list.append(tuple(item))
        return dict(lib_list)
    def show(self):
        """Debug helper: print both parsed dicts."""
        print('------------ first_file ------------')
        print(self.first_file)
        print('------------ second_file ------------')
        print(self.second_file)
class Merge(object):
    """Merge two parsed requirements dicts and write the result to disk."""
    def __init__(self, mf):
        self.manage_file = mf
        self.dict_libs = {}
        self.merge_dict_libs()
    def merge_dict_libs(self):
        """Populate dict_libs and error_count from the two parsed files."""
        merged, failures = merge_dict(
            self.manage_file.first_file,
            self.manage_file.second_file
        )
        self.dict_libs = merged
        self.error_count = failures
    def generate_requirements_txt(self):
        """Write the merged requirements to a fresh requirements-merged file.

        Exits with status 1 after writing if any versions failed to merge.
        """
        lines = []
        for name, version in self.dict_libs.items():
            if len(version):
                lines.append('{}=={}\n'.format(name, version))
            else:
                lines.append('{}\n'.format(name))
        txt = ''.join(lines)
        # Never clobber an existing file: append a "(N)" suffix until free.
        file_path = './requirements-merged.txt'
        suffix = 0
        while os.path.exists(file_path):
            suffix += 1
            file_path = './requirements-merged({}).txt'.format(suffix)
        # Exclusive-create mode; Python 2 spells 'x' as 'wx'.
        mode = 'wx' if sys.version_info[0] < 3 else 'x'
        f = open(file_path, mode)
        f.write(txt)
        f.close()
        print('create new file {}'.format(file_path))
        if self.error_count > 0:
            print('WARN: {} values failed to merge.'.format(self.error_count), file=sys.stderr)
            sys.exit(1)
| {"/merge_requirements/manage_file.py": ["/merge_requirements/utils.py"]} |
61,688 | victorhos/merge_requirements | refs/heads/master | /merge_requirements/utils.py | #!/usr/bin/env python
# encoding: utf-8
import re
import sys
from packaging.version import parse, InvalidVersion
def remove_comments(text):
    """Strip '#' comment lines and blank lines from requirements text.

    Newline runs are first collapsed, then comment lines and leading/trailing
    blank lines are removed.
    """
    # Raw strings instead of double-escaped patterns; same regexes.
    comment_pattern = re.compile(r'#+.*?\n|^\n|\n$', re.M | re.S)
    newline_runs = re.compile(r'\n+', re.M | re.S)
    collapsed = newline_runs.sub('\n', text)
    return comment_pattern.sub('', collapsed)
def merge_dict(l_dict, r_dict):
    """Merge two {package: version} dicts, keeping the newer version.

    Packages present in only one input are copied as-is; packages in both
    keep the higher version per ``packaging.version.parse``.

    Returns:
        tuple: (merged_dict, error_count) where error_count is the number of
        packages skipped because a version string could not be parsed.
    """
    new_dict = dict()
    error_count = 0
    # Union of all package names from both inputs.
    all_keys = set(list(l_dict.keys()) + list(r_dict.keys()))
    for key in all_keys:
        if key in l_dict and key not in r_dict:
            new_dict[key] = l_dict[key]
        elif key not in l_dict and key in r_dict:
            new_dict[key] = r_dict[key]
        else:
            try:
                l_version = parse(l_dict[key])
                r_version = parse(r_dict[key])
                if l_version <= r_version:
                    new_dict[key] = r_dict[key]
                else:
                    new_dict[key] = l_dict[key]
            except InvalidVersion:
                error_count = error_count + 1
                # BUG FIX: the original referenced an undefined name
                # `key_item` here, raising NameError instead of warning.
                print('WARN: Unable to merge {0}, value "{1}" vs "{2}"'.format(
                    key, l_dict[key], r_dict[key]
                ), file=sys.stderr)
                continue
    return (new_dict, error_count)
| {"/merge_requirements/manage_file.py": ["/merge_requirements/utils.py"]} |
61,689 | victorhos/merge_requirements | refs/heads/master | /merge_requirements/main.py | #!/usr/bin/env python
# encoding: utf-8
import argparse
from manage_file import ManageFile, Merge
# Command-line interface: two positional requirements files to merge.
parser = argparse.ArgumentParser()
parser.add_argument(
    'first_file',
    help='first file to merged'
)
parser.add_argument(
    'second_file',
    help='second file to merged'
)
# Parsed at import time; main() below reads the two paths from `args`.
args = parser.parse_args()
def main():
    """Entry point: merge the two requirements files given on the command line."""
    manager = ManageFile(args.first_file, args.second_file)
    merger = Merge(manager)
    merger.generate_requirements_txt()


if __name__ == '__main__':
    main()
| {"/merge_requirements/manage_file.py": ["/merge_requirements/utils.py"]} |
61,690 | victorhos/merge_requirements | refs/heads/master | /merge_requirements/tests/tests_utils.py | #!/usr/bin/env python
# encoding: utf-8
from unittest import TestCase
from unittest.mock import MagicMock
from utils import remove_comments, merge_dict
class TestUtils(TestCase):
    """Unit tests for merge_requirements.utils helpers."""
    def test_remove_comments(self):
        """Comment lines and blank lines are stripped."""
        text_file = '#COMENTARIOS\nCherryPy==3.2.4\nDjango==2.1.7\nIPTCInfo==1.9.5-6\nIon==0.6.4.2\n#COMENTARIO2\nJinja2==2.7\nMarkupSafe==0.18\nMySQL-python==1.2.3\nPIL==1.1.7-1\nPillow==2.1.0\nRoutes==2.0\nSQLAlchemy==0.5.8\nSouth==0.7.3\n' # noqa
        expected_text_file = 'CherryPy==3.2.4\nDjango==2.1.7\nIPTCInfo==1.9.5-6\nIon==0.6.4.2\nJinja2==2.7\nMarkupSafe==0.18\nMySQL-python==1.2.3\nPIL==1.1.7-1\nPillow==2.1.0\nRoutes==2.0\nSQLAlchemy==0.5.8\nSouth==0.7.3' # noqa
        self.assertEqual(
            remove_comments(text_file),
            expected_text_file,
            'test_remove_comments ok'
        )
    def test_merge_dict(self):
        """Packages in both inputs keep the higher version; unique ones pass through."""
        bdict = {
            'CherryPy': '3.2.4',
            'Django': '1.4.13',
            'MySQL-python': '1.2.3',
            'Pillow': '2.1.0',
            'MarkupSafe': '0.18'
        }
        mdict = {
            'CherryPy': '3.2.0',
            'Django': '1.4.14',
            'MySQL-python': '1.2.3',
            'Pillow': '2.1.0',
            'MarkupSafe': '0.18',
            'SQLAlchemy': '0.5.8'
        }
        merged_dict = {
            'Django': '1.4.14',
            'MarkupSafe': '0.18',
            'MySQL-python': '1.2.3',
            'Pillow': '2.1.0',
            'SQLAlchemy': '0.5.8',
            'CherryPy': '3.2.4'
        }
        # BUG FIX: merge_dict returns a (dict, error_count) tuple, so the
        # original assertDictEqual on the raw return value always failed.
        merged, error_count = merge_dict(bdict, mdict)
        self.assertDictEqual(merged, merged_dict, 'test_merge_dict')
        self.assertEqual(error_count, 0)
if __name__ == '__main__':
    # BUG FIX: `unittest` itself was never imported (only TestCase was), so
    # running this file directly raised NameError on unittest.main().
    import unittest
    unittest.main()
| {"/merge_requirements/manage_file.py": ["/merge_requirements/utils.py"]} |
61,691 | MKaczkow/reversi_game | refs/heads/master | /player.py | '''
Author: Maciej Kaczkowski
26.03-13.04.2021
'''
from config import *
from minimax import *
class AlgoPlayer(object):
    """Minimax-driven player for a fixed colour acting on a shared board."""
    def __init__(self, color, board_instance, max_depth=MAX_DEPTH):
        self.max_depth = max_depth
        self.minimax_object = Minimax(heuristic_evaluation=0)
        self.color = color
        self.board = board_instance
    def play(self):
        """Run minimax from the current position; returns (score, best_child)."""
        return self.minimax_object.minimax(
            self.board, None, self.max_depth, self.color, -self.color)
class RandomPlayer (AlgoPlayer):
    """Player choosing uniformly at random among the legal moves."""
    def play(self, board_instance):
        board_instance.get_moves()
        candidates = board_instance.possible_moves
        pick = np.random.randint(len(candidates))
        return candidates[pick]
| {"/player.py": ["/config.py", "/minimax.py"], "/reversi.py": ["/player.py", "/board.py", "/config.py"], "/board.py": ["/config.py"], "/minimax.py": ["/board.py"]} |
61,692 | MKaczkow/reversi_game | refs/heads/master | /reversi.py | '''
Author: Maciej Kaczkowski
26.03-07.04.2021
'''
import numpy as np
import player
import board
from config import *
class Reversi:
    """Plays complete games between a black and a white player and keeps score."""
    def __init__(self, first_player, second_player):
        """first_player / second_player: the RANDOM or ALGO codename for each side."""
        self.board_instance = board.Board()
        if first_player == RANDOM:
            self.black_player = player.RandomPlayer(color=BLACK, board_instance=self.board_instance)
        elif first_player == ALGO:
            self.black_player = player.AlgoPlayer(color=BLACK, board_instance=self.board_instance)
        else:
            print("Wrong black player!")
        if second_player == RANDOM:
            self.white_player = player.RandomPlayer(color=WHITE, board_instance=self.board_instance)
        elif second_player == ALGO:
            # BUG FIX: this branch assigned to self.black_player, clobbering
            # the black player and leaving self.white_player undefined.
            self.white_player = player.AlgoPlayer(color=WHITE, board_instance=self.board_instance)
        else:
            print("Wrong white player!")
        self.winner = None
        self.white_wins = 0
        self.black_wins = 0
        self.draws = 0
    def run(self):
        """Play one game to completion and record the result."""
        running = True
        while running:
            passes = 0
            # --- black's turn ---
            self.board_instance.playing_next = BLACK
            self.board_instance.get_moves(colour=BLACK)
            if len(self.board_instance.possible_moves) == 0:
                passes += 1
            else:
                # Minimax returns the best child board; recover the move as
                # the single new stone between parent and child states.
                # NOTE(review): np.where yields a tuple of index arrays —
                # confirm attempt_move's membership test handles this format.
                _, best_child = self.black_player.play()
                diff_board = abs(best_child.board_state) - abs(self.board_instance.board_state)
                chosen_move = np.where(diff_board == 1)
                self.board_instance.attempt_move(chosen_move, BLACK)
            # --- white's turn ---
            self.board_instance.playing_next = WHITE
            self.board_instance.get_moves(colour=WHITE)
            if len(self.board_instance.possible_moves) == 0:
                passes += 1
            else:
                self.board_instance.attempt_move(self.white_player.play(self.board_instance))
            # Both sides passing in the same round ends the game.
            if passes >= 2:
                running = False
                if np.sum(self.board_instance.board_state) > 0:
                    self.winner = BLACK
                    self.black_wins += 1
                    print("And the winner is...\nBlack!")
                elif np.sum(self.board_instance.board_state) < 0:
                    self.winner = WHITE
                    self.white_wins += 1
                    print("And the winner is...\nWhite!")
                else:
                    self.winner = None
                    self.draws += 1
                    print("Draw! Nobody wins!")
    def reset(self):
        """Prepare for the next game while keeping the win counters."""
        self.winner = None
        self.board_instance.reset_board()
def main():
    """Play 100 games (minimax black vs. random white) and print the totals."""
    game = Reversi(ALGO, RANDOM)
    for game_number in range(100):
        game.run()
        game.reset()
        print("Game nr " + str(game_number) + " finished")
    print("Black: " + str(game.black_wins))
    print("White: " + str(game.white_wins))
    print("Draws: " + str(game.draws))


if __name__ == '__main__':
    main()
| {"/player.py": ["/config.py", "/minimax.py"], "/reversi.py": ["/player.py", "/board.py", "/config.py"], "/board.py": ["/config.py"], "/minimax.py": ["/board.py"]} |
61,693 | MKaczkow/reversi_game | refs/heads/master | /config.py | '''
Author: Maciej Kaczkowski
26.03-13.04.2021
'''
# configuration file with constants, etc.
# using "reversi convention" - black has first move,
# hence BLACK is Max player, while WHITE is Min player
WHITE = -1
BLACK = 1
EMPTY = 0
# player's codenames
RANDOM = 'random'
ALGO = 'minmax'
# min-max parameters
MAX_DEPTH = 4
# coordinates for better readability
NORTHEAST = (-1, 1)
NORTH = (-1, 0)
NORTHWEST = (-1, -1)
WEST = (0, -1)
SOUTHWEST = (1, -1)
SOUTH = (1, 0)
SOUTHEAST = (1, 1)
EAST = (0, 1)
| {"/player.py": ["/config.py", "/minimax.py"], "/reversi.py": ["/player.py", "/board.py", "/config.py"], "/board.py": ["/config.py"], "/minimax.py": ["/board.py"]} |
61,694 | MKaczkow/reversi_game | refs/heads/master | /board.py | '''
Author: Maciej Kaczkowski
26.03-13.04.2021
'''
import numpy as np
import copy
from config import *
class Board:
def __init__(self, board_size=8):
self.board_state = np.zeros((board_size, board_size), dtype=int)
self.board_state[3, 4] = BLACK
self.board_state[4, 3] = BLACK
self.board_state[3, 3] = WHITE
self.board_state[4, 4] = WHITE
self.possible_moves = np.empty([1, 2], dtype=int)
self.playing_next = BLACK
self.board_size = board_size
def get_moves(self, colour=None, need_return=False) -> 'get list of possible moves':
if colour is None:
colour = self.playing_next
places = []
for i in range(8):
for j in range(8):
if self.board_state[i][j] == colour:
places = places + self.look_around(i, j)
places = list(set(places))
self.possible_moves = places
if need_return:
return places
def look_around(self, x, y) -> "check for opponent's stones in 8 directions":
'''
:param x: first coordinate of checked point
:param y: second coordinate of checked point
:return: list of lists representing directions
'''
result = []
colour = self.board_state[x][y]
if colour == EMPTY:
return result
directions = [NORTHWEST, NORTH, NORTHEAST,
WEST, EAST,
SOUTHWEST, SOUTH, SOUTHEAST]
for (x_add, y_add) in directions:
point = self.check_direction(x, y, x_add, y_add)
if point:
result.append(point)
return result
def check_direction(self, x, y, x_add, y_add) -> "check how much opponent's stones are in given direction":
'''
:return: point, where it's possible to place stone
'''
x_temp = x + x_add
y_temp = y + y_add
colour = self.board_state[x, y]
while 0 <= x_temp <= 7 and 0 <= y_temp <= 7 and self.board_state[x_temp, y_temp] == -colour:
x_temp += x_add
y_temp += y_add
if 0 <= x_temp <= 7 and 0 <= y_temp <= 7 and self.board_state[x_temp, y_temp] == EMPTY:
return x_temp, y_temp
def attempt_move(self, move, colour=None):
if colour is None:
colour = self.playing_next
if move in self.possible_moves:
self.board_state[move[0], move[1]] = colour
for i in range(1, 9):
self.flip(i, move[0], move[1])
def reset_board(self, board_size=8):
self.board_state = np.zeros((board_size, board_size), dtype=int)
self.board_state[3, 4] = BLACK
self.board_state[4, 3] = BLACK
self.board_state[3, 3] = WHITE
self.board_state[4, 4] = WHITE
self.possible_moves = np.empty([1, 2], dtype=int)
self.playing_next = BLACK
def flip(self, direction, x, y):
if direction == 1:
# north
row_inc = -1
col_inc = 0
elif direction == 2:
# northeast
row_inc = -1
col_inc = 1
elif direction == 3:
# east
row_inc = 0
col_inc = 1
elif direction == 4:
# southeast
row_inc = 1
col_inc = 1
elif direction == 5:
# south
row_inc = 1
col_inc = 0
elif direction == 6:
# southwest
row_inc = 1
col_inc = -1
elif direction == 7:
# west
row_inc = 0
col_inc = -1
elif direction == 8:
# northwest
row_inc = -1
col_inc = -1
stones_to_flip = []
i = x + row_inc
j = y + col_inc
if i in range(8) and j in range(8) and self.board_state[i, j] == -self.playing_next:
stones_to_flip = stones_to_flip + [(i, j)]
i = i + row_inc
j = j + col_inc
while i in range(8) and j in range(8) and self.board_state[i, j] == -self.playing_next:
# search for more pieces to flip
stones_to_flip = stones_to_flip + [(i, j)]
i = i + row_inc
j = j + col_inc
if i in range(8) and j in range(8) and self.board_state[i, j] == self.playing_next:
# found a piece of the right color to flip the pieces between
for pos in stones_to_flip:
# flips
self.board_state[pos[0], pos[1]] = self.playing_next
def heuristic_evaluate(self):
    """Score the position: each stone contributes its square's weight
    (interior 1, edge 2, corner 8), signed by the stone's colour code."""
    weights = np.ones_like(self.board_state, dtype=int)
    # One extra point per touching edge ...
    weights[0, :] += 1
    weights[-1, :] += 1
    weights[:, 0] += 1
    weights[:, -1] += 1
    # ... and a large bonus for the four corners on top of that.
    for corner in ((0, 0), (0, -1), (-1, 0), (-1, -1)):
        weights[corner] += 5
    return np.sum(self.board_state * weights)
def get_child_states(self, colour):
    """Yield a deep-copied successor board for every legal move of *colour*."""
    for candidate in self.get_moves(colour=colour, need_return=True):
        successor = copy.deepcopy(self)
        successor.attempt_move(candidate)
        yield successor
| {"/player.py": ["/config.py", "/minimax.py"], "/reversi.py": ["/player.py", "/board.py", "/config.py"], "/board.py": ["/config.py"], "/minimax.py": ["/board.py"]} |
61,695 | MKaczkow/reversi_game | refs/heads/master | /minimax.py | '''
Author: Maciej Kaczkowski
26.03-13.04.2021
'''
import numpy as np
import board
class Minimax(object):
    """Negamax search with alpha-beta pruning over Board child states."""

    def __init__(self, heuristic_evaluation=0):
        # Selector for the evaluation variant (currently unused by minimax).
        self.heuristic_evaluation = heuristic_evaluation

    def minimax(self, board_instance: board.Board, parent_board, depth, player, opponent,
                alfa=-np.Infinity, beta=np.Infinity):
        """Search *depth* plies ahead and return (score, best child board).

        Scores are taken from *player*'s point of view; *parent_board* is
        accepted for call-site compatibility but not used.
        """
        best_child = board_instance
        if depth == 0:
            return self.heuristic_evaluate(board_instance), best_child
        for successor in board_instance.get_child_states(colour=player):
            child_score, _ = self.minimax(successor, board_instance, depth - 1,
                                          opponent, player, -beta, -alfa)
            child_score = -child_score
            if child_score > alfa:
                alfa = child_score
                best_child = successor
            if beta <= alfa:
                # Opponent would never allow this line: prune.
                break
        return self.heuristic_evaluate(best_child), best_child

    def heuristic_evaluate(self, board_instance):
        """Weighted stone count: interior 1, edges 2, corners 8 (signed)."""
        weights = np.ones_like(board_instance.board_state, dtype=int)
        weights[0] += 1
        weights[-1] += 1
        weights[:, 0] += 1
        weights[:, -1] += 1
        for corner in ((0, 0), (0, -1), (-1, 0), (-1, -1)):
            weights[corner] += 5
        return np.sum(board_instance.board_state * weights)
| {"/player.py": ["/config.py", "/minimax.py"], "/reversi.py": ["/player.py", "/board.py", "/config.py"], "/board.py": ["/config.py"], "/minimax.py": ["/board.py"]} |
61,701 | shyamtarams/studentManagementSystem | refs/heads/dev | /home/forms.py | from django import forms
from .models import *
from accounts.models import myUser
class studentRegisterForm(forms.ModelForm):
    """Registration form bound to the custom user model (accounts.myUser)."""
    # Explicitly declared fields override/extend the model-derived ones.
    name=forms.CharField(max_length=254)
    contact =forms.CharField(max_length=15)
    email = forms.EmailField(max_length=254, help_text='Required. Inform a valid email address.')
    status = forms.CharField(max_length=20)
    rule = forms.CharField(max_length=20)
    # password1=forms.CharField(max_length=50)
    # password2=forms.CharField(max_length=50)
    class Meta:
        model=myUser
        # NOTE(review): 'password' is exposed as a plain model field here,
        # so it is saved however the view stores it -- presumably unhashed;
        # confirm and consider Django's password-hashing helpers.
        fields = ('name','email','username','contact','status','rule','password')
| {"/home/forms.py": ["/home/models.py"], "/home/views.py": ["/home/models.py", "/home/forms.py"], "/home/urls.py": ["/home/views.py"]} |
61,702 | shyamtarams/studentManagementSystem | refs/heads/dev | /accounts/migrations/0003_alter_myuser_password.py | # Generated by Django 3.2.7 on 2021-09-07 19:18
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: alters myUser.password to CharField(max_length=100)."""

    dependencies = [
        ('accounts', '0002_myuser_name'),
    ]

    operations = [
        migrations.AlterField(
            model_name='myuser',
            name='password',
            # NOTE(review): default=False on a CharField stores the string
            # "False"; probably meant default=''. Applied migrations are
            # historical, so this is documented rather than edited.
            field=models.CharField(default=False, max_length=100),
        ),
    ]
| {"/home/forms.py": ["/home/models.py"], "/home/views.py": ["/home/models.py", "/home/forms.py"], "/home/urls.py": ["/home/views.py"]} |
61,703 | shyamtarams/studentManagementSystem | refs/heads/dev | /accounts/urls.py | from django.contrib import admin
from django.urls import path,include
from .views import *
from django.contrib.auth.views import LoginView
# URL routes for the accounts app.
urlpatterns = [
    # NOTE(review): no trailing slash here, unlike the other routes in
    # this project; /accounts/login/ is served by the include() below.
    path('login',LoginView.as_view(),name='login'),
    # Built-in auth routes (login/, logout/, password_change/, ...).
    path('',include('django.contrib.auth.urls')),
]
61,704 | shyamtarams/studentManagementSystem | refs/heads/dev | /home/views.py | from django.shortcuts import render, redirect
from .models import *
from home.forms import studentRegisterForm
from accounts.models import myUser
from django.contrib.auth import login, authenticate
from django.contrib.auth.decorators import login_required
# Create your views here.
# def signup(request):
# if request.method == 'POST':
# form = studentRegisterForm(request.POST)
# print(form)
# if form.is_valid():
# rule = form.cleaned_data.get('rule')
# status = form.cleaned_data.get('status')
# print(rule)
# print(status)
# form.save()
# username = form.cleaned_data.get('username')
# raw_password = form.cleaned_data.get('password')
# user = authenticate(username=username, password=raw_password)
# print(user)
# login(request, user)
# return redirect('/home/adminsite')
# else:
# form = studentRegisterForm()
# return render(request, 'home/addstudent.html', {'form': form})
def login(request):
    """Session-based login against the home.Login model.

    GET renders the login page; POST looks up a Login row matching the
    submitted credentials and stores its pk in the session.

    NOTE(review): this view shadows django.contrib.auth.login imported at
    the top of the file, and compares passwords in plain text.
    """
    if request.method=='POST':
        try:
            un=request.POST['username']
            pw=request.POST['password']
            # if Login.objects.get(username=un,password=pw):
            user=Login.objects.get(username=un,password=pw)
            # Mark the session as authenticated with this Login row's pk.
            request.session['log_id']=user.id
            return redirect('/home/studentDetails/')
        except Exception as err:
            # Missing form fields or no matching Login row: back to login.
            return redirect("/home/login")
    else:
        return render(request,"home/home.html")
def student(request):
    """Render the public landing page."""
    template_name = "home/home.html"
    return render(request, template_name)
@login_required(login_url='/accounts/login/')
def adminsite(request):
    """Admin dashboard: list all students with active/inactive counts.

    Renders home/adminsite.html with an empty context when no student
    records exist.
    """
    data = {}
    student_dtl = Student.objects.filter(rule="student")
    if student_dtl:
        # Run each aggregate exactly once; the original issued the same
        # "active" count query twice and re-evaluated the base filter.
        student_cnt = student_dtl.count()
        # Note: "active" is counted over ALL students, not only
        # rule="student" -- preserved from the original behaviour.
        student_cnt_a = Student.objects.filter(status="active").count()
        data = {
            'student_dtl': student_dtl,
            'student_cnt': student_cnt,
            'student_cnt_a': student_cnt_a,
            'student_cnt_i': student_cnt - student_cnt_a,
        }
    return render(request, "home/adminsite.html", data)
# register student
@login_required(login_url='/accounts/login/')
def studentRegister(request):
    """Create a Login row plus a linked Student row from the posted form.

    NOTE(review): credentials are duplicated on both models and stored in
    plain text; the POST path does no validation of the submitted values.
    """
    if request.method == "POST":
        name=request.POST["name"]
        contact=request.POST["contact"]
        email=request.POST["email"]
        username=request.POST["username"]
        password=request.POST["password"]
        rule="student"
        status="active"
        # The Login row must be saved first: Student.login is a FK to it.
        login=Login(username=username,password=password)
        login.save()
        newStudent=Student(name=name,contact=contact,email=email,username=username,password=password,rule=rule,status=status,login=login)
        print(newStudent)
        newStudent.save()
        return redirect("/home/adminsite")
    else:
        return render(request,"home/addstudent.html")
# active student
@login_required(login_url='/accounts/login/')
def activeStudent(request):
    """List every student whose status is "active"."""
    context = {'student_dtl': Student.objects.filter(status="active")}
    return render(request, "home/activestudent.html", context)
# inactive student
@login_required(login_url='/accounts/login/')
def inactiveStudent(request):
    """List every student whose status is "inactive"."""
    context = {'student_dtl': Student.objects.filter(status="inactive")}
    return render(request, "home/inactivestudent.html", context)
#change student status
@login_required(login_url='/accounts/login/')
def studentStatus(request, id):
    """Toggle one student's status between "active" and "inactive"."""
    record = Student.objects.get(id=id)
    record.status = "inactive" if record.status == "active" else "active"
    record.save()
    return redirect("/home/adminsite")
#get loged student details
def studentDetails(request):
    """Show the logged-in student's own record (session-based auth)."""
    try:
        id = request.session['log_id']
        user = Login.objects.get(id=id)
        # Bug fix: the original hard-coded Student.objects.filter(id=1),
        # showing every user the same record. Look up by the session's
        # Login FK instead (matching updateStudent below).
        student_dtl = Student.objects.filter(login=user.id)
        data = {
            'user': user,
            'student_dtl': student_dtl,
        }
        return render(request, "student/studentsite.html", data)
    except Exception:
        # No session key or no matching Login row: force a fresh login.
        return redirect("/home/login/")
def logout(request):
    """Drop the session login marker and return to the login page.

    Both the success and already-logged-out paths end at /home/login/.
    """
    request.session.pop('log_id', None)
    return redirect("/home/login/")
#update student details
def updateStudent(request):
    """Let the logged-in, active student edit their own record.

    Control flow relies on Student.objects.get raising DoesNotExist when
    there is no active record, which drops into the except and redirects.
    NOTE(review): the ``elif`` inactive branch is therefore unreachable --
    a failed "active" lookup raises before the elif is evaluated.
    """
    try:
        id= request.session['log_id']
        user=Login.objects.get(id=id)
        if Student.objects.get(login=user.id,status="active"):
            student_dtl=Student.objects.get(login=user.id,status="active")
            if request.method=="POST":
                name=request.POST["name"]
                contact=request.POST["contact"]
                email=request.POST["email"]
                username=request.POST["username"]
                password=request.POST["password"]
                rule="student"
                status="active"
                # Re-fetch and overwrite every editable field in place.
                update_data=Student.objects.get(login=user.id)
                update_data.name=name
                update_data.contact=contact
                update_data.email=email
                update_data.username=username
                update_data.password=password
                update_data.rule=rule
                update_data.status=status
                update_data.save()
                return redirect("/home/updatedetails/")
            else:
                data={
                    'user':user,
                    'student_dtl':student_dtl,
                }
                return render(request,"student/studentupdate.html",data)
        elif Student.objects.get(login=user.id,status="inactive"):
            return redirect("/home/studentDetails/")
    except:
        # Not logged in, record missing, or record inactive.
        return redirect("/home/studentDetails/")
| {"/home/forms.py": ["/home/models.py"], "/home/views.py": ["/home/models.py", "/home/forms.py"], "/home/urls.py": ["/home/views.py"]} |
61,705 | shyamtarams/studentManagementSystem | refs/heads/dev | /home/models.py | from django.db import models
# Create your models here.
class Login(models.Model):
    """Credential record used by the home app's session-based login.

    NOTE(review): the password is stored in plain text.
    """
    username=models.CharField(max_length=150)
    password=models.CharField(max_length=200)
    def __str__(self):
        return '{} {}'.format(self.username, self.password)
class Student(models.Model):
    """Student profile linked one-to-many from a Login credential row.

    NOTE(review): username/password duplicate the linked Login row and are
    plain text; email is a CharField (no validation) and contact is an
    IntegerField, which drops leading zeros from phone numbers.
    """
    name=models.CharField(max_length=50)
    contact=models.IntegerField()
    email=models.CharField(max_length=200)
    username=models.CharField(max_length=100)
    password=models.CharField(max_length=100)
    status=models.CharField(max_length=100)   # "active" / "inactive"
    rule=models.CharField(max_length=100)     # role, e.g. "student"
    date=models.DateTimeField(auto_now_add=True)
    login=models.ForeignKey(Login,on_delete=models.CASCADE)
    def __str__(self):
        return '{} {} {} {} {} '.format(self.name,self.contact,self.email,self.username,self.password)
| {"/home/forms.py": ["/home/models.py"], "/home/views.py": ["/home/models.py", "/home/forms.py"], "/home/urls.py": ["/home/views.py"]} |
61,706 | shyamtarams/studentManagementSystem | refs/heads/dev | /accounts/migrations/0002_myuser_name.py | # Generated by Django 3.2.7 on 2021-09-07 18:53
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: adds the myUser.name CharField(max_length=100)."""

    dependencies = [
        ('accounts', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='myuser',
            name='name',
            # NOTE(review): default=False on a CharField stores "False";
            # applied migrations are historical, so left as generated.
            field=models.CharField(default=False, max_length=100),
        ),
    ]
| {"/home/forms.py": ["/home/models.py"], "/home/views.py": ["/home/models.py", "/home/forms.py"], "/home/urls.py": ["/home/views.py"]} |
61,707 | shyamtarams/studentManagementSystem | refs/heads/dev | /accounts/forms.py | from django import forms
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
from .models import myUser
class CustomUserCreationForm(UserCreationForm):
    """Sign-up form bound to the custom user model.

    Bug fix: the original declared ``class meta`` (lowercase) containing
    ``field`` -- both names Django silently ignores, so the form fell back
    to the parent's Meta (auth.User, username only). ``Meta`` / ``fields``
    are the attributes ModelForm actually reads.
    """
    class Meta:
        model = myUser
        fields = "__all__"
| {"/home/forms.py": ["/home/models.py"], "/home/views.py": ["/home/models.py", "/home/forms.py"], "/home/urls.py": ["/home/views.py"]} |
61,708 | shyamtarams/studentManagementSystem | refs/heads/dev | /home/urls.py | from django.urls import path,include
from .views import *
# Routes for the home app: session-login pages plus admin CRUD views.
urlpatterns = [
    # path('student/', student),
    # path('signup/', signup),
    path('login/', login),
    path('logout/', logout),
    path('adminsite/', adminsite,name="admin site"),
    path('addstudent/', studentRegister,name="studentRegister"),
    path('active/', activeStudent,name="activeStudent"),
    path('inactive/', inactiveStudent,name="inactiveStudent"),
    # Toggles a student's active/inactive status by primary key.
    path('status/<int:id>', studentStatus,name="studentStatus"),
    path('studentDetails/', studentDetails,name="studentDetails"),
    path('updatedetails/', updateStudent,name="updateStudent"),
]
61,710 | kuldeepmewara/Trio | refs/heads/master | /student.py | import csv
# CSV "database" shared by the admin/teacher/student console modes.
filename = "record.csv"
# Column layout of record.csv: identity, mid-term mark cells keyed
# '<term><subject>' (e.g. '23' = term 2, subject 3), per-term totals
# 'T1'..'T5', and assignment flags keyed '<no><SUBJ>' (e.g. '3DMS').
fields = ['name', 'regi_no', '11', '12', '13', '14', '15', '16','T1', '21', '22', '23', '24', '25', '26','T2', '31', '32',
          '33', '34', '35', '36', 'T3','41', '42', '43', '44', '45', '46', 'T4','51', '52', '53', '54', '55', '56','T5', '61',
          '62', '63', '64', '65', '66','T6','1DMS','2DMS','3DMS','4DMS','5DMS','1MP','2MP','3MP','4MP','5MP','1POC','2POC','3POC','4POC','5POC','1PPL','2PPL','3PPL','4PPL','5PPL','1SE','2SE','3SE','4SE','5SE','1SPT','2SPT','3SPT','4SPT','5SPT']
def show_news():
    """Print every notice stored in notification.csv."""
    with open('notification.csv', 'r', newline='') as handle:
        notices = list(csv.reader(handle))
    print("\nVirtual BUlletin Board...")
    for notice in notices:
        print("# ", notice[0])
def show_term(name):
    """Print the 6x5 mid-term marks table (plus per-term totals) for the
    student named *name*, reading straight from record.csv."""
    with open('record.csv') as csvfile:
        reader = csv.DictReader(csvfile)
        for row in reader:
            if row['name'] == name:
                print("\nMid Term Marks...")
                print("Sub\\Term\t\t[1]\t\t[2]\t\t[3]\t\t[4]\t\t[5]\t\t[Total]\n\n",end='')
                # i = subject code 1..6, mapped to its printable name.
                for i in range(1,7):
                    if i == 1:
                        subject = "MP "
                    elif i == 2:
                        subject = "DMS"
                    elif i == 3:
                        subject = "PPL"
                    elif i == 4:
                        subject = "SE "
                    elif i == 5:
                        subject = "POC"
                    elif i == 6:
                        subject = "SPT"
                    print(subject,"->\t\t ",end='')
                    total=0
                    # j = term 1..5; mark cells are keyed '<term><subject>'.
                    for j in range(1,6):
                        field=str(j)+str(i)
                        if row[field] =='Ab' or row[field]=='':
                            print("  ",row[field],"  ",end='')
                            # Absent/blank marks count as zero in the total.
                            row[field]=0
                            total = total + int(row[field])
                        else:
                            print("  %2d"%int(row[field]),"  ", end='')
                            total = total + int(row[field])
                    print("\t%2d"%int(total), "\t", end='')
                    print("\n")
                print('-'*52,"\n",end='')
                print("Total (60)\t\t\b",end='')
                # Stored per-term totals; 'Ab'/blank terms are skipped.
                for i in range(1,6):
                    if row[str('T')+str(i)]!='Ab' and row[str('T')+str(i)] !='':
                        print("\t%2d" %int(row[str('T')+str(i)]),"\t",end='')
        csvfile.close()
def show_ass(name):
    """Print the assignment-submission (Yes/NO) table for the student
    named *name*, reading straight from record.csv."""
    with open('record.csv') as csvfile:
        reader = csv.DictReader(csvfile)
        for row in reader:
            if row['name'] == name:
                print("\n\nAssignment Status...")
                print("Sub\\Ass \t\t[1]\t\t[2]\t\t[3]\t\t[4]\t\t[5]\n\n",end='')
                # i = subject code 1..6, mapped to its column-key suffix.
                for i in range(1,7):
                    if i == 1:
                        subject = "MP"
                    elif i == 2:
                        subject = "DMS"
                    elif i == 3:
                        subject = "PPL"
                    elif i == 4:
                        subject = "SE"
                    elif i == 5:
                        subject = "POC"
                    elif i == 6:
                        subject = "SPT"
                    print(subject,"->\t\t",end='')
                    # Assignment flags are keyed '<no><SUBJ>'; '1' = done.
                    for j in range(1,6):
                        field=str(j)+str(subject)
                        if row[field]=='':
                            print("\tNO\t", end='')
                        elif row[field]=='1':
                            print("\tYes\t",end='')
                        else:
                            print("\tNO\t", end='')
                    print("\n")
        csvfile.close()
def name_present(mr):
    """Return truthy iff a record named *mr* exists in record.csv.

    Returns a bool; True/False compare equal to the original's 1/0, so
    existing ``== 1`` call sites keep working. The original collected all
    names into a list first and had unreachable code after its returns.
    """
    with open('record.csv', 'r', newline='') as csvinput:
        # Column 0 holds the student name (the header row only matches if
        # someone is literally named "name", same as the original).
        return any(row[0] == mr for row in csv.reader(csvinput))
def show_rec(n):
    """Print the full record for student *n*: identity line, mid-term
    marks table and assignment status."""
    with open('record.csv') as csvfile:
        reader = csv.DictReader(csvfile)
        for row in reader:
            if row['name']==n:
                print("Name :",row['name'])
                print("Registration no. :",row['regi_no'])
                break
    # Bug fix: the original called show_term(name)/show_ass(name) using the
    # module-level global ``name`` instead of the parameter ``n``.
    show_term(n)
    show_ass(n)
def analysis(m):
    """Print class statistics (max, rounded average, attendance) for the
    mid-term field *m*, a '<term><subject>' key such as '23'.

    NOTE(review): raises ZeroDivisionError if nobody attended, and
    int() raises ValueError on blank (non-'Ab') mark cells -- confirm
    record.csv never stores '' for graded cells.
    """
    n=0
    att=0
    total=0
    n=size()
    max=0
    with open('record.csv','r')as f:
        # fieldnames= is supplied, so the header arrives as a data row;
        # skip it explicitly.
        reader=csv.DictReader(f,fieldnames=fields)
        next(reader)
        for row in reader:
            if row[m]!='Ab':
                row[m]=int(row[m])
                if row[m]>max:
                    max=row[m]
                total=total+row[m]
                att=att+1
    # First character of the key is the term, last is the subject code.
    term=int(m[0])
    if term == 1:
        t = "FIRST"
    elif term == 2:
        t = "SECONOD"
    elif term == 3:
        t = "THIRD"
    elif term == 4:
        t = "FOUR"
    elif term == 5:
        t = "FIVE"
    subject=int(m[-1])
    if subject == 1:
        sub="Micro processor"
    elif subject == 2:
        sub= "Discrete Mathematics"
    elif subject == 3:
        sub= "Principl of programming language"
    elif subject == 4:
        sub= "Software engineering"
    elif subject == 5:
        sub= "Principle of communication"
    elif subject == 6:
        sub= "Statistic and probability"
    print("\n******************IN ", t, "mid term for ",sub,"******************\n",end='' )
    print("maximum marks : ",max)
    print("Average marks : ",round((total/att)))
    print("no of student attend the paper : ",att)
    print("no of student not attend the paper : ",n-att)
    f.close()
def size():
    """Return the number of data rows in record.csv (header excluded).

    The original looped over the reader and re-listed the remainder on
    each pass, which only accidentally produced the right count; this is
    the direct equivalent: total lines minus the header, floored at 0.
    """
    with open(filename, 'r', newline='') as csvfile:
        # fieldnames= is supplied, so DictReader also yields the header
        # line as a row.
        rows = sum(1 for _ in csv.DictReader(csvfile, fieldnames=fields))
    return max(rows - 1, 0)
def mid_term():
    """Interactively pick a mid-term field key.

    Prompts for a term (1-5) and a subject (1-6), retrying on
    non-integer input. Returns the '<term><subject>' key string, or 0
    when either value is out of range.
    """
    while True:
        try:
            term = int(input("WHICH MID TERM [1],[2],[3],[4] OR [5] : "))
            break
        except:
            print("integer value only")
    if int(term) not in range(1, 6):
        return 0
    else:
        while True:
            try:
                subject = int(input("SUBJECT : [1]MP\t[2]DMS\t[3]PPL\t[4]SE\t[5]POC]\t[6]SPT : "))
                break
            except:
                print("integer value only")
        if int(subject) not in range(1, 7):
            return 0
        return str(term) + str(subject)
# Interactive menu loop for student mode; runs on import from start.py.
choice=1
print("\t"*10,"****************STUDENT MODE******************")
print("[1] See notification\n[2] SHOW RECORD \n[3] ANALYSIS \n[4] EXIT")
while choice !=0:
    # Keep prompting until an integer is entered.
    while True:
        try:
            choice = int(input("\nEnter the choice : "))
            break
        except:
            print("INVALID INPUT..")
    if choice==1:
        show_news()
    elif choice==2:
        name=input("Name : ")
        a=name_present(name)
        if a==1:
            show_rec(name)
        else:
            print("\n\tRecord with name ", name, " is not present ")
    elif choice==3:
        print("select the term and subject : ")
        # mid_term() returns 0 on invalid selection, else a key like '23'.
        m = mid_term()
        if int(m)!=0:
            analysis(m)
        else:
            print("Input out of choice ..")
    elif choice==4:
        choice=0
| {"/start.py": ["/admin.py", "/teacher.py", "/student.py"]} |
61,711 | kuldeepmewara/Trio | refs/heads/master | /start.py | import os
import subprocess
# 'cls' is Windows-only; on other platforms these clears are no-ops/errors.
os.system('cls')
def clear():
    """Clear the console screen (Windows 'cls' via a shell)."""
    subprocess.call("cls",shell=True)
mode=1
os.system('cls')
# Top-level mode selector; each mode module runs its menu on import.
while mode!=0:
    print("\n[1]\tAdmin \n[2]\tTeacher\n[3]\tStudent\n[4]\tExit")
    while True:
        try:
            mode=int(input("\n\nEnter the mode : "))
            break
        except:
            print("Invalid ")
    # NOTE(review): Python caches imports, so each mode module's menu only
    # runs the FIRST time it is selected; re-selecting a mode later does
    # nothing (importlib.reload would be needed to change that).
    if mode==1:
        os.system('cls')
        clear()
        import admin
    elif mode==2:
        #os.system('cls')
        clear()
        import teacher
    elif mode==3:
        os.system('cls')
        import student
    else:
        mode=0
        print("Thank you ,have Good Day...")
61,712 | kuldeepmewara/Trio | refs/heads/master | /admin.py | import csv
import datetime
import string
# Column layout of record.csv (must match student.py / teacher.py):
# identity, mid-term mark cells '<term><subject>', per-term totals
# 'T1'..'T5', and assignment flags '<no><SUBJ>'.
fields = ['name', 'regi_no', '11', '12', '13', '14', '15', '16','T1', '21', '22', '23', '24', '25', '26','T2', '31', '32',
          '33', '34', '35', '36', 'T3','41', '42', '43', '44', '45', '46', 'T4','51', '52', '53', '54', '55', '56','T5', '61',
          '62', '63', '64', '65', '66','T6','1DMS','2DMS','3DMS','4DMS','5DMS','1MP','2MP','3MP','4MP','5MP','1POC','2POC','3POC','4POC','5POC','1PPL','2PPL','3PPL','4PPL','5PPL','1SE','2SE','3SE','4SE','5SE','1SPT','2SPT','3SPT','4SPT','5SPT']
#name of csv file
filename = "record.csv"
def data_entry():
    """Prompt for one student's name and append a new row to record.csv
    with an auto-generated registration number ('jics' + next id)."""
    myrec = []
    mystudent = {}
    name = input("NAME : ")
    #reg_no=input("REGISTRATION NO : ")
    mystudent['name']=name
    mystudent['regi_no'] ="jics"+ str(last_id())
    myrec.append(mystudent)
    with open(filename, 'a',newline='') as csvfile:
        # creating a csv dict writer object
        writer = csv.DictWriter(csvfile, fieldnames=fields)
        #writing headers (field names)
        # Re-open for reading to see whether the file is still empty; if
        # so, the header row is written first.
        with open('record.csv','r',newline='') as csvinput:
            reader=csv.reader(csvinput)
            data=list(reader)
            no_lines=len(data)
        if no_lines ==0:
            writer.writeheader()
        # writing data rows
        writer.writerows(myrec)
    csvfile.close()
def last_id():
    """Return the next registration suffix as a zero-padded 3-char string.

    Scans record.csv and increments the numeric tail of the LAST row's
    regi_no; returns "000" for an empty file.
    NOTE(review): assumes rows are appended in id order and that ids never
    exceed 3 digits (int(...) would still parse, but padding breaks).
    """
    id=0
    n=0
    n = size()
    if n>0:
        with open(filename,'r',newline='') as csvfile:
            # creating a csv dict writer object
            reader = csv.DictReader(csvfile, fieldnames=fields)
            # Walk the whole file; ``id`` ends up as the last row's value.
            for row in reader:
                id = row['regi_no']
                #print(row['b_id'])
        csvfile.close()
        #c = 1
        c = id[-3:].strip()
        k = str(int(c) + 1).zfill(3)
        return k
    else:
        return "000"
def size():
    """Return the number of data rows in record.csv (header excluded).

    Replaces the original loop-and-relist pattern, which only
    accidentally produced the right count, with the direct equivalent:
    total lines minus the header, floored at 0 for an empty file.
    """
    with open(filename, 'r', newline='') as csvfile:
        # fieldnames= is supplied, so DictReader also yields the header
        # line as a row.
        rows = sum(1 for _ in csv.DictReader(csvfile, fieldnames=fields))
    return max(rows - 1, 0)
def data_entries(n):
    """Prompt for *n* student names and append one row per student.

    The file is re-opened on every iteration so that last_id() sees the
    row appended in the previous pass.
    """
    for i in range(1,n+1):
        myrec = []
        mystudent = {}
        name=input("NAME of student "+str(i)+" : ")
        mystudent['name']=name
        mystudent['regi_no']="jics" +str(last_id())
        myrec.append(mystudent)
        with open(filename, 'a',newline='') as csvfile:
            # creating a csv dict writer object
            writer = csv.DictWriter(csvfile, fieldnames=fields)
            # writing headers (field names)
            # writer.writeheader()
            with open('record.csv', 'r', newline='') as csvinput:
                reader = csv.reader(csvinput)
                data = list(reader)
                no_lines = len(data)
            if no_lines == 0:
                writer.writeheader()
            # writing data rows
            writer.writerows(myrec)
        csvfile.close()
def delete_entry(n):
    """Copy record.csv to records.csv, dropping any row whose name is *n*.

    The caller follows up with move() to copy the filtered file back.
    Uses context managers; the original shadowed the ``input`` builtin
    and relied on manual close() calls.
    """
    with open('record.csv', 'r', newline='') as source, \
            open('records.csv', 'w', newline='') as target:
        writer = csv.writer(target)
        for row in csv.reader(source):
            if row[0] != n:
                writer.writerow(row)
def move():
input = open('records.csv', 'r',newline='')
output = open('record.csv', 'w',newline='')
writer = csv.writer(output)
for row in csv.reader(input):
writer.writerow(row)
input.close()
output.close()
def add_news(filename, line):
    """Prepend *line*, stamped '(A) <today's date>', to *filename*.

    The newest notice always ends up on the first line of the file.
    """
    stamped = f"(A) {datetime.datetime.now().date()} {line}"
    with open(filename, 'r+') as handle:
        existing = handle.read()
        handle.seek(0, 0)
        handle.write(stamped.rstrip('\r\n') + '\n' + existing)
def show_news():
    """Print every notice currently stored in notification.csv."""
    with open('notification.csv', 'r', newline='') as handle:
        notices = list(csv.reader(handle))
    print("\nVirtual Bulletin Board : \n")
    for notice in notices:
        print("# ", notice[0])
# Interactive menu loop for admin mode; runs on import from start.py.
choice=1
print("\t"*10,"****************ADMIN MODE******************")
print("[1] WANT TO ENTER A RECORD \n[2] WANT TO ADD NO OF RECORDS \n[3] WANT TO DELETE AN RECORD")
print("[4] ADD NEWS \n[5] SHOW NEWS\n[6] EXIT")
while choice!=0:
    # Keep prompting until an integer is entered.
    while True:
        try:
            choice = int(input("\n\nENTER THE CHOICE : "))
            break
        except:
            print("\n\tINVALID INPUT...")
    if choice==1:
        data_entry()
        print("ENTRY IS added INTO DATABASE")
    elif choice==2:
        while True:
            try:
                n=int(input("no student to add into record : "))
                break
            except:
                print("integer only")
        data_entries(n)
        print(n,"ENTRY IS ADDED INTO DATABASE")
    elif choice==3:
        while True:
            try:
                n=str(input("Enter the Name : "))
                break
            except:
                print("Invalid name entered ..")
        # Two-step delete: filter into records.csv, then copy back.
        delete_entry(n)
        move()
        print("RECRD OF ",n," IS DELETED")
    elif choice==4:
        news=str(input("enter the news"))
        add_news("notification.csv",news)
    elif choice==5:
        show_news()
    elif choice==6:
        choice=0
61,713 | kuldeepmewara/Trio | refs/heads/master | /teacher.py | import csv
import os
import datetime
# Column layout of record.csv (must match admin.py / student.py):
# identity, mid-term mark cells '<term><subject>', per-term totals
# 'T1'..'T5', and assignment flags '<no><SUBJ>'.
fields = ['name', 'regi_no', '11', '12', '13', '14', '15', '16','T1', '21', '22', '23', '24', '25', '26','T2', '31', '32',
          '33', '34', '35', '36', 'T3','41', '42', '43', '44', '45', '46', 'T4','51', '52', '53', '54', '55', '56','T5', '61',
          '62', '63', '64', '65', '66','T6','1DMS','2DMS','3DMS','4DMS','5DMS','1MP','2MP','3MP','4MP','5MP','1POC','2POC','3POC','4POC','5POC','1PPL','2PPL','3PPL','4PPL','5PPL','1SE','2SE','3SE','4SE','5SE','1SPT','2SPT','3SPT','4SPT','5SPT']
#name of csv file
filename = "record.csv"
#display the notification
def read():
    """Print every notice currently stored in notification.csv."""
    with open('notification.csv', 'r', newline='') as handle:
        notices = list(csv.reader(handle))
    print("\nVirtual Bulletin Board : \n")
    for notice in notices:
        print("# ", notice[0])
#adding a new news
def add_news(filename, line):
    """Prepend *line*, stamped '(T) <today's date>', to *filename*.

    The newest notice always ends up on the first line of the file.
    """
    stamped = f"(T) {datetime.datetime.now().date()} {line}"
    with open(filename, 'r+') as handle:
        existing = handle.read()
        handle.seek(0, 0)
        handle.write(stamped.rstrip('\r\n') + '\n' + existing)
#return data of student
def return_data(mr):
    """Return the raw CSV row (a list) for student *mr*, or the sentinel
    string "name" when no such record exists.

    Single pass over record.csv; the original read the whole file twice
    (once for a name list, once to fetch the row) and had an unreachable
    close() after its returns.
    """
    with open('record.csv', 'r', newline='') as csvinput:
        for row in csv.reader(csvinput):
            if row[0] == mr:
                return row
    return "name"
#return mid term field name
def mid_term():
    """Interactively pick a mid-term field key.

    Prompts for a term (1-5) and a subject (1-6), retrying on
    non-integer input. Returns the '<term><subject>' key string, or 0
    when either value is out of range.
    """
    while True:
        try:
            term = int(input("WHICH MID TERM [1],[2],[3],[4] OR [5] : "))
            break
        except:
            print("integer value only")
    if int(term) not in range(1, 6):
        return 0
    else:
        while True:
            try:
                subject = int(input("SUBJECT : [1]MP\t[2]DMS\t[3]PPL\t[4]SE\t[5]POC]\t[6]SPT : "))
                break
            except:
                print("integer value only")
        if int(subject) not in range(1, 7):
            return 0
        return str(term) + str(subject)
#deleting a record
def delete_entry(n):
    """Drop *n*'s row: filter record.csv into records.csv, then copy the
    filtered file back with move().

    Uses context managers; the original shadowed the ``input`` builtin
    and relied on manual close() calls.
    """
    with open('record.csv', 'r', newline='') as source, \
            open('records.csv', 'w', newline='') as target:
        writer = csv.writer(target)
        for row in csv.reader(source):
            if row[0] != n:
                writer.writerow(row)
    move()
def move():
input = open('records.csv', 'r',newline='')
output = open('record.csv', 'w',newline='')
writer = csv.writer(output)
for row in csv.reader(input):
writer.writerow(row)
input.close()
output.close()
def total(mydic):
    """Fill in the per-term totals 'T1'..'T5' of record dict *mydic*
    in place.

    Mark cells keyed '<term><subject>' are summed per term; blank cells
    are rewritten to 'Ab' and (like existing 'Ab' cells) excluded.
    """
    for term in range(1, 6):
        term_sum = 0
        for subj in range(1, 7):
            key = f"{term}{subj}"
            if mydic[key] != '' and mydic[key] != 'Ab':
                term_sum += int(mydic[key])
            else:
                mydic[key] = 'Ab'
        mydic['T' + str(term)] = term_sum
#writing a data into database
def data_entry(myrec):
    """Append the dict rows in *myrec* to record.csv, writing the header
    line first when the file is currently empty."""
    with open(filename, 'a', newline='') as csvfile:
        writer = csv.DictWriter(csvfile, fieldnames=fields)
        # Peek at the file to decide whether the header is still missing.
        with open('record.csv', 'r', newline='') as csvinput:
            existing = list(csv.reader(csvinput))
        if not existing:
            writer.writeheader()
        writer.writerows(myrec)
# return assignment field name
def assignment():
    """Interactively pick an assignment slot; return its record.csv field
    key (e.g. '3DMS'), or 0 on any invalid input.

    Robustness fix: the original called int() on raw input with no
    handler, so a non-numeric answer crashed the whole teacher menu
    (mid_term() already guarded against this).
    """
    term = input("WHICH ASSIGNMENT [1],[2],[3],[4] OR [5]")
    try:
        term_no = int(term)
    except ValueError:
        print("INVALID INPUT ...")
        return 0
    if term_no not in range(1, 6):
        print("INVALID INPUT ...")
        return 0
    try:
        subject = int(input("SUBJECT : [1]MP\t[2]DMS\t[3]PPL\t[4]SE\t[5]POC]\t[6]SPT"))
    except ValueError:
        print("INVALID SELECTION ..")
        return 0
    suffixes = {1: "MP", 2: "DMS", 3: "PPL", 4: "SE", 5: "POC", 6: "SPT"}
    if subject not in suffixes:
        print("INVALID SELECTION ..")
        return 0
    # The raw input string is used as the prefix, as in the original.
    return term + suffixes[subject]
def analysis(m):
    """Print class statistics (max, average, attendance) for the mid-term
    field *m*, a '<term><subject>' key such as '23'.

    NOTE(review): raises ZeroDivisionError if nobody attended, and int()
    raises ValueError on blank (non-'Ab') mark cells -- same as the
    student-mode version.
    """
    att=0
    total=0
    n=size()
    top=0
    with open('record.csv','r')as f:
        reader=csv.DictReader(f)
        # Bug fix: the original called ``row.next()`` here before ``row``
        # existed, raising NameError on every call. DictReader without an
        # explicit fieldnames= already consumes the header line, so no
        # manual skip is needed at all.
        for row in reader:
            if row[m]!='Ab':
                row[m]=int(row[m])
                if row[m]>top:
                    top=row[m]
                total=total+row[m]
                att=att+1
    # First character of the key is the term, last is the subject code.
    term=int(m[0])
    if term == 1:
        t = "FIRST"
    elif term == 2:
        t = "SECONOD"
    elif term == 3:
        t = "THIRD"
    elif term == 4:
        t = "FOUR"
    elif term == 5:
        t = "FIVE"
    subject=int(m[-1])
    if subject == 1:
        sub="Micro processor"
    elif subject == 2:
        sub= "Discrete Mathematics"
    elif subject == 3:
        sub= "Principl of programming language"
    elif subject == 4:
        sub= "Software engineering"
    elif subject == 5:
        sub= "Principle of communication"
    elif subject == 6:
        sub= "Statistic and probability"
    print("\n******************IN ", t, "mid term for ",sub,"******************\n",end='' )
    print("maximum marks : ",top)
    print("Average marks : ",(total/att))
    print("no of student attend the paper : ",att)
    print("no of student not attend the paper : ",n-att)
def size():
    """Return the number of data rows in record.csv (header excluded).

    Replaces the original loop-and-relist pattern, which only
    accidentally produced the right count, with the direct equivalent:
    total lines minus the header, floored at 0 for an empty file.
    """
    with open(filename, 'r', newline='') as csvfile:
        # fieldnames= is supplied, so DictReader also yields the header
        # line as a row.
        rows = sum(1 for _ in csv.DictReader(csvfile, fieldnames=fields))
    return max(rows - 1, 0)
# Interactive menu loop for teacher mode; runs on import from start.py.
print("\t"*10,"****************Teacher MODE******************")
print("[1] See notification\n[2] Add notification \n[3] Add mid term marks\n[4] Adding Assignment Status \n[5] Analysis \n[6] Exit\n\n")
choice=1
mydic={}
while choice!=0:
    # Keep prompting until an integer is entered.
    while True:
        try:
            choice = int(input("\n\nENTER THE CHOICE : "))
            break
        except:
            print("\n\tINVALID INPUT..")
    if choice==1:
        read()
    elif choice==2:
        text=input("\nENTER THE NEWS")
        add_news("notification.csv",text)
    elif choice==3:
        # Upload a mid-term mark: fetch the row, edit one field, then
        # delete the old row and append the updated one.
        mr = input("Name : ")
        a = return_data(mr)
        if a!="name":
            # Convert the raw list row to a dict keyed by column name.
            for i in range(0, len(a)):
                mydic[fields[i]] = a[i]
            m = mid_term()
            if m!=0:
                while True: #exception handling concept here
                    try:
                        marks = int(input("\nEnter the marks ..."))
                        if marks in range(0,11):
                            break
                    except:
                        print("\n\tThat's not a valid input!")
                for key in mydic.keys():
                    if key == m:
                        mydic[key] = marks
                delete_entry(mr)
                # Recompute the per-term totals before re-appending.
                total(mydic)
                mylist = []
                mylist.append(mydic)
                data_entry(mylist)
                print("\n\tMarks is successfully Uploaded")
            else:
                print("\n\tINVALID INPUT NO ACTION PERFORMED ,YOU HAVE TO TRY ONCE AGAIN ")
        else:
            print("\n\tRecord with name ", mr, " is not present ")
    elif choice==4:
        # Upload an assignment submitted/not-submitted flag.
        mr =input("Name : ")
        a = return_data(mr)
        if a!="name":
            for i in range(0, len(a)):
                mydic[fields[i]] = a[i]
            m=assignment()
            if m!=0:
                marks=9
                while int(marks) != 1 and int(marks) != 0: #handling of run time error
                    marks = input("ASSIGNMENT SUBMITTED [1] OR NOT [0] : ")
                for key in mydic.keys():
                    if key==m:
                        mydic[key] = marks
                delete_entry(mr)
                mylist = []
                mylist.append(mydic)
                data_entry(mylist)
                print("\n\tAssingment status is successfully Uploaded")
            else:
                print("\n\tNO ACTION PERFORMED ,YOU HAVE TO TRY ONCE AGAIN ")
        else:
            print("\n\tRecord with name ", mr, " is not present ")
    elif choice==5:
        print("select the term and subject : ")
        m = mid_term()
        if m!=0:
            analysis(m)
        else:
            print("Input out of choice..")
    elif choice==6:
        choice = 0
61,732 | chui101/xmlvis | refs/heads/master | /app.py | from flask import Flask, request, Response, make_response
from flask_cors import CORS
import json
import kaplanmeier
from io import StringIO
import csv
from pymongo import MongoClient
# Cached USPS postal-code -> FIPS-code lookup and state list, populated
# lazily by load_state_fips() from statefips.csv.
state_postal_fips_mapping = {}
us_states = []
app = Flask(__name__)
CORS(app)
# Mongo collection of NAACCR case records backing every endpoint below.
client = MongoClient(host="localhost",port=27017)
db = client['test']
collection = db['naaccr']
def load_state_fips():
    """Populate the USPS->FIPS map and the state list from statefips.csv.

    No-op when the mapping has already been loaded.
    """
    if state_postal_fips_mapping:
        return
    with open("statefips.csv", mode='r') as infile:
        for record in csv.DictReader(infile):
            state_postal_fips_mapping[record['usps']] = record['fips']
            us_states.append(record['usps'])
def state_to_fips(state):
    """Translate a USPS state code (any case) to its FIPS code string."""
    postal = state.upper()
    return state_postal_fips_mapping[postal]
def get_dbfilter_from_request():
    """Return the request's 'filter' parameter parsed as JSON, or None
    when the parameter is absent."""
    raw = request.values.get("filter")
    return None if raw is None else json.loads(raw)
def get_groupings_from_db(field_name, dbfilter = None):
    """Run a Mongo aggregation counting documents grouped by *field_name*,
    optionally pre-filtered by the query *dbfilter*; returns the cursor of
    {'_id': value, 'count': n} documents."""
    pipeline = []
    if dbfilter is not None:
        pipeline.append({'$match': dbfilter})
    pipeline.append({'$group': {'_id': '$' + field_name, 'count': {'$sum': 1}}})
    return collection.aggregate(pipeline)
@app.route('/')
def root():
    """Report the total number of case records as JSON."""
    # NOTE(review): cursor.count() is deprecated/removed in newer pymongo;
    # behaviour preserved from the original.
    payload = {"case_count": collection.find().count()}
    return Response(json.dumps(payload), mimetype='application/json')
@app.route('/counts/',methods=['GET','POST'])
def get_counts():
    """Return grouped counts for several demographic fields as JSON.

    ageAtDiagnosis is bucketed into <18 / 18-65 / >65; every other field
    is returned with one entry per distinct value. The applied filter is
    echoed back under 'dbfilter' when present.
    """
    dbfilter = get_dbfilter_from_request()
    response = {}
    if dbfilter is not None:
        response['dbfilter'] = dbfilter
    response['data'] = []
    things_to_count = ['sex','primarySite','vitalStatus','ageAtDiagnosis','race1']
    for groupby in things_to_count:
        result = get_groupings_from_db(groupby, dbfilter)
        if groupby == 'ageAtDiagnosis':
            # Fold the per-age counts into three buckets.
            under18 = 0
            between18and65 = 0
            over65 = 0
            for row in result:
                if row['_id'] < 18:
                    under18 += row['count']
                elif row['_id'] >= 18 and row['_id'] < 65:
                    between18and65 += row['count']
                elif row['_id'] >= 65:
                    over65 += row['count']
            response['data'].append({groupby:'<18','count':under18})
            response['data'].append({groupby:'18-65','count':between18and65})
            response['data'].append({groupby:'>65','count':over65})
        else:
            for row in result:
                response['data'].append({groupby : row['_id'], 'count': row['count']})
        result.close()
    response['success'] = True
    return Response(json.dumps(response),mimetype='application/json')
@app.route('/charts/bar',methods=['GET','POST'])
def get_site_groupings():
    """Counts grouped by primary site: [{'site': ..., 'count': ...}]."""
    dbfilter = get_dbfilter_from_request()
    rows = get_groupings_from_db('primarySite', dbfilter)
    payload = [{'site': r['_id'], 'count': r['count']} for r in rows]
    return Response(json.dumps(payload), mimetype='application/json')
@app.route('/charts/pie', methods=['GET', 'POST'])
def get_sex_counts():
    """Return [count for sex code "2", count for sex code "1"] as JSON.

    Code "1" fills index 1 and "2" fills index 0 -- presumably the
    NAACCR male/female codes; confirm against the data dictionary.
    """
    counts = [0, 0]
    dbfilter = get_dbfilter_from_request()
    for row in get_groupings_from_db('sex', dbfilter):
        if row['_id'] == "1":
            counts[1] = row['count']
        elif row['_id'] == "2":
            counts[0] = row['count']
    return Response(json.dumps(counts), mimetype='application/json')
@app.route('/charts/map',methods=['GET','POST'])
def get_geo_data():
    """Counts grouped by state at diagnosis: [{'state': ..., 'count': ...}]."""
    dbfilter = get_dbfilter_from_request()
    rows = get_groupings_from_db('addrAtDxState', dbfilter)
    payload = [{'state': r['_id'], 'count': r['count']} for r in rows]
    return Response(json.dumps(payload), mimetype='application/json')
@app.route('/charts/countymap',methods=['GET', 'POST'])
def get_geo_data_by_county():
    """County-level counts across every state, keyed by 5-digit FIPS code.

    The request filter is parsed once and combined immutably with each
    per-state clause; the original re-parsed the request body on every
    loop iteration (and mutated the parsed filter) to get the same effect.
    """
    response = []
    load_state_fips()
    base_filter = get_dbfilter_from_request()
    for state in us_states:
        state_clause = {"addrAtDxState": state}
        if base_filter is None:
            dbfilter = state_clause
        elif '$and' in base_filter:
            # Build a fresh $and list so base_filter is never mutated.
            dbfilter = {'$and': base_filter['$and'] + [state_clause]}
        else:
            dbfilter = {'$and': [base_filter, state_clause]}
        result = get_groupings_from_db('countyAtDx', dbfilter)
        for row in result:
            # State FIPS prefix + county code = full county FIPS.
            fips_county = str(state_to_fips(state)) + str(row['_id'])
            response.append({'county': fips_county, 'count': row['count']})
    return Response(json.dumps(response), mimetype='application/json')
@app.route('/charts/map/<state>',methods=['GET', 'POST'])
def get_geo_data_by_state(state):
    """Return county counts for a single state, keyed by combined FIPS."""
    payload = []
    load_state_fips()
    state = state.upper()
    dbfilter = get_dbfilter_from_request()
    state_clause = {"addrAtDxState": state}
    if dbfilter is None:
        dbfilter = state_clause
    elif '$and' in dbfilter:
        dbfilter['$and'].append(state_clause)
    else:
        dbfilter = {'$and':[dbfilter, state_clause]}
    for row in get_groupings_from_db('countyAtDx', dbfilter):
        combined_fips = str(state_to_fips(state)) + str(row['_id'])
        payload.append({'county': combined_fips, 'count': row['count']})
    return Response(json.dumps(payload),mimetype='application/json')
@app.route('/charts/survival', methods=['GET', 'POST'])
def get_kaplan_meier_by_stage():
    """Return Kaplan-Meier time points for major stage groups 1-4.

    The response is {"treatments": [timepoints_stage1, ..., timepoints_stage4]}.
    """
    base_filter = get_dbfilter_from_request()
    treatments = []
    for stage in ['1','2','3','4']:
        stage_clause = {"majorStageGrp":stage}
        if base_filter is not None:
            stage_dbfilter = {'$and':[base_filter, stage_clause]}
        else:
            stage_dbfilter = stage_clause
        cursor = collection.find(stage_dbfilter)
        km = kaplanmeier.KaplanMeier()
        for row in cursor:
            km.add_record(row)
        km.calculate()
        treatments.append(km.to_timepoints())
        cursor.close()
    return Response(json.dumps({'treatments': treatments}),mimetype='application/json')
@app.route('/export/', methods=['GET', 'POST'])
def export_data_to_csv():
    """Stream up to 1000 filtered records as a CSV attachment."""
    csv_items = ["patientIdNumber","sex","vitalStatus","ageAtDiagnosis","countyAtDx","addrAtDxState","dateOfDiagnosis","primarySite","dateOfLastContact","derivedAjcc6StageGrp","derivedAjcc7StageGrp","race1"]
    wanted = set(csv_items)
    dbfilter = get_dbfilter_from_request()
    # limit results to 1000
    cursor = collection.find(dbfilter).limit(1000)
    # Keep only the whitelisted columns from each record.
    data = [{key: value for key, value in row.items() if key in wanted} for row in cursor]
    # make csv
    csv_data = StringIO()
    csv_writer = csv.DictWriter(csv_data, fieldnames=csv_items)
    csv_writer.writeheader()
    csv_writer.writerows(data)
    output = make_response(csv_data.getvalue())
    output.headers["Content-Disposition"] = "attachment; filename=export.csv"
    output.headers["Content-type"] = "text/csv"
    return output
# Start the Flask development server when run directly.
# NOTE(review): host='0.0.0.0' binds every interface, exposing the dev
# server to the network — confirm this is intended outside development.
if __name__ == '__main__':
    app.run(host='0.0.0.0')
| {"/app.py": ["/kaplanmeier.py"]} |
61,733 | chui101/xmlvis | refs/heads/master | /kaplanmeier.py | class KaplanMeier:
    def __init__(self):
        # km_counts maps follow-up length -> [deaths, counter].
        # Before calculate() the second slot holds the negated number of
        # censored records at that time (decremented in add_record);
        # calculate() overwrites it with the number at risk.
        self.km_counts = {}
        # total records fed through add_record()
        self.population = 0
def to_timepoints(self):
result= []
for time in sorted(self.km_counts):
row = {
"t": time,
"d": self.km_counts[time][0],
"n": self.km_counts[time][1]
}
result.append(row)
return result
# reading from a single JSON
def add_record(self,json):
self.population += 1
vital_status = json["vitalStatus"]
follow_up_length = json['lengthOfFollowup']
if follow_up_length < 0:
follow_up_length = 0
if follow_up_length not in self.km_counts:
self.km_counts[follow_up_length] = [0,0]
if vital_status == '0':
self.km_counts[follow_up_length][0] += 1
else:
self.km_counts[follow_up_length][1] -= 1
def calculate(self):
time_points = sorted(self.km_counts)
for i in range(len(time_points)):
# start calculating final numbers with the final population count
if i == 0:
time = time_points[i]
self.km_counts[time][1] = self.population - self.km_counts[time][0] + self.km_counts[time][1]
# calculate each subsequent time point using the remaining population from the previous time point
else:
time = time_points[i]
prev_time = time_points[i-1]
self.km_counts[time][1] = self.km_counts[prev_time][1] - self.km_counts[time][0] + self.km_counts[time][1]
if __name__ == "__main__":
jsonstring =[
{ "_id" : "5b1d4fd78e5651446c445449", "patientIdNumber" : 935, "dateOfDiagnosis" : 20110710, "addrAtDxState" : "AR", "derivedAjcc6StageGrp" : "99", "sex" : "2", "race1" : "99", "dateOfLastContact" : 20161104, "lengthOfFollowup" : 1944, "primarySite" : "C51", "ageAtDiagnosis" : 89, "derivedAjcc7StageGrp" : 999, "vitalStatus" : "1" },
{ "_id" : "5b1d4fd78e5651446c4454c7", "patientIdNumber" : 1061, "dateOfDiagnosis" : 20110318, "addrAtDxState" : "MO", "derivedAjcc6StageGrp" : "01", "lengthOfFollowup" : 0, "sex" : "1", "race1" : "99", "dateOfLastContact" : 20110318, "majorStageGrp" : "0", "primarySite" : "C67", "ageAtDiagnosis" : 50, "derivedAjcc7StageGrp" : 10, "vitalStatus" : "1" },
{ "_id" : "5b1d4fd78e5651446c4455ce", "patientIdNumber" : 1324, "dateOfDiagnosis" : 20120130, "addrAtDxState" : "TX", "derivedAjcc6StageGrp" : "01", "lengthOfFollowup" : 0, "sex" : "1", "race1" : "96", "dateOfLastContact" : 20120130, "majorStageGrp" : "0", "primarySite" : "C67", "ageAtDiagnosis" : 72, "derivedAjcc7StageGrp" : 10, "vitalStatus" : "1" },
{ "_id" : "5b1d4fd88e5651446c445634", "patientIdNumber" : 1426, "dateOfDiagnosis" : 20100306, "addrAtDxState" : "NY", "derivedAjcc6StageGrp" : "01", "lengthOfFollowup" : 0, "sex" : "2", "race1" : "99", "dateOfLastContact" : 20100306, "majorStageGrp" : "0", "primarySite" : "C67", "ageAtDiagnosis" : 70, "derivedAjcc7StageGrp" : 10, "vitalStatus" : "1" }
]
km=KaplanMeier()
a = []
for record in jsonstring:
a+=km.readFromJson(record)
km.buildKaplanMeier(len(jsonstring))
print(km.buildJson()) | {"/app.py": ["/kaplanmeier.py"]} |
61,734 | Muki-v/XiahangFlightOptimize | refs/heads/master | /src/PlaneJobLine.py | from src.CommonDirectory import commonDirectory as D
class planeJobLine:
    """The time-ordered sequence of flights assigned to one plane."""

    def __init__(self, planeID):
        self.planeID = planeID
        # Plane type looked up from the scenario CSV via the shared directory.
        self.planeType = D.planeID_totype(planeID)
        # Flights kept in time order; see insert_airline().
        self.planeFlightList = []

    def insert_airline(self, newAirline):
        """
        Insert a new airline based on the time sequence
        :param newAirline: airLine
        :return: None
        :raises ValueError: when the airline belongs to another plane
        """
        # Fixed attribute typo: airLine defines LinePlaneID; the original
        # read newAirline.LinePlandID, which always raised AttributeError.
        if self.planeID != newAirline.LinePlaneID:
            raise ValueError("New airline doesn't belong to target plane job line.")
        if self.planeFlightList == []:
            self.planeFlightList.append(newAirline)
            return
        # Insert before the first flight that starts after the new one ends;
        # otherwise append at the end.
        for index in range(len(self.planeFlightList)):
            if self.planeFlightList[index].LineFlyPeriod.start > newAirline.LineFlyPeriod.end:
                self.planeFlightList.insert(index, newAirline)
                return
        self.planeFlightList.append(newAirline)

    def is_planejobline_illegal(self):
        """
        check if the airline is satisfied the consistency request. means the cur land airport must
        be the next line's departure airport
        :return: boolean — True when the sequence is consistent
        """
        if len(self.planeFlightList) <= 1:
            return True
        # Renamed loop variable so the builtin next() is not shadowed.
        for cur, nxt in zip(self.planeFlightList[:-1], self.planeFlightList[1:]):
            if cur.LineLandAirport != nxt.LineDepartureAirport:
                return False
        return True
| {"/src/PlaneJobLine.py": ["/src/CommonDirectory.py"], "/src/CommonDirectory.py": ["/src/CommonParameter.py"], "/src/AirLine.py": ["/src/CommonDirectory.py", "/src/CommonParameter.py"]} |
61,735 | Muki-v/XiahangFlightOptimize | refs/heads/master | /src/CommonDirectory.py | import csv
from datetime import datetime
from datetime import timedelta
from src.TimePeriod import timePeriod
from src.CommonParameter import commonParameter as cpara
class commonDirectory:
    """Static lookup helpers shared across the flight-optimization code."""

    # Mapping between Chinese nationality labels (domestic/international)
    # and numeric codes, plus the reverse mapping.
    dict_NationalityToNum = {'国内':0, '国际':1}
    dict_NumToNationality = {0:'国内', 1:'国际'}
    # Influence categories: landing, take-off, parking.
    dict_InfluenceToNum = {'降落':0, '起飞':1, '停机':2}
    # Next ID handed out for newly created airlines; see get_cur_newlineID().
    newLineID = 9001

    @staticmethod
    def planeID_totype(planeID):
        """Look up the plane type (column 9) for *planeID* (column 8) in
        Xiahang_Airline.csv.

        :raises ValueError: when the ID is not present in the file
        """
        # Fixed path typo: "Secenrio" -> "Scenario" — every other reader in
        # this project uses ../Scenario/, so the old path always failed.
        with open("../Scenario/Xiahang_Airline.csv", encoding="gbk") as f:
            data = csv.reader(f)
            head = next(data)
            for row in data:
                if row[8] == planeID:
                    return row[9]
        raise ValueError("plane ID cannot be found in Xiahang_Airline.csv")

    @staticmethod
    def flight_timecost(PlaneType, DepartureAirport, LandAirport):
        """
        get the timecost of a flight based on planetype, start and end airport
        all data could be found in Xiahang_FlightTime.csv
        :param PlaneType: int
        :param DepartureAirport: int
        :param LandAirport: int
        :return: datetime.timedelta, or False when no matching row exists
        """
        with open("../Scenario/Xiahang_FlightTime.csv", encoding="gbk") as f:
            data = csv.reader(f)
            head = next(data)
            for row in data:
                if row[0:3] == [str(PlaneType), str(DepartureAirport), str(LandAirport)]:
                    return timedelta(minutes=int(row[3]))
        # NOTE(review): returning False on a miss (rather than None/raising)
        # may be relied on by callers — confirm before changing.
        return False

    @staticmethod
    def get_cur_newlineID():
        """
        get the current new airline ID and increase it after used.
        :return: int — the ID before the increment
        """
        commonDirectory.newLineID = commonDirectory.newLineID + 1
        return commonDirectory.newLineID - 1
if __name__ == '__main__':
    # Ad-hoc smoke test of the lookup helpers and datetime round-tripping.
    print(commonDirectory.flight_timecost(3, 49, 5))
    start = datetime.strptime("05/05/2017 15:25", cpara.DATETIME_FORM)
    end = start + timedelta(minutes= 70)
    # prints <class 'datetime.timedelta'>
    print((end - start).__class__)
    print(timePeriod(start.strftime(cpara.DATETIME_FORM), (end+timedelta(minutes=50)).strftime(cpara.DATETIME_FORM)))
| {"/src/PlaneJobLine.py": ["/src/CommonDirectory.py"], "/src/CommonDirectory.py": ["/src/CommonParameter.py"], "/src/AirLine.py": ["/src/CommonDirectory.py", "/src/CommonParameter.py"]} |
61,736 | Muki-v/XiahangFlightOptimize | refs/heads/master | /src/CommonParameter.py |
# from src.CommonParameter import commonParameter as cpara
class commonParameter:
    """Shared datetime format strings used throughout the scheduling code."""

    # Date-only format, e.g. "05/05/2017"
    DATE_FORM = "%m/%d/%Y"
    # Date-and-time format, e.g. "05/05/2017 15:25"
    DATETIME_FORM = "%m/%d/%Y %H:%M"
61,737 | Muki-v/XiahangFlightOptimize | refs/heads/master | /src/AirLine.py | from datetime import datetime, date
from src.CommonDirectory import commonDirectory as D
from src.CommonParameter import commonParameter as cpara
from src.TimePeriod import timePeriod
import csv
class airLine:
    """One scheduled flight, parsed from an 11-column row of
    Xiahang_Airline.csv."""

    def __init__(self, line_info):
        """Parse a CSV row into typed attributes.

        :param line_info: list of 11 string fields
        :raises ValueError: when the row does not have exactly 11 fields
        """
        if len(line_info) != 11:
            raise ValueError("line_info length is not 11")
        self.LineID = int(line_info[0])
        self.LineDate = datetime.strptime(line_info[1], cpara.DATE_FORM)
        self.LineType = D.dict_NationalityToNum[line_info[2]]
        self.LineNum = int(line_info[3])
        self.LineDepartureAirport = int(line_info[4])
        self.LineLandAirport = int(line_info[5])
        # departure and land time
        self.LineFlyPeriod = timePeriod(line_info[6], line_info[7])
        self.LinePlaneID = int(line_info[8])
        # column 9 (plane type) is not stored; reconstruct_list() re-derives it
        # IF: influence factor
        self.LineIF = float(line_info[10])

    def reconstruct_list(self):
        """
        return a list of a flight with original information type
        :return: list
        """
        return [
            self.LineID,
            self.LineDate.strftime(cpara.DATE_FORM),
            D.dict_NumToNationality[self.LineType],
            self.LineNum,
            self.LineDepartureAirport,
            self.LineLandAirport,
            self.LineFlyPeriod.start.strftime(cpara.DATETIME_FORM),
            self.LineFlyPeriod.end.strftime(cpara.DATETIME_FORM),
            self.LinePlaneID,
            D.planeID_totype(self.LinePlaneID),
            self.LineIF,
        ]
if __name__ == '__main__':
    # Smoke test: parse every airline row from the scenario file.
    # Added encoding="gbk" for consistency with commonDirectory, which reads
    # the same file with that encoding (the nationality column is Chinese);
    # relying on the locale default breaks on non-GBK systems.
    with open("../Scenario/Xiahang_Airline.csv", encoding="gbk") as f:
        data = csv.reader(f)
        head = next(data)
        airlineSet = []
        for row in data:
            airlineSet.append(airLine(row))
        print(airlineSet[1].LineDate.date())
| {"/src/PlaneJobLine.py": ["/src/CommonDirectory.py"], "/src/CommonDirectory.py": ["/src/CommonParameter.py"], "/src/AirLine.py": ["/src/CommonDirectory.py", "/src/CommonParameter.py"]} |
61,749 | duspeccoll/astools | refs/heads/master | /2019/auth.py | #!/usr/bin/env python
# This script just logs you into your ArchivesSpace backend and prints a session ID.
# You can copy it into whatever script you want to write to do useful things with your database.
import configparser, requests
# Read connection settings from local_settings.cfg; all four values are
# expected in an [ArchivesSpace] section.
config = configparser.ConfigParser()
config.read('local_settings.cfg')
dictionary = {
    'baseURL': config.get('ArchivesSpace', 'baseURL'),
    'repository':config.get('ArchivesSpace', 'repository'),
    'user': config.get('ArchivesSpace', 'user'),
    'password': config.get('ArchivesSpace', 'password')
}
repositoryBaseURL = '{baseURL}/repositories/{repository}'.format(**dictionary)
# NOTE(review): resourceURL is identical to baseURL — confirm the alias is
# intentional.
resourceURL = '{baseURL}'.format(**dictionary)
# Log in; expiring=false keeps the session token valid indefinitely.
# NOTE(review): the password travels in the query string — consider moving
# it to the request body.
auth = requests.post('{baseURL}/users/{user}/login?password={password}&expiring=false'.format(**dictionary)).json()
session = auth['session']
headers = {'X-ArchivesSpace-Session': session}
# Print the session token so other scripts can reuse it.
print(session)
| {"/make_digital_object_gui.py": ["/make_digital_object.py"]} |
61,750 | duspeccoll/astools | refs/heads/master | /make_digital_object.py | #!/usr/bin/env python
# What this script does:
# 1. logs into the ArchivesSpace backend via the ArchivesSnake library (you will need to configure that before you
# run the script)
# 2. accepts a digital object package constructed according to the Digital Repository ingest specification as input
# 3. checks to see if uri.txt exists and generates it if it does not
# (check_uri_txt)
# 4. checks to see if a digital object has been attached to the item record matching the provided component ID,
# and creates it if it does not
# (check_digital_object)
# 5. processes each file and attaches a digital object component to the digital object containing that file's name,
# format, and size in bytes
# (process_files)
import argparse
import csv
import json
import magic
import os
import sys
import os.path
from asnake.aspace import ASpace
DEFAULT_URL = r"http://as02.coalliance.org:8080"
TESTING_URL = r"http://as0222.coalliance.org:8080/"
AS = None
VERBOSE_LOG = False
class DigitalObjectException(Exception):
    """Raised when a digital-object package is malformed or ambiguous.

    The message is kept on ``.message`` for existing callers
    (``sys.exit(e.message)``); it is now also passed to ``Exception`` so
    ``str(e)`` is meaningful in generic handlers/logs (the original left
    ``str(e)`` empty).
    """
    def __init__(self, message):
        super(DigitalObjectException, self).__init__(message)
        self.message = message
def get_json(uri):
    """GET *uri* via the module ASnake client and return the parsed JSON.

    Raises via requests' raise_for_status() on error statuses; returns None
    for non-200 successes that raise_for_status() does not flag.
    """
    resp = AS.client.get(uri)
    if resp.status_code != 200:
        resp.raise_for_status()
        return None
    return json.loads(resp.text)
def post_json(uri, data):
    """POST *data* as JSON to *uri*, logging the result.

    Errors are always logged; successes are logged only when VERBOSE_LOG
    is set. NOTE(review): assumes the error response body is JSON.
    """
    resp = AS.client.post(uri, json=data)
    message = json.loads(resp.text)
    if resp.status_code != 200:
        as_log("Error: {}".format(message['error']))
        return
    if not VERBOSE_LOG:
        return
    if 'uri' in message:
        as_log("{}: {}".format(message['status'], message['uri']))
    else:
        as_log("{}: {}".format(message['status'], uri))
def magic_to_as(file_format_name):
    """Translate a libmagic MIME subtype to the ArchivesSpace format label.

    Unknown subtypes pass through unchanged.
    NOTE(review): "x-font-gdos" -> "mp3" looks surprising — confirm the
    mapping against the source material.
    """
    translations = {
        "x-wav": "wav",
        "quicktime": "mov",
        "mpeg": "mp3",
        "x-font-gdos": "mp3",
        "vnd.adobe.photoshop": "tiff",
    }
    return translations.get(file_format_name, file_format_name)
# process the files in the path and add digital object components where necessary for each file
def process_files(ref, path, no_kaltura_id, no_caption, no_publish):
    """Attach or update one digital_object_component per file under *path*.

    :param ref: URI of the parent digital object
    :param path: directory containing the item's files
    :param no_kaltura_id: skip the interactive Kaltura-ID prompt
    :param no_caption: skip the interactive caption prompt
    :param no_publish: mark the created/updated components unpublished
    """
    print("Fetching digital object tree... ")
    tree = get_json("{}/tree/root".format(ref))
    print("Checking files... ")
    # Bookkeeping files are never ingested.
    files = [f for f in sorted(os.listdir(path), key=lambda a: a) if os.path.isfile(os.path.join(path, f)) and
             f != "uri.txt" and f != "Thumbs.db"]
    if files:
        for file in files:
            path_to_file = os.path.join(path, file)
            # MIME subtype from libmagic, e.g. "image/tiff" -> "tiff"
            file_format_name = magic.from_file(path_to_file, mime=True).split("/")[-1]
            # ignore file size if it won't fit in an int(11) to bypass mysql db constraints
            if os.path.getsize(path_to_file) < 2147483647:
                file_size_bytes = os.path.getsize(path_to_file)
            else:
                file_size_bytes = ''
            file_format_name = magic_to_as(file_format_name)
            #file_format_name = "tiff"
            make_new = True
            if tree["child_count"] > 0:
                # NOTE(review): assumes all children fit in the first
                # waypoint page — confirm for objects with many children.
                tree_files = [child for child in tree["precomputed_waypoints"][""]['0'] if child['title'] == file]
                print("Checking for file-level metadata updates... ")
                for child in tree_files:
                    make_new = False
                    record = get_json(child['uri'])
                    updates = False
                    if 'component_id' not in record:
                        if not no_kaltura_id:
                            kaltura_id = input("> Kaltura ID (leave blank for none): ")
                            if kaltura_id:
                                record['component_id'] = kaltura_id
                                updates = True
                    if record['file_versions']:
                        # Only the first file version is examined/updated.
                        version = record['file_versions'][0]
                        if 'file_uri' not in version or version['file_uri'] != file:
                            record['file_versions'][0]['file_uri'] = file
                            updates = True
                        if 'file_format_name' not in version or version['file_format_name'] != file_format_name:
                            record['file_versions'][0]['file_format_name'] = file_format_name
                            updates = True
                        if 'file_size_bytes' not in version or version['file_size_bytes'] != file_size_bytes:
                            record['file_versions'][0]['file_size_bytes'] = file_size_bytes
                            updates = True
                    else:
                        record['file_versions'].append({
                            'jsonmodel_type': "file_version",
                            'file_uri': file,
                            'file_format_name': file_format_name,
                            'file_size_bytes': file_size_bytes,
                            'is_representative': True,
                        })
                        updates = True
                    if not no_caption:
                        caption = input("> Caption (leave blank for none): ")
                        if caption:
                            if 'caption' in record['file_versions'][0]:
                                if record['file_versions'][0]['caption'] != caption:
                                    record['file_versions'][0]['caption'] = caption
                                    updates = True
                            else:
                                record['file_versions'][0]['caption'] = caption
                                updates = True
                    if no_publish:
                        record["publish"] = False
                        record['file_versions'][0]["publish"] = False
                    if updates:
                        print("Updating {}... ".format(file))
                        record['digital_object'] = {'ref': ref}
                        post_json(child['uri'], record)
                    else:
                        print("No updates to make")
            # No matching child found (or the tree was empty): create one.
            if make_new:
                print("Adding file-level metadata to ArchivesSpace... ")
                data = {
                    'jsonmodel_type': "digital_record_children",
                    'children': [{
                        'title': file,
                        'publish': True,
                        'file_versions': [{
                            'jsonmodel_type': "file_version",
                            'file_uri': file,
                            'publish': True,
                            'file_format_name': file_format_name,
                            'file_size_bytes': file_size_bytes,
                            'is_representative': True
                        }],
                        'digital_object': {'ref': ref}
                    }]
                }
                if not no_kaltura_id:
                    kaltura_id = input("> Kaltura ID (leave blank for none): ")
                    if kaltura_id:
                        data['children'][0]['component_id'] = kaltura_id
                if not no_caption:
                    caption = input("> Caption (leave blank for none): ")
                    if caption:
                        data['children'][0]['file_versions'][0]['caption'] = caption
                if no_publish:
                    data['children'][0]['publish'] = False
                    data['children'][0]['file_versions'][0]['publish'] = False
                post_json("{}/children".format(ref), data)
    else:
        print("No files found.")
# create a digital object and attach it to the provided item record
def write_digital_object(item):
    """Create a digital_object for *item*, link it as the representative
    instance, save the item, and return the new object's URI.

    Exits the process on a failed create (matching the original behavior).
    """
    as_log("Creating digital object... ")
    obj = {'title': item['display_string'], 'jsonmodel_type': "digital_object"}
    repo_links = [doc for doc in item['external_documents'] if doc['title'] == "Special Collections @ DU"]
    if len(repo_links) > 1:
        raise DigitalObjectException("There shouldn't be more than one repository link on an item record.")
    if repo_links:
        obj['digital_object_id'] = repo_links[0]['location']
    else:
        obj['digital_object_id'] = item['component_id']
    # post the digital object and link its reference URI to the provided item record
    resp = AS.client.post('/repositories/2/digital_objects', json=obj)
    if resp.status_code != 200:
        sys.exit("Error: {}".format(json.loads(resp.text)['error']))
    ref = json.loads(resp.text)['uri']
    item['instances'].append({
        'instance_type': "digital_object",
        'jsonmodel_type': "instance",
        'is_representative': True,
        'digital_object': {'ref': ref}
    })
    post_json(item['uri'], item)
    return ref
# given a URI from uri.txt, download the item record and check that it is cataloged according to the digital object
# metadata specification
def check_digital_object(uri):
    """Return the item's digital object URI, creating the object if missing
    and promoting it to representative if it is not already."""
    as_log("Downloading item... ")
    item = get_json(uri)
    as_log("Checking for digital object... ")
    digital_instances = [inst for inst in item['instances'] if inst['instance_type'] == "digital_object"]
    if not digital_instances:
        return write_digital_object(item)
    # script exits if the item has more than one digital object attached to it
    if len(digital_instances) > 1:
        raise DigitalObjectException(
            "An item cannot have more than one digital object. Please check ArchivesSpace to confirm your "
            "item is cataloged properly.")
    ref = digital_instances[0]['digital_object']['ref']
    if not [inst for inst in digital_instances if inst['is_representative']]:
        as_log("Making the digital object representative... ")
        # NOTE(review): non-digital instances lacking a 'digital_object' key
        # would raise KeyError here — behavior kept from the original.
        for instance in item['instances']:
            if instance['digital_object']['ref'] == ref:
                instance['is_representative'] = True
        post_json(item['uri'], item)
    return ref
# write uri.txt by searching for the component ID specified in the directory name and fetching its URI
def write_uri_txt(component_id, path):
    """Search ArchivesSpace for *component_id*, write the matching item URI
    to *path* (replacing any existing file), and return the URI.

    Raises DigitalObjectException on zero or multiple matches.
    """
    global AS
    resp = AS.client.get('/repositories/2/search', params={'q': component_id, 'type[]': "archival_object", 'page': "1"})
    if resp.status_code != 200:
        resp.raise_for_status()
        return None
    results = json.loads(resp.text)['results']
    matches = [res for res in results if res['component_id'] == component_id and 'pui' not in res['types']]
    # script exits if there are no results or if more than one archival_object has the provided call number
    if not matches:
        raise DigitalObjectException("Couldn't find this item in ArchivesSpace. Has it been cataloged? Is the "
                                     "call number accurate?")
    if len(matches) > 1:
        raise DigitalObjectException("Multiple objects with this call number found. Check ArchivesSpace for more "
                                     "information.")
    uri = matches[0]['uri']
    if os.path.exists(path):
        os.remove(path)
    as_log("Writing uri.txt... ")
    with open(path, 'w') as f:
        f.write(uri)
    return uri
# confirm that uri.txt exists and that its URI matches the object (based on the call number provided in the directory
# name)
def check_uri_txt(path):
    """Return the item URI for the directory at *path*, regenerating
    uri.txt when it is missing or stale."""
    component_id = os.path.basename(path)
    uri_txt = "{}/uri.txt".format(path)
    as_log("Checking for uri.txt... ")
    if not os.path.exists(uri_txt):
        return write_uri_txt(component_id, uri_txt)
    as_log("Checking if URI matches item record... ")
    with open(uri_txt, 'r') as f:
        uri = f.read().replace('\n', '')
    item = get_json(uri)
    if item['component_id'] != component_id:
        uri = write_uri_txt(component_id, uri_txt)
    return uri
# get the digital object's relative path from the user
def get_path(path=None):
    """Resolve *path* against the digital-object workspace directory.

    :param path: item directory name relative to the workspace
    :return: the joined workspace path
    :raises Exception: when no path is supplied
    """
    if path:
        return os.path.join(r"../digital_object_workspace/", path)
    # The original had an unreachable interactive prompt loop after this
    # point (both branches above return or raise); removed as dead code.
    raise Exception("No path provided")
def as_log(message):
    """Emit a progress message.

    NOTE(review): kept as a seam — the GUI front end appears to import this
    module and can redirect these messages; confirm before inlining print().
    """
    print(message)
def process(path, no_kaltura_id, no_caption, no_publish):
    """Run the full pipeline for one item directory: resolve the item URI,
    ensure its digital object exists, then process the files."""
    item_uri = check_uri_txt(path)
    object_ref = check_digital_object(item_uri)
    process_files(object_ref, path, no_kaltura_id, no_caption, no_publish)
def main():
    """Parse CLI arguments, connect to ArchivesSpace, and run the pipeline.

    With --batch, PATH names a directory whose immediate subdirectories are
    each processed as one digital object; otherwise PATH is a single item
    directory. Both resolve relative to ../digital_object_workspace/.
    """
    global AS
    parser = argparse.ArgumentParser(description='Make a digital object based on the contents of a directory')
    parser.add_argument('-u', '--user', help="ArchivesSpace username")
    parser.add_argument('-p', '--password', help="ArchivesSpace password")
    parser.add_argument('path', help="The directory to process")
    # Fixed help text: --batch is a boolean flag (action='store_true'); the
    # old help string wrongly described it as taking a CSV file.
    parser.add_argument('-b', '--batch', action='store_true', help="Process every subdirectory of PATH as its own digital object")
    # argparse derives dest 'no_kaltura_id' by replacing '-' with '_'.
    parser.add_argument('--no_kaltura-id', help="Do not prompt the user to provide Kaltura IDs", action='store_true')
    parser.add_argument('--no_caption', help="Do not prompt the user to provide captions", action='store_true')
    parser.add_argument('--no_publish', help="Do not publish digital object component", action='store_true')
    parser.add_argument('--test', help="Run on test ArchivesSpace server.", action='store_true')
    args = parser.parse_args()
    if args.test:
        AS = ASpace(baseurl=TESTING_URL, username=args.user, password=args.password)
    else:
        AS = ASpace(baseurl=DEFAULT_URL, username=args.user, password=args.password)
    no_kaltura_id = args.no_kaltura_id
    no_caption = args.no_caption
    no_publish = args.no_publish
    if args.batch:
        for f in sorted(os.scandir(get_path(args.path)), key=lambda a: a.name):
            if f.is_dir():
                path = f.path
                print("CURRENT ITEM: " + f.name)
                process(path, no_kaltura_id, no_caption, no_publish)
    else:
        print("CURRENT ITEM: " + args.path)
        path = get_path(args.path)
        process(path, no_kaltura_id, no_caption, no_publish)
if __name__ == "__main__":
try:
main()
except DigitalObjectException as e:
sys.exit(e.message)
| {"/make_digital_object_gui.py": ["/make_digital_object.py"]} |
61,751 | duspeccoll/astools | refs/heads/master | /2019/get_kaltura_ids.py | #!/usr/bin/env python
##########
#
# get_kaltura_ids.py -- get Kaltura IDs from a bulk ingest log XML file
#
# Feed this script an XML log file of a Kaltura bulk ingest; it will scan the
# Reference ID (if it's a digital_object_component URI), download the object
# found at that URI, and add the Kaltura entry ID to it
#
##########
import argparse
import json
import os
from lxml import etree
from asnake.aspace import ASpace
# CLI: -f/--file points at the Kaltura bulk-ingest log to process.
# NOTE(review): description/help say CSV, but the script below parses an
# XML bulk-ingest log — confirm the wording.
ap = argparse.ArgumentParser(description='Add or update ArchivesSpace metadata properties from CSV input')
ap.add_argument('-f', '--file', help='The CSV file containing the metadata to add')
args = ap.parse_args()
# ArchivesSnake client configured via its default configuration mechanism.
AS = ASpace()
def get_json(uri):
    """GET *uri* via the ASnake client and return the parsed JSON body.

    Raises via raise_for_status() on error statuses; returns None for
    non-200 successes that raise_for_status() does not flag.
    """
    resp = AS.client.get(uri)
    if resp.status_code != 200:
        resp.raise_for_status()
        return None
    return json.loads(resp.text)
def post_json(uri, data):
    """POST *data* as JSON to *uri* and print the outcome.

    NOTE(review): assumes the response body is JSON in both the success
    and error cases.
    """
    resp = AS.client.post(uri, json=data)
    payload = json.loads(resp.text)
    if resp.status_code == 200:
        print("{}: {}".format(payload['status'], payload['uri']))
    else:
        print("Error: {}".format(payload['error']))
if args.file:
    if os.path.exists(args.file):
        parser = etree.XMLParser(remove_blank_text=True)
        with open(args.file, 'r') as xml:
            data = etree.parse(xml, parser).getroot()
            # Each <item> in the bulk-ingest log carries the ArchivesSpace
            # record URI in customData/metadata/ReferenceID and the Kaltura
            # entry id in <entryId>.
            for item in data.xpath('channel/item'):
                component_id = item.find('customData/metadata/ReferenceID').text
                kaltura_id = item.find('entryId').text
                print("{} has kaltura ID {}".format(component_id, kaltura_id))
                # component_id is actually a URI here; fetch the record and
                # store the Kaltura id in its component_id field.
                obj = get_json(component_id)
                obj['component_id'] = kaltura_id
                post_json(component_id, obj)
    else:
        print("File not found: {}".format(args.file))
else:
    print("No file provided")
61,752 | duspeccoll/astools | refs/heads/master | /make_digital_object_gui.py | import tkinter as tk
from tkinter import filedialog
from tkinter import ttk
import make_digital_object
from make_digital_object import *
from asnake.client.web_client import ASnakeAuthError
import threading
from requests.exceptions import MissingSchema, InvalidSchema, ConnectionError
import configparser
# Per-file item metadata keyed by file-listbox id; delete_item_dict holds
# removals pending sync.
item_dict = dict()
delete_item_dict = dict()

# Default widget padding in pixels.
pad_width = 10

# Probe for a usable libmagic database; degrade gracefully when the bundled
# magic.mgc cannot be loaded so the GUI can still start.
try:
    mag = magic.Magic(mime=True, magic_file=r"magic.mgc")
    pymagic_flag = True
except magic.MagicException:
    pymagic_flag = False

ignored_file_extensions = ('db', 'xml', '.DS_Store')

log_text = None
root = None
# Serializes background processing threads.
process_lock = threading.Lock()

config = configparser.ConfigParser()
try:
    with open('config.ini') as f:
        config.read_file(f)
    default_url = config.get('DEFAULT', 'url')
except FileNotFoundError:
    # First run: create config.ini with an empty default URL.
    default_url = ""
    config['DEFAULT'] = {"url": ""}
    with open('config.ini', 'w') as f:
        config.write(f)
except (KeyError, configparser.NoOptionError):
    # config.ini exists but lacks the url option. Fixed: configparser.get
    # raises NoOptionError here (not KeyError), so the original handler
    # could not catch a malformed config and the GUI crashed on startup.
    default_url = ""
class MainFrame(ttk.Frame):
    """Top-level layout: file list (left), item list (right), path/metadata
    entry frames (top), process buttons (center) and a hideable log pane
    (bottom)."""

    def __init__(self, master):
        global log_text
        super(MainFrame, self).__init__(master)
        # Item list with its vertical scrollbar.
        self.item_listbox = ItemListbox(self)
        self.item_listbox.grid(column=3, row=1, rowspan=3, sticky='NSWE')
        self.item_listbox_scrollbar = ttk.Scrollbar(self, orient='vertical', command=self.item_listbox.yview)
        self.item_listbox_scrollbar.grid(column=4, row=1, rowspan=3, sticky='NSW')
        self.item_listbox.configure(yscrollcommand=self.item_listbox_scrollbar.set)
        # File list with its vertical scrollbar.
        self.file_listbox = FileListbox(self, self.item_listbox)
        self.file_listbox.grid(column=0, row=1, rowspan=3, sticky='WENS')
        self.file_listbox_scrollbar = ttk.Scrollbar(self, orient='vertical', command=self.file_listbox.yview)
        self.file_listbox_scrollbar.grid(column=1, row=1, rowspan=3, sticky='NS')
        self.file_listbox.configure(yscrollcommand=self.file_listbox_scrollbar.set)
        # Entry/button panes above the two lists.
        self.file_info_frame = FileInfoFrame(self, self.file_listbox, self.item_listbox)
        self.file_info_frame.grid(column=0, row=0, sticky="EW", padx=pad_width)
        self.item_info_frame = ItemInfoFrame(self, self.file_listbox, self.item_listbox)
        self.item_info_frame.grid(column=3, row=0, sticky="EW")
        self.process_buttons_frame = ttk.Frame(self)
        self.process_buttons_frame.grid(column=2, row=1, padx=pad_width)
        self.process_button = ProcessButton(self.process_buttons_frame, self.file_listbox, self.item_listbox)
        self.process_button.grid(column=0, row=0, sticky='WE')
        self.process_all_button = ProcessAllButton(self.process_buttons_frame, self.file_listbox, self.item_listbox)
        self.process_all_button.grid(column=0, row=1, sticky='WE')
        # Log pane starts hidden (grid_remove below); the module-level
        # log_text global lets background threads append to it.
        self.log_frame = ttk.Frame(self)
        self.log_frame.grid(column=0, row=5, columnspan=5, sticky='NSWE', pady=pad_width)
        self.log_text = LogText(self.log_frame)
        log_text = self.log_text
        self.log_text.grid(column=0, row=0, sticky='WENS')
        self.log_text_scrollbar = ttk.Scrollbar(self.log_frame, orient='vertical', command=self.log_text.yview)
        self.log_text_scrollbar.grid(column=1, row=0, sticky='WNS')
        self.log_text.configure(yscrollcommand=self.log_text_scrollbar.set)
        self.log_frame.grid_remove()
        self.show_log_button = ShowLogButton(self, log_frame=self.log_frame)
        self.show_log_button.grid(column=3, row=4, sticky="ES")
        # Stretch behavior: lists grow with the window.
        self.columnconfigure(0, weight=1)
        self.columnconfigure(3, weight=2)
        self.rowconfigure(1, weight=1)
        self.log_frame.columnconfigure(0, weight=1)
        self.log_frame.rowconfigure('all', weight=1)
class FileInfoFrame(ttk.Frame):
    """Top-left pane: path entry plus Add / Batch Add / Remove / Browse
    controls."""

    def __init__(self, master, file_listbox, item_listbox):
        super(FileInfoFrame, self).__init__(master)
        self.file_path_entry = FilePathEntry(self)
        self.file_path_entry.grid(column=0, row=0, columnspan=3, sticky='EW')
        self.add_remove_buttons_frame = ttk.Frame(self)
        self.add_remove_buttons_frame.grid(column=0, row=1, sticky='W')
        self.add_button = AddButton(self.add_remove_buttons_frame, self.file_path_entry, file_listbox, item_listbox)
        self.add_button.grid(column=0, row=0, sticky='WE', padx=2, pady=2)
        self.remove_button = RemoveButton(self.add_remove_buttons_frame, file_listbox, item_listbox)
        self.remove_button.grid(column=2, row=0, sticky='W')
        self.batch_add_button = BatchAddButton(self.add_remove_buttons_frame, self.file_path_entry,
                                               file_listbox, item_listbox)
        self.batch_add_button.grid(column=1, row=0, sticky="WE", padx=2)
        # NOTE(review): BrowseButton gets file_path_entry as both its Tk
        # master and its target widget — confirm the parenting is intended.
        self.browse_button = BrowseButton(self.file_path_entry, self.file_path_entry)
        self.browse_button.grid(column=1, row=1, padx=pad_width)
        self.columnconfigure(0, weight=1)
class ItemInfoFrame(ttk.Frame):
    """Top-right pane: caption and Kaltura-ID entries with their Set
    buttons, which write into the currently selected item."""

    def __init__(self, master, file_listbox, item_listbox):
        super(ItemInfoFrame, self).__init__(master)
        self.file_listbox = file_listbox
        self.item_listbox = item_listbox
        self.caption_entry = CaptionEntry(self)
        self.caption_entry.grid(column=0, row=0, sticky='EW')
        self.kaltura_entry = KalturaIDEntry(self)
        self.kaltura_entry.grid(column=2, row=0, sticky='EW')
        self.set_caption_button = SetCaptionButton(self, self.caption_entry,
                                                   self.file_listbox, self.item_listbox)
        self.set_caption_button.grid(column=1, row=0, sticky='S', padx=pad_width)
        self.set_kaltura_button = SetKalturaButton(self, self.kaltura_entry, self.file_listbox, self.item_listbox)
        self.set_kaltura_button.grid(column=3, row=0, sticky='S', padx=pad_width)
        self.columnconfigure(0, weight=1)
        self.columnconfigure(2, weight=1)
        self.columnconfigure('all', pad=pad_width)
        self.rowconfigure('all', pad=pad_width)
class FilePathEntry(ttk.Frame):
    """Labeled text entry holding the path of the directory to process."""

    def __init__(self, master):
        super(FilePathEntry, self).__init__(master)
        self.label = tk.Label(self, text="File Path")
        self.label.grid(column=0, row=0, sticky="W")
        self.entry = tk.Entry(self, width=30)
        self.entry.grid(column=0, row=1, sticky="EW")
        self.columnconfigure(0, weight=1)

    def get(self):
        """Return the current entry text."""
        return self.entry.get()

    def set(self, value):
        """Replace the entry contents with *value*.

        Fixed insert index typo (was 9): harmless only because the entry
        has just been cleared; 0 states the intent correctly.
        """
        self.entry.delete(0, 'end')
        self.entry.insert(0, value)
class KalturaIDEntry(ttk.Frame):
    """Labeled text entry for a Kaltura entry ID."""

    def __init__(self, master):
        super(KalturaIDEntry, self).__init__(master)
        self.label = tk.Label(self, text="Kaltura ID")
        self.label.grid(column=0, row=0, sticky='W')
        self.entry = tk.Entry(self)
        self.entry.grid(column=0, row=1, sticky='EW')
        self.columnconfigure(0, weight=1)

    def get(self):
        # Return the current entry text.
        return self.entry.get()
class CaptionEntry(ttk.Frame):
    """Labeled text entry for a file caption."""

    def __init__(self, master):
        super(CaptionEntry, self).__init__(master)
        self.label = tk.Label(self, text="Caption")
        self.label.grid(column=0, row=0, sticky='W')
        self.entry = tk.Entry(self)
        self.entry.grid(column=0, row=1, sticky='EW')
        self.columnconfigure(0, weight=1)

    def get(self):
        # Return the current entry text.
        return self.entry.get()
class AddButton(tk.Button):
    """Adds the directory named in the path entry as a single item.

    The work runs on a background AddThread; every button is disabled via
    disable_all_buttons(root) while it runs.
    """

    def __init__(self, master, file_path_entry, file_listbox, item_listbox):
        super(AddButton, self).__init__(master, text="Add", command=self._button_command)
        self.file_path_entry = file_path_entry
        self.file_listbox = file_listbox
        self.item_listbox = item_listbox

    def _button_command(self):
        global root
        # AddThread and disable_all_buttons are defined elsewhere in this module.
        add_thread = AddThread(self.file_path_entry, self.file_listbox, self.item_listbox)
        add_thread.start()
        disable_all_buttons(root)
class BatchAddButton(tk.Button):
    """Adds many item directories at once on a background BatchAddThread.

    Sets the root cursor to 'wait' while the thread runs.
    NOTE(review): unlike AddButton, this does not disable the other
    buttons — confirm whether that is intentional.
    """

    def __init__(self, master, file_path_entry, file_listbox, item_listbox):
        super(BatchAddButton, self).__init__(master, text='Batch Add', command=self._button_command)
        self.file_path_entry = file_path_entry
        self.file_listbox = file_listbox
        self.item_listbox = item_listbox

    def _button_command(self):
        global root
        root.configure(cursor='wait')
        batch_add_thread = BatchAddThread(self.file_path_entry, self.file_listbox, self.item_listbox)
        batch_add_thread.start()
class RemoveButton(tk.Button):
    """Button that removes the selected folder entry from the file listbox."""

    def __init__(self, master, file_listbox, item_listbox):
        super(RemoveButton, self).__init__(master, text="Remove", command=self._button_command)
        self.item_listbox = item_listbox
        self.file_listbox = file_listbox

    def _button_command(self):
        # Delegates to FileListbox.delete_selection, which also drops the
        # cached item data and refreshes the item view.
        self.file_listbox.delete_selection()
class BrowseButton(tk.Button):
    """Button that opens a directory chooser and copies the pick into the path entry."""
    def __init__(self, master, file_path_entry):
        super(BrowseButton, self).__init__(master, text="Browse", command=self._button_command)
        self.file_path_entry = file_path_entry
    def _button_command(self):
        # askdirectory returns '' when the dialog is cancelled; set('') then
        # simply clears the entry.
        dirname = filedialog.askdirectory()
        self.file_path_entry.set(dirname)
class SetCaptionButton(tk.Button):
    """Apply the caption entry's text to the item selected in the item listbox.

    Items previously saved in ArchivesSpace ('old') are re-marked 'changed'
    so process_items knows to repost them.
    """
    def __init__(self, master, caption_entry, file_listbox, item_listbox):
        super(SetCaptionButton, self).__init__(master, text='Set', command=self._button_command)
        self.caption_entry = caption_entry
        self.file_listbox = file_listbox
        self.item_listbox = item_listbox
    def _button_command(self):
        file_selection = self.file_listbox.selection()
        item_selection = self.item_listbox.selection()
        # Guard: clicking Set with nothing selected previously raised
        # IndexError inside the Tk callback; treat it as a no-op instead.
        if not file_selection or not item_selection:
            return
        file_index = file_selection[0]
        # Item iids look like 'I<n>' (assigned by display_items); strip the prefix.
        item_index = int(item_selection[0][1:]) - 1
        item_list = item_dict[file_index]
        if self.caption_entry.get() != item_list[item_index]['caption']:
            item_list[item_index]['caption'] = self.caption_entry.get()
            if item_list[item_index]['type'] == 'old':
                item_list[item_index]['type'] = 'changed'
        self.caption_entry.entry.delete(0, 'end')
        display_items(self.file_listbox, self.item_listbox)
class SetKalturaButton(tk.Button):
    """Apply the Kaltura-ID entry's text to the item selected in the item listbox.

    Items previously saved in ArchivesSpace ('old') are re-marked 'changed'
    so process_items knows to repost them.
    """
    def __init__(self, master, kaltura_entry, file_listbox, item_listbox):
        super(SetKalturaButton, self).__init__(master, text='Set', command=self._button_command)
        self.kaltura_entry = kaltura_entry
        self.file_listbox = file_listbox
        self.item_listbox = item_listbox
    def _button_command(self):
        file_selection = self.file_listbox.selection()
        item_selection = self.item_listbox.selection()
        # Guard: clicking Set with nothing selected previously raised
        # IndexError inside the Tk callback; treat it as a no-op instead.
        if not file_selection or not item_selection:
            return
        file_index = file_selection[0]
        # Item iids look like 'I<n>' (assigned by display_items); strip the prefix.
        item_index = int(item_selection[0][1:]) - 1
        item_list = item_dict[file_index]
        if self.kaltura_entry.get() != item_list[item_index]['kaltura']:
            item_list[item_index]['kaltura'] = self.kaltura_entry.get()
            if item_list[item_index]['type'] == 'old':
                item_list[item_index]['type'] = 'changed'
        self.kaltura_entry.entry.delete(0, 'end')
        display_items(self.file_listbox, self.item_listbox)
class ProcessButton(tk.Button):
    """Kick off background processing of the currently selected folder."""

    def __init__(self, master, file_listbox, item_listbox):
        super(ProcessButton, self).__init__(master, text="Process", command=self._button_command)
        self.file_listbox = file_listbox
        self.item_listbox = item_listbox

    def _button_command(self):
        # Post to ArchivesSpace on a worker thread so the UI never blocks.
        worker = ProcessThread(self.file_listbox)
        worker.start()
class ProcessAllButton(tk.Button):
    """Kick off background processing of every folder in the listbox."""

    def __init__(self, master, file_listbox, item_listbox):
        super(ProcessAllButton, self).__init__(master, text='Process All', command=self._button_command)
        self.file_listbox = file_listbox
        self.item_listbox = item_listbox

    def _button_command(self):
        # Fan out over all entries on a worker thread so the UI never blocks.
        worker = ProcessAllThread(self.file_listbox)
        worker.start()
class ShowLogButton(tk.Button):
    """Toggle button that shows or hides the log frame."""

    def __init__(self, master, log_frame):
        super(ShowLogButton, self).__init__(master, text='Show Log', command=self._button_command)
        self.hidden = True
        self.log_frame = log_frame

    def _button_command(self):
        # Flip visibility; grid_remove() remembers grid options so the frame
        # reappears in the same place next time.
        if not self.hidden:
            self.log_frame.grid_remove()
            self.hidden = True
            self.configure(text='Show Log')
        else:
            self.log_frame.grid()
            self.hidden = False
            self.configure(text='Hide Log')
class FileListbox(ttk.Treeview):
    """Single-select tree of added folder paths; selection drives the item view."""
    def __init__(self, master, item_listbox):
        super(FileListbox, self).__init__(master, selectmode='browse')
        self.item_listbox = item_listbox
        self.heading('#0', text='Path')
        self.column('#0', width=300)
        # Refresh the item listbox whenever the selection changes.
        self.bind('<<TreeviewSelect>>', lambda _: display_items(self, self.item_listbox))
    def delete_selection(self):
        """Delete every selected row (selectmode='browse' means at most one)."""
        index = self.selection()
        for i in index:
            self.delete(i)
    def delete(self, index):
        # NOTE(review): overrides Treeview.delete(*items) with a single-item
        # signature; internal callers pass one iid, but any other Treeview
        # code expecting varargs would break -- confirm nothing else calls it.
        super(FileListbox, self).delete((index,))
        del item_dict[index]
        display_items(self, self.item_listbox)
class ItemListbox(ttk.Treeview):
    """Treeview listing each file with its Kaltura ID and caption columns."""

    def __init__(self, master):
        super(ItemListbox, self).__init__(master, columns=("kaltura", "caption"), selectmode='browse')
        # The tree column (#0) carries the file / digital object title.
        self.heading('#0', text="Digital Object ID")
        self.column('#0', width=125)
        self.heading('kaltura', text='Kaltura ID')
        self.column('kaltura', width=75)
        self.heading('caption', text="Caption")
        self.column('caption', width=200)
class LogText(tk.Text):
    """Read-only text widget used as the scrolling log pane."""

    def __init__(self, master):
        # state='disabled' blocks user edits; gui_as_log re-enables briefly
        # whenever it appends a line.
        super(LogText, self).__init__(master, width=110, height=20, state='disabled')
class AddThread(threading.Thread):
    """Daemon thread that adds a single folder without blocking the UI."""
    def __init__(self, file_path_entry, file_listbox, item_listbox):
        super(AddThread, self).__init__(daemon=True)
        self.file_path_entry = file_path_entry
        self.file_listbox = file_listbox
        self.item_listbox = item_listbox
    def run(self):
        lock_process()
        file_path = self.file_path_entry.get()
        if file_path != "":
            add_file(file_path, self.file_listbox, self.item_listbox, self.file_path_entry)
        unlock_process()
        # Clear the path entry once the add has finished (success or failure).
        self.file_path_entry.entry.delete(0, 'end')
class BatchAddThread(threading.Thread):
    """Daemon thread that adds every immediate subdirectory of the chosen path."""
    def __init__(self, file_path_entry, file_listbox, item_listbox):
        super(BatchAddThread, self).__init__(daemon=True)
        self.file_path_entry = file_path_entry
        self.file_listbox = file_listbox
        self.item_listbox = item_listbox
    def run(self):
        lock_process()
        path = self.file_path_entry.get()
        if path != "":
            # Only directories are candidates; each is added like a single Add.
            files = [f for f in os.scandir(path) if f.is_dir()]
            for file in files:
                # Normalize Windows separators so path display is uniform.
                add_file(file.path.replace("\\", '/'), self.file_listbox, self.item_listbox, self.file_path_entry)
        unlock_process()
        self.file_path_entry.entry.delete(0, 'end')
class ProcessThread(threading.Thread):
    """Daemon thread that posts the selected folder's items to ArchivesSpace."""
    def __init__(self, file_listbox):
        super(ProcessThread, self).__init__(daemon=True)
        self.file_listbox = file_listbox
    def run(self):
        # May pop an AskDeleteWindow first when stale AS records were detected.
        ask_to_delete_items()
        lock_process()
        try:
            file_selection = self.file_listbox.selection()[0]
            process_items(self.file_listbox, file_selection)
        except IndexError:
            # Nothing selected: quietly do nothing.
            pass
        unlock_process()
class ProcessAllThread(threading.Thread):
    """Worker thread that processes every folder currently in the listbox.

    Runs as a daemon -- consistent with AddThread, BatchAddThread and
    ProcessThread -- so a hung ArchivesSpace request cannot keep the
    interpreter alive after the GUI closes.
    """
    def __init__(self, file_listbox):
        # Fix: the sibling worker threads all pass daemon=True; this one did not.
        super(ProcessAllThread, self).__init__(daemon=True)
        self.file_listbox = file_listbox
    def run(self):
        ask_to_delete_items()
        lock_process()
        try:
            files = self.file_listbox.get_children()
            for f in files:
                process_items(self.file_listbox, f)
        except IndexError:
            pass
        unlock_process()
class CredentialsWindow(tk.Toplevel):
    """Login dialog collecting the ArchivesSpace URL, username and password.

    On confirm it builds the global ASnake client (make_digital_object.AS)
    and, if the checkbox is set, persists the credentials to
    credentials.json (otherwise any saved file is removed).
    """
    def __init__(self, master, url='', username='', password='', save_credentials=False):
        super(CredentialsWindow, self).__init__(master)
        self.transient(master)
        self.minsize(width=200, height=200)
        self.frame = tk.Frame(self)
        self.frame.grid(padx=10, pady=10, sticky='EW')
        self.title('Credentials')
        self.configure(width=200)
        # Fall back to the module-level default_url when nothing was saved.
        if url == '':
            url = default_url
        self.baseurl_label = ttk.Label(self.frame, text="Base Archivesspace URL")
        self.baseurl_label.grid(column=0, row=0)
        self.baseurl_entry = ttk.Entry(self.frame)
        self.baseurl_entry.grid(column=0, row=1, sticky='EW')
        self.baseurl_entry.insert(0, url)
        self.username_label = ttk.Label(self.frame, text="Username")
        self.username_label.grid(column=0, row=2)
        self.username_entry = ttk.Entry(self.frame)
        self.username_entry.grid(column=0, row=3, sticky='EW')
        self.username_entry.insert(0, username)
        self.password_label = ttk.Label(self.frame, text="Password")
        self.password_label.grid(column=0, row=4)
        self.password_entry = ttk.Entry(self.frame, show="*")
        self.password_entry.grid(column=0, row=5, sticky='EW')
        self.password_entry.insert(0, password)
        self.save_credentials_flag = tk.BooleanVar(self, save_credentials)
        self.save_credentials = ttk.Checkbutton(self.frame, text='Save credentials?',
                                                variable=self.save_credentials_flag, onvalue=True, offvalue=False)
        self.save_credentials.grid(column=0, row=6)
        self.confirm_button = ttk.Button(self.frame, text="Confirm", command=self._confirm_button_command)
        self.confirm_button.grid(column=0, row=8)
        self.invalid_login = ttk.Label(self.frame, foreground='red')
        self.invalid_login.grid(column=0, row=7)
        # The Enter key submits the form as well.
        self.bind('<Return>', self._confirm_button_command)
        self.columnconfigure(0, weight=1)
        self.frame.columnconfigure(0, weight=1)
    def _confirm_button_command(self, _=None):
        """Authenticate; on success save/remove credentials.json and close."""
        global root
        try:
            make_digital_object.AS = ASpace(baseurl=self.baseurl_entry.get(),
                                            username=self.username_entry.get(),
                                            password=self.password_entry.get())
            if self.save_credentials_flag.get():
                # NOTE(review): credentials are stored in plain text on disk.
                with open('credentials.json', mode='w') as credentials:
                    data = {'baseurl': self.baseurl_entry.get(),
                            'username': self.username_entry.get(),
                            'password': self.password_entry.get()}
                    json.dump(data, credentials)
            else:
                try:
                    os.remove('credentials.json')
                except FileNotFoundError:
                    pass
            self.destroy()
        except ASnakeAuthError:
            # Bad username/password: clear those fields and show the error.
            self.username_entry.delete(0, 'end')
            self.password_entry.delete(0, 'end')
            self.invalid_login.configure(text="Incorrect username/password.")
        except (MissingSchema, InvalidSchema, ConnectionError):
            # Malformed or unreachable URL: clear it and show the error.
            self.baseurl_entry.delete(0, 'end')
            self.invalid_login.configure(text="Incorrect URL.")
class AskDeleteWindow(tk.Tk):
    """Yes/No prompt for deleting AS records whose local files are gone.

    NOTE(review): this subclasses tk.Tk (a second root window) and runs its
    own mainloop(), and it is constructed from worker threads via
    ask_to_delete_items(); Tkinter is not generally thread-safe -- confirm
    this behaves reliably on the target platform.
    """
    def __init__(self):
        global root
        super(AskDeleteWindow, self).__init__()
        self.title('Delete Files?')
        # Freeze the main window while the question is pending.
        lock_process()
        disable_all_buttons(root)
        self.label = ttk.Label(self, text="There are digital objects in Archivesspace that don't exist in the "
                                          "processed folder(s). Would you like to delete those records?")
        self.label.grid(column=0, row=0, columnspan=2)
        self.yes_button = ttk.Button(self, text="Yes", command=self._yes_button_command)
        self.yes_button.grid(column=0, row=1)
        self.no_button = ttk.Button(self, text="No", command=self._no_button_command)
        self.no_button.grid(column=1, row=1)
        # Blocks here until the dialog is destroyed by one of the buttons.
        self.mainloop()
    def _yes_button_command(self, _=None):
        """Delete every queued record, log each deletion, then close."""
        global root
        for folder in delete_item_dict.values():
            for uri, title in folder:
                make_digital_object.AS.client.delete(uri)
                as_log("Deleted {} ({}).".format(title, uri))
        delete_item_dict.clear()
        unlock_process()
        disable_all_buttons(root, True)
        self.destroy()
    def _no_button_command(self, _=None):
        """Close without deleting anything (the queue is left intact)."""
        global root
        unlock_process()
        disable_all_buttons(root, True)
        self.destroy()
def add_file(file_path, file_listbox, item_listbox, file_path_entry):
    """Validate a folder's uri.txt, fetch its digital object, and list its items.

    On DigitalObjectException the error is logged and the path entry cleared.
    """
    try:
        uri = check_uri_txt(file_path)
        as_log(uri)
        ref = check_digital_object(uri)
        tree_id = file_listbox.insert('', 'end', text=file_path)
        find_items(ref, file_path, tree_id)
        display_items(file_listbox, item_listbox)
        # Scroll to and select the newly added row.
        file_listbox.see(tree_id)
        file_listbox.selection_set((tree_id,))
    except DigitalObjectException as exc:
        as_log(exc.message)
        file_path_entry.entry.delete(0, 'end')
def gui_as_log(message=''):
    """Append *message* to the GUI log pane and scroll to the end."""
    # The widget is kept disabled to block user edits; enable it just long
    # enough to insert the new line.
    log_text.configure(state='normal')
    log_text.insert('end', message + '\n')
    log_text.see('end')
    log_text.configure(state='disabled')
# Route both this module's and make_digital_object's logging into the pane.
make_digital_object.as_log = gui_as_log
as_log = gui_as_log
def process_items(file_listbox, file_selection):
    """Post every pending item cached for *file_selection* to ArchivesSpace.

    'new' items are created as children of their digital object; 'changed'
    items update the existing child record in place. Unchanged ('old') items
    are skipped. The folder row is removed from the listbox when done.
    """
    item_list = item_dict[file_selection]
    for item in item_list:
        if item['type'] == 'new':
            data = item['data']
            data['children'][0]['file_versions'][0]['caption'] = item['caption']
            data['children'][0]['component_id'] = item['kaltura']
            post_json("{}/children".format(item['ref']), data)
        elif item['type'] == 'changed':
            data = item['data']
            version_index = item['version_index']
            data['file_versions'][version_index]['caption'] = item['caption']
            data['component_id'] = item['kaltura']
            data['digital_object'] = {'ref': item['ref']}
            post_json(item['record_uri'], data)
    file_listbox.delete(file_selection)
def find_items(ref, path, tree_id):
    """Scan *path* for files and reconcile them with the digital object at *ref*.

    Populates item_dict[tree_id] with one entry per local file: 'new' when
    the file has no child record yet, 'old' when the record already matches,
    and 'changed' when file-level metadata had to be updated. Children in
    ArchivesSpace with no matching local file are queued in delete_item_dict.
    """
    if tree_id not in item_dict:
        item_dict[tree_id] = list()
    as_log("Fetching digital object tree... ")
    tree = get_json("{}/tree".format(ref))
    as_log("Checking files... ")
    files = [f for f in os.listdir(path) if os.path.isfile(os.path.join(path, f))
             and f != 'uri.txt' and f.split(".")[-1] not in ignored_file_extensions]
    if files:
        for file in files:
            path_to_file = os.path.join(path, file)
            try:
                file_format_name = magic_from_file(path_to_file).split("/")[-1]
            except magic.MagicException:
                print('MIME problem, setting file_format_name to blank', file=sys.stderr)
                continue
            as_log("\nProcessing {}... ".format(file))
            file_format_name = magic_to_as(file_format_name)
            # ignore file size if it won't fit in an int(11) to bypass mysql db constraints
            if os.path.getsize(path_to_file) < 2147483647:
                file_size_bytes = os.path.getsize(path_to_file)
            else:
                file_size_bytes = ''
            tree_files = [child for child in tree['children'] if child['title'] == file]
            if tree_files:
                as_log("Checking for file-level metadata updates... ")
                for child in tree_files:
                    record = get_json(child['record_uri'])
                    item_type = 'old'
                    version_index = 0
                    if record['file_versions']:
                        master_use_statements = {"Audio-Master", "Video-Master"}
                        # Prefer the master file version when one is flagged;
                        # otherwise fall back to the first version.
                        version = record['file_versions'][0]
                        for i in range(len(record['file_versions'])):
                            try:
                                use_statement = record['file_versions'][i]["use_statement"]
                            except KeyError:
                                pass
                            else:
                                if use_statement in master_use_statements:
                                    # Bug fix: keep the version *dict* rather
                                    # than its use_statement string, so the
                                    # membership checks below inspect the
                                    # right object.
                                    version = record['file_versions'][i]
                                    version_index = i
                                    break
                        # Bug fix: update the *selected* version; writes
                        # previously always targeted index 0 even when a
                        # master version at another index had been chosen.
                        if 'file_uri' not in version or version['file_uri'] != file:
                            record['file_versions'][version_index]['file_uri'] = file
                        if 'file_format_name' not in version or version['file_format_name'] != file_format_name:
                            record['file_versions'][version_index]['file_format_name'] = file_format_name
                        if 'file_size_bytes' not in version or version['file_size_bytes'] != file_size_bytes:
                            record['file_versions'][version_index]['file_size_bytes'] = file_size_bytes
                    else:
                        record['file_versions'].append({
                            'jsonmodel_type': "file_version",
                            'file_uri': file,
                            'file_format_name': file_format_name,
                            'file_size_bytes': file_size_bytes,
                            'is_representative': True,
                        })
                        item_type = 'changed'
                    caption = ''
                    # Bug fix: read the caption from the same version index we
                    # report below (the check previously hard-coded index 0).
                    if 'caption' in record['file_versions'][version_index]:
                        caption = record['file_versions'][version_index]['caption']
                    kaltura_id = ''
                    if 'component_id' in record:
                        kaltura_id = record['component_id']
                    item_dict[tree_id].append({'child': file, 'caption': caption, 'kaltura': kaltura_id,
                                               'data': record, 'type': item_type, 'ref': ref,
                                               'record_uri': child['record_uri'], 'version_index': version_index})
            else:
                data = {
                    'jsonmodel_type': "digital_record_children",
                    'children': [{
                        'title': file,
                        'file_versions': [{
                            'jsonmodel_type': "file_version",
                            'file_uri': file,
                            'file_format_name': file_format_name,
                            'file_size_bytes': file_size_bytes,
                            'is_representative': True
                        }],
                        'digital_object': {'ref': ref}
                    }]
                }
                item_dict[tree_id].append({'child': file, 'caption': '', 'kaltura': '',
                                           'data': data, 'type': 'new', 'ref': ref})
    # Children in ArchivesSpace with no matching local file are candidates
    # for deletion (confirmed later via AskDeleteWindow).
    for child in tree['children']:
        if child['title'] not in files:
            if tree_id not in delete_item_dict:
                delete_item_dict[tree_id] = list()
            delete_item_dict[tree_id].append((child['record_uri'], child['title']))
def ask_to_delete_items():
    """Show the delete-confirmation dialog when stale AS records were found."""
    # delete_item_dict is populated by find_items; empty means nothing to ask.
    if delete_item_dict:
        AskDeleteWindow()
def magic_from_file(path_to_file):
    """Return the MIME type string for *path_to_file*.

    Dispatches on the module-level pymagic_flag: the two 'magic' bindings in
    the wild expose different APIs, so both variants are wrapped here.
    """
    if pymagic_flag:
        return mag.from_file(path_to_file)
    else:
        return magic.from_file(path_to_file, mime=True)
def display_items(file_listbox, item_listbox):
    """Rebuild the item listbox from the items cached for the selected folder."""
    item_listbox.delete(*item_listbox.get_children())
    selection_id = file_listbox.selection()
    # iids are 'I1', 'I2', ... -- the Set buttons parse the index back out.
    id_counter = 1
    if len(selection_id) > 0:
        for entry in item_dict[selection_id[0]]:
            item_listbox.insert('', 'end', text=entry['child'], iid='I' + str(id_counter),
                                values=(entry['kaltura'], entry['caption']))
            id_counter += 1
        # Pre-select the first item so the Set buttons have a target.
        if len(item_dict[selection_id[0]]) > 0:
            item_listbox.selection_set(('I1',))
def setup_gui(toplevel):
    """Build the main frame inside *toplevel* and make its cell resizable."""
    frame = MainFrame(toplevel)
    frame.grid(column=0, row=0, sticky='WENS', padx=pad_width, pady=pad_width)
    # Let the single grid cell grow with the window.
    toplevel.rowconfigure(0, weight=1)
    toplevel.columnconfigure(0, weight=1)
def disable_all_buttons(widget, enable=False):
    """Recursively disable (or, with enable=True, re-enable) Button and
    Entry widgets underneath *widget*.

    NOTE(review): winfo_class() for ttk widgets returns 'TButton'/'TEntry',
    which this set does not match -- confirm the ttk.Button instances (e.g.
    in CredentialsWindow) are intentionally left untouched.
    """
    disable_set = {"Button", "Entry"}
    for c in widget.winfo_children():
        if c.winfo_class() in disable_set:
            if enable:
                c.configure(state='normal')
            else:
                c.configure(state='disabled')
        # Recurse into every child, not just buttons/entries.
        disable_all_buttons(c, enable=enable)
def lock_process():
    """Serialize background work: take the lock, show a busy cursor, freeze UI."""
    global root
    process_lock.acquire()
    root.configure(cursor='wait')
    disable_all_buttons(root)
def unlock_process():
    """Release the work lock and restore the cursor and buttons."""
    global root
    process_lock.release()
    root.configure(cursor='')
    disable_all_buttons(root, True)
def check_credentials(parent):
    """Open a CredentialsWindow, pre-filled from credentials.json when present.

    Returns the window so the caller can wait_window() on it.
    """
    try:
        with open('credentials.json') as credentials:
            data = json.load(credentials)
        w = CredentialsWindow(parent,
                              url=data['baseurl'],
                              username=data['username'],
                              password=data['password'],
                              save_credentials=True)
    except (FileNotFoundError, KeyError, json.JSONDecodeError):
        # Missing, incomplete, or corrupt credentials file: fall back to a
        # blank login window instead of crashing at startup.
        w = CredentialsWindow(parent)
    return w
def main():
    """Build the root window, prompt for credentials, and enter the event loop."""
    global root
    root = tk.Tk()
    root.title("Make Digital Object Utility")
    setup_gui(root)
    # Keep everything disabled until login succeeds.
    disable_all_buttons(root)
    c = check_credentials(root)
    root.wait_window(c)
    disable_all_buttons(root, True)
    # AS is only set once CredentialsWindow authenticates successfully;
    # otherwise the app simply exits.
    if make_digital_object.AS is not None:
        root.mainloop()
if __name__ == "__main__":
    main()
| {"/make_digital_object_gui.py": ["/make_digital_object.py"]} |
61,753 | duspeccoll/astools | refs/heads/master | /2019/dalbey.py | #!/usr/bin/env python
################
#
# dalbey.py -- normalize Fridlyand Archive metadata enough that I can work with it in OpenRefine
#
################
import json
from lxml import etree
namespaces = {'mods': 'http://www.loc.gov/mods/v3'}
def parse_title(xml):
    """Return the MODS title, with any subtitle appended in parentheses.

    Returns None (implicitly) when the record has no mods:titleInfo element.
    """
    titleInfo = xml.find("mods:titleInfo", namespaces=namespaces)
    if titleInfo is not None:
        title = titleInfo.find("mods:title", namespaces=namespaces).text
        subtitle = titleInfo.find("mods:subTitle", namespaces=namespaces)
        if subtitle is not None:
            title += " ({})".format(subtitle.text)
        return title
def parse_dates(xml):
    """Return ArchivesSpace date objects for each mods:dateCreated in *xml*.

    Bug fix: the original read the module-level ``record`` variable instead
    of its ``xml`` parameter, so it only worked by accident inside the main
    processing loop.
    """
    dates = []
    # xpath() returns a (possibly empty) list, so it can be iterated directly.
    for date in xml.xpath("mods:originInfo/mods:dateCreated", namespaces=namespaces):
        dates.append({
            'expression': date.text,
            'type': "single"
        })
    return dates
def parse_extents(xml):
    """Return a one-element ArchivesSpace extents list, or [] when absent."""
    physical_description = xml.find("mods:physicalDescription", namespaces=namespaces)
    if physical_description is None:
        return []
    ext = {}
    summary = physical_description.find("mods:extent", namespaces=namespaces)
    if summary is not None:
        ext['container_summary'] = summary.text
    return [ext]
def parse_notes(xml):
    """Collect ArchivesSpace note objects from a MODS record.

    Handles abstract, materialspec, physdesc, physloc, odd (language-tagged
    general notes), relatedmaterial and userestrict notes.
    """
    notes = []
    # abstract
    abstract = xml.find("mods:abstract", namespaces=namespaces)
    if abstract is not None:
        notes.append({
            "type": "abstract",
            "content": [abstract.text]
        })
    # materialspec
    materialspec = xml.find("mods:physicalDescription/mods:note[@type='physical details']", namespaces=namespaces)
    if materialspec is not None:
        notes.append({
            'type': "materialspec",
            'content': [materialspec.text]
        })
    # physdesc
    physdesc = xml.find("mods:physicalDescription/mods:note[@type='physical description']", namespaces=namespaces)
    if physdesc is not None:
        notes.append({
            'type': "physdesc",
            'content': [physdesc.text]
        })
    # physloc -- bug fix: read from the element found on *xml* instead of
    # re-querying the module-level ``record`` variable.
    physloc = xml.find("mods:location/mods:physicalLocation", namespaces=namespaces)
    if physloc is not None:
        notes.append({
            "type": "physloc",
            "content": [physloc.text]
        })
    # odd (general notes, labeled by their @lang attribute)
    for odd in xml.xpath("mods:note", namespaces=namespaces):
        lang = odd.xpath("@lang")
        # Bug fix: ``label`` was unbound (or stale from a prior iteration)
        # when @lang was missing or neither 'eng' nor 'rus'.
        if lang and lang[0] == 'eng':
            label = "English note"
        elif lang and lang[0] == 'rus':
            label = "Russian note"
        else:
            label = "Note"
        notes.append({
            'type': "odd",
            'label': label,
            'subnotes': [{
                'content': odd.text
            }]
        })
    # relatedmaterial -- bug fix: only emit the note when at least one
    # related identifier exists (an empty note object was appended before).
    subnotes = [related.text
                for related in xml.xpath("mods:relatedItem/mods:identifier", namespaces=namespaces)]
    if subnotes:
        # NOTE(review): unlike the other notes, 'subnotes' here is a joined
        # string rather than a list of dicts; preserved since downstream
        # tooling may depend on it.
        notes.append({"type": "relatedmaterial", 'subnotes': ", ".join(subnotes)})
    # userestrict
    userestrict = xml.find("mods:accessCondition[@type='useAndReproduction']", namespaces=namespaces)
    if userestrict is not None:
        notes.append({
            "type": "userestrict",
            "subnotes": [{
                "content": userestrict.text
            }]
        })
    return notes
def parse_subjects(xml):
    """Return ArchivesSpace subject stubs: genre_form, topical and geographic."""
    subjects = []
    form = xml.find("mods:form", namespaces=namespaces)
    if form is not None:
        subjects.append({'term': form.text, 'type': "genre_form"})
    # xpath() always returns a list; the None check is retained verbatim.
    terms = xml.xpath("mods:subject", namespaces=namespaces)
    if terms is not None:
        for term in terms:
            for topic in term.xpath("./mods:topic", namespaces=namespaces):
                subjects.append({'term': topic.text, 'type': "topical"})
            for geographic in term.xpath("./mods:geographic", namespaces=namespaces):
                subjects.append({'term': geographic.text, 'type': "geographic"})
    return subjects
def parse_linked_agents(xml):
    """Return linked agent stubs: name subjects plus creators."""
    linked_agents = []
    # xpath() always returns a list; the None checks are retained verbatim.
    subjects = xml.xpath("mods:subject/mods:name/mods:namePart", namespaces=namespaces)
    if subjects is not None:
        for subject in subjects:
            linked_agents.append({'term': subject.text, 'role': "subject"})
    creators = xml.xpath("mods:name[./mods:role/mods:roleTerm = 'creator']", namespaces=namespaces)
    if creators is not None:
        for creator in creators:
            linked_agents.append({'term': creator.find("mods:namePart", namespaces=namespaces).text, 'role': "creator"})
    return linked_agents
def parse_documents(xml):
    """Return an external_documents entry for the record's URL, or []."""
    url = xml.find("mods:location/mods:url", namespaces=namespaces)
    if url is None:
        return []
    return [{
        'title': "Special Collections @ DU",
        'location': url.text
    }]
parser = etree.XMLParser(remove_blank_text=True)
# NOTE(review): 'object' shadows the builtin; kept as-is for compatibility.
object = {}
archival_objects = []
with open('dalbey.xml', 'r') as xml:
    root = etree.parse(xml, parser).getroot()
    records = root.xpath("mods:mods", namespaces=namespaces)
    for record in records:
        # Assemble one ArchivesSpace-shaped dict per MODS record.
        archival_objects.append({
            'title': parse_title(record),
            'component_id': record.find("mods:identifier", namespaces=namespaces).text,
            'dates': parse_dates(record),
            'extents': parse_extents(record),
            'notes': parse_notes(record),
            'linked_agents': parse_linked_agents(record),
            'subjects': parse_subjects(record),
            'external_documents': parse_documents(record)
        })
object['archival_objects'] = archival_objects
with open('dalbey.json', 'w') as f:
    json.dump(object, f)
| {"/make_digital_object_gui.py": ["/make_digital_object.py"]} |
61,754 | duspeccoll/astools | refs/heads/master | /2019/add.py | #!/usr/bin/env python
import argparse
import csv
import json
import os
from asnake.aspace import ASpace
AS = ASpace()
repo = AS.repositories(2)
parser = argparse.ArgumentParser()
parser.add_argument('-f', '--file', help="A CSV list of URIs to pass to the script")
args = parser.parse_args()
def get_json(uri):
    """GET *uri* from ArchivesSpace and return the decoded JSON body.

    Raises via raise_for_status() on HTTP error responses.
    """
    response = AS.client.get(uri)
    if response.status_code == 200:
        return json.loads(response.text)
    response.raise_for_status()
def post_json(uri, data):
    """POST *data* to *uri*, printing the API's status or error message."""
    r = AS.client.post(uri,json=data)
    message = json.loads(r.text)
    if r.status_code == 200:
        print("{}: {}".format(message['status'], message['uri']))
    else:
        print("Error: {}".format(message['error']))
def post_digital_object(uri, row, obj):
    """Create a digital object from CSV *row* and link it to archival object *obj*.

    row[0] is the file base name (a .pdf is assumed); row[1] is the handle
    used as the digital_object_id and external-document location. On success
    the archival object at *uri* is updated with the new instance.
    """
    dao = {
        'title': obj['display_string'],
        'digital_object_type': "text",
        'jsonmodel_type': "digital_object",
        'publish': True,
        'file_versions': []
    }
    dao['digital_object_id'] = row[1]
    dao['file_versions'].append({
        'file_uri': "{}.pdf".format(row[0]),
        'file_format_type': "pdf",
        'publish': True
    })
    r = AS.client.post("/repositories/2/digital_objects", json=dao)
    message = json.loads(r.text)
    if r.status_code == 200:
        print("{}: {}".format(message['status'], message['uri']))
        obj['external_documents'].append({
            'title': "Special Collections @ DU",
            'location': row[1]
        })
        # Link the new digital object back as the representative instance.
        obj['instances'].append({
            'instance_type': "digital_object",
            'jsonmodel_type': "instance",
            'is_representative': True,
            'digital_object': { 'ref': message['uri'] }
        })
        post_json(uri, obj)
    else:
        print("Error: {}".format(message['error']))
        pass  # NOTE(review): dead statement, retained verbatim
def search_for(obj):
    """Search ArchivesSpace for an archival object whose component_id is *obj*.

    Returns its URI, or None when zero or multiple matches are found (the
    outcome is printed either way).
    """
    r = AS.client.get('/repositories/2/search', params={'q': obj, 'type[]': "archival_object", 'page': "1"})
    if r.status_code == 200:
        results = json.loads(r.text)['results']
        # The search API returns staff *and* PUI hits; keep exact staff-side
        # matches only.
        uris = [res for res in results if res['component_id'] == obj and 'pui' not in res['types']]
        if len(uris) == 0:
            # Bug fix: the messages referenced the loop-global ``row``;
            # report the component id that was actually searched for.
            print("No URI found: {}".format(obj))
        elif len(uris) == 1:
            return uris[0]['uri']
        else:
            print("Found multiple URIs for {}".format(obj))
    else:
        r.raise_for_status()
# Main driver: read URIs from column 0 of the CSV, fetch each record, apply
# edits in the marked spot, and post the record back.
if args.file:
    if os.path.exists(args.file):
        with open(args.file, 'r') as f:
            reader = csv.reader(f)
            for row in reader:
                obj = get_json(row[0])
                # here is where you would do your edits
                post_json(row[0], obj)
    else:
        print("File not found: {}".format(args.file))
else:
    print("Please provide a file")
| {"/make_digital_object_gui.py": ["/make_digital_object.py"]} |
61,755 | duspeccoll/astools | refs/heads/master | /2019/reports.py | #!/usr/bin/env python
import configparser, requests, json, os, sys
config = configparser.ConfigParser()
config.read('local_settings.cfg')
dictionary = {
'baseURL': config.get('ArchivesSpace', 'baseURL'),
'repository':config.get('ArchivesSpace', 'repository'),
'user': config.get('ArchivesSpace', 'user'),
'password': config.get('ArchivesSpace', 'password')
}
repositoryBaseURL = '{baseURL}/repositories/{repository}'.format(**dictionary)
resourceURL = '{baseURL}'.format(**dictionary)
auth = requests.post('{baseURL}/users/{user}/login?password={password}&expiring=false'.format(**dictionary)).json()
headers = {'X-ArchivesSpace-Session': auth['session']}
data_models = { "1": "resources", "2": "archival_objects", "3": "agents", "4": "subjects", "5": "digital_objects", "6": "accessions", "7": "top_containers", "8": "container_profiles", "9": "events" }
agent_types = ["people", "corporate_entities", "families", "software"]
# I don't remember where I found the code for this class but it ensures that the script status update prints to one row only
class Printer():
    """Write *data* to stdout on a single, self-overwriting status line."""
    def __init__(self, data):
        # \r returns to column 0; the ANSI code \x1b[K clears to end of line.
        sys.stdout.write("\r\x1b[K"+data.__str__())
        # Bug fix: flush was referenced but never *called*, so output could
        # sit in the buffer instead of updating the terminal.
        sys.stdout.flush()
def get_object(url,max_retries=10,timeout=5):
    """GET *url* with the session headers, retrying transient failures.

    Returns the requests Response, or None when every retry failed.
    """
    import time  # local import: avoids touching the module's import block
    retry_on_exceptions = (
        requests.exceptions.Timeout,
        requests.exceptions.ConnectionError,
        requests.exceptions.HTTPError
    )
    for i in range(max_retries):
        try:
            result = requests.get(url,headers=headers)
        except retry_on_exceptions:
            print("Connection lost. Retry in five seconds... ")
            # Bug fix: the message promised a pause, but none happened;
            # honor the previously-unused ``timeout`` parameter.
            time.sleep(timeout)
            continue
        else:
            return result
    return None  # explicit: all retries exhausted
def string_from_dict(msg, d):
    """Prompt until the user enters a key of *d*; return the chosen value."""
    print(msg)
    for k in sorted(d):
        print('* (' + k + ') ' + d[k])
    s = input('> ')
    if s in d:
        return d[s]
    print("Please enter a value from the list.")
    # Bug fix: the recursive retry's result was discarded, so one wrong
    # answer made the function return None even after a valid re-entry.
    return string_from_dict(msg, d)
def run_report(jsonmodel):
    """Dump every record of *jsonmodel* to <home>/<model>_report.json.

    Writes one JSON object of the form {"<model>": [record, record, ...]}.
    """
    request_url = "/%s" % jsonmodel
    # Subjects, container profiles and agents are global endpoints; everything
    # else lives under the repository.
    if jsonmodel == "subjects" or jsonmodel == "container_profiles" or jsonmodel.startswith("agents"):
        request_url = resourceURL + request_url
    else:
        request_url = repositoryBaseURL + request_url
    ids = get_object(request_url + "?all_ids=true").json()
    jsonmodel = jsonmodel.replace('agents/', '')
    file_out = config.get('Destinations', 'home') + "/%s_report.json" % jsonmodel
    # 'with' guarantees the report file is closed even if a request fails.
    with open(file_out, "w") as f:
        f.write("{\"" + jsonmodel + "\":[")
        for idx, val in enumerate(ids, start=1):
            Printer("Writing %s (%d of %d)... " % (jsonmodel, idx, len(ids)))
            # Renamed from ``json``, which shadowed the imported json module.
            response = get_object("%s/%d" % (request_url, val))
            f.write(response.text.rstrip())
            if idx < len(ids):
                f.write(",")
        f.write("]}")
    print("done!")
# Ask the user which data model to dump; 'agents' fans out over all four
# agent sub-types since those are separate endpoints.
jsonmodel = string_from_dict('Please select a data model:', data_models)
if jsonmodel == "agents":
    for atype in agent_types:
        agent_type = "agents/" + atype
        run_report(agent_type)
else:
    run_report(jsonmodel)
| {"/make_digital_object_gui.py": ["/make_digital_object.py"]} |
61,756 | duspeccoll/astools | refs/heads/master | /2019/uris.py | #!/usr/bin/env python
###############################################################################
#
# uris.py -- generate uri.txt files
#
# The Special Collections @ DU ingest process requires a uri.txt file
# alongside the digital object files, so that the repository knows from which
# ArchivesSpace record it should be pulling metadata.
#
# This script iterates over a list of folders in a directory (provided by the
# user), searching for the call number in the folder's title and writing the
# URI it finds to that folder's uri.txt file.
#
###############################################################################
import json, os
from asnake.aspace import ASpace
AS = ASpace()
repo = AS.repositories(2)
def get_path():
    """Prompt for a directory path and validate that it exists."""
    path = input('Path to the folder containing your objects: ')
    if not os.path.isdir(path):
        raise ValueError("Not a directory: {}".format(path))
    return path
# this is a bit janky, but the search API returns frontend *and* PUI results,
# so we need to filter out the PUI results for de-duplication
def search_for(object):
    """Return staff-side search hits whose component_id equals *object*.

    The search API mixes frontend and PUI results, so PUI hits are filtered
    out to de-duplicate.
    """
    response = AS.client.get('/repositories/2/search', params={'q': object, 'type[]': "archival_object", 'page': "1"})
    if response.status_code == 200:
        hits = json.loads(response.text)['results']
        return [hit for hit in hits if hit['component_id'] == object and 'pui' not in hit['types']]
    response.raise_for_status()
path = get_path()
# Write one uri.txt per object subfolder, matched by searching the folder
# name as a component_id; missing or ambiguous matches are reported and skipped.
for dir in os.listdir(path):
    if os.path.isdir(os.path.join(path, dir)):
        results = search_for(dir)
        if len(results) == 0:
            print("No URI found: {}".format(dir))
        elif len(results) == 1:
            print("Writing {} to {}/uri.txt".format(results[0]['uri'], os.path.join(path, dir)))
            with open("{}/uri.txt".format(os.path.join(path, dir)), 'w') as f:
                f.write(results[0]['uri'])
        else:
            print("Found multiple URIs for {}".format(dir))
    else:
        print("Not a directory: {}".format(dir))
| {"/make_digital_object_gui.py": ["/make_digital_object.py"]} |
61,757 | duspeccoll/astools | refs/heads/master | /2019/updates.py | #!/usr/bin/env python
# this only works on subjects but it would be rad if it worked on any data model you want
import json, configparser, requests, argparse, os, re, csv
# let configparser get our local settings
config = configparser.ConfigParser()
config.read('local_settings.cfg')
dictionary = {
'baseURL': config.get('ArchivesSpace', 'baseURL'),
'repository':config.get('ArchivesSpace', 'repository'),
'user': config.get('ArchivesSpace', 'user'),
'password': config.get('ArchivesSpace', 'password'),
'path': config.get('Destinations', 'home')
}
# let argparse set up arguments for passing a file to it
parser = argparse.ArgumentParser()
parser.add_argument("-f", "--file", help="A file of URIs to pass to ArchivesSpace")
args = parser.parse_args()
# set up ArchivesSpace backend URLs
repositoryBaseURL = '{baseURL}/repositories/{repository}'.format(**dictionary)
resourceURL = '{baseURL}'.format(**dictionary)
path = '{path}'.format(**dictionary)
# get a backend session
auth = requests.post('{baseURL}/users/{user}/login?password={password}&expiring=false'.format(**dictionary)).json()
session = auth['session']
headers = {'X-ArchivesSpace-Session': session}
# get an object from the API
def get_object(url,max_retries=10,timeout=5):
    """GET *url* with the session headers, retrying transient network failures.

    Returns the requests Response, or None when every retry failed.
    """
    import time  # local import: avoids touching the module's import block
    retry_on_exceptions = (
        requests.exceptions.Timeout,
        requests.exceptions.ConnectionError,
        requests.exceptions.HTTPError
    )
    for i in range(max_retries):
        try:
            result = requests.get(url,headers=headers)
        except retry_on_exceptions:
            print("Connection failed. Retry in five seconds... ")
            # Bug fix: actually wait before retrying, as the message claims;
            # honors the previously-unused ``timeout`` parameter.
            time.sleep(timeout)
            continue
        else:
            return result
    return None  # explicit: all retries exhausted
# post a revised object to the API
def post_object(url,obj,log,max_retries=10,timeout=5):
    """POST *obj* (JSON) to *url* with retries; append the outcome to *log*.

    If *obj* is a string it is treated as an error message produced by
    process_object and logged without posting.
    """
    retry_on_exceptions = (
        requests.exceptions.Timeout,
        requests.exceptions.ConnectionError,
        requests.exceptions.HTTPError
    )
    # Fix: use a context manager so the log handle is closed even when a
    # request raises (the original leaked the handle on that path).
    with open(log, 'a') as f:
        # Sometimes process_object returns an error message as a string; we test for that here
        if type(obj) is str:
            print(obj)
            f.write("%s\n" % obj)
        else:
            for i in range(max_retries):
                try:
                    post = requests.post(url,headers=headers,data=json.dumps(obj))
                except retry_on_exceptions:
                    print("Connection failed. Retry in five seconds... ")
                    continue
                else:
                    if(post.status_code == requests.codes.ok):
                        print("%s updated" % url)
                        f.write("%s updated\n" % url)
                    else:
                        error = post.json()
                        print("Error while processing %s: %s" % (url, error['error']))
                        f.write("Error while processing %s: %s\n" % (url, error['error']))
                    break
# does whatever find/replace operation you want to do
def process_object(obj):
    """Hook for per-record edits; currently a pass-through.

    Apply whatever find/replace is needed to *obj* here, then return it so
    the main loop can post it back.
    """
    return obj
log = "%s/logfile.txt" % path
# Start each run with a fresh log file.
try:
    os.remove(log)
except OSError:
    pass
# if a file of URIs is provided, work on that file; otherwise work on all objects of the provided data model
if args.file:
    with open(args.file, 'r') as f:
        for uri in f:
            url = "%s%s" % (resourceURL, uri.rstrip())
            # NOTE(review): get_object returns None when every retry fails,
            # which would make .json() raise here -- confirm acceptable.
            obj = get_object(url).json()
            obj = process_object(obj)
            post_object(url, obj, log)
else:
    # make up a way to have the user specify what data model they want to work on instead of having to hard-code it every time
    ids = requests.get(("%s/subjects?all_ids=true" % resourceURL),headers=headers).json()
    for val in ids:
        url = "%s/subjects/%d" % (resourceURL, val)
        obj = get_object(url).json()
        obj = process_object(obj)
        post_object(url, obj, log)
print("Script done and results written to %s" % log)
| {"/make_digital_object_gui.py": ["/make_digital_object.py"]} |
61,758 | duspeccoll/astools | refs/heads/master | /2019/make_digital_objects.py | #!/usr/bin/env python
###############################################################################
#
# make_digital_objects.py -- make a bunch of digital object records at once
#
# Takes as input the URI for whatever series contains the digitized material,
# as well as a list of handles/file names (in CSV), and constructs digital
# objects from the data it finds
#
###############################################################################
import re, os, errno, json, csv
from asnake.aspace import ASpace
# Shared ArchivesSpace client; asnake reads connection details from its own config.
AS = ASpace()
# Handle for repository 2.  NOTE(review): ``repo`` is never referenced below -- confirm it is needed.
repo = AS.repositories(2)
def get_uri(uri=None):
    """Return a validated repository-2 Resource/Archival Object URI.

    :param uri: URI to validate; when None (the original no-arg behavior)
                the user is prompted interactively.
    :raises ValueError: if the URI is not of the form
                        /repositories/2/(resources|archival_objects)/<id>
    """
    if uri is None:
        uri = input('Enter the URI for the component containing your digitized items: ')
    # raw string: the original non-raw pattern ('\/', '\d') relied on
    # invalid escape sequences, which warn on Python 3.6+
    if not re.compile(r'^/repositories/2/(resources|archival_objects)/\d+$').match(uri):
        raise ValueError("URI must belong to a Resource or Archival Object")
    return uri
def get_json(uri):
    """GET *uri* via the shared ArchivesSpace client and return the parsed
    JSON body; raise the HTTP error for any non-200 response."""
    response = AS.client.get(uri)
    if response.status_code != 200:
        response.raise_for_status()
    return json.loads(response.text)
def build_list(uri):
    """Map each child's component_id to its URI, title, empty file-version
    list and (when known) handle.

    Improvements over the original: identifiers.csv is read once up front
    instead of being re-opened for every child, and the ``list`` builtin
    is no longer shadowed.
    """
    # identifiers.csv maps component ids to handles;
    # row[0] is the Sound Model ID, row[1] is the handle
    handles = {}
    if os.path.exists('identifiers.csv'):
        with open('identifiers.csv', 'r') as f:
            for row in csv.reader(f):
                handles[row[0]] = row[1]
    items = {}
    children = get_json("{}/children".format(uri))
    for child in children:
        key = child['component_id']
        items[key] = {
            'uri': child['uri'],
            'title': child['display_string'],
            'file_versions': []
        }
        if key in handles:
            items[key]['digital_object_id'] = handles[key]
    return items
def link_object(ao, do):
    """Attach digital object *do* to archival object *ao* as its
    representative digital-object instance, then save *ao* back."""
    item = get_json(ao)
    new_instance = {
        'instance_type': 'digital_object',
        'jsonmodel_type': 'instance',
        'is_representative': True,
        'digital_object': { 'ref': do }
    }
    item['instances'].append(new_instance)
    response = AS.client.post(ao,json=item)
    payload = json.loads(response.text)
    if response.status_code == 200:
        print("{}: {}".format(payload['status'], payload['uri']))
    else:
        print("Error: {}".format(payload['error']))
uri = get_uri()
print("Building the tree... ")
# NOTE(review): this script shadows the builtins ``list``, ``file`` and
# ``object`` below -- consider renaming in a future cleanup.
list = build_list(uri)
###############################################################################
#
# Whatever your CSV is named, it should have one row per *file*, and contain
# two columns:
#
# * Column 1 contains the component ID for the item to which the file belongs
# * Column 2 contains the file name
#
# A component's digital object may contain more than one file.
#
# The script will iterate over the CSV file and append the file metadata to
# whatever item is specified in column 1.
#
###############################################################################
file = input('Enter the name of the file containing your digital object file names: ')
if os.path.exists(file):
    with open(file, 'r') as f:
        reader = csv.reader(f)
        for row in reader:
            # row[0] = component ID, row[1] = file name (see banner above)
            if row[0] in list:
                list[row[0]]['file_versions'].append({
                    'file_uri': row[1],
                    'caption': row[1],
                    'file_format_name': 'wav',
                    'jsonmodel_type': 'file_version'
                })
            else:
                print("No item with this call number: {}".format(row[0]))
else:
    raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), file)
# Create one digital object per component and link it to its item.
for key in list:
    item = list[key]['uri']
    object = {
        'title': list[key]['title'],
        'digital_object_type': 'sound_recording',
        'jsonmodel_type': 'digital_object',
        'file_versions': list[key]['file_versions']
    }
    # prefer the handle from identifiers.csv; fall back to the component id
    if 'digital_object_id' in list[key].keys():
        object['digital_object_id'] = list[key]['digital_object_id']
    else:
        object['digital_object_id'] = key
    r = AS.client.post('/repositories/2/digital_objects',json=object)
    message = json.loads(r.text)
    if r.status_code == 200:
        digital_object = message['uri']
        # link_object function accepts the URIs for the item and digital object; links the latter to the former
        link_object(item, digital_object)
    else:
        print("Error: {}".format(message['error']))
| {"/make_digital_object_gui.py": ["/make_digital_object.py"]} |
61,759 | duspeccoll/astools | refs/heads/master | /2019/marc_export.py | #!/usr/bin/env python
import configparser, requests, time
# Read ArchivesSpace connection settings from the local config file.
config = configparser.ConfigParser()
config.read('local_settings.cfg')
dictionary = {
    'baseURL': config.get('ArchivesSpace', 'baseURL'),
    'repository':config.get('ArchivesSpace', 'repository'),
    'user': config.get('ArchivesSpace', 'user'),
    'password': config.get('ArchivesSpace', 'password')
}
repositoryBaseURL = '{baseURL}/repositories/{repository}'.format(**dictionary)
resourceURL = '{baseURL}'.format(**dictionary)
# Authenticate and capture the session token for subsequent requests.
# NOTE(review): credentials travel in the query string -- confirm the
# backend requires this form before changing it.
auth = requests.post('{baseURL}/users/{user}/login?password={password}&expiring=false'.format(**dictionary)).json()
session = auth['session']
headers = {'X-ArchivesSpace-Session': session}
# Export MARC records modified since the previous run's timestamp.
modified_since = config.get('Timestamps', 'marc_export')
post = requests.post(repositoryBaseURL + "/marc_export?modified_since=" + modified_since,headers=headers)
post.encoding = 'utf8'
if(post.status_code == requests.codes.ok):
    filename = config.get('Destinations', 'home') + "/marc_export_" + time.strftime("%Y%m%d_%H%M%S") + ".xml"
    # ``with`` guarantees the export file is closed even if the write fails
    # (the original used bare open()/close() with no try/finally)
    with open(filename, 'w', encoding='utf8') as f:
        f.write(post.text)
    # set the current time as the new timestamp in your config file
    config['Timestamps']['marc_export'] = str(int(time.time()))
    with open('local_settings.cfg', 'w') as cf:
        config.write(cf)
else:
    print(post.text)
| {"/make_digital_object_gui.py": ["/make_digital_object.py"]} |
61,760 | duspeccoll/astools | refs/heads/master | /2019/kaltura_bulk_upload.py | #!/usr/bin/env python
##########
#
# kaltura_bulk_upload.py -- uses the Kaltura Work Order plugin to build Bulk Upload XML
#
# Requires a CSV file of URIs as input; will then create an mrss/channel XML object,
# add the 'item' tree for the Kaltura XML for each URI in the CSV file, and save the
# resulting XML object at 'kaltura.xml' in the directory where this script lives
#
##########
import argparse
import csv
import json
import os
from lxml import etree
from asnake.aspace import ASpace
# CLI: -f/--file supplies the CSV of ArchivesSpace URIs; -o names the output.
ap = argparse.ArgumentParser(description='Add or update ArchivesSpace metadata properties from CSV input')
ap.add_argument('-f', '--file', help='The CSV file containing the metadata to add')
ap.add_argument('-o', '--output', help='The name of the output file to create', required=False)
args = ap.parse_args()
AS = ASpace()
# XML namespaces for the Kaltura bulk-upload "mrss" document.
nsmap = {'xsd': "http://www.w3.org/2001/XMLSchema", 'xsi': "http://www.w3.org/2001/XMLSchema-instance"}
attr_qname = etree.QName("http://www.w3.org/2001/XMLSchema-instance", "noNamespaceSchemaLocation")
if args.file:
    if os.path.exists(args.file):
        thumbnail = input("Thumbnail URL (leave blank for none): ")
        if args.output:
            output = args.output
        else:
            output = input("Output file: ")
        # Root <mrss><channel> into which every per-object <item> is merged.
        root = etree.Element("mrss", {attr_qname: "ingestion.xsd"}, nsmap=nsmap)
        channel = etree.SubElement(root, "channel")
        with open(args.file, 'r') as uris:
            reader = csv.reader(uris)
            for row in reader:
                # row[0] is an ArchivesSpace URI; fetch its Kaltura XML export
                print("downloading {}".format(row[0]))
                r = AS.client.get("{}/kaltura.xml".format(row[0]))
                if r.status_code == 200:
                    xml = etree.fromstring(r.text.encode('utf-8'))
                    items = xml.xpath("channel/item")
                    if len(items) > 0:
                        for item in items:
                            if thumbnail:
                                # inject the optional default thumbnail right after <contentAssets>
                                tns = etree.Element("thumbnails")
                                tn = etree.SubElement(tns, "thumbnail", attrib={'isDefault': 'true'})
                                # SubElement attaches itself to the tree; the ``url`` binding is unused on purpose
                                url = etree.SubElement(tn, "urlContentResource", attrib={'url': thumbnail})
                                item.find("contentAssets").addnext(tns)
                            channel.append(item)
                    else:
                        print("Error: No files found: {}".format(row[0]))
                else:
                    error = json.loads(r.text)['error']
                    print("Error: {}: {}".format(error, row[0]))
        kxml = etree.ElementTree(root)
        with open(output, 'w') as f:
            f.write(etree.tostring(kxml, encoding="utf-8", xml_declaration=True, pretty_print=True).decode("utf-8"))
        print("Bulk upload file written to {}".format(output))
    else:
        print("File not found: {}".format(args.file))
else:
    print("Please provide a file")
| {"/make_digital_object_gui.py": ["/make_digital_object.py"]} |
61,775 | techstonia/enimkommenteeritud | refs/heads/master | /app/views.py | # -*- coding: utf-8 -*-
from flask import render_template, flash, redirect, url_for, g
from app import app, db
from forms import CommentForm
from markupsafe import Markup
from models import Novelty, Comment
from datetime import datetime, timedelta
from config import POSTS_PER_PAGE, SITES
from werkzeug.exceptions import abort
@app.errorhandler(404)
def internal_error(error):
    # 404 handler.  NOTE(review): shares its name with the 500 handler
    # below; Flask registers both via the decorator, but the later def
    # shadows this one at module level -- consider renaming.
    return render_template('404.html'), 404
@app.errorhandler(500)
def internal_error(error):
    # Roll back any half-finished transaction so the DB session stays
    # usable after a server error.
    db.session.rollback()
    return render_template('500.html'), 500
# Human-readable site labels for the nav bar, keyed by the SITES config keys.
sites = {'all': "Kõik"}
for site in SITES:
    sites[site] = site.capitalize()
# fix up the one label that needs an accented capital
if 'arileht' in sites:
    sites['arileht'] = "Ärileht"
# Time-window definitions: cutoff delta, page title, and nav-button label.
time_modes = {'today': {'days': timedelta(days=1), 'title': "Viimased 24h | Enim kommenteeritud", 'time_button_text': "24h"},
              'last-three-days': {'days': timedelta(days=3), 'title': "Viimased 3 päeva | Enim kommenteeritud", 'time_button_text': "3 päeva"},
              'last-week': {'days': timedelta(days=7), 'title': "Viimane nädal | Enim kommenteeritud", 'time_button_text': "nädal"},
              'last-month': {'days': timedelta(days=30), 'title': "Viimane kuu | Enim kommenteeritud", 'time_button_text': "kuu"},
              'last-year': {'days': timedelta(days=365), 'title': "Viimane aasta | Enim kommenteeritud", 'time_button_text': "aasta"},
              }
def nav_bar_times():
    """Build the <li> markup for the time-window navigation bar, marking
    the currently selected window (g.time_mode) as active."""
    ordered_keys = ('today', 'last-three-days', 'last-week', 'last-month', 'last-year')
    parts = []
    for key in ordered_keys:
        css = ' class=\"active\"' if g.time_mode == key else ''
        link = url_for('render_novelties', time_mode=key, site=g.site)
        label = time_modes[key]['time_button_text']
        parts.append("<li%s><a href=\"%s\">%s</a></li>" % (css, link, label))
    return Markup(''.join(parts).decode('utf-8'))
def nav_bar_sites():
    """Build the <li> markup for the site navigation bar ('all' first,
    then every configured site), marking the current site as active."""
    ordered = ['all'] + [name for name in SITES]
    parts = []
    for name in ordered:
        css = ' class=\"active\"' if g.site == name else ''
        link = url_for('render_novelties', time_mode=g.time_mode, site=name)
        parts.append("<li%s><a href=\"%s\">%s</a></li>" % (css, link, sites[name]))
    return Markup(''.join(parts).decode('utf-8'))
@app.route('/')
@app.route('/<site>/')
@app.route('/<site>/<time_mode>')
@app.route('/<site>/<time_mode>/<int:page>')
def render_novelties(site='all', time_mode='today', page=1):
    """List the most-commented articles for *site* within *time_mode*'s
    window, paginated; 404 for unknown site or time window."""
    # Both parameters always arrive as non-empty strings (the route
    # defaults fill missing segments), so one membership check replaces
    # the original's two overlapping guards, the first of which
    # (``not time_mode``) was unreachable.
    if site not in sites or time_mode not in time_modes:
        abort(404)
    g.time_mode = time_mode
    g.site = site
    date_from = datetime.utcnow() - time_modes[time_mode]['days']
    # build the query once; only add the site filter when not 'all'
    query = Novelty.query.filter(Novelty.published_date > date_from)
    if g.site != 'all':
        query = query.filter(Novelty.site == g.site)
    novelties = query.order_by(Novelty.comments_count.desc()).paginate(page, POSTS_PER_PAGE, False)
    return render_template('index.html',
                           title=time_modes[time_mode]['title'].decode('utf-8'),
                           novelties=novelties)
@app.route('/novelty/<int:novelty_id>', methods=['GET', 'POST'])
def novelty_discussion(novelty_id):
    # Article discussion page: shows local comments and accepts new ones.
    novelty = Novelty.query.filter(Novelty.id == novelty_id).first()
    if not novelty:
        abort(404)
    if novelty.comments().count() == 0:
        flash("Ole esimene, kes artiklit kommenteerib!")
    form = CommentForm()
    # ``nickname`` looks like a spam honeypot: the comment is stored only
    # when that field stays empty -- TODO confirm against the template.
    if form.validate_on_submit() and not form.nickname.data:
        comment = Comment(body=form.comment.data,
                          timestamp=datetime.utcnow(),
                          nickname=form.nimi.data,
                          novelty_id=novelty_id)
        db.session.add(comment)
        db.session.commit()
        flash('Kommentaar lisatud!')
        return redirect(url_for('novelty_discussion', novelty_id=novelty_id))
    return render_template('novelty_discussion.html', title=novelty.headline,
                           novelty=novelty, form=form)
61,776 | techstonia/enimkommenteeritud | refs/heads/master | /app/forms.py | from flask_wtf import Form
from wtforms import TextAreaField, TextField
from wtforms.validators import Required
class CommentForm(Form):
    """Comment submission form for an article discussion page."""
    # commenter name ("nimi" is Estonian for "name"); required
    nimi = TextField('post', validators=[Required()])
    comment = TextAreaField('post', validators=[Required()])
    # optional field; the view discards submissions where it is filled in,
    # so it presumably acts as a spam honeypot -- TODO confirm in template
    nickname = TextField('post')
| {"/app/views.py": ["/app/__init__.py", "/config.py"], "/app/__init__.py": ["/app/views.py"], "/runp-heroku.py": ["/app/__init__.py", "/app/parse.py"], "/app/models.py": ["/app/__init__.py"]} |
61,777 | techstonia/enimkommenteeritud | refs/heads/master | /app/__init__.py | import os
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('config')
db = SQLAlchemy(app)
def log_message(msg):
    """Log *msg* at INFO level, configuring handlers on first use.

    Bug fix: the original attached a brand-new handler on EVERY call, so
    each message was emitted once per previous call (and file descriptors
    accumulated).  Handlers are now attached only if the matching handler
    type is not already present.
    """
    if not app.debug and os.environ.get('HEROKU') is None:
        import logging
        from logging.handlers import RotatingFileHandler
        if not any(isinstance(h, RotatingFileHandler) for h in app.logger.handlers):
            file_handler = RotatingFileHandler('tmp/microblog.log', 'a', 1 * 1024 * 1024, 10)
            file_handler.setLevel(logging.INFO)
            file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))
            app.logger.addHandler(file_handler)
        app.logger.setLevel(logging.INFO)
        app.logger.info('microblog: %s' % msg)
    if os.environ.get('HEROKU') is not None:
        import logging
        # exact type check: file handlers subclass StreamHandler
        if not any(type(h) is logging.StreamHandler for h in app.logger.handlers):
            stream_handler = logging.StreamHandler()
            app.logger.addHandler(stream_handler)
        app.logger.setLevel(logging.INFO)
        app.logger.info('microblog: %s' % msg)
log_message('startup')
# importing views/models registers the routes and the DB models on app/db
from app import views, models
from app.views import nav_bar_times, nav_bar_sites
# expose the nav-bar builders to every Jinja template
app.jinja_env.globals['nav_bar_times'] = nav_bar_times
app.jinja_env.globals['nav_bar_sites'] = nav_bar_sites
| {"/app/views.py": ["/app/__init__.py", "/config.py"], "/app/__init__.py": ["/app/views.py"], "/runp-heroku.py": ["/app/__init__.py", "/app/parse.py"], "/app/models.py": ["/app/__init__.py"]} |
61,778 | techstonia/enimkommenteeritud | refs/heads/master | /runp-heroku.py | #!flask/bin/python
from app import app
from app.parse import parse_sites, update_all_sites
# Kick off the background feed parser and the tiered comment-count
# updaters; both are decorated with @async and return immediately.
parse_sites()
update_all_sites()
61,779 | techstonia/enimkommenteeritud | refs/heads/master | /app/helpers.py | from flask import g
def active_time_mode(time_mode):
    """Return 'class=active' when *time_mode* is the currently selected
    window (g.time_mode); otherwise return None (implicitly falsy)."""
    if g.time_mode != time_mode:
        return None
    return 'class=active'
| {"/app/views.py": ["/app/__init__.py", "/config.py"], "/app/__init__.py": ["/app/views.py"], "/runp-heroku.py": ["/app/__init__.py", "/app/parse.py"], "/app/models.py": ["/app/__init__.py"]} |
61,780 | techstonia/enimkommenteeritud | refs/heads/master | /app/models.py | from app import db
class Novelty(db.Model):
    """A news article tracked for its comment count on the source site."""
    id = db.Column(db.Integer, primary_key=True)
    site = db.Column(db.String(15))
    url = db.Column(db.String(2048), unique=True, index=True, nullable=False)
    headline = db.Column(db.String(512), nullable=False)
    published_date = db.Column(db.DateTime)
    comments_count = db.Column(db.Integer)  # count scraped from the source site
    last_update = db.Column(db.DateTime)
    def comments(self):
        """Query for the locally-posted comments on this article."""
        return Comment.query.filter_by(novelty_id=self.id)
    def user_comments_count(self):
        """Number of locally-posted comments on this article."""
        return Comment.query.filter_by(novelty_id=self.id).count()
    def __repr__(self):
        # bug fix: was misspelled ``_repr__`` and therefore never used by repr()
        return '<Novelty %r>' % self.headline
class Comment(db.Model):
    """A comment posted locally on a tracked article."""
    id = db.Column(db.Integer, primary_key=True)
    body = db.Column(db.String(500))
    timestamp = db.Column(db.DateTime)
    nickname = db.Column(db.String(20))
    # owning article
    novelty_id = db.Column(db.Integer, db.ForeignKey('novelty.id'))
    def __repr__(self):
        return '<Comment %r>' % self.body
61,781 | techstonia/enimkommenteeritud | refs/heads/master | /config.py | # -*- coding: utf-8 -*-
from collections import OrderedDict
import os
basedir = os.path.abspath(os.path.dirname(__file__))
CSRF_ENABLED = True
SECRET_KEY = 'This is a secret!'
# Use Heroku's DATABASE_URL when present; fall back to a local SQLite file.
if os.environ.get('DATABASE_URL') is None:
    SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'app.db')
else:
    SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
SQLALCHEMY_MIGRATE_REPO = os.path.join(basedir, 'db_repository')
# pagination
POSTS_PER_PAGE = 20
# Update
# seconds between RSS polling cycles
UPDATE_FREQUENCY = 120
# Sites
# Ordered mapping of site key -> RSS feed URL; order drives the nav bar.
SITES = OrderedDict([('delfi', 'http://feeds2.feedburner.com/delfiuudised'),
                     ('postimees', 'http://www.postimees.ee/rss/'),
                     ('epl', 'http://feeds.feedburner.com/eestipaevaleht'),
                     ('ekspress', 'http://feeds.feedburner.com/EestiEkspressFeed'),
                     ('sport', 'http://feeds2.feedburner.com/delfisport'),
                     ('arileht', 'http://feeds2.feedburner.com/delfimajandus'),
                     ('forte', 'http://feeds2.feedburner.com/forteuudised'),
                     ('maaleht', 'http://feeds2.feedburner.com/maaleht'),
                     ('publik', 'http://feeds2.feedburner.com/publikuudised'),
                     ('naistekas', 'http://feeds2.feedburner.com/naistekas'),
                     ])
61,782 | techstonia/enimkommenteeritud | refs/heads/master | /app/parse.py | #!flask/bin/python
# -*- coding: utf-8 -*-
import urllib2
from app import db, log_message
from app.decorators import async
from app.models import Novelty
from bs4 import BeautifulSoup
from config import UPDATE_FREQUENCY, SITES
import feedparser
import time
from datetime import datetime, timedelta
def parse_site(feed_url, site_name):
    """Parse one RSS feed and insert any articles not yet in the database.

    Deduplicated from the original's two identical branches: the only
    per-site difference is where the article URL lives in the feed entry.
    """
    d = feedparser.parse(feed_url)
    for entry in d['entries']:
        # Postimees/Naistekas carry the article URL in the link list; the
        # feedburner-proxied feeds expose the original URL separately.
        if site_name in ('postimees', 'naistekas'):
            url = entry['links'][0]['href']
        else:
            url = entry['feedburner_origlink']
        novelty = Novelty.query.filter_by(url=url).first()
        if novelty is None:
            headline = entry['title']
            published_date = datetime(*entry['published_parsed'][:6])
            comments_count = get_comments_count(url, site_name)
            # a zero count (including scrape failure) leaves last_update unset
            last_update = datetime.utcnow() if comments_count else None
            novelty = Novelty(url=url,
                              site=site_name,
                              headline=headline,
                              comments_count=comments_count,
                              last_update=last_update,
                              published_date=published_date)
            db.session.add(novelty)
            db.session.commit()
def get_comments_count(novelty_url, site_name):
    """Scrape the comment count from an article page.

    Returns 0 on any failure (network error, markup change, missing
    element) -- deliberately best-effort so feed parsing never aborts.
    Deduplicated from two branches that differed only in the CSS class.
    """
    # Postimees marks the comment link with class "com";
    # the Delfi-family sites use "articleTitle".
    css_class = "com" if site_name == "postimees" else "articleTitle"
    try:
        response = urllib2.urlopen(novelty_url)
        soup = BeautifulSoup(response.read())
        container = soup.find_all(attrs={"class": css_class})[0]
        count = container.find_all("a")[0].contents[0]
        count = count.replace('(', '').replace(')', '')
        return int(count)
    except Exception:
        # narrowed from a bare ``except`` so KeyboardInterrupt/SystemExit
        # still propagate
        return 0
@async
def parse_sites(once=False):
while True:
for site_name, feed_url in SITES.items():
try:
parse_site(feed_url, site_name.decode('utf-8'))
except:
log_message("parse_sites() failed on %s" % datetime.utcnow())
time.sleep(UPDATE_FREQUENCY)
if once:
break
@async
def update_comment_counts(days, interval_min, once=False):
    """Refresh scraped comment counts for articles newer than *days* days,
    repeating every *interval_min* minutes (single pass when *once*)."""
    while True:
        for site in SITES:
            date_from = datetime.utcnow() - timedelta(days=days)
            novelties = Novelty.query.filter(Novelty.published_date > date_from).filter(Novelty.site == site)
            for novelty in novelties:
                count = get_comments_count(novelty.url, novelty.site)
                # counts only move up; a scrape failure returns 0 and is ignored
                if count > novelty.comments_count:
                    novelty.comments_count = count
                    db.session.add(novelty)
                    db.session.commit()
                # small pause between requests to be polite to the source site
                time.sleep(0.3)
        if once:
            break
        time.sleep(interval_min * 60)
@async
def delete_older_novelties():
    """Daily purge: drop articles older than 8 days that attracted fewer
    than 30 comments, keeping the table small."""
    while True:
        date_from = datetime.utcnow() - timedelta(days=8)
        novelties = Novelty.query.filter(Novelty.published_date < date_from).filter(Novelty.comments_count < 30)
        for novelty in novelties:
            db.session.delete(novelty)
            db.session.commit()
        # run once per day
        time.sleep(3600 * 24)
@async
def update_all_sites(once=False):
    """Start the tiered refreshers: recent articles update often, older
    ones progressively less frequently.

    NOTE(review): update_comment_counts is itself decorated with @async,
    so these calls return immediately and the sleeps here only stagger
    thread start-up -- confirm that is the intent.
    """
    delete_older_novelties()
    update_comment_counts(1, 30, once)
    time.sleep(5 * 60)
    update_comment_counts(3, 200, once)
    time.sleep(30 * 60)
    update_comment_counts(7, 500, once)
if __name__ == '__main__':
    # Manual maintenance entry point: only prunes old rows; the parsing /
    # updating runs are left commented out for ad-hoc use.
    delete_older_novelties()
    #parse_sites(True)
    #update_all_sites(True)
61,784 | Brandon-Ritchie/PythonSlidingPuzzle | refs/heads/main | /main.py | """
Found bugs:
Game is not always solveable
"""
import classes
import random
def generate_list_of_num():
    """Return the numbers 1-15 in uniformly random order.

    Uses random.sample instead of the original rejection-sampling loop,
    which made ever more redundant draws as the list filled up.
    """
    return random.sample(range(1, 16), 15)
def generate_pieces():
    """Create the 16 GamePiece objects for a fresh board: fifteen shuffled
    numbered tiles, then a trailing blank (' ') piece for cell (3, 3)."""
    values = generate_list_of_num()
    values.append(' ')  # the final cell starts as the open space
    coordinates = [(row, col) for row in range(4) for col in range(4)]
    return [classes.GamePiece(value, spot) for value, spot in zip(values, coordinates)]
if __name__ == '__main__':
    try:
        # set up a freshly shuffled board
        pieces = generate_pieces()
        game = classes.Game()
        game.generate_board(pieces)
        game.create_piece_dictionary(pieces)
        # NOTE(review): ``valid_rows_and_columns`` is never used below
        valid_rows_and_columns = [1, 2, 3, 4]
        for row in game.board:
            print(row)
        # main input loop: read a (row, column) pair and try to slide that piece
        while True:
            if game.is_puzzle_completed() is True:
                break
            print('The puzzle is completed: ' + str(game.is_puzzle_completed()))
            print('Choose the row of the piece you want to move:')
            chosen_row = input()
            print('Choose the column of the piece you want to move:')
            chosen_column = input()
            try:
                chosen_piece = game.board[int(chosen_row) - 1][int(chosen_column) - 1]
                game.move_piece(chosen_piece)
                game.update_board(pieces)
            # NOTE(review): broad except also hides programming errors --
            # consider catching (ValueError, IndexError) instead
            except:
                print('Invalid move. Please enter numbers between 1 and 4!')
            for row in game.board:
                print(row)
        print('You have completed the puzzle!!')
    except KeyboardInterrupt:
        exit()
61,785 | Brandon-Ritchie/PythonSlidingPuzzle | refs/heads/main | /classes.py | class Game():
@property
def board(self):
return self._board
@board.setter
def board(self, board):
self._board = board
@property
def piece_dictionary(self):
return self._piece_dictionary
@piece_dictionary.setter
def piece_dictionary(self, dict_of_pieces):
if type(dict_of_pieces) is dict:
self._piece_dictionary = dict_of_pieces
def create_piece_dictionary(self, pieces):
self.piece_dictionary = {}
for piece in pieces:
self.piece_dictionary[piece.num] = piece.position
def find_open_space_direction(self, piece):
open_space_position = self.piece_dictionary[' ']
piece_position = self.piece_dictionary[piece.num]
if piece_position[0] == open_space_position[0]:
if piece_position[1] - 1 == open_space_position[1]:
return 'Left'
elif piece_position[1] + 1 == open_space_position[1]:
return 'Right'
elif piece_position[1] == open_space_position[1]:
if piece_position[0] - 1 == open_space_position[0]:
return 'Above'
elif piece_position[0] + 1 == open_space_position[0]:
return 'Below'
else:
return 'Too Far'
def can_piece_move(self, piece):
open_space_position = self.piece_dictionary[' ']
print(open_space_position)
piece_position = self.piece_dictionary[piece.num]
if ((piece_position[0] + 1 == open_space_position[0] or piece_position[0] - 1 == open_space_position[0]) and
(piece_position[1] + 1 == open_space_position[1] or piece_position[1] - 1 == open_space_position[1])):
return False
elif (piece_position[0] + 1 == open_space_position[0] or
piece_position[0] - 1 == open_space_position[0] or
piece_position[1] + 1 == open_space_position[1] or
piece_position[1] - 1 == open_space_position[1]):
return True
else:
return False
def move_piece(self, piece):
open_space_direction = self.can_piece_move(piece)
temp_open_space_position = self.piece_dictionary[' ']
piece_position = self.piece_dictionary[piece.num]
if open_space_direction is True:
self.piece_dictionary[' '] = piece_position
self.piece_dictionary[piece.num] = temp_open_space_position
return True
else:
print('That piece is not next to the open space. Please choose a different piece.')
return False
def generate_board(self, pieces):
list_of_pieces = []
for piece in pieces:
list_of_pieces.append(piece)
self.board = [[], [], [], []]
for i in range(0,4):
for j in range(0, 4):
if len(list_of_pieces) == 0:
break
else:
self.board[i].append(list_of_pieces[0])
list_of_pieces.pop(0)
def update_board(self, pieces):
for key, value in self.piece_dictionary.items():
for piece in pieces:
if piece.num == key:
(row, column) = value
piece.position = value
self.board[row][column] = piece
for row in self.board:
print(row)
def is_puzzle_completed(self):
if (self.piece_dictionary[1] == (0, 0) and
self.piece_dictionary[2] == (0, 1) and
self.piece_dictionary[3] == (0, 2) and
self.piece_dictionary[4] == (0, 3) and
self.piece_dictionary[5] == (1, 0) and
self.piece_dictionary[6] == (1, 1) and
self.piece_dictionary[7] == (1, 2) and
self.piece_dictionary[8] == (1, 3) and
self.piece_dictionary[9] == (2, 0) and
self.piece_dictionary[10] == (2, 1) and
self.piece_dictionary[11] == (2, 2) and
self.piece_dictionary[12] == (2, 3) and
self.piece_dictionary[13] == (3, 0) and
self.piece_dictionary[14] == (3, 1) and
self.piece_dictionary[15] == (3, 2) and
self.piece_dictionary[' '] == (3, 3)):
return True
else:
return False
class GamePiece():
    """One tile: a number 1-15 (or ' ' for the open space) plus its
    (row, column) board position."""
    def __init__(self, num, position):
        self._num = num
        self._position = position
    def __repr__(self):
        return str(self._num)
    @property
    def num(self):
        return self._num
    @num.setter
    def num(self, number):
        # only integers are accepted; other assignments are silently ignored
        if type(number) is int:
            self._num = number
    @property
    def position(self):
        return self._position
    @position.setter
    def position(self, tup):
        # bug fix: the original setter evaluated ``self._position`` without
        # assigning it, so position updates were silently dropped
        if type(tup) is tuple:
            self._position = tup
61,786 | Brandon-Ritchie/PythonSlidingPuzzle | refs/heads/main | /tests.py | import unittest
import classes
from main import generate_pieces
class TestGameMethods(unittest.TestCase):
    """Unit tests for classes.Game / classes.GamePiece.

    setUp builds a full random board, then individual tests overwrite
    piece_dictionary with hand-crafted positions to exercise move rules.
    """
    def setUp(self):
        pieces = generate_pieces()
        self.pieces = pieces
        self.game = classes.Game()
        self.game.generate_board(pieces)
        self.game.create_piece_dictionary(pieces)
        # probe piece used by all of the movement tests
        self.test_piece = classes.GamePiece(2, (1, 1))
    def test_game_class_exists(self):
        self.assertIsNotNone(self.game)
    def test_game_has_board_property(self):
        self.assertIsNotNone(self.game.board)
    def test_game_board_is_list_of_lists(self):
        for row in self.game.board:
            self.assertTrue(type(row) is list)
    def test_game_board_rows_are_not_blank(self):
        for row in self.game.board:
            self.assertTrue(len(row))
    def test_each_game_row_has_four_game_pieces(self):
        for row in self.game.board:
            self.assertTrue(len(row) == 4)
    def test_game_piece_class_exists(self):
        self.assertIsNotNone(self.test_piece)
    def test_game_piece_position_returns_tuple(self):
        self.assertEqual(self.test_piece.position, (1, 1))
    def test_game_has_piece_dict(self):
        self.assertIsNotNone(self.game._piece_dictionary)
    def test_game_piece_dictionary_is_dict(self):
        self.assertTrue(type(self.game.piece_dictionary) is dict)
    def test_game_piece_dictionary_has_1_to_15(self):
        for i in range(1, 16):
            self.assertIn(i, self.game.piece_dictionary)
    # -- orthogonally adjacent gap: the move must succeed ------------------
    def test_move_piece_open_piece_is_below(self):
        self.game.piece_dictionary[' '] = (2, 1)
        self.game.piece_dictionary[self.test_piece.num] = self.test_piece.position
        returned_value = self.game.move_piece(self.test_piece)
        self.assertTrue(returned_value)
    def test_move_piece_open_piece_is_above(self):
        self.game.piece_dictionary[' '] = (0, 1)
        self.game.piece_dictionary[self.test_piece.num] = self.test_piece.position
        returned_value = self.game.move_piece(self.test_piece)
        self.assertTrue(returned_value)
    def test_move_piece_open_piece_is_right(self):
        self.game.piece_dictionary[' '] = (1, 2)
        self.game.piece_dictionary[self.test_piece.num] = self.test_piece.position
        returned_value = self.game.move_piece(self.test_piece)
        self.assertTrue(returned_value)
    def test_move_piece_open_piece_is_left(self):
        self.game.piece_dictionary[' '] = (1, 0)
        self.game.piece_dictionary[self.test_piece.num] = self.test_piece.position
        returned_value = self.game.move_piece(self.test_piece)
        self.assertTrue(returned_value)
    # -- diagonal or distant gap: the move must be refused -----------------
    def test_move_piece_open_piece_is_above_left_diagonal(self):
        self.game.piece_dictionary[' '] = (0, 0)
        self.game.piece_dictionary[self.test_piece.num] = self.test_piece.position
        returned_value = self.game.move_piece(self.test_piece)
        self.assertFalse(returned_value)
    def test_move_piece_open_piece_is_above_right_diagonal(self):
        self.game.piece_dictionary[' '] = (0, 2)
        self.game.piece_dictionary[self.test_piece.num] = self.test_piece.position
        returned_value = self.game.move_piece(self.test_piece)
        self.assertFalse(returned_value)
    def test_move_piece_open_piece_is_below_left_diagonal(self):
        self.game.piece_dictionary[' '] = (2, 0)
        self.game.piece_dictionary[self.test_piece.num] = self.test_piece.position
        returned_value = self.game.move_piece(self.test_piece)
        self.assertFalse(returned_value)
    def test_move_piece_open_piece_is_below_right_diagonal(self):
        self.game.piece_dictionary[' '] = (2, 2)
        self.game.piece_dictionary[self.test_piece.num] = self.test_piece.position
        returned_value = self.game.move_piece(self.test_piece)
        self.assertFalse(returned_value)
    def test_move_piece_too_far(self):
        # relies on setUp's board placing the blank piece at (3, 3)
        self.game.piece_dictionary[self.test_piece.num] = self.test_piece.position
        returned_value = self.game.move_piece(self.test_piece)
        self.assertFalse(returned_value)
    def test_puzzle_is_not_completed(self):
        self.assertFalse(self.game.is_puzzle_completed())
    def test_puzzle_is_completed(self):
        # arrange 1..15 row-major with the gap last: the solved state
        self.game.piece_dictionary[1] = (0, 0)
        self.game.piece_dictionary[2] = (0, 1)
        self.game.piece_dictionary[3] = (0, 2)
        self.game.piece_dictionary[4] = (0, 3)
        self.game.piece_dictionary[5] = (1, 0)
        self.game.piece_dictionary[6] = (1, 1)
        self.game.piece_dictionary[7] = (1, 2)
        self.game.piece_dictionary[8] = (1, 3)
        self.game.piece_dictionary[9] = (2, 0)
        self.game.piece_dictionary[10] = (2, 1)
        self.game.piece_dictionary[11] = (2, 2)
        self.game.piece_dictionary[12] = (2, 3)
        self.game.piece_dictionary[13] = (3, 0)
        self.game.piece_dictionary[14] = (3, 1)
        self.game.piece_dictionary[15] = (3, 2)
        self.game.piece_dictionary[' '] = (3, 3)
        self.assertTrue(self.game.is_puzzle_completed())
    def test_can_piece_move_function_exists(self):
        returned_value = True
        try:
            self.game.can_piece_move(self.test_piece)
        except:
            returned_value = False
        self.assertTrue(returned_value)
if __name__ == '__main__':
    unittest.main(verbosity=2)
61,796 | latifanoor/Mydjangoproject | refs/heads/master | /members/models.py | from django.db import models
class Members(models.Model):
    """A registered member profile."""
    full_name = models.CharField(max_length=100)
    # NOTE(review): IntegerField for phone/national id drops leading zeros
    # and caps magnitude -- consider CharField; confirm before migrating.
    phone_number=models.IntegerField()
    national_id = models.IntegerField()
    # NOTE(review): max_length=20 is very short for an email address, and
    # EmailField would add validation -- confirm before migrating.
    email=models.CharField(max_length=20)
    biography=models.CharField(max_length=100)
    Gender=models.CharField(max_length=20)
    dob = models.DateTimeField('date of birth')
    location = models.CharField(max_length=50)
    def __str__(self):
        return self.full_name
61,797 | latifanoor/Mydjangoproject | refs/heads/master | /members/migrations/0001_initial.py | # Generated by Django 2.0.5 on 2018-05-29 07:58
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated initial migration creating the Members table.

    Generated by ``makemigrations``; avoid hand-editing.
    """
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Members',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('full_name', models.CharField(max_length=100)),
                ('phone_number', models.IntegerField()),
                ('national_id', models.IntegerField()),
                ('email', models.CharField(max_length=20)),
                ('biography', models.CharField(max_length=100)),
                ('Gender', models.CharField(max_length=20)),
                ('dob', models.DateTimeField(verbose_name='date of birth')),
                ('location', models.CharField(max_length=50)),
            ],
        ),
    ]
| {"/members/views.py": ["/members/models.py"]} |
61,798 | latifanoor/Mydjangoproject | refs/heads/master | /polls/models.py | from django.db import models
# Create your models here.
class Question(models.Model):
    """A poll question."""
    question =models.CharField(max_length=200)
    pub_date =models.DateTimeField('date published')
    location =models.CharField(max_length=50)
61,799 | latifanoor/Mydjangoproject | refs/heads/master | /members/views.py | from django.shortcuts import render
# Create your views here.
from .models import Members
def members(request):
    """Render the member list page with every Members row.

    Fix: removed a leftover debug ``print`` of the queryset.
    """
    members_names = Members.objects.all()
    return render(request, 'members/my.html', {'members_names': members_names})
61,800 | latifanoor/Mydjangoproject | refs/heads/master | /swahilipot/apps.py | from django.apps import AppConfig
class SwahilipotConfig(AppConfig):
    # Django application configuration for the "swahilipot" app.
    name = 'swahilipot'
| {"/members/views.py": ["/members/models.py"]} |
61,801 | latifanoor/Mydjangoproject | refs/heads/master | /swahilipot/migrations/0001_initial.py | # Generated by Django 2.0.5 on 2018-05-17 09:29
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django makemigrations (2.0.5); original swahilipot.Members table
    # (later removed by 0003_delete_members). Do not edit by hand.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Members',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('full_name', models.CharField(max_length=100)),
                ('national_id', models.IntegerField()),
                ('occupation', models.CharField(max_length=50)),
                ('dob', models.DateTimeField(verbose_name='date of birth')),
                ('location', models.CharField(max_length=50)),
            ],
        ),
    ]
| {"/members/views.py": ["/members/models.py"]} |
61,802 | latifanoor/Mydjangoproject | refs/heads/master | /swahilipot/views.py | from django.shortcuts import render
# Create your views here.
from django.http import HttpResponse
from .models import Board
def home(request):
    """Render the board list page with every Board row.

    Fix: removed a leftover debug ``print`` of the queryset.
    """
    boards_names = Board.objects.all()
    return render(request, 'swahilipot/list.html', {'boards_names': boards_names})
def swahilipot(request):
    """Render the static swahilipot landing page."""
    return render(request,'swahilipot/latifa.html') | {"/members/views.py": ["/members/models.py"]} |
61,803 | latifanoor/Mydjangoproject | refs/heads/master | /swahilipot/migrations/0003_delete_members.py | # Generated by Django 2.0.5 on 2018-05-29 08:43
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated: drops swahilipot.Members. NOTE(review): a Members model
    # also exists in the members app -- presumably this one was superseded by it.
    dependencies = [
        ('swahilipot', '0002_auto_20180523_0826'),
    ]
    operations = [
        migrations.DeleteModel(
            name='Members',
        ),
    ]
| {"/members/views.py": ["/members/models.py"]} |
61,804 | latifanoor/Mydjangoproject | refs/heads/master | /pygirls/apps.py | from django.apps import AppConfig
class PygirlsConfig(AppConfig):
    # Django application configuration for the "pygirls" app.
    name = 'pygirls'
| {"/members/views.py": ["/members/models.py"]} |
61,805 | elees1219/LineBot | refs/heads/master | /game/rps.py | # -*- coding: utf-8 -*-
from enum import Enum
from tool import random_gen
from error import error
import time
from collections import defaultdict
class battle_item(Enum):
    """The three playable hands.

    Values are ordered so that (a.value - b.value) % 3 decides a round in
    rps._calculate_result: 0 = tie, 1 = first player wins, 2 = second wins.
    """
    rock = 1
    paper = 2
    scissor = 3
class battle_item_representative(object):
    """A sticker or text token that stands for one battle item.

    When ``is_sticker`` is true, ``content`` must be numeric (a LINE
    sticker ID); a non-numeric value raises from ``int()``.
    """

    def __init__(self, battle_item, is_sticker, content):
        if is_sticker:
            # Validate eagerly: int() raises ValueError/TypeError for
            # non-numeric content. (Was wrapped in
            # `except Exception as ex: raise ex`, which re-raised the same
            # exception but truncated the original traceback on Python 2.)
            int(content)
        self._battle_item = battle_item
        self._is_sticker = is_sticker
        self._content = content

    @property
    def is_sticker(self):
        """True when content is a sticker ID rather than plain text."""
        return self._is_sticker

    @property
    def content(self):
        """The raw sticker ID or text for this representative."""
        return self._content

    @property
    def battle_item(self):
        """The battle_item this representative maps to."""
        return self._battle_item
class battle_result(Enum):
    """Outcome of one round; value equals (item1 - item2) % 3 in rps._calculate_result."""
    undefined = -1
    tie = 0
    win1 = 1
    win2 = 2
class battle_player(object):
    """Win/loss/streak statistics for a single rock-paper-scissors player."""

    def __init__(self, name, uid):
        self._name = name
        self._uid = uid
        self.reset_statistics()

    def win(self):
        """Record a win and advance/extend the winning streak."""
        self._win += 1
        if self._consecutive_winning:
            self._consecutive_count += 1
        else:
            self._consecutive_count = 1
        self._consecutive_win = max(self._consecutive_win, self._consecutive_count)
        self._consecutive_winning = True

    def lose(self):
        """Record a loss and advance/extend the losing streak."""
        self._lose += 1
        if self._consecutive_winning:
            self._consecutive_count = 1
        else:
            self._consecutive_count += 1
        self._consecutive_lose = max(self._consecutive_lose, self._consecutive_count)
        self._consecutive_winning = False

    def tied(self):
        """Record a draw; draws do not affect any streak."""
        self._tied += 1

    def reset_statistics(self):
        """Zero every counter and forget the last item played."""
        self._win = 0
        self._lose = 0
        self._tied = 0
        self._last_item = None
        self._consecutive_winning = False
        self._consecutive_count = 0
        self._consecutive_win = 0
        self._consecutive_lose = 0

    def is_same_uid(self, uid):
        """True when uid matches this player's user ID."""
        return uid == self._uid

    @property
    def name(self):
        return self._name

    @property
    def win_count(self):
        return self._win

    @property
    def lose_count(self):
        return self._lose

    @property
    def tied_count(self):
        return self._tied

    @property
    def total_played(self):
        """Games played, draws included."""
        return self._win + self._lose + self._tied

    @property
    def consecutive_type(self):
        """True=Win, False=Lose"""
        return self._consecutive_winning

    @property
    def consecutive_count(self):
        return self._consecutive_count

    @property
    def longest_consecutive_win(self):
        return self._consecutive_win

    @property
    def longest_consecutive_lose(self):
        return self._consecutive_lose

    @property
    def winning_rate(self):
        """Wins over decided (non-draw) games; 0.0 before anything is decided."""
        decided = self._win + self._lose
        if decided == 0:
            return 1.0 if self._win > 0 else 0.0
        return self._win / float(decided)

    @property
    def last_item(self):
        return self._last_item

    @last_item.setter
    def last_item(self, value):
        self._last_item = value
class rps(object):
    """Game of Rock-Paper-Scissors.

    Holds the battle-item lookup table (sticker/text representatives for
    rock, paper and scissor), the registered players, and the state of the
    round currently in progress.
    """

    def __init__(self, vs_bot, rock, paper, scissor):
        """Create a game; rock/paper/scissor are sticker IDs (must parse as int).

        NOTE(review): _init_register returns an error string on invalid
        input, and returning non-None from __init__ raises TypeError at
        call time -- a failed setup therefore surfaces as TypeError rather
        than the intended message. Left as-is to avoid changing
        caller-visible behaviour; consider raising ValueError instead.
        """
        self._gap_time = -1
        self._vs_bot = vs_bot
        self._battle_dict = {battle_item.rock: [],
                             battle_item.paper: [],
                             battle_item.scissor: []}
        self._result_generated = False
        self._enabled = True
        return self._init_register(rock, paper, scissor)

    def register_battle_item(self, battle_item, is_sticker, content):
        """Add a sticker/text representative for the given battle item."""
        self._battle_dict[battle_item].append(battle_item_representative(battle_item, is_sticker, content))

    def register_player(self, name, uid):
        """Register a player; returns False when the uid is already known."""
        if self._player_dict.get(uid) is None:
            self._player_dict[uid] = battle_player(name, uid)
            return True
        else:
            return False

    def play(self, item, player_uid):
        """
        return not void if error occurred.
        No action if player not exist.
        """
        if self._enabled:
            player_count = len(self._player_dict)
            if player_count < 2:
                return error.main.miscellaneous(u'玩家人數不足,需要先註冊2名玩家以後方可遊玩。目前已註冊玩家{}名。\n已註冊玩家: {}'.format(
                    player_count, '、'.join([player.name for player in self._player_dict.itervalues()])))
            else:
                if self._play_entered:
                    # Second throw of the round -- must come from a different player.
                    if self._player1.is_same_uid(player_uid):
                        return error.main.miscellaneous(u'同一玩家不可重複出拳。')
                    else:
                        self._play2(item, player_uid)
                else:
                    self._play1(item, player_uid)
        else:
            return error.main.miscellaneous(u'遊戲暫停中...')

    def result_text(self):
        """
        Player object will be released after calling this method.
        """
        if self._result_enum == battle_result.tie:
            text = u'【平手】'
        elif self._result_enum == battle_result.win1:
            text = u'【勝利 - {}】'.format(self._player1.name)
            text += u'\n【敗北 - {}】'.format(self._player2.name)
        elif self._result_enum == battle_result.win2:
            text = u'【勝利 - {}】'.format(self._player2.name)
            text += u'\n【敗北 - {}】'.format(self._player1.name)
        elif self._result_enum == battle_result.undefined:
            text = u'【尚未猜拳】'
        else:
            # Fix: was the unbound name `result_enum`, which raised NameError
            # instead of the intended ValueError with a useful message.
            raise ValueError(error.main.invalid_thing(u'猜拳結果', self._result_enum))
        text += u'\n本次猜拳兩拳間格時間(包含程式處理時間) {:.3f} 秒'.format(self._gap_time)
        text += u'\n\n'
        text += rps.player_stats_text(self._player_dict)
        self._reset()
        return text

    def battle_item_dict_text(self, item=None):
        """Describe the representatives for one item, or all three when item is None."""
        if item is None:
            text = u'【剪刀石頭布代表物件】\n'
            text += self._battle_item_dict_text(battle_item.scissor)
            text += '\n'
            text += self._battle_item_dict_text(battle_item.rock)
            text += '\n'
            text += self._battle_item_dict_text(battle_item.paper)
            return text
        else:
            return self._battle_item_dict_text(item)

    def reset_statistics(self):
        """Clear every registered player's win/loss record."""
        for player in self._player_dict.itervalues():
            player.reset_statistics()

    def find_battle_item(self, is_sticker, content):
        """Return the battle_item whose representative matches, else None."""
        for battle_item_key, representatives in self._battle_dict.iteritems():
            for representative in representatives:
                if representative.is_sticker == is_sticker and representative.content == content:
                    return battle_item_key
        return None

    def has_player(self, player_uid):
        """True when player_uid has been registered."""
        return self._player_dict.has_key(player_uid)

    def reset_battle_item(self, item):
        """Drop all representatives for item and restore the default sticker."""
        self._battle_dict[item] = []
        self._init_battle_dict()

    def _play1(self, item, player_uid):
        # First throw: remember the player and start timing. In vs-bot mode
        # the bot immediately throws a random item as player 2 (uid 0).
        player_obj = self._player_dict.get(player_uid)
        if player_obj is not None:
            self._player1 = player_obj
            self._player1.last_item = item
            self._play_begin_time = time.time()
            if self._vs_bot:
                self._play2(random_gen.random_drawer.draw_text(self._battle_dict.keys()), 0)
            else:
                self._play_entered = True

    def _play2(self, item, player_uid):
        # Second throw: record gap time and settle the round.
        player_obj = self._player_dict.get(player_uid)
        if player_obj is not None:
            self._player2 = player_obj
            self._player2.last_item = item
            self._gap_time = time.time() - self._play_begin_time
            self._play_entered = False
            self._calculate_result()

    def _calculate_result(self):
        # (item1 - item2) % 3 maps directly onto battle_result:
        # 0 = tie, 1 = player 1 wins, 2 = player 2 wins.
        result = self._player1.last_item.value - self._player2.last_item.value
        result = result % 3
        self._result_enum = battle_result(result)
        if self._result_enum == battle_result.win1:
            self._player1.win()
            self._player2.lose()
        elif self._result_enum == battle_result.win2:
            self._player2.win()
            self._player1.lose()
        elif self._result_enum == battle_result.tie:
            self._player1.tied()
            self._player2.tied()
        self._result_generated = True

    def _reset(self):
        # Clear per-round state; registered players and their stats are kept.
        self._play_entered = False
        self._result_generated = False
        self._play_begin_time = 0
        self._result_enum = battle_result.undefined
        self._player1 = None
        self._player2 = None

    def _battle_item_dict_text(self, item):
        if item == battle_item.scissor:
            text = u'【剪刀】\n'
        elif item == battle_item.rock:
            text = u'【石頭】\n'
        elif item == battle_item.paper:
            text = u'【布】\n'
        else:
            return u''
        text += u', '.join([u'(貼圖ID {})'.format(item.content) if item.is_sticker else unicode(item.content) for item in self._battle_dict[item]])
        return text

    def _init_register(self, rock, paper, scissor):
        """
        Initially register process must use sticker ID to create instance
        Return void when successfully initialized
        """
        try:
            int(rock)
        except ValueError:
            return error.main.invalid_thing_with_correct_format(u'石頭貼圖ID', u'整數', rock)
        try:
            int(paper)
        except ValueError:
            # Fix: error message previously echoed `rock` instead of the
            # offending `paper` value.
            return error.main.invalid_thing_with_correct_format(u'布貼圖ID', u'整數', paper)
        try:
            int(scissor)
        except ValueError:
            # Fix: same copy/paste slip -- echo `scissor`, not `rock`.
            return error.main.invalid_thing_with_correct_format(u'剪刀貼圖ID', u'整數', scissor)
        if scissor == rock == paper:
            return error.main.miscellaneous(u'剪刀、石頭、布不可相同,請重新輸入。')
        elif scissor == rock:
            return error.main.miscellaneous(u'剪刀和石頭的定義衝突(相同),請重新輸入。')
        elif rock == paper:
            return error.main.miscellaneous(u'石頭和布的定義衝突(相同),請重新輸入。')
        elif paper == scissor:
            return error.main.miscellaneous(u'布和剪刀的定義衝突(相同),請重新輸入。')
        self._init_battle_dict(rock, paper, scissor)
        self._rock = rock
        self._paper = paper
        self._scissor = scissor
        self._player_dict = defaultdict(battle_player)
        if self._vs_bot:
            self._player_dict[0] = battle_player(u'(電腦)', 0)
        self._reset()

    def _init_battle_dict(self, rock=None, paper=None, scissor=None):
        # Ensure each item has at least its default sticker representative.
        if len(self._battle_dict[battle_item.paper]) < 1:
            self.register_battle_item(battle_item.paper, True, paper if paper is not None else self._paper)
        if len(self._battle_dict[battle_item.rock]) < 1:
            self.register_battle_item(battle_item.rock, True, rock if rock is not None else self._rock)
        if len(self._battle_dict[battle_item.scissor]) < 1:
            self.register_battle_item(battle_item.scissor, True, scissor if scissor is not None else self._scissor)

    @property
    def gap_time(self):
        """Seconds between the two throws of the last round (processing included)."""
        return self._gap_time

    @property
    def vs_bot(self):
        return self._vs_bot

    @property
    def battle_dict(self):
        try:
            return self._battle_dict
        except AttributeError:
            # Fix: a missing attribute raises AttributeError, not NameError,
            # so the old `except NameError` guard could never trigger.
            pass

    @property
    def player_dict(self):
        try:
            return self._player_dict
        except AttributeError:
            # Fix: see battle_dict -- was `except NameError`.
            pass

    @property
    def is_waiting_next(self):
        try:
            return self._play_entered
        except AttributeError:
            # Fix: see battle_dict -- was `except NameError`.
            pass

    @property
    def result_generated(self):
        return self._result_generated

    @property
    def enabled(self):
        return self._enabled

    @enabled.setter
    def enabled(self, value):
        self._enabled = value

    @staticmethod
    def player_stats_text(player_dict):
        """Render every player's record, best winning rate first."""
        text = u'【玩家戰績】\n'
        # Fix: sorted() previously had no key; battle_player defines no
        # ordering, which is a TypeError on Python 3 and arbitrary order on
        # Python 2. Rank by winning rate, then win count.
        ranked = sorted(player_dict.values(), key=lambda p: (p.winning_rate, p.win_count), reverse=True)
        text += u'\n\n'.join([u'{}\n{}戰 勝率{:.3f} {}勝 {}敗 {}平 {}連{}中 最長{}連勝、{}連敗'.format(player.name, player.total_played, player.winning_rate, player.win_count, player.lose_count, player.tied_count,
                                                                                 player.consecutive_count, u'勝' if player.consecutive_type else u'敗', player.longest_consecutive_win, player.longest_consecutive_lose)
                              for player in ranked])
        return text
| {"/game/rps.py": ["/tool/__init__.py"], "/bot/game_object_holder.py": ["/game/__init__.py"], "/msg_handler/text_msg.py": ["/db/__init__.py", "/bot/__init__.py", "/bot/system.py", "/tool/__init__.py"], "/tool/__init__.py": ["/tool/random_gen.py"], "/msg_handler/game_msg.py": ["/bot/system.py", "/game/__init__.py"], "/msg_handler/__init__.py": ["/msg_handler/text_msg.py", "/msg_handler/game_msg.py"], "/game/__init__.py": ["/game/rps.py"], "/db/__init__.py": ["/db/groupban.py"], "/bot/__init__.py": ["/bot/system.py", "/bot/webpage_auto_gen.py", "/bot/game_object_holder.py"]} |
61,806 | elees1219/LineBot | refs/heads/master | /bot/system.py | # -*- coding: utf-8 -*-
import enum
from datetime import datetime, timedelta
from collections import defaultdict
from linebot import exceptions
import hashlib
import operator
import traceback
from math import *
class _command(object):
def __init__(self, min_split=2, max_split=2, non_user_permission_required=False):
self._split_max = max_split
self._split_min = min_split
self._count = 0
self._non_user_permission_required = non_user_permission_required
@property
def split_max(self):
"""Maximum split count."""
return self._split_max + (1 if self._non_user_permission_required else 0)
@property
def split_min(self):
"""Minimum split count."""
return self._split_min
@property
def count(self):
"""Called count."""
return self._count
@count.setter
def count(self, value):
"""Called count."""
self._count = value
@property
def non_user_permission_required(self):
"""Required Permission"""
return self._non_user_permission_required
# Registry of system commands: letter -> _command(min_split, max_split, needs_permission_key).
_sys_cmd_dict = {'S': _command(1, 1, True),
                 'A': _command(2, 4, False),
                 'M': _command(2, 4, True),
                 'D': _command(1, 2, False),
                 'R': _command(1, 2, True),
                 'Q': _command(1, 2, False),
                 'C': _command(0, 0, True),
                 'I': _command(1, 2, False),
                 'K': _command(2, 2, False),
                 'P': _command(0, 1, False),
                 'G': _command(0, 1, False),
                 'GA': _command(1, 5, True),
                 'H': _command(0, 1, False),
                 'SHA': _command(1, 1, False),
                 'O': _command(1, 1, False),
                 'B': _command(0, 0, False),
                 'RD': _command(1, 2, False),
                 'STK': _command(0, 0, False),
                 'T': _command(1, 1, False)}
# Game commands (rock-paper-scissors).
_game_cmd_dict = {'RPS': _command(0, 4, False)}
# Built-in helper tools (MFF damage calculator, calculator).
_helper_cmd_dict = {'MFF': _command(1, 8, False),
                    'CALC': _command(0, 0, False)}
class system_data(object):
    """Mutable runtime state and counters for the bot process."""

    def __init__(self):
        # Boot timestamp shifted by +8h (UTC+8 local time).
        self._boot_up = datetime.now() + timedelta(hours=8)
        self._silence = False
        self._intercept = True
        self._string_calc_debug = False
        self._last_sticker = defaultdict(str)
        self._sys_cmd_dict = _sys_cmd_dict
        self._game_cmd_dict = _game_cmd_dict
        self._helper_cmd_dict = _helper_cmd_dict
        self._webpage_viewed = 0

    def set_last_sticker(self, cid, stk_id):
        """Remember the last sticker ID seen in channel cid."""
        self._last_sticker[cid] = str(stk_id)

    def get_last_sticker(self, cid):
        """Last sticker ID for cid, or None if never recorded."""
        return self._last_sticker.get(cid)

    @property
    def silence(self):
        return self._silence

    @silence.setter
    def silence(self, value):
        self._silence = value

    @property
    def intercept(self):
        return self._intercept

    @intercept.setter
    def intercept(self, value):
        self._intercept = value

    @property
    def calc_debug(self):
        return self._string_calc_debug

    @calc_debug.setter
    def calc_debug(self, value):
        self._string_calc_debug = value

    @property
    def boot_up(self):
        return self._boot_up

    @property
    def sys_cmd_dict(self):
        return self._sys_cmd_dict

    @property
    def sys_cmd_called(self):
        """Total system-command invocations (failed calls included)."""
        return sum([x.count for x in self._sys_cmd_dict.itervalues()])

    @property
    def game_cmd_dict(self):
        return self._game_cmd_dict

    @property
    def game_cmd_called(self):
        """Total game-command invocations.

        Fix: this and helper_cmd_called were both also named
        `sys_cmd_called`; the duplicate property definitions silently
        overwrote each other, so sys_cmd_called actually reported the
        helper-command total.
        """
        return sum([x.count for x in self._game_cmd_dict.itervalues()])

    @property
    def helper_cmd_dict(self):
        return self._helper_cmd_dict

    @property
    def helper_cmd_called(self):
        """Total helper-tool invocations."""
        return sum([x.count for x in self._helper_cmd_dict.itervalues()])

    @property
    def webpage_viewed(self):
        return self._webpage_viewed

    def view_webpage(self):
        """Bump the auto-generated-webpage view counter."""
        self._webpage_viewed += 1
class permission_verifier(object):
    """Maps secret keys to permission levels via SHA-224 digests.

    The key list is ordered by level; index 0 is a placeholder so that a
    key's list position equals its permission value.
    """

    def __init__(self, permission_key_list):
        self._permission_list = [None] + list(permission_key_list)

    def permission_level(self, key):
        """Return the permission for key; unknown keys fall back to `user`."""
        digest = hashlib.sha224(key).hexdigest()
        try:
            return permission(self._permission_list.index(digest))
        except ValueError:
            return permission.user
class permission(enum.IntEnum):
    # Ordered privilege levels; callers compare with `>=` (IntEnum ordering).
    user = 0
    moderator = 1
    group_admin = 2
    bot_admin = 3
class line_api_proc(object):
    """Thin convenience wrapper around the LINE Bot API client."""

    def __init__(self, line_api):
        self._line_api = line_api

    def profile(self, uid):
        """Fetch the LINE profile for uid; None when the lookup fails (e.g. 404)."""
        try:
            return self._line_api.get_profile(uid)
        except exceptions.LineBotApiError as ex:
            if ex.status_code == 404:
                return None

    @staticmethod
    def source_channel_id(event_source):
        """Channel (user/group/room) ID the event came from."""
        return event_source.sender_id

    @staticmethod
    def source_user_id(event_source):
        """User ID of the event's sender."""
        return event_source.user_id

    @staticmethod
    def is_valid_user_id(uid):
        """True for a well-formed LINE user ID: 'U' + 32 chars."""
        if uid is None:
            return False
        return len(uid) == 33 and uid.startswith('U')

    @staticmethod
    def is_valid_room_group_id(uid):
        """True for a well-formed group/room ID: 'C' or 'R' + 32 chars."""
        if uid is None:
            return False
        return len(uid) == 33 and uid.startswith(('C', 'R'))
def string_can_be_int(s):
    """Return True when s converts cleanly to int (ValueError only is caught)."""
    try:
        int(s)
    except ValueError:
        return False
    return True
def string_can_be_float(s):
    """Return True when s converts cleanly to float (ValueError only is caught)."""
    try:
        float(s)
    except ValueError:
        return False
    return True
| {"/game/rps.py": ["/tool/__init__.py"], "/bot/game_object_holder.py": ["/game/__init__.py"], "/msg_handler/text_msg.py": ["/db/__init__.py", "/bot/__init__.py", "/bot/system.py", "/tool/__init__.py"], "/tool/__init__.py": ["/tool/random_gen.py"], "/msg_handler/game_msg.py": ["/bot/system.py", "/game/__init__.py"], "/msg_handler/__init__.py": ["/msg_handler/text_msg.py", "/msg_handler/game_msg.py"], "/game/__init__.py": ["/game/rps.py"], "/db/__init__.py": ["/db/groupban.py"], "/bot/__init__.py": ["/bot/system.py", "/bot/webpage_auto_gen.py", "/bot/game_object_holder.py"]} |
61,807 | elees1219/LineBot | refs/heads/master | /bot/game_object_holder.py | # -*- coding: utf-8 -*-
import game
from collections import defaultdict
class game_objects(object):
    """Per-channel registry of running game instances."""

    def __init__(self):
        # Fix: was defaultdict(game.rps), but rps() requires four arguments,
        # so the default factory could never succeed -- a missing key would
        # have raised TypeError instead of creating a game. Instances are
        # always stored explicitly through set_rps, so a plain dict is
        # behaviour-identical for every code path.
        self._rps = {}

    @property
    def rps_instance_count(self):
        """Number of channels that currently have an RPS game."""
        return len(self._rps)

    def set_rps(self, cid, rps):
        """Register (or replace) the RPS game for channel cid."""
        self._rps[cid] = rps

    def del_rps(self, cid):
        """Remove the RPS game for cid; raises KeyError when absent."""
        del self._rps[cid]

    def get_rps(self, cid):
        """Return the RPS game for cid, or None when none exists."""
        return self._rps.get(cid)
| {"/game/rps.py": ["/tool/__init__.py"], "/bot/game_object_holder.py": ["/game/__init__.py"], "/msg_handler/text_msg.py": ["/db/__init__.py", "/bot/__init__.py", "/bot/system.py", "/tool/__init__.py"], "/tool/__init__.py": ["/tool/random_gen.py"], "/msg_handler/game_msg.py": ["/bot/system.py", "/game/__init__.py"], "/msg_handler/__init__.py": ["/msg_handler/text_msg.py", "/msg_handler/game_msg.py"], "/game/__init__.py": ["/game/rps.py"], "/db/__init__.py": ["/db/groupban.py"], "/bot/__init__.py": ["/bot/system.py", "/bot/webpage_auto_gen.py", "/bot/game_object_holder.py"]} |
61,808 | elees1219/LineBot | refs/heads/master | /msg_handler/text_msg.py | # -*- coding: utf-8 -*-
import errno, os, sys
import validators
import urllib
from urlparse import urlparse
import requests
from flask import Flask, request, abort, url_for
import hashlib
from linebot import (
LineBotApi, WebhookHandler, exceptions
)
from linebot.models import (
MessageEvent, TextMessage, TextSendMessage,
SourceUser, SourceGroup, SourceRoom,
TemplateSendMessage, ConfirmTemplate, MessageTemplateAction,
ButtonsTemplate, URITemplateAction, PostbackTemplateAction,
CarouselTemplate, CarouselColumn, PostbackEvent,
StickerMessage, StickerSendMessage, LocationMessage, LocationSendMessage,
ImageMessage, VideoMessage, AudioMessage,
UnfollowEvent, FollowEvent, JoinEvent, LeaveEvent, BeaconEvent
)
from db import kw_dict_mgr, kwdict_col, group_ban, gb_col, message_tracker, msg_track_col
from error import error
from bot import system, webpage_auto_gen
from bot.system import line_api_proc, system_data, string_can_be_float, string_can_be_int
# tool import
from tool import mff, random_gen
from db.msg_track import msg_event_type
class text_msg(object):
    def __init__(self, api_proc, kw_dict_mgr, group_ban, msg_trk, oxford_obj, permission_key_list, system_data, game_object, webpage_generator):
        """Wire the text-command handler to its data-access and service objects."""
        self.kwd = kw_dict_mgr
        self.gb = group_ban
        self.msg_trk = msg_trk
        self.oxford_obj = oxford_obj
        self.permission_verifier = system.permission_verifier(permission_key_list)
        self.api_proc = api_proc
        self.system_data = system_data
        self.game_object = game_object
        self.webpage_generator = webpage_generator
    def S(self, src, params):
        """Run a raw SQL command against the keyword DB (bot-admin key, 1:1 chat only)."""
        key = params.pop(1)
        sql = params[1]
        # Gate: must come from a private user channel AND carry the bot-admin key.
        if isinstance(src, SourceUser) and self.permission_verifier.permission_level(key) >= system.permission.bot_admin:
            results = self.kwd.sql_cmd_only(sql)
            text = u'資料庫指令:\n{}\n\n'.format(sql)
            if results is not None and len(results) > 0:
                text += u'輸出結果(共{}筆):'.format(len(results))
                for result in results:
                    text += u'\n[{}]'.format(', '.join(str(s).decode('utf-8') for s in result))
            else:
                text += error.main.no_result()
        else:
            text = error.main.restricted(3)
        return text
    def A(self, src, params, pinned=False):
        """Add a keyword/reply pair; dispatches on how many params are filled.

        Forms (by highest non-None param):
          4 params: STK <sticker-kw> PIC <url-or-sticker-id>  (sticker keyword, picture reply)
          3 params: <kw> PIC <url-or-id>  or  STK <sticker-kw> <text reply>
          2 params: plain text keyword -> text reply
        """
        new_uid = line_api_proc.source_user_id(src)
        if not line_api_proc.is_valid_user_id(new_uid):
            text = error.main.unable_to_receive_user_id()
        else:
            if params[4] is not None:
                action_kw = params[1]
                kw = params[2]
                action_rep = params[3]
                rep = params[4]
                if action_kw != 'STK':
                    results = None
                    text = error.main.incorrect_param(u'參數1', u'STK')
                elif not system.string_can_be_int(kw):
                    results = None
                    text = error.main.incorrect_param(u'參數2', u'整數數字')
                elif action_rep != 'PIC':
                    results = None
                    text = error.main.incorrect_param(u'參數3', u'PIC')
                else:
                    # A numeric reply is treated as a sticker ID and converted to its PNG URL.
                    if system.string_can_be_int(rep):
                        rep = kw_dict_mgr.sticker_png_url(rep)
                        url_val_result = True
                    else:
                        # NOTE(review): duplicated assignment target ("x = x = ...")
                        # -- harmless, but almost certainly a typo.
                        url_val_result = url_val_result = True if validators.url(rep) and urlparse(rep).scheme == 'https' else False
                    if type(url_val_result) is bool and url_val_result:
                        results = self.kwd.insert_keyword(kw, rep, new_uid, pinned, True, True)
                    else:
                        results = None
                        text = error.main.incorrect_param(u'參數4', u'HTTPS協定,並且是合法的網址。')
            elif params[3] is not None:
                rep = params[3]
                if params[2] == 'PIC':
                    kw = params[1]
                    if system.string_can_be_int(rep):
                        rep = kw_dict_mgr.sticker_png_url(rep)
                        url_val_result = True
                    else:
                        url_val_result = True if validators.url(rep) and urlparse(rep).scheme == 'https' else False
                    if type(url_val_result) is bool and url_val_result:
                        results = self.kwd.insert_keyword(kw, rep, new_uid, pinned, False, True)
                    else:
                        results = None
                        text = error.main.incorrect_param(u'參數3', u'HTTPS協定,並且是合法的網址。')
                elif params[1] == 'STK':
                    kw = params[2]
                    if system.string_can_be_int(kw):
                        results = self.kwd.insert_keyword(kw, rep, new_uid, pinned, True, False)
                    else:
                        results = None
                        text = error.main.incorrect_param(u'參數2', u'整數數字')
                else:
                    text = error.main.unable_to_determine()
                    results = None
            elif params[2] is not None:
                kw = params[1]
                rep = params[2]
                results = self.kwd.insert_keyword(kw, rep, new_uid, pinned, False, False)
            else:
                results = None
                text = error.main.lack_of_thing(u'參數')
            if results is not None:
                text = u'已新增回覆組。{}\n'.format(u'(置頂)' if pinned else '')
                for result in results:
                    text += kw_dict_mgr.entry_basic_info(result)
        return text
def M(self, src, params):
key = params.pop(1)
if not isinstance(src, SourceUser) or self.permission_verifier.permission_level(key) < system.permission.moderator:
text = error.main.restricted(1)
elif not line_api_proc.is_valid_user_id(line_api_proc.source_user_id(src)):
text = error.main.unable_to_receive_user_id()
else:
text = self.A(src, params)
return text
    def D(self, src, params, pinned=False):
        """Delete a reply pair by keyword, or by ID with the form `ID <n>`."""
        deletor_uid = line_api_proc.source_user_id(src)
        if not line_api_proc.is_valid_user_id(deletor_uid):
            text = error.main.unable_to_receive_user_id()
        else:
            if params[2] is None:
                kw = params[1]
                results = self.kwd.delete_keyword(kw, deletor_uid, pinned)
            else:
                action = params[1]
                if action == 'ID':
                    pair_id = params[2]
                    if system.string_can_be_int(pair_id):
                        results = self.kwd.delete_keyword_id(pair_id, deletor_uid, pinned)
                    else:
                        results = None
                        text = error.main.incorrect_param(u'參數2', u'整數數字')
                else:
                    results = None
                    text = error.main.incorrect_param(u'參數1', u'ID')
            if results is not None and len(results) > 0:
                # NOTE(review): `text` is rebuilt on every loop pass, so only the
                # LAST deleted pair is reported -- likely meant to accumulate.
                for result in results:
                    line_profile = self.api_proc.profile(result[int(kwdict_col.creator)])
                    text = u'已刪除回覆組。{}\n'.format(u'(置頂)' if pinned else '')
                    text += kw_dict_mgr.entry_basic_info(result)
                    text += u'\n此回覆組由 {} 製作。'.format(
                        error.main.line_account_data_not_found() if line_profile is None else line_profile.display_name)
            else:
                if system.string_can_be_int(params[1]):
                    text = error.main.miscellaneous(u'偵測到參數1是整數。若欲使用ID作為刪除根據,請參閱小水母使用說明。')
                else:
                    text = error.main.pair_not_exist_or_insuffieicnt_permission()
        return text
def R(self, src, params):
key = params.pop(1)
if not isinstance(src, SourceUser) or self.permission_verifier.permission_level(key) < system.permission.group_admin:
text = error.main.restricted(2)
elif not line_api_proc.is_valid_user_id(line_api_proc.source_user_id(src)):
text = error.main.unable_to_receive_user_id()
else:
text = self.D(src, params, True)
return text
    def Q(self, src, params):
        """Query reply pairs: by ID range (two params) or by keyword substring (one)."""
        if params[2] is not None:
            si = params[1]
            ei = params[2]
            text = u'搜尋範圍: 【回覆組ID】介於【{}】和【{}】之間的回覆組。\n'.format(si, ei)
            try:
                begin_index = int(si)
                end_index = int(ei)
                if end_index - begin_index < 0:
                    results = None
                    text += error.main.incorrect_param(u'參數2', u'大於參數1的數字')
                else:
                    results = self.kwd.search_keyword_index(begin_index, end_index)
            except ValueError:
                results = None
                text += error.main.incorrect_param(u'參數1和參數2', u'整數數字')
        else:
            kw = params[1]
            text = u'搜尋範圍: 【關鍵字】或【回覆】包含【{}】的回覆組。\n'.format(kw)
            results = self.kwd.search_keyword(kw)
        if results is not None:
            # Success path replaces the header text with the truncated list + full URL.
            q_list = kw_dict_mgr.list_keyword(results)
            text = q_list['limited']
            text += u'\n完整搜尋結果顯示: {}'.format(self.webpage_generator.rec_query(q_list['full']))
        else:
            if params[2] is not None:
                text = u'找不到和指定的ID範圍({}~{})有關的結果。'.format(si, ei)
            else:
                text = u'找不到和指定的關鍵字({})有關的結果。'.format(kw)
        return text
def I(self, src, params):
error = False
if params[2] is not None:
action = params[1]
pair_id = params[2]
text = u'搜尋條件: 【回覆組ID】為【{}】的回覆組。\n'.format(pair_id)
if action != 'ID':
results = None
error = True
text += error.main.invalid_thing_with_correct_format(u'參數1', u'ID', action)
else:
if system.string_can_be_int(pair_id):
results = self.kwd.get_info_id(pair_id)
else:
results = None
error = True
text += error.main.invalid_thing_with_correct_format(u'參數2', u'正整數', pair_id)
else:
kw = params[1]
text = u'搜尋條件: 【關鍵字】或【回覆】為【{}】的回覆組。\n'.format(kw)
results = self.kwd.get_info(kw)
if results is not None:
i_object = kw_dict_mgr.list_keyword_info(self.kwd, self.api_proc, results)
text += i_object['limited']
text += u'\n完整資訊URL: {}'.format(self.webpage_generator.rec_info(i_object['full']))
elif not error:
text = error.main.miscellaneous(u'查無相符資料。')
return text
    def K(self, src, params):
        """Show rankings: USER (creators), KW (usage count) or KWRC (recently called)."""
        ranking_type = params[1]
        limit = params[2]
        try:
            limit = int(limit)
        except ValueError as err:
            text = error.main.incorrect_param(u'參數2(數量)', u'整數')
        else:
            Valid = True
            if ranking_type == 'USER':
                text = kw_dict_mgr.list_user_created_ranking(self.api_proc, self.kwd.user_created_rank(limit))
            elif ranking_type == 'KW':
                text = kw_dict_mgr.list_keyword_ranking(self.kwd.order_by_usedrank(limit))
            elif ranking_type == 'KWRC':
                text = kw_dict_mgr.list_keyword_recently_called(self.kwd.recently_called(limit))
            else:
                text = error.main.incorrect_param(u'參數1(種類)', u'USER(使用者排行)、KW(關鍵字排行)或KWRC(呼叫時間排行)')
                Valid = False
            if Valid:
                # Append links to the full web rankings (flask request/url_for).
                text += u'\n\n完整使用者排名: {}\n完整關鍵字排名: {}\n完整最新呼叫表: {}'.format(
                    request.url_root + url_for('full_ranking', type='user')[1:],
                    request.url_root + url_for('full_ranking', type='used')[1:],
                    request.url_root + url_for('full_ranking', type='called')[1:])
        return text
    def P(self, src, params):
        """Report statistics: MSG (message flow), KW (reply pairs) or SYS (process)."""
        if params[1] is not None:
            category = params[1]
            if category == 'MSG':
                limit = 5
                sum_data = self.msg_trk.count_sum()
                tracking_data = message_tracker.entry_detail_list(self.msg_trk.order_by_recorded_msg_count(), limit, self.gb)
                text = u'【訊息流量統計】'
                text += u'\n收到(無對應回覆組): {}則文字訊息 | {}則貼圖訊息'.format(sum_data[msg_event_type.recv_txt], sum_data[msg_event_type.recv_stk])
                text += u'\n收到(有對應回覆組): {}則文字訊息 | {}則貼圖訊息'.format(sum_data[msg_event_type.recv_txt_repl], sum_data[msg_event_type.recv_stk_repl])
                text += u'\n回覆: {}則文字訊息 | {}則貼圖訊息'.format(sum_data[msg_event_type.send_txt], sum_data[msg_event_type.send_stk])
                text += u'\n\n【群組訊息統計資料 - 前{}名】\n'.format(limit)
                text += tracking_data['limited']
                text += u'\n\n完整資訊URL: {}'.format(self.webpage_generator.rec_info(tracking_data['full']))
            elif category == 'KW':
                kwpct = self.kwd.row_count()
                user_list_top = self.kwd.user_sort_by_created_pair()[0]
                line_profile = self.api_proc.profile(user_list_top[0])
                limit = 10
                first = self.kwd.most_used()
                last = self.kwd.least_used()
                recently_called_data = self.kwd.recently_called(limit)
                last_count = len(last)
                text = u'【回覆組相關統計資料】'
                text += u'\n\n已使用回覆組【{}】次'.format(self.kwd.used_count_sum())
                text += u'\n\n已登錄【{}】組回覆組\n【{}】組貼圖關鍵字 | 【{}】組圖片回覆'.format(
                    kwpct,
                    self.kwd.sticker_keyword_count(),
                    self.kwd.picture_reply_count())
                text += u'\n\n共【{}】組回覆組可使用 ({:.2%})\n【{}】組貼圖關鍵字 | 【{}】組圖片回覆'.format(
                    self.kwd.row_count(True),
                    self.kwd.row_count(True) / float(kwpct),
                    self.kwd.sticker_keyword_count(True),
                    self.kwd.picture_reply_count(True))
                text += u'\n\n製作最多回覆組的LINE使用者ID:\n{}'.format(user_list_top[0])
                text += u'\n製作最多回覆組的LINE使用者:\n{}【{}組 - {:.2%}】'.format(
                    error.main.line_account_data_not_found() if line_profile is None else line_profile.display_name,
                    user_list_top[1],
                    user_list_top[1] / float(kwpct))
                text += u'\n\n使用次數最多的回覆組【{}次,{}組】:\n'.format(first[0][int(kwdict_col.used_count)], len(first))
                text += u'\n'.join([u'ID: {} - {}'.format(entry[int(kwdict_col.id)],
                    u'(貼圖ID {})'.format(entry[int(kwdict_col.keyword)].decode('utf-8')) if entry[int(kwdict_col.is_sticker_kw)] else entry[int(kwdict_col.keyword)].decode('utf-8')) for entry in first[0 : limit - 1]])
                text += u'\n\n使用次數最少的回覆組【{}次,{}組】:\n'.format(last[0][int(kwdict_col.used_count)], len(last))
                text += u'\n'.join([u'ID: {} - {}'.format(entry[int(kwdict_col.id)],
                    u'(貼圖ID {})'.format(entry[int(kwdict_col.keyword)].decode('utf-8')) if entry[int(kwdict_col.is_sticker_kw)] else entry[int(kwdict_col.keyword)].decode('utf-8')) for entry in last[0 : limit - 1]])
                if last_count - limit > 0:
                    text += u'\n...(還有{}組)'.format(last_count - limit)
                text += u'\n\n最近被使用的{}組回覆組:\n'.format(limit)
                text += kw_dict_mgr.list_keyword_recently_called(recently_called_data)
            elif category == 'SYS':
                # NOTE(review): `global game_object` is dead -- the code below
                # reads self.game_object, and no module global of that name exists.
                global game_object
                text = u'【系統統計資料 - 開機後重設】\n'
                text += u'開機時間: {} (UTC+8)\n'.format(self.system_data.boot_up)
                text += u'\n【自動產生網頁相關】\n瀏覽次數: {}'.format(self.system_data.webpage_viewed)
                text += u'\n\n【系統指令相關(包含呼叫失敗)】\n總呼叫次數: {}\n'.format(self.system_data.sys_cmd_called)
                text += u'\n'.join([u'指令{} - {}'.format(cmd, cmd_obj.count) for cmd, cmd_obj in self.system_data.sys_cmd_dict.items()])
                text += u'\n\n【內建小工具相關】\nMFF傷害計算輔助 - {}'.format(self.system_data.helper_cmd_dict['MFF'].count)
                text += u'\n計算機 - {}'.format(self.system_data.helper_cmd_dict['CALC'].count)
                text += u'\n\n【小遊戲相關】\n猜拳遊戲數量 - {}\n猜拳次數 - {}'.format(self.game_object.rps_instance_count, self.system_data.game_cmd_dict['RPS'].count)
            else:
                text = error.main.invalid_thing_with_correct_format(u'參數1', u'GRP、KW或SYS', params[1])
        else:
            text = error.main.incorrect_param(u'參數1', u'MSG、KW或SYS')
        return text
def G(self, src, params):
if params[1] is not None:
gid = params[1]
else:
gid = line_api_proc.source_channel_id(src)
if params[1] is None and isinstance(src, SourceUser):
text = error.main.incorrect_channel(False, True, True)
else:
if line_api_proc.is_valid_room_group_id(gid):
group_detail = self.gb.get_group_by_id(gid)
uids = {u'管理員': group_detail[int(gb_col.admin)], u'副管I': group_detail[int(gb_col.moderator1)],
u'副管II': group_detail[int(gb_col.moderator2)], u'副管III': group_detail[int(gb_col.moderator3)]}
text = u'群組/房間頻道ID: {}\n'.format(gid)
if group_detail is not None:
text += u'\n自動回覆機能狀態【{}】'.format(u'已停用' if group_detail[int(gb_col.silence)] else u'使用中')
for txt, uid in uids.items():
if uid is not None:
prof = self.api_proc.profile(uid)
text += u'\n\n{}: {}\n'.format(txt, error.main.line_account_data_not_found() if prof is None else prof.display_name)
text += u'{} 使用者ID: {}'.format(txt, uid)
else:
text += u'\n自動回覆機能狀態【使用中】'
group_tracking_data = self.msg_trk.get_data(gid)
text += u'\n\n收到(無對應回覆組): {}則文字訊息 | {}則貼圖訊息'.format(group_tracking_data[int(msg_track_col.text_msg)],
group_tracking_data[int(msg_track_col.stk_msg)])
text += u'\n收到(有對應回覆組): {}則文字訊息 | {}則貼圖訊息'.format(group_tracking_data[int(msg_track_col.text_msg_trig)],
group_tracking_data[int(msg_track_col.stk_msg_trig)])
text += u'\n回覆: {}則文字訊息 | {}則貼圖訊息'.format(group_tracking_data[int(msg_track_col.text_rep)],
group_tracking_data[int(msg_track_col.stk_rep)])
else:
text = error.main.invalid_thing_with_correct_format(u'群組/房間ID', u'R或C開頭,並且長度為33字元', gid)
return text
def GA(self, src, params):
error_no_action_fetch = error.main.miscellaneous(u'無對應指令。有可能是因為權限不足或是缺少參數而造成。')
perm_dict = {3: u'權限: 開發者/機器人管理員',
2: u'權限: Group Admin',
1: u'權限: Group Moderator',
0: u'權限: User'}
perm = int(self.permission_verifier.permission_level(params.pop(1)))
pert = perm_dict[perm]
param_count = len(params) - params.count(None)
if isinstance(src, SourceUser):
text = error_no_action_fetch
# Set bot auto-reply switch
if perm >= 1 and param_count == 3:
action = params[1].replace(' ', '')
gid = params[2]
pw = params[3]
action_dict = {'SF': True, 'ST': False}
status_silence = {True: u'停用', False: u'啟用'}
if action in action_dict:
settarget = action_dict[action]
if self.gb.set_silence(gid, str(settarget), pw):
text = u'群組自動回覆功能已{}。\n\n群組/房間ID: {}'.format(status_silence[settarget], gid)
else:
text = u'群組靜音設定變更失敗。\n\n群組/房間ID: {}'.format(gid)
else:
text = error.main.invalid_thing(u'參數1(動作)', action)
# Set new admin/moderator
elif perm >= 2 and param_count == 5:
action = params[1]
gid = params[2]
new_uid = params[3]
pw = params[4]
new_pw = params[5]
action_dict = {'SA': self.gb.change_admin,
'SM1': self.gb.set_mod1,
'SM2': self.gb.set_mod2,
'SM3': self.gb.set_mod3}
pos_name = {'SA': u'群組管理員',
'SM1': u'群組副管 1',
'SM2': u'群組副管 2',
'SM3': u'群組副管 3'}
line_profile = self.api_proc.profile(new_uid)
if line_profile is not None:
try:
if action_dict[action](gid, new_uid, pw, new_pw):
position = pos_name[action]
text = u'群組管理員已變更。\n'
text += u'群組/房間ID: {}\n\n'.format(gid)
text += u'新{}使用者ID: {}\n'.format(position, new_uid)
text += u'新{}使用者名稱: {}\n\n'.format(position, line_profile.display_name)
text += u'新{}密碼: {}\n'.format(position, new_pw)
text += u'請記好密碼,嚴禁洩漏,或在群頻中直接開關群組自動回覆功能!'
else:
text = u'{}變更作業失敗。'.format(pos_name[action])
except KeyError as Ex:
text = error.main.invalid_thing(u'參數1(動作)', action)
else:
text = error.main.line_account_data_not_found()
# Add new group - only execute when data not found
elif perm >= 3 and param_count == 4:
action = params[1]
gid = params[2]
uid = params[3]
pw = params[4]
if action != 'N':
text = error.main.invalid_thing(u'參數1(動作)', action)
else:
group_data_test = self.gb.get_group_by_id(gid)
if len(group_data_test) > 0:
text = u'群組資料已存在。'
else:
line_profile = self.api_proc.profile(uid)
if line_profile is not None:
if self.gb.new_data(gid, uid, pw):
text = u'群組資料註冊成功。\n'
text += u'群組ID: {}'.format(gid)
text += u'群組管理員ID: {}'.format(uid)
text += u'群組管理員名稱: {}'.format(line_profile.display_name)
else:
text = u'群組資料註冊失敗。'
else:
text = error.main.line_account_data_not_found()
else:
text = error.main.incorrect_channel()
return pert, text
def H(self, src, params):
if params[1] is not None:
uid = params[1]
line_profile = self.api_proc.profile(uid)
source_type = u'使用者詳細資訊'
if not line_api_proc.is_valid_user_id(uid):
text = error.main.invalid_thing_with_correct_format(u'使用者ID', u'U開頭,並且長度為33字元', uid)
else:
if line_profile is not None:
kwid_arr = self.kwd.user_created_id_array(uid)
if len(kwid_arr) < 1:
kwid_arr = [u'無']
text = u'使用者ID: {}\n'.format(uid)
text += u'使用者名稱: {}\n'.format(line_profile.display_name)
text += u'使用者頭貼網址: {}\n'.format(line_profile.picture_url)
text += u'使用者狀態訊息: {}\n\n'.format(line_profile.status_message)
text += u'使用者製作的回覆組ID: {}'.format(u', '.join(map(unicode, kwid_arr)))
else:
text = u'找不到使用者ID - {} 的詳細資訊。'.format(uid)
else:
text = line_api_proc.source_channel_id(src)
if isinstance(src, SourceUser):
source_type = u'頻道種類: 使用者(私訊)'
elif isinstance(src, SourceGroup):
source_type = u'頻道種類: 群組'
elif isinstance(src, SourceRoom):
source_type = u'頻道種類: 房間'
else:
source_type = u'頻道種類: 不明'
return [source_type, text]
def SHA(self, src, params):
target = params[1]
if target is not None:
text = hashlib.sha224(target.encode('utf-8')).hexdigest()
else:
text = error.main.incorrect_param(u'參數1', u'非空參數')
return text
    def O(self, src, params):
        """Query the Oxford dictionary for params[1] and format the
        definitions, cross-reference markers and usage examples as text."""
        voc = params[1]
        if not self.oxford_obj.enabled:
            text = error.main.miscellaneous(u'牛津字典功能已停用。可能是因為超過單月查詢次數或無效的API密鑰。')
        else:
            # get_data_json returns an int HTTP status code on failure,
            # otherwise the parsed JSON response.
            j = self.oxford_obj.get_data_json(voc)
            if type(j) is int:
                code = j
                if code == 404:
                    text = error.main.no_result()
                else:
                    # httplib.responses maps status code -> reason phrase
                    # (Python 2 stdlib).
                    text = u'查詢字典時發生錯誤。\n\n狀態碼: {} ({}).'.format(code, httplib.responses[code])
            else:
                text = u''
                section_splitter = u'.................................................................'
                lexents = j['results'][0]['lexicalEntries']
                for lexent in lexents:
                    # Section header: ==word (lexical category)==
                    text += u'=={} ({})=='.format(lexent['text'], lexent['lexicalCategory'])
                    lexentarr = lexent['entries']
                    for lexentElem in lexentarr:
                        if 'senses' in lexentElem:
                            sens = lexentElem['senses']
                            text += u'\nDefinition:'
                            for index, sen in enumerate(sens, start=1):
                                if 'definitions' in sen:
                                    for de in sen['definitions']:
                                        # Optional register labels (e.g. "informal") in parentheses.
                                        text += u'\n{}. {} {}'.format(index, de, u'({})'.format(u', '.join(sen['registers'])) if u'registers' in sen else u'')
                                if 'crossReferenceMarkers' in sen:
                                    for crm in sen['crossReferenceMarkers']:
                                        text += u'\n{}. {} (Cross Reference Marker)'.format(index, crm)
                                if 'examples' in sen:
                                    for ex in sen['examples']:
                                        text += u'\n------{}'.format(ex['text'])
                        else:
                            text += u'\n(Senses not found in dictionary.)'
                    text += u'\n{}\n'.format(section_splitter)
                text += u'Powered by Oxford Dictionary.'
        return text
def RD(self, src, params):
if params[2] is not None:
if params[1].endswith('%') and params[1].count('%') == 1:
opportunity = params[1].replace('%', '')
scout_count = params[2]
shot_count = 0
miss_count = 0
if not system.string_can_be_float(opportunity):
text = error.main.incorrect_param(u'參數1(機率)', u'百分比加上符號%')
elif not system.string_can_be_float(scout_count):
text = error.main.incorrect_param(u'參數2(抽籤次數)', u'整數')
elif int(scout_count) > 999999:
text = error.main.invalid_thing_with_correct_format(u'參數2(抽籤次數)', u'小於999999的整數', scout_count)
else:
for i in range(int(scout_count)):
result = random_gen.random_drawer.draw_probability(float(opportunity) / 100.0)
if result:
shot_count += 1
else:
miss_count += 1
text = u'抽籤機率【{}%】\n抽籤結果【中{}次 | 失{}次】\n實際中率【{:.2%}】'.format(opportunity, shot_count, miss_count, shot_count / float(scout_count))
else:
start_index = params[1]
end_index = params[2]
if not start_index.isnumeric():
text = error.main.invalid_thing_with_correct_format(u'起始抽籤數字', u'整數', start_index)
elif not end_index.isnumeric():
text = error.main.invalid_thing_with_correct_format(u'終止抽籤數字', u'整數', start_index)
else:
text = u'抽籤範圍【{}~{}】\n抽籤結果【{}】'.format(start_index, end_index, random_gen.random_drawer.draw_number(start_index, end_index))
elif params[1] is not None:
text_splitter = ' '
if text_splitter in params[1]:
texts = params[1]
text_list = texts.split(text_splitter)
text = u'抽籤範圍【{}】\n抽籤結果【{}】'.format(', '.join(text_list), random_gen.random_drawer.draw_text(text_list))
elif params[1].endswith('%') and params[1].count('%') == 1:
opportunity = params[1].replace('%', '')
text = u'抽籤機率【{}%】\n抽籤結果【{}】'.format(
opportunity,
u'恭喜中獎' if random_gen.random_drawer.draw_probability(float(opportunity) / 100.0) else u'銘謝惠顧')
else:
text = error.main.invalid_thing(u'參數1', params[1])
else:
text = error.main.lack_of_thing(u'參數')
return text
def STK(self, src, params):
last_sticker = self.system_data.get_last_sticker(line_api_proc.source_channel_id(src))
if last_sticker is not None:
text = u'最後一個貼圖的貼圖ID為{}。'.format(last_sticker)
else:
text = u'沒有登記到本頻道的最後貼圖ID。如果已經有貼過貼圖,則可能是因為機器人剛剛才啟動而造成。\n\n本次開機時間: {}'.format(self.system_data.boot_up)
return text
    def T(self, src, params):
        """Percent-encode params[1] after transcoding it to UTF-8 bytes
        (Python 2 str/unicode handling)."""
        if params[1] is not None:
            text = params[1]
            if isinstance(text, unicode):
                # unicode to utf-8
                text = text.encode('utf-8')
            else:
                try:
                    # maybe utf-8
                    text = text.decode('utf-8').encode('utf-8')
                except UnicodeError:
                    # gbk to utf-8
                    text = text.decode('gbk').encode('utf-8')
        else:
            # NOTE(review): the error text is a unicode string containing
            # non-ASCII characters; urllib.quote on it may raise under
            # Python 2 - confirm whether this branch should encode first.
            text = error.main.lack_of_thing(u'參數')
        return urllib.quote(text)
@staticmethod
def split(text, splitter, size):
list = []
if text is not None:
for i in range(size):
if splitter not in text or i == size - 1:
list.append(text)
break
list.append(text[0:text.index(splitter)])
text = text[text.index(splitter)+len(splitter):]
while len(list) < size:
list.append(None)
return list
def split_verify(self, cmd, splitter, param_text):
if cmd not in self.system_data.sys_cmd_dict:
return error.main.invalid_thing(u'指令', cmd)
max_prm = self.system_data.sys_cmd_dict[cmd].split_max
min_prm = self.system_data.sys_cmd_dict[cmd].split_min
params = text_msg.split(param_text, splitter, max_prm)
if min_prm > len(params) - params.count(None):
return error.main.lack_of_thing(u'參數')
params.insert(0, None)
self.system_data.sys_cmd_dict[cmd].count += 1
return params
class oxford_dict(object):
    """Thin client for the Oxford Dictionaries v1 'entries' endpoint.

    Credentials are read from the OXFORD_ID / OXFORD_KEY environment
    variables; the client starts disabled when either is missing, and
    callers may toggle it off later (e.g. after exceeding the quota).
    """
    def __init__(self, language):
        """
        Set environment variable "OXFORD_ID", "OXFORD_KEY" as presented api id and api key.
        """
        self._language = language
        self._url = 'https://od-api.oxforddictionaries.com:443/api/v1/entries/{}/'.format(self._language)
        self._id = os.getenv('OXFORD_ID', None)
        self._key = os.getenv('OXFORD_KEY', None)
        # Enabled only when both credentials are present.
        self._enabled = self._id is not None and self._key is not None
    def get_data_json(self, word):
        """Fetch the entry for *word* (lower-cased); returns the parsed JSON
        on success, or the int HTTP status code on failure."""
        url = self._url + word.lower()
        r = requests.get(url, headers = {'app_id': self._id, 'app_key': self._key})
        status_code = r.status_code
        if status_code != requests.codes.ok:
            return status_code
        else:
            return r.json()
    @property
    def enabled(self):
        return self._enabled
    @enabled.setter
    def enabled(self, value):
        self._enabled = value
| {"/game/rps.py": ["/tool/__init__.py"], "/bot/game_object_holder.py": ["/game/__init__.py"], "/msg_handler/text_msg.py": ["/db/__init__.py", "/bot/__init__.py", "/bot/system.py", "/tool/__init__.py"], "/tool/__init__.py": ["/tool/random_gen.py"], "/msg_handler/game_msg.py": ["/bot/system.py", "/game/__init__.py"], "/msg_handler/__init__.py": ["/msg_handler/text_msg.py", "/msg_handler/game_msg.py"], "/game/__init__.py": ["/game/rps.py"], "/db/__init__.py": ["/db/groupban.py"], "/bot/__init__.py": ["/bot/system.py", "/bot/webpage_auto_gen.py", "/bot/game_object_holder.py"]} |
61,809 | elees1219/LineBot | refs/heads/master | /bot/webpage_auto_gen.py | # -*- coding: utf-8 -*-
from enum import Enum
from error import error
from cgi import escape
from collections import defaultdict
import traceback
import time
from datetime import datetime, timedelta
from flask import request, url_for, render_template
from linebot.models import TextSendMessage
class webpage(object):
    """In-memory registry of generated pages - error reports, full query
    results, full info dumps and overlong text replies - keyed by the Unix
    timestamp (string) at which each was recorded, served via Flask routes.
    """
    def __init__(self):
        # Category keys of _page_content.
        self._error_route = 'Error'
        self._query_route = 'FullQuery'
        self._info_route = 'FullInfo'
        self._text_route = 'Text'
        # category -> {timestamp string -> page text}; defaultdict(unicode)
        # makes unknown timestamps read as an empty unicode string.
        self._page_content = {self._error_route: defaultdict(unicode),
                              self._query_route: defaultdict(unicode),
                              self._info_route: defaultdict(unicode),
                              self._text_route: defaultdict(unicode)}
    def rec_error(self, err_sum, channel_id):
        """Record the current exception traceback and return *err_sum*
        extended with URLs to the detail page and the error list."""
        timestamp = str(int(time.time()))
        # Displayed time is shifted +8h (UTC+8 audience).
        err_detail = u'錯誤發生時間: {}\n'.format(datetime.now() + timedelta(hours=8))
        err_detail += u'頻道ID: {}'.format(channel_id)
        err_detail += u'\n\n'
        err_detail += traceback.format_exc().decode('utf-8')
        # print(...) is valid in both Python 2 and 3 (was a py2-only print
        # statement before).
        print(err_detail.encode('utf-8'))
        self._page_content[self._error_route][timestamp] = err_detail
        err_list = u'詳細錯誤URL: {}\n錯誤清單: {}'.format(
            request.url_root + url_for('get_error_message', timestamp=timestamp)[1:],
            request.url_root + url_for('get_error_list')[1:])
        return err_sum + u'\n\n' + err_list
    def rec_query(self, full_query):
        """Store a full query dump; return its public URL."""
        timestamp = str(int(time.time()))
        self._page_content[self._query_route][timestamp] = full_query
        return request.url_root + url_for('full_query', timestamp=timestamp)[1:]
    def rec_info(self, full_info):
        """Store a full info dump; return its public URL."""
        timestamp = str(int(time.time()))
        self._page_content[self._info_route][timestamp] = full_info
        return request.url_root + url_for('full_info', timestamp=timestamp)[1:]
    def rec_text(self, textmsg_list):
        """Store the text of one or more TextSendMessage objects joined with
        a separator; return the page URL."""
        if not isinstance(textmsg_list, (list, tuple)):
            textmsg_list = [textmsg_list]
        timestamp = str(int(time.time()))
        self._page_content[self._text_route][timestamp] = u'\n===============================\n'.join([u'【Message {}】\n\n{}'.format(index, txt.text) for index, txt in enumerate(textmsg_list, start=1)])
        return request.url_root + url_for('full_content', timestamp=timestamp)[1:]
    def error_timestamp_list(self):
        """Return recorded error timestamps, newest first.

        Sorts by the timestamp keys themselves; epoch-second strings share a
        fixed width so lexicographic order matches numeric order. (The
        previous key=<dict>.get sorted the timestamps by error *text*.)
        """
        return sorted(self._page_content[self._error_route].keys(), reverse=True)
    def get_content(self, type, timestamp):
        """Fetch the stored page of the given content_type at *timestamp*;
        returns an error page when nothing was recorded at that time.
        (Parameter name ``type`` kept for caller compatibility.)"""
        timestamp = str(timestamp)
        content = None
        # Default label so an unknown type falls through to the "no content"
        # error page instead of raising NameError on type_chn below.
        type_chn = u''
        if type == content_type.Error:
            content = self._page_content[self._error_route].get(timestamp)
            type_chn = u'錯誤'
        elif type == content_type.Query:
            content = self._page_content[self._query_route].get(timestamp)
            type_chn = u'索引'
        elif type == content_type.Info:
            content = self._page_content[self._info_route].get(timestamp)
            type_chn = u'查詢詳細資料'
        elif type == content_type.Text:
            content = self._page_content[self._text_route].get(timestamp)
            type_chn = u'回傳文字'
        if content is None:
            return error.webpage.no_content_at_time(type_chn, float(timestamp))
        else:
            return content
    @staticmethod
    def html_render(content, title=None):
        """Render plain text into the generic WebPage template, one template
        entry per line."""
        # NOTE(review): replace(' ', ' ') appears to be a no-op as displayed;
        # the intent was probably space -> '&nbsp;' (or NBSP) to preserve
        # runs of spaces in HTML - confirm against the original bytes.
        return render_template('WebPage.html', Contents=content.replace(' ', ' ').split('\n'), Title=title)
    @staticmethod
    def html_render_error_list(boot_up, error_dict):
        """
        Error dict
        key=timestamp
        value=URL
        """
        return render_template('ErrorList.html', boot_up=boot_up, ErrorDict=error_dict)
class content_type(Enum):
    """Categories of stored page content handled by webpage.get_content()."""
    Error = 0
    Query = 1
    Info = 2
    Text = 3
| {"/game/rps.py": ["/tool/__init__.py"], "/bot/game_object_holder.py": ["/game/__init__.py"], "/msg_handler/text_msg.py": ["/db/__init__.py", "/bot/__init__.py", "/bot/system.py", "/tool/__init__.py"], "/tool/__init__.py": ["/tool/random_gen.py"], "/msg_handler/game_msg.py": ["/bot/system.py", "/game/__init__.py"], "/msg_handler/__init__.py": ["/msg_handler/text_msg.py", "/msg_handler/game_msg.py"], "/game/__init__.py": ["/game/rps.py"], "/db/__init__.py": ["/db/groupban.py"], "/bot/__init__.py": ["/bot/system.py", "/bot/webpage_auto_gen.py", "/bot/game_object_holder.py"]} |
61,810 | elees1219/LineBot | refs/heads/master | /tool/__init__.py | from .mff import (
mff_dmg_calc, job, dmg_bonus
)
from .random_gen import (
random_drawer
) | {"/game/rps.py": ["/tool/__init__.py"], "/bot/game_object_holder.py": ["/game/__init__.py"], "/msg_handler/text_msg.py": ["/db/__init__.py", "/bot/__init__.py", "/bot/system.py", "/tool/__init__.py"], "/tool/__init__.py": ["/tool/random_gen.py"], "/msg_handler/game_msg.py": ["/bot/system.py", "/game/__init__.py"], "/msg_handler/__init__.py": ["/msg_handler/text_msg.py", "/msg_handler/game_msg.py"], "/game/__init__.py": ["/game/rps.py"], "/db/__init__.py": ["/db/groupban.py"], "/bot/__init__.py": ["/bot/system.py", "/bot/webpage_auto_gen.py", "/bot/game_object_holder.py"]} |
61,811 | elees1219/LineBot | refs/heads/master | /db/groupban.py | # -*- coding: utf-8 -*-
import os, sys
import urlparse
import psycopg2
from sqlalchemy.exc import IntegrityError
import hashlib
from enum import Enum
import collections
class group_ban(object):
    """PostgreSQL-backed store of per-group bot settings: the silence
    (auto-reply off) flag, the group admin and up to three moderators.

    Passwords are stored as SHA-224 hex digests; group/user IDs are the
    33-character LINE identifiers.
    """
    def __init__(self, scheme, db_url):
        # Teach urlparse to split netloc-style URLs for this DB scheme.
        urlparse.uses_netloc.append(scheme)
        self.url = urlparse.urlparse(db_url)
        # NOTE(review): this connection is replaced (never closed) by the
        # _set_connection() call at the start of the first sql_cmd().
        self._set_connection()
        self.id_length = 33
        self.moderator_count = 3
    def sql_cmd_only(self, cmd):
        # Convenience wrapper for statements without bind parameters.
        return self.sql_cmd(cmd, None)
    def sql_cmd(self, cmd, dict):
        """Open a fresh connection, execute *cmd* with bind mapping *dict*,
        commit and close. Returns fetched rows, or None for statements that
        produce no result set.

        NOTE(review): the parameter name shadows the builtin ``dict``.
        """
        self._set_connection()
        self.cur.execute(cmd, dict)
        try:
            result = self.cur.fetchall()
        except psycopg2.ProgrammingError as ex:
            # Expected for INSERT/UPDATE/DELETE: nothing to fetch.
            if ex.message == 'no results to fetch':
                result = None
            else:
                raise ex
        self._close_connection()
        return result
    @property
    def table_structure(self):
        # DDL used to provision the backing table (run once manually).
        cmd = u'CREATE TABLE group_ban( \
               groupId VARCHAR(33) PRIMARY KEY, \
               silence BOOLEAN NOT NULL DEFAULT FALSE, \
               admin VARCHAR(33) NOT NULL, \
               admin_sha VARCHAR(56) NOT NULL, \
               moderator1 VARCHAR(33), \
               moderator1_sha VARCHAR(56), \
               moderator2 VARCHAR(33), \
               moderator2_sha VARCHAR(56), \
               moderator3 VARCHAR(33), \
               moderator3_sha VARCHAR(56));'
        return cmd
    def new_data(self, groupId, adminUID, key_for_admin):
        """Register a new group with its admin; returns True on success."""
        if len(adminUID) != self.id_length or len(groupId) != self.id_length:
            return False
        else:
            try:
                cmd = u'INSERT INTO group_ban(groupId, silence, admin, admin_sha) VALUES(%(id)s, FALSE, %(adm)s, %(key)s)'
                cmd_dict = {'id': groupId, 'adm': adminUID, 'key': str(hashlib.sha224(key_for_admin.encode('utf-8')).hexdigest())}
                self.sql_cmd(cmd, cmd_dict)
                return True
            except IntegrityError as ex:
                # NOTE(review): IntegrityError here is sqlalchemy.exc's class,
                # but psycopg2 raises its own psycopg2.IntegrityError on
                # duplicate keys - this handler likely never matches. Confirm.
                return False
    def del_data(self, groupId):
        """Delete a group record; False only for a malformed ID."""
        if len(groupId) != self.id_length:
            return False
        else:
            cmd = u'DELETE FROM group_ban WHERE groupid = %(gid)s'
            cmd_dict = {'gid': groupId}
            self.sql_cmd(cmd, cmd_dict)
            return True
    def get_group_by_id(self, groupId):
        """Return the group's row tuple, or None when not registered."""
        cmd = u'SELECT * FROM group_ban WHERE groupId = %(gid)s'
        cmd_dict = {'gid': groupId}
        result = self.sql_cmd(cmd, cmd_dict)
        if len(result) >= 1:
            return result[0]
        else:
            return None
    def set_silence(self, groupId, set, key):
        """Set the group's silence flag; *key* must match a stored
        admin/moderator password hash. True on success.

        NOTE(review): the key check is not scoped to *groupId* - a valid key
        belonging to ANY group authorises the update. Parameter ``set`` also
        shadows the builtin.
        """
        if len(groupId) != self.id_length:
            return False
        cmd_check = u'SELECT * FROM group_ban WHERE admin_sha = %(key)s OR \
                      moderator1_sha = %(key)s OR \
                      moderator2_sha = %(key)s OR \
                      moderator3_sha = %(key)s'
        cmd_check_dict = {'key': hashlib.sha224(key).hexdigest()}
        results = self.sql_cmd(cmd_check, cmd_check_dict)
        if len(results) >= 1:
            cmd = u'UPDATE group_ban SET silence = %(set)s WHERE groupId = %(id)s'
            cmd_dict = {'id': groupId, 'set': set}
            self.sql_cmd(cmd, cmd_dict)
            return True
        else:
            return False
    def change_admin(self, groupId, newAdminUID, key, newkey):
        """Replace the group admin; *key* must match a stored admin password.

        NOTE(review): like set_silence, the key check is not scoped to
        *groupId*.
        """
        if len(newAdminUID) != self.id_length or len(groupId) != self.id_length:
            return False
        cmd_check = u'SELECT * FROM group_ban WHERE admin_sha = %(key)s'
        cmd_check_dict = {'key': hashlib.sha224(key).hexdigest()}
        results = self.sql_cmd(cmd_check, cmd_check_dict)
        if len(results) >= 1:
            cmd = u'UPDATE group_ban SET admin = %(adm)s, admin_sha = %(sha)s WHERE groupId = %(id)s'
            cmd_dict = {'id': groupId, 'adm': newAdminUID, 'sha': hashlib.sha224(newkey).hexdigest()}
            self.sql_cmd(cmd, cmd_dict)
            return True
        else:
            return False
    def set_mod1(self, groupId, newModUID, key, newkey):
        # Position-specific wrappers around _set_moderator.
        return self._set_moderator(groupId, 1, newModUID, key, newkey)
    def set_mod2(self, groupId, newModUID, key, newkey):
        return self._set_moderator(groupId, 2, newModUID, key, newkey)
    def set_mod3(self, groupId, newModUID, key, newkey):
        return self._set_moderator(groupId, 3, newModUID, key, newkey)
    def _set_moderator(self, groupId, moderator_pos, newModUID, key, newkey):
        """Assign a moderator slot (1-3); authorised by the admin password or
        the slot's current password.

        NOTE(review): moderator_pos == 0 passes the range guard but then
        raises KeyError on the lookup dicts; the key check is again not
        scoped to *groupId*.
        """
        if len(groupId) != self.id_length or len(newModUID) != self.id_length or moderator_pos > 3 or moderator_pos < 0:
            return False
        mod_col_dict = {1: 'moderator1', 2: 'moderator2', 3: 'moderator3'}
        mod_sha_dict = {1: 'moderator1_sha', 2: 'moderator2_sha', 3: 'moderator3_sha'}
        cmd_check = u'SELECT * FROM group_ban WHERE admin_sha = %(key)s OR {sha} = %(key)s'.format(sha=mod_sha_dict[moderator_pos])
        cmd_check_dict = {'key': hashlib.sha224(key).hexdigest()}
        results = self.sql_cmd(cmd_check, cmd_check_dict)
        if len(results) >= 1:
            cmd = u'UPDATE group_ban SET {col} = %(mod)s, {sha} = %(newkey)s WHERE groupId = %(id)s'.format(sha=mod_sha_dict[moderator_pos],
                                                                                                            col=mod_col_dict[moderator_pos])
            cmd_dict = {'id': groupId, 'mod': newModUID, 'newkey': hashlib.sha224(newkey).hexdigest()}
            self.sql_cmd(cmd, cmd_dict)
            return True
        else:
            return False
    def is_group_set_to_silence(self, groupId):
        """Return the group's silence flag, or None when unregistered."""
        group = self.get_group_by_id(groupId)
        if group is not None:
            return group[int(gb_col.silence)]
    def _close_connection(self):
        # Commit, then release cursor and connection.
        self.conn.commit()
        self.cur.close()
        self.conn.close()
    def _set_connection(self):
        # Open a new connection/cursor from the parsed DB URL.
        self.conn = psycopg2.connect(
            database=self.url.path[1:],
            user=self.url.username,
            password=self.url.password,
            host=self.url.hostname,
            port=self.url.port
        )
        self.cur = self.conn.cursor()
class gb_col(Enum):
groupId = 0
silence = 1
admin = 2
admin_sha = 3
moderator1 = 4
moderator1_sha = 5
moderator2 = 6
moderator2_sha = 7
moderator3 = 8
moderator3_sha = 9
def __int__(self):
return self.value | {"/game/rps.py": ["/tool/__init__.py"], "/bot/game_object_holder.py": ["/game/__init__.py"], "/msg_handler/text_msg.py": ["/db/__init__.py", "/bot/__init__.py", "/bot/system.py", "/tool/__init__.py"], "/tool/__init__.py": ["/tool/random_gen.py"], "/msg_handler/game_msg.py": ["/bot/system.py", "/game/__init__.py"], "/msg_handler/__init__.py": ["/msg_handler/text_msg.py", "/msg_handler/game_msg.py"], "/game/__init__.py": ["/game/rps.py"], "/db/__init__.py": ["/db/groupban.py"], "/bot/__init__.py": ["/bot/system.py", "/bot/webpage_auto_gen.py", "/bot/game_object_holder.py"]} |
61,812 | elees1219/LineBot | refs/heads/master | /tool/txt_calc.py | # -*- coding: utf-8 -*-
from math import *
class text_calculator(object):
    """Evaluate user-supplied arithmetic expression strings (Python 2)."""
    @staticmethod
    def calc(text, debug=False):
        """Evaluate *text* as a Python expression (or as statements that
        assign ``result``) and return the numeric result, or None on failure
        or when no actual computation happened.

        SECURITY NOTE(review): exec on untrusted chat input can run arbitrary
        code; this should be replaced with a restricted parser.
        """
        result = ''
        # Reject leading-zero input (avoids octal-literal surprises).
        if text.startswith('0'):
            return
        try:
            if 'result=' not in text:
                # Plain expression: bind its value to the local `result`.
                exec('result={}'.format(text))
            else:
                # Caller-provided statements assign `result` themselves.
                exec(text)
            # Return only when evaluation produced a number and the textual
            # form changed (i.e. something was actually computed).
            if result != '' and text != str(result) and isinstance(result, (float, int, long)):
                return result
            elif debug:
                # NOTE(review): result.encode fails when result is numeric;
                # the debug printing itself can raise here.
                print 'String math calculation failed:'
                print type(result)
                print 'Original Text:'
                print text.encode('utf-8')
                print 'Result variant:'
                print result.encode('utf-8')
        except:
            # NOTE(review): bare except deliberately swallows evaluation
            # errors (bad input is expected); same debug caveat as above.
            if debug:
                print 'String math calculation failed:'
                print type(result)
                print 'Original Text:'
                print text.encode('utf-8')
                print 'Result variant:'
                print result.encode('utf-8')
        return
| {"/game/rps.py": ["/tool/__init__.py"], "/bot/game_object_holder.py": ["/game/__init__.py"], "/msg_handler/text_msg.py": ["/db/__init__.py", "/bot/__init__.py", "/bot/system.py", "/tool/__init__.py"], "/tool/__init__.py": ["/tool/random_gen.py"], "/msg_handler/game_msg.py": ["/bot/system.py", "/game/__init__.py"], "/msg_handler/__init__.py": ["/msg_handler/text_msg.py", "/msg_handler/game_msg.py"], "/game/__init__.py": ["/game/rps.py"], "/db/__init__.py": ["/db/groupban.py"], "/bot/__init__.py": ["/bot/system.py", "/bot/webpage_auto_gen.py", "/bot/game_object_holder.py"]} |
61,813 | elees1219/LineBot | refs/heads/master | /msg_handler/game_msg.py | # -*- coding: utf-8 -*-
import os, sys
from error import error
from bot.system import line_api_proc, string_can_be_int
import game
from linebot.models import SourceUser
class game_msg(object):
    """Handlers for game-related text commands (rock-paper-scissors).

    game_data holds per-channel game instances; line_api_proc wraps LINE
    profile lookups.
    """
    def __init__(self, game_data, line_api_proc):
        self._game_data = game_data
        self._line_api = line_api_proc
    def RPS(self, src, params):
        """Rock-paper-scissors command dispatcher.

        Branches on how many trailing parameters are present:
          4 params: ADD - register a sticker/text as a battle item
          3 params: create a new game in this channel
          2 params: RST - reset one battle item
          1 param : DEL / RST / R / P / S / PLAY / SW single-argument actions
          none    : show player statistics
        Returns the reply text.
        """
        cid = line_api_proc.source_channel_id(src)
        uid = line_api_proc.source_user_id(src)
        if params[4] is not None:
            rps_obj = self._game_data.get_rps(cid)
            if rps_obj is not None and isinstance(rps_obj, game.rps):
                action = params[1]
                if action == 'ADD':
                    item_type = params[2]
                    is_sticker = params[3]
                    content = params[4]
                    battle_item = None
                    if item_type == 'R':
                        battle_item = game.battle_item.rock
                    if item_type == 'P':
                        battle_item = game.battle_item.paper
                    if item_type == 'S':
                        battle_item = game.battle_item.scissor
                    if battle_item is not None:
                        if is_sticker == 'STK':
                            # Sticker content must be a numeric sticker ID.
                            if string_can_be_int(content):
                                rps_obj.register_battle_item(battle_item, True, content)
                                text = rps_obj.battle_item_dict_text()
                            else:
                                text = error.main.incorrect_param(u'參數4', u'整數,以代表貼圖ID')
                        elif is_sticker == 'TXT':
                            rps_obj.register_battle_item(battle_item, False, content)
                            text = rps_obj.battle_item_dict_text()
                        else:
                            text = error.main.incorrect_param(u'參數3', u'STK(是貼圖ID)或TXT(文字訊息)')
                    else:
                        text = error.main.incorrect_param(u'參數2', u'S(剪刀)、R(石頭)或P(布)')
                else:
                    text = error.main.incorrect_param(u'參數1', u'ADD')
            else:
                text = error.main.miscellaneous(u'尚未建立猜拳遊戲。')
        elif params[3] is not None:
            # Create a new game; params are the three sticker IDs.
            scissor = params[1]
            rock = params[2]
            paper = params[3]
            rps_obj = game.rps(True if isinstance(src, SourceUser) else False, rock, paper, scissor)
            # NOTE(review): isinstance(rps_obj, game.rps) is always True for
            # a plain constructor call; the else branch below looks
            # unreachable unless game.rps overrides __new__ - confirm.
            if isinstance(rps_obj, game.rps):
                if line_api_proc.is_valid_user_id(uid):
                    rps_obj.register_player(self._line_api.profile(uid).display_name, uid)
                    text = u'遊戲建立成功。\n\n剪刀貼圖ID: {}\n石頭貼圖ID: {}\n布貼圖ID: {}'.format(scissor, rock, paper)
                    self._game_data.set_rps(cid, rps_obj)
                else:
                    text = error.main.unable_to_receive_user_id()
            else:
                text = rps_obj
        elif params[2] is not None:
            rps_obj = self._game_data.get_rps(cid)
            if rps_obj is not None and isinstance(rps_obj, game.rps):
                action = params[1]
                battle_item_text = params[2]
                if action == 'RST':
                    # Reset a single battle item back to its default.
                    if battle_item_text == 'R':
                        rps_obj.reset_battle_item(game.battle_item.rock)
                        text = u'已重設代表【石頭】的物件。'
                    elif battle_item_text == 'P':
                        rps_obj.reset_battle_item(game.battle_item.paper)
                        text = u'已重設代表【布】的物件。'
                    elif battle_item_text == 'S':
                        rps_obj.reset_battle_item(game.battle_item.scissor)
                        text = u'已重設代表【剪刀】的物件。'
                    else:
                        text = error.main.incorrect_param(u'參數2', u'R(石頭), P(布), S(剪刀)')
                else:
                    text = error.main.incorrect_param(u'參數1', u'RST')
            else:
                text = error.main.miscellaneous(u'尚未建立猜拳遊戲。')
        elif params[1] is not None:
            rps_obj = self._game_data.get_rps(cid)
            action = params[1]
            if rps_obj is not None and isinstance(rps_obj, game.rps):
                if action == 'DEL':
                    self._game_data.del_rps(cid)
                    text = u'猜拳遊戲已刪除。'
                elif action == 'RST':
                    rps_obj.reset_statistics()
                    text = u'猜拳遊戲統計資料已重設。'
                elif action == 'R':
                    text = rps_obj.battle_item_dict_text(game.battle_item.rock)
                elif action == 'P':
                    text = rps_obj.battle_item_dict_text(game.battle_item.paper)
                elif action == 'S':
                    text = rps_obj.battle_item_dict_text(game.battle_item.scissor)
                elif action == 'PLAY':
                    # Register the sender as a player.
                    uid = line_api_proc.source_user_id(src)
                    if line_api_proc.is_valid_user_id(uid):
                        player_name = self._line_api.profile(uid).display_name
                        reg_success = rps_obj.register_player(player_name, uid)
                        if reg_success:
                            text = u'成功註冊玩家 {}。'.format(player_name)
                        else:
                            text = u'玩家 {} 已存在於玩家清單中。'.format(player_name)
                    else:
                        text = error.main.unable_to_receive_user_id()
                elif action == 'SW':
                    # Toggle pause/resume.
                    rps_obj.enabled = not rps_obj.enabled
                    if rps_obj.enabled:
                        text = u'遊戲已繼續。'
                    else:
                        text = u'遊戲已暫停。'
                else:
                    text = error.main.incorrect_param(u'參數1', u'DEL, RST, R, P, S, PLAY, SW')
            else:
                text = error.main.miscellaneous(u'尚未建立猜拳遊戲。')
        else:
            # No parameters: dump player statistics and battle items.
            rps_obj = self._game_data.get_rps(cid)
            if rps_obj is not None and isinstance(rps_obj, game.rps):
                if rps_obj.player_dict is not None and len(rps_obj.player_dict) > 0:
                    text = game.rps.player_stats_text(rps_obj.player_dict)
                    text += '\n\n'
                    text += rps_obj.battle_item_dict_text()
                else:
                    text = error.main.miscellaneous(u'無玩家資料。')
            else:
                text = error.main.miscellaneous(u'尚未建立猜拳遊戲。')
        return text
| {"/game/rps.py": ["/tool/__init__.py"], "/bot/game_object_holder.py": ["/game/__init__.py"], "/msg_handler/text_msg.py": ["/db/__init__.py", "/bot/__init__.py", "/bot/system.py", "/tool/__init__.py"], "/tool/__init__.py": ["/tool/random_gen.py"], "/msg_handler/game_msg.py": ["/bot/system.py", "/game/__init__.py"], "/msg_handler/__init__.py": ["/msg_handler/text_msg.py", "/msg_handler/game_msg.py"], "/game/__init__.py": ["/game/rps.py"], "/db/__init__.py": ["/db/groupban.py"], "/bot/__init__.py": ["/bot/system.py", "/bot/webpage_auto_gen.py", "/bot/game_object_holder.py"]} |
61,814 | elees1219/LineBot | refs/heads/master | /msg_handler/__init__.py | from .text_msg import (
text_msg, oxford_dict, line_api_proc
)
from .game_msg import (
game_msg
) | {"/game/rps.py": ["/tool/__init__.py"], "/bot/game_object_holder.py": ["/game/__init__.py"], "/msg_handler/text_msg.py": ["/db/__init__.py", "/bot/__init__.py", "/bot/system.py", "/tool/__init__.py"], "/tool/__init__.py": ["/tool/random_gen.py"], "/msg_handler/game_msg.py": ["/bot/system.py", "/game/__init__.py"], "/msg_handler/__init__.py": ["/msg_handler/text_msg.py", "/msg_handler/game_msg.py"], "/game/__init__.py": ["/game/rps.py"], "/db/__init__.py": ["/db/groupban.py"], "/bot/__init__.py": ["/bot/system.py", "/bot/webpage_auto_gen.py", "/bot/game_object_holder.py"]} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.