index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
22,674
|
HadrienRenaud/simple-calculator-kata
|
refs/heads/master
|
/calculator.py
|
import re
from logger import ILogger, IWebserver
class Calculator:
    """String-calculator kata: sums the numbers contained in a delimited string."""

    @staticmethod
    def add(inputs: str):
        """Sum the numbers in *inputs*.

        Supports an optional first line "//<delim>" or "//[<d1>][<d2>]"
        declaring custom delimiters; otherwise comma and newline split the
        numbers.  Negatives raise; values above 1000 are ignored.
        """
        # Empty input: log and report a zero total.
        if not inputs:
            ILogger.write(0)
            return 0
        pattern = r'[,\n]'
        first_line = inputs.split('\n')[0]
        if re.match(r'^//.+$', first_line):
            # Custom delimiter header present; everything after "//".
            header = first_line[2:]
            pair = re.match(r'^\[(.*)]\[(.*)]$', header)
            if pair:
                # Two bracketed delimiters -> alternation of both, escaped.
                pattern = f"{re.escape(pair.group(1))}|{re.escape(pair.group(2))}"
            else:
                pattern = re.escape(header)
            # Drop the header line before parsing the numbers.
            inputs = "\n".join(inputs.split('\n')[1:])
        numbers = [float(token) for token in re.split(pattern, inputs)]
        if any(n < 0 for n in numbers):
            raise Exception("negatives not allowed ")
        # Values above 1000 are ignored, per the kata rules.
        result = sum(n for n in numbers if n < 1001)
        try:
            ILogger.write(result)
        except Exception as e:
            IWebserver.notify("logging has failed with the message : ", e)
        return result
|
{"/test_calculator.py": ["/calculator.py"], "/calculator.py": ["/logger.py"]}
|
22,684
|
farazmazhar/Tweet-miner-py
|
refs/heads/master
|
/executioner.py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Feb 10 18:23:50 2018
@author: faraz
"""
import tweepy
from tweepy import OAuthHandler
import json
import argparse
import urllib.request
import os
import operator
from collections import Counter
from nltk.corpus import stopwords
import string
import streamListener as sl
import tokenizer as t
# Twitter API credentials -- placeholder values; replace with real keys
# before running (these strings are clearly not valid tokens).
consumer_key = 'Consumer_key'
consumer_secret = 'Consumer_secret'
access_token = 'acc_token'
access_secret = 'acc_secret'
# Module-level authenticated tweepy handle used by the functions below.
auth = OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_secret)
api = tweepy.API(auth)
def process_or_store(tweet):
    """Dump a raw tweet to stdout as a JSON string."""
    serialized = json.dumps(tweet)
    print(serialized)
def clearExistingFiles(user):
    """Truncate (or create) the tweet-dump files for *user*.

    *user* is the literal string "None" when no specific user was requested;
    in that case the un-prefixed dump files are cleared instead.
    """
    # BUG FIX: the original used ``user is not "None"`` -- an identity check
    # against a string literal, which relies on CPython interning and raises
    # a SyntaxWarning on modern interpreters.  Equality is what was meant.
    if user != "None":
        filename = user + "_tweet_dump.json"
        minfilename = user + "_tweet_dump.min.json"
    else:
        filename = "tweet_dump.json"
        minfilename = "tweet_dump.min.json"
    # Opening in "w" mode truncates the file, creating it if missing.
    open(filename, 'w').close()
    open(minfilename, 'w').close()
def get_clArgs():
    """Build and return the command-line argument parser for the tweet miner."""
    parser = argparse.ArgumentParser(description="Tweet miner.")
    # To be implemented.
    parser.add_argument("-f", "--filter", dest="filter", default="-",
                        help="Trend Tweet stream filter. Must start with '#'.")
    parser.add_argument("-u", "--user", dest="user", default="None",
                        help="Get tweets of a certain user.")
    parser.add_argument("-p", "--photos", dest="photos", default="False",
                        help="Set it 'True' to download photos. Default is 'False'.")
    parser.add_argument("-c", "--count", dest="count", default=25,
                        help="Number of tweets to retrieve. Max is 100, Default is 25.")
    # To be implemented.
    parser.add_argument("-d", "--download", dest="download", default="False",
                        help="Download tweets.")
    # To be implemented.
    parser.add_argument("-s", "--separate", dest="separate", default="False",
                        help="Stores tweets in seprate file. Use with download.")
    return parser
def trendStream(trendFilter = '#duck'):
    # Open a filtered tweet stream tracking *trendFilter*, using the
    # module-level ``auth`` handle and the project-local streamListener
    # module (``sl``).  Blocks while the stream runs.
    twitter_stream = sl.Stream(auth, sl.MyListener())
    twitter_stream.filter(track=[trendFilter])
def getminTweet(tweet):
    """Reduce a tweepy status object to a small plain dict of key fields."""
    # Not every tweet carries media; fall back to None when absent.
    try:
        media_url = tweet.entities['media'][0]['media_url']
    except (AttributeError, KeyError):
        media_url = None
    stamp = tweet.created_at.strftime("%A %d/%B/%Y at %I:%M:%S %p UTC +0000")
    return {
        "time": stamp,
        "screen_name": tweet.user.screen_name,
        "username": tweet.user.name,
        "tweet": tweet.text,
        "media_url": media_url,
        "id_str": tweet.id_str,
        "user_id_str": tweet.user.id_str,
    }
def generateJSON(tweet, mintweet, user, username):
    """Append the raw and minified tweet as JSON lines to the dump files.

    When *user* is truthy the filenames are prefixed with *username*.
    """
    prefix = username + "_" if user else ""
    filename = prefix + "tweet_dump.json"
    minfilename = prefix + "tweet_dump.min.json"
    # One JSON document per line (JSON-lines format), appended.
    with open(filename, 'a') as outfile:
        json.dump(tweet._json, outfile)
        outfile.write('\n')
    with open(minfilename, 'a') as minoutfile:
        json.dump(mintweet, minoutfile)
        minoutfile.write('\n')
def textTokenize(text):
    # Delegate tokenization to the project-local tokenizer module (``t``).
    return t.preprocess(text)
def tweetAnalyizer(user):  # To be worked on
    """Tokenize every stored minified tweet for *user*.

    *user* is the literal string "None" when no specific user was requested.
    """
    # BUG FIX: equality, not identity -- ``is not "None"`` compared object
    # identity with a string literal and only worked by interning accident.
    if user != "None":
        minfilename = user + "_tweet_dump.min.json"
    else:
        minfilename = "tweet_dump.min.json"
    with open(minfilename, 'r') as f:
        for line in f:
            tweet = json.loads(line)
            # BUG FIX: the original recursed into tweetAnalyizer() with the
            # tweet *text*, which would try to open a file named after the
            # tweet.  Tokenizing the text (textTokenize) was clearly intended,
            # as the variable name ``tokens`` shows.
            tokens = textTokenize(tweet['tweet'])
            # do_something_else(tokens)
def phraseCount(user):
    """Count term frequencies across stored minified tweets and log the top 20.

    Reads <user>_tweet_dump.min.json (or tweet_dump.min.json when *user* is
    the literal string "None"), prints the 20 most common terms and writes
    them, ranked, to a *_commonPhrases.txt file.
    """
    punctuation = list(string.punctuation)
    # Use a set for O(1) membership tests in the filter below.
    stop = set(stopwords.words('english') + punctuation + ['rt', 'via', 'RT', 'the', 'The'])
    # BUG FIX: equality, not identity -- ``is not "None"`` on a string
    # literal relies on CPython interning and warns on modern interpreters.
    if user != "None":
        fname = user + "_tweet_dump.min.json"
        phraseCountLog = user + "_commonPhrases.txt"
    else:
        fname = "tweet_dump.min.json"
        phraseCountLog = "commonPhrases.txt"
    count_all = Counter()
    with open(fname, 'r') as f:
        for line in f:
            tweet = json.loads(line)
            # Keep terms longer than 2 chars that are not stopwords/punctuation.
            terms_stop = [term for term in textTokenize(tweet['tweet'])
                          if term not in stop and len(term) > 2]
            count_all.update(terms_stop)
    # Print the first 20 most frequent words
    print(count_all.most_common(20))
    with open(phraseCountLog, 'w') as f:
        for counter, phrase in enumerate(count_all.most_common(20), start=1):
            f.write(str(counter) + ": " + str(phrase) + "\n")
def _process_status(status, photos, from_user):
    """Print, persist and (optionally) download media for one status."""
    mintweet = getminTweet(status)
    print(mintweet)
    if from_user:
        generateJSON(status, mintweet, True, mintweet['screen_name'])
    else:
        generateJSON(status, mintweet, False, None)
    if photos == "True" and mintweet['media_url'] is not None:
        # Media goes into a per-user "<screen_name>_tweet" directory.
        directory = mintweet['screen_name'] + "_tweet"
        if not os.path.exists(directory):
            os.makedirs(directory)
        urllib.request.urlretrieve(
            mintweet['media_url'],
            directory + "/" + mintweet['screen_name'] + "_" + mintweet['media_url'].split('/')[-1])


def tweetWalker(args):
    """Walk a timeline (a user's, or the authenticated home timeline),
    persisting each tweet and optionally downloading its media.
    """
    # BUG FIX: the original used ``is (not) "None"`` -- identity comparison
    # with a string literal; equality is what was meant.  The per-status
    # bodies of both branches were identical and are now shared.
    if args.user != "None":
        for status in tweepy.Cursor(api.user_timeline, screen_name=args.user).items(int(args.count)):
            _process_status(status, args.photos, True)
    else:
        for status in tweepy.Cursor(api.home_timeline).items(int(args.count)):
            _process_status(status, args.photos, False)
    # elif args.filter[0] is '#':
    #     trendStream(trendFilter=args.filter)
    print("< |=== END ===| >")
def main():
    # All output files are collected under a dedicated working directory.
    directory = "mined_tweets"
    if not os.path.exists(directory):
        os.makedirs(directory)
    os.chdir(directory)
    args = get_clArgs().parse_args()
    tweetWalker(args)
    # tweetAnalyizer(args.user)
    phraseCount(args.user)

if __name__ == '__main__':
    main()
|
{"/executioner.py": ["/streamListener.py"]}
|
22,685
|
farazmazhar/Tweet-miner-py
|
refs/heads/master
|
/streamListener.py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Feb 10 18:58:45 2018
@author: faraz
Reference: https://marcobonzanini.com/2015/03/02/mining-twitter-data-with-python-part-1/
"""
from tweepy import Stream
from tweepy.streaming import StreamListener
class MyListener(StreamListener):
    """Minimal stream listener that appends raw tweet payloads to python.json."""

    def on_data(self, data):
        # Persist the raw payload; never kill the stream on a write error.
        try:
            with open('python.json', 'a') as f:
                f.write(data)
        except BaseException as e:
            print("Error on_data: %s" % str(e))
        return True

    def on_error(self, status):
        # Log the HTTP status and keep the stream alive.
        print(status)
        return True
|
{"/executioner.py": ["/streamListener.py"]}
|
22,692
|
ljm625/p4-srv6-usid
|
refs/heads/master
|
/mininet/topo-6r.py
|
#!/usr/bin/python
# Copyright 2019-present Open Networking Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
from mininet.cli import CLI
from mininet.log import setLogLevel
from mininet.net import Mininet
from mininet.node import RemoteController
from mininet.topo import Topo
from bmv2 import ONOSStratumSwitch
from host6 import IPv6Host
CPU_PORT = 255
class TutorialTopo(Topo):
    """Two end routers joined through a 2x2 mesh of transit routers,
    with one IPv6 host behind each end router.

        transit_r1 -- transit_r3
       /      \\    /      \\
    end_r1 ---- \\ ---- end_r2
       \\      /    \\      /
        transit_r2 -- transit_r4
    """

    def __init__(self, *args, **kwargs):
        Topo.__init__(self, *args, **kwargs)

        def stratum(name, port):
            # Every router is a Stratum bmv2 switch with its own gRPC port.
            return self.addSwitch(name, cls=ONOSStratumSwitch,
                                  grpcport=port, cpuport=CPU_PORT)

        # End routers
        edge1 = stratum('end_r1', 50001)
        edge2 = stratum('end_r2', 50002)
        # Transit routers
        core1 = stratum('transit_r1', 50011)
        core2 = stratum('transit_r2', 50012)
        core3 = stratum('transit_r3', 50013)
        core4 = stratum('transit_r4', 50014)

        # Inter-switch links.  Order is preserved from the original: it
        # determines Mininet's interface/port numbering.
        for a, b in [(edge1, core1), (edge1, core2),
                     (core1, core3), (core1, core4),
                     (core2, core3), (core2, core4),
                     (core3, edge2), (core4, edge2)]:
            self.addLink(a, b)

        # One IPv6 host behind each end router.
        h1 = self.addHost('h1', cls=IPv6Host, mac="00:00:00:00:00:10",
                          ipv6='2001:1:1::1/64', ipv6_gw='2001:1:1::ff')
        h2 = self.addHost('h2', cls=IPv6Host, mac="00:00:00:00:00:20",
                          ipv6='2001:1:2::1/64', ipv6_gw='2001:1:2::ff')
        self.addLink(h1, edge1)
        self.addLink(h2, edge2)
def main(argz):
    """Boot the topology against a remote ONOS controller and drop to a CLI."""
    net = Mininet(topo=TutorialTopo(), controller=None)
    net.addController(RemoteController('c0', ip=argz.onos_ip))
    net.start()
    CLI(net)
    net.stop()
if __name__ == "__main__":
    # --onos-ip is mandatory: the switches are managed by a remote ONOS.
    parser = argparse.ArgumentParser(
        description='Mininet script for cisco-srv6 topology')
    parser.add_argument('--onos-ip', help='ONOS controller IP address',
                        type=str, action="store", required=True)
    args = parser.parse_args()
    setLogLevel('info')
    main(args)
|
{"/mininet/topo-6r.py": ["/mininet/topo.py"]}
|
22,693
|
ljm625/p4-srv6-usid
|
refs/heads/master
|
/mininet/topo.py
|
#!/usr/bin/python
# Copyright 2019-present Open Networking Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
from mininet.cli import CLI
from mininet.log import setLogLevel
from mininet.net import Mininet
from mininet.node import RemoteController
from mininet.topo import Topo
from bmv2 import ONOSStratumSwitch
from host6 import IPv6Host
CPU_PORT = 255
class TutorialTopo(Topo):
    """Eight-router ladder topology with one IPv6 host per site.

    /--------\\ /----\\ /----\\ /----\\ /----\\
    | Site A |---| R1 |---| R4 |---| R5 |---| R8 |
    \\________/ \\____/ \\____/ \\____/ \\____/
    | | | |
    | | | |
    /--------\\ /----\\ /----\\ /----\\ /----\\
    | Site B |---| R2 |---| R3 |---| R6 |---| R7 |
    \\________/ \\____/ \\____/ \\____/ \\____/
    """

    def __init__(self, *args, **kwargs):
        Topo.__init__(self, *args, **kwargs)
        # Routers r1..r8, each a Stratum bmv2 switch on gRPC port 5000<n>.
        routers = {}
        for n in range(1, 9):
            label = 'r%d' % n
            routers[label] = self.addSwitch(label, cls=ONOSStratumSwitch,
                                            grpcport=50000 + n,
                                            cpuport=CPU_PORT)
        # Inter-switch links.  Order is preserved from the original: it
        # determines Mininet's interface/port numbering.
        for a, b in [('r1', 'r2'), ('r1', 'r4'), ('r2', 'r3'), ('r4', 'r5'),
                     ('r4', 'r3'), ('r3', 'r6'), ('r5', 'r8'), ('r5', 'r6'),
                     ('r6', 'r7'), ('r7', 'r8')]:
            self.addLink(routers[a], routers[b])
        # Site hosts: one IPv6 host behind r1 (Site A) and r2 (Site B).
        h1 = self.addHost('h1', cls=IPv6Host, mac="00:00:00:00:00:10",
                          ipv6='2001:1:1::1/64', ipv6_gw='2001:1:1::ff')
        h2 = self.addHost('h2', cls=IPv6Host, mac="00:00:00:00:00:20",
                          ipv6='2001:1:2::1/64', ipv6_gw='2001:1:2::ff')
        self.addLink(h1, routers['r1'])
        self.addLink(h2, routers['r2'])
def main(argz):
    """Start the Mininet topology, attach the remote ONOS controller,
    and open an interactive CLI until the user exits."""
    topo = TutorialTopo()
    ctrl = RemoteController('c0', ip=argz.onos_ip)
    net = Mininet(topo=topo, controller=None)
    net.addController(ctrl)
    net.start()
    CLI(net)
    net.stop()
if __name__ == "__main__":
    # --onos-ip is mandatory: the switches are managed by a remote ONOS.
    parser = argparse.ArgumentParser(
        description='Mininet script for cisco-srv6 topology')
    parser.add_argument('--onos-ip', help='ONOS controller IP address',
                        type=str, action="store", required=True)
    args = parser.parse_args()
    setLogLevel('info')
    main(args)
|
{"/mininet/topo-6r.py": ["/mininet/topo.py"]}
|
22,694
|
ljm625/p4-srv6-usid
|
refs/heads/master
|
/mininet/ipv6_sr.py
|
#!/usr/bin/python
from mininet.topo import Topo
from mininet.net import Mininet
from mininet.node import Node
from mininet.log import setLogLevel, info
from mininet.cli import CLI
class IPv6Node( Node ):
    """Mininet node with a static IPv6 address and SRv6 (seg6) enabled."""

    def config( self, ipv6, ipv6_gw=None, **params ):
        # Assign the given IPv6 address/prefix to the default interface and
        # optionally install a default route via *ipv6_gw*.
        super( IPv6Node, self).config( **params )
        self.cmd( 'ip -6 addr add %s dev %s' % ( ipv6, self.defaultIntf() ) )
        if ipv6_gw:
            self.cmd( 'ip -6 route add default via %s' % ( ipv6_gw ) )
        # Enable SRv6
        self.cmd( 'sysctl -w net.ipv6.conf.all.seg6_enabled=1' )
        self.cmd( 'sysctl -w net.ipv6.conf.%s.seg6_enabled=1' % self.defaultIntf() )
        # Enable forwarding on the router:
        #self.cmd( 'sysctl -w net.ipv6.conf.all.forwarding=1' )

    def terminate( self ):
        #self.cmd( 'sysctl -w net.ipv6.conf.all.forwarding=0' )
        super( IPv6Node, self ).terminate()
class NetworkTopo( Topo ):
    """Two IPv6 hosts hanging off a single switch."""

    def build( self, **_opts ):
        switch = self.addSwitch( 's1' )
        # h2 gets no gateway: both hosts share the 2001::/64 segment.
        host_a = self.addHost( 'h1', cls=IPv6Node, ipv6='2001::1/64', ipv6_gw='2001::ff' )
        host_b = self.addHost( 'h2', cls=IPv6Node, ipv6='2001::2/64' )
        self.addLink( host_a, switch )
        self.addLink( host_b, switch )
def run():
    # Build the two-host topology, then install SRv6 (seg6) routes so that
    # traffic between the fd00:* / IPv4 addresses is segment-routed.
    # NOTE: this script uses Python 2 print statements and targets Python 2.
    topo = NetworkTopo()
    net = Mininet( topo=topo )
    net.start()
    # Secondary IPv6 addresses used as SRv6 route destinations.
    net['h1'].cmd( 'ip -6 addr add fd00:1::1 dev h1-eth0' )
    net['h2'].cmd( 'ip -6 addr add fd00:2::2 dev h2-eth0' )
    # IPv6 routes carrying an inline segment list.
    net['h1'].cmd( 'ip -6 route add fd00:2::2 encap seg6 mode inline segs 2001::2 dev h1-eth0' )
    net['h2'].cmd( 'ip -6 route add fd00:1::1 encap seg6 mode inline segs 2001::1 dev h2-eth0' )
    # IPv4 addresses routed over SRv6 encapsulation (mode encap).
    net['h1'].cmd( 'ip addr add 1.0.0.1 dev h1-eth0' )
    net['h2'].cmd( 'ip addr add 2.0.0.2 dev h2-eth0' )
    net['h1'].cmd( 'ip route add 2.0.0.2 encap seg6 mode encap segs 2001::2 dev h1-eth0 src 1.0.0.1' )
    net['h2'].cmd( 'ip route add 1.0.0.1 encap seg6 mode encap segs 2001::1 dev h2-eth0 src 2.0.0.2' )
    print 'h1 routing table:'
    print net['h1'].cmd( 'ip -6 route' )
    print 'h2 routing table:'
    print net['h2'].cmd( 'ip -6 route' )
    CLI( net )
    net.stop()

if __name__ == '__main__':
    setLogLevel( 'info' )
    run()
|
{"/mininet/topo-6r.py": ["/mininet/topo.py"]}
|
22,695
|
ljm625/p4-srv6-usid
|
refs/heads/master
|
/mininet/host6.py
|
# Copyright 2019-present Open Networking Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mininet.node import Host
class IPv6Host(Host):
def config(self, ipv6, ipv6_gw=None, **params):
super(IPv6Host, self).config(**params)
self.cmd('ip -4 addr flush dev %s' % self.defaultIntf())
self.cmd('ip -6 addr flush dev %s' % self.defaultIntf())
self.cmd('ip -6 addr add %s dev %s' % (ipv6, self.defaultIntf()))
if ipv6_gw:
self.cmd('ip -6 route add default via %s' % ipv6_gw)
def updateIP():
return ipv6.split('/')[0]
self.defaultIntf().updateIP = updateIP
def terminate(self):
# self.cmd( 'sysctl -w net.ipv6.conf.all.forwarding=0' )
super(IPv6Host, self).terminate()
class SRv6Host(IPv6Host):
def config(self, ipv6, ipv6_gw=None, **params):
super(IPv6Host, self).config(**params)
# Enable SRv6
self.cmd('sysctl -w net.ipv6.conf.all.seg6_enabled=1')
self.cmd('sysctl -w net.ipv6.conf.%s.seg6_enabled=1' % self.defaultIntf())
|
{"/mininet/topo-6r.py": ["/mininet/topo.py"]}
|
22,768
|
enanablancaynumeros/company_test_1
|
refs/heads/master
|
/api/setup.py
|
from setuptools import setup, find_packages
# Minimal packaging metadata for the "api" package.
setup(
    name="api",
    # Versions should comply with PEP440. For a discussion on single-sourcing
    # the version across setup.py and the project code, see
    # https://packaging.python.org/en/latest/single_source_version.html
    version="1.0.0",
    # Ship non-code package data (e.g. templates) alongside the modules.
    include_package_data=True,
    packages=find_packages(exclude=["docs", "tests*"]),
)
|
{"/test_roman_calculator.py": ["/api/roman_api/roman_calculator.py"], "/api/roman_api/app.py": ["/api/roman_api/roman_calculator.py"]}
|
22,769
|
enanablancaynumeros/company_test_1
|
refs/heads/master
|
/api/roman_api/roman_calculator.py
|
import ast
import operator
class InvalidRomanInput(Exception):
    """Raised when a string is not a well-formed Roman numeral."""


class InvalidCalculatorInput(Exception):
    """Raised when an arithmetic expression cannot be evaluated."""


# Value of each single Roman digit.
roman_int_map = {"I": 1, "V": 5, "X": 10, "L": 50, "C": 100, "D": 500, "M": 1000}


def char_to_int(char: str) -> int:
    """Map one Roman digit to its integer value, or raise InvalidRomanInput."""
    try:
        return roman_int_map[char]
    except KeyError as e:
        raise InvalidRomanInput(str(e))


def roman_to_int(roman_text: str) -> int:
    """Convert a Roman numeral string to an integer (subtractive notation)."""
    if not roman_text:
        raise InvalidRomanInput(roman_text)
    # Walk right-to-left starting from the last digit: a digit smaller than
    # the one to its right is subtracted (e.g. the I in IV).
    total = char_to_int(roman_text[-1])
    for i, char in enumerate(reversed(roman_text[:-1])):
        current_number = char_to_int(char)
        previous_number = char_to_int(roman_text[len(roman_text) - i - 1])
        if current_number >= previous_number:
            total += current_number
        else:
            total -= current_number
    return total
def int_to_roman(number: int) -> str:
    """Convert a non-negative integer to its Roman numeral representation."""
    # Greedy expansion: take each denomination (including subtractive pairs
    # like CM = 900) as many times as it fits, largest first.
    denominations = [
        (1000, "M"), (900, "CM"), (500, "D"), (400, "CD"),
        (100, "C"), (90, "XC"), (50, "L"), (40, "XL"),
        (10, "X"), (9, "IX"), (5, "V"), (4, "IV"), (1, "I"),
    ]
    pieces = []
    for value, letter in denominations:
        count, number = divmod(number, value)
        pieces.append(letter * count)
    return "".join(pieces)
def roman_calculator(roman_text: str) -> str:
    """Evaluate a Roman-numeral arithmetic expression, returning the result
    as a Roman numeral.

    To simplify error feedback, any failure is reported uniformly as
    InvalidCalculatorInput.
    """
    try:
        return int_to_roman(Calc.evaluate(roman_text))
    except Exception:
        raise InvalidCalculatorInput('Invalid expression')
# AST operator node -> binary function.  Note the calculator's semantics:
# '^' (BitXor) is used as exponentiation and '/' is floor division.
_OP_MAP = {
    ast.Add: operator.add,
    ast.Sub: operator.sub,
    ast.Mult: operator.mul,
    ast.Div: operator.floordiv,
    ast.BitXor: operator.pow,
}


class Calc(ast.NodeVisitor):
    """AST walker that evaluates arithmetic over Roman-numeral identifiers.

    Extended from https://stackoverflow.com/questions/33029168/how-to-calculate-an-equation-in-a-string-python
    """

    def visit_BinOp(self, node):
        # Evaluate both operands, then dispatch on the operator node type.
        return _OP_MAP[type(node.op)](self.visit(node.left), self.visit(node.right))

    def visit_Name(self, node):
        # Bare identifiers are Roman numerals (e.g. XIV).
        return roman_to_int(node.id)

    def visit_Expr(self, node):
        return self.visit(node.value)

    @classmethod
    def evaluate(cls, expression):
        """Parse *expression* and evaluate its first statement."""
        return cls().visit(ast.parse(expression).body[0])
|
{"/test_roman_calculator.py": ["/api/roman_api/roman_calculator.py"], "/api/roman_api/app.py": ["/api/roman_api/roman_calculator.py"]}
|
22,770
|
enanablancaynumeros/company_test_1
|
refs/heads/master
|
/test_roman_calculator.py
|
import pytest
from api.roman_api.roman_calculator import (
roman_to_int,
InvalidRomanInput,
InvalidCalculatorInput,
int_to_roman,
roman_calculator,
)
# Known-good Roman numeral -> integer conversions.
@pytest.mark.parametrize(
    "roman_text,expected",
    [
        ("I", 1),
        ("III", 3),
        ("IV", 4),
        ("VI", 6),
        ("XI", 11),
        ("XXII", 22),
        ("CCXXVI", 226),
        ("CMXC", 990),
        ("MDCCXII", 1712),
        ("MMXIX", 2019),
    ],
)
def test_to_integer_nice_input(roman_text, expected):
    assert roman_to_int(roman_text) == expected

def test_roman_to_int_exception():
    # 'O' is not a Roman digit.
    with pytest.raises(InvalidRomanInput):
        roman_to_int("XO")

def test_roman_to_int_exception_empty():
    # The empty string is rejected rather than treated as zero.
    with pytest.raises(InvalidRomanInput):
        roman_to_int("")

# Integer -> Roman numeral conversions of the same fixtures.
@pytest.mark.parametrize(
    "number,expected",
    [
        (1, "I"),
        (3, "III"),
        (4, "IV"),
        (6, "VI"),
        (11, "XI"),
        (22, "XXII"),
        (226, "CCXXVI"),
        (990, "CMXC"),
        (1712, "MDCCXII"),
        (2019, "MMXIX"),
    ],
)
def test_to_roman_nice_input(number, expected):
    assert int_to_roman(number) == expected

# End-to-end expression evaluation ('^' is power, '/' floor division).
@pytest.mark.parametrize(
    "text,expected",
    [
        ("I + I", "II"),
        ("III - I", "II"),
        ("IV * IV", "XVI"),
        ("VI ^ II", "XXXVI"),
        ("XI - II + X", "XIX"),
        ("(XXI - (II * X))", "I"),
        ("XXII - II * X", "II"),
    ],
)
def test_calculator(text, expected):
    assert roman_calculator(text) == expected

def test_calculator_exception():
    # Plain digits are not valid input; only Roman identifiers are.
    with pytest.raises(InvalidCalculatorInput):
        roman_calculator("2 + 2")
|
{"/test_roman_calculator.py": ["/api/roman_api/roman_calculator.py"], "/api/roman_api/app.py": ["/api/roman_api/roman_calculator.py"]}
|
22,771
|
enanablancaynumeros/company_test_1
|
refs/heads/master
|
/api/roman_api/app.py
|
import os
from flask import Flask, render_template, request, flash
from flask_wtf import FlaskForm
from flask_bootstrap import Bootstrap
from wtforms import SubmitField, StringField
from .roman_calculator import roman_calculator, InvalidCalculatorInput
app = Flask(__name__)
bootstrap = Bootstrap(app)
app.config["SECRET_KEY"] = os.environ["APP_SECRET_KEY"]
@app.route("/", methods=["GET"])
def roman_calculator_get():
form = RomanCalculatorForm()
return render_template("roman.html", form=form)
@app.route("/", methods=["POST"])
def roman_calculator_post():
form = RomanCalculatorForm(request.form)
if form.validate_on_submit():
try:
roman_result = roman_calculator(form.data["roman_text"])
flash(f'The result of "{form.data["roman_text"]}" is "{roman_result}"')
except InvalidCalculatorInput:
flash(f'Invalid input {form.data["roman_text"]}')
return render_template("roman.html", form=form)
class RomanCalculatorForm(FlaskForm):
roman_text = StringField("Roman input")
submit = SubmitField("Submit")
if __name__ == "__main__":
port = os.environ["API_PORT"]
app.run(host="localhost", port=port)
|
{"/test_roman_calculator.py": ["/api/roman_api/roman_calculator.py"], "/api/roman_api/app.py": ["/api/roman_api/roman_calculator.py"]}
|
22,779
|
undera/customfunctions
|
refs/heads/master
|
/calculatefn.py
|
import logging
from graphite.render.datalib import TimeSeries
from graphite.render import functions
def centered_mov_avg(requestContext, seriesList, windowSize):
    """Graphite custom render function: centered moving average per series.

    windowSize may be an interval string (e.g. "10min") or a point count.
    The averaging window for point i is centered on i (half before, half
    after), using bootstrapped history so the leading edge has data.
    NOTE: Python 2 code (``basestring``, integer ``/`` division).
    """
    windowInterval = None
    if isinstance(windowSize, basestring):
        # Interval string: convert to absolute seconds.
        delta = functions.parseTimeOffset(windowSize)
        windowInterval = abs(delta.seconds + (delta.days * 86400))
    if windowInterval:
        bootstrapSeconds = windowInterval
    else:
        # Point count: size the extra history by the coarsest series step.
        bootstrapSeconds = max([s.step for s in seriesList]) * int(windowSize)
    bootstrapList = functions._fetchWithBootstrap(requestContext, seriesList, seconds=bootstrapSeconds)
    result = []
    for bootstrap, series in zip(bootstrapList, seriesList):
        if windowInterval:
            windowPoints = windowInterval / series.step
        else:
            windowPoints = int(windowSize)
        if isinstance(windowSize, basestring):
            newName = 'centeredMovingAverage(%s,"%s")' % (series.name, windowSize)
        else:
            newName = "centeredMovingAverage(%s,%s)" % (series.name, windowSize)
        newSeries = TimeSeries(newName, series.start, series.end, series.step, [])
        newSeries.pathExpression = newName
        # The bootstrap is the series plus leading history; offset aligns
        # bootstrap indexes with series indexes.
        offset = len(bootstrap) - len(series)
        logging.info("Offset: %s", offset)
        logging.info("windowPoints: %s", windowPoints)
        for i in range(len(series)):
            # Window centered on i (relies on Python 2 integer division
            # for windowPoints / 2).
            window = bootstrap[i + offset - windowPoints + windowPoints / 2:i + offset + windowPoints / 2]
            logging.info("window: %s", len(window))
            newSeries.append(functions.safeAvg(window))
        result.append(newSeries)
    return result
def percentileOfSeries(requestContext, *args):
    """Graphite custom render function: one percentile series per level.

    Positional args may be numbers (percentile levels), strings of
    ';'-separated levels, or series lists; everything else is treated as
    series input.  NOTE: Python 2 code (``basestring``, ``long``).
    """
    levels = []
    seriesList = []
    for arg in args:
        logging.info("Arg: %s", arg)
        if isinstance(arg, (int, long, float)):
            levels.append(arg)
        elif isinstance(arg, basestring):
            levels += [float(x) for x in arg.split(";")]
        else:
            seriesList += arg
    logging.info("Levels: %s", levels)
    logging.info("Series: %s", seriesList)
    result = []
    for level in levels:
        # BUG FIX: the original tested ``levels <= 0`` (the whole list),
        # which under Python 2's mixed-type ordering never fired; the
        # individual level is what must be validated.
        if level <= 0:
            raise ValueError('The requested percent is required to be greater than 0')
        name = 'percentilesOfSeries(%s,%g)' % (seriesList[0].pathExpression, level)
        (start, end, step) = functions.normalize([seriesList])[1:]
        values = [functions._getPercentile(row, level, False) for row in functions.izip(*seriesList)]
        resultSeries = TimeSeries(name, start, end, step, values)
        resultSeries.pathExpression = name
        result.append(resultSeries)
    return result
|
{"/__init__.py": ["/calculatefn.py", "/seglinr.py"]}
|
22,780
|
undera/customfunctions
|
refs/heads/master
|
/__init__.py
|
import logging
from graphite.render.functions import SeriesFunctions
import calculatefn
import seglinr
# Register the custom render functions under their Graphite names.
SeriesFunctions['segLinReg'] = seglinr.seg_lin_reg
SeriesFunctions['segLinRegAuto'] = seglinr.seg_lin_reg_auto
SeriesFunctions['centeredMovingAverage'] = calculatefn.centered_mov_avg
SeriesFunctions['percentileOfEnumeratedSeries'] = calculatefn.percentileOfSeries
# Plugin-wide logging setup.
logging.basicConfig(format='%(asctime)s\t%(message)s', level=logging.INFO)
|
{"/__init__.py": ["/calculatefn.py", "/seglinr.py"]}
|
22,781
|
undera/customfunctions
|
refs/heads/master
|
/seglinr.py
|
import logging
import seglinreg
def seg_lin_reg(request_context, series_list, segment_count=3):
    """
    Graphs the segmented linear regression
    requires python-numpy python-scipy
    segmentCount must be > 2
    """
    # Each series is replaced IN PLACE by its fitted regression values.
    for series in series_list:
        series.name = "segLinReg(%s,%s)" % (series.name, segment_count)
        #series.pathExpression = series.name
        # Pair every datapoint with its index for the regression input.
        s = [(i, value) for i, value in enumerate(series)]
        #logging.info("Source: %s", s)
        regr = seglinreg.SegLinReg(segment_count)
        regr.first_pass_breakpoint_ratio = 10
        res = regr.calculate(s)
        logging.info("Result: %s", res)
        # Blank the series, then write back the fitted regression points
        # at their original indexes.
        for i, value in enumerate(series):
            series[i] = None
        for k, v in res.get_regression_data():
            series[int(k)] = v
        logging.info("Output: %s", len(series))
    return series_list
def seg_lin_reg_auto(request_context, series_list, segment_count=10, threshold=None):
    """
    Graphs the segmented linear regression up to
    requires python-numpy python-scipy
    segmentCount must be > 2
    """
    # Auto variant: SegLinRegAuto picks the segment count (bounded by
    # *segment_count*); *threshold* optionally overrides its R^2 cutoff.
    for series in series_list:
        series.name = "segLinRegAuto(%s,%s)" % (series.name, segment_count)
        #series.pathExpression = series.name
        # Pair every datapoint with its index for the regression input.
        s = [(i, value) for i, value in enumerate(series)]
        #logging.info("Source: %s", s)
        regr = seglinreg.SegLinRegAuto(segment_count)
        if threshold:
            regr.r2_threshold = threshold
        res = regr.calculate(s)
        logging.info("Result: %s", res)
        # Blank the series, then write back the fitted regression points
        # at their original indexes.
        for i, value in enumerate(series):
            series[i] = None
        for k, v in res.get_regression_data():
            series[int(k)] = v
        logging.info("Output: %s", len(series))
    return series_list
|
{"/__init__.py": ["/calculatefn.py", "/seglinr.py"]}
|
22,783
|
terrytsan/OccupancyDetection
|
refs/heads/master
|
/Body.py
|
class Body:
    """A tracked person: current centroid, movement history and direction."""

    def __init__(self, id, location):
        # History of the body's past centroid locations [x, y].
        self.visited = []
        # Current centroid location of the body.
        self.location = location
        self.ID = id
        # 0 = moving out of the train (down the screen), 1 = the opposite.
        self.direction = 0
        # Whether the body has crossed the counting line.
        self.crossedLine = False

    def determine_direction(self):
        """Set direction from the average past y versus the current y."""
        mean_y = sum(point[1] for point in self.visited) / len(self.visited)
        # Average above (numerically less than) the current y means the
        # body has, overall, moved down the frame.
        self.direction = 0 if mean_y < self.location[1] else 1

    def update_location(self, location):
        """Record a new centroid and refresh the direction estimate."""
        self.location = location
        self.visited.append(location)
        self.determine_direction()
|
{"/ObjectTrackerTest.py": ["/BodyTracker.py"], "/BodyTracker.py": ["/Body.py"], "/ObjectDetection.py": ["/BodyTracker.py", "/Body.py"]}
|
22,784
|
terrytsan/OccupancyDetection
|
refs/heads/master
|
/ObjectTrackerTest.py
|
# This is used to test the ObjectTracker class
import cv2
import numpy as np
from BodyTracker import BodyTracker
# Exercise the tracker with two successive "frames" of detection rectangles.
obTrack = BodyTracker()
# rectangles = []
rectangles = [[300, 200, 50, 50], [104, 190, 50, 50], [600, 300, 50, 50]]
objects = obTrack.update(rectangles)
# Second frame: some boxes moved, some are new detections.
rectangles = [[0, 220, 50, 50], [600, 320, 50, 50], [100, 230, 50, 50], [50, 50, 50, 50], [0, 0, 50, 50]]
objects = obTrack.update(rectangles)
# x is the ID and y is the body (an class instance)
for x, y in objects.items():
    print("Return of Body Tracker:", x, y.location)
|
{"/ObjectTrackerTest.py": ["/BodyTracker.py"], "/BodyTracker.py": ["/Body.py"], "/ObjectDetection.py": ["/BodyTracker.py", "/Body.py"]}
|
22,785
|
terrytsan/OccupancyDetection
|
refs/heads/master
|
/BodyTracker.py
|
# A body tracker keeps track of a set of rectangles it is given, assigning unique IDs to each one
# update the tracker with some new rectangles and it will return a dictionary of bodies
from scipy.spatial import distance as dist
import numpy as np
from Body import Body
import logging
# Logging configuration
logger = logging.getLogger(__name__)
class BodyTracker:
def __init__(self):
    # initialise variables when the tracker is first created
    # Next unique ID to assign to a newly tracked body.
    self.currentBodyID = 0
    # Dictionary which holds body objects (holding ID makes things easier) (might be able to change to array in future)
    self.bodies = {}
    # Dictionary holding the amount of frames a corresponding body has been "missing" for.
    self.disappearedTime = {}
    # Maximum time a body can go "missing" for before tracking ends
    self.maxDisTime = 10
    # The maximum distance a body can move between frames
    self.max_dist = 150
    logger.info("hello")
# Create a body with the centroid and start tracking it
def start_track(self, centroid):
    """Register *centroid* as a new Body under a fresh unique ID."""
    # Create a new body object
    body = Body(self.currentBodyID, centroid)
    # Add the new body to the list of bodies
    self.bodies[self.currentBodyID] = body
    logger.info(f"Registered {self.bodies[self.currentBodyID].location} as {self.bodies[self.currentBodyID].ID}")
    # Initialise a disappearedTime for the new body
    self.disappearedTime[self.currentBodyID] = 0
    # Increment the ID
    self.currentBodyID = self.currentBodyID + 1
# Stop tracking the passed in body
def end_track(self, body_id):
# Remove both instances of the body from the dictionaries
del self.bodies[body_id]
del self.disappearedTime[body_id]
# Updates the list of tracked bodies, pass in current frames's rectangles
def update(self, rectangles):
if len(rectangles) == 0:
# If nothing is input, increment disappeared time of all objects
logger.debug("Nothing input")
for bodyID in list(self.bodies.keys()):
self.disappearedTime[bodyID] += 1
if self.disappearedTime[bodyID] > self.maxDisTime:
self.end_track(bodyID)
# Leave the function
return self.bodies
# initialize array to hold all the centroids for the inputted rectangles
input_centroids = np.zeros((len(rectangles), 2), dtype="int")
# Convert the inputted rectangles to centroids
for (rec, (startX, startY, endX, endY)) in enumerate(rectangles):
# print("new", startX, startY, endX, endY)
x_centroid = int((startX + endX) / 2)
y_centroid = int((startY + endY) / 2)
input_centroids[rec] = (x_centroid, y_centroid)
# If there are currently no tracked objects
if len(self.bodies) == 0:
# print("No objects, adding", len(rectangles))
logger.debug(f"No objects, adding: {len(rectangles)}")
for centroid in input_centroids:
# Start tracking all inputted rectangles
self.start_track(centroid)
else:
# Try and approximate the new centroids to the tracked bodies (based on distance)
# Holds all the currently used object IDs (some may have disappeared)
body_ids = list(self.bodies.keys())
existing_centroids = []
for body in self.bodies.values():
existing_centroids.append(body.location)
# Calculate distances between each centroid
distance = dist.cdist(np.array(existing_centroids), input_centroids)
logger.debug(f"distance array:\n {distance} \n")
# Gets the index of the shortest distance in each row and then orders them in ascending order.
# Each entry represents the index of input centroid with the shortest distance to the corresponding
# (already) tracked centroid
distance_min_row = distance.min(axis=1).argsort()
# Do the same for the columns
distance_min_col = distance.argmin(axis=1)[distance_min_row]
# List of x,y coords (in matrix) for the minimum values (1 per row)
min_coords = list(zip(distance_min_row, distance_min_col))
# Holds all possible indexes of objects and input_Centroids so that the same centroid isn't used twice
remaining_x = set(list(range(0, len(body_ids))))
remaining_y = set(list(range(0, len(input_centroids))))
# Go through each row coord and assign bodies[row] with with a new coordinate (the input centroid)
for (x, y) in min_coords:
if (x in remaining_x) and (y in remaining_y):
if distance[x][y] < self.max_dist:
# Replace the existing centroid with the new input centroid with smallest distance
self.bodies[body_ids[x]].update_location(input_centroids[y])
# Reset the disappeared time
self.disappearedTime[body_ids[x]] = 0
# Remove x and y from remaining set
remaining_x.remove(x)
remaining_y.remove(y)
# Go through all the remaining original objects (no match has been found in this new frame)
for x in remaining_x:
# Increment disappeared time
self.disappearedTime[body_ids[x]] += 1
# Check if time value has exceeded limit
logging.info(f"{self.bodies[body_ids[x]].location} {self.bodies[body_ids[x]].ID}. time till disapeared: {self.disappearedTime[body_ids[x]]}")
if self.disappearedTime[body_ids[x]] > self.maxDisTime:
logging.info(f"{self.bodies[body_ids[x]].location} {self.bodies[body_ids[x]].ID} has disappeared")
self.end_track(body_ids[x])
# Go through the remaining input centroids that weren't matched and register them as new objects
for y in remaining_y:
self.start_track(input_centroids[y])
# return the dictionary of tracked objects
return self.bodies
|
{"/ObjectTrackerTest.py": ["/BodyTracker.py"], "/BodyTracker.py": ["/Body.py"], "/ObjectDetection.py": ["/BodyTracker.py", "/Body.py"]}
|
22,786
|
terrytsan/OccupancyDetection
|
refs/heads/master
|
/ObjectDetection.py
|
import logging
import cv2
import numpy as np
from BodyTracker import BodyTracker
from Body import Body
# constants
video = "example_01.mp4"
#video = "marbles5.mp4"
# Frames are downscaled by this factor before processing.
videoScaleFactor = 0.4
# videoScaleFactor = 1
# minimum area of contour before they are considered
minArea = 800
# Toggle writing output to file
writeToFile = True
# Create an object tracker object
bodTrack = BodyTracker()
# Running totals of line crossings in each direction.
# Number of people on the train
totalDown = 0
totalUp = 0
# Logging config. Disable with logging.CRITICAL
# Create logger
logger = logging.getLogger()
logger.setLevel(logging.INFO)
# Create console handler
handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)
# Create formatter
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(handler)
# End of logging config
def line_crossed(body):
    """Return True when *body* has just moved across the horizontal line.

    Compares the previously tracked y coordinate with the current one;
    the crossing must agree with body.direction (0 = down, 1 = up).
    Uses the module-level line_y as the line's y coordinate.
    """
    # A body seen for the first time has no previous point to compare.
    if len(body.visited) < 2:
        return False
    previous_y = body.visited[-2][1]
    current_y = body.location[1]
    # Moving down: previous point at or above the line, current below it.
    if body.direction == 0 and previous_y <= line_y < current_y:
        logger.info(f"{body.ID} has crossed the line (down)")
        return True
    # Moving up: previous point at or below the line, current above it.
    if body.direction == 1 and previous_y >= line_y > current_y:
        logger.info(f"{body.ID} has crossed the line (up)")
        return True
    return False
# Draws the contours, bounding boxes, tracked-body labels, the crossing
# line and the up/down totals onto input_image (modified in place) and
# returns it.  Also advances the tracker and the global crossing counters,
# so it should be called exactly once per frame.
def draw_graphics(contours, input_image):
    # This function mutates the module-level crossing totals.
    global totalUp
    global totalDown
    # Draw the contours
    contour_color = (256, 0, 250)
    cv2.drawContours(input_image, contours, -1, contour_color, cv2.LINE_4)
    # holds all the rectangles (to be passed into the object tracker)
    rectangles = []
    rect_count = 0
    # Approximate a bounding rectangle around each contour
    for c in contours:
        x, y, w, h = cv2.boundingRect(c)
        rect_count = rect_count + 1
        # draw the rectangle on the passed in image
        cv2.rectangle(input_image, (x, y), (x + w, y + h), (0, 255, 0), 2)
        rectangles.append([x, y, x + w, y + h])
        # Print the area of the contour next to the rectangle
        cv2.putText(input_image, str(cv2.contourArea(c)), (x - 1, y - 1), cv2.FONT_HERSHEY_PLAIN, 2, (0, 255, 0))
    # Get a list of tracked objects (this advances the tracker state)
    tracked_bodies = bodTrack.update(rectangles)
    # Draw the crossing line
    cv2.line(input_image, (0, line_y), (500, line_y), (0, 255, 0), 3)
    # Write text on the centroid
    for (ID, body) in tracked_bodies.items():
        body_x = body.location[0]
        body_y = body.location[1]
        # 0 is down
        if body.direction == 0:
            direction = "down"
        else:
            direction = "up"
        # Count the crossing in whichever direction the body was moving.
        if line_crossed(body):
            # If direction is down (leaving)
            if body.direction == 0:
                totalDown += 1
            # If direction is up (boarding)
            if body.direction == 1:
                totalUp += 1
        trackedObjectText = ("ID: %s %s" % (ID, direction))
        cv2.putText(input_image, (trackedObjectText), (body_x - 10, body_y - 10),
                    cv2.FONT_HERSHEY_PLAIN, 1, (0, 255, 0), 2)
        # rectangle indicating centroid
        cv2.rectangle(input_image, (body_x, body_y - 1), (body_x + 1, body_y + 1), (0, 255, 0), 2)
    # Print out the number of rectangles in current frame
    cv2.putText(input_image, str(rect_count), (50, 50), cv2.FONT_HERSHEY_PLAIN, 2, (0, 255, 0))
    # Print out the number of people on board
    totalUpText = ("Up: %s" % (totalUp))
    totalDownText = ("Down: %s" % (totalDown))
    cv2.putText(input_image, totalUpText, (100, 50), cv2.FONT_HERSHEY_PLAIN, 2, (255, 0, 0))
    cv2.putText(input_image, totalDownText, (200, 50), cv2.FONT_HERSHEY_PLAIN, 2, (255, 255, 0))
    return input_image
def find_contours(input_image):
    """Find the outermost contours in *input_image* larger than minArea.

    Returns a list of contours; small detections (noise) are discarded.
    """
    contours, _hierarchy = cv2.findContours(input_image, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    # Keep only contours whose area exceeds the configured minimum.
    return [contour for contour in contours if cv2.contourArea(contour) > minArea]
# Perform background subtraction on input_frame using subtractor_function,
# then clean the mask up (opening, thresholding, dilation).  Shows the
# intermediate masks in their own windows and returns the dilated mask.
def subtract_background(input_frame, subtractor_function):
    # get dimensions of the window (fix positioning of the other windows)
    blobX, blobY, blobW, blobH = cv2.getWindowImageRect('Original frame')
    # Background subtraction
    foregroundMask = subtractor_function.apply(input_frame, None, -1)
    # opening removes false positives (white dots in background - the noise)
    kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (2, 2))
    foregroundMask = cv2.morphologyEx(foregroundMask, cv2.MORPH_OPEN, kernel)
    # closing removes false negatives (black dots in actual object)
    subKernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (2, 2))
    # foregroundMask = cv2.morphologyEx(foregroundMask, cv2.MORPH_CLOSE, subKernel)
    # threshold the frame - removes the random large changes
    # (shadow pixels from the MOG2 subtractor fall below this threshold)
    ret, frameThresh = cv2.threshold(foregroundMask, 200, 255, cv2.THRESH_TOZERO)
    # Further noise reduction/dilation
    EKernel = np.ones((2, 2), np.uint8)
    DKernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3))
    # erosion = cv2.erode(frameThresh, EKernel, iterations=1) # gets rid of things
    # try and fill the gaps in objects
    dilation = cv2.dilate(frameThresh, DKernel, iterations=4) # makes things more pronounced
    # Show the intermediate steps as windows
    cv2.imshow('Blob frame', foregroundMask)
    cv2.moveWindow('Blob frame', blobX + blobW, 0)
    cv2.imshow('Threshold', frameThresh)
    cv2.moveWindow('Threshold', blobX + (2 * blobW), 0)
    # cv2.imshow('Dilation & Erosion', dilation)
    # cv2.moveWindow('Dilation & Erosion', blobX + blobW, + blobY + blobH)
    return dilation
# Load the video
cap = cv2.VideoCapture(video)
# cv.Flip(frame, flipMode=-1)
# want to detect shadows so they can be thresholded
subtractor = cv2.createBackgroundSubtractorMOG2(history=10, varThreshold=20, detectShadows=1)
subtractorTwo = cv2.createBackgroundSubtractorKNN()
subtractor.setShadowThreshold(0.7)
logger.debug(f"Shadow threshold: {subtractor.getShadowThreshold()}")
subtractor.setBackgroundRatio(0.5)
logger.debug(f"Background ratio: {subtractor.getBackgroundRatio()}")
frame_w = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
frame_h = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
writer = cv2.VideoWriter('output.avi', cv2.VideoWriter_fourcc(*'MJPG'), 30, (int(frame_w * videoScaleFactor), int(frame_h * videoScaleFactor)))
# Set the line to be half way
line_y = int((frame_h * videoScaleFactor * 0.5))
# Play the video
while 1:
    ret, frame = cap.read()
    if not ret:
        break
    # resize frame
    frame = cv2.resize(frame, (0, 0), fx=videoScaleFactor, fy=videoScaleFactor)
    # frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    blurred_frame = cv2.GaussianBlur(frame, (7, 7), 0)
    # Perform background subtraction
    subtracted_frame = subtract_background(blurred_frame, subtractor)
    # This section displays the frames
    # show the two frames side by side (appears to be a video)
    cv2.imshow('Original frame', frame)
    cv2.moveWindow('Original frame', 0, 0)
    # get dimensions of the window (fix positioning of the other windows
    blobX, blobY, blobW, blobH = cv2.getWindowImageRect('Original frame')
    blurredDilation = cv2.GaussianBlur(subtracted_frame, (7, 7), 0)
    cv2.imshow('Blurred Dilation', blurredDilation)
    cv2.moveWindow('Blurred Dilation', 0, + blobY + blobH)
    # Get the contours in the image
    found_contours = find_contours(blurredDilation)
    blurredDilation_contours = cv2.cvtColor(blurredDilation,cv2.COLOR_GRAY2BGR)
    contour_color = (256, 0, 250)
    blurredDilation_contours = cv2.drawContours(blurredDilation_contours, found_contours, -1, color=contour_color, thickness=cv2.LINE_4)
    cv2.imshow('Contours', blurredDilation_contours)
    cv2.moveWindow('Contours', blobX + blobW, + blobY + blobH)
    blank_image = np.zeros((frame.shape[0], frame.shape[1], 3), dtype=np.uint8)
    cv2.imshow('Final output', draw_graphics(found_contours, frame))
    cv2.moveWindow('Final output', blobX + (2 * blobW), blobY + blobH)
    if writeToFile:
        # NOTE(review): this second draw_graphics call re-runs the tracker
        # and the crossing counters for the same frame, so totalUp/totalDown
        # are double-counted whenever writeToFile is True — verify intended.
        writer.write(draw_graphics(found_contours, frame).astype('uint8'))
    # was 15 before
    if cv2.waitKey(40) == 13:
        break
cap.release()
cv2.destroyAllWindows()
|
{"/ObjectTrackerTest.py": ["/BodyTracker.py"], "/BodyTracker.py": ["/Body.py"], "/ObjectDetection.py": ["/BodyTracker.py", "/Body.py"]}
|
22,796
|
SteveMitto/instagram
|
refs/heads/master
|
/insta/migrations/0010_auto_20191011_1826.py
|
# Generated by Django 2.2.5 on 2019-10-11 15:26
from django.conf import settings
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: rename the `follows` model to `Follow`."""

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('insta', '0009_auto_20191011_1638'),
    ]
    operations = [
        migrations.RenameModel(
            old_name='follows',
            new_name='Follow',
        ),
    ]
|
{"/insta/views.py": ["/insta/models.py", "/insta/forms.py"], "/insta/admin.py": ["/insta/models.py"], "/insta/forms.py": ["/insta/models.py"]}
|
22,797
|
SteveMitto/instagram
|
refs/heads/master
|
/insta/migrations/0005_auto_20191011_1434.py
|
# Generated by Django 2.2.5 on 2019-10-11 11:34
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: drop the Comment, Image and Like models.

    Foreign-key fields are removed first so the model deletions can
    proceed without dangling references.
    """

    dependencies = [
        ('insta', '0004_comment_image_like'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='image',
            name='profile',
        ),
        migrations.RemoveField(
            model_name='like',
            name='image',
        ),
        migrations.RemoveField(
            model_name='like',
            name='person',
        ),
        migrations.DeleteModel(
            name='Comment',
        ),
        migrations.DeleteModel(
            name='Image',
        ),
        migrations.DeleteModel(
            name='Like',
        ),
    ]
|
{"/insta/views.py": ["/insta/models.py", "/insta/forms.py"], "/insta/admin.py": ["/insta/models.py"], "/insta/forms.py": ["/insta/models.py"]}
|
22,798
|
SteveMitto/instagram
|
refs/heads/master
|
/insta/views.py
|
from django.shortcuts import render,redirect,get_object_or_404
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
from .models import Image,Like,Comment,Profile,Tags,Follow
from django.http import JsonResponse
from .forms import UpdateProfile,UpdateProfilePhoto,PostImage
# Create your views here.
from django.contrib.auth.decorators import login_required
from django import template
import json
import random
register = template.Library()
@login_required
def home(request):
    """Render the feed: all images, likes, comments, who the user follows,
    and up to three suggested profiles.  A POST with a valid PostImage
    form creates a new Image owned by the logged-in user."""
    images=Image.objects.all()
    following = Follow.objects.filter(follow = request.user)
    likes = Like.objects.all()
    comments = Comment.objects.all()
    try:
        profiles =list(Profile.objects.all().exclude(user = request.user))
        all_profiles = random.sample(profiles,3)
    except ValueError:
        # Fewer than 3 other profiles exist: show them all instead.
        all_profiles =Profile.objects.all().exclude(user = request.user)
    if request.method == "POST":
        form = PostImage(request.POST,request.FILES)
        if form.is_valid():
            image_u =form.cleaned_data['image']
            name =form.cleaned_data['name']
            caption=form.cleaned_data['caption']
            post = Image(image =image_u,name=name,caption = caption,profile = request.user)
            post.save()
            return redirect('home')
    else:
        form = PostImage()
    context={
        'profiles':all_profiles,
        'images':images,
        'following':following,
        'likes':likes,
        'form':form,
        'comments':comments
    }
    return render(request,'index.html',context)
def signup(request):
    """Register a new user via Django's built-in UserCreationForm.

    On a valid POST the account is created and the visitor is redirected
    to the home page; otherwise the (possibly bound) form is re-rendered.
    """
    if request.method == 'POST':
        form = UserCreationForm(request.POST)
        if form.is_valid():
            # The original assigned `user = form` here but never used it.
            form.save()
            return redirect('home')
    else:
        form = UserCreationForm()
    context = {
        'form': form
    }
    return render(request, 'registration/signup.html', context)
@login_required
def profile(request, username):
    """Render a user's profile page with their posts and follow relations.

    follow_status is True when the logged-in user follows the profile
    owner, False when followers exist but none is the logged-in user,
    and None when the profile has no followers at all.
    """
    current_user = User.objects.filter(username=username).first()
    images = Image.objects.filter(profile=current_user)
    # Fall back to empty sequences if the lookups fail.  The original used
    # a bare `except:` here, which also swallowed KeyboardInterrupt etc.
    try:
        following = Follow.objects.filter(follow=current_user).all()
    except Exception:
        following = []
    try:
        followers = Follow.objects.filter(following=current_user)
    except Exception:
        followers = []
    # Fixed: the original set follow_status per follower in a loop, so the
    # LAST follower examined decided the value; use any() instead.
    follow_status = None
    if len(followers) > 0:
        follow_status = any(request.user == follower.follow for follower in followers)
    context = {
        'posts': images,
        'current_user': current_user,
        'followers': followers,
        'following': following,
        'follow_status': follow_status
    }
    return render(request, 'profile.html', context)
@login_required
def unfollow(request):
    """Delete the Follow relation from `me` to `you` (POST only).

    Fixed: .first() can return None when no such relation exists; the
    original then raised AttributeError on .delete().
    """
    if request.method == 'POST':
        me = request.POST['me']
        you = request.POST['you']
        relation = Follow.objects.filter(follow=me, following=you).first()
        if relation is not None:
            relation.delete()
        return JsonResponse({'unfollowed': True})
    return redirect('home')
@login_required
def follow(request):
    """Create a Follow relation from user `me` (follower) to `you` (POST only)."""
    if request.method != 'POST':
        return redirect('home')
    me = request.POST['me']
    you = request.POST['you']
    relation = Follow(follow=User.objects.get(pk=me), following=User.objects.get(pk=you))
    relation.save()
    return JsonResponse({'followed': True})
@login_required
def like(request, img_id):
    """Toggle the logged-in user's like on the image with pk *img_id*.

    Returns JSON {'img_id': ..., 'status': True} when a like was added,
    {'img_id': ..., 'status': False} when an existing like was removed.
    Fixed: `== None` -> `is None`; removed debug prints and an
    unreachable trailing print after both return paths.
    """
    if request.method == 'GET':
        image = Image.objects.get(pk=img_id)
        already_liked = Like.objects.filter(person=request.user, image=image).first()
        if already_liked is None:
            Like(image=image, person=request.user).save()
            return JsonResponse({'img_id': img_id, 'status': True})
        already_liked.delete()
        return JsonResponse({'img_id': img_id, 'status': False})
@login_required
def comment(request):
    """Save a comment (sent via GET) on an image and echo it back as JSON."""
    if request.method == 'GET':
        image_id = request.GET['imageId']
        text = request.GET['comment']
        target_image = Image.objects.get(pk=image_id)
        new_comment = Comment(person=request.user, comment=text, image=target_image)
        new_comment.save()
        return JsonResponse({'image_id': image_id, 'user': request.user.username, 'comment': text})
@login_required
def update_profile(request,username):
    """Edit the logged-in user's Profile (name, bio, website) via POST;
    on GET, render the edit form alongside the photo-upload form."""
    if request.method == 'POST':
        user = request.user
        form = UpdateProfile(request.POST)
        if form.is_valid():
            name= form.cleaned_data['name']
            bio= form.cleaned_data['bio']
            website= form.cleaned_data['website']
            # Update the existing profile in place rather than creating one.
            profile = Profile.objects.get(pk = user.profile.pk)
            profile.name = name
            profile.bio = bio
            profile.website = website
            profile.user = user
            print(profile)
            profile.save()
            return redirect('profile',username)
    else:
        form2 =UpdateProfilePhoto()
        form = UpdateProfile()
        context={
            'form':form,
            'form2':form2
        }
        return render(request,'edit-profile.html',context)
@login_required
def update_profile_pic(request,username):
    """Replace the logged-in user's profile picture via POST; any other
    method (or an invalid form) redirects back to the profile page."""
    if request.method == 'POST':
        form =UpdateProfilePhoto(request.POST,request.FILES)
        if form.is_valid():
            # photo = form.save(commit = False)
            # photo.user = request.user
            photo = form.cleaned_data['profile_pic']
            profile = Profile.objects.get(pk = request.user.profile.pk)
            profile.profile_pic = photo
            profile.save()
            print(photo)
            return redirect('profile',username)
    else:
        return redirect('profile',username)
@login_required
def search(request, search_term):
    """Case-insensitive username search; returns the matches as JSON.

    Each result carries the username, a JSON-encoded media path for the
    profile picture, and the profile's display name.  When nothing
    matches, 'notFound' is set alongside the empty result list.
    """
    matches = User.objects.filter(username__icontains=search_term)
    res = [
        {
            'username': match.username,
            'image': json.dumps("/media/" + str(match.profile.profile_pic)),
            'name': match.profile.name,
        }
        for match in matches
    ]
    if res:
        return JsonResponse({'results': res})
    return JsonResponse({'notFound': True, 'results': res})
@login_required
def post_details(request, id):
    """Render the detail page for one image, returning 404 on a bad pk."""
    post = get_object_or_404(Image, pk=id)
    return render(request, 'image-details.html', {'post': post})
|
{"/insta/views.py": ["/insta/models.py", "/insta/forms.py"], "/insta/admin.py": ["/insta/models.py"], "/insta/forms.py": ["/insta/models.py"]}
|
22,799
|
SteveMitto/instagram
|
refs/heads/master
|
/insta/models.py
|
from django.db import models as md
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.dispatch import receiver
class Tags(md.Model):
    """A free-text tag that can be attached to images."""
    tag = md.TextField()
    def __str__(self):
        return f'{self.tag}'
    class Meta:
        ordering=['tag']
class Image(md.Model):
    """An uploaded picture posted by a user (`profile` FK)."""
    image = md.ImageField(upload_to='articles/')
    name = md.CharField(max_length=70)
    caption = md.TextField()
    # The posting user; reverse accessor used by views as Image.objects.filter(profile=...)
    profile = md.ForeignKey(User,on_delete=md.CASCADE)
    posted_on = md.DateTimeField(auto_now_add=True)
    tags =md.ManyToManyField(Tags, blank = True)
    def __str__(self):
        return f'{self.name}'
    class Meta:
        ordering=['name']
    @property
    def likes(self):
        # Count of Like rows via the `image_likes` related_name.
        return self.image_likes.count()
class Like(md.Model):
    """A single user's like on a single image."""
    status = md.BooleanField(default=True)
    image = md.ForeignKey(Image ,on_delete=md.CASCADE,related_name='image_likes')
    person = md.ForeignKey(User ,on_delete=md.CASCADE,related_name='user_likes')
    def __str__(self):
        return '{} liked {}'.format(self.person.username, self.image)
    class Meta:
        ordering=['image']
class Comment(md.Model):
    """A user's text comment on an image."""
    comment=md.TextField()
    image = md.ForeignKey(Image,on_delete=md.CASCADE,related_name='image_comments')
    person = md.ForeignKey(User,on_delete=md.CASCADE)
    posted_on = md.DateTimeField(auto_now_add=True)
    def __str__(self):
        return f'{self.person.username}"s comment on {self.image} '
    class Meta:
        ordering=['image']
class Profile(md.Model):
    """Per-user profile data, auto-created when a User is saved.

    The post_save receivers below are defined inside the class body; the
    @receiver decorator runs at class creation, so they still register.
    """
    user = md.OneToOneField(User,on_delete=md.CASCADE)
    name = md.CharField(max_length = 100 ,blank = True)
    profile_pic= md.ImageField(upload_to='profile/',default = 'profile/default.jpg')
    bio = md.TextField(max_length=500,blank = True)
    website = md.URLField(blank = True)
    # NOTE(review): field name is misspelled ("acount_stauts"); renaming
    # would require a migration, so it is only flagged here.
    acount_stauts= md.BooleanField(default = False ,blank = True)
    @receiver(post_save,sender=User)
    def create_user_profile(sender,instance,created, **kwargs):
        # Create the companion Profile exactly once, on User creation.
        if created:
            Profile.objects.create(user=instance)
    @receiver(post_save,sender=User)
    def save_user_profile(sender,instance,**kwargs):
        # Keep the profile persisted whenever its User is saved.
        instance.profile.save()
    def __str__(self):
        return f'{self.user.username}s profile'
    @property
    def followers(self):
        # Follow rows pointing at this user via the `followers` related_name.
        return self.user.followers
class Follow(md.Model):
    """Directed follow relation: `follow` (the follower) -> `following`."""
    follow= md.ForeignKey(User ,on_delete=md.CASCADE , related_name='following')
    status=md.BooleanField(default=True)
    following = md.ForeignKey(User ,on_delete=md.CASCADE, related_name='followers')
    def __str__(self):
        return f'{self.follow} follows {self.following}'
|
{"/insta/views.py": ["/insta/models.py", "/insta/forms.py"], "/insta/admin.py": ["/insta/models.py"], "/insta/forms.py": ["/insta/models.py"]}
|
22,800
|
SteveMitto/instagram
|
refs/heads/master
|
/insta/urls.py
|
from django.urls import path
from . import views
from django.conf import settings
from django.conf.urls.static import static
# URL routes for the insta app.  The bare '<username>' profile route sits
# before the more specific ones; those still resolve because their paths
# contain '/' which the default str converter does not match.
urlpatterns = [
    path('',views.home ,name='home'),
    path('signup/',views.signup,name='signup'),
    path('<username>' , views.profile, name='profile'),
    path('unfollow/', views.unfollow),
    path('follow/',views.follow),
    path('like/<img_id>',views.like ,name='like'),
    path('comment/',views.comment,name='comment'),
    path('update/<username>',views.update_profile ,name= 'update_profile'),
    path('update_pic/<username>',views.update_profile_pic , name = 'update_profile_pic'),
    path('search/<search_term>',views.search),
    path('post_details/<id>',views.post_details, name="post_details")
]
# Serve user-uploaded media locally during development only.
if settings.DEBUG:
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
{"/insta/views.py": ["/insta/models.py", "/insta/forms.py"], "/insta/admin.py": ["/insta/models.py"], "/insta/forms.py": ["/insta/models.py"]}
|
22,801
|
SteveMitto/instagram
|
refs/heads/master
|
/insta/migrations/0011_auto_20191011_1828.py
|
# Generated by Django 2.2.5 on 2019-10-11 15:28
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: give Profile.profile_pic a default image."""

    dependencies = [
        ('insta', '0010_auto_20191011_1826'),
    ]
    operations = [
        migrations.AlterField(
            model_name='profile',
            name='profile_pic',
            field=models.ImageField(default='profile/default.jpg', upload_to='profile/'),
        ),
    ]
|
{"/insta/views.py": ["/insta/models.py", "/insta/forms.py"], "/insta/admin.py": ["/insta/models.py"], "/insta/forms.py": ["/insta/models.py"]}
|
22,802
|
SteveMitto/instagram
|
refs/heads/master
|
/insta/admin.py
|
from django.contrib import admin
from .models import Image,Like,Comment,Profile,Tags,Follow
# Register your models here.
# Expose every insta model in the Django admin with default ModelAdmins.
admin.site.register(Image)
admin.site.register(Profile)
admin.site.register(Comment)
admin.site.register(Tags)
admin.site.register(Follow)
admin.site.register(Like)
|
{"/insta/views.py": ["/insta/models.py", "/insta/forms.py"], "/insta/admin.py": ["/insta/models.py"], "/insta/forms.py": ["/insta/models.py"]}
|
22,803
|
SteveMitto/instagram
|
refs/heads/master
|
/insta/forms.py
|
from django.forms import ModelForm
from django.utils.translation import gettext_lazy as _
from .models import Profile,Image
class UpdateProfile(ModelForm):
    """Form for editing a Profile's name, bio and website."""
    class Meta:
        model = Profile
        fields = ['name','bio','website']
class UpdateProfilePhoto(ModelForm):
    """Form exposing only Profile.profile_pic (everything else excluded)."""
    class Meta:
        model=Profile
        exclude = ['user','name','bio','website','acount_stauts']
class PostImage(ModelForm):
    """Form for posting a new Image; owner, timestamp and tags are set elsewhere."""
    class Meta:
        model = Image
        exclude=['profile','posted_on','tags']
|
{"/insta/views.py": ["/insta/models.py", "/insta/forms.py"], "/insta/admin.py": ["/insta/models.py"], "/insta/forms.py": ["/insta/models.py"]}
|
22,804
|
SteveMitto/instagram
|
refs/heads/master
|
/insta/migrations/0012_auto_20191012_1809.py
|
# Generated by Django 2.2.5 on 2019-10-12 15:09
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: add Profile.website, alter profile_pic.

    NOTE(review): the default here is 'profile/ default.jpg' (with a
    space), unlike the model's 'profile/default.jpg' — verify intended.
    """

    dependencies = [
        ('insta', '0011_auto_20191011_1828'),
    ]
    operations = [
        migrations.AddField(
            model_name='profile',
            name='website',
            field=models.URLField(blank=True),
        ),
        migrations.AlterField(
            model_name='profile',
            name='profile_pic',
            field=models.ImageField(default='profile/ default.jpg', upload_to='profile/'),
        ),
    ]
|
{"/insta/views.py": ["/insta/models.py", "/insta/forms.py"], "/insta/admin.py": ["/insta/models.py"], "/insta/forms.py": ["/insta/models.py"]}
|
22,805
|
banditti99/leaguepedia_util
|
refs/heads/master
|
/esports_site.py
|
from extended_site import GamepediaSite
# Short names of every wiki in the esports family.
ALL_ESPORTS_WIKIS = ['lol', 'halo', 'smite', 'vg', 'rl', 'pubg', 'fortnite',
                     'apexlegends', 'fifa', 'gears', 'nba2k', 'paladins', 'siege',
                     'default-loadout', 'commons', 'teamfighttactics']


def get_wiki(wiki):
    """Map a short wiki key to its actual wiki (sub)domain name.

    'lol' and 'teamfighttactics' keep their bare names, as does any key
    not in ALL_ESPORTS_WIKIS; every other known wiki gets '-esports'.
    """
    bare = wiki in ('lol', 'teamfighttactics') or wiki not in ALL_ESPORTS_WIKIS
    return wiki if bare else wiki + '-esports'
class EsportsSite(GamepediaSite):
    """GamepediaSite specialised for the esports wiki family."""
    def __init__(self, user, wiki):
        # Log into the mapped (possibly '-esports' suffixed) wiki, but
        # remember the short name for comparisons in other_wikis().
        super().__init__(user, get_wiki(wiki))
        self.user = user
        self.wiki = wiki
    def standard_name_redirects(self):
        """Create the missing CM_StandardName redirect pages for tournaments."""
        for item in self.cargoquery(
            tables="Tournaments,_pageData",
            join_on="Tournaments.StandardName_Redirect=_pageData._pageName",
            where="_pageData._pageName IS NULL AND Tournaments.StandardName_Redirect IS NOT NULL",
            fields="Tournaments.StandardName_Redirect=Name,Tournaments._pageName=Target",
            limit="max"
        ):
            page = self.pages[item['Name']]
            target = item['Target']
            page.save('#redirect[[%s]]' % target, summary="creating needed CM_StandardName redirects")
    def other_wikis(self):
        # Yield every esports wiki short name except this site's own.
        for wiki in ALL_ESPORTS_WIKIS:
            if wiki == self.wiki:
                continue
            yield wiki
    def other_sites(self):
        # Yield logged-in EsportsSite objects for every other wiki.
        for wiki in self.other_wikis():
            yield EsportsSite('me', wiki)
    @staticmethod
    def all_wikis():
        # Yield every known esports wiki short name.
        for wiki in ALL_ESPORTS_WIKIS:
            yield wiki
    @staticmethod
    def all_sites(user):
        # Yield logged-in EsportsSite objects for every known wiki.
        for wiki in ALL_ESPORTS_WIKIS:
            yield EsportsSite(user, wiki)
|
{"/esports_site.py": ["/extended_site.py"], "/top_schedule_refresh.py": ["/log_into_wiki.py"], "/patrol_namespaces.py": ["/log_into_wiki.py"], "/refresh_teamnames_cron.py": ["/log_into_wiki.py", "/luacache_refresh.py"], "/touch.py": ["/log_into_wiki.py"], "/!!scratch.py": ["/log_into_wiki.py"], "/sprites_cachebreak.py": ["/log_into_wiki.py"], "/lol_archive_compare.py": ["/esports_site.py"], "/blank_edit_players_from_league.py": ["/log_into_wiki.py"], "/rune sprite.py": ["/log_into_wiki.py"], "/disambig_creation.py": ["/log_into_wiki.py"], "/default_loadout.py": ["/log_into_wiki.py"], "/fortnite_auto_new_players.py": ["/log_into_wiki.py"], "/log_into_wiki.py": ["/esports_site.py"], "/match_schedule_hash.py": ["/log_into_wiki.py"], "/weekly_utils_main.py": ["/weekly_utils.py", "/esports_site.py", "/luacache_refresh.py"], "/scrape_runes_run.py": ["/log_into_wiki.py"], "/fortnite_player_blank_edit.py": ["/log_into_wiki.py", "/extended_page.py"], "/yearly_stats_pages.py": ["/extended_site.py", "/extended_page.py"], "/download_images_from_list.py": ["/log_into_wiki.py"], "/weekly_utils.py": ["/log_into_wiki.py"]}
|
22,806
|
banditti99/leaguepedia_util
|
refs/heads/master
|
/top_schedule_refresh.py
|
from log_into_wiki import *
# Wikis processed on each run.
wikis = [ 'lol', 'cod-esports' ]
# Pages whose server-side cache is purged, per wiki.
to_purges = {
    'lol' : ['League of Legends Esports Wiki', 'Match History Index'],
    'cod-esports' : ['Call of Duty Esports Wiki']
}
# Pages blank-edited (re-saved unchanged) on every wiki.
to_blank_edit = ['Project:Top Schedule', 'Project:Matches Section/Matches',
                 'Project:Matches Section/Results']
# Additional pages blank-edited only on specific wikis.
to_blank_edits = {
    'lol' : ['Project:Korizon Standings']
}
def blank_edit_pages(site, ls):
    """Re-save each named page with its current text to force a refresh."""
    for page_name in ls:
        page = site.pages[page_name]
        page.save(page.text(), summary='blank editing')
# For each wiki: blank-edit the cached pages, purge the main pages, then
# regenerate the Current Tournaments section from its template.
for wiki in wikis:
    site = login('me',wiki)
    blank_edit_pages(site, to_blank_edit)
    if wiki in to_blank_edits.keys():
        blank_edit_pages(site, to_blank_edits[wiki])
    for name in to_purges[wiki]:
        site.pages[name].purge()
    # Expand the template server-side to get the rendered wikitext.
    result = site.api('expandtemplates', format='json',
        prop = 'wikitext',
        text = '{{Project:Template/Current Tournaments Section}}'
    )
    text = result['expandtemplates']['wikitext']
    p2 = site.pages['Project:Current Tournaments Section']
    p2.save(text, summary = 'Automatically updating Current Tournaments',tags='daily_errorfix')
|
{"/esports_site.py": ["/extended_site.py"], "/top_schedule_refresh.py": ["/log_into_wiki.py"], "/patrol_namespaces.py": ["/log_into_wiki.py"], "/refresh_teamnames_cron.py": ["/log_into_wiki.py", "/luacache_refresh.py"], "/touch.py": ["/log_into_wiki.py"], "/!!scratch.py": ["/log_into_wiki.py"], "/sprites_cachebreak.py": ["/log_into_wiki.py"], "/lol_archive_compare.py": ["/esports_site.py"], "/blank_edit_players_from_league.py": ["/log_into_wiki.py"], "/rune sprite.py": ["/log_into_wiki.py"], "/disambig_creation.py": ["/log_into_wiki.py"], "/default_loadout.py": ["/log_into_wiki.py"], "/fortnite_auto_new_players.py": ["/log_into_wiki.py"], "/log_into_wiki.py": ["/esports_site.py"], "/match_schedule_hash.py": ["/log_into_wiki.py"], "/weekly_utils_main.py": ["/weekly_utils.py", "/esports_site.py", "/luacache_refresh.py"], "/scrape_runes_run.py": ["/log_into_wiki.py"], "/fortnite_player_blank_edit.py": ["/log_into_wiki.py", "/extended_page.py"], "/yearly_stats_pages.py": ["/extended_site.py", "/extended_page.py"], "/download_images_from_list.py": ["/log_into_wiki.py"], "/weekly_utils.py": ["/log_into_wiki.py"]}
|
22,807
|
banditti99/leaguepedia_util
|
refs/heads/master
|
/patrol_namespaces.py
|
import log_into_wiki
# Namespace prefixes whose edits should be auto-patrolled.
namespaces = ['User:', 'Predictions:']
# Short names of the wikis to process.
site_names = ['lol', 'cod-esports']
# Window passed to patrol_recent (units defined by that helper).
interval = 10


def do_we_patrol(revision):
    """Return the namespace prefixes that *revision*'s title starts with.

    A non-empty list is truthy, so the result doubles as a yes/no patrol
    decision for the revision.
    """
    title = revision['title']
    return [prefix for prefix in namespaces if title.startswith(prefix)]
# Log into each configured wiki and patrol its recent changes.
for site_name in site_names:
    site = log_into_wiki.login('me', site_name)
    site.patrol_recent(interval, do_we_patrol)
|
{"/esports_site.py": ["/extended_site.py"], "/top_schedule_refresh.py": ["/log_into_wiki.py"], "/patrol_namespaces.py": ["/log_into_wiki.py"], "/refresh_teamnames_cron.py": ["/log_into_wiki.py", "/luacache_refresh.py"], "/touch.py": ["/log_into_wiki.py"], "/!!scratch.py": ["/log_into_wiki.py"], "/sprites_cachebreak.py": ["/log_into_wiki.py"], "/lol_archive_compare.py": ["/esports_site.py"], "/blank_edit_players_from_league.py": ["/log_into_wiki.py"], "/rune sprite.py": ["/log_into_wiki.py"], "/disambig_creation.py": ["/log_into_wiki.py"], "/default_loadout.py": ["/log_into_wiki.py"], "/fortnite_auto_new_players.py": ["/log_into_wiki.py"], "/log_into_wiki.py": ["/esports_site.py"], "/match_schedule_hash.py": ["/log_into_wiki.py"], "/weekly_utils_main.py": ["/weekly_utils.py", "/esports_site.py", "/luacache_refresh.py"], "/scrape_runes_run.py": ["/log_into_wiki.py"], "/fortnite_player_blank_edit.py": ["/log_into_wiki.py", "/extended_page.py"], "/yearly_stats_pages.py": ["/extended_site.py", "/extended_page.py"], "/download_images_from_list.py": ["/log_into_wiki.py"], "/weekly_utils.py": ["/log_into_wiki.py"]}
|
22,808
|
banditti99/leaguepedia_util
|
refs/heads/master
|
/refresh_teamnames_cron.py
|
from log_into_wiki import *
import luacache_refresh, datetime
# Scan the last minute of recent changes on the lol wiki; if
# Module:Teamnames was edited, refresh the team-names Lua cache.
site = login('me', 'lol')
now = datetime.datetime.utcnow()
then = now - datetime.timedelta(minutes=1)
revisions = site.api('query',
    list="recentchanges",
    rcstart = now.isoformat(),
    rcend = then.isoformat(),
    rcprop = 'title',
    rclimit = 'max',
    rctoponly = '1',
    rcdir = 'older'
)
for revision in revisions['query']['recentchanges']:
    print(revision['title'])
    if revision['title'] == 'Module:Teamnames':
        luacache_refresh.teamnames(site)
        # Refresh once at most, regardless of further matches.
        break
|
{"/esports_site.py": ["/extended_site.py"], "/top_schedule_refresh.py": ["/log_into_wiki.py"], "/patrol_namespaces.py": ["/log_into_wiki.py"], "/refresh_teamnames_cron.py": ["/log_into_wiki.py", "/luacache_refresh.py"], "/touch.py": ["/log_into_wiki.py"], "/!!scratch.py": ["/log_into_wiki.py"], "/sprites_cachebreak.py": ["/log_into_wiki.py"], "/lol_archive_compare.py": ["/esports_site.py"], "/blank_edit_players_from_league.py": ["/log_into_wiki.py"], "/rune sprite.py": ["/log_into_wiki.py"], "/disambig_creation.py": ["/log_into_wiki.py"], "/default_loadout.py": ["/log_into_wiki.py"], "/fortnite_auto_new_players.py": ["/log_into_wiki.py"], "/log_into_wiki.py": ["/esports_site.py"], "/match_schedule_hash.py": ["/log_into_wiki.py"], "/weekly_utils_main.py": ["/weekly_utils.py", "/esports_site.py", "/luacache_refresh.py"], "/scrape_runes_run.py": ["/log_into_wiki.py"], "/fortnite_player_blank_edit.py": ["/log_into_wiki.py", "/extended_page.py"], "/yearly_stats_pages.py": ["/extended_site.py", "/extended_page.py"], "/download_images_from_list.py": ["/log_into_wiki.py"], "/weekly_utils.py": ["/log_into_wiki.py"]}
|
22,809
|
banditti99/leaguepedia_util
|
refs/heads/master
|
/touch.py
|
from log_into_wiki import *
import time
# Blank-edit ("touch") main-namespace pages to force a re-render/re-save.
limit = -1  # -1 means no page limit
site = login('me','cavesofqud')
t = site.pages["Template:Item Page"]
pages = t.embeddedin()
c = site.categories['Pages with script errors']
# NOTE(review): the two generators above are immediately discarded —
# `pages` is reassigned to allpages below and the loop over `c` is commented out.
pages = site.allpages(namespace=0)
startat_page = 'Burrowing Claws'  # resume point after a previously interrupted run
passed_startat = False
lmt = 0
#for p in c:
for p in pages:
    if lmt == limit:
        break
    if p.name == startat_page:
        passed_startat = True
    if not passed_startat:
        continue
    lmt += 1
    print(p.name)
    text = p.text()
    try:
        p.save(text,'blank editing')
    except Exception as e:
        # Best-effort single retry after a pause (presumably rate limiting —
        # TODO confirm); a second failure propagates and aborts the run.
        print('uh oh!!!!!!!!')
        time.sleep(10)
        p.save(text, 'blank editing')
|
{"/esports_site.py": ["/extended_site.py"], "/top_schedule_refresh.py": ["/log_into_wiki.py"], "/patrol_namespaces.py": ["/log_into_wiki.py"], "/refresh_teamnames_cron.py": ["/log_into_wiki.py", "/luacache_refresh.py"], "/touch.py": ["/log_into_wiki.py"], "/!!scratch.py": ["/log_into_wiki.py"], "/sprites_cachebreak.py": ["/log_into_wiki.py"], "/lol_archive_compare.py": ["/esports_site.py"], "/blank_edit_players_from_league.py": ["/log_into_wiki.py"], "/rune sprite.py": ["/log_into_wiki.py"], "/disambig_creation.py": ["/log_into_wiki.py"], "/default_loadout.py": ["/log_into_wiki.py"], "/fortnite_auto_new_players.py": ["/log_into_wiki.py"], "/log_into_wiki.py": ["/esports_site.py"], "/match_schedule_hash.py": ["/log_into_wiki.py"], "/weekly_utils_main.py": ["/weekly_utils.py", "/esports_site.py", "/luacache_refresh.py"], "/scrape_runes_run.py": ["/log_into_wiki.py"], "/fortnite_player_blank_edit.py": ["/log_into_wiki.py", "/extended_page.py"], "/yearly_stats_pages.py": ["/extended_site.py", "/extended_page.py"], "/download_images_from_list.py": ["/log_into_wiki.py"], "/weekly_utils.py": ["/log_into_wiki.py"]}
|
22,810
|
banditti99/leaguepedia_util
|
refs/heads/master
|
/!!scratch.py
|
from log_into_wiki import *
import mwparserfromhell
# One-off migration: convert legacy |status=sub / |status=trainee values on
# RCInfo templates into the dedicated |sub= / |trainee= flags.
site = login('me', 'lol') # Set wiki
summary = '|sub=Yes |trainee=Yes & take out of |status=' # Set summary
limit = -1  # -1 = process every embedding page
startat_page = None  # set to a page name to resume an interrupted run
print(startat_page)
# startat_page = 'asdf'
this_template = site.pages['Template:RCInfo'] # Set template
pages = this_template.embeddedin()
# with open('pages.txt', encoding="utf-8") as f:
#     pages = f.readlines()
passed_startat = False if startat_page else True
lmt = 0
for page in pages:
    if lmt == limit:
        break
    if startat_page and page.name == startat_page:
        passed_startat = True
    if not passed_startat:
        print("Skipping page %s" % page.name)
        continue
    lmt += 1
    text = page.text()
    wikitext = mwparserfromhell.parse(text)
    for template in wikitext.filter_templates():
        if tl_matches(template, ['RCInfo']):
            if template.has('status'):
                # Set the new flag, then blank the legacy |status= value.
                if template.get('status').value.strip().lower() == 'sub':
                    template.add('sub', 'Yes')
                    template.add('status', '')
                if template.get('status').value.strip().lower() == 'trainee':
                    template.add('trainee', 'Yes')
                    template.add('status', '')
    newtext = str(wikitext)
    if text != newtext:
        print('Saving page %s...' % page.name)
        page.save(newtext, summary=summary)
    else:
        print('Skipping page %s...' % page.name)
|
{"/esports_site.py": ["/extended_site.py"], "/top_schedule_refresh.py": ["/log_into_wiki.py"], "/patrol_namespaces.py": ["/log_into_wiki.py"], "/refresh_teamnames_cron.py": ["/log_into_wiki.py", "/luacache_refresh.py"], "/touch.py": ["/log_into_wiki.py"], "/!!scratch.py": ["/log_into_wiki.py"], "/sprites_cachebreak.py": ["/log_into_wiki.py"], "/lol_archive_compare.py": ["/esports_site.py"], "/blank_edit_players_from_league.py": ["/log_into_wiki.py"], "/rune sprite.py": ["/log_into_wiki.py"], "/disambig_creation.py": ["/log_into_wiki.py"], "/default_loadout.py": ["/log_into_wiki.py"], "/fortnite_auto_new_players.py": ["/log_into_wiki.py"], "/log_into_wiki.py": ["/esports_site.py"], "/match_schedule_hash.py": ["/log_into_wiki.py"], "/weekly_utils_main.py": ["/weekly_utils.py", "/esports_site.py", "/luacache_refresh.py"], "/scrape_runes_run.py": ["/log_into_wiki.py"], "/fortnite_player_blank_edit.py": ["/log_into_wiki.py", "/extended_page.py"], "/yearly_stats_pages.py": ["/extended_site.py", "/extended_page.py"], "/download_images_from_list.py": ["/log_into_wiki.py"], "/weekly_utils.py": ["/log_into_wiki.py"]}
|
22,811
|
banditti99/leaguepedia_util
|
refs/heads/master
|
/sprites_cachebreak.py
|
from log_into_wiki import *
import re
# Force browsers to re-fetch sprite sheets: rewrite the ?version= cache-buster
# for every image in Category:Sprite Images inside the sitewide CSS pages.
site = login('me', 'lol')
summary = 'Bot Edit - Automatically Forcing Sprite Cache Update'
# Capture groups: the hashed directory prefix (/x/xy/) and the ?version= token.
url_re_start = r'.*(\/.\/..\/)'
url_re_end = r'(\?version=\w*)\".*'
css_page_list = ['MediaWiki:Common.css', 'MediaWiki:Mobile.css']
category_result = site.api('query', list = 'categorymembers', cmtitle = 'Category:Sprite Images', cmlimit = 50)
file_name_list = [_['title'] for _ in category_result['query']['categorymembers']]
# Parse all file links in a single API call; '!!!' is just a separator.
parse_text_list = ['[[%s|link=]]' % _ for _ in file_name_list]
parse_text = '!!!'.join(parse_text_list)
result = site.api('parse', text = parse_text, title = 'Main Page', disablelimitreport = 1)
text = result['parse']['text']['*']
css_texts_old = []  # patterns that match the stale URL (any ?version= token)
css_texts_new = []  # replacements carrying the freshly parsed ?version= token
for file_name in file_name_list:
    raw_name = file_name.replace('File:', '')
    re_full = url_re_start + re.escape(raw_name) + url_re_end
    match = re.match(re_full, text)
    if match is None:
        # Bug fix: a file absent from the parsed output previously crashed the
        # whole run with a TypeError on match[1]; skip it instead.
        print('No URL found for %s, skipping' % file_name)
        continue
    css_texts_new.append(match[1] + raw_name + r'\1' + match[2])
    css_texts_old.append(re.escape(match[1] + raw_name) + r'(.*)' + r'\?version=\w*')
def replace_css_in_file(css_page):
    """Apply every old->new URL substitution to one CSS page; save only on change."""
    css_page_text = css_page.text()
    css_page_text_new = css_page_text
    for i, v in enumerate(css_texts_old):
        css_page_text_new = re.sub(v, css_texts_new[i], css_page_text_new)
    if css_page_text != css_page_text_new:
        css_page.save(css_page_text_new, summary = summary)
for page_name in css_page_list:
    replace_css_in_file(site.pages[page_name])
print('Ran!')
|
{"/esports_site.py": ["/extended_site.py"], "/top_schedule_refresh.py": ["/log_into_wiki.py"], "/patrol_namespaces.py": ["/log_into_wiki.py"], "/refresh_teamnames_cron.py": ["/log_into_wiki.py", "/luacache_refresh.py"], "/touch.py": ["/log_into_wiki.py"], "/!!scratch.py": ["/log_into_wiki.py"], "/sprites_cachebreak.py": ["/log_into_wiki.py"], "/lol_archive_compare.py": ["/esports_site.py"], "/blank_edit_players_from_league.py": ["/log_into_wiki.py"], "/rune sprite.py": ["/log_into_wiki.py"], "/disambig_creation.py": ["/log_into_wiki.py"], "/default_loadout.py": ["/log_into_wiki.py"], "/fortnite_auto_new_players.py": ["/log_into_wiki.py"], "/log_into_wiki.py": ["/esports_site.py"], "/match_schedule_hash.py": ["/log_into_wiki.py"], "/weekly_utils_main.py": ["/weekly_utils.py", "/esports_site.py", "/luacache_refresh.py"], "/scrape_runes_run.py": ["/log_into_wiki.py"], "/fortnite_player_blank_edit.py": ["/log_into_wiki.py", "/extended_page.py"], "/yearly_stats_pages.py": ["/extended_site.py", "/extended_page.py"], "/download_images_from_list.py": ["/log_into_wiki.py"], "/weekly_utils.py": ["/log_into_wiki.py"]}
|
22,812
|
banditti99/leaguepedia_util
|
refs/heads/master
|
/lol_archive_compare.py
|
from esports_site import EsportsSite
# For every main-namespace page on the archive wiki, record whether the same
# title also exists on the live wiki, then dump the results as TSV.
archive = EsportsSite('me', 'lol-archive')
live = EsportsSite('me', 'lol')
pages = [
    (page.name, live.pages[page.name].exists)
    for page in archive.allpages(namespace=0)
]
lines = ['{}\t{}'.format(name, str(exists)) for name, exists in pages]
with open('archive_pages.txt', 'w+', encoding="utf-8") as f:
    f.write('\n'.join(lines))
|
{"/esports_site.py": ["/extended_site.py"], "/top_schedule_refresh.py": ["/log_into_wiki.py"], "/patrol_namespaces.py": ["/log_into_wiki.py"], "/refresh_teamnames_cron.py": ["/log_into_wiki.py", "/luacache_refresh.py"], "/touch.py": ["/log_into_wiki.py"], "/!!scratch.py": ["/log_into_wiki.py"], "/sprites_cachebreak.py": ["/log_into_wiki.py"], "/lol_archive_compare.py": ["/esports_site.py"], "/blank_edit_players_from_league.py": ["/log_into_wiki.py"], "/rune sprite.py": ["/log_into_wiki.py"], "/disambig_creation.py": ["/log_into_wiki.py"], "/default_loadout.py": ["/log_into_wiki.py"], "/fortnite_auto_new_players.py": ["/log_into_wiki.py"], "/log_into_wiki.py": ["/esports_site.py"], "/match_schedule_hash.py": ["/log_into_wiki.py"], "/weekly_utils_main.py": ["/weekly_utils.py", "/esports_site.py", "/luacache_refresh.py"], "/scrape_runes_run.py": ["/log_into_wiki.py"], "/fortnite_player_blank_edit.py": ["/log_into_wiki.py", "/extended_page.py"], "/yearly_stats_pages.py": ["/extended_site.py", "/extended_page.py"], "/download_images_from_list.py": ["/log_into_wiki.py"], "/weekly_utils.py": ["/log_into_wiki.py"]}
|
22,813
|
banditti99/leaguepedia_util
|
refs/heads/master
|
/blank_edit_players_from_league.py
|
from log_into_wiki import *
# Blank-edit every player who appears on a scoreboard in the tournaments
# listed (one overview page per line) in pages.txt.
limit = -1  # -1 means no limit
site = login('me','lol')
with open('pages.txt', encoding="utf-8") as f:
    tournaments = f.readlines()
pages = set()  # set: each player is blank-edited at most once
for tournament in tournaments:
    response = site.api('cargoquery',
        tables = 'ScoreboardPlayer',
        where = 'OverviewPage="%s"' % tournament.strip().replace('_', ' '),
        fields = 'Link',
        group_by = 'Link'
    )
    for item in response['cargoquery']:
        pages.add(item['title']['Link'])
lmt = 0
for page in pages:
    if lmt == limit:
        break
    p = site.pages[page]
    lmt += 1
    print(p.name)
    text = p.text()
    # Skip pages with no content (nothing to re-save).
    if text != '':
        p.save(text,'blank editing')
|
{"/esports_site.py": ["/extended_site.py"], "/top_schedule_refresh.py": ["/log_into_wiki.py"], "/patrol_namespaces.py": ["/log_into_wiki.py"], "/refresh_teamnames_cron.py": ["/log_into_wiki.py", "/luacache_refresh.py"], "/touch.py": ["/log_into_wiki.py"], "/!!scratch.py": ["/log_into_wiki.py"], "/sprites_cachebreak.py": ["/log_into_wiki.py"], "/lol_archive_compare.py": ["/esports_site.py"], "/blank_edit_players_from_league.py": ["/log_into_wiki.py"], "/rune sprite.py": ["/log_into_wiki.py"], "/disambig_creation.py": ["/log_into_wiki.py"], "/default_loadout.py": ["/log_into_wiki.py"], "/fortnite_auto_new_players.py": ["/log_into_wiki.py"], "/log_into_wiki.py": ["/esports_site.py"], "/match_schedule_hash.py": ["/log_into_wiki.py"], "/weekly_utils_main.py": ["/weekly_utils.py", "/esports_site.py", "/luacache_refresh.py"], "/scrape_runes_run.py": ["/log_into_wiki.py"], "/fortnite_player_blank_edit.py": ["/log_into_wiki.py", "/extended_page.py"], "/yearly_stats_pages.py": ["/extended_site.py", "/extended_page.py"], "/download_images_from_list.py": ["/log_into_wiki.py"], "/weekly_utils.py": ["/log_into_wiki.py"]}
|
22,814
|
banditti99/leaguepedia_util
|
refs/heads/master
|
/rune sprite.py
|
import urllib.request, time, sprite_creator, io, os
from log_into_wiki import *
# Download every image in the wiki's 'Role Icons' category into IMAGE_DIR,
# named after the icon (File: prefix, extension, and 'Square' stripped).
SUFFIX = ''
SPRITE_NAME = 'SmiteRole'
IMAGE_DIR = 'Sprites/' + SPRITE_NAME + ' Images'
TEAM_DATA_FILE_LOCATION = SPRITE_NAME + 'Sprite' + SUFFIX + '.txt'  # unused here; kept for the sprite pipeline
FILE_TYPE = 'png'
limit = -1  # unused
startat = None  # unused
site = login('me', 'smite-esports')
site_lol = login('me', 'lol')  # unused in this script
if not os.path.exists(IMAGE_DIR):
    os.makedirs(IMAGE_DIR)
def get_country_name(file_name):
    """Turn a file title like 'File:XSquare.png' into the bare icon name 'X'."""
    return file_name.replace('.' + FILE_TYPE, '').replace('File:', '').replace('Square','')
pattern = r'.*src\=\"(.+?)\".*'
cat = site.categories['Role Icons']
for page in cat:
    # Parse a bare file link so MediaWiki returns the real image URL in <img src=...>.
    to_parse_text = '[[%s|link=]]' % page.name
    result = site.api('parse', title = 'Main Page', text = to_parse_text, disablelimitreport = 1)
    parse_result_text = result['parse']['text']['*']
    match = re.match(pattern, parse_result_text)
    if match is None:
        # Robustness fix: skip pages whose parse output carries no image src
        # instead of crashing with a TypeError on match[1].
        print('No image URL for %s, skipping' % page.name)
        continue
    image = urllib.request.urlopen(match[1]).read()
    country = get_country_name(page.name)
    image_path = IMAGE_DIR + '/' + country + '.' + FILE_TYPE
    print(image_path)
    # Context manager replaces the original open/write/close triple so the
    # handle is closed even if the write raises.
    with open(image_path, 'wb') as f:
        f.write(image)
|
{"/esports_site.py": ["/extended_site.py"], "/top_schedule_refresh.py": ["/log_into_wiki.py"], "/patrol_namespaces.py": ["/log_into_wiki.py"], "/refresh_teamnames_cron.py": ["/log_into_wiki.py", "/luacache_refresh.py"], "/touch.py": ["/log_into_wiki.py"], "/!!scratch.py": ["/log_into_wiki.py"], "/sprites_cachebreak.py": ["/log_into_wiki.py"], "/lol_archive_compare.py": ["/esports_site.py"], "/blank_edit_players_from_league.py": ["/log_into_wiki.py"], "/rune sprite.py": ["/log_into_wiki.py"], "/disambig_creation.py": ["/log_into_wiki.py"], "/default_loadout.py": ["/log_into_wiki.py"], "/fortnite_auto_new_players.py": ["/log_into_wiki.py"], "/log_into_wiki.py": ["/esports_site.py"], "/match_schedule_hash.py": ["/log_into_wiki.py"], "/weekly_utils_main.py": ["/weekly_utils.py", "/esports_site.py", "/luacache_refresh.py"], "/scrape_runes_run.py": ["/log_into_wiki.py"], "/fortnite_player_blank_edit.py": ["/log_into_wiki.py", "/extended_page.py"], "/yearly_stats_pages.py": ["/extended_site.py", "/extended_page.py"], "/download_images_from_list.py": ["/log_into_wiki.py"], "/weekly_utils.py": ["/log_into_wiki.py"]}
|
22,815
|
banditti99/leaguepedia_util
|
refs/heads/master
|
/disambig_creation.py
|
import re, threading, mwparserfromhell
from log_into_wiki import *
#################################################################################################
original_name = 'Awaker'  # the ambiguous player ID being disambiguated
irl_name = "Kentaro Hanaoka"  # real name appended to form the new page title
new_name = '{} ({})'.format(original_name, irl_name.strip())
init_move = True  # move the player page (and subpages) as the first step
blank_edit = False  # when True, re-save pages even when nothing changed
limit = -1  # max number of backlink pages to process; -1 = unlimited
timeout_limit = 30  # seconds to wait on each threaded save before moving on
# Template name groups whose player parameters need a link to the new name.
listplayer_templates = ["listplayer", "listplayer/Current"]
roster_templates = ["ExtendedRosterLine", "ExtendedRosterLine/MultipleRoles"]
scoreboard_templates = ["MatchRecapS8/Player","Scoreboard/Player"]
stat_templates = ["IPS", "CareerPlayerStats", "MatchHistoryPlayer"]
player_line_templates = ["LCKPlayerLine", "LCSPlayerLine"]
roster_change_templates = ["RosterChangeLine", "RosterRumorLine2",
                           "RosterRumorLineStay", "RosterRumorLineNot", "RosterRumorLine"]
summary = "Disambiguating {} to {}".format(original_name, new_name)
css_style = " {\n color:orange!important;\n font-weight:bold;\n}"
# Lowercase-first variants, for templates that store the ID decapitalized.
orig_name_lc = original_name[0].lower() + original_name[1:]
new_name_lc = new_name[0].lower() + new_name[1:]
blank_edit_these = []  # pages moved during this run, blank-edited at the end
#############################################################################################
def savepage(targetpage, savetext):
    """Save *savetext* to *targetpage*; run on a worker thread so a hung save can time out."""
    targetpage.save(savetext, summary=summary, tags="bot_disambig")
def blank_edit_page(page):
    """Re-save a page with its own freshly fetched (uncached) text to force a re-render."""
    textname = str(page.name)
    newpage = site.pages[textname]
    # cache=False bypasses mwclient's cached revision text.
    text = newpage.text(cache=False)
    page.save(text, summary="Blank Editing")
def move_page(from_page):
    """Move *from_page* to its disambiguated title, unless the target already exists.

    Successfully moved pages are queued in blank_edit_these for a later blank edit.
    """
    new_page_name = str(from_page.name).replace(original_name, new_name)
    new_page = site.pages[new_page_name]
    if new_page.exists:
        print("{} already exists, cannot move!".format(from_page.name))
    else:
        print("Moving page {} to {}".format(from_page.name, new_page_name))
        from_page.move(new_page_name, reason=summary, no_redirect=True)
        blank_edit_these.append(new_page)
def edit_concept(concept):
    """Rewrite Concept:<player>/Games, replacing the old ID in PlayerGamesConcept args.

    Handles both the capitalized and decapitalized forms of the name; saves
    only when something actually changed.
    """
    text = concept.text()
    wikitext = mwparserfromhell.parse(text)
    for template in wikitext.filter_templates():
        if template.name.matches("PlayerGamesConcept"):
            i = 1
            while template.has(i):
                # Bug fix: template.get(i) returns a Parameter object, which
                # has no .strip(); compare its .value, as edit_subpage() does.
                if template.get(i).value.strip() == original_name:
                    template.add(i, new_name)
                elif template.get(i).value.strip() == orig_name_lc:
                    template.add(i, new_name_lc)
                i = i + 1
    newtext = str(wikitext)
    if newtext != text:
        concept.save(newtext, summary=summary, tags="bot_disambig")
def edit_subpage(subpage):
    """Point a subpage's first template argument at the new disambiguated name."""
    text = subpage.text()
    wikitext = mwparserfromhell.parse(text)
    for stemplate in wikitext.filter_templates():
        if stemplate.has(1):
            if stemplate.get(1).value.strip() == original_name:
                stemplate.add(1, new_name)
    newtext = str(wikitext)
    if text != newtext:
        print("Editing " + subpage.name + "...")
        # Bug fix: mwclient's Page.save takes summary=, not reason=; the
        # original reason= kwarg silently dropped the edit summary.
        subpage.save(newtext, summary=summary)
def process_page(page):
    """Disambiguate one backlink page: rewrite direct links and known templates.

    The save runs on a worker thread joined with a timeout, so one hung API
    call cannot stall the whole run (the thread itself is not killed).
    """
    print("Processing next page: " + page.name)
    text = page.text()
    origtext = text
    # do links first because it's easier to just edit them as a string
    if text.lower().startswith('#redirect') and page.name.lower() == original_name.lower():
        # Leave the redirect sitting on the original title itself untouched.
        pass
    else:
        text = text.replace("[[" + original_name + "]]", "[[" + new_name + "|" + original_name + "]]")
    wikitext = mwparserfromhell.parse(text)
    for template in wikitext.filter_templates():
        process_template(template)
    newtext = str(wikitext)
    if origtext != newtext or blank_edit:
        print("Saving...")
        t = threading.Thread(target=savepage, kwargs={"targetpage": page, "savetext": newtext})
        t.start()
        t.join(timeout=timeout_limit)
    else:
        print("No changes, skipping")
def check_list(template, param, sep = ','):
    """Replace original_name with new_name inside a *sep*-separated template parameter.

    The parameter is re-added only when at least one entry actually changed.
    """
    if not template.has(param):
        return
    entries = template.get(param).value.strip().split(sep)
    changed = False
    for idx, entry in enumerate(entries):
        if entry.strip() == original_name:
            entries[idx] = new_name
            changed = True
    if changed:
        template.add(param, sep.join(entries))
def process_template(template):
    """Dispatch one template to the right disambiguation rewrite.

    Each branch handles a family of templates whose player field equals
    original_name, adding or updating the corresponding link parameter.
    """
    def tl_matches(arr, field=None):
        # True-ish when the template's name is in *arr*; if *field* is given,
        # additionally require that parameter to equal original_name.
        if field:
            has_field = False
            if template.has(field):
                has_field = template.get(field).value.strip() == original_name
            return [_ for _ in arr if template.name.matches(_)] and has_field
        return [_ for _ in arr if template.name.matches(_)]
    if tl_matches(['bl'], field=1) and not template.has(2):
        # {{bl|Name}} -> {{bl|New Name|Name}}
        template.add(1, new_name)
        template.add(2, original_name)
    elif tl_matches(listplayer_templates, field=1) and not template.has("link"):
        template.add("link", new_name, before=1)
    elif tl_matches(roster_templates, field='player') and not template.has('link'):
        template.add("link", new_name, before="name")
    elif tl_matches(scoreboard_templates, field='name'):
        template.add("link", new_name, before="kills")
    elif tl_matches(roster_change_templates, field='player'):
        # {{!}} renders as a pipe: display the old name, link the new page.
        template.add("player", new_name + "{{!}}" + original_name)
    elif tl_matches(['TeamRoster/Line', 'RosterLineOld'], field='player'):
        template.add('link', new_name)
    elif tl_matches(player_line_templates, field=1):
        template.add(2, new_name)
    elif tl_matches(['Player', 'RSRR/Player'], field=1):
        template.add('link', new_name)
    elif tl_matches(["MatchDetails/Series"], field='mvp'):
        template.add("mvplink", new_name, before="mvp")
    elif tl_matches(["PentakillLine"], field=6):
        template.add("playerlink", new_name, before=6)
    elif tl_matches(["MatchSchedule","MatchSchedule/Game"]):
        if template.has("mvp"):
            if template.get("mvp").value.strip() == original_name:
                template.add("mvp", new_name)
        # Caster / commentary lists may also mention the player by ID.
        check_list(template, 'with')
        check_list(template, 'pbp')
        check_list(template, 'color')
    elif tl_matches(['ExternalContent/Line']):
        check_list(template, 'players')
    elif tl_matches(['SeasonAward']):
        if template.has(1):
            if template.get(1).value.strip() == original_name:
                template.add('link', new_name)
        check_links(template, 'eligibleplayers', 'eligiblelinks', ',', original_name, new_name)
    elif tl_matches(['PlayerImageMetadata'], field="playerlink"):
        template.add('playerlink', new_name)
    elif tl_matches(["PortalCurrentRosters"]):
        # Fix the per-role old/new roster name+link pairs.
        for pos in ['t', 'j', 'm', 'a', 's']:
            for period in ['old', 'new']:
                arg_name = pos + '_' + period
                arg_link = arg_name + '_links'
                check_links(template, arg_name, arg_link, ',', original_name, new_name)
def make_disambig_page():
    """Create the disambiguation stub at the original title, unless one already exists.

    Overwrites whatever is currently there (e.g. a leftover redirect) as long
    as it is not already a DisambigPage.
    """
    text = "{{DisambigPage\n|player1=" + new_name + "\n|player2=\n}}"
    page = site.pages[original_name]
    old_text = page.text()
    if 'disambigpage' not in old_text.lower():
        page.save(text, summary=summary)
# --- main flow ---------------------------------------------------------------
site = login('me','lol')
thispage = site.pages[original_name]
newpage = site.pages[new_name]
if init_move:
    # Move the player page, all its subpages, and the /Games concept first.
    move_page(thispage)
    subpages = site.allpages(prefix=original_name + "/")
    for subpage in subpages:
        edit_subpage(subpage)
        move_page(subpage)
    concept = site.pages["Concept:{}/Games".format(original_name)]
    if concept.exists:
        edit_concept(concept)
        move_page(concept)
# Then rewrite every page that still links to the old title.
pages = thispage.backlinks()
i = 0
for page in pages:
    if i == limit:
        break
    i = i + 1
    process_page(page)
print("Blank editing...")
if init_move:
    for page in blank_edit_these:
        blank_edit_page(page)
make_disambig_page()
print("Done! If some pages stalled out you may still need to abort manually.")
|
{"/esports_site.py": ["/extended_site.py"], "/top_schedule_refresh.py": ["/log_into_wiki.py"], "/patrol_namespaces.py": ["/log_into_wiki.py"], "/refresh_teamnames_cron.py": ["/log_into_wiki.py", "/luacache_refresh.py"], "/touch.py": ["/log_into_wiki.py"], "/!!scratch.py": ["/log_into_wiki.py"], "/sprites_cachebreak.py": ["/log_into_wiki.py"], "/lol_archive_compare.py": ["/esports_site.py"], "/blank_edit_players_from_league.py": ["/log_into_wiki.py"], "/rune sprite.py": ["/log_into_wiki.py"], "/disambig_creation.py": ["/log_into_wiki.py"], "/default_loadout.py": ["/log_into_wiki.py"], "/fortnite_auto_new_players.py": ["/log_into_wiki.py"], "/log_into_wiki.py": ["/esports_site.py"], "/match_schedule_hash.py": ["/log_into_wiki.py"], "/weekly_utils_main.py": ["/weekly_utils.py", "/esports_site.py", "/luacache_refresh.py"], "/scrape_runes_run.py": ["/log_into_wiki.py"], "/fortnite_player_blank_edit.py": ["/log_into_wiki.py", "/extended_page.py"], "/yearly_stats_pages.py": ["/extended_site.py", "/extended_page.py"], "/download_images_from_list.py": ["/log_into_wiki.py"], "/weekly_utils.py": ["/log_into_wiki.py"]}
|
22,816
|
banditti99/leaguepedia_util
|
refs/heads/master
|
/default_loadout.py
|
import datetime
from time import mktime
from log_into_wiki import *
# Copy every page of the source ("loadout") wiki into the sandbox wiki as a
# backup, skipping pages that already have content on the target.
loadout = login('me', 'spyro') # Set wiki
target = login('me', 'wikisandbox')
summary = 'Backing up spyro' # Set summary
startat_namespace = None
print(startat_namespace)
startat_namespace = 274  # resume: skip namespaces with a lower ID
startat_page = None
print(startat_page)
startat_page = 'Module:Navbox/Aether II/en'  # resume: skip pages until this one is seen
startat_comparison = startat_namespace - 1 if startat_namespace else -1
passed_startat = False
for ns in loadout.namespaces:
    print(ns)
    if ns > startat_comparison and ns != 4: # ns 4 is Project ns
        for page in loadout.allpages(namespace=ns):
            if startat_page == page.name:
                passed_startat = True
            if startat_page and not passed_startat:
                continue
            # Only copy onto empty targets so existing backups aren't clobbered.
            if target.pages[page.name].text() == '':
                target.pages[page.name].save(page.text(), summary=summary)
|
{"/esports_site.py": ["/extended_site.py"], "/top_schedule_refresh.py": ["/log_into_wiki.py"], "/patrol_namespaces.py": ["/log_into_wiki.py"], "/refresh_teamnames_cron.py": ["/log_into_wiki.py", "/luacache_refresh.py"], "/touch.py": ["/log_into_wiki.py"], "/!!scratch.py": ["/log_into_wiki.py"], "/sprites_cachebreak.py": ["/log_into_wiki.py"], "/lol_archive_compare.py": ["/esports_site.py"], "/blank_edit_players_from_league.py": ["/log_into_wiki.py"], "/rune sprite.py": ["/log_into_wiki.py"], "/disambig_creation.py": ["/log_into_wiki.py"], "/default_loadout.py": ["/log_into_wiki.py"], "/fortnite_auto_new_players.py": ["/log_into_wiki.py"], "/log_into_wiki.py": ["/esports_site.py"], "/match_schedule_hash.py": ["/log_into_wiki.py"], "/weekly_utils_main.py": ["/weekly_utils.py", "/esports_site.py", "/luacache_refresh.py"], "/scrape_runes_run.py": ["/log_into_wiki.py"], "/fortnite_player_blank_edit.py": ["/log_into_wiki.py", "/extended_page.py"], "/yearly_stats_pages.py": ["/extended_site.py", "/extended_page.py"], "/download_images_from_list.py": ["/log_into_wiki.py"], "/weekly_utils.py": ["/log_into_wiki.py"]}
|
22,817
|
banditti99/leaguepedia_util
|
refs/heads/master
|
/fortnite_auto_new_players.py
|
from log_into_wiki import *
import mwparserfromhell
# Auto-create stub player pages for anyone who has earned Power Rankings
# points but has no page yet.
limit = -1  # -1 = create pages for everyone found
site = login('bot', 'fortnite-esports')
summary = 'Automatically create player pages for Power Rankings'
# Cargo: roster links with PR points whose page (_pageData) does not exist yet.
result = site.api('cargoquery',
    tables = 'TournamentResults=TR,TournamentResults__RosterLinks=RL,_pageData=PD',
    join_on = 'TR._ID=RL._rowID,RL._value=PD._pageName',
    where = 'PD._pageName IS NULL AND RL._value IS NOT NULL AND TR.PRPoints > "0"',
    fields = 'RL._value=name',
    group_by = 'RL._value',
    limit = 'max'
)
# Base the stubs on the boilerplate stored at Help:Player Template.
default_text = site.pages['Help:Player Template'].text()
default_text = default_text.replace('<noinclude>','').replace('</noinclude>','').strip()
wikitext = mwparserfromhell.parse(default_text)
this_template = None
for template in wikitext.filter_templates():
    if template.name.matches('Infobox Player'):
        this_template = template
        this_template.add('pronly','Yes')
        break
# NOTE(review): if the boilerplate lacks an Infobox Player template,
# this_template stays None and the .add() calls below raise AttributeError.
def get_residency(name):
    """Return the player's region if all their tournaments share one, else ''."""
    print(name)
    res_response = site.api('cargoquery',
        tables='Tournaments=T,TournamentResults=TR,TournamentResults__RosterLinks=RL',
        join_on='T._pageName=TR.OverviewPage,TR._ID=RL._rowID',
        where='RL._value="%s"' % name,
        fields='T.Region',
        group_by='T.Region'
    )
    res_result = res_response['cargoquery']
    if len(res_result) == 1:
        return res_result[0]['title']['Region']
    return ''
lmt = 0
for item in result['cargoquery']:
    if lmt == limit:
        break
    lmt = lmt + 1
    name = item['title']['name']
    if site.pages[name].text() != '':
        print('Page %s already exists, skipping' % name)
        continue
    print('Processing page %s...' % name)
    # Reuses the single parsed template object, overwriting per-player fields.
    this_template.add('residency', get_residency(name))
    this_template.add('id', name)
    text = str(wikitext)
    site.pages[name].save(text, summary=summary)
|
{"/esports_site.py": ["/extended_site.py"], "/top_schedule_refresh.py": ["/log_into_wiki.py"], "/patrol_namespaces.py": ["/log_into_wiki.py"], "/refresh_teamnames_cron.py": ["/log_into_wiki.py", "/luacache_refresh.py"], "/touch.py": ["/log_into_wiki.py"], "/!!scratch.py": ["/log_into_wiki.py"], "/sprites_cachebreak.py": ["/log_into_wiki.py"], "/lol_archive_compare.py": ["/esports_site.py"], "/blank_edit_players_from_league.py": ["/log_into_wiki.py"], "/rune sprite.py": ["/log_into_wiki.py"], "/disambig_creation.py": ["/log_into_wiki.py"], "/default_loadout.py": ["/log_into_wiki.py"], "/fortnite_auto_new_players.py": ["/log_into_wiki.py"], "/log_into_wiki.py": ["/esports_site.py"], "/match_schedule_hash.py": ["/log_into_wiki.py"], "/weekly_utils_main.py": ["/weekly_utils.py", "/esports_site.py", "/luacache_refresh.py"], "/scrape_runes_run.py": ["/log_into_wiki.py"], "/fortnite_player_blank_edit.py": ["/log_into_wiki.py", "/extended_page.py"], "/yearly_stats_pages.py": ["/extended_site.py", "/extended_page.py"], "/download_images_from_list.py": ["/log_into_wiki.py"], "/weekly_utils.py": ["/log_into_wiki.py"]}
|
22,818
|
banditti99/leaguepedia_util
|
refs/heads/master
|
/extended_site.py
|
import mwclient, datetime
class ExtendedSite(mwclient.Site):
    """mwclient.Site extended with Cargo-query and recent-changes helpers."""

    def cargoquery(self, **kwargs):
        """Run a Cargo query and return the list of per-row 'title' dicts."""
        response = self.api('cargoquery', **kwargs)
        return [item['title'] for item in response['cargoquery']]

    def cargo_pagelist(self, fields=None, limit="max", page_pattern = "%s", **kwargs):
        """Yield the wiki Page for each distinct value of a Cargo field.

        ``fields`` is ``"Field"`` or ``"Field=Alias"``; the part before '='
        doubles as the GROUP BY clause, and ``page_pattern`` turns each value
        into a page title. Duplicate titles are yielded only once.
        """
        field = fields.split('=')[1] if '=' in fields else fields
        group_by = fields.split('=')[0]
        response = self.api('cargoquery',
            fields=fields,
            group_by=group_by,
            limit=limit,
            **kwargs
        )
        seen = set()  # O(1) dedup; the original scanned a list per row (O(n))
        for item in response['cargoquery']:
            page = page_pattern % item['title'][field]
            if page in seen:
                continue
            seen.add(page)
            yield self.pages[page]

    def recentchanges_by_interval(self, interval, offset=0, prop='title|ids', **kwargs):
        """Return recent changes in the UTC window [now-offset-interval, now-offset] minutes."""
        now = datetime.datetime.utcnow() - datetime.timedelta(minutes=offset)
        then = now - datetime.timedelta(minutes=interval)
        result = self.recentchanges(
            start=now.isoformat(),
            end=then.isoformat(),
            limit='max',
            prop=prop,
            **kwargs
        )
        return result

    def patrol_recent(self, interval, f, **kwargs):
        """Patrol each unpatrolled change of the last *interval* minutes for which f(revision) is truthy."""
        revisions = self.recentchanges_by_interval(interval, prop='title|ids|patrolled', **kwargs)
        patrol_token = self.get_token('patrol')
        for revision in revisions:
            # revid == 0 when the page was deleted, so it can't be patrolled
            if f(revision) and revision['revid'] != 0 and 'unpatrolled' in revision:
                self.api('patrol', revid = revision['revid'], token = patrol_token)
class GamepediaSite(ExtendedSite):
    """ExtendedSite pre-configured for a Gamepedia wiki, logging in from credential files.

    user == 'bot' reads username2.txt/password2.txt; anything else reads
    username.txt/password.txt from the working directory.
    """
    def __init__(self, user, wiki):
        super().__init__('%s.gamepedia.com' % wiki, path='/')
        pwd_file = 'password2.txt' if user == 'bot' else 'password.txt'
        user_file = 'username2.txt' if user == 'bot' else 'username.txt'
        # Context managers close the credential files promptly
        # (the originals leaked open file handles).
        with open(pwd_file) as f:
            pwd = f.read().strip()
        with open(user_file) as f:
            username = f.read().strip()
        self.login(username, pwd)
|
{"/esports_site.py": ["/extended_site.py"], "/top_schedule_refresh.py": ["/log_into_wiki.py"], "/patrol_namespaces.py": ["/log_into_wiki.py"], "/refresh_teamnames_cron.py": ["/log_into_wiki.py", "/luacache_refresh.py"], "/touch.py": ["/log_into_wiki.py"], "/!!scratch.py": ["/log_into_wiki.py"], "/sprites_cachebreak.py": ["/log_into_wiki.py"], "/lol_archive_compare.py": ["/esports_site.py"], "/blank_edit_players_from_league.py": ["/log_into_wiki.py"], "/rune sprite.py": ["/log_into_wiki.py"], "/disambig_creation.py": ["/log_into_wiki.py"], "/default_loadout.py": ["/log_into_wiki.py"], "/fortnite_auto_new_players.py": ["/log_into_wiki.py"], "/log_into_wiki.py": ["/esports_site.py"], "/match_schedule_hash.py": ["/log_into_wiki.py"], "/weekly_utils_main.py": ["/weekly_utils.py", "/esports_site.py", "/luacache_refresh.py"], "/scrape_runes_run.py": ["/log_into_wiki.py"], "/fortnite_player_blank_edit.py": ["/log_into_wiki.py", "/extended_page.py"], "/yearly_stats_pages.py": ["/extended_site.py", "/extended_page.py"], "/download_images_from_list.py": ["/log_into_wiki.py"], "/weekly_utils.py": ["/log_into_wiki.py"]}
|
22,819
|
banditti99/leaguepedia_util
|
refs/heads/master
|
/log_into_wiki.py
|
import re, urllib.request, io
from esports_site import EsportsSite
from PIL import Image, ImageFile
# Let PIL open images whose data stream is truncated instead of raising.
ImageFile.LOAD_TRUNCATED_IMAGES = True
def login(user, wiki, timeout = 30):
    """Return an EsportsSite logged in as *user* ('me' or 'bot') on *wiki*.

    ``timeout`` is accepted for call-site compatibility but currently unused.
    """
    return EsportsSite(user, wiki)
def log_into_fandom(user, wiki):
    """Log into a Fandom wiki (hard-coded account) using password_fandom.txt.

    NOTE(review): ``extended_site`` is never imported in this module, so this
    function raises NameError as written — it presumably needs
    ``import extended_site`` before use. Also, only user == 'me' sets
    ``password``; any other user would hit UnboundLocalError.
    """
    if user == 'me':
        password = open('password_fandom.txt').read().strip()
    site = extended_site.ExtendedSite('%s.fandom.com' % wiki, path='/')
    site.login('RheingoldRiver', password)
    return site
def report_errors(report_page, page, errors):
    """Append a bulleted error report about *page* onto *report_page*.

    *errors* is a list of exceptions; each one's first arg becomes a bullet.
    """
    text = report_page.text()
    error_text = '\n* '.join([e.args[0] for e in errors])
    newtext = text + '\n==Python Error Report==\nPage: [[{}]] Messages:\n* {}'.format(page, error_text)
    report_page.save(newtext)
def api_parse_query(site, datatype, values):
    """Resolve *values* through Module:PrintParsedText on the wiki.

    Builds an #invoke call, parses it server-side, strips the <p> wrapper,
    and returns the comma-separated output split into a list.
    """
    query_text = '{{#invoke:PrintParsedText|unordered|type=' + datatype + '|' + '|'.join(values) + '}}'
    query_result = site.api(
        'parse',
        format='json',
        text=query_text,
        prop='text',
        disablelimitreport=1,
        wrapoutputclass=''
    )
    result = query_result['parse']['text']['*']
    result = result.replace('<p>', '').replace('\n</p>', '')
    result_tbl = result.split(',')
    return result_tbl
def parse_ordered_field(val, sep):
    """Split *val* on *sep* (default ','), trimming whitespace around each separator.

    Returns the list of parts. The separator is treated literally: the
    original pattern concatenated a redundant ``\\s*`` and inserted the raw
    separator into the regex, which misbehaved for regex-special separators
    such as '.' or '|'; ``re.escape`` fixes that.
    """
    if not sep:
        sep = ','
    return re.split(r'\s*' + re.escape(sep) + r'\s*', val)
def check_links(template, key1, key2, sep, name, link):
    """Ensure the *key2* links list of *template* points *name* at *link*.

    *key1* holds a *sep*-separated list of display names and *key2* the
    parallel list of link targets (missing key2 is treated as all-empty).
    When *name* appears in key1, the same index of key2 is set to *link*.
    Both parameters are re-added with before= hints so key2 sits directly
    before key1 in the template.
    """
    if not sep:
        sep = ','
    if template.has(key1):
        val1 = template.get(key1).value.strip()
        tbl1 = parse_ordered_field(val1, sep)
        tbl2 = ['' for _ in range(len(tbl1))] # list(range(len(tbl1)))
        if template.has(key2):
            val2 = template.get(key2).value.strip()
            tbl2 = parse_ordered_field(val2, sep)
        if name in tbl1:
            i = tbl1.index(name)
            tbl2[i] = link
            template.add(key2,sep.join(tbl2), before=key1)
            # Re-add key1 unchanged to pin its position after key2.
            template.add(key1, val1, before=key2)
def get_filename_url_to_open(site, filename, size=None):
    """Return the direct URL of the wiki file *filename*, optionally at *size* px wide.

    Parses a [[File:...]] link through the MediaWiki parser and extracts the
    src attribute of the resulting <img> tag. Raises ValueError when no image
    src can be found in the parse output.
    """
    pattern = r'.*src\=\"(.+?)\".*'
    size = '|' + str(size) + 'px' if size else ''
    # Bug fix: the original mixed str.format with a literal '%s'
    # ('[[File:{}|link=%s]]'.format(filename, size)), so the size argument was
    # silently dropped and a bogus link=%s target was emitted. The size pipe
    # belongs inside the File link.
    to_parse_text = '[[File:{}{}|link=]]'.format(filename, size)
    result = site.api('parse', title='Main Page', text=to_parse_text, disablelimitreport=1)
    parse_result_text = result['parse']['text']['*']
    match = re.match(pattern, parse_result_text)
    if match is None:
        raise ValueError('could not find an image src for %s' % filename)
    return match[1]
def open_file_url(url):
    """Download *url* and return it as a PIL Image (fully read into memory first)."""
    return Image.open(io.BytesIO(urllib.request.urlopen(url).read()))
def open_image_from_filename(site, filename, size=None):
    """Open the wiki file *filename* as a PIL Image, optionally scaled to *size* px wide."""
    url = get_filename_url_to_open(site, filename, size=size)
    return open_file_url(url)
def tl_matches(tl, arr):
    """Return the entries of *arr* that the template's name matches (empty list if none)."""
    matched = []
    for candidate in arr:
        if tl.name.matches(candidate):
            matched.append(candidate)
    return matched
|
{"/esports_site.py": ["/extended_site.py"], "/top_schedule_refresh.py": ["/log_into_wiki.py"], "/patrol_namespaces.py": ["/log_into_wiki.py"], "/refresh_teamnames_cron.py": ["/log_into_wiki.py", "/luacache_refresh.py"], "/touch.py": ["/log_into_wiki.py"], "/!!scratch.py": ["/log_into_wiki.py"], "/sprites_cachebreak.py": ["/log_into_wiki.py"], "/lol_archive_compare.py": ["/esports_site.py"], "/blank_edit_players_from_league.py": ["/log_into_wiki.py"], "/rune sprite.py": ["/log_into_wiki.py"], "/disambig_creation.py": ["/log_into_wiki.py"], "/default_loadout.py": ["/log_into_wiki.py"], "/fortnite_auto_new_players.py": ["/log_into_wiki.py"], "/log_into_wiki.py": ["/esports_site.py"], "/match_schedule_hash.py": ["/log_into_wiki.py"], "/weekly_utils_main.py": ["/weekly_utils.py", "/esports_site.py", "/luacache_refresh.py"], "/scrape_runes_run.py": ["/log_into_wiki.py"], "/fortnite_player_blank_edit.py": ["/log_into_wiki.py", "/extended_page.py"], "/yearly_stats_pages.py": ["/extended_site.py", "/extended_page.py"], "/download_images_from_list.py": ["/log_into_wiki.py"], "/weekly_utils.py": ["/log_into_wiki.py"]}
|
22,820
|
banditti99/leaguepedia_util
|
refs/heads/master
|
/match_schedule_hash.py
|
import datetime
import mwparserfromhell
from log_into_wiki import *
ERROR_LOCATION = 'Maintenance:MatchSchedule Ordering Errors'
ERROR_TEAMS_TEXT = 'Team 1 - {}; Team 2: {}'
def get_append_hash(hash, res):
    """Render a new MSHash template recording this match's hash and teams."""
    template = mwparserfromhell.nodes.Template(name='MSHash')
    for param, value in (('hash', hash), ('team1', res['Team1']), ('team2', res['Team2'])):
        template.add(param, value)
    return str(template)
def verify_hash(template, team1, team2):
    """Return True when the stored teams agree with the current ones.

    A stored 'TBD' is treated as compatible with anything.
    """
    for param, current in (('team1', team1), ('team2', team2)):
        stored = template.get(param).value.strip()
        if stored != 'TBD' and stored != current:
            return False
    return True
def get_hash_template(ms_hash, wikitext):
    """Find the template in *wikitext* whose hash param equals *ms_hash*, or None."""
    candidates = (t for t in wikitext.filter_templates()
                  if t.has('hash') and t.get('hash').value.strip() == ms_hash)
    return next(candidates, None)
def get_error_text(res, page_name, tl):
    """Build the wiki-formatted error report line for one mismatched match."""
    team1_old = tl.get('team1').value.strip()
    team2_old = tl.get('team2').value.strip()
    match_info = 'Page - [[{}]]; Tab - {}; initialorder: {}'.format(page_name, res['Tab'], res['Order'])
    originally = ERROR_TEAMS_TEXT.format(team1_old, team2_old)
    currently = ERROR_TEAMS_TEXT.format(res['Team1'], res['Team2'])
    return 'Match Info: {}\n<br>Originally: {}\n<br>Now: {}<br>'.format(match_info, originally, currently)
def write_errors(site, errors):
    """Append collected ordering errors to the maintenance page, if any."""
    if not errors:
        return
    page = site.pages[ERROR_LOCATION]
    existing = page.text()
    if existing != '':
        errors.insert(0, existing)
    page.save('\n'.join(errors), summary='Reporting MatchSchedule initialorder Errors')
def check_page(site, page_name):
    """Sync the /Hash subpage of one Data: page with its MatchSchedule rows.

    Each match row gets a stable hash (tab + initial order in tab).  New
    hashes are appended; a changed team pairing is reported to the error
    page and then overwritten; stored TBDs are filled in silently.
    """
    response = site.api('cargoquery', tables='MatchSchedule',
                        fields='InitialN_MatchInTab=Order, Team1, Team2, Tab, InitialPageAndTab',
                        where='_pageName="%s"' % page_name
                        )
    result = response['cargoquery']
    hash_location = site.pages[page_name + '/Hash']
    text = hash_location.text()
    wikitext = mwparserfromhell.parse(text)
    hashes_to_add = []
    errors = []
    for res in result:
        data = res['title']
        # Prefer the match's original page/tab (recorded before any reorder).
        if data['InitialPageAndTab'] != '':
            ms_hash = data['InitialPageAndTab'].split('_')[1] + '_' + data['Order']
        else:
            ms_hash = data['Tab'] + '_' + data['Order']
        hash_template = get_hash_template(ms_hash, wikitext)
        if not hash_template:
            hashes_to_add.append(get_append_hash(ms_hash, data))
        elif not verify_hash(hash_template, data['Team1'], data['Team2']):
            # Teams changed after being recorded: report, then overwrite.
            errors.append(get_error_text(data, page_name, hash_template))
            hash_template.add('team1', data['Team1'])
            hash_template.add('team2', data['Team2'])
        else:  # There could be a TBD that we need to replace
            hash_template.add('team1', data['Team1'])
            hash_template.add('team2', data['Team2'])
    write_errors(site, errors)
    if str(wikitext) != '':
        hashes_to_add.insert(0, str(wikitext))
    new_text = '\n'.join(hashes_to_add)
    # Only save when something actually changed, to avoid null edits.
    if text != new_text:
        hash_location.save(new_text)
def check_recent_revisions(site):
    """Re-check hashes for every Data: page edited in the last 20 minutes."""
    then = (datetime.datetime.utcnow() - datetime.timedelta(minutes=20)).isoformat()
    now = datetime.datetime.utcnow().isoformat()
    revisions = site.api(
        'query', format='json',
        list='recentchanges',
        rcstart=now,
        rcend=then,
        rcprop='title',
        rclimit='max',
        # rctoponly=0, # commented bc we need all revisions to patrol user pages
        rcdir='older'
    )
    data_titles = [rev['title'] for rev in revisions['query']['recentchanges']
                   if rev['title'].startswith('Data:')]
    for title in data_titles:
        check_page(site, title)
if __name__ == '__main__':
    # ('me', 'lol') appear to select account/wiki — presumably resolved by
    # log_into_wiki.login; confirm against that module.
    site = login('me', 'lol')
    check_recent_revisions(site)
|
{"/esports_site.py": ["/extended_site.py"], "/top_schedule_refresh.py": ["/log_into_wiki.py"], "/patrol_namespaces.py": ["/log_into_wiki.py"], "/refresh_teamnames_cron.py": ["/log_into_wiki.py", "/luacache_refresh.py"], "/touch.py": ["/log_into_wiki.py"], "/!!scratch.py": ["/log_into_wiki.py"], "/sprites_cachebreak.py": ["/log_into_wiki.py"], "/lol_archive_compare.py": ["/esports_site.py"], "/blank_edit_players_from_league.py": ["/log_into_wiki.py"], "/rune sprite.py": ["/log_into_wiki.py"], "/disambig_creation.py": ["/log_into_wiki.py"], "/default_loadout.py": ["/log_into_wiki.py"], "/fortnite_auto_new_players.py": ["/log_into_wiki.py"], "/log_into_wiki.py": ["/esports_site.py"], "/match_schedule_hash.py": ["/log_into_wiki.py"], "/weekly_utils_main.py": ["/weekly_utils.py", "/esports_site.py", "/luacache_refresh.py"], "/scrape_runes_run.py": ["/log_into_wiki.py"], "/fortnite_player_blank_edit.py": ["/log_into_wiki.py", "/extended_page.py"], "/yearly_stats_pages.py": ["/extended_site.py", "/extended_page.py"], "/download_images_from_list.py": ["/log_into_wiki.py"], "/weekly_utils.py": ["/log_into_wiki.py"]}
|
22,821
|
banditti99/leaguepedia_util
|
refs/heads/master
|
/luacache_refresh.py
|
import re
def teamnames(site):
    """Rebuild the cached Teamnames table using Module:Team's PREFIX constant."""
    module_source = site.pages['Module:Team'].text().replace('\n', '')
    # Raises TypeError if Module:Team no longer declares PREFIX = '...'.
    prefix = re.match(r".*PREFIX = '(.+?)'.*", module_source)[1]
    invocation = '{{#invoke:CacheUtil|resetAll|Teamnames|module=Team|f=teamlinkname|prefix=' + prefix + '}}'
    site.api(action='parse', text=invocation)
|
{"/esports_site.py": ["/extended_site.py"], "/top_schedule_refresh.py": ["/log_into_wiki.py"], "/patrol_namespaces.py": ["/log_into_wiki.py"], "/refresh_teamnames_cron.py": ["/log_into_wiki.py", "/luacache_refresh.py"], "/touch.py": ["/log_into_wiki.py"], "/!!scratch.py": ["/log_into_wiki.py"], "/sprites_cachebreak.py": ["/log_into_wiki.py"], "/lol_archive_compare.py": ["/esports_site.py"], "/blank_edit_players_from_league.py": ["/log_into_wiki.py"], "/rune sprite.py": ["/log_into_wiki.py"], "/disambig_creation.py": ["/log_into_wiki.py"], "/default_loadout.py": ["/log_into_wiki.py"], "/fortnite_auto_new_players.py": ["/log_into_wiki.py"], "/log_into_wiki.py": ["/esports_site.py"], "/match_schedule_hash.py": ["/log_into_wiki.py"], "/weekly_utils_main.py": ["/weekly_utils.py", "/esports_site.py", "/luacache_refresh.py"], "/scrape_runes_run.py": ["/log_into_wiki.py"], "/fortnite_player_blank_edit.py": ["/log_into_wiki.py", "/extended_page.py"], "/yearly_stats_pages.py": ["/extended_site.py", "/extended_page.py"], "/download_images_from_list.py": ["/log_into_wiki.py"], "/weekly_utils.py": ["/log_into_wiki.py"]}
|
22,822
|
banditti99/leaguepedia_util
|
refs/heads/master
|
/weekly_utils_main.py
|
# weekly is a lie, this runs twice-daily
# Maintenance driver: fixes common template errors on every page edited since
# the last run, then refreshes the teamnames cache and scrapes rune data,
# logging successes/failures to the maintainer's log page.
import mwparserfromhell, datetime
import weekly_utils as utils
from esports_site import EsportsSite
import scrape_runes, luacache_refresh
from template_list import *  # provides gameschedule_templates, report_errors — confirm

site = EsportsSite('me', 'lol')
limit = -1  # -1 means "no page limit" (the loop counter never equals it)
site.standard_name_redirects()
# Blank edit pages we need to
blank_edit_pages = ['Leaguepedia:Top Schedule']
for page in blank_edit_pages:
    p = site.pages[page]
    p.save(p.text(), summary='blank editing')
# Window of edits to process: everything between the previous run and now.
now_timestamp = datetime.datetime.utcnow().isoformat()
with open('daily_last_run.txt', 'r') as f:
    last_timestamp = f.read()
with open('daily_last_run.txt', 'w') as f:
    f.write(now_timestamp)
revisions = site.api('query', format='json',
                     list='recentchanges',
                     rcstart=now_timestamp,
                     rcend=last_timestamp,
                     rcprop='title|ids|patrolled',
                     rclimit='max',
                     rctoponly=1,  # commented bc we need all revisions to patrol user pages
                     rcdir='older'
                     )
pages = []
pages_for_runes = []
for revision in revisions['query']['recentchanges']:
    title = revision['title']
    if title not in pages:
        pages.append(title)
    if title.startswith('Data:'):
        pages_for_runes.append(title)
lmt = 1
for page in pages:
    if lmt == limit:
        break
    lmt += 1
    try:
        p = site.pages[page]
    except KeyError:
        print(page)
        continue
    utils.make_doc_pages(site, p)
    # Stale edit-conflict copies in userspace (ns 2) are simply deleted.
    if '/Edit Conflict/' in page and p.namespace == 2 and p.text() != '':
        p.delete(reason='Deleting old edit conflict')
    else:
        text = p.text()
        wikitext = mwparserfromhell.parse(text)
        errors = []
        for template in wikitext.filter_templates():
            try:
                if template.name.matches('Infobox Player'):
                    utils.fixInfoboxPlayer(template)
                    if p.namespace == 0:
                        if template.has('checkboxIsPersonality'):
                            if template.get('checkboxIsPersonality').value.strip() != 'Yes':
                                utils.createResults(site, page, template, 'Tournament Results', 'Player', '{{PlayerResults|show=everything}}')
                elif template.name.matches('Infobox Team'):
                    utils.fixInfoboxTeam(template)
                    if p.namespace == 0:
                        utils.createResults(site, page, template, 'Tournament Results', 'Team', '{{TeamResults|show=everything}}')
                        utils.createResults(site, page, template, 'Schedule History', 'Team', '{{TeamScheduleHistory}}')
                        tooltip = site.pages['Tooltip:%s' % page]
                        tooltip.save('{{RosterTooltip}}', tags='daily_errorfix')
                elif template.name.strip() in gameschedule_templates:
                    utils.fixDST(template)
                    utils.updateParams(template)
                elif template.name.matches('PicksAndBansS7') or template.name.matches('PicksAndBans'):
                    utils.fixPB(site, template)
                elif template.name.matches('Listplayer/Current/End'):
                    template.add(1, '')
            except Exception as e:
                # Collect per-template failures; the page is still saved.
                errors.append(e)
        if p.namespace == 10008:  # Data namespace
            utils.set_initial_order(wikitext)
        newtext = str(wikitext)
        if text != newtext:
            print('Saving page %s...' % page)
            p.save(newtext, summary='Automated error fixing (Python)', tags='daily_errorfix')
        if len(errors) > 0:
            report_page = site.pages['User talk:RheingoldRiver']
            report_errors(report_page, page, errors)
luacache_refresh.teamnames(site)
# Append a run log entry, recording whether each rune scrape succeeded.
success_page = site.pages['User:RheingoldRiver/Maint Log']
text = success_page.text()
text = text + '\nScript finished maint successfully: ' + now_timestamp
try:
    scrape_runes.scrape(site, pages_for_runes, False)
    text = text + '\nScript finished regular runes successfully: ' + now_timestamp
except Exception as e:
    text = text + '\nException running regular runes: ' + str(e) + ' ' + now_timestamp
try:
    scrape_runes.scrapeLPL(site, pages_for_runes, False)
    text = text + '\nScript finished everything successfully: ' + now_timestamp
except Exception as e:
    text = text + '\nException running LPL runes: ' + str(e) + ' ' + now_timestamp
success_page.save(text, tags='daily_errorfix')
|
{"/esports_site.py": ["/extended_site.py"], "/top_schedule_refresh.py": ["/log_into_wiki.py"], "/patrol_namespaces.py": ["/log_into_wiki.py"], "/refresh_teamnames_cron.py": ["/log_into_wiki.py", "/luacache_refresh.py"], "/touch.py": ["/log_into_wiki.py"], "/!!scratch.py": ["/log_into_wiki.py"], "/sprites_cachebreak.py": ["/log_into_wiki.py"], "/lol_archive_compare.py": ["/esports_site.py"], "/blank_edit_players_from_league.py": ["/log_into_wiki.py"], "/rune sprite.py": ["/log_into_wiki.py"], "/disambig_creation.py": ["/log_into_wiki.py"], "/default_loadout.py": ["/log_into_wiki.py"], "/fortnite_auto_new_players.py": ["/log_into_wiki.py"], "/log_into_wiki.py": ["/esports_site.py"], "/match_schedule_hash.py": ["/log_into_wiki.py"], "/weekly_utils_main.py": ["/weekly_utils.py", "/esports_site.py", "/luacache_refresh.py"], "/scrape_runes_run.py": ["/log_into_wiki.py"], "/fortnite_player_blank_edit.py": ["/log_into_wiki.py", "/extended_page.py"], "/yearly_stats_pages.py": ["/extended_site.py", "/extended_page.py"], "/download_images_from_list.py": ["/log_into_wiki.py"], "/weekly_utils.py": ["/log_into_wiki.py"]}
|
22,823
|
banditti99/leaguepedia_util
|
refs/heads/master
|
/scrape_runes_run.py
|
# One-off driver: scrape rune data for a hand-picked list of Data: pages.
import scrape_runes
from log_into_wiki import *

site = login('me', 'lol')  # Set wiki
pages = ['Data:LPL/2019 Season/Spring Season', 'Data:LPL/2019 Season/Spring Season/2']
# pages = ['Data:OPL/2019 Season/Split 1/2']
# scrape_runes.scrape(site, pages, False)
scrape_runes.scrapeLPL(site, pages, False)
|
{"/esports_site.py": ["/extended_site.py"], "/top_schedule_refresh.py": ["/log_into_wiki.py"], "/patrol_namespaces.py": ["/log_into_wiki.py"], "/refresh_teamnames_cron.py": ["/log_into_wiki.py", "/luacache_refresh.py"], "/touch.py": ["/log_into_wiki.py"], "/!!scratch.py": ["/log_into_wiki.py"], "/sprites_cachebreak.py": ["/log_into_wiki.py"], "/lol_archive_compare.py": ["/esports_site.py"], "/blank_edit_players_from_league.py": ["/log_into_wiki.py"], "/rune sprite.py": ["/log_into_wiki.py"], "/disambig_creation.py": ["/log_into_wiki.py"], "/default_loadout.py": ["/log_into_wiki.py"], "/fortnite_auto_new_players.py": ["/log_into_wiki.py"], "/log_into_wiki.py": ["/esports_site.py"], "/match_schedule_hash.py": ["/log_into_wiki.py"], "/weekly_utils_main.py": ["/weekly_utils.py", "/esports_site.py", "/luacache_refresh.py"], "/scrape_runes_run.py": ["/log_into_wiki.py"], "/fortnite_player_blank_edit.py": ["/log_into_wiki.py", "/extended_page.py"], "/yearly_stats_pages.py": ["/extended_site.py", "/extended_page.py"], "/download_images_from_list.py": ["/log_into_wiki.py"], "/weekly_utils.py": ["/log_into_wiki.py"]}
|
22,824
|
banditti99/leaguepedia_util
|
refs/heads/master
|
/extended_page.py
|
import mwclient.page
class ExtendedPage(mwclient.page.Page):
    """mwclient Page with base-name helpers and a null-edit shortcut."""

    def __init__(self, page):
        super().__init__(page.site, page.name, info=page._info)
        # Portion of the title/name before the first subpage slash.
        self.base_title = self.page_title.split('/')[0]
        self.base_name = self.name.split('/')[0]

    @staticmethod
    def extend_pages(page_gen):
        """Lazily wrap every page yielded by *page_gen* as an ExtendedPage."""
        for raw_page in page_gen:
            yield ExtendedPage(raw_page)

    def touch(self, check_existence=False):
        """Perform a null ('blank') edit so the wiki re-renders this page."""
        if check_existence and not self.exists:
            return
        self.site.api(
            'edit',
            title=self.name,
            appendtext="",
            token=self.get_token('edit'),
            summary="ExtendedPage Touch Edit"
        )
|
{"/esports_site.py": ["/extended_site.py"], "/top_schedule_refresh.py": ["/log_into_wiki.py"], "/patrol_namespaces.py": ["/log_into_wiki.py"], "/refresh_teamnames_cron.py": ["/log_into_wiki.py", "/luacache_refresh.py"], "/touch.py": ["/log_into_wiki.py"], "/!!scratch.py": ["/log_into_wiki.py"], "/sprites_cachebreak.py": ["/log_into_wiki.py"], "/lol_archive_compare.py": ["/esports_site.py"], "/blank_edit_players_from_league.py": ["/log_into_wiki.py"], "/rune sprite.py": ["/log_into_wiki.py"], "/disambig_creation.py": ["/log_into_wiki.py"], "/default_loadout.py": ["/log_into_wiki.py"], "/fortnite_auto_new_players.py": ["/log_into_wiki.py"], "/log_into_wiki.py": ["/esports_site.py"], "/match_schedule_hash.py": ["/log_into_wiki.py"], "/weekly_utils_main.py": ["/weekly_utils.py", "/esports_site.py", "/luacache_refresh.py"], "/scrape_runes_run.py": ["/log_into_wiki.py"], "/fortnite_player_blank_edit.py": ["/log_into_wiki.py", "/extended_page.py"], "/yearly_stats_pages.py": ["/extended_site.py", "/extended_page.py"], "/download_images_from_list.py": ["/log_into_wiki.py"], "/weekly_utils.py": ["/log_into_wiki.py"]}
|
22,825
|
banditti99/leaguepedia_util
|
refs/heads/master
|
/fortnite_player_blank_edit.py
|
import log_into_wiki
from extended_page import ExtendedPage

# Blank-edit every player whose tournament results changed in the last 12
# hours so their cached pages re-render, then purge pages embedding PRWiki.
site = log_into_wiki.login('bot', 'fortnite-esports')
rc = site.recentchanges_by_interval(12 * 60, toponly=1)
data_pages = []
for p in rc:
    if p['title'].startswith('Data:'):
        data_pages.append(p['title'])
where = ' OR '.join(['TR._pageName="%s"' % _ for _ in data_pages])
players = site.cargo_pagelist(
    tables="TournamentResults=TR,TournamentResults__RosterLinks=RL,_pageData=pd",
    join_on="TR._ID=RL._rowID, RL._value=pd._pageName",
    where='(%s) AND RL._rowID IS NOT NULL AND pd._pageName IS NOT NULL' % where,
    fields="RL._value=player"
)
for player in ExtendedPage.extend_pages(players):
    player.touch(check_existence=True)
# purge PR pages
for page in site.pages['Template:PRWiki'].embeddedin():
    page.purge()
|
{"/esports_site.py": ["/extended_site.py"], "/top_schedule_refresh.py": ["/log_into_wiki.py"], "/patrol_namespaces.py": ["/log_into_wiki.py"], "/refresh_teamnames_cron.py": ["/log_into_wiki.py", "/luacache_refresh.py"], "/touch.py": ["/log_into_wiki.py"], "/!!scratch.py": ["/log_into_wiki.py"], "/sprites_cachebreak.py": ["/log_into_wiki.py"], "/lol_archive_compare.py": ["/esports_site.py"], "/blank_edit_players_from_league.py": ["/log_into_wiki.py"], "/rune sprite.py": ["/log_into_wiki.py"], "/disambig_creation.py": ["/log_into_wiki.py"], "/default_loadout.py": ["/log_into_wiki.py"], "/fortnite_auto_new_players.py": ["/log_into_wiki.py"], "/log_into_wiki.py": ["/esports_site.py"], "/match_schedule_hash.py": ["/log_into_wiki.py"], "/weekly_utils_main.py": ["/weekly_utils.py", "/esports_site.py", "/luacache_refresh.py"], "/scrape_runes_run.py": ["/log_into_wiki.py"], "/fortnite_player_blank_edit.py": ["/log_into_wiki.py", "/extended_page.py"], "/yearly_stats_pages.py": ["/extended_site.py", "/extended_page.py"], "/download_images_from_list.py": ["/log_into_wiki.py"], "/weekly_utils.py": ["/log_into_wiki.py"]}
|
22,826
|
banditti99/leaguepedia_util
|
refs/heads/master
|
/yearly_stats_pages.py
|
from extended_site import GamepediaSite
from extended_page import ExtendedPage

# Discover yearly player-stats subpages referenced by scoreboards that do not
# exist yet, create them (plus the career /Statistics overview), and for
# players whose page is itself a redirect, create under the target instead.
site = GamepediaSite('me', 'lol')
create_text = """{{PlayerTabsHeader}}
{{PlayerYearStats}}"""
overview_create_text = """{{PlayerTabsHeader}}
{{CareerPlayerStats}}"""
redirect_text = '#redirect[[%s]]'
summary = "Automatically discovering & creating year player stats"
results = site.cargoquery(
    tables='ScoreboardPlayer=SP,_pageData=PD1,_pageData=PD2',
    join_on='SP.Link=PD1._pageName,SP.StatsPage=PD2._pageName',
    where='PD1._pageName IS NOT NULL and PD2._pageName IS NULL and BINARY PD1._pageName=BINARY SP.Link',
    fields="SP.StatsPage=StatsPage, PD1._isRedirect=IsRedirect",
    group_by="SP.StatsPage",
    limit='max'
)


def save_pages(page):
    # Create the yearly stats page; also the base /Statistics page if missing.
    page.save(create_text, summary=summary)
    base_stats_page = site.pages[page.base_title + '/Statistics']
    if not base_stats_page.exists:
        base_stats_page.save(overview_create_text, summary=summary)


for result in results:
    stats_page = ExtendedPage(site.pages[result['StatsPage']])
    if result['IsRedirect'] == '0':
        save_pages(stats_page)
        continue
    # Player page is a redirect: create stats under the redirect target and
    # point this stats title at it.
    target = site.pages[stats_page.base_title].redirects_to()
    target_stats_page_name = stats_page.name.replace(stats_page.base_title, target.name)
    target_stats_page = ExtendedPage(site.pages[target_stats_page_name])
    save_pages(target_stats_page)
    stats_page.save(redirect_text % target_stats_page.name)
|
{"/esports_site.py": ["/extended_site.py"], "/top_schedule_refresh.py": ["/log_into_wiki.py"], "/patrol_namespaces.py": ["/log_into_wiki.py"], "/refresh_teamnames_cron.py": ["/log_into_wiki.py", "/luacache_refresh.py"], "/touch.py": ["/log_into_wiki.py"], "/!!scratch.py": ["/log_into_wiki.py"], "/sprites_cachebreak.py": ["/log_into_wiki.py"], "/lol_archive_compare.py": ["/esports_site.py"], "/blank_edit_players_from_league.py": ["/log_into_wiki.py"], "/rune sprite.py": ["/log_into_wiki.py"], "/disambig_creation.py": ["/log_into_wiki.py"], "/default_loadout.py": ["/log_into_wiki.py"], "/fortnite_auto_new_players.py": ["/log_into_wiki.py"], "/log_into_wiki.py": ["/esports_site.py"], "/match_schedule_hash.py": ["/log_into_wiki.py"], "/weekly_utils_main.py": ["/weekly_utils.py", "/esports_site.py", "/luacache_refresh.py"], "/scrape_runes_run.py": ["/log_into_wiki.py"], "/fortnite_player_blank_edit.py": ["/log_into_wiki.py", "/extended_page.py"], "/yearly_stats_pages.py": ["/extended_site.py", "/extended_page.py"], "/download_images_from_list.py": ["/log_into_wiki.py"], "/weekly_utils.py": ["/log_into_wiki.py"]}
|
22,827
|
banditti99/leaguepedia_util
|
refs/heads/master
|
/download_images_from_list.py
|
from log_into_wiki import *
import os

# Download every wiki image named in pages.txt into the local sprite folder,
# skipping files already on disk.
site = login('bot', 'lol')
LOC = 'Sprites/' + 'League Images'

with open('pages.txt', encoding="utf-8") as f:
    pages = f.readlines()

for page in pages:
    page = page.strip()
    # Idiom fix: replaced `if exists: pass / else: download` with a guard
    # clause; the wiki stores spaces as underscores, so check both spellings.
    if os.path.isfile(LOC + '/' + page) or os.path.isfile(LOC + '/' + page.replace(' ', '_')):
        continue
    img = open_image_from_filename(site, page)
    img.save(LOC + '/' + page, 'png')
|
{"/esports_site.py": ["/extended_site.py"], "/top_schedule_refresh.py": ["/log_into_wiki.py"], "/patrol_namespaces.py": ["/log_into_wiki.py"], "/refresh_teamnames_cron.py": ["/log_into_wiki.py", "/luacache_refresh.py"], "/touch.py": ["/log_into_wiki.py"], "/!!scratch.py": ["/log_into_wiki.py"], "/sprites_cachebreak.py": ["/log_into_wiki.py"], "/lol_archive_compare.py": ["/esports_site.py"], "/blank_edit_players_from_league.py": ["/log_into_wiki.py"], "/rune sprite.py": ["/log_into_wiki.py"], "/disambig_creation.py": ["/log_into_wiki.py"], "/default_loadout.py": ["/log_into_wiki.py"], "/fortnite_auto_new_players.py": ["/log_into_wiki.py"], "/log_into_wiki.py": ["/esports_site.py"], "/match_schedule_hash.py": ["/log_into_wiki.py"], "/weekly_utils_main.py": ["/weekly_utils.py", "/esports_site.py", "/luacache_refresh.py"], "/scrape_runes_run.py": ["/log_into_wiki.py"], "/fortnite_player_blank_edit.py": ["/log_into_wiki.py", "/extended_page.py"], "/yearly_stats_pages.py": ["/extended_site.py", "/extended_page.py"], "/download_images_from_list.py": ["/log_into_wiki.py"], "/weekly_utils.py": ["/log_into_wiki.py"]}
|
22,828
|
banditti99/leaguepedia_util
|
refs/heads/master
|
/weekly_utils.py
|
import dateutil.parser, pytz, re, datetime, mwparserfromhell
from log_into_wiki import *
site = login('me','lol')
typo_find = ['favourite','quater','partecipate','Portugese', 'Regelations']
typo_replace = ['favorite','quarter','participate','Portuguese', 'Relegations']
def typoFixes(text):
    """Return *text* with each known typo replaced by its correction.

    Bug fix: the original passed the whole typo_find/typo_replace lists to
    str.replace (a TypeError) and never incremented its loop counter, so any
    call would either crash or loop forever.
    """
    for typo, correction in zip(typo_find, typo_replace):
        text = text.replace(typo, correction)
    return text
teamhist_find = [r"(\d+)(\s*)-(\s*)(\"?Present\"?|''Present'')",
r'^\s*(.*)(\d+)\s*-\s*(\w+)',
r'^\s*(\w\w\w)(?:[A-Za-z])*\s*(\d\d\d\d) - (\w\w\w)(?:[A-Za-z])*\s*(\d)',
r"^\s*(\w\w\w)(?:[A-Za-z])*\s*(\d\d\d\d) - ''Present''",
r"^\s*(\w\w\w)(?:[A-Za-z])*\s*- (\w\w\w)(?:[A-Za-z])*\s*(\d\d\d\d)",
r"\?\s*-\s*Present",
r'^\s*([\? ]+)-',
r'^\s*(.*)-([\? ]+)$',
]
teamhist_replace = [r"\1 - ''Present''",
r'\1\2 - \3',
r'\1 \2 - \3 \4',
r"\1 \2 - ''Present''",
r'\1 \3 - \2 \3',
r"? - ''Present''",
r'??? ???? -',
r'\1- ??? ????',
]
social_fr = [
{ "field" : "twitter", "find" : r'(?:\[?https?://)?(?:www\.)?(?:twitter\.com/)?([^/ \n]+)(.*\])?', "replace" : r'\1' },
{ "field" : "twitter", "find" : r'/$', "replace" : r'' },
{ "field" : "stream", "find" : r'\[?(?:https?://)?(.*)/([^ \n]+).*', "replace" : r'https://\1/\2' },
{ "field" : "stream", "find" : r'/$', "replace" : r'' },
{ "field" : "instagram", "find" : r'(?:\[?https?://)?(?:www\.)?(?:instagram\.com/)?([^/ \n]+)(.*\])?', "replace" : r'\1' },
{ "field" : "instagram", "find" : r'/$', "replace" : r'' },
{ "field" : "facebook", "find" : r'\[?(?:https?://)?(.*)/([^ \n]+).*', "replace" : r'https://\1/\2' },
{ "field" : "facebook", "find" : r'/$', "replace" : r'' },
{ "field" : "youtube", "find" : r'\[?(?:https?://)?([^ \n]*)(.*\])?', "replace" : r'https://\1' },
{ "field" : "website", "find" : r'\[?([^ \n]*)(.*\])?', "replace" : r'\1' },
{ "field" : "vk", "find": r'(?:\[?https?://)?(?:www\.)?(?:vk\.com/)?([^/ \n]+)(.*\])?', "replace": r'https://vk.com/\1'},
{ "field" : "vk", "find" : r'/$', "replace" : r'' },
]
def fixSocialField(template, item):
    """Normalize one social-media field using the find/replace rule in *item*.

    HTML comment markers in the value are preserved: only the segment before
    the first `<!--` is rewritten.
    """
    field = item['field']
    if not template.has(field):
        return
    current = template.get(field).value.strip()
    if current == '':
        return
    segments = re.split(r'(<!--|-->)', current)
    segments[0] = re.sub(item['find'], item['replace'], segments[0])
    template.add(field, ''.join(segments))
def fixInfoboxPlayer(template):
    """Normalize social fields and teamdateN history lines on a player infobox."""
    for entry in social_fr:
        fixSocialField(template, entry)
    index = 1
    while template.has('teamdate' + str(index)):
        key = 'teamdate' + str(index)
        value = str(template.get(key).value.strip())
        for pattern, repl in zip(teamhist_find, teamhist_replace):
            value = re.sub(pattern, repl, value)
        template.add(key, value)
        index += 1
    return
def fixInfoboxTeam(template):
    """Normalize social fields and drop a redundant isdisbanded=no flag."""
    for entry in social_fr:
        fixSocialField(template, entry)
    if template.has('isdisbanded') and template.get('isdisbanded').value.strip().lower() == 'no':
        template.remove('isdisbanded')
    return
def createResults(site, page, template, subpage, result_type, template_text):
    """Create the results subpage for a team/player if it doesn't exist yet.

    Pages flagged as personalities (checkboxIsPersonality=Yes) are skipped.
    """
    is_personality = (template.has('checkboxIsPersonality')
                      and template.get('checkboxIsPersonality').value.strip() == 'Yes')
    if is_personality:
        return
    subpage_obj = site.pages[page + '/' + subpage]
    if subpage_obj.text() == '':
        subpage_obj.save('{{{{{}TabsHeader}}}}\n{}'.format(result_type, template_text), tags='daily_errorfix')
# Broadcast timezones; keys match the |timezone= values used by
# gameschedule templates (see fixDST below).
pst = pytz.timezone('America/Los_Angeles')
est = pytz.timezone('America/New_York')
cet = pytz.timezone('Europe/Berlin')
kst = pytz.timezone('Asia/Seoul')
tz_lookup = {
    'PST': pst,
    'EST': est,
    'CET': cet,
    'KST': kst
}
def fixDST(template):
    """Set the dst= param from the match's local date/time and timezone.

    'yes' when both PST and CET are in DST, 'spring' when only PST is,
    otherwise 'no'.  Does nothing when date or time is missing.
    """
    if not (template.has('date') and template.has('time')):
        return
    date_str = template.get('date').value.strip()
    time_str = template.get('time').value.strip()
    local_tz = tz_lookup[template.get('timezone').value.strip()]
    localized = local_tz.localize(dateutil.parser.parse(date_str + ' ' + time_str))
    dst_pst = bool(localized.astimezone(pst).dst())
    dst_cet = bool(localized.astimezone(cet).dst())
    if dst_pst and dst_cet:
        template.add('dst', 'yes')
    elif dst_pst:
        template.add('dst', 'spring')
    else:
        template.add('dst', 'no')
def updateParams(template):
    """Rename legacy gameschedule params to the current conventions."""
    renames = (
        ('t1score', 'team1score'),
        ('t2score', 'team2score'),
        ('post-match', 'reddit'),
    )
    for old_name, new_name in renames:
        if template.has(old_name):
            template.get(old_name).name = new_name
# Parameter groups checked by fixPB: every champion pick/ban arg is checked
# together for duplicates; each team's role args are checked separately.
pb_data = [
    {
        'data_type': 'champion',
        'args': ['blueban1', 'blueban2', 'blueban3', 'blueban4', 'blueban5', 'red_ban1', 'red_ban2', 'red_ban3', 'red_ban4', 'red_ban5', 'bluepick1', 'bluepick2', 'bluepick3', 'bluepick4', 'bluepick5', 'red_pick1', 'red_pick2', 'red_pick3', 'red_pick4', 'red_pick5']
    },
    {
        'data_type': 'role',
        'args': ['bluerole1', 'bluerole2', 'bluerole3', 'bluerole4', 'bluerole5'],
    },
    {
        'data_type': 'role',
        'args': ['red_role1', 'red_role2', 'red_role3', 'red_role4', 'red_role5']
    }
]
# Values that may legitimately repeat; excluded from the duplicate check.
pb_exceptions = ['', 'unknown', 'none', 'missing data', 'loss of ban']
def fixPB(site, template):
    """Flag duplicate champions/roles in a picks-and-bans template.

    Values are normalized server-side via Module:PrintParsedText, then
    checked for duplicates; a has<type>error=Yes param marks failures.
    """
    for lookup in pb_data:
        datatype = lookup['data_type']
        values = [template.get(arg).value.strip()
                  for arg in lookup['args'] if template.has(arg)]
        query_text = '{{#invoke:PrintParsedText|unordered|type=' + datatype + '|' + '|'.join(values) + '}}'
        query_result = site.api(
            'parse',
            format='json',
            text=query_text,
            prop='text',
            disablelimitreport=1,
            wrapoutputclass=''
        )
        parsed = query_result['parse']['text']['*'].replace('<p>', '').replace('\n</p>', '')
        entries = [e for e in parsed.split(',') if e.lower() not in pb_exceptions]
        if len(entries) != len(set(entries)):
            template.add('has' + datatype + 'error', 'Yes')
def set_initial_order(wikitext):
    """Number MatchSchedule templates within each tab via initialorder=.

    The counter restarts at every MatchSchedule/Start; templates that
    already carry initialorder keep their value but still advance it.
    """
    counter = 0
    for tpl in wikitext.filter_templates():
        if tpl.name.matches('MatchSchedule/Start'):
            counter = 0
        elif tpl.name.matches('MatchSchedule'):
            counter += 1
            if not tpl.has('initialorder'):
                tpl.add('initialorder', str(counter), before='team1')
# Rules consumed by make_doc_pages: when a page name matches 'matches' and
# not 'notmatches', each title in 'pages' is created with the given template
# text; '{}' is filled with the page title.
DOC_PAGES_TO_MAKE = [
    {
        'matches': r'^Module:Bracket/',
        'notmatches': r'(doc|Wiki)$',
        'pages': {
            'Tooltip:Module:{}': '{{BracketTooltip}}',
            'Module:{}/doc': '{{BracketDoc}}'
        }
    },
    {
        'matches': r'^Module:.*/i18n$',
        'notmatches': r'doc$',
        'pages': {
            'Module:{}/doc': '{{i18ndoc}}'
        }
    },
    {
        'matches': r'^Module:CargoDeclare/',
        'notmatches': r'doc$',
        'pages': {
            'Module:{}/doc': '{{CargodocModule}}'
        }
    }
]
def make_doc_pages(site, p):
    """Create the companion doc/tooltip pages dictated by DOC_PAGES_TO_MAKE."""
    for case in DOC_PAGES_TO_MAKE:
        if 'matches' in case.keys() and not re.findall(case['matches'], p.name):
            continue
        if 'notmatches' in case.keys() and re.findall(case['notmatches'], p.name):
            continue
        for title_fmt, content in case['pages'].items():
            site.pages[title_fmt.format(p.page_title)].save(
                content, summary='Automated error fixing (Python)',
                tags='daily_errorfix')
|
{"/esports_site.py": ["/extended_site.py"], "/top_schedule_refresh.py": ["/log_into_wiki.py"], "/patrol_namespaces.py": ["/log_into_wiki.py"], "/refresh_teamnames_cron.py": ["/log_into_wiki.py", "/luacache_refresh.py"], "/touch.py": ["/log_into_wiki.py"], "/!!scratch.py": ["/log_into_wiki.py"], "/sprites_cachebreak.py": ["/log_into_wiki.py"], "/lol_archive_compare.py": ["/esports_site.py"], "/blank_edit_players_from_league.py": ["/log_into_wiki.py"], "/rune sprite.py": ["/log_into_wiki.py"], "/disambig_creation.py": ["/log_into_wiki.py"], "/default_loadout.py": ["/log_into_wiki.py"], "/fortnite_auto_new_players.py": ["/log_into_wiki.py"], "/log_into_wiki.py": ["/esports_site.py"], "/match_schedule_hash.py": ["/log_into_wiki.py"], "/weekly_utils_main.py": ["/weekly_utils.py", "/esports_site.py", "/luacache_refresh.py"], "/scrape_runes_run.py": ["/log_into_wiki.py"], "/fortnite_player_blank_edit.py": ["/log_into_wiki.py", "/extended_page.py"], "/yearly_stats_pages.py": ["/extended_site.py", "/extended_page.py"], "/download_images_from_list.py": ["/log_into_wiki.py"], "/weekly_utils.py": ["/log_into_wiki.py"]}
|
22,833
|
Kavyeah/tkinter-interface
|
refs/heads/master
|
/compressgu.py
|
from tkinter import *
import re
root = Tk()
root.geometry("1200x6000")
root.title("Compress your file")
lblInfo =Label(root, font = ('helvetica', 50, 'bold'),
text = "COMPRESS YOUR FILE",
fg = "Black", bd = 10, anchor='w')
lblInfo.grid(row = 0, column = 0)
filename = StringVar()
e1 = Entry(root, font = ('arial', 16, 'bold'),textvariable = filename, bd = 10, insertwidth = 9,width=50,bg = "powder blue", justify = 'right')
e1.grid(row=50, column=0)
def compress():
    # TODO: plug in the actual compression algorithm here.
    # Bug fix: the original body started with a C-style '//' comment, which
    # is a Python syntax error and prevented this module from importing.
    root.destroy()
btnExit =Button(root, padx = 20, pady = 20, bd = 16,
fg = "black", font = ('arial', 16, 'bold'),
width = 20, text = "compress", bg = "red",
command = compress).grid(row = 50, column = 29)
root.mainloop()
|
{"/index.py": ["/compressgu.py", "/decompressgu.py", "/encryptgu.py"]}
|
22,834
|
Kavyeah/tkinter-interface
|
refs/heads/master
|
/index.py
|
# import tkinter module
from tkinter import *
# creating root object
root = Tk()
# defining size of window
root.geometry("1200x6000")
# setting up the title of window
root.title("Message Encryption and Decryption")
Tops = Frame(root, width = 1600, relief = SUNKEN)
Tops.pack(side = TOP)
f1 = Frame(root, width = 800, height = 700,
relief = SUNKEN)
f1.pack(side = LEFT)
lblInfo = Label(Tops, font = ('helvetica', 50, 'bold'),
text = "FILE COMPRESSION",
fg = "Black", bd = 10, anchor='w')
lblInfo.grid(row = 0, column = 0)
# exit function
def qExit():
    # Close the root window and end the mainloop.
    root.destroy()


def compress():
    # Importing each tool module runs its Tk window as an import side effect.
    import compressgu


def decompress():
    import decompressgu


def encrypt():
    import encryptgu


def decrypt():
    import decryptgu
# Show message button
btnTotal = Button(f1, padx = 16, pady = 8, bd = 16, fg = "black",
font = ('arial', 16, 'bold'), width = 10,
text = "COMPRESS", bg = "indian red",
command = compress).grid(row = 7, column = 1)
btnTotal = Button(f1, padx = 16, pady = 8, bd = 16, fg = "black",
font = ('arial', 16, 'bold'), width = 10,
text = "DECOMPRESS", bg = "brown4",
command = decompress).grid(row = 7, column = 2)
btnTotal = Button(f1, padx = 16, pady = 8, bd = 16, fg = "black",
font = ('arial', 16, 'bold'), width = 10,
text = "ENCRYPT", bg = "brown4",
command = encrypt).grid(row = 8, column = 1)
# Reset button
btnReset = Button(f1, padx = 16, pady = 8, bd = 16,
fg = "black", font = ('arial', 16, 'bold'),
width = 10, text = "DECRYPT", bg = "indian red",
command = decrypt).grid(row = 8, column = 2)
# Exit button
exit = Button(root,bd = 16,
fg = "black", font = ('arial', 16, 'bold'),width=10,
text = "Exit", bg = "brown4",
command = qExit)
exit.place(relx=1.0, rely=1.0, anchor=SE)
# keeps window alive
root.mainloop()
|
{"/index.py": ["/compressgu.py", "/decompressgu.py", "/encryptgu.py"]}
|
22,835
|
Kavyeah/tkinter-interface
|
refs/heads/master
|
/encryptgu.py
|
from tkinter import *
import subprocess
root = Tk()
root.geometry("1200x6000")
root.title("Encryption")
lblInfo = Label(root, font = ('helvetica', 50, 'bold'),
text = "ENCRYPT YOUR FILE",
fg = "Black", bd = 10, anchor='w')
lblInfo.grid(row = 0, column = 0)
filename = StringVar()
e1 = Entry(root, font = ('arial', 16, 'bold'),textvariable = filename, bd = 10, insertwidth = 9,width=50,bg = "powder blue", justify = 'right')
e1.grid(row=50, column=0)
def encrypt():
    # Compile and run the external C encryption program (requires gcc and
    # enc.c next to this script), then close the window.
    subprocess.call(["gcc","enc.c"])
    subprocess.call("./a.out")
    root.destroy()
btnExit =Button(root, padx = 20, pady = 20, bd = 16,
fg = "black", font = ('arial', 16, 'bold'),
width = 20, text = "Encrypt", bg = "red",
command = encrypt).grid(row = 50, column = 29)
# keeps window alive
root.mainloop()
|
{"/index.py": ["/compressgu.py", "/decompressgu.py", "/encryptgu.py"]}
|
22,836
|
Kavyeah/tkinter-interface
|
refs/heads/master
|
/decompressgu.py
|
# Stand-alone Tk window with an entry field and a "DECOMPRESS" button.
from tkinter import *
from shlex import split
import subprocess

root = Tk()
root.geometry("1200x6000")
root.title("Decompress your file")
lblInfo = Label(root, font=('helvetica', 50, 'bold'),
                text="DECOMPRESS YOUR FILE",
                fg="Black", bd=10, anchor='w')
lblInfo.grid(row=0, column=0)
# Entry widget for the target file name (not yet consumed anywhere).
filename = StringVar()
e1 = Entry(root, font=('arial', 16, 'bold'), textvariable=filename, bd=10,
           insertwidth=9, width=50, bg="powder blue", justify='right')
e1.grid(row=50, column=0)

def decompress():
    """Run the (not yet implemented) decompression step, then close the UI.

    Bug fix: the original body contained a C-style comment line
    ("//write your algorithm"), which is a SyntaxError in Python.
    """
    pass  # TODO: implement the decompression algorithm
    root.destroy()

btnExit = Button(root, padx=20, pady=20, bd=16,
                 fg="black", font=('arial', 16, 'bold'),
                 width=20, text="DECOMPRESS", bg="red",
                 command=decompress).grid(row=50, column=29)
# keeps window alive
root.mainloop()
|
{"/index.py": ["/compressgu.py", "/decompressgu.py", "/encryptgu.py"]}
|
22,837
|
standroidbeta/CS-Build-Week-2
|
refs/heads/master
|
/direct_path.py
|
import json
import requests
from decouple import config
from utils import Queue
from time import sleep
from player import Player
from operations import Operations
player = Player()
ops = Operations()
api_key = config('STAN_KEY')
with open("room_conns.txt", "r") as conns:
room_conn = json.loads(conns.read())
# {
# "room_id": 148,
# "title": "A misty room",
# "description": "You are standing on grass and surrounded by a dense mist.
# You can barely make out the exits in any direction.",
# "coordinates": "(56,57)",
# "elevation": 0,
# "terrain": "NORMAL",
# "players": [],
# "items": [],
# "exits": [
# "e",
# "w"
# ],
# "cooldown": 1.0,
# "errors": [],
# "messages": []
# }
current_room = requests.get('https://lambda-treasure-hunt.herokuapp.com/api/adv/init/',
headers={'Authorization': api_key}).json()
def bfs(current_room, end_room):
    """Breadth-first search over the module-level ``room_conn`` graph.

    Returns the shortest list of room ids from ``current_room`` to
    ``end_room`` (endpoints included), or ``None`` when no path exists.
    """
    frontier = Queue()
    frontier.enqueue([current_room])
    seen = set()
    while frontier.size() > 0:
        route = frontier.dequeue()
        here = route[-1]
        if here in seen:
            continue
        if here == end_room:
            return route
        seen.add(here)
        # Fan out to every neighbouring room id recorded for this room.
        for neighbor in room_conn[str(here)].values():
            frontier.enqueue(route + [neighbor])
    return None
def go_to_room(current_room, end_room):
    """Walk the player from *current_room* (a room dict) to room id *end_room*.

    Uses bfs() over the module-level room_conn map to get the room-id path,
    converts consecutive ids into compass directions, then POSTs one move
    per step, sleeping through each room's cooldown.
    """
    fast_paths = bfs(current_room["room_id"], end_room=end_room)
    print(fast_paths)
    routes = []
    # Translate consecutive room ids into compass directions.
    for ind in range(len(fast_paths)):
        for direc in ["n", "e", "w", "s"]:
            try:
                if room_conn[str(fast_paths[ind])][direc] == fast_paths[ind + 1]:
                    routes.append(direc)
            except KeyError:
                None  # bare-expression no-op: no exit in this direction
            except IndexError:
                None  # bare-expression no-op: last room has no successor
    counter = 0
    while current_room['room_id'] != end_room:
        for dirs in routes:
            if counter < len(fast_paths):
                sleep(current_room['cooldown'])
                next_room = requests.post("https://lambda-treasure-hunt.herokuapp.com/api/adv/move/",
                                          json={"direction": str(dirs)}, headers={'Authorization': api_key}).json()
                current_room = next_room
                print(current_room)
                counter += 1
            else:
                break
def sell(item="tiny treasure"):
    """Sell *item* at the Shop; prints a hint and does nothing elsewhere.

    Reads the module-level ``current_room``. NOTE(review): the sell URL
    below begins with a leading space — confirm requests resolves it.
    """
    if current_room['title'] != 'Shop':
        print("You should go to the shop to sell")
    else:
        sleep(current_room["cooldown"])
        # res1 = requests.post(" https://lambda-treasure-hunt.herokuapp.com/api/adv/sell/", json={"name": item},
        #                      headers={'Authorization': api_key}).json()
        # print(res1)
        # sleep(res1["cooldown"])
        res = requests.post(" https://lambda-treasure-hunt.herokuapp.com/api/adv/sell/",
                            json={"name": item, "confirm": "yes"}, headers={'Authorization': api_key}).json()
        print(res)
        sleep(res["cooldown"])
def examine(item='Well'):
    """POST an examine request for *item* and write its description to well_code.txt."""
    sleep(current_room["cooldown"])
    res = requests.post("https://lambda-treasure-hunt.herokuapp.com/api/adv/examine/", json={"name": item},
                        headers={'Authorization': api_key}).json()
    with open("well_code.txt", "w") as wc:
        wc.write(res['description'])
    sleep(res["cooldown"])
# sell('shiny treasure')
# ops.status_check()
# ops.change_name("standroidbeta")
# examine('Well')
# List of important room numbers (ids as strings, matching room_conn keys).
shop = '1'
namer = '467'
speed_shrine = '461'
warp_shrine = '374'
gold_shrine = '499'
sandovsky_statue = '492'
aarons_athenaeum = '486'
well = '55'
mine = '112'
# Walk to the mine when this module is executed (runs at import time).
go_to_room(current_room, mine)
# 'tiny treasure', 'tiny treasure', 'small treasure', 'shiny treasure', 'small treasure', 'shiny treasure'
|
{"/direct_path.py": ["/operations.py"]}
|
22,838
|
standroidbeta/CS-Build-Week-2
|
refs/heads/master
|
/mapper.py
|
import requests
from time import sleep
import json
from utils import Queue
from decouple import config
api_key = config('STAN_KEY')
def initialize_room():
    """Write fresh room_data.txt / room_conns.txt seeded with starting room 0."""
    room_data = []
    # Hard-coded snapshot of the known starting room.
    start_room = {
        "room_id": 0,
        "title": "A brightly lit room",
        "description": """
You are standing in the center of a brightly lit room. You notice a shop to the west and exits
to the north, south and east.
""",
        "coordinates": "(60,60)",
        "players": [],
        "elevation": 0,
        "terrain": "NORMAL",
        "items": [],
        "exits": ["n", "s", "e", "w"],
        "cooldown": 1.0,
        "errors": [],
        "messages": []
    }
    room_data.append(start_room)
    with open('room_data.txt', 'w') as rd:
        rd.write(json.dumps(room_data))
    # start with all unknowns
    room_conns = {}
    start_conns = {"0": {"n": "?", "s": "?", "e": "?", "w": "?"}}
    room_conns.update(start_conns)
    # parse to conn file
    with open("room_conns.txt", "w") as conns:
        conns.write(json.dumps(room_conns))
def explore(queue):
    """Queue the next direction(s) to walk from the current (last-visited) room.

    Prefers an unchecked ("?") exit of the current room; otherwise asks
    bft() for the path to the nearest room with one and queues the
    directions of the current room that match that path. Reads the
    module-level room_data / room_conns.
    """
    room_id = str(room_data[-1]["room_id"])
    current_conns = room_conns[room_id]
    unchecked_conns = []
    for direction in current_conns:
        if current_conns[direction] == "?":
            unchecked_conns.append(direction)
    if unchecked_conns:
        # Take the first unexplored exit of the room we are standing in.
        queue.enqueue(unchecked_conns[0])
    else:
        unchecked_paths = bft(room_data, room_conns)
        if unchecked_paths is not None:
            for path in unchecked_paths:
                for direction in current_conns:
                    if current_conns[direction] == path:
                        queue.enqueue(direction)
def bft(room_data, room_conns):
    """Breadth-first traversal toward the nearest room with an unexplored exit.

    Args:
        room_data: list of room dicts; only the last entry's "room_id" is
            used as the starting point.
        room_conns: dict mapping room-id strings to {direction: room-id or
            "?"} adjacency maps.

    Returns:
        The list of room ids from the start to the first room found (in
        BFS order) that still has a "?" exit, or None when the whole
        reachable graph is fully explored.
    """
    # stdlib FIFO instead of the hand-rolled utils.Queue (same semantics).
    from collections import deque

    frontier = deque()
    frontier.append([str(room_data[-1]["room_id"])])
    visited = set()
    while frontier:
        room_list = frontier.popleft()
        room = room_list[-1]
        if room in visited:
            continue
        visited.add(room)
        for direction, neighbor in room_conns[room].items():
            if neighbor == "?":
                # This room still has an unexplored exit: return the path
                # that leads here so the caller can walk it.
                return room_list
            frontier.append(room_list + [neighbor])
    return None
# --- Main mapping loop: walk the maze, recording rooms and connections. ---
initialize_room()
with open("room_data.txt", "r") as rdat:
    room_data = json.loads(rdat.read())
with open("room_conns.txt", "r") as rconn:
    room_conns = json.loads(rconn.read())
q2 = Queue()
explore(q2)
while q2.size() > 0:
    # Room information (re-read every iteration so files stay the source of truth).
    with open("room_data.txt", "r") as rdat:
        room_data = json.loads(rdat.read())
    # Room connections
    with open("room_conns.txt", "r") as rconn:
        room_conns = json.loads(rconn.read())
    player_room = str(room_data[-1]["room_id"])
    direction = q2.dequeue()
    res = requests.post("https://lambda-treasure-hunt.herokuapp.com/api/adv/move/",
                        json={"direction": direction}, headers={'Authorization': api_key})
    data = res.json()
    room_data.append(data)
    new_room = str(room_data[-1]["room_id"])
    print("Now in Room:" + str(new_room))
    # Record the edge we just traversed; stub a brand-new room's exits as "?".
    room_conns[player_room][direction] = new_room
    if new_room not in room_conns:
        exits = data["exits"]
        directions = {}
        for xit in exits:
            directions[xit] = "?"
        room_conns[new_room] = directions
    # Also record the reverse edge back to where we came from.
    puddit_in_reverse_terry = {"n": "s", "s": "n", "e": "w", "w": "e"}
    reverse = puddit_in_reverse_terry[direction]
    room_conns[new_room][reverse] = player_room
    # Push it.. push it to the text files
    with open("room_data.txt", "w") as rdat:
        rdat.write(json.dumps(room_data))
    with open("room_conns.txt", "w") as rcon:
        rcon.write(json.dumps(room_conns))
    # rest my child
    sleep(data["cooldown"])
    explore(q2)
|
{"/direct_path.py": ["/operations.py"]}
|
22,839
|
standroidbeta/CS-Build-Week-2
|
refs/heads/master
|
/operations.py
|
import requests
from time import sleep
import json
import hashlib
import random
from decouple import config
api_key = config('STAN_KEY')
url = "https://lambda-treasure-hunt.herokuapp.com/api/adv/init/"
class Operations:
    """Thin wrapper around the Lambda Treasure Hunt REST API.

    Tracks the player's current room dict and sleeps through each
    response's cooldown before returning.
    """

    def __init__(self):
        self.current_room = {}  # last room dict returned by the API
        self.wait = None        # cooldown (seconds) from the init response

    def init_player(self):
        """GET the init endpoint, remember the starting room, and return it."""
        res = requests.get(url, headers={'Authorization': api_key}).json()
        self.wait = float(res.get('cooldown'))
        self.current_room = res
        sleep(res["cooldown"])
        return self.current_room

    def room_id(self):
        """Return the id of the current room."""
        return self.current_room['room_id']

    def move(self,direction):
        """Move one step; returns the new room dict, or None for an invalid exit."""
        if direction not in self.current_room['exits']:
            print("You can't go that way")
            return
        else:
            res = requests.post("https://lambda-treasure-hunt.herokuapp.com/api/adv/move/",
                                json={"direction": direction}, headers={'Authorization': api_key}).json()
            self.current_room = res
            sleep(res["cooldown"])
            return self.current_room

    def take(self):
        """Pick up the first item listed in the current room, if any."""
        if len(self.current_room['items']) == 0:
            print('Nothing here to take')
            return None
        else:
            item = self.current_room['items']
            print(f'Taking {item}')
            res = requests.post("https://lambda-treasure-hunt.herokuapp.com/api/adv/take/",
                                json={"name": item[0]}, headers={'Authorization': api_key}).json()
            sleep(res["cooldown"])

    def status_check(self):
        """POST a status request and print the response."""
        res = requests.post("https://lambda-treasure-hunt.herokuapp.com/api/adv/status/",
                            headers={'Authorization': api_key}).json()
        print(res)
        sleep(res["cooldown"])

    def sell(self, item="tiny treasure"):
        """Sell *item*: an offer request followed by a confirming request.

        Only works in the Shop. NOTE(review): the sell URL below begins
        with a leading space — confirm requests resolves it.
        """
        if self.current_room['title'] != 'Shop':
            print("You should go to the shop to sell")
        else:
            res1 = requests.post(" https://lambda-treasure-hunt.herokuapp.com/api/adv/sell/",
                                 json={"name": item}, headers={'Authorization': api_key}).json()
            print(res1)
            sleep(res1["cooldown"])
            res = requests.post(" https://lambda-treasure-hunt.herokuapp.com/api/adv/sell/",
                                json={"name": item, "confirm": "yes"}, headers={'Authorization': api_key}).json()
            print(res)
            sleep(res["cooldown"])

    def change_name(self, name):
        """Rename the player and return the API response.

        NOTE(review): the name is sent wrapped in a list ([name]) and the
        confirmation value is "aye" — verify both against the API contract.
        """
        res = requests.post("https://lambda-treasure-hunt.herokuapp.com/api/adv/change_name/",
                            json={"name": [name], "confirm": "aye"}, headers={'Authorization': api_key}).json()
        print("You shall be known as", str(name))
        print(res)
        return res

    def lambda_coin_wallet(self):
        """Print the player's Lambda Coin balance."""
        res = requests.get("https://lambda-treasure-hunt.herokuapp.com/api/bc/get_balance/",
                           headers={'Authorization': api_key}).json()
        print(res)
        sleep(res["cooldown"])

    def wise_explore(self, direction, next_id):
        """Move while also sending a predicted next_room_id.

        NOTE(review): this request sends no Authorization header, unlike
        every other call in this class — probably an oversight; confirm.
        """
        if direction not in self.current_room['exits']:
            print('Not a valid move')
            return
        else:
            res = requests.post("https://lambda-treasure-hunt.herokuapp.com/api/adv/move/",
                                json={"direction": direction, "next_room_id": next_id}).json()
            sleep(res["cooldown"])

    def pray(self):
        """Pray at a Shrine; prints a message and does nothing elsewhere."""
        if self.current_room["title"] != 'Shrine':
            print("Nothing here to pray to")
            return
        else:
            res = requests.post("https://lambda-treasure-hunt.herokuapp.com/api/adv/pray/",
                                headers={'Authorization': api_key}).json()
            print("praying")
            print(res)
            sleep(res["cooldown"])
# Module-level demo: performs a live balance check whenever this file is
# imported or executed.
ops = Operations()
ops.lambda_coin_wallet()
# Sample run output:
# C:\Users\SEEK\AppData\Local\Programs\Python\Python38\python.exe C:/Users/SEEK/Documents/GitHub/CS-Build-Week-2/operations.py
# {'cooldown': 1.0, 'messages': ['You have a balance of 1.0 Lambda Coins'], 'errors': []}
#
# Process finished with exit code 0
# NOTE(review): the line below embeds a live-looking API token in source control.
# Token d8f89865e1683a54809e2ecf3b71918cd0a07958
{"/direct_path.py": ["/operations.py"]}
|
22,881
|
oxovu/point_clouds_optimization
|
refs/heads/master
|
/main.py
|
import optimizations
import comparison
import pcl
from time import time
def main():
    """Benchmark voxel, SOR, and random-bucket filters on data/milk.pcd.

    For each method prints wall time, cloud-to-cloud distance, and the
    size ratio versus the original cloud.
    """
    cloud = pcl.load('data/milk.pcd')
    cloud1 = pcl.load('data/milk.pcd')
    cloud2 = pcl.load('data/milk.pcd')
    # print('org')
    # print('dist = ', comparison.c2c_distance(cloud, cloud))
    # print('size = ', comparison.c2c_size(cloud, cloud))
    start = time()
    voxel_cloud = optimizations.voxel(cloud, 0.0031, 0.0031, 0.0031)
    end = time()
    print('voxel')
    print('time = ', end - start)
    print('dist = ', comparison.c2c_distance(voxel_cloud, cloud))
    print('size = ', comparison.c2c_size(cloud, voxel_cloud))
    start = time()
    sor_cloud = optimizations.sor(cloud1, 7, 0.00001)
    end = time()
    print('sor')
    print('time = ', end - start)
    print('dist = ', comparison.c2c_distance(sor_cloud, cloud))
    print('size = ', comparison.c2c_size(cloud, sor_cloud))
    start = time()
    rand_cloud = optimizations.rand(cloud2, 2, 7200)
    end = time()
    print('rand')
    print('time = ', end - start)
    print('dist = ', comparison.c2c_distance(rand_cloud, cloud))
    print('size = ', comparison.c2c_size(cloud, rand_cloud))
    # Sample output from a previous run, kept for reference:
    """
    result example
    voxel
    time = 0.0008120536804199219
    dist = 2.7635448593573273e-07
    size = 0.4269582504970179
    sor
    time = 0.021006107330322266
    dist = 5.078507019032886e-07
    size = 0.4279920477137177
    rand
    time = 1.0679066181182861
    dist = 2.391338733629257e-07
    size = 0.4274353876739563
    """

if __name__ == "__main__":
    main()
|
{"/main.py": ["/optimizations.py", "/comparison.py"], "/triang.py": ["/plotXYZ.py"]}
|
22,882
|
oxovu/point_clouds_optimization
|
refs/heads/master
|
/rand.py
|
import pcl
import pcl.pcl_visualization
import numpy as np
import random
def main():
    """Voxel-bucketed random decimation of data/lamppost.pcd (standalone demo).

    Splits the cloud's bounding box into a coarse grid, randomly removes
    points from over-full cells, and writes the result to
    data/lamppost_rand.pcd.
    """
    cloud = pcl.load('data/lamppost.pcd')
    # parameters
    step = 2  # voxel size relative to the cloud
    rand_param = cloud.size // 2  # how many points to filter out
    arr = cloud.to_array().transpose()
    x_min = arr[0].min()
    x_max = arr[0].max()
    y_min = arr[1].min()
    y_max = arr[1].max()
    z_min = arr[2].min()
    z_max = arr[2].max()
    # voxel grid coordinates
    # NOTE(review): several linspace calls below span (v_min, v_min),
    # producing constant arrays — possibly meant to be (v_min, v_max);
    # confirm before reuse (optimizations.py shows the same pattern).
    x_coords = []
    s = np.linspace(x_min, x_max, step)
    x_coords.append(s)
    s = np.linspace(y_min, y_min, step)
    x_coords.append(s)
    s = np.linspace(z_min, z_min, step)
    x_coords.append(s)
    y_coords = []
    s = np.linspace(x_min, x_min, step)
    y_coords.append(s)
    s = np.linspace(y_min, y_max, step)
    y_coords.append(s)
    s = np.linspace(z_min, z_min, step)
    y_coords.append(s)
    z_coords = []
    s = np.linspace(x_min, x_min, step)
    z_coords.append(s)
    s = np.linspace(y_min, y_min, step)
    z_coords.append(s)
    s = np.linspace(z_min, z_max, step)
    z_coords.append(s)
    coords = np.empty([3, step * 3])
    coords[0] = [*x_coords[0], *y_coords[0], *z_coords[0]]
    coords[1] = [*x_coords[1], *y_coords[1], *z_coords[1]]
    coords[2] = [*x_coords[2], *y_coords[2], *z_coords[2]]
    voxel = []
    for i in range(0, coords[0].size * coords[1].size * coords[2].size):
        voxel.append([])
    # find each point's voxel index along every axis
    voxel_x = np.searchsorted(coords[0], arr[0])
    voxel_y = np.searchsorted(coords[1], arr[1])
    voxel_z = np.searchsorted(coords[2], arr[2])
    # drop each point into its voxel bucket
    # NOTE(review): the flat-index formula multiplies each axis index by
    # coords[...].size — verify it addresses distinct cells.
    for point in range(len(arr[0])):
        voxel[voxel_x[point] * coords[0].size + voxel_y[point] * coords[1].size + voxel_z[point] * coords[
            2].size].append([arr[0][point], arr[1][point], arr[2][point]])
    # thin out over-full voxels
    for i in range(len(voxel)):
        if len(voxel[i]) > rand_param:
            for j in range(rand_param):
                voxel[i].remove(voxel[i][random.randint(0, len(voxel[i]) - 1)])
    # flatten surviving buckets back into one point list
    arr_filtered = []
    filled_voxel = list(filter(None, voxel))
    for i in range(len(filled_voxel)):
        for j in range(len(filled_voxel[i])):
            arr_filtered.append(filled_voxel[i][j])
    cloud_filtered = pcl.PointCloud()
    cloud_filtered.from_array(np.array(arr_filtered).astype(np.float32))
    pcl.save(cloud_filtered, 'data/lamppost_rand.pcd')
    print("initial size ", cloud.size)
    print("optimized size ", cloud_filtered.size)

if __name__ == "__main__":
    main()
|
{"/main.py": ["/optimizations.py", "/comparison.py"], "/triang.py": ["/plotXYZ.py"]}
|
22,883
|
oxovu/point_clouds_optimization
|
refs/heads/master
|
/sor.py
|
import numpy as np
import pcl
import random
def main():
    """Statistical-outlier filtering demo: lamppost.pcd -> lamppost_sor.pcd."""
    cloud = pcl.load('data/lamppost.pcd')
    # removal of points with many neighbours (set_negative(True) keeps the
    # complement of the usual inlier set)
    sor = cloud.make_statistical_outlier_filter()
    # number of neighbours
    sor.set_mean_k(1)
    # neighbour-search threshold (std-dev multiplier)
    sor.set_std_dev_mul_thresh(0.1)
    sor.set_negative(True)
    cloud_filtered = sor.filter()
    pcl.save(cloud_filtered, 'data/lamppost_sor.pcd')
    print("initial size ", cloud.size)
    print("optimized size ", cloud_filtered.size)

if __name__ == "__main__":
    main()
|
{"/main.py": ["/optimizations.py", "/comparison.py"], "/triang.py": ["/plotXYZ.py"]}
|
22,884
|
oxovu/point_clouds_optimization
|
refs/heads/master
|
/visualization.py
|
import pcl
import pcl.pcl_visualization
def main():
    """Endlessly redraw data/lamppost.pcd in a PCL visualizer window."""
    viewer = pcl.pcl_visualization.PCLVisualizering()
    cloud = pcl.load('data/lamppost.pcd')
    while 1:
        # Visualizing pointcloud (add, render one frame, remove, repeat).
        viewer.AddPointCloud(cloud, b'scene_cloud', 0)
        viewer.SpinOnce()
        viewer.RemovePointCloud(b'scene_cloud', 0)

if __name__ == "__main__":
    main()
|
{"/main.py": ["/optimizations.py", "/comparison.py"], "/triang.py": ["/plotXYZ.py"]}
|
22,885
|
oxovu/point_clouds_optimization
|
refs/heads/master
|
/plotXYZ.py
|
import pcl
import numpy as np
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
def main():
    """Scatter-plot data/lamppost.pcd as a 3-D matplotlib figure."""
    cloud = pcl.load('data/lamppost.pcd')
    shape = cloud.to_array().transpose()  # rows are x, y, z coordinates
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1, projection='3d')
    x = shape[0]
    y = shape[1]
    z = shape[2]
    ax.scatter(x, y, z)
    plt.show()
def plot_bounds(ax, x, y, z):
    """Set equal-extent limits on *ax* so the cloud keeps its aspect ratio.

    The half-extent is half the largest span among the three coordinate
    arrays; each axis is centered on its own midpoint.
    """
    spans = [axis.max() - axis.min() for axis in (x, y, z)]
    half = np.array(spans).max() / 2.0
    for center, set_limits in (
        ((x.max() + x.min()) / 2, ax.set_xlim),
        ((y.max() + y.min()) / 2, ax.set_ylim),
        ((z.max() + z.min()) / 2, ax.set_zlim),
    ):
        set_limits(center - half, center + half)
# Script entry point.
if __name__ == "__main__":
    main()
|
{"/main.py": ["/optimizations.py", "/comparison.py"], "/triang.py": ["/plotXYZ.py"]}
|
22,886
|
oxovu/point_clouds_optimization
|
refs/heads/master
|
/voxel.py
|
import pcl
def main():
cloud = pcl.load('data/lamppost.pcd')
# аппроксимация внутри каждого вокселя
voxel = cloud.make_voxel_grid_filter()
# размеры вокселя
voxel.set_leaf_size(0.045, 0.045, 0.045)
cloud_filtered = voxel.filter()
pcl.save(cloud_filtered, 'data/lamppost_vox.pcd')
print("initial size ", cloud.size)
print("optimized size ", cloud_filtered.size)
if __name__ == "__main__":
main()
|
{"/main.py": ["/optimizations.py", "/comparison.py"], "/triang.py": ["/plotXYZ.py"]}
|
22,887
|
oxovu/point_clouds_optimization
|
refs/heads/master
|
/triang.py
|
from scipy.spatial import Delaunay
import numpy as np
import matplotlib.pyplot as plt
import pcl
from mpl_toolkits.mplot3d import Axes3D
from plotXYZ import plot_bounds
def main():
    """Measure how far the original cloud lies from a Delaunay mesh of the
    optimized cloud, printing per-point and aggregate errors."""
    # optimized cloud
    cloud1 = pcl.load('data/lamppost2.pcd')
    # original cloud
    cloud2 = pcl.load('data/lamppost.pcd')
    points1 = cloud1.to_array()
    points2 = cloud2.to_array()
    # build the triangulation
    tri = Delaunay(points1)
    x = []
    y = []
    z = []
    for point in points1:
        x.append(float(point[0]))
        y.append(float(point[1]))
        z.append(float(point[2]))
    x = np.array(x)
    y = np.array(y)
    z = np.array(z)
    # graphical rendering (disabled)
    # fig = plt.figure()
    # ax = fig.add_subplot(1, 1, 1, projection='3d')
    #
    # plot_bounds(ax, x, y, z)
    #
    # ax.plot_trisurf(x, y, z, triangles=tri.simplices)
    # plt.show()
    error = 0.0
    i = 0
    # accumulate the total error over every original point
    for point in points2:
        i += 1
        dist = tri.plane_distance(point)
        min_dist = min(abs(dist))
        print("step ", i, "\terror ", min_dist)
        error += min_dist
    print("\nerror sum ", error)
    # NOTE(review): normalised by points1.size although the loop runs over
    # points2 — confirm the intended denominator.
    print("\nerror sum/points number ", error/points1.size)

if __name__ == "__main__":
    main()
|
{"/main.py": ["/optimizations.py", "/comparison.py"], "/triang.py": ["/plotXYZ.py"]}
|
22,888
|
oxovu/point_clouds_optimization
|
refs/heads/master
|
/optimizations.py
|
import pcl
import numpy as np
import random
def voxel(cloud, x, y, z):
    """Downsample *cloud* with a PCL voxel-grid filter of leaf size (x, y, z)."""
    grid_filter = cloud.make_voxel_grid_filter()
    grid_filter.set_leaf_size(x, y, z)  # voxel dimensions
    return grid_filter.filter()
def sor(cloud, k, thresh):
    """Statistical-outlier filter on *cloud* with k neighbours and threshold
    *thresh*; set_negative(True) keeps the complement of the inlier set.
    """
    # Renamed local so it no longer shadows this function's own name.
    outlier_filter = cloud.make_statistical_outlier_filter()
    outlier_filter.set_mean_k(k)                    # number of neighbours
    outlier_filter.set_std_dev_mul_thresh(thresh)   # std-dev multiplier threshold
    outlier_filter.set_negative(True)
    return outlier_filter.filter()
def rand(cloud, step, rand_param):
    """Randomly decimate *cloud* by bucketing points into a coarse voxel grid
    and deleting rand_param random points from each over-full cell.

    Args:
        cloud: input pcl.PointCloud.
        step: number of grid divisions per axis.
        rand_param: per-cell point budget; cells holding more points lose
            rand_param randomly chosen points.

    Returns:
        A new pcl.PointCloud containing the surviving points.
    """
    # parameters (historic defaults kept for reference)
    # step = 2  # voxel size relative to the cloud
    # rand_param = cloud.size // 2  # how many points to filter out
    arr = cloud.to_array().transpose()
    x_min = arr[0].min()
    x_max = arr[0].max()
    y_min = arr[1].min()
    y_max = arr[1].max()
    z_min = arr[2].min()
    z_max = arr[2].max()
    # voxel grid coordinates
    # NOTE(review): several linspace calls below span (v_min, v_min),
    # yielding constant arrays — possibly meant to be (v_min, v_max).
    x_coords = []
    s = np.linspace(x_min, x_max, step)
    x_coords.append(s)
    s = np.linspace(y_min, y_min, step)
    x_coords.append(s)
    s = np.linspace(z_min, z_min, step)
    x_coords.append(s)
    y_coords = []
    s = np.linspace(x_min, x_min, step)
    y_coords.append(s)
    s = np.linspace(y_min, y_max, step)
    y_coords.append(s)
    s = np.linspace(z_min, z_min, step)
    y_coords.append(s)
    z_coords = []
    s = np.linspace(x_min, x_min, step)
    z_coords.append(s)
    s = np.linspace(y_min, y_min, step)
    z_coords.append(s)
    s = np.linspace(z_min, z_max, step)
    z_coords.append(s)
    coords = np.empty([3, step * 3])
    coords[0] = [*x_coords[0], *y_coords[0], *z_coords[0]]
    coords[1] = [*x_coords[1], *y_coords[1], *z_coords[1]]
    coords[2] = [*x_coords[2], *y_coords[2], *z_coords[2]]
    # deduplicate grid nodes, then recover per-axis sorted unique coordinates
    b_set = set(tuple(x) for x in coords.transpose())
    coords = np.array([list(x) for x in b_set])
    coords = coords.transpose()
    coords_x = np.unique(coords[0])
    coords_y = np.unique(coords[1])
    coords_z = np.unique(coords[2])
    voxel = []
    for i in range(0, coords_x.size * coords_y.size * coords_z.size):
        voxel.append([])
    # find each point's voxel index along every axis
    voxel_x = np.searchsorted(coords_x, arr[0])
    voxel_y = np.searchsorted(coords_y, arr[1])
    voxel_z = np.searchsorted(coords_z, arr[2])
    # drop each point into its voxel bucket
    # NOTE(review): the flat index uses (coords_x.size - 1) for both the y
    # and z strides — verify against coords_y/coords_z sizes.
    for point in range(len(arr[0])):
        voxel[voxel_x[point] + voxel_y[point] * (coords_x.size - 1) + voxel_z[point] * (coords_x.size - 1) * (
            coords_x.size - 1)].append([arr[0][point], arr[1][point], arr[2][point]])
    # thin out over-full voxels
    for i in range(len(voxel)):
        if len(voxel[i]) > rand_param:
            for j in range(rand_param):
                voxel[i].remove(voxel[i][random.randint(0, len(voxel[i]) - 1)])
    # flatten surviving buckets back into one point list
    arr_filtered = []
    filled_voxel = list(filter(None, voxel))
    for i in range(len(filled_voxel)):
        for j in range(len(filled_voxel[i])):
            arr_filtered.append(filled_voxel[i][j])
    cloud_filtered = pcl.PointCloud()
    cloud_filtered.from_array(np.array(arr_filtered).astype(np.float32))
    return cloud_filtered
|
{"/main.py": ["/optimizations.py", "/comparison.py"], "/triang.py": ["/plotXYZ.py"]}
|
22,889
|
oxovu/point_clouds_optimization
|
refs/heads/master
|
/comparison.py
|
from scipy.spatial import Delaunay
import numpy as np
import matplotlib.pyplot as plt
import pcl
def c2c_distance(cloud1, cloud2):
    """Mean minimal plane-distance from each point of *cloud2* to a Delaunay
    triangulation of *cloud1*.

    NOTE(review): the sum is divided by cloud1.size although the loop runs
    over cloud2's points — confirm the intended normalisation.
    """
    points1 = cloud1.to_array()
    points2 = cloud2.to_array()
    # build the triangulation
    tri = Delaunay(points1)
    distance = 0.0
    i = 0
    # accumulate the total distance
    for point in points2:
        i += 1
        dist = tri.plane_distance(point)
        min_dist = min(abs(dist))
        distance += min_dist
    return distance/cloud1.size
def c2c_size(cloud1, cloud2):
    """Return the point-count ratio of *cloud2* relative to *cloud1*."""
    reference = cloud1.size
    candidate = cloud2.size
    return candidate / reference
|
{"/main.py": ["/optimizations.py", "/comparison.py"], "/triang.py": ["/plotXYZ.py"]}
|
22,896
|
unexpector/JanusVRandom
|
refs/heads/master
|
/hello/views.py
|
from django.shortcuts import render
from django.http import HttpResponse
from hello.models import RandomSites, ObjectLibrary, Rooms
from random import randint
from hello.forms import ObjectForm
from hello.utils import newobject, makeobject, imgur
# Create your views here.
def index(request):
    """Home page: assemble random site URLs, a random room, and three
    randomly-placed objects into the JanusVR template context."""
    namepicked = "Random Imgur"
    reddit = "Random Reddit"
    wikipedia = "Random Wikipedia"
    # NOTE(review): random_list is computed but never used below.
    random_list = RandomSites.objects.get(sitename=namepicked).src
    randomreddit = RandomSites.objects.get(sitename=reddit).src
    randomwikipedia = RandomSites.objects.get(sitename=wikipedia).src
    wikipedia_id = RandomSites.objects.get(sitename=wikipedia).id
    # Pick one fully random site by primary key.
    thenumber = randint(1, 26)
    fullrandom = RandomSites.objects.get(id=thenumber).src
    # Set Number Of Cubes
    # cubeamount = randint(1,4)
    # for each in cubeamount
    # Room Random Code
    roomrandom = randint(1, 14)
    random_room = Rooms.objects.get(id=roomrandom).localasset
    random_model = "Subwoofer"
    # Three objects with randomised placements.
    object_one = makeobject('object_one')
    object_one.randcords()
    object_two = makeobject('object_two')
    object_two.randcords()
    object_three = makeobject('object_three')
    object_three.randcords()
    # imgurone = imgur('imgurone')
    # imgurone.takeinput()
    themodel = ObjectLibrary.objects.get(objectname=random_model).src
    context_dict = {'boldmessage': object_one.newobjx, 'randomreddit': randomreddit, 'randomwikipedia': randomwikipedia, 'thenumber': thenumber, 'themodel': themodel, 'wikipediaid': wikipedia_id, 'realrandom': fullrandom, 'randomroom': random_room, 'one_xvalue': object_one.newobjx, 'one_yvalue': object_one.newobjy, 'one_zvalue': object_one.newobjz}
    context_dict['page_name'] = "this is the page name"
    # context_dict.update({'newvalue': thecube.newobjx})
    # addtodict() injects '<name>x/y/z' placement keys for each object.
    object_one.addtodict(context_dict)
    object_two.addtodict(context_dict)
    object_three.addtodict(context_dict)
    context_dict['xname'] = object_one.xname
    return render(request, 'unexpector-template.html', context_dict)
def db(request):
    """Render the db.html template with a canned greeting."""
    context = {'greetings': "hello"}
    return render(request, 'db.html', context)
def about(request):
    """Return a hard-coded HTML snippet pointing back to the main page."""
    return HttpResponse(
        "<html><body> Here is the link to the main page</body></html>")
def category(request, site_name_slug):
    """Show one RandomSites entry looked up by name, or a 'no category' page."""
    context_dict = {}
    try:
        # Can we find a category name slug with the given name?
        # If we can't, the .get() method raises a DoesNotExist exception.
        # So the .get() method returns one model instance or raises an exception.
        site = RandomSites.objects.get(sitename=site_name_slug)
        context_dict['site_name'] = site
        # Retrieve the source URL of the matched site.
        url = RandomSites.objects.get(sitename=site).src
        # Adds our results list to the template context under name pages.
        context_dict['src'] = url
        # We also add the category object from the database to the context dictionary.
        # We'll use this in the template to verify that the category exists.
        context_dict['sitename'] = site
    except RandomSites.DoesNotExist:
        # We get here if we didn't find the specified category.
        # Don't do anything - the template displays the "no category" message for us.
        pass
    # Go render the response and return it to the client.
    return render(request, 'showsites.html', context_dict)
def add_object(request):
    """Handle the add-object form: save on a valid POST, otherwise show the form.

    On a successful save the user is shown the homepage (index view); on
    validation errors the errors are printed and the bound form is
    re-rendered with its messages.
    """
    # A HTTP POST?
    if request.method == 'POST':
        form = ObjectForm(request.POST)
        # Have we been provided with a valid form?
        if form.is_valid():
            # Save the new category to the database.
            form.save(commit=True)
            # Now call the index() view; the user is shown the homepage.
            return index(request)
        else:
            # Bug fix: the original used the Python 2 print statement
            # (`print form.errors`), a SyntaxError under Python 3; the
            # parenthesised call form behaves the same on both versions.
            print(form.errors)
    else:
        # If the request was not a POST, display the form to enter details.
        form = ObjectForm()
    # Bad form (or form details), no form supplied...
    # Render the form with error messages (if any).
    return render(request, 'add_forms.html', {'form': form})
|
{"/hello/forms.py": ["/hello/models.py"], "/hello/admin.py": ["/hello/models.py"]}
|
22,897
|
unexpector/JanusVRandom
|
refs/heads/master
|
/hello/forms.py
|
from django import forms
from hello.models import ObjectLibrary, RandomSites
class ObjectForm(forms.ModelForm):
    """Form for adding an ObjectLibrary entry (only objectname is persisted).

    NOTE(review): the field name 'txtsrc' does not match the model's
    'texsrc' column — confirm which spelling is intended.
    """
    objectname = forms.CharField(max_length=128, help_text="Please enter the object name")
    idseed = forms.CharField(max_length=128, help_text="Please enter the ID Seed")
    src = forms.CharField(max_length=128, help_text="Please enter the URL")
    txtsrc = forms.CharField(max_length=128, help_text="Please enter texture source")

    # An inline class to provide additional information on the form.
    class Meta:
        # Provide an association between the ModelForm and a model
        model = ObjectLibrary
        fields = ('objectname',)
class RandomSitesForm(forms.ModelForm):
    """Form for adding a RandomSites entry; the src field is excluded."""
    sitename = forms.CharField(max_length=128, help_text="Please enter the Site Name")
    src = forms.CharField(max_length=128, help_text="Please enter the URL of the page")

    class Meta:
        # Provide an association between the ModelForm and a model
        model = RandomSites
        # What fields do we want to include in our form?
        # This way we don't need every field in the model present.
        # Some fields may allow NULL values, so we may not want to include them...
        # Here, we are hiding the foreign key.
        # we can either exclude the category field from the form,
        exclude = ('src',)
        # or specify the fields to include (i.e. not include the category field)
        # fields = ('title', 'url', 'views')
|
{"/hello/forms.py": ["/hello/models.py"], "/hello/admin.py": ["/hello/models.py"]}
|
22,898
|
unexpector/JanusVRandom
|
refs/heads/master
|
/hello/models.py
|
from django.db import models
# Create your models here.
class Rooms(models.Model):
    """A JanusVR room template: display name, local asset path, and colour."""
    name = models.CharField(max_length=128, unique=True)
    localasset = models.CharField(max_length=128)
    col = models.CharField(max_length=128)

    def __unicode__(self):  # For Python 2, use __str__ on Python 3
        return self.name
class ObjectLibrary(models.Model):
    """A reusable 3-D object: name, id seed, model source, texture source."""
    objectname = models.CharField(max_length=128, unique=True)
    idseed = models.CharField(max_length=128, unique=True)
    src = models.CharField(max_length=128)
    texsrc = models.CharField(max_length=128)

    class Meta:
        verbose_name_plural = "ObjectLibraries"

    def __unicode__(self):  # For Python 2, use __str__ on Python 3
        return self.objectname
class RandomSites(models.Model):
    """A named external website whose URL serves random content."""
    sitename = models.CharField(max_length=128, unique=True)
    src = models.URLField()

    class Meta:
        verbose_name_plural = "RandomSites"

    def __unicode__(self):  # For Python 2, use __str__ on Python 3
        return self.sitename
|
{"/hello/forms.py": ["/hello/models.py"], "/hello/admin.py": ["/hello/models.py"]}
|
22,899
|
unexpector/JanusVRandom
|
refs/heads/master
|
/hello/utils.py
|
from random import randint
import urllib2
import json
class makeobject(object):
    """Mutable holder for a named object's (x, y, z) placement in a room."""

    def __init__(self, name):
        self.name = name
        self.position = []  # history of co-ordinate triples added via add_co_ords()
        self.newobjx = 1
        self.newobjy = 1
        self.newobjz = 1

    def add_co_ords(self, location):
        """Append a co-ordinate triple to the position history."""
        self.position.append(location)

    def setcords(self, xpos, ypos, zpos):
        """Set the current x/y/z placement explicitly."""
        self.newobjx = xpos
        self.newobjy = ypos
        self.newobjz = zpos

    def randcords(self):
        """Randomise placement: x in [-2, 5], y fixed at 1, z in [-10, -1]."""
        self.newobjx = randint(-2, 5)
        self.newobjy = 1
        self.newobjz = randint(-10, -1)

    def addtodict(self, dictionary):
        """Insert '<name>x', '<name>y', '<name>z' keys into *dictionary*.

        Mutates *dictionary* in place and returns a (None, xname) tuple —
        dict.update() always returns None, and the original implementation
        passed that through unchanged.
        """
        self.xvar = "x"
        self.yvar = "y"
        self.zvar = "z"
        self.xname = self.name + self.xvar
        self.yname = self.name + self.yvar
        self.zname = self.name + self.zvar
        entries = {
            self.xname: self.newobjx,
            self.yname: self.newobjy,
            self.zname: self.newobjz,
        }
        return dictionary.update(entries), self.xname
class imgur(object):
    """Fetch the URL of a random imgur gallery image via the imgur v3 API."""

    def __init__(self, name):
        self.name = name

    def takeinput(self):
        """Return the link of one random gallery image as a str.

        Bug fix: the original called urllib2.urlopen() on the URL first and
        then tried add_header() on the *response*; the Authorization header
        must be attached to a urllib2.Request before opening it.
        """
        clientID = '9aa094141cc554a'
        req = urllib2.Request('https://api.imgur.com/3/gallery/random/random/1')
        req.add_header('Authorization', 'Client-ID ' + clientID)
        response = urllib2.urlopen(req)
        response = json.loads(response.read())
        return str(response[u'data'][u'link'])
def newobject():
    """Return a random (x, y, z) placement: x in [-2, 5], y == 1, z in [-10, -1]."""
    coords = [randint(-2, 5), 1, randint(-10, -1)]
    return coords[0], coords[1], coords[2]
|
{"/hello/forms.py": ["/hello/models.py"], "/hello/admin.py": ["/hello/models.py"]}
|
22,900
|
unexpector/JanusVRandom
|
refs/heads/master
|
/hello/admin.py
|
from django.contrib import admin
from hello.models import Rooms, RandomSites, ObjectLibrary
class RandomSitesAdmin(admin.ModelAdmin):
    """Admin changelist for RandomSites showing name and source URL."""
    # Columns shown in the admin changelist view.
    list_display = ('sitename', 'src')

# Register your models here.
admin.site.register(Rooms)
admin.site.register(RandomSites, RandomSitesAdmin)
admin.site.register(ObjectLibrary)
|
{"/hello/forms.py": ["/hello/models.py"], "/hello/admin.py": ["/hello/models.py"]}
|
22,901
|
unexpector/JanusVRandom
|
refs/heads/master
|
/populate.py
|
import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'gettingstarted.settings')
from hello.models import RandomSites
def populate():
    """Seed the RandomSites table with the default set of random-site URLs."""
    add_randomsite(sitename="Random Wordpress",
                   src="http://en.blog.wordpress.com/next/")
    add_randomsite(sitename="Random Wikipedia",
                   src="http://en.wikipedia.org/wiki/Special:Random")
    add_randomsite(sitename="Youtube Random Video",
                   src="http://en.wikipedia.org/wiki/Special:Random")
    add_randomsite(sitename="Map Crunch",
                   src="http://www.mapcrunch.com/")
    add_randomsite(sitename="Random Reddit",
                   src="http://www.reddit.com/r/random")
    add_randomsite(sitename="Crakced",
                   src="http://www.cracked.com/")
    add_randomsite(sitename="Random Imgur",
                   src="http://imgur.com/random")
    add_randomsite(sitename="Random Spotify",
                   src="http://www.karnhuset.net/demos/spotify/randomSong/")
    # Print out what we have added to the user.
    # Bug fix: parenthesised call form — the original Python 2 print
    # statement is a SyntaxError under Python 3 and works identically here.
    # NOTE(review): both placeholders print str(p); the second was probably
    # meant to be p.src.
    for p in RandomSites.objects.all():
        print("- {0} - {1}".format(str(p), str(p)))
def add_randomsite(sitename, src):
    """Fetch or create the RandomSites row for (sitename, src) and return it.

    get_or_create returns an (object, created) tuple; [0] keeps the object.
    The extra save() is redundant for freshly created rows but harmless.
    """
    p = RandomSites.objects.get_or_create(sitename=sitename, src=src)[0]
    p.save()
    return p
# Start execution here!
# Script entry point -- runs only when invoked directly, not on import.
if __name__ == '__main__':
    print "Starting Unexpector population script..."
    populate()
|
{"/hello/forms.py": ["/hello/models.py"], "/hello/admin.py": ["/hello/models.py"]}
|
22,906
|
huntzhan/GeekCMS
|
refs/heads/master
|
/tests/cases/project/themes/test_theme1/plugin.py
|
from geekcms import protocol
class TestPlugin(protocol.BasePlugin):
    """Minimal no-op plugin used by the loadup tests.

    The 'plugin' class attribute overrides the registered plugin name
    (otherwise the class name would be used).
    """
    plugin = 'a'

    def run(self):
        # Intentionally does nothing; only registration matters here.
        pass
# docopt usage string served by TestCLI.get_doc() below.
_TEST_DOC = """
Usage:
    geekcms testcmd -a -b <c> <d>
"""


class TestCLI(protocol.BaseExtendedProcedure):
    """Extended-procedure plugin exposing a 'testcmd' subcommand for tests."""
    plugin = 'test_cli'

    def get_command_and_explanation(self):
        # (command name, one-line help) consumed by DocConstructor.
        return 'testcmd', 'command for test.'

    def get_doc(self):
        return _TEST_DOC

    def run(self, args):
        # Echo parsed args back so tests can inspect them.
        return args
|
{"/tests/test_parser.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/tests/test_doc.py": ["/geekcms/doc_construct.py", "/geekcms/protocol.py"], "/tests/test_loadup.py": ["/geekcms/loadup.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/interface.py": ["/geekcms/utils.py", "/geekcms/doc_construct.py", "/geekcms/loadup.py", "/geekcms/protocol.py", "/geekcms/download_theme.py"], "/geekcms/parser/simple_yacc.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/utils.py"], "/geekcms/download_theme.py": ["/geekcms/utils.py"], "/tests/test_main.py": ["/geekcms/interface.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/loadup.py": ["/geekcms/utils.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/geekcms/utils.py": ["/geekcms/protocol.py"], "/geekcms/parser/simple_lex.py": ["/geekcms/parser/utils.py"], "/geekcms/sequence_analyze.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/parser/utils.py", "/geekcms/protocol.py"], "/tests/test_utils.py": ["/geekcms/utils.py"]}
|
22,907
|
huntzhan/GeekCMS
|
refs/heads/master
|
/tests/load_tests.py
|
import os.path
import unittest
def additional_tests():
    """Discover every test under the current working directory.

    Hook for setuptools-style test collection: returns a unittest suite
    built by the default loader.
    """
    return unittest.defaultTestLoader.discover(os.getcwd())
|
{"/tests/test_parser.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/tests/test_doc.py": ["/geekcms/doc_construct.py", "/geekcms/protocol.py"], "/tests/test_loadup.py": ["/geekcms/loadup.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/interface.py": ["/geekcms/utils.py", "/geekcms/doc_construct.py", "/geekcms/loadup.py", "/geekcms/protocol.py", "/geekcms/download_theme.py"], "/geekcms/parser/simple_yacc.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/utils.py"], "/geekcms/download_theme.py": ["/geekcms/utils.py"], "/tests/test_main.py": ["/geekcms/interface.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/loadup.py": ["/geekcms/utils.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/geekcms/utils.py": ["/geekcms/protocol.py"], "/geekcms/parser/simple_lex.py": ["/geekcms/parser/utils.py"], "/geekcms/sequence_analyze.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/parser/utils.py", "/geekcms/protocol.py"], "/tests/test_utils.py": ["/geekcms/utils.py"]}
|
22,908
|
huntzhan/GeekCMS
|
refs/heads/master
|
/tests/test_parser.py
|
import unittest
import os
import re
import configparser
from collections import defaultdict
from geekcms.parser.simple_lex import lexer
from geekcms.parser.simple_yacc import parser
from geekcms.protocol import PluginIndex
from geekcms.sequence_analyze import SequenceParser
class PLYTest(unittest.TestCase):
    """Placeholder for direct lexer/parser tests (not yet written)."""

    def test_parser(self):
        # TODO: exercise the PLY lexer/parser directly; currently a stub.
        pass
# Default theme used when a fixture entry has no explicit "theme." prefix.
_THEME = 'testtheme'


class SequenceParserTest(unittest.TestCase):
    """Drive SequenceParser against the fixture files in tests/cases/parser."""

    def _get_suppose_result(self, text):
        """Parse the fixture's comma-separated 'result' field into PluginIndex list.

        'plugin' maps to (testtheme, plugin); 'theme.plugin' is split explicitly.
        """
        result = re.sub(r'\s', '', text).split(',')

        def fix(item):
            if '.' not in item:
                return PluginIndex(_THEME, item)
            else:
                theme, plugin = item.split('.')
                return PluginIndex(theme, plugin)
        return list(map(fix, result))

    def _load_test_case(self, name):
        """Read cases/parser/<name> (INI format) -> (case text, expected list)."""
        dir_path = os.path.join(os.path.dirname(__file__), 'cases/parser')
        file_path = os.path.join(dir_path, name)
        config = configparser.ConfigParser()
        with open(file_path) as f:
            config.read_file(f)
        section = config['Test']
        return section['case'], self._get_suppose_result(section['result'])

    def test_fix_order_good_case(self):
        # Each fixture pairs an input sequence with its expected ordering.
        cases = ['case1', 'case2',  # base chain.
                 'case3', 'case4',  # forward induction.
                 'case5', 'case6']  # reverse induction.
        for case in cases:
            text, suppose_result = self._load_test_case(case)
            parser = SequenceParser()
            parser.analyze(_THEME, text)
            result = parser.generate_sequence()
            self.assertListEqual(result, suppose_result)
    #@unittest.expectedFailure
|
{"/tests/test_parser.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/tests/test_doc.py": ["/geekcms/doc_construct.py", "/geekcms/protocol.py"], "/tests/test_loadup.py": ["/geekcms/loadup.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/interface.py": ["/geekcms/utils.py", "/geekcms/doc_construct.py", "/geekcms/loadup.py", "/geekcms/protocol.py", "/geekcms/download_theme.py"], "/geekcms/parser/simple_yacc.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/utils.py"], "/geekcms/download_theme.py": ["/geekcms/utils.py"], "/tests/test_main.py": ["/geekcms/interface.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/loadup.py": ["/geekcms/utils.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/geekcms/utils.py": ["/geekcms/protocol.py"], "/geekcms/parser/simple_lex.py": ["/geekcms/parser/utils.py"], "/geekcms/sequence_analyze.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/parser/utils.py", "/geekcms/protocol.py"], "/tests/test_utils.py": ["/geekcms/utils.py"]}
|
22,909
|
huntzhan/GeekCMS
|
refs/heads/master
|
/tests/test_doc.py
|
import unittest
from docopt import docopt
from geekcms.doc_construct import DocConstructor
from geekcms.protocol import BaseExtendedProcedure
class DocConstructorTest(unittest.TestCase):
    """Exercise DocConstructor's usage-string assembly with docopt."""

    def test_doc_not_in_project(self):
        # The startproject doc must parse 'startproject <template_name>'.
        doc = DocConstructor.get_doc_not_in_project()
        argv = ['startproject', 'default']
        args = docopt(doc, argv)
        self.assertEqual(
            args['<template_name>'],
            'default',
        )

    def test_in_project_with_extended_plugin(self):
        # A registered extended procedure contributes its own subcommand.
        class TestPlugin(BaseExtendedProcedure):
            def get_command_and_explanation(self):
                return 'testcmd', 'this is a test command'

        doc, mapping = DocConstructor.get_doc_and_cli_mapping([TestPlugin()])
        argv = ['testcmd', '-a', '-b', 'c', 'd']
        # options_first leaves everything after <command> unparsed in <args>.
        args = docopt(doc, argv, options_first=True)
        self.assertEqual(
            args['<command>'],
            'testcmd',
        )
        self.assertListEqual(
            args['<args>'],
            ['-a', '-b', 'c', 'd'],
        )
|
{"/tests/test_parser.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/tests/test_doc.py": ["/geekcms/doc_construct.py", "/geekcms/protocol.py"], "/tests/test_loadup.py": ["/geekcms/loadup.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/interface.py": ["/geekcms/utils.py", "/geekcms/doc_construct.py", "/geekcms/loadup.py", "/geekcms/protocol.py", "/geekcms/download_theme.py"], "/geekcms/parser/simple_yacc.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/utils.py"], "/geekcms/download_theme.py": ["/geekcms/utils.py"], "/tests/test_main.py": ["/geekcms/interface.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/loadup.py": ["/geekcms/utils.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/geekcms/utils.py": ["/geekcms/protocol.py"], "/geekcms/parser/simple_lex.py": ["/geekcms/parser/utils.py"], "/geekcms/sequence_analyze.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/parser/utils.py", "/geekcms/protocol.py"], "/tests/test_utils.py": ["/geekcms/utils.py"]}
|
22,910
|
huntzhan/GeekCMS
|
refs/heads/master
|
/tests/test_loadup.py
|
"""
Test Plan.
SettingsProcedure
test_run:
...
PluginProcedure
test_run:
...
"""
import os
import unittest
from geekcms.loadup import (SettingsProcedure, PluginProcedure)
from geekcms.utils import (ShareData, ProjectSettings, ThemeSettings)
from geekcms.protocol import (PluginRegister, PluginIndex)
class ProcedureTest(unittest.TestCase):
    """Run the settings/plugin loadup procedures against tests/cases/project."""

    def setUp(self):
        # Reset all class-level registries so tests stay independent.
        ShareData.clear()
        ProjectSettings.clear()
        ThemeSettings.clear()
        PluginRegister.clean_up_registered_plugins()
        self.project_path = os.path.join(
            os.getcwd(),
            'tests/cases/project',
        )

    def test_settings_run(self):
        # Loading settings should register both fixture themes.
        SettingsProcedure.run(self.project_path)
        self.assertSetEqual(
            set(ProjectSettings.get_registered_theme_name()),
            set(['test_theme1', 'test_theme2']),
        )
        self.assertSetEqual(
            set(ThemeSettings._vars),
            set(['test_theme1', 'test_theme2']),
        )

    def test_plugin_run(self):
        # Plugin loadup yields the execution order plus CLI plugin indices.
        SettingsProcedure.run(self.project_path)
        flat_orders, cli_indices = PluginProcedure.run()
        self.assertSetEqual(
            set(flat_orders),
            set([PluginIndex('test_theme1', 'a'),
                 PluginIndex('test_theme2', 'b')]),
        )
        self.assertSetEqual(
            set(cli_indices),
            set([PluginIndex('test_theme1', 'test_cli')]),
        )
|
{"/tests/test_parser.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/tests/test_doc.py": ["/geekcms/doc_construct.py", "/geekcms/protocol.py"], "/tests/test_loadup.py": ["/geekcms/loadup.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/interface.py": ["/geekcms/utils.py", "/geekcms/doc_construct.py", "/geekcms/loadup.py", "/geekcms/protocol.py", "/geekcms/download_theme.py"], "/geekcms/parser/simple_yacc.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/utils.py"], "/geekcms/download_theme.py": ["/geekcms/utils.py"], "/tests/test_main.py": ["/geekcms/interface.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/loadup.py": ["/geekcms/utils.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/geekcms/utils.py": ["/geekcms/protocol.py"], "/geekcms/parser/simple_lex.py": ["/geekcms/parser/utils.py"], "/geekcms/sequence_analyze.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/parser/utils.py", "/geekcms/protocol.py"], "/tests/test_utils.py": ["/geekcms/utils.py"]}
|
22,911
|
huntzhan/GeekCMS
|
refs/heads/master
|
/geekcms/parser/utils.py
|
class PluginRel:
    """Ordering relation between two plugins: a direction flag plus a degree."""

    def __init__(self, is_left_rel, degree):
        # Stored verbatim; degree 0 is the default relation strength.
        self.is_left_rel, self.degree = is_left_rel, degree

    def __repr__(self):
        return 'PluginRel(%s, %s)' % (self.is_left_rel, self.degree)
class PluginExpr:
    """One ordering expression: left_operand RELATION right_operand.

    A missing operand defaults to the HEAD/TAIL sentinel; a None relation
    means the expression imposes no ordering.
    """

    HEAD = 'HEAD'
    TAIL = 'TAIL'

    def __init__(self, *,
                 left_operand=None, right_operand=None, relation=None):
        # Falsy operands (None, '') collapse to the boundary sentinels.
        self.left_operand = left_operand if left_operand else self.HEAD
        self.right_operand = right_operand if right_operand else self.TAIL
        # if relation is None the expression's order is not defined.
        self.relation = relation

    def __repr__(self):
        return '<PluginExpr left: {}, right: {}, is_left_rel: {}>'.format(
            self.left_operand, self.right_operand, self.relation)
class _ContainerError:
def __init__(self, name):
self._name = name
def __get__(self, instance, cls):
container_name = cls._get_message_container(self._name)
container = getattr(cls, container_name)
return bool(container)
class ErrorCollector:
    """Class-level sink for lexer/yacc error messages, archived per theme.

    Messages accumulate in the *_message_container lists; archive_* moves
    the current list into the theme_*_error mapping under a theme name and
    then starts a fresh list.  lex_error / yacc_error are read-only flags
    reporting whether the current container holds anything.
    """

    # Descriptor flags: truthy when the matching container is non-empty.
    lex_error = _ContainerError('lex')
    yacc_error = _ContainerError('yacc')

    # theme name -> archived message list.
    theme_lex_error = {}
    theme_yacc_error = {}

    # Messages collected since the last archive.
    _lex_message_container = []
    _yacc_message_container = []

    @classmethod
    def _get_message_container(cls, name):
        # 'lex' -> '_lex_message_container'
        attr = '_{}_message_container'.format(name)
        return attr

    @classmethod
    def _get_theme_error_mapping(cls, name):
        # 'lex' -> 'theme_lex_error'
        attr = 'theme_{}_error'.format(name)
        return attr

    @classmethod
    def _clean_up(cls, name):
        # Rebind (not mutate) so the archived list keeps its contents.
        attr = cls._get_message_container(name)
        setattr(cls, attr, list())

    @classmethod
    def _add_message(cls, name, message):
        attr = cls._get_message_container(name)
        getattr(cls, attr).append(message)

    @classmethod
    def add_lex_message(cls, message):
        cls._add_message('lex', message)

    @classmethod
    def add_yacc_message(cls, message):
        cls._add_message('yacc', message)

    @classmethod
    def _archive(cls, name, theme):
        """Move current *name* messages under *theme* and reset the container."""
        # resolve name.
        mapping_name = cls._get_theme_error_mapping(name)
        container_name = cls._get_message_container(name)
        # get obj.
        mapping = getattr(cls, mapping_name)
        container = getattr(cls, container_name)
        # archive and clean up.
        mapping[theme] = container
        cls._clean_up(name)

    @classmethod
    def archive_yacc_messages(cls, theme):
        cls._archive('yacc', theme)

    @classmethod
    def archive_lex_messages(cls, theme):
        cls._archive('lex', theme)
|
{"/tests/test_parser.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/tests/test_doc.py": ["/geekcms/doc_construct.py", "/geekcms/protocol.py"], "/tests/test_loadup.py": ["/geekcms/loadup.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/interface.py": ["/geekcms/utils.py", "/geekcms/doc_construct.py", "/geekcms/loadup.py", "/geekcms/protocol.py", "/geekcms/download_theme.py"], "/geekcms/parser/simple_yacc.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/utils.py"], "/geekcms/download_theme.py": ["/geekcms/utils.py"], "/tests/test_main.py": ["/geekcms/interface.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/loadup.py": ["/geekcms/utils.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/geekcms/utils.py": ["/geekcms/protocol.py"], "/geekcms/parser/simple_lex.py": ["/geekcms/parser/utils.py"], "/geekcms/sequence_analyze.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/parser/utils.py", "/geekcms/protocol.py"], "/tests/test_utils.py": ["/geekcms/utils.py"]}
|
22,912
|
huntzhan/GeekCMS
|
refs/heads/master
|
/geekcms/interface.py
|
import os
from docopt import docopt
from .utils import (PathResolver, check_cwd_is_project)
from .doc_construct import DocConstructor
from .loadup import (SettingsProcedure, PluginProcedure)
from .protocol import (PluginRegister, BaseResource, BaseProduct, BaseMessage)
from .download_theme import download_theme
__version__ = '0.3'
def _get_args(doc, options_first=False):
    """Run docopt over *doc*, supplying the package version string."""
    args = docopt(
        doc,
        version=__version__,
        options_first=options_first,
    )
    return args


def _not_in_project():
    """CLI path when cwd is not a project: handle 'startproject <template>'.

    Downloads the named template into the project path; returns the
    template name (falsy when not given).
    """
    doc = DocConstructor.get_doc_not_in_project()
    args = _get_args(doc)
    template = args['<template_name>']
    if template:
        download_theme(template, PathResolver.project_path)
    return template


def _get_plugin_instance(plugin_index):
    """Instantiate the plugin class registered under *plugin_index*."""
    plugin_cls = PluginRegister.get_plugin(plugin_index)
    return plugin_cls()


def _run_default_procedure(plugin_exec_order):
    """Instantiate and run every plugin in the resolved execution order."""
    for plugin in map(_get_plugin_instance, plugin_exec_order):
        plugin.run()


def _run_extended_procedure(command, cli_mapping):
    """Dispatch *command* to its extended-procedure plugin, if registered."""
    # extended procedure.
    cli_plugin = cli_mapping.get(command, None)
    if cli_plugin is None:
        print('No Such Extended Procedure: {}'.format(command))
    else:
        # Parse the remaining argv against the plugin's own doc.
        args = _get_args(
            cli_plugin.get_doc(),
        )
        cli_plugin.run(args)
def _in_project():
    """CLI path inside a project: load settings/plugins, then dispatch.

    'run' triggers the default plugin pipeline; any other command is
    looked up among the extended procedures.  Returns the command name.
    """
    SettingsProcedure.run()
    plugin_exec_order, cli_indices = PluginProcedure.run()
    doc, cli_mapping = DocConstructor.get_doc_and_cli_mapping(
        map(_get_plugin_instance, cli_indices),
    )
    args = _get_args(doc, options_first=True)
    # get command and decide which procedure to run.
    command = args['<command>']
    if command == 'run':
        _run_default_procedure(plugin_exec_order)
    elif command:
        _run_extended_procedure(command, cli_mapping)
    return command


def main():
    """Console entry point: route on whether cwd is a GeekCMS project."""
    # set cwd to as project path.
    PathResolver.set_project_path(os.getcwd())
    if not check_cwd_is_project():
        _not_in_project()
    else:
        _in_project()


if __name__ == '__main__':
    main()
|
{"/tests/test_parser.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/tests/test_doc.py": ["/geekcms/doc_construct.py", "/geekcms/protocol.py"], "/tests/test_loadup.py": ["/geekcms/loadup.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/interface.py": ["/geekcms/utils.py", "/geekcms/doc_construct.py", "/geekcms/loadup.py", "/geekcms/protocol.py", "/geekcms/download_theme.py"], "/geekcms/parser/simple_yacc.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/utils.py"], "/geekcms/download_theme.py": ["/geekcms/utils.py"], "/tests/test_main.py": ["/geekcms/interface.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/loadup.py": ["/geekcms/utils.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/geekcms/utils.py": ["/geekcms/protocol.py"], "/geekcms/parser/simple_lex.py": ["/geekcms/parser/utils.py"], "/geekcms/sequence_analyze.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/parser/utils.py", "/geekcms/protocol.py"], "/tests/test_utils.py": ["/geekcms/utils.py"]}
|
22,913
|
huntzhan/GeekCMS
|
refs/heads/master
|
/geekcms/parser/simple_yacc.py
|
import os
from ply import yacc
from .simple_lex import tokens
from .utils import PluginRel
from .utils import PluginExpr
from .utils import ErrorCollector
# NOTE: in the p_* handlers below the docstrings ARE the grammar -- PLY reads
# them to build the parse tables, so they must not be edited casually.

def p_start(p):
    '''start : NEWLINE lines end
             | lines end'''
    # An optional leading NEWLINE shifts where 'lines'/'end' sit in p.
    if len(p) == 4:
        lines = p[2]
        end = p[3]
    elif len(p) == 3:
        lines = p[1]
        end = p[2]
    # check end is avaliable or not
    if end:
        lines.append(end)
    p[0] = lines


def p_end(p):
    '''end : plugin_expr
           | empty'''
    # The final line may lack a trailing NEWLINE; 'empty' yields None.
    p[0] = p[1]


def p_lines_expend(p):
    '''lines : lines line_atom
             | empty'''
    if len(p) == 2:
        # Base case: start with an empty expression list.
        p[0] = []
    elif len(p) == 3:
        # init a list if lines is None
        plugin_set = p[1]
        single_plugin = p[2]
        plugin_set.append(single_plugin)
        p[0] = plugin_set


def p_line_atom(p):
    'line_atom : plugin_expr NEWLINE'
    p[0] = p[1]
def p_plugin_expr_binary(p):
    'plugin_expr : plugin_name relation plugin_name'
    # Fully specified ordering: left REL right.
    p[0] = PluginExpr(
        left_operand=p[1],
        relation=p[2],
        right_operand=p[3],
    )


def p_plugin_expr_left(p):
    'plugin_expr : plugin_name relation'
    # Right operand omitted -> PluginExpr defaults it to the TAIL sentinel.
    p[0] = PluginExpr(
        left_operand=p[1],
        relation=p[2],
    )


def p_plugin_expr_right(p):
    'plugin_expr : relation plugin_name'
    # Left operand omitted -> PluginExpr defaults it to the HEAD sentinel.
    p[0] = PluginExpr(
        relation=p[1],
        right_operand=p[2],
    )


def p_plugin_expr_none(p):
    'plugin_expr : plugin_name'
    # Bare name: no ordering constraint (relation stays None).
    p[0] = PluginExpr(
        left_operand=p[1],
    )
def p_relation(p):
    '''relation : left_rel
                | right_rel'''
    p[0] = p[1]


def p_left_rel(p):
    '''left_rel : LEFT_OP
                | LEFT_OP DEGREE'''
    # Optional trailing DEGREE token sets the relation strength (default 0).
    if len(p) == 2:
        rel = PluginRel(True, 0)
    elif len(p) == 3:
        rel = PluginRel(True, int(p[2]))
    p[0] = rel


def p_right_rel(p):
    '''right_rel : RIGHT_OP
                 | DEGREE RIGHT_OP'''
    # For right relations the DEGREE precedes the operator.
    if len(p) == 2:
        rel = PluginRel(False, 0)
    elif len(p) == 3:
        rel = PluginRel(False, int(p[1]))
    p[0] = rel


def p_plugin_name(p):
    'plugin_name : IDENTIFIER'
    p[0] = p[1]


def p_empty(p):
    'empty :'
    # in order not to fix up with plugin_expr
    p[0] = None
def p_error(p):
    """Panic-mode recovery: record the offending token plus everything up
    to the next NEWLINE/EOF in ErrorCollector, then restart the parser."""
    # PLY invokes p_error(None) on unexpected end-of-input; the original
    # code then raised AttributeError on p.value instead of recording it.
    if p is None:
        ErrorCollector.add_yacc_message(
            (None, 0, '[EOF]'),
        )
        return
    # print("Syntax Error: '{}' in line {}".format(p.value, p.lineno))
    discard = [p.value]
    while True:
        token = yacc.token()
        if token and token.type != 'NEWLINE':
            discard.append(token.value)
            continue
        else:
            val = '[NEWLINE]' if token else '[EOL]'
            discard.append(val)
            break
    # print('Discard: ', ''.join(discard))
    ErrorCollector.add_yacc_message(
        (p.value, p.lineno, ''.join(discard)),
    )
    yacc.restart()
# Build the parse tables once at import time; debug=0 suppresses parser.out
# generation and optimize=1 skips grammar signature checks.  Generated table
# files land next to this module.
parser = yacc.yacc(
    debug=0,
    optimize=1,
    outputdir=os.path.dirname(__file__),
)
|
{"/tests/test_parser.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/tests/test_doc.py": ["/geekcms/doc_construct.py", "/geekcms/protocol.py"], "/tests/test_loadup.py": ["/geekcms/loadup.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/interface.py": ["/geekcms/utils.py", "/geekcms/doc_construct.py", "/geekcms/loadup.py", "/geekcms/protocol.py", "/geekcms/download_theme.py"], "/geekcms/parser/simple_yacc.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/utils.py"], "/geekcms/download_theme.py": ["/geekcms/utils.py"], "/tests/test_main.py": ["/geekcms/interface.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/loadup.py": ["/geekcms/utils.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/geekcms/utils.py": ["/geekcms/protocol.py"], "/geekcms/parser/simple_lex.py": ["/geekcms/parser/utils.py"], "/geekcms/sequence_analyze.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/parser/utils.py", "/geekcms/protocol.py"], "/tests/test_utils.py": ["/geekcms/utils.py"]}
|
22,914
|
huntzhan/GeekCMS
|
refs/heads/master
|
/geekcms/download_theme.py
|
import re
import os
import urllib.parse
import subprocess
from .utils import PathResolver, PathResolverContextManager
# Subversion view of the themes repository; checking out trunk/<name> pulls a
# single theme directory from GitHub.
_SVN_URL = 'https://github.com/haoxun/GeekCMS-Themes/trunk/'


def download_theme(theme_name, path):
    """Check out theme *theme_name* into *path* and scaffold project dirs.

    Requires the 'svn' executable on PATH; check_call raises
    CalledProcessError on a non-zero exit.  Whitespace in the theme name
    is stripped before building the URL.
    """
    svn_url = urllib.parse.urljoin(
        _SVN_URL,
        re.sub(r'\s', '', theme_name),
    )
    target_path = os.path.join(path, theme_name)
    subprocess.check_call(
        ['svn', 'checkout', svn_url, target_path],
    )
    # Temporarily point PathResolver at the fresh checkout and make sure the
    # standard project directories and settings file exist.
    with PathResolverContextManager(target_path):
        PathResolver.inputs(ensure_exist=True)
        PathResolver.outputs(ensure_exist=True)
        PathResolver.themes(ensure_exist=True)
        PathResolver.states(ensure_exist=True)
        PathResolver.project_settings(ensure_exist=True)
|
{"/tests/test_parser.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/tests/test_doc.py": ["/geekcms/doc_construct.py", "/geekcms/protocol.py"], "/tests/test_loadup.py": ["/geekcms/loadup.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/interface.py": ["/geekcms/utils.py", "/geekcms/doc_construct.py", "/geekcms/loadup.py", "/geekcms/protocol.py", "/geekcms/download_theme.py"], "/geekcms/parser/simple_yacc.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/utils.py"], "/geekcms/download_theme.py": ["/geekcms/utils.py"], "/tests/test_main.py": ["/geekcms/interface.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/loadup.py": ["/geekcms/utils.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/geekcms/utils.py": ["/geekcms/protocol.py"], "/geekcms/parser/simple_lex.py": ["/geekcms/parser/utils.py"], "/geekcms/sequence_analyze.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/parser/utils.py", "/geekcms/protocol.py"], "/tests/test_utils.py": ["/geekcms/utils.py"]}
|
22,915
|
huntzhan/GeekCMS
|
refs/heads/master
|
/geekcms/doc_construct.py
|
import os
# Help text shown inside a project; {0} is filled with one explanation line
# per registered extended procedure.  ("Avaliable" typo fixed -> "Available".)
_DOC_TEMPLATE_IN_PROJECT = """
Usage:
    geekcms <command> [<args>...]

Available Commands:
    run  Default procedure.
{0}
"""

# Help text shown outside a project, where only scaffolding is possible.
_DOC_TEMPLATE_NOT_IN_PROJECT = """
Usage:
    geekcms startproject <template_name>
"""

_NEWLINE = os.linesep


class DocConstructor:
    """Assemble docopt usage strings for the geekcms command line."""

    @classmethod
    def _get_explanation(cls, command, explanation):
        """Format one help line: 4-space indent, command, 2 spaces, text."""
        INDENT4 = ' ' * 4
        INDENT2 = ' ' * 2
        return (INDENT4 + command +
                INDENT2 + explanation)

    @classmethod
    def get_doc_and_cli_mapping(cls, cli_plugins):
        """Build the in-project doc plus a {command: plugin} dispatch map.

        Each plugin must implement get_command_and_explanation().
        """
        mapping = {}
        explanations = []
        for cli_plugin in cli_plugins:
            command, explanation = cli_plugin.get_command_and_explanation()
            explanations.append(
                cls._get_explanation(command, explanation),
            )
            mapping[command] = cli_plugin
        doc = _DOC_TEMPLATE_IN_PROJECT.format(
            _NEWLINE.join(explanations),
        )
        return doc, mapping

    @classmethod
    def get_doc_not_in_project(cls):
        """Return the static usage text for the 'startproject' flow."""
        return _DOC_TEMPLATE_NOT_IN_PROJECT
|
{"/tests/test_parser.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/tests/test_doc.py": ["/geekcms/doc_construct.py", "/geekcms/protocol.py"], "/tests/test_loadup.py": ["/geekcms/loadup.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/interface.py": ["/geekcms/utils.py", "/geekcms/doc_construct.py", "/geekcms/loadup.py", "/geekcms/protocol.py", "/geekcms/download_theme.py"], "/geekcms/parser/simple_yacc.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/utils.py"], "/geekcms/download_theme.py": ["/geekcms/utils.py"], "/tests/test_main.py": ["/geekcms/interface.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/loadup.py": ["/geekcms/utils.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/geekcms/utils.py": ["/geekcms/protocol.py"], "/geekcms/parser/simple_lex.py": ["/geekcms/parser/utils.py"], "/geekcms/sequence_analyze.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/parser/utils.py", "/geekcms/protocol.py"], "/tests/test_utils.py": ["/geekcms/utils.py"]}
|
22,916
|
huntzhan/GeekCMS
|
refs/heads/master
|
/geekcms/protocol.py
|
from collections import UserDict
from collections import OrderedDict
from collections import abc
from functools import partial
from functools import wraps
from inspect import signature
from inspect import Parameter
import types
class _UniqueKeyDict(dict):
def __setitem__(self, key, val):
if key in self:
raise Exception('Key Already Existed!.')
super().__setitem__(key, val)
class PluginIndex:
    """Hashable (theme, plugin) identifier keyed by its 'theme.plugin' form."""

    def __init__(self, theme_name, plugin_name):
        self.theme_name = theme_name
        self.plugin_name = plugin_name
        # Canonical dotted form -- the sole basis for hashing and equality.
        self.unique_key = '%s.%s' % (theme_name, plugin_name)

    def __hash__(self):
        return hash(self.unique_key)

    def __eq__(self, other):
        # NOTE: compares hashes only, so any object hashing alike compares equal.
        return hash(self) == hash(other)

    def __repr__(self):
        return 'PluginIndex(%s, %s)' % (self.theme_name, self.plugin_name)
class Manager(UserDict):
    """Per-asset-class registry mapping owner name -> list of asset instances.

    Installed on each asset class by SetUpObjectManager.  A subclass's
    manager may share its ancestor's data dict while _target_cls scopes
    filtering to instances of that subclass only.
    """

    def __get__(self, instance, cls):
        # Class-only descriptor access: 'obj.objects' is forbidden.
        if instance:
            raise Exception('Manager Can Not Be Called From Instances')
        return self

    def __set__(self, instance, val):
        raise Exception('Manager Can Not Be Replaced.')

    def __delete__(self, instance):
        raise Exception('Manager Can Not Be Deleted.')

    def __init__(self, target_cls, data=None):
        super().__init__()
        # share data area.
        if isinstance(data, dict):
            self.data = data
        # class to init items
        self._target_cls = target_cls

    def __getitem__(self, key):
        # Auto-vivify: unknown owners get an empty list (defaultdict-style).
        if key not in self:
            self[key] = []
        return super().__getitem__(key)

    # operations covers all types.
    def add(self, item):
        self[item.owner].append(item)

    def remove(self, item):
        self[item.owner].remove(item)
        if not self[item.owner]:
            del self[item.owner]

    def keys(self):
        return list(self)

    # operations related to _target_cls.
    def _filter_isinstance(self, items):
        # Drop items that are not instances of the target class; iterates a
        # copy because it mutates 'items' in place.
        for item in items[:]:
            if not isinstance(item, self._target_cls):
                items.remove(item)
        return items

    # Be Careful! owner is keyword-only parameter.
    def create(self, *args, owner, **kwargs):
        # Instantiate a target asset, tag it with its owner, register it.
        item = self._target_cls(*args, **kwargs)
        item.set_owner(owner)
        self[owner].append(item)
        return item

    def filter(self, owner):
        # 'owner' may be one name or an iterable of names.  NOTE(review):
        # for a str owner the result IS the stored list, so the in-place
        # isinstance filtering below also mutates the registry; an owner
        # that is neither str nor iterable leaves 'result' unbound.
        if isinstance(owner, str):
            result = self[owner]
        elif isinstance(owner, abc.Iterable):
            result = []
            owners = owner
            for owner in owners:
                result.extend(self[owner])
        return self._filter_isinstance(result)

    def values(self):
        result = []
        for items in super().values():
            result.extend(items)
        return self._filter_isinstance(result)

    def clear(self):
        # Remove only instances of _target_cls, then drop emptied owners.
        owners_to_be_remove = []
        for owner, items in self.items():
            for item in items[:]:
                if isinstance(item, self._target_cls):
                    items.remove(item)
            if not items:
                owners_to_be_remove.append(owner)
        for owner in owners_to_be_remove:
            del self[owner]
class ManagerProxyWithOwner:
    """Facade over a Manager with the 'owner' argument pre-bound.

    Copies every public manager method onto the proxy instance; methods
    whose signature declares an 'owner' parameter are wrapped with
    functools.partial(owner=owner), the rest pass through untouched.
    """

    def __init__(self, owner, manager):
        # search type(manager).__dict__
        for method_name in vars(type(manager)):
            if method_name.startswith('_'):
                continue
            method = getattr(manager, method_name)
            sig = signature(method)
            if 'owner' in sig.parameters:
                partial_method = partial(method, owner=owner)
            else:
                partial_method = method
            setattr(self, method_name, partial_method)
class SetUpObjectManager(type):
    """Metaclass attaching a Manager to every asset class as '<cls>.objects'.

    _BaseAsset itself gets no manager; each asset root created below it
    starts its own data dict, and further subclasses reuse (share) the
    data dict of the manager they inherit.
    """

    MANAGER_NAME = 'objects'

    @classmethod
    def _get_manager_data(cls, result_cls):
        """Return the inherited manager's data dict, or None for a fresh one."""
        pre_manager = getattr(result_cls, cls.MANAGER_NAME, None)
        # _BaseAsset would create the first manager, and all derived class
        # would operated a shared data field.
        if isinstance(pre_manager, Manager):
            return pre_manager.data
        else:
            return None

    def __new__(cls, *args, **kwargs):
        result_cls = super().__new__(cls, *args, **kwargs)
        # init share data for BaseResource, BaseProduct and BaseMessage.
        if result_cls.__name__ != '_BaseAsset':
            data = cls._get_manager_data(result_cls)
            setattr(
                result_cls,
                cls.MANAGER_NAME,
                Manager(result_cls, data),
            )
        return result_cls
class _BaseAsset(metaclass=SetUpObjectManager):
    """Abstract asset root: direct construction always raises.

    Concrete assets are created through Manager.create(), which also calls
    set_owner(); subclasses receive an 'objects' Manager via the metaclass.
    """

    def __init__(self, *args, **kwargs):
        # Guard against direct construction.  BUGFIX: the original passed an
        # undefined name ('owner') plus a surplus argument to format(), so
        # this raised NameError instead of the intended Exception.
        text = 'In Base Class: *args: {}; **kwargs: {}'
        raise Exception(
            text.format(args, kwargs),
        )

    def set_owner(self, owner):
        # Owner is the name the asset is registered under in its Manager.
        self.owner = owner

    @classmethod
    def get_manager_with_fixed_owner(cls, owner):
        """Return a proxy of cls.objects with 'owner' pre-bound."""
        return ManagerProxyWithOwner(owner, cls.objects)

    def __repr__(self):
        return '{}({})'.format(type(self).__name__, self.owner)
class BaseResource(_BaseAsset):
    """Marker asset category; gets its own 'objects' Manager via the metaclass."""
    pass


class BaseProduct(_BaseAsset):
    """Marker asset category; gets its own 'objects' Manager via the metaclass."""
    pass


class BaseMessage(_BaseAsset):
    """Marker asset category; gets its own 'objects' Manager via the metaclass."""
    pass
class PluginController:
    """
    Data fields and operations related to plugin 'run' method's customization.

    Two decorators configure a plugin's run():
      * accept_owners(*owners): which asset owners feed this plugin.
      * accept_parameters(...): which of resources/products/messages run()
        receives, optionally with a per-parameter type restriction.
    Both stash their configuration as attributes on the function itself.
    """

    ACCEPT_OWNERS_ATTR = '__accept_owners__'
    ACCEPT_PARAMS_ATTR = '__accept_params__'

    OWNER = 'owner'
    RESOURCES = 'resources'
    PRODUCTS = 'products'
    MESSAGES = 'messages'
    AVALIABLE_PARA_NAMES = [RESOURCES, PRODUCTS, MESSAGES]

    # Control owner
    @classmethod
    def accept_owners(cls, *owners):
        """Decorator: record the accepted owner names on the function."""
        def decorator(func):
            setattr(func, cls.ACCEPT_OWNERS_ATTR, owners)
            return func
        return decorator

    # Control incoming parameter.
    @classmethod
    def accept_parameters(cls, *fixed_params, **typed_params):
        """
        1. typed_params is empty and params is not empty.
        2. params is empty and typed_params is not empty.
        3. both params and typed_params are not empty.(conflict might occur)
        4. params might cantains items of str or tuple.
        """
        # preprocess of params and typed_params.
        params = []
        for index, item in enumerate(fixed_params[:]):
            if isinstance(item, str):
                params.append(item)
            elif isinstance(item, (list, tuple)) and len(item) == 2:
                para_name, para_type = item
                # add name to params.
                params.append(para_name)
                # update restriction of types.
                typed_params.update({para_name: para_type})
            else:
                raise SyntaxError('Error In *params.')
        # check parameters name.
        name_set = set(params) | set(typed_params)
        if not name_set:
            raise SyntaxError('Argument Can Not Be Empty.')
        if not (name_set <= set(cls.AVALIABLE_PARA_NAMES)):
            raise SyntaxError(
                'Arguments should be any among'
                ' [RESOURCES, PRODUCTS, MESSAGES]'
            )
        # set up order dict to keep the restriction.
        customized_params = OrderedDict()
        # keep the order defined by params
        for key in params:
            customized_params[key] = None
        # set the type of params.
        for key in cls.AVALIABLE_PARA_NAMES:
            if key not in typed_params:
                continue
            elif key not in params and params:
                raise SyntaxError('Parameters Conflicts.')
            else:
                customized_params[key] = typed_params[key]

        def decorator(func):
            setattr(func, cls.ACCEPT_PARAMS_ATTR, customized_params)
            return func
        return decorator

    @classmethod
    def get_owner(cls, func, cls_defined_owner):
        """Resolve the final owner list; decorator-defined owners win."""
        # get owners definded by accept_owners.
        decorator_defined_owners = getattr(func, cls.ACCEPT_OWNERS_ATTR, None)
        # ensure developer definded owner.
        if not any((cls_defined_owner, decorator_defined_owners)):
            raise Exception("Can Not Find Owner.")
        # make class defined owner iterable
        if isinstance(cls_defined_owner, str):
            cls_defined_owner = [cls_defined_owner]
        # final_owners should be a container, and owners definded by
        # accept_owners is in higher priority.
        final_owners = decorator_defined_owners or cls_defined_owner
        return final_owners

    @classmethod
    def asset_owner_filter(cls, owners):
        # Predicate for filtering assets down to the accepted owners.
        check_func = lambda item: item.owner in owners
        return check_func

    @classmethod
    def get_parameters(cls, func):
        # None when accept_parameters was never applied to func.
        return getattr(func, cls.ACCEPT_PARAMS_ATTR, None)

    @classmethod
    def count_parameters(cls, func, expect_num=None):
        """Count required positional parameters of *func* (at most 3).

        Binding func as a method first discounts the 'self' slot.
        NOTE(review): expect_num=0 is falsy and skips the equality check.
        """
        # get __signature__ of func, or generate a new signature of func.
        bound_func = types.MethodType(func, object)
        sig = signature(bound_func)
        count = 0
        for name, para in sig.parameters.items():
            if para.kind is Parameter.POSITIONAL_OR_KEYWORD\
                    and para.default is Parameter.empty:
                count += 1
        if count > 3:
            raise SyntaxError('Require only 0~3 positional parameters.')
        if expect_num and count != expect_num:
            raise SyntaxError(
                'Require {} positional parameters'.format(expect_num),
            )
        return count
class PluginRegister(type):
    """Metaclass that auto-registers every plugin class in a global mapping.

    Registration key is PluginIndex(theme, plugin).  The 'theme'/'plugin'
    class attributes (looked up case-insensitively) may be omitted: theme
    falls back to context_theme, plugin name to the class name.
    """

    THEME = 'theme'
    PLUGIN = 'plugin'

    # PluginIndex -> plugin class; duplicate registration raises.
    plugin_mapping = _UniqueKeyDict()
    # Theme currently being loaded; used as the default theme name.
    context_theme = None

    @classmethod
    def unset_context_theme(cls):
        cls.context_theme = None

    @classmethod
    def clean_up_registered_plugins(cls):
        # Fresh mapping (used by tests to isolate registration state).
        cls.plugin_mapping = _UniqueKeyDict()

    @classmethod
    def _find_case_insensitive_name(cls, target_name, namespace):
        # Match class attributes regardless of case ('Theme', 'THEME', ...).
        for name, val in namespace.items():
            if name.lower() == target_name:
                return val
        return None

    @classmethod
    def get_plugin(cls, plugin_index):
        return cls.plugin_mapping.get(plugin_index, None)

    @classmethod
    def get_registered_plugins(cls):
        # Shallow copy so callers cannot mutate the registry.
        return dict(cls.plugin_mapping)

    @classmethod
    def _get_theme_name(cls, namespace):
        find_name = cls._find_case_insensitive_name
        # class-level attribute 'theme' could be omitted, in such case the name
        # of theme's top-level directory would be adapt.
        theme_name = find_name(cls.THEME, namespace) or cls.context_theme
        return theme_name

    @classmethod
    def _get_plugin_name(cls, plugin_cls, namespace):
        find_name = cls._find_case_insensitive_name
        # class-level attribute 'plugin' could be omitted, in such case the
        # name of plugin class would be adapt.
        plugin_name = find_name(cls.PLUGIN, namespace) or plugin_cls.__name__
        return plugin_name

    @classmethod
    def _register_plugin(cls, plugin_cls, namespace):
        # get attributes.
        theme_name = cls._get_theme_name(namespace)
        plugin_name = cls._get_plugin_name(plugin_cls, namespace)
        # set attrbutes of theme and plugin to class.
        setattr(plugin_cls, cls.THEME, theme_name)
        setattr(plugin_cls, cls.PLUGIN, plugin_name)
        # register plugin
        plugin_index = PluginIndex(theme_name, plugin_name)
        cls.plugin_mapping[plugin_index] = plugin_cls

    @classmethod
    def _should_process(cls, cls_name):
        # Skip the abstract bases themselves.
        return cls_name not in ['BasePlugin', 'BaseExtendedProcedure']

    def __new__(cls, cls_name, bases, namespace, **kargs):
        plugin_cls = super().__new__(cls,
                                     cls_name, bases, namespace,
                                     **kargs)
        if cls._should_process(cls_name):
            cls._register_plugin(plugin_cls, namespace)
        return plugin_cls
class PluginRegisterAndRunFilter(PluginRegister):
    """Metaclass that registers plugins and wraps their ``run`` method.

    The wrapper feeds ``run`` with asset lists (resources / products /
    messages) pre-filtered by owner and by the spec attached through
    ``PluginController.accept_parameters`` / ``accept_owners``.
    """

    PLUGIN_RUN_METHOD_NAME = 'run'

    @classmethod
    def _data_filter(cls, func=None, owner=''):
        """Decorate *func* so it receives owner-filtered asset lists."""
        # support decorator
        if func is None:
            return partial(cls._data_filter, owner=owner)
        # begin decorating
        @wraps(func)
        def run(self):
            # default asset base classes, indexed by AVALIABLE_PARA_NAMES.
            params = {
                PluginController.RESOURCES: BaseResource,
                PluginController.PRODUCTS: BaseProduct,
                PluginController.MESSAGES: BaseMessage,
            }
            owners = PluginController.get_owner(func, owner)
            check_owner_func = PluginController.asset_owner_filter(owners)
            # get parameters order defined by accept_parameters
            params_order = PluginController.get_parameters(func)
            if params_order is None:
                # make sure the number of the function's
                # POSITIONAL_OR_KEYWORD parameters is <= 3.
                count = PluginController.count_parameters(func)
                # default: take the first *count* canonical parameter names.
                # NOTE(review): params_order must support both iteration and
                # .items() in this branch as well — confirm what slicing
                # AVALIABLE_PARA_NAMES yields.
                params_order = PluginController.AVALIABLE_PARA_NAMES[:count]
            else:
                # call count_parameters to check function's signature and make
                # sure that the number of POSITIONAL_OR_KEYWORD parameters is
                # exactly the same as the length of params_order
                PluginController.count_parameters(func, len(params_order))
            # adjust params with user defined (sub)types.
            for key, val in params_order.items():
                if val is None:
                    continue
                params[key] = val
            # filter assets by owner, preserving the declared order.
            processed_params = []
            for key in params_order:
                filtered_items = filter(
                    check_owner_func,
                    params[key].objects.values(),
                )
                processed_params.append(list(filtered_items))
            # here we go.
            return func(self, *processed_params)
        return run

    def __new__(cls, cls_name, bases, namespace, **kargs):
        plugin_cls = super().__new__(cls,
                                     cls_name, bases, namespace,
                                     **kargs)
        if cls._should_process(cls_name):
            theme_name = cls._get_theme_name(namespace)
            # replace 'run' with its data-filtering wrapper.
            process_func = getattr(plugin_cls, cls.PLUGIN_RUN_METHOD_NAME)
            setattr(
                plugin_cls,
                cls.PLUGIN_RUN_METHOD_NAME,
                cls._data_filter(process_func, theme_name),
            )
        return plugin_cls
class BasePlugin(metaclass=PluginRegisterAndRunFilter):
    """Base class for theme plugins.

    Subclasses are registered automatically by the metaclass; their ``run``
    method is wrapped so it receives owner-filtered asset lists.
    """

    @classmethod
    def get_manager_bind_with_plugin(cls, other_cls):
        """Return *other_cls*'s manager bound to this plugin's theme."""
        fixed_manager = other_cls.get_manager_with_fixed_owner(
            getattr(cls, PluginRegister.THEME),
        )
        return fixed_manager

    # all plugins should define a 'run' function with 'self' as its first
    # parameter, and with zero to three other positional parameters:
    # resources, products, messages.  Use 'accept_parameters' for finer
    # control over what is passed in.
    def run(self, resources=None, products=None, messages=None):
        # NotImplementedError (a subclass of Exception, so existing
        # handlers still match) is the conventional abstract-method signal;
        # the original raised a bare Exception.
        raise NotImplementedError('BasePlugin.run must be overridden.')
class BaseExtendedProcedure(metaclass=PluginRegister):
    """Base class for plugins that extend the command line interface.

    Lifecycle:
    1. GeekCMS calls get_command_and_explanation() to construct the main doc.
    2. When the user enters 'geekcms <command> [args...]', GeekCMS calls
       get_doc() to construct a new docopt instance; the resulting args are
       passed to run(args).
    """

    def get_command_and_explanation(self):
        """Return a (<command>, explanation) pair."""
        # NotImplementedError keeps the abstract-method intent explicit
        # while remaining an Exception subclass for existing handlers.
        raise NotImplementedError(
            'get_command_and_explanation must be overridden.')

    def get_doc(self):
        """Return the docopt usage text for this command."""
        raise NotImplementedError('get_doc must be overridden.')

    def run(self, args):
        """Execute the command with the parsed docopt *args*."""
        raise NotImplementedError('run must be overridden.')
|
{"/tests/test_parser.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/tests/test_doc.py": ["/geekcms/doc_construct.py", "/geekcms/protocol.py"], "/tests/test_loadup.py": ["/geekcms/loadup.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/interface.py": ["/geekcms/utils.py", "/geekcms/doc_construct.py", "/geekcms/loadup.py", "/geekcms/protocol.py", "/geekcms/download_theme.py"], "/geekcms/parser/simple_yacc.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/utils.py"], "/geekcms/download_theme.py": ["/geekcms/utils.py"], "/tests/test_main.py": ["/geekcms/interface.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/loadup.py": ["/geekcms/utils.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/geekcms/utils.py": ["/geekcms/protocol.py"], "/geekcms/parser/simple_lex.py": ["/geekcms/parser/utils.py"], "/geekcms/sequence_analyze.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/parser/utils.py", "/geekcms/protocol.py"], "/tests/test_utils.py": ["/geekcms/utils.py"]}
|
22,917
|
huntzhan/GeekCMS
|
refs/heads/master
|
/tests/test_main.py
|
import os
import sys
import unittest
import shutil
from geekcms.interface import (_not_in_project, _in_project)
from geekcms.utils import (PathResolver, ShareData,
ThemeSettings, ProjectSettings)
from geekcms.protocol import PluginRegister
class TestCLI(unittest.TestCase):
    """End-to-end checks of the in/out-of-project CLI entry points."""

    def setUp(self):
        # start every test from a clean registry/settings state.
        PluginRegister.clean_up_registered_plugins()
        ProjectSettings.clear()
        ThemeSettings.clear()
        ShareData.clear()
        # force the theme packages to be re-imported from scratch.
        for mod_name in ('test_theme1', 'test_theme1.plugin',
                         'test_theme2', 'test_theme2.plugin'):
            sys.modules.pop(mod_name, None)

    def _get_path(self, rel_path):
        return os.path.join(os.getcwd(), rel_path)

    def test_not_in_project(self):
        cases_path = self._get_path('tests/cases')
        PathResolver.set_project_path(cases_path)
        template = 'simple'
        sys.argv = ['geekcms', 'startproject', template]
        self.assertEqual(template, _not_in_project())
        # remove the project skeleton created by the command.
        shutil.rmtree(os.path.join(cases_path, template))

    def test_in_project(self):
        PathResolver.set_project_path(self._get_path('tests/cases/project'))
        for argv in (['geekcms', 'run'],
                     ['geekcms', 'testcmd', '-a', '-b', 'c', 'd']):
            sys.argv = argv
            self.assertEqual(argv[1], _in_project())
|
{"/tests/test_parser.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/tests/test_doc.py": ["/geekcms/doc_construct.py", "/geekcms/protocol.py"], "/tests/test_loadup.py": ["/geekcms/loadup.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/interface.py": ["/geekcms/utils.py", "/geekcms/doc_construct.py", "/geekcms/loadup.py", "/geekcms/protocol.py", "/geekcms/download_theme.py"], "/geekcms/parser/simple_yacc.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/utils.py"], "/geekcms/download_theme.py": ["/geekcms/utils.py"], "/tests/test_main.py": ["/geekcms/interface.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/loadup.py": ["/geekcms/utils.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/geekcms/utils.py": ["/geekcms/protocol.py"], "/geekcms/parser/simple_lex.py": ["/geekcms/parser/utils.py"], "/geekcms/sequence_analyze.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/parser/utils.py", "/geekcms/protocol.py"], "/tests/test_utils.py": ["/geekcms/utils.py"]}
|
22,918
|
huntzhan/GeekCMS
|
refs/heads/master
|
/geekcms/loadup.py
|
import os
import importlib
from collections import OrderedDict
from .utils import (SettingsLoader, ProjectSettings, ThemeSettings,
ShareData, PathResolver, SysPathContextManager)
from .protocol import PluginRegister
from .sequence_analyze import SequenceParser
class SettingsProcedure:
    """Load project/theme settings, then import every registered theme."""

    @classmethod
    def _load_share_data(cls, loaders):
        # '[Share]' sections of every settings file feed the shared pool.
        ShareData.load_data(loaders)

    @classmethod
    def _load_project_settings(cls, path):
        project_settings_loader = SettingsLoader(path)
        ProjectSettings.load_data(project_settings_loader)
        cls._load_share_data(project_settings_loader)

    @classmethod
    def _load_theme_settings(cls, path, name):
        theme_settings_loader = SettingsLoader(path, name)
        ThemeSettings.load_data(theme_settings_loader)
        cls._load_share_data(theme_settings_loader)

    @classmethod
    def _load_settings(cls):
        pr = PathResolver
        # project settings go first: they list the registered themes.
        cls._load_project_settings(pr.project_settings())
        # then each theme's own settings file.
        # (removed the unused 'theme_settings_set' accumulator.)
        for theme_name in ProjectSettings.get_registered_theme_name():
            theme_settings_path = pr.theme_settings(theme_name)
            cls._load_theme_settings(theme_settings_path, theme_name)

    @classmethod
    def _load_themes(cls):
        # import each theme package with the themes dir temporarily on
        # sys.path and the registration context set to the theme's name.
        pr = PathResolver
        theme_dir = pr.themes()
        for theme_name in ProjectSettings.get_registered_theme_name():
            with SysPathContextManager(theme_name, theme_dir):
                importlib.import_module(theme_name)

    @classmethod
    def run(cls, project_path=None):
        """Entry point; *project_path* is None when the path is already set."""
        if project_path:
            PathResolver.set_project_path(project_path)
        cls._load_settings()
        cls._load_themes()
class PluginProcedure:
    """Resolve the plugin execution order declared in theme settings."""

    # default runtime pipeline components, in execution order.
    runtime_components = ['pre_load', 'in_load', 'post_load',
                          'pre_process', 'in_process', 'post_process',
                          'pre_write', 'in_write', 'post_write']
    # components extending the command line instead of the pipeline.
    extended_procedure = ['cli_extend']

    @classmethod
    def _get_plain_text(cls, theme_name, field_name):
        # option lookup key is '<theme>.<component>'.
        search_key = '{}.{}'.format(theme_name, field_name)
        plain_text = ThemeSettings.get(search_key)
        return plain_text

    @classmethod
    def _get_execution_orders(cls):
        """Parse every component's plugin sequence.

        Returns (error_happened, orders); a component that failed to parse
        is reported and left out of *orders*.
        """
        error_happend = False
        exec_orders = OrderedDict()
        # exec_orders contains both default and extended procedures.
        for component in (cls.runtime_components + cls.extended_procedure):
            parser = SequenceParser()
            for theme_name in ProjectSettings.get_registered_theme_name():
                plain_text = cls._get_plain_text(theme_name, component)
                if plain_text is None:
                    continue
                # analyze
                parser.analyze(theme_name, plain_text)
            if parser.error:
                parser.report_error()
                error_happend = True
            else:
                exec_orders[component] = parser.generate_sequence()
        return error_happend, exec_orders

    @classmethod
    def _linearize_exec_orders(cls, exec_orders):
        """Split *exec_orders* into (flat runtime order, cli indices)."""
        extract_field = cls.extended_procedure[0]
        # Fix: pop with a default instead of indexing — when the cli
        # component failed to parse it was never added, and the former
        # exec_orders[extract_field] raised KeyError before run() could
        # report the parse error via SyntaxError.
        cli_indices = exec_orders.pop(extract_field, [])
        # generate the flat plugin calling sequence.
        flat_orders = []
        for container in exec_orders.values():
            flat_orders.extend(container)
        return flat_orders, cli_indices

    @classmethod
    def _verify_plugins(cls, flat_orders):
        """Return True when some referenced plugin is not registered."""
        for plugin_index in flat_orders:
            plugin = PluginRegister.get_plugin(plugin_index)
            if plugin is None:
                # can not find such plugin
                print('Can Not Find {}'.format(plugin_index))
                return True
        return False

    @classmethod
    def run(cls):
        parse_error, exec_orders = cls._get_execution_orders()
        flat_order, cli_indices = cls._linearize_exec_orders(exec_orders)
        match_error = cls._verify_plugins(flat_order + cli_indices)
        if parse_error or match_error:
            raise SyntaxError('Error happended, suspend program.')
        return flat_order, cli_indices
|
{"/tests/test_parser.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/tests/test_doc.py": ["/geekcms/doc_construct.py", "/geekcms/protocol.py"], "/tests/test_loadup.py": ["/geekcms/loadup.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/interface.py": ["/geekcms/utils.py", "/geekcms/doc_construct.py", "/geekcms/loadup.py", "/geekcms/protocol.py", "/geekcms/download_theme.py"], "/geekcms/parser/simple_yacc.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/utils.py"], "/geekcms/download_theme.py": ["/geekcms/utils.py"], "/tests/test_main.py": ["/geekcms/interface.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/loadup.py": ["/geekcms/utils.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/geekcms/utils.py": ["/geekcms/protocol.py"], "/geekcms/parser/simple_lex.py": ["/geekcms/parser/utils.py"], "/geekcms/sequence_analyze.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/parser/utils.py", "/geekcms/protocol.py"], "/tests/test_utils.py": ["/geekcms/utils.py"]}
|
22,919
|
huntzhan/GeekCMS
|
refs/heads/master
|
/tests/cases/project/themes/test_theme2/plugin.py
|
from geekcms import protocol
class TestPlugin(protocol.BasePlugin):
    # fixture plugin named 'b'; its theme name falls back to the
    # registration context set while the theme package is imported.
    plugin = 'b'
    def run(self):
        # no-op: the fixture only needs to be registered.
        pass
|
{"/tests/test_parser.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/tests/test_doc.py": ["/geekcms/doc_construct.py", "/geekcms/protocol.py"], "/tests/test_loadup.py": ["/geekcms/loadup.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/interface.py": ["/geekcms/utils.py", "/geekcms/doc_construct.py", "/geekcms/loadup.py", "/geekcms/protocol.py", "/geekcms/download_theme.py"], "/geekcms/parser/simple_yacc.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/utils.py"], "/geekcms/download_theme.py": ["/geekcms/utils.py"], "/tests/test_main.py": ["/geekcms/interface.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/loadup.py": ["/geekcms/utils.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/geekcms/utils.py": ["/geekcms/protocol.py"], "/geekcms/parser/simple_lex.py": ["/geekcms/parser/utils.py"], "/geekcms/sequence_analyze.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/parser/utils.py", "/geekcms/protocol.py"], "/tests/test_utils.py": ["/geekcms/utils.py"]}
|
22,920
|
huntzhan/GeekCMS
|
refs/heads/master
|
/tests/test_protocol.py
|
"""
Test Plan.
PluginIndex:
test_unique:
make sure plugin index is hashable and its uniqueness.
BaseResource, BaseProduct, BaseMessage, _BaseAsset:
test_init:
these class can not be initialized.
test_access_manager_from_instance:
manager can not be accessed from instance.
Manager:
test_trace_one_class:
initialize single derived class of [BaseResource, BaseProduct,
BaseMessage,], make sure the manager can trace that instance.
test_trace_multi_classes:
initialize multiple derived classes of the same base class, make sure
the manager of base class can trace multiple instances.
test_usage:
make sure the manager's operations, such as create, add, remove and so
on, are correct.
ManagerProxyWithOwner:
test_proxy:
make sure the methods with 'owner' in their signatures are bound properly.
BasePlugin, PluginRegister, PluginController:
test_register:
ensure the plugin is registered by the metaclass, and so is the process of
retrieving plugins.
test_theme_name:
1. define a class-level theme attr.
2. undefine class-level theme attr, but PluginRegister defines context
theme.
test_plugin_name:
1. define class-level plugin attr.
2. undefine ...
test_default_params:
1. test run method with 0~3 defined parameters(exclude self).
2. test run method with parameters more than 3.
test_accept_parameters:
make sure accept_parameters works.
test_accept_owners:
make sure accept_owners works, and that it does not conflict with
theme_name.
"""
import unittest
from collections import defaultdict
from geekcms import protocol
class PluginIndexTest(unittest.TestCase):
    """PluginIndex should compare equal to its dotted string form."""

    def test_unique(self):
        first = protocol.PluginIndex('a', 'b')
        second = protocol.PluginIndex('a', 'c')
        self.assertEqual('a.b', first)
        self.assertNotEqual(second, first)
class AssetTest(unittest.TestCase):
    """Behaviour of asset subclasses and their manager access rules."""

    def setUp(self):
        class TestClass(protocol.BaseResource):
            def __init__(self, owner):
                self.set_owner(owner)
        self.TestClass = TestClass

    def test_init(self):
        asset = self.TestClass('testowner')
        self.assertEqual('testowner', asset.owner)
        self.assertIsInstance(asset, self.TestClass)
        self.assertNotIsInstance(asset, protocol.BaseProduct)

    def test_access_manager_from_instance(self):
        # reading 'objects' through an instance must fail.
        asset = self.TestClass('testowner')
        with self.assertRaises(Exception):
            asset.objects
        # so must assigning it ...
        asset = self.TestClass('testowner')
        with self.assertRaises(Exception):
            asset.objects = None
        # ... and deleting it.
        asset = self.TestClass('testowner')
        with self.assertRaises(Exception):
            del asset.objects
class ManagerRegisterTest(unittest.TestCase):
    """Managers must trace instances of one or many derived asset classes."""

    def setUp(self):
        protocol.BaseResource.objects.clear()

        # create test class
        class TestAsset(protocol.BaseResource):
            def __init__(self):
                pass

        self.TestAsset = TestAsset
        self.owner = 'testowner'

    def test_trace_one_class(self):
        # a manager with a fixed owner stamps that owner on new assets.
        fixed = self.TestAsset.get_manager_with_fixed_owner(self.owner)
        asset = fixed.create()
        self.assertIsInstance(asset, self.TestAsset)
        self.assertEqual(self.owner, asset.owner)

    def test_trace_multi_classes(self):
        # TestAsset and AnotherTestAsset share BaseResource's manager.
        class AnotherTestAsset(protocol.BaseResource):
            def __init__(self):
                pass

        created = {
            self.TestAsset.objects.create(owner=self.owner),
            AnotherTestAsset.objects.create(owner=self.owner),
        }
        self.assertSetEqual(
            created,
            set(protocol.BaseResource.objects.values()),
        )
        # resources must not leak into the other asset namespaces.
        self.assertFalse(protocol.BaseProduct.objects.values())
        self.assertFalse(protocol.BaseMessage.objects.values())
class ManagerUsageTest(unittest.TestCase):
    """create/add/remove/filter behaviour of a stand-alone Manager."""

    def setUp(self):
        class TestClass(protocol.BaseResource):
            def __init__(self):
                pass
        self.TestClass = TestClass
        self.owner = 'testowner'
        self.manager = protocol.Manager(TestClass)

    def test_create(self):
        asset = self.manager.create(owner=self.owner)
        self.assertIsInstance(asset, self.TestClass)
        self.assertEqual(self.owner, asset.owner)

    def test_add_remove(self):
        asset = self.TestClass()
        asset.set_owner(self.owner)
        # empty before add, exactly one entry after, empty again after remove.
        self.assertEqual(defaultdict(list), self.manager)
        self.manager.add(asset)
        self.assertDictEqual({self.owner: [asset]}, dict(self.manager))
        self.manager.remove(asset)
        self.assertEqual(defaultdict(list), self.manager)

    def test_filter_keys_values(self):
        first_owner, second_owner = 'owner_1', 'owner_2'
        first = self.manager.create(owner=first_owner)
        second = self.manager.create(owner=second_owner)
        self.assertListEqual([first], self.manager.filter(first_owner))
        self.assertListEqual([second], self.manager.filter(second_owner))
        self.assertSetEqual({first_owner, second_owner},
                            set(self.manager.keys()))
        self.assertSetEqual({first, second}, set(self.manager.values()))
class ManagerProxyWithOwnerTest(unittest.TestCase):
    """The proxy should delegate to the manager with its owner pre-bound."""

    def setUp(self):
        class TestClass(protocol.BaseResource):
            def __init__(self):
                pass
        self.TestClass = TestClass
        self.owner = 'testowner'
        self.manager = protocol.Manager(TestClass)

    def test_proxy(self):
        proxy = protocol.ManagerProxyWithOwner(self.owner, self.manager)
        created = proxy.create()
        self.assertDictEqual({self.owner: [created]}, dict(self.manager))
        proxy.remove(created)
        self.assertEqual(defaultdict(list), self.manager)
        # every owner-less proxy call operates on the bound owner.
        created = proxy.create()
        self.assertListEqual([created], proxy.filter())
        self.assertListEqual([self.owner], proxy.keys())
        self.assertListEqual([created], proxy.values())
class PluginTest(unittest.TestCase):
    """Registration, naming, and run-parameter filtering of plugins."""

    def setUp(self):
        # clean up global asset pools and the plugin registry.
        protocol.BaseResource.objects.clear()
        protocol.BaseProduct.objects.clear()
        protocol.BaseMessage.objects.clear()
        protocol.PluginRegister.clean_up_registered_plugins()
        protocol.PluginRegister.unset_context_theme()
        self.theme_name = 'testtheme'
        self.plugin_name = 'testplugin'

    def test_register(self):
        class TestPlugin(protocol.BasePlugin):
            theme = self.theme_name
            plugin = self.plugin_name

            def run(self):
                pass

        self.assertDictEqual(
            protocol.PluginRegister.get_registered_plugins(),
            {protocol.PluginIndex(self.theme_name, self.plugin_name):
             TestPlugin},
        )

    def test_theme_name(self):
        protocol.PluginRegister.context_theme = self.theme_name
        temp_theme_name = 'theme_name_for_test'

        class PluginWithThemeName(protocol.BasePlugin):
            theme = temp_theme_name
            plugin = self.plugin_name

            def run(self):
                pass

        class PluginWithoutThemeName(protocol.BasePlugin):
            plugin = self.plugin_name

            def run(self):
                pass

        self.assertSetEqual(
            # get theme_names
            set(protocol.PluginRegister.get_registered_plugins()),
            set((
                protocol.PluginIndex(temp_theme_name, self.plugin_name),
                protocol.PluginIndex(self.theme_name, self.plugin_name),
            )),
        )

    def test_plugin_name(self):
        protocol.PluginRegister.context_theme = self.theme_name

        class PluginWithPluginName(protocol.BasePlugin):
            plugin = self.plugin_name

            def run(self):
                pass

        class PluginWithoutPluginName(protocol.BasePlugin):

            def run(self):
                pass

        self.assertSetEqual(
            # get theme_names
            set(protocol.PluginRegister.get_registered_plugins()),
            set((
                protocol.PluginIndex(self.theme_name, self.plugin_name),
                protocol.PluginIndex(self.theme_name,
                                     'PluginWithoutPluginName'),
            )),
        )

    def test_default_params(self):
        class TestResource(protocol.BaseResource):
            def __init__(self):
                pass

        class TestProduct(protocol.BaseProduct):
            def __init__(self):
                pass

        class TestMessage(protocol.BaseMessage):
            def __init__(self):
                pass

        theme_zero = 'zero'
        theme_one = 'one'
        theme_three = 'three'
        # one asset of each kind per theme, so each plugin sees exactly one.
        for theme_name in [theme_zero, theme_one, theme_three]:
            TestResource.objects.create(owner=theme_name)
            TestProduct.objects.create(owner=theme_name)
            TestMessage.objects.create(owner=theme_name)

        test_self = self

        class PluginZeroParam(protocol.BasePlugin):
            theme = theme_zero

            def run(self):
                pass

        class PluginOneParam(protocol.BasePlugin):
            theme = theme_one

            def run(self, resources):
                test_self.assertEqual(len(resources), 1)
                test_self.assertIsInstance(resources[0], TestResource)

        class PluginThreeParam(protocol.BasePlugin):
            theme = theme_three

            def run(self, resources, products, messages):
                test_self.assertEqual(len(resources), 1)
                test_self.assertEqual(len(products), 1)
                test_self.assertEqual(len(messages), 1)
                test_self.assertIsInstance(resources[0], TestResource)
                test_self.assertIsInstance(products[0], TestProduct)
                test_self.assertIsInstance(messages[0], TestMessage)

        plugin_mapping =\
            protocol.PluginRegister.get_registered_plugins().items()
        for _, plugin_cls in plugin_mapping:
            plugin = plugin_cls()
            plugin.run()

    def test_accept_parameters(self):
        protocol.PluginRegister.context_theme = self.theme_name
        pcl = protocol.PluginController
        test_self = self

        class TestMessageBase(protocol.BaseMessage):
            def __init__(self):
                pass

        class TestMessageDerived(TestMessageBase):
            pass

        TestMessageBase.objects.create(owner=self.theme_name)
        TestMessageDerived.objects.create(owner=self.theme_name)

        class TestPluginBase(protocol.BasePlugin):
            @pcl.accept_parameters(pcl.MESSAGES)
            def run(self, messages):
                test_self.assertEqual(len(messages), 2)
                test_self.assertIsInstance(messages[0], TestMessageBase)
                test_self.assertIsInstance(messages[1], TestMessageBase)

        class TestPluginDerived(protocol.BasePlugin):
            TYPED_PARAMS = {pcl.MESSAGES: TestMessageDerived}

            @pcl.accept_parameters(
                pcl.MESSAGES,
                **TYPED_PARAMS
            )
            def run(self, messages):
                test_self.assertEqual(len(messages), 1)
                test_self.assertIsInstance(messages[0], TestMessageDerived)

        class TestPluginAnotherDerived(protocol.BasePlugin):
            @pcl.accept_parameters(
                (pcl.MESSAGES, TestMessageDerived)
            )
            def run(self, messages):
                test_self.assertEqual(len(messages), 1)
                test_self.assertIsInstance(messages[0], TestMessageDerived)

        plugin_base = TestPluginBase()
        plugin_derived = TestPluginDerived()
        # bug fix: this used to instantiate TestPluginDerived a second time,
        # so the tuple-style accept_parameters of TestPluginAnotherDerived
        # was never actually exercised.
        plugin_another_derived = TestPluginAnotherDerived()
        plugin_base.run()
        plugin_derived.run()
        plugin_another_derived.run()

    def test_accept_owners(self):
        protocol.PluginRegister.context_theme = self.theme_name
        pcl = protocol.PluginController
        test_self = self

        class TestResource(protocol.BaseResource):
            def __init__(self):
                pass

        target_theme_name = 'a'
        noice_theme_name = 'b'
        TestResource.objects.create(owner=target_theme_name)
        TestResource.objects.create(owner=noice_theme_name)

        class TestPlugin(protocol.BasePlugin):
            theme = noice_theme_name

            # accept_owners overrides the class-defined theme as filter key.
            @pcl.accept_owners(target_theme_name)
            def run(self, resources):
                test_self.assertEqual(len(resources), 1)
                test_self.assertIsInstance(resources[0], TestResource)

        plugin = TestPlugin()
        plugin.run()
if __name__ == '__main__':
    # allow running this test module directly.
    unittest.main()
|
{"/tests/test_parser.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/tests/test_doc.py": ["/geekcms/doc_construct.py", "/geekcms/protocol.py"], "/tests/test_loadup.py": ["/geekcms/loadup.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/interface.py": ["/geekcms/utils.py", "/geekcms/doc_construct.py", "/geekcms/loadup.py", "/geekcms/protocol.py", "/geekcms/download_theme.py"], "/geekcms/parser/simple_yacc.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/utils.py"], "/geekcms/download_theme.py": ["/geekcms/utils.py"], "/tests/test_main.py": ["/geekcms/interface.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/loadup.py": ["/geekcms/utils.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/geekcms/utils.py": ["/geekcms/protocol.py"], "/geekcms/parser/simple_lex.py": ["/geekcms/parser/utils.py"], "/geekcms/sequence_analyze.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/parser/utils.py", "/geekcms/protocol.py"], "/tests/test_utils.py": ["/geekcms/utils.py"]}
|
22,921
|
huntzhan/GeekCMS
|
refs/heads/master
|
/geekcms/utils.py
|
"""
This package implements tools that can facilitates theme development.
"""
import os
import re
import sys
import configparser
from collections import abc
from functools import wraps
from .protocol import PluginRegister
class SettingsLoader:
    """Read a single settings file and expose its sections.

    Project-level settings use the reserved name 'global'; theme-level
    loaders carry their theme's name.
    """

    PROJECT_GLOBAL = 'global'

    def __init__(self, path, name=None):
        # settings of the project are identified by the 'global' prefix.
        self.name = name if name else self.PROJECT_GLOBAL
        # check and save path
        if not os.path.exists(path):
            raise Exception('{} Not Exists.'.format(path))
        self.path = path
        # load up
        self._load_settings()

    def _load_settings(self):
        parser = configparser.ConfigParser()
        with open(self.path) as f:
            parser.read_file(f)
        self._config = parser

    def get_section(self, section):
        """Return the named section, or None when it is absent."""
        return self._config[section] if section in self._config else None
class _SearchData:
DATA_FIELD = None
@classmethod
def load_data(cls, loaders):
# ensure iterable
if not isinstance(loaders, abc.Iterable):
loaders = [loaders]
for loader in loaders:
section = loader.get_section(cls.DATA_FIELD)
if section:
cls._vars[loader.name] = section
@classmethod
def clear(cls):
cls._vars.clear()
cls._cache.clear()
@classmethod
def _access_vars(cls, section_name, key):
section = cls._vars.get(section_name, None)
if section:
val = section.get(key, None)
return val
else:
return None
@classmethod
def _generate_key_val_of_vars(cls):
for section_name, section in cls._vars.items():
for key, val in section.items():
yield key, val
@classmethod
def _get_cache(cls, key):
return getattr(cls._cache, key, None)
@classmethod
def _set_cache(cls, key, val):
cls._cache[key] = val
@classmethod
def get(cls, search_key):
# anyway, lookup the cache first.
val = cls._get_cache(search_key)
if val:
return val
# First way.
dot_index = search_key.find('.')
if dot_index != -1 and dot_index != (len(search_key) - 1):
# prefix with theme name or global.
section_name = search_key[:dot_index]
key = search_key[dot_index + 1:]
val = cls._access_vars(section_name, key)
if val:
cls._set_cache(search_key, val)
return val
# Second way.
for key, val in cls._generate_key_val_of_vars():
if search_key == key:
cls._set_cache(search_key, val)
return val
return None
class ShareData(_SearchData):
    # key-value pairs collected from every '[Share]' section, searchable
    # across the project and all theme settings files.
    DATA_FIELD = 'Share'
    _vars = {}
    _cache = {}
class ProjectSettings(_SearchData):
    """Project-level settings collected from '[RegisterTheme]' sections."""

    DATA_FIELD = 'RegisterTheme'
    _vars = {}
    _cache = {}

    @classmethod
    def get_registered_theme_name(cls):
        """Return the registered theme names, in declaration order.

        Theme names are whitespace-separated in the 'themes' option; an
        absent option yields an empty list (the original raised TypeError
        from re.split(None)).
        """
        THEMES_KEY = 'themes'
        plain_text = cls.get(THEMES_KEY)
        if plain_text is None:
            return []
        # yep, names of themes are split by whitespace.
        theme_names = re.split(r'\s+', plain_text)
        # remove empty strings.
        return [name for name in theme_names if name]
class ThemeSettings(_SearchData):
    # per-theme plugin declarations, collected from each theme's
    # '[RegisterPlugin]' section and keyed by theme name.
    DATA_FIELD = 'RegisterPlugin'
    _vars = {}
    _cache = {}
class PathResolver:
    """Resolve the standard directory/file layout of a project.

    Every resolver returns a path under the class-level ``project_path``;
    pass ``ensure_exist=True`` to create the directory (or touch the file)
    on demand.
    """

    INPUTS = 'inputs'
    OUTPUTS = 'outputs'
    THEMES = 'themes'
    STATES = 'states'
    PROJECT_SETTINGS = 'settings'
    THEME_SETTINGS = 'settings'

    project_path = None

    def _make_ensure_decorator(create):
        # factory shared by the dir/file variants below: wrap a resolver
        # with an 'ensure_exist' keyword switch that materializes the path.
        def decorator(func):
            @wraps(func)
            def wrapper(*args, ensure_exist=False, **kwargs):
                path = func(*args, **kwargs)
                if ensure_exist and not os.path.exists(path):
                    create(path)
                return path
            return wrapper
        return decorator

    def _touch(path):
        open(path, 'a').close()

    _let_dir_exist = _make_ensure_decorator(os.makedirs)
    _let_file_exist = _make_ensure_decorator(_touch)

    @classmethod
    def set_project_path(cls, path):
        cls.project_path = path

    @classmethod
    def _join_project(cls, path):
        return os.path.join(cls.project_path, path)

    @classmethod
    @_let_dir_exist
    def inputs(cls):
        return cls._join_project(cls.INPUTS)

    @classmethod
    @_let_dir_exist
    def outputs(cls):
        return cls._join_project(cls.OUTPUTS)

    @classmethod
    @_let_dir_exist
    def themes(cls):
        return cls._join_project(cls.THEMES)

    @classmethod
    @_let_dir_exist
    def states(cls):
        return cls._join_project(cls.STATES)

    @classmethod
    @_let_dir_exist
    def theme_state(cls, theme_name):
        return os.path.join(cls.states(), theme_name)

    @classmethod
    @_let_dir_exist
    def theme_dir(cls, theme_name):
        return os.path.join(cls.themes(), theme_name)

    @classmethod
    @_let_file_exist
    def project_settings(cls):
        return cls._join_project(cls.PROJECT_SETTINGS)

    @classmethod
    @_let_file_exist
    def theme_settings(cls, theme_name):
        return os.path.join(cls.theme_dir(theme_name), cls.THEME_SETTINGS)
class SysPathContextManager:
    """Temporarily put a theme directory on sys.path and set the plugin
    registration context to the theme's name."""
    def __init__(self, theme_name, theme_dir):
        self.theme_name = theme_name
        self.path = theme_dir
    def __enter__(self):
        # front of sys.path so the theme package shadows anything else.
        sys.path.insert(0, self.path)
        PluginRegister.context_theme = self.theme_name
    def __exit__(self, *args):
        sys.path.remove(self.path)
        PluginRegister.unset_context_theme()
class PathResolverContextManager:
    """Temporarily redirect PathResolver to another project path,
    restoring the previous path on exit."""
    def __init__(self, path=None):
        self.path = path
    def __enter__(self):
        # remember the current path so it can be restored.
        self.bak_path = PathResolver.project_path
        PathResolver.set_project_path(self.path)
    def __exit__(self, *args):
        PathResolver.set_project_path(self.bak_path)
def check_cwd_is_project():
    """Return True when the configured project path holds a full layout."""
    required_paths = (
        PathResolver.inputs(),
        PathResolver.outputs(),
        PathResolver.themes(),
        PathResolver.states(),
        PathResolver.project_settings(),
    )
    return all(os.path.exists(path) for path in required_paths)
|
{"/tests/test_parser.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/tests/test_doc.py": ["/geekcms/doc_construct.py", "/geekcms/protocol.py"], "/tests/test_loadup.py": ["/geekcms/loadup.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/interface.py": ["/geekcms/utils.py", "/geekcms/doc_construct.py", "/geekcms/loadup.py", "/geekcms/protocol.py", "/geekcms/download_theme.py"], "/geekcms/parser/simple_yacc.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/utils.py"], "/geekcms/download_theme.py": ["/geekcms/utils.py"], "/tests/test_main.py": ["/geekcms/interface.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/loadup.py": ["/geekcms/utils.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/geekcms/utils.py": ["/geekcms/protocol.py"], "/geekcms/parser/simple_lex.py": ["/geekcms/parser/utils.py"], "/geekcms/sequence_analyze.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/parser/utils.py", "/geekcms/protocol.py"], "/tests/test_utils.py": ["/geekcms/utils.py"]}
|
22,922
|
huntzhan/GeekCMS
|
refs/heads/master
|
/geekcms/parser/simple_lex.py
|
import os
import re
from ply import lex
from .utils import ErrorCollector
# Token names exported to PLY; each t_<NAME> definition below must match
# one of these entries.
tokens = (
    'IDENTIFIER',
    'LEFT_OP',
    'RIGHT_OP',
    'DEGREE',
    'NEWLINE',
)

# A plugin name is a Python-style identifier; a full name may optionally be
# prefixed with a theme name and a dot ('theme.plugin').
plugin_name = r'[^\d\W]\w*'
full_name = r'({0}\.)?{0}'.format(plugin_name)

t_IDENTIFIER = full_name
t_LEFT_OP = r'<<'    # "executed before" relation operator
t_RIGHT_OP = r'>>'   # "executed after" relation operator
# Non-negative decimal integer: either no leading zero, or all zeros.
t_DEGREE = r'[^0\D]\d*|0+'
def t_NEWLINE(t):
    r'\n+'
    # NOTE: the raw string above is PLY's token regex (functional docstring);
    # real documentation must stay in '#' comments like this one.
    # Advance the line counter so error messages report correct line numbers.
    t.lexer.lineno += len(t.value)
    return t
# Characters silently skipped between tokens, plus '#'-style line comments.
t_ignore = ' \t'
t_ignore_COMMENT = r'\#.*'


def t_error(t):
    # Collect the illegal character with its line number for deferred,
    # per-theme reporting instead of printing immediately.
    # print("Illegal Character: '{}' in line {}".format(t.value[0], t.lineno))
    ErrorCollector.add_lex_message(
        (t.value[0], t.lineno),
    )
    # Skip the offending character and resume scanning.
    t.lexer.skip(1)
# Build the lexer once at import time. optimize=1 writes/reuses PLY's table
# cache in this module's directory; re.ASCII keeps \w matching consistent
# with ASCII-only plugin identifiers.
lexer = lex.lex(
    debug=0,
    optimize=1,
    reflags=re.ASCII,
    outputdir=os.path.dirname(__file__),
)
|
{"/tests/test_parser.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/tests/test_doc.py": ["/geekcms/doc_construct.py", "/geekcms/protocol.py"], "/tests/test_loadup.py": ["/geekcms/loadup.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/interface.py": ["/geekcms/utils.py", "/geekcms/doc_construct.py", "/geekcms/loadup.py", "/geekcms/protocol.py", "/geekcms/download_theme.py"], "/geekcms/parser/simple_yacc.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/utils.py"], "/geekcms/download_theme.py": ["/geekcms/utils.py"], "/tests/test_main.py": ["/geekcms/interface.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/loadup.py": ["/geekcms/utils.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/geekcms/utils.py": ["/geekcms/protocol.py"], "/geekcms/parser/simple_lex.py": ["/geekcms/parser/utils.py"], "/geekcms/sequence_analyze.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/parser/utils.py", "/geekcms/protocol.py"], "/tests/test_utils.py": ["/geekcms/utils.py"]}
|
22,923
|
huntzhan/GeekCMS
|
refs/heads/master
|
/geekcms/sequence_analyze.py
|
"""
Syntax:
start : NEWLINE lines end
| lines end
end : plugin_expr
| empty
lines : lines line_atom
| empty
line_atom : plugin_expr NEWLINE
plugin_expr : plugin_name relation plugin_name
| plugin_name relation
| relation plugin_name
| plugin_name
relation : left_rel
| right_rel
left_rel : LEFT_OP
| LEFT_OP DEGREE
right_rel : RIGHT_OP
| DEGREE RIGHT_OP
plugin_name : IDENTIFIER
empty : <empty>
Semantics:
1. "pre_load: my_loader": register plugin "my_loader" to component
"pre_load".
2. "pre_load: my_loader << my_filter": register plugins "my_loader" and
"my_filter" to component "pre_load", with "my_loader" being executed before
"my_filter".
3. "pre_load: my_filter >> my_loader": has the same meaning as
"pre_load: my_loader << my_filter".
4. "pre_load: loader_a <<0 loader_b NEWLINE loader_c <<1 loader_b" the
execution order would be "loader_c" --> "loader_a" --> "loader_b".
"<<" is equivalent to "<<0", and "<< decimalinteger" is equivalent to
"decimalinteger >>".
5. "pre_load: my_loader <<": means "my_loader" would be executed before the
other plugins within a component, unless another relation such as
"anther_loader <<1" is established.
6. "pre_load: >> my_filter": reverse meaning of "pre_load: my_loader <<".
Algorithm:
1. lexical analysis and Syntax Checking: Performed by PLY, extract plugin
relation expression from each physical line, transform to the format of
'x <<p y'. Some important syntax directed actions are as follow:
1.1 Extract left operand, operator and right operand.
1.2 If x is missed, HEAD is added as x; If y is missed, TAIL is added
as y;
1.3 For expressions that only consist of one operand and no operator,
for example, 'x NEWLINE', the only operand in the expression would be
considered as the left operand, with no relation and right operand.
1.4 '<<' is transform to '<<0', and so '>>'.
2. Preparation for generating plugin execution order.
2.1 Transform operand to the form of (theme, plugin), based on
'theme.plugin'. If 'theme.' part is omitted, then automatically
generate theme with respect to file's directory(where relation
expressions were loaded).
2.2 Expressions that has left operand with no relation and right
operand, would be removed and kept in somewhere else. Such expressions
would not be used to generating relation group(step 3).
2.3 Transform 'x p>> y' to 'y <<p x'.
3. Generate relation groups.
A relation group: {(x <<p y)| for x, all avaliable (p, y) in expressions}.
3.1 Sort expressions(x <<p y) with respect to x's value, then with p's
value. Generate raw relation groups. Then sort expressions(x <<p y)
with respect to y's value, then with p's value in reversed order.
3.2 For every raw relation groups, tranlate all its relations
(x <<p1 y1, x <<p2 y2, ..., x <<pn yn) to (x < y1, y1 < y2, ...,
yn-1 < yn) and (xn <<pn y, xn-1 <<pn-1 y, ..., x1 <<p1 y) to
(xn < xn-1, xn-1 < xn-2, ..., x1 < y). Notice that 'x < y' means 'x is
executed earlier then y', in order to distingush with '<<', since
'x << y1, x << y2' would cause syntax error.
4. Generate order of plugin execution.
Input: relations generated by 3.2.
Output: sequence of plugin execution.
order = a queue
left_behind = a set initiated with items removed in 2.3.
left_hand_side = the dict of left operands, with index as its key and
reference count as its value.
right_hand side = the dict of right operands, similar with
left_hand_side.
left_behind = items only in right_hand_side.
while left_hand_side:
find x in left_hand_side and not in right hand side. If such x not
exist, then there must be a syntax error.
remove x from left_hand_side.
for y of all relations (x < y):
decrease y's reference count by 1.
if y's reference count equals to zero, delete y from
right_hand_side.
if left_behind is not empty, them push all its items to order.
return order
5. Remove HEAD and TAIL from order.
"""
import inspect
import functools
from collections import defaultdict
from .parser.simple_lex import lexer
from .parser.simple_yacc import parser
from .parser.utils import ErrorCollector
from .parser.utils import PluginExpr
from .parser.utils import PluginRel
from .protocol import PluginIndex
# Degree attached to the synthetic relations created while breaking relation
# groups (step 3.2); the parser itself never produces a negative degree.
_SPECIAL_DEGREE = -1
class _Algorithm:
    """Turn collected PluginExpr relations into a plugin execution order.

    Implements steps 2-5 of the module docstring: normalize relations to
    left form, group and break them into simple pairwise orderings, then
    topologically sort. Mutates its own expression list in place.
    """

    def __init__(self, exprs):
        # exprs: an iterable of containers of PluginExpr; flattened here.
        self._exprs = []
        for container in exprs:
            self._exprs.extend(container)

    # implement 2.3: rewrite every 'x p>> y' as 'y <<p x'.
    def _transform_to_left_rel(self):
        for expr in self._exprs:
            relation = expr.relation
            if relation.is_left_rel:
                continue
            relation.is_left_rel = True
            # exchange operand
            expr.left_operand, expr.right_operand =\
                expr.right_operand, expr.left_operand

    # implement 2.2: pull out expressions that carry no relation at all.
    def _remove_irrelevant_exprs(self):
        irrelevant_exprs = []
        for expr in self._exprs[:]:
            # if expr.relation is None, then expr is so called irrelevant.
            if expr.relation is None:
                self._exprs.remove(expr)
                irrelevant_exprs.append(expr)
        return irrelevant_exprs

    def _yield_group(self, sorted_exprs, op_name):
        # Consume a pre-sorted list, yielding consecutive runs that share the
        # same value of attribute *op_name* (destructive: pops from the list).
        while sorted_exprs:
            group = []
            val = getattr(sorted_exprs[0], op_name)
            while sorted_exprs\
                    and val == getattr(sorted_exprs[0], op_name):
                expr = sorted_exprs.pop(0)
                group.append(expr)
            yield group

    # implement 3.1.1: group by left operand, degree ascending.
    def _generate_left_relation_group(self, exprs):
        cmp_key_left = lambda x: (hash(x.left_operand), x.relation.degree)
        sorted_exprs_left = sorted(exprs, key=cmp_key_left)
        # group by left operand.
        for group in self._yield_group(sorted_exprs_left, 'left_operand'):
            yield group

    # implement 3.1.2: group by right operand, degree descending.
    def _generate_right_relation_group(self, exprs):
        cmp_key_right = lambda x: (hash(x.right_operand), x.relation.degree)
        sorted_exprs_right = sorted(exprs, key=cmp_key_right, reverse=True)
        # group by right operand.
        for group in self._yield_group(sorted_exprs_right, 'right_operand'):
            yield group

    def _break_relation_group(self, relation_group, op_name, special_index):
        """
        op_name is the string of operand NOT to be gathered.
        """
        # Chain the gathered operands pairwise, then re-attach the shared
        # operand at the head (special_index == 0) or tail (== -1).
        new_group = []
        special_rel = PluginRel(True, _SPECIAL_DEGREE)
        last_operand = None
        for expr in relation_group:
            if last_operand is None:
                # set up last_operand
                last_operand = getattr(expr, op_name)
                continue
            combined_expr = PluginExpr(
                left_operand=last_operand,
                right_operand=getattr(expr, op_name),
                relation=special_rel,
            )
            new_group.append(combined_expr)
            # update last_operand
            last_operand = getattr(expr, op_name)
        special_expr = relation_group[special_index]
        new_expr = PluginExpr(
            left_operand=special_expr.left_operand,
            right_operand=special_expr.right_operand,
            relation=special_rel,
        )
        if special_index == 0:
            new_group.insert(0, new_expr)
        elif special_index == -1:
            new_group.append(new_expr)
        else:
            raise SyntaxError
        return new_group

    # implement 3.2.1
    def _break_left_relation_group(self, relation_group):
        return self._break_relation_group(relation_group, 'right_operand', 0)

    # implement 3.2.2
    def _break_right_relation_group(self, relation_group):
        return self._break_relation_group(relation_group, 'left_operand', -1)

    # implement 4 and 5: Kahn-style topological sort over the relations.
    def _generate_execution_order(self, relations, irrelevant_exprs):
        order = []
        left_behind = {expr.left_operand for expr in irrelevant_exprs}
        left_hand_side = defaultdict(list)
        right_hand_side = defaultdict(int)
        for expr in relations:
            left_hand_side[expr.left_operand].append(expr.right_operand)
            right_hand_side[expr.right_operand] += 1
        items_only_in_right_hand_side =\
            set(right_hand_side.keys()) - set(left_hand_side.keys())
        left_behind |= items_only_in_right_hand_side
        while left_hand_side:
            unique_items =\
                set(left_hand_side.keys()) - set(right_hand_side.keys())
            if not unique_items:
                # No node without incoming edges: the relations form a cycle.
                text = "Something Wrong. LHS: '{}' RHS: '{}'"
                raise SyntaxError(
                    text.format(dict(left_hand_side), dict(right_hand_side)),
                )
            item = unique_items.pop()
            order.append(item)
            for right_op in left_hand_side[item]:
                right_hand_side[right_op] -= 1
                if right_hand_side[right_op] == 0:
                    del right_hand_side[right_op]
            del left_hand_side[item]
        order.extend(left_behind)
        # remove HEAD and TAIL sentinels injected during parsing.
        HEAD_AND_TAIL = [PluginExpr.HEAD, PluginExpr.TAIL]
        for index in order[:]:
            if index.theme_name is None\
                    and index.plugin_name in HEAD_AND_TAIL:
                order.remove(index)
        return order

    # Mix up all above functions.
    def generate_sequence(self):
        irrelevant_exprs = self._remove_irrelevant_exprs()
        self._transform_to_left_rel()
        new_relations = []
        # left operand.
        for relation_group in self._generate_left_relation_group(self._exprs):
            new_group = self._break_left_relation_group(relation_group)
            new_relations.extend(new_group)
        # right operand.
        for relation_group in self._generate_right_relation_group(self._exprs):
            new_group = self._break_right_relation_group(relation_group)
            new_relations.extend(new_group)
        return self._generate_execution_order(new_relations, irrelevant_exprs)
class SequenceParser:
    """Parse plugin-relation text per theme and derive a global order.

    One instance is fed each theme's relation text via ``analyze``;
    ``generate_sequence`` then merges everything collected so far into a
    single plugin execution sequence.
    """

    def __init__(self):
        # Set to True once any lex or yacc error has been archived.
        self.error = False
        # theme name -> list of PluginExpr whose operands are PluginIndex.
        self.theme_plugin_expr_mapping = dict()
        # bind parser with lexer
        self._parse = functools.partial(parser.parse, lexer=lexer)

    # implement 2.2
    def _replace_with_plugin_index(self, theme, plugin_exprs):
        """Rewrite raw string operands as PluginIndex(theme, plugin)."""

        def get_theme_plugin(operand):
            # special case: HEAD/TAIL sentinels carry no theme.
            if operand == PluginExpr.HEAD or operand == PluginExpr.TAIL:
                return None, operand
            # operand is 'theme.plugin', or a bare 'plugin' that implicitly
            # belongs to the theme currently being analyzed.
            items = operand.split('.')
            if len(items) == 1:
                return theme, operand
            elif len(items) == 2:
                return items
            else:
                raise SyntaxError('Operand Error: {}'.format(operand))

        processed_exprs = []
        for expr in plugin_exprs:
            left_theme, left_plugin = get_theme_plugin(expr.left_operand)
            right_theme, right_plugin = get_theme_plugin(expr.right_operand)
            left_index = PluginIndex(left_theme, left_plugin)
            right_index = PluginIndex(right_theme, right_plugin)
            new_expr = PluginExpr(
                left_operand=left_index,
                right_operand=right_index,
                relation=expr.relation,
            )
            processed_exprs.append(new_expr)
        return processed_exprs

    def _archive_error(self, theme):
        """Archive pending lexer/parser errors under *theme*.

        Bug fix: ``theme`` was previously referenced here without being a
        parameter, so any lex/yacc error raised NameError instead of being
        archived.
        """
        if ErrorCollector.lex_error:
            self.error = True
            ErrorCollector.archive_lex_messages(theme)
        if ErrorCollector.yacc_error:
            self.error = True
            ErrorCollector.archive_yacc_messages(theme)

    def analyze(self, theme, text):
        """Parse *text* for *theme* and store its processed expressions."""
        exprs = self._parse(text)
        self._archive_error(theme)
        processed_exprs = self._replace_with_plugin_index(theme, exprs)
        self.theme_plugin_expr_mapping[theme] = processed_exprs

    def report_error(self):
        """Print every archived lex/yacc error, grouped by theme."""
        # print lex error
        for theme, messages in ErrorCollector.theme_lex_error.items():
            # lineno not really the line number of 'settings' file.
            # might be improved in the future.
            val, lineno = messages
            template = "Theme '{}' >> Illegal Character: '{}' in line {}"
            print(template.format(theme, val, lineno))
        # print yacc error
        for theme, messages in ErrorCollector.theme_yacc_error.items():
            val, lineno, discard = messages
            template = ("Theme '{}' >> Syntax Error: '{}' in line {}"
                        "Discard: {}")
            print(template.format(theme, val, lineno, discard))

    def generate_sequence(self):
        """Merge all themes' expressions and return the execution order."""
        algorithm = _Algorithm(
            self.theme_plugin_expr_mapping.values(),
        )
        return algorithm.generate_sequence()
|
{"/tests/test_parser.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/tests/test_doc.py": ["/geekcms/doc_construct.py", "/geekcms/protocol.py"], "/tests/test_loadup.py": ["/geekcms/loadup.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/interface.py": ["/geekcms/utils.py", "/geekcms/doc_construct.py", "/geekcms/loadup.py", "/geekcms/protocol.py", "/geekcms/download_theme.py"], "/geekcms/parser/simple_yacc.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/utils.py"], "/geekcms/download_theme.py": ["/geekcms/utils.py"], "/tests/test_main.py": ["/geekcms/interface.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/loadup.py": ["/geekcms/utils.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/geekcms/utils.py": ["/geekcms/protocol.py"], "/geekcms/parser/simple_lex.py": ["/geekcms/parser/utils.py"], "/geekcms/sequence_analyze.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/parser/utils.py", "/geekcms/protocol.py"], "/tests/test_utils.py": ["/geekcms/utils.py"]}
|
22,924
|
huntzhan/GeekCMS
|
refs/heads/master
|
/tests/test_utils.py
|
"""
Test Plan.
SettingsLoader:
test_init:
1. with name.
2. without name.
test_not_found:
initialize loader with file not existed, expect failure.
_SearchData, ShareData, ProjectSettings, ThemeSettings:
test_search_with_prefix:
1. theme prefix(i.e. 'theme.variable').
2. global prefix.
test_search_without_prefix:
...
test_theme_names_retrieving:
...
PathResolver:
test_inputs_outputs:
...
test_themes_states:
...
test_theme_state_and_dir:
...
test_project_and_theme_settings:
...
"""
import unittest
import os
from geekcms.utils import (SettingsLoader, ShareData, ProjectSettings,
PathResolver, check_cwd_is_project)
class _GetCasePath:
    """Mixin resolving fixture paths under tests/cases/loader (cwd-relative)."""

    def _get_file_path(self, rel_path):
        case_root = os.path.join(os.getcwd(), 'tests/cases/loader')
        return os.path.join(case_root, rel_path)
class SettingsLoaderTest(unittest.TestCase, _GetCasePath):
    """Check SettingsLoader naming and its behavior on a missing file."""

    def test_init(self):
        settings_path = self._get_file_path('case1')
        theme_name = 'testtheme'
        named_loader = SettingsLoader(settings_path, theme_name)
        unnamed_loader = SettingsLoader(settings_path)
        self.assertEqual(named_loader.name, theme_name)
        # Without an explicit name, the loader defaults to 'global'.
        self.assertEqual(unnamed_loader.name, 'global')

    @unittest.expectedFailure
    def test_not_found(self):
        # Constructing a loader for a nonexistent path is expected to fail.
        missing_path = self._get_file_path('CASE_DO_NOT_EXISTED')
        SettingsLoader(missing_path)
class DataSearchTest(unittest.TestCase, _GetCasePath):
    """Exercise ShareData/ProjectSettings lookups with and without prefixes.

    ShareData and ProjectSettings are module-global singletons, so every
    test clears them before loading its fixture.
    """

    def setUp(self):
        case_path = self._get_file_path('case1')
        self.theme = 'testtheme'
        # Theme-scoped and global loaders over the same fixture file.
        self.loader = SettingsLoader(case_path, self.theme)
        self.global_loader = SettingsLoader(case_path)

    def _get_search_key(self, theme, key):
        # Keys are namespaced as '<theme>.<variable>'.
        return '{}.{}'.format(theme, key)

    def test_search_with_prefix(self):
        ShareData.clear()
        ShareData.load_data(self.loader)
        self.assertEqual(
            ShareData.get(self._get_search_key(self.theme, 'a')),
            '1',
        )
        self.assertEqual(
            ShareData.get(self._get_search_key(self.theme, 'b')),
            '2',
        )
        self.assertEqual(
            ShareData.get(self._get_search_key(self.theme, 'c')),
            '3',
        )

    def test_global_search(self):
        ShareData.clear()
        ShareData.load_data(self.global_loader)
        self.assertEqual(
            ShareData.get(self._get_search_key('global', 'a')),
            '1',
        )
        self.assertEqual(
            ShareData.get(self._get_search_key('global', 'b')),
            '2',
        )
        self.assertEqual(
            ShareData.get(self._get_search_key('global', 'c')),
            '3',
        )

    def test_search_without_prefix(self):
        # Bare keys should resolve without a theme prefix as well.
        ShareData.clear()
        ShareData.load_data(self.loader)
        self.assertEqual(
            ShareData.get('a'),
            '1',
        )
        self.assertEqual(
            ShareData.get('b'),
            '2',
        )
        self.assertEqual(
            ShareData.get('c'),
            '3',
        )

    def test_theme_names_retrieving(self):
        # case2 registers themes a..f; order should be preserved.
        ProjectSettings.clear()
        case_path = self._get_file_path('case2')
        loader = SettingsLoader(case_path)
        ProjectSettings.load_data(loader)
        self.assertListEqual(
            list(ProjectSettings.get_registered_theme_name()),
            ['a', 'b', 'c', 'd', 'e', 'f'],
        )
class PathResolverTest(unittest.TestCase, _GetCasePath):
    """Check each PathResolver helper against the loader fixture layout."""

    def setUp(self):
        self.project_path = self._get_file_path('')
        PathResolver.set_project_path(self.project_path)

    def test_top_level_names(self):
        for top_level in ('inputs', 'outputs', 'themes', 'states'):
            resolved = getattr(PathResolver, top_level)()
            self.assertEqual(resolved, self._get_file_path(top_level))

    def test_theme_state_and_dir(self):
        self.assertEqual(
            self._get_file_path('states/testtheme'),
            PathResolver.theme_state('testtheme'),
        )
        self.assertEqual(
            self._get_file_path('themes/testtheme'),
            PathResolver.theme_dir('testtheme'),
        )

    def test_project_and_theme_settings(self):
        self.assertEqual(
            self._get_file_path('settings'),
            PathResolver.project_settings(),
        )
        self.assertEqual(
            self._get_file_path('themes/testtheme/settings'),
            PathResolver.theme_settings('testtheme'),
        )

    def test_check_cwd_is_project(self):
        # The loader fixture lacks the required layout; the project fixture
        # has it.
        non_project_path = self._get_file_path('')
        project_path = os.path.join(os.getcwd(), 'tests/cases/project')
        PathResolver.set_project_path(non_project_path)
        self.assertFalse(check_cwd_is_project())
        PathResolver.set_project_path(project_path)
        self.assertTrue(check_cwd_is_project())
|
{"/tests/test_parser.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/tests/test_doc.py": ["/geekcms/doc_construct.py", "/geekcms/protocol.py"], "/tests/test_loadup.py": ["/geekcms/loadup.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/interface.py": ["/geekcms/utils.py", "/geekcms/doc_construct.py", "/geekcms/loadup.py", "/geekcms/protocol.py", "/geekcms/download_theme.py"], "/geekcms/parser/simple_yacc.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/utils.py"], "/geekcms/download_theme.py": ["/geekcms/utils.py"], "/tests/test_main.py": ["/geekcms/interface.py", "/geekcms/utils.py", "/geekcms/protocol.py"], "/geekcms/loadup.py": ["/geekcms/utils.py", "/geekcms/protocol.py", "/geekcms/sequence_analyze.py"], "/geekcms/utils.py": ["/geekcms/protocol.py"], "/geekcms/parser/simple_lex.py": ["/geekcms/parser/utils.py"], "/geekcms/sequence_analyze.py": ["/geekcms/parser/simple_lex.py", "/geekcms/parser/simple_yacc.py", "/geekcms/parser/utils.py", "/geekcms/protocol.py"], "/tests/test_utils.py": ["/geekcms/utils.py"]}
|
22,928
|
EMAT31530/ai-group-project-football-analysis-team
|
refs/heads/master
|
/Aiproject.py
|
import csv
import os
from sklearn.naive_bayes import BernoulliNB
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import accuracy_score
from sklearn.neighbors import KNeighborsClassifier
from sklearn.linear_model import LogisticRegression
from data_scrapy import scrapy_data
# Read csv file
def read_csv(filename):
    """Load match rows and build one-hot comparison features per match.

    The CSV holds two consecutive rows per match (one per team); column 0
    is the team name (dropped), the last column is the result label, and
    everything in between is numeric stats stored as strings.

    Returns:
        (x_data, y_data): for each match, x is a list of -1/0/1 values
        comparing team 1's stats against team 2's, and y is team 1's
        result label (as a string, converted by the caller).

    Fixes over the original:
      * removed the dead first feature loop whose x_data was immediately
        overwritten by the one-hot loop;
      * stats are compared as floats — the original compared CSV strings
        lexicographically, so e.g. '10' > '9' evaluated False.
    """
    with open(filename, encoding='utf-8') as csvfile:
        reader = csv.reader(csvfile)
        # Drop the team-name column; keep stats + result label.
        data = [row[1:] for row in reader]

    x_data = []
    y_data = []
    for i in range(len(data) // 2):
        home = list(map(float, data[2 * i][:-1]))
        away = list(map(float, data[2 * i + 1][:-1]))
        y_data.append(data[2 * i][-1])
        features = []
        for home_stat, away_stat in zip(home, away):
            if home_stat > away_stat:
                features.append(1)
            elif home_stat == away_stat:
                features.append(0)
            else:
                features.append(-1)
        x_data.append(features)
    return x_data, y_data
if __name__ == '__main__':
    # Scrape the dataset from the Premier League API only if the CSV is
    # not already present in the working directory.
    if not os.path.exists('all_data.csv'):
        scrapy_data()
    # Read csv file data
    x_data, y_data = read_csv('all_data.csv')
    # Split training data and test data.
    # NOTE(review): 340 appears to be a fixed chronological split point —
    # confirm it matches the intended share of the scraped matches.
    # Training data
    train_x = x_data[:340]
    train_y = list(map(int, y_data[:340]))
    # Test Data
    test_x = x_data[340:]
    test_y = list(map(int, y_data[340:]))
    # Standardize data: scaler is fit on the training split only, then
    # applied to both splits.
    scaler = StandardScaler()
    scaler.fit(train_x)
    train_x = scaler.transform(train_x)
    test_x = scaler.transform(test_x)
    # Use different methods for training and testing
    ####################### K neighbors #########################
    # training
    k_neighbour = KNeighborsClassifier(n_neighbors=3)
    k_neighbour.fit(train_x, train_y)
    # prediction
    print("**************************")
    predict_y_training = k_neighbour.predict(train_x)
    print("Accuracy of k nearest neighbors in training data:")
    print(accuracy_score(predict_y_training, train_y))
    predict_y = k_neighbour.predict(test_x)
    print("k nearest neighbor prediction accuracy:")
    print(accuracy_score(predict_y, test_y))
    print("**************************")
    ######################## Bayesian network ###########################
    # training
    NB = BernoulliNB()
    NB.fit(train_x, train_y)
    # prediction
    print("**************************")
    predict_y = NB.predict(test_x)
    predict_y_training = NB.predict(train_x)
    print("Bayesian accuracy in training data:")
    print(accuracy_score(predict_y_training, train_y))
    print("Bayesian network prediction accuracy:")
    print(accuracy_score(predict_y, test_y))
    print("**************************")
    ###################### logit regression #######################
    # training
    lr = LogisticRegression()
    lr.fit(train_x, train_y)
    # prediction
    print("**************************")
    predict_y_training = lr.predict(train_x)
    print("Logit regression accuracy rate on training data:")
    print(accuracy_score(predict_y_training, train_y))
    predict_y = lr.predict(test_x)
    print("Logit regression prediction accuracy:")
    print(accuracy_score(predict_y, test_y))
    print("**************************")
|
{"/Aiproject.py": ["/data_scrapy.py"]}
|
22,929
|
EMAT31530/ai-group-project-football-analysis-team
|
refs/heads/master
|
/data_scrapy.py
|
import requests
from requests.exceptions import RequestException
import json
import csv
import time
import random
# Get page content
def get_page(match_url, headers):
    """Fetch *match_url* and return its body text on HTTP 200, else None.

    Network-level failures (RequestException) are printed and yield None
    as well.
    """
    try:
        response = requests.get(match_url, headers=headers)
    except RequestException as err:
        print('Get page fault')
        print(err)
        return None
    response.encoding = 'utf-8'
    return response.text if response.status_code == 200 else None
# Parse page data
# Maps each API stat name to its slot in a team's CSV row. Slot 0 is the
# team name and slot 15 is the match result; stats fill slots 1-14.
_STAT_SLOTS = {
    'accurate_pass': 1,
    'possession_percentage': 2,
    'touches': 3,
    'total_pass': 4,
    'fk_foul_lost': 5,           # fouls conceded
    'shot_off_target': 6,
    'offtarget_att_assist': 7,
    'ontarget_scoring_att': 8,
    'won_corners': 9,
    'total_tackle': 10,
    'total_clearance': 11,
    'total_yel_card': 12,
    'total_offside': 13,
    'ontarget_att_assist': 14,
}


def parse_page(html):
    """Parse one match-stats JSON document into two per-team rows.

    Each returned row is: [team name, 14 stat slots (0 if the API did not
    report that stat), result], where result is 1 for a win, -1 for a
    loss and 0 for a draw.
    """
    html = json.loads(html)

    team_rows = []
    scores = []
    team_ids = []
    for team in html['entity']['teams'][:2]:
        team_rows.append([team['team']['name']] + [0] * 15)
        scores.append(team['score'])
        # Stats are keyed by the club id as a string.
        team_ids.append(str(team['team']['club']['id']))

    # Encode the match result in slot 15: 1 win, -1 loss, 0 draw.
    if scores[0] > scores[1]:
        team_rows[0][15], team_rows[1][15] = 1, -1
    elif scores[0] < scores[1]:
        team_rows[0][15], team_rows[1][15] = -1, 1
    else:
        team_rows[0][15], team_rows[1][15] = 0, 0

    # Fill the stat slots for both teams; unknown stat names are ignored,
    # matching the original elif chains.
    for row, team_id in zip(team_rows, team_ids):
        for item in html['data'][team_id]['M']:
            slot = _STAT_SLOTS.get(item['name'])
            if slot is not None:
                row[slot] = item['value']

    return team_rows[0], team_rows[1]
# Save data to file
def write2csv(data):
    """Append one row to all_data.csv in the current directory (UTF-8 BOM)."""
    with open('all_data.csv', 'a', newline='', encoding='utf-8-sig') as out:
        csv.writer(out).writerow(data)
def scrapy_data():
    """Scrape Premier League match stats and append them to all_data.csv.

    Iterates over a fixed range of pulselive match ids, fetching each
    stats page with a randomized User-Agent and a randomized polite delay,
    then writes one CSV row per team per match via write2csv.
    """
    # Analog browser header
    headers = {
        'Accept': '*/*',
        'Accept-Language': 'zh-CN,zh;q=0.9',
        'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
        'Origin': 'https://www.premierleague.com',
        'Referer': 'https://www.premierleague.com/',
        #'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36'
    }
    # Pool of User-Agent strings; one is picked at random per request to
    # reduce the chance of being rate-limited/blocked.
    user_agent_list = [
        "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; AcooBrowser; .NET CLR 1.1.4322; .NET CLR 2.0.50727)",
        "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0; Acoo Browser; SLCC1; .NET CLR 2.0.50727; Media Center PC 5.0; .NET CLR 3.0.04506)",
        "Mozilla/4.0 (compatible; MSIE 7.0; AOL 9.5; AOLBuild 4337.35; Windows NT 5.1; .NET CLR 1.1.4322; .NET CLR 2.0.50727)",
        "Mozilla/5.0 (Windows; U; MSIE 9.0; Windows NT 9.0; en-US)",
        "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 2.0.50727; Media Center PC 6.0)",
        "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 1.0.3705; .NET CLR 1.1.4322)",
        "Mozilla/4.0 (compatible; MSIE 7.0b; Windows NT 5.2; .NET CLR 1.1.4322; .NET CLR 2.0.50727; InfoPath.2; .NET CLR 3.0.04506.30)",
        "Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN) AppleWebKit/523.15 (KHTML, like Gecko, Safari/419.3) Arora/0.3 (Change: 287 c9dfb30)",
        "Mozilla/5.0 (X11; U; Linux; en-US) AppleWebKit/527+ (KHTML, like Gecko, Safari/419.3) Arora/0.6",
        "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1.2pre) Gecko/20070215 K-Ninja/2.1.1",
        "Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN; rv:1.9) Gecko/20080705 Firefox/3.0 Kapiko/3.0",
        "Mozilla/5.0 (X11; Linux i686; U;) Gecko/20070322 Kazehakase/0.4.5",
        "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.8) Gecko Fedora/1.9.0.8-1.fc10 Kazehakase/0.5.6",
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11",
        "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/535.20 (KHTML, like Gecko) Chrome/19.0.1036.7 Safari/535.20",
        "Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; fr) Presto/2.9.168 Version/11.52",
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.11 TaoBrowser/2.0 Safari/536.11",
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.71 Safari/537.1 LBBROWSER",
        "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; LBBROWSER)",
        "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; QQDownload 732; .NET4.0C; .NET4.0E; LBBROWSER)",
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.84 Safari/535.11 LBBROWSER",
        "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E)",
        "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; QQBrowser/7.0.3698.400)",
        "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; QQDownload 732; .NET4.0C; .NET4.0E)",
        "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SV1; QQDownload 732; .NET4.0C; .NET4.0E; 360SE)",
        "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; QQDownload 732; .NET4.0C; .NET4.0E)",
        "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E)",
        "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.89 Safari/537.1",
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.89 Safari/537.1",
        "Mozilla/5.0 (iPad; U; CPU OS 4_2_1 like Mac OS X; zh-cn) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8C148 Safari/6533.18.5",
        "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:2.0b13pre) Gecko/20110307 Firefox/4.0b13pre",
        "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:16.0) Gecko/20100101 Firefox/16.0",
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11",
        "Mozilla/5.0 (X11; U; Linux x86_64; zh-CN; rv:1.9.2.10) Gecko/20100922 Ubuntu/10.10 (maverick) Firefox/3.6.10",
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36"
    ]
    # Constructing the Premier League website url.
    # NOTE(review): the id range 46605-46985 is hard-coded — presumably one
    # season's worth of matches; confirm before reusing for other seasons.
    team_1, team_2 = [], []
    for index in range(46605, 46985):
        match_url = "https://footballapi.pulselive.com/football/stats/match/" + str(index)
        # Get the contest page
        user_agent = random.choice(user_agent_list)
        headers['User-Agent'] = user_agent
        html = get_page(match_url, headers)
        # Analyze the competition webpage data
        team_1_list, team_2_list = parse_page(html)
        team_1.append(team_1_list)
        team_2.append(team_2_list)
        # Persist each match immediately so a crash loses at most one match.
        write2csv(team_1_list)
        write2csv(team_2_list)
        print(index)
        # Randomized delay between requests to stay polite to the API.
        time.sleep(random.randint(10, 20))
|
{"/Aiproject.py": ["/data_scrapy.py"]}
|
22,930
|
EMAT31530/ai-group-project-football-analysis-team
|
refs/heads/master
|
/data_draft1.py
|
import requests
from requests.exceptions import RequestException
import json
import csv
import time
import random
# 获取页面内容
def get_page(match_url, headers):
    """Fetch *match_url* and return the body text on HTTP 200, else None.

    Network failures are caught and printed (error message is in Chinese
    in this draft), in which case None is returned implicitly.
    """
    try:
        response = requests.get(match_url, headers=headers)
        response.encoding = 'utf-8'
        if response.status_code == 200:
            return response.text
        else:
            return None
    except RequestException as err:
        print('获取页面错误')
        print(err)
# 解析页面数据
def parse_page(html):
    """Parse the JSON body of one Premier League match-stats page.

    Parameters
    ----------
    html : str
        Raw JSON text returned by the pulselive match-stats endpoint.

    Returns
    -------
    tuple[list, list]
        One 16-element list per team:
        index 0     -> team name
        index 1..14 -> match statistics (0 when a stat is absent)
        index 15    -> result flag: 1 win, -1 loss, 0 draw
    """
    # Maps the API stat name to its slot in the output list. Replaces the
    # two duplicated ~50-line if/elif chains of the original implementation.
    stat_index = {
        'accurate_pass': 1,
        'possession_percentage': 2,
        'touches': 3,
        'total_pass': 4,
        'fk_foul_lost': 5,           # fouls conceded
        'shot_off_target': 6,
        'offtarget_att_assist': 7,
        'ontarget_scoring_att': 8,
        'won_corners': 9,
        'total_tackle': 10,
        'total_clearance': 11,
        'total_yel_card': 12,
        'total_offside': 13,
        'ontarget_att_assist': 14,
    }
    html = json.loads(html)
    # Pre-sized result rows; unreported stats stay 0.
    team_1_list = ['name', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
    team_2_list = ['name', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
    # Team names of both sides.
    team_1_list[0] = html['entity']['teams'][0]['team']['name']
    team_2_list[0] = html['entity']['teams'][1]['team']['name']
    # Club ids key into the per-team section of html['data'].
    id_1 = str(html['entity']['teams'][0]['team']['club']['id'])
    id_2 = str(html['entity']['teams'][1]['team']['club']['id'])
    # Derive the win/loss/draw flag from the final score.
    score_1 = html['entity']['teams'][0]['score']
    score_2 = html['entity']['teams'][1]['score']
    if score_1 > score_2:
        team_1_list[15], team_2_list[15] = 1, -1
    elif score_1 < score_2:
        team_1_list[15], team_2_list[15] = -1, 1
    # On a draw both flags keep their initial 0.
    # Fill the stats for both teams with one shared loop.
    for team_id, team_list in ((id_1, team_1_list), (id_2, team_2_list)):
        for item in html['data'][team_id]['M']:
            slot = stat_index.get(item['name'])
            if slot is not None:
                team_list[slot] = item['value']
    # Return both teams' data rows.
    return team_1_list, team_2_list
# 将数据存入文件
def write2csv(data):
    """Append one row of values to all_data.csv (UTF-8 with BOM)."""
    with open('all_data.csv', 'a', newline='', encoding='utf-8-sig') as out:
        csv.writer(out).writerow(data)
if __name__ == "__main__":
    # Browser-like request headers so the API accepts the requests.
    headers = {
        'Accept': '*/*',
        'Accept-Language': 'zh-CN,zh;q=0.9',
        'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
        'Origin': 'https://www.premierleague.com',
        'Referer': 'https://www.premierleague.com/',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36'
    }
    # Build the Premier League match url for every match id in the season range.
    team_1, team_2 = [], []
    for index in range(46605, 46985):
        #for index in range(46605, 46607):
        print(index)
        match_url = "https://footballapi.pulselive.com/football/stats/match/" + str(index)
        # Fetch the match stats page
        html = get_page(match_url, headers)
        # Parse both teams' stats from the page
        team_1_list, team_2_list = parse_page(html)
        team_1.append(team_1_list)
        team_2.append(team_2_list)
        # Persist each team's row immediately so a crash loses little work
        write2csv(team_1_list)
        write2csv(team_2_list)
        # Random pause between requests to avoid being rate limited
        time.sleep(random.randint(15, 30))
|
{"/Aiproject.py": ["/data_scrapy.py"]}
|
22,931
|
kaefee/Agustin-Codazzi-Project
|
refs/heads/main
|
/callbacks.py
|
import base64
import datetime
import io
import pandas as pd
#basic libraries
from dash.dependencies import Input, Output, State
from flask_caching import Cache
import dash
import dash_html_components as html
import dash_bootstrap_components as dbc
from apps.utils.utils_getdata import get_data
from apps.utils.utils_pivot_table import make_pivot_table
from apps.utils.utils_plots import Make_map
from apps.utils.utils_tree_map import Make_tree_map
from apps.utils.utils_filters import make_filters, make_options_filters
import dash_core_components as dcc
from dash.exceptions import PreventUpdate
#main dash instance
from app import app
# #call modules needed for callbacks
from apps.home import layout_home
#df=get_data(["CLIMA_AMBIENTAL","FORMA_TERRENO","MATERIAL_PARENTAL_LITOLOGIA","ORDEN","PAISAJE"]).dropna()
#cache configuration
# Filesystem-backed Flask-Caching instance used to memoize Dash callbacks.
TIMEOUT = 240  # cache entry lifetime in seconds
cache = Cache(app.server, config={
    'CACHE_TYPE': 'filesystem',
    'CACHE_DIR': 'cache-directory',   # on-disk cache location
    'CACHE_THRESHOLD': 20             # max cached items before eviction
})
#Entire callbacks definition
def register_callbacks(app):
    """Register all Dash callbacks on *app*: page navigation and file-upload handling."""
    # Callback for navigation: look at the url and respond with the right layout.
    @cache.memoize(timeout=TIMEOUT)
    @app.callback(Output("page-content", "children"), [Input("url", "pathname")])
    def render_page_content(pathname):
        if pathname in ["/", "/apps/home/layout_home"]:
            return layout_home.layout
        # If the user tries to reach a different page, return a 404 message
        return dbc.Jumbotron(
            [
                html.H1("404: Not found", className="text-danger"),
                html.Hr(),
                html.P(f"The pathname {pathname} was not recognised..."),
                html.Br(),
                html.P(f"Check again what you are requesting")
            ],fluid=False
        )
    # Helper + callback for handling an uploaded data file.
    def parse_contents(contents, filename, date):
        # dcc.Upload delivers "content_type,base64_payload"; decode the payload.
        content_type, content_string = contents.split(',')
        decoded = base64.b64decode(content_string)
        # Only these columns are kept from the uploaded file.
        columns_to_consider=["CLIMA_AMBIENTAL", "PAISAJE",
                            'TIPO_RELIEVE', 'FORMA_TERRENO',
                            'MATERIAL_PARENTAL_LITOLOGIA', 'ORDEN',
                            "LATITUD","LONGITUD","ALTITUD","CODIGO"]
        try:
            if 'csv' in filename:
                df = pd.read_csv(io.StringIO(decoded.decode('utf-8')))
                df = df[columns_to_consider]
            elif 'xls' in filename:
                # Assume that the user uploaded an excel file
                df = pd.read_excel(io.BytesIO(decoded))
                df = df[columns_to_consider]
            return df, filename, date
        except Exception as e:
            # Any failure (bad format, missing columns, unknown extension)
            # yields an empty frame; the caller shows the error alerts.
            return pd.DataFrame([]), filename, date
    @app.callback(Output('Mapa', 'figure'), Output('tree_map', 'figure'),
                  Output("carta_datos","children"),
                  Output("filtro_clima","options"), Output("filtro_paisaje","options"),
                  Output("filtro_forma_terreno", "options"), Output("filtro_material_parental","options"),
                  Output('Table_data', 'children'), Output("the_alert", "children"),
                  Output("main_alert", "children"),
                  Input('upload-data', 'contents'),
                  State('upload-data', 'filename'),
                  State('upload-data', 'last_modified'))
    def update_maps(list_of_contents, list_of_names, list_of_dates):
        # Fires on upload: refreshes the map, tree map, filter options and table.
        if list_of_contents is not None:
            df, filename, date = parse_contents(list_of_contents, list_of_names, list_of_dates)
            if len(df) == 0:
                # Parsing failed: show error alerts, leave figures/filters untouched.
                alert1 = dbc.Alert("There was an error processing this file.",
                                   color="danger",dismissable=True,
                                   duration=5000)
                alert2 = dbc.Alert([html.H4("An error was encountered when parsing the file {}".format(filename)),
                                    html.Hr(),
                                    html.P("Please double check that your file is compatible with the formats (csv,xls,xlsx,xlsm)"
                                           "these formats are only supported at the moment so make sure you are using the right format."
                                           "In case you are using the right format make sure your file has the required columns.",className="mb-0")
                                    ],
                                   color="danger",dismissable=True,
                                   duration=7000)
                error_section=html.Div([
                    html.H2('There was an error processing this file. File {}, uploaded {}'.format(filename,date)),
                    html.Br(),
                    html.H2('Double check that you have the right format or your file has the needed Columns')]
                    ,className="text-danger",style={"align-text":"center"})
                # Order matches the ten Output(...) declarations above.
                return dash.no_update, dash.no_update, dash.no_update,\
                       dash.no_update, dash.no_update,\
                       dash.no_update, dash.no_update,\
                       error_section, alert1,alert2
            else:
                # Success: build the pivot-table section and refresh everything.
                table_children=html.Div([
                    html.H2("Tabla Dinamica", className='title ml-2', style={'textAlign': 'left', 'color': '#FFFFFF'}),
                    html.H4("Archivo Cargado: {}".format(filename), className='title ml-2',style={'textAlign': 'left', 'color': '#FFFFFF'}),
                    html.H5("Fecha de Carga: {}".format(datetime.datetime.fromtimestamp(date)), className='title ml-2', style={'textAlign': 'left', 'color': '#FFFFFF'}),
                    make_pivot_table(df)
                ])
                good_alarm=dbc.Alert([html.H4("The file {} was successfully processed".format(filename)),],
                                     color="success",dismissable=True,
                                     duration=5000)
                mini_alarm= dbc.Alert("File proccesed",
                                      color="success",dismissable=True,
                                      duration=5000)
                # Order matches the ten Output(...) declarations above.
                return Make_map(df),Make_tree_map(df), len(df), \
                       make_options_filters(df["CLIMA_AMBIENTAL"].dropna().unique()), \
                       make_options_filters(df["PAISAJE"].dropna().unique()), \
                       make_options_filters(df["FORMA_TERRENO"].dropna().unique()), \
                       make_options_filters(df["MATERIAL_PARENTAL_LITOLOGIA"].dropna().unique()),\
                       table_children, mini_alarm, good_alarm
        else:
            # No upload yet: skip the callback entirely.
            raise PreventUpdate
#@app.callback(Output("Download_file", "data"),
#"Table_data"
# [Input("Boton_download", "n_clicks")],
# prevent_initial_call=True)
#def generate_csv(n_nlicks):
# return dcc.send_data_frame(df.to_csv, filename="prueba.csv")
#return {"color":"primary", }
|
{"/callbacks.py": ["/apps/utils/utils_getdata.py", "/apps/utils/utils_plots.py", "/apps/utils/utils_filters.py"], "/apps/utils/utils_filters.py": ["/apps/utils/utils_getdata.py"], "/apps/home/layout_home.py": ["/apps/utils/utils_getdata.py"]}
|
22,932
|
kaefee/Agustin-Codazzi-Project
|
refs/heads/main
|
/apps/utils/utils_getdata.py
|
import pandas as pd
import unidecode
def get_data(column_name):
    """Load only the requested columns from Data3.csv into a DataFrame."""
    return pd.read_csv("Data3.csv", usecols=column_name, low_memory=True)
def standarised_string(x):
    """Transliterate *x* to ASCII, turn underscores into spaces, and sentence-case it."""
    ascii_text = unidecode.unidecode(x)
    spaced = ascii_text.replace("_", " ")
    return spaced.lower().capitalize()
|
{"/callbacks.py": ["/apps/utils/utils_getdata.py", "/apps/utils/utils_plots.py", "/apps/utils/utils_filters.py"], "/apps/utils/utils_filters.py": ["/apps/utils/utils_getdata.py"], "/apps/home/layout_home.py": ["/apps/utils/utils_getdata.py"]}
|
22,933
|
kaefee/Agustin-Codazzi-Project
|
refs/heads/main
|
/apps/utils/utils_cardskpi.py
|
import dash_bootstrap_components as dbc
def Card_total(datos):
    """Build the list-group items that display the total observation count."""
    heading = dbc.ListGroupItemHeading("Numero de Observaciones",style={"font-size":"1.3em"})
    value = dbc.ListGroupItemText(datos, style={"font-size":"2.5em","align":"right"},id="carta_datos")
    return [heading, value]
|
{"/callbacks.py": ["/apps/utils/utils_getdata.py", "/apps/utils/utils_plots.py", "/apps/utils/utils_filters.py"], "/apps/utils/utils_filters.py": ["/apps/utils/utils_getdata.py"], "/apps/home/layout_home.py": ["/apps/utils/utils_getdata.py"]}
|
22,934
|
kaefee/Agustin-Codazzi-Project
|
refs/heads/main
|
/apps/main/main_nav.py
|
import dash_bootstrap_components as dbc
import dash_html_components as html
from dash.dependencies import Input, Output, State
from app import app
# Logo image urls used in the navigation bar.
IGAC_LOGO = "https://www.igac.gov.co/sites/igac.gov.co/files/igac-logo.png"
github_logo="https://github.com/jamontanac/Tesis_Master/raw/master/GitHub_logo.png"
correlation_one_logo="https://www.correlation-one.com/hubfs/c1logo_color.png"
# Right-hand side of the navbar: Home link plus external logo links.
menu_bar = [
    dbc.NavItem(
        dbc.NavLink("Home", active = True, id = "page-1-link",href="/apps/home/layout_home")
    ),
    dbc.Row([
        dbc.Col([
            html.A(html.Img(src=correlation_one_logo, height="20px"),
                   href="https://www.correlation-one.com/")
        ],width="auto"),
        dbc.Col([
            html.A(html.Img(src=github_logo, height="25px")
                   ,href="https://github.com/DS4A-Team19-2021")]),
    ],
        no_gutters=False,
        className="ml-auto flex-nowrap mt-3 mt-md-0",
        align="center",
    )
]
# Top navigation bar: IGAC brand (links to igac.gov.co) plus the collapsible menu.
navbar = dbc.Navbar(
    [
        html.A(
            # Use row and col to control vertical alignment of logo / brand
            dbc.Row(
                [
                    dbc.Col(html.Img(src=IGAC_LOGO, height="30px")),
                    dbc.Col(dbc.NavbarBrand("DS4A/IGAC", className="ml-2"))
                ],
                align="center",
                no_gutters=True,
            ),
            href="https://www.igac.gov.co/",
        ),
        # Hamburger toggler shown on narrow screens; collapse holds the menu items.
        dbc.NavbarToggler(id="navbar-toggler", n_clicks=0),
        dbc.Collapse(
            menu_bar,
            id="navbar-collapse", navbar=True, is_open=False
        ),
    ],
    color="primary",
    dark=False,
)
# Layout fragment exported by this module: the navbar wrapped in a Div.
layout = html.Div(
    [
        navbar
    ],
    id="menu",
)
|
{"/callbacks.py": ["/apps/utils/utils_getdata.py", "/apps/utils/utils_plots.py", "/apps/utils/utils_filters.py"], "/apps/utils/utils_filters.py": ["/apps/utils/utils_getdata.py"], "/apps/home/layout_home.py": ["/apps/utils/utils_getdata.py"]}
|
22,935
|
kaefee/Agustin-Codazzi-Project
|
refs/heads/main
|
/apps/utils/utils_filters.py
|
import dash_html_components as html
import dash_bootstrap_components as dbc
import dash_core_components as dcc
from apps.utils.utils_getdata import standarised_string
def make_options_filters(data):
    """Turn an iterable of values into Dash dropdown option dicts."""
    options = []
    for value in data:
        options.append({"label": value, "value": value})
    return options
def _dropdown_filter(label, element_id, series):
    """One labelled dropdown filter fed by the unique non-null values of *series*."""
    return dbc.FormGroup([
        dbc.Label(label),
        dcc.Dropdown(
            id=element_id,
            options=make_options_filters(series.dropna().unique()),
            value="",style={'color': 'black'}),
    ])
def make_filters(df):
    """Build the sidebar card with the four dropdown filters, the alert slot
    and the file-upload widget.

    Parameters
    ----------
    df : pandas.DataFrame
        Must contain the columns CLIMA_AMBIENTAL, PAISAJE, FORMA_TERRENO
        and MATERIAL_PARENTAL_LITOLOGIA; their unique values populate
        the dropdowns.
    """
    # The four filters shared the same FormGroup structure; build them
    # with one helper instead of four copy-pasted blocks.
    card_of_filters = dbc.Card([
        dbc.CardHeader("Filtros"),
        dbc.CardBody([
            html.H5("Filtre la información que desea ver", className="card-title"),
            _dropdown_filter("Clima", "filtro_clima", df["CLIMA_AMBIENTAL"]),
            _dropdown_filter("Paisaje", "filtro_paisaje", df["PAISAJE"]),
            _dropdown_filter("Forma de Terreno", "filtro_forma_terreno", df["FORMA_TERRENO"]),
            _dropdown_filter("Material parental", "filtro_material_parental", df["MATERIAL_PARENTAL_LITOLOGIA"]),
            # Placeholder the upload callback fills with success/error alerts.
            html.Div(id="the_alert", children=[]),
            dbc.FormGroup([
                dcc.Upload(
                    id='upload-data',
                    children=html.Div([
                        'Drag Here or ',
                        html.A('Select Files')
                    ]),
                    style={
                        'width': '100%',
                        'height': '60px',
                        'lineHeight': '60px',
                        'borderWidth': '1px',
                        'borderStyle': 'dashed',
                        'borderRadius': '5px',
                        'textAlign': 'center',
                        'margin': '10px'
                    },
                    # Only a single file may be uploaded at a time
                    multiple=False
                )]),
        ]),
    ],color="secondary")
    return card_of_filters
#"/Users/jamontanac/Desktop/Screen Shot 2021-07-23 at 10.11.21 AM.png"
|
{"/callbacks.py": ["/apps/utils/utils_getdata.py", "/apps/utils/utils_plots.py", "/apps/utils/utils_filters.py"], "/apps/utils/utils_filters.py": ["/apps/utils/utils_getdata.py"], "/apps/home/layout_home.py": ["/apps/utils/utils_getdata.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.