index int64 | repo_name string | branch_name string | path string | content string | import_graph string |
|---|---|---|---|---|---|
74,684 | Ihor-Kalhanov/Flask_rest_jwt | refs/heads/master | /resource/user.py | from flask_restful import Resource, reqparse
from flask import jsonify
from flask_jwt_extended import create_access_token, jwt_required
import json
from heplers.encoder import AlchemyEncoder
from models.user import UserModel
class User(Resource):
    """Login endpoint: verifies credentials and issues a JWT access token."""

    parser = reqparse.RequestParser()
    parser.add_argument('username', type=str, required=True,
                        help='This field cannot be left blank')
    parser.add_argument('password', type=str, required=True,
                        help='This field cannot be left blank')

    def post(self):
        """Authenticate a user and return a JWT.

        Returns 404 for an unknown user and 401 for a wrong password.
        NOTE(review): answering "doesn't exist" differently from "wrong
        password" allows user enumeration; consider one generic 401.
        """
        data = User.parser.parse_args()
        username = data['username']
        current_user = UserModel.find_by_username(username)
        if not current_user:
            # Fix: previously returned an implicit 200 for unknown users.
            return {'message': f'User {username} doesn\'t exist'}, 404
        if UserModel.verify_hash(data['password'], current_user.password):
            access_token = create_access_token(
                identity=json.dumps(current_user, cls=AlchemyEncoder))
            return jsonify(access_token=access_token)
        else:
            return {'message': 'Wrong username or password.'}, 401
class UserRegister(Resource):
    """Registration endpoint: creates a new user with a hashed password."""

    parser = reqparse.RequestParser()
    parser.add_argument('username', type=str, required=True,
                        help='This field cannot be left blank')
    parser.add_argument('password', type=str, required=True,
                        help='This field cannot be left blank')

    def post(self):
        """Create the user unless the username is already taken (400)."""
        payload = UserRegister.parser.parse_args()
        if UserModel.find_by_username(payload['username']):
            return {'message': 'UserModel has already been created, aborting.'}, 400
        account = UserModel(
            username=payload['username'],
            password=UserModel.generate_hash(payload['password'])
        )
        account.save_to_db()
        return {'message': 'user has been created successfully.'}, 201
| {"/resource/car.py": ["/models/car.py", "/schema/car.py"], "/app.py": ["/resource/car.py", "/resource/user.py"]} |
74,685 | Ihor-Kalhanov/Flask_rest_jwt | refs/heads/master | /schema/car.py | from ma import ma
class CarSchema(ma.Schema):
    """Marshmallow schema serializing CarModel rows."""
    class Meta:
        # Only these attributes are exposed in API responses.
        fields = ("id", "brand", "year")
| {"/resource/car.py": ["/models/car.py", "/schema/car.py"], "/app.py": ["/resource/car.py", "/resource/user.py"]} |
74,686 | Ihor-Kalhanov/Flask_rest_jwt | refs/heads/master | /resource/car.py | from flask_jwt_extended import jwt_required
from flask_restful import Resource, reqparse
from flask import request, jsonify
from db import db
from models.car import CarModel
from schema.car import CarSchema
# Module-level schema instances shared by the resource handlers below:
# one for single-car payloads, one for lists of cars.
car_schema = CarSchema()
cars_schema = CarSchema(many=True)
class CarListResource(Resource):
    """Collection endpoint: list all cars (public) and create one (JWT)."""

    # NOTE(review): this parser is never used by the handlers below;
    # post() reads flask.request.json directly instead.
    parser = reqparse.RequestParser()
    parser.add_argument('year', type=int, required=False,
                        help='This field cannot be string')

    def get(self):
        """Return every car in the database, serialized by CarSchema."""
        cars = CarModel.query.all()
        cars_data = cars_schema.dump(cars)
        return {"cars": cars_data}, 200

    @jwt_required()
    def post(self):
        """Create a car from the JSON body; requires a valid JWT."""
        try:
            # Reading the keys inside the try so a missing/invalid body is
            # also reported as a 400 instead of propagating a KeyError.
            new_post = CarModel(
                brand=request.json['brand'],
                year=request.json['year']
            )
            db.session.add(new_post)
            db.session.commit()
            return {"car": car_schema.dump(new_post)}, 201
        except Exception:
            # Fix: was a bare `except:`; also roll back so the session is
            # usable again after a failed flush/commit.
            db.session.rollback()
            return {"error": "You send invalid request"}, 400
class CarResource(Resource):
    """Item endpoint for a single car; every verb requires a valid JWT."""

    @jwt_required()
    def get(self, car_id):
        """Return one car, or a 404 error payload if it does not exist."""
        try:
            car = CarModel.query.get_or_404(car_id)
            return car_schema.dump(car)
        except Exception:
            # Fix: was a bare `except:`, which also swallows
            # SystemExit/KeyboardInterrupt.
            return {"error": f"car: {car_id} does not exists"}, 404

    @jwt_required()
    def patch(self, car_id):
        """Partially update brand and/or year from the JSON body."""
        car = CarModel.query.get_or_404(car_id)
        if 'brand' in request.json:
            car.brand = request.json['brand']
        if 'year' in request.json:
            # Fix: previously assigned to a nonexistent `car.content`
            # attribute, so PATCHing the year silently had no effect.
            car.year = request.json['year']
        db.session.commit()
        return car_schema.dump(car)

    @jwt_required()
    def delete(self, car_id):
        """Delete the car; 204 on success, 404 if it does not exist."""
        car = CarModel.query.get_or_404(car_id)
        db.session.delete(car)
        db.session.commit()
        return '', 204
| {"/resource/car.py": ["/models/car.py", "/schema/car.py"], "/app.py": ["/resource/car.py", "/resource/user.py"]} |
74,687 | Ihor-Kalhanov/Flask_rest_jwt | refs/heads/master | /heplers/token.py | # from datetime import datetime
#
# from flask_jwt_extended import decode_token
# from sqlalchemy.orm.exc import NoResultFound
#
#
#
#
# def add_token_to_database(encoded_token, identity_claim):
# """
# Adds a new token to the database. It is not revoked when it is added.
# :param identity_claim: configured key to get user identity
# """
# decoded_token = decode_token(encoded_token)
# jti = decoded_token["jti"]
# token_type = decoded_token["type"]
# user_identity = decoded_token[identity_claim]
# expires = datetime.fromtimestamp(decoded_token["exp"])
# revoked = False
#
# db_token = TokenBlocklist(
# jti=jti,
# token_type=token_type,
# user_id=user_identity,
# expires=expires,
# revoked=revoked,
# )
# db.session.add(db_token)
# db.session.commit()
#
#
#
#
| {"/resource/car.py": ["/models/car.py", "/schema/car.py"], "/app.py": ["/resource/car.py", "/resource/user.py"]} |
74,688 | Ihor-Kalhanov/Flask_rest_jwt | refs/heads/master | /app.py | from flask import Flask
from flask_restful import Api
from flask_jwt_extended import JWTManager
from db import db
from resource.car import CarListResource, CarResource
from resource.user import UserRegister, User
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///data.db'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.config['PROPAGATE_EXCEPTIONS'] = True
app.config['FLASK_ADMIN_SWATCH'] = 'cerulean'
# NOTE(review): hard-coded secret key; load it from an environment
# variable for anything beyond local development.
app.secret_key = 'super secret key'
app.config['SESSION_TYPE'] = 'filesystem'
api = Api(app)
jwt = JWTManager(app)


@app.before_first_request
def create_tables():
    # Bind SQLAlchemy and create any missing tables on the first request.
    # NOTE(review): db.init_app(app) is also called in the __main__ block
    # below, so the extension may be initialized twice; before_first_request
    # was removed in Flask 2.3 — confirm the targeted Flask version.
    db.init_app(app)
    db.create_all()


api.add_resource(CarListResource, '/cars')
api.add_resource(CarResource, '/car/<int:car_id>')
api.add_resource(UserRegister, '/register')
api.add_resource(User, '/user')

if __name__ == '__main__':
    db.init_app(app)
    app.run(port=5000, debug=True)
| {"/resource/car.py": ["/models/car.py", "/schema/car.py"], "/app.py": ["/resource/car.py", "/resource/user.py"]} |
74,689 | Ihor-Kalhanov/Flask_rest_jwt | refs/heads/master | /models/car.py | from db import db
class CarModel(db.Model):
    """SQLAlchemy model backing the `cars` table."""

    __tablename__ = 'cars'

    id = db.Column(db.Integer, primary_key=True)
    brand = db.Column(db.String(50), unique=True)
    year = db.Column(db.Integer, nullable=True)

    def __init__(self, brand, year):
        self.brand = brand
        self.year = year

    def __repr__(self):
        return '<Cars {}>'.format(self.brand)

    @classmethod
    def find_by_name(cls, brand):
        """Return the first car with this brand, or None."""
        query = cls.query.filter_by(brand=brand)
        return query.first()

    @classmethod
    def find_all(cls):
        """Return every row of the table."""
        return cls.query.all()

    def save_to_db(self):
        """Persist this instance (insert or update)."""
        db.session.add(self)
        db.session.commit()

    def delete_from_db(self):
        """Remove this instance from the database."""
        db.session.delete(self)
        db.session.commit()
| {"/resource/car.py": ["/models/car.py", "/schema/car.py"], "/app.py": ["/resource/car.py", "/resource/user.py"]} |
74,690 | gh0ststrike/python-postgres | refs/heads/main | /connect_db.py | # Postgres driver
import psycopg2
def initial_connect_db():
    """Open a connection to the default maintenance database `postgres`.

    NOTE(review): credentials are hard-coded; move them to environment
    variables before using outside local development.
    """
    return psycopg2.connect(
        database="postgres",
        user='postgres',
        password='welcome333',
        host='127.0.0.1',
        port='5432',
    )
def connect_db(db_name):
    """Open a connection to the named database on the local server."""
    credentials = dict(user='postgres', password='welcome333',
                       host='127.0.0.1', port='5432')
    return psycopg2.connect(database=db_name, **credentials)
| {"/main.py": ["/create_db.py", "/create_table.py", "/insert_data.py", "/query_table.py", "/drop_db.py"], "/insert_data.py": ["/connect_db.py"], "/create_table.py": ["/connect_db.py"], "/create_db.py": ["/connect_db.py"], "/query_table.py": ["/connect_db.py"], "/drop_db.py": ["/connect_db.py"]} |
74,691 | gh0ststrike/python-postgres | refs/heads/main | /main.py | # import functions from other files
from create_db import create_db
from create_table import create_table
from insert_data import insert_data
from query_table import query_table
from drop_db import drop_db
# Press the green arrow to run the script.
if __name__ == '__main__':
    # Demo pipeline: create the database, create and populate the EMPLOYEE
    # table, then read it back.  Dropping the DB is opt-in below.
    db_name = "my_test_db"
    print('Call Create DB Function')
    create_db(db_name)
    print('Call Create Table Function')
    create_table(db_name)
    print('Call Insert Data Function')
    insert_data(db_name)
    print('Call Query Table Function')
    query_table(db_name)
    # Uncomment if you want to drop the DB
    # print('Delete DB')
    # drop_db(db_name)
| {"/main.py": ["/create_db.py", "/create_table.py", "/insert_data.py", "/query_table.py", "/drop_db.py"], "/insert_data.py": ["/connect_db.py"], "/create_table.py": ["/connect_db.py"], "/create_db.py": ["/connect_db.py"], "/query_table.py": ["/connect_db.py"], "/drop_db.py": ["/connect_db.py"]} |
74,692 | gh0ststrike/python-postgres | refs/heads/main | /insert_data.py | import connect_db as db
def insert_data(db_name):
    """Insert one hard-coded sample row into EMPLOYEE in `db_name`."""
    conn = db.connect_db(db_name)
    # Autocommit is enabled, so each execute() is committed immediately.
    conn.autocommit = True
    cursor = conn.cursor()
    cursor.execute('''INSERT INTO EMPLOYEE (FIRST_NAME,LAST_NAME,AGE,SEX,INCOME)
       VALUES ('Shiba', 'Inu', 25, 'F', 200)''')
    # Redundant under autocommit; kept for parity with the original flow.
    conn.commit()
    print("Records inserted........")
    conn.close()
| {"/main.py": ["/create_db.py", "/create_table.py", "/insert_data.py", "/query_table.py", "/drop_db.py"], "/insert_data.py": ["/connect_db.py"], "/create_table.py": ["/connect_db.py"], "/create_db.py": ["/connect_db.py"], "/query_table.py": ["/connect_db.py"], "/drop_db.py": ["/connect_db.py"]} |
74,693 | gh0ststrike/python-postgres | refs/heads/main | /create_table.py | import connect_db as db
# get connection instance
def create_table(db_name):
    """(Re)create the EMPLOYEE table in `db_name`."""
    conn = db.connect_db(db_name)
    cursor = conn.cursor()
    conn.autocommit = True
    # Dropping any previous EMPLOYEE table makes the script re-runnable.
    cursor.execute("DROP TABLE IF EXISTS EMPLOYEE")
    ddl = '''CREATE TABLE EMPLOYEE (
       FIRST_NAME CHAR(20) NOT NULL,
       LAST_NAME CHAR(20),
       AGE INT,
       SEX CHAR(1),
       INCOME FLOAT
    )'''
    cursor.execute(ddl)
    print("Table created successfully........")
    conn.close()
| {"/main.py": ["/create_db.py", "/create_table.py", "/insert_data.py", "/query_table.py", "/drop_db.py"], "/insert_data.py": ["/connect_db.py"], "/create_table.py": ["/connect_db.py"], "/create_db.py": ["/connect_db.py"], "/query_table.py": ["/connect_db.py"], "/drop_db.py": ["/connect_db.py"]} |
74,694 | gh0ststrike/python-postgres | refs/heads/main | /create_db.py | import connect_db as db
def create_db(db_name):
    """Create database `db_name` on the server unless it already exists."""
    conn = db.initial_connect_db()
    # CREATE DATABASE cannot run inside a transaction block.
    conn.autocommit = True
    cursor = conn.cursor()
    # Check pg_database first: CREATE DATABASE has no IF NOT EXISTS form.
    cursor.execute("SELECT 1 FROM pg_catalog.pg_database WHERE datname = %(value)s", {'value': db_name})
    found = cursor.fetchone()
    if found:
        print("Database already exists........")
    else:
        # NOTE(review): identifiers cannot be parameterized; db_name is a
        # trusted constant here, but quote it if it ever comes from users.
        cursor.execute("CREATE DATABASE " + db_name)
        print("Database created successfully........")
    conn.close()
| {"/main.py": ["/create_db.py", "/create_table.py", "/insert_data.py", "/query_table.py", "/drop_db.py"], "/insert_data.py": ["/connect_db.py"], "/create_table.py": ["/connect_db.py"], "/create_db.py": ["/connect_db.py"], "/query_table.py": ["/connect_db.py"], "/drop_db.py": ["/connect_db.py"]} |
74,695 | gh0ststrike/python-postgres | refs/heads/main | /query_table.py | import connect_db as db
def query_table(db_name):
    """Print all rows of EMPLOYEE in `db_name`.

    Fix: the original never closed the cursor or the connection, leaking a
    server connection on every call.
    """
    conn = db.connect_db(db_name)
    try:
        cur = conn.cursor()
        cur.execute("SELECT * FROM EMPLOYEE")
        records = cur.fetchall()
        print(records)
        cur.close()
    finally:
        conn.close()
| {"/main.py": ["/create_db.py", "/create_table.py", "/insert_data.py", "/query_table.py", "/drop_db.py"], "/insert_data.py": ["/connect_db.py"], "/create_table.py": ["/connect_db.py"], "/create_db.py": ["/connect_db.py"], "/query_table.py": ["/connect_db.py"], "/drop_db.py": ["/connect_db.py"]} |
74,696 | gh0ststrike/python-postgres | refs/heads/main | /drop_db.py | import connect_db as db
def drop_db(db_name):
    """Drop database `db_name`.

    Fix: the original connected to `db_name` itself before dropping it;
    PostgreSQL refuses to drop the currently open database, so the call
    always failed.  Connect to the maintenance database instead.
    """
    conn = db.initial_connect_db()
    # DROP DATABASE cannot run inside a transaction block.
    conn.autocommit = True
    cur = conn.cursor()
    cur.execute("DROP DATABASE " + db_name)
    conn.close()
| {"/main.py": ["/create_db.py", "/create_table.py", "/insert_data.py", "/query_table.py", "/drop_db.py"], "/insert_data.py": ["/connect_db.py"], "/create_table.py": ["/connect_db.py"], "/create_db.py": ["/connect_db.py"], "/query_table.py": ["/connect_db.py"], "/drop_db.py": ["/connect_db.py"]} |
74,702 | licface/lfd | refs/heads/master | /lfd.py | import os
import sys
import requests
from bs4 import BeautifulSoup as bs
import argparse
import configset
import parserheader
import re
import codecs
import pm
import tarfile
import zipfile
import convert
from debug import debug
PID = os.getpid()
import cmdw
MAX_WIDTH = cmdw.getWidth()
TEST_COUNT = 0
class lfd(object):
def __init__(self):
super(lfd, self)
self.URL = 'https://www.lfd.uci.edu/~gohlke/pythonlibs/'
self.URL_DOWNLOAD = 'https://download.lfd.uci.edu/pythonlibs/'
self.INDEX = "l8ulg3xw"
self.CONFIG_NAME = 'lfd.ini'
self.SESS = requests.Session()
# self.PACKAGES = self.parser_content()
self.pm = pm.pm
def config(self, section, option, value=None, configname=None):
if configname:
self.CONFIG_NAME = configname
conf = configset.configset()
conf.configname = self.CONFIG_NAME
if value:
configset.write_config(section, option, value)
return configset.read_config(section, option)
def minusreplace(self, exc):
if exc.object[exc.start:exc.end] == u'\u2011':
return (u'-', exc.end)
return (u'?', exc.end)
    def parser_content(self):
        """Scrape Gohlke's pythonlibs index page into a nested dict.

        Returns {n: {'package_name': str, 'package_desc': str,
                     'packages': {m: {'name': ..., 'link': ...}}}}.
        NOTE(review): when the HTTP request fails the method falls through
        and returns None (see the commented-out else at the bottom), not an
        empty dict — confirm callers handle that.
        """
        global TEST_COUNT
        # Register the handler that maps U+2011 to '-' during encoding.
        codecs.register_error('minusreplace', self.minusreplace)
        req = self.SESS.get(self.URL)
        content = ''
        package_dict = {}
        if req.ok:
            content = req.content
            b = bs(content, 'lxml')
            a = b.find('ul', {'class': 'pylibs'})
            all_group_packages = a.find_all_next('ul')
            # The trailing <ul> is site chrome, not a package group.
            all_group_packages_main = a.find_all_next('ul')[0:-1]
            # The trailing six <strong> tags are not package headings.
            all_strong_package_name = a.find_all_next('strong')[0:-6]
            n = 1
            for i in all_group_packages_main:
                index = all_group_packages_main.index(i)
                m = 0
                package_name = all_strong_package_name[index].text
                if package_name == u'Misc':
                    package_desc = "Misc Packages"
                else:
                    # First paragraph of the heading's parent element,
                    # joined back onto a single line.
                    package_desc = " ".join(unicode(all_strong_package_name[
                        index].parent.text.strip()).encode('utf-8').split('\n\n')[0].split('\n'))
                all_a_per_group = all_group_packages_main[index].find_all('a')
                packages = {}
                for p in all_a_per_group:
                    # cp850 + the custom handler keeps '-' readable on
                    # Windows consoles.
                    name = p.text.encode('cp850', errors='minusreplace')
                    if name:
                        link = self.URL_DOWNLOAD + self.INDEX + "/" + name
                        packages.update({
                            m: {'name': name, 'link': link}
                        })
                        m += 1
                package_dict.update({
                    n: {
                        'package_name': str(package_name).lower(),
                        'package_desc': package_desc,
                        'packages': packages
                    }
                })
                n += 1
            return package_dict
        # else:
        #     return req.content
    def find(self, package_name, python_version, architecture, packages_db=None):
        """Find a wheel download link for (package, python tag, platform tag).

        package_name   -- case-insensitive; '-' and '_' both accepted.
        python_version -- interpreter tag, e.g. 'cp27'.
        architecture   -- platform tag without '.whl', e.g. 'win_amd64'.
        packages_db    -- optional pre-fetched parser_content() result.

        Returns (link, name, package_name) on success or ('', '', '') on
        a platform mismatch.  NOTE(review): when no package name matches at
        all, the loop falls through and returns None — callers must handle
        both empty shapes.
        """
        architecture = architecture + ".whl"
        package_name = unicode(package_name).lower()
        package_name_alt = package_name.replace("-", "_")
        if packages_db:
            packages = packages_db
        else:
            packages = self.parser_content()
        for i in packages:
            if packages.get(i).get('package_name') == package_name or packages.get(i).get('package_name') == package_name_alt:
                find_packages = packages.get(i).get('packages')
                find_packages_name = packages.get(i).get('package_name')
                if find_packages:
                    for p in find_packages:
                        pp = find_packages.get(p)
                        name = pp.get('name')
                        link = pp.get('link')
                        # Wheel names are '-'-separated fields:
                        # name-version-pytag-abitag-platform.whl
                        name_split = re.split('-', name)
                        if python_version in name_split:
                            if architecture in name_split:
                                print "FOUND LINK:", link
                                return link, name, find_packages_name
                            else:
                                # NOTE(review): bails out on the FIRST wheel
                                # whose python tag matches but whose platform
                                # does not, instead of trying the remaining
                                # wheels — confirm this is intended.
                                return '','',''
def find_localstorage(python_version, architecture, packages_list):
for i in packages_list:
name = os.path.basename(i)
name_split = re.split('-', name)
if python_version in name_split:
if architecture in name_split:
return i
return ''
def find_localstorage_basename(basename):
for i in packages_list:
name = os.path.basename(i)
if name == basename:
return i
return ''
    def serve(self, host='0.0.0.0', port='7775'):
        """Placeholder for a local package server; not implemented yet."""
        pass

    def install(self, host='0.0.0.0', port='7775'):
        """Placeholder for installing a downloaded package; not implemented."""
        pass

    def usage(self):
        """Placeholder for CLI help output; not implemented."""
        pass
def get_readme(self, file, testzipfile=False):
if str(file).endswith(".gz"):
try:
tar_file = tarfile.open(file)
for i in tar_file.getnames():
if i.endswith('README.rst') or i.endswith('README.md') or i.endswith('README'):
f = tar_file.extractfile(i)
return f.read()
return ''
except:
print traceback.format_exc()
return False
elif str(file).endswith(".zip"):
try:
zip_file = zipfile.ZipFile(file)
if testzipfile:
testzip = zip_file.testzip()
if testzip == None:
pass
else:
return "zipfile corrupt !"
for i in zip_file.namelist():
if i.endswith('README.rst') or i.endswith('README.md') or i.endswith('README'):
return zip_file.read(i)
return ''
except:
print traceback.format_exc()
return False
elif str(file).endswith(".whl"):
try:
zip_file = zipfile.ZipFile(file)
if testzipfile:
testzip = zip_file.testzip()
if testzip == None:
pass
else:
return "zipfile corrupt !"
for i in zip_file.namelist():
if i.endswith('METADATA'):
return zip_file.read(i)
return ''
except:
print traceback.format_exc()
return False
else:
return ''
    def update_database(self, path=None):
        """Rescan local package storage and rebuild the database tables.

        NOTE(review): marked "STILL ERROR" by the author — database() has no
        return statement (it returns None), so the .truncate()/.insert()
        calls below raise AttributeError when reached.
        """
        ######################################
        #              STILL ERROR           #
        ######################################
        config = configset.configset()
        config.configname = 'lfd.ini'
        path_name = path
        # Resolution order: PACKAGES_DIR env var, then lfd.ini, then the
        # explicit argument, then the current working directory.
        if os.getenv('PACKAGES_DIR'):
            path = os.getenv('PACKAGES_DIR')
        if config.read_config('PACKAGES', 'dir', value='.'):
            path = config.read_config('PACKAGES', 'dir', value='.')
        if path_name:
            path = path_name
        if path == ".":
            path = os.getcwd()
        packages = self.identify_localstorage(path)
        self.database().truncate('packages')
        self.database().truncate('localstorage')
        for i in packages:
            description = self.get_readme(packages.get(i)[0])
            self.database((i, description, packages.get(i))).insert()
def get_name_localstorage(self, package):
p = self.pm()
package = os.path.basename(package)
name = p.translate(package)
if name:
return name.lower()
else:
return ''
    def identify_localstorage(self, path=None):
        """Scan `path` for package files, grouped by canonical package name.

        Returns {package_name: [file paths, ...]}.
        """
        config = configset.configset()
        config.configname = 'lfd.ini'
        path_name = path
        # Resolution order: PACKAGES_DIR env var, then lfd.ini, then cwd.
        if os.getenv('PACKAGES_DIR'):
            path = os.getenv('PACKAGES_DIR')
        if config.read_config('PACKAGES', 'dir', value='.'):
            path = config.read_config('PACKAGES', 'dir', value='.')
        # if path_name:
        #     path = path_name
        if path == ".":
            path = os.getcwd()
        if path == None:
            q = raw_input(
                'PATH IS NONE [EMPTY], DO YOU WANT TO CONTINUE WITH THIS CURRENT PATH [y/n]:')
            if q == 'y':
                path = os.getcwd()
            elif q == 'n':
                # NOTE(review): make_colors is not imported in this module,
                # so both prints below raise NameError when reached.
                print make_colors("EXIT NO PATH", 'white', 'lightred', ['blink'])
                sys.exit('EXIT NO PATH')
            else:
                print make_colors("RETURN EMPTY DATA", 'white', 'lightred', ['blink'])
                # NOTE(review): dir_dict is only assigned below, so this
                # return raises NameError when reached.
                return dir_dict
        dir_dict = {}
        MODE = config.read_config('SCANNER', 'mode', value='walk')

        def walk():
            # Recursive scan via os.walk.
            for ROOT, DIRS, FILES in os.walk(path):
                if FILES:
                    for i in FILES:
                        name = self.get_name_localstorage(i)
                        if name:
                            if not dir_dict.get(name):
                                dir_dict.update({name: []})
                            dir_dict.get(name).append(os.path.join(ROOT, i))

        def win():
            # Windows `dir /s /b` based scan.
            # NOTE(review): assigning to `path` makes it local to win(); the
            # read on the next line therefore raises UnboundLocalError
            # whenever this mode is selected — confirm before relying on it.
            if " " in path:
                path = '"%s"' % (path)
            pattern = "dir /s /b %s" % (path)
            listdir = os.popen(pattern).readlines()
            for i in listdir:
                i_name = os.path.basename(i)
                name = self.get_name_localstorage(i_name)
                if name:
                    if not dir_dict.get(name):
                        dir_dict.update({name: []})
                    dir_dict.get(name).append(i)

        if MODE == 'walk':
            walk()
        elif MODE == 'win':
            win()
        else:
            walk()
        return dir_dict
    def database(self, datas=None, type='storage', table_name = None):
        """Open the configured database and define table helpers.

        type: 'storage' targets the localstorage table, 'database' the
        packages table ('type' shadows the builtin — noted for review).

        NOTE(review): in the sqlite branch the helper closures (create, get,
        insert, truncate, drop) are defined but never called or returned,
        and the method has no return statement — callers such as
        update_database() receive None.  `traceback` is also not imported
        in this module, so the error paths below raise NameError.
        """
        #type: storage | database
        config = configset.configset()
        config.configname = 'lfd.ini'
        dbname = config.read_config('DATABASE', 'name', value='lfd.db3')
        host = config.read_config('DATABASE', 'host', value='127.0.0.1')
        port = config.read_config('DATABASE', 'port', value='3306')
        dbtype = config.read_config('DATABASE', 'type', value='sqlite')
        username = config.read_config('DATABASE', 'username', value='root')
        password = config.read_config('DATABASE', 'password', value='')
        debug(dbname=dbname)
        debug(host=host)
        debug(port=port)
        debug(dbtype=dbtype)
        if dbtype == 'sqlite':
            try:
                # from sqlite3 import dbapi2 as sqlite
                import sqlite3 as sqlite
            except ImportError:
                sys.exit(
                    "You not have module \"pysqlite2\", please download before ! \n")
            except:
                traceback.format_exc()
                sys.exit("ERROR by SYSTEM")
            SQL_CREATE = '''CREATE TABLE IF NOT EXISTS packages ( \
                'id' INTEGER PRIMARY KEY AUTOINCREMENT, \
                'name' VARCHAR(255) NOT NULL, \
                'description' VARCHAR(255) NOT NULL, \
                'relpath' VARCHAR(255) NOT NULL);'''
            SQL_CREATE_REPO = '''CREATE TABLE IF NOT EXISTS localstorage ( \
                'total' VARCHAR(255) NOT NULL, \
                'packages' VARCHAR(255) NOT NULL);'''
            SQL_DROP = "DROP TABLE %s;" % (table_name)
            conn = sqlite.connect(dbname)
            cursor = conn.cursor()

            def create():
                # Create both tables if missing.
                cursor.execute(SQL_CREATE)
                conn.commit()
                cursor.execute(SQL_CREATE_REPO)
                conn.commit()

            def get(table_name):
                # Fetch every row of the given table.
                exc01 = cursor.execute('SELECT * FROM %s;'%(table_name))
                conn.commit()
                return exc01.fetchall()

            def insert():
                # Insert `datas` into the table selected by `type`; on
                # failure retries the packages insert without description.
                if datas:
                    SQL_INSERT = 'INSERT INTO packages (\'name\', \'description\', \'relpath\') VALUES("%s", "%s", "%s");' % (
                        datas[0], convert.convert(datas[1]), convert.convert(datas[2]))
                    SQL_INSERT_LOCALSTORAGE = 'INSERT INTO localstorage (\'total\', \'packages\') VALUES("%s", "%s");' % (
                        datas[0], convert.convert(datas[1]))
                    try:
                        # print "SQL_INSERT =", SQL_INSERT
                        if type == 'storage':
                            cursor.execute(SQL_INSERT_LOCALSTORAGE)
                        elif type == 'database':
                            cursor.execute(SQL_INSERT)
                        conn.commit()
                    except:
                        if type == 'database':
                            SQL_INSERT = "INSERT INTO packages ('name', 'relpath') VALUES('%s', '%s');" % (datas[0], convert.convert(datas[2]))
                            cursor.execute(SQL_INSERT)
                            conn.commit()

            def truncate(table_name):
                # Empty the table and reclaim file space.
                cursor.execute('DELETE FROM %s;'%(table_name))
                conn.commit()
                cursor.execute('VACUUM;')
                conn.commit()

            def drop():
                cursor.execute(SQL_DROP)
                conn.commit()
        elif dbtype == 'mysql':
            SQL_CREATE = '''CREATE TABLE IF NOT EXISTS packages ( \
                `id` BIGINT(100) AUTO_INCREMENT NOT NULL PRIMARY KEY, \
                `name` VARCHAR(255) NOT NULL, \
                `description` VARCHAR(255) NOT NULL, \
                `relpath` VARCHAR(255) NOT NULL)'''
            try:
                import MySQLdb
                conn = MySQLdb.connect(
                    host, username, password, dbname, port)
                cursor = conn.cursor()
            except ImportError:
                sys.exit(
                    "You not have module \"MySQLdb\", please download before ! \n")
            except:
                # Fallback: connect without a database and create the table.
                try:
                    conn = MySQLdb.connect(
                        host, username, password, port=port)
                    cursor = conn.cursor()
                    cursor.execute(SQL_CREATE)
                    conn.commit()
                except:
                    traceback.format_exc()
                    sys.exit("ERROR by SYSTEM")
            if datas:
                # NOTE(review): values are %-interpolated, not parameterized.
                try:
                    SQL_INSERT = "INSERT INTO packages (`name`, `description`, `relpath`) VALUES(%s, %s, %s);" % (
                        datas[0], datas[1], datas[2])
                    cursor.execute(SQL_INSERT)
                    conn.commit()
                except:
                    SQL_INSERT = "INSERT INTO packages (`name`, `description`, `relpath`) VALUES(%s, %s, %s);" % (
                        datas[0], ' ', datas[2])
                    cursor.execute(SQL_INSERT)
                    conn.commit()
        elif dbtype == 'oracle':
            SQL_CREATE = '''CREATE TABLE IF NOT EXISTS packages ( \
                'id' BIGINT(100) GENERATED ALWAYS AS IDENTITY (START WITH 1 INCREMENT BY 1) NOT NULL PRIMARY KEY, \
                'name' VARCHAR(255) NOT NULL, \
                'description' VARCHAR(255) NOT NULL, \
                'relpath' VARCHAR(255) NOT NULL)'''
            sys.exit("STILL DEVELOPMENT, PLEASE USE ANOTHER DATABASE TYPE")
        elif dbtype == 'postgres':
            SQL_CREATE = '''CREATE TABLE IF NOT EXISTS packages ( \
                'id' BIGSERIAL NOT NULL PRIMARY KEY, \
                'name' VARCHAR(255) NOT NULL, \
                'description' VARCHAR(255) NOT NULL, \
                'relpath' VARCHAR(255) NOT NULL)'''
            try:
                import psycopg2
                conn = psycopg2.connect("dbname=%s, user=%s, password=%s, host=%s, port=%s" % (
                    dbname, username, password, host, port))
                cursor = conn.cursor()
                cursor.execute(SQL_CREATE)
                conn.commit()
            except ImportError:
                sys.exit(
                    "You not have module \"Psycopg2\", please download before ! \n")
            except:
                traceback.format_exc()
                sys.exit("ERROR by SYSTEM")
if __name__ == '__main__':
    # Ad-hoc CLI: `python lfd.py <package>` searches for a CPython 2.7
    # 64-bit Windows wheel on the index.
    print "PID:", PID
    c = lfd()
    c.find(sys.argv[1], 'cp27', 'win_amd64')
    # c.parser_content()
    # c.usage()
| {"/database.py": ["/convert.py"]} |
74,703 | licface/lfd | refs/heads/master | /configset.py | import sys
import argparse
import ConfigParser
import os
import traceback
import re
from collections import OrderedDict
from debug import *
from make_colors import make_colors
__sdk__ = '2.7'
__platform__ = 'all'
__url__ = 'licface@yahoo.com'
__build__ = '2.7'
configname ='conf.ini'
class MultiOrderedDict(OrderedDict):
    """Dict type for ConfigParser that merges duplicate list options.

    When a key already present is assigned a list again, the new items are
    appended to the existing list instead of replacing it.
    """

    def __setitem__(self, key, value):
        if not (isinstance(value, list) and key in self):
            # NOTE(review): the original deliberately targets OrderedDict in
            # super(), dispatching to dict.__setitem__; preserved as-is.
            super(OrderedDict, self).__setitem__(key, value)
        else:
            self[key].extend(value)
class configset(object):
    # Shared (class-level) parser instance; RawConfigParser so values are
    # not interpolated, allow_no_value so bare options parse.
    cfg = ConfigParser.RawConfigParser(allow_no_value=True)
    # Preserve option-name case (the default optionxform lower-cases keys).
    cfg.optionxform = str
    # Directory containing this module.
    THIS_PATH = os.path.dirname(__file__)
    # configname ='conf.ini'
    # debug(configname = configname)

    def __init__(self):
        # NOTE(review): `super(configset, self)` without a method call is a
        # no-op; left as-is.
        super(configset, self)
        global configname
        # Start from the module-level default config file name.
        self.configname = configname
    def get_config_file(self, filename='', verbosity=None):
        """Resolve (or create) the configuration file path to use.

        Search order: cwd/filename, filename as given, this module's
        directory/filename, then self.configname and module-dir/configname;
        when nothing exists an empty file named self.configname is created
        and its path returned.
        """
        if not filename:
            filename = self.configname
        configname = filename
        self.configname = configname
        debug(configname = filename)
        self.configname = configname
        debug(configset_configname = self.configname)
        if os.path.isfile(os.path.join(os.getcwd(), filename)):
            debug(checking_001 = "os.path.isfile(os.path.join(os.getcwd(), filename))")
            self.configname =os.path.join(os.getcwd(), filename)
            debug(configname = os.path.join(os.getcwd(), filename))
            return os.path.join(os.getcwd(), filename)
        elif os.path.isfile(filename):
            debug(checking_002 = "os.path.isfile(filename)")
            self.configname =filename
            debug(configname = os.path.abspath(filename))
            return filename
        elif os.path.isfile(os.path.join(os.path.dirname(__file__), filename)):
            debug(checking_003 = "os.path.isfile(os.path.join(os.path.dirname(__file__), filename))")
            self.configname =os.path.join(os.path.dirname(__file__), filename)
            debug(configname = os.path.join(os.path.dirname(__file__), filename))
            return os.path.join(os.path.dirname(__file__), filename)
        elif os.path.isfile(self.configname):
            debug(checking_004 = "os.path.isfile(configname)")
            debug(configname = os.path.abspath(configname))
            return configname
        elif os.path.isfile(os.path.join(os.path.dirname(__file__), self.configname)):
            debug(checking_005 = "os.path.isfile(os.path.join(os.path.dirname(__file__), configname))")
            debug(configname = os.path.join(os.path.dirname(__file__), self.configname))
            return os.path.join(os.path.dirname(__file__), self.configname)
        else:
            # Nothing found: create an empty config file.
            debug(checking_006 = "ELSE")
            fcfg = self.configname
            f = open(fcfg, 'w')
            f.close()
            filecfg = fcfg
            debug(CREATE = os.path.abspath(filecfg))
            return filecfg
    def write_config(self, section, option, filename='', value=None, cfg = None, verbosity=None):
        """Set section/option to value and persist the file.

        Creates the section if missing, then returns the freshly read-back
        value.  The `cfg` argument only controls whether the existing file
        is re-read before writing.
        NOTE(review): the file is opened in 'wb' — fine on Python 2, but
        ConfigParser.write needs a text file on Python 3.
        """
        filename = self.get_config_file(filename, verbosity)
        #cfg = ConfigParser.RawConfigParser(allow_no_value=True, dict_type=MultiOrderedDict)
        #debug(filename = filename)
        debug(value = value)
        if cfg:
            debug(cfg_not_none = True)
            configset.cfg.read(filename)
        try:
            debug(cfg_set = True)
            configset.cfg.set(section, option, value)
        except ConfigParser.NoSectionError:
            # Missing section: create it, then set the option.
            debug(cfg_add = True)
            configset.cfg.add_section(section)
            configset.cfg.set(section, option, value)
        cfg_data = open(filename,'wb')
        configset.cfg.write(cfg_data)
        cfg_data.close()
        return self.read_config(section, option, filename)
def write_config2(self, section, option, filename='', value=None, verbosity=None):
filename = self.get_config_file(filename, verbosity)
#cfg = ConfigParser.RawConfigParser(allow_no_value=True, dict_type=MultiOrderedDict)
if not value == None:
if os.path.isfile(os.path.join(THIS_PATH, filename)):
configset.cfg.read(filename)
else:
filename = self.get_config_file()
configset.cfg.read(filename)
try:
configset.cfg.get(section, option)
configset.cfg.set(section, option, value)
except ConfigParser.NoSectionError:
#configset.cfg.add_section(section)
#configset.cfg.set(section, option, value)
return "\tNo Section Name: '%s'" %(section)
except ConfigParser.NoOptionError:
return "\tNo Option Name: '%s'" %(option)
cfg_data = open(filename,'wb')
configset.cfg.write(cfg_data)
cfg_data.close()
return ''
else:
return "No Value set !"
    def read_config(self, section, option, filename='', value=None, verbosity=None):
        """
        option: section, option, filename='', value=None

        Read section/option; when the lookup fails, the default `value` is
        written first and the option is read again.
        NOTE(review): both fallbacks use bare except clauses, so any error
        (not only missing section/option) takes this path.
        """
        filecfg = self.get_config_file(filename, verbosity)
        configset.cfg.read(filecfg)
        # debug(section = section)
        # debug(option = option)
        # debug(value = value)
        # debug(filecfg = str(filecfg))
        # debug(filecfg = os.path.abspath(str(filecfg)))
        try:
            data = configset.cfg.get(section, option)
        except:
            try:
                self.write_config(section, option, filename, value)
            except:
                traceback.format_exc()
            data = configset.cfg.get(section, option)
        return data
def read_config2(self, section, option, filename='', verbosity=None): #format ['aaa','bbb','ccc','ddd']
"""
option: section, option, filename=''
format result: ['aaa','bbb','ccc','ddd']
"""
filename = self.get_config_file(filename, verbosity)
cfg = ConfigParser.RawConfigParser(allow_no_value=True, dict_type=MultiOrderedDict)
cfg.read(filename)
cfg = cfg.get(section, option)
return cfg
def read_config3(self, section, option, filename='', verbosity=None): #format result: [[aaa.bbb.ccc.ddd, eee.fff.ggg.hhh], qqq.xxx.yyy.zzz]
"""
option: section, option, filename=''
format result: [[aaa.bbb.ccc.ddd, eee.fff.ggg.hhh], qqq.xxx.yyy.zzz]
"""
filename = self.get_config_file(filename, verbosity)
data = []
cfg = ConfigParser.RawConfigParser(allow_no_value=True, dict_type=MultiOrderedDict)
cfg.read(filename)
cfg = cfg.get(section, option)
for i in cfg:
if "," in i:
d1 = str(i).split(",")
d2 = []
for j in d1:
d2.append(str(j).strip())
data.append(d2)
else:
data.append(i)
return data
    def read_config4(self, section, option, filename='', value = '', verbosity=None):
        """
        option: section, option, filename=''
        format result: [aaa.bbb.ccc.ddd, eee.fff.ggg.hhh, qqq.xxx.yyy.zzz]

        Comma-separated entries are flattened into one stripped list; on any
        lookup error the default `value` is written and returned instead.
        """
        filename = self.get_config_file(filename, verbosity)
        debug(filename = filename)
        data = []
        cfg = ConfigParser.RawConfigParser(allow_no_value=True, dict_type=MultiOrderedDict)
        #print "CFG =", cfg
        cfg.read(filename)
        debug(cfg0 = cfg)
        try:
            cfg = cfg.get(section, option)
            debug(cfg1 = cfg)
            if not cfg == None:
                for i in cfg:
                    if "," in i:
                        d1 = str(i).split(",")
                        for j in d1:
                            data.append(str(j).strip())
                    else:
                        data.append(i)
                return data
            else:
                return None
        except:
            # Lookup failed: persist the default and return what
            # write_config reports back.
            debug(ERROR = traceback.format_exc())
            debug(except_has = True)
            data = self.write_config(section, option, filename, value, cfg)
            debug(data = data)
            return data
def read_config5(self, section, option, filename='', verbosity=None): #format result: {aaa:bbb, ccc:ddd, eee:fff, ggg:hhh, qqq:xxx, yyy:zzz}
    """Read a multi-value option of "key:int" pairs into a dict.

    option: section, option, filename=''
    format result: {aaa:bbb, ccc:ddd, eee:fff, ggg:hhh, qqq:xxx, yyy:zzz}
    Values are coerced with int(); a non-numeric value raises ValueError.
    """
    filename = self.get_config_file(filename, verbosity)
    data = {}
    # Bug fix: the freshly created MultiOrderedDict parser was discarded and
    # the shared class-level configset.cfg used instead; read through the
    # local parser exactly as read_config2/3/4 do.
    cfg = ConfigParser.RawConfigParser(allow_no_value=True, dict_type=MultiOrderedDict)
    cfg.read(filename)
    cfg = cfg.get(section, option)
    for i in cfg:
        if "," in i:
            d1 = str(i).split(",")
            for j in d1:
                d2 = str(j).split(":")
                data.update({str(d2[0]).strip(): int(str(d2[1]).strip())})
        elif i:
            # Bug fix: the old code looped once per *character* of the entry,
            # performing the identical split/update each time; one split per
            # entry produces the same dict. Empty entries are skipped, which
            # matches the old behaviour (its char-loop never ran for '').
            e1 = str(i).split(":")
            data.update({str(e1[0]).strip(): int(str(e1[1]).strip())})
    return data
def read_config6(self, section, option, filename='', verbosity=None): #format result: {aaa:[bbb, ccc], ddd:[eee, fff], ggg:[hhh, qqq], xxx:[yyy:zzz]}
    """Read a multi-value option of "int:['a','b']" entries into a dict.

    option: section, option, filename=''
    format result: {aaa:[bbb, ccc], ddd:[eee, fff], ggg:[hhh, qqq]}
    Entries without a ':' are ignored.
    """
    filename = self.get_config_file(filename, verbosity)
    data = {}
    # Bug fix: the freshly created MultiOrderedDict parser was discarded and
    # the shared class-level configset.cfg used instead; read through the
    # local parser exactly as read_config2/3/4 do.
    cfg = ConfigParser.RawConfigParser(allow_no_value=True, dict_type=MultiOrderedDict)
    cfg.read(filename)
    cfg = cfg.get(section, option)
    for i in cfg:
        if ":" in i:
            d1 = str(i).split(":")
            d2 = int(str(d1[0]).strip())
            # Bug fix: the old code recomputed the identical split once per
            # character of d1[1] (and hit a NameError when d1[1] was empty);
            # a single split is equivalent.
            d3 = re.split("['|','|']", d1[1])
            d4 = str(d3[1]).strip()
            d5 = str(d3[-2]).strip()
            data.update({d2: [d4, d5]})
    return data
def get_config(self, section, option, filename='', value=None, verbosity=None):
    """Read section/option, creating it with *value* on the first miss.

    Returns the stored value; on a missing section or option the option is
    seeded via write_config() and re-read.
    """
    filename = self.get_config_file(filename, verbosity)
    try:
        data = self.read_config(section, option, filename, value)
    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
        # The two original except branches were byte-for-byte duplicates;
        # a single tuple clause keeps the behaviour with half the code.
        print(traceback.format_exc())
        self.write_config(section, option, filename, value)
        data = self.read_config(section, option, filename, value)
    return data
def get_config2(self, section, option, filename='', value=None, verbosity=None):
    """Like get_config() but returns the raw list form (read_config2)."""
    filename = self.get_config_file(filename, verbosity)
    try:
        data = self.read_config2(section, option, filename)
    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
        print(traceback.format_exc())
        # Bug fix: write_config was called without the filename argument, so
        # *value* was passed in the filename slot; pass filename explicitly,
        # matching get_config().
        self.write_config(section, option, filename, value)
        data = self.read_config2(section, option, filename)
    return data
def get_config3(self, section, option, filename='', value=None, verbosity=None):
    """Like get_config() but returns the nested list form (read_config3)."""
    filename = self.get_config_file(filename, verbosity)
    try:
        data = self.read_config3(section, option, filename)
    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
        print(traceback.format_exc())
        # Bug fix: write_config was called without the filename argument, so
        # *value* was passed in the filename slot; pass filename explicitly,
        # matching get_config().
        self.write_config(section, option, filename, value)
        data = self.read_config3(section, option, filename)
    return data
def get_config4(self, section, option, filename='', value='', verbosity=None):
    """Like get_config() but returns the flattened list form (read_config4)."""
    filename = self.get_config_file(filename, verbosity)
    try:
        data = self.read_config4(section, option, filename)
    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
        # Bug fix: write_config was called without the filename argument, so
        # *value* was passed in the filename slot; pass filename explicitly,
        # matching get_config().
        self.write_config(section, option, filename, value)
        data = self.read_config4(section, option, filename)
    return data
def get_config5(self, section, option, filename='', value=None, verbosity=None):
    """Like get_config() but returns the key:int dict form (read_config5)."""
    filename = self.get_config_file(filename, verbosity)
    try:
        data = self.read_config5(section, option, filename)
    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
        print(traceback.format_exc())
        # Bug fix: write_config was called without the filename argument, so
        # *value* was passed in the filename slot; pass filename explicitly,
        # matching get_config().
        self.write_config(section, option, filename, value)
        data = self.read_config5(section, option, filename)
    return data
def get_config6(self, section, option, filename='', value=None, verbosity=None):
    """Like get_config() but returns the key:list dict form (read_config6)."""
    filename = self.get_config_file(filename, verbosity)
    try:
        data = self.read_config6(section, option, filename)
    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
        print(traceback.format_exc())
        # Bug fix: write_config was called without the filename argument, so
        # *value* was passed in the filename slot; pass filename explicitly,
        # matching get_config().
        self.write_config(section, option, filename, value)
        data = self.read_config6(section, option, filename)
    return data
def write_all_config(self, filename='', verbosity=None):
    """Stub: resolves the config path but writes nothing (not implemented)."""
    filename = self.get_config_file(filename, verbosity)
def read_all_config(self, filename='', section=None, verbosity=None):
    """Return [[section_name, {option: value}], ...] for one or all sections.

    When *section* is falsy every section in the file is dumped.

    Bug fixes: the old signature used a mutable default (section=[]) which
    the all-sections branch appended into, so every later default call saw a
    stale non-empty list and wrongly took the single-section path; and one
    shared dict object was reused for every section, so each returned entry
    aliased the same merged mapping.
    """
    filecfg = self.get_config_file(filename, verbosity)
    configset.cfg.read(filecfg)
    dbank = []
    if section:
        data = {}
        for x in configset.cfg.options(section):
            d = configset.cfg.get(section, x)
            data.update({x: d})
        dbank.append([section, data])
    else:
        for i in configset.cfg.sections():
            data = {}
            for x in configset.cfg.options(i):
                d = configset.cfg.get(i, x)
                data.update({x: d})
            dbank.append([i, data])
    return dbank
def read_all_section(self, filename='', section='server', verbosity=None):
    """Collect every option value in *section*.

    Returns [host_port_pairs, raw_values]: values of the form "host:port"
    additionally contribute a [host, int(port)] pair to the first list.
    """
    cfg_path = self.get_config_file(filename, verbosity)
    configset.cfg.read(cfg_path)
    raw_values = []
    host_ports = []
    for opt in configset.cfg.options(section):
        entry = configset.cfg.get(section, opt)
        raw_values.append(entry)
        if entry and ":" in entry:
            pieces = str(entry).split(":")
            host_ports.append([str(pieces[0]).strip(), int(str(pieces[1]).strip())])
    return [host_ports, raw_values]
def test(self, verbosity=None):
    """Smoke test: dump the section list and the router host.

    Bug fixes: the method was declared without *self* yet used it, raising a
    NameError on every call; and *verbosity* was passed positionally where
    get_config_file expects the filename first.
    """
    filename = self.get_config_file('', verbosity)
    configset.cfg.read(filename)
    data = configset.cfg.sections()
    print(configset.cfg.get('router', 'host'))
    print(data)
def usage(self):
    """Command-line entry point: parse arguments and dispatch a read.

    -t selects which read_configN variant handles the -s/-o pair.
    NOTE(review): -w (write) is accepted but has no implementation here.
    """
    parser = argparse.ArgumentParser(formatter_class= argparse.RawTextHelpFormatter)
    parser.add_argument('CONFIG_FILE', action = 'store', help = 'Config file name path')
    parser.add_argument('-r', '--read', help = 'Read Action', action = 'store_true')
    parser.add_argument('-w', '--write', help = 'Write Action', action = 'store_true')
    parser.add_argument('-s', '--section', help = 'Section Write/Read', action = 'store')
    parser.add_argument('-o', '--option', help = 'Option Write/Read', action = 'store')
    parser.add_argument('-t', '--type', help = 'Type Write/Read', action = 'store', default = 1, type = int)
    if len(sys.argv) == 1:
        # No arguments at all: just show the help text.
        print "\n"
        parser.print_help()
    else:
        print "\n"
        args = parser.parse_args()
        if args.CONFIG_FILE:
            self.configname =args.CONFIG_FILE
            if args.read:
                # Dispatch to the read_config variant selected by -t.
                if args.type == 1:
                    if args.section and args.option:
                        self.read_config(args.section, args.option)
                elif args.type == 2:
                    if args.section and args.option:
                        self.read_config2(args.section, args.option)
                elif args.type == 3:
                    if args.section and args.option:
                        self.read_config3(args.section, args.option)
                elif args.type == 4:
                    if args.section and args.option:
                        self.read_config4(args.section, args.option)
                elif args.type == 5:
                    if args.section and args.option:
                        self.read_config5(args.section, args.option)
                elif args.type == 6:
                    if args.section and args.option:
                        self.read_config6(args.section, args.option)
                else:
                    print make_colors("INVALID TYPE !", 'white', 'red', ['blink'])
                    print "\n"
                    parser.print_help()
            else:
                print make_colors("Please use '-r' for read or '-w' for write", 'white', 'red', ['blink'])
                print "\n"
                parser.print_help()
        else:
            print make_colors("NO FILE CONFIG !", 'white', 'red', ['blink'])
            print "\n"
            parser.print_help()
# Module-level convenience API: one shared configset instance whose bound
# methods are re-exported, so callers can use e.g. configset.read_config(...)
# without instantiating the class themselves.
configset_class = configset()
get_config_file = configset_class.get_config_file
write_config = configset_class.write_config
write_config2 = configset_class.write_config2
read_config = configset_class.read_config
read_config2 = configset_class.read_config2
read_config3 = configset_class.read_config3
read_config4 = configset_class.read_config4
read_config5 = configset_class.read_config5
read_config6 = configset_class.read_config6
get_config = configset_class.get_config
get_config2 = configset_class.get_config2
get_config3 = configset_class.get_config3
get_config4 = configset_class.get_config4
get_config5 = configset_class.get_config5
get_config6 = configset_class.get_config6
write_all_config = configset_class.write_all_config
read_all_config = configset_class.read_all_config
read_all_section = configset_class.read_all_section
test = configset_class.test
usage = configset_class.usage
if __name__ == '__main__':
    usage()
import convert
# Tables managed by this module; used by truncate()/drop() when
# table_name == 'all'.
__TABLES__ = ['packages', 'localstorage']
class database(object):
    """Helper around the ``packages`` / ``localstorage`` tables.

    Call setup() once to open the configured back end (sqlite by default;
    mysql and postgres are supported, oracle is a stub). setup() populates
    ``self.conn`` / ``self.cursor`` and the CREATE statements that create()
    executes; the remaining methods operate through those handles.

    NOTE(review): this module appears to rely on module-level ``configset``,
    ``debug``, ``sys`` and ``traceback`` imports that are not visible in the
    excerpt -- confirm they are imported at the top of the file.
    """

    def __init__(self, host = None, port = None, username = None, password = None, dbname = None, table = None, dbtype = None):
        # Bug fix: ``super(database, self)`` built a super proxy but never
        # invoked __init__ on it, making the statement a no-op.
        super(database, self).__init__()
        self.host = host
        self.port = port
        self.username = username
        self.password = password
        self.dbname = dbname
        self.table = table
        self.dbtype = dbtype
        self.conn = None       # set by setup()
        self.cursor = None     # set by setup()
        self.SQL_CREATE = None
        self.SQL_CREATE_REPO = None

    def create(self):
        """Create both tables; setup() must have been called first."""
        self.cursor.execute(self.SQL_CREATE)
        self.conn.commit()
        # Bug fix: the second pair of statements used bare ``cursor`` /
        # ``conn`` names that do not exist in this scope (NameError); use the
        # instance attributes like the first pair does.
        self.cursor.execute(self.SQL_CREATE_REPO)
        self.conn.commit()

    def get(self, table_name):
        """Return every row of *table_name*."""
        exc01 = self.cursor.execute('SELECT * FROM %s;'%(table_name))
        self.conn.commit()
        return exc01.fetchall()

    def insert(self, datas=None, type = 'storage'):
        """Insert one row; *type* selects the table ('storage'|'database').

        For 'database', datas = (name, description, relpath); description
        and relpath are stored compressed via convert.convert(). If the
        first insert fails, a description-less insert is retried.
        """
        #type: database | storage
        if datas:
            SQL_INSERT = 'INSERT INTO packages (\'name\', \'description\', \'relpath\') VALUES("%s", "%s", "%s");' % (
                datas[0], convert.convert(datas[1]), convert.convert(datas[2]))
            SQL_INSERT_LOCALSTORAGE = 'INSERT INTO localstorage (\'total\', \'packages\') VALUES("%s", "%s");' % (
                datas[0], convert.convert(datas[1]))
            try:
                # print "SQL_INSERT =", SQL_INSERT
                if type == 'storage':
                    self.cursor.execute(SQL_INSERT_LOCALSTORAGE)
                elif type == 'database':
                    self.cursor.execute(SQL_INSERT)
                self.conn.commit()
            except:
                if type == 'database':
                    SQL_INSERT = "INSERT INTO packages ('name', 'relpath') VALUES('%s', '%s');" % (datas[0], convert.convert(datas[2]))
                    self.cursor.execute(SQL_INSERT)
                    self.conn.commit()

    def truncate(self, table_name = 'all'):
        """Delete all rows of one table, or of every __TABLES__ entry."""
        if not table_name == 'all':
            self.cursor.execute('DELETE FROM %s;'%(table_name))
            self.conn.commit()
            self.cursor.execute('VACUUM;')
            self.conn.commit()
        else:
            for i in __TABLES__:
                self.cursor.execute('DELETE FROM %s;'%(i))
                self.conn.commit()
                self.cursor.execute('VACUUM;')
                self.conn.commit()

    def drop(self, table_name = 'all'):
        """Drop one table, or every __TABLES__ entry."""
        if not table_name == 'all':
            SQL_DROP = "DROP TABLE %s;" % (table_name)
            self.cursor.execute(SQL_DROP)
            self.conn.commit()
        else:
            for i in __TABLES__:
                SQL_DROP = "DROP TABLE %s;" % (i)
                self.cursor.execute(SQL_DROP)
                self.conn.commit()

    def setup(self, type='storage'):
        """Open the configured connection and store it on the instance.

        Connection settings come from constructor arguments, falling back to
        lfd.ini section DATABASE.
        """
        #type: storage | database
        config = configset.configset()
        config.configname = 'lfd.ini'
        dbname = self.dbname
        host = self.host
        port = self.port
        username = self.username
        password = self.password
        dbtype = self.dbtype
        if not dbname:
            dbname = config.read_config('DATABASE', 'name', value='lfd.db3')
        if not host:
            host = config.read_config('DATABASE', 'host', value='127.0.0.1')
        if not port:
            port = config.read_config('DATABASE', 'port', value='3306')
        if not dbtype:
            dbtype = config.read_config('DATABASE', 'type', value='sqlite')
        if not username:
            username = config.read_config('DATABASE', 'username', value='root')
        if not password:
            password = config.read_config('DATABASE', 'password', value='')
        debug(dbname=dbname)
        debug(host=host)
        debug(port=port)
        debug(dbtype=dbtype)
        if dbtype == 'sqlite':
            try:
                # from sqlite3 import dbapi2 as sqlite
                import sqlite3 as sqlite
            except ImportError:
                sys.exit(
                    "You not have module \"pysqlite2\", please download before ! \n")
            except:
                traceback.format_exc()
                sys.exit("ERROR by SYSTEM")
            SQL_CREATE = '''CREATE TABLE IF NOT EXISTS packages ( \
'id' INTEGER PRIMARY KEY AUTOINCREMENT, \
'name' VARCHAR(255) NOT NULL, \
'description' VARCHAR(255) NOT NULL, \
'relpath' VARCHAR(255) NOT NULL);'''
            SQL_CREATE_REPO = '''CREATE TABLE IF NOT EXISTS localstorage ( \
'total' VARCHAR(255) NOT NULL, \
'packages' VARCHAR(255) NOT NULL);'''
            self.SQL_CREATE = SQL_CREATE
            self.SQL_CREATE_REPO = SQL_CREATE_REPO
            conn = sqlite.connect(dbname)
            self.conn = conn
            cursor = conn.cursor()
            self.cursor = cursor
        elif dbtype == 'mysql':
            SQL_CREATE = '''CREATE TABLE IF NOT EXISTS packages ( \
`id` BIGINT(100) AUTO_INCREMENT NOT NULL PRIMARY KEY, \
`name` VARCHAR(255) NOT NULL, \
`description` VARCHAR(255) NOT NULL, \
`relpath` VARCHAR(255) NOT NULL)'''
            self.SQL_CREATE = SQL_CREATE
            try:
                import MySQLdb
                conn = MySQLdb.connect(
                    host, username, password, dbname, port)
                cursor = conn.cursor()
            except ImportError:
                sys.exit(
                    "You not have module \"MySQLdb\", please download before ! \n")
            except:
                try:
                    conn = MySQLdb.connect(
                        host, username, password, port=port)
                    cursor = conn.cursor()
                    cursor.execute(SQL_CREATE)
                    conn.commit()
                except:
                    traceback.format_exc()
                    sys.exit("ERROR by SYSTEM")
            # Bug fixes: the connection was never stored, leaving
            # self.conn/self.cursor as None for every later call; and the old
            # trailing ``if datas:`` insert block referenced an undefined
            # name (NameError) -- row insertion is insert()'s job.
            self.conn = conn
            self.cursor = cursor
        elif dbtype == 'oracle':
            SQL_CREATE = '''CREATE TABLE IF NOT EXISTS packages ( \
'id' BIGINT(100) GENERATED ALWAYS AS IDENTITY (START WITH 1 INCREMENT BY 1) NOT NULL PRIMARY KEY, \
'name' VARCHAR(255) NOT NULL, \
'description' VARCHAR(255) NOT NULL, \
'relpath' VARCHAR(255) NOT NULL)'''
            sys.exit("STILL DEVELOPMENT, PLEASE USE ANOTHER DATABASE TYPE")
        elif dbtype == 'postgres':
            SQL_CREATE = '''CREATE TABLE IF NOT EXISTS packages ( \
'id' BIGSERIAL NOT NULL PRIMARY KEY, \
'name' VARCHAR(255) NOT NULL, \
'description' VARCHAR(255) NOT NULL, \
'relpath' VARCHAR(255) NOT NULL)'''
            self.SQL_CREATE = SQL_CREATE
            try:
                import psycopg2
                # Bug fix: libpq connection strings are space-separated
                # key=value pairs; the old comma-separated string was not a
                # valid conninfo.
                conn = psycopg2.connect("dbname=%s user=%s password=%s host=%s port=%s" % (
                    dbname, username, password, host, port))
                cursor = conn.cursor()
                cursor.execute(SQL_CREATE)
                conn.commit()
                # Store the handles, as the sqlite branch does.
                self.conn = conn
                self.cursor = cursor
            except ImportError:
                sys.exit(
                    "You not have module \"Psycopg2\", please download before ! \n")
            except:
                traceback.format_exc()
                sys.exit("ERROR by SYSTEM")
#!/usr/bin/env python
"""
Very simple HTTP server in python.
Usage::
./server.py [<port>]
Send a GET request::
curl http://localhost
Send a HEAD request::
curl -I http://localhost
Send a POST request::
curl -d "foo=bar&bin=baz" http://localhost
"""
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
import SocketServer
from SocketServer import ThreadingMixIn
# import threading
import os
import sys
import time
import parserheader
import json
import re
from lfd import lfd
# from make_colors import make_colors
from debug2 import debug
import traceback
import configset
import pm
import vping
import socket
import sendgrowl
from multiprocessing.pool import ThreadPool
from progressbar import AnimatedMarker, Bar, BouncingBar, Counter, ETA, \
FileTransferSpeed, FormatLabel, Percentage, \
ProgressBar, ReverseBar, RotatingMarker, \
SimpleProgress, Timer, AdaptiveETA, AbsoluteETA, AdaptiveTransferSpeed
# Shared progress-bar state (mutated by main()/monitor_repo_dir()).
MAXVAL=100
EVENT="UPDATE REPO"
WIDGETS = ['%s: '%(EVENT), Percentage(), ' | ', ETA(), ' | ', AbsoluteETA()]
PBAR = ProgressBar(widgets=WIDGETS, maxval=MAXVAL)
PID = os.getpid()
# Parsed remote index / local repository maps, filled in by prepare() and
# refreshed by monitor_repo_dir(); '' means "not scanned yet".
PACKAGES = ''
PACKAGE_LOCALSTORAGE = ''
SCAN_FINISHED = False
config = configset.configset()
config.configname = 'lfd.ini'
class_pm = pm.pm()
class ThreadedHTTPServer(ThreadingMixIn, HTTPServer):
    # Handle each request in its own thread (ThreadingMixIn) so one slow
    # download does not block other clients.
    pass
class S(BaseHTTPRequestHandler):
    """Request handler serving a minimal pip "simple index".

    GET for a package name returns an HTML link page assembled from the
    parsed remote index (PACKAGES) and/or the local repository
    (PACKAGE_LOCALSTORAGE); GET for an archive filename streams the file
    from local storage.
    """
    # def debug_server_client(self, msg, server_host = '127.0.0.1', port = 50001):
    # global config
    # DEBUG_SERVER = config.read_config('DEBUG', 'HOST', value='%s:%s'%(server_host, str(port)))
    # print "DEBUG_SERVER =", DEBUG_SERVER
    # s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    # if DEBUG_SERVER:
    # if ":" in DEBUG_SERVER:
    # host, port = str(DEBUG_SERVER).strip().split(":")
    # port = int(port.strip())
    # host = host.strip()
    # else:
    # host = DEBUG_SERVER.strip()
    # print "host =", host
    # print "port =", port
    # s.sendto(msg, (host, int(port)))
    # s.close()
    # def debug(self, defname = None, debug_this = None, debug_server = False, line_number = '', print_function_parameters = False, **kwargs):
    # msg = debug(defname, debug_this, debug_server, line_number, print_function_parameters, no_debugserver=True, **kwargs)
    # self.debug_server_client(msg)
    def setup(self):
        """Standard setup plus a per-request socket timeout so stalled
        clients do not pin a handler thread forever."""
        BaseHTTPRequestHandler.setup(self)
        self.request.settimeout(150)
    def _set_headers(self):
        """Send a 200 HTML response header with keep-alive enabled."""
        self.send_response(200)
        self.send_header('Content-type', 'text/html; charset=UTF-8')
        self.send_header("Connection", "keep-alive")
        self.send_header("keep-alive", "timeout=60, max=30")
        self.end_headers()
    def _response_file_static(self, file_path):
        """Stream *file_path* as an attachment (whl/gz/exe); 400 if missing."""
        if os.path.isfile(file_path):
            self.send_response(200)
            debug(file_path_is_file=os.path.isfile(file_path))
            # Content type chosen by extension.
            if file_path.endswith(".whl"):
                # self.send_header('Content-type', "application/octet-stream")
                self.send_header('Content-type', "application/zip")
            elif file_path.endswith(".gz"):
                self.send_header('Content-type', 'application/x-tar')
            elif file_path.endswith(".exe"):
                self.send_header('Content-type', 'application/x-msdownload')
            self.send_header("Connection", "keep-alive")
            self.send_header("keep-alive", "timeout=60, max=30")
            self.send_header('Content-Disposition',
                             'attachment; filename="%s"' % (os.path.basename(file_path)))
            # not sure about this part below
            with open(file_path, 'rb') as file:
                # Read the file and send the contents
                file_read = file.read()
                file_length = len(file_read)
                self.send_header("Content-Length", file_length)
                self.end_headers()
                self.wfile.write(file_read)
        else:
            self.send_response(400)
    def _message_pack(self, found_package_name, found_link='#', found_name='#', repeat=None):
        """Build the simple-index HTML link page.

        When *repeat* is given it must be [({'name': name, 'link': link})]
        and one anchor per entry is emitted; otherwise a single anchor from
        found_link/found_name is produced.
        """
        # repeat must be: [({'name':name, 'link': link})]
        debug(repeat = repeat)
        message = """<html><head><title>Links for %s</title></head><body>
<h1>Links for %s</h1>
<a href="%s">%s</a><br>
</body></html>""" % (found_package_name, found_package_name, found_link, found_name)
        if repeat:
            message = """<html><head><title>Links for %s</title></head><body>
<h1>Links for %s</h1>""" % (found_package_name, found_package_name)
            for i in repeat:
                message = message + \
                    """<a href="%s">%s</a><br>""" % (
                        i.get('link'), i.get('name'))
            message = message + """</body></html>"""
        debug(message = message)
        return message
    def do_GET(self):
        """Serve either a package link page or an archive download.

        The last path component is the lookup key: archive extensions are
        served from local storage; anything else is treated as a package
        name and answered with a link page.
        """
        global PACKAGES
        global PACKAGE_LOCALSTORAGE
        global config
        global class_pm
        LFD_FOUND = False
        STORAGE_FOUND = False
        SPESIFIC = config.read_config('GENERAL', 'spesific', value='0')
        lfd_class = lfd()
        if os.getenv('DEBUG_EXTRA'):
            print "dir(self) =", dir(self)
            print "-" * 100
            for i in dir(self):
                print i, "=", getattr(self, i)
            print "=" * 100
        # Client info (python version / cpu) is smuggled in the User-Agent.
        user_agent_string = parserheader.parserHeader(
            self.headers).get('User-Agent')
        user_agent, user_agent_split = parserheader.UserAgent(
            user_agent_string)
        debug(USER_AGENT_STRING = user_agent_string)
        debug(USER_AGENT = user_agent)
        debug(USER_AGENT_SPLIT = user_agent_split)
        info_header = json.loads(user_agent_split[1])
        debug(INFO_HEADER = info_header)
        debug(PATH = self.path)
        path_split = re.split('/', self.path)
        try:
            # NOTE(review): removing from a list while iterating it only
            # strips some '' entries -- confirm whether that is acceptable.
            for i in path_split:
                path_split.remove('')
        except:
            pass
        debug(PATH_SPLIT = path_split)
        find_name = path_split[-1].lower()
        debug(find_name = find_name)
        message_repeat_localstorage = []
        message_repeat_lfd = []
        message_repeat_all = []
        print "self_header =", self.headers
        # if not LFD_FOUND:
        debug(SPESIFIC = SPESIFIC)
        if SPESIFIC == '1':
            # "Specific" mode: restrict matches to the client's python
            # version/architecture tag (e.g. cp27 / win_amd64).
            find_python_version = "cp" + \
                "".join(info_header.get('python').split('.', 2)[:2])
            find_architecture = info_header.get('cpu').lower()
            if find_architecture == 'amd64':
                find_architecture = 'win_amd64'
            if find_architecture == 'win32':
                find_architecture = 'win32'
            if PACKAGE_LOCALSTORAGE.get(find_name):
                # NOTE(review): ``i`` is not defined on this path (latent
                # NameError) -- probably meant find_name; confirm.
                file_path = lfd_class.find_localstorage(
                    find_python_version, find_architecture, PACKAGES.get(i))
                # message = self._message_pack(find_name, os.path.basename(
                # file_path), os.path.basename(file_path))
                # self._set_headers()
                # self.wfile.write(message)
                message_repeat_localstorage.append(({'name':find_name, 'link':os.path.basename(find_name)}))
                STORAGE_FOUND = True
            # else:
            # self.send_response(404)
            # if SPESIFIC == "1":
            # find_python_version = "cp" + \
            # "".join(info_header.get('python').split('.', 2)[:2])
            # find_architecture = info_header.get('cpu').lower()
            # if find_architecture == 'amd64':
            # find_architecture = 'win_amd64'
            found_link, found_name, found_package_name = lfd_class.find(
                find_name, find_python_version, find_architecture, PACKAGES)
            if found_link:
                LFD_FOUND = True
                message_repeat_lfd.append(({'name':found_name, 'link': found_link}))
                # message = self._message_pack(
                # found_package_name, found_link, found_name)
                # self._set_headers()
                # self.wfile.write(message)
        else:
            # Generic mode: list every matching file from local storage and
            # from the parsed remote index.
            if os.getenv('DEBUG_EXTRA'):
                debug(PACKAGE_LOCALSTORAGE = PACKAGE_LOCALSTORAGE)
            debug(find_name = find_name)
            found_list_package_localstorage = PACKAGE_LOCALSTORAGE.get(find_name)
            debug(found_list_package_localstorage = found_list_package_localstorage)
            if not found_list_package_localstorage:
                # Retry with '-' normalised to '.' (package-name variants).
                found_list_package_localstorage = PACKAGE_LOCALSTORAGE.get(str(find_name).replace("-", "."))
                debug(found_list_package_localstorage_changed = found_list_package_localstorage)
            # repeat must be: [({'name':name, 'link': link})]
            # message_repeat = []
            if found_list_package_localstorage:
                STORAGE_FOUND = True
                for i in found_list_package_localstorage:
                    # message_repeat.append(({'name': os.path.basename(i), 'link':os.path.basename(i)}))
                    message_repeat_localstorage.append(({'name': os.path.basename(i), 'link':os.path.basename(i)}))
                debug(message_repeat_localstorage=message_repeat_localstorage)
            # debug(message_repeat = message_repeat)
            #if not message_repeat:
            #self.send_response(404)
            #else:
            # message = self._message_pack(find_name, repeat=message_repeat)
            # debug(message = message)
            # self._set_headers()
            # self.wfile.write(message)
            for i in PACKAGES:
                if PACKAGES.get(i).get('package_name') == find_name:
                    all_packages = PACKAGES.get(i).get('packages')
                    for p in all_packages:
                        # message_repeat.append(
                        # ({'name': all_packages.get(p).get('name'), 'link': all_packages.get(p).get('link')}))
                        message_repeat_lfd.append(
                            ({'name': all_packages.get(p).get('name'), 'link': all_packages.get(p).get('link')}))
                    debug(message_repeat_lfd=message_repeat_lfd)
                    LFD_FOUND = True
                    # message = self._message_pack(find_name, repeat=message_repeat)
                    # self._set_headers()
                    # self.wfile.write(message)
        if os.getenv('DEBUG_EXTRA'):
            debug(PACKAGE_LOCALSTORAGE = PACKAGE_LOCALSTORAGE)
        if find_name.endswith(".gz") or find_name.endswith(".whl") or find_name.endswith(".zip") or find_name.endswith(".exe"):
            # Archive request: map the filename back to its package name and
            # stream the matching file from local storage.
            find_name = path_split[-1]
            debug(find_name = find_name)
            debug(find_name = '', endswith = os.path.splitext(find_name)[1])
            found_name_localstorage = class_pm.translate(find_name)
            debug(found_name_localstorage = found_name_localstorage)
            found_package_localstorage = PACKAGE_LOCALSTORAGE.get(found_name_localstorage.lower())
            debug(found_package_localstorage = found_package_localstorage)
            if found_package_localstorage:
                for i in found_package_localstorage:
                    if os.path.basename(i) == find_name:
                        debug(i = i)
                        self._response_file_static(i)
            else:
                self.send_response(404)
            # else:
            # else:
            # packages = PACKAGES
            # repeat must be: [({'name':name, 'link': link})]
            # message_repeat = []
            # debug(LFD_FOUND = LFD_FOUND)
        else:
            # Package-name request: emit the combined link page (or 400 if
            # nothing matched anywhere).
            debug("BEGIN MESSAGE PACK")
            debug(STORAGE_FOUND=STORAGE_FOUND)
            debug(LFD_FOUND=LFD_FOUND)
            message_repeat_all = message_repeat_lfd + message_repeat_localstorage
            debug(message_repeat_all=message_repeat_all)
            # if STORAGE_FOUND:
            # message_repeat_all = message_repeat_all + message_repeat_localstorage
            # debug(message_repeat_all)
            # if LFD_FOUND:
            # message_repeat_all = message_repeat_all + message_repeat_lfd
            # debug(message_repeat_all)
            if message_repeat_all:
                self._set_headers()
                message = self._message_pack(find_name, repeat=message_repeat_all)
                debug(message=message)
                self.wfile.write(message)
            else:
                self.send_response(400)
    def do_HEAD(self):
        """HEAD: headers only."""
        self._set_headers()
    def do_POST(self):
        """Echo stub: read the posted body and reply with a fixed page."""
        # <--- Gets the size of data
        content_length = int(self.headers['Content-Length'])
        print "content_length =", content_length
        # <--- Gets the data itself
        post_data = self.rfile.read(content_length)
        print "post_data =", post_data
        self._set_headers()
        self.wfile.write("<html><body><h1>POST!</h1></body></html>")
def monitor_repo_dir():
    """Background watcher: rescan whenever the repo fingerprint changes.

    Polls the configured package directory every SCANNER/timeout seconds;
    when the folder fingerprint (recursive size on py3, dir mtime on py2)
    differs from the stored total, re-parses the index in a worker thread,
    updating PACKAGE_LOCALSTORAGE and driving the module progress bar.
    Setting LFD_STOP_UPDATE=1 in the environment stops the loop.
    """
    global PACKAGE_LOCALSTORAGE
    global config
    global SCAN_FINISHED
    global MAXVAL
    global EVENT
    global WIDGETS
    global PBAR
    # PBAR.start()
    TOTAL = 0
    n_progress = 1
    pool = ThreadPool(processes= 1)
    if sys.version_info.major == 3:
        def folder_size():
            # Recursive sum of file sizes as the change fingerprint.
            total = 0
            for entry in os.scandir(path):
                if entry.is_file():
                    total += entry.stat().st_size
                elif entry.is_dir():
                    total += folder_size(entry.path)
            return total
    elif sys.version_info.major == 2:
        def folder_size():
            # Python 2 fallback: directory mtime as the fingerprint.
            return os.stat(path).st_mtime
    #def update_package_localstorage():
    # lfd_class = lfd()
    # print "running monitoring repo dir 1"
    path = config.read_config('PACKAGES', 'dir')
    timeout = config.read_config('SCANNER', 'timeout', value=1)
    if timeout:
        timeout = int(timeout)
    else:
        timeout = 1
    # print("PATH =",path)
    debug(PATH=path)
    TOTAL = folder_size()
    config.write_config('PACKAGES', 'total', value=TOTAL)
    debug(TOTAL=TOTAL)
    while 1:
        if path and SCAN_FINISHED:
            total1 = folder_size()
            if not total1 == TOTAL and SCAN_FINISHED:
                # Repository changed: re-scan in a worker thread, spinning
                # the progress bar until the parser returns a result.
                # print "REPO CHANGED !"
                PBAR.start()
                sendnotify("REPO CHANGED !", 'Monitoring')
                debug("REPO CHANGED !", 'white', 'red')
                TOTAL = total1
                config.write_config('PACKAGES', 'total', value=TOTAL)
                try:
                    SCAN_FINISHED = False
                    lfd_class = lfd()
                    tx = pool.apply_async(lfd_class.parser_content, ())
                    while 1:
                        if not tx.get():
                            SCAN_FINISHED = False
                            # progressbar(1)
                            EVENT = "UPDATE REPO CHANGED"
                            if not n_progress == 100:
                                PBAR.update(n_progress)
                                n_progress += 1
                            else:
                                PBAR.update(0)
                                n_progress = 1
                        else:
                            PACKAGE_LOCALSTORAGE = tx.get()
                            SCAN_FINISHED = True
                            PBAR.finish()
                            break
                    # PACKAGE_LOCALSTORAGE = lfd_class.identify_localstorage(path)
                    # print "SUCCESS UPDATE REPO !"
                    sendnotify("SUCCESS UPDATE REPO !", 'Update')
                    debug("SUCCESS UPDATE REPO !", 'white', 'red')
                except:
                    # NOTE(review): traceback.format_exc() result is
                    # discarded here, so scan failures are silent.
                    PBAR.finish()
                    traceback.format_exc()
                path = config.read_config('PACKAGES', 'dir')
                # print("PATH =",path)
                debug(PATH=path)
                timeout = config.read_config('SCANNER', 'timeout', value=1)
                if timeout:
                    timeout = int(timeout)
                else:
                    timeout = 1
                time.sleep(timeout)
            else:
                # print "monitoring ..."
                path = config.read_config('PACKAGES', 'dir')
                timeout = config.read_config('SCANNER', 'timeout', value=1)
                if timeout:
                    timeout = int(timeout)
                else:
                    timeout = 1
                time.sleep(timeout)
        else:
            # Initial scan still running elsewhere: just re-read settings
            # and wait.
            # print "still scanning ..."
            # progressbar(1)
            path = config.read_config('PACKAGES', 'dir')
            # print("PATH =",path)
            debug(PATH=path)
            timeout = config.read_config('SCANNER', 'timeout', value=1)
            if timeout:
                timeout = int(timeout)
            else:
                timeout = 1
            time.sleep(timeout)
        if os.getenv('LFD_STOP_UPDATE') == '1':
            break
def progressbar(value, isFinish=False):
    """Spin a fresh progress bar until the global SCAN_FINISHED flag flips.

    *value* and *isFinish* are accepted for API compatibility but unused.
    """
    bar = ProgressBar(
        widgets=['%s: ' % ("UPDATE REPO 1"), Percentage(), ' | ', ETA(), ' | ', AbsoluteETA()],
        maxval=100).start()
    step = 1
    while True:
        if SCAN_FINISHED:
            bar.finish()
            break
        if step == 100:
            # Wrap around rather than overflow maxval.
            bar.update(0)
            step = 0
        else:
            bar.update(step)
            step += 1
def sendnotify(message, event='Control', title="FastPypi"):
    """Best-effort Growl notification; never raises.

    Bug fix: the exception handler called traceback.format_exc() but
    discarded the result (and then `pass`ed), hiding every delivery failure;
    print the traceback so problems are at least visible.
    """
    try:
        mclass = sendgrowl.growl()
        icon = os.path.join(os.path.dirname(__file__), 'notify.png')
        appname = 'FastPypi'
        mclass.publish(appname, event, title, message, iconpath=icon)
    except:
        # Deliberately broad: notification failure must never break the
        # server, but it should be logged.
        print(traceback.format_exc())
def prepare():
    """Initial scan: fill PACKAGES and (if the repo changed) PACKAGE_LOCALSTORAGE.

    Runs lfd.parser_content() and lfd.identify_localstorage() via a thread
    pool, then sets SCAN_FINISHED so main() can start the HTTP server.
    NOTE(review): when the folder fingerprint equals the stored total,
    SCAN_FINISHED is never set True and main() spins forever -- confirm
    whether that is intended.
    """
    global PACKAGES
    global PACKAGE_LOCALSTORAGE
    global config
    global SCAN_FINISHED
    path = config.read_config('PACKAGES', 'dir')
    if sys.version_info.major == 3:
        def folder_size():
            # Recursive sum of file sizes as the change fingerprint.
            total = 0
            for entry in os.scandir(path):
                if entry.is_file():
                    total += entry.stat().st_size
                elif entry.is_dir():
                    total += folder_size(entry.path)
            return total
    elif sys.version_info.major == 2:
        def folder_size():
            # Python 2 fallback: directory mtime as the fingerprint.
            return os.stat(path).st_mtime
    TOTAL = float(folder_size())
    TOTAL_CONFIG = float(config.read_config('PACKAGES', 'total', value='0'))
    lfd_class = lfd()
    from multiprocessing.pool import ThreadPool
    pool = ThreadPool(processes= 2)
    t1 = pool.apply_async(lfd_class.parser_content, ())
    while 1:
        # Busy-wait (1s steps) until the remote index parse returns.
        if not t1.get():
            time.sleep(1)
        else:
            PACKAGES = t1.get()
            break
    if not TOTAL == TOTAL_CONFIG:
        if not PACKAGE_LOCALSTORAGE:
            t2 = pool.apply_async(lfd_class.identify_localstorage, ())
            while 1:
                if not t2.get():
                    time.sleep(1)
                else:
                    PACKAGE_LOCALSTORAGE = t2.get()
                    SCAN_FINISHED = True
                    if os.getenv('DEBUG_EXTRA'):
                        debug(PACKAGE_LOCALSTORAGE = PACKAGE_LOCALSTORAGE)
                    break
def run(server_class=HTTPServer, handler_class=S, port=80):
    """Bind the threaded HTTP server on all interfaces and serve forever.

    *server_class* is accepted for API compatibility but ThreadedHTTPServer
    is always used. Ctrl-C closes the server and exits.
    """
    try:
        server_address = ('', port)
        server_address_print = server_address
        if server_address[0] == '':
            # '' binds every interface; show 0.0.0.0 to the operator.
            server_address_print = ('0.0.0.0', server_address[1])
        # httpd = server_class(server_address, handler_class)
        httpd = ThreadedHTTPServer(server_address, handler_class)
        # PBAR.start()
        if not vping.vping('8.8.8.8'):
            # print make_colors('NO INTERNET CONNECTION, SERVER WILL RUNNING AS LOCAL PYPI !', 'white', 'red', attrs=['blink'])
            print 'NO INTERNET CONNECTION, SERVER WILL RUNNING AS LOCAL PYPI !'
        print "Server Bind %s:%s" % server_address_print
        httpd.serve_forever()
    except KeyboardInterrupt:
        httpd.server_close()
        sys.exit('Server shutdown ...')
    except:
        # NOTE(review): traceback.format_exc() result is discarded here, so
        # start-up errors vanish silently.
        traceback.format_exc()
def main(port=80):
    """Start-up sequence: prepare() in a thread behind a progress bar, then
    launch the repo monitor and the (blocking) HTTP server."""
    print 'Starting httpd ...'
    global MAXVAL
    global EVENT
    global WIDGETS
    global PBAR
    global SCAN_FINISHED
    n_progress = 1
    # PBAR = ProgressBar(widgets=WIDGETS, maxval=MAXVAL).start()
    PBAR.start()
    from threading import Thread
    # from multiprocessing.pool import ThreadPool
    # pool = ThreadPool(processes= 1)
    # t1 = pool.apply_async(prepare, ())
    # t3 = pool.apply_async(monitor_repo_dir, ())
    a = Thread(target=prepare)
    a.start()
    while 1:
        # print "SCAN_FINISHED =", SCAN_FINISHED
        if not SCAN_FINISHED:
            # Keep the bar moving (wraps at 100) while prepare() works.
            # if a.is_alive():
            if not n_progress == 100:
                PBAR.update(n_progress)
                n_progress += 1
                time.sleep(0.5)
            else:
                PBAR.update(0)
                n_progress = 1
        else:
            # Scan done: watch the repo in the background and serve in the
            # foreground (run() blocks until shutdown).
            PBAR.finish()
            pool = ThreadPool(processes= 1)
            t1 = pool.apply_async(monitor_repo_dir, ())
            run(port=port)
            break
if __name__ == "__main__":
    # Optional single CLI argument: the port to bind (default 80).
    print "PID:", PID
    from sys import argv
    if len(argv) == 2:
        # run(port=int(argv[1]))
        main(port=int(argv[1]))
    else:
        # run()
        main()
    #path = r'D:\PROJECTS\python-mpd2\apps\python-mpd2'
    #monitor_repo_dir(path)
    # global PBAR
    # PBAR.start()
    # for i in range(100):
    # progressbar(i)
| {"/database.py": ["/convert.py"]} |
import base64
import zlib
def deconvert(s):
    """Inverse of convert(): base64-decode *s*, zlib-decompress, return text."""
    compressed = base64.b64decode(s.encode('ascii'))
    return zlib.decompress(compressed).decode('utf-8')
def convert(s):
    """Compress *s* with zlib and return it as an ASCII base64 string."""
    packed = zlib.compress(s.encode('utf-8'))
    return base64.b64encode(packed).decode('ascii')
if __name__ == '__main__':
    # Round-trip demo: decompress the sample blob, then re-compress it.
    print "EXAMPLE:"
    # NOTE(review): the literal below opens with four quote characters, so
    # the payload begins with a stray '"' that is not valid base64 --
    # deconvert() will likely fail on it; confirm the intended sample data.
    data = """"eJx9UktPg0AQvpPwC3qZkBLlVurJNiRiQCEpjxDEmJpMVBYhKawR1L/vbnmtWMqJYeZ7zXBD3nIKNMtkqSYNKIkbxQ/mDm0/MRCFClGRJVkqMkhJVlQkBQx21jARRoEXxnApS8CeI1X7yVBPzKmMSwNyqEmPYLwVbQbuv3RTymW4vFfaliYInhAyVEGwi9h1Fnt9pW+vr8ox5qPrt03ExX5VggCWpcj2wKLVRQNf3LYGDYWXb1qk8PFJXw+krOGnaHJoclJCUTGwGTvqmY09xU7gO4FnwztlXLZvuXcJr2cjDQgWa3jn7jYiuI0pDPfuZ50wo3zVkxv8nxF88loXj83q6al5/tFch+BGxtOftTSRW8/u5ah9Qm7dL6OzJ/zO6jPireujb3o24nZA/wL0gNga"""
    print "DATA:", data
    data_deconvert = deconvert(data)
    print "data_deconvert =", data_deconvert
    print "="* 120
    print "data_convert =", convert(data_deconvert)
import os
import traceback
import sys
import tarfile
import zipfile
import shutil
import argparse
import time
import send2trash
import thread
from make_colors import make_colors
import easygui
from debug import debug
# debug = debugger.debug
class pm(object):
def __init__(self, MASTER_PATH = None):
super(pm, self)
self.MASTER_PATH = MASTER_PATH
#debug(self_MASTER_PATH_0 = self.MASTER_PATH)
def translate(self, name):
if str(name).endswith('.gz') or str(name).endswith('.zip') or str(name).endswith('.exe') or str(name).endswith('.msi') or str(name).endswith('.rar') or str(name).endswith('.whl') or str(name).endswith('.egg') or str(name).endswith('.bz2') or str(name).endswith('.tgz'):
basename = os.path.basename(name)
n = 2
if "-" in name:
len_basedname = len(str(name).split("-"))
#print "len_basedname =", len_basedname
basedname = str(basename).split("-",1)
#print "basedname 0 =", basedname
while 1:
if n <= len_basedname:
if basedname[n-1][0] == '' or basedname[n-1][0] == ' ':
break
else:
if not str(basedname[n-1][0]).isdigit():
#print "str(basedname[1][0]) =", str(basedname[1][0])
basedname = str(basename).split("-", n)
#print "basedname 1 =", basedname
n += 1
else:
if n == len_basedname:
break
break
else:
break
#print "basedname 0 =", basedname
#print "n =", n
#print '"-".join(basedname[0:n-1]).strip() =', "-".join(basedname[0:n-1]).strip()
#print "-" * 200
#print "name =", name
#print "join 0 =", "-".join(basedname[0:n-1]).strip()
if os.path.basename(name) == "-".join(basedname[0:n-1]).strip():
basedname = str(name).split("-", 1)
#print "basedname 0 =", basedname
return basedname[0]
else:
#print "basedname 1 =", basedname
return "-".join(basedname[0:n-1]).strip()
else:
if "." in name:
basedname = str(basename).split(".",1)
return basedname[0].strip()
else:
if " " in name:
basedname = str(basename).split(" ",1)
return basedname[0].strip()
return False
def check_archive(self, name):
if str(name).endswith(".gz"):
try:
tarname = tarfile.open(name)
return True
except:
return False
if str(name).endswith(".zip"):
try:
zipname = zipfile.ZipFile(name)
testzip = zipname.testzip()
if testzip == None:
return True
else:
return False
except:
print "ERROR:",
print traceback.format_exc()
return False
def moved(self, dirpath, path=None, overwrite=None, quiet=None, masterpath=None, noclean=None, verbosity = False, backup = False, simulate = False):
filename = os.listdir(dirpath)
if backup and not simulate:
BACKUP_PATH = os.path.join(self.MASTER_PATH, "BACKUP")
if not os.path.isdir(BACKUP_PATH):
os.makedirs(BACKUP_PATH)
is_overwrite = False
if masterpath != None:
self.MASTER_PATH = masterpath
else:
self.MASTER_PATH = self.MASTER_PATH
for i in filename:
i = os.path.join(self.MASTER_PATH, i)
#print "i =", i
if os.path.isfile(i):
#print "translate =", self.translate(i)
#print "-" * 200
if not self.translate(i):
pass
else:
M_PATH = os.path.join(self.MASTER_PATH, self.translate(i))
DEST_NAME = os.path.join(M_PATH, os.path.basename(i))
if path:
if not os.path.isdir(path) and not simulate:
os.makedirs(path)
M_PATH = path
if not os.path.isdir(M_PATH) and not simulate:
try:
os.makedirs(M_PATH)
except:
pass
#print "DEST_NAME =", DEST_NAME
#print "M_PATH =", M_PATH
#print "i =", i
if verbosity:
if overwrite:
if backup:
print make_colors('COPY', 'white', 'blue') + " " + make_colors("[OVERWRITE-BACKUP]", 'white', 'red') + ": " + make_colors(str(i), 'yellow') + make_colors(" --> ", 'green') + make_colors(os.path.split(os.path.split(DEST_NAME)[0])[0], 'blue') + "\\" + make_colors(os.path.split(os.path.split(DEST_NAME)[0])[1], 'cyan') + "\\" + make_colors(os.path.split(DEST_NAME)[1], 'magenta')
else:
print make_colors('COPY', 'white', 'blue') + " " + make_colors("[OVERWRITE]", 'white', 'red') + ": " + make_colors(str(i), 'yellow') + make_colors(" --> ", 'green') + make_colors(os.path.split(os.path.split(DEST_NAME)[0])[0], 'blue') + "\\" + make_colors(os.path.split(os.path.split(DEST_NAME)[0])[1], 'cyan') + "\\" + make_colors(os.path.split(DEST_NAME)[1], 'magenta')
else:
if backup:
print make_colors('COPY', 'white', 'blue') + " " + make_colors("[BACKUP]", 'white', 'red') + ": " + make_colors(str(i), 'yellow') + make_colors(" --> ", 'green') + make_colors(os.path.split(os.path.split(DEST_NAME)[0])[0], 'blue') + "\\" + make_colors(os.path.split(os.path.split(DEST_NAME)[0])[1], 'cyan') + "\\" + make_colors(os.path.split(DEST_NAME)[1], 'magenta')
else:
print make_colors('COPY', 'white', 'blue') + ": " + make_colors(str(i), 'yellow') + make_colors(" --> ", 'green') + make_colors(os.path.split(os.path.split(DEST_NAME)[0])[0], 'blue') + "\\" + make_colors(os.path.split(os.path.split(DEST_NAME)[0])[1], 'cyan') + "\\" + make_colors(os.path.split(DEST_NAME)[1], 'magenta')
if overwrite and not simulate:
if os.path.isfile(DEST_NAME):
os.remove(DEST_NAME)
shutil.copy2(i, DEST_NAME)
#os.system('copy /y "%s" "%s >NUL"' % (i, DEST_NAME))
else:
if os.path.isfile(DEST_NAME):
if quiet and not simulate:
os.remove(DEST_NAME)
#os.system('copy /y "%s" "%s >NUL"' % (i, DEST_NAME))
shutil.copy2(i, DEST_NAME)
else:
if not overwrite:
q = raw_input(" Overwrite %s ? (y/n): " % make_colors(DEST_NAME, 'white', 'blue'))
if str(q).lower() == 'y' or str(q).lower() == 'yes':
if not simulate:
os.remove(DEST_NAME)
#os.system('copy /y "%s" "%s >NUL"' % (i, DEST_NAME))
shutil.copy2(i, DEST_NAME)
elif str(q).lower() == 'a' or str(q).lower() == 'all':
quiet = True
else:
if not simulate:
os.remove(DEST_NAME)
shutil.copy2(i, DEST_NAME)
else:
#os.system('copy "%s" "%s >NUL"' % (i, DEST_NAME))
if not simulate:
shutil.copy2(i, DEST_NAME)
# thread.start_new(shutil.copy2, (i, DEST_NAME))
#shutil.copy2(i, DEST_NAME)
#os.system("move")
if not noclean:
if backup and not simulate:
shutil.move(i, BACKUP_PATH)
#os.system('move /y "%s" "%s"' % (i, BACKUP_PATH))
else:
#time.sleep(1)
if not simulate:
send2trash.send2trash(i)
def usage(self):
parser = argparse.ArgumentParser(formatter_class= argparse.RawTextHelpFormatter, description= 'Manage all file of python modules or packages in spesific folder')
parser.add_argument('-o', '--overwrite', help='Overwrite File move to', action='store_true')
parser.add_argument('-d', '--destination', help='Destination Folder default: MASTER_PATH with defintion or environment', action='store')
parser.add_argument('-n', '--noclean', help='Dont delete source file', action='store_true')
parser.add_argument('-q', '--quiet', help='No Supress', action='store_true')
parser.add_argument('-v', '--verbosity', help='Show running process', action='store_true')
parser.add_argument('-b', '--backup', help='Move to backup folder after copy', action='store_true')
parser.add_argument('-t', '--test-simulate', help='Simulate process', action='store_true')
if len(sys.argv) == 1:
parser.print_help()
else:
args = parser.parse_args()
if args.destination:
self.MASTER_PATH = args.destination
if os.getenv("MASTER_PATH") != None:
self.MASTER_PATH = os.getenv("MASTER_PATH")
if not os.path.isdir(self.MASTER_PATH):
q1 = raw_input('Root Directory [b = browser]: ')
if q1 == 'b':
self.MASTER_PATH = easygui.diropenbox('Select Directory', 'MASTER_PATH', 'c:\\')
else:
self.MASTER_PATH = q1
if not os.path.isdir(self.MASTER_PATH):
print make_colors('MASTER_PATH not Found !!!', 'white', 'red', ['bold', 'blink'])
parser.print_help()
sys.exit()
print "MASTER_PATH =", make_colors(self.MASTER_PATH, 'white', 'red')
self.moved(self.MASTER_PATH, self.MASTER_PATH, args.overwrite, args.quiet, self.MASTER_PATH, args.noclean, args.verbosity, args.backup, args.test_simulate)
if __name__ == "__main__":
    #print translate(sys.argv[1])
    #check_archive(sys.argv[1])
    #data = sys.argv[1:len(sys.argv)]
    # Run the command-line front end.
    c = pm()
    c.usage()
| {"/database.py": ["/convert.py"]} |
74,714 | 0h-n0/pybiodata | refs/heads/master | /pybiodata/database_urls.py | PDB_REST_URL = 'http://rest.rcsb.org/rest/'
# Base URL for downloading structure files (note the trailing slash).
PDB_DOWNLOAD_URL = 'https://files.rcsb.org/download/'
| {"/pybiodata/entry_point.py": ["/pybiodata/base.py", "/pybiodata/pdb.py"], "/pybiodata/pdb.py": ["/pybiodata/base.py", "/pybiodata/database_urls.py"]} |
74,715 | 0h-n0/pybiodata | refs/heads/master | /setup.py | import setuptools
from pathlib import Path
# Project root, used to read the long description below.
p = Path('.').resolve()
setup_requires = [
]
install_requires = [
    'requests'
]
test_require = [
]
setuptools.setup(
    name="pybiodata",
    version='0.0.1',
    python_requires='>3.5',
    author="Koji Ono",
    author_email="kbu94982@gmail.com",
    description="library supporting for downloading bio-data.",
    url='https://github.com/0h-n0/pybiodata',
    # README.md doubles as the PyPI long description
    long_description=(p / 'README.md').open(encoding='utf-8').read(),
    packages=setuptools.find_packages(),
    install_requires=install_requires,
    setup_requires=setup_requires,
    tests_require=test_require,
    extras_require={
        'docs': [
            'sphinx >= 1.4',
            'sphinx_rtd_theme']},
    classifiers=[
        'Programming Language :: Python :: 3.6',
    ],
    # installs the `pybiodata` console command
    entry_points = {
        'console_scripts' : ['pybiodata = pybiodata.entry_point:main'],
    },
)
| {"/pybiodata/entry_point.py": ["/pybiodata/base.py", "/pybiodata/pdb.py"], "/pybiodata/pdb.py": ["/pybiodata/base.py", "/pybiodata/database_urls.py"]} |
74,716 | 0h-n0/pybiodata | refs/heads/master | /pybiodata/entry_point.py | #!/usr/bin/env python
import sys
from .base import AbstractDatabase
from .pdb import PDB
def main():
    """Console entry point: dispatch ``pybiodata <DATABASE> [options]``.

    Prints the generic usage text and exits with status 1 when no (or an
    unknown) database name is given.
    """
    # explicit validation instead of assert: asserts vanish under ``python -O``
    dbname = sys.argv[1] if len(sys.argv) > 1 else None
    if dbname not in ('PDB',):
        parser = AbstractDatabase.set_command_arguments()
        # print_help() writes the usage text itself; the original wrapped it
        # in print(), which appended a stray "None" line to stderr output
        parser.print_help(sys.stderr)
        sys.exit(1)
    if dbname == 'PDB':
        parser = PDB.set_command_arguments()
        db = PDB(parser)
        db.run()
| {"/pybiodata/entry_point.py": ["/pybiodata/base.py", "/pybiodata/pdb.py"], "/pybiodata/pdb.py": ["/pybiodata/base.py", "/pybiodata/database_urls.py"]} |
74,717 | 0h-n0/pybiodata | refs/heads/master | /pybiodata/base.py |
class AbstractDatabase:
    """Common base class for database CLI handlers."""

    # shared argparse description shown by every subcommand
    description = 'pybiodata'

    def __init__(self):
        pass

    @classmethod
    def set_command_arguments(cls):
        """Build the shared argument parser with the database selector."""
        import argparse

        parser = argparse.ArgumentParser(description=cls.description)
        parser.add_argument('database', choices=('PDB',), help='set database')
        return parser
| {"/pybiodata/entry_point.py": ["/pybiodata/base.py", "/pybiodata/pdb.py"], "/pybiodata/pdb.py": ["/pybiodata/base.py", "/pybiodata/database_urls.py"]} |
74,718 | 0h-n0/pybiodata | refs/heads/master | /pybiodata/pdb.py | import requests
from .base import AbstractDatabase
from .database_urls import PDB_REST_URL, PDB_DOWNLOAD_URL
class PDB(AbstractDatabase):
    """Downloads structure files from the RCSB PDB file server."""

    def __init__(self, parser):
        # parsed CLI options as a plain dict, e.g. {'database': 'PDB', 'id': '1abc'}
        self.args = vars(parser.parse_args())

    def run(self):
        """Stream ``<id>.pdb`` from the server into the working directory.

        Returns the local filename.  Raises requests.HTTPError for a bad
        status (e.g. unknown PDB id).
        """
        pdb_id = self.args['id']
        filename = f'{pdb_id}.pdb'
        # PDB_DOWNLOAD_URL already ends with '/', so append the bare filename.
        # (The original appended a garbled literal path segment, so every
        # download hit a non-existent URL.)
        with requests.get(PDB_DOWNLOAD_URL + filename, stream=True) as r:
            r.raise_for_status()
            with open(filename, 'wb') as f:
                # 8 KiB chunks keep memory flat for large structures
                for chunk in r.iter_content(chunk_size=8192):
                    if chunk:
                        f.write(chunk)
        return filename

    @classmethod
    def set_command_arguments(cls):
        """Extend the shared parser with the PDB id option."""
        parser = super().set_command_arguments()
        parser.add_argument('--id', help='set PDBID')
        return parser
| {"/pybiodata/entry_point.py": ["/pybiodata/base.py", "/pybiodata/pdb.py"], "/pybiodata/pdb.py": ["/pybiodata/base.py", "/pybiodata/database_urls.py"]} |
74,719 | ssghost/My_Heatmap | refs/heads/master | /heatmap.py | import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
import keras
import cv2
import time
class Heatmap:
    """Grad-CAM style heat-map generator for a Keras image classifier."""

    def __init__(self):
        self.model = None    # loaded keras model
        self.image = None    # preprocessed input batch, shape (1, H, W, C)
        self.size = None     # (H, W) target size used when loading images
        self.layers = None   # [[layer_name, layer_class_name], ...]
        self.lcl = ''        # name of the last Conv2D layer
        self.cfl = []        # classifier layer names after the last conv, forward order
        self.result = None   # normalised heat map, 2-D array in [0, 1]

    def image_array(self, imagepath):
        """Load *imagepath*, resize to the model input size, store as a batch."""
        img = keras.preprocessing.image.load_img(imagepath, target_size=self.size)
        array = keras.preprocessing.image.img_to_array(img)
        self.image = np.expand_dims(array, axis=0)

    def read_model(self, modelpath):
        """Load the model and locate the last conv layer and classifier tail."""
        self.model = keras.models.load_model(modelpath)
        # load_img() wants the spatial (H, W) only; input_shape is the full
        # (batch, H, W, C) tuple (assumes channels-last -- TODO confirm).
        # The original stored the whole tuple, which load_img rejects.
        self.size = self.model.input_shape[1:3]
        # Keras layers expose no ``.type`` attribute (the original raised
        # AttributeError here); the class name is what the 'Conv2D'
        # comparison below expects.
        self.layers = [[layer.name, layer.__class__.__name__]
                       for layer in self.model.layers]
        i = len(self.layers)-1
        classifiers = []
        # walk backwards until the last convolutional layer
        while self.layers[i][1] != 'Conv2D':
            classifiers.append(self.layers[i][0])
            i -= 1
        # collected back-to-front above; reverse so create_heatmap applies
        # the classifier layers in forward order (the original left them
        # reversed and rebuilt the model head upside down)
        self.cfl = classifiers[::-1]
        self.lcl = self.layers[i][0]

    def create_heatmap(self):
        """Compute the Grad-CAM map for the stored image into self.result."""
        lclayer = self.model.get_layer(self.lcl)
        # sub-model 1: input image -> activations of the last conv layer
        lclayer_model = keras.Model(self.model.inputs, lclayer.output)
        # sub-model 2: conv activations -> class scores
        cfl_input = keras.Input(shape=lclayer.output.shape[1:])
        x = cfl_input
        for name in self.cfl:
            x = self.model.get_layer(name)(x)
        cflayer_model = keras.Model(cfl_input, x)
        with tf.GradientTape() as tape:
            lcl_output = lclayer_model(self.image)
            tape.watch(lcl_output)
            predict = cflayer_model(lcl_output)
            p_index = tf.argmax(predict[0])
            c_channel = predict[:, p_index]
        # gradient of the winning class score w.r.t. the conv activations
        gradients = tape.gradient(c_channel, lcl_output)
        # channel importance = mean gradient over batch and spatial dims
        g_pool = tf.reduce_mean(gradients, axis=(0, 1, 2))
        lcl_output = lcl_output.numpy()[0]
        g_pool = g_pool.numpy()
        for i in range(g_pool.shape[-1]):
            lcl_output[:, :, i] *= g_pool[i]
        result = np.mean(lcl_output, axis=-1)
        # ReLU, then scale so the hottest pixel is exactly 1
        result = np.where(result >= 0, result, 0) / np.max(result)
        self.result = result

    def display_heatmap(self, imagepath, outpath):
        """Show the raw map, blend it over the source image, save to *outpath*."""
        predict = self.model.predict(self.image)
        plt.matshow(self.result)
        plt.title(str(predict))
        plt.show()
        time.sleep(.5)
        img = cv2.imread(imagepath)
        # stretch the conv-resolution map up to the original image size
        heatmap = cv2.resize(self.result, (img.shape[1], img.shape[0]))
        heatmap = np.uint8(255*heatmap)
        heatmap = cv2.applyColorMap(heatmap, cv2.COLORMAP_JET)
        # overlay: 40% heat map on top of the original image
        save_img = heatmap*0.4+img
        cv2.imwrite(outpath, save_img)
        time.sleep(.5)
        out = cv2.imread(outpath)
        plt.imshow(out)
        plt.axis('off')
        plt.show()
| {"/run.py": ["/heatmap.py"]} |
74,720 | ssghost/My_Heatmap | refs/heads/master | /run.py | import getopt, sys
from heatmap import *
def main():
    """Parse CLI options and drive a single Heatmap instance end to end.

    Options: -i/--inpath image, -o/--outpath overlay output, -m/--modelpath
    saved keras model.  All three are required for anything to happen.
    """
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'i:o:m:',
                                   ['inpath=', 'outpath=', 'modelpath='])
    except getopt.GetoptError as err:
        print(err)
        sys.exit()
    imagepath, outpath, modelpath = '', '', ''
    for o, a in opts:
        # getopt always yields string values, so the original's
        # type(a) == str checks were redundant
        if o in ('-i', '--inpath'):
            imagepath = a
        elif o in ('-o', '--outpath'):
            outpath = a
        elif o in ('-m', '--modelpath'):
            modelpath = a
        else:
            assert False, 'unhandled option'
    if imagepath and outpath and modelpath:
        # the original constructed a fresh Heatmap() for every call, so the
        # model loaded by read_model was discarded before create_heatmap ran
        hm = Heatmap()
        hm.read_model(modelpath)
        hm.image_array(imagepath)
        hm.create_heatmap()
        hm.display_heatmap(imagepath, outpath)
# Run only when executed as a script.
if __name__ == "__main__":
    main()
| {"/run.py": ["/heatmap.py"]} |
74,721 | aleskruba/djangoschoolproject | refs/heads/main | /admin.py | from django.contrib import admin
from . models import Grade
admin.site.register(Grade) | {"/admin.py": ["/models.py"], "/views.py": ["/models.py", "/forms.py"], "/forms.py": ["/models.py"]} |
74,722 | aleskruba/djangoschoolproject | refs/heads/main | /models.py | from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Grade(models.Model):
    """Per-student grades record, linked one-to-one to a Django auth User."""
    # deleting the User cascades and removes this row too
    user = models.OneToOneField(User,null=True, on_delete=models.CASCADE)
    # free-form text blob holding the grades
    grades = models.TextField()
    def __str__(self):
return str(self.user) | {"/admin.py": ["/models.py"], "/views.py": ["/models.py", "/forms.py"], "/forms.py": ["/models.py"]} |
74,723 | aleskruba/djangoschoolproject | refs/heads/main | /views.py | from django.shortcuts import render,redirect
from django.http import HttpResponse
from .models import *
from django.contrib.auth.forms import UserCreationForm
from .forms import GradeForm,CreateUserForm
def Home(request):
    """Render the static landing page."""
    template_name = 'home.html'
    return render(request, template_name)
def registerPage(request):
    """Show the registration form; create the account on a valid POST.

    The original assigned the form *class* (``form = CreateUserForm``)
    instead of an instance for GET requests; rendering and validation
    expect a bound/unbound instance.
    """
    form = CreateUserForm()
    if request.method == "POST":
        form = CreateUserForm(request.POST)
        if form.is_valid():
            form.save()
    context = {'form': form}
    return render(request, 'addstudent.html', context)
| {"/admin.py": ["/models.py"], "/views.py": ["/models.py", "/forms.py"], "/forms.py": ["/models.py"]} |
74,724 | aleskruba/djangoschoolproject | refs/heads/main | /forms.py | from django import forms
from .models import Grade
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
class GradeForm(forms.ModelForm):
    """ModelForm for editing a student's Grade record."""
    class Meta:
        model = Grade
        fields = ('grades',)
        widgets = {
            # Bootstrap styling for the single text input
            'grades' : forms.TextInput(attrs={'class':'form-control'}),
        }
class CreateUserForm(UserCreationForm):
    """Registration form built on Django's stock UserCreationForm."""
    class Meta:
        model = User
fields = ['username', 'email', 'password1', 'password2'] | {"/admin.py": ["/models.py"], "/views.py": ["/models.py", "/forms.py"], "/forms.py": ["/models.py"]} |
74,726 | ktarrant/WinCandles | refs/heads/master | /main.py | import os
from collections import OrderedDict
import logging
from datetime import date
import pandas as pd
# Project-level imports
from urls import FANGRAPHS_NAME_REVERSE_REMAP
from schedule import get_mlb_schedule
from playlog import make_team_summary
from plot_summary import make_plotly_summary
log = logging.getLogger(__name__)
def _generate_plot_summaries(begin_date, cachedir):
    """Yield (team_code, plotly_url) for every team, caching CSVs in *cachedir*.

    The schedule and each per-team summary are cached on disk; cached files
    are reused on later runs.
    """
    os.makedirs(cachedir, exist_ok=True)
    SCHEDULE_PATH = os.path.join(cachedir, "schedule.csv")
    log.debug("Loading schedule with begin date: {}".format(begin_date))
    try:
        # NOTE(review): DataFrame.from_csv was removed in pandas 1.0 --
        # needs pd.read_csv(..., index_col=0) on modern pandas
        schedule = pd.DataFrame.from_csv(SCHEDULE_PATH)
        log.info("Used cached schedule: {}".format(SCHEDULE_PATH))
    except IOError:
        schedule = get_mlb_schedule(begin_date)
        log.info("Loaded schedule: {}".format(schedule))
        schedule.to_csv(SCHEDULE_PATH)
    for team in sorted(FANGRAPHS_NAME_REVERSE_REMAP):
        # Generate a summary for each team from the playlog data
        log.info("Creating summary for team: {}".format(team))
        summary_path = os.path.join(cachedir, "summary_{}.csv".format(team))
        try:
            # the loaded frame is only used as a cache-existence probe
            summary = pd.DataFrame.from_csv(summary_path)
            log.info("Found cached schedule: {}".format(summary_path))
        except IOError:
            make_team_summary(SCHEDULE_PATH, team, summary_path)
        # Generate a plotly chart for each summary
        # NOTE(review): make_plotly_summary is imported from plot_summary,
        # which (in this tree) defines make_team_summary -- verify the name
        log.info("Creating summary for team: {}".format(team))
        yield (team, make_plotly_summary(team, summary_path, auto_open=False))
# CLI entry point: build every team's chart and write a link list to --output.
if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description="Creates the reddit comment with all the links.")
    parser.add_argument("--verbose", action="store_true")
    parser.add_argument("--cachedir", default="./cache")
    parser.add_argument("--output", default="comment.txt")
    args = parser.parse_args()
    if args.verbose:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)
    # Collect the schedule for the month of may
    begin_date = date(year=2017, month=5, day=1)
    with open(args.output, "w") as fobj:
        for team, plotly_url in _generate_plot_summaries(begin_date, args.cachedir):
            # NOTE(review): no "\n" separator -- all links end up on one line
            fobj.write("{}: {}".format(team, plotly_url))
| {"/main.py": ["/urls.py", "/schedule.py", "/playlog.py", "/plot_summary.py"], "/schedule.py": ["/urls.py"], "/playlog.py": ["/urls.py"], "/plot_summary.py": ["/team_colors.py"], "/team_graph.py": ["/schedule.py", "/playlog.py"], "/team_colors.py": ["/urls.py"]} |
74,727 | ktarrant/WinCandles | refs/heads/master | /schedule.py | from datetime import date, timedelta, datetime
import pprint
from urllib.request import urlopen
from bs4 import BeautifulSoup
import pandas as pd
import re
from collections import OrderedDict
import logging
# Project-level imports
from urls import *
log = logging.getLogger(__name__)
# ==================================================================================================
# MLB.COM SCHEDULE DOWNLOADER
# ==================================================================================================
def split_mlb_schedule_result(schedule_result, schedule_date, key_remap=None):
    """Parse one matchup cell from the mlb.com schedule page.

    schedule_result -- text such as "Cubs 5, Mets 3" (final) or
                       "Cubs @ Mets" (in progress / not started)
    schedule_date   -- date attached to every returned record
    key_remap       -- display-name -> team-code mapping; defaults to
                       MLB_NAME_REMAP (resolved at call time, not def time)

    Returns a dict with away/home team codes and scores, or None when the
    text matches neither form.  Raises KeyError for unknown team names.
    """
    if key_remap is None:
        key_remap = MLB_NAME_REMAP
    if "," in schedule_result:
        # final score: the first-listed club is the away team
        results = schedule_result.split(", ")
        result1 = results[0].split(" ")
        result2 = results[1].split(" ")
        return {
            "away_team": key_remap[" ".join(result1[:-1])],
            "away_score": int(result1[-1]),
            "home_team": key_remap[" ".join(result2[:-1])],
            "home_score": int(result2[-1]),
            "date": schedule_date,
        }
    elif "@" in schedule_result:
        # in progress: "Away @ Home" -- by MLB convention (and consistent
        # with the final-score branch above) the first-listed club is the
        # AWAY team; the original assigned it to home_team
        mid_result = schedule_result.split(" @ ")
        return {
            "away_team": key_remap[mid_result[0]],
            "away_score": 0,
            "home_team": key_remap[mid_result[1]],
            "home_score": 0,
            "date": schedule_date,
        }
# Matches a schedule header such as "Sunday, May 28, 2017", capturing the
# month name, day and year, e.g. ("May", "28", "2017").
_mlb_date_re = re.compile("[A-Z][a-z]+, ([A-Z][a-z]+) ([0-9]+), ([0-9]{4})")
def _get_mlb_sched_by_url(url):
    """Yield one list of parsed game dicts per dated section on the page.

    Sections whose header does not look like a date, or that mention a team
    missing from the name remap, are skipped silently.
    """
    log.info("loading url: {}".format(url))
    with urlopen(url) as webobj:
        soup = BeautifulSoup(webobj, "lxml")
    for schedule_module in soup.findAll('section', {'class': 'module'}):
        schedule_header = schedule_module.find('h4')
        header_match = _mlb_date_re.match(schedule_header.text)
        if header_match is None:
            continue
        # e.g. "May 28 2017" -> datetime
        header_date = datetime.strptime(" ".join(header_match.groups()), '%B %d %Y')
        schedule_table = schedule_module.find('table', {'class': 'schedule-list'})
        matchup_columns = schedule_table.findAll('td', {'class': 'schedule-matchup'})
        text_results = [ td.text for td in matchup_columns ]
        try:
            yield [ split_mlb_schedule_result(text, header_date.date())
                for text in text_results ]
        except KeyError:
            # unknown team name (e.g. all-star squads): drop the whole day
            continue
def get_mlb_schedule(start_date):
    """Scrape mlb.com for all games from *start_date* (inclusive) to today (exclusive).

    Walks backwards from today's schedule page three days at a time until
    *start_date* is covered, then returns a DataFrame with away/home team,
    score and date columns.
    """
    todays_schedule = [game for game_list in _get_mlb_sched_by_url(MLB_URL_SCHEDULE_BASE)
                       for game in game_list]
    earliest_day = todays_schedule[0]["date"]
    # each schedule page covers roughly three days; keep stepping back
    while earliest_day > start_date:
        previous_set_day = earliest_day - timedelta(days=3)
        previous_set_url = MLB_URL_SCHEDULE_DATE.format(date=previous_set_day)
        # prepend the older games so the table stays in chronological order
        todays_schedule = [game for game_list in _get_mlb_sched_by_url(previous_set_url)
                           for game in game_list] + todays_schedule
        earliest_day = previous_set_day
    schedule_table = pd.DataFrame(todays_schedule)
    # select [start_date, today) with one combined mask; the original
    # chained two boolean indexers, which pandas warns about and which
    # relies on implicit index re-alignment
    in_window = (schedule_table["date"] >= start_date) & (schedule_table["date"] < date.today())
    return schedule_table[in_window]
# ==================================================================================================
# FANGRAPHS GAME DAY LINK DOWNLOADER
# ==================================================================================================
""" NOTE: Should no longer be needed. Remove? """
def get_play_logs(date, root=FANGRAPHS_URL_ROOT, key_remap=FANGRAPHS_NAME_REMAP):
    """Return {team: play-log URL} scraped from the fangraphs scoreboard for *date*.

    Keys are remapped through *key_remap* unless it is None, in which case
    the raw fangraphs team names are used.
    """
    url = FANGRAPHS_URL_BASE.format(root=root, date=date)
    log.info("loading url: {}".format(url))
    with urlopen(url) as webobj:
        soup = BeautifulSoup(webobj.read(), 'lxml')
    play_log_soups = soup.findAll('a', text='Play Log')
    play_log_urls = [ FANGRAPHS_URL_PLAY.format(root=FANGRAPHS_URL_ROOT, log=s['href'])
                      for s in play_log_soups ]
    if key_remap is not None:
        # the team name is extracted from each URL's team= query parameter
        return { key_remap[FANGRAPHS_URL_TEAM_RE.findall(url)[0]] : url
                 for url in play_log_urls }
    else:
        return { FANGRAPHS_URL_TEAM_RE.findall(url)[0] : url
                 for url in play_log_urls }
# Demo run: scrape May 2017 onwards and print the resulting table.
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    # Collect the schedule for the month of may
    schedule = get_mlb_schedule(date(year=2017, month=5, day=1))
    print(schedule)
schedule.to_csv("schedule.csv") | {"/main.py": ["/urls.py", "/schedule.py", "/playlog.py", "/plot_summary.py"], "/schedule.py": ["/urls.py"], "/playlog.py": ["/urls.py"], "/plot_summary.py": ["/team_colors.py"], "/team_graph.py": ["/schedule.py", "/playlog.py"], "/team_colors.py": ["/urls.py"]} |
74,728 | ktarrant/WinCandles | refs/heads/master | /playlog.py | from bs4 import BeautifulSoup
from urllib.request import urlopen
from datetime import date, timedelta
import logging
import pandas as pd
# Project-level imports
from urls import FANGRAPHS_PLAYLOG_URL, FANGRAPHS_NAME_REVERSE_REMAP
log = logging.getLogger(__name__)
def get_playlog_data(url):
    """Download and parse one fangraphs play log into a DataFrame.

    Raises ValueError when the page carries no play-log table (used by the
    caller to probe for double-headers).  Percent signs are stripped and
    columns are converted to numeric where possible.
    """
    log.info("Loading play log from url: {}".format(url))
    with urlopen(url) as webobj:
        soup = BeautifulSoup(webobj.read(), 'lxml')
    play_table = soup.find('table', {'class': 'rgMasterTable'})
    if play_table is None:
        raise ValueError("Play log missing: '{}'".format(url))
    table_headers = play_table.findAll('th', {'class': 'rgHeader'})
    clean_headers = [ td.text.strip() for td in table_headers ]
    table_entries = play_table.findAll('tr', {'class': ['rgRow', 'rgAltRow']})
    _clean = lambda s: s.strip().replace("%", "")
    raw_df = pd.DataFrame([
        { header: _clean(td.text) for header, td in zip(clean_headers, row.findAll('td')) }
        for row in table_entries
    ])
    return raw_df.apply(pd.to_numeric, errors='ignore')
def convert_playlog_to_summary(playlog):
    """Collapse a per-play log into one OHLC-style row per half-inning.

    Adds HalfInning / WE50 helper columns to *playlog* in place, then
    returns a DataFrame indexed by HalfInning with columns Open_WE50,
    Close_WE50, Min_WE50, Max_WE50 and Max_LI.  WE50 rescales win
    expectancy from [0, 100] percent to [-1, 1] centred on 50%.
    """
    # 0-based half-inning counter; assumes "Half" is 0 (top) / 1 (bottom)
    playlog["HalfInning"] = (playlog["Inn."] - 1) * 2 + playlog["Half"]
    playlog["WE50"] = (playlog["WE"] - 50.0) / 50.0
    # the last play of each half-inning supplies the closing value
    close_values = playlog[playlog["HalfInning"].shift(-1) != playlog["HalfInning"]].set_index("HalfInning")
    # opens are the previous half-inning's close; the game itself opens at 0.
    # Use a single .loc assignment: the original's chained
    # open_values["WE50"].iloc[0] = 0 is a silent no-op under pandas
    # copy-on-write.  (The original also computed an unused entry_count.)
    open_values = close_values.shift(1)
    open_values.loc[open_values.index[0], "WE50"] = 0
    min_values = playlog.pivot_table(index="HalfInning", aggfunc=min)
    max_values = playlog.pivot_table(index="HalfInning", aggfunc=max)
    summary = pd.concat([
        open_values["WE50"],
        close_values["WE50"],
        min_values["WE50"],
        max_values["WE50"],
        max_values["LI"],
    ],
        axis=1,
        keys=["Open_WE50", "Close_WE50", "Min_WE50", "Max_WE50", "Max_LI"])
    return summary
def summarize_team_from_schedule(schedule, team):
    """Yield one half-inning summary DataFrame per game played by *team*.

    Walks the schedule in order, tracks double-headers via the fangraphs
    ``dh`` query parameter (probing dh=1 when dh=0 has no play log), and
    negates the WE columns when *team* played away.
    """
    last_date = None
    dh = 0
    for i in schedule.index:
        game = schedule.loc[i]
        if game.home_team != team and game.away_team != team:
            # This game isn't relevant to our search
            continue
        if last_date is None or last_date != game.date:
            last_date = game.date
            dh = 0
        else:
            # same date seen twice: this must be game 2 of a double-header
            if dh == 0:
                log.error("Detected there are two games on {} yet only one saved".format(game.date))
            elif dh == 1:
                log.info("Detected there are two games on {} and setting up second game".format(game.date))
                dh = 2
            else:
                raise ValueError("Detected 3 games in one day?? Is that possible??")
        # fangraphs keys play logs by the HOME team's display name
        fangraphs_name = FANGRAPHS_NAME_REVERSE_REMAP[game.home_team].replace(" ", "%20")
        try:
            playlog_url = FANGRAPHS_PLAYLOG_URL.format(date=game.date, team=fangraphs_name, dh=dh)
            playlog = get_playlog_data(playlog_url)
        except ValueError:
            # no log at dh=0: single games of a double-header day start at dh=1
            log.debug("Failed to load date {} with dh=0, setting dh=1".format(game.date))
            dh = 1
            playlog_url = FANGRAPHS_PLAYLOG_URL.format(date=game.date, team=fangraphs_name, dh=dh)
            playlog = get_playlog_data(playlog_url)
        summary = convert_playlog_to_summary(playlog)
        if team == game.away_team:
            # Away team stats need to be negated
            summary["Open_WE50"] *= -1
            summary["Close_WE50"] *= -1
            summary["Min_WE50"] *= -1
            summary["Max_WE50"] *= -1
        entry_count = len(summary.index)
        summary["Date"] = [ game.date ] * entry_count
        opponent = game.away_team if team == game.home_team else game.home_team
        summary["Opponent"] = [ opponent ] * entry_count
        yield summary
def make_team_summary(source_csv, team, outfile):
    """Concatenate every game summary for *team* into one CSV at *outfile*.

    Each game's half-inning index and WE50 columns are shifted so the
    season accumulates: game N opens where game N-1 closed.
    """
    # DataFrame.from_csv was deprecated in pandas 0.21 and removed in 1.0;
    # read_csv with index_col=0 is the documented replacement
    schedule = pd.read_csv(source_csv, index_col=0)
    total_summary = None
    for summary in summarize_team_from_schedule(schedule, team):
        if total_summary is None:
            total_summary = summary
        else:
            # continue numbering/accumulating from the previous game's close
            base_halfInning = max(total_summary.index) + 1
            base_WE50 = total_summary.iloc[-1]["Close_WE50"]
            log.debug("Using base_halfInning: {}".format(base_halfInning))
            log.debug("Using base_WE50: {}".format(base_WE50))
            summary.index = [i + base_halfInning for i in summary.index]
            summary["Open_WE50"] += base_WE50
            summary["Close_WE50"] += base_WE50
            summary["Min_WE50"] += base_WE50
            summary["Max_WE50"] += base_WE50
            total_summary = pd.concat([total_summary, summary])
    if total_summary is None:
        # the original crashed with AttributeError on None here
        raise ValueError("No games found for team {!r} in {}".format(team, source_csv))
    total_summary.to_csv(outfile)
# CLI: build one team's season summary CSV in the working directory.
if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description="Loads summary for a team using play logs")
    parser.add_argument("team")
    parser.add_argument("--verbose", action="store_true")
    parser.add_argument("--source", default="./cache/schedule.csv")
    args = parser.parse_args()
    if args.verbose:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)
    make_team_summary(args.source, args.team, "summary_{}.csv".format(args.team))
| {"/main.py": ["/urls.py", "/schedule.py", "/playlog.py", "/plot_summary.py"], "/schedule.py": ["/urls.py"], "/playlog.py": ["/urls.py"], "/plot_summary.py": ["/team_colors.py"], "/team_graph.py": ["/schedule.py", "/playlog.py"], "/team_colors.py": ["/urls.py"]} |
74,729 | ktarrant/WinCandles | refs/heads/master | /plot_summary.py | import plotly.plotly as py
import plotly.graph_objs as go
import pandas as pd
# Project-level imports
from team_colors import TEAM_COLORS_LOOKUP
# Retrosheet-style team codes grouped by MLB division
# (National/American League East, West, Central).
DIVISIONS = {
    "NLE": ["WAS", "NYN", "MIA", "ATL", "PHI"],
    "NLW": ["ARI", "LAN", "SFN", "COL", "SDN"],
    "NLC": ["CHN", "SLN", "PIT", "MIL", "CIN"],
    "ALE": ["BAL", "NYA", "BOS", "TBA", "TOR"],
    "ALW": ["HOU", "OAK", "SEA", "TEX", "ANA"],
    "ALC": ["DET", "CLE", "KCA", "MIN", "CHA"],
}
def _make_line_annot(index):
return {
'x0': index, 'x1': index,
'y0': 0, 'y1': 1, 'xref': 'x', 'yref': 'paper',
'line': {'color': 'rgb(30,30,30)', 'width': 1, 'dash': 'dot'},
}
def _make_date_annot(index, date):
return {
'x': index, 'y': 0.05, 'xref': 'x', 'yref': 'paper',
'showarrow': False, 'xanchor': 'left',
'text': str(date)
}
def _make_team_fig(team_name, summary):
    """Build the (candlestick trace, layout fragment) pair for one team.

    *summary* is a half-inning-indexed frame with Open/Close/Min/Max_WE50
    and Date columns; game boundaries get dotted lines, every 4th one a
    date label.
    """
    # rows whose Date differs from the following row close out a game
    game_ends = summary[summary.shift(-1)["Date"] != summary["Date"]]
    palette = TEAM_COLORS_LOOKUP[team_name]
    trace = {
        "type": 'candlestick',
        "xaxis": 'x1',
        "x": summary.index,
        "open": summary.Open_WE50,
        "high": summary.Max_WE50,
        "low": summary.Min_WE50,
        "close": summary.Close_WE50,
        "decreasing": {"line": {"color": palette[0]}},
        "increasing": {"line": {"color": palette[1]}},
        "line": {"color": 'rgba(31,119,180,1)'},
    }
    shapes = [_make_line_annot(idx) for idx in game_ends.index]
    annotations = [_make_date_annot(idx, summary["Date"].loc[idx])
                   for idx in game_ends.index[::4]]
    return (trace, {"shapes": shapes, 'annotations': annotations})
def make_team_summary(team_name, summary_filename, auto_open=True):
    """Plot one team's accumulated win-expectancy candles; return the plotly URL."""
    # DataFrame.from_csv was removed in pandas 1.0; read_csv(index_col=0)
    # is the documented replacement
    summary = pd.read_csv(summary_filename, index_col=0)
    (trace, layout) = _make_team_fig(team_name, summary)
    data = [trace]
    layout.update({
        'title': '{} Accumulated Win Expectency'.format(team_name),
        'yaxis': {'title': 'Wins above .500'},
        # 'range' (data coordinates), not 'domain': a plotly axis domain is
        # a paper fraction in [0, 1], so half-inning numbers are rejected
        'xaxis1': {'title': 'Half-Innings', 'range': [summary.index[0], summary.index[-1]]},
        'showlegend': False,
    })
    fig = dict(data=data, layout=layout)
    url = py.plot(fig, filename='{}_winCandles'.format(team_name), auto_open=auto_open)
    return url
def make_division_summary(division_name, cachedir, auto_open=True):
    """Plot all five teams of a division on one chart; return the plotly URL.

    The original was only runnable from __main__: it referenced ``args``
    instead of the *cachedir* parameter, used an undefined ``summary`` in
    the layout, relied on ``os`` being imported by the __main__ block, and
    passed a file *name* to _make_team_fig, which expects a DataFrame.
    """
    import os
    teams = DIVISIONS[division_name]
    data = []
    layout = {"shapes": [], "annotations": []}
    summary = None
    for team in teams:
        summary_filename = os.path.join(cachedir, "summary_{}.csv".format(team))
        summary = pd.read_csv(summary_filename, index_col=0)
        trace, this_layout = _make_team_fig(team, summary)
        data.append(trace)
        # merge each team's shapes/annotations into the shared layout
        layout = {key: layout[key] + this_layout[key] for key in layout}
    layout.update({
        'title': '{} WE Accumulation Standings'.format(division_name),
        'yaxis': {'title': 'Wins above .500'},
        # 'range' (data coordinates), not 'domain' (paper fraction 0..1);
        # uses the last team's index span for the axis extent
        'xaxis1': {'title': 'Half-Innings', 'range': [summary.index[0], summary.index[-1]]},
        'showlegend': False,
    })
    fig = dict(data=data, layout=layout)
    url = py.plot(fig, filename='{}_winCandles'.format(division_name), auto_open=auto_open)
    return url
# CLI: plot a single team ("team NAME") or a whole division ("division KEY").
if __name__ == "__main__":
    import argparse
    import os
    parser = argparse.ArgumentParser(description="Plots a summary for a team using play logs")
    parser.add_argument("--cachedir", default="./cache")
    subparsers = parser.add_subparsers(dest="command")
    team_parser = subparsers.add_parser('team')
    team_parser.add_argument("team")
    division_parser = subparsers.add_parser('division')
    division_parser.add_argument("division", choices=DIVISIONS.keys())
    args = parser.parse_args()
    print(args)
    if args.command == "division":
        make_division_summary(args.division, args.cachedir)
    elif args.command == "team":
        # summaries are cached as summary_<TEAM>.csv by the playlog module
        expected_filename = os.path.join(args.cachedir, "summary_{}.csv".format(args.team))
        make_team_summary(args.team, expected_filename)
| {"/main.py": ["/urls.py", "/schedule.py", "/playlog.py", "/plot_summary.py"], "/schedule.py": ["/urls.py"], "/playlog.py": ["/urls.py"], "/plot_summary.py": ["/team_colors.py"], "/team_graph.py": ["/schedule.py", "/playlog.py"], "/team_colors.py": ["/urls.py"]} |
74,730 | ktarrant/WinCandles | refs/heads/master | /urls.py | import re
MLB_URL_SCHEDULE_BASE = "http://m.mlb.com/schedule"
# the mobile schedule page accepts a /YYYY/MM/DD suffix
MLB_URL_SCHEDULE_DATE = MLB_URL_SCHEDULE_BASE + "/{date.year:02}/{date.month:02}/{date.day:02}"

# mlb.com display names -> retrosheet-style team codes
MLB_NAME_REMAP = {
    "Twins": "MIN",
    "Orioles": "BAL",
    "Blue Jays": "TOR",
    "Brewers": "MIL",
    "Marlins": "MIA",
    # was "SFA", which is not a real team code and disagreed with the
    # "OAK" entry in plot_summary.DIVISIONS
    "Athletics": "OAK",
    "White Sox": "CHA",
    "D-backs": "ARI",
    "Reds": "CIN",
    "Indians": "CLE",
    "Rockies": "COL",
    "Phillies": "PHI",
    "Royals": "KCA",
    "Yankees": "NYA",
    "Mariners": "SEA",
    "Nationals": "WAS",
    "Angels": "ANA",
    "Rays": "TBA",
    "Padres": "SDN",
    "Mets": "NYN",
    "Rangers": "TEX",
    "Red Sox": "BOS",
    "Pirates": "PIT",
    "Braves": "ATL",
    "Giants": "SFN",
    "Cubs": "CHN",
    "Tigers": "DET",
    "Astros": "HOU",
    "Cardinals": "SLN",
    "Dodgers": "LAN",
}

FANGRAPHS_URL_ROOT = "http://www.fangraphs.com/"
FANGRAPHS_URL_BASE = "{root}scoreboard.aspx?date={date}"
FANGRAPHS_URL_PLAY = "{root}{log}"
# extracts the team= query parameter value (may contain spaces)
FANGRAPHS_URL_TEAM_RE = re.compile(r"team=([\w\s]+)&")
FANGRAPHS_PLAYLOG_URL = FANGRAPHS_URL_ROOT + "plays.aspx?date={date}&team={team}&dh={dh}"

# fangraphs display names -> retrosheet-style team codes
# (differs from MLB_NAME_REMAP only in "Diamondbacks" vs "D-backs")
FANGRAPHS_NAME_REMAP = {
    "Red Sox": "BOS",
    "Indians": "CLE",
    "Cubs": "CHN",
    "Brewers": "MIL",
    "Tigers": "DET",
    "Orioles": "BAL",
    "Royals": "KCA",
    "Rangers": "TEX",
    "Mets": "NYN",
    "Angels": "ANA",
    "Rockies": "COL",
    "Astros": "HOU",
    "Phillies": "PHI",
    "Diamondbacks": "ARI",
    "Reds": "CIN",
    "Yankees": "NYA",
    "Nationals": "WAS",
    # was "SFA"; see the note on MLB_NAME_REMAP above
    "Athletics": "OAK",
    "Giants": "SFN",
    "Braves": "ATL",
    "Rays": "TBA",
    "Blue Jays": "TOR",
    "Padres": "SDN",
    "Marlins": "MIA",
    "Pirates": "PIT",
    "White Sox": "CHA",
    "Mariners": "SEA",
    "Cardinals": "SLN",
    "Twins": "MIN",
    "Dodgers": "LAN",
}
# team code -> fangraphs display name
FANGRAPHS_NAME_REVERSE_REMAP = { value: key for (key, value) in FANGRAPHS_NAME_REMAP.items() }
TEAM_COLORS_URL = "http://jim-nielsen.com/teamcolors/" | {"/main.py": ["/urls.py", "/schedule.py", "/playlog.py", "/plot_summary.py"], "/schedule.py": ["/urls.py"], "/playlog.py": ["/urls.py"], "/plot_summary.py": ["/team_colors.py"], "/team_graph.py": ["/schedule.py", "/playlog.py"], "/team_colors.py": ["/urls.py"]} |
74,731 | ktarrant/WinCandles | refs/heads/master | /team_graph.py | from schedule import get_schedule
from playlog import get_play_logs, get_play_log_data
from datetime import datetime
import logging
log = logging.getLogger(__name__)
def generate_play_logs(teamid, year=2016):
    """Yield one play-log record per game that *teamid* played in *year*.

    :param teamid: short team id (e.g. 'WAS') as used in the schedule frame.
    :param year: season year to fetch the schedule for.
    :yields: dicts with keys 'date', 'home', 'away' and 'log' (the parsed
        play-by-play data). Games whose play log cannot be found are logged
        and skipped.
    """
    sched = get_schedule(year)
    games = sched[(sched.home_team == teamid) | (sched.away_team == teamid)]
    for i in games.index:
        game = games.loc[i]
        game_date = datetime.strptime(str(game.date), '%Y%m%d').date()
        play_logs = get_play_logs(game_date)
        try:
            # Play logs are keyed by the HOME team of each game, so look
            # the game up by home_team even when teamid is the visitor.
            url_play_log = play_logs[game.home_team]
        except KeyError:
            # Fixed: the old message interpolated ``teamid`` even though the
            # key that is actually missing is the game's home team.
            log.error(
                "KeyError: Failed to find expected '{}' in Play Log list @ {}"
                .format(game.home_team, game_date))
            continue
        # Fixed: 'home' previously reported ``teamid`` even when teamid was
        # the away team; report the actual home team instead.
        yield {'date': game.date, 'home': game.home_team,
               'away': game.away_team,
               'log': get_play_log_data(url_play_log)}
if __name__ == "__main__":
    # Manual smoke test: dump every Nationals play log for the default year.
    logging.basicConfig(level=logging.DEBUG)
    print(list(generate_play_logs('WAS')))
74,732 | ktarrant/WinCandles | refs/heads/master | /team_colors.py | from urls import MLB_NAME_REMAP
# Official team color palettes keyed by full team name (primary color first).
# Fixed: several entries were missing the leading '#', making them invalid as
# hex color strings for matplotlib and friends; every value is now '#RRGGBB'.
TEAM_COLORS_RAW = {
    "Arizona D-backs": ['#A71930', '#000000', '#E3D4AD'],
    "Atlanta Braves": ['#CE1141', '#13274F'],
    "Baltimore Orioles": ['#DF4601', '#000000'],
    "Boston Red Sox": ['#BD3039', '#0D2B56'],
    "Chicago Cubs": ['#CC3433', '#0E3386'],
    "Chicago White Sox": ['#000000', '#C4CED4'],
    "Cincinnati Reds": ['#C6011F', '#000000'],
    "Cleveland Indians": ['#E31937', '#002B5C'],
    "Colorado Rockies": ['#333366', '#231F20', '#C4CED4'],
    "Detroit Tigers": ['#0C2C56', '#000000'],
    "Houston Astros": ['#002D62', '#EB6E1F'],
    "Kansas City Royals": ['#004687', '#C09A5B'],
    "Los Angeles Angels of Anaheim": ['#BA0021', '#003263'],
    "Los Angeles Dodgers": ['#EF3E42', '#005A9C'],
    "Miami Marlins": ['#FF6600', '#0077C8', '#FFD100', '#000000'],
    "Milwaukee Brewers": ['#0A2351', '#B6922E'],
    "Minnesota Twins": ['#002B5C', '#D31145'],
    "New York Mets": ['#FF5910', '#002D72'],
    "New York Yankees": ['#E4002B', '#003087'],
    "Oakland Athletics": ['#003831', '#EFB21E'],
    "Philadelphia Phillies": ['#284898', '#E81828'],
    "Pittsburgh Pirates": ['#FDB827', '#000000'],
    "San Diego Padres": ['#002D62', '#FEC325', '#7F411C', '#A0AAB2'],
    "San Francisco Giants": ['#FD5A1E', '#000000', '#8B6F4E'],
    "Seattle Mariners": ['#0C2C56', '#005C5C', '#C4CED4'],
    "St Louis Cardinals": ['#C41E3A', '#000066', '#FEDB00'],
    "Tampa Bay Rays": ['#092C5C', '#8FBCE6', '#F5D130'],
    "Texas Rangers": ['#C0111F', '#003278'],
    "Toronto Blue Jays": ['#134A8E', '#1D2D5C', '#E8291C'],
    "Washington Nationals": ['#AB0003', '#11225B'],
}
def _find_team_id(team_name):
    """Resolve a full team name to its short id via nickname containment.

    Returns the id of the first MLB_NAME_REMAP nickname that occurs inside
    *team_name*; raises KeyError when no nickname matches.
    """
    match = next((team_id for nickname, team_id in MLB_NAME_REMAP.items()
                  if nickname in team_name), None)
    if match is not None:
        return match
    raise KeyError("Failed to find team id for: {}".format(team_name))


# Color palettes re-keyed by short team id instead of full team name.
TEAM_COLORS_LOOKUP = {
    _find_team_id(name): palette for name, palette in TEAM_COLORS_RAW.items()
}
74,753 | pwr-projects/event-reactions | refs/heads/master | /src/event_reactions/utils/config.py | import json
from typing import Text
class Config:
    """Configuration loaded from a JSON file.

    Every top-level key of the JSON document becomes an attribute of the
    instance (e.g. ``config.newsapi_key``); ``dump()`` writes them back.
    """

    def __init__(self, path: Text):
        """Load configuration from the JSON file at *path*.

        :param path: filesystem path of the JSON config file.
        """
        self.path = path
        with open(self.path, 'r') as fstr:
            # Fixed: merge the loaded keys instead of replacing __dict__
            # wholesale — the old assignment clobbered ``self.path`` itself,
            # which dump() needs later.
            self.__dict__.update(json.load(fstr))

    def dump(self):
        """Save the configuration back to the JSON file it was loaded from.

        Fixed: the old code passed the Config instance itself to json.dump,
        which raises TypeError (Config is not JSON serializable). The
        bookkeeping attribute ``path`` is excluded so the file round-trips.
        """
        data = {k: v for k, v in self.__dict__.items() if k != 'path'}
        with open(self.path, 'w') as fstr:
            json.dump(data, fstr)
| {"/src/event_reactions/utils/newsapi_wrapper.py": ["/src/event_reactions/utils/config.py"], "/src/event_reactions/utils/__init__.py": ["/src/event_reactions/utils/config.py", "/src/event_reactions/utils/newsapi_wrapper.py"]} |
74,754 | pwr-projects/event-reactions | refs/heads/master | /src/setup.py | #!/bin/python
from setuptools import find_packages, setup
# Package metadata and runtime dependencies for the event_reactions package.
setup(
    name='event_reactions',
    version='0.1',
    packages=find_packages(),
    url='https://github.com/pwr-projects/event-reactions',
    license='',
    author='Mateusz Gaweł, Grzegorz Suszka',
    # author_email='',
    description='',
    install_requires=[
        'newsapi-python',
        'tqdm'
    ]
)
| {"/src/event_reactions/utils/newsapi_wrapper.py": ["/src/event_reactions/utils/config.py"], "/src/event_reactions/utils/__init__.py": ["/src/event_reactions/utils/config.py", "/src/event_reactions/utils/newsapi_wrapper.py"]} |
74,755 | pwr-projects/event-reactions | refs/heads/master | /src/run.py | from pprint import pprint
from event_reactions import Config, NewsAPIWrapper
if __name__ == '__main__':
    # Smoke-test entry point: read the NewsAPI key from config.json and
    # pretty-print all English-language news sources.
    config = Config('./config.json')
    api = NewsAPIWrapper(config)
    sources = api.sources_lang('en')
    pprint(sources)
| {"/src/event_reactions/utils/newsapi_wrapper.py": ["/src/event_reactions/utils/config.py"], "/src/event_reactions/utils/__init__.py": ["/src/event_reactions/utils/config.py", "/src/event_reactions/utils/newsapi_wrapper.py"]} |
74,756 | pwr-projects/event-reactions | refs/heads/master | /src/event_reactions/utils/newsapi_wrapper.py | from typing import Dict, Sequence, Text
from newsapi import NewsAPI
from .config import Config
class NewsAPIWrapper(NewsAPI):
    """Thin convenience wrapper around the NewsAPI client.

    Adds language-filtered source listing on top of the base client.
    """

    def __init__(self, config: Config):
        """Authenticate the underlying client with ``config.newsapi_key``."""
        self._config = config
        super().__init__(config.newsapi_key)

    def sources_lang(self, language: Text) -> Sequence[Dict]:
        """Return the NewsAPI sources published in *language*.

        :param language: two-letter language code such as ``'en'`` or ``'pl'``.
        :return: list of source-description dicts.
        """
        return self._filter_language(language, self.sources())

    def _filter_language(self, language: Text, sources: Sequence[Dict]) -> Sequence[Dict]:
        # List comprehension instead of filter()+lambda; identical result.
        return [info for info in sources if info['language'] == language]
| {"/src/event_reactions/utils/newsapi_wrapper.py": ["/src/event_reactions/utils/config.py"], "/src/event_reactions/utils/__init__.py": ["/src/event_reactions/utils/config.py", "/src/event_reactions/utils/newsapi_wrapper.py"]} |
74,757 | pwr-projects/event-reactions | refs/heads/master | /src/event_reactions/utils/__init__.py | from .config import Config
from .newsapi_wrapper import NewsAPIWrapper | {"/src/event_reactions/utils/newsapi_wrapper.py": ["/src/event_reactions/utils/config.py"], "/src/event_reactions/utils/__init__.py": ["/src/event_reactions/utils/config.py", "/src/event_reactions/utils/newsapi_wrapper.py"]} |
74,761 | nikeshkrjha/KarobarWeb | refs/heads/master | /karobar/migrations/0001_initial.py | # Generated by Django 3.1 on 2020-09-02 09:11
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated initial schema: creates Customer and Transaction."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Customer',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('customer_fname', models.CharField(max_length=50, verbose_name='First Name')),
                ('customer_lname', models.CharField(max_length=50, verbose_name='Last Name')),
                ('email', models.EmailField(blank=True, max_length=50, verbose_name='Email')),
                ('phone_number', models.CharField(max_length=13, verbose_name='Phone Number')),
                ('customer_city', models.CharField(max_length=50, verbose_name='City')),
                ('customer_zipcode', models.CharField(blank=True, max_length=5, verbose_name='Zip code')),
            ],
        ),
        migrations.CreateModel(
            name='Transaction',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('transaction_title', models.CharField(max_length=200, verbose_name='Title')),
                ('transaction_date', models.DateTimeField(auto_now_add=True, verbose_name='Date')),
                ('transaction_amount', models.DecimalField(decimal_places=2, max_digits=12, verbose_name='Amount')),
                ('transaction_is_cash', models.BooleanField(default=True, verbose_name='Cash')),
            ],
        ),
    ]
| {"/api/views.py": ["/api/serializers.py", "/karobar/models.py"], "/api/serializers.py": ["/karobar/models.py"], "/karobar/admin.py": ["/karobar/models.py"]} |
74,762 | nikeshkrjha/KarobarWeb | refs/heads/master | /karobar/migrations/0003_paymentsreceived_supplierpayment.py | # Generated by Django 3.1 on 2020-09-03 06:04
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: adds SupplierPayment and PaymentsReceived tables."""

    dependencies = [
        ('karobar', '0002_auto_20200902_2314'),
    ]

    operations = [
        migrations.CreateModel(
            name='SupplierPayment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('description', models.CharField(max_length=200)),
                ('amount', models.DecimalField(decimal_places=2, max_digits=12)),
                ('payment_date', models.DateTimeField(auto_now_add=True)),
                ('paid_to', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to='karobar.supplier')),
            ],
        ),
        migrations.CreateModel(
            name='PaymentsReceived',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('description', models.CharField(max_length=200)),
                ('amount', models.DecimalField(decimal_places=2, max_digits=12)),
                ('payment_date', models.DateTimeField(auto_now_add=True)),
                ('paid_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to='karobar.customer')),
            ],
        ),
    ]
| {"/api/views.py": ["/api/serializers.py", "/karobar/models.py"], "/api/serializers.py": ["/karobar/models.py"], "/karobar/admin.py": ["/karobar/models.py"]} |
74,763 | nikeshkrjha/KarobarWeb | refs/heads/master | /api/urls.py | from django.urls import path
from rest_framework.urlpatterns import format_suffix_patterns
from api import views
# REST API routes for the karobar app.
urlpatterns = [
    path('customers/', views.CustomerList.as_view()),
    path('customers/<int:pk>/', views.CustomerDetail.as_view()),
    path('suppliers/', views.SupplierList.as_view()),
    path('payments/', views.PaymentsReceivedList.as_view()),
]
# Allow format suffixes such as /customers.json on every route.
urlpatterns = format_suffix_patterns(urlpatterns)
| {"/api/views.py": ["/api/serializers.py", "/karobar/models.py"], "/api/serializers.py": ["/karobar/models.py"], "/karobar/admin.py": ["/karobar/models.py"]} |
74,764 | nikeshkrjha/KarobarWeb | refs/heads/master | /api/views.py | from django.shortcuts import render
from api.serializers import CustomerSerializer, SuppliersSerializer, PaymentsReceivedSerializer
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from django.http import Http404
from karobar.models import Customer, Supplier, Purchase, SupplierPayment, PaymentsReceived
from rest_framework import mixins
from rest_framework import generics
# Create your views here.
# ============ OLD IMPLEMENTATION =============== #
# class CustomerList(APIView):
# """
# List all customers, or create a new snippet.
# """
# def get(self, request, format=None):
# customers = Customer.objects.all()
# serializer = CustomerSerializer(customers, many=True)
# data = {'data': serializer.data}
# return Response(data)
# ============ OLD IMPLEMENTATION =============== #
# ============ NEW IMPLEMENTATION USING MIXINS =============== #
class CustomerList(generics.ListCreateAPIView):
    """GET: list all customers; POST: create a new customer."""
    queryset = Customer.objects.all()
    serializer_class = CustomerSerializer
class CustomerDetail(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve, update or delete a single Customer by pk.

    Replaces the hand-wired mixin combination with DRF's equivalent concrete
    view: GET/PUT/DELETE behave exactly as before, and PATCH (partial update)
    becomes available as well — a backward-compatible addition.
    """
    queryset = Customer.objects.all()
    serializer_class = CustomerSerializer
# ============ NEW IMPLEMENTATION USING MIXINS =============== #
class SupplierList(APIView):
    """List every supplier, wrapped in a ``{"data": [...]}`` envelope."""

    def get(self, request, format=None):
        """Return all suppliers serialized under the 'data' key."""
        serialized = SuppliersSerializer(Supplier.objects.all(), many=True)
        return Response({'data': serialized.data})
class PaymentsReceivedList(APIView):
    """List every received payment, wrapped in a ``{"data": [...]}`` envelope."""

    def get(self, request, format=None):
        """Return all received payments serialized under the 'data' key."""
        serialized = PaymentsReceivedSerializer(
            PaymentsReceived.objects.all(), many=True)
        return Response({'data': serialized.data})
| {"/api/views.py": ["/api/serializers.py", "/karobar/models.py"], "/api/serializers.py": ["/karobar/models.py"], "/karobar/admin.py": ["/karobar/models.py"]} |
74,765 | nikeshkrjha/KarobarWeb | refs/heads/master | /api/serializers.py | from rest_framework import serializers
from karobar.models import Customer, Supplier, SupplierPayment, PaymentsReceived, Sales, Purchase
class CustomerSerializer(serializers.ModelSerializer):
    """Serializes Customer records for the REST API."""
    class Meta:
        model = Customer
        fields = ('id', 'customer_fname', 'customer_lname', 'email',
                  'phone_number', 'customer_city', 'customer_zipcode')
class SuppliersSerializer(serializers.ModelSerializer):
    """Serializes Supplier records for the REST API."""
    class Meta:
        model = Supplier
        fields = ('id', 'supplier_name', 'email',
                  'phone_number', 'supplier_city', 'supplier_zipcode')
class PaymentsReceivedSerializer(serializers.ModelSerializer):
    """Serializes PaymentsReceived records for the REST API."""
    class Meta:
        model = PaymentsReceived
        fields = ('id', 'description', 'amount',
                  'payment_date', 'paid_by')
| {"/api/views.py": ["/api/serializers.py", "/karobar/models.py"], "/api/serializers.py": ["/karobar/models.py"], "/karobar/admin.py": ["/karobar/models.py"]} |
74,766 | nikeshkrjha/KarobarWeb | refs/heads/master | /karobar/migrations/0004_auto_20200903_0107.py | # Generated by Django 3.1 on 2020-09-03 06:07
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: drops auto_now_add from the payment_date fields."""

    dependencies = [
        ('karobar', '0003_paymentsreceived_supplierpayment'),
    ]

    operations = [
        migrations.AlterField(
            model_name='paymentsreceived',
            name='payment_date',
            field=models.DateTimeField(),
        ),
        migrations.AlterField(
            model_name='supplierpayment',
            name='payment_date',
            field=models.DateTimeField(),
        ),
    ]
| {"/api/views.py": ["/api/serializers.py", "/karobar/models.py"], "/api/serializers.py": ["/karobar/models.py"], "/karobar/admin.py": ["/karobar/models.py"]} |
74,767 | nikeshkrjha/KarobarWeb | refs/heads/master | /karobar/admin.py | from django.contrib import admin
from karobar.models import Customer, Sales, Purchase, Supplier, SupplierPayment, PaymentsReceived
# The model classes for KarobarWeb app
@admin.register(Customer)
class CustomerAdmin(admin.ModelAdmin):
    """Admin form/listing config for Customer."""
    fields = ('customer_fname', 'customer_lname', 'email', 'phone_number')
    list_display = ('customer_fname', 'customer_lname',
                    'email', 'phone_number')


@admin.register(Supplier)
class SupplierAdmin(admin.ModelAdmin):
    """Admin form/listing config for Supplier."""
    fields = ('supplier_name', 'phone_number', 'email', 'supplier_city')
    list_display = ('supplier_name', 'phone_number', 'email', 'supplier_city')


@admin.register(Sales)
class SalesAdmin(admin.ModelAdmin):
    """Admin form/listing config for Sales."""
    fields = ('transaction_title', 'transaction_date',
              'transaction_amount', 'transaction_is_cash', 'sold_to')
    list_display = ('transaction_title', 'transaction_date',
                    'transaction_amount', 'transaction_is_cash', 'sold_to')


@admin.register(Purchase)
class PurchaseAdmin(admin.ModelAdmin):
    """Admin form/listing config for Purchase."""
    fields = ('transaction_title', 'transaction_date',
              'transaction_amount', 'transaction_is_cash', 'purchased_from')
    list_display = ('transaction_title', 'transaction_date',
                    'transaction_amount', 'transaction_is_cash', 'purchased_from')


@admin.register(SupplierPayment)
class SupplierPaymentAdmin(admin.ModelAdmin):
    """Admin form/listing config for SupplierPayment."""
    fields = ('description', 'amount', 'payment_date', 'paid_to')
    list_display = ('description', 'amount', 'payment_date', 'paid_to')


@admin.register(PaymentsReceived)
class PaymentsReceivedAdmin(admin.ModelAdmin):
    """Admin form/listing config for PaymentsReceived."""
    fields = ('description', 'amount', 'payment_date', 'paid_by')
    list_display = ('description', 'amount', 'payment_date', 'paid_by')
| {"/api/views.py": ["/api/serializers.py", "/karobar/models.py"], "/api/serializers.py": ["/karobar/models.py"], "/karobar/admin.py": ["/karobar/models.py"]} |
74,768 | nikeshkrjha/KarobarWeb | refs/heads/master | /karobar/migrations/0002_auto_20200902_2314.py | # Generated by Django 3.1 on 2020-09-03 04:14
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: splits Transaction into Sales/Purchase, adds Supplier."""

    dependencies = [
        ('karobar', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Purchase',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('transaction_title', models.CharField(max_length=200, verbose_name='Title')),
                ('transaction_date', models.DateTimeField(auto_now_add=True, verbose_name='Date')),
                ('transaction_amount', models.DecimalField(decimal_places=2, max_digits=12, verbose_name='Amount')),
                ('transaction_is_cash', models.BooleanField(default=True, verbose_name='Cash')),
            ],
        ),
        migrations.CreateModel(
            name='Sales',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('transaction_title', models.CharField(max_length=200, verbose_name='Title')),
                ('transaction_date', models.DateTimeField(auto_now_add=True, verbose_name='Date')),
                ('transaction_amount', models.DecimalField(decimal_places=2, max_digits=12, verbose_name='Amount')),
                ('transaction_is_cash', models.BooleanField(default=True, verbose_name='Cash')),
                ('sold_to', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='karobar.customer')),
            ],
        ),
        migrations.CreateModel(
            name='Supplier',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('supplier_name', models.CharField(max_length=100)),
                ('email', models.EmailField(blank=True, max_length=50, verbose_name='Email')),
                ('phone_number', models.CharField(max_length=13, verbose_name='Phone Number')),
                ('supplier_city', models.CharField(max_length=50, verbose_name='City')),
                ('supplier_zipcode', models.CharField(blank=True, max_length=5, verbose_name='Zip Code')),
            ],
        ),
        migrations.DeleteModel(
            name='Transaction',
        ),
        migrations.AddField(
            model_name='purchase',
            name='purchased_from',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to='karobar.supplier'),
        ),
    ]
| {"/api/views.py": ["/api/serializers.py", "/karobar/models.py"], "/api/serializers.py": ["/karobar/models.py"], "/karobar/admin.py": ["/karobar/models.py"]} |
74,769 | nikeshkrjha/KarobarWeb | refs/heads/master | /karobar/models.py | from django.db import models
from django.utils import timezone
# Create your models here.
class Supplier(models.Model):
    """A supplier the business purchases goods from."""
    supplier_name = models.CharField(max_length=100, blank=False, null=False)
    email = models.EmailField(max_length=50, verbose_name="Email", blank=True)
    phone_number = models.CharField(
        max_length=13, verbose_name="Phone Number", blank=False)
    supplier_city = models.CharField(
        max_length=50, blank=False, verbose_name="City")
    supplier_zipcode = models.CharField(
        max_length=5, blank=True, verbose_name="Zip Code")

    def __str__(self):
        return self.supplier_name
class Customer(models.Model):
    """A customer the business sells goods to."""
    customer_fname = models.CharField(
        max_length=50, blank=False, verbose_name="First Name")
    customer_lname = models.CharField(
        max_length=50, blank=False, verbose_name="Last Name")
    email = models.EmailField(max_length=50, verbose_name="Email", blank=True)
    phone_number = models.CharField(
        max_length=13, verbose_name="Phone Number", blank=False)
    customer_city = models.CharField(
        max_length=50, blank=False, verbose_name="City")
    customer_zipcode = models.CharField(
        max_length=5, blank=True, verbose_name="Zip code")

    def __str__(self):
        return self.customer_fname + " " + self.customer_lname
# The sales model. Represents the sales made to customers.
class Sales(models.Model):
    """A sale transaction made to a customer."""
    transaction_title = models.CharField(
        max_length=200, blank=False, verbose_name="Title")
    transaction_date = models.DateTimeField(
        auto_now_add=False, verbose_name="Date", default=timezone.now)
    transaction_amount = models.DecimalField(
        max_digits=12, decimal_places=2, verbose_name="Amount")
    transaction_is_cash = models.BooleanField(
        blank=False, null=False, verbose_name="Cash", default=True)
    # Nullable so a sale can exist without an identified customer.
    sold_to = models.ForeignKey(
        Customer, related_name='+', on_delete=models.CASCADE, null=True)

    def __str__(self):
        return self.transaction_title + " for " + str(self.transaction_amount)
# The purchase model. Represents the purchase made from suppliers.
class Purchase(models.Model):
    """A purchase transaction made from a supplier."""
    transaction_title = models.CharField(
        max_length=200, blank=False, verbose_name="Title")
    transaction_date = models.DateTimeField(
        auto_now_add=False, verbose_name="Date", default=timezone.now)
    transaction_amount = models.DecimalField(
        max_digits=12, decimal_places=2, verbose_name="Amount")
    # Fixed: this field was declared twice; the redundant second definition
    # (which simply overwrote this one at class-creation time) is removed.
    transaction_is_cash = models.BooleanField(
        blank=False, null=False, verbose_name="Cash", default=True)
    purchased_from = models.ForeignKey(
        Supplier, related_name='+', on_delete=models.CASCADE)

    def __str__(self):
        return self.transaction_title + " for " + str(self.transaction_amount)
# The SupplierPayment model. Represents the payments made to suppliers.
class SupplierPayment(models.Model):
    """A payment made to a supplier."""
    description = models.CharField(max_length=200, blank=False, null=False)
    amount = models.DecimalField(max_digits=12, decimal_places=2)
    payment_date = models.DateTimeField(
        auto_now_add=False, default=timezone.now)
    # Fixed: the original chained assignment ``paid_to = models.name = ...``
    # also set a bogus ``name`` attribute on the django.db.models module.
    paid_to = models.ForeignKey(
        'Supplier', related_name='+', on_delete=models.CASCADE)
# The PaymentsReceived model. Represents the payments received from customers.
class PaymentsReceived(models.Model):
    """A payment received from a customer."""
    description = models.CharField(max_length=200, blank=False, null=False)
    amount = models.DecimalField(max_digits=12, decimal_places=2)
    payment_date = models.DateTimeField(
        auto_now_add=False, default=timezone.now)
    # Fixed: the original chained assignment ``paid_by = models.name = ...``
    # also set a bogus ``name`` attribute on the django.db.models module.
    paid_by = models.ForeignKey(
        'Customer', related_name='+', on_delete=models.CASCADE)
| {"/api/views.py": ["/api/serializers.py", "/karobar/models.py"], "/api/serializers.py": ["/karobar/models.py"], "/karobar/admin.py": ["/karobar/models.py"]} |
74,770 | nikeshkrjha/KarobarWeb | refs/heads/master | /karobar/migrations/0005_auto_20200903_0111.py | # Generated by Django 3.1 on 2020-09-03 06:11
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    """Auto-generated: gives payment_date a timezone.now default."""

    dependencies = [
        ('karobar', '0004_auto_20200903_0107'),
    ]

    operations = [
        migrations.AlterField(
            model_name='paymentsreceived',
            name='payment_date',
            field=models.DateTimeField(default=django.utils.timezone.now),
        ),
        migrations.AlterField(
            model_name='supplierpayment',
            name='payment_date',
            field=models.DateTimeField(default=django.utils.timezone.now),
        ),
    ]
| {"/api/views.py": ["/api/serializers.py", "/karobar/models.py"], "/api/serializers.py": ["/karobar/models.py"], "/karobar/admin.py": ["/karobar/models.py"]} |
74,782 | tiyd-python-2015-01/todo-demo | refs/heads/master | /todo/models.py | from . import db
class Todo(db.Model):
    """A single todo item; ``completed_at`` is NULL while it is still open."""
    id = db.Column(db.Integer, primary_key=True)
    text = db.Column(db.String(255), nullable=False)
    # Set to the completion timestamp when the item is checked off.
    completed_at = db.Column(db.DateTime)

    def __init__(self, text):
        self.text = text

    def __repr__(self):
        return "<Todo {}>".format(self.text)
74,783 | tiyd-python-2015-01/todo-demo | refs/heads/master | /todo/views.py | from datetime import datetime
from flask import render_template, request, redirect, url_for, flash
from .models import Todo
from .forms import TodoForm
from . import app, db
@app.route("/")
def index():
    """Home page: open and completed todos plus the add-todo form."""
    # SQLAlchemy requires `== None` / `!= None` here (it translates them to
    # SQL IS NULL / IS NOT NULL); `is None` would not build the right filter.
    current_todos = Todo.query.filter(Todo.completed_at == None).all()
    completed_todos = Todo.query.filter(Todo.completed_at != None).all()
    new_todo_form = TodoForm()
    return render_template("index.html",
                           new_todo_form=new_todo_form,
                           todos=current_todos,
                           completed=completed_todos)
@app.route("/add", methods=['POST'])
def add_todo():
    """Create a todo from the submitted form, flashing the outcome."""
    form = TodoForm()
    if not form.validate_on_submit():
        flash("Your todo could not be created.")
        return redirect(url_for('index'))
    db.session.add(Todo(form.text.data))
    db.session.commit()
    flash("Your todo was created.")
    return redirect(url_for('index'))
@app.route("/complete", methods=['POST'])
def complete():
    """Mark every todo checked in the form as completed.

    Robustness fix: ``Todo.query.get`` returns None for stale or forged ids,
    which previously raised AttributeError; such ids are now skipped. All
    updates are committed in a single transaction after the loop.
    """
    ids = request.form.getlist('todo')
    now = datetime.utcnow()
    for id in ids:
        todo = Todo.query.get(id)
        if todo is None:
            continue
        todo.completed_at = now
        db.session.add(todo)
    db.session.commit()
    return redirect(url_for('index'))
74,784 | tiyd-python-2015-01/todo-demo | refs/heads/master | /todo/forms.py | from flask_wtf import Form
from wtforms import StringField
from wtforms.validators import DataRequired
class TodoForm(Form):
    """Single-field form for creating a todo; text is required."""
    text = StringField('text', validators=[DataRequired()])
74,785 | tiyd-python-2015-01/todo-demo | refs/heads/master | /todo/__init__.py | from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
# Flask configuration values; app.config.from_object(__name__) below picks up
# every UPPERCASE name defined in this module.
DATABASE = '/tmp/todo.db'
DEBUG = True
SECRET_KEY = 'development-key'  # NOTE(review): dev-only key — override in production
SQLALCHEMY_DATABASE_URI = "sqlite:///" + DATABASE

app = Flask(__name__)
app.config.from_object(__name__)
db = SQLAlchemy(app)

# Imported last so the views module can import `app`/`db` from this package
# without a circular-import error.
from . import views
74,786 | tiyd-python-2015-01/todo-demo | refs/heads/master | /run.py | from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand
from flask.ext.script.commands import ShowUrls, Clean
from todo import app, db
# Wire up Flask-Migrate and Flask-Script management commands, then hand
# control to the CLI dispatcher.
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)      # database migration commands
manager.add_command("show-urls", ShowUrls())   # list registered routes
manager.add_command("clean", Clean())          # project housekeeping
manager.run()
| {"/todo/models.py": ["/todo/__init__.py"], "/todo/views.py": ["/todo/models.py", "/todo/forms.py", "/todo/__init__.py"], "/run.py": ["/todo/__init__.py"]} |
74,798 | manarsherif/website | refs/heads/master | /photos/admin.py | from django.contrib import admin
from .models import Photo ,ImageClass
# Expose the photo models in the Django admin with default ModelAdmin options.
admin.site.register(Photo)
admin.site.register(ImageClass)
| {"/photos/admin.py": ["/photos/models.py"]} |
74,799 | manarsherif/website | refs/heads/master | /photos/views.py | from django.views import generic
from django.shortcuts import render , redirect
from django.views.generic.edit import CreateView , UpdateView , DeleteView
from models import Photo , ImageClass
from django.http import HttpResponse
import numpy as np
import tensorflow as tf
import argparse
import glob
import os
import re
from tensorflow.python.platform import gfile
import csv
from django.contrib.auth import authenticate , login
from django.views.generic import View
from django.contrib.auth.models import User
'''images_dir = '/home/asmaanabil/Desktop/test2/'
modelFullPath = '/home/asmaanabil/Downloads/inception-2015-12-05/classify_image_graph_def.pb'
indexpath = '/home/asmaanabil/Desktop/featureswaleed.csv'
list_images = [images_dir + f for f in os.listdir(images_dir) if re.search('jpg|JPG', f)]'''
def create_graph():
    """Creates a graph from saved GraphDef file and returns a saver."""
    # Creates graph from saved graph_def.pb.
    # NOTE(review): ``modelFullPath`` is only defined in the commented-out
    # block above, so calling this as-is raises NameError — TODO restore the
    # constant or pass the model path in as a parameter.
    with tf.gfile.FastGFile(modelFullPath, 'rb') as f:
        graph_def = tf.GraphDef()
        graph_def.ParseFromString(f.read())
        _ = tf.import_graph_def(graph_def, name='')
def extract_features(list_images):
    """Run each image through Inception and collect its pool_3 activations.

    Writes one CSV row per image (path followed by 2048 feature values) to
    the file at ``indexpath`` and returns the features as a numpy array of
    shape (len(list_images), 2048).

    Fixed: the index file is now closed via a context manager — the original
    ``output.close()`` sat after ``return`` and never executed, leaking the
    handle. NOTE(review): ``indexpath`` is only defined in the commented-out
    block above — TODO restore it or make it a parameter.
    """
    nb_features = 2048
    features = np.empty((len(list_images), nb_features))
    create_graph()
    with tf.Session() as sess:
        next_to_last_tensor = sess.graph.get_tensor_by_name('pool_3:0')
        with open(indexpath, "w") as output:
            for ind, image in enumerate(list_images):
                print('Processing %s...' % (image))
                if not gfile.Exists(image):
                    tf.logging.fatal('File does not exist %s', image)
                image_data = gfile.FastGFile(image, 'rb').read()
                predictions = sess.run(next_to_last_tensor,
                                       {'DecodeJpeg/contents:0': image_data})
                feature = np.squeeze(predictions)
                feature = [str(f) for f in feature]
                output.write("%s,%s\n" % (image, ",".join(feature)))
                features[ind, :] = feature
    return features
class Test(generic.DetailView):
    """Detail page for a single Photo, looked up by pk."""
    model = Photo
    template_name = 'photos/test.html'
class Index(generic.ListView):
    """Landing page listing all photos."""
    model = Photo
    template_name = 'photos/index.html'
def search(request):
    """Render the search page: photos whose class matches the submitted text.

    Fixes:
    - the default queryset came from ``ImageClass().photo_set`` on an UNSAVED
      instance, which Django rejects; an empty Photo queryset is used instead;
    - a missing/empty POST value no longer crashes ``input_text.lower()``;
    - the class is found with a single filtered query instead of scanning
      every ImageClass row in Python.
    """
    input_text = ""
    related_images = Photo.objects.none()
    if request.method == "POST":
        input_text = request.POST.get("input") or ""
        image_class = ImageClass.objects.filter(
            class_name=input_text.lower()).first()
        if image_class is not None:
            related_images = image_class.photo_set.all()
    return render(request, 'photos/search.html',
                  {'input_text': input_text, 'related_images': related_images})
| {"/photos/admin.py": ["/photos/models.py"]} |
74,800 | manarsherif/website | refs/heads/master | /website/urls.py | from django.conf.urls import url , include #include e7na elly benzwedha 34an ne2darn include el apps urls
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
# Project-level URL routing: admin plus the photos and users apps.
urlpatterns = [
    url(r'^admin/', admin.site.urls),
    url(r'^photos/' , include('photos.urls')),
    url(r'^users/' , include('users.urls')),
]

# Serve static and uploaded photo files from Django itself in development
# only; in production a web server should handle these paths.
if settings.DEBUG:
    urlpatterns += static(settings.STATIC_URL , document_root = settings.STATIC_ROOT)
    urlpatterns += static(settings.PHOTOS_URL, document_root=settings.PHOTOS_ROOT)
74,801 | manarsherif/website | refs/heads/master | /database_insertions.py | import os
from photos.models import Photo, ImageClass
from django.core.files import File
images_directory = '/home/manar/Downloads/mini-dataset'
features_directory = '/home/manar/Downloads/mini-dataset2'
for root, dirs, files in os.walk(images_directory):
for direcory in dirs:
image_class = ImageClass()
image_class.class_name=direcory.lower()
images_dir= images_directory+'/'+direcory
if os.listdir(images_dir):
image_class.save()
print 'class ' + direcory + ' is added to database'
for filee in os.listdir(images_dir):
if filee.endswith('.jpg'):
ph = Photo()
ph.name = filee
ph.class_name=image_class
ph.photo.save(filee, File(open(images_dir+'/' + filee, 'r')))
feature_file_dir= features_directory+'/'+direcory+'/'+filee+'.txt'
ph.features.save(filee, File(open(feature_file_dir, 'r')))
ph.save()
| {"/photos/admin.py": ["/photos/models.py"]} |
74,802 | manarsherif/website | refs/heads/master | /photos/models.py | from __future__ import unicode_literals
from django.db import models
class ImageClass(models.Model):
class_name= models.CharField(max_length=255)
def __str__(self):
return self.class_name
class Photo(models.Model):
class_name = models.ForeignKey(ImageClass, on_delete=models.CASCADE)
name = models.CharField(max_length=255)
photo = models.ImageField(upload_to='./photos/media/photos/')
features = models.FileField(blank = True , upload_to= './photos/media/features/')
def __str__(self):
return self.name
| {"/photos/admin.py": ["/photos/models.py"]} |
74,803 | manarsherif/website | refs/heads/master | /photos/urls.py | from django.conf.urls import url
from . import views
from django.conf import settings
from django.views.generic import TemplateView
app_name = 'photos'
# Routes for the photos app (namespaced as 'photos').
urlpatterns = [
    # /photos/
    url(r'^$', views.Index.as_view(), name='index'),
    # /photos/<photo_id>
    url(r'^(?P<pk>[0-9]+)$', views.Test.as_view(), name='test'),
    # /photos/search/
    # Fixed: this pattern was a second r'^$', which the index route above
    # shadowed, so the search view was unreachable. It now has its own path;
    # name-based reversing ({% url 'photos:search' %}) keeps working.
    url(r'^search/$', views.search, name='search'),
]
| {"/photos/admin.py": ["/photos/models.py"]} |
74,804 | Dq7uV8/pictweet | refs/heads/master | /tweetapp/views.py | from django.shortcuts import render, redirect
from django.views.generic import ListView, DetailView, CreateView, DeleteView, UpdateView
from .models import PicTweetModel
from django.urls import reverse_lazy
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
# Create your views here.
class PicList(LoginRequiredMixin, ListView):
    """Authenticated list of all PicTweetModel rows, rendered by list.html."""
    template_name = 'list.html'
    model = PicTweetModel
class PicDetail(LoginRequiredMixin, DetailView):
    """Detail page for one tweet; template refers to it as 'item'."""
    template_name = 'detail.html'
    model = PicTweetModel
    context_object_name = 'item'
class PicCreate(LoginRequiredMixin, CreateView):
    """Create form for a tweet; redirects to the list on success."""
    template_name = 'create.html'
    model = PicTweetModel
    # 'coment' matches the (misspelled) model field name.
    fields = (
        'title',
        'coment',
        'pic',
        'author',
    )
    success_url = reverse_lazy('tweetapp:list')
class PicDelete(LoginRequiredMixin, DeleteView):
    """Delete-confirmation page; redirects to the list on success."""
    template_name = 'delete.html'
    model = PicTweetModel
    context_object_name = 'item'
    success_url = reverse_lazy('tweetapp:list')
class PicUpdate(LoginRequiredMixin, UpdateView):
    """Edit form for a tweet; 'author' intentionally not editable here."""
    template_name = 'update.html'
    model = PicTweetModel
    fields = (
        'title',
        'coment',
        'pic',
    )
    success_url = reverse_lazy('tweetapp:list')
class SignUpView(CreateView):
    """User registration via Django's built-in UserCreationForm."""
    form_class = UserCreationForm
    success_url = reverse_lazy('login')
    template_name = 'registration/signup.html'
74,805 | Dq7uV8/pictweet | refs/heads/master | /tweetapp/admin.py | from django.contrib import admin
from .models import PicTweetModel
# Register your models here.
# Expose PicTweetModel in the Django admin.
admin.site.register(PicTweetModel)
74,806 | Dq7uV8/pictweet | refs/heads/master | /tweetapp/models.py | from django.db import models
# Create your models here.
class PicTweetModel(models.Model):
    """A picture tweet: title, optional comment/author, and the image."""
    title = models.CharField(max_length=100)
    # NOTE(review): 'coment' is misspelled, but renaming now would require a
    # migration plus form/view updates — callers reference it as-is.
    coment = models.TextField(null=True, blank=True)
    pic = models.ImageField(upload_to='')
    author = models.CharField(max_length=100, null=True, blank=True)
74,807 | Dq7uV8/pictweet | refs/heads/master | /tweetapp/urls.py | from django.urls import path
from .views import PicList, PicDetail, PicCreate, PicDelete, PicUpdate, SignUpView
app_name = 'tweetapp'
# CRUD + signup routes; names reverse as 'tweetapp:<name>'.
urlpatterns = [
    path('list/', PicList.as_view(), name='list'),
    path('detail/<int:pk>', PicDetail.as_view(), name='detail'),
    path('create/', PicCreate.as_view(), name='create'),
    path('delete/<int:pk>', PicDelete.as_view(), name='delete'),
    path('update/<int:pk>', PicUpdate.as_view(), name='update'),
    path('accounts/signup/', SignUpView.as_view(), name='signup'),
]
74,808 | extreme1337/FirstDjangoRestProject | refs/heads/main | /APITest/admin.py | from django.contrib import admin
from .models import Item, UserRegistration, CharityRegistration, OrderedItem
# Register your models here.
# Expose all four app models in the Django admin.
admin.site.register(Item)
admin.site.register(UserRegistration)
admin.site.register(CharityRegistration)
admin.site.register(OrderedItem)
| {"/RESTWithDjango/urls.py": ["/APITest/views.py"]} |
74,809 | extreme1337/FirstDjangoRestProject | refs/heads/main | /RESTWithDjango/urls.py | """RESTWithDjango URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from APITest.views import UserViewSet, GroupViewSet, add_item, register_new_charity, \
login_new_charity, login_user, get_all_charities, get_all_items_by_charity, order_new_item, modify_charity_details,\
get_charity_participants, delete_item
from rest_framework import routers
from rest_framework.authtoken import views
# DRF router auto-generates the /users/ and /groups/ viewset routes.
router = routers.DefaultRouter()
router.register(r'users', UserViewSet)
router.register(r'groups', GroupViewSet)

urlpatterns = [
    path('', include(router.urls)),
    # POST username/password here to obtain a DRF auth token.
    path('api-token-auth/', views.obtain_auth_token, name='api-token-auth'),
    path('admin/', admin.site.urls),
    path('add_item/', add_item),
    path('register_new_charity/', register_new_charity),
    path('login_new_charity/', login_new_charity),
    path('login_user/', login_user),
    path('get_all_charities/', get_all_charities),
    path('get_all_items_by_charity/', get_all_items_by_charity),
    path('order_new_item/', order_new_item),
    path('modify_charity_details/', modify_charity_details),
    path('get_charity_participants/', get_charity_participants),
    path('delete_item/', delete_item),
]
| {"/RESTWithDjango/urls.py": ["/APITest/views.py"]} |
74,810 | extreme1337/FirstDjangoRestProject | refs/heads/main | /APITest/views.py | from django.contrib.auth.models import User, Group
from rest_framework import viewsets, status
from rest_framework import permissions
from rest_framework.decorators import api_view, authentication_classes, permission_classes
from .serializers import UserSerializer, GroupSerializer, ItemSerializer, UserRegistrationSerializer, \
CharityRegistrationSerializer, UserLoginSerializer, CharityGetAllSerializer, OrderItemSerializer
from .models import CharityRegistration, UserRegistration, Item
from rest_framework.authtoken.models import Token
from rest_framework.authentication import TokenAuthentication, SessionAuthentication
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
# Create your views here.
class UserViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for auth User rows, newest first; login required."""
    queryset = User.objects.all().order_by('-date_joined')
    serializer_class = UserSerializer
    permission_classes = [permissions.IsAuthenticated]
class GroupViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for auth Groups; login required."""
    queryset = Group.objects.all()
    serializer_class = GroupSerializer
    permission_classes = [permissions.IsAuthenticated]
@api_view(['POST'])
@authentication_classes([TokenAuthentication])
@permission_classes([IsAuthenticated])
def add_item(request):
    """Create an Item from the posted payload (token-authenticated)."""
    item_serializer = ItemSerializer(data=request.data)
    if not item_serializer.is_valid():
        return Response(item_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
    item_serializer.save()
    return Response({"Status": "Added"}, status=status.HTTP_201_CREATED)
@api_view(['POST'])
def register_new_charity(request):
    """Register a charity from the posted payload."""
    reg_serializer = CharityRegistrationSerializer(data=request.data)
    if not reg_serializer.is_valid():
        return Response(reg_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
    reg_serializer.save()
    return Response({"Status": "Added"}, status=status.HTTP_201_CREATED)
@api_view(['POST'])
def login_new_charity(request):
    """Charity login: validate the payload, then compare the stored password.

    SECURITY(review): the password appears to be compared — and print()-ed —
    in plaintext; confirm hashing is applied at registration.
    NOTE(review): if CharityRegistrationSerializer enforces a unique email,
    is_valid() may reject existing charities, making login impossible — verify.
    """
    serializer = CharityRegistrationSerializer(data=request.data)
    if serializer.is_valid():
        email = serializer.data['email']
        try:
            # First matching row's password; IndexError (no such email) is
            # treated as "user not found" by the bare except below.
            password = list(CharityRegistration.objects.filter(email=email).values())[0]['password']
            print(password)
            if password == serializer.data['password']:
                return Response({"Status": "Success"}, status=status.HTTP_202_ACCEPTED)
            else:
                return Response({"Status": "Fail"}, status=status.HTTP_400_BAD_REQUEST)
        except:
            # NOTE(review): bare except also reports unrelated failures
            # (DB errors, KeyError) as "User Not Found".
            return Response({"Status": "User Not Found"}, status=status.HTTP_400_BAD_REQUEST)
    return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@api_view(['POST'])
def login_user(request):
    """User login: validate the payload, then compare the stored password.

    SECURITY(review): password compared — and print()-ed — in plaintext;
    confirm it is hashed at registration.
    """
    serializer = UserLoginSerializer(data=request.data)
    if serializer.is_valid():
        email = serializer.data['email']
        try:
            # First matching row's password; IndexError (no such email) is
            # treated as "user not found" by the bare except below.
            password = list(UserRegistration.objects.filter(email=email).values())[0]['password']
            print(password)
            if password == serializer.data['password']:
                return Response({"Status": "Success"}, status=status.HTTP_202_ACCEPTED)
            else:
                return Response({"Status": "Fail"}, status=status.HTTP_400_BAD_REQUEST)
        except:
            # NOTE(review): bare except masks unrelated errors as "User Not Found".
            return Response({"Status": "User Not Found"}, status=status.HTTP_400_BAD_REQUEST)
    return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@api_view(['GET'])
def get_all_charities(request):
    """Return every registered charity, sorted by name descending."""
    queryset = CharityRegistration.objects.all().order_by('-charity_name')
    return Response(CharityGetAllSerializer(queryset, many=True).data)
@api_view(['GET'])
def get_all_items_by_charity(request):
    """List the items belonging to one charity.

    NOTE(review): the charity email is read from the GET request *body*
    (request.data), not query params — confirm clients send it there.
    """
    email = dict(request.data)['email'][0]
    charities = Item.objects.filter(charity=email)
    serializer = ItemSerializer(charities, many=True)
    return Response(serializer.data)
@api_view(['POST'])
def order_new_item(request):
    """Place an order for an item from the posted payload."""
    order_serializer = OrderItemSerializer(data=request.data)
    if not order_serializer.is_valid():
        return Response(order_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
    order_serializer.save()
    return Response({"Status": "Ordered"}, status=status.HTTP_201_CREATED)
@api_view(['POST'])
def modify_charity_details(request):
    """Update name/password/city for the charity identified by email.

    NOTE(review): if CharityRegistrationSerializer enforces a unique email,
    is_valid() will reject the existing address and this update can never
    run — verify the serializer's validators.
    """
    serializer = CharityRegistrationSerializer(data=request.data)
    if serializer.is_valid():
        email = serializer.data['email']
        charity_name = serializer.data['charity_name']
        password = serializer.data['password']
        city = serializer.data['city']
        CharityRegistration.objects.filter(email=email).update(charity_name=charity_name, password=password, city=city)
        return Response({"Status": "Modified"}, status=status.HTTP_201_CREATED)
    return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@api_view(['GET'])
def get_charity_participants(request):
    """Return users registered under the charity named in the request body."""
    name = dict(request.data)['charity_name'][0]
    participants = UserRegistration.objects.filter(charity_name=name)
    return Response(UserRegistrationSerializer(participants, many=True).data)
@api_view(['DELETE'])
def delete_item(request):
    """Delete the item identified by 'itemID' in the request body.

    Returns 404 instead of crashing with an unhandled DoesNotExist (HTTP 500)
    when the id is unknown.
    """
    item_id = dict(request.data)['itemID'][0]
    try:
        Item.objects.get(itemID=item_id).delete()
    except Item.DoesNotExist:
        return Response({"Status": "Not Found"}, status=status.HTTP_404_NOT_FOUND)
    return Response({"Status": "Deleted"}, status=status.HTTP_200_OK)
| {"/RESTWithDjango/urls.py": ["/APITest/views.py"]} |
74,829 | NikoTaga/Interview_preparation_course_django_project | refs/heads/main | /test_prj/test_app/views.py |
from django.shortcuts import render
from django.http import HttpResponseRedirect, HttpResponse, JsonResponse
from django.template.loader import render_to_string
from django.views.generic.edit import CreateView
from .models import Googs
from .forms import GoodsForm
# Create your views here.
def ajax_test(request):
    """Smoke-test endpoint for the AJAX demo; returns a static heading."""
    body = '<h1>AJAX</h1>'
    return HttpResponse(body)
def index(request):
    """Render goods_list.html with every Googs row."""
    template_name = 'goods_list.html'
    # list() replaces the old element-by-element copy loop; it forces
    # queryset evaluation exactly as the loop did.
    goods = list(Googs.objects.all())
    context = {'table_title': 'Все товары',
               'goods': goods}
    return render(request,
                  template_name=template_name,
                  context=context)
# def add(request):
# template_name = 'goods_add.html'
# if request.method == 'POST':
# add_form = GoodsForm(request.POST)
# if add_form.is_valid():
# add_form.save()
# return HttpResponseRedirect('/')
# else:
# context = {'form': add_form}
# return render(request,
# template_name=template_name,
# context=context)
# else:
# add_form = GoodsForm()
# context = {'form': add_form}
# return render(request,
# template_name=template_name,
# context=context)
def add(request):
    """Create-goods view: builds the form and delegates to save_good_form."""
    template_name = 'goods_add.html'
    # Both branches end in the same delegation; only the form binding differs.
    if request.method == 'POST':
        form = GoodsForm(request.POST)
    else:
        form = GoodsForm()
    return save_good_form(request, form, template_name)
class Add(CreateView):
    """Class-based alternative to add(); creates a Googs row via GoodsForm."""
    template_name = 'goods_add.html'
    form_class = GoodsForm
    success_url = '/'
def save_good_form(request, form, template_name):
    """Validate/save *form* on POST and return the AJAX JSON payload.

    The payload carries 'form_is_valid' plus the refreshed goods list
    ('html_good_list') on success; 'html_form' always carries the
    re-rendered form template.
    """
    data = {}
    if request.method == 'POST':
        if form.is_valid():
            # BUG FIX: the original evaluated `form.save` without calling it,
            # so valid submissions were never persisted.
            form.save()
            print('form.save')
            goods_list = Googs.objects.all()
            context = {'goods': goods_list}
            data['form_is_valid'] = True
            data['html_good_list'] = render_to_string(template_name='goods_list.html',
                                                      context=context)
        else:
            data['form_is_valid'] = False
    context = {'form': form}
    data['html_form'] = render_to_string(template_name=template_name,
                                         context=context,
                                         request=request)
    return JsonResponse(data)
| {"/test_prj/test_app/views.py": ["/test_prj/test_app/models.py", "/test_prj/test_app/forms.py"], "/test_prj/test_app/forms.py": ["/test_prj/test_app/models.py"]} |
74,830 | NikoTaga/Interview_preparation_course_django_project | refs/heads/main | /test_prj/test_app/admin.py | from django.contrib import admin
from test_app.models import Googs
# Register your models here.
# Expose Googs in the Django admin.
admin.site.register(Googs)
| {"/test_prj/test_app/views.py": ["/test_prj/test_app/models.py", "/test_prj/test_app/forms.py"], "/test_prj/test_app/forms.py": ["/test_prj/test_app/models.py"]} |
74,831 | NikoTaga/Interview_preparation_course_django_project | refs/heads/main | /test_prj/test_app/forms.py |
from django.forms import ModelForm
from .models import Googs
class GoodsForm(ModelForm):
    """ModelForm for Googs; created_at is auto-set and therefore excluded."""
    class Meta:
        model = Googs
        fields = ('title', 'price', 'measurements', 'vendor')
74,832 | NikoTaga/Interview_preparation_course_django_project | refs/heads/main | /test_prj/test_app/models.py | from django.db import models
class Googs(models.Model):
    """A stock item card ('Карточка товара').

    NOTE(review): the class name looks like a typo for 'Goods', but renaming
    it would require a migration and updates to forms/admin — left as-is.
    """
    title = models.CharField(verbose_name='Наименование',
                             max_length=255)
    created_at = models.DateTimeField(verbose_name='Дата поступления',
                                      auto_now_add=True,
                                      auto_created=True)
    price = models.DecimalField(verbose_name='Цена',
                                decimal_places=2,
                                max_digits=10)
    measurements = models.CharField(verbose_name='Единица измерения',
                                    max_length=25)
    vendor = models.CharField(verbose_name='Поставщик',
                              max_length=255)

    def __str__(self):
        return self.title

    class Meta:
        verbose_name = 'Карточка товара'
        verbose_name_plural = 'Карточка товара'
| {"/test_prj/test_app/views.py": ["/test_prj/test_app/models.py", "/test_prj/test_app/forms.py"], "/test_prj/test_app/forms.py": ["/test_prj/test_app/models.py"]} |
74,833 | NikoTaga/Interview_preparation_course_django_project | refs/heads/main | /test_prj/test_app/migrations/0001_initial.py | # Generated by Django 3.2 on 2021-04-20 15:26
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema for test_app: creates the Googs table.

    NOTE(review): 'price' is created with max_digits=3 here, while the
    current model declares max_digits=10 — a follow-up migration appears to
    be missing; confirm before deploying.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Googs',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_created=True, auto_now_add=True, verbose_name='Дата поступления')),
                ('title', models.CharField(max_length=255, verbose_name='Наименование')),
                ('price', models.DecimalField(decimal_places=2, max_digits=3, verbose_name='Цена')),
                ('measurements', models.CharField(max_length=25, verbose_name='Единица измерения')),
                ('vendor', models.CharField(max_length=255, verbose_name='Поставщик')),
            ],
            options={
                'verbose_name': 'Карточка товара',
                'verbose_name_plural': 'Карточка товара',
            },
        ),
    ]
| {"/test_prj/test_app/views.py": ["/test_prj/test_app/models.py", "/test_prj/test_app/forms.py"], "/test_prj/test_app/forms.py": ["/test_prj/test_app/models.py"]} |
74,838 | Cherniakhovsky/test_APP3null | refs/heads/master | /accounts/api/urls.py | from django.contrib.auth.models import User
from django.conf.urls import url
#from django.contrib import admin
from .serializers import (
UserSerializer,
UserCreateSerializer,
UserLoginSerializer,
)
from .views import(
UserList, #UserAPIView,
UserCreateAPIView,
UserLoginAPIView
)
# Account API routes: list users, sign in, sign up.
urlpatterns = [
    url(r'^users/$', UserList.as_view(), name='users'),
    # url(r'^/users/$', UserList.as_view(queryset=User.objects.all(),
    #    serializer_class=UserSerializer),name='user-list'),
    url(r'^signin/$', UserLoginAPIView.as_view(), name='login'),
    url(r'^signup/$', UserCreateAPIView.as_view(), name='register'),
    #url(r'^create/$', CommentCreateAPIView.as_view(), name='create'),
    #url(r'^(?P<pk>\d+)/$', CommentDetailAPIView.as_view(), name='thread'),
]
74,839 | Cherniakhovsky/test_APP3null | refs/heads/master | /accounts/models.py | from __future__ import unicode_literals
from django.db import models
from django.contrib.auth.models import User
# class User(models.Model):
# email = models.CharField(max_length=255)
# first_name = models.CharField(max_length=255)
# last_name = models.CharField(max_length=255)
# phone_number = models.CharField(max_length=255)
# address = models.CharField(max_length=255)
#
# name = models.CharField(max_length=255)
# description = models.TextField()
# price = models.DecimalField(decimal_places=2, max_digits=20)
#
# email, first_name, last_name, phone_number, address | {"/accounts/api/urls.py": ["/accounts/api/views.py"]} |
74,840 | Cherniakhovsky/test_APP3null | refs/heads/master | /accounts/api/views.py | from django.db.models import Q
from django.contrib.auth import get_user_model
from rest_framework import generics
from rest_framework.response import Response
from rest_framework.status import HTTP_200_OK, HTTP_400_BAD_REQUEST
from rest_framework.views import APIView
from rest_framework.filters import (
SearchFilter,
OrderingFilter,
)
from rest_framework.mixins import DestroyModelMixin, UpdateModelMixin
from rest_framework.generics import (
CreateAPIView,
DestroyAPIView,
ListAPIView,
RetrieveAPIView,
RetrieveUpdateAPIView
)
from rest_framework.permissions import (
AllowAny,
IsAuthenticated,
IsAdminUser,
IsAuthenticatedOrReadOnly,
)
from rest_framework.views import APIView
#from django.contrib.auth.models import User
#from posts.api.permissions import IsOwnerOrReadOnly
#from posts.api.pagination import PostLimitOffsetPagination, PostPageNumberPagination
User = get_user_model()
from .serializers import (
UserSerializer,
UserCreateSerializer,
UserLoginSerializer,
)
# class UserAPIView(APIView):
# authentication_classes = authentication.TokenAuthentication ###Am assuming you're authenticating via a token
# def get(self, request):
# """
# Get user based on username.
# Am getting only the username since that's the only field used above.
# :param request:
# :param format:
# :return:
# """
# details = User.objects.all()
# serializer = UserSerializer(details)
# return Response(serializer.data)
#
# def post(self, request, format=None):
# """
# Create a new user instance
# :param request:
# :param format:
# :return:
# """
# serializer = UserSerializer(request.data)
# if serializer.is_valid():
# serializer.save()
# return Response(serializer.data)
# return Response(serializer.errors)
# def get(self, request, *args, **kwargs):
# return self.__list_view(request) if 'pk' not in self.kwargs else self.__detail_view(request)
# class UserList(generics.ListCreateAPIView):
# queryset = User.objects.all()
# serializer_class = UserSerializer
# permission_classes = (IsAdminOrReadOnly,)
# class UserList(generics.ListCreateAPIView):
# queryset = User.objects.all()
# serializer_class = UserSerializer
# permission_classes = (IsAdminUser,)
class UserList(generics.ListCreateAPIView):
    """GET: list all users; POST: create a user.

    SECURITY(review): AllowAny exposes the full user list and unauthenticated
    creation — confirm this is intentional.
    """
    queryset = User.objects.all()
    serializer_class = UserSerializer
    permission_classes = [AllowAny]
# class UserAPIView(APIView):
# queryset = User.objects.all()
# serializer_class = UserSerializer
# permission_classes = [AllowAny]
# def list(self, request):
# # Note the use of `get_queryset()` instead of `self.queryset`
# queryset = self.get_queryset()
# serializer = UserSerializer(queryset, many=True)
# return Response(serializer.data)
# # queryset = User.objects.all()
# serializer_class = UserSerializer
# #permission_classes = (IsAdminUser,)
# def get(self, request):
# details = User.objects.all()
# serializer = UserSerializer(details)
# return Response(serializer.data)
class UserCreateAPIView(CreateAPIView):
    """POST-only endpoint that registers a new user."""
    serializer_class = UserCreateSerializer
    queryset = User.objects.all()
class UserLoginAPIView(APIView):
    """POST credentials; returns the login serializer's data on success."""
    permission_classes = [AllowAny]
    serializer_class = UserLoginSerializer

    def post(self, request, *args, **kwargs):
        data = request.data
        serializer = UserLoginSerializer(data=data)
        # raise_exception=True makes invalid input raise ValidationError, so
        # the final errors-return below is effectively unreachable.
        if serializer.is_valid(raise_exception=True):
            new_data = serializer.data
            return Response(new_data, status=HTTP_200_OK)
        return Response(serializer.errors, status=HTTP_400_BAD_REQUEST)
# class UserInvitationAPIView() | {"/accounts/api/urls.py": ["/accounts/api/views.py"]} |
74,857 | amoghagarwal/messaging | refs/heads/master | /accept_requests/management/commands/retry_mechanism.py | from django.core.management.base import BaseCommand, CommandError
from accept_requests.callback_service import Callbacks
class Command(BaseCommand):
    """manage.py retry_mechanism: run the callback retry consumer (blocks)."""
    help = 'Gives the retry mechanism'

    #def add_arguments(self, parser):
    #    parser.add_argument('poll_id', nargs='+', type=int)

    def handle(self, *args, **options):
        # Blocking consume loop; each queued message is replayed via
        # Callbacks.cb.
        callback = Callbacks()
        callback.fetch_msg_from_queue()
74,858 | amoghagarwal/messaging | refs/heads/master | /accept_requests/tests.py |
import redis
import pika
# Create your tests here.
from django.test import TestCase
from accept_requests.utility import get_redis_connection, get_status_from_redis, get_notification_channel, create_queue, \
remove_key_from_redis
from appsphere.settings import RABBITMQ_EXCHANGE as exchange_name
class RedisTestCase(TestCase):
    """Integration checks against the configured Redis instance."""

    def test_redis_is_up(self):
        """The connection helper returns a StrictRedis client."""
        r = get_redis_connection(0)
        self.assertTrue(isinstance(r, redis.StrictRedis))

    def test_key_exists_in_redis(self):
        """A key that was never stored reports 'Message Already Processed'."""
        status = get_status_from_redis("FSFSFSD-44255", 0)
        self.assertEqual(status, "Message Already Processed")

    def test_remove_key_from_redis(self):
        """remove_key_from_redis deletes a key that was just set."""
        r = get_redis_connection(0)
        r.set("a", "b")
        remove_key_from_redis("a", 0)
        # FIX: assert directly on key existence instead of the old
        # if/else around assertTrue(True)/assertFalse(True).
        self.assertFalse(r.exists("a"))
class RabbitmqTestCase(TestCase):
    """Integration checks against the configured RabbitMQ broker."""

    def test_rabbitmq_connection(self):
        """
        Check Rabbitmq connection
        :return:
        """
        ch = get_notification_channel()
        self.assertTrue(isinstance(ch, pika.adapters.blocking_connection.BlockingChannel))

    def test_queue_creation(self):
        """
        Check Queue Creation
        :return:
        """
        ch = get_notification_channel()
        create_queue(ch, exchange_name, "process_messaging", "process")
        # NOTE(review): assertTrue(True) only proves no exception was raised;
        # consider asserting the queue exists (queue_declare passive=True).
        self.assertTrue(True)
class SimpleTest(TestCase):
    """Sanity check that the test runner itself works."""

    def test_basic_addition(self):
        """
        Tests that 1 + 1 always equals 2.
        """
        self.assertEqual(1 + 1, 2)
74,859 | amoghagarwal/messaging | refs/heads/master | /appsphere/urls.py |
from django.contrib import admin
#urlpatterns = [
# Examples:
# url(r'^$', 'appsphere.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
# url(r'^admin/', include(admin.site.urls)),
#]
from django.conf.urls import *
# NOTE(review): patterns() with dotted-string view paths was removed in
# Django 1.10 — this file targets an old Django release; confirm before
# upgrading.
urlpatterns = patterns('',
    url(r'^$', 'accept_requests.views.landing_service', name="landing_page"),
    url(r'^message_api/$', 'accept_requests.views.msg_service', name="msg_service"),
    url(r'^test_api_success/$', 'accept_requests.views.test_api_success',
        name="test_service_success"),
    url(r'^test_api_failure/$', 'accept_requests.views.test_api_failure',
        name="test_service_failure"),
)
74,860 | amoghagarwal/messaging | refs/heads/master | /accept_requests/views.py | from django.http import HttpResponse
from django.http import HttpResponseBadRequest
import datetime
import uuid
import json
from django.views.decorators.csrf import csrf_exempt
import logging
from accept_requests.utility import get_redis_connection, \
publish_messages_to_queue, get_callback_status, store_status_in_redis, callback
from appsphere.settings import RABBITMQ_EXCHANGE as exchange_name
# Get an instance of a logger
log = logging.getLogger(__name__)
@csrf_exempt
def msg_service(request):
    """Accept a message + callback URL, queue it, and ack with a uid.

    POST params: 'msg' (message body) and 'url' (callback endpoint).
    Returns 400 when either is missing; otherwise a JSON receipt with the
    generated uid. On queue failure the payload is parked in Redis DB 1.
    """
    msg = None
    callback_url = None
    if request.method == 'POST':
        msg = request.POST.get('msg', None)
        callback_url = request.POST.get('url', None)
    time_received = str(datetime.datetime.now())
    uid = str(uuid.uuid4())
    # BUG FIX: the original wrapped the POST values in str() immediately, so
    # a missing parameter became the truthy string "None" and slipped past
    # this validation. Validate first, coerce after.
    if not callback_url or not msg:
        log.error("%s %s error : Please enter valid message and callback" % (callback_url, msg))
        response = {"error": "Please enter valid message and callback"}
        return HttpResponseBadRequest(json.dumps(response), content_type='application/json')
    msg = str(msg)
    callback_url = str(callback_url)
    msg_status = "unqueued"
    store_status_in_redis(uid, msg_status, 0)
    if enqueue(msg, uid, callback_url):
        msg_status = "queued"
        callback(uid, callback_url, msg, msg_status)
    else:
        msg_status = "unqueued"
        store_in_redis(callback_url, msg, msg_status, time_received, uid)
    response = {"uid": uid, "message": msg, "status": "request_accepted",
                "time": time_received}
    return HttpResponse(json.dumps(response))
def store_in_redis(callback_url, msg, msg_status, time_received, uid):
    """Persist an un-enqueued message payload in Redis DB 1 as a fallback."""
    payload = {"message": msg, "status": msg_status,
               "callback_url": callback_url, "time": time_received}
    store_status_in_redis(uid, payload, 1)
    # Error level: reaching here means the queue publish failed.
    log.error("problem while enqueuing. msg stored in redis Db1")
def enqueue(msg, uid, callback_url):
    """Publish the message payload to the processing queue.

    :param msg: message body
    :param uid: unique id assigned to this request
    :param callback_url: URL to notify with delivery status
    :return: True if the publish succeeded, False otherwise
    """
    try:
        payload = {"message": msg, "uid": uid, "callback_url": callback_url}
        payload_encoded = json.dumps(payload)
        publish_messages_to_queue(exchange_name, "process",
                                  "process_messaging", payload_encoded)
        return True
    except Exception as ex:
        import traceback
        log.info("Problem while enqueuing message:")
        log.info(traceback.format_exc())
        return False
def landing_service(request):
    """Landing page: plain-text greeting for the messaging app."""
    greeting = "Welcome to messaging app"
    return HttpResponse(greeting)
@csrf_exempt
def test_api_success(request):
    """
    Callback target used in tests: always answers 200 OK.

    NOTE(review): msg/url are read but never used — kept only to mirror the
    real callback contract.
    """
    if request.method == 'POST':
        msg = str(request.POST.get('msg', None))
        callback_url = str(request.POST.get('url', None))
    return HttpResponse()
@csrf_exempt
def test_api_failure(request):
    """
    Callback target used in tests: always answers 400 Bad Request.

    NOTE(review): msg/url are read but never used — kept only to mirror the
    real callback contract.
    """
    if request.method == 'POST':
        msg = str(request.POST.get('msg', None))
        callback_url = str(request.POST.get('url', None))
    return HttpResponseBadRequest()
74,861 | amoghagarwal/messaging | refs/heads/master | /accept_requests/migrations/0001_initial.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Initial schema: creates the failed_messages table.

    NOTE(review): 'retries' is declared IntegerField(max_length=11);
    max_length has no effect on IntegerField (newer Django rejects it) —
    the current model already omits it.
    """

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='FailedMessages',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('uid', models.CharField(max_length=512)),
                ('status', models.CharField(max_length=512, choices=[(b'queued', b'queued'), (b'sent', b'sent'), (b'delivered', b'delivered')])),
                ('message', models.CharField(max_length=1024)),
                ('retries', models.IntegerField(max_length=11)),
                ('callback_url', models.CharField(max_length=512)),
                ('created_time', models.DateTimeField(db_index=True, auto_now_add=True, null=True)),
                ('modified_time', models.DateTimeField(auto_now_add=True, db_index=True)),
            ],
            options={
                'db_table': 'failed_messages',
            },
        ),
    ]
74,862 | amoghagarwal/messaging | refs/heads/master | /accept_requests/models.py | from django.db import models
# Create your models here.
#
# Lifecycle states a failed message can be archived with (value, label).
STATUS_CHOICES = (
    ('queued', "queued"),
    ('sent', "sent"),
    ('delivered', "delivered")
)
class FailedMessages(models.Model):
    """A message whose callback exhausted all retries; kept for replay."""
    uid = models.CharField(max_length=512)
    status = models.CharField(choices=STATUS_CHOICES, max_length=512)
    message = models.CharField(max_length=1024)
    retries = models.IntegerField()
    callback_url = models.CharField(max_length=512)
    created_time = models.DateTimeField(auto_now_add=True, db_index=True, null=True)
    modified_time = models.DateTimeField(auto_now_add=True, db_index=True)

    def __unicode__(self):
        # BUG FIX: the original referenced self.massage (typo), raising
        # AttributeError whenever the object was rendered.
        return "%s - %s" % (self.uid, self.message)

    class Meta:
        db_table = 'failed_messages'
74,863 | amoghagarwal/messaging | refs/heads/master | /accept_requests/callback_service.py | import json
import pika
import requests
import logging
import time
from accept_requests.models import FailedMessages
from accept_requests.utility import get_redis_connection, get_status_from_redis, remove_key_from_redis, \
consume_message_from_queue
from constants import MAX_NUMBER_OF_RETRIES, TIME_INTERVAL_BETWEEN_EACH_RETRY
log = logging.getLogger(__name__)
from appsphere.settings import RABBITMQ_EXCHANGE as exchange_name
class Callbacks:
    """RabbitMQ consumer that replays delivery-status callbacks."""

    def __init__(self):
        # Starting attempt number handed to retry_mechanism per message.
        self.retries = 1

    def fetch_msg_from_queue(self):
        """Start the blocking consume loop on the 'callbacks' queue."""
        consume_message_from_queue(exchange_name, "retry_callbacks", "callbacks", self.cb)

    def cb(self,ch, method, properties, body):
        """
        :param ch: Channel
        :param method: method for acknowledgement
        :param properties:
        :param body: Body of the message
        :return:
        """
        log.info(" [x] %r:%r" % (method.routing_key, body))
        payload = json.loads(body)
        # Ack BEFORE retrying: a crash mid-retry will not requeue the
        # message; the DB archive in retry_mechanism is the safety net.
        ch.basic_ack(delivery_tag=method.delivery_tag)
        retry_mechanism(payload, self.retries)
def retry(payload, retry_count):
    """
    POST the message status to its callback URL once.

    :param payload: dict with 'message', 'callback_url', 'uid'
    :param retry_count: current attempt number (used for logging)
    :return: the callback's HTTP status code, or -1 on exception.
        NOTE(review): when the uid is no longer in Redis
        ("Message Already Processed") no POST is made and the function
        falls through returning None; None != 200, so retry_mechanism
        keeps looping in that case — confirm intended.
    """
    try:
        msg, callback_url, uid = unpack(payload, retry_count)
        status = get_status_from_redis(uid, 0)
        if status != "Message Already Processed":
            body = {"message": msg, "status": status}
            r = requests.post(callback_url, data=body)
            return r.status_code
    except Exception as ex:
        import traceback
        log.error(traceback.format_exc())
        return -1
def retry_mechanism(payload, retry_count):
    """
    Keep retrying the callback until it returns 200 or the retry budget
    (MAX_NUMBER_OF_RETRIES) is exhausted; on exhaustion the message is
    archived in the DB and its Redis key removed.
    :param payload: dict with 'message', 'callback_url', 'uid'
    :param retry_count: attempt number to start from (normally 1)
    :return:
    """
    uid = payload["uid"]
    while retry_count <= MAX_NUMBER_OF_RETRIES:
        status_code = retry(payload, retry_count)
        if status_code != 200:
            retry_count += 1
            time.sleep(TIME_INTERVAL_BETWEEN_EACH_RETRY)
        else:
            # Success: the status key is no longer needed.
            remove_key_from_redis(uid, 0)
            break
    if retry_count > MAX_NUMBER_OF_RETRIES:
        # Exhausted: archive for manual replay, then drop the Redis key.
        store_info_in_db(payload, retry_count)
        remove_key_from_redis(uid, 0)
    log.info("Finished retrying callback for the message")
def unpack(payload, retry_count):
    """Log the payload fields and return (message, callback_url, uid)."""
    msg = payload["message"]
    url = payload["callback_url"]
    uid = payload["uid"]
    log.info("retry count: " + str(retry_count))
    log.info("msg is " + msg)
    log.info("url is " + url)
    log.info("uid is " + uid)
    return msg, url, uid
def unpack_without_print(payload, retry_count):
    """Return (message, callback_url, uid) from *payload* without logging."""
    return tuple(payload[key] for key in ("message", "callback_url", "uid"))
def store_info_in_db(payload, retry_count):
    """
    Archive a callback that exhausted its retries into the database.

    Best-effort: any failure is logged, never raised.

    :param payload: dict with "message", "callback_url" and "uid" keys
    :param retry_count: number of attempts performed, stored alongside
    """
    try:
        msg, url, uid = unpack_without_print(payload, retry_count)
        current_status = get_status_from_redis(uid, 0)
        FailedMessages.objects.create(uid=uid, callback_url=url, message=msg,
                                      status=current_status, retries=retry_count)
        log.info("Storing Record in DB")
    except Exception as err:
        log.error("Error while saving info in DB: " + str(err))
| {"/accept_requests/management/commands/retry_mechanism.py": ["/accept_requests/callback_service.py"], "/accept_requests/tests.py": ["/accept_requests/utility.py"], "/accept_requests/views.py": ["/accept_requests/utility.py"], "/accept_requests/callback_service.py": ["/accept_requests/models.py", "/accept_requests/utility.py"]} |
74,864 | amoghagarwal/messaging | refs/heads/master | /accept_requests/constants.py | MAX_NUMBER_OF_RETRIES = 5
# Seconds to sleep between consecutive callback retry attempts
# (used together with MAX_NUMBER_OF_RETRIES by the retry worker).
TIME_INTERVAL_BETWEEN_EACH_RETRY = 5
| {"/accept_requests/management/commands/retry_mechanism.py": ["/accept_requests/callback_service.py"], "/accept_requests/tests.py": ["/accept_requests/utility.py"], "/accept_requests/views.py": ["/accept_requests/utility.py"], "/accept_requests/callback_service.py": ["/accept_requests/models.py", "/accept_requests/utility.py"]} |
74,865 | amoghagarwal/messaging | refs/heads/master | /accept_requests/utility.py | import redis
from appsphere import settings
import pika
import requests
import json
import logging
from appsphere.settings import RABBITMQ_EXCHANGE as exchange_name
notification_channel = None
log = logging.getLogger(__name__)
def get_redis_connection(db):
    """
    Build a StrictRedis client for the configured host/port.

    :param db: redis database number to select
    :return: redis.StrictRedis instance, or None when the client could
             not be created (the error is logged, not raised).
    """
    try:
        connection = redis.StrictRedis(host=settings.SR_REDIS_HOST,
                                       port=settings.SR_REDIS_PORT,
                                       db=db)
        return connection
    except Exception as ex:
        log.error('Unable to connect to redis. ' + str(ex))
def consume_message_from_queue(exchange_name, queue_name, routing_key, cb):
    """
    Bind *queue_name* to *exchange_name* and consume it until stopped.

    :param exchange_name: exchange to bind against
    :param queue_name: queue to declare and consume from
    :param routing_key: routing key used for the binding
    :param cb: pika-style callback ``cb(channel, method, properties, body)``
    """
    consumer_channel = get_notification_channel()
    create_queue(consumer_channel, exchange_name, queue_name, routing_key)
    consumer_channel.basic_consume(cb, queue=queue_name)
    # Blocks the calling thread; runs cb for every delivery.
    consumer_channel.start_consuming()
def create_queue(channel, exchange_name, queue_name, routing_key):
    """
    Create a queue for given exchange and queue name (and routing queue).

    Declarations are idempotent on the broker side, so this is safe to
    call from both producers and consumers before use.

    :param channel: channel for rabbitMQ
    :param exchange_name: exchange name (declared as a direct exchange)
    :param queue_name: queue name
    :param routing_key: routing key
    """
    channel.exchange_declare(exchange=exchange_name, type='direct')
    channel.queue_declare(queue=queue_name)
    channel.queue_bind(exchange=exchange_name, queue=queue_name, routing_key=routing_key)
def get_notification_channel():
    """
    Lazily create and cache a single RabbitMQ channel for this process.

    :return: the shared pika channel (created on first use).
    """
    global notification_channel
    if notification_channel is None:
        conn = pika.BlockingConnection(pika.ConnectionParameters(settings.RABBITMQ_IP))
        notification_channel = conn.channel()
    return notification_channel
def publish_messages_to_queue(exchange_name, routing_key, queue_name, payload):
    """
    Declare/bind the target queue and publish one persistent message.

    :param exchange_name: exchange to bind and publish to
    :param routing_key: routing key for the publish and the binding
    :param queue_name: queue to declare so the message is not lost
    :param payload: message body to enqueue
    """
    producer_channel = get_notification_channel()
    create_queue(producer_channel, exchange_name, queue_name, routing_key)
    # delivery_mode=2 marks the message persistent so it survives a
    # broker restart (given a durable queue).
    persistent = pika.BasicProperties(delivery_mode=2)
    producer_channel.basic_publish(exchange=exchange_name,
                                   routing_key=routing_key,
                                   body=payload,
                                   properties=persistent)
def get_callback_status(callback_url, msg, status):
    """
    POST the message and its status to the client's callback URL.

    :param callback_url: the url for the callback
    :param msg: the contents of the message
    :param status: the status of the message to be sent
    :return: HTTP status code of the POST, or -1 when the request failed.
    """
    try:
        response = requests.post(callback_url,
                                 data={"status": status, "message": msg})
        return response.status_code
    except Exception as ex:
        log.error("Unable to Send Callbacks: " + str(ex))
        return -1
def store_status_in_redis(uid, status, db):
    """
    Record the message status in redis, keyed by uid. Best-effort:
    failures are logged, never raised.

    :param uid: Unique ID of the message (redis key)
    :param status: value to store
    :param db: redis database number
    """
    try:
        conn = get_redis_connection(db)
        conn.set(uid, status)
    except Exception as ex:
        log.error("Unable to store status in redis: " + str(ex))
def callback(uid, callback_url, msg, msg_status):
    """
    Send the status callback to the client; on a non-200 response persist
    the status in redis and hand the message to the retry queue.

    :param uid: Unique ID of the message
    :param callback_url: the url for the callback
    :param msg: the contents of the message
    :param msg_status: status of the message to be communicated
    """
    try:
        if get_callback_status(callback_url, msg, msg_status) != 200:
            store_status_in_redis(uid, msg_status, 0)
            call_retry_mechanism(msg, uid, callback_url)
    except Exception:
        log.error("Unable to Send Callbacks")
        import traceback
        log.error(traceback.format_exc())
def call_retry_mechanism(msg, uid, callback_url):
    """
    Enqueue the message on the retry queue for the retry worker.

    :param msg: the contents of the message
    :param uid: Unique ID of the message
    :param callback_url: the url for the callback
    """
    body = json.dumps({"message": msg, "uid": uid, "callback_url": callback_url})
    publish_messages_to_queue(exchange_name, "callbacks",
                              "retry_callbacks", body)
def get_status_from_redis(uid, db):
    """
    Look up the stored status for *uid*.

    :param uid: redis key of the message
    :param db: redis database number
    :return: the stored status, the literal string
             "Message Already Processed" when the key is gone, or None
             when redis errored (error is logged).
    """
    try:
        conn = get_redis_connection(db)
        if not conn.exists(uid):
            return "Message Already Processed"
        return conn.get(uid)
    except Exception:
        import traceback
        log.error(traceback.format_exc())
def remove_key_from_redis(uid, db):
    """
    Delete the redis entry for *uid*. Best-effort: errors are logged.

    :param uid: redis key of the message to delete
    :param db: redis database number
    """
    try:
        conn = get_redis_connection(db)
        conn.delete(uid)
        log.info("Message entry removed from redis")
    except Exception:
        log.error("unable to remove keys:")
        import traceback
        log.error(traceback.format_exc())
def redis_entry_exists(uid, payload, db):
    """
    If the message is still marked "queued" in redis, overwrite its entry
    with *payload* and report True; otherwise report False.

    NOTE(review): redis-py returns bytes under Python 3, so the
    comparison with the str "queued" assumes a Python 2 runtime or a
    client configured with decode_responses — confirm.

    :param uid: redis key of the message
    :param payload: replacement value to store when still queued
    :param db: redis database number
    :return: True when the entry was updated, False otherwise
             (None when redis errored; error is logged).
    """
    try:
        conn = get_redis_connection(db)
        if conn.exists(uid) and conn.get(uid) == "queued":
            conn.set(uid, payload)
            return True
        return False
    except Exception as ex:
        log.error("error " + str(ex))
| {"/accept_requests/management/commands/retry_mechanism.py": ["/accept_requests/callback_service.py"], "/accept_requests/tests.py": ["/accept_requests/utility.py"], "/accept_requests/views.py": ["/accept_requests/utility.py"], "/accept_requests/callback_service.py": ["/accept_requests/models.py", "/accept_requests/utility.py"]} |
74,866 | amoghagarwal/messaging | refs/heads/master | /accept_requests/process_msg.py | import json
import requests
import logging
import time
from utility import consume_message_from_queue, store_status_in_redis, callback, get_redis_connection, \
redis_entry_exists
from appsphere.settings import RABBITMQ_EXCHANGE as exchange_name
log = logging.getLogger(__name__)
def process_msg():
    """
    Function to consume messages from the queue, process them and then send them.

    Blocks forever: every delivery on the "process_messaging" queue is
    handed to cb() below.

    :return:
    """
    consume_message_from_queue(exchange_name, "process_messaging", "process", cb)
def cb(ch, method, properties, body):
    """
    Pika consumer callback: decode the payload, ack it, "send" the
    message and then trigger the client callback unless the message is
    still marked as queued in redis.

    :param ch: Channel
    :param method: delivery metadata (used for the ack)
    :param properties: message properties (unused)
    :param body: JSON body with "message", "callback_url" and "uid"
    """
    try:
        log.info(" [x] %r:%r" % (method.routing_key, body))
        msg, callback_url, uid = processing(json.loads(body))
        ch.basic_ack(delivery_tag=method.delivery_tag)
        # 200 from the downstream send means confirmed delivery;
        # anything else only means we handed the message off.
        if send_message(msg) == 200:
            msg_status = "delivered"
        else:
            msg_status = "sent"
        updated_entry = {"message": msg, "status": msg_status,
                         "callback_url": callback_url}
        if not redis_entry_exists(uid, updated_entry, 0):
            callback(uid, callback_url, msg, msg_status)
    except Exception as ex:
        log.error("Error received: " + str(ex))
def processing(payload):
    """
    Log the payload fields and unpack them.

    :param payload: dict with "message", "callback_url" and "uid" keys
    :return: (message, callback_url, uid)
    """
    # Bug fix: these were Python-2-only print statements, which are a
    # SyntaxError under Python 3; print() calls emit identical output
    # on both interpreter lines.
    print("msg is " + payload["message"])
    print("url is " + payload["callback_url"])
    print("uid is " + payload["uid"])
    return payload["message"], payload["callback_url"], payload["uid"]
def send_message(msg):
    """
    Deliver *msg* to the client (sms, email, push notification, ...)
    and return the transport's status code.

    :param msg: message contents to deliver
    :return: status code from the transport layer
    """
    return send_message_to_client(msg)
def send_message_to_client(msg):
    """
    Stub transport: pretend to send *msg* and report success.

    The caller treats 200 as a successful (delivered) send.

    :param msg: message contents (ignored by the stub)
    :return: status code, always 200
    """
    time.sleep(2)  # simulate the latency of a real send/response cycle
    return 200
| {"/accept_requests/management/commands/retry_mechanism.py": ["/accept_requests/callback_service.py"], "/accept_requests/tests.py": ["/accept_requests/utility.py"], "/accept_requests/views.py": ["/accept_requests/utility.py"], "/accept_requests/callback_service.py": ["/accept_requests/models.py", "/accept_requests/utility.py"]} |
74,867 | Fotth/metrology-and-standardization | refs/heads/main | /test.py | import matplotlib.pyplot as plt
from PyQt5 import QtWidgets
#from PyQt5.QtCore import pyqtSignal, QObject
from grafiki import Ui_MainWindow # импорт нашего сгенерированного файла
import sys
import dastfun as df
from PyQt5.QtWidgets import QListWidgetItem,QMessageBox
from PyQt5.QtGui import QFont
import time
#from reader.__main__ import main
class mywindow(QtWidgets.QMainWindow):
    """Main application window: reads 20 measured values (X1..X20),
    runs the df.Hisoblash processing on them, fills the result list
    widget and can plot the input/output pairs with matplotlib."""

    def __init__(self):
        super(mywindow, self).__init__()
        # Build the pyuic5-generated widget tree onto this window.
        self.ui = Ui_MainWindow()
        self.ui.setupUi(self)
        # NOTE: stores the Hisoblash *class* itself (not an instance);
        # agty() below constructs its own instance instead.
        self.clot=df.Hisoblash
        # Maps row index -> (input value, computed value) for plotting.
        self.grafdata={}
        # Wire the buttons: compute, clear, plot, contact info.
        self.ui.pushButton.clicked.connect(self.agty)
        self.ui.pushButton_2.clicked.connect(self.dataclear)
        self.ui.pushButton_3.clicked.connect(self.grafiki)
        self.ui.commandLinkButton.clicked.connect(self.linkferuz)
        self.ui.listWidget.setFont(QFont('Times',16,weight=1))
    #
    # QListWidgetItem("salom ", self.ui.listWidget)
    # self.ui.listWidget.addItem(aitem=a)
    def linkferuz(self):
        """Show a modal message box with the author's contact address."""
        msg = QMessageBox()
        msg.setWindowTitle("Bog'lanish uchun")
        msg.setText("Dastur muallifi : fottihsultan@gmail.com")
        x = msg.exec_()
    def grafiki(self):
        """Recompute via agty(), then plot input vs. output values."""
        self.agty()
        xpoint=list()
        ypoint=list()
        # grafdata values are (input, output) pairs filled by agty().
        for x , y in self.grafdata.values():
            xpoint.append(x)
            ypoint.append(y)
        font1 = {'family': 'serif', 'color': 'blue', 'size': 20}
        font2 = {'family': 'serif', 'color': 'darkred', 'size': 15}
        plt.title("O'lchov qiymatlarini qayta ishlash Grafik shakili", fontdict=font1)
        plt.xlabel(" Kiritlgan qiymatlar ", fontdict=font2)
        plt.ylabel(" Chiqayotgan qiymatlar ", fontdict=font2)
        plt.plot(xpoint,ypoint)
        plt.show()
    def agty(self):
        """Read the 20 line edits as floats, run df.Hisoblash on them,
        show the results in the list widget and record (input, output)
        pairs in self.grafdata. On a parse error an error row is shown
        instead."""
        # QListWidgetItem("salom", self.ui.listWidget)
        fff=True
        a=list()
        try:
            # NOTE: the widget order is intentionally non-sequential at
            # the end (16, 18, 19, 17, 20) -- matches the .ui layout.
            a.append(float(self.ui.lineEdit.text()))
            a.append(float(self.ui.lineEdit_2.text()))
            a.append(float(self.ui.lineEdit_3.text()))
            a.append(float(self.ui.lineEdit_4.text()))
            a.append(float(self.ui.lineEdit_5.text()))
            a.append(float(self.ui.lineEdit_6.text()))
            a.append(float(self.ui.lineEdit_7.text()))
            a.append(float(self.ui.lineEdit_8.text()))
            a.append(float(self.ui.lineEdit_9.text()))
            a.append(float(self.ui.lineEdit_10.text()))
            a.append(float(self.ui.lineEdit_11.text()))
            a.append(float(self.ui.lineEdit_12.text()))
            a.append(float(self.ui.lineEdit_13.text()))
            a.append(float(self.ui.lineEdit_14.text()))
            a.append(float(self.ui.lineEdit_15.text()))
            a.append(float(self.ui.lineEdit_16.text()))
            a.append(float(self.ui.lineEdit_18.text()))
            a.append(float(self.ui.lineEdit_19.text()))
            a.append(float(self.ui.lineEdit_17.text()))
            a.append(float(self.ui.lineEdit_20.text()))
        # NOTE(review): bare except silently swallows any error, not just
        # a float parse failure -- consider narrowing to ValueError.
        except:
            QListWidgetItem('Xatolika yo\'li qo\'ydiz ',self.ui.listWidget)
            fff=False
        if fff:
            self.ui.listWidget.clear()
            hisob=df.Hisoblash(a[0],a[1],a[2],a[3],a[4],a[5],a[6],a[7],a[8],a[9],a[10],a[11],a[12],a[13],a[14],a[15],a[16],a[17],a[18],a[19])
            time.sleep(1)
            massiv={}
            massiv=hisob.wow()
            guf=0
            datahisob=hisob.algo7()
            # (translated from Uzbek) there is a bug around here -- try
            # creating a separate variable and rework this loop.
            for i in massiv.values():
                QListWidgetItem(i,self.ui.listWidget)
                self.grafdata[guf]=a[guf],datahisob[guf]
                guf+=1
            # d=f"{i[0]} {i[1]} {i[2]} {i[3]} {i[4]} {i[5]} {i[6]}".format(i[0],i[1],i[2],i[3],i[4],i[5],i[6])
            # QListWidgetItem(d, self.ui.listWidget)
            # time.sleep(0.1)
            # print(i)
        else:
            QListWidgetItem("qaytib urunib koring ",self.ui.listWidget)
    # else:
    # self.clot(self.a[i])
    # if i == 19:
    # mass = self.clot.wow().copy()
    # for ma in mass:
    # self.ui.listWidget.addItem(
    # f'{ma[0]} {ma[1]} {ma[2]} {ma[3]} {ma[4]} {ma[5]} {ma[6]} {ma[7]}'.format(ma[0], ma[1],
    # ma[2], ma[3],
    # ma[4], ma[5],
    # ma[6], ma[7]))
    def dataclear(self):
        """Clear all rows from the result list widget."""
        self.ui.listWidget.clear()
# Launch the GUI only when this file is executed directly; importing the
# module (e.g. from tests or tooling) no longer creates a QApplication
# and shows a window as an import side effect.
if __name__ == "__main__":
    app = QtWidgets.QApplication([])
    application = mywindow()
    application.show()
    sys.exit(app.exec())
74,868 | Fotth/metrology-and-standardization | refs/heads/main | /grafiki.py | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'grafiki.ui'
#
# Created by: PyQt5 UI code generator 5.15.5
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
import dastfun as df
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
    def setupUi(self, MainWindow):
        """Build the static widget tree for the main window.

        Auto-generated by pyuic5 from 'grafiki.ui' -- change the .ui file
        and regenerate rather than editing this method by hand.

        Layout overview:
          * caption labels X1..X20 plus twenty QLineEdit fields form the
            left-hand input column (one measured value per row),
          * a header row of column labels and a QListWidget display the
            computed results table,
          * three styled QPushButtons (compute / clear / plot) and a
            contact QCommandLinkButton sit on the right.
        """
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(950, 770)
        MainWindow.setMinimumSize(QtCore.QSize(950, 770))
        MainWindow.setMaximumSize(QtCore.QSize(1500, 1500))
        MainWindow.setStyleSheet("border-color: rgb(253, 255, 115);")
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        # Left column container: vertical stack of the X1..X20 captions.
        self.verticalLayoutWidget = QtWidgets.QWidget(self.centralwidget)
        self.verticalLayoutWidget.setGeometry(QtCore.QRect(10, 140, 41, 535))
        self.verticalLayoutWidget.setObjectName("verticalLayoutWidget")
        self.verticalLayout = QtWidgets.QVBoxLayout(self.verticalLayoutWidget)
        self.verticalLayout.setContentsMargins(0, 0, 0, 0)
        self.verticalLayout.setObjectName("verticalLayout")
        # label .. label_20: identical 14pt bold caption labels.
        self.label = QtWidgets.QLabel(self.verticalLayoutWidget)
        font = QtGui.QFont()
        font.setPointSize(14)
        font.setBold(True)
        font.setWeight(75)
        self.label.setFont(font)
        self.label.setObjectName("label")
        self.verticalLayout.addWidget(self.label)
        self.label_2 = QtWidgets.QLabel(self.verticalLayoutWidget)
        font = QtGui.QFont()
        font.setPointSize(14)
        font.setBold(True)
        font.setWeight(75)
        self.label_2.setFont(font)
        self.label_2.setObjectName("label_2")
        self.verticalLayout.addWidget(self.label_2)
        self.label_3 = QtWidgets.QLabel(self.verticalLayoutWidget)
        font = QtGui.QFont()
        font.setPointSize(14)
        font.setBold(True)
        font.setWeight(75)
        self.label_3.setFont(font)
        self.label_3.setObjectName("label_3")
        self.verticalLayout.addWidget(self.label_3)
        self.label_4 = QtWidgets.QLabel(self.verticalLayoutWidget)
        font = QtGui.QFont()
        font.setPointSize(14)
        font.setBold(True)
        font.setWeight(75)
        self.label_4.setFont(font)
        self.label_4.setObjectName("label_4")
        self.verticalLayout.addWidget(self.label_4)
        self.label_5 = QtWidgets.QLabel(self.verticalLayoutWidget)
        font = QtGui.QFont()
        font.setPointSize(14)
        font.setBold(True)
        font.setWeight(75)
        self.label_5.setFont(font)
        self.label_5.setObjectName("label_5")
        self.verticalLayout.addWidget(self.label_5)
        self.label_6 = QtWidgets.QLabel(self.verticalLayoutWidget)
        font = QtGui.QFont()
        font.setPointSize(14)
        font.setBold(True)
        font.setWeight(75)
        self.label_6.setFont(font)
        self.label_6.setObjectName("label_6")
        self.verticalLayout.addWidget(self.label_6)
        self.label_7 = QtWidgets.QLabel(self.verticalLayoutWidget)
        font = QtGui.QFont()
        font.setPointSize(14)
        font.setBold(True)
        font.setWeight(75)
        self.label_7.setFont(font)
        self.label_7.setObjectName("label_7")
        self.verticalLayout.addWidget(self.label_7)
        self.label_8 = QtWidgets.QLabel(self.verticalLayoutWidget)
        font = QtGui.QFont()
        font.setPointSize(14)
        font.setBold(True)
        font.setWeight(75)
        self.label_8.setFont(font)
        self.label_8.setObjectName("label_8")
        self.verticalLayout.addWidget(self.label_8)
        self.label_9 = QtWidgets.QLabel(self.verticalLayoutWidget)
        font = QtGui.QFont()
        font.setPointSize(14)
        font.setBold(True)
        font.setWeight(75)
        self.label_9.setFont(font)
        self.label_9.setObjectName("label_9")
        self.verticalLayout.addWidget(self.label_9)
        self.label_10 = QtWidgets.QLabel(self.verticalLayoutWidget)
        font = QtGui.QFont()
        font.setPointSize(14)
        font.setBold(True)
        font.setWeight(75)
        self.label_10.setFont(font)
        self.label_10.setObjectName("label_10")
        self.verticalLayout.addWidget(self.label_10)
        self.label_11 = QtWidgets.QLabel(self.verticalLayoutWidget)
        font = QtGui.QFont()
        font.setPointSize(14)
        font.setBold(True)
        font.setWeight(75)
        self.label_11.setFont(font)
        self.label_11.setObjectName("label_11")
        self.verticalLayout.addWidget(self.label_11)
        self.label_12 = QtWidgets.QLabel(self.verticalLayoutWidget)
        font = QtGui.QFont()
        font.setPointSize(14)
        font.setBold(True)
        font.setWeight(75)
        self.label_12.setFont(font)
        self.label_12.setObjectName("label_12")
        self.verticalLayout.addWidget(self.label_12)
        self.label_13 = QtWidgets.QLabel(self.verticalLayoutWidget)
        font = QtGui.QFont()
        font.setPointSize(14)
        font.setBold(True)
        font.setWeight(75)
        self.label_13.setFont(font)
        self.label_13.setObjectName("label_13")
        self.verticalLayout.addWidget(self.label_13)
        self.label_14 = QtWidgets.QLabel(self.verticalLayoutWidget)
        font = QtGui.QFont()
        font.setPointSize(14)
        font.setBold(True)
        font.setWeight(75)
        self.label_14.setFont(font)
        self.label_14.setObjectName("label_14")
        self.verticalLayout.addWidget(self.label_14)
        self.label_15 = QtWidgets.QLabel(self.verticalLayoutWidget)
        font = QtGui.QFont()
        font.setPointSize(14)
        font.setBold(True)
        font.setWeight(75)
        self.label_15.setFont(font)
        self.label_15.setObjectName("label_15")
        self.verticalLayout.addWidget(self.label_15)
        self.label_16 = QtWidgets.QLabel(self.verticalLayoutWidget)
        font = QtGui.QFont()
        font.setPointSize(14)
        font.setBold(True)
        font.setWeight(75)
        self.label_16.setFont(font)
        self.label_16.setObjectName("label_16")
        self.verticalLayout.addWidget(self.label_16)
        self.label_17 = QtWidgets.QLabel(self.verticalLayoutWidget)
        font = QtGui.QFont()
        font.setPointSize(14)
        font.setBold(True)
        font.setWeight(75)
        self.label_17.setFont(font)
        self.label_17.setObjectName("label_17")
        self.verticalLayout.addWidget(self.label_17)
        self.label_18 = QtWidgets.QLabel(self.verticalLayoutWidget)
        font = QtGui.QFont()
        font.setPointSize(14)
        font.setBold(True)
        font.setWeight(75)
        self.label_18.setFont(font)
        self.label_18.setObjectName("label_18")
        self.verticalLayout.addWidget(self.label_18)
        self.label_19 = QtWidgets.QLabel(self.verticalLayoutWidget)
        font = QtGui.QFont()
        font.setPointSize(14)
        font.setBold(True)
        font.setWeight(75)
        self.label_19.setFont(font)
        self.label_19.setObjectName("label_19")
        self.verticalLayout.addWidget(self.label_19)
        self.label_20 = QtWidgets.QLabel(self.verticalLayoutWidget)
        font = QtGui.QFont()
        font.setPointSize(14)
        font.setBold(True)
        font.setWeight(75)
        self.label_20.setFont(font)
        self.label_20.setObjectName("label_20")
        self.verticalLayout.addWidget(self.label_20)
        # Second column container: vertical stack of the 20 input fields.
        self.verticalLayoutWidget_2 = QtWidgets.QWidget(self.centralwidget)
        self.verticalLayoutWidget_2.setGeometry(QtCore.QRect(50, 140, 56, 553))
        self.verticalLayoutWidget_2.setObjectName("verticalLayoutWidget_2")
        self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.verticalLayoutWidget_2)
        self.verticalLayout_2.setContentsMargins(0, 0, 0, 0)
        self.verticalLayout_2.setObjectName("verticalLayout_2")
        # lineEdit .. lineEdit_20: value inputs, capped at 6 characters.
        self.lineEdit = QtWidgets.QLineEdit(self.verticalLayoutWidget_2)
        self.lineEdit.setMaxLength(6)
        self.lineEdit.setObjectName("lineEdit")
        self.verticalLayout_2.addWidget(self.lineEdit)
        self.lineEdit_2 = QtWidgets.QLineEdit(self.verticalLayoutWidget_2)
        self.lineEdit_2.setMaxLength(6)
        self.lineEdit_2.setObjectName("lineEdit_2")
        self.verticalLayout_2.addWidget(self.lineEdit_2)
        self.lineEdit_3 = QtWidgets.QLineEdit(self.verticalLayoutWidget_2)
        self.lineEdit_3.setMaxLength(6)
        self.lineEdit_3.setObjectName("lineEdit_3")
        self.verticalLayout_2.addWidget(self.lineEdit_3)
        self.lineEdit_4 = QtWidgets.QLineEdit(self.verticalLayoutWidget_2)
        self.lineEdit_4.setMaxLength(6)
        self.lineEdit_4.setObjectName("lineEdit_4")
        self.verticalLayout_2.addWidget(self.lineEdit_4)
        self.lineEdit_5 = QtWidgets.QLineEdit(self.verticalLayoutWidget_2)
        self.lineEdit_5.setMaxLength(6)
        self.lineEdit_5.setObjectName("lineEdit_5")
        self.verticalLayout_2.addWidget(self.lineEdit_5)
        self.lineEdit_6 = QtWidgets.QLineEdit(self.verticalLayoutWidget_2)
        self.lineEdit_6.setMaxLength(6)
        self.lineEdit_6.setObjectName("lineEdit_6")
        self.verticalLayout_2.addWidget(self.lineEdit_6)
        self.lineEdit_7 = QtWidgets.QLineEdit(self.verticalLayoutWidget_2)
        self.lineEdit_7.setMaxLength(6)
        self.lineEdit_7.setObjectName("lineEdit_7")
        self.verticalLayout_2.addWidget(self.lineEdit_7)
        self.lineEdit_8 = QtWidgets.QLineEdit(self.verticalLayoutWidget_2)
        self.lineEdit_8.setMaxLength(6)
        self.lineEdit_8.setObjectName("lineEdit_8")
        self.verticalLayout_2.addWidget(self.lineEdit_8)
        self.lineEdit_9 = QtWidgets.QLineEdit(self.verticalLayoutWidget_2)
        self.lineEdit_9.setMaxLength(6)
        self.lineEdit_9.setObjectName("lineEdit_9")
        self.verticalLayout_2.addWidget(self.lineEdit_9)
        self.lineEdit_10 = QtWidgets.QLineEdit(self.verticalLayoutWidget_2)
        self.lineEdit_10.setMaxLength(6)
        self.lineEdit_10.setObjectName("lineEdit_10")
        self.verticalLayout_2.addWidget(self.lineEdit_10)
        self.lineEdit_11 = QtWidgets.QLineEdit(self.verticalLayoutWidget_2)
        self.lineEdit_11.setMaxLength(6)
        self.lineEdit_11.setObjectName("lineEdit_11")
        self.verticalLayout_2.addWidget(self.lineEdit_11)
        self.lineEdit_12 = QtWidgets.QLineEdit(self.verticalLayoutWidget_2)
        self.lineEdit_12.setMaxLength(6)
        self.lineEdit_12.setObjectName("lineEdit_12")
        self.verticalLayout_2.addWidget(self.lineEdit_12)
        self.lineEdit_13 = QtWidgets.QLineEdit(self.verticalLayoutWidget_2)
        self.lineEdit_13.setMaxLength(6)
        self.lineEdit_13.setObjectName("lineEdit_13")
        self.verticalLayout_2.addWidget(self.lineEdit_13)
        self.lineEdit_14 = QtWidgets.QLineEdit(self.verticalLayoutWidget_2)
        self.lineEdit_14.setMaxLength(6)
        self.lineEdit_14.setObjectName("lineEdit_14")
        self.verticalLayout_2.addWidget(self.lineEdit_14)
        self.lineEdit_15 = QtWidgets.QLineEdit(self.verticalLayoutWidget_2)
        self.lineEdit_15.setMaxLength(6)
        self.lineEdit_15.setObjectName("lineEdit_15")
        self.verticalLayout_2.addWidget(self.lineEdit_15)
        self.lineEdit_16 = QtWidgets.QLineEdit(self.verticalLayoutWidget_2)
        self.lineEdit_16.setMaxLength(6)
        self.lineEdit_16.setObjectName("lineEdit_16")
        self.verticalLayout_2.addWidget(self.lineEdit_16)
        self.lineEdit_17 = QtWidgets.QLineEdit(self.verticalLayoutWidget_2)
        self.lineEdit_17.setMaxLength(6)
        self.lineEdit_17.setObjectName("lineEdit_17")
        self.verticalLayout_2.addWidget(self.lineEdit_17)
        self.lineEdit_18 = QtWidgets.QLineEdit(self.verticalLayoutWidget_2)
        self.lineEdit_18.setMaxLength(6)
        self.lineEdit_18.setObjectName("lineEdit_18")
        self.verticalLayout_2.addWidget(self.lineEdit_18)
        self.lineEdit_19 = QtWidgets.QLineEdit(self.verticalLayoutWidget_2)
        self.lineEdit_19.setMaxLength(6)
        self.lineEdit_19.setObjectName("lineEdit_19")
        self.verticalLayout_2.addWidget(self.lineEdit_19)
        self.lineEdit_20 = QtWidgets.QLineEdit(self.verticalLayoutWidget_2)
        self.lineEdit_20.setMaxLength(6)
        self.lineEdit_20.setObjectName("lineEdit_20")
        self.verticalLayout_2.addWidget(self.lineEdit_20)
        # Window title banner label (20pt bold).
        self.label_21 = QtWidgets.QLabel(self.centralwidget)
        self.label_21.setGeometry(QtCore.QRect(140, 0, 581, 51))
        font = QtGui.QFont()
        font.setPointSize(20)
        font.setBold(True)
        font.setWeight(75)
        self.label_21.setFont(font)
        self.label_21.setObjectName("label_21")
        # Header row above the results list: column captions label_23..label_31.
        self.horizontalLayoutWidget = QtWidgets.QWidget(self.centralwidget)
        self.horizontalLayoutWidget.setGeometry(QtCore.QRect(10, 89, 751, 51))
        font = QtGui.QFont()
        font.setPointSize(11)
        self.horizontalLayoutWidget.setFont(font)
        self.horizontalLayoutWidget.setObjectName("horizontalLayoutWidget")
        self.horizontalLayout = QtWidgets.QHBoxLayout(self.horizontalLayoutWidget)
        self.horizontalLayout.setContentsMargins(0, 0, 0, 0)
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.label_23 = QtWidgets.QLabel(self.horizontalLayoutWidget)
        self.label_23.setMaximumSize(QtCore.QSize(40, 45))
        self.label_23.setSizeIncrement(QtCore.QSize(20, 20))
        font = QtGui.QFont()
        font.setPointSize(11)
        self.label_23.setFont(font)
        self.label_23.setObjectName("label_23")
        self.horizontalLayout.addWidget(self.label_23)
        self.label_22 = QtWidgets.QLabel(self.horizontalLayoutWidget)
        font = QtGui.QFont()
        font.setPointSize(11)
        self.label_22.setFont(font)
        self.label_22.setObjectName("label_22")
        self.horizontalLayout.addWidget(self.label_22)
        self.label_24 = QtWidgets.QLabel(self.horizontalLayoutWidget)
        font = QtGui.QFont()
        font.setPointSize(11)
        self.label_24.setFont(font)
        self.label_24.setObjectName("label_24")
        self.horizontalLayout.addWidget(self.label_24)
        self.label_25 = QtWidgets.QLabel(self.horizontalLayoutWidget)
        font = QtGui.QFont()
        font.setPointSize(11)
        self.label_25.setFont(font)
        self.label_25.setObjectName("label_25")
        self.horizontalLayout.addWidget(self.label_25)
        self.label_26 = QtWidgets.QLabel(self.horizontalLayoutWidget)
        font = QtGui.QFont()
        font.setPointSize(11)
        self.label_26.setFont(font)
        self.label_26.setObjectName("label_26")
        self.horizontalLayout.addWidget(self.label_26)
        self.label_27 = QtWidgets.QLabel(self.horizontalLayoutWidget)
        font = QtGui.QFont()
        font.setPointSize(11)
        self.label_27.setFont(font)
        self.label_27.setObjectName("label_27")
        self.horizontalLayout.addWidget(self.label_27)
        self.label_28 = QtWidgets.QLabel(self.horizontalLayoutWidget)
        font = QtGui.QFont()
        font.setPointSize(11)
        self.label_28.setFont(font)
        self.label_28.setObjectName("label_28")
        self.horizontalLayout.addWidget(self.label_28)
        self.label_29 = QtWidgets.QLabel(self.horizontalLayoutWidget)
        font = QtGui.QFont()
        font.setPointSize(11)
        self.label_29.setFont(font)
        self.label_29.setObjectName("label_29")
        self.horizontalLayout.addWidget(self.label_29)
        self.label_30 = QtWidgets.QLabel(self.horizontalLayoutWidget)
        font = QtGui.QFont()
        font.setPointSize(11)
        self.label_30.setFont(font)
        self.label_30.setObjectName("label_30")
        self.horizontalLayout.addWidget(self.label_30)
        self.label_31 = QtWidgets.QLabel(self.horizontalLayoutWidget)
        self.label_31.setMaximumSize(QtCore.QSize(60, 16777215))
        font = QtGui.QFont()
        font.setPointSize(11)
        self.label_31.setFont(font)
        self.label_31.setObjectName("label_31")
        self.horizontalLayout.addWidget(self.label_31)
        # Main results output area.
        self.listWidget = QtWidgets.QListWidget(self.centralwidget)
        self.listWidget.setGeometry(QtCore.QRect(110, 137, 655, 586))
        self.listWidget.setObjectName("listWidget")
        # Action buttons (compute / clear / plot) sharing one hover/press style.
        self.pushButton = QtWidgets.QPushButton(self.centralwidget)
        self.pushButton.setGeometry(QtCore.QRect(780, 270, 141, 61))
        self.pushButton.setMaximumSize(QtCore.QSize(200, 200))
        font = QtGui.QFont()
        font.setPointSize(14)
        font.setBold(True)
        font.setWeight(75)
        self.pushButton.setFont(font)
        self.pushButton.setStyleSheet("QPushButton{\n"
"border-radius:30px;\n"
" background-color: rgb(84, 84, 84);\n"
" color: rgb(0, 0, 0);\n"
"padding: 6px\n"
"}\n"
"\n"
"\n"
"QPushButton:hover{\n"
"border-radius:28px;\n"
" \n"
" background-color: rgb(59, 59, 59);\n"
" \n"
" color: rgb(198, 19, 19);\n"
"padding: 20px\n"
"\n"
"}\n"
"QPushButton:pressed{\n"
"border-radius:20px;\n"
" background-color: rgb(52, 55, 255);\n"
" color: rgb(15, 15, 15);\n"
"padding: 20px\n"
"\n"
"}")
        self.pushButton.setObjectName("pushButton")
        self.pushButton_2 = QtWidgets.QPushButton(self.centralwidget)
        self.pushButton_2.setGeometry(QtCore.QRect(780, 360, 141, 61))
        self.pushButton_2.setMaximumSize(QtCore.QSize(200, 200))
        font = QtGui.QFont()
        font.setPointSize(14)
        font.setBold(True)
        font.setWeight(75)
        self.pushButton_2.setFont(font)
        self.pushButton_2.setStyleSheet("QPushButton{\n"
"border-radius:30px;\n"
" background-color: rgb(84, 84, 84);\n"
" color: rgb(0, 0, 0);\n"
"padding: 6px\n"
"}\n"
"\n"
"\n"
"QPushButton:hover{\n"
"border-radius:28px;\n"
" \n"
" background-color: rgb(59, 59, 59);\n"
" \n"
" color: rgb(198, 19, 19);\n"
"padding: 20px\n"
"\n"
"}\n"
"QPushButton:pressed{\n"
"border-radius:20px;\n"
" background-color: rgb(52, 55, 255);\n"
" color: rgb(15, 15, 15);\n"
"padding: 20px\n"
"\n"
"}")
        self.pushButton_2.setObjectName("pushButton_2")
        self.pushButton_3 = QtWidgets.QPushButton(self.centralwidget)
        self.pushButton_3.setGeometry(QtCore.QRect(780, 450, 141, 61))
        self.pushButton_3.setMaximumSize(QtCore.QSize(200, 200))
        font = QtGui.QFont()
        font.setPointSize(14)
        font.setBold(True)
        font.setWeight(75)
        self.pushButton_3.setFont(font)
        self.pushButton_3.setStyleSheet("QPushButton{\n"
"border-radius:30px;\n"
" background-color: rgb(84, 84, 84);\n"
" color: rgb(0, 0, 0);\n"
"padding: 6px\n"
"}\n"
"\n"
"\n"
"QPushButton:hover{\n"
"border-radius:28px;\n"
" \n"
" background-color: rgb(59, 59, 59);\n"
" \n"
" color: rgb(198, 19, 19);\n"
"padding: 20px\n"
"\n"
"}\n"
"QPushButton:pressed{\n"
"border-radius:20px;\n"
" background-color: rgb(52, 55, 255);\n"
" color: rgb(15, 15, 15);\n"
"padding: 20px\n"
"\n"
"}")
        self.pushButton_3.setObjectName("pushButton_3")
        # Decorative separator lines.
        self.line = QtWidgets.QFrame(self.centralwidget)
        self.line.setGeometry(QtCore.QRect(790, 530, 118, 3))
        self.line.setFrameShape(QtWidgets.QFrame.HLine)
        self.line.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line.setObjectName("line")
        self.line_2 = QtWidgets.QFrame(self.centralwidget)
        self.line_2.setGeometry(QtCore.QRect(290, 40, 271, 21))
        self.line_2.setFrameShape(QtWidgets.QFrame.HLine)
        self.line_2.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line_2.setObjectName("line_2")
        self.line_3 = QtWidgets.QFrame(self.centralwidget)
        self.line_3.setGeometry(QtCore.QRect(790, 430, 118, 3))
        self.line_3.setFrameShape(QtWidgets.QFrame.HLine)
        self.line_3.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line_3.setObjectName("line_3")
        self.line_4 = QtWidgets.QFrame(self.centralwidget)
        self.line_4.setGeometry(QtCore.QRect(790, 340, 118, 3))
        self.line_4.setFrameShape(QtWidgets.QFrame.HLine)
        self.line_4.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line_4.setObjectName("line_4")
        self.line_5 = QtWidgets.QFrame(self.centralwidget)
        self.line_5.setGeometry(QtCore.QRect(750, 140, 51, 581))
        self.line_5.setFrameShape(QtWidgets.QFrame.VLine)
        self.line_5.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line_5.setObjectName("line_5")
        self.line_6 = QtWidgets.QFrame(self.centralwidget)
        self.line_6.setGeometry(QtCore.QRect(10, 730, 761, 16))
        self.line_6.setFrameShape(QtWidgets.QFrame.HLine)
        self.line_6.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line_6.setObjectName("line_6")
        self.line_7 = QtWidgets.QFrame(self.centralwidget)
        self.line_7.setGeometry(QtCore.QRect(340, 50, 171, 20))
        self.line_7.setFrameShape(QtWidgets.QFrame.HLine)
        self.line_7.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line_7.setObjectName("line_7")
        self.line_8 = QtWidgets.QFrame(self.centralwidget)
        self.line_8.setGeometry(QtCore.QRect(400, 60, 41, 20))
        self.line_8.setFrameShape(QtWidgets.QFrame.HLine)
        self.line_8.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line_8.setObjectName("line_8")
        # Contact/about link button (bottom-right corner).
        self.commandLinkButton = QtWidgets.QCommandLinkButton(self.centralwidget)
        self.commandLinkButton.setGeometry(QtCore.QRect(800, 720, 111, 41))
        self.commandLinkButton.setAutoDefault(False)
        self.commandLinkButton.setDefault(False)
        self.commandLinkButton.setDescription("")
        self.commandLinkButton.setObjectName("commandLinkButton")
        MainWindow.setCentralWidget(self.centralwidget)
        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
    """Install all translatable UI strings.

    Auto-generated by pyuic5 from a Qt Designer .ui file — manual edits
    here will be lost if the UI is regenerated.  Labels X1..X20 are the
    twenty measurement inputs; labels 21-31 are the result-table headers
    (Uzbek captions with Unicode math symbols).
    """
    _translate = QtCore.QCoreApplication.translate
    MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
    # Measurement input labels X1..X20.
    self.label.setText(_translate("MainWindow", "X1"))
    self.label_2.setText(_translate("MainWindow", "X2"))
    self.label_3.setText(_translate("MainWindow", "X3"))
    self.label_4.setText(_translate("MainWindow", "X4"))
    self.label_5.setText(_translate("MainWindow", "X5"))
    self.label_6.setText(_translate("MainWindow", "X6"))
    self.label_7.setText(_translate("MainWindow", "X7"))
    self.label_8.setText(_translate("MainWindow", "X8"))
    self.label_9.setText(_translate("MainWindow", "X9"))
    self.label_10.setText(_translate("MainWindow", "X10"))
    self.label_11.setText(_translate("MainWindow", "X11"))
    self.label_12.setText(_translate("MainWindow", "X12"))
    self.label_13.setText(_translate("MainWindow", "X13"))
    self.label_14.setText(_translate("MainWindow", "X14"))
    self.label_15.setText(_translate("MainWindow", "X15"))
    self.label_16.setText(_translate("MainWindow", "X16"))
    self.label_17.setText(_translate("MainWindow", "X17"))
    self.label_18.setText(_translate("MainWindow", "X18"))
    self.label_19.setText(_translate("MainWindow", "X19"))
    self.label_20.setText(_translate("MainWindow", "X20"))
    # Window heading ("PROCESSING OF MEASUREMENT RESULTS" in Uzbek).
    self.label_21.setText(_translate("MainWindow", "O\'LCHASH NATIJALARINI QAYTA ISHLASH "))
    # Result-table column headers (statistical symbols).
    self.label_23.setText(_translate("MainWindow", " №"))
    self.label_22.setText(_translate("MainWindow", " Xᵢ"))
    self.label_24.setText(_translate("MainWindow", " ̅X ̅"))
    self.label_25.setText(_translate("MainWindow", "∆Xᵢ"))
    self.label_26.setText(_translate("MainWindow", "(∆Xᵢ)²"))
    self.label_27.setText(_translate("MainWindow", " ∑ᶰᵢ(∆Xᵢ)²"))
    self.label_28.setText(_translate("MainWindow", " σ"))
    self.label_29.setText(_translate("MainWindow", " σₐ"))
    self.label_30.setText(_translate("MainWindow", " X"))
    self.label_31.setText(_translate("MainWindow", " Y"))
    # Action buttons: "Calculate", "Clear", "Graph", "Help".
    self.pushButton.setText(_translate("MainWindow", "Hisoblash"))
    self.pushButton_2.setText(_translate("MainWindow", "Tozalash"))
    self.pushButton_3.setText(_translate("MainWindow", "Grafik"))
    self.commandLinkButton.setText(_translate("MainWindow", "Help"))
| {"/test.py": ["/grafiki.py", "/dastfun.py"], "/grafiki.py": ["/dastfun.py"]} |
74,869 | Fotth/metrology-and-standardization | refs/heads/main | /dastfun.py | #!/usr/bin/python3
import math
import numpy as np
class Hisoblash:
    """Statistics over a series of measurements (metrology lab formulas 1-7).

    Measurements are passed positionally and stored in ``self.kwargs`` keyed
    by their index; ``self.n`` is the number of positional measurements.

    Fixes versus the original:
      * ``algo6``/``wow`` hard-coded ``range(20)`` and crashed (IndexError)
        for any other number of measurements — now generalised to ``self.n``
        (identical output for the original 20-value use case);
      * intermediate series are computed once instead of re-running
        ``algo2()``/``algo5()`` inside every loop iteration;
      * the no-op ``.format(...)`` call on an already-interpolated f-string
        in ``wow`` was removed.
    All formulas (including the ``n + 1`` divisors) are preserved exactly.
    """

    def __init__(self, *age, **kwargs):
        self.age = age          # raw positional measurements (tuple)
        self.kwargs = kwargs    # index -> measurement (plus any keyword args)
        self.n = 0
        for value in age:
            self.kwargs[self.n] = value
            self.n += 1

    def algo1(self):
        """Formula 1: mean of the measurements.

        NOTE(review): divides by n+1 rather than n, exactly as the original
        lab formula did — confirm against the course handout before changing.
        """
        return sum(self.kwargs.values()) / (self.n + 1)

    def algo2(self):
        """Formula 2: deviations (mean - x_i) as a list."""
        mean = self.algo1()
        return [mean - value for value in self.kwargs.values()]

    def kvad_algao3(self):
        """Squared deviation of each measurement, as a list."""
        return [dev ** 2 for dev in self.algo2()]

    def algo3(self):
        """Formula 3: sum of squared deviations (a single number)."""
        return sum(dev * dev for dev in self.algo2())

    def algo4(self):
        """Formula 4: standard deviation sigma = sqrt(sum(dev^2) / n)."""
        return math.sqrt(self.algo3() / self.n)

    def algo5(self):
        """Formula 5: standard error sigma_a = sigma / sqrt(n + 1)."""
        return self.algo4() / math.sqrt(self.n + 1)

    def algo6(self):
        """Formula 6: deviations shifted by 2.1 * sigma_a.

        The first half of the series is shifted down, the second half up
        (the original hard-coded the halves as 10 of 20).
        """
        devs = self.algo2()
        shift = 2.1 * self.algo5()
        half = self.n // 2
        shifted = [dev - shift for dev in devs[:half]]
        shifted += [dev + shift for dev in devs[half:]]
        return shifted

    def algo7(self):
        """Formula 7: Gaussian density evaluated at each deviation."""
        sigma = self.algo4()
        coeff = 1 / (sigma * math.sqrt(2 * math.pi))
        return [coeff * math.exp(-(dev ** 2) / (2 * sigma ** 2))
                for dev in self.algo2()]

    def wow(self):
        """Build the result table: index -> space-separated row string.

        Every value is truncated to two decimal places via int(x*100)/100,
        matching the original output format exactly.
        """
        def trunc2(x):
            # Truncate (not round) to 2 decimal places, as the original did.
            return int(x * 100) / 100

        mean = trunc2(self.algo1())
        ssq = trunc2(self.algo3())
        sigma = trunc2(self.algo4())
        sigma_a = trunc2(self.algo5())
        # Compute each series once (the original re-ran algo2()/algo6()/
        # algo7() from scratch on every row).
        devs = self.algo2()
        sq_devs = self.kvad_algao3()
        shifted = self.algo6()
        gauss = self.algo7()

        gotto = {}
        for i in range(self.n):
            gotto[i] = f" {mean} {trunc2(devs[i])} {trunc2(sq_devs[i])} {ssq} {sigma} {sigma_a} {trunc2(shifted[i])} {trunc2(gauss[i])}"
        return gotto
# s=Hisoblash
# for i in range(20):
# s=i
#
# s=Hisoblash(1,2,3,3,3,4,3,2,4,3,2,4,3,2,4,3,8,7,9,3)
# # #s=1,2,3,3,3,4,3,2,4,3,2,4,3,2,4,3,8,7,9,3
# # # g,u=s.algo6()
# # print(s.algo1()) #int 1
# # print(type(s.algo2())) #list 1
# # print(type(s.algo3())) #int 1
# # print(type(s.algo4())) #int 1
# # print(s.algo6()) #int 1
# # # print(type(g)) #tuple 2
# # print(type(s.algo7())) #list 1
# print(s.wow()) | {"/test.py": ["/grafiki.py", "/dastfun.py"], "/grafiki.py": ["/dastfun.py"]} |
74,905 | AvinashKalivarapu/SentimentAnalysisOfTwitter | refs/heads/master | /preprocess_p1.py | import re
import sys
from processTweets_p1 import *
from DictionaryBuilder import *
sys.path.insert(0,'ark-tokenizer')
from ark import tokenizeRawTweetText
# Read labelled tweets (tab-separated fields; fields[2] = polarity,
# fields[3] = text), preprocess each one, and write
# "<polarity>\t<tokenised tweet>" lines to the output file.
# Fixed: both file handles are now closed even if a line fails to parse
# (the original plain open()/close() leaked them on any exception).
with open(sys.argv[1], 'r') as fp, open(sys.argv[2], 'w+') as tokfp:
    for line in fp:
        fields = re.split(r'\t+', line)
        fields[3] = fields[3].strip()
        # Skip tweets that could not be downloaded.
        if fields[3] != 'Not Available':
            polarity = fields[2]
            processedTweet = processTweet(fields[3], ed, ad, swd)
            processedTweet = " ".join(processedTweet)
            tokenizedTweet = tokenizeRawTweetText(processedTweet)
            tokfp.write(polarity + "\t" + " ".join(tokenizedTweet) + "\n")
| {"/preprocess_p1.py": ["/processTweets_p1.py", "/DictionaryBuilder.py"], "/processTweets.py": ["/DictionaryBuilder.py"], "/preprocess.py": ["/processTweets.py", "/DictionaryBuilder.py"], "/processTweets_p1.py": ["/DictionaryBuilder.py"]} |
74,906 | AvinashKalivarapu/SentimentAnalysisOfTwitter | refs/heads/master | /DictionaryBuilder.py | from collections import defaultdict
specialChar='1234567890#@%^&()_=`{}:"|[]\;\',./\n\t\r '
#Create wordnet dictionary
def getWordnetDictionary():
    """Load the WordNet polarity lexicon from wordnet.txt.

    Each line is "<word>:<score>".  The first occurrence of a word wins.

    :return: dict mapping word -> float score

    Fixes: the file handle is now closed even if float() raises, and blank
    or trailing empty lines no longer crash with IndexError.
    """
    wn_dict = {}
    with open("wordnet.txt", 'r') as fp_wn:
        for line in fp_wn:
            line = line.rstrip()
            if not line:
                continue  # tolerate blank lines (original raised IndexError)
            fields = line.split(":")
            if fields[0] not in wn_dict:
                wn_dict[fields[0]] = float(fields[1])
    return wn_dict
def getEmoticonDictionary():
    """Load emoticon polarities from emoticonsWithPolarity.txt.

    Each line is "<emoticon> [<emoticon> ...] <polarity>"; every emoticon
    on the line maps to the trailing polarity word.

    :return: dict mapping emoticon -> polarity string

    Fixes: the handle is closed via ``with`` even on error, and a
    whitespace-only line no longer crashes with IndexError.
    """
    emoticonsDict = {}
    with open("emoticonsWithPolarity.txt", 'r') as f:
        for raw in f.read().split('\n'):
            parts = raw.split()
            if not parts:
                continue  # blank or whitespace-only line
            polarity = parts[-1]
            for emoticon in parts[:-1]:
                emoticonsDict[emoticon] = polarity
    return emoticonsDict
def getAcronymDictionary():
    """Load acronym expansions from the tab-separated acronym.txt.

    Each line is "<acronym><TAB><expansion>".

    :return: dict mapping acronym -> expansion string

    Fixes: the handle is closed via ``with`` even on error, and a line
    without a tab is skipped instead of raising IndexError.
    """
    acronymDict = {}
    with open("acronym.txt", 'r') as f:
        for raw in f.read().split('\n'):
            if not raw:
                continue
            fields = raw.split('\t')
            if len(fields) < 2:
                continue  # malformed line: no tab separator
            acronymDict[fields[0]] = fields[1]
    return acronymDict
def getStopwordDictionary():
    """Load stop words (one per line) from stopWords.txt.

    Each line is lowercased and stripped of the module-level ``specialChar``
    set before being stored.

    :return: defaultdict(int) mapping stop word -> 1, so both ``word in d``
             and ``d[word]`` membership styles work.

    Fix: the file handle is now closed via ``with`` even on error.
    """
    stopWords = defaultdict(int)
    with open("stopWords.txt", "r") as f:
        for line in f:
            if line:
                line = line.strip(specialChar).lower()
                stopWords[line] = 1
    return stopWords
| {"/preprocess_p1.py": ["/processTweets_p1.py", "/DictionaryBuilder.py"], "/processTweets.py": ["/DictionaryBuilder.py"], "/preprocess.py": ["/processTweets.py", "/DictionaryBuilder.py"], "/processTweets_p1.py": ["/DictionaryBuilder.py"]} |
74,907 | AvinashKalivarapu/SentimentAnalysisOfTwitter | refs/heads/master | /processTweets.py | import re
import nltk
from DictionaryBuilder import *
# Build the lookup tables once at import time (module-level side effect:
# reads emoticonsWithPolarity.txt, acronym.txt and stopWords.txt from the
# current working directory).
ed=getEmoticonDictionary()   # emoticon -> polarity word
ad=getAcronymDictionary()    # acronym -> expansion
swd=getStopwordDictionary()  # stop word -> 1
def getPOSScore(tweet):
    """Count part-of-speech categories in *tweet* (a list of word tokens).

    :param tweet: list of tokens to tag with nltk.pos_tag
    :return: (nouns, prepositions, adjectives) counted over the exact tags
             'NN', 'IN' and 'JJ' (plural/comparative variants such as
             'NNS' or 'JJR' are intentionally not counted, matching the
             original behaviour).

    Idiom fix: replaces a manual while-index loop (with an unused length
    variable) by direct iteration over the tagged pairs.
    """
    nouns_cnt = 0
    prep_cnt = 0
    adj_cnt = 0
    for _, tag in nltk.pos_tag(tweet):
        if tag == 'NN':
            nouns_cnt += 1
        elif tag == 'IN':
            prep_cnt += 1
        elif tag == 'JJ':
            adj_cnt += 1
    return nouns_cnt, prep_cnt, adj_cnt
specialChar='1234567890#@%^&()_=`{}:"|[]\;\',./\n\t\r '
""" eg greaaaaaaaaaaaaat->greaaat
param: tweet - list of words in tweet
return: list of words and count of words which has repitetion"""
def replaceRepetition(tweet):
    """Collapse runs of 4+ identical characters in each word down to 3.

    e.g. "greaaaaaaat" -> "greaaat".  Each word is also stripped of the
    module-level specialChar set afterwards.

    :param tweet: list of word tokens (modified in place)
    :return: (modified token list, number of words that contained a run)
    """
    count=0
    for i in range(len(tweet)):
        x=list(tweet[i])
        if len(x)>3:
            flag=0
            for j in range(3,len(x)):
                # Sliding window of four equal chars: blank the earliest one,
                # so a run of length k loses k-3 chars and exactly 3 survive.
                if(x[j-3]==x[j-2]==x[j-1]==x[j]):
                    x[j-3]=''
                    if flag==0:
                        # Count each word at most once, however many runs.
                        count+=1
                        flag=1
        tweet[i]=''.join(x).strip(specialChar)
    return tweet,count
"""remove the non-english or better non-ascii characters
param: list of words in tweets
return: tweet with English words only and the count of words removed."""
def removeNonEnglishWords(tweet):
    """Keep only tokens made entirely of ASCII keyboard characters.

    Empty tokens are dropped without being counted.  NOTE: despite the
    original docstring, the returned count is the number of tokens KEPT
    (the counter is incremented when the pattern matches).

    :param tweet: list of word tokens
    :return: (filtered token list, number of ASCII tokens kept)
    """
    ascii_token = re.compile(
        r'([a-zA-z0-9 \+\?\.\*\^\$\(\)\[\]\{\}\|\\/:;\'\"><,.#@!~`%&-_=])+$')
    kept = []
    matched = 0
    for token in tweet:
        if token == '':
            continue
        if ascii_token.match(token):
            matched += 1
            kept.append(token)
    return kept, matched
"""Removes the stopwords.
param: list of words in tweet,a Dictonary of stopword.
return: modified list words """
def removeStopWords(stopWordsDict,tweet):
    """Drop every token whose specialChar-stripped form is a stop word.

    :param stopWordsDict: dict whose keys are stop words
    :param tweet: list of word tokens
    :return: new token list without the stop words
    """
    return [token for token in tweet
            if token.strip(specialChar) not in stopWordsDict]
""" replaces the emoticons present in tweet with its polarity
param : emoticons dictioary emoticons as key polarity as value
return: list which contains words in tweet and return list of words in tweet after replacement"""
def replaceEmoticons(emoticonsDict,tweet):
    """Substitute each known emoticon token with its polarity word, in place.

    :param emoticonsDict: dict mapping emoticon -> polarity word
    :param tweet: list of word tokens (mutated in place)
    :return: (the same token list, 1 if any emoticon was replaced else 0)
    """
    found = 0
    for idx, token in enumerate(tweet):
        if token in emoticonsDict:
            found = 1
            tweet[idx] = emoticonsDict[token]
    return tweet, found
"""expand the Acronym in tweet
param: acronym dictionary ,acronym as key and abbreviation as value,list of words in tweet.
return: list of words in tweet after expansion and their count"""
def expandAcronym(acronymDict,tweet):
    """Expand known acronyms into their multi-word form.

    Lookup uses the specialChar-stripped token; tokens that strip away to
    nothing are dropped; non-acronyms pass through unchanged (original
    token, not the stripped form).

    :param acronymDict: dict mapping acronym -> space-separated expansion
    :param tweet: list of word tokens
    :return: (new token list, number of acronyms expanded)
    """
    expanded = []
    hits = 0
    for token in tweet:
        stripped = token.strip(specialChar)
        if not stripped:
            continue
        if stripped in acronymDict:
            hits += 1
            expanded.extend(acronymDict[stripped].split(" "))
        else:
            expanded.append(token)
    return expanded, hits
"""param: list of words in tweet
return: list of words in tweet after expanding"
eg isn't -> is not """
def expandNegation(tweet):
    """Split "xxxn't" contractions into the base word plus "not".

    "can't" -> ["can", "not"]; "isn't" -> ["is", "not"].  Matching is done
    on the specialChar-stripped token; everything else passes through
    unchanged.

    :param tweet: list of word tokens
    :return: new token list with contractions expanded
    """
    expanded = []
    for token in tweet:
        word = token.strip(specialChar)
        if word[-3:] == "n't":
            if word[-5:] == "can't":
                # "can't" is irregular: the base word is "can", not "ca".
                expanded.append('can')
            else:
                expanded.append(word[:-3])
            expanded.append('not')
        else:
            expanded.append(token)
    return expanded
"""param: a list which contains words in tweet.
return: list of words in tweet after replacement ("not","n't","no","~")
eg.
not -> negation
isn't -> negation """
def replaceNegation(tweet):
    """Replace negation markers with the single token 'negation', in place.

    A token counts as a negation if its lowercased, specialChar-stripped
    form is "no", "not", or contains "n't" anywhere.

    :param tweet: list of word tokens (mutated in place)
    :return: the same token list
    """
    for idx in range(len(tweet)):
        word = tweet[idx].lower().strip(specialChar)
        if word == "no" or word == "not" or word.count("n't") > 0:
            tweet[idx] = 'negation'
    return tweet
"""
replace url with IURLI
"""
def replaceURL(tweet):
    """Replace every www./http(s) URL in the tweet text with 'IURLI'."""
    url_pattern = '((www\.[^\s]+)|(https?://[^\s]+))'
    return re.sub(url_pattern, 'IURLI', tweet)
"""
eg: replace @sunil with IATUSERI
"""
def replaceTarget(tweet):
    """Replace every @username mention in the tweet text with 'IATUSERI'."""
    mention_pattern = '@[^\s]+'
    return re.sub(mention_pattern, 'IATUSERI', tweet)
""" param: tweet as a string
return: list of words in tweet after removing numbers """
def removeNumbers(tweet):
    """Strip a run of digits from the START of the tweet only.

    (The original pattern '^[0-9]+' is anchored, so interior digits are
    left untouched — equivalent to stripping leading digit characters.)
    """
    return tweet.lstrip('0123456789')
"""param: string tweet
return: list of words in tweet after replacement
eg : #*** - > *** """
def replaceHashtag(tweet):
    """Drop the '#' from hashtags, keeping the tag text (#word -> word)."""
    hashtag_pattern = r'#([^\s]+)'
    return re.sub(hashtag_pattern, r'\1', tweet)
def mergeSpace(tweet):
    """Collapse every run of whitespace into a single space character."""
    return re.sub(r'\s+', ' ', tweet)
""" Intial preprocessing
param: tweet string
return: preprocessed tweet """
def processTweet(tweet,ed,ad,swd):
    """Run the full normalisation pipeline over one raw tweet string.

    :param tweet: raw tweet text
    :param ed: emoticon -> polarity dict
    :param ad: acronym -> expansion dict
    :param swd: stop-word dict
    :return: (token list, feature list) where the features are string counts
             in order: [ASCII-kept, repetition, emoticon, acronym,
             nouns, prepositions, adjectives]

    Fix: the original removed empty tokens with a reversed ``xrange``/pop
    loop; ``xrange`` does not exist on Python 3, so that step now uses a
    list comprehension (identical result).
    """
    features = []
    tweet = tweet.lower()
    # Mask URLs / mentions, unwrap hashtags, normalise whitespace.
    tweet = replaceURL(tweet)
    tweet = replaceTarget(tweet)
    tweet = replaceHashtag(tweet)
    tweet = mergeSpace(tweet)
    tweet = tweet.strip('\'"')
    tweet = tweet.strip(' ')
    tweet = tweet.split(" ")

    # Each stage feeds one count into the feature vector.
    tweet, count = removeNonEnglishWords(tweet)
    features.append(str(count))
    tweet, count = replaceRepetition(tweet)
    features.append(str(count))
    tweet, count = replaceEmoticons(ed, tweet)
    features.append(str(count))
    tweet, count = expandAcronym(ad, tweet)
    features.append(str(count))
    tweet = expandNegation(tweet)
    tweet = replaceNegation(tweet)
    tweet = removeStopWords(swd, tweet)

    # Drop empty tokens left over from stripping.
    tweet = [token for token in tweet if token != '']

    n, p, a = getPOSScore(tweet)
    features.append(str(n))
    features.append(str(p))
    features.append(str(a))
    return tweet, features
| {"/preprocess_p1.py": ["/processTweets_p1.py", "/DictionaryBuilder.py"], "/processTweets.py": ["/DictionaryBuilder.py"], "/preprocess.py": ["/processTweets.py", "/DictionaryBuilder.py"], "/processTweets_p1.py": ["/DictionaryBuilder.py"]} |
74,908 | AvinashKalivarapu/SentimentAnalysisOfTwitter | refs/heads/master | /preprocess.py | import re
import sys
from processTweets import *
from DictionaryBuilder import *
sys.path.insert(0,'ark-tokenizer')
from ark import tokenizeRawTweetText
# Read labelled tweets (tab-separated fields; fields[2] = polarity,
# fields[3] = text), preprocess each one, and write the tokenised tweet
# and its feature vector to parallel output files.
# Fixed: featurefp was never closed in the original, so its buffered
# output could be lost; all three handles now use `with`.
wn_dict = getWordnetDictionary()

with open(sys.argv[1], 'r') as fp, \
     open(sys.argv[2], 'w+') as tokfp, \
     open("processedTweet_vs_features.txt", 'w+') as featurefp:
    for line in fp:
        fields = re.split(r'\t+', line)
        fields[3] = fields[3].strip()
        # Skip tweets that could not be downloaded.
        if fields[3] == 'Not Available':
            continue
        polarity = fields[2]
        processedTweet, featureVect = processTweet(fields[3], ed, ad, swd)
        processedTweet = " ".join(processedTweet)
        tokenizedTweet = tokenizeRawTweetText(processedTweet)

        # Sum the WordNet polarity scores of the tokens as one extra feature.
        wn_score = 0
        for token in tokenizedTweet:
            if token in wn_dict:
                wn_score += wn_dict[token]
        featureVect.append(str(wn_score))

        # Write the processed tweet and its feature vector to matching lines.
        tokfp.write(polarity + "\t" + " ".join(tokenizedTweet) + "\n")
        featurefp.write(" ".join(featureVect) + "\n")
| {"/preprocess_p1.py": ["/processTweets_p1.py", "/DictionaryBuilder.py"], "/processTweets.py": ["/DictionaryBuilder.py"], "/preprocess.py": ["/processTweets.py", "/DictionaryBuilder.py"], "/processTweets_p1.py": ["/DictionaryBuilder.py"]} |
74,909 | AvinashKalivarapu/SentimentAnalysisOfTwitter | refs/heads/master | /svm_classifier.py | from sklearn.multiclass import OneVsOneClassifier
def train_classifier(clf, X_train, y_train, X_test, y_test):
    """Wrap *clf* in a one-vs-one multiclass scheme, fit it, and return it.

    :param clf: base binary estimator
    :param X_train, y_train: training data
    :param X_test, y_test: accepted for interface compatibility but unused
    :return: the fitted OneVsOneClassifier

    Bug fix: the original computed ``time() - t0`` before ``t0`` was ever
    assigned (and never imported ``time``), raising NameError on every
    call; the timer is now started before fitting.
    """
    from time import time  # local import: this module has no time import

    clf = OneVsOneClassifier(clf)
    t0 = time()
    clf.fit(X_train, y_train)
    train_time = time() - t0
    print("train time: %0.3fs" % train_time)
    return clf
def predict_sentiment(clf, test_vector):
    """Return *clf*'s predictions for *test_vector*.

    Bug fix: the original called ``clf.predict(X_test)`` on an undefined
    global instead of the ``test_vector`` argument, raising NameError.
    """
    return clf.predict(test_vector)
| {"/preprocess_p1.py": ["/processTweets_p1.py", "/DictionaryBuilder.py"], "/processTweets.py": ["/DictionaryBuilder.py"], "/preprocess.py": ["/processTweets.py", "/DictionaryBuilder.py"], "/processTweets_p1.py": ["/DictionaryBuilder.py"]} |
74,910 | AvinashKalivarapu/SentimentAnalysisOfTwitter | refs/heads/master | /pre.py | import re
import sys
sys.path.insert(0,'ark-tokenizer')
from ark import tokenizeRawTweetText
def processTweet(tweet):
    """Normalise a raw tweet string.

    Lowercases, masks URLs as '||URL||' and @mentions as '||AT_USER||',
    collapses whitespace runs, unwraps hashtags (#word -> word), and trims
    surrounding quotes then spaces.
    """
    text = tweet.lower()
    text = re.sub('((www\.[^\s]+)|(https?://[^\s]+))', '||URL||', text)
    text = re.sub('@[^\s]+', '||AT_USER||', text)
    text = re.sub('[\s]+', ' ', text)
    text = re.sub(r'#([^\s]+)', r'\1', text)
    # Quote-strip happens before space-strip, so quoted text surrounded by
    # spaces keeps its quotes — preserved original ordering.
    return text.strip('\'"').strip(' ')
# Tokenise every available labelled tweet and write
# "<polarity>\t<tokens>" lines to the output file.
# Fixed: both handles are now closed even on error (`with` statement).
with open(sys.argv[1], 'r') as fp, open(sys.argv[2], 'w+') as tokfp:
    for line in fp:
        fields = re.split(r'\t+', line)
        polarity = fields[2]
        processedTweet = processTweet(fields[3])
        # Tweets that could not be downloaded become 'not available'
        # after lowercasing; skip them.
        if processedTweet != 'not available':
            tokenizedTweet = tokenizeRawTweetText(processedTweet)
            tokfp.write(polarity + "\t" + " ".join(tokenizedTweet) + "\n")
| {"/preprocess_p1.py": ["/processTweets_p1.py", "/DictionaryBuilder.py"], "/processTweets.py": ["/DictionaryBuilder.py"], "/preprocess.py": ["/processTweets.py", "/DictionaryBuilder.py"], "/processTweets_p1.py": ["/DictionaryBuilder.py"]} |
74,911 | AvinashKalivarapu/SentimentAnalysisOfTwitter | refs/heads/master | /unigramBayes.py | import re
import sys
import random
from processTweets_p1 import *
from sklearn.multiclass import OneVsOneClassifier
from sklearn import svm
from sklearn import cross_validation
import numpy as np
sys.path.insert(0,'ark-tokenizer')
from ark import tokenizeRawTweetText
from DictionaryBuilder import *
# === Phase 1: random train/test split of the preprocessed corpus. ===
# NOTE(review): every line is written to the training file, and ~5% of
# lines are ALSO copied to the test file — so the test set is not held out.
fp = open("preprocessedTweets.txt", 'r')
fp_tr=open("trainpreprocessedTweets.txt", 'w+')
fp_te=open("testpreprocessedTweets.txt", 'w+')
line=fp.readline()
ttc=0   # test-tweet counter
while line:
    rnd=random.random()
    if rnd<0.05 :
        fp_te.write(line)
        ttc+=1
    fp_tr.write(line)
    line=fp.readline()
print "Total TestTweets %d" %(ttc)
fp_tr.close()
fp_te.close()
# NOTE(review): fp (preprocessedTweets.txt) is never closed before being
# rebound to the training file below.
fp = open("trainpreprocessedTweets.txt", 'r')
# === Phase 2: build the vocabulary and per-class tweet counts. ===
# Input lines are "<polarity>\t<space-separated tokens>".
#Create dictionary of words
wordict={}
pos_count=0
neg_count=0
neu_count=0
line=fp.readline()
while line:
    line=line.rstrip()
    fields=re.split(r'\t+',line)
    if len(fields) < 2:
        # Malformed line: skip it.
        line=fp.readline()
        continue
    if "positive" == fields[0]:
        pos_count+=1
    elif "negative" == fields[0]:
        neg_count+=1
    else:
        neu_count+=1
    tokens=re.split(' ',fields[1])
    size=len(tokens)
    for i in range(size):
        wordict[tokens[i]]=0
    line=fp.readline()
fp.close()
# Assign every vocabulary word a stable column index (sorted order).
wordlist=sorted(wordict)
wordcount=0
for word in wordlist:
    wordict[word]=wordcount;
    wordcount+=1
wordlist = []
#print pos_count+neg_count+neu_count
# === Phase 3: one boolean row per training tweet and class:
# matrix[t][w] == 1 iff word with column index w occurs in tweet t.
# Dense lists — memory is O(tweets * vocabulary).
#create boolean matrix (no. of tweets)*(no. of words in dict)
pos_matrix = [[0 for i in range(wordcount)] for j in range(pos_count)]
neg_matrix = [[0 for i in range(wordcount)] for j in range(neg_count)]
neu_matrix = [[0 for i in range(wordcount)] for j in range(neu_count)]
#pos_matrix = dok_matrix((pos_count,wordcount))
#neg_matrix = dok_matrix((neg_count,wordcount))
#neu_matrix = dok_matrix((neu_count,wordcount))
fp = open("trainpreprocessedTweets.txt", 'r')
line=fp.readline()
pos=0
neg=0
neu=0
while line:
    line=line.rstrip()
    fields=re.split(r'\t+',line)
    if len(fields) <2:
        line=fp.readline()
        continue
    tokens=re.split(' ',fields[1])
    size=len(tokens)
    if "positive"==fields[0]:
        for i in range(size):
            pos_matrix[pos][wordict[tokens[i]]]=1
        pos+=1
    elif "negative"==fields[0]:
        for i in range(size):
            neg_matrix[neg][wordict[tokens[i]]]=1
        neg+=1
    else:
        for i in range(size):
            neu_matrix[neu][wordict[tokens[i]]]=1
        neu+=1
    line=fp.readline()
total_tweets=pos_count+neg_count+neu_count
#classifying
# Class priors P(class).
pos_prob=float(pos_count)/float(total_tweets)
neg_prob=float(neg_count)/float(total_tweets)
neu_prob=float(neu_count)/float(total_tweets)
#Create wordnet dictionary
# NOTE(review): fp_wn is never used or closed — getWordnetDictionary()
# opens wordnet.txt itself.
fp_wn=open("wordnet.txt",'r')
wn_dict=getWordnetDictionary()
# === Phase 4: classify each test tweet with a unigram Naive-Bayes-style
# score per class, nudged by the WordNet lexicon score. ===
fp = open("testpreprocessedTweets.txt", 'r')
line=fp.readline()
TP=0            # correctly classified tweets
total_test=0
choice=1        # NOTE(review): unused variable
while line:
    line=line.rstrip()
    fields=re.split(r'\t+',line)
    if len(fields)<2:
        line=fp.readline()
        continue
    total_test+=1
    test_tweet=fields[1]
    test_tweet=processTweet(test_tweet,ed,ad,swd)
    test_tweet=" ".join(test_tweet)
    ark_tokenised=tokenizeRawTweetText(test_tweet)
    tweet_size=len(ark_tokenised)
    # Laplace-style per-token frequency counts (start at 1) per class.
    pos_tfreq=[1 for i in range(tweet_size)]
    neg_tfreq=[1 for i in range(tweet_size)]
    neu_tfreq=[1 for i in range(tweet_size)]
    # Lexicon score: sum of WordNet polarities over the tokens.
    wn_score=0
    for j in range(tweet_size):
        if ark_tokenised[j] in wn_dict:
            wn_score+=wn_dict[ark_tokenised[j]]
    for i in range(pos_count):
        for j in range(tweet_size):
            if ark_tokenised[j] in wordict and pos_matrix[i][wordict[ark_tokenised[j]]]==1:
                pos_tfreq[j]+=1
    #print pos_tfreq
    for i in range(neg_count):
        for j in range(tweet_size):
            if ark_tokenised[j] in wordict and neg_matrix[i][wordict[ark_tokenised[j]]]==1:
                neg_tfreq[j]+=1
    #print neg_tfreq
    for i in range(neu_count):
        for j in range(tweet_size):
            if ark_tokenised[j] in wordict and neu_matrix[i][wordict[ark_tokenised[j]]]==1:
                neu_tfreq[j]+=1
    #print neu_tfreq
    # Per-class likelihood product, scaled by 10000 against underflow.
    # NOTE(review): these loops index tfreq[j] with the stale j left over
    # from the counting loops above (i.e. the LAST token) instead of i, so
    # each product is freq(last_token)^tweet_size; also NameError when
    # tweet_size == 0.
    pos_uni_prob=10000
    for i in range(tweet_size):
        pos_uni_prob*=float(pos_tfreq[j])/float(pos_count+1)
    neg_uni_prob=10000
    for i in range(tweet_size):
        neg_uni_prob*=float(neg_tfreq[j])/float(neg_count+1)
    neu_uni_prob=10000
    for i in range(tweet_size):
        neu_uni_prob*=float(neu_tfreq[j])/float(neu_count+1)
    # Posterior-style score = prior * likelihood.
    pos_given_tweet=pos_prob*pos_uni_prob
    neg_given_tweet=neg_prob*neg_uni_prob
    neu_given_tweet=neu_prob*neu_uni_prob
    #print wn_score
    #print pos_given_tweet,neg_given_tweet,neu_given_tweet
    # Nudge the scores with the WordNet lexicon when it is confident.
    if wn_score >1:
        pos_given_tweet+=wn_score
    elif wn_score <-1:
        neg_given_tweet+=(-1*wn_score)
    #print pos_given_tweet,neg_given_tweet,neu_given_tweet
    # Argmax over the three class scores; compare with the gold label.
    if pos_given_tweet>=neg_given_tweet:
        if pos_given_tweet>=neu_given_tweet:
            #print "Classified : positive","Actual : %s" %fields[0]
            if 'positive' in fields[0]:
                TP+=1
        else:
            #print "Classified : neutral","Actual : %s" %fields[0]
            if 'neutral'in fields[0]:
                TP+=1
    else:
        if neg_given_tweet>=neu_given_tweet:
            #print "Classified : negative","Actual : %s" %fields[0]
            if 'negative' in fields[0]:
                TP+=1
        else:
            #print "Classified : neutral","Actual : %s" %fields[0]
            if 'neutral' in fields[0]:
                TP+=1
    line=fp.readline()
print "Unigram Accuracy : ",float(TP)/float(total_test)
fp.close()
| {"/preprocess_p1.py": ["/processTweets_p1.py", "/DictionaryBuilder.py"], "/processTweets.py": ["/DictionaryBuilder.py"], "/preprocess.py": ["/processTweets.py", "/DictionaryBuilder.py"], "/processTweets_p1.py": ["/DictionaryBuilder.py"]} |
74,912 | AvinashKalivarapu/SentimentAnalysisOfTwitter | refs/heads/master | /classifier.py |
import logging
import numpy as np
from optparse import OptionParser
import sys
import json
from time import time
import matplotlib.pyplot as plt
from sklearn.datasets import load_files
from sklearn.datasets import fetch_20newsgroups
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.feature_extraction.text import HashingVectorizer
from sklearn.feature_selection import SelectKBest, chi2
from sklearn.ensemble import RandomForestClassifier
from sklearn.pipeline import Pipeline
from sklearn.svm import LinearSVC,NuSVC
from sklearn.naive_bayes import MultinomialNB, GaussianNB
from sklearn.utils.extmath import density
from sklearn import metrics
from sklearn.cross_validation import train_test_split
from fastica import fastica
from sklearn.decomposition.truncated_svd import TruncatedSVD
from scipy.io import mmread,mmwrite
"""
print("Loading dataset ")
bunch = load_files('../global/')
X_train, X_test, y_train, y_test = train_test_split(bunch.data, bunch.target, test_size=.16)
categories = [bunch.target_names[idx] for idx in y_test] # Load Categories
target_files = [bunch.filenames[idx] for idx in y_test]
print(target_files[0:5])
with open("question_tags.json","rb") as ques_tags:
question_tags = json.load(ques_tags)
print("Extracting features from the training data using a sparse vectorizer")
t0 = time()
if opts.use_hashing:
vectorizer = HashingVectorizer(stop_words='english', non_negative=True,
n_features=opts.n_features)
X_train = vectorizer.transform(X_train)
else:
vectorizer = TfidfVectorizer(sublinear_tf=True, max_df=0.5,
stop_words='english')#,max_features=10000)
X_train = vectorizer.fit_transform(X_train)
duration = time() - t0
print("n_samples: %d, n_features: %d" % X_train.shape)
print()
print("Extracting features from the test data using the same vectorizer")
t0 = time()
#X_test,y_test = get_test_data()
X_test = vectorizer.transform(X_test)
duration = time() - t0
print(X_train.shape)
#x,z, X_train = fastica(X_train.toarray())
svd = TruncatedSVD(n_components=1000)
print(X_train)
#print("n_samples: %d, n_features: %d" % X_test.shape)
print()
# mapping from integer feature name to original token string
if opts.use_hashing:
feature_names = None
else:
feature_names = vectorizer.get_feature_names()
if opts.select_chi2:
print("Extracting %d best features by a chi-squared test" %
opts.select_chi2)
t0 = time()
ch2 = SelectKBest(chi2, k=opts.select_chi2)
X_train = ch2.fit_transform(X_train, y_train)
X_test = ch2.transform(X_test)
if feature_names:
# keep selected feature names
feature_names = [feature_names[i] for i
in ch2.get_support(indices=True)]
print("done in %fs" % (time() - t0))
print()
if feature_names:
feature_names = np.asarray(feature_names)
print(X_train.shape)
X_train = svd.fit_transform(X_train)
X_test = svd.transform(X_test)
#u,o,X_train = fastica(X_train.toarray(),n_comp=1000)
print(X_train)
print(X_train.shape)
def trim(s):
#Trim string to fit on terminal (assuming 80-column display)
return s if len(s) <= 80 else s[:77] + "..."
"""
###############################################################################
# Benchmark classifiers
def benchmark(clf,X_train,y_train,X_test,y_test):
    """Fit *clf*, predict on the test split, print timing/accuracy, and
    return (classifier name, accuracy, train time, test time).

    NOTE(review): ``opts``, ``categories``, ``feature_names`` and ``trim``
    are defined only inside the commented-out setup block above — calling
    this with any of the print flags enabled will raise NameError until
    that setup is restored.
    """
    print('_' * 80)
    print("Training: ")
    print(clf)
    t0 = time()
    clf.fit(X_train, y_train)
    train_time = time() - t0
    print("train time: %0.3fs" % train_time)
    t0 = time()
    pred = clf.predict(X_test)
    # print("------------predictions------------")
    # print(pred)
    # print("-------------------------")
    test_time = time() - t0
    print("test time: %0.3fs" % test_time)
    score = metrics.accuracy_score(y_test, pred)
    print("accuracy: %0.3f" % score)
    # Linear models expose coef_; report dimensionality/sparsity for them.
    if hasattr(clf, 'coef_'):
        print("dimensionality: %d" % clf.coef_.shape[1])
        print("density: %f" % density(clf.coef_))
        if opts.print_top10 and feature_names is not None:
            print("top 10 keywords per class:")
            for i, category in enumerate(categories):
                top10 = np.argsort(clf.coef_[i])[-10:]
                print(trim("%s: %s"
                           % (category, " ".join(feature_names[top10]))))
        print()
    if opts.print_report:
        print("classification report:")
        print(metrics.classification_report(y_test, pred,
                                            target_names=categories))
    if opts.print_cm:
        print("confusion matrix:")
        print(metrics.confusion_matrix(y_test, pred))
    print()
    clf_descr = str(clf).split('(')[0]
    return clf_descr, score, train_time, test_time
def classification_task(X_train,y_train,X_test,y_test):
    """Benchmark several classifiers on the given split and plot the results.

    :param X_train, y_train: training features/labels
    :param X_test, y_test: test features/labels

    Bug fix: benchmark() takes (clf, X_train, y_train, X_test, y_test), but
    the NuSVC and GaussianNB calls originally passed only the classifier,
    raising TypeError before any training happened.  The data split is now
    forwarded to every benchmark call.
    """
    results = []
    print('=' * 80)
    for penalty in ["l2", "l1"]:
        print('=' * 80)
        print("%s penalty" % penalty.upper())
        # Liblinear model.
        results.append(benchmark(LinearSVC(loss='l2', penalty=penalty,
                                           dual=False, tol=1e-3),
                                 X_train, y_train, X_test, y_test))
    # RBF-kernel SVM.
    results.append(benchmark(NuSVC(cache_size=1000, probability=True),
                             X_train, y_train, X_test, y_test))
    # Sigmoid-kernel SVM.
    results.append(benchmark(NuSVC(kernel='sigmoid', cache_size=1000,
                                   probability=True),
                             X_train, y_train, X_test, y_test))
    print('=' * 80)
    print("LinearSVC with L1-based feature selection")
    # Gaussian naive Bayes (note: requires a dense feature matrix).
    results.append(benchmark(GaussianNB(), X_train, y_train, X_test, y_test))

    # Transpose the result tuples into parallel lists and plot score vs
    # (normalised) train/test time per classifier.
    indices = np.arange(len(results))
    results = [[x[i] for x in results] for i in range(4)]
    clf_names, score, training_time, test_time = results
    training_time = np.array(training_time) / np.max(training_time)
    test_time = np.array(test_time) / np.max(test_time)
    plt.figure(figsize=(12, 8))
    plt.title("Score")
    plt.barh(indices, score, .2, label="score", color='r')
    plt.barh(indices + .3, training_time, .2, label="training time", color='g')
    plt.barh(indices + .6, test_time, .2, label="test time", color='b')
    plt.yticks(())
    plt.legend(loc='best')
    plt.subplots_adjust(left=.25)
    plt.subplots_adjust(top=.95)
    plt.subplots_adjust(bottom=.05)
    for i, c in zip(indices, clf_names):
        plt.text(-.3, i, c)
    plt.show()
| {"/preprocess_p1.py": ["/processTweets_p1.py", "/DictionaryBuilder.py"], "/processTweets.py": ["/DictionaryBuilder.py"], "/preprocess.py": ["/processTweets.py", "/DictionaryBuilder.py"], "/processTweets_p1.py": ["/DictionaryBuilder.py"]} |
74,913 | AvinashKalivarapu/SentimentAnalysisOfTwitter | refs/heads/master | /unigram1.py | import re
import sys
import random
from processTweets import *
from sklearn.multiclass import OneVsOneClassifier
from sklearn import svm
from sklearn import cross_validation
from scipy.sparse import *
import numpy as np
from sklearn.utils import shuffle
sys.path.insert(0,'ark-tokenizer')
from ark import tokenizeRawTweetText
# === Phase 1: random split — here ~99% of lines go to the TEST file and
# only the remaining ~1% to the training file (exclusive if/else split).
fp = open("preprocessedTweets.txt", 'r')
fp_tr=open("trainpreprocessedTweets.txt", 'w+')
fp_te=open("testpreprocessedTweets.txt", 'w+')
line=fp.readline()
ttc=0   # test-tweet counter
while line:
    rnd=random.random()
    if rnd<0.99 :
        fp_te.write(line)
        ttc+=1
    else:
        fp_tr.write(line)
    line=fp.readline()
print "Total TestTweets %d" %(ttc)
fp_tr.close()
fp_te.close()
# NOTE(review): fp (preprocessedTweets.txt) is never closed before rebinding.
fp = open("trainpreprocessedTweets.txt", 'r')
# === Phase 2: build the vocabulary and per-class tweet counts. ===
#Create dictionary of words
wordict={}
pos_count=0
neg_count=0
neu_count=0
line=fp.readline()
while line:
    line=line.rstrip()
    fields=re.split(r'\t+',line)
    if len(fields) < 2:
        # Malformed line: skip it.
        line=fp.readline()
        continue
    if "positive" == fields[0]:
        pos_count+=1
    elif "negative" == fields[0]:
        neg_count+=1
    else:
        neu_count+=1
    tokens=re.split(' ',fields[1])
    size=len(tokens)
    for i in range(size):
        wordict[tokens[i]]=0
    line=fp.readline()
# Assign every vocabulary word a stable column index (sorted order).
wordlist=sorted(wordict)
wordcount=0
for word in wordlist:
    wordict[word]=wordcount;
    wordcount+=1
fp.close()
wordlist = []
#print pos_count+neg_count+neu_count
# === Phase 3: sparse boolean occurrence matrices, one row per tweet. ===
#create boolean matrix (no. of tweets)*(no. of words in dict)
#pos_matrix = [[0 for i in range(wordcount)] for j in range(pos_count)]
#neg_matrix = [[0 for i in range(wordcount)] for j in range(neg_count)]
#neu_matrix = [[0 for i in range(wordcount)] for j in range(neu_count)]
pos_matrix = dok_matrix((pos_count,wordcount))
neg_matrix = dok_matrix((neg_count,wordcount))
neu_matrix = dok_matrix((neu_count,wordcount))
fp = open("trainpreprocessedTweets.txt", 'r')
line=fp.readline()
pos=0
neg=0
neu=0
while line:
    line=line.rstrip()
    fields=re.split(r'\t+',line)
    if len(fields) <2:
        line=fp.readline()
        continue
    tokens=re.split(' ',fields[1])
    size=len(tokens)
    if "positive"==fields[0]:
        for i in range(size):
            pos_matrix[pos,wordict[tokens[i]]]=1
        pos+=1
    elif "negative"==fields[0]:
        for i in range(size):
            neg_matrix[neg,wordict[tokens[i]]]=1
        neg+=1
    else:
        for i in range(size):
            neu_matrix[neu,wordict[tokens[i]]]=1
        neu+=1
    line=fp.readline()
# NOTE(review): tocsr() returns a NEW matrix; these three results are
# discarded, so the variables are still DOK matrices here.
pos_matrix.tocsr()
neg_matrix.tocsr()
neu_matrix.tocsr()
# Append the class label as an extra last column (0/1/2), then stack all
# classes into one matrix and shuffle the rows.
pos_matrix = hstack([pos_matrix,csr_matrix([[0],]*pos_count)])
neg_matrix = hstack([neg_matrix,csr_matrix([[1],]*neg_count)])
neu_matrix = hstack([neu_matrix,csr_matrix([[2],]*neu_count)])
final_matrix = vstack([pos_matrix,neg_matrix])
final_matrix = vstack([final_matrix,neu_matrix])
final_matrix = shuffle(final_matrix)
# NOTE(review): hstack/vstack return COO matrices, which do not support
# slicing like [:,-1] — confirm shuffle() converts to CSR, else this raises.
train_Y = final_matrix[:,-1].toarray()[:,0]
print train_Y
print "shape",final_matrix.get_shape()
total_tweets=pos_count+neg_count+neu_count
word_dict = {}
word_list = []
#train_X,train_Y = parse_to_classifier()
#trained_clf = train_classifier(LinearSVC(random_state=0),train_X,train_Y)
# 5-fold cross-validation of a one-vs-one linear SVM on the bag-of-words
# features (label column excluded).
score = cross_validation.cross_val_score(OneVsOneClassifier(svm.LinearSVC(random_state=0)),final_matrix[:,:-1],train_Y,cv=5)
print "average accuracy of svm ",score.mean()
#classifying
# Class priors P(class).
pos_prob=float(pos_count)/float(total_tweets)
neg_prob=float(neg_count)/float(total_tweets)
neu_prob=float(neu_count)/float(total_tweets)
#Create wordnet dictionary
# Word -> polarity score lexicon; first occurrence of a word wins.
fp_wn=open("wordnet.txt",'r')
wn_dict={}
line=fp_wn.readline()
while line:
    line=line.rstrip()
    fields=line.split(":")
    if fields[0] not in wn_dict:
        wn_dict[fields[0]]=float(fields[1])
    line=fp_wn.readline()
fp_wn.close()
# === Phase 4: classify each test tweet (unigram Bayes + WordNet nudge). ===
fp = open("testpreprocessedTweets.txt", 'r')
line=fp.readline()
TP=0
total_test=0
choice=1    # NOTE(review): unused variable
while line:
    # NOTE(review): dead branch — the while condition guarantees line is
    # truthy, so raw_input() (Python 2 only) is never reached.
    if not line:
        test_tweet=raw_input("Enter Tweet :")
    else:
        line=line.rstrip()
        fields=re.split(r'\t+',line)
        if len(fields)<2:
            line=fp.readline()
            continue
        total_test+=1
        print total_test
        test_tweet=fields[1]
    # NOTE(review): this file imports processTweets (not the _p1 variant),
    # whose processTweet returns a (tokens, features) TUPLE — the join
    # below would need processTweet(...)[0] to work.
    test_tweet=processTweet(test_tweet,ed,ad,swd)
    test_tweet=" ".join(test_tweet)
    ark_tokenised=tokenizeRawTweetText(test_tweet)
    tweet_size=len(ark_tokenised)
    # Laplace-style per-token frequency counts (start at 1) per class.
    pos_tfreq=[1 for i in range(tweet_size)]
    neg_tfreq=[1 for i in range(tweet_size)]
    neu_tfreq=[1 for i in range(tweet_size)]
    wn_score=0
    for j in range(tweet_size):
        if ark_tokenised[j] in wn_dict:
            wn_score+=wn_dict[ark_tokenised[j]]
    # NOTE(review): pos_matrix now carries an extra label column and (after
    # hstack) is COO — the dense-style pos_matrix[i][col] indexing below was
    # kept from the list-of-lists version and will fail on scipy matrices.
    for i in range(pos_count):
        for j in range(tweet_size):
            if ark_tokenised[j] in wordict and pos_matrix[i][wordict[ark_tokenised[j]]]==1:
                pos_tfreq[j]+=1
    #print pos_tfreq
    for i in range(neg_count):
        for j in range(tweet_size):
            if ark_tokenised[j] in wordict and neg_matrix[i][wordict[ark_tokenised[j]]]==1:
                neg_tfreq[j]+=1
    #print neg_tfreq
    for i in range(neu_count):
        for j in range(tweet_size):
            if ark_tokenised[j] in wordict and neu_matrix[i][wordict[ark_tokenised[j]]]==1:
                neu_tfreq[j]+=1
    #print neu_tfreq
    # NOTE(review): same stale-j bug as unigramBayes.py — the products use
    # tfreq[j] (the last token) instead of tfreq[i].
    pos_uni_prob=10
    for i in range(tweet_size):
        pos_uni_prob*=float(pos_tfreq[j])/float(pos_count+1)
    neg_uni_prob=10
    for i in range(tweet_size):
        neg_uni_prob*=float(neg_tfreq[j])/float(neg_count+1)
    neu_uni_prob=10
    for i in range(tweet_size):
        neu_uni_prob*=float(neu_tfreq[j])/float(neu_count+1)
    pos_given_tweet=pos_prob*pos_uni_prob
    neg_given_tweet=neg_prob*neg_uni_prob
    neu_given_tweet=neu_prob*neu_uni_prob
    print wn_score
    print pos_given_tweet,neg_given_tweet,neu_given_tweet
    # Nudge the scores with the WordNet lexicon when it is confident.
    if wn_score >0.8:
        pos_given_tweet+=wn_score
    elif wn_score <-0.8:
        neg_given_tweet+=(-1*wn_score)
    print pos_given_tweet,neg_given_tweet,neu_given_tweet
    # Argmax over the three class scores; compare with the gold label.
    if pos_given_tweet>=neg_given_tweet:
        if pos_given_tweet>=neu_given_tweet:
            print "Classified : positive","Actual : %s" %fields[0]
            if 'positive' in fields[0]:
                TP+=1
        else:
            print "Classified : neutral","Actual : %s" %fields[0]
            if 'neutral'in fields[0]:
                TP+=1
    else:
        if neg_given_tweet>=neu_given_tweet:
            print "Classified : negative","Actual : %s" %fields[0]
            if 'negative' in fields[0]:
                TP+=1
        else:
            print "Classified : neutral","Actual : %s" %fields[0]
            if 'neutral' in fields[0]:
                TP+=1
    line=fp.readline()
print "Accuracy : ",float(TP)/float(total_test)
fp.close()
| {"/preprocess_p1.py": ["/processTweets_p1.py", "/DictionaryBuilder.py"], "/processTweets.py": ["/DictionaryBuilder.py"], "/preprocess.py": ["/processTweets.py", "/DictionaryBuilder.py"], "/processTweets_p1.py": ["/DictionaryBuilder.py"]} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.