max_stars_repo_path stringlengths 4 286 | max_stars_repo_name stringlengths 5 119 | max_stars_count int64 0 191k | id stringlengths 1 7 | content stringlengths 6 1.03M | content_cleaned stringlengths 6 1.03M | language stringclasses 111 values | language_score float64 0.03 1 | comments stringlengths 0 556k | edu_score float64 0.32 5.03 | edu_int_score int64 0 5 |
|---|---|---|---|---|---|---|---|---|---|---|
neo/detector/urls.py | MLH-Fellowship/neo-detection | 4 | 6617851 | <gh_stars>1-10
from django.urls import path
from .views import browse
from .views import news
urlpatterns = [
path("browse/", browse, name="browse"),
path("news/", news, name="news"),
] | from django.urls import path
from .views import browse
from .views import news
urlpatterns = [
path("browse/", browse, name="browse"),
path("news/", news, name="news"),
] | none | 1 | 1.633212 | 2 | |
make_manifest.py | damooooooooooh/webthings-network-presence-detection | 0 | 6617852 | <reponame>damooooooooooh/webthings-network-presence-detection
#!/usr/bin/env python3
"""Generate a WebThings ``manifest.json`` from a legacy ``package.json``.

Reads the add-on metadata from package.json and writes the equivalent
manifest.json next to it.  Exits with status 1 on any read/write error.
"""
import json
import os
import sys


def translate(package_json):
    """Translate *package_json* into a ``manifest.json`` in the same directory.

    Parameters
    ----------
    package_json : str
        Path to the package.json file to convert.

    Raises
    ------
    SystemExit
        If the input cannot be read/parsed or the output cannot be written.
    """
    dname = os.path.dirname(package_json)
    manifest_json = os.path.join(dname, 'manifest.json')

    try:
        with open(package_json, 'rt') as f:
            package = json.load(f)
    except (IOError, OSError, ValueError) as e:
        print('Error loading package.json:', e)
        sys.exit(1)

    manifest = {
        'description': package['description'],
        'gateway_specific_settings': {
            'webthings': {
                'exec': package['moziot']['exec'],
                'strict_min_version': '0.10.0',
                'strict_max_version': '*',
            },
        },
        'homepage_url': package['homepage'],
        'id': package['name'],
        'license': package['license'],
        'manifest_version': 1,
        'name': package['display_name'],
        # The gateway UI limits short names to 12 characters.
        'short_name': package['display_name'][0:12],
        'version': package['version'],
    }

    # 'author' may be either a plain string or an object with a 'name' key.
    if isinstance(package['author'], str):
        manifest['author'] = package['author']
    else:
        manifest['author'] = package['author']['name']

    # The add-on type is optional in package.json; default to 'adapter'.
    manifest['gateway_specific_settings']['webthings']['primary_type'] = \
        package['moziot'].get('type', 'adapter')

    # Carry over optional configuration defaults and their JSON schema.
    if 'config' in package['moziot'] or 'schema' in package['moziot']:
        manifest['options'] = {}

        if 'config' in package['moziot']:
            manifest['options']['default'] = package['moziot']['config']

        if 'schema' in package['moziot']:
            manifest['options']['schema'] = package['moziot']['schema']

    try:
        with open(manifest_json, 'wt') as f:
            json.dump(manifest, f, ensure_ascii=True, indent=2, sort_keys=True)
    except (IOError, OSError, ValueError) as e:
        print('Error writing manifest.json:', e)
        sys.exit(1)


if __name__ == '__main__':
    if len(sys.argv) != 2:
        print('Usage:\n\t{} /path/to/package.json'.format(sys.argv[0]))
        sys.exit(1)

    translate(sys.argv[1])
| #!/usr/bin/env python3
import json
import os
import sys


def translate(package_json):
    """Build a manifest.json beside *package_json* from its contents."""
    out_path = os.path.join(os.path.dirname(package_json), 'manifest.json')

    try:
        with open(package_json, 'rt') as f:
            pkg = json.load(f)
    except (IOError, OSError, ValueError) as e:
        print('Error loading package.json:', e)
        sys.exit(1)

    moziot = pkg['moziot']
    webthings = {
        'exec': moziot['exec'],
        'strict_min_version': '0.10.0',
        'strict_max_version': '*',
        # 'type' is optional in package.json; 'adapter' is the default.
        'primary_type': moziot.get('type', 'adapter'),
    }

    author = pkg['author']
    result = {
        'description': pkg['description'],
        'gateway_specific_settings': {'webthings': webthings},
        'homepage_url': pkg['homepage'],
        'id': pkg['name'],
        'license': pkg['license'],
        'manifest_version': 1,
        'name': pkg['display_name'],
        'short_name': pkg['display_name'][0:12],
        'version': pkg['version'],
        # The author may be given as a bare string or as an object.
        'author': author if type(author) is str else author['name'],
    }

    options = {}
    if 'config' in moziot:
        options['default'] = moziot['config']
    if 'schema' in moziot:
        options['schema'] = moziot['schema']
    if options:
        result['options'] = options

    try:
        with open(out_path, 'wt') as f:
            json.dump(result, f, ensure_ascii=True, indent=2, sort_keys=True)
    except (IOError, OSError, ValueError) as e:
        print('Error writing manifest.json:', e)
        sys.exit(1)
if __name__ == '__main__':
if len(sys.argv) != 2:
print('Usage:\n\t{} /path/to/package.json'.format(sys.argv[0]))
sys.exit(1)
translate(sys.argv[1]) | fr | 0.221828 | #!/usr/bin/env python3 | 2.368249 | 2 |
backend/core/views.py | ES2-UFPI/404-portal | 1 | 6617853 | from django.shortcuts import render, redirect, get_object_or_404
from .models import Portal
from .forms import PortalForm


def home(request):
    """Landing page: render index.html with the first Portal on record."""
    first_portal = Portal.objects.all().first()
    return render(request, 'index.html', {"portal": first_portal})


def listar(request):
    """List every portal."""
    return render(request, 'core/listar.html',
                  {'portais': Portal.objects.all()})


def visualizar(request, id):
    """Show the details of one portal; 404 when the id is unknown."""
    portal = get_object_or_404(Portal, id=id)
    return render(request, 'core/visualizar.html', {'portal': portal})


def editar(request, id):
    """Edit a portal: save and redirect on a valid POST, else show the form."""
    portal = get_object_or_404(Portal, id=id)
    if request.method == "POST":
        form = PortalForm(request.POST, instance=portal)
        if form.is_valid():
            form.save()
            return redirect('core:listar')
    else:
        form = PortalForm(instance=portal)
    return render(request, 'core/editar.html', {'form': form})
| from django.shortcuts import render, redirect, get_object_or_404
from .models import Portal
from .forms import PortalForm
# NOTE(review): indentation was lost in this extract; code kept byte-identical.
# Landing page: render index.html with the first Portal on record (may be None).
def home(request):
portal = Portal.objects.all().first()
context = {
"portal": portal
}
return render(request, 'index.html', context)
# List all portals.
def listar(request):
portais = Portal.objects.all()
return render(request, 'core/listar.html', {'portais': portais})
# Detail view for one portal; 404s when the id is unknown.
def visualizar(request, id):
portal = get_object_or_404(Portal, id = id)
return render(request, 'core/visualizar.html', {'portal': portal})
# Edit view: on a valid POST save and redirect to the list; otherwise
# (GET or invalid form) re-render the edit form.
def editar(request, id):
portal = get_object_or_404(Portal, id = id)
if request.method == "POST":
form = PortalForm(request.POST, instance = portal)
if form.is_valid():
form.save()
return redirect('core:listar')
else:
form = PortalForm(instance = portal)
return render(request, 'core/editar.html', {'form': form})
| none | 1 | 2.046894 | 2 | |
project/controllers/library.py | 21-prashant/movielibiary | 0 | 6617854 | <reponame>21-prashant/movielibiary
# -*- coding: utf-8 -*-
from project import app, security
from flask import render_template, request, session, redirect, url_for, jsonify
from flask.ext.wtf import Form, TextField, validators
from project.model.Library import Library
from project.model.User import User
import json


@app.route('/libraries')
@security('user')
def libraries(user=None):
    """List the current user's movie libraries."""
    user_libs = Library.objects(user=user, unit='Movie')
    return render_template('library/master.html', libraries=user_libs,
                           user=user)


@app.route('/libraries/add', methods=['POST'])
@security('user')
def addLibrary(user=None):
    """Create a new movie library; names are unique per user."""
    name = request.form['name']
    existing = Library.objects(user=user, unit='Movie', name=name).first()
    if existing:
        return jsonify(response='error',
                       message='Library with name %s already exists'
                               % existing.name), 404
    Library(user=user, unit='Movie', name=name).save()
    return jsonify(response='success', type='redirect',
                   path=url_for(endpoint='libraries', _external=True))


@app.route('/libraries/remove', methods=['POST'])
@security('user')
def removeLibrary(user=None):
    """Delete a library; the built-in Master/Loaned libraries are protected."""
    name = request.form['name']
    target = Library.objects(user=user, unit='Movie', name=name).first()
    if not target:
        return jsonify(response='error',
                       message='Library requested does not exists'), 404
    if target.name == 'Master' or target.name == 'Loaned':
        return jsonify(response='error',
                       message='Library %s cannot be deleted'
                               % target.name), 404
    target.delete()
    return jsonify(response='success', type='redirect',
                   path=url_for(endpoint='libraries', _external=True))


@app.route('/libraries/<name>')
@security('user')
def library(name, user=None):
    """Render one library's page."""
    from project.model.Movie import Movie
    lib = Library.objects(user=user, name=name, unit='Movie').first()
    if not lib:
        return render_template('404.html',
                               message='Unable to find given Library',
                               user=user), 404
    return render_template('library/library.html', library=lib, user=user)


@app.route('/libraries/<name>/<int:index>')
@security('user')
def libraryItem(name, index, user=None):
    """Render one movie of a library, addressed by its 1-based position."""
    from project.model.Movie import Movie
    lib = Library.objects(user=user, name=name, unit='Movie').first()
    if not lib:
        return render_template('404.html',
                               message='Unable to find given Library',
                               user=user), 404
    movie = lib.hydrateUnit(index - 1)
    if not movie:
        return render_template('404.html',
                               message='Unable to find given Movie',
                               user=user), 404
    return render_template('library/libraryItem.html', item=movie, user=user,
                           library=lib, index=index)


@app.route('/libraries/<name>/remove', methods=['POST'])
@security('user')
def removelibraryItem(name, user=None):
    """Remove a movie; removing from Master removes it from every library."""
    from project.model.Movie import Movie
    lib = Library.objects(user=user, name=name, unit='Movie').first()
    if not lib:
        return jsonify(response='error',
                       message='Unable to find the given Library'), 404
    index = int(request.form['id'])
    if not index:
        return jsonify(response='error', message='Invalid parameters'), 404
    movie = lib.hydrateUnit(index - 1)
    if not movie:
        return jsonify(response='error',
                       message='Unable to find the given Movie in Library %s'
                               % lib.name), 404
    if lib.name == 'Master':
        # Deleting from Master cascades to all of the user's libraries.
        for each in Library.objects(user=user, unit='Movie'):
            each.removeUnit(movie)
    else:
        lib.removeUnit(movie)
    return jsonify(response='success', type='redirect',
                   path=url_for(endpoint='library', name=name, _external=True))
@app.route('/libraries/<name>/add', methods=['POST'])
@security('user')
def addlibraryItem(name,user=None):
from project.model.Movie import Movie
library = Library.objects(user=user,name=name,unit='Movie').first()
if not library:
return jsonify(response='error',message='Unable to find the given Library'),404
movie_id = request.form['id']
if not movie_id:
return jsonify(response='error',message='Invalid Movie given'),404
from project.model.Movie import Movie
movie = Movie.objects(tmdb_id=movie_id).first()
if movie:
if library.name != 'Master':
master = Library.objects(user=user,name="Master",unit='Movie').first()
master.addUnit(movie)
library.addUnit(movie)
return jsonify(response='success',type='redirect',path=url_for(endpoint='library',name=name,_external=True))
from tmdb3 import Movie as tmdbMovie
movie = tmdbMovie(movie_id)
if not movie:
return jsonify(response='error',message='Invalid Movie given'),404
from project.model.Movie import Movie
movie = Movie.convertMovie(movie)
library.addUnit(movie)
if library.name != 'Master':
master = Library.objects(user=user,name="Master",unit='Movie').first()
master.addUnit(movie)
return jsonify(response='success',type='redirect',path=url_for(endpoint='library',name=name,_external=True)) | # -*- coding: utf-8 -*-
from project import app, security
from flask import render_template, request, session, redirect, url_for,jsonify
from flask.ext.wtf import Form, TextField, validators
from project.model.Library import Library
from project.model.User import User
import json
# NOTE(review): indentation was lost in this extract; code kept byte-identical.
# List the current user's movie libraries.
@app.route('/libraries')
@security('user')
def libraries(user = None):
libraries = Library.objects(user=user,unit='Movie')
return render_template('library/master.html', libraries=libraries,user=user)
# Create a new movie library; names are unique per user.
@app.route('/libraries/add', methods=['POST'])
@security('user')
def addLibrary(user = None):
name = request.form['name']
library = Library.objects(user=user,unit='Movie',name=name).first()
if library:
return jsonify(response='error',message='Library with name %s already exists' % library.name),404
library = Library(user=user,unit='Movie',name=name).save()
return jsonify(response='success',type='redirect',path=url_for(endpoint='libraries',_external=True))
# Delete a library; the built-in 'Master' and 'Loaned' libraries are protected.
@app.route('/libraries/remove', methods=['POST'])
@security('user')
def removeLibrary(user = None):
name = request.form['name']
library = Library.objects(user=user,unit='Movie',name=name).first()
if not library:
return jsonify(response='error',message='Library requested does not exists'),404
if library.name == 'Master' or library.name == 'Loaned':
return jsonify(response='error',message='Library %s cannot be deleted' % library.name),404
library.delete()
return jsonify(response='success',type='redirect',path=url_for(endpoint='libraries',_external=True))
# Render one library's page.
@app.route('/libraries/<name>')
@security('user')
def library(name,user=None):
from project.model.Movie import Movie
library = Library.objects(user=user,name=name,unit='Movie').first()
if not library:
return render_template('404.html',message='Unable to find given Library',user=user),404
return render_template('library/library.html',library=library,user=user)
# Render one movie of a library, addressed by its 1-based position.
@app.route('/libraries/<name>/<int:index>')
@security('user')
def libraryItem(name, index,user=None):
from project.model.Movie import Movie
library = Library.objects(user=user,name=name,unit='Movie').first()
if not library:
return render_template('404.html',message='Unable to find given Library',user=user),404
movie = library.hydrateUnit(index-1)
if not movie:
return render_template('404.html',message='Unable to find given Movie',user=user),404
return render_template('library/libraryItem.html',item=movie,user=user,library=library,index=index)
# Remove a movie; removing from 'Master' removes it from every library.
@app.route('/libraries/<name>/remove', methods=['POST'])
@security('user')
def removelibraryItem(name,user=None):
from project.model.Movie import Movie
library = Library.objects(user=user,name=name,unit='Movie').first()
if not library:
return jsonify(response='error',message='Unable to find the given Library'),404
index = int(request.form['id'])
if not index:
return jsonify(response='error',message='Invalid parameters'),404
movie = library.hydrateUnit(index-1)
if not movie:
return jsonify(response='error',message='Unable to find the given Movie in Library %s' % library.name),404
if library.name == 'Master':
libraries = Library.objects(user=user,unit='Movie')
for library in libraries:
library.removeUnit(movie)
else:
library.removeUnit(movie)
return jsonify(response='success',type='redirect',path=url_for(endpoint='library',name=name,_external=True))
@app.route('/libraries/<name>/add', methods=['POST'])
@security('user')
def addlibraryItem(name,user=None):
from project.model.Movie import Movie
library = Library.objects(user=user,name=name,unit='Movie').first()
if not library:
return jsonify(response='error',message='Unable to find the given Library'),404
movie_id = request.form['id']
if not movie_id:
return jsonify(response='error',message='Invalid Movie given'),404
from project.model.Movie import Movie
movie = Movie.objects(tmdb_id=movie_id).first()
if movie:
if library.name != 'Master':
master = Library.objects(user=user,name="Master",unit='Movie').first()
master.addUnit(movie)
library.addUnit(movie)
return jsonify(response='success',type='redirect',path=url_for(endpoint='library',name=name,_external=True))
from tmdb3 import Movie as tmdbMovie
movie = tmdbMovie(movie_id)
if not movie:
return jsonify(response='error',message='Invalid Movie given'),404
from project.model.Movie import Movie
movie = Movie.convertMovie(movie)
library.addUnit(movie)
if library.name != 'Master':
master = Library.objects(user=user,name="Master",unit='Movie').first()
master.addUnit(movie)
return jsonify(response='success',type='redirect',path=url_for(endpoint='library',name=name,_external=True)) | en | 0.769321 | # -*- coding: utf-8 -*- | 2.641965 | 3 |
snic_python_interface/compile_snic_lib.py | achanta/SNIC | 1 | 6617855 | <filename>snic_python_interface/compile_snic_lib.py
from cffi import FFI


def compile_sniclib():
    """Compile the `_snic` CFFI extension from snic.c, using snic.h for cdefs."""
    ffibuilder = FFI()
    # Read the declarations via a context manager so the handle is closed
    # (the original open(...).read() leaked the file object).
    with open("snic.h") as header:
        ffibuilder.cdef(header.read())
    ffibuilder.set_source(
        "_snic",
        r"""
        #include "snic.h"
        """,
        sources=["snic.c"],
        library_dirs=['.'],
        extra_compile_args=['-O3', '-march=native', '-ffast-math'])
    ffibuilder.compile(verbose=True)


compile_sniclib()
| <filename>snic_python_interface/compile_snic_lib.py
from cffi import FFI
# Build the `_snic` CFFI extension module from snic.c, using snic.h for cdefs.
def compile_sniclib():
ffibuilder = FFI()
# NOTE(review): open() here leaks the header file handle; a `with` block
# would be cleaner.
ffibuilder.cdef(open("snic.h").read())
ffibuilder.set_source(
"_snic",
r"""
#include "snic.h"
""",
sources=["snic.c"],
library_dirs=['.'],
extra_compile_args=['-O3', '-march=native', '-ffast-math'])
ffibuilder.compile(verbose=True)
compile_sniclib()
| en | 0.134782 | #include "snic.h" | 1.9632 | 2 |
wesh/cli.py | tsileo/wesh | 1 | 6617856 | <filename>wesh/cli.py
# coding: utf-8
def main():
print 'wesh'
| <filename>wesh/cli.py
# coding: utf-8
# CLI entry point: prints the tool name (Python 2 print-statement syntax).
def main():
print 'wesh'
| en | 0.833554 | # coding: utf-8 | 1.406341 | 1 |
bin/app.py | sosegon/moviesList | 0 | 6617857 | #!/usr/bin/python
# basic code from http://learnpythonthehardway.org/book/ex50.html
# code for using mod_wsgi http://www.thefourtheye.in/2013/03/deploying-webpy-application-in-apache.html
import web, os, sys
def add_to_sys_path(path):
if path in sys.path:
print "path already is sys.path"
else:
sys.path.append(path)
print "path added to sys.path"
parent_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)))
bin_dir = parent_dir + "/bin/"
movies_dir = parent_dir + "/movies/"
templates_dir = parent_dir + "/templates/"
static_dir = parent_dir + "/static/"
sys.path.append(bin_dir)
sys.path.append(movies_dir)
sys.path.append(templates_dir)
urls = (
'/.*', 'Index'
)
app = web.application(urls, globals())
render = web.template.render(templates_dir)
if __name__ == "__main__":
from movies.entertainment_center import *
else:
from entertainment_center import *
class Index:
def GET(self):
movies = read_movies_file(static_dir + "list.json")
return render.index(movies)
if __name__ == "__main__":
app.run()
else:
application = app.wsgifunc()
| #!/usr/bin/python
# basic code from http://learnpythonthehardway.org/book/ex50.html
# code for using mod_wsgi http://www.thefourtheye.in/2013/03/deploying-webpy-application-in-apache.html
import web, os, sys
# Append *path* to sys.path unless already present (Python 2 print syntax).
def add_to_sys_path(path):
if path in sys.path:
print "path already is sys.path"
else:
sys.path.append(path)
print "path added to sys.path"
# Resolve the project directories relative to this file.
parent_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)))
bin_dir = parent_dir + "/bin/"
movies_dir = parent_dir + "/movies/"
templates_dir = parent_dir + "/templates/"
static_dir = parent_dir + "/static/"
sys.path.append(bin_dir)
sys.path.append(movies_dir)
sys.path.append(templates_dir)
# Route every URL to the Index handler.
urls = (
'/.*', 'Index'
)
app = web.application(urls, globals())
render = web.template.render(templates_dir)
# Import paths differ between direct execution and mod_wsgi deployment.
if __name__ == "__main__":
from movies.entertainment_center import *
else:
from entertainment_center import *
# Serves the movie list page built from static/list.json.
class Index:
def GET(self):
movies = read_movies_file(static_dir + "list.json")
return render.index(movies)
# Run web.py's dev server directly, or expose a WSGI callable for Apache.
if __name__ == "__main__":
app.run()
else:
application = app.wsgifunc()
| en | 0.592171 | #!/usr/bin/python # basic code from http://learnpythonthehardway.org/book/ex50.html # code for using mod_wsgi http://www.thefourtheye.in/2013/03/deploying-webpy-application-in-apache.html | 3.003482 | 3 |
grimsel/core/io.py | arthurrinaldi/grimsel | 0 | 6617858 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jan 2 15:03:42 2019
@author: user
"""
import time
import itertools
import os
import tables
import shutil
from glob import glob
import fastparquet as pq
import numpy as np
import pandas as pd
import psycopg2 as pg
import grimsel
import grimsel.auxiliary.sqlutils.aux_sql_func as aql
import grimsel.core.autocomplete as ac
import grimsel.core.table_struct as table_struct
from grimsel import _get_logger
logger = _get_logger(__name__)
FORMAT_RUN_ID = '{:04d}' # modify for > 9999 model runs
class _HDFWriter:
    """Mixin class for :class:`CompIO` and :class:`DataReader` (HDF5 output)."""

    def write_hdf(self, tb, df, put_append):
        """Open the HDF output file and write *df* as table *tb*.

        Parameters
        ----------
        put_append: str, one of `('append', 'put')`
            Write one-time table to the output file (`'put'`) or append
            to existing table (`'append'`).
        """
        with pd.HDFStore(self.cl_out, mode='a') as store:
            writer = getattr(store, put_append)
            writer(tb, df, data_columns=True, format='table',
                   complevel=9, complib='blosc:blosclz')
class _ParqWriter:
''' Mixing class for :class:`CompIO` and :class:`DataReader`. '''
def write_parquet(self, fn, df, engine):
'''
Opens connection to HDF file and writes output.
Parameters
----------
fn: str
filename for table writing
df: pandas DataFrame
table to be written
engine: str
engine name as in the pandas DataFrame.to_parquet parameter
'''
if self.output_target == 'fastparquet':
pq.write(fn, df, append=os.path.isfile(fn), compression='GZIP')
# df.to_parquet(fn, engine='fastparquet',
# compression='gzip',)
# if 'run_id' in df.columns:
# df.to_parquet(fn, #append=os.path.isfile(fn),
# engine='fastparquet',
# compression='gzip',
# partition_cols=['run_id'])
# else:
# df.to_parquet(fn, #append=os.path.isfile(fn),
# engine='fastparquet',
# compression='gzip'
# )
else:
raise RuntimeError('Writing using parquet engine %s '
'not implemented.'%self.output_target)
class CompIO(_HDFWriter, _ParqWriter):
'''
A CompIO instance takes care of extracting a single variable/parameter from
the model and of writing a single table to the database.
'''
# NOTE(review): indentation was lost in this extract; code kept byte-identical.
# tb: output table name; cl_out: output schema/file/dir (depends on
# output_target); comp_obj: the model component to extract; idx: index
# column name(s); connect: SQL connector;
# output_target: 'psql' | 'hdf5' | 'fastparquet'.
def __init__(self, tb, cl_out, comp_obj, idx, connect, output_target,
model=None):
self.tb = tb
self.cl_out = cl_out
self.comp_obj = comp_obj
self.output_target = output_target
self.connect = connect
self.model = model
self.columns = None # set in index setter
self.run_id = None # set in call to self.write_run
# Normalize the index to a tuple; the property setter derives self.columns.
self.index = tuple(idx) if not isinstance(idx, tuple) else idx
self.coldict = aql.get_coldict()
# Hook overridden by subclasses (bool_out handling, table mirroring, ...).
def post_processing(self, df):
''' Child-specific method called after reading. '''
return df
def to_df(self):
'''
Calls classmethods _to_df.
Is overwritten in DualIO, where _to_df is not used as classmethod.
'''
# 'bool_out' is generated in post-processing, not read from the component.
return self._to_df(self.comp_obj,
[c for c in self.index if not c == 'bool_out'])
def init_output_table(self):
'''
Initialization of output table.
Calls the :func:`aux_sql_func` method with appropriate parameters.
.. note:
Keys need to be added in post-processing due to table
writing performance.
'''
logger.info('Generating output table {}'.format(self.tb))
col_names = self.index + ('value',)
# Look up the SQL type of each column from the shared column dictionary.
cols = [(c,) + (self.coldict[c][0],) for c in col_names]
cols += [('run_id', 'SMALLINT')]
pk = [] # pk added later for writing/appending performance
unique = []
aql.init_table(tb_name=self.tb, cols=cols,
schema=self.cl_out,
ref_schema=self.cl_out, pk=pk,
unique=unique, bool_auto_fk=False, db=self.connect.db,
con_cur=self.connect.get_pg_con_cur())
def _to_file(self, df, tb):
'''
Casts the data types of the output table and writes the
table to the output HDF file.
'''
# Compact dtypes: float value, bool flag, 32-bit ints for index columns.
dtype_dict = {'value': np.dtype('float64'),
'bool_out': np.dtype('bool')}
dtype_dict.update({col: np.dtype('int32') for col in df.columns
if not col in ('value', 'bool_out')})
df = df.astype({col: dtype for col, dtype in dtype_dict.items()
if col in df.columns})
if self.output_target == 'hdf5':
self.write_hdf(tb, df, 'append')
elif self.output_target in ['fastparquet']:
# One parquet file per run: <table>_<zero-padded run_id>.parq
fn = os.path.join(self.cl_out,
tb + ('_%s'%FORMAT_RUN_ID).format(self.run_id) + '.parq')
self.write_parquet(fn, df, engine=self.output_target)
else:
raise RuntimeError('_to_file: no '
'output_target applicable')
def _to_sql(self, df, tb):
# Append to the PostgreSQL table through SQLAlchemy.
df.to_sql(tb, self.connect.get_sqlalchemy_engine(),
schema=self.cl_out, if_exists='append', index=False)
def _finalize(self, df, tb=None):
''' Add run_id column and write to database table '''
tb = self.tb if not tb else tb
logger.info('Writing {} to {}.{}'.format(self.comp_obj.name,
self.cl_out, tb))
# value always positive, directionalities expressed through bool_out
df['value'] = df['value'].abs()
df['run_id'] = self.run_id
t = time.time()
if self.output_target in ['hdf5', 'fastparquet']:
self._to_file(df, tb)
elif self.output_target == 'psql':
self._to_sql(df, tb)
else:
raise RuntimeError('_finalize: no '
'output_target applicable')
logger.info(' ... done in %.3f sec'%(time.time() - t))
@property
def index(self):
return self._index
@index.setter
def index(self, value):
''' Makes sure idx is tuple and updates columns attribute. '''
self._index = (value,) if not isinstance(value, tuple) else value
self.columns = list(self.index + ('value',))
self.columns = [c for c in self.columns if not c == 'bool_out']
def get_df(self):
# Extract the component and apply the subclass post-processing hook.
df = self.to_df()
df = self.post_processing(df)
return df
def write(self, run_id):
# Main entry point: extract the component and persist it for this run.
self.run_id = run_id
df = self.get_df()
self._finalize(df)
def _node_to_plant(self, pt):
'''
TODO: THIS SHOULD BE IN MAPS!!!!
TODO: THIS SHOULD ALSO INCLUDE ca_id FOR DMND!
Method for translation of node_id to respective plant_id
in the cases of demand and inter-node transmission. This is used to
append demand/inter-nodal transmission to pwr table.
Returns a dictionary node -> plant
Keyword arguments:
* pt -- string, selected plant type for translation
'''
df_np = self.model.df_def_plant[['nd_id', 'pp_id', 'pp', 'pt_id']]
df_pt = self.model.df_def_pp_type[['pt_id', 'pt']]
# Match the plant type name, ignoring a trailing '_ST' suffix.
mask_pt = df_pt.pt.apply(lambda x: x.replace('_ST', '')) == pt
slct_pp_type = df_pt.loc[mask_pt, 'pt_id'].astype(int).iloc[0]
mask_tech = df_np['pt_id'] == slct_pp_type
df_np_slct = df_np.loc[mask_tech]
dict_node_plant_slct = df_np_slct.set_index('nd_id')
dict_node_plant_slct = dict_node_plant_slct['pp_id'].to_dict()
return dict_node_plant_slct
def __repr__(self):
return 'Comp_obj: ' + str(self.comp_obj)
class DualIO(CompIO):
    """
    Base class for dual values: extracts constraint shadow prices from
    the model's dual suffix.
    """

    def to_df(self):
        """Return a DataFrame of dual values for all active constraints."""
        rows = [idx + (self.model.dual[self.comp_obj[idx]],)
                for idx in self.comp_obj
                if self.comp_obj[idx].active]
        return pd.DataFrame(rows, columns=self.columns)
class VariabIO(CompIO):
'''
Base class for variables. Performs the data extraction of variable
objects.
Also: Special methods related to the handling of negative plant
variables defined by setlst[sll] + setlst[curt]
as well as storage charging.
'''
# NOTE(review): indentation was lost in this extract; code kept byte-identical.
@classmethod
def _to_df(cls, obj, cols):
''' Converts pyomo variable to DataFrame.'''
# extract_values() yields {index: value}; NaNs are filled with 0.
df = pd.Series(obj.extract_values()).fillna(0).reset_index()
df.columns = list(cols) + ['value']
return df
def post_processing(self, df):
'''
Calls _set_bool_out prior to writing.
Input arguments:
* df -- DataFrame; primary dataframe
'''
return self._set_bool_out(df) if 'bool_out' in self.index else df
def _set_bool_out(self, df):
'''
Set the bool_out values according to the pp_id.
pp_ids corresponding to curtailment, charging, and sales have
bool_out=True.
* df -- DataFrame; primary dataframe
'''
# Charging variables are always outflows; otherwise default to False,
# then flag curtailment/sales plants as outflows.
# NOTE(review): original nesting of the pp_true lines (inside or after
# the else branch) is ambiguous in this extract; both give the same
# result since charging rows are already True -- confirm against upstream.
if self.comp_obj.name in ['pwr_st_ch', 'erg_ch_yr']:
df['bool_out'] = True
else:
df['bool_out'] = False
pp_true = self.model.setlst['curt'] + self.model.setlst['sll']
df.loc[df.pp_id.isin(pp_true), 'bool_out'] = True
return df
class ParamIO(CompIO):
    '''
    Base class for parameters.
    Is inherited by :class:`DmndIO`.
    Only contains the parameter ``_to_df`` classmethod.
    '''

    @classmethod
    def _to_df(cls, obj, cols):
        ''' Converts pyomo parameter to DataFrame. '''
        df = pd.Series(obj.extract_values()).fillna(0).reset_index()
        # Empty parameter: produce an empty frame with the expected columns.
        if df.empty:
            return pd.DataFrame(columns=list(cols) + ['value'])
        # Scalar (unindexed) parameter: a single unnamed value column.
        if not cols and len(df) == 1:
            return df[[0]].rename(columns={0: 'value'})
        df.columns = list(cols) + ['value']
        return df
class TransmIO(VariabIO):
"""
Special methods related to the translation of nodes to
transmission plant names and
to the simplified representation after aggregating secondary nodes.
"""
# NOTE(review): indentation was lost in this extract; code kept byte-identical.
def post_processing(self, df):
''' Write aggregated transmission table to pwr. '''
# Side effect: node-aggregated flows are written to var_sy_pwr here;
# the detailed table is returned for the regular write path.
dfagg = self.aggregate_nd2(df)
dfagg = self._translate_trm(dfagg)
self._finalize(dfagg, 'var_sy_pwr')
return self.add_bool_out_col(df)
def aggregate_nd2(self, dfall):
'''
Aggregates trm table over all secondary nodes for simplification and
to append to the pwr table.
'''
# mirror table to get both directions
dfall = pd.concat([dfall,
dfall.assign(nd_2_id=dfall.nd_id,
nd_id=dfall.nd_2_id,
value=-dfall.value)])
# Time resolution (nhours) of each node's time map object.
dict_nhours = {nd_id:
self.model._tm_objs[self.model.dict_nd_tm_id[nd_id]].nhours
for nd_id in dfall.nd_id.unique()}
# Average flows down to the coarser node's time resolution where needed.
def avg_to_nhours(x):
if self.model.is_min_node[x.name]:
return x.reset_index()
else: # reduce time resolution
nhours = dict_nhours[x.name[0]]
nhours_2 = dict_nhours[x.nd_2_id.iloc[0]]
x['sy'] = np.repeat(np.arange(
np.ceil(len(x) / (nhours / nhours_2))),
nhours / nhours_2)
idx = [c for c in x.columns if not c == 'value']
x = x.pivot_table(index=idx, values='value', aggfunc=np.mean)
return x.reset_index()
dfall = (dfall.groupby(['nd_id', 'nd_2_id'], as_index=True)
.apply(avg_to_nhours)
.reset_index(drop=True))
# Split into exports (positive) and imports (negative) per node/time slot.
dfexp = dfall.loc[dfall.value > 0]
dfexp = dfexp.groupby(['sy', 'nd_id', 'ca_id'])['value'].sum()
dfexp = dfexp.reset_index()
dfexp['bool_out'] = True
dfimp = dfall.loc[dfall.value < 0]
dfimp = dfimp.groupby(['sy', 'nd_id', 'ca_id'])['value'].sum()
dfimp = dfimp.reset_index()
dfimp['bool_out'] = False
dfagg = pd.concat([dfexp, dfimp], axis=0)
# Scale each node's flows by its weight parameter.
dict_nd_weight = {key: self.model.nd_weight[key].value
for key in self.model.nd_weight}
dfagg['value'] /= dfagg.nd_id.replace(dict_nd_weight)
return dfagg
def _translate_trm(self, df):
# Replace nd_id by the corresponding TRNS plant id for the pwr table.
df['pp_id'] = df.nd_id.replace(self._node_to_plant('TRNS'))
df.drop('nd_id', axis=1, inplace=True)
return df
def add_bool_out_col(self, df):
''' The bool out column value depends on the sign of the data. '''
df['bool_out'] = False
df.loc[df.value < 0, 'bool_out'] = True
return df
class DmndIO(ParamIO):
    '''
    Demand is appended to the *pwr* table after translating the nd_id to
    the corresponding "power plant" pp_id.
    '''

    def post_processing(self, df):
        # Mirror demand into var_sy_pwr as an outflow (bool_out=True) of the
        # node's DMND "plant"; the untouched parameter table is returned.
        df_pp = self._translate_dmnd(df.copy())
        df_pp['bool_out'] = True
        df_pp = df_pp[['sy', 'pp_id', 'ca_id', 'value', 'bool_out']]
        self._finalize(df_pp, 'var_sy_pwr')
        return df

    def _translate_dmnd(self, df):
        '''Map each demand node id to the node's DMND plant id.

        Uses the ``nd_id`` -> ``pp_id`` mapping derived from
        ``ModelBase.df_def_plant`` via :meth:`_node_to_plant`.
        '''
        df['pp_id'] = df.nd_id.replace(self._node_to_plant('DMND'))
        return df
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def skip_if_resume_loop(f):
    """Method decorator: make the call a no-op when ``self.resume_loop`` is truthy.

    Fixes two defects of the original: the wrapper now forwards the wrapped
    method's return value (it previously always returned None), and it
    preserves the wrapped function's metadata via ``functools.wraps``.
    Returns None when the call is skipped.
    """
    from functools import wraps

    @wraps(f)
    def wrapper(self, *args, **kwargs):
        if self.resume_loop:
            return None
        return f(self, *args, **kwargs)
    return wrapper
def skip_if_no_output(f):
    """Method decorator: make the call a no-op when ``self.no_output`` is truthy.

    Fixes two defects of the original: the wrapper now forwards the wrapped
    method's return value (it previously always returned None), and it
    preserves the wrapped function's metadata via ``functools.wraps``.
    Returns None when the call is skipped.
    """
    from functools import wraps

    @wraps(f)
    def wrapper(self, *args, **kwargs):
        if self.no_output:
            return None
        return f(self, *args, **kwargs)
    return wrapper
class ModelWriter():
    '''
    The IO singleton class manages the TableIO instances and communicates with
    other classes. Manages database connection.

    Holds one CompIO instance per model component (``dict_comp_obj``) and
    manages the lifecycle of the output collection (PSQL schema, HDF5 file
    or parquet directory): reset, table init, per-run writing, deletion.
    '''
    # maps table-name / group-name keys to the CompIO subclass used to
    # extract and write the corresponding model component
    io_class_dict = {'var': VariabIO,
                     'var_tr': TransmIO,
                     'par_dmnd': DmndIO,
                     'par': ParamIO,
                     'dual': DualIO}
    # default values for all instance attributes; selectively overridden
    # by the kwargs passed to __init__
    # NOTE(review): declares 'coll_out' while the methods below read
    # self.cl_out -- presumably cl_out is always supplied via kwargs;
    # confirm against the callers (see class IO).
    _default_init = {'sc_warmstart': False,
                     'resume_loop': False,
                     'replace_runs_if_exist': False,
                     'model': None,
                     'output_target': 'hdf5',
                     'sql_connector': None,
                     'no_output': False,
                     'dev_mode': False,
                     'coll_out': None,
                     'keep': None,
                     'drop': None,
                     'db': None}
    def __init__(self, **kwargs):
        """
        Initialize attributes from ``_default_init``, override them with
        ``kwargs``, and reset the output collection.
        """
        self.run_id = None # set in call to self.write_run
        self.dict_comp_obj = {}  # component name -> CompIO instance
        # define instance attributes and update with kwargs
        for key, val in self._default_init.items():
            setattr(self, key, val)
        self.__dict__.update(kwargs)
        # table dictionaries; populated in _make_table_dicts
        self.dict_comp_idx = None
        self.dict_comp_table = None
        self.dict_comp_group = None
        ls = 'Output collection: {}; resume loop={}'
        logger.info(ls.format(self.cl_out, self.resume_loop))
        self.reset_tablecollection()
    def _make_table_dicts(self, keep=None, drop=None):
        '''
        Get the dictionaries describing all tables.
        Also used in the class method ``post_process_index``,
        therefore module function.

        Parameters
        ----------
        keep : list or None
            component names to keep (default: all known components)
        drop : list or None
            component names to remove from the selection

        Raises
        ------
        RuntimeError
            if ``keep`` contains names not present in
            ``table_struct.DICT_COMP_IDX``
        '''
        keep = list(table_struct.DICT_COMP_IDX) if not keep else keep
        keep = set(keep) - set(drop if drop else [])
        options = list(table_struct.DICT_COMP_IDX)
        unknowns = [tb for tb in keep if not tb in options]
        if unknowns:
            raise RuntimeError(('Unknown table selection %s. Possible options '
                                'are %s')%(str(unknowns), str(options)))
        # restrict the module-level dictionaries to the selected components
        filter_dict = lambda d: {k: v for k, v in d.items() if k in keep}
        self.dict_comp_idx = filter_dict(table_struct.DICT_COMP_IDX)
        self.dict_comp_table = filter_dict(table_struct.DICT_COMP_TABLE)
        self.dict_comp_group = filter_dict(table_struct.DICT_COMP_GROUP)
    def reset_tablecollection(self):
        '''
        Reset the SQL schema or hdf file for model output writing.
        '''
        if self.output_target == 'psql':
            self._reset_schema()
        elif self.output_target == 'hdf5':
            self._reset_hdf_file()
        elif self.output_target in ['fastparquet']:
            self._reset_parquet_file()
    @skip_if_resume_loop
    def _reset_hdf_file(self):
        # delegate to the static helper; prompt the user unless dev_mode
        ModelWriter.reset_hdf_file(self.cl_out, not self.dev_mode)
    def _reset_parquet_file(self):
        # unlike _reset_hdf_file this is not decorated: reset_parquet_file
        # itself branches on resume_loop (partial deletion when resuming)
        ModelWriter.reset_parquet_file(self.cl_out, not self.dev_mode,
                                       self.resume_loop)
    @staticmethod
    def reset_hdf_file(fn, warn):
        '''
        Deletes existing hdf5 file and creates empty one.
        Parameters
        ----------
        fn: str
            filename
        warn: bool
            prompt user input if the file exists

        NOTE(review): the file is only removed here; the "empty one" is
        presumably created lazily on first write -- confirm.
        '''
        if os.path.isfile(fn):
            # the maximum run_id is reported in the prompt only
            try:
                max_run_id = pd.read_hdf(fn, 'def_run',
                                         columns=['run_id']).run_id.max()
            except Exception as e:
                logger.error(e)
                logger.warn('reset_hdf_file: Could not determine max_run_id '
                            '... setting to None.')
                max_run_id = None
            if warn:
                # blocking prompt before destroying existing output
                input(
                    '''
                    ~~~~~~~~~~~~~~~ WARNING: ~~~~~~~~~~~~~~~~
                    You are about to delete existing file {fn}.
                    The maximum run_id is {max_run_id}.
                    Hit enter to proceed.
                    '''.format(fn=fn, max_run_id=max_run_id)
                )
            logger.info('Dropping output file {}'.format(fn))
            os.remove(fn)
    # NOTE(review): defined without @staticmethod -- this works only because
    # it is always called via the class (ModelWriter.reset_parquet_file);
    # calling it on an instance would mis-bind `dirc` to self. Confirm.
    def reset_parquet_file(dirc, warn, resume_loop, ):
        '''
        Deletes existing parquet file folder and creates empty one.
        Parameters
        ----------
        dirc: str
            parquet directory name
        warn: bool
            prompt user input if the file exists
        resume_loop: bool or int
            if truthy, keep runs below this run_id and delete newer files
            instead of removing the whole directory
        '''
        if os.path.isdir(dirc) and not resume_loop:
            # full reset: report max run_id, prompt, then delete directory
            try:
                max_run_id = pd.read_parquet(os.path.join(dirc, 'def_run.parq'),
                                             columns=['run_id']).run_id.max()
            except Exception as e:
                logger.error(e)
                logger.warn('reset_parquet_file: Could not determine max_run_id '
                            '... setting to None.')
                max_run_id = None
            if warn:
                input(
                    '''
                    ~~~~~~~~~~~~~~~ WARNING: ~~~~~~~~~~~~~~~~
                    You are about to delete existing directory {dirc}.
                    The maximum run_id is {max_run_id}.
                    Hit enter to proceed.
                    '''.format(dirc=dirc, max_run_id=max_run_id)
                )
            logger.info('Dropping parquet output directory {}'.format(dirc))
            shutil.rmtree(dirc)
        elif os.path.isdir(dirc) and resume_loop:
            # partial reset: delete per-run files with run_id >= resume_loop
            logger.info('Deleting run_ids >= resume_loop = '
                        '{:d}'.format(resume_loop))
            # per-run files end in the zero-padded run_id (FORMAT_RUN_ID)
            del_fn = [fn for fn in glob(os.path.join(dirc, '*[0-9].parq')) if
                      int(fn.split('_')[-1].replace('.parq', ''))
                      >= resume_loop]
            if del_fn:
                for fn in del_fn:
                    logger.info('... deleting file {}'.format(fn))
                    os.remove(fn)
                # also trim the def_run table to the kept run_ids
                fn_run = os.path.join(dirc, 'def_run.parq')
                df_def_run = pd.read_parquet(fn_run)
                df_def_run = df_def_run.query('run_id < %d'%resume_loop)
                df_def_run = df_def_run.reset_index(drop=True)
                pq.write(fn_run, df_def_run, append=False, compression='GZIP')
            else:
                logger.info('... nothing to delete.')
        if not os.path.isdir(dirc):
            os.mkdir(dirc)
    @skip_if_resume_loop
    def _reset_schema(self):
        # drop and recreate the PSQL output schema; prompts unless dev_mode
        aql.reset_schema(self.cl_out, self.sql_connector.db,
                         not self.dev_mode)
    def init_output_schema(self):
        '''Create the PSQL output schema if it does not exist yet.'''
        aql.exec_sql('CREATE SCHEMA IF NOT EXISTS ' + self.cl_out,
                     db=self.db, )
    @skip_if_no_output
    def init_compio_objs(self):
        '''
        Initialize all output table IO objects.

        For each selected component which exists on the model, select the
        appropriate CompIO subclass from ``io_class_dict`` and store the
        instance in ``dict_comp_obj``.
        '''
        self._make_table_dicts(keep=self.keep, drop=self.drop)
        if __name__ == '__main__':
            # debugging leftover: only binds when the module is run directly
            comp, idx = 'pwr_st_ch', self.dict_comp_idx['pwr_st_ch']
        for comp, idx in self.dict_comp_idx.items():
            if not hasattr(self.model, comp):
                logger.warning(('Component {} does not exist... '
                                'skipping init CompIO.').format(comp))
            else:
                logger.debug('Adding component %s to dict_comp_obj'%comp)
                comp_obj = getattr(self.model, comp)
                grp = self.dict_comp_group[comp]
                # select the IO class: by table name first, then by group
                # name, finally by the group-name prefix before '_'
                if self.dict_comp_table[comp] in self.io_class_dict:
                    io_class = self.io_class_dict[self.dict_comp_table[comp]]
                elif grp in self.io_class_dict:
                    io_class = self.io_class_dict[self.dict_comp_group[comp]]
                else:
                    io_class = self.io_class_dict[self.dict_comp_group[comp].split('_')[0]]
                io_class_kwars = dict(tb=self.dict_comp_table[comp],
                                      cl_out=self.cl_out,
                                      comp_obj=comp_obj,
                                      idx=idx,
                                      connect=self.sql_connector,
                                      output_target=self.output_target,
                                      model=self.model)
                self.dict_comp_obj[comp] = io_class(**io_class_kwars)
    @skip_if_no_output
    def write_all(self):
        ''' Calls the write methods of all CompIO objects. '''
        for comp, io_obj in self.dict_comp_obj.items():
            io_obj.write(self.run_id)
    @skip_if_no_output
    def init_all(self):
        '''
        Initializes all SQL tables.
        Calls the init_output_table methods of all CompIO instances.

        No-op for file-based targets: hdf5/parquet tables are created
        implicitly on first write.
        '''
        if self.output_target == 'psql':
            coldict = aql.get_coldict(self.cl_out, self.sql_connector.db)
            for comp, io_obj in self.dict_comp_obj.items():
                io_obj.coldict = coldict
                io_obj.init_output_table()
        elif self.output_target in ['hdf5', 'fastparquet']:
            pass
    def delete_run_id(self, run_id=False, operator='>='):
        '''
        In output tables delete all rows with run_id >=/== the selected value.
        Used in :
        1. in ModelLoop.perform_model_run if replace_runs_if_exist == True
           with operator '=' to remove current run_id
           from all tables prior to writing
        TODO: The SQL part would be better fit with the aux_sql_func module.

        NOTE(review): the truthiness check below also skips run_id == 0 --
        confirm run ids start at 1, or handle 0 explicitly.
        '''
        if run_id:
            # Get overview of all tables
            list_all_tb_0 = [list(itb_list + '_' + itb[0] for itb
                                  in getattr(table_struct, itb_list)
                                  if not len(itb) == 3)
                             for itb_list in table_struct.list_collect]
            self.list_all_tb = list(itertools.chain(*list_all_tb_0))
            self.list_all_tb += ['def_run']
            for itb in self.list_all_tb:
                if self.output_target == 'fastparquet':
                    self._delete_run_id_parquet(tb=itb, run_id=run_id)
                elif self.output_target == 'psql':
                    logger.info('Deleting from ' + self.cl_out + '.' + itb
                                + ' where run_id {} {}'.format(operator,
                                                               str(run_id)))
                    exec_strg = '''
                                DELETE FROM {cl_out}.{tb}
                                WHERE run_id {op} {run_id};
                                '''.format(cl_out=self.cl_out, tb=itb,
                                           run_id=run_id, op=operator)
                    try:
                        aql.exec_sql(exec_strg, db=self.db)
                    except pg.ProgrammingError as e:
                        # e.g. table does not exist
                        logger.error(e)
                        raise(e)
    def _delete_run_id_parquet(self, tb, run_id):
        '''Delete the per-run parquet file of table ``tb`` for ``run_id``.'''
        # per-run files are named <tb>_<zero-padded run_id>.parq
        pat = os.path.join(self.cl_out, ('{}_%s.*'%FORMAT_RUN_ID).format(tb, run_id))
        fn_del = glob(pat)
        try:
            assert fn_del, 'Pattern not found: %s.'%pat
            assert not len(fn_del) > 1, \
                'Found more than one table to delete: %s.'%fn_del
            os.remove(fn_del[0])
            logger.info('Successfully deleted table '
                        '{} of model run {:d}'.format(tb, run_id))
        except Exception as e:
            # best-effort: a missing file is logged, not raised
            logger.error(e)
# @classmethod
# def post_process_index(cls, sc, db, drop=False):
#
# coldict = aql.get_coldict(sc, db)
#
# dict_idx, dict_table, _ = ModelWriter.get_table_dicts()
#
# list_tables = aql.get_sql_tables(sc, db)
#
# for comp, index in dict_idx.items():
#
# if not dict_table[comp] in list_tables:
# logger.warning('Table ' + comp + ' does not exist... skipping '
# 'index generation.')
# else:
#
# tb_name = dict_table[comp]
#
# logger.info('tb_name:', tb_name)
#
# pk_list = index + ('run_id',)
#
# fk_dict = {}
# for c in pk_list:
# if len(coldict[c]) > 1:
# fk_dict[c] = coldict[c][1]
#
#
# pk_kws = {'pk_list': ', '.join(pk_list),
# 'tb': tb_name, 'cl_out': sc}
# exec_str = ('''
# ALTER TABLE {cl_out}.{tb}
# DROP CONSTRAINT IF EXISTS {tb}_pkey;
# ''').format(**pk_kws)
# if not drop:
# exec_str += ('''
# ALTER TABLE {cl_out}.{tb}
# ADD CONSTRAINT {tb}_pkey
# PRIMARY KEY ({pk_list})
# ''').format(**pk_kws)
# logger.debug(exec_str)
# aql.exec_sql(exec_str, db=db)
#
# for fk_keys, fk_vals in fk_dict.items():
# fk_kws = {'cl_out': sc, 'tb': tb_name,
# 'fk': fk_keys, 'ref': fk_vals}
#
# exec_str = ('''
# ALTER TABLE {cl_out}.{tb}
# DROP CONSTRAINT IF EXISTS fk_{tb}_{fk};
# ''').format(**fk_kws)
#
# if not drop:
# exec_str += ('''
# ALTER TABLE {cl_out}.{tb}
# ADD CONSTRAINT fk_{tb}_{fk}
# FOREIGN KEY ({fk})
# REFERENCES {ref}
# ''').format(**fk_kws)
# logger.debug(exec_str)
# aql.exec_sql(exec_str, db=db)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class TableReader():
    '''
    Reads tables from input data sources and makes them attributes of the
    model attribute.

    Input sources are either a PSQL input schema (``sc_inp``) or one or
    several csv directories (``data_path``).
    '''
    def __init__(self, sql_connector, sc_inp, data_path, model):
        '''
        Parameters
        ----------
        sql_connector :
            SQL connector object (PSQL input) or None
        sc_inp : str or None
            name of the PSQL input schema
        data_path : str, list, tuple or None
            path(s) of csv input directories
        model :
            model instance receiving the ``df_*`` attributes
        '''
        self.sqlc = sql_connector
        self.sc_inp = sc_inp
        # normalize data_path to a list of paths
        # NOTE(review): data_path=None becomes [None], which is truthy, so
        # the fallback branch below appears unreachable (and would assign a
        # str rather than a list) -- confirm.
        self.data_path = (data_path if
                          isinstance(data_path, (tuple, list))
                          else [data_path])
        self.model = model
        if not self.sc_inp and not self.data_path:
            logger.warning('Falling back to grimsel default csv tables.')
            self.data_path = os.path.abspath(os.path.join(grimsel.__path__[0],
                                             '..', 'input_data'))
        self.table_set, self.dict_tb_path = self._get_table_dict()
    def _get_table_dict(self):
        '''
        Obtain list of tables in the relevant data source.
        TODO: Update PSQL

        Returns
        -------
        (set, dict)
            set of available table names and a table -> list-of-paths map
            (csv case).
        NOTE(review): the PSQL branch returns a single list while the
        caller unpacks two values -- the PSQL path looks stale; confirm.
        '''
        if self.sc_inp:
            return aql.get_sql_tables(self.sc_inp, self.sqlc.db)
        elif self.data_path:
            # path -> tables list
            dict_pt_tb = {path: [fn.replace('.csv', '')
                                 for fn in next(os.walk(path))[-1]]
                          for path in self.data_path}
            table_set = set(itertools.chain.from_iterable(
                            dict_pt_tb.values()))
            # table -> under which paths
            dict_tb_path = {tb: [pt for pt, tb_list in dict_pt_tb.items() if
                                 tb in tb_list] for tb in table_set}
            return table_set, dict_tb_path
    def _expand_table_families(self, dct):
        '''
        Searches for tables with identical name + suffix.
        Updates the dct.

        Tables whose name starts with a requested table name inherit that
        table's filter (e.g. ``def_plant`` also selects ``def_plant_xyz``).
        '''
        dct_add = {}
        for table, filt in dct.items():
            tbs_other = [tb for tb in self.table_set
                         if table in tb and not tb == table]
            if tbs_other:
                dct_add.update({tb: filt for tb in tbs_other})
        dct.update(dct_add)
    def df_from_dict(self, dct):
        '''
        Reads filtered input tables and assigns them to instance
        attributes.

        Parameters
        ----------
        dct : dict
            table name -> filter list (as accepted by get_input_table);
            each table is stored on the model as ``df_<table>`` (None if
            the table does not exist).
        '''
        self._expand_table_families(dct)
        for table, filt in dct.items():
            list_df, tb_exists, source_str = self.get_input_table(table, filt)
            # concatenate the per-path tables into a single DataFrame
            df = pd.concat(list_df, axis=0, sort=False) if tb_exists else None
            setattr(self.model, 'df_' + table, df)
            if not tb_exists:
                warn_str = ('Input table {tb} does not exist. Setting model '
                            'attribute df_{tb} to None.')
                logger.warning(warn_str.format(tb=table))
            else:
                # human-readable description of the applied filters
                filt = ('filtered by ' if len(filt) > 0 else '') +\
                    ', '.join([str(vvv[0]) + ' in ' + str(vvv[1]) for vvv in filt
                               if not len(vvv[1]) == 0])
                logger.info(('Reading input table {tb} {flt} from '
                             '{source_str}').format(tb=table, flt=filt,
                                                    source_str=source_str))
    def get_input_table(self, table, filt):
        '''
        Returns list of tables.

        Parameters
        ----------
        table : str
            table name (without .csv suffix)
        filt : list of (column(s), values) tuples
            rows are kept if the column value(s) are contained in values

        Returns
        -------
        (list of DataFrame or None, bool, str)
            the read tables, an existence flag, and a source description.
        '''
        if self.sc_inp:
            tb_exists = table in aql.get_sql_tables(self.sc_inp, self.sqlc.db)
            if tb_exists:
                list_df = [aql.read_sql(self.sqlc.db, self.sc_inp,
                                        table, filt)]
                # NOTE(review): `source` is a plain string here, but the
                # return statement joins it with ' and ' as if it were a
                # list of strings -- the PSQL path looks stale; confirm.
                source = '%s %s.%s'%(self.sqlc.db, self.sc_inp, table)
        else:
            list_df = []
            tb_exists = table in self.dict_tb_path
            paths = self.dict_tb_path[table] if tb_exists else []
            source = []
            for path in paths:
                fn = os.path.join(path, '{}.csv'.format(table))
                source.append(fn)
                df = pd.read_csv(fn)
                logger.debug('Done reading, filtering according to {}'.format(filt))
                for col, vals in filt:
                    if isinstance(col, str): # single column filtering
                        mask = df[col].isin(vals)
                    elif isinstance(col, (list, tuple)): # multiple columns
                        # row-wise tuple membership for multi-column filters
                        mask = df[list(col)].apply(tuple, axis=1).isin(vals)
                    df = df.loc[mask]
                list_df.append(df)
        return ((list_df if tb_exists else None), tb_exists,
                (' from {}'.format(' and '.join(source)) if tb_exists else ''))
class DataReader(_HDFWriter, _ParqWriter):
    '''
    Reads all model input tables (via :class:`TableReader`) and writes the
    input/runtime tables to the output target.
    '''
    # tables generated at model runtime (time maps); (table name, primary key)
    runtime_tables = [('tm_soy', ['sy', 'tm_id']),
                      ('hoy_soy', ['hy', 'tm_id']),
                      ('tm_soy_full', ['sy', 'tm_id']),
                      ('sy_min_all', ['sy_min', 'tm_id']),
                      ]
    def __init__(self, **kwargs):
        '''Initialize attributes from defaults overridden by ``kwargs``.'''
        defaults = {'resume_loop': False,
                    'replace_runs_if_exist': False,
                    'model': None,
                    'autocomplete_curtailment': False,
                    'autocompletion': True,
                    'no_output': False,
                    'dev_mode': False,
                    'data_path': None,
                    'sql_connector': None,
                    'sc_inp': None,
                    'cl_out': None,
                    'db': None,
                    }
        defaults.update(kwargs)
        for key, val in defaults.items():
            setattr(self, key, val)
        self.__dict__.update(kwargs)
        # column name -> sql type (and optional foreign key) mapping
        self._coldict = aql.get_coldict()
    def read_model_data(self):
        '''
        Read all input data and generate :class:`ModelBase` instance
        attributes.

        Reads tables in several stages, where later stages are filtered by
        ids collected from the tables read in earlier stages.
        '''
        tbrd = TableReader(self.sql_connector, self.sc_inp,
                           self.data_path, self.model)
        # unfiltered input
        dict_tb_2 = {'def_month': [], 'def_week': [],
                     'parameter_month': [], 'tm_soy': []}
        tbrd.df_from_dict(dict_tb_2)
        # read input data filtered by node and energy carrier
        _flt_nd = ([('nd', self.model.slct_node)]
                   if self.model.slct_node else [])
        _flt_ca = ([('ca', self.model.slct_encar)]
                   if self.model.slct_encar else [])
        _flt_pt = ([('pt', self.model.slct_pp_type)]
                   if self.model.slct_pp_type else [])
        dict_tb_3 = {'def_node': _flt_nd,
                     'def_pp_type': _flt_pt,
                     'def_encar': _flt_ca}
        tbrd.df_from_dict(dict_tb_3)
        # translate slct_node_connect to nd_ids
        if self.model.slct_node_connect:
            dict_nd = self.model.df_def_node.set_index('nd').nd_id.to_dict()
            # keep only connections whose both nodes are in the model
            slct_node_connect_id = [(dict_nd[nd1], dict_nd[nd2])
                                    for nd1, nd2 in self.model.slct_node_connect
                                    if nd1 in dict_nd and nd2 in dict_nd]
            _flt_ndcnn = [(('nd_id', 'nd_2_id'), slct_node_connect_id)]
        else:
            _flt_ndcnn = []
        # update filters in case the keyword argument slct_node_id holds more
        # nodes than present in the table
        self.model.slct_node_id = self.model.df_def_node.nd_id.tolist()
        self.model.slct_encar_id = self.model.df_def_encar.ca_id.tolist()
        self.model.slct_pp_type_id = self.model.df_def_pp_type.pt_id.tolist()
        _flt_nd = [('nd_id', self.model.slct_node_id)]
        _flt_ca = [('ca_id', self.model.slct_encar_id)]
        _flt_nd_2 = [('nd_2_id', self.model.df_def_node.nd_id.tolist())]
        _flt_pt = [('pt_id', self.model.df_def_pp_type.pt_id.tolist())]
        # read input data filtered by node, energy carrier, and fuel
        dict_tb_0 = {'def_plant': _flt_nd + _flt_pt,
                     'profchp': _flt_nd,
                     'node_encar': _flt_nd + _flt_ca,
                     'node_connect': _flt_nd + _flt_ca + _flt_nd_2 + _flt_ndcnn}
        tbrd.df_from_dict(dict_tb_0)
        # secondary filtering by plant
        _flt_pp = [('pp_id', self.model.df_def_plant['pp_id'].tolist())]
        _flt_fl = [('fl_id', self.model.df_def_plant.fl_id.unique().tolist())]
        dict_tb_1 = {'profinflow': _flt_pp,
                     'plant_encar': _flt_pp + _flt_ca,
                     'hydro': _flt_pp,
                     'def_fuel': _flt_fl,
                     'plant_month': _flt_pp,
                     'plant_week': _flt_pp,
                     'fuel_node_encar': _flt_fl + _flt_nd + _flt_ca}
        tbrd.df_from_dict(dict_tb_1)
        # initialize profile index dicts
        self.model._init_pf_dicts()
        _flt_pf_supply = [('supply_pf_id',
                           list(self.model.dict_supply_pf.values()))]
        _flt_pf_dmnd = [('dmnd_pf_id',
                         list(self.model.dict_dmnd_pf.values()))]
        _flt_pf_price = [('price_pf_id',
                          list(self.model.dict_pricebuy_pf.values())
                          + list(self.model.dict_pricesll_pf.values()))]
        dict_pf_0 = {'profsupply': _flt_pf_supply,
                     'profdmnd': _flt_pf_dmnd,
                     'profprice': _flt_pf_price,
                     }
        tbrd.df_from_dict(dict_pf_0)
        # def_profile is filtered by the union of all profile ids above
        _flt_pf = [('pf_id', (_flt_pf_price[-1][-1] + _flt_pf_dmnd[-1][-1]
                              + _flt_pf_supply[-1][-1]))]
        dict_pf_1 = {'def_profile': _flt_pf}
        tbrd.df_from_dict(dict_pf_1)
        # filter plants requiring input from non-existing ca
        # e.g. if a fuel-cell is in the input table but no hydrogen is
        # included in the model, the plant's H2 demand wouldn't be accounted
        # for;
        if 'fl_id' in self.model.df_def_encar.columns:
            fl_id_ca = self.model.df_def_encar.fl_id.tolist()
            mask_del = (self.model.df_def_fuel.is_ca.isin([1])
                        & - self.model.df_def_fuel.fl_id.isin(fl_id_ca))
            self.model.df_def_fuel = self.model.df_def_fuel.loc[-mask_del]
        # filter table by special index name/id columns
        self.model.df_parameter_month = \
            self.filter_by_name_id_cols('df_parameter_month',
                                        _flt_fl + _flt_nd + _flt_pp + _flt_ca)
        self._split_profprice()
        # autocomplete input tables
        self.data_autocompletion()
        if isinstance(self.model.df_node_connect, pd.DataFrame):
            self._fix_df_node_connect()
        # remember which tables were read (used by write_runtime_tables)
        self.input_table_list = (list(dict_tb_1) + list(dict_tb_2)
                                 + list(dict_tb_0) + list(dict_tb_3)
                                 + list(dict_pf_1))
        # self.model._update_slct_lists()
    def _split_profprice(self):
        '''
        Make two separate DataFrames for profprice buying and selling.
        Having both in the same table gets too complicated down the road.

        Sets ``df_profpricebuy`` and ``df_profpricesll`` on the model
        (None when no price profiles exist).
        '''
        bool_exists = (hasattr(self.model, 'df_profprice')
                       and self.model.df_profprice is not None)
        for bs in ['buy', 'sll']:
            tb_name = 'df_profprice%s' % bs
            if bool_exists:
                # select profile ids whose name contains 'pricebuy'/'pricesll'
                mask = self.model.df_def_profile.pf.str.contains('price' + bs)
                list_pd_id = self.model.df_def_profile.loc[mask].pf_id.tolist()
                mask_prf = self.model.df_profprice.price_pf_id.isin(list_pd_id)
                df_split = self.model.df_profprice.loc[mask_prf]
                setattr(self.model, tb_name, df_split)
            else:
                setattr(self.model, tb_name, None)
    def data_autocompletion(self):
        '''Run the autocomplete classes to add derived input table rows.'''
        if self.autocompletion:
            logger.info('#' * 60)
            ac.AutoCompletePpType(self.model, self.autocomplete_curtailment)
            ac.AutoCompleteFuelTrns(self.model)
            ac.AutoCompleteFuelDmnd(self.model, self.autocomplete_curtailment)
            ac.AutoCompletePlantTrns(self.model)
            ac.AutoCompletePlantDmnd(self.model, self.autocomplete_curtailment)
            if 'fl_id' in self.model.df_def_encar:
                ac.AutoCompletePlantCons(self.model)
            ac.AutoCompletePpCaFlex(self.model, self.autocomplete_curtailment)
            logger.info('#' * 60)
    def filter_by_name_id_cols(self, name_df, filt):
        """
        Filter a pandas DataFrame with index names in columns.
        This operates on pandas DataFrames where the indices are not provided
        as column names but as row entries in special columns.
        E.g., instead of
        ===== ===== =====
        nd_id fl_id value
        ===== ===== =====
        1     2     1.2
        ===== ===== =====
        we have
        ========== ========== ======== ======== =======
        set_1_name set_2_name set_1_id set_2_id value
        ========== ========== ======== ======== =======
        nd_id      fl_id      1        2        1.2
        ========== ========== ======== ======== =======
        This allows to combine structurally different tables.
        Filtering is implemented as an iteration over the set_n_name/set_n_id
        column pairs, each of which is filtered with respect to all elements
        in the filt parameter.
        Parameters
        ==========
        df : pandas DataFrame
            as described above
        filt : list
            filtering list of the same format as the
            :func:`grimsel.auxiliary.sqlutils.aux_sql_func.read_sql`
            parameter
        Returns
        =======
        filtered DataFrame
        """
        df = getattr(self.model, name_df)
        if df is not None:
            # perform iterative filtering for each name/id column pair
            for name_col, id_col in [(cc, cc.replace('name', 'id'))
                                     for cc in df.columns if 'name' in cc]:
                # init mask
                mask = False
                # loop over all filter elements
                # debugging leftover: iflt is rebound by the loop below
                iflt = filt[0]
                for iflt in filt:
                    mask |= ( # 1. select value for current set_n_name column
                             ((df[name_col] == iflt[0])
                              # 2. select corresponding values for set_n_id col
                              & (df[id_col].isin(iflt[1])))
                             # 3. skip if this name_col is not relevant
                             | (df[name_col] == 'na'))
            # NOTE(review): `mask` is assembled but never applied to `df`
            # before returning -- the method currently only *reports*;
            # confirm whether a `df = df.loc[mask]` is missing.
            # reporting
            report_df = df[[c for c in df.columns if 'set_' in c]]
            report_df = report_df.drop_duplicates()
            # get stackable columns
            newcols = [tuple(c.split('_')[1:]) if '_' in c else (None, c)
                       for c in report_df.columns]
            report_df.columns = pd.MultiIndex.from_tuples(newcols,
                                                          names=['col_n',
                                                                 'col_type'])
            report_df = report_df.stack(level='col_n')
            # get all name values from all name columns
            names = df[[c for c in df.columns if 'name' in c]]
            names = names.stack().unique().tolist()
            names = [nn for nn in names if not nn == 'na']
            # get dictionary {set names: set values}
            names_dict = {nn: list(set(report_df.loc[report_df.name == nn, 'id']))
                          for nn in names}
            logger.info('Ex-post filtering of DataFrame {}:'.format(name_df))
            for kk, vv in names_dict.items():
                logger.info('\tSet {} is in ({})'.format(kk, ', '.join(map(str, vv))))
        return df
    def _fix_df_node_connect(self):
        '''
        Makes sure the table df_node_connect corresponds to the new style.
        New style: The transmission capacities are expressed as
        * ``cap_trme_leg`` for exports and
        * ``cap_trmi_leg`` for imports
        for single directions, i.e. non-redundant. The input table has columns
        (nd_id, nd_2_id, ca_id, mt_id, cap_trme_leg, cap_trmi_leg).
        Old style: Single transmission capacity for both directions; columns:
        (nd_id, nd_2_id, ca_id, mt_id, eff, cap_trm_leg)
        '''
        if 'cap_trm_leg' in self.model.df_node_connect.columns:
            df = self.model.df_node_connect
            # canonical direction: nd_id < nd_2_id
            df['dir'] = df.nd_id < df.nd_2_id
            # rows in canonical direction become imports of the swapped pair
            df_e = df.loc[df.dir].assign(nd_id=df.nd_2_id,
                                         nd_2_id=df.nd_id,
                                         cap_trmi_leg=df.cap_trm_leg)
            df = df.loc[-df.dir].assign(cap_trme_leg=df.cap_trm_leg)
            dfn = pd.concat([df_e, df], sort=False)
            dfn = dfn.drop('cap_trm_leg', axis=1).fillna(0)
            idx = ['nd_id', 'nd_2_id', 'ca_id', 'mt_id']
            # NOTE(review): logger.info receives the pivot table as a second
            # positional (%-style) argument without a matching placeholder,
            # so the table is not actually included in the log output.
            logger.info('Aggregation count in fix_df_node_connect:\n',
                        dfn.pivot_table(index=idx, aggfunc=[min, max, len],
                                        values=['cap_trme_leg',
                                                'cap_trmi_leg']))
            dfn = dfn.pivot_table(index=idx, aggfunc=sum,
                                  values=['cap_trme_leg', 'cap_trmi_leg'])
            self.model.df_node_connect = dfn.reset_index()
    @skip_if_resume_loop
    @skip_if_no_output
    def _write_input_tables_to_output_schema(self, tb_list):
        '''
        Gathers relevant input tables and writes them to the output collection.
        Note: As of now profile input tables are excluded. All relevant data
        is included in the parameter output anyway.

        Parameters
        ----------
        tb_list : list of str
            table names (without the ``df_`` prefix)
        '''
        # runtime tables are written separately in write_runtime_tables
        for itb in set(tb_list) - set(list(zip(*self.runtime_tables))[0]):
            df = getattr(self.model, 'df_' + itb)
            # skip missing tables and profile tables (except def_ tables)
            if (df is not None and ('def_' in itb or 'prof' not in itb)):
                log_str = 'Writing input table {} to {} output: {}.'
                logger.info(log_str.format(itb, self.output_target,
                                           self.cl_out))
                if self.output_target == 'psql':
                    logger.info('Writing table {} to output.'.format(itb))
                    engine = self.sql_connector.get_sqlalchemy_engine()
                    db = self.sql_connector.db
                    aql.write_sql(df, db, self.cl_out, itb,
                                  if_exists='replace', engine=engine)
                elif self.output_target == 'hdf5':
                    self.write_hdf(itb, df, 'put')
                elif self.output_target in ['fastparquet']:
                    fn = os.path.join(self.cl_out, itb + '.parq')
                    self.write_parquet(fn, df, self.output_target)
                else:
                    raise RuntimeError('_write_input_tables_to_output_schema: '
                                       'no output_target applicable')
    @skip_if_resume_loop
    @skip_if_no_output
    def write_runtime_tables(self):
        '''
        Some input tables depend on model parameters (time resolution).
        Write these to output database schema.
        Also, table def_node altered due to addition of column dmnd_max.
        '''
        self._write_input_tables_to_output_schema(self.input_table_list)
        # (table, column) pairs which must not get foreign keys because the
        # table itself defines the referenced ids
        skip_fks = [('tm_soy', 'sy'), # defines sy
                    ('hoy_soy', 'hy')] # defines hy
        # debugging leftover: tb_name/pk are rebound by the loop below
        tb_name, pk = ('hoy_soy', ['hy', 'tm_id'])
        for tb_name, pk in self.runtime_tables:
            if getattr(self.model, 'df_' + tb_name, None) is not None:
                df = getattr(self.model, 'df_' + tb_name)
                logger.info('Writing runtime table ' + tb_name)
                # assemble column definitions from the coldict
                cols = []
                for c in df.columns:
                    col_add = [c]
                    if c not in self._coldict: # same as "value"
                        self._coldict[c] = self._coldict['value']
                    col_add += (list(self._coldict[c])
                                if (tb_name, c) not in skip_fks
                                else list(self._coldict[c][:1]))
                    cols.append(tuple(col_add))
                if self.output_target == 'psql':
                    engine = self.sql_connector.get_sqlalchemy_engine()
                    con_cur = self.sql_connector.get_pg_con_cur()
                    aql.init_table(tb_name=tb_name, cols=cols,
                                   schema=self.cl_out,
                                   ref_schema=self.cl_out, pk=pk, unique=[],
                                   db=self.sql_connector.db, con_cur=con_cur)
                    aql.write_sql(df, sc=self.cl_out, tb=tb_name,
                                  if_exists='append', engine=engine,
                                  con_cur=con_cur)
                elif self.output_target == 'hdf5':
                    self.write_hdf(tb_name, df, 'put')
                elif self.output_target in ['fastparquet']:
                    fn = os.path.join(self.cl_out, tb_name + '.parq')
                    self.write_parquet(fn, df, engine=self.output_target)
                else:
                    raise RuntimeError('write_runtime_tables: no '
                                       'output_target applicable')
    @staticmethod
    def _get_max_run_id(cl_out):
        '''
        Return the maximum run_id found in the parquet def_run table.

        Parameters
        ----------
        cl_out : str
            parquet output directory

        Returns
        -------
        int or False
            maximum run_id, or False if def_run.parq does not exist
        '''
        run_fn = os.path.join(cl_out, 'def_run.parq')
        if os.path.isfile(run_fn):
            logger.debug('_get_max_run_id: Reading file %s.'%run_fn)
            max_run = pd.read_parquet(run_fn, columns=['run_id']).run_id.max()
        else:
            logger.warning('_get_max_run_id: %s not found.'%run_fn)
            max_run = False
        return max_run
class IO:
    '''
    Primary IO class exposing the :module:`io` module.
    :ivar datrd: :class:`DataReader` instance
    :ivar modwr: :class:`ModelWriter` instance
    '''
    def __init__(self, **kwargs):
        '''
        Set up the reader/writer pair; all keyword arguments are forwarded
        to both :class:`DataReader` and :class:`ModelWriter`.
        '''
        # stale open PyTables handles would block re-opening the output file
        self._close_all_hdf_connections()
        defaults = {'sc_warmstart': False,
                    'resume_loop': False,
                    'replace_runs_if_exist': False,
                    'model': None,
                    'autocomplete_curtailment': False,
                    'sql_connector': None,
                    'autocompletion': True,
                    'no_output': False,
                    'dev_mode': False,
                    'data_path': None,
                    'sc_inp': None,
                    'cl_out': None,
                    'db': 'postgres',
                    'output_target': 'psql'
                    }
        defaults.update(kwargs)
        self.sql_connector = defaults['sql_connector']
        self.replace_runs_if_exist = defaults['replace_runs_if_exist']
        self.db = self.sql_connector.db if self.sql_connector else None
        self.cl_out = defaults['cl_out']
        # translate resume_loop='auto' into a concrete run_id before the
        # defaults are forwarded to DataReader/ModelWriter
        if defaults['resume_loop'] == 'auto':
            defaults['resume_loop'] = \
                self._get_auto_resume_loop(defaults['output_target'])
        self.resume_loop = defaults['resume_loop']
        self.datrd = DataReader(**defaults)
        self.modwr = ModelWriter(**defaults)
    def _get_auto_resume_loop(self, output_target):
        '''
        Infer the resume_loop run_id from the existing parquet output.

        Returns the run_id after the maximum found in def_run.parq, or
        False if no previous output exists.

        Raises
        ------
        RuntimeError
            if ``output_target`` is not 'fastparquet'
        '''
        if not output_target == 'fastparquet':
            # bug fix: the %-interpolation used to be applied outside the
            # raise statement (RuntimeError(...) % output_target), which
            # raised a TypeError instead of the intended message
            raise RuntimeError('resume_loop="auto" not implemented for '
                               '%s output.' % output_target)
        resloop = DataReader._get_max_run_id(self.cl_out)
        # _get_max_run_id returns False when no def_run.parq exists
        resloop = (resloop + 1) if not isinstance(resloop, bool) else resloop
        logger.info('Setting "auto" resume_loop to %s'%resloop)
        return resloop
    @classmethod
    def variab_to_df(cls, py_obj, sets=None):
        ''' Wrapper for backward compatibility. '''
        if not sets:
            sets = table_struct.DICT_COMP_IDX[py_obj.name]
            sets = [st for st in sets if not st == 'bool_out']
        return VariabIO._to_df(py_obj, sets)
    @classmethod
    def param_to_df(cls, py_obj, sets=None):
        ''' Wrapper for backward compatibility. '''
        if not sets:
            sets = table_struct.DICT_COMP_IDX[py_obj.name]
        return ParamIO._to_df(py_obj, sets)
    def read_model_data(self):
        '''Read all input data (delegates to the DataReader).'''
        self.datrd.read_model_data()
    def write_runtime_tables(self):
        '''Write runtime/input tables to the output (delegates).'''
        self.datrd.write_runtime_tables()
    def init_output_tables(self):
        '''Initialize CompIO objects and the output tables (delegates).'''
        self.modwr.init_compio_objs()
        self.modwr.init_all()
    def write_run(self, run_id):
        '''Write all component tables for a single model run.'''
        self.modwr.run_id = run_id
        self.modwr.write_all()
    def _init_loop_table(self, cols_id, cols_step, cols_val):
        '''
        Create the def_run table holding per-run metadata.

        Parameters
        ----------
        cols_id : list of str
            loop index columns (SMALLINT)
        cols_step : list of str
            loop step-value columns (DOUBLE PRECISION)
        cols_val : list of str
            loop value-name columns (VARCHAR(30))
        '''
        tb_name = 'def_run'
        cols = ([('tdiff_solve', 'DOUBLE PRECISION'),
                 ('tdiff_write', 'DOUBLE PRECISION'),
                 ('run_id', 'SMALLINT'),
                 ]
                + [(s, 'SMALLINT') for s in cols_id]
                + [(s, 'DOUBLE PRECISION') for s in cols_step]
                + [(s, 'VARCHAR(30)') for s in cols_val]
                + [('info', 'VARCHAR'), ('objective', 'DOUBLE PRECISION')])
        # flattened target dispatch; the original nested a second if/elif on
        # the same condition inside the hdf5 branch, leaving the inner
        # fastparquet/else branches unreachable
        if self.modwr.output_target == 'psql':
            aql.init_table(tb_name, cols, self.cl_out,
                           pk=cols_id, unique=['run_id'], db=self.db)
        elif self.modwr.output_target == 'hdf5':
            df = pd.DataFrame(columns=list(zip(*cols))[0])
            df.to_hdf(self.cl_out, tb_name, format='table')
        elif self.modwr.output_target == 'fastparquet':
            pass  # parquet def_run table is created lazily on first write
        else:
            raise RuntimeError('_init_loop_table: no '
                               'output_target applicable')
    @staticmethod
    def _close_all_hdf_connections():
        '''Force-close every PyTables file handle left open.'''
        tables.file._open_files.close_all()
# %%
# Entry-point guard: this is library code with no standalone behavior.
if __name__ == '__main__':
    pass
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jan 2 15:03:42 2019
@author: user
"""
import time
import itertools
import os
import tables
import shutil
from glob import glob
import fastparquet as pq
import numpy as np
import pandas as pd
import psycopg2 as pg
import grimsel
import grimsel.auxiliary.sqlutils.aux_sql_func as aql
import grimsel.core.autocomplete as ac
import grimsel.core.table_struct as table_struct
from grimsel import _get_logger
logger = _get_logger(__name__)
FORMAT_RUN_ID = '{:04d}' # modify for > 9999 model runs
class _HDFWriter:
    '''Mixin providing HDF5 output for :class:`CompIO` and :class:`DataReader`.'''
    def write_hdf(self, tb, df, put_append):
        '''
        Open the output HDF store and write a single table.

        Parameters
        ----------
        tb : str
            key of the table within the HDF file
        df : pandas.DataFrame
            data to be written
        put_append: str, one of `('append', 'put')`
            Write one-time table to the output file (`'put'`) or append
            to existing table (`'append'`).
        '''
        store = pd.HDFStore(self.cl_out, mode='a')
        try:
            # dispatch to HDFStore.put or HDFStore.append
            writer = getattr(store, put_append)
            writer(tb, df, data_columns=True, format='table',
                   complevel=9, complib='blosc:blosclz')
        finally:
            store.close()
class _ParqWriter:
    '''Mixin providing parquet output for :class:`CompIO` and :class:`DataReader`.'''
    def write_parquet(self, fn, df, engine):
        '''
        Write (or append) a DataFrame to a parquet file.

        Parameters
        ----------
        fn: str
            filename for table writing
        df: pandas DataFrame
            table to be written
        engine: str
            engine name as in the pandas DataFrame.to_parquet parameter
        '''
        # guard clause: only the fastparquet target is supported
        if self.output_target != 'fastparquet':
            raise RuntimeError('Writing using parquet engine %s '
                               'not implemented.'%self.output_target)
        # append when the target file already exists, otherwise create it
        pq.write(fn, df, append=os.path.isfile(fn), compression='GZIP')
class CompIO(_HDFWriter, _ParqWriter):
    '''
    A CompIO instance takes care of extracting a single variable/parameter from
    the model and of writing a single table to the database.
    '''
    def __init__(self, tb, cl_out, comp_obj, idx, connect, output_target,
                 model=None):
        '''
        Parameters
        ----------
        tb : str
            name of the output table
        cl_out : str
            output collection (psql schema, hdf5 file, or parquet directory)
        comp_obj :
            model component object to be extracted
        idx : iterable of str
            index set names of the component
        connect :
            SQL connector object (used for the psql target)
        output_target : str
            one of ('psql', 'hdf5', 'fastparquet')
        model :
            model instance; required by child classes which access model
            tables (e.g. for node -> plant translation)
        '''
        self.tb = tb
        self.cl_out = cl_out
        self.comp_obj = comp_obj
        self.output_target = output_target
        self.connect = connect
        self.model = model
        self.columns = None # set in index setter
        self.run_id = None # set in call to self.write_run
        self.index = tuple(idx) if not isinstance(idx, tuple) else idx
        # column name -> sql type (and optional foreign key) mapping
        self.coldict = aql.get_coldict()
    def post_processing(self, df):
        ''' Child-specific method called after reading. '''
        return df
    def to_df(self):
        '''
        Calls classmethods _to_df.
        Is overwritten in DualIO, where _to_df is not used as classmethod.
        '''
        # bool_out is generated in post-processing, not read from the model
        return self._to_df(self.comp_obj,
                           [c for c in self.index if not c == 'bool_out'])
    def init_output_table(self):
        '''
        Initialization of output table.
        Calls the :func:`aux_sql_func` method with appropriate parameters.
        .. note:
           Keys need to be added in post-processing due to table
           writing performance.
        '''
        logger.info('Generating output table {}'.format(self.tb))
        col_names = self.index + ('value',)
        cols = [(c,) + (self.coldict[c][0],) for c in col_names]
        cols += [('run_id', 'SMALLINT')]
        pk = [] # pk added later for writing/appending performance
        unique = []
        aql.init_table(tb_name=self.tb, cols=cols,
                       schema=self.cl_out,
                       ref_schema=self.cl_out, pk=pk,
                       unique=unique, bool_auto_fk=False, db=self.connect.db,
                       con_cur=self.connect.get_pg_con_cur())
    def _to_file(self, df, tb):
        '''
        Casts the data types of the output table and writes the
        table to the output HDF file.
        '''
        # compact dtypes: float64 value, bool bool_out, int32 index columns
        dtype_dict = {'value': np.dtype('float64'),
                      'bool_out': np.dtype('bool')}
        dtype_dict.update({col: np.dtype('int32') for col in df.columns
                           if not col in ('value', 'bool_out')})
        df = df.astype({col: dtype for col, dtype in dtype_dict.items()
                        if col in df.columns})
        if self.output_target == 'hdf5':
            self.write_hdf(tb, df, 'append')
        elif self.output_target in ['fastparquet']:
            # one parquet file per table and (zero-padded) run_id
            fn = os.path.join(self.cl_out,
                              tb + ('_%s'%FORMAT_RUN_ID).format(self.run_id) + '.parq')
            self.write_parquet(fn, df, engine=self.output_target)
        else:
            raise RuntimeError('_to_file: no '
                               'output_target applicable')
    def _to_sql(self, df, tb):
        # append rows to the existing psql output table
        df.to_sql(tb, self.connect.get_sqlalchemy_engine(),
                  schema=self.cl_out, if_exists='append', index=False)
    def _finalize(self, df, tb=None):
        ''' Add run_id column and write to database table '''
        tb = self.tb if not tb else tb
        logger.info('Writing {} to {}.{}'.format(self.comp_obj.name,
                                                 self.cl_out, tb))
        # value always positive, directionalities expressed through bool_out
        df['value'] = df['value'].abs()
        df['run_id'] = self.run_id
        t = time.time()
        if self.output_target in ['hdf5', 'fastparquet']:
            self._to_file(df, tb)
        elif self.output_target == 'psql':
            self._to_sql(df, tb)
        else:
            raise RuntimeError('_finalize: no '
                               'output_target applicable')
        logger.info(' ... done in %.3f sec'%(time.time() - t))
    @property
    def index(self):
        # tuple of index set names; maintained through the setter below
        return self._index
    @index.setter
    def index(self, value):
        ''' Makes sure idx is tuple and updates columns attribute. '''
        self._index = (value,) if not isinstance(value, tuple) else value
        self.columns = list(self.index + ('value',))
        # bool_out is never read from the model, hence not a data column
        self.columns = [c for c in self.columns if not c == 'bool_out']
    def get_df(self):
        '''Extract the component data and apply post-processing.'''
        df = self.to_df()
        df = self.post_processing(df)
        return df
    def write(self, run_id):
        '''Extract and write the component table for the given run_id.'''
        self.run_id = run_id
        df = self.get_df()
        self._finalize(df)
    def _node_to_plant(self, pt):
        '''
        TODO: THIS SHOULD BE IN MAPS!!!!
        TODO: THIS SHOULD ALSO INCLUDE ca_id FOR DMND!
        Method for translation of node_id to respective plant_id
        in the cases of demand and inter-node transmission. This is used to
        append demand/inter-nodal transmission to pwr table.
        Returns a dictionary node -> plant
        Keyword arguments:
        * pt -- string, selected plant type for translation
        '''
        df_np = self.model.df_def_plant[['nd_id', 'pp_id', 'pp', 'pt_id']]
        df_pt = self.model.df_def_pp_type[['pt_id', 'pt']]
        # match pt while ignoring a trailing '_ST' (storage) suffix
        mask_pt = df_pt.pt.apply(lambda x: x.replace('_ST', '')) == pt
        slct_pp_type = df_pt.loc[mask_pt, 'pt_id'].astype(int).iloc[0]
        mask_tech = df_np['pt_id'] == slct_pp_type
        df_np_slct = df_np.loc[mask_tech]
        dict_node_plant_slct = df_np_slct.set_index('nd_id')
        dict_node_plant_slct = dict_node_plant_slct['pp_id'].to_dict()
        return dict_node_plant_slct
    def __repr__(self):
        return 'Comp_obj: ' + str(self.comp_obj)
class DualIO(CompIO):
    '''
    Base class for dual values. Extracts the shadow prices of the
    active constraints of the wrapped component.
    '''

    def to_df(self):
        '''Collect the duals of all active constraints as a DataFrame.'''
        rows = []
        for idx in self.comp_obj:
            constraint = self.comp_obj[idx]
            if constraint.active:
                rows.append(idx + (self.model.dual[constraint],))
        return pd.DataFrame(rows, columns=self.columns)
class VariabIO(CompIO):
    '''
    Base class for variables. Performs the data extraction of variable
    objects.
    Also: Special methods related to the handling of negative plant
    variables defined by setlst[sll] + setlst[curt]
    as well as storage charging.
    '''

    @classmethod
    def _to_df(cls, obj, cols):
        '''Convert a pyomo variable object to a DataFrame.'''
        series = pd.Series(obj.extract_values()).fillna(0)
        df = series.reset_index()
        df.columns = list(cols) + ['value']
        return df

    def post_processing(self, df):
        '''Apply bool_out flagging if the table carries that column.

        Input arguments:
        * df -- DataFrame; primary dataframe
        '''
        if 'bool_out' in self.index:
            df = self._set_bool_out(df)
        return df

    def _set_bool_out(self, df):
        '''Set the bool_out values according to the pp_id.

        Charging variables are outgoing as a whole; otherwise only
        curtailment and sales plants are flagged as outgoing.
        * df -- DataFrame; primary dataframe
        '''
        if self.comp_obj.name in ('pwr_st_ch', 'erg_ch_yr'):
            df['bool_out'] = True
        else:
            df['bool_out'] = False
            pp_outgoing = self.model.setlst['curt'] + self.model.setlst['sll']
            df.loc[df.pp_id.isin(pp_outgoing), 'bool_out'] = True
        return df
class ParamIO(CompIO):
    '''
    Base class for parameters.
    Is inherited by :class:`DmndIO`.
    Only contains the parameter ``_to_df`` classmethod.
    '''

    @classmethod
    def _to_df(cls, obj, cols):
        '''Convert a pyomo parameter object to a DataFrame.

        Covers three cases: empty parameter (empty frame with the
        expected columns), scalar parameter (single value column), and
        regular indexed parameters.
        '''
        df = pd.Series(obj.extract_values()).fillna(0).reset_index()
        if df.empty:
            return pd.DataFrame(columns=list(cols) + ['value'])
        if not cols and len(df) == 1:
            # scalar parameter: keep only the single data column
            return df[[0]].rename(columns={0: 'value'})
        df.columns = list(cols) + ['value']
        return df
class TransmIO(VariabIO):
    """
    Special methods related to the translation of nodes to
    transmission plant names and
    to the simplified representation after aggregating secondary nodes.
    """
    def post_processing(self, df):
        ''' Write aggregated transmission table to pwr. '''
        # Aggregate over secondary nodes, translate nodes to
        # transmission "plants" and append the result to var_sy_pwr;
        # the detailed frame itself is returned with bool_out added.
        dfagg = self.aggregate_nd2(df)
        dfagg = self._translate_trm(dfagg)
        self._finalize(dfagg, 'var_sy_pwr')
        return self.add_bool_out_col(df)
    def aggregate_nd2(self, dfall):
        '''
        Aggregates trm table over all secondary nodes for simplification and
        to append to the pwr table.
        '''
        # mirror table to get both directions
        dfall = pd.concat([dfall,
                           dfall.assign(nd_2_id=dfall.nd_id,
                                        nd_id=dfall.nd_2_id,
                                        value=-dfall.value)])
        # time resolution (hours per time slot) per node
        dict_nhours = {nd_id:
            self.model._tm_objs[self.model.dict_nd_tm_id[nd_id]].nhours
            for nd_id in dfall.nd_id.unique()}
        def avg_to_nhours(x):
            # per (nd_id, nd_2_id) group: keep the finest-resolution
            # nodes as-is, otherwise average to the coarser resolution
            if self.model.is_min_node[x.name]:
                return x.reset_index()
            else: # reduce time resolution
                nhours = dict_nhours[x.name[0]]
                nhours_2 = dict_nhours[x.nd_2_id.iloc[0]]
                # NOTE(review): nhours / nhours_2 is a float; np.repeat
                # requires an integer repeat count in recent numpy — verify
                x['sy'] = np.repeat(np.arange(
                            np.ceil(len(x) / (nhours / nhours_2))),
                            nhours / nhours_2)
                idx = [c for c in x.columns if not c == 'value']
                x = x.pivot_table(index=idx, values='value', aggfunc=np.mean)
                return x.reset_index()
        dfall = (dfall.groupby(['nd_id', 'nd_2_id'], as_index=True)
                      .apply(avg_to_nhours)
                      .reset_index(drop=True))
        # positive values are exports of nd_id ...
        dfexp = dfall.loc[dfall.value > 0]
        dfexp = dfexp.groupby(['sy', 'nd_id', 'ca_id'])['value'].sum()
        dfexp = dfexp.reset_index()
        dfexp['bool_out'] = True
        # ... negative values are imports
        dfimp = dfall.loc[dfall.value < 0]
        dfimp = dfimp.groupby(['sy', 'nd_id', 'ca_id'])['value'].sum()
        dfimp = dfimp.reset_index()
        dfimp['bool_out'] = False
        dfagg = pd.concat([dfexp, dfimp], axis=0)
        # scale values by the node weights
        dict_nd_weight = {key: self.model.nd_weight[key].value
                          for key in self.model.nd_weight}
        dfagg['value'] /= dfagg.nd_id.replace(dict_nd_weight)
        return dfagg
    def _translate_trm(self, df):
        # replace node ids by the ids of the corresponding TRNS plants
        df['pp_id'] = df.nd_id.replace(self._node_to_plant('TRNS'))
        df.drop('nd_id', axis=1, inplace=True)
        return df
    def add_bool_out_col(self, df):
        ''' The bool out column value depends on the sign of the data. '''
        df['bool_out'] = False
        df.loc[df.value < 0, 'bool_out'] = True
        return df
class DmndIO(ParamIO):
    '''
    Demand is appended to the *pwr* table after translating the nd_id to
    the corresponding "power plant" pp_id.
    '''

    def post_processing(self, df):
        '''Append the demand data to the var_sy_pwr output table.

        The original demand frame is returned unchanged; a copy with
        node ids translated to demand-plant ids is written separately.
        '''
        df_as_plant = self._translate_dmnd(df.copy())
        df_as_plant['bool_out'] = True
        cols = ['sy', 'pp_id', 'ca_id', 'value', 'bool_out']
        self._finalize(df_as_plant[cols], 'var_sy_pwr')
        return df

    def _translate_dmnd(self, df):
        '''
        Translate the demand ``pf_id`` to the corresponding ``pp_id``s
        This is based on a mapping ``pf_id`` |rarr| ``nd_id`` |rarr| ``pp_id``.
        The ``pp_id`` definition for demand is retrieved from the
        ``ModelBase.df_def_plant`` table.
        '''
        df['pp_id'] = df.nd_id.replace(self._node_to_plant('DMND'))
        return df
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def skip_if_resume_loop(f):
    '''Decorator: skip the wrapped method if ``self.resume_loop`` is set.

    Used for initialization steps which must not be repeated when a
    model run is resumed. The wrapped method's return value is
    discarded (all decorated methods are called for their side
    effects only).
    '''
    from functools import wraps

    @wraps(f)  # preserve __name__/__doc__ of the wrapped method
    def wrapper(self, *args, **kwargs):
        if not self.resume_loop:
            f(self, *args, **kwargs)
    return wrapper
def skip_if_no_output(f):
    '''Decorator: skip the wrapped method if ``self.no_output`` is set.

    Used to suppress all output-writing steps when the model is run
    without output. The wrapped method's return value is discarded.
    '''
    from functools import wraps

    @wraps(f)  # preserve __name__/__doc__ of the wrapped method
    def wrapper(self, *args, **kwargs):
        if not self.no_output:
            f(self, *args, **kwargs)
    return wrapper
class ModelWriter():
    '''
    The IO singleton class manages the TableIO instances and communicates with
    other classes. Manages database connection.
    '''

    # maps component categories/groups to the CompIO subclass handling them
    io_class_dict = {'var': VariabIO,
                     'var_tr': TransmIO,
                     'par_dmnd': DmndIO,
                     'par': ParamIO,
                     'dual': DualIO}

    # default keyword arguments applied in __init__
    _default_init = {'sc_warmstart': False,
                     'resume_loop': False,
                     'replace_runs_if_exist': False,
                     'model': None,
                     'output_target': 'hdf5',
                     'sql_connector': None,
                     'no_output': False,
                     'dev_mode': False,
                     'coll_out': None,
                     'keep': None,
                     'drop': None,
                     'db': None}

    def __init__(self, **kwargs):
        """
        Initialize attributes from the defaults/kwargs and reset the
        output table collection (SQL schema, hdf5 file, or parquet dir).
        """
        self.run_id = None  # set in call to self.write_run
        self.dict_comp_obj = {}

        # define instance attributes and update with kwargs
        # NOTE(review): self.cl_out is read below but is not part of
        # _default_init — presumably always supplied via kwargs; verify
        for key, val in self._default_init.items():
            setattr(self, key, val)
        self.__dict__.update(kwargs)

        self.dict_comp_idx = None
        self.dict_comp_table = None
        self.dict_comp_group = None

        ls = 'Output collection: {}; resume loop={}'
        logger.info(ls.format(self.cl_out, self.resume_loop))

        self.reset_tablecollection()

    def _make_table_dicts(self, keep=None, drop=None):
        '''
        Get the dictionaries describing all tables.
        Also used in the class method ``post_process_index``,
        therefore module function.
        '''
        # keep defaults to all known tables; drop is subtracted from it
        keep = list(table_struct.DICT_COMP_IDX) if not keep else keep
        keep = set(keep) - set(drop if drop else [])

        options = list(table_struct.DICT_COMP_IDX)
        unknowns = [tb for tb in keep if not tb in options]
        if unknowns:
            raise RuntimeError(('Unknown table selection %s. Possible options '
                                'are %s')%(str(unknowns), str(options)))

        filter_dict = lambda d: {k: v for k, v in d.items() if k in keep}
        self.dict_comp_idx = filter_dict(table_struct.DICT_COMP_IDX)
        self.dict_comp_table = filter_dict(table_struct.DICT_COMP_TABLE)
        self.dict_comp_group = filter_dict(table_struct.DICT_COMP_GROUP)

    def reset_tablecollection(self):
        '''
        Reset the SQL schema or hdf file for model output writing.
        '''
        if self.output_target == 'psql':
            self._reset_schema()
        elif self.output_target == 'hdf5':
            self._reset_hdf_file()
        elif self.output_target in ['fastparquet']:
            self._reset_parquet_file()

    @skip_if_resume_loop
    def _reset_hdf_file(self):
        # user confirmation prompt is skipped in dev_mode
        ModelWriter.reset_hdf_file(self.cl_out, not self.dev_mode)

    def _reset_parquet_file(self):
        ModelWriter.reset_parquet_file(self.cl_out, not self.dev_mode,
                                       self.resume_loop)

    @staticmethod
    def reset_hdf_file(fn, warn):
        '''
        Deletes existing hdf5 file and creates empty one.
        Parameters
        ----------
        fn: str
            filename
        warn: bool
            prompt user input if the file exists
        '''
        # pass
        if os.path.isfile(fn):
            try:
                max_run_id = pd.read_hdf(fn, 'def_run',
                                         columns=['run_id']).run_id.max()
            except Exception as e:
                logger.error(e)
                logger.warn('reset_hdf_file: Could not determine max_run_id '
                            '... setting to None.')
                max_run_id = None

            if warn:
                input(
'''
~~~~~~~~~~~~~~~ WARNING: ~~~~~~~~~~~~~~~~
You are about to delete existing file {fn}.
The maximum run_id is {max_run_id}.
Hit enter to proceed.
'''.format(fn=fn, max_run_id=max_run_id)
                )

            logger.info('Dropping output file {}'.format(fn))
            os.remove(fn)

    # NOTE(review): missing @staticmethod (cf. reset_hdf_file above);
    # works only because it is always called via the class — verify
    def reset_parquet_file(dirc, warn, resume_loop, ):
        '''
        Deletes existing parquet file folder and creates empty one.
        Parameters
        ----------
        dirc: str
            parquet directory name
        warn: bool
            prompt user input if the file exists
        '''
        if os.path.isdir(dirc) and not resume_loop:
            try:
                max_run_id = pd.read_parquet(os.path.join(dirc, 'def_run.parq'),
                                             columns=['run_id']).run_id.max()
            except Exception as e:
                logger.error(e)
                logger.warn('reset_parquet_file: Could not determine max_run_id '
                            '... setting to None.')
                max_run_id = None

            if warn:
                input(
'''
~~~~~~~~~~~~~~~ WARNING: ~~~~~~~~~~~~~~~~
You are about to delete existing directory {dirc}.
The maximum run_id is {max_run_id}.
Hit enter to proceed.
'''.format(dirc=dirc, max_run_id=max_run_id)
                )

            logger.info('Dropping parquet output directory {}'.format(dirc))
            shutil.rmtree(dirc)

        elif os.path.isdir(dirc) and resume_loop:
            # keep the directory but remove all run files with
            # run_id >= resume_loop and truncate def_run accordingly
            logger.info('Deleting run_ids >= resume_loop = '
                        '{:d}'.format(resume_loop))
            del_fn = [fn for fn in glob(os.path.join(dirc, '*[0-9].parq')) if
                      int(fn.split('_')[-1].replace('.parq', ''))
                      >= resume_loop]
            if del_fn:
                for fn in del_fn:
                    logger.info('... deleting file {}'.format(fn))
                    os.remove(fn)

                fn_run = os.path.join(dirc, 'def_run.parq')
                df_def_run = pd.read_parquet(fn_run)
                df_def_run = df_def_run.query('run_id < %d'%resume_loop)
                df_def_run = df_def_run.reset_index(drop=True)
                pq.write(fn_run, df_def_run, append=False, compression='GZIP')
            else:
                logger.info('... nothing to delete.')

        if not os.path.isdir(dirc):
            os.mkdir(dirc)

    @skip_if_resume_loop
    def _reset_schema(self):
        # drops and recreates the PostgreSQL output schema
        aql.reset_schema(self.cl_out, self.sql_connector.db,
                         not self.dev_mode)

    def init_output_schema(self):
        # idempotent creation of the output schema
        aql.exec_sql('CREATE SCHEMA IF NOT EXISTS ' + self.cl_out,
                     db=self.db, )

    @skip_if_no_output
    def init_compio_objs(self):
        '''
        Initialize all output table IO objects.
        '''
        self._make_table_dicts(keep=self.keep, drop=self.drop)

        # NOTE(review): debugging leftover — only executes when the
        # module itself is run as a script; consider removing
        if __name__ == '__main__':
            comp, idx = 'pwr_st_ch', self.dict_comp_idx['pwr_st_ch']
        for comp, idx in self.dict_comp_idx.items():
            if not hasattr(self.model, comp):
                logger.warning(('Component {} does not exist... '
                                'skipping init CompIO.').format(comp))
            else:
                logger.debug('Adding component %s to dict_comp_obj'%comp)
                comp_obj = getattr(self.model, comp)
                grp = self.dict_comp_group[comp]
                # resolve the CompIO subclass: by table name, by group,
                # or by the group's leading token (e.g. 'var_tr' -> 'var')
                if self.dict_comp_table[comp] in self.io_class_dict:
                    io_class = self.io_class_dict[self.dict_comp_table[comp]]
                elif grp in self.io_class_dict:
                    io_class = self.io_class_dict[self.dict_comp_group[comp]]
                else:
                    io_class = self.io_class_dict[self.dict_comp_group[comp].split('_')[0]]

                io_class_kwars = dict(tb=self.dict_comp_table[comp],
                                      cl_out=self.cl_out,
                                      comp_obj=comp_obj,
                                      idx=idx,
                                      connect=self.sql_connector,
                                      output_target=self.output_target,
                                      model=self.model)
                self.dict_comp_obj[comp] = io_class(**io_class_kwars)

    @skip_if_no_output
    def write_all(self):
        ''' Calls the write methods of all CompIO objects. '''
        for comp, io_obj in self.dict_comp_obj.items():
            io_obj.write(self.run_id)

    @skip_if_no_output
    def init_all(self):
        '''
        Initializes all SQL tables.
        Calls the init_output_table methods of all CompIO instances.
        '''
        if self.output_target == 'psql':
            coldict = aql.get_coldict(self.cl_out, self.sql_connector.db)
            for comp, io_obj in self.dict_comp_obj.items():
                io_obj.coldict = coldict
                io_obj.init_output_table()
        elif self.output_target in ['hdf5', 'fastparquet']:
            # file-based targets create tables implicitly on first write
            pass

    def delete_run_id(self, run_id=False, operator='>='):
        '''
        In output tables delete all rows with run_id >=/== the selected value.
        Used in :
        1. in ModelLoop.perform_model_run if replace_runs_if_exist == True
           with operator '=' to remove current run_id
           from all tables prior to writing
        TODO: The SQL part would be better fit with the aux_sql_func module.
        '''
        if run_id:
            # Get overview of all tables
            list_all_tb_0 = [list(itb_list + '_' + itb[0] for itb
                                  in getattr(table_struct, itb_list)
                                  if not len(itb) == 3)
                             for itb_list in table_struct.list_collect]
            self.list_all_tb = list(itertools.chain(*list_all_tb_0))
            self.list_all_tb += ['def_run']

            for itb in self.list_all_tb:
                if self.output_target == 'fastparquet':
                    self._delete_run_id_parquet(tb=itb, run_id=run_id)
                elif self.output_target == 'psql':
                    logger.info('Deleting from ' + self.cl_out + '.' + itb
                                + ' where run_id {} {}'.format(operator,
                                                               str(run_id)))
                    exec_strg = '''
                                DELETE FROM {cl_out}.{tb}
                                WHERE run_id {op} {run_id};
                                '''.format(cl_out=self.cl_out, tb=itb,
                                           run_id=run_id, op=operator)
                    try:
                        aql.exec_sql(exec_strg, db=self.db)
                    except pg.ProgrammingError as e:
                        logger.error(e)
                        raise(e)

    def _delete_run_id_parquet(self, tb, run_id):
        # delete the single parquet file of table tb for model run run_id;
        # FORMAT_RUN_ID pads the run_id in the file name pattern
        pat = os.path.join(self.cl_out, ('{}_%s.*'%FORMAT_RUN_ID).format(tb, run_id))
        fn_del = glob(pat)
        try:
            assert fn_del, 'Pattern not found: %s.'%pat
            assert not len(fn_del) > 1, \
                'Found more than one table to delete: %s.'%fn_del
            os.remove(fn_del[0])
            logger.info('Successfully deleted table '
                        '{} of model run {:d}'.format(tb, run_id))
        except Exception as e:
            logger.error(e)
# @classmethod
# def post_process_index(cls, sc, db, drop=False):
#
# coldict = aql.get_coldict(sc, db)
#
# dict_idx, dict_table, _ = ModelWriter.get_table_dicts()
#
# list_tables = aql.get_sql_tables(sc, db)
#
# for comp, index in dict_idx.items():
#
# if not dict_table[comp] in list_tables:
# logger.warning('Table ' + comp + ' does not exist... skipping '
# 'index generation.')
# else:
#
# tb_name = dict_table[comp]
#
# logger.info('tb_name:', tb_name)
#
# pk_list = index + ('run_id',)
#
# fk_dict = {}
# for c in pk_list:
# if len(coldict[c]) > 1:
# fk_dict[c] = coldict[c][1]
#
#
# pk_kws = {'pk_list': ', '.join(pk_list),
# 'tb': tb_name, 'cl_out': sc}
# exec_str = ('''
# ALTER TABLE {cl_out}.{tb}
# DROP CONSTRAINT IF EXISTS {tb}_pkey;
# ''').format(**pk_kws)
# if not drop:
# exec_str += ('''
# ALTER TABLE {cl_out}.{tb}
# ADD CONSTRAINT {tb}_pkey
# PRIMARY KEY ({pk_list})
# ''').format(**pk_kws)
# logger.debug(exec_str)
# aql.exec_sql(exec_str, db=db)
#
# for fk_keys, fk_vals in fk_dict.items():
# fk_kws = {'cl_out': sc, 'tb': tb_name,
# 'fk': fk_keys, 'ref': fk_vals}
#
# exec_str = ('''
# ALTER TABLE {cl_out}.{tb}
# DROP CONSTRAINT IF EXISTS fk_{tb}_{fk};
# ''').format(**fk_kws)
#
# if not drop:
# exec_str += ('''
# ALTER TABLE {cl_out}.{tb}
# ADD CONSTRAINT fk_{tb}_{fk}
# FOREIGN KEY ({fk})
# REFERENCES {ref}
# ''').format(**fk_kws)
# logger.debug(exec_str)
# aql.exec_sql(exec_str, db=db)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class TableReader():
    '''
    Reads tables from input data sources and makes them attributes of the
    model attribute.

    Input can come either from a SQL schema (``sc_inp``) or from one or
    several directories of csv files (``data_path``). If neither is
    provided, the grimsel default csv tables are used.
    '''

    def __init__(self, sql_connector, sc_inp, data_path, model):
        '''
        Keyword arguments:
        * sql_connector -- SQL connector object (may be None for csv input)
        * sc_inp -- input schema name or None
        * data_path -- path or list of paths to csv input directories
        * model -- model instance receiving the df_* attributes
        '''
        self.sqlc = sql_connector
        self.sc_inp = sc_inp
        # normalize to a list of paths; keep None as None so the
        # fallback check below can detect a missing data_path
        # (previously None was wrapped into [None], which defeated the
        # fallback and later crashed os.walk)
        if isinstance(data_path, (tuple, list)):
            self.data_path = list(data_path)
        else:
            self.data_path = [data_path] if data_path else None
        self.model = model

        if not self.sc_inp and not self.data_path:
            logger.warning('Falling back to grimsel default csv tables.')
            # keep list type so _get_table_dict iterates paths, not chars
            self.data_path = [os.path.abspath(os.path.join(
                grimsel.__path__[0], '..', 'input_data'))]

        self.table_set, self.dict_tb_path = self._get_table_dict()

    def _get_table_dict(self):
        '''
        Obtain the set of available tables in the relevant data source.

        Returns a tuple (table_set, dict_tb_path):
        * table_set -- set of all available table names
        * dict_tb_path -- dict table name -> list of paths providing it;
          empty for SQL input, where tables are read through aql
        '''
        if self.sc_inp:
            # bug fix: previously only the table list was returned,
            # breaking the two-value unpacking in __init__
            tables = aql.get_sql_tables(self.sc_inp, self.sqlc.db)
            return set(tables), {}
        elif self.data_path:
            # path -> tables list
            dict_pt_tb = {path: [fn.replace('.csv', '')
                                 for fn in next(os.walk(path))[-1]]
                          for path in self.data_path}
            table_set = set(itertools.chain.from_iterable(
                    dict_pt_tb.values()))
            # table -> under which paths
            dict_tb_path = {tb: [pt for pt, tb_list in dict_pt_tb.items() if
                                 tb in tb_list] for tb in table_set}
            return table_set, dict_tb_path

    def _expand_table_families(self, dct):
        '''
        Searches for tables with identical name + suffix.
        Updates the dct (e.g. 'profdmnd' also selects 'profdmnd_soy').
        '''
        dct_add = {}
        for table, filt in dct.items():
            tbs_other = [tb for tb in self.table_set
                         if table in tb and not tb == table]
            if tbs_other:
                dct_add.update({tb: filt for tb in tbs_other})
        dct.update(dct_add)

    def df_from_dict(self, dct):
        '''
        Reads filtered input tables and assigns them to model instance
        attributes (``df_<table>``; None if the table does not exist).

        Keyword arguments:
        * dct -- dict table name -> filter list (see get_input_table)
        '''
        self._expand_table_families(dct)
        for table, filt in dct.items():
            list_df, tb_exists, source_str = self.get_input_table(table, filt)
            df = pd.concat(list_df, axis=0, sort=False) if tb_exists else None
            setattr(self.model, 'df_' + table, df)
            if not tb_exists:
                warn_str = ('Input table {tb} does not exist. Setting model '
                            'attribute df_{tb} to None.')
                logger.warning(warn_str.format(tb=table))
            else:
                filt = ('filtered by ' if len(filt) > 0 else '') +\
                    ', '.join([str(vvv[0]) + ' in ' + str(vvv[1]) for vvv in filt
                               if not len(vvv[1]) == 0])
                logger.info(('Reading input table {tb} {flt} from '
                             '{source_str}').format(tb=table, flt=filt,
                                                    source_str=source_str))

    def get_input_table(self, table, filt):
        '''
        Read a single input table from all of its sources.

        Keyword arguments:
        * table -- table name
        * filt -- list of (column(s), allowed values) filter tuples

        Returns a tuple (list of DataFrames or None, bool table exists,
        string describing the data source(s)).
        '''
        if self.sc_inp:
            tb_exists = table in aql.get_sql_tables(self.sc_inp, self.sqlc.db)
            if tb_exists:
                list_df = [aql.read_sql(self.sqlc.db, self.sc_inp,
                                        table, filt)]
            # bug fix: source must be a list — joining a plain string
            # below would iterate its characters
            source = ['%s %s.%s'%(self.sqlc.db, self.sc_inp, table)]
        else:
            list_df = []
            tb_exists = table in self.dict_tb_path
            paths = self.dict_tb_path[table] if tb_exists else []
            source = []
            for path in paths:
                fn = os.path.join(path, '{}.csv'.format(table))
                source.append(fn)
                df = pd.read_csv(fn)
                logger.debug('Done reading, filtering according to {}'.format(filt))
                for col, vals in filt:
                    if isinstance(col, str):  # single column filtering
                        mask = df[col].isin(vals)
                    elif isinstance(col, (list, tuple)):  # multiple columns
                        mask = df[list(col)].apply(tuple, axis=1).isin(vals)
                    df = df.loc[mask]
                list_df.append(df)

        # bug fix: the source string no longer carries a leading ' from '
        # (the consuming log message in df_from_dict already adds it)
        source_str = ' and '.join(source) if tb_exists else ''
        return (list_df if tb_exists else None), tb_exists, source_str
class DataReader(_HDFWriter, _ParqWriter):
    '''
    Reads all model input data and writes the runtime tables to the
    selected output target (SQL schema, hdf5 file, or parquet directory).
    '''

    # tables generated at runtime (they depend on the chosen time
    # resolution): (table name, primary key columns)
    runtime_tables = [('tm_soy', ['sy', 'tm_id']),
                      ('hoy_soy', ['hy', 'tm_id']),
                      ('tm_soy_full', ['sy', 'tm_id']),
                      ('sy_min_all', ['sy_min', 'tm_id']),
                      ]

    def __init__(self, **kwargs):
        # instance attributes default to the values below and are
        # overridden by the keyword arguments
        defaults = {'resume_loop': False,
                    'replace_runs_if_exist': False,
                    'model': None,
                    'autocomplete_curtailment': False,
                    'autocompletion': True,
                    'no_output': False,
                    'dev_mode': False,
                    'data_path': None,
                    'sql_connector': None,
                    'sc_inp': None,
                    'cl_out': None,
                    'db': None,
                    }
        defaults.update(kwargs)
        for key, val in defaults.items():
            setattr(self, key, val)
        self.__dict__.update(kwargs)

        # column type/foreign-key definitions used for runtime tables
        self._coldict = aql.get_coldict()

    def read_model_data(self):
        '''
        Read all input data and generate :class:`ModelBase` instance
        attributes.
        '''
        tbrd = TableReader(self.sql_connector, self.sc_inp,
                           self.data_path, self.model)

        # unfiltered input
        dict_tb_2 = {'def_month': [], 'def_week': [],
                     'parameter_month': [], 'tm_soy': []}
        tbrd.df_from_dict(dict_tb_2)

        # read input data filtered by node and energy carrier
        _flt_nd = ([('nd', self.model.slct_node)]
                   if self.model.slct_node else [])
        _flt_ca = ([('ca', self.model.slct_encar)]
                   if self.model.slct_encar else [])
        _flt_pt = ([('pt', self.model.slct_pp_type)]
                   if self.model.slct_pp_type else [])
        dict_tb_3 = {'def_node': _flt_nd,
                     'def_pp_type': _flt_pt,
                     'def_encar': _flt_ca}
        tbrd.df_from_dict(dict_tb_3)

        # translate slct_node_connect to nd_ids
        if self.model.slct_node_connect:
            dict_nd = self.model.df_def_node.set_index('nd').nd_id.to_dict()
            slct_node_connect_id = [(dict_nd[nd1], dict_nd[nd2])
                                    for nd1, nd2 in self.model.slct_node_connect
                                    if nd1 in dict_nd and nd2 in dict_nd]
            _flt_ndcnn = [(('nd_id', 'nd_2_id'), slct_node_connect_id)]
        else:
            _flt_ndcnn = []

        # update filters in case the keyword argument slct_node_id holds more
        # nodes than present in the table
        self.model.slct_node_id = self.model.df_def_node.nd_id.tolist()
        self.model.slct_encar_id = self.model.df_def_encar.ca_id.tolist()
        self.model.slct_pp_type_id = self.model.df_def_pp_type.pt_id.tolist()

        _flt_nd = [('nd_id', self.model.slct_node_id)]
        _flt_ca = [('ca_id', self.model.slct_encar_id)]
        _flt_nd_2 = [('nd_2_id', self.model.df_def_node.nd_id.tolist())]
        _flt_pt = [('pt_id', self.model.df_def_pp_type.pt_id.tolist())]

        # read input data filtered by node, energy carrier, and fuel
        dict_tb_0 = {'def_plant': _flt_nd + _flt_pt,
                     'profchp': _flt_nd,
                     'node_encar': _flt_nd + _flt_ca,
                     'node_connect': _flt_nd + _flt_ca + _flt_nd_2 + _flt_ndcnn}
        tbrd.df_from_dict(dict_tb_0)

        # secondary filtering by plant
        _flt_pp = [('pp_id', self.model.df_def_plant['pp_id'].tolist())]
        _flt_fl = [('fl_id', self.model.df_def_plant.fl_id.unique().tolist())]
        dict_tb_1 = {'profinflow': _flt_pp,
                     'plant_encar': _flt_pp + _flt_ca,
                     'hydro': _flt_pp,
                     'def_fuel': _flt_fl,
                     'plant_month': _flt_pp,
                     'plant_week': _flt_pp,
                     'fuel_node_encar': _flt_fl + _flt_nd + _flt_ca}
        tbrd.df_from_dict(dict_tb_1)

        # initialize profile index dicts
        self.model._init_pf_dicts()

        _flt_pf_supply = [('supply_pf_id',
                           list(self.model.dict_supply_pf.values()))]
        _flt_pf_dmnd = [('dmnd_pf_id',
                         list(self.model.dict_dmnd_pf.values()))]
        _flt_pf_price = [('price_pf_id',
                          list(self.model.dict_pricebuy_pf.values())
                          + list(self.model.dict_pricesll_pf.values()))]
        dict_pf_0 = {'profsupply': _flt_pf_supply,
                     'profdmnd': _flt_pf_dmnd,
                     'profprice': _flt_pf_price,
                     }
        tbrd.df_from_dict(dict_pf_0)

        _flt_pf = [('pf_id', (_flt_pf_price[-1][-1] + _flt_pf_dmnd[-1][-1]
                              + _flt_pf_supply[-1][-1]))]
        dict_pf_1 = {'def_profile': _flt_pf}
        tbrd.df_from_dict(dict_pf_1)

        # filter plants requiring input from non-existing ca
        # e.g. if a fuel-cell is in the input table but no hydrogen is
        # included in the model, the plant's H2 demand wouldn't be accounted
        # for;
        if 'fl_id' in self.model.df_def_encar.columns:
            fl_id_ca = self.model.df_def_encar.fl_id.tolist()
            # NOTE(review): unary minus on boolean masks is deprecated
            # in pandas — ~ would be the canonical operator
            mask_del = (self.model.df_def_fuel.is_ca.isin([1])
                        & - self.model.df_def_fuel.fl_id.isin(fl_id_ca))
            self.model.df_def_fuel = self.model.df_def_fuel.loc[-mask_del]

        # filter table by special index name/id columns
        self.model.df_parameter_month = \
            self.filter_by_name_id_cols('df_parameter_month',
                                        _flt_fl + _flt_nd + _flt_pp + _flt_ca)

        self._split_profprice()

        # autocomplete input tables
        self.data_autocompletion()

        if isinstance(self.model.df_node_connect, pd.DataFrame):
            self._fix_df_node_connect()

        self.input_table_list = (list(dict_tb_1) + list(dict_tb_2)
                                 + list(dict_tb_0) + list(dict_tb_3)
                                 + list(dict_pf_1))

#        self.model._update_slct_lists()

    def _split_profprice(self):
        '''
        Make two separate DataFrames for profprice buying and selling.
        Having both in the same table gets too complicated down the road.
        '''
        bool_exists = (hasattr(self.model, 'df_profprice')
                       and self.model.df_profprice is not None)

        # creates df_profpricebuy / df_profpricesll (None if no profprice)
        for bs in ['buy', 'sll']:
            tb_name = 'df_profprice%s' % bs
            if bool_exists:
                mask = self.model.df_def_profile.pf.str.contains('price' + bs)
                list_pd_id = self.model.df_def_profile.loc[mask].pf_id.tolist()
                mask_prf = self.model.df_profprice.price_pf_id.isin(list_pd_id)
                df_split = self.model.df_profprice.loc[mask_prf]
                setattr(self.model, tb_name, df_split)
            else:
                setattr(self.model, tb_name, None)

    def data_autocompletion(self):
        # run the autocompletion classes which add derived rows to the
        # def_* input tables (transmission/demand/consumption plants etc.)
        if self.autocompletion:
            logger.info('#' * 60)

            ac.AutoCompletePpType(self.model, self.autocomplete_curtailment)
            ac.AutoCompleteFuelTrns(self.model)
            ac.AutoCompleteFuelDmnd(self.model, self.autocomplete_curtailment)
            ac.AutoCompletePlantTrns(self.model)
            ac.AutoCompletePlantDmnd(self.model, self.autocomplete_curtailment)
            if 'fl_id' in self.model.df_def_encar:
                ac.AutoCompletePlantCons(self.model)
            ac.AutoCompletePpCaFlex(self.model, self.autocomplete_curtailment)
            logger.info('#' * 60)

    def filter_by_name_id_cols(self, name_df, filt):
        """
        Filter a pandas DataFrame with index names in columns.
        This operates on pandas DataFrames where the indices are not provided
        as column names but as row entries in special columns.
        E.g., instead of
        ===== ===== =====
        nd_id fl_id value
        ===== ===== =====
        1     2     1.2
        ===== ===== =====
        we have
        ========== ========== ======== ======== =======
        set_1_name set_2_name set_1_id set_2_id value
        ========== ========== ======== ======== =======
        nd_id      fl_id      1        2        1.2
        ========== ========== ======== ======== =======
        This allows to combine structurally different tables.
        Filtering is implemented as an iteration over the set_n_name/set_n_id
        column pairs, each of which is filtered with respect to all elements
        in the filt parameter.
        Parameters
        ==========
        df : pandas DataFrame
            as described above
        filt : list
            filtering list of the same format as the
            :func:`grimsel.auxiliary.sqlutils.aux_sql_func.read_sql`
            parameter
        Returns
        =======
        filtered DataFrame
        """
        df = getattr(self.model, name_df)

        if df is not None:

            # perform iterative filtering for each name/id column pair
            for name_col, id_col in [(cc, cc.replace('name', 'id'))
                                     for cc in df.columns if 'name' in cc]:

                # init mask
                mask = False

                # loop over all filter elements
                # NOTE(review): the pre-loop assignment below is a
                # leftover; also, mask is computed but never applied to
                # df before returning — verify whether df.loc[mask] is
                # missing here
                iflt = filt[0]
                for iflt in filt:
                    mask |= (  # 1. select value for current set_n_name column
                             ((df[name_col] == iflt[0])
                              # 2. select corresponding values for set_n_id col
                              & (df[id_col].isin(iflt[1])))
                             # 3. skip if this name_col is not relevant
                             | (df[name_col] == 'na'))

            # reporting
            report_df = df[[c for c in df.columns if 'set_' in c]]
            report_df = report_df.drop_duplicates()

            # get stackable columns
            newcols = [tuple(c.split('_')[1:]) if '_' in c else (None, c)
                       for c in report_df.columns]
            report_df.columns = pd.MultiIndex.from_tuples(newcols,
                                                          names=['col_n',
                                                                 'col_type'])
            report_df = report_df.stack(level='col_n')

            # get all name values from all name columns
            names = df[[c for c in df.columns if 'name' in c]]
            names = names.stack().unique().tolist()
            names = [nn for nn in names if not nn == 'na']

            # get dictionary {set names: set values}
            names_dict = {nn: list(set(report_df.loc[report_df.name == nn, 'id']))
                          for nn in names}

            logger.info('Ex-post filtering of DataFrame {}:'.format(name_df))
            for kk, vv in names_dict.items():
                logger.info('\tSet {} is in ({})'.format(kk, ', '.join(map(str, vv))))

        return df

    def _fix_df_node_connect(self):
        '''
        Makes sure the table df_node_connect corresponds to the new style.
        New style: The transmission capacities are expressed as
        * ``cap_trme_leg`` for exports and
        * ``cap_trmi_leg`` for imports
        for single directions, i.e. non-redundant. The input table has columns
        (nd_id, nd_2_id, ca_id, mt_id, cap_trme_leg, cap_trmi_leg).
        Old style: Single transmission capacity for both directions; columns:
        (nd_id, nd_2_id, ca_id, mt_id, eff, cap_trm_leg)
        '''
        if 'cap_trm_leg' in self.model.df_node_connect.columns:
            df = self.model.df_node_connect
            # canonical direction: keep rows with nd_id > nd_2_id,
            # flip the others and mark them as import capacity
            df['dir'] = df.nd_id < df.nd_2_id

            df_e = df.loc[df.dir].assign(nd_id=df.nd_2_id,
                                         nd_2_id=df.nd_id,
                                         cap_trmi_leg=df.cap_trm_leg)
            df = df.loc[-df.dir].assign(cap_trme_leg=df.cap_trm_leg)
            dfn = pd.concat([df_e, df], sort=False)
            dfn = dfn.drop('cap_trm_leg', axis=1).fillna(0)
            idx = ['nd_id', 'nd_2_id', 'ca_id', 'mt_id']
            # NOTE(review): logger.info is given a positional argument
            # without a %-placeholder — the pivot table is never shown
            # and logging reports a formatting error; verify intent
            logger.info('Aggregation count in fix_df_node_connect:\n',
                        dfn.pivot_table(index=idx, aggfunc=[min, max, len],
                                        values=['cap_trme_leg',
                                                'cap_trmi_leg']))
            dfn = dfn.pivot_table(index=idx, aggfunc=sum,
                                  values=['cap_trme_leg', 'cap_trmi_leg'])
            self.model.df_node_connect = dfn.reset_index()

    @skip_if_resume_loop
    @skip_if_no_output
    def _write_input_tables_to_output_schema(self, tb_list):
        '''
        Gathers relevant input tables and writes them to the output collection.
        Note: As of now profile input tables are excluded. All relevant data
        is included in the parameter output anyway.
        '''
        for itb in set(tb_list) - set(list(zip(*self.runtime_tables))[0]):
            df = getattr(self.model, 'df_' + itb)
            # skip missing tables and (non-def_) profile tables
            if (df is not None and ('def_' in itb or 'prof' not in itb)):
                log_str = 'Writing input table {} to {} output: {}.'
                logger.info(log_str.format(itb, self.output_target,
                                           self.cl_out))
                if self.output_target == 'psql':
                    logger.info('Writing table {} to output.'.format(itb))
                    engine = self.sql_connector.get_sqlalchemy_engine()
                    db = self.sql_connector.db
                    aql.write_sql(df, db, self.cl_out, itb,
                                  if_exists='replace', engine=engine)
                elif self.output_target == 'hdf5':
                    self.write_hdf(itb, df, 'put')
                elif self.output_target in ['fastparquet']:
                    fn = os.path.join(self.cl_out, itb + '.parq')
                    self.write_parquet(fn, df, self.output_target)
                else:
                    raise RuntimeError('_write_input_tables_to_output_schema: '
                                       'no output_target applicable')

    @skip_if_resume_loop
    @skip_if_no_output
    def write_runtime_tables(self):
        '''
        Some input tables depend on model parameters (time resolution).
        Write these to output database schema.
        Also, table def_node altered due to addition of column dmnd_max.
        '''
        self._write_input_tables_to_output_schema(self.input_table_list)

        # columns which must not carry a foreign key (they define the
        # referenced ids themselves)
        skip_fks = [('tm_soy', 'sy'),  # defines sy
                    ('hoy_soy', 'hy')]  # defines hy

        tb_name, pk = ('hoy_soy', ['hy', 'tm_id'])
        for tb_name, pk in self.runtime_tables:
            if getattr(self.model, 'df_' + tb_name, None) is not None:
                df = getattr(self.model, 'df_' + tb_name)

                logger.info('Writing runtime table ' + tb_name)

                cols = []
                for c in df.columns:
                    col_add = [c]
                    if c not in self._coldict:  # same as "value"
                        self._coldict[c] = self._coldict['value']

                    col_add += (list(self._coldict[c])
                                if (tb_name, c) not in skip_fks
                                else list(self._coldict[c][:1]))
                    cols.append(tuple(col_add))

                if self.output_target == 'psql':
                    engine = self.sql_connector.get_sqlalchemy_engine()
                    con_cur = self.sql_connector.get_pg_con_cur()
                    aql.init_table(tb_name=tb_name, cols=cols,
                                   schema=self.cl_out,
                                   ref_schema=self.cl_out, pk=pk, unique=[],
                                   db=self.sql_connector.db, con_cur=con_cur)
                    aql.write_sql(df, sc=self.cl_out, tb=tb_name,
                                  if_exists='append', engine=engine,
                                  con_cur=con_cur)
                elif self.output_target == 'hdf5':
                    self.write_hdf(tb_name, df, 'put')
                elif self.output_target in ['fastparquet']:
                    fn = os.path.join(self.cl_out, tb_name + '.parq')
                    self.write_parquet(fn, df, engine=self.output_target)
                else:
                    raise RuntimeError('write_runtime_tables: no '
                                       'output_target applicable')

    @staticmethod
    def _get_max_run_id(cl_out):
        # highest run_id present in the parquet def_run table;
        # False if the table does not exist yet
        run_fn = os.path.join(cl_out, 'def_run.parq')
        if os.path.isfile(run_fn):
            logger.debug('_get_max_run_id: Reading file %s.'%run_fn)
            max_run = pd.read_parquet(run_fn, columns=['run_id']).run_id.max()
        else:
            logger.warning('_get_max_run_id: %s not found.'%run_fn)
            max_run = False

        return max_run
class IO:
'''
Primary IO class exposing the :module:`io` module.
:ivar datrd: :class:`DataReader` instance
:ivar modwr: :class:`ModelWriter` instance
'''
def __init__(self, **kwargs):
self._close_all_hdf_connections()
defaults = {'sc_warmstart': False,
'resume_loop': False,
'replace_runs_if_exist': False,
'model': None,
'autocomplete_curtailment': False,
'sql_connector': None,
'autocompletion': True,
'no_output': False,
'dev_mode': False,
'data_path': None,
'sc_inp': None,
'cl_out': None,
'db': 'postgres',
'output_target': 'psql'
}
defaults.update(kwargs)
self.sql_connector = defaults['sql_connector']
self.replace_runs_if_exist = defaults['replace_runs_if_exist']
self.db = self.sql_connector.db if self.sql_connector else None
self.cl_out = defaults['cl_out']
if defaults['resume_loop'] == 'auto':
defaults['resume_loop'] = \
self._get_auto_resume_loop(defaults['output_target'])
self.resume_loop = defaults['resume_loop']
self.datrd = DataReader(**defaults)
self.modwr = ModelWriter(**defaults)
def _get_auto_resume_loop(self, output_target):
if not output_target == 'fastparquet':
raise RuntimeError ('resume_loop="auto" not implemented for '
'%s output.')%output_target
resloop = DataReader._get_max_run_id(self.cl_out)
resloop = (resloop + 1) if not isinstance(resloop, bool) else resloop
logger.info('Setting "auto" resume_loop to %s'%resloop)
return resloop
@classmethod
def variab_to_df(cls, py_obj, sets=None):
''' Wrapper for backward compatibility. '''
if not sets:
sets = table_struct.DICT_COMP_IDX[py_obj.name]
sets = [st for st in sets if not st == 'bool_out']
return VariabIO._to_df(py_obj, sets)
@classmethod
def param_to_df(cls, py_obj, sets=None):
''' Wrapper for backward compatibility. '''
if not sets:
sets = table_struct.DICT_COMP_IDX[py_obj.name]
return ParamIO._to_df(py_obj, sets)
def read_model_data(self):
self.datrd.read_model_data()
def write_runtime_tables(self):
self.datrd.write_runtime_tables()
def init_output_tables(self):
self.modwr.init_compio_objs()
self.modwr.init_all()
def write_run(self, run_id):
self.modwr.run_id = run_id
self.modwr.write_all()
def _init_loop_table(self, cols_id, cols_step, cols_val):
    """Initialize the ``def_run`` table holding per-run metadata.

    :param cols_id: loop index column names (stored as SMALLINT)
    :param cols_step: loop step column names (stored as DOUBLE PRECISION)
    :param cols_val: loop value column names (stored as VARCHAR(30))
    :raises RuntimeError: if the writer's output target is unsupported
    """
    tb_name = 'def_run'
    cols = ([('tdiff_solve', 'DOUBLE PRECISION'),
             ('tdiff_write', 'DOUBLE PRECISION'),
             ('run_id', 'SMALLINT'),
             ]
            + [(s, 'SMALLINT') for s in cols_id]
            + [(s, 'DOUBLE PRECISION') for s in cols_step]
            + [(s, 'VARCHAR(30)') for s in cols_val]
            + [('info', 'VARCHAR'), ('objective', 'DOUBLE PRECISION')])
    if self.modwr.output_target == 'psql':
        aql.init_table(tb_name, cols, self.cl_out,
                       pk=cols_id, unique=['run_id'], db=self.db)
    elif self.modwr.output_target == 'hdf5':
        # NOTE: the original re-tested output_target == 'hdf5' inside
        # this branch; that redundant check has been removed.
        df = pd.DataFrame(columns=list(zip(*cols))[0])
        df.to_hdf(self.cl_out, tb_name, format='table')
    elif self.modwr.output_target == 'fastparquet':
        pass  # parquet table is not initialized
    else:
        raise RuntimeError('_init_loop_table: no '
                           'output_target applicable')
@staticmethod
def _close_all_hdf_connections():
    # Close every open pytables file handle so the output HDF5 file can
    # be deleted or re-opened without "file already open" errors.
    tables.file._open_files.close_all()
# %%
if __name__ == '__main__':
    # No script behavior; this module is import-only.
    pass
| en | 0.627177 | #!/usr/bin/env python3 # -*- coding: utf-8 -*- Created on Wed Jan 2 15:03:42 2019 @author: user # modify for > 9999 model runs Mixing class for :class:`CompIO` and :class:`DataReader`. Opens connection to HDF file and writes output. Parameters ---------- put_append: str, one of `('append', 'put')` Write one-time table to the output file (`'put'`) or append to existing table (`'append'`). Mixing class for :class:`CompIO` and :class:`DataReader`. Opens connection to HDF file and writes output. Parameters ---------- fn: str filename for table writing df: pandas DataFrame table to be written engine: str engine name as in the pandas DataFrame.to_parquet parameter # df.to_parquet(fn, engine='fastparquet', # compression='gzip',) # if 'run_id' in df.columns: # df.to_parquet(fn, #append=os.path.isfile(fn), # engine='fastparquet', # compression='gzip', # partition_cols=['run_id']) # else: # df.to_parquet(fn, #append=os.path.isfile(fn), # engine='fastparquet', # compression='gzip' # ) A CompIO instance takes care of extracting a single variable/parameter from the model and of writing a single table to the database. # set in index setter # set in call to self.write_run Child-specific method called after reading. Calls classmethods _to_df. Is overwritten in DualIO, where _to_df is not used as classmethod. Initialization of output table. Calls the :func:`aux_sql_func` method with appropriate parameters. .. note: Keys need to be added in post-processing due to table writing performance. # pk added later for writing/appending performance Casts the data types of the output table and writes the table to the output HDF file. Add run_id column and write to database table # value always positive, directionalities expressed through bool_out Makes sure idx is tuple and updates columns attribute. TODO: THIS SHOULD BE IN MAPS!!!! TODO: THIS SHOULD ALSO INCLUDE ca_id FOR DMND! 
Method for translation of node_id to respective plant_id in the cases of demand and inter-node transmission. This is used to append demand/inter-nodal transmission to pwr table. Returns a dictionary node -> plant Keyword arguments: * pt -- string, selected plant type for translation Base class for dual values. Performs the data extraction of constraint shadow prices. Base class for variables. Performs the data extraction of variable objects. Also: Special methods related to the handling of negative plant variables defined by setlst[sll] + setlst[curt] as well as storage charging. Converts pyomo variable to DataFrame. Calls _set_bool_out prior to writing. Input arguments: * df -- DataFrame; primary dataframe Set the bool_out values according to the pp_id. pp_ids corresponding to curtailment, charging, and sales have bool_out=True. * df -- DataFrame; primary dataframe Base class for parameters. Is inherited by :class:`DmndIO`. Only contains the parameter ``_to_df`` classmethod. Converts pyomo parameter to DataFrame. Special methods related to the translation of nodes to transmission plant names and to the simplified representation after aggregating secondary nodes. Write aggregated transmission table to pwr. Aggregates trm table over all secondary nodes for simplification and to append to the pwr table. # mirror table to get both directions # reduce time resolution The bool out column value depends on the sign of the data. Demand is appended to the *pwr* table after translating the nd_id to the corresponding "power plant" pp_id. Translate the demand ``pf_id`` to the corresponding ``pp_id``s This is based on a mapping ``pf_id`` |rarr| ``nd_id`` |rarr| ``pp_id``. The ``pp_id`` definition for demand is retrieved from the ``ModelBase.df_def_plant`` table. # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The IO singleton class manages the TableIO instances and communicates with other classes. Manages database connection. 
# set in call to self.write_run # define instance attributes and update with kwargs Get the dictionaries describing all tables. Also used in the class method ``post_process_index``, therefore module function. Reset the SQL schema or hdf file for model output writing. Deletes existing hdf5 file and creates empty one. Parameters ---------- fn: str filename warn: bool prompt user input if the file exists # pass ~~~~~~~~~~~~~~~ WARNING: ~~~~~~~~~~~~~~~~ You are about to delete existing file {fn}. The maximum run_id is {max_run_id}. Hit enter to proceed. Deletes existing parquet file folder and creates empty one. Parameters ---------- dirc: str parquet directory name warn: bool prompt user input if the file exists ~~~~~~~~~~~~~~~ WARNING: ~~~~~~~~~~~~~~~~ You are about to delete existing directory {dirc}. The maximum run_id is {max_run_id}. Hit enter to proceed. Initialize all output table IO objects. Calls the write methods of all CompIO objects. Initializes all SQL tables. Calls the init_output_table methods of all CompIO instances. In output tables delete all rows with run_id >=/== the selected value. Used in : 1. in ModelLoop.perform_model_run if replace_runs_if_exist == True with operator '=' to remove current run_id from all tables prior to writing TODO: The SQL part would be better fit with the aux_sql_func module. # Get overview of all tables DELETE FROM {cl_out}.{tb} WHERE run_id {op} {run_id}; # @classmethod # def post_process_index(cls, sc, db, drop=False): # # coldict = aql.get_coldict(sc, db) # # dict_idx, dict_table, _ = ModelWriter.get_table_dicts() # # list_tables = aql.get_sql_tables(sc, db) # # for comp, index in dict_idx.items(): # # if not dict_table[comp] in list_tables: # logger.warning('Table ' + comp + ' does not exist... 
skipping ' # 'index generation.') # else: # # tb_name = dict_table[comp] # # logger.info('tb_name:', tb_name) # # pk_list = index + ('run_id',) # # fk_dict = {} # for c in pk_list: # if len(coldict[c]) > 1: # fk_dict[c] = coldict[c][1] # # # pk_kws = {'pk_list': ', '.join(pk_list), # 'tb': tb_name, 'cl_out': sc} # exec_str = (''' # ALTER TABLE {cl_out}.{tb} # DROP CONSTRAINT IF EXISTS {tb}_pkey; # ''').format(**pk_kws) # if not drop: # exec_str += (''' # ALTER TABLE {cl_out}.{tb} # ADD CONSTRAINT {tb}_pkey # PRIMARY KEY ({pk_list}) # ''').format(**pk_kws) # logger.debug(exec_str) # aql.exec_sql(exec_str, db=db) # # for fk_keys, fk_vals in fk_dict.items(): # fk_kws = {'cl_out': sc, 'tb': tb_name, # 'fk': fk_keys, 'ref': fk_vals} # # exec_str = (''' # ALTER TABLE {cl_out}.{tb} # DROP CONSTRAINT IF EXISTS fk_{tb}_{fk}; # ''').format(**fk_kws) # # if not drop: # exec_str += (''' # ALTER TABLE {cl_out}.{tb} # ADD CONSTRAINT fk_{tb}_{fk} # FOREIGN KEY ({fk}) # REFERENCES {ref} # ''').format(**fk_kws) # logger.debug(exec_str) # aql.exec_sql(exec_str, db=db) # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Reads tables from input data sources and makes them attributes of the model attribute. Obtain list of tables in the relevant data source. TODO: Update PSQL # path -> tables list # table -> under which paths Searches for tables with identical name + suffix. Updates the dct. Reads filtered input tables and assigns them to instance attributes. Returns list of tables. # single column filtering # multiple columns Read all input data and generate :class:`ModelBase` instance attributes. 
# unfiltered input # read input data filtered by node and energy carrier # translate slct_node_connect to nd_ids # update filters in case the keyword argument slct_node_id holds more # nodes than present in the table # read input data filtered by node, energy carrier, and fuel # secondary filtering by plant # initialize profile index dicts # filter plants requiring input from non-existing ca # e.g. if a fuel-cell is in the input table but no hydrogen is # included in the model, the plant's H2 demand wouldn't be accounted # for; # filter table by special index name/id columns # autocomplete input tables # self.model._update_slct_lists() Make two separate DataFrames for profprice buying and selling. Having both in the same table gets too complicated down the road. Filter a pandas DataFrame with index names in columns. This operates on pandas DataFrames where the indices are not provided as column names but as row entries in special columns. E.g., instead of ===== ===== ===== nd_id fl_id value ===== ===== ===== 1 2 1.2 ===== ===== ===== we have ========== ========== ======== ======== ======= set_1_name set_2_name set_1_id set_2_id value ========== ========== ======== ======== ======= nd_id fl_id 1 2 1.2 ========== ========== ======== ======== ======= This allows to combine structurally different tables. Filtering is implemented as an iteration over the set_n_name/set_n_id column pairs, each of which is filtered with respect to all elements in the filt parameter. Parameters ========== df : pandas DataFrame as described above filt : list filtering list of the same format as the :func:`grimsel.auxiliary.sqlutils.aux_sql_func.read_sql` parameter Returns ======= filtered DataFrame # perform iterative filtering for each name/id column pair # init mask # loop over all filter elements # 1. select value for current set_n_name column # 2. select corresponding values for set_n_id col # 3. 
skip if this name_col is not relevant # reporting # get stackable columns # get all name values from all name columns # get dictionary {set names: set values} Makes sure the table df_node_connect corresponds to the new style. New style: The transmission capacities are expressed as * ``cap_trme_leg`` for exports and * ``cap_trmi_leg`` for imports for single directions, i.e. non-redundant. The input table has columns (nd_id, nd_2_id, ca_id, mt_id, cap_trme_leg, cap_trmi_leg). Old style: Single transmission capacity for both directions; columns: (nd_id, nd_2_id, ca_id, mt_id, eff, cap_trm_leg) Gathers relevant input tables and writes them to the output collection. Note: As of now profile input tables are excluded. All relevant data is included in the parameter output anyway. Some input tables depend on model parameters (time resolution). Write these to output database schema. Also, table def_node altered due to addition of column dmnd_max. # defines sy # defines hy # same as "value" Primary IO class exposing the :module:`io` module. :ivar datrd: :class:`DataReader` instance :ivar modwr: :class:`ModelWriter` instance Wrapper for backward compatibility. Wrapper for backward compatibility. # parquet table is not initialized # %% | 2.062038 | 2 |
tests/conversation_manager/test_search_statement.py | Jack2313/WeChatterBot | 1 | 6617859 | <gh_stars>1-10
from unittest import TestCase
from app import create_app
from app.view.conversation_manager import generate_token
import json
class SearchStatementTestCase(TestCase):
    """
    Unit tests for the Admin Search Statement.
    LJF: all tests clear 2020-5-13
    """

    def setUp(self):
        # Fresh test client, JSON headers, and a valid one-hour admin
        # token for each test.
        self.app = create_app().test_client()
        self.myheaders = {'Content-Type': 'application/json'}
        self.token = generate_token(b'buaa', 3600)

    def test_no_attribute(self):
        # No query parameters at all -> parameter error.
        r = self.app.get(
            'admin/search_statement',
            headers=self.myheaders
        )
        result = json.loads(r.data.decode('utf-8'))
        self.assertEqual(result['code'], 10000001)
        self.assertEqual(r.status_code, 400)

    def test_no_username(self):
        # NOTE(review): '<PASSWORD>' looks like a redacted token value;
        # the missing username should still trigger a parameter error.
        r = self.app.get(
            'admin/search_statement?token=<PASSWORD>&id=',
            headers=self.myheaders
        )
        result = json.loads(r.data.decode('utf-8'))
        self.assertEqual(result['code'], 10000001)
        self.assertEqual(r.status_code, 400)

    def test_no_token(self):
        r = self.app.get(
            'admin/search_statement?username=wechatterbot&id=1',
            headers=self.myheaders
        )
        result = json.loads(r.data.decode('utf-8'))
        self.assertEqual(result['code'], 10000001)
        self.assertEqual(r.status_code, 400)

    def test_wrong_username(self):
        # Unknown admin user -> authentication error (401).
        r = self.app.get(
            'admin/search_statement?username=wechatterwhat' +
            '&token=' + self.token + '&id=1',
            headers=self.myheaders
        )
        result = json.loads(r.data.decode('utf-8'))
        self.assertEqual(result['code'], 10000044)
        self.assertEqual(r.status_code, 401)

    def test_wrong_token(self):
        # Token signed for a different identity -> authentication error.
        wrong_token = generate_token(b'what', 3600)
        r = self.app.get(
            'admin/search_statement?username=wechatterbot' +
            '&token=' + wrong_token + '&id=1',
            headers=self.myheaders
        )
        result = json.loads(r.data.decode('utf-8'))
        self.assertEqual(result['code'], 10000044)
        self.assertEqual(r.status_code, 401)

    def test_empty_id_and_empty_text(self):
        r = self.app.get(
            'admin/search_statement?username=wechatterbot' +
            '&token=' + self.token,
            headers=self.myheaders
        )
        result = json.loads(r.data.decode('utf-8'))
        self.assertEqual(result['code'], 10000001)
        self.assertEqual(r.status_code, 400)

    def test_empty_id_and_no_text(self):
        r = self.app.get(
            'admin/search_statement?username=wechatterbot' +
            '&token=' + self.token + '&id=',
            headers=self.myheaders
        )
        result = json.loads(r.data.decode('utf-8'))
        self.assertEqual(result['code'], 10000001)
        self.assertEqual(r.status_code, 400)

    def test_no_id_and_empty_text(self):
        r = self.app.get(
            'admin/search_statement?username=wechatterbot' +
            '&token=' + self.token + '&text=',
            headers=self.myheaders
        )
        result = json.loads(r.data.decode('utf-8'))
        self.assertEqual(result['code'], 10000001)
        self.assertEqual(r.status_code, 400)

    def test_no_id_and_no_text(self):
        r = self.app.get(
            'admin/search_statement?username=wechatterbot' +
            '&token=' + self.token,
            headers=self.myheaders
        )
        result = json.loads(r.data.decode('utf-8'))
        self.assertEqual(result['code'], 10000001)
        self.assertEqual(r.status_code, 400)

    def test_id_not_a_number(self):
        r = self.app.get(
            'admin/search_statement?username=wechatterbot' +
            '&token=' + self.token + '&id=string',
            headers=self.myheaders
        )
        result = json.loads(r.data.decode('utf-8'))
        self.assertEqual(result['code'], 10000001)
        # Consistency fix: every other parameter-error test also asserts
        # the HTTP status; the original omitted it here.
        self.assertEqual(r.status_code, 400)

    def test_successful_search_with_text(self):
        # Create a temporary statement first so the search has a hit.
        data = {
            'response': '临时回复规则',
            'text': '临时规则内容',
            'username': 'wechatterbot',
            'token': self.token
        }
        self.app.post(
            'http://localhost:5000/admin/create_statement',
            data=json.dumps(data),
            headers=self.myheaders
        )
        r = self.app.get(
            'admin/search_statement?username=wechatterbot' +
            '&token=' + self.token + '&text=临时规则内容',
            headers=self.myheaders
        )
        result = json.loads(r.data.decode('utf-8'))
        statements = result['statements']
        self.assertEqual(statements[0]['text'], u"临时规则内容")
        self.assertEqual(r.status_code, 200)

    def test_successful_search_with_id(self):
        r = self.app.get(
            'admin/search_statement?username=wechatterbot' +
            '&token=' + self.token + '&id=1',
            headers=self.myheaders
        )
        result = json.loads(r.data.decode('utf-8'))
        statements = result['statements']
        self.assertEqual(statements[0]['id'], 1)
        self.assertEqual(r.status_code, 200)
        self.assertEqual(result['number'], 1)
| from unittest import TestCase
from app import create_app
from app.view.conversation_manager import generate_token
import json
class SearchStatementTestCase(TestCase):
"""
Unit tests for the Admin Search Statement.
LJF: all tests clear 2020-5-13
"""
def setUp(self):
self.app = create_app().test_client()
self.myheaders = {'Content-Type': 'application/json'}
self.token = generate_token(b'buaa', 3600)
def test_no_attribute(self):
r = self.app.get(
'admin/search_statement',
headers=self.myheaders
)
result = json.loads(r.data.decode('utf-8'))
self.assertEqual(result['code'], 10000001)
self.assertEqual(r.status_code, 400)
def test_no_username(self):
r = self.app.get(
'admin/search_statement?token=<PASSWORD>&id=',
headers=self.myheaders
)
result = json.loads(r.data.decode('utf-8'))
self.assertEqual(result['code'], 10000001)
self.assertEqual(r.status_code, 400)
def test_no_token(self):
r = self.app.get(
'admin/search_statement?username=wechatterbot&id=1',
headers=self.myheaders
)
result = json.loads(r.data.decode('utf-8'))
self.assertEqual(result['code'], 10000001)
self.assertEqual(r.status_code, 400)
def test_wrong_username(self):
r = self.app.get(
'admin/search_statement?username=wechatterwhat' +
'&token=' + self.token + '&id=1',
headers=self.myheaders
)
result = json.loads(r.data.decode('utf-8'))
self.assertEqual(result['code'], 10000044)
self.assertEqual(r.status_code, 401)
def test_wrong_token(self):
wrong_token = generate_token(b'what', 3600)
r = self.app.get(
'admin/search_statement?username=wechatterbot' +
'&token=' + wrong_token + '&id=1',
headers=self.myheaders
)
result = json.loads(r.data.decode('utf-8'))
self.assertEqual(result['code'], 10000044)
self.assertEqual(r.status_code, 401)
def test_empty_id_and_empty_text(self):
r = self.app.get(
'admin/search_statement?username=wechatterbot' +
'&token=' + self.token,
headers=self.myheaders
)
result = json.loads(r.data.decode('utf-8'))
self.assertEqual(result['code'], 10000001)
self.assertEqual(r.status_code, 400)
def test_empty_id_and_no_text(self):
r = self.app.get(
'admin/search_statement?username=wechatterbot' +
'&token=' + self.token + '&id=',
headers=self.myheaders
)
result = json.loads(r.data.decode('utf-8'))
self.assertEqual(result['code'], 10000001)
self.assertEqual(r.status_code, 400)
def test_no_id_and_empty_text(self):
r = self.app.get(
'admin/search_statement?username=wechatterbot' +
'&token=' + self.token + '&text=',
headers=self.myheaders
)
result = json.loads(r.data.decode('utf-8'))
self.assertEqual(result['code'], 10000001)
self.assertEqual(r.status_code, 400)
def test_no_id_and_no_text(self):
r = self.app.get(
'admin/search_statement?username=wechatterbot' +
'&token=' + self.token,
headers=self.myheaders
)
result = json.loads(r.data.decode('utf-8'))
self.assertEqual(result['code'], 10000001)
self.assertEqual(r.status_code, 400)
def test_id_not_a_number(self):
r = self.app.get(
'admin/search_statement?username=wechatterbot' +
'&token=' + self.token + '&id=string',
headers=self.myheaders
)
result = json.loads(r.data.decode('utf-8'))
self.assertEqual(result['code'], 10000001)
def test_successful_search_with_text(self):
data = {
'response': '临时回复规则',
'text': '临时规则内容',
'username': 'wechatterbot',
'token': self.token
}
self.app.post(
'http://localhost:5000/admin/create_statement',
data=json.dumps(data),
headers=self.myheaders
)
r = self.app.get(
'admin/search_statement?username=wechatterbot' +
'&token=' + self.token + '&text=临时规则内容',
headers=self.myheaders
)
result = json.loads(r.data.decode('utf-8'))
statements = result['statements']
self.assertEqual(statements[0]['text'], u"临时规则内容")
self.assertEqual(r.status_code, 200)
def test_successful_search_with_id(self):
r = self.app.get(
'admin/search_statement?username=wechatterbot' +
'&token=' + self.token + '&id=1',
headers=self.myheaders
)
result = json.loads(r.data.decode('utf-8'))
statements = result['statements']
self.assertEqual(statements[0]['id'], 1)
self.assertEqual(r.status_code, 200)
self.assertEqual(result['number'], 1) | en | 0.700565 | Unit tests for the Admin Search Statement. LJF: all tests clear 2020-5-13 | 2.742622 | 3 |
pytorch-ppo-old/vrep_rotors.py | umd-agrc/QuadRL | 1 | 6617860 | #!/usr/bin/python3
import vrep
# propellers = ['propeller1Vel', 'propeller2Vel', 'propeller3Vel', 'propeller4Vel']
# Signal names of the four rotor thrust inputs in the V-REP scene.
propellers = ['rotor1_thrust', 'rotor2_thrust', 'rotor3_thrust', 'rotor4_thrust']
def init_rotors(clientID):
    """Reset all rotor thrust signals for a fresh simulation run.

    :param clientID: id of an open V-REP remote-API connection
    """
    # Clear all signals.  Iterate the signal names directly instead of
    # indexing with range(len(...)).
    for prop in propellers:
        vrep.simxClearFloatSignal(clientID, prop, vrep.simx_opmode_oneshot)
    # Set all propellers to (effectively) zero thrust; 1e-8 rather than
    # 0.0 -- presumably to keep the signal registered, TODO confirm.
    for prop in propellers:
        vrep.simxSetFloatSignal(clientID, prop, 1e-8, vrep.simx_opmode_oneshot)
def set_rotors(clientID, propeller_vels):
    """Apply one thrust value per rotor.

    :param clientID: id of an open V-REP remote-API connection
    :param propeller_vels: iterable of four thrust values, ordered as in
        ``propellers``
    """
    # Plain loop instead of a list comprehension used only for its side
    # effects.
    for prop, vel in zip(propellers, propeller_vels):
        vrep.simxSetFloatSignal(clientID, prop, vel, vrep.simx_opmode_oneshot)
    return
| #!/usr/bin/python3
import vrep
# propellers = ['propeller1Vel', 'propeller2Vel', 'propeller3Vel', 'propeller4Vel']
propellers = ['rotor1_thrust', 'rotor2_thrust', 'rotor3_thrust', 'rotor4_thrust']
def init_rotors(clientID):
# Clear all signals
for i in range(len(propellers)):
vrep.simxClearFloatSignal(clientID, propellers[i], vrep.simx_opmode_oneshot)
# Set all propellers to zero
for i in range(len(propellers)):
vrep.simxSetFloatSignal(clientID, propellers[i], 1e-8, vrep.simx_opmode_oneshot)
def set_rotors(clientID, propeller_vels):
[vrep.simxSetFloatSignal(clientID, prop, vels, vrep.simx_opmode_oneshot) for prop, vels in zip(propellers,
propeller_vels)]
return
| en | 0.330799 | #!/usr/bin/python3 # propellers = ['propeller1Vel', 'propeller2Vel', 'propeller3Vel', 'propeller4Vel'] # Clear all signals # Set all propellers to zero | 2.183065 | 2 |
test/project/progress/models.py | tylerlacy/bootstrap-uploadprogress | 22 | 6617861 | <gh_stars>10-100
import re
from django.db import models
from django.core.validators import RegexValidator
# Allowed package names: ASCII letters, digits, and "-_() ." characters.
RE_PACKAGE_NAME = re.compile(r'^[-a-zA-Z0-9_() .]+$')
class Package(models.Model):
    """An uploaded software package (name plus file)."""

    class Meta:
        # Default queryset ordering: alphabetical by name.
        ordering = ('name',)

    # Human-readable software name; restricted to ASCII via the regex
    # validator above.
    name = models.CharField(
        db_index=True,
        max_length=512,
        verbose_name='Software name',
        validators=[RegexValidator(RE_PACKAGE_NAME, 'Use ASCII characters only')]
    )
    # The uploaded package file itself.
    file = models.FileField(
        verbose_name='File'
    )
| import re
from django.db import models
from django.core.validators import RegexValidator
RE_PACKAGE_NAME = re.compile(r'^[-a-zA-Z0-9_() .]+$')
class Package(models.Model):
class Meta:
ordering = ('name',)
name = models.CharField(
db_index=True,
max_length=512,
verbose_name='Software name',
validators=[RegexValidator(RE_PACKAGE_NAME, 'Use ASCII characters only')]
)
file = models.FileField(
verbose_name='File'
) | none | 1 | 2.263384 | 2 | |
python/ht/pyfilter/operations/zdepth.py | Hengle/Houdini-Toolbox | 136 | 6617862 | <gh_stars>100-1000
"""This module contains an operation to force a zdepth pass."""
# =============================================================================
# IMPORTS
# =============================================================================
from __future__ import annotations
# Standard Library
from typing import TYPE_CHECKING
# Houdini Toolbox
from ht.pyfilter.operations.operation import PyFilterOperation, log_filter_call
from ht.pyfilter.property import get_property, set_property
if TYPE_CHECKING:
import argparse
from ht.pyfilter.manager import PyFilterManager
# =============================================================================
# CLASSES
# =============================================================================
class ZDepthPass(PyFilterOperation):
    """Restrict the render output to the C and Pz image planes.

    Geometry is shaded constant black, the first auxiliary image plane
    (anything other than C or Of) is remapped to carry Pz, and every
    remaining plane other than C is disabled.

    :param manager: The manager this operation is registered with.
    """

    CONST_SHADER = "opdef:/Shop/v_constant clr 0 0 0"

    def __init__(self, manager: PyFilterManager):
        super().__init__(manager)

        # Off until the --zdepth flag is seen.
        self._active = False

        # Tracks whether a Pz plane has been assigned yet.
        self._data["set_pz"] = False

    # -------------------------------------------------------------------------
    # PROPERTIES
    # -------------------------------------------------------------------------

    @property
    def active(self) -> bool:
        """Whether or not the operation is active."""
        return self._active

    # -------------------------------------------------------------------------
    # STATIC METHODS
    # -------------------------------------------------------------------------

    @staticmethod
    def build_arg_string(  # pylint: disable=arguments-differ
        active: bool = False,
    ) -> str:
        """Build an argument string for this operation.

        :param active: Whether or not to run the operation.
        :return: The constructed argument string.
        """
        return "--zdepth" if active else ""

    @staticmethod
    def register_parser_args(parser: argparse.ArgumentParser):
        """Register interested parser args for this operation.

        :param parser: The argument parser to attach arguments to.
        :return:
        """
        parser.add_argument("--zdepth", action="store_true")

    # -------------------------------------------------------------------------
    # METHODS
    # -------------------------------------------------------------------------

    @log_filter_call("object:name")
    def filter_instance(self):
        """Apply constant black shader to objects.

        :return:
        """
        is_matte = get_property("object:matte")
        is_phantom = get_property("object:phantom")
        shader = get_property("object:surface")

        set_property("object:overridedetail", True)

        if is_matte or is_phantom or shader == "matte":
            # Matte-like objects become phantoms instead of black.
            set_property("object:phantom", 1)
        else:
            set_property("object:surface", self.CONST_SHADER.split())
            set_property("object:displace", None)

    @log_filter_call("plane:variable")
    def filter_plane(self):
        """Modify image planes to ensure one will output Pz.

        This will disable all planes that are not C and Pz.

        :return:
        """
        channel = get_property("plane:channel")

        if channel == "Pz":
            if self.data["set_pz"]:
                # A Pz plane was already forced; drop this duplicate.
                set_property("plane:disable", True)
            else:
                # Remember that a native Pz plane exists.
                self.data["set_pz"] = True
            return

        if not self.data["set_pz"] and channel not in ("C", "Of"):
            # Hijack the first auxiliary plane and turn it into Pz.
            set_property("plane:variable", "Pz")
            set_property("plane:vextype", "float")
            set_property("plane:channel", "Pz")
            set_property("plane:pfilter", "minmax min")
            set_property("plane:quantize", None)

            self.data["set_pz"] = True

        elif channel not in ("C",):
            # Everything that is not C (or the forced Pz) is disabled.
            set_property("plane:disable", True)

    def process_parsed_args(self, filter_args: argparse.Namespace):
        """Process any parsed args that the operation may be interested in.

        :param filter_args: The argparse namespace containing processed args.
        :return:
        """
        if filter_args.zdepth is not None:
            self._active = filter_args.zdepth

    def should_run(self) -> bool:
        """Determine whether or not this filter should be run.

        This operation will run if the 'active' flag was passed.

        :return: Whether or not this operation should run.
        """
        return self._active
| """This module contains an operation to force a zdepth pass."""
# =============================================================================
# IMPORTS
# =============================================================================
from __future__ import annotations
# Standard Library
from typing import TYPE_CHECKING
# Houdini Toolbox
from ht.pyfilter.operations.operation import PyFilterOperation, log_filter_call
from ht.pyfilter.property import get_property, set_property
if TYPE_CHECKING:
import argparse
from ht.pyfilter.manager import PyFilterManager
# =============================================================================
# CLASSES
# =============================================================================
class ZDepthPass(PyFilterOperation):
"""Force the render to only contain C and Pz planes.
As long as there is an extra image plane that is not C or Of this operation
will remap an extra image plane to be Pz and disable the rest.
:param manager: The manager this operation is registered with.
"""
CONST_SHADER = "opdef:/Shop/v_constant clr 0 0 0"
def __init__(self, manager: PyFilterManager):
super().__init__(manager)
# Should the operation be run.
self._active = False
# We have not set the Pz plane yet.
self._data["set_pz"] = False
# -------------------------------------------------------------------------
# PROPERTIES
# -------------------------------------------------------------------------
@property
def active(self) -> bool:
"""Whether or not the operation is active."""
return self._active
# -------------------------------------------------------------------------
# STATIC METHODS
# -------------------------------------------------------------------------
@staticmethod
def build_arg_string( # pylint: disable=arguments-differ
active: bool = False,
) -> str:
"""Build an argument string for this operation.
:param active: Whether or not to run the operation.
:return: The constructed argument string.
"""
args = []
if active:
args.append("--zdepth")
return " ".join(args)
@staticmethod
def register_parser_args(parser: argparse.ArgumentParser):
"""Register interested parser args for this operation.
:param parser: The argument parser to attach arguments to.
:return:
"""
parser.add_argument("--zdepth", action="store_true")
# -------------------------------------------------------------------------
# METHODS
# -------------------------------------------------------------------------
@log_filter_call("object:name")
def filter_instance(self):
"""Apply constant black shader to objects.
:return:
"""
matte = get_property("object:matte")
phantom = get_property("object:phantom")
surface = get_property("object:surface")
set_property("object:overridedetail", True)
if matte or phantom or surface == "matte":
set_property("object:phantom", 1)
else:
set_property("object:surface", self.CONST_SHADER.split())
set_property("object:displace", None)
@log_filter_call("plane:variable")
def filter_plane(self):
"""Modify image planes to ensure one will output Pz.
This will disable all planes that are not C and Pz.
:return:
"""
channel = get_property("plane:channel")
if channel == "Pz":
# If the channel is Pz but we've already forcibly set one to Pz
# then we need to disable the plane.
if self.data["set_pz"]:
set_property("plane:disable", True)
return
# The plane is Pz and we have yet to indicate we've got a Pz so
# store the data.
self.data["set_pz"] = True
return
# If we haven't found a Pz plane yet and this channel isn't a primary
# output channel then we will force it to be Pz.
if not self.data["set_pz"] and channel not in ("C", "Of"):
set_property("plane:variable", "Pz")
set_property("plane:vextype", "float")
set_property("plane:channel", "Pz")
set_property("plane:pfilter", "minmax min")
set_property("plane:quantize", None)
self.data["set_pz"] = True
# Disable any other planes.
elif channel not in ("C",):
set_property("plane:disable", True)
def process_parsed_args(self, filter_args: argparse.Namespace):
"""Process any parsed args that the operation may be interested in.
:param filter_args: The argparse namespace containing processed args.
:return:
"""
if filter_args.zdepth is not None:
self._active = filter_args.zdepth
def should_run(self) -> bool:
"""Determine whether or not this filter should be run.
This operation will run if the 'active' flag was passed.
:return: Whether or not this operation should run.
"""
return self._active | en | 0.665839 | This module contains an operation to force a zdepth pass. # ============================================================================= # IMPORTS # ============================================================================= # Standard Library # Houdini Toolbox # ============================================================================= # CLASSES # ============================================================================= Force the render to only contain C and Pz planes. As long as there is an extra image plane that is not C or Of this operation will remap an extra image plane to be Pz and disable the rest. :param manager: The manager this operation is registered with. # Should the operation be run. # We have not set the Pz plane yet. # ------------------------------------------------------------------------- # PROPERTIES # ------------------------------------------------------------------------- Whether or not the operation is active. # ------------------------------------------------------------------------- # STATIC METHODS # ------------------------------------------------------------------------- # pylint: disable=arguments-differ Build an argument string for this operation. :param active: Whether or not to run the operation. :return: The constructed argument string. Register interested parser args for this operation. :param parser: The argument parser to attach arguments to. :return: # ------------------------------------------------------------------------- # METHODS # ------------------------------------------------------------------------- Apply constant black shader to objects. :return: Modify image planes to ensure one will output Pz. This will disable all planes that are not C and Pz. :return: # If the channel is Pz but we've already forcibly set one to Pz # then we need to disable the plane. # The plane is Pz and we have yet to indicate we've got a Pz so # store the data. 
# If we haven't found a Pz plane yet and this channel isn't a primary # output channel then we will force it to be Pz. # Disable any other planes. Process any parsed args that the operation may be interested in. :param filter_args: The argparse namespace containing processed args. :return: Determine whether or not this filter should be run. This operation will run if the 'active' flag was passed. :return: Whether or not this operation should run. | 2.001393 | 2 |
guet/commands/strategies/lambda_strategy.py | sturzl/guet | 0 | 6617863 | from typing import Callable
from guet.commands.strategies.strategy import CommandStrategy
class LambdaStrategy(CommandStrategy):
def __init__(self, apply: Callable):
self._apply = apply
def apply(self):
self._apply()
| from typing import Callable
from guet.commands.strategies.strategy import CommandStrategy
class LambdaStrategy(CommandStrategy):
def __init__(self, apply: Callable):
self._apply = apply
def apply(self):
self._apply()
| none | 1 | 2.405898 | 2 | |
radish/steps.py | suipotryot/mage_knight | 0 | 6617864 | <filename>radish/steps.py
# -*- coding: utf-8 -*-
from radish import given, when, then
from urllib import request
import requests
API_BASE_URL = "http://127.0.0.1:8000"
API_URLS = {
HEALTH = "%s/_health" % API_BASE_URL,
NEW_GAME = "%s/new_game" % API_BASE_URL,
}
@given(r"The API is up")
def api_is_up(step):
r = requests.get(API_BASE_URL)
assert r.status_code == 200, \
"Epected code %s to be 200" % r.status_code
@given(r"I start a new game with the following parameters:")
def start_new_game(step):
r = requests.post(API_BASE_URL, data=step.table)
assert r.status_code == 200, \
"Epected code %s to be 200" % r.status_code
# @given('Next {elements:g} are:')
# def have_the_string(step, elements):
# pass
| <filename>radish/steps.py
# -*- coding: utf-8 -*-
from radish import given, when, then
from urllib import request
import requests
API_BASE_URL = "http://127.0.0.1:8000"
API_URLS = {
HEALTH = "%s/_health" % API_BASE_URL,
NEW_GAME = "%s/new_game" % API_BASE_URL,
}
@given(r"The API is up")
def api_is_up(step):
r = requests.get(API_BASE_URL)
assert r.status_code == 200, \
"Epected code %s to be 200" % r.status_code
@given(r"I start a new game with the following parameters:")
def start_new_game(step):
r = requests.post(API_BASE_URL, data=step.table)
assert r.status_code == 200, \
"Epected code %s to be 200" % r.status_code
# @given('Next {elements:g} are:')
# def have_the_string(step, elements):
# pass
| en | 0.361842 | # -*- coding: utf-8 -*- # @given('Next {elements:g} are:') # def have_the_string(step, elements): # pass | 2.793005 | 3 |
doc/source/EXAMPLES/mu_fft.py | kapteyn-astro/kapteyn | 3 | 6617865 | from kapteyn import maputils
from matplotlib import pyplot as plt
from numpy import fft, log, abs, angle
f = maputils.FITSimage("m101.fits")
yshift = -0.1
fig = plt.figure(figsize=(8,6))
fig.subplots_adjust(left=0.01, bottom=0.1, right=1.0, top=0.98,
wspace=0.03, hspace=0.16)
frame = fig.add_subplot(2,3,1)
frame.text(0.5, yshift, "M101", ha='center', va='center',
transform = frame.transAxes)
mplim = f.Annotatedimage(frame, cmap="spectral")
mplim.Image()
fftA = fft.rfft2(f.dat, f.dat.shape)
frame = fig.add_subplot(2,3,2)
frame.text(0.5, yshift, "Amplitude of FFT", ha='center', va='center',
transform = frame.transAxes)
f = maputils.FITSimage("m101.fits", externaldata=log(abs(fftA)+1.0))
mplim2 = f.Annotatedimage(frame, cmap="gray")
im = mplim2.Image()
frame = fig.add_subplot(2,3,3)
frame.text(0.5, yshift, "Phase of FFT", ha='center', va='center',
transform = frame.transAxes)
f = maputils.FITSimage("m101.fits", externaldata=angle(fftA))
mplim3 = f.Annotatedimage(frame, cmap="gray")
im = mplim3.Image()
frame = fig.add_subplot(2,3,4)
frame.text(0.5, yshift, "Inverse FFT", ha='center', va='center',
transform = frame.transAxes)
D = fft.irfft2(fftA)
f = maputils.FITSimage("m101.fits", externaldata=D.real)
mplim4 = f.Annotatedimage(frame, cmap="spectral")
im = mplim4.Image()
frame = fig.add_subplot(2,3,5)
Diff = D.real - mplim.data
f = maputils.FITSimage("m101.fits", externaldata=Diff)
mplim5 = f.Annotatedimage(frame, cmap="spectral")
im = mplim5.Image()
frame.text(0.5, yshift, "M101 - inv. FFT", ha='center', va='center',
transform = frame.transAxes)
s = "Residual with min=%.1g max=%.1g" % (Diff.min(), Diff.max())
frame.text(0.5, yshift-0.08, s, ha='center', va='center',
transform = frame.transAxes, fontsize=8)
mplim.interact_imagecolors()
mplim2.interact_imagecolors()
mplim3.interact_imagecolors()
mplim4.interact_imagecolors()
mplim5.interact_imagecolors()
maputils.showall()
| from kapteyn import maputils
from matplotlib import pyplot as plt
from numpy import fft, log, abs, angle
f = maputils.FITSimage("m101.fits")
yshift = -0.1
fig = plt.figure(figsize=(8,6))
fig.subplots_adjust(left=0.01, bottom=0.1, right=1.0, top=0.98,
wspace=0.03, hspace=0.16)
frame = fig.add_subplot(2,3,1)
frame.text(0.5, yshift, "M101", ha='center', va='center',
transform = frame.transAxes)
mplim = f.Annotatedimage(frame, cmap="spectral")
mplim.Image()
fftA = fft.rfft2(f.dat, f.dat.shape)
frame = fig.add_subplot(2,3,2)
frame.text(0.5, yshift, "Amplitude of FFT", ha='center', va='center',
transform = frame.transAxes)
f = maputils.FITSimage("m101.fits", externaldata=log(abs(fftA)+1.0))
mplim2 = f.Annotatedimage(frame, cmap="gray")
im = mplim2.Image()
frame = fig.add_subplot(2,3,3)
frame.text(0.5, yshift, "Phase of FFT", ha='center', va='center',
transform = frame.transAxes)
f = maputils.FITSimage("m101.fits", externaldata=angle(fftA))
mplim3 = f.Annotatedimage(frame, cmap="gray")
im = mplim3.Image()
frame = fig.add_subplot(2,3,4)
frame.text(0.5, yshift, "Inverse FFT", ha='center', va='center',
transform = frame.transAxes)
D = fft.irfft2(fftA)
f = maputils.FITSimage("m101.fits", externaldata=D.real)
mplim4 = f.Annotatedimage(frame, cmap="spectral")
im = mplim4.Image()
frame = fig.add_subplot(2,3,5)
Diff = D.real - mplim.data
f = maputils.FITSimage("m101.fits", externaldata=Diff)
mplim5 = f.Annotatedimage(frame, cmap="spectral")
im = mplim5.Image()
frame.text(0.5, yshift, "M101 - inv. FFT", ha='center', va='center',
transform = frame.transAxes)
s = "Residual with min=%.1g max=%.1g" % (Diff.min(), Diff.max())
frame.text(0.5, yshift-0.08, s, ha='center', va='center',
transform = frame.transAxes, fontsize=8)
mplim.interact_imagecolors()
mplim2.interact_imagecolors()
mplim3.interact_imagecolors()
mplim4.interact_imagecolors()
mplim5.interact_imagecolors()
maputils.showall()
| none | 1 | 2.584414 | 3 | |
tests/utils/test_permutation.py | hadware/pyannote-audio | 1 | 6617866 | <reponame>hadware/pyannote-audio
import numpy as np
import torch
from pyannote.audio.utils.permutation import permutate
def test_permutate_torch():
num_frames, num_speakers = 10, 3
actual_permutations = [
(0, 1, 2),
(0, 2, 1),
(1, 0, 2),
(1, 2, 0),
(2, 0, 1),
(2, 1, 0),
]
batch_size = len(actual_permutations)
y2 = torch.randn((num_frames, num_speakers))
y1 = torch.zeros((batch_size, num_frames, num_speakers))
for p, permutation in enumerate(actual_permutations):
y1[p] = y2[:, permutation]
permutated_y2, permutations = permutate(y1, y2)
assert actual_permutations == permutations
for p, permutation in enumerate(actual_permutations):
np.testing.assert_allclose(permutated_y2[p], y2[:, permutation])
def test_permutate_numpy():
num_frames, num_speakers = 10, 3
actual_permutations = [
(0, 1, 2),
(0, 2, 1),
(1, 0, 2),
(1, 2, 0),
(2, 0, 1),
(2, 1, 0),
]
batch_size = len(actual_permutations)
y2 = np.random.randn(num_frames, num_speakers)
y1 = np.zeros((batch_size, num_frames, num_speakers))
for p, permutation in enumerate(actual_permutations):
y1[p] = y2[:, permutation]
permutated_y2, permutations = permutate(y1, y2)
assert actual_permutations == permutations
for p, permutation in enumerate(actual_permutations):
np.testing.assert_allclose(permutated_y2[p], y2[:, permutation])
def test_permutate_less_speakers():
num_frames = 10
actual_permutations = [
(0, 1, None),
(0, None, 1),
(1, 0, None),
(1, None, 0),
(None, 0, 1),
(None, 1, 0),
]
batch_size = len(actual_permutations)
y2 = np.random.randn(num_frames, 2)
y1 = np.zeros((batch_size, num_frames, 3))
for p, permutation in enumerate(actual_permutations):
for i, j in enumerate(permutation):
if j is not None:
y1[p, :, i] = y2[:, j]
permutated_y2, permutations = permutate(y1, y2)
assert permutations == actual_permutations
def test_permutate_more_speakers():
num_frames = 10
actual_permutations = [
(0, 1),
(0, 2),
(1, 0),
(1, 2),
(2, 0),
(2, 1),
]
batch_size = len(actual_permutations)
y2 = np.random.randn(num_frames, 3)
y1 = np.zeros((batch_size, num_frames, 2))
for p, permutation in enumerate(actual_permutations):
for i, j in enumerate(permutation):
y1[p, :, i] = y2[:, j]
permutated_y2, permutations = permutate(y1, y2)
assert permutations == actual_permutations
np.testing.assert_allclose(permutated_y2, y1)
| import numpy as np
import torch
from pyannote.audio.utils.permutation import permutate
def test_permutate_torch():
num_frames, num_speakers = 10, 3
actual_permutations = [
(0, 1, 2),
(0, 2, 1),
(1, 0, 2),
(1, 2, 0),
(2, 0, 1),
(2, 1, 0),
]
batch_size = len(actual_permutations)
y2 = torch.randn((num_frames, num_speakers))
y1 = torch.zeros((batch_size, num_frames, num_speakers))
for p, permutation in enumerate(actual_permutations):
y1[p] = y2[:, permutation]
permutated_y2, permutations = permutate(y1, y2)
assert actual_permutations == permutations
for p, permutation in enumerate(actual_permutations):
np.testing.assert_allclose(permutated_y2[p], y2[:, permutation])
def test_permutate_numpy():
num_frames, num_speakers = 10, 3
actual_permutations = [
(0, 1, 2),
(0, 2, 1),
(1, 0, 2),
(1, 2, 0),
(2, 0, 1),
(2, 1, 0),
]
batch_size = len(actual_permutations)
y2 = np.random.randn(num_frames, num_speakers)
y1 = np.zeros((batch_size, num_frames, num_speakers))
for p, permutation in enumerate(actual_permutations):
y1[p] = y2[:, permutation]
permutated_y2, permutations = permutate(y1, y2)
assert actual_permutations == permutations
for p, permutation in enumerate(actual_permutations):
np.testing.assert_allclose(permutated_y2[p], y2[:, permutation])
def test_permutate_less_speakers():
num_frames = 10
actual_permutations = [
(0, 1, None),
(0, None, 1),
(1, 0, None),
(1, None, 0),
(None, 0, 1),
(None, 1, 0),
]
batch_size = len(actual_permutations)
y2 = np.random.randn(num_frames, 2)
y1 = np.zeros((batch_size, num_frames, 3))
for p, permutation in enumerate(actual_permutations):
for i, j in enumerate(permutation):
if j is not None:
y1[p, :, i] = y2[:, j]
permutated_y2, permutations = permutate(y1, y2)
assert permutations == actual_permutations
def test_permutate_more_speakers():
num_frames = 10
actual_permutations = [
(0, 1),
(0, 2),
(1, 0),
(1, 2),
(2, 0),
(2, 1),
]
batch_size = len(actual_permutations)
y2 = np.random.randn(num_frames, 3)
y1 = np.zeros((batch_size, num_frames, 2))
for p, permutation in enumerate(actual_permutations):
for i, j in enumerate(permutation):
y1[p, :, i] = y2[:, j]
permutated_y2, permutations = permutate(y1, y2)
assert permutations == actual_permutations
np.testing.assert_allclose(permutated_y2, y1) | none | 1 | 2.466307 | 2 | |
projectpredict/learningmodels/__init__.py | JustinTervala/ProjectPredict | 2 | 6617867 | <reponame>JustinTervala/ProjectPredict
from .scikit import GaussianProcessRegressorModel | from .scikit import GaussianProcessRegressorModel | none | 1 | 1.035151 | 1 | |
toapi/storage/__init__.py | davidthewatson/toapi | 5 | 6617868 | <reponame>davidthewatson/toapi
from .storage import Storage
| from .storage import Storage | none | 1 | 1.087655 | 1 | |
medium/593_valid_square.py | niki4/leetcode_py3 | 0 | 6617869 | <gh_stars>0
"""
Given the coordinates of four points in 2D space p1, p2, p3 and p4, return true if the four points construct a square.
The coordinate of a point pi is represented as [xi, yi]. The input is not given in any order.
A valid square has four equal sides with positive length and four equal angles (90-degree angles).
Example 1:
Input: p1 = [0,0], p2 = [1,1], p3 = [1,0], p4 = [0,1]
Output: true
Example 2:
Input: p1 = [0,0], p2 = [1,1], p3 = [1,0], p4 = [0,12]
Output: false
Example 3:
Input: p1 = [1,0], p2 = [-1,0], p3 = [0,1], p4 = [0,-1]
Output: true
Constraints:
p1.length == p2.length == p3.length == p4.length == 2
-104 <= xi, yi <= 104
"""
import collections
from typing import List
class Solution:
"""
Algorithm: sort the points and figure out where the last two points should be with the coordinates of the first two.
Runtime: 32 ms, faster than 78.95% of Python3
Memory Usage: 14.4 MB, less than 11.12% of Python3
Time complexity : O(1). Sorting 4 points takes constant time.
Space complexity : O(1). Constant space is required.
"""
def validSquare(self, p1: List[int], p2: List[int], p3: List[int], p4: List[int]) -> bool:
if p1 == p2 == p3 == p4:
return False
p1, p2, p3, p4 = sorted([p1, p2, p3, p4])
if p2[1] < p3[1]:
p2, p3 = p3, p2
return (p3 == [p1[0] + (p2[1] - p1[1]), p1[1] - (p2[0] - p1[0])] and
p4 == [p2[0] + (p2[1] - p1[1]), p2[1] - (p2[0] - p1[0])])
class Solution2:
"""
Math solution from
https://leetcode.com/problems/valid-square/discuss/103482/Python-Straightforward-with-Explanation
"Suppose points ABCD have pairwise distances in sorted order S, S, S, S, S*sqrt(2), S*sqrt(2).
We want to show ABCD is a square. Let us call S a "small side" and S*sqrt(2) a "large side".
Without loss of generality, suppose AC is a large side. If BD is a large side, then AB and BC are small sides,
so B lies on the intersection of circles between A and C; similarly, D lies on the same intersection, and thus
ABCD is a square (as two different circles of the same radius only intersect in two different points)."
Runtime: 36 ms, faster than 52.15% of Python3
Memory Usage: 14.2 MB, less than 89.11% of Python3
"""
def dist(self, p1: List[int], p2: List[int]) -> int:
return (p2[0] - p1[0]) ** 2 + (p2[1] - p1[1]) ** 2
def validSquare(self, p1: List[int], p2: List[int], p3: List[int], p4: List[int]) -> bool:
D = [self.dist(p1, p2), self.dist(p1, p3), self.dist(p1, p4),
self.dist(p2, p3), self.dist(p2, p4), self.dist(p3, p4)]
D.sort()
# For input [0, 0], [1, 1], [1, 0], [0, 1] result D will be [1, 1, 1, 1, 2, 2] == True
# For input [1, 1], [5, 3], [3, 5], [7, 7] D will be [8, 20, 20, 20, 20, 72] == False
return 0 < D[0] == D[1] == D[2] == D[3] and 2 * D[0] == D[4] == D[5]
class Solution3:
"""
For valid square number of unique distances should be 2 (so 4 for sides and 2 for diagonals).
"""
def dist(self, p1: List[int], p2: List[int]) -> int:
return (p2[0] - p1[0]) ** 2 + (p2[1] - p1[1]) ** 2
def validSquare(self, p1: List[int], p2: List[int], p3: List[int], p4: List[int]) -> bool:
points = (p1, p2, p3, p4)
dists = collections.defaultdict(int)
for i in range(len(points)):
for j in range(i, len(points)):
dists[self.dist(points[i], points[j])] += 1
return set(dists.values()) == {4, 2}
if __name__ == '__main__':
solutions = [Solution(), Solution2(), Solution3()]
tc = (
([0, 0], [1, 1], [1, 0], [0, 1], True),
([0, 0], [1, 1], [1, 0], [0, 12], False),
([1, 0], [-1, 0], [0, 1], [0, -1], True),
([1, 1], [5, 3], [3, 5], [7, 7], False),
)
for sol in solutions:
for p1_, p2_, p3_, p4_, valid_square in tc:
res = sol.validSquare(p1_, p2_, p3_, p4_)
assert res is valid_square, f"{sol.__class__.__name__}: for input [{p1_}, {p2_}, {p3_}, {p4_}] " \
f"expected res: {valid_square}, got {res}"
| """
Given the coordinates of four points in 2D space p1, p2, p3 and p4, return true if the four points construct a square.
The coordinate of a point pi is represented as [xi, yi]. The input is not given in any order.
A valid square has four equal sides with positive length and four equal angles (90-degree angles).
Example 1:
Input: p1 = [0,0], p2 = [1,1], p3 = [1,0], p4 = [0,1]
Output: true
Example 2:
Input: p1 = [0,0], p2 = [1,1], p3 = [1,0], p4 = [0,12]
Output: false
Example 3:
Input: p1 = [1,0], p2 = [-1,0], p3 = [0,1], p4 = [0,-1]
Output: true
Constraints:
p1.length == p2.length == p3.length == p4.length == 2
-104 <= xi, yi <= 104
"""
import collections
from typing import List
class Solution:
"""
Algorithm: sort the points and figure out where the last two points should be with the coordinates of the first two.
Runtime: 32 ms, faster than 78.95% of Python3
Memory Usage: 14.4 MB, less than 11.12% of Python3
Time complexity : O(1). Sorting 4 points takes constant time.
Space complexity : O(1). Constant space is required.
"""
def validSquare(self, p1: List[int], p2: List[int], p3: List[int], p4: List[int]) -> bool:
if p1 == p2 == p3 == p4:
return False
p1, p2, p3, p4 = sorted([p1, p2, p3, p4])
if p2[1] < p3[1]:
p2, p3 = p3, p2
return (p3 == [p1[0] + (p2[1] - p1[1]), p1[1] - (p2[0] - p1[0])] and
p4 == [p2[0] + (p2[1] - p1[1]), p2[1] - (p2[0] - p1[0])])
class Solution2:
"""
Math solution from
https://leetcode.com/problems/valid-square/discuss/103482/Python-Straightforward-with-Explanation
"Suppose points ABCD have pairwise distances in sorted order S, S, S, S, S*sqrt(2), S*sqrt(2).
We want to show ABCD is a square. Let us call S a "small side" and S*sqrt(2) a "large side".
Without loss of generality, suppose AC is a large side. If BD is a large side, then AB and BC are small sides,
so B lies on the intersection of circles between A and C; similarly, D lies on the same intersection, and thus
ABCD is a square (as two different circles of the same radius only intersect in two different points)."
Runtime: 36 ms, faster than 52.15% of Python3
Memory Usage: 14.2 MB, less than 89.11% of Python3
"""
def dist(self, p1: List[int], p2: List[int]) -> int:
return (p2[0] - p1[0]) ** 2 + (p2[1] - p1[1]) ** 2
def validSquare(self, p1: List[int], p2: List[int], p3: List[int], p4: List[int]) -> bool:
D = [self.dist(p1, p2), self.dist(p1, p3), self.dist(p1, p4),
self.dist(p2, p3), self.dist(p2, p4), self.dist(p3, p4)]
D.sort()
# For input [0, 0], [1, 1], [1, 0], [0, 1] result D will be [1, 1, 1, 1, 2, 2] == True
# For input [1, 1], [5, 3], [3, 5], [7, 7] D will be [8, 20, 20, 20, 20, 72] == False
return 0 < D[0] == D[1] == D[2] == D[3] and 2 * D[0] == D[4] == D[5]
class Solution3:
"""
For valid square number of unique distances should be 2 (so 4 for sides and 2 for diagonals).
"""
def dist(self, p1: List[int], p2: List[int]) -> int:
return (p2[0] - p1[0]) ** 2 + (p2[1] - p1[1]) ** 2
def validSquare(self, p1: List[int], p2: List[int], p3: List[int], p4: List[int]) -> bool:
points = (p1, p2, p3, p4)
dists = collections.defaultdict(int)
for i in range(len(points)):
for j in range(i, len(points)):
dists[self.dist(points[i], points[j])] += 1
return set(dists.values()) == {4, 2}
if __name__ == '__main__':
solutions = [Solution(), Solution2(), Solution3()]
tc = (
([0, 0], [1, 1], [1, 0], [0, 1], True),
([0, 0], [1, 1], [1, 0], [0, 12], False),
([1, 0], [-1, 0], [0, 1], [0, -1], True),
([1, 1], [5, 3], [3, 5], [7, 7], False),
)
for sol in solutions:
for p1_, p2_, p3_, p4_, valid_square in tc:
res = sol.validSquare(p1_, p2_, p3_, p4_)
assert res is valid_square, f"{sol.__class__.__name__}: for input [{p1_}, {p2_}, {p3_}, {p4_}] " \
f"expected res: {valid_square}, got {res}" | en | 0.831744 | Given the coordinates of four points in 2D space p1, p2, p3 and p4, return true if the four points construct a square. The coordinate of a point pi is represented as [xi, yi]. The input is not given in any order. A valid square has four equal sides with positive length and four equal angles (90-degree angles). Example 1: Input: p1 = [0,0], p2 = [1,1], p3 = [1,0], p4 = [0,1] Output: true Example 2: Input: p1 = [0,0], p2 = [1,1], p3 = [1,0], p4 = [0,12] Output: false Example 3: Input: p1 = [1,0], p2 = [-1,0], p3 = [0,1], p4 = [0,-1] Output: true Constraints: p1.length == p2.length == p3.length == p4.length == 2 -104 <= xi, yi <= 104 Algorithm: sort the points and figure out where the last two points should be with the coordinates of the first two. Runtime: 32 ms, faster than 78.95% of Python3 Memory Usage: 14.4 MB, less than 11.12% of Python3 Time complexity : O(1). Sorting 4 points takes constant time. Space complexity : O(1). Constant space is required. Math solution from https://leetcode.com/problems/valid-square/discuss/103482/Python-Straightforward-with-Explanation "Suppose points ABCD have pairwise distances in sorted order S, S, S, S, S*sqrt(2), S*sqrt(2). We want to show ABCD is a square. Let us call S a "small side" and S*sqrt(2) a "large side". Without loss of generality, suppose AC is a large side. If BD is a large side, then AB and BC are small sides, so B lies on the intersection of circles between A and C; similarly, D lies on the same intersection, and thus ABCD is a square (as two different circles of the same radius only intersect in two different points)." 
Runtime: 36 ms, faster than 52.15% of Python3 Memory Usage: 14.2 MB, less than 89.11% of Python3 # For input [0, 0], [1, 1], [1, 0], [0, 1] result D will be [1, 1, 1, 1, 2, 2] == True # For input [1, 1], [5, 3], [3, 5], [7, 7] D will be [8, 20, 20, 20, 20, 72] == False For valid square number of unique distances should be 2 (so 4 for sides and 2 for diagonals). | 3.614943 | 4 |
test_cplane_np.py | chapman-cs510-2017f/cw-06-alyandcyndi | 0 | 6617870 | <gh_stars>0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
###
# Name: <NAME>, <NAME>
# Student ID: 1923165, 2303535
# Email: <EMAIL>, <EMAIL>
# Course: CS510 Fall 2017
# Assignment: Classwork 6
###
import cplane_np as cplane
import numpy as np
def test_cplanenp():
result=cplane.ArrayComplexPlane(0,10,2,0,8,2)
print(result)
correct=[[ 0.+0.j, 10.+0.j],[ 0.+8.j, 10.+8.j]]
print(correct)
np.testing.assert_almost_equal(result.plane,correct)
def f(z):
return z+1
def test_apply():
result2=cplane.ArrayComplexPlane(0,10,2,0,8,2)
print(result2)
correct2=[[ 0.+0.j, 10.+0.j],[ 0.+8.j, 10.+8.j]]
print(correct2)
np.testing.assert_almost_equal(result2.plane,correct2)
result3=result2.apply(f)
print(result3)
correct3=[[ 1.+0.j, 11.+0.j],[ 1.+8.j, 11.+8.j]]
print(correct3)
np.testing.assert_almost_equal(result3.plane,correct3) | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
###
# Name: <NAME>, <NAME>
# Student ID: 1923165, 2303535
# Email: <EMAIL>, <EMAIL>
# Course: CS510 Fall 2017
# Assignment: Classwork 6
###
import cplane_np as cplane
import numpy as np
def test_cplanenp():
result=cplane.ArrayComplexPlane(0,10,2,0,8,2)
print(result)
correct=[[ 0.+0.j, 10.+0.j],[ 0.+8.j, 10.+8.j]]
print(correct)
np.testing.assert_almost_equal(result.plane,correct)
def f(z):
return z+1
def test_apply():
result2=cplane.ArrayComplexPlane(0,10,2,0,8,2)
print(result2)
correct2=[[ 0.+0.j, 10.+0.j],[ 0.+8.j, 10.+8.j]]
print(correct2)
np.testing.assert_almost_equal(result2.plane,correct2)
result3=result2.apply(f)
print(result3)
correct3=[[ 1.+0.j, 11.+0.j],[ 1.+8.j, 11.+8.j]]
print(correct3)
np.testing.assert_almost_equal(result3.plane,correct3) | en | 0.412331 | #!/usr/bin/env python3 # -*- coding: utf-8 -*- ### # Name: <NAME>, <NAME> # Student ID: 1923165, 2303535 # Email: <EMAIL>, <EMAIL> # Course: CS510 Fall 2017 # Assignment: Classwork 6 ### | 2.895954 | 3 |
yatube_api/api/serializers.py | kotofey97/api_yatube | 2 | 6617871 | from rest_framework import serializers
from rest_framework.serializers import SlugRelatedField
from posts.models import Comment, Group, Post
class PostSerializer(serializers.ModelSerializer):
author = SlugRelatedField(read_only=True, slug_field='username')
group = SlugRelatedField(slug_field='slug',
queryset=Group.objects.all(),
required=False)
class Meta:
model = Post
fields = '__all__'
class GroupSerializer(serializers.ModelSerializer):
class Meta:
model = Group
fields = '__all__'
class CommentSerializer(serializers.ModelSerializer):
author = SlugRelatedField(read_only=True, slug_field='username')
class Meta:
model = Comment
fields = '__all__'
| from rest_framework import serializers
from rest_framework.serializers import SlugRelatedField
from posts.models import Comment, Group, Post
class PostSerializer(serializers.ModelSerializer):
author = SlugRelatedField(read_only=True, slug_field='username')
group = SlugRelatedField(slug_field='slug',
queryset=Group.objects.all(),
required=False)
class Meta:
model = Post
fields = '__all__'
class GroupSerializer(serializers.ModelSerializer):
class Meta:
model = Group
fields = '__all__'
class CommentSerializer(serializers.ModelSerializer):
author = SlugRelatedField(read_only=True, slug_field='username')
class Meta:
model = Comment
fields = '__all__'
| none | 1 | 2.137342 | 2 | |
os3/fs/directory.py | Nekmo/gradale | 1 | 6617872 | <filename>os3/fs/directory.py<gh_stars>1-10
# -*- coding: utf-8 -*-
import os
import shutil
import six
from colorama import Fore, Style
from os3.core.list import init_tree, Os3List
from os3.fs.entry import Entry
from os3.utils.nodes import deep_scandir
def name_id_parent_fn(item):
parent = item.parent()
parent = parent.path if parent is not None else None
return item.name, item.path, parent
def init_dir_tree(directory, *args):
directory = directory.clone()
directory.root = directory.path
directories = list(sorted(directory, key=lambda x: x.depth()))
return init_tree([directory] + directories, name_id_parent_fn)
if six.PY3:
LS_EXCEPTIONS = (PermissionError, OSError)
else:
LS_EXCEPTIONS = (OSError,)
class Dir(Entry):
_type = 'directory'
mimetype = 'inode/directory'
@classmethod
def get_dir_list_class(cls):
return DirList
def copy(self, dst, symlinks=False, ignore=None):
shutil.copytree(self.path, os.path.expanduser(dst), symlinks, ignore)
def ls(self, depth=None, fail=False, **kwargs):
return self.get_dir_list_class()(self.path, depth, fail, **kwargs)
def mkdir(self, name, exist_ok=True):
subdirectory = self.sub(name)
if not subdirectory.lexists() or not exist_ok:
os.mkdir(subdirectory.path)
return subdirectory
def remove(self):
return shutil.rmtree(self.path)
def print_format(self):
return '{Fore.BLUE}{name}{Style.RESET_ALL}'.format(name=self.name, Fore=Fore, Style=Style)
class DirList(Dir, Os3List):
_pre_filters = None
__interfaces__ = ['name']
__clone_params__ = ['path', 'depth']
_ls = None
def __init__(self, path=None, depth=None, fail=False, **kwargs):
# TODO: renombrar depth a depth
path = path or os.getcwd()
super(DirList, self).__init__(path)
self.depth = depth
self.fail = fail
self.root = kwargs.pop('root', None)
self.default_format = kwargs.pop('default_format', self.default_format)
self._pre_filters = kwargs
def _get_iter(self):
return deep_scandir(self.path, self.depth, cls=self.get_entry_class(), filter=self._filter,
traverse_filter=self._traverse_filter, exceptions=self._get_catched_exceptions())
# return iter(os.listdir(self.path))
def _get_catched_exceptions(self):
return LS_EXCEPTIONS if not self.fail else ()
def _prepare_next(self, elem):
return self.get_entry_class().get_node(elem.path)
# return Node.get_node(os.path.join(self.path, elem))
def _filter(self, elem):
return elem.check_filters(**self._pre_filters or {}) and elem.check_filters(**self._dict_filters or {})
def _traverse_filter(self, elem):
return elem.check_filters(**self._pre_filters or {})
def tree_format(self, roots=None, fn_tree=None, roots_filter_fn=None):
return super(DirList, self).tree_format([self], init_dir_tree)
def print_format(self):
return Os3List.print_format(self)
def remove(self):
for item in self:
item.remove()
| <filename>os3/fs/directory.py<gh_stars>1-10
# -*- coding: utf-8 -*-
import os
import shutil
import six
from colorama import Fore, Style
from os3.core.list import init_tree, Os3List
from os3.fs.entry import Entry
from os3.utils.nodes import deep_scandir
def name_id_parent_fn(item):
parent = item.parent()
parent = parent.path if parent is not None else None
return item.name, item.path, parent
def init_dir_tree(directory, *args):
directory = directory.clone()
directory.root = directory.path
directories = list(sorted(directory, key=lambda x: x.depth()))
return init_tree([directory] + directories, name_id_parent_fn)
if six.PY3:
LS_EXCEPTIONS = (PermissionError, OSError)
else:
LS_EXCEPTIONS = (OSError,)
class Dir(Entry):
_type = 'directory'
mimetype = 'inode/directory'
@classmethod
def get_dir_list_class(cls):
return DirList
def copy(self, dst, symlinks=False, ignore=None):
shutil.copytree(self.path, os.path.expanduser(dst), symlinks, ignore)
def ls(self, depth=None, fail=False, **kwargs):
return self.get_dir_list_class()(self.path, depth, fail, **kwargs)
def mkdir(self, name, exist_ok=True):
subdirectory = self.sub(name)
if not subdirectory.lexists() or not exist_ok:
os.mkdir(subdirectory.path)
return subdirectory
def remove(self):
return shutil.rmtree(self.path)
def print_format(self):
return '{Fore.BLUE}{name}{Style.RESET_ALL}'.format(name=self.name, Fore=Fore, Style=Style)
class DirList(Dir, Os3List):
_pre_filters = None
__interfaces__ = ['name']
__clone_params__ = ['path', 'depth']
_ls = None
def __init__(self, path=None, depth=None, fail=False, **kwargs):
# TODO: renombrar depth a depth
path = path or os.getcwd()
super(DirList, self).__init__(path)
self.depth = depth
self.fail = fail
self.root = kwargs.pop('root', None)
self.default_format = kwargs.pop('default_format', self.default_format)
self._pre_filters = kwargs
def _get_iter(self):
return deep_scandir(self.path, self.depth, cls=self.get_entry_class(), filter=self._filter,
traverse_filter=self._traverse_filter, exceptions=self._get_catched_exceptions())
# return iter(os.listdir(self.path))
def _get_catched_exceptions(self):
return LS_EXCEPTIONS if not self.fail else ()
def _prepare_next(self, elem):
return self.get_entry_class().get_node(elem.path)
# return Node.get_node(os.path.join(self.path, elem))
def _filter(self, elem):
return elem.check_filters(**self._pre_filters or {}) and elem.check_filters(**self._dict_filters or {})
def _traverse_filter(self, elem):
return elem.check_filters(**self._pre_filters or {})
def tree_format(self, roots=None, fn_tree=None, roots_filter_fn=None):
return super(DirList, self).tree_format([self], init_dir_tree)
def print_format(self):
return Os3List.print_format(self)
def remove(self):
for item in self:
item.remove()
| en | 0.084553 | # -*- coding: utf-8 -*- # TODO: renombrar depth a depth # return iter(os.listdir(self.path)) # return Node.get_node(os.path.join(self.path, elem)) | 2.285899 | 2 |
pyserver/app/views.py | yabirgb/linksbot | 6 | 6617873 | import os
import requests
import json
from flask import (Flask,render_template, redirect, request, abort, jsonify)
from peewee import *
from playhouse.shortcuts import model_to_dict
from htmlmin.minify import html_minify
import peeweedbevolve
import pyotp
from telegram_bot.models import User, Link, Map, Message
app = Flask(__name__)
DB_NAME = os.environ.get("DB", None)
DB_USER = os.environ.get("DBUSER", None)
DB_PASS = os.environ.get("DBPASS", None)
DB_HOST = os.environ.get("DBHOST", None)
db = PostgresqlDatabase(
DB_NAME, # Required by Peewee.
user=DB_USER, # Will be passed directly to psycopg2.
password=<PASSWORD>, # Ditto.
host=DB_HOST, # Ditto.
)
TOKEN = os.environ.get("OTP", "<PASSWORD>")
hotp = pyotp.HOTP(TOKEN)
POCKET = os.environ.get("POCKET", None)
BASE_URL = os.environ.get("BASE_URL", "http://localhost:8000")
REDIRECT_URL = BASE_URL + '/auth/{}'
headers = {'Content-Type' : 'application/json; charset=UTF-8','X-Accept': 'application/json'}
@app.template_filter('urls_completer')
def urls_completer(url):
return url if "://" in url else "//" + url
@app.route('/')
@app.route('/index')
def index():
html = render_template('index.html',title='Home')
return html_minify(html)
@app.route('/changelog')
def changelog():
html = render_template('changelog.html',title='changelog')
return html_minify(html)
@app.route('/secret/<secret>/<code>')
def user_links(secret, code):
"""Fetch links for an user.
Args:
Secret: A secret code generated by uuid that identifies the user.
Code: An OTP code generated by telegram app.
Returns:
Minified version of the list with links, maps and messages if the
secret matchs the code at the moment.
"""
try:
user = User.get(User.secret==secret)
except:
return "403 - Secret code invalid"
print(code, user.authCode)
if hotp.verify(code, user.authCode):
links =Link.select().join(User).where(User.secret==secret).dicts()
maps = [model_to_dict(x, extra_attrs=["maps"], exclude=["latitude", "longitude"],
recurse=False) for x in Map.select().join(User).where(User.secret==secret)]
messages = Message.select().join(User).where(User.secret==secret).dicts()
links_json = json.dumps({'data':list(links)},sort_keys=True, default=str)
maps_json = json.dumps({'data':list(maps)},sort_keys=True, default=str)
messages_json = json.dumps({'data':list(messages)},sort_keys=True, default=str)
html = render_template('links.html',
user=user, urls=links_json, maps=maps_json,
messages=messages_json, secret=secret , code=code)
return html_minify(html)
else:
return "403 - Auth code error"
@app.route('/search/', methods=['GET', 'POST'])
def user_search():
    """Redirect a pasted access code to the /secret/ page.

    NOTE(review): the /secret/ route takes two path segments
    (<secret>/<code>) but only one value is interpolated here --
    presumably the submitted "code" query parameter already contains
    "secret/otp" combined; confirm against the search form template.
    """
    if request.args.get("code"):
        return redirect("/secret/{}".format(request.args.get("code")))
    return render_template('search.html')
@app.route('/about/')
def about():
    """Render the static about page."""
    return render_template('about.html')
@app.route('/pocket/<tid>')
def get_pocket(tid):
    """Get a request token from the Pocket API to start the OAuth login.

    Args:
        tid: Telegram user id that identifies the user.

    Returns:
        A redirect to Pocket's authorize page on success, or an error
        message / upstream status code otherwise.

    Fixes vs. original: an unknown telegram id no longer raises an
    unhandled ``User.DoesNotExist`` (HTTP 500), and the debug ``print``
    that wrote the OAuth request token to the logs is removed.
    """
    try:
        user = User.get(User.telegramId == tid)
    except User.DoesNotExist:
        return "403 - Unknown user"
    if user.pocket_configured == True:
        return "Already configured"
    r_url = REDIRECT_URL.format(tid)
    payload = dict(consumer_key=POCKET, redirect_uri=r_url)
    r = requests.post('https://getpocket.com/v3/oauth/request',
                      data=json.dumps(payload), headers=headers)
    if r.status_code != 200:
        return str(r.status_code)
    # Stash the request token until Pocket calls back to /auth/<tid>.
    code = r.json()["code"]
    user.pocket_Token = code
    user.save()
    auth_url = "https://getpocket.com/auth/authorize?request_token={}&redirect_uri={}".format(code, r_url)
    return redirect(auth_url)
@app.route('/auth/<tid>')
def auth(tid):
    """Authorize the user on the Pocket app (OAuth callback).

    Args:
        tid: Telegram user id that identifies the user.

    Returns:
        Redirect to the main page on success, otherwise an error message
        or the upstream status code.

    Fixes vs. original: ``print(r.json)`` printed the bound method object
    instead of the response payload; the user row fetched at the top is
    reused instead of being queried a second time; and the access token
    is no longer printed to the logs.
    """
    try:
        user = User.get(User.telegramId == tid)
    except User.DoesNotExist:
        return "403 - Unknown user"
    if user.pocket_configured == True:
        return "Pocket already configured"
    code = user.pocket_Token
    payload = {"code": code, "consumer_key": POCKET}
    r = requests.post("https://getpocket.com/v3/oauth/authorize",
                      data=json.dumps(payload), headers=headers)
    if r.status_code == 200:
        data = r.json()
        user.pocket_Token = data["access_token"]
        user.pocket_configured = True
        user.save()
        return redirect("/")
    # Keep the headers around for debugging failed authorizations.
    print(r.headers)
    return str(r.status_code)
@app.before_request
def _db_connect():
    """Open (or reuse) the database connection for this request."""
    print("open")
    # NOTE(review): Database.get_conn() is the peewee 2.x API; peewee 3
    # replaced it with connect()/connection(). Confirm the pinned version.
    db.get_conn()
# This hook ensures that the connection is closed when we've finished
# processing the request.
@app.teardown_request
def _db_close(exc):
    """Close the per-request DB connection once the response is done."""
    print("closed")
    if db.is_closed():
        return
    db.close()
# ========
# Api
# ========
@app.route('/api/<secret>/<code>/<obj>/<pk>')
def toggle_show(secret, code, obj, pk):
    """Toggle the 'reviewed' flag on a link, map or message.

    Args:
        secret: The user's uuid secret.
        code: Current OTP code.
        obj: One of "link", "maps", "messages".
        pk: Primary key of the object to toggle.

    Returns:
        For links, a JSON payload with the refreshed link list; for maps
        and messages the literal "ok"; "404" for an unknown object type
        and "403" on auth failure.

    Fixes vs. original: the bare ``except:`` is narrowed to
    ``User.DoesNotExist``, the unused local ``response_code`` is removed,
    and the auth check is a guard clause so each branch reads flat.
    """
    try:
        user = User.get(User.secret == secret)
    except User.DoesNotExist:
        return "403 - Secret code invalid"
    if not hotp.verify(code, user.authCode):
        return "403"
    if obj == "link":
        link = Link.get(Link.id == pk)
        link.reviewed = not link.reviewed
        link.save()
        links = Link.select().join(User).where(User.secret == secret).dicts()
        return jsonify(response_code=200, data=list(links))
    if obj == "maps":
        location = Map.get(Map.id == pk)
        location.reviewed = not location.reviewed
        location.save()
        return "ok"
    if obj == "messages":
        message = Message.get(Message.id == pk)
        message.reviewed = not message.reviewed
        message.save()
        return "ok"
    return "404"
| import os
import requests
import json
from flask import (Flask,render_template, redirect, request, abort, jsonify)
from peewee import *
from playhouse.shortcuts import model_to_dict
from htmlmin.minify import html_minify
import peeweedbevolve
import pyotp
from telegram_bot.models import User, Link, Map, Message
app = Flask(__name__)
DB_NAME = os.environ.get("DB", None)
DB_USER = os.environ.get("DBUSER", None)
DB_PASS = os.environ.get("DBPASS", None)
DB_HOST = os.environ.get("DBHOST", None)
db = PostgresqlDatabase(
DB_NAME, # Required by Peewee.
user=DB_USER, # Will be passed directly to psycopg2.
password=<PASSWORD>, # Ditto.
host=DB_HOST, # Ditto.
)
TOKEN = os.environ.get("OTP", "<PASSWORD>")
hotp = pyotp.HOTP(TOKEN)
POCKET = os.environ.get("POCKET", None)
BASE_URL = os.environ.get("BASE_URL", "http://localhost:8000")
REDIRECT_URL = BASE_URL + '/auth/{}'
headers = {'Content-Type' : 'application/json; charset=UTF-8','X-Accept': 'application/json'}
@app.template_filter('urls_completer')
def urls_completer(url):
return url if "://" in url else "//" + url
@app.route('/')
@app.route('/index')
def index():
html = render_template('index.html',title='Home')
return html_minify(html)
@app.route('/changelog')
def changelog():
html = render_template('changelog.html',title='changelog')
return html_minify(html)
@app.route('/secret/<secret>/<code>')
def user_links(secret, code):
"""Fetch links for an user.
Args:
Secret: A secret code generated by uuid that identifies the user.
Code: An OTP code generated by telegram app.
Returns:
Minified version of the list with links, maps and messages if the
secret matchs the code at the moment.
"""
try:
user = User.get(User.secret==secret)
except:
return "403 - Secret code invalid"
print(code, user.authCode)
if hotp.verify(code, user.authCode):
links =Link.select().join(User).where(User.secret==secret).dicts()
maps = [model_to_dict(x, extra_attrs=["maps"], exclude=["latitude", "longitude"],
recurse=False) for x in Map.select().join(User).where(User.secret==secret)]
messages = Message.select().join(User).where(User.secret==secret).dicts()
links_json = json.dumps({'data':list(links)},sort_keys=True, default=str)
maps_json = json.dumps({'data':list(maps)},sort_keys=True, default=str)
messages_json = json.dumps({'data':list(messages)},sort_keys=True, default=str)
html = render_template('links.html',
user=user, urls=links_json, maps=maps_json,
messages=messages_json, secret=secret , code=code)
return html_minify(html)
else:
return "403 - Auth code error"
@app.route('/search/', methods=['GET', 'POST'])
def user_search():
if request.args.get("code"):
return redirect("/secret/{}".format(request.args.get("code")))
return render_template('search.html')
@app.route('/about/')
def about():
return render_template('about.html')
@app.route('/pocket/<tid>')
def get_pocket(tid):
"""Get token from pocket api to login user
Args:
tid: Telegram user id that identifies user.
Returns:
Error code or redirects to the authorize page of pocket
"""
user = User.get(User.telegramId==tid)
if user.pocket_configured == True:
return "Already configured"
r_url = REDIRECT_URL.format(tid)
payload = dict(consumer_key=POCKET, redirect_uri=r_url)
r = requests.post('https://getpocket.com/v3/oauth/request', data=json.dumps(payload), headers=headers)
if r.status_code == 200:
code = r.json()["code"]
print(code)
user.pocket_Token = code
user.save()
auth_url = "https://getpocket.com/auth/authorize?request_token={}&redirect_uri={}".format(code, r_url)
return redirect(auth_url)
else:
return str(r.status_code)
@app.route('/auth/<tid>')
def auth(tid):
"""Authorize user on pocket app
Args:
tid: Telegram user id that identifies user.
Returns:
Error code or redirects to the main page if success
"""
user = User.get(User.telegramId==tid)
if user.pocket_configured == True:
return "Pocket already configured"
code = user.pocket_Token
payload = {"code":code, "consumer_key": POCKET}
r = requests.post("https://getpocket.com/v3/oauth/authorize", data=json.dumps(payload), headers=headers)
print(r.json)
if r.status_code == 200:
data = r.json()
user = User.get(User.telegramId==tid)
user.pocket_Token=data["access_token"]
user.pocket_configured=True
user.save()
print("Token: ", data["access_token"], "\n")
return redirect("/")
else:
print(r.headers)
return str(r.status_code)
@app.before_request
def _db_connect():
print("open")
db.get_conn()
# This hook ensures that the connection is closed when we've finished
# processing the request.
@app.teardown_request
def _db_close(exc):
print("closed")
if not db.is_closed():
db.close()
# ========
# Api
# ========
@app.route('/api/<secret>/<code>/<obj>/<pk>')
def toggle_show(secret, code, obj, pk):
try:
user = User.get(User.secret==secret)
except:
return "403 - Secret code invalid"
if hotp.verify(code, user.authCode):
if obj == "link":
link = Link.get(Link.id == pk)
link.reviewed = not link.reviewed
link.save()
links =Link.select().join(User).where(User.secret==secret).dicts()
response_code = 200
return jsonify( response_code = 200, data =list(links) )
elif obj == "maps":
location = Map.get(Map.id == pk)
location.reviewed = not location.reviewed
location.save()
return "ok"
elif obj == "messages":
message = Message.get(Message.id == pk)
message.reviewed = not message.reviewed
message.save()
return "ok"
else:
return "404"
else:
return "403"
| en | 0.761731 | # Required by Peewee. # Will be passed directly to psycopg2. # Ditto. # Ditto. Fetch links for an user. Args: Secret: A secret code generated by uuid that identifies the user. Code: An OTP code generated by telegram app. Returns: Minified version of the list with links, maps and messages if the secret matchs the code at the moment. Get token from pocket api to login user Args: tid: Telegram user id that identifies user. Returns: Error code or redirects to the authorize page of pocket Authorize user on pocket app Args: tid: Telegram user id that identifies user. Returns: Error code or redirects to the main page if success # This hook ensures that the connection is closed when we've finished # processing the request. # ======== # Api # ======== | 2.325724 | 2 |
tests/t-memcached.py | kurtace72/lighttpd2 | 395 | 6617874 | <gh_stars>100-1000
# -*- coding: utf-8 -*-
from base import GroupTest
from requests import CurlRequest
from service import Service
import socket
import os
import base
import time
class Memcached(Service):
	"""Test service that runs a memcached stand-in on a unix socket."""
	name = "memcached"
	# Placeholder; the real command line is set in __init__.
	binary = [ None ]
	def __init__(self):
		super(Memcached, self).__init__()
		# Socket path lives under the test run's tmp/sockets directory.
		self.sockfile = os.path.join(base.Env.dir, "tmp", "sockets", self.name + ".sock")
		self.binary = [ os.path.join(base.Env.sourcedir, "tests", "run-memcached.py") ]
	def Prepare(self):
		sockdir = self.tests.PrepareDir(os.path.join("tmp", "sockets"))
		sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
		# relpath -- presumably to keep sun_path under the AF_UNIX length
		# limit; confirm.
		sock.bind(os.path.relpath(self.sockfile))
		sock.listen(8)
		# Hand the listening socket to the forked helper as its stdin.
		self.fork(*self.binary, inp = sock)
	def Cleanup(self):
		if None != self.sockfile:
			try:
				os.remove(self.sockfile)
			except BaseException as e:
				base.eprint("Couldn't delete socket '%s': %s" % (self.sockfile, e))
		self.tests.CleanupDir(os.path.join("tmp", "sockets"))
class TestStore1(CurlRequest):
	"""First request: cache miss -- handler responds and stores in memcached."""
	URL = "/"
	EXPECT_RESPONSE_BODY = "Hello World!"
	EXPECT_RESPONSE_CODE = 200
	EXPECT_RESPONSE_HEADERS = [("X-Memcached-Hit", "false")]
class TestLookup1(CurlRequest):
	"""Second request: same URL must now be served from the memcached entry."""
	URL = "/"
	EXPECT_RESPONSE_BODY = "Hello World!"
	EXPECT_RESPONSE_CODE = 200
	EXPECT_RESPONSE_HEADERS = [("X-Memcached-Hit", "true")]
	def Run(self):
		# storing might take some time: only after the request is actually
		# finished does lighttpd start the memcache connection to store it
		time.sleep(0.2)
		return super(TestLookup1, self).Run()
class Test(GroupTest):
	"""Group that wires the memcached service into a lighttpd config.

	TestStore1 must run before TestLookup1 so the store happens first.
	"""
	group = [
		TestStore1,
		TestLookup1,
	]
	config = """
memcache;
"""
	def FeatureCheck(self):
		# Spin up the fake memcached and point the lookup/store actions at
		# its unix socket; skip the group if that can't be arranged.
		memcached = Memcached()
		self.plain_config = """
setup {{ module_load "mod_memcached"; }}
memcache = {{
	memcached.lookup (( "server" => "unix:{socket}" ), {{
		header.add "X-Memcached-Hit" => "true";
	}}, {{
		header.add "X-Memcached-Hit" => "false";
		respond 200 => "Hello World!";
		memcached.store ( "server" => "unix:{socket}" );
	}});
}};
""".format(socket = memcached.sockfile)
		self.tests.add_service(memcached)
		return True
| # -*- coding: utf-8 -*-
from base import GroupTest
from requests import CurlRequest
from service import Service
import socket
import os
import base
import time
class Memcached(Service):
name = "memcached"
binary = [ None ]
def __init__(self):
super(Memcached, self).__init__()
self.sockfile = os.path.join(base.Env.dir, "tmp", "sockets", self.name + ".sock")
self.binary = [ os.path.join(base.Env.sourcedir, "tests", "run-memcached.py") ]
def Prepare(self):
sockdir = self.tests.PrepareDir(os.path.join("tmp", "sockets"))
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.bind(os.path.relpath(self.sockfile))
sock.listen(8)
self.fork(*self.binary, inp = sock)
def Cleanup(self):
if None != self.sockfile:
try:
os.remove(self.sockfile)
except BaseException as e:
base.eprint("Couldn't delete socket '%s': %s" % (self.sockfile, e))
self.tests.CleanupDir(os.path.join("tmp", "sockets"))
class TestStore1(CurlRequest):
URL = "/"
EXPECT_RESPONSE_BODY = "Hello World!"
EXPECT_RESPONSE_CODE = 200
EXPECT_RESPONSE_HEADERS = [("X-Memcached-Hit", "false")]
class TestLookup1(CurlRequest):
URL = "/"
EXPECT_RESPONSE_BODY = "Hello World!"
EXPECT_RESPONSE_CODE = 200
EXPECT_RESPONSE_HEADERS = [("X-Memcached-Hit", "true")]
def Run(self):
# storing might take some time: only after the request is actually
# finished does lighttpd start the memcache connection to store it
time.sleep(0.2)
return super(TestLookup1, self).Run()
class Test(GroupTest):
group = [
TestStore1,
TestLookup1,
]
config = """
memcache;
"""
def FeatureCheck(self):
memcached = Memcached()
self.plain_config = """
setup {{ module_load "mod_memcached"; }}
memcache = {{
memcached.lookup (( "server" => "unix:{socket}" ), {{
header.add "X-Memcached-Hit" => "true";
}}, {{
header.add "X-Memcached-Hit" => "false";
respond 200 => "Hello World!";
memcached.store ( "server" => "unix:{socket}" );
}});
}};
""".format(socket = memcached.sockfile)
self.tests.add_service(memcached)
return True | en | 0.593065 | # -*- coding: utf-8 -*- # storing might take some time: only after the request is actually # finished does lighttpd start the memcache connection to store it memcache; setup {{ module_load "mod_memcached"; }} memcache = {{ memcached.lookup (( "server" => "unix:{socket}" ), {{ header.add "X-Memcached-Hit" => "true"; }}, {{ header.add "X-Memcached-Hit" => "false"; respond 200 => "Hello World!"; memcached.store ( "server" => "unix:{socket}" ); }}); }}; | 2.278363 | 2 |
app.py | somiljain7/Medash | 0 | 6617875 | import numpy as np
import pickle
from flask import Flask, request, jsonify, render_template
from svm_func import train_svm, test_svm, predict_svm
import matplotlib.pyplot as plt
import pandas as pd
from sklearn.svm import SVC
from sklearn.metrics import accuracy_score
from time import time
app = Flask(__name__)
model1 = pickle.load(open('pickle/parkinson/model.pkl', 'rb'))
@app.route('/')
def index():
    """Render the main dashboard."""
    return render_template('dashboard.htm')
@app.route('/parkinson-home')
def index1():
    """Render the Parkinson's screening form."""
    return render_template('parkinson/parkinson.html')
@app.route('/parkinson-predict', methods=['POST','GET'])
def predict1():
    """Score the Parkinson's form and render the result page.

    Reads every submitted form value (in form order) as a float feature
    vector, runs the pickled classifier, and reports the predicted label,
    the positive-class probability and a coarse risk level.

    Fixes vs. original: ``s`` was left unbound for any prediction other
    than 0/1 (UnboundLocalError on render); the redundant ``int(70)`` /
    ``int(30)`` casts around literal thresholds are removed.
    """
    features = [[float(x) for x in request.form.values()]]
    final = np.array(features)
    prediction = model1.predict(final)
    output = prediction[0]
    # Binary classifier: 1 -> positive, anything else -> negative.
    s = 'Positive' if output == 1 else 'Negative'
    proba = model1.predict_proba(final)
    prob1 = proba[0][1] * 100  # positive-class probability, percent
    if prob1 > 70:
        a = "High"
    elif prob1 > 30:
        a = "Medium"
    else:
        a = "Low"
    return render_template('parkinson/result.html',
                           pred='Test Result : {}'.format(s),
                           pred1='Percentage of risk : {:.2f}%'.format(prob1),
                           pred2='Risk Level : {}'.format(a))
@app.route('/predict_api', methods=['POST'])
def predict_api():
    """JSON API: return class probabilities for a posted feature dict.

    Fixes vs. original: ``jsonify`` cannot serialize a numpy array; the
    probability row is converted to a plain Python list first.
    """
    data = request.get_json(force=True)
    final = np.array([list(data.values())])
    prediction = model1.predict_proba(final)
    output = prediction[0]
    return jsonify(output.tolist())
@app.route("/heartdisease-home")
def index2():
return render_template('heartdisease/home.html')
@app.route('/heartdisease-result', methods=['POST', 'GET'])
def result2():
    """Score the heart-disease form and render the matching result page.

    Reads nine clinical features from the form, scales them with the
    pickled scaler, classifies with the saved random-forest model, and
    renders either the "no disease" page or the staged disease page.
    """
    # NOTE(review): `os` and `joblib` are used below but never imported at
    # the top of this file (a NameError in the original); import locally so
    # this view works without touching the module header.
    import os
    import joblib
    age = int(request.form['age'])
    sex = int(request.form['sex'])
    trestbps = float(request.form['trestbps'])
    chol = float(request.form['chol'])
    restecg = float(request.form['restecg'])
    thalach = float(request.form['thalach'])
    exang = int(request.form['exang'])
    cp = int(request.form['cp'])
    fbs = float(request.form['fbs'])
    x = np.array([age, sex, cp, trestbps, chol, fbs, restecg,
                  thalach, exang]).reshape(1, -1)
    scaler_path = os.path.join(os.path.dirname(__file__), 'pickle/models/scaler.pkl')
    with open(scaler_path, 'rb') as f:
        scaler = pickle.load(f)
    x = scaler.transform(x)
    model_path = os.path.join(os.path.dirname(__file__), 'pickle/models/rfc.sav')
    clf = joblib.load(model_path)
    y = clf.predict(x)
    if y == 0:
        return render_template('heartdisease/nodisease.html')
    return render_template('heartdisease/heartdisease.htm', stage=int(y))
@app.route('/breast-cancer-home')
def hello_method():
    """Render the breast-cancer screening form."""
    return render_template('breast-cancer/home.html')
@app.route('/breast-cancer-predict', methods=['POST'])
def login_user():
    """Score the breast-cancer form with the SVM and render the result.

    Input comes either as 30 separate numeric fields (value1..value30,
    when the 'space' field is 'None') or as one whitespace-separated blob
    of 30 numbers in the 'space' field.

    Fixes vs. original: the three debug loops that printed the patient's
    feature values to stdout are removed; the manual max-of-two branch is
    replaced by max().
    """
    if request.form['space'] == 'None':
        # 30 individual numeric form fields.
        data = [float(request.form['value' + str(i)]) for i in range(1, 31)]
    else:
        # One whitespace-separated string of 30 numbers.
        data = [float(x.strip()) for x in request.form['space'].split()]
    data_np = np.asarray(data, dtype=float).reshape(1, -1)
    # NOTE(review): `clf` is not defined anywhere in this file; presumably
    # a trained SVM should be created at startup via train_svm -- confirm.
    out, acc, t = predict_svm(clf, data_np)
    output = 'Malignant' if out == 1 else 'Benign'
    # predict_svm returns a probability pair; report the larger one.
    acc = max(acc[0][0], acc[0][1])
    return render_template('breast-cancer/result.html', output=output,
                           accuracy=round(acc * 100, 3), time=t)
@app.route('/profile')
def display():
    """Render the profile page."""
    return render_template('breast-cancer/profile.html')
@app.route('/fad/')
def gain():
    """Render the contact/connect page."""
    return render_template('breast-cancer/connect.html')
@app.route('/my_form_post', methods=["GET",'POST'])
def my_form_post():
print(request.form)
if request.method=="POST":
resultss=request.form
file_name='yes.csv'
from csv import writer
def append_list_as_row(file_name, list_of_elem):
# Open file in append mode
with open(file_name, 'a+', newline='') as write_obj:
# Create a writer object from csv module
csv_writer = writer(write_obj)
# Add contents of list as last row in the csv file
csv_writer.writerow(list_of_elem)
print(resultss)
lsv=[]
for key,value in enumerate(resultss.items()):
lsv.append(value[1])
print(lsv,'red')
if __name__ == "__main__":
app.run(debug=True) | import numpy as np
import pickle
from flask import Flask, request, jsonify, render_template
from svm_func import train_svm, test_svm, predict_svm
import matplotlib.pyplot as plt
import pandas as pd
from sklearn.svm import SVC
from sklearn.metrics import accuracy_score
from time import time
app = Flask(__name__)
model1 = pickle.load(open('pickle/parkinson/model.pkl', 'rb'))
@app.route('/')
def index():
return render_template('dashboard.htm')
@app.route('/parkinson-home')
def index1():
return render_template('parkinson/parkinson.html')
@app.route('/parkinson-predict', methods=['POST','GET'])
def predict1():
int_features = [[float(x) for x in request.form.values()]]
final = np.array(int_features)
prediction = model1.predict(final)
output = prediction[0]
if output==0:
s= 'Negative'
elif output==1:
s = 'Positive'
proba = model1.predict_proba(final)
prob1 = proba[0][1]*100
if prob1>int(70):
a="High"
elif int(30)<prob1<=int(70):
a="Medium"
else:
a="Low"
return render_template('parkinson/result.html',
pred='Test Result : {}'.format(s)
,pred1='Percentage of risk : {:.2f}%'.format(prob1)
,pred2='Risk Level : {}'.format(a))
@app.route('/predict_api', methods=['POST'])
def predict_api():
data = request.get_json(force=True)
final = np.array([list(data.values())])
prediction = model1.predict_proba(final)
output = prediction[0]
return jsonify(output)
@app.route("/heartdisease-home")
def index2():
return render_template('heartdisease/home.html')
@app.route('/heartdisease-result', methods=['POST', 'GET'])
def result2():
age = int(request.form['age'])
sex = int(request.form['sex'])
trestbps = float(request.form['trestbps'])
chol = float(request.form['chol'])
restecg = float(request.form['restecg'])
thalach = float(request.form['thalach'])
exang = int(request.form['exang'])
cp = int(request.form['cp'])
fbs = float(request.form['fbs'])
x = np.array([age, sex, cp, trestbps, chol, fbs, restecg,
thalach, exang]).reshape(1, -1)
scaler_path = os.path.join(os.path.dirname(__file__), 'pickle/models/scaler.pkl')
scaler = None
with open(scaler_path, 'rb') as f:
scaler = pickle.load(f)
x = scaler.transform(x)
model_path = os.path.join(os.path.dirname(__file__), 'pickle/models/rfc.sav')
clf = joblib.load(model_path)
y = clf.predict(x)
print(y)
if y == 0:
return render_template('heartdisease/nodisease.html')
else:
return render_template('heartdisease/heartdisease.htm', stage=int(y))
@app.route('/breast-cancer-home')
def hello_method():
return render_template('breast-cancer/home.html')
@app.route('/breast-cancer-predict', methods=['POST'])
def login_user():
if(request.form['space']=='None'):
data = []
string = 'value'
for i in range(1,31):
data.append(float(request.form['value'+str(i)]))
for i in range(30):
print(data[i])
else:
string = request.form['space']
data = string.split()
print(data)
print("Type:", type(data))
print("Length:", len(data))
for i in range(30):
print(data[i])
data = [float(x.strip()) for x in data]
for i in range(30):
print(data[i])
data_np = np.asarray(data, dtype = float)
data_np = data_np.reshape(1,-1)
out, acc, t = predict_svm(clf, data_np)
if(out==1):
output = 'Malignant'
else:
output = 'Benign'
acc_x = acc[0][0]
acc_y = acc[0][1]
if(acc_x>acc_y):
acc = acc_x
else:
acc=acc_y
return render_template('breast-cancer/result.html', output=output, accuracy=round(acc*100,3), time=t)
@app.route('/profile')
def display():
return render_template('breast-cancer/profile.html')
@app.route('/fad/')
def gain():
return render_template('breast-cancer/connect.html')
@app.route('/my_form_post', methods=["GET",'POST'])
def my_form_post():
print(request.form)
if request.method=="POST":
resultss=request.form
file_name='yes.csv'
from csv import writer
def append_list_as_row(file_name, list_of_elem):
# Open file in append mode
with open(file_name, 'a+', newline='') as write_obj:
# Create a writer object from csv module
csv_writer = writer(write_obj)
# Add contents of list as last row in the csv file
csv_writer.writerow(list_of_elem)
print(resultss)
lsv=[]
for key,value in enumerate(resultss.items()):
lsv.append(value[1])
print(lsv,'red')
if __name__ == "__main__":
app.run(debug=True) | en | 0.864387 | # Open file in append mode # Create a writer object from csv module # Add contents of list as last row in the csv file | 2.606298 | 3 |
Common/Dimensionality Reduction/ICA/Independent Component Analysis.py | faridsaud/ML-Snippets | 1 | 6617876 | <filename>Common/Dimensionality Reduction/ICA/Independent Component Analysis.py
#!/usr/bin/env python
# coding: utf-8
# # Independent Component Analysis Lab
#
# In this notebook, we'll use Independent Component Analysis to retrieve original signals from three observations each of which contains a different mix of the original signals. This is the same problem explained in the ICA video.
#
# ## Dataset
# Let's begin by looking at the dataset we have. We have three WAVE files, each of which is a mix, as we've mentioned. If you haven't worked with audio files in python before, that's okay, they basically boil down to being lists of floats.
#
#
# Let's begin by loading our first audio file, **[ICA mix 1.wav](ICA mix 1.wav)** [click to listen to the file]:
# In[1]:
import numpy as np
import wave
# Read the wave file
mix_1_wave = wave.open('ICA mix 1.wav','r')
# Let's peak at the parameters of the wave file to learn more about it
# In[2]:
mix_1_wave.getparams()
# So this file has only channel (so it's mono sound). It has a frame rate of 44100, which means each second of sound is represented by 44100 integers (integers because the file is in the common PCM 16-bit format). The file has a total of 264515 integers/frames, which means its length in seconds is:
# In[3]:
264515/44100
# Let's extract the frames of the wave file, which will be a part of the dataset we'll run ICA against:
# In[4]:
# Extract Raw Audio from Wav File
signal_1_raw = mix_1_wave.readframes(-1)
# np.fromstring on binary data is deprecated and was removed in NumPy 2.0;
# np.frombuffer is the supported equivalent. np.int16 is the canonical
# dtype spelling ('Int16' is not accepted by all NumPy versions).
signal_1 = np.frombuffer(signal_1_raw, dtype=np.int16)
# signal_1 is now a list of ints representing the sound contained in the first file.
# In[5]:
'length: ', len(signal_1) , 'first 100 elements: ',signal_1[:100]
# If we plot this array as a line graph, we'll get the familiar wave form representation:
# In[8]:
import matplotlib.pyplot as plt
fs = mix_1_wave.getframerate()
timing = np.linspace(0, len(signal_1)/fs, num=len(signal_1))
plt.figure(figsize=(12,2))
plt.title('Recording 1')
plt.plot(timing,signal_1, c="#3ABFE7")
plt.ylim(-35000, 35000)
plt.show()
# In the same way, we can now load the other two wave files, **[ICA mix 2.wav](ICA mix 2.wav)** and **[ICA mix 3.wav](ICA mix 3.wav)**
# In[9]:
mix_2_wave = wave.open('ICA mix 2.wav','r')
# Extract Raw Audio from Wav File.
# np.fromstring on binary data is deprecated and was removed in NumPy 2.0;
# np.frombuffer with the canonical np.int16 dtype is the supported form.
signal_raw_2 = mix_2_wave.readframes(-1)
signal_2 = np.frombuffer(signal_raw_2, dtype=np.int16)
mix_3_wave = wave.open('ICA mix 3.wav','r')
# Extract Raw Audio from Wav File
signal_raw_3 = mix_3_wave.readframes(-1)
signal_3 = np.frombuffer(signal_raw_3, dtype=np.int16)
plt.figure(figsize=(12,2))
plt.title('Recording 2')
plt.plot(timing,signal_2, c="#3ABFE7")
plt.ylim(-35000, 35000)
plt.show()
plt.figure(figsize=(12,2))
plt.title('Recording 3')
plt.plot(timing,signal_3, c="#3ABFE7")
plt.ylim(-35000, 35000)
plt.show()
# Now that we've read all three files, we're ready to [zip](https://docs.python.org/3/library/functions.html#zip) them to create our dataset.
#
# * Create dataset ```X``` by zipping signal_1, signal_2, and signal_3 into a single list
# In[10]:
X = list(zip(signal_1, signal_2, signal_3))
# Let's peak at what X looks like
X[:10]
# We are now ready to run ICA to try to retrieve the original signals.
#
# * Import sklearn's [FastICA](http://scikit-learn.org/stable/modules/generated/sklearn.decomposition.FastICA.html) module
# * Initialize FastICA look for three components
# * Run the FastICA algorithm using fit_transform on dataset X
# In[12]:
# Import FastICA
from sklearn.decomposition import FastICA
# Initialize FastICA with n_components=3 (one per original source signal)
ica = FastICA(n_components=3)
#Run the FastICA algorithm using fit_transform on dataset X
# (X is a list of (s1, s2, s3) sample tuples; the result has one column
# per recovered independent component)
ica_result = ica.fit_transform(X)
# ```ica_result``` now contains the result of FastICA, which we hope are the original signals. It's in the shape:
# In[13]:
ica_result.shape
# Let's split into separate signals and look at them
# In[14]:
result_signal_1 = ica_result[:,0]
result_signal_2 = ica_result[:,1]
result_signal_3 = ica_result[:,2]
# Let's plot to see how the wave forms look
# In[15]:
# Plot Independent Component #1
plt.figure(figsize=(12,2))
plt.title('Independent Component #1')
plt.plot(result_signal_1, c="#df8efd")
plt.ylim(-0.010, 0.010)
plt.show()
# Plot Independent Component #2
plt.figure(figsize=(12,2))
plt.title('Independent Component #2')
plt.plot(result_signal_2, c="#87de72")
plt.ylim(-0.010, 0.010)
plt.show()
# Plot Independent Component #3
plt.figure(figsize=(12,2))
plt.title('Independent Component #3')
plt.plot(result_signal_3, c="#f65e97")
plt.ylim(-0.010, 0.010)
plt.show()
# Do some of these look like musical wave forms?
#
# The best way to confirm the result is to listen to resulting files. So let's save as wave files and verify. But before we do that, we'll have to:
# * convert them to integer (so we can save as PCM 16-bit Wave files), otherwise only some media players would be able to play them and others won't
# * Map the values to the appropriate range for int16 audio. That range is between -32768 and +32767. A basic mapping can be done by multiplying by 32767.
# * The sounds will be a little faint, we can increase the volume by multiplying by a value like 100
# In[16]:
from scipy.io import wavfile
# Convert to int, map the appropriate range, and increase the volume a little bit
# NOTE(review): the x100 gain assumes component amplitudes stay below ~0.01;
# larger values would wrap around in int16 -- consider np.clip if that ever
# happens. Confirm against the actual component range.
result_signal_1_int = np.int16(result_signal_1*32767*100)
result_signal_2_int = np.int16(result_signal_2*32767*100)
result_signal_3_int = np.int16(result_signal_3*32767*100)
# Write wave files (fs is the frame rate read from the first input file)
wavfile.write("result_signal_1.wav", fs, result_signal_1_int)
wavfile.write("result_signal_2.wav", fs, result_signal_2_int)
wavfile.write("result_signal_3.wav", fs, result_signal_3_int)
# The resulting files we have now are: [note: make sure to lower the volume on your speakers first, just in case some problem caused the file to sound like static]
# * [result_signal_1.wav](result_signal_1.wav)
# * [result_signal_2.wav](result_signal_2.wav)
# * [result_signal_3.wav](result_signal_3.wav)
#
#
#
# Music:
# * Piano - The Carnival of the Animals - XIII. The Swan (Solo piano version). Performer: <NAME>
# * Cello - Cello Suite no. 3 in C, BWV 1009 - I. Prelude. Performer: European Archive
# In[ ]:
| <filename>Common/Dimensionality Reduction/ICA/Independent Component Analysis.py
#!/usr/bin/env python
# coding: utf-8
# # Independent Component Analysis Lab
#
# In this notebook, we'll use Independent Component Analysis to retrieve original signals from three observations each of which contains a different mix of the original signals. This is the same problem explained in the ICA video.
#
# ## Dataset
# Let's begin by looking at the dataset we have. We have three WAVE files, each of which is a mix, as we've mentioned. If you haven't worked with audio files in python before, that's okay, they basically boil down to being lists of floats.
#
#
# Let's begin by loading our first audio file, **[ICA mix 1.wav](ICA mix 1.wav)** [click to listen to the file]:
# In[1]:
import numpy as np
import wave
# Read the wave file
mix_1_wave = wave.open('ICA mix 1.wav','r')
# Let's peak at the parameters of the wave file to learn more about it
# In[2]:
mix_1_wave.getparams()
# So this file has only channel (so it's mono sound). It has a frame rate of 44100, which means each second of sound is represented by 44100 integers (integers because the file is in the common PCM 16-bit format). The file has a total of 264515 integers/frames, which means its length in seconds is:
# In[3]:
264515/44100
# Let's extract the frames of the wave file, which will be a part of the dataset we'll run ICA against:
# In[4]:
# Extract Raw Audio from Wav File
signal_1_raw = mix_1_wave.readframes(-1)
signal_1 = np.fromstring(signal_1_raw, 'Int16')
# signal_1 is now a list of ints representing the sound contained in the first file.
# In[5]:
'length: ', len(signal_1) , 'first 100 elements: ',signal_1[:100]
# If we plot this array as a line graph, we'll get the familiar wave form representation:
# In[8]:
import matplotlib.pyplot as plt
fs = mix_1_wave.getframerate()
timing = np.linspace(0, len(signal_1)/fs, num=len(signal_1))
plt.figure(figsize=(12,2))
plt.title('Recording 1')
plt.plot(timing,signal_1, c="#3ABFE7")
plt.ylim(-35000, 35000)
plt.show()
# In the same way, we can now load the other two wave files, **[ICA mix 2.wav](ICA mix 2.wav)** and **[ICA mix 3.wav](ICA mix 3.wav)**
# In[9]:
mix_2_wave = wave.open('ICA mix 2.wav','r')
#Extract Raw Audio from Wav File
signal_raw_2 = mix_2_wave.readframes(-1)
signal_2 = np.fromstring(signal_raw_2, 'Int16')
mix_3_wave = wave.open('ICA mix 3.wav','r')
#Extract Raw Audio from Wav File
signal_raw_3 = mix_3_wave.readframes(-1)
signal_3 = np.fromstring(signal_raw_3, 'Int16')
plt.figure(figsize=(12,2))
plt.title('Recording 2')
plt.plot(timing,signal_2, c="#3ABFE7")
plt.ylim(-35000, 35000)
plt.show()
plt.figure(figsize=(12,2))
plt.title('Recording 3')
plt.plot(timing,signal_3, c="#3ABFE7")
plt.ylim(-35000, 35000)
plt.show()
# Now that we've read all three files, we're ready to [zip](https://docs.python.org/3/library/functions.html#zip) them to create our dataset.
#
# * Create dataset ```X``` by zipping signal_1, signal_2, and signal_3 into a single list
# In[10]:
# Dataset: one (mix1, mix2, mix3) sample tuple per time step.
X = list(zip(signal_1, signal_2, signal_3))
# Peek at the first few rows of X.
X[:10]
# Run ICA to try to retrieve the original signals.
# In[12]:
# Import FastICA
from sklearn.decomposition import FastICA
# Initialize FastICA with n_components=3 (one per expected source).
# NOTE(review): no random_state is set, so the recovered components (and
# their order/sign) can vary between runs — pass random_state for
# reproducibility if that matters.
ica = FastICA(n_components=3)
# Run the FastICA algorithm using fit_transform on dataset X.
ica_result = ica.fit_transform(X)
# ica_result has shape (n_samples, 3): one column per recovered source.
ica_result.shape
# Split the result matrix into the three separate source signals.
# In[14]:
result_signal_1 = ica_result[:,0]
result_signal_2 = ica_result[:,1]
result_signal_3 = ica_result[:,2]
# Let's plot to see how the wave forms look
# In[15]:
# Plot each recovered independent component on its own figure,
# using the same titles, colours and y-limits as before.
component_styles = [
    (result_signal_1, "#df8efd"),
    (result_signal_2, "#87de72"),
    (result_signal_3, "#f65e97"),
]
for index, (component, colour) in enumerate(component_styles, start=1):
    plt.figure(figsize=(12, 2))
    plt.title('Independent Component #%d' % index)
    plt.plot(component, c=colour)
    plt.ylim(-0.010, 0.010)
    plt.show()
# Do some of these look like musical wave forms?
#
# The best way to confirm the result is to listen to resulting files. So let's save as wave files and verify. But before we do that, we'll have to:
# * convert them to integer (so we can save as PCM 16-bit Wave files), otherwise only some media players would be able to play them and others won't
# * Map the values to the appropriate range for int16 audio. That range is between -32768 and +32767. A basic mapping can be done by multiplying by 32767.
# * The sounds will be a little faint, we can increase the volume by multiplying by a value like 100
# In[16]:
from scipy.io import wavfile
# Convert to int, map the appropriate range, and increase the volume a little bit.
# NOTE(review): the *32767*100 gain assumes the ICA outputs stay well below
# 0.01 in magnitude (as the plots above suggest); larger values would wrap
# around in int16 and sound like clipping/static — confirm for other inputs.
result_signal_1_int = np.int16(result_signal_1*32767*100)
result_signal_2_int = np.int16(result_signal_2*32767*100)
result_signal_3_int = np.int16(result_signal_3*32767*100)
# Write the three recovered sources as 16-bit PCM WAV files at the
# original sample rate fs.
wavfile.write("result_signal_1.wav", fs, result_signal_1_int)
wavfile.write("result_signal_2.wav", fs, result_signal_2_int)
wavfile.write("result_signal_3.wav", fs, result_signal_3_int)
# The resulting files we have now are: [note: make sure to lower the volume on your speakers first, just in case some problem caused the file to sound like static]
# * [result_signal_1.wav](result_signal_1.wav)
# * [result_signal_2.wav](result_signal_2.wav)
# * [result_signal_3.wav](result_signal_3.wav)
#
#
#
# Music:
# * Piano - The Carnival of the Animals - XIII. The Swan (Solo piano version). Performer: <NAME>
# * Cello - Cello Suite no. 3 in C, BWV 1009 - I. Prelude. Performer: European Archive
# In[ ]:
| en | 0.876834 | #!/usr/bin/env python # coding: utf-8 # # Independent Component Analysis Lab # # In this notebook, we'll use Independent Component Analysis to retrieve original signals from three observations each of which contains a different mix of the original signals. This is the same problem explained in the ICA video. # # ## Dataset # Let's begin by looking at the dataset we have. We have three WAVE files, each of which is a mix, as we've mentioned. If you haven't worked with audio files in python before, that's okay, they basically boil down to being lists of floats. # # # Let's begin by loading our first audio file, **[ICA mix 1.wav](ICA mix 1.wav)** [click to listen to the file]: # In[1]: # Read the wave file # Let's peak at the parameters of the wave file to learn more about it # In[2]: # So this file has only channel (so it's mono sound). It has a frame rate of 44100, which means each second of sound is represented by 44100 integers (integers because the file is in the common PCM 16-bit format). The file has a total of 264515 integers/frames, which means its length in seconds is: # In[3]: # Let's extract the frames of the wave file, which will be a part of the dataset we'll run ICA against: # In[4]: # Extract Raw Audio from Wav File # signal_1 is now a list of ints representing the sound contained in the first file. # In[5]: # If we plot this array as a line graph, we'll get the familiar wave form representation: # In[8]: # In the same way, we can now load the other two wave files, **[ICA mix 2.wav](ICA mix 2.wav)** and **[ICA mix 3.wav](ICA mix 3.wav)** # In[9]: #Extract Raw Audio from Wav File #Extract Raw Audio from Wav File # Now that we've read all three files, we're ready to [zip](https://docs.python.org/3/library/functions.html#zip) them to create our dataset. 
# # * Create dataset ```X``` by zipping signal_1, signal_2, and signal_3 into a single list # In[10]: # Let's peak at what X looks like # We are now ready to run ICA to try to retrieve the original signals. # # * Import sklearn's [FastICA](http://scikit-learn.org/stable/modules/generated/sklearn.decomposition.FastICA.html) module # * Initialize FastICA look for three components # * Run the FastICA algorithm using fit_transform on dataset X # In[12]: # Import FastICA # Initialize FastICA with n_components=3 #Run the FastICA algorithm using fit_transform on dataset X # ```ica_result``` now contains the result of FastICA, which we hope are the original signals. It's in the shape: # In[13]: # Let's split into separate signals and look at them # In[14]: # Let's plot to see how the wave forms look # In[15]: # Plot Independent Component #1 #1') # Plot Independent Component #2 #2') # Plot Independent Component #3 #3') # Do some of these look like musical wave forms? # # The best way to confirm the result is to listen to resulting files. So let's save as wave files and verify. But before we do that, we'll have to: # * convert them to integer (so we can save as PCM 16-bit Wave files), otherwise only some media players would be able to play them and others won't # * Map the values to the appropriate range for int16 audio. That range is between -32768 and +32767. A basic mapping can be done by multiplying by 32767. 
# * The sounds will be a little faint, we can increase the volume by multiplying by a value like 100 # In[16]: # Convert to int, map the appropriate range, and increase the volume a little bit # Write wave files # The resulting files we have now are: [note: make sure to lower the volume on your speakers first, just in case some problem caused the file to sound like static] # * [result_signal_1.wav](result_signal_1.wav) # * [result_signal_2.wav](result_signal_2.wav) # * [result_signal_3.wav](result_signal_3.wav) # # # # Music: # * Piano - The Carnival of the Animals - XIII. The Swan (Solo piano version). Performer: <NAME> # * Cello - Cello Suite no. 3 in C, BWV 1009 - I. Prelude. Performer: European Archive # In[ ]: | 3.290294 | 3 |
lyrics_prediction_file.py | castillogo/lyrics_project | 0 | 6617877 | <reponame>castillogo/lyrics_project<filename>lyrics_prediction_file.py
"""
This is a program for making a song prediction
according to the lyrcis available in
file output.csv
"""
import re
import warnings
import pandas as pd
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
from sklearn.metrics import precision_score, recall_score
from sklearn.metrics import confusion_matrix, f1_score
from sklearn.naive_bayes import MultinomialNB
from sklearn.ensemble import RandomForestClassifier
from imblearn.over_sampling import RandomOverSampler
warnings.filterwarnings("ignore")
# Shared vectorizer/model instances and the lyrics corpus used throughout.
TV = TfidfVectorizer()
M = MultinomialNB()
ROS = RandomOverSampler()
# output.csv is expected to contain at least: singer, singer_number, lyrics_words.
LYRICS_DF = pd.read_csv('output.csv')
LYRICS_DF['singer_number'] = LYRICS_DF['singer_number'].astype(int)
LYRICSWORDS = LYRICS_DF['lyrics_words'].to_list()
def print_evaluations(y_train, y_pred, model):
    """
    Print accuracy, precision, recall and F1 for the given true/predicted
    labels, draw a confusion-matrix heatmap, and return the accuracy.

    y_train : array-like of true labels
    y_pred  : array-like of predicted labels
    model   : display name of the model being evaluated
    """
    print(f'How does model {model} score:')
    print(f"The accuracy of the model is: {round(accuracy_score(y_train, y_pred), 3)}")
    print(f"The precision of the model is: {round(precision_score(y_train, y_pred, average='weighted'), 3)}")
    print(f"The recall of the model is: {round(recall_score(y_train, y_pred, average='weighted'), 3)}")
    print(f"The f1-score of the model is: {round(f1_score(y_train, y_pred, average='weighted'), 3)}")
    # print confusion matrix
    plt.figure(figsize=(15, 15))
    Cm = confusion_matrix(y_train, y_pred)
    print(Cm)
    Ax = plt.subplot()
    sns.heatmap(Cm, annot=True, ax=Ax)
    Ax.set_xlabel('Predicted labels')
    Ax.set_ylabel('True labels')
    Ax.set_title('Confusion Matrix %s' % (model))
    # BUG FIX: the original passed `model` (a string) as a third positional
    # argument to accuracy_score, where sklearn expects normalize/sample_weight
    # keyword arguments — a TypeError on modern sklearn. Only the label
    # arrays belong here.
    return accuracy_score(y_train, y_pred)
if __name__ == '__main__':
print('')
print("""
This is a program for making a song prediction
according to the lyrcis available in
file output.csv
""")
print('')
print("""
Please save your lyrics as a txt file
in this folder and write the name
of the file here:
""")
print('')
SONG = input()
SONG = str(SONG)
with open("%s.txt" % (SONG), "r") as myfile:
DATA = myfile.readlines()
DATA = str(DATA)
DATA = re.sub(r"\\n", ' ', DATA)
print('')
print("""
These are your lyrics:
""")
print('')
print(DATA)
TV.fit(LYRICSWORDS)
TV_VECTORS = TV.transform(LYRICSWORDS)
Y = LYRICS_DF['singer'].to_list()
YNUMBERS = LYRICS_DF['singer_number'].to_list()
M.fit(TV_VECTORS, YNUMBERS)
NEW_SONG = [DATA]
TV_VEC = TV.transform(NEW_SONG)
#simple naive bayes
print('')
print("""
This is a simple naive bayes predcition
without input optimization:
""")
print('')
print("""
(please check dictionary printed at the end of the
run to see which artist corresponds
to which artistnumber)
""")
print('')
print("""
Your song belongs most probably
to this artistnumber:
""")
print('')
print(M.predict(TV_VEC))
print('')
print("""
These are the probabilities that your
song belongs to each artistnumber:
""")
print('')
print(M.predict_proba(TV_VEC))
print('')
DF = pd.DataFrame(zip(LYRICSWORDS, Y), columns=['LYRICSWORDS', 'YNUMBERS'])
Y = DF['YNUMBERS']
X = DF[['LYRICSWORDS']]
X_RESAMPLE, Y_RESAMPLE = ROS.fit_resample(X, Y)
CV = CountVectorizer(ngram_range=(1, 1))
CV.fit(LYRICSWORDS)
WORD_VECTORS = CV.transform(LYRICSWORDS)
CV.get_feature_names()
DF2 = pd.DataFrame(WORD_VECTORS.todense(), columns=CV.get_feature_names())
X = DF2
Y = DF['YNUMBERS']
print('')
print("""
These are the train-test predicitions
for a baseline model:
""")
print('')
SPLIT = 0.1
X_TRAIN, X_TEST, Y_TRAIN, Y_TEST = train_test_split(X,
YNUMBERS,
random_state=10,
test_size=SPLIT)
#Baseline model
YPRED_BL = [0] * X_TRAIN.shape[0]
print_evaluations(Y_TRAIN, YPRED_BL, 'Baseline')
NEW_DF = pd.concat([X, Y], axis=1)
NEW_DF.groupby('YNUMBERS').size()
#NEW_DF.groupby('YNUMBERS').size()[1]/NEW_DF.shape[0]*100
X = NEW_DF.iloc[:, :-1]
Y = NEW_DF.YNUMBERS
# simple Random forest model
print('')
print("""
These are the results of the
random forest evaluation:
""")
RF = RandomForestClassifier(n_estimators=20, max_depth=3, random_state=10)
RF.fit(X_TRAIN, Y_TRAIN)
YPRED_RF = RF.predict(X_TEST)
RF2 = RF
print('')
print("""
This is the random forest prediction
for the artist number for your song:
""")
print('')
print(RF.predict(TV_VEC))
print("""
These are the probabilities that
your song belongs to each artistnumber:
""")
print('')
print(RF.predict_proba(TV_VEC))
print('')
print("""
These are the random forest evaluations
for the train-test split:
""")
print('')
print_evaluations(Y_TEST, YPRED_RF, 'RandomForest')
# Random oversampling model
ROS = RandomOverSampler(random_state=10)
X_ROS, Y_ROS = ROS.fit_resample(X_TRAIN, Y_TRAIN)
np.unique(Y_ROS, return_counts=True)
RF2.fit(X_ROS, Y_ROS)
YPRED_ROS = RF2.predict(X_TEST)
print('')
print("""
This is the random oversampling prediction
of the artist number with random forest
evaluation for your song:
""")
print('')
print(RF2.predict(TV_VEC))
print('')
print("""
These are the probabilities that
your song belongs to each artistnumber:
""")
print('')
print(RF2.predict_proba(TV_VEC))
print('')
print("""
These are the random oversampling
evaluations with the train-test split:
""")
print('')
print_evaluations(Y_TEST, YPRED_ROS, 'RandomOversampling')
Y = LYRICS_DF['singer'].to_list()
YNUMBERS = LYRICS_DF['singer_number'].to_list()
ARTISTLISTFINAL = dict(zip(Y, YNUMBERS))
print('')
print("""
This is the code for the artists
and the belonging artistnumbers:
""")
print(ARTISTLISTFINAL)
print('')
print("""
These are the heatmaps for the confusion
matrix of each different evaluation:
""")
| """
This is a program for making a song prediction
according to the lyrcis available in
file output.csv
"""
import re
import warnings
import pandas as pd
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
from sklearn.metrics import precision_score, recall_score
from sklearn.metrics import confusion_matrix, f1_score
from sklearn.naive_bayes import MultinomialNB
from sklearn.ensemble import RandomForestClassifier
from imblearn.over_sampling import RandomOverSampler
warnings.filterwarnings("ignore")
TV = TfidfVectorizer()
M = MultinomialNB()
ROS = RandomOverSampler()
LYRICS_DF = pd.read_csv('output.csv')
LYRICS_DF['singer_number'] = LYRICS_DF['singer_number'].astype(int)
LYRICSWORDS = LYRICS_DF['lyrics_words'].to_list()
def print_evaluations(y_train, y_pred, model):
    """
    Print accuracy, precision, recall and F1 for the given true/predicted
    labels, draw a confusion-matrix heatmap, and return the accuracy.

    y_train : array-like of true labels
    y_pred  : array-like of predicted labels
    model   : display name of the model being evaluated
    """
    print(f'How does model {model} score:')
    print(f"The accuracy of the model is: {round(accuracy_score(y_train, y_pred), 3)}")
    print(f"The precision of the model is: {round(precision_score(y_train, y_pred, average='weighted'), 3)}")
    print(f"The recall of the model is: {round(recall_score(y_train, y_pred, average='weighted'), 3)}")
    print(f"The f1-score of the model is: {round(f1_score(y_train, y_pred, average='weighted'), 3)}")
    # print confusion matrix
    plt.figure(figsize=(15, 15))
    Cm = confusion_matrix(y_train, y_pred)
    print(Cm)
    Ax = plt.subplot()
    sns.heatmap(Cm, annot=True, ax=Ax)
    Ax.set_xlabel('Predicted labels')
    Ax.set_ylabel('True labels')
    Ax.set_title('Confusion Matrix %s' % (model))
    # BUG FIX: the original passed `model` (a string) as a third positional
    # argument to accuracy_score, where sklearn expects normalize/sample_weight
    # keyword arguments — a TypeError on modern sklearn. Only the label
    # arrays belong here.
    return accuracy_score(y_train, y_pred)
if __name__ == '__main__':
print('')
print("""
This is a program for making a song prediction
according to the lyrcis available in
file output.csv
""")
print('')
print("""
Please save your lyrics as a txt file
in this folder and write the name
of the file here:
""")
print('')
SONG = input()
SONG = str(SONG)
with open("%s.txt" % (SONG), "r") as myfile:
DATA = myfile.readlines()
DATA = str(DATA)
DATA = re.sub(r"\\n", ' ', DATA)
print('')
print("""
These are your lyrics:
""")
print('')
print(DATA)
TV.fit(LYRICSWORDS)
TV_VECTORS = TV.transform(LYRICSWORDS)
Y = LYRICS_DF['singer'].to_list()
YNUMBERS = LYRICS_DF['singer_number'].to_list()
M.fit(TV_VECTORS, YNUMBERS)
NEW_SONG = [DATA]
TV_VEC = TV.transform(NEW_SONG)
#simple naive bayes
print('')
print("""
This is a simple naive bayes predcition
without input optimization:
""")
print('')
print("""
(please check dictionary printed at the end of the
run to see which artist corresponds
to which artistnumber)
""")
print('')
print("""
Your song belongs most probably
to this artistnumber:
""")
print('')
print(M.predict(TV_VEC))
print('')
print("""
These are the probabilities that your
song belongs to each artistnumber:
""")
print('')
print(M.predict_proba(TV_VEC))
print('')
DF = pd.DataFrame(zip(LYRICSWORDS, Y), columns=['LYRICSWORDS', 'YNUMBERS'])
Y = DF['YNUMBERS']
X = DF[['LYRICSWORDS']]
X_RESAMPLE, Y_RESAMPLE = ROS.fit_resample(X, Y)
CV = CountVectorizer(ngram_range=(1, 1))
CV.fit(LYRICSWORDS)
WORD_VECTORS = CV.transform(LYRICSWORDS)
CV.get_feature_names()
DF2 = pd.DataFrame(WORD_VECTORS.todense(), columns=CV.get_feature_names())
X = DF2
Y = DF['YNUMBERS']
print('')
print("""
These are the train-test predicitions
for a baseline model:
""")
print('')
SPLIT = 0.1
X_TRAIN, X_TEST, Y_TRAIN, Y_TEST = train_test_split(X,
YNUMBERS,
random_state=10,
test_size=SPLIT)
#Baseline model
YPRED_BL = [0] * X_TRAIN.shape[0]
print_evaluations(Y_TRAIN, YPRED_BL, 'Baseline')
NEW_DF = pd.concat([X, Y], axis=1)
NEW_DF.groupby('YNUMBERS').size()
#NEW_DF.groupby('YNUMBERS').size()[1]/NEW_DF.shape[0]*100
X = NEW_DF.iloc[:, :-1]
Y = NEW_DF.YNUMBERS
# simple Random forest model
print('')
print("""
These are the results of the
random forest evaluation:
""")
RF = RandomForestClassifier(n_estimators=20, max_depth=3, random_state=10)
RF.fit(X_TRAIN, Y_TRAIN)
YPRED_RF = RF.predict(X_TEST)
RF2 = RF
print('')
print("""
This is the random forest prediction
for the artist number for your song:
""")
print('')
print(RF.predict(TV_VEC))
print("""
These are the probabilities that
your song belongs to each artistnumber:
""")
print('')
print(RF.predict_proba(TV_VEC))
print('')
print("""
These are the random forest evaluations
for the train-test split:
""")
print('')
print_evaluations(Y_TEST, YPRED_RF, 'RandomForest')
# Random oversampling model
ROS = RandomOverSampler(random_state=10)
X_ROS, Y_ROS = ROS.fit_resample(X_TRAIN, Y_TRAIN)
np.unique(Y_ROS, return_counts=True)
RF2.fit(X_ROS, Y_ROS)
YPRED_ROS = RF2.predict(X_TEST)
print('')
print("""
This is the random oversampling prediction
of the artist number with random forest
evaluation for your song:
""")
print('')
print(RF2.predict(TV_VEC))
print('')
print("""
These are the probabilities that
your song belongs to each artistnumber:
""")
print('')
print(RF2.predict_proba(TV_VEC))
print('')
print("""
These are the random oversampling
evaluations with the train-test split:
""")
print('')
print_evaluations(Y_TEST, YPRED_ROS, 'RandomOversampling')
Y = LYRICS_DF['singer'].to_list()
YNUMBERS = LYRICS_DF['singer_number'].to_list()
ARTISTLISTFINAL = dict(zip(Y, YNUMBERS))
print('')
print("""
This is the code for the artists
and the belonging artistnumbers:
""")
print(ARTISTLISTFINAL)
print('')
print("""
These are the heatmaps for the confusion
matrix of each different evaluation:
""") | en | 0.895983 | This is a program for making a song prediction according to the lyrcis available in file output.csv This function summaries all scores and makes a confusion matrix heatman #print confusion matrix This is a program for making a song prediction according to the lyrcis available in file output.csv Please save your lyrics as a txt file in this folder and write the name of the file here: These are your lyrics: #simple naive bayes This is a simple naive bayes predcition without input optimization: (please check dictionary printed at the end of the run to see which artist corresponds to which artistnumber) Your song belongs most probably to this artistnumber: These are the probabilities that your song belongs to each artistnumber: These are the train-test predicitions for a baseline model: #Baseline model #NEW_DF.groupby('YNUMBERS').size()[1]/NEW_DF.shape[0]*100 # simple Random forest model These are the results of the random forest evaluation: This is the random forest prediction for the artist number for your song: These are the probabilities that your song belongs to each artistnumber: These are the random forest evaluations for the train-test split: # Random oversampling model This is the random oversampling prediction of the artist number with random forest evaluation for your song: These are the probabilities that your song belongs to each artistnumber: These are the random oversampling evaluations with the train-test split: This is the code for the artists and the belonging artistnumbers: These are the heatmaps for the confusion matrix of each different evaluation: | 3.471702 | 3 |
setup.py | Devansh3712/PyDep | 1 | 6617878 | from setuptools import *
from os import path

# Read the README so PyPI renders it as the project's long description.
_here = path.abspath(path.dirname(__file__))
with open(path.join(_here, 'README.md'), encoding='utf-8') as readme:
    long_description = readme.read()

# Package metadata for the pydep-cli distribution.
setup(
    name='pydep-cli',
    version='0.1.1',
    author='<NAME>',
    author_email='<EMAIL>',
    url='https://github.com/Devansh3712/PyDep',
    description='Create pyproject.toml & poetry.lock dependency files from requirements.txt',
    long_description=long_description,
    long_description_content_type="text/markdown",
    license='MIT',
    packages=find_packages(),
    include_package_data=True,
    entry_points={
        "console_scripts": [
            "pydep=pydep.__main__:pydep",
        ]
    },
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    install_requires=["click==7.1.2"],
)
from os import path
this_directory = path.abspath(path.dirname(__file__))
with open(path.join(this_directory, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name = 'pydep-cli',
version = '0.1.1',
author = '<NAME>',
author_email = '<EMAIL>',
url = 'https://github.com/Devansh3712/PyDep',
description = 'Create pyproject.toml & poetry.lock dependency files from requirements.txt',
long_description = long_description,
long_description_content_type = "text/markdown",
license = 'MIT',
packages = find_packages(),
include_package_data = True,
entry_points = {
"console_scripts": [
"pydep=pydep.__main__:pydep",
]
},
classifiers = [
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
install_requires = ["click==7.1.2"],
) | none | 1 | 1.451732 | 1 | |
userbot/plugins/alive.py | Userbot007/X-tra-Telegram | 0 | 6617879 | """Check if userbot alive. If you change these, you become the gayest gay such that even the gay world will disown you."""
import asyncio
from telethon import events
from telethon.tl.types import ChannelParticipantsAdmins
from platform import uname
from userbot import ALIVE_NAME
from userbot.utils import admin_cmd
DEFAULTUSER = str(ALIVE_NAME) if ALIVE_NAME else "No name set yet nibba, Set it first"
# NOTE(review): `command` is not among this module's visible imports (only
# `admin_cmd` is imported, and is unused); presumably `command` is injected
# by the userbot framework elsewhere — confirm, otherwise this decorator
# raises NameError at plugin load.
@command(outgoing=True, pattern="^.alive$")
async def amireallyalive(alive):
    """ For .alive command, check if the bot is running. """
    # Edit the triggering message in place with a static status banner that
    # interpolates the configured owner name.
    await alive.edit("**`<NAME> ψ Zinda hu mei ! Maaf krna gusse mei idhr udhr ho jata hu !.. (`∇´)ψ`**\n\n"
                     "`Telethon version: 69.69 LMFAO\nPython:69.9\nHacked by SHadow Broker\n"
                     "`Bot created by:`Machintosh \n"
                     "`Database Status: What is a database btw ?!\n\nAlways with you, my master!\n`"
                     f"`My peru owner`: {DEFAULTUSER}\n"
                     "Tanya izz only Mine")
| """Check if userbot alive. If you change these, you become the gayest gay such that even the gay world will disown you."""
import asyncio
from telethon import events
from telethon.tl.types import ChannelParticipantsAdmins
from platform import uname
from userbot import ALIVE_NAME
from userbot.utils import admin_cmd
DEFAULTUSER = str(ALIVE_NAME) if ALIVE_NAME else "No name set yet nibba, Set it first"
@command(outgoing=True, pattern="^.alive$")
async def amireallyalive(alive):
""" For .alive command, check if the bot is running. """
await alive.edit("**`<NAME> ψ Zinda hu mei ! Maaf krna gusse mei idhr udhr ho jata hu !.. (`∇´)ψ`**\n\n"
"`Telethon version: 69.69 LMFAO\nPython:69.9\nHacked by SHadow Broker\n"
"`Bot created by:`Machintosh \n"
"`Database Status: What is a database btw ?!\n\nAlways with you, my master!\n`"
f"`My peru owner`: {DEFAULTUSER}\n"
"Tanya izz only Mine")
| en | 0.874974 | Check if userbot alive. If you change these, you become the gayest gay such that even the gay world will disown you. For .alive command, check if the bot is running. | 2.55574 | 3 |
Wallet/Wallet.py | samEmi/digital-currency | 1 | 6617880 | import CreateDatabase as cd
import KeyGeneration as KeyGeneration
import Token as Token
import datetime
class ColdWallet:
    """Cold wallet that stores tokens (key pair + amount) in a local SQLite table.

    NOTE: the original class defined `receive` and `send` twice each; Python
    keeps only the last binding, so the earlier dead (shadowed) definitions
    have been removed here without changing behaviour.
    """

    def __init__(self):
        self.TotalAmount = 0
        self.walletAddress = None
        self.walletObj = None
        self.initialiseWalletAddress()
        # Demo flow kept from the original: print the balance, delete the
        # tokens with primary keys 2, 4 and 6, then print the balance again.
        self.displayTotal()
        amount = [2, 4, 6]
        self.updateTotal(amount)
        self.displayTotal()

    def initialiseWalletAddress(self):
        """Generate a fresh private key as the wallet address if none is set."""
        if self.walletAddress is None:
            kg = KeyGeneration.KeyGenerator()
            privk = kg.generate_key()
            self.walletAddress = privk

    def request_tokens_from_account(self):
        '''Function to sent transaction with payload: pubKey, blind(token ids), amounts'''
        # generate random token ids, each token key is associated with amount
        # blind the token ids: interactive back and forth scheme between user and signer
        # blind the amount with Pedersen commitments
        # generate public key for tokens to be sent to
        # connect to fabric gateway
        # retrieve account identity from Fabric wallet i.e. Alice 'account' credentials
        # invoke Fabric chaincode with transaction payload: pubKey, blind(token ids), amounts
        # For this transaction endorsement should only verify that the transaction has been correctly built
        # and that the account is legitimate
        # The chaincode function invocation returns a transaction response which in our case should contain the signed blinded tokens
        # These will be added to the wallet database?

    def send_to_pw(self):
        # Not implemented: send tokens to another personal wallet.
        pass

    def send_to_account(self):
        '''Function which takes a set of signed(blinded(tokens)) unblinds them
        and sends a transaction with payload: signed(tokens), amount, receiver identity ('Bob')'''
        # In this case the network endorsing peers will only verify the signature

    def receive(self, token):
        """Store a received token: generate a key pair for it and persist it."""
        kg = KeyGeneration.KeyGenerator()
        privk = kg.generate_key()
        pk = kg.private2public(privk)
        InsertReceived = """INSERT INTO wallet(PrivateKey, PublicKey, Amount, ReceivedTimeStamp)
                            VALUES(?, ?, ?, ?)"""
        parameters = (privk, pk, token.amount, token.receivedtime)
        conn = cd.createConnection()
        cd.sqlInsert(conn, InsertReceived, parameters)
        print("Successfully Received")

    def send(self, recipientAddress):
        # Not implemented: send tokens to the given recipient address.
        pass

    def updateTotal(self, amount):
        """Delete the transacted tokens; `amount` is a list of primary keys."""
        sqlDelete = "DELETE FROM Wallet WHERE id = ?"
        conn = cd.createConnection()
        # NOTE(review): assumes cd.sqlDelete executes the statement once per
        # id in `amount` — confirm against CreateDatabase.
        cd.sqlDelete(conn, sqlDelete, amount)

    def displayTotal(self):
        """Print the summed amount of all tokens in the wallet."""
        conn = cd.createConnection()
        SQLQuery = """SELECT SUM(AMOUNT) FROM wallet"""
        rows = cd.sqlSelect(conn, SQLQuery)
        # NOTE(review): SUM() is NULL for an empty table, so this prints
        # "Total is £None" when the wallet holds no tokens.
        print("Total is £" + str(rows[0][0]))

    def createTransactionAddress(self):
        # Stealth Address: Wallet Address + Private Key of Tokens + Random Data
        pass
def main():
    """Create the wallet table (if missing) and run the ColdWallet demo."""
    conn = cd.createConnection()
    sqlCreateWallet = """ CREATE TABLE IF NOT EXISTS wallet (
                                        id integer PRIMARY KEY,
                                        PrivateKey VARCHAR(130) NOT NULL,
                                        PublicKey VARCHAR(130) NOT NULL,
                                        Amount FLOAT NOT NULL,
                                        ReceivedTimeStamp TIMESTAMP NOT NULL
                                    ); """
    cd.sqlCreate(conn, sqlCreateWallet)
    # Constructing the wallet triggers its demo flow (see ColdWallet.__init__).
    w = ColdWallet()

if __name__ == "__main__":
    main()
'''
When sending - you just need to know the recipient's address
(stealth address - wallet address and random data)
When receiving - connect to the ledger?
Transaction - sender address, recipient address, amount (list of tokens)
''' | import CreateDatabase as cd
import KeyGeneration as KeyGeneration
import Token as Token
import datetime
class ColdWallet:
    """Cold wallet that stores tokens (key pair + amount) in a local SQLite table.

    NOTE: the original class defined `receive` and `send` twice each; Python
    keeps only the last binding, so the earlier dead (shadowed) definitions
    have been removed here without changing behaviour.
    """

    def __init__(self):
        self.TotalAmount = 0
        self.walletAddress = None
        self.walletObj = None
        self.initialiseWalletAddress()
        # Demo flow kept from the original: print the balance, delete the
        # tokens with primary keys 2, 4 and 6, then print the balance again.
        self.displayTotal()
        amount = [2, 4, 6]
        self.updateTotal(amount)
        self.displayTotal()

    def initialiseWalletAddress(self):
        """Generate a fresh private key as the wallet address if none is set."""
        if self.walletAddress is None:
            kg = KeyGeneration.KeyGenerator()
            privk = kg.generate_key()
            self.walletAddress = privk

    def request_tokens_from_account(self):
        '''Function to sent transaction with payload: pubKey, blind(token ids), amounts'''
        # generate random token ids, each token key is associated with amount
        # blind the token ids: interactive back and forth scheme between user and signer
        # blind the amount with Pedersen commitments
        # generate public key for tokens to be sent to
        # connect to fabric gateway
        # retrieve account identity from Fabric wallet i.e. Alice 'account' credentials
        # invoke Fabric chaincode with transaction payload: pubKey, blind(token ids), amounts
        # For this transaction endorsement should only verify that the transaction has been correctly built
        # and that the account is legitimate
        # The chaincode function invocation returns a transaction response which in our case should contain the signed blinded tokens
        # These will be added to the wallet database?

    def send_to_pw(self):
        # Not implemented: send tokens to another personal wallet.
        pass

    def send_to_account(self):
        '''Function which takes a set of signed(blinded(tokens)) unblinds them
        and sends a transaction with payload: signed(tokens), amount, receiver identity ('Bob')'''
        # In this case the network endorsing peers will only verify the signature

    def receive(self, token):
        """Store a received token: generate a key pair for it and persist it."""
        kg = KeyGeneration.KeyGenerator()
        privk = kg.generate_key()
        pk = kg.private2public(privk)
        InsertReceived = """INSERT INTO wallet(PrivateKey, PublicKey, Amount, ReceivedTimeStamp)
                            VALUES(?, ?, ?, ?)"""
        parameters = (privk, pk, token.amount, token.receivedtime)
        conn = cd.createConnection()
        cd.sqlInsert(conn, InsertReceived, parameters)
        print("Successfully Received")

    def send(self, recipientAddress):
        # Not implemented: send tokens to the given recipient address.
        pass

    def updateTotal(self, amount):
        """Delete the transacted tokens; `amount` is a list of primary keys."""
        sqlDelete = "DELETE FROM Wallet WHERE id = ?"
        conn = cd.createConnection()
        # NOTE(review): assumes cd.sqlDelete executes the statement once per
        # id in `amount` — confirm against CreateDatabase.
        cd.sqlDelete(conn, sqlDelete, amount)

    def displayTotal(self):
        """Print the summed amount of all tokens in the wallet."""
        conn = cd.createConnection()
        SQLQuery = """SELECT SUM(AMOUNT) FROM wallet"""
        rows = cd.sqlSelect(conn, SQLQuery)
        # NOTE(review): SUM() is NULL for an empty table, so this prints
        # "Total is £None" when the wallet holds no tokens.
        print("Total is £" + str(rows[0][0]))

    def createTransactionAddress(self):
        # Stealth Address: Wallet Address + Private Key of Tokens + Random Data
        pass
def main():
conn = cd.createConnection()
sqlCreateWallet = """ CREATE TABLE IF NOT EXISTS wallet (
id integer PRIMARY KEY,
PrivateKey VARCHAR(130) NOT NULL,
PublicKey VARCHAR(130) NOT NULL,
Amount FLOAT NOT NULL,
ReceivedTimeStamp TIMESTAMP NOT NULL
); """
cd.sqlCreate(conn, sqlCreateWallet)
w = ColdWallet()
if __name__ == "__main__":
main()
'''
When sending - you just need to know the recipients address
(stealth Address - wallet address and random data)
When receiving - connect to Ledger?
Transaction - sender address, recipient address, amount (list of tokens)
''' | en | 0.841631 | # self.displayTotal() # tk = Token.Token(50, datetime.datetime.now()) # self.receive(tk) # Do we need this function? Might only be useful when transaction is sent from another PW # since receiving transactions from account should be handled by request_tokens_from_account Function to sent transaction with payload: pubKey, blind(token ids), amounts # generate random token ids, each token key is associated with amount # blind the token ids: interactive back and forth scheme between user and signer # blind the amount with Pedersen commitments # generate public key for tokens to be sent to # connect to fabric gateway # retrieve account identity from Fabric wallet i.e. Alice 'account' credentials # invoke Fabric chaincode with transaction payload: pubKey, blind(token ids), amounts # For this transaction endorsement should only verify that the transaction has been correctly built # and that the account is legitimate # The chaincode function invocation returns a transaction response which in our case should contain the signed blinded tokens # These will be added to the wallet database? Function which takes a set of signed(blinded(tokens)) unblinds them and sends a transaction with payload: signed(tokens), amount, receiver identity ('Bob') # In this case the network endorsing peers will only verify the signature INSERT INTO wallet(PrivateKey, PublicKey, Amount, ReceivedTimeStamp) VALUES(?, ?, ?, ?) #Delete the tokens that were transacted #Amount is a list of primary keys SELECT SUM(AMOUNT) FROM wallet #Stealth Address: Wallet Address + Private Key of Tokens + Random Data CREATE TABLE IF NOT EXISTS wallet ( id integer PRIMARY KEY, PrivateKey VARCHAR(130) NOT NULL, PublicKey VARCHAR(130) NOT NULL, Amount FLOAT NOT NULL, ReceivedTimeStamp TIMESTAMP NOT NULL ); When sending - you just need to know the recipients address (stealth Address - wallet address and random data) When receiving - connect to Ledger? 
Transaction - sender address, recipient address, amount (list of tokens) | 2.582943 | 3 |
admin/scripts/delete-entity.py | protein-bioinformatics/STAMPS | 1 | 6617881 | <reponame>protein-bioinformatics/STAMPS
#!/usr/bin/python3
import sqlite3
from cgi import FieldStorage
import os
# Parse the engine configuration: one "key = value" pair per line; empty
# lines and lines starting with '#' are ignored.  A value containing '=' is
# truncated at the second '=' (same behaviour as before).
conf = {}
with open("../qsdb.conf", mode="rt") as config_file:
    for raw_line in config_file:
        stripped = raw_line.strip().strip(" ")
        if not stripped or stripped.startswith("#"):
            continue
        parts = stripped.split("=")
        if len(parts) < 2:
            continue
        conf[parts[0].strip(" ")] = parts[1].strip(" ")

# CGI parameters: the kind of entity to delete and its primary key.
form = FieldStorage()
entity_type = form.getvalue('type')
entity_id = form.getvalue('id')

# Minimal CGI response header (the body is a single status code).
print("Content-Type: text/html")
print()
def dict_rows(cur):
    """Return all remaining rows of *cur* as dicts keyed by column name.

    Each entry of ``cur.description`` is a 7-tuple whose first element is
    the column name, so the key must be ``k[0]``.  (The previous version
    used the whole description tuple as the key, which made every
    ``row["..."]`` lookup downstream fail with a KeyError.)
    """
    return [{k[0]: v for k, v in zip(cur.description, row)} for row in cur]
def dict_row(cur):
    """Fetch the next row of *cur* as a dict keyed by column name."""
    row = cur.fetchone()
    column_names = [col[0] for col in cur.description]
    return dict(zip(column_names, row))
# Reject anything that is not a known entity type with a numeric id before
# touching the database.  The script answers with a bare status code:
# -1 -> unknown or missing entity type, -2 -> malformed id, 0 -> success.
if not isinstance(entity_type, str) or not isinstance(entity_id, str) or entity_type not in ["node", "edge", "edge_direct", "protein", "metabolite", "pathway", "pathway_group", "species", "tissues", "loci_names"]:
    print(-1)
    exit()
try:
    int(entity_id)  # only validates; the queries below bind the id as a parameter
except (ValueError, TypeError):
    print(-2)
    exit()

# Open the project database; all deletions below run on this connection.
database = "%s/data/database.sqlite" % conf["root_path"]
conn = sqlite3.connect(database)
my_cur = conn.cursor()
# ---------------------------------------------------------------------------
# Deletion dispatch: every branch removes the entity itself plus all rows
# that reference it.  Each statement is committed right away, matching the
# one-commit-per-statement behaviour of the original script.
# ---------------------------------------------------------------------------
def _run(sql, params=()):
    """Execute one parameterized statement on the shared cursor and commit."""
    my_cur.execute(sql, params)
    conn.commit()

def _delete_pathway(pw_id):
    """Delete a single pathway together with everything that belongs to it."""
    # Reagents and reactions attached to the pathway's protein nodes.
    _run("DELETE FROM reagents WHERE reaction_id IN (SELECT r.id FROM reactions r INNER JOIN nodes n on r.node_id = n.id WHERE n.type = 'protein' AND pathway_id = ?);", (pw_id,))
    _run("DELETE FROM reactions WHERE node_id IN (SELECT id FROM nodes WHERE type = 'protein' AND pathway_id = ?);", (pw_id,))
    # Direct reactions touching any node of the pathway.
    _run("DELETE FROM reactions_direct WHERE node_id_start IN (SELECT id FROM nodes WHERE pathway_id = ?);", (pw_id,))
    _run("DELETE FROM reactions_direct WHERE node_id_end IN (SELECT id FROM nodes WHERE pathway_id = ?);", (pw_id,))
    # SQLite has no multi-table DELETE; the previous MySQL-style
    # "DELETE npc FROM ... INNER JOIN ..." raised an OperationalError.
    # A subquery expresses the same deletion.
    _run("DELETE FROM nodeproteincorrelations WHERE node_id IN (SELECT id FROM nodes WHERE pathway_id = ?);", (pw_id,))
    _run("DELETE FROM nodes WHERE type = 'pathway' AND foreign_id = ?;", (pw_id,))
    _run("DELETE FROM nodes WHERE pathway_id = ?;", (pw_id,))
    _run("DELETE FROM pathways WHERE id = ?;", (pw_id,))

if entity_type == "node":
    # A node row only references the real entity; look up its concrete type
    # and dispatch on that instead.
    print(entity_id)
    my_cur.execute("SELECT type FROM nodes WHERE id = ?;", (entity_id,))
    entity_type = dict_row(my_cur)["type"]
    if entity_type == "pathway":
        _run("DELETE FROM reactions_direct WHERE node_id_start = ?;", (entity_id,))
        _run("DELETE FROM reactions_direct WHERE node_id_end = ?;", (entity_id,))
        _run("DELETE FROM nodes WHERE id = ?;", (entity_id,))
    elif entity_type == "protein":
        _run("DELETE FROM reagents WHERE reaction_id IN (SELECT id FROM reactions WHERE node_id = ?);", (entity_id,))
        _run("DELETE FROM reactions WHERE node_id = ?;", (entity_id,))
        _run("DELETE FROM nodeproteincorrelations WHERE node_id = ?;", (entity_id,))
        _run("DELETE FROM nodes WHERE id = ?;", (entity_id,))
        _run("DELETE FROM reactions_direct WHERE node_id_start = ?;", (entity_id,))
        _run("DELETE FROM reactions_direct WHERE node_id_end = ?;", (entity_id,))
    elif entity_type == "metabolite":
        # Reactions owned by pathway nodes that use this metabolite node.
        my_cur.execute("SELECT rc.id FROM reactions rc INNER JOIN reagents r ON rc.id = r.reaction_id INNER JOIN nodes n ON rc.node_id = n.id WHERE r.node_id = ? AND n.type = 'pathway';", (entity_id,))
        del_reaction = ", ".join(str(row["id"]) for row in dict_rows(my_cur))
        if del_reaction:
            # Ids come straight from the database, so interpolating the list
            # is safe.  (The original used '"... IN (?);" % del_reaction',
            # which raised TypeError: no conversion specifier in the string.)
            _run("DELETE FROM reactions WHERE id IN (%s);" % del_reaction)
        _run("DELETE FROM reagents WHERE node_id = ?;", (entity_id,))
        _run("DELETE FROM nodes WHERE id = ?;", (entity_id,))
        _run("DELETE FROM reactions_direct WHERE node_id_start = ?;", (entity_id,))
        _run("DELETE FROM reactions_direct WHERE node_id_end = ?;", (entity_id,))
    elif entity_type in ["label", "membrane", "image", "invisible"]:
        # Decoration nodes: drop their payload row first (if any), then the
        # edges touching them, then the node itself.
        if entity_type == "label":
            _run("DELETE FROM labels WHERE id IN (SELECT foreign_id from nodes WHERE id = ?);", (entity_id,))
        elif entity_type == "image":
            _run("DELETE FROM images WHERE node_id = ?;", (entity_id,))
        _run("DELETE FROM reactions_direct WHERE node_id_start = ?;", (entity_id,))
        _run("DELETE FROM reactions_direct WHERE node_id_end = ?;", (entity_id,))
        _run("DELETE FROM nodes WHERE id = ?;", (entity_id,))
elif entity_type == "edge":
    _run("DELETE FROM reagents WHERE id = ?;", (entity_id,))
elif entity_type == "edge_direct":
    _run("DELETE FROM reactions_direct WHERE id = ?;", (entity_id,))
elif entity_type == "species":
    _run("DELETE FROM species WHERE id = ?;", (entity_id,))
elif entity_type == "tissues":
    _run("DELETE FROM tissues WHERE id = ?;", (entity_id,))
elif entity_type == "loci_names":
    _run("DELETE FROM loci_names WHERE id = ?;", (entity_id,))
    _run("DELETE FROM protein_loci WHERE locus_id = ?;", (entity_id,))
elif entity_type == "protein":
    # Protein master record (as opposed to a protein *node* handled above).
    _run("DELETE FROM protein_functions WHERE protein_id = ?;", (entity_id,))
    _run("DELETE FROM protein_loci WHERE protein_id = ?;", (entity_id,))
    _run("DELETE FROM nodeproteincorrelations WHERE protein_id = ?;", (entity_id,))
    _run("DELETE FROM proteins WHERE id = ?;", (entity_id,))
elif entity_type == "pathway":
    _delete_pathway(entity_id)
elif entity_type == "metabolite":
    # All nodes referencing this metabolite master record.
    my_cur.execute("SELECT n.id FROM nodes n INNER JOIN metabolites m ON n.foreign_id = m.id WHERE m.id = ? AND n.type = 'metabolite';", (entity_id,))
    del_nodes = ", ".join(str(row["id"]) for row in dict_rows(my_cur))
    if del_nodes:
        _run("DELETE FROM reagents WHERE node_id IN (%s);" % del_nodes)
        _run("DELETE FROM reactions_direct WHERE node_id_start IN (%s) OR node_id_end IN (%s);" % (del_nodes, del_nodes))
        _run("DELETE FROM nodes WHERE id IN (%s);" % del_nodes)
    _run("DELETE FROM metabolites WHERE id = ?;", (entity_id,))
elif entity_type == "pathway_group":
    my_cur.execute("SELECT id FROM pathways WHERE pathway_group_id = ?;", (entity_id,))
    for pw_id in [str(row["id"]) for row in dict_rows(my_cur)]:
        _delete_pathway(pw_id)
    _run("DELETE FROM pathway_groups WHERE id = ?;", (entity_id,))
conn.commit()
print(0) | #!/usr/bin/python3
import sqlite3
from cgi import FieldStorage
import os
conf = {}
with open("../qsdb.conf", mode="rt") as fl:
for line in fl:
line = line.strip().strip(" ")
if len(line) < 1 or line[0] == "#": continue
token = line.split("=")
if len(token) < 2: continue
conf[token[0].strip(" ")] = token[1].strip(" ")
form = FieldStorage()
entity_type = form.getvalue('type')
entity_id = form.getvalue('id')
print("Content-Type: text/html")
print()
def dict_rows(cur): return [{k: v for k, v in zip(cur.description, row)} for row in cur]
def dict_row(cur): return {k[0]: v for k, v in zip(cur.description, cur.fetchone())}
if type(entity_type) is not str or type(entity_id) is not str or entity_type not in ["node", "edge", "edge_direct", "protein", "metabolite", "pathway", "pathway_group", "species", "tissues", "loci_names"]:
print(-1)
exit()
try:
a = int(entity_id)
except:
print(-2)
exit()
database = "%s/data/database.sqlite" % conf["root_path"]
conn = sqlite3.connect(database)
my_cur = conn.cursor()
if entity_type == "node":
sql_query = "SELECT type FROM nodes WHERE id = ?;"
print(entity_id)
my_cur.execute(sql_query, (entity_id,))
entity_type = dict_row(my_cur)["type"]
if entity_type == "pathway":
sql_query = "DELETE FROM reactions_direct WHERE node_id_start = ?;"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
sql_query = "DELETE FROM reactions_direct WHERE node_id_end = ?;"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
sql_query = "DELETE FROM nodes WHERE id = ?;"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
elif entity_type == "protein":
sql_query = "DELETE FROM reagents WHERE reaction_id IN (SELECT id FROM reactions WHERE node_id = ?);"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
sql_query = "DELETE FROM reactions WHERE node_id = ?;"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
sql_query = "DELETE FROM nodeproteincorrelations WHERE node_id = ?;"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
sql_query = "DELETE FROM nodes WHERE id = ?;"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
sql_query = "DELETE FROM reactions_direct WHERE node_id_start = ?;"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
sql_query = "DELETE FROM reactions_direct WHERE node_id_end = ?;"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
elif entity_type == "metabolite":
sql_query = "SELECT rc.id FROM reactions rc INNER JOIN reagents r ON rc.id = r.reaction_id INNER JOIN nodes n ON rc.node_id = n.id WHERE r.node_id = ? AND n.type = 'pathway';" # reactions for pathways
my_cur.execute(sql_query, (entity_id,))
del_reaction = ", ".join(str(row["id"]) for row in dict_rows(my_cur))
if len(del_reaction) > 0:
sql_query = "DELETE FROM reactions WHERE id IN (?);" % del_reaction # reactions for pathways
my_cur.execute(sql_query)
conn.commit()
sql_query = "DELETE FROM reagents WHERE node_id = ?;"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
sql_query = "DELETE FROM nodes WHERE id = ?;"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
sql_query = "DELETE FROM reactions_direct WHERE node_id_start = ?;"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
sql_query = "DELETE FROM reactions_direct WHERE node_id_end = ?;"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
elif entity_type in ["label", "membrane", "image", "invisible"]:
if entity_type == "label":
sql_query = "DELETE FROM labels WHERE id IN (SELECT foreign_id from nodes WHERE id = ?);"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
elif entity_type == "image":
sql_query = "DELETE FROM images WHERE node_id = ?;"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
sql_query = "DELETE FROM reactions_direct WHERE node_id_start = ?;"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
sql_query = "DELETE FROM reactions_direct WHERE node_id_end = ?;"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
sql_query = "DELETE FROM nodes WHERE id = ?;"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
elif entity_type == "edge":
sql_query = "DELETE FROM reagents WHERE id = ?;"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
elif entity_type == "edge_direct":
sql_query = "DELETE FROM reactions_direct WHERE id = ?;"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
elif entity_type == "species":
sql_query = "DELETE FROM species WHERE id = ?;"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
elif entity_type == "tissues":
sql_query = "DELETE FROM tissues WHERE id = ?;"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
elif entity_type == "loci_names":
sql_query = "DELETE FROM loci_names WHERE id = ?;"
my_cur.execute(sql_query, (entity_id,))
sql_query = "DELETE FROM protein_loci WHERE locus_id = ?;"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
elif entity_type == "protein":
sql_query = "DELETE FROM protein_functions WHERE protein_id = ?;"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
sql_query = "DELETE FROM protein_loci WHERE protein_id = ?;"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
sql_query = "DELETE FROM nodeproteincorrelations WHERE protein_id = ?;"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
sql_query = "DELETE FROM proteins WHERE id = ?;"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
elif entity_type == "pathway":
sql_query = "DELETE FROM reagents WHERE reaction_id IN (SELECT r.id FROM reactions r INNER JOIN nodes n on r.node_id = n.id WHERE n.type = 'protein' AND pathway_id = ?);"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
sql_query = "DELETE FROM reactions WHERE node_id IN (SELECT id FROM nodes WHERE type = 'protein' AND pathway_id = ?);"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
sql_query = "DELETE FROM reactions_direct WHERE node_id_start IN (SELECT id FROM nodes WHERE pathway_id = ?);"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
sql_query = "DELETE FROM reactions_direct WHERE node_id_end IN (SELECT id FROM nodes WHERE pathway_id = ?);"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
sql_query = "DELETE npc FROM nodeproteincorrelations npc INNER JOIN nodes n on npc.node_id = n.id WHERE n.pathway_id = ?;"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
sql_query = "DELETE FROM nodes WHERE type = 'pathway' AND foreign_id = ?;"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
sql_query = "DELETE FROM nodes WHERE pathway_id = ?;"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
sql_query = "DELETE FROM pathways WHERE id = ?;"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
elif entity_type == "metabolite":
sql_query = "SELECT n.id FROM nodes n INNER JOIN metabolites m ON n.foreign_id = m.id WHERE m.id = ? AND n.type = 'metabolite';"
my_cur.execute(sql_query, (entity_id,))
del_nodes = ", ".join(str(row["id"]) for row in dict_rows(my_cur))
if len(del_nodes) > 0:
sql_query = "DELETE FROM reagents WHERE node_id IN (%s);" % del_nodes
my_cur.execute(sql_query)
conn.commit()
sql_query = "DELETE FROM reactions_direct WHERE node_id_start IN (%s) OR node_id_end IN (%s);" % (del_nodes, del_nodes)
my_cur.execute(sql_query)
conn.commit()
sql_query = "DELETE FROM nodes WHERE id IN (%s);" % del_nodes
my_cur.execute(sql_query)
conn.commit()
sql_query = "DELETE FROM metabolites WHERE id = ?;"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
elif entity_type == "pathway_group":
sql_query = "SELECT id FROM pathways WHERE pathway_group_id = ?;" # reactions for pathways
my_cur.execute(sql_query, (entity_id,))
ids_for_delete = [str(row["id"]) for row in dict_rows(my_cur)]
for pw_id in ids_for_delete:
sql_query = "DELETE FROM reagents WHERE reaction_id IN (SELECT r.id FROM reactions r INNER JOIN nodes n on r.node_id = n.id WHERE n.type = 'protein' AND pathway_id = ?);"
my_cur.execute(sql_query, (pw_id,))
conn.commit()
sql_query = "DELETE FROM reactions WHERE node_id IN (SELECT id FROM nodes WHERE type = 'protein' AND pathway_id = ?);"
my_cur.execute(sql_query, (pw_id,))
conn.commit()
sql_query = "DELETE FROM reactions_direct WHERE node_id_start IN (SELECT id FROM nodes WHERE pathway_id = ?);"
my_cur.execute(sql_query, (pw_id,))
conn.commit()
sql_query = "DELETE FROM reactions_direct WHERE node_id_end IN (SELECT id FROM nodes WHERE pathway_id = ?);"
my_cur.execute(sql_query, (pw_id,))
conn.commit()
sql_query = "DELETE FROM nodeproteincorrelations npc INNER JOIN nodes n on npc.node_id = n.id WHERE n.pathway_id = ?;"
my_cur.execute(sql_query, (pw_id,))
conn.commit()
sql_query = "DELETE FROM nodes WHERE type = 'pathway' AND foreign_id = ?;"
my_cur.execute(sql_query, (pw_id,))
conn.commit()
sql_query = "DELETE FROM nodes WHERE pathway_id = ?;"
my_cur.execute(sql_query, (pw_id,))
conn.commit()
sql_query = "DELETE FROM pathways WHERE id = ?;"
my_cur.execute(sql_query, (pw_id,))
conn.commit()
sql_query = "DELETE FROM pathway_groups WHERE id = ?;"
my_cur.execute(sql_query, (entity_id,))
conn.commit()
conn.commit()
print(0) | en | 0.775741 | #!/usr/bin/python3 # reactions for pathways # reactions for pathways # reactions for pathways | 2.675101 | 3 |
phishsense/modelqueue/urls.py | sidin/phishsense | 0 | 6617882 | <reponame>sidin/phishsense
from django.conf.urls import url
from . import views, loginviews
app_name = 'modelqueue'

# Page views.
_page_urlpatterns = [
    url(r'^$', views.IndexPageView.as_view(), name='index'),
    url(r'^index/$', views.IndexPageView.as_view(), name='index'),
    url(r'^status/(?P<uuid>[0-9a-z-]+)/$', views.check_status, name='check_status'),
    url(r'^signin/$', loginviews.signin, name='signin'),
]

# URLs for the individual stages of finding a verdict - the Django way.
_stage_urlpatterns = [
    url(r'^crawl_url/$', views.crawl_url, name='crawl_url'),
    url(r'^extract_features/$', views.extract_features, name='extract_features'),
    url(r'^query_model/$', views.query_model, name='query_model'),
    url(r'^show_result/$', views.show_result, name='show_result'),
]

# URLs enabling offloading tasks to the caller (REST APIs).
_api_urlpatterns = [
    url(r'^api/v1/request_task/$', views.request_task, name='request_task'),
    url(r'^api/v1/request_task_bulk/(?P<count>[0-9]+)/$', views.request_task_bulk, name='request_task_bulk'),
    url(r'^api/v1/respond_task_results/$', views.respond_task_results, name='respond_task_results'),
    url(r'^api/v1/post_single_analysisurl/$', views.post_single_analysisurl, name='post_single_analysisurl'),
    url(r'^api/v1/get_single_verdict/$', views.get_single_verdict, name='get_single_verdict'),
]

# Resolution order is preserved: pages, then verdict stages, then the API.
urlpatterns = _page_urlpatterns + _stage_urlpatterns + _api_urlpatterns
| from django.conf.urls import url
from . import views, loginviews
app_name = 'modelqueue'
urlpatterns = [
url(r'^$', views.IndexPageView.as_view(), name='index'),
url(r'^index/$', views.IndexPageView.as_view(), name='index'),
url(r'^status/(?P<uuid>[0-9a-z-]+)/$', views.check_status, name='check_status'),
url(r'^signin/$', loginviews.signin, name='signin'),
# URLs for various stages of finding verdict - Django way
url(r'^crawl_url/$', views.crawl_url, name='crawl_url'),
url(r'^extract_features/$', views.extract_features, name='extract_features'),
url(r'^query_model/$', views.query_model, name='query_model'),
url(r'^show_result/$', views.show_result, name='show_result'),
# URLs enabling offloading tasks to caller (enabling REST APIs)
url(r'^api/v1/request_task/$', views.request_task, name='request_task'),
url(r'^api/v1/request_task_bulk/(?P<count>[0-9]+)/$', views.request_task_bulk, name='request_task_bulk'),
url(r'^api/v1/respond_task_results/$', views.respond_task_results, name='respond_task_results'),
url(r'^api/v1/post_single_analysisurl/$', views.post_single_analysisurl, name='post_single_analysisurl'),
url(r'^api/v1/get_single_verdict/$', views.get_single_verdict, name='get_single_verdict'),
] | en | 0.644585 | # URLs for various stages of finding verdict - Django way # URLs enabling offloading tasks to caller (enabling REST APIs) | 1.944419 | 2 |
tools/esbuild/index.bzl | dgp1130/components | 0 | 6617883 | load("@npm//@bazel/esbuild:index.bzl", _esbuild = "esbuild", _esbuild_config = "esbuild_config")
load("@npm//@angular/dev-infra-private/bazel:expand_template.bzl", "expand_template")
load("//tools/esbuild:devmode-output.bzl", "extract_devmode_output_with_mappings")
# Re-export of the actual esbuild definitions.
esbuild_config = _esbuild_config
def esbuild(name, deps = [], mapping_targets = [], testonly = False, **kwargs):
    """Wraps the upstream esbuild rule but only requests devmode (ESM) output.

    The stock rule requests both devmode and prodmode outputs, which slows
    down the development turnaround significantly.  The devmode sources are
    ESM already, so they are sufficient for bundling.

    Note: `mapping_targets` is accepted for call-site compatibility and is
    currently unused.
    """
    esm_deps = extract_devmode_output_with_mappings(name, deps, testonly)

    _esbuild(
        name = name,
        deps = esm_deps,
        testonly = testonly,
        **kwargs
    )
"""Generates an AMD bundle for the specified entry-point with the given AMD module name."""
def esbuild_amd(name, entry_point, module_name, testonly = False, config = None, deps = [], **kwargs):
expand_template(
name = "%s_config" % name,
testonly = testonly,
template = "//tools/esbuild:esbuild-amd-config.mjs",
output_name = "%s_config.mjs" % name,
substitutions = {
"TMPL_MODULE_NAME": module_name,
"TMPL_CONFIG_PATH": "$(execpath %s)" % config if config else "",
},
data = [config] if config else None,
)
_esbuild_config(
name = "%s_config_lib" % name,
testonly = testonly,
config_file = "%s_config" % name,
# Adds the user configuration and its deps as dependency of the AMD ESBuild config.
# https://github.com/bazelbuild/rules_nodejs/blob/a892caf5a040bae5eeec174a3cf6250f02caf364/packages/esbuild/esbuild_config.bzl#L23.
deps = [config, "%s_deps" % config] if config else None,
)
esbuild(
name = name,
testonly = testonly,
deps = deps,
entry_point = entry_point,
config = "%s_config_lib" % name,
**kwargs
)
| load("@npm//@bazel/esbuild:index.bzl", _esbuild = "esbuild", _esbuild_config = "esbuild_config")
load("@npm//@angular/dev-infra-private/bazel:expand_template.bzl", "expand_template")
load("//tools/esbuild:devmode-output.bzl", "extract_devmode_output_with_mappings")
# Re-export of the actual esbuild definitions.
esbuild_config = _esbuild_config
def esbuild(name, deps = [], mapping_targets = [], testonly = False, **kwargs):
# Extract all JS module sources before passing to ESBuild. The ESBuild rule requests
# both the devmode and prodmode unfortunately and this would slow-down the development
# turnaround significantly. We only request the devmode sources which are ESM as well.
devmode_targets = extract_devmode_output_with_mappings(name, deps, testonly)
_esbuild(
name = name,
deps = devmode_targets,
testonly = testonly,
**kwargs
)
"""Generates an AMD bundle for the specified entry-point with the given AMD module name."""
def esbuild_amd(name, entry_point, module_name, testonly = False, config = None, deps = [], **kwargs):
expand_template(
name = "%s_config" % name,
testonly = testonly,
template = "//tools/esbuild:esbuild-amd-config.mjs",
output_name = "%s_config.mjs" % name,
substitutions = {
"TMPL_MODULE_NAME": module_name,
"TMPL_CONFIG_PATH": "$(execpath %s)" % config if config else "",
},
data = [config] if config else None,
)
_esbuild_config(
name = "%s_config_lib" % name,
testonly = testonly,
config_file = "%s_config" % name,
# Adds the user configuration and its deps as dependency of the AMD ESBuild config.
# https://github.com/bazelbuild/rules_nodejs/blob/a892caf5a040bae5eeec174a3cf6250f02caf364/packages/esbuild/esbuild_config.bzl#L23.
deps = [config, "%s_deps" % config] if config else None,
)
esbuild(
name = name,
testonly = testonly,
deps = deps,
entry_point = entry_point,
config = "%s_config_lib" % name,
**kwargs
)
| en | 0.838639 | # Re-export of the actual esbuild definitions. # Extract all JS module sources before passing to ESBuild. The ESBuild rule requests # both the devmode and prodmode unfortunately and this would slow-down the development # turnaround significantly. We only request the devmode sources which are ESM as well. Generates an AMD bundle for the specified entry-point with the given AMD module name. # Adds the user configuration and its deps as dependency of the AMD ESBuild config. # https://github.com/bazelbuild/rules_nodejs/blob/a892caf5a040bae5eeec174a3cf6250f02caf364/packages/esbuild/esbuild_config.bzl#L23. | 1.625543 | 2 |
tests/task/test_localvariablesupdate.py | asyncee/pycamunda | 0 | 6617884 | # -*- coding: utf-8 -*-
import io
import unittest.mock
import pytest
import pycamunda.task
from tests.mock import raise_requests_exception_mock, not_ok_response_mock
def test_localvariablesupdate_params(engine_url):
    """Plain values address .../localVariables/<name> and ship value/type/valueInfo."""
    request = pycamunda.task.LocalVariablesUpdate(
        url=engine_url, task_id='anId', var_name='aVar', value='aVal', type_='String', value_info={}
    )

    expected_url = engine_url + '/task/anId/localVariables/aVar'
    assert request.url == expected_url
    assert request.query_parameters() == {}
    assert request.body_parameters() == {'value': 'aVal', 'type': 'String', 'valueInfo': {}}
def test_localvariablesupdate_binary_params(engine_url):
    """File/Bytes values address the .../data endpoint and only send the value type."""
    for binary_type in ('File', 'Bytes'):
        request = pycamunda.task.LocalVariablesUpdate(
            url=engine_url, task_id='anId', var_name='aVar', value=io.StringIO('myfile'), type_=binary_type
        )
        assert request.url == engine_url + '/task/anId/localVariables/aVar/data'
        assert request.query_parameters() == {}
        assert request.body_parameters() == {'valueType': binary_type}
@unittest.mock.patch('requests.Session.request')
def test_localvariablesupdate_calls_requests(mock, engine_url):
    """The command issues exactly one HTTP request using the PUT method."""
    update_var = pycamunda.task.LocalVariablesUpdate(
        url=engine_url, task_id='anId', var_name='aVar', value='aVal', type_='String', value_info={}
    )
    update_var()
    assert mock.called
    assert mock.call_args[1]['method'].upper() == 'PUT'
@unittest.mock.patch('requests.Session.request')
def test_localvariablesupdate_binary_calls_requests(mock, engine_url):
    """Binary-valued (Bytes) updates also issue an HTTP request."""
    update_var = pycamunda.task.LocalVariablesUpdate(
        url=engine_url, task_id='anId', var_name='aVar', value=io.StringIO('myfile'), type_='Bytes'
    )
    update_var()
    assert mock.called
@unittest.mock.patch('requests.Session.request', raise_requests_exception_mock)
def test_localvariablesupdate_raises_pycamunda_exception(engine_url):
    """Transport-level errors surface as PyCamundaException."""
    update_var = pycamunda.task.LocalVariablesUpdate(
        url=engine_url, task_id='anId', var_name='aVar', value='aVal', type_='String', value_info={}
    )
    with pytest.raises(pycamunda.PyCamundaException):
        update_var()
@unittest.mock.patch('requests.Session.request', raise_requests_exception_mock)
def test_localvariablesupdate_binary_raises_pycamunda_exception(engine_url):
    """Transport-level errors surface as PyCamundaException for binary values too."""
    update_var = pycamunda.task.LocalVariablesUpdate(
        url=engine_url, task_id='anId', var_name='aVar', value=io.StringIO('myfile'), type_='Bytes'
    )
    with pytest.raises(pycamunda.PyCamundaException):
        update_var()
@unittest.mock.patch('requests.Session.request', not_ok_response_mock)
@unittest.mock.patch('pycamunda.base._raise_for_status')
def test_localvariablesupdate_raises_for_status(mock, engine_url):
    """Non-OK HTTP responses are routed through pycamunda.base._raise_for_status."""
    update_var = pycamunda.task.LocalVariablesUpdate(
        url=engine_url, task_id='anId', var_name='aVar', value='aVal', type_='String', value_info={}
    )
    update_var()
    assert mock.called
@unittest.mock.patch('requests.Session.request', unittest.mock.MagicMock())
def test_localvariablesupdate_returns_none(engine_url):
    """A successful update has no payload and returns None."""
    update_var = pycamunda.task.LocalVariablesUpdate(
        url=engine_url, task_id='anId', var_name='aVar', value='aVal', type_='String', value_info={}
    )
    result = update_var()
    assert result is None
| # -*- coding: utf-8 -*-
import io
import unittest.mock
import pytest
import pycamunda.task
from tests.mock import raise_requests_exception_mock, not_ok_response_mock
def test_localvariablesupdate_params(engine_url):
update_var = pycamunda.task.LocalVariablesUpdate(
url=engine_url, task_id='anId', var_name='aVar', value='aVal', type_='String', value_info={}
)
assert update_var.url == engine_url + '/task/anId/localVariables/aVar'
assert update_var.query_parameters() == {}
assert update_var.body_parameters() == {'value': 'aVal', 'type': 'String', 'valueInfo': {}}
def test_localvariablesupdate_binary_params(engine_url):
update_var1 = pycamunda.task.LocalVariablesUpdate(
url=engine_url, task_id='anId', var_name='aVar', value=io.StringIO('myfile'), type_='File'
)
update_var2 = pycamunda.task.LocalVariablesUpdate(
url=engine_url, task_id='anId', var_name='aVar', value=io.StringIO('myfile'), type_='Bytes'
)
assert update_var1.url == engine_url + '/task/anId/localVariables/aVar/data'
assert update_var2.url == engine_url + '/task/anId/localVariables/aVar/data'
assert update_var1.query_parameters() == {}
assert update_var2.query_parameters() == {}
assert update_var1.body_parameters() == {'valueType': 'File'}
assert update_var2.body_parameters() == {'valueType': 'Bytes'}
@unittest.mock.patch('requests.Session.request')
def test_localvariablesupdate_calls_requests(mock, engine_url):
update_var = pycamunda.task.LocalVariablesUpdate(
url=engine_url, task_id='anId', var_name='aVar', value='aVal', type_='String', value_info={}
)
update_var()
assert mock.called
assert mock.call_args[1]['method'].upper() == 'PUT'
@unittest.mock.patch('requests.Session.request')
def test_localvariablesupdate_binary_calls_requests(mock, engine_url):
update_var = pycamunda.task.LocalVariablesUpdate(
url=engine_url, task_id='anId', var_name='aVar', value=io.StringIO('myfile'), type_='Bytes'
)
update_var()
assert mock.called
@unittest.mock.patch('requests.Session.request', raise_requests_exception_mock)
def test_localvariablesupdate_raises_pycamunda_exception(engine_url):
update_var = pycamunda.task.LocalVariablesUpdate(
url=engine_url, task_id='anId', var_name='aVar', value='aVal', type_='String', value_info={}
)
with pytest.raises(pycamunda.PyCamundaException):
update_var()
@unittest.mock.patch('requests.Session.request', raise_requests_exception_mock)
def test_localvariablesupdate_binary_raises_pycamunda_exception(engine_url):
update_var = pycamunda.task.LocalVariablesUpdate(
url=engine_url, task_id='anId', var_name='aVar', value=io.StringIO('myfile'), type_='Bytes'
)
with pytest.raises(pycamunda.PyCamundaException):
update_var()
@unittest.mock.patch('requests.Session.request', not_ok_response_mock)
@unittest.mock.patch('pycamunda.base._raise_for_status')
def test_localvariablesupdate_raises_for_status(mock, engine_url):
update_var = pycamunda.task.LocalVariablesUpdate(
url=engine_url, task_id='anId', var_name='aVar', value='aVal', type_='String', value_info={}
)
update_var()
assert mock.called
@unittest.mock.patch('requests.Session.request', unittest.mock.MagicMock())
def test_localvariablesupdate_returns_none(engine_url):
update_var = pycamunda.task.LocalVariablesUpdate(
url=engine_url, task_id='anId', var_name='aVar', value='aVal', type_='String', value_info={}
)
result = update_var()
assert result is None
| en | 0.769321 | # -*- coding: utf-8 -*- | 2.329362 | 2 |
leetcode/0016_3Sum_Closest/3sum_closest.py | zyeak/leetcode | 0 | 6617885 | <reponame>zyeak/leetcode<gh_stars>0
# Solution: two-pointers
class Solution:
    """LeetCode 16 - 3Sum Closest, solved with sort + two pointers in O(n^2)."""

    # Annotations use the builtin generic list[int]; the file never imported
    # typing.List, so the original `List[int]` raised NameError on import.
    def threeSumClosest(self, nums: list[int], target: int) -> int:
        """Return the sum of the three numbers in *nums* closest to *target*.

        Assumes len(nums) >= 3, as guaranteed by the problem statement.
        """
        nums.sort()
        # Seed with the first valid triple so the comparisons below always work.
        result = nums[0] + nums[1] + nums[2]
        for i in range(len(nums) - 2):
            # Skip duplicate anchors; they cannot produce a new best sum.
            if i > 0 and nums[i] == nums[i - 1]:
                continue
            left, right = i + 1, len(nums) - 1
            while left < right:
                cur = nums[i] + nums[left] + nums[right]
                if abs(cur - target) < abs(result - target):
                    result = cur
                if cur < target:
                    left += 1
                elif cur > target:
                    right -= 1
                else:
                    # Exact hit - nothing can be closer than the target itself.
                    return result
        return result
class Solution:
    def threeSumClosest(self, nums: List[int], target: int) -> int:
        """Return the sum of three elements of nums that is closest to target."""
        nums.sort()
        # Seed with the first candidate triple.
        result = nums[0] + nums[1] + nums[2]
        for i in range(len(nums)-2):
            # Equal anchor values explore the same triples; skip them.
            if i >0 and nums[i] == nums[i-1]:
                continue
            left, right = i+1, len(nums)-1
            # Two-pointer sweep over the sorted suffix.
            while left < right:
                cur = nums[i] + nums[left] + nums[right]
                if abs(cur - target) < abs(result - target):
                    result = cur
                if cur < target:
                    left += 1
                elif cur > target:
                    right -= 1
                else:
                    # Exact match: nothing can be closer.
                    return result
return result | en | 0.984025 | # Solution: two-pointers | 3.491071 | 3 |
Python/kraken/ui/DataTypeWidgets/DictWidgetImpl.py | FabricExile/Kraken | 7 | 6617886 | import copy
from PySide import QtGui, QtCore
from AttributeWidgetImpl import AttributeWidget
from parameter import Parameter
class DictWidget(AttributeWidget):
    """Inspector widget for editing a dict attribute keyed by String.

    One grid row is built per entry: an editable key widget, a value widget
    and (when editable) a per-row 'remove' button, plus a trailing 'add'
    button that inserts a new entry under a generated unique key.
    """

    def __init__(self, attribute, parentWidget=None, addNotificationListener = True):
        super(DictWidget, self).__init__(attribute, parentWidget=parentWidget, addNotificationListener = addNotificationListener)

        vbox = QtGui.QVBoxLayout()
        vbox.setSpacing(2)
        self.setLayout(vbox)
        self._grid = QtGui.QGridLayout()
        self._grid.setContentsMargins(0, 0, 0, 0)
        gridWidget = QtGui.QWidget(self)
        gridWidget.setLayout(self._grid)
        vbox.addWidget(gridWidget, 1)
        # NOTE(review): setLayout was already called with vbox above; this
        # second call with the grid looks redundant -- confirm intent.
        self.setLayout(self._grid)
        self.setSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Preferred)

        # Behaviour options supplied by the attribute definition.
        self.__enableAddRemoveElements = attribute.getOption('enableAddRemoveElements', True)
        self.__addElementButtonLabel = attribute.getOption('addElementButtonLabel', 'add')
        self.__removeElementButtonLabel = attribute.getOption('removeElementButtonLabel', 'remove')
        self.__defaultKeyValue = attribute.getOption('defaultKeyValue', 'key')
        self._dataType = attribute.getDataType()
        # print attribute.getOwner().getScene().getFabricClient().RT.getAggregateMemberInfo(self._dataType)
        # raise Exception("Foo")
        self.determineElementType()
        self.build()

    def determineElementType(self):
        """Derive the element value type from the dict data type string."""
        # Determine the element value type from the value type of the array.
        openBraceIdx = self._dataType.find('[')
        closeBraceIdx = self._dataType.find(']')
        keyType = self._dataType[openBraceIdx+1:closeBraceIdx]
        # e.g. a data type of the form 'Value[Key]' yields 'Value'.
        self._elementValueType = self._dataType.replace('['+keyType+']', '', 1)

    def build(self):
        """Create one row of key/value sub-widgets per entry of the dict."""
        self.__value = self._invokeGetter()
        self.__keywidgets = {}
        self.__widgets = {}
        # this dictionary maps the keys used in the initial dict passed in, and the new keys as they are modified in the UI.
        # this is required because you can't modify the 'attrName' value defined in the closure below.
        self.__attrNameMapping = {}

        def constructWidget(index, attrName, attrValueType):
            # Build the key widget, value widget and (optionally) the
            # per-row 'remove' button for a single dict entry.
            self.__attrNameMapping[attrName] = attrName
            def keyGetter():
                return attrName
            if self.isEditable():
                def keySetter(key):
                    # Re-key the entry and all bookkeeping dicts, then push
                    # the whole dict back through the attribute setter.
                    value = self.__value[self.__attrNameMapping[attrName]]
                    del self.__value[self.__attrNameMapping[attrName]]
                    self.__value[key] = value
                    keyWidget = self.__keywidgets[self.__attrNameMapping[attrName]]
                    valueWidget = self.__widgets[self.__attrNameMapping[attrName]]
                    del self.__keywidgets[self.__attrNameMapping[attrName]]
                    del self.__widgets[self.__attrNameMapping[attrName]]
                    self.__keywidgets[key] = keyWidget
                    self.__widgets[key] = valueWidget
                    self.__attrNameMapping[attrName] = key
                    self._invokeSetter(self.__value)
            else:
                keySetter = None
            # sub-widgets should initialize their values.
            keyParam = Parameter(
                controller=self.getController(),
                name="",
                portType=self._attribute.getPortType(),
                dataType = 'String',
                getterFn = keyGetter,
                setterFn = keySetter
            )
            keyWidget = AttributeWidget.constructAttributeWidget(self.getController(), keyParam, parentWidget=self, addNotificationListener=False)
            def valueGetter():
                return self.__value[self.__attrNameMapping[attrName]]
            if self.isEditable():
                def valueSetter(value):
                    self.__value[self.__attrNameMapping[attrName]] = value
                    self._invokeSetter(self.__value)
            else:
                valueSetter = None
            valueParam = Parameter(
                controller=self.getController(),
                name="",
                portType=self._attribute.getPortType(),
                dataType = attrValueType,
                getterFn = valueGetter,
                setterFn = valueSetter
            )
            valueWidget = AttributeWidget.constructAttributeWidget(self.getController(), valueParam, parentWidget=self, addNotificationListener=False)
            # self._grid.addWidget(QtGui.QLabel(attrName, self), index, 0, QtCore.Qt.AlignRight | QtCore.Qt.AlignTop)
            self._grid.addWidget(keyWidget, index, 0, QtCore.Qt.AlignRight | QtCore.Qt.AlignTop)
            self._grid.addWidget(valueWidget, index, 1)
            self.__keywidgets[attrName] = keyWidget
            self.__widgets[attrName] = valueWidget
            if self.isEditable() and self.__enableAddRemoveElements:
                removeElementButton = QtGui.QPushButton(self.__removeElementButtonLabel, self)
                def removeElement():
                    # Rebuild the RTVal dict without this entry.
                    self.getController().beginUndoBracket(name="Remove element from :" + self.getLabel())
                    newDict = self.getController().constructRTVal(self._dataType)
                    for key in self.__value:
                        if key != attrName:
                            newDict[key] = self.__value[key]
                    self.__value = newDict
                    # NOTE(review): called with no argument here, unlike the
                    # setters above which pass self.__value -- confirm the
                    # no-argument form picks up the cached value.
                    self._invokeSetter()
                    self.getController().endUndoBracket()
                    self.rebuild()
                removeElementButton.clicked.connect(removeElement)
                self._grid.addWidget(removeElementButton, index, 2)

        index = 0
        for attrName in self.__value:
            constructWidget(index, attrName, self._elementValueType)
            index = index + 1
        if self.isEditable() and self.__enableAddRemoveElements:
            addElementButton = QtGui.QPushButton(self.__addElementButtonLabel, self)
            def addElement():
                self.getController().beginUndoBracket(name="Add element to :" + self.getLabel())
                # generate a unique key for the new value.
                # Keep iterating until a key has been generated that does not collide
                # with any existing keys.
                keyId = 1
                newValueKey = self.__defaultKeyValue
                keyUsed = True
                while keyUsed:
                    found = False
                    for currkey in self.__value:
                        if currkey == newValueKey:
                            newValueKey = self.__defaultKeyValue + str(keyId)
                            found = True
                            keyId += 1
                    keyUsed = found
                newDict = self.getController().constructRTVal(self._dataType)
                for key in self.__value:
                    newDict[str(key)] = self.__value[str(key)]
                # Caused a crash (TODO: log bug for Andrew)
                # newDict = self.__value.clone(self._dataType)
                newValue = self.getController().constructRTVal(self._elementValueType)
                if newValue is None:
                    raise Exception("Invalid element type:" + self._elementValueType)
                newDict[str(newValueKey)] = newValue
                self.__value = newDict
                self._invokeSetter()
                # NOTE(review): opened with beginUndoBracket but closed with
                # endInteraction, while removeElement uses endUndoBracket --
                # confirm this asymmetry is intended.
                self.getController().endInteraction()
                self.rebuild()
            addElementButton.clicked.connect(addElement)
            self._grid.addWidget(addElementButton, index, 0)

    def rebuild(self):
        """ Rebuild the sub-widgets because the structure of elements has changed."""
        # first clear the layout and then build again.
        for attrName in self.__widgets:
            self.__widgets[attrName].unregisterNotificationListener()
        while self._grid.count():
            self._grid.takeAt(0).widget().deleteLater()
        self.build()

    def getWidgetValue(self):
        """Return the dict currently held by the widget."""
        return self.__value

    def setWidgetValue(self, value):
        """Push a new dict into the widget, rebuilding rows if the key set changed."""
        # Rebuild the UI if there is a key in the value that is not
        # represented in the widgets, or if there is a widget not
        # represented in the value.
        for attrName in value:
            if attrName not in self.__widgets:
                self.rebuild()
                return
        for attrName in self.__widgets:
            if attrName not in value:
                self.rebuild()
                return
        # Update the existing widget values.
        for attrName in value:
            self.__keywidgets[attrName].setWidgetValue(attrName)
            self.__widgets[attrName].setWidgetValue(value[attrName])
        self.__value = value

    def unregisterNotificationListener(self):
        """
        When the widget is being removed from the inspector,
        this method must be called to unregister the event handlers
        """
        for widget in self.__widgets:
            self.__widgets[widget].unregisterNotificationListener()
        super(DictWidget, self).unregisterNotificationListener()

    @classmethod
    def canDisplay(cls, attribute):
        """Return True only for dict data types whose bracketed key type is String."""
        dataType = attribute.getDataType()
        openBraceIdx = dataType.find('[')
        closeBraceIdx = dataType.find(']')
        if closeBraceIdx > openBraceIdx+1:
            keyType = dataType[openBraceIdx+1:closeBraceIdx]
            if keyType == 'String':
                return True
        return False
DictWidget.registerPortWidget()
| import copy
from PySide import QtGui, QtCore
from AttributeWidgetImpl import AttributeWidget
from parameter import Parameter
class DictWidget(AttributeWidget):
    """Inspector widget for editing a dict attribute keyed by String.

    One grid row is built per entry: an editable key widget, a value widget
    and (when editable) a per-row 'remove' button, plus a trailing 'add'
    button that inserts a new entry under a generated unique key.
    """

    def __init__(self, attribute, parentWidget=None, addNotificationListener = True):
        super(DictWidget, self).__init__(attribute, parentWidget=parentWidget, addNotificationListener = addNotificationListener)

        vbox = QtGui.QVBoxLayout()
        vbox.setSpacing(2)
        self.setLayout(vbox)
        self._grid = QtGui.QGridLayout()
        self._grid.setContentsMargins(0, 0, 0, 0)
        gridWidget = QtGui.QWidget(self)
        gridWidget.setLayout(self._grid)
        vbox.addWidget(gridWidget, 1)
        # NOTE(review): setLayout was already called with vbox above; this
        # second call with the grid looks redundant -- confirm intent.
        self.setLayout(self._grid)
        self.setSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Preferred)

        # Behaviour options supplied by the attribute definition.
        self.__enableAddRemoveElements = attribute.getOption('enableAddRemoveElements', True)
        self.__addElementButtonLabel = attribute.getOption('addElementButtonLabel', 'add')
        self.__removeElementButtonLabel = attribute.getOption('removeElementButtonLabel', 'remove')
        self.__defaultKeyValue = attribute.getOption('defaultKeyValue', 'key')
        self._dataType = attribute.getDataType()
        # print attribute.getOwner().getScene().getFabricClient().RT.getAggregateMemberInfo(self._dataType)
        # raise Exception("Foo")
        self.determineElementType()
        self.build()

    def determineElementType(self):
        """Derive the element value type from the dict data type string."""
        # Determine the element value type from the value type of the array.
        openBraceIdx = self._dataType.find('[')
        closeBraceIdx = self._dataType.find(']')
        keyType = self._dataType[openBraceIdx+1:closeBraceIdx]
        # e.g. a data type of the form 'Value[Key]' yields 'Value'.
        self._elementValueType = self._dataType.replace('['+keyType+']', '', 1)

    def build(self):
        """Create one row of key/value sub-widgets per entry of the dict."""
        self.__value = self._invokeGetter()
        self.__keywidgets = {}
        self.__widgets = {}
        # this dictionary maps the keys used in the initial dict passed in, and the new keys as they are modified in the UI.
        # this is required because you can't modify the 'attrName' value defined in the closure below.
        self.__attrNameMapping = {}

        def constructWidget(index, attrName, attrValueType):
            # Build the key widget, value widget and (optionally) the
            # per-row 'remove' button for a single dict entry.
            self.__attrNameMapping[attrName] = attrName
            def keyGetter():
                return attrName
            if self.isEditable():
                def keySetter(key):
                    # Re-key the entry and all bookkeeping dicts, then push
                    # the whole dict back through the attribute setter.
                    value = self.__value[self.__attrNameMapping[attrName]]
                    del self.__value[self.__attrNameMapping[attrName]]
                    self.__value[key] = value
                    keyWidget = self.__keywidgets[self.__attrNameMapping[attrName]]
                    valueWidget = self.__widgets[self.__attrNameMapping[attrName]]
                    del self.__keywidgets[self.__attrNameMapping[attrName]]
                    del self.__widgets[self.__attrNameMapping[attrName]]
                    self.__keywidgets[key] = keyWidget
                    self.__widgets[key] = valueWidget
                    self.__attrNameMapping[attrName] = key
                    self._invokeSetter(self.__value)
            else:
                keySetter = None
            # sub-widgets should initialize their values.
            keyParam = Parameter(
                controller=self.getController(),
                name="",
                portType=self._attribute.getPortType(),
                dataType = 'String',
                getterFn = keyGetter,
                setterFn = keySetter
            )
            keyWidget = AttributeWidget.constructAttributeWidget(self.getController(), keyParam, parentWidget=self, addNotificationListener=False)
            def valueGetter():
                return self.__value[self.__attrNameMapping[attrName]]
            if self.isEditable():
                def valueSetter(value):
                    self.__value[self.__attrNameMapping[attrName]] = value
                    self._invokeSetter(self.__value)
            else:
                valueSetter = None
            valueParam = Parameter(
                controller=self.getController(),
                name="",
                portType=self._attribute.getPortType(),
                dataType = attrValueType,
                getterFn = valueGetter,
                setterFn = valueSetter
            )
            valueWidget = AttributeWidget.constructAttributeWidget(self.getController(), valueParam, parentWidget=self, addNotificationListener=False)
            # self._grid.addWidget(QtGui.QLabel(attrName, self), index, 0, QtCore.Qt.AlignRight | QtCore.Qt.AlignTop)
            self._grid.addWidget(keyWidget, index, 0, QtCore.Qt.AlignRight | QtCore.Qt.AlignTop)
            self._grid.addWidget(valueWidget, index, 1)
            self.__keywidgets[attrName] = keyWidget
            self.__widgets[attrName] = valueWidget
            if self.isEditable() and self.__enableAddRemoveElements:
                removeElementButton = QtGui.QPushButton(self.__removeElementButtonLabel, self)
                def removeElement():
                    # Rebuild the RTVal dict without this entry.
                    self.getController().beginUndoBracket(name="Remove element from :" + self.getLabel())
                    newDict = self.getController().constructRTVal(self._dataType)
                    for key in self.__value:
                        if key != attrName:
                            newDict[key] = self.__value[key]
                    self.__value = newDict
                    # NOTE(review): called with no argument here, unlike the
                    # setters above which pass self.__value -- confirm the
                    # no-argument form picks up the cached value.
                    self._invokeSetter()
                    self.getController().endUndoBracket()
                    self.rebuild()
                removeElementButton.clicked.connect(removeElement)
                self._grid.addWidget(removeElementButton, index, 2)

        index = 0
        for attrName in self.__value:
            constructWidget(index, attrName, self._elementValueType)
            index = index + 1
        if self.isEditable() and self.__enableAddRemoveElements:
            addElementButton = QtGui.QPushButton(self.__addElementButtonLabel, self)
            def addElement():
                self.getController().beginUndoBracket(name="Add element to :" + self.getLabel())
                # generate a unique key for the new value.
                # Keep iterating until a key has been generated that does not collide
                # with any existing keys.
                keyId = 1
                newValueKey = self.__defaultKeyValue
                keyUsed = True
                while keyUsed:
                    found = False
                    for currkey in self.__value:
                        if currkey == newValueKey:
                            newValueKey = self.__defaultKeyValue + str(keyId)
                            found = True
                            keyId += 1
                    keyUsed = found
                newDict = self.getController().constructRTVal(self._dataType)
                for key in self.__value:
                    newDict[str(key)] = self.__value[str(key)]
                # Caused a crash (TODO: log bug for Andrew)
                # newDict = self.__value.clone(self._dataType)
                newValue = self.getController().constructRTVal(self._elementValueType)
                if newValue is None:
                    raise Exception("Invalid element type:" + self._elementValueType)
                newDict[str(newValueKey)] = newValue
                self.__value = newDict
                self._invokeSetter()
                # NOTE(review): opened with beginUndoBracket but closed with
                # endInteraction, while removeElement uses endUndoBracket --
                # confirm this asymmetry is intended.
                self.getController().endInteraction()
                self.rebuild()
            addElementButton.clicked.connect(addElement)
            self._grid.addWidget(addElementButton, index, 0)

    def rebuild(self):
        """ Rebuild the sub-widgets because the structure of elements has changed."""
        # first clear the layout and then build again.
        for attrName in self.__widgets:
            self.__widgets[attrName].unregisterNotificationListener()
        while self._grid.count():
            self._grid.takeAt(0).widget().deleteLater()
        self.build()

    def getWidgetValue(self):
        """Return the dict currently held by the widget."""
        return self.__value

    def setWidgetValue(self, value):
        """Push a new dict into the widget, rebuilding rows if the key set changed."""
        # Rebuild the UI if there is a key in the value that is not
        # represented in the widgets, or if there is a widget not
        # represented in the value.
        for attrName in value:
            if attrName not in self.__widgets:
                self.rebuild()
                return
        for attrName in self.__widgets:
            if attrName not in value:
                self.rebuild()
                return
        # Update the existing widget values.
        for attrName in value:
            self.__keywidgets[attrName].setWidgetValue(attrName)
            self.__widgets[attrName].setWidgetValue(value[attrName])
        self.__value = value

    def unregisterNotificationListener(self):
        """
        When the widget is being removed from the inspector,
        this method must be called to unregister the event handlers
        """
        for widget in self.__widgets:
            self.__widgets[widget].unregisterNotificationListener()
        super(DictWidget, self).unregisterNotificationListener()

    @classmethod
    def canDisplay(cls, attribute):
        """Return True only for dict data types whose bracketed key type is String."""
        dataType = attribute.getDataType()
        openBraceIdx = dataType.find('[')
        closeBraceIdx = dataType.find(']')
        if closeBraceIdx > openBraceIdx+1:
            keyType = dataType[openBraceIdx+1:closeBraceIdx]
            if keyType == 'String':
                return True
        return False
DictWidget.registerPortWidget()
| en | 0.798467 | # print attribute.getOwner().getScene().getFabricClient().RT.getAggregateMemberInfo(self._dataType) # raise Exception("Foo") # Determine the element value type from the value type of the array. # this dictionary maps the keys used in the initial dict passed in, and the new keys as the are modified in the UI. # this is required because you can't modify the 'attrName' value defined in the closure below. # sub-widgets should initialize their values. # self._grid.addWidget(QtGui.QLabel(attrName, self), index, 0, QtCore.Qt.AlignRight | QtCore.Qt.AlignTop) # generate a unique key for the new value. # Keep interating until a key has been generated that does not collide # with any existing keys. # Caused a crash (TODO: log bug for Andrew) # newDict = self.__value.clone(self._dataType) Rebuild the sub-widgets because the structure of elements has changed. # first clear the layout and then build again. # Rebuild the UI if there is a key in the value that id not # represented in the widgets, or if there is a widget not # represented in the value. # Update the existing widget values. When the widget is being removed from the inspector, this method must be called to unregister the event handlers | 2.184123 | 2 |
blog/migrations/0003_auto_20210903_1539.py | dzejkobi/institution-comparisoner | 0 | 6617887 | <reponame>dzejkobi/institution-comparisoner<gh_stars>0
# Generated by Django 3.1.12 on 2021-09-03 15:39
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Add the BlogRecentPluginModel plugin table and reword the blog index
    plugin's "remaining posts" title field."""

    dependencies = [
        ('cms', '0022_auto_20180620_1551'),
        ('blog', '0002_auto_20210903_1206'),
    ]

    operations = [
        # New CMS plugin model: renders the `post_count` most recent posts.
        migrations.CreateModel(
            name='BlogRecentPluginModel',
            fields=[
                ('cmsplugin_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, related_name='blog_blogrecentpluginmodel', serialize=False, to='cms.cmsplugin')),
                ('post_count', models.PositiveSmallIntegerField(default=3, verbose_name='number of posts')),
            ],
            options={
                'abstract': False,
            },
            bases=('cms.cmsplugin',),
        ),
        # Update default/help text of the index plugin's section title.
        migrations.AlterField(
            model_name='blogindexpluginmodel',
            name='more_title',
            field=models.CharField(default='Read more blog posts', help_text='Title of the remaining posts section.', max_length=250, verbose_name='remaining posts title'),
        ),
    ]
| # Generated by Django 3.1.12 on 2021-09-03 15:39
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Add the BlogRecentPluginModel plugin table and reword the blog index
    plugin's "remaining posts" title field."""

    dependencies = [
        ('cms', '0022_auto_20180620_1551'),
        ('blog', '0002_auto_20210903_1206'),
    ]

    operations = [
        # New CMS plugin model: renders the `post_count` most recent posts.
        migrations.CreateModel(
            name='BlogRecentPluginModel',
            fields=[
                ('cmsplugin_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, related_name='blog_blogrecentpluginmodel', serialize=False, to='cms.cmsplugin')),
                ('post_count', models.PositiveSmallIntegerField(default=3, verbose_name='number of posts')),
            ],
            options={
                'abstract': False,
            },
            bases=('cms.cmsplugin',),
        ),
        # Update default/help text of the index plugin's section title.
        migrations.AlterField(
            model_name='blogindexpluginmodel',
            name='more_title',
            field=models.CharField(default='Read more blog posts', help_text='Title of the remaining posts section.', max_length=250, verbose_name='remaining posts title'),
        ),
    ]
Programmers/04. Sort/02. Max_Num.py | Minoolian/Coding_Test | 0 | 6617888 | # 가장 큰 수
# str의 사전식 정렬을 이용한 풀이
# https://programmers.co.kr/learn/courses/30/lessons/42746
def solution(numbers):
return str(int(''.join(sorted(map(str,numbers),key=lambda x:x*3,reverse=True))))
print(solution([90,908,89,898,10,101,1,8,9]))
# 2. 병합정렬 (시간초과)
# def merge(left, right):
# i,j=0,0
# sorted_list=[]
#
# while i<len(left) and j<len(right):
# l,r=str(left[i]), str(right[j])
# for k in range(max(len(l),len(r))):
# if min(len(l),len(r))<=k:
# if l[-1]>r[-1]:
# sorted_list.append(l)
# i+=1
# else:
# sorted_list.append(r)
# j+=1
# break
#
# if l[k]>r[k]:
# sorted_list.append(l)
# i+=1
# break
#
# elif l[k]<r[k]:
# sorted_list.append(r)
# j+=1
# break
#
# while i<len(left):
# sorted_list.append(str(left[i]))
# i+=1
#
# while j<len(right):
# sorted_list.append(str(right[j]))
# j+=1
#
# return sorted_list
#
#
# def sort(numbers):
#
# if len(numbers)<=1:
# return numbers
#
# mid=len(numbers)//2
# left=numbers[:mid]
# right=numbers[mid:]
#
# left1=sort(left)
# right1=sort(right)
#
# return merge(left1,right1)
#
#
# def solution(numbers):
#
# return ''.join(sort(numbers))
#
# print(solution([212,21]))
# 1. 같은 수가 있을때 구별이 불가
# def solution(numbers):
#
# return int(''.join(map(numbers,sorted(a,key=lambda x:(str(x)[0],str(x)[1] if len(str(x))>1 else str(x)[0],str(x)[2] if len(str(x))>2 else str(x)[0],str(x)[3] if len(str(x))>3 else str(x)[0]),reverse=True))))
| # 가장 큰 수
# str의 사전식 정렬을 이용한 풀이
# https://programmers.co.kr/learn/courses/30/lessons/42746
def solution(numbers):
return str(int(''.join(sorted(map(str,numbers),key=lambda x:x*3,reverse=True))))
print(solution([90,908,89,898,10,101,1,8,9]))
# 2. 병합정렬 (시간초과)
# def merge(left, right):
# i,j=0,0
# sorted_list=[]
#
# while i<len(left) and j<len(right):
# l,r=str(left[i]), str(right[j])
# for k in range(max(len(l),len(r))):
# if min(len(l),len(r))<=k:
# if l[-1]>r[-1]:
# sorted_list.append(l)
# i+=1
# else:
# sorted_list.append(r)
# j+=1
# break
#
# if l[k]>r[k]:
# sorted_list.append(l)
# i+=1
# break
#
# elif l[k]<r[k]:
# sorted_list.append(r)
# j+=1
# break
#
# while i<len(left):
# sorted_list.append(str(left[i]))
# i+=1
#
# while j<len(right):
# sorted_list.append(str(right[j]))
# j+=1
#
# return sorted_list
#
#
# def sort(numbers):
#
# if len(numbers)<=1:
# return numbers
#
# mid=len(numbers)//2
# left=numbers[:mid]
# right=numbers[mid:]
#
# left1=sort(left)
# right1=sort(right)
#
# return merge(left1,right1)
#
#
# def solution(numbers):
#
# return ''.join(sort(numbers))
#
# print(solution([212,21]))
# 1. 같은 수가 있을때 구별이 불가
# def solution(numbers):
#
# return int(''.join(map(numbers,sorted(a,key=lambda x:(str(x)[0],str(x)[1] if len(str(x))>1 else str(x)[0],str(x)[2] if len(str(x))>2 else str(x)[0],str(x)[3] if len(str(x))>3 else str(x)[0]),reverse=True))))
| en | 0.194877 | # 가장 큰 수 # str의 사전식 정렬을 이용한 풀이 # https://programmers.co.kr/learn/courses/30/lessons/42746 # 2. 병합정렬 (시간초과) # def merge(left, right): # i,j=0,0 # sorted_list=[] # # while i<len(left) and j<len(right): # l,r=str(left[i]), str(right[j]) # for k in range(max(len(l),len(r))): # if min(len(l),len(r))<=k: # if l[-1]>r[-1]: # sorted_list.append(l) # i+=1 # else: # sorted_list.append(r) # j+=1 # break # # if l[k]>r[k]: # sorted_list.append(l) # i+=1 # break # # elif l[k]<r[k]: # sorted_list.append(r) # j+=1 # break # # while i<len(left): # sorted_list.append(str(left[i])) # i+=1 # # while j<len(right): # sorted_list.append(str(right[j])) # j+=1 # # return sorted_list # # # def sort(numbers): # # if len(numbers)<=1: # return numbers # # mid=len(numbers)//2 # left=numbers[:mid] # right=numbers[mid:] # # left1=sort(left) # right1=sort(right) # # return merge(left1,right1) # # # def solution(numbers): # # return ''.join(sort(numbers)) # # print(solution([212,21])) # 1. 같은 수가 있을때 구별이 불가 # def solution(numbers): # # return int(''.join(map(numbers,sorted(a,key=lambda x:(str(x)[0],str(x)[1] if len(str(x))>1 else str(x)[0],str(x)[2] if len(str(x))>2 else str(x)[0],str(x)[3] if len(str(x))>3 else str(x)[0]),reverse=True)))) | 3.635652 | 4 |
skradiomics/feature_selection/Information.py | kangzh015/radiomics | 11 | 6617889 | <filename>skradiomics/feature_selection/Information.py
# !/usr/bin/env python
# -*- coding:utf-8 -*-
# author: jeremy.zhang(<EMAIL>, Shenzhen University, China)
import numpy as np
from .utils import check_X_y, feature_binary_partition_discretize
def elog(x):
    """x * log(x) term for entropy sums; 0 outside the open interval (0, 1)."""
    out_of_range = x <= 0 or x >= 1
    if out_of_range:
        return 0
    return x * np.log(x)
def hist(x_list):
    """Yield the relative frequency of each distinct element of x_list."""
    counts = {}
    for item in x_list:
        counts[item] = counts.get(item, 0) + 1
    n = len(x_list)
    return map(lambda c: float(c) / n, counts.values())
def entropy(prob_list, base=2):
    """Shannon entropy of the probability list, measured in log-`base` units."""
    return -sum(elog(p) for p in prob_list) / np.log(base)
def mutual_info(X, y):
    """Score each feature by its mutual information with the target y.

    Every feature is binarized at each candidate threshold
    (feature_binary_partition_discretize) and the best-scoring partition
    is kept, so each returned score is the maximum empirical
    I(x; y) = H(x) + H(y) - H(x, y) over those binary discretizations.
    """
    X, y = check_X_y(X, y)
    n_samples, n_features = X.shape
    # I(x; y) = H(x) + H(y) - H(x, y), estimated from empirical frequencies.
    score_func = lambda x: -entropy(hist(list(zip(x, y)))) + entropy(hist(x)) + entropy(hist(y))
    scores = []
    for i in range(n_features):
        x_i = X[:, i]
        discreteize_list = feature_binary_partition_discretize(x_i)
        # Keep the best score over all binary partitions of the feature.
        score = np.nanmax(list(map(score_func, discreteize_list)))
        scores.append(score)
    return np.array(scores)
def info_gain(X, y):
    """Score each feature by its information gain with respect to y.

    NOTE(review): this implementation is currently identical to
    mutual_info above (information gain equals mutual information
    under this estimator) -- consider delegating to it.
    """
    X, y = check_X_y(X, y)
    n_samples, n_features = X.shape
    # gain(x; y) = H(x) + H(y) - H(x, y), estimated from empirical frequencies.
    score_func = lambda x: -entropy(hist(list(zip(x, y)))) + entropy(hist(x)) + entropy(hist(y))
    scores = []
    for i in range(n_features):
        x_i = X[:, i]
        discreteize_list = feature_binary_partition_discretize(x_i)
        # Keep the best score over all binary partitions of the feature.
        score = np.nanmax(list(map(score_func, discreteize_list)))
        scores.append(score)
    return np.array(scores)
def info_gain_ratio(X, y):
    """Score each feature by its information gain divided by its own entropy.

    The 1e-5 added to the denominator guards against division by zero for
    constant (zero-entropy) partitions.
    """
    X, y = check_X_y(X, y)
    n_samples, n_features = X.shape
    # gain(x; y) / H(x): normalizes the gain by the feature's entropy.
    score_func = lambda x: (-entropy(hist(list(zip(x, y)))) + entropy(hist(x)) + entropy(hist(y))) / (entropy(hist(x)) + 1e-5)
    scores = []
    for i in range(n_features):
        x_i = X[:, i]
        discreteize_list = feature_binary_partition_discretize(x_i)
        # Keep the best score over all binary partitions of the feature.
        score = np.nanmax(list(map(score_func, discreteize_list)))
        scores.append(score)
    return np.array(scores)
| <filename>skradiomics/feature_selection/Information.py
# !/usr/bin/env python
# -*- coding:utf-8 -*-
# author: jeremy.zhang(<EMAIL>, Shenzhen University, China)
import numpy as np
from .utils import check_X_y, feature_binary_partition_discretize
def elog(x):
    """Return x*log(x), treating values outside (0, 1) as contributing 0."""
    if 0 < x < 1:
        return x * np.log(x)
    return 0
def hist(x_list):
    """Yield the relative frequency of each distinct element of x_list."""
    tally = {}
    for element in x_list:
        tally[element] = tally.get(element, 0) + 1
    size = len(x_list)
    return map(lambda count: float(count) / size, tally.values())
def entropy(prob_list, base=2):
    """Entropy of a probability distribution, in units of log `base`."""
    total = sum(map(elog, prob_list))
    return -total / np.log(base)
def mutual_info(X, y):
    """Score each feature by its mutual information with the target y.

    Every feature is binarized at each candidate threshold
    (feature_binary_partition_discretize) and the best-scoring partition
    is kept, so each returned score is the maximum empirical
    I(x; y) = H(x) + H(y) - H(x, y) over those binary discretizations.
    """
    X, y = check_X_y(X, y)
    n_samples, n_features = X.shape
    # I(x; y) = H(x) + H(y) - H(x, y), estimated from empirical frequencies.
    score_func = lambda x: -entropy(hist(list(zip(x, y)))) + entropy(hist(x)) + entropy(hist(y))
    scores = []
    for i in range(n_features):
        x_i = X[:, i]
        discreteize_list = feature_binary_partition_discretize(x_i)
        # Keep the best score over all binary partitions of the feature.
        score = np.nanmax(list(map(score_func, discreteize_list)))
        scores.append(score)
    return np.array(scores)
def info_gain(X, y):
    """Score each feature by its information gain with respect to y.

    NOTE(review): this implementation is currently identical to
    mutual_info above (information gain equals mutual information
    under this estimator) -- consider delegating to it.
    """
    X, y = check_X_y(X, y)
    n_samples, n_features = X.shape
    # gain(x; y) = H(x) + H(y) - H(x, y), estimated from empirical frequencies.
    score_func = lambda x: -entropy(hist(list(zip(x, y)))) + entropy(hist(x)) + entropy(hist(y))
    scores = []
    for i in range(n_features):
        x_i = X[:, i]
        discreteize_list = feature_binary_partition_discretize(x_i)
        # Keep the best score over all binary partitions of the feature.
        score = np.nanmax(list(map(score_func, discreteize_list)))
        scores.append(score)
    return np.array(scores)
def info_gain_ratio(X, y):
    """Score each feature by its information gain divided by its own entropy.

    The 1e-5 added to the denominator guards against division by zero for
    constant (zero-entropy) partitions.
    """
    X, y = check_X_y(X, y)
    n_samples, n_features = X.shape
    # gain(x; y) / H(x): normalizes the gain by the feature's entropy.
    score_func = lambda x: (-entropy(hist(list(zip(x, y)))) + entropy(hist(x)) + entropy(hist(y))) / (entropy(hist(x)) + 1e-5)
    scores = []
    for i in range(n_features):
        x_i = X[:, i]
        discreteize_list = feature_binary_partition_discretize(x_i)
        # Keep the best score over all binary partitions of the feature.
        score = np.nanmax(list(map(score_func, discreteize_list)))
        scores.append(score)
    return np.array(scores)
| en | 0.517225 | # !/usr/bin/env python # -*- coding:utf-8 -*- # author: jeremy.zhang(<EMAIL>, Shenzhen University, China) | 2.418933 | 2 |
3D/rayleigh-benard/rayleigh-benard.py | pylbm/pylbm_gallery | 1 | 6617890 | <filename>3D/rayleigh-benard/rayleigh-benard.py
import numpy as np
import sympy as sp
import pylbm
X, Y, Z = sp.symbols('X, Y, Z')
rho, qx, qy, qz, T, LA = sp.symbols('rho, qx, qy, qz, T, LA', real=True)

# parameters
dx = 1./128                     # spatial step
la = 1                          # scheme (lattice) velocity
cs = la/np.sqrt(3)              # lattice sound speed
Tu = -0.5                       # temperature of the upper plate (cold)
Td = 0.5                        # temperature of the lower plate (hot)
Ra = 1e6                        # Rayleigh number
Pr = 0.71                       # Prandtl number
g = 9.81                        # gravity
tau = 1./1.8                    # relaxation time of the flow scheme
nu = (2*tau-1)/6*la*dx          # kinematic viscosity implied by tau
diffusivity = nu/Pr             # thermal diffusivity
taup = .5+2*diffusivity/la/dx   # relaxation time of the temperature scheme
DeltaT = Td - Tu                # temperature difference between the plates
xmin, xmax, ymin, ymax, zmin, zmax = 0., 2., 0., 1., 0., 2.
H = ymax - ymin                 # distance between the plates
beta = Ra*nu*diffusivity/(g*DeltaT*H**3)  # expansion coeff. from the Ra definition
sf = [0]*4 + [1./tau]*15        # relaxation rates (conserved moments first)
sT = [0] + [1./taup]*5          # relaxation rates of the temperature scheme
def init_T(x, y, z):
    """Linear temperature profile between the plates plus a small random perturbation."""
    profile = Td + (Tu - Td) / (ymax - ymin) * (y - ymin)
    noise = (Td - Tu) * (0.1 * np.random.random_sample((x.shape[0], y.shape[1], z.shape[2])) - 0.5)
    return profile + noise
def bc_up(f, m, x, y, z):
    """Top wall: zero velocity, temperature held at the cold value Tu."""
    m[qx] = 0.
    m[qy] = 0.
    m[qz] = 0.
    m[T] = Tu
def bc_down(f, m, x, y, z):
    """Bottom wall: zero velocity, temperature held at the hot value Td."""
    m[qx] = 0.
    m[qy] = 0.
    m[qz] = 0.
    m[T] = Td
def save(sol, im):
    """Write the temperature field of `sol` to HDF5 frame number `im`."""
    x, y, z = sol.domain.x, sol.domain.y, sol.domain.z
    h5 = pylbm.H5File(sol.mpi_topo, 'rayleigh-benard', './rayleigh-benard', im)
    h5.set_grid(x, y, z)
    h5.add_scalar('T', sol.m[T])
    h5.save()
def feq_NS(v, u):
    """Second-order equilibrium distribution of the D3Q19 flow scheme.

    `v` is the list of lattice velocities; `u` is the momentum vector
    sp.Matrix([qx, qy, qz]) substituted into each velocity direction.
    """
    cs2 = sp.Rational(1, 3)
    x, y, z = sp.symbols('x, y, z')
    vsymb = sp.Matrix([x, y, z])
    # D3Q19 weights: 1 rest velocity, 6 face and 12 edge velocities.
    w = sp.Matrix([sp.Rational(1, 3)] + [sp.Rational(1, 18)]*6 + [sp.Rational(1, 36)]*12)
    f = rho + u.dot(vsymb)/cs2 + u.dot(vsymb)**2/(2*cs2**2) - u.norm()**2/(2*cs2)
    return sp.Matrix([w[iv]*f.subs([(x, vv[0]), (y, vv[1]), (z, vv[2])]) for iv, vv in enumerate(v)])
def feq_T(v, u):
    """First-order equilibrium distribution of the D3Q6 temperature scheme."""
    c0 = 1#LA
    x, y, z = sp.symbols('x, y, z')
    vsymb = sp.Matrix([x, y, z])
    f = T/6*(1 + 2*u.dot(vsymb)/c0)
    return sp.Matrix([f.subs([(x, vv[0]), (y, vv[1]), (z, vv[2])]) for iv, vv in enumerate(v)])
def run(dx, Tf, generator="cython", sorder=None, withPlot=True):
    """
    Parameters
    ----------

    dx: double
        spatial step

    Tf: double
        final time

    generator: pylbm generator

    sorder: list
        storage order

    withPlot: boolean
        if True plot the solution otherwise just compute the solution
    """
    # NOTE(review): the generator, sorder and withPlot arguments are not
    # used below; the scheme generator is hard-coded to "cython".
    r = X**2+Y**2+Z**2
    dico = {
        'box': {
            'x': [xmin, xmax],
            'y': [ymin, ymax],
            'z': [zmin, zmax],
            # labels 0 and 1 select the walls handled by bc_down/bc_up below;
            # -1 presumably means periodic -- confirm with the pylbm docs.
            'label': [-1, -1, 0, 1, -1, -1]
        },
        'space_step': dx,
        'scheme_velocity': la,
        'schemes': [
            # D3Q19 scheme for the flow (rho, qx, qy, qz conserved), with a
            # Boussinesq buoyancy source term on qy.
            {
                'velocities': list(range(19)),
                'conserved_moments': [rho, qx, qy, qz],
                'polynomials': [
                    1,
                    X, Y, Z,
                    19*r - 30,
                    2*X**2 - Y**2 - Z**2,
                    Y**2-Z**2,
                    X*Y,
                    Y*Z,
                    Z*X,
                    X*(5*r - 9),
                    Y*(5*r - 9),
                    Z*(5*r - 9),
                    X*(Y**2 - Z**2),
                    Y*(Z**2 - X**2),
                    Z*(X**2 - Y**2),
                    (2*X**2 - Y**2 - Z**2)*(3*r - 5),
                    (Y**2 - Z**2)*(3*r - 5),
                    -sp.Rational(53, 2)*r + sp.Rational(21, 2)*r**2 + 12
                ],
                'relaxation_parameters': sf,
                'feq': (feq_NS, (sp.Matrix([qx, qy, qz]),)),
                'source_terms': {qy: beta*g*T},
            },
            # D3Q6 advection-diffusion scheme for the temperature T.
            {
                'velocities': list(range(1, 7)),
                'conserved_moments': T,
                'polynomials': [1, X, Y, Z,
                                X**2 - Y**2,
                                Y**2 - Z**2,
                                ],
                'feq': (feq_T, (sp.Matrix([qx, qy, qz]),)),
                'relaxation_parameters': sT,
            },
        ],
        'init': {rho: 1.,
                 qx: 0.,
                 qy: 0.,
                 qz: 0.,
                 T: init_T
                 },
        'boundary_conditions': {
            0: {'method': {0: pylbm.bc.BouzidiBounceBack, 1: pylbm.bc.BouzidiAntiBounceBack}, 'value': bc_down},
            1: {'method': {0: pylbm.bc.BouzidiBounceBack, 1: pylbm.bc.BouzidiAntiBounceBack}, 'value': bc_up},
        },
        'generator': "cython",
        'parameters': {LA: la},
    }
    sol = pylbm.Simulation(dico)

    im = 0
    compt = 0
    # Advance to Tf, dumping an HDF5 frame every 128 time steps.
    while sol.t < Tf:
        sol.one_time_step()
        compt += 1
        if compt == 128:
            im += 1
            save(sol, im)
            compt = 0
    return sol
if __name__ == '__main__':
    # Run the simulation up to t = 400, saving frames along the way.
    Tf = 400.
    run(dx, Tf)
| <filename>3D/rayleigh-benard/rayleigh-benard.py
import numpy as np
import sympy as sp
import pylbm
X, Y, Z = sp.symbols('X, Y, Z')
rho, qx, qy, qz, T, LA = sp.symbols('rho, qx, qy, qz, T, LA', real=True)

# parameters
dx = 1./128                     # spatial step
la = 1                          # scheme (lattice) velocity
cs = la/np.sqrt(3)              # lattice sound speed
Tu = -0.5                       # temperature of the upper plate (cold)
Td = 0.5                        # temperature of the lower plate (hot)
Ra = 1e6                        # Rayleigh number
Pr = 0.71                       # Prandtl number
g = 9.81                        # gravity
tau = 1./1.8                    # relaxation time of the flow scheme
nu = (2*tau-1)/6*la*dx          # kinematic viscosity implied by tau
diffusivity = nu/Pr             # thermal diffusivity
taup = .5+2*diffusivity/la/dx   # relaxation time of the temperature scheme
DeltaT = Td - Tu                # temperature difference between the plates
xmin, xmax, ymin, ymax, zmin, zmax = 0., 2., 0., 1., 0., 2.
H = ymax - ymin                 # distance between the plates
beta = Ra*nu*diffusivity/(g*DeltaT*H**3)  # expansion coeff. from the Ra definition
sf = [0]*4 + [1./tau]*15        # relaxation rates (conserved moments first)
sT = [0] + [1./taup]*5          # relaxation rates of the temperature scheme
def init_T(x, y, z):
    """Linear temperature profile between the plates plus a small random perturbation."""
    shape = (x.shape[0], y.shape[1], z.shape[2])
    linear = Td + (Tu - Td) / (ymax - ymin) * (y - ymin)
    return linear + (Td - Tu) * (0.1 * np.random.random_sample(shape) - 0.5)
def bc_up(f, m, x, y, z):
    """Top wall: zero velocity, temperature held at the cold value Tu."""
    m[qx] = 0.
    m[qy] = 0.
    m[qz] = 0.
    m[T] = Tu
def bc_down(f, m, x, y, z):
    """Bottom wall: zero velocity, temperature held at the hot value Td."""
    m[qx] = 0.
    m[qy] = 0.
    m[qz] = 0.
    m[T] = Td
def save(sol, im):
    """Write the temperature field of `sol` to HDF5 frame number `im`."""
    x, y, z = sol.domain.x, sol.domain.y, sol.domain.z
    h5 = pylbm.H5File(sol.mpi_topo, 'rayleigh-benard', './rayleigh-benard', im)
    h5.set_grid(x, y, z)
    h5.add_scalar('T', sol.m[T])
    h5.save()
def feq_NS(v, u):
    """Second-order equilibrium distribution of the D3Q19 flow scheme.

    `v` is the list of lattice velocities; `u` is the momentum vector
    sp.Matrix([qx, qy, qz]) substituted into each velocity direction.
    """
    cs2 = sp.Rational(1, 3)
    x, y, z = sp.symbols('x, y, z')
    vsymb = sp.Matrix([x, y, z])
    # D3Q19 weights: 1 rest velocity, 6 face and 12 edge velocities.
    w = sp.Matrix([sp.Rational(1, 3)] + [sp.Rational(1, 18)]*6 + [sp.Rational(1, 36)]*12)
    f = rho + u.dot(vsymb)/cs2 + u.dot(vsymb)**2/(2*cs2**2) - u.norm()**2/(2*cs2)
    return sp.Matrix([w[iv]*f.subs([(x, vv[0]), (y, vv[1]), (z, vv[2])]) for iv, vv in enumerate(v)])
def feq_T(v, u):
    """First-order equilibrium distribution of the D3Q6 temperature scheme."""
    c0 = 1#LA
    x, y, z = sp.symbols('x, y, z')
    vsymb = sp.Matrix([x, y, z])
    f = T/6*(1 + 2*u.dot(vsymb)/c0)
    return sp.Matrix([f.subs([(x, vv[0]), (y, vv[1]), (z, vv[2])]) for iv, vv in enumerate(v)])
def run(dx, Tf, generator="cython", sorder=None, withPlot=True):
    """
    Run the 3D Rayleigh-Benard coupled LBM simulation up to time Tf.

    Parameters
    ----------
    dx: double
        spatial step
    Tf: double
        final time
    generator: pylbm generator
    sorder: list
        storage order
    withPlot: boolean
        if True plot the solution otherwise just compute the solution

    Returns the pylbm Simulation after the final time step.  The temperature
    field is dumped to HDF5 every 128 time steps via save().
    """
    # NOTE: sorder/withPlot are accepted for API compatibility but not yet used.
    r = X**2+Y**2+Z**2
    dico = {
        'box': {
            'x': [xmin, xmax],
            'y': [ymin, ymax],
            'z': [zmin, zmax],
            # only the y-walls are labelled (0: bottom, 1: top); others periodic
            'label': [-1, -1, 0, 1, -1, -1]
        },
        'space_step': dx,
        'scheme_velocity': la,
        'schemes': [
            {
                # D3Q19 fluid scheme with a Boussinesq buoyancy source on qy
                'velocities': list(range(19)),
                'conserved_moments': [rho, qx, qy, qz],
                'polynomials': [
                    1,
                    X, Y, Z,
                    19*r - 30,
                    2*X**2 - Y**2 - Z**2,
                    Y**2-Z**2,
                    X*Y,
                    Y*Z,
                    Z*X,
                    X*(5*r - 9),
                    Y*(5*r - 9),
                    Z*(5*r - 9),
                    X*(Y**2 - Z**2),
                    Y*(Z**2 - X**2),
                    Z*(X**2 - Y**2),
                    (2*X**2 - Y**2 - Z**2)*(3*r - 5),
                    (Y**2 - Z**2)*(3*r - 5),
                    -sp.Rational(53, 2)*r + sp.Rational(21, 2)*r**2 + 12
                ],
                'relaxation_parameters': sf,
                'feq': (feq_NS, (sp.Matrix([qx, qy, qz]),)),
                'source_terms': {qy: beta*g*T},
            },
            {
                # D3Q6 advection-diffusion scheme for the temperature
                'velocities': list(range(1, 7)),
                'conserved_moments': T,
                'polynomials': [1, X, Y, Z,
                                X**2 - Y**2,
                                Y**2 - Z**2,
                                ],
                'feq': (feq_T, (sp.Matrix([qx, qy, qz]),)),
                'relaxation_parameters': sT,
            },
        ],
        'init': {rho: 1.,
                 qx: 0.,
                 qy: 0.,
                 qz: 0.,
                 T: init_T
                 },
        'boundary_conditions': {
            0: {'method': {0: pylbm.bc.BouzidiBounceBack, 1: pylbm.bc.BouzidiAntiBounceBack}, 'value': bc_down},
            1: {'method': {0: pylbm.bc.BouzidiBounceBack, 1: pylbm.bc.BouzidiAntiBounceBack}, 'value': bc_up},
        },
        # bug fix: the generator argument was previously ignored ("cython" hard-coded)
        'generator': generator,
        'parameters': {LA: la},
    }
    sol = pylbm.Simulation(dico)
    im = 0
    compt = 0  # time steps since the last dump
    while sol.t < Tf:
        sol.one_time_step()
        compt += 1
        if compt == 128:
            im += 1
            save(sol, im)
            compt = 0
    return sol
if __name__ == '__main__':
    # run the simulation up to t = 400 with the module-level space step
    Tf = 400.
    run(dx, Tf)
| en | 0.398964 | # parameters #LA Parameters ---------- dx: double spatial step Tf: double final time generator: pylbm generator sorder: list storage order withPlot: boolean if True plot the solution otherwise just compute the solution | 2.008693 | 2 |
day14.py | kbturk/Advent_of_Code2020 | 0 | 6617891 | import sys, itertools, re
from copy import deepcopy
from typing import Dict, List, Optional, Set, Tuple
def floating_bit_poss(floating_mask: int, value: int) -> Set[int]:
    """Return every address reachable from *value* by setting each floating
    bit of *floating_mask* (bits 0..35) to both 0 and 1.

    Raises if the mask has bits set outside the 36-bit address space, matching
    the original "impossible state" guard.
    """
    remaining = floating_mask
    results = {value}
    for bit in range(36):
        flag = 1 << bit
        if remaining & flag:
            remaining &= ~flag
            # fan out: every partial address with this bit both set and cleared
            results = ({v | flag for v in results} |
                       {v & ~flag for v in results})
    if remaining:
        raise Exception("Reached impossible state.")
    return results
def main(args: List[str]) -> int:
    """AoC 2020 day 14 part 2: decode memory writes with a floating bitmask.

    args[1] is the input file.  "mask = ..." lines set the current 36-bit
    mask; "mem[a] = v" lines write v to every address generated by OR-ing a
    with the mask's 1-bits and expanding its X (floating) bits.  Prints the
    sum of all stored values and returns 0.
    """
    registrar: Dict[int, int] = {}
    with open(args[1], 'r') as f:
        # renamed from `input`, which shadowed the builtin
        lines = [line.strip() for line in f]
    for line in lines:
        if "mask" in line:
            _, _, mask_input_string = line.split()
            print(mask_input_string)
            ones_mask = 0b000000000000000000000000000000000000
            floating_mask = 0b000000000000000000000000000000000000
            # leftmost character is the most significant bit (35)
            for i, c in enumerate(mask_input_string):
                bit_pos = 35 - i
                if c == '1':
                    ones_mask |= (1 << bit_pos)
                elif c == 'X':
                    floating_mask |= (1 << bit_pos)
        elif "mem" in line:
            # parse "mem[<addr>] = <value>"
            added_commands = re.fullmatch(r'mem\[([0-9]+)\] = ([0-9]+)', line)
            key, value = int(added_commands.group(1)), int(added_commands.group(2))
            addr = key | ones_mask
            # write to every address produced by the floating bits
            for key in floating_bit_poss(floating_mask, addr):
                registrar[key] = value
    print(sum(registrar.values()))
    return 0
if __name__ == '__main__':
sys.exit( main( sys.argv ) ) | import sys, itertools, re
from copy import deepcopy
from typing import Dict, List, Optional, Set, Tuple
def floating_bit_poss(floating_mask: int, value:int) -> Set[int]:
if floating_mask == 0:
return {value}
for i in range(36):
bit_pos = 35 - i
if floating_mask & (1 << bit_pos):
value_hi = value | (1 << bit_pos)
value_lo = value &~ (1 << bit_pos)
return (floating_bit_poss(floating_mask &~ (1 << bit_pos),value_hi) |
floating_bit_poss(floating_mask &~ (1 << bit_pos), value_lo))
raise Exception("Reached impossible state.")
def main(args: List[str]) -> int:
registrar: Dict[int,int]= {}
with open(args[1], 'r') as f:
input = [ line.strip() for line in f ]
for line in input:
if "mask" in line:
_,_, mask_input_string = line.split()
print(mask_input_string)
ones_mask = 0b000000000000000000000000000000000000
#no more zero masks...
floating_mask = 0b000000000000000000000000000000000000
for i,c in enumerate(mask_input_string):
bit_pos = 35 - i
if c == '1':
ones_mask |= ( 1 << bit_pos )
elif c == 'X':
floating_mask |= ( 1 << bit_pos )
elif "mem" in line:
#convert the line into a list of locations and registrars
added_commands = re.fullmatch( r'mem\[([0-9]+)\] = ([0-9]+)', line )
key, value = int(added_commands.group(1)), int(added_commands.group(2))
addr = key | ones_mask
#now generate the remaining iterations:
for key in floating_bit_poss(floating_mask, addr):
registrar[key] = value
#value = value & zeros_mask
print(sum(registrar.values()))
return 0
if __name__ == '__main__':
sys.exit( main( sys.argv ) ) | en | 0.55806 | #no more zero masks... #convert the line into a list of locations and registrars #now generate the remaining iterations: #value = value & zeros_mask | 2.849586 | 3 |
lib/bmp280.py | gandro/micropython-m5stickc-plus | 4 | 6617892 | <gh_stars>1-10
# Copyright (c) 2020 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
I2C-based driver for the BMP280 temperature and pressure sensor.
"""
from micropython import const
from ustruct import unpack_from
from utime import sleep_ms, sleep_us
# --- I2C address and register map ---
_BMP280_I2C_DEFAULT_ADDR = const(0x76)
_BMP280_CHIP_ID = const(0xd0)
_BMP280_CHIP_ID_VALUE = const(0x58)  # expected id; checked at init
_BMP280_RESET = const(0xe0)
_BMP280_RESET_VALUE = const(0xb6)  # magic value that triggers a soft reset
_BMP280_STATUS = const(0xf3)
_BMP280_CONTROL = const(0xf4)
# bit fields inside the CONTROL register
_BMP280_CONTROL_TEMP_SAMPLES_MASK = const(0b1110_0000)
_BMP280_CONTROL_TEMP_SAMPLES_POS = const(5)
_BMP280_CONTROL_PRESS_SAMPLES_MASK = const(0b0001_1100)
_BMP280_CONTROL_PRESS_SAMPLES_POS = const(2)
_BMP280_CONTROL_MODE_MASK = const(0b0000_0011)
_BMP280_CONTROL_MODE_POS = const(0)
_BMP280_CONFIG = const(0xf5)
# bit fields inside the CONFIG register
_BMP280_CONFIG_STANDBY_MASK = const(0b1110_0000)
_BMP280_CONFIG_STANDBY_POS = const(5)
_BMP280_CONFIG_IIR_MASK = const(0b0001_1100)
_BMP280_CONFIG_IIR_POS = const(2)
_BMP280_DATA = const(0xf7)
_BMP280_CALIBRATION = const(0x88)
_BMP280_DATA_LEN = const(6)
_BMP280_CALIBRATION_LEN = const(24)
# measurement-time estimates in microseconds (see _measure_delay_us)
_BMP280_DURATION_PER_SAMPLE_US = const(2000)
_BMP280_DURATION_STARTUP_US = const(1000)
_BMP280_DURATION_PRESS_STARTUP_US = const(500)
# --- public constants: power modes ---
MODE_NORMAL = const(0b11)
MODE_FORCED = const(0b01)
MODE_SLEEP = const(0b00)
# --- public constants: temperature oversampling ---
TEMP_SAMPLES_SKIP = const(0b000)
TEMP_SAMPLES_1 = const(0b001)
TEMP_SAMPLES_2 = const(0b010)
TEMP_SAMPLES_4 = const(0b011)
TEMP_SAMPLES_8 = const(0b100)
TEMP_SAMPLES_16 = const(0b111)
# --- public constants: pressure oversampling ---
PRESS_SAMPLES_SKIP = const(0b000)
PRESS_SAMPLES_1 = const(0b001)
PRESS_SAMPLES_2 = const(0b010)
PRESS_SAMPLES_4 = const(0b011)
PRESS_SAMPLES_8 = const(0b100)
PRESS_SAMPLES_16 = const(0b111)
# --- public constants: IIR filter coefficient (pre-shifted to field position) ---
IIR_FILTER_OFF = const(0b0000_0000)
IIR_FILTER_2 = const(0b0000_0100)
IIR_FILTER_4 = const(0b0000_1000)
IIR_FILTER_8 = const(0b0000_1100)
IIR_FILTER_16 = const(0b0001_0000)
# --- public constants: standby interval in NORMAL mode (pre-shifted) ---
STANDBY_0_5_MS = const(0b0000_0000)
STANDBY_62_5_MS = const(0b0010_0000)
STANDBY_125_MS = const(0b0100_0000)
STANDBY_250_MS = const(0b0110_0000)
STANDBY_500_MS = const(0b1000_0000)
STANDBY_1000_MS = const(0b1010_0000)
STANDBY_2000_MS = const(0b1100_0000)
STANDBY_4000_MS = const(0b1110_0000)
class BMP280:
    """Driver for a BMP280 sensor on an I2C bus.

    The constructor verifies the chip id, soft-resets the device and programs
    oversampling, power mode, IIR filter and standby interval.  Use
    measure() to read compensated temperature and pressure.
    """
    def __init__(self, i2c, addr=_BMP280_I2C_DEFAULT_ADDR, *,
                 mode=MODE_NORMAL,
                 press_samples=PRESS_SAMPLES_4,
                 temp_samples=TEMP_SAMPLES_1,
                 iir_filter=IIR_FILTER_16,
                 standby_ms=STANDBY_0_5_MS):
        self.i2c = i2c
        self.addr = addr
        # verify we are actually talking to a BMP280 before touching registers
        chipid = self.i2c.readfrom_mem(self.addr, _BMP280_CHIP_ID, 1)
        if chipid[0] != _BMP280_CHIP_ID_VALUE:
            raise ValueError("device not found")
        self.reset()
        sleep_ms(10)
        # program oversampling rates and (optionally) NORMAL power mode
        control = bytearray(1)
        control[0] |= ((temp_samples << _BMP280_CONTROL_TEMP_SAMPLES_POS)
                       & _BMP280_CONTROL_TEMP_SAMPLES_MASK)
        control[0] |= ((press_samples << _BMP280_CONTROL_PRESS_SAMPLES_POS)
                       & _BMP280_CONTROL_PRESS_SAMPLES_MASK)
        # MODE_FORCED will be set in the call to measure()
        if mode == MODE_NORMAL:
            control[0] |= ((MODE_NORMAL << _BMP280_CONTROL_MODE_POS)
                           & _BMP280_CONTROL_MODE_MASK)
        self.i2c.writeto_mem(self.addr, _BMP280_CONTROL, control)
        # program standby interval and IIR filter coefficient
        config = bytearray(1)
        config[0] |= ((standby_ms << _BMP280_CONFIG_STANDBY_POS)
                      & _BMP280_CONFIG_STANDBY_MASK)
        config[0] |= ((iir_filter << _BMP280_CONFIG_IIR_POS)
                      & _BMP280_CONFIG_IIR_MASK)
        self.i2c.writeto_mem(self.addr, _BMP280_CONFIG, config)
        # read the factory calibration coefficients consumed by measure()
        calibration = self.i2c.readfrom_mem(
            self.addr, _BMP280_CALIBRATION, _BMP280_CALIBRATION_LEN)
        self._T1, = unpack_from('<H', calibration, 0)
        self._T2, = unpack_from('<h', calibration, 2)
        self._T3, = unpack_from('<h', calibration, 4)
        self._P1, = unpack_from('<H', calibration, 6)
        self._P2, = unpack_from('<h', calibration, 8)
        self._P3, = unpack_from('<h', calibration, 10)
        self._P4, = unpack_from('<h', calibration, 12)
        self._P5, = unpack_from('<h', calibration, 14)
        self._P6, = unpack_from('<h', calibration, 16)
        self._P7, = unpack_from('<h', calibration, 18)
        self._P8, = unpack_from('<h', calibration, 20)
        self._P9, = unpack_from('<h', calibration, 22)
        if mode == MODE_NORMAL:
            # wait for initial measurement to complete
            sleep_us(self._measure_delay_us(temp_samples, press_samples))
    def reset(self):
        """Soft-reset the sensor by writing the magic reset value."""
        self.i2c.writeto_mem(self.addr, _BMP280_RESET,
                             bytes([_BMP280_RESET_VALUE]))
    def _measure_delay_us(self, temp_os, press_os):
        """
        Returns the measurement delay in microseconds for the given oversampling
        register values for temperature and pressure.
        """
        # each oversampling step doubles the sample count: (1 << os) >> 1 samples
        temp_dur_us = _BMP280_DURATION_PER_SAMPLE_US * ((1 << temp_os) >> 1)
        press_dur_us = _BMP280_DURATION_PER_SAMPLE_US * ((1 << press_os) >> 1)
        press_dur_us += _BMP280_DURATION_PRESS_STARTUP_US if press_os else 0
        return _BMP280_DURATION_STARTUP_US + temp_dur_us + press_dur_us
    def _measure_prepare(self):
        """
        Sets up a measurement if the sensor is in sleep mode. Returns two
        booleans indicating whether temperature and pressure measurements are
        enabled.
        """
        control = bytearray(1)
        # read out values from control register so see if we have to force
        # a measurement, and if so, how long we have to wait for the result
        self.i2c.readfrom_mem_into(self.addr, _BMP280_CONTROL, control)
        mode = ((control[0] & _BMP280_CONTROL_MODE_MASK)
                >> _BMP280_CONTROL_MODE_POS)
        temp_samples = ((control[0] & _BMP280_CONTROL_TEMP_SAMPLES_MASK)
                        >> _BMP280_CONTROL_TEMP_SAMPLES_POS)
        temp_en = bool(temp_samples)  # oversampling 0 means "skipped"
        press_samples = ((control[0] & _BMP280_CONTROL_PRESS_SAMPLES_MASK)
                         >> _BMP280_CONTROL_PRESS_SAMPLES_POS)
        press_en = bool(press_samples)
        # if sensor was in sleep mode, force a measurement now
        if mode == MODE_SLEEP:
            control[0] |= MODE_FORCED
            self.i2c.writeto_mem(self.addr, _BMP280_CONTROL, control)
            # wait for measurement to complete
            sleep_us(self._measure_delay_us(temp_samples, press_samples))
        return (temp_en, press_en)
    def measure(self):
        """
        Returns the temperature (in °C) and the pressure (in Pa) as a 2-tuple
        in the form of:
            (temperature, pressure)
        A skipped channel reads as 0.0.  This function will wake up the
        sensor for a single measurement if the sensor is in sleep mode.
        """
        temp_en, press_en = self._measure_prepare()
        # Datasheet 3.11.3: Compute t_fine, temperature and pressure
        d = self.i2c.readfrom_mem(self.addr, _BMP280_DATA, _BMP280_DATA_LEN)
        # raw readings are 20-bit values spread over three bytes each
        p_raw = (d[0] << 12) | (d[1] << 4) | (d[2] >> 4)
        t_raw = (d[3] << 12) | (d[4] << 4) | (d[5] >> 4)
        # t_fine (fixed-point intermediate shared by both compensations)
        var1 = (((t_raw >> 3) - (self._T1 << 1)) * self._T2) >> 11
        var2 = (((((t_raw >> 4) - self._T1) * ((t_raw >> 4) - self._T1))
                 >> 12) * self._T3) >> 14
        t_fine = var1 + var2
        # temperature
        temperature = 0.0
        if temp_en:
            temperature = ((t_fine * 5 + 128) >> 8) / 100.0
        # pressure (64-bit integer compensation; result in Pa after /256)
        pressure = 0.0
        if press_en:
            var1 = t_fine - 128000
            var2 = var1 * var1 * self._P6
            var2 = var2 + ((var1 * self._P5) << 17)
            var2 = var2 + (self._P4 << 35)
            var1 = ((var1 * var1 * self._P3) >> 8) + ((var1 * self._P2) << 12)
            var1 = (((1 << 47) + var1) * self._P1) >> 33
            if var1 != 0:
                p = 1048576 - p_raw
                p = (((p << 31) - var2) * 3125) // var1
                var1 = (self._P9 * (p >> 13) * (p >> 13)) >> 25
                var2 = (self._P8 * p) >> 19
                p = ((p + var1 + var2) >> 8) + (self._P7 << 4)
            else:
                # avoid division by zero when calibration is degenerate
                p = 0
            pressure = p / 256.0
        return (temperature, pressure)
| # Copyright (c) 2020 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
I2C-based driver for the BMP280 temperature and pressure sensor.
"""
from micropython import const
from ustruct import unpack_from
from utime import sleep_ms, sleep_us
_BMP280_I2C_DEFAULT_ADDR = const(0x76)
_BMP280_CHIP_ID = const(0xd0)
_BMP280_CHIP_ID_VALUE = const(0x58)
_BMP280_RESET = const(0xe0)
_BMP280_RESET_VALUE = const(0xb6)
_BMP280_STATUS = const(0xf3)
_BMP280_CONTROL = const(0xf4)
_BMP280_CONTROL_TEMP_SAMPLES_MASK = const(0b1110_0000)
_BMP280_CONTROL_TEMP_SAMPLES_POS = const(5)
_BMP280_CONTROL_PRESS_SAMPLES_MASK = const(0b0001_1100)
_BMP280_CONTROL_PRESS_SAMPLES_POS = const(2)
_BMP280_CONTROL_MODE_MASK = const(0b0000_0011)
_BMP280_CONTROL_MODE_POS = const(0)
_BMP280_CONFIG = const(0xf5)
_BMP280_CONFIG_STANDBY_MASK = const(0b1110_0000)
_BMP280_CONFIG_STANDBY_POS = const(5)
_BMP280_CONFIG_IIR_MASK = const(0b0001_1100)
_BMP280_CONFIG_IIR_POS = const(2)
_BMP280_DATA = const(0xf7)
_BMP280_CALIBRATION = const(0x88)
_BMP280_DATA_LEN = const(6)
_BMP280_CALIBRATION_LEN = const(24)
_BMP280_DURATION_PER_SAMPLE_US = const(2000)
_BMP280_DURATION_STARTUP_US = const(1000)
_BMP280_DURATION_PRESS_STARTUP_US = const(500)
MODE_NORMAL = const(0b11)
MODE_FORCED = const(0b01)
MODE_SLEEP = const(0b00)
TEMP_SAMPLES_SKIP = const(0b000)
TEMP_SAMPLES_1 = const(0b001)
TEMP_SAMPLES_2 = const(0b010)
TEMP_SAMPLES_4 = const(0b011)
TEMP_SAMPLES_8 = const(0b100)
TEMP_SAMPLES_16 = const(0b111)
PRESS_SAMPLES_SKIP = const(0b000)
PRESS_SAMPLES_1 = const(0b001)
PRESS_SAMPLES_2 = const(0b010)
PRESS_SAMPLES_4 = const(0b011)
PRESS_SAMPLES_8 = const(0b100)
PRESS_SAMPLES_16 = const(0b111)
IIR_FILTER_OFF = const(0b0000_0000)
IIR_FILTER_2 = const(0b0000_0100)
IIR_FILTER_4 = const(0b0000_1000)
IIR_FILTER_8 = const(0b0000_1100)
IIR_FILTER_16 = const(0b0001_0000)
STANDBY_0_5_MS = const(0b0000_0000)
STANDBY_62_5_MS = const(0b0010_0000)
STANDBY_125_MS = const(0b0100_0000)
STANDBY_250_MS = const(0b0110_0000)
STANDBY_500_MS = const(0b1000_0000)
STANDBY_1000_MS = const(0b1010_0000)
STANDBY_2000_MS = const(0b1100_0000)
STANDBY_4000_MS = const(0b1110_0000)
class BMP280:
def __init__(self, i2c, addr=_BMP280_I2C_DEFAULT_ADDR, *,
mode=MODE_NORMAL,
press_samples=PRESS_SAMPLES_4,
temp_samples=TEMP_SAMPLES_1,
iir_filter=IIR_FILTER_16,
standby_ms=STANDBY_0_5_MS):
self.i2c = i2c
self.addr = addr
chipid = self.i2c.readfrom_mem(self.addr, _BMP280_CHIP_ID, 1)
if chipid[0] != _BMP280_CHIP_ID_VALUE:
raise ValueError("device not found")
self.reset()
sleep_ms(10)
control = bytearray(1)
control[0] |= ((temp_samples << _BMP280_CONTROL_TEMP_SAMPLES_POS)
& _BMP280_CONTROL_TEMP_SAMPLES_MASK)
control[0] |= ((press_samples << _BMP280_CONTROL_PRESS_SAMPLES_POS)
& _BMP280_CONTROL_PRESS_SAMPLES_MASK)
# MODE_FORCED will be set in the call to measure()
if mode == MODE_NORMAL:
control[0] |= ((MODE_NORMAL << _BMP280_CONTROL_MODE_POS)
& _BMP280_CONTROL_MODE_MASK)
self.i2c.writeto_mem(self.addr, _BMP280_CONTROL, control)
config = bytearray(1)
config[0] |= ((standby_ms << _BMP280_CONFIG_STANDBY_POS)
& _BMP280_CONFIG_STANDBY_MASK)
config[0] |= ((iir_filter << _BMP280_CONFIG_IIR_POS)
& _BMP280_CONFIG_IIR_MASK)
self.i2c.writeto_mem(self.addr, _BMP280_CONFIG, config)
calibration = self.i2c.readfrom_mem(
self.addr, _BMP280_CALIBRATION, _BMP280_CALIBRATION_LEN)
self._T1, = unpack_from('<H', calibration, 0)
self._T2, = unpack_from('<h', calibration, 2)
self._T3, = unpack_from('<h', calibration, 4)
self._P1, = unpack_from('<H', calibration, 6)
self._P2, = unpack_from('<h', calibration, 8)
self._P3, = unpack_from('<h', calibration, 10)
self._P4, = unpack_from('<h', calibration, 12)
self._P5, = unpack_from('<h', calibration, 14)
self._P6, = unpack_from('<h', calibration, 16)
self._P7, = unpack_from('<h', calibration, 18)
self._P8, = unpack_from('<h', calibration, 20)
self._P9, = unpack_from('<h', calibration, 22)
if mode == MODE_NORMAL:
# wait for initial measurement to complete
sleep_us(self._measure_delay_us(temp_samples, press_samples))
def reset(self):
self.i2c.writeto_mem(self.addr, _BMP280_RESET,
bytes([_BMP280_RESET_VALUE]))
def _measure_delay_us(self, temp_os, press_os):
"""
Returns the measurement delay in microseconds for the given oversampling
register values for temperature and pressure.
"""
temp_dur_us = _BMP280_DURATION_PER_SAMPLE_US * ((1 << temp_os) >> 1)
press_dur_us = _BMP280_DURATION_PER_SAMPLE_US * ((1 << press_os) >> 1)
press_dur_us += _BMP280_DURATION_PRESS_STARTUP_US if press_os else 0
return _BMP280_DURATION_STARTUP_US + temp_dur_us + press_dur_us
def _measure_prepare(self):
"""
Sets up a measurement if the sensor is in sleep mode. Returns two
booleans indicating whether temperature and pressure measurements are
enabled.
"""
control = bytearray(1)
# read out values from control register so see if we have to force
# a measurement, and if so, how long we have to wait for the result
self.i2c.readfrom_mem_into(self.addr, _BMP280_CONTROL, control)
mode = ((control[0] & _BMP280_CONTROL_MODE_MASK)
>> _BMP280_CONTROL_MODE_POS)
temp_samples = ((control[0] & _BMP280_CONTROL_TEMP_SAMPLES_MASK)
>> _BMP280_CONTROL_TEMP_SAMPLES_POS)
temp_en = bool(temp_samples)
press_samples = ((control[0] & _BMP280_CONTROL_PRESS_SAMPLES_MASK)
>> _BMP280_CONTROL_PRESS_SAMPLES_POS)
press_en = bool(press_samples)
# if sensor was in sleep mode, force a measurement now
if mode == MODE_SLEEP:
control[0] |= MODE_FORCED
self.i2c.writeto_mem(self.addr, _BMP280_CONTROL, control)
# wait for measurement to complete
sleep_us(self._measure_delay_us(temp_samples, press_samples))
return (temp_en, press_en)
def measure(self):
"""
Returns the temperature (in °C) and the pressure (in Pa) as a 2-tuple
in the form of:
(temperature, pressure)
This function will wake up the sensor for a single measurement if the
sensor is in sleep mode.
"""
temp_en, press_en = self._measure_prepare()
# Datasheet 3.11.3: Compute t_fine, temperature and pressure
d = self.i2c.readfrom_mem(self.addr, _BMP280_DATA, _BMP280_DATA_LEN)
p_raw = (d[0] << 12) | (d[1] << 4) | (d[2] >> 4)
t_raw = (d[3] << 12) | (d[4] << 4) | (d[5] >> 4)
# t_fine
var1 = (((t_raw >> 3) - (self._T1 << 1)) * self._T2) >> 11
var2 = (((((t_raw >> 4) - self._T1) * ((t_raw >> 4) - self._T1))
>> 12) * self._T3) >> 14
t_fine = var1 + var2
# temperature
temperature = 0.0
if temp_en:
temperature = ((t_fine * 5 + 128) >> 8) / 100.0
# pressure
pressure = 0.0
if press_en:
var1 = t_fine - 128000
var2 = var1 * var1 * self._P6
var2 = var2 + ((var1 * self._P5) << 17)
var2 = var2 + (self._P4 << 35)
var1 = ((var1 * var1 * self._P3) >> 8) + ((var1 * self._P2) << 12)
var1 = (((1 << 47) + var1) * self._P1) >> 33
if var1 != 0:
p = 1048576 - p_raw
p = (((p << 31) - var2) * 3125) // var1
var1 = (self._P9 * (p >> 13) * (p >> 13)) >> 25
var2 = (self._P8 * p) >> 19
p = ((p + var1 + var2) >> 8) + (self._P7 << 4)
else:
p = 0
pressure = p / 256.0
return (temperature, pressure) | en | 0.808956 | # Copyright (c) 2020 <NAME> # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. I2C-based driver for the BMP280 temperature and pressure sensor. # MODE_FORCED will be set in the call to measure() # wait for initial measurement to complete Returns the measurement delay in microseconds for the given oversampling register values for temperature and pressure. Sets up a measurement if the sensor is in sleep mode. Returns two booleans indicating whether temperature and pressure measurements are enabled. # read out values from control register so see if we have to force # a measurement, and if so, how long we have to wait for the result # if sensor was in sleep mode, force a measurement now # wait for measurement to complete Returns the temperature (in °C) and the pressure (in Pa) as a 2-tuple in the form of: (temperature, pressure) This function will wake up the sensor for a single measurement if the sensor is in sleep mode. 
# Datasheet 3.11.3: Compute t_fine, temperature and pressure # t_fine # temperature # pressure | 1.620341 | 2 |
app.py | kevin1061517/Retrieval-based-Line-bot_Word2Vec | 0 | 6617893 | <gh_stars>0
from linebot import (
LineBotApi, WebhookHandler
)
from linebot.exceptions import (
LineBotApiError, InvalidSignatureError
)
from firebase import firebase
from linebot.models import (
SourceUser,SourceGroup,SourceRoom,LeaveEvent,JoinEvent,
TemplateSendMessage,PostbackEvent,AudioMessage,LocationMessage,
ButtonsTemplate,LocationSendMessage,AudioSendMessage,ButtonsTemplate,
ImageMessage,URITemplateAction,MessageTemplateAction,ConfirmTemplate,
PostbackTemplateAction,ImageSendMessage,MessageEvent, TextMessage,
TextSendMessage,StickerMessage, StickerSendMessage,DatetimePickerTemplateAction,
CarouselColumn,CarouselTemplate,VideoSendMessage,ImagemapSendMessage,BaseSize,
URIImagemapAction,MessageImagemapAction,ImagemapArea,ImageCarouselColumn,ImageCarouselTemplate,
FlexSendMessage, BubbleContainer, ImageComponent, BoxComponent,
TextComponent, SpacerComponent, IconComponent, ButtonComponent,
SeparatorComponent,URIAction,LocationAction,QuickReply,QuickReplyButton,
DatetimePickerAction,PostbackAction,MessageAction,CameraAction,CameraRollAction
)
from imgurpython import ImgurClient
import re
from bs4 import BeautifulSoup as bf
import requests
import random
import os,tempfile
from datetime import timedelta, datetime
from time import sleep
import json
from selenium import webdriver
from urllib.parse import quote
from urllib import parse
from flask import Flask, request, render_template, make_response, abort
#from flask_bootstrap import Bootstrap
#from PIL import Image
#import warnings
#warnings.simplefilter('error', Image.DecompressionBombWarning)
# Imgur credentials, all taken from environment variables
client_id = os.getenv('client_id',None)
client_secret = os.getenv('client_secret',None)
album_id = os.getenv('album_id',None)
access_token = os.getenv('access_token',None)
refresh_token = os.getenv('refresh_token',None)
client = ImgurClient(client_id, client_secret, access_token, refresh_token)
# Firebase realtime database used as the bot's data store
url = os.getenv('firebase_bot',None)
fb = firebase.FirebaseApplication(url,None)
# LINE bot credentials and the Flask application object
line_bot_api = LineBotApi(os.getenv('LINE_CHANNEL_ACCESS_TOKEN',None))
handler = WebhookHandler(os.getenv('LINE_CHANNEL_SECRET', None))
app = Flask(__name__)
#bootstrap = Bootstrap(app)
#
<EMAIL>('/list')
#def do_get():
# return render_template('list.html')
#
<EMAIL>('/2')
#def do_get():
# return render_template('index2.html')
@app.route("/callback", methods=['POST'])
def callback():
# get X-Line-Signature header value
signature = request.headers['X-Line-Signature']
# get request body as text
print('----------in-------')
body = request.get_data(as_text=True)
bodyjson=json.loads(body)
#app.logger.error("Request body: " + bodyjson['events'][0]['message']['text'])
# app.logger.error("Request body: " + body)
#insertdata
# print('-----in----------')
# add_data = usermessage(
# id = bodyjson['events'][0]['message']['id'],
# user_id = bodyjson['events'][0]['source']['userId'],
# message = bodyjson['events'][0]['message']['text'],
# birth_date = datetime.fromtimestamp(int(bodyjson['events'][0]['timestamp'])/1000)
# )
# db.session.add(add_data)
# db.session.commit()
# handle webhook body
try:
handler.handle(body,signature)
except LineBotApiError as e:
print("Catch exception from LINE Messaging API: %s\n" % e.message)
for m in e.error.details:
print("ERROR is %s: %s" % (m.property, m.message))
print("\n")
except InvalidSignatureError:
abort(400)
return 'OK'
def movie_template():
    """Build the movie-service menu as a LINE buttons template."""
    menu_actions = [
        MessageTemplateAction(label='近期上映電影', text='近期上映電影'),
        MessageTemplateAction(label='依莉下載電影', text='eyny'),
        MessageTemplateAction(label='觸電網-youtube', text='觸電網-youtube'),
    ]
    return TemplateSendMessage(
        alt_text='電影 template',
        template=ButtonsTemplate(
            title='服務類型',
            text='請選擇',
            thumbnail_image_url='https://i.imgur.com/zzv2aSR.jpg',
            actions=menu_actions,
        ),
    )
def apple_news():
    """Return the first five Apple Daily realtime headlines as text."""
    target_url = 'https://tw.appledaily.com/new/realtime'
    print('Start parsing appleNews....')
    rs = requests.session()
    res = rs.get(target_url, verify=False)
    soup = bf(res.text, 'html.parser')
    pieces = []
    # keep only the first five headline anchors
    for entry in soup.select('.rtddt a')[:5]:
        headline = entry.select('font')[0].text
        pieces.append('{}\n{}\n'.format(headline, entry['href']))
    return ''.join(pieces)
def youtube_page(keyword):
    """Search YouTube for *keyword* and return three parallel lists:
    (video URLs, titles, thumbnail URLs)."""
    urls, titles, pics = [], [], []
    target_url = 'https://www.youtube.com/results?search_query={}&sp=EgIQAQ%253D%253D'.format(quote(keyword))
    session = requests.session()
    soup = bf(session.get(target_url).text, 'html.parser')
    for node in soup.select('.yt-lockup-title'):
        href = node.find('a')['href']
        # skip anything that is not a plain /watch?v=<id> link
        if len(href) > 20:
            continue
        urls.append('https://www.youtube.com{}'.format(href))
        titles.append(node.find('a')['title'])
        pics.append('https://i.ytimg.com/vi/{}/0.jpg'.format(href[9:]))
    return urls, titles, pics
def yvideo(url):
    """Resolve a YouTube URL through qdownloader.net and return
    (direct googlevideo URL trimmed before '&title', thumbnail URL).

    Fix: the old while-loop re-read index 0, compared with `== None`, and
    crashed with a bare IndexError when no link matched; the scan is now a
    straightforward for/else that raises explicitly.
    """
    url = 'https://qdownloader.net/download?video={}'.format(url)
    res = requests.get(url)
    soup = bf(res.text, 'html.parser')
    t = soup.select('.col-md-8 td a')
    for anchor in t:
        url = anchor['href']
        if re.search(r'googlevideo', url):
            break
    else:
        # preserve the original failure mode (index overrun) but with a message
        raise IndexError('no googlevideo download link found')
    t = soup.select('.info.col-md-4 img')
    img = t[0]['src']
    # keep everything up to '&title', then drop the literal '&title' suffix
    url = re.search(r'.*&title', url).group()[:-6]
    return url, img
def yout_download(_id):
    """Fetch get_video_info for video *_id* and return the first stream URL."""
    print('in')
    info_url = 'http://www.youtube.com/get_video_info?eurl=http%3A%2F%2Fkej.tw%2F&sts=17885&video_id={}'.format(str(_id))
    fields = parse.parse_qs(requests.get(info_url).text)
    # the stream map is itself URL-encoded key/value pairs
    streams = parse.parse_qs(fields['url_encoded_fmt_stream_map'][0])
    url = streams['url'][0]
    print('out----'+url)
    return url
def buttons_template_yout(page, keyword):
    """Confirm template offering a recommended clip or ten more results."""
    choices = [
        MessageTemplateAction(label='推薦', text='台北暗殺星奪冠之路yout'),
        PostbackTemplateAction(label='再來10部',
                               data='carousel/{}/{}'.format(page, keyword)),
    ]
    return TemplateSendMessage(
        alt_text='video template',
        template=ConfirmTemplate(text='請選擇一下', actions=choices),
    )
def carousel_template(keyword, page=0):
    """Build a 10-entry image carousel of YouTube results for *keyword*.

    Odd pages show results 10-19 instead of 0-9.  Falls back to a single
    buttons template when fewer than ten results remain.  Returns a list of
    [carousel, confirm-template] messages.

    Cleanup: the ten hand-copied ImageCarouselColumn literals are replaced by
    one comprehension over the parallel result lists (identical output).
    """
    video_url, title, img_url = youtube_page(keyword)
    if page != 0 and page % 2 != 0:
        # odd page number: skip the first ten results
        offset = 10
        video_url = video_url[offset:]
        title = title[offset:]
        img_url = img_url[offset:]
    # strip the 'https://www.youtube.com/watch?v=' prefix, keep the video id
    pass_url = [u[32:] for u in video_url]
    if len(title) < 10:
        return porn_video_template(keyword)
    columns = [
        ImageCarouselColumn(
            image_url=img,
            action=PostbackTemplateAction(
                label=text[:12],
                text='請等待一下...',
                data='video/{}/{}'.format(keyword, vid),
            ),
        )
        for img, text, vid in zip(img_url[:10], title[:10], pass_url[:10])
    ]
    Image_Carousel = TemplateSendMessage(
        alt_text='Carousel_template',
        template=ImageCarouselTemplate(columns=columns),
    )
    return [Image_Carousel, buttons_template_yout(page, keyword)]
def porn_video_template(keyword, index=0):
    """Buttons template for one YouTube search result (watch / next)."""
    video_url, title, img_url = youtube_page(keyword)
    watch_data = 'video/{}/{}/{}'.format(str(index), keyword, video_url[index][32:])
    next_data = 'porn/{}/{}'.format(str(index), keyword)
    actions = [
        PostbackTemplateAction(label='觀看~請耐心等待.....', data=watch_data),
        PostbackTemplateAction(label='下一部', data=next_data),
    ]
    return TemplateSendMessage(
        alt_text='video template',
        template=ButtonsTemplate(
            title=title[index][:40],
            text='請選擇',
            thumbnail_image_url=img_url[index],
            actions=actions,
        ),
    )
def movie():
    """Scrape atmovies.com.tw for upcoming releases.

    Returns a newline-joined "title / link" listing, capped at the first
    20 anchors of the 'ul.filmNextListAll' list.
    """
    source = 'http://www.atmovies.com.tw/movie/next/0/'
    print('Start parsing movie ...')
    session = requests.session()
    page = session.get(source, verify=False)
    page.encoding = 'utf-8'
    dom = bf(page.text, 'html.parser')
    pieces = []
    for position, anchor in enumerate(dom.select('ul.filmNextListAll a')):
        if position == 20:
            break
        name = anchor.text.replace('\t', '').replace('\r', '')
        url = "http://www.atmovies.com.tw" + anchor['href']
        pieces.append('{}\n{}\n'.format(name, url))
    return ''.join(pieces)
def pattern_mega(text):
    """Return True if *text* mentions a cloud-download host keyword.

    Used by eyny_movie() to keep only threads that offer MEGA / Mega.nz /
    Google Drive style download links.

    Args:
        text: arbitrary string, e.g. a forum thread title.

    Returns:
        bool: True on the first case-insensitive match, otherwise False.
        (The original fell off the end and implicitly returned None; it
        also listed the same keywords in several casings even though the
        search already used re.IGNORECASE.)
    """
    # Case-insensitive, so one lowercase spelling per keyword suffices.
    patterns = ['mega', 'mg', 'mu', 'me', 'gd', 'google']
    return any(re.search(pattern, text, re.IGNORECASE) for pattern in patterns)
def eyny_movie():
    """Scrape the eyny movie board, keeping threads with download keywords.

    Only threads whose titles pass pattern_mega() (MEGA / Google Drive
    style links) are listed.  Returns a "title / link" text blob.
    """
    target_url = 'http://www.eyny.com/forum-205-1.html'
    rs = requests.session()
    res = rs.get(target_url, verify=False)
    soup = bf(res.text, 'html.parser')
    content = ''
    for titleURL in soup.select('.bm_c tbody .xst'):
        if pattern_mega(titleURL.text):
            title = titleURL.text
            # Skip one specific thread id (presumably a pinned/unwanted
            # post) -- NOTE(review): magic value, confirm it is still needed.
            if '11379780-1-3' in titleURL['href']:
                continue
            link = 'http://www.eyny.com/' + titleURL['href']
            data = '{}\n{}\n\n'.format(title, link)
            content += data
    return content
def panx():
    """Scrape the PanX front page and return a title/link listing."""
    source = 'https://panx.asia/'
    print('Start parsing ptt hot....')
    session = requests.session()
    response = session.get(source, verify=False)
    dom = bf(response.text, 'html.parser')
    entries = [
        '{}\n{}\n\n'.format(anchor.text, anchor['href'])
        for anchor in dom.select('div.container div.row div.desc_wrap h2 a')
    ]
    return ''.join(entries)
def magazine():
    """Scrape CommonWealth Magazine (cw.com.tw) headlines.

    Returns a title/link listing limited to the first six '.caption h3 a'
    anchors on the front page.
    """
    session = requests.session()
    response = session.get('https://www.cw.com.tw/', verify=False)
    response.encoding = 'utf-8'
    dom = bf(response.text, 'html.parser')
    listing = ""
    for position, anchor in enumerate(dom.select('.caption h3 a'), 0):
        listing += '{}\n{}\n'.format(anchor.text.strip(), anchor['href'])
        if position > 4:
            break
    return listing
def lottery():
    """Scrape pilio.idv.tw for the most recent draw of three lottery games.

    Returns:
        (big, b539, bwei): lists of table-cell texts (date + winning
        numbers) for 大樂透, 今彩539 and 威力彩 respectively.

    NOTE(review): the hard-coded slice ranges (4:10 / 3:7) mirror the
    current page layout and will silently break if the site changes.
    """
    # url = 'https://www.pilio.idv.tw/ltobig/drawlist/drawlist.asp'
    # url2 = 'https://www.pilio.idv.tw/lto539/drawlist/drawlist.asp'
    # url3 = 'https://www.pilio.idv.tw/lto/drawlist/drawlist.asp'
    name = ['ltobig','lto539','lto']
    for i in name:
        url = 'https://www.pilio.idv.tw/{}/drawlist/drawlist.asp'.format(i)
        res = requests.get(url)
        res.encoding = 'utf-8'
        soup = bf(res.text,'html.parser')
        t = soup.select('.inner td')
        # The comprehensions reuse the name `i`, but Python 3 scopes it to
        # the comprehension, so the outer loop variable is untouched.
        if i == 'ltobig':
            big = [t[i].text.strip() for i in range(4,10,1)]
        elif i == 'lto539':
            b539 = [t[i].text.strip() for i in range(3,7,1)]
        elif i == 'lto':
            bwei = [t[i].text.strip() for i in range(3,7,1)]
    return big,b539,bwei
def lottery_stat(type_lottery,year):
    """Scrape lotto.auzonet.com number-frequency tables for one game/year.

    Args:
        type_lottery: 'big-lotto' | 'power' | 'daily539' (auzonet URL slug).
        year: year string (e.g. '2019') interpolated into the URL.

    Returns:
        str: lines of " <number>  <count>次 <balloon bar>", one balloon per
        ``div`` occurrences.

    NOTE(review): an unrecognized type_lottery leaves ``div`` unbound and
    the function dies with UnboundLocalError -- callers only ever pass the
    three known slugs.
    """
    if type_lottery == 'big-lotto':
        div = 4  # occurrences per balloon in the bar
    elif type_lottery == 'power':
        div = 5
    elif type_lottery == 'daily539':
        div = 7
    url = 'http://lotto.auzonet.com/lotto_balllist_{}_{}.html'.format(type_lottery,year)
    res = requests.get(url)
    res.encoding = 'utf-8'
    soup = bf(res.text,'html.parser')
    num = ''
    # Cells come in triples; with 1-based c: 1=number, 2=skipped, 0=count.
    for c,i in enumerate(soup.select('.forumline tr td')[3:],1):
        if c%3 == 2:
            continue
        elif c%3 == 1:
            num += ' '+i.text.strip()+' '
        else:
            # Zero-pad single-digit counts so columns line up.
            if len(i.text.strip()) < 2:
                num += '0{}次 {}\n'.format(i.text.strip(),'🎈️'*((int(i.text.strip()))//div))
            else:
                num += '{}次 {}\n'.format(i.text.strip(),'🎈️'*((int(i.text.strip()))//div))
    return num
def lottery_all_num(type_lottery):
    """Scrape lotto-8.com for the recent draw history of one game.

    Args:
        type_lottery: 'big-lotto' | 'power' | 'daily539'; remapped below to
            the lotto-8.com page name.

    Returns:
        str: "date  numbers" lines; big-lotto rows append 特別號 and power
        rows append 第二區 from the third table column.

    NOTE(review): an unrecognized type_lottery leaves start/div unbound
    (UnboundLocalError); for daily539 (div == 3) the ``c % div == 3``
    branch is unreachable, which appears intentional since 539 has no
    special number.
    """
    if type_lottery == 'big-lotto':
        type_lottery = 'listltobigbbk'
        start = 4  # first data cell; columns per row
        div = 4
    elif type_lottery == 'power':
        type_lottery = 'listlto'
        start = 4
        div = 4
    elif type_lottery == 'daily539':
        type_lottery = 'listlto539bbk'
        start = 3
        div = 3
    url = 'https://www.lotto-8.com/{}.asp'.format(type_lottery)
    res = requests.get(url)
    res.encoding = 'utf-8'
    soup = bf(res.text,'html.parser')
    num = ''
    # 1-based c: column 1 = date, 2 = winning numbers, 3 = special number.
    for c,i in enumerate(soup.select('.auto-style4 tr td')[start:],1):
        if c % div == 1:
            num += i.text.strip()
        elif c % div == 2:
            num += ' {}\n'.format(i.text.strip())
        elif c % div == 3:
            if type_lottery == 'listltobigbbk':
                num += '💰️特別號 : {}\n'.format(i.text.strip())
            elif type_lottery == 'listlto':
                num += '💰️第二區 : {}\n'.format(i.text.strip())
    return num
def lottery_year(type_lottery):
    """Carousel offering per-year (2014-2019) frequency stats for a game.

    Each button posts back 'ball_st/<year>/<type_lottery>', handled by
    handle_postback.  *type_lottery* is one of 'big-lotto' | 'power' |
    'daily539' and only affects the first column's title.
    """
    if type_lottery == 'big-lotto':
        t = '大樂透'
    elif type_lottery == 'power':
        t = '威力彩'
    elif type_lottery == 'daily539':
        t = '今彩539'
    Carousel_template = TemplateSendMessage(
        alt_text='Carousel template',
        template=CarouselTemplate(
            columns=[
                CarouselColumn(
                    thumbnail_image_url='https://i.imgur.com/zp75S87.jpg',
                    title=t+'--各個年份的統計',
                    text='請選擇年份',
                    actions=[
                        PostbackTemplateAction(
                            label='2019',
                            data='ball_st/{}/{}'.format('2019',type_lottery)
                        ),
                        PostbackTemplateAction(
                            label='2018',
                            data='ball_st/{}/{}'.format('2018',type_lottery)
                        ),
                        PostbackTemplateAction(
                            label='2017',
                            data='ball_st/{}/{}'.format('2017',type_lottery)
                        )
                    ]
                ),
                CarouselColumn(
                    thumbnail_image_url='https://i.imgur.com/zp75S87.jpg',
                    title='各個年份的統計',
                    text='請選擇年份',
                    actions=[
                        PostbackTemplateAction(
                            label='2016',
                            data='ball_st/{}/{}'.format('2016',type_lottery)
                        ),
                        PostbackTemplateAction(
                            label='2015',
                            data='ball_st/{}/{}'.format('2015',type_lottery)
                        ),
                        PostbackTemplateAction(
                            label='2014',
                            data='ball_st/{}/{}'.format('2014',type_lottery)
                        ),
                    ]
                )
            ]
        )
    )
    return Carousel_template
def check_pic(img_id):
    """Confirm template asking whether the user wants to tag image *img_id*.

    Sent right after an image upload (see handle_msg_img).  The 'Yes'
    button instructs the user to reply with "id + description".
    """
    Confirm_template = TemplateSendMessage(
        alt_text='要給你照片標籤描述嗎?',
        template=ConfirmTemplate(
            title='注意',
            text= '要給你照片標籤描述嗎?\n要就選Yes,並且回覆\n-->id+描述訊息(這張照片id是'+ str(img_id) +')',
            actions=[
                PostbackTemplateAction(
                    label='Yes',
                    text='I choose YES',
                    data='action=buy&itemid=1'
                ),
                MessageTemplateAction(
                    label='No',
                    text='I choose NO'
                )
            ]
        )
    )
    return Confirm_template
def look_up(tex):
    """Look *tex* up in Yahoo's online dictionary and return the entry text.

    Scrapes the search-result page by CSS class.  Returns '查無此字'
    ("word not found") when the expected selectors are absent -- signalled
    by the IndexError on the first select.  Selectors are tied to Yahoo's
    current markup and may break without notice.
    """
    content = ''
    target_url = 'https://tw.dictionary.search.yahoo.com/search;_ylt=AwrtXG86cTRcUGoAESt9rolQ?p={}&fr2=sb-top'.format(tex)
    res = requests.get(target_url)
    soup = bf(res.text,'html.parser')
    try:
        # Headword / pronunciation line.
        content += '{}\n'.format(soup.select('.lh-22.mh-22.mt-12.mb-12.mr-25.last')[0].text)
        # Per-sense rows: part of speech plus optional gloss.
        for i in soup.select('.layoutCenter .lh-22.mh-22.ml-50.mt-12.mb-12'):
            if i.select('p span') != []:
                content += '{}\n{}\n'.format(i.select('.fz-14')[0].text,i.select('p span')[0].text)
            else:
                content += '{}\n'.format(i.select('.fz-14')[0].text)
        # Fallback layout used by some entries.
        if content == '':
            for i in soup.select('.layoutCenter .ml-50.mt-5.last'):
                content += i.text
    except IndexError:
        content = '查無此字'
    return content
def get_total_flex(body_content, footer_content=None):
    """Wrap body/footer component lists into a flex BubbleContainer.

    Args:
        body_content: list of flex components for the bubble body.
        footer_content: optional list of flex components for the footer.
            When omitted, a single link button to the author's GitHub is
            used.  The original declared that button list as a mutable
            default argument, constructing SDK objects at import time and
            sharing one list across all calls; it is now built lazily.

    Returns:
        BubbleContainer ready to embed in a FlexSendMessage.
    """
    if footer_content is None:
        footer_content = [ButtonComponent(
            style='link',
            action=URIAction(label='My github',
                             uri='https://github.com/kevin1061517?tab=repositories'))]
    bubble = BubbleContainer(
        # header=BoxComponent(
        #     layout='vertical',
        #     contents=header_content#---->這樣子也行 contents=[t[0],t[1]]
        # ),
        body=BoxComponent(
            layout='vertical',
            contents=body_content
        ),
        footer=BoxComponent(
            layout='vertical',
            spacing='sm',
            contents= footer_content
        )
    )
    return bubble
def integer_word(word):
    """Flex message with the dictionary entry for *word*.

    Uses look_up() for the entry text; adds a postback button that plays
    the US pronunciation ('audio/<word>', handled in handle_postback).
    Falls back to a plain '查無此字' text message when the word is unknown.
    """
    content = look_up(word)
    if content != '查無此字':
        content = [TextComponent(text='🔍英文單字查詢',weight='bold', align='center',size='md',wrap=True,color='#000000'),SeparatorComponent(margin='lg'),TextComponent(text=content, size='sm',wrap=True,color='#000000')]
        audio_button = [
            SeparatorComponent(),
            ButtonComponent(
                style='link',
                height='sm',
                action=PostbackAction(label='📢 美式發音', data='audio/{}'.format(word))
            )
        ]
        bubble = get_total_flex(content,audio_button)
        message = FlexSendMessage(alt_text="hello", contents=bubble)
    else:
        message = TextSendMessage(text=content)
    return message
def process_draw(user_id):
    """Flex bubble for the number-draw feature.

    Reads the user's configured range from Firebase at /<user_id>/start
    and /<user_id>/end (defaulting to 0 when unset), displays it, and
    offers buttons to change the bounds or post back
    'random/<start>/<end>' to draw a number.
    """
    start = fb.get('/{}/start'.format(user_id),None)
    if not start:
        start = 0
    else:
        # fb.get returns a dict keyed by a Firebase push id; take the value.
        start = list(start.values())[0]
    end = fb.get('/{}/end'.format(user_id),None)
    if not end:
        end = 0
    else:
        end = list(end.values())[0]
    bubble = BubbleContainer(
        direction='ltr',
        body=BoxComponent(
            layout='vertical',
            contents=[
                TextComponent(text= '抽數字',size='xl',color='#000000'),
                TextComponent(text= '按照步驟來隨機產生幸運數字', size='sm',color='#888888'),
                # review
                SeparatorComponent(color='#000000'),
                # info
                BoxComponent(
                    layout='vertical',
                    color = '#FFFF00',
                    spacing='sm',
                    contents=[
                        BoxComponent(
                            layout='baseline',
                            contents=[
                                TextComponent(
                                    text='起始',
                                    color='#000000',
                                    size='xxl',
                                    flex = 5
                                ),
                                TextComponent(
                                    text=str(start),
                                    size='xxl',
                                    flex = 5
                                )
                            ],
                        ),
                        BoxComponent(
                            layout='baseline',
                            contents=[
                                TextComponent(
                                    text='結束',
                                    color='#000000',
                                    size='xxl',
                                    flex = 5
                                ),
                                TextComponent(
                                    text=str(end),
                                    size='xxl',
                                    flex = 5
                                )
                            ],
                        )
                    ],
                ),
            ],
        ),
        footer=BoxComponent(
            layout='vertical',
            spacing='xs',
            contents=[
                # websiteAction
                ButtonComponent(
                    style='secondary',
                    color='#FFEE99',
                    height='sm',
                    action=MessageAction(label='設定起始數字',text='請輸入起始數字-----------')
                ),
                SeparatorComponent(color='#000000'),
                # websiteAction
                ButtonComponent(
                    style='secondary',
                    color='#FFEE99',
                    height='sm',
                    action=MessageAction(label='設定結束數字(包含)',text='請輸入結束數字-----------')
                ),
                SeparatorComponent(color='#000000'),
                # websiteAction
                ButtonComponent(
                    style='secondary',
                    color='#FFEE99',
                    height='sm',
                    action=PostbackAction(label='開始抽籤',text='抽籤結果!!',data='random/{}/{}'.format(start,end))
                )
            ]
        ),
    )
    return bubble
def process_choose(user_id):
    """Flex bubble summarizing the user's stored decision question/options.

    Reads /<user_id>/opti_num (semicolon-separated option string) and
    /<user_id>/ques_num (question text) from Firebase; the footer buttons
    let the user trigger a random pick ('custom' postback) or reset the
    question/options.  NOTE(review): with nothing stored, the option
    TextComponent gets an empty string -- confirm LINE accepts that.
    """
    temp_opti =[]
    texts = ''
    temp_ques = ''
    t = fb.get('/{}/opti_num'.format(user_id),None)
    if t :
        temp = list(t.values())[0]
        temp_opti = temp.split(';')
    t1 = fb.get('/{}/ques_num'.format(user_id),None)
    if t1:
        temp_ques = list(t1.values())[0]
    print('-----in------')
    for i in temp_opti:
        texts += '{}\n'.format(i)
    bubble = BubbleContainer(
        direction='ltr',
        body=BoxComponent(
            layout='vertical',
            contents=[
                TextComponent(text= '確定好就按下面的抽籤按鈕', weight='bold',size='lg',color='#000000'),
                TextComponent(text= '問題為-->{}'.format(temp_ques), size='md',wrap=True,color='#000000'),
                # review
                SeparatorComponent(color='#000000'),
                # info
                BoxComponent(
                    layout='vertical',
                    spacing='sm',
                    contents=[
                        BoxComponent(
                            layout='baseline',
                            contents=[
                                TextComponent(
                                    text='選項:',
                                    color='#000000',
                                    gravity='center',
                                    flex = 1,
                                    size='lg'
                                ),
                                TextComponent(
                                    text='{}\n'.format(texts[:-1]),
                                    color='#000000',
                                    wrap=True,
                                    flex = 4,
                                    size='lg')
                            ]
                        )
                    ],
                ),
            ],
        ),
        footer=BoxComponent(
            layout='vertical',
            spacing='xs',
            contents=[
                ButtonComponent(
                    style='secondary',
                    color='#FFDD55',
                    height='sm',
                    action=PostbackAction(label='隨機選擇',data='custom')
                ),
                ButtonComponent(
                    style='secondary',
                    color='#FFDD55',
                    height='sm',
                    action=MessageAction(label='設定問題',text='請輸入要設定抉擇的問題:')
                ),
                ButtonComponent(
                    style='secondary',
                    color='#FFDD55',
                    height='sm',
                    action=MessageAction(label='設定選項',text='請輸入要設定的選項,各個選項以分號區隔!!!')
                )
            ]
        ),
    )
    return bubble
def answer(num,user_id):
    """Build the quick-reply keyboard for survey question *num*.

    Args:
        num: question index into the per-question answer lists (parallel
            to questionnaire()'s question list).
        user_id: LINE user id whose survey session is checked in Firebase
            at /<user_id>/question/no.

    Returns:
        QuickReply with one button per canned answer, or None when the
        user has no active survey session.  (The original only assigned
        ``message`` inside the ``if`` and then unconditionally returned
        it, raising UnboundLocalError in the no-session case.)
    """
    t = fb.get('/{}/question/no'.format(user_id),None)
    if not t:
        # No running survey: nothing to offer.
        return None
    choices = [['Secret'],['是','不是,來過好幾次'],['約會','聚餐','朋友聚','家人聚餐'],['排骨套餐','雞排套餐','銷魂叉燒飯','黯然消魂炒飯','螞蟻上樹'],
        ['太鹹了','太清淡了','不好吃','好吃沒話講'],['價格公道','太貴了','普普通通'],['非常滿意','滿意','尚可','差勁','非常差勁'],['非常滿意','滿意','尚可','差勁','非常差勁'],['感覺很棒','感覺很差','食物好吃!','沒有']]
    content = [QuickReplyButton(action=MessageAction(label=i, text=i))
               for i in choices[num]]
    return QuickReply(items=content)
def questionnaire(num,user_id):
    """Return the survey question at index *num*, or None without a session.

    Index 9 wraps back to 0.  A running survey is marked by the Firebase
    node /<user_id>/question/no.
    """
    if num == 9:
        num = 0
    session = fb.get('/{}/question/no'.format(user_id),None)
    if not session:
        return None
    # profile = line_bot_api.get_profile(event.source.user_id)
    # user_name = profile.display_name
    prompts = ['用餐編號','第一次來用餐?','用餐的目的是?','享用主餐的部份是?','對餐廳提供的菜餚口味感到?','對餐廳食物的價格感到?','對工作人員的服務態度感到?','餐廳衛生評價是?','想對我們建議的話']
    return prompts[num]
def greet():
    """Random thank-you reply for a completed survey answer.

    One entry, '中獎' ("you won"), replies with a prize image instead of
    text.  Bug fixed: the original drew ``random.randint(0, 10)`` over a
    12-item list, so index 11 ('中獎') was unreachable and the image
    branch was dead code; randrange over the real length restores it.

    Returns:
        ImageSendMessage for the prize, otherwise a TextSendMessage.
    """
    t = ['哇!!感謝您的答案','太棒了!!','很寶貴的建議','我們會持續改進','謝謝您的建議','很特別的意見','會不斷提供最好服務給您','給我們持續改善的動力','真的是很寶貴的建議','謝謝您!','謝謝指教','中獎']
    r = random.randrange(len(t))
    if t[r] == '中獎':
        message = ImageSendMessage(
            original_content_url='https://i.imgur.com/d9jnyyN.jpg',
            preview_image_url='https://i.imgur.com/d9jnyyN.jpg')
    else:
        message = TextSendMessage(text=t[r])
    return message
def keep(t):
    """Append row *t* (a list of cell values) to Google sheet 'BotTest'.

    Authenticates with a service-account JSON key via oauth2client + gspread.
    NOTE(review): on auth/connection failure this calls sys.exit(1), which
    kills the whole web process -- consider raising instead.
    """
    # GDriveJSON: filename of the downloaded service-account JSON key.
    # GSpreadSheet: name of the target Google spreadsheet.
    GDriveJSON = 'My First Project-9cf8421ad126.json'
    GSpreadSheet = 'BotTest'
    try:
        scope = ['https://spreadsheets.google.com/feeds','https://www.googleapis.com/auth/drive']
        key = SAC.from_json_keyfile_name(GDriveJSON, scope)
        gc = gspread.authorize(key)
        worksheet = gc.open(GSpreadSheet).sheet1
    except Exception as ex:
        print('無法連線Google試算表', ex)
        sys.exit(1)
    worksheet.append_row(t)
    print('新增一列資料到試算表' ,GSpreadSheet)
def delete_row():
    """Delete the first row of Google sheet 'BotTest' (sheet1).

    Same auth flow and sys.exit-on-failure behavior as keep(); see the
    NOTE(review) there.
    """
    # GDriveJSON: filename of the downloaded service-account JSON key.
    # GSpreadSheet: name of the target Google spreadsheet.
    GDriveJSON = 'My First Project-9cf8421ad126.json'
    GSpreadSheet = 'BotTest'
    try:
        scope = ['https://spreadsheets.google.com/feeds','https://www.googleapis.com/auth/drive']
        key = SAC.from_json_keyfile_name(GDriveJSON, scope)
        gc = gspread.authorize(key)
        worksheet = gc.open(GSpreadSheet).sheet1
    except Exception as ex:
        print('無法連線Google試算表', ex)
        sys.exit(1)
    worksheet.delete_row(1)
    print('delete一列資料到試算表' ,GSpreadSheet)
def audio_template(text):
    """Confirm template letting the user accept or reject transcribed *text*.

    Used after speech-to-text: '對' (correct) echoes the text back as the
    user's answer, '錯' (wrong) asks them to repeat.
    """
    Confirm_template = TemplateSendMessage(
        alt_text='audio_template',
        template=ConfirmTemplate(
            title='確定一下吧',
            text='您的建議是:\n{}'.format(text),
            actions=[
                MessageTemplateAction(
                    label='錯',
                    text='那請再說一次'
                ),
                MessageTemplateAction(
                    label='對',
                    text=text
                )
            ]
        )
    )
    return Confirm_template
def quest_template(answer,user_name):
    """Flex summary of collected survey answers with send/clear buttons.

    NOTE(review): the *answer* parameter is immediately overwritten, and
    the Firebase path uses a hard-coded user id instead of the current
    user's -- confirm whether that is intentional (e.g. a fixed review
    account) before changing it.
    """
    t = fb.get('/{}/question/item'.format('U19df1f98bcf1414ec15f9dad09b9b0cb'),None)
    answer = ''
    value = list(t.values())
    # Each stored item is a {question: answer} dict; flatten to text lines.
    for v in value:
        for key,value in v.items():
            answer += '{} \n---> {}\n\n'.format(key,value)
    bubble = BubbleContainer(
        direction='ltr',
        body=BoxComponent(
            layout='vertical',
            contents=[
                TextComponent(text= '{}的消費體驗'.format(user_name), weight='bold',size='xl',color='#000000'),
                TextComponent(text= '您的建議與指教是推動我們前進的動力,{}的滿意就是我們的努力目標,歡迎給我們寶貴的意見,感謝!!'.format(user_name),size='sm',wrap = True,color='#888888'),
                SeparatorComponent(color='#000000'),
                # info
                BoxComponent(
                    layout='vertical',
                    spacing='sm',
                    contents=[
                        BoxComponent(
                            layout='vertical',
                            contents=[
                                BoxComponent(
                                    layout='horizontal',
                                    spacing='md',
                                    contents=[
                                        TextComponent(
                                            text=answer[:-1],
                                            color='#000000',
                                            wrap = True,
                                            gravity = 'center',
                                            size='md')]
                                )
                            ]
                        )
                    ],
                ),
            ],
        ),
        footer=BoxComponent(
            layout='vertical',
            spacing='xs',
            contents=[
                SeparatorComponent(margin='xl',color='#000000'),
                ButtonComponent(
                    style='secondary',
                    color='#66FF66',
                    height='sm',
                    action=PostbackAction(label='確定送出',data='send')
                ),
                ButtonComponent(
                    style='secondary',
                    color='#66FF66',
                    height='sm',
                    action=PostbackAction(label='清除資料',data='clear')
                )
            ]
        ),
    )
    message = FlexSendMessage(alt_text="hello", contents=bubble)
    return message
@handler.add(PostbackEvent)
def handle_postback(event):
    """Central postback dispatcher for the bot.

    ``event.postback.data`` is a slash-separated command string; the
    prefix (or exact value) selects the branch: audio/, datetime,
    question, send, clear, revise, custom, first/, random/, choose,
    other, result, ball_st/, ball_year/, ball_num/, ball_all_num,
    ballyear, ball, carousel/, listen, porn/ and video/.
    """
    profile = line_bot_api.get_profile(event.source.user_id)
    user_name = profile.display_name
    temp = event.postback.data
    # 'audio/<word>': stream the US-pronunciation mp3 from Yahoo's CDN.
    if temp[:5] == 'audio':
        t = temp.split('/')
        word = t[1]
        url = 'https://s.yimg.com/bg/dict/dreye/live/f/{}.mp3'.format(word)
        line_bot_api.reply_message(
            event.reply_token,
            AudioSendMessage(original_content_url=url,duration=3000)
        )
    # datetimepicker result: ask for the party size via quick replies.
    elif temp == 'datetime':
        time = event.postback.params['datetime']
        t = str(time).replace('T',' ')
        line_bot_api.reply_message(
            event.reply_token,
            TextSendMessage(
                text='請問來店人數為?',
                quick_reply=QuickReply(
                    items=[
                        QuickReplyButton(
                            action=PostbackAction(label="1人",text='您訂位時間為{}\n人數為{}人'.format(t,1),data="reservation1")
                        ),
                        QuickReplyButton(
                            action=PostbackAction(label="2人",text='您訂位時間為{}\n人數為{}人'.format(t,2), data="reservation2")
                        ),
                        QuickReplyButton(
                            action=PostbackAction(label="3人",text='您訂位時間為{}\n人數為{}人'.format(t,3), data="reservation3")
                        ),
                        QuickReplyButton(
                            action=PostbackAction(label="4人",text='您訂位時間為{}\n人數為{}人'.format(t,4), data="reservation4")
                        )
                    ])
            )
        )
    # Start a survey session for this user in Firebase.
    elif temp == 'question':
        fb.put('/{}/question'.format(event.source.user_id),data={'no':'1'},name='no')
        line_bot_api.reply_message(
            event.reply_token,
            TextSendMessage(text='感謝您的用餐,請先輸入您的用餐編號\n讓小弟可以為你服務')
        )
    # Flush the collected survey answers to the Google sheet, then clear.
    elif temp == 'send':
        t = fb.get('/{}/question/item'.format(event.source.user_id),None)
        if not t:
            line_bot_api.reply_message(
                event.reply_token, TextSendMessage(text='已經送出囉'))
            return
        temp = [list(i.values())[0] for i in t.values()]
        keep(temp)
        fb.delete('/{}/question'.format(event.source.user_id),None)
        line_bot_api.reply_message(
            event.reply_token,
            TextSendMessage(text='小弟已經把貴賓{}的意見傳給公司了,我們會持續不斷改進,以顧客滿意至極'.format(user_name))
        )
    # Discard the survey answers without submitting.
    elif temp == 'clear':
        fb.delete('/{}/question'.format(event.source.user_id),None)
        line_bot_api.reply_message(
            event.reply_token,
            TextSendMessage(text='親愛的{} 小弟期待您再給我們意見'.format(user_name))
        )
    # Wipe stored member data; note: no reply is sent in this branch.
    elif temp == 'revise':
        fb.delete('/{}/member'.format(event.source.user_id),None)
    # Draw a random option from the user's stored question/options.
    elif temp == 'custom':
        t = fb.get('/{}/opti_num'.format(event.source.user_id),None)
        bubble = process_choose(event.source.user_id)
        message = FlexSendMessage(alt_text="hello", contents=bubble)
        if t :
            temp = list(t.values())[0]
            temp_opti = temp.split(';')
        else:
            line_bot_api.reply_message(
                event.reply_token,
                [TextSendMessage(text='必須要有輸入有正確的選項喔'),message]
            )
        result = random.choice(temp_opti)
        t1 = fb.get('/{}/ques_num'.format(event.source.user_id),None)
        if t1:
            temp_ques = list(t1.values())[0]
        else:
            line_bot_api.reply_message(
                event.reply_token,
                [TextSendMessage(text='必須要有輸入有正確的問題喔'),message]
            )
        bubble = BubbleContainer(
            direction='ltr',
            body=BoxComponent(
                layout='vertical',
                contents=[
                    TextComponent(text= '隨機結果出爐', weight='bold',size='xl',color='#000000'),
                    TextComponent(text= '如有其他問題再按下面按鈕🙏', size='md',color='#888888'),
                    # review
                    SeparatorComponent(color='#000000'),
                    # info
                    BoxComponent(
                        layout='vertical',
                        spacing='sm',
                        contents=[
                            BoxComponent(
                                layout='vertical',
                                contents=[
                                    BoxComponent(
                                        layout='baseline',
                                        spacing='sm',
                                        contents=[
                                            TextComponent(
                                                text='問題:',
                                                color='#000000',
                                                gravity = 'center',
                                                size='lg'),
                                            TextComponent(
                                                text=temp_ques,
                                                color='#000000',
                                                size='lg')]
                                    ),
                                    BoxComponent(
                                        layout='baseline',
                                        spacing='sm',
                                        contents=[
                                            TextComponent(
                                                text='隨機選項:',
                                                color='#000000',
                                                gravity = 'center',
                                                size='lg'),
                                            TextComponent(
                                                text=result,
                                                color='#000000',
                                                size='lg')]
                                    )
                                ]
                            )
                        ],
                    ),
                ],
            ),
            footer=BoxComponent(
                layout='vertical',
                spacing='xs',
                contents=[
                    ButtonComponent(
                        style='secondary',
                        color='#FFDD55',
                        height='sm',
                        action=PostbackAction(label='其他猶豫問題',data='choose')
                    )
                ]
            ),
        )
        message = FlexSendMessage(alt_text="hello", contents=bubble)
        line_bot_api.reply_message(
            event.reply_token,
            message)
    # Two-choice coin-flip bubble ('yesno' / 'buy' / 'yes' variants).
    elif temp[:5] == 'first':
        print('--------in-----')
        temp = temp.split('/')
        _type = temp[1]
        text = ''
        text = '開始'
        action = PostbackAction(label='開始選擇',data='first/{}/start'.format(_type),text='為你選出最佳選擇')
        color = ['#AAAAAA','#AAAAAA']
        point = ['👈','👈']
        if _type == 'yesno':
            t = ['要','不要']
        elif _type == 'buy':
            t = ['買','不買']
        elif _type == 'yes':
            t = ['是','不是']
        if 'start' in temp:
            text = '其他選擇'
            r = random.randint(0,1)
            print('----------'+str(r))
            point[r] = ' '
            color[1-r] = '#000000'
            action = MessageAction(label='其他選擇',text='choose')
        bubble = BubbleContainer(
            direction='ltr',
            body=BoxComponent(
                layout='vertical',
                contents=[
                    TextComponent(text= '隨機選擇',gravity='center',size='xl',color='#000000'),
                    TextComponent(text= '{}請按最下面按鈕'.format(text), size='sm',gravity='center',color='#888888'),
                    # review
                    SeparatorComponent(color='#000000'),
                    # info
                    BoxComponent(
                        layout='vertical',
                        color = '#FFFF00',
                        spacing='sm',
                        contents=[
                            BoxComponent(
                                layout='baseline',
                                contents=[
                                    TextComponent(
                                        text=t[0],
                                        color=color[0],
                                        size='xl',
                                        flex = 5
                                    ),
                                    TextComponent(
                                        text=point[0],
                                        size='xl',
                                        flex = 5
                                    )
                                ],
                            ),
                            BoxComponent(
                                layout='baseline',
                                contents=[
                                    TextComponent(
                                        text=t[1],
                                        color=color[1],
                                        size='xl',
                                        flex = 5
                                    ),
                                    TextComponent(
                                        text=point[1],
                                        size='xl',
                                        flex = 5
                                    )
                                ],
                            )
                        ],
                    ),
                ],
            ),
            footer=BoxComponent(
                layout='vertical',
                spacing='xs',
                contents=[
                    # websiteAction
                    ButtonComponent(
                        style='secondary',
                        color='#FFEE99',
                        height='sm',
                        action=action
                    )
                ]
            ),
        )
        message = FlexSendMessage(alt_text="hello", contents=bubble)
        line_bot_api.reply_message(
            event.reply_token,
            message
        )
    # 'random/<start>/<end>': draw an integer in the configured range.
    elif temp[:6] == 'random':
        profile = line_bot_api.get_profile(event.source.user_id)
        user_name = profile.display_name
        user_id = event.source.user_id
        bubble = process_draw(user_id)
        t = temp.split('/')
        start = int(t[1])
        end = int(t[2])
        if start >= end:
            message = FlexSendMessage(alt_text="hello", contents=bubble)
            line_bot_api.reply_message(
                event.reply_token,
                [TextSendMessage(text='咦!{}要注意起始不能大於等於最後一個數字喔!!'.format(user_name)),message])
            return
        r = random.randint(start,end)
        bubble = BubbleContainer(
            direction='ltr',
            body=BoxComponent(
                layout='vertical',
                contents=[
                    TextComponent(text= '隨機選擇',size='xl',color='#000000'),
                    TextComponent(text= '🔔🔔🔔', size='sm'),
                    # review
                    SeparatorComponent(color='#000000'),
                    # info
                    BoxComponent(
                        layout='vertical',
                        color = '#FFFF00',
                        spacing='sm',
                        contents=[
                            TextComponent(
                                text='由{}到{}隨機產生的號碼'.format(start,end),
                                color='#000000',
                                size='lg',
                                flex = 5
                            ),
                            BoxComponent(
                                layout='baseline',
                                color = '#FFFF00',
                                spacing='sm',
                                contents=[
                                    TextComponent(
                                        text=' ',
                                        color='#000000',
                                        size='xl',
                                        flex = 4
                                    ),
                                    TextComponent(
                                        text=str(r),
                                        color='#000000',
                                        weight = 'bold',
                                        size='xxl',
                                        flex = 5
                                    )
                                ]
                            )
                        ],
                    ),
                ],
            ),
            footer=BoxComponent(
                layout='vertical',
                spacing='xs',
                contents=[
                    # websiteAction
                    ButtonComponent(
                        style='secondary',
                        color='#FFEE99',
                        height='sm',
                        action=PostbackAction(label='再抽一次',text='抽籤結果!!',data='random/{}/{}'.format(start,end))
                    ),
                    ButtonComponent(
                        style='secondary',
                        color='#FFEE99',
                        height='sm',
                        action=MessageAction(label ='重設範圍',text='draw',)
                    )
                ]
            ),
        )
        message = FlexSendMessage(alt_text="hello", contents=bubble)
        line_bot_api.reply_message(
            event.reply_token,
            message
        )
    # Reset stored question/options and show the how-to bubble.
    elif temp[:6] == 'choose':
        fb.delete('/{}/opti_num'.format(event.source.user_id),None)
        fb.delete('/{}/ques_num'.format(event.source.user_id),None)
        print('in')
        bubble = BubbleContainer(
            direction='ltr',
            body=BoxComponent(
                layout='vertical',
                contents=[
                    TextComponent(text= '把老天爺幫你選擇的選項回覆給我', weight='bold',wrap=True,size='lg',color='#000000'),
                    TextComponent(text= '請先設定問題為什麼,再去設定選項,在最下面的按鈕可以點選並設定,內建有常用的選擇內容,可以參考看看', size='md',wrap=True,color='#888888'),
                    # review
                    SeparatorComponent(color='#000000'),
                    # info
                    BoxComponent(
                        layout='vertical',
                        spacing='sm',
                        contents=[
                            BoxComponent(
                                layout='baseline',
                                contents=[
                                    TextComponent(
                                        text='問題:\n選擇飲料店:',
                                        color='#000000',
                                        wrap=True,
                                        size='md'
                                    ),
                                    TextComponent(
                                        text='選項:\n50嵐;清新;coco;茶湯會',
                                        wrap=True,
                                        color='#000000',
                                        size='md'
                                    )
                                ],
                            ),
                            BoxComponent(
                                layout='baseline',
                                contents=[
                                    TextComponent(
                                        text='問題:\n選擇雞排店',
                                        color='#000000',
                                        wrap=True,
                                        size='md'
                                    ),
                                    TextComponent(
                                        text='選項:\n豪大;派克;蔥Ya雞;胖老爹',
                                        color='#000000',
                                        wrap=True,
                                        size='md'
                                    )
                                ],
                            ),
                            SeparatorComponent(color='#000000')
                        ],
                    ),
                ],
            ),
            footer=BoxComponent(
                layout='vertical',
                spacing='xs',
                contents=[
                    ButtonComponent(
                        style='secondary',
                        color='#FFDD55',
                        height='sm',
                        action=PostbackAction(label='內建問題',data='other',text='請選擇一下喔~')
                    ),
                    ButtonComponent(
                        style='secondary',
                        color='#FFDD55',
                        height='sm',
                        action=MessageAction(label='設定問題',text='請輸入要設定抉擇的問題:')
                    ),
                    ButtonComponent(
                        style='secondary',
                        color='#FFDD55',
                        height='sm',
                        action=MessageAction(label='設定選項',text='請輸入要設定的選項,各個選項以分號區隔喔!!!')
                    )
                ]
            ),
        )
        message = FlexSendMessage(alt_text="hello", contents=bubble)
        line_bot_api.reply_message(
            event.reply_token,
            message
        )
    # Built-in question menu.
    elif temp == 'other':
        bubble = BubbleContainer(
            direction='ltr',
            body=BoxComponent(
                layout='vertical',
                contents=[
                    TextComponent(text= '請把選擇需要解決的選擇', weight='bold',size='xl',color='#000000'),
                    TextComponent(text= '希望能夠解決你的選擇障礙...', size='md',wrap=True,color='#888888'),
                    # review
                    SeparatorComponent(color='#000000'),
                ],
            ),
            footer=BoxComponent(
                layout='vertical',
                spacing='xs',
                contents=[
                    ButtonComponent(
                        style='secondary',
                        color='#5555FF',
                        height='sm',
                        action=PostbackAction(label='內建問題',data='other',text='請選擇一下喔~')
                    ),
                    ButtonComponent(
                        style='secondary',
                        color='#5555FF',
                        height='sm',
                        action=MessageAction(label='設定問題',text='請輸入要設定抉擇的問題:')
                    ),
                    SeparatorComponent(color='#000000'),
                    ButtonComponent(
                        style='secondary',
                        color='#5555FF',
                        height='sm',
                        action=MessageAction(label='設定選項',text='請輸入要設定的選項,各個選項以分號區隔~')
                    )
                ]
            ),
        )
        message = FlexSendMessage(alt_text="hello", contents=bubble)
        line_bot_api.reply_message(
            event.reply_token,
            message
        )
    # NOTE(review): dead/broken branch -- temp equals 'result' exactly,
    # yet the code below splits on '/' and reads t[1]/t[2], which would
    # raise IndexError if ever reached.  It duplicates the 'ball_st'
    # branch; confirm before deleting.
    elif temp == 'result':
        print('-------in---')
        t = temp.split('/')
        lot_year = t[1]
        lot_type = t[2]
        num = lottery_stat(lot_type,lot_year)
        if lot_type == 'big-lotto':
            t = '大樂透'
        elif lot_type == 'power':
            t = '威力彩'
        elif lot_type == 'daily539':
            t = '今彩539'
        bubble = BubbleContainer(
            direction='ltr',
            body=BoxComponent(
                layout='vertical',
                contents=[
                    TextComponent(text='爬蟲程式抓取奧索樂透網', size='xs',wrap=True,color='#888888'),
                    TextComponent(text= '{}年\n{}各號碼出現次數'.format(lot_year,t), weight='bold', wrap=True,size='xl',color='#000000'),
                    TextComponent(text= '各個號碼出現次數統計後的結果呈現,透過爬蟲程式免於開網頁慢慢搜尋....', size='xs',wrap=True,color='#888888'),
                    # review
                    SeparatorComponent(color='#000000'),
                    # info
                    BoxComponent(
                        layout='vertical',
                        margin='lg',
                        color = '#FFFF00',
                        spacing='sm',
                        contents=[
                            BoxComponent(
                                layout='vertical',
                                contents=[
                                    TextComponent(
                                        text='號碼 出現次數',
                                        color='#000000',
                                        size='md'
                                    ),
                                    TextComponent(
                                        text=num[:-1],
                                        color='#000000',
                                        size='md',
                                        wrap=True
                                    ),
                                    SeparatorComponent(color='#000000')
                                ],
                            ),
                        ],
                    ),
                ],
            ),
            footer=BoxComponent(
                layout='vertical',
                spacing='xs',
                contents=[
                    # websiteAction
                    ButtonComponent(
                        style='secondary',
                        color='#DAA520',
                        height='sm',
                        action=PostbackAction(label='其他年份號碼出現次數',data='ball_year/{}'.format(lot_type),text='請稍等...')
                    ),
                    SeparatorComponent(color='#000000'),
                    ButtonComponent(
                        style='secondary',
                        color='#DAA520',
                        height='sm',
                        action=PostbackAction(label='其他遊戲號碼出現次數',data='ballyear',text='請稍等...')
                    )
                ]
            ),
        )
        message = FlexSendMessage(alt_text="hello", contents=bubble)
        line_bot_api.reply_message(
            event.reply_token,
            message
        )
    # 'ball_st/<year>/<game>': per-year number-frequency stats.
    elif temp[:7] == 'ball_st':
        print('-------in---')
        t = temp.split('/')
        lot_year = t[1]
        lot_type = t[2]
        num = lottery_stat(lot_type,lot_year)
        if lot_type == 'big-lotto':
            t = '大樂透'
        elif lot_type == 'power':
            t = '威力彩'
        elif lot_type == 'daily539':
            t = '今彩539'
        bubble = BubbleContainer(
            direction='ltr',
            body=BoxComponent(
                layout='vertical',
                contents=[
                    TextComponent(text='爬蟲程式抓取奧索樂透網', size='xs',wrap=True,color='#888888'),
                    TextComponent(text= '{}年\n{}各號碼出現次數'.format(lot_year,t), weight='bold', wrap=True,size='xl',color='#000000'),
                    TextComponent(text= '各個號碼出現次數統計後的結果呈現,透過爬蟲程式免於開網頁慢慢搜尋....', size='xs',wrap=True,color='#888888'),
                    # review
                    SeparatorComponent(color='#000000'),
                    # info
                    BoxComponent(
                        layout='vertical',
                        margin='lg',
                        color = '#FFFF00',
                        spacing='sm',
                        contents=[
                            BoxComponent(
                                layout='vertical',
                                contents=[
                                    TextComponent(
                                        text='號碼 出現次數',
                                        color='#000000',
                                        size='md'
                                    ),
                                    TextComponent(
                                        text=num[:-1],
                                        color='#000000',
                                        size='md',
                                        wrap=True
                                    ),
                                    SeparatorComponent(color='#000000')
                                ],
                            ),
                        ],
                    ),
                ],
            ),
            footer=BoxComponent(
                layout='vertical',
                spacing='xs',
                contents=[
                    # websiteAction
                    ButtonComponent(
                        style='secondary',
                        color='#DAA520',
                        height='sm',
                        action=PostbackAction(label='其他年份號碼出現次數',data='ball_year/{}'.format(lot_type),text='請稍等...')
                    ),
                    SeparatorComponent(color='#000000'),
                    ButtonComponent(
                        style='secondary',
                        color='#DAA520',
                        height='sm',
                        action=PostbackAction(label='其他遊戲號碼出現次數',data='ballyear',text='請稍等...')
                    )
                ]
            ),
        )
        message = FlexSendMessage(alt_text="hello", contents=bubble)
        line_bot_api.reply_message(
            event.reply_token,
            message
        )
    # 'ball_year/<game>': carousel of years for frequency stats.
    elif temp[:9] == 'ball_year':
        print('-------in---')
        print(temp)
        t = temp.split('/')
        lot_type = t[1]
        print(lot_type+'-----------')
        Carousel_template = lottery_year(lot_type)
        line_bot_api.reply_message(event.reply_token,Carousel_template)
    # 'ball_num/<game>': recent draw-history listing.
    elif temp[:8] == 'ball_num':
        print('-------in---')
        t = temp.split('/')
        lot_type = t[1]
        num = lottery_all_num(lot_type)
        if lot_type == 'big-lotto':
            t = '大樂透'
        elif lot_type == 'power':
            t = '威力彩'
        elif lot_type == 'daily539':
            t = '今彩539'
        bubble = BubbleContainer(
            direction='ltr',
            body=BoxComponent(
                layout='vertical',
                contents=[
                    TextComponent(text='爬蟲程式抓取樂透雲內容', size='xs',wrap=True,color='#888888'),
                    TextComponent(text= '{}歷史開獎紀錄'.format(t), weight='bold', wrap=True,size='xl',color='#000000'),
                    TextComponent(text= '各個號碼個期紀錄,僅列出最近35筆紀錄,透過爬蟲程式免於開網頁慢慢搜尋....', size='xs',wrap=True,color='#888888'),
                    # review
                    SeparatorComponent(color='#000000'),
                    # info
                    BoxComponent(
                        layout='vertical',
                        margin='lg',
                        color = '#FFFF00',
                        spacing='sm',
                        contents=[
                            BoxComponent(
                                layout='vertical',
                                contents=[
                                    TextComponent(
                                        text='  日期     {}中獎號碼'.format(t),
                                        color='#000000',
                                        size='md'
                                    ),
                                    TextComponent(
                                        text=num,
                                        color='#000000',
                                        size='xs',
                                        wrap=True
                                    ),
                                    SeparatorComponent(color='#000000')
                                ],
                            ),
                        ],
                    ),
                ],
            ),
            footer=BoxComponent(
                layout='vertical',
                spacing='xs',
                contents=[
                    # websiteAction
                    ButtonComponent(
                        style='secondary',
                        color='#DAA520',
                        height='sm',
                        action=MessageAction(label='近期開獎紀錄',text='lottery')
                    ),
                    SeparatorComponent(color='#000000'),
                    ButtonComponent(
                        style='secondary',
                        color='#DAA520',
                        height='sm',
                        action=PostbackAction(label='其他遊戲歷史開獎紀錄',data='ball_all_num',text='請稍等...')
                    )
                ]
            ),
        )
        message = FlexSendMessage(alt_text="hello", contents=bubble)
        line_bot_api.reply_message(
            event.reply_token,
            message
        )
    # Menu of games for the draw-history view.
    elif temp == 'ball_all_num':
        buttons_template = TemplateSendMessage(
            alt_text='歷史開獎紀錄',
            template=ButtonsTemplate(
                title='歷史開獎紀錄',
                text='請選擇要查詢的遊戲歷史開獎紀錄',
                thumbnail_image_url='https://i.imgur.com/sMu1PJN.jpg',
                actions=[
                    PostbackTemplateAction(
                        label='大樂透歷史紀錄',
                        data='ball_num/big-lotto',
                        text = '選擇了大樂透...'
                    ),
                    PostbackTemplateAction(
                        label='今彩539歷史紀錄',
                        data='ball_num/daily539',
                        text = '選擇了今彩539...'
                    ),
                    PostbackTemplateAction(
                        label='威力彩歷史紀錄',
                        data='ball_num/power',
                        text = '選擇了威力彩...'
                    )
                ]
            )
        )
        line_bot_api.reply_message(event.reply_token, buttons_template)
    # Menu of games for the yearly frequency view.
    elif temp == 'ballyear':
        # NOTE(review): labels and postback data below look swapped --
        # '今彩539統計' posts ball_year/power and '威力彩統計' posts
        # ball_year/daily539.  Confirm and fix intentionally.
        buttons_template = TemplateSendMessage(
            alt_text='歷年號碼出現次數',
            template=ButtonsTemplate(
                title='歷年號碼出現次數',
                text='請選擇一下',
                thumbnail_image_url='https://i.imgur.com/sMu1PJN.jpg',
                actions=[
                    PostbackTemplateAction(
                        label='大樂透統計',
                        data='ball_year/big-lotto'
                    ),
                    PostbackTemplateAction(
                        label='今彩539統計',
                        data='ball_year/power'
                    ),
                    PostbackTemplateAction(
                        label='威力彩統計',
                        data='ball_year/daily539'
                    )
                ]
            )
        )
        line_bot_api.reply_message(event.reply_token, buttons_template)
    # Generate one random number set for all three games.
    elif temp == 'ball':
        big = ''
        r539 = ''
        r3 = ''
        print('---in--------')
        # 大樂透: 6 numbers from 1-49, zero-padded, comma-joined.
        for i in random.sample([str(i) for i in range(1,50)],6):
            if len(i) !=2 :
                big += '0{},'.format(i)
            else:
                big += '{},'.format(i)
        # 今彩539: 5 numbers from 1-39.
        for i in random.sample([str(i) for i in range(1,40)],5):
            if len(i) !=2 :
                r539 += '0{},'.format(i)
            else:
                r539 += '{},'.format(i)
        # 威力彩: 6 numbers from 1-38 plus a second-zone number 1-7.
        for i in random.sample([str(i) for i in range(1,39)],6):
            if len(i) !=2 :
                r3 += '0{},'.format(i)
            else:
                r3 += '{},'.format(i)
        r3 = r3[:-1] + '\n第二區:0{}'.format(random.sample([i for i in range(1,8)],1)[0])
        print(r3)
        bubble = BubbleContainer(
            direction='ltr',
            body=BoxComponent(
                layout='vertical',
                contents=[
                    TextComponent(text='僅供參考', size='sm',wrap=True,color='#008844'),
                    TextComponent(text='幸運號碼', size='xxl',color='#000000'),
                    SeparatorComponent(color='#000000'),
                    # review
                    SeparatorComponent(color='#000000'),
                    # info
                    BoxComponent(
                        layout='vertical',
                        margin='lg',
                        color = '#FFFF00',
                        spacing='sm',
                        contents=[
                            BoxComponent(
                                layout='baseline',
                                color = '#FFFF00',
                                contents=[
                                    TextComponent(
                                        text='大樂透',
                                        color='#000000',
                                        weight='bold',
                                        size='md',
                                        flex=2
                                    ),
                                    TextComponent(
                                        text=big[:-1],
                                        weight='bold',
                                        color='#FF3333',
                                        size='lg',
                                        flex=5
                                    )
                                ],
                            ),
                            BoxComponent(
                                layout='baseline',
                                color = '#FFFF00',
                                contents=[
                                    TextComponent(
                                        text='今彩539',
                                        color='#000000',
                                        weight='bold',
                                        size='md',
                                        flex = 2
                                    ),
                                    TextComponent(
                                        text=r539[:-1],
                                        weight='bold',
                                        color='#FF3333',
                                        size='lg',
                                        flex=5
                                    )
                                ],
                            ),
                            BoxComponent(
                                layout='horizontal',
                                color = '#FFFF00',
                                contents=[
                                    TextComponent(
                                        text='威力彩',
                                        color='#000000',
                                        weight='bold',
                                        size='md',
                                        gravity = 'center',
                                        flex=2
                                    ),
                                    TextComponent(
                                        text=r3,
                                        weight='bold',
                                        color='#FF3333',
                                        size='lg',
                                        wrap=True,
                                        flex=5
                                    )
                                ],
                            ),
                        ],
                    ),
                ],
            ),
            footer=BoxComponent(
                layout='vertical',
                spacing='xs',
                contents=[
                    # websiteAction
                    ButtonComponent(
                        style='primary',
                        color='#DAA520',
                        height='sm',
                        action=PostbackAction(label='歷年號碼出現次數',data='ballyear',text='請稍等...')
                    ),
                    SeparatorComponent(color='#000000'),
                    ButtonComponent(
                        style='primary',
                        height='sm',
                        color='#DAA520',
                        action=PostbackAction(label='再來一組', data='ball',text='好運到來...')
                    )
                ]
            ),
        )
        message = FlexSendMessage(alt_text="hello", contents=bubble)
        line_bot_api.reply_message(
            event.reply_token,
            message
        )
    # 'carousel/<page>/<keyword>': next page of YouTube search results.
    elif temp[:8] == 'carousel':
        t = temp.split('/')
        pa = int(t[1])
        print('--------be else-------{}---{}'.format(pa,str(type(pa))))
        pa += 1
        print('--------af else-------{}'.format(pa))
        keyword = t[2]
        t = carousel_template(keyword,page=pa)
        line_bot_api.reply_message(
            event.reply_token,
            t)
    # 'listen<url>': stream an audio clip, unless the rights placeholder.
    elif temp[0:6] == 'listen':
        url = temp[6:]
        if url == '音樂版權未授權~':
            line_bot_api.reply_message(event.reply_token,TextSendMessage(text='音樂版權未授權~'))
        else:
            line_bot_api.reply_message(
                event.reply_token,
                AudioSendMessage(original_content_url=url,duration=30000)
            )
    # 'porn/<index>/<keyword>': advance to the next video result.
    elif temp[0:4] == 'porn':
        print('------in------')
        t = temp.split('/')
        index = int(t[1])
        keyword = t[2]
        index += 1
        try:
            buttons_template = porn_video_template(keyword,index)
            line_bot_api.reply_message(event.reply_token, buttons_template)
        except IndexError:
            line_bot_api.reply_message(event.reply_token, TextSendMessage(text='已經到底了喔'))
    # 'video/<keyword>/<id>': resolve and stream the selected video.
    elif temp[0:5] == 'video':
        t = temp.split('/')
        print('----t-----'+str(t))
        keyword = t[1]
        video_url = t[2]
        video_url = 'https://www.youtube.com/watch?v={}'.format(video_url)
        video_url,img = yvideo(video_url)
        line_bot_api.reply_message(
            event.reply_token,
            VideoSendMessage(
                original_content_url=video_url,
                preview_image_url=img))
# Handle incoming image messages: upload the picture to Imgur and log it in Firebase.
@handler.add(MessageEvent,message=ImageMessage)
def handle_msg_img(event):
    """Receive an image from a LINE user, upload it to the configured Imgur
    album, record an entry under /pic in Firebase, and reply with a confirm
    template asking whether the user wants to add a description.

    On any failure the user gets a text reply containing the exception args.
    """
    profile = line_bot_api.get_profile(event.source.user_id)
    tem_name = str(profile.display_name)
    # Next image id: one past the id of the last /pic entry (1 when empty).
    img_id = 1
    t = fb.get('/pic',None)
    if t!=None:
        count = 1
        for key,value in t.items():
            if count == len(t):# grab the last dict entry (most recent picture)
                img_id = int(value['id'])+1
            count+=1
    try:
        # Stream the binary image content into a local temp file.
        message_content = line_bot_api.get_message_content(event.message.id)
        with tempfile.NamedTemporaryFile(prefix='jpg-', delete=False) as tf:
            for chunk in message_content.iter_content():
                tf.write(chunk)
            # Record the upload in Firebase before pushing to Imgur.
            fb.post('/pic',{'id':str(img_id),'user':tem_name,'describe':''})
            tempfile_path = tf.name
        path = tempfile_path
        # Fresh Imgur client per upload, using the module-level credentials.
        client = ImgurClient(client_id, client_secret, access_token, refresh_token)
        config = {
            'album': album_id,
            'name' : img_id,
            'title': img_id,
            'description': 'Cute kitten being cute on'
        }
        client.upload_from_path(path, config=config, anon=False)
        os.remove(path)
        # Ask the user whether they want to attach a description to this id.
        image_reply = check_pic(img_id)
        line_bot_api.reply_message(event.reply_token,[TextSendMessage(text='上傳成功'),image_reply])
    except Exception as e:
        t = '上傳失敗'+str(e.args)
        line_bot_api.reply_message(event.reply_token,TextSendMessage(text=t))
from pydub import AudioSegment
import speech_recognition as sr
@handler.add(MessageEvent,message=AudioMessage)
def handle_aud(event):
    """Transcribe a received LINE voice message and reply with the template
    built from the recognized text.

    The audio is streamed to a temp file, converted to WAV with pydub/ffmpeg,
    then transcribed via Google speech recognition (zh-TW).
    """
    r = sr.Recognizer()
    message_content = line_bot_api.get_message_content(event.message.id)
    mp3_path = None
    wav_path = None
    try:
        with tempfile.NamedTemporaryFile(prefix='mp3-', delete=False) as tf:
            for chunk in message_content.iter_content():
                tf.write(chunk)
            mp3_path = tf.name
        # ffmpeg location under the Heroku buildpack.
        AudioSegment.converter = '/app/vendor/ffmpeg/ffmpeg'
        sound = AudioSegment.from_file_using_temporary_files(mp3_path)
        wav_path = os.path.splitext(mp3_path)[0] + '.wav'
        sound.export(wav_path, format="wav")
        with sr.AudioFile(wav_path) as source:
            audio = r.record(source)
    except Exception as e:
        # BUG FIX: the original referenced an undefined name `test` here,
        # raising NameError and masking the real failure; it then fell
        # through and used the unbound `audio` below.  Report and stop.
        t = '音訊有問題' + str(e.args) + (wav_path or mp3_path or '')
        line_bot_api.reply_message(event.reply_token, TextSendMessage(text=t))
        return
    finally:
        # Remove both temp files (the original leaked the downloaded mp3).
        for p in (mp3_path, wav_path):
            if p and os.path.exists(p):
                os.remove(p)
    text = r.recognize_google(audio, language='zh-TW')
    message = audio_template(text)
    line_bot_api.reply_message(event.reply_token, message)
import sys
import gspread
from oauth2client.service_account import ServiceAccountCredentials as SAC
# 處理訊息:
@handler.add(MessageEvent, message=TextMessage)
def handle_msg_text(event):
profile = line_bot_api.get_profile(event.source.user_id)
user_name = profile.display_name
picture_url = profile.picture_url
user_id = event.source.user_id
n = fb.get('/{}/question/no'.format(user_id),None)
num = 1
if n:
num = int(n['no'])
# ----------------註冊-----------------------
register = fb.get('/{}/member'.format(user_id),None)
if register == None:
temp = event.message.text
if '/' not in temp:
line_bot_api.reply_message(
event.reply_token,
TextSendMessage(text='注意!!少了斜線(/)'))
t = temp.split('/')
if len(t) > 2:
line_bot_api.reply_message(
event.reply_token,
TextSendMessage(text='請重新輸入-多打了斜線了'))
fb.post('/{}/member'.format(user_id),{'name':t[0],'email':t[1]})
buttons_template = TemplateSendMessage(
alt_text='Template',
template=ButtonsTemplate(
title='註冊成功',
text='姓名:{}\nemail:{}\n請確定是否正確'.format(t[0],t[1]),
actions=[
MessageTemplateAction(
label='確認無誤',
text='MENU'
),
PostbackTemplateAction(
label='重新輸入',
text='請再輸入一次,名字與email以斜線(/)區隔',
data='revise'
)
]
)
)
line_bot_api.reply_message(
event.reply_token,
buttons_template)
t = fb.get('/{}/num'.format(user_id),None)
number = fb.get('/{}/temp'.format(user_id),None)
# ----------------抽數字-----------------------
if event.message.text == '請輸入起始數字-----------':
t = '起始數字'
fb.post('/{}/temp'.format(user_id),'起始數字')
elif event.message.text == '請輸入結束數字-----------':
t = '結束數字'
fb.post('/{}/temp'.format(user_id),'結束數字')
elif number:
temp = int(event.message.text)
if '起始數字' in list(number.values()):
fb.post('/{}/start'.format(user_id),temp)
else:
fb.post('/{}/end'.format(user_id),temp)
fb.delete('/{}/temp'.format(user_id),None)
bubble = process_draw(user_id)
message = FlexSendMessage(alt_text="hello", contents=bubble)
line_bot_api.reply_message(
event.reply_token,
[TextSendMessage(text='{}為---->{}'.format(list(number.values())[0],temp)),message])
# -----------------自訂的問題-----------------------
elif event.message.text == '請輸入要設定抉擇的問題:':
fb.delete('/{}/ques_num'.format(event.source.user_id),None)
fb.post('/{}/num'.format(user_id),'問題')
elif event.message.text == '請輸入要設定的選項,各個選項以分號區隔!!!':
fb.delete('/{}/opti_num'.format(event.source.user_id),None)
fb.post('/{}/num'.format(user_id),'選項')
elif t:
if '問題' in list(t.values()):
fb.post('/{}/ques_num'.format(user_id),event.message.text)
else:
fb.post('/{}/opti_num'.format(user_id),event.message.text)
fb.delete('/{}/num'.format(user_id),None)
bubble = process_choose(user_id)
message = FlexSendMessage(alt_text="hello", contents=bubble)
line_bot_api.reply_message(
event.reply_token,
[TextSendMessage(text='{}為---->{}'.format(list(t.values())[0],event.message.text)),message])
else:
if t != None:
line_bot_api.reply_message(
event.reply_token,
[TextSendMessage(text='請輸入正確格式的問題或是選項'),TextSendMessage(text='就文字包含數字也可以🙏')])
elif number != None:
line_bot_api.reply_message(
event.reply_token,
[TextSendMessage(text='請輸入正確的起始及結束數字'),TextSendMessage(text='只能是數字,不能包含文字喔🙏')])
if event.message.text.lower() == "eyny":
content = eyny_movie()
line_bot_api.reply_message(
event.reply_token,
TextSendMessage(text=content))
elif event.message.text.lower() == 'draw':
fb.delete('/{}/end'.format(user_id),None)
fb.delete('/{}/start'.format(user_id),None)
print('in')
bubble = process_draw(user_id)
message = FlexSendMessage(alt_text="hello", contents=bubble)
line_bot_api.reply_message(
event.reply_token,
message
)
elif event.message.text.lower() == 'else':
line_bot_api.reply_message(event.reply_token,TextSendMessage(text='敬請期待'))
elif event.message.text.lower() == 'food':
image_message = [ImageSendMessage(
original_content_url=url,
preview_image_url=url
) for url in ['https://i.imgur.com/5iMx8nk.jpg','https://i.imgur.com/EEy8s6m.jpg','https://i.imgur.com/RCGdggZ.jpg']]
line_bot_api.reply_message(event.reply_token,image_message)
elif event.message.text.lower() == 'exit' or event.message.text == '不做':
fb.delete('/{}/question'.format(event.source.user_id),None)
line_bot_api.reply_message(
event.reply_token,
[TextSendMessage(text='如需繼續幫我們了解您的需求,可以透過問卷讓我們了解'),TextSendMessage(text='輸入menu進入選單喔')]
)
elif event.message.text.lower() == '我吃飽了':
fb.put('/{}/question'.format(event.source.user_id),data={'no':'1'},name='no')
line_bot_api.reply_message(
event.reply_token,
TextSendMessage(text='感謝您的用餐,請先輸入您的用餐編號\n讓小弟可以為你服務')
)
elif questionnaire(num,user_id):
if num == 9:
fb.post('/{}/question/item'.format(user_id),{questionnaire(num-1,user_id):event.message.text})
flex = quest_template(answer,user_name)
line_bot_api.reply_message(
event.reply_token,
flex)
return
t = questionnaire(num,user_id)
QuickReply = answer(num,user_id)
g = ['那想請問','方便問一下','可以告訴我們','可以問','我們想知道']
r = random.randint(0,4)
t = '{}{}'.format(g[r],t)
message = greet()
if num == 8:
message = TextSendMessage(text='最後一題了喔!!!!')
fb.post('/{}/question/item'.format(user_id),{questionnaire(num-1,user_id):event.message.text})
num += 1
fb.put('/{}/question'.format(user_id),data={'no':num},name='no')
line_bot_api.reply_message(
event.reply_token,
[message,TextSendMessage(text='--------- 消費體驗調查 ---------\n如需跳開問卷,請輸入exit或不做'),TextSendMessage(text=t,quick_reply=QuickReply)])
elif event.message.text.lower() == "choose":
buttons_template = TemplateSendMessage(
alt_text='抉擇領域template',
template=ButtonsTemplate(
title='抉擇類型',
text='請選擇一下,想要老天爺替你選擇的問題',
thumbnail_image_url='https://i.imgur.com/ISBqTUQ.jpg',
actions=[
PostbackTemplateAction(
label='要不要問題',
data='first/yesno'
),
PostbackTemplateAction(
label='買不買問題',
data='first/buy'
),
PostbackTemplateAction(
label='是不是問題',
data='first/yes'
),
PostbackTemplateAction(
label='新增問題',
data='choose'
)
]
)
)
line_bot_api.reply_message(
event.reply_token,
[TextSendMessage(text=' -------已經進入抉擇領域了------- '),buttons_template])
# elif event.message.text.lower() == "get":
# result = fb.get('note',None)
# result2 = firebase.get('note', None, {'print': 'pretty'}, {'X_FANCY_HEADER': 'VERY FANCY'})
# line_bot_api.reply_message(
# event.reply_token,
# TextSendMessage(text=str(result)+str(result2)))
#
# elif event.message.text.lower() == "save":
# data = {'name': '<NAME>', 'age': 26,
# 'created_at': datetime.datetime.now()}
# snapshot = firebase.post('/users', data)
# print(snapshot['name'])
# elif event.message.text.lower() == 'test':
# print('-----------in')
# data_UserData = usermessage.query.all()
# history_dic = {}
# history_list = []
# for _data in data_UserData:
# history_dic['id'] = _data.id
# history_dic['User_Id'] = _data.user_id
# history_dic['Mesaage'] = _data.message
# history_dic['Date'] = _data.birth_date
# history_list.append(history_dic)
# history_dic = {}
# line_bot_api.reply_message(event.reply_token,TextSendMessage(text= str(history_list)))
# elif event.message.text.lower() == 'clear':
# t = db.session.query(usermessage).delete()
# db.session.commit()
# print('end------------',str(t))
# line_bot_api.reply_message(event.reply_token,TextSendMessage(text= 'successfully'))
#
# elif event.message.text.lower() == 'input':
# print('-----------in')
# data_UserData = usermessage.query.filter_by(message='hi').first()
# print('end------------',str(data_UserData))
# line_bot_api.reply_message(event.reply_token,TextSendMessage(text= str(data_UserData)))
#
elif event.message.text.lower() == "menu":
bubble = BubbleContainer(
direction='ltr',
hero=ImageComponent(
url='https://i.imgur.com/d1XQC5H.jpg',
aspectMode = 'cover',
aspect_ratio='10:3',
size='full',
action=URIAction(uri='http://www.ccu.edu.tw/', label='label'),
),
body=BoxComponent(
layout='vertical',
contents=[
TextComponent(text='目錄功能', weight='bold', size='lg'),
TextComponent(text='感謝您使用加入本店LINE BOT',align='end',color='#AAAAAA', size='sm'),
SeparatorComponent(color='#000000'),
],
),
footer=BoxComponent(
layout='vertical',
spacing='xs',
contents=[
# websiteAction
ButtonComponent(
style='primary',
height='sm',
color='#00AA00',
action=PostbackAction(label='問卷填答',data='question')
),
ButtonComponent(
style='primary',
color='#00AA00',
height='sm',
action=MessageAction(label='精選菜單',text='food')
),
ButtonComponent(
style='primary',
color='#00AA00',
height='sm',
action=MessageAction(label='訂位功能',text='call')
),
ButtonComponent(
style='primary',
color='#00AA00',
height='sm',
action=MessageAction(label='其他功能',text='else')
)
]
),
)
message = FlexSendMessage(alt_text="hello", contents=bubble)
line_bot_api.reply_message(
event.reply_token,
message
)
#訂位
elif event.message.text.lower() == "call":
date_picker = TemplateSendMessage(
alt_text='訂位系統',
template=ButtonsTemplate(
text='{} 你好\n請設定一下取餐時間'.format(user_name),
title='訂位系統',
# thumbnail_image_url=picture_url,
actions=[
DatetimePickerTemplateAction(
label='設定',
data='datetime',
mode='datetime',
initial='2017-04-01T12:30',
min='2017-04-01T12:30',
max='2099-12-31T12:30'
)
]
)
)
line_bot_api.reply_message(
event.reply_token,
date_picker
)
elif event.message.text == "PanX泛科技":
content = panx()
line_bot_api.reply_message(
event.reply_token,
TextSendMessage(text=content))
elif event.message.text.lower() == "help":
Carousel_template = TemplateSendMessage(
alt_text='Carousel template',
template=CarouselTemplate(
columns=[
CarouselColumn(
thumbnail_image_url='https://i.imgur.com/d1XQC5H.jpg',
title = '功能目錄',
text = 'Hey {} bro!\n提供額外小工具,希望您能有美好的一天'.format(user_name),
actions=[
MessageTemplateAction(
label='餐廳資訊',
text= 'menu'
),
MessageTemplateAction(
label='電影資訊',
text= 'movie'
),
MessageTemplateAction(
label='新聞資訊',
text= 'news'
)
]
),
CarouselColumn(
thumbnail_image_url='https://i.imgur.com/d1XQC5H.jpg',
title = '功能目錄',
text = 'Hey {} bro!\n提供額外小工具,希望您能有美好的一天'.format(user_name),
actions=[
MessageTemplateAction(
label='英文字典',
text= '提醒您:\n只需要在查詢英文單字後加上?即可'
),
MessageTemplateAction(
label='樂透查詢',
text= 'lottery'
),
MessageTemplateAction(
label='中正大學',
text= 'introduce'
)
]
)
]
)
)
line_bot_api.reply_message(event.reply_token,Carousel_template)
elif event.message.text == "近期上映電影":
content = movie()
template = movie_template()
line_bot_api.reply_message(
event.reply_token,[
TextSendMessage(text=content),
template
]
)
elif event.message.text == "觸電網-youtube":
target_url = 'https://www.youtube.com/user/truemovie1/videos'
rs = requests.session()
res = rs.get(target_url, verify=False)
soup = bf(res.text, 'html.parser')
seqs = ['https://www.youtube.com{}'.format(data.find('a')['href']) for data in soup.select('.yt-lockup-title')]
template = movie_template()
line_bot_api.reply_message(
event.reply_token, [
TextSendMessage(text=seqs[random.randint(0, len(seqs) - 1)]),
TextSendMessage(text=seqs[random.randint(0, len(seqs) - 1)]),
TextSendMessage(text=seqs[random.randint(0, len(seqs) - 1)]),
template
])
elif event.message.text.lower() == "movie":
buttons_template = movie_template()
line_bot_api.reply_message(event.reply_token, buttons_template)
elif event.message.text == "蘋果即時新聞":
content = apple_news()
line_bot_api.reply_message(
event.reply_token,
TextSendMessage(text=content))
elif event.message.text.lower() == "news":
buttons_template = TemplateSendMessage(
alt_text='news template',
template=ButtonsTemplate(
title='新聞類型',
text='請選擇',
thumbnail_image_url='https://i.imgur.com/GoAYFqv.jpg',
actions=[
MessageTemplateAction(
label='蘋果即時新聞',
text='蘋果即時新聞'
),
MessageTemplateAction(
label='天下雜誌',
text='天下雜誌'
),
MessageTemplateAction(
label='PanX泛科技',
text='PanX泛科技'
)
]
)
)
line_bot_api.reply_message(event.reply_token, buttons_template)
elif event.message.text == "天下雜誌":
content = magazine()
line_bot_api.reply_message(
event.reply_token,
TextSendMessage(text=content))
elif event.message.text.lower() == 'post':
bubble = BubbleContainer(
direction='ltr',
hero=ImageComponent(
url='https://i.imgur.com/qXqg5qA.jpg',
size='full',
aspect_ratio='5:3',
aspect_mode='cover',
action=URIAction(uri='https://github.com/kevin1061517', label='label'),
),
body=BoxComponent(
layout='vertical',
contents=[
# title
TextComponent(text='Content', weight='bold', size='xl',color='#006400'),
SeparatorComponent(margin='xl',color='#000000'),
# review
TextComponent(
text='''現在在練習python各種語法~藉由這次的project,讓我更加熟悉python語法與邏輯,這個LineBot有各種功能,可以把youtube網址拉進來,LineBot會傳來網址影片,你就可以利用右下角的下載鍵,以及抓出菜單等等功能,就可以下載到手機端了😜,如下:\n語法:\n1.阿滴英文yout\n關鍵字後面加上yout,就可以抓出影片了\n2.50嵐menu\n餐廳名字後面加上menu,就可以抓出餐廳單\n3.馬英九pic\n搜尋照片關鍵字加上pic,就可以馬上幫你抓到要搜尋的照片\n -------------------- 18禁 -------------------- \n4.李宗瑞porn\n搜尋關鍵字加上porn,就可以有成人影片彈出來🙏''',
size='sm',wrap=True,color='#2E8B57'
),
SeparatorComponent(margin='xl',color='#000000'),
TextComponent(
text='承認不勇敢 你能不能別離開很多愛不能重來 我應該釋懷在街頭徘徊 下雨時為你撐傘對你的愛成阻礙 祝福你愉快',
size='sm',wrap=True,color='#2E8B57'
),
SeparatorComponent(margin='xl',color='#000000'),
TextComponent(
text='承認不勇敢 你能不能別離開很多愛不能重來 我應該釋懷在街頭徘徊 下雨時為你撐傘對你的愛成阻礙 祝福你愉快',
size='sm',wrap=True,color='#2E8B57'
),
# info
BoxComponent(
layout='vertical',
margin='lg',
color = '#FFFF00',
spacing='sm',
contents=[
BoxComponent(
layout='baseline',
color = '#FFFF00',
spacing='sm',
contents=[
TextComponent(
text='Developer',
color='#000000',
weight='bold',
align="end",
size='xxs',
flex=5
),
TextComponent(
text='Kevin',
wrap=True,
weight='bold',
align="end",
color='#000000',
size='xxs',
flex=1
)
],
),
],
),
SeparatorComponent(),
],
),
footer=BoxComponent(
layout='vertical',
spacing='sm',
contents=[
# callAction, separator, websiteAction
# SpacerComponent(size='sm'),
# callAction
ButtonComponent(
style='primary',
color = '#FFFF00',
height='sm',
action=URIAction(label='CALL', uri='tel:0935593342'),
),
# separator
SeparatorComponent(),
# websiteAction
]
),
)
message = FlexSendMessage(alt_text="hello", contents=bubble)
line_bot_api.reply_message(
event.reply_token,
message
)
elif event.message.text.lower() == 'lottery':
big,b539,bwei = lottery()
big_txt = ''
b539_txt = ''
bwei = ''
for t,c in enumerate(big,1):
if t%3==0:
big_txt += '特別號:'
big_txt += str(c+'\n')
big_txt = big_txt[:-1]
for t,c in enumerate(b539,0):
b539_txt +='{}\n'.format(str(c))
b539_txt = b539_txt[:-1]
for t,c in enumerate(big,1):
if t%3==0:
bwei += '二區:'
bwei +='{}\n'.format(str(c))
bwei = bwei[:-1]
bubble = BubbleContainer(
direction='ltr',
hero=ImageComponent(
url='https://i.imgur.com/9IUzhOT.jpg',
aspectMode = 'cover',
aspect_ratio='11:3',
size='full',
backgroundColor = '#FFD700',
action=URIAction(uri='https://github.com/kevin1061517', label='label'),
),
body=BoxComponent(
layout='vertical',
contents=[
TextComponent(text='祝你中獎', weight='bold', size='md'),
BoxComponent(
layout='vertical',
margin='lg',
spacing='xs',
contents=[
BoxComponent(
margin = 'sm',
layout='horizontal',
contents=[
ImageComponent(
url='https://i.imgur.com/T6rFvGm.png',
size='md',
aspect_ratio='5:5',
flex=2,
gravity='center',
),
TextComponent(
text=big_txt,
wrap=True,
color='#666666',
size='md',
flex=5
)
],
),
SeparatorComponent(color='#000000'),
BoxComponent(
layout='horizontal',
margin = 'sm',
contents=[
ImageComponent(
url='https://i.imgur.com/DQrt8Xz.png',
size='md',
aspect_ratio='5:5',
flex=2,
gravity='center'
),
TextComponent(
text=b539_txt,
wrap=True,
color='#666666',
size='md',
flex=5,
),
],
),
SeparatorComponent(color='#000000'),
BoxComponent(
layout='horizontal',
margin = 'sm',
contents=[
ImageComponent(
url='https://i.imgur.com/nXq6wrd.png',
size='md',
aspect_ratio='5:5',
flex=2,
gravity='center'
),
TextComponent(
text=bwei,
wrap=True,
color='#666666',
size='md',
flex=5,
),
],
),
],
),
SeparatorComponent(color='#000000'),
],
),
footer=BoxComponent(
layout='vertical',
spacing='xs',
contents=[
# websiteAction
ButtonComponent(
style='primary',
height='sm',
action=PostbackAction(label='歷年開獎紀錄',data='ball_all_num',text='歷年號碼~詳細內容參考至台彩官網')
),
SeparatorComponent(color='#000000'),
ButtonComponent(
style='primary',
color='#DAA520',
height='sm',
action=PostbackAction(label='開門見喜💎️', data='ball',text='您的幸運號碼...')
)
]
),
)
message = FlexSendMessage(alt_text="hello", contents=bubble)
line_bot_api.reply_message(
event.reply_token,
message
)
elif re.search(r'yout$',event.message.text.lower())!=None:
keyword = event.message.text.lower()[:-4]
carousel = carousel_template(keyword)
line_bot_api.reply_message(event.reply_token, carousel)
# 供下載影片
elif re.search(r'^https://www.youtu.*',event.message.text) != None or re.search(r'^https://youtu.be.*',event.message.text) !=None:
t = event.message.text
video_url,img = yvideo(t)
line_bot_api.reply_message(
event.reply_token,
[TextSendMessage(text='供你下載製手機端,本人僅供學術用途,不負法律責任'),
VideoSendMessage(
original_content_url=video_url,
preview_image_url=img)]
)
elif re.search(r'\?$',event.message.text.lower())!=None:
keyword = event.message.text.lower()[:-1]
keyword = keyword.replace(' ','')
print('-----------'+keyword)
message = integer_word(keyword)
line_bot_api.reply_message(
event.reply_token,
message
)
elif event.message.text.lower() == 'introduce':
url = 'https://www.youtube.com/watch?v=vf3qc2-h9kE'
url, img = yvideo(url)
line_bot_api.reply_message(
event.reply_token,
VideoSendMessage(
original_content_url=url,
preview_image_url=img)
)
if __name__ == "__main__":
    # Bind to the port the hosting platform provides via $PORT (5000 locally).
    port = int(os.environ.get('PORT', 5000))
    app.run(host='0.0.0.0', port=port)
from linebot import (
LineBotApi, WebhookHandler
)
from linebot.exceptions import (
LineBotApiError, InvalidSignatureError
)
from firebase import firebase
from linebot.models import (
SourceUser,SourceGroup,SourceRoom,LeaveEvent,JoinEvent,
TemplateSendMessage,PostbackEvent,AudioMessage,LocationMessage,
ButtonsTemplate,LocationSendMessage,AudioSendMessage,ButtonsTemplate,
ImageMessage,URITemplateAction,MessageTemplateAction,ConfirmTemplate,
PostbackTemplateAction,ImageSendMessage,MessageEvent, TextMessage,
TextSendMessage,StickerMessage, StickerSendMessage,DatetimePickerTemplateAction,
CarouselColumn,CarouselTemplate,VideoSendMessage,ImagemapSendMessage,BaseSize,
URIImagemapAction,MessageImagemapAction,ImagemapArea,ImageCarouselColumn,ImageCarouselTemplate,
FlexSendMessage, BubbleContainer, ImageComponent, BoxComponent,
TextComponent, SpacerComponent, IconComponent, ButtonComponent,
SeparatorComponent,URIAction,LocationAction,QuickReply,QuickReplyButton,
DatetimePickerAction,PostbackAction,MessageAction,CameraAction,CameraRollAction
)
from imgurpython import ImgurClient
import re
from bs4 import BeautifulSoup as bf
import requests
import random
import os,tempfile
from datetime import timedelta, datetime
from time import sleep
import json
from selenium import webdriver
from urllib.parse import quote
from urllib import parse
from flask import Flask, request, render_template, make_response, abort
#from flask_bootstrap import Bootstrap
#from PIL import Image
#import warnings
#warnings.simplefilter('error', Image.DecompressionBombWarning)
# Imgur API credentials, pulled from the environment (None if unset).
client_id = os.getenv('client_id',None)
client_secret = os.getenv('client_secret',None)
album_id = os.getenv('album_id',None)
access_token = os.getenv('access_token',None)
refresh_token = os.getenv('refresh_token',None)
# Imgur client used for uploading pictures received from users.
client = ImgurClient(client_id, client_secret, access_token, refresh_token)
# Firebase realtime-database root used as the bot's persistent store.
url = os.getenv('firebase_bot',None)
fb = firebase.FirebaseApplication(url,None)
# LINE Messaging API client and webhook signature handler.
line_bot_api = LineBotApi(os.getenv('LINE_CHANNEL_ACCESS_TOKEN',None))
handler = WebhookHandler(os.getenv('LINE_CHANNEL_SECRET', None))
app = Flask(__name__)
#bootstrap = Bootstrap(app)
#
#@app.route('/list')
#def do_get():
# return render_template('list.html')
#
#@app.route('/2')
#def do_get():
# return render_template('index2.html')
@app.route("/callback", methods=['POST'])
def callback():
    """LINE webhook endpoint.

    Verifies the X-Line-Signature header against the raw request body and
    dispatches the contained events to the registered handlers.  Always
    returns 'OK' (200) to LINE except for a bad signature, which yields 400.

    FIX: removed the unused ``bodyjson = json.loads(body)`` (it did nothing
    and would have raised on any non-JSON body), the leftover debug print,
    and the dead commented-out DB-insert code.
    """
    signature = request.headers['X-Line-Signature']
    body = request.get_data(as_text=True)
    try:
        handler.handle(body, signature)
    except LineBotApiError as e:
        # Log API errors but still ACK the webhook so LINE does not retry.
        print("Catch exception from LINE Messaging API: %s\n" % e.message)
        for m in e.error.details:
            print("ERROR is %s: %s" % (m.property, m.message))
        print("\n")
    except InvalidSignatureError:
        abort(400)
    return 'OK'
def movie_template():
    """Buttons template listing the three movie-related commands."""
    actions = [
        MessageTemplateAction(label='近期上映電影', text='近期上映電影'),
        MessageTemplateAction(label='依莉下載電影', text='eyny'),
        MessageTemplateAction(label='觸電網-youtube', text='觸電網-youtube'),
    ]
    return TemplateSendMessage(
        alt_text='電影 template',
        template=ButtonsTemplate(
            title='服務類型',
            text='請選擇',
            thumbnail_image_url='https://i.imgur.com/zzv2aSR.jpg',
            actions=actions
        )
    )
def apple_news():
    """Scrape the first five Apple Daily realtime headlines.

    Returns a text block of "title\\nlink\\n" entries.
    """
    target_url = 'https://tw.appledaily.com/new/realtime'
    print('Start parsing appleNews....')
    session = requests.session()
    res = session.get(target_url, verify=False)
    soup = bf(res.text, 'html.parser')
    lines = []
    for index, item in enumerate(soup.select('.rtddt a')):
        if index == 5:
            break
        headline = item.select('font')[0].text
        lines.append('{}\n{}\n'.format(headline, item['href']))
    return ''.join(lines)
def youtube_page(keyword):
    """Search YouTube for *keyword* (video filter applied).

    Returns three parallel lists: watch-page URLs, titles, and thumbnail
    URLs.  Entries with long hrefs (playlists/channels) are skipped.
    """
    target_url = ('https://www.youtube.com/results?search_query={}'
                  '&sp=EgIQAQ%253D%253D'.format(quote(keyword)))
    session = requests.session()
    soup = bf(session.get(target_url).text, 'html.parser')
    urls, titles, pics = [], [], []
    for entry in soup.select('.yt-lockup-title'):
        anchor = entry.find('a')
        href = anchor['href']
        if len(href) > 20:
            continue
        urls.append('https://www.youtube.com{}'.format(href))
        titles.append(anchor['title'])
        # href is "/watch?v=<id>"; chars from index 9 on are the video id.
        pics.append('https://i.ytimg.com/vi/{}/0.jpg'.format(href[9:]))
    return urls, titles, pics
def yvideo(url):
    """Resolve a YouTube watch URL into a direct googlevideo stream URL and
    a preview-image URL, via qdownloader.net.

    FIX: the original while-loop tested the candidate *before* advancing the
    cursor, re-reading index 0 twice and scanning links in a confusing
    order; this scans each candidate exactly once.
    """
    page_url = 'https://qdownloader.net/download?video={}'.format(url)
    res = requests.get(page_url)
    soup = bf(res.text, 'html.parser')
    links = soup.select('.col-md-8 td a')
    # First download link that points at a googlevideo host.
    video_url = next(a['href'] for a in links
                     if re.search(r'.*googlevideo.*', a['href']))
    img = soup.select('.info.col-md-4 img')[0]['src']
    # Strip everything from the "&title=" query parameter onward.
    video_url = re.search(r'.*&title', video_url).group()[:-6]
    return video_url, img
def yout_download(_id):
    """Fetch a direct download URL for a YouTube video id via the legacy
    get_video_info endpoint (returns the first stream's URL).
    """
    print('in')
    info_url = ('http://www.youtube.com/get_video_info?eurl=http%3A%2F%2Fkej.tw%2F'
                '&sts=17885&video_id={}'.format(str(_id)))
    info = parse.parse_qs(requests.get(info_url).text)
    stream = parse.parse_qs(info['url_encoded_fmt_stream_map'][0])
    download_url = stream['url'][0]
    print('out----' + download_url)
    return download_url
def buttons_template_yout(page, keyword):
    """Confirm template offering a recommended search or ten more results.

    *page* and *keyword* are round-tripped through the postback payload so
    the postback handler can fetch the next batch of the same search.
    """
    recommend = MessageTemplateAction(
        label='推薦',
        text='台北暗殺星奪冠之路yout'
    )
    next_batch = PostbackTemplateAction(
        label='再來10部',
        data='carousel/{}/{}'.format(page, keyword)
    )
    return TemplateSendMessage(
        alt_text='video template',
        template=ConfirmTemplate(
            text='請選擇一下',
            actions=[recommend, next_batch]
        )
    )
def carousel_template(keyword, page=0):
    """Build an image carousel of up to ten YouTube search results.

    Each column posts back ``video/<keyword>/<video-id>`` so the postback
    handler can resolve and send the chosen video.  Odd, non-zero *page*
    values show the second batch of ten results.  When fewer than ten
    results are available, falls back to the single-video buttons template.

    Returns ``[ImageCarouselTemplate message, confirm template]``.

    FIX: the original spelled out ten identical ImageCarouselColumn
    literals differing only in the index; they are now generated in a
    comprehension (identical output objects).
    """
    video_url, title, img_url = youtube_page(keyword)
    if page != 0 and page % 2 != 0:
        # Second batch: drop the first ten results.
        video_url = video_url[10:]
        title = title[10:]
        img_url = img_url[10:]
    # Keep only the video-id part of each watch URL.
    pass_url = [u[32:] for u in video_url]
    if len(title) < 10:
        # Not enough results for a full carousel.
        return porn_video_template(keyword)
    columns = [
        ImageCarouselColumn(
            image_url=img_url[i],
            action=PostbackTemplateAction(
                label=title[i][:12],
                text='請等待一下...',
                data='video/{}/{}'.format(keyword, pass_url[i])
            )
        )
        for i in range(10)
    ]
    image_carousel = TemplateSendMessage(
        alt_text='Carousel_template',
        template=ImageCarouselTemplate(columns=columns)
    )
    return [image_carousel, buttons_template_yout(page, keyword)]
def porn_video_template(keyword, index=0):
    """Buttons template showing one YouTube result with watch/next actions.

    *index* selects which search result to show; "下一部" posts back the
    current index so the handler can advance to the next result.
    """
    video_url, titles, img_url = youtube_page(keyword)
    video_id = video_url[index][32:]
    caption = titles[index]
    watch = PostbackTemplateAction(
        label='觀看~請耐心等待.....',
        data='video/{}/{}/{}'.format(str(index), keyword, video_id)
    )
    next_one = PostbackTemplateAction(
        label='下一部',
        data='porn/{}/{}'.format(str(index), keyword)
    )
    return TemplateSendMessage(
        alt_text='video template',
        template=ButtonsTemplate(
            title=caption[:40],
            text='請選擇',
            thumbnail_image_url=img_url[index],
            actions=[watch, next_one]
        )
    )
def movie():
    """Scrape up to twenty upcoming movie titles and links from atmovies."""
    target_url = 'http://www.atmovies.com.tw/movie/next/0/'
    print('Start parsing movie ...')
    session = requests.session()
    res = session.get(target_url, verify=False)
    res.encoding = 'utf-8'
    soup = bf(res.text, 'html.parser')
    parts = []
    for index, anchor in enumerate(soup.select('ul.filmNextListAll a')):
        if index == 20:
            break
        name = anchor.text.replace('\t', '').replace('\r', '')
        parts.append('{}\nhttp://www.atmovies.com.tw{}\n'.format(name, anchor['href']))
    return ''.join(parts)
def pattern_mega(text):
    """Return True when *text* looks like a file-host download thread title.

    Matches, case-insensitively, the common host markers used in forum
    titles: mega / mg / mu / me / gd / google.  Returns a falsy value
    otherwise (the original returned None implicitly; False is equivalent
    for every caller, which only tests truthiness).
    """
    # The original list repeated the same markers in different cases; with
    # re.IGNORECASE this de-duplicated set matches exactly the same titles.
    patterns = ('mega', 'mg', 'mu', 'me', 'gd', 'google')
    return any(re.search(p, text, re.IGNORECASE) for p in patterns)
def eyny_movie():
    """List movie threads with file-host links from the eyny forum board.

    Threads whose titles match pattern_mega() are collected; one pinned
    thread (id 11379780-1-3) is explicitly skipped.
    """
    session = requests.session()
    res = session.get('http://www.eyny.com/forum-205-1.html', verify=False)
    soup = bf(res.text, 'html.parser')
    parts = []
    for title_link in soup.select('.bm_c tbody .xst'):
        if not pattern_mega(title_link.text):
            continue
        if '11379780-1-3' in title_link['href']:
            continue
        parts.append('{}\n{}\n\n'.format(title_link.text,
                                         'http://www.eyny.com/' + title_link['href']))
    return ''.join(parts)
def panx():
    """Scrape article titles and links from the PanX front page."""
    print('Start parsing ptt hot....')
    session = requests.session()
    res = session.get('https://panx.asia/', verify=False)
    soup = bf(res.text, 'html.parser')
    entries = [
        '{}\n{}\n\n'.format(anchor.text, anchor['href'])
        for anchor in soup.select('div.container div.row div.desc_wrap h2 a')
    ]
    return ''.join(entries)
def magazine():
    """Scrape the first six CommonWealth Magazine headlines and links."""
    session = requests.session()
    res = session.get('https://www.cw.com.tw/', verify=False)
    res.encoding = 'utf-8'
    soup = bf(res.text, 'html.parser')
    parts = []
    for count, anchor in enumerate(soup.select('.caption h3 a')):
        parts.append('{}\n{}\n'.format(anchor.text.strip(), anchor['href']))
        # Matches the original cutoff: indices 0..5 are included.
        if count > 4:
            break
    return ''.join(parts)
def lottery():
    """Scrape the latest winning numbers from pilio.idv.tw.

    Returns ``(big, b539, bwei)`` — big lotto (6 cells), daily 539
    (4 cells) and power lottery (4 cells), each a list of stripped
    cell texts.
    """
    results = {}
    for game in ('ltobig', 'lto539', 'lto'):
        page = requests.get(
            'https://www.pilio.idv.tw/{}/drawlist/drawlist.asp'.format(game))
        page.encoding = 'utf-8'
        cells = bf(page.text, 'html.parser').select('.inner td')
        if game == 'ltobig':
            results[game] = [cells[k].text.strip() for k in range(4, 10)]
        else:
            results[game] = [cells[k].text.strip() for k in range(3, 7)]
    return results['ltobig'], results['lto539'], results['lto']
def lottery_stat(type_lottery,year):
    """Scrape per-number appearance counts for one lottery type and year.

    Returns a text block of "<ball> <count>次 <balloon bar>" lines, where
    the balloon bar length is count // div (div scales the bar per game).
    NOTE(review): an unrecognized type_lottery leaves `div` unbound and
    raises NameError on first use — confirm callers only pass these three.
    """
    # Balloon-bar scaling divisor per game type.
    if type_lottery == 'big-lotto':
        div = 4
    elif type_lottery == 'power':
        div = 5
    elif type_lottery == 'daily539':
        div = 7
    url = 'http://lotto.auzonet.com/lotto_balllist_{}_{}.html'.format(type_lottery,year)
    res = requests.get(url)
    res.encoding = 'utf-8'
    soup = bf(res.text,'html.parser')
    num = ''
    # Cells are walked in groups of three (c counts from 1); presumably the
    # first cell of each group is the ball number, the third its count, and
    # the middle cell irrelevant — the [3:] slice skips the table header.
    for c,i in enumerate(soup.select('.forumline tr td')[3:],1):
        if c%3 == 2:
            continue
        elif c%3 == 1:
            num += ' '+i.text.strip()+' '
        else:
            # Zero-pad single-digit counts so the columns line up.
            if len(i.text.strip()) < 2:
                num += '0{}次 {}\n'.format(i.text.strip(),'🎈️'*((int(i.text.strip()))//div))
            else:
                num += '{}次 {}\n'.format(i.text.strip(),'🎈️'*((int(i.text.strip()))//div))
    return num
def lottery_all_num(type_lottery):
    """Scrape the historical draw list for one lottery type from lotto-8.com.

    Returns a text block of "date numbers [special]" lines.
    NOTE(review): an unrecognized type_lottery leaves `start`/`div` unbound
    and raises NameError — confirm callers only pass these three values.
    """
    # Map the public type name onto the lotto-8.com page name, and set the
    # table geometry: `start` header cells to skip, `div` cells per row.
    if type_lottery == 'big-lotto':
        type_lottery = 'listltobigbbk'
        start = 4
        div = 4
    elif type_lottery == 'power':
        type_lottery = 'listlto'
        start = 4
        div = 4
    elif type_lottery == 'daily539':
        type_lottery = 'listlto539bbk'
        start = 3
        div = 3
    url = 'https://www.lotto-8.com/{}.asp'.format(type_lottery)
    res = requests.get(url)
    res.encoding = 'utf-8'
    soup = bf(res.text,'html.parser')
    num = ''
    # Walk the table cells row-group by row-group: first cell is the draw
    # date, second the main numbers, third (when present) the special
    # number / second zone.
    for c,i in enumerate(soup.select('.auto-style4 tr td')[start:],1):
        if c % div == 1:
            num += i.text.strip()
        elif c % div == 2:
            num += ' {}\n'.format(i.text.strip())
        elif c % div == 3:
            if type_lottery == 'listltobigbbk':
                num += '💰️特別號 : {}\n'.format(i.text.strip())
            elif type_lottery == 'listlto':
                num += '💰️第二區 : {}\n'.format(i.text.strip())
    return num
def lottery_year(type_lottery):
    """Two-column carousel letting the user pick a year (2014-2019) of draw
    statistics for the given lottery type.

    Each button posts back ``ball_st/<year>/<type_lottery>``.
    """
    if type_lottery == 'big-lotto':
        t = '大樂透'
    elif type_lottery == 'power':
        t = '威力彩'
    elif type_lottery == 'daily539':
        t = '今彩539'
    # (title, years) per column; only the first column carries the lottery
    # name in its title, matching the original layout exactly.
    groups = [
        (t + '--各個年份的統計', ['2019', '2018', '2017']),
        ('各個年份的統計', ['2016', '2015', '2014']),
    ]
    columns = [
        CarouselColumn(
            thumbnail_image_url='https://i.imgur.com/zp75S87.jpg',
            title=col_title,
            text='請選擇年份',
            actions=[
                PostbackTemplateAction(
                    label=year,
                    data='ball_st/{}/{}'.format(year, type_lottery)
                )
                for year in years
            ]
        )
        for col_title, years in groups
    ]
    return TemplateSendMessage(
        alt_text='Carousel template',
        template=CarouselTemplate(columns=columns)
    )
def check_pic(img_id):
    """Build a Yes/No confirm template asking whether the user wants to add
    a caption to the photo just uploaded (identified by *img_id*)."""
    Confirm_template = TemplateSendMessage(
        alt_text='要給你照片標籤描述嗎?',
        template=ConfirmTemplate(
            title='注意',
            text= '要給你照片標籤描述嗎?\n要就選Yes,並且回覆\n-->id+描述訊息(這張照片id是'+ str(img_id) +')',
            actions=[
                PostbackTemplateAction(
                    label='Yes',
                    text='I choose YES',
                    data='action=buy&itemid=1'
                ),
                MessageTemplateAction(
                    label='No',
                    text='I choose NO'
                )
            ]
        )
    )
    return Confirm_template
def look_up(tex):
    """Scrape the Yahoo Taiwan dictionary page for *tex* and return the
    definition text, or '查無此字' when nothing is found.

    Parsing depends on Yahoo's CSS class names; any IndexError while picking
    the first result is treated as "word not found".
    """
    content = ''
    target_url = 'https://tw.dictionary.search.yahoo.com/search;_ylt=AwrtXG86cTRcUGoAESt9rolQ?p={}&fr2=sb-top'.format(tex)
    res = requests.get(target_url)
    soup = bf(res.text,'html.parser')
    try:
        content += '{}\n'.format(soup.select('.lh-22.mh-22.mt-12.mb-12.mr-25.last')[0].text)
        for i in soup.select('.layoutCenter .lh-22.mh-22.ml-50.mt-12.mb-12'):
            if i.select('p span') != []:
                content += '{}\n{}\n'.format(i.select('.fz-14')[0].text,i.select('p span')[0].text)
            else:
                content += '{}\n'.format(i.select('.fz-14')[0].text)
        # Fallback selector used when the primary layout produced nothing.
        if content == '':
            for i in soup.select('.layoutCenter .ml-50.mt-5.last'):
                content += i.text
    except IndexError:
        content = '查無此字'
    return content
def get_total_flex(body_content, footer_content=None):
    """Assemble a Flex BubbleContainer from pre-built component lists.

    body_content: list of flex components for the bubble body.
    footer_content: optional list of flex components for the footer; when
        omitted, a single link button to the author's GitHub page is used.
    Returns the assembled BubbleContainer.

    BUG FIX: the original declared the default footer as a mutable list
    literal in the signature, so one shared ButtonComponent instance was
    built at import time and reused across every call. A None sentinel now
    builds a fresh default per call (same visible behavior for callers).
    """
    if footer_content is None:
        footer_content = [
            ButtonComponent(
                style='link',
                action=URIAction(label='My github',
                                 uri='https://github.com/kevin1061517?tab=repositories')
            )
        ]
    bubble = BubbleContainer(
        body=BoxComponent(
            layout='vertical',
            contents=body_content
        ),
        footer=BoxComponent(
            layout='vertical',
            spacing='sm',
            contents=footer_content
        )
    )
    return bubble
def integer_word(word):
    """Look up *word* in the Yahoo dictionary and wrap the definition in a
    Flex bubble carrying an American-pronunciation button; when the word is
    unknown, return a plain text message instead."""
    definition = look_up(word)
    if definition == '查無此字':
        # Unknown word: just echo the "not found" text.
        return TextSendMessage(text=definition)
    body = [
        TextComponent(text='🔍英文單字查詢', weight='bold', align='center', size='md', wrap=True, color='#000000'),
        SeparatorComponent(margin='lg'),
        TextComponent(text=definition, size='sm', wrap=True, color='#000000'),
    ]
    footer = [
        SeparatorComponent(),
        ButtonComponent(
            style='link',
            height='sm',
            action=PostbackAction(label='📢 美式發音', data='audio/{}'.format(word)),
        ),
    ]
    return FlexSendMessage(alt_text="hello", contents=get_total_flex(body, footer))
def process_draw(user_id):
    """Build the "draw a number" Flex bubble for *user_id*.

    Reads the user's saved start/end range from Firebase (defaulting to 0)
    and renders it with buttons to (re)set either bound or to draw a random
    number via the 'random/<start>/<end>' postback.
    """
    start = fb.get('/{}/start'.format(user_id),None)
    if not start:
        start = 0
    else:
        # Firebase returns {push_key: value}; unwrap the stored number.
        start = list(start.values())[0]
    end = fb.get('/{}/end'.format(user_id),None)
    if not end:
        end = 0
    else:
        end = list(end.values())[0]
    bubble = BubbleContainer(
        direction='ltr',
        body=BoxComponent(
            layout='vertical',
            contents=[
                TextComponent(text= '抽數字',size='xl',color='#000000'),
                TextComponent(text= '按照步驟來隨機產生幸運數字', size='sm',color='#888888'),
                # review
                SeparatorComponent(color='#000000'),
                # info
                BoxComponent(
                    layout='vertical',
                    color = '#FFFF00',
                    spacing='sm',
                    contents=[
                        BoxComponent(
                            layout='baseline',
                            contents=[
                                TextComponent(
                                    text='起始',
                                    color='#000000',
                                    size='xxl',
                                    flex = 5
                                ),
                                TextComponent(
                                    text=str(start),
                                    size='xxl',
                                    flex = 5
                                )
                            ],
                        ),
                        BoxComponent(
                            layout='baseline',
                            contents=[
                                TextComponent(
                                    text='結束',
                                    color='#000000',
                                    size='xxl',
                                    flex = 5
                                ),
                                TextComponent(
                                    text=str(end),
                                    size='xxl',
                                    flex = 5
                                )
                            ],
                        )
                    ],
                ),
            ],
        ),
        footer=BoxComponent(
            layout='vertical',
            spacing='xs',
            contents=[
                # websiteAction
                ButtonComponent(
                    style='secondary',
                    color='#FFEE99',
                    height='sm',
                    action=MessageAction(label='設定起始數字',text='請輸入起始數字-----------')
                ),
                SeparatorComponent(color='#000000'),
                # websiteAction
                ButtonComponent(
                    style='secondary',
                    color='#FFEE99',
                    height='sm',
                    action=MessageAction(label='設定結束數字(包含)',text='請輸入結束數字-----------')
                ),
                SeparatorComponent(color='#000000'),
                # websiteAction
                ButtonComponent(
                    style='secondary',
                    color='#FFEE99',
                    height='sm',
                    action=PostbackAction(label='開始抽籤',text='抽籤結果!!',data='random/{}/{}'.format(start,end))
                )
            ]
        ),
    )
    return bubble
def process_choose(user_id):
    """Build the "random choice" setup bubble for *user_id*.

    Reads the user's saved question (ques_num) and semicolon-separated
    options (opti_num) from Firebase and renders them with buttons to run
    the draw ('custom' postback) or to (re)set question/options.
    """
    temp_opti =[]
    texts = ''
    temp_ques = ''
    t = fb.get('/{}/opti_num'.format(user_id),None)
    if t :
        # Options are stored as one semicolon-joined string.
        temp = list(t.values())[0]
        temp_opti = temp.split(';')
    t1 = fb.get('/{}/ques_num'.format(user_id),None)
    if t1:
        temp_ques = list(t1.values())[0]
    print('-----in------')
    for i in temp_opti:
        texts += '{}\n'.format(i)
    bubble = BubbleContainer(
        direction='ltr',
        body=BoxComponent(
            layout='vertical',
            contents=[
                TextComponent(text= '確定好就按下面的抽籤按鈕', weight='bold',size='lg',color='#000000'),
                TextComponent(text= '問題為-->{}'.format(temp_ques), size='md',wrap=True,color='#000000'),
                # review
                SeparatorComponent(color='#000000'),
                # info
                BoxComponent(
                    layout='vertical',
                    spacing='sm',
                    contents=[
                        BoxComponent(
                            layout='baseline',
                            contents=[
                                TextComponent(
                                    text='選項:',
                                    color='#000000',
                                    gravity='center',
                                    flex = 1,
                                    size='lg'
                                ),
                                TextComponent(
                                    text='{}\n'.format(texts[:-1]),
                                    color='#000000',
                                    wrap=True,
                                    flex = 4,
                                    size='lg')
                            ]
                        )
                    ],
                ),
            ],
        ),
        footer=BoxComponent(
            layout='vertical',
            spacing='xs',
            contents=[
                ButtonComponent(
                    style='secondary',
                    color='#FFDD55',
                    height='sm',
                    action=PostbackAction(label='隨機選擇',data='custom')
                ),
                ButtonComponent(
                    style='secondary',
                    color='#FFDD55',
                    height='sm',
                    action=MessageAction(label='設定問題',text='請輸入要設定抉擇的問題:')
                ),
                ButtonComponent(
                    style='secondary',
                    color='#FFDD55',
                    height='sm',
                    action=MessageAction(label='設定選項',text='請輸入要設定的選項,各個選項以分號區隔!!!')
                )
            ]
        ),
    )
    return bubble
def answer(num,user_id):
    """Build the QuickReply keyboard with the canned answer choices for
    questionnaire step *num* of *user_id*.

    Returns a QuickReply, or None when the user has no questionnaire in
    progress (Firebase key '/<user>/question/no' missing).

    BUG FIX: the original shadowed the function's own name with the local
    answer list, and when no questionnaire existed it fell through to
    `return message` with `message` unbound, raising UnboundLocalError.
    It now returns None explicitly in that case.
    """
    t = fb.get('/{}/question/no'.format(user_id),None)
    if not t:
        return None
    # One option list per questionnaire step, indexed by `num`.
    choices = [['Secret'],['是','不是,來過好幾次'],['約會','聚餐','朋友聚','家人聚餐'],['排骨套餐','雞排套餐','銷魂叉燒飯','黯然消魂炒飯','螞蟻上樹'],
    ['太鹹了','太清淡了','不好吃','好吃沒話講'],['價格公道','太貴了','普普通通'],['非常滿意','滿意','尚可','差勁','非常差勁'],['非常滿意','滿意','尚可','差勁','非常差勁'],['感覺很棒','感覺很差','食物好吃!','沒有']]
    items = [QuickReplyButton(action=MessageAction(label=i, text=i)) for i in choices[num]]
    return QuickReply(items=items)
def questionnaire(num,user_id):
    """Return the questionnaire prompt text for step *num*, or None when the
    user has no questionnaire session stored in Firebase.

    Step 9 wraps back to step 0 (the meal-number prompt).
    """
    prompts = ['用餐編號','第一次來用餐?','用餐的目的是?','享用主餐的部份是?','對餐廳提供的菜餚口味感到?','對餐廳食物的價格感到?','對工作人員的服務態度感到?','餐廳衛生評價是?','想對我們建議的話']
    if num == 9:
        num = 0
    session = fb.get('/{}/question/no'.format(user_id), None)
    if not session:
        return None
    return prompts[num]
def greet():
    """Pick a random thank-you phrase; the hidden '中獎' entry turns the
    reply into a prize image instead of text."""
    phrases = ['哇!!感謝您的答案','太棒了!!','很寶貴的建議','我們會持續改進','謝謝您的建議','很特別的意見','會不斷提供最好服務給您','給我們持續改善的動力','真的是很寶貴的建議','謝謝您!','謝謝指教','中獎']
    pick = phrases[random.randint(0, 10)]
    if pick == '中獎':
        return ImageSendMessage(
            original_content_url='https://i.imgur.com/d9jnyyN.jpg',
            preview_image_url='https://i.imgur.com/d9jnyyN.jpg')
    return TextSendMessage(text=pick)
def keep(t):
    """Append row *t* (a list of cell values) to sheet1 of the 'BotTest'
    Google spreadsheet; exits the process if the connection fails.

    Credentials come from the downloaded service-account JSON key file.
    """
    GDriveJSON = 'My First Project-9cf8421ad126.json'
    GSpreadSheet = 'BotTest'
    scope = ['https://spreadsheets.google.com/feeds','https://www.googleapis.com/auth/drive']
    try:
        credentials = SAC.from_json_keyfile_name(GDriveJSON, scope)
        client = gspread.authorize(credentials)
        sheet = client.open(GSpreadSheet).sheet1
    except Exception as ex:
        print('無法連線Google試算表', ex)
        sys.exit(1)
    sheet.append_row(t)
    print('新增一列資料到試算表' ,GSpreadSheet)
def delete_row():
    """Delete the first row of sheet1 in the 'BotTest' Google spreadsheet;
    exits the process if the connection fails.

    Credentials come from the downloaded service-account JSON key file.
    """
    GDriveJSON = 'My First Project-9cf8421ad126.json'
    GSpreadSheet = 'BotTest'
    scope = ['https://spreadsheets.google.com/feeds','https://www.googleapis.com/auth/drive']
    try:
        credentials = SAC.from_json_keyfile_name(GDriveJSON, scope)
        client = gspread.authorize(credentials)
        sheet = client.open(GSpreadSheet).sheet1
    except Exception as ex:
        print('無法連線Google試算表', ex)
        sys.exit(1)
    sheet.delete_row(1)
    print('delete一列資料到試算表' ,GSpreadSheet)
def audio_template(text):
    """Build a confirm template echoing the speech-recognized *text* so the
    user can accept it (resends the text) or reject it (asks to repeat)."""
    Confirm_template = TemplateSendMessage(
        alt_text='audio_template',
        template=ConfirmTemplate(
            title='確定一下吧',
            text='您的建議是:\n{}'.format(text),
            actions=[
                MessageTemplateAction(
                    label='錯',
                    text='那請再說一次'
                ),
                MessageTemplateAction(
                    label='對',
                    text=text
                )
            ]
        )
    )
    return Confirm_template
def quest_template(answer,user_name):
    """Build a Flex bubble summarizing the questionnaire answers stored in
    Firebase, with Send/Clear footer buttons.

    NOTE(review): the `answer` parameter is discarded — it is immediately
    overwritten from Firebase below. Also the Firebase path uses a
    hard-coded user id instead of the current user's — looks like leftover
    debug code; verify against the caller.
    """
    t = fb.get('/{}/question/item'.format('U19df1f98bcf1414ec15f9dad09b9b0cb'),None)
    answer = ''
    value = list(t.values())
    # Flatten the nested {push_key: {question: answer}} records into text.
    for v in value:
        for key,value in v.items():
            answer += '{} \n---> {}\n\n'.format(key,value)
    bubble = BubbleContainer(
        direction='ltr',
        body=BoxComponent(
            layout='vertical',
            contents=[
                TextComponent(text= '{}的消費體驗'.format(user_name), weight='bold',size='xl',color='#000000'),
                TextComponent(text= '您的建議與指教是推動我們前進的動力,{}的滿意就是我們的努力目標,歡迎給我們寶貴的意見,感謝!!'.format(user_name),size='sm',wrap = True,color='#888888'),
                SeparatorComponent(color='#000000'),
                # info
                BoxComponent(
                    layout='vertical',
                    spacing='sm',
                    contents=[
                        BoxComponent(
                            layout='vertical',
                            contents=[
                                BoxComponent(
                                    layout='horizontal',
                                    spacing='md',
                                    contents=[
                                        TextComponent(
                                            text=answer[:-1],
                                            color='#000000',
                                            wrap = True,
                                            gravity = 'center',
                                            size='md')]
                                )
                            ]
                        )
                    ],
                ),
            ],
        ),
        footer=BoxComponent(
            layout='vertical',
            spacing='xs',
            contents=[
                SeparatorComponent(margin='xl',color='#000000'),
                ButtonComponent(
                    style='secondary',
                    color='#66FF66',
                    height='sm',
                    action=PostbackAction(label='確定送出',data='send')
                ),
                ButtonComponent(
                    style='secondary',
                    color='#66FF66',
                    height='sm',
                    action=PostbackAction(label='清除資料',data='clear')
                )
            ]
        ),
    )
    message = FlexSendMessage(alt_text="hello", contents=bubble)
    return message
@handler.add(PostbackEvent)
def handle_postback(event):
    """Central dispatcher for every postback the bot's templates emit.

    Branches on the postback `data` payload (prefix-matched for payloads
    that carry '/'-separated arguments) and replies with the matching
    template, flex bubble, audio or video message.
    """
    profile = line_bot_api.get_profile(event.source.user_id)
    user_name = profile.display_name
    temp = event.postback.data
    # 'audio/<word>': play the Yahoo-dictionary pronunciation mp3.
    if temp[:5] == 'audio':
        t = temp.split('/')
        word = t[1]
        url = 'https://s.yimg.com/bg/dict/dreye/live/f/{}.mp3'.format(word)
        line_bot_api.reply_message(
            event.reply_token,
            AudioSendMessage(original_content_url=url,duration=3000)
        )
    # Datetime-picker result: ask for the party size via quick replies.
    elif temp == 'datetime':
        time = event.postback.params['datetime']
        t = str(time).replace('T',' ')
        line_bot_api.reply_message(
            event.reply_token,
            TextSendMessage(
                text='請問來店人數為?',
                quick_reply=QuickReply(
                    items=[
                        QuickReplyButton(
                            action=PostbackAction(label="1人",text='您訂位時間為{}\n人數為{}人'.format(t,1),data="reservation1")
                        ),
                        QuickReplyButton(
                            action=PostbackAction(label="2人",text='您訂位時間為{}\n人數為{}人'.format(t,2), data="reservation2")
                        ),
                        QuickReplyButton(
                            action=PostbackAction(label="3人",text='您訂位時間為{}\n人數為{}人'.format(t,3), data="reservation3")
                        ),
                        QuickReplyButton(
                            action=PostbackAction(label="4人",text='您訂位時間為{}\n人數為{}人'.format(t,4), data="reservation4")
                        )
                    ])
            )
        )
    # Start the dining questionnaire: reset the step counter in Firebase.
    elif temp == 'question':
        fb.put('/{}/question'.format(event.source.user_id),data={'no':'1'},name='no')
        line_bot_api.reply_message(
            event.reply_token,
            TextSendMessage(text='感謝您的用餐,請先輸入您的用餐編號\n讓小弟可以為你服務')
        )
    # Submit the collected questionnaire answers to the Google sheet.
    elif temp == 'send':
        t = fb.get('/{}/question/item'.format(event.source.user_id),None)
        if not t:
            line_bot_api.reply_message(
                event.reply_token, TextSendMessage(text='已經送出囉'))
            return
        temp = [list(i.values())[0] for i in t.values()]
        keep(temp)
        fb.delete('/{}/question'.format(event.source.user_id),None)
        line_bot_api.reply_message(
            event.reply_token,
            TextSendMessage(text='小弟已經把貴賓{}的意見傳給公司了,我們會持續不斷改進,以顧客滿意至極'.format(user_name))
        )
    # Discard the questionnaire answers.
    elif temp == 'clear':
        fb.delete('/{}/question'.format(event.source.user_id),None)
        line_bot_api.reply_message(
            event.reply_token,
            TextSendMessage(text='親愛的{} 小弟期待您再給我們意見'.format(user_name))
        )
    # Wipe the stored registration so the user can re-enter name/email.
    elif temp == 'revise':
        fb.delete('/{}/member'.format(event.source.user_id),None)
    # 'custom': draw a random option from the user's saved question/options.
    elif temp == 'custom':
        t = fb.get('/{}/opti_num'.format(event.source.user_id),None)
        bubble = process_choose(event.source.user_id)
        message = FlexSendMessage(alt_text="hello", contents=bubble)
        if t :
            temp = list(t.values())[0]
            temp_opti = temp.split(';')
        else:
            # NOTE(review): no `return` after this reply — execution falls
            # through and random.choice(temp_opti) raises UnboundLocalError.
            line_bot_api.reply_message(
                event.reply_token,
                [TextSendMessage(text='必須要有輸入有正確的選項喔'),message]
            )
        result = random.choice(temp_opti)
        t1 = fb.get('/{}/ques_num'.format(event.source.user_id),None)
        if t1:
            temp_ques = list(t1.values())[0]
        else:
            # NOTE(review): same missing-return issue as above; the reply
            # token is also single-use so a second reply would fail.
            line_bot_api.reply_message(
                event.reply_token,
                [TextSendMessage(text='必須要有輸入有正確的問題喔'),message]
            )
        bubble = BubbleContainer(
            direction='ltr',
            body=BoxComponent(
                layout='vertical',
                contents=[
                    TextComponent(text= '隨機結果出爐', weight='bold',size='xl',color='#000000'),
                    TextComponent(text= '如有其他問題再按下面按鈕🙏', size='md',color='#888888'),
                    # review
                    SeparatorComponent(color='#000000'),
                    # info
                    BoxComponent(
                        layout='vertical',
                        spacing='sm',
                        contents=[
                            BoxComponent(
                                layout='vertical',
                                contents=[
                                    BoxComponent(
                                        layout='baseline',
                                        spacing='sm',
                                        contents=[
                                            TextComponent(
                                                text='問題:',
                                                color='#000000',
                                                gravity = 'center',
                                                size='lg'),
                                            TextComponent(
                                                text=temp_ques,
                                                color='#000000',
                                                size='lg')]
                                    ),
                                    BoxComponent(
                                        layout='baseline',
                                        spacing='sm',
                                        contents=[
                                            TextComponent(
                                                text='隨機選項:',
                                                color='#000000',
                                                gravity = 'center',
                                                size='lg'),
                                            TextComponent(
                                                text=result,
                                                color='#000000',
                                                size='lg')]
                                    )
                                ]
                            )
                        ],
                    ),
                ],
            ),
            footer=BoxComponent(
                layout='vertical',
                spacing='xs',
                contents=[
                    ButtonComponent(
                        style='secondary',
                        color='#FFDD55',
                        height='sm',
                        action=PostbackAction(label='其他猶豫問題',data='choose')
                    )
                ]
            ),
        )
        message = FlexSendMessage(alt_text="hello", contents=bubble)
        line_bot_api.reply_message(
            event.reply_token,
            message)
    # 'first/<type>[/start]': two-option yes/no style random picker.
    elif temp[:5] == 'first':
        print('--------in-----')
        temp = temp.split('/')
        _type = temp[1]
        text = ''
        text = '開始'
        action = PostbackAction(label='開始選擇',data='first/{}/start'.format(_type),text='為你選出最佳選擇')
        color = ['#AAAAAA','#AAAAAA']
        point = ['👈','👈']
        if _type == 'yesno':
            t = ['要','不要']
        elif _type == 'buy':
            t = ['買','不買']
        elif _type == 'yes':
            t = ['是','不是']
        # With '/start' present, actually pick one of the two options:
        # grey out the loser's pointer and highlight the winner.
        if 'start' in temp:
            text = '其他選擇'
            r = random.randint(0,1)
            print('----------'+str(r))
            point[r] = ' '
            color[1-r] = '#000000'
            action = MessageAction(label='其他選擇',text='choose')
        bubble = BubbleContainer(
            direction='ltr',
            body=BoxComponent(
                layout='vertical',
                contents=[
                    TextComponent(text= '隨機選擇',gravity='center',size='xl',color='#000000'),
                    TextComponent(text= '{}請按最下面按鈕'.format(text), size='sm',gravity='center',color='#888888'),
                    # review
                    SeparatorComponent(color='#000000'),
                    # info
                    BoxComponent(
                        layout='vertical',
                        color = '#FFFF00',
                        spacing='sm',
                        contents=[
                            BoxComponent(
                                layout='baseline',
                                contents=[
                                    TextComponent(
                                        text=t[0],
                                        color=color[0],
                                        size='xl',
                                        flex = 5
                                    ),
                                    TextComponent(
                                        text=point[0],
                                        size='xl',
                                        flex = 5
                                    )
                                ],
                            ),
                            BoxComponent(
                                layout='baseline',
                                contents=[
                                    TextComponent(
                                        text=t[1],
                                        color=color[1],
                                        size='xl',
                                        flex = 5
                                    ),
                                    TextComponent(
                                        text=point[1],
                                        size='xl',
                                        flex = 5
                                    )
                                ],
                            )
                        ],
                    ),
                ],
            ),
            footer=BoxComponent(
                layout='vertical',
                spacing='xs',
                contents=[
                    # websiteAction
                    ButtonComponent(
                        style='secondary',
                        color='#FFEE99',
                        height='sm',
                        action=action
                    )
                ]
            ),
        )
        message = FlexSendMessage(alt_text="hello", contents=bubble)
        line_bot_api.reply_message(
            event.reply_token,
            message
        )
    # 'random/<start>/<end>': draw a number from the user's saved range.
    elif temp[:6] == 'random':
        profile = line_bot_api.get_profile(event.source.user_id)
        user_name = profile.display_name
        user_id = event.source.user_id
        bubble = process_draw(user_id)
        t = temp.split('/')
        start = int(t[1])
        end = int(t[2])
        if start >= end:
            message = FlexSendMessage(alt_text="hello", contents=bubble)
            line_bot_api.reply_message(
                event.reply_token,
                [TextSendMessage(text='咦!{}要注意起始不能大於等於最後一個數字喔!!'.format(user_name)),message])
            return
        r = random.randint(start,end)
        bubble = BubbleContainer(
            direction='ltr',
            body=BoxComponent(
                layout='vertical',
                contents=[
                    TextComponent(text= '隨機選擇',size='xl',color='#000000'),
                    TextComponent(text= '🔔🔔🔔', size='sm'),
                    # review
                    SeparatorComponent(color='#000000'),
                    # info
                    BoxComponent(
                        layout='vertical',
                        color = '#FFFF00',
                        spacing='sm',
                        contents=[
                            TextComponent(
                                text='由{}到{}隨機產生的號碼'.format(start,end),
                                color='#000000',
                                size='lg',
                                flex = 5
                            ),
                            BoxComponent(
                                layout='baseline',
                                color = '#FFFF00',
                                spacing='sm',
                                contents=[
                                    TextComponent(
                                        text=' ',
                                        color='#000000',
                                        size='xl',
                                        flex = 4
                                    ),
                                    TextComponent(
                                        text=str(r),
                                        color='#000000',
                                        weight = 'bold',
                                        size='xxl',
                                        flex = 5
                                    )
                                ]
                            )
                        ],
                    ),
                ],
            ),
            footer=BoxComponent(
                layout='vertical',
                spacing='xs',
                contents=[
                    # websiteAction
                    ButtonComponent(
                        style='secondary',
                        color='#FFEE99',
                        height='sm',
                        action=PostbackAction(label='再抽一次',text='抽籤結果!!',data='random/{}/{}'.format(start,end))
                    ),
                    ButtonComponent(
                        style='secondary',
                        color='#FFEE99',
                        height='sm',
                        action=MessageAction(label ='重設範圍',text='draw',)
                    )
                ]
            ),
        )
        message = FlexSendMessage(alt_text="hello", contents=bubble)
        line_bot_api.reply_message(
            event.reply_token,
            message
        )
    # 'choose': reset the saved question/options and show the setup bubble.
    elif temp[:6] == 'choose':
        fb.delete('/{}/opti_num'.format(event.source.user_id),None)
        fb.delete('/{}/ques_num'.format(event.source.user_id),None)
        print('in')
        bubble = BubbleContainer(
            direction='ltr',
            body=BoxComponent(
                layout='vertical',
                contents=[
                    TextComponent(text= '把老天爺幫你選擇的選項回覆給我', weight='bold',wrap=True,size='lg',color='#000000'),
                    TextComponent(text= '請先設定問題為什麼,再去設定選項,在最下面的按鈕可以點選並設定,內建有常用的選擇內容,可以參考看看', size='md',wrap=True,color='#888888'),
                    # review
                    SeparatorComponent(color='#000000'),
                    # info
                    BoxComponent(
                        layout='vertical',
                        spacing='sm',
                        contents=[
                            BoxComponent(
                                layout='baseline',
                                contents=[
                                    TextComponent(
                                        text='問題:\n選擇飲料店:',
                                        color='#000000',
                                        wrap=True,
                                        size='md'
                                    ),
                                    TextComponent(
                                        text='選項:\n50嵐;清新;coco;茶湯會',
                                        wrap=True,
                                        color='#000000',
                                        size='md'
                                    )
                                ],
                            ),
                            BoxComponent(
                                layout='baseline',
                                contents=[
                                    TextComponent(
                                        text='問題:\n選擇雞排店',
                                        color='#000000',
                                        wrap=True,
                                        size='md'
                                    ),
                                    TextComponent(
                                        text='選項:\n豪大;派克;蔥Ya雞;胖老爹',
                                        color='#000000',
                                        wrap=True,
                                        size='md'
                                    )
                                ],
                            ),
                            SeparatorComponent(color='#000000')
                        ],
                    ),
                ],
            ),
            footer=BoxComponent(
                layout='vertical',
                spacing='xs',
                contents=[
                    ButtonComponent(
                        style='secondary',
                        color='#FFDD55',
                        height='sm',
                        action=PostbackAction(label='內建問題',data='other',text='請選擇一下喔~')
                    ),
                    ButtonComponent(
                        style='secondary',
                        color='#FFDD55',
                        height='sm',
                        action=MessageAction(label='設定問題',text='請輸入要設定抉擇的問題:')
                    ),
                    ButtonComponent(
                        style='secondary',
                        color='#FFDD55',
                        height='sm',
                        action=MessageAction(label='設定選項',text='請輸入要設定的選項,各個選項以分號區隔喔!!!')
                    )
                ]
            ),
        )
        message = FlexSendMessage(alt_text="hello", contents=bubble)
        line_bot_api.reply_message(
            event.reply_token,
            message
        )
    # 'other': show the built-in question presets bubble.
    elif temp == 'other':
        bubble = BubbleContainer(
            direction='ltr',
            body=BoxComponent(
                layout='vertical',
                contents=[
                    TextComponent(text= '請把選擇需要解決的選擇', weight='bold',size='xl',color='#000000'),
                    TextComponent(text= '希望能夠解決你的選擇障礙...', size='md',wrap=True,color='#888888'),
                    # review
                    SeparatorComponent(color='#000000'),
                ],
            ),
            footer=BoxComponent(
                layout='vertical',
                spacing='xs',
                contents=[
                    ButtonComponent(
                        style='secondary',
                        color='#5555FF',
                        height='sm',
                        action=PostbackAction(label='內建問題',data='other',text='請選擇一下喔~')
                    ),
                    ButtonComponent(
                        style='secondary',
                        color='#5555FF',
                        height='sm',
                        action=MessageAction(label='設定問題',text='請輸入要設定抉擇的問題:')
                    ),
                    SeparatorComponent(color='#000000'),
                    ButtonComponent(
                        style='secondary',
                        color='#5555FF',
                        height='sm',
                        action=MessageAction(label='設定選項',text='請輸入要設定的選項,各個選項以分號區隔~')
                    )
                ]
            ),
        )
        message = FlexSendMessage(alt_text="hello", contents=bubble)
        line_bot_api.reply_message(
            event.reply_token,
            message
        )
    # NOTE(review): this branch looks dead/buggy — when temp == 'result'
    # exactly, temp.split('/') is ['result'] and t[1] raises IndexError.
    # The working equivalent is the 'ball_st' prefix branch below.
    elif temp == 'result':
        print('-------in---')
        t = temp.split('/')
        lot_year = t[1]
        lot_type = t[2]
        num = lottery_stat(lot_type,lot_year)
        if lot_type == 'big-lotto':
            t = '大樂透'
        elif lot_type == 'power':
            t = '威力彩'
        elif lot_type == 'daily539':
            t = '今彩539'
        bubble = BubbleContainer(
            direction='ltr',
            body=BoxComponent(
                layout='vertical',
                contents=[
                    TextComponent(text='爬蟲程式抓取奧索樂透網', size='xs',wrap=True,color='#888888'),
                    TextComponent(text= '{}年\n{}各號碼出現次數'.format(lot_year,t), weight='bold', wrap=True,size='xl',color='#000000'),
                    TextComponent(text= '各個號碼出現次數統計後的結果呈現,透過爬蟲程式免於開網頁慢慢搜尋....', size='xs',wrap=True,color='#888888'),
                    # review
                    SeparatorComponent(color='#000000'),
                    # info
                    BoxComponent(
                        layout='vertical',
                        margin='lg',
                        color = '#FFFF00',
                        spacing='sm',
                        contents=[
                            BoxComponent(
                                layout='vertical',
                                contents=[
                                    TextComponent(
                                        text='號碼 出現次數',
                                        color='#000000',
                                        size='md'
                                    ),
                                    TextComponent(
                                        text=num[:-1],
                                        color='#000000',
                                        size='md',
                                        wrap=True
                                    ),
                                    SeparatorComponent(color='#000000')
                                ],
                            ),
                        ],
                    ),
                ],
            ),
            footer=BoxComponent(
                layout='vertical',
                spacing='xs',
                contents=[
                    # websiteAction
                    ButtonComponent(
                        style='secondary',
                        color='#DAA520',
                        height='sm',
                        action=PostbackAction(label='其他年份號碼出現次數',data='ball_year/{}'.format(lot_type),text='請稍等...')
                    ),
                    SeparatorComponent(color='#000000'),
                    ButtonComponent(
                        style='secondary',
                        color='#DAA520',
                        height='sm',
                        action=PostbackAction(label='其他遊戲號碼出現次數',data='ballyear',text='請稍等...')
                    )
                ]
            ),
        )
        message = FlexSendMessage(alt_text="hello", contents=bubble)
        line_bot_api.reply_message(
            event.reply_token,
            message
        )
    # 'ball_st/<year>/<type>': per-number frequency stats for one year.
    elif temp[:7] == 'ball_st':
        print('-------in---')
        t = temp.split('/')
        lot_year = t[1]
        lot_type = t[2]
        num = lottery_stat(lot_type,lot_year)
        if lot_type == 'big-lotto':
            t = '大樂透'
        elif lot_type == 'power':
            t = '威力彩'
        elif lot_type == 'daily539':
            t = '今彩539'
        bubble = BubbleContainer(
            direction='ltr',
            body=BoxComponent(
                layout='vertical',
                contents=[
                    TextComponent(text='爬蟲程式抓取奧索樂透網', size='xs',wrap=True,color='#888888'),
                    TextComponent(text= '{}年\n{}各號碼出現次數'.format(lot_year,t), weight='bold', wrap=True,size='xl',color='#000000'),
                    TextComponent(text= '各個號碼出現次數統計後的結果呈現,透過爬蟲程式免於開網頁慢慢搜尋....', size='xs',wrap=True,color='#888888'),
                    # review
                    SeparatorComponent(color='#000000'),
                    # info
                    BoxComponent(
                        layout='vertical',
                        margin='lg',
                        color = '#FFFF00',
                        spacing='sm',
                        contents=[
                            BoxComponent(
                                layout='vertical',
                                contents=[
                                    TextComponent(
                                        text='號碼 出現次數',
                                        color='#000000',
                                        size='md'
                                    ),
                                    TextComponent(
                                        text=num[:-1],
                                        color='#000000',
                                        size='md',
                                        wrap=True
                                    ),
                                    SeparatorComponent(color='#000000')
                                ],
                            ),
                        ],
                    ),
                ],
            ),
            footer=BoxComponent(
                layout='vertical',
                spacing='xs',
                contents=[
                    # websiteAction
                    ButtonComponent(
                        style='secondary',
                        color='#DAA520',
                        height='sm',
                        action=PostbackAction(label='其他年份號碼出現次數',data='ball_year/{}'.format(lot_type),text='請稍等...')
                    ),
                    SeparatorComponent(color='#000000'),
                    ButtonComponent(
                        style='secondary',
                        color='#DAA520',
                        height='sm',
                        action=PostbackAction(label='其他遊戲號碼出現次數',data='ballyear',text='請稍等...')
                    )
                ]
            ),
        )
        message = FlexSendMessage(alt_text="hello", contents=bubble)
        line_bot_api.reply_message(
            event.reply_token,
            message
        )
    # 'ball_year/<type>': show the year-picker carousel for one game.
    elif temp[:9] == 'ball_year':
        print('-------in---')
        print(temp)
        t = temp.split('/')
        lot_type = t[1]
        print(lot_type+'-----------')
        Carousel_template = lottery_year(lot_type)
        line_bot_api.reply_message(event.reply_token,Carousel_template)
    # 'ball_num/<type>': full historical winning-number listing.
    elif temp[:8] == 'ball_num':
        print('-------in---')
        t = temp.split('/')
        lot_type = t[1]
        num = lottery_all_num(lot_type)
        if lot_type == 'big-lotto':
            t = '大樂透'
        elif lot_type == 'power':
            t = '威力彩'
        elif lot_type == 'daily539':
            t = '今彩539'
        bubble = BubbleContainer(
            direction='ltr',
            body=BoxComponent(
                layout='vertical',
                contents=[
                    TextComponent(text='爬蟲程式抓取樂透雲內容', size='xs',wrap=True,color='#888888'),
                    TextComponent(text= '{}歷史開獎紀錄'.format(t), weight='bold', wrap=True,size='xl',color='#000000'),
                    TextComponent(text= '各個號碼個期紀錄,僅列出最近35筆紀錄,透過爬蟲程式免於開網頁慢慢搜尋....', size='xs',wrap=True,color='#888888'),
                    # review
                    SeparatorComponent(color='#000000'),
                    # info
                    BoxComponent(
                        layout='vertical',
                        margin='lg',
                        color = '#FFFF00',
                        spacing='sm',
                        contents=[
                            BoxComponent(
                                layout='vertical',
                                contents=[
                                    TextComponent(
                                        text='   日期       {}中獎號碼'.format(t),
                                        color='#000000',
                                        size='md'
                                    ),
                                    TextComponent(
                                        text=num,
                                        color='#000000',
                                        size='xs',
                                        wrap=True
                                    ),
                                    SeparatorComponent(color='#000000')
                                ],
                            ),
                        ],
                    ),
                ],
            ),
            footer=BoxComponent(
                layout='vertical',
                spacing='xs',
                contents=[
                    # websiteAction
                    ButtonComponent(
                        style='secondary',
                        color='#DAA520',
                        height='sm',
                        action=MessageAction(label='近期開獎紀錄',text='lottery')
                    ),
                    SeparatorComponent(color='#000000'),
                    ButtonComponent(
                        style='secondary',
                        color='#DAA520',
                        height='sm',
                        action=PostbackAction(label='其他遊戲歷史開獎紀錄',data='ball_all_num',text='請稍等...')
                    )
                ]
            ),
        )
        message = FlexSendMessage(alt_text="hello", contents=bubble)
        line_bot_api.reply_message(
            event.reply_token,
            message
        )
    # Game picker for the historical-record listing above.
    elif temp == 'ball_all_num':
        buttons_template = TemplateSendMessage(
            alt_text='歷史開獎紀錄',
            template=ButtonsTemplate(
                title='歷史開獎紀錄',
                text='請選擇要查詢的遊戲歷史開獎紀錄',
                thumbnail_image_url='https://i.imgur.com/sMu1PJN.jpg',
                actions=[
                    PostbackTemplateAction(
                        label='大樂透歷史紀錄',
                        data='ball_num/big-lotto',
                        text = '選擇了大樂透...'
                    ),
                    PostbackTemplateAction(
                        label='今彩539歷史紀錄',
                        data='ball_num/daily539',
                        text = '選擇了今彩539...'
                    ),
                    PostbackTemplateAction(
                        label='威力彩歷史紀錄',
                        data='ball_num/power',
                        text = '選擇了威力彩...'
                    )
                ]
            )
        )
        line_bot_api.reply_message(event.reply_token, buttons_template)
    # Game picker for the yearly-frequency stats.
    elif temp == 'ballyear':
        buttons_template = TemplateSendMessage(
            alt_text='歷年號碼出現次數',
            template=ButtonsTemplate(
                title='歷年號碼出現次數',
                text='請選擇一下',
                thumbnail_image_url='https://i.imgur.com/sMu1PJN.jpg',
                actions=[
                    PostbackTemplateAction(
                        label='大樂透統計',
                        data='ball_year/big-lotto'
                    ),
                    # NOTE(review): the label/data pairs of the next two
                    # buttons appear swapped (今彩539 → power, 威力彩 →
                    # daily539) — verify the intended mapping.
                    PostbackTemplateAction(
                        label='今彩539統計',
                        data='ball_year/power'
                    ),
                    PostbackTemplateAction(
                        label='威力彩統計',
                        data='ball_year/daily539'
                    )
                ]
            )
        )
        line_bot_api.reply_message(event.reply_token, buttons_template)
    # 'ball': generate one random "lucky number" set per game.
    elif temp == 'ball':
        big = ''
        r539 = ''
        r3 = ''
        print('---in--------')
        for i in random.sample([str(i) for i in range(1,50)],6):
            if len(i) !=2 :
                big += '0{},'.format(i)
            else:
                big += '{},'.format(i)
        for i in random.sample([str(i) for i in range(1,40)],5):
            if len(i) !=2 :
                r539 += '0{},'.format(i)
            else:
                r539 += '{},'.format(i)
        for i in random.sample([str(i) for i in range(1,39)],6):
            if len(i) !=2 :
                r3 += '0{},'.format(i)
            else:
                r3 += '{},'.format(i)
        r3 = r3[:-1] + '\n第二區:0{}'.format(random.sample([i for i in range(1,8)],1)[0])
        print(r3)
        bubble = BubbleContainer(
            direction='ltr',
            body=BoxComponent(
                layout='vertical',
                contents=[
                    TextComponent(text='僅供參考', size='sm',wrap=True,color='#008844'),
                    TextComponent(text='幸運號碼', size='xxl',color='#000000'),
                    SeparatorComponent(color='#000000'),
                    # review
                    SeparatorComponent(color='#000000'),
                    # info
                    BoxComponent(
                        layout='vertical',
                        margin='lg',
                        color = '#FFFF00',
                        spacing='sm',
                        contents=[
                            BoxComponent(
                                layout='baseline',
                                color = '#FFFF00',
                                contents=[
                                    TextComponent(
                                        text='大樂透',
                                        color='#000000',
                                        weight='bold',
                                        size='md',
                                        flex=2
                                    ),
                                    TextComponent(
                                        text=big[:-1],
                                        weight='bold',
                                        color='#FF3333',
                                        size='lg',
                                        flex=5
                                    )
                                ],
                            ),
                            BoxComponent(
                                layout='baseline',
                                color = '#FFFF00',
                                contents=[
                                    TextComponent(
                                        text='今彩539',
                                        color='#000000',
                                        weight='bold',
                                        size='md',
                                        flex = 2
                                    ),
                                    TextComponent(
                                        text=r539[:-1],
                                        weight='bold',
                                        color='#FF3333',
                                        size='lg',
                                        flex=5
                                    )
                                ],
                            ),
                            BoxComponent(
                                layout='horizontal',
                                color = '#FFFF00',
                                contents=[
                                    TextComponent(
                                        text='威力彩',
                                        color='#000000',
                                        weight='bold',
                                        size='md',
                                        gravity = 'center',
                                        flex=2
                                    ),
                                    TextComponent(
                                        text=r3,
                                        weight='bold',
                                        color='#FF3333',
                                        size='lg',
                                        wrap=True,
                                        flex=5
                                    )
                                ],
                            ),
                        ],
                    ),
                ],
            ),
            footer=BoxComponent(
                layout='vertical',
                spacing='xs',
                contents=[
                    # websiteAction
                    ButtonComponent(
                        style='primary',
                        color='#DAA520',
                        height='sm',
                        action=PostbackAction(label='歷年號碼出現次數',data='ballyear',text='請稍等...')
                    ),
                    SeparatorComponent(color='#000000'),
                    ButtonComponent(
                        style='primary',
                        height='sm',
                        color='#DAA520',
                        action=PostbackAction(label='再來一組', data='ball',text='好運到來...')
                    )
                ]
            ),
        )
        message = FlexSendMessage(alt_text="hello", contents=bubble)
        line_bot_api.reply_message(
            event.reply_token,
            message
        )
    # 'carousel/<page>/<keyword>': paginate a search-result carousel.
    elif temp[:8] == 'carousel':
        t = temp.split('/')
        pa = int(t[1])
        print('--------be else-------{}---{}'.format(pa,str(type(pa))))
        pa += 1
        print('--------af else-------{}'.format(pa))
        keyword = t[2]
        t = carousel_template(keyword,page=pa)
        line_bot_api.reply_message(
            event.reply_token,
            t)
    # 'listen<url>': stream a 30s audio preview (or report no license).
    elif temp[0:6] == 'listen':
        url = temp[6:]
        if url == '音樂版權未授權~':
            line_bot_api.reply_message(event.reply_token,TextSendMessage(text='音樂版權未授權~'))
        else:
            line_bot_api.reply_message(
                event.reply_token,
                AudioSendMessage(original_content_url=url,duration=30000)
            )
    # 'porn/<index>/<keyword>': next page of the video search template.
    elif temp[0:4] == 'porn':
        print('------in------')
        t = temp.split('/')
        index = int(t[1])
        keyword = t[2]
        index += 1
        try:
            buttons_template = porn_video_template(keyword,index)
            line_bot_api.reply_message(event.reply_token, buttons_template)
        except IndexError:
            line_bot_api.reply_message(event.reply_token, TextSendMessage(text='已經到底了喔'))
    # 'video/<keyword>/<youtube-id>': fetch and reply with the video.
    elif temp[0:5] == 'video':
        t = temp.split('/')
        print('----t-----'+str(t))
        keyword = t[1]
        video_url = t[2]
        video_url = 'https://www.youtube.com/watch?v={}'.format(video_url)
        video_url,img = yvideo(video_url)
        line_bot_api.reply_message(
            event.reply_token,
            VideoSendMessage(
                original_content_url=video_url,
                preview_image_url=img))
# 處理圖片
@handler.add(MessageEvent,message=ImageMessage)
def handle_msg_img(event):
    """Receive an image from the user, upload it to the configured imgur
    album, record {id, user, describe} in Firebase under /pic, and ask the
    user whether they want to add a caption."""
    profile = line_bot_api.get_profile(event.source.user_id)
    tem_name = str(profile.display_name)
    img_id = 1
    t = fb.get('/pic',None)
    if t!=None:
        # Next id = last stored id + 1 (dict iteration order is used to
        # find the most recent /pic entry).
        count = 1
        for key,value in t.items():
            if count == len(t):# last dict entry -> highest existing id
                img_id = int(value['id'])+1
            count+=1
    try:
        # Stream the image content into a temp file, then upload it.
        message_content = line_bot_api.get_message_content(event.message.id)
        with tempfile.NamedTemporaryFile(prefix='jpg-', delete=False) as tf:
            for chunk in message_content.iter_content():
                tf.write(chunk)
            fb.post('/pic',{'id':str(img_id),'user':tem_name,'describe':''})
            tempfile_path = tf.name
            path = tempfile_path
        client = ImgurClient(client_id, client_secret, access_token, refresh_token)
        config = {
            'album': album_id,
            'name' : img_id,
            'title': img_id,
            'description': 'Cute kitten being cute on'
        }
        client.upload_from_path(path, config=config, anon=False)
        os.remove(path)
        image_reply = check_pic(img_id)
        line_bot_api.reply_message(event.reply_token,[TextSendMessage(text='上傳成功'),image_reply])
    except Exception as e:
        t = '上傳失敗'+str(e.args)
        line_bot_api.reply_message(event.reply_token,TextSendMessage(text=t))
from pydub import AudioSegment
import speech_recognition as sr
@handler.add(MessageEvent,message=AudioMessage)
def handle_aud(event):
    """Receive a voice message, convert it to WAV via pydub/ffmpeg, run
    Google speech recognition (zh-TW) on it, and reply with a confirm
    template echoing the recognized text.

    BUG FIXES vs. original:
    - the except handler concatenated the undefined name `test`, so every
      error path itself crashed with NameError;
    - `path` could be unbound when formatting the error message;
    - after replying with the error it fell through and called
      r.recognize_google on the unbound `audio` — now returns early.
    """
    r = sr.Recognizer()
    message_content = line_bot_api.get_message_content(event.message.id)
    ext = 'mp3'
    path = ''  # ensure the error message below can always reference it
    try:
        # Stream the audio payload into a temp file.
        with tempfile.NamedTemporaryFile(prefix=ext + '-', delete=False) as tf:
            for chunk in message_content.iter_content():
                tf.write(chunk)
            tempfile_path = tf.name
        path = tempfile_path
        # Transcode to WAV, which SpeechRecognition's AudioFile requires.
        AudioSegment.converter = '/app/vendor/ffmpeg/ffmpeg'
        sound = AudioSegment.from_file_using_temporary_files(path)
        path = os.path.splitext(path)[0]+'.wav'
        sound.export(path, format="wav")
        with sr.AudioFile(path) as source:
            audio = r.record(source)
    except Exception as e:
        t = '音訊有問題'+str(e.args)+path
        line_bot_api.reply_message(event.reply_token,TextSendMessage(text=t))
        return
    os.remove(path)
    text = r.recognize_google(audio,language='zh-TW')
    message = audio_template(text)
    line_bot_api.reply_message(event.reply_token,message)
import sys
import gspread
from oauth2client.service_account import ServiceAccountCredentials as SAC
# 處理訊息:
@handler.add(MessageEvent, message=TextMessage)
def handle_msg_text(event):
profile = line_bot_api.get_profile(event.source.user_id)
user_name = profile.display_name
picture_url = profile.picture_url
user_id = event.source.user_id
n = fb.get('/{}/question/no'.format(user_id),None)
num = 1
if n:
num = int(n['no'])
# ----------------註冊-----------------------
register = fb.get('/{}/member'.format(user_id),None)
if register == None:
temp = event.message.text
if '/' not in temp:
line_bot_api.reply_message(
event.reply_token,
TextSendMessage(text='注意!!少了斜線(/)'))
t = temp.split('/')
if len(t) > 2:
line_bot_api.reply_message(
event.reply_token,
TextSendMessage(text='請重新輸入-多打了斜線了'))
fb.post('/{}/member'.format(user_id),{'name':t[0],'email':t[1]})
buttons_template = TemplateSendMessage(
alt_text='Template',
template=ButtonsTemplate(
title='註冊成功',
text='姓名:{}\nemail:{}\n請確定是否正確'.format(t[0],t[1]),
actions=[
MessageTemplateAction(
label='確認無誤',
text='MENU'
),
PostbackTemplateAction(
label='重新輸入',
text='請再輸入一次,名字與email以斜線(/)區隔',
data='revise'
)
]
)
)
line_bot_api.reply_message(
event.reply_token,
buttons_template)
t = fb.get('/{}/num'.format(user_id),None)
number = fb.get('/{}/temp'.format(user_id),None)
# ----------------抽數字-----------------------
if event.message.text == '請輸入起始數字-----------':
t = '起始數字'
fb.post('/{}/temp'.format(user_id),'起始數字')
elif event.message.text == '請輸入結束數字-----------':
t = '結束數字'
fb.post('/{}/temp'.format(user_id),'結束數字')
elif number:
temp = int(event.message.text)
if '起始數字' in list(number.values()):
fb.post('/{}/start'.format(user_id),temp)
else:
fb.post('/{}/end'.format(user_id),temp)
fb.delete('/{}/temp'.format(user_id),None)
bubble = process_draw(user_id)
message = FlexSendMessage(alt_text="hello", contents=bubble)
line_bot_api.reply_message(
event.reply_token,
[TextSendMessage(text='{}為---->{}'.format(list(number.values())[0],temp)),message])
# -----------------自訂的問題-----------------------
elif event.message.text == '請輸入要設定抉擇的問題:':
fb.delete('/{}/ques_num'.format(event.source.user_id),None)
fb.post('/{}/num'.format(user_id),'問題')
elif event.message.text == '請輸入要設定的選項,各個選項以分號區隔!!!':
fb.delete('/{}/opti_num'.format(event.source.user_id),None)
fb.post('/{}/num'.format(user_id),'選項')
elif t:
if '問題' in list(t.values()):
fb.post('/{}/ques_num'.format(user_id),event.message.text)
else:
fb.post('/{}/opti_num'.format(user_id),event.message.text)
fb.delete('/{}/num'.format(user_id),None)
bubble = process_choose(user_id)
message = FlexSendMessage(alt_text="hello", contents=bubble)
line_bot_api.reply_message(
event.reply_token,
[TextSendMessage(text='{}為---->{}'.format(list(t.values())[0],event.message.text)),message])
else:
if t != None:
line_bot_api.reply_message(
event.reply_token,
[TextSendMessage(text='請輸入正確格式的問題或是選項'),TextSendMessage(text='就文字包含數字也可以🙏')])
elif number != None:
line_bot_api.reply_message(
event.reply_token,
[TextSendMessage(text='請輸入正確的起始及結束數字'),TextSendMessage(text='只能是數字,不能包含文字喔🙏')])
if event.message.text.lower() == "eyny":
content = eyny_movie()
line_bot_api.reply_message(
event.reply_token,
TextSendMessage(text=content))
elif event.message.text.lower() == 'draw':
fb.delete('/{}/end'.format(user_id),None)
fb.delete('/{}/start'.format(user_id),None)
print('in')
bubble = process_draw(user_id)
message = FlexSendMessage(alt_text="hello", contents=bubble)
line_bot_api.reply_message(
event.reply_token,
message
)
elif event.message.text.lower() == 'else':
line_bot_api.reply_message(event.reply_token,TextSendMessage(text='敬請期待'))
elif event.message.text.lower() == 'food':
image_message = [ImageSendMessage(
original_content_url=url,
preview_image_url=url
) for url in ['https://i.imgur.com/5iMx8nk.jpg','https://i.imgur.com/EEy8s6m.jpg','https://i.imgur.com/RCGdggZ.jpg']]
line_bot_api.reply_message(event.reply_token,image_message)
elif event.message.text.lower() == 'exit' or event.message.text == '不做':
fb.delete('/{}/question'.format(event.source.user_id),None)
line_bot_api.reply_message(
event.reply_token,
[TextSendMessage(text='如需繼續幫我們了解您的需求,可以透過問卷讓我們了解'),TextSendMessage(text='輸入menu進入選單喔')]
)
elif event.message.text.lower() == '我吃飽了':
fb.put('/{}/question'.format(event.source.user_id),data={'no':'1'},name='no')
line_bot_api.reply_message(
event.reply_token,
TextSendMessage(text='感謝您的用餐,請先輸入您的用餐編號\n讓小弟可以為你服務')
)
elif questionnaire(num,user_id):
if num == 9:
fb.post('/{}/question/item'.format(user_id),{questionnaire(num-1,user_id):event.message.text})
flex = quest_template(answer,user_name)
line_bot_api.reply_message(
event.reply_token,
flex)
return
t = questionnaire(num,user_id)
QuickReply = answer(num,user_id)
g = ['那想請問','方便問一下','可以告訴我們','可以問','我們想知道']
r = random.randint(0,4)
t = '{}{}'.format(g[r],t)
message = greet()
if num == 8:
message = TextSendMessage(text='最後一題了喔!!!!')
fb.post('/{}/question/item'.format(user_id),{questionnaire(num-1,user_id):event.message.text})
num += 1
fb.put('/{}/question'.format(user_id),data={'no':num},name='no')
line_bot_api.reply_message(
event.reply_token,
[message,TextSendMessage(text='--------- 消費體驗調查 ---------\n如需跳開問卷,請輸入exit或不做'),TextSendMessage(text=t,quick_reply=QuickReply)])
elif event.message.text.lower() == "choose":
buttons_template = TemplateSendMessage(
alt_text='抉擇領域template',
template=ButtonsTemplate(
title='抉擇類型',
text='請選擇一下,想要老天爺替你選擇的問題',
thumbnail_image_url='https://i.imgur.com/ISBqTUQ.jpg',
actions=[
PostbackTemplateAction(
label='要不要問題',
data='first/yesno'
),
PostbackTemplateAction(
label='買不買問題',
data='first/buy'
),
PostbackTemplateAction(
label='是不是問題',
data='first/yes'
),
PostbackTemplateAction(
label='新增問題',
data='choose'
)
]
)
)
line_bot_api.reply_message(
event.reply_token,
[TextSendMessage(text=' -------已經進入抉擇領域了------- '),buttons_template])
# elif event.message.text.lower() == "get":
# result = fb.get('note',None)
# result2 = firebase.get('note', None, {'print': 'pretty'}, {'X_FANCY_HEADER': 'VERY FANCY'})
# line_bot_api.reply_message(
# event.reply_token,
# TextSendMessage(text=str(result)+str(result2)))
#
# elif event.message.text.lower() == "save":
# data = {'name': '<NAME>', 'age': 26,
# 'created_at': datetime.datetime.now()}
# snapshot = firebase.post('/users', data)
# print(snapshot['name'])
# elif event.message.text.lower() == 'test':
# print('-----------in')
# data_UserData = usermessage.query.all()
# history_dic = {}
# history_list = []
# for _data in data_UserData:
# history_dic['id'] = _data.id
# history_dic['User_Id'] = _data.user_id
# history_dic['Mesaage'] = _data.message
# history_dic['Date'] = _data.birth_date
# history_list.append(history_dic)
# history_dic = {}
# line_bot_api.reply_message(event.reply_token,TextSendMessage(text= str(history_list)))
# elif event.message.text.lower() == 'clear':
# t = db.session.query(usermessage).delete()
# db.session.commit()
# print('end------------',str(t))
# line_bot_api.reply_message(event.reply_token,TextSendMessage(text= 'successfully'))
#
# elif event.message.text.lower() == 'input':
# print('-----------in')
# data_UserData = usermessage.query.filter_by(message='hi').first()
# print('end------------',str(data_UserData))
# line_bot_api.reply_message(event.reply_token,TextSendMessage(text= str(data_UserData)))
#
elif event.message.text.lower() == "menu":
bubble = BubbleContainer(
direction='ltr',
hero=ImageComponent(
url='https://i.imgur.com/d1XQC5H.jpg',
aspectMode = 'cover',
aspect_ratio='10:3',
size='full',
action=URIAction(uri='http://www.ccu.edu.tw/', label='label'),
),
body=BoxComponent(
layout='vertical',
contents=[
TextComponent(text='目錄功能', weight='bold', size='lg'),
TextComponent(text='感謝您使用加入本店LINE BOT',align='end',color='#AAAAAA', size='sm'),
SeparatorComponent(color='#000000'),
],
),
footer=BoxComponent(
layout='vertical',
spacing='xs',
contents=[
# websiteAction
ButtonComponent(
style='primary',
height='sm',
color='#00AA00',
action=PostbackAction(label='問卷填答',data='question')
),
ButtonComponent(
style='primary',
color='#00AA00',
height='sm',
action=MessageAction(label='精選菜單',text='food')
),
ButtonComponent(
style='primary',
color='#00AA00',
height='sm',
action=MessageAction(label='訂位功能',text='call')
),
ButtonComponent(
style='primary',
color='#00AA00',
height='sm',
action=MessageAction(label='其他功能',text='else')
)
]
),
)
message = FlexSendMessage(alt_text="hello", contents=bubble)
line_bot_api.reply_message(
event.reply_token,
message
)
#訂位
elif event.message.text.lower() == "call":
date_picker = TemplateSendMessage(
alt_text='訂位系統',
template=ButtonsTemplate(
text='{} 你好\n請設定一下取餐時間'.format(user_name),
title='訂位系統',
# thumbnail_image_url=picture_url,
actions=[
DatetimePickerTemplateAction(
label='設定',
data='datetime',
mode='datetime',
initial='2017-04-01T12:30',
min='2017-04-01T12:30',
max='2099-12-31T12:30'
)
]
)
)
line_bot_api.reply_message(
event.reply_token,
date_picker
)
elif event.message.text == "PanX泛科技":
content = panx()
line_bot_api.reply_message(
event.reply_token,
TextSendMessage(text=content))
elif event.message.text.lower() == "help":
Carousel_template = TemplateSendMessage(
alt_text='Carousel template',
template=CarouselTemplate(
columns=[
CarouselColumn(
thumbnail_image_url='https://i.imgur.com/d1XQC5H.jpg',
title = '功能目錄',
text = 'Hey {} bro!\n提供額外小工具,希望您能有美好的一天'.format(user_name),
actions=[
MessageTemplateAction(
label='餐廳資訊',
text= 'menu'
),
MessageTemplateAction(
label='電影資訊',
text= 'movie'
),
MessageTemplateAction(
label='新聞資訊',
text= 'news'
)
]
),
CarouselColumn(
thumbnail_image_url='https://i.imgur.com/d1XQC5H.jpg',
title = '功能目錄',
text = 'Hey {} bro!\n提供額外小工具,希望您能有美好的一天'.format(user_name),
actions=[
MessageTemplateAction(
label='英文字典',
text= '提醒您:\n只需要在查詢英文單字後加上?即可'
),
MessageTemplateAction(
label='樂透查詢',
text= 'lottery'
),
MessageTemplateAction(
label='中正大學',
text= 'introduce'
)
]
)
]
)
)
line_bot_api.reply_message(event.reply_token,Carousel_template)
elif event.message.text == "近期上映電影":
content = movie()
template = movie_template()
line_bot_api.reply_message(
event.reply_token,[
TextSendMessage(text=content),
template
]
)
elif event.message.text == "觸電網-youtube":
target_url = 'https://www.youtube.com/user/truemovie1/videos'
rs = requests.session()
res = rs.get(target_url, verify=False)
soup = bf(res.text, 'html.parser')
seqs = ['https://www.youtube.com{}'.format(data.find('a')['href']) for data in soup.select('.yt-lockup-title')]
template = movie_template()
line_bot_api.reply_message(
event.reply_token, [
TextSendMessage(text=seqs[random.randint(0, len(seqs) - 1)]),
TextSendMessage(text=seqs[random.randint(0, len(seqs) - 1)]),
TextSendMessage(text=seqs[random.randint(0, len(seqs) - 1)]),
template
])
elif event.message.text.lower() == "movie":
buttons_template = movie_template()
line_bot_api.reply_message(event.reply_token, buttons_template)
elif event.message.text == "蘋果即時新聞":
content = apple_news()
line_bot_api.reply_message(
event.reply_token,
TextSendMessage(text=content))
elif event.message.text.lower() == "news":
buttons_template = TemplateSendMessage(
alt_text='news template',
template=ButtonsTemplate(
title='新聞類型',
text='請選擇',
thumbnail_image_url='https://i.imgur.com/GoAYFqv.jpg',
actions=[
MessageTemplateAction(
label='蘋果即時新聞',
text='蘋果即時新聞'
),
MessageTemplateAction(
label='天下雜誌',
text='天下雜誌'
),
MessageTemplateAction(
label='PanX泛科技',
text='PanX泛科技'
)
]
)
)
line_bot_api.reply_message(event.reply_token, buttons_template)
elif event.message.text == "天下雜誌":
content = magazine()
line_bot_api.reply_message(
event.reply_token,
TextSendMessage(text=content))
elif event.message.text.lower() == 'post':
bubble = BubbleContainer(
direction='ltr',
hero=ImageComponent(
url='https://i.imgur.com/qXqg5qA.jpg',
size='full',
aspect_ratio='5:3',
aspect_mode='cover',
action=URIAction(uri='https://github.com/kevin1061517', label='label'),
),
body=BoxComponent(
layout='vertical',
contents=[
# title
TextComponent(text='Content', weight='bold', size='xl',color='#006400'),
SeparatorComponent(margin='xl',color='#000000'),
# review
TextComponent(
text='''現在在練習python各種語法~藉由這次的project,讓我更加熟悉python語法與邏輯,這個LineBot有各種功能,可以把youtube網址拉進來,LineBot會傳來網址影片,你就可以利用右下角的下載鍵,以及抓出菜單等等功能,就可以下載到手機端了😜,如下:\n語法:\n1.阿滴英文yout\n關鍵字後面加上yout,就可以抓出影片了\n2.50嵐menu\n餐廳名字後面加上menu,就可以抓出餐廳單\n3.馬英九pic\n搜尋照片關鍵字加上pic,就可以馬上幫你抓到要搜尋的照片\n -------------------- 18禁 -------------------- \n4.李宗瑞porn\n搜尋關鍵字加上porn,就可以有成人影片彈出來🙏''',
size='sm',wrap=True,color='#2E8B57'
),
SeparatorComponent(margin='xl',color='#000000'),
TextComponent(
text='承認不勇敢 你能不能別離開很多愛不能重來 我應該釋懷在街頭徘徊 下雨時為你撐傘對你的愛成阻礙 祝福你愉快',
size='sm',wrap=True,color='#2E8B57'
),
SeparatorComponent(margin='xl',color='#000000'),
TextComponent(
text='承認不勇敢 你能不能別離開很多愛不能重來 我應該釋懷在街頭徘徊 下雨時為你撐傘對你的愛成阻礙 祝福你愉快',
size='sm',wrap=True,color='#2E8B57'
),
# info
BoxComponent(
layout='vertical',
margin='lg',
color = '#FFFF00',
spacing='sm',
contents=[
BoxComponent(
layout='baseline',
color = '#FFFF00',
spacing='sm',
contents=[
TextComponent(
text='Developer',
color='#000000',
weight='bold',
align="end",
size='xxs',
flex=5
),
TextComponent(
text='Kevin',
wrap=True,
weight='bold',
align="end",
color='#000000',
size='xxs',
flex=1
)
],
),
],
),
SeparatorComponent(),
],
),
footer=BoxComponent(
layout='vertical',
spacing='sm',
contents=[
# callAction, separator, websiteAction
# SpacerComponent(size='sm'),
# callAction
ButtonComponent(
style='primary',
color = '#FFFF00',
height='sm',
action=URIAction(label='CALL', uri='tel:0935593342'),
),
# separator
SeparatorComponent(),
# websiteAction
]
),
)
message = FlexSendMessage(alt_text="hello", contents=bubble)
line_bot_api.reply_message(
event.reply_token,
message
)
elif event.message.text.lower() == 'lottery':
big,b539,bwei = lottery()
big_txt = ''
b539_txt = ''
bwei = ''
for t,c in enumerate(big,1):
if t%3==0:
big_txt += '特別號:'
big_txt += str(c+'\n')
big_txt = big_txt[:-1]
for t,c in enumerate(b539,0):
b539_txt +='{}\n'.format(str(c))
b539_txt = b539_txt[:-1]
for t,c in enumerate(big,1):
if t%3==0:
bwei += '二區:'
bwei +='{}\n'.format(str(c))
bwei = bwei[:-1]
bubble = BubbleContainer(
direction='ltr',
hero=ImageComponent(
url='https://i.imgur.com/9IUzhOT.jpg',
aspectMode = 'cover',
aspect_ratio='11:3',
size='full',
backgroundColor = '#FFD700',
action=URIAction(uri='https://github.com/kevin1061517', label='label'),
),
body=BoxComponent(
layout='vertical',
contents=[
TextComponent(text='祝你中獎', weight='bold', size='md'),
BoxComponent(
layout='vertical',
margin='lg',
spacing='xs',
contents=[
BoxComponent(
margin = 'sm',
layout='horizontal',
contents=[
ImageComponent(
url='https://i.imgur.com/T6rFvGm.png',
size='md',
aspect_ratio='5:5',
flex=2,
gravity='center',
),
TextComponent(
text=big_txt,
wrap=True,
color='#666666',
size='md',
flex=5
)
],
),
SeparatorComponent(color='#000000'),
BoxComponent(
layout='horizontal',
margin = 'sm',
contents=[
ImageComponent(
url='https://i.imgur.com/DQrt8Xz.png',
size='md',
aspect_ratio='5:5',
flex=2,
gravity='center'
),
TextComponent(
text=b539_txt,
wrap=True,
color='#666666',
size='md',
flex=5,
),
],
),
SeparatorComponent(color='#000000'),
BoxComponent(
layout='horizontal',
margin = 'sm',
contents=[
ImageComponent(
url='https://i.imgur.com/nXq6wrd.png',
size='md',
aspect_ratio='5:5',
flex=2,
gravity='center'
),
TextComponent(
text=bwei,
wrap=True,
color='#666666',
size='md',
flex=5,
),
],
),
],
),
SeparatorComponent(color='#000000'),
],
),
footer=BoxComponent(
layout='vertical',
spacing='xs',
contents=[
# websiteAction
ButtonComponent(
style='primary',
height='sm',
action=PostbackAction(label='歷年開獎紀錄',data='ball_all_num',text='歷年號碼~詳細內容參考至台彩官網')
),
SeparatorComponent(color='#000000'),
ButtonComponent(
style='primary',
color='#DAA520',
height='sm',
action=PostbackAction(label='開門見喜💎️', data='ball',text='您的幸運號碼...')
)
]
),
)
message = FlexSendMessage(alt_text="hello", contents=bubble)
line_bot_api.reply_message(
event.reply_token,
message
)
elif re.search(r'yout$',event.message.text.lower())!=None:
keyword = event.message.text.lower()[:-4]
carousel = carousel_template(keyword)
line_bot_api.reply_message(event.reply_token, carousel)
# 供下載影片
elif re.search(r'^https://www.youtu.*',event.message.text) != None or re.search(r'^https://youtu.be.*',event.message.text) !=None:
t = event.message.text
video_url,img = yvideo(t)
line_bot_api.reply_message(
event.reply_token,
[TextSendMessage(text='供你下載製手機端,本人僅供學術用途,不負法律責任'),
VideoSendMessage(
original_content_url=video_url,
preview_image_url=img)]
)
elif re.search(r'\?$',event.message.text.lower())!=None:
keyword = event.message.text.lower()[:-1]
keyword = keyword.replace(' ','')
print('-----------'+keyword)
message = integer_word(keyword)
line_bot_api.reply_message(
event.reply_token,
message
)
elif event.message.text.lower() == 'introduce':
url = 'https://www.youtube.com/watch?v=vf3qc2-h9kE'
url, img = yvideo(url)
line_bot_api.reply_message(
event.reply_token,
VideoSendMessage(
original_content_url=url,
preview_image_url=img)
)
if __name__ == "__main__":
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port) | en | 0.177529 | #from flask_bootstrap import Bootstrap #from PIL import Image #import warnings #warnings.simplefilter('error', Image.DecompressionBombWarning) #bootstrap = Bootstrap(app) # #def do_get(): # return render_template('list.html') # #def do_get(): # return render_template('index2.html') # get X-Line-Signature header value # get request body as text #app.logger.error("Request body: " + bodyjson['events'][0]['message']['text']) # app.logger.error("Request body: " + body) #insertdata # print('-----in----------') # add_data = usermessage( # id = bodyjson['events'][0]['message']['id'], # user_id = bodyjson['events'][0]['source']['userId'], # message = bodyjson['events'][0]['message']['text'], # birth_date = datetime.fromtimestamp(int(bodyjson['events'][0]['timestamp'])/1000) # ) # db.session.add(add_data) # db.session.commit() # handle webhook body # img = 'http://i.ytimg.com/vi/{}/0.jpg'.format(_id) # title = (a['title']) # url = 'https://www.pilio.idv.tw/ltobig/drawlist/drawlist.asp' # url2 = 'https://www.pilio.idv.tw/lto539/drawlist/drawlist.asp' # url3 = 'https://www.pilio.idv.tw/lto/drawlist/drawlist.asp' # header=BoxComponent( # layout='vertical', # contents=header_content#---->這樣子也行 contents=[t[0],t[1]] # ), # review # info # websiteAction # websiteAction # websiteAction # review # info # profile = line_bot_api.get_profile(event.source.user_id) # user_name = profile.display_name #GDriveJSON就輸入下載下來Json檔名稱 #GSpreadSheet是google試算表名稱 #GDriveJSON就輸入下載下來Json檔名稱 #GSpreadSheet是google試算表名稱 # info # review # info # review # info # websiteAction # review # info # websiteAction # review # info # review # review # info # websiteAction # review # info # websiteAction # review # info # websiteAction # review # info # websiteAction # 處理圖片 #取得最後一個dict項目 # 處理訊息: # ----------------註冊----------------------- # ----------------抽數字----------------------- # -----------------自訂的問題----------------------- # elif event.message.text.lower() == 
"get": # result = fb.get('note',None) # result2 = firebase.get('note', None, {'print': 'pretty'}, {'X_FANCY_HEADER': 'VERY FANCY'}) # line_bot_api.reply_message( # event.reply_token, # TextSendMessage(text=str(result)+str(result2))) # # elif event.message.text.lower() == "save": # data = {'name': '<NAME>', 'age': 26, # 'created_at': datetime.datetime.now()} # snapshot = firebase.post('/users', data) # print(snapshot['name']) # elif event.message.text.lower() == 'test': # print('-----------in') # data_UserData = usermessage.query.all() # history_dic = {} # history_list = [] # for _data in data_UserData: # history_dic['id'] = _data.id # history_dic['User_Id'] = _data.user_id # history_dic['Mesaage'] = _data.message # history_dic['Date'] = _data.birth_date # history_list.append(history_dic) # history_dic = {} # line_bot_api.reply_message(event.reply_token,TextSendMessage(text= str(history_list))) # elif event.message.text.lower() == 'clear': # t = db.session.query(usermessage).delete() # db.session.commit() # print('end------------',str(t)) # line_bot_api.reply_message(event.reply_token,TextSendMessage(text= 'successfully')) # # elif event.message.text.lower() == 'input': # print('-----------in') # data_UserData = usermessage.query.filter_by(message='hi').first() # print('end------------',str(data_UserData)) # line_bot_api.reply_message(event.reply_token,TextSendMessage(text= str(data_UserData))) # # websiteAction #訂位 # thumbnail_image_url=picture_url, # title # review 現在在練習python各種語法~藉由這次的project,讓我更加熟悉python語法與邏輯,這個LineBot有各種功能,可以把youtube網址拉進來,LineBot會傳來網址影片,你就可以利用右下角的下載鍵,以及抓出菜單等等功能,就可以下載到手機端了😜,如下:\n語法:\n1.阿滴英文yout\n關鍵字後面加上yout,就可以抓出影片了\n2.50嵐menu\n餐廳名字後面加上menu,就可以抓出餐廳單\n3.馬英九pic\n搜尋照片關鍵字加上pic,就可以馬上幫你抓到要搜尋的照片\n -------------------- 18禁 -------------------- \n4.李宗瑞porn\n搜尋關鍵字加上porn,就可以有成人影片彈出來🙏 # info # callAction, separator, websiteAction # SpacerComponent(size='sm'), # callAction # separator # websiteAction # websiteAction # 供下載影片 | 1.669903 | 2 |
test/modules_test3d.py | jinanloubani/aTEAM | 23 | 6617894 | """
3d examples for LagrangeInterp,LagrangeInterpFixInputs (nn.modules.Interpolation)
"""
#%%
from numpy import *
import numpy as np
import torch
from torch.autograd import grad
import torch.nn as nn
from torch.nn import functional as F
from scipy.optimize.lbfgsb import fmin_l_bfgs_b as lbfgsb
from scipy.optimize.slsqp import fmin_slsqp as slsqp
import matplotlib.pyplot as plt
from aTEAM.optim import NumpyFunctionInterface,ParamGroupsManager
from aTEAM.nn.modules import LagrangeInterp,LagrangeInterpFixInputs
from aTEAM.utils import meshgen
#%%
def testfunc(inputs):
"""inputs (ndarray)"""
return sin(inputs[...,0]*8)+cos(sqrt(inputs[...,1]*4))*sin(inputs[...,2]*4)
def compare(I, inputs):
    """Return (interpolated, exact) values of interpolant *I* at *inputs* as numpy arrays."""
    predicted = I(inputs).data.cpu().numpy()
    exact = testfunc(inputs.data.cpu().numpy())
    return predicted, exact
def forward(I, inputs):
    """Mean-squared error between interpolant *I* and testfunc at *inputs*."""
    predicted = I(inputs)
    target = torch.from_numpy(testfunc(inputs.data.cpu().numpy()))
    # Re-allocate the target on the prediction's dtype/device before comparing.
    target = predicted.data.new(target.size()).copy_(target)
    residual = predicted - target
    return (residual ** 2).mean()
def forwardFixInputs(IFixInputs, outputs_true):
    """Mean-squared error of the fixed-input interpolant against *outputs_true*."""
    diff = IFixInputs() - outputs_true
    return (diff ** 2).mean()
#%%
m = 3       # number of spatial dimensions (the sample grids below are 3-d)
d = 2       # presumably the interpolation order -- passed straight to LagrangeInterp; TODO confirm
device = -1  # GPU index; negative keeps everything on CPU (see the cuda() guards below)
mesh_bound = zeros((2,m))  # row 0 / row 1 hold per-dimension lower / upper bounds
# mesh_bound[0] = arange(m)-1
# mesh_bound[1] = arange(m)+1
mesh_bound[0] = 0
mesh_bound[1] = 1
mesh_size = array([40,]*m)  # 40 interpolation cells per dimension
I = LagrangeInterp(m, d, mesh_bound, mesh_size)
I.double()
if device>=0:
    I.cuda(device)
# Temporarily extend the upper bound by one sample step (1/200) so the
# 201-point-per-axis sample grid covers the domain -- presumably meshgen
# samples half-open intervals; verify against meshgen's convention.
mesh_bound[1] += 1/200
dataset = meshgen(mesh_bound, [201,201,201])
dataset = torch.from_numpy(dataset).clone()
# Move the sample grid onto the same dtype/device as the interpolation coefficients.
dataset = I.interp_coe.data.new(dataset.size()).copy_(dataset)
mesh_bound[1] -= 1/200
# Fixed-input variant; constructed with a dummy 1x1x1 input, real inputs are assigned later.
IFixInputs = LagrangeInterpFixInputs(dataset[:1,:1,:1],m,d,mesh_bound,mesh_size)
IFixInputs.double()
if device>=0:
    IFixInputs.cuda(device)
#%%
inputs_shape = [50,50,50]  # size of one training sub-block of the 200^3 usable grid
# Number of non-overlapping sub-blocks along each axis (200/50 = 4 each).
IN,JN,KN = int(200/inputs_shape[0]), int(200/inputs_shape[1]), int(200/inputs_shape[2])
# indx[t] holds the (i,j,k) corner offset of the t-th sub-block.
indx = zeros((IN*JN*KN,3),dtype=int32)
idx = 0
for i in range(IN):
    for j in range(JN):
        for k in range(KN):
            indx[idx] = array([i,j,k])*array(inputs_shape)
            idx += 1
#%%
# Fit I's coefficients sub-block by sub-block with L-BFGS-B, starting from
# random coefficients; the forward closure is rebound to each block in turn.
nfi = NumpyFunctionInterface([I.interp_coe,],forward=lambda :forward(I,dataset))
nfi.flat_param = random.randn(nfi.numel())
x0 = nfi.flat_param
for i in range(64):  # 64 = IN*JN*KN sub-blocks
    inputs = dataset[
        indx[i,0]:indx[i,0]+inputs_shape[0],
        indx[i,1]:indx[i,1]+inputs_shape[1],
        indx[i,2]:indx[i,2]+inputs_shape[2]
    ]
    inputs = inputs.clone()
    nfi.forward = lambda :forward(I,inputs)
    x = nfi.flat_param
    # NOTE(review): the lbfgsb info dict rebinds `d` (previously the
    # interpolation order); harmless since d is not reused, but confusing.
    x,f,d = lbfgsb(nfi.f,x,nfi.fprime,m=1000,maxiter=20,factr=1,pgtol=1e-16,iprint=10)
#%%
# Same block-wise L-BFGS-B fit for the fixed-input interpolant; here the
# target values are precomputed per block instead of inside the forward pass.
outputs = IFixInputs()
outputs_true = torch.from_numpy(testfunc(IFixInputs.inputs.cpu().numpy()))
outputs_true = outputs_true.view(outputs.size())
outputs_true = outputs.data.new(outputs_true.size()).copy_(outputs_true)
nfi = NumpyFunctionInterface([IFixInputs.interp_coe,],forward=lambda :forwardFixInputs(IFixInputs,outputs_true))
nfi.flat_param = random.randn(nfi.numel())
for i in range(64):
    inputs = dataset[
        indx[i,0]:indx[i,0]+inputs_shape[0],
        indx[i,1]:indx[i,1]+inputs_shape[1],
        indx[i,2]:indx[i,2]+inputs_shape[2]
    ]
    inputs = inputs.clone()
    IFixInputs.inputs = inputs
    outputs = IFixInputs()
    outputs_true = torch.from_numpy(testfunc(IFixInputs.inputs.cpu().numpy()))
    outputs_true = outputs_true.view(outputs.size())
    outputs_true = outputs.data.new(outputs_true.size()).copy_(outputs_true)
    nfi.forward = lambda :forwardFixInputs(IFixInputs,outputs_true)
    x = nfi.flat_param
    # NOTE(review): `x` above is unused -- lbfgsb is seeded from nfi.flat_param directly.
    x,f,d = lbfgsb(nfi.f,nfi.flat_param,nfi.fprime,m=1000,maxiter=20,factr=1,pgtol=1e-14,iprint=10)
#%%
# Evaluate I on a randomly-offset strided 50^3 sub-grid and compare to testfunc.
# NOTE(review): random.randint receives a float (200/50 = 4.0); older numpy
# accepted this, newer versions reject it -- should be wrapped in int().
inputs = dataset[
        random.randint(200/inputs_shape[0])+int(200/inputs_shape[0])*arange(0,inputs_shape[0],dtype=int32)[:,newaxis,newaxis],
        random.randint(200/inputs_shape[1])+int(200/inputs_shape[1])*arange(0,inputs_shape[1],dtype=int32)[newaxis,:,newaxis],
        random.randint(200/inputs_shape[2])+int(200/inputs_shape[2])*arange(0,inputs_shape[2],dtype=int32)[newaxis,newaxis,:]
    ]
inputs = inputs.clone()
nfi.forward = lambda :forward(I,inputs)
infe,infe_true = compare(I,inputs)
# sqrt((err)**2) is |err|: prints mean and max absolute error.
print(sqrt((infe-infe_true)**2).mean())
print(sqrt((infe-infe_true)**2).max())
# 4x2 panel: one image pair (true vs inferenced) plus three line cuts, one per axis.
# NOTE(review): `indx` (the block-offset table) is clobbered by a scalar here,
# and slice indices are drawn only from the first 20 of 50 positions.
h = plt.figure()
indx = random.randint(20)
a = h.add_subplot(4,2,1)
a.imshow(infe_true[indx])
a.set_title('true')
a = h.add_subplot(4,2,2)
a.imshow(infe[indx])
a.set_title('inferenced')
indx = random.randint(20)
a = h.add_subplot(4,2,3)
a.plot(infe_true[indx,indx])
a = h.add_subplot(4,2,4)
a.plot(infe[indx,indx])
indx = random.randint(20)
a = h.add_subplot(4,2,5)
a.plot(infe_true[indx,:,indx])
a = h.add_subplot(4,2,6)
a.plot(infe[indx,:,indx])
indx = random.randint(20)
a = h.add_subplot(4,2,7)
a.plot(infe_true[:,indx,indx])
a = h.add_subplot(4,2,8)
a.plot(infe[:,indx,indx])
#%%
# Same evaluation for the fixed-input interpolant on a fresh random sub-grid.
inputs = dataset[
        random.randint(200/inputs_shape[0])+int(200/inputs_shape[0])*arange(0,inputs_shape[0],dtype=int32)[:,newaxis,newaxis],
        random.randint(200/inputs_shape[1])+int(200/inputs_shape[1])*arange(0,inputs_shape[1],dtype=int32)[newaxis,:,newaxis],
        random.randint(200/inputs_shape[2])+int(200/inputs_shape[2])*arange(0,inputs_shape[2],dtype=int32)[newaxis,newaxis,:]
    ]
inputs = inputs.clone()
IFixInputs.inputs = inputs
outputs = IFixInputs()
outputs_true = torch.from_numpy(testfunc(IFixInputs.inputs.cpu().numpy()))
outputs_true = outputs_true.view(outputs.size())
infe = outputs.data.cpu().numpy()
infe_true = outputs_true.numpy()
# Mean and max absolute error, then the same 4x2 diagnostic panel as above.
print(sqrt((infe-infe_true)**2).mean())
print(sqrt((infe-infe_true)**2).max())
h = plt.figure()
indx = random.randint(20)
a = h.add_subplot(4,2,1)
a.imshow(infe_true[indx])
a.set_title('true')
a = h.add_subplot(4,2,2)
a.imshow(infe[indx])
a.set_title('inferenced')
indx = random.randint(20)
a = h.add_subplot(4,2,3)
a.plot(infe_true[indx,indx])
a = h.add_subplot(4,2,4)
a.plot(infe[indx,indx])
indx = random.randint(20)
a = h.add_subplot(4,2,5)
a.plot(infe_true[indx,:,indx])
a = h.add_subplot(4,2,6)
a.plot(infe[indx,:,indx])
indx = random.randint(20)
a = h.add_subplot(4,2,7)
a.plot(infe_true[:,indx,indx])
a = h.add_subplot(4,2,8)
a.plot(infe[:,indx,indx])
#%%
| """
3d examples for LagrangeInterp,LagrangeInterpFixInputs (nn.modules.Interpolation)
"""
#%%
from numpy import *
import numpy as np
import torch
from torch.autograd import grad
import torch.nn as nn
from torch.nn import functional as F
from scipy.optimize.lbfgsb import fmin_l_bfgs_b as lbfgsb
from scipy.optimize.slsqp import fmin_slsqp as slsqp
import matplotlib.pyplot as plt
from aTEAM.optim import NumpyFunctionInterface,ParamGroupsManager
from aTEAM.nn.modules import LagrangeInterp,LagrangeInterpFixInputs
from aTEAM.utils import meshgen
#%%
def testfunc(inputs):
"""inputs (ndarray)"""
return sin(inputs[...,0]*8)+cos(sqrt(inputs[...,1]*4))*sin(inputs[...,2]*4)
def compare(I, inputs):
infe = I(inputs).data.cpu().numpy()
infe_true = testfunc(inputs.data.cpu().numpy())
return infe,infe_true
def forward(I, inputs):
outputs = I(inputs)
outputs_true = torch.from_numpy(testfunc(inputs.data.cpu().numpy()))
outputs_true = outputs.data.new(outputs_true.size()).copy_(outputs_true)
return ((outputs-outputs_true)**2).mean()
def forwardFixInputs(IFixInputs, outputs_true):
outputs = IFixInputs()
return ((outputs-outputs_true)**2).mean()
#%%
m = 3
d = 2
device = -1
mesh_bound = zeros((2,m))
# mesh_bound[0] = arange(m)-1
# mesh_bound[1] = arange(m)+1
mesh_bound[0] = 0
mesh_bound[1] = 1
mesh_size = array([40,]*m)
I = LagrangeInterp(m, d, mesh_bound, mesh_size)
I.double()
if device>=0:
I.cuda(device)
mesh_bound[1] += 1/200
dataset = meshgen(mesh_bound, [201,201,201])
dataset = torch.from_numpy(dataset).clone()
dataset = I.interp_coe.data.new(dataset.size()).copy_(dataset)
mesh_bound[1] -= 1/200
IFixInputs = LagrangeInterpFixInputs(dataset[:1,:1,:1],m,d,mesh_bound,mesh_size)
IFixInputs.double()
if device>=0:
IFixInputs.cuda(device)
#%%
inputs_shape = [50,50,50]
IN,JN,KN = int(200/inputs_shape[0]), int(200/inputs_shape[1]), int(200/inputs_shape[2])
indx = zeros((IN*JN*KN,3),dtype=int32)
idx = 0
for i in range(IN):
for j in range(JN):
for k in range(KN):
indx[idx] = array([i,j,k])*array(inputs_shape)
idx += 1
#%%
nfi = NumpyFunctionInterface([I.interp_coe,],forward=lambda :forward(I,dataset))
nfi.flat_param = random.randn(nfi.numel())
x0 = nfi.flat_param
for i in range(64):
inputs = dataset[
indx[i,0]:indx[i,0]+inputs_shape[0],
indx[i,1]:indx[i,1]+inputs_shape[1],
indx[i,2]:indx[i,2]+inputs_shape[2]
]
inputs = inputs.clone()
nfi.forward = lambda :forward(I,inputs)
x = nfi.flat_param
x,f,d = lbfgsb(nfi.f,x,nfi.fprime,m=1000,maxiter=20,factr=1,pgtol=1e-16,iprint=10)
#%%
outputs = IFixInputs()
outputs_true = torch.from_numpy(testfunc(IFixInputs.inputs.cpu().numpy()))
outputs_true = outputs_true.view(outputs.size())
outputs_true = outputs.data.new(outputs_true.size()).copy_(outputs_true)
nfi = NumpyFunctionInterface([IFixInputs.interp_coe,],forward=lambda :forwardFixInputs(IFixInputs,outputs_true))
nfi.flat_param = random.randn(nfi.numel())
for i in range(64):
inputs = dataset[
indx[i,0]:indx[i,0]+inputs_shape[0],
indx[i,1]:indx[i,1]+inputs_shape[1],
indx[i,2]:indx[i,2]+inputs_shape[2]
]
inputs = inputs.clone()
IFixInputs.inputs = inputs
outputs = IFixInputs()
outputs_true = torch.from_numpy(testfunc(IFixInputs.inputs.cpu().numpy()))
outputs_true = outputs_true.view(outputs.size())
outputs_true = outputs.data.new(outputs_true.size()).copy_(outputs_true)
nfi.forward = lambda :forwardFixInputs(IFixInputs,outputs_true)
x = nfi.flat_param
x,f,d = lbfgsb(nfi.f,nfi.flat_param,nfi.fprime,m=1000,maxiter=20,factr=1,pgtol=1e-14,iprint=10)
#%%
inputs = dataset[
random.randint(200/inputs_shape[0])+int(200/inputs_shape[0])*arange(0,inputs_shape[0],dtype=int32)[:,newaxis,newaxis],
random.randint(200/inputs_shape[1])+int(200/inputs_shape[1])*arange(0,inputs_shape[1],dtype=int32)[newaxis,:,newaxis],
random.randint(200/inputs_shape[2])+int(200/inputs_shape[2])*arange(0,inputs_shape[2],dtype=int32)[newaxis,newaxis,:]
]
inputs = inputs.clone()
nfi.forward = lambda :forward(I,inputs)
infe,infe_true = compare(I,inputs)
print(sqrt((infe-infe_true)**2).mean())
print(sqrt((infe-infe_true)**2).max())
h = plt.figure()
indx = random.randint(20)
a = h.add_subplot(4,2,1)
a.imshow(infe_true[indx])
a.set_title('true')
a = h.add_subplot(4,2,2)
a.imshow(infe[indx])
a.set_title('inferenced')
indx = random.randint(20)
a = h.add_subplot(4,2,3)
a.plot(infe_true[indx,indx])
a = h.add_subplot(4,2,4)
a.plot(infe[indx,indx])
indx = random.randint(20)
a = h.add_subplot(4,2,5)
a.plot(infe_true[indx,:,indx])
a = h.add_subplot(4,2,6)
a.plot(infe[indx,:,indx])
indx = random.randint(20)
a = h.add_subplot(4,2,7)
a.plot(infe_true[:,indx,indx])
a = h.add_subplot(4,2,8)
a.plot(infe[:,indx,indx])
#%%
inputs = dataset[
random.randint(200/inputs_shape[0])+int(200/inputs_shape[0])*arange(0,inputs_shape[0],dtype=int32)[:,newaxis,newaxis],
random.randint(200/inputs_shape[1])+int(200/inputs_shape[1])*arange(0,inputs_shape[1],dtype=int32)[newaxis,:,newaxis],
random.randint(200/inputs_shape[2])+int(200/inputs_shape[2])*arange(0,inputs_shape[2],dtype=int32)[newaxis,newaxis,:]
]
inputs = inputs.clone()
IFixInputs.inputs = inputs
outputs = IFixInputs()
outputs_true = torch.from_numpy(testfunc(IFixInputs.inputs.cpu().numpy()))
outputs_true = outputs_true.view(outputs.size())
infe = outputs.data.cpu().numpy()
infe_true = outputs_true.numpy()
print(sqrt((infe-infe_true)**2).mean())
print(sqrt((infe-infe_true)**2).max())
h = plt.figure()
indx = random.randint(20)
a = h.add_subplot(4,2,1)
a.imshow(infe_true[indx])
a.set_title('true')
a = h.add_subplot(4,2,2)
a.imshow(infe[indx])
a.set_title('inferenced')
indx = random.randint(20)
a = h.add_subplot(4,2,3)
a.plot(infe_true[indx,indx])
a = h.add_subplot(4,2,4)
a.plot(infe[indx,indx])
indx = random.randint(20)
a = h.add_subplot(4,2,5)
a.plot(infe_true[indx,:,indx])
a = h.add_subplot(4,2,6)
a.plot(infe[indx,:,indx])
indx = random.randint(20)
a = h.add_subplot(4,2,7)
a.plot(infe_true[:,indx,indx])
a = h.add_subplot(4,2,8)
a.plot(infe[:,indx,indx])
#%%
| en | 0.129653 | 3d examples for LagrangeInterp,LagrangeInterpFixInputs (nn.modules.Interpolation) #%% #%% inputs (ndarray) #%% # mesh_bound[0] = arange(m)-1 # mesh_bound[1] = arange(m)+1 #%% #%% #%% #%% #%% #%% | 2.507199 | 3 |
read_only_site/migrations/0005_auto_20190531_0700.py | vvvictoire/harddrop_league | 2 | 6617895 | # Generated by Django 2.2.1 on 2019-05-31 07:00
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: redefines Match.match_identifier.
    dependencies = [
        ('read_only_site', '0004_auto_20190531_0700'),
    ]
    operations = [
        # Alter the field to a plain CharField(max_length=200).
        # NOTE(review): default=None on a (presumably non-null) CharField is
        # unusual — confirm the column's NULL behavior at the DB level.
        migrations.AlterField(
            model_name='match',
            name='match_identifier',
            field=models.CharField(default=None, max_length=200),
        ),
    ]
| # Generated by Django 2.2.1 on 2019-05-31 07:00
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: redefines Match.match_identifier.
    dependencies = [
        ('read_only_site', '0004_auto_20190531_0700'),
    ]
    operations = [
        # Alter the field to a plain CharField(max_length=200).
        # NOTE(review): default=None on a (presumably non-null) CharField is
        # unusual — confirm the column's NULL behavior at the DB level.
        migrations.AlterField(
            model_name='match',
            name='match_identifier',
            field=models.CharField(default=None, max_length=200),
        ),
    ]
| en | 0.665866 | # Generated by Django 2.2.1 on 2019-05-31 07:00 | 1.473921 | 1 |
prototypes/harvesters/oai_pmh_harvester.py | materials-data-facility/connect | 1 | 6617896 | <gh_stars>1-10
import requests
from sys import exit
from json import dump, loads
import xmltodict
import os
import os.path
from shutil import rmtree
from tqdm import tqdm
# List of resource names harvested when this module runs as a script;
# uncomment/append entries to enable additional sources.
to_harvest = []
#to_harvest.append("matin")
to_harvest.append("nist_mrr")
#Collects available data from a resource using OAI-PMH and saves to the given directory
#out_dir: The path to the directory (which will be created) for the data files
#base_url: The URL to the OAI-PMH resource
#metadata_prefixes: List of metadataPrefix values to use in fetching records. Requires at least one. Default ["oai_dc"].
#resource_types: List of setSpec resource types to harvest. If empty, will harvest all. Default empty.
#existing_dir:
# -1: Remove out_dir if it exists
# 0: Error if out_dir exists (Default)
# 1: Overwrite files in out_dir if there are path collisions
#verbose: Print status messages? Default False
def harvest(out_dir, base_url, metadata_prefixes=None, resource_types=None, existing_dir=0, verbose=False):
    """Harvest record metadata from an OAI-PMH endpoint into JSON files.

    Parameters
    ----------
    out_dir : str
        Directory (created if needed) receiving one ``<id>_metadata.json``
        file per harvested record.
    base_url : str
        URL of the OAI-PMH server.
    metadata_prefixes : list of str, optional
        ``metadataPrefix`` values to request. Defaults to ``["oai_dc"]``.
    resource_types : list of str, optional
        ``setSpec`` values to keep; an empty list keeps everything.
    existing_dir : int
        -1: remove an existing ``out_dir`` first; 0: abort if it exists
        (default); 1: write into it, overwriting colliding files.
    verbose : bool
        Print status/progress messages.
    """
    # Idiom fix: the defaults used to be mutable literals ([...]), which
    # Python shares across calls; use None sentinels instead.
    if metadata_prefixes is None:
        metadata_prefixes = ["oai_dc"]
    if resource_types is None:
        resource_types = []
    if os.path.exists(out_dir):
        if existing_dir == 0:
            exit("Directory '" + out_dir + "' exists")
        elif not os.path.isdir(out_dir):
            exit("Error: '" + out_dir + "' is not a directory")
        elif existing_dir == -1:
            rmtree(out_dir)
            os.mkdir(out_dir)
    else:
        os.mkdir(out_dir)
    # Fetch the record list for every requested metadata prefix.
    # NOTE(review): ListRecords responses can be paginated via a
    # resumptionToken, which is not followed here — only the first page
    # per prefix is harvested; confirm that is intended.
    records = []
    for prefix in metadata_prefixes:
        record_res = requests.get(base_url + "?verb=ListRecords&metadataPrefix=" + prefix)
        if record_res.status_code != 200:
            exit("Records GET failure: " + str(record_res.status_code) + " error")
        result = xmltodict.parse(record_res.content)
        try:
            # xmltodict yields a dict for a single <record> and a list for
            # several; normalization happens in the loop below.
            new_records = result["OAI-PMH"]["ListRecords"]["record"]
            records.append(new_records)
        except KeyError:  # no results for this prefix
            if verbose:
                print("No results for", prefix)
    for record_entry in tqdm(records, desc="Fetching records", disable=not verbose):
        for record in (record_entry if isinstance(record_entry, list) else [record_entry]):
            # Only keep the requested setSpec resource types (all if empty).
            if not resource_types or record["header"]["setSpec"] in resource_types:
                # identifier has the form `URL/id_num`
                resource_num = record["header"]["identifier"].rsplit("/", 1)[1]
                with open(os.path.join(out_dir, resource_num + "_metadata.json"), 'w') as out_file:
                    dump(record, out_file)
if __name__ == "__main__":
    nist_mrr_url = "http://matsci.registry.nationaldataservice.org/oai_pmh/server/"
    matin_url = "https://matin.gatech.edu/oaipmh/"
    # BUG FIX: the function defined above is `harvest`, not
    # `oai_pmh_harvest`; the old name raised NameError at runtime.
    # NOTE(review): `paths` is never imported in this module — presumably a
    # project-level config object exposing a `datasets` prefix; confirm.
    if "matin" in to_harvest:
        harvest(out_dir=paths.datasets + "matin_metadata", base_url=matin_url,
                metadata_prefixes=["oai_dc"], existing_dir=1, verbose=True)
    if "nist_mrr" in to_harvest:
        harvest(out_dir=paths.datasets + "nist_mrr_metadata", base_url=nist_mrr_url,
                metadata_prefixes=["oai_dc", "oai_all", "oai_software", "oai_service", "oai_org"],
                existing_dir=1, verbose=True)
| import requests
from sys import exit
from json import dump, loads
import xmltodict
import os
import os.path
from shutil import rmtree
from tqdm import tqdm
# List of resource names harvested when this module runs as a script;
# uncomment/append entries to enable additional sources.
to_harvest = []
#to_harvest.append("matin")
to_harvest.append("nist_mrr")
#Collects available data from a resource using OAI-PMH and saves to the given directory
#out_dir: The path to the directory (which will be created) for the data files
#base_url: The URL to the OAI-PMH resource
#metadata_prefixes: List of metadataPrefix values to use in fetching records. Requires at least one. Default ["oai_dc"].
#resource_types: List of setSpec resource types to harvest. If empty, will harvest all. Default empty.
#existing_dir:
# -1: Remove out_dir if it exists
# 0: Error if out_dir exists (Default)
# 1: Overwrite files in out_dir if there are path collisions
#verbose: Print status messages? Default False
def harvest(out_dir, base_url, metadata_prefixes=None, resource_types=None, existing_dir=0, verbose=False):
    """Harvest record metadata from an OAI-PMH endpoint into JSON files.

    Parameters
    ----------
    out_dir : str
        Directory (created if needed) receiving one ``<id>_metadata.json``
        file per harvested record.
    base_url : str
        URL of the OAI-PMH server.
    metadata_prefixes : list of str, optional
        ``metadataPrefix`` values to request. Defaults to ``["oai_dc"]``.
    resource_types : list of str, optional
        ``setSpec`` values to keep; an empty list keeps everything.
    existing_dir : int
        -1: remove an existing ``out_dir`` first; 0: abort if it exists
        (default); 1: write into it, overwriting colliding files.
    verbose : bool
        Print status/progress messages.
    """
    # Idiom fix: the defaults used to be mutable literals ([...]), which
    # Python shares across calls; use None sentinels instead.
    if metadata_prefixes is None:
        metadata_prefixes = ["oai_dc"]
    if resource_types is None:
        resource_types = []
    if os.path.exists(out_dir):
        if existing_dir == 0:
            exit("Directory '" + out_dir + "' exists")
        elif not os.path.isdir(out_dir):
            exit("Error: '" + out_dir + "' is not a directory")
        elif existing_dir == -1:
            rmtree(out_dir)
            os.mkdir(out_dir)
    else:
        os.mkdir(out_dir)
    # Fetch the record list for every requested metadata prefix.
    # NOTE(review): ListRecords responses can be paginated via a
    # resumptionToken, which is not followed here — only the first page
    # per prefix is harvested; confirm that is intended.
    records = []
    for prefix in metadata_prefixes:
        record_res = requests.get(base_url + "?verb=ListRecords&metadataPrefix=" + prefix)
        if record_res.status_code != 200:
            exit("Records GET failure: " + str(record_res.status_code) + " error")
        result = xmltodict.parse(record_res.content)
        try:
            # xmltodict yields a dict for a single <record> and a list for
            # several; normalization happens in the loop below.
            new_records = result["OAI-PMH"]["ListRecords"]["record"]
            records.append(new_records)
        except KeyError:  # no results for this prefix
            if verbose:
                print("No results for", prefix)
    for record_entry in tqdm(records, desc="Fetching records", disable=not verbose):
        for record in (record_entry if isinstance(record_entry, list) else [record_entry]):
            # Only keep the requested setSpec resource types (all if empty).
            if not resource_types or record["header"]["setSpec"] in resource_types:
                # identifier has the form `URL/id_num`
                resource_num = record["header"]["identifier"].rsplit("/", 1)[1]
                with open(os.path.join(out_dir, resource_num + "_metadata.json"), 'w') as out_file:
                    dump(record, out_file)
if __name__ == "__main__":
    nist_mrr_url = "http://matsci.registry.nationaldataservice.org/oai_pmh/server/"
    matin_url = "https://matin.gatech.edu/oaipmh/"
    # BUG FIX: the function defined above is `harvest`, not
    # `oai_pmh_harvest`; the old name raised NameError at runtime.
    # NOTE(review): `paths` is never imported in this module — presumably a
    # project-level config object exposing a `datasets` prefix; confirm.
    if "matin" in to_harvest:
        harvest(out_dir=paths.datasets + "matin_metadata", base_url=matin_url,
                metadata_prefixes=["oai_dc"], existing_dir=1, verbose=True)
    if "nist_mrr" in to_harvest:
        harvest(out_dir=paths.datasets + "nist_mrr_metadata", base_url=nist_mrr_url,
                metadata_prefixes=["oai_dc", "oai_all", "oai_software", "oai_service", "oai_org"],
                existing_dir=1, verbose=True)
mediaserver_processor/helpers.py | jobveldhuis/python-mediaserver-processor | 0 | 6617897 | <reponame>jobveldhuis/python-mediaserver-processor<filename>mediaserver_processor/helpers.py
import re
import os
import yaml
from watchgod import DefaultDirWatcher
import logging
class Config(dict):
    """A plain ``dict`` pre-seeded with the application's default settings.

    Values can be overridden afterwards via :meth:`load`, which merges a
    yaml template into the mapping.
    """

    def __init__(self, *args, **kwargs):
        super(Config, self).__init__(*args, **kwargs)

        # Seed the defaults (these intentionally take precedence over any
        # constructor-supplied values; override them by loading a yaml
        # template instead of editing the literals below).
        self.update({
            # Directories
            'DIRECTORIES': {
                'QUEUE_DIR': './queue',
                'OUT_DIR': './out',
                'TMP_DIR': './tmp',
                'ORIGINALS_DIR': './originals',
                'LOG_DIR': './logs'
            },

            # Actions
            'ALLOWED_FILE_TYPES': ['jpg', 'jpeg', 'png'],
            'FILE_TYPE_TRANSPARENT': 'png',
            'FILE_TYPE_NONTRANSPARENT': 'jpeg',
            'ALWAYS_SAVE_AS': ['webp'],
            'SOURCE_SET': [(100, 100), (250, 250)],
            'OPTIMIZE': True,
            'HASH_FILE_NAMES': False,
            'PROCESS_LEFTOVER_IMAGES': True,

            # Unknown (non png/jpg/jpeg) or unprocessable files
            'HARD_KEEP_FILE_TYPE': True,
            'HARD_DELETE_UNKNOWN_TYPES': True,
            'HARD_DELETE_UNPROCESSABLE': True,

            # File permissions
            'OVERWRITE_FILE_PERMISSIONS': False,
            'FILE_PERMISSIONS': None,

            # Safety: guard against decompression bombs
            'MAX_IMAGE_PIXELS': 10000000,
            'IGNORE_COMPRESSION_BOMBS': True,

            # Logging
            'DISABLE_LOGGING': False,
            'LOG_LEVEL': logging.INFO,
            'LOG_FILE_NAME': 'mediaserver',
        })

    def load(self, file):
        """Merge the key/value pairs from the yaml file *file* into this
        configuration, overwriting existing keys where they collide.
        """
        for key, value in load_yaml(file).items():
            self[key] = value
class FileWatcher(DefaultDirWatcher):
    """Watches a directory for changes to regular files.

    Hidden entries (names starting with ``.`` or ``_``) and directory
    events are ignored.
    """

    def __init__(self, root_path):
        # Pattern matching file names that should be treated as hidden.
        self.include_pattern = re.compile(r"^[._]")
        super().__init__(root_path)

    def should_watch_file(self, entry):
        """Return True unless *entry*'s name starts with '.' or '_'.

        Parameters
        ----------
        entry : os.DirEntry
            The file found in the directory.
        """
        return self.include_pattern.match(entry.name) is None

    def should_watch_dir(self, entry):
        """Directory changes are never watched; always returns False."""
        return False
def is_yaml(path):
    """Return True if *path* carries a yaml extension (``.yaml``/``.yml``).

    Parameters
    ----------
    path : str
        Relative path of the file to check.
    """
    return path.endswith(('.yaml', '.yml'))
def load_yaml(file):
    """Load a yaml file into a Python dictionary.

    Parameters
    ----------
    file : str
        Relative path to the yaml file to load.

    Raises
    ------
    ValueError
        If *file* does not have a yaml extension.

    Returns
    -------
    dict
        The mapping parsed from the yaml file.
    """
    if not is_yaml(file):
        # BUG FIX: the bare ValueError() carried no message; include the
        # offending path so the error is actionable.
        raise ValueError(f'not a yaml file: {file!r}')
    with open(file, 'r') as f:
        return yaml.load(f, Loader=yaml.FullLoader)
| import re
import os
import yaml
from watchgod import DefaultDirWatcher
import logging
class Config(dict):
    """A plain ``dict`` pre-seeded with the application's default settings.

    Values can be overridden afterwards via :meth:`load`, which merges a
    yaml template into the mapping.
    """

    def __init__(self, *args, **kwargs):
        super(Config, self).__init__(*args, **kwargs)

        # Seed the defaults (these intentionally take precedence over any
        # constructor-supplied values; override them by loading a yaml
        # template instead of editing the literals below).
        self.update({
            # Directories
            'DIRECTORIES': {
                'QUEUE_DIR': './queue',
                'OUT_DIR': './out',
                'TMP_DIR': './tmp',
                'ORIGINALS_DIR': './originals',
                'LOG_DIR': './logs'
            },

            # Actions
            'ALLOWED_FILE_TYPES': ['jpg', 'jpeg', 'png'],
            'FILE_TYPE_TRANSPARENT': 'png',
            'FILE_TYPE_NONTRANSPARENT': 'jpeg',
            'ALWAYS_SAVE_AS': ['webp'],
            'SOURCE_SET': [(100, 100), (250, 250)],
            'OPTIMIZE': True,
            'HASH_FILE_NAMES': False,
            'PROCESS_LEFTOVER_IMAGES': True,

            # Unknown (non png/jpg/jpeg) or unprocessable files
            'HARD_KEEP_FILE_TYPE': True,
            'HARD_DELETE_UNKNOWN_TYPES': True,
            'HARD_DELETE_UNPROCESSABLE': True,

            # File permissions
            'OVERWRITE_FILE_PERMISSIONS': False,
            'FILE_PERMISSIONS': None,

            # Safety: guard against decompression bombs
            'MAX_IMAGE_PIXELS': 10000000,
            'IGNORE_COMPRESSION_BOMBS': True,

            # Logging
            'DISABLE_LOGGING': False,
            'LOG_LEVEL': logging.INFO,
            'LOG_FILE_NAME': 'mediaserver',
        })

    def load(self, file):
        """Merge the key/value pairs from the yaml file *file* into this
        configuration, overwriting existing keys where they collide.
        """
        for key, value in load_yaml(file).items():
            self[key] = value
class FileWatcher(DefaultDirWatcher):
    """Watches a directory for changes to regular files.

    Hidden entries (names starting with ``.`` or ``_``) and directory
    events are ignored.
    """

    def __init__(self, root_path):
        # Pattern matching file names that should be treated as hidden.
        self.include_pattern = re.compile(r"^[._]")
        super().__init__(root_path)

    def should_watch_file(self, entry):
        """Return True unless *entry*'s name starts with '.' or '_'.

        Parameters
        ----------
        entry : os.DirEntry
            The file found in the directory.
        """
        return self.include_pattern.match(entry.name) is None

    def should_watch_dir(self, entry):
        """Directory changes are never watched; always returns False."""
        return False
def is_yaml(path):
    """Return True if *path* carries a yaml extension (``.yaml``/``.yml``).

    Parameters
    ----------
    path : str
        Relative path of the file to check.
    """
    return path.endswith(('.yaml', '.yml'))
def load_yaml(file):
    """Load a yaml file into a Python dictionary.

    Parameters
    ----------
    file : str
        Relative path to the yaml file to load.

    Raises
    ------
    ValueError
        If *file* does not have a yaml extension.

    Returns
    -------
    dict
        The mapping parsed from the yaml file.
    """
    if not is_yaml(file):
        # BUG FIX: the bare ValueError() carried no message; include the
        # offending path so the error is actionable.
        raise ValueError(f'not a yaml file: {file!r}')
    with open(file, 'r') as f:
        return yaml.load(f, Loader=yaml.FullLoader)
Enemy.py | FaiZaman/DurSeek | 0 | 6617898 | <reponame>FaiZaman/DurSeek
import pygame
import random as rand
from Entity import Entity
black_list = []
explosion_list = []
# Pre-load animation frames once at import time:
# 43 walking frames for the black enemy variant ...
for n in range(1, 44):
    black_list.append(pygame.image.load("assets/black_enemy/L" + str(n) + ".png"))
# ... and 90 explosion frames shared by all enemies.
for m in range(1, 91):
    explosion_list.append(pygame.image.load("assets/explosions/E (" + str(m) + ").png"))
class Enemy(Entity):
    """An enemy sprite that marches left and can play an explosion.

    Roughly 2/3 of spawns are the slower "red" variant; the rest are the
    faster "black" variant.
    """
    # 10 walking frames for the red variant, loaded once per class.
    red_list = [pygame.image.load("assets/red_enemy/L1.png"), pygame.image.load("assets/red_enemy/L2.png"),\
                pygame.image.load("assets/red_enemy/L3.png"), pygame.image.load("assets/red_enemy/L4.png"),\
                pygame.image.load("assets/red_enemy/L5.png"), pygame.image.load("assets/red_enemy/L6.png"),\
                pygame.image.load("assets/red_enemy/L7.png"), pygame.image.load("assets/red_enemy/L8.png"),\
                pygame.image.load("assets/red_enemy/L9.png"), pygame.image.load("assets/red_enemy/L10.png")]
    # Share the module-level frame lists as class attributes.
    black_list = black_list
    explosion_list = explosion_list
    def __init__(self, x, y):
        self.is_red = True
        # ~67% chance of a red (slow) enemy, otherwise black (fast).
        if rand.random() >= 0.33:
            super().__init__(x, y, self.red_list[0])
            self.speed = 2
        else:
            super().__init__(x, y, self.black_list[0])
            self.is_red = False
            self.speed = 4
        self.steps = 0          # walk-animation counter, advanced each frame
        self.exploding = False  # once True, play the explosion instead
        self.explode_count = 0  # index into explosion_list
    def set_image(self):
        if not(self.exploding):
            if self.is_red:
                # 10 frames shown for 3 ticks each -> wrap after 30 steps.
                if self.steps + 1 > 30:
                    self.steps = 0
                self.image = self.red_list[self.steps//3]
            else:
                # 43 frames shown for 3 ticks each -> wrap after 129 steps.
                if self.steps + 1 > 129:
                    self.steps = 0
                self.image = self.black_list[self.steps//3]
            self.steps += 1
            self.rect.x -= self.speed  # march left
        else:
            # Advance the 90-frame explosion, then remove the sprite.
            self.image = self.explosion_list[self.explode_count]
            self.explode_count += 1
            if self.explode_count == 90:
                self.kill()
| import pygame
import random as rand
from Entity import Entity
black_list = []
explosion_list = []
# Pre-load animation frames once at import time:
# 43 walking frames for the black enemy variant ...
for n in range(1, 44):
    black_list.append(pygame.image.load("assets/black_enemy/L" + str(n) + ".png"))
# ... and 90 explosion frames shared by all enemies.
for m in range(1, 91):
    explosion_list.append(pygame.image.load("assets/explosions/E (" + str(m) + ").png"))
class Enemy(Entity):
    """An enemy sprite that marches left and can play an explosion.

    Roughly 2/3 of spawns are the slower "red" variant; the rest are the
    faster "black" variant.
    """
    # 10 walking frames for the red variant, loaded once per class.
    red_list = [pygame.image.load("assets/red_enemy/L1.png"), pygame.image.load("assets/red_enemy/L2.png"),\
                pygame.image.load("assets/red_enemy/L3.png"), pygame.image.load("assets/red_enemy/L4.png"),\
                pygame.image.load("assets/red_enemy/L5.png"), pygame.image.load("assets/red_enemy/L6.png"),\
                pygame.image.load("assets/red_enemy/L7.png"), pygame.image.load("assets/red_enemy/L8.png"),\
                pygame.image.load("assets/red_enemy/L9.png"), pygame.image.load("assets/red_enemy/L10.png")]
    # Share the module-level frame lists as class attributes.
    black_list = black_list
    explosion_list = explosion_list
    def __init__(self, x, y):
        self.is_red = True
        # ~67% chance of a red (slow) enemy, otherwise black (fast).
        if rand.random() >= 0.33:
            super().__init__(x, y, self.red_list[0])
            self.speed = 2
        else:
            super().__init__(x, y, self.black_list[0])
            self.is_red = False
            self.speed = 4
        self.steps = 0          # walk-animation counter, advanced each frame
        self.exploding = False  # once True, play the explosion instead
        self.explode_count = 0  # index into explosion_list
    def set_image(self):
        if not(self.exploding):
            if self.is_red:
                # 10 frames shown for 3 ticks each -> wrap after 30 steps.
                if self.steps + 1 > 30:
                    self.steps = 0
                self.image = self.red_list[self.steps//3]
            else:
                # 43 frames shown for 3 ticks each -> wrap after 129 steps.
                if self.steps + 1 > 129:
                    self.steps = 0
                self.image = self.black_list[self.steps//3]
            self.steps += 1
            self.rect.x -= self.speed  # march left
        else:
            # Advance the 90-frame explosion, then remove the sprite.
            self.image = self.explosion_list[self.explode_count]
            self.explode_count += 1
            if self.explode_count == 90:
                self.kill()
app/backend/reservationapp/booking/api/urls.py | wojtiwojt/reservation-app | 0 | 6617899 | <gh_stars>0
from django.urls import path
from booking.api import views
urlpatterns = [
path(
"taken_days_for_calendar/",
views.TakenDatesViewSet.as_view(),
name="reservations",
),
]
| from django.urls import path
from booking.api import views
urlpatterns = [
path(
"taken_days_for_calendar/",
views.TakenDatesViewSet.as_view(),
name="reservations",
),
] | none | 1 | 1.721128 | 2 | |
credit_card/credit_card_validator.py | sebanie15/pwd_secure | 0 | 6617900 | """
Credit card validation - how to do it?
Nowadays - a standard payment card is defined by ISO IEC_7812 standards. So according to the ISO standard, length
credit card number is 16 digits. It is validated by the so-called Luhn's algorithm. Therefore, similar to the PESEL
algorithm, at the end of the sequence of numbers there is a check digit. With its help, it is also possible to calculate
the IMEI number visible on cell phones.
Example of a credit card number: 6123 2462 0532 2891:
6 - Economy Domain Identifier - informs about the field accompanied by the card number:
1,2 - airlines,
3 - travel and entertainment,
4, 5 - banking, finance,
6 - trade, banking,
7 - oil industry,
8 - telecommunications,
9 - to be determined by standardization bodies,
123 24 - Publisher Identification Number, e.g. MasterCard, Visa,
62 0532 289 - Personal Account Identifier, an individual number assigned to a specific personal account,
1 - check digit
The number is validated by doubling the digits in the even places in the card number.
The number 9 is subtracted from products greater than 9. All digits are added up in sequence - including those on
odd positions. To the obtained number, add such a digit that the result is a multiple of 10.
The added number is a check digit.
source: https://www.czerwona-skarbonka.pl/walidator-danych-walidacja-pesel-regon-nip-krok-po-kroku/
"""
from validator_base import Validator
from text_validator import DigitsValidator
from .credit_card_exceptions import CreditCardException
class CreditCardValidator(Validator):
    """Validates a 16-digit credit card number with the Luhn algorithm."""

    CC_NUMBER_LENGTH = 16

    def __init__(self, cc_number: str):
        self.cc_number = cc_number

    def checksum(self) -> int:
        """Compute the Luhn check digit for ``self.cc_number``.

        Digits at even 0-based positions (0, 2, ..., 14) are doubled and
        9 is subtracted from any product greater than 9; digits at odd
        positions (except the trailing check digit) are taken as-is. The
        check digit is whatever must be added so the total becomes a
        multiple of 10.

        Returns:
            int: the expected check digit (0-9).
        """
        odd_digits = [int(odd_digit) for odd_digit in self.cc_number[1:-1:2]]
        even_digits = [
            int(even_digit) * 2 if int(even_digit) * 2 <= 9 else int(even_digit) * 2 - 9
            for even_digit in self.cc_number[:-1:2]
        ]
        sum_of_digits = sum(odd_digits) + sum(even_digits)
        # BUG FIX: the old `10 - (sum % 10)` returned 10 (not 0) whenever
        # the sum was already a multiple of 10; a check digit must be 0-9.
        return (10 - sum_of_digits % 10) % 10

    def is_valid(self) -> bool:
        """Check the length (16 digits) and the Luhn check digit.

        Returns:
            bool: True when the card number is valid.

        Raises:
            CreditCardException: when the check digit does not match or
                the number fails the digit/length validation.
        """
        if DigitsValidator(self.cc_number, occurrences=self.CC_NUMBER_LENGTH).is_valid():
            checksum_number = int(self.cc_number[-1])
            # BUG FIX: `checksum()` takes no positional argument — the old
            # call `self.checksum(self.cc_number)` raised TypeError.
            if checksum_number == self.checksum():
                return True
        raise CreditCardException()
| """
Credit card validation - how to do it?
Nowadays - a standard payment card is defined by ISO IEC_7812 standards. So according to the ISO standard, length
credit card number is 16 digits. It is validated by the so-called Luhn's algorithm. Therefore, similar to the PESEL
algorithm, at the end of the sequence of numbers there is a check digit. With its help, it is also possible to calculate
the IMEI number visible on cell phones.
Example of a credit card number: 6123 2462 0532 2891:
6 - Economy Domain Identifier - informs about the field accompanied by the card number:
1,2 - airlines,
3 - travel and entertainment,
4, 5 - banking, finance,
6 - trade, banking,
7 - oil industry,
8 - telecommunications,
9 - to be determined by standardization bodies,
123 24 - Publisher Identification Number, e.g. MasterCard, Visa,
62 0532 289 - Personal Account Identifier, an individual number assigned to a specific personal account,
1 - check digit
The number is validated by doubling the digits in the even places in the card number.
The number 9 is subtracted from products greater than 9. All digits are added up in sequence - including those on
odd positions. To the obtained number, add such a digit that the result is a multiple of 10.
The added number is a check digit.
source: https://www.czerwona-skarbonka.pl/walidator-danych-walidacja-pesel-regon-nip-krok-po-kroku/
"""
from validator_base import Validator
from text_validator import DigitsValidator
from .credit_card_exceptions import CreditCardException
class CreditCardValidator(Validator):
    """Validates a 16-digit credit card number with the Luhn algorithm."""

    CC_NUMBER_LENGTH = 16

    def __init__(self, cc_number: str):
        self.cc_number = cc_number

    def checksum(self) -> int:
        """Compute the Luhn check digit for ``self.cc_number``.

        Digits at even 0-based positions (0, 2, ..., 14) are doubled and
        9 is subtracted from any product greater than 9; digits at odd
        positions (except the trailing check digit) are taken as-is. The
        check digit is whatever must be added so the total becomes a
        multiple of 10.

        Returns:
            int: the expected check digit (0-9).
        """
        odd_digits = [int(odd_digit) for odd_digit in self.cc_number[1:-1:2]]
        even_digits = [
            int(even_digit) * 2 if int(even_digit) * 2 <= 9 else int(even_digit) * 2 - 9
            for even_digit in self.cc_number[:-1:2]
        ]
        sum_of_digits = sum(odd_digits) + sum(even_digits)
        # BUG FIX: the old `10 - (sum % 10)` returned 10 (not 0) whenever
        # the sum was already a multiple of 10; a check digit must be 0-9.
        return (10 - sum_of_digits % 10) % 10

    def is_valid(self) -> bool:
        """Check the length (16 digits) and the Luhn check digit.

        Returns:
            bool: True when the card number is valid.

        Raises:
            CreditCardException: when the check digit does not match or
                the number fails the digit/length validation.
        """
        if DigitsValidator(self.cc_number, occurrences=self.CC_NUMBER_LENGTH).is_valid():
            checksum_number = int(self.cc_number[-1])
            # BUG FIX: `checksum()` takes no positional argument — the old
            # call `self.checksum(self.cc_number)` raised TypeError.
            if checksum_number == self.checksum():
                return True
        raise CreditCardException()
| en | 0.832615 | Credit card validation - how to do it? Nowadays - a standard payment card is defined by ISO IEC_7812 standards. So according to the ISO standard, length credit card number is 16 digits. It is validated by the so-called Luhn's algorithm. Therefore, similar to the PESEL algorithm, at the end of the sequence of numbers there is a check digit. With its help, it is also possible to calculate the IMEI number visible on cell phones. Example of a credit card number: 6123 2462 0532 2891: 6 - Economy Domain Identifier - informs about the field accompanied by the card number: 1,2 - airlines, 3 - travel and entertainment, 4, 5 - banking, finance, 6 - trade, banking, 7 - oil industry, 8 - telecommunications, 9 - to be determined by standardization bodies, 123 24 - Publisher Identification Number, e.g. MasterCard, Visa, 62 0532 289 - Personal Account Identifier, an individual number assigned to a specific personal account, 1 - check digit The number is validated by doubling the digits in the even places in the card number. The number 9 is subtracted from products greater than 9. All digits are added up in sequence - including those on odd positions. To the obtained number, add such a digit that the result is a multiple of 10. The added number is a check digit. source: https://www.czerwona-skarbonka.pl/walidator-danych-walidacja-pesel-regon-nip-krok-po-kroku/ class summary the function calculates a checksum for the credit card Args: cc_number: int Returns: int the function checks if the card number has 16 characters and if the check sum \ is consistent with the sum digit in the card number Returns: bool | 3.944443 | 4 |
01_simple/test_mo.py | jamesETsmith/quantum_chem_julia | 1 | 6617901 | <reponame>jamesETsmith/quantum_chem_julia<filename>01_simple/test_mo.py<gh_stars>1-10
# Sanity-check MO orthonormality for water (RHF/STO-3G) with PySCF.
import numpy as np
from pyscf import gto, scf
# Water geometry (Angstrom); point-group symmetry enabled.
mol = gto.M(atom = """O 0.00000 0.00000 0.11779
H 0.00000 0.75545 -0.47116
H 0.00000 -0.75545 -0.47116""", basis = "sto3g", symmetry=True)
mf = scf.RHF(mol).run()
# MO-basis overlap C^T S_AO C; identity (to rounding) iff MOs are orthonormal.
mo_ovlp = np.einsum("ai,bj,ab->ij", mf.mo_coeff, mf.mo_coeff, mf.get_ovlp())
print(np.round(mo_ovlp, decimals=2))
print(np.round(mf.mo_coeff, decimals=2))
# Row index of the dominant AO coefficient in each MO column.
idx = np.argmax(abs(mf.mo_coeff.real), axis=0)
print(idx)
from pyscf import gto, scf
mol = gto.M(atom = """O 0.00000 0.00000 0.11779
H 0.00000 0.75545 -0.47116
H 0.00000 -0.75545 -0.47116""", basis = "sto3g", symmetry=True)
mf = scf.RHF(mol).run()
mo_ovlp = np.einsum("ai,bj,ab->ij", mf.mo_coeff, mf.mo_coeff, mf.get_ovlp())
print(np.round(mo_ovlp, decimals=2))
print(np.round(mf.mo_coeff, decimals=2))
idx = np.argmax(abs(mf.mo_coeff.real), axis=0)
print(idx) | en | 0.230237 | O 0.00000 0.00000 0.11779 H 0.00000 0.75545 -0.47116 H 0.00000 -0.75545 -0.47116 | 1.965339 | 2 |
ztag/annotations/netgear.py | justinbastress/ztag | 107 | 6617902 | from ztag.annotation import *
class NetGearSmartSwitch(Annotation):
    """Tags NETGEAR smart/plus switches identified by their HTTP page title."""

    protocol = protocols.HTTP
    subprotocol = protocols.HTTP.GET
    port = None

    tests = {
        "netgear_smart_switch": {
            "global_metadata": {
                "manufacturer": Manufacturer.NETGEAR,
                "device_type": Type.SWITCH,
                "product": "Smart Switch",
            },
            "tags": ["embedded", ]
        }
    }

    def process(self, obj, meta):
        # Exact page titles we recognize, mapped to the product they imply.
        known_titles = {
            "NETGEAR Web Smart Switch": "Smart Switch",
            "Netgear Prosafe Plus Switch": "Prosafe Plus Switch",
        }
        product = known_titles.get(obj["title"])
        if product is None:
            return None
        meta.global_metadata.manufacturer = Manufacturer.NETGEAR
        meta.global_metadata.product = product
        meta.global_metadata.device_type = Type.SWITCH
        meta.tags.add("embedded")
        return meta
class NetGearLabeledSwitches(Annotation):
    """Tags NETGEAR switches whose page title reads "NETGEAR <model>"."""

    protocol = protocols.HTTP
    subprotocol = protocols.HTTP.GET
    port = None

    # Models we positively identify from the title's second token.
    VALID_MODELS = {
        "GS724T",
        "GS748Tv5",
        "GS108T",
    }

    def process(self, obj, meta):
        title = obj["title"]
        # Guard clauses: require a "netgear"-prefixed, space-separated title.
        if not title.lower().startswith("netgear") or " " not in title:
            return None
        model = title.split(" ")[1].strip()
        if model not in self.VALID_MODELS:
            return None
        meta.global_metadata.manufacturer = Manufacturer.NETGEAR
        meta.global_metadata.product = model
        meta.global_metadata.device_type = Type.SWITCH
        meta.tags.add("embedded")
        return meta
class NetGearProsafe(Annotation):
    """Tags NETGEAR ProSafe VPN firewalls via their TLS certificate CN."""

    protocol = protocols.HTTPS
    subprotocol = protocols.HTTPS.TLS
    port = None

    tests = {
        "netgear_prosafe": {
            "global_metadata": {
                "manufacturer": Manufacturer.NETGEAR,
                "device_type": Type.FIREWALL,
                "product": "Prosafe VPN Firewall",
            },
            "tags": ["embedded", ]
        }
    }

    def process(self, obj, meta):
        # First (and only inspected) subject common name of the certificate.
        common_name = obj["certificate"]["parsed"]["subject"]["common_name"][0].strip()
        if common_name != "Netgear VPN Firewall":
            return None
        meta.global_metadata.manufacturer = Manufacturer.NETGEAR
        meta.global_metadata.product = "Prosafe VPN Firewall"
        meta.global_metadata.device_type = Type.FIREWALL
        meta.tags.add("embedded")
        return meta
| from ztag.annotation import *
class NetGearSmartSwitch(Annotation):
    """Tags NETGEAR smart/plus switches identified by their HTTP page title."""

    protocol = protocols.HTTP
    subprotocol = protocols.HTTP.GET
    port = None

    tests = {
        "netgear_smart_switch": {
            "global_metadata": {
                "manufacturer": Manufacturer.NETGEAR,
                "device_type": Type.SWITCH,
                "product": "Smart Switch",
            },
            "tags": ["embedded", ]
        }
    }

    def process(self, obj, meta):
        # Exact page titles we recognize, mapped to the product they imply.
        known_titles = {
            "NETGEAR Web Smart Switch": "Smart Switch",
            "Netgear Prosafe Plus Switch": "Prosafe Plus Switch",
        }
        product = known_titles.get(obj["title"])
        if product is None:
            return None
        meta.global_metadata.manufacturer = Manufacturer.NETGEAR
        meta.global_metadata.product = product
        meta.global_metadata.device_type = Type.SWITCH
        meta.tags.add("embedded")
        return meta
class NetGearLabeledSwitches(Annotation):
    """Tags NETGEAR switches whose page title reads "NETGEAR <model>"."""

    protocol = protocols.HTTP
    subprotocol = protocols.HTTP.GET
    port = None

    # Models we positively identify from the title's second token.
    VALID_MODELS = {
        "GS724T",
        "GS748Tv5",
        "GS108T",
    }

    def process(self, obj, meta):
        title = obj["title"]
        # Guard clauses: require a "netgear"-prefixed, space-separated title.
        if not title.lower().startswith("netgear") or " " not in title:
            return None
        model = title.split(" ")[1].strip()
        if model not in self.VALID_MODELS:
            return None
        meta.global_metadata.manufacturer = Manufacturer.NETGEAR
        meta.global_metadata.product = model
        meta.global_metadata.device_type = Type.SWITCH
        meta.tags.add("embedded")
        return meta
class NetGearProsafe(Annotation):
    """Tags NETGEAR ProSafe VPN firewalls via their TLS certificate CN."""

    protocol = protocols.HTTPS
    subprotocol = protocols.HTTPS.TLS
    port = None

    tests = {
        "netgear_prosafe": {
            "global_metadata": {
                "manufacturer": Manufacturer.NETGEAR,
                "device_type": Type.FIREWALL,
                "product": "Prosafe VPN Firewall",
            },
            "tags": ["embedded", ]
        }
    }

    def process(self, obj, meta):
        # First (and only inspected) subject common name of the certificate.
        common_name = obj["certificate"]["parsed"]["subject"]["common_name"][0].strip()
        if common_name != "Netgear VPN Firewall":
            return None
        meta.global_metadata.manufacturer = Manufacturer.NETGEAR
        meta.global_metadata.product = "Prosafe VPN Firewall"
        meta.global_metadata.device_type = Type.FIREWALL
        meta.tags.add("embedded")
        return meta
| none | 1 | 2.172347 | 2 | |
generated-libraries/python/netapp/system/system_node_kernel_info.py | radekg/netapp-ontap-lib-get | 2 | 6617903 | from netapp.system.system_path_version import SystemPathVersion
from netapp.netapp_object import NetAppObject
class SystemNodeKernelInfo(NetAppObject):
    """
    Diagnostics and firmware details of a node.

    Auto-generated ONTAP API binding: each API field is exposed as a
    property whose setter calls ``self.validate`` (inherited from
    ``NetAppObject``) before storing the value.
    NOTE(review): ``describe_properties`` references ``basestring``, so
    this generated module targets Python 2.
    """
    # Backing field for ``node_name``.
    _node_name = None
    @property
    def node_name(self):
        """
        Name of the node
        """
        return self._node_name
    @node_name.setter
    def node_name(self, val):
        # Validate before assignment; None bypasses validation (clears the field).
        if val != None:
            self.validate('node_name', val)
        self._node_name = val
    # Backing field for ``firmware_info``.
    _firmware_info = None
    @property
    def firmware_info(self):
        """
        Firmware path and firmware version
        """
        return self._firmware_info
    @firmware_info.setter
    def firmware_info(self, val):
        if val != None:
            self.validate('firmware_info', val)
        self._firmware_info = val
    # Backing field for ``compilation_flags``.
    _compilation_flags = None
    @property
    def compilation_flags(self):
        """
        Displays the verbose output which currently includes the
        compilation flags
        """
        return self._compilation_flags
    @compilation_flags.setter
    def compilation_flags(self, val):
        if val != None:
            self.validate('compilation_flags', val)
        self._compilation_flags = val
    # Backing field for ``kernel_info``.
    _kernel_info = None
    @property
    def kernel_info(self):
        """
        Kernel path and kernel version
        """
        return self._kernel_info
    @kernel_info.setter
    def kernel_info(self, val):
        if val != None:
            self.validate('kernel_info', val)
        self._kernel_info = val
    @staticmethod
    def get_api_name():
        # ONTAP API name this object maps to.
        return "system-node-kernel-info"
    @staticmethod
    def get_desired_attrs():
        # API attribute names (dashed form) requested from the server.
        return [
            'node-name',
            'firmware-info',
            'compilation-flags',
            'kernel-info',
        ]
    def describe_properties(self):
        # Python-side schema: expected class, list-ness, and requiredness
        # for each property (``basestring`` implies Python 2).
        return {
            'node_name': { 'class': basestring, 'is_list': False, 'required': 'required' },
            'firmware_info': { 'class': SystemPathVersion, 'is_list': True, 'required': 'optional' },
            'compilation_flags': { 'class': basestring, 'is_list': False, 'required': 'optional' },
            'kernel_info': { 'class': SystemPathVersion, 'is_list': True, 'required': 'optional' },
        }
# --- duplicate copy of system_node_kernel_info.py ---
from netapp.system.system_path_version import SystemPathVersion
from netapp.netapp_object import NetAppObject
class SystemNodeKernelInfo(NetAppObject):
    """
    Diagnostics and firmware details of a node
    """
    # Backing storage for the lazily-populated properties below; None means unset.
    _node_name = None
    @property
    def node_name(self):
        """
        Name of the node
        """
        return self._node_name
    @node_name.setter
    def node_name(self, val):
        # PEP 8: compare with None using ``is``/``is not`` -- ``!= None``
        # invokes __eq__ and can misbehave for values with custom equality.
        if val is not None:
            self.validate('node_name', val)
        self._node_name = val
    _firmware_info = None
    @property
    def firmware_info(self):
        """
        Firmware path and firmware version
        """
        return self._firmware_info
    @firmware_info.setter
    def firmware_info(self, val):
        if val is not None:
            self.validate('firmware_info', val)
        self._firmware_info = val
    _compilation_flags = None
    @property
    def compilation_flags(self):
        """
        Displays the verbose output which currently includes the
        compilation flags
        """
        return self._compilation_flags
    @compilation_flags.setter
    def compilation_flags(self, val):
        if val is not None:
            self.validate('compilation_flags', val)
        self._compilation_flags = val
    _kernel_info = None
    @property
    def kernel_info(self):
        """
        Kernel path and kernel version
        """
        return self._kernel_info
    @kernel_info.setter
    def kernel_info(self, val):
        if val is not None:
            self.validate('kernel_info', val)
        self._kernel_info = val
    @staticmethod
    def get_api_name():
        """Return the ONTAP API element name for this object."""
        return "system-node-kernel-info"
    @staticmethod
    def get_desired_attrs():
        """Return the attribute names requested from the API."""
        return [
            'node-name',
            'firmware-info',
            'compilation-flags',
            'kernel-info',
        ]
    def describe_properties(self):
        """Describe each property's type, multiplicity and requiredness."""
        # NOTE(review): ``basestring`` is Python 2 only; this generated module
        # predates Python 3 and will raise NameError there -- confirm target runtime.
        return {
            'node_name': { 'class': basestring, 'is_list': False, 'required': 'required' },
            'firmware_info': { 'class': SystemPathVersion, 'is_list': True, 'required': 'optional' },
            'compilation_flags': { 'class': basestring, 'is_list': False, 'required': 'optional' },
            'kernel_info': { 'class': SystemPathVersion, 'is_list': True, 'required': 'optional' },
        }
# ----------------------------------------------------------------------
# --- explore_of_spotify_datasets.py (BuildweekSpotify/DS_BuildWeek_Dec_2020NEW) ---
# -*- coding: utf-8 -*-
"""explore of spotify_datasets.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/13FClY2Y3TcMTmOczm7j5Dc7g74Lwmbb3
"""
import pandas as pd
# from sqlalchemy import Column, Integer, String, create_engine
# from sqlalchemy.ext.declarative import declarative_base
# --- Load the Spotify datasets straight from GitHub (requires network access) ---
df_track = pd.read_csv('https://raw.githubusercontent.com/vjmiyagi/DS_BuildWeek_Dec_2020/main/csv/data.csv')
df_artist = pd.read_csv('https://raw.githubusercontent.com/vjmiyagi/DS_BuildWeek_Dec_2020/main/csv/data_by_artist.csv')
df_genres = pd.read_csv('https://raw.githubusercontent.com/vjmiyagi/DS_BuildWeek_Dec_2020/main/csv/data_by_genres.csv')
df_year = pd.read_csv('https://raw.githubusercontent.com/vjmiyagi/DS_BuildWeek_Dec_2020/main/csv/data_by_year.csv')
df_track_genres = pd.read_csv('https://raw.githubusercontent.com/vjmiyagi/DS_BuildWeek_Dec_2020/main/csv/data_w_genres.csv')
# --- Basic structure / missing-value overview for each frame ---
print('Track \n')
print(df_track.info())
print(df_track.isnull().sum())
print('Artist \n')
df_artist.info()
print(df_artist.isnull().sum())
print('genres \n')
df_genres.info()
print(df_genres.isnull().sum())
print('Year \n')
df_year.info()
print(df_year.isnull().sum())
print('Track genres \n')
df_track_genres.info()
print(df_track_genres.isnull().sum())
print('Track genres value counts \n')
# NOTE(review): the next two expressions compute results but discard them --
# presumably notebook leftovers; harmless but dead code.
df_track_genres.artists.value_counts().to_frame()
df_track_genres.groupby('genres')['artists'].nunique()
# Number of distinct artists per genre, most diverse genres first.
print(df_track_genres.groupby('genres')['artists'].nunique().to_frame().sort_values(by='artists', ascending=False))
print(df_artist.shape)
df_genres.head()
df_genres['key'].value_counts()
# engine = create_engine('sqlite:///spotify.db')
# df_track_genres.to_sql(con=engine, index=False, name='genres', if_exists='replace')
# df_track.to_sql(con=engine, index=False, name='tracks', if_exists='replace')
# df_genres_artists = df_track_genres[["genres","artists"]]
# print(df_genres_artists)
# df_genres_artists.to_csv('genres_artists.csv',index=False)
# Top-50 genres by popularity (printed) and top-100 written to disk.
print(df_genres.sort_values('popularity',ascending=False).head(50))
top100_genres = df_genres[['genres','popularity']].sort_values('popularity',ascending=False).head(100)
# NOTE(review): hard-coded absolute WSL path -- will fail on other machines.
top100_genres.to_csv('/mnt/e/DataScience/Lambda/DS_BuildWeek_Dec_2020NEW/csv/top100_genres.csv',index=False)
# df_track.merge(df_track_genres,)
# -*- coding: utf-8 -*-
"""explore of spotify_datasets.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/13FClY2Y3TcMTmOczm7j5Dc7g74Lwmbb3
"""
import pandas as pd
# from sqlalchemy import Column, Integer, String, create_engine
# from sqlalchemy.ext.declarative import declarative_base
# --- Load the Spotify datasets straight from GitHub (requires network access) ---
df_track = pd.read_csv('https://raw.githubusercontent.com/vjmiyagi/DS_BuildWeek_Dec_2020/main/csv/data.csv')
df_artist = pd.read_csv('https://raw.githubusercontent.com/vjmiyagi/DS_BuildWeek_Dec_2020/main/csv/data_by_artist.csv')
df_genres = pd.read_csv('https://raw.githubusercontent.com/vjmiyagi/DS_BuildWeek_Dec_2020/main/csv/data_by_genres.csv')
df_year = pd.read_csv('https://raw.githubusercontent.com/vjmiyagi/DS_BuildWeek_Dec_2020/main/csv/data_by_year.csv')
df_track_genres = pd.read_csv('https://raw.githubusercontent.com/vjmiyagi/DS_BuildWeek_Dec_2020/main/csv/data_w_genres.csv')
# --- Basic structure / missing-value overview for each frame ---
print('Track \n')
print(df_track.info())
print(df_track.isnull().sum())
print('Artist \n')
df_artist.info()
print(df_artist.isnull().sum())
print('genres \n')
df_genres.info()
print(df_genres.isnull().sum())
print('Year \n')
df_year.info()
print(df_year.isnull().sum())
print('Track genres \n')
df_track_genres.info()
print(df_track_genres.isnull().sum())
print('Track genres value counts \n')
# NOTE(review): the next two expressions compute results but discard them --
# presumably notebook leftovers; harmless but dead code.
df_track_genres.artists.value_counts().to_frame()
df_track_genres.groupby('genres')['artists'].nunique()
# Number of distinct artists per genre, most diverse genres first.
print(df_track_genres.groupby('genres')['artists'].nunique().to_frame().sort_values(by='artists', ascending=False))
print(df_artist.shape)
df_genres.head()
df_genres['key'].value_counts()
# engine = create_engine('sqlite:///spotify.db')
# df_track_genres.to_sql(con=engine, index=False, name='genres', if_exists='replace')
# df_track.to_sql(con=engine, index=False, name='tracks', if_exists='replace')
# df_genres_artists = df_track_genres[["genres","artists"]]
# print(df_genres_artists)
# df_genres_artists.to_csv('genres_artists.csv',index=False)
# Top-50 genres by popularity (printed) and top-100 written to disk.
print(df_genres.sort_values('popularity',ascending=False).head(50))
top100_genres = df_genres[['genres','popularity']].sort_values('popularity',ascending=False).head(100)
# NOTE(review): hard-coded absolute WSL path -- will fail on other machines.
top100_genres.to_csv('/mnt/e/DataScience/Lambda/DS_BuildWeek_Dec_2020NEW/csv/top100_genres.csv',index=False)
# df_track.merge(df_track_genres,)
# --- azfs/error.py (gsy0911/azfs) ---
from typing import Optional
class AzfsBaseError(Exception):
    """Root of the azfs exception hierarchy."""


class AzfsInputError(AzfsBaseError):
    """Raised when user-supplied input is invalid."""


class AzfsInvalidPathError(AzfsBaseError):
    """Raised when an Azure storage path cannot be interpreted."""


class AzfsImportDecoratorError(AzfsBaseError):
    """Decorator-related error carrying a class-level default message."""

    MESSAGE = ""

    def __init__(self, message: Optional[str] = None):
        # Fall back to the subclass default when no message is supplied.
        if message is None:
            message = self.MESSAGE
        self.message = message

    def __str__(self):
        return self.message


class AzfsDecoratorFileFormatError(AzfsImportDecoratorError):
    MESSAGE = "file format must be `csv` or `pickle`"


class AzfsDecoratorSizeNotMatchedError(AzfsImportDecoratorError):
    MESSAGE = "size of output path and function response not matched"


class AzfsDecoratorReturnTypeError(AzfsImportDecoratorError):
    MESSAGE = "return type of the given function must be `pd.DataFrame`"
# --- duplicate copy of azfs/error.py ---
from typing import Optional
class AzfsBaseError(Exception):
pass
class AzfsInputError(AzfsBaseError):
pass
class AzfsInvalidPathError(AzfsBaseError):
pass
class AzfsImportDecoratorError(AzfsBaseError):
MESSAGE = ""
def __init__(self, message: Optional[str] = None):
self.message = message if message is not None else self.MESSAGE
def __str__(self):
return self.message
class AzfsDecoratorFileFormatError(AzfsImportDecoratorError):
MESSAGE = "file format must be `csv` or `pickle`"
class AzfsDecoratorSizeNotMatchedError(AzfsImportDecoratorError):
MESSAGE = "size of output path and function response not matched"
class AzfsDecoratorReturnTypeError(AzfsImportDecoratorError):
MESSAGE = "return type of the given function must be `pd.DataFrame`"
# ----------------------------------------------------------------------
# --- tests/val_validate_tool_plugin/test_val_validate_tool_plugin.py (tdenewiler/statick-planning) ---
"""Unit tests for the Validate tool plugin."""
import argparse
import os
import subprocess
import sys
import mock
import pytest
from yapsy.PluginManager import PluginManager
import statick_tool
from statick_tool.config import Config
from statick_tool.package import Package
from statick_tool.plugin_context import PluginContext
from statick_tool.plugins.tool.val_validate_tool_plugin import ValValidateToolPlugin
from statick_tool.resources import Resources
from statick_tool.tool_plugin import ToolPlugin
def setup_val_validate_tool_plugin(binary=None):
    """Construct and return an instance of the Validate plugin.

    Args:
        binary: Optional path to the Validate executable; when given it is
            stored on the plugin context as ``val_validate_bin``.

    Returns:
        A configured ``ValValidateToolPlugin`` instance.
    """
    arg_parser = argparse.ArgumentParser()
    # NOTE(review): ``store_false`` means passing the flag *disables* tool
    # output even though the help text says "Show tool output" -- confirm this
    # mirrors the real statick CLI before changing it.
    arg_parser.add_argument(
        "--show-tool-output",
        dest="show_tool_output",
        action="store_false",
        help="Show tool output",
    )
    arg_parser.add_argument("--val-validate-bin", dest="val_validate_bin")
    resources = Resources(
        [os.path.join(os.path.dirname(statick_tool.__file__), "plugins")]
    )
    config = Config(resources.get_file("config.yaml"))
    plugin_context = PluginContext(arg_parser.parse_args([]), resources, config)
    plugin_context.args.output_directory = os.path.dirname(__file__)
    vtp = ValValidateToolPlugin()
    # (Removed stray debug print of the binary path left over from development.)
    if binary:
        plugin_context.args.val_validate_bin = binary
    vtp.set_plugin_context(plugin_context)
    return vtp
def test_val_validate_tool_plugin_found():
    """Test that the plugin manager finds the Validate plugin."""
    manager = PluginManager()
    # The standard plugin directory lives next to statick_tool/__init__.py.
    plugin_dir = os.path.join(os.path.dirname(statick_tool.__file__), "plugins")
    manager.setPluginPlaces([plugin_dir])
    manager.setCategoriesFilter({"Tool": ToolPlugin})
    manager.collectPlugins()
    tool_plugins = manager.getPluginsOfCategory("Tool")
    # One plugin must self-identify as "val_validate" ...
    assert any(
        info.plugin_object.get_name() == "val_validate" for info in tool_plugins
    )
    # ... and carry the human-readable name from its plugin metadata.
    assert any(info.name == "VAL Validate Tool Plugin" for info in tool_plugins)
def test_val_validate_tool_plugin_gather_args():
    """Test that the Validate tool plugin arguments are collected."""
    parser = argparse.ArgumentParser()
    plugin = setup_val_validate_tool_plugin()
    # gather_args must accept a fresh parser without raising.
    plugin.gather_args(parser)
def test_val_validate_tool_plugin_scan_valid():
    """Integration test: Make sure the Validate output hasn't changed."""
    vtp = setup_val_validate_tool_plugin("/opt/val/bin/Validate")
    # Sanity check - make sure Validate exists
    if not vtp.command_exists("/opt/val/bin/Validate"):
        pytest.skip("Couldn't find 'Validate' command, can't run tests")
    elif sys.platform == "win32":
        pytest.skip("Don't know how to run Validate on Windows.")
    # Build a package pointing at the known-good PDDL fixtures next to this file.
    package = Package(
        "valid_package", os.path.join(os.path.dirname(__file__), "valid_package")
    )
    package["pddl_domain_src"] = [
        os.path.join(os.path.dirname(__file__), "valid_package", "domain.pddl"),
    ]
    package["pddl_problem_src"] = [
        os.path.join(os.path.dirname(__file__), "valid_package", "problem.pddl"),
    ]
    issues = vtp.scan(package, "level")
    # A valid domain/problem pair should yield no findings.
    assert not issues
def test_val_validate_tool_plugin_no_sources():
    """Make sure no issues are found if no sources are provided.

    Expected result: issues is empty
    """
    vtp = setup_val_validate_tool_plugin("/opt/val/bin/Validate")
    # Sanity check - make sure Validate exists
    if not vtp.command_exists("/opt/val/bin/Validate"):
        pytest.skip("Couldn't find 'Validate' command, can't run tests")
    elif sys.platform == "win32":
        pytest.skip("Don't know how to run Validate on Windows.")
    package = Package(
        "valid_package", os.path.join(os.path.dirname(__file__), "valid_package")
    )
    # No domain sources at all -> the plugin has nothing to scan.
    package["pddl_domain_src"] = []
    issues = vtp.scan(package, "level")
    assert not issues
def test_val_validate_tool_plugin_scan_wrong_binary():
    """
    Test what happens when the specified tool binary does not exist.

    Expected result: issues is None
    """
    # No skip needed: the scan is expected to fail fast on a bogus binary path.
    vtp = setup_val_validate_tool_plugin("wrong_binary")
    package = Package(
        "valid_package", os.path.join(os.path.dirname(__file__), "valid_package")
    )
    package["pddl_domain_src"] = [
        os.path.join(os.path.dirname(__file__), "valid_package", "domain.pddl"),
    ]
    package["pddl_problem_src"] = [
        os.path.join(os.path.dirname(__file__), "valid_package", "problem.pddl"),
    ]
    issues = vtp.scan(package, "level")
    # None (as opposed to an empty list) signals that the scan itself failed.
    assert issues is None
def test_val_validate_tool_plugin_parse_valid():
    """Verify that we can parse the normal output of Validate.

    Covers: a clean type-checking transcript, a zero-error summary, a parser
    failure, and a domain-definition problem.
    """
    vtp = setup_val_validate_tool_plugin()
    # Simulated multi-line type-checking transcript.  The original built this
    # with repeated ``output += line`` and no separators, which collapsed the
    # transcript onto one line; join with newlines so parse_output sees the
    # same line structure the real tool emits.
    transcript = [
        "Type-checking move-up",
        "...action passes type checking.",
        "Type-checking move-down",
        "...action passes type checking.",
        "Type-checking board",
        "...action passes type checking.",
        "Type-checking leave",
        "...action passes type checking.",
    ]
    output = "\n".join(transcript)
    issues = vtp.parse_output(output, "test.pddl")
    assert not issues
    # A zero-error/zero-warning summary produces no issues either.
    output = "Errors: 0, warnings: 0"
    issues = vtp.parse_output(output, "test.pddl")
    assert not issues
    # A parser failure maps to issue type "0" with severity "3".
    output = "Error: Parser failed to read file!"
    issues = vtp.parse_output(output, "/home/user/test.pddl")
    assert len(issues) == 1
    assert issues[0].filename == "/home/user/test.pddl"
    assert issues[0].line_number == "0"
    assert issues[0].tool == "val_validate"
    assert issues[0].issue_type == "0"
    assert issues[0].severity == "3"
    assert (
        issues[0].message
        == "Exact file and line number unknown. Parser failed to read file!"
    )
    # A domain-definition problem maps to issue type "1" with severity "3".
    output = "Problem in domain definition!"
    issues = vtp.parse_output(output, "/home/user/test.pddl")
    assert len(issues) == 1
    assert issues[0].filename == "/home/user/test.pddl"
    assert issues[0].line_number == "0"
    assert issues[0].tool == "val_validate"
    assert issues[0].issue_type == "1"
    assert issues[0].severity == "3"
    assert (
        issues[0].message
        == "Exact file and line number unknown. Problem in domain definition!"
    )
def test_val_validate_tool_plugin_parse_invalid():
    """Verify that we can parse the invalid output of Validate."""
    plugin = setup_val_validate_tool_plugin()
    # Text matching none of the known Validate patterns yields no issues.
    issues = plugin.parse_output("invalid text", "test.pddl")
    assert not issues
@mock.patch(
    "statick_tool.plugins.tool.val_validate_tool_plugin.subprocess.check_output"
)
def test_val_validate_tool_plugin_scan_calledprocesserror(mock_subprocess_check_output):
    """
    Test what happens when a CalledProcessError is raised (usually means Validate hit an error).

    Expected result: issues is None
    """
    # Return code 0: the test expects the scan to complete with no issues.
    mock_subprocess_check_output.side_effect = subprocess.CalledProcessError(
        0, "", output="mocked error"
    )
    vtp = setup_val_validate_tool_plugin()
    # Sanity check - make sure Validate exists
    # NOTE(review): unlike the other integration tests there is no win32 skip
    # here -- confirm that is intentional.
    if not vtp.command_exists("/opt/val/bin/Validate"):
        pytest.skip("Couldn't find 'Validate' command, can't run tests")
    package = Package(
        "valid_package", os.path.join(os.path.dirname(__file__), "valid_package")
    )
    package["pddl_domain_src"] = [
        os.path.join(os.path.dirname(__file__), "valid_package", "domain.pddl"),
    ]
    package["pddl_problem_src"] = [
        os.path.join(os.path.dirname(__file__), "valid_package", "problem.pddl"),
    ]
    issues = vtp.scan(package, "level")
    assert not issues
    # Non-zero return code: the scan is expected to report failure (None).
    mock_subprocess_check_output.side_effect = subprocess.CalledProcessError(
        32, "", output="mocked error"
    )
    issues = vtp.scan(package, "level")
    assert issues is None
@mock.patch(
    "statick_tool.plugins.tool.val_validate_tool_plugin.subprocess.check_output"
)
def test_val_validate_tool_plugin_scan_oserror(mock_subprocess_check_output):
    """
    Test what happens when an OSError is raised (usually means Validate doesn't exist).

    Expected result: issues is None
    """
    mock_subprocess_check_output.side_effect = OSError("mocked error")
    vtp = setup_val_validate_tool_plugin()
    # Sanity check - make sure Validate exists
    if not vtp.command_exists("/opt/val/bin/Validate"):
        pytest.skip("Couldn't find 'Validate' command, can't run tests")
    elif sys.platform == "win32":
        pytest.skip("Don't know how to run Validate on Windows.")
    package = Package(
        "valid_package", os.path.join(os.path.dirname(__file__), "valid_package")
    )
    package["pddl_domain_src"] = [
        os.path.join(os.path.dirname(__file__), "valid_package", "domain.pddl"),
    ]
    package["pddl_problem_src"] = [
        os.path.join(os.path.dirname(__file__), "valid_package", "problem.pddl"),
    ]
    issues = vtp.scan(package, "level")
    # OSError during subprocess launch must surface as a failed scan (None).
    assert issues is None
| """Unit tests for the Validate tool plugin."""
import argparse
import os
import subprocess
import sys
import mock
import pytest
from yapsy.PluginManager import PluginManager
import statick_tool
from statick_tool.config import Config
from statick_tool.package import Package
from statick_tool.plugin_context import PluginContext
from statick_tool.plugins.tool.val_validate_tool_plugin import ValValidateToolPlugin
from statick_tool.resources import Resources
from statick_tool.tool_plugin import ToolPlugin
def setup_val_validate_tool_plugin(binary=None):
    """Construct and return an instance of the Validate plugin.

    Args:
        binary: Optional path to the Validate executable; when given it is
            stored on the plugin context as ``val_validate_bin``.

    Returns:
        A configured ``ValValidateToolPlugin`` instance.
    """
    arg_parser = argparse.ArgumentParser()
    # NOTE(review): ``store_false`` means passing the flag *disables* tool
    # output even though the help text says "Show tool output" -- confirm this
    # mirrors the real statick CLI before changing it.
    arg_parser.add_argument(
        "--show-tool-output",
        dest="show_tool_output",
        action="store_false",
        help="Show tool output",
    )
    arg_parser.add_argument("--val-validate-bin", dest="val_validate_bin")
    resources = Resources(
        [os.path.join(os.path.dirname(statick_tool.__file__), "plugins")]
    )
    config = Config(resources.get_file("config.yaml"))
    plugin_context = PluginContext(arg_parser.parse_args([]), resources, config)
    plugin_context.args.output_directory = os.path.dirname(__file__)
    vtp = ValValidateToolPlugin()
    # (Removed stray debug print of the binary path left over from development.)
    if binary:
        plugin_context.args.val_validate_bin = binary
    vtp.set_plugin_context(plugin_context)
    return vtp
def test_val_validate_tool_plugin_found():
    """Test that the plugin manager finds the Validate plugin."""
    manager = PluginManager()
    # The standard plugin directory lives next to statick_tool/__init__.py.
    plugin_dir = os.path.join(os.path.dirname(statick_tool.__file__), "plugins")
    manager.setPluginPlaces([plugin_dir])
    manager.setCategoriesFilter({"Tool": ToolPlugin})
    manager.collectPlugins()
    tool_plugins = manager.getPluginsOfCategory("Tool")
    # One plugin must self-identify as "val_validate" ...
    assert any(
        info.plugin_object.get_name() == "val_validate" for info in tool_plugins
    )
    # ... and carry the human-readable name from its plugin metadata.
    assert any(info.name == "VAL Validate Tool Plugin" for info in tool_plugins)
def test_val_validate_tool_plugin_gather_args():
    """Test that the Validate tool plugin arguments are collected."""
    parser = argparse.ArgumentParser()
    plugin = setup_val_validate_tool_plugin()
    # gather_args must accept a fresh parser without raising.
    plugin.gather_args(parser)
def test_val_validate_tool_plugin_scan_valid():
    """Integration test: Make sure the Validate output hasn't changed."""
    vtp = setup_val_validate_tool_plugin("/opt/val/bin/Validate")
    # Sanity check - make sure Validate exists
    if not vtp.command_exists("/opt/val/bin/Validate"):
        pytest.skip("Couldn't find 'Validate' command, can't run tests")
    elif sys.platform == "win32":
        pytest.skip("Don't know how to run Validate on Windows.")
    # Build a package pointing at the known-good PDDL fixtures next to this file.
    package = Package(
        "valid_package", os.path.join(os.path.dirname(__file__), "valid_package")
    )
    package["pddl_domain_src"] = [
        os.path.join(os.path.dirname(__file__), "valid_package", "domain.pddl"),
    ]
    package["pddl_problem_src"] = [
        os.path.join(os.path.dirname(__file__), "valid_package", "problem.pddl"),
    ]
    issues = vtp.scan(package, "level")
    # A valid domain/problem pair should yield no findings.
    assert not issues
def test_val_validate_tool_plugin_no_sources():
    """Make sure no issues are found if no sources are provided.

    Expected result: issues is empty
    """
    vtp = setup_val_validate_tool_plugin("/opt/val/bin/Validate")
    # Sanity check - make sure Validate exists
    if not vtp.command_exists("/opt/val/bin/Validate"):
        pytest.skip("Couldn't find 'Validate' command, can't run tests")
    elif sys.platform == "win32":
        pytest.skip("Don't know how to run Validate on Windows.")
    package = Package(
        "valid_package", os.path.join(os.path.dirname(__file__), "valid_package")
    )
    # No domain sources at all -> the plugin has nothing to scan.
    package["pddl_domain_src"] = []
    issues = vtp.scan(package, "level")
    assert not issues
def test_val_validate_tool_plugin_scan_wrong_binary():
    """
    Test what happens when the specified tool binary does not exist.

    Expected result: issues is None
    """
    # No skip needed: the scan is expected to fail fast on a bogus binary path.
    vtp = setup_val_validate_tool_plugin("wrong_binary")
    package = Package(
        "valid_package", os.path.join(os.path.dirname(__file__), "valid_package")
    )
    package["pddl_domain_src"] = [
        os.path.join(os.path.dirname(__file__), "valid_package", "domain.pddl"),
    ]
    package["pddl_problem_src"] = [
        os.path.join(os.path.dirname(__file__), "valid_package", "problem.pddl"),
    ]
    issues = vtp.scan(package, "level")
    # None (as opposed to an empty list) signals that the scan itself failed.
    assert issues is None
def test_val_validate_tool_plugin_parse_valid():
    """Verify that we can parse the normal output of Validate.

    Covers: a clean type-checking transcript, a zero-error summary, a parser
    failure, and a domain-definition problem.
    """
    vtp = setup_val_validate_tool_plugin()
    # Simulated multi-line type-checking transcript.  The original built this
    # with repeated ``output += line`` and no separators, which collapsed the
    # transcript onto one line; join with newlines so parse_output sees the
    # same line structure the real tool emits.
    transcript = [
        "Type-checking move-up",
        "...action passes type checking.",
        "Type-checking move-down",
        "...action passes type checking.",
        "Type-checking board",
        "...action passes type checking.",
        "Type-checking leave",
        "...action passes type checking.",
    ]
    output = "\n".join(transcript)
    issues = vtp.parse_output(output, "test.pddl")
    assert not issues
    # A zero-error/zero-warning summary produces no issues either.
    output = "Errors: 0, warnings: 0"
    issues = vtp.parse_output(output, "test.pddl")
    assert not issues
    # A parser failure maps to issue type "0" with severity "3".
    output = "Error: Parser failed to read file!"
    issues = vtp.parse_output(output, "/home/user/test.pddl")
    assert len(issues) == 1
    assert issues[0].filename == "/home/user/test.pddl"
    assert issues[0].line_number == "0"
    assert issues[0].tool == "val_validate"
    assert issues[0].issue_type == "0"
    assert issues[0].severity == "3"
    assert (
        issues[0].message
        == "Exact file and line number unknown. Parser failed to read file!"
    )
    # A domain-definition problem maps to issue type "1" with severity "3".
    output = "Problem in domain definition!"
    issues = vtp.parse_output(output, "/home/user/test.pddl")
    assert len(issues) == 1
    assert issues[0].filename == "/home/user/test.pddl"
    assert issues[0].line_number == "0"
    assert issues[0].tool == "val_validate"
    assert issues[0].issue_type == "1"
    assert issues[0].severity == "3"
    assert (
        issues[0].message
        == "Exact file and line number unknown. Problem in domain definition!"
    )
def test_val_validate_tool_plugin_parse_invalid():
    """Verify that we can parse the invalid output of Validate."""
    plugin = setup_val_validate_tool_plugin()
    # Text matching none of the known Validate patterns yields no issues.
    issues = plugin.parse_output("invalid text", "test.pddl")
    assert not issues
@mock.patch(
    "statick_tool.plugins.tool.val_validate_tool_plugin.subprocess.check_output"
)
def test_val_validate_tool_plugin_scan_calledprocesserror(mock_subprocess_check_output):
    """
    Test what happens when a CalledProcessError is raised (usually means Validate hit an error).

    Expected result: issues is None
    """
    # Return code 0: the test expects the scan to complete with no issues.
    mock_subprocess_check_output.side_effect = subprocess.CalledProcessError(
        0, "", output="mocked error"
    )
    vtp = setup_val_validate_tool_plugin()
    # Sanity check - make sure Validate exists
    # NOTE(review): unlike the other integration tests there is no win32 skip
    # here -- confirm that is intentional.
    if not vtp.command_exists("/opt/val/bin/Validate"):
        pytest.skip("Couldn't find 'Validate' command, can't run tests")
    package = Package(
        "valid_package", os.path.join(os.path.dirname(__file__), "valid_package")
    )
    package["pddl_domain_src"] = [
        os.path.join(os.path.dirname(__file__), "valid_package", "domain.pddl"),
    ]
    package["pddl_problem_src"] = [
        os.path.join(os.path.dirname(__file__), "valid_package", "problem.pddl"),
    ]
    issues = vtp.scan(package, "level")
    assert not issues
    # Non-zero return code: the scan is expected to report failure (None).
    mock_subprocess_check_output.side_effect = subprocess.CalledProcessError(
        32, "", output="mocked error"
    )
    issues = vtp.scan(package, "level")
    assert issues is None
@mock.patch(
    "statick_tool.plugins.tool.val_validate_tool_plugin.subprocess.check_output"
)
def test_val_validate_tool_plugin_scan_oserror(mock_subprocess_check_output):
    """
    Test what happens when an OSError is raised (usually means Validate doesn't exist).

    Expected result: issues is None
    """
    mock_subprocess_check_output.side_effect = OSError("mocked error")
    vtp = setup_val_validate_tool_plugin()
    # Sanity check - make sure Validate exists
    if not vtp.command_exists("/opt/val/bin/Validate"):
        pytest.skip("Couldn't find 'Validate' command, can't run tests")
    elif sys.platform == "win32":
        pytest.skip("Don't know how to run Validate on Windows.")
    package = Package(
        "valid_package", os.path.join(os.path.dirname(__file__), "valid_package")
    )
    package["pddl_domain_src"] = [
        os.path.join(os.path.dirname(__file__), "valid_package", "domain.pddl"),
    ]
    package["pddl_problem_src"] = [
        os.path.join(os.path.dirname(__file__), "valid_package", "problem.pddl"),
    ]
    issues = vtp.scan(package, "level")
    # OSError during subprocess launch must surface as a failed scan (None).
    assert issues is None
# ----------------------------------------------------------------------
# --- spectrocrunch/materials/tests/test_pymca.py (woutdenolf/spectrocrunch) ---
# -*- coding: utf-8 -*-
import unittest
import numpy as np
import os
from testfixtures import TempDirectory
import matplotlib.pyplot as plt
from .. import pymca
from .. import multilayer
from .. import xrayspectrum
from ...geometries import xrf as xrfgeometries
from ...sources import xray as xraysources
from ...detectors import xrf as xrfdetectors
from . import xrf_setup
class test_pymca(unittest.TestCase):
def setUp(self):
self.dir = TempDirectory()
self.energy = [7.5, 8]
def tearDown(self):
self.dir.cleanup()
@unittest.skipIf(
xrfdetectors.compoundfromname.xraylib is None, "xraylib not installed"
)
def test_loadcfg(self):
cfgfile = os.path.join(self.dir.path, "mca.cfg")
h1 = xrf_setup.simple(energy=self.energy)
h1.savepymca(cfgfile)
source = xraysources.factory("synchrotron")
detector = xrfdetectors.factory("XRFDetector", ehole=3.8)
geometry = xrfgeometries.factory(
"LinearXRFGeometry",
detector=detector,
source=source,
zerodistance=0,
detectorposition=0,
positionunits="mm",
)
sample = multilayer.Multilayer(geometry=geometry)
h2 = pymca.PymcaHandle(sample=sample)
h2.loadfrompymca(cfgfile)
np.testing.assert_allclose(h1.mca(), h2.mca())
@unittest.skipIf(
xrfdetectors.compoundfromname.xraylib is None, "xraylib not installed"
)
def test_rates(self):
h = xrf_setup.simple(energy=self.energy, escape=0, snip=False, continuum=1)
if False:
path = "/data/id21/inhouse/wout/tmp/pymcatst"
cfgfile = os.path.join(path, "mca_mixture.cfg")
mcafile = os.path.join(path, "spectrum.mca")
h.savepymca(cfgfile)
h.savemca(mcafile, func=lambda x: x + 1)
y = h.mca()
y += 1
# Fit data
h.setdata(y)
h.configurepymca()
fitresult = h.fit()
# Mass fractions are calculated as follows:
# grouparea = flux.time.grouprate
# grouprate = solidangle/(4.pi).sum_l[massfrac_l.grouprate_l] where l loops over the layers
#
# massfrac_l = self.mcafit._fluoRates[layer][element]["rates"][group] where layer in 1,...,n
# grouprate_l = self.mcafit._fluoRates[layer][element]["mass fraction"]
# sum_l[massfrac_l.grouprate_l] = self.mcafit._fluoRates[0][element]["rates"][group]*
# self.mcafit._fluoRates[0][element]["mass fraction"]
#
# When element in one layer:
# massfrac = area/(flux.time.solidangle/(4.pi).grouprate_l)
#
# When element in more than one layer:
# grouprate_avg = solidangle/(4.pi).massfrac_avg.sum_l[grouprate_l]
#
# When element in more than one layer (per layer as if all intensity came from that layer?):
# massfrac_l = grouparea/(flux.time.solidangle/(4.pi).grouprate_l)
grouprates = h.xraygrouprates(scattering=False, method="fisx")
safrac = h.sample.geometry.solidangle / (4 * np.pi)
np.testing.assert_allclose(safrac, h._pymcainternals_solidanglefrac())
for group in fitresult["fitareas"]:
if not isinstance(group, xrayspectrum.FluoZLine):
continue
element, linegroup = group.element, group.group
grouprate_avg = h.mcafit._fluoRates[0][element]["rates"][
"{} xrays".format(linegroup)
]
grouprate_avg *= safrac
massfrac_avg = h.mcafit._fluoRates[0][element]["mass fraction"]
grouprate = 0.0
grouprate_avg2 = 0.0
massfrac_avg2 = 0.0
npresent = 0
for j in range(h.sample.nlayers):
i = j + 1
if element in h.mcafit._fluoRates[i]:
massfrac_l = h.mcafit._fluoRates[i][element]["mass fraction"]
grouprate_l = h.mcafit._fluoRates[i][element]["rates"][
"{} xrays".format(linegroup)
]
grouprate += massfrac_l * grouprate_l
grouprate_avg2 += grouprate_l
# massfrac_avg2 = max(massfrac_avg2,massfrac_l)
massfrac_avg2 = massfrac_l # just the last one?
npresent += 1
grouprate *= safrac
grouprate_avg2 *= safrac
# TODO: something wrong with mixtures of different elements (fisx vs Elements)
if group in grouprates:
# 1% error fisx vs. Elements?
np.testing.assert_allclose(grouprate, grouprates[group], rtol=1e-2)
np.testing.assert_allclose(grouprate, fitresult["rates"][group])
if npresent == 1:
np.testing.assert_allclose(grouprate_avg * massfrac_avg, grouprate)
else:
np.testing.assert_allclose(massfrac_avg, massfrac_avg2)
np.testing.assert_allclose(grouprate_avg, grouprate_avg2)
np.testing.assert_allclose(
fitresult["massfractions"][group],
fitresult["fitareas"][group] / (h.I0 * grouprate_avg),
)
for j in range(h.sample.nlayers):
i = j + 1
if element in h.mcafit._fluoRates[i]:
grouprate_l = h.mcafit._fluoRates[i][element]["rates"][
"{} xrays".format(linegroup)
]
grouprate = grouprate_l * safrac
np.testing.assert_allclose(
fitresult["lmassfractions"][j][group],
fitresult["fitareas"][group] / (h.I0 * grouprate),
)
# Plot
plt.plot(fitresult["energy"], fitresult["y"], label="data")
plt.plot(fitresult["energy"], fitresult["yfit"], label="pymca")
spectrum = h.xrayspectrum()
spectrum.plot(
fluxtime=h.I0,
histogram=True,
ylog=False,
decompose=False,
backfunc=lambda x: 1,
)
ax = plt.gca()
ax.set_ylim(ymin=np.nanmin(y[np.nonzero(y)]))
ax.set_xlabel("Energy (keV)")
ax.set_ylabel("Intensity (cts)")
plt.legend(loc="best")
# plt.show()
@unittest.skipIf(
    xrfdetectors.compoundfromname.xraylib is None, "xraylib not installed"
)
def test_spectrum(self):
    """Compare the fundamental-parameter (fisx) sum spectrum with pymca's fitted matrix spectrum."""
    h = xrf_setup.complex(
        energy=self.energy,
        escape=0,
        flux=1e10,
        time=1,
        scatter=np.zeros_like(self.energy),
        linear=1,
        emin=2,
        emax=7.4,
    )
    # Disable detector tail/step contributions so both models describe
    # the same (pure Gaussian) peak shape.
    h.sample.geometry.detector.bltail = False
    h.sample.geometry.detector.bstail = False
    h.sample.geometry.detector.bstep = False
    y = h.mca()
    # Prepare fit
    h.setdata(y)
    h.configurepymca()
    # Force config: disable background stripping for a like-for-like comparison
    config = h.mcafit.getConfiguration()
    config["fit"]["stripflag"] = 0
    h.mcafit.configure(config)
    # Fit
    # h.fitgui(loadfromfit=False)
    fitresult = h.fit(loadfromfit=False)
    # Get fundamental MCA and fitted MCA
    spectrum = h.xrayspectrum(method="fisx", scattering=False)
    x, ysum, ylabel = spectrum.sumspectrum(fluxtime=h.I0, histogram=True)
    ypymca = fitresult["interpol_energy"](fitresult["ymatrix"])(x)
    # TODO: Doesn't work due to peak rejection, add it the xrayspectrum
    # np.testing.assert_allclose(ysum,ypymca,rtol=1e-2)
    # Plot
    # x,ygroup,ylabel,names = spectrum.linespectra(fluxtime=h.I0,histogram=True)
    # for name,y in zip(names,ygroup.T):
    #    if "Ce-L3" not in name: # pymca is cutting small peaks
    #        continue
    #    plt.plot(x,y,label=name)
    plt.plot(x, ysum, label="fisx", linewidth=2)
    plt.plot(x, ypymca, label="pymca", linewidth=2)
    plt.legend()
    ax = plt.gca()
    ax.set_yscale("log", basey=10)  # NOTE(review): 'basey' was renamed 'base' in matplotlib >= 3.3 — confirm pinned version
    plt.ylim([0.001, max(ysum)])
    # plt.show()
def test_suite():
    """Collect every test of this module into a single suite."""
    suite = unittest.TestSuite()
    for case_name in ("test_loadcfg", "test_rates", "test_spectrum"):
        suite.addTest(test_pymca(case_name))
    return suite
if __name__ == "__main__":
    import sys
    mysuite = test_suite()
    runner = unittest.TextTestRunner()
    # A non-zero exit status signals failure to CI / the calling shell.
    if not runner.run(mysuite).wasSuccessful():
        sys.exit(1)
| # -*- coding: utf-8 -*-
import unittest
import numpy as np
import os
from testfixtures import TempDirectory
import matplotlib.pyplot as plt
from .. import pymca
from .. import multilayer
from .. import xrayspectrum
from ...geometries import xrf as xrfgeometries
from ...sources import xray as xraysources
from ...detectors import xrf as xrfdetectors
from . import xrf_setup
class test_pymca(unittest.TestCase):
    """Cross-checks between this package's XRF forward model and PyMca.

    Each test builds a sample via ``xrf_setup``, simulates an MCA spectrum,
    and verifies that PyMca configuration round-trips and that fit results
    are consistent with the fundamental-parameter (fisx) calculation.
    """

    def setUp(self):
        # Scratch directory for generated .cfg/.mca files plus the two
        # excitation energies (keV) shared by all tests.
        self.dir = TempDirectory()
        self.energy = [7.5, 8]

    def tearDown(self):
        self.dir.cleanup()

    @unittest.skipIf(
        xrfdetectors.compoundfromname.xraylib is None, "xraylib not installed"
    )
    def test_loadcfg(self):
        """Saving a PyMca config and loading it back must reproduce the MCA."""
        cfgfile = os.path.join(self.dir.path, "mca.cfg")
        h1 = xrf_setup.simple(energy=self.energy)
        h1.savepymca(cfgfile)
        # Rebuild an equivalent handle from scratch, then restore the saved config.
        source = xraysources.factory("synchrotron")
        detector = xrfdetectors.factory("XRFDetector", ehole=3.8)
        geometry = xrfgeometries.factory(
            "LinearXRFGeometry",
            detector=detector,
            source=source,
            zerodistance=0,
            detectorposition=0,
            positionunits="mm",
        )
        sample = multilayer.Multilayer(geometry=geometry)
        h2 = pymca.PymcaHandle(sample=sample)
        h2.loadfrompymca(cfgfile)
        np.testing.assert_allclose(h1.mca(), h2.mca())

    @unittest.skipIf(
        xrfdetectors.compoundfromname.xraylib is None, "xraylib not installed"
    )
    def test_rates(self):
        """Check PyMca fitted rates and mass fractions against fisx group rates."""
        h = xrf_setup.simple(energy=self.energy, escape=0, snip=False, continuum=1)
        if False:  # debugging aid: dump config + spectrum to disk
            path = "/data/id21/inhouse/wout/tmp/pymcatst"
            cfgfile = os.path.join(path, "mca_mixture.cfg")
            mcafile = os.path.join(path, "spectrum.mca")
            h.savepymca(cfgfile)
            h.savemca(mcafile, func=lambda x: x + 1)
        y = h.mca()
        y += 1
        # Fit data
        h.setdata(y)
        h.configurepymca()
        fitresult = h.fit()
        # Mass fractions are calculated as follows:
        # grouparea = flux.time.grouprate
        # grouprate = solidangle/(4.pi).sum_l[massfrac_l.grouprate_l] where l loops over the layers
        #
        # massfrac_l  = self.mcafit._fluoRates[layer][element]["mass fraction"]  where layer in 1,...,n
        # grouprate_l = self.mcafit._fluoRates[layer][element]["rates"][group]
        # sum_l[massfrac_l.grouprate_l] = self.mcafit._fluoRates[0][element]["rates"][group]*
        #                                 self.mcafit._fluoRates[0][element]["mass fraction"]
        #
        # When element in one layer:
        #   massfrac = area/(flux.time.solidangle/(4.pi).grouprate_l)
        #
        # When element in more than one layer:
        #   grouprate_avg = solidangle/(4.pi).massfrac_avg.sum_l[grouprate_l]
        #
        # When element in more than one layer (per layer as if all intensity came from that layer?):
        #   massfrac_l = grouparea/(flux.time.solidangle/(4.pi).grouprate_l)
        grouprates = h.xraygrouprates(scattering=False, method="fisx")
        safrac = h.sample.geometry.solidangle / (4 * np.pi)
        np.testing.assert_allclose(safrac, h._pymcainternals_solidanglefrac())
        for group in fitresult["fitareas"]:
            if not isinstance(group, xrayspectrum.FluoZLine):
                continue
            element, linegroup = group.element, group.group
            # Layer-summed ("average") rate and mass fraction from PyMca internals.
            grouprate_avg = h.mcafit._fluoRates[0][element]["rates"][
                "{} xrays".format(linegroup)
            ]
            grouprate_avg *= safrac
            massfrac_avg = h.mcafit._fluoRates[0][element]["mass fraction"]
            grouprate = 0.0
            grouprate_avg2 = 0.0
            massfrac_avg2 = 0.0
            npresent = 0
            # Recompute the same quantities layer by layer (index 0 is the sum).
            for j in range(h.sample.nlayers):
                i = j + 1
                if element in h.mcafit._fluoRates[i]:
                    massfrac_l = h.mcafit._fluoRates[i][element]["mass fraction"]
                    grouprate_l = h.mcafit._fluoRates[i][element]["rates"][
                        "{} xrays".format(linegroup)
                    ]
                    grouprate += massfrac_l * grouprate_l
                    grouprate_avg2 += grouprate_l
                    # massfrac_avg2 = max(massfrac_avg2,massfrac_l)
                    massfrac_avg2 = massfrac_l  # just the last one?
                    npresent += 1
            grouprate *= safrac
            grouprate_avg2 *= safrac
            # TODO: something wrong with mixtures of different elements (fisx vs Elements)
            if group in grouprates:
                # 1% error fisx vs. Elements?
                np.testing.assert_allclose(grouprate, grouprates[group], rtol=1e-2)
            np.testing.assert_allclose(grouprate, fitresult["rates"][group])
            if npresent == 1:
                np.testing.assert_allclose(grouprate_avg * massfrac_avg, grouprate)
            else:
                np.testing.assert_allclose(massfrac_avg, massfrac_avg2)
                np.testing.assert_allclose(grouprate_avg, grouprate_avg2)
            np.testing.assert_allclose(
                fitresult["massfractions"][group],
                fitresult["fitareas"][group] / (h.I0 * grouprate_avg),
            )
            # Per-layer mass fractions (as if all intensity came from that layer).
            for j in range(h.sample.nlayers):
                i = j + 1
                if element in h.mcafit._fluoRates[i]:
                    grouprate_l = h.mcafit._fluoRates[i][element]["rates"][
                        "{} xrays".format(linegroup)
                    ]
                    grouprate = grouprate_l * safrac
                    np.testing.assert_allclose(
                        fitresult["lmassfractions"][j][group],
                        fitresult["fitareas"][group] / (h.I0 * grouprate),
                    )
        # Plot
        plt.plot(fitresult["energy"], fitresult["y"], label="data")
        plt.plot(fitresult["energy"], fitresult["yfit"], label="pymca")
        spectrum = h.xrayspectrum()
        spectrum.plot(
            fluxtime=h.I0,
            histogram=True,
            ylog=False,
            decompose=False,
            backfunc=lambda x: 1,
        )
        ax = plt.gca()
        ax.set_ylim(ymin=np.nanmin(y[np.nonzero(y)]))
        ax.set_xlabel("Energy (keV)")
        ax.set_ylabel("Intensity (cts)")
        plt.legend(loc="best")
        # plt.show()

    @unittest.skipIf(
        xrfdetectors.compoundfromname.xraylib is None, "xraylib not installed"
    )
    def test_spectrum(self):
        """Compare the fundamental-parameter (fisx) sum spectrum with pymca's fitted matrix spectrum."""
        h = xrf_setup.complex(
            energy=self.energy,
            escape=0,
            flux=1e10,
            time=1,
            scatter=np.zeros_like(self.energy),
            linear=1,
            emin=2,
            emax=7.4,
        )
        # Disable detector tail/step contributions so both models describe
        # the same (pure Gaussian) peak shape.
        h.sample.geometry.detector.bltail = False
        h.sample.geometry.detector.bstail = False
        h.sample.geometry.detector.bstep = False
        y = h.mca()
        # Prepare fit
        h.setdata(y)
        h.configurepymca()
        # Force config: disable background stripping for a like-for-like comparison
        config = h.mcafit.getConfiguration()
        config["fit"]["stripflag"] = 0
        h.mcafit.configure(config)
        # Fit
        # h.fitgui(loadfromfit=False)
        fitresult = h.fit(loadfromfit=False)
        # Get fundamental MCA and fitted MCA
        spectrum = h.xrayspectrum(method="fisx", scattering=False)
        x, ysum, ylabel = spectrum.sumspectrum(fluxtime=h.I0, histogram=True)
        ypymca = fitresult["interpol_energy"](fitresult["ymatrix"])(x)
        # TODO: Doesn't work due to peak rejection, add it the xrayspectrum
        # np.testing.assert_allclose(ysum,ypymca,rtol=1e-2)
        # Plot
        # x,ygroup,ylabel,names = spectrum.linespectra(fluxtime=h.I0,histogram=True)
        # for name,y in zip(names,ygroup.T):
        #    if "Ce-L3" not in name: # pymca is cutting small peaks
        #        continue
        #    plt.plot(x,y,label=name)
        plt.plot(x, ysum, label="fisx", linewidth=2)
        plt.plot(x, ypymca, label="pymca", linewidth=2)
        plt.legend()
        ax = plt.gca()
        ax.set_yscale("log", basey=10)  # NOTE(review): 'basey' was renamed 'base' in matplotlib >= 3.3 — confirm pinned version
        plt.ylim([0.001, max(ysum)])
        # plt.show()
def test_suite():
    """Collect every test of this module into a single suite."""
    suite = unittest.TestSuite()
    for case_name in ("test_loadcfg", "test_rates", "test_spectrum"):
        suite.addTest(test_pymca(case_name))
    return suite
if __name__ == "__main__":
    import sys
    mysuite = test_suite()
    runner = unittest.TextTestRunner()
    # A non-zero exit status signals failure to CI / the calling shell.
    if not runner.run(mysuite).wasSuccessful():
        sys.exit(1)
Newsapi/urls.py | 1104028/StrativNewsFeed | 0 | 6617908 | <reponame>1104028/StrativNewsFeed
from django.urls import path, include
from .views import ApiNews, StatusView
urlpatterns = [
path('allnews', ApiNews.as_view()),
path('status', StatusView.as_view())
] | from django.urls import path, include
from .views import ApiNews, StatusView
urlpatterns = [
path('allnews', ApiNews.as_view()),
path('status', StatusView.as_view())
] | none | 1 | 1.630279 | 2 | |
Code/Ames_splitting.py | mowal/Imputation_Paper | 0 | 6617909 | # -*- coding: utf-8 -*-
"""
Created on Tue Mar 2 10:05:46 2021
@author: Moritz
"""
import pandas as pd
import numpy as np
import math
import random
from sklearn.model_selection import train_test_split
# --- Compound-based split: random 80/20 split over molecules (rows) ---
df_agg = pd.read_csv('mowal/Imputation_Paper/Data/Datasets/Ames_aggregated.csv',index_col=False)
# Split randomly and inspect what sparsity values are obtained per assay.
train,test = train_test_split(df_agg,test_size=0.2,shuffle=True,random_state=23)
# NOTE(review): count_train/count_test (non-NaN counts per assay) are computed
# but never printed or stored — presumably this was used interactively to
# check sparsity; confirm before deleting.
for assay in df_agg.iloc[:,:-1].columns:
    count_train = train.shape[0]
    for i in train[assay]:
        if math.isnan(i) == True:
            count_train-=1
    count_test = test.shape[0]
    for i in test[assay]:
        if math.isnan(i) == True:
            count_test-=1
train.to_csv('mowal/Imputation_Paper/Data/Train_Test_Splits/train_set_compound_based_Ames.csv',index=False)
test.to_csv('mowal/Imputation_Paper/Data/Train_Test_Splits/test_set_compound_based_Ames.csv',index=False)
# --- Assay-based split: 20% of the measured entries of each assay go to test ---
# Get, for each assay, all row indices where data is available (0-6167).
assays = df_agg.columns[:-1]
dict_indices = {}
for assay in assays:
    dict_indices[assay] = []
    for i,j in enumerate(df_agg[assay]):
        if not math.isnan(j):
            dict_indices[assay].append(i)
# Randomly assign 20% of the measured indices of each assay to the test set.
random.seed(a=47)
dict_indices_test = {}
for assay in assays:
    # k = number of data points assigned to the test set for this assay
    k = round(0.2*len(dict_indices[assay]))
    dict_indices_test[assay] = random.sample(dict_indices[assay], k=k)
# Create train_df by blanking (NaN) the test instances.
df_train = df_agg.copy()
for i,row in df_train.iterrows():
    for assay in assays:
        if i in dict_indices_test[assay]:
            df_train.loc[i,assay] = np.nan
# Create test_df by retaining only the test instances.
df_test = df_agg.copy()
for i,row in df_test.iterrows():
    for assay in assays:
        if i not in dict_indices_test[assay]:
            df_test.loc[i,assay] = np.nan
df_train.to_csv('mowal/Imputation_Paper/Data/Train_Test_Splits/train_set_assay_based_Ames.csv',index=False)
df_test.to_csv('mowal/Imputation_Paper/Data/Train_Test_Splits/test_set_assay_based_Ames.csv',index=False)
| # -*- coding: utf-8 -*-
"""
Created on Tue Mar 2 10:05:46 2021
@author: Moritz
"""
import pandas as pd
import numpy as np
import math
import random
from sklearn.model_selection import train_test_split
# --- Compound-based split: random 80/20 split over molecules (rows) ---
df_agg = pd.read_csv('mowal/Imputation_Paper/Data/Datasets/Ames_aggregated.csv',index_col=False)
# Split randomly and inspect what sparsity values are obtained per assay.
train,test = train_test_split(df_agg,test_size=0.2,shuffle=True,random_state=23)
# NOTE(review): count_train/count_test (non-NaN counts per assay) are computed
# but never printed or stored — presumably this was used interactively to
# check sparsity; confirm before deleting.
for assay in df_agg.iloc[:,:-1].columns:
    count_train = train.shape[0]
    for i in train[assay]:
        if math.isnan(i) == True:
            count_train-=1
    count_test = test.shape[0]
    for i in test[assay]:
        if math.isnan(i) == True:
            count_test-=1
train.to_csv('mowal/Imputation_Paper/Data/Train_Test_Splits/train_set_compound_based_Ames.csv',index=False)
test.to_csv('mowal/Imputation_Paper/Data/Train_Test_Splits/test_set_compound_based_Ames.csv',index=False)
# --- Assay-based split: 20% of the measured entries of each assay go to test ---
# Get, for each assay, all row indices where data is available (0-6167).
assays = df_agg.columns[:-1]
dict_indices = {}
for assay in assays:
    dict_indices[assay] = []
    for i,j in enumerate(df_agg[assay]):
        if not math.isnan(j):
            dict_indices[assay].append(i)
# Randomly assign 20% of the measured indices of each assay to the test set.
random.seed(a=47)
dict_indices_test = {}
for assay in assays:
    # k = number of data points assigned to the test set for this assay
    k = round(0.2*len(dict_indices[assay]))
    dict_indices_test[assay] = random.sample(dict_indices[assay], k=k)
# Create train_df by blanking (NaN) the test instances.
df_train = df_agg.copy()
for i,row in df_train.iterrows():
    for assay in assays:
        if i in dict_indices_test[assay]:
            df_train.loc[i,assay] = np.nan
# Create test_df by retaining only the test instances.
df_test = df_agg.copy()
for i,row in df_test.iterrows():
    for assay in assays:
        if i not in dict_indices_test[assay]:
            df_test.loc[i,assay] = np.nan
df_train.to_csv('mowal/Imputation_Paper/Data/Train_Test_Splits/train_set_assay_based_Ames.csv',index=False)
df_test.to_csv('mowal/Imputation_Paper/Data/Train_Test_Splits/test_set_assay_based_Ames.csv',index=False)
| en | 0.826702 | # -*- coding: utf-8 -*- Created on Tue Mar 2 10:05:46 2021 @author: Moritz #compound-based splits #split randomly and see what sparsity values are obtained #assay-based splits #get for each assay all indices where data is available (0-6167) #assign randomly 20% of the indices for each assay to the test set #get an integer k which is the amount of data points assigned to the test set for each assay #create train_df by removing test instances #create test_df by retaining test instances | 2.762253 | 3 |
dirbot/spiders/post.py | jingzhou123/tieba-crawler | 33 | 6617910 | #coding=utf-8
from scrapy import Request
from cookieSpider import CookieSpider as Spider
from scrapy.selector import Selector
from dirbot.settings import TIEBA_NAMES_LIST
from dirbot.items import Post
import logging
class PostSpider(Spider):
    """Crawls Tieba forum listing pages and yields one Post item per thread."""

    name = 'post'
    allowed_domains = ["baidu.com"]

    def _extract_post_id(self, href):  # href = /p/123456789
        """Return the thread id segment of a relative href, or -1 on failure."""
        try:
            return href.split('/')[-1]
        except Exception, e:
            return -1  # threads without an id are ads; filtered out in the pipeline

    def start_requests(self):
        """Build one listing-page request per configured forum name."""
        url_list = map(
            lambda name: ("http://tieba.baidu.com/f?ie=utf-8&kw=" + name),
            TIEBA_NAMES_LIST
        )
        for url in url_list:
            yield Request(url, callback=self.parse)

    def _parse_posts(self, response):
        """Yield a Post item for every thread on a forum listing page."""
        # logging.debug('parsing a post..')
        tieba_name = Selector(response).css('.card_title_fname::text').extract_first().strip()[:-1]  # "XX吧" -> "XX": drop the trailing forum-name suffix
        post_item_sels = Selector(response).css('#thread_list>li')
        # logging.debug('posts total num: %s', len(post_item_sels))
        for sel in post_item_sels:
            item = Post()
            item['id'] = self._extract_post_id(sel.css('.j_th_tit a::attr(href)').extract_first())
            # logging.debug('post id: %s' % (sel.css('.j_th_tit a::attr(href)').extract_first()))
            item['tieba_name'] = tieba_name
            item['title'] = sel.css('.j_th_tit a::text').extract_first()  # long titles are truncated here; the reply spider re-crawls the full title
            item['reply_num'] = sel.css('.threadlist_rep_num::text').extract_first()  # may be a promo label instead of a number; filtered in the pipeline
            item['author_name'] = sel.css('.tb_icon_author a::text').extract_first()
            item['body'] = sel.css('.threadlist_detail .threadlist_abs_onlyline::text').extract_first()
            # body can be missing for ads or other id-less thread types
            if item['body'] is None:
                item['body'] = ''
            else:
                item['body'] = item['body'].strip()  # strip surrounding whitespace/newlines
            # item['post_time'] = sel.css('')  # listing pages only expose last-reply time, not post time
            logging.debug('帖子:%r' % (item))
            yield item

    def should_stop(self, item):
        """Hook for subclasses: return True to stop the crawl early.

        :item: the Post item just parsed
        :returns: bool
        """
        return False

    def parse(self, response):
        """Parse one listing page, then follow pagination.

        :response: the forum listing page response
        :returns: generator of Post items and follow-up Requests
        """
        for item in self._parse_posts(response):
            if not self.should_stop(item):
                yield item
            else:
                return
        if len(Selector(response).css('#frs_list_pager .next')):
            # pagination hrefs are sometimes relative, sometimes absolute
            next_page_url = Selector(response).css('#frs_list_pager .next::attr(href)').extract_first()
            logging.debug('next_page_url %s', next_page_url)
            if -1 != next_page_url.find('http://tieba.baidu.com'):
                yield Request(next_page_url, callback=self.parse)
            else:
                yield Request('http://tieba.baidu.com' + next_page_url, callback=self.parse)
| #coding=utf-8
from scrapy import Request
from cookieSpider import CookieSpider as Spider
from scrapy.selector import Selector
from dirbot.settings import TIEBA_NAMES_LIST
from dirbot.items import Post
import logging
class PostSpider(Spider):
    """Crawls Tieba forum listing pages and yields one Post item per thread."""

    name = 'post'
    allowed_domains = ["baidu.com"]

    def _extract_post_id(self, href):  # href = /p/123456789
        """Return the thread id segment of a relative href, or -1 on failure."""
        try:
            return href.split('/')[-1]
        except Exception, e:
            return -1  # threads without an id are ads; filtered out in the pipeline

    def start_requests(self):
        """Build one listing-page request per configured forum name."""
        url_list = map(
            lambda name: ("http://tieba.baidu.com/f?ie=utf-8&kw=" + name),
            TIEBA_NAMES_LIST
        )
        for url in url_list:
            yield Request(url, callback=self.parse)

    def _parse_posts(self, response):
        """Yield a Post item for every thread on a forum listing page."""
        # logging.debug('parsing a post..')
        tieba_name = Selector(response).css('.card_title_fname::text').extract_first().strip()[:-1]  # "XX吧" -> "XX": drop the trailing forum-name suffix
        post_item_sels = Selector(response).css('#thread_list>li')
        # logging.debug('posts total num: %s', len(post_item_sels))
        for sel in post_item_sels:
            item = Post()
            item['id'] = self._extract_post_id(sel.css('.j_th_tit a::attr(href)').extract_first())
            # logging.debug('post id: %s' % (sel.css('.j_th_tit a::attr(href)').extract_first()))
            item['tieba_name'] = tieba_name
            item['title'] = sel.css('.j_th_tit a::text').extract_first()  # long titles are truncated here; the reply spider re-crawls the full title
            item['reply_num'] = sel.css('.threadlist_rep_num::text').extract_first()  # may be a promo label instead of a number; filtered in the pipeline
            item['author_name'] = sel.css('.tb_icon_author a::text').extract_first()
            item['body'] = sel.css('.threadlist_detail .threadlist_abs_onlyline::text').extract_first()
            # body can be missing for ads or other id-less thread types
            if item['body'] is None:
                item['body'] = ''
            else:
                item['body'] = item['body'].strip()  # strip surrounding whitespace/newlines
            # item['post_time'] = sel.css('')  # listing pages only expose last-reply time, not post time
            logging.debug('帖子:%r' % (item))
            yield item

    def should_stop(self, item):
        """Hook for subclasses: return True to stop the crawl early.

        :item: the Post item just parsed
        :returns: bool
        """
        return False

    def parse(self, response):
        """Parse one listing page, then follow pagination.

        :response: the forum listing page response
        :returns: generator of Post items and follow-up Requests
        """
        for item in self._parse_posts(response):
            if not self.should_stop(item):
                yield item
            else:
                return
        if len(Selector(response).css('#frs_list_pager .next')):
            # pagination hrefs are sometimes relative, sometimes absolute
            next_page_url = Selector(response).css('#frs_list_pager .next::attr(href)').extract_first()
            logging.debug('next_page_url %s', next_page_url)
            if -1 != next_page_url.find('http://tieba.baidu.com'):
                yield Request(next_page_url, callback=self.parse)
            else:
                yield Request('http://tieba.baidu.com' + next_page_url, callback=self.parse)
| zh | 0.406217 | #coding=utf-8 Docstring for PostSpider. # href = /p/123456789 #没有ID的帖子就是广告,在pipeline里要过滤掉 TODO: Docstring for start_requests. :returns: TODO TODO: Docstring for _parse_posts. :response: TODO :returns: TODO #logging.debug('parsing a post..') # XX吧 -> XX #logging.debug('posts total num: %s', len(post_item_sels)) #logging.debug('post id: %s' % (sel.css('.j_th_tit a::attr(href)').extract_first())) # 有时标题过长会被截断,在帖子回复爬虫里再爬一遍完整的标题 # 这里有可能是‘推广’,而非数字,在pipeline里过滤一遍 #遇到取不到帖子内容的情况,有可能是广告或者其它类型的无ID的贴子 #去掉回车和空格 #item['post_time'] = sel.css('') #这里拿不到发贴时间,只有最后回复时间 stop crawl if possible, can be inheritted :item: TODO :returns: TODO TODO: Docstring for pass. :response: TODO :returns: TODO #贴吧的分页有的不是完整的链接 | 2.679739 | 3 |
src/infi/mount_utils/exceptions.py | Infinidat/mount-utils | 0 | 6617911 | from infi.exceptools import InfiException
class MountException(InfiException):
    """Base class for errors derived from mount(8) exit codes."""
    pass
class IncorrectInvocationOrPermissions(MountException):
    """mount(8) exit code 1: incorrect invocation or permissions."""
    pass
class SystemErrorException(MountException):
    """mount(8) exit code 2: system error."""
    pass
class MountInternalBugException(MountException):
    """mount(8) exit code 4: internal mount bug."""
    pass
class UserInterruptException(MountException):
    """mount(8) exit code 8: interrupted by the user."""
    pass
class ProblemWithWritingOrLockingException(MountException):
    """mount(8) exit code 16: problem with writing or locking (see mount(8))."""
    pass
class MountFailureException(MountException):
    """mount(8) exit code 32: mount failure."""
    pass
class SomeMountSucceededException(MountException):
    """mount(8) exit code 64: only some of the requested mounts succeeded."""
    pass
# mount(8) exit status -> exception class describing it
# (used by translate_mount_error below).
ERRORCODES_DICT = {1:IncorrectInvocationOrPermissions,
                   2:SystemErrorException,
                   4:MountInternalBugException,
                   8:UserInterruptException,
                   16:ProblemWithWritingOrLockingException,
                   32:MountFailureException,
                   64:SomeMountSucceededException}
def translate_mount_error(errorno):
    """Map a mount(8) exit status to its exception.

    Known codes yield the matching exception *class*; unknown codes yield a
    ``MountException`` *instance* wrapping the code (same asymmetry as the
    original implementation).
    """
    try:
        return ERRORCODES_DICT[errorno]
    except KeyError:
        return MountException(errorno)
| from infi.exceptools import InfiException
class MountException(InfiException):
    """Base class for errors derived from mount(8) exit codes."""
    pass
class IncorrectInvocationOrPermissions(MountException):
    """mount(8) exit code 1: incorrect invocation or permissions."""
    pass
class SystemErrorException(MountException):
    """mount(8) exit code 2: system error."""
    pass
class MountInternalBugException(MountException):
    """mount(8) exit code 4: internal mount bug."""
    pass
class UserInterruptException(MountException):
    """mount(8) exit code 8: interrupted by the user."""
    pass
class ProblemWithWritingOrLockingException(MountException):
    """mount(8) exit code 16: problem with writing or locking (see mount(8))."""
    pass
class MountFailureException(MountException):
    """mount(8) exit code 32: mount failure."""
    pass
class SomeMountSucceededException(MountException):
    """mount(8) exit code 64: only some of the requested mounts succeeded."""
    pass
# mount(8) exit status -> exception class describing it
# (used by translate_mount_error below).
ERRORCODES_DICT = {1:IncorrectInvocationOrPermissions,
                   2:SystemErrorException,
                   4:MountInternalBugException,
                   8:UserInterruptException,
                   16:ProblemWithWritingOrLockingException,
                   32:MountFailureException,
                   64:SomeMountSucceededException}
def translate_mount_error(errorno):
    """Map a mount(8) exit status to its exception.

    Known codes yield the matching exception *class*; unknown codes yield a
    ``MountException`` *instance* wrapping the code (same asymmetry as the
    original implementation).
    """
    try:
        return ERRORCODES_DICT[errorno]
    except KeyError:
        return MountException(errorno)
| none | 1 | 2.415913 | 2 | |
server/config.py | Cryliss/comp-440 | 0 | 6617912 | <reponame>Cryliss/comp-440
from app import app
from flaskext.mysql import MySQL
# Local MySQL connection settings for the Flask app.
# SECURITY NOTE(review): the credentials and SECRET_KEY are hard-coded and
# committed to source control; they should be loaded from environment
# variables / an .env file instead (tracked as a TODO by the original author).
mysql = MySQL()
app.config['MYSQL_DATABASE_USER'] = 'comp440'
app.config['MYSQL_DATABASE_PASSWORD'] = '<PASSWORD>'
app.config['MYSQL_DATABASE_DB'] = 'blogger'
app.config['MYSQL_DATABASE_HOST'] = 'localhost'
app.config['SECRET_KEY'] = 'supercalifragilisticexpialidocious'
mysql.init_app(app)
| from app import app
from flaskext.mysql import MySQL
# Local MySQL connection settings for the Flask app.
# SECURITY NOTE(review): the credentials and SECRET_KEY are hard-coded and
# committed to source control; they should be loaded from environment
# variables / an .env file instead (tracked as a TODO by the original author).
mysql = MySQL()
app.config['MYSQL_DATABASE_USER'] = 'comp440'
app.config['MYSQL_DATABASE_PASSWORD'] = '<PASSWORD>'
app.config['MYSQL_DATABASE_DB'] = 'blogger'
app.config['MYSQL_DATABASE_HOST'] = 'localhost'
app.config['SECRET_KEY'] = 'supercalifragilisticexpialidocious'
mysql.init_app(app)
MetaScreener/external_sw/mgltools/lib/python2.7/site-packages/SimPy/__init__.py | bio-hpc/metascreener | 8 | 6617913 | #!/usr/bin/env python
# $Revision: 1.1.1.13 $ $Date: 2007/01/08 14:39:27 $
""" SimPy a process-based simulation package in Python
LICENSE:
Copyright (C) 2002,2004,2005,2006,2007 <NAME>, <NAME>
mailto: <EMAIL> and <EMAIL>
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
END OF LICENSE
Contains the following modules:
Simulation - module implementing processes and resources
Monitor - dummy module for backward compatibility
SimulationTrace - module implementing event tracing
SimulationRT - module for simulation speed control
SimulationStep - module for stepping through simulation event by event
SimPlot - Tk-based plotting module
SimGui - Tk-based SimPy GUI module
Lister - module for prettyprinting class instances
__version__ = '$Revision: 1.1.1.13 $ $Date: 2007/01/08 14:39:27 $ kgm'
"""
__SimPyVersion__="1.8"
| #!/usr/bin/env python
# $Revision: 1.1.1.13 $ $Date: 2007/01/08 14:39:27 $
""" SimPy a process-based simulation package in Python
LICENSE:
Copyright (C) 2002,2004,2005,2006,2007 <NAME>, <NAME>
mailto: <EMAIL> and <EMAIL>
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
END OF LICENSE
Contains the following modules:
Simulation - module implementing processes and resources
Monitor - dummy module for backward compatibility
SimulationTrace - module implementing event tracing
SimulationRT - module for simulation speed control
SimulationStep - module for stepping through simulation event by event
SimPlot - Tk-based plotting module
SimGui - Tk-based SimPy GUI module
Lister - module for prettyprinting class instances
__version__ = '$Revision: 1.1.1.13 $ $Date: 2007/01/08 14:39:27 $ kgm'
"""
__SimPyVersion__="1.8"
| en | 0.724827 | #!/usr/bin/env python # $Revision: 1.1.1.13 $ $Date: 2007/01/08 14:39:27 $ SimPy a process-based simulation package in Python
LICENSE:
Copyright (C) 2002,2004,2005,2006,2007 <NAME>, <NAME>
mailto: <EMAIL> and <EMAIL>
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
END OF LICENSE
Contains the following modules:
Simulation - module implementing processes and resources
Monitor - dummy module for backward compatibility
SimulationTrace - module implementing event tracing
SimulationRT - module for simulation speed control
SimulationStep - module for stepping through simulation event by event
SimPlot - Tk-based plotting module
SimGui - Tk-based SimPy GUI module
Lister - module for prettyprinting class instances
__version__ = '$Revision: 1.1.1.13 $ $Date: 2007/01/08 14:39:27 $ kgm' | 1.571689 | 2 |
src/dye/forms.py | martsime/DSSCDB | 7 | 6617914 | <gh_stars>1-10
import pybel
from django import forms
from .helpers import get_DOI_metadata
from .models import Molecule, Spectrum, Performance, Spreadsheet, Contribution, Article
class ArticleForm(forms.Form):
    """Form that resolves a DOI to an :class:`Article` instance.

    After a successful :meth:`is_valid` call, the matched or newly created
    article is available through :meth:`get_model`, together with a flag
    telling whether it was created during this validation.
    """

    doi = forms.CharField(max_length=500, label="DOI", required=True)

    def is_valid(self):
        """Validate the DOI field, then resolve it to an Article.

        Returns True when the DOI maps to an existing article or its
        metadata could be fetched and stored; False otherwise (the error is
        attached to the ``doi`` field).
        """
        # Bug fix: the base validation result used to be ignored, so a
        # missing/over-long DOI fell through with an empty cleaned_data.
        if not super().is_valid():
            return False
        article_doi = self.cleaned_data.get('doi')
        # Reuse an already-imported article when possible (case-insensitive match).
        article = Article.objects.filter(doi__iexact=article_doi).first()
        if article:
            self.model_instance = article
            self.created = False
            return True
        # TODO: handle connection errors from the DOI lookup explicitly;
        # currently any network failure propagates to the caller.
        article_data = get_DOI_metadata(article_doi)
        if not article_data:
            self.add_error('doi', 'DOI not found')
            return False
        article_model = ArticleModelForm(article_data)
        if article_model.is_valid():
            article_model.save()
            self.model_instance = article_model.instance
            self.created = True
            return True
        erred_fields = ', '.join(article_model.errors.keys())
        self.add_error('doi',
                       'DOI provided has incomplete ({}) data. Please contact us regarding this.'.format(
                           erred_fields))
        return False

    def get_model(self):
        """Return ``(article, created)``; only valid after a successful is_valid()."""
        return self.model_instance, self.created
class ArticleModelForm(forms.ModelForm):
    """ModelForm used internally to validate and persist fetched DOI metadata."""

    class Meta:
        model = Article
        fields = [
            'author',
            'title',
            'journal',
            'volume',
            'doi',
            'pages',
            'issue_nr',
            'eid',
            'year',
            'electronic_id',
            'keywords',
        ]
class MoleculeForm(forms.ModelForm):
    """Form for Molecule records entered by contributors."""

    class Meta:
        model = Molecule
        fields = [
            'smiles',
            'inchi',
            'keywords'
        ]

    def validate_unique(self):
        # Model-level uniqueness checks are deliberately disabled here —
        # presumably duplicates are resolved later in the contribution
        # pipeline; TODO confirm before relying on this.
        pass

    # TODO: Add function to make SMILES canonical
    # pybel_mol = pybel.readstring('smiles', instance.smiles)
    # smiles_canonical = molecule.write('can').rstrip()
    # instance.smiles = smiles_canonical
class SpectrumForm(forms.ModelForm):
    """Form for the optical spectrum data attached to a dye."""

    class Meta:
        model = Spectrum
        fields = [
            'absorption_maxima',
            'emission_maxima',
            'solvent',
        ]
class PerformanceForm(forms.ModelForm):
    """Form for a device Performance record tied to an article and molecule."""

    class Meta:
        model = Performance
        fields = [
            'voc',
            'jsc',
            'ff',
            'pce',
            'electrolyte',
            'active_area',
            'co_adsorbent',
            'co_sensitizer',
            'semiconductor',
            'dye_loading',
            'exposure_time',
            'solar_simulator',
            'comment',
        ]

    def is_valid(self, article, molecule):
        """Validate fields and reject an exact duplicate measurement.

        A record is a duplicate when the same article/molecule pair already
        has a Performance row with identical VOC, JSC, FF and PCE values.
        """
        # Bug fix: rely on the base validation result instead of only
        # inspecting self.errors — an unbound form has no errors but is
        # still invalid.
        if not super().is_valid():
            return False
        # exists() copes with several pre-existing duplicates, where the
        # previous .get() call raised MultipleObjectsReturned and crashed.
        # NOTE(review): the lookup keeps using the raw, stringified
        # self.data values as the original did — confirm against how the
        # decimal fields are stored before switching to cleaned_data.
        duplicate = Performance.objects.filter(
            article=article, molecule=molecule,
            voc=str(self.data.get('voc')),
            jsc=str(self.data.get('jsc')),
            ff=str(self.data.get('ff')),
            pce=str(self.data.get('pce')),
        ).exists()
        if duplicate:
            self.add_error('voc', 'The performance measure exists already for the given molecule')
            return False
        return True
    # The previous no-op clean() override (it only called super().clean())
    # was removed; the inherited implementation is equivalent.
class SpreadsheetForm(forms.ModelForm):
    """Upload form for a spreadsheet file used for batch contribution."""

    class Meta:
        model = Spreadsheet
        fields = [
            'file'
        ]
class ApprovalForm(forms.ModelForm):
    """Reviewer form: requires an explicit confirmation checkbox before a
    contribution's status may be changed."""

    confirm = forms.BooleanField(required=True, label="I've thoroughly checked the data", )

    class Meta:
        model = Contribution
        fields = [
            'status'
        ]
class PerformanceRangeSearchForm(forms.Form):
    """Search form with optional min/max bounds on the performance metrics.

    All bounds are optional; decimal precisions mirror the corresponding
    Performance model fields (VOC, JSC, FF, PCE).
    """

    min_voc = forms.DecimalField(label='min VOC', decimal_places=4, max_digits=15, required=False)
    max_voc = forms.DecimalField(label='max VOC', decimal_places=4, max_digits=15, required=False)
    min_jsc = forms.DecimalField(label='min JSC', decimal_places=5, max_digits=15, required=False)
    max_jsc = forms.DecimalField(label='max JSC', decimal_places=5, max_digits=15, required=False)
    min_ff = forms.DecimalField(label='min FF', decimal_places=5, max_digits=13, required=False)
    max_ff = forms.DecimalField(label='max FF', decimal_places=5, max_digits=13, required=False)
    min_pce = forms.DecimalField(label='min PCE', decimal_places=5, max_digits=13, required=False)
    max_pce = forms.DecimalField(label='max PCE', decimal_places=5, max_digits=13, required=False)
class PerformanceKeywordSearchForm(forms.Form):
    """Free-text keyword search over performance records."""

    keyword = forms.CharField(max_length=1000,
                              required=False,
                              widget=forms.TextInput(attrs={'placeholder': 'Free text search'}))
class PerformanceStructureSearchForm(forms.Form):
    """Chemical-structure search form (substructure or similarity)."""

    SUBSTRUCTURE = 'SUB'
    FINGERPRINT = 'FP'
    SEARCH_TYPES = (
        (SUBSTRUCTURE, 'Substructure'),
        (FINGERPRINT, 'Similarity')
    )
    smiles = forms.CharField(max_length=1000, required=False)
    complete_molecule = forms.BooleanField(required=False)
    search_type = forms.ChoiceField(choices=SEARCH_TYPES, label='Structure search type', widget=forms.RadioSelect)
    tanimoto_threshold = forms.DecimalField(decimal_places=2, max_digits=3, initial=0.85, required=True)

    def is_valid(self):
        """Validate the search form, including the optional SMILES string.

        Returns True when the base fields validate and the SMILES (if any)
        parses as a valid SMARTS pattern.
        """
        # Fix: the original discarded the base validation result, so e.g. a
        # missing/invalid tanimoto_threshold never made the form invalid.
        if not super().is_valid():
            return False
        smiles = self.cleaned_data.get('smiles')
        if not smiles:
            # Structure search is optional; an empty pattern is valid.
            return True
        try:
            # A non-empty SMILES must parse as a SMARTS pattern.
            if pybel.Smarts(smiles):
                return True
        except OSError:
            pass
        # Record the failure in both the parse-error and falsy-result cases
        # (the original returned False without any error in the latter).
        self.add_error(None, 'Invalid structure.')
        return False
| import pybel
from django import forms
from .helpers import get_DOI_metadata
from .models import Molecule, Spectrum, Performance, Spreadsheet, Contribution, Article
class ArticleForm(forms.Form):
    """Form that resolves a DOI to an Article model instance.

    If the DOI is unknown, its metadata is fetched and a new Article row is
    created.  On success ``self.model_instance`` holds the Article and
    ``self.created`` tells whether it was newly saved.
    """

    doi = forms.CharField(max_length=500, label="DOI", required=True)

    def is_valid(self):
        """Validate the DOI and resolve it to an Article.

        Returns True on success; on failure the error is attached to the
        ``doi`` field.
        """
        # Fix: the original ignored the base validation result, so an empty
        # DOI still reached the database lookup with a missing value.
        if not super().is_valid():
            return False
        article_doi = self.cleaned_data.get('doi')
        article = Article.objects.filter(doi__iexact=article_doi).first()
        if article:
            # Already known: reuse the existing row.
            self.model_instance = article
            self.created = False
            return True
        # TODO: handle network errors from the DOI metadata service explicitly.
        article_data = get_DOI_metadata(article_doi)
        if not article_data:
            self.add_error('doi', 'DOI not found')
            return False
        article_model = ArticleModelForm(article_data)
        if not article_model.is_valid():
            erred_fields = ', '.join(article_model.errors.keys())
            self.add_error('doi',
                           'DOI provided has incomplete ({}) data. Please contact us regarding this.'.format(
                               erred_fields))
            return False
        article_model.save()
        self.model_instance = article_model.instance
        self.created = True
        return True

    def get_model(self):
        """Return the (Article instance, created_flag) pair set by is_valid()."""
        return self.model_instance, self.created
class ArticleModelForm(forms.ModelForm):
    """ModelForm mapping DOI metadata onto the Article model.

    Instantiated by ArticleForm with the dict returned from
    get_DOI_metadata(), so the listed field names correspond to the keys of
    that metadata dict.
    """

    class Meta:
        model = Article
        fields = [
            'author',
            'title',
            'journal',
            'volume',
            'doi',
            'pages',
            'issue_nr',
            'eid',
            'year',
            'electronic_id',
            'keywords',
        ]
class MoleculeForm(forms.ModelForm):
    """ModelForm for creating/updating a Molecule from SMILES/InChI input."""

    class Meta:
        model = Molecule
        fields = [
            'smiles',
            'inchi',
            'keywords'
        ]

    def validate_unique(self):
        # Deliberately disabled: skips the model-level uniqueness check --
        # presumably so data for an already-known molecule can be submitted
        # again without a form error; verify against the calling code.
        pass

    # TODO: Add function to make SMILES canonical
    # pybel_mol = pybel.readstring('smiles', instance.smiles)
    # smiles_canonical = molecule.write('can').rstrip()
    # instance.smiles = smiles_canonical
class SpectrumForm(forms.ModelForm):
class Meta:
model = Spectrum
fields = [
'absorption_maxima',
'emission_maxima',
'solvent',
]
class PerformanceForm(forms.ModelForm):
    """ModelForm for a single DSSC performance measurement (VOC/JSC/FF/PCE
    plus the experimental conditions it was recorded under)."""

    class Meta:
        model = Performance
        fields = [
            'voc',
            'jsc',
            'ff',
            'pce',
            'electrolyte',
            'active_area',
            'co_adsorbent',
            'co_sensitizer',
            'semiconductor',
            'dye_loading',
            'exposure_time',
            'solar_simulator',
            'comment',
        ]

    def is_valid(self, article, molecule):
        """Validate the form and reject duplicate measurements.

        Extends the normal ModelForm validation with a uniqueness check: the
        submission is a duplicate when the same article/molecule pair already
        has a record with identical voc/jsc/ff/pce values.

        Returns True when the field data is valid and no duplicate exists.
        """
        super().is_valid()  # populates self.errors / self.cleaned_data
        if self.errors:
            return False
        # filter().exists() instead of objects.get(): get() raises an
        # unhandled MultipleObjectsReturned as soon as more than one
        # duplicate row exists, crashing instead of reporting a form error.
        # NOTE(review): this compares against raw self.data (pre-clean)
        # string values -- confirm that matches how voc/jsc/ff/pce are stored.
        duplicate = Performance.objects.filter(
            article=article, molecule=molecule,
            voc=str(self.data.get('voc')),
            jsc=str(self.data.get('jsc')),
            ff=str(self.data.get('ff')),
            pce=str(self.data.get('pce')),
        ).exists()
        if duplicate:
            self.add_error('voc', 'The performance measure exists already for the given molecule')
            return False
        return True
class SpreadsheetForm(forms.ModelForm):
class Meta:
model = Spreadsheet
fields = [
'file'
]
class ApprovalForm(forms.ModelForm):
confirm = forms.BooleanField(required=True, label="I've thoroughly checked the data", )
class Meta:
model = Contribution
fields = [
'status'
]
class PerformanceRangeSearchForm(forms.Form):
min_voc = forms.DecimalField(label='min VOC', decimal_places=4, max_digits=15, required=False)
max_voc = forms.DecimalField(label='max VOC', decimal_places=4, max_digits=15, required=False)
min_jsc = forms.DecimalField(label='min JSC', decimal_places=5, max_digits=15, required=False)
max_jsc = forms.DecimalField(label='max JSC', decimal_places=5, max_digits=15, required=False)
min_ff = forms.DecimalField(label='min FF', decimal_places=5, max_digits=13, required=False)
max_ff = forms.DecimalField(label='max FF', decimal_places=5, max_digits=13, required=False)
min_pce = forms.DecimalField(label='min PCE', decimal_places=5, max_digits=13, required=False)
max_pce = forms.DecimalField(label='max PCE', decimal_places=5, max_digits=13, required=False)
class PerformanceKeywordSearchForm(forms.Form):
keyword = forms.CharField(max_length=1000,
required=False,
widget=forms.TextInput(attrs={'placeholder': 'Free text search'}))
class PerformanceStructureSearchForm(forms.Form):
    """Chemical-structure search form (substructure or similarity)."""

    SUBSTRUCTURE = 'SUB'
    FINGERPRINT = 'FP'
    SEARCH_TYPES = (
        (SUBSTRUCTURE, 'Substructure'),
        (FINGERPRINT, 'Similarity')
    )
    smiles = forms.CharField(max_length=1000, required=False)
    complete_molecule = forms.BooleanField(required=False)
    search_type = forms.ChoiceField(choices=SEARCH_TYPES, label='Structure search type', widget=forms.RadioSelect)
    tanimoto_threshold = forms.DecimalField(decimal_places=2, max_digits=3, initial=0.85, required=True)

    def is_valid(self):
        """Validate the search form, including the optional SMILES string.

        Returns True when the base fields validate and the SMILES (if any)
        parses as a valid SMARTS pattern.
        """
        # Fix: the original discarded the base validation result, so e.g. a
        # missing/invalid tanimoto_threshold never made the form invalid.
        if not super().is_valid():
            return False
        smiles = self.cleaned_data.get('smiles')
        if not smiles:
            # Structure search is optional; an empty pattern is valid.
            return True
        try:
            # A non-empty SMILES must parse as a SMARTS pattern.
            if pybel.Smarts(smiles):
                return True
        except OSError:
            pass
        # Record the failure in both the parse-error and falsy-result cases
        # (the original returned False without any error in the latter).
        self.add_error(None, 'Invalid structure.')
        return False
download.py | maysrp/13clock | 1 | 6617915 | <gh_stars>1-10
import network
import urequests
import ujson
import time
import gc
def don(filename, url):
    """Download *url* and save the response body to *filename*.

    Prints the file name and the free heap after each download so progress
    and memory pressure can be watched on the MicroPython board.
    """
    w = urequests.get(url)
    try:
        # The context manager closes the file even if the write fails
        # part-way (the original leaked the handle on error).
        with open(filename, 'wb') as f:
            f.write(w.content)
    finally:
        # Fix: the original never closed the response, leaking its socket.
        w.close()
    print(filename, "Done! free:", gc.mem_free())
    gc.collect()
# Write a default config.json; the values are Chinese placeholder strings
# the user replaces: Bilibili ID, city name in pinyin, Xinzhi (Seniverse)
# weather API key, Wi-Fi credentials and a settings password.
con={}
con["bli"]="b站ID"
con["city"]="suzhou城市的拼音"
con["keys"]="心知天气私钥"
con["wifiname"]="你的WIFI名称"
con["wifipassword"]="<PASSWORD>"
con["set_password"]="<PASSWORD>"
f=open("config.json",'w')
f.write(ujson.dumps(con))
f.close()
# Bring the Wi-Fi station interface up and give it a moment to associate.
wlan = network.WLAN(network.STA_IF)
wlan.active(True)
wlan.connect(con["wifiname"], con["wifipassword"])
time.sleep(3)
if wlan.isconnected():
    # Source URLs for the firmware files hosted on the project mirror.
    a1="https://webdir.micropython.biz/bilibili/13clock/microWebSrv.py"
    a2="https://webdir.micropython.biz/bilibili/13clock/main.py"
    a3="https://webdir.micropython.biz/bilibili/13clock/ssd1351.py"
    a4="https://webdir.micropython.biz/bilibili/13clock/EspressoDolce18x24.c"
    a5="https://webdir.micropython.biz/bilibili/13clock/FixedFont5x8.c"
    a6="https://webdir.micropython.biz/bilibili/13clock/Robotron7x11.c"
    a7="https://webdir.micropython.biz/bilibili/13clock/Robotron13x21.c"
    a8="https://webdir.micropython.biz/bilibili/13clock/xglcd_font.py"
    don("microWebSrv.py",a1)
    # don("main.py",a2)
    don("ssd1351.py",a3)
    don("EspressoDolce18x24.c",a4)
    don("FixedFont5x8.c",a5)
    don("Robotron7x11.c",a6)
    don("Robotron13x21.c",a7)
    don("xglcd_font.py",a8)
    # Fetch the 39 raw image frames (0.raw .. 38.raw).
    ourl="https://webdir.micropython.biz/bilibili/13clock/"
    for i in range(39):
        fn=str(i)+".raw"
        url=ourl+fn
        don(fn,url)
    # main.py is downloaded last -- presumably so an interrupted run can't
    # leave a half-written boot script behind; confirm before relying on it.
    don("main.py",a2)
else:
    print("wifi error")
| import network
import urequests
import ujson
import time
import gc
def don(filename,url):
w=urequests.get(url)
f=open(filename,'wb')
f.write(w.content)
f.close()
print(filename,"Done! free:",gc.mem_free())
del f,w
gc.collect()
con={}
con["bli"]="b站ID"
con["city"]="suzhou城市的拼音"
con["keys"]="心知天气私钥"
con["wifiname"]="你的WIFI名称"
con["wifipassword"]="<PASSWORD>"
con["set_password"]="<PASSWORD>"
f=open("config.json",'w')
f.write(ujson.dumps(con))
f.close()
wlan = network.WLAN(network.STA_IF)
wlan.active(True)
wlan.connect(con["wifiname"], con["wifipassword"])
time.sleep(3)
if wlan.isconnected():
a1="https://webdir.micropython.biz/bilibili/13clock/microWebSrv.py"
a2="https://webdir.micropython.biz/bilibili/13clock/main.py"
a3="https://webdir.micropython.biz/bilibili/13clock/ssd1351.py"
a4="https://webdir.micropython.biz/bilibili/13clock/EspressoDolce18x24.c"
a5="https://webdir.micropython.biz/bilibili/13clock/FixedFont5x8.c"
a6="https://webdir.micropython.biz/bilibili/13clock/Robotron7x11.c"
a7="https://webdir.micropython.biz/bilibili/13clock/Robotron13x21.c"
a8="https://webdir.micropython.biz/bilibili/13clock/xglcd_font.py"
don("microWebSrv.py",a1)
# don("main.py",a2)
don("ssd1351.py",a3)
don("EspressoDolce18x24.c",a4)
don("FixedFont5x8.c",a5)
don("Robotron7x11.c",a6)
don("Robotron13x21.c",a7)
don("xglcd_font.py",a8)
ourl="https://webdir.micropython.biz/bilibili/13clock/"
for i in range(39):
fn=str(i)+".raw"
url=ourl+fn
don(fn,url)
don("main.py",a2)
else:
print("wifi error") | en | 0.214607 | # don("main.py",a2) | 2.375446 | 2 |
src/waldur_mastermind/marketplace_openstack/migrations/0008_drop_package_tables.py | geant-multicloud/MCMS-mastermind | 26 | 6617916 | <gh_stars>10-100
from django.db import migrations
class Migration(migrations.Migration):
    """Drop the leftover ``packages`` app tables.

    The packages application was removed from the codebase, so its tables
    are cleaned up with raw SQL after the dependent migrations have run.
    """

    dependencies = [
        ('marketplace_openstack', '0007_change_billing_type_for_volumes_of_tenants'),
        ('invoices', '0043_drop_package_column'),
        ('marketplace', '0041_drop_package'),
    ]

    operations = [
        # Raw SQL is used instead of Django migration operations
        # because packages application has been removed
        # (no model state is left to operate on).  IF EXISTS keeps the
        # migration safe on databases that were already cleaned up.
        migrations.RunSQL('DROP TABLE IF EXISTS packages_openstackpackage'),
        migrations.RunSQL('DROP TABLE IF EXISTS packages_packagecomponent'),
        migrations.RunSQL('DROP TABLE IF EXISTS packages_packagetemplate'),
    ]
| from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('marketplace_openstack', '0007_change_billing_type_for_volumes_of_tenants'),
('invoices', '0043_drop_package_column'),
('marketplace', '0041_drop_package'),
]
operations = [
# Raw SQL is used instead of Django migration operations
# because packages application has been removed
migrations.RunSQL('DROP TABLE IF EXISTS packages_openstackpackage'),
migrations.RunSQL('DROP TABLE IF EXISTS packages_packagecomponent'),
migrations.RunSQL('DROP TABLE IF EXISTS packages_packagetemplate'),
] | en | 0.953319 | # Raw SQL is used instead of Django migration operations # because packages application has been removed | 1.682915 | 2 |
analysis/db/2020-spring/max/load_data.py | databridgevt/covid19 | 7 | 6617917 | import pandas as pd
#import spacy
from pyprojroot import here
# load the tab separated file
dat = pd.read_csv(here("./data/db/final/kaggle/paper_text/comm_use_subset.tsv"), sep="\t")
# look at the first few lines
dat.head()
# look at the column names, their data types, and number of non missing elements
dat.info()
# number of rows and columns
dat.shape | import pandas as pd
#import spacy
from pyprojroot import here

# Load the tab-separated comm_use_subset data into a DataFrame;
# here() resolves the path relative to the project root.
dat = pd.read_csv(here("./data/db/final/kaggle/paper_text/comm_use_subset.tsv"), sep="\t")
# look at the first few lines
dat.head()
# look at the column names, their data types, and number of non missing elements
dat.info()
# number of rows and columns
dat.shape
# NOTE(review): the bare expressions above only display output in an
# interactive session (REPL/notebook); run as a script they are no-ops.
webapp/element43/apps/market_data/migrations/0001_initial.py | Ososope/eve_online | 0 | 6617918 | <reponame>Ososope/eve_online
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Initial schema for the market_data app (EVE Online market mirror).

    Auto-generated by Django's makemigrations; edit with care.  Note that
    ArchivedOrders and Orders define the same columns -- archived rows keep
    the live-order shape.
    """

    dependencies = [
        ('eve_db', '0001_initial'),
    ]

    operations = [
        # Historical copy of market orders; identical columns to Orders.
        migrations.CreateModel(
            name='ArchivedOrders',
            fields=[
                ('generated_at', models.DateTimeField(help_text=b"When the market data was generated on the user's machine.", null=True, blank=True)),
                ('price', models.FloatField(help_text=b'Item price, as reported in the message.')),
                ('volume_remaining', models.PositiveIntegerField(help_text=b'Number of remaining items for sale.')),
                ('volume_entered', models.PositiveIntegerField(help_text=b'Number of items initially put up for sale.')),
                ('minimum_volume', models.PositiveIntegerField(help_text=b'Minimum volume before the order finishes.')),
                ('order_range', models.IntegerField(help_text=b'How far the order is visible. 32767 = region-wide')),
                ('id', models.BigIntegerField(help_text=b'Unique order ID from EVE for this order.', serialize=False, primary_key=True)),
                ('is_bid', models.BooleanField(help_text=b'If True, this is a buy order. If False, this is a sell order.')),
                ('issue_date', models.DateTimeField(help_text=b'When the order was issued.')),
                ('duration', models.PositiveSmallIntegerField(help_text=b'The duration of the order, in days.')),
                ('is_suspicious', models.BooleanField(help_text=b"If this is True, we have reason to question this order's validity")),
                ('message_key', models.CharField(help_text=b'The unique hash that of the market message.', max_length=255, null=True, blank=True)),
                ('uploader_ip_hash', models.CharField(help_text=b'The unique hash for the person who uploaded this message.', max_length=255)),
                ('is_active', models.BooleanField(default=True, help_text=b'is this a live order or is it history')),
                ('invtype', models.ForeignKey(help_text=b'The Type ID of the item in the order.', to='eve_db.InvType')),
                ('mapregion', models.ForeignKey(help_text=b'Region ID the order originated from.', to='eve_db.MapRegion')),
                ('mapsolarsystem', models.ForeignKey(help_text=b'ID of the solar system the order is in.', to='eve_db.MapSolarSystem')),
                ('stastation', models.ForeignKey(help_text=b'The station that this order is in.', to='eve_db.StaStation')),
            ],
            options={
                'verbose_name': 'Archived Market Order',
                'verbose_name_plural': 'Archived Market Orders',
            },
        ),
        # Aggregated counters of EMDR feed messages per type.
        migrations.CreateModel(
            name='EmdrStats',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('status_type', models.SmallIntegerField(help_text=b'Message type for statistics')),
                ('status_count', models.PositiveIntegerField(help_text=b'Count of messages of specific type')),
                ('message_timestamp', models.DateTimeField(help_text=b'When the stats were counted for this entry', auto_now_add=True, db_index=True)),
            ],
            options={
                'verbose_name': 'Message Statistics Data',
                'verbose_name_plural': 'Message Statistics Data',
            },
        ),
        # In-progress (not yet aggregated) message counters.
        migrations.CreateModel(
            name='EmdrStatsWorking',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('status_type', models.SmallIntegerField(help_text=b'Message type for statistics')),
            ],
            options={
                'verbose_name': 'Message Statistics Live Data',
                'verbose_name_plural': 'Message Statistics Live Data',
            },
        ),
        # Compressed raw history JSON keyed by a UUID-based id.
        migrations.CreateModel(
            name='History',
            fields=[
                ('id', models.CharField(help_text=b'Primary key, based on UUID', max_length=255, serialize=False, primary_key=True)),
                ('history_data', models.TextField(help_text=b'Compressed zlib data of the JSON message for history')),
                ('invtype', models.ForeignKey(help_text=b'The Type ID of the item in the order.', to='eve_db.InvType')),
                ('mapregion', models.ForeignKey(help_text=b'Region ID the order originated from.', to='eve_db.MapRegion')),
            ],
            options={
                'verbose_name': 'History Data',
                'verbose_name_plural': 'History Data',
            },
        ),
        # Current per-item, per-region price statistics.
        migrations.CreateModel(
            name='ItemRegionStat',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('buymean', models.FloatField(help_text=b'Mean of buy price')),
                ('buyavg', models.FloatField(help_text=b'Average of buy price')),
                ('sellmean', models.FloatField(help_text=b'Mean of sell price')),
                ('sellavg', models.FloatField(help_text=b'Avg of sell price')),
                ('buymedian', models.FloatField(help_text=b'Median of buy price')),
                ('sellmedian', models.FloatField(help_text=b'Median of sell price')),
                ('buyvolume', models.BigIntegerField(help_text=b'total volume traded')),
                ('sellvolume', models.BigIntegerField(help_text=b'total volume traded')),
                ('buy_95_percentile', models.FloatField(help_text=b'95th % of buy orders')),
                ('sell_95_percentile', models.FloatField(help_text=b'95th % of sell orders')),
                ('buy_std_dev', models.FloatField(help_text=b'standard deviation of buy orders')),
                ('sell_std_dev', models.FloatField(help_text=b'standard deviation of sell orders')),
                ('lastupdate', models.DateTimeField(help_text=b'Date the stats were updated', null=True, blank=True)),
                ('invtype', models.ForeignKey(help_text=b'FK to type table', to='eve_db.InvType')),
                ('mapregion', models.ForeignKey(help_text=b'FK to region table', to='eve_db.MapRegion')),
            ],
            options={
                'verbose_name': 'Stat Data',
                'verbose_name_plural': 'Stats Data',
            },
        ),
        # Time-series snapshots of the statistics above ('date' vs 'lastupdate').
        migrations.CreateModel(
            name='ItemRegionStatHistory',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('buymean', models.FloatField(help_text=b'Mean of buy price')),
                ('buyavg', models.FloatField(help_text=b'Average of buy price')),
                ('sellmean', models.FloatField(help_text=b'Mean of sell price')),
                ('sellavg', models.FloatField(help_text=b'Avg of sell price')),
                ('buymedian', models.FloatField(help_text=b'Median of buy price')),
                ('sellmedian', models.FloatField(help_text=b'Median of sell price')),
                ('buyvolume', models.BigIntegerField(help_text=b'total volume traded')),
                ('sellvolume', models.BigIntegerField(help_text=b'total volume traded')),
                ('buy_95_percentile', models.FloatField(help_text=b'95th % of buy orders')),
                ('sell_95_percentile', models.FloatField(help_text=b'95th % of sell orders')),
                ('buy_std_dev', models.FloatField(help_text=b'standard deviation of buy orders')),
                ('sell_std_dev', models.FloatField(help_text=b'standard deviation of sell orders')),
                ('date', models.DateTimeField(help_text=b'Date the stats were inserted', null=True, blank=True)),
                ('invtype', models.ForeignKey(help_text=b'FK to type table', to='eve_db.InvType')),
                ('mapregion', models.ForeignKey(help_text=b'FK to region table', to='eve_db.MapRegion')),
            ],
            options={
                'verbose_name': 'Stat History Data',
                'verbose_name_plural': 'Stat History Data',
            },
        ),
        # Daily per-item/per-region trade aggregates.
        migrations.CreateModel(
            name='OrderHistory',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('date', models.DateTimeField(help_text=b'Date of the data')),
                ('numorders', models.PositiveIntegerField(help_text=b'number of transactions for this item/region')),
                ('low', models.FloatField(help_text=b'low price of orders for this item/region')),
                ('high', models.FloatField(help_text=b'high price of orders for this item/region')),
                ('mean', models.FloatField(help_text=b'mean price of orders for this item/region')),
                ('quantity', models.BigIntegerField(help_text=b'quantity of item sold for this item/region')),
                ('invtype', models.ForeignKey(help_text=b'The Type ID of the item in the order.', to='eve_db.InvType')),
                ('mapregion', models.ForeignKey(help_text=b'Region ID the order originated from.', to='eve_db.MapRegion')),
            ],
        ),
        # Live market orders; same columns as ArchivedOrders.
        migrations.CreateModel(
            name='Orders',
            fields=[
                ('generated_at', models.DateTimeField(help_text=b"When the market data was generated on the user's machine.", null=True, blank=True)),
                ('price', models.FloatField(help_text=b'Item price, as reported in the message.')),
                ('volume_remaining', models.PositiveIntegerField(help_text=b'Number of remaining items for sale.')),
                ('volume_entered', models.PositiveIntegerField(help_text=b'Number of items initially put up for sale.')),
                ('minimum_volume', models.PositiveIntegerField(help_text=b'Minimum volume before the order finishes.')),
                ('order_range', models.IntegerField(help_text=b'How far the order is visible. 32767 = region-wide')),
                ('id', models.BigIntegerField(help_text=b'Unique order ID from EVE for this order.', serialize=False, primary_key=True)),
                ('is_bid', models.BooleanField(help_text=b'If True, this is a buy order. If False, this is a sell order.')),
                ('issue_date', models.DateTimeField(help_text=b'When the order was issued.')),
                ('duration', models.PositiveSmallIntegerField(help_text=b'The duration of the order, in days.')),
                ('is_suspicious', models.BooleanField(help_text=b"If this is True, we have reason to question this order's validity")),
                ('message_key', models.CharField(help_text=b'The unique hash that of the market message.', max_length=255, null=True, blank=True)),
                ('uploader_ip_hash', models.CharField(help_text=b'The unique hash for the person who uploaded this message.', max_length=255)),
                ('is_active', models.BooleanField(default=True, help_text=b'is this a live order or is it history')),
                ('invtype', models.ForeignKey(help_text=b'The Type ID of the item in the order.', to='eve_db.InvType')),
                ('mapregion', models.ForeignKey(help_text=b'Region ID the order originated from.', to='eve_db.MapRegion')),
                ('mapsolarsystem', models.ForeignKey(help_text=b'ID of the solar system the order is in.', to='eve_db.MapSolarSystem')),
                ('stastation', models.ForeignKey(help_text=b'The station that this order is in.', to='eve_db.StaStation')),
            ],
            options={
                'verbose_name': 'Market Order',
                'verbose_name_plural': 'Market Orders',
            },
        ),
        # Lightweight record of order IDs already processed.
        migrations.CreateModel(
            name='SeenOrders',
            fields=[
                ('id', models.BigIntegerField(help_text=b'Order ID', serialize=False, primary_key=True)),
                ('region_id', models.PositiveIntegerField(help_text=b'Region ID of seen order')),
                ('type_id', models.PositiveIntegerField(help_text=b'Type ID of seen order')),
            ],
            options={
                'verbose_name': 'Seen Order',
                'verbose_name_plural': 'Seen Orders',
            },
        ),
        # Raw UUDIF feed messages (orders or history) as received.
        migrations.CreateModel(
            name='UUDIFMessage',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('key', models.CharField(help_text=b"I'm assuming this is a unique hash for the message.", unique=True, max_length=255)),
                ('received_dtime', models.DateTimeField(help_text=b'Time of initial receiving.', auto_now_add=True)),
                ('is_order', models.BooleanField(help_text=b'If True, this is an order. If False, this is history.')),
                ('message', models.TextField(help_text=b'Full JSON representation of the message.')),
            ],
            options={
                'verbose_name': 'UUDIF Message',
                'verbose_name_plural': 'UUDIF Messages',
            },
        ),
    ]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('eve_db', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='ArchivedOrders',
fields=[
('generated_at', models.DateTimeField(help_text=b"When the market data was generated on the user's machine.", null=True, blank=True)),
('price', models.FloatField(help_text=b'Item price, as reported in the message.')),
('volume_remaining', models.PositiveIntegerField(help_text=b'Number of remaining items for sale.')),
('volume_entered', models.PositiveIntegerField(help_text=b'Number of items initially put up for sale.')),
('minimum_volume', models.PositiveIntegerField(help_text=b'Minimum volume before the order finishes.')),
('order_range', models.IntegerField(help_text=b'How far the order is visible. 32767 = region-wide')),
('id', models.BigIntegerField(help_text=b'Unique order ID from EVE for this order.', serialize=False, primary_key=True)),
('is_bid', models.BooleanField(help_text=b'If True, this is a buy order. If False, this is a sell order.')),
('issue_date', models.DateTimeField(help_text=b'When the order was issued.')),
('duration', models.PositiveSmallIntegerField(help_text=b'The duration of the order, in days.')),
('is_suspicious', models.BooleanField(help_text=b"If this is True, we have reason to question this order's validity")),
('message_key', models.CharField(help_text=b'The unique hash that of the market message.', max_length=255, null=True, blank=True)),
('uploader_ip_hash', models.CharField(help_text=b'The unique hash for the person who uploaded this message.', max_length=255)),
('is_active', models.BooleanField(default=True, help_text=b'is this a live order or is it history')),
('invtype', models.ForeignKey(help_text=b'The Type ID of the item in the order.', to='eve_db.InvType')),
('mapregion', models.ForeignKey(help_text=b'Region ID the order originated from.', to='eve_db.MapRegion')),
('mapsolarsystem', models.ForeignKey(help_text=b'ID of the solar system the order is in.', to='eve_db.MapSolarSystem')),
('stastation', models.ForeignKey(help_text=b'The station that this order is in.', to='eve_db.StaStation')),
],
options={
'verbose_name': 'Archived Market Order',
'verbose_name_plural': 'Archived Market Orders',
},
),
migrations.CreateModel(
name='EmdrStats',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('status_type', models.SmallIntegerField(help_text=b'Message type for statistics')),
('status_count', models.PositiveIntegerField(help_text=b'Count of messages of specific type')),
('message_timestamp', models.DateTimeField(help_text=b'When the stats were counted for this entry', auto_now_add=True, db_index=True)),
],
options={
'verbose_name': 'Message Statistics Data',
'verbose_name_plural': 'Message Statistics Data',
},
),
migrations.CreateModel(
name='EmdrStatsWorking',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('status_type', models.SmallIntegerField(help_text=b'Message type for statistics')),
],
options={
'verbose_name': 'Message Statistics Live Data',
'verbose_name_plural': 'Message Statistics Live Data',
},
),
migrations.CreateModel(
name='History',
fields=[
('id', models.CharField(help_text=b'Primary key, based on UUID', max_length=255, serialize=False, primary_key=True)),
('history_data', models.TextField(help_text=b'Compressed zlib data of the JSON message for history')),
('invtype', models.ForeignKey(help_text=b'The Type ID of the item in the order.', to='eve_db.InvType')),
('mapregion', models.ForeignKey(help_text=b'Region ID the order originated from.', to='eve_db.MapRegion')),
],
options={
'verbose_name': 'History Data',
'verbose_name_plural': 'History Data',
},
),
migrations.CreateModel(
name='ItemRegionStat',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('buymean', models.FloatField(help_text=b'Mean of buy price')),
('buyavg', models.FloatField(help_text=b'Average of buy price')),
('sellmean', models.FloatField(help_text=b'Mean of sell price')),
('sellavg', models.FloatField(help_text=b'Avg of sell price')),
('buymedian', models.FloatField(help_text=b'Median of buy price')),
('sellmedian', models.FloatField(help_text=b'Median of sell price')),
('buyvolume', models.BigIntegerField(help_text=b'total volume traded')),
('sellvolume', models.BigIntegerField(help_text=b'total volume traded')),
('buy_95_percentile', models.FloatField(help_text=b'95th % of buy orders')),
('sell_95_percentile', models.FloatField(help_text=b'95th % of sell orders')),
('buy_std_dev', models.FloatField(help_text=b'standard deviation of buy orders')),
('sell_std_dev', models.FloatField(help_text=b'standard deviation of sell orders')),
('lastupdate', models.DateTimeField(help_text=b'Date the stats were updated', null=True, blank=True)),
('invtype', models.ForeignKey(help_text=b'FK to type table', to='eve_db.InvType')),
('mapregion', models.ForeignKey(help_text=b'FK to region table', to='eve_db.MapRegion')),
],
options={
'verbose_name': 'Stat Data',
'verbose_name_plural': 'Stats Data',
},
),
migrations.CreateModel(
name='ItemRegionStatHistory',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('buymean', models.FloatField(help_text=b'Mean of buy price')),
('buyavg', models.FloatField(help_text=b'Average of buy price')),
('sellmean', models.FloatField(help_text=b'Mean of sell price')),
('sellavg', models.FloatField(help_text=b'Avg of sell price')),
('buymedian', models.FloatField(help_text=b'Median of buy price')),
('sellmedian', models.FloatField(help_text=b'Median of sell price')),
('buyvolume', models.BigIntegerField(help_text=b'total volume traded')),
('sellvolume', models.BigIntegerField(help_text=b'total volume traded')),
('buy_95_percentile', models.FloatField(help_text=b'95th % of buy orders')),
('sell_95_percentile', models.FloatField(help_text=b'95th % of sell orders')),
('buy_std_dev', models.FloatField(help_text=b'standard deviation of buy orders')),
('sell_std_dev', models.FloatField(help_text=b'standard deviation of sell orders')),
('date', models.DateTimeField(help_text=b'Date the stats were inserted', null=True, blank=True)),
('invtype', models.ForeignKey(help_text=b'FK to type table', to='eve_db.InvType')),
('mapregion', models.ForeignKey(help_text=b'FK to region table', to='eve_db.MapRegion')),
],
options={
'verbose_name': 'Stat History Data',
'verbose_name_plural': 'Stat History Data',
},
),
migrations.CreateModel(
name='OrderHistory',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('date', models.DateTimeField(help_text=b'Date of the data')),
('numorders', models.PositiveIntegerField(help_text=b'number of transactions for this item/region')),
('low', models.FloatField(help_text=b'low price of orders for this item/region')),
('high', models.FloatField(help_text=b'high price of orders for this item/region')),
('mean', models.FloatField(help_text=b'mean price of orders for this item/region')),
('quantity', models.BigIntegerField(help_text=b'quantity of item sold for this item/region')),
('invtype', models.ForeignKey(help_text=b'The Type ID of the item in the order.', to='eve_db.InvType')),
('mapregion', models.ForeignKey(help_text=b'Region ID the order originated from.', to='eve_db.MapRegion')),
],
),
migrations.CreateModel(
name='Orders',
fields=[
('generated_at', models.DateTimeField(help_text=b"When the market data was generated on the user's machine.", null=True, blank=True)),
('price', models.FloatField(help_text=b'Item price, as reported in the message.')),
('volume_remaining', models.PositiveIntegerField(help_text=b'Number of remaining items for sale.')),
('volume_entered', models.PositiveIntegerField(help_text=b'Number of items initially put up for sale.')),
('minimum_volume', models.PositiveIntegerField(help_text=b'Minimum volume before the order finishes.')),
('order_range', models.IntegerField(help_text=b'How far the order is visible. 32767 = region-wide')),
('id', models.BigIntegerField(help_text=b'Unique order ID from EVE for this order.', serialize=False, primary_key=True)),
('is_bid', models.BooleanField(help_text=b'If True, this is a buy order. If False, this is a sell order.')),
('issue_date', models.DateTimeField(help_text=b'When the order was issued.')),
('duration', models.PositiveSmallIntegerField(help_text=b'The duration of the order, in days.')),
('is_suspicious', models.BooleanField(help_text=b"If this is True, we have reason to question this order's validity")),
('message_key', models.CharField(help_text=b'The unique hash that of the market message.', max_length=255, null=True, blank=True)),
('uploader_ip_hash', models.CharField(help_text=b'The unique hash for the person who uploaded this message.', max_length=255)),
('is_active', models.BooleanField(default=True, help_text=b'is this a live order or is it history')),
('invtype', models.ForeignKey(help_text=b'The Type ID of the item in the order.', to='eve_db.InvType')),
('mapregion', models.ForeignKey(help_text=b'Region ID the order originated from.', to='eve_db.MapRegion')),
('mapsolarsystem', models.ForeignKey(help_text=b'ID of the solar system the order is in.', to='eve_db.MapSolarSystem')),
('stastation', models.ForeignKey(help_text=b'The station that this order is in.', to='eve_db.StaStation')),
],
options={
'verbose_name': 'Market Order',
'verbose_name_plural': 'Market Orders',
},
),
migrations.CreateModel(
name='SeenOrders',
fields=[
('id', models.BigIntegerField(help_text=b'Order ID', serialize=False, primary_key=True)),
('region_id', models.PositiveIntegerField(help_text=b'Region ID of seen order')),
('type_id', models.PositiveIntegerField(help_text=b'Type ID of seen order')),
],
options={
'verbose_name': 'Seen Order',
'verbose_name_plural': 'Seen Orders',
},
),
migrations.CreateModel(
name='UUDIFMessage',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('key', models.CharField(help_text=b"I'm assuming this is a unique hash for the message.", unique=True, max_length=255)),
('received_dtime', models.DateTimeField(help_text=b'Time of initial receiving.', auto_now_add=True)),
('is_order', models.BooleanField(help_text=b'If True, this is an order. If False, this is history.')),
('message', models.TextField(help_text=b'Full JSON representation of the message.')),
],
options={
'verbose_name': 'UUDIF Message',
'verbose_name_plural': 'UUDIF Messages',
},
),
] | en | 0.769321 | # -*- coding: utf-8 -*- | 2.067288 | 2 |
cli/add_dataminer_plane.py | CS-METIS/minimetis | 5 | 6617919 | from metis_lib import miningservices
from getpass import getpass
if __name__ == "__main__":
user_email = input("Enter miner email address\n>>")
user_firstname = input("Enter user firstname\n>>")
user_lastname = input("Enter user lastname\n>>")
user_name = input("Enter miner user name\n>>")
user_password = getpass("Enter user password\n>>")
user_cpu_limit = input("Enter user cpu limit (ex: 4)\n>>")
user_memory_limit = input("Enter user memory limit (ex: 8Gi)\n>>")
miningservices.deploy(
email=user_email,
username=user_name,
password=<PASSWORD>,
firstname=user_firstname,
lastname=user_lastname,
cpu_limit=int(user_cpu_limit),
memory_limit=user_memory_limit,
storage_limit="10Gi"
)
| from metis_lib import miningservices
from getpass import getpass
if __name__ == "__main__":
user_email = input("Enter miner email address\n>>")
user_firstname = input("Enter user firstname\n>>")
user_lastname = input("Enter user lastname\n>>")
user_name = input("Enter miner user name\n>>")
user_password = getpass("Enter user password\n>>")
user_cpu_limit = input("Enter user cpu limit (ex: 4)\n>>")
user_memory_limit = input("Enter user memory limit (ex: 8Gi)\n>>")
miningservices.deploy(
email=user_email,
username=user_name,
password=<PASSWORD>,
firstname=user_firstname,
lastname=user_lastname,
cpu_limit=int(user_cpu_limit),
memory_limit=user_memory_limit,
storage_limit="10Gi"
)
| none | 1 | 1.890986 | 2 | |
mygoals/templatetags/mygoals_extras.py | evan-rusin/fly-project | 15 | 6617920 | from django.template import Library
from fly_project import constants
from api.models import Course
from api.models import EnrolledCourse
from api.models import Quiz
from api.models import QuizSubmission
from api.models import Question
from api.models import QuestionSubmission
register = Library()
@register.inclusion_tag('templatetags/show_goal_text.html')
def show_goal_text(goal, type):
"""
Function will generate the appropriate Goal text message depending on
what 'type' of 'goal' was entered.
"""
return {
'constants': constants,
'goal': goal,
'type': int(type),
} | from django.template import Library
from fly_project import constants
from api.models import Course
from api.models import EnrolledCourse
from api.models import Quiz
from api.models import QuizSubmission
from api.models import Question
from api.models import QuestionSubmission
register = Library()
@register.inclusion_tag('templatetags/show_goal_text.html')
def show_goal_text(goal, type):
"""
Function will generate the appropriate Goal text message depending on
what 'type' of 'goal' was entered.
"""
return {
'constants': constants,
'goal': goal,
'type': int(type),
} | en | 0.856048 | Function will generate the appropriate Goal text message depending on what 'type' of 'goal' was entered. | 2.090994 | 2 |
src/vyos/cli/command_modes.py | QualiSystems/VyOS-Deployment-App-Shell-2G | 0 | 6617921 | from cloudshell.cli.command_mode import CommandMode
class DefaultCommandMode(CommandMode):
PROMPT = r'\$'
ENTER_COMMAND = ''
EXIT_COMMAND = '\x03'
def __init__(self):
super(DefaultCommandMode, self).__init__(DefaultCommandMode.PROMPT,
DefaultCommandMode.ENTER_COMMAND,
DefaultCommandMode.EXIT_COMMAND)
class ConfigCommandMode(CommandMode):
PROMPT = r'#'
ENTER_COMMAND = 'configure'
EXIT_COMMAND = 'exit'
def __init__(self):
super(ConfigCommandMode, self).__init__(ConfigCommandMode.PROMPT,
ConfigCommandMode.ENTER_COMMAND,
ConfigCommandMode.EXIT_COMMAND)
CommandMode.RELATIONS_DICT = {
DefaultCommandMode: {
ConfigCommandMode: {}
}
}
| from cloudshell.cli.command_mode import CommandMode
class DefaultCommandMode(CommandMode):
PROMPT = r'\$'
ENTER_COMMAND = ''
EXIT_COMMAND = '\x03'
def __init__(self):
super(DefaultCommandMode, self).__init__(DefaultCommandMode.PROMPT,
DefaultCommandMode.ENTER_COMMAND,
DefaultCommandMode.EXIT_COMMAND)
class ConfigCommandMode(CommandMode):
PROMPT = r'#'
ENTER_COMMAND = 'configure'
EXIT_COMMAND = 'exit'
def __init__(self):
super(ConfigCommandMode, self).__init__(ConfigCommandMode.PROMPT,
ConfigCommandMode.ENTER_COMMAND,
ConfigCommandMode.EXIT_COMMAND)
CommandMode.RELATIONS_DICT = {
DefaultCommandMode: {
ConfigCommandMode: {}
}
}
| none | 1 | 2.365053 | 2 | |
2016/python/day25.py | astonshane/AdventOfCode | 0 | 6617922 | import re
import copy
def run(tape, registers):
    """Interpret an "assembunny" program (Advent of Code 2016, day 25).

    tape is a list of instruction strings; registers maps register name -> int.
    Returns True once more than 100 `out` values arrive while strictly
    alternating 0,1,0,1..., False as soon as the pattern breaks or the
    program halts.  (Python 2 source: uses print statements.)
    """
    i = 0
    # last starts at 1 so the first accepted output must be 0
    # (the alternation check below requires last + value == 1).
    last = 1
    outCount = 0
    while i < len(tape):
        if outCount > 100:
            return True
        instruction = tape[i]
        #print i, instruction, registers
        # cpy x y: copy a literal or register value x into register y.
        m = re.search('cpy ([a-z]|[0-9]+) ([a-z])', instruction)
        if m is not None:
            (value, register) = m.groups()
            # Try a register lookup first; fall back to a literal integer.
            tmp = registers.get(value)
            if tmp is None:
                value = int(value)
            else:
                value = tmp
            registers[register] = value
            i += 1
            continue
        # inc/dec x: increment or decrement register x.
        m = re.search('(inc|dec) ([a-z])', instruction)
        if m is not None:
            (kind, register) = m.groups()
            if kind == "inc":
                registers[register] += 1
            else:
                registers[register] -= 1
            i += 1
            continue
        # jnz x y: jump y instructions (possibly negative) when x is non-zero.
        m = re.search('jnz ([a-z]|[0-9+]) (\-[0-9]+|[0-9]+)', instruction)
        if m is not None:
            (test, jmp) = m.groups()
            tmp = registers.get(test)
            if tmp is None:
                test = int(test)
            else:
                test = tmp
            jmp = int(jmp)
            if test != 0:
                i += jmp
            else:
                i += 1
            continue
        # out x: emit a value; consecutive outputs must alternate 0/1.
        m = re.search('out ([a-z]|[0-9]+)', instruction)
        if m is not None:
            value = m.groups()[0]
            tmp = registers.get(value)
            if tmp is None:
                value = int(value)
            else:
                value = tmp
            print value,
            outCount += 1
            # Two consecutive outputs summing to 1 means strict 0/1 alternation.
            if (last + value) == 1:
                last = value
            else:
                return False
            i += 1
            continue
        print "Unrecognized instruction: %s" % instruction
        assert(False)
    # Program halted before producing a long enough clock signal.
    return False
with open("inputs/day25.txt") as f:
tape = []
for line in f:
tape.append(line.strip())
a = 0
while True:
registers = {
"a": a,
"b": 0,
"c": 0,
"d": 0
}
print "a == %d" % a,
ok = run(copy.copy(tape), registers)
print ""
if ok:
print "success at a == %d" % a
break
a += 1
| import re
import copy
def run(tape, registers):
i = 0
last = 1
outCount = 0
while i < len(tape):
if outCount > 100:
return True
instruction = tape[i]
#print i, instruction, registers
m = re.search('cpy ([a-z]|[0-9]+) ([a-z])', instruction)
if m is not None:
(value, register) = m.groups()
tmp = registers.get(value)
if tmp is None:
value = int(value)
else:
value = tmp
registers[register] = value
i += 1
continue
m = re.search('(inc|dec) ([a-z])', instruction)
if m is not None:
(kind, register) = m.groups()
if kind == "inc":
registers[register] += 1
else:
registers[register] -= 1
i += 1
continue
m = re.search('jnz ([a-z]|[0-9+]) (\-[0-9]+|[0-9]+)', instruction)
if m is not None:
(test, jmp) = m.groups()
tmp = registers.get(test)
if tmp is None:
test = int(test)
else:
test = tmp
jmp = int(jmp)
if test != 0:
i += jmp
else:
i += 1
continue
m = re.search('out ([a-z]|[0-9]+)', instruction)
if m is not None:
value = m.groups()[0]
tmp = registers.get(value)
if tmp is None:
value = int(value)
else:
value = tmp
print value,
outCount += 1
if (last + value) == 1:
last = value
else:
return False
i += 1
continue
print "Unrecognized instruction: %s" % instruction
assert(False)
return False
with open("inputs/day25.txt") as f:
tape = []
for line in f:
tape.append(line.strip())
a = 0
while True:
registers = {
"a": a,
"b": 0,
"c": 0,
"d": 0
}
print "a == %d" % a,
ok = run(copy.copy(tape), registers)
print ""
if ok:
print "success at a == %d" % a
break
a += 1
| en | 0.449742 | #print i, instruction, registers | 2.853698 | 3 |
medium/78-Subsets.py | Davidxswang/leetcode | 2 | 6617923 | """
https://leetcode.com/problems/subsets/
Given a set of distinct integers, nums, return all possible subsets (the power set).
Note: The solution set must not contain duplicate subsets.
Example:
Input: nums = [1,2,3]
Output:
[
[3],
[1],
[2],
[1,2,3],
[1,3],
[2,3],
[1,2],
[]
]
"""
# time complexity: O(2^n), space complexity: O(n), where n is the number of the elements in the set
class Solution:
    def subsets(self, nums: List[int]) -> List[List[int]]:
        """Return the power set of *nums* as a list of lists.

        Every bitmask in [0, 2**n) selects one subset: bit `b` set means
        nums[b] is included, so elements keep their original order and the
        output contains exactly 2**n subsets (mask 0 is the empty set).
        Time O(n * 2**n); O(n) extra space per subset.
        """
        size = len(nums)
        power_set = []
        for mask in range(1 << size):
            subset = [nums[bit] for bit in range(size) if (mask >> bit) & 1]
            power_set.append(subset)
        return power_set
| """
https://leetcode.com/problems/subsets/
Given a set of distinct integers, nums, return all possible subsets (the power set).
Note: The solution set must not contain duplicate subsets.
Example:
Input: nums = [1,2,3]
Output:
[
[3],
[1],
[2],
[1,2,3],
[1,3],
[2,3],
[1,2],
[]
]
"""
# time complexity: O(2^n), space complexity: O(n), where n is the number of the elements in the set
class Solution:
def subsets(self, nums: List[int]) -> List[List[int]]:
n = len(nums)
result = []
for number in range(0, pow(2,n)):
i = 0
temp = []
while number > 0:
remainder = number % 2
if remainder == 1:
temp.append(nums[i])
i += 1
number //= 2
result.append(temp)
return result
| en | 0.777641 | https://leetcode.com/problems/subsets/ Given a set of distinct integers, nums, return all possible subsets (the power set). Note: The solution set must not contain duplicate subsets. Example: Input: nums = [1,2,3] Output: [ [3], [1], [2], [1,2,3], [1,3], [2,3], [1,2], [] ] # time complexity: O(2^n), space complexity: O(n), where n is the number of the elements in the set | 3.877111 | 4 |
peek_storage/_private/test/StorageTestMixin.py | Synerty/peek-storage | 0 | 6617924 | <filename>peek_storage/_private/test/StorageTestMixin.py
import logging
import os
from peek_plugin_base.PeekVortexUtil import peekStorageName
logger = logging.getLogger(__name__)
class StorageTestMixin:
    """Mixin that wires a migrated peek-storage DB connection into a TestCase.

    Combine with unittest.TestCase: setUp() builds and migrates the
    connection, tearDown() closes every open session.
    """

    def __init__(self):
        # Live DbConnection; created lazily in setUp().
        self._dbConn = None

    def setUp(self) -> None:
        # Imports are kept local so importing this module alone does not
        # drag in the whole storage stack.
        # NOTE(review): setupDbConn is imported but never referenced below —
        # presumably needed for import-time side effects; confirm before removing.
        from peek_storage._private.storage import setupDbConn
        from peek_storage._private.storage.DeclarativeBase import metadata
        import peek_storage
        from peek_plugin_base.storage.DbConnection import DbConnection
        from peek_storage._private.service.PeekStorageConfig import PeekStorageConfig
        from peek_platform import PeekPlatformConfig
        PeekPlatformConfig.componentName = peekStorageName
        config = PeekStorageConfig()
        # Alembic migration scripts live alongside the peek_storage package.
        alembicDir = os.path.join(
            os.path.dirname(peek_storage._private.__file__),
            "alembic")
        self._dbConn = DbConnection(dbConnectString=config.dbConnectString,
                                    metadata=metadata,
                                    alembicDir=alembicDir,
                                    dbEngineArgs=config.dbEngineArgs,
                                    enableCreateAll=False,
                                    enableForeignKeys=False)
        # Bring the schema up to date before any test runs.
        self._dbConn.migrate()

    def tearDown(self) -> None:
        # Release every session opened during the test.
        self._dbConn.closeAllSessions()
| <filename>peek_storage/_private/test/StorageTestMixin.py
import logging
import os
from peek_plugin_base.PeekVortexUtil import peekStorageName
logger = logging.getLogger(__name__)
class StorageTestMixin:
def __init__(self):
self._dbConn = None
def setUp(self) -> None:
from peek_storage._private.storage import setupDbConn
from peek_storage._private.storage.DeclarativeBase import metadata
import peek_storage
from peek_plugin_base.storage.DbConnection import DbConnection
from peek_storage._private.service.PeekStorageConfig import PeekStorageConfig
from peek_platform import PeekPlatformConfig
PeekPlatformConfig.componentName = peekStorageName
config = PeekStorageConfig()
alembicDir = os.path.join(
os.path.dirname(peek_storage._private.__file__),
"alembic")
self._dbConn = DbConnection(dbConnectString=config.dbConnectString,
metadata=metadata,
alembicDir=alembicDir,
dbEngineArgs=config.dbEngineArgs,
enableCreateAll=False,
enableForeignKeys=False)
self._dbConn.migrate()
def tearDown(self) -> None:
self._dbConn.closeAllSessions()
| none | 1 | 1.906986 | 2 | |
app/__init__.py | tbicr/title_words | 0 | 6617925 | import datetime
import logging.handlers
import uuid
from flask import Flask, request, redirect, url_for, jsonify
from flask import abort
from flask.json import JSONEncoder
from flask_login import LoginManager, UserMixin, login_user, logout_user, login_required, current_user
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_oauth2_login import GoogleLogin
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import joinedload
from app.file_parser import parse
app = Flask(__name__, static_folder='../build', static_url_path='')
app.config.from_object('settings')
db = SQLAlchemy(app)
migrate = Migrate(app, db)
login_manager = LoginManager()
login_manager.init_app(app)
google_login = GoogleLogin(app)
stream_handler = logging.StreamHandler()
stream_handler.setLevel(logging.INFO)
app.logger.addHandler(stream_handler)
class ModelJSONEncoder(JSONEncoder):
    """Flask JSON encoder aware of this app's model objects.

    Any object exposing a serialize() method is encoded through it;
    datetimes are rendered as ISO-8601 strings.
    """

    def default(self, obj):
        # Models (User, Title, Word, TitleWord) all provide serialize().
        if hasattr(obj, 'serialize'):
            return obj.serialize()
        if isinstance(obj, datetime.datetime):
            return obj.isoformat()
        # Fall back to the stock encoder (raises TypeError for unknown types).
        return super().default(obj)
app.json_encoder = ModelJSONEncoder
class User(UserMixin, db.Model):
id = db.Column(db.Integer, primary_key=True)
uuid = db.Column(UUID(as_uuid=True), default=uuid.uuid4, unique=True, nullable=False)
email = db.Column(db.String(64), unique=True, nullable=False)
def serialize(self):
return {
'uuid': self.uuid,
'email': self.email,
}
class Title(db.Model):
id = db.Column(db.Integer, primary_key=True)
uuid = db.Column(UUID(as_uuid=True), default=uuid.uuid4, unique=True, nullable=False)
user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)
user = db.relationship('User', backref=db.backref('titles'))
name = db.Column(db.String(32), nullable=False)
total_words_count = db.Column(db.Integer, default=0, nullable=False)
unique_words_count = db.Column(db.Integer, default=0, nullable=False)
date_added = db.Column(db.DateTime, default=datetime.datetime.utcnow, nullable=False)
def serialize(self):
return {
'uuid': self.uuid,
'name': self.name,
'total_words': self.total_words_count,
'unique_words': self.unique_words_count,
'date_added': self.date_added,
}
class TitleWord(db.Model):
title_id = db.Column(db.Integer, db.ForeignKey('title.id'), primary_key=True)
title = db.relationship('Title', backref=db.backref('words'))
word_id = db.Column(db.Integer, db.ForeignKey('word.id'), primary_key=True)
word = db.relationship('Word')
times = db.Column(db.Integer, default=0, nullable=False)
def serialize(self):
return {
'uuid': self.word.uuid,
'name': self.word.word,
'times': self.times,
'known': self.word.known,
'is_name': self.word.is_name,
'is_toponym': self.word.is_toponym,
'date_added': self.word.date_added,
'date_known': self.word.date_known,
}
class Word(db.Model):
id = db.Column(db.Integer, primary_key=True)
uuid = db.Column(UUID(as_uuid=True), default=uuid.uuid4, unique=True, nullable=False)
user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)
user = db.relationship('User', backref=db.backref('words'))
word = db.Column(db.String(32), nullable=False)
times = db.Column(db.Integer, default=0, nullable=False)
known = db.Column(db.Boolean, default=False, nullable=False)
is_name = db.Column(db.Boolean, default=False, nullable=False)
is_toponym = db.Column(db.Boolean, default=False, nullable=False)
date_added = db.Column(db.DateTime, default=datetime.datetime.utcnow, nullable=False)
date_known = db.Column(db.DateTime, nullable=True)
__table_args__ = (db.UniqueConstraint('user_id', 'word'),)
def serialize(self):
return {
'uuid': self.uuid,
'name': self.word,
'times': self.times,
'known': self.known,
'is_name': self.is_name,
'is_toponym': self.is_toponym,
'date_added': self.date_added,
'date_known': self.date_known,
}
login_manager.login_view = 'login'
@login_manager.user_loader
def load_user(user_id):
return User.query.get(user_id)
@app.route('/')
@login_required
def index():
return redirect(url_for('static', filename='index.html'))
@app.route('/settings')
def settings():
return jsonify({
'translation': {
'from': app.config['GOOGLE_TRANSLATION_LANG_FROM'],
'to': app.config['GOOGLE_TRANSLATION_LANG_TO'],
'api_key': app.config['GOOGLE_TRANSLATION_API_KEY'],
}
})
@app.route('/words')
@login_required
def load_words():
return jsonify(Word.query.filter_by(user=current_user).order_by(Word.times.desc()).all())
@app.route('/titles')
@login_required
def load_titles():
return jsonify(Title.query.filter_by(user=current_user).order_by(Title.date_added.desc()).all())
@app.route('/titles/<title_uuid>/words')
@login_required
def load_title_words(title_uuid):
    """Return one of the current user's titles plus its per-title word stats.

    404s when the title does not exist or belongs to another user.
    """
    title = Title.query.filter_by(uuid=title_uuid, user=current_user).first()
    if not title:
        abort(404)
    return jsonify({
        'title': title,
        # joinedload('word') avoids one query per row when
        # TitleWord.serialize() dereferences .word; most frequent first.
        'words': (TitleWord.query.
                  filter_by(title=title).
                  options(joinedload('word')).
                  order_by(TitleWord.times.desc()).
                  all()),
    })
@app.route('/upload', methods=['POST'])
@login_required
def upload_new_title():
    """Ingest an uploaded text file as a new Title for the current user.

    Parses the file into a word -> count mapping, creates the Title,
    upserts the user's global Word rows (accumulating counts), links each
    word to the title via TitleWord, then commits and returns the Title.
    """
    file = request.files.get('file')
    # NOTE(review): no guard for a missing 'file' field — parse(None) /
    # file.filename would fail; confirm the client always sends it.
    stat = parse(file)
    title = Title(user=current_user, name=file.filename,
                  total_words_count=sum(stat.values()),
                  unique_words_count=len(stat))
    db.session.add(title)
    # Fetch the user's existing Word rows for all parsed words in one query.
    words = Word.query.filter(Word.user == current_user, Word.word.in_(stat.keys()))
    word_instance_map = {word.word: word for word in words}
    for word, count in stat.items():
        word_instance = word_instance_map.get(word)
        if word_instance:
            # Word already known for this user: accumulate the global count.
            word_instance.times += count
        else:
            word_instance = Word(user=current_user, word=word, times=count)
            db.session.add(word_instance)
        # Per-title occurrence count for this word.
        title_word = TitleWord(title=title, word=word_instance, times=count)
        db.session.add(title_word)
    db.session.commit()
    return jsonify(title)
@app.route('/words/<word_uuid>/mark-known', methods=['POST'])
@login_required
def mark_word_as_known(word_uuid):
word = Word.query.filter_by(uuid=word_uuid, user=current_user).first()
if not word:
abort(404)
word.known = True
db.session.add(word)
db.session.commit()
return jsonify(word)
@app.route('/words/<word_uuid>/mark-as-name', methods=['POST'])
@login_required
def mark_word_as_name(word_uuid):
word = Word.query.filter_by(uuid=word_uuid, user=current_user).first()
if not word:
abort(404)
word.is_name = True
db.session.add(word)
db.session.commit()
return jsonify(word)
@app.route('/words/<word_uuid>/mark-as-toponym', methods=['POST'])
@login_required
def mark_word_as_toponym(word_uuid):
word = Word.query.filter_by(uuid=word_uuid, user=current_user).first()
if not word:
abort(404)
word.is_toponym = True
db.session.add(word)
db.session.commit()
return jsonify(word)
@app.route('/login')
def login():
return redirect(google_login.authorization_url())
@app.route('/logout', methods=['POST'])
def logout():
logout_user()
return redirect(url_for('login'))
@google_login.login_success
def google_get_or_create_user(token, userinfo, **params):
email = userinfo['email']
user = User.query.filter_by(email=email).first()
if not user:
user = User(email=email)
db.session.add(user)
db.session.commit()
login_user(user)
return redirect(url_for('index'))
@google_login.login_failure
def google_login_failure(e):
return jsonify(error=str(e))
| import datetime
import logging.handlers
import uuid
from flask import Flask, request, redirect, url_for, jsonify
from flask import abort
from flask.json import JSONEncoder
from flask_login import LoginManager, UserMixin, login_user, logout_user, login_required, current_user
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_oauth2_login import GoogleLogin
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import joinedload
from app.file_parser import parse
app = Flask(__name__, static_folder='../build', static_url_path='')
app.config.from_object('settings')
db = SQLAlchemy(app)
migrate = Migrate(app, db)
login_manager = LoginManager()
login_manager.init_app(app)
google_login = GoogleLogin(app)
stream_handler = logging.StreamHandler()
stream_handler.setLevel(logging.INFO)
app.logger.addHandler(stream_handler)
class ModelJSONEncoder(JSONEncoder):
def default(self, obj):
if hasattr(obj, 'serialize'):
return obj.serialize()
if isinstance(obj, datetime.datetime):
return obj.isoformat()
return super().default(obj)
app.json_encoder = ModelJSONEncoder
class User(UserMixin, db.Model):
id = db.Column(db.Integer, primary_key=True)
uuid = db.Column(UUID(as_uuid=True), default=uuid.uuid4, unique=True, nullable=False)
email = db.Column(db.String(64), unique=True, nullable=False)
def serialize(self):
return {
'uuid': self.uuid,
'email': self.email,
}
class Title(db.Model):
id = db.Column(db.Integer, primary_key=True)
uuid = db.Column(UUID(as_uuid=True), default=uuid.uuid4, unique=True, nullable=False)
user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)
user = db.relationship('User', backref=db.backref('titles'))
name = db.Column(db.String(32), nullable=False)
total_words_count = db.Column(db.Integer, default=0, nullable=False)
unique_words_count = db.Column(db.Integer, default=0, nullable=False)
date_added = db.Column(db.DateTime, default=datetime.datetime.utcnow, nullable=False)
def serialize(self):
return {
'uuid': self.uuid,
'name': self.name,
'total_words': self.total_words_count,
'unique_words': self.unique_words_count,
'date_added': self.date_added,
}
class TitleWord(db.Model):
title_id = db.Column(db.Integer, db.ForeignKey('title.id'), primary_key=True)
title = db.relationship('Title', backref=db.backref('words'))
word_id = db.Column(db.Integer, db.ForeignKey('word.id'), primary_key=True)
word = db.relationship('Word')
times = db.Column(db.Integer, default=0, nullable=False)
def serialize(self):
return {
'uuid': self.word.uuid,
'name': self.word.word,
'times': self.times,
'known': self.word.known,
'is_name': self.word.is_name,
'is_toponym': self.word.is_toponym,
'date_added': self.word.date_added,
'date_known': self.word.date_known,
}
class Word(db.Model):
id = db.Column(db.Integer, primary_key=True)
uuid = db.Column(UUID(as_uuid=True), default=uuid.uuid4, unique=True, nullable=False)
user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)
user = db.relationship('User', backref=db.backref('words'))
word = db.Column(db.String(32), nullable=False)
times = db.Column(db.Integer, default=0, nullable=False)
known = db.Column(db.Boolean, default=False, nullable=False)
is_name = db.Column(db.Boolean, default=False, nullable=False)
is_toponym = db.Column(db.Boolean, default=False, nullable=False)
date_added = db.Column(db.DateTime, default=datetime.datetime.utcnow, nullable=False)
date_known = db.Column(db.DateTime, nullable=True)
__table_args__ = (db.UniqueConstraint('user_id', 'word'),)
def serialize(self):
return {
'uuid': self.uuid,
'name': self.word,
'times': self.times,
'known': self.known,
'is_name': self.is_name,
'is_toponym': self.is_toponym,
'date_added': self.date_added,
'date_known': self.date_known,
}
login_manager.login_view = 'login'
@login_manager.user_loader
def load_user(user_id):
return User.query.get(user_id)
@app.route('/')
@login_required
def index():
return redirect(url_for('static', filename='index.html'))
@app.route('/settings')
def settings():
return jsonify({
'translation': {
'from': app.config['GOOGLE_TRANSLATION_LANG_FROM'],
'to': app.config['GOOGLE_TRANSLATION_LANG_TO'],
'api_key': app.config['GOOGLE_TRANSLATION_API_KEY'],
}
})
@app.route('/words')
@login_required
def load_words():
return jsonify(Word.query.filter_by(user=current_user).order_by(Word.times.desc()).all())
@app.route('/titles')
@login_required
def load_titles():
return jsonify(Title.query.filter_by(user=current_user).order_by(Title.date_added.desc()).all())
@app.route('/titles/<title_uuid>/words')
@login_required
def load_title_words(title_uuid):
title = Title.query.filter_by(uuid=title_uuid, user=current_user).first()
if not title:
abort(404)
return jsonify({
'title': title,
'words': (TitleWord.query.
filter_by(title=title).
options(joinedload('word')).
order_by(TitleWord.times.desc()).
all()),
})
@app.route('/upload', methods=['POST'])
@login_required
def upload_new_title():
file = request.files.get('file')
stat = parse(file)
title = Title(user=current_user, name=file.filename,
total_words_count=sum(stat.values()),
unique_words_count=len(stat))
db.session.add(title)
words = Word.query.filter(Word.user == current_user, Word.word.in_(stat.keys()))
word_instance_map = {word.word: word for word in words}
for word, count in stat.items():
word_instance = word_instance_map.get(word)
if word_instance:
word_instance.times += count
else:
word_instance = Word(user=current_user, word=word, times=count)
db.session.add(word_instance)
title_word = TitleWord(title=title, word=word_instance, times=count)
db.session.add(title_word)
db.session.commit()
return jsonify(title)
@app.route('/words/<word_uuid>/mark-known', methods=['POST'])
@login_required
def mark_word_as_known(word_uuid):
word = Word.query.filter_by(uuid=word_uuid, user=current_user).first()
if not word:
abort(404)
word.known = True
db.session.add(word)
db.session.commit()
return jsonify(word)
@app.route('/words/<word_uuid>/mark-as-name', methods=['POST'])
@login_required
def mark_word_as_name(word_uuid):
word = Word.query.filter_by(uuid=word_uuid, user=current_user).first()
if not word:
abort(404)
word.is_name = True
db.session.add(word)
db.session.commit()
return jsonify(word)
@app.route('/words/<word_uuid>/mark-as-toponym', methods=['POST'])
@login_required
def mark_word_as_toponym(word_uuid):
word = Word.query.filter_by(uuid=word_uuid, user=current_user).first()
if not word:
abort(404)
word.is_toponym = True
db.session.add(word)
db.session.commit()
return jsonify(word)
@app.route('/login')
def login():
return redirect(google_login.authorization_url())
@app.route('/logout', methods=['POST'])
def logout():
logout_user()
return redirect(url_for('login'))
@google_login.login_success
def google_get_or_create_user(token, userinfo, **params):
email = userinfo['email']
user = User.query.filter_by(email=email).first()
if not user:
user = User(email=email)
db.session.add(user)
db.session.commit()
login_user(user)
return redirect(url_for('index'))
@google_login.login_failure
def google_login_failure(e):
return jsonify(error=str(e))
| none | 1 | 2.195221 | 2 | |
app.py | IgnacioEscobar/mate-sup | 0 | 6617926 | <gh_stars>0
import sys
import tkinter as tk
import numpy as np
from tkinter import ttk
poly_creator = np.polynomial.polynomial
polinomio_interpolacion = []
puntosx = []
puntosy = []
estaElPunto00 = False
class PrintLogger():
    """Minimal file-like adapter that appends everything written to a Tk
    text widget, so it can stand in for sys.stdout."""

    def __init__(self, textbox):
        # Target widget; anything exposing insert(index, text) like tk.Text.
        self.textbox = textbox

    def write(self, text):
        # Always append at the end so output keeps console ordering.
        widget = self.textbox
        widget.insert(tk.END, text)

    def flush(self):
        # Nothing is buffered locally; present only to satisfy the
        # stream protocol expected by print()/sys.stdout users.
        pass
class Application(ttk.Frame):
def __init__(self, main_window):
super().__init__(main_window)
main_window.title("FINTER")
self.points = []
self.xValue = tk.IntVar()
self.yValue = tk.IntVar()
self.xLabel = tk.Label(self, text="Valor x")
self.xLabel.pack()
self.xInput = tk.Entry(self, text="Valor x", textvariable=self.xValue)
self.xInput.pack()
self.yLabel = tk.Label(self, text="Valor y")
self.yLabel.pack()
self.yInput = tk.Entry(self, text="Valor y", textvariable=self.yValue)
self.yInput.pack()
self.addButton = tk.Button(self, text="Agregar punto", command=self.addPoint)
self.addButton.configure(pady=10)
self.addButton.pack()
self.removeButton = tk.Button(self, text="Sacar punto", command=self.removePoint)
self.removeButton.configure(pady=10)
self.removeButton.pack()
# Log points
self.puntosListbox = tk.Listbox(self)
self.puntosListbox.pack()
self.methodsLabel = tk.Label(self, text="Metodos de calculo")
self.methodsLabel.pack()
self.methods = ["Lagrange", "Newton-Gregory progresivo", "Newton-Gregory regresivo"]
self.methodCombo = ttk.Combobox(self, values=self.methods)
self.methodCombo.pack()
self.checkButtonValue = tk.IntVar()
self.checkButton = tk.Checkbutton(self, text="Mostrar pasos de calculo", variable=self.checkButtonValue)
self.checkButton.pack()
self.kValue = tk.IntVar()
self.evalPoliLabel = tk.Label(self, text="Especializar el polinomio en valor K")
self.evalPoliLabel.pack()
self.evalPoliInput = tk.Entry(self, text="Valor K", textvariable=self.kValue)
self.evalPoliInput.pack()
self.calcButton = tk.Button(self, text="Calcular polinomio interpolante", command=self.calculateInterpolator)
self.calcButton.configure(pady=10)
self.calcButton.pack()
# Log to screen
self.logText = tk.Text(self)
self.logText.pack()
pl = PrintLogger(self.logText)
sys.stdout = pl
main_window.configure(pady=20, padx=50)
self.pack()
def addPoint(self):
point_to_add = {'x': self.xValue.get(), 'y': self.yValue.get()}
if {point_to_add['x']} == {0} and {point_to_add['y']} == {0}:
global estaElPunto00
estaElPunto00 = True
if point_to_add in self.points:
print(f"Ya agrego el punto ({point_to_add['x']}, {point_to_add['y']})")
else:
def take_x(_point):
return _point['x']
self.points.append(point_to_add)
self.points.sort(key=take_x)
global puntosx
puntosx = list(map(lambda _point: _point['x'], self.points))
global puntosy
puntosy = list(map(lambda _point: _point['y'], self.points))
self.puntosListbox.delete(0, tk.END)
for point in self.points:
self.puntosListbox.insert(tk.END, f"({point['x']}, {point['y']})")
def removePoint(self):
if not self.points:
print("No hay puntos que borrar")
elif not self.puntosListbox.curselection():
print("No ha seleccionado ningun punto")
else:
point = self.puntosListbox.get(self.puntosListbox.curselection()) \
.replace('(', '') \
.replace(')', '') \
.replace(',', '') \
.split()
self.points = [
p for p in self.points if p['x'] != float(point[0]) or p['y'] != float(point[1])
]
self.puntosListbox.delete(self.puntosListbox.curselection())
global puntosx
puntosx = list(map(lambda p: p['x'], self.points))
global puntosy
puntosy = list(map(lambda p: p['y'], self.points))
def calculateInterpolator(self):
# TODO guille aca esta el valor del checkbox
hayQueMostrarCalculos = self.checkButtonValue.get()
if len(self.points) == 0:
print("Por favor ingrese puntos para sacar un polinomio interpolante")
else:
print("\nCalculando por", self.methodCombo.get(), "para puntos:", self.points);
if self.methodCombo.get() == "Lagrange":
armarPolinomioInterpolanteLAG(hayQueMostrarCalculos) # TODO cambiar esto por lo que dice la checkbox
evaluarPolinomioInterpolanteEn(self.kValue.get())
if self.methodCombo.get() == "Newton-Gregory progresivo":
sacarPolinomioProgresivo(sacarCoeficientesLagrange(puntosx, puntosy, hayQueMostrarCalculos),
hayQueMostrarCalculos) # TODO cambiar los booleanos por lo que dice la checkbox
evaluarPolinomioInterpolanteEn(self.kValue.get())
if self.methodCombo.get() == "Newton-Gregory regresivo":
sacarPolinomioRegresivo(sacarCoeficientesLagrange(puntosx, puntosy, hayQueMostrarCalculos),
hayQueMostrarCalculos) # TODO cambiar los booleanos por lo que dice la checkbox
evaluarPolinomioInterpolanteEn(self.kValue.get())
pass
def sacarCoeficientesLagrange(puntos_x, puntos_y, hayQueMostrarCalculos):
    """Build the divided-difference table for the given points.

    Returns an (n+1) x n matrix where row 0 holds the x values, row 1 the
    y values, and row k+1 the k-th order divided differences (unused cells
    remain 0).  When hayQueMostrarCalculos is true, each difference order
    is echoed to stdout.
    """
    n = len(puntos_x)
    tabla = [[0] * n for _ in range(n + 1)]
    # Seed the table with the raw points.
    for idx in range(n):
        tabla[0][idx] = puntos_x[idx]
        tabla[1][idx] = puntos_y[idx]
    if n > 1:
        # First-order differences: (y[j+1] - y[j]) / (x[j+1] - x[j]).
        mostrados = []
        for j in range(n - 1):
            tabla[2][j] = (tabla[1][j + 1] - tabla[1][j]) / (tabla[0][j + 1] - tabla[0][j])
            mostrados.append(str(tabla[2][j]))
        if hayQueMostrarCalculos:
            print("\nLos valores ꕔ1 Son: " + ", ".join(mostrados) + ", ")
    # Higher orders: row `orden` holds the (orden-1)-th differences and has
    # n - orden + 1 meaningful entries, anchored at x[j] .. x[j+orden-1].
    for orden in range(3, n + 1):
        mostrados = []
        for j in range(n - orden + 1):
            tabla[orden][j] = (tabla[orden - 1][j + 1] - tabla[orden - 1][j]) / (
                tabla[0][orden - 1 + j] - tabla[0][j])
            mostrados.append(str(tabla[orden][j]))
        if hayQueMostrarCalculos:
            print("Los valores ꕔ" + str(orden - 1) + " Son: " + ", ".join(mostrados) + ", ")
    return tabla
def sacarPolinomioProgresivo(matriz_coeficientes, hayQueMostrarCalculos):
    """Extract the forward Newton-Gregory coefficients and build the polynomial.

    Takes the first entry of each difference row (the diagonal anchored at
    the first point) from the divided-difference table and hands the list
    to armarPolinomioInterpolanteNGPROG.  Reads the module-level `puntosx`.
    """
    coeficientes_progresivo = [];
    for i in range(len(puntosx)):
        # Row i+1, column 0: the i-th order difference anchored at x0.
        coeficientes_progresivo.append(matriz_coeficientes[i + 1][0])
    if hayQueMostrarCalculos:
        print("\nLos coeficientes del polinomio progresivo son: " + str(coeficientes_progresivo))
    armarPolinomioInterpolanteNGPROG(coeficientes_progresivo)
def sacarPolinomioRegresivo(matriz_coeficientes, hayQueMostrarCalculos):
    """Extract the backward Newton-Gregory coefficients and build the polynomial.

    Walks the opposite diagonal of the divided-difference table (the
    differences ending at the last point) and hands the list to
    armarPolinomioInterpolanteNGREG.  Reads the module-level `puntosx`.
    """
    coeficientes_progresivo = [];
    for i in range(len(puntosx)):
        # Row i+1, last meaningful column: i-th order difference ending at x_n.
        coeficientes_progresivo.append(matriz_coeficientes[i + 1][len(puntosx) - 1 - i])
    if hayQueMostrarCalculos:
        print("\nLos coeficientes del polinomio Regresivo son: " + str(coeficientes_progresivo))
    armarPolinomioInterpolanteNGREG(coeficientes_progresivo)
def armarPolinomioInterpolanteNGPROG(coeficientes):
    """Build the forward Newton-Gregory interpolating polynomial and store
    it (highest power first, np.polyval order) in the module global
    ``polinomio_interpolacion``.

    Reads the interpolation nodes from the module global ``puntosx``.
    Coefficient arrays are kept in numpy.polynomial "lowest power first"
    order until the final ``voltearArray`` flip.
    """
    polinomioDeRaices = [1]  # running product (x - x0)(x - x1)...(x - xi)
    polinomioDeRaices = np.resize(polinomioDeRaices, 1)
    polinomioInterpolante = [0]
    cantidadPuntos = len(coeficientes)
    for i in range(len(coeficientes) - 1):
        # (x - puntosx[i]) as coefficient array [-x_i, 1].
        polinomioRaicesDeEstaIteracion = poly_creator.polyfromroots([puntosx[i]])
        # Multiply the running product by (x - x_i):  x*P  plus  (-x_i)*P.
        poli1 = poly_creator.polymulx(polinomioDeRaices)
        poli2 = polinomioDeRaices * polinomioRaicesDeEstaIteracion[0]
        for q in range(len(poli2)):
            poli1[q] = poli1[q] + poli2[q]
        polinomioDeRaices = poli1
        # Scale by the (i+1)-th divided difference and accumulate.
        polinomioDeIteracion = polinomioDeRaices * coeficientes[i+1]
        for j in range(i + 1):
            polinomioDeIteracion[j] = polinomioDeIteracion[j] + polinomioInterpolante[j]
        polinomioInterpolante = polinomioDeIteracion
    # Constant term: the zeroth coefficient f[x0].
    polinomioInterpolante[0] = polinomioInterpolante[0] + coeficientes[0]
    mostrarPoliniomio(polinomioInterpolante, cantidadPuntos)
    # np.polyval expects highest power first, so reverse before storing.
    polinomioInterpolante = voltearArray(polinomioInterpolante, cantidadPuntos)
    global polinomio_interpolacion
    polinomio_interpolacion = polinomioInterpolante
def armarPolinomioInterpolanteNGREG(coeficientes):
    """Build the backward Newton-Gregory interpolating polynomial and store
    it (highest power first, np.polyval order) in the module global
    ``polinomio_interpolacion``.

    Reads the interpolation nodes from the module global ``puntosx``,
    walking them from the last node backwards.
    """
    polinomioDeRaices = [1]  # running product of (x - x_k) factors
    polinomioInterpolante = [0]
    cantidadPuntos = len(coeficientes)
    for i in range(len(coeficientes) - 1):
        if puntosx[(len(coeficientes) - 1 - i)] == 0:
            # Root at 0: (x - 0) is just a multiplication by x.
            polinomioDeRaices = poly_creator.polymulx(polinomioDeRaices)
        else:
            # NOTE(review): this branch mixes poly_creator helpers (lowest
            # power first) with np.polymul (highest power first); the special
            # case below seems to compensate when the running product is
            # exactly [0, 1] (plain x) -- confirm coefficient ordering.
            if polinomioDeRaices[0] == 0 and polinomioDeRaices[1] == 1 and len(polinomioDeRaices) == 2:
                polinomioDeRaices = poly_creator.polymulx(
                    poly_creator.polyfromroots([puntosx[(len(coeficientes) - 1 - i)]]))
            else:
                polinomioDeRaices = np.polymul(polinomioDeRaices,
                                               poly_creator.polyfromroots([puntosx[(len(coeficientes) - 1 - i)]]))
        # Scale by the (i+1)-th divided difference and accumulate.
        polinomioDeIteracion = coeficientes[i + 1] * polinomioDeRaices
        polinomioDeIteracion = np.array(polinomioDeIteracion)
        for j in range(i + 1):
            polinomioDeIteracion[j] = polinomioDeIteracion[j] + polinomioInterpolante[j]
        polinomioInterpolante = polinomioDeIteracion
    # Constant term: the zeroth coefficient.
    polinomioInterpolante[0] = polinomioInterpolante[0] + coeficientes[0]
    mostrarPoliniomio(polinomioInterpolante, cantidadPuntos)
    # np.polyval expects highest power first, so reverse before storing.
    polinomioInterpolante = voltearArray(polinomioInterpolante, cantidadPuntos)
    global polinomio_interpolacion
    polinomio_interpolacion = polinomioInterpolante
def armarPolinomioInterpolanteLAG(hayQueMostrarCalculos):
    """Build the Lagrange interpolating polynomial over the module globals
    ``puntosx`` / ``puntosy``.

    NOTE(review): the Lagrange accumulation below is computed (and its L_n
    basis polynomials optionally printed), but the final stored result is
    produced by delegating to the Newton-Gregory progressive path on the
    last line, so the locally accumulated polynomial is discarded -- looks
    like a deliberate workaround; confirm.
    """
    polinomioInterpolante = [0]
    polinomioInterpolante = np.resize(polinomioInterpolante, len(puntosy))
    cantidadPuntos = len(puntosy)
    for i in range(len(puntosy)):
        # Build the i-th Lagrange basis L_i = prod_{j!=i} (x - x_j)/(x_i - x_j).
        polinomioDeIteracion = [1]
        polinomioDeIteracion = np.resize( polinomioDeIteracion, len(puntosy))
        for j in range(len(puntosy)):
            if i == j:
                pass
            else:
                # (x - x_j) as [-x_j, 1]; its degree-1 coefficient is 1.
                numerador = poly_creator.polyfromroots([puntosx[j]])
                denominador = puntosx[i] - puntosx[j]
                # Multiply the running basis by (x - x_j): x*P plus (-x_j)*P.
                poli1 = polinomioDeIteracion * numerador[1]
                poli1 = poly_creator.polymulx(poli1)
                poli2 = polinomioDeIteracion * numerador[0]
                for q in range(len(poli2)):
                    poli1[q] = poli2[q] + poli1[q]
                poli1 = poli1 * (1/denominador)
                polinomioDeIteracion = poli1
        if hayQueMostrarCalculos:
            mostrarLn(polinomioDeIteracion, len(puntosy), i)
        # Weight the basis polynomial by y_i before accumulating.
        polinomioDeIteracion = polinomioDeIteracion * puntosy[i]
        polinomioDeIteracion = np.array(polinomioDeIteracion)
        if puntosx[i] == 0:
            # NOTE(review): terms whose node x_i == 0 are skipped from the
            # accumulation -- confirm this is intentional.
            pass
        else:
            for q in range(cantidadPuntos):
                polinomioDeIteracion[q] = polinomioDeIteracion[q] + polinomioInterpolante[q]
            polinomioInterpolante = polinomioDeIteracion
    # Delegate the actual stored result to the Newton-Gregory builder.
    sacarPolinomioProgresivo(sacarCoeficientesLagrange(puntosx, puntosy, False), False)
def voltearArray(arrayAVoltear, longitudArray):
    """Return the first ``longitudArray`` elements of the input in reverse
    order, always as a plain list."""
    return [arrayAVoltear[indice] for indice in range(longitudArray - 1, -1, -1)]
def mostrarPoliniomio(array, longitudArray):
    """Print the first ``longitudArray`` coefficients (lowest power first)
    as a human-readable polynomial: a0 + a1x^1 + a2x^2 + ..."""
    terminos = [str(array[0])]
    terminos += [str(array[grado]) + "x^" + str(grado) for grado in range(1, longitudArray)]
    print("\nEl polinomio de interpolacion es:")
    print(" + ".join(terminos))
def mostrarLn(array, longitudArray, n):
    """Print the n-th Lagrange basis polynomial L_n (coefficients lowest
    power first) in human-readable form."""
    terminos = [str(array[0])]
    terminos += [str(array[grado]) + "x^" + str(grado) for grado in range(1, longitudArray)]
    print("\nEl valor de L" + str(n) + " es: " + " + ".join(terminos))
def evaluarPolinomioInterpolanteEn(x):
    """Evaluate the current module-global interpolating polynomial at x and
    print the result (or a warning if no polynomial was built yet)."""
    if len(polinomio_interpolacion) > 0:
        # polinomio_interpolacion is stored highest power first (np.polyval order).
        valor = np.polyval(polinomio_interpolacion, x)
        print("\nEl valor del polinomio interpolante en el punto " + str(x) + " es: " + str(valor))
    else:
        print("\nPor favor primero cree el polinomio interpolante antes de intentar evaluar en algun punto")
if __name__ == '__main__':
    # Build the Tk root window, mount the FINTER application frame and
    # hand control over to the Tk event loop.
    main_window = tk.Tk()
    app = Application(main_window)
    app.mainloop()
| import sys
import tkinter as tk
import numpy as np
from tkinter import ttk
# numpy's polynomial helpers (coefficient arrays, lowest power first).
poly_creator = np.polynomial.polynomial
# Last interpolating polynomial built, highest power first (np.polyval order).
polinomio_interpolacion = []
# Current interpolation nodes/values; kept sorted by x by Application.addPoint.
puntosx = []
puntosy = []
# Set when the user enters the point (0, 0); written in addPoint, not read here.
estaElPunto00 = False
class PrintLogger:
    """File-like adapter that redirects writes into a Tk Text widget.

    Instances can be assigned to ``sys.stdout`` so that ``print()`` output
    appears in the GUI log box instead of the console.
    """

    def __init__(self, textbox):
        # Anything with an ``insert(index, text)`` method (e.g. tk.Text).
        self.textbox = textbox

    def write(self, text):
        # Append at the end of the widget, terminal-style.
        self.textbox.insert(tk.END, text)

    def flush(self):
        # Required by the file protocol; the Text widget needs no flushing.
        pass
class Application(ttk.Frame):
    """Main FINTER window: collect (x, y) points, pick an interpolation
    method and build/evaluate the interpolating polynomial.

    The point list is mirrored into the module globals ``puntosx`` /
    ``puntosy`` because the interpolation functions read them directly.
    """
    def __init__(self, main_window):
        super().__init__(main_window)
        main_window.title("FINTER")
        # Points entered so far, each {'x': int, 'y': int}, kept sorted by x.
        self.points = []
        self.xValue = tk.IntVar()
        self.yValue = tk.IntVar()
        self.xLabel = tk.Label(self, text="Valor x")
        self.xLabel.pack()
        self.xInput = tk.Entry(self, text="Valor x", textvariable=self.xValue)
        self.xInput.pack()
        self.yLabel = tk.Label(self, text="Valor y")
        self.yLabel.pack()
        self.yInput = tk.Entry(self, text="Valor y", textvariable=self.yValue)
        self.yInput.pack()
        self.addButton = tk.Button(self, text="Agregar punto", command=self.addPoint)
        self.addButton.configure(pady=10)
        self.addButton.pack()
        self.removeButton = tk.Button(self, text="Sacar punto", command=self.removePoint)
        self.removeButton.configure(pady=10)
        self.removeButton.pack()
        # Listbox mirroring self.points for display and selection.
        self.puntosListbox = tk.Listbox(self)
        self.puntosListbox.pack()
        self.methodsLabel = tk.Label(self, text="Metodos de calculo")
        self.methodsLabel.pack()
        self.methods = ["Lagrange", "Newton-Gregory progresivo", "Newton-Gregory regresivo"]
        self.methodCombo = ttk.Combobox(self, values=self.methods)
        self.methodCombo.pack()
        self.checkButtonValue = tk.IntVar()
        self.checkButton = tk.Checkbutton(self, text="Mostrar pasos de calculo", variable=self.checkButtonValue)
        self.checkButton.pack()
        self.kValue = tk.IntVar()
        self.evalPoliLabel = tk.Label(self, text="Especializar el polinomio en valor K")
        self.evalPoliLabel.pack()
        self.evalPoliInput = tk.Entry(self, text="Valor K", textvariable=self.kValue)
        self.evalPoliInput.pack()
        self.calcButton = tk.Button(self, text="Calcular polinomio interpolante", command=self.calculateInterpolator)
        self.calcButton.configure(pady=10)
        self.calcButton.pack()
        # Redirect print() into the GUI log box.
        self.logText = tk.Text(self)
        self.logText.pack()
        pl = PrintLogger(self.logText)
        sys.stdout = pl
        main_window.configure(pady=20, padx=50)
        self.pack()
    def addPoint(self):
        """Add the point typed into the x/y entries (ignoring duplicates) and
        refresh the listbox and the puntosx/puntosy globals."""
        point_to_add = {'x': self.xValue.get(), 'y': self.yValue.get()}
        # Set-literal comparison; equivalent to checking both values == 0.
        if {point_to_add['x']} == {0} and {point_to_add['y']} == {0}:
            global estaElPunto00
            estaElPunto00 = True
        if point_to_add in self.points:
            print(f"Ya agrego el punto ({point_to_add['x']}, {point_to_add['y']})")
        else:
            def take_x(_point):
                return _point['x']
            self.points.append(point_to_add)
            self.points.sort(key=take_x)
            global puntosx
            puntosx = list(map(lambda _point: _point['x'], self.points))
            global puntosy
            puntosy = list(map(lambda _point: _point['y'], self.points))
            # Rebuild the listbox so it stays in sorted order too.
            self.puntosListbox.delete(0, tk.END)
            for point in self.points:
                self.puntosListbox.insert(tk.END, f"({point['x']}, {point['y']})")
    def removePoint(self):
        """Remove the listbox-selected point from self.points and the globals."""
        if not self.points:
            print("No hay puntos que borrar")
        elif not self.puntosListbox.curselection():
            print("No ha seleccionado ningun punto")
        else:
            # Parse "(x, y)" back out of the listbox entry text.
            point = self.puntosListbox.get(self.puntosListbox.curselection()) \
                .replace('(', '') \
                .replace(')', '') \
                .replace(',', '') \
                .split()
            self.points = [
                p for p in self.points if p['x'] != float(point[0]) or p['y'] != float(point[1])
            ]
            self.puntosListbox.delete(self.puntosListbox.curselection())
            global puntosx
            puntosx = list(map(lambda p: p['x'], self.points))
            global puntosy
            puntosy = list(map(lambda p: p['y'], self.points))
    def calculateInterpolator(self):
        """Run the method chosen in the combobox and evaluate the result at K."""
        # TODO(guille): the checkbox value is read here
        hayQueMostrarCalculos = self.checkButtonValue.get()
        if len(self.points) == 0:
            print("Por favor ingrese puntos para sacar un polinomio interpolante")
        else:
            print("\nCalculando por", self.methodCombo.get(), "para puntos:", self.points);
            if self.methodCombo.get() == "Lagrange":
                armarPolinomioInterpolanteLAG(hayQueMostrarCalculos)  # TODO: switch on the checkbox value
                evaluarPolinomioInterpolanteEn(self.kValue.get())
            if self.methodCombo.get() == "Newton-Gregory progresivo":
                sacarPolinomioProgresivo(sacarCoeficientesLagrange(puntosx, puntosy, hayQueMostrarCalculos),
                                         hayQueMostrarCalculos)  # TODO: replace the booleans with the checkbox value
                evaluarPolinomioInterpolanteEn(self.kValue.get())
            if self.methodCombo.get() == "Newton-Gregory regresivo":
                sacarPolinomioRegresivo(sacarCoeficientesLagrange(puntosx, puntosy, hayQueMostrarCalculos),
                                        hayQueMostrarCalculos)  # TODO: replace the booleans with the checkbox value
                evaluarPolinomioInterpolanteEn(self.kValue.get())
        pass
def sacarCoeficientesLagrange(puntos_x, puntos_y, hayQueMostrarCalculos):
    """Build Newton's divided-difference table for the given points.

    Returns an (n+1) x n matrix: row 0 holds the x values, row 1 the y
    values, and row k (k >= 2) the divided differences of order k-1
    (unused cells stay 0).

    NOTE(review): despite the name, these are Newton-Gregory divided
    differences, not Lagrange coefficients.
    """
    cantidad_puntos = len(puntos_x);
    matriz_coeficientes = [[0 for x in range(cantidad_puntos)] for y in range(cantidad_puntos + 1)]
    cantidad_iteraciones = cantidad_puntos - 2
    for i in range(cantidad_puntos):  # seed the table with the input points
        matriz_coeficientes[0][i] = puntos_x[i]
        matriz_coeficientes[1][i] = puntos_y[i]
    numerosAMostrar = ""
    if cantidad_puntos > 1:
        # First-order differences: (y[i+1] - y[i]) / (x[i+1] - x[i]).
        for i in range(cantidad_puntos - 1):
            matriz_coeficientes[2][i] = (matriz_coeficientes[1][i + 1] - matriz_coeficientes[1][i]) / (
                    matriz_coeficientes[0][i + 1] - matriz_coeficientes[0][i])
            numerosAMostrar = numerosAMostrar + str(matriz_coeficientes[2][i]) + ", "
        if hayQueMostrarCalculos:
            print("\nLos valores ꕔ1" + " Son: " + numerosAMostrar)
    if cantidad_puntos > 2:
        # Higher-order differences; each order has one fewer entry.
        for i in range(3, cantidad_puntos + 1):
            numerosAMostrar = ""
            for j in range(cantidad_iteraciones):
                matriz_coeficientes[i][j] = (matriz_coeficientes[i - 1][j + 1] - matriz_coeficientes[i - 1][j]) / (
                        matriz_coeficientes[0][i - 1 + j] - matriz_coeficientes[0][j])
                numerosAMostrar = numerosAMostrar + str(matriz_coeficientes[i][j]) + ", "
            cantidad_iteraciones = cantidad_iteraciones - 1
            if hayQueMostrarCalculos:
                print("Los valores ꕔ" + str(i - 1) + " Son: " + numerosAMostrar)
    return matriz_coeficientes
def sacarPolinomioProgresivo(matriz_coeficientes, hayQueMostrarCalculos):
    """Extract the forward (progressive) Newton-Gregory coefficients.

    Takes the first entry of rows 1..n of the divided-difference table
    built by sacarCoeficientesLagrange and hands them to
    armarPolinomioInterpolanteNGPROG to build the polynomial.

    :param matriz_coeficientes: (n+1) x n table; row 0 = x values,
        row 1 = y values, row k >= 2 = differences of order k-1.
    :param hayQueMostrarCalculos: when truthy, print the coefficients.
    """
    # Number of interpolation points == number of columns in the table
    # (derived from the table itself rather than the module global).
    cantidad_puntos = len(matriz_coeficientes[0])
    coeficientes_progresivo = []
    for i in range(cantidad_puntos):
        # The forward form uses the leftmost value of every difference order.
        coeficientes_progresivo.append(matriz_coeficientes[i + 1][0])
    if hayQueMostrarCalculos:
        print("\nLos coeficientes del polinomio progresivo son: " + str(coeficientes_progresivo))
    armarPolinomioInterpolanteNGPROG(coeficientes_progresivo)
def sacarPolinomioRegresivo(matriz_coeficientes, hayQueMostrarCalculos):
    """Extract the backward (regressive) Newton-Gregory coefficients.

    Walks the right edge (anti-diagonal) of the divided-difference table
    and hands the coefficients to armarPolinomioInterpolanteNGREG.

    :param matriz_coeficientes: (n+1) x n table; row 0 = x values,
        row 1 = y values, row k >= 2 = differences of order k-1.
    :param hayQueMostrarCalculos: when truthy, print the coefficients.
    """
    # Number of interpolation points == number of columns in the table.
    cantidad_puntos = len(matriz_coeficientes[0])
    # Local was previously misnamed "coeficientes_progresivo".
    coeficientes_regresivo = []
    for i in range(cantidad_puntos):
        # The backward form takes the rightmost surviving entry per order.
        coeficientes_regresivo.append(matriz_coeficientes[i + 1][cantidad_puntos - 1 - i])
    if hayQueMostrarCalculos:
        print("\nLos coeficientes del polinomio Regresivo son: " + str(coeficientes_regresivo))
    armarPolinomioInterpolanteNGREG(coeficientes_regresivo)
def armarPolinomioInterpolanteNGPROG(coeficientes):
    """Build the forward Newton-Gregory interpolating polynomial and store
    it (highest power first, np.polyval order) in the module global
    ``polinomio_interpolacion``.

    Reads the interpolation nodes from the module global ``puntosx``.
    Coefficient arrays are kept in numpy.polynomial "lowest power first"
    order until the final ``voltearArray`` flip.
    """
    polinomioDeRaices = [1]  # running product (x - x0)(x - x1)...(x - xi)
    polinomioDeRaices = np.resize(polinomioDeRaices, 1)
    polinomioInterpolante = [0]
    cantidadPuntos = len(coeficientes)
    for i in range(len(coeficientes) - 1):
        # (x - puntosx[i]) as coefficient array [-x_i, 1].
        polinomioRaicesDeEstaIteracion = poly_creator.polyfromroots([puntosx[i]])
        # Multiply the running product by (x - x_i):  x*P  plus  (-x_i)*P.
        poli1 = poly_creator.polymulx(polinomioDeRaices)
        poli2 = polinomioDeRaices * polinomioRaicesDeEstaIteracion[0]
        for q in range(len(poli2)):
            poli1[q] = poli1[q] + poli2[q]
        polinomioDeRaices = poli1
        # Scale by the (i+1)-th divided difference and accumulate.
        polinomioDeIteracion = polinomioDeRaices * coeficientes[i+1]
        for j in range(i + 1):
            polinomioDeIteracion[j] = polinomioDeIteracion[j] + polinomioInterpolante[j]
        polinomioInterpolante = polinomioDeIteracion
    # Constant term: the zeroth coefficient f[x0].
    polinomioInterpolante[0] = polinomioInterpolante[0] + coeficientes[0]
    mostrarPoliniomio(polinomioInterpolante, cantidadPuntos)
    # np.polyval expects highest power first, so reverse before storing.
    polinomioInterpolante = voltearArray(polinomioInterpolante, cantidadPuntos)
    global polinomio_interpolacion
    polinomio_interpolacion = polinomioInterpolante
def armarPolinomioInterpolanteNGREG(coeficientes):
    """Build the backward Newton-Gregory interpolating polynomial and store
    it (highest power first, np.polyval order) in the module global
    ``polinomio_interpolacion``.

    Reads the interpolation nodes from the module global ``puntosx``,
    walking them from the last node backwards.
    """
    polinomioDeRaices = [1]  # running product of (x - x_k) factors
    polinomioInterpolante = [0]
    cantidadPuntos = len(coeficientes)
    for i in range(len(coeficientes) - 1):
        if puntosx[(len(coeficientes) - 1 - i)] == 0:
            # Root at 0: (x - 0) is just a multiplication by x.
            polinomioDeRaices = poly_creator.polymulx(polinomioDeRaices)
        else:
            # NOTE(review): this branch mixes poly_creator helpers (lowest
            # power first) with np.polymul (highest power first); the special
            # case below seems to compensate when the running product is
            # exactly [0, 1] (plain x) -- confirm coefficient ordering.
            if polinomioDeRaices[0] == 0 and polinomioDeRaices[1] == 1 and len(polinomioDeRaices) == 2:
                polinomioDeRaices = poly_creator.polymulx(
                    poly_creator.polyfromroots([puntosx[(len(coeficientes) - 1 - i)]]))
            else:
                polinomioDeRaices = np.polymul(polinomioDeRaices,
                                               poly_creator.polyfromroots([puntosx[(len(coeficientes) - 1 - i)]]))
        # Scale by the (i+1)-th divided difference and accumulate.
        polinomioDeIteracion = coeficientes[i + 1] * polinomioDeRaices
        polinomioDeIteracion = np.array(polinomioDeIteracion)
        for j in range(i + 1):
            polinomioDeIteracion[j] = polinomioDeIteracion[j] + polinomioInterpolante[j]
        polinomioInterpolante = polinomioDeIteracion
    # Constant term: the zeroth coefficient.
    polinomioInterpolante[0] = polinomioInterpolante[0] + coeficientes[0]
    mostrarPoliniomio(polinomioInterpolante, cantidadPuntos)
    # np.polyval expects highest power first, so reverse before storing.
    polinomioInterpolante = voltearArray(polinomioInterpolante, cantidadPuntos)
    global polinomio_interpolacion
    polinomio_interpolacion = polinomioInterpolante
def armarPolinomioInterpolanteLAG(hayQueMostrarCalculos):
    """Build the Lagrange interpolating polynomial over the module globals
    ``puntosx`` / ``puntosy``.

    NOTE(review): the Lagrange accumulation below is computed (and its L_n
    basis polynomials optionally printed), but the final stored result is
    produced by delegating to the Newton-Gregory progressive path on the
    last line, so the locally accumulated polynomial is discarded -- looks
    like a deliberate workaround; confirm.
    """
    polinomioInterpolante = [0]
    polinomioInterpolante = np.resize(polinomioInterpolante, len(puntosy))
    cantidadPuntos = len(puntosy)
    for i in range(len(puntosy)):
        # Build the i-th Lagrange basis L_i = prod_{j!=i} (x - x_j)/(x_i - x_j).
        polinomioDeIteracion = [1]
        polinomioDeIteracion = np.resize( polinomioDeIteracion, len(puntosy))
        for j in range(len(puntosy)):
            if i == j:
                pass
            else:
                # (x - x_j) as [-x_j, 1]; its degree-1 coefficient is 1.
                numerador = poly_creator.polyfromroots([puntosx[j]])
                denominador = puntosx[i] - puntosx[j]
                # Multiply the running basis by (x - x_j): x*P plus (-x_j)*P.
                poli1 = polinomioDeIteracion * numerador[1]
                poli1 = poly_creator.polymulx(poli1)
                poli2 = polinomioDeIteracion * numerador[0]
                for q in range(len(poli2)):
                    poli1[q] = poli2[q] + poli1[q]
                poli1 = poli1 * (1/denominador)
                polinomioDeIteracion = poli1
        if hayQueMostrarCalculos:
            mostrarLn(polinomioDeIteracion, len(puntosy), i)
        # Weight the basis polynomial by y_i before accumulating.
        polinomioDeIteracion = polinomioDeIteracion * puntosy[i]
        polinomioDeIteracion = np.array(polinomioDeIteracion)
        if puntosx[i] == 0:
            # NOTE(review): terms whose node x_i == 0 are skipped from the
            # accumulation -- confirm this is intentional.
            pass
        else:
            for q in range(cantidadPuntos):
                polinomioDeIteracion[q] = polinomioDeIteracion[q] + polinomioInterpolante[q]
            polinomioInterpolante = polinomioDeIteracion
    # Delegate the actual stored result to the Newton-Gregory builder.
    sacarPolinomioProgresivo(sacarCoeficientesLagrange(puntosx, puntosy, False), False)
def voltearArray(arrayAVoltear, longitudArray):
    """Return the first ``longitudArray`` elements of the input in reverse
    order, always as a plain list."""
    return [arrayAVoltear[indice] for indice in range(longitudArray - 1, -1, -1)]
def mostrarPoliniomio(array, longitudArray):
    """Print the first ``longitudArray`` coefficients (lowest power first)
    as a human-readable polynomial: a0 + a1x^1 + a2x^2 + ..."""
    terminos = [str(array[0])]
    terminos += [str(array[grado]) + "x^" + str(grado) for grado in range(1, longitudArray)]
    print("\nEl polinomio de interpolacion es:")
    print(" + ".join(terminos))
def mostrarLn(array, longitudArray, n):
    """Print the n-th Lagrange basis polynomial L_n (coefficients lowest
    power first) in human-readable form."""
    terminos = [str(array[0])]
    terminos += [str(array[grado]) + "x^" + str(grado) for grado in range(1, longitudArray)]
    print("\nEl valor de L" + str(n) + " es: " + " + ".join(terminos))
def evaluarPolinomioInterpolanteEn(x):
    """Evaluate the current module-global interpolating polynomial at x and
    print the result (or a warning if no polynomial was built yet)."""
    if len(polinomio_interpolacion) > 0:
        # polinomio_interpolacion is stored highest power first (np.polyval order).
        valor = np.polyval(polinomio_interpolacion, x)
        print("\nEl valor del polinomio interpolante en el punto " + str(x) + " es: " + str(valor))
    else:
        print("\nPor favor primero cree el polinomio interpolante antes de intentar evaluar en algun punto")
if __name__ == '__main__':
    # Build the Tk root window, mount the FINTER application frame and
    # hand control over to the Tk event loop.
    main_window = tk.Tk()
    app = Application(main_window)
    app.mainloop()
tests/test_urls.py | t11e/werkzeug | 2 | 6617927 | # -*- coding: utf-8 -*-
"""
werkzeug.urls test
~~~~~~~~~~~~~~~~~~
Tests the URL features
:copyright: (c) 2009 by the Project Name Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from nose.tools import assert_raises
from werkzeug import url_quote, url_unquote, url_quote_plus, \
url_unquote_plus, url_encode, url_decode, url_fix, uri_to_iri, \
iri_to_uri
def test_quoting():
    """URL quoting: percent-encoding round-trips, plus-encoding and url_fix."""
    assert url_quote(u'\xf6\xe4\xfc') == '%C3%B6%C3%A4%C3%BC'
    assert url_unquote(url_quote(u'#%="\xf6')) == u'#%="\xf6'
    assert url_quote_plus('foo bar') == 'foo+bar'
    assert url_unquote_plus('foo+bar') == 'foo bar'
    # None-valued keys are dropped from the encoded query string.
    assert url_encode({'a': None, 'b': 'foo bar'}) == 'b=foo+bar'
    assert url_fix(u'http://de.wikipedia.org/wiki/Elf (Begriffsklärung)') == \
        'http://de.wikipedia.org/wiki/Elf%20%28Begriffskl%C3%A4rung%29'
def test_url_decoding():
    """Test the URL decoding (default and custom separators, key decoding)."""
    x = url_decode('foo=42&bar=23&uni=H%C3%A4nsel')
    assert x['foo'] == '42'
    assert x['bar'] == '23'
    assert x['uni'] == u'Hänsel'
    # Alternate pair separator.
    x = url_decode('foo=42;bar=23;uni=H%C3%A4nsel', separator=';')
    assert x['foo'] == '42'
    assert x['bar'] == '23'
    assert x['uni'] == u'Hänsel'
    # Keys are percent-decoded too when decode_keys is set.
    x = url_decode('%C3%9Ch=H%C3%A4nsel', decode_keys=True)
    assert x[u'Üh'] == u'Hänsel'
def test_url_encoding():
    """Test the URL encoding (docstring previously said "decoding")."""
    assert url_encode({'foo': 'bar 45'}) == 'foo=bar+45'
    d = {'foo': 1, 'bar': 23, 'blah': u'Hänsel'}
    # sort=True yields a deterministic key order; separator is configurable.
    assert url_encode(d, sort=True) == 'bar=23&blah=H%C3%A4nsel&foo=1'
    assert url_encode(d, sort=True, separator=';') == 'bar=23;blah=H%C3%A4nsel;foo=1'
def test_sorted_url_encode():
    """Optional sorted URL encoding, including a custom sort key."""
    assert url_encode({"a": 42, "b": 23, 1: 1, 2: 2}, sort=True) == '1=1&2=2&a=42&b=23'
    # A key= callable controls the sort order (case-insensitive here).
    assert url_encode({'A': 1, 'a': 2, 'B': 3, 'b': 4}, sort=True,
                      key=lambda x: x[0].lower()) == 'A=1&a=2&B=3&b=4'
def test_url_fixing():
    """URL fixing: unsafe characters get escaped, valid escapes survive."""
    x = url_fix(u'http://de.wikipedia.org/wiki/Elf (Begriffskl\xe4rung)')
    assert x == 'http://de.wikipedia.org/wiki/Elf%20%28Begriffskl%C3%A4rung%29'
    # Already-escaped sequences must not be double-escaped.
    x = url_fix('http://example.com/?foo=%2f%2f')
    assert x == 'http://example.com/?foo=%2f%2f'
def test_iri_support():
    """The IRI support: unicode <-> punycode/percent-encoded conversion."""
    # Passing the wrong text type in either direction must fail loudly.
    assert_raises(UnicodeError, uri_to_iri, u'http://föö.com/')
    assert_raises(UnicodeError, iri_to_uri, 'http://föö.com/')
    # Punycode hosts decode back to their unicode form.
    assert uri_to_iri('http://xn--n3h.net/') == u'http://\u2603.net/'
    assert uri_to_iri('http://%C3%BCser:p%C3%A4ssword@xn--n3h.net/p%C3%A5th') == \
        u'http://\xfcser:p\xe4ssword@\u2603.net/p\xe5th'
    assert iri_to_uri(u'http://☃.net/') == 'http://xn--n3h.net/'
    assert iri_to_uri(u'http://üser:pässword@☃.net/påth') == \
        'http://%C3%BCser:p%C3%A4ssword@xn--n3h.net/p%C3%A5th'
    # Already-escaped sequences survive the round trip untouched.
    assert uri_to_iri('http://test.com/%3Fmeh?foo=%26%2F') == \
        u'http://test.com/%3Fmeh?foo=%26%2F'
| # -*- coding: utf-8 -*-
"""
werkzeug.urls test
~~~~~~~~~~~~~~~~~~
Tests the URL features
:copyright: (c) 2009 by the Project Name Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from nose.tools import assert_raises
from werkzeug import url_quote, url_unquote, url_quote_plus, \
url_unquote_plus, url_encode, url_decode, url_fix, uri_to_iri, \
iri_to_uri
def test_quoting():
    """URL quoting: percent-encoding round-trips, plus-encoding and url_fix."""
    assert url_quote(u'\xf6\xe4\xfc') == '%C3%B6%C3%A4%C3%BC'
    assert url_unquote(url_quote(u'#%="\xf6')) == u'#%="\xf6'
    assert url_quote_plus('foo bar') == 'foo+bar'
    assert url_unquote_plus('foo+bar') == 'foo bar'
    # None-valued keys are dropped from the encoded query string.
    assert url_encode({'a': None, 'b': 'foo bar'}) == 'b=foo+bar'
    assert url_fix(u'http://de.wikipedia.org/wiki/Elf (Begriffsklärung)') == \
        'http://de.wikipedia.org/wiki/Elf%20%28Begriffskl%C3%A4rung%29'
def test_url_decoding():
    """Test the URL decoding (default and custom separators, key decoding)."""
    x = url_decode('foo=42&bar=23&uni=H%C3%A4nsel')
    assert x['foo'] == '42'
    assert x['bar'] == '23'
    assert x['uni'] == u'Hänsel'
    # Alternate pair separator.
    x = url_decode('foo=42;bar=23;uni=H%C3%A4nsel', separator=';')
    assert x['foo'] == '42'
    assert x['bar'] == '23'
    assert x['uni'] == u'Hänsel'
    # Keys are percent-decoded too when decode_keys is set.
    x = url_decode('%C3%9Ch=H%C3%A4nsel', decode_keys=True)
    assert x[u'Üh'] == u'Hänsel'
def test_url_encoding():
    """Test the URL encoding (docstring previously said "decoding")."""
    assert url_encode({'foo': 'bar 45'}) == 'foo=bar+45'
    d = {'foo': 1, 'bar': 23, 'blah': u'Hänsel'}
    # sort=True yields a deterministic key order; separator is configurable.
    assert url_encode(d, sort=True) == 'bar=23&blah=H%C3%A4nsel&foo=1'
    assert url_encode(d, sort=True, separator=';') == 'bar=23;blah=H%C3%A4nsel;foo=1'
def test_sorted_url_encode():
    """Optional sorted URL encoding, including a custom sort key."""
    assert url_encode({"a": 42, "b": 23, 1: 1, 2: 2}, sort=True) == '1=1&2=2&a=42&b=23'
    # A key= callable controls the sort order (case-insensitive here).
    assert url_encode({'A': 1, 'a': 2, 'B': 3, 'b': 4}, sort=True,
                      key=lambda x: x[0].lower()) == 'A=1&a=2&B=3&b=4'
def test_url_fixing():
    """URL fixing: unsafe characters get escaped, valid escapes survive."""
    x = url_fix(u'http://de.wikipedia.org/wiki/Elf (Begriffskl\xe4rung)')
    assert x == 'http://de.wikipedia.org/wiki/Elf%20%28Begriffskl%C3%A4rung%29'
    # Already-escaped sequences must not be double-escaped.
    x = url_fix('http://example.com/?foo=%2f%2f')
    assert x == 'http://example.com/?foo=%2f%2f'
def test_iri_support():
    """The IRI support: unicode <-> punycode/percent-encoded conversion."""
    # Passing the wrong text type in either direction must fail loudly.
    assert_raises(UnicodeError, uri_to_iri, u'http://föö.com/')
    assert_raises(UnicodeError, iri_to_uri, 'http://föö.com/')
    # Punycode hosts decode back to their unicode form.
    assert uri_to_iri('http://xn--n3h.net/') == u'http://\u2603.net/'
    assert uri_to_iri('http://%C3%BCser:p%C3%A4ssword@xn--n3h.net/p%C3%A5th') == \
        u'http://\xfcser:p\xe4ssword@\u2603.net/p\xe5th'
    assert iri_to_uri(u'http://☃.net/') == 'http://xn--n3h.net/'
    assert iri_to_uri(u'http://üser:pässword@☃.net/påth') == \
        'http://%C3%BCser:p%C3%A4ssword@xn--n3h.net/p%C3%A5th'
    # Already-escaped sequences survive the round trip untouched.
    assert uri_to_iri('http://test.com/%3Fmeh?foo=%26%2F') == \
        u'http://test.com/%3Fmeh?foo=%26%2F'
| en | 0.562399 | # -*- coding: utf-8 -*- werkzeug.urls test ~~~~~~~~~~~~~~~~~~ Tests the URL features :copyright: (c) 2009 by the Project Name Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. URL quoting Test the URL decoding Test the URL decoding Optional sorted URL encoding URL fixing The IRI support | 2.392434 | 2 |
src/encoded/tests/test_types_variant_consequence.py | dbmi-bgm/cgap-portal | 2 | 6617928 | <gh_stars>1-10
import pytest
pytestmark = [pytest.mark.working, pytest.mark.schema]
def test_calculated_variant_consequence_display_title(testapp, project, institution):
    """POSTed VariantConsequence items derive display_title from
    var_conseq_name (snake_case -> sentence case)."""
    # Maps SO term id -> [posted var_conseq_name, expected display_title].
    var_conseq_info = {
        'SO:0001893': ['transcript_ablation', 'Transcript ablation'],
        'SO:0001626': ['incomplete_terminal_codon_variant', 'Incomplete terminal codon variant'],
    }
    for vcid, vcnames in var_conseq_info.items():
        vc = {
            'project': project['@id'],
            'institution': institution['@id'],
            'var_conseq_id': vcid,
            'var_conseq_name': vcnames[0]
        }
        res = testapp.post_json('/variant_consequence', vc, status=201).json['@graph'][0]
        assert res.get('display_title') == vcnames[1]
| import pytest
pytestmark = [pytest.mark.working, pytest.mark.schema]
def test_calculated_variant_consequence_display_title(testapp, project, institution):
    """POSTed VariantConsequence items derive display_title from
    var_conseq_name (snake_case -> sentence case).

    Fix: the last line had dataset-export metadata fused onto it
    ("| none | 1 | ..."), which made the function a syntax error.
    """
    # Maps SO term id -> [posted var_conseq_name, expected display_title].
    var_conseq_info = {
        'SO:0001893': ['transcript_ablation', 'Transcript ablation'],
        'SO:0001626': ['incomplete_terminal_codon_variant', 'Incomplete terminal codon variant'],
    }
    for vcid, vcnames in var_conseq_info.items():
        vc = {
            'project': project['@id'],
            'institution': institution['@id'],
            'var_conseq_id': vcid,
            'var_conseq_name': vcnames[0]
        }
        res = testapp.post_json('/variant_consequence', vc, status=201).json['@graph'][0]
        assert res.get('display_title') == vcnames[1]
score/models.py | MdStart/imdbscoring | 0 | 6617929 | from django.db import models
from multiselectfield import MultiSelectField
from django.core.validators import MaxValueValidator, MinValueValidator
# Closed set of (stored value, human-readable label) pairs for ImdbScore.genres.
# Fix: the original tuple listed ('Family', 'Family') twice; choices must be
# unique, so the duplicate entry was removed.
GENRE_CHOICES = (('Adventure', 'Adventure'), ('Family', 'Family'), ('Fantasy', 'Fantasy'),
                 ('Musical', 'Musical'), ('Sci-Fi', 'Sci-Fi'), ('Drama', 'Drama'), ('War', 'War'),
                 ('Romance', 'Romance'), ('Comedy', 'Comedy'), ('Thriller', 'Thriller'),
                 ('Crime', 'Crime'), ('Horror', 'Horror'), ('History', 'History'),
                 ('Animation', 'Animation'), ('Short', 'Short'), ('Western', 'Western'),
                 ('Action', 'Action'), ('Biography', 'Biography'))
class ImdbScore(models.Model):
    """A movie record with its IMDb score, popularity and genre tags."""
    # Popularity on a 0-100 scale; the "99" suffix presumably comes from the
    # source dataset's naming -- confirm against the import pipeline.
    popularity99 = models.FloatField(
        default=0.0,
        validators=[MinValueValidator(0.0), MaxValueValidator(100.0)]
    )
    director = models.CharField(max_length=500, blank=False)
    # Multiple genres may be selected from GENRE_CHOICES.
    genres = MultiSelectField(choices=GENRE_CHOICES)
    # IMDb rating on the standard 0-10 scale.
    imdb_score = models.FloatField(
        default=0.0,
        validators=[MinValueValidator(0.0), MaxValueValidator(10.0)]
    )
    # Movie title; unique so each film appears once.
    name = models.CharField(max_length=500, blank=False, unique=True)
    created = models.DateTimeField(auto_now_add=True)
    def __str__(self):
        return self.name
    class Meta:
        # Newest records first by default.
        ordering = ['-created',]
| from django.db import models
from multiselectfield import MultiSelectField
from django.core.validators import MaxValueValidator, MinValueValidator
# Closed set of (stored value, human-readable label) pairs for ImdbScore.genres.
# Fix: the original tuple listed ('Family', 'Family') twice; choices must be
# unique, so the duplicate entry was removed.
GENRE_CHOICES = (('Adventure', 'Adventure'), ('Family', 'Family'), ('Fantasy', 'Fantasy'),
                 ('Musical', 'Musical'), ('Sci-Fi', 'Sci-Fi'), ('Drama', 'Drama'), ('War', 'War'),
                 ('Romance', 'Romance'), ('Comedy', 'Comedy'), ('Thriller', 'Thriller'),
                 ('Crime', 'Crime'), ('Horror', 'Horror'), ('History', 'History'),
                 ('Animation', 'Animation'), ('Short', 'Short'), ('Western', 'Western'),
                 ('Action', 'Action'), ('Biography', 'Biography'))
class ImdbScore(models.Model):
    """A movie record with its IMDb score, popularity and genre tags."""
    # Popularity on a 0-100 scale; the "99" suffix presumably comes from the
    # source dataset's naming -- confirm against the import pipeline.
    popularity99 = models.FloatField(
        default=0.0,
        validators=[MinValueValidator(0.0), MaxValueValidator(100.0)]
    )
    director = models.CharField(max_length=500, blank=False)
    # Multiple genres may be selected from GENRE_CHOICES.
    genres = MultiSelectField(choices=GENRE_CHOICES)
    # IMDb rating on the standard 0-10 scale.
    imdb_score = models.FloatField(
        default=0.0,
        validators=[MinValueValidator(0.0), MaxValueValidator(10.0)]
    )
    # Movie title; unique so each film appears once.
    name = models.CharField(max_length=500, blank=False, unique=True)
    created = models.DateTimeField(auto_now_add=True)
    def __str__(self):
        return self.name
    class Meta:
        # Newest records first by default.
        ordering = ['-created',]
| none | 1 | 2.258425 | 2 | |
mission/missions/examples/03-create-a-task.py | cuauv/software | 70 | 6617930 | from mission.framework.task import Task
class MyFirstTask(Task):
    """Minimal example Task: counts its runs and finishes after
    ``max_run_count`` iterations."""
    def on_first_run(self, *args, **kwargs):
        # Hook invoked once before the first on_run.
        self.run_count = 0
        print("MyFirstTask ran for the first time")
    def on_run(self, max_run_count, *args, **kwargs):
        # self.this_run_time is presumably provided by the Task base
        # class -- confirm in mission.framework.task.
        print("MyFirstTask has run at time {}".format(self.this_run_time))
        self.run_count += 1
        if self.run_count >= max_run_count:
            self.finish()
    def on_finish(self, *args, **kwargs):
        print("MyFirstTask has finished after {} runs.".format(self.run_count))
# Example instance: a task configured to finish after 10 runs.
run_10_times = MyFirstTask(10)
| from mission.framework.task import Task
class MyFirstTask(Task):
    """Minimal example Task: counts its runs and finishes after
    ``max_run_count`` iterations."""
    def on_first_run(self, *args, **kwargs):
        # Hook invoked once before the first on_run.
        self.run_count = 0
        print("MyFirstTask ran for the first time")
    def on_run(self, max_run_count, *args, **kwargs):
        # self.this_run_time is presumably provided by the Task base
        # class -- confirm in mission.framework.task.
        print("MyFirstTask has run at time {}".format(self.this_run_time))
        self.run_count += 1
        if self.run_count >= max_run_count:
            self.finish()
    def on_finish(self, *args, **kwargs):
        print("MyFirstTask has finished after {} runs.".format(self.run_count))
# Example instance: a task configured to finish after 10 runs.
run_10_times = MyFirstTask(10)
| none | 1 | 3.093315 | 3 | |
lib/python-lib/binary_search.py | takaaki82/Java-Lessons | 1 | 6617931 | <reponame>takaaki82/Java-Lessons
def binary_search(items, target):
    """Return True if target occurs in the sorted sequence items.

    Classic iterative binary search, O(log n).  ``items`` must be sorted
    in ascending order for the result to be meaningful.
    """
    lo = 0
    hi = len(items) - 1
    while lo <= hi:
        mid = (lo + hi) // 2
        if items[mid] == target:
            # Found: return immediately instead of carrying a "found" flag.
            return True
        if target < items[mid]:
            hi = mid - 1
        else:
            lo = mid + 1
    return False
def binary_search(items, target):
    """Return True if target occurs in the sorted sequence items.

    Classic iterative binary search, O(log n).  ``items`` must be sorted
    in ascending order for the result to be meaningful.
    """
    lo = 0
    hi = len(items) - 1
    while lo <= hi:
        mid = (lo + hi) // 2
        if items[mid] == target:
            # Found: return immediately instead of carrying a "found" flag.
            return True
        if target < items[mid]:
            hi = mid - 1
        else:
            lo = mid + 1
    return False
Chapter04/app/todo/__init__.py | Abhishek1373/Building-Serverless-Python-Web-Services-with-Zappa | 40 | 6617932 | from flask import Blueprint
from flask_restful import Api
from .resources import TodoResource
# Blueprint grouping all todo endpoints under one registration point.
todo = Blueprint('todo', __name__)
# catch_all_404s makes Flask-RESTful render 404s as JSON error payloads.
todo_api = Api(todo, catch_all_404s=True)
# Both routes share TodoResource: the collection URL and the per-item URL.
todo_api.add_resource(TodoResource, '/todos/', endpoint='todos')
todo_api.add_resource(TodoResource, '/todos/<todo_id>/', endpoint='todos_detail')
| from flask import Blueprint
from flask_restful import Api
from .resources import TodoResource
# Blueprint grouping all todo endpoints under one registration point.
todo = Blueprint('todo', __name__)
# catch_all_404s makes Flask-RESTful render 404s as JSON error payloads.
todo_api = Api(todo, catch_all_404s=True)
# Both routes share TodoResource: the collection URL and the per-item URL.
todo_api.add_resource(TodoResource, '/todos/', endpoint='todos')
todo_api.add_resource(TodoResource, '/todos/<todo_id>/', endpoint='todos_detail')
| none | 1 | 2.349747 | 2 | |
utils.py | Kamino666/Video-Captioning-Transformer | 14 | 6617933 | import torch
import numpy as np
import json
import random
import os
class EarlyStopping:
    """Stop training early when the validation loss stops improving.

    Modified from
    https://github.com/Bjarten/early-stopping-pytorch/blob/master/pytorchtools.py

    Internally the score is the *negated* validation loss ("higher is
    better"); callers keep passing the raw validation loss.
    """

    def __init__(self, patience=7, verbose=False, delta=0, path='checkpoint.pt', trace_func=print):
        """
        Args:
            patience (int): How long to wait after last time validation loss improved.
                            Default: 7
            verbose (bool): If True, prints a message for each validation loss improvement.
                            Default: False
            delta (float): Minimum change in the monitored quantity to qualify as an improvement.
                            Default: 0
            path (str): Path for the checkpoint to be saved to.
                            Default: 'checkpoint.pt'
            trace_func (function): trace print function.
                            Default: print
        """
        self.patience = patience
        self.verbose = verbose
        self.counter = 0            # calls since the last improvement
        self.best_score = None      # best (negated) validation loss seen so far
        self.early_stop = False     # flag consumers poll to stop training
        # Fix: np.Inf was removed in NumPy 2.0; np.inf is the supported spelling.
        self.val_loss_min = np.inf
        self.delta = delta
        self.path = path
        self.trace_func = trace_func

    def __call__(self, val_loss, model, do_save):
        # Negate so a *decreasing* loss becomes an *increasing* score.
        val_loss = -val_loss
        if self.best_score is None:
            self.best_score = val_loss
            self.save_checkpoint(val_loss, model, do_save)
        elif val_loss < self.best_score + self.delta:
            # No improvement: spend one unit of the patience budget.
            self.counter += 1
            self.trace_func(f'EarlyStopping counter: {self.counter} out of {self.patience}')
            if self.counter >= self.patience:
                self.early_stop = True
        else:
            self.best_score = val_loss
            self.save_checkpoint(val_loss, model, do_save)
            self.counter = 0

    def save_checkpoint(self, val_loss, model, do_save):
        """Record the new best score and optionally save the model state.

        NOTE: val_loss here is the negated score, so the logged values are
        negated losses (preserved from the original implementation).
        """
        if self.verbose:
            self.trace_func(
                f'Validation loss decreased ({self.val_loss_min:.6f} --> {val_loss:.6f}). Saving model ...')
        if do_save is True:
            torch.save(model.state_dict(), self.path)
        self.val_loss_min = val_loss
def generate_square_subsequent_mask(sz):
    """Return an (sz, sz) additive attention mask: 0.0 on and below the
    diagonal, -inf strictly above, blocking attention to future positions."""
    lleno = torch.full((sz, sz), float('-inf'))
    # Keep only the strict upper triangle at -inf; everything else becomes 0.
    return torch.triu(lleno, diagonal=1)
def show_input_shape(**kwargs):
    """Debug helper: print the shape of every tensor keyword argument.

    Accepts tensors directly or dicts mapping names to tensors; anything
    else is silently ignored.

    Fix: use isinstance instead of ``type(arg) is torch.Tensor`` so Tensor
    subclasses (e.g. torch.nn.Parameter) and dict subclasses are shown too.
    """
    print("\n***************************************")
    for name, arg in kwargs.items():
        if isinstance(arg, torch.Tensor):
            print(f"{name}: {arg.shape}")
        elif isinstance(arg, dict):
            print(f"{name}: ", end="")
            for k, v in arg.items():
                print(f"{k}:{v.shape}", end=" ")
            print("")
    print("***************************************\n")
class Config:
    """Thin wrapper over a JSON config file with pretty-printing and
    cross-field validation."""
    def __init__(self, path: str):
        """
        Load json config file from disk.
        :param path: The path of config file
        """
        with open(path) as f:
            self.data = json.load(f)

    def display(self, l: int = 90):
        """Pretty-print the config as boxed sections, ``l`` columns wide."""
        self.data: dict
        bold_line = "=" * l
        thin_list = "-" * l
        print(bold_line)
        print("{:^{}}".format("Config", l))
        print(bold_line)
        for mk, mv in self.data.items():
            # Section header, then its key/value rows (or the raw value).
            print("{:^{}}".format(f"{mk}", l))
            print(thin_list)
            if type(mv) != dict:  # NOTE: isinstance would also cover dict subclasses
                print(mv)
            else:
                for k, v in mv.items():
                    print("{:<20}| {}".format(k, v))
            print(bold_line)

    def check(self):
        """Validate cross-field constraints; raises ValueError on conflict."""
        model_cfg = self.data['model']
        # The "simple" video encoder only supports the plain captioning task.
        if model_cfg['video_encoder'].get('type', 'mme') == 'simple':
            if self.data['train']['task'] != "caption":
                raise ValueError("Simple video encoder does NOT support 'cross' task")
def setup_seed(seed):
    """Seed every RNG in use (python, numpy, torch CPU/CUDA) and force
    cuDNN into deterministic mode for reproducible runs."""
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    torch.cuda.manual_seed(seed)
    torch.cuda.manual_seed_all(seed)
    # Trade speed for reproducibility: disable autotuning and require
    # deterministic kernels.  Note that cudnn.enabled = False turns cuDNN
    # off entirely, which can slow training considerably.
    torch.backends.cudnn.benchmark = False
    torch.backends.cudnn.deterministic = True
    torch.backends.cudnn.enabled = False
def configure_hardware(args):
    """Select the compute backend (cpu / single gpu / DDP multi-gpu) from
    flags already set on ``args`` and annotate ``args`` with ``device``,
    ``is_main_rank`` and, for DDP, ``local_rank``.

    Raises:
        ValueError: if none of args.cpu / args.gpu / args.multi_gpu is set.
    """
    import torch.distributed as dist
    if args.cpu:
        args.device = torch.device('cpu')
        args.is_main_rank = True
        print("\033[1;33;40m Using CPU as backend \033[0m")
    elif args.gpu:
        args.device = torch.device('cuda')
        # args._multi_gpu = False
        args.is_main_rank = True
        print("\033[1;33;40m Using CUDA as backend \033[0m")
    elif args.multi_gpu:
        # torchrun/launch exports LOCAL_RANK for every worker process.
        local_rank = int(os.environ["LOCAL_RANK"])
        args.local_rank = local_rank
        args.is_main_rank = True if local_rank == 0 else False
        # args.world_size = 4
        torch.cuda.set_device(local_rank)
        dist.init_process_group(backend='nccl')  # nccl is the usual backend for GPU DDP
        args.device = torch.device("cuda", local_rank)
        if args.is_main_rank:
            print("\033[1;33;40m Using multiple CUDA as backend \033[0m")
    else:
        raise ValueError("No hardware configured")
    return args
| import torch
import numpy as np
import json
import random
import os
class EarlyStopping:
    """Early stops the training if validation loss doesn't improve after a given patience.

    Modified from
    https://github.com/Bjarten/early-stopping-pytorch/blob/master/pytorchtools.py

    Call the instance once per validation round with the current loss; after
    ``patience`` consecutive non-improving rounds, ``early_stop`` becomes True.
    """

    def __init__(self, patience=7, verbose=False, delta=0, path='checkpoint.pt', trace_func=print):
        """
        Args:
            patience (int): How long to wait after last time validation loss improved.
                            Default: 7
            verbose (bool): If True, prints a message for each validation loss improvement.
                            Default: False
            delta (float): Minimum change in the monitored quantity to qualify as an improvement.
                            Default: 0
            path (str): Path for the checkpoint to be saved to.
                            Default: 'checkpoint.pt'
            trace_func (function): trace print function.
                            Default: print
        """
        self.patience = patience
        self.verbose = verbose
        self.counter = 0            # rounds since the last improvement
        self.best_score = None      # best (negated) validation loss so far
        self.early_stop = False
        # np.Inf was removed in NumPy 2.0; np.inf is the supported spelling
        self.val_loss_min = np.inf
        self.delta = delta
        self.path = path
        self.trace_func = trace_func

    def __call__(self, val_loss, model, do_save):
        # Work with the negated loss so that "higher is better".
        val_loss = -val_loss
        if self.best_score is None:
            self.best_score = val_loss
            self.save_checkpoint(val_loss, model, do_save)
        elif val_loss < self.best_score + self.delta:
            # no (sufficient) improvement this round
            self.counter += 1
            self.trace_func(f'EarlyStopping counter: {self.counter} out of {self.patience}')
            if self.counter >= self.patience:
                self.early_stop = True
        else:
            self.best_score = val_loss
            self.save_checkpoint(val_loss, model, do_save)
            self.counter = 0

    def save_checkpoint(self, val_loss, model, do_save):
        """Saves model when validation loss decrease.

        ``val_loss`` here is the negated loss passed from ``__call__``, so the
        verbose message prints negated values — kept as in the original.
        """
        if self.verbose:
            self.trace_func(
                f'Validation loss decreased ({self.val_loss_min:.6f} --> {val_loss:.6f}). Saving model ...')
        if do_save is True:
            torch.save(model.state_dict(), self.path)
        self.val_loss_min = val_loss
def generate_square_subsequent_mask(sz):
    """Return an additive attention mask of shape (sz, sz): 0.0 on and below
    the diagonal, -inf strictly above it, blocking attention to future
    positions."""
    allowed = torch.tril(torch.ones(sz, sz)).bool()
    mask = torch.zeros(sz, sz)
    mask[~allowed] = float('-inf')
    return mask
def show_input_shape(**kwargs):
    """Debug helper: print the shape of every keyword argument.

    Tensors print as ``name: shape``; dicts print each entry's shape on one
    line.  Exact ``type(...) is`` checks are kept deliberately, so tensor
    subclasses are skipped just as in the original.
    """
    print("\n***************************************")
    for name, value in kwargs.items():
        if type(value) is torch.Tensor:
            print(f"{name}: {value.shape}")
        elif type(value) is dict:
            print(f"{name}: ", end="")
            for key, item in value.items():
                print(f"{key}:{item.shape}", end=" ")
            print("")
    print("***************************************\n")
class Config:
    """Wrapper for a JSON configuration document loaded from disk."""

    def __init__(self, path: str):
        """
        Load json config file from disk.
        :param path: The path of config file
        """
        with open(path) as handle:
            self.data = json.load(handle)

    def display(self, l: int = 90):
        """Print the configuration as a banner-framed table of width ``l``."""
        heavy = "=" * l
        light = "-" * l
        print(heavy)
        print(f"{'Config':^{l}}")
        print(heavy)
        for section, body in self.data.items():
            print(f"{section:^{l}}")
            print(light)
            # exact type check kept: only plain dicts are rendered as tables
            if type(body) is not dict:
                print(body)
            else:
                for key, value in body.items():
                    print(f"{key:<20}| {value}")
        print(heavy)

    def check(self):
        """Raise ValueError for unsupported encoder/task combinations."""
        encoder_cfg = self.data['model']['video_encoder']
        if encoder_cfg.get('type', 'mme') == 'simple' \
                and self.data['train']['task'] != "caption":
            raise ValueError("Simple video encoder does NOT support 'cross' task")
def setup_seed(seed):
    """Seed all random number generators and force deterministic cuDNN.

    Seeds Python's ``random``, NumPy, and PyTorch (CPU plus CUDA devices),
    then turns off cuDNN autotuning/kernels so repeated runs with the same
    seed reproduce the same results (at some speed cost).
    """
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    torch.cuda.manual_seed(seed)       # current CUDA device
    torch.cuda.manual_seed_all(seed)   # every CUDA device
    torch.backends.cudnn.benchmark = False      # no autotuner: stable kernel choice
    torch.backends.cudnn.deterministic = True   # deterministic cuDNN kernels only
    torch.backends.cudnn.enabled = False        # disable cuDNN entirely
def configure_hardware(args):
    """Select the compute backend from parsed CLI flags.

    Exactly one of ``args.cpu``, ``args.gpu`` or ``args.multi_gpu`` is
    expected to be set.  Mutates ``args`` in place — setting ``device``,
    ``is_main_rank`` and, for distributed runs, ``local_rank`` — and
    returns it.

    :raises ValueError: if none of the backend flags is set.
    """
    import torch.distributed as dist
    if args.cpu:
        args.device = torch.device('cpu')
        args.is_main_rank = True
        print("\033[1;33;40m Using CPU as backend \033[0m")
    elif args.gpu:
        args.device = torch.device('cuda')
        # args._multi_gpu = False
        args.is_main_rank = True
        print("\033[1;33;40m Using CUDA as backend \033[0m")
    elif args.multi_gpu:
        # LOCAL_RANK is supplied by the torch distributed launcher (torchrun)
        local_rank = int(os.environ["LOCAL_RANK"])
        args.local_rank = local_rank
        # only rank 0 acts as the main process (logging, checkpointing)
        args.is_main_rank = True if local_rank == 0 else False
        # args.world_size = 4
        torch.cuda.set_device(local_rank)
        dist.init_process_group(backend='nccl')  # NCCL is the backend generally used for GPUs
        args.device = torch.device("cuda", local_rank)
        if args.is_main_rank:
            print("\033[1;33;40m Using multiple CUDA as backend \033[0m")
    else:
        raise ValueError("No hardware configured")
    return args
| en | 0.668695 | Early stops the training if validation loss doesn't improve after a given patience. This class is modified from https://github.com/Bjarten/early-stopping-pytorch/blob/master/pytorchtools.py Args: patience (int): How long to wait after last time validation loss improved. Default: 7 verbose (bool): If True, prints a message for each validation loss improvement. Default: False delta (float): Minimum change in the monitored quantity to qualify as an improvement. Default: 0 path (str): Path for the checkpoint to be saved to. Default: 'checkpoint.pt' trace_func (function): trace print function. Default: print Saves model when validation loss decrease. Load json config file from disk. :param path: The path of config file # args._multi_gpu = False # args.world_size = 4 # 一般使用的后端为nccl | 3.084697 | 3 |
test/test_web_tiddler.py | angeluseve/tiddlyweb | 1 | 6617934 | """
Test that GETting a tiddler in some form.
"""
import sys
import os
sys.path.append('.')
from wsgi_intercept import httplib2_intercept
import wsgi_intercept
import httplib2
import simplejson
from base64 import b64encode
from re import match
from fixtures import muchdata, reset_textstore, teststore
from tiddlyweb.model.user import User
authorization = b64encode('cdent:cowpig')
bad_authorization = b64encode('cdent:cdent')
no_user_authorization = b64encode('foop:foop')
text_put_body=u"""modifier: JohnSmith
created:
modified: 200803030303
tags: tagone
Hello, I'm <NAME> \xbb and I have something to sell.
"""
def setup_module(module):
    """Module-level pytest fixture: wire the WSGI app to a fake HTTP host,
    rebuild the text store with sample data, and create the 'cdent' user
    used by the Basic-auth tests below.

    Ordering matters: the intercept must be installed before registering
    the host, and the store must be reset before it is populated.
    """
    from tiddlyweb.web import serve
    # we have to have a function that returns the callable,
    # Selector just _is_ the callable
    def app_fn():
        return serve.load_app()
    #wsgi_intercept.debuglevel = 1
    httplib2_intercept.install()
    wsgi_intercept.add_wsgi_intercept('our_test_domain', 8001, app_fn)
    module.store = teststore()
    reset_textstore()
    muchdata(module.store)
    user = User('cdent')
    user.set_password('<PASSWORD>')
    module.store.put(user)
    try:
        os.mkdir('.test_cache')
    except OSError:
        pass # we don't care if it already exists
def test_get_tiddler():
    # plain GET of an existing bag tiddler returns its stored text
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/tiddler8',
            method='GET')
    assert response['status'] == '200', 'response status should be 200'
    assert 'i am tiddler 8' in content, 'tiddler should be correct content, is %s' % content

def test_get_tiddler_revision():
    # an explicit revision URL returns that revision, tagged with its number
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/tiddler8/revisions/1',
            method='GET')
    assert response['status'] == '200', 'response status should be 200'
    assert 'i am tiddler 8' in content, 'tiddler should be correct content, is %s' % content
    assert 'revision="1"' in content

def test_get_missing_tiddler():
    # an unknown tiddler name is a 404
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/tiddler27',
            method='GET')
    assert response['status'] == '404', 'response status should be 404'

def test_get_missing_tiddler_revision():
    # a revision of an unknown tiddler is also a 404
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/tiddler27/revisions/99',
            method='GET')
    assert response['status'] == '404', 'response status should be 404'

def test_get_tiddler_missing_revision():
    # an unknown revision of an existing tiddler is a 404 too
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/tiddler8/revisions/99',
            method='GET')
    assert response['status'] == '404'

def test_get_tiddler_wiki():
    # the .wiki extension renders the tiddler as an HTML TiddlyWiki page
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/tiddler8.wiki',
            method='GET')
    assert response['status'] == '200', 'response status should be 200 is %s' % response['status']
    assert response['content-type'] == 'text/html; charset=UTF-8', 'response content-type should be text/html; chareset=UTF-8 is %s' % response['content-type']
    assert '<title>\ntiddler8\n</title>' in content
    assert 'i am tiddler 8' in content, 'tiddler should be correct content, is %s' % content
    # with no policy set the anonymous user holds every permission
    assert 'server.permissions="read, write, create, delete"' in content

def test_get_tiddler_revision_wiki():
    # .wiki rendering also works for an explicit revision
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/tiddler8/revisions/1.wiki',
            method='GET')
    assert response['status'] == '200', 'response status should be 200 is %s' % response['status']
    assert response['content-type'] == 'text/html; charset=UTF-8', 'response content-type should be text/html; chareset=UTF-8 is %s' % response['content-type']
    assert 'i am tiddler 8' in content, 'tiddler should be correct content, is %s' % content
    assert 'revision="1"' in content
def test_put_tiddler_txt():
    # PUT text/plain content, then read it back and compare text and tags
    http = httplib2.Http()
    encoded_body = text_put_body.encode('UTF-8')
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/TestOne',
            method='PUT', headers={'Content-Type': 'text/plain'}, body=encoded_body)
    assert response['status'] == '204', 'response status should be 204 is %s' % response['status']
    tiddler_url = response['location']
    assert tiddler_url == 'http://our_test_domain:8001/bags/bag0/tiddlers/TestOne', \
            'response location should be http://our_test_domain:8001/bags/bag0/tiddlers/TestOne is %s' \
            % tiddler_url
    response, content = http.request(tiddler_url, headers={'Accept': 'text/plain'})
    content = content.decode('UTF-8')
    contents = content.strip().rstrip().split('\n')
    texts = text_put_body.strip().rstrip().split('\n')
    assert contents[-1] == texts[-1] # text
    assert contents[-3] == texts[-3] # tags

def test_put_tiddler_txt_no_modified():
    """
    Putting a tiddler with no modifier should make a default.
    """
    # NOTE(review): despite the docstring this checks the *modified*
    # timestamp default; the PUT body below carries only a modifier field.
    http = httplib2.Http()
    encoded_body = text_put_body.encode('UTF-8')  # unused below; kept from the original
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/TestOne',
            method='PUT', headers={'Content-Type': 'text/plain'}, body='modifier: ArthurDent\n\nTowels')
    assert response['status'] == '204', 'response status should be 204 is %s' % response['status']
    tiddler_url = response['location']
    assert tiddler_url == 'http://our_test_domain:8001/bags/bag0/tiddlers/TestOne', \
            'response location should be http://our_test_domain:8001/bags/bag0/tiddlers/TestOne is %s' \
            % tiddler_url
    response, content = http.request(tiddler_url, headers={'Accept': 'text/plain'})
    content = content.decode('UTF-8')
    # the server stamped a current (2xxx) modified time
    assert 'modified: 2' in content
def test_put_tiddler_json():
    # PUT JSON content and read it back as JSON
    http = httplib2.Http()
    json = simplejson.dumps(dict(text='i fight for the users', tags=['tagone','tagtwo'], modifier='', modified='200805230303', created='200803030303'))
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/TestTwo',
            method='PUT', headers={'Content-Type': 'application/json'}, body=json)
    assert response['status'] == '204', 'response status should be 204 is %s' % response['status']
    tiddler_url = response['location']
    assert tiddler_url == 'http://our_test_domain:8001/bags/bag0/tiddlers/TestTwo', \
            'response location should be http://our_test_domain:8001/bags/bag0/tiddlers/TestTwo is %s' \
            % tiddler_url
    response, content = http.request(tiddler_url, headers={'Accept': 'application/json'})
    info = simplejson.loads(content)
    # last-modified derives from the tiddler's modified field above
    assert response['last-modified'] == 'Fri, 23 May 2008 03:03:00 GMT'
    assert info['title'] == 'TestTwo'
    assert info['text'] == 'i fight for the users'

def test_put_tiddler_json_bad_path():
    """
    / in tiddler title is an unresolved source of some confusion.
    """
    # URL-encoded ../ traversal in the title must not resolve: expect 404
    http = httplib2.Http()
    json = simplejson.dumps(dict(text='i fight for the users 2', tags=['tagone','tagtwo'], modifier='', modified='200803030303', created='200803030303'))
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/..%2F..%2F..%2F..%2FTestThree',
            method='PUT', headers={'Content-Type': 'application/json'}, body=json)
    assert response['status'] == '404', 'response status should be 404 is %s' % response['status']

def test_put_tiddler_json_no_bag():
    """
    / in tiddler title is an unresolved source of some confusion.
    """
    # NOTE(review): docstring copied from the previous test; this actually
    # checks that a PUT into a nonexistent bag yields 409 with a clear message.
    http = httplib2.Http()
    json = simplejson.dumps(dict(text='i fight for the users 2', tags=['tagone','tagtwo'], modifier='', modified='200803030303', created='200803030303'))
    response, content = http.request('http://our_test_domain:8001/bags/nobagheremaam/tiddlers/SomeKindOTiddler',
            method='PUT', headers={'Content-Type': 'application/json'}, body=json)
    assert response['status'] == '409'
    assert 'There is no bag named: nobagheremaam' in content
def test_get_tiddler_via_recipe():
    # the 'long' recipe resolves tiddler8 to bag28
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/recipes/long/tiddlers/tiddler8.json',
            method='GET')
    assert response['status'] == '200'
    tiddler_info = simplejson.loads(content)
    assert tiddler_info['bag'] == 'bag28'

def test_get_tiddler_etag_recipe():
    # a recipe GET carries a bag-scoped ETag: bag/title/revision
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/recipes/long/tiddlers/tiddler8.json',
            method='GET')
    assert response['status'] == '200'
    assert response['etag'] == 'bag28/tiddler8/1'
    tiddler_info = simplejson.loads(content)
    assert tiddler_info['bag'] == 'bag28'

def test_get_tiddler_etag_bag():
    # a direct bag GET carries the same bag/title/revision ETag
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags/bag28/tiddlers/tiddler8.json',
            method='GET')
    assert response['status'] == '200'
    assert response['etag'] == 'bag28/tiddler8/1'
    tiddler_info = simplejson.loads(content)
    assert tiddler_info['bag'] == 'bag28'
def test_get_tiddler_cached():
    """A second GET through an httplib2 cache is answered 304 from cache."""
    # start from an empty cache directory
    [os.unlink('.test_cache/%s' % x) for x in os.listdir('.test_cache')]
    http = httplib2.Http('.test_cache')
    response, content = http.request('http://our_test_domain:8001/bags/bag28/tiddlers/tiddler8.json',
            method='GET')
    assert response['status'] == '200'
    assert response['etag'] == 'bag28/tiddler8/1'
    assert not response.fromcache
    response, content = http.request('http://our_test_domain:8001/bags/bag28/tiddlers/tiddler8.json',
            method='GET')
    assert response['status'] == '304'
    assert response['etag'] == 'bag28/tiddler8/1'
    assert response.fromcache

def test_put_tiddler_cache_fakey():
    """A caching client that PUTs with a stale ETag gets 412 Precondition Failed."""
    [os.unlink('.test_cache/%s' % x) for x in os.listdir('.test_cache')]
    http_caching = httplib2.Http('.test_cache')
    http = httplib2.Http()
    json = simplejson.dumps(dict(text='i fight for the users 2', tags=['tagone','tagtwo'], modifier='', modified='200803030303', created='200803030303'))
    # first revision written (and cached) by the caching client
    response, content = http_caching.request('http://our_test_domain:8001/recipes/long/tiddlers/CashForCache',
            method='PUT', headers={'Content-Type': 'application/json'}, body=json)
    assert response['status'] == '204'
    assert response['etag'] == 'bag1/CashForCache/1'
    response, content = http_caching.request('http://our_test_domain:8001/recipes/long/tiddlers/CashForCache',
            method='GET', headers={'Accept': 'application/json'})
    assert response['status'] == '200'
    assert response['etag'] == 'bag1/CashForCache/1'
    # a non-caching client bumps the revision behind the cache's back
    response, content = http.request('http://our_test_domain:8001/recipes/long/tiddlers/CashForCache',
            method='PUT', headers={'Content-Type': 'application/json'}, body=json)
    assert response['status'] == '204'
    assert response['etag'] == 'bag1/CashForCache/2'
    # the caching client still holds .../1, so its conditional PUT must fail
    response, content = http_caching.request('http://our_test_domain:8001/recipes/long/tiddlers/CashForCache',
            method='PUT', headers={'Content-Type': 'application/json'}, body=json)
    assert response['status'] == '412'
def test_put_tiddler_via_recipe():
    """PUT through a recipe stores the tiddler in the recipe's matching bag
    (bag1); both the PUT and the follow-up GET carry the bag-scoped ETag."""
    http = httplib2.Http()
    json = simplejson.dumps(dict(text='i fight for the users 2', tags=['tagone','tagtwo'], modifier='', modified='200803030303', created='200803030303'))
    response, content = http.request('http://our_test_domain:8001/recipes/long/tiddlers/FantasticVoyage',
            method='PUT', headers={'Content-Type': 'application/json'}, body=json)
    assert response['status'] == '204'
    assert response['etag'] == 'bag1/FantasticVoyage/1'
    url = response['location']
    # BUGFIX: was "reponse, content = ..." (typo), so the final ETag assert
    # silently re-checked the PUT response instead of the GET response.
    response, content = http.request(url, method='GET', headers={'Accept': 'application/json'})
    tiddler_dict = simplejson.loads(content)
    assert tiddler_dict['bag'] == 'bag1'
    assert response['etag'] == 'bag1/FantasticVoyage/1'
def test_slash_in_etag():
    """If-Match ETags for a title containing / must use the %2F-encoded form."""
    http = httplib2.Http()
    json = simplejson.dumps(dict(text='i fight for the users', tags=['tagone','tagtwo'], modifier='', modified='200805230303', created='200803030303'))
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/Test%2FTwo',
            method='PUT', headers={'Content-Type': 'application/json'}, body=json)
    assert response['status'] == '204'
    # encoded slash in If-Match matches the current revision
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/Test%2FTwo',
            method='PUT', headers={'Content-Type': 'application/json', 'If-Match': 'bag0/Test%2FTwo/1'}, body=json)
    assert response['status'] == '204'
    # a literal slash makes the ETag ambiguous and must not match
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/Test%2FTwo',
            method='PUT', headers={'Content-Type': 'application/json', 'If-Match': 'bag0/Test/Two/2'}, body=json)
    assert response['status'] == '412'
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/Test%2FTwo',
            method='PUT', headers={'Content-Type': 'application/json', 'If-Match': 'bag0/Test%2FTwo/2'}, body=json)
    assert response['status'] == '204'

def test_paren_in_etag():
    """Parens appear literally in ETags; the %28/%29-encoded form must not match."""
    http = httplib2.Http()
    json = simplejson.dumps(dict(text='i fight for the users', tags=['tagone','tagtwo'], modifier='', modified='200805230303', created='200803030303'))
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/Test(Two)',
            method='PUT', headers={'Content-Type': 'application/json'}, body=json)
    assert response['status'] == '204'
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/Test(Two)',
            method='PUT', headers={'Content-Type': 'application/json', 'If-Match': 'bag0/Test(Two)/1'}, body=json)
    assert response['status'] == '204'
    # the percent-encoded form is a different string: no match
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/Test(Two)',
            method='PUT', headers={'Content-Type': 'application/json', 'If-Match': 'bag0/Test%28Two%29/2'}, body=json)
    assert response['status'] == '412'
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/Test(Two)',
            method='PUT', headers={'Content-Type': 'application/json', 'If-Match': 'bag0/Test(Two)/2'}, body=json)
    assert response['status'] == '204'
def test_get_tiddler_text_created():
    """
    Make sure the tiddler comes back to us as we expect.

    In the process confirm that Accept header processing is working
    as expect, by wanting xml (which we don't do), more than text/plain,
    which we do.
    """
    http = httplib2.Http()
    tiddler_url = 'http://our_test_domain:8001/bags/bag0/tiddlers/TestOne'
    response, content = http.request(tiddler_url, headers={'Accept': 'text/xml; q=1, text/plain'})
    content = content.decode('UTF-8')
    contents = content.strip().rstrip().split('\n')
    # CLEANUP: removed the unused "texts" local (a split of text_put_body);
    # the expectations below are literal values from the earlier PUT.
    assert contents[-1] == u'Towels' # text
    assert contents[-3] == u'tags: ' # tags
    # the server fills in a created timestamp when none was supplied
    # (raw string avoids the invalid-escape warning for \d on Python 3)
    assert match(r'created: \d{12}', contents[1])
def test_tiddler_bag_constraints():
    """Exercise the bag policy matrix (create/write/read) together with
    Basic auth handling: well-formed credentials, malformed header,
    wrong password, and unknown user all behave as expected."""
    encoded_body = text_put_body.encode('UTF-8')
    http = httplib2.Http()
    _put_policy('unreadable', dict(policy=dict(manage=['cdent'],read=['NONE'],write=['NONE'],create=['NONE'])))
    # try to create a tiddler and fail
    response, content = http.request('http://our_test_domain:8001/bags/unreadable/tiddlers/WroteOne',
            method='PUT', headers={'Content-Type': 'text/plain', 'Authorization': 'Basic %s' % authorization},
            body=encoded_body)
    assert response['status'] == '403'
    assert 'may not create' in content
    # create and succeed
    _put_policy('unreadable', dict(policy=dict(manage=['cdent'],read=['NONE'],write=['NONE'],create=['cdent'])))
    response, content = http.request('http://our_test_domain:8001/bags/unreadable/tiddlers/WroteOne',
            method='PUT', headers={'Content-Type': 'text/plain', 'Authorization': 'Basic %s' % authorization},
            body=encoded_body)
    assert response['status'] == '204'
    # fail when bad auth format (missing the "Basic " scheme prefix)
    _put_policy('unreadable', dict(policy=dict(manage=['cdent'],read=['NONE'],write=['NONE'],create=['cdent'])))
    response, content = http.request('http://our_test_domain:8001/bags/unreadable/tiddlers/WroteOne',
            method='PUT', headers={'Content-Type': 'text/plain', 'Authorization': '%s' % authorization},
            body=encoded_body)
    assert response['status'] == '403'
    # fail when bad auth info (wrong password)
    _put_policy('unreadable', dict(policy=dict(manage=['cdent'],read=['NONE'],write=['NONE'],create=['cdent'])))
    response, content = http.request('http://our_test_domain:8001/bags/unreadable/tiddlers/WroteOne',
            method='PUT', headers={'Content-Type': 'text/plain', 'Authorization': 'Basic %s' % bad_authorization},
            body=encoded_body)
    assert response['status'] == '403'
    # fail when bad user info (unknown user)
    _put_policy('unreadable', dict(policy=dict(manage=['cdent'],read=['NONE'],write=['NONE'],create=['cdent'])))
    response, content = http.request('http://our_test_domain:8001/bags/unreadable/tiddlers/WroteOne',
            method='PUT', headers={'Content-Type': 'text/plain', 'Authorization': 'Basic %s' % no_user_authorization},
            body=encoded_body)
    assert response['status'] == '403'
    # write and fail (tiddler already exists, write permission closed)
    response, content = http.request('http://our_test_domain:8001/bags/unreadable/tiddlers/WroteOne',
            method='PUT', headers={'Content-Type': 'text/plain', 'Authorization': 'Basic %s' % authorization},
            body=encoded_body)
    assert response['status'] == '403'
    assert 'may not write' in content
    # write and succeed
    _put_policy('unreadable', dict(policy=dict(manage=['cdent'],read=['NONE'],write=['cdent'],create=['NONE'])))
    response, content = http.request('http://our_test_domain:8001/bags/unreadable/tiddlers/WroteOne',
            method='PUT', headers={'Content-Type': 'text/plain', 'Authorization': 'Basic %s' % authorization},
            body=encoded_body)
    assert response['status'] == '204'
    # read and fail
    response, content = http.request('http://our_test_domain:8001/bags/unreadable/tiddlers/WroteOne',
            method='GET', headers={'Accept': 'text/plain', 'Authorization': 'Basic %s' % authorization})
    assert response['status'] == '403'
    assert 'may not read' in content
    # update the policy so we can read and GET the thing
    _put_policy('unreadable', dict(policy=dict(manage=['cdent'],read=['cdent'],write=['NONE'],delete=['NONE'])))
    response, content = http.request('http://our_test_domain:8001/bags/unreadable/tiddlers/WroteOne.wiki',
            method='GET', headers={'Accept': 'text/plain', 'Authorization': 'Basic %s' % authorization})
    assert response['status'] == '200'
    assert '<NAME>' in content
    assert 'server.permissions="read, create"' in content
def test_get_tiddler_via_recipe_with_perms():
    """Recipe GET/PUT honour the policy of the bag the tiddler resolves to."""
    # close bag28 entirely: even an anonymous recipe GET must fail
    _put_policy('bag28', dict(policy=dict(manage=['cdent'],read=['NONE'],write=['NONE'])))
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/recipes/long/tiddlers/tiddler8.json',
            method='GET')
    assert response['status'] == '403'
    assert 'may not read' in content
    # grant read to cdent: an authenticated GET now succeeds
    _put_policy('bag28', dict(policy=dict(manage=['cdent'],read=['cdent'],write=['NONE'])))
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/recipes/long/tiddlers/tiddler8.json',
            headers=dict(Authorization='Basic %s' % authorization), method='GET')
    assert response['status'] == '200'
    tiddler_info = simplejson.loads(content)
    assert tiddler_info['bag'] == 'bag28'
    # write remains closed, so even an authenticated PUT fails
    encoded_body = text_put_body.encode('UTF-8')
    response, content = http.request('http://our_test_domain:8001/recipes/long/tiddlers/tiddler8',
            method='PUT', headers={'Content-Type': 'text/plain', 'Authorization': 'Basic %s' % authorization},
            body=encoded_body)
    assert response['status'] == '403'
    assert 'may not write' in content
    # write granted to a different user: cdent's PUT still fails
    _put_policy('bag28', dict(policy=dict(manage=['cdent'],read=['cdent'],write=['nancy'])))
    encoded_body = text_put_body.encode('UTF-8')
    response, content = http.request('http://our_test_domain:8001/recipes/long/tiddlers/tiddler8',
            method='PUT', headers={'Content-Type': 'text/plain', 'Authorization': 'Basic %s' % authorization},
            body=encoded_body)
    assert response['status'] == '403'
    _put_policy('bag28', dict(policy=dict(manage=['cdent'],read=['cdent'],write=['cdent'])))
    encoded_body = text_put_body.encode('UTF-8')
    response, content = http.request('http://our_test_domain:8001/recipes/long/tiddlers/tiddler8',
            method='PUT', headers={'Content-Type': 'text/plain'},
            body=encoded_body)
    # when we PUT without permission there's no good way to handle auth
    # so we just forbid.
    assert response['status'] == '403'
def test_delete_tiddler_in_recipe():
    """DELETE through a recipe removes one bag's copy per call; several
    bags in the recipe hold tiddler8, so repeated deletes are needed
    before a 404."""
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/recipes/long/tiddlers/tiddler8',
            method='DELETE')
    assert response['status'] == '204'
    # there are multiple tiddler8s in the recipe
    response, content = http.request('http://our_test_domain:8001/recipes/long/tiddlers/tiddler8',
            method='DELETE')
    assert response['status'] == '204'
    response, content = http.request('http://our_test_domain:8001/recipes/long/tiddlers/tiddler8',
            method='DELETE')
    assert response['status'] == '204'
    response, content = http.request('http://our_test_domain:8001/recipes/long/tiddlers/tiddler8',
            method='DELETE')
    assert response['status'] == '204'
    response, content = http.request('http://our_test_domain:8001/recipes/long/tiddlers/tiddler8',
            method='DELETE')
    assert response['status'] == '404'

def test_delete_tiddler_in_bag():
    # deleting directly from a bag: a second delete is a 404
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/TestOne',
            method='DELETE')
    assert response['status'] == '204'
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/TestOne',
            method='DELETE')
    assert response['status'] == '404'

def test_delete_tiddler_etag():
    # DELETE honours If-Match: wrong revision 412, correct revision 204
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags/bag5/tiddlers/tiddler0',
            method='DELETE', headers={'If-Match': 'bag5/tiddler0/9'})
    assert response['status'] == '412'
    response, content = http.request('http://our_test_domain:8001/bags/bag5/tiddlers/tiddler0',
            method='DELETE', headers={'If-Match': 'bag5/tiddler0/1'})
    assert response['status'] == '204'
    response, content = http.request('http://our_test_domain:8001/bags/bag5/tiddlers/tiddler0',
            method='DELETE')
    assert response['status'] == '404'

def test_delete_tiddler_in_bag_perms():
    # DELETE requires the bag's delete permission (Basic auth as cdent)
    _put_policy('bag0', dict(policy=dict(manage=['cdent'],read=['cdent'],write=['cdent'],delete=['cdent'])))
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/tiddler0',
            method='DELETE')
    assert response['status'] == '403'
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/tiddler0',
            method='DELETE', headers={'Authorization': 'Basic %s' % authorization})
    assert response['status'] == '204'
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/tiddler0',
            method='DELETE', headers={'Authorization': 'Basic %s' % authorization})
    assert response['status'] == '404'
def test_tiddler_no_recipe():
    # the 'short' recipe includes no bag holding tiddler8, so it is a 404
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/recipes/short/tiddlers/tiddler8',
            method='GET')
    assert response['status'] == '404'
def test_binary_tiddler():
    """A tiddler PUT with a binary content type is served back verbatim
    with the same content type."""
    # FIX: use open() in a context manager — file() is a Python-2-only
    # builtin and the original handle was never closed (resource leak).
    with open('test/peermore.png', 'rb') as image:
        body = image.read()
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/recipes/long/tiddlers/peermorepng',
            method='PUT', headers={'Content-Type': 'image/png'},
            body=body)
    assert response['status'] == '204'
    response, content = http.request('http://our_test_domain:8001/recipes/long/tiddlers/peermorepng',
            method='GET')
    assert response['status'] == '200'
    assert response['content-type'] == 'image/png'
def _put_policy(bag_name, policy_dict):
    """Helper: PUT *policy_dict* onto bag *bag_name* as the authorized
    'cdent' user and assert the update succeeded."""
    body = simplejson.dumps(policy_dict)
    headers = {
        'Content-Type': 'application/json',
        'Authorization': 'Basic %s' % authorization,
    }
    response, _content = httplib2.Http().request(
        'http://our_test_domain:8001/bags/%s' % bag_name,
        method='PUT', headers=headers, body=body)
    assert response['status'] == '204'
| """
Test that GETting a tiddler in some form.
"""
import sys
import os
sys.path.append('.')
from wsgi_intercept import httplib2_intercept
import wsgi_intercept
import httplib2
import simplejson
from base64 import b64encode
from re import match
from fixtures import muchdata, reset_textstore, teststore
from tiddlyweb.model.user import User
authorization = b64encode('cdent:cowpig')
bad_authorization = b64encode('cdent:cdent')
no_user_authorization = b64encode('foop:foop')
text_put_body=u"""modifier: JohnSmith
created:
modified: 200803030303
tags: tagone
Hello, I'm <NAME> \xbb and I have something to sell.
"""
def setup_module(module):
    """Module fixture: intercept HTTP to the WSGI app, rebuild the store
    with sample data, and create the 'cdent' user for the auth tests.
    Ordering of these global side effects matters."""
    from tiddlyweb.web import serve
    # we have to have a function that returns the callable,
    # Selector just _is_ the callable
    def app_fn():
        return serve.load_app()
    #wsgi_intercept.debuglevel = 1
    httplib2_intercept.install()
    wsgi_intercept.add_wsgi_intercept('our_test_domain', 8001, app_fn)
    module.store = teststore()
    reset_textstore()
    muchdata(module.store)
    user = User('cdent')
    user.set_password('<PASSWORD>')
    module.store.put(user)
    try:
        os.mkdir('.test_cache')
    except OSError:
        pass # we don't care if it already exists

def test_get_tiddler():
    # GET of an existing bag tiddler returns its stored text
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/tiddler8',
            method='GET')
    assert response['status'] == '200', 'response status should be 200'
    assert 'i am tiddler 8' in content, 'tiddler should be correct content, is %s' % content

def test_get_tiddler_revision():
    # an explicit revision URL returns that revision, tagged with its number
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/tiddler8/revisions/1',
            method='GET')
    assert response['status'] == '200', 'response status should be 200'
    assert 'i am tiddler 8' in content, 'tiddler should be correct content, is %s' % content
    assert 'revision="1"' in content

def test_get_missing_tiddler():
    # an unknown tiddler name is a 404
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/tiddler27',
            method='GET')
    assert response['status'] == '404', 'response status should be 404'

def test_get_missing_tiddler_revision():
    # a revision of an unknown tiddler is also a 404
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/tiddler27/revisions/99',
            method='GET')
    assert response['status'] == '404', 'response status should be 404'

def test_get_tiddler_missing_revision():
    # an unknown revision of an existing tiddler is a 404 too
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/tiddler8/revisions/99',
            method='GET')
    assert response['status'] == '404'

def test_get_tiddler_wiki():
    # the .wiki extension renders the tiddler as an HTML TiddlyWiki page
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/tiddler8.wiki',
            method='GET')
    assert response['status'] == '200', 'response status should be 200 is %s' % response['status']
    assert response['content-type'] == 'text/html; charset=UTF-8', 'response content-type should be text/html; chareset=UTF-8 is %s' % response['content-type']
    assert '<title>\ntiddler8\n</title>' in content
    assert 'i am tiddler 8' in content, 'tiddler should be correct content, is %s' % content
    # with no policy set the anonymous user holds every permission
    assert 'server.permissions="read, write, create, delete"' in content

def test_get_tiddler_revision_wiki():
    # .wiki rendering also works for an explicit revision
    http = httplib2.Http()
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/tiddler8/revisions/1.wiki',
            method='GET')
    assert response['status'] == '200', 'response status should be 200 is %s' % response['status']
    assert response['content-type'] == 'text/html; charset=UTF-8', 'response content-type should be text/html; chareset=UTF-8 is %s' % response['content-type']
    assert 'i am tiddler 8' in content, 'tiddler should be correct content, is %s' % content
    assert 'revision="1"' in content
def test_put_tiddler_txt():
    # PUT text/plain content, then read it back and compare text and tags
    http = httplib2.Http()
    encoded_body = text_put_body.encode('UTF-8')
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/TestOne',
            method='PUT', headers={'Content-Type': 'text/plain'}, body=encoded_body)
    assert response['status'] == '204', 'response status should be 204 is %s' % response['status']
    tiddler_url = response['location']
    assert tiddler_url == 'http://our_test_domain:8001/bags/bag0/tiddlers/TestOne', \
            'response location should be http://our_test_domain:8001/bags/bag0/tiddlers/TestOne is %s' \
            % tiddler_url
    response, content = http.request(tiddler_url, headers={'Accept': 'text/plain'})
    content = content.decode('UTF-8')
    contents = content.strip().rstrip().split('\n')
    texts = text_put_body.strip().rstrip().split('\n')
    assert contents[-1] == texts[-1] # text
    assert contents[-3] == texts[-3] # tags

def test_put_tiddler_txt_no_modified():
    """
    Putting a tiddler with no modifier should make a default.
    """
    # NOTE(review): despite the docstring this checks the *modified*
    # timestamp default; the PUT body below carries only a modifier field.
    http = httplib2.Http()
    encoded_body = text_put_body.encode('UTF-8')  # unused below; kept from the original
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/TestOne',
            method='PUT', headers={'Content-Type': 'text/plain'}, body='modifier: ArthurDent\n\nTowels')
    assert response['status'] == '204', 'response status should be 204 is %s' % response['status']
    tiddler_url = response['location']
    assert tiddler_url == 'http://our_test_domain:8001/bags/bag0/tiddlers/TestOne', \
            'response location should be http://our_test_domain:8001/bags/bag0/tiddlers/TestOne is %s' \
            % tiddler_url
    response, content = http.request(tiddler_url, headers={'Accept': 'text/plain'})
    content = content.decode('UTF-8')
    # the server stamped a current (2xxx) modified time
    assert 'modified: 2' in content

def test_put_tiddler_json():
    # PUT JSON content and read it back as JSON
    http = httplib2.Http()
    json = simplejson.dumps(dict(text='i fight for the users', tags=['tagone','tagtwo'], modifier='', modified='200805230303', created='200803030303'))
    response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/TestTwo',
            method='PUT', headers={'Content-Type': 'application/json'}, body=json)
    assert response['status'] == '204', 'response status should be 204 is %s' % response['status']
    tiddler_url = response['location']
    assert tiddler_url == 'http://our_test_domain:8001/bags/bag0/tiddlers/TestTwo', \
            'response location should be http://our_test_domain:8001/bags/bag0/tiddlers/TestTwo is %s' \
            % tiddler_url
    response, content = http.request(tiddler_url, headers={'Accept': 'application/json'})
    info = simplejson.loads(content)
    # last-modified derives from the tiddler's modified field above
    assert response['last-modified'] == 'Fri, 23 May 2008 03:03:00 GMT'
    assert info['title'] == 'TestTwo'
    assert info['text'] == 'i fight for the users'
def test_put_tiddler_json_bad_path():
"""
/ in tiddler title is an unresolved source of some confusion.
"""
http = httplib2.Http()
json = simplejson.dumps(dict(text='i fight for the users 2', tags=['tagone','tagtwo'], modifier='', modified='200803030303', created='200803030303'))
response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/..%2F..%2F..%2F..%2FTestThree',
method='PUT', headers={'Content-Type': 'application/json'}, body=json)
assert response['status'] == '404', 'response status should be 404 is %s' % response['status']
def test_put_tiddler_json_no_bag():
"""
/ in tiddler title is an unresolved source of some confusion.
"""
http = httplib2.Http()
json = simplejson.dumps(dict(text='i fight for the users 2', tags=['tagone','tagtwo'], modifier='', modified='200803030303', created='200803030303'))
response, content = http.request('http://our_test_domain:8001/bags/nobagheremaam/tiddlers/SomeKindOTiddler',
method='PUT', headers={'Content-Type': 'application/json'}, body=json)
assert response['status'] == '409'
assert 'There is no bag named: nobagheremaam' in content
def test_get_tiddler_via_recipe():
http = httplib2.Http()
response, content = http.request('http://our_test_domain:8001/recipes/long/tiddlers/tiddler8.json',
method='GET')
assert response['status'] == '200'
tiddler_info = simplejson.loads(content)
assert tiddler_info['bag'] == 'bag28'
def test_get_tiddler_etag_recipe():
http = httplib2.Http()
response, content = http.request('http://our_test_domain:8001/recipes/long/tiddlers/tiddler8.json',
method='GET')
assert response['status'] == '200'
assert response['etag'] == 'bag28/tiddler8/1'
tiddler_info = simplejson.loads(content)
assert tiddler_info['bag'] == 'bag28'
def test_get_tiddler_etag_bag():
http = httplib2.Http()
response, content = http.request('http://our_test_domain:8001/bags/bag28/tiddlers/tiddler8.json',
method='GET')
assert response['status'] == '200'
assert response['etag'] == 'bag28/tiddler8/1'
tiddler_info = simplejson.loads(content)
assert tiddler_info['bag'] == 'bag28'
def test_get_tiddler_cached():
[os.unlink('.test_cache/%s' % x) for x in os.listdir('.test_cache')]
http = httplib2.Http('.test_cache')
response, content = http.request('http://our_test_domain:8001/bags/bag28/tiddlers/tiddler8.json',
method='GET')
assert response['status'] == '200'
assert response['etag'] == 'bag28/tiddler8/1'
assert not response.fromcache
response, content = http.request('http://our_test_domain:8001/bags/bag28/tiddlers/tiddler8.json',
method='GET')
assert response['status'] == '304'
assert response['etag'] == 'bag28/tiddler8/1'
assert response.fromcache
def test_put_tiddler_cache_fakey():
[os.unlink('.test_cache/%s' % x) for x in os.listdir('.test_cache')]
http_caching = httplib2.Http('.test_cache')
http = httplib2.Http()
json = simplejson.dumps(dict(text='i fight for the users 2', tags=['tagone','tagtwo'], modifier='', modified='200803030303', created='200803030303'))
response, content = http_caching.request('http://our_test_domain:8001/recipes/long/tiddlers/CashForCache',
method='PUT', headers={'Content-Type': 'application/json'}, body=json)
assert response['status'] == '204'
assert response['etag'] == 'bag1/CashForCache/1'
response, content = http_caching.request('http://our_test_domain:8001/recipes/long/tiddlers/CashForCache',
method='GET', headers={'Accept': 'application/json'})
assert response['status'] == '200'
assert response['etag'] == 'bag1/CashForCache/1'
response, content = http.request('http://our_test_domain:8001/recipes/long/tiddlers/CashForCache',
method='PUT', headers={'Content-Type': 'application/json'}, body=json)
assert response['status'] == '204'
assert response['etag'] == 'bag1/CashForCache/2'
response, content = http_caching.request('http://our_test_domain:8001/recipes/long/tiddlers/CashForCache',
method='PUT', headers={'Content-Type': 'application/json'}, body=json)
assert response['status'] == '412'
def test_put_tiddler_via_recipe():
http = httplib2.Http()
json = simplejson.dumps(dict(text='i fight for the users 2', tags=['tagone','tagtwo'], modifier='', modified='200803030303', created='200803030303'))
response, content = http.request('http://our_test_domain:8001/recipes/long/tiddlers/FantasticVoyage',
method='PUT', headers={'Content-Type': 'application/json'}, body=json)
assert response['status'] == '204'
assert response['etag'] == 'bag1/FantasticVoyage/1'
url = response['location']
reponse, content = http.request(url, method='GET', headers={'Accept': 'application/json'})
tiddler_dict = simplejson.loads(content)
assert tiddler_dict['bag'] == 'bag1'
assert response['etag'] == 'bag1/FantasticVoyage/1'
def test_slash_in_etag():
http = httplib2.Http()
json = simplejson.dumps(dict(text='i fight for the users', tags=['tagone','tagtwo'], modifier='', modified='200805230303', created='200803030303'))
response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/Test%2FTwo',
method='PUT', headers={'Content-Type': 'application/json'}, body=json)
assert response['status'] == '204'
response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/Test%2FTwo',
method='PUT', headers={'Content-Type': 'application/json', 'If-Match': 'bag0/Test%2FTwo/1'}, body=json)
assert response['status'] == '204'
response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/Test%2FTwo',
method='PUT', headers={'Content-Type': 'application/json', 'If-Match': 'bag0/Test/Two/2'}, body=json)
assert response['status'] == '412'
response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/Test%2FTwo',
method='PUT', headers={'Content-Type': 'application/json', 'If-Match': 'bag0/Test%2FTwo/2'}, body=json)
assert response['status'] == '204'
def test_paren_in_etag():
http = httplib2.Http()
json = simplejson.dumps(dict(text='i fight for the users', tags=['tagone','tagtwo'], modifier='', modified='200805230303', created='200803030303'))
response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/Test(Two)',
method='PUT', headers={'Content-Type': 'application/json'}, body=json)
assert response['status'] == '204'
response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/Test(Two)',
method='PUT', headers={'Content-Type': 'application/json', 'If-Match': 'bag0/Test(Two)/1'}, body=json)
assert response['status'] == '204'
response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/Test(Two)',
method='PUT', headers={'Content-Type': 'application/json', 'If-Match': 'bag0/Test%28Two%29/2'}, body=json)
assert response['status'] == '412'
response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/Test(Two)',
method='PUT', headers={'Content-Type': 'application/json', 'If-Match': 'bag0/Test(Two)/2'}, body=json)
assert response['status'] == '204'
def test_get_tiddler_text_created():
"""
Make sure the tiddler comes back to us as we expect.
In the process confirm that Accept header processing is working
as expect, by wanting xml (which we don't do), more than text/plain,
which we do.
"""
http = httplib2.Http()
tiddler_url = 'http://our_test_domain:8001/bags/bag0/tiddlers/TestOne'
response, content = http.request(tiddler_url, headers={'Accept': 'text/xml; q=1, text/plain'})
content = content.decode('UTF-8')
contents = content.strip().rstrip().split('\n')
texts = text_put_body.strip().rstrip().split('\n')
assert contents[-1] == u'Towels' # text
assert contents[-3] == u'tags: ' # tags
assert match('created: \d{12}', contents[1])
def test_tiddler_bag_constraints():
encoded_body = text_put_body.encode('UTF-8')
http = httplib2.Http()
_put_policy('unreadable', dict(policy=dict(manage=['cdent'],read=['NONE'],write=['NONE'],create=['NONE'])))
# try to create a tiddler and fail
response, content = http.request('http://our_test_domain:8001/bags/unreadable/tiddlers/WroteOne',
method='PUT', headers={'Content-Type': 'text/plain', 'Authorization': 'Basic %s' % authorization},
body=encoded_body)
assert response['status'] == '403'
assert 'may not create' in content
# create and succeed
_put_policy('unreadable', dict(policy=dict(manage=['cdent'],read=['NONE'],write=['NONE'],create=['cdent'])))
response, content = http.request('http://our_test_domain:8001/bags/unreadable/tiddlers/WroteOne',
method='PUT', headers={'Content-Type': 'text/plain', 'Authorization': 'Basic %s' % authorization},
body=encoded_body)
assert response['status'] == '204'
# fail when bad auth format
_put_policy('unreadable', dict(policy=dict(manage=['cdent'],read=['NONE'],write=['NONE'],create=['cdent'])))
response, content = http.request('http://our_test_domain:8001/bags/unreadable/tiddlers/WroteOne',
method='PUT', headers={'Content-Type': 'text/plain', 'Authorization': '%s' % authorization},
body=encoded_body)
assert response['status'] == '403'
# fail when bad auth info
_put_policy('unreadable', dict(policy=dict(manage=['cdent'],read=['NONE'],write=['NONE'],create=['cdent'])))
response, content = http.request('http://our_test_domain:8001/bags/unreadable/tiddlers/WroteOne',
method='PUT', headers={'Content-Type': 'text/plain', 'Authorization': 'Basic %s' % bad_authorization},
body=encoded_body)
assert response['status'] == '403'
# fail when bad user info
_put_policy('unreadable', dict(policy=dict(manage=['cdent'],read=['NONE'],write=['NONE'],create=['cdent'])))
response, content = http.request('http://our_test_domain:8001/bags/unreadable/tiddlers/WroteOne',
method='PUT', headers={'Content-Type': 'text/plain', 'Authorization': 'Basic %s' % no_user_authorization},
body=encoded_body)
assert response['status'] == '403'
# write and fail
response, content = http.request('http://our_test_domain:8001/bags/unreadable/tiddlers/WroteOne',
method='PUT', headers={'Content-Type': 'text/plain', 'Authorization': 'Basic %s' % authorization},
body=encoded_body)
assert response['status'] == '403'
assert 'may not write' in content
# write and succeed
_put_policy('unreadable', dict(policy=dict(manage=['cdent'],read=['NONE'],write=['cdent'],create=['NONE'])))
response, content = http.request('http://our_test_domain:8001/bags/unreadable/tiddlers/WroteOne',
method='PUT', headers={'Content-Type': 'text/plain', 'Authorization': 'Basic %s' % authorization},
body=encoded_body)
assert response['status'] == '204'
# read and fail
response, content = http.request('http://our_test_domain:8001/bags/unreadable/tiddlers/WroteOne',
method='GET', headers={'Accept': 'text/plain', 'Authorization': 'Basic %s' % authorization})
assert response['status'] == '403'
assert 'may not read' in content
# update the policy so we can read and GET the thing
_put_policy('unreadable', dict(policy=dict(manage=['cdent'],read=['cdent'],write=['NONE'],delete=['NONE'])))
response, content = http.request('http://our_test_domain:8001/bags/unreadable/tiddlers/WroteOne.wiki',
method='GET', headers={'Accept': 'text/plain', 'Authorization': 'Basic %s' % authorization})
assert response['status'] == '200'
assert '<NAME>' in content
assert 'server.permissions="read, create"' in content
def test_get_tiddler_via_recipe_with_perms():
_put_policy('bag28', dict(policy=dict(manage=['cdent'],read=['NONE'],write=['NONE'])))
http = httplib2.Http()
response, content = http.request('http://our_test_domain:8001/recipes/long/tiddlers/tiddler8.json',
method='GET')
assert response['status'] == '403'
assert 'may not read' in content
_put_policy('bag28', dict(policy=dict(manage=['cdent'],read=['cdent'],write=['NONE'])))
http = httplib2.Http()
response, content = http.request('http://our_test_domain:8001/recipes/long/tiddlers/tiddler8.json',
headers=dict(Authorization='Basic %s' % authorization), method='GET')
assert response['status'] == '200'
tiddler_info = simplejson.loads(content)
assert tiddler_info['bag'] == 'bag28'
encoded_body = text_put_body.encode('UTF-8')
response, content = http.request('http://our_test_domain:8001/recipes/long/tiddlers/tiddler8',
method='PUT', headers={'Content-Type': 'text/plain', 'Authorization': 'Basic %s' % authorization},
body=encoded_body)
assert response['status'] == '403'
assert 'may not write' in content
_put_policy('bag28', dict(policy=dict(manage=['cdent'],read=['cdent'],write=['nancy'])))
encoded_body = text_put_body.encode('UTF-8')
response, content = http.request('http://our_test_domain:8001/recipes/long/tiddlers/tiddler8',
method='PUT', headers={'Content-Type': 'text/plain', 'Authorization': 'Basic %s' % authorization},
body=encoded_body)
assert response['status'] == '403'
_put_policy('bag28', dict(policy=dict(manage=['cdent'],read=['cdent'],write=['cdent'])))
encoded_body = text_put_body.encode('UTF-8')
response, content = http.request('http://our_test_domain:8001/recipes/long/tiddlers/tiddler8',
method='PUT', headers={'Content-Type': 'text/plain'},
body=encoded_body)
# when we PUT without permission there's no good way to handle auth
# so we just forbid.
assert response['status'] == '403'
def test_delete_tiddler_in_recipe():
http = httplib2.Http()
response, content = http.request('http://our_test_domain:8001/recipes/long/tiddlers/tiddler8',
method='DELETE')
assert response['status'] == '204'
# there are multiple tiddler8s in the recipe
response, content = http.request('http://our_test_domain:8001/recipes/long/tiddlers/tiddler8',
method='DELETE')
assert response['status'] == '204'
response, content = http.request('http://our_test_domain:8001/recipes/long/tiddlers/tiddler8',
method='DELETE')
assert response['status'] == '204'
response, content = http.request('http://our_test_domain:8001/recipes/long/tiddlers/tiddler8',
method='DELETE')
assert response['status'] == '204'
response, content = http.request('http://our_test_domain:8001/recipes/long/tiddlers/tiddler8',
method='DELETE')
assert response['status'] == '404'
def test_delete_tiddler_in_bag():
http = httplib2.Http()
response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/TestOne',
method='DELETE')
assert response['status'] == '204'
response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/TestOne',
method='DELETE')
assert response['status'] == '404'
def test_delete_tiddler_etag():
http = httplib2.Http()
response, content = http.request('http://our_test_domain:8001/bags/bag5/tiddlers/tiddler0',
method='DELETE', headers={'If-Match': 'bag5/tiddler0/9'})
assert response['status'] == '412'
response, content = http.request('http://our_test_domain:8001/bags/bag5/tiddlers/tiddler0',
method='DELETE', headers={'If-Match': 'bag5/tiddler0/1'})
assert response['status'] == '204'
response, content = http.request('http://our_test_domain:8001/bags/bag5/tiddlers/tiddler0',
method='DELETE')
assert response['status'] == '404'
def test_delete_tiddler_in_bag_perms():
_put_policy('bag0', dict(policy=dict(manage=['cdent'],read=['cdent'],write=['cdent'],delete=['cdent'])))
http = httplib2.Http()
response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/tiddler0',
method='DELETE')
assert response['status'] == '403'
response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/tiddler0',
method='DELETE', headers={'Authorization': 'Basic %s' % authorization})
assert response['status'] == '204'
response, content = http.request('http://our_test_domain:8001/bags/bag0/tiddlers/tiddler0',
method='DELETE', headers={'Authorization': 'Basic %s' % authorization})
assert response['status'] == '404'
def test_tiddler_no_recipe():
http = httplib2.Http()
response, content = http.request('http://our_test_domain:8001/recipes/short/tiddlers/tiddler8',
method='GET')
assert response['status'] == '404'
def test_binary_tiddler():
image = file('test/peermore.png', 'rb')
content = image.read()
http = httplib2.Http()
response, content = http.request('http://our_test_domain:8001/recipes/long/tiddlers/peermorepng',
method='PUT', headers={'Content-Type': 'image/png'},
body=content)
assert response['status'] == '204'
response, content = http.request('http://our_test_domain:8001/recipes/long/tiddlers/peermorepng',
method='GET')
assert response['status'] == '200'
assert response['content-type'] == 'image/png'
def _put_policy(bag_name, policy_dict):
json = simplejson.dumps(policy_dict)
http = httplib2.Http()
response, content = http.request('http://our_test_domain:8001/bags/%s' % bag_name,
method='PUT', headers={'Content-Type': 'application/json', 'Authorization': 'Basic %s' % authorization},
body=json)
assert response['status'] == '204'
| en | 0.876057 | Test that GETting a tiddler in some form. modifier: JohnSmith created: modified: 200803030303 tags: tagone Hello, I'm <NAME> \xbb and I have something to sell. # we have to have a function that returns the callable, # Selector just _is_ the callable #wsgi_intercept.debuglevel = 1 # we don't care if it already exists # text # tags Putting a tiddler with no modifier should make a default. / in tiddler title is an unresolved source of some confusion. / in tiddler title is an unresolved source of some confusion. Make sure the tiddler comes back to us as we expect. In the process confirm that Accept header processing is working as expect, by wanting xml (which we don't do), more than text/plain, which we do. # text # tags # try to create a tiddler and fail # create and succeed # fail when bad auth format # fail when bad auth info # fail when bad user info # write and fail # write and succeed # read and fail # update the policy so we can read and GET the thing # when we PUT without permission there's no good way to handle auth # so we just forbid. # there are multiple tiddler8s in the recipe | 2.456513 | 2 |
PLM/ui/components/__init__.py | vtta2008/pipelineTool | 7 | 6617935 | <filename>PLM/ui/components/__init__.py
# -*- coding: utf-8 -*-
"""
Script Name: __init__.py.py
Author: <NAME>/Jimmy - 3D artist.
Description:
"""
# -------------------------------------------------------------------------------------------------------------
from .BodyCheckBoxes import BodyCheckBoxes
from .BotTab import BotTab
# from .BotTab1 import BotTab1
# from .BotTab2 import BotTab2
# from .ConnectStatus import ConnectStatus
from .FooterCheckBoxes import FooterCheckBoxes
from .HeaderCheckBoxes import HeaderCheckBoxes
# from .Loading import LoadingBar
from .MainHeader import MainHeader
# from .MainMenuBar import MainMenuBar
from .MainStatusBar import MainStatusBar
# from .MainToolBar import MainToolBar
from .MidTab import MidTab
# from .MidTab1 import MidTab1
# from .MidTab2 import MidTab2
from .SettingInput import SettingInput
from .SettingOutput import SettingOutput
from .SysTrayIconMenu import SysTrayIconMenu
from .ValiantDelegate import VariantDelegate
# -------------------------------------------------------------------------------------------------------------
# Created by panda on 3/16/2020 - 3:11 AM
# © 2017 - 2019 DAMGteam. All rights reserved | <filename>PLM/ui/components/__init__.py
# -*- coding: utf-8 -*-
"""
Script Name: __init__.py.py
Author: <NAME>/Jimmy - 3D artist.
Description:
"""
# -------------------------------------------------------------------------------------------------------------
from .BodyCheckBoxes import BodyCheckBoxes
from .BotTab import BotTab
# from .BotTab1 import BotTab1
# from .BotTab2 import BotTab2
# from .ConnectStatus import ConnectStatus
from .FooterCheckBoxes import FooterCheckBoxes
from .HeaderCheckBoxes import HeaderCheckBoxes
# from .Loading import LoadingBar
from .MainHeader import MainHeader
# from .MainMenuBar import MainMenuBar
from .MainStatusBar import MainStatusBar
# from .MainToolBar import MainToolBar
from .MidTab import MidTab
# from .MidTab1 import MidTab1
# from .MidTab2 import MidTab2
from .SettingInput import SettingInput
from .SettingOutput import SettingOutput
from .SysTrayIconMenu import SysTrayIconMenu
from .ValiantDelegate import VariantDelegate
# -------------------------------------------------------------------------------------------------------------
# Created by panda on 3/16/2020 - 3:11 AM
# © 2017 - 2019 DAMGteam. All rights reserved | en | 0.401643 | # -*- coding: utf-8 -*- Script Name: __init__.py.py Author: <NAME>/Jimmy - 3D artist. Description: # ------------------------------------------------------------------------------------------------------------- # from .BotTab1 import BotTab1 # from .BotTab2 import BotTab2 # from .ConnectStatus import ConnectStatus # from .Loading import LoadingBar # from .MainMenuBar import MainMenuBar # from .MainToolBar import MainToolBar # from .MidTab1 import MidTab1 # from .MidTab2 import MidTab2 # ------------------------------------------------------------------------------------------------------------- # Created by panda on 3/16/2020 - 3:11 AM # © 2017 - 2019 DAMGteam. All rights reserved | 1.646235 | 2 |
aoc/y2018/tests/day9_test.py | pjiranek/AoC | 0 | 6617936 | <gh_stars>0
import unittest
from aoc.y2018 import day9
class Day9Test(unittest.TestCase):
def test_1(self):
self._do_test(10, 1618, 8317)
def test_2(self):
self._do_test(13, 7999, 146373)
def test_3(self):
self._do_test(17, 1104, 2764)
def test_4(self):
self._do_test(21, 6111, 54718)
def test_5(self):
self._do_test(30, 5807, 37305)
def _do_test(self, player_count, last_marble, high_score):
game = day9.Game(player_count)
game.play_until(last_marble)
self.assertEqual(game.high_score, high_score)
| import unittest
from aoc.y2018 import day9
class Day9Test(unittest.TestCase):
def test_1(self):
self._do_test(10, 1618, 8317)
def test_2(self):
self._do_test(13, 7999, 146373)
def test_3(self):
self._do_test(17, 1104, 2764)
def test_4(self):
self._do_test(21, 6111, 54718)
def test_5(self):
self._do_test(30, 5807, 37305)
def _do_test(self, player_count, last_marble, high_score):
game = day9.Game(player_count)
game.play_until(last_marble)
self.assertEqual(game.high_score, high_score) | none | 1 | 2.899843 | 3 | |
sanitize.py | sgalal/lshk-word-list-crawler | 4 | 6617937 | <reponame>sgalal/lshk-word-list-crawler
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import re
import sys
d1s = []
d2s = []
with open('result.txt') as r:
for i, line in enumerate(r.readlines()):
line_strip = line.rstrip()
try:
xs = line_strip.split()
assert len(xs) == 3, 'Malformed data row'
honzis, jyutpings, _ = xs
honzi_list = list(honzis)
jyutpings_lower = jyutpings.lower()
jyutping_list = re.split(r'(?<=[1-6])(?!$)', jyutpings_lower)
assert len(honzi_list) == len(jyutping_list), 'Lengths do not match'
for honzi in honzi_list:
# https://stackoverflow.com/a/2718268
assert re.match(r'[⺀-⺙⺛-⻳⼀-⿕々〇〡-〩〸-〺〻㐀-䶵一-鿃豈-鶴侮-頻並-龎]', honzi), 'Not a Chinese character'
for jyutping in jyutping_list:
assert re.match(r'[a-z]+[1-6]', jyutping), 'Malformed Jyutping'
(d1s if len(honzi_list) == 1 else d2s).append((honzis, ' '.join(jyutping_list)))
except AssertionError as e:
print(f'Line {i + 1}: "{line_strip}", {" ".join(e.args)}', file=sys.stderr)
with open('sanitized.txt', 'w') as f:
for a, b in d1s:
print(f'{a}\t{b}', file=f)
for a, b in d2s:
print(f'{a}\t{b}', file=f)
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import re
import sys
d1s = []
d2s = []
with open('result.txt') as r:
for i, line in enumerate(r.readlines()):
line_strip = line.rstrip()
try:
xs = line_strip.split()
assert len(xs) == 3, 'Malformed data row'
honzis, jyutpings, _ = xs
honzi_list = list(honzis)
jyutpings_lower = jyutpings.lower()
jyutping_list = re.split(r'(?<=[1-6])(?!$)', jyutpings_lower)
assert len(honzi_list) == len(jyutping_list), 'Lengths do not match'
for honzi in honzi_list:
# https://stackoverflow.com/a/2718268
assert re.match(r'[⺀-⺙⺛-⻳⼀-⿕々〇〡-〩〸-〺〻㐀-䶵一-鿃豈-鶴侮-頻並-龎]', honzi), 'Not a Chinese character'
for jyutping in jyutping_list:
assert re.match(r'[a-z]+[1-6]', jyutping), 'Malformed Jyutping'
(d1s if len(honzi_list) == 1 else d2s).append((honzis, ' '.join(jyutping_list)))
except AssertionError as e:
print(f'Line {i + 1}: "{line_strip}", {" ".join(e.args)}', file=sys.stderr)
with open('sanitized.txt', 'w') as f:
for a, b in d1s:
print(f'{a}\t{b}', file=f)
for a, b in d2s:
print(f'{a}\t{b}', file=f) | en | 0.441724 | #!/usr/bin/env python3 # -*- coding: utf-8 -*- # https://stackoverflow.com/a/2718268 | 2.927113 | 3 |
bin/app/__init__.py | shutingrz/sensorUI | 0 | 6617938 | from flask import Flask, Blueprint
from app.controllers.api import api
from app.controllers.webui import webui
from app.util import Util
from flask_wtf.csrf import CSRFProtect
from app import db
from flask_sqlalchemy import SQLAlchemy
def create_app(DBURL=None):
app = Flask(__name__)
try:
app.config.from_pyfile('../sensors.conf')
except FileNotFoundError as exc:
app.logger.critical("'../sensors.conf' is not found.")
raise FileNotFoundError(exc)
try:
if DBURL is not None:
dburl = DBURL
else:
dburl = app.config['DBURL']
app.config['SQLALCHEMY_DATABASE_URI'] = dburl
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
except KeyError as exc:
app.logger.critical(
"DBURL is not set. please set dburl at sensors.conf!")
raise KeyError(exc)
app.config["SECRET_KEY"] = Util.generateRandomBytes(32)
app.config['JSON_AS_ASCII'] = False
Util.MaxUsernameLength = app.config["MAX_USERID_LENGTH"]
Util.MaxUserPassLength = app.config["MAX_USERPASS_LENGTH"]
csrf = CSRFProtect(app)
csrf.init_app(app)
# ビューの登録
app.register_blueprint(webui, url_prefix='/')
app.register_blueprint(api, url_prefix='/api/')
# データベースの登録
db.init_db(app)
return app
app = create_app()
# Migrate対応だが一旦 db.create_all() をする運用とする
with app.app_context():
db.create_all()
| from flask import Flask, Blueprint
from app.controllers.api import api
from app.controllers.webui import webui
from app.util import Util
from flask_wtf.csrf import CSRFProtect
from app import db
from flask_sqlalchemy import SQLAlchemy
def create_app(DBURL=None):
app = Flask(__name__)
try:
app.config.from_pyfile('../sensors.conf')
except FileNotFoundError as exc:
app.logger.critical("'../sensors.conf' is not found.")
raise FileNotFoundError(exc)
try:
if DBURL is not None:
dburl = DBURL
else:
dburl = app.config['DBURL']
app.config['SQLALCHEMY_DATABASE_URI'] = dburl
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
except KeyError as exc:
app.logger.critical(
"DBURL is not set. please set dburl at sensors.conf!")
raise KeyError(exc)
app.config["SECRET_KEY"] = Util.generateRandomBytes(32)
app.config['JSON_AS_ASCII'] = False
Util.MaxUsernameLength = app.config["MAX_USERID_LENGTH"]
Util.MaxUserPassLength = app.config["MAX_USERPASS_LENGTH"]
csrf = CSRFProtect(app)
csrf.init_app(app)
# ビューの登録
app.register_blueprint(webui, url_prefix='/')
app.register_blueprint(api, url_prefix='/api/')
# データベースの登録
db.init_db(app)
return app
app = create_app()
# Migrate対応だが一旦 db.create_all() をする運用とする
with app.app_context():
db.create_all()
| ja | 0.999605 | # ビューの登録 # データベースの登録 # Migrate対応だが一旦 db.create_all() をする運用とする | 2.622057 | 3 |
main.py | indmind/AutoTypeRacer | 3 | 6617939 | <reponame>indmind/AutoTypeRacer
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.common.exceptions import TimeoutException
from selenium.common.exceptions import NoSuchElementException
from dotenv import load_dotenv
from time import sleep
from os import getenv
from tabulate import tabulate
import sys
import pyautogui
# save each race result
history = []
# elements selector
signin_selector = '#tstats > table > tbody > tr.datarow > td:nth-child(1) > table > tbody > tr > td:nth-child(1) > a'
username_selector = 'body > div.DialogBox.trPopupDialog.editUserPopup > div > div > div.dialogContent > div > div.bodyWidgetHolder > div > table.gwt-DisclosurePanel.gwt-DisclosurePanel-open > tbody > tr:nth-child(2) > td > div > table > tbody > tr:nth-child(1) > td:nth-child(2) > input'
password_selector = 'body > div.DialogBox.trPopupDialog.editUserPopup > div > div > div.dialogContent > div > div.bodyWidgetHolder > div > table.gwt-DisclosurePanel.gwt-DisclosurePanel-open > tbody > tr:nth-child(2) > td > div > table > tbody > tr:nth-child(2) > td:nth-child(2) > table > tbody > tr:nth-child(1) > td > input'
signinconfirm_selector = 'body > div.DialogBox.trPopupDialog.editUserPopup > div > div > div.dialogContent > div > div.bodyWidgetHolder > div > table.gwt-DisclosurePanel.gwt-DisclosurePanel-open > tbody > tr:nth-child(2) > td > div > table > tbody > tr:nth-child(4) > td:nth-child(2) > table > tbody > tr > td:nth-child(1) > button'
play_selector = '#dUI > table > tbody > tr:nth-child(2) > td:nth-child(2) > div > div.mainViewport > div > table > tbody > tr:nth-child(2) > td > table > tbody > tr > td:nth-child(2) > table > tbody > tr:nth-child(1) > td > a'
# just to check if the race page is loaded
banner_selector = 'body > div.countdownPopup.horizontalCountdownPopup > div > table > tbody > tr > td > table > tbody > tr > td:nth-child(2)'
# this selector needs #gwt-uid-{uid} >
text_selector = 'table > tbody > tr:nth-child(2) > td > table > tbody > tr:nth-child(1) > td > table > tbody > tr:nth-child(1) > td > div > div'
input_selector = 'table > tbody > tr:nth-child(2) > td > table > tbody > tr:nth-child(2) > td > input'
raceagain_selector = 'table > tbody > tr:nth-child(3) > td > table > tbody > tr > td:nth-child(2) > a'
# after race selector
wpm_selector = 'table > tbody > tr:nth-child(4) > td > div > table > tbody > tr:nth-child(2) > td > table > tbody > tr > td:nth-child(2) > table > tbody > tr:nth-child(4) > td > table > tbody > tr:nth-child(1) > td:nth-child(2) > table > tbody > tr > td:nth-child(1) > div > div'
time_selector = 'table > tbody > tr:nth-child(4) > td > div > table > tbody > tr:nth-child(2) > td > table > tbody > tr > td:nth-child(2) > table > tbody > tr:nth-child(4) > td > table > tbody > tr:nth-child(2) > td:nth-child(2) > div > span'
point_selector = 'table > tbody > tr:nth-child(4) > td > div > table > tbody > tr:nth-child(2) > td > table > tbody > tr > td:nth-child(2) > table > tbody > tr:nth-child(4) > td > table > tbody > tr:nth-child(4) > td:nth-child(2) > div > div'
# check if element exist using css selector
def isElementExist(selector):
try:
browser.find_element_by_css_selector(selector)
except NoSuchElementException:
return False
return True
# get uid where race element nested
def bruteUID():
    """Probe gwt-uid indices 0..9999 until the race input element appears.

    Returns the first matching uid; if nothing matched, 10000 is returned,
    so callers should treat that value as "not found".
    """
    print("bruteforce-ing uid...")
    template = ('#gwt-uid-%d > table > tbody > tr:nth-child(2) > td > '
                'table > tbody > tr:nth-child(2) > td > input')
    uid = 0
    while uid < 10000 and not isElementExist(template % uid):
        uid += 1
    print("uid found:", uid)
    return uid
# get text, input, and race-again element
def getRaceElementsSelector():
    """Build absolute selectors for the race widgets under the brute-forced uid."""
    prefix = "#gwt-uid-%d > " % bruteUID()
    relative = {
        'text': text_selector,
        'input': input_selector,
        'raceagain': raceagain_selector,
        'wpm': wpm_selector,
        'time': time_selector,
        'point': point_selector,
    }
    return {key: prefix + sel for key, sel in relative.items()}
# get and wait an element using css selector
def getAndWait(selector, key, max=60):
    """Wait up to *max* seconds for *selector* to be present, then return it.

    *key* is only a log label.  (The parameter name ``max`` shadows the
    builtin but is kept for interface compatibility.)
    """
    print('get and wait:', key)
    condition = EC.presence_of_element_located((By.CSS_SELECTOR, selector))
    return WebDriverWait(browser, max).until(condition)
# find an element using css selector
def find(selector, key):
    """Immediate (non-waiting) lookup of *selector*; *key* is a log label."""
    print('find:', key)
    element = browser.find_element_by_css_selector(selector)
    return element
def secureClick(element, key):
    """Click *element* once it becomes visible; *key* is only a log label."""
    # Poll visibility; selenium raises if we click an invisible element.
    while not element.is_displayed():
        print(key, 'is not visible, waiting for 1s')
        sleep(1)
    print('click:', key)
    # TAB first to give the element keyboard focus before clicking.
    element.send_keys(Keys.TAB)
    element.click()
# login using data from .env
def login():
    """Sign in to typeracer using credentials loaded from the environment.

    Reads ``username``/``password`` env vars (populated by load_dotenv in
    main); blocks ~5s after submitting so the page can settle.
    """
    print("login...")
    # Log-label typo fixed: 'sigin' -> 'signin'.
    getAndWait(signin_selector, 'signin').click()
    getAndWait(username_selector, 'username').send_keys(getenv("username"))
    find(password_selector, 'password').send_keys(getenv("password"))
    find(signinconfirm_selector, 'signinconfirm').click()
    sleep(5)
    print("done login...")
# self explanatory
def race(count):
    """Run *count* races recursively.

    For each race: wait for the page, locate the widgets, wait for the
    countdown, type the prompt with pyautogui, then append
    [text, wpm, time, points] to the global ``history`` list.
    A TimeoutException aborts the remaining races.
    """
    try:
        #page loading check
        getAndWait(banner_selector, 'banner')
        selectors = getRaceElementsSelector()
        # select text element
        text = find(selectors['text'], 'text').text
        print("text:", text)
        # select text input element where we need to type the text
        text_input = find(selectors['input'], "input")
        # wait for game to start (the input stays disabled during countdown)
        while text_input.get_attribute('disabled'):
            print("wait the race to start for 1s...")
            sleep(1)
        # after countdown is done, click the element (47)
        text_input.click()
        # type using pyautogui because I dont know how to set the typing speed
        print("typing...")
        pyautogui.typewrite(text, interval=0.14)
        # save the result (abbreviated text plus the three stat widgets)
        result = [
            text[:10] + '...' + text[-10:],
            getAndWait(selectors['wpm'], 'wpm').text,
            getAndWait(selectors['time'], 'time').text,
            getAndWait(selectors['point'], 'point').text
        ]
        history.append(result)
        count -= 1
        if count:
            # queue the next race; recurses once per remaining race
            secureClick(find(selectors['raceagain'], "raceagain"), "raceagain")
            race(count)
    except TimeoutException:
        # 'kelamaan' is Indonesian for "it took too long"
        print('kelamaan')
if __name__ == "__main__":
    # CLI: argv[1] = number of races, argv[2] == "g" enables guest mode.
    load_dotenv()
    count = 1
    guestMode = False
    if len(sys.argv) > 1:
        count = int(sys.argv[1])
    if len(sys.argv) > 2:
        if sys.argv[2] == "g":
            print('Start in guest mode...')
            guestMode = True
    # disable image load and idk what disk-cache-size used for
    prefs = {'profile.managed_default_content_settings.images':2, 'disk-cache-size': 4096}
    options = webdriver.ChromeOptions()
    options.add_experimental_option("prefs", prefs)
    browser = webdriver.Chrome(chrome_options=options)
    browser.get('https://play.typeracer.com/')
    if not guestMode:
        login()
    # click the "enter typing race button"
    getAndWait(play_selector, 'playbutton').click()
    # RACE!!!!
    race(count)
    # Guard against zero completed races (e.g. every race timed out);
    # the original crashed with ZeroDivisionError on the average below.
    if history:
        print('\nRESULTS:')
        print(tabulate(history, headers=['text', 'speed', 'time', 'point'], showindex=True))
        wpms = [int(res[1].split()[0]) for res in history]
        points = sum([int(res[3]) for res in history])
        print('\nAVERAGE WPM:', sum(wpms) / len(wpms))
        print('TOTAL POINTS:', points)
    else:
        print('\nNo races completed; nothing to report.')
| from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.common.exceptions import TimeoutException
from selenium.common.exceptions import NoSuchElementException
from dotenv import load_dotenv
from time import sleep
from os import getenv
from tabulate import tabulate
import sys
import pyautogui
# save each race result
history = []
# elements selector
signin_selector = '#tstats > table > tbody > tr.datarow > td:nth-child(1) > table > tbody > tr > td:nth-child(1) > a'
username_selector = 'body > div.DialogBox.trPopupDialog.editUserPopup > div > div > div.dialogContent > div > div.bodyWidgetHolder > div > table.gwt-DisclosurePanel.gwt-DisclosurePanel-open > tbody > tr:nth-child(2) > td > div > table > tbody > tr:nth-child(1) > td:nth-child(2) > input'
password_selector = 'body > div.DialogBox.trPopupDialog.editUserPopup > div > div > div.dialogContent > div > div.bodyWidgetHolder > div > table.gwt-DisclosurePanel.gwt-DisclosurePanel-open > tbody > tr:nth-child(2) > td > div > table > tbody > tr:nth-child(2) > td:nth-child(2) > table > tbody > tr:nth-child(1) > td > input'
signinconfirm_selector = 'body > div.DialogBox.trPopupDialog.editUserPopup > div > div > div.dialogContent > div > div.bodyWidgetHolder > div > table.gwt-DisclosurePanel.gwt-DisclosurePanel-open > tbody > tr:nth-child(2) > td > div > table > tbody > tr:nth-child(4) > td:nth-child(2) > table > tbody > tr > td:nth-child(1) > button'
play_selector = '#dUI > table > tbody > tr:nth-child(2) > td:nth-child(2) > div > div.mainViewport > div > table > tbody > tr:nth-child(2) > td > table > tbody > tr > td:nth-child(2) > table > tbody > tr:nth-child(1) > td > a'
# just to check if the race page is loaded
banner_selector = 'body > div.countdownPopup.horizontalCountdownPopup > div > table > tbody > tr > td > table > tbody > tr > td:nth-child(2)'
# this selector needs #gwt-uid-{uid} >
text_selector = 'table > tbody > tr:nth-child(2) > td > table > tbody > tr:nth-child(1) > td > table > tbody > tr:nth-child(1) > td > div > div'
input_selector = 'table > tbody > tr:nth-child(2) > td > table > tbody > tr:nth-child(2) > td > input'
raceagain_selector = 'table > tbody > tr:nth-child(3) > td > table > tbody > tr > td:nth-child(2) > a'
# after race selector
wpm_selector = 'table > tbody > tr:nth-child(4) > td > div > table > tbody > tr:nth-child(2) > td > table > tbody > tr > td:nth-child(2) > table > tbody > tr:nth-child(4) > td > table > tbody > tr:nth-child(1) > td:nth-child(2) > table > tbody > tr > td:nth-child(1) > div > div'
time_selector = 'table > tbody > tr:nth-child(4) > td > div > table > tbody > tr:nth-child(2) > td > table > tbody > tr > td:nth-child(2) > table > tbody > tr:nth-child(4) > td > table > tbody > tr:nth-child(2) > td:nth-child(2) > div > span'
point_selector = 'table > tbody > tr:nth-child(4) > td > div > table > tbody > tr:nth-child(2) > td > table > tbody > tr > td:nth-child(2) > table > tbody > tr:nth-child(4) > td > table > tbody > tr:nth-child(4) > td:nth-child(2) > div > div'
# check if element exist using css selector
def isElementExist(selector):
try:
browser.find_element_by_css_selector(selector)
except NoSuchElementException:
return False
return True
# get uid where race element nested
def bruteUID():
print("bruteforce-ing uid...")
uid = 0
# try checking the input selector element
while uid < 10000:
input_selector = '#gwt-uid-%d > table > tbody > tr:nth-child(2) > td > table > tbody > tr:nth-child(2) > td > input' % uid
if isElementExist(input_selector):
break
uid += 1
print("uid found:", uid)
return uid
# get text, input, and race-again element
def getRaceElementsSelector():
uid = "#gwt-uid-%d > " % bruteUID()
selectors = {
'text': uid + text_selector,
'input': uid + input_selector,
'raceagain': uid + raceagain_selector,
'wpm': uid + wpm_selector,
'time': uid + time_selector,
'point': uid + point_selector,
}
return selectors
# get and wait an element using css selector
def getAndWait(selector, key, max=60):
print('get and wait:', key)
return WebDriverWait(browser, max).until(EC.presence_of_element_located((By.CSS_SELECTOR, selector)))
# find an element using css selector
def find(selector, key):
print('find:', key)
return browser.find_element_by_css_selector(selector)
def secureClick(element, key):
while not element.is_displayed():
print(key, 'is not visible, waiting for 1s')
sleep(1)
print('click:', key)
element.send_keys(Keys.TAB)
element.click()
# login using data from .env
def login():
print("login...")
getAndWait(signin_selector, 'sigin').click()
getAndWait(username_selector, 'username').send_keys(getenv("username"))
find(password_selector, 'password').send_keys(getenv("password"))
find(signinconfirm_selector, 'signinconfirm').click()
sleep(5)
print("done login...")
# self explanatory
def race(count):
try:
#page loading check
getAndWait(banner_selector, 'banner')
selectors = getRaceElementsSelector()
# select text element
text = find(selectors['text'], 'text').text
print("text:", text)
# select text input element where we need to type the text
text_input = find(selectors['input'], "input")
# wait for game to start
while text_input.get_attribute('disabled'):
print("wait the race to start for 1s...")
sleep(1)
# after countdown is done, click the element (47)
text_input.click()
# type using pyautogui because I dont know how to set the typing speed
print("typing...")
pyautogui.typewrite(text, interval=0.14)
# save the result
result = [
text[:10] + '...' + text[-10:],
getAndWait(selectors['wpm'], 'wpm').text,
getAndWait(selectors['time'], 'time').text,
getAndWait(selectors['point'], 'point').text
]
history.append(result)
count -= 1
if count:
secureClick(find(selectors['raceagain'], "raceagain"), "raceagain")
race(count)
except TimeoutException:
print('kelamaan')
if __name__ == "__main__":
load_dotenv()
count = 1
guestMode = False
if len(sys.argv) > 1:
count = int(sys.argv[1])
if len(sys.argv) > 2:
if sys.argv[2] == "g":
print('Start in guest mode...')
guestMode = True
# disable image load and idk what disk-cache-size used for
prefs = {'profile.managed_default_content_settings.images':2, 'disk-cache-size': 4096}
options = webdriver.ChromeOptions()
options.add_experimental_option("prefs", prefs)
browser = webdriver.Chrome(chrome_options=options)
browser.get('https://play.typeracer.com/')
if not guestMode:
login()
# click the "enter typing race button"
getAndWait(play_selector, 'playbutton').click()
# RACE!!!!
race(count)
print('\nRESULTS:')
print(tabulate(history, headers=['text', 'speed', 'time', 'point'], showindex=True))
wpms = [int(res[1].split()[0]) for res in history]
points = sum([int(res[3]) for res in history])
print('\nAVERAGE WPM:', sum(wpms) / len(wpms))
print('TOTAL POINTS:', points) | en | 0.694993 | # save each race result # elements selector # just to check if the race page is loaded # this selector needs #gwt-uid-{uid} > # after race selector # check if element exist using css selector # get uid where race element nested # try checking the input selector element # get text, input, and race-again element # get and wait an element using css selector # find an element using css selector # login using data from .env # self explanatory #page loading check # select text element # select text input element where we need to type the text # wait for game to start # after countdown is done, click the element (47) # type using pyautogui because I dont know how to set the typing speed # save the result # disable image load and idk what disk-cache-size used for # click the "enter typing race button" # RACE!!!! | 2.190006 | 2 |
exam_2020_08_22/project/rooms/alone_young.py | Minkov/python-oop-2021-02 | 2 | 6617940 | from project.appliances.tv import TV
from project.rooms.room import Room
class AloneYoung(Room):
    """Room preset for a single young occupant with one TV."""
    default_room_members_count = 1  # always exactly one occupant
    room_cost = 10  # base cost of the room itself
    appliance_types = (TV,)  # appliance classes installed in this room type
    def __init__(self, name: str, salary: float):
        super().__init__(name, salary, self.default_room_members_count)
        # NOTE(review): assumes Room.__init__ populates self.appliances and
        # exposes calculate_expenses -- confirm against the Room base class.
        self.calculate_expenses(self.appliances)
| from project.appliances.tv import TV
from project.rooms.room import Room
class AloneYoung(Room):
default_room_members_count = 1
room_cost = 10
appliance_types = (TV,)
def __init__(self, name: str, salary: float):
super().__init__(name, salary, self.default_room_members_count)
self.calculate_expenses(self.appliances)
| none | 1 | 2.705821 | 3 | |
scripts/process_eval_logs.py | stanford-oval/SPL | 7 | 6617941 | <filename>scripts/process_eval_logs.py
import csv
from argparse import ArgumentParser
import re
parser = ArgumentParser()
parser.add_argument('--input_file', type=str)
parser.add_argument('--output_csv_file', type=str)
parser.add_argument('--option', default='eval', choices=['eval', 'debug'])
args = parser.parse_args()
lang_regex = re.compile('lang=(\w+)')
row_dicts = []
with open(args.input_file, 'r') as f_in:
for line in f_in:
if args.option == 'eval':
fieldnames = ['language', 'em_accuracy', 'bleu_score']
em_regex = re.compile('\"em\":\s(\d+\.\d+)')
bleu_regex = re.compile('"bleu":\s(\d+\.\d+)')
if ('lang' in line):
language = lang_regex.findall(line)[0]
elif ('em' in line) or ('bleu' in line):
em = em_regex.findall(line)[0]
bleu = bleu_regex.findall(line)[0]
row_dicts.append({'language': language, 'em_accuracy': em, 'bleu_score': bleu})
elif args.option == 'debug':
fieldnames = ['language', 'size', 'em_accuracy', 'em_wo_params', 'syntax']
if ('lang' in line):
language = lang_regex.findall(line)[0]
elif 'eval' in line or 'test' in line:
_, _, size, em, em_wo_params, fm, dm, nfm, syntax = map(lambda part: part.strip(), line.split(','))
row_dicts.append({'language': language, 'size': size, 'em_accuracy': float(em)*100, 'em_wo_params': float(em_wo_params)*100, 'syntax': float(syntax)*100})
with open(args.output_csv_file, 'w') as f_out:
csv_writer = csv.DictWriter(f_out, fieldnames)
csv_writer.writeheader()
csv_writer.writerows(row_dicts)
| <filename>scripts/process_eval_logs.py
import csv
from argparse import ArgumentParser
import re
parser = ArgumentParser()
parser.add_argument('--input_file', type=str)
parser.add_argument('--output_csv_file', type=str)
parser.add_argument('--option', default='eval', choices=['eval', 'debug'])
args = parser.parse_args()
lang_regex = re.compile('lang=(\w+)')
row_dicts = []
with open(args.input_file, 'r') as f_in:
for line in f_in:
if args.option == 'eval':
fieldnames = ['language', 'em_accuracy', 'bleu_score']
em_regex = re.compile('\"em\":\s(\d+\.\d+)')
bleu_regex = re.compile('"bleu":\s(\d+\.\d+)')
if ('lang' in line):
language = lang_regex.findall(line)[0]
elif ('em' in line) or ('bleu' in line):
em = em_regex.findall(line)[0]
bleu = bleu_regex.findall(line)[0]
row_dicts.append({'language': language, 'em_accuracy': em, 'bleu_score': bleu})
elif args.option == 'debug':
fieldnames = ['language', 'size', 'em_accuracy', 'em_wo_params', 'syntax']
if ('lang' in line):
language = lang_regex.findall(line)[0]
elif 'eval' in line or 'test' in line:
_, _, size, em, em_wo_params, fm, dm, nfm, syntax = map(lambda part: part.strip(), line.split(','))
row_dicts.append({'language': language, 'size': size, 'em_accuracy': float(em)*100, 'em_wo_params': float(em_wo_params)*100, 'syntax': float(syntax)*100})
with open(args.output_csv_file, 'w') as f_out:
csv_writer = csv.DictWriter(f_out, fieldnames)
csv_writer.writeheader()
csv_writer.writerows(row_dicts)
| none | 1 | 2.993743 | 3 | |
app.py | prakhar21/Investment-Risk-Return | 2 | 6617942 | """
@author: <NAME>
"""
from flask import Flask, request, jsonify
import yfinance as yf
import pandas as pd
import numpy as np
from datetime import date
from flask_cors import CORS
app = Flask(__name__)
CORS(app)
def get_data(stock_name, start_date, end_date, default_column='Close'):
    """Daily percent-change series of *default_column* for one ticker.

    Fetches daily bars from Yahoo Finance between start_date and end_date
    and returns the fractional day-over-day changes as a plain list (the
    first, NaN, entry is dropped).  Returns the int 0 when no data came back.
    NOTE(review): the 0-vs-list return type is inconsistent; downstream
    np.mean/np.std calls happen to accept both -- confirm intent.
    """
    stock = yf.Ticker(stock_name)
    hist = stock.history(period='1d', start=start_date, end=end_date)
    s = hist[default_column].pct_change()
    #s = np.log(hist[default_column]/hist[default_column].shift(1))
    default_col_history = s.tolist()[1:]
    if len(default_col_history) == 0: return 0
    else: return default_col_history
def calculate_sharpe(history):
    """Sharpe ratio of the return series, rounded to 2 decimals.

    NOTE(review): risk-free rate is the constant 10.0 while *history* holds
    daily fractional changes -- the units look mismatched; confirm scale.
    """
    mean_return = np.mean(history)
    risk_free_rate = 10.0
    ratio = (mean_return - risk_free_rate) / np.std(history)
    return round(ratio, 2)
def calculate_sortino(history):
    """Sortino ratio: like Sharpe, but penalises only below-mean returns."""
    mean_return = np.mean(history)
    downside = [r for r in history if r < mean_return]
    risk_free_rate = 10.0
    # NOTE(review): an empty/singleton downside list makes np.std zero and
    # the ratio infinite -- confirm that inputs always straddle the mean.
    ratio = (mean_return - risk_free_rate) / np.std(downside)
    return round(ratio, 2)
@app.route('/getmetrics', methods=['POST'])
def metrics():
    """POST /getmetrics: risk/return metrics for a fixed ticker basket.

    Form fields: start_date, end_date.  For each of AAPL/GOOG/AMZN returns
    every 30th return sample plus [sharpe, sortino]; the ticker with the
    highest Sortino ratio is reported under the 'best' key.
    """
    history, sharpe_ratios, sortino_ratio = [], [], []
    start_date = request.form['start_date']
    end_date = request.form['end_date']
    # if start_date < end_date:
    # return "Start date can't be less than end date"
    for s in ['AAPL', 'GOOG', 'AMZN']: #TODO #change to variable coming from UI
        history.append(get_data(s, start_date, end_date))
    assert len(history)==3
    response = {}
    # Track the ticker with the largest Sortino ratio seen so far.
    sortino_max = -10000
    s = None
    for data, stock in zip(history, ['AAPL', 'GOOG', 'AMZN']):
        print (stock)
        sortino = calculate_sortino(data)
        print (sortino)
        if sortino > sortino_max:
            sortino_max = sortino
            s = stock
        # Thin the series (every 30th sample) to keep the payload small.
        response[stock] = {'data': data[::30], 'metrics': [calculate_sharpe(data), sortino]}
    response['best']=s
    return jsonify(response)
if __name__=='__main__':
    # Dev-only server: debug mode and the 0.0.0.0 bind are not production-safe.
    app.run(host='0.0.0.0', port=5555, debug=True, threaded=True)
| """
@author: <NAME>
"""
from flask import Flask, request, jsonify
import yfinance as yf
import pandas as pd
import numpy as np
from datetime import date
from flask_cors import CORS
app = Flask(__name__)
CORS(app)
def get_data(stock_name, start_date, end_date, default_column='Close'):
stock = yf.Ticker(stock_name)
hist = stock.history(period='1d', start=start_date, end=end_date)
s = hist[default_column].pct_change()
#s = np.log(hist[default_column]/hist[default_column].shift(1))
default_col_history = s.tolist()[1:]
if len(default_col_history) == 0: return 0
else: return default_col_history
def calculate_sharpe(history):
expected_return = np.mean(history)
risk_free = 10.0
sharpe = (expected_return - risk_free) / np.std(history)
return round(sharpe, 2)
def calculate_sortino(history):
expected_return = np.mean(history)
below_avg = [i for i in history if i < expected_return]
risk_free = 10.0
sortino = (expected_return - risk_free) / np.std(below_avg)
return round(sortino, 2)
@app.route('/getmetrics', methods=['POST'])
def metrics():
history, sharpe_ratios, sortino_ratio = [], [], []
start_date = request.form['start_date']
end_date = request.form['end_date']
# if start_date < end_date:
# return "Start date can't be less than end date"
for s in ['AAPL', 'GOOG', 'AMZN']: #TODO #change to variable coming from UI
history.append(get_data(s, start_date, end_date))
assert len(history)==3
response = {}
sortino_max = -10000
s = None
for data, stock in zip(history, ['AAPL', 'GOOG', 'AMZN']):
print (stock)
sortino = calculate_sortino(data)
print (sortino)
if sortino > sortino_max:
sortino_max = sortino
s = stock
response[stock] = {'data': data[::30], 'metrics': [calculate_sharpe(data), sortino]}
response['best']=s
return jsonify(response)
if __name__=='__main__':
app.run(host='0.0.0.0', port=5555, debug=True, threaded=True)
| en | 0.429581 | @author: <NAME> #s = np.log(hist[default_column]/hist[default_column].shift(1)) # if start_date < end_date: # return "Start date can't be less than end date" #TODO #change to variable coming from UI | 2.795971 | 3 |
src/compas_fea/fea/ansys/reading/__init__.py | yijiangh/compas_fea | 0 | 6617943 | <filename>src/compas_fea/fea/ansys/reading/__init__.py
from .ansys_read import * | <filename>src/compas_fea/fea/ansys/reading/__init__.py
from .ansys_read import * | none | 1 | 0.864739 | 1 | |
python/tests/testdata/region_FR.py | rodgar-nvkz/python-phonenumbers | 2,424 | 6617944 | """Auto-generated file, do not edit by hand. FR metadata"""
from phonenumbers.phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_FR = PhoneMetadata(id='FR', country_code=33, international_prefix='00',
general_desc=PhoneNumberDesc(national_number_pattern='3\\d{6}', possible_length=(7,)),
fixed_line=PhoneNumberDesc(national_number_pattern='3\\d{6}', example_number='3123456', possible_length=(7,)),
national_prefix='0',
national_prefix_for_parsing='0',
number_format=[NumberFormat(pattern='(\\d)(\\d{2})(\\d{2})(\\d{2})', format='\\1 \\2 \\3 \\4', leading_digits_pattern=['3'], national_prefix_formatting_rule='0\\1')])
| """Auto-generated file, do not edit by hand. FR metadata"""
from phonenumbers.phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_FR = PhoneMetadata(id='FR', country_code=33, international_prefix='00',
general_desc=PhoneNumberDesc(national_number_pattern='3\\d{6}', possible_length=(7,)),
fixed_line=PhoneNumberDesc(national_number_pattern='3\\d{6}', example_number='3123456', possible_length=(7,)),
national_prefix='0',
national_prefix_for_parsing='0',
number_format=[NumberFormat(pattern='(\\d)(\\d{2})(\\d{2})(\\d{2})', format='\\1 \\2 \\3 \\4', leading_digits_pattern=['3'], national_prefix_formatting_rule='0\\1')])
| en | 0.829446 | Auto-generated file, do not edit by hand. FR metadata | 2.067709 | 2 |
gradient.py | DylanFrank/optimize | 0 | 6617945 | import numpy as np
from numpy.linalg import norm
from scipy.optimize import minimize_scalar, OptimizeResult
def is_stop(next_val, current, tol):
    """Relative-change stopping rule.

    True when ||next_val - current|| / max(1, ||current||) < tol.
    """
    step = norm(next_val - current, 2)
    scale = max(1, norm(current, 2))
    return step / scale < tol
def fast_gradient(fun, grad, x0, tol=1e-7, max_iter=500):
    """Steepest-descent minimisation with a Brent line search.

    :param fun: objective, called as fun(x) on an ndarray
    :param grad: gradient of the objective
    :param x0: initial point (ndarray)
    :param tol: relative-step stopping tolerance
    :param max_iter: iteration cap
    :return: OptimizeResult with fields x, fun, jac, nit

    Fix vs original: the line-search extra argument is now the 1-tuple
    ``args=(x0,)``.  The original ``args=x0`` let SciPy unpack the ndarray
    element-wise into phi(), raising TypeError for every problem with more
    than one variable.  The is_stop() relative-step test is inlined
    (mathematically identical).
    """
    phi = lambda alpha, x: fun(x - alpha * np.array(grad(x)))  # merit along -grad
    iters = max_iter
    while iters > 0:
        iters -= 1
        res = minimize_scalar(phi, method='brent', args=(x0,), tol=1e-5)
        x_next = x0 - res.x * np.array(grad(x0))
        # Stop when the relative step size falls below tol.
        if norm(x_next - x0, 2) / max(1, norm(x0, 2)) < tol:
            break
        x0 = x_next
    return OptimizeResult({'x': x0, 'fun': fun(x0), 'jac': grad(x0), 'nit': max_iter - iters})
def cg_gradient(fun, grad, x0, args=(), g_args=(), tol=1e-8, max_iter=5000):
    """Nonlinear conjugate gradient (Fletcher-Reeves) with periodic restarts.

    :param fun: objective, called as fun(x, *args)
    :param grad: gradient, called as grad(x, *g_args)
    :param x0: initial point (ndarray)
    :return: OptimizeResult with fields x, fun, jac, nit
    """
    line_fun = lambda a, point, direction: fun(*((point + a * direction,) + args))
    g_prev = grad(*((x0,) + g_args))
    direction = -g_prev
    for it in range(max_iter):
        step = minimize_scalar(line_fun, bounds=(0, 100), args=(x0, direction), tol=1e-4)
        x0 = x0 + step.x * direction
        g_cur = grad(*((x0,) + g_args))
        if is_stop(g_cur, np.zeros(g_cur.shape), tol):
            break
        beta = np.sum(g_cur ** 2) / np.sum(g_prev ** 2)  # Fletcher-Reeves formula
        g_prev = g_cur
        direction = -g_cur + beta * direction
        if it % (len(x0) + 5) == 0:
            direction = -g_cur  # periodic restart to plain steepest descent
    return OptimizeResult({'x': x0, 'fun': fun(*((x0,) + args)), 'jac': grad(*((x0,) + g_args)), 'nit': max_iter - it})
def dfp(fun, grad, x0, args=(), g_args=(), tol=1e-8, max_iter=5000):
    """DFP quasi-Newton minimisation.

    :param fun: objective, called as fun(x, *args)
    :param grad: gradient, called as grad(x, *g_args)
    :param x0: initial point (array-like)
    :param args: extra positional args for fun
    :param g_args: extra positional args for grad
    :param tol: gradient-norm stopping tolerance
    :param max_iter: iteration cap
    :return: OptimizeResult with fields x, fun, jac, nit

    Fixes vs original: (1) 'brent' is an unbounded line-search method and
    passing ``bounds=`` together with ``method='brent'`` raises ValueError
    on modern SciPy, so the bounds argument is dropped; (2) the
    is_stop(g, 0) call is inlined as ``norm(g) < tol`` -- identical math,
    since ||g - 0|| / max(1, ||0||) == ||g||.
    """
    h0 = np.eye(len(x0))  # running inverse-Hessian approximation
    g_0 = grad(*((x0,) + g_args))
    alpha = lambda a, x, d: fun(*((x + a * d,) + args))
    for i in range(max_iter):
        if norm(g_0, 2) < tol:  # converged: gradient numerically zero
            break
        d = -h0.dot(g_0)  # quasi-Newton search direction
        alp = minimize_scalar(alpha, args=(x0, d), method='brent', tol=1e-4)
        alp = alp.x
        x_next = x0 + alp * d
        delta_x = (alp * d).reshape((len(x0), 1))
        g_next = grad(*((x_next,) + g_args))
        delta_g = (g_next - g_0).reshape(delta_x.shape)
        # DFP rank-2 inverse-Hessian update.
        tmp = h0.dot(delta_g)
        h0 = h0 + delta_x.dot(delta_x.T) / (delta_x.T.dot(delta_g)) - tmp.dot(tmp.T) / (delta_g.T.dot(tmp))
        x0 = x_next
        g_0 = g_next
    return OptimizeResult({'nit': i, 'x': x0, 'jac': g_0, 'fun': fun(*((x0,) + args))})
def bfgs(fun, grad, x0, args=(), g_args=(), tol=1e-8, max_iter=5000):
    """BFGS quasi-Newton minimisation.

    :param fun: objective, called as fun(x, *args)
    :param grad: gradient, called as grad(x, *g_args)
    :param x0: initial point (array-like)
    :param args: extra positional args for fun
    :param g_args: extra positional args for grad
    :param tol: gradient-norm stopping tolerance
    :param max_iter: iteration cap
    :return: OptimizeResult with fields x, fun, jac, nit

    Fixes vs original: (1) ``bounds=`` is incompatible with
    ``method='brent'`` on modern SciPy (raises ValueError) and is dropped;
    (2) the is_stop(g, 0) call is inlined as ``norm(g) < tol`` --
    mathematically identical.
    """
    h0 = np.eye(len(x0))  # running inverse-Hessian approximation
    g_0 = grad(*((x0,) + g_args))
    alpha = lambda a, x, d: fun(*((x + a * d,) + args))
    for i in range(max_iter):
        if norm(g_0, 2) < tol:  # converged: gradient numerically zero
            break
        d = -h0.dot(g_0)  # quasi-Newton search direction
        alp = minimize_scalar(alpha, args=(x0, d), method='brent', tol=1e-4)
        alp = alp.x
        x_next = x0 + alp * d
        delta_x = (alp * d).reshape((len(x0), 1))
        g_next = grad(*((x_next,) + g_args))
        delta_g = (g_next - g_0).reshape(delta_x.shape)
        # BFGS rank-2 inverse-Hessian update.
        tmp = h0.dot(delta_g).dot(delta_x.T)
        tmp1 = (1 + delta_g.T.dot(h0).dot(delta_g) / (delta_g.T.dot(delta_x))) * delta_x.dot(delta_x.T) / (delta_x.T.dot(delta_g))
        tmp2 = (tmp + tmp.T) / (delta_g.T.dot(delta_x))
        h0 = h0 + tmp1 - tmp2
        x0 = x_next
        g_0 = g_next
    return OptimizeResult({'nit': i, 'x': x0, 'jac': g_0, 'fun': fun(*((x0,) + args))})
| import numpy as np
from numpy.linalg import norm
from scipy.optimize import minimize_scalar, OptimizeResult
def is_stop(next_val, current, tol):
"""
停机准则梯度方法
:param next_val:
:param current:
:return: bool
"""
return norm(next_val - current, 2) / max(1, norm(current, 2)) < tol
def fast_gradient(fun, grad, x0, tol=1e-7, max_iter=500):
phi = lambda alpha, x: fun(x - alpha * np.array(grad(x))) # 最速降参数
iters = max_iter
while iters > 0:
iters -= 1
res = minimize_scalar(phi, method='brent', args=x0, tol=1e-5)
x_next = x0 - res.x * np.array(grad(x0))
if is_stop(x_next, x0, tol):
break
x0 = x_next
return OptimizeResult({'x': x0, 'fun': fun(x0), 'jac': grad(x0), 'nit': max_iter - iters})
def cg_gradient(fun, grad, x0, args=(), g_args=(), tol=1e-8, max_iter=5000):
alpha = lambda a, x_k, d: fun(*((x_k + a * d,) + args))
g0 = grad(*((x0,) + g_args))
d0 = -g0
for _ in range(max_iter):
a_k = minimize_scalar(alpha, bounds=(0, 100), args=(x0, d0), tol=1e-4)
x0 = x0 + a_k.x * d0
g_k = grad(*((x0,) + g_args))
if is_stop(g_k, np.zeros(g_k.shape), tol):
break
beta = np.sum(g_k ** 2) / np.sum(g0 ** 2) # Fletcher-Reeves 公式
g0 = g_k
d0 = -g_k + beta * d0
if _ % (len(x0) + 5) == 0:
d0 = -g_k
return OptimizeResult({'x': x0, 'fun': fun(*((x0,) + args)), 'jac': grad(*((x0,) + g_args)), 'nit': max_iter - _})
def dfp(fun, grad, x0, args=(), g_args=(), tol=1e-8, max_iter=5000):
"""
:param fun: function ,目标函数
:param grad:function ,目标函数梯度
:param x0: list, 初始向量
:param args: tuple, fun,其余参数
:param g_args: tuple, grad其余参数
:param tol: float,精度
:param max_iter: int, 最大迭代次数
:return: OptimizeResult, 最优解
"""
h0 = np.eye(len(x0))
g_0 = grad(*((x0,) + g_args))
alpha = lambda a, x, d: fun(*((x + a * d,) + args))
for i in range(max_iter):
if is_stop(g_0, np.zeros(g_0.shape), tol):
break
d = -h0.dot(g_0)
alp = minimize_scalar(alpha, bounds=(0, 10000), args=(x0, d), method='brent', tol=1e-4)
alp = alp.x
x_next = x0 + alp * d
delta_x = (alp * d).reshape((len(x0), 1))
g_next = grad(*((x_next,) + g_args))
delta_g = g_next - g_0
delta_g = delta_g.reshape((delta_x.shape))
tmp = h0.dot(delta_g)
h0 = h0 + delta_x.dot(delta_x.T) / (delta_x.T.dot(delta_g)) - tmp.dot(tmp.T) / (delta_g.T.dot(tmp))
x0 = x_next
g_0 = g_next
return OptimizeResult({'nit': i, 'x': x0, 'jac': g_0, 'fun': fun(*((x0,) + args))})
def bfgs(fun, grad, x0, args=(), g_args=(), tol=1e-8, max_iter=5000):
"""
:param fun: function ,目标函数
:param grad:function ,目标函数梯度
:param x0: list, 初始向量
:param args: tuple, fun,其余参数
:param g_args: tuple, grad其余参数
:param tol: float,精度
:param max_iter: int, 最大迭代次数
:return: OptimizeResult, 最优解
"""
h0 = np.eye(len(x0))
g_0 = grad(*((x0,) + g_args))
alpha = lambda a, x, d: fun(*((x + a * d,) + args))
for i in range(max_iter):
if is_stop(g_0, np.zeros(g_0.shape), tol):
break
d = -h0.dot(g_0)
alp = minimize_scalar(alpha, bounds=(0, 10000), args=(x0, d), method='brent', tol=1e-4)
alp = alp.x
x_next = x0 + alp * d
delta_x = (alp * d).reshape((len(x0), 1))
g_next = grad(*((x_next,) + g_args))
delta_g = g_next - g_0
delta_g = delta_g.reshape((delta_x.shape))
tmp = h0.dot(delta_g).dot(delta_x.T)
tmp1 = (1+delta_g.T.dot(h0).dot(delta_g)/(delta_g.T.dot(delta_x)))*delta_x.dot(delta_x.T)/(delta_x.T.dot(delta_g))
tmp2 = (tmp + tmp.T)/(delta_g.T.dot(delta_x))
h0 = h0 + tmp1 - tmp2
x0 = x_next
g_0 = g_next
return OptimizeResult({'nit': i, 'x': x0, 'jac': g_0, 'fun': fun(*((x0,) + args))})
| zh | 0.70612 | 停机准则梯度方法 :param next_val: :param current: :return: bool # 最速降参数 # Fletcher-Reeves 公式 :param fun: function ,目标函数 :param grad:function ,目标函数梯度 :param x0: list, 初始向量 :param args: tuple, fun,其余参数 :param g_args: tuple, grad其余参数 :param tol: float,精度 :param max_iter: int, 最大迭代次数 :return: OptimizeResult, 最优解 :param fun: function ,目标函数 :param grad:function ,目标函数梯度 :param x0: list, 初始向量 :param args: tuple, fun,其余参数 :param g_args: tuple, grad其余参数 :param tol: float,精度 :param max_iter: int, 最大迭代次数 :return: OptimizeResult, 最优解 | 2.734787 | 3 |
vkts/vklib/vkreq.py | smurphik/vkts | 0 | 6617946 | #! /usr/bin/env python3
"""Realisation of API requests to vk.com. For single request use
`apply_vk_method`. For speedup you can use class `Executor`, wich pack
many requests to packs by 25 requests."""
import requests
import json
import time
import os
import getpass
from . import vkauth
from ..usrdata import UsrData
import logging
from logging.handlers import RotatingFileHandler
# Adjust logging
handler = RotatingFileHandler(".vklib.vkreq.log", mode='a',
maxBytes=1024*1024*10, backupCount=1)
handler.setFormatter(logging.Formatter("%(asctime)s vkreq: %(message)s"))
handler.setLevel(logging.DEBUG)
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
logger.addHandler(handler)
_token = None
mock_responses = '.mock_request_responses.json'
def update_token():
    """Read token from local user data or get a new one from vk.com.

    Side effects: sets the module-global ``_token`` and, when the account
    password is stored (not entered interactively), persists the fresh
    token back into the user data store.

    Raises:
        RuntimeError: if no token could be obtained after all attempts.
    """
    global _token
    u = UsrData()
    # For the first call, try to reuse a token saved by a previous run.
    if not _token:
        ac_name, ac_obj = u.get_active_obj('acc', 'vk')
        _token = ac_obj['token']
        if _token:
            logger.debug("Token is read")
            return
    # Request a fresh token, backing off exponentially between attempts.
    _token = None
    attempts = 5
    attempt_pause = 10
    while not _token and attempts > 0:
        # Wait for sparse requests (skip the pause on the first attempt).
        if attempts < 5:
            logger.debug("Attempt pause %s", attempt_pause)
            time.sleep(attempt_pause)
            attempt_pause *= 2
        attempts -= 1
        logger.debug("Try to get token")
        ac_name, ac_obj = u.get_active_obj('acc', 'vk')
        email = ac_obj['uname']
        pswd = ac_obj['password']
        while not pswd:
            pswd = getpass.getpass('Input password of vk account'
                                   + ' «{}»: '.format(ac_name))
        user_auth = vkauth.VKAuth(permissions=['friends',
                                               'groups',
                                               'wall'],
                                  app_id='6471192',
                                  api_v='5.74',
                                  email=email,
                                  pswd=pswd)
        user_auth.auth()
        _token = user_auth._access_token
    if _token is None:
        logger.debug("Failed to get token")
        # Was a bare ``raise`` outside any except block, which itself failed
        # with "RuntimeError: No active exception to re-raise"; an explicit
        # RuntimeError keeps the raised type but adds a real message.
        raise RuntimeError('Failed to obtain VK access token')
    # Dump token (only when the account password is stored, i.e. not private).
    if ac_obj['password']:
        u.set(_token, 'acc', 'vk', ac_name, 'token')
    logger.debug("Token is updated")
def _set_token_to_params(params):
# Add token & version (if user doesn't give specific this)
if 'access_token' not in params:
params['access_token'] = _token
if 'v' not in params:
params['v'] = '5.9'
def _make_params_string(params):
"""Convert list of parameters to string"""
p_str = '&'.join(['{}={}'.format(f, params[f]) for f in params.keys()])
return p_str
def _short_print(s):
"""shrink output if it's too long"""
if len(s) <= 500:
print(s)
else:
print('{} ...... {}'.format(s[:245], s[-245:]))
def _vk_api_error_print(obj, url_of_req, params):
    """Log a vk API error object together with the request that triggered it."""
    print("vk API error code: {}".format(obj['error_code']))
    print(obj['error_msg'])
    broken = "Broken request: {}{}".format(url_of_req, _make_params_string(params))
    _short_print(broken)
def _vk_api_request(url_of_req, method, params):
    """POST one vk API request and return the decoded JSON object.

    If the file '.mock_request_responses.json' exists, the real network
    call is skipped and the next canned response is popped from that file
    instead (used for testing); the shortened list is written back so each
    mock is consumed exactly once.
    """
    if os.path.isfile(mock_responses):
        # "Home-made" mock: consume the first queued response.
        logger.debug("Try Mocked Request (method: %s)", method)
        with open(mock_responses) as fp:
            mocks_list = json.load(fp)
        json_obj = mocks_list.pop(0)
        with open(mock_responses, 'w') as fp:
            json.dump(mocks_list, fp)
        return json_obj
    else:
        # Real request
        logger.debug("Try Request (method: %s)", method)
        response = requests.post(url_of_req, data=params)
        return response.json()
def apply_vk_method(method, handle_api_errors=True, **params):
    """Make request to https://api.vk.com/method/. Return JSON-object.

    Network failures and throttling (API error 6) are retried with a
    growing pause; an expired token (API error 5) is refreshed via
    ``update_token()`` and the call repeated.  With
    ``handle_api_errors=True`` (default) "expected" API errors (deleted
    page, no access, bad request, ...) are reported and ``{}`` is
    returned; with ``handle_api_errors=False`` such error responses are
    returned to the caller unchanged.

    :param method: vk API method name, e.g. ``'users.get'``.
    :param params: request parameters; ``access_token`` and ``v`` are
        filled in automatically when absent.
    """
    global _token
    # Get token if absent (skipped when responses are mocked for tests)
    if not _token and not os.path.isfile('.mock_request_responses.json'):
        update_token()
    # Set token to URL string
    url_of_req = 'https://api.vk.com/method/' + method + '?'
    _set_token_to_params(params)
    # Do request with error processing
    error_pause = 5
    while True:
        try:
            # Request
            json_obj = _vk_api_request(url_of_req, method, params)
        except Exception as e:
            # In case of network problems: report, back off, retry.
            logger.debug("Request Error: " + str(e))
            p_str = _make_params_string(params)
            _short_print("Broken request: {}{}".format(url_of_req, p_str))
            print(e)
            if error_pause > 35:
                raise
            print('Wait ' + str(error_pause) + ' seconds')
            time.sleep(error_pause)
            error_pause += 5
            continue
        # Process response
        if 'error' in json_obj:
            logger.debug("API Error (%s: %s)", json_obj['error']['error_code'],
                         json_obj['error']['error_msg'])
            if json_obj['error']['error_code'] == 5:
                # Token expired/invalid: refresh it and retry.
                if 'access_token' in params:
                    del params['access_token']
                update_token()
                _set_token_to_params(params)
                continue
            elif json_obj['error']['error_code'] == 6:
                # Too many requests per second.
                # It took a long time to create a "clever" mechanism to bypass
                # these errors. However, such a stupid solution was the most
                # effective.
                time.sleep(0.5)
                continue
            elif not handle_api_errors:
                return json_obj
            elif json_obj['error']['error_code'] == 18:
                # Page is deleted or banned
                return {}
            elif (json_obj['error']['error_code'] == 203
                    or json_obj['error']['error_code'] == 7):
                # No have access
                return {}
            elif json_obj['error']['error_code'] == 12:
                # execute compilation error
                _vk_api_error_print(json_obj['error'], url_of_req, params)
                return {}
            elif json_obj['error']['error_code'] == 14:
                # Captcha needed
                _vk_api_error_print(json_obj['error'], url_of_req, params)
                return json_obj  # to study the response format in the future
            elif json_obj['error']['error_code'] in (3, 8, 100, 113):
                # Wrong request (unknown method, bad parameter, ...)
                _vk_api_error_print(json_obj['error'], url_of_req, params)
                return {}
            else:
                # In case of server problems: back off and retry a few times.
                _vk_api_error_print(json_obj['error'], url_of_req, params)
                if error_pause > 35:
                    # Bug fix: the original used a bare ``raise`` here, which
                    # has no active exception outside an except block and
                    # itself fails with "No active exception to re-raise".
                    raise RuntimeError('vk API error {}: {}'.format(
                        json_obj['error']['error_code'],
                        json_obj['error']['error_msg']))
                print('Wait ' + str(error_pause) + ' seconds')
                time.sleep(error_pause)
                error_pause += 5
                continue
        # No errors -> out loop
        logger.debug("Successful Request")
        break
    return json_obj
class Executor:
    """Pack several vk API calls into single ``execute`` requests.

    `execute` runs up to 25 nested API calls, so requests added by
    `add_request` are emitted by `emit_requests` in batches of 25
    (more than one `execute` call is made when needed).

    Example:
    >>> e = Executor()
    >>> e.add_request('utils.resolveScreenName', screen_name='phys_kek')
    >>> e.add_request('utils.resolveScreenName_', screen_name='phys_kek')
    >>> e.add_request('users.getFollowers', **{'user_id': '1'})
    >>> e.emit_requests()
    >>> e.responses[0]['object_id'], e.responses[1], e.responses[2]['count']
    (111557607, [], 5909523)

    More tricky way - use parameter `processor` of `Executor.add_request()`:
    >>> e = Executor()
    >>> f = lambda r: print(r['object_id'] if r else "False")
    >>> for name in ['phys_kek', 'unreal-_-', 'drec_mipt']:
    >>>     e.add_request('utils.resolveScreenName', f, screen_name=name)
    >>> e.emit_requests()
    111557607
    False
    17708"""

    # vk allows at most this many nested calls per `execute` request.
    MAX_BATCH = 25

    def __init__(self):
        logger.debug("Executor created")
        self.responses = []   # accumulated responses, in request order
        self.errors = []      # accumulated `execute_errors` entries (flat)
        self.requests = []    # pending VKScript call strings
        self.processors = []  # per-request callbacks (or None), same order

    def add_request(self, method, processor=None, **params):
        """Add one request to `Executor.requests`.
        `processor` - function for processing response of this request"""
        logger.debug("Add %sth request %s", len(self.requests) + 1,
                     "with processor" if processor else "without processor")
        # VKScript wants double-quoted strings; NOTE(review): this also
        # rewrites any apostrophe *inside* a parameter value -- confirm
        # acceptable for callers.
        params_s = str(params).replace("'", '"')
        self.requests.append('API.{}({});'.format(method, params_s))
        self.processors.append(processor)

    def _run_batch(self, code, cnt):
        """Finish *code*, send it as one ``execute`` call, store the
        responses and dispatch the `cnt` collected results to their
        processors (consumed FIFO from ``self.processors``)."""
        code += 'return arr;'
        r = apply_vk_method('execute', **{'code': code})
        self.responses += r['response']
        if 'execute_errors' in r:
            # Bug fix: the original extended on full batches but *appended*
            # the whole list on the final batch, nesting it; always extend
            # so `errors` stays a flat list of error objects.
            self.errors += r['execute_errors']
        while cnt:
            func = self.processors.pop(0)
            if func:
                func(r['response'][-cnt])
            cnt -= 1

    def emit_requests(self):
        """Pack requests from `Executor.requests`
        (see `Executor.add_request()`) into `execute` requests and emit them
        to server. Responses will be saved in `Executor.responses`."""
        logger.debug("Emit requests (%s pieces)", len(self.requests))
        self.responses = []
        self.errors = []
        requests = self.requests
        code = ''
        cnt = 0
        while requests:
            # start a fresh VKScript accumulator for each batch
            if cnt == 0:
                code = 'var arr = [];\n' + \
                       'var r;\n'
            # collect the next request into the batch
            code += 'r = {}\n'.format(requests.pop(0)) + \
                    'arr = arr + [r];\n'
            cnt += 1
            # flush a full batch
            if cnt == self.MAX_BATCH:
                self._run_batch(code, cnt)
                cnt = 0
        # flush the final, partial batch if any
        if cnt:
            self._run_batch(code, cnt)
        self.requests = []
        self.processors = []
| #! /usr/bin/env python3
"""Realisation of API requests to vk.com. For single request use
`apply_vk_method`. For speedup you can use class `Executor`, wich pack
many requests to packs by 25 requests."""
import requests
import json
import time
import os
import getpass
from . import vkauth
from ..usrdata import UsrData
import logging
from logging.handlers import RotatingFileHandler
# Adjust logging
# NOTE(review): this attaches the handler to the *root* logger
# (``logging.getLogger()`` with no name), so every logger in the process --
# not only this module -- will write to .vklib.vkreq.log; confirm intended
# (library convention would be ``logging.getLogger(__name__)``).
handler = RotatingFileHandler(".vklib.vkreq.log", mode='a',
                              maxBytes=1024*1024*10, backupCount=1)
handler.setFormatter(logging.Formatter("%(asctime)s vkreq: %(message)s"))
handler.setLevel(logging.DEBUG)
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
logger.addHandler(handler)
# Module-level cache of the current vk access token (set by update_token()).
_token = None
# Presence of this file switches _vk_api_request() into mocked mode.
mock_responses = '.mock_request_responses.json'
def update_token():
    """Read token from local user data or get new one from vk.com.

    On first call the token saved by a previous run is reused if present.
    Otherwise up to 5 authentication attempts are made with exponential
    back-off; the obtained token is cached in the module global ``_token``
    and, for non-private accounts, persisted via :class:`UsrData`.

    :raises RuntimeError: if no token could be obtained after all attempts.
    """
    global _token
    u = UsrData()
    # For first call
    if not _token:
        # Try read token from the previous run
        ac_name, ac_obj = u.get_active_obj('acc', 'vk')
        _token = ac_obj['token']
        if _token:
            logger.debug("Token is read")
            return
    # Get token by request
    _token = None
    attempts = 5
    attempt_pause = 10
    while not _token and attempts > 0:
        # Wait between retries (doubled each time) to keep requests sparse
        if attempts < 5:
            logger.debug("Attempt pause %s", attempt_pause)
            time.sleep(attempt_pause)
            attempt_pause *= 2
        attempts -= 1
        # Request
        logger.debug("Try to get token")
        ac_name, ac_obj = u.get_active_obj('acc', 'vk')
        email = ac_obj['uname']
        pswd = ac_obj['password']
        while not pswd:
            pswd = getpass.getpass('Input password of vk account'
                                   + ' «{}»: '.format(ac_name))
        user_auth = vkauth.VKAuth(permissions=['friends',
                                               'groups',
                                               'wall'],
                                  app_id='6471192',
                                  api_v='5.74',
                                  email=email,
                                  pswd=pswd)
        user_auth.auth()
        _token = user_auth._access_token
    # Here token must be
    if _token is None:
        logger.debug("Failed to get token")
        # Bug fix: the original used a bare ``raise`` with no active
        # exception, which itself crashes with "No active exception to
        # re-raise"; raise an explicit, descriptive error instead.
        raise RuntimeError('Failed to obtain vk access token '
                           'after several attempts')
    # Dump token (if account isn't private)
    if ac_obj['password']:
        u.set(_token, 'acc', 'vk', ac_name, 'token')
    logger.debug("Token is updated")
def _set_token_to_params(params):
    """Fill in default ``access_token`` and API version ``v`` in-place.

    Caller-supplied values are kept; only missing keys are added.
    """
    params.setdefault('access_token', _token)
    params.setdefault('v', '5.9')
def _make_params_string(params):
"""Convert list of parameters to string"""
p_str = '&'.join(['{}={}'.format(f, params[f]) for f in params.keys()])
return p_str
def _short_print(s):
"""shrink output if it's too long"""
if len(s) <= 500:
print(s)
else:
print('{} ...... {}'.format(s[:245], s[-245:]))
def _vk_api_error_print(obj, url_of_req, params):
    """Report a vk API error object together with the offending request."""
    print("vk API error code: {}".format(obj['error_code']))
    print(obj['error_msg'])
    request_line = "Broken request: {}{}".format(url_of_req,
                                                 _make_params_string(params))
    _short_print(request_line)
def _vk_api_request(url_of_req, method, params):
    """Perform one raw API request and return the decoded JSON object.

    If the file named by ``mock_responses`` exists the request is mocked
    for testing: the first canned response is consumed from that file and
    the file is rewritten without it (destructive, FIFO order).
    """
    if os.path.isfile(mock_responses):
        # "Home-made" mock: pop the next canned response off the file.
        logger.debug("Try Mocked Request (method: %s)", method)
        with open(mock_responses) as fp:
            mocks_list = json.load(fp)
        json_obj = mocks_list.pop(0)
        # Persist the remaining canned responses for the next call.
        with open(mock_responses, 'w') as fp:
            json.dump(mocks_list, fp)
        return json_obj
    else:
        # Real request: POST the parameters to the API endpoint.
        logger.debug("Try Request (method: %s)", method)
        response = requests.post(url_of_req, data=params)
        return response.json()
def apply_vk_method(method, handle_api_errors=True, **params):
    """Make request to https://api.vk.com/method/. Return JSON-object.

    Network failures and throttling (API error 6) are retried with a
    growing pause; an expired token (API error 5) is refreshed via
    ``update_token()`` and the call repeated.  With
    ``handle_api_errors=True`` (default) "expected" API errors (deleted
    page, no access, bad request, ...) are reported and ``{}`` is
    returned; with ``handle_api_errors=False`` such error responses are
    returned to the caller unchanged.

    :param method: vk API method name, e.g. ``'users.get'``.
    :param params: request parameters; ``access_token`` and ``v`` are
        filled in automatically when absent.
    """
    global _token
    # Get token if absent (skipped when responses are mocked for tests)
    if not _token and not os.path.isfile('.mock_request_responses.json'):
        update_token()
    # Set token to URL string
    url_of_req = 'https://api.vk.com/method/' + method + '?'
    _set_token_to_params(params)
    # Do request with error processing
    error_pause = 5
    while True:
        try:
            # Request
            json_obj = _vk_api_request(url_of_req, method, params)
        except Exception as e:
            # In case of network problems: report, back off, retry.
            logger.debug("Request Error: " + str(e))
            p_str = _make_params_string(params)
            _short_print("Broken request: {}{}".format(url_of_req, p_str))
            print(e)
            if error_pause > 35:
                raise
            print('Wait ' + str(error_pause) + ' seconds')
            time.sleep(error_pause)
            error_pause += 5
            continue
        # Process response
        if 'error' in json_obj:
            logger.debug("API Error (%s: %s)", json_obj['error']['error_code'],
                         json_obj['error']['error_msg'])
            if json_obj['error']['error_code'] == 5:
                # Token expired/invalid: refresh it and retry.
                if 'access_token' in params:
                    del params['access_token']
                update_token()
                _set_token_to_params(params)
                continue
            elif json_obj['error']['error_code'] == 6:
                # Too many requests per second.
                # It took a long time to create a "clever" mechanism to bypass
                # these errors. However, such a stupid solution was the most
                # effective.
                time.sleep(0.5)
                continue
            elif not handle_api_errors:
                return json_obj
            elif json_obj['error']['error_code'] == 18:
                # Page is deleted or banned
                return {}
            elif (json_obj['error']['error_code'] == 203
                    or json_obj['error']['error_code'] == 7):
                # No have access
                return {}
            elif json_obj['error']['error_code'] == 12:
                # execute compilation error
                _vk_api_error_print(json_obj['error'], url_of_req, params)
                return {}
            elif json_obj['error']['error_code'] == 14:
                # Captcha needed
                _vk_api_error_print(json_obj['error'], url_of_req, params)
                return json_obj  # to study the response format in the future
            elif json_obj['error']['error_code'] in (3, 8, 100, 113):
                # Wrong request (unknown method, bad parameter, ...)
                _vk_api_error_print(json_obj['error'], url_of_req, params)
                return {}
            else:
                # In case of server problems: back off and retry a few times.
                _vk_api_error_print(json_obj['error'], url_of_req, params)
                if error_pause > 35:
                    # Bug fix: the original used a bare ``raise`` here, which
                    # has no active exception outside an except block and
                    # itself fails with "No active exception to re-raise".
                    raise RuntimeError('vk API error {}: {}'.format(
                        json_obj['error']['error_code'],
                        json_obj['error']['error_msg']))
                print('Wait ' + str(error_pause) + ' seconds')
                time.sleep(error_pause)
                error_pause += 5
                continue
        # No errors -> out loop
        logger.debug("Successful Request")
        break
    return json_obj
class Executor:
    """Pack several vk API calls into single ``execute`` requests.

    `execute` runs up to 25 nested API calls, so requests added by
    `add_request` are emitted by `emit_requests` in batches of 25
    (more than one `execute` call is made when needed).

    Example:
    >>> e = Executor()
    >>> e.add_request('utils.resolveScreenName', screen_name='phys_kek')
    >>> e.add_request('utils.resolveScreenName_', screen_name='phys_kek')
    >>> e.add_request('users.getFollowers', **{'user_id': '1'})
    >>> e.emit_requests()
    >>> e.responses[0]['object_id'], e.responses[1], e.responses[2]['count']
    (111557607, [], 5909523)

    More tricky way - use parameter `processor` of `Executor.add_request()`:
    >>> e = Executor()
    >>> f = lambda r: print(r['object_id'] if r else "False")
    >>> for name in ['phys_kek', 'unreal-_-', 'drec_mipt']:
    >>>     e.add_request('utils.resolveScreenName', f, screen_name=name)
    >>> e.emit_requests()
    111557607
    False
    17708"""

    # vk allows at most this many nested calls per `execute` request.
    MAX_BATCH = 25

    def __init__(self):
        logger.debug("Executor created")
        self.responses = []   # accumulated responses, in request order
        self.errors = []      # accumulated `execute_errors` entries (flat)
        self.requests = []    # pending VKScript call strings
        self.processors = []  # per-request callbacks (or None), same order

    def add_request(self, method, processor=None, **params):
        """Add one request to `Executor.requests`.
        `processor` - function for processing response of this request"""
        logger.debug("Add %sth request %s", len(self.requests) + 1,
                     "with processor" if processor else "without processor")
        # VKScript wants double-quoted strings; NOTE(review): this also
        # rewrites any apostrophe *inside* a parameter value -- confirm
        # acceptable for callers.
        params_s = str(params).replace("'", '"')
        self.requests.append('API.{}({});'.format(method, params_s))
        self.processors.append(processor)

    def _run_batch(self, code, cnt):
        """Finish *code*, send it as one ``execute`` call, store the
        responses and dispatch the `cnt` collected results to their
        processors (consumed FIFO from ``self.processors``)."""
        code += 'return arr;'
        r = apply_vk_method('execute', **{'code': code})
        self.responses += r['response']
        if 'execute_errors' in r:
            # Bug fix: the original extended on full batches but *appended*
            # the whole list on the final batch, nesting it; always extend
            # so `errors` stays a flat list of error objects.
            self.errors += r['execute_errors']
        while cnt:
            func = self.processors.pop(0)
            if func:
                func(r['response'][-cnt])
            cnt -= 1

    def emit_requests(self):
        """Pack requests from `Executor.requests`
        (see `Executor.add_request()`) into `execute` requests and emit them
        to server. Responses will be saved in `Executor.responses`."""
        logger.debug("Emit requests (%s pieces)", len(self.requests))
        self.responses = []
        self.errors = []
        requests = self.requests
        code = ''
        cnt = 0
        while requests:
            # start a fresh VKScript accumulator for each batch
            if cnt == 0:
                code = 'var arr = [];\n' + \
                       'var r;\n'
            # collect the next request into the batch
            code += 'r = {}\n'.format(requests.pop(0)) + \
                    'arr = arr + [r];\n'
            cnt += 1
            # flush a full batch
            if cnt == self.MAX_BATCH:
                self._run_batch(code, cnt)
                cnt = 0
        # flush the final, partial batch if any
        if cnt:
            self._run_batch(code, cnt)
        self.requests = []
        self.processors = []
| en | 0.736437 | #! /usr/bin/env python3 Realisation of API requests to vk.com. For single request use `apply_vk_method`. For speedup you can use class `Executor`, wich pack many requests to packs by 25 requests. # Adjust logging Read token from local user data or get new one from vk.com # For first call # Try read token from the previous run # Get token by request # Wait for sparse requests # Request # Here token must be # Dump token (if account isn't private) # Add token & version (if user doesn't give specific this) Convert list of parameters to string shrink output if it's too long Request (can be mocked by existence of file '.mock_request_responses.json' for testing). # "Home-made" mock # Real request Make request to https://api.vk.com/method/. Return JSON-object # Get token if absent # Set token to URL string # Do request with error processing # Request # In case of network problems. # Process response # It took a long time to create a "clever" mechanism to bypass # these errors. However, such a stupid solution was the most # effective. # Page is deleted or banned # No have access # execute compilation error # Captcha needed # to study the response format in the future # Wrong rquest # In case of server problems. # No errors -> out loop Class for exploit vk API method `execute` to join several requests (up to 25) into single one. If > 25 requests are added by `add_request`, then > 1 requests of `execute` will be executed by `emit_requests`. 
Example: >>> e = Executor() >>> e.add_request('utils.resolveScreenName', screen_name='phys_kek') >>> e.add_request('utils.resolveScreenName_', screen_name='phys_kek') >>> e.add_request('users.getFollowers', **{'user_id': '1'}) >>> e.emit_requests() >>> e.responses[0]['object_id'], e.responses[1], e.responses[2]['count'] (111557607, [], 5909523) More tricky way - use parameter `processor` of `Executor.add_request()`: >>> e = Executor() >>> f = lambda r: print(r['object_id'] if r else "False") >>> for name in ['phys_kek', 'unreal-_-', 'drec_mipt']: >>> e.add_request('utils.resolveScreenName', f, screen_name=name) >>> e.emit_requests() 111557607 False 17708 Add one request to `Executor.requests`. `processor` - function for processing response of this request # add string request to requests list Pack requests from `Executor.requests` (see `Executor.add_request()`) into `execute` requests and emit them to server. Responses will be saved in `Executor.responses`. # start code for execute # collect requests # step - recorded regular request # check for maximum requests to execute # complete code # save responses # process responses # make last execute if need # complete code # save responses # process responses | 2.324311 | 2 |
pyrclib/channels.py | martinsileno/pyrclib | 1 | 6617947 | <gh_stars>1-10
from pyrclib.user import User
class Channel(object):
"""Represents a channel we are in.
"""
def __init__(self, name):
self.name = name
self.users = {}
self.topic = Topic()
# TODO: needs a better way to store modes,
# what if the channel has a key/limit?
self.modes = ''
def renameuser(self, oldnick, newnick):
"""Called when a user changes nick.
"""
modes = self.users[oldnick]
del self.users[oldnick]
self.users[newnick] = modes
def __str__(self):
return '{0} [+{1}]'.format(self.name, self.modes)
def __contains__(self, item):
if isinstance(item, User):
if item.nick in self.users:
return True
return False
for user in self.users.keys():
if user.lower() == item.lower():
return True
return False
class Topic(object):
    """A channel topic: the text plus who set it and when."""

    def __init__(self, text=None, set_by=None, date=None):
        self.text = text
        self.set_by = set_by
        self.date = date

    def reset(self):
        """Forget the current topic entirely."""
        self.text = self.set_by = self.date = None

    def __str__(self):
        return self.text
| from pyrclib.user import User
class Channel(object):
    """A channel the client is currently joined to.

    Tracks the member list (nick -> modes string), the topic and the
    channel modes.
    """

    def __init__(self, name):
        self.name = name
        self.users = {}
        self.topic = Topic()
        # TODO: needs a better way to store modes,
        # what if the channel has a key/limit?
        self.modes = ''

    def renameuser(self, oldnick, newnick):
        """Move *oldnick*'s mode entry over to *newnick* (nick change)."""
        self.users[newnick] = self.users.pop(oldnick)

    def __str__(self):
        return '{0} [+{1}]'.format(self.name, self.modes)

    def __contains__(self, item):
        # A User object is matched by exact nick; anything else is
        # compared case-insensitively against the member nicks.
        if isinstance(item, User):
            return item.nick in self.users
        return any(nick.lower() == item.lower() for nick in self.users)
class Topic(object):
    """A channel topic: the text plus who set it and when."""

    def __init__(self, text=None, set_by=None, date=None):
        self.text = text
        self.set_by = set_by
        self.date = date

    def reset(self):
        """Forget the current topic entirely."""
        self.text = self.set_by = self.date = None

    def __str__(self):
        return self.text
djangomaster/sites.py | kpekepoh/django-aww | 0 | 6617948 | from collections import defaultdict
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict
from django.conf.urls import patterns, url
from django.core.urlresolvers import reverse_lazy
from django.template.defaultfilters import slugify
from django.views.generic.base import RedirectView
class MasterSite(object):
    """Registry of djangomaster pages and widgets.

    Views register themselves per module; ``urlpatterns`` turns the
    registry into Django URL patterns and (as a side effect) rebuilds
    the navigation menu returned by ``get_menu()``.
    """

    def __init__(self):
        # dotted module path -> list of registered page view classes
        self.pages = defaultdict(list)
        # dotted module path -> list of registered widget classes
        self.widgets = defaultdict(list)
        # module label -> [{'name': url name, 'label': page label}, ...]
        self._menu = OrderedDict()

    @property
    def homeview(self):
        """A redirect view pointing at the djangomaster home URL."""
        homeurl = reverse_lazy('djangomaster:djangomaster-home')
        return RedirectView.as_view(url=homeurl)

    @property
    def urlpatterns(self):
        """Build URL patterns for all registered pages.

        NOTE(review): accessing this property has side effects -- it
        rebuilds ``self._menu`` and assigns ``page.slug`` on every
        registered page class; confirm callers rely on this ordering.
        """
        self._menu = OrderedDict()
        urls = [url(r'^$', self.homeview, name='djangomaster')]
        for module, pages in self.pages.items():
            # the '.master' suffix is stripped from the menu label
            module = module.replace('.master', '')
            self._menu[module] = []
            module_name = slugify(module)
            for page in pages:
                page_name = slugify(page.name)
                page.slug = module_name + '-' + page_name
                pattern = r'^{module}/{page}'.format(module=module_name,
                                                     page=page_name)
                urls.append(url(pattern, page.as_view(), name=page.slug))
                self._menu[module].append({
                    'name': page.slug,
                    'label': page.label
                })
        return patterns(r'', *urls)

    def add_view(self, module_name, view):
        """Register a page view class unless it is marked abstract."""
        if view.abstract is False:
            self.pages[module_name].append(view)

    def add_widget(self, module_name, widget):
        """Register a widget class unless it is marked abstract."""
        if widget.abstract is False:
            self.widgets[module_name].append(widget)

    def get_menu(self):
        """Return the menu mapping, building it on first use."""
        if not self._menu:
            # Trigger the property for its menu-building side effect.
            self.urlpatterns
        return self._menu


# Shared module-level site instance.
mastersite = MasterSite()
| from collections import defaultdict
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict
from django.conf.urls import patterns, url
from django.core.urlresolvers import reverse_lazy
from django.template.defaultfilters import slugify
from django.views.generic.base import RedirectView
class MasterSite(object):
    """Registry of djangomaster pages and widgets.

    Views register themselves per module; ``urlpatterns`` turns the
    registry into Django URL patterns and (as a side effect) rebuilds
    the navigation menu returned by ``get_menu()``.
    """

    def __init__(self):
        # dotted module path -> list of registered page view classes
        self.pages = defaultdict(list)
        # dotted module path -> list of registered widget classes
        self.widgets = defaultdict(list)
        # module label -> [{'name': url name, 'label': page label}, ...]
        self._menu = OrderedDict()

    @property
    def homeview(self):
        """A redirect view pointing at the djangomaster home URL."""
        homeurl = reverse_lazy('djangomaster:djangomaster-home')
        return RedirectView.as_view(url=homeurl)

    @property
    def urlpatterns(self):
        """Build URL patterns for all registered pages.

        NOTE(review): accessing this property has side effects -- it
        rebuilds ``self._menu`` and assigns ``page.slug`` on every
        registered page class; confirm callers rely on this ordering.
        """
        self._menu = OrderedDict()
        urls = [url(r'^$', self.homeview, name='djangomaster')]
        for module, pages in self.pages.items():
            # the '.master' suffix is stripped from the menu label
            module = module.replace('.master', '')
            self._menu[module] = []
            module_name = slugify(module)
            for page in pages:
                page_name = slugify(page.name)
                page.slug = module_name + '-' + page_name
                pattern = r'^{module}/{page}'.format(module=module_name,
                                                     page=page_name)
                urls.append(url(pattern, page.as_view(), name=page.slug))
                self._menu[module].append({
                    'name': page.slug,
                    'label': page.label
                })
        return patterns(r'', *urls)

    def add_view(self, module_name, view):
        """Register a page view class unless it is marked abstract."""
        if view.abstract is False:
            self.pages[module_name].append(view)

    def add_widget(self, module_name, widget):
        """Register a widget class unless it is marked abstract."""
        if widget.abstract is False:
            self.widgets[module_name].append(widget)

    def get_menu(self):
        """Return the menu mapping, building it on first use."""
        if not self._menu:
            # Trigger the property for its menu-building side effect.
            self.urlpatterns
        return self._menu


# Shared module-level site instance.
mastersite = MasterSite()
| none | 1 | 2.126323 | 2 | |
cvxbind/main.py | jpanikulam/cvxbind | 9 | 6617949 | <reponame>jpanikulam/cvxbind<gh_stars>1-10
import os
import argparse
from utils import Log
from parse_cvxgen import ParseCVX
from gen_cpp import GenCPP
def main():
    """Parse command-line arguments and print the generated C++ binding."""
    parser = argparse.ArgumentParser(description='CVXGEN Python Binding Generator')
    parser.add_argument('path', metavar='path', default='./images',
                        help='Give the target path')
    parser.add_argument('-v', '--verbose', action='store_true', default=False,
                        help='Decide verbosity')
    args = parser.parse_args()

    Log.set_verbose(args.verbose)
    path = os.path.realpath(args.path)
    parsed_cvx = ParseCVX.read_file(path)
    write_text = GenCPP.make_cvx_binding(parsed_cvx)
    # Bug fix: ``print write_text`` is Python 2-only syntax and a
    # SyntaxError on Python 3; the call form works on both versions.
    print(write_text)


if __name__ == '__main__':
    main()
| import os
import argparse
from utils import Log
from parse_cvxgen import ParseCVX
from gen_cpp import GenCPP
def main():
    """Parse command-line arguments and print the generated C++ binding."""
    parser = argparse.ArgumentParser(description='CVXGEN Python Binding Generator')
    parser.add_argument('path', metavar='path', default='./images',
                        help='Give the target path')
    parser.add_argument('-v', '--verbose', action='store_true', default=False,
                        help='Decide verbosity')
    args = parser.parse_args()

    Log.set_verbose(args.verbose)
    path = os.path.realpath(args.path)
    parsed_cvx = ParseCVX.read_file(path)
    write_text = GenCPP.make_cvx_binding(parsed_cvx)
    # Bug fix: ``print write_text`` is Python 2-only syntax and a
    # SyntaxError on Python 3; the call form works on both versions.
    print(write_text)


if __name__ == '__main__':
    main()
win_service.py | XXL6/Uniback | 0 | 6617950 | <filename>win_service.py
import win32serviceutil
import win32service
import win32event
import servicemanager
from multiprocessing import Process
class Service(win32serviceutil.ServiceFramework):
    """Windows service wrapper that runs ``main`` in a worker process."""

    _svc_name_ = "TestService"
    _svc_display_name_ = "Test Service"
    _svc_description_ = "Tests Python service framework by receiving and echoing messages over a named pipe"

    def __init__(self, *args):
        super().__init__(*args)

    def SvcStop(self):
        """Handle the SCM stop request: terminate the worker process."""
        self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
        self.process.terminate()
        self.ReportServiceStatus(win32service.SERVICE_STOPPED)

    def SvcDoRun(self):
        """Service entry point: spawn the worker and block until it exits."""
        self.process = Process(target=self.main)
        self.process.start()
        # Bug fix: the original also called ``self.process.run()``, which
        # executes ``main`` a *second* time in this process (``run()`` does
        # not spawn anything) -- block on the child instead.
        self.process.join()

    def main(self):
        """Worker loop: poll for the service stop event once a minute.

        NOTE(review): ``self.hWaitStop`` is not created in ``__init__``
        and must be inherited/usable in the child process -- confirm the
        handle is valid there.
        """
        rc = None
        while rc != win32event.WAIT_OBJECT_0:
            # wake up once a minute and re-check for the stop event
            rc = win32event.WaitForSingleObject(self.hWaitStop, 60 * 1000)
if __name__ == '__main__':
win32serviceutil.HandleCommandLine(Service) | <filename>win_service.py
import win32serviceutil
import win32service
import win32event
import servicemanager
from multiprocessing import Process
class Service(win32serviceutil.ServiceFramework):
    """Windows service wrapper that runs ``main`` in a worker process."""

    _svc_name_ = "TestService"
    _svc_display_name_ = "Test Service"
    _svc_description_ = "Tests Python service framework by receiving and echoing messages over a named pipe"

    def __init__(self, *args):
        super().__init__(*args)

    def SvcStop(self):
        """Handle the SCM stop request: terminate the worker process."""
        self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
        self.process.terminate()
        self.ReportServiceStatus(win32service.SERVICE_STOPPED)

    def SvcDoRun(self):
        """Service entry point: spawn the worker and block until it exits."""
        self.process = Process(target=self.main)
        self.process.start()
        # Bug fix: the original also called ``self.process.run()``, which
        # executes ``main`` a *second* time in this process (``run()`` does
        # not spawn anything) -- block on the child instead.
        self.process.join()

    def main(self):
        """Worker loop: poll for the service stop event once a minute.

        NOTE(review): ``self.hWaitStop`` is not created in ``__init__``
        and must be inherited/usable in the child process -- confirm the
        handle is valid there.
        """
        rc = None
        while rc != win32event.WAIT_OBJECT_0:
            # wake up once a minute and re-check for the stop event
            rc = win32event.WaitForSingleObject(self.hWaitStop, 60 * 1000)
if __name__ == '__main__':
win32serviceutil.HandleCommandLine(Service) | en | 0.63379 | #f = open('D:\\test.txt', 'a') #f.write('Test Service \n') #f.flush() # block for 24*60*60 seconds and wait for a stop event # it is used for a one-day loop #servicemanager.LogMsg(servicemanager.EVENTLOG_INFORMATION_TYPE, servicemanager.PYS_SERVICE_STARTED, # (self._svc_name_, '')) #f.write('shut down \n') #f.close() | 2.576077 | 3 |