index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
25,311
|
tweenty247/CompleteWebside
|
refs/heads/master
|
/Web_Laundry/forms.py
|
from django import forms
from .models import FormNames, AppointmentSection, SubscribeForm
class ModelFormNames(forms.ModelForm):
    """ModelForm exposing every field of the FormNames model."""

    class Meta:
        model = FormNames
        fields = '__all__'
class AppointmentSectionFormNames(forms.ModelForm):
    """ModelForm exposing every field of the AppointmentSection model."""

    class Meta:
        model = AppointmentSection
        fields = '__all__'
class ModalSubscribeForm(forms.ModelForm):
    """ModelForm exposing every field of the SubscribeForm model."""

    class Meta:
        model = SubscribeForm
        fields = '__all__'
|
{"/Web_Laundry/admin.py": ["/Web_Laundry/models.py"], "/Web_Laundry/views.py": ["/Web_Laundry/forms.py"], "/Web_Laundry/forms.py": ["/Web_Laundry/models.py"]}
|
25,312
|
tweenty247/CompleteWebside
|
refs/heads/master
|
/Web_Laundry/migrations/0003_formnames_number.py
|
# Generated by Django 3.1.2 on 2020-10-03 07:20
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: adds the integer 'number' column to FormNames."""

    dependencies = [
        ('Web_Laundry', '0002_remove_formnames_subject'),
    ]

    operations = [
        migrations.AddField(
            model_name='formnames',
            name='number',
            # Default backfills existing rows; chosen at makemigrations time.
            field=models.IntegerField(default=1234567),
        ),
    ]
|
{"/Web_Laundry/admin.py": ["/Web_Laundry/models.py"], "/Web_Laundry/views.py": ["/Web_Laundry/forms.py"], "/Web_Laundry/forms.py": ["/Web_Laundry/models.py"]}
|
25,313
|
tweenty247/CompleteWebside
|
refs/heads/master
|
/Web_Laundry/migrations/0007_subscribeform.py
|
# Generated by Django 3.1.2 on 2020-10-07 04:49
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: creates the SubscribeForm table with one email column."""

    dependencies = [
        ('Web_Laundry', '0006_delete_subscribtionmodel'),
    ]

    operations = [
        migrations.CreateModel(
            name='SubscribeForm',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('subscribe', models.EmailField(max_length=254)),
            ],
        ),
    ]
|
{"/Web_Laundry/admin.py": ["/Web_Laundry/models.py"], "/Web_Laundry/views.py": ["/Web_Laundry/forms.py"], "/Web_Laundry/forms.py": ["/Web_Laundry/models.py"]}
|
25,314
|
tweenty247/CompleteWebside
|
refs/heads/master
|
/Web_Laundry/migrations/0006_delete_subscribtionmodel.py
|
# Generated by Django 3.1.2 on 2020-10-07 04:45
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated: drops the obsolete SubscribtionModel table."""

    dependencies = [
        ('Web_Laundry', '0005_auto_20201004_0024'),
    ]

    operations = [
        migrations.DeleteModel(
            name='SubscribtionModel',
        ),
    ]
|
{"/Web_Laundry/admin.py": ["/Web_Laundry/models.py"], "/Web_Laundry/views.py": ["/Web_Laundry/forms.py"], "/Web_Laundry/forms.py": ["/Web_Laundry/models.py"]}
|
25,315
|
tweenty247/CompleteWebside
|
refs/heads/master
|
/Web_Laundry/urls.py
|
from django.urls import path
from . import views
# URL table for the Web_Laundry app; each view is named so templates can
# reverse-resolve URLs with {% url %}.
urlpatterns = [
    path('', views.index, name='index'),
    path('contact/', views.contact_page, name='contact'),
    path('about/', views.about, name='about'),
    path('services/', views.service, name='services'),
    path('pricing/', views.pricing, name='pricing'),
    # Fixed: every other route ends with a trailing slash; 'blog' did not,
    # which is inconsistent and defeats Django's APPEND_SLASH redirect
    # convention for /blog requests.
    path('blog/', views.blog, name='blog'),
]
|
{"/Web_Laundry/admin.py": ["/Web_Laundry/models.py"], "/Web_Laundry/views.py": ["/Web_Laundry/forms.py"], "/Web_Laundry/forms.py": ["/Web_Laundry/models.py"]}
|
25,316
|
tweenty247/CompleteWebside
|
refs/heads/master
|
/Web_Laundry/apps.py
|
from django.apps import AppConfig
class WebLaundryConfig(AppConfig):
    """Django application configuration for the Web_Laundry app."""

    name = 'Web_Laundry'
|
{"/Web_Laundry/admin.py": ["/Web_Laundry/models.py"], "/Web_Laundry/views.py": ["/Web_Laundry/forms.py"], "/Web_Laundry/forms.py": ["/Web_Laundry/models.py"]}
|
25,333
|
Sandeep-vishwakarma-sfdc/SentimentAnalysis
|
refs/heads/master
|
/app.py
|
import string
import stopwords
from flask import Flask,render_template,request
from tweets import Twitterclient
from collections import Counter
app = Flask(__name__)
tweet=""
@app.route("/",methods=['POST','GET'])
def home():
    """Landing page; on POST, fetch a user's timeline and chart its emotions."""
    # GET requests (or anything that is not a POST) just render the shell page.
    if request.method != 'POST':
        return render_template("index.html", tweet='no tweet available')
    username = request.form['username']
    print('user name ---->' + username)
    if len(username) == 0:
        # Empty username: render the page with empty chart data.
        return render_template("index.html", tweets=[], labels=[],
                               data=[], username=username)
    count = request.form.get('noOfTweets')
    client = Twitterclient()
    api = client.get_twitter_client_api()
    tweets = api.user_timeline(screen_name=username, count=count)
    emotionlist, w = analyseEmotions(tweets=tweets)
    # Chart.js expects parallel label/value arrays.
    labels = list(w.keys())
    data = list(w.values())
    print(tweets)
    return render_template("index.html", tweets=tweets, labels=labels,
                           data=data, username=username)
def analyseEmotions(tweets):
    """Count emotion words appearing in a collection of tweets.

    Each tweet is stringified and concatenated; the text is lowercased,
    stripped of punctuation and stop words, then matched against the
    word,emotion pairs in ./static/emotions.csv.

    Parameters
    ----------
    tweets : iterable
        Tweet objects (or strings); each element is passed through str().

    Returns
    -------
    tuple(list, collections.Counter)
        The raw list of matched emotion labels and their frequency counts.
    """
    text = ' '.join(str(elem) for elem in tweets)
    # Single C-level pass removes all punctuation.
    cleaned_text = text.lower().translate(
        str.maketrans('', '', string.punctuation))
    # Sets give O(1) membership tests instead of O(n) list scans.
    stop_words = set(stopwords.stop_words)
    final_words = {word for word in cleaned_text.split()
                   if word not in stop_words}
    emotion_list = []
    with open('./static/emotions.csv', 'r', encoding='latin-1') as file:
        for line in file:
            clear_line = line.replace('\n', '').replace("'", '').strip()
            # Fixed: a strict 2-way split raised ValueError on rows without
            # exactly one comma; partition() tolerates malformed rows, which
            # are skipped when no separator is present.
            word, sep, emotion = clear_line.partition(',')
            if sep and word in final_words:
                emotion_list.append(emotion)
    print(emotion_list)
    w = Counter(emotion_list)
    print(w)
    return emotion_list, w
if __name__=='__main__':
    # Development server entry point; debug=True must not be used in production.
    app.run(debug=True)
|
{"/app.py": ["/tweets.py"], "/tweets.py": ["/twitter_credential.py"]}
|
25,334
|
Sandeep-vishwakarma-sfdc/SentimentAnalysis
|
refs/heads/master
|
/tweets.py
|
from tweepy import API
from tweepy import OAuthHandler
import pandas as pd
import twitter_credential
class Twitterclient():
    """Owns an authenticated tweepy API client, built once at construction."""

    def __init__(self,twitter_user=None):
        self.auth = TwitterAuthenticator().authenticate_twitter_app()
        self.twitter_client = API(self.auth)
        # Optional user handle; stored but unused in the visible code.
        self.twitter_user = twitter_user

    def get_twitter_client_api(self):
        """Return the ready-to-use tweepy API instance."""
        return self.twitter_client
class TwitterAuthenticator():
    """Builds the OAuth handler from the twitter_credential module."""

    def authenticate_twitter_app(self):
        """Return a tweepy OAuthHandler loaded with app and access credentials."""
        auth = OAuthHandler(twitter_credential.CONSUMER_KEY,twitter_credential.CONSUMER_SECRET)
        auth.set_access_token(twitter_credential.ACCESS_TOKEN,twitter_credential.ACCESS_TOKEN_SECRET)
        return auth
class TweetAnalyzer():
    """Helpers for converting raw tweet objects into tabular data."""

    def tweets_to_data_frame(self,tweets):
        """Return a one-column DataFrame ('Tweets') holding each tweet's text."""
        texts = [tweet.text for tweet in tweets]
        return pd.DataFrame(data=texts, columns=['Tweets'])
|
{"/app.py": ["/tweets.py"], "/tweets.py": ["/twitter_credential.py"]}
|
25,335
|
Sandeep-vishwakarma-sfdc/SentimentAnalysis
|
refs/heads/master
|
/twitter_credential.py
|
# Twitter API credentials consumed by tweets.TwitterAuthenticator.
# NOTE(review): real-looking secrets are committed to source control here —
# they should be revoked and loaded from environment variables instead.
ACCESS_TOKEN = "1283635357876015104-dbtiPwtOZLGyfd4wtefVrLKv3REz7m"
ACCESS_TOKEN_SECRET = "QN6M9Gsqrk7CIXmYj9r437I1TQyAzBfrDJzwuDQyHk5tH"
CONSUMER_KEY = "18X6p4fgSiacN3jlgrVrAPFTQ"
CONSUMER_SECRET = "oWALX5pJDl74gmT6Bng03PZ1Kk6yyYfZooo64qc4x85dhDRAvg"
|
{"/app.py": ["/tweets.py"], "/tweets.py": ["/twitter_credential.py"]}
|
25,398
|
Azure/AI-PredictiveMaintenance
|
refs/heads/master
|
/src/WebApp/App_Data/jobs/continuous/DatabricksAndSimulatedDevicesSetup/simulated_devices_setup.py
|
import os
import json
import random
from iot_hub_helpers import IoTHub
def create_device(iot_hub, device_id, simulation_parameters):
    """Register *device_id* and tag its twin as a simulated device.

    The twin's tags carry ``simulated: True`` plus the supplied simulation
    parameters so simulator workers can later discover and claim the device.
    """
    iot_hub.create_device(device_id)
    tags = {'simulated': True}
    tags.update(simulation_parameters)
    iot_hub.update_twin(device_id, json.dumps({'tags': tags}))
if __name__ == "__main__":
    # Connection settings are injected by the WebJob host environment.
    IOT_HUB_NAME = os.environ['IOT_HUB_NAME']
    IOT_HUB_OWNER_KEY = os.environ['IOT_HUB_OWNER_KEY']
    iot_hub = IoTHub(IOT_HUB_NAME, IOT_HUB_OWNER_KEY)
    # Provision a fixed fleet of five engines with randomised health parameters.
    count = 5
    for i in range(count):
        device_id = 'Machine-{0:03d}'.format(i)
        h1 = random.uniform(0.8, 0.95)
        h2 = random.uniform(0.8, 0.95)
        simulation_parameters = {
            'simulator': 'devices.engines.Engine',
            'h1': h1,
            'h2': h2
        }
        create_device(iot_hub, device_id, simulation_parameters)
|
{"/src/WebApp/shared_modules/iot_hub_helpers/__init__.py": ["/src/WebApp/shared_modules/iot_hub_helpers/iot_hub_helpers.py"]}
|
25,399
|
Azure/AI-PredictiveMaintenance
|
refs/heads/master
|
/src/WebApp/App_Data/jobs/continuous/Simulator/devices/engines/device.py
|
import numpy as np
import random
from datetime import date, datetime
from scipy.interpolate import interp1d
class VibrationSensorSignalSample:
    """One interval of synthetic vibration waveform for a rotating machine.

    The signal is a sum of harmonics of a fundamental frequency that ramps
    linearly from ``fundamental_from`` to ``fundamental_to`` over the
    interval. Phase is carried across intervals via the previous sample's
    cumulative frequency sum so consecutive renders are continuous.
    """

    # Hard amplitude clip applied before PCM scaling.
    CUTOFF = 150

    def __init__(self, W, A, fundamental_from, fundamental_to, t = 0, interval = 1, previous_sample = None, sample_rate = 1024):
        self.interval = interval
        self.sample_rate = sample_rate
        self.W = W  # harmonic multipliers of the fundamental
        self.A = A  # per-harmonic amplitudes
        self.t = t  # start time of this interval
        self.base_frequency = fundamental_from
        self.target_base_frequency = fundamental_to
        self.add_noise = True
        self.__previous_sample = previous_sample
        self.__N = sample_rate * interval  # number of points in this interval

    def pcm(self):
        """Render the interval as 16-bit PCM (numpy int16 array).

        NOTE(review): reads ``previous_sample.__last_cumsum``, which only
        exists after the previous sample's own pcm() ran — confirm callers
        always render samples in order.
        """
        ts = np.linspace(self.t, self.t + self.interval, num = self.__N, endpoint=False)
        # Linear frequency ramp over the interval, clamped at zero.
        x = np.array([0, self.interval]) + self.t
        points = np.array([self.base_frequency, self.target_base_frequency])
        rpm = interp1d(x, points, kind='linear')
        f = rpm(ts)
        f[f < 0] = 0
        # Integrate frequency into phase, continuing from the previous sample.
        fi = np.cumsum(f / self.sample_rate) + (self.__previous_sample.__last_cumsum if self.__previous_sample else 0)
        base = 2 * np.pi * fi
        b = np.array([np.sin(base * w) * a for w, a in zip(self.W, self.A)])
        a = b.sum(axis = 0)
        if self.add_noise:
            a += np.random.normal(0, 0.1, self.__N)
        self.__last_cumsum = fi[-1]
        self.base_frequency = self.target_base_frequency
        # Clip, then scale to the int16 range.
        a[a > self.CUTOFF] = self.CUTOFF
        a[a < -self.CUTOFF] = -self.CUTOFF
        return np.int16(a / self.CUTOFF * 32767)
class RotationalMachine:
    """Simulated rotating machine emitting speed/temperature/pressure/vibration.

    Health is driven by two external iterators (h1, h2) yielding
    (ignored, value) pairs; when either is exhausted the machine is marked
    broken and next_state() raises.
    """

    ambient_temperature = 20 # degrees Celsius
    max_temperature = 120
    ambient_pressure = 101 # kPa

    def __init__(self, name, h1, h2):
        # Harmonic multipliers/amplitudes forwarded to the vibration sample.
        self.W = [1/2, 1, 2, 3, 5, 7, 12, 18]
        self.A = [1, 5, 80, 2/3, 8, 2, 14, 50]
        self.t = 0
        self.name = name
        self.speed = 0
        self.speed_desired = 0
        self.temperature = RotationalMachine.ambient_temperature
        self.pressure = RotationalMachine.ambient_pressure
        self.pressure_factor = 2
        self.__vibration_sample = None
        # Private health iterators; latest yielded values land in h1/h2.
        self.__h1 = h1
        self.__h2 = h2
        self.broken = False
        self.h1 = None
        self.h2 = None

    def set_health(self, h1, h2):
        """Replace the health iterators and clear the broken flag."""
        self.__h1 = h1
        self.__h2 = h2
        self.broken = False

    def set_speed(self, speed):
        """Set the target speed the machine ramps toward."""
        self.speed_desired = speed

    def __g(self, v, min_v, max_v, target, rate):
        # Move v toward target at the given rate, clamped to [min_v, max_v].
        delta = (target - v) * rate
        return max(min(v + delta, max_v), min_v)

    def noise(self, magnitude):
        """Uniform measurement noise in [-magnitude, magnitude]."""
        return random.uniform(-magnitude, magnitude)

    def next_state(self):
        """Advance one tick and return the rounded sensor snapshot dict.

        Raises Exception('F1')/('F2') and sets ``broken`` when the
        respective health iterator stops. NOTE(review): the bare excepts
        also convert any other iterator error into a failure — confirm
        that is intended.
        """
        try:
            _, self.h1 = next(self.__h1)
        except:
            self.broken = True
            raise Exception("F1")
        try:
            _, self.h2 = next(self.__h2)
        except:
            self.broken = True
            raise Exception("F2")
        # Degraded health (h < 1) makes speed/temperature overshoot targets.
        v_from = self.speed / 60
        self.speed = (self.speed + (2 - self.h2) * self.speed_desired) / 2
        v_to = self.speed / 60
        self.temperature = (2 - self.h1) * self.__g(self.temperature, self.ambient_temperature, self.max_temperature, self.speed / 10, 0.01 * self.speed / 1000)
        self.pressure = self.h1 * self.__g(self.pressure, self.ambient_pressure, np.inf, self.speed * self.pressure_factor, 0.3 * self.speed / 1000)
        # Previous-sample chaining is disabled (commented out), so each
        # vibration sample starts phase from zero.
        self.__vibration_sample = VibrationSensorSignalSample(
            #self.W, self.A, v_from, v_to, t = self.t, previous_sample = self.__vibration_sample)
            self.W, self.A, v_from, v_to, t = self.t)
        state = {
            'speed_desired': self.speed_desired,
            'ambient_temperature': self.ambient_temperature + self.noise(0.1),
            'ambient_pressure': self.ambient_pressure + self.noise(0.1),
            'speed': self.speed + self.noise(5),
            'temperature': self.temperature + self.noise(0.1),
            'pressure': self.pressure + self.noise(20),
            'vibration': self.__vibration_sample
        }
        self.t += 1
        # Round scalar readings for transport; the vibration object passes through.
        for key in state:
            value = state[key]
            if isinstance(value, (int, float)):
                state[key] = round(value, 2)
        return state
|
{"/src/WebApp/shared_modules/iot_hub_helpers/__init__.py": ["/src/WebApp/shared_modules/iot_hub_helpers/iot_hub_helpers.py"]}
|
25,400
|
Azure/AI-PredictiveMaintenance
|
refs/heads/master
|
/src/WebApp/flask/app.py
|
import numpy as np
import sys, os, time, glob
import requests
import json
import uuid
import json
import random
import markdown
import jwt
import io
import csv
import collections
from urllib.parse import urlparse
from datetime import datetime, timedelta
from functools import wraps
from flask import Flask, render_template, Response, request, redirect, url_for
from threading import Thread
from azure.storage.blob import BlockBlobService
from azure.storage.file import FileService
from azure.storage.file.models import FilePermissions
from azure.storage.blob.models import BlobPermissions
from azure.storage.table import TableService, Entity, TablePermissions
from flask_breadcrumbs import Breadcrumbs, register_breadcrumb
from iot_hub_helpers import IoTHub
from http import HTTPStatus
app = Flask(__name__)
app.debug = True
# Initialize Flask-Breadcrumbs
Breadcrumbs(app=app)
# Deployment-specific settings are injected via environment variables.
STORAGE_ACCOUNT_NAME = os.environ['STORAGE_ACCOUNT_NAME']
STORAGE_ACCOUNT_KEY = os.environ['STORAGE_ACCOUNT_KEY']
IOT_HUB_NAME = os.environ['IOT_HUB_NAME']
IOT_HUB_OWNER_KEY = os.environ['IOT_HUB_OWNER_KEY']
DSVM_NAME = os.environ['DSVM_NAME']
DATABRICKS_WORKSPACE_LOGIN_URL = os.environ['DATABRICKS_WORKSPACE_LOGIN_URL']
# First line of version.info identifies the deployed build.
VERSION_INFO = open(os.path.join(os.path.dirname(__file__), 'version.info')).readlines()[0]
table_service = TableService(account_name=STORAGE_ACCOUNT_NAME, account_key=STORAGE_ACCOUNT_KEY)
def login_required(f):
    """Decorator intended to gate views behind AAD sign-in.

    NOTE(review): the check is currently a no-op — the redirect is commented
    out, so unauthenticated requests pass straight through.
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        if 'x-ms-token-aad-refresh-token' not in request.headers:
            pass
            #return redirect(url_for('setup'))
        return f(*args, **kwargs)
    return decorated_function
def get_identity():
    """Decode the caller's AAD id token from the App Service auth headers.

    NOTE(review): verify=False skips signature verification — presumably
    relying on the hosting auth layer having validated the token; confirm.
    """
    id_token = request.headers['x-ms-token-aad-id-token']
    return jwt.decode(id_token, verify=False)
@app.context_processor
def context_processor():
    # Inject the build version into every rendered template.
    return dict(
        #user_name=get_identity()['name']
        version_info = VERSION_INFO)
@app.route('/home')
@register_breadcrumb(app, '.', 'Home')
@login_required
def home():
    """Render README.md (converted to HTML) as the landing page."""
    readme_path = os.path.abspath(os.path.join(os.path.dirname(__file__), 'README.md'))
    with open(readme_path, 'r') as readme:
        rendered = markdown.markdown(readme.read())
    return render_template('home.html', content = rendered)
@app.route('/devices')
@register_breadcrumb(app, '.devices', 'Simulated IoT Devices')
@login_required
def devices():
    """Render the simulated-devices list page."""
    return render_template('devices.html')
def error_response(error_code, message, http_status_code):
    """Build a JSON error Response: {"code": ..., "message": ...}."""
    body = json.dumps({
        'code': error_code,
        'message': message
    })
    return Response(body, http_status_code, mimetype='application/json')
@app.route('/api/devices', methods=['GET'])
@login_required
def get_devices():
    """Return id, last activity time and connection state for every device."""
    iot_hub = IoTHub(IOT_HUB_NAME, IOT_HUB_OWNER_KEY)
    registered = iot_hub.get_device_list()
    registered.sort(key=lambda device: device.deviceId)
    summaries = [{
        'deviceId': device.deviceId,
        'lastActivityTime': device.lastActivityTime,
        'connectionState': str(device.connectionState)} for device in registered]
    return Response(json.dumps(summaries), mimetype='application/json')
@app.route('/api/devices', methods=['PUT'])
@login_required
def create_device():
    """Create a simulated device from form fields deviceId/simulationProperties.

    Returns a JSON error body (INVALID_ID / INVALID_PARAMETERS) with HTTP 400
    on validation or hub failures; empty 200 on success.
    """
    device_id = str.strip(request.form['deviceId'])
    if not device_id:
        return error_response('INVALID_ID', 'Device ID cannot be empty.', HTTPStatus.BAD_REQUEST)
    try:
        simulation_properties = json.loads(request.form['simulationProperties'])
    except Exception as e:
        return error_response('INVALID_PARAMETERS', str(e), HTTPStatus.BAD_REQUEST)
    iot_hub = IoTHub(IOT_HUB_NAME, IOT_HUB_OWNER_KEY)
    try:
        iot_hub.create_device(device_id)
    except Exception as e:
        return error_response('INVALID_ID', str(e), HTTPStatus.BAD_REQUEST)
    # Tag the twin so simulator workers can discover and claim this device.
    tags = {
        'simulated': True
    }
    tags.update(simulation_properties)
    twin_properties = {
        'tags': tags
    }
    try:
        iot_hub.update_twin(device_id, json.dumps(twin_properties))
    except Exception as e:
        return error_response('INVALID_PARAMETERS', str(e), HTTPStatus.BAD_REQUEST)
    return Response()
@app.route('/api/devices/<device_id>', methods=['DELETE'])
@login_required
def delete_device(device_id):
    """Remove a device registration from the IoT hub."""
    hub = IoTHub(IOT_HUB_NAME, IOT_HUB_OWNER_KEY)
    hub.delete_device(device_id)
    return Response()
def view_device_dlc(*args, **kwargs):
    """Breadcrumb constructor: link the current device id under its parent path."""
    device_id = request.view_args['device_id']
    parsed = urlparse(request.url)
    base_path = os.path.split(parsed.path)[0]
    return [{'text': device_id, 'url': '{0}/{1}'.format(base_path, device_id)}]
@register_breadcrumb(app, '.devices.device', '', dynamic_list_constructor=view_device_dlc)
@app.route('/devices/<device_id>')
@login_required
def devices_device(device_id):
    """Render the detail page for one simulated device."""
    return render_template('devices_device.html', device_id = device_id)
@app.route('/api/devices/<device_id>/logs', methods=['GET'])
@login_required
def get_device_logs(device_id):
    """Return a device's log rows as CSV text, oldest first.

    SIM_HEALTH rows are excluded here; get_device() serves those separately
    as the health history.
    """
    query_filter = "PartitionKey eq '{0}'".format(device_id)
    log_entities = table_service.query_entities('logs', filter=query_filter)
    output = io.StringIO()
    writer = csv.writer(output, quoting=csv.QUOTE_MINIMAL)
    for entity in sorted(log_entities, key=lambda e: e.Timestamp):
        # Optional columns: older rows may lack Level/Code/Message.
        level = entity.Level if 'Level' in entity else None
        code = entity.Code if 'Code' in entity else None
        message = entity.Message if 'Message' in entity else None
        if code == 'SIM_HEALTH':
            continue
        row = (str(entity.Timestamp), entity.PartitionKey, level, code, message)
        writer.writerow(row)
    log_output = output.getvalue()
    resp = Response(log_output)
    resp.headers['Content-type'] = 'text/plain'
    return resp
@app.route('/api/devices/<device_id>', methods=['GET'])
@login_required
def get_device(device_id):
    """Return the device's health history grouped by health index.

    Reads SIM_HEALTH log rows and reshapes them into
    {'health_history': {index: {'t': [timestamps], 'h': [values]}}}.
    """
    #iot_hub = IoTHub(IOT_HUB_NAME, IOT_HUB_OWNER_KEY)
    #twin_data = iot_hub.get_device_twin(device_id)
    query_filter = "PartitionKey eq '{0}' and Code eq '{1}'".format(device_id, 'SIM_HEALTH')
    health_history_entities = table_service.query_entities('logs', filter=query_filter)
    health_history = []
    for entity in health_history_entities:
        timestamp = entity.Timestamp
        # Message payload is a JSON map of health-index name -> value.
        message_json = json.loads(entity.Message)
        #indices = [x[1] for x in sorted(message_json.items())]
        health_history.append((timestamp, message_json))
    # Chronological order before pivoting into per-index series.
    health_history.sort(key = lambda x: x[0])
    health_history_by_index = {}
    for entry in health_history:
        timestamp = entry[0].replace(tzinfo=None).isoformat()
        indices_json = entry[1]
        for k, v in indices_json.items():
            if k not in health_history_by_index:
                health_history_by_index[k] = {'t': [], 'h': []}
            health_history_by_index[k]['t'].append(timestamp)
            health_history_by_index[k]['h'].append(v)
    response_json = {
        #'twin': json.loads(twin_data),
        'health_history': health_history_by_index
    }
    resp = Response(json.dumps(response_json))
    resp.headers['Content-type'] = 'application/json'
    return resp
@app.route('/api/devices/<device_id>', methods=['POST'])
@login_required
def set_desired_properties(device_id):
    """Write form fields into the twin's desired properties.

    'speed' is coerced to int; all other fields pass through as strings.
    NOTE(review): a non-numeric 'speed' raises ValueError and surfaces as a
    500 — confirm whether a 400 response is wanted instead.
    """
    desired_props = {}
    for key in request.form:
        if key == 'speed':
            desired_props[key] = int(request.form[key])
        else:
            desired_props[key] = request.form[key]
    payload = {
        'properties': {
            'desired': desired_props
        }
    }
    payload_json = json.dumps(payload)
    iot_hub = IoTHub(IOT_HUB_NAME, IOT_HUB_OWNER_KEY)
    twin_data = iot_hub.update_twin(device_id, payload_json)
    resp = Response(twin_data)
    resp.headers['Content-type'] = 'application/json'
    return resp
def get_access_token():
    """Exchange the caller's AAD refresh token for an ARM access token.

    Uses the refresh-token grant against the caller's tenant; the returned
    token is scoped to https://management.core.windows.net/.
    """
    refresh_token = request.headers['x-ms-token-aad-refresh-token']
    parameters = {
        'grant_type': 'refresh_token',
        'client_id': os.environ['WEBSITE_AUTH_CLIENT_ID'],
        'client_secret': os.environ['WEBSITE_AUTH_CLIENT_SECRET'],
        'refresh_token': refresh_token,
        'resource': 'https://management.core.windows.net/'
    }
    tid = get_identity()['tid']
    result = requests.post('https://login.microsoftonline.com/{0}/oauth2/token'.format(tid), data = parameters)
    access_token = result.json()['access_token']
    return access_token
def parse_website_owner_name():
    """Derive (subscription, resource_group, location) from WEBSITE_OWNER_NAME.

    Azure App Service encodes the value as
    '<subscription>+<resource_group>-<location>'; the splits are anchored at
    the first '+' and first '-' respectively.
    """
    raw = os.environ['WEBSITE_OWNER_NAME']
    subscription, rg_and_location = raw.split('+', 1)
    resource_group, location = rg_and_location.split('-', 1)
    return subscription, resource_group, location
@app.route('/modeling')
@register_breadcrumb(app, '.modeling', 'Modeling')
@login_required
def analytics():
    """Render the modeling page with links to the DSVM and Databricks workspace."""
    return render_template('modeling.html', dsvmName = DSVM_NAME, databricks_workspace= DATABRICKS_WORKSPACE_LOGIN_URL)
@app.route('/intelligence')
@register_breadcrumb(app, '.intelligence', 'Intelligence')
@login_required
def intelligence():
    """Render the fleet-wide predictions overview page."""
    return render_template('intelligence.html')
@register_breadcrumb(app, '.intelligence.device', '', dynamic_list_constructor=view_device_dlc)
@app.route('/intelligence/<device_id>')
@login_required
def intelligence_device(device_id):
    """Render the per-device predictions page."""
    return render_template('intelligence_device.html', device_id = device_id)
@app.route('/api/intelligence')
@login_required
def get_intelligence():
    """Return the latest prediction per device plus a status summary.

    Devices without a stored prediction are reported as 'Unknown'; any
    prediction label starting with 'F' is counted as 'Failure predicted'.
    """
    iot_hub = IoTHub(IOT_HUB_NAME, IOT_HUB_OWNER_KEY)
    devices = iot_hub.get_device_list()
    device_ids = [d.deviceId for d in devices]
    # The '_INDEX_' partition holds one row per device with its newest prediction.
    latest_predictions = table_service.query_entities('predictions', filter="PartitionKey eq '_INDEX_'")
    predictions_by_device = dict([(p.RowKey, (p.Prediction, p.Date)) for p in latest_predictions])
    unknown_predictions = dict([(device_id, ('Unknown', None)) for device_id in device_ids if device_id not in predictions_by_device])
    combined = {**predictions_by_device, **unknown_predictions}
    # Pre-seed all categories so the UI always sees every bucket, even at 0.
    summary = {
        'Failure predicted': 0,
        'Healthy': 0,
        'Need maintenance': 0,
        'Unknown': 0
    }
    summary_computed = collections.Counter(['Failure predicted' if v[0].startswith('F') else v[0] for v in combined.values()])
    summary.update(summary_computed)
    payload = {
        'predictions': [{
            'deviceId': k,
            'prediction': v[0],
            'lastUpdated': v[1]
        } for (k, v) in combined.items()],
        'summary': summary
    }
    payload_json = json.dumps(payload)
    resp = Response(payload_json)
    resp.headers['Content-type'] = 'application/json'
    return resp
@app.route('/api/intelligence/<device_id>/cycles')
@login_required
def get_intelligence_device_cycles(device_id):
    """Return per-cycle feature series for a device as {'x': [...], 'y': {...}}.

    x holds the cycle RowKeys in order; y maps each feature column to its
    values. Azure Table system columns are excluded.
    """
    # cycles_index = table_service.get_entity('cycles', '_INDEX_', device_id)
    # latest_cycles = json.loads(cycles_index['RollingWindow'])
    # max_cycle = latest_cycles[0]
    # min_cycle = latest_cycles[-1]
    all_cycles = table_service.query_entities('cycles', filter="PartitionKey eq '{0}'".format(device_id))
    all_cycles = list(all_cycles)
    all_cycles.sort(key = lambda x: x.RowKey)
    x = []
    y = {}
    for cycle in all_cycles:
        x.append(cycle.RowKey)
        for key in cycle.keys():
            # Skip table metadata; everything else is a feature column.
            if key in ['PartitionKey', 'RowKey', 'CycleEnd', 'Timestamp', 'etag']:
                continue
            if key not in y:
                y[key] = []
            y[key].append(cycle[key])
    payload = {
        'x': x,
        'y': y
    }
    payload_json = json.dumps(payload)
    resp = Response(payload_json)
    resp.headers['Content-type'] = 'application/json'
    return resp
@app.route('/api/intelligence/<device_id>/predictions')
@login_required
def get_intelligence_device_predictions(device_id):
    """Return a device's prediction history as {'x': [RowKeys], 'y': [labels]}.

    Results are sorted by RowKey and truncated to the 50 most recent entries.
    """
    all_predictions = table_service.query_entities(
        'predictions', filter="PartitionKey eq '{0}'".format(device_id))
    all_predictions = list(all_predictions)
    all_predictions.sort(key=lambda entity: entity.RowKey)
    if len(all_predictions) > 50:
        # Fixed: the original slice [-50:-1] silently dropped the newest
        # prediction; [-50:] keeps the latest 50 including the most recent.
        all_predictions = all_predictions[-50:]
    x = [prediction.RowKey for prediction in all_predictions]
    y = [prediction.Prediction for prediction in all_predictions]
    payload = {
        'x': x,
        'y': y
    }
    payload_json = json.dumps(payload)
    resp = Response(payload_json)
    resp.headers['Content-type'] = 'application/json'
    return resp
if __name__ == "__main__":
    # Local development entry point; presumably the production host serves the
    # app via WSGI instead — debug=True must not ship to production.
    app.run('0.0.0.0', 8000, debug=True)
|
{"/src/WebApp/shared_modules/iot_hub_helpers/__init__.py": ["/src/WebApp/shared_modules/iot_hub_helpers/iot_hub_helpers.py"]}
|
25,401
|
Azure/AI-PredictiveMaintenance
|
refs/heads/master
|
/src/WebApp/App_Data/jobs/continuous/Simulator/devices/engines/__init__.py
|
from devices.engines.engine import Engine
|
{"/src/WebApp/shared_modules/iot_hub_helpers/__init__.py": ["/src/WebApp/shared_modules/iot_hub_helpers/iot_hub_helpers.py"]}
|
25,402
|
Azure/AI-PredictiveMaintenance
|
refs/heads/master
|
/src/WebApp/shared_modules/iot_hub_helpers/__init__.py
|
from .iot_hub_helpers import IoTHub, IoTHubDevice
|
{"/src/WebApp/shared_modules/iot_hub_helpers/__init__.py": ["/src/WebApp/shared_modules/iot_hub_helpers/iot_hub_helpers.py"]}
|
25,403
|
Azure/AI-PredictiveMaintenance
|
refs/heads/master
|
/src/WebApp/App_Data/jobs/continuous/Simulator/simulator.py
|
import os
import io
import pickle
import random
import uuid
import datetime
import time
import json
import numpy as np
import logging
import csv
from multiprocessing import Pool, TimeoutError, cpu_count
from multiprocessing.dummy import Pool as DummyPool
from multiprocessing import Process
from iot_hub_helpers import IoTHub, IoTHubDevice
from devices import SimulatorFactory
from azure.storage.table import TableService, Entity, TablePermissions
STORAGE_ACCOUNT_NAME = os.environ['STORAGE_ACCOUNT_NAME']
STORAGE_ACCOUNT_KEY = os.environ['STORAGE_ACCOUNT_KEY']
IOT_HUB_NAME = os.environ['IOT_HUB_NAME']
IOT_HUB_OWNER_KEY = os.environ['IOT_HUB_OWNER_KEY']
IOT_HUB_DEVICE_KEY = os.environ['IOT_HUB_DEVICE_KEY']
def claim_and_run_device(driver_id):
    """Claim one simulated device from the hub and run its simulator.

    Blocks until a device is claimed, then runs the simulator until it
    returns. *driver_id* identifies this worker in twin claims and log rows.
    """
    iot_hub = IoTHub(IOT_HUB_NAME, IOT_HUB_OWNER_KEY)
    device, device_twin = iot_hub.claim_device(driver_id)
    device_twin_json = json.loads(device_twin)
    device_id = device_twin_json['deviceId']
    iothub_device = IoTHubDevice(IOT_HUB_NAME, device_id, device.primaryKey)
    table_service = TableService(account_name=STORAGE_ACCOUNT_NAME, account_key=STORAGE_ACCOUNT_KEY)
    table_service.create_table('logs', fail_on_exist=False)
    def report_state(state):
        # Push simulator state into the twin's reported properties.
        iothub_device.send_reported_state(state)
    def send_telemetry(data):
        iothub_device.send_message(data)
    def log(message, code, level):
        # Persist a structured log row partitioned by device; the random
        # RowKey ensures entries never collide.
        level_name = logging.getLevelName(level)
        log_entity = {
            'PartitionKey': device_id,
            'RowKey': uuid.uuid4().hex,
            'Level': level_name,
            'Code': code,
            'Message': message,
            '_Driver': driver_id
        }
        print(', '.join([driver_id, device_id, str(level_name), str(code), str(message)]))
        table_service.insert_or_replace_entity('logs', log_entity)
        if level == logging.CRITICAL:
            # disable device
            iot_hub.disable_device(device_id)
    device_simulator = SimulatorFactory.create('devices.engines.Engine', report_state, send_telemetry, log)
    if not device_simulator.initialize(device_twin_json):
        return
    def device_twin_callback(update_state, payload, user_context):
        device_simulator.on_update(str(update_state), json.loads(payload))
    iothub_device.client.set_device_twin_callback(device_twin_callback, 0)
    device_simulator.run()
def device_driver():
    """Run forever: repeatedly claim a simulated device and drive it.

    Each driver gets a unique id used to mark twin claims. Exceptions are
    logged and the loop continues, so one device failure never kills the
    worker process.
    """
    driver_unique_id = str(uuid.uuid4())
    while True:
        try:
            claim_and_run_device(driver_unique_id)
            logging.log(logging.WARNING, 'Driver {0} finished execution.'.format(driver_unique_id))
        except Exception as e:
            logging.log(logging.ERROR, 'Driver {0} threw an exception: {1}.'.format(driver_unique_id, str(e)))
        except BaseException:
            # Fixed: the original never called .format() here, so the log
            # line contained a literal '{0}'. This branch handles
            # non-Exception errors (the original used a bare except).
            logging.log(logging.ERROR, 'Driver {0} threw an exception.'.format(driver_unique_id))
if __name__ == '__main__':
    # Spawn a fixed pool of daemon driver processes; each independently claims
    # and simulates devices until it dies.
    device_driver_count = 20
    processes = []
    for _ in range(device_driver_count):
        processes.append(Process(target=device_driver))
    for process in processes:
        process.daemon = True
        process.start()
    # Keep the parent alive while every driver is running; exiting the parent
    # terminates the daemon children.
    while all(map(lambda c: c.is_alive(), processes)):
        time.sleep(3)
|
{"/src/WebApp/shared_modules/iot_hub_helpers/__init__.py": ["/src/WebApp/shared_modules/iot_hub_helpers/iot_hub_helpers.py"]}
|
25,404
|
Azure/AI-PredictiveMaintenance
|
refs/heads/master
|
/src/WebApp/App_Data/jobs/continuous/Simulator/devices/simulated_device.py
|
import importlib
import logging
from abc import ABC, abstractmethod
class SimulatedDevice(ABC):
    """Base class for device simulators.

    Concrete simulators implement initialize/on_update/run; the constructor
    injects the three callbacks used to talk to the outside world (reported
    state, telemetry, and structured logging).
    """

    def __init__(self, report_state_function, send_telemetry_function, log_function):
        self.__report_state = report_state_function
        self.__send_telemetry = send_telemetry_function
        self.__log_function = log_function

    def report_state(self, state):
        """Forward *state* to the injected reported-state callback."""
        self.__report_state(state)

    def send_telemetry(self, data):
        """Forward *data* to the injected telemetry callback."""
        self.__send_telemetry(data)

    def log(self, message, code = None, level = logging.INFO):
        """Forward a log record to the injected logging callback."""
        self.__log_function(message, code, level)

    @abstractmethod
    def initialize(self, device_info):
        """Prepare the simulator from its twin data; return truthy on success."""

    @abstractmethod
    def on_update(self, update_state, payload):
        """Handle a device-twin update notification."""

    @abstractmethod
    def run(self):
        """Enter the simulator's main loop."""
class SimulatorFactory:
    """Instantiates a simulator class from its dotted import path."""

    @staticmethod
    def create(full_class_name, *args):
        """Import the module named in *full_class_name* and construct the class.

        E.g. 'devices.engines.Engine' imports devices.engines and returns
        Engine(*args).
        """
        module_path, _, simple_class_name = full_class_name.rpartition('.')
        module = importlib.import_module(module_path)
        simulator_class = getattr(module, simple_class_name)
        return simulator_class(*args)


if __name__ == '__main__':
    pass
|
{"/src/WebApp/shared_modules/iot_hub_helpers/__init__.py": ["/src/WebApp/shared_modules/iot_hub_helpers/iot_hub_helpers.py"]}
|
25,405
|
Azure/AI-PredictiveMaintenance
|
refs/heads/master
|
/src/WebApp/App_Data/jobs/continuous/PythonAndStorageSetup/run.py
|
import os
from azure.storage.table import TableService, Entity, TablePermissions
from azure.storage.blob import BlockBlobService
# Storage credentials are provided by the WebJob host environment.
STORAGE_ACCOUNT_NAME = os.environ['STORAGE_ACCOUNT_NAME']
STORAGE_ACCOUNT_KEY = os.environ['STORAGE_ACCOUNT_KEY']
table_service = TableService(account_name=STORAGE_ACCOUNT_NAME, account_key=STORAGE_ACCOUNT_KEY)
# Pre-create the tables the pipeline writes to.
# NOTE(review): presumably create_table tolerates existing tables with this
# SDK version's defaults — confirm, since this job runs continuously.
table_service.create_table('cycles')
table_service.create_table('features')
table_service.create_table('predictions')
table_service.create_table('databricks')
|
{"/src/WebApp/shared_modules/iot_hub_helpers/__init__.py": ["/src/WebApp/shared_modules/iot_hub_helpers/iot_hub_helpers.py"]}
|
25,406
|
Azure/AI-PredictiveMaintenance
|
refs/heads/master
|
/src/WebApp/shared_modules/iot_hub_helpers/iot_hub_helpers.py
|
import json
import time
import requests
import random
import datetime
import dateutil.parser
import logging
from base64 import b64encode, b64decode
from hashlib import sha256
from time import time, sleep
from urllib.parse import quote_plus, urlencode
from hmac import HMAC
from iothub_service_client import IoTHubRegistryManager, IoTHubRegistryManagerAuthMethod, IoTHubDeviceTwin, IoTHubDeviceConnectionState, IoTHubDeviceStatus
from iothub_client import IoTHubClient, IoTHubMessage, IoTHubConfig, IoTHubTransportProvider
from http import HTTPStatus
class IoTHub:
def __init__(self, iothub_name, owner_key, suffix='.azure-devices.net'):
self.iothub_name = iothub_name
self.owner_key = owner_key
self.iothub_host = iothub_name + suffix
self.owner_connection_string ='HostName={0};SharedAccessKeyName=iothubowner;SharedAccessKey={1}'.format(self.iothub_host, owner_key)
self.registry_manager = IoTHubRegistryManager(self.owner_connection_string)
self.device_twin = IoTHubDeviceTwin(self.owner_connection_string)
self.__device_clients = {}
def create_device(self, device_id, primary_key = '', secondary_key = ''):
return self.registry_manager.create_device(device_id, primary_key, secondary_key, IoTHubRegistryManagerAuthMethod.SHARED_PRIVATE_KEY)
def delete_device(self, device_id):
return self.registry_manager.delete_device(device_id)
def disable_device(self, device_id):
self.registry_manager.update_device(device_id, '', '', IoTHubDeviceStatus.DISABLED, IoTHubRegistryManagerAuthMethod.SHARED_PRIVATE_KEY)
def get_device_list(self):
return self.registry_manager.get_device_list(1000) # NOTE: this API is marked as deprecated,
# but Python SDK doesn't seem to offer
# an alternative yet (03/25/2018).
def get_device_twin(self, device_id):
return self.device_twin.get_twin(device_id)
def __get_sas_token(self, device_id, key, policy, expiry=3600):
ttl = time() + expiry
uri = '{0}/devices/{1}'.format(self.iothub_host, device_id)
sign_key = "%s\n%d" % ((quote_plus(uri)), int(ttl))
signature = b64encode(HMAC(b64decode(key), sign_key.encode('utf-8'), sha256).digest())
rawtoken = {
'sr' : uri,
'sig': signature,
'se' : str(int(ttl))
}
rawtoken['skn'] = policy
sas = 'SharedAccessSignature ' + urlencode(rawtoken)
return sas
# return 'HostName={0}{1};DeviceId={2};SharedAccessSignature={3}'.format(self.iothub_name, self.suffix, device_id, sas)
def update_twin(self, device_id, payload, etag = '*'):
"""
Update device twin.
Unfortunately, Python IoTHub SDK does not implement optimistic concurrency, so
falling back to the REST API.
SDK equivalent:
return self.device_twin.update_twin(device_id, payload)
"""
twin_url = 'https://{0}/twins/{1}?api-version=2017-06-30'.format(self.iothub_host, device_id)
sas_token = self.__get_sas_token(device_id, self.owner_key, 'iothubowner')
headers = {
'Authorization': sas_token,
'Content-Type': 'application/json',
'If-Match': '"{0}"'.format(etag)
}
payload_json = json.loads(payload)
keys = map(str.lower, payload_json.keys())
if 'tags' not in keys:
payload_json['tags'] = {}
if 'desiredproperties' not in keys:
payload_json['desiredProperties'] = {}
payload= json.dumps(payload_json)
r = requests.patch(twin_url, data=payload, headers=headers)
if r.status_code != HTTPStatus.OK:
raise Exception(r.text)
return r.text
def claim_device(self, client_id):
while True:
claimed_device = self.try_claim_device(client_id)
if claimed_device:
return claimed_device
sleep(random.randint(5, 10))
def try_claim_device(self, client_id):
    """Try once to claim an eligible simulated device for this client.

    A device is eligible when it is a simulated device, is not disabled,
    has been inactive for >= 10 minutes, and has no claim newer than
    10 minutes. The claim is acquired by writing a '_claim' tag through
    update_twin with the twin's etag (optimistic concurrency).

    Returns:
        (device, updated_twin_data) on success, or None when the device
        list could not be fetched or no device was claimable.
    """
    try:
        devices = self.get_device_list()
    except Exception:
        # Best effort: the caller (claim_device) retries after a backoff.
        return
    random.shuffle(devices)
    for device in devices:
        current_time = datetime.datetime.utcnow().replace(tzinfo=None)
        last_activity_time = dateutil.parser.parse(device.lastActivityTime).replace(tzinfo=None)
        # it seems that sometimes devices remain in a CONNECTED state long
        # after the connection is lost, so claiming CONNECTED devices that
        # have been inactive for at least 10 minutes
        if device.connectionState == IoTHubDeviceConnectionState.CONNECTED and (current_time - last_activity_time).total_seconds() < 600:
            continue
        if device.status == IoTHubDeviceStatus.DISABLED:
            continue
        # attempt to acquire lock using device twin's optimistic concurrency
        twin_data = self.get_device_twin(device.deviceId)
        twin_data_json = json.loads(twin_data)
        # (removed a stray no-op `random.randint(5, 10)` statement here)
        etag = twin_data_json['etag']
        twin_tags = twin_data_json.get('tags', {})
        # only simulated devices explicitly marked for a simulator are claimable
        if 'simulated' not in twin_tags or not twin_tags['simulated']:
            continue
        if 'simulator' not in twin_tags:
            continue
        if '_claim' in twin_tags:
            simulator_data = twin_tags['_claim']
            if 'lastClaimed' in simulator_data:
                last_claimed = dateutil.parser.parse(simulator_data['lastClaimed']).replace(tzinfo=None)
                if (current_time - last_claimed).total_seconds() < 600:
                    continue  # another client holds a recent claim
        twin_tags['_claim'] = {
            'clientId': client_id,
            'lastClaimed': current_time.isoformat()
        }
        updated_properties = {
            'tags': twin_tags
        }
        try:
            updated_twin_data = self.update_twin(device.deviceId, json.dumps(updated_properties), etag)
            logging.log(logging.INFO, 'Claimed device %s.', device.deviceId)
            return device, updated_twin_data
        except Exception:
            # etag mismatch: someone claimed it first — try the next device
            continue
class IoTHubDevice:
    """Device-side wrapper over the Azure IoT Hub device SDK client (MQTT)."""

    def __init__(self, iothub_name, device_id, device_key, suffix='.azure-devices.net'):
        # device_key: shared access key embedded in the connection string
        self.device_id = device_id
        device_connection_string = 'HostName={0}{1};DeviceId={2};SharedAccessKey={3}'.format(
            iothub_name, suffix, device_id, device_key
        )
        self.client = IoTHubClient(device_connection_string, IoTHubTransportProvider.MQTT) # HTTP, AMQP, MQTT ?

    def send_message(self, message):
        """Send a fire-and-forget device-to-cloud message (string or bytearray)."""
        m = IoTHubMessage(message) # string or bytearray
        self.client.send_event_async(m, IoTHubDevice.__dummy_send_confirmation_callback, 0)

    def send_reported_state(self, state, send_reported_state_callback = None, user_context = None):
        """Report twin state; 'state' is any JSON-serializable object."""
        if send_reported_state_callback is None:
            send_reported_state_callback = IoTHubDevice.__dummy_send_reported_state_callback
        state_json = json.dumps(state)
        self.client.send_reported_state(state_json, len(state_json), send_reported_state_callback, user_context)

    @staticmethod
    def __dummy_send_confirmation_callback(message, result, user_context):
        # default no-op confirmation handler
        pass
        #print(result)

    @staticmethod
    def __dummy_send_reported_state_callback(status_code, user_context):
        # default no-op status handler
        pass
        # print(status_code)
if __name__ == '__main__':
    # Library module: nothing to do when run directly.
    pass
|
{"/src/WebApp/shared_modules/iot_hub_helpers/__init__.py": ["/src/WebApp/shared_modules/iot_hub_helpers/iot_hub_helpers.py"]}
|
25,407
|
Azure/AI-PredictiveMaintenance
|
refs/heads/master
|
/src/WebApp/shared_modules/setup.py
|
from distutils.core import setup

# Minimal packaging config for the shared iot_hub_helpers module.
setup(
    name='iot_hub_helpers',
    version='0.1',
    packages=['iot_hub_helpers',]
)
|
{"/src/WebApp/shared_modules/iot_hub_helpers/__init__.py": ["/src/WebApp/shared_modules/iot_hub_helpers/iot_hub_helpers.py"]}
|
25,408
|
Azure/AI-PredictiveMaintenance
|
refs/heads/master
|
/src/WebApp/App_Data/jobs/continuous/Simulator/devices/__init__.py
|
from devices.simulated_device import SimulatorFactory
|
{"/src/WebApp/shared_modules/iot_hub_helpers/__init__.py": ["/src/WebApp/shared_modules/iot_hub_helpers/iot_hub_helpers.py"]}
|
25,409
|
Azure/AI-PredictiveMaintenance
|
refs/heads/master
|
/src/WebApp/App_Data/jobs/continuous/Scorer/scorer.py
|
import os
import requests
import json
import time
from azure.storage.table import TableService, Entity, TablePermissions
# Required configuration comes from the WebJob environment; a missing
# variable fails fast with KeyError at import time.
STORAGE_ACCOUNT_NAME = os.environ['STORAGE_ACCOUNT_NAME']
STORAGE_ACCOUNT_KEY = os.environ['STORAGE_ACCOUNT_KEY']
SCORE_URL = os.environ['SCORE_URL']

# Shared Azure Table client used by publish() and score().
table_service = TableService(account_name=STORAGE_ACCOUNT_NAME, account_key=STORAGE_ACCOUNT_KEY)
def call_score_web_service(url, payload):
    """POST the feature payload to the scoring service and return parsed JSON.

    Fix: check the HTTP status before parsing — the original would try to
    JSON-decode error bodies (500s, gateway pages) as prediction lists.

    Raises:
        requests.HTTPError: on a non-2xx response.
    """
    response = requests.post(url, json=payload)
    response.raise_for_status()
    return response.json()
def publish(prediction):
    """Persist one prediction as two rows in the 'predictions' table.

    prediction is ((machine_id, cycle_end), label); a falsy label is stored
    as 'Healthy'. One row is keyed per machine/cycle, a second goes into the
    _INDEX_ partition so the latest prediction per machine is easy to list.
    """
    (machine_id, cycle_end), label = prediction
    prediction_text = label or 'Healthy'
    table_service.insert_or_replace_entity('predictions', {
        'PartitionKey': machine_id,
        'RowKey': cycle_end,
        'Prediction': prediction_text,
    })
    table_service.insert_or_replace_entity('predictions', {
        'PartitionKey': '_INDEX_',
        'RowKey': machine_id,
        'Date': cycle_end,
        'Prediction': prediction_text,
    })
def score():
    """Score the newest feature row of every machine and publish results.

    The 'cycles' table keeps an _INDEX_ partition with one row per machine;
    each row's RollingWindow lists recent cycle ids — presumably newest
    first, so the first hit is the latest (TODO confirm against producer).
    """
    indices = table_service.query_entities('cycles', filter="PartitionKey eq '_INDEX_'")
    latest_features = []
    for index in indices:
        machine_id = index.RowKey
        rolling_window = json.loads(index.RollingWindow)
        for cycle in rolling_window:
            try:
                features = table_service.get_entity('features', machine_id, cycle)
                latest_features.append(features)
                break  # first cycle with a feature row wins
            except:
                pass  # feature row missing for this cycle: try the next one
    payload = [json.loads(x.FeaturesJson) for x in latest_features]
    predictions = zip([(x.PartitionKey, x.CycleEnd) for x in latest_features], call_score_web_service(SCORE_URL, payload))
    for prediction in predictions:
        publish(prediction)
if __name__ == '__main__':
    # Continuous WebJob loop: rescore every machine every 30 seconds.
    while True:
        score()
        time.sleep(30)
|
{"/src/WebApp/shared_modules/iot_hub_helpers/__init__.py": ["/src/WebApp/shared_modules/iot_hub_helpers/iot_hub_helpers.py"]}
|
25,410
|
Azure/AI-PredictiveMaintenance
|
refs/heads/master
|
/src/WebApp/App_Data/jobs/continuous/DatabricksAndSimulatedDevicesSetup/run.py
|
import urllib
import os
import time
import requests
import uuid
import json
import zipfile
import base64
from azure.storage.table import TableService, Entity, TablePermissions
# All configuration is injected through App Service environment variables;
# a missing variable fails fast with KeyError at import time.
STORAGE_ACCOUNT_NAME = os.environ['STORAGE_ACCOUNT_NAME']
STORAGE_ACCOUNT_KEY = os.environ['STORAGE_ACCOUNT_KEY']
DATABRICKS_API_BASE_URL = os.environ['DATABRICKS_WORKSPACE_URL'] + '/api/'
FEATURIZER_JAR_URL = os.environ['FEATURIZER_JAR_URL']
DATABRICKS_TOKEN = os.environ['DATABRICKS_TOKEN']
IOT_HUB_NAME = os.environ['IOT_HUB_NAME']
EVENT_HUB_ENDPOINT = os.environ['EVENT_HUB_ENDPOINT']
TMP = os.environ['TMP']  # App Service scratch folder for downloads
NOTEBOOKS_URL = os.environ['NOTEBOOKS_URL']
STORAGE_ACCOUNT_CONNECTION_STRING = "DefaultEndpointsProtocol=https;AccountName=" + STORAGE_ACCOUNT_NAME + ";AccountKey=" + STORAGE_ACCOUNT_KEY + ";EndpointSuffix=core.windows.net"
def call_api(uri, method=requests.get, json=None, data=None, files=None):
    """Call the Databricks REST API with bearer-token auth.

    Args:
        uri: path relative to DATABRICKS_API_BASE_URL (e.g. '2.0/dbfs/put').
        method: the requests function to use (requests.get / requests.post).
        json/data/files: forwarded to requests unchanged.

    Raises:
        Exception: on any non-success response, including the response text.
    """
    headers = { 'Authorization': 'Bearer ' + DATABRICKS_TOKEN }
    #TODO: add retries
    response = method(DATABRICKS_API_BASE_URL + uri, headers=headers, json=json, data=data, files=files)
    # Fix: accept every 2xx status — the original `!= 200` check raised
    # spuriously for endpoints that return 201/204 on success.
    if not response.ok:
        raise Exception('Error when calling Databricks API {0}. Response:\n{1}'.format(uri, response.text))
    return response
def get_last_run_id():
    """Return the run_id persisted in the 'databricks' table, or None."""
    table_service = TableService(account_name=STORAGE_ACCOUNT_NAME, account_key=STORAGE_ACCOUNT_KEY)
    entries = list(table_service.query_entities('databricks', filter="PartitionKey eq 'pdm'"))
    return entries[0]['run_id'] if entries else None
def set_last_run_id(run_id):
    """Persist run_id into the single pdm/pdm row of the 'databricks' table."""
    entity = {
        'PartitionKey': 'pdm',
        'RowKey': 'pdm',
        'run_id': str(run_id),
    }
    table_service = TableService(
        account_name=STORAGE_ACCOUNT_NAME,
        account_key=STORAGE_ACCOUNT_KEY,
    )
    table_service.insert_or_replace_entity('databricks', entity)
def get_run(run_id):
    """Poll a Databricks run until it leaves PENDING/RESIZING; return details.

    Fix: the original slept 10 s *after* the run had already reached a
    non-transient state (and even before the very first poll result was
    inspected); here we only sleep between polls.
    """
    while True:
        run_details = call_api('2.0/jobs/runs/get?run_id=' + str(run_id)).json()
        if run_details['state']['life_cycle_state'] not in ('PENDING', 'RESIZING'):
            return run_details
        time.sleep(10)
def is_job_active(run_details):
    """Return True iff the run's life_cycle_state is RUNNING."""
    return run_details['state']['life_cycle_state'] == 'RUNNING'
def upload_notebooks_databricks():
    """Download the Notebooks archive and import both notebooks to Databricks.

    Fixes: the zip and notebook file handles were leaked (never closed); the
    two near-identical import calls are folded into one loop.
    """
    # upload notebook to app service
    notebooks_zip_local_path = os.path.join(TMP, 'Notebooks.zip')
    urllib.request.urlretrieve(NOTEBOOKS_URL, notebooks_zip_local_path)
    notebooks_local_path = os.path.join(TMP, 'Notebooks')
    with zipfile.ZipFile(notebooks_zip_local_path, 'r') as zip_ref:
        zip_ref.extractall(notebooks_local_path)
    # import each notebook into the Databricks workspace root
    for notebook in ('FeatureEngineering', 'DataIngestion'):
        local_path = os.path.join(notebooks_local_path, notebook + '.ipynb')
        put_payload = { 'path' : '/' + notebook, 'overwrite' : 'true', 'language':'PYTHON', 'format':'JUPYTER' }
        with open(local_path, 'rb') as f:
            # .json() kept so a malformed API response still raises here
            call_api('2.0/workspace/import', method=requests.post, data=put_payload, files={'file': f}).json()
# --- module-level bootstrap: runs once when the WebJob starts ---
upload_notebooks_databricks()

# Write the notebook environment config locally, then push it to DBFS.
data = '{"DataIngestion" : { "STORAGE_ACCOUNT_NAME" :"' + STORAGE_ACCOUNT_NAME + '", "STORAGE_ACCOUNT_KEY" :"' + STORAGE_ACCOUNT_KEY +'", "TELEMETRY_CONTAINER_NAME" : "telemetry", "LOG_TABLE_NAME" : "Logs", "DATA_ROOT_FOLDER" : "/root"}}'
file = open('D:/home/site/NotebookEnvironmentVariablesConfig.json','w')
file.write(data)
file.close()
config_path = '/root/NotebookEnvironmentVariablesConfig.json'
files = {'file': open('D:/home/site/NotebookEnvironmentVariablesConfig.json', 'rb')}
put_payload = { 'path' : config_path, 'overwrite' : 'true' }
call_api('2.0/dbfs/put', method=requests.post, data=put_payload, files=files)

# If the previously submitted featurization run is still active, stop here.
last_run_id = get_last_run_id()
if last_run_id is not None and is_job_active(get_run(last_run_id)):
    exit(0)

# Download the featurizer JAR and upload it to DBFS.
jar_local_path = os.path.join(TMP, 'featurizer_2.11-1.0.jar')
dbfs_path = '/predictive-maintenance/jars/'
jar_dbfs_path = dbfs_path + 'featurizer_2.11-1.0.jar'
urllib.request.urlretrieve(FEATURIZER_JAR_URL, jar_local_path)
mkdirs_payload = { 'path': dbfs_path }
call_api('2.0/dbfs/mkdirs', method=requests.post, json=mkdirs_payload)
files = {'file': open(jar_local_path, 'rb')}
put_payload = { 'path' : jar_dbfs_path, 'overwrite' : 'true' }
call_api('2.0/dbfs/put', method=requests.post, data=put_payload, files=files)

# Cluster spec for the run (reused below as "new_cluster").
sparkSpec= {
    'spark.speculation' : 'true'
}
payload = {
    'spark_version' : '4.2.x-scala2.11',
    'node_type_id' : 'Standard_D3_v2',
    'spark_conf' : sparkSpec,
    'num_workers' : 1
}
#run job
jar_path = "dbfs:" + jar_dbfs_path
jar = {
    'jar' : jar_path
}
maven_coordinates = {
    'coordinates' : 'com.microsoft.azure:azure-eventhubs-spark_2.11:2.3.1'
}
maven = {
    'maven' : maven_coordinates
}
libraries = [jar, maven]
jar_params = [EVENT_HUB_ENDPOINT, IOT_HUB_NAME, STORAGE_ACCOUNT_CONNECTION_STRING]
spark_jar_task= {
    'main_class_name' : 'com.microsoft.ciqs.predictivemaintenance.Featurizer',
    'parameters' : jar_params
}
# note: 'payload' is rebound here — the cluster spec above becomes new_cluster
payload = {
    "run_name": "featurization_task",
    "new_cluster" : payload,
    'libraries' : libraries,
    'max_retries' : 1,
    'spark_jar_task' : spark_jar_task
}

# Submit the run; retry up to 5 times if it fails to reach RUNNING.
run_job = True
i = 0
while run_job and i < 5:
    run_details = call_api('2.0/jobs/runs/submit', method=requests.post, json=payload).json()
    run_id = run_details['run_id']
    set_last_run_id(run_id)
    run_details = get_run(run_id)
    i= i + 1
    if not is_job_active(run_details):
        run_job = True
        errorMessage = 'Unable to create Spark job. Run ID: {0}. Failure Details: {1}'.format(run_id, run_details['state']['state_message'])
        print(errorMessage)
    else:
        run_job = False
|
{"/src/WebApp/shared_modules/iot_hub_helpers/__init__.py": ["/src/WebApp/shared_modules/iot_hub_helpers/iot_hub_helpers.py"]}
|
25,411
|
mspeekenbrink/SpeedAccuracyMovingDots
|
refs/heads/master
|
/Task.py
|
import random, math, array, random
from psychopy import core,visual,event,parallel
from itertools import product
class Task:
    """Random-dot-motion speed/accuracy task (PsychoPy).

    Each trial: cue (ACCURATE/FAST) -> jittered fixation -> dot stimulus with
    'a'/'b' responses -> feedback -> CSV row appended to the data file.

    NOTE(review): the original indentation of this block was lost; the
    grouping below was reconstructed from control flow and should be
    verified against the source repository.
    """
    # fixed stage durations, in seconds
    #speedTime = 0.5
    cueTime = 1.5
    fixTime = 0.5
    jitterTime = 1
    preFeedbackTime = .1
    feedbackTime = .4
    postFeedbackTime = .1

    def __init__(self,win,filename,nsubblocks,nblocks,blockSize,speedTime,trialTime):
        self.datafile = open(filename, 'a') #a simple text file with 'comma-separated-values'
        self.win = win
        self.nsubblocks = nsubblocks # this is the size of each block for trial randomization
        self.nblocks = nblocks # this is used to randomize trials
        self.blockSize = blockSize # this is the block size seen by participants.
        self.speedTime = speedTime   # response deadline (s) in FAST trials
        self.trialTime = trialTime   # maximum stimulus duration (s)
        self.typeInstructions = visual.TextStim(win,text="Ac",pos=(0,0))
        self.feedback = visual.TextStim(win,text="",pos=(0,0))
        self.blockInstructions = visual.TextStim(win,text="",pos=(0,0))
        self.dotPatch = visual.DotStim(win, units='pix',color=(1.0,1.0,1.0), dir= 0,
            nDots=120, fieldShape='circle', fieldPos=(0.0,0.0),dotSize=3,fieldSize=250,
            dotLife=-1, #number of frames for each dot to be drawn
            signalDots='different', #are the signal and noise dots 'different' or 'same' popns (see Scase et al)
            noiseDots='direction', #do the noise dots follow random- 'walk', 'direction', or 'position'
            speed=1.00, coherence=0.5)
        self.fixation = visual.ShapeStim(win,
            units='pix',
            lineColor='white',
            lineWidth=2.0,
            vertices=((-25, 0), (25, 0), (0,0), (0,25), (0,-25)),
            closeShape=False,
            pos= [0,0])
        self.trialClock = core.Clock()
        # following returns a list with: id, type, coherence
        tids = list(product([0,1],[0,1],[.05,.1,.15,.25,.35,.5])) * self.nsubblocks
        self.tids = []
        #self.dirs = []
        #self.ttype = []
        #self.tcoherence = []
        for i in range(self.nblocks):
            random.shuffle(tids)
            #random.shuffle(tttype)
            self.tids += tids
            #self.ttype += tttype
        ## add 100% coherency block at the end
        tids = list(product([0,1],[0,1],[1.0])) * 30
        self.tids += tids
        #self.dirs = [0]*(self.ntrials/2) + [1]*(self.ntrials/2)
        #random.shuffle(self.dirs)
        #self.ttype = [0]*(self.ntrials/2) + [1]*(self.ntrials/2)
        #random.shuffle(self.ttype)
        self.tinstructions = ["ACCURATE","FAST"]
        # NOTE(review): column labels below list type before direction, but
        # the write in Run() emits tids[0] (direction) second — confirm the
        # intended column order.
        self.datafile.write('trial,type(1=Ac,2=Sp),coherence,direction(1=L,2=R),response (1=L,2=R),responsetime,feedback (1=correct,2=incorrect,3=inTime,4=tooSlow,5=noResponse)\n')

    def Run(self):
        """Run every trial in self.tids, writing one CSV row per trial."""
        running = True
        trial = 1
        block = 1
        while running:#forever
            # fresh random dot positions for this trial
            self.dotPatch._dotsXY = self.dotPatch._newDotsXY(self.dotPatch.nDots)
            # set direction
            self.dotPatch.setDir(180 - self.tids[trial - 1][0]*180)
            # set instructions
            self.typeInstructions.setText(self.tinstructions[self.tids[trial - 1][1]])
            # set coherence
            self.dotPatch.setFieldCoherence(self.tids[trial - 1][2])
            # show instruction for cueTime
            self.typeInstructions.draw()
            self.win.flip()
            core.wait(self.cueTime)
            # do nothing
            # draw jitter time
            jitter = random.random() * self.jitterTime
            self.win.flip()
            #core.wait(jitter)
            # show fixation 500 ms
            self.fixation.draw()
            self.win.flip()
            core.wait(self.fixTime)
            # do nothing
            # jitter with blank screen
            #self.win.flip()
            core.wait(self.jitterTime - jitter)
            # show stimulus 1500 ms
            self.trialClock.reset()
            ttime = -1.0
            rgiven = False
            response = -1
            event.clearEvents(eventType='keyboard')
            event.clearEvents('mouse')
            while (self.trialClock.getTime() < self.trialTime):
                if (rgiven == False):
                    self.dotPatch.draw()
                    self.win.flip()
                    for key in event.getKeys():
                        if key in ['a','b','escape']:
                            ttime = self.trialClock.getTime()
                            rgiven = True
                        if key in ['b']:
                            response = 0
                        if key in ['a']:
                            response = 1
                        if key in ['escape']:
                            self.win.close()
                            core.quit()
                        # NOTE(review): placement reconstructed — original
                        # indentation of this flip was ambiguous
                        self.win.flip() # delete contents of window
                else:
                    break
            # do nothing
            self.win.flip()
            core.wait(self.preFeedbackTime)
            feedcode = 0
            dfeed = 0
            # show feedback 400 ms
            if (ttime < 0):
                self.feedback.setColor("red")
                self.feedback.setText("No response")#
                #feedcode = codes.feedback_noResponse_on
                dfeed = 5
            else:
                if (self.tids[trial - 1][1] == 0):
                    # accuracy
                    if (response == self.tids[trial - 1][0]):
                        self.feedback.setText("correct")
                        self.feedback.setColor("green")
                        #feedcode = codes.feedback_correct_on
                        dfeed = 1
                    else:
                        self.feedback.setText("incorrect")
                        self.feedback.setColor("red")
                        #feedcode = codes.feedback_incorrect_on
                        dfeed = 2
                else:
                    if (ttime < self.speedTime):
                        self.feedback.setText("in time")
                        self.feedback.setColor("green")
                        #feedcode = codes.feedback_inTime_on
                        dfeed = 3
                    else:
                        self.feedback.setText("too slow")
                        self.feedback.setColor("red")
                        #feedcode = codes.feedback_tooSlow_on
                        dfeed = 4
            self.feedback.draw()
            self.win.flip()
            #while (self.trialClock.getTime() < 400):
            core.wait(self.feedbackTime)
            # do nothing
            self.datafile.write(
                str(trial) + ',' +
                str(self.tids[trial - 1][0] + 1) + ',' +
                str(self.tids[trial - 1][2]) + ',' +
                str(self.tids[trial - 1][1] + 1) + ',' +
                str(response + 1) + ',' +
                str(1000*ttime) + ',' +
                str(dfeed) + '\n')
            # NOTE(review): 6*nsubblocks*nblocks is a quarter of the main
            # trial count (2 dirs x 2 types x 6 coherences) — confirm this
            # stopping condition is intended.
            if(trial == 6*self.nsubblocks*self.nblocks):
                running = False
            elif(trial == block*self.blockSize):
                # show end of block instructions and wait for response
                txt = "This is the end of block "
                txt += str(block) + "\n\n"
                txt += "You can now take a short rest. Please wait for the experimenter to continue the task."
                self.blockInstructions.setText(txt)
                self.blockInstructions.draw()
                self.win.flip()
                cont = False
                while (cont == False):
                    for key in event.getKeys():
                        if key in ['enter','return','escape']:
                            if key in ['enter','return']:
                                cont = True
                                block += 1
                            if key in ['escape']:
                                self.win.close()
                                core.quit()
            trial = trial + 1
            # remove feedback
            self.win.flip()
            core.wait(self.postFeedbackTime)
        self.datafile.close()
|
{"/Dots.py": ["/Task.py"]}
|
25,412
|
mspeekenbrink/SpeedAccuracyMovingDots
|
refs/heads/master
|
/Dots.py
|
#!/usr/bin/env python
from psychopy import visual, event, core, data, gui, misc, parallel
import Instructions, StartMainInstructions, Task
# --- experiment configuration -------------------------------------------
# NOTE(review): original indentation was lost; branch grouping below is
# reconstructed and should be verified.
debug = False
speedTime = 500  # ms; replaced below by the dialog value

if debug == True:
    nsubblocks = 1
    nblocks = 1
    blockSize = 6
    #speedTime = .4
    #ntrials2 = 12
    #nblocks2 = 1
    #blockSize2 = 6#
    #ntrials3 = 4
    #nblocks3 = 2
    #blockSize3 = 4
else:
    nsubblocks = 5
    nblocks = 4
    blockSize = 120
    #speedTime = .4
    #ntrials2 = 30
    #nblocks2 = 4
    #blockSize2 = 120
    #ntrials3 = 100
    #nblocks3 = 2
    #blockSize3 = 100
# uncomment for debug run
#ntrials = 4
#nblocks = 4
#blockSize = 4

#create a window to draw in
myWin =visual.Window((1280,1024), allowGUI=True,
    bitsMode=None, units='norm', winType='pyglet', color=(-1,-1,-1))

# Admin
expInfo = {'subject':'test','date':data.getDateStr(),'practice':True,'speed time':speedTime,'trial time':1500}
#expInfo['dateStr']= data.getDateStr() #add the current time
#expInfo['practice'] = True
#present a dialogue to change params
ok = False
while(not ok):
    dlg = gui.DlgFromDict(expInfo, title='Moving Dots', fixed=['dateStr'],order=['date','subject','practice','speed time','trial time'])
    if dlg.OK:
        misc.toFile('lastParams.pickle', expInfo)#save params to file for next time
        ok = True
    else:
        core.quit()#the user hit cancel so exit

# setup data file
fileName = 'Data/' + expInfo['subject'] + expInfo['date'] + '.csv'
dataFile = open(fileName, 'w') #a simple text file with 'comma-separated-values'
dataFile.write('subject = ' + str(expInfo['subject']) + "; date = " + str(expInfo['date']) + "; speed time = " + str(expInfo['speed time']) + "; trial time = " + str(expInfo['trial time']) + '\n')
dataFile.close()

trialClock = core.Clock()
# dialog values are in ms; Task expects seconds
speedTime = float(expInfo['speed time'])/1000
trialTime = float(expInfo['trial time'])/1000
practiceTask = expInfo['practice']
#myPort = parallel.ParallelPort(address=0x0378)
#myPort = parallel.ParallelPort()

instr = Instructions.Instructions(myWin,practiceTask)
instr.Run()

if practiceTask == True:
    # short practice run (2 subblocks, 1 block, block size 12)
    practice = Task.Task(myWin,fileName,2,1,12,speedTime,trialTime)
    practice.Run()
    dataFile = open(fileName, 'a') #a simple text file with 'comma-separated-values'
    dataFile.write('End Practice\n')
    dataFile.close()

# NOTE(review): these two lines may have belonged inside the practice branch
instr = StartMainInstructions.Instructions(myWin)
instr.Run()

task = Task.Task(myWin,fileName,nsubblocks,nblocks,blockSize,speedTime,trialTime)
task.Run()

endText = "This is the end of the experiment \n \n"
endText += "Thank your for your participation."
end = visual.TextStim(myWin, pos=[0,0],text=endText)
end.draw()
myWin.flip()

done = False
while not done:
    for key in event.getKeys():
        if key in ['escape','q']:
            done = True
core.quit()
|
{"/Dots.py": ["/Task.py"]}
|
25,442
|
fuetser/coffee
|
refs/heads/master
|
/UI/mainUI.py
|
from PyQt5 import QtWidgets
class Main(QtWidgets.QWidget):
    """Main coffee-catalog window: edit/add buttons above a record table."""

    def __init__(self):
        super().__init__()
        self.setupUi()

    def setupUi(self):
        """Build the static widget layout (Designer-style generated code)."""
        self.resize(800, 600)
        self.verticalLayout_2 = QtWidgets.QVBoxLayout(self)
        self.verticalLayout = QtWidgets.QVBoxLayout()
        self.horizontalLayout = QtWidgets.QHBoxLayout()
        self.edit_button = QtWidgets.QPushButton(self)
        self.horizontalLayout.addWidget(self.edit_button)
        self.add_button = QtWidgets.QPushButton(self)
        self.horizontalLayout.addWidget(self.add_button)
        self.verticalLayout.addLayout(self.horizontalLayout)
        # table is empty here; MainWindow.fill_table populates it from the DB
        self.table = QtWidgets.QTableWidget(self)
        self.table.setColumnCount(0)
        self.table.setRowCount(0)
        self.verticalLayout.addWidget(self.table)
        self.verticalLayout_2.addLayout(self.verticalLayout)
        self.setWindowTitle("Эспрессо")
        self.edit_button.setText("Редактировать")
        self.add_button.setText("Добавить")
|
{"/release/main.py": ["/UI/mainUI.py", "/UI/addEditCoffeeForm.py"]}
|
25,443
|
fuetser/coffee
|
refs/heads/master
|
/release/main.py
|
from functools import partial
from UI.mainUI import Main
from UI.addEditCoffeeForm import DialogForm
from PyQt5 import QtWidgets
import sys
import sqlite3
class Dialog(DialogForm):
    """Add/edit form with helpers to prefill, read back, and reset fields."""

    def __init__(self):
        super().__init__()

    def show(self, set_default=False):
        """Show the form; with set_default=True preset typical values first."""
        if set_default:
            self.radioButton.setChecked(True)
            self.roast_field.setValue(0.5)
            self.price_field.setValue(300.0)
            self.package_size_field.setValue(500.0)
        super().show()

    def get_params(self):
        """Return the field values as a record dict, or None (implicitly)
        when either text field is empty."""
        variety_name = self.variety_field.text()
        roast_degree = self.roast_field.value()
        is_mashed = int(self.radioButton.isChecked())
        taste_desc = self.taste_desc_field.text()
        price = self.price_field.value()
        package_size = self.package_size_field.value()
        if variety_name and taste_desc:
            return {"ID": None,
                    "variety_name": variety_name,
                    "roast_degree": roast_degree,
                    "is_mashed": is_mashed,
                    "taste_desc": taste_desc,
                    "price": price,
                    "package_size": package_size
                    }

    def closeEvent(self, event):
        # reset free-text fields so the next open starts clean
        self.variety_field.setText("")
        self.taste_desc_field.setText("")
        event.accept()

    def fill(self, record):
        """Populate the form from an (id, name, roast, mashed, taste, price,
        size) database row."""
        id_, variety_name, roast_degree, is_mashed, taste_desc, price, package_size = record
        self.variety_field.setText(variety_name)
        self.roast_field.setValue(roast_degree)
        if is_mashed == 1:
            self.radioButton.setChecked(True)
        elif is_mashed == 0:
            self.radioButton_2.setChecked(True)
        self.taste_desc_field.setText(taste_desc)
        self.price_field.setValue(price)
        self.package_size_field.setValue(package_size)
class MainWindow(Main):
    """Main window wired to the sqlite catalog: lists, adds, edits records.

    Fix: show_dialog previously called ok_button.clicked.connect(...) on every
    invocation without ever disconnecting, so each dialog open stacked another
    slot and a single OK click fired add/update multiple times. The handler is
    now disconnected before reconnecting.
    """

    def __init__(self, db_name):
        super().__init__()
        self.db_name = db_name
        self.conn = sqlite3.connect(db_name)
        self.dialog = Dialog()
        self.create_database()
        self.fill_table()
        self.add_button.clicked.connect(partial(self.show_dialog, 0))
        self.edit_button.clicked.connect(partial(self.show_dialog, 1))
        self.row = 0  # ID of the record currently being edited

    def create_database(self):
        """Create the items table if it does not exist yet."""
        self.conn.execute("""CREATE TABLE IF NOT EXISTS items(
            ID INTEGER PRIMARY KEY,
            variety_name TEXT,
            roast_degree REAL,
            is_mashed INTEGER,
            taste_desc TEXT,
            price REAL,
            package_size REAL
            )
        """)
        self.conn.commit()

    def fill_database(self):
        """Seed the table with sample rows (not called automatically)."""
        self.conn.execute("""INSERT INTO items VALUES
            (1, 'Арабика', 0.5, 1, 'Отличается сложным ароматом', 350.5, 500.0),
            (2, 'Робуста', 0.3, 0, 'Высокое содержание кофеина', 450.9, 350.0),
            (3, 'Либерика', 0.7, 0, 'Используется в смесях', 300.68, 450.5),
            (4, 'Эксцельза', 0.9, 1, 'Не имеет хозяйственного значения', 200.0, 200.0),
            (5, 'Арабика Сантос', 0.4, 0, 'Терпкий, с легкой горчинкой', 400.0, 300.0),
            (6, 'Арабика Медельин', 0.5, 1, 'Мягкий вкус со сладковатым оттенком', 350.0, 300.5),
            (7, 'Арабиен Мокко', 0.8, 0, 'Винный привкус, высокая кислотность', 450.0, 200.0)
        """)
        self.conn.commit()

    def fill_table(self):
        """Reload the whole items table into the widget."""
        table_data = self.conn.execute("SELECT * FROM items").fetchall()
        headers = ("ID", "Название сорта", "Степень обжарки",
                   "Молотый/в зернах", "Описание вкуса", "Цена", "Объем упаковки")
        self.table.setRowCount(0)
        self.table.setColumnCount(len(headers))
        self.table.setHorizontalHeaderLabels(headers)
        for i, row in enumerate(table_data):
            self.table.setRowCount(self.table.rowCount() + 1)
            for j, elem in enumerate(row):
                self.table.setItem(
                    i, j, QtWidgets.QTableWidgetItem(str(elem)))
        self.table.resizeColumnsToContents()

    def show_dialog(self, index: int):
        """Open the dialog in add (index=0) or edit (index=1) mode."""
        # Fix: drop any previously connected handler so OK fires exactly once.
        try:
            self.dialog.ok_button.clicked.disconnect()
        except TypeError:
            pass  # nothing was connected yet
        if index == 0:
            self.dialog.ok_button.clicked.connect(self.add_record)
            self.dialog.show(set_default=True)
        elif index == 1 and (row := self.table.currentRow()) != -1:
            self.dialog.ok_button.clicked.connect(self.update_record)
            # NOTE(review): assumes ID == table row + 1 (dense, 1-based IDs)
            record = self.conn.execute(
                "SELECT * FROM items WHERE ID = ?", (row + 1,)).fetchone()
            self.dialog.fill(record)
            self.row = row + 1
            self.dialog.show()

    def add_record(self):
        """Insert the dialog's values as a new row (ID auto-assigned)."""
        if (data := self.dialog.get_params()) is not None:
            self.dialog.close()
            self.conn.execute("""INSERT INTO items VALUES (
                :ID, :variety_name, :roast_degree, :is_mashed, :taste_desc,
                :price, :package_size)""", data)
            self.conn.commit()
            self.fill_table()

    def update_record(self):
        """Write the dialog's values back to the record selected for editing."""
        if (data := self.dialog.get_params()) is not None:
            self.dialog.close()
            data["ID"] = self.row
            self.conn.execute("""UPDATE items SET
                variety_name = :variety_name,
                roast_degree = :roast_degree,
                is_mashed = :is_mashed,
                taste_desc = :taste_desc,
                price = :price,
                package_size = :package_size
                WHERE ID = :ID
            """, data)
            self.conn.commit()
            self.fill_table()

    def closeEvent(self, event):
        # release the sqlite connection when the window closes
        self.conn.close()
        event.accept()
if __name__ == '__main__':
    # standard Qt application bootstrap
    app = QtWidgets.QApplication(sys.argv)
    window = MainWindow("data/coffee.sqlite")
    window.show()
    sys.exit(app.exec())
|
{"/release/main.py": ["/UI/mainUI.py", "/UI/addEditCoffeeForm.py"]}
|
25,444
|
fuetser/coffee
|
refs/heads/master
|
/UI/addEditCoffeeForm.py
|
from PyQt5 import QtWidgets
class DialogForm(QtWidgets.QWidget):
    """Static add/edit form layout (Designer-style generated code)."""

    def __init__(self):
        super().__init__()
        self.setupUi()

    def setupUi(self):
        """Build all form rows: name, roast, mashed/beans, taste, price, size."""
        self.resize(400, 242)
        self.formLayout_2 = QtWidgets.QFormLayout(self)
        self.formLayout = QtWidgets.QFormLayout()
        self.formLayout_2.setLayout(0, QtWidgets.QFormLayout.LabelRole, self.formLayout)
        self.label = QtWidgets.QLabel(self)
        self.formLayout_2.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.label)
        self.variety_field = QtWidgets.QLineEdit(self)
        self.formLayout_2.setWidget(1, QtWidgets.QFormLayout.FieldRole, self.variety_field)
        self.label2 = QtWidgets.QLabel(self)
        self.formLayout_2.setWidget(2, QtWidgets.QFormLayout.LabelRole, self.label2)
        # roast degree is a 0..1 fraction
        self.roast_field = QtWidgets.QDoubleSpinBox(self)
        self.roast_field.setMaximum(1.0)
        self.roast_field.setSingleStep(0.01)
        self.formLayout_2.setWidget(2, QtWidgets.QFormLayout.FieldRole, self.roast_field)
        # mutually exclusive: ground vs whole beans
        self.radioButton = QtWidgets.QRadioButton(self)
        self.formLayout_2.setWidget(3, QtWidgets.QFormLayout.LabelRole, self.radioButton)
        self.radioButton_2 = QtWidgets.QRadioButton(self)
        self.formLayout_2.setWidget(3, QtWidgets.QFormLayout.FieldRole, self.radioButton_2)
        self.label3 = QtWidgets.QLabel(self)
        self.formLayout_2.setWidget(4, QtWidgets.QFormLayout.LabelRole, self.label3)
        self.taste_desc_field = QtWidgets.QLineEdit(self)
        self.formLayout_2.setWidget(4, QtWidgets.QFormLayout.FieldRole, self.taste_desc_field)
        self.label4 = QtWidgets.QLabel(self)
        self.formLayout_2.setWidget(5, QtWidgets.QFormLayout.LabelRole, self.label4)
        self.price_field = QtWidgets.QDoubleSpinBox(self)
        self.price_field.setMaximum(9999.99)
        self.formLayout_2.setWidget(5, QtWidgets.QFormLayout.FieldRole, self.price_field)
        self.label5 = QtWidgets.QLabel(self)
        self.formLayout_2.setWidget(6, QtWidgets.QFormLayout.LabelRole, self.label5)
        self.package_size_field = QtWidgets.QDoubleSpinBox(self)
        self.package_size_field.setMaximum(9999.99)
        self.formLayout_2.setWidget(6, QtWidgets.QFormLayout.FieldRole, self.package_size_field)
        self.ok_button = QtWidgets.QPushButton(self)
        self.formLayout_2.setWidget(7, QtWidgets.QFormLayout.LabelRole, self.ok_button)
        self.setWindowTitle("Изменить/добавить запись")
        self.label.setText("Название сорта")
        self.label2.setText("Степень обжарки")
        self.radioButton.setText("Молотый")
        self.radioButton_2.setText("В зёрнах")
        self.label3.setText("Описание вкуса")
        self.label4.setText("Цена")
        self.label5.setText("Объем упаковки")
        self.ok_button.setText("OK")
|
{"/release/main.py": ["/UI/mainUI.py", "/UI/addEditCoffeeForm.py"]}
|
25,445
|
zc00gii/b00tii
|
refs/heads/master
|
/testing/proto/irc/events.py
|
from base.events import Event, EventHandler
from base.buffer import Buffer
class IRCEvents(Event):
    """IRC protocol event predicates.

    NOTE(review): this class looks unfinished as written — __init__
    references privmsg/part/quit/... as bare names (presumably meant to be
    self.privmsg etc.; as written it raises NameError), and every method
    reads a global 'buffer' that is never defined in this module. Documented
    as-is; do not rely on it without fixing those references.
    """
    handler = EventHandler()  # class-level: shared by all instances — confirm intended

    def __init__(self):
        # NOTE(review): these should presumably be bound methods (self.privmsg, ...)
        self.handler.events = { "privmsg" : Event(privmsg), "part" : Event(part),
                                "quit" : Event(quit), "join" : Event(join),
                                "kick" : Event(kick), "mode" : Event(mode),
                                "topic" : Event(topic) }

    def privmsg(self, message = False):
        """True when the buffered line is a PRIVMSG (optionally matching a prefix)."""
        if buffer[0].startswith(':PRIVMSG'):
            if message != False:
                if buffer[1].startswith(message):
                    return True
                return False
            return True
        return False

    def part(self):
        if buffer[0].startswith(':PART'):
            return True
        return False

    def join(self):
        if buffer[0].startswith(':JOIN'):
            return True
        return False

    def mode(self, mode = None):
        # not implemented yet
        pass

    def topic(self):
        if buffer[0].startswith(':TOPIC'):
            return True
        return False

    def kick(self):
        if buffer[0].startswith(':KICK'):
            return True
        return False

    def raw(self, rawdata):
        """True when the buffered line starts with the given raw string."""
        if buffer[0].startswith(rawdata):
            return True
        return False
|
{"/testing/proto/irc/events.py": ["/base/buffer.py"]}
|
25,446
|
zc00gii/b00tii
|
refs/heads/master
|
/base/buffer.py
|
import socket
class Buffer():
    """Line-buffer mixin for a socket-like class (expects self.recv/self.close).

    Fixes: `self.close` was an attribute access without the call parentheses,
    so the socket was never actually closed on error; removed an unreachable
    `break` after `return` and an unused counter in findBuffer.

    NOTE(review): 'extra' is re-created on every readBuffer call, so partial
    lines are never carried over between reads — verify against the original
    design before relying on that behavior.
    """
    _buffer = []  # class-level default; getBuffer rebinds it per instance

    def readBuffer(self):
        """Read up to 1024 bytes; return [prefix, rest] split on the 2nd ':'."""
        data = ''
        extra = [""]
        line = [""]
        try:
            data = extra[0] + self.recv(1024)
            data = data.replace('\r', "")
            extra.pop(0)
            for x in range(len(data.split('\n'))):
                extra.append(data.split('\n')[x])
            data = data.split('\n')[0]
            line = data.split(':')
            line = [':'.join(line[:2]), ':'.join(line[2:])]
        except socket.error:
            self.close()  # fix: original `self.close` never invoked the method
        return line

    def findBuffer(self, line):
        """Return True iff some buffered line starts with `line`."""
        for entry in self._buffer:
            if entry.find(line) == 0:
                return True
        return False

    def getBuffer(self):
        """Refresh self._buffer with the next parsed read."""
        self._buffer = self.readBuffer()
|
{"/testing/proto/irc/events.py": ["/base/buffer.py"]}
|
25,447
|
zc00gii/b00tii
|
refs/heads/master
|
/base/module.py
|
class Module():
    """Naive runtime module (plugin) loader.

    NOTE(review): several oddities to confirm before use — `reload` is a
    builtin only in Python 2 (importlib.reload in 3); `modules` is a
    class-level dict shared by every instance; unloadModule pops from this
    module's globals() although loadModule never stores the module there.
    """
    modules = dict()

    def reloadModule(self,name):
        # re-import an already-loaded module in place
        reload(self.modules[name])

    def loadModule(self,name):
        # skipped when a global of the same name exists — TODO confirm intent
        if name not in globals().keys():
            try:
                self.modules[name] = __import__(name)
            except ImportError:
                pass # no such module

    def unloadModule(self, name):
        self.modules.pop(name)
        globals().pop(name)
|
{"/testing/proto/irc/events.py": ["/base/buffer.py"]}
|
25,448
|
zc00gii/b00tii
|
refs/heads/master
|
/testing/base/server.py
|
import socket
from socket import SocketType
class Server(SocketType):
    """Socket wrapper that can (re)initialise its underlying _sock.

    NOTE(review): relies on socket._realsocket and socket._delegate_methods,
    which are CPython 2 internals removed in Python 3 — this class is
    Python-2-only as written.
    """
    def socketInit(self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None):
        if _sock is None:
            _sock = socket._realsocket(family, type, proto)
        self._sock = _sock
        # re-bind the delegate methods (recv, send, ...) to the new raw socket
        for method in socket._delegate_methods:
            setattr(self, method, getattr(_sock, method))
|
{"/testing/proto/irc/events.py": ["/base/buffer.py"]}
|
25,449
|
zc00gii/b00tii
|
refs/heads/master
|
/testing/proto/irc/server.py
|
from base.server import Server
class IRCServer(Server):
    """IRC connection that connects immediately on construction."""
    channels = []  # class-level: shared across instances — TODO confirm intended

    def __init__(self, server, port):
        # server: hostname or IP string; connect() is inherited from Server
        self.connect((server, port))
{"/testing/proto/irc/events.py": ["/base/buffer.py"]}
|
25,450
|
zc00gii/b00tii
|
refs/heads/master
|
/testing/b00tii.py
|
import base.server
import base.module
import base.buffer
import base.events
import proto.irc.functions
import proto.irc.events
#import proto.irc.server
from proto.irc.functions import IRCFunctions
# Bot entry script (Python 2 — note the print statements).
channels = ["#botters"]
a = IRCFunctions()
a.connect(("irc.freenode.net",6667))
a.user['nick'] = "b00tii"
a.loop()
#a.getBuffer()
#a.pingPong()
print a.buffer
a.identify()
while True:
    print a.buffer
    # 001 = RPL_WELCOME: registration finished, safe to join channels
    if a.findBuffer('001'):
        for channel in channels:
            a.join(channel)
    # if a.buffer.startswith('~'):
    #     a.message('#1ntrusion', a.buffer[1].replace('~',''))
    a.loop()
|
{"/testing/proto/irc/events.py": ["/base/buffer.py"]}
|
25,451
|
zc00gii/b00tii
|
refs/heads/master
|
/b00tii.py
|
import base.server
import base.buffer
import base.module
import proto.irc
from proto.irc import IRCFunctions
channels = ["#offtopic"]
a = IRCFunctions()
a.connect(("irc.ninthbit.net",6667))
a.user['nick'] = "b00tii"
a.identify()
a.loop()
#a.getBuffer()
#a.pingPong()
print a._buffer
while True:
print a._buffer
if "001" or "042" in a._buffer[0]:
for channel in channels:
a.join(channel)
# a.getBuffer()
# a.pingPong()
a.loop()
|
{"/testing/proto/irc/events.py": ["/base/buffer.py"]}
|
25,452
|
zc00gii/b00tii
|
refs/heads/master
|
/proto/irc.py
|
import socket
import base.server
import base.buffer
from base.server import Server
from base.buffer import Buffer
class IRCFunctions(Server,Buffer):
user = {"name" : "b00ti", "ident" : "b00tii",\
"pass": "secret", "nick" : "b00tii"}
# contains nick, (real)name,
# ident, and pass(NickServ)
server = dict() # contains addr(ess) and port
def sendraw(self, whatToSend):
print "SENDING: " + whatToSend
self.send(whatToSend + "\r\n")
def message(self, recvr, message):
self.sendraw("PRIVMSG " + recvr + " :" + message)
def identify(self, ident = "b00tii", name = "b00tii",):
self.sendraw("USER " + self.user["ident"] + " * * : " + self.user["name"])
self.sendraw("NICK " + self.user["nick"])
def nick(self, nick = user["nick"]):
if nick != self.user["nick"]:
self.user["nick"] = nick
self.sendraw("NICK " + self.user["nick"])
def pingPong(self):
if self._buffer[0].startswith("PING"):
self.sendraw("PONG " + self._buffer[0][6:])
#for thing in self._buffer:
# print ">>>> " + thing
def ctcp(self, recvr, message, upper = True):
if upper == True:
self.message(recvr, "\001" + message.upper() + "\001")
elif upper == False:
self.message(recvr, "\001" + message + "\001")
def action(self, recvr, message):
self.ctcp(recvr, "ACTION " + message, False)
def join(self, channel):
self.sendraw("JOIN " + channel)
def part(self, channel, reason = ""):
self.sendraw("PART " + channel + " :" + reason)
def kick(self, channel, usr, reason=""):
self.sendraw("KICK ", channel + " " + usr + " :" + reason)
def mode(self, channel, mode, args):
self.sendraw("MODE ", channel + " " + mode + " " + args)
def topic(self, channel, tpc):
self.sendraw("TOPIC " + channel + " :" + tpc)
def loop(self):
try:
self.getBuffer()
self.pingPong()
except (KeyboardInterrupt, socket.error):
self.close()
|
{"/testing/proto/irc/events.py": ["/base/buffer.py"]}
|
25,453
|
zc00gii/b00tii
|
refs/heads/master
|
/testing/base/events.py
|
def doNothing():
    """Default event callback: deliberately does nothing."""
    return None
class EventHandler:
    """Registry of named Events; fires each event whose predicate holds."""
    events = {}  # name -> Event (class-level, shared by all handlers)

    def hookEvent(self, name, onWhat, doFunction = doNothing):
        """Register an Event under *name* with predicate and callback."""
        self.events[name] = Event(onWhat, doFunction)

    def unhookEvent(self, name):
        """Remove the event registered under *name*."""
        del self.events[name]

    def rehookEvent(self, name, onWhat, doFunction = doNothing):
        """Replace the event registered under *name*."""
        del self.events[name]
        self.events[name] = Event(onWhat, doFunction)

    def handleEvents(self):
        """Invoke the callback of every event whose predicate returns true."""
        for name in self.events.keys():
            if not self.events[name].when():
                continue
            self.events[name].function()
class Event:
    """Pairs a predicate (`when`) with a callback (`function`)."""
    # Class-level placeholders; both are replaced per-instance in __init__.
    def when(): pass
    def function(): pass
    def __init__(self, onWhat, doFunction=doNothing):
        """Bind *onWhat* as the predicate and *doFunction* as the callback."""
        self.function = doFunction
        self.when = onWhat
|
{"/testing/proto/irc/events.py": ["/base/buffer.py"]}
|
25,454
|
zc00gii/b00tii
|
refs/heads/master
|
/testing/base/buffer.py
|
import socket
class Buffer():
    """Line-buffer mixin over a socket-like object providing recv()/close().

    NOTE(review): `buffer` and `extra` are class-level mutables, shared by
    every instance -- fine for a single connection, confirm otherwise.
    """
    buffer = []
    extra = ['']
    def readBuffer(self):
        """Read from the socket and return the first complete line.

        All lines from the read (including a possibly-partial trailer) are
        queued onto self.extra; returns '' when the read fails.
        """
        try:
            line = ''
            data = self.extra[0] + self.recv(1024)
#            data = data.replace('\r', "")
            self.extra.pop(0)
            for x in range(len(data.split('\n'))):
                self.extra.append(data.split('\n')[x] + '\n')
            line = data.split('\n')[0] + '\n'
        except socket.error:
            # bug fix: was `self.close` -- the method object was evaluated
            # but never called, so the socket stayed open on error
            self.close()
        return line
    def findBuffer(self, line):
        """Return True when the current buffer starts with *line*."""
        if self.buffer.find(line) == 0:
            return True
        return False
    def getBuffer(self):
        """Refresh self.buffer with the next line from the socket."""
        self.buffer = self.readBuffer()
|
{"/testing/proto/irc/events.py": ["/base/buffer.py"]}
|
25,455
|
Chen-Han/vpn_bot
|
refs/heads/master
|
/vpn_bot/shadowsocks/api.py
|
import socket
import random
socket.setdefaulttimeout(2)
class ServiceOpenException(Exception):
    """Raised when a shadowsocks service/port cannot be opened."""

    def __init__(self, msg):
        """Store *msg* as the standard exception message."""
        super().__init__(msg)
def get_sock():
    """Return a datagram UNIX socket connected to the shadowsocks manager.

    The socket is bound to a randomly named file under /tmp so the manager
    has an address to reply to.
    """
    sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
    suffix = ''.join([chr(65 + random.randint(0, 25)) for _ in range(16)])
    local_path = '/tmp/vpn_bot' + suffix + '.sock'
    sock.bind(local_path)
    sock.connect('/run/shadowsocks-manager.sock')
    return sock
def send_data(sock):
    """Placeholder: sending raw data to the manager is not implemented."""
    return None
def ping():
    """Placeholder: manager liveness check is not implemented."""
    return None
def open_port(port, password):
    """Ask the shadowsocks manager to open *port* with *password*.

    Returns True on an 'ok' reply, None otherwise.
    Raises ServiceOpenException when *password* is not numeric.
    """
    # Validate before opening the socket: the original connected first, so a
    # bad password leaked a bound /tmp socket.  Narrowed from a bare except.
    try:
        int(password)
    except (TypeError, ValueError):
        raise ServiceOpenException('invalid password ' + str(password))
    s = get_sock()
    data = 'add: {"server_port":%s,"password":"%s"}'\
        % (port, password)
    data = bytearray(data.encode('ascii'))
    s.send(data)
    response = s.recv(1024)
    s.close()
    if response != b'ok':
        return None
    return True
def close_port(port):
    """Ask the manager to remove *port*; True on 'ok', None otherwise."""
    sock = get_sock()
    payload = bytearray(('remove: {"server_port":%s}' % (port)).encode('ascii'))
    sock.send(payload)
    reply = sock.recv(1024)
    sock.close()
    return True if reply == b'ok' else None
def open_services():
    """Open a shadowsocks service on a random port with a random password.

    Retries with a fresh random port on failure; returns (port, password).
    Raises ServiceOpenException after five failed attempts.

    Bug fix: the original while-loop condition evaluated open_port() once
    more after the retry budget was spent, and then raised even when that
    extra call succeeded -- leaking an open port.
    """
    password = ''.join([str(random.randint(0, 9)) for _ in range(6)])
    for _attempt in range(5):
        port = str(random.randint(50000, 60000))
        if open_port(port, password):
            return (port, password)
    raise ServiceOpenException('port cannot be opened')
def close_services(ports):
    """Close one port or an iterable of ports on the manager.

    Accepts a single int/str for convenience; isinstance also covers
    subclasses, unlike the original `type(x) ==` comparison.
    """
    if isinstance(ports, (int, str)):
        ports = [ports]
    for port in ports:
        close_port(port)
|
{"/vpn_bot/management/commands/start_active_vpn.py": ["/vpn_bot/models.py", "/vpn_bot/shadowsocks/api.py"], "/vpn_bot/management/commands/debug.py": ["/vpn_bot/models.py"], "/vpn_bot/management/commands/cronjob.py": ["/vpn_bot/models.py", "/vpn_bot/shadowsocks/api.py", "/vpn_bot/wxbot/api.py"], "/vpn_bot/wxbot/bot_app.py": ["/vpn_bot/models.py", "/vpn_bot/shadowsocks/api.py", "/vpn_bot/wxbot/api.py"]}
|
25,456
|
Chen-Han/vpn_bot
|
refs/heads/master
|
/vpn_bot/wxbot/api.py
|
# coding: utf-8
# provide socket api for other process to communicate with bot
import subprocess
import multiprocessing
import collections
import logging
from threading import Thread
SOCKET_FILE = '/run/vpn_bot.sock'
# NOTE(review): multiprocessing.connection expects a plain filesystem path
# for AF_UNIX addresses; the 'unix://' prefix here looks wrong -- confirm.
UNIX_SOCK = 'unix://'+SOCKET_FILE
# Envelope passed between processes: target wechat user + message text.
Wechat_msg = collections.namedtuple('Wechat_message',['wechat_id','msg'])
# TODO, add authentication
class Bot_api(object):
    """Client side: lets another process ask the bot to send a message."""
    def __init__(self):
        # bug fix: multiprocessing has no top-level Client -- it lives in
        # multiprocessing.connection, and an AF_UNIX address is the plain
        # socket path (not a 'unix://' URL).
        from multiprocessing.connection import Client
        self.client = Client(SOCKET_FILE, 'AF_UNIX')
    def send_msg(self, wechat_id, msg):
        """Queue a message for *wechat_id* via the bot server."""
        self.client.send(Wechat_msg(wechat_id,msg))
class Bot_server(object):
    '''
    listens for request from other processes and perform
    corresponding bot actions
    '''
    def __init__(self):
        # remove stale socket file so a new one can be created.
        # bug fix: subprocess.run('rm ' + path) treats the whole string as
        # the program name (shell=False) and can never execute; pass argv.
        subprocess.run(['rm', '-f', SOCKET_FILE])
        # bug fix: Listener lives in multiprocessing.connection, and the
        # AF_UNIX address is a plain path, not a 'unix://' URL.
        from multiprocessing.connection import Listener
        self.listener = Listener(SOCKET_FILE, 'AF_UNIX')
        self.listening_thread = None
    def register_msg_handler(self, func):
        """Register the callable invoked as func(wechat_id, msg)."""
        self._msg_handler = func
    def start_listening(self):
        # bug fix: bound methods already carry self; the original passed
        # args=(self) (not even a tuple), which raised at call time.
        self.listening_thread = Thread(target=self._conn_handler)
        self.listening_thread.start()
    def _conn_handler(self):
        """Accept loop: one handler thread per incoming connection."""
        while True:
            conn = self.listener.accept()
            # bug fix: args must be a tuple of the EXTRA arguments only.
            t = Thread(target=self._event_handler, args=(conn,))
            t.start()
    def _event_handler(self, conn):
        """Per-connection loop: dispatch received Wechat_msg objects."""
        while True:
            data = None
            try:
                data = conn.recv()
            except (EOFError, OSError):  # narrowed from a bare except
                logging.debug('connection closed: %s', conn)
                return
            if type(data) == Wechat_msg:
                try:
                    self._msg_handler(data.wechat_id, data.msg)
                except Exception:  # narrowed from a bare except
                    logging.error(\
                        'Error handling wechat message send for'\
                        ' user {}, msg: {}'.format(data.wechat_id, data.msg))
            else:
                # bug fix: logging calls take %-style args, not a trailing
                # positional object
                logging.error(
                    'Unexpected object received, wechat_msg expected: %s', data)
|
{"/vpn_bot/management/commands/start_active_vpn.py": ["/vpn_bot/models.py", "/vpn_bot/shadowsocks/api.py"], "/vpn_bot/management/commands/debug.py": ["/vpn_bot/models.py"], "/vpn_bot/management/commands/cronjob.py": ["/vpn_bot/models.py", "/vpn_bot/shadowsocks/api.py", "/vpn_bot/wxbot/api.py"], "/vpn_bot/wxbot/bot_app.py": ["/vpn_bot/models.py", "/vpn_bot/shadowsocks/api.py", "/vpn_bot/wxbot/api.py"]}
|
25,457
|
Chen-Han/vpn_bot
|
refs/heads/master
|
/vpn_bot/management/commands/start_active_vpn.py
|
from django.core.management.base import BaseCommand, CommandError
import vpn_bot.models as models
from vpn_bot.shadowsocks.api import open_port
class Command(BaseCommand):
    """Re-open the shadowsocks port of every active VPN service."""
    def add_arguments(self, parser):
        # no CLI arguments for this command
        pass
    def handle(self, *args, **options):
        """Iterate the active services and re-open each one's port."""
        for service in models.VPN_service.objects.filter(is_active=1):
            open_port(service.port, service.password)
|
{"/vpn_bot/management/commands/start_active_vpn.py": ["/vpn_bot/models.py", "/vpn_bot/shadowsocks/api.py"], "/vpn_bot/management/commands/debug.py": ["/vpn_bot/models.py"], "/vpn_bot/management/commands/cronjob.py": ["/vpn_bot/models.py", "/vpn_bot/shadowsocks/api.py", "/vpn_bot/wxbot/api.py"], "/vpn_bot/wxbot/bot_app.py": ["/vpn_bot/models.py", "/vpn_bot/shadowsocks/api.py", "/vpn_bot/wxbot/api.py"]}
|
25,458
|
Chen-Han/vpn_bot
|
refs/heads/master
|
/vpn_bot/management/commands/debug.py
|
from django.core.management.base import BaseCommand, CommandError
import vpn_bot.models as models
from IPython import embed
from decimal import Decimal
import datetime
def create_expiring_service():
    """Create a test customer/order plus a VPN service that expired a minute ago.

    Bug fixes vs the original: Order/VPN_service were referenced without the
    `models.` prefix (NameError); get_or_create returns an (object, created)
    tuple that was passed whole as customer_id; order_id was given the Order
    CLASS instead of the created instance; VPN_service.create lacked
    `.objects`.  Returns the created VPN_service.
    """
    customer, _created = models.Customer.objects.get_or_create(wechat_id='@123456', name='test')
    order = models.Order.objects.create(state=models.Order.COMPLETED,\
            payment_code='12345',\
            payment_value=Decimal('1.0'),
            transaction_id='a12345678',
            transaction_type=models.Order.WECHAT,
            customer_id=customer,
            item_type=models.Order.ONE_WEEK)
    seven_days_ago = datetime.datetime.now() - datetime.timedelta(days=7, minutes=1)
    one_min_ago = seven_days_ago + datetime.timedelta(days=7)
    vpn_service = models.VPN_service.objects.create(is_active=1, ip='0.0.0.0',
        port=1234, password='7383', order_id=order, start_time=seven_days_ago,
        expire_on=one_min_ago)
    return vpn_service
class Command(BaseCommand):
    """Open an interactive IPython shell with the models module imported."""
    help = 'scrape pages'
    def add_arguments(self, parser):
        # parser.add_argument('poll_id', nargs='+', type=int)
        pass
    def handle(self, *args, **options):
        # Drop into IPython for manual debugging.
        embed()
|
{"/vpn_bot/management/commands/start_active_vpn.py": ["/vpn_bot/models.py", "/vpn_bot/shadowsocks/api.py"], "/vpn_bot/management/commands/debug.py": ["/vpn_bot/models.py"], "/vpn_bot/management/commands/cronjob.py": ["/vpn_bot/models.py", "/vpn_bot/shadowsocks/api.py", "/vpn_bot/wxbot/api.py"], "/vpn_bot/wxbot/bot_app.py": ["/vpn_bot/models.py", "/vpn_bot/shadowsocks/api.py", "/vpn_bot/wxbot/api.py"]}
|
25,459
|
Chen-Han/vpn_bot
|
refs/heads/master
|
/vpn_bot/management/commands/cronjob.py
|
# coding: utf-8
from django.core.management.base import BaseCommand
from vpn_bot.models import Order, VPN_service
from vpn_bot.shadowsocks.api import ping, close_port
from vpn_bot.wxbot.api import Bot_api
import datetime
import logging
def notify_customer(bot_api, customer):
    """Tell *customer* over wechat that their service just expired."""
    expiry_notice = u'你好,你的服务刚刚过期,需要续费的话可以回复【购买】哦'
    bot_api.send_msg(customer.wechat_id, expiry_notice)
def expire_pending_orders():
    """Mark every PENDING order whose payment code is >10 minutes old EXPIRED."""
    cutoff = datetime.datetime.now() - datetime.timedelta(minutes=10)
    stale_orders = Order.objects.filter(state=Order.PENDING, \
        payment_code_issued_at__lte=cutoff)
    for stale in stale_orders:
        stale.state = Order.EXPIRED
        stale.save()
def expire_vpn_services(bot_api):
    """Close, deactivate and notify the customer for every expired service."""
    now = datetime.datetime.now()
    expired_vpn_services = list(VPN_service.objects.filter(expire_on__lte=now, is_active=1))
    for expired_service in expired_vpn_services:
        customer_name = expired_service.order_id.customer_id.name
        customer_id = expired_service.order_id.customer_id.wechat_id
        # bug fix: the original passed name/id in the wrong order for the
        # 'for customer {} / with name {}' placeholders
        logging.info('Found expired service with id {}, for customer {}, '\
            'with name {}, closing vpn service at port {}'.format(
                expired_service.id,
                customer_id,
                customer_name,
                expired_service.port))
        close_port(expired_service.port)
        logging.info('port {} closed successfully, notifying customer'\
            .format(expired_service.port))
        expired_service.is_active = 0
        expired_service.save()
        customer = expired_service.order_id.customer_id
        notify_customer(bot_api, customer)
        logging.info('Notified customer {}'.format(customer_name))
class Command(BaseCommand):
    """Periodic maintenance: expire stale orders and shut down expired VPNs."""
    def add_arguments(self, parser):
        # no CLI arguments for this command
        pass
    def handle(self, *args, **options):
        bot_api = Bot_api()
        ping()  # NOTE(review): ping() is a no-op stub in shadowsocks.api
        expire_pending_orders()
        expire_vpn_services(bot_api)
|
{"/vpn_bot/management/commands/start_active_vpn.py": ["/vpn_bot/models.py", "/vpn_bot/shadowsocks/api.py"], "/vpn_bot/management/commands/debug.py": ["/vpn_bot/models.py"], "/vpn_bot/management/commands/cronjob.py": ["/vpn_bot/models.py", "/vpn_bot/shadowsocks/api.py", "/vpn_bot/wxbot/api.py"], "/vpn_bot/wxbot/bot_app.py": ["/vpn_bot/models.py", "/vpn_bot/shadowsocks/api.py", "/vpn_bot/wxbot/api.py"]}
|
25,460
|
Chen-Han/vpn_bot
|
refs/heads/master
|
/vpn_bot/migrations/0001_initial.py
|
# Generated by Django 2.1 on 2018-08-25 09:17
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema: Customer, Dialog, issue, Order and VPN_service tables."""
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Customer',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('wechat_id', models.CharField(max_length=255)),
            ],
        ),
        migrations.CreateModel(
            name='Dialog',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('state', models.CharField(choices=[('ACTIVE', 'ACTIVE'), ('SLEEP', 'SLEEP')], max_length=64)),
                ('update_time', models.DateTimeField(auto_now=True)),
                ('customer_id', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='vpn_bot.Customer')),
            ],
        ),
        migrations.CreateModel(
            name='issue',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('state', models.CharField(choices=[('OPEN', 'OPEN'), ('CLOSED', 'CLOSED')], default='OPEN', max_length=10)),
                ('payment_id', models.CharField(max_length=255, null=True)),
                ('additional_info', models.TextField(null=True)),
                ('customer_id', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='vpn_bot.Customer')),
            ],
        ),
        migrations.CreateModel(
            name='Order',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('state', models.CharField(choices=[('PENDING', 'PENDING'), ('EXPIRED', 'EXPIRED'), ('COMPLETED', 'COMPLETED')], max_length=60)),
                ('payment_code', models.CharField(max_length=10, null=True)),
                ('payment_value', models.DecimalField(decimal_places=4, max_digits=28, null=True)),
                ('transaction_id', models.CharField(max_length=255, null=True)),
                ('transaction_type', models.CharField(choices=[('WECHAT', 'WECHAT')], max_length=64, null=True)),
                ('item_type', models.CharField(choices=[('ONE_WEEK', 'ONE_WEEK'), ('ONE_MONTH', 'ONE_MONTH'), ('THREE_MONTH', 'THREE_MONTH')], max_length=64, null=True)),
                ('comment', models.TextField()),
                ('customer_id', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='vpn_bot.Customer')),
            ],
        ),
        migrations.CreateModel(
            name='VPN_service',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('start_time', models.DateTimeField()),
                ('expire_on', models.DateTimeField()),
                ('ip', models.CharField(max_length=255)),
                ('port', models.CharField(max_length=10)),
                ('password', models.CharField(max_length=64)),
                ('order_id', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='vpn_bot.Order')),
            ],
        ),
        # Added separately to break the issue <-> Order circular reference.
        migrations.AddField(
            model_name='issue',
            name='order_id',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='vpn_bot.Order'),
        ),
    ]
|
{"/vpn_bot/management/commands/start_active_vpn.py": ["/vpn_bot/models.py", "/vpn_bot/shadowsocks/api.py"], "/vpn_bot/management/commands/debug.py": ["/vpn_bot/models.py"], "/vpn_bot/management/commands/cronjob.py": ["/vpn_bot/models.py", "/vpn_bot/shadowsocks/api.py", "/vpn_bot/wxbot/api.py"], "/vpn_bot/wxbot/bot_app.py": ["/vpn_bot/models.py", "/vpn_bot/shadowsocks/api.py", "/vpn_bot/wxbot/api.py"]}
|
25,461
|
Chen-Han/vpn_bot
|
refs/heads/master
|
/vpn_bot/wxbot/bot_app.py
|
#!/usr/bin/env python
# coding: utf-8
import wxpy
import logging
from decimal import Decimal
import sys
import random
import socket
import datetime
sys.path.append("..")
from vpn_bot.models import Order, Customer, VPN_service
import vpn_bot.shadowsocks.api as api
from vpn_bot.wxbot.api import Bot_server
logging.basicConfig(level=logging.INFO)
# NOTE(review): this handler is created but never attached with
# logging.getLogger().addHandler(ch) -- confirm whether it is needed.
ch = logging.StreamHandler(sys.stdout)
# Marker stored in Order.comment to grant a first-purchase bonus week.
NEW_CUSTOMER_ONE_WEEK_DISCOUNT = 'new customer one week discount'
# Public address of the VPN server handed out to customers.
IP = '35.236.145.194'
def find_pending_order(payment_code):
    """Return the single PENDING order carrying *payment_code*, or None.

    When several pending orders share the code, logs a warning and returns
    the first.
    """
    orders = Order.objects.filter(state=Order.PENDING, payment_code=payment_code)
    if len(orders) > 1:
        # logging.warn is a deprecated alias of logging.warning
        logging.warning('For payment_code {}, found more than 1 order.'\
            '\n Order ids: {}'
            .format(payment_code, ','.join([str(o.id) for o in orders])))
    if len(orders) == 0:
        return None
    return orders[0]
def match_item_type_by_value(value):
    """Map an exact payment amount to an Order item type; None if unknown."""
    if value == Decimal('3.00'):
        return Order.ONE_WEEK
    if value == Decimal('10.00'):
        return Order.ONE_MONTH
    if value == Decimal('30.00'):
        return Order.THREE_MONTH
    return None
def complete_order(order, payment_value, item_type, msg):
    """Open a VPN service for a paid order and mark the order COMPLETED.

    Returns the created VPN_service row.
    """
    logging.info('Completing item order of {}, amount paid: {}'\
        .format(item_type, payment_value))
    port, password = api.open_services()
    today = datetime.datetime.now()
    # service duration in days by purchased item
    days = 0
    if item_type == Order.ONE_WEEK:
        days = 7
    elif item_type == Order.ONE_MONTH:
        days=30
    elif item_type == Order.THREE_MONTH:
        days=90
    # first-time buyers get one bonus week
    if order.comment == NEW_CUSTOMER_ONE_WEEK_DISCOUNT:
        days += 7
    expire_on = today + datetime.timedelta(days=days)
    vpn_service = VPN_service.objects.create(order_id=order,\
        start_time=today, expire_on=expire_on, ip=IP,\
        port=port, password=password, is_active=1)
    # bug fix: the log line referenced an undefined local `ip` (NameError at
    # runtime); the service is created with the module constant IP
    logging.info('Created service at {}:{}, pass: {}; expiring_on: {}'\
        .format(IP, port, password, expire_on))
    order.state = Order.COMPLETED
    order.transaction_id = str(msg.id)
    order.transaction_type = Order.WECHAT
    order.item_type = item_type
    order.payment_value = payment_value
    order.save()
    return vpn_service
def generate_pending_order(customer):
    """Create a PENDING order for *customer* with a fresh 6-digit payment code.

    First-time customers get the new-customer discount marker in the comment.
    """
    code = ''.join([str(random.randint(0, 9)) for _ in range(6)])
    has_completed_order = Order.objects.filter(state=Order.COMPLETED,\
        customer_id=customer).count() > 0
    note = None if has_completed_order else NEW_CUSTOMER_ONE_WEEK_DISCOUNT
    return Order.objects.create(state=Order.PENDING,\
        customer_id=customer, payment_code=code,\
        comment=note)
def reply_with_service_info(customer, bot, vpn_service):
    """Send the customer their server address, credentials and expiry time."""
    ip = vpn_service.ip
    port = vpn_service.port
    password = vpn_service.password
    expire_on = vpn_service.expire_on.strftime('%Y-%m-%d %H:%M %p')
    receiver = wxpy.ensure_one(bot.friends()\
        .search(user_name=customer.wechat_id))
    receiver.send(u'台湾服务器:{ip},端口:{port},密码:{password};'\
        '到期时间:{expire_on}'
        .format(ip=ip, port=port, password=password, expire_on=expire_on))
def send_purchase_info(msg):
    """Reply to *msg* with the sales pitch and purchase instructions."""
    pitch = (u'你好我是VPN小助手,台服VPN,3块钱一个星期,'
             '10块钱一个月,第一次买的话有一个星期免费,请问需要购买吗?回复【购买】')
    msg.reply(pitch)
def send_order_info_and_payment_code(msg, payment_code):
    """Reply with payment instructions carrying *payment_code* as the memo."""
    instructions = (u'上面是收款码,请在五分钟之内付款哦,'
                    '付款的时候填写备注{},要不然系统不知道这是你付的')
    msg.reply(instructions.format(payment_code))
def send_order_still_pending(msg, order):
    """Remind the customer their existing payment code is still usable."""
    reminder = u'你的支付码{}还可以继续使用哦,请扫二维码完成支付'
    msg.reply(reminder.format(order.payment_code))
class VPN_bot(object):
    """Wechat bot: sells VPN services and completes orders on payment."""
    def __init__(self):
        """Log in, locate the payment account and developer, start listeners."""
        print('Please log in by scanning QR code')
        self.bot = wxpy.Bot(cache_path=True,console_qr=True)
        self.bot.enable_puid()
        self.payment_mp = wxpy.ensure_one(self.bot.mps().search(u'微信支付'))
        self.developer = self.bot.friends().search('Han Chen')[0]
        logging.info('setting developer to: {}, with wechat id: {}'\
            .format(self.developer.name, self.developer.user_name))
        self._register_wechat_listeners()
        logging.debug('registered wechat listeners')
        self._start_bot_server()
        logging.debug('started bot server')
    def start(self):
        """Block until the bot logs out."""
        self.bot.join()
    def notify_developer(self, payment_id=None, additional_info=None):
        """Forward an issue description to the developer's wechat."""
        import datetime
        self.developer.send('At {time}, there is an issue: {info}'\
            .format(time=str(datetime.datetime.now()), info=additional_info))
    def _start_bot_server(self):
        '''
        handles request from other processes
        '''
        self.bot_server = Bot_server()
        def handle_msg_send(wechat_id, msg):
            # Resolve the wechat user and deliver the requested message.
            user = wxpy.ensure_one(self.bot.friends().search(user_name=wechat_id))
            user.send(msg)
        self.bot_server.register_msg_handler(handle_msg_send)
        self.bot_server.start_listening()
    def _register_wechat_listeners(self):
        @self.bot.register()
        def print_others(msg):
            """Handle direct friend messages: sales flow + support requests."""
            print(str(msg))
            print(msg.sender)
            if type(msg.sender) != wxpy.Friend:
                return
            customer = None
            wechat_id = msg.sender.user_name
            is_new_customer = False
            try:
                customer = Customer.objects.get(wechat_id=wechat_id)
            except(Customer.DoesNotExist) as e:
                # adding new customer
                customer = Customer.objects.create(wechat_id=wechat_id)
                is_new_customer = True
            if is_new_customer:
                send_purchase_info(msg)
                return
            pending_orders = Order.objects.filter(
                state=Order.PENDING, customer_id=customer)
            print(pending_orders)
            if len(pending_orders) > 1:
                logging.warn(customer, \
                    'Customer with {} and wechat_id {} had more than one pending order'\
                    .format(customer.id, customer.wechat_id))
            pending_order_exists = len(pending_orders) > 0
            if msg.text == '购买' and not pending_order_exists:
                order = generate_pending_order(customer)
                payment_code = order.payment_code
                send_order_info_and_payment_code(msg, payment_code)
                return
            elif msg.text == '购买' and pending_order_exists:
                order = pending_orders[0]
                send_order_still_pending(msg, order)
                return
            elif pending_order_exists:
                msg.reply(u'请问付款的时候是不是出问题了,'\
                    '可以回复【客服】联系客服,会尽快回复你的')
            elif msg.text == '客服':
                self.notify_developer()
                msg.reply(u'已经在联系客服了,请你稍等哦')
                return
            return
        # bug fix: wxpy invokes a registered callback as f(msg); the original
        # signature (self, msg) made `self` receive the message and raised a
        # TypeError for the missing second argument.  `self` is still
        # available here through the enclosing method's closure.
        @self.bot.register(chats=self.payment_mp, msg_types=wxpy.SHARING)
        def on_receive_pay_msg(msg):
            """Handle payment-notification shares from the wechat-pay account."""
            def find_payment_code(txt):
                # memo digits the customer attached to the payment
                import re
                matching = re.search('<!\[CDATA\[收款金额:¥.+\n付款方备注:(\d{4,6})',txt)
                if matching:
                    return matching.group(1)
                return None
            def find_payment_val(txt):
                # amount actually received
                import re
                matching = re.search('<!\[CDATA\[收款金额:¥(\d+.\d+)',txt)
                if matching:
                    return matching.group(1)
                return None
            content = msg.raw['Content']
            logging.info('Received new payment info: {}'.format(msg.text))
            payment_code = find_payment_code(content)
            value_raw = find_payment_val(content)
            logging.info('Found payment_code {}, value {}'.format(payment_code, value_raw))
            if not value_raw:
                # not a payment message
                return
            order = find_pending_order(payment_code)
            value = Decimal(value_raw)
            item_type = match_item_type_by_value(value)
            if value_raw and (not order or (not item_type)):
                # payment without code or with wrong code, might be a customer error
                additional_info = 'For payment of value {},'.format(value_raw)
                if not order:
                    additional_info += ' order not found for this; '
                elif not item_type:
                    additional_info += ' cannot match an item based on this value; '
                additional_info += 'additional information: ' + str(msg.raw)
                self.notify_developer(additional_info=additional_info)
                return
            # now proceed with transaction
            vpn_service = complete_order(order, value, item_type, msg)
            reply_with_service_info(order.customer_id, self.bot, vpn_service)
|
{"/vpn_bot/management/commands/start_active_vpn.py": ["/vpn_bot/models.py", "/vpn_bot/shadowsocks/api.py"], "/vpn_bot/management/commands/debug.py": ["/vpn_bot/models.py"], "/vpn_bot/management/commands/cronjob.py": ["/vpn_bot/models.py", "/vpn_bot/shadowsocks/api.py", "/vpn_bot/wxbot/api.py"], "/vpn_bot/wxbot/bot_app.py": ["/vpn_bot/models.py", "/vpn_bot/shadowsocks/api.py", "/vpn_bot/wxbot/api.py"]}
|
25,462
|
Chen-Han/vpn_bot
|
refs/heads/master
|
/vpn_bot/models.py
|
from django.db import models
'''
All VPN models are here
'''
class Customer(models.Model):
    """A wechat user who can place orders."""
    wechat_id = models.CharField(max_length=255)  # wxpy user_name identifier
    name = models.CharField(max_length=255)
class Order(models.Model):
    """A purchase attempt: PENDING until paid, then COMPLETED or EXPIRED."""
    PENDING = 'PENDING'
    EXPIRED = 'EXPIRED'
    COMPLETED = 'COMPLETED'
    STATE_CHOICES = ((PENDING, PENDING), (EXPIRED,EXPIRED),(COMPLETED,COMPLETED))
    state = models.CharField(max_length=60, choices = STATE_CHOICES)
    # assume that payment_code is issued at object creation time
    # NOTE(review): auto_now refreshes on EVERY save(), not only creation;
    # auto_now_add would match the comment above -- confirm intent.
    payment_code_issued_at = models.DateTimeField(auto_now=True)
    # 6-digit memo the customer attaches to the wechat payment
    payment_code = models.CharField(max_length=10, null=True)
    payment_value = models.DecimalField(max_digits=28, decimal_places=4, null=True)
    transaction_id = models.CharField(max_length=255, null=True)
    WECHAT = 'WECHAT'
    transaction_type_choices = [(WECHAT, WECHAT)]
    transaction_type = models.CharField(max_length=64, choices=transaction_type_choices, null=True)
    customer_id = models.ForeignKey(Customer, null=True, on_delete=models.SET_NULL)
    ONE_WEEK = 'ONE_WEEK'
    ONE_MONTH = 'ONE_MONTH'
    THREE_MONTH = 'THREE_MONTH'
    ITEM_CHOICES = ((ONE_WEEK,ONE_WEEK),(ONE_MONTH,ONE_MONTH),(THREE_MONTH,THREE_MONTH))
    item_type = models.CharField(max_length=64, choices=ITEM_CHOICES)
    # also stores the new-customer discount marker (see wxbot.bot_app)
    comment = models.TextField(null=True)
class Dialog(models.Model):
    """Conversation state per customer (ACTIVE or SLEEP)."""
    customer_id = models.ForeignKey(Customer, on_delete=models.SET_NULL, null=True)
    ACTIVE = 'ACTIVE'
    SLEEP = 'SLEEP'
    STATE_CHOICES = ((ACTIVE, ACTIVE),(SLEEP,SLEEP))
    state = models.CharField(max_length=64,choices=STATE_CHOICES)
    # timestamp when object is updated
    update_time = models.DateTimeField(auto_now=True)
class VPN_service(models.Model):
    """A provisioned shadowsocks endpoint tied to a completed Order."""
    start_time = models.DateTimeField()
    order_id = models.ForeignKey(Order, null=True, on_delete=models.SET_NULL)
    expire_on = models.DateTimeField()
    is_active = models.SmallIntegerField()  # 1 = running, 0 = shut down
    ip = models.CharField(max_length=255)
    port=models.CharField(max_length=10)
    password=models.CharField(max_length=64)
class issue(models.Model):
    """A support/problem ticket, optionally tied to an order or payment."""
    OPEN = 'OPEN'
    CLOSED = 'CLOSED'
    STATE_CHOICES = ((OPEN,OPEN),(CLOSED,CLOSED))
    state = models.CharField(max_length=10, choices=STATE_CHOICES, default=OPEN)
    order_id = models.ForeignKey(Order, null=True, on_delete=models.SET_NULL)
    customer_id = models.ForeignKey(Customer, null=True, on_delete=models.SET_NULL)
    payment_id = models.CharField(max_length=255, null=True)
    additional_info = models.TextField(null=True)
|
{"/vpn_bot/management/commands/start_active_vpn.py": ["/vpn_bot/models.py", "/vpn_bot/shadowsocks/api.py"], "/vpn_bot/management/commands/debug.py": ["/vpn_bot/models.py"], "/vpn_bot/management/commands/cronjob.py": ["/vpn_bot/models.py", "/vpn_bot/shadowsocks/api.py", "/vpn_bot/wxbot/api.py"], "/vpn_bot/wxbot/bot_app.py": ["/vpn_bot/models.py", "/vpn_bot/shadowsocks/api.py", "/vpn_bot/wxbot/api.py"]}
|
25,474
|
vb64/pdftoc
|
refs/heads/main
|
/tests/test/__init__.py
|
"""Root class for testing."""
from unittest import TestCase
class TestBase(TestCase):
    """Base class for tests."""
    def setUp(self):
        """Init tests."""
        TestCase.setUp(self)
        # Default CLI options parsed from an empty argv.
        from source.cli import PARSER
        self.options, _args = PARSER.parse_args(args=[])
|
{"/tests/test/__init__.py": ["/source/cli.py"], "/tests/test/test_console.py": ["/tests/test/__init__.py", "/source/cli.py"]}
|
25,475
|
vb64/pdftoc
|
refs/heads/main
|
/tests/test/test_console.py
|
# coding: utf-8
"""Console client stuff.
make test T=test_console.py
"""
import os
from . import TestBase
class TestConsole(TestBase):
    """Tests console client."""
    def test_noargs(self):
        """Call without args."""
        from source.cli import main
        # No toc file given -> usage printed, exit code 1.
        assert main([], self.options) == 1
    def test_args(self):
        """Call with arg."""
        from source.cli import main
        assert main([os.path.join('source', 'toc.json')], self.options) == 0
    def _test_nodirs(self):
        """Target without dirs."""
        # NOTE: leading underscore -- pytest skips this test; intentional?
        from source.cli import main
        assert main([os.path.join('fixtures', 'nodir.json')], self.options) == 0
|
{"/tests/test/__init__.py": ["/source/cli.py"], "/tests/test/test_console.py": ["/tests/test/__init__.py", "/source/cli.py"]}
|
25,476
|
vb64/pdftoc
|
refs/heads/main
|
/source/__init__.py
|
"""Need for test suite."""
|
{"/tests/test/__init__.py": ["/source/cli.py"], "/tests/test/test_console.py": ["/tests/test/__init__.py", "/source/cli.py"]}
|
25,477
|
vb64/pdftoc
|
refs/heads/main
|
/source/cli.py
|
"""Console client."""
import os
import sys
import json
from optparse import OptionParser # pylint: disable=deprecated-module
from PyPDF2 import PdfFileReader, PdfFileWriter
COPYRIGHTS = 'Copyrights by Vitaly Bogomolov 2021'
VERSION = '1.2'
OPTS = None  # filled from PARSER in the __main__ block
PARSER = OptionParser(
    usage='Usage: %prog toc.json\n\nvizit https://github.com/vb64/pdftoc for more info.',
    version="%prog version {}".format(VERSION)
)
# Placeholder prefix in toc entries meaning "relative to the default folder".
FOLDER = '{f}'
class Bookmark:
    """One pdf bookmark."""

    def __init__(self, title, page_number, parent):
        """Remember the bookmark's title, target page and parent bookmark."""
        self.title = title
        self.page_number = page_number
        self.parent = parent
        self.obj = None

    def add(self, merger):
        """Add bookmark to pdf, nested under its parent when one exists."""
        parent_obj = self.parent.obj if self.parent else None
        self.obj = merger.addBookmark(self.title, self.page_number, parent=parent_obj)
        return self.obj
class Bookmarks:
    """Pdf bookmarks list."""

    def __init__(self):
        """Start with an empty bookmark list.

        Bug fix: `items` used to be a class attribute, so every Bookmarks
        instance (and every repeated main() call in one process) shared and
        accumulated the same list.
        """
        self.items = []

    def add(self, title, page_number, parent):
        """Create and return new bookmark."""
        self.items.append(Bookmark(title, page_number, parent))
        return self.items[-1]

    def link(self, merger):
        """Make bookmarks in pdf."""
        for i in self.items:
            i.add(merger)
def make(merger, toc, default_folder, parent, bookmarks, evenpages):
    """Join several pdf files to target.

    toc is a list of (title, pdf, childs) triples; entries whose pdf path
    starts with FOLDER ('{f}') are resolved against default_folder.  A
    bookmark is recorded for every entry at the current page count; when
    evenpages is true a blank page pads each appended file to even length.
    Recurses into childs with the freshly created bookmark as parent.
    """
    for title, pdf, childs in toc:
        if pdf.startswith(FOLDER):
            pdf = os.path.join(
                default_folder,
                pdf.replace(FOLDER, '')
            )
        # Bookmark points at the page where this entry's content begins.
        new_parent = bookmarks.add(title, merger.getNumPages(), parent)
        if pdf:
            print(pdf)
            merger.appendPagesFromReader(PdfFileReader(open(pdf, 'rb'))) # pylint: disable=consider-using-with
            if evenpages and (merger.getNumPages() % 2):
                merger.addBlankPage()
        if childs:
            make(merger, childs, default_folder, new_parent, bookmarks, evenpages)
    return 0
def main(argv, _options):
    """Entry point: read the toc json named in argv[0] and build the pdf.

    Returns 0 on success, 1 when no toc file was given.
    """
    print("Pdf merge tool. {}".format(COPYRIGHTS))
    if len(argv) < 1:
        PARSER.print_usage()
        return 1
    bookmarks = Bookmarks()
    # close the toc file deterministically (the original leaked the handle)
    with open(argv[0], encoding='utf-8') as toc_file:
        data = json.load(toc_file)
    merger = PdfFileWriter()
    make(merger, data["toc"], data["folder"], None, bookmarks, bool(data.get('evenpages', False)))
    bookmarks.link(merger)
    path = os.path.dirname(data["target"])
    if path:
        os.makedirs(path, exist_ok=True)
    with open(data["target"], "wb") as output:
        merger.write(output)
    return 0
if __name__ == '__main__': # pragma: no cover
    # Parse CLI options and exit with main()'s return code.
    OPTS, ARGS = PARSER.parse_args()
    sys.exit(main(ARGS, OPTS))
|
{"/tests/test/__init__.py": ["/source/cli.py"], "/tests/test/test_console.py": ["/tests/test/__init__.py", "/source/cli.py"]}
|
25,483
|
Sen2k9/Cholo-A-Simplified-Car-Rental-Application
|
refs/heads/master
|
/uber/urls.py
|
from django.conf.urls import url
from django.urls import path
from .import views
#from .views import IndexView
from django.contrib.auth import login, logout
from django.conf.urls import include, url
from django.contrib.auth.decorators import login_required
#from .views import search
# Route table for the 'uber' car-rental app.
app_name= 'uber'
urlpatterns = [
    # /uber/
    #url(r'^$',views.IndexView.as_view(), name='index'),
    path('', views.IndexView.as_view(), name='index'),
    path('drivers/', views.DriverIndexView.as_view(), name='driver_index'),
    #path('', views.index, name='index'),
    path('results/', views.search, name="search"),
    path('all_rides/', views.RidesView.as_view(), name='all_rides'),
    # /uber/712/
    path('<pk>/',views.DetailView.as_view(), name= 'detail'),
    # favourite toggles for drivers and vehicles
    url(r'^(?P<driver_id>[0-9]+)/favourite/$', views.favourite, name='favourite'),
    url(r'^(?P<vehicle_id>[0-9]+)/favourite_vehicle/$', views.favourite_vehicle, name='favourite_vehicle'),
    #url(r'^(?P<vehicle_id>[0-9]+)/(?P<driver_id>[0-9]+)/$', views.ride, name='ride'),
    url(r'^(?P<vehicle_id>[0-9]+)/(?P<driver_id>[0-9]+)/$', views.RideView.as_view(), name='ride'),
    url(r'^(?P<vehicle_id>[0-9]+)/(?P<driver_id>[0-9]+)/ride_details/$', views.ride_details, name='ride_details'),
    #uber/vehicle/add
    path('vehicle/add/', views.VehicleCreate.as_view(), name='create_vehicle'),
    #uber/vehicle/2/
    path('vehicle/<pk>/', views.VehicleUpdate.as_view(), name='update_vehicle'),
    #uber/vehicle/2/delete/
    path('vehicle/<pk>/delete/', views.VehicleDelete.as_view(), name='delete_vehicle'),
    #uber/all_driver/
    path('all_driver/<pk>/', views.driver_detail, name='all_driver'),
    #path('all_driver/', views.driver_view, name='all_driver'),
    # #uber/driver/add
    path('driver/add/', views.DriverCreate.as_view(), name='create_driver'),
    # #uber/driver/2/
    path('driver/<pk>/', views.DriverUpdate.as_view(), name='update_driver'),
    # #uber/driver/2/delete/
    path('driver/<pk>/delete/', views.DriverDelete.as_view(), name='delete_driver'),
]
|
{"/uber/admin.py": ["/uber/models.py"], "/uber/views.py": ["/uber/models.py", "/uber/forms.py"], "/uber/forms.py": ["/uber/models.py"]}
|
25,484
|
Sen2k9/Cholo-A-Simplified-Car-Rental-Application
|
refs/heads/master
|
/uber/admin.py
|
from django.contrib import admin

from .models import Customer, Driver, Ride, Vehicle

# Expose every core model of the app in the Django admin site.
for model in (Driver, Vehicle, Customer, Ride):
    admin.site.register(model)
|
{"/uber/admin.py": ["/uber/models.py"], "/uber/views.py": ["/uber/models.py", "/uber/forms.py"], "/uber/forms.py": ["/uber/models.py"]}
|
25,485
|
Sen2k9/Cholo-A-Simplified-Car-Rental-Application
|
refs/heads/master
|
/uber/views.py
|
from django.shortcuts import render, get_object_or_404 # shortcuts for HttpResponse, and get an object or show 404 response
from .models import Vehicle, Driver, Customer, Ride #importing required model
from django.views import generic
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django.urls import reverse_lazy
from django.shortcuts import render, redirect
from django.views.generic import View
from .forms import UserForm, EditProfileForm, RideForm
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserChangeForm, PasswordChangeForm
from django.contrib.auth import update_session_auth_hash
from django.contrib.auth import authenticate, login
from django.contrib.auth import logout
from django.http import JsonResponse
from django.db.models import Q
from django.views.generic import TemplateView
#from django.contrib.auth.forms import UserForm
# def driver_view(request):
# drivers = Driver.objects.all()
# return render(request, 'uber/all_driver.html',{'drivers':drivers})
def home(request):
    """Render the public landing page (no login required)."""
    return render(request, 'home.html')
def view_profile(request):
    """Show the profile page for the currently logged-in user."""
    context = {'user': request.user}
    return render(request, 'registration/profile.html', context)
def edit_profile(request):
    """Let the current user edit their own profile.

    GET renders a form pre-filled from ``request.user``; a valid POST
    saves the changes and redirects to the profile page; an invalid
    POST re-renders the bound form with its errors.
    """
    if request.method != 'POST':
        profile_form = EditProfileForm(instance=request.user)
        return render(request, 'registration/edit_profile.html', {'form': profile_form})

    profile_form = EditProfileForm(request.POST, instance=request.user)
    if profile_form.is_valid():
        profile_form.save()
        return redirect('/auth/profile/')
    return render(request, 'registration/edit_profile.html', {'form': profile_form})
def change_password(request):
    """Let the current user change their password.

    GET renders an empty ``PasswordChangeForm``; a valid POST saves the
    new password, keeps the session alive, and redirects to the profile
    page. An invalid POST redirects back to this form.
    """
    if request.method == 'POST':
        form = PasswordChangeForm(data=request.POST, user=request.user)
        if form.is_valid():
            form.save()
            # Keep the user logged in after the password change.
            update_session_auth_hash(request, form.user)
            return redirect('/auth/profile/')
        else:
            # BUG FIX: the target was the relative path 'auth/change-password/',
            # which resolves against the current URL and produces
            # /auth/auth/change-password/. Use an absolute path.
            return redirect('/auth/change-password/')
    else:
        form = PasswordChangeForm(user=request.user)
        args = {'form': form}
        return render(request, 'registration/change_password.html', args)
@login_required
def ride(request, vehicle_id, driver_id):
    """Function-based ride-booking form for a (vehicle, driver) pair.

    BUG FIX: the original called ``form.save()`` first, which tried to
    INSERT a Ride row before its required user/vehicle/driver foreign
    keys were assigned. Build the instance with ``commit=False``, attach
    the relations, then save once (mirrors ``RideView.post``).
    """
    vehicle = get_object_or_404(Vehicle, pk=vehicle_id)
    driver = get_object_or_404(Driver, pk=driver_id)
    form = RideForm(request.POST or None)
    if form.is_valid():
        ride = form.save(commit=False)
        ride.user = request.user
        ride.vehicle_id = vehicle
        ride.driver_ssn = driver
        ride.save()
    args = {'form': form, 'vehicle': vehicle, 'driver': driver}
    return render(request, 'uber/ride.html', args)
class RideView(TemplateView):
    """Class-based ride booking: GET shows the form, POST records a Ride.

    On a valid POST the Ride is attached to the current user and the
    chosen vehicle/driver, then a details page is rendered.
    """
    template_name = 'uber/ride.html'

    def get(self, request, vehicle_id, driver_id):
        return render(request, self.template_name, {'form': RideForm()})

    def post(self, request, vehicle_id, driver_id):
        vehicle = get_object_or_404(Vehicle, pk=vehicle_id)
        driver = get_object_or_404(Driver, pk=driver_id)
        form = RideForm(request.POST)
        # BUG FIX: the original fell through on an invalid form and
        # returned None implicitly (Django then raises); re-show the
        # bound form with its errors instead.
        if not form.is_valid():
            return render(request, self.template_name, {'form': form})
        ride = form.save(commit=False)
        ride.user = request.user
        ride.vehicle_id = vehicle
        ride.driver_ssn = driver
        ride.save()
        details = form.cleaned_data
        args = {
            'vehicle': vehicle,
            'driver': driver,
            'form': RideForm(),  # fresh form for the next booking
            'starting_location': details['starting_location'],
            'destination': details['destination'],
            'starting_time': details['starting_time'],
            'ending_time': details['ending_time'],
            'fare': details['fare'],
        }
        return render(request, 'uber/ride_details.html', args)
@login_required
def ride_details(request, vehicle_id, driver_id):
    """Show every recorded ride alongside the chosen vehicle and driver."""
    context = {
        'vehicle': get_object_or_404(Vehicle, pk=vehicle_id),
        'driver': get_object_or_404(Driver, pk=driver_id),
        'rides': Ride.objects.all(),
    }
    return render(request, 'uber/ride_details.html', context)
# def all_rides(request):
# rides = Ride.objects.all()
# for ride in rides:
# if ride.user == request.user:
# new_ride = ride
# break
# #user = User.objects.get(pk=request.user)
# return render(request,'uber/all_rides', {'new_ride':new_ride})
class RidesView(generic.ListView):
    """Login-protected listing of every Ride in the system."""
    template_name = 'uber/all_rides.html'
    context_object_name = 'rides'

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        # Gate the whole view behind authentication.
        return super().dispatch(*args, **kwargs)

    def get_queryset(self):
        return Ride.objects.all()
class IndexView(generic.ListView):
    """Login-protected listing of every Vehicle (the app's index page)."""
    template_name = 'uber/index.html'
    context_object_name = 'vehicles'

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        # Gate the whole view behind authentication.
        return super().dispatch(*args, **kwargs)

    def get_queryset(self):
        return Vehicle.objects.all()
class DriverIndexView(generic.ListView):
    """Login-protected listing of every Driver."""
    template_name = 'uber/driver_index.html'
    context_object_name = 'drivers'

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        # Gate the whole view behind authentication.
        return super().dispatch(*args, **kwargs)

    def get_queryset(self):
        return Driver.objects.all()
def search(request):
    """Exact-match search over vehicles and drivers.

    Matches the ``q`` GET parameter verbatim against vehicle
    make/model/type and driver first name/last name/sex.
    """
    template = 'uber/search_results.html'
    query = request.GET.get('q')
    # Robustness fix: with a missing or empty ``q`` the original filtered
    # on None (an IS NULL lookup) or matched default-"" rows. Return
    # explicit empty result sets instead.
    if not query:
        vehicles = Vehicle.objects.none()
        drivers = Driver.objects.none()
    else:
        vehicles = Vehicle.objects.filter(
            Q(vehicle_make=query) |
            Q(vehicle_model=query) |
            Q(vehicle_type=query)).distinct()
        drivers = Driver.objects.filter(
            Q(first_name=query) |
            Q(sex=query) |
            Q(last_name=query)).distinct()
    return render(request, template,
                  {
                      'vehicles': vehicles,
                      'drivers': drivers,
                  })
# def index(request):
# vehicles = Vehicle.objects.filter(request.user)
# driver_results = Driver.objects.all()
# query = request.GET.get("q")
# if query:
# vehicles = vehicles.filter(
# Q(vehicle_make__icontains=query) |
# Q(vehicle_model__icontains=query)
# ).distinct()
# driver_results = driver_results.filter(
# Q(first_name__icontains=query)
# ).distinct()
# return render(request, 'uber/index.html', {
# 'vehicles': vehicles,
# 'drivers': driver_results,
# })
# else:
# return render(request, 'uber/index.html', {'vehicles': vehicles})
class DetailView(generic.DetailView):
# Read-only detail page for a single Vehicle; the pk comes from the URL.
model = Vehicle
template_name = 'uber/detail.html'
def favourite(request, driver_id):
    """Toggle a driver's ``is_favourite`` flag, then show that driver.

    Cleanup: the original wrapped the toggle in a try/except whose
    except and else branches returned the identical response, and the
    caught exceptions (KeyError, Driver.DoesNotExist) cannot occur
    after ``get_object_or_404`` — dead code removed.
    """
    driver = get_object_or_404(Driver, pk=driver_id)
    driver.is_favourite = not driver.is_favourite
    driver.save()
    return render(request, 'uber/all_driver.html', {'driver': driver})
def favourite_vehicle(request, vehicle_id):
    """Toggle a vehicle's ``is_favourite`` flag, then show its detail page.

    Cleanup: as in ``favourite``, the original try/except was dead code —
    both branches returned the identical response and the caught
    exceptions cannot occur after ``get_object_or_404``.
    """
    vehicle = get_object_or_404(Vehicle, pk=vehicle_id)
    vehicle.is_favourite = not vehicle.is_favourite
    vehicle.save()
    return render(request, 'uber/detail.html', {'vehicle': vehicle})
class VehicleCreate(CreateView):
# Create form for a Vehicle; on success Django redirects via
# Vehicle.get_absolute_url (the vehicle's detail page).
model = Vehicle
fields = ['ID','vehicle_type','vehicle_make','vehicle_model','passenger_capacity','luggage_capacity','vehicle_image']
class VehicleUpdate(UpdateView):
# Edit form for an existing Vehicle; redirects via Vehicle.get_absolute_url.
model = Vehicle
fields = ['ID','vehicle_type','vehicle_make','vehicle_model','passenger_capacity','luggage_capacity','vehicle_image']
class VehicleDelete(DeleteView):
# Confirm-and-delete view for a Vehicle; returns to the vehicle index.
model = Vehicle
success_url = reverse_lazy("uber:index")
class DriverDetailView(generic.ListView):
# List view over Driver using the all_driver template.
# NOTE(review): indentation was lost in this dump — urls.py references
# views.driver_detail, so driver_detail below is presumably a module-level
# function, while get_queryset presumably belongs to this class; confirm
# against the original file's indentation.
model = Driver
template_name = 'uber/all_driver.html'
def driver_detail(request,pk):
# Render a single driver's page, looked up by primary key.
driver = Driver.objects.get(pk=pk)
return render(request, 'uber/all_driver.html', {'driver':driver})
def get_queryset(self):
return Driver.objects.all()
class DriverCreate(CreateView):
# Create form for a Driver; on success Django redirects via
# Driver.get_absolute_url (the driver's detail page).
model = Driver
fields = ['ssn', 'first_name', 'last_name', 'sex', 'birth_day', 'vehicle_id', 'is_favourite', 'driver_image']
class DriverUpdate(UpdateView):
# Edit form for an existing Driver; redirects via Driver.get_absolute_url.
model = Driver
fields = ['ssn', 'first_name', 'last_name', 'sex', 'birth_day', 'vehicle_id', 'is_favourite', 'driver_image']
class DriverDelete(DeleteView):
# Confirm-and-delete view for a Driver; returns to the vehicle index.
model = Driver
success_url = reverse_lazy("uber:index")
class UserFormView(View):
    """Sign-up view: create a User, hash the password, and log them in."""
    form_class = UserForm
    template_name = 'registration/signup.html'

    def get(self, request):
        # Show an empty registration form.
        return render(request, self.template_name, {'form': self.form_class(None)})

    def post(self, request):
        # Validate the submitted registration data.
        form = self.form_class(request.POST)
        if form.is_valid():
            new_user = form.save(commit=False)
            # Normalized (cleaned) credentials.
            username = form.cleaned_data['username']
            raw_password = form.cleaned_data['password']
            new_user.set_password(raw_password)  # store the hash, never the raw password
            new_user.save()
            # Re-authenticate so login() receives a backend-annotated user.
            auth_user = authenticate(username=username, password=raw_password)
            if auth_user is not None and auth_user.is_active:
                login(request, auth_user)
                return redirect('uber:index')
        return render(request, self.template_name, {'form': form})
|
{"/uber/admin.py": ["/uber/models.py"], "/uber/views.py": ["/uber/models.py", "/uber/forms.py"], "/uber/forms.py": ["/uber/models.py"]}
|
25,486
|
Sen2k9/Cholo-A-Simplified-Car-Rental-Application
|
refs/heads/master
|
/uber/forms.py
|
from django.contrib.auth.models import User
from .models import Ride, Customer, Vehicle, Driver
from django import forms
from django.contrib.auth.forms import UserCreationForm, UserChangeForm
class UserForm(forms.ModelForm):
    """Sign-up form for Django's built-in User model.

    NOTE(review): the password is hashed by the view (``set_password``),
    not by this form — calling ``form.save()`` alone would persist the
    raw password on the User row.
    """
    password = forms.CharField(widget=forms.PasswordInput)
    email = forms.EmailField(required=True)

    class Meta:
        model = User
        fields = ('username', 'first_name', 'last_name', 'email', 'password')

    def save(self, commit=True):
        """Build the User instance, copy the cleaned name/email, and
        optionally persist it."""
        new_user = super(UserForm, self).save(commit=False)
        new_user.first_name = self.cleaned_data['first_name']
        new_user.last_name = self.cleaned_data['last_name']
        new_user.email = self.cleaned_data['email']
        if commit:
            new_user.save()
        return new_user
class EditProfileForm(UserChangeForm):
# Profile-edit form restricted to email and name.
# NOTE(review): UserChangeForm normally also contributes a read-only
# password-hash field — confirm the template handles or omits it.
class Meta:
model = User
fields = (
'email',
'first_name',
'last_name'
)
class RideForm(forms.ModelForm):
"""Booking form for a Ride.

All fields are optional free text here; the user/vehicle/driver foreign
keys are attached by the view before saving.
"""
starting_location = forms.CharField( max_length=500, required=False)
destination = forms.CharField(max_length=500, required=False)
starting_time = forms.CharField(max_length=500, required=False)
ending_time = forms.CharField(max_length=500, required=False)
fare = forms.IntegerField(required=False)
class Meta:
model = Ride
fields = ('starting_location', 'destination', 'starting_time', 'ending_time','fare',)
#included comma at the last to make it tuple
|
{"/uber/admin.py": ["/uber/models.py"], "/uber/views.py": ["/uber/models.py", "/uber/forms.py"], "/uber/forms.py": ["/uber/models.py"]}
|
25,487
|
Sen2k9/Cholo-A-Simplified-Car-Rental-Application
|
refs/heads/master
|
/uber/apps.py
|
from django.apps import AppConfig
class UberConfig(AppConfig):
# Application configuration for the 'uber' Django app (INSTALLED_APPS entry).
name = 'uber'
|
{"/uber/admin.py": ["/uber/models.py"], "/uber/views.py": ["/uber/models.py", "/uber/forms.py"], "/uber/forms.py": ["/uber/models.py"]}
|
25,488
|
Sen2k9/Cholo-A-Simplified-Car-Rental-Application
|
refs/heads/master
|
/taxicab_project/urls.py
|
"""taxicab_project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from django.conf.urls import include, url
from uber import views
from django.conf import settings
from django.conf.urls.static import static
from django.contrib.auth import login, logout
#from django.contrib.auth import password_reset, password_reset_done, password_reset_confirm, password_reset_complete
from django.contrib.auth import views as auth_views
# Project-level URLconf: admin, the uber app, and the auth flow.
urlpatterns = [
    url(r'^admin/', admin.site.urls),
    # Delegate everything under /uber/ to the app's own URLconf.
    url(r'^uber/', include('uber.urls', namespace='uber')),  # include url from other file (e.g. uber/urls.py)
    path('', views.home, name='home'),
    path('auth/', include('django.contrib.auth.urls')),
    path('auth/signup/', views.UserFormView.as_view(), name='signup'),
    path('auth/profile/', views.view_profile, name='view_profile'),
    path('auth/profile/edit/', views.edit_profile, name='edit_profile'),
    path('auth/change-password/', auth_views.PasswordChangeView.as_view(), name='change_password'),
    path('auth/password_reset/', auth_views.PasswordResetView.as_view(), name='password_reset'),
    path('auth/password_reset/done', auth_views.PasswordResetDoneView.as_view(), name='password_reset_done'),
    # BUG FIX: the capture group was misspelled 'uib64'. PasswordResetConfirmView
    # expects a URL kwarg named 'uidb64', so emailed reset links failed.
    url(r'^auth/password_reset/confirm/(?P<uidb64>[0-9A-Za-z]+)-(?P<token>.+)/$',
        auth_views.PasswordResetConfirmView.as_view(), name='password_reset_confirm'),
    url(r'^auth/password_reset/complete/$',
        auth_views.PasswordResetCompleteView.as_view(), name='password_reset_complete'),
]

# Serve static and media files directly in development only.
if settings.DEBUG:
    urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
{"/uber/admin.py": ["/uber/models.py"], "/uber/views.py": ["/uber/models.py", "/uber/forms.py"], "/uber/forms.py": ["/uber/models.py"]}
|
25,489
|
Sen2k9/Cholo-A-Simplified-Car-Rental-Application
|
refs/heads/master
|
/uber/models.py
|
from django.db import models
from django.urls import reverse
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django import forms
class Vehicle(models.Model):
# A rentable vehicle, listed on the index page and rendered in detail.html.
ID = models.AutoField(primary_key = True)
vehicle_type = models.CharField(max_length=250,default="")
vehicle_make = models.CharField(max_length=250,default='')
vehicle_model = models.CharField(max_length=250,default='')
passenger_capacity = models.IntegerField()
luggage_capacity = models.IntegerField()
vehicle_image= models.FileField()
is_favourite = models.BooleanField(default=False)
def get_absolute_url(self):
# Canonical URL used by CreateView/UpdateView redirects: the detail page.
return reverse('uber:detail', kwargs={"pk": self.pk})
# no need to use migration for __str__() class, because you are not adding/deleting columns/rows in database
def __str__(self):
return self.vehicle_type+" - "+self.vehicle_make+" - "+self.vehicle_model
class Driver(models.Model):
# A driver who may be assigned to several vehicles (and vice versa).
ssn = models.AutoField(primary_key = True)
first_name = models.CharField(max_length=200,default="")
last_name = models.CharField(max_length=250,default="")
sex = models.CharField(max_length=50,default="")
# NOTE(review): default='' is not a valid DateField default — confirm and
# switch to default=None (field already allows null) in a new migration.
birth_day = models.DateField(null=True, auto_now=False, auto_now_add=False, default='')
vehicle_id = models.ManyToManyField(Vehicle)
is_favourite= models.BooleanField(default= False)
driver_image= models.FileField()
def get_absolute_url(self):
# Canonical URL used by CreateView/UpdateView redirects: the driver page.
return reverse('uber:all_driver', kwargs={"pk": self.pk})
def __str__(self):
# NOTE(review): no space between first and last name in the display string.
return self.first_name + self.last_name +" - "+ self.sex
class Customer(models.Model):
# Per-account customer profile, keyed one-to-one on the auth User.
user = models.OneToOneField(User,primary_key=True, on_delete=models.CASCADE)
email = models.EmailField(max_length=500, default="1@gmail.com")
first_name = models.CharField(max_length=200,default="")
last_name = models.CharField(max_length=250,default="")
sex = models.CharField(max_length=50,default="")
def __str__(self):
return self.first_name+" "+self.last_name+" - "+self.sex
# def create_profile(sender, **kwargs):
# if kwargs['created']:
# user_profile = Customer.objects.create(user=kwargs['instance'])
# post_save.connect(create_profile, sender=User)
class Ride(models.Model):
# One booked trip linking a user, a vehicle and a driver.
# NOTE(review): despite the _id/_ssn suffixes, vehicle_id and driver_ssn
# are ForeignKey fields, so views assign model instances to them.
#ID = models.AutoField(primary_key=True)
#customer_id = models.OneToOneField(Customer, primary_key=True,default="",on_delete=models.CASCADE)
user = models.ForeignKey(User, on_delete=models.CASCADE)
vehicle_id = models.ForeignKey(Vehicle, on_delete=models.CASCADE)
driver_ssn = models.ForeignKey(Driver,on_delete=models.CASCADE)
starting_location = models.CharField(max_length=500,default="")
destination = models.CharField(max_length=500,default="")
# Times are stored as free text, exactly as submitted by RideForm.
starting_time = models.CharField(max_length=500, default="")
ending_time = models.CharField(max_length=500,default="")
fare = models.IntegerField()
def __str__(self):
return self.starting_location+" "+self.destination
# class Feedback(models.Model):
# ID = models.AutoField(primary_key= True)
# customer_email = models.ForeignKey(Customer, on_delete=models.CASCADE)
# ride_id = models.ForeignKey(Ride, on_delete=models.CASCADE)
# driver_ssn = models.ForeignKey(Driver, on_delete=models.CASCADE)
# safety = models.IntegerField()
# customer_service = models.IntegerField()
# clean = models.IntegerField()
# overall = models.IntegerField()
# def __str__(self):
# return self
# class Coupon(models.Model):
# #ID = models.AutoField()
# customer_email = models.ForeignKey(Customer, on_delete=models.CASCADE)
# discount = models.IntegerField()
# def __str__(self):
# return self
# class Customer_GPS(models.Model):
# customer_email = models.ForeignKey(Customer, on_delete=models.CASCADE)
# location = models.CharField( max_length=250,default="")
# time_stamp= models.DateTimeField(primary_key=True, auto_now=False, auto_now_add=False)
# def __str__(self):
# return self
# class Vehicle_GPS(models.Model):
# vehicle_id = models.ForeignKey(Vehicle, on_delete=models.CASCADE)
# location = models.CharField( max_length=250,default="")
# time_stamp= models.DateTimeField(primary_key=True, auto_now=False, auto_now_add=False)
# def __str__(self):
# return self
|
{"/uber/admin.py": ["/uber/models.py"], "/uber/views.py": ["/uber/models.py", "/uber/forms.py"], "/uber/forms.py": ["/uber/models.py"]}
|
25,490
|
Sen2k9/Cholo-A-Simplified-Car-Rental-Application
|
refs/heads/master
|
/uber/migrations/0001_initial.py
|
# Generated by Django 2.2.3 on 2019-07-24 06:24
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
# Initial schema for the uber app: Customer, Driver, Vehicle, Ride plus the
# Driver<->Vehicle many-to-many. Auto-generated by Django — do not edit by
# hand; change models.py and create a follow-up migration instead.
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('auth', '0011_update_proxy_permissions'),
]
operations = [
migrations.CreateModel(
name='Customer',
fields=[
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to=settings.AUTH_USER_MODEL)),
('email', models.EmailField(default='1@gmail.com', max_length=500)),
('first_name', models.CharField(default='', max_length=200)),
('last_name', models.CharField(default='', max_length=250)),
('sex', models.CharField(default='', max_length=50)),
],
),
migrations.CreateModel(
name='Driver',
fields=[
('ssn', models.AutoField(primary_key=True, serialize=False)),
('first_name', models.CharField(default='', max_length=200)),
('last_name', models.CharField(default='', max_length=250)),
('sex', models.CharField(default='', max_length=50)),
('birth_day', models.DateField(default='', null=True)),
('is_favourite', models.BooleanField(default=False)),
('driver_image', models.FileField(upload_to='')),
],
),
migrations.CreateModel(
name='Vehicle',
fields=[
('ID', models.AutoField(primary_key=True, serialize=False)),
('vehicle_type', models.CharField(default='', max_length=250)),
('vehicle_make', models.CharField(default='', max_length=250)),
('vehicle_model', models.CharField(default='', max_length=250)),
('passenger_capacity', models.IntegerField()),
('luggage_capacity', models.IntegerField()),
('vehicle_image', models.FileField(upload_to='')),
('is_favourite', models.BooleanField(default=False)),
],
),
migrations.CreateModel(
name='Ride',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('starting_location', models.CharField(default='', max_length=500)),
('destination', models.CharField(default='', max_length=500)),
('starting_time', models.CharField(default='', max_length=500)),
('ending_time', models.CharField(default='', max_length=500)),
('fare', models.IntegerField()),
('driver_ssn', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='uber.Driver')),
('user', models.ForeignKey(default='', editable=False, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('vehicle_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='uber.Vehicle')),
],
),
# Added separately because Driver and Vehicle must both exist first.
migrations.AddField(
model_name='driver',
name='vehicle_id',
field=models.ManyToManyField(to='uber.Vehicle'),
),
]
|
{"/uber/admin.py": ["/uber/models.py"], "/uber/views.py": ["/uber/models.py", "/uber/forms.py"], "/uber/forms.py": ["/uber/models.py"]}
|
25,494
|
dplyakin/rcc_app
|
refs/heads/master
|
/db.py
|
import pandas as pd
import numpy as np
import datetime
from app import db
import requests
import json
class User(db.Model):
# A club member, identified by their (unique) lichess username.
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(80), unique=True)
def __init__(self, name):
self.name = name
def __repr__(self):
return f'User-{self.name}'
class Entry(db.Model):
# One player's result line in one swiss tournament.
id = db.Column(db.Integer, primary_key=True)
points = db.Column(db.Float)
rank = db.Column(db.Integer)
# League points awarded for the rank (see calculate_league_points).
league_points = db.Column(db.Integer)
# FK to Swiss.lichess_id (the tournament's lichess identifier).
swiss_lichess_id = db.Column(db.String, db.ForeignKey('swiss.lichess_id'), nullable=False)
swiss = db.relationship('Swiss', backref=db.backref('entries', lazy=True))
# FK to User.name (the lichess username).
username = db.Column(db.String, db.ForeignKey('user.name'), nullable=False)
user = db.relationship('User', backref=db.backref('entries', lazy=True))
def __init__(self, points, rank, league_points, swiss_lichess_id, username):
self.points = points
self.rank = rank
self.league_points = league_points
self.swiss_lichess_id = swiss_lichess_id
self.username = username
def __repr__(self):
return f'<Entry-{self.swiss_lichess_id}-{self.username}>'
class Swiss(db.Model):
    """One swiss tournament, mirrored from the lichess team API.

    Populated by fill_db()/update_db() from the JSON the API returns
    (id, name, startsAt, clock.limit/increment, nbRounds, nbPlayers).
    """
    id = db.Column(db.Integer, primary_key=True)
    lichess_id = db.Column(db.String, unique=True)  # lichess tournament id
    name = db.Column(db.String)
    start_at = db.Column(db.Date)
    time_limit = db.Column(db.Integer)   # from the API's clock['limit']
    increment = db.Column(db.Integer)    # from the API's clock['increment']
    number_of_rounds = db.Column(db.Integer)
    number_of_players = db.Column(db.Integer)

    def __init__(self, lichess_id, name, start_at, time_limit, increment, number_of_rounds, number_of_players):
        self.lichess_id = lichess_id
        self.name = name
        self.start_at = start_at
        self.time_limit = time_limit
        self.increment = increment
        self.number_of_rounds = number_of_rounds
        self.number_of_players = number_of_players

    def __repr__(self):
        # BUG FIX: the closing '>' was missing from the repr string.
        return f'<Tournament-{self.name}-{self.lichess_id}>'
def calculate_league_points(rank: int) -> int:
    """Return the league points awarded for finishing at *rank*.

    The top five places score 10/7/5/3/1 points; every other rank
    scores 0.

    :param rank: 1-based finishing place in the tournament.
    :return: points earned (0 for any rank outside the top five).
    """
    points_by_rank = {
        1: 10,
        2: 7,
        3: 5,
        4: 3,
        5: 1,
    }
    # Robustness fix: the original raised KeyError for ranks < 1;
    # dict.get covers every rank outside the table with 0.
    return points_by_rank.get(rank, 0)
def fill_db():
    """Parse every club tournament from the lichess API into the db.

    Creates one Swiss row per tournament, one Entry row per result
    line, and a User row for every username not already stored.
    :return: None
    """
    raw_lines = requests.get('https://lichess.org/api/team/romes-papa-club/swiss').text.split('\n')
    tournaments = [json.loads(line) for line in raw_lines[:-1]]  # last split element is empty
    # BUG FIX: the original compared a set of User *objects* against a set
    # of username *strings*, so the set difference never matched and every
    # username was re-inserted on each run. Compare names with names.
    known_names = {user.name for user in User.query.all()}
    seen_names = set()
    for swiss in tournaments:
        db.session.add(Swiss(
            lichess_id=swiss['id'],
            name=swiss['name'],
            start_at=datetime.datetime.strptime(swiss['startsAt'][:10], "%Y-%m-%d").date(),
            time_limit=swiss['clock']['limit'],
            increment=swiss['clock']['increment'],
            number_of_rounds=swiss['nbRounds'],
            number_of_players=swiss['nbPlayers']
        ))
        result_lines = requests.get(
            f'https://lichess.org/api/swiss/{swiss["id"]}/results').text.split('\n')
        for entry_as_json in (json.loads(line) for line in result_lines[:-1]):
            seen_names.add(entry_as_json['username'])
            db.session.add(Entry(
                points=entry_as_json['points'],
                rank=entry_as_json['rank'],
                league_points=calculate_league_points(entry_as_json['rank']),
                swiss_lichess_id=swiss['id'],
                username=entry_as_json['username']
            ))
    for new_username in seen_names - known_names:
        db.session.add(User(name=new_username))
    db.session.commit()
    return
def update_db():
    """Insert tournaments (and their entries/users) present on lichess but
    missing from the db.

    Mirrors fill_db(), restricted to tournament ids not yet stored.
    :return: None
    """
    raw_lines = requests.get('https://lichess.org/api/team/romes-papa-club/swiss').text.split('\n')
    # BUG FIX: the original built a set of id *strings* and then subscripted
    # each element like a dict (new_swiss['id'] -> TypeError). Keep the
    # parsed JSON keyed by id so each new tournament can be looked up.
    tournaments = {t['id']: t for t in (json.loads(line) for line in raw_lines[:-1])}
    known_ids = {s.lichess_id for s in db.session.query(Swiss).all()}
    # BUG FIX: comparing User objects against username strings never matched,
    # so every username was re-added on each run. Compare names with names.
    known_names = {user.name for user in User.query.all()}
    seen_names = set()
    for new_id in set(tournaments) - known_ids:
        swiss = tournaments[new_id]
        db.session.add(Swiss(
            lichess_id=swiss['id'],
            name=swiss['name'],
            start_at=datetime.datetime.strptime(swiss['startsAt'][:10], "%Y-%m-%d").date(),
            time_limit=swiss['clock']['limit'],
            increment=swiss['clock']['increment'],
            number_of_rounds=swiss['nbRounds'],
            number_of_players=swiss['nbPlayers']
        ))
        result_lines = requests.get(
            f'https://lichess.org/api/swiss/{new_id}/results').text.split('\n')
        for entry_as_json in (json.loads(line) for line in result_lines[:-1]):
            seen_names.add(entry_as_json['username'])
            db.session.add(Entry(
                points=entry_as_json['points'],
                rank=entry_as_json['rank'],
                league_points=calculate_league_points(entry_as_json['rank']),
                swiss_lichess_id=new_id,
                username=entry_as_json['username']
            ))
    for new_username in seen_names - known_names:
        db.session.add(User(name=new_username))
    db.session.commit()
    return
def get_leaderboard_data():
"""
Return the aggregated, sorted leaderboard from the db.

Groups every Entry by username and computes, per player: total league
points, mean tournament points, mean rank, and the number of
tournaments entered; sorted by total league points, descending.
:return: dict keyed by username, values are the aggregate columns
"""
df = pd.read_sql(db.session.query(Entry).statement, db.session.bind, index_col='id')
leaderboard_as_df = df.groupby('username').agg(sum_league_points=('league_points', 'sum'),
mean_points=('points', 'mean'),
mean_rank=('rank', 'mean'),
sum_entries=('swiss_lichess_id', 'count')
)
leaderboard_as_df.sort_values(by='sum_league_points', ascending=False, inplace=True)
return leaderboard_as_df.to_dict('index')
if __name__ == '__main__':
# One-off bootstrap: create the schema, backfill every historical
# tournament, then run a sync pass for anything added since.
db.create_all()
fill_db()
update_db()
|
{"/db.py": ["/app.py"], "/app.py": ["/db.py"]}
|
25,495
|
dplyakin/rcc_app
|
refs/heads/master
|
/app.py
|
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
import os
app = Flask(__name__)
# SECURITY: database credentials are hard-coded in source control; move the
# URI into an environment variable (e.g. DATABASE_URL) and rotate the leaked
# password.
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://khwvbuqamcctie' \
                                        ':7faf7cc39a759fdc3942331bff81104100cffb18a92481b9bae9e22de297f83e@ec2-34-196' \
                                        '-238-94.compute-1.amazonaws.com:5432/d6lhs24vjd9397'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
app.config['JSON_SORT_KEYS'] = False

db = SQLAlchemy(app)


@app.route("/")
def leaderboard():
    """Sync with lichess, then return the aggregated leaderboard."""
    # BUG FIX: update_db/get_leaderboard_data were imported only under the
    # __main__ guard, so this view raised NameError whenever the app was
    # imported (e.g. by a WSGI server). Import lazily here, which also
    # avoids the app <-> db circular import at module load time.
    from db import update_db, get_leaderboard_data
    update_db()
    return get_leaderboard_data()


if __name__ == '__main__':
    port = int(os.environ.get('PORT', 5000))
    app.run(host='0.0.0.0', port=port)
|
{"/db.py": ["/app.py"], "/app.py": ["/db.py"]}
|
25,498
|
climboid/movie-trailer-website
|
refs/heads/master
|
/media.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import webbrowser
class Movie:
    """A movie with display metadata and a YouTube trailer link."""

    def __init__(self, movie_title, movie_storyline, poster_image, trailer_youtube):
        """Store the title, storyline, poster URL and trailer URL."""
        self.title = movie_title
        self.storyline = movie_storyline
        self.poster_image_url = poster_image
        self.trailer_youtube_url = trailer_youtube

    def show_trailer(self):
        """Open this movie's YouTube trailer in the default web browser."""
        webbrowser.open(self.trailer_youtube_url)
|
{"/entertainment_center.py": ["/media.py"]}
|
25,499
|
climboid/movie-trailer-website
|
refs/heads/master
|
/entertainment_center.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import media
import fresh_tomatoes
# Build the Movie instances that carry all the properties needed to render
# the movie page (title, storyline, poster URL, trailer URL).
toy_story = media.Movie('Toy Story',
'A story of a boy that comes to life',
'http://upload.wikimedia.org/wikipedia/en/1/13/Toy_Story.jpg'
, 'https://www.youtube.com/watch?v=KYz2wyBy3kc')
avatar = media.Movie('Avatar', 'A marine on an alien planet',
'http://fc02.deviantart.net/fs70/f/2010/014/b/c/Avatar_by_Eggar919.jpg'
, 'https://www.youtube.com/watch?v=cRdxXPV9GNQ')
rambo = media.Movie('Rambo', 'A soldier that suffers war',
'http://i2.cdnds.net/13/36/618x400/rambo.jpg',
'https://www.youtube.com/watch?v=OI0kenxkoNg')
# Collect the instances for the page renderer.
movies = [toy_story, avatar, rambo]
# Hand the movies to fresh_tomatoes, which presumably builds and opens the
# gallery page with the trailers — confirm against fresh_tomatoes.py.
fresh_tomatoes.open_movies_page(movies)
|
{"/entertainment_center.py": ["/media.py"]}
|
25,506
|
s-wheels/yolov3_icdarmlt
|
refs/heads/master
|
/convert_gt.py
|
# -*- coding: utf-8 -*-
"""
Copyright 15 November 2018, Sean Wheeler, All rights reserved
This file reads in the groundtruth data from the ICDAR2017 MLT dataset and converts it
into the desired format. It resizes the ground truths to rectangles and determines
the height and width of these boxes
In addition it is capable of pre-processing the images to the desired size and will adjust
the ground truth accordingly
"""
import csv
import os

import cv2
import numpy as np  # BUG FIX: np.zeros is used below but numpy was never imported
import tensorflow as tf

# Dataset selection and YOLO input geometry.
imgs_len = 7200        # number of images in this ICDAR2017 MLT split
dataset = 'training'
img_h_new = 416        # YOLO input height
img_w_new = 416        # YOLO input width

input_image_dir = dataset + '_data/'
output_image_dir = "processed_data/" + dataset + "_images/"
input_label_dir = dataset + '_localization_data/gt_img_'
output_label_dir = "processed_data/" + dataset + "_labels/"

if not os.path.exists(output_image_dir):
    os.makedirs(output_image_dir)
if not os.path.exists(output_label_dir):
    os.makedirs(output_label_dir)

# One-hot index for each script class found in column 9 of the ground truth.
scripts = {"Arabic": 0,
           "Latin": 1,
           "Chinese": 2,
           "Japanese": 3,
           "Korean": 4,
           "Bangla": 5,
           "Symbols": 6,
           "Mixed": 7,
           "None": 8
           }

# Per image: resize to 416x416, read its ground truths, convert each
# quadrilateral to a normalised axis-aligned box plus a one-hot script label.
for img_num in range(1, imgs_len + 1):
    gt_output = []
    img = cv2.imread(input_image_dir + "img_" + str(img_num) + '.jpg')
    img_h, img_w, _ = img.shape
    # NOTE(review): cv2.resize expects (width, height); harmless here only
    # because both targets are 416 — confirm if they ever differ.
    img_resized = cv2.resize(img, (img_h_new, img_w_new), interpolation=cv2.INTER_CUBIC)
    output_image_file = output_image_dir + "img_" + str(img_num) + '.png'
    cv2.imwrite(output_image_file, img_resized)

    ratio_h = 1 / img_h  # used to normalise labels to range [0:1]
    ratio_w = 1 / img_w

    input_label_file = input_label_dir + str(img_num) + '.txt'
    with open(input_label_file, newline='') as input_file:
        for row in csv.reader(input_file):
            # The first eight columns are the quadrilateral corner coordinates.
            for i in range(0, 8):
                row[i] = float(row[i])
            # Normalised axis-aligned bounding rectangle of the quadrilateral.
            x_tpl = min([row[0], row[2], row[4], row[6]]) * ratio_w
            y_tpl = min([row[1], row[3], row[5], row[7]]) * ratio_h
            x_btr = max([row[0], row[2], row[4], row[6]]) * ratio_w
            y_btr = max([row[1], row[3], row[5], row[7]]) * ratio_h
            width = x_btr - x_tpl
            height = y_btr - y_tpl
            x_centre = x_tpl + width / 2
            y_centre = y_tpl + height / 2
            one_hot = np.zeros(9, dtype=np.uint8)
            one_hot[scripts[row[8]]] = 1
            # area = width * height  # For determining anchor boxes with K-means
            gt_output.append([x_centre, y_centre, width, height, one_hot])

    # Write out the file containing all readjusted ground truths.
    output_label_file = output_label_dir + "label_" + str(img_num) + '.txt'
    with open(output_label_file, "w") as output_file:
        writer = csv.writer(output_file)
        writer.writerows(gt_output)
|
{"/convert_darknet_weights.py": ["/yolo_net.py"], "/main.py": ["/yolo_net.py"]}
|
25,507
|
s-wheels/yolov3_icdarmlt
|
refs/heads/master
|
/convert_darknet_weights.py
|
# -*- coding: utf-8 -*-
import tensorflow as tf
import numpy as np
import yolo_net
FLAGS = tf.app.flags.FLAGS
def del_all_flags(FLAGS):
# Remove every previously-registered TF flag so the DEFINE_* calls below
# cannot collide with flags defined by an earlier import (e.g. when re-run
# in the same interpreter session).
# NOTE(review): relies on the private FLAGS._flags() API — may break across
# TF/absl versions.
flags_dict = FLAGS._flags()
keys_list = [keys for keys in flags_dict]
for keys in keys_list:
FLAGS.__delattr__(keys)
del_all_flags(tf.flags.FLAGS)
# Command-line flags: weight file to convert, tensor layout, checkpoint path.
tf.app.flags.DEFINE_string(
'weights_file', 'yolov3.weights', 'Binary file with detector weights')
tf.app.flags.DEFINE_string(
'data_format', 'NCHW', 'Data format: NCHW (gpu only) / NHWC')
tf.app.flags.DEFINE_string(
'ckpt_file', './saved_darknet_model/model.ckpt', 'Checkpoint file')
def main(argv=None):
    """Build Darknet-53, load the binary Darknet weights and save a TF checkpoint."""
    image_ph = tf.placeholder(tf.float32, [None, 416, 416, 3])
    with tf.variable_scope('detector/darknet-53'):
        # Graph construction only; the returned tensors are not evaluated here.
        detections = yolo_net.darknet53(image_ph, data_format=FLAGS.data_format)
    scope_vars = tf.global_variables(scope='detector/darknet-53')
    assign_ops = load_weights(scope_vars, FLAGS.weights_file)
    saver = tf.train.Saver(scope_vars)
    with tf.Session() as sess:
        sess.run(assign_ops)
        save_path = saver.save(sess, save_path=FLAGS.ckpt_file)
        print('Model saved in path: {}'.format(save_path))
def load_weights(var_list, weights_file):
    """
    Loads and converts pre-trained Darknet weights into TF assign ops.

    The binary file stores, per conv layer, either the batch-norm params
    (beta, gamma, moving mean, moving variance) or the conv biases, then
    the conv kernel in Darknet's (out_c, in_c, h, w) order, which is
    transposed to the TF (h, w, in_c, out_c) layout.

    :param var_list: list of network variables (order must match the file).
    :param weights_file: name of the binary file.
    :return: list of assign ops
    """
    with open(weights_file, "rb") as fp:
        _ = np.fromfile(fp, dtype=np.int32, count=5)  # Skip first 5 int values which contain meta-info
        weights = np.fromfile(fp, dtype=np.float32)
    ptr = 0  # read cursor into the flat weight array
    i = 0
    assign_ops = []
    # FIX: removed leftover debug print(i) statements from the loop/return path.
    while i < len(var_list) - 1:
        var1 = var_list[i]
        var2 = var_list[i + 1]
        # do something only if we process conv layer
        if 'Conv' in var1.name.split('/')[-2]:
            # check type of next layer
            if 'BatchNorm' in var2.name.split('/')[-2]:
                # load batch norm params
                gamma, beta, mean, var = var_list[i + 1:i + 5]
                batch_norm_vars = [beta, gamma, mean, var]
                for var in batch_norm_vars:
                    shape = var.shape.as_list()
                    num_params = np.prod(shape)
                    var_weights = weights[ptr:ptr + num_params].reshape(shape)
                    ptr += num_params
                    assign_ops.append(
                        tf.assign(var, var_weights, validate_shape=True))
                # we move the pointer by 4, because we loaded 4 variables
                i += 4
            elif 'Conv' in var2.name.split('/')[-2]:
                # load biases
                bias = var2
                bias_shape = bias.shape.as_list()
                bias_params = np.prod(bias_shape)
                bias_weights = weights[ptr:ptr +
                                       bias_params].reshape(bias_shape)
                ptr += bias_params
                assign_ops.append(
                    tf.assign(bias, bias_weights, validate_shape=True))
                # we loaded 1 variable
                i += 1
            # we can load weights of conv layer: file stores (out_c, in_c, h, w)
            shape = var1.shape.as_list()
            num_params = np.prod(shape)
            var_weights = weights[ptr:ptr + num_params].reshape(
                (shape[3], shape[2], shape[0], shape[1]))
            # remember to transpose to column-major (h, w, in_c, out_c)
            var_weights = np.transpose(var_weights, (2, 3, 1, 0))
            ptr += num_params
            assign_ops.append(
                tf.assign(var1, var_weights, validate_shape=True))
            i += 1
    return assign_ops
# Entry point: tf.app.run parses the flags above and dispatches to main().
if __name__ == '__main__':
    tf.app.run()
|
{"/convert_darknet_weights.py": ["/yolo_net.py"], "/main.py": ["/yolo_net.py"]}
|
25,508
|
s-wheels/yolov3_icdarmlt
|
refs/heads/master
|
/main.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Nov 26 13:28:53 2018
@author: Sean Wheeler
This program builds a parser and uses it to determine hyperparameters for the YOLO network
Takes input folders for images and ground truth
Builds YOLO network and runs it in tensorflow
"""
import tensorflow as tf
import numpy as np
import os
import cv2
import time
import yolo_net
## TRAINING IS BEST USING NCHW, INFERENCE/PREDICTION IS BEST USING NHWC
data_format = 'NCHW'
num_images = 7200  # number of training images consumed per epoch
split = False  #Which set of anchors to use
trn_img_dir = 'ICDAR2017_MLT/processed_data/training_images'
trn_lab_dir = 'ICDAR2017_MLT/processed_data/training_labels_onehot'
val_img_dir = 'ICDAR2017_MLT/processed_data/validation_images'
val_lab_dir = 'ICDAR2017_MLT/processed_data/validation_labels_onehot'
# Anchor box (width, height) priors in input-image pixels, smallest first.
if split == False:  #Use the anchors from Kmeans on entire training dataset
    anchors = [(6,5),(18,10),(37,13),(25,41),(63,23),(105,33),(67,92),(173,57),(110,234),(296,95)]
else:  #Use the anchors from the split dataset
    anchors = [(5,3),(7,9),(14,4),(45,11),(14,22),(23,10),(120,40),(253,84),(88,170),(54,35)]
## set hyperparams
batch_norm_decay = 0.9
batch_norm_epsilon = 1e-05
leaky_relu = 0.1
num_epochs = 150
batch_size = 8
num_scales = 3   # detection grid scales (13/26/52)
num_anchors = 3  # anchors per scale
#% Reset tensorflow flags, sessions and graph
FLAGS = tf.app.flags.FLAGS
def del_all_flags(FLAGS):
    # Remove every registered flag so re-running this module (e.g. in an
    # interactive session) does not raise duplicate-flag errors.
    flags_dict = FLAGS._flags()
    keys_list = [keys for keys in flags_dict]
    for keys in keys_list:
        FLAGS.__delattr__(keys)
del_all_flags(tf.flags.FLAGS)
tf.Session().close()
tf.reset_default_graph()
## MODEL INPUT PARAMETERS
tf.app.flags.DEFINE_integer('img_width', 416, 'Image width')
tf.app.flags.DEFINE_integer('img_height', 416, 'Image height')
tf.app.flags.DEFINE_integer('img_channels', 3, 'Image channels')
tf.app.flags.DEFINE_string('class_names', 'icdar_mlt.names', 'File with class names')
tf.app.flags.DEFINE_integer('num_classes', 9, 'Number of classes ')
## PARAMETERS
tf.app.flags.DEFINE_string('log_dir', '{cwd}/logs/'.format(cwd=os.getcwd()),
                           'Directory where to write event logs and checkpoint.')
tf.app.flags.DEFINE_string('data_dir', os.getcwd() + '/dataset/',
                           'Directory where the dataset will be stored and checkpoint.')
tf.app.flags.DEFINE_integer('log_frequency', 10,
                            'Number of steps between logging results to the console and saving summaries')
tf.app.flags.DEFINE_integer('save_model', 1, 'Number of steps between model saves')
tf.app.flags.DEFINE_string('ckpt_file', 'saved_icdarmlt_model/model.ckpt', 'Where to save checkpoint models')
tf.app.flags.DEFINE_string('pretrained_file', 'saved_darknet_model/model.ckpt', 'Pre-trained Darknet model')
## HYPERPARAMETERS
tf.app.flags.DEFINE_integer('num_epochs', num_epochs,
                            'Number of epochs to train for. ')
tf.app.flags.DEFINE_integer('batch_size', batch_size, 'Number of examples per mini-batch ')
tf.app.flags.DEFINE_float('learning_rate', 1e-4, 'Learning rate')
tf.app.flags.DEFINE_integer('decay_steps', 1000, 'Decay the learning rate every 1000 steps')
tf.app.flags.DEFINE_float('decay_rate', 0.8, 'The base of our exponential for the decay')
## HARDWARE PARAMETERS
tf.app.flags.DEFINE_float('gpu_memory_fraction', 1.0, 'Gpu memory fraction to use')
tf.app.flags.DEFINE_string('data_format', 'NCHW', 'Data format: NCHW (gpu only) / NHWC')
# Main Function
def yolo():
    """
    Builds the YOLOv3 training graph and runs the training loop.

    Reads images/labels from the module-level training directories,
    optimises the YOLO cost with Adam + exponential LR decay, and
    checkpoints the model every FLAGS.save_model epochs.

    Returns:
        epoch_cost: accumulated cost of the final training epoch
            (0 when num_epochs == 0).
    """
    # Configure GPU Options
    config = tf.ConfigProto(
        gpu_options=tf.GPUOptions(per_process_gpu_memory_fraction=FLAGS.gpu_memory_fraction),
        log_device_placement=False,
    )
    # BUILD TF GRAPH
    images_ph = tf.placeholder(tf.float32, [batch_size, FLAGS.img_height, FLAGS.img_width, 3])
    labels_ph = tf.placeholder(tf.float32, [batch_size, None, 4+FLAGS.num_classes])
    labels_gr_ph = tf.placeholder(tf.int32, [batch_size, None, num_scales*num_anchors])
    with tf.variable_scope('detector'):
        predictions = yolo_net.yolo_v3(images_ph, FLAGS.num_classes, anchors, is_training=True)
    labels_assigned, obj_present = yolo_net.tf_assign_label(labels_ph, labels_gr_ph, predictions)
    cost = yolo_net.yolo_cost(labels_assigned, obj_present, predictions, labels_ph, batch_size)
    global_step = tf.Variable(0, trainable=False)
    decayed_learning_rate = tf.train.exponential_decay(FLAGS.learning_rate, global_step,
                                                       FLAGS.decay_steps, FLAGS.decay_rate, staircase=True)
    optimizer = tf.train.AdamOptimizer(decayed_learning_rate).minimize(cost, global_step=global_step)
    saver = tf.train.Saver(tf.global_variables(scope='detector'), max_to_keep=10)
    # EXECUTE GRAPH - FEED IN: IMAGE, INPUT_LABELS, LABELS_GRIDS
    epoch_cost = 0  # FIX: was `return out` with `out` never defined (NameError)
    with tf.Session(config=config) as sess:
        sess.run(tf.global_variables_initializer())
        batch_range = int(num_images/batch_size)
        for epoch in range(num_epochs):
            t_st = time.time()
            epoch_cost = 0
            for batch_num in range(batch_range):
                # Image/label files are 1-indexed on disk
                batch_st = 1+(batch_num*batch_size)
                img_range = range(batch_st, batch_st+batch_size)
                images = load_images_fd(img_range, trn_img_dir, tensor=False)
                labels = load_labels_fd(img_range, trn_lab_dir)
                labels_gr = assign_grid_box(labels)
                # RUN SESSION
                _, batch_cost = sess.run([optimizer, cost], feed_dict={images_ph: images,
                                         labels_ph: labels, labels_gr_ph: labels_gr})
                epoch_cost += batch_cost
                if (batch_num % 20 == 0):
                    print(batch_num, batch_cost)
            print('Epoch {0} trained in {1:.2f}s'.format(epoch, time.time()-t_st))
            print('Epoch {0} cost {1}'.format(epoch, epoch_cost))
            if (epoch % FLAGS.save_model == 0):
                print('Saving model')
                saver.save(sess, save_path=FLAGS.ckpt_file, global_step=epoch)
    return epoch_cost
#%% Loading Functions
def load_labels_fd(labels_range, labels_dir, num_classes=9):
    """
    Loads ground-truth label files (one CSV row per text box) from disk.

    Each row is (x, y, w, h) followed by a num_classes-wide one-hot class
    vector.  When a range is given, files with fewer boxes are zero-padded
    so every image in the batch has the same number of label rows.

    ARGS:
        labels_range = int or range of labels to be read in
        labels_dir = directory where labels are stored
        num_classes = width of the one-hot class part of each row
    OUTPUTS:
        labels = np array of shape (batch, num_labels, 4+num_classes) for a
            range; the raw 1-D/2-D file contents for a single int index
    """
    if type(labels_range) == int:
        labels_file = labels_dir + "/label_" + str(labels_range) + ".txt"
        labels = np.loadtxt(labels_file, dtype=np.float32, delimiter=',')
    elif type(labels_range) == range:
        labels_file = labels_dir + "/label_" + str(labels_range[0]) + ".txt"
        labels = np.loadtxt(labels_file, dtype=np.float32, delimiter=',')
        lab_len = len(labels)
        if len(labels.shape) == 1:
            # Single label row: promote (4+C,) -> (1, 1, 4+C)
            lab_len = 1
            labels = np.expand_dims(labels, axis=0)
            labels = np.expand_dims(labels, axis=0)
        else:
            # BUGFIX: a first file with several rows was left 2-D, which made
            # the np.append below fail; promote (n, 4+C) -> (1, n, 4+C)
            labels = np.expand_dims(labels, axis=0)
        for i in range(labels_range[0]+1, labels_range[-1]+1):
            labels_file = labels_dir + "/label_" + str(i) + ".txt"
            labels_int = np.loadtxt(labels_file, dtype=np.float32, delimiter=',')
            len_lab_int = len(labels_int) if len(labels_int.shape) == 2 else 1
            if len_lab_int == 1:
                labels_int = np.expand_dims(labels_int, axis=0)
            chg_len = len_lab_int - lab_len
            if chg_len < 0:
                # This file has fewer boxes: pad it with zero rows
                labels_int = np.concatenate((labels_int, np.zeros((abs(chg_len), 4+num_classes), dtype=np.float32)), axis=0)
            elif chg_len > 0:
                # This file has more boxes: pad everything loaded so far
                labels = np.concatenate((labels, np.zeros((labels.shape[0], chg_len, 4+num_classes), dtype=np.float32)), axis=1)
                lab_len = len_lab_int
            labels = np.append(labels, [labels_int], axis=0)
    else:
        print("Error, labels_range must be type int or range")
        # NOTE: `labels` is unbound on this path, matching prior behaviour.
    return labels
def load_images_fd(imgs_range, img_dir, normalise=True, img_type =".png", tensor=True, augment=True):
    """
    Loads one image (int index) or a batch of images (range of indices)
    from img_dir and stacks them along a leading batch axis.  When
    tensor=False the graph is evaluated and a numpy array is returned.
    """
    def _path(idx):
        # On-disk path for image index idx.
        return img_dir + "/img_" + str(idx) + img_type

    def _load(idx):
        return load_image_fd(_path(idx), normalise=normalise, img_type=img_type, augment=augment)

    if type(imgs_range) == int:
        imgs = tf.expand_dims(_load(imgs_range), axis=0)
    elif type(imgs_range) == range:
        imgs = tf.expand_dims(_load(imgs_range[0]), axis=0)
        for idx in range(imgs_range[0]+1, imgs_range[-1]+1):
            nxt = tf.expand_dims(_load(idx), axis=0)
            imgs = tf.concat([imgs, nxt], axis=0)
    if tensor == False:
        sess = tf.Session()
        imgs = sess.run(imgs)
        sess.close()
    return imgs
def load_image_fd(img_file, normalise=True, img_type =".png", tensor=True, augment=True):
    """
    Loads a single image from img_file, optionally normalising to [0, 1]
    and applying random brightness/saturation augmentation.  Returns a tf
    tensor unless tensor=False, in which case the graph is evaluated.
    """
    raw = tf.read_file(img_file)
    decoders = {".png": tf.image.decode_png, ".jpg": tf.image.decode_jpeg}
    if img_type not in decoders:
        print("Only png and jpg image types loadable")
        return
    img = decoders[img_type](raw, channels=3)
    if normalise == True:
        img = tf.divide(img, 255)
    if augment == True:
        img = tf.image.random_brightness(img, max_delta=32.0 / 255.0)
        img = tf.image.random_saturation(img, lower=0.5, upper=1.5)
        # Make sure the image is still in [0, 1]
        img = tf.clip_by_value(img, 0.0, 1.0)
    if tensor == False:
        sess = tf.Session()
        img = sess.run(img)
        sess.close()
    return img
def assign_grid_box(labels, num_anchors = 3, num_scales = 3, st_gr_size = 13):
    """
    ARGS:
        labels: shape (batch_size, max num of labels, 4+num_classes)
    RETURNS:
        gr_coords: shape (batch_size, max num of labels, num_scales*num_anchors)

    For every non-zero label row, finds the grid cell its centre falls in
    at each detection scale (st_gr_size, 2x, 4x, ...) and records the
    indices of that cell's anchor slots within the flattened detections
    tensor.  Zero-padded label rows keep all-zero slot indices.

    FIX: anchor-slot writes were hard-coded to 3 entries; they now honour
    the num_anchors parameter (identical output for the default of 3).
    """
    batch_size = len(labels)
    lab_len = len(labels[0])
    gr_coords = np.zeros((batch_size, lab_len, num_scales*num_anchors), dtype=np.int32)
    for j in range(batch_size):
        labels_int = labels[j]
        scale_start = 0  # offset of this scale's block in the detections tensor
        gr_size = st_gr_size
        for scale in range(num_scales):
            gr_len = 1/gr_size  # side length of one grid cell (labels are in [0,1])
            for i in range(lab_len):
                if sum(abs(labels_int[i])) != 0:  # skip zero-padded rows
                    # NOTE(review): columns 1 and 2 are read as the box centre
                    # here; confirm against the label-file column layout.
                    x = labels_int[i, 1]
                    y = labels_int[i, 2]
                    gr_x = int(x // gr_len)
                    gr_y = int(y // gr_len)
                    detect_ref = ((gr_y*gr_size) + gr_x)*num_anchors
                    detect_ref += scale_start
                    for a in range(num_anchors):
                        gr_coords[j, i, scale*num_anchors + a] = detect_ref + a
            scale_start += (gr_size**2)*num_anchors
            gr_size = gr_size*2
    return gr_coords
#%% Post-processing Functions
def draw_boxes(box_params, img, img_file="detect_default.jpg"):
    """
    ARGS:
        box_params - rows of (x_centre, y_centre, width, height)
        img - image to draw boxes on
        img_file - name of outputted image
    Converts centre/size boxes to corner form, draws them with
    tf.image.draw_bounding_boxes, writes the result to img_file and
    returns the drawn image array.
    """
    half_wh = box_params[:, 2:4]/2
    top_left = box_params[:, 0:2] - half_wh
    bottom_right = box_params[:, 0:2] + half_wh
    # tf.image.draw_bounding_boxes expects (y_min, x_min, y_max, x_max)
    boxes = np.stack((top_left[:, 1], top_left[:, 0],
                      bottom_right[:, 1], bottom_right[:, 0]), axis=1)
    boxes = np.expand_dims(boxes, axis=0)
    batch_img = np.expand_dims(img, axis=0)
    if np.amax(batch_img) <= 1:
        # Image was normalised; scale back to 0-255 for drawing/saving
        batch_img = batch_img*255
    drawn = tf.image.draw_bounding_boxes(batch_img, boxes)
    sess = tf.Session()
    drawn = sess.run(drawn)
    sess.close()
    drawn = np.squeeze(drawn, axis=0)
    cv2.imwrite(img_file, drawn)
    return drawn
def iou(box1, box2, mode='hw'):
    """Implement the intersection over union (IoU) between box1 and box2

    Arguments:
        box1, box2 -- list objects with coordinates (x_tpl, y_tpl, x_btr, y_btr),
            or (x_centre, y_centre, width, height) when mode='hw'
    Returns:
        iou -- float in [0, 1]; a tiny epsilon in the union guards against
            division by zero for degenerate (zero-area) boxes
    """
    if mode == 'hw':
        # Convert centre/size to corners: centre +/- HALF the side length.
        # FIX: the full width/height was previously added/subtracted, which
        # computed the IoU of boxes twice the labelled size (inconsistent
        # with draw_boxes, which uses w/2 and h/2).
        box1 = [box1[0] - box1[2]/2, box1[1] - box1[3]/2,
                box1[0] + box1[2]/2, box1[1] + box1[3]/2]
        box2 = [box2[0] - box2[2]/2, box2[1] - box2[3]/2,
                box2[0] + box2[2]/2, box2[1] + box2[3]/2]
    # Calculate the (y1, x1, y2, x2) coordinates of the intersection of box1 and box2. Calculate its Area.
    xi1 = max(box1[0], box2[0])
    yi1 = max(box1[1], box2[1])
    xi2 = min(box1[2], box2[2])
    yi2 = min(box1[3], box2[3])
    inter_area = max(yi2-yi1, 0) * max(xi2-xi1, 0)
    # Calculate the Union area by using Formula: Union(A,B) = A + B - Inter(A,B)
    box1_area = (box1[2]-box1[0]) * (box1[3]-box1[1])
    box2_area = (box2[2]-box2[0]) * (box2[3]-box2[1])
    union_area = box1_area + box2_area - inter_area + 1e-10
    # compute the IoU
    iou = inter_area/union_area
    return iou
#%# Main
#%%
if __name__ == '__main__':
    # FIX: was `train_yolo()`, which is not defined anywhere in this module;
    # the training entry point is yolo().
    out = yolo()
    print(out)
|
{"/convert_darknet_weights.py": ["/yolo_net.py"], "/main.py": ["/yolo_net.py"]}
|
25,509
|
s-wheels/yolov3_icdarmlt
|
refs/heads/master
|
/yolo_net.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Nov 11 22:44:01 2018
@author: Sean Wheeler
Creates the darknet-53 network using tensorflow
As found in 'YOLOv3: An Incremental Improvement by Joseph Redmon and Ali Farhadi'
Builds the YOLO FPN-based detection layers on top of that.
Also contains functions that can assign the bounding boxes to labels and calculate
the cost using an altered YOLOv3 loss function with softmax cross entropy.
"""
import tensorflow as tf
import tensorflow.contrib.slim as slim
#%% YOLO Network
def yolo_v3(inputs, num_classes, anchors, data_format='NCHW', is_training=False,
            batch_norm_epsilon=1e-05, batch_norm_decay=0.9, leaky_relu_alpha=0.1, reuse=False):
    """
    Creates the YOLOv3 network consisting of Darknet-53 (52 layers) and a 3 stage RPN detection

    ARGS:
        inputs = RGB Image tensor - shape (batch_size, img_height, img_width, img_channels)
        num_classes = number of classes in data - integer
        anchors = list of 9 (width, height) anchor priors, smallest first;
            sliced per detection scale below
        is_training = is the network to be trained - boolean
        data_format = NCHW or NHWC - NCHW is faster for training and NHWC for inference
        batch_norm_epsilon = batch normalisation parameter - float
        batch_norm_decay = batch normalisation parameter - float
        leaky_relu_alpha = leaky relu slope parameter - float
        reuse = are variables to be reused - boolean
    RETURNS:
        detections = tensor output from YOLOv3 network - shape (batch_size, 10647, 5+num_classes)
    """
    # Spatial size of the (NHWC) input; needed by the detection layers
    img_size = inputs.get_shape().as_list()[1:3]
    # transpose the inputs to NCHW
    if data_format == 'NCHW':
        inputs = tf.transpose(inputs, [0, 3, 1, 2])
    # set batch norm params
    batch_norm_params = {
        'decay': batch_norm_decay,
        'epsilon': batch_norm_epsilon,
        'scale': True,
        'is_training': is_training,
        'fused': None,  # Use fused batch norm if possible.
    }
    # Fresh initialisers when training from scratch; None when restoring
    if is_training == True:
        biases_initializer = tf.zeros_initializer()
        weights_initializer = tf.contrib.layers.xavier_initializer()
    else:
        biases_initializer = None
        weights_initializer = None
    # Set activation_fn and parameters for conv2d, batch_norm.
    with slim.arg_scope([slim.conv2d, slim.batch_norm, _fixed_padding], data_format=data_format, reuse=reuse):
        with slim.arg_scope([slim.conv2d], normalizer_fn=slim.batch_norm,
                            normalizer_params=batch_norm_params,
                            biases_initializer=biases_initializer,
                            weights_initializer=weights_initializer,
                            activation_fn=lambda x: tf.nn.leaky_relu(x, alpha=leaky_relu_alpha)):
            # Build the Darknet-53 backbone; route_1/route_2 are the early
            # feature maps reused by the FPN below
            with tf.variable_scope('darknet-53'):
                route_1, route_2, inputs = darknet53(inputs, data_format)
            # Upsample final layer and concatenate with earlier layers for multi-scale detections
            with tf.variable_scope('yolo-v3'):
                route, inputs = _yolo_block(inputs, 512, data_format=data_format)
                detect_1 = _detection_layer(inputs, num_classes, anchors[6:9], img_size, data_format)
                detect_1 = tf.identity(detect_1, name='detect_1')  # Lowest resolution detections (13^2 grid)
                inputs = _conv2d_fixed_padding(route, 256, 1, data_format=data_format)
                upsample_size = route_2.get_shape().as_list()
                inputs = _upsample(inputs, upsample_size, data_format)
                inputs = tf.concat([inputs, route_2], axis=1 if data_format == 'NCHW' else 3)  # Concatenate early darknet layer with upsampled final layer
                route, inputs = _yolo_block(inputs, 256, data_format=data_format)
                detect_2 = _detection_layer(inputs, num_classes, anchors[3:6], img_size, data_format)
                detect_2 = tf.identity(detect_2, name='detect_2')  # Middle resolution detections (26^2 grid)
                inputs = _conv2d_fixed_padding(route, 128, 1, data_format=data_format)
                upsample_size = route_1.get_shape().as_list()
                inputs = _upsample(inputs, upsample_size, data_format)
                inputs = tf.concat([inputs, route_1], axis=1 if data_format == 'NCHW' else 3)  # Concatenate early darknet layer with upsampled final layer
                _, inputs = _yolo_block(inputs, 128, data_format=data_format)
                detect_3 = _detection_layer(inputs, num_classes, anchors[0:3], img_size, data_format)
                detect_3 = tf.identity(detect_3, name='detect_3')  # Highest resolution detections (52^2 grid)
                # All three scales stacked along the prediction axis
                detections = tf.concat([detect_1, detect_2, detect_3], axis=1)
                detections = tf.identity(detections, name='detections')
                return detections
def _yolo_block(inputs, filters, data_format='NCHW'):
    """
    Builds a typical YOLO block in the detection layer, that effectively reduces
    the number of channels to the required input (2*filters)

    ARGS:
        inputs = tensor - input tensor from previous layer - Shape: (batch_size, prev_layer_dims, prev_layer_filters)
        filters = integer - number of filters for route output
        data_format = string - NCHW or NHWC
    RETURNS:
        route = tensor - output without final layer for passing to different scales
        outputs = tensor - output for detection at a scale, channels=filters*2
    """
    outputs = inputs
    # Two 1x1-reduce / 3x3-expand pairs, followed by a final 1x1 reduction
    for _ in range(2):
        outputs = _conv2d_fixed_padding(outputs, filters, 1, data_format=data_format)
        outputs = _conv2d_fixed_padding(outputs, filters * 2, 3, data_format=data_format)
    route = _conv2d_fixed_padding(outputs, filters, 1, data_format=data_format)
    outputs = _conv2d_fixed_padding(route, filters * 2, 3, data_format=data_format)
    return route, outputs
def _detection_layer(inputs, num_classes, anchors, img_size, data_format):
    """
    ARGS:
        inputs = tensor - input tensor from previous layer - Shape: (batch_size, prev_layer_dims, prev_layer_filters)
        num_classes = integer - number of classes for classification task
        anchors = the pre-defined anchors for bounding boxes (the 3 for this scale)
        img_size = integer - image dimensions, assuming square
        data_format = string - NCHW or NHWC
    RETURNS:
        detections = tensor - final output detections for a scale - Shape: (batch_size, num_predictions, 5+num_classes)
            each row: (x_centre, y_centre, width, height, objectness, class logits),
            with box coordinates in input-image pixel units
    """
    num_anchors = len(anchors)
    # Create the final detection layer, where outputs = num of kernels for each grid cell
    detections = slim.conv2d(inputs, num_anchors * (5 + num_classes), 1,
                             stride=1, normalizer_fn=None,
                             activation_fn=None, biases_initializer=tf.zeros_initializer())
    # Determine the size of the resolution (13, 26 or 52)
    grid_size = detections.get_shape().as_list()
    grid_size = grid_size[1:3] if (data_format == 'NHWC') else grid_size[2:4]
    dim = grid_size[0] * grid_size[1]  # How many inputs to detection layer per channel
    box_attrs = 5 + num_classes  # How many outputs per box?
    # Flatten the spatial grid into (batch, num_anchors*dim, box_attrs).
    # NOTE(review): only the NCHW path is reshaped here; confirm the NHWC
    # path produces the documented output shape before using it.
    if data_format == 'NCHW':
        detections = tf.reshape(detections, [-1, num_anchors*box_attrs, dim])
        detections = tf.linalg.transpose(detections)
        detections = tf.reshape(detections, [-1, num_anchors*dim, box_attrs])
    # Split the detections into the different categories
    # Centres(x,y), Sizes(w,h), Objectness, Class Logits (Softmaxed later)
    box_cens, box_sizs, box_objs, clss = tf.split(detections, [2, 2, 1, num_classes], axis=-1)
    # Create an array of reference points (one for each anchor per grid)
    gr_x = tf.range(grid_size[0], dtype=tf.float32)
    gr_y = tf.range(grid_size[1], dtype=tf.float32)
    x_ref, y_ref = tf.meshgrid(gr_x, gr_y)
    x_ref = tf.reshape(x_ref, (-1, 1))
    y_ref = tf.reshape(y_ref, (-1, 1))
    gr_ref = tf.concat([x_ref, y_ref], axis=-1)
    gr_ref = tf.reshape(tf.tile(gr_ref, [1, num_anchors]), [1, -1, 2])
    # Side lengths of a grid box in pixels of input image
    grid_len = (img_size[0] // grid_size[0], img_size[1] // grid_size[1])
    # Normalise the anchor lengths by the grid box sides
    anchors = [(anchor[0] / grid_len[0], anchor[1] / grid_len[1]) for anchor in anchors]
    # Box centre: sigma(t_xy) + cell offset, rescaled to image pixels
    box_cens = tf.multiply(tf.add(tf.nn.sigmoid(box_cens), gr_ref), grid_len)
    anchors = tf.tile(anchors, [dim, 1])
    # Box size: exp(t_wh) * anchor prior, rescaled to image pixels
    box_sizs = tf.multiply(tf.multiply(tf.exp(box_sizs), anchors), grid_len)
    box_objs = tf.nn.sigmoid(box_objs)
    detections = tf.concat([box_cens, box_sizs, box_objs, clss], axis=-1)
    return detections
def _upsample(inputs, out_shape, data_format='NHWC'):
    """
    Nearest-neighbour upsamples `inputs` to the spatial size given by
    `out_shape` (the shape list of the darknet route it will be
    concatenated with).

    ARGS:
        inputs = tensor - input tensor from previous layer - Shape: (batch_size, prev_layer_dims, prev_layer_filters)
        out_shape = list - shape of the darknet-53 route to which upsampled layer is concatenated
        data_format = string - NCHW or NHWC
    RETURNS:
        outputs = tensor - output tensor from this layer that has been upsampled
    """
    nchw = data_format == 'NCHW'
    if nchw:
        # resize ops work on NHWC, so temporarily move channels last
        inputs = tf.transpose(inputs, [0, 2, 3, 1])
    # NOTE(review): index order preserved from the original; for NCHW it reads
    # (out_shape[3], out_shape[2]) as (height, width) — harmless while the
    # network input is square, but worth confirming.
    if nchw:
        height_n, width_n = out_shape[3], out_shape[2]
    else:
        height_n, width_n = out_shape[2], out_shape[1]
    outputs = tf.image.resize_nearest_neighbor(inputs, (height_n, width_n))
    if nchw:
        outputs = tf.transpose(outputs, [0, 3, 1, 2])
    outputs = tf.identity(outputs, name='upsampled')
    return outputs
#%% Darknet
@tf.contrib.framework.add_arg_scope
def _fixed_padding(inputs, kernel_size, data_format = 'NCHW', mode='CONSTANT', **kwargs):
    """
    Pads input H and W with a fixed amount of padding, independent of input size.

    ARGS:
        inputs = tensor, (batch, C, H, W) or (batch, H, W, C)
        kernel_size = positive integer, kernel to be used in conv2d or max_pool2d
        mode = string, the mode for tf.pad
    RETURNS:
        padded_inputs = tensor, same format as inputs and padded if kernel_size > 1
    """
    pad_total = kernel_size - 1
    pad_beg = pad_total // 2
    pad_end = pad_total - pad_beg
    spatial = [pad_beg, pad_end]
    none = [0, 0]
    # Only the two spatial dimensions are padded, whichever layout is used.
    if data_format == 'NCHW':
        paddings = [none, none, spatial, spatial]
    else:
        paddings = [none, spatial, spatial, none]
    padded_inputs = tf.pad(inputs, paddings, mode=mode)
    return padded_inputs
def _conv2d_fixed_padding(inputs, filters, kernel_size, strides = 1, data_format='NCHW'):
    """
    Adds a 2D convolutional layer, applying explicit fixed padding first
    whenever the stride is greater than one (slim then uses VALID padding).

    ARGS:
        inputs = tensor - input tensor from previous layer - Shape: (batch_size, prev_layer_dims, prev_layer_filters)
        filters = integer - number of filters to apply to layer/number of channels in output
        kernel_size = integer/list - size and shape of filters
        strides = integer/list - length of each stride over input
        data_format = string - NCHW or NHWC
    RETURNS:
        outputs = tensor - output tensor from this layer that has been convoluted
    """
    strided = strides > 1
    if strided:  # layer needs explicit fixed padding
        inputs = _fixed_padding(inputs, kernel_size, data_format=data_format)
    padding_mode = 'VALID' if strided else 'SAME'
    outputs = slim.conv2d(inputs, filters, kernel_size, stride=strides,
                          padding=padding_mode)
    return outputs
def _darknet_53_block(inputs, filters, num_blocks=1, data_format='NCHW'):
    """
    Stacks `num_blocks` Darknet residual units: a 1x1 reduce to `filters`,
    a 3x3 expand to 2*filters, plus an identity shortcut.

    ARGS:
        inputs = tensor - input tensor from previous layer - Shape: (batch_size, prev_layer_dims, prev_layer_filters)
        filters = integer - number of filters to apply to layer/number of channels in output
        num_blocks = integer - number of darknet blocks to apply
        data_format = string - NCHW or NHWC
    RETURNS:
        inputs = tensor - output tensor from this layer that has been convoluted
    """
    for _ in range(num_blocks):
        shortcut = inputs
        inputs = _conv2d_fixed_padding(inputs, filters, 1, data_format=data_format)
        inputs = _conv2d_fixed_padding(inputs, 2*filters, 3, data_format=data_format)
        inputs = inputs + shortcut
    return inputs
def darknet53(inputs, data_format='NCHW'):
    """
    Builds the Darknet-53 backbone (without the avgpool, connected or
    softmax layers) and also returns the two intermediate feature maps
    used by the FPN detection stages.

    ARGS:
        inputs = tensor - input tensor from previous layer - Shape: (batch_size, prev_layer_dims, prev_layer_filters)
        data_format = string - NCHW or NHWC
    RETURNS:
        scale_1, scale_2 = tensors - intermediate feature maps for the FPN
        outputs = tensor - final backbone output
    """
    net = _conv2d_fixed_padding(inputs, 32, 3, data_format=data_format)
    # Each stage: strided 3x3 downsample followed by residual units
    net = _conv2d_fixed_padding(net, 64, 3, strides=2, data_format=data_format)
    net = _darknet_53_block(net, 32, data_format=data_format)
    net = _conv2d_fixed_padding(net, 128, 3, strides=2, data_format=data_format)
    net = _darknet_53_block(net, 64, num_blocks=2, data_format=data_format)
    net = _conv2d_fixed_padding(net, 256, 3, strides=2, data_format=data_format)
    net = _darknet_53_block(net, 128, num_blocks=8, data_format=data_format)
    scale_1 = net  # tapped for the highest-resolution detections
    net = _conv2d_fixed_padding(net, 512, 3, strides=2, data_format=data_format)
    net = _darknet_53_block(net, 256, num_blocks=8, data_format=data_format)
    scale_2 = net  # tapped for the middle-resolution detections
    net = _conv2d_fixed_padding(net, 1024, 3, strides=2, data_format=data_format)
    outputs = _darknet_53_block(net, 512, num_blocks=4, data_format=data_format)
    return scale_1, scale_2, outputs
#%% TF Assign Label function and TF Loops
def tf_assign_label(labels, labels_grids, detections, iou_threshold=0.5, num_scs=3, num_ancs=3, num_classes=9):
    """
    ARGS:
        labels = (x, y, h, w, classes) - tensor shape (num_batches, num_labels, 4+num_classes)
        labels_grids = grid box indices - tensor shape (num_batches, num_labels, num_scales*num_anchors)
        detections = outputted detections from YOLOv3.
            Shape: (num_batches, num_predicted_boxes, 4+1+num_classes)
            where for num_scales, num_anchors = 3: 10647 = (13^2 + 26^2 + 52^2)*3
    RETURNS:
        labels_assigned = indices of bounding box detections assigned to labels
            Elements: (batch_num, label_num, assigned_pred_ind)
            Shape: (num_assigned_labels, 3)
        obj_present = indices of bounding box detections assigned/or with iou
            over threshold. Elements: (batch_num, assigned_pred_ind)
            Shape: (num_assigned_labels+num_labels_over_threshold, 2)

    This function calculates IoUs between the detections and labels within
    the relevant grid box. It then assigns the detections to the labels with
    the highest IoU. If there is any detection with no IoU with any label it is
    then assigned to the label which it's centre is nearest to.

    NOTE(review): relies on the helpers _tf_count, _batch_assign_loop and
    _tf_unique_2d defined elsewhere in this module.
    """
    # Determine parameters for loop iterations
    pred_size = detections.get_shape()
    batch_size = labels.get_shape()[0]
    labels_size = tf.shape(labels)[1]
    num_pos_ancs = num_ancs*num_scs  # candidate anchor slots per label
    # Execute IoU loop which calculates the IoUs with possible anchors for each label and stores in tensor of shape
    # (batch_size, labels_size, num_pos_anchors, 2) where the final dimension contains (iou, bounding_box_index)
    batch_num = 0
    loop_vars = [batch_num, batch_size, labels_size, num_pos_ancs, detections,
                 labels_grids, labels, tf.zeros([batch_size, labels_size, num_pos_ancs, 2])]
    con_shp = tf.constant(0).get_shape()
    shp_invars = [con_shp, con_shp, con_shp, con_shp, pred_size,
                  labels_grids.get_shape(), labels.get_shape(), tf.TensorShape([None, None, None, None])]
    batch_iou_out = tf.while_loop(_tf_count, _batch_iou_loop, loop_vars, shp_invars, back_prop=False)
    pos_ious = batch_iou_out[-1]
    # Separate the (iou, detection index) pairs and flatten per batch
    ious, refs = tf.split(pos_ious, 2, axis=3)
    ious = tf.layers.flatten(ious)
    refs = tf.layers.flatten(refs)
    # Execute loop which assigns each label an anchor, provided it has an IoU>0
    batch_num = 0
    loop_vars = [batch_num, batch_size, labels_size, num_pos_ancs,
                 ious, refs, tf.zeros([batch_size, labels_size])]
    shp_invars = [con_shp, con_shp, con_shp, con_shp,
                  tf.TensorShape([None, None]), tf.TensorShape([None, None]),
                  tf.TensorShape([None, None])]
    batch_assign_out = tf.while_loop(_tf_count, _batch_assign_loop, loop_vars, shp_invars, back_prop=False)
    labels_assigned = tf.cast(batch_assign_out[-1], tf.int32)
    # Any bounding box with an iou less than the threshold is marked with -1
    obj_present = tf.cast(tf.greater_equal(ious, iou_threshold), tf.float32)
    obj_present_no = obj_present-1
    obj_present = tf.cast(refs*obj_present+obj_present_no, tf.int32)
    # Create tensors which concatenates batch numbers on labels_assigned and obj_present
    rang = tf.range(batch_size)
    rang = tf.reshape(rang, [-1, 1])
    rang_1 = tf.tile(rang, [1, labels_size])
    rang_2 = tf.tile(rang, [1, tf.shape(obj_present)[1]])
    # Create masks to be remove any -1 elements
    mask_1 = tf.not_equal(labels_assigned, -1)
    mask_2 = tf.not_equal(obj_present, -1)
    # Expand dimensions for concatenation
    labels_assigned = tf.expand_dims(labels_assigned, axis=2)
    obj_present = tf.expand_dims(obj_present, axis=2)
    rang_1 = tf.expand_dims(rang_1, axis=2)
    rang_2 = tf.expand_dims(rang_2, axis=2)
    # Create tensor which concatenates label numbers on labels_assigned
    label_nums = tf.range(labels_size)
    label_nums = tf.tile(label_nums, [batch_size])
    label_nums = tf.reshape(label_nums, [batch_size, labels_size])
    label_nums = tf.expand_dims(label_nums, axis=2)
    # Add label and batch numbers to label_assigned and batch numbers to obj_present
    labels_assigned = tf.concat([label_nums, rang_1, labels_assigned], axis=2)
    obj_present = tf.concat([rang_2, obj_present], axis=2)
    # Apply the boolean masks to remove -1 elements which represent
    # labels with no assigned bounding box for labels_assigned
    # bounding boxes with IoU less than threshold for obj_present
    labels_assigned = tf.boolean_mask(labels_assigned, mask_1, axis=0)
    obj_present = tf.boolean_mask(obj_present, mask_2, axis=0)
    # Add all assigned bounding boxes to thresholded bounding boxes
    obj_present = tf.concat([obj_present, labels_assigned[:, 1:3]], axis=0)
    obj_present = _tf_unique_2d(obj_present)  # Remove multiple appearances of the same bounding box
    return labels_assigned, obj_present
def _batch_iou_loop(batch_num, batch_size, labels_size, num_pos_ancs, detections, labels_grids, labels, pos_ious):
    """
    Body of the outer tf.while_loop over images in the batch.

    ARGS:
        batch_num & batch_size = counter and limit for loop
        labels_size = limit for nested label loop
        num_pos_ancs = number of possible anchors for each label
        labels = (x, y, h, w, classes) - tensor shape (num_batches, num_labels, 4+num_classes)
        labels_grids = grid box indices - tensor shape (num_batches, num_labels, num_scales*num_anchors)
        detections = outputted detections from YOLOv3.
            Shape: (num_batches, num_predicted_boxes, 4+1+num_classes)
    RETURNS:
        The loop variables with batch_num incremented and pos_ious extended:
        pos_ious = tensor containing IoUs of labels with possible bounding box detections
            Elements: (IoU, bounding box detection index)
            Shape: (batch_size, label_size, num_pos_ancs, 2)
    """
    # Loop over each label of this image via a nested while_loop
    con_shp = tf.constant(0).get_shape()
    img_labels_num = 0
    # NOTE(review): the inner accumulator's trailing dimension is 14 here while
    # the outer accumulator uses 2 — the shape invariant below is fully dynamic,
    # so confirm against _labels_iou_loop what the 14 columns hold.
    loop_vars = [img_labels_num, labels_size, num_pos_ancs, detections[batch_num],
                 labels_grids[batch_num], labels[batch_num],
                 tf.zeros([labels_size, num_pos_ancs, 14])]
    shp_invars = [con_shp, con_shp, con_shp, detections[batch_num].get_shape(),
                  labels_grids[batch_num].get_shape(), labels[batch_num].get_shape(),
                  tf.TensorShape([None, None, None])]
    labels_loop_out = tf.while_loop(_tf_count, _labels_iou_loop,
                                    loop_vars, shp_invars, back_prop=False)
    img_pos_ious = labels_loop_out[-1]
    img_pos_ious = tf.expand_dims(img_pos_ious, axis=0)
    # Add all possible anchors to tensor (first iteration replaces the zeros)
    pos_ious = tf.cond(tf.equal(batch_num, 0),
                       lambda: img_pos_ious,
                       lambda: tf.concat([pos_ious, img_pos_ious], 0))
    return batch_num+1, batch_size, labels_size, num_pos_ancs, detections, labels_grids, labels, pos_ious
def _labels_iou_loop(img_labels_num, labels_size, num_pos_ancs, img_detections, img_labels_grids, img_labels, img_pos_ious):
    """
    Nested loop inside batch_iou_loop: for one image, iterates over its labels
    and stacks the per-label IoU tensors produced by _label_iou_loop.
    ARGS:
        img_labels_num & labels_size = counter and limit for loop
        num_pos_ancs = number of possible anchors for each label
        img_labels = (x, y, h, w, classes) - tensor shape (num_labels, 4+num_classes)
        img_labels_grids = grid box indices - tensor shape (num_labels, num_scales*num_anchors)
        img_detections = outputted detections from YOLOv3.
                         Shape: (num_predicted_boxes, 4+1+num_classes)
    RETURNS:
        img_pos_ious = tensor containing IoUs of labels with possible bounding box detections
                       Elements: (IoU, bounding box detection index)
                       Shape: (label_size, num_pos_ancs, 2)
    """
    #Loop over each label for an image
    con_shp = tf.constant(0).get_shape()  # scalar shape invariant for the loop counters
    label_num = 0
    # NOTE(review): accumulator width 14 vs the documented 2 — see note in
    # _batch_iou_loop; the None invariant allows the body to replace it.
    loop_vars = [label_num, num_pos_ancs, img_detections,
                 img_labels_grids[img_labels_num], img_labels[img_labels_num],
                 tf.zeros([num_pos_ancs, 14])]
    shp_invars = [con_shp, con_shp, img_detections.get_shape(),
                  img_labels_grids[img_labels_num].get_shape(), img_labels[img_labels_num].get_shape(),
                  tf.TensorShape([None, None])]
    label_loop_out = tf.while_loop(_tf_count, _label_iou_loop,
                                   loop_vars, shp_invars, back_prop=False)
    #Extract the possible anchors for each ground truth
    lab_ious = label_loop_out[-1]
    lab_ious = tf.expand_dims(lab_ious, axis=0)  # prepend a label axis
    #Add all possible anchors to tensor
    # First label initialises the image tensor; later labels concatenate onto it.
    img_pos_ious = tf.cond( tf.equal(img_labels_num,0),
                            lambda: lab_ious,
                            lambda: tf.concat([img_pos_ious, lab_ious],0) )
    return img_labels_num+1, labels_size, num_pos_ancs, img_detections, img_labels_grids, img_labels, img_pos_ious
def _label_iou_loop(label_num, num_pos_ancs, img_detections, label_grids, label, lab_ious):
    """
    Nested loop inside labels_iou_loop: computes, for a single label, the IoU
    between the label box and each of its candidate anchor detections.
    ARGS:
        label_num & num_pos_ancs = counter and limit for loop
        label = (x, y, h, w, classes) - tensor shape (4+num_classes)
        label_grids = grid box indices - tensor shape (num_scales*num_anchors)
        img_detections = outputted detections from YOLOv3.
                         Shape: (num_predicted_boxes, 4+1+num_classes)
    RETURNS:
        lab_ious = tensor containing IoUs of labels with possible bounding box detections
                   Elements: (IoU, bounding box detection index)
                   Shape: (num_pos_ancs, 2)
    """
    #Gather all the appropriate anchor detections for this label
    lab_pos_ancs = tf.gather(img_detections, label_grids)
    lab_pos_ancs = lab_pos_ancs[:,0:4]  # keep only the box coordinates
    lab_box_coords = label[0:4]
    # BUG FIX: the tile multiple was a hard-coded tf.constant(9) even though
    # the reshape below already used num_pos_ancs — any anchor count other
    # than 9 would have crashed. Use num_pos_ancs consistently.
    lab_boxes = tf.tile(lab_box_coords, [num_pos_ancs])
    lab_boxes = tf.reshape(lab_boxes, (num_pos_ancs,4))
    #Calculate the IoUs and concatenate with bounding box detection indices
    lab_ious = tf_iou(lab_boxes, lab_pos_ancs)
    lab_gr_inds = tf.expand_dims(tf.to_float(tf.transpose(label_grids)),axis=1)
    #Concatenate possible anchors with their index in detections to use later
    lab_ious = tf.concat([lab_ious, lab_gr_inds], axis=1)
    return label_num+1, num_pos_ancs, img_detections, label_grids, label, lab_ious
def _batch_assign_loop(batch_num, batch_size, labels_size, num_pos_ancs, ious, refs, labels_assigned):
    """
    Body of a tf.while_loop over the batch: greedily assigns each label of one
    image a unique bounding box (via _labels_assign_loop) and accumulates the
    per-image result into labels_assigned.
    ARGS:
        batch_num & batch_size = counter and limit for loop
        labels_size = limit for nested label loop
        num_pos_ancs = number of possible anchors for each label
        ious = tensor containing IoUs of labels with possible bounding box detections
               Elements: (IoU)
               Shape: (batch_size, label_size, num_pos_ancs)
        refs = tensor containing indexes of bounding box detections which were used
               to calculate the IoUs in the iou tensor.
               Elements: (bounding box detection index)
               Shape: (batch_size, label_size, num_pos_ancs)
    RETURNS:
        labels_assigned = assigns each label it can a unique bounding box based
                          on the highest IoUs
                          Elements: (assigned bounding box detection index)
                          Shape: (batch_size, label_size)
    """
    #Labels Loop
    assign_comp=False  # loop runs until _labels_assign_loop flags completion
    # tf.zeros(labels_size)-1: initialise every label as unassigned (-1 sentinel)
    loop_vars = [assign_comp, labels_size, num_pos_ancs,
                 ious[batch_num], refs[batch_num],
                 batch_num, tf.zeros(labels_size)-1]
    labels_assign_loop_out = tf.while_loop(_tf_bool, _labels_assign_loop,
                                           loop_vars, back_prop=False)
    img_labels_assigned = labels_assign_loop_out[-1]  # assignments for this image
    img_labels_assigned = tf.expand_dims(img_labels_assigned, axis=0)  # add batch axis
    #Add all possible anchors to tensor
    # First image initialises the output; later images concatenate along batch axis.
    labels_assigned = tf.cond( tf.equal(batch_num,0),
                               lambda: img_labels_assigned,
                               lambda: tf.concat([labels_assigned, img_labels_assigned],0) )
    return batch_num+1, batch_size, labels_size, num_pos_ancs, ious, refs, labels_assigned
def _labels_assign_loop(assign_comp, labels_size, num_pos_ancs, ious, refs, batch_num, img_labels_assigned):
    """
    One step of the greedy assignment: take the globally highest remaining IoU,
    and either assign its bounding box to its label or discard it if the box is
    already taken. Terminates when the best remaining IoU is <= 0.
    ARGS:
        assign_comp = condition for loop
        labels_size = limit for nested label loop
        num_pos_ancs = number of possible anchors for each label
        ious = tensor containing IoUs of labels with possible bounding box detections
               Elements: (IoU)
               Shape: (batch_size, label_size, num_pos_ancs)
        refs = tensor containing indexes of bounding box detections which were used
               to calculate the IoUs in the iou tensor.
               Elements: (bounding box detection index)
               Shape: (batch_size, label_size, num_pos_ancs)
    RETURNS:
        labels_assigned = assigns each label it can a unique bounding box based
                          on the highest IoUs
                          Elements: (assigned bounding box detection index)
                          Shape: (label_size)
    """
    tot_pos_ancs = labels_size*num_pos_ancs
    #Get max IoU value
    # NOTE(review): the scalar indexing ious[max_iou_ref] / refs[max_iou_ref]
    # and the tot_pos_ancs masks below assume `ious` and `refs` arrive here
    # flattened to 1-D of length labels_size*num_pos_ancs — confirm upstream.
    max_iou_ref = tf.argmax(ious, axis=0)
    max_iou = ious[max_iou_ref]
    max_iou_box = refs[max_iou_ref]
    #if max iou = 0 then assignation for image complete and break from loop
    assign_comp = tf.cond( tf.less_equal(max_iou,0),
                           lambda: True,
                           lambda: False )
    #check if bounding box already assigned (membership test against assignments)
    max_iou_box = tf.tile([max_iou_box], [labels_size])
    assigned = tf.equal(max_iou_box, img_labels_assigned)
    assigned = tf.reduce_sum(tf.cast(assigned, tf.float32))
    assigned = tf.equal(assigned,1.0)
    #If bounding box assigned then zero that iou so it is never picked again
    ious = tf.cond( assigned,
                    lambda: ious*_tf_zero_mask(max_iou_ref, tot_pos_ancs),
                    lambda: ious)
    #if box unassigned and IoU>0 assign it to that label in img_labels_assigned and zero all ious for that label
    assign_label_cond = tf.logical_and( tf.equal(assign_comp, False), tf.equal(assigned, False))
    img_labels_assigned, ious = tf.cond(assign_label_cond,
                                        lambda: _label_assign_function(max_iou_ref, max_iou_box,
                                                                       num_pos_ancs, labels_size,
                                                                       tot_pos_ancs, img_labels_assigned,
                                                                       ious),
                                        lambda: (img_labels_assigned, ious))
    return assign_comp, labels_size, num_pos_ancs, ious, refs, batch_num, img_labels_assigned
def _label_assign_function(max_iou_ref, max_iou_box, num_pos_ancs, labels_size, tot_pos_ancs, img_labels_assigned, ious ):
    """Record that `max_iou_box` is assigned to the label owning `max_iou_ref`,
    and zero out all of that label's remaining IoUs so it is not assigned again.
    Returns the updated (img_labels_assigned, ious) pair."""
    ancs_i64 = tf.cast(num_pos_ancs, tf.int64)
    size_i64 = tf.cast(labels_size, tf.int64)
    # The flat IoU index divided by the per-label anchor count gives the label.
    owner_label = max_iou_ref // ancs_i64
    # Write (box index + 1) into the owner's slot of the assignment vector;
    # the +1 offsets the -1 "unassigned" base so index 0 stays representable.
    img_labels_assigned = img_labels_assigned + _tf_one_mask(owner_label, size_i64, max_iou_box+1)
    # Zero the owner's whole span of IoUs — it can take no further boxes.
    ious = ious * _tf_zero_mask(owner_label*ancs_i64, tot_pos_ancs, ancs_i64)
    return img_labels_assigned, ious
#%% Cost Function
def yolo_cost(labels_assigned, obj_present, predictions, labels_ph, batch_size=1, lambda_coord=5, lambda_noobj=0.5):
    """
    Computes the total YOLO training loss: coordinate + class + objectness
    terms for assigned boxes, plus a no-object term for all other boxes.
    ARGS:
        labels_assigned = indices of bounding box predictions assigned to labels
                          Elements: (batch_num, label_num, assigned_pred_ind)
                          Shape: (num_assigned_labels, 3)
        obj_present = indices of bounding box predictions assigned/or with iou
                      over threshold. Elements: (batch_num, assigned_pred_ind)
                      Shape: (num_assigned_labels+num_labels_over_threshold, 2)
        predictions = outputted predictions from YOLOv3.
                      Shape: (num_batches, num_predicted_boxes, 4+1+num_classes)
        labels_ph = placeholder holding the ground-truth labels, indexed as
                    (label_num, batch_num) below
        lambda_coord = constant which weights loss from bounding box parameters
        lambda_noobj = constant which weights loss from unassigned bounding boxes.
    RETURNS:
        total_cost = total cost for forward pass of YOLO network
    """
    #Ensure gradient backpropagates into 'predictions' only
    labels_assigned = tf.stop_gradient(labels_assigned)
    #Gather the assigned bounding boxes and the labels they were assigned to
    assigned_pred = tf.gather_nd(predictions, labels_assigned[:,1:3])
    # labels_ph is indexed (label_num, batch_num), hence the swapped stack order
    assigned_labs_inds = tf.stack([labels_assigned[:,1],labels_assigned[:,0]],axis=1)
    assigned_labs = tf.gather_nd(labels_ph, assigned_labs_inds)
    #Calculate the cost of the bounding box predictions
    # sqrt of h/w so large boxes don't dominate the loss (YOLO convention)
    assigned_labs_hw = tf.sqrt(assigned_labs[:,2:4])
    assigned_pred_hw = tf.sqrt(assigned_pred[:,2:4])
    cost_hw = tf.reduce_sum((assigned_pred_hw - assigned_labs_hw)**2)
    cost_xy = tf.reduce_sum((assigned_pred[:,0:2] - assigned_labs[:,0:2])**2)
    #Calculate the cost of the class predictions using softmax cross entropy
    assigned_labs_cls = assigned_labs[:,4:]
    assigned_labs_cls = tf.stop_gradient(assigned_labs_cls)
    assigned_pred_cls = assigned_pred[:,5:]
    cost_cls = tf.nn.softmax_cross_entropy_with_logits_v2(labels = assigned_labs_cls,
                                                          logits = assigned_pred_cls)
    cost_cls = tf.reduce_sum(cost_cls)
    #Calculate the cost of objectness predictions for the assigned bounding boxes using log loss
    # NOTE(review): tf.log has no epsilon guard — an objectness of exactly 0
    # (or 1 in the no-object term below) yields inf/NaN; consider clipping.
    cost_obj = -tf.log(assigned_pred[:,4])
    cost_obj = tf.reduce_sum(cost_obj)
    assigned_cost = lambda_coord*(cost_xy + cost_hw) + cost_cls + cost_obj
    #Create tensor of indices for all predictions
    # 10647 = 3 anchors * (13^2 + 26^2 + 52^2) grid cells — presumably the
    # YOLOv3 box count for a 416x416 input; confirm against the network config.
    batch_range = tf.reshape(tf.expand_dims(tf.range(batch_size),axis=0),[batch_size,-1])
    batch_range = tf.expand_dims(tf.tile(batch_range,[1,10647]),axis=0)
    pred_range = tf.expand_dims(tf.reshape(tf.tile(tf.range(10647),[batch_size]),[batch_size,-1]),axis=0)
    noobj_present_ind = tf.stack([batch_range, pred_range],axis=3)
    #Using obj_present create a mask which removes indices from noobj_present_ind
    #if the bounding box was assigned or had an IoU over the IoU threshold with any object
    obj_present_mask=tf.SparseTensor(indices=tf.cast(obj_present,tf.int64),
                                     values=tf.zeros(tf.shape(obj_present)[0]),
                                     dense_shape=[batch_size,10647])
    obj_present_mask=tf.sparse_reorder(obj_present_mask)  # SparseTensor requires sorted indices
    obj_present_mask=tf.sparse_tensor_to_dense(obj_present_mask,1)  # default 1 -> keep; present boxes become 0
    obj_present_mask=tf.cast(obj_present_mask,bool)
    obj_present_mask=tf.expand_dims(obj_present_mask,axis=0)
    noobj_present_ind=tf.boolean_mask(noobj_present_ind, obj_present_mask)
    #Select the objectness values from unassigned bounding boxes
    #and calculate the cost using log loss
    objectness_ind = tf.ones([tf.shape(noobj_present_ind)[0],1],dtype=tf.int32)*4  # column 4 = objectness
    noobj_present_ind = tf.concat([noobj_present_ind, objectness_ind], axis=1)
    noobj_present_ind = tf.stop_gradient(noobj_present_ind) #Ensure gradient only goes into predictions
    noobj_present = tf.gather_nd(predictions, noobj_present_ind)
    unassigned_cost = -tf.log(1-noobj_present)
    unassigned_cost = tf.reduce_sum(unassigned_cost)
    unassigned_cost = lambda_noobj*unassigned_cost
    total_cost = assigned_cost + unassigned_cost
    return total_cost
#%%General TF functions
def yolo_non_max_suppression(predictions, max_boxes = 10, iou_threshold = 0.5):
    """
    Applies Non-max suppression (NMS) to set of boxes
    ARGS:
        predictions = tensor - shape (num_bounding_boxes, 5+num_classes)
        max_boxes = integer - maximum number of predicted boxes you'd like
        iou_threshold = float - IoU threshold used for NMS filtering
    RETURNS:
        filtered_predictions = tensor - shape (<=max_boxes, 5+num_classes)
    """
    boxes = predictions[:,:4]   # box coordinates
    scores = predictions[:,4]   # objectness score
    # BUG FIX: iou_threshold was accepted but never forwarded, so TF's
    # default (0.5) was silently used regardless of the argument.
    nms_indices = tf.image.non_max_suppression(boxes, scores, max_boxes,
                                               iou_threshold=iou_threshold)
    filtered_predictions = tf.gather(predictions, nms_indices)
    return filtered_predictions
def tf_iou(box1, box2, mode='hw'):
    """
    Elementwise IoU between two equal-length lists of boxes.
    ARGS:
        box1, box2 = tensor - containing box parameters and depending on mode:
                     Elements: (x,y,w,h) or (x_tpl, y_tpl, x_btr, y_btr)
                     Shape: (num_boxes, 4)
        mode = string - 'hw' converts centre+extent boxes to corners first;
               any other value treats the inputs as corner coordinates already
    RETURNS:
        tf_ious = tensor containing the IoUs of the inputs
                  Shape: (num_boxes)
    """
    box1 = tf.to_float(box1)
    box2 = tf.to_float(box2)
    if mode=='hw':
        # NOTE(review): corners are computed as centre +/- full w and h, i.e.
        # w,h are treated as HALF-extents (box of width 2w). If w,h are full
        # widths/heights this should be w/2, h/2 — confirm the label format.
        xc_1, yc_1, w_1, h_1 = tf.split(box1, 4, axis=1)
        xc_2, yc_2, w_2, h_2 = tf.split(box2, 4, axis=1)
        xtpl_1 = xc_1 - w_1
        ytpl_1 = yc_1 - h_1
        xbtr_1 = xc_1 + w_1
        ybtr_1 = yc_1 + h_1
        xtpl_2 = xc_2 - w_2
        ytpl_2 = yc_2 - h_2
        xbtr_2 = xc_2 + w_2
        ybtr_2 = yc_2 + h_2
    else:
        xtpl_1, ytpl_1, xbtr_1, ybtr_1 = tf.split(box1, 4, axis=1)
        xtpl_2, ytpl_2, xbtr_2, ybtr_2 = tf.split(box2, 4, axis=1)
    # Intersection rectangle; max(...,0) handles non-overlapping boxes
    xi1 = tf.maximum(xtpl_1,xtpl_2)
    yi1 = tf.maximum(ytpl_1,ytpl_2)
    xi2 = tf.minimum(xbtr_1,xbtr_2)
    yi2 = tf.minimum(ybtr_1,ybtr_2)
    inter_area = tf.maximum(yi2-yi1,0) * tf.maximum(xi2-xi1,0)
    # Calculate the Union area by using Formula: Union(A,B) = A + B - Inter(A,B)
    box1_area = (xbtr_1-xtpl_1) * (ybtr_1-ytpl_1)
    box2_area = (xbtr_2-xtpl_2) * (ybtr_2-ytpl_2)
    union_area = box1_area + box2_area - inter_area + 1e-10  # epsilon avoids 0/0
    # compute the IoU
    tf_ious = inter_area/union_area
    return tf_ious
def _tf_unique_2d(x):
    """
    Removes duplicate rows from a 2-D tensor by comparing every row against
    every other row (O(n^2) in rows), keeping the first occurrence of each.
    ARGS:
        X = shape: 2d tensor potentially with elements with the same value
    RETURNS:
        X = shape: 2d tensor with all values being unique
    """
    x_shape=tf.shape(x)
    # Build the full n*n grid of row pairs: x1_2[i*n+j] = x[j], x2_2[i*n+j] = x[i]
    x1=tf.tile(x,[1,x_shape[0]])
    x2=tf.tile(x,[x_shape[0],1])
    x1_2 = tf.reshape(x1,[x_shape[0]*x_shape[0],x_shape[1]])
    x2_2 = tf.reshape(x2,[x_shape[0]*x_shape[0],x_shape[1]])
    # cond[i][j] is True where row i equals row j (diagonal is always True)
    cond = tf.reduce_all(tf.equal(x1_2,x2_2),axis=1)
    cond = tf.reshape(cond,[x_shape[0],x_shape[0]])
    cond_shape = tf.shape(cond)
    cond_cast = tf.cast(cond,tf.int32)
    cond_zeros = tf.zeros(cond_shape,tf.int32)
    # r holds 1-based column indices per row; non-matching pairs are pushed
    # above the valid range (n+1) so reduce_min finds the FIRST matching index
    r = tf.range(x_shape[0])
    r = tf.add(tf.tile(r,[x_shape[0]]),1)
    r = tf.reshape(r,[x_shape[0],x_shape[0]])
    f1 = tf.multiply(tf.ones(cond_shape,tf.int32),x_shape[0]+1)
    f2 = tf.ones(cond_shape,tf.int32)
    cond_cast2 = tf.where(tf.equal(cond_cast,cond_zeros),f1,f2)
    r_cond_mul = tf.multiply(r,cond_cast2)
    r_cond_mul2 = tf.reduce_min(r_cond_mul,axis=1)  # first duplicate's index (1-based) per row
    r_cond_mul3,unique_idx = tf.unique(r_cond_mul2)  # one entry per distinct row
    r_cond_mul4 = tf.subtract(r_cond_mul3,1)  # back to 0-based row indices
    x=tf.gather(x,r_cond_mul4)
    return x
def _tf_zero_mask(zero_st, tensor_len, length=1):
    """Build a 1-D float mask of ones with a run of zeros.
    ARGS:
        zero_st = integer - element at which zeros start
        tensor_len = integer - total length of the mask
        length = integer - how many elements should be zero
    RETURNS:
        mask = tensor - ones with `length` zeros starting at `zero_st`
    """
    total = tf.cast(tensor_len, tf.int64)
    # Assemble the mask as [ones | zeros | ones] and join the pieces.
    pieces = [tf.ones(zero_st),
              tf.zeros(length),
              tf.ones(total - zero_st - length)]
    return tf.concat(pieces, axis=0)
def _tf_one_mask(one_st, tensor_len, value=1, length=1):
    """
    ARGS:
        one_st = integer - element at which the ones/given values start
        tensor_len = integer - how large is the tensor
        value = what number should the value be, default=1
        length = integer - how many elements should be one/the given value
    RETURNS:
        mask = tensor - zeros with elements of a given value at a specified position
    """
    tensor_len = tf.cast(tensor_len, tf.int64)
    top = tf.zeros(one_st)
    mid = tf.ones(length)
    bot = tf.zeros(tensor_len-one_st-length)
    mask = tf.concat([top,mid,bot],axis=0)
    mask = mask*value  # scale the run of ones to the requested value
    return mask
def _tf_bool(i, *args):
    # tf.while_loop condition: keep iterating while the completion flag `i` is False.
    return tf.equal(i, False)
def _tf_count(i, max_count=10, *args):
    """tf.while_loop condition: continue while counter `i` is below `max_count`."""
    keep_going = tf.less(i, max_count)
    return keep_going
|
{"/convert_darknet_weights.py": ["/yolo_net.py"], "/main.py": ["/yolo_net.py"]}
|
25,510
|
s-wheels/yolov3_icdarmlt
|
refs/heads/master
|
/anchor_boxes/k_means.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Nov 19 18:44:09 2018
@author: aegeus
"""
import csv
import operator
import scipy.cluster as sp
import numpy as np
# Column types of a localization CSV row (used by the sort_csv call below).
localization_data_types = (int, int, int, int, int, int, int, int, str, str)
# Collect the (width, height) of every ground-truth box from the sorted CSV.
boxes = []
with open("localization_sorted.csv") as f:
    for row in csv.reader(f):
        # Columns 3-6 hold the corner coordinates (x1, y1, x2, y2) as strings.
        x1, y1, x2, y2 = (int(v) for v in row[3:7])
        boxes.append([x2 - x1, y2 - y1])
boxes = np.array(boxes, dtype=float)
# Cluster the box dimensions into 9 anchor boxes via k-means and save them.
anchor_boxes, distortion_1 = sp.vq.kmeans(boxes, 9, iter=300)
np.savetxt("anchor_boxes.txt", anchor_boxes.astype(int), fmt='%i', delimiter=",")
#%%
def sort_csv(csv_filename, types, sort_columns):
    """Read csv_filename, coerce each row's columns with `types` (via convert),
    sort the rows by column index `sort_columns`, and write the result to
    localization_sorted.csv."""
    with open(csv_filename) as f:
        data = [convert(types, row) for row in csv.reader(f)]
    data.sort(key=operator.itemgetter(sort_columns))
    with open("localization_sorted.csv", 'w') as f:
        csv.writer(f).writerows(data)
def convert(convert_funcs, seq):
    """Apply each converter function to its paired item of `seq`.
    A converter of None passes the item through unchanged. Extra items on
    either side are dropped (zip semantics)."""
    converted = []
    for func, item in zip(convert_funcs, seq):
        converted.append(item if func is None else func(item))
    return converted
# Re-sort the raw localization data by column 7 (writes localization_sorted.csv).
sort_csv("training_localization_data_resized.txt", localization_data_types, 7)
#%%
|
{"/convert_darknet_weights.py": ["/yolo_net.py"], "/main.py": ["/yolo_net.py"]}
|
25,511
|
s-wheels/yolov3_icdarmlt
|
refs/heads/master
|
/anchor_boxes/k_means_split.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Nov 19 18:44:09 2018
@author: aegeus
"""
import csv
import operator
import scipy.cluster as sp
import numpy as np
# Column types of a localization CSV row (kept for the commented-out
# sort_csv call at the bottom of this file).
localization_data_types = (int, int, int, int, int, int, int, int, str, str)
# Total number of ground-truth words; the sorted dataset is split into three
# equally-sized ranges so small/medium/large boxes each get their own clusters.
word_total = 86183
# BUG FIX: the split point previously repeated the literal 86183 instead of
# using word_total, so updating the constant would have silently broken it.
split_point_1 = round(word_total/3)
split_point_2 = 2 * split_point_1
#Create three empty lists for the data, one per size split
split_1 = []
split_2 = []
split_3 = []
count = 0
with open("localization_sorted.csv") as f:
    for row in csv.reader(f):
        # Columns 3-6 hold (x1, y1, x2, y2) corner coordinates as strings.
        for i in range(3,7):
            row[i]=int(row[i])
        box_width = row[5]-row[3]
        box_height = row[6]-row[4]
        if count < split_point_1:
            split_1.append([box_width, box_height])
        elif count < split_point_2:
            split_2.append([box_width, box_height])
        else:
            split_3.append([box_width, box_height])
        count += 1
split_1 = np.array(split_1, dtype=float)
split_2 = np.array(split_2, dtype=float)
split_3 = np.array(split_3, dtype=float)
# k-means per split: 3 + 3 + 4 = 9 anchor boxes overall
anchor_boxes_1, distortion_1 = sp.vq.kmeans(split_1, 3, iter=300)
anchor_boxes_2, distortion_2 = sp.vq.kmeans(split_2, 3, iter=300)
anchor_boxes_3, distortion_3 = sp.vq.kmeans(split_3, 4, iter=300)
anchor_boxes = np.concatenate((np.round(anchor_boxes_1),np.round(anchor_boxes_2),np.round(anchor_boxes_3)))
np.savetxt("anchor_boxes_split.txt", anchor_boxes.astype(int), fmt='%i', delimiter=",")
#%%
def sort_csv(csv_filename, types, sort_columns):
    """Read csv_filename, coerce each row's columns with `types` (via convert),
    sort the rows by column index `sort_columns`, and write the result to
    localization_sorted.csv."""
    with open(csv_filename) as f:
        rows = [convert(types, row) for row in csv.reader(f)]
    rows.sort(key=operator.itemgetter(sort_columns))
    with open("localization_sorted.csv", 'w') as out:
        csv.writer(out).writerows(rows)
def convert(convert_funcs, seq):
    """Apply each converter function to its paired item of `seq`; a converter
    of None leaves the item untouched. Pairing follows zip semantics."""
    out = []
    for func, item in zip(convert_funcs, seq):
        out.append(item if func is None else func(item))
    return out
#sort_csv("training_localization_data_resized.txt", localization_data_types, 7)
#%%
|
{"/convert_darknet_weights.py": ["/yolo_net.py"], "/main.py": ["/yolo_net.py"]}
|
25,512
|
AlexPaivaBR/projeto
|
refs/heads/master
|
/routes/usuarios.py
|
from database.banco import Cadastrados
class Usuarios(object):
    """Registration and authentication against the 'usuarios' table of the
    Cadastrados database. All user-facing status strings are in Portuguese
    and are returned unchanged for the GUI to display."""
    def __init__(self, usuario = "", senha = ""):
        self.usuario = usuario  # login name
        self.senha = senha      # password (stored/compared in plaintext — see NOTE below)

    def cadastrarUsuario(self):
        """Validate and insert a new user; returns a status string.

        BUG FIX: the commit/close that followed the try/except was unreachable
        (every branch returns first), so the cursor leaked. Cleanup now runs
        in a finally block, and the bare 'except:' was narrowed to Exception.
        NOTE(review): passwords are printed and stored in plaintext — they
        should be hashed; left unchanged here to preserve behavior.
        """
        banco = Cadastrados()
        c = banco.conexao.cursor()
        novoUsuario = self.usuario
        novaSenha = self.senha
        print("\nCriando cadastro...")
        print("Novo usuário: {}".format(novoUsuario))
        print("Nova senha: {}".format(novaSenha))
        try:
            localizarUsuario = ('SELECT * FROM usuarios WHERE usuario = ?')
            c.execute(localizarUsuario, [(novoUsuario)])
            print("\nLocalizando usuário existente...")
            verificarCadastro = c.fetchall()
            if verificarCadastro:
                print("\nUsuário localizado!")
                return "Usuário já existe!"
            elif novoUsuario == "" and novaSenha == "":
                return "Preencha o formulário"
            elif novoUsuario == "":
                return "Insira um usuário"
            elif novaSenha == "":
                return "Insira uma senha"
            elif len(novaSenha) < 8:
                return "Utilize uma senha com: \n- 8 caracteres ou mais"
            else:
                inserir = ('INSERT INTO usuarios(usuario, senha) VALUES (?, ?)')
                c.execute(inserir, [(novoUsuario), (novaSenha)])
                banco.conexao.commit()
                print("\nUsuário criado!")
                return "Conta criada!"
        except Exception:
            return "Error"
        finally:
            c.close()  # always release the cursor (previously unreachable)

    def autenticarUsuario(self):
        """Check the stored usuario/senha against the database.
        Returns True on success, otherwise a Portuguese status string."""
        banco = Cadastrados()
        c = banco.conexao.cursor()
        usuario = self.usuario
        senha = self.senha
        print("Autenticando usuário...")
        print("Usuario: {}".format(usuario))
        print("Senha: {}\n".format(senha))
        try:
            localizarUsuario = ('SELECT * FROM usuarios WHERE usuario = ? and senha = ?')
            c.execute(localizarUsuario, [(usuario), (senha)])
            print("Localizando usuário existente...")
            verificarUsuario = c.fetchall()
            print(verificarUsuario)
            if verificarUsuario:
                print("Verificado com sucesso!")
                return True
            elif usuario == "" and senha == "":
                return "Preencha o formulário"
            elif usuario == "":
                return "Insira um usuário"
            elif senha == "":
                return "Insira uma senha"
            else:
                return "Usuário não encontrado."
        except Exception:
            return "Error"
        finally:
            c.close()  # cursor was previously never closed on this path
|
{"/routes/usuarios.py": ["/database/banco.py"], "/principal.py": ["/routes/janelas.py"], "/routes/janelas.py": ["/routes/usuarios.py", "/routes/estilo.py"], "/testando.py": ["/routes/estilo.py"], "/routes/fichas.py": ["/database/banco.py"]}
|
25,513
|
AlexPaivaBR/projeto
|
refs/heads/master
|
/routes/menubar.py
|
def menus(self, master):
    """Build the application's menu bar (Usuário / Editar / Ajuda) and attach
    it to self.master. Only 'Sair' currently has a command bound."""
    bar = Menu(self.master)
    # User menu: login / register / quit
    user_menu = Menu(bar, tearoff=0)
    user_menu.add_command(label="Login")
    user_menu.add_command(label="Cadastro")
    user_menu.add_separator()
    user_menu.add_command(label="Sair", command=self.master.quit)
    bar.add_cascade(label="Usuário", menu=user_menu)
    # Edit menu: create / alter / delete character sheets
    edit_menu = Menu(bar, tearoff=0)
    edit_menu.add_command(label="Criar Ficha")
    edit_menu.add_separator()
    edit_menu.add_command(label="Alterar Ficha")
    edit_menu.add_command(label="Deletar Ficha")
    bar.add_cascade(label="Editar", menu=edit_menu)
    # Help menu
    help_menu = Menu(bar, tearoff=0)
    help_menu.add_command(label="Ajuda")
    help_menu.add_command(label="Sobre")
    bar.add_cascade(label="Ajuda", menu=help_menu)
    self.master.config(menu=bar)
|
{"/routes/usuarios.py": ["/database/banco.py"], "/principal.py": ["/routes/janelas.py"], "/routes/janelas.py": ["/routes/usuarios.py", "/routes/estilo.py"], "/testando.py": ["/routes/estilo.py"], "/routes/fichas.py": ["/database/banco.py"]}
|
25,514
|
AlexPaivaBR/projeto
|
refs/heads/master
|
/principal.py
|
from routes.janelas import *
from tkinter import *
if __name__ == "__main__":
    # Build the root window, apply fixed geometry/branding, and start the app.
    root = Tk()
    root.iconbitmap("img/spqr-icon.ico")
    root.title("< SPQR > Gerenciador de fichas")
    root.resizable(width=False, height=False)  # fixed-size window
    root.geometry("600x600+500+100")  # 600x600 px at screen offset (500,100)
    root["bg"] = "#330c50"
    # NOTE(review): JanelaInicial must be provided by routes.janelas (star
    # import above) — confirm it exists there; the visible classes in that
    # module are named JanelaPrincipal/JanelaCadastro/etc.
    programa = JanelaInicial(root)
    root.mainloop()
|
{"/routes/usuarios.py": ["/database/banco.py"], "/principal.py": ["/routes/janelas.py"], "/routes/janelas.py": ["/routes/usuarios.py", "/routes/estilo.py"], "/testando.py": ["/routes/estilo.py"], "/routes/fichas.py": ["/database/banco.py"]}
|
25,515
|
AlexPaivaBR/projeto
|
refs/heads/master
|
/routes/janelas.py
|
from routes.usuarios import Usuarios
from routes.estilo import Estilo
from tkinter import *
from tkinter import messagebox
from tkinter import PhotoImage
from tkinter import ttk
class JanelaFicha():
    """Skeleton window for viewing a character sheet; widgets not implemented yet."""
    def __init__(self, master=None):
        # Parent widget this window attaches to.
        self.master = master
        # Copy the shared style attributes (fonts/colours) onto this instance.
        Estilo.__init__(self)
        self.criarWidgets()

    def criarWidgets(self):
        """Placeholder — no widgets built yet."""
        pass

    def iniciarProcedimento(self):
        """Placeholder — no procedure implemented yet."""
        pass
class JanelaDelecao():
    """Skeleton window for deleting a character sheet; widgets not implemented yet."""
    def __init__(self, master=None):
        # Parent widget this window attaches to.
        self.master = master
        # Copy the shared style attributes (fonts/colours) onto this instance.
        Estilo.__init__(self)
        self.criarWidgets()

    def criarWidgets(self):
        """Placeholder — no widgets built yet."""
        pass

    def iniciarProcedimento(self):
        """Placeholder — no procedure implemented yet."""
        pass
class JanelaAlteracao():
    """Skeleton window for altering a character sheet; widgets not implemented yet."""
    def __init__(self, master=None):
        # Parent widget this window attaches to.
        self.master = master
        # Copy the shared style attributes (fonts/colours) onto this instance.
        Estilo.__init__(self)
        self.criarWidgets()

    def criarWidgets(self):
        """Placeholder — no widgets built yet."""
        pass

    def iniciarProcedimento(self):
        """Placeholder — no procedure implemented yet."""
        pass
class JanelaCriacao():
    """Character-sheet creation form: name, age, race, personality and history
    inputs plus create/clear/cancel buttons."""
    def __init__(self, master=None):
        # Atributos
        self.master = master
        self.raca = IntVar()  # selected race: 1=Semideus, 2=Legado, 0=none
        # Copy the shared style attributes (fonts/colours) onto this instance.
        Estilo.__init__(self)
        self.criarWidgets()

    def criarWidgets(self):
        """Build and lay out every widget of the creation form."""
        # One container frame per form row.
        self.frmNome = Frame(self.master, bg=self.corFundo)
        self.frmIdade = Frame(self.master, bg=self.corFundo)
        self.frmRaca = Frame(self.master, bg=self.corFundo)
        self.frmDivindade = Frame(self.master, bg=self.corFundo)
        self.frmPersonalidade = Frame(self.master, bg=self.corFundo)
        self.frmHistoria = Frame(self.master, bg=self.corFundo)
        self.frmMensagem = Frame(self.master, bg=self.corFundo)
        self.frmCriar = Frame(self.master, bg=self.corFundo)
        self.frmBotoes = Frame(self.master, bg=self.corFundo)
        # Field labels
        self.lblNome = Label(self.frmNome, text="Nome")
        self.lblNome["font"] = self.fontePadrao
        self.lblNome["fg"] = self.corFrente
        self.lblNome["bg"] = self.corFundo
        self.lblIdade = Label(self.frmIdade, text="Idade")
        self.lblIdade["font"] = self.fontePadrao
        self.lblIdade["fg"] = self.corFrente
        self.lblIdade["bg"] = self.corFundo
        self.lblRaca = Label(self.frmRaca, text="Raça")
        self.lblRaca["font"] = self.fontePadrao
        self.lblRaca["fg"] = self.corFrente
        self.lblRaca["bg"] = self.corFundo
        self.lblPersonalidade = Label(self.frmPersonalidade, text="Personalidade")
        self.lblPersonalidade["font"] = self.fontePadrao
        self.lblPersonalidade["fg"] = self.corFrente
        self.lblPersonalidade["bg"] = self.corFundo
        self.lblHistoria = Label(self.frmHistoria, text="História")
        self.lblHistoria["font"] = self.fontePadrao
        self.lblHistoria["fg"] = self.corFrente
        self.lblHistoria["bg"] = self.corFundo
        # Input widgets
        self.entNome = Entry(self.frmNome)
        self.spbIdade = Spinbox(self.frmIdade, width=5)
        # Race radio buttons, both bound to the shared self.raca IntVar
        self.rdbSemideus = Radiobutton(self.frmRaca, text="Semideus", value=1, variable=self.raca, selectcolor="black")
        self.rdbSemideus["activebackground"] = self.corFrente
        self.rdbSemideus["activeforeground"] = self.corFundo
        self.rdbSemideus["fg"] = self.corFrente
        self.rdbSemideus["bg"] = self.corFundo
        self.rdbSemideus["command"] = self.selecionarRaca
        self.rdbLegado = Radiobutton(self.frmRaca, text="Legado", value=2, variable=self.raca, selectcolor="black")
        self.rdbLegado["activebackground"] = self.corFrente
        self.rdbLegado["activeforeground"] = self.corFundo
        self.rdbLegado["fg"] = self.corFrente
        self.rdbLegado["bg"] = self.corFundo
        self.rdbLegado["command"] = self.selecionarRaca
        # Multi-line text fields for personality and history
        self.txtPersonalidade = Text(self.frmPersonalidade, width=20, height=5, wrap=WORD)
        self.txtHistoria = Text(self.frmHistoria, width=20, height=5, wrap=WORD)
        self.lblMensagem = Label(self.frmMensagem, text="Preencha o formulário")
        self.lblMensagem["font"] = self.fontePadrao
        self.lblMensagem["fg"] = self.corFrente
        self.lblMensagem["bg"] = self.corFundo
        # Action buttons (btnCriar has no text/command bound yet)
        self.btnCriar = Button(self.frmCriar)
        self.btnCriar["fg"] = self.corFrente
        self.btnCriar["bg"] = self.corFundo
        self.btnLimpar = Button(self.frmBotoes)
        self.btnLimpar["text"] = "Limpar"
        self.btnLimpar["fg"] = self.corFrente
        self.btnLimpar["bg"] = self.corFundo
        self.btnLimpar["command"] = self.limparCriacao
        self.btnCancelar = Button(self.frmBotoes)
        self.btnCancelar["text"] = "Cancelar"
        self.btnCancelar["fg"] = self.corFrente
        self.btnCancelar["bg"] = self.corFundo
        self.btnCancelar["command"] = self.cancelarCriacao
        # Grid layout: frames stacked top to bottom
        self.frmNome.grid(column=0, row=0)
        self.frmIdade.grid(column=0, row=1)
        self.frmRaca.grid(column=0, row=2)
        self.frmDivindade.grid(column=0, row=3)
        self.frmPersonalidade.grid(column=0, row=4, pady=5)
        self.frmHistoria.grid(column=0, row=5, pady=5)
        self.frmCriar.grid(column=0, row=6)
        self.frmMensagem.grid(column=0, row=7)
        self.frmBotoes.grid(column=0, row=8)
        self.lblNome.grid(sticky = W)
        self.entNome.grid()
        self.lblIdade.grid(sticky = W)
        self.spbIdade.grid()
        self.lblRaca.grid(column=0, row=0, sticky = W)
        self.rdbSemideus.grid(column=0, row=1)
        self.rdbLegado.grid(column=1, row=1)
        self.lblPersonalidade.grid(sticky = W)
        self.txtPersonalidade.grid()
        self.lblHistoria.grid(sticky = W)
        self.txtHistoria.grid()
        self.lblMensagem.grid()
        self.btnCriar.grid()
        self.btnCancelar.grid(sticky = W)
        self.btnLimpar.grid(sticky = E)

    def limparCriacao(self):
        """Reset every input field of the form.

        BUG FIX: the original called delete() with no arguments — Entry,
        Spinbox and Text all require a range — and called delete() on the
        Radiobuttons, which have no such method, so every invocation raised.
        The radio selection is cleared by resetting the shared IntVar.
        """
        self.entNome.delete(0, END)
        self.spbIdade.delete(0, END)
        self.raca.set(0)  # deselect both race radio buttons
        self.txtPersonalidade.delete("1.0", END)
        self.txtHistoria.delete("1.0", END)

    def cancelarCriacao(self):
        """Hide the whole creation form by ungridding every widget."""
        self.frmNome.grid_forget()
        self.frmIdade.grid_forget()
        self.frmRaca.grid_forget()
        self.frmDivindade.grid_forget()
        self.frmPersonalidade.grid_forget()
        self.frmHistoria.grid_forget()
        self.frmCriar.grid_forget()
        self.frmMensagem.grid_forget()
        self.lblNome.grid_forget()
        self.entNome.grid_forget()
        self.lblIdade.grid_forget()
        self.spbIdade.grid_forget()
        self.lblRaca.grid_forget()
        self.rdbSemideus.grid_forget()
        self.rdbLegado.grid_forget()
        self.lblPersonalidade.grid_forget()
        self.txtPersonalidade.grid_forget()
        self.lblHistoria.grid_forget()
        self.txtHistoria.grid_forget()
        self.lblMensagem.grid_forget()
        self.btnCriar.grid_forget()

    def selecionarRaca(self):
        """Show which race value was selected.
        NOTE(review): a brand-new Label is created on every selection instead
        of updating one, so labels accumulate — consider reusing a single Label.
        """
        self.selecionado = "Você selecionou a " + str(self.raca.get())
        self.label = Label(self.master, text=self.selecionado)
        self.label.grid()
class JanelaPrincipal():
def __init__(self, master=None):
# Atributos
self.master = master
# Lista de estilo
Estilo.__init__(self)
self.criarWidgets()
def criarWidgets(self):
self.frmLogo = Frame(self.master, bg=self.corFundo)
self.frmBarra = Frame(self.master, bg=self.corFundo)
self.frmMensagem = Frame(self.master, bg=self.corFundo)
# Criando IMG de Logo
imgLogo = PhotoImage(file="img/spqr.png")
self.lblLogo = Label(self.frmLogo, image=imgLogo, bg=self.corFundo)
self.lblLogo.image = imgLogo
self.btnCriacao = Button(self.frmBarra, text="Criar", cursor="hand2", width=10, height=1)
self.btnCriacao["font"] = self.fontePadrao
self.btnCriacao["relief"] = RIDGE
self.btnCriacao["fg"] = self.corFundo
self.btnCriacao["bg"] = self.corFrente
self.btnCriacao["activeforeground"] = self.corFundo
self.btnCriacao["activebackground"] = self.corFrente
self.btnCriacao["command"] = self.irCriacao
self.btnAlteracao = Button(self.frmBarra, text="Alterar", cursor="hand2", width=10, height=1)
self.btnAlteracao["font"] = self.fontePadrao
self.btnAlteracao["relief"] = RIDGE
self.btnAlteracao["fg"] = self.corFundo
self.btnAlteracao["bg"] = self.corFrente
self.btnAlteracao["activeforeground"] = self.corFundo
self.btnAlteracao["activebackground"] = self.corFrente
self.btnAlteracao["command"] = self.irAlteracao
self.btnDelecao = Button(self.frmBarra, text="Deletar", cursor="hand2", width=10, height=1)
self.btnDelecao["font"] = self.fontePadrao
self.btnDelecao["relief"] = RIDGE
self.btnDelecao["fg"] = self.corFundo
self.btnDelecao["bg"] = self.corFrente
self.btnDelecao["activeforeground"] = self.corFundo
self.btnDelecao["activebackground"] = self.corFrente
self.btnDelecao["command"] = self.irDelecao
self.lblMensagem = Label(self.frmMensagem, text="Escolha uma opção")
self.lblMensagem["font"] = self.fontePadrao
self.lblMensagem["justify"] = CENTER
self.lblMensagem["fg"] = self.corFrente
self.lblMensagem["bg"] = self.corFundo
self.lblMensagem["activeforeground"] = self.corFrente
self.lblMensagem["activebackground"] = self.corFundo
# Empacotando widgets de FRM
self.frmLogo.grid(column=0, row=0, padx=180, pady=10)
self.frmBarra.grid(column=0, row=1, pady=10)
self.frmMensagem.grid(column=0, row=2)
self.lblLogo.grid(column=0, row=0)
self.btnCriacao.grid(column=0, row=0, padx=5)
self.btnAlteracao.grid(column=1, row=0, padx=5)
self.btnDelecao.grid(column=2, row=0, padx=5)
self.lblMensagem.grid(pady=5)
self.btnCriacao.bind("<Enter>", lambda msgCriar: self.mostrarDescricao("Clique para criar uma ficha"))
self.btnAlteracao.bind("<Enter>", lambda msgAlterar: self.mostrarDescricao("Clique para alterar uma ficha"))
self.btnDelecao.bind("<Enter>", lambda msgDeletar: self.mostrarDescricao("Clique para deletar uma ficha"))
def mostrarDescricao(self, msg):
self.lblMensagem["text"] = msg
def irCriacao(self):
self.limparJanela()
# Chamando a class JanelaCriacao
criacao = JanelaCriacao()
def irAlteracao(self):
self.limparJanela()
# Chamando a class JanelaCriacao
criacao = JanelaCriacao()
def irDelecao(self):
self.limparJanela()
delecao = JanelaDelecao()
def irHistorico(self):
pass
def limparJanela(self):
    """Remove every widget of this screen from the grid layout."""
    # Frames first, then the labels/buttons they contain.
    widgets = (
        self.frmLogo, self.frmBarra, self.frmMensagem,
        self.lblLogo, self.btnCriacao, self.btnAlteracao,
        self.btnDelecao, self.lblMensagem,
    )
    for widget in widgets:
        widget.grid_forget()
class JanelaAjuda():
    """Help screen (widgets are not implemented yet)."""

    def __init__(self, master=None):
        # Keep a handle to the parent container.
        self.master = master
        # Apply the shared colour/font palette from Estilo.
        Estilo.__init__(self)
        self.criarWidgets()

    def criarWidgets(self):
        """Placeholder: the help widgets are not built yet."""
        pass
class JanelaSobre():
    """'About' screen: a bar with buttons for lore topics (história, raças, classes)."""

    def __init__(self, master=None):
        # Parent container.
        self.master = master
        # Apply the shared colour/font palette from Estilo.
        Estilo.__init__(self)
        self.criarWidgets()

    def criarWidgets(self):
        """Build the topic bar and its three (still unlabelled) buttons."""
        self.barra = Frame(self.master)
        self.barra.grid(column=0)
        self.btnHistoria = Button(self.barra)
        self.btnRacas = Button(self.barra)
        self.btnClasses = Button(self.barra)
        # Lay the buttons out side by side, left to right.
        for coluna, botao in enumerate(
                (self.btnHistoria, self.btnRacas, self.btnClasses)):
            botao.grid(column=coluna)

    def iniciarProcedimento(self):
        """Placeholder for future behaviour."""
        pass
class JanelaCadastro():
    """Pop-up (Toplevel) account-registration window.

    Collects a username and a password typed twice; on submit the data is
    handed to the Usuarios model for persistence.
    """

    def __init__(self):
        # Attributes
        self.cadastro = Toplevel()
        self.janela = []
        self.usuario = StringVar()
        self.senha = StringVar()
        self.senha2 = StringVar()
        # Shared style palette/fonts (Estilo.__init__ called unbound on purpose)
        Estilo.__init__(self)
        # Keep a handle to the window for later management
        self.janela.append(self.cadastro)
        self.configurarJanela()

    def configurarJanela(self):
        """Configure window chrome (icon, size, title, close handler), then build widgets."""
        self.cadastro.iconbitmap("img/spqr-icon.ico")
        self.cadastro.resizable(width=False, height=False)
        # Route the window-manager close button through sairCadastro
        self.cadastro.protocol('WM_DELETE_WINDOW', self.sairCadastro)
        self.cadastro.title("< SPQR > Gerenciador de Fichas")
        self.cadastro.geometry("300x300+650+200")
        self.cadastro["bg"] = "#330c50"
        self.criarWidgets()

    def criarWidgets(self):
        """Create and grid the username/password form widgets."""
        self.lblUsuario = Label(self.cadastro, text="Usuário")
        self.lblUsuario["font"] = self.fontePadrao
        self.lblUsuario["fg"] = self.corFrente
        self.lblUsuario["bg"] = self.corFundo
        # Creating the username entry
        self.entUsuario = Entry(self.cadastro, textvariable=self.usuario, width=20)
        self.entUsuario["relief"] = RIDGE
        self.entUsuario["justify"] = CENTER
        # Creating the first password label + entry (masked)
        self.lblSenha1 = Label(self.cadastro, text="Senha")
        self.lblSenha1["font"] = self.fontePadrao
        self.lblSenha1["fg"] = self.corFrente
        self.lblSenha1["bg"] = self.corFundo
        self.entSenha1 = Entry(self.cadastro, textvariable=self.senha, show="*", width=20)
        self.entSenha1["relief"] = RIDGE
        self.entSenha1["justify"] = CENTER
        # Creating the confirmation password label + entry (masked)
        self.lblSenha2 = Label(self.cadastro, text="Repita a Senha")
        self.lblSenha2["font"] = self.fontePadrao
        self.lblSenha2["fg"] = self.corFrente
        self.lblSenha2["bg"] = self.corFundo
        self.entSenha2 = Entry(self.cadastro, textvariable=self.senha2, show="*", width=20)
        self.entSenha2["relief"] = RIDGE
        self.entSenha2["justify"] = CENTER
        # Creating the submit button
        self.btnCadastro = Button(self.cadastro, text="Cadastro", cursor="hand2", width=10, height=1)
        self.btnCadastro["font"] = self.fontePadrao
        self.btnCadastro["relief"] = RIDGE
        self.btnCadastro["fg"] = self.corFundo
        self.btnCadastro["bg"] = self.corFrente
        self.btnCadastro["activeforeground"] = self.corFundo
        self.btnCadastro["activebackground"] = self.corFrente
        self.btnCadastro["command"] = self.cadastrarUsuario
        # Status label used for validation feedback
        self.lblMensagem = Label(self.cadastro, text="Preencha o formulário")
        self.lblMensagem["font"] = self.fontePadrao
        self.lblMensagem["fg"] = self.corFrente
        self.lblMensagem["bg"] = self.corFundo
        self.lblMensagem["activeforeground"] = self.corFrente
        self.lblMensagem["activebackground"] = self.corFundo
        # Grid layout, top to bottom
        self.lblUsuario.grid(column=0, row=0, pady=10, padx=120)
        self.entUsuario.grid(column=0, row=1)
        self.lblSenha1.grid(column=0, row=2, pady=10)
        self.entSenha1.grid(column=0, row=3, padx=20)
        self.lblSenha2.grid(column=0, row=4, pady=10)
        self.entSenha2.grid(column=0, row=5, padx=20)
        self.btnCadastro.grid(column=0, row=6, pady=20)
        self.lblMensagem.grid(column=0, row=7)

    def sairCadastro(self):
        """Close the registration window."""
        self.cadastro.destroy()
        self.cadastro.update()

    def cadastrarUsuario(self):
        """Validate the two passwords match and persist the new account via Usuarios."""
        cadastro = Usuarios()
        novoUsuario = self.usuario.get()
        novaSenha = self.senha.get()
        confirmaSenha = self.senha2.get()
        # Usernames are stored lower-case
        novoUsuario = novoUsuario.lower()
        if novaSenha != confirmaSenha:
            self.lblMensagem["text"] = "Senha incorreta!"
        else:
            cadastro.usuario = novoUsuario
            cadastro.senha = novaSenha
            print(cadastro.usuario)
            print(cadastro.senha)
            retorno = cadastro.cadastrarUsuario()
            # Any message other than success is shown inline; success pops a dialog
            if retorno != "Conta criada!":
                self.lblMensagem["text"] = retorno
            else:
                messagebox.showwarning("Cadastro", retorno)
class JanelaLogin():
    """Login screen: logo, username/password form, register link and a back link."""

    def __init__(self, master=None):
        # Attributes
        self.master = master
        self.usuario = StringVar()
        self.senha = StringVar()
        # Shared style palette/fonts
        Estilo.__init__(self)
        # Build the widgets
        self.criarWidgets()

    def criarWidgets(self):
        """Create and grid every widget of the login screen."""
        # Creating frames
        self.frmLogo = Frame(self.master, bg=self.corFundo)
        self.frmLogin = Frame(self.master, bd=5, relief=RIDGE, bg=self.corContainer)
        self.frmBarra = Frame(self.master, bg=self.corFundo)
        # Creating the logo image (a reference is kept so Tk doesn't GC it)
        imgLogo = PhotoImage(file="img/spqr.png")
        self.lblLogo = Label(self.frmLogo, image=imgLogo, bg=self.corFundo)
        self.lblLogo.image = imgLogo
        # Creating the user icon
        imgUsuario = PhotoImage(file="img/user-icon2.png")
        self.lblUsuario = Label(self.frmLogin, image=imgUsuario, bg=self.corContainer)
        self.lblUsuario.image = imgUsuario
        # Creating the username entry
        self.entUsuario = Entry(self.frmLogin, bd=5, textvariable=self.usuario, width=10)
        self.entUsuario["font"] = self.fonteEntry
        self.entUsuario["relief"] = RIDGE
        self.entUsuario["justify"] = CENTER
        # Creating the password icon
        imgSenha = PhotoImage(file="img/password-icon2.png")
        self.lblSenha = Label(self.frmLogin, image=imgSenha, bg=self.corContainer)
        self.lblSenha.image = imgSenha
        # Creating the password entry (masked)
        self.entSenha = Entry(self.frmLogin, bd=5, textvariable=self.senha, show="*", width=10)
        self.entSenha["font"] = self.fonteEntry
        self.entSenha["relief"] = RIDGE
        self.entSenha["justify"] = CENTER
        # Creating the login button
        self.btnLogin = Button(self.frmLogin, bd=5, text="LOGIN", cursor="hand2", width=20, height=2)
        self.btnLogin["font"] = self.fontePadrao
        self.btnLogin["relief"] = RIDGE
        self.btnLogin["fg"] = self.corFrente
        self.btnLogin["bg"] = self.corFundo
        self.btnLogin["activeforeground"] = self.corFrente
        self.btnLogin["activebackground"] = self.corFundo
        self.btnLogin["command"] = self.logarUsuario
        # Creating the register link (a clickable label)
        self.lblMensagem = Label(self.frmLogin, text="Cadastre-se", cursor="hand2")
        self.lblMensagem["font"] = self.fontePadrao
        self.lblMensagem["fg"] = self.corFundo
        self.lblMensagem["bg"] = self.corContainer
        self.lblMensagem["activeforeground"] = self.corFundo
        self.lblMensagem["activebackground"] = self.corContainer
        # Creating the "back to start" label
        self.lblVoltar = Label(self.frmBarra, text="Voltar ao início", cursor="hand2")
        self.lblVoltar["font"] = self.fontePadrao
        self.lblVoltar["fg"] = self.corFrente
        self.lblVoltar["bg"] = self.corFundo
        # Creating spacer labels (visual padding around the form)
        self.lblDivisor1 = Label(self.frmLogin, text="      ", bg=self.corContainer)
        self.lblDivisor2 = Label(self.frmLogin, text="      ", bg=self.corContainer)
        self.lblDivisor3 = Label(self.frmLogin, text="      ", bg=self.corContainer)
        self.lblDivisor4 = Label(self.frmLogin, text="      ", bg=self.corContainer)
        # Packing the frames
        self.frmLogo.grid(column=0, row=0)
        self.frmLogin.grid(column=0, row=1, padx=170)
        self.frmBarra.grid(column=0, row=2)
        # Packing the user widgets
        self.lblLogo.grid()
        self.lblUsuario.grid(column=1, row=1, sticky=W, pady=5)
        self.entUsuario.grid(column=1, row=1, sticky=E)
        # Packing the password widgets
        self.lblSenha.grid(column=1, row=2, sticky=W, pady=5)
        self.entSenha.grid(column=1, row=2, stick=E)
        self.btnLogin.grid(column=1, row=4, pady=10)
        self.lblMensagem.grid(column=1, row=5, pady=5)
        self.lblVoltar.grid(column=0, row=0, pady=115)
        # Labels act as buttons via mouse-click bindings
        self.lblMensagem.bind("<Button-1>", lambda ir: self.irCadastro())
        self.lblVoltar.bind("<Button-1>", lambda voltar: self.voltarInicio())
        self.lblDivisor1.grid(column=0, row=0)
        self.lblDivisor2.grid(column=2, row=0)
        self.lblDivisor3.grid(column=2, row=5)
        self.lblDivisor4.grid(column=0, row=5)

    def logarUsuario(self):
        """Authenticate the typed credentials via Usuarios and open the main screen."""
        login = Usuarios()
        usuario = self.usuario.get()
        senha = self.senha.get()
        # Usernames are stored lower-case
        usuario = usuario.lower()
        login.usuario = usuario
        login.senha = senha
        print("Evento de Login - OK")
        print(login.usuario)
        print(login.senha + "\n")
        retorno = login.autenticarUsuario()
        # autenticarUsuario returns True on success, otherwise a message string
        if retorno == True:
            self.irPrincipal()
        else:
            messagebox.showwarning("Login", retorno)

    def irCadastro(self):
        """Open the registration pop-up window."""
        cadastro = JanelaCadastro()

    def irPrincipal(self):
        """Clear this screen and open the main screen."""
        self.limparJanela()
        principal = JanelaPrincipal()

    def voltarInicio(self):
        """Clear this screen and go back to the start screen."""
        self.limparJanela()
        inicio = JanelaInicial()

    def limparJanela(self):
        """Remove every widget of this screen from the grid layout."""
        # Unpacking the frames
        self.frmLogo.grid_forget()
        self.frmLogin.grid_forget()
        self.frmBarra.grid_forget()
        # Unpacking the user widgets
        self.lblLogo.grid_forget()
        self.lblUsuario.grid_forget()
        self.entUsuario.grid_forget()
        # Unpacking the password widgets
        self.lblSenha.grid_forget()
        self.entSenha.grid_forget()
        self.btnLogin.grid_forget()
        self.lblMensagem.grid_forget()
        self.lblVoltar.grid_forget()
        # Unpacking the spacer widgets
        self.lblDivisor1.grid_forget()
        self.lblDivisor2.grid_forget()
        self.lblDivisor3.grid_forget()
        self.lblDivisor4.grid_forget()
class JanelaInicial():
    """Start screen: logo, app description and a button bar (Login / Cadastro / Sobre / Ajuda)."""

    def __init__(self, master=None):
        # Attributes
        self.master = master
        # Shared style palette/fonts
        Estilo.__init__(self)
        self.criarWidgets()

    def criarWidgets(self):
        """Create and grid every widget of the start screen."""
        # Creating frames
        self.frmLogo = Frame(self.master, bg=self.corFundo)
        self.frmDescricao = Frame(self.master, bg=self.corFundo)
        self.frmBarra = Frame(self.master, bg=self.corFundo)
        self.frmMensagem = Frame(self.master, bg=self.corFundo)
        # Creating the logo image (a reference is kept so Tk doesn't GC it)
        imgLogo = PhotoImage(file="img/spqr.png")
        self.lblLogo = Label(self.frmLogo, image=imgLogo, bg=self.corFundo)
        self.lblLogo.image = imgLogo
        # Creating the title label
        self.lblTitulo = Label(self.frmDescricao)
        self.lblTitulo["font"] = self.fontePadrao
        self.lblTitulo["fg"] = self.corFrente
        self.lblTitulo["bg"] = self.corFundo
        self.lblTitulo["text"] = "ACAMPAMENTO JÚPITER"
        # Creating the description label
        self.lblDescricao = Label(self.frmDescricao)
        self.lblDescricao["font"] = ("Arial", "12")
        self.lblDescricao["fg"] = self.corFrente
        self.lblDescricao["bg"] = self.corFundo
        self.lblDescricao["text"] = "Sistema de Gerenciamento de Fichas"
        # Creating the Login button
        self.btnLogin = Button(self.frmBarra, text="Login", cursor="hand2", width=10, height=1)
        self.btnLogin["font"] = self.fontePadrao
        self.btnLogin["relief"] = RIDGE
        self.btnLogin["fg"] = self.corFundo
        self.btnLogin["bg"] = self.corFrente
        self.btnLogin["activeforeground"] = self.corFundo
        self.btnLogin["activebackground"] = self.corFrente
        self.btnLogin["command"] = self.irLogin
        # Creating the Cadastro button
        self.btnCadastro = Button(self.frmBarra, text="Cadastro", cursor="hand2", width=10, height=1)
        self.btnCadastro["font"] = self.fontePadrao
        self.btnCadastro["relief"] = RIDGE
        self.btnCadastro["fg"] = self.corFundo
        self.btnCadastro["bg"] = self.corFrente
        self.btnCadastro["activeforeground"] = self.corFundo
        self.btnCadastro["activebackground"] = self.corFrente
        self.btnCadastro["command"] = self.irCadastro
        # Creating the Sobre button (disabled: screen not finished)
        self.btnSobre = Button(self.frmBarra, text="Sobre", cursor="hand2", state=DISABLED, width=10, height=1)
        self.btnSobre["font"] = self.fontePadrao
        self.btnSobre["relief"] = RIDGE
        self.btnSobre["fg"] = self.corFundo
        self.btnSobre["bg"] = self.corFrente
        self.btnSobre["activeforeground"] = self.corFundo
        self.btnSobre["activebackground"] = self.corFrente
        self.btnSobre["command"] = self.irSobre
        # Creating the Ajuda button (disabled: screen not finished)
        self.btnAjuda = Button(self.frmBarra, text="Ajuda", cursor="hand2", state=DISABLED, width=10, height=1)
        self.btnAjuda["font"] = self.fontePadrao
        self.btnAjuda["relief"] = RIDGE
        self.btnAjuda["fg"] = self.corFundo
        self.btnAjuda["bg"] = self.corFrente
        self.btnAjuda["activeforeground"] = self.corFundo
        self.btnAjuda["activebackground"] = self.corFrente
        self.btnAjuda["command"] = self.irAjuda
        # Creating the status/help label
        self.lblMensagem = Label(self.frmMensagem, text="Selecione uma opção")
        self.lblMensagem["font"] = self.fontePadrao
        self.lblMensagem["justify"] = CENTER
        self.lblMensagem["fg"] = self.corFrente
        self.lblMensagem["bg"] = self.corFundo
        self.lblMensagem["activeforeground"] = self.corFrente
        self.lblMensagem["activebackground"] = self.corFundo
        # Packing the frames
        self.frmLogo.grid(column=0, row=0, padx=180, pady=10)
        self.frmDescricao.grid(column=0, row=1, pady=10)
        self.frmBarra.grid(column=0, row=2, pady=10)
        self.frmMensagem.grid(column=0, row=3)
        # Packing the labels
        self.lblLogo.grid()
        self.lblTitulo.grid()
        self.lblDescricao.grid()
        self.lblMensagem.grid()
        # Packing the buttons (2x2 grid)
        self.btnLogin.grid(column=0, row=0, padx=5, pady=5)
        self.btnCadastro.grid(column=1, row=0, padx=5, pady=5)
        self.btnSobre.grid(column=0, row=1, padx=5, pady=5)
        self.btnAjuda.grid(column=1, row=1, padx=5, pady=5)
        # Show a description in the status label on mouse hover
        self.btnLogin.bind("<Enter>", lambda msgLogin: self.mostrarDescricao("Clique para conectar-se com aplicativo"))
        self.btnCadastro.bind("<Enter>", lambda msgCadastro: self.mostrarDescricao("Clique para registrar uma conta"))
        self.btnSobre.bind("<Enter>", lambda msgSobre: self.mostrarDescricao("Informações sobre o jogo"))
        self.btnAjuda.bind("<Enter>", lambda msgAjuda: self.mostrarDescricao("Guia de ajuda"))

    def mostrarDescricao(self, msg):
        """Display *msg* in the status label."""
        self.lblMensagem["text"] = msg

    def irLogin(self):
        """Clear this screen and open the login screen."""
        self.limparJanela()
        login = JanelaLogin()

    def irCadastro(self):
        """Open the registration pop-up window."""
        cadastro = JanelaCadastro()

    def irSobre(self):
        """Clear this screen and open the 'About' screen."""
        self.limparJanela()
        sobre = JanelaSobre()

    def irAjuda(self):
        """Clear this screen and open the help screen."""
        self.limparJanela()
        ajuda = JanelaAjuda()

    def limparJanela(self):
        """Remove every widget of this screen from the grid layout."""
        self.frmLogo.grid_forget()
        self.frmDescricao.grid_forget()
        self.frmBarra.grid_forget()
        self.lblTitulo.grid_forget()
        self.lblLogo.grid_forget()
        self.lblDescricao.grid_forget()
        self.btnSobre.grid_forget()
        self.btnAjuda.grid_forget()
        self.lblMensagem.grid_forget()
|
{"/routes/usuarios.py": ["/database/banco.py"], "/principal.py": ["/routes/janelas.py"], "/routes/janelas.py": ["/routes/usuarios.py", "/routes/estilo.py"], "/testando.py": ["/routes/estilo.py"], "/routes/fichas.py": ["/database/banco.py"]}
|
25,516
|
AlexPaivaBR/projeto
|
refs/heads/master
|
/testando.py
|
from routes.estilo import Estilo
from tkinter import *
from tkinter import ttk
class JanelaInicial():
    """Prototype window: a ttk.Notebook with Raças/Classes/Deuses tabs and a hover-help label."""

    def __init__(self, master=None):
        self.master = master
        # Shared colour/font palette (Estilo.__init__ called unbound on purpose)
        Estilo.__init__(self)
        self.criarWidgets()

    def criarWidgets(self):
        """Build the notebook, its tabs, the topic labels and the message label."""
        self.abas = ttk.Notebook(self.master)
        self.frmMensagem = Frame(self.master)
        self.frmRacas = ttk.Frame(self.abas)
        self.frmClasses = ttk.Frame(self.abas)
        self.frmDeuses = ttk.Frame(self.abas)
        self.frmRacas.grid()
        self.frmClasses.grid()
        self.frmDeuses.grid()
        # One notebook tab per topic
        self.abas.add(self.frmRacas, text="Raças")
        self.abas.add(self.frmClasses, text="Classes")
        self.abas.add(self.frmDeuses, text="Deuses")
        self.lblSemideus = Label(self.frmRacas, text="Semideus", cursor="question_arrow")
        self.lblLegado = Label(self.frmRacas, text="Legado", cursor="question_arrow")
        self.lblJupiter = Label(self.frmDeuses, text="Júpiter")
        self.lblMarte = Label(self.frmDeuses, text="Marte")
        self.lblNetuno = Label(self.frmDeuses, text="Netuno")
        self.lblSemideus.grid()
        self.lblLegado.grid()
        self.lblJupiter.grid()
        self.lblMarte.grid()
        self.lblNetuno.grid()
        self.abas.grid(column=0, row=0)
        self.lblMensagem = Label(self.frmMensagem, text="Testando", fg=self.corFrente, bg=self.corFundo)
        self.frmMensagem.grid(column=1, row=1)
        self.lblMensagem.grid()
        # Show an explanation in the message label when hovering a race label
        self.lblSemideus.bind("<Enter>", lambda msgSemideus: self.mensagem("Semideus é uma raça com origem no cruzamento de um Deus com um Humano"))
        self.lblLegado.bind("<Enter>", lambda msgLegado: self.mensagem("Legado é uma raça com origem no cruzamento de um Semideus com um Humano"))

    def mensagem(self, msg):
        """Display *msg* in the message label."""
        self.lblMensagem["text"] = msg
if __name__ == "__main__":
    # Manual test entry point: build the fixed-size root window and run the prototype.
    root = Tk()
    root.iconbitmap("img/spqr-icon.ico")
    root.resizable(width=False, height=False)
    root.geometry("600x600")
    root.title("Gerenciador de Fichas")
    root["bg"] = "#330c50"
    programa = JanelaInicial(root)
    root.mainloop()
|
{"/routes/usuarios.py": ["/database/banco.py"], "/principal.py": ["/routes/janelas.py"], "/routes/janelas.py": ["/routes/usuarios.py", "/routes/estilo.py"], "/testando.py": ["/routes/estilo.py"], "/routes/fichas.py": ["/database/banco.py"]}
|
25,517
|
AlexPaivaBR/projeto
|
refs/heads/master
|
/routes/estilo.py
|
class Estilo():
    """Shared colour palette and fonts used by every application window."""

    def __init__(self):
        # Colours: foreground text, window background, light container panel.
        self.corFrente, self.corFundo, self.corContainer = (
            "white", "#330c50", "#DCDCDC")
        # Fonts: page title, standard bold text, entry fields.
        self.fonteTitulo = ("Arial", "30", "bold")
        self.fontePadrao = ("Arial", "12", "bold")
        self.fonteEntry = ("Arial", "20")
|
{"/routes/usuarios.py": ["/database/banco.py"], "/principal.py": ["/routes/janelas.py"], "/routes/janelas.py": ["/routes/usuarios.py", "/routes/estilo.py"], "/testando.py": ["/routes/estilo.py"], "/routes/fichas.py": ["/database/banco.py"]}
|
25,518
|
AlexPaivaBR/projeto
|
refs/heads/master
|
/database/banco.py
|
import sqlite3
class Cadastrados():
    """Opens the SQLite database and guarantees the ``usuarios`` table exists."""

    def __init__(self):
        # Connection is kept open for the object's lifetime.
        self.conexao = sqlite3.connect('database/cadastrados.db')
        self.createTable()

    def createTable(self):
        """Create the users table on first run (no-op afterwards)."""
        cursor = self.conexao.cursor()
        cursor.execute("""CREATE TABLE IF NOT EXISTS usuarios(
        idusuario INTEGER PRIMARY KEY AUTOINCREMENT,
        usuario TEXT NOT NULL,
        senha TEXT NOT NULL
        )""")
        self.conexao.commit()
        cursor.close()
class Ficheiro():
    """Opens the SQLite database and guarantees the ``fichas`` table exists."""

    def __init__(self):
        # Same database file used by Cadastrados.
        self.conexao = sqlite3.connect('database/cadastrados.db')
        self.createTable()

    def createTable(self):
        """Create the character-sheet table on first run (no-op afterwards)."""
        cursor = self.conexao.cursor()
        cursor.execute("""CREATE TABLE IF NOT EXISTS fichas(
        idficha INTEGER PRIMARY KEY AUTOINCREMENT,
        idusuario INTEGER,
        nome TEXT NOT NULL,
        idade INTEGER NOT NULL,
        raca TEXT NOT NULL,
        divindade TEXT NOT NULL,
        FOREIGN KEY (idusuario) REFERENCES usuarios(idusuario)
        )""")
        self.conexao.commit()
        cursor.close()
|
{"/routes/usuarios.py": ["/database/banco.py"], "/principal.py": ["/routes/janelas.py"], "/routes/janelas.py": ["/routes/usuarios.py", "/routes/estilo.py"], "/testando.py": ["/routes/estilo.py"], "/routes/fichas.py": ["/database/banco.py"]}
|
25,519
|
AlexPaivaBR/projeto
|
refs/heads/master
|
/routes/fichas.py
|
from database.banco import Ficheiro
class Fichas(object):
    """Character-sheet record plus persistence helpers for the ``fichas`` table."""

    def __init__(self, nome="", idade=0, raca="", classe="", usuario="", senha=""):
        self.nome = nome
        # NOTE(review): default is 0 but the validation below compares against
        # "" — callers appear to pass GUI strings; confirm.
        self.idade = idade
        self.raca = raca
        self.classe = classe
        self.usuario = usuario
        self.senha = senha

    def criarFicha(self):
        """Validate the sheet fields and insert a row; return a status message string."""
        banco = Ficheiro()
        c = banco.conexao.cursor()
        try:
            # Field-by-field validation, all-empty case first.
            if self.nome == "" and self.idade == "" and self.raca == "" and self.classe == "":
                return "Preencha o formulário"
            elif self.nome == "":
                return "Insira um nome"
            elif self.idade == "":
                return "Insira uma idade"
            elif self.raca == "":
                return "Selecione uma raça"
            elif self.classe == "":
                return "Selecione uma classe"
            else:
                # BUG FIX: the original executed the undefined name ``inserir``
                # (the query was bound to ``inserindo``), raising NameError,
                # which the bare except turned into a silent "ERROR".
                # Also, the schema created in database/banco.py names the
                # fourth column ``divindade`` (there is no ``classe`` column),
                # so insert the classe value into that column.
                inserindo = ('INSERT INTO fichas(nome, idade, raca, divindade) VALUES (?, ?, ?, ?)')
                c.execute(inserindo, [self.nome, self.idade, self.raca, self.classe])
                banco.conexao.commit()
                return "Ficha criada!"
        except Exception:
            # Keep the original best-effort contract: report failure as a string.
            return "ERROR"
        finally:
            # Always release the cursor (the original close after return was unreachable).
            c.close()

    def alterarFicha(self):
        """Not implemented yet."""
        pass

    def deletarFicha(self):
        """Not implemented yet."""
        pass

    def selecionarFicha(self):
        """Not implemented yet."""
        pass
|
{"/routes/usuarios.py": ["/database/banco.py"], "/principal.py": ["/routes/janelas.py"], "/routes/janelas.py": ["/routes/usuarios.py", "/routes/estilo.py"], "/testando.py": ["/routes/estilo.py"], "/routes/fichas.py": ["/database/banco.py"]}
|
25,538
|
Amar-Ag/facialAttendance
|
refs/heads/master
|
/facialAttendance/attendance/src/facerecog_attendance/pages/views.py
|
from django.shortcuts import render, redirect
from students.models import Student
import cv2
import pickle
from attendance.models import Attendance
import time
# Module-level state: last recognised student, written by camera().
Id = 0
Name = ""
# Create your views here.
def admin_login_view(request, *args, **kwargs):
    """Render the admin login form page."""
    template = 'login-form.html'
    return render(request, template)
def add_student_view(request, *args, **kwargs):
    """Render the add-student form page."""
    template = 'add_student.html'
    return render(request, template)
def all_student(request):
    """Render the page listing every registered student."""
    context = {'students': Student.objects.all()}
    return render(request, 'all_students.html', context)
def landingpage_view(request, *args, **kwargs):
    """Render the public landing page."""
    template = 'landing-page.html'
    return render(request, template)
def camera(request):
    """Run webcam face recognition and record today's attendance.

    Opens the default camera, detects faces with a Haar cascade and
    recognises them with a pre-trained LBPH model, then creates an
    Attendance row for the recognised student (at most once per day).
    Blocks until Enter is pressed in the OpenCV window or 100 face
    frames have been processed.
    """
    global Id, Name
    systemDate = time.strftime("%d/%m/%Y")
    # Haar cascade for face detection (hard-coded absolute path to the cv2 install)
    face_recog = cv2.CascadeClassifier(
        '/home/amar/BCP/attendance/lib/python3.6/site-packages/cv2/data/haarcascade_frontalface_default.xml')
    print(face_recog)
    # LBPH recogniser trained offline; labels.pickle maps name -> label id
    recognizer = cv2.face.LBPHFaceRecognizer_create()
    recognizer.read('/home/amar/BCP/attendance/src/facerecog_attendance/pages/trainer.yml')
    labels = {"person_name":1}
    with open('/home/amar/BCP/attendance/src/facerecog_attendance/pages/labels.pickle', 'rb') as f:
        og_labels = pickle.load(f)
        # Invert the mapping: label id -> name
        labels = {v: k for k, v in og_labels.items()}
    cap = cv2.VideoCapture(0)
    count = 0
    while (True):
        ret, frame = cap.read()
        gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        faces = face_recog.detectMultiScale(gray, 1.3, 5)
        for (x, y, w, h) in faces:
            count += 1
            # print(.x, y, w, h)
            roi_gray = gray[y:y + h, x:x + w]
            roi_color = frame[y:y + h, x:x + w]
            id_, conf = recognizer.predict(roi_gray)
            # Accept only predictions inside this confidence window as a match
            if conf >= 30 and conf <= 85:
                font = cv2.FONT_HERSHEY_SIMPLEX
                Id = id_
                Name = labels[id_]
                name = labels[id_]
                color = (120, 255, 100)
                stroke = 3
                cv2.putText(frame, name, (x,y), font, 1, color, stroke, cv2.LINE_AA)
            # cv2.imwrite("/home/aashir/Documents/7th Sem/bcp/{}/user" + str(count) + ".jpg", roi_color)
            # cv2.imwrite("/home/amar/BCP/FaceDb/{}/{}".format(int(Id), FName) + str(count) + ".jpg", roi_color)
            cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)
        cv2.imshow('Face', frame)
        # 13 == Enter key
        if cv2.waitKey(100) == 13 or count == 100:
            break
    # Record attendance for the last recognised student, once per day
    if Id > 0 and Name != "":
        RollNo = Id
        attendance = Attendance.objects.all()
        if not attendance.exists():
            Attendance.objects.create(RollNo=RollNo, Name=Name, Status=1, Date=systemDate)
        else:
            attendanceStd = Attendance.objects.filter(Name=Name, Date=systemDate)
            if not attendanceStd.exists():
                Attendance.objects.create(RollNo=RollNo, Name=Name, Status=1, Date=systemDate)
            else:
                print("Attendance already taken")
    cap.release()
    cv2.destroyAllWindows()
    return redirect('landingpage')
|
{"/facialAttendance/attendance/src/facerecog_attendance/students/views.py": ["/facialAttendance/attendance/src/facerecog_attendance/students/models.py"], "/facialAttendance/attendance/src/facerecog_attendance/students/urls.py": ["/facialAttendance/attendance/src/facerecog_attendance/students/views.py"]}
|
25,539
|
Amar-Ag/facialAttendance
|
refs/heads/master
|
/facialAttendance/attendance/src/facerecog_attendance/students/migrations/0001_initial.py
|
# Generated by Django 2.2.11 on 2020-03-16 08:02
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated initial migration: creates the Student table (RollNo is the PK)."""
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Student',
            fields=[
                ('FirstName', models.CharField(max_length=20)),
                ('LastName', models.CharField(max_length=15)),
                ('RollNo', models.IntegerField(primary_key=True, serialize=False)),
                ('Email', models.EmailField(max_length=40)),
                ('RegistrationDate', models.CharField(max_length=40)),
                ('Class', models.CharField(max_length=40, null=True)),
                ('Gender', models.CharField(max_length=10)),
                ('MobileNo', models.IntegerField()),
                ('ParentsName', models.CharField(max_length=40)),
                ('ParentMobileNo', models.IntegerField()),
                ('BirthDate', models.CharField(max_length=20)),
                ('BloodGroup', models.CharField(max_length=40)),
                ('Address', models.CharField(max_length=40)),
            ],
        ),
    ]
|
{"/facialAttendance/attendance/src/facerecog_attendance/students/views.py": ["/facialAttendance/attendance/src/facerecog_attendance/students/models.py"], "/facialAttendance/attendance/src/facerecog_attendance/students/urls.py": ["/facialAttendance/attendance/src/facerecog_attendance/students/views.py"]}
|
25,540
|
Amar-Ag/facialAttendance
|
refs/heads/master
|
/facialAttendance/attendance/src/facerecog_attendance/facerecog_attendance/urls.py
|
"""facerecog_attendance URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from pages.views import admin_login_view
from administrator.views import admin_login,calendar
from pages.views import add_student_view, landingpage_view, camera
from students.views import delete_student
# Project URL table: admin login, dashboard, student pages, attendance camera.
urlpatterns = [
    path('admin/', admin_login_view, name="admin"),
    path('dashboard/', admin_login, name="dashboard"),
    path('dashboard/add_student_view', add_student_view, name="add_student_view"),
    path('calendar/', calendar,name="calendar"),
    path('student/', include('students.urls')),
    path('student/all/delete/<int:id>', delete_student, name="deleteStudent"),
    path('', landingpage_view, name="landingpage"),
    path('camera', camera, name="recognizeface")
]
|
{"/facialAttendance/attendance/src/facerecog_attendance/students/views.py": ["/facialAttendance/attendance/src/facerecog_attendance/students/models.py"], "/facialAttendance/attendance/src/facerecog_attendance/students/urls.py": ["/facialAttendance/attendance/src/facerecog_attendance/students/views.py"]}
|
25,541
|
Amar-Ag/facialAttendance
|
refs/heads/master
|
/facialAttendance/attendance/src/facerecog_attendance/students/views.py
|
from django.shortcuts import render, redirect
from .models import Student
import cv2
import os
import shutil
# Module-level state: set by add_student() and read later by camera().
Id = 0
FName = ""
# Create your views here.
def add_student(request, *args, **kwargs):
    """Create a Student from the POSTed form and prepare its face-image folder.

    Side effects: sets the module globals Id/FName (read later by camera())
    and creates the per-student folder under /home/amar/BCP/FaceDb.
    """
    global Id, FName
    RollNo = request.POST.get('RollNo')
    Name = request.POST.get('FirstName')
    Id = RollNo
    FName = Name
    print(Id)
    Student.objects.create(FirstName=request.POST.get('FirstName'), LastName=request.POST.get('LastName') ,RollNo =request.POST.get('RollNo'),Email=request.POST.get('Email'),RegistrationDate=request.POST.get('RegisterDate'),
    Class=request.POST.get('Class'),Gender=request.POST.get('Gender'),MobileNo=request.POST.get('MobileNumber'),ParentsName=request.POST.get('ParentName'),ParentMobileNo=request.POST.get('ParentNumber'),
    BirthDate=request.POST.get('DOB'), BloodGroup=request.POST.get('BloodGroup'),Address=request.POST.get('Address'))
    os.chdir('/home/amar/BCP/FaceDb')
    # Ensure the per-student folder exists. The original used try/mkdir and,
    # on FileExistsError, did os.rename(Id, Id) — a pointless self-rename.
    os.makedirs(Id, exist_ok=True)
    return redirect('add_student_view')
def edit_student(request, id, *args, **kwargs):
    """Show the edit form (GET) or apply the posted changes (POST) for student *id*.

    *id* is the RollNo primary key. RollNo itself is read from the form but
    never written back, so the key cannot be changed here.
    """
    student = Student.objects.get(RollNo=id)
    if request.method == 'POST':
        FirstName = request.POST.get('FirstName')
        LastName = request.POST.get('LastName')
        RollNo = request.POST.get('RollNo')
        Email = request.POST.get('Email')
        RegistrationDate = request.POST.get('RegisterDate')
        Class = request.POST.get('Class')
        Gender = request.POST.get('Gender')
        MobileNo = request.POST.get('MobileNumber')
        ParentsName = request.POST.get('ParentName')
        ParentMobileNo = request.POST.get('ParentNumber')
        BirthDate = request.POST.get('DOB')
        BloodGroup = request.POST.get('BloodGroup')
        Address = request.POST.get('Address')
        # Copy every editable field onto the model and save once.
        student.FirstName = FirstName
        student.LastName = LastName
        student.Email = Email
        student.RegistrationDate = RegistrationDate
        student.Class = Class
        student.Gender = Gender
        student.MobileNo = MobileNo
        student.ParentsName = ParentsName
        student.ParentMobileNo = ParentMobileNo
        student.BirthDate = BirthDate
        student.BloodGroup = BloodGroup
        student.Address = Address
        student.save()
        return redirect('allStudent')
    else:
        context = {
            'student': student
        }
        return render(request, 'edit_student.html', context)
def delete_student(request, id, *args, **kwargs):
    """Delete the student with primary key *id* and its face-image folder."""
    student = Student.objects.get(RollNo=id)
    os.chdir('/home/amar/BCP/FaceDb')
    # BUG FIX: Student has no ``Id`` field — its primary key is ``RollNo``
    # (and add_student names the folder after RollNo), so the original
    # raised AttributeError on ``student.Id``. Also guard against a missing
    # folder: shutil.rmtree removes both empty and non-empty directories.
    folder = str(student.RollNo)
    if os.path.isdir(folder):
        shutil.rmtree(folder)
    student.delete()
    return redirect('allStudent')
def camera(request, *args, **kwargs):
    """Capture up to 100 webcam face crops into the student's FaceDb folder.

    Uses the module globals Id/FName set by add_student() to build the
    output file names. Blocks until Enter is pressed in the OpenCV window
    or 100 face frames have been saved.
    """
    global Id
    global FName
    print("Id {}".format(Id))
    print("Name {}".format(FName))
    # Haar cascade for face detection (hard-coded absolute path to the cv2 install)
    face_recog = cv2.CascadeClassifier('/home/amar/BCP/attendance/lib/python3.6/site-packages/cv2/data/haarcascade_frontalface_default.xml')
    print(face_recog)
    cap = cv2.VideoCapture(0)
    count = 0
    while (True):
        ret, frame = cap.read()
        gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        faces = face_recog.detectMultiScale(gray, 1.3, 5)
        for (x, y, w, h) in faces:
            count += 1
            # print(x, y, w, h)
            roi_gray = gray[y:y + h, x:x + w]
            roi_color = frame[y:y + h, x:x + w]
            # cv2.imwrite("/home/aashir/Documents/7th Sem/bcp/{}/user" + str(count) + ".jpg", roi_color)
            print("TYPE OF ID: {}".format(type(Id)))
            # Save the colour face crop as FaceDb/<RollNo>/<Name><count>.jpg
            cv2.imwrite("/home/amar/BCP/FaceDb/{}/{}".format(int(Id), FName) + str(count) + ".jpg", roi_color)
            cv2.putText(frame, str(count), (50, 50), cv2.FONT_HERSHEY_COMPLEX, 1, (0, 255, 0), 2)
            cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)
        cv2.imshow('Face', frame)
        # 13 == Enter key
        if cv2.waitKey(100) == 13 or count == 100:
            break
    cap.release()
    cv2.destroyAllWindows()
    return redirect('add_student_view')
def profile(request):
    """Render the user profile page."""
    template = 'user_profile.html'
    return render(request, template)
|
{"/facialAttendance/attendance/src/facerecog_attendance/students/views.py": ["/facialAttendance/attendance/src/facerecog_attendance/students/models.py"], "/facialAttendance/attendance/src/facerecog_attendance/students/urls.py": ["/facialAttendance/attendance/src/facerecog_attendance/students/views.py"]}
|
25,542
|
Amar-Ag/facialAttendance
|
refs/heads/master
|
/facialAttendance/attendance/src/facerecog_attendance/attendance/models.py
|
from django.db import models
# Create your models here.
class Attendance(models.Model):
    """One attendance mark: student's RollNo/Name, a Status flag and the date string (dd/mm/YYYY)."""
    id = models.AutoField(primary_key=True)
    RollNo = models.IntegerField()
    Name = models.CharField(max_length=20)
    Status = models.IntegerField()
    Date = models.CharField(max_length=20)
|
{"/facialAttendance/attendance/src/facerecog_attendance/students/views.py": ["/facialAttendance/attendance/src/facerecog_attendance/students/models.py"], "/facialAttendance/attendance/src/facerecog_attendance/students/urls.py": ["/facialAttendance/attendance/src/facerecog_attendance/students/views.py"]}
|
25,543
|
Amar-Ag/facialAttendance
|
refs/heads/master
|
/facialAttendance/attendance/src/facerecog_attendance/administrator/views.py
|
from django.shortcuts import render, redirect
from .models import Admin
from attendance.models import Attendance
# Create your views here.
# Process-wide login flag, shared by every request/user of this process.
isLoggedIn = False


def admin_login(request):
    """Check admin credentials (POST) or show the dashboard if already logged in (GET).

    NOTE(review): credentials are compared in plain text against the Admin
    table and the logged-in state is a module-level global rather than a
    per-session value — consider Django's auth/session framework.
    """
    global isLoggedIn
    attendance = Attendance.objects.all()
    context = {
        'attendance': attendance
    }
    if request.method == "POST":
        username = request.POST['username']
        password = request.POST['password']
        if username and password:
            try:
                admin = Admin.objects.get(username=username, password=password)
                isLoggedIn = True
            except Admin.DoesNotExist:
                # Unknown credentials: back to the login form
                return redirect('admin')
            return render(request, 'index.html', context)
        else:
            return redirect('admin')
    # GET: show the dashboard only if a previous POST login succeeded
    if isLoggedIn:
        return render(request, 'index.html', context)
    else:
        return redirect('admin')
def calendar(request):
    """Render the attendance calendar page."""
    template = 'calendar.html'
    return render(request, template)
|
{"/facialAttendance/attendance/src/facerecog_attendance/students/views.py": ["/facialAttendance/attendance/src/facerecog_attendance/students/models.py"], "/facialAttendance/attendance/src/facerecog_attendance/students/urls.py": ["/facialAttendance/attendance/src/facerecog_attendance/students/views.py"]}
|
25,544
|
Amar-Ag/facialAttendance
|
refs/heads/master
|
/facialAttendance/attendance/src/facerecog_attendance/students/urls.py
|
from django.urls import path
from .views import add_student,edit_student,profile, camera
from pages.views import all_student
# Student app routes (mounted under /student/ by the project urls.py).
urlpatterns = [
    path('add/', add_student, name="addStudent"),
    path('all/', all_student, name="allStudent"),
    path('edit/<int:id>', edit_student, name="editStudent"),
    path('camera/', camera, name="camera"),
    path('profile/',profile, name="profile"),
]
|
{"/facialAttendance/attendance/src/facerecog_attendance/students/views.py": ["/facialAttendance/attendance/src/facerecog_attendance/students/models.py"], "/facialAttendance/attendance/src/facerecog_attendance/students/urls.py": ["/facialAttendance/attendance/src/facerecog_attendance/students/views.py"]}
|
25,545
|
Amar-Ag/facialAttendance
|
refs/heads/master
|
/facialAttendance/attendance/src/facerecog_attendance/students/models.py
|
from django.db import models
# Create your models here.
class Student(models.Model):
    """One student record; RollNo doubles as the primary key."""
    FirstName = models.CharField(max_length=20, blank=False, null=False)
    LastName = models.CharField(max_length=15, blank=False, null=False)
    # Roll number is the primary key, so duplicates are rejected at the DB level.
    RollNo = models.IntegerField(primary_key=True)
    Email = models.EmailField(max_length=40, blank=False, null=False)
    # NOTE(review): dates are stored as free-form CharFields, not DateFields —
    # no validation or date arithmetic is possible; confirm this is intended.
    RegistrationDate = models.CharField(max_length=40, blank=False, null=False)
    Class = models.CharField(max_length=40, blank=False, null=True)
    Gender = models.CharField(max_length=10, blank=False, null=False)
    # NOTE(review): IntegerField phone numbers drop leading zeros and can
    # overflow backend int ranges — CharField is the usual choice.
    MobileNo = models.IntegerField()
    ParentsName = models.CharField(max_length=40, blank=False, null=False)
    ParentMobileNo = models.IntegerField()
    BirthDate = models.CharField(max_length=20)
    BloodGroup = models.CharField(max_length=40, blank=False, null=False)
    Address = models.CharField(max_length=40, blank=False, null=False)
|
{"/facialAttendance/attendance/src/facerecog_attendance/students/views.py": ["/facialAttendance/attendance/src/facerecog_attendance/students/models.py"], "/facialAttendance/attendance/src/facerecog_attendance/students/urls.py": ["/facialAttendance/attendance/src/facerecog_attendance/students/views.py"]}
|
25,563
|
shawnsteinz/BatchExtractor
|
refs/heads/master
|
/BatchExtractor/Gui/Gui_Builder.py
|
from tkinter import *
from tkinter import ttk
import BatchExtractor.Gui.Settings_Screen
import BatchExtractor.Gui.Main_Screen
class Gui_Builder:
    """Top-level Tk window: a notebook holding the Main and Settings tabs."""
    def __init__(self, sh, file):
        """``sh`` is the ShelveHandler settings store; ``file`` the FileList."""
        self.root = Tk()
        self.sh = sh
        # Tk variables bound to the Settings tab entries, seeded from the shelf.
        self.source = StringVar()
        self.source.set(self.sh.get_setting('src'))
        self.destination = StringVar()
        self.destination.set(self.sh.get_setting('des'))
        # NOTE(review): `database` is never read anywhere visible in this class.
        self.database = StringVar()
        self.file = file
        self.build_gui()
    def build_gui(self):
        """Create the 800x600 window and populate both notebook tabs."""
        self.root.geometry("%dx%d" % (800, 600))
        self.root.title('BatchExtractor')
        nb = ttk.Notebook(self.root)
        nb.pack(fill='both', expand='yes')
        main = BatchExtractor.Gui.Main_Screen.Main_Screen(self.sh, self.file).build_main_screen()
        settings_window = BatchExtractor.Gui.Settings_Screen.Settings_Screen(self.source, self.destination,
                                                                             self.sh).build_settings_screen()
        nb.add(main, text='Main')
        nb.add(settings_window, text='Settings')
    def start(self):
        """Enter the Tk main loop (blocks until the window closes)."""
        self.root.mainloop()
|
{"/BatchExtractor/Gui/Gui_Builder.py": ["/BatchExtractor/Gui/Settings_Screen.py", "/BatchExtractor/Gui/Main_Screen.py"], "/BatchExtractor/Model/Example.py": ["/BatchExtractor/Database/Database.py", "/BatchExtractor/Model/File.py", "/BatchExtractor/Model/Task.py"], "/BatchExtractor/Main.py": ["/BatchExtractor/Gui/Gui_Builder.py", "/BatchExtractor/Database/Database.py", "/BatchExtractor/Model/File.py", "/BatchExtractor/Model/Task.py"], "/BatchExtractor/Gui/Main_Screen.py": ["/BatchExtractor/Main.py"]}
|
25,564
|
shawnsteinz/BatchExtractor
|
refs/heads/master
|
/BatchExtractor/Model/File.py
|
from os import stat, path, walk
from operator import attrgetter
from datetime import datetime
class File():
    """Metadata for one archive file on disk: size, split-archive size, ctime."""

    def __init__(self, file_location):
        """Stat *file_location* and record its size and creation time."""
        statistics = stat(file_location)
        self.location = file_location
        self.size = statistics.st_size
        # Total size including any split-archive parts (.r00, .r01, ...).
        self.archive_size = self.__size()
        self.creation_time = statistics.st_ctime

    def __size(self):
        """Return self.size plus the sizes of sibling .rNN split-archive parts."""
        # Build the suffix tuple once; the old code rebuilt 1000 strings for
        # every file in the directory walk.
        part_suffixes = tuple(".r%.2d" % i for i in range(1000))
        size = self.size
        for directory_path, directory_name, files in walk(path.dirname(self.location)):
            for file_name in files:
                if file_name.lower().endswith(part_suffixes):
                    size = size + stat(path.join(directory_path, file_name)).st_size
        return size

    def get_location(self):
        """Return the file's path as given at construction time."""
        return self.location

    def get_creation_datetime(self):
        """Return the creation time as a human-readable ctime string."""
        d = datetime.fromtimestamp(self.creation_time)
        return d.ctime()

    def get_size(self, suffix='B'):
        """Return archive_size formatted with binary-prefix units (KiB, MiB...)."""
        number = self.archive_size
        for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
            if abs(number) < 1024.0:
                return "%3.1f%s%s" % (number, unit, suffix)
            number /= 1024.0
        return "%.1f%s%s" % (number, 'Yi', suffix)

    def __eq__(self, other):
        """Files are equal when every recorded attribute matches."""
        if not isinstance(other, File):
            # Was an AttributeError on non-File operands; NotImplemented lets
            # Python fall back to the default comparison instead.
            return NotImplemented
        return self.__dict__ == other.__dict__
class FileList():
    """Recursively collect File objects under a directory, filtered by extension."""

    def __init__(self, top_directory, allowed_file_extensions):
        self.files = []
        self.allowed_file_extensions = allowed_file_extensions
        self.__fill(top_directory)

    def __fill(self, top_directory):
        """Walk *top_directory* and wrap every matching file in a File."""
        suffixes = tuple(self.allowed_file_extensions)  # hoisted out of the loop
        for directory_path, directory_name, files in walk(top_directory):
            for file_name in files:
                if file_name.lower().endswith(suffixes):
                    self.files.append(File(path.join(directory_path, file_name)))

    def remove_files_by_tasks(self, excluded_tasks=None, completed_tasks=None):
        """Drop every file referenced by the given excluded/completed tasks.

        BUGFIX: the defaults were mutable lists (``[]``), a classic shared-state
        pitfall; ``None`` keeps the call signature backward compatible.
        """
        task_list = []
        if excluded_tasks:
            task_list.extend(excluded_tasks)
        if completed_tasks:
            task_list.extend(completed_tasks)
        for task in task_list:
            self.__remove_file(task.file)

    def __remove_file(self, file):
        """Remove the first stored file that compares equal to *file*."""
        for tmpfile in self.files:
            if tmpfile == file:
                self.files.remove(tmpfile)
                break

    def sort(self, attribute, descending=True):
        """Dispatch to the private sort method whose name contains *attribute*.

        Relies on name mangling: dir(self) exposes _FileList__sort_by_date /
        _FileList__sort_by_size, both matched by the 'sort_by_' substring.
        """
        for method in [x for x in dir(self) if callable(getattr(self, x)) and 'sort_by_' in x]:
            if attribute in method:
                getattr(self, method)(descending)
                break

    def __sort_by_date(self, descending):
        self.files = sorted(self.files, key=attrgetter('creation_time'), reverse=descending)

    def __sort_by_size(self, descending):
        self.files = sorted(self.files, key=attrgetter('archive_size'), reverse=descending)

    def get_files(self):
        """Return the (possibly filtered/sorted) list of File objects."""
        return self.files
|
{"/BatchExtractor/Gui/Gui_Builder.py": ["/BatchExtractor/Gui/Settings_Screen.py", "/BatchExtractor/Gui/Main_Screen.py"], "/BatchExtractor/Model/Example.py": ["/BatchExtractor/Database/Database.py", "/BatchExtractor/Model/File.py", "/BatchExtractor/Model/Task.py"], "/BatchExtractor/Main.py": ["/BatchExtractor/Gui/Gui_Builder.py", "/BatchExtractor/Database/Database.py", "/BatchExtractor/Model/File.py", "/BatchExtractor/Model/Task.py"], "/BatchExtractor/Gui/Main_Screen.py": ["/BatchExtractor/Main.py"]}
|
25,565
|
shawnsteinz/BatchExtractor
|
refs/heads/master
|
/BatchExtractor/Gui/Settings_Screen.py
|
from tkinter import filedialog
from tkinter import *
class Settings_Screen:
    """Settings tab: lets the user pick source and destination folders."""

    def __init__(self, source, destination, shelve_handler):
        """Keep the Tk StringVars shared with the rest of the GUI plus the store."""
        self.source = source
        self.destination = destination
        self.sh = shelve_handler

    def build_settings_screen(self):
        """Build and return the settings frame (caller adds it to the notebook)."""
        pane = Frame()

        header = Label(pane, text="Settings", font="TkDefaultFont 24 bold")
        header.grid(column=0, row=0, columnspan=4, sticky=NW, pady=5)

        # Source folder row: label, entry bound to self.source, browse button.
        src_label = Label(pane, text="Select source folder:")
        src_label.grid(column=0, row=1, pady=20, sticky=W)
        src_entry = Entry(pane, textvariable=self.source, width=40)
        src_entry.grid(column=1, row=1, sticky=W)
        src_browse = Button(pane, text="Browse...", command=self.select_source)
        src_browse.grid(column=4, row=1, sticky=W)

        # Destination folder row, same layout one row down.
        dst_label = Label(pane, text="Select folder to extract to:")
        dst_label.grid(column=0, row=2, pady=20, sticky=W)
        dst_entry = Entry(pane, textvariable=self.destination, width=40)
        dst_entry.grid(column=1, row=2)
        dst_browse = Button(pane, text="Browse...", command=self.select_destination)
        dst_browse.grid(column=4, row=2, sticky=W)

        # "Back" button has no command wired up (as in the original layout).
        back_button = Button(pane, text="Back")
        back_button.grid(column=4, row=4, padx=10)
        return pane

    def select_source(self):
        """Ask for a source directory, update the entry, persist the choice."""
        chosen = filedialog.askdirectory()
        self.source.set(chosen)
        self.sh.set_setting('src', chosen)

    def select_destination(self):
        """Ask for a destination directory, update the entry, persist the choice."""
        chosen = filedialog.askdirectory()
        self.destination.set(chosen)
        self.sh.set_setting('des', chosen)
|
{"/BatchExtractor/Gui/Gui_Builder.py": ["/BatchExtractor/Gui/Settings_Screen.py", "/BatchExtractor/Gui/Main_Screen.py"], "/BatchExtractor/Model/Example.py": ["/BatchExtractor/Database/Database.py", "/BatchExtractor/Model/File.py", "/BatchExtractor/Model/Task.py"], "/BatchExtractor/Main.py": ["/BatchExtractor/Gui/Gui_Builder.py", "/BatchExtractor/Database/Database.py", "/BatchExtractor/Model/File.py", "/BatchExtractor/Model/Task.py"], "/BatchExtractor/Gui/Main_Screen.py": ["/BatchExtractor/Main.py"]}
|
25,566
|
shawnsteinz/BatchExtractor
|
refs/heads/master
|
/BatchExtractor/Model/Task.py
|
from subprocess import call
class TaskHandler():
    """Build an extraction Task per file and run them through the shell."""

    def __init__(self, file_list, destination_directory):
        self.__tasks = self.__create_tasks(file_list, destination_directory)
        self.successful_tasks = []
        self.failed_tasks = []

    def __create_tasks(self, file_list, destination_directory):
        """Wrap every file in a Task targeting *destination_directory*."""
        return [Task(file, destination_directory) for file in file_list]

    def execute_tasks(self):
        """Run each task and classify it by exit status (0 = success)."""
        # SECURITY(review): shell=True with an interpolated path string permits
        # shell injection via crafted file names; prefer an argument list with
        # shell=False if Task.to_cmd can be reshaped.
        for task in self.__tasks:
            return_value = call(task.to_cmd(), shell=True)
            if return_value == 0:
                self.successful_tasks.append(task)
            else:
                # BUGFIX: was `elif return_value == 1`, which silently dropped
                # tasks with any other exit code (7z uses 2, 7, 8, 255 ...).
                self.failed_tasks.append(task)

    def get_tasks(self):
        """Return all created tasks (successful, failed, and not yet run)."""
        return self.__tasks
class Task():
    """One '7z x' extraction command for a single archive file."""

    def __init__(self, file, destination_directory, parameters=('7z', 'x', '-o')):
        self.file = file
        self.destination_directory = destination_directory
        self.parameters = parameters

    def to_cmd(self):
        """Return the shell command string with shell-safe quoted paths.

        BUGFIX: paths were interpolated unquoted and then run with shell=True,
        breaking on spaces and allowing shell injection.  shlex.quote is a
        no-op for plain names, so simple commands are byte-identical.
        """
        from shlex import quote  # local import: module interface unchanged
        prm = self.parameters
        return '%s %s %s %s%s' % (prm[0], prm[1], quote(self.file.location),
                                  prm[2], quote(self.destination_directory))
|
{"/BatchExtractor/Gui/Gui_Builder.py": ["/BatchExtractor/Gui/Settings_Screen.py", "/BatchExtractor/Gui/Main_Screen.py"], "/BatchExtractor/Model/Example.py": ["/BatchExtractor/Database/Database.py", "/BatchExtractor/Model/File.py", "/BatchExtractor/Model/Task.py"], "/BatchExtractor/Main.py": ["/BatchExtractor/Gui/Gui_Builder.py", "/BatchExtractor/Database/Database.py", "/BatchExtractor/Model/File.py", "/BatchExtractor/Model/Task.py"], "/BatchExtractor/Gui/Main_Screen.py": ["/BatchExtractor/Main.py"]}
|
25,567
|
shawnsteinz/BatchExtractor
|
refs/heads/master
|
/BatchExtractor/Model/Example.py
|
from BatchExtractor.Database.Database import ShelveHandler
from BatchExtractor.Model.File import FileList
from BatchExtractor.Model.Task import TaskHandler
# Demo/scratch script: configures the shelve DB with hard-coded Windows paths,
# scans SRC for archives, extracts everything to DES, and persists results.
# NOTE(review): all of this runs at import time — keep it out of production
# import paths.
sh = ShelveHandler('D:\\Test\\DB\\Shelve')
sh.set_setting('src', 'D:\\Test\\SRC')
sh.set_setting('des', 'D:\\Test\\DES')
sh.set_setting('ext', ['.rar', '.zip', '.7z'])
excluded = sh.get_excluded()
completed = sh.get_completed()
fl = FileList(sh.get_setting('src'), sh.get_setting('ext'))
# Skip anything already extracted or explicitly excluded.
fl.remove_files_by_tasks(sh.get_excluded(), sh.get_completed())
th = TaskHandler(fl.get_files(), sh.get_setting('des'))
th.execute_tasks()
sh.set_completed(th.successful_tasks)
# Reopen the shelf to show the completed tasks round-trip through persistence.
sh = ShelveHandler('D:\\Test\\DB\\Shelve')
for task in sh.get_completed():
    print('Task %s || %s || %s' % (task.file.location, task.destination_directory, task.file.get_size()))
|
{"/BatchExtractor/Gui/Gui_Builder.py": ["/BatchExtractor/Gui/Settings_Screen.py", "/BatchExtractor/Gui/Main_Screen.py"], "/BatchExtractor/Model/Example.py": ["/BatchExtractor/Database/Database.py", "/BatchExtractor/Model/File.py", "/BatchExtractor/Model/Task.py"], "/BatchExtractor/Main.py": ["/BatchExtractor/Gui/Gui_Builder.py", "/BatchExtractor/Database/Database.py", "/BatchExtractor/Model/File.py", "/BatchExtractor/Model/Task.py"], "/BatchExtractor/Gui/Main_Screen.py": ["/BatchExtractor/Main.py"]}
|
25,568
|
shawnsteinz/BatchExtractor
|
refs/heads/master
|
/BatchExtractor/Main.py
|
import BatchExtractor.Gui.Gui_Builder
from BatchExtractor.Database.Database import ShelveHandler
from BatchExtractor.Model.File import FileList
from BatchExtractor.Model.Task import TaskHandler
class Main():
    """Application entry point: wires settings, file scan, GUI, and extraction."""
    def __init__(self):
        # Hard-coded defaults; the Settings tab can overwrite src/des later.
        self.sh = ShelveHandler('C:\\BE\\DB\\Shelve')
        self.sh.set_setting('src', 'C:\\BE\\Test\\SRC')
        self.sh.set_setting('des', 'C:\\BE\\Test\\DES')
        self.sh.set_setting('ext', ['.rar', '.zip', '.7z'])
        self.fl = FileList(self.sh.get_setting('src'), self.sh.get_setting('ext'))
        # Don't offer files that were already extracted or explicitly excluded.
        self.fl.remove_files_by_tasks(self.sh.get_excluded(), self.sh.get_completed())
    def main(self):
        """Launch the Tk GUI around the scanned file list."""
        BatchExtractor.Gui.Gui_Builder.Gui_Builder(self.sh, self.fl).start()
    def extract(self):
        """Extract every remaining file and persist the successes."""
        th = TaskHandler(self.fl.get_files(), self.sh.get_setting('des'))
        th.execute_tasks()
        self.sh.set_completed(th.successful_tasks)
if __name__ == '__main__':
    Main().main()
|
{"/BatchExtractor/Gui/Gui_Builder.py": ["/BatchExtractor/Gui/Settings_Screen.py", "/BatchExtractor/Gui/Main_Screen.py"], "/BatchExtractor/Model/Example.py": ["/BatchExtractor/Database/Database.py", "/BatchExtractor/Model/File.py", "/BatchExtractor/Model/Task.py"], "/BatchExtractor/Main.py": ["/BatchExtractor/Gui/Gui_Builder.py", "/BatchExtractor/Database/Database.py", "/BatchExtractor/Model/File.py", "/BatchExtractor/Model/Task.py"], "/BatchExtractor/Gui/Main_Screen.py": ["/BatchExtractor/Main.py"]}
|
25,569
|
shawnsteinz/BatchExtractor
|
refs/heads/master
|
/BatchExtractor/Database/Database.py
|
from shelve import *
class ShelveHandler():
    """Thin persistence wrapper around a `shelve` database.

    Keys used: 'completed' and 'excluded' (lists of tasks) and 'settings'
    (a dict of configuration values).  Also usable as a context manager so
    the underlying shelf is always closed::

        with ShelveHandler(path) as sh:
            sh.set_setting('src', '/data')
    """

    def __init__(self, shelve_location):
        # NOTE: `open` here is shelve.open (star-imported at module level),
        # not the builtin.  writeback=True caches mutated entries in memory
        # until sync()/close().
        self.__shelve = open(shelve_location, flag='c', writeback=True)

    def __enter__(self):
        """Context-manager entry: return the handler itself."""
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        """Context-manager exit: always close the shelf; never suppress errors."""
        self.close()
        return False

    def get_completed(self):
        """Return the persisted completed-task list ([] if never set)."""
        try:
            return self.__shelve['completed']
        except KeyError:
            return []

    def get_excluded(self):
        """Return the persisted excluded-task list ([] if never set)."""
        try:
            return self.__shelve['excluded']
        except KeyError:
            return []

    def get_setting(self, key):
        """Return one value from the 'settings' dict, or None if absent."""
        try:
            temp = self.__shelve['settings']
            # sync on read is unnecessary but harmless; kept for compatibility
            # with the original flush behavior under writeback=True.
            self.sync()
            return temp[key]
        except KeyError:
            return None

    def set_completed(self, completed):
        """Append *completed* tasks to the persisted list and flush."""
        try:
            temp = self.__shelve['completed']
            temp.extend(completed)
        except KeyError:
            temp = completed
        finally:
            self.__shelve['completed'] = temp
            self.sync()

    def set_excluded(self, excluded):
        """Append *excluded* tasks to the persisted list and flush."""
        try:
            temp = self.__shelve['excluded']
            temp.extend(excluded)
        except KeyError:
            temp = excluded
        finally:
            self.__shelve['excluded'] = temp
            self.sync()

    def set_setting(self, key, setting):
        """Store one key/value pair in the 'settings' dict and flush."""
        try:
            temp = self.__shelve['settings']
            temp[key] = setting
        except KeyError:
            temp = {key: setting}
        finally:
            self.__shelve['settings'] = temp
            self.sync()

    def close(self):
        """Close the underlying shelf (flushes writeback cache)."""
        self.__shelve.close()

    def sync(self):
        """Flush the writeback cache to disk without closing."""
        self.__shelve.sync()
|
{"/BatchExtractor/Gui/Gui_Builder.py": ["/BatchExtractor/Gui/Settings_Screen.py", "/BatchExtractor/Gui/Main_Screen.py"], "/BatchExtractor/Model/Example.py": ["/BatchExtractor/Database/Database.py", "/BatchExtractor/Model/File.py", "/BatchExtractor/Model/Task.py"], "/BatchExtractor/Main.py": ["/BatchExtractor/Gui/Gui_Builder.py", "/BatchExtractor/Database/Database.py", "/BatchExtractor/Model/File.py", "/BatchExtractor/Model/Task.py"], "/BatchExtractor/Gui/Main_Screen.py": ["/BatchExtractor/Main.py"]}
|
25,570
|
shawnsteinz/BatchExtractor
|
refs/heads/master
|
/BatchExtractor/Gui/Main_Screen.py
|
from tkinter import *
import BatchExtractor.Main
import tkinter.ttk as ttk
import tkinter.font as tkFont
class Main_Screen:
    """Main notebook tab: sortable archive list, progress bar, Start button."""

    def __init__(self, sh, files):
        """Keep the ShelveHandler (`sh`) and FileList (`files`); build rows."""
        self.var = int()
        self.v = StringVar()
        self.sh = sh
        self.files = files
        self.label = []
        self.checkbutton = []
        self.list_header = ['Name', 'size']
        self.data_list = ['', '']
        self.fill_data_list()
        self.tree = None

    def build_main_screen(self):
        """Assemble tree + scrollbars + Start button; return the frame."""
        main_screen = ttk.Frame()
        container = ttk.Frame(main_screen)
        container.pack(fill='both', expand=True)
        # create a treeview with dual scrollbars
        self.tree = ttk.Treeview(columns=self.list_header, show="headings")
        vsb = ttk.Scrollbar(orient="vertical",
                            command=self.tree.yview)
        hsb = ttk.Scrollbar(orient="horizontal",
                            command=self.tree.xview)
        self.tree.configure(yscrollcommand=vsb.set,
                            xscrollcommand=hsb.set)
        self.tree.grid(column=0, row=0, sticky='nsew', in_=container)
        vsb.grid(column=1, row=0, sticky='ns', in_=container)
        hsb.grid(column=0, row=1, sticky='ew', in_=container)
        container.grid_columnconfigure(0, weight=1)
        container.grid_rowconfigure(0, weight=1)
        self._build_tree()
        start_button = Button(main_screen, text="Start", height='2', width='10', command=self.run)
        start_button.pack(side=BOTTOM)
        self.progressbar = ttk.Progressbar(main_screen, orient='horizontal', mode='determinate')
        self.progressbar.pack(fill='both', side=BOTTOM)
        return main_screen

    def _build_tree(self):
        """Install click-to-sort headings and insert one row per archive."""
        for col in self.list_header:
            self.tree.heading(col, text=col.title(),
                              command=lambda c=col: self.sortby(self.tree, c, 0))
            # adjust the column's width to the header string
            self.tree.column(col, width=tkFont.Font().measure(col.title()))
        for item in self.data_list:
            self.tree.insert('', 'end', values=item)

    def sortby(self, tree, col, descending):
        """Sort tree contents when a column header is clicked on."""
        # grab values to sort
        data = [(tree.set(child, col), child)
                for child in tree.get_children('')]
        # now sort the data in place
        data.sort(reverse=descending)
        for ix, item in enumerate(data):
            tree.move(item[1], '', ix)
        # switch the heading so it will sort in the opposite direction
        tree.heading(col, command=lambda col=col: self.sortby(tree, col,
                                                              int(not descending)))

    def fill_data_list(self):
        """Build one [location, archive_size] row per file for the tree.

        BUGFIX: the old nested loop appended EVERY file's size to EVERY row,
        yielding rows far longer than the two declared columns.
        """
        self.data_list = [[f.location, f.archive_size]
                          for f in self.files.get_files()]

    def fill_frame(self):
        # NOTE(review): legacy grid-based UI; `self.frame` is never assigned in
        # this class, so calling this raises AttributeError — confirm before use.
        self.label = ["label%.d" % i for i in range(self.files.files.__len__())]
        self.checkbutton = ["checkbutton%.d" % i for i in range(self.files.files.__len__())]
        self.Name = Label(self.frame, text="Name", bg="black", fg='green', width=80, anchor=W)
        self.Name.grid(row=0, column=0, sticky=W)
        self.Extract = Label(self.frame, text="Extract", bg="black", fg='green', width=7, anchor=W)
        self.Extract.grid(row=0, column=1, sticky=W)
        for file in self.files.files:
            for id in range(self.label.__len__()):
                self.var += 1
                self.label[id] = Label(self.frame, text=file.location, bg="black", fg='green')
                self.label[id].grid(row=self.var, column=0, sticky=W)
                self.checkbutton[id] = Checkbutton(self.frame, variable=self.var, bg="black")
                self.checkbutton[id].grid(row=self.var, column=1, sticky=W)

    def clear_frame(self):
        # NOTE(review): companion to fill_frame; also depends on the missing
        # `self.frame` widgets having been created first.
        self.var = 1
        for id in range(self.label.__len__()):
            self.label[id].grid_forget()
            self.checkbutton[id].grid_forget()

    def run(self):
        """Start-button callback: run extraction via a fresh Main instance."""
        BatchExtractor.Main.Main().extract()
|
{"/BatchExtractor/Gui/Gui_Builder.py": ["/BatchExtractor/Gui/Settings_Screen.py", "/BatchExtractor/Gui/Main_Screen.py"], "/BatchExtractor/Model/Example.py": ["/BatchExtractor/Database/Database.py", "/BatchExtractor/Model/File.py", "/BatchExtractor/Model/Task.py"], "/BatchExtractor/Main.py": ["/BatchExtractor/Gui/Gui_Builder.py", "/BatchExtractor/Database/Database.py", "/BatchExtractor/Model/File.py", "/BatchExtractor/Model/Task.py"], "/BatchExtractor/Gui/Main_Screen.py": ["/BatchExtractor/Main.py"]}
|
25,571
|
RescuePi/Rescue_Pi_Code
|
refs/heads/main
|
/constants.py
|
# Master switch for the detection loop in Rescue_PI.loop_over_frames.
RUN_PROGRAM = True
# Human detector (MobileNet-SSD, Caffe) and behavior classifier (ONNX) assets.
prototxt_path = "human_detection_model/MobileNetSSD_deploy.prototxt.txt"
human_model_path = "human_detection_model/MobileNetSSD_deploy.caffemodel"
rescue_cnn_model_path = "saved_models/PyTorch_Models/Final_Rescue_Model_Onnx.onnx"
# Alarm played when "Fighting" or "Crying" is predicted.
sound_file = "alarm.wav"
# Minimum SSD confidence for a detection to be treated as a person.
MIN_CONFIDENCE = 0.8
# Frames are resized to this width before detection.
frame_width_in_pixels = 320
OPEN_DISPLAY = True
USE_VIDEO = True
USE_GRAPHICS = True
VID_CAM_INDEX = 0
# Side length (pixels) of the square crop fed to the ONNX classifier.
MODEL_INPUT_SIZE = 128
SLEEP_TIME_AMOUNT = 2
# Classifier labels and their overlay colors (index-aligned; OpenCV uses BGR).
LABELS = ["Fighting", "Crying", "Normal"]
COLORS = [(0, 255, 0), (0, 0, 255), (255, 0, 0)]
# Laplacian-variance blur threshold; crops below this are skipped as too blurry.
MIN_THRESHOLD = 200
# MobileNet-SSD class list; index 15 is "person".
CLASSES = ["background", "aeroplane", "bicycle", "bird", "boat",
           "bottle", "bus", "car", "cat", "chair", "cow", "diningtable",
           "dog", "horse", "motorbike", "person", "pottedplant", "sheep",
           "sofa", "train", "tvmonitor"]
# Indices into LABELS/COLORS for each predicted behavior class.
FIGHTING_INDEX = 0
CRYING_INDEX = 1
NORMAL_INDEX = 2
|
{"/Rescue_Pi.py": ["/constants.py"]}
|
25,572
|
RescuePi/Rescue_Pi_Code
|
refs/heads/main
|
/Rescue_Pi.py
|
import cv2
import numpy as np
from constants import *
import imutils
from imutils.video import VideoStream
from pygame import mixer
import threading
import os
class Rescue_PI:
    """Detect people in a video stream, classify their behavior (Fighting /
    Crying / Normal) with an ONNX model, and sound an alarm on distress.

    Pipeline per frame: MobileNet-SSD person detection -> blur check on the
    person crop -> ONNX behavior classification -> optional overlay + alarm.
    """
    run_program = RUN_PROGRAM
    input_video_file_path = None
    preferable_target = cv2.dnn.DNN_TARGET_CPU

    def __init__(self):
        self.SoundThread = None
        self.AudioPlay = None
        self.rescue_model = None
        self.frame = None
        # BUGFIX: orig_frame is now always defined; previously it was only set
        # on the webcam path, so the video-file path crashed at putText/imshow.
        self.orig_frame = None
        self.h = None
        self.w = None
        self.vs = None
        self.image_blob = None
        self.confidence = None
        self.detections = None
        self.box = None
        self.human_blob = None
        self.f_h = None
        self.f_w = None
        self.startX = None
        self.startY = None
        self.endX = None
        self.endY = None
        self.human_blob = None
        self.predictions = None
        self.name = None
        self.detector = None
        self.prediction_index = None
        self.fileName = None
        self.text = None
        self.y = None
        self.colorIndex = None
        self.threshold = MIN_THRESHOLD
        self.model_input_size = MODEL_INPUT_SIZE
        self.current_time = None
        self.time = ""
        self.seconds = None
        self.debug = False
        self.sound_thread = None
        self.use_graphics = USE_GRAPHICS
        self.voice = None
        self.sound = None
        self.idx = None
        self.label = None
        self.classes = CLASSES
        self.load_caffe_model()
        self.load_onnx_model()
        self.init_audio()
        self.create_play_audio_thread()
        self.initialize_camera()

    @classmethod
    def perform_job(cls, preferableTarget=preferable_target, input_video_file_path=input_video_file_path):
        """
        This method performs the job expected from this class.
        :key
        """
        # Set preferable target.
        Rescue_PI.preferable_target = preferableTarget
        # Set input video file path (if applicable)
        Rescue_PI.input_video_file_path = input_video_file_path
        # Create a thread that uses the thread_for_rescue_detection function and start it.
        t1 = threading.Thread(target=Rescue_PI().thread_for_rescue_detection)
        t1.start()

    def is_blur(self, frame, thresh):
        """Return True when the frame's Laplacian variance is below *thresh*."""
        fm = cv2.Laplacian(frame, cv2.CV_64F).var()
        if fm < thresh:
            return True
        else:
            return False

    def super_res(self, frame):
        """Resize *frame* to the classifier's square input size (cubic interp)."""
        self.frame = cv2.resize(frame, (self.model_input_size, self.model_input_size), interpolation=cv2.INTER_CUBIC)

    def load_caffe_model(self):
        """
        This function will load the caffe model that we will use for detecting a human_blob, and then set the preferable target to the correct target.
        :key
        """
        print("Loading caffe model used for detecting a human_blob.")
        # Use cv2.dnn function to read the caffe model used for detecting faces and set preferable target.
        self.detector = cv2.dnn.readNetFromCaffe(prototxt="human_detection_model/MobileNetSSD_deploy.prototxt.txt",
                                                 caffeModel="human_detection_model/MobileNetSSD_deploy.caffemodel")
        self.detector.setPreferableTarget(Rescue_PI.preferable_target)

    def load_onnx_model(self):
        """
        This function will load the pytorch model that is used for predicting the class of the human_blob.
        :key
        """
        print("Loading Rescue Detection Model")
        self.rescue_model = cv2.dnn.readNetFromONNX(os.path.join(
            os.path.dirname(os.path.realpath(__file__)),
            rescue_cnn_model_path))
        self.rescue_model.setPreferableTarget(Rescue_PI.preferable_target)

    def initialize_camera(self):
        """
        This function will initialize the camera or video stream by figuring out whether to stream the camera capture or from a video file.
        :key
        """
        if Rescue_PI.input_video_file_path is None:
            print("[INFO] starting threaded video stream...")
            self.vs = VideoStream(src=VID_CAM_INDEX).start()
        else:
            self.vs = cv2.VideoCapture(Rescue_PI.input_video_file_path)

    def grab_next_frame(self):
        """
        This function extracts the next frame from the video stream.
        :return:
        """
        if Rescue_PI.input_video_file_path is None:
            self.orig_frame = self.vs.read()
            self.frame = self.orig_frame.copy()
        else:
            _, self.frame = self.vs.read()
            # BUGFIX: keep a full-size copy for overlays/display, mirroring the
            # webcam branch; orig_frame was never set on this path before.
            self.orig_frame = self.frame
        if self.frame is None:
            pass
        else:
            self.frame = imutils.resize(self.frame, width=frame_width_in_pixels)

    def set_dimensions_for_frame(self):
        """
        This function will set the frame dimensions, which we will use later on.
        :key
        """
        if not self.h or not self.w:
            (self.h, self.w) = self.frame.shape[:2]

    def create_frame_blob(self):
        """
        This function will create a blob for our human_blob detector to detect a human_blob.
        :key
        """
        self.image_blob = cv2.dnn.blobFromImage(cv2.resize(self.frame, (300, 300)),
                                                0.007843, (300, 300), 127.5)

    def extract_face_detections(self):
        """
        This function will extract each human_blob detection that our human_blob detection model provides.
        :return:
        """
        self.detector.setInput(self.image_blob)
        self.detections = self.detector.forward()

    def extract_confidence_from_human_detections(self, i):
        """
        This function will extract the confidence(probability) of the human_blob detection so that we can filter out weak detections.
        :param i:
        :return:
        """
        self.confidence = self.detections[0, 0, i, 2]

    def get_class_label(self, i):
        """Record the SSD class id of detection *i* (15 == person)."""
        self.idx = int(self.detections[0, 0, i, 1])
        self.label = round(self.idx)

    def create_human_box(self, i):
        """
        This function will define coordinates of the human_blob.
        :param i:
        :return:
        """
        self.box = self.detections[0, 0, i, 3:7] * np.array([self.w, self.h, self.w, self.h])
        (self.startX, self.startY, self.endX, self.endY) = self.box.astype("int")

    def extract_human_roi(self):
        """
        This function will use the coordinates defined earlier and create a ROI that we will use for embeddings.
        :return:
        """
        self.human_blob = self.frame[self.startY:self.endY, self.startX:self.endX]
        (self.f_h, self.f_w) = self.human_blob.shape[:2]

    def create_predictions_blob(self):
        """
        This function will create another blob out of the human_blob ROI that we will use for prediction.
        :return:
        """
        self.human_blob = cv2.dnn.blobFromImage(cv2.resize(self.human_blob,
                                                           (MODEL_INPUT_SIZE, MODEL_INPUT_SIZE)), 1.0 / 255,
                                                (MODEL_INPUT_SIZE, MODEL_INPUT_SIZE), (0, 0, 0),
                                                swapRB=True, crop=False)

    def extract_detections(self):
        """
        This function uses the PyTorch model to predict from the given human_blob blob.
        :return:
        """
        self.rescue_model.setInput(self.human_blob)
        self.predictions = self.rescue_model.forward()

    def perform_classification(self):
        """
        This function will now use the prediction to do the following:
        1. Extract the class prediction from the predictions.
        2. Get the label of the prediction.
        :return:
        """
        self.prediction_index = np.array(self.predictions)[0].argmax()
        print(self.prediction_index)
        if self.prediction_index == FIGHTING_INDEX:
            self.name = "Fighting"
        elif self.prediction_index == CRYING_INDEX:
            self.name = "Crying"
        elif self.prediction_index == NORMAL_INDEX:
            self.name = "Normal"
        else:
            pass

    def init_audio(self):
        """Initialize the pygame mixer and load the alarm sound."""
        mixer.init()
        mixer.set_num_channels(8)
        self.voice = mixer.Channel(5)
        self.sound = mixer.Sound(sound_file)

    def play_audio(self):
        """
        This function is used for playing the alarm if a person is not wearing a mask.
        :return:
        """
        # Only start the alarm when the channel is idle, so it never overlaps.
        if not self.voice.get_busy():
            self.voice.play(self.sound)
        else:
            pass

    def create_play_audio_thread(self):
        """
        This function is used for creating a thread for the audio playing so that there won't be a blocking call.
        """
        self.sound_thread = threading.Thread(target=self.play_audio)

    def create_frame_icons(self):
        """
        This function will create the icons that will be displayed on the frame.
        :return:
        """
        self.text = "{}".format(self.name)
        self.y = self.startY - 10 if self.startY - 10 > 10 else self.startY + 10
        self.colorIndex = LABELS.index(self.name)

    def loop_over_frames(self):
        """
        This is the main function that will loop through the frames and use the functions defined above to detect for human_blob mask.
        :return:
        """
        while Rescue_PI.run_program:
            self.grab_next_frame()
            self.set_dimensions_for_frame()
            self.create_frame_blob()
            self.extract_face_detections()
            for i in range(0, self.detections.shape[2]):
                self.extract_confidence_from_human_detections(i)
                if self.confidence > MIN_CONFIDENCE:
                    self.get_class_label(i)
                    # SSD class 15 is "person" (see CLASSES in constants).
                    if self.label == 15:
                        self.create_human_box(i)
                        self.extract_human_roi()
                        # Skip crops too small or too blurry to classify.
                        if self.f_w < 20 or self.f_h < 20:
                            continue
                        if self.is_blur(self.human_blob, self.threshold):
                            continue
                        else:
                            self.super_res(self.human_blob)
                            self.create_predictions_blob()
                            self.extract_detections()
                            self.perform_classification()
                            if self.name == "Fighting":
                                print("[Prediction] Fighting is occurring")
                                self.play_audio()
                            if self.name == "Crying":
                                print("[Prediction] Crying is occurring")
                                self.play_audio()
                            if self.name == "Normal":
                                print("[Prediction] Normal")
                            if self.use_graphics:
                                self.create_frame_icons()
                                cv2.putText(self.orig_frame, self.text, (15, 15), cv2.FONT_HERSHEY_SIMPLEX,
                                            0.45, COLORS[self.colorIndex], 2)
                else:
                    pass
            if OPEN_DISPLAY:
                cv2.imshow("Frame", self.orig_frame)
                key = cv2.waitKey(1) & 0xFF
                if key == ord('q'):
                    break

    def clean_up(self):
        """
        Clean up the cv2 video capture.
        :return:
        """
        cv2.destroyAllWindows()

    def thread_for_rescue_detection(self):
        """
        Callable function that will run the mask detector and can be invoked in a thread.
        :return:
        """
        try:
            self.loop_over_frames()
        except Exception as e:
            # BUGFIX: was a silent `pass`, which hid every crash inside the
            # worker thread; surface the failure instead.
            print("[ERROR] Rescue detection loop failed: %s" % e)
        finally:
            self.clean_up()
if __name__ == "__main__":
    # Entry point: run detection targeting the Intel Movidius (MYRIAD) backend.
    Rescue_PI.perform_job(preferableTarget=cv2.dnn.DNN_TARGET_MYRIAD)
|
{"/Rescue_Pi.py": ["/constants.py"]}
|
25,579
|
elongton/recipebook
|
refs/heads/master
|
/recipebook/views.py
|
from django.shortcuts import render
from django.views.generic import ListView
from django.views.generic.edit import FormView, CreateView
from .models import Recipe
from .forms import AddRecipeForm
class RecipeList(ListView):
    """List all recipes; the template receives them as `recipes`."""
    model = Recipe
    context_object_name = 'recipes'
class AddRecipeView(CreateView):
    """Create a Recipe from title + instructions, then redirect to the index.

    NOTE(review): AddRecipeForm is imported in this module but unused here;
    CreateView auto-builds a ModelForm from `fields` instead.
    """
    model = Recipe
    fields = ('title', 'instructions',)
    template_name = 'recipebook/add_recipe.html'
    success_url = '/'
|
{"/recipebook/views.py": ["/recipebook/models.py", "/recipebook/forms.py"], "/recipebook/forms.py": ["/recipebook/models.py"], "/recipebook/admin.py": ["/recipebook/models.py"]}
|
25,580
|
elongton/recipebook
|
refs/heads/master
|
/recipebook/forms.py
|
from django import forms
from .models import (Unit,
Ingredient,
Recipe,
IngredientSection,
IngredientQuantity,)
class AddRecipeForm(forms.ModelForm):
    """ModelForm exposing only a Recipe's title and instructions."""
    class Meta():
        model = Recipe
        fields = ('title', 'instructions',)
|
{"/recipebook/views.py": ["/recipebook/models.py", "/recipebook/forms.py"], "/recipebook/forms.py": ["/recipebook/models.py"], "/recipebook/admin.py": ["/recipebook/models.py"]}
|
25,581
|
elongton/recipebook
|
refs/heads/master
|
/recipebook/admin.py
|
from django.contrib import admin
from .models import Ingredient, Unit, IngredientQuantity, IngredientSection, Recipe
# Register your models here.
# Plain registrations: default admin for these models.
admin.site.register(Recipe)
admin.site.register(IngredientSection)
admin.site.register(Ingredient)
admin.site.register(Unit)
class IngredientQuantityAdmin(admin.ModelAdmin):
    """Admin list for IngredientQuantity with a derived `unit` column."""
    list_display = ('ingredient', 'quantity', 'unit')
    def unit(self, obj):
        # NOTE(review): the initial migration gives Ingredient only `name`; if
        # the model has no `unit` field this raises AttributeError — confirm.
        return obj.ingredient.unit
admin.site.register(IngredientQuantity, IngredientQuantityAdmin)
|
{"/recipebook/views.py": ["/recipebook/models.py", "/recipebook/forms.py"], "/recipebook/forms.py": ["/recipebook/models.py"], "/recipebook/admin.py": ["/recipebook/models.py"]}
|
25,582
|
elongton/recipebook
|
refs/heads/master
|
/recipebook/migrations/0001_initial.py
|
# Generated by Django 2.1 on 2018-08-05 19:41
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial schema for recipebook.

    Creates Unit, Ingredient, IngredientQuantity, IngredientSection, and Recipe,
    then wires the foreign keys in a second pass (AddField) to break the
    creation-order dependency cycle. Do not edit by hand except to squash.
    """
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Ingredient',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('name', models.CharField(max_length=100)),
            ],
            options={
                'verbose_name': 'Ingredient',
                'verbose_name_plural': 'Ingredients',
            },
        ),
        migrations.CreateModel(
            name='IngredientQuantity',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('quantity', models.DecimalField(decimal_places=1, max_digits=5)),
                ('ingredient', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='recipebook.Ingredient')),
            ],
            options={
                'verbose_name': 'Ingredient Quantity',
                'verbose_name_plural': 'Ingredient Quantities',
            },
        ),
        migrations.CreateModel(
            name='IngredientSection',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('section_name', models.CharField(max_length=100)),
            ],
            options={
                'verbose_name': 'Ingredient Section',
                'verbose_name_plural': 'Ingredient Sections',
            },
        ),
        migrations.CreateModel(
            name='Recipe',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('title', models.CharField(max_length=150)),
                ('instructions', models.TextField(blank=True)),
            ],
            options={
                'verbose_name': 'Recipe',
                'verbose_name_plural': 'Recipes',
            },
        ),
        migrations.CreateModel(
            name='Unit',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('name', models.CharField(max_length=50)),
                ('short_name', models.CharField(blank=True, max_length=25)),
            ],
            options={
                'verbose_name': 'Unit',
                'verbose_name_plural': 'Units',
            },
        ),
        migrations.AddField(
            model_name='ingredientsection',
            name='recipe',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='ingredient_sections', to='recipebook.Recipe'),
        ),
        migrations.AddField(
            model_name='ingredientquantity',
            name='ingredient_section',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='ingredient_quantities', to='recipebook.IngredientSection'),
        ),
        migrations.AddField(
            model_name='ingredient',
            name='unit',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='recipebook.Unit'),
        ),
    ]
|
{"/recipebook/views.py": ["/recipebook/models.py", "/recipebook/forms.py"], "/recipebook/forms.py": ["/recipebook/models.py"], "/recipebook/admin.py": ["/recipebook/models.py"]}
|
25,583
|
elongton/recipebook
|
refs/heads/master
|
/recipebook/models.py
|
from django.db import models
# Create your models here.
class Unit(models.Model):
    """A measurement unit (e.g. gram) with an optional abbreviation."""

    class Meta:
        verbose_name = 'Unit'
        verbose_name_plural = 'Units'

    id = models.AutoField(primary_key=True)
    name = models.CharField(max_length=50)
    short_name = models.CharField(max_length=25, blank=True)

    def __str__(self):
        return self.name
class Ingredient(models.Model):
    """A named ingredient measured in a fixed Unit (unit deletion is blocked)."""

    class Meta:
        verbose_name = 'Ingredient'
        verbose_name_plural = 'Ingredients'

    id = models.AutoField(primary_key=True)
    name = models.CharField(max_length=100)
    unit = models.ForeignKey(Unit, on_delete=models.PROTECT)

    def __str__(self):
        return self.name
class Recipe(models.Model):
    """A titled recipe with free-form instruction text."""

    class Meta:
        verbose_name = 'Recipe'
        verbose_name_plural = 'Recipes'

    id = models.AutoField(primary_key=True)
    title = models.CharField(max_length=150)
    instructions = models.TextField(blank=True)

    def __str__(self):
        return self.title
class IngredientSection(models.Model):
    """A named group of ingredients inside a Recipe (deleted with the recipe)."""

    class Meta:
        verbose_name = 'Ingredient Section'
        verbose_name_plural = 'Ingredient Sections'

    section_name = models.CharField(max_length=100)
    recipe = models.ForeignKey(Recipe, related_name='ingredient_sections', on_delete=models.CASCADE, null=True)

    def __str__(self):
        return str(self.section_name)
class IngredientQuantity(models.Model):
    """An amount of an Ingredient used within an IngredientSection."""

    class Meta:
        verbose_name = 'Ingredient Quantity'
        verbose_name_plural = 'Ingredient Quantities'

    id = models.AutoField(primary_key=True)
    ingredient = models.ForeignKey(Ingredient, on_delete=models.PROTECT)
    quantity = models.DecimalField(max_digits=5, decimal_places=1)
    ingredient_section = models.ForeignKey(IngredientSection, related_name='ingredient_quantities', on_delete=models.CASCADE, null=True)

    def __str__(self):
        # f-string replaces the original redundant str(...) + str(' ') + str(...)
        # concatenation chain; output is identical, e.g. "1.5 Flour".
        return f"{self.quantity} {self.ingredient}"
|
{"/recipebook/views.py": ["/recipebook/models.py", "/recipebook/forms.py"], "/recipebook/forms.py": ["/recipebook/models.py"], "/recipebook/admin.py": ["/recipebook/models.py"]}
|
25,597
|
fgassert/eeUtil
|
refs/heads/master
|
/eeUtil/eeutil.py
|
import os
import ee
import logging
import time
import datetime
import json
import math
import warnings
from . import gsbucket
STRICT = True  # when True, failed/canceled tasks raise instead of returning False
# Credentials, project, and staging-bucket defaults are read from the environment
# at import time.
GEE_JSON = os.getenv("GEE_JSON")
GEE_SERVICE_ACCOUNT = os.getenv("GEE_SERVICE_ACCOUNT") or "service account"
GOOGLE_APPLICATION_CREDENTIALS = os.getenv("GOOGLE_APPLICATION_CREDENTIALS")
GEE_PROJECT = os.getenv("GEE_PROJECT") or os.getenv("CLOUDSDK_CORE_PROJECT")
GEE_STAGING_BUCKET = os.getenv("GEE_STAGING_BUCKET")
GEE_STAGING_BUCKET_PREFIX = os.getenv("GEE_STAGING_BUCKET_PREFIX")
# Asset-type groupings covering both legacy and cloud-API type names.
FOLDER_TYPES = (ee.data.ASSET_TYPE_FOLDER, ee.data.ASSET_TYPE_FOLDER_CLOUD)
IMAGE_COLLECTION_TYPES = (ee.data.ASSET_TYPE_IMAGE_COLL, ee.data.ASSET_TYPE_IMAGE_COLL_CLOUD)
IMAGE_TYPES = ('Image', 'IMAGE')
TABLE_TYPES = ('Table', 'TABLE')
# Unary GEE home directory
_cwd = ''  # cached current working directory; lazily initialized by getCWD()
_gs_bucket_prefix = ''  # default staging folder inside the GS bucket
logger = logging.getLogger(__name__)
#######################
# 0. Config functions #
#######################
def init(service_account=GEE_SERVICE_ACCOUNT,
         credential_path=GOOGLE_APPLICATION_CREDENTIALS,
         project=GEE_PROJECT, bucket=GEE_STAGING_BUCKET,
         bucket_prefix=GEE_STAGING_BUCKET_PREFIX,
         credential_json=GEE_JSON):
    '''
    Initialize Earth Engine and Google Storage bucket connection.

    Defaults to read from environment.
    If no service_account is provided, will attempt to use credentials saved by
    `earthengine authenticate`, and `gcloud auth application-default login`
    utilities.

    `service_account` Service account name. Will need access to both GEE and
                      Storage
    `credential_path` Path to json file containing private key
    `project`         GCP project for earthengine and storage bucket
    `bucket`          Storage bucket for staging assets for ingestion
    `bucket_prefix`   Default bucket folder for staging operations
    `credential_json` Json-string to use instead of `credential_path`

    https://developers.google.com/earth-engine/service_account
    '''
    global _gs_bucket_prefix
    init_opts = {}
    # In-memory JSON credentials take precedence over an on-disk key file.
    if credential_json:
        init_opts['credentials'] = ee.ServiceAccountCredentials(service_account, key_data=credential_json)
    elif credential_path:
        init_opts['credentials'] = ee.ServiceAccountCredentials(service_account, key_file=credential_path)
        # Also export for any library that reads the standard GCP env var.
        os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = credential_path
    if project:
        init_opts['project'] = project
    ee.Initialize(**init_opts)
    try:
        gsbucket.init(bucket, **init_opts)
    except Exception as e:
        # Best-effort: EE still works without the staging bucket; only
        # upload/download helpers are degraded.
        logger.warning("Could not authenticate Google Cloud Storage Bucket. Upload and download functions will not work.")
        logger.error(e)
    if bucket_prefix:
        _gs_bucket_prefix = bucket_prefix
def initJson(credential_json=GEE_JSON, project=GEE_PROJECT,
             bucket=GEE_STAGING_BUCKET):
    '''
    Initialize using a JSON credential string (defaults from GEE_JSON env var).
    '''
    # Bug fix: pass the JSON by keyword. Positionally, the fifth argument of
    # init() is bucket_prefix, so the credentials were silently dropped and
    # the prefix was set to the JSON blob.
    init('service_account', None, project, bucket,
         credential_json=credential_json)
def setBucketPrefix(prefix=''):
    """Set the default folder prefix used for storage-bucket staging operations."""
    global _gs_bucket_prefix
    _gs_bucket_prefix = prefix
########################
# 1. Utility functions #
########################
def formatDate(date):
    """Return *date* as integer milliseconds since the Unix epoch.

    Integers are assumed to already be epoch-milliseconds and pass through.
    """
    if isinstance(date, int):
        return date
    epoch = datetime.datetime.utcfromtimestamp(0)
    delta = date - epoch
    return int(delta.total_seconds() * 1000)
#################################
# 2. Asset management functions #
#################################
def getHome():
    '''Get user root directory'''
    # NOTE(review): relies on a private earthengine-api attribute; may break
    # across ee versions — confirm against the pinned earthengine-api release.
    project = ee._cloud_api_utils._cloud_api_user_project
    if project == ee.data.DEFAULT_CLOUD_API_USER_PROJECT:
        # No explicit project configured: fall back to the first legacy asset root.
        assetRoots = ee.data.getAssetRoots()
        if not len(assetRoots):
            raise Exception(f"No available assets for provided credentials in project {project}")
        return assetRoots[0]['id']
    else:
        return f'projects/{project}/assets/'
def getCWD():
    """Return the current working directory, defaulting to the user's home root."""
    global _cwd
    _cwd = _cwd or getHome()
    return _cwd
def cd(path):
    """Change the current working directory; *path* must resolve to a folder."""
    global _cwd
    target = os.path.normpath(_path(path))
    if not isFolder(target):
        raise Exception(f"{target} is not a folder")
    _cwd = target
    return _cwd
def _path(path):
    '''Add cwd to path if not full path'''
    if path:
        abspath = path[0] == '/'  # a leading slash marks an absolute path
        path = path[1:] if abspath else path
        if len(path) > 6 and path[:6] == 'users/':
            # Legacy users/... paths are rewritten onto the default cloud project.
            # (A bare 'users/' with nothing after it deliberately falls through.)
            return f'projects/{ee.data.DEFAULT_CLOUD_API_USER_PROJECT}/{path}'
        elif len(path) > 9 and path[:9] == 'projects/':
            return path  # already fully qualified
        else:
            # Absolute paths resolve against the public catalog; relative
            # paths resolve against the current working directory.
            basepath = 'projects/earthengine-public/assets/' if abspath else getCWD()
            return os.path.join(basepath, path)
    return getCWD()
def getQuota():
    '''Get GEE usage quota for the user's asset root.'''
    return ee.data.getAssetRootQuota(getHome())
def info(asset=''):
    '''Get asset info dict (None if the asset does not exist).'''
    return ee.data.getInfo(_path(asset))
def exists(asset):
    '''Return True if the asset exists, else False.'''
    # bool() replaces the redundant `True if ... else False` conditional.
    return bool(info(asset))
def isFolder(asset, image_collection_ok=True):
    '''Check if path is folder or imageCollection.

    `image_collection_ok` when True (default), ImageCollections count as folders.
    '''
    if ee._cloud_api_utils.is_asset_root(asset):
        return True
    asset_info = info(asset)
    folder_types = FOLDER_TYPES
    if image_collection_ok:
        # Tuple concatenation builds a new local tuple; module constant untouched.
        folder_types += IMAGE_COLLECTION_TYPES
    return asset_info and asset_info['type'] in folder_types
def ls(path='', abspath=False, details=False, pageToken=None):
    """Yield assets directly under *path*, transparently following pagination."""
    resp = ee.data.listAssets({'parent': _path(path), 'pageToken': pageToken})
    for entry in resp['assets']:
        if not abspath:
            entry['name'] = os.path.basename(entry['name'])
        yield entry if details else entry['name']
    if 'nextPageToken' in resp:
        yield from ls(path, abspath, details, pageToken=resp['nextPageToken'])
def _tree(folder, details=False, _basepath=''):
    """Depth-first walk of *folder*, yielding children before their container."""
    for entry in ls(folder, abspath=True, details=True):
        if entry['type'] in FOLDER_TYPES + IMAGE_COLLECTION_TYPES:
            yield from _tree(entry['name'], details, _basepath)
        if _basepath and entry['name'].startswith(_basepath):
            entry['name'] = entry['name'][len(_basepath):]
        yield entry if details else entry['name']
def tree(folder, abspath=False, details=False):
    '''Recursively list all assets in folder
    Args:
        folder (string): Earth Engine folder or image collection
        abspath (bool): Return absolute asset paths instead of paths relative to folder
        details (bool): Return a dict representation of each asset instead of only the assetId string
    Returns:
        If details is False:
            list: paths to assets
        If details is True:
            list: asset info dicts
    '''
    folder = _path(folder)
    # Strip the folder prefix from results unless absolute paths were requested.
    _basepath = '' if abspath else f'{folder.rstrip("/")}/'
    return _tree(folder, details, _basepath)
def getAcl(asset):
    '''Get ACL of asset or folder.'''
    return ee.data.getAssetAcl(_path(asset))
def setAcl(asset, acl={}, overwrite=False, recursive=False):
    '''Set ACL of asset
    `acl` ('public'|'private'| ACL specification )
    `overwrite` If false, only change specified values
    `recursive` Also apply to all children of a folder
    '''
    # NOTE: the {} default is only read here (never mutated), so sharing is safe.
    path = _path(asset)
    if recursive and isFolder(path, image_collection_ok=False):
        children = ls(path, abspath=True)
        for child in children:
            setAcl(child, acl, overwrite, recursive)
    _acl = {} if overwrite else getAcl(path)
    # Drop 'owners' before writing back — presumably not settable via
    # setAssetAcl; TODO confirm against the EE API.
    _acl.pop('owners', None)
    if acl == 'public':
        _acl["all_users_can_read"] = True
    elif acl == 'private':
        _acl["all_users_can_read"] = False
    else:
        _acl.update(acl)
    acl = json.dumps(_acl)
    logger.debug('Setting ACL to {} on {}'.format(acl, path))
    ee.data.setAssetAcl(path, acl)
def setProperties(asset, properties={}):
    '''Set asset properties (the {} default is only read, never mutated).'''
    return ee.data.setAssetProperties(_path(asset), properties)
def createFolder(path, image_collection=False, overwrite=False,
                 public=False):
    '''Create folder or image collection,
    Automatically creates intermediate folders a la `mkdir -p`
    '''
    path = _path(path)
    upper = os.path.split(path)[0]
    if not isFolder(upper):
        # Recurse to create any missing parent folders first.
        createFolder(upper)
    if overwrite or not isFolder(path):
        ftype = (ee.data.ASSET_TYPE_IMAGE_COLL if image_collection
                 else ee.data.ASSET_TYPE_FOLDER)
        logger.debug(f'Created {ftype} {path}')
        ee.data.createAsset({'type': ftype}, path, overwrite)
    if public:
        setAcl(path, 'public')
def createImageCollection(path, overwrite=False, public=False):
    """Create an image collection at *path* (thin wrapper over createFolder)."""
    createFolder(path, image_collection=True, overwrite=overwrite, public=public)
def copy(src, dest, overwrite=False, recursive=False):
    '''Copy asset'''
    # A trailing slash on dest means "copy into this folder under src's basename".
    if dest[-1] == '/':
        dest = dest + os.path.basename(src)
    if recursive and isFolder(src):
        # Recreate the container (folder vs image collection), then copy children.
        is_image_collection = info(src)['type'] in IMAGE_COLLECTION_TYPES
        createFolder(dest, is_image_collection)
        for child in ls(src):
            copy(os.path.join(src, child), os.path.join(dest, child), overwrite, recursive)
    else:
        ee.data.copyAsset(_path(src), _path(dest), overwrite)
def move(src, dest, overwrite=False, recursive=False):
    '''Move asset (implemented as copy followed by delete).'''
    # A trailing slash on dest means "move into this folder under src's basename".
    if dest[-1] == '/':
        dest = dest + os.path.basename(src)
    src = _path(src)
    copy(src, _path(dest), overwrite, recursive)
    remove(src, recursive)
def remove(asset, recursive=False):
    '''Delete asset from GEE'''
    if recursive and isFolder(asset):
        # Children must be deleted before their containing folder.
        for child in ls(asset, abspath=True):
            remove(child, recursive)
    logger.debug('Deleting asset {}'.format(asset))
    ee.data.deleteAsset(_path(asset))
################################
# 3. Task management functions #
################################
def getTasks(active=False):
    """Return recent tasks; with active=True only READY/RUNNING/UNSUBMITTED ones."""
    tasks = ee.data.getTaskList()
    if not active:
        return tasks
    active_states = (
        ee.batch.Task.State.READY,
        ee.batch.Task.State.RUNNING,
        ee.batch.Task.State.UNSUBMITTED,
    )
    return [task for task in tasks if task['state'] in active_states]
def _checkTaskCompleted(task_id):
    '''Return True if task completed else False.

    Raises if the task failed/was cancelled and STRICT is enabled.
    '''
    status = ee.data.getTaskStatus(task_id)[0]
    if status['state'] in (ee.batch.Task.State.CANCELLED,
                           ee.batch.Task.State.FAILED):
        if 'error_message' in status:
            logger.error(status['error_message'])
        if STRICT:
            raise Exception(f"Task {status['id']} ended with state {status['state']}")
        return True  # finished, albeit unsuccessfully
    elif status['state'] == ee.batch.Task.State.COMPLETED:
        return True
    return False
def waitForTasks(task_ids=[], timeout=3600):
    '''Wait for tasks to complete, fail, or timeout
    Waits for all active tasks if task_ids is not provided
    Note: Tasks will not be canceled after timeout, and
    may continue to run.
    '''
    # NOTE: timeout == 0 disables the deadline and waits indefinitely.
    # The [] default is safe: it is reassigned, never mutated.
    if not task_ids:
        task_ids = [t['id'] for t in getTasks(active=True)]
    start = time.time()
    elapsed = 0
    while elapsed < timeout or timeout == 0:
        elapsed = time.time() - start
        finished = [_checkTaskCompleted(task) for task in task_ids]
        if all(finished):
            logger.info(f'Tasks {task_ids} completed after {elapsed}s')
            return True
        time.sleep(5)  # poll interval
    logger.warning(f'Stopped waiting for {len(task_ids)} tasks after {timeout} seconds')
    if STRICT:
        raise Exception(f'Stopped waiting for {len(task_ids)} tasks after {timeout} seconds')
    return False
def waitForTask(task_id, timeout=3600):
    """Wait for a single task to complete, fail, or time out."""
    return waitForTasks([task_id], timeout)
#######################
# 4. Import functions #
#######################
def ingestAsset(gs_uri, asset, date=None, wait_timeout=None, bands=[]):
    '''[DEPRECATED] please use eeUtil.ingest instead'''
    # `date` is accepted for backward compatibility but ignored.
    warnings.warn('[DEPRECATED] please use eeUtil.ingest instead', DeprecationWarning)
    return ingest(gs_uri, asset, wait_timeout, bands)
def _guessIngestTableType(path):
if os.path.splitext(path)[-1] in ['.csv', '.zip']:
return True
return False
def ingest(gs_uri, asset, wait_timeout=None, bands=[], ingest_params={}):
    '''
    Ingest asset from GS to EE
    `gs_uri` should be formatted `gs://<bucket>/<blob>`
    `asset` destination path
    `wait_timeout` if non-zero, wait timeout secs for task completion
    `bands` optional band name list
    `ingest_params`dict optional additional ingestion params to pass to
        ee.data.startIngestion() or ee.data.startTableIngestion()
        'id' and 'sources' are provided by this function
    '''
    asset_id = _path(asset)
    params = ingest_params.copy()  # copy so the shared {} default is never mutated
    if _guessIngestTableType(gs_uri):
        # table source (.csv / zipped shapefile)
        params.update({'id': asset_id, 'sources': [{'primaryPath': gs_uri}]})
        request_id = ee.data.newTaskId()[0]
        task_id = ee.data.startTableIngestion(request_id, params, True)['id']
    else:
        # image asset
        # NOTE(review): unlike the table branch, this rebinds params, so
        # ingest_params are dropped for images — confirm whether intentional.
        params = {'id': asset_id, 'tilesets': [{'sources': [{'primaryPath': gs_uri}]}]}
        if bands:
            if isinstance(bands[0], str):
                bands = [{'id': b} for b in bands]
            params['bands'] = bands
        request_id = ee.data.newTaskId()[0]
        task_id = ee.data.startIngestion(request_id, params, True)['id']
    logger.info(f"Ingesting {gs_uri} to {asset}: {task_id}")
    if wait_timeout is not None:
        waitForTask(task_id, wait_timeout)
    return task_id
def uploadAsset(filename, asset, gs_prefix='', date='', public=False,
                timeout=3600, clean=True, bands=[]):
    '''[DEPRECATED] please use eeUtil.upload instead'''
    # `date` is accepted for backward compatibility but ignored.
    warnings.warn('[DEPRECATED] please use eeUtil.upload instead', DeprecationWarning)
    return upload([filename], [asset], gs_prefix, public, timeout, clean, bands)[0]
def uploadAssets(files, assets, gs_prefix='', dates=[], public=False,
                 timeout=3600, clean=True, bands=[]):
    '''[DEPRECATED] please use eeUtil.upload instead'''
    # `dates` is accepted for backward compatibility but ignored.
    warnings.warn('[DEPRECATED] please use eeUtil.upload instead', DeprecationWarning)
    return upload(files, assets, gs_prefix, public, timeout, clean, bands)
def upload(files, assets, gs_prefix='', public=False,
           timeout=3600, clean=True, bands=[], ingest_params={}):
    '''Stage files to cloud storage and ingest into Earth Engine

    Currently supports `tif`, `zip` (shapefile), and `csv`

    `files` local file path or list of paths
    `assets` destination asset ID or list of asset IDs
    `gs_prefix` storage bucket folder for staging (else files are staged to bucket root)
    `public` set acl public after upload if True
    `timeout` wait timeout secs for completion of GEE ingestion
    `clean` delete files from storage bucket after completion
    `bands` optional band names to assign, all assets must have the same number of bands
    `ingest_params`optional additional ingestion params to pass to
        ee.data.startIngestion() or ee.data.startTableIngestion()
    '''
    if type(files) is str and type(assets) is str:
        files = [files]
        assets = [assets]
    if len(assets) != len(files):
        raise Exception(f"Files and assets must be of same length. Found {len(files)}, {len(assets)}")
    gs_prefix = gs_prefix or _gs_bucket_prefix
    task_ids = []
    gs_uris = gsbucket.stage(files, gs_prefix)
    for i in range(len(files)):
        # Bug fix: forward ingest_params — it was documented and accepted but
        # previously never passed through to ingest().
        task_ids.append(ingest(gs_uris[i], assets[i], timeout, bands, ingest_params))
    try:
        waitForTasks(task_ids, timeout)
        if public:
            for asset in assets:
                setAcl(asset, 'public')
    except Exception as e:
        # Best-effort: still attempt cleanup below even if ingestion failed.
        logger.error(e)
    if clean:
        gsbucket.remove(gs_uris)
    return assets
#######################
# 5. Export functions #
#######################
def _getAssetCrs(assetInfo):
return assetInfo['bands'][0]['crs']
def _getAssetCrsTransform(assetInfo):
return assetInfo['bands'][0]['crs_transform']
def _getAssetProjection(assetInfo):
    # Rebuild an ee.Projection from the first band's CRS and affine transform.
    return ee.Projection(_getAssetCrs(assetInfo), _getAssetCrsTransform(assetInfo))
def _getAssetScale(assetInfo):
    # Nominal scale of the asset's projection (see ee.Projection.nominalScale).
    return _getAssetProjection(assetInfo).nominalScale()
def _getExportDescription(path):
desc = path.replace('/', ':')
return desc[-100:] if len(desc)>100 else desc
def _getAssetBounds(assetInfo):
    # Ring geometry of the asset footprint; an infinite footprint falls back
    # to a whole-globe ring.
    coordinates = assetInfo['properties']['system:footprint']['coordinates']
    if coordinates[0][0] in ['-Infinity', 'Infinity']:
        coordinates = [[-180, -90], [180, -90], [180, 90], [-180, 90], [-180, -90]]
    if _getAssetCrs(assetInfo) == 'EPSG:4326':
        # Planar (non-geodesic) ring so edges follow parallels/meridians.
        return ee.Geometry.LinearRing(
            coords=coordinates,
            proj='EPSG:4326',
            geodesic=False
        )
    return ee.Geometry.LinearRing(coordinates)
def _getAssetBitdepth(assetInfo):
bands = assetInfo['bands']
bit_depth = 0
for band in bands:
if band['data_type']['precision'] == 'double':
bit_depth += 64
elif band['data_type'].get('max'):
minval = band['data_type'].get('min', 0)
maxval = band['data_type'].get('max')
bit_depth += math.log(maxval-minval + 1, 2)
else:
bit_depth += 32
return bit_depth
def _getAssetExportDims(proj, scale, bounds, bit_depth, cloudOptimized=False):
    '''Compute export fileDimensions, dicing into tiles when the estimated
    raw size would exceed the 16 GiB per-file limit.

    NOTE: cloudOptimized is currently accepted but unused here.
    '''
    MAX_EXPORT_BYTES = 2**34  # 17179869184
    proj = ee.Projection(proj) if isinstance(proj, str) else proj
    proj = proj.atScale(scale)
    # Bounding box of the export region in projected pixel units.
    proj_coords = bounds.bounds(1, proj).coordinates().getInfo()[0]
    topright = proj_coords[2]
    bottomleft = proj_coords[0]
    x = topright[0] - bottomleft[0]
    y = topright[1] - bottomleft[1]
    # Round pixel dimensions up to a multiple of 256.
    x = math.ceil(x / 256.0) * 256
    y = math.ceil(y / 256.0) * 256
    byte_depth = bit_depth / 8
    total_bytes = x * y * byte_depth
    if total_bytes > MAX_EXPORT_BYTES:
        # Choose power-of-two tile dims whose product stays under the limit.
        depth = int(math.log(MAX_EXPORT_BYTES / byte_depth, 2))
        y = 2 ** (depth // 2)
        x = 2 ** (depth // 2 + depth % 2)
        logger.warning(f'Export size (2^{math.log(total_bytes,2)}) more than 2^{math.log(MAX_EXPORT_BYTES,2)} bytes, dicing to {x}x{y} tiles')
    return x,y
def _getImageExportArgs(image, bucket, fileNamePrefix,
                        description=None, region=None, scale=None, crs=None,
                        maxPixels=1e13, fileDimensions=None, fileFormat='GeoTIFF',
                        cloudOptimized=False, **kwargs):
    '''Assemble kwargs for ee.batch.Export.image.toCloudStorage, filling any
    unspecified values from the image's own metadata.'''
    assetInfo = ee.Image(image).getInfo()
    description = description or _getExportDescription(f'gs://{bucket}/{fileNamePrefix}')
    scale = scale or _getAssetScale(assetInfo)
    crs = crs or _getAssetProjection(assetInfo)
    region = region or _getAssetBounds(assetInfo)
    fileDimensions = fileDimensions or _getAssetExportDims(crs, scale, region, _getAssetBitdepth(assetInfo), cloudOptimized)
    args = {
        'image': image,
        'description': description,
        'bucket': bucket,
        'fileNamePrefix': fileNamePrefix,
        'region': region,
        'scale': scale,
        'crs': crs,
        'maxPixels': maxPixels,
        'fileDimensions': fileDimensions,
        'fileFormat': fileFormat,
        'formatOptions': {
            'cloudOptimized': cloudOptimized,
        }
    }
    # Caller-supplied kwargs override the derived defaults.
    args.update(kwargs)
    return args
def _getImageSaveArgs(image, assetId, description=None, pyramidingPolicy='mean', region=None, scale=None, crs=None,
                      maxPixels=1e13, **kwargs):
    '''Assemble kwargs for ee.batch.Export.image.toAsset, filling any
    unspecified values from the image's own metadata.'''
    # Bug fix: fetch the metadata once. The original called getInfo() twice
    # back-to-back, doubling a round-trip to the EE API for no effect.
    assetInfo = ee.Image(image).getInfo()
    description = description or _getExportDescription(assetId)
    scale = scale or _getAssetScale(assetInfo)
    crs = crs or _getAssetProjection(assetInfo)
    region = region or _getAssetBounds(assetInfo)
    # A bare string policy applies to all bands via the '.default' key.
    pyramidingPolicy = {'.default': pyramidingPolicy} if isinstance(pyramidingPolicy, str) else pyramidingPolicy
    args = {
        'image': image,
        'description': description,
        'assetId': assetId,
        'pyramidingPolicy': pyramidingPolicy,
        'region': region,
        'crs': crs,
        'scale': scale,
        'maxPixels': maxPixels,
    }
    # Caller-supplied kwargs override the derived defaults.
    args.update(kwargs)
    return args
def _cast(image, dtype):
'''Cast an image to a data type'''
return {
'uint8': image.uint8,
'uint16': image.uint16,
'uint32': image.uint32,
'int8': image.int8,
'int16': image.int16,
'int32': image.int32,
'int64': image.int64,
'byte': image.byte,
'short': image.short,
'int': image.int,
'long': image.long,
'float': image.float,
'double': image.double
}[dtype]()
def saveImage(image, assetId, dtype=None, pyramidingPolicy='mean', wait_timeout=None, **kwargs):
    '''Export image to asset
    Attempts to guess export args from image metadata if it exists
    Args:
        image (ee.Image): the Image to export
        assetId (str): the asset path to export to
        dtype (str): Cast to image to dtype before export ['byte'|'int'|'float'|'double'...]
        pyramidingPolicy (str, dict): default or per-band asset pyramiding policy ['mean', 'mode', 'sample', 'max'...]
        wait_timeout (bool): if not None, wait at most timeout secs for export completion
        **kwargs: additional parameters to pass to ee.batch.Export.image.toAsset()
    Returns:
        str: task id
    '''
    path = _path(assetId)
    if dtype:
        # Cast first so the export writes the requested pixel type.
        image = _cast(image, dtype)
    args = _getImageSaveArgs(image, path, pyramidingPolicy=pyramidingPolicy, **kwargs)
    logger.info(f'Exporting image to {path}')
    task = ee.batch.Export.image.toAsset(**args)
    task.start()
    if wait_timeout is not None:
        waitForTask(task.id, wait_timeout)
    return task.id
def findOrSaveImage(image, assetId, wait_timeout=None, **kwargs):
    '''Export an Image to asset, or return the image asset if it already exists
    Will avoid duplicate exports by checking for existing tasks with matching descriptions.
    Args:
        image (ee.Image): The image to cache
        assetId (str): The asset path to export to or load from
        wait_timeout (bool): If not None, wait at most timeout secs for export completion
        kwargs: additional export arguments to pass to eeUtil.saveImage()
    Returns:
        ee.Image: the cached image if it exists, or the input image that was just exported
    '''
    path = _path(assetId)
    if exists(path):
        logger.debug(f'Asset {os.path.basename(path)} exists, using cached asset.')
        return ee.Image(path)
    # Deduplicate by task description: skip the export if one is already running.
    description = kwargs.get('description', _getExportDescription(path))
    existing_task = next(filter(lambda t: t['description'] == description, getTasks(active=True)), None)
    if existing_task:
        logger.info(f'Task with description {description} already in progress, skipping export.')
        task_id = existing_task['id']
    else:
        task_id = saveImage(image, path, **kwargs)
    if wait_timeout is not None:
        waitForTask(task_id, wait_timeout)
    # NOTE: returns the in-memory image (not the asset), even after waiting.
    return image
def exportImage(image, blob, bucket=None, fileFormat='GeoTIFF', cloudOptimized=False, dtype=None,
                overwrite=False, wait_timeout=None, **kwargs):
    '''Export an Image to cloud storage
    Args:
        image (ee.Image): Image to export
        blob (str): Filename to export to (excluding extention)
        bucket (str): Cloud storage bucket
        fileFormat (str): Export file format ['geotiff'|'tfrecord']
        cloudOptimized (bool): (GeoTIFF only) export as Cloud Optimized GeoTIFF
        dtype (str): Cast to image to dtype before export ['byte'|'int'|'float'|'double'...]
        overwrite (bool): Overwrite existing files
        wait_timeout (int): If non-zero, wait timeout secs for task completion
        **kwargs: Additional parameters to pass to ee.batch.Export.image.toCloudStorage()
    Returns:
        (str, str): taskId, destination uri (None if skipped because blobs exist)
    '''
    bucket = gsbucket._defaultBucketName(bucket)
    if dtype:
        image = _cast(image, dtype)
    ext = {'geotiff': '.tif', 'tfrecord': '.tfrecord'}[fileFormat.lower()]
    uri = gsbucket.asURI(blob + ext, bucket)
    # Renamed from `exists` to avoid shadowing the module-level exists().
    existing = gsbucket.getTileBlobs(uri)
    if existing and not overwrite:
        logger.info(f'{len(existing)} blobs matching {blob} exists, skipping export')
        return
    args = _getImageExportArgs(image, bucket, blob, cloudOptimized=cloudOptimized, **kwargs)
    task = ee.batch.Export.image.toCloudStorage(**args)
    task.start()
    logger.info(f'Exporting to {uri}')
    if wait_timeout is not None:
        # Bug fix: honor the caller's wait_timeout (previously fell back to
        # waitForTask's 3600s default).
        waitForTask(task.id, wait_timeout)
    return task.id, uri
def exportTable(table, blob, bucket=None, fileFormat='GeoJSON',
                overwrite=False, wait_timeout=None, **kwargs):
    '''
    Export FeatureCollection to cloud storage
    Args:
        table (ee.FeatureCollection): FeatureCollection to export
        blob (str): Filename to export to (excluding extention)
        bucket (str): Cloud storage bucket
        fileFormat (str): Export file format ['csv'|'geojson'|'shp'|'tfrecord'|'kml'|'kmz']
        overwrite (bool): Overwrite existing files
        wait_timeout (int): If non-zero, wait timeout secs for task completion
        **kwargs: Additional parameters to pass to ee.batch.Export.table.toCloudStorage()
    Returns:
        (str, str): taskId, destination uri (None if skipped because blob exists)
    '''
    blobname = f'{blob}.{fileFormat.lower()}'
    uri = gsbucket.asURI(blobname, bucket)
    # Renamed from `exists` to avoid shadowing the module-level exists().
    existing = gsbucket.exists(uri)
    if existing and not overwrite:
        logger.info(f'Blob matching {blobname} exists, skipping export')
        return
    args = {
        'collection': table,
        'description': _getExportDescription(uri),
        'bucket': gsbucket._defaultBucketName(bucket),
        'fileFormat': fileFormat,
        'fileNamePrefix': blob
    }
    args.update(kwargs)
    task = ee.batch.Export.table.toCloudStorage(**args)
    task.start()
    logger.info(f'Exporting to {uri}')
    if wait_timeout is not None:
        # Bug fix: honor the caller's wait_timeout (previously fell back to
        # waitForTask's 3600s default).
        waitForTask(task.id, wait_timeout)
    return task.id, uri
def export(assets, bucket=None, prefix='', recursive=False,
           overwrite=False, wait_timeout=None, cloudOptimized=False, **kwargs):
    '''Export assets to cloud storage
    Exports one or more assets to cloud storage.
    FeatureCollections are exported as GeoJSON.
    Images are exported as GeoTIFF.
    Use `recursive=True` to export all assets in folders or ImageCollections.
    Args:
        assets (str, list): Asset(s) to export
        bucket (str): Google cloud storage bucket name
        prefix (str): Optional folder to export assets to (prepended to asset names)
        recursive (bool): Export all assets in folder or image collection (asset)
        overwrite (bool): Overwrite existing assets
        wait_timeout (int): If not None, wait timeout secs for task completion
        cloudOptimized (bool): Export Images as Cloud Optimized GeoTIFFs
        **kwargs: Additional export arguments passed to ee.batch.Export.{}.toCloudStorage()
    Returns:
        (list, list): TaskIds, URIs
    '''
    prefix = prefix or _gs_bucket_prefix
    assets = (assets,) if isinstance(assets, str) else assets
    paths = [os.path.basename(a) for a in assets]
    infos = [info(a) for a in assets]
    # Iterate a copy so folder children can be appended while scanning.
    for item in infos[:]:
        if item is None:
            raise Exception('Asset does not exist.')
        if item['type'] in FOLDER_TYPES+IMAGE_COLLECTION_TYPES:
            if recursive:
                folder = f"{item['name']}/"
                for c in tree(item['name'], abspath=True, details=True):
                    infos.append(c)
                    paths.append(c['name'][len(folder):])
            else:
                raise Exception(f"{item['name']} is a folder/ImageCollection. Use recursive=True to export all assets in folder")
    tasks = []
    uris = []
    for item, path in zip(infos, paths):
        blob = os.path.join(prefix, path)
        result = None
        if item['type'] in IMAGE_TYPES:
            image = ee.Image(item['name'])
            result = exportImage(image, blob, bucket, cloudOptimized=cloudOptimized, overwrite=overwrite, **kwargs)
        elif item['type'] in TABLE_TYPES:
            table = ee.FeatureCollection(item['name'])
            result = exportTable(table, blob, bucket, overwrite=overwrite, **kwargs)
        # Skipped exports return None; folders were expanded above.
        if result:
            task, uri = result
            tasks.append(task)
            uris.append(uri)
    if wait_timeout is not None:
        # Bug fix: honor the caller's wait_timeout (previously fell back to
        # waitForTasks' 3600s default).
        waitForTasks(tasks, wait_timeout)
    return tasks, uris
def download(assets, directory=None, gs_bucket=None, gs_prefix='', clean=True, recursive=False, timeout=3600, **kwargs):
    '''Export image assets to cloud storage, then downloads to local machine
    `assets`    Asset ID or list of asset IDs
    `directory` Optional local directory to save assets to
    `gs_bucket` GS bucket for staging (else default bucket)
    `gs_prefix` GS folder for staging (else files are staged to bucket root)
    `clean`     Remove file from GS after download
    `recursive` Download all assets in folders
    `timeout`   Wait timeout secs for GEE export task completion
    `kwargs`    Additional args to pass to ee.batch.Export.{}.toCloudStorage()
    '''
    gs_prefix = gs_prefix or _gs_bucket_prefix
    if directory and not os.path.isdir(directory):
        raise Exception(f"Folder {directory} does not exist")
    tasks, uris = export(assets, gs_bucket, gs_prefix, recursive, overwrite=True, wait_timeout=timeout, **kwargs)
    filenames = []
    for uri in uris:
        # An export may have been diced into multiple tile blobs per URI.
        for _uri in gsbucket.getTileBlobs(uri):
            path = gsbucket.pathFromURI(_uri)
            # Strip the staging prefix so local names mirror the asset layout.
            fname = path[len(gs_prefix):].lstrip('/') if gs_prefix else path
            filenames.append(fname)
            gsbucket.download(_uri, fname, directory=directory)
            if clean:
                gsbucket.remove(_uri)
    return filenames
# ALIAS — shell-style shorthands for the asset-management functions
mkdir = createFolder
rm = remove
mv = move
cp = copy
# old function names kept for backward compatibility
removeAsset = remove
downloadAsset = download
downloadAssets = download
|
{"/eeUtil/eeutil.py": ["/eeUtil/__init__.py"], "/eeUtil/__init__.py": ["/eeUtil/eeutil.py"], "/eeUtil/gsbucket.py": ["/eeUtil/__init__.py"]}
|
25,598
|
fgassert/eeUtil
|
refs/heads/master
|
/setup.py
|
#!/usr/bin/env python
from setuptools import setup

# Read the long description from the README; UTF-8 so non-ASCII content
# renders correctly regardless of the build machine's locale.
with open('README.md', encoding='utf-8') as f:
    desc = f.read()

setup(
    name='eeUtil',
    version='0.3.0',
    description='Python wrapper for easier data management on Google Earth Engine.',
    long_description=desc,
    long_description_content_type='text/markdown',
    license='MIT',
    author='Francis Gassert',
    url='https://github.com/fgassert/eeUtil',
    packages=['eeUtil'],
    install_requires=[
        'earthengine-api>=0.1.232,<0.2',
        'google-cloud-storage>=1.31.1,<2',
        # Bug fix: the missing comma made this an invalid PEP 508 specifier
        # (">=1.22.2<2"), so the upper bound was never applied.
        'google-api-core>=1.22.2,<2'
    ]
)
|
{"/eeUtil/eeutil.py": ["/eeUtil/__init__.py"], "/eeUtil/__init__.py": ["/eeUtil/eeutil.py"], "/eeUtil/gsbucket.py": ["/eeUtil/__init__.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.