text stringlengths 38 1.54M |
|---|
from django.db import models
from django.utils import timezone
from users.models import User
#from shoppings.models import ShoppingList
# Create your models here.
def upload_path(instance, filename):
    """Return the storage path for a delivery image upload.

    `instance` is required by Django's ``upload_to`` contract but is not
    used when building the path.
    """
    return '/'.join(('images/deliveries', filename))
class Delivery(models.Model):
    """A delivery scheduled for a user, optionally with a picture."""
    id = models.AutoField(primary_key=True)
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    #shoppingList = models.ForeignKey(ShoppingList, on_delete=models.CASCADE)
    # When the delivery is due; defaults to the moment of creation.
    deliverydate = models.DateTimeField(default=timezone.now)
    # Stamped once when the row is first saved.
    delcreateddate = models.DateTimeField(auto_now_add=True)
    # the users address as default would be nice
    # NOTE(review): "adress" is a typo, but renaming the field requires a migration.
    deliveryadress = models.CharField(default="None", max_length=100)
    name = models.CharField(default="None", max_length=100)
    # Optional photo stored via upload_path() under images/deliveries/.
    img = models.ImageField(blank=True, null=True, upload_to=upload_path)
    # User = To whom belongs this delivery (=> cross reference to items)
    # items = reference to Shopping list and make a list out of it
|
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework import status, generics
from api.admins.serializers import AccountSerializer
from api.publics.serializers import UserSerializer
from .models import User
# class RoomAPIView(APIView):
# def post(self, request):
# try:
# floor = request.data['floor']
# password = request.data['password']
# get_id = User.objects.get(floor=floor, password=password)
# serializer = AccountSerializer(get_id)
# return Response(serializer.data, status=status.HTTP_200_OK)
# except:
# return Response({"message":"CHECK_YOUR_NUMBER"},status=status.HTTP_400_BAD_REQUEST)
#from indent_corp.auth import HouseHoldAuthentication
#from indent_corp.models import HouseHold, DoorUseLog
#from indent_corp.serializers import HouseHoldSerializer
FEE_PER_COUNT = 1
# custom authentication_classes
from rest_framework.authentication import BaseAuthentication
class PublicUserAuthentication(BaseAuthentication):
    """Authentication backend for public endpoints.

    Reads the HTTP_AUTH header and the ``?key=`` query parameter but does
    not validate them yet; it always returns None, which DRF treats as
    "this backend does not authenticate the request".
    """

    def authenticate(self, request):
        # BUG FIX: removed leftover debug print() calls that dumped the
        # request object on every authentication attempt.
        authentication = request.META.get('HTTP_AUTH', 'default')
        params = request.query_params.get('key')
        # TODO(review): validate `authentication`/`params` and return a
        # (user, auth) tuple on success instead of always None.
        return None
class PublicUserDetailView(generics.RetrieveAPIView):
    """Read-only detail endpoint for a single User record.

    NOTE(review): PublicUserAuthentication.authenticate always returns
    None, so this view is effectively anonymous — confirm intended access
    control.
    """
    authentication_classes = [PublicUserAuthentication, ]
    queryset = User.objects.all()
    serializer_class = UserSerializer
from celery.schedules import crontab
# Will need to add "celery -A proj beat" for these to run.
# this needs to run on the webserver
# celery -A proj beat -s /webapp/logs/celery/celerybeat-schedule
# Periodic task schedule consumed by celery beat (see the comments above
# for the command used to start the scheduler on the webserver).
CELERYBEAT_SCHEDULE = {
    # Recompute stored recording durations once a day at 13:03.
    'set_recording_duration': {
        'task': 'corpus.tasks.set_all_recording_durations',
        'schedule': crontab(minute='03', hour='13', day_of_week='*'),
    },
    # Transcode pending audio at five minutes past every hour.
    'transcode_all_audio': {
        'task': 'corpus.tasks.transcode_all_audio',
        'schedule': crontab(minute='5', hour='*', day_of_week='*'),
    },
}
|
from dataclasses import dataclass
from joblib import Parallel, delayed
from ..datasets import FaceLandmarksDataset
from ..transformers import ConditionalFilter
from ..transformers import extractors as ex
from ..types import Directory, Tuple
from ..utils import data_dir
from ..visualizers import points_on_video
@dataclass
class DrawKeypoints(object):
    '''Draws different extractors' points on dataset's videos, saves to disk

    Extracted points are lowpass filtered for better human perception
    '''
    dataset_dir: Directory
    extractors: Tuple[str] = ('Dlib', 'Fa', 'San')
    cutoff: float = 2.5
    n_jobs: int = 1
    artifacts_dir: Directory = data_dir / 'tmp'
    verbose: bool = False

    @staticmethod
    def _draw(i, row, datasets, dataset_dir, save_dir, verbose):
        # Skip any video that was already rendered on a previous run.
        video_path = dataset_dir / row['filename']
        out_path = save_dir / f'{video_path.stem}_points.mp4'
        if out_path.exists():
            if verbose:
                print(f"{row['id']} exists")
            return
        if verbose:
            print(f"{row['id']} is generating")
        landmarks = {name: ds[i] for name, ds in datasets.items()}
        points_on_video(
            video_path,
            landmarks,
            row['fps'],
            title=f"recording {row['id']}",
            save_to=out_path,
        )

    def evaluate(self):
        out_dir = self.artifacts_dir / f'points_{self.dataset_dir.name}'
        out_dir.mkdir(parents=True, exist_ok=True)
        # One lowpass-filtered landmark dataset per requested extractor.
        datasets = {}
        for name in self.extractors:
            extractor_cls = getattr(ex, f'{name}Extractor')
            datasets[name] = FaceLandmarksDataset(
                self.dataset_dir,
                extractor_cls(device='cpu'),
                ConditionalFilter((self.cutoff,), 4, 'lowpass'),
            )
        # All datasets share the same markup table; use the first one.
        markup = datasets[self.extractors[0]].markup
        jobs = (
            delayed(self._draw)(
                i, row, datasets, self.dataset_dir, out_dir, self.verbose
            )
            for i, row in markup.iterrows()
        )
        Parallel(n_jobs=self.n_jobs)(jobs)
|
import os, requests
from django.core.mail import EmailMultiAlternatives
from rest_framework.exceptions import APIException
from django.template.loader import get_template
from django.template import Context
from django.conf import settings
from twilio.rest import Client
def send_email(slug, to, data=None):
    """Render the e-mail template for `slug` and send it through Mailgun.

    Args:
        slug: template name (resolved by get_template_content).
        to: recipient address (or list of addresses).
        data: optional template context dict.

    Returns:
        True on a 200 response from Mailgun, or when e-mail notifications
        are disabled in settings; False otherwise.
    """
    # BUG FIX: `data={}` was a shared mutable default argument.
    if data is None:
        data = {}
    if settings.BREATHECODE_SETTINGS['EMAIL_NOTIFICATIONS']:
        template = get_template_content(slug, data, ["email"])
        # NOTE(review): get_template_content never sets 'subject' —
        # confirm where that key is supposed to come from.
        return requests.post(
            "https://api.mailgun.net/v3/mailgun.breathecode.co/messages",
            auth=(
                "api",
                os.environ.get('MAILGUN_API_KEY')),
            data={
                "from": os.environ.get('MAILGUN_FROM') +
                " <mailgun@mailgun.jobcore.co>",
                "to": to,
                "subject": template['subject'],
                "text": template['text'],
                "html": template['html']}).status_code == 200
    else:
        # Notifications disabled: report success without sending anything.
        return True
def send_sms(slug, phone_number, data=None):
    """Render the SMS template for `slug` and send it via Twilio.

    Returns True when Twilio accepted the message, False on any error.
    """
    # BUG FIX: `data={}` was a shared mutable default argument.
    if data is None:
        data = {}
    template = get_template_content(slug, data, ["sms"])
    # Account Sid and Auth Token come from the environment
    # (see twilio.com/console).
    TWILLIO_SID = os.environ.get('TWILLIO_SID')
    TWILLIO_SECRET = os.environ.get('TWILLIO_SECRET')
    client = Client(TWILLIO_SID, TWILLIO_SECRET)
    try:
        # The created message object was never used, so it is not kept.
        client.messages.create(
            body=template['sms'],
            from_='+15017122661',
            to='+1' + phone_number
        )
        return True
    except Exception:
        # Best effort: callers only need a success/failure flag.
        return False
def send_mobile_notification(slug, registration_ids, data=None):
    """Send a push notification to the given registration ids.

    Returns the push-service result on success, or False when there is
    nothing to send (no ids, or no configured push service).

    Raises:
        APIException: when the template has no mobile version.
        Exception: when `data` carries no 'DATA' payload.
    """
    # BUG FIX: `data={}` was a shared mutable default argument.
    if data is None:
        data = {}
    # NOTE(review): `push_service` is not defined in this module chunk —
    # presumably a module-level FCM client; confirm it is configured.
    if len(registration_ids) > 0 and push_service:
        template = get_template_content(slug, data, ["email", "mobile"])
        if 'mobile' not in template:
            raise APIException(
                "The template " +
                slug +
                " does not seem to have a valid mobile notification version")
        # NOTE(review): get_template_content never sets 'subject' or
        # 'message' — confirm where these keys come from.
        message_title = template['subject']
        message_body = template['message']
        if 'DATA' not in data:
            raise Exception("There is no data for the notification")
        message_data = data['DATA']
        result = push_service.notify_multiple_devices(
            registration_ids=registration_ids,
            message_title=message_title,
            message_body=message_body,
            data_message=message_data)
        return result
    else:
        return False
def send_fcm_notification(slug, device_set=None, data=None):
    """Resolve device registration ids and forward them to the FCM sender.

    NOTE(review): `send_fcm` is not defined in this module chunk —
    possibly meant to be `send_mobile_notification`; confirm.
    """
    # BUG FIX: `device_set=[]` / `data={}` were shared mutable defaults.
    if device_set is None:
        device_set = []
    if data is None:
        data = {}
    registration_ids = [device.registration_id for device in device_set]
    send_fcm(slug, registration_ids, data)
def get_template_content(slug, data=None, formats=None):
    """Render the requested formats of the notification template `slug`.

    Args:
        slug: base template name; format-specific suffixes are appended
            ('.txt'/'.html' for email, '.mobile', '.sms').
        data: optional template context dict.
        formats: list drawn from {"email", "mobile", "sms"}.

    Returns:
        Dict with keys "text"/"html" (email), "mobile" and/or "sms",
        depending on `formats`.
    """
    # BUG FIX: mutable default arguments replaced with None sentinels.
    if data is None:
        data = {}
    if formats is None:
        formats = []
    # BUG FIX: `templates` was initialised as a list, so every
    # `templates["..."] = ...` assignment below raised TypeError;
    # it must be a dict.
    templates = {}
    if "email" in formats:
        plaintext = get_template(slug + '.txt')
        html = get_template(slug + '.html')
        templates["text"] = plaintext.render(data)
        templates["html"] = html.render(data)
    if "mobile" in formats:
        fms = get_template(slug + '.mobile')
        templates["mobile"] = fms.render(data)
    if "sms" in formats:
        sms = get_template(slug + '.sms')
        templates["sms"] = sms.render(data)
    return templates
# Aumento de 15%
# Prompt for the current salary and report it with a 15% raise applied.
print('{:=^40}'.format('Aumento!!'))
salario = float(input('Qual seu salário atual? '))
aumento = salario * 0.15
print('Parabens você recebeu um aumento de 15% agora seu salario é de {:.2f}R$'.format(salario + aumento))
|
# Punto 1
def sumatoria(n):
    """Return 1 + 2 + ... + n by recursion.

    BUG FIX: the base case was `n == 1`, so any call with n <= 0
    recursed forever (RecursionError); guarding with n <= 1 stops the
    recursion for 0 and negative inputs as well.
    """
    if n <= 1:
        return n
    return n + sumatoria(n - 1)

print(sumatoria(100))
# Punto 2. La entrada debe ser un numero natural
def suma_cifras(n):
    """Return the sum of the decimal digits of the natural number `n`."""
    # A single digit is its own digit sum; otherwise split off the last
    # digit and recurse on the rest.
    return n if n < 10 else n % 10 + suma_cifras(n // 10)

n = 8
print('El input es', n)
print(suma_cifras(n))
|
from flask import Flask, request, Response, jsonify, g
from gb import app, session
from gb.models import User
from controllers import protected
@app.route('/api/users/', methods=['GET'])
@protected
def get_user():
    """Return the authenticated user's profile as JSON, or a 404."""
    uid = g.current_user_id
    user = session.query(User).get(uid)
    if user is None:
        return jsonify(response="Can't fetch user with id: {}".format(uid)), 404
    return jsonify(username=user.username, user_id=user.id, email=user.email)
@app.route('/api/users', methods=['POST'])
def new_user():
    """Create a user from a JSON payload and return the new user's id."""
    # Parse the json
    json = request.get_json()
    u_name = json.get('username')
    email = json.get('email')
    password = json.get('password')
    # Ensure username and email were passed
    if not u_name:
        return jsonify(response="Could not create user, username field is required"), 404
    elif u_name == "default":
        return jsonify(response="Invalid username."), 400
    if not email:
        return jsonify(response="Could not create user. email field is required"), 404
    # Create user and commit.
    # FIX: use the already-parsed values instead of re-reading the payload,
    # and don't shadow this view function's own name with the local.
    user = User(username=u_name, email=email, password=password)
    session.add(user)
    session.commit()
    # Last check to make sure it was commited properly
    if not user.id:
        return jsonify(response="Could not create user"), 404
    # Return the user id on success
    return jsonify(user_id=user.id)
@app.route('/api/users/', methods=['PUT'])
@protected
def update_user():
    """Update username/email/password of the authenticated user."""
    user_id = g.current_user_id
    json = request.get_json()
    user = session.query(User).get(user_id)
    if not user:
        # BUG FIX: `"..." + user_id` raised TypeError for integer ids;
        # .format() also matches the style used by get_user().
        return jsonify(response="Can't fetch user with id: {}".format(user_id)), 404
    # Only overwrite the fields present in the payload.
    if json.get('username'):
        user.username = json.get('username')
    if json.get('email'):
        user.email = json.get('email')
    if json.get('password'):
        user.password = user.hash_password(json.get('password'))
    session.commit()
    return jsonify(username=user.username, user_id=user.id, email=user.email)
@app.route('/api/users/', methods=['DELETE'])
@protected
def delete_user():
    """Delete the authenticated user's account."""
    user_id = g.current_user_id
    user = session.query(User).get(user_id)
    if not user:
        # BUG FIX: string concatenation with an integer id raised TypeError.
        return jsonify(response="Could not fetch user with id: {}".format(user_id)), 404
    session.delete(user)
    session.commit()
    return jsonify()
|
import os
import gc
import json
import pandas as pd
import gzip
from gensim.models import TfidfModel, LsiModel
from gensim.corpora import Dictionary
from nlpia.data.loaders import BIGDATA_PATH, read_csv
KEEP_N = 300000 # max vocab size
NO_BELOW = 5 # min DF (count)
NO_ABOVE = .7 # max DF (fraction)
def lsa_twitter(cased_tokens=None):
    """ Latent Semantic Analysis on random sampling of twitter search results.

    Args:
        cased_tokens: search words used to seed the KEEP_TOKENS list.
            BUG FIX: made optional (default None) — the __main__ block
            calls lsa_twitter() with no argument, which used to raise
            TypeError even though the body already handled None.

    Returns:
        The trained gensim LsiModel.
    """
    # Only 5 of these tokens are saved for a no_below=2 filter:
    # PyCons NLPS #PyCon2016 #NaturalLanguageProcessing #naturallanguageprocessing
    if cased_tokens is None:
        cased_tokens = ('PyConOpenSpaces PyCon PyCon2017 PyCon2018 PyCon2016 PyCon2015 OpenSpace PyconTutorial ' +
                        'NLP NaturalLanguageProcessing NLPInAction NaturalLanguageProcessingInAction NLPIA Twote Twip'
                        ).split()
        # NOTE(review): the source indentation was mangled; these
        # augmentations are grouped under the default branch — confirm.
        cased_tokens += [s + 's' for s in cased_tokens]
        cased_tokens += 'TotalGood TotalGoods HobsonLane Hob Hobs TotalGood.com ' \
                        'www.TotalGood.com http://www.TotalGood.com https://www.TotalGood.com'.split()
    # Keep every casing variant, with and without a leading '#'.
    allcase_tokens = cased_tokens + [s.lower() for s in cased_tokens]
    allcase_tokens += [s.title() for s in cased_tokens]
    allcase_tokens += [s.upper() for s in cased_tokens]
    KEEP_TOKENS = allcase_tokens + ['#' + s for s in allcase_tokens]

    # takes 15 minutes and 10GB of RAM for 500k tweets if you keep all 20M unique tokens/names URLs
    vocab_path = os.path.join(BIGDATA_PATH, 'vocab939370.pkl')
    if os.path.isfile(vocab_path):
        print('Loading vocab: {} ...'.format(vocab_path))
        vocab = Dictionary.load(vocab_path)
        print(' len(vocab) loaded: {}'.format(len(vocab.dfs)))
    else:
        tweets_path = os.path.join(BIGDATA_PATH, 'tweets.csv.gz')
        print('Loading tweets: {} ...'.format(tweets_path))
        tweets = read_csv(tweets_path)
        # NOTE(review): pd.np was removed in pandas >= 1.0 — switch to a
        # direct `numpy` import when upgrading pandas.
        tweets = pd.np.array(tweets.text.str.split())
        with gzip.open(os.path.join(BIGDATA_PATH, 'tweets.txt.gz'), 'w') as f:
            for tokens in tweets:
                f.write((' '.join(tokens) + '\n').encode('utf-8'))
        print('Computing vocab from {} tweets...'.format(len(tweets)))
        vocab = Dictionary(tweets, no_below=NO_BELOW, no_above=NO_ABOVE, keep_tokens=set(KEEP_TOKENS))

    vocab.filter_extremes(no_below=NO_BELOW, no_above=NO_ABOVE, keep_n=KEEP_N, keep_tokens=set(KEEP_TOKENS))
    print(' len(vocab) after filtering: {}'.format(len(vocab.dfs)))

    # no time at all, just a bookkeeping step, doesn't actually compute anything
    tfidf = TfidfModel(id2word=vocab, dictionary=vocab)
    tfidf.save(os.path.join(BIGDATA_PATH, 'tfidf{}.pkl'.format(len(vocab.dfs))))

    tweets = [vocab.doc2bow(tw) for tw in tweets]
    json.dump(tweets, gzip.open(os.path.join(BIGDATA_PATH, 'tweet_bows.json.gz'), 'w'))
    gc.collect()

    # Train the LSI topic model (often referred to as LSA) on the
    # tf-idf-weighted corpus.
    lsa = LsiModel(tfidf[tweets], num_topics=200, id2word=vocab, extra_samples=100, power_iters=2)
    return lsa
if __name__ == '__main__':
    # BUG FIX: pass None explicitly so the built-in default token list is
    # used — the original zero-argument call crashed because cased_tokens
    # was a required positional parameter.
    lsa = lsa_twitter(None)
    # these models can be big
    lsa.save(os.path.join(BIGDATA_PATH, 'lsa_tweets'))
|
# Función original
def f(x):
    """Polynomial whose root the Newton-Raphson loop below searches for."""
    return x**4 - 8.6 * x**3 - 35.51 * x**2 + 464 * x - 998.46
# Derivada de la función original
def fprima(x):
    """First derivative of f, used for the Newton-Raphson step."""
    return 4 * x**3 - 25.8 * x**2 - 71.02 * x + 464
# Newton-Raphson root search starting from x0 = 7, capped at 100 steps.
x0 = 7      # initial guess
itera = 0   # iteration counter
for _ in range(100):
    itera += 1
    x1 = x0 - f(x0) / fprima(x0)
    fx1 = f(x1)  # evaluate the original function at the new estimate
    if abs(fx1) < .0001:  # stopping condition
        break
    x0 = x1
print("El valor de la raiz es: %.5f" % x0)
print("Número de iteraciones: %i" % itera)
from numpy import genfromtxt, array
from math import *
from random import random
import numpy as np
import pylab as plt
import sys
import pickle
class Spam_Classifier(object):
    """Logistic-regression spam classifier driven by command-line args.

    Expected argv layout (see the methods): argv[1] model/weights file,
    argv[2] test csv, argv[3] output csv.
    """
    def __init__(self):
        # constant
        self.num_feature = 57       # number of input features per sample
        self.exponential_norm = 100 # scales logits before the sigmoid
        # container
        self.norm = []              # per-feature normalisation divisors

    def set_weight(self):
        """Load a newline-separated weight vector from argv[1] into self.w."""
        modelname = str(sys.argv[1])
        weight = genfromtxt(modelname, delimiter = '\n')
        # Column vector: num_feature weights plus one bias term.
        self.w = np.empty((self.num_feature+1, 1), float);
        #print weight
        for i in range(self.w.shape[0]):
            self.w[i][0] = weight[i]

    def normalization(self):
        """Populate self.norm with hard-coded per-feature divisors.

        The leading 1.0 matches the bias column prepended to each sample.
        """
        self.norm = [1.0, 0.1, 0.028, 0.281, 0.005, 0.158, 0.275, 0.018, 0.13, 0.11,
                     0.273, 0.072, 0.573, 0.114, 0.111, 0.013, 0.142, 0.155, 0.126, 1.536,
                     0.106, 0.874, 1.0, 0.182, 0.117, 0.399, 0.233, 0.612, 0.045, 1.0,
                     0.045, 0.045, 1.0, 1.0, 1.0, 0.045, 0.06, 0.011, 1.0, 1.0, 0.015,
                     1.0, 0.093, 1.0, 1.0, 0.41, 0.09, 1.0, 1.0, 0.012, 0.11,
                     0.007, 0.439, 0.052, 0.022, 3.253, 51.6, 325.85]

    def sigmoid(self, num):
        """Logistic function with the input scaled down by exponential_norm."""
        return 1/(1+np.exp(-num/self.exponential_norm))

    def compute_test_error(self):
        """Classify each row of argv[2] and write "id,label" rows to argv[3]."""
        filename = str(sys.argv[2])
        test_data = genfromtxt(filename, delimiter = ',')
        foutname = str(sys.argv[3])
        fout = open(foutname, 'w')
        fout.write("id,label\n")
        for i in range(len(test_data)):
            test_x = [] # change to clear later
            # Row vector [1, features...]; column 0 of the csv is skipped.
            test_x.append([1])
            for j in range(self.num_feature):
                test_x[0].append(test_data[i][j+1])
            test_x = array(test_x)
            test_x = test_x/self.norm
            y_dot = self.sigmoid(np.dot(test_x, self.w))
            # Threshold the probability at 0.5 for the binary label.
            if y_dot >= 0.5:
                result = 1
            else:
                result = 0
            #print "%d,%d" % (i+1, result)
            fout.write("%d,%d\n" % (i+1, result))
        fout.close()
#model = Spam_Classifier()
# SECURITY NOTE(review): unpickling a file named on the command line
# executes arbitrary code — only run this with trusted model files.
model = pickle.load(open(sys.argv[1], "rb"))
#model.normalization()
#model.set_weight()
model.compute_test_error()
|
import pygame
import threading
import sys
from pygame.locals import *
from lib.statemachine import StateMachine
from config import Config as cfg
from lib.sprite import Sprite
class Scene(StateMachine):
    """A game scene: a one-shot load step, then a per-frame
    input/process/render cycle drawn into an off-screen buffer.
    """

    def __init__(self):
        super().__init__()
        self.event = None
        self.buffer = pygame.Surface((cfg.SCENE_WIDTH, cfg.SCENE_HEIGHT))

    def update(self):
        # State 0: first frame — load once, then fall through so this
        # same frame is also processed and rendered.
        if self.state == 0:
            self.load()
            self.state = 1
        if self.state == 1:
            self.input()
            self.process()
            self.render()

    # Hooks for subclasses; the base scene does nothing.
    def load(self):
        pass

    def input(self):
        pass

    def process(self):
        pass

    def render(self):
        pass
|
from config import BEACON, DEACTIVATE, ATTACH, QUERY, ACTIVATE, \
BATCH_DELETE
class URLBuilder(object):
    """Builds beacon API URLs from configured path constants."""

    def __init__(self):
        super(URLBuilder, self).__init__()

    def beacon_deactivation_url(self, beacon_details):
        """URL that deactivates the given beacon."""
        return BEACON + beacon_details.beacon_name + DEACTIVATE

    def beacon_activation_url(self, beacon_details):
        """URL that activates the given beacon."""
        return BEACON + beacon_details.beacon_name + ACTIVATE

    def beacon_modification_url(self, beacon):
        """URL that modifies the beacon's details."""
        return BEACON + beacon.beacon_name

    def beacon_attachment_url(self, beacon_details):
        """URL that attaches data to the beacon."""
        return BEACON + beacon_details.beacon_name + ATTACH

    def beacon_view_attachment_url(self, beacon_details):
        """URL that lists the beacon's attachments."""
        return BEACON + beacon_details.beacon_name + ATTACH + QUERY

    def batch_delete_url(self, beacon):
        """URL that batch-deletes the beacon's attachments."""
        return BEACON + beacon.beacon_name + ATTACH + BATCH_DELETE
|
from django.contrib import admin
from blog.models import Blog
class BlogAdmin(admin.ModelAdmin):
    """Admin list view for Blog: shows title, publish flag and date."""
    list_display = ('title','is_published', 'date')
    # Only the title column links through to the change form.
    list_display_links = ('title',)

# Register Blog with its customised admin options.
admin.site.register(Blog,BlogAdmin)
|
from google_nest_sdm.device import Device
def test_doorbell_chime():
    """A device built with a DoorbellChime trait exposes that trait."""
    trait_name = "sdm.devices.traits.DoorbellChime"
    raw = {
        "name": "my/device/name",
        "traits": {trait_name: {}},
    }
    device = Device.MakeDevice(raw, auth=None)
    assert trait_name in device.traits
|
import traceback
import os
import datetime
import base64
import geojson
from six.moves.urllib.parse import urlparse
from wsgiref.util import FileWrapper
from django.db.models import Q, Min
from django.db import transaction
from django.http import HttpResponse
from django.core.files.base import ContentFile
from django.core.exceptions import ValidationError
from django.conf import settings
from django.contrib import messages
from django.views.decorators.http import require_http_methods
from django.views.decorators.csrf import csrf_exempt
from django.utils import timezone
from rest_framework import viewsets, serializers, status, generics, views
from rest_framework.decorators import detail_route, list_route, renderer_classes
from rest_framework.response import Response
from rest_framework.renderers import JSONRenderer
from rest_framework.permissions import IsAuthenticated, AllowAny, IsAdminUser, BasePermission
from rest_framework.pagination import PageNumberPagination
from datetime import datetime, timedelta
from collections import OrderedDict
from django.core.cache import cache
from ledger.accounts.models import EmailUser, Address
from ledger.address.models import Country
from datetime import datetime, timedelta, date
from django.urls import reverse
from django.shortcuts import render, redirect, get_object_or_404
from disturbance.components.approvals.models import (
Approval
)
from disturbance.components.approvals.serializers import (
ApprovalSerializer,
ApprovalCancellationSerializer,
ApprovalSuspensionSerializer,
ApprovalSurrenderSerializer,
ApprovalUserActionSerializer,
ApprovalLogEntrySerializer
)
from disturbance.helpers import is_customer, is_internal
from rest_framework_datatables.pagination import DatatablesPageNumberPagination
from disturbance.components.proposals.api import ProposalFilterBackend, ProposalRenderer
class ApprovalPaginatedViewSet(viewsets.ModelViewSet):
    """Datatables-style paginated Approval listing for the dashboards."""
    filter_backends = (ProposalFilterBackend,)
    pagination_class = DatatablesPageNumberPagination
    renderer_classes = (ProposalRenderer,)
    page_size = 10
    queryset = Approval.objects.none()
    serializer_class = ApprovalSerializer

    def get_queryset(self):
        # Internal staff see all approvals; customers only those of their
        # own organisations; everyone else sees nothing.
        if is_internal(self.request):
            return Approval.objects.all()
        elif is_customer(self.request):
            user_orgs = [org.id for org in self.request.user.disturbance_organisations.all()]
            queryset = Approval.objects.filter(applicant_id__in = user_orgs)
            return queryset
        return Approval.objects.none()

    # def list(self, request, *args, **kwargs):
    #     response = super(ProposalPaginatedViewSet, self).list(request, args, kwargs)
    #
    #     # Add extra data to response.data
    #     #response.data['regions'] = self.get_queryset().filter(region__isnull=False).values_list('region__name', flat=True).distinct()
    #     return response

    @list_route(methods=['GET',])
    def approvals_external(self, request, *args, **kwargs):
        """
        Paginated serializer for datatables - used by the internal and external dashboard (filtered by the get_queryset method)

        To test:
        http://localhost:8000/api/approval_paginated/approvals_external/?format=datatables&draw=1&length=2
        """
        #import ipdb; ipdb.set_trace()
        #qs = self.queryset().order_by('lodgement_number', '-issue_date').distinct('lodgement_number')
        #qs = ProposalFilterBackend().filter_queryset(self.request, qs, self)
        # One id per lodgement number (most recent issue first), then
        # re-filter on the concrete id list.
        ids = self.get_queryset().order_by('lodgement_number', '-issue_date').distinct('lodgement_number').values_list('id', flat=True)
        qs = Approval.objects.filter(id__in=ids)
        qs = self.filter_queryset(qs)
        # on the internal organisations dashboard, filter the Proposal/Approval/Compliance datatables by applicant/organisation
        applicant_id = request.GET.get('org_id')
        if applicant_id:
            qs = qs.filter(applicant_id=applicant_id)
        # NOTE(review): page_size is set to the full result count, so the
        # "pagination" returns everything in one page — confirm intended.
        self.paginator.page_size = qs.count()
        result_page = self.paginator.paginate_queryset(qs, request)
        serializer = ApprovalSerializer(result_page, context={'request':request}, many=True)
        return self.paginator.get_paginated_response(serializer.data)
class ApprovalViewSet(viewsets.ModelViewSet):
    """Approval CRUD plus workflow actions (cancellation, suspension,
    reinstatement, surrender), comms/action logs and STI search helpers.

    Internal users see every Approval; external (customer) users only the
    approvals of organisations they belong to.

    FIXES in this revision: removed two ACTIVE `import ipdb;
    ipdb.set_trace()` breakpoints that froze requests in production,
    replaced `print(traceback.print_exc())` (which printed a spurious
    "None") with plain `traceback.print_exc()`, and deleted large
    commented-out method bodies.
    """
    queryset = Approval.objects.none()
    serializer_class = ApprovalSerializer

    def get_queryset(self):
        if is_internal(self.request):
            return Approval.objects.all()
        elif is_customer(self.request):
            user_orgs = [org.id for org in self.request.user.disturbance_organisations.all()]
            queryset = Approval.objects.filter(applicant_id__in=user_orgs)
            return queryset
        return Approval.objects.none()

    def list(self, request, *args, **kwargs):
        # One row per licence number, keeping the most recently issued.
        queryset = self.get_queryset().order_by('lodgement_number', '-issue_date').distinct('lodgement_number')
        # Filter by org
        org_id = request.GET.get('org_id', None)
        if org_id:
            queryset = queryset.filter(applicant_id=org_id)
        serializer = self.get_serializer(queryset, many=True)
        return Response(serializer.data)

    @list_route(methods=['GET',])
    def filter_list(self, request, *args, **kwargs):
        """ Used by the external dashboard filters """
        region_qs = self.get_queryset().filter(current_proposal__region__isnull=False).values_list('current_proposal__region__name', flat=True).distinct()
        activity_qs = self.get_queryset().filter(current_proposal__activity__isnull=False).values_list('current_proposal__activity', flat=True).distinct()
        data = dict(
            regions=region_qs,
            activities=activity_qs,
            approval_status_choices=[i[1] for i in Approval.STATUS_CHOICES],
        )
        return Response(data)

    @detail_route(methods=['POST',])
    def approval_cancellation(self, request, *args, **kwargs):
        """Cancel this approval using the posted cancellation details."""
        try:
            instance = self.get_object()
            serializer = ApprovalCancellationSerializer(data=request.data)
            serializer.is_valid(raise_exception=True)
            instance.approval_cancellation(request, serializer.validated_data)
            serializer = ApprovalSerializer(instance, context={'request': request})
            return Response(serializer.data)
        except serializers.ValidationError:
            traceback.print_exc()
            raise
        except ValidationError as e:
            if hasattr(e, 'error_dict'):
                raise serializers.ValidationError(repr(e.error_dict))
            else:
                # NOTE(review): `e[0]` is a Python 2 idiom; Django's
                # ValidationError is not indexable on Python 3 — confirm.
                raise serializers.ValidationError(repr(e[0].encode('utf-8')))
        except Exception as e:
            traceback.print_exc()
            raise serializers.ValidationError(str(e))

    @detail_route(methods=['POST',])
    def approval_suspension(self, request, *args, **kwargs):
        """Suspend this approval using the posted suspension details."""
        try:
            instance = self.get_object()
            serializer = ApprovalSuspensionSerializer(data=request.data)
            serializer.is_valid(raise_exception=True)
            instance.approval_suspension(request, serializer.validated_data)
            serializer = ApprovalSerializer(instance, context={'request': request})
            return Response(serializer.data)
        except serializers.ValidationError:
            traceback.print_exc()
            raise
        except ValidationError as e:
            if hasattr(e, 'error_dict'):
                raise serializers.ValidationError(repr(e.error_dict))
            else:
                raise serializers.ValidationError(repr(e[0].encode('utf-8')))
        except Exception as e:
            traceback.print_exc()
            raise serializers.ValidationError(str(e))

    @detail_route(methods=['POST',])
    def approval_reinstate(self, request, *args, **kwargs):
        """Reinstate a previously suspended/cancelled approval."""
        try:
            instance = self.get_object()
            instance.reinstate_approval(request)
            serializer = self.get_serializer(instance)
            return Response(serializer.data)
        except serializers.ValidationError:
            traceback.print_exc()
            raise
        except ValidationError as e:
            if hasattr(e, 'error_dict'):
                raise serializers.ValidationError(repr(e.error_dict))
            else:
                raise serializers.ValidationError(repr(e[0].encode('utf-8')))
        except Exception as e:
            traceback.print_exc()
            raise serializers.ValidationError(str(e))

    @detail_route(methods=['POST',])
    def approval_surrender(self, request, *args, **kwargs):
        """Surrender this approval using the posted surrender details."""
        try:
            instance = self.get_object()
            serializer = ApprovalSurrenderSerializer(data=request.data)
            serializer.is_valid(raise_exception=True)
            instance.approval_surrender(request, serializer.validated_data)
            serializer = ApprovalSerializer(instance, context={'request': request})
            return Response(serializer.data)
        except serializers.ValidationError:
            traceback.print_exc()
            raise
        except ValidationError as e:
            if hasattr(e, 'error_dict'):
                raise serializers.ValidationError(repr(e.error_dict))
            else:
                raise serializers.ValidationError(repr(e[0].encode('utf-8')))
        except Exception as e:
            traceback.print_exc()
            raise serializers.ValidationError(str(e))

    @detail_route(methods=['GET',])
    def approval_pdf_view_log(self, request, *args, **kwargs):
        """Record that the approval PDF was viewed, then return the approval."""
        try:
            instance = self.get_object()
            instance.pdf_view_log(request)
            serializer = ApprovalSerializer(instance, context={'request': request})
            return Response(serializer.data)
        except serializers.ValidationError:
            traceback.print_exc()
            raise
        except ValidationError as e:
            if hasattr(e, 'error_dict'):
                raise serializers.ValidationError(repr(e.error_dict))
            else:
                raise serializers.ValidationError(repr(e[0].encode('utf-8')))
        except Exception as e:
            traceback.print_exc()
            raise serializers.ValidationError(str(e))

    @detail_route(methods=['GET',])
    def action_log(self, request, *args, **kwargs):
        """Return the approval's user-action log entries."""
        try:
            instance = self.get_object()
            qs = instance.action_logs.all()
            serializer = ApprovalUserActionSerializer(qs, many=True)
            return Response(serializer.data)
        except serializers.ValidationError:
            traceback.print_exc()
            raise
        except ValidationError as e:
            traceback.print_exc()
            raise serializers.ValidationError(repr(e.error_dict))
        except Exception as e:
            traceback.print_exc()
            raise serializers.ValidationError(str(e))

    @detail_route(methods=['GET',])
    def comms_log(self, request, *args, **kwargs):
        """Return the approval's communication log entries."""
        try:
            instance = self.get_object()
            qs = instance.comms_logs.all()
            serializer = ApprovalLogEntrySerializer(qs, many=True)
            return Response(serializer.data)
        except serializers.ValidationError:
            traceback.print_exc()
            raise
        except ValidationError as e:
            traceback.print_exc()
            raise serializers.ValidationError(repr(e.error_dict))
        except Exception as e:
            traceback.print_exc()
            raise serializers.ValidationError(str(e))

    @detail_route(methods=['POST',])
    @renderer_classes((JSONRenderer,))
    def add_comms_log(self, request, *args, **kwargs):
        """Create a comms log entry (plus uploaded files) for the approval."""
        try:
            with transaction.atomic():
                instance = self.get_object()
                request.data['approval'] = u'{}'.format(instance.id)
                request.data['staff'] = u'{}'.format(request.user.id)
                serializer = ApprovalLogEntrySerializer(data=request.data)
                serializer.is_valid(raise_exception=True)
                comms = serializer.save()
                # BUG FIX: removed an active `import ipdb; ipdb.set_trace()`
                # here that froze every request hitting this endpoint.
                # Save the uploaded files against the new log entry.
                for f in request.FILES:
                    document = comms.documents.create()
                    document.name = str(request.FILES[f])
                    document._file = request.FILES[f]
                    document.save()
                # End Save Documents
                return Response(serializer.data)
        except serializers.ValidationError:
            traceback.print_exc()
            raise
        except ValidationError as e:
            traceback.print_exc()
            raise serializers.ValidationError(repr(e.error_dict))
        except Exception as e:
            traceback.print_exc()
            raise serializers.ValidationError(str(e))

    @list_route(methods=['GET',])
    def sti_search(self, request, *args, **kwargs):
        """ Used by the internal users to filter for sti name in ptoposal titlei (for use by external systems) """
        name = request.GET.get('name')
        # licence_document___file traverses to the related document's
        # `_file` field (double underscore + leading underscore).
        data = Approval.objects.filter(current_proposal__title__icontains=name).values_list('licence_document___file', flat=True)
        return Response(list(data))

    @list_route(methods=['GET',])
    def sti_unmatched(self, request, *args, **kwargs):
        """ Used by the internal users to filter for sti name in ptoposal titlei (for use by external systems) """
        # BUG FIX: removed an active `import ipdb; ipdb.set_trace()`
        # breakpoint that froze every request to this endpoint.
        name = request.GET.get('name')
        data = Approval.objects.filter(current_proposal__title__icontains=name).values_list('licence_document___file', flat=True)
        return Response(list(data))
|
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse, HttpResponseNotFound, HttpResponseBadRequest
from django.shortcuts import render, redirect
from django.contrib.auth.forms import AuthenticationForm
from django.contrib.auth import login, logout
from .models import AuthBackend, User
from .forms import UserRegistrationForm
def user_register(request):
    """Show the sign-up form; on a valid POST create the account and go to login."""
    if request.method != 'POST':
        form = UserRegistrationForm()
        return render(request, 'registration.html', {'user_form': form})
    form = UserRegistrationForm(request.POST)
    if not form.is_valid():
        # Re-render with the bound form so field errors are shown.
        return render(request, 'registration.html', {'user_form': form})
    account = form.save(commit=False)
    # Hash the raw password before persisting the new account.
    account.set_password(form.cleaned_data['password'])
    account.save()
    return redirect('login')
def user_login(request):
    """Authenticate via the custom AuthBackend; on success redirect to the feed,
    otherwise (GET or failed/inactive login) render the login form."""
    if request.method == 'POST':
        username = request.POST['username']
        password = request.POST['password']
        auth_backend = AuthBackend()
        user = auth_backend.authenticate(request=request, username=username, password=password)
        if user and user.is_active:
            login(request, user)
            return redirect('feed')
        # BUG FIX: a failed or inactive login previously fell through to
        # render() with `login_form` undefined, raising NameError.
        login_form = AuthenticationForm()
    else:
        login_form = AuthenticationForm()
    return render(request, 'login.html', {'form': login_form})
def user_logout(request):
    """End the current session and send the visitor back to the feed."""
    logout(request)
    return redirect('feed')
@login_required
def user_subscribe(request, pk):
    """Subscribe the current user to the user with primary key *pk*.

    Returns 404 for an unknown user, 400 for self-subscription or an existing
    subscription, and redirects to the feed on success.
    """
    try:
        subscribing = User.objects.get(pk=pk)
    except User.DoesNotExist:
        # BUG FIX: was a bare `except:`, which also swallowed unrelated errors
        # (database failures, KeyboardInterrupt, ...).
        return HttpResponseNotFound("404")
    if subscribing == request.user:
        return HttpResponseBadRequest("It's not allowed to subscribe itself")
    if subscribing in request.user.subscriptions.all():
        return HttpResponseBadRequest("user is already subscribed")
    request.user.subscriptions.add(subscribing)
    request.user.save()
    return redirect('feed')
|
import kivy
from kivy.app import App
from kivy.uix.label import Label
from kivy.uix.gridlayout import GridLayout
from kivy.uix.textinput import TextInput
from kivy.uix.button import Button
import random
class FoodApp(App):
    """Kivy application entry point; the root widget is a MyGrid."""
    def build(self):
        # Kivy calls build() once to obtain the root widget tree.
        return MyGrid()
class MyGrid(GridLayout):
    """Two-column root widget: a random-restaurant picker plus an inner grid
    for adding and displaying user-entered restaurants."""
    def __init__(self, **kwargs):
        super(MyGrid, self).__init__(**kwargs)
        self.cols = 2
        # Top row: result label + "submit" button that picks a random built-in restaurant.
        self.label = Label(text="Food")
        self.button = Button(text="submit", font_size=20, pos=(200, 400), size_hint=(.8, .8))
        self.button.bind(on_press=self.pressed)
        self.add_widget(self.label)
        self.add_widget(self.button)
        # Inner grid: text input plus Add/Display buttons for user-entered restaurants.
        self.inside = GridLayout()
        self.inside.cols = 2
        self.inside.label1 = Label(text="Restaurant")
        self.inside.text = TextInput(multiline=False)
        self.inside.B_Add = Button(text="Add", font_size=20, pos=(200, 400), size_hint=(2, 2))
        self.inside.B_Add.bind(on_press=self.add)
        self.inside.B_Display = Button(text="Display", font_size=20, pos=(200, 400), size_hint=(2, 2))
        self.inside.B_Display.bind(on_press=self.display)
        self.add_widget(self.inside.text)
        self.add_widget(self.inside.label1)
        self.add_widget(self.inside.B_Add)
        self.add_widget(self.inside.B_Display)
        self.add_widget(self.inside)
        # Built-in choices used by pressed(); indices 0-3 match randint(0, 3) below.
        self.food = ["Olive Garden", "Mods Pizza", "Jason's Deli", "Faddis"]
        # User-entered restaurants; starts with a single empty entry.
        self.add_food = [""]
        self.r_num = random.randint(0, 3)
        # NOTE(review): this one-off random.choices() result is never read
        # again anywhere in the class — looks unused; verify before removing.
        self.random = random.choices(self.add_food)
    def pressed(self, instance):
        # Pick and show a random built-in restaurant.
        self.r_num = random.randint(0, 3)
        self.label.text = self.food[self.r_num]
        print(self.food[self.r_num])
    def add(self, instance):
        # Append the typed restaurant to the user list and clear the input.
        content = self.inside.text.text
        self.add_food.append(content)
        self.inside.text.text = ""
        print(self.add_food)
    def display(self, instance):
        self.r_num = random.randint(0, 3)
        # Shows a random user-entered restaurant on the inner label...
        self.inside.label1.text = random.choice(self.add_food)
        # NOTE(review): ...but prints from the built-in self.food list instead
        # of the choice just displayed — confirm which was intended.
        print(self.food[self.r_num])
if __name__ == "__main__":
    # Start the Kivy event loop with FoodApp as the application.
    FoodApp().run()
class Solution(object):
    """House Robber (LeetCode 198): four implementations, from bottom-up DP
    to the naive exponential recursion, all returning the maximum loot when
    no two adjacent houses may both be robbed."""

    def rob_dp_iterative(self, nums):
        """
        :type nums: List[int]
        :rtype: int
        """
        # Bottom-up DP with two rolling values instead of a memo table:
        # two_back = best up to i-2, one_back = best up to i-1.
        if not nums:
            return 0
        two_back, one_back = 0, nums[0]
        for value in nums[1:]:
            two_back, one_back = one_back, max(two_back + value, one_back)
        return one_back

    def rob_recursive2_improved(self, nums):
        """
        :type nums: List[int]
        :rtype: int
        """
        # Top-down recursion memoized in a dict keyed by start index.
        if not nums:
            return 0
        house_count = len(nums)
        cache = {}

        def best_from(start):
            # Best loot obtainable from houses[start:].
            if start >= house_count:
                return 0
            if start not in cache:
                cache[start] = max(nums[start] + best_from(start + 2),
                                   best_from(start + 1))
            return cache[start]

        return best_from(0)

    def rob_recursive2_tle(self, nums):
        """
        :type nums: List[int]
        :rtype: int
        """
        # Same recursion shape but the table is only written, never consulted,
        # so this stays exponential (kept as the TLE reference version).
        if not nums:
            return 0
        house_count = len(nums)
        scratch = [0] * house_count

        def walk(start):
            if start >= house_count:
                return 0
            scratch[start] = max(nums[start] + walk(start + 2), walk(start + 1))
            return scratch[start]

        walk(0)
        return scratch[0]

    def rob_recursive1_tle(self, nums):
        """
        :type nums: List[int]
        :rtype: int
        """
        # Naive exponential search threading the running sum through the calls.
        house_count = len(nums)

        def explore(pos, gathered):
            if pos >= house_count:
                return gathered
            return max(explore(pos + 1, gathered),
                       explore(pos + 2, gathered + nums[pos]))

        return explore(0, 0)
import unittest
import dynamic_test_case
class TestMySolutions(unittest.TestCase):
    """Empty shell; test methods are attached dynamically by gen_test below."""
    pass
x = Solution()
# Attach one test method per ((args,), expected) pair to TestMySolutions.
dynamic_test_case.gen_test(TestMySolutions, x,
                           (([1, 2, 3, 1],), 4),
                           (([2, 7, 9, 3, 1],), 12),
                           # evil case: empty input must yield 0
                           (([],), 0), )
if __name__ == '__main__':
    unittest.main()
|
import numpy as np
import math
import abc
class KalmanFilterBase(object):
"""
this is the abstract kalman filter class that defines the common interface
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def __init__(self, debug=False):
self.debug = debug
self._X = None
self._P = None
self._Q = None
self._R = None
@abc.abstractmethod
def predict(self, delta_t):
"""
use the motion model to predict
:param delta_t: float
:return:
"""
pass
@abc.abstractmethod
def correct(self, Z):
"""
correct the state using measurement model
:param Z:
:return:
"""
pass
##########################################################
# methods implemented below are common for all sub-classes
def is_init_state_set(self):
return self._X is not None
def set_init_state(self, init_state):
"""
set the initial states
:param init_state: numpy array
:return:
"""
self._X = init_state.copy()
def set_init_covariance(self, init_covariance):
"""
set the initial covariance
:param init_covariance: numpy array
:return:
"""
self._P = init_covariance.copy()
def set_process_noise(self, p_noise):
"""
set the process noise for motion update
:param p_noise:
:return:
"""
self._Q = p_noise.copy()
def set_measurement_noise(self, m_noise):
"""
set the measurement noise for measurement model
:param m_noise:
:return:
"""
self._R = m_noise.copy()
class LinearKalmanFilter(KalmanFilterBase):
    """
    simple 2D linear Kalman filter.
    state is [x, y, vel_x, vel_y]
    measurement is [x, y]
    """
    def __init__(self, debug=False):
        super(LinearKalmanFilter, self).__init__(debug)
        # State-transition matrix; rebuilt on every predict() from delta_t.
        self._A = None
        # Observation matrix: only the position components are measured.
        self._H = np.array([[1.0, 0.0, 0.0, 0.0],
                            [0.0, 1.0, 0.0, 0.0]], dtype=np.float64)
        # process noise covariance (isotropic, 0.1 on each state)
        self._Q = 0.1 * np.eye(4, dtype=np.float64)
        # measurement noise covariance
        self._R = 0.05 * np.eye(2, dtype=np.float64)
        # initial state covariance
        self._P = np.eye(4, dtype=np.float64)

    def predict(self, delta_t):
        assert (self._X is not None)
        # Constant-velocity model: position += velocity * delta_t.
        transition = np.eye(4, dtype=np.float64)
        transition[0, 2] = delta_t
        transition[1, 3] = delta_t
        self._A = transition
        self._X = self._A.dot(self._X)
        self._P = self._A.dot(self._P).dot(self._A.T) + self._Q
        if self.debug:
            print("delta_t: {}".format(delta_t))

    def correct(self, Z):
        # Standard Kalman update: gain, state correction, covariance shrink.
        innovation_cov = self._H.dot(self._P).dot(self._H.T) + self._R
        K = self._P.dot(self._H.T).dot(np.linalg.inv(innovation_cov))
        self._X = self._X + K.dot(Z - self._H.dot(self._X))
        self._P = (np.identity(4) - K.dot(self._H)).dot(self._P)
        if self.debug:
            print("P:\n {}".format(self._P))
            print("X:\n {}".format(self._X))
            print("K:\n {}".format(K))
class BicycleKalmanFilter(KalmanFilterBase):
    """
    kinematic bicyle EKF
    state is [x, y, yaw, vel, beta], beta is the angle of the current velocity of the center of mass with respect to the longitudinal axis of the car
    measurement is [x, y]
    """
    def __init__(self, cm2rear_len=2.0, enable_yaw_observation=False, debug=False):
        super(BicycleKalmanFilter, self).__init__(debug)
        np.set_printoptions(precision=5)
        self.debug = debug
        self.cm2rear_len = cm2rear_len  # distance from center of mass to rear wheel
        self._A = None
        # Observation model: [x, y] only, or [x, y, yaw] when yaw is observed.
        if enable_yaw_observation:
            self._H = np.array([[1, 0, 0, 0, 0],
                                [0, 1, 0, 0, 0],
                                [0, 0, 1, 0, 0]], dtype=np.float64)
            self._R = np.array([[1, 0, 0],
                                [0, 1, 0],
                                [0, 0, 1]], dtype=np.float64)
        else:
            self._H = np.array([[1, 0, 0, 0, 0],
                                [0, 1, 0, 0, 0]], dtype=np.float64)
            self._R = np.array([[1.0, 0],
                                [0, 1.0]], dtype=np.float64)
        self._X = None
        # Process noise: position/yaw trusted strongly; vel and beta much less.
        self._Q = np.array([[1e-7, 0, 0, 0, 0],
                            [0, 1e-7, 0, 0, 0],
                            [0, 0, 1e-7, 0, 0],
                            [0, 0, 0, 1e-3, 0],
                            [0, 0, 0, 0, 1e-2]], dtype=np.float64)
        self._P = np.array([[1, 0, 0, 0, 0],
                            [0, 1, 0, 0, 0],
                            [0, 0, 1, 0, 0],
                            [0, 0, 0, 1, 0],
                            [0, 0, 0, 0, 1]], dtype=np.float64)
    def predict(self, delta_t):
        """EKF prediction: propagate the kinematic bicycle model and linearize it.

        :param delta_t: time step in seconds
        """
        assert (self._X is not None)
        # state update (X = [x, y, yaw, vel, beta])
        self._X[0] = self._X[0] + self._X[3] * math.cos(self._X[2] + self._X[4]) * delta_t
        self._X[1] = self._X[1] + self._X[3] * math.sin(self._X[2] + self._X[4]) * delta_t
        self._X[2] = self._X[2] + self._X[3] / self.cm2rear_len * math.sin(self._X[4]) * delta_t
        # X[3] and X[4] are vel and beta. both are kept as constant.
        # compute Jacobian of the motion model (partials of rows 1-3 w.r.t.
        # yaw, vel, beta; jRC = d(row R)/d(state C+0-based offset)).
        j13 = -delta_t * math.sin(self._X[2] + self._X[4]) * self._X[3]
        j14 = delta_t * math.cos(self._X[2] + self._X[4])
        j15 = -delta_t * math.sin(self._X[2] + self._X[4]) * self._X[3]
        j23 = delta_t * math.cos(self._X[2] + self._X[4]) * self._X[3]
        j24 = delta_t * math.sin(self._X[2] + self._X[4])
        j25 = delta_t * math.cos(self._X[2] + self._X[4]) * self._X[3]
        j34 = delta_t * math.sin(self._X[4]) / self.cm2rear_len
        j35 = delta_t * math.cos(self._X[4]) / self.cm2rear_len * self._X[3]
        F = np.array([[1, 0, j13, j14, j15],
                      [0, 1, j23, j24, j25],
                      [0, 0, 1, j34, j35],
                      [0, 0, 0, 1, 0],
                      [0, 0, 0, 0, 1]])
        # covariance update
        # self._P = np.matmul(np.matmul(F, self._P), F.T) + self._Q
        self._P = F.dot(self._P).dot(F.T) + self._Q
        if self.debug:
            print("delta_t: {}".format(delta_t))
            print("P prior:\n {}".format(self._P))
            print("X prior:\n {}".format(self._X))
            print('psi prior {} deg'.format(math.degrees(self._X[2])))
            print('beta prior {} deg'.format(math.degrees(self._X[4])))
    def correct(self, Z):
        """EKF correction: fold the measurement Z into state and covariance.

        :param Z: measurement vector matching self._H's row layout
        """
        # Kalman gain K = P H^T (H P H^T + R)^-1
        inv_tmp = np.linalg.inv(np.matmul(np.matmul(self._H, self._P), self._H.T) + self._R)
        K = np.matmul(np.matmul(self._P, self._H.T), inv_tmp)
        self._X = self._X + np.matmul(K, Z - np.matmul(self._H, self._X))
        self._P = np.matmul(np.identity(5) - np.matmul(K, self._H), self._P)
        if self.debug:
            print("P:\n {}".format(self._P))
            print("X:\n {}".format(self._X))
            print("K:\n {}".format(K))
            print('psi {} deg'.format(math.degrees(self._X[2])))
            print('beta {} deg'.format(math.degrees(self._X[4])))
|
#!../venv/Scripts/python.exe
# -*- encoding : utf-8 -*-
"""
@Description: test_pyshor.py provides functions and classes to ensure the proper functioning of PyShor
@Author: Quentin Delamea
@Copyright: Copyright 2020, PyShor
@Credits: [Quentin Delamea]
@License: MIT
@Version: 0.0.1
@Maintainer: Quentin Delamea
@Email: qdelamea@gmail.com
@Status: Dev
"""
import random as rd
import pytest
from pyshor import find_divisor, prime_factorize, gcd, is_prime_number, primer_power, NotPrimerPowerException
class TestPyShorExceptions:
    """
    Contains functions to test PyShor's exceptions.
    """
    def test_find_divisor(self) -> None:
        """
        Tests find_divisor exceptions.
        """
        # BUG FIX: each statement gets its own pytest.raises block — once the
        # first statement raised, the rest of a shared block never executed.
        with pytest.raises(TypeError):
            find_divisor('a')
        with pytest.raises(TypeError):
            find_divisor('12')
        with pytest.raises(ValueError):
            find_divisor(rd.randint(-100, 1))
        with pytest.raises(ValueError):
            # BUG FIX: rd.choice takes a single sequence; the original
            # rd.choice(3, 5, ...) call itself raised TypeError.
            find_divisor(rd.choice([3, 5, 7, 11, 13, 17, 19, 23, 29, 31]))
    def test_prime_factorize(self) -> None:
        """
        Tests prime_factorize exceptions.
        """
        with pytest.raises(TypeError):
            prime_factorize('a')
        with pytest.raises(TypeError):
            prime_factorize('12')
        with pytest.raises(ValueError):
            prime_factorize(rd.randint(-100, 1))
class TestPyShorSubmodules:
    """
    Contains functions to test PyShor's submodules.
    """
    def test_gcd(self) -> None:
        """
        Tests the function gcd.
        """
        assert gcd(24, 36) == 12
        assert gcd(12, 15) == 3
        assert gcd(9, 10) == 1
        assert gcd(12, 21) == 3
        assert gcd(72, 8) == 8
        assert gcd(52, 76) == 4
        assert gcd(1701, 3768) == 3
    def test_is_prime_number(self) -> None:
        """
        Tests the function is_prime_number.
        """
        assert is_prime_number(2)
        assert is_prime_number(rd.choice([2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71,
                                          73, 79, 83, 89, 97]))
        assert is_prime_number(9929)
        # BUG FIX: rd.choice takes a single sequence; rd.choice(25, 18, 27, 28)
        # raised TypeError before the assertion could even run.
        assert not is_prime_number(rd.choice([25, 18, 27, 28]))
        assert not is_prime_number(9931)
    def test_primer_power(self) -> None:
        """
        Tests the function primer_power.
        """
        assert primer_power(128) == 2
        # BUG FIX: the original asserted prime_factorize(125) == 5, which can
        # never hold; 125 = 5**3 so primer_power was clearly intended.
        assert primer_power(125) == 5
        assert primer_power(98585041) == 9929
        # BUG FIX: split the shared pytest.raises block — statements after the
        # first raise were dead code.
        with pytest.raises(NotPrimerPowerException):
            primer_power(15)
        with pytest.raises(NotPrimerPowerException):
            primer_power(225)
        with pytest.raises(NotPrimerPowerException):
            primer_power(23)
class TestPyShorFindDivisor:
    """
    Contains functions to test the PyShor find_divisor function.
    """
    def test_find_divisor(self) -> None:
        """
        Tests the function find_divisor.
        """
        assert find_divisor(2) == 2
        assert find_divisor(4) == 2
        assert find_divisor(6) == 2
        assert find_divisor(9) == 3
        # Composite inputs may yield either non-trivial divisor.
        assert find_divisor(12) in [3, 4]
        assert find_divisor(15) in [3, 5]
        assert find_divisor(21) in [3, 7]
        # Larger semiprime cases kept disabled — presumably too slow; verify.
        # assert find_divisor(33) in [3, 11]
        # assert find_divisor(247) in [13, 19]
class TestPyShorPrimeFactorize:
    """
    Contains functions to test the PyShor prime_factorize function.
    """
    def test_prime_factorize(self) -> None:
        """
        Tests the function prime_factorize.

        Expected format: a list of (prime, exponent) tuples.
        """
        assert prime_factorize(20) == [(2, 2), (5, 1)]
        assert prime_factorize(72) == [(2, 3), (3, 2)]
        assert prime_factorize(60) == [(2, 2), (3, 1), (5, 1)]
        # Disabled case — its expected value does not equal 135; verify intent.
        # assert prime_factorize(135) == [(2, 2), (3, 4), (5, 1)]
|
import qrcode
# Generate a QR code image for the website URL below.
websiteurl = 'https://davidaparicio.gitlab.io'
filename = 'website.png'
versions = 1  # 1 to 40 (symbol size)
boxsize = 10  # nb of pixels per module
bordersize = 4  # min thickness of the border, in modules
# BUG FIX: removed `img = qrcode.make()` — calling make() with no data raises,
# and its result was overwritten by make_image() below anyway.
qr = qrcode.QRCode(
    version=versions,
    error_correction=qrcode.constants.ERROR_CORRECT_H,
    box_size=boxsize,
    border=bordersize,  # BUG FIX: was a hard-coded 4, ignoring bordersize
)
qr.add_data(websiteurl)
qr.make(fit=True)
img = qr.make_image(fill_color="#000000", back_color="white").convert('RGB')
#img = qr.make_image(fill_color="#160096", back_color="white").convert('RGB')
img.save(filename)
#https://betterprogramming.pub/how-to-generate-and-decode-qr-codes-in-python-a933bce56fd0
""" data = {
 'jour' : '31/05/1832',
 'heure' : '08h00',
 'nom' : 'Galois',
 'prenom' : 'Evariste',
 'naissance' : '25/11/1811',
 'lieu' : 'Bourg-la-Reine',
 'adresse' : 'Quelque part 12345 Ville',
 'motifs' : 'duel',
}
content = 'Cree le: {jour} a {heure}; Nom: {nom}; Prenom: {prenom}; '\
 + 'Naissance: a {lieu}; Adresse: {adresse}; '\
 + 'Sortie: {jour} a {heure}; Motifs: {motifs};'
qr = qrcode.QRCode(border=0)
qr.add_data(content.format(**data))
qr.make(fit=True) """
from PyQt5.QtWidgets import *
from PyQt5.QtCore import *
from PyQt5 import QtGui, QtCore
from PyQt5.QtGui import *
import sys,sqlitedict
class Main(QMainWindow):
    """
    Login/signup window backed by a SqliteDict user database.

    User records are stored under the single key 'Data' in DBS2.db as a list
    of dicts with keys: user, pass, name, age, sex, email, contact.
    NOTE(review): passwords are stored and compared in plain text — confirm
    whether hashing was intended.
    """
    def __init__(self):
        super().__init__()
        self.setupUi()
        # autocommit=True flushes every assignment straight to DBS2.db.
        self.dictionarydb = sqlitedict.SqliteDict("DBS2.db", autocommit=True)
        self.Database = self.dictionarydb.get('Data', [])
        print(self.Database)
        #self.dictionarydb.clear()
    def setupUi(self):
        # Fixed window dimensions shared by the login and signup pages.
        self.application_width = 750  #Set default height
        self.application_height = 550
        self.stack = QStackedLayout()  #stack widget
        self.LoginUI()
        self.SignupUI()
        # NOTE(review): User() is called unconditionally here — it closes the
        # login widget and constructs a Main1, whose __init__ calls
        # Main.__init__ (and hence setupUi again). This looks like unbounded
        # recursion at construction time; verify intent.
        self.User()
        self.stack.addWidget(self.Login)
        self.stack.addWidget(self.Signup)
        # NOTE(review): at this point self.user is the signup-form QLineEdit
        # created in SignupUI(), not a page widget — confirm this is intended.
        self.stack.addWidget(self.user)
    def User(self):
        # Close the login page and open the logged-in main window.
        self.Login.close()
        x = Main1()
        x.show()
    def LoginUI(self):
        # Cached profile fields of the currently logged-in user (empty until login).
        self.cname = ""
        self.cage = ""
        self.csex = ""
        self.cemail = ""
        self.ccontact = ""
        self.cuser = ""
        self.cpass = ""
        self.Login = QWidget()
        self.Login.setWindowIcon(QtGui.QIcon("appLogoico.png"))
        self.Login.setFixedSize(self.application_width, self.application_height)
        # Scale the background image to the fixed window size.
        oImage = QImage("back.jpg")
        sImage = oImage.scaled(QSize(self.application_width, self.application_height))
        palette = QPalette()
        palette.setBrush(QPalette.Window, QBrush(sImage))
        self.Login.setPalette(palette)
        layout = QHBoxLayout()
        # Semi-transparent rounded box holding the username/password form.
        centerlogin = QGroupBox()
        centerlogin.setFixedSize(200, 150)
        centerlogin.setStyleSheet("QGroupBox{border: 1px solid black; border-radius: 10px; background-color: rgba(255,255,255,0.7); padding: 10px;} Qlabel{}")
        centerloginlayout = QVBoxLayout()
        user_pass = QFormLayout()
        self.userl = QLineEdit()
        self.passwl = QLineEdit()
        user_pass.addRow(QLabel("Username:"), self.userl)
        user_pass.addRow(QLabel("Password:"), self.passwl)
        # Login validates credentials; Signup flips the stacked layout to page 1.
        user_pass.addRow(QPushButton("Login", clicked=lambda: self.checkuser()), QPushButton("Signup", clicked=lambda: self.stack.setCurrentIndex(1)))
        user_pass.setFormAlignment(QtCore.Qt.AlignCenter)
        exitbutton = QPushButton("", clicked=lambda: self.Login.close())
        exitbutton.setToolTip("Exit Program")
        exitbutton.setIcon(QtGui.QIcon("signOutico.ico"))
        exitbutton.setStyleSheet("background-color:transparent;")
        user_pass.addRow(exitbutton, QLabel(""))
        centerloginlayout.addLayout(user_pass)
        centerlogin.setLayout(centerloginlayout)
        layout.addStretch(1)
        layout.addWidget(centerlogin)
        layout.addStretch(5)
        self.Login.setLayout(layout)
    def checkuser(self):  #Widget selector changer
        # d flags whether the typed credentials matched any stored record.
        d = False
        if len(self.dictionarydb) == 0:
            error = QMessageBox()
            error.setText("Empty Data Base!")
            error.setWindowIcon(QtGui.QIcon("appLogoico.png"))
            error.setWindowTitle("Error")
            error.exec_()
            self.userl.clear()
            self.passwl.clear()
        else:
            # Linear scan over stored records; plain-text comparison.
            for x in self.Database:
                if x["user"] == self.userl.text() and x["pass"] == self.passwl.text():
                    d = True
                    break
            self.userl.clear()
            self.passwl.clear()
            if d == True:
                # NOTE(review): successful login does nothing here — verify
                # whether it should switch pages or open the user window.
                pass
            else:
                error = QMessageBox()
                error.setText("Invalid input")
                error.setWindowIcon(QtGui.QIcon("appLogoico.png"))
                error.setWindowTitle("Error")
                error.exec_()
                self.userl.clear()
                self.passwl.clear()
    def SignupUI(self):
        self.Signup = QWidget()
        self.Signup.setWindowIcon(QtGui.QIcon("appLogoico.png"))
        self.Signup.setFixedSize(self.application_width, self.application_height)
        oImage = QImage("back.jpg")
        sImage = oImage.scaled(QSize(self.application_width, self.application_height))
        palette = QPalette()
        palette.setBrush(QPalette.Window, QBrush(sImage))
        self.Signup.setPalette(palette)
        layout = QHBoxLayout()
        centerlogin = QGroupBox()
        centerlogin.setFixedSize(300, 300)
        centerlogin.setStyleSheet("QGroupBox{border: 1px solid black; border-radius: 10px; background-color: rgba(255,255,255,0.7); padding: 10px;} Qlabel{}")
        centerloginlayout = QVBoxLayout()
        user_pass = QFormLayout()
        # Signup form fields (also reachable as attributes from signup()).
        self.user = QLineEdit()
        self.passw = QLineEdit()
        self.name = QLineEdit()
        self.age = QLineEdit()
        self.sex = QLineEdit()
        self.email = QLineEdit()
        self.contact = QLineEdit()
        user_pass.addWidget(QLabel("                  Signup GIYA"))
        user_pass.addRow(QLabel("Name:"), self.name)
        user_pass.addRow(QLabel("Age:"), self.age)
        user_pass.addRow(QLabel("Sex:"), self.sex)
        user_pass.addRow(QLabel("Email:"), self.email)
        user_pass.addRow(QLabel("Cell No.:"), self.contact)
        user_pass.addRow(QLabel("Username:"), self.user)
        user_pass.addRow(QLabel("Password:"), self.passw)
        user_pass.addRow(QPushButton("Login", clicked=lambda: self.stack.setCurrentIndex(0)), QPushButton("Signup", clicked=lambda: self.signup()))
        user_pass.setFormAlignment(QtCore.Qt.AlignCenter)
        exitbutton = QPushButton("", clicked=lambda: self.Signup.close())
        exitbutton.setToolTip("Exit Program")
        exitbutton.setIcon(QtGui.QIcon("signOutico.ico"))
        exitbutton.setStyleSheet("background-color:transparent;")
        user_pass.addRow(exitbutton, QLabel(""))
        centerloginlayout.addLayout(user_pass)
        centerlogin.setLayout(centerloginlayout)
        layout.addStretch(1)
        layout.addWidget(centerlogin)
        layout.addStretch(5)
        self.Signup.setLayout(layout)
    def signup(self):
        # Reject duplicate usernames, otherwise append the record and persist.
        avail = True
        for x in (self.Database):
            if x["user"] == self.user.text():
                error = QMessageBox()
                error.setText("Username already taken")
                error.setWindowIcon(QtGui.QIcon("appLogoico.png"))
                error.setWindowTitle("Error")
                error.exec_()
                avail = False
                break
        if avail == True:
            det = [self.user.text(), self.passw.text(), self.name.text(), self.age.text(), self.sex.text(), self.email.text(), self.contact.text()]
            self.Database.append({"user": det[0], "pass": det[1], "name": det[2], "age": det[3], "sex": det[4], "email": det[5], "contact": det[6]})
            # Writing the key triggers sqlitedict's autocommit persistence.
            self.dictionarydb['Data'] = self.Database
            self.user.clear()
            self.passw.clear()
            self.name.clear()
            self.age.clear()
            self.sex.clear()
            self.email.clear()
            self.contact.clear()
class Main1(Main):
    """Logged-in main window: side panel, profile and offers sections.

    NOTE(review): super().__init__() runs Main.__init__, whose setupUi() calls
    User() and constructs another Main1 — this mutual construction looks like
    unbounded recursion at startup and needs an architectural fix; only the
    attribute bug below is corrected here.
    """
    def __init__(self):
        super().__init__()
        self.UsermainUI()
    def UsermainUI(self):
        self.ui = QWidget()
        self.ui.setWindowIcon(QtGui.QIcon("appLogoico.png"))
        self.ui.setFixedSize(750, 550)
        layout = QHBoxLayout()
        self.ui.sidepanel = QGroupBox()
        self.ui.sidepanel.setFixedSize(150, 530)
        profile = QGroupBox()
        offers = QGroupBox()
        # BUG FIX: was layout.addWidget(self.sidepanel) — the attribute was
        # stored on self.ui, so the lookup raised AttributeError.
        layout.addWidget(self.ui.sidepanel)
        layout.addWidget(profile)
        layout.addWidget(offers)
        # cname is set (possibly empty) by Main.LoginUI().
        txt = QLabel(self.cname)
        layout.addWidget(txt)
        self.ui.setLayout(layout)
        self.ui.show()
if __name__ == "__main__":
    app = QApplication(sys.argv)
    # NOTE(review): constructing Main1 triggers Main.__init__ -> setupUi() ->
    # User() -> another Main1 — looks like unbounded recursion; verify.
    M = Main1()
    sys.exit(app.exec())
|
# encoding: UTF-8
"""
@author: hy
"""
from gradedMT.mapGenerator.MapGenerator import MapGenerator
from common import checkLoadSave
from config import DATA_PATH, JSON_FILE_FORMAT, UMLS_CHI_SOURCE
from read.UMLS.ReadMrConso import readMRCONSO_RRF
class UMLSCHIMapGenerator(MapGenerator):
    """Builds (and caches as JSON) the AUI -> term mapping for the Chinese
    (CHI) source rows of the UMLS MRCONSO.RRF file."""
    def __init__(self):
        super(UMLSCHIMapGenerator, self).__init__()
        self.source = UMLS_CHI_SOURCE
        # Cache file written/read by the checkLoadSave decorator below.
        self.MAP_JSON = DATA_PATH + '/umlsMT/map/UMLSMapUMLSCHI.json'
        self.mapDict = None
    @checkLoadSave('mapDict', 'MAP_JSON', JSON_FILE_FORMAT)
    def getAUIMapDict(self):
        """Note: the 'eng' field actually holds Chinese text as well.
        Returns:
            dict: {AUI: {'code': code, 'eng': ENG, 'cns': CNS)}
        """
        UMLSDataList = readMRCONSO_RRF(DATA_PATH+'/umlsMT/umls/MRCONSO.RRF')
        mapDict = {}
        for uTermDict in UMLSDataList:
            # Keep only Chinese-language rows.
            if uTermDict['LAT'] == 'CHI':
                AUI = uTermDict['AUI']
                code = uTermDict['CODE']
                ENG = uTermDict['STR']
                CNS = uTermDict['STR']
                assert AUI not in mapDict  # check that no two rows map the same AUI
                mapDict[AUI] = ({'code': code, 'eng': ENG, 'cns': CNS})
        return mapDict
if __name__ == '__main__':
    # Build (or load from cache) the AUI map once, for its side effects.
    mapper = UMLSCHIMapGenerator()
    mapper.getAUIMapDict()
|
from settings import *
# Test settings: inherit everything from the main settings module, then
# override the pieces that make the test suite fast.
DATABASES['default'] = { 'ENGINE': 'django.db.backends.sqlite3' }
# MD5 hashing is insecure but much faster than the default — tests only.
PASSWORD_HASHERS = (
    'django.contrib.auth.hashers.MD5PasswordHasher',
)
class DisableMigrations(object):
    """Mapping stand-in that makes Django skip loading real migrations."""

    def __contains__(self, app_label):
        # Claim a migrations module exists for every app.
        return True

    def __getitem__(self, app_label):
        # Every lookup resolves to a module name that does not exist.
        return 'notmigrations'
# Route every app's migration lookup through the stub above.
MIGRATION_MODULES = DisableMigrations()
|
import sys

# Usage: script.py FILE1 FILE2 — counts "first-last" name pairs (as listed in
# names.txt) across both input files and prints the totals sorted by pair.
filename1 = sys.argv[1]
filename2 = sys.argv[2]

# first name -> expected following surname, both lower-cased.
names = {}
# BUG FIX: file handles were opened and never closed; use context managers.
with open('names.txt', 'r') as f:
    for l in f:
        line = l.lower().split()
        names[line[0]] = line[1]

result = {}

def func(f):
    """Tally every adjacent "first last" pair from *f* into result."""
    data = f.read().lower().replace('.', '').split()
    # BUG FIX: iterate to len(data) - 1 so the data[i + 1] lookup can never
    # raise IndexError when a known first name is the file's last token.
    for i in range(len(data) - 1):
        if data[i] in names and data[i + 1] == names[data[i]]:
            ns = data[i] + '-' + data[i + 1]
            result[ns] = result.get(ns, 0) + 1

with open(filename1, 'r') as f1:
    func(f1)
with open(filename2, 'r') as f2:
    func(f2)

for k in sorted(result):
    print(k, '=', result[k])
# -*- coding :utf8 -*-
# Python 2 script (print statements, raw_input): reads N "quantity type"
# lines and reports totals and percentages per animal type.
testes = int(input('Quantidade de testes: '))
coelho, rato, sapo = 0, 0, 0  # rabbits, rats, frogs
for i in range(testes):
    # Each line: "<quantity> <C|R|S>" (coelho/rato/sapo).
    dados = raw_input('Tipo: ').split()
    quantidade, tipo = int(dados[0]), dados[1].upper()
    if tipo == 'C':
        coelho += quantidade
    elif tipo == 'R':
        rato += quantidade
    elif tipo == 'S':
        sapo += quantidade
total = coelho + rato + sapo
print '\nTotal: {}'.format(total)
print 'Total de coelhos: {}'.format(coelho)
print 'Total de ratos: {}'.format(rato)
print 'Total de sapos: {}'.format(sapo)
# float() avoids Python 2 integer division when computing percentages.
print 'Percentual de coelhos: {:.2%}'.format( coelho/float(total) )
print 'Percentual de ratos {:.2%}'.format( rato/float(total) )
print 'Percentual de sapos {:.2%}'.format( sapo/float(total) )
|
import matplotlib.pyplot as plt
import numpy as np
from PlotData import PlotData
#.....Visualizing Boundary.....
def PlotBoundary(model, X, y):
    """Visualize a trained SVM's decision boundary over 2-D training data.

    Linear kernels get a straight line derived from coef_/intercept_;
    RBF kernels get a contour of the model's predictions over a mesh grid.
    """
    kernel = model.get_params()['kernel']
    if kernel == 'linear':
        weights = model.coef_[0]
        slope = -weights[0] / weights[1]
        # Range of the fit line; any span would do, min/max just matches the data.
        line_x = np.linspace(min(X[:, 0]), max(X[:, 0]))
        line_y = slope * line_x - (model.intercept_[0] / weights[1])
        PlotData(np.c_[X, y], 'Positive', 'Negetive')
        plt.plot(line_x, line_y)
        plt.show()
    if kernel == 'rbf':
        # 0.2 padding so the boundary is clearly visible at the plot edges.
        axis1 = np.linspace(min(X[:, 0]), max(X[:, 0]) + 0.2)
        axis2 = np.linspace(min(X[:, 1]) - 0.2, max(X[:, 1]))
        mesh1, mesh2 = np.meshgrid(axis1, axis2)
        scores = np.zeros(np.shape(mesh1))
        # Predict column by column over the mesh.
        for col in range(mesh1.shape[1]):
            scores[:, col] = model.predict(np.c_[mesh1[:, col], mesh2[:, col]])
        PlotData(np.c_[X, y], 'Positive', 'Negetive')
        plt.contour(mesh1, mesh2, scores)
        plt.show()
"""
NOTE: SKLEARN returns SLOPE-INTERCEPT form
[a*x^n]+[b*x^(n-1)]+[c*x^(n-2)]+.... is the equation of line of fit
....since this is lenear coefficients of x are 'a' and 'b' and the constant is 'c'
training with sklearn returns a value which contains coefficients and intercepts
coef_ returns coefficients 'a' and 'b' [here]....therefore slope of fit line is: m=(-b/a)
intercept_ returns constant 'c'....therefore intercept is (-c/a) in y=mx+c
m and c are being calculated in the above code
not only for svm but also for sklearn.linear_model, this applies. (but not for Gaussian Kernel)
"""
"""
In linear: we set a range of line to fit line
In Gaussian: we set a range of area to fit shape
""" |
from .models import *
from rest_framework import serializers
class CategorySerializer(serializers.ModelSerializer):
    """Serialize every field of the Category model."""
    class Meta:
        model = Category
        fields = '__all__'
class CommentSerializer(serializers.ModelSerializer):
    """Serialize a movie comment with its author, text and date."""
    class Meta:
        model = Comment
        fields = ['movie', 'user', 'text', 'date']
class CommentHardSerializer(serializers.Serializer):
    """Plain serializer validating a single non-empty comment string."""
    comment = serializers.CharField(required=True, min_length=1)
class RatingSerializer(serializers.ModelSerializer):
    """Serialize a movie rating with its author, value and creation date."""
    class Meta:
        model = Rating
        fields = ['movie', 'user', 'rating', 'date_create']
class RatingDetailSerializer(serializers.Serializer):
    """Plain serializer validating a single non-empty rating string."""
    rating = serializers.CharField(required=True, min_length=1)
class MovieSerializer(serializers.ModelSerializer):
    """Full movie representation with nested comments and ratings plus a
    computed average rating (total_rate)."""
    comments = CommentSerializer(many=True)
    rating = RatingSerializer(many=True)
    total_rate = serializers.SerializerMethodField()

    class Meta:
        model = Movie
        fields = ['title', 'trailer', 'video', 'year', 'country', 'description',
                  'genre', 'comments', 'rating', 'total_rate', 'avg_rate']

    def get_total_rate(self, obj):
        """Average of all rating values for *obj* (0 when unrated).

        Also caches the value on obj.avg_rate so the avg_rate field reflects it.
        """
        values = [entry.rating for entry in obj.rating.all()]
        average = sum(values) / len(values) if values else 0
        obj.avg_rate = average
        return average
|
"""
Takes pictures from the Raspberry Pi Camera at regular intervals. Detects faces
and sends cropped pictures of faces to the server
"""
import cv2
import requests
# BUG FIX: picamera is used below but was never imported (NameError at runtime).
import picamera

SERVER_URL = 'http://127.0.0.1:5000/image'
# Haar cascade shipped with OpenCV for frontal-face detection.
cascPath = "/usr/share/opencv/haarcascades/haarcascade_frontalface_alt.xml"
faceCascade = cv2.CascadeClassifier(cascPath)

# Capture a single frame from the Pi camera to disk.
with picamera.PiCamera() as camera:
    camera.resolution = (320, 240)
    camera.capture('photo.jpg', format='jpeg')

# Reload the frame as grayscale (flag 0) for detection.
img = cv2.imread('photo.jpg', 0)
faces = faceCascade.detectMultiScale(
    img,
    scaleFactor=1.1,
    minNeighbors=5,
    minSize=(30, 30)
)
# Send each cropped face to the server
for (x, y, w, h) in faces:
    cv2.imwrite('cropped.jpg', img[y:y + h, x:x + w])
    # BUG FIX: close the upload handle after posting (was a leaked open()).
    with open('cropped.jpg', 'rb') as fh:
        requests.post(SERVER_URL, files={'file': fh})
|
#!/usr/bin/env python3
#
# MIT License
#
# Copyright (c) 2020-2021 EntySec
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
import os
from hatsploit.lib.sessions import Sessions
from hatsploit.lib.storage import LocalStorage
from hatsploit.core.base.types import Types
from hatsploit.core.cli.badges import Badges
from hatsploit.core.db.importer import Importer
from hatsploit.lib.payloads import Payloads
class Modules:
    def __init__(self):
        """Wire up the shared HatSploit services this manager depends on."""
        self.types = Types()            # value-format validators (ip, port, ...)
        self.badges = Badges()          # console output helpers
        self.sessions = Sessions()
        self.payloads = Payloads()
        self.local_storage = LocalStorage()  # global key/value state store
        self.importer = Importer()
def check_exist(self, name):
all_modules = self.local_storage.get("modules")
if all_modules:
for database in all_modules.keys():
modules = all_modules[database]
if name in modules.keys():
return True
return False
def check_imported(self, name):
imported_modules = self.local_storage.get("imported_modules")
if imported_modules:
if name in imported_modules.keys():
return True
return False
def check_current_module(self):
if self.local_storage.get("current_module"):
if len(self.local_storage.get("current_module")) > 0:
return True
return False
def get_module_object(self, name):
if self.check_exist(name):
database = self.get_database(name)
return self.local_storage.get("modules")[database][name]
return None
def get_current_module_object(self):
if self.check_current_module():
return self.local_storage.get_array("current_module", self.local_storage.get("current_module_number"))
return None
def get_current_module_platform(self):
if self.check_current_module():
return self.local_storage.get_array("current_module",
self.local_storage.get("current_module_number")).details['Platform']
return None
def get_current_module_name(self):
if self.check_current_module():
return self.local_storage.get_array("current_module",
self.local_storage.get("current_module_number")).details['Module']
return None
def get_database(self, name):
all_modules = self.local_storage.get("modules")
if all_modules:
for database in all_modules.keys():
modules = all_modules[database]
if name in modules.keys():
return database
return None
def compare_types(self, value_type, value):
if value_type and not value_type.lower == 'all':
if value_type.lower() == 'mac':
if not self.types.is_mac(value):
self.badges.output_error("Invalid value, expected valid MAC!")
return False
if value_type.lower() == 'ip':
if not self.types.is_ip(value):
self.badges.output_error("Invalid value, expected valid IP!")
return False
if value_type.lower() == 'ipv4':
if not self.types.is_ipv4(value):
self.badges.output_error("Invalid value, expected valid IPv4!")
return False
if value_type.lower() == 'ipv6':
if not self.types.is_ipv6(value):
self.badges.output_error("Invalid value, expected valid IPv6!")
return False
if value_type.lower() == 'ipv4_range':
if not self.types.is_ipv4_range(value):
self.badges.output_error("Invalid value, expected valid IPv4 range!")
return False
if value_type.lower() == 'ipv6_range':
if not self.types.is_ipv6_range(value):
self.badges.output_error("Invalid value, expected valid IPv6 range!")
return False
if value_type.lower() == 'port':
if not self.types.is_port(value):
self.badges.output_error("Invalid value, expected valid port!")
return False
if value_type.lower() == 'port_range':
if not self.types.is_port_range(value):
self.badges.output_error("Invalid value, expected valid port range!")
return False
if value_type.lower() == 'number':
if not self.types.is_number(value):
self.badges.output_error("Invalid value, expected valid number!")
return False
if value_type.lower() == 'integer':
if not self.types.is_integer(value):
self.badges.output_error("Invalid value, expected valid integer!")
return False
if value_type.lower() == 'float':
if not self.types.is_float(value):
self.badges.output_error("Invalid value, expected valid float!")
return False
if value_type.lower() == 'boolean':
if not self.types.is_boolean(value):
self.badges.output_error("Invalid value, expected valid boolean!")
return False
if 'session' in value_type.lower():
session = value_type.lower().replace(' ', '')
session = session.split('->')
session_type = ""
if len(session) == 2:
session_type = session[1]
module_platform = self.get_current_module_platform()
if not self.sessions.check_exist(module_platform, value, session_type):
self.badges.output_error("Invalid value, expected valid session!")
return False
return True
    def set_current_module_option(self, option, value):
        """Set an option (or the payload) on the currently selected module.

        :param option: option name; the special name "payload" selects a
                       payload for the module instead of a plain option
        :param value: new value; validated with compare_types before storing
        """
        if self.check_current_module():
            current_module = self.get_current_module_object()
            # A module with neither options nor a payload slot has nothing to set.
            if not hasattr(current_module, "options") and not hasattr(current_module, "payload"):
                self.badges.output_warning("Module has no options.")
                return
            if hasattr(current_module, "payload") and option.lower() == "payload":
                if self.payloads.check_exist(value):
                    module_name = self.get_current_module_name()
                    payload = self.payloads.get_payload_object(value)
                    module_payload = current_module.payload
                    # The payload must satisfy all four module constraints;
                    # a None constraint means "accept anything" for that field.
                    valid = 0
                    if module_payload['Types'] is None or payload['Type'] in module_payload['Types']:
                        valid += 1
                    if module_payload['Categories'] is None or payload['Category'] in module_payload['Categories']:
                        valid += 1
                    if module_payload['Platforms'] is None or payload['Platform'] in module_payload['Platforms']:
                        valid += 1
                    if module_payload['Architectures'] is None or payload['Architecture'] in module_payload['Architectures']:
                        valid += 1
                    if valid == 4:
                        if not self.payloads.add_payload(module_name, value):
                            self.badges.output_error("Invalid payload, expected valid payload!")
                            return
                        self.badges.output_information(option + " ==> " + value)
                        self.local_storage.set_module_payload(
                            "current_module",
                            self.local_storage.get("current_module_number"),
                            value
                        )
                        return
                    self.badges.output_error("Incompatible payload type, category or platform!")
                    return
                self.badges.output_error("Invalid value, expected valid payload!")
                return
            if hasattr(current_module, "options"):
                # First try the module's own options...
                if option in current_module.options.keys():
                    value_type = current_module.options[option]['Type']
                    if self.compare_types(value_type, value):
                        self.badges.output_information(option + " ==> " + value)
                        self.local_storage.set_module_option(
                            "current_module",
                            self.local_storage.get("current_module_number"),
                            option,
                            value
                        )
                    return
            # ...then fall back to the options of the currently selected payload.
            if hasattr(current_module, "payload"):
                current_payload = self.payloads.get_current_payload()
                if current_payload and hasattr(current_payload, "options"):
                    if option in current_payload.options.keys():
                        value_type = current_payload.options[option]['Type']
                        if self.compare_types(value_type, value):
                            self.badges.output_information(option + " ==> " + value)
                            self.local_storage.set_payload_option(current_module.details['Module'],
                                                                 current_payload.details['Payload'], option, value)
                    else:
                        self.badges.output_error("Unrecognized option!")
                else:
                    self.badges.output_error("Unrecognized option!")
            else:
                self.badges.output_error("Unrecognized option!")
        else:
            self.badges.output_warning("No module selected.")
def import_module(self, name):
modules = self.get_module_object(name)
try:
module_object = self.importer.import_module(modules['Path'])
if not self.local_storage.get("imported_modules"):
self.local_storage.set("imported_modules", dict())
self.local_storage.update("imported_modules", {name: module_object})
except Exception:
return None
return module_object
def add_module(self, name):
modules = self.get_module_object(name)
imported_modules = self.local_storage.get("imported_modules")
if self.check_imported(name):
module_object = imported_modules[name]
self.add_to_global(module_object)
else:
module_object = self.import_module(name)
if module_object:
if hasattr(module_object, "payload"):
payload_name = module_object.payload['Value']
self.badges.output_process("Using default payload " + payload_name + "...")
if self.payloads.check_exist(payload_name):
if self.payloads.add_payload(name, payload_name):
self.add_to_global(module_object)
return
self.badges.output_error("Invalid default payload!")
return
self.add_to_global(module_object)
else:
self.badges.output_error("Failed to select module from database!")
def add_to_global(self, module_object):
if self.check_current_module():
self.local_storage.add_array("current_module", '')
self.local_storage.set("current_module_number", self.local_storage.get("current_module_number") + 1)
self.local_storage.set_array("current_module", self.local_storage.get("current_module_number"),
module_object)
else:
self.local_storage.set("current_module", [])
self.local_storage.set("current_module_number", 0)
self.local_storage.add_array("current_module", '')
self.local_storage.set_array("current_module", self.local_storage.get("current_module_number"),
module_object)
|
# ---
# jupyter:
# jupytext:
# text_representation:
# extension: .py
# format_name: light
# format_version: '1.5'
# jupytext_version: 1.10.3
# kernelspec:
# display_name: Python 3
# language: python
# name: python3
# ---
#
# # 13 Pre-Class Assignment: Projections
# ### Readings for this topic (Recommended in bold)
# * [Heffron Section VI pg 267-275](http://joshua.smcvt.edu/linearalgebra/book.pdf)
# * [Beezer Subsections OV-GSP pg 154-161](http://linear.ups.edu/download/fcla-3.50-tablet.pdf)
# * [**_Boyd Section 5.3-5.4 pg 95-102_**](http://vmls-book.stanford.edu/vmls.pdf)
#
# ### Goals for today's pre-class assignment
#
# 1. [Orthogonal and Orthonormal](#Orthogonal_and_Orthonormal)
# 1. [Code Review](#Code_Review)
# 1. [Gram-Schmidt](#Gram-Schmidt)
# 1. [Assignment Wrap-up](#Assignment_Wrap-up)
# ----
# <a name="Orthogonal_and_Orthonormal"></a>
# ## 1. Orthogonal and Orthonormal
#
# **Definition:** A set of vectors is said to be **orthogonal** if every pair of vectors in the set is orthogonal (the dot product is 0).
# The set is **orthonormal** if it is orthogonal and each vector is a unit vector (norm equals 1).
#
# **Result:** An orthogonal set of nonzero vectors is linearly independent.
#
# **Definition:** A basis that is an orthogonal set is called an orthogonal basis.
# A basis that is an orthonormal set is called an orthonormal basis.
#
# **Result:** Let $\{u_1,\dots,u_n\}$ be an orthonormal basis for a vector space $V$.
# Then for any vector $v$ in $V$, we have
# $$v=(v\cdot u_1)u_1+(v\cdot u_2)u_2 +\dots + (v\cdot u_n)u_n$$
#
# **Definition:** A *square* matrix is **orthogonal** if $A^{-1}=A^\top$.
#
# **Result:** Let $A$ be a square matrix. The following five statements are equivalent.
#
# (a) $A$ is orthogonal.
#
# (b) The column vectors of $A$ form an orthonormal set.
#
# (c) The row vectors of $A$ form an orthonormal set.
#
# (d) $A^{-1}$ is orthogonal.
#
# (e) $A^\top$ is orthogonal.
#
# **Result:** If $A$ is an orthogonal matrix, then we have $|A|=\pm 1$.
# Consider the following vectors $u_1, u_2$, and $u_3$ that form a basis for $R^3$.
#
# $$ u_1 = (1,0,0)$$
# $$ u_2 = (0, \frac{1}{\sqrt{2}}, \frac{1}{\sqrt{2}})$$
# $$ u_3 = (0, \frac{1}{\sqrt{2}}, -\frac{1}{\sqrt{2}})$$
# ✅ **<font color=red>DO THIS:</font>** Show that the vectors $u_1$, $u_2$, and $u_3$ are linearly independent (**HINT:** see the pre-class for 0219-Change_Basis):
# Put your answer to the above here
# ✅ **<font color=red>QUESTION 1:</font>** How do you show that $u_1$, $u_2$, and $u_3$ are orthogonal?
# Put your answer to the above question here
# ✅ **<font color=red>QUESTION 2:</font>** How do you show that $u_1$, $u_2$, and $u_3$ are normal vectors?
# Put your answer to the above question here
# ✅ **<font color=red>DO THIS:</font>** Express the vector $v = (7,5,-1)$ as a linear combination of the $u_1$, $u_2$, and $u_3$ basis vectors:
# +
# Put your answer here
# -
# ----
# <a name="Code_Review"></a>
# ## 2. Code Review
#
# In the next in-class assignment, we are going to avoid some of the more advanced libraries (i.e. no ```numpy```, ```scipy```, or ```sympy```) to try to get a better understanding of what is going on in the math.
# The following code implements some common linear algebra functions:
#Standard Python Libraries only
import math
import copy
def dot(u,v):
    '''Return the dot product of vectors u and v.

    Iterates over the indices of u, so v must be at least as long as u.
    '''
    return sum(u[i] * v[i] for i in range(len(u)))
# ✅ **<font color=red>DO THIS:</font>** Write a quick test to compare the output of the above ```dot``` function with the ```numpy``` dot function.
# +
# Put your test code here
# -
def multiply(m1, m2):
    '''Return the matrix product of m1 and m2, each a list of lists.

    m1 is n x d and m2 is d x m; the result is an n x m list of lists.

    Raises:
        ValueError: if the inner dimensions of m1 and m2 do not match.
    '''
    n = len(m1)
    d = len(m2)
    m = len(m2[0])
    if len(m1[0]) != d:
        # A mismatch previously only printed a warning and then crashed (or
        # silently produced garbage) inside the loop below; fail fast instead.
        raise ValueError("ERROR - inner dimensions not equal")
    # Start from an n x m zero matrix and accumulate the triple sum.
    result = [[0 for j in range(m)] for i in range(n)]
    for i in range(n):
        for j in range(m):
            for k in range(d):
                result[i][j] += m1[i][k] * m2[k][j]
    return result
# ✅ **<font color=red>DO THIS:</font>** Write a quick test to compare the output of the above ```multiply``` function with the ```numpy``` multiply function.
# +
# Put your test code here
# -
# ✅ **<font color=red>QUESTION:</font>** What is the big-O complexity of the above ```multiply``` function?
# Put your answer to the above question here.
# ✅ **<font color=red>QUESTION:</font>** Line 11 in the provided ```multiply``` code initializes a matrix of the size of the output matrix as a list of lists with zeros. What is the big-O complexity of line 11?
# Put your answer to the above question here.
def norm(u):
    '''Return the Euclidean (L2) norm of vector u.'''
    squared_sum = sum(x * x for x in u)
    return math.sqrt(squared_sum)
# ✅ **<font color=red>DO THIS:</font>** Write a quick test to compare the outputs of the above ```norm``` function with the ```numpy``` norm function.
# +
#Put your test code here
# -
def transpose(A):
    '''Return the transpose of matrix A, represented as a list of lists.'''
    rows = len(A)
    cols = len(A[0])
    # Column c of A becomes row c of the transpose.
    return [[A[r][c] for r in range(rows)] for c in range(cols)]
# ✅ **<font color=red>DO THIS:</font>** Write a quick test to compare the output of the above ```transpose``` function with the ```numpy``` transpose function.
# +
# Put your test code here
# -
# ✅ **<font color=red>QUESTION:</font>** What is the big-O complexity of the above ```transpose``` function?
# Put your answer to the above question here
# ✅ **<font color=red>QUESTION:</font>** Explain any differences in results between the provided functions and their ```numpy``` counterparts.
# Put your answer to the above question here
# ----
# <a name="Gram-Schmidt"></a>
# ## 3. Gram-Schmidt
#
#
# Watch this video for an introduction to Gram-Schmidt, which we will implement in class.
from IPython.display import YouTubeVideo
# Embed the Gram-Schmidt introduction video (closed captions enabled).
YouTubeVideo("rHonltF77zI",width=640,height=360, cc_load_policy=True)
# ----
#
# <a name="Assignment_Wrap-up"></a>
# ## 4. Assignment wrap-up
# ✅ **<font color=red>Assignment-Specific QUESTION:</font>** How do you show that $u_1$, $u_2$, and $u_3$ are orthogonal?
# Put your answer to the above question here
# ✅ **<font color=red>QUESTION:</font>** Summarize what you did in this assignment.
# Put your answer to the above question here
# ✅ **<font color=red>QUESTION:</font>** What questions do you have, if any, about any of the topics discussed in this assignment after working through the jupyter notebook?
#
# Put your answer to the above question here
# ✅ **<font color=red>QUESTION:</font>** How well do you feel this assignment helped you to achieve a better understanding of the above mentioned topic(s)?
# Put your answer to the above question here
# ✅ **<font color=red>QUESTION:</font>** What was the **most** challenging part of this assignment for you?
# Put your answer to the above question here
# ✅ **<font color=red>QUESTION:</font>** What was the **least** challenging part of this assignment for you?
# Put your answer to the above question here
# ✅ **<font color=red>QUESTION:</font>** What kind of additional questions or support, if any, do you feel you need to have a better understanding of the content in this assignment?
# Put your answer to the above question here
# ✅ **<font color=red>QUESTION:</font>** Do you have any further questions or comments about this material, or anything else that's going on in class?
# Put your answer to the above question here
# ✅ **<font color=red>QUESTION:</font>** Approximately how long did this pre-class assignment take?
# Put your answer to the above question here
# ----
# Written by Dr. Dirk Colbry, Michigan State University
# <a rel="license" href="http://creativecommons.org/licenses/by-nc/4.0/"><img alt="Creative Commons License" style="border-width:0" src="https://i.creativecommons.org/l/by-nc/4.0/88x31.png" /></a><br />This work is licensed under a <a rel="license" href="http://creativecommons.org/licenses/by-nc/4.0/">Creative Commons Attribution-NonCommercial 4.0 International License</a>.
#
#
|
import numpy as np
from sunpy import lightcurve as lc
import matplotlib.pyplot as plt
from pandas import *
from sunpy.time import parse_time
basetime = parse_time('2016-07-24 00:00')
import datetime
from reading_msk_files import read_files
import seaborn as sns
sns.set_style('ticks', {'xtick.direction': 'in', 'ytick.direction': 'in'})
sns.set_context('paper')
import matplotlib.dates as dates

# Observation window for the 2016-07-24 event.
t_start = '2016-07-24 11:00:00'
t_end = '2016-07-24 16:00:00'
date = '20160724'

# VLF transmitter station codes and their broadcast frequencies (kHz).
Stations = ['DHO', 'FTA', 'GBZ', 'GQD', 'HWU', 'ICV', 'ITS', 'NAA', 'NAU', 'NPM', 'NRK', 'NWC', 'TBB', 'VTX']
Freq = ['23.4', '20.9', '19.58', '22.1', '18.3', '20.27', '45.9', '24', '40.8', '21.4', '37.5', '19.8', '26.7', '18.2']

# One data file per station for the chosen date.
files = [station + date + '.txt' for station in Stations]

# Read each station's time series over the observation window.
all_stations = []
for fname in files:
    all_stations.append(read_files(fname, t_start, t_end))

# Prepend the GOES 1-8 Angstrom X-ray light curve so it occupies the first panel.
goess = lc.GOESLightCurve.create(t_start, t_end)
gl = goess.data['xrsb']  # 1-8A Channel
gll = [r'GOES 1-8 $\mathrm{\AA}$', 0, gl, gl]
all_stations.insert(0, gll)

fig, axarr = plt.subplots(5, 3, figsize=(18, 18))

# Select the quantity to plot: column 3 is phase, column 2 is amplitude.
phase = True
amp = False
if phase:
    c = 3
    title = 'Phase (degrees)'
if amp:
    c = 2
    title = 'Amplitude (dB)'

fig.suptitle("All MSK data (" + title + r") received at Birr with GOES 1-8 $\mathrm{\AA}$", fontsize=18)

# Fill the 5x3 grid, one time series per panel (GOES in green, stations in blue).
k = 0
while k < len(all_stations):
    for i in range(0, 5):
        for j in range(0, 3):
            color = 'g' if k == 0 else 'b'
            axarr[i, j].plot(all_stations[k][c].index.to_pydatetime(), all_stations[k][c],
                             label=all_stations[k][0], color=color)
            axarr[i, j].xaxis.set_major_locator(dates.HourLocator(interval=1))
            axarr[i, j].xaxis.set_major_formatter(dates.DateFormatter('%H.%M'))
            axarr[i, j].xaxis.grid(True, which="major")
            print(k)  # was Python 2 `print k`; now valid Python 3
            axarr[i, j].legend(fontsize=12)
            k += 1

axarr[4, 1].set_xlabel('Start time ' + t_start[0:16] + ' UT', fontsize=18)
plt.subplots_adjust(left=0.05, right=0.99, top=0.92, bottom=0.05, hspace=0.3, wspace=0.1)
|
import sys
import os
import time
import pickle
import numpy as np
from gensim.models import Word2Vec
from argparse import ArgumentParser

p = ArgumentParser("Train vector representations")
p.add_argument("corpus", help="Path to tokenized corpus")
p.add_argument("output", help="Path to vector output directory")
p.add_argument("fd", help="Path to file with pickled conditional freq dist")
p.add_argument("rel_min", type=int, help="Minimum frequency for the smallest geography")
p.add_argument("--dims", type=int, help="Number of dimensions for each vector", default=100)
p.add_argument("--window", type=int, help="Max distance between the current and predicted word within a sentence", default=5)
p.add_argument("--workers", type=int, help="Number of cores", default=1)
args = p.parse_args()

# Load the pickled conditional frequency distribution, closing the file
# promptly instead of leaking the handle.
with open(args.fd, "rb") as fd_file:
    cfd = pickle.load(fd_file)
geos = list(cfd.keys())

# Number of tokens per geography
num_toks = np.array([fd.N() for fd in cfd.values()])

# Scale each geography's minimum count so the smallest geography uses
# exactly rel_min and larger ones get proportionally larger cutoffs.
print("==Vec min counts==")
mins = np.ceil((num_toks / np.min(num_toks)) * args.rel_min).astype(int)
for geo, min_count in zip(geos, mins):
    print(str(geo) + "\t" + str(min_count))
print()

# Build and save one Word2Vec model per geography.
for geo, min_count in zip(geos, mins):
    start = time.time()
    print("Building vectors for", geo)
    m = Word2Vec(corpus_file=os.path.join(args.corpus, geo), size=args.dims,
                 window=args.window, min_count=min_count, workers=args.workers)
    print("Built in", round(time.time() - start, 2), "seconds")
    m.wv.save_word2vec_format(os.path.join(args.output, geo))
|
import collections
import operator as op
from functools import partial
from syn.five import unicode, xrange, izip, STR, NUM
from syn.base_utils import REPL, repl_command, DefaultList, sgn, AttrDict, \
implies, feq, cfeq, tuple_append
from syn.base_utils.float import DEFAULT_TOLERANCE
from syn.base_utils.rand import PRIMITIVE_TYPES
CONTAINERS = (collections.Mapping, collections.Sequence, set, frozenset)
#-------------------------------------------------------------------------------
# NEType
class NEType(object):
    '''Base record of an inequality found between two values A and B.'''

    def __init__(self, A, B):
        self.A = A
        self.B = B

    def __call__(self):
        # Report the difference, then drop into an interactive explorer.
        print(self.message())
        self.explorer()()

    def __eq__(self, other):
        same_values = self.A == other.A and self.B == other.B
        return same_values and type(self) is type(other)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __str__(self):
        # Subclasses without a message() fall back to the default repr.
        try:
            return self.message()
        except NotImplementedError:
            return repr(self)

    def explorer(self):
        '''Build a DiffExplorer positioned at the two differing values.'''
        return DiffExplorer(ValueExplorer(self.A), ValueExplorer(self.B))

    def message(self):
        '''Human-readable description of the inequality (subclass hook).'''
        raise NotImplementedError
#-----------------------------------------------------------
class NotEqual(NEType):
    '''Plain inequality: A and B differ as whole values.'''

    def message(self):
        return '%s != %s' % (self.A, self.B)
#-----------------------------------------------------------
class DiffersAtIndex(NEType):
    '''Sequences A and B differ at a particular index.'''

    def __init__(self, A, B, index):
        super(DiffersAtIndex, self).__init__(A, B)
        self.index = index

    def __eq__(self, other):
        return (super(DiffersAtIndex, self).__eq__(other)
                and self.index == other.index)

    def explorer(self):
        return DiffExplorer(ValueExplorer(self.A, index=self.index),
                            ValueExplorer(self.B, index=self.index))

    def message(self):
        lhs = self.A[self.index]
        rhs = self.B[self.index]
        return ('Sequences differ at index {}: {} != {}'
                .format(self.index, lhs, rhs))
#-----------------------------------------------------------
class DiffersAtKey(NEType):
    '''Mappings A and B differ at a particular key.'''

    def __init__(self, A, B, key):
        super(DiffersAtKey, self).__init__(A, B)
        self.key = key

    def __eq__(self, other):
        return (super(DiffersAtKey, self).__eq__(other)
                and self.key == other.key)

    def explorer(self):
        return DiffExplorer(ValueExplorer(self.A, key=self.key),
                            ValueExplorer(self.B, key=self.key))

    def message(self):
        lhs = self.A[self.key]
        rhs = self.B[self.key]
        return ('Mappings differ at key "{}": {} != {}'
                .format(self.key, lhs, rhs))
#-----------------------------------------------------------
class DiffersAtAttribute(NEType):
    '''Objects A and B differ at a particular attribute.'''

    def __init__(self, A, B, attr):
        super(DiffersAtAttribute, self).__init__(A, B)
        self.attr = attr

    def __eq__(self, other):
        return (super(DiffersAtAttribute, self).__eq__(other)
                and self.attr == other.attr)

    def explorer(self):
        return DiffExplorer(ValueExplorer(self.A, attr=self.attr),
                            ValueExplorer(self.B, attr=self.attr))

    def message(self):
        lhs = getattr(self.A, self.attr)
        rhs = getattr(self.B, self.attr)
        return ('Objects differ at attribute "{}": {} != {}'
                .format(self.attr, lhs, rhs))
#-----------------------------------------------------------
class DifferentLength(NEType):
    '''A and B are sized containers of different lengths.'''

    def message(self):
        return 'Different lengths: %s != %s' % (len(self.A), len(self.B))
#-----------------------------------------------------------
class DifferentTypes(NEType):
    '''A and B are instances of different types.'''

    def message(self):
        return 'Different types: %s != %s' % (type(self.A), type(self.B))
#-----------------------------------------------------------
class SetDifferences(NEType):
    '''Sets A and B differ; records their symmetric difference.'''

    def __init__(self, A, B):
        super(SetDifferences, self).__init__(A, B)
        # Items present in exactly one of the two sets.
        self.diffs = A.symmetric_difference(B)

    def message(self):
        return 'Exclusive items: %s' % (self.diffs,)
#-----------------------------------------------------------
class KeyDifferences(NEType):
    '''Mappings A and B have differing key sets.'''

    def __init__(self, A, B):
        super(KeyDifferences, self).__init__(A, B)
        keys_a = set(self.A.keys())
        keys_b = set(self.B.keys())
        # Keys present in exactly one of the two mappings.
        self.diffs = keys_a.symmetric_difference(keys_b)

    def message(self):
        return 'Exclusive keys: %s' % (self.diffs,)
#-------------------------------------------------------------------------------
# ExplorationError
class ExplorationError(Exception):
    """Raised when an explorer cannot navigate as requested (end of values,
    bottom/top of the stack, or a missing key/attribute)."""
    pass
#-------------------------------------------------------------------------------
# ValueExplorer
class ValueExplorer(REPL):
    """Interactive REPL for walking through a (possibly nested) value.

    Keeps a cursor (index / key / attr) into ``value`` plus a stack of
    frames so the user can descend into containers and come back up.
    """
    commands = dict(REPL.commands)
    command_help = dict(REPL.command_help)

    def __init__(self, value, index=None, key=None, attr=None,
                 prompt='(ValEx) ', step=1):
        super(ValueExplorer, self).__init__(prompt)
        # Remember construction arguments so reset() can restore them later.
        self.initial_value = value
        self.initial_index = index
        self.initial_key = key
        self.initial_attr = attr
        self.initial_step_value = step
        self._initialize(value, index, key, attr, prompt, step)

    def _initialize(self, value, index, key, attr, prompt, step):
        """Reset all navigation state and prime the underlying iterator."""
        # Example
        # self.index = 1
        # self.value = [1, 2, 3, 4]
        # self.current_value = 2
        # But during depth_first, will step down such that index = 0, value = 2
        self.value = value
        self.index = index if index is not None else 0
        self.key = key
        self.attr = attr
        self.step_value = step
        self.stack = DefaultList(None)
        self.stack_index = 0
        self.current_value = None
        self.at_end = False
        self._at_bottom_level()
        self._prime()

    def _at_bottom_level(self):
        # A "bottom level" value yields only itself when visited.
        self.at_bottom_level = is_visit_primitive(self.value)

    def _check_empty(self):
        # An empty container (or a cursor past the end) has nothing to step to.
        if isinstance(self.value, CONTAINERS):
            if len(self.value) == 0 or self.index >= len(self.value):
                self.at_end = True

    def _prime(self):
        """Create the visit iterator and advance it to the starting position."""
        from .base import visit
        self.iter = visit(self.value, k=self.index, step=self.step_value,
                          enumerate=True)
        self._check_empty()
        if not self.at_end:
            if (isinstance(self.value, collections.Mapping) and
                self.initial_index is None and self.key is not None):
                # Scan forward until the requested mapping key is found.
                index, pair = next(self.iter)
                key, value = pair
                while not key == self.key:
                    try:
                        index, pair = next(self.iter)
                        key, value = pair
                    except StopIteration:
                        raise ExplorationError('Unable to find key: {}'
                                               .format(self.key))
                self.current_value = value
                self.index = index
            elif self.initial_index is None and self.attr is not None:
                # Scan forward until the requested attribute is found.
                index, pair = next(self.iter)
                attr, value = pair
                while not attr == self.attr:
                    try:
                        index, pair = next(self.iter)
                        attr, value = pair
                    except StopIteration:
                        raise ExplorationError('Unable to find attribute: {}'
                                               .format(self.attr))
                self.current_value = value
                self.index = index
            else:
                self.step()

    def _pop(self):
        """Restore the parent frame when coming back up the stack."""
        self.stack_index -= 1
        frame = self.stack.pop()
        self.value = frame['value']
        self.current_value = frame['current_value']
        self.index = frame['index']
        self.key = frame['key']
        self.attr = frame['attr']
        self.at_end = frame['at_end']
        self.iter = frame['iter']
        self._at_bottom_level()

    def _push(self):
        """Save the current frame and descend into current_value."""
        frame = dict(value=self.value,
                     current_value=self.current_value,
                     index=self.index,
                     key=self.key,
                     attr=self.attr,
                     iter=self.iter,
                     at_end=self.at_end)
        self.stack_index += 1
        self.stack.append(frame)
        self.value = self.current_value
        self.current_value = None
        self.index = 0
        self.key = None
        self.attr = None
        self.at_end = False
        self._at_bottom_level()
        self._prime()

    def display(self):
        """Render the value under the cursor for the REPL."""
        return unicode(self.current_value)

    def step(self, step=None):
        """Advance the cursor by *step* items (re-priming if the stride changes).

        Raises ExplorationError when the end of the current value is reached.
        """
        step = int(step) if step is not None else self.step_value
        if step != self.step_value:
            if sgn(step) != sgn(self.step_value):
                # Reversing direction from the end makes stepping legal again.
                if self.at_end:
                    self.at_end = False
            self.step_value = step
            self._prime()
        try:
            index, value = next(self.iter)
            self.index = index
            if isinstance(self.value, collections.Mapping):
                # Mappings yield (key, value) pairs.
                self.key = value[0]
                self.current_value = value[1]
                return
            if isinstance(value, tuple):
                if len(value) == 2:
                    if isinstance(value[0], STR):
                        # Object visits yield (attr_name, value) pairs.
                        self.attr = value[0]
                        self.current_value = value[1]
                        return
            self.current_value = value
        except StopIteration:
            self.at_end = True
            raise ExplorationError('At last value')

    def down(self):
        """Descend into the value under the cursor."""
        if self.at_bottom_level:
            raise ExplorationError('At bottom level')
        self._push()

    def up(self):
        """Return to the parent value."""
        if self.stack_index == 0:
            raise ExplorationError('At top level')
        self._pop()

    def reset(self):
        """Restore the explorer to its initial construction state."""
        self._initialize(self.initial_value, self.initial_index,
                         self.initial_key, self.initial_attr, self.prompt,
                         self.initial_step_value)

    def depth_first(self, leaves_only=False):
        """Yield values in depth-first order; only leaves if *leaves_only*."""
        vars = AttrDict(going_up=False,
                        going_forward=False)
        def step():
            # Step forward, climbing back up a level when the current one ends.
            try:
                self.step()
            except ExplorationError:
                if self.stack_index > 0:
                    self.up()
                    vars.going_up = True
        # An empty container is itself a leaf of the traversal.
        if isinstance(self.value, CONTAINERS) and len(self.value) == 0:
            yield self.value
        while True:
            if self.at_end and self.stack_index == 0:
                break
            if not vars.going_up and not vars.going_forward:
                if implies(leaves_only, self.at_bottom_level):
                    yield self.value
            if vars.going_up:
                vars.going_up = False
                vars.going_forward = True
                step()
            elif not self.at_bottom_level:
                vars.going_forward = False
                self.down()
            elif not self.at_end:
                step()

    @repl_command('c', 'display current_value')
    def command_display_current_value(self):
        print(self.current_value)

    @repl_command('l', 'display value')
    def command_display_value(self):
        print(self.value)

    @repl_command('d', 'go down the stack')
    def command_down(self, num='1'):
        num = int(num)
        for _ in xrange(num):
            self.down()

    @repl_command('n', 'step')
    def command_step(self, step='1'):
        step = int(step)
        self.step(step)

    @repl_command('u', 'go up the stack')
    def command_up(self, num='1'):
        num = int(num)
        for _ in xrange(num):
            self.up()
#-------------------------------------------------------------------------------
# DiffExplorer
class DiffExplorer(REPL):
    """REPL that walks two ValueExplorers in lockstep to compare A and B."""
    commands = dict(REPL.commands)
    command_help = dict(REPL.command_help)

    # Paired views of the two explorers' positions.
    value = property(lambda self: (self.A.value,
                                   self.B.value))
    current_value = property(lambda self: (self.A.current_value,
                                           self.B.current_value))

    def __init__(self, A, B, prompt='(DiffEx) '):
        super(DiffExplorer, self).__init__(prompt)
        # Accept raw values as well as pre-built explorers.
        if not isinstance(A, ValueExplorer):
            A = ValueExplorer(A)
        if not isinstance(B, ValueExplorer):
            B = ValueExplorer(B)
        self.A = A
        self.B = B

    def depth_first(self, **kwargs):
        """Yield aligned (a, b) pairs from parallel depth-first traversals."""
        for a, b in izip(self.A.depth_first(**kwargs),
                         self.B.depth_first(**kwargs)):
            yield a, b

    def display(self):
        """Render both cursors for the REPL."""
        a = self.A.display()
        b = self.B.display()
        return u'A: {}\nB: {}'.format(a, b)

    def step(self, *args, **kwargs):
        # All navigation is mirrored on both explorers.
        self.A.step(*args, **kwargs)
        self.B.step(*args, **kwargs)

    def down(self):
        self.A.down()
        self.B.down()

    def up(self):
        self.A.up()
        self.B.up()

    def reset(self):
        self.A.reset()
        self.B.reset()

    @repl_command('c', 'display current_value')
    def command_display_current_value(self):
        print(self.display())

    @repl_command('l', 'display value')
    def command_display_value(self):
        print("index: " + str(self.A.index))
        if self.A.key:
            print("key: " + str(self.A.key))
        print("A: " + str(self.value[0]))
        print("B: " + str(self.value[1]))

    @repl_command('d', 'go down the stack')
    def command_down(self, num='1'):
        num = int(num)
        for _ in xrange(num):
            self.down()

    @repl_command('f', 'find the inequality in the current values')
    def command_find(self):
        from .base import find_ne
        ex = find_ne(self.A.current_value, self.B.current_value)
        if ex is not None:
            ex()

    @repl_command('n', 'step')
    def command_step(self, step='1'):
        step = int(step)
        self.step(step)

    @repl_command('u', 'go up the stack')
    def command_up(self, num='1'):
        num = int(num)
        for _ in xrange(num):
            self.up()
#-------------------------------------------------------------------------------
# Utilities
def deep_comp(A, B, func=op.eq, **kwargs):
    '''Return True iff *func* holds for every aligned pair in a
    depth-first traversal of A and B.'''
    explorer = DiffExplorer(A, B)
    return all(func(a, b) for a, b in explorer.depth_first(**kwargs))
def feq_comp(a, b, tol=DEFAULT_TOLERANCE, relative=True):
    '''Leaf-level approximate comparison used by deep_feq.'''
    pair = (a, b)
    if any(isinstance(x, STR) for x in pair):
        return a == b
    if any(isinstance(x, CONTAINERS) for x in pair):
        # Containers only need matching type and length here; their
        # contents are compared element-wise by the deep traversal.
        return type(a) is type(b) and len(a) == len(b)
    if any(isinstance(x, complex) for x in pair):
        return cfeq(a, b, tol, relative)
    return feq(a, b, tol, relative)
def deep_feq(A, B, tol=DEFAULT_TOLERANCE, relative=True):
    '''Deep approximate equality of A and B within tolerance *tol*.'''
    numeric_types = tuple_append(NUM, complex)
    # Different types only compare equal when A is numeric (e.g. int vs float).
    if type(A) is not type(B) and not isinstance(A, numeric_types):
        return False
    comparer = partial(feq_comp, tol=tol, relative=relative)
    return deep_comp(A, B, comparer)
def is_visit_primitive(obj):
    '''Returns true if properly visiting the object returns only the object itself.'''
    from .base import visit
    text_like = isinstance(obj, STR) or isinstance(obj, bytes)
    if isinstance(obj, tuple(PRIMITIVE_TYPES)) and not text_like:
        return True
    if isinstance(obj, CONTAINERS) and not text_like:
        return False
    if text_like:
        # A one-character string/bytes visits as just itself.
        return len(obj) == 1
    # Fall back to actually visiting and checking the result.
    return list(visit(obj, max_enum=2)) == [obj]
#-------------------------------------------------------------------------------
# __all__
# Explicit public API of this module (used by ``from ... import *``).
__all__ = ('ValueExplorer', 'DiffExplorer', 'ExplorationError',
           'deep_comp', 'feq_comp', 'deep_feq', 'is_visit_primitive',
           'NEType', 'NotEqual', 'DiffersAtIndex', 'DiffersAtKey',
           'DiffersAtAttribute',
           'DifferentLength', 'DifferentTypes', 'SetDifferences',
           'KeyDifferences')
#-------------------------------------------------------------------------------
|
import json
from datetime import datetime, timedelta
import parse
from dateutil.parser import parse as dateparse
from flask import Blueprint
from sqlalchemy.exc import DataError, IntegrityError
from anubis.models import (
db,
Assignment,
AssignmentRepo,
User,
AssignmentTest,
SubmissionTestResult,
)
from anubis.utils.auth.http import require_admin
from anubis.utils.data import rand
from anubis.utils.data import row2dict, req_assert
from anubis.utils.http.decorators import load_from_id, json_response, json_endpoint
from anubis.utils.http import error_response, success_response
from anubis.lms.assignments import assignment_sync
from anubis.lms.courses import course_context, assert_course_context
from anubis.lms.questions import get_assigned_questions
from anubis.utils.logging import logger
assignments = Blueprint("admin-assignments", __name__, url_prefix="/admin/assignments")
@assignments.route('/repos/<string:id>')
@require_admin()
@load_from_id(Assignment, verify_owner=False)
@json_response
def admin_assignments_repos_id(assignment: Assignment):
    """
    List all student repos for an assignment, with both https and ssh urls.

    :param assignment: assignment loaded from the id in the url
    :return: success response with the assignment data and its repos
    """
    assert_course_context(assignment)
    repos = AssignmentRepo.query.filter(
        AssignmentRepo.assignment_id == assignment.id,
    ).all()

    def get_ssh_url(url):
        # Convert an https github url to its ssh form. parse.parse returns
        # None when the url does not match the pattern; fall back to the
        # original url instead of crashing on the subscript.
        r = parse.parse('https://github.com/{}', url)
        if r is None:
            return url
        path = r[0]
        path = path.removesuffix('.git')
        return f'git@github.com:{path}.git'

    return success_response({'assignment': assignment.full_data, 'repos': [
        {
            'id': repo.id,
            'url': repo.repo_url,
            'ssh': get_ssh_url(repo.repo_url),
            'github_username': repo.github_username,
            'name': repo.owner.name if repo.owner_id is not None else 'N/A',
            'netid': repo.owner.netid if repo.owner_id is not None else 'N/A',
        }
        for repo in repos
    ]})
@assignments.route("/assignment/<string:id>/questions/get/<string:netid>")
@require_admin()
@load_from_id(Assignment, verify_owner=False)
@json_response
def private_assignment_id_questions_get_netid(assignment: Assignment, netid: str):
"""
Get questions assigned to a given student.
:param assignment:
:param netid:
:return:
"""
user = User.query.filter_by(netid=netid).first()
# Verify that the user exists, and that the assignment
# is within the course context of the current user.
req_assert(user is not None, message='user not found')
assert_course_context(assignment)
return success_response(
{
"netid": user.netid,
"questions": get_assigned_questions(assignment.id, user.id),
}
)
@assignments.route("/get/<string:id>")
@require_admin()
@load_from_id(Assignment, verify_owner=False)
@json_response
def admin_assignments_get_id(assignment: Assignment):
"""
Get the full data for an assignment id. The course context
must be set, and will be checked.
:param assignment:
:return:
"""
# Confirm that the assignment they are asking for is part
# of this course
assert_course_context(assignment)
# Pass back the full data
return success_response({
"assignment": row2dict(assignment),
"tests": [test.data for test in assignment.tests],
})
@assignments.route("/list")
@require_admin()
@json_response
def admin_assignments_list():
"""
List all assignments within the course context.
* The response will be the row2dict of the assignment, not a data prop *
:return:
"""
# Get all the assignment objects within the course context,
# sorted by the due date.
all_assignments = Assignment.query.filter(
Assignment.course_id == course_context.id
).order_by(Assignment.due_date.desc()).all()
# Pass back the row2dict of each assignment object
return success_response({
"assignments": [row2dict(assignment) for assignment in all_assignments]
})
@assignments.route('/tests/toggle-hide/<string:assignment_test_id>')
@require_admin()
@json_response
def admin_assignment_tests_toggle_hide_assignment_test_id(assignment_test_id: str):
    """
    Toggle an assignment test being hidden.

    :param assignment_test_id: id of the test to toggle
    :return: success response with the updated test data
    """
    # Fetch the test row; it must exist and be within the course context.
    test: AssignmentTest = AssignmentTest.query.filter(
        AssignmentTest.id == assignment_test_id,
    ).first()
    req_assert(test is not None, message='test not found')
    assert_course_context(test)

    # Flip the hidden flag and persist the change.
    test.hidden = not test.hidden
    db.session.commit()

    return success_response({
        'status': 'test updated',
        'assignment_test': test.data
    })
@assignments.route('/tests/delete/<string:assignment_test_id>')
@require_admin()
@json_response
def admin_assignment_tests_delete_assignment_test_id(assignment_test_id: str):
    """
    Delete an assignment test.

    :param assignment_test_id: id of the AssignmentTest to delete
    :return: success response describing the deleted test
    """
    # Fetch the test, then confirm it exists and belongs to the
    # course currently in context.
    test = AssignmentTest.query.filter(
        AssignmentTest.id == assignment_test_id,
    ).first()
    req_assert(test is not None, message='test not found')
    assert_course_context(test)
    # Remember the name before the row disappears.
    test_name = test.name
    # Delete dependent submission results first, then the test itself.
    SubmissionTestResult.query.filter(
        SubmissionTestResult.assignment_test_id == test.id,
    ).delete()
    AssignmentTest.query.filter(
        AssignmentTest.id == assignment_test_id,
    ).delete()
    db.session.commit()
    return success_response({
        'status': f'{test_name} deleted',
        'variant': 'warning',
    })
@assignments.post('/add')
@require_admin()
@json_response
def admin_assignments_add():
    """
    Create a new (hidden) assignment in the current course context with
    placeholder values and a release/due/grace schedule starting now.

    :return: success response with the new assignment's data
    """
    # Capture a single timestamp so release/due/grace are computed from the
    # same base time; three separate datetime.now() calls can drift by
    # microseconds, making due_date and grace_date not exactly equal.
    now = datetime.now()
    new_assignment = Assignment(
        course_id=course_context.id,
        name='New Assignment',
        description='',
        hidden=True,
        autograde_enabled=False,
        github_repo_required=course_context.github_repo_required,
        ide_enabled=True,
        theia_image=course_context.theia_default_image,
        theia_options=course_context.theia_default_options,
        release_date=now + timedelta(weeks=1),
        due_date=now + timedelta(weeks=2),
        grace_date=now + timedelta(weeks=2),
    )
    db.session.add(new_assignment)
    db.session.commit()
    return success_response({
        'status': 'New assignment created.',
        'assignment': new_assignment.data,
    })
@assignments.post("/save")
@require_admin()
@json_endpoint(required_fields=[("assignment", dict)])
def admin_assignments_save(assignment: dict):
"""
Save assignment from raw fields
:param assignment:
:return:
"""
logger.info(json.dumps(assignment, indent=2))
# Get assignment
assignment_id = assignment["id"]
db_assignment = Assignment.query.filter(Assignment.id == assignment_id).first()
# Make sure it exists
if db_assignment is None:
# Create it if it doesn't exist
db_assignment = Assignment()
assignment["id"] = rand()
db.session.add(db_assignment)
assert_course_context(db_assignment)
# Update all it's fields
for key, value in assignment.items():
if 'date' in key and isinstance(value, str):
value = dateparse(value.replace('T', ' ').replace('Z', ''))
setattr(db_assignment, key, value)
# Attempt to commit
try:
db.session.commit()
except (IntegrityError, DataError) as e:
# Tell frontend what error happened
return error_response(str(e))
# Return status
return success_response(
{
"status": "Assignment updated",
}
)
@assignments.route("/sync", methods=["POST"])
@require_admin(unless_debug=True)
@json_endpoint(required_fields=[("assignment", dict)])
def private_assignment_sync(assignment: dict):
"""
Sync assignment data from the CLI. This should be used to create and update assignment data.
body = {
"assignment": {
"name": "{name}",
"course": "CS-UY 3224",
"hidden": true,
"unique_code": "{code}",
"pipeline_image": "registry.digitalocean.com/anubis/assignment/{code}",
"date": {
"release": "{now}",
"due": "{week_from_now}",
"grace": "{week_from_now}"
},
"description": "This is a very long description that encompasses the entire assignment\n",
"questions": [
{
"sequence": 1,
"questions": [
{
"q": "What is 3*4?",
"a": "12"
},
{
"q": "What is 3*2",
"a": "6"
}
]
},
{
"sequence": 2,
"questions": [
{
"q": "What is sqrt(144)?",
"a": "12"
}
]
}
]
}
}
response = {
assignment : {}
questions: {
accepted: [ ... ]
ignored: [ ... ]
rejected: [ ... ]
}
}
:return:
"""
# The course context assertion happens in the sync function
# Create or update assignment
message, success = assignment_sync(assignment)
# If there was an error, pass it back
req_assert(success, message=message, status_code=406)
# Return
return success_response(message)
|
import pygame
from random import randint
from copy import copy
class Obj():
    """Base sprite: loads images and keeps a rect for positioning."""
    def imagenes(self,image):
        # Load the sprite from disk and force a fixed 50x50 size.
        loaded=pygame.image.load(image)
        return pygame.transform.scale(loaded,(50,50))
    def __init__(self,imagen):
        self.imagen=self.imagenes(imagen)
        self.rect=self.imagen.get_rect()
    def imagenes4040(self,image_surf):
        # Smaller 40x40 variant, used for the life-counter icons.
        return pygame.transform.scale(image_surf,(40,40))
# NOTE(review): inherits pygame.Surface but never calls pygame.Surface.__init__;
# callers use the .surf/.rect attributes instead, so the base class appears
# unused — confirm before changing the inheritance.
class Sup(pygame.Surface):
    def __init__(self,width,height):
        # Backing surface plus its rect (used for blitting and bounds checks).
        self.surf=pygame.Surface((width,height))
        self.rect=self.surf.get_rect()
class MouseRect(pygame.Rect):
    """1x1 rect that follows the mouse cursor, for hover collision tests."""
    def __init__(self):
        pygame.Rect.__init__(self,0,0,1,1)
    def update(self):
        # Snap the rect's top-left corner to the current cursor position.
        self.left,self.top=pygame.mouse.get_pos()
class MenuLevels():
    """Level-selection menu screen.

    ``matriztriple`` is a list of entries
    [idle_img_path, rect, hover_bg_path, hover_label_path]; hovering an
    entry swaps in its background and raises its label slightly.
    """
    def __init__(self,matriztriple):
        self.C_dinamicos=matriztriple
        self.default = pygame.image.load("Menu.png").convert_alpha()
        self.Surf = pygame.Surface((1280,720))
        self.Surfx = self.Surf.get_rect()
        # Cache of images loaded from disk: the original reloaded every
        # image with pygame.image.load on every frame inside update().
        self._img_cache = {}
    def _load(self,path,alpha=False):
        # Load a menu image once and reuse it on subsequent frames.
        key = (path, alpha)
        if key not in self._img_cache:
            img = pygame.image.load(path)
            self._img_cache[key] = img.convert_alpha() if alpha else img
        return self._img_cache[key]
    def update(self):
        MouseRect=pygame.Rect(pygame.mouse.get_pos(),(1,1))
        self.Surf.blit(self.default,(0,0))
        # First pass: draw the hover background (if any) under everything.
        for e in self.C_dinamicos:
            if MouseRect.colliderect(e[1]):
                self.Surf.blit(self._load(e[2]),(0,0))
        # Second pass: hovered entries get their label drawn 10px higher.
        for e in self.C_dinamicos:
            if MouseRect.colliderect(e[1]):
                self.Surf.blit(self._load(e[3],alpha=True),(e[1].left,e[1].top-10))
            else:
                self.Surf.blit(self._load(e[0],alpha=True),e[1])
class Botones():
    """Image button that swaps to a highlighted image while hovered."""
    def __init__(self,imageorig,imageedit,x,y):
        self.imgoriginal=imageorig
        self.imgeditada=imageedit
        self.imgreal=self.imgoriginal
        self.imgpos=self.imgoriginal.get_rect()
        # Anchor the button's top-left corner at (x, y).
        self.imgpos.left,self.imgpos.top=(x,y)
    def update(self,surface,cursor):
        # Show the highlighted art while the cursor overlaps the button.
        hovered=cursor.colliderect(self.imgpos)
        self.imgreal=self.imgeditada if hovered else self.imgoriginal
        surface.blit(self.imgreal,self.imgpos)
class Pacman(Obj):
    """Player sprite: animated in four directions, tracks lives and coins.

    ``rect`` is the draw rect; ``rect1`` is a slightly smaller (80%) rect
    used for collisions so the sprite can slide through tight corridors.
    """
    def __init__(self,imagen00,imagen01):
        # Base frames (mouth states) facing right; flipped/rotated variants
        # give left, up and down. Layout: pacmans[direccion-1][frame].
        pacman00=self.imagenes(imagen00)
        pacman01=self.imagenes(imagen01)
        pacman10=pygame.transform.flip(pacman00,True,False)
        pacman11=pygame.transform.flip(pacman01,True,False)
        pacmanA=pygame.transform.rotate(pacman00,90)
        pacmanA1=pygame.transform.rotate(pacman01,90)
        pacmanB=pygame.transform.rotate(pacman00,270)
        pacmanB1=pygame.transform.rotate(pacman01,270)
        self.pacmans=[[pacman00,pacman01],[pacman10,pacman11],[pacmanA,pacmanA1],[pacmanB,pacmanB1]]
        self.rect=pacman00.get_rect()
        # Collision rect: 80% of the sprite, centered inside it.
        self.rect1=pacman00.get_rect()
        self.rect1.width*=0.8
        self.rect1.height*=0.8
        self.rect1.center=(self.rect.width/2,self.rect.height/2)
        self.modulo=0       # animation frame counter, cycles 0..50
        self.direccion=1    # 1=right, 2=left, 3=up, 4=down
        self.coins_eaten=0
        self.life_surf=self.imagenes4040(pacman00)
        self.lifes=3
        self.lifesurf=[[self.life_surf,self.life_surf.get_rect()] for e in range (self.lifes)]
        self.limitleft=0
        self.limittop=0
    def position(self,x,y):
        """Place both rects at (x, y); collision rect offset 5px inward."""
        self.rect.top= y
        self.rect.left= x
        self.rect1.top= y+5
        self.rect1.left= x+5
    def upgrade(self,surf,Coin_obj,Wall_Obj):
        """Draw one frame, eat coins, wrap at edges, move, undo on wall hits."""
        im_pacman=self.pacmans[self.direccion-1][1 if self.modulo<25 else 0]
        self.modulo=self.modulo+1 if self.modulo<50 else 0
        surf.surf.blit(im_pacman,self.rect)
        self.collideCoins(Coin_obj)
        self.transport(surf.rect.width,surf.rect.height)
        # Remember the position so a wall collision can be rolled back.
        posx,posy=self.rect.left,self.rect.top
        posx1,posy1=self.rect1.left,self.rect1.top
        if self.direccion==1:
            self.rect.left+=surf.rect.width/400
            self.rect1.left+=surf.rect.width/400
        elif self.direccion==2:
            self.rect.left-=surf.rect.width/400
            self.rect1.left-=surf.rect.width/400
        elif self.direccion==3:
            self.rect.top-=surf.rect.height/300
            self.rect1.top-=surf.rect.height/300
        elif self.direccion==4:
            self.rect.top+=surf.rect.height/300
            self.rect1.top+=surf.rect.height/300
        for g in Wall_Obj.final_wall:
            if self.rect1.colliderect(g):
                self.rect.left,self.rect.top=posx,posy
                self.rect1.left,self.rect1.top=posx1,posy1
    def transport(self,width,height):
        """Wrap the sprite around the screen edges (torus topology)."""
        if self.rect.left>=width+5:
            self.rect.left=self.rect.width*-1
            self.rect1.left=self.rect1.width*-1-5
        if self.rect.left<=self.rect.width*-1-5:
            self.rect.left=width
            self.rect1.left=width+5
        if self.rect.top>=height+5:
            self.rect.top=self.rect.height*-1
            self.rect1.top=self.rect1.height*-1-5
        # BUG FIX: the original compared against rect.width here, breaking
        # vertical wrap-around for non-square sprites (same value for the
        # current 50x50 assets, so behavior is unchanged for them).
        if self.rect.top<=self.rect.height*-1-5:
            self.rect.top=height
            self.rect1.top=height+5
    def collideCoins(self,Coins_obj):
        """Remove every coin overlapped by the collision rect, count them."""
        x=self.rect1.collidelistall(Coins_obj.rects)
        # Pop from the end so earlier indices stay valid.
        for e in sorted(x,reverse=True):
            Coins_obj.coins.pop(e)
            Coins_obj.rects.pop(e)
            self.coins_eaten+=1
            Coins_obj.amount-=1
    def collideGhosts(self,List):
        """Return True when the collision rect touches any ghost in List."""
        for g in List:
            if self.rect1.colliderect(g.rect):
                return True
        return False
class Coin(Obj):
    """Single collectible coin placed inside a 50px grid cell."""
    def __init__(self,imagen,contadorj,contadori):
        raw=pygame.image.load(imagen)
        self.imagen=pygame.transform.scale(raw,(13,21))
        self.rect=self.imagen.get_rect()
        # Offset the coin toward the middle of its (col, row) cell.
        self.rect.left=contadorj*50+19
        self.rect.top=contadori*50+15
    def upgrade(self,surf):
        # Draw the coin at its grid position.
        surf.blit(self.imagen,self.rect)
class CoinOrd(Coin):
    """Coin placed at explicit pixel coordinates instead of grid cells."""
    def __init__(self,imagen,ev,eve):
        raw=pygame.image.load(imagen)
        self.imagen=pygame.transform.scale(raw,(13,21))
        self.rect=self.imagen.get_rect()
        self.rect.left=ev
        self.rect.top=eve
class Coins(Coin):
    """All coins of a level, generated from an ASCII map.

    A space character in ``text`` marks a coin cell. A second, independent
    copy (``coinsave``/``rectsave``) preserves the initial layout.
    """
    def __init__(self,imagen,d_width,d_height,amount,text):
        self.amount=amount
        # Active coins, one per blank cell of the map.
        self.coins=[
            Coin(imagen,col,row)
            for row,line in enumerate(text)
            for col,cell in enumerate(line)
            if cell==" "
        ]
        # Independent backup copy of the initial layout.
        self.coinsave=[
            Coin(imagen,col,row)
            for row,line in enumerate(text)
            for col,cell in enumerate(line)
            if cell==" "
        ]
        self.rectsave=[coin.rect for coin in self.coinsave]
        self.rects=[coin.rect for coin in self.coins]
    def upgrades(self,surf):
        # Draw every remaining coin.
        for coin in self.coins:
            coin.upgrade(surf)
class Muros():
    """Wall layout parsed from an ASCII map.

    ``muros`` maps tile characters to tile images. Solid characters
    (# 2 3 5 6 7) are accumulated into horizontal wall strips per row,
    which are then merged vertically into the final collision rects
    stored in ``final_wall``.
    """
    def __init__(self,muros,surf,texto):
        self.muro = []
        width = surf.rect.width
        height = surf.rect.height
        # Character -> tile image lookup table.
        x= {" ":muros[0],"#":muros[1],"1":muros[2],"2":muros[3],"3":muros[4],"4":muros[5],"5":muros[6],"+":muros[7],"6":muros[8],"7":muros[9]}
        cant_x = width/len(texto[0])
        cant_y = height/len(texto)
        acumx = 0
        acumy = 0
        self.r_wall=[]
        self.r1_wall=[]
        # Pass 1: lay out tiles row by row and collect horizontal solid runs.
        for linea in texto:
            l_wall=[]
            l_width=0
            for c in linea:
                imagen = x[c]
                imagenr = imagen.get_rect()
                imagenr.left = acumx
                imagenr.top = acumy
                acumx += imagenr.width
                self.muro.append([imagen,imagenr])
                # Solid tiles extend the current horizontal strip.
                if imagen==x["#"] or imagen==x["2"] or imagen==x["3"] or imagen==x["5"] or imagen==x["6"] or imagen==x["7"]:
                    l_width+=imagenr.width
                # Passable tiles close the strip (if one is open).
                if imagen==x[" "] or imagen==x["1"] or imagen==x["4"] or imagen==x["+"]:
                    if l_width:
                        l_wall.append(pygame.Rect(acumx-l_width-imagenr.width,acumy,l_width,imagenr.height))
                        l_width=0
            # A strip can run to the end of the row without being closed.
            if l_width:
                l_wall.append(pygame.Rect(acumx-l_width,acumy,l_width,imagenr.height))
            self.r_wall+=l_wall
            acumy += imagenr.height
            acumx = 0
        self.final_wall=[]
        self.l_height=imagenr.height
        # Pass 2: merge vertically adjacent strips of identical x-span into
        # taller rects, consuming r_wall front-to-back.
        while self.r_wall:
            self.left1=self.r_wall[0].left
            top1=self.r_wall[0].top
            height1=self.r_wall[0].height
            self.width1=self.r_wall[0].width
            i=1
            y=len(self.r_wall)-1
            while i <= y:
                x=self.r_wall[i]
                # Same column span and exactly one tile below the growing rect.
                if self.left1==x.left and self.width1==x.width and x.top-top1==imagenr.height:
                    self.l_height+=imagenr.height
                    top1+=imagenr.height
                    # Deleting shifts the list left; compensate the counters.
                    del self.r_wall[i]
                    y-=1
                    i-=1
                i+=1
            self.final_wall.append(pygame.Rect(self.left1,self.r_wall[0].top,self.width1,self.l_height))
            self.l_height=imagenr.height
            del self.r_wall[0]
    def update(self,surf):
        # Blit every tile of the layout.
        for e in self.muro:
            surf.surf.blit(e[0],e[1])
    def MuroColli(self):
        # Expose the per-tile rects (used for fine-grained collisions).
        self.x=[e[1] for e in self.muro]
        #for e in self.muro:
            #x.append(e[1])
class LifeUp():
    """Extra-life pickup: a static image with twin rects for draw/collision."""
    def __init__(self,pos,imagen):
        self.img=pygame.image.load(imagen)
        # Both rects start anchored at the same position.
        self.rect=self.img.get_rect()
        self.rect1=self.img.get_rect()
        for r in (self.rect,self.rect1):
            r.left,r.top=pos[0],pos[1]
        self.flag=True
    def update(self,surf):
        surf.surf.blit(self.img,self.rect)
class eneMovR():
    """Patrolling enemy oscillating between ``min`` and ``max`` on one axis.

    Two rects move independently: ``rect`` (the drawn sprite, speed ``v``,
    moved by Mov_enemie) and ``juan`` (a second rect advanced at a fixed
    2px/step by Mov_juan) — presumably a secondary collision/shadow rect;
    confirm against the caller.
    """
    def __init__(self,Image_l,pos,min,max,vertical,v):
        # Two animation frames loaded from the given file paths.
        self.image = [pygame.image.load(e) for e in Image_l]
        self.actual = self.image[0]
        self.rect = self.actual.get_rect()
        self.rect.left = pos[0]
        self.rect.top = pos[1]
        # Secondary rect starting at the same position.
        self.juan=self.actual.get_rect()
        self.juan.top = pos[1]
        self.juan.left = pos[0]
        self.vx = 0
        self.vy = 0
        self.direccion = True
        self.velocidad = v
        self.modulo = 30        # animation countdown, cycles 30..0
        self.vertical=vertical  # True: patrol along y; False: along x
        self.min=min
        self.max=max
        self.copyjuanmin=min
        self.copyjuanmax=max
        self.juanmin=min
        self.juanmax=max
        # Start moving toward max if spawned at the min bound.
        if self.juanmin==pos[0] or self.juanmin==pos[1]:
            self.dirjuan=True
        else:
            self.dirjuan=False
    def Mov_juan(self):
        # Advance the secondary rect 2px per call, bouncing at the bounds;
        # horizontal turns also flip the sprite frames in place (mutates
        # self.image, so the flip affects subsequent drawing).
        if self.vertical:
            if self.juanmax>self.juan.top and self.dirjuan:
                self.juan.top+=2
            else:
                self.dirjuan=False
                self.juan.top-=2
                if self.juanmin==self.juan.top:
                    self.dirjuan=True
        else:
            if self.juanmax>self.juan.left and self.dirjuan:
                self.juan.left+=2
                if self.juanmax==self.juan.left:
                    self.image[0]=pygame.transform.flip(self.image[0],True,False)
                    self.image[1]=pygame.transform.flip(self.image[1],True,False)
            else:
                self.dirjuan=False
                self.juan.left-=2
                if self.juanmin==self.juan.left:
                    self.image[0]=pygame.transform.flip(self.image[0],True,False)
                    self.image[1]=pygame.transform.flip(self.image[1],True,False)
                    self.dirjuan=True
    def cambiar(self):
        # Alternate between the two animation frames every 15 ticks.
        if not(self.modulo):
            self.modulo = 30
            self.actual = self.image[0]
        elif self.modulo == 15:
            self.actual = self.image[1]
        self.modulo -= 1
    def Mov_enemie(self):
        # Animate, then bounce the main rect between min and max at the
        # configured speed along the patrol axis.
        self.cambiar()
        if self.vertical:
            if self.rect.top <= self.min:
                self.direccion = False
            elif self.rect.top >= self.max:
                self.direccion = True
            if self.direccion:
                self.vy=-self.velocidad
            else:
                self.vy=self.velocidad
        else:
            if self.rect.left <= self.min:
                self.direccion = False
            elif self.rect.left >= self.max:
                self.direccion = True
            if self.direccion:
                self.vx=-self.velocidad
            else:
                self.vx=self.velocidad
        self.rect.move_ip(self.vx,self.vy)
|
from django.db.models import Q
from django_filters import rest_framework as filters
from rest_framework import viewsets
from rest_framework.permissions import IsAuthenticatedOrReadOnly
from rest_framework_gis.filters import DistanceToPointFilter
from seedorf.games.serializers import GameSpotNestedSerializer
from seedorf.locations.models import Address
from seedorf.locations.serializers import AddressSerializer
from seedorf.utils.permissions import IsAdminOrReadOnly
from seedorf.utils.regex import UUID as REGEX_UUID
from .filters import SpotFilter
from .models import Spot, SpotAmenity, SpotImage, SpotOpeningTime
from .serializers import AmenitySerializer, ImageSerializer, OpeningTimeSerializer, SpotSerializer
class SpotViewSet(viewsets.ModelViewSet):
    """
    API endpoint that allows spots to be viewed or edited.
    """
    # Soft-deleted spots (deleted_at set) are hidden from the API.
    queryset = Spot.objects.filter(deleted_at=None)
    serializer_class = SpotSerializer
    lookup_field = "uuid"
    lookup_value_regex = REGEX_UUID
    # TODO: In the future, every user can create an adhoc spot
    permission_classes = (IsAdminOrReadOnly,)
    # Geo filtering on the related address point; distances given in meters.
    distance_filter_field = "address__point"
    distance_filter_convert_meters = True
    filter_backends = (filters.DjangoFilterBackend, DistanceToPointFilter)
    filter_class = SpotFilter
class SpotAddressNestedViewSet(viewsets.ModelViewSet):
    """
    API endpoint that allows spot address to be viewed or edited.
    """
    serializer_class = AddressSerializer
    lookup_field = "uuid"
    lookup_value_regex = REGEX_UUID
    # TODO: In the future, every user can create an adhoc spot
    permission_classes = (IsAdminOrReadOnly,)
    def get_queryset(self):
        # Only addresses attached to the spot named in the nested route.
        return Address.objects.filter(spot__uuid=self.kwargs["spot_uuid"])
class SpotSportOpeningTimesNestedViewSet(viewsets.ModelViewSet):
    """
    API endpoint that allows spot opening times to be viewed or edited
    """
    serializer_class = OpeningTimeSerializer
    lookup_field = "uuid"
    lookup_value_regex = REGEX_UUID
    permission_classes = (IsAdminOrReadOnly,)
    def get_queryset(self):
        # Opening times are scoped to both URL components: spot and sport.
        return SpotOpeningTime.objects.filter(
            spot__uuid=self.kwargs["spot_uuid"],
            sport__uuid=self.kwargs["sport_uuid"],
        )
class SpotSportAmenitesNestedViewSet(viewsets.ModelViewSet):
    """
    API endpoint that allows spot amenities belonging to a sport to be viewed or edited
    """
    serializer_class = AmenitySerializer
    lookup_field = "uuid"
    lookup_value_regex = REGEX_UUID
    permission_classes = (IsAdminOrReadOnly,)
    def get_queryset(self):
        # Amenities are scoped to both URL components: spot and sport.
        return SpotAmenity.objects.filter(
            spot__uuid=self.kwargs["spot_uuid"],
            sport__uuid=self.kwargs["sport_uuid"],
        )
class SpotSportImagesNestedViewSet(viewsets.ModelViewSet):
    """
    API endpoint that allows images of a sport on a spot to be viewed or edited
    """
    serializer_class = ImageSerializer
    lookup_field = "uuid"
    lookup_value_regex = REGEX_UUID
    permission_classes = (IsAdminOrReadOnly,)
    def get_queryset(self):
        # Keyword filters AND together, so explicit Q objects are unneeded.
        return SpotImage.objects.filter(
            spot__uuid=self.kwargs["spot_uuid"],
            sport__uuid=self.kwargs["sport_uuid"],
        )
class GameSpotNestedViewSet(viewsets.ModelViewSet):
    """
    API endpoint that allows nested spot to be viewed or edited per game.
    """
    serializer_class = GameSpotNestedSerializer
    lookup_field = "uuid"
    lookup_value_regex = REGEX_UUID
    # TODO: In the future, every user can create an adhoc spot
    permission_classes = (IsAuthenticatedOrReadOnly,)
    def get_queryset(self):
        # Spots linked to the game named in the nested route.
        return Spot.objects.filter(spot_games__uuid=self.kwargs["game_uuid"])
|
import random
import sys
import time
import math
from collections import Counter
from objects.topology import Square, Disk, Sphere
# Canvas dimensions in pixels.
CANVAS_HEIGHT = 720
CANVAS_WIDTH = 720
# Node count of the random geometric graph and its target average degree.
NUM_NODES = 1000
AVG_DEG = 16
# Edge drawing is skipped above this node count to keep rendering responsive.
MAX_NODES_TO_DRAW_EDGES = 8000
RUN_BENCHMARK = False
def setup():
    """Processing setup hook: create the 3D canvas and clear it to black."""
    size(CANVAS_WIDTH, CANVAS_HEIGHT, P3D)
    background(0)
def draw():
    """Processing draw hook: render the currently selected visualization."""
    global curr_vis
    global draw_domination
    # Map each visualization index to its draw call (thunks keep the
    # arguments lazy so only the selected one executes).
    renderers = {
        0: lambda: topology.drawGraph(MAX_NODES_TO_DRAW_EDGES),
        1: lambda: topology.drawSlvo(),
        2: lambda: topology.drawColoring(),
        3: lambda: topology.drawPairs(0),
        4: lambda: topology.drawPairs(1),
        5: lambda: topology.drawPairs(2),
        6: lambda: topology.drawPairs(3),
        7: lambda: topology.drawBackbones(draw_domination),
    }
    render = renderers.get(curr_vis)
    if render is not None:
        render()
def keyPressed():
    """Processing key hook: navigation, stepping, screenshots and help menu."""
    global curr_vis
    global step_size
    global vis_names
    if key == ' ':
        toggleLooping()
    elif key == 'c':
        # Coverage overlay only applies to the backbone visualization.
        if curr_vis == 7:
            toggleDrawDomination()
    elif key == 'i':
        topology.switchFgBg()
    elif key == 'l':
        incrementVis()
        topology.mightResetCurrNode()
        print vis_names[curr_vis]
    elif key == 'h':
        decrementVis()
        topology.mightResetCurrNode()
        print vis_names[curr_vis]
    elif key == 'k':
        # Pair visualizations (3..6) step by pair, the backbone view by
        # backbone, everything else by node.
        if curr_vis > 2 and curr_vis < 7:
            topology.incrementCurrPair()
        elif curr_vis == 7:
            topology.incrementCurrBackbone()
        else:
            topology.incrementCurrNode(step_size)
    elif key == 'j':
        if curr_vis > 2 and curr_vis < 7:
            topology.decrementCurrPair()
        elif curr_vis == 7:
            topology.decrementCurrBackbone()
        else:
            topology.decrementCurrNode(step_size)
    elif key == 'y':
        saveFrame("../report/images/{}-#####.png".format(vis_names[curr_vis]))
    elif key >= '0' and key <= '9':
        step_size = 2**int(key)
        print "New step size:", step_size
    elif key == ']':
        step_size = 2*step_size
        print "New step size:", step_size
    elif key == '[':
        # Integer halving; bottoms out at 0, which makes steps no-ops.
        step_size = step_size/2
        print "New step size:", step_size
    elif key == 'm':
        print "\n---- Help Menu ----"
        print "Use 'hjkl' to move between visualizations"
        print "Press 'i' to invert the color scheme"
        print "Press 'y' to take a screenshot of the current frame"
        print "Press 'c' to show the coverage of the backbone"
        print "Entering a number n between 0 and 9 will set the step size to 2^n nodes"
        print "Using ']' will double the step size, '[' will half it"
        print "Press space to pause rotation of the sphere"
def toggleLooping():
    """Pause or resume the Processing draw loop."""
    global is_looping
    if is_looping:
        noLoop()
    else:
        loop()
    is_looping = not is_looping
def toggleDrawDomination():
    """Flip whether backbone domination (coverage) is drawn."""
    global draw_domination
    # Direct boolean negation replaces the verbose if/else flip.
    draw_domination = not draw_domination
def incrementVis():
    """Advance to the next visualization, clamped at the last one (7)."""
    global curr_vis
    global topology
    if curr_vis >= 7:
        return
    curr_vis += 1
    # Clear the canvas so the new visualization starts fresh.
    background(topology.color_bg)
def decrementVis():
    """Step back to the previous visualization, clamped at the first (0)."""
    global curr_vis
    global topology
    if curr_vis <= 0:
        return
    curr_vis -= 1
    # Clear the canvas so the new visualization starts fresh.
    background(topology.color_bg)
def main():
    """Build the topology, run the full pipeline, and print summary stats.

    Runs once at import time; Processing then drives setup()/draw().
    """
    sys.setrecursionlimit(8000)
    global is_looping
    global draw_domination
    global curr_vis
    global step_size
    global vis_names
    # UI state shared with draw()/keyPressed().
    is_looping = True
    draw_domination = False
    curr_vis = 0
    step_size = 1
    vis_names = ["rgg", "slvo", "color", "bipartite", "no-tails",
            "major-comp", "no-bridge", "backbone"]
    global topology
    # topology = Square()
    # topology = Disk()
    topology = Sphere()
    topology.num_nodes = NUM_NODES
    topology.avg_deg = AVG_DEG
    topology.canvas_height = CANVAS_HEIGHT
    topology.canvas_width = CANVAS_WIDTH
    if RUN_BENCHMARK:
        n_benchmark = 0
        topology.prepBenchmark(n_benchmark)
    # Time the pipeline: node generation, edges, coloring, backbones.
    run_time = time.clock()
    topology.generateNodes()
    topology.findEdges(method="cell")
    topology.colorGraph()
    topology.generateBackbones()
    run_time = time.clock() - run_time
    print "Average degree: {}".format(topology.findAvgDegree())
    print "Min degree: {}".format(topology.getMinDegree())
    print "Max degree: {}".format(topology.getMaxDegree())
    print "Num edges: {}".format(topology.findNumEdges())
    print "Node r: {0:.3f}".format(topology.node_r)
    print "Terminal clique size: {}".format(topology.term_clique_size)
    print "Number of colors: {}".format(len(set(topology.node_colors)))
    print "Max degree when deleted: {}".format(max(topology.deg_when_del.values()))
    color_cnt = Counter(topology.node_colors)
    print "Max color set size: {} \t color: {}".format(
        color_cnt.most_common(1)[0][1], color_cnt.most_common(1)[0][0])
    print "Backbone 1 order: {} \t size: {} \t coverage: {}".format(
        topology.backbones_meta[0][0], topology.backbones_meta[0][1],
        topology.backbones_meta[0][2])
    print "Backbone 2 order: {} \t size: {} \t coverage: {}".format(
        topology.backbones_meta[1][0], topology.backbones_meta[1][1],
        topology.backbones_meta[1][2])
    # Each backbone is bipartite: report the two color classes it uses.
    b1_colors = list(set(
        [topology.node_colors[i] for i in list(topology.backbones[0])]))
    print "Backbone 1 colors: {} {}".format(b1_colors[0], b1_colors[1])
    b2_colors = list(set(
        [topology.node_colors[i] for i in list(topology.backbones[1])]))
    print "Backbone 2 colors: {} {}".format(b2_colors[0], b2_colors[1])
    if isinstance(topology, Sphere):
        print "Backbone 1 faces: {}".format(topology.num_faces[0])
        print "Backbone 2 faces: {}".format(topology.num_faces[1])
    print "Run time: {0:.3f} s".format(run_time)
    print "\nPress 'm' for the menu"
# Module entry point: build everything once; Processing's runtime then
# repeatedly invokes draw() and the input hooks.
main()
|
import pandas as pd
from sklearn.utils import shuffle
# Load the raw splits from Google Drive (adjust the paths to your own setup).
train_df = pd.read_csv(r'/content/gdrive/My Drive/Dataset _ QG/train.csv')
test_df = pd.read_csv(r'/content/gdrive/My Drive/Dataset _ QG/test.csv')
# Keep only the columns the question-generation model consumes.
qg_columns = ['context', 'text', 'question']
train_data = shuffle(train_df[qg_columns])
test_data = shuffle(test_df[qg_columns])
# Overwrite the source files with the shuffled, trimmed splits.
train_data.to_csv('/content/gdrive/My Drive/Dataset _ QG/train.csv', index=False)
test_data.to_csv('/content/gdrive/My Drive/Dataset _ QG/test.csv', index=False)
|
import os
# Must be set before Keras/Theano are imported: GPU backend, float32 math.
os.environ['THEANO_FLAGS']='mode=FAST_RUN,device=gpu,floatX=float32'
import utils as ut
import numpy as np
from os.path import basename
from sklearn.preprocessing import MinMaxScaler
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.layers import Convolution3D, MaxPooling3D
from keras.optimizers import SGD
from keras.utils import np_utils
from keras.models import load_model
from sklearn.model_selection import KFold
from keras.layers.advanced_activations import PReLU
from keras.callbacks import EarlyStopping
import csv
from sklearn.metrics import roc_curve, auc, confusion_matrix, accuracy_score, roc_auc_score
from sklearn.manifold import TSNE
from scipy.spatial.distance import hamming
def chilyfy_data(data):
    """Normalize a patient data dict in place: ndarray-ify the main fields
    and add a 'file_name' key with the bare names of the .mat files."""
    data['file_name'] = np.array([basename(path) for path in data['mat_files']])
    for key in ('mat_files', 'data'):
        data[key] = np.array(data[key])
    # 'target' is absent for unlabeled (test) sets.
    if 'target' in data:
        data['target'] = np.array(data['target'])
    return data
def build_model():
    """MLP classifier: 312 -> 684 -> 584 -> 384 -> 2 (softmax), with PReLU
    activations and per-layer dropout; rmsprop + categorical cross-entropy."""
    model = Sequential()
    # Input layer takes the 312-dimensional feature vector.
    model.add(Dense(684, init='he_normal', input_dim=312))
    model.add(PReLU())
    model.add(Dropout(0.3))
    # Two more hidden blocks with their own dropout rates.
    for units, drop_rate in ((584, 0.5), (384, 0.2)):
        model.add(Dense(units, init='he_normal'))
        model.add(PReLU())
        model.add(Dropout(drop_rate))
    # Two-class softmax output.
    model.add(Dense(2, init='he_normal'))
    model.add(Activation('softmax'))
    model.compile(
        loss='categorical_crossentropy',
        optimizer='rmsprop',
        metrics=['accuracy'])
    return model
def get_feat(spred):
    """Summarize a file's per-window probabilities into an 11-value feature list.

    spred: 1-D array of per-window probabilities. Features: mean/std,
    products of the top 4/5/6 probabilities, sliding-window products inside
    consecutive above-threshold runs, and the above-threshold fraction.
    """
    l = len(spred)
    # Split the index sequence into runs of consecutive windows on the same
    # side of the 0.5 threshold.
    ispred = zip(range(spred.shape[-1]), spred >= 0.5)
    cons = [np.array(list(v))[:, 0] for _, v in groupby(ispred, lambda x: x[1])]
    # Keep only the runs that lie above the threshold.
    const = filter(lambda x: (spred >= 0.5)[x[0]], cons)
    constv = [spred[idx] for idx in const]
    s = np.argsort(spred)
    f1 = np.mean(spred)
    f2 = np.std(spred)
    # Products of the 4/5/6 largest probabilities. np.prod replaces
    # np.product, which was removed in NumPy 2.0.
    f3 = np.prod(spred[s[-4:]])
    f4 = np.prod(spred[s[-5:]])
    f5 = np.prod(spred[s[-6:]])
    # Sliding-window products inside each sufficiently long run; list
    # comprehensions (not bare map objects) so np.sum also works on Python 3.
    f6 = np.sum([sum_by(2, v) for v in constv if v.shape[-1] >= 2])
    f7 = np.sum([sum_by(3, v) for v in constv if v.shape[-1] >= 3])
    f8 = np.sum([sum_by(4, v) for v in constv if v.shape[-1] >= 4])
    f9 = np.sum([sum_by(5, v) for v in constv if v.shape[-1] >= 5])
    f10 = np.sum([sum_by(6, v) for v in constv if v.shape[-1] >= 6])
    # Fraction of windows above threshold; float() guards against Python 2
    # integer division silently flooring the fraction to 0.
    f11 = np.where(spred >= 0.5)[0].shape[-1] / float(l)
    feat = [f1, f2, f3, f4, f5, f6, f7, f8, f9, f10, f11]
    return feat
def chilly_build(pid):
    """Score a patient's test set: per-file feature vectors plus file names.

    Relies on the module-level ``scaler`` and ``model`` already being fit.
    """
    data = ut.load_data_for_patient(patient_id=pid, file_name='traditional.npy', dtype='test')
    data = chilyfy_data(data)
    X = scaler.transform(data['data'])
    # Group row indices by their source .mat file; rows of one file are
    # contiguous, so groupby on the file name partitions them.
    idx = [map(int, np.array(list(v))[:, 0]) for _, v in groupby(zip(np.arange(0, data['target'].shape[-1]), data['mat_files']), key=lambda x: x[1])]
    files = [data['file_name'][ix][0] for ix in idx]
    # Per-file window probabilities, then summary features per file.
    pred = [model.predict_proba(X[ix, :])[:, 1] for ix in idx]
    pfeats = map(get_feat, pred)
    return np.array(pfeats), files
# ---- Train the per-window classifier on patient 2's training data ----
data = ut.load_data_for_patient(patient_id=2, file_name='traditional.npy')
data = ut.apply_safe_indexes(chilyfy_data(data))
# filter all complete dropouts
good_data_indx = np.where(np.mean(np.abs(data['data'][:, 192:]), axis=1) != 0)
data = ut.apply_indexes(data, good_data_indx)
X = data['data']
# Scale features to [0, 1]; the same scaler is reused for the test data later.
scaler = MinMaxScaler()
scaler.fit(X)
X = scaler.transform(X)
Y = np_utils.to_categorical(data['target'])
wdata = ut.load_data(patient_id=0)
model = build_model()
early_stopping = EarlyStopping(monitor='val_loss', patience=4)
# Heavily up-weight the rare positive (seizure) class.
weights = np.ones(data['target'].shape)
weights[np.where(data['target'] == 1)] = 60
model.fit(X, Y, validation_split=0.4, sample_weight=weights, nb_epoch=30, callbacks=[early_stopping])
print confusion_matrix(data['target'], model.predict_classes(X))
print accuracy_score(data['target'], model.predict_classes(X))
print roc_auc_score(data['target'], model.predict_proba(X)[:,1])
# NOTE(review): input() here seems to just pause the run (on Python 2 it
# evaluates the typed text) — confirm before running unattended.
input()
from itertools import groupby, combinations
# Group window indices by their source .mat file.
idx = [map(int, np.array(list(v))[:, 0]) for _, v in groupby(zip(np.arange(0, data['target'].shape[-1]), data['mat_files']), key=lambda x: x[1])]
def sum_by(n, l):
    """Return the sum of products of every length-n sliding window of l.

    E.g. sum_by(2, [1, 2, 3]) == 1*2 + 2*3 == 8. Returns 0 when l has
    fewer than n elements (empty window range).
    """
    # np.prod replaces np.product, which was removed in NumPy 2.0.
    return sum(np.prod(l[i:i + n]) for i in range(len(l) - n + 1))
# ---- Aggregate window predictions into per-file features ----
turget = [np.mean(data['target'][ix]) for ix in idx]
pred = [model.predict_proba(X[ix, :])[:, 1] for ix in idx]
pfeats = map(get_feat, pred)
pfeats = np.array(pfeats)
turget = np.array(turget)
# 2-D t-SNE embedding of the per-file features, colored by label.
tsmodel = TSNE(n_components=2, verbose=True)
Y = tsmodel.fit_transform(pfeats)
import matplotlib.pyplot as plt
plt.scatter(Y[:, 0], Y[:, 1], c=turget)
plt.axis('off')
plt.axis('tight')
plt.show()
# ---- Second-stage SVM over the per-file features ----
from sklearn import svm
clf = svm.SVC(C=1e5, verbose=True,probability=True)
clf.fit(pfeats, turget)
print confusion_matrix(turget, clf.predict(pfeats))
print accuracy_score(turget, clf.predict(pfeats))
print roc_auc_score(turget, clf.predict_proba(pfeats)[:,1])
# Score patient 2's test files and save the probabilities for the ensemble.
F, T = chilly_build(2)
pbuilds = clf.predict_proba(F)
sdicts = {'f':T, 'pbuilds': pbuilds}
np.save('p3_80', sdicts)
# Merge the three per-patient prediction dumps into one submission frame.
p180 = np.load('p1_80.npy').item()
p280 = np.load('p2_80.npy').item()
p380 = np.load('p3_80.npy').item()
import pandas as pd
df = pd.DataFrame()
df['File'] = np.append(np.append(p180['f'], p280['f']), p380['f'])
df['Class'] = np.append(np.append(p180['pbuilds'][:, 1], p280['pbuilds'][:, 1]), p380['pbuilds'][:, 1])
# NOTE(review): sort_values returns a new frame and these two results are
# discarded — confirm whether in-place sorting was intended here.
df.sort_values(['File'])
en = pd.read_csv('./ensemble.csv')
en.sort_values(['File'])
# Average our predictions with the existing ensemble file.
df_en = pd.DataFrame()
df_en['File'] = en['File']
df_en['Class'] = (en.Class + df.Class)/2.
df_en.to_csv('please_be_74.csv', index=False)
def warp(x):
    """
    Logarithmically rescale values with (1-p) < 0.1 to avoid loss of precision.

    Only the high tail (x > 0.35) is rescaled; the low-tail branch of the
    original was commented out and its mask was dead code, both removed.
    Assumes at least two distinct values above 0.35 (same implicit
    constraint as the original: empty/constant tails raise).
    """
    x = np.copy(x)
    high = x > 0.35
    xh = x[high]
    # Work in log10(1-p) space; exact 1.0 entries (log of 0 -> -inf) are
    # mapped one decade below the smallest finite value.
    pmin = np.log10(1 - xh[xh != 1].max()) - 1
    lx = np.log10(1 - xh)
    lx[np.isinf(lx)] = pmin
    # Affine-rescale the log values before mapping back out of log space.
    lx = (lx + 1) * (1. / (lx.max() - lx.min())) - 1
    xh = 1 - 10 ** lx
    x[high] = xh
    return x
# Keep ensemble rows for patients 1 and 3; replace patient 2 with our model.
# NOTE(review): str.match takes a regex, so 'new_1_*' matches "new_1" plus
# zero or more underscores — the glob-style intent was likely 'new_1_.*';
# confirm against the actual file names.
en_fl = en[en.File.str.match('new_1_*|new_3_*')]
en_nw = pd.DataFrame()
en_nw['File'] = p280['f']
en_nw['Class'] = p280['pbuilds'][:, 1]
en_fn = en_nw.append(en_fl)
en_fn.to_csv('this_is_sparta.csv', index=False)
|
from PIL import ImageGrab, Image
import numpy as np
import cv2
import time
from ahk import AHK
import threading
# Source image containing all six doors laid out side by side.
newImg = cv2.imread('imgs/NN/save.png')
for door_idx in range(6):
    # Each door occupies a 120px column; trim 20px margins on both sides.
    left = door_idx * 120 + 20
    right = (door_idx + 1) * 120 - 20
    door = newImg[0:150, left:right]
    cv2.imwrite('imgs/NN/{}.png'.format(door_idx), door)
from sklearn.metrics import confusion_matrix
import matplotlib.pyplot as plt
import seaborn as sn
import json
# Load the saved predictions; each record carries the true label at
# index 187 and the predicted label at index 188.
with open('./data.json') as f:
    data = json.load(f)
df = data["data"]
y_true = [record[187] for record in df]
y_pred = [record[188] for record in df]
# Row-normalized confusion matrix: each row sums to 1 over the true class.
cm = confusion_matrix(y_true, y_pred, normalize='true')
ax = sn.heatmap(cm, cmap='cividis', annot=True)
# Real axis labels instead of the 'xlabel'/'ylabel' placeholders:
# sklearn's confusion matrix has true labels on rows, predictions on columns.
plt.xlabel('Predicted label')
plt.ylabel('True label')
plt.show()
|
from typing import List
from fastapi import APIRouter, Depends, HTTPException, status
from blog import schemas, database, models
from sqlalchemy.orm import Session
from blog.repository import blog
from utils import oauth2
# NOTE(review): this module-level annotated assignment binds a Depends
# object to a global that nothing below reads — each route declares its
# own current_user parameter. Looks like leftover scaffolding; confirm
# before removing.
current_user: schemas.User = Depends(oauth2.get_current_user)
# Shorthand for the database-session dependency.
get_db = database.get_db
router = APIRouter(
    prefix='/blogs',
    tags=['Blogs'],
    # dependencies=[Depends(get_token_header)],
    # response={404:{"description":"Not Found"}},
)
@router.get('/', response_model=List[schemas.ShowBlog])
def all(db: Session = Depends(get_db), current_user: schemas.User = Depends(oauth2.get_current_user)):
    """List every blog; requires an authenticated user via the dependency."""
    return blog.get_all(db)
@router.get('/{id}', status_code=200, response_model=schemas.ShowBlog)
def retrieve_one(id: int, db: Session = Depends(get_db), current_user: schemas.User = Depends(oauth2.get_current_user)):
    """Fetch a single blog by id; delegates lookup to the repository layer."""
    return blog.get_one(id,db)
@router.post('/', status_code=status.HTTP_201_CREATED,)
def create(request: schemas.Blog, db: Session = Depends(get_db), current_user: schemas.User = Depends(oauth2.get_current_user)):
    """Create a blog from the request payload; returns 201 on success."""
    return blog.create(request, db)
@router.put('/{id}', status_code=status.HTTP_202_ACCEPTED)
def update(id: int, request: schemas.Blog, db: Session = Depends(get_db), current_user: schemas.User = Depends(oauth2.get_current_user)):
    """Update the blog with the given id from the request payload."""
    return blog.update(id, request, db)
@router.delete('/{id}', status_code=status.HTTP_204_NO_CONTENT)
def destroy(id: int, db: Session = Depends(get_db), current_user: schemas.User = Depends(oauth2.get_current_user)):
    """Delete the blog with the given id; returns 204 with no body."""
    return blog.destroy(id, db)
|
from pyscipopt import Branchrule, SCIP_RESULT
import logging
class ReluBranching(Branchrule):
    """Class which implements the ReLU branching rule. Sampling heuristic must run up to the same depth as this
    branching rule, otherwise this branching rule does not work, since sampling heuristic fills self.mip.
    nodes_by_branch_prio."""
    def __init__(self, mip):
        # mip: wrapper object exposing the SCIP model plus the ReLU bookkeeping
        # (relu_nodes, binary_variables, vars, nodes_by_branch_prio).
        self.mip = mip
        self.was_branched = set()
        self.log = logging.getLogger('main_log')
    def branchexeclp(self, allowaddcons):
        """Execute ReLU branching based on LP solution."""
        assert allowaddcons
        # Branching candidates of the current LP relaxation; only the first
        # nprio entries have maximal branching priority.
        vars, sols, fracs, nlpcands, nprio, nfrac = self.mip.model.getLPBranchCands()
        var_names = set(v.name for v in vars[:nprio])
        # Walk the per-node priority list filled by the sampling heuristic and
        # branch on the first ReLU node whose binary is a priority candidate.
        for i, node_name in enumerate(self.mip.nodes_by_branch_prio[self.mip.model.getCurrentNode().getNumber()]):
            if node_name in self.mip.relu_nodes and "t_bin_" + node_name in var_names:
                down, eq, up = self.mip.model.branchVar(self.mip.binary_variables["bin_" + node_name])
                print("branched at node", self.mip.model.getCurrentNode().getNumber(), "on", node_name, self.mip.vars[node_name + "_in"].getLbLocal(),
                      self.mip.vars[node_name + "_in"].getUbLocal())
                self.log.debug("relu branched on %s", node_name)
                # Mark the node as consumed so it is not branched on again.
                self.mip.nodes_by_branch_prio[self.mip.model.getCurrentNode().getNumber()][i] = None
                # Down child: ReLU inactive (input <= 0); up child: ReLU active
                # (output equals input), both added as local constraints.
                self.mip.model.addCons(self.mip.vars[node_name + "_in"] <= 0.0, node=down, local=True)
                self.mip.model.addCons(self.mip.vars[node_name + "_in"] == self.mip.vars[node_name], node=up, local=True)
                self.mip.model.chgVarUbNode(down, self.mip.vars[node_name + "_in"], 0.0) # in <= 0
                self.mip.model.chgVarUbNode(down, self.mip.vars[node_name], 0.0) # out <= 0
                self.mip.model.chgVarLbNode(up, self.mip.vars[node_name + "_in"], 0.0) # in >= 0, not necessary for out
                return {"result": SCIP_RESULT.BRANCHED}
        self.log.debug("relu branching could not branch")
        return {"result": SCIP_RESULT.DIDNOTRUN}
    def branchexecext(self, allowaddcons):
        # External-candidate callback: intentionally defers to SCIP's default.
        print("Relu branch ext", allowaddcons)
        return {"result": SCIP_RESULT.DIDNOTRUN}
    def branchexecps(self, allowaddcons):
        # Pseudo-solution callback: intentionally defers to SCIP's default.
        print("Relu branch not all fixed", allowaddcons)
        return {"result": SCIP_RESULT.DIDNOTRUN}
|
import tensorflow as tf
import nltk
import pdb
import os
import pickle
import numpy as np
from collections import defaultdict
import heapq
import random
import argparse
def init_parser():
    """Build the evaluation CLI and parse sys.argv into a Namespace."""
    parser = argparse.ArgumentParser(description='Evaluate which epoch')
    parser.add_argument("--saved_directory", '-dir', dest='saved_directory', type=str, default='testing')
    parser.add_argument("--search_space", '-ser', dest='search_space', type=int, default=100)
    parser.add_argument("--retain_space", '-re', dest='retain_space', type=int, default=3)
    parser.add_argument("--word_embedding_coefficient", '-w', dest='word_embedding_coefficient', type=float, default=0.1)
    parser.add_argument("--gender", '-g', dest='gender', type=str, default="female")
    return parser.parse_args()
def printing(data, f, f_final, f_final_best,word_embedding_coefficient, words_to_names_rhyme_dict,f_all,prompt):
    """Write generated limericks and per-word score breakdowns to the open
    handles `f` / `f_all`, and maintain two rolling pickle stores:
    `f_final` (all scores so far) and `f_final_best` (top-5 per run).

    `data` rows are indexed tuples: [1]=per-word scores, [2]=tokens,
    [3]=template tokens, [4]=rhyme words, [5]=look-ahead scores
    (assumed from usage below — TODO confirm against the generator).
    """
    # Load (or lazily create) the cumulative score pickle.
    try:
        with open(f_final+".pickle","rb") as pickle_in:
            data_old=pickle.load(pickle_in)
    except:
        with open(f_final+".pickle","wb") as pickle_in:
            data_old={"score":[],"adjusted_score":[]}
            pickle.dump(data_old,pickle_in)
    # Same for the best-scores pickle.
    try:
        with open(f_final_best+".pickle","rb") as pickle_in:
            data_old_best=pickle.load(pickle_in)
    except:
        with open(f_final_best+".pickle","wb") as pickle_in:
            data_old_best={"score":[],"adjusted_score":[]}
            pickle.dump(data_old_best,pickle_in)
    data_curr_score=[]
    data_curr_adjusted_score=[]
    # Group generated rows by their template string.
    temp_data=defaultdict(list)
    for line in data:
        temp_data[" ".join(line[3])].append(line)
    for t,k in enumerate(temp_data.keys()):
        lines=[]
        num_of_words_each_line=[0]
        # Count words per template line ("\n" tokens delimit lines); one
        # representative row suffices since all rows share the template.
        for pp in temp_data[k]:
            count=0
            for ppp in pp[3]:
                if ppp=="\n":
                    count+=1
                    num_of_words_each_line.append(0)
                else:
                    num_of_words_each_line[count]+=1
            break
        num_of_words_each_line=num_of_words_each_line[1:-1]
        f.write("======================= template: {} ============================ \n".format(t+1))
        f.write(k)
        f.write("----------------------- original sentences ------------------------------------ \n")
        for jj,j in enumerate(temp_data[k]):
            adjusted_score=np.mean(j[1])+word_embedding_coefficient*np.mean(j[5])
            score=np.mean(j[1])
            data_curr_score.append(score)
            data_curr_adjusted_score.append(adjusted_score)
            f.write("-------------------------score: {}; adjusted_score: {}----------------------- \n".format(score, adjusted_score))
            limerick=list(j[2])
            # Replace the word before the first newline with a rhyming name.
            limerick[limerick.index("\n")-1]=random.choice(words_to_names_rhyme_dict[j[4][0]])
            if jj<3:
                f_all.write("{}:{}".format(prompt,score)+"\n")
                f_all.write(" ".join(limerick)+"\n")
            f.write(" ".join(limerick))
            f.write("------------------------- score breakdown ------------------------ \n")
            count_w=j[2].index("\n")+1
            count_s=1
            for s in range(4):
                temp_list=[]
                for ww,w in enumerate(j[2][count_w:count_w+num_of_words_each_line[s]]):
                    f.write("({} {:03.2f})".format(w,j[1][count_s+ww]))
                    temp_list.append(j[1][count_s+ww])
                count_s+=ww
                count_w+=ww+2
                f.write(" line score is : {:04.03f}, look ahead score is : {:04.03f}".format(np.mean(temp_list),j[5][s]))
                f.write("\n")
    data_old_best_score=data_old_best["score"]
    data_old_best_adjusted_score=data_old_best["adjusted_score"]
    data_curr_best_score=heapq.nlargest(min(len(data_curr_score),5), data_curr_score, key=lambda x: x)
    # BUG FIX: the adjusted-score top-5 was previously taken from
    # data_curr_score (the plain scores); select from the adjusted list.
    data_curr_best_adjusted_score=heapq.nlargest(min(len(data_curr_adjusted_score),5), data_curr_adjusted_score, key=lambda x: x)
    data_curr_best_score+=data_old_best_score
    data_curr_best_adjusted_score+=data_old_best_adjusted_score
    data_curr_best={"score":data_curr_best_score,"adjusted_score":data_curr_best_adjusted_score}
    data_old_score=data_old["score"]
    data_old_adjusted_score=data_old["adjusted_score"]
    data_curr_score+=data_old_score
    data_curr_adjusted_score+=data_old_adjusted_score
    data_curr={}
    data_curr["score"]=data_curr_score
    data_curr["adjusted_score"]=data_curr_adjusted_score
    # Persist the updated cumulative and best-score stores.
    with open(f_final+".pickle","wb") as pickle_in:
        pickle.dump(data_curr,pickle_in)
    with open(f_final_best+".pickle","wb") as pickle_in:
        pickle.dump(data_curr_best,pickle_in)
def limericks_generation_gpt(model_name="345M",model_dir='gpt2/models/345M',prompt="blood",args=None):
    """Generate limericks for `prompt` and persist results under
    args.saved_directory (per-prompt pickle/txt, aggregate txt, rolling score
    pickles via printing(), and success bookkeeping files).

    NOTE(review): model_name/model_dir are never used in this function —
    presumably the generator configures the model internally; verify.
    """
    gender=args.gender
    saved_directory=args.saved_directory
    search_space=args.search_space
    retain_space=args.retain_space
    word_embedding_coefficient=args.word_embedding_coefficient
    # Imported lazily so the heavy generator only loads when this runs.
    from py_files.Limericks_naive import Limerick_Generate_new
    lg = Limerick_Generate_new()
    saved_directory=saved_directory
    f_final=saved_directory +"/"+"results_"+str(search_space)+"_"+str(retain_space)+"_"+str(word_embedding_coefficient)
    f_final_best=saved_directory +"/"+"best_results_"+str(search_space)+"_"+str(retain_space)+"_"+str(word_embedding_coefficient)
    f1_path=saved_directory+"/"+"success.txt"
    f2_path=saved_directory+"/"+"success.pickle"
    print("=========================================")
    print(saved_directory)
    print("=========================================")
    # Create the output directory if it does not exist in the CWD yet.
    if saved_directory not in os.listdir(os.getcwd()):
        os.mkdir(saved_directory)
    print("==================== here ===================================")
    print(saved_directory)
    result_file_path = saved_directory +"/"+ prompt+"_" + gender + '_' +str(search_space)+"_"+str(retain_space)+"_"+str(word_embedding_coefficient)
    all_result_file_path=saved_directory +"/" + str(search_space)+"_"+str(retain_space)+"_"+str(word_embedding_coefficient)
    previous_data, words_to_names_rhyme_dict=lg.gen_poem_andre_new(gender=gender,prompt=prompt,search_space=search_space, retain_space=retain_space, word_embedding_coefficient=word_embedding_coefficient)
    print("==================== here here===================================")
    with open(result_file_path+".pickle","wb") as f3:
        pickle.dump(previous_data,f3)
    print("==================== here here here===================================")
    with open(result_file_path+".txt","a+") as f:
        with open(all_result_file_path+".txt","a+") as f_all:
            printing(previous_data,f, f_final,f_final_best,word_embedding_coefficient, words_to_names_rhyme_dict,f_all,prompt)
    print("==================== here here here here===================================")
    if len(previous_data)>0:
        # Record this prompt as successfully generated.
        with open(f1_path,"a+") as f1:
            f1.write(prompt+str(search_space)+"_"+str(retain_space)+"_"+str(word_embedding_coefficient)+"\n")
        try:
            with open(f2_path,"rb") as f2:
                data=pickle.load(f2)
                data.append(prompt)
            with open(f2_path,"wb") as f2:
                pickle.dump(data,f2)
        except:
            # NOTE(review): on the first run this seeds success.pickle with an
            # empty list, dropping the current prompt; [prompt] may have been
            # intended — verify before changing.
            with open(f2_path,"wb") as f2:
                pickle.dump([],f2)
    print("==================== here here here here here===================================")
if __name__ == '__main__':
    # Prompt pool; the commented tails preserve the original longer lists.
    data1="born, shaken, restore, laugh, tears"
    #, surprise, kindness, humiliation, victory, wedding, alien, holiday, christmas, thanksgiving, birthday, injury, pillow, fiance, dawn, traffic, heartbreak, wine, beer, musuem, mountain, river, memory, mud, spider, rain, season, winter, throne, politics, promise, beach, bank, money, limerick"
    data2="love, cunning, dog, blood, death, war"
    #disease, world, planet, fire, water, sports, love, car, animal, violent, opera, monster, library, market, noble, doctor, funeral, ball, body, smart, exercise, gun, art, music, boxing, forest, philosophy, night, scary, creativity, evil, angry, pride, law, school, light, rich, color, leader, park, airplane, loss, weight, useful, applaud, home, union, child, working, cheat, fall, time, hope, flower, random, impressive"
    prompt_list=list(data1.split(", ")+data2.split(", "))
    # One prompt per SLURM array task; int(None) raises if the env var is unset.
    slurm_task_id = int(os.getenv('SLURM_ARRAY_TASK_ID'))
    prompt=prompt_list[slurm_task_id]
    print(prompt)
    limericks_generation_gpt(prompt=prompt,args=init_parser())
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from tests.test_codes_list import CodesListTestCase
from tests.buttons.test_result_buttons import ResultButtonsTestCase
from tests.test_result_screen import ResultScreenTestCase
from tests.test_test_result import TestResultTestCase
def suite():
    """Aggregate all result-screen test cases into one TestSuite."""
    # unittest.makeSuite() was deprecated in Python 3.11 and removed in 3.13;
    # TestLoader.loadTestsFromTestCase is the supported equivalent.
    loader = unittest.TestLoader()
    suite = unittest.TestSuite()
    suite.addTest(loader.loadTestsFromTestCase(TestResultTestCase))
    suite.addTest(loader.loadTestsFromTestCase(CodesListTestCase))
    suite.addTest(loader.loadTestsFromTestCase(ResultButtonsTestCase))
    suite.addTest(loader.loadTestsFromTestCase(ResultScreenTestCase))
    return suite
if __name__ == '__main__':
    # Run the aggregated suite with the default text reporter.
    runner = unittest.TextTestRunner()
    runner.run(suite())
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
from tqdm import tqdm
from utils import *
from config import get_config
#from tensorflow.python.framework.ops import disable_eager_execution
#disable_eager_execution()
config = get_config()
(X,y, group) = loaddata_nosplit_scaled_index(config.input_size, config.feature)
classes = ['A', 'E', 'j', 'L', 'N', 'P', 'R', 'V']
#Xe = np.expand_dims(X, axis=2)
y_new = np.column_stack((np.array(y), np.array(group)))
from sklearn.model_selection import train_test_split
X, Xval, y, yval = train_test_split(X, y_new, test_size=0.25, random_state=1)
group_new= np.split(y,[8,9], axis=1)[1]
y = np.split(y,[8,9], axis=1)[0]
y = np.array(pd.DataFrame(y).apply(pd.to_numeric)) #https://stackoverflow.com/a/34844867
yval_subjectlabel= np.split(yval,[8,9], axis=1)[1]
yval = np.split(yval,[8,9], axis=1)[0]
yval = np.array(pd.DataFrame(yval).apply(pd.to_numeric))
subject = np.unique(group_new)#, return_counts = True)
beat_table = pd.DataFrame()
for s in subject:
beat_table = pd.concat([beat_table, pd.DataFrame(y[np.where(group_new == s)[0]], columns = classes).apply(pd.to_numeric).sum()], axis=1)
beat_table = beat_table.T
beat_table.index = subject
val_subject = np.unique(yval_subjectlabel)#, return_counts = True)
val_beat_table = pd.DataFrame()
for s in val_subject:
val_beat_table = pd.concat([val_beat_table, pd.DataFrame(yval[np.where(yval_subjectlabel == s)[0]], columns = classes).apply(pd.to_numeric).sum()], axis=1)
val_beat_table = val_beat_table.T
val_beat_table.index = val_subject
#selected_beat_type = 'j'
#selected_subject = '207'
print(beat_table)
print(val_beat_table)
'''
beat_index_subject = np.array(beat_table.where(beat_table[selected_beat_type]>0).dropna(how='all').index) #array of subjects with selected beat type
print(beat_index_subject) #subjects that have selected beat type
print(beat_table.where(beat_table[selected_beat_type]>0).sum()) #sum of beat count of subjects with selected type by beat type
print(beat_table.where(beat_table[selected_beat_type]>0).sum().sum()) #total sum of beat count of subjects with selected type
'''
print("=========")
print(X.shape) #original total beat count
X_subjects_total = []
y_subjects_total = []
X_beattype_total = []
y_beattype_total = []
X_subjects_beat_total = []
y_subjects_beat_total = []
X_trip_p_total = []
X_trip_s_total = []
X_trip_r_total = []
for selected_beat_type in tqdm(classes):
#for selected_beat_type in tqdm(['A','E']):
beat_index_subject = np.array(beat_table.where(beat_table[selected_beat_type]>0).dropna(how='all').index) #array of subjects with selected beat type
print(beat_index_subject) #subjects that have selected beat type
print(beat_table.where(beat_table[selected_beat_type]>0).sum()) #sum of beat count of subjects with selected type by beat type
print(beat_table.where(beat_table[selected_beat_type]>0).sum().sum()) #total sum of beat count of subjects with selected type
for selected_subject in tqdm(beat_index_subject):
X_subjects = []
y_subjects = []
X_beattype = []
y_beattype = []
X_subjects_beat = []
y_subjects_beat = []
X_trip_p = []
X_trip_s = []
X_trip_r = []
for x in range(X.shape[0]):
if group_new[x] == selected_subject: #collects beats from selected subject
X_subjects.append(X[x])
y_subjects.append(y[x])
if group_new[x] in beat_index_subject and y[x][classes.index(selected_beat_type)] == 1: #collects beats of the selected beat type from subjects that have the selected beat type
X_beattype.append(X[x])
y_beattype.append(y[x])
if group_new[x] == selected_subject and y[x][classes.index(selected_beat_type)] == 1: #collects beats of the selected beat type from the selected subject
X_subjects_beat.append(X[x])
y_subjects_beat.append(y[x])
if group_new[x] in beat_index_subject and group_new[x] != selected_subject and y[x][classes.index(selected_beat_type)] == 1: #collects beats not of the selected beat type from other subjects that also have the selected beat type
X_trip_p.append(X[x])
if group_new[x] == selected_subject and y[x][classes.index(selected_beat_type)] != 1: #collects beat not of the selected beat type from the selected subject
X_trip_s.append(X[x])
if len(X_subjects_beat) > 500 and len(X_trip_s) > 0 or len(X_beattype) > 3000: #undersample majority beat class types by stopping data collection
break
if len(X_trip_s) == 0:
X_trip_s = X_subjects_beat #in case the selected subject has no other beats other than the selected beat type
selected_subject_count = len(X_subjects)
beattype_count = len(X_beattype)
subject_beat_count = len(X_subjects_beat)
print(selected_subject_count, beattype_count, subject_beat_count)
trip_p_count = len(X_trip_p)
trip_s_count = len(X_trip_s)
print(trip_p_count, trip_s_count)
print("=========================")
        #to ensure the data is of the same size, random resampling of the beat collections is used to match the largest collection
        import random
        def random_oversample(data, label, count):
            # Append one randomly chosen existing (sample, label) pair in place.
            r = random.randint(0,count-1)
            data.append(data[r])
            label.append(label[r])
            return data, label
        def random_oversample_1(data, count):
            # Append one randomly chosen existing sample in place (no label).
            r = random.randint(0,count-1)
            data.append(data[r])
            return data
        top_count = selected_subject_count if selected_subject_count > beattype_count else beattype_count #sets the size of the largest collection as target count
        #if collection is smaller than top_count, randomly oversample existing collections until all collection sizes match
        if selected_subject_count > beattype_count:
            while selected_subject_count > beattype_count:
                X_beattype, y_beattype = random_oversample(X_beattype, y_beattype, beattype_count)
                beattype_count += 1
        else:
            while beattype_count > selected_subject_count:
                X_subjects, y_subjects = random_oversample(X_subjects, y_subjects, selected_subject_count)
                selected_subject_count += 1
        while top_count > subject_beat_count:
            X_subjects_beat, y_subjects_beat = random_oversample(X_subjects_beat, y_subjects_beat, subject_beat_count)
            subject_beat_count += 1
        # NOTE(review): when trip_p_count > top_count this branch still GROWS
        # X_trip_p (random_oversample_1 appends in place and returns the same
        # list), so it oversamples rather than downsampling — confirm intent.
        if trip_p_count > top_count:
            X_trip_p_temp = []
            X_trip_p_temp_count = 0
            while top_count > X_trip_p_temp_count:
                X_trip_p_temp = random_oversample_1(X_trip_p, trip_p_count)
                X_trip_p_temp_count += 1
            X_trip_p = X_trip_p_temp
        while top_count > trip_p_count:
            X_trip_p = random_oversample_1(X_trip_p, trip_p_count)
            trip_p_count += 1
if trip_s_count > top_count:
X_trip_s_temp = []
X_trip_s_temp_count = 0
while top_count > X_trip_s_temp_count:
X_trip_s_temp = random_oversample_1(X_trip_p, trip_p_count)
X_trip_s_temp_count += 1
X_trip_s_temp = X_trip_s_temp
while top_count > trip_s_count:
X_trip_s = random_oversample_1(X_trip_s, trip_s_count)
trip_s_count += 1
for x in range(X.shape[0]):
if group[x] != selected_subject and y[x][classes.index(selected_beat_type)] != 1:
r = random.randint(0,1)
if (r == 1): X_trip_r.append(X[x])
if np.array(X_trip_r).shape[0] >= top_count:
break
trip_ref_count = np.array(X_trip_r).shape[0]
print(top_count)
#collect beat types from this loop instance
def collect_values(collector, data):
return collector + data
X_subjects_total = collect_values(X_subjects_total, X_subjects)
y_subjects_total = collect_values(y_subjects_total, y_subjects)
X_beattype_total = collect_values(X_beattype_total, X_beattype)
y_beattype_total = collect_values(y_beattype_total, y_beattype)
X_subjects_beat_total = collect_values(X_subjects_beat_total, X_subjects_beat)
y_subjects_beat_total = collect_values(y_subjects_beat_total, y_subjects_beat)
X_trip_p_total = collect_values(X_trip_p_total, X_trip_p)
X_trip_s_total = collect_values(X_trip_s_total, X_trip_s)
X_trip_r_total = collect_values(X_trip_r_total, X_trip_r)
X_test = np.array([np.array(X_beattype_total), np.array(X_subjects_total), np.array(X_subjects_beat_total), np.array(X_trip_p_total), np.array(X_trip_s_total), np.array(X_trip_r_total)])
y_test = np.array([np.array(y_beattype_total), np.array(y_subjects_total), np.array(y_subjects_beat_total)])
print(X_test.shape, y_test.shape)
#X_test = X_test.reshape(X_test.shape[0], X_test.shape[1]*X_test.shape[2], X_test.shape[3])
#y_test = y_test.reshape(y_test.shape[0], y_test.shape[1]*y_test.shape[2], y_test.shape[3])
#print(X_test.shape, y_test.shape)
print("=========================")
X_test_temp = []
for x in X_test:
X_test_temp.append(np.expand_dims(x, axis=2))
X_test = X_test_temp
'''
y_test_temp = []
for y in y_test:
y_test_temp.append(np.array(pd.DataFrame(y).idxmax(axis=1)))
y_test = y_test_temp
'''
# Define and save data
input_train = X_test
target_train = y_test
Xvale = np.expand_dims(Xval, axis=2)
yvale =yval.reshape(yval.shape[0], 1, yval.shape[1])
input_test = Xvale
target_test = yvale
import deepdish as dd
dd.io.save('dataset/traindata_tri.hdf5', input_train)
dd.io.save('dataset/trainlabel_tri.hdf5', target_train)
dd.io.save('dataset/testdata_tri.hdf5', input_test)
dd.io.save('dataset/testlabel_tri.hdf5', target_test) |
from loader import dp, bot
from keyboards.default.MainMenu import main_menu
# MongoDB init
from utils.db_api import users_db
from utils.db_api import profiles_db
# Init aiogram
import aiogram.utils.markdown as md
from aiogram import types
from aiogram.dispatcher import FSMContext
from aiogram.dispatcher.filters.state import State, StatesGroup
from aiogram.types import ParseMode
from aiogram.utils import executor
from states.UserStats import Form
#for datetime
from datetime import datetime
@dp.message_handler(content_types=['contact'], state=Form.Phone)
async def contact_hand(message, state: FSMContext):
    """Handle a shared contact in the Phone state: upsert the normalised phone
    number in Mongo, then move the dialog to the login-selection state."""
    textback = ""
    async with state.proxy() as data:
        # Normalise the phone number ("+" and spaces removed) before storing.
        data['phone'] = str(message.contact.phone_number).replace('+', '').replace(' ', '')
        req_db = users_db.update_one({'phone': data['phone']}, {'$set': {
            "phone": data['phone'],
            "updated": datetime.now()
        }
        }, upsert=True)
        # matched_count > 0 means the phone was already registered.
        if (req_db.matched_count):
            textback = "Я рад снова видеть Вас {}".format(dict(message['chat'])['first_name'])
        else:
            textback = "Добро пожаловать {}".format(dict(message['chat'])['first_name'])
    await Form.SuppotLoginSelect.set()
    markup = types.ReplyKeyboardMarkup(resize_keyboard=True, selective=True)
    markup.add("Войти")
    await bot.send_message(
        message.chat.id,
        md.text(
            md.text(textback),
            md.text("Нажмите Войти"),
            sep='\n',
        ),
        reply_markup=markup,
        parse_mode=ParseMode.MARKDOWN,
    )
@dp.message_handler(state=Form.Phone)
async def contact_hand(message):
    """Fallback for non-contact messages in the Phone state: ask the user to
    share their phone number.

    NOTE(review): this redefines the module name `contact_hand` from the
    handler above; both stay registered (the decorator runs before the
    rebinding), but renaming one would aid readability.
    """
    await bot.send_message(
        message.chat.id,
        md.text(
            md.text("Отправить номер"),
            sep='\n',
        ),
        parse_mode=ParseMode.MARKDOWN,
    )
@dp.message_handler(lambda message: message.text not in ["Войти"], state=Form.SuppotLoginSelect)
async def choose_invalid(message: types.Message):
    """Reject anything other than the login button while in SuppotLoginSelect."""
    return await message.reply("Пожалуйста, выберите!")
@dp.message_handler(lambda message: message.text in ["Войти"], state=Form.SuppotLoginSelect)
async def process_SuppotLoginSelect(message: types.Message, state: FSMContext):
    """User pressed the login button: advance to the login-entry state."""
    async with state.proxy() as data:
        # Temporarily stores the button text; overwritten by the real login
        # in the next state's handler.
        data['login'] = message.text
        print(data)
    await Form.LoginEnter.set()
    # And send message
    await bot.send_message(
        message.chat.id,
        md.text(
            md.text("Введите логин"),
            sep='\n',
        ),
        reply_markup=types.ReplyKeyboardRemove(),
        parse_mode=ParseMode.MARKDOWN,
    )
@dp.message_handler(state=Form.LoginEnter)
async def process_LoginEnter(message: types.Message, state: FSMContext):
    """Remember the entered login and ask for the password."""
    async with state.proxy() as data:
        data['login'] = message.text
    await Form.PasswordEnter.set()
    # And send message
    await bot.send_message(
        message.chat.id,
        md.text(
            md.text("Введите пароль"),
            sep='\n',
        ),
        reply_markup=types.ReplyKeyboardRemove(),
        parse_mode=ParseMode.MARKDOWN,
    )
@dp.message_handler(state=Form.PasswordEnter)
async def process_PasswordEnter(message: types.Message, state: FSMContext):
    """Check login/password against the profiles collection; on success finish
    the FSM and show the main menu, otherwise loop back to login selection.

    NOTE(review): passwords are stored/compared in plain text and the attempt
    is printed to stdout — both are security concerns worth addressing.
    """
    async with state.proxy() as data:
        data['password'] = message.text
        await Form.AuthMongo.set()
        password = data['password']
        print(("GOT\nLogin: {}\npassword: {}\n").format(data['login'], password))
        profile_result = profiles_db.find_one(filter={'login': data['login'], 'password': password})
        if profile_result != None:
            # Successful login: record the time and show the main menu.
            profiles_db.find_and_modify({'login': data['login'], 'password': password},
                                        {'$set': {'last_login': datetime.now()}})
            await message.answer("{} вы зарегистрировались, выберите следующие разделы, чтобы использовать бот".format(message.from_user.full_name), reply_markup=main_menu)
            await state.finish()
        else:
            # Wrong credentials: return to the login-selection state.
            await Form.SuppotLoginSelect.set()
            markup = types.ReplyKeyboardMarkup(resize_keyboard=True, selective=True)
            markup.add("Войти")
            await bot.send_message(message.chat.id,
                                   md.text(md.text("Логин или пароль неверны, попробуйте еще раз!"),
                                           sep='\n', ), reply_markup=markup, parse_mode=ParseMode.MARKDOWN, )
        update_result = users_db.find_and_modify({"chat.id": message.chat.id}, {'$set': {'login': data['login']}})
        # print(update_result)
|
"""
Script to test GNOME with chesapeake bay data (netCDF 3D triangle grid)
Eventually update to use Grid Map rather than BNA
"""
import os
from datetime import datetime, timedelta
import numpy as np
from gnome import scripting
from gnome import utilities
from gnome.basic_types import datetime_value_2d, numerical_methods
from gnome.utilities.remote_data import get_datafile
from gnome.model import Model
from gnome.map import MapFromBNA
from gnome.environment import Wind
from gnome.spill import point_line_release_spill, continuous_release_spill
from gnome.movers import RandomMover, constant_wind_mover, GridCurrentMover
from gnome.movers.py_wind_movers import PyWindMover
from gnome.environment import WindTS, GridCurrent
from gnome.movers.py_current_movers import PyCurrentMover
from gnome.outputters import Renderer
import gnome.utilities.profiledeco as pd
# define base directory
base_dir = os.path.dirname(__file__)
def make_model(images_dir=os.path.join(base_dir, 'images')):
    """Build a GNOME Model for the Columbia River case: BNA map, renderer,
    continuous surface spill, constant wind, gridded current from the
    CREOFS netCDF file, and a random diffusion mover."""
    print 'initializing the model'
    start_time = datetime(2015, 9, 24, 1, 1)
    # start_time = datetime(2015, 12, 18, 06, 01)
    # 1 day of data in file
    # 1/2 hr in seconds
    model = Model(start_time=start_time,
                  duration=timedelta(hours=47),
                  time_step=300)
    mapfile = get_datafile(os.path.join(base_dir, 'columbia_river.bna'))
    print 'adding the map'
    model.map = MapFromBNA(mapfile, refloat_halflife=0.0)  # seconds
    # draw_ontop can be 'uncertain' or 'forecast'
    # 'forecast' LEs are in black, and 'uncertain' are in red
    # default is 'forecast' LEs draw on top
    renderer = Renderer(
        mapfile, images_dir, image_size=(600, 1200))
    renderer.delay = 15
    # renderer.viewport = ((-123.35, 45.6), (-122.68, 46.13))
    # renderer.viewport = ((-122.9, 45.6), (-122.6, 46.0))
    print 'adding outputters'
    model.outputters += renderer
    print 'adding a spill'
    # for now subsurface spill stays on initial layer
    # - will need diffusion and rise velocity
    # - wind doesn't act
    # - start_position = (-76.126872, 37.680952, 5.0),
    spill1 = continuous_release_spill(initial_elements=10000,
                                      num_elements=400,
                                      start_position=(-122.625,
                                                      45.609,
                                                      0.0),
                                      release_time=start_time,
                                      end_position=(-122.6, 45.605, 0.0),
                                      end_release_time=start_time + timedelta(seconds=36000))
    model.spills += spill1
    print 'adding a RandomMover:'
    # model.movers += RandomMover(diffusion_coef=10000)
    print 'adding a wind mover:'
    # Wind time series: (hour offset, (speed, direction)) pairs in knots.
    series = []
    for i in [(1, (5, 90)), (7, (5, 180)), (13, (5, 270)), (19, (5, 0)), (25, (5, 90))]:
        series.append((start_time + timedelta(hours=i[0]), i[1]))
    wind1 = WindTS.constant_wind('wind1', 0.5, 0, 'm/s')
    wind2 = WindTS(timeseries=series, units='knots', extrapolate=True)
    # wind = Wind(timeseries=series, units='knots')
    # NOTE(review): only wind1 (constant) is attached; wind2 is built but unused.
    model.movers += PyWindMover(wind=wind1)
    print 'adding a current mover:'
    # url = ('http://geoport.whoi.edu/thredds/dodsC/clay/usgs/users/jcwarner/Projects/Sandy/triple_nest/00_dir_NYB05.ncml')
    # test = GridCurrent.from_netCDF(name='gc1', filename=url)
    curr_file = get_datafile('COOPSu_CREOFS24.nc')
    curr = GridCurrent.from_netCDF(name='gc2', filename=curr_file,)
    c_mover = PyCurrentMover(curr, extrapolate=True, default_num_method='Trapezoid')
    # renderer.add_grid(curr.grid)
    # renderer.add_vec_prop(curr)
    model.movers += c_mover
    print 'adding a random mover'
    model.movers += RandomMover(diffusion_coef=1000)
    # curr_file = get_datafile(os.path.join(base_dir, 'COOPSu_CREOFS24.nc'))
    # c_mover = GridCurrentMover(curr_file)
    # model.movers += c_mover
    return model
if __name__ == "__main__":
    # Profile a full model run and report memory use after each step.
    pd.profiler.enable()
    startTime = datetime.now()
    scripting.make_images_dir()
    model = make_model()
    print "doing full run"
    rend = model.outputters[0]
    # rend.graticule.set_DMS(True)
    for step in model:
        # Set the viewport once, on the first step.
        if step['step_num'] == 0:
            rend.set_viewport(((-122.9, 45.6), (-122.6, 46.0)))
        # rend.set_viewport(((-123.25, 48.125), (-122.5, 48.75)))
        # if step['step_num'] == 0:
        # rend.set_viewport(((-122.8, 45.65), (-122.75, 45.7)))
        # rend.set_viewport(((-123.1, 48.55), (-122.95, 48.65)))
        # print step
        print "step: %.4i -- memuse: %fMB" % (step['step_num'],
                                              utilities.get_mem_use())
    print datetime.now() - startTime
    pd.profiler.disable()
    # Print the most expensive 20% of profiled functions.
    pd.print_stats(0.2)
|
# classification using Random forests without one hot encoding
import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn import metrics
from sklearn.ensemble import RandomForestClassifier
df = pd.read_csv('new3.csv')
# Import train_test_split function
X = df[['emp_length_int', 'home_ownership_cat', 'annual_inc', 'loan_amount',
        'term_cat', 'application_type_cat', 'purpose_cat', 'interest_payment_cat', 'interest_rate',
        'grade_cat', 'dti', 'total_pymnt', 'total_rec_prncp', 'installment']]  # Features
y = df['loan_condition_cat']  # Labels
# Split dataset into training set and test set.
# BUG FIX: test_size was 0.7, which trained on only 30% of the data while the
# adjacent comment (and convention) intends a 70% train / 30% test split.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3)  # 70% training and 30% test
loanmod = RandomForestClassifier(n_estimators=50)
print("Training in Process.....")
loanmod.fit(X_train, y_train)  # training the model
y_pred = loanmod.predict(X_test)
# evaluation
print("Accuracy:", metrics.accuracy_score(y_test, y_pred))
print("Precision:", metrics.precision_score(y_test, y_pred))
print("Recall:", metrics.recall_score(y_test, y_pred))
from sklearn.metrics import precision_recall_curve
from sklearn.metrics import plot_precision_recall_curve
import matplotlib.pyplot as plt
from sklearn.metrics import average_precision_score
average_precision = average_precision_score(y_test, y_pred)
print('Average precision-recall score: {0:0.2f}'.format(
    average_precision))
# NOTE(review): plot_precision_recall_curve was deprecated in scikit-learn 1.0
# and removed in 1.2; migrate to PrecisionRecallDisplay.from_estimator when
# the pinned sklearn version allows.
disp = plot_precision_recall_curve(loanmod, X_test, y_test)
disp.ax_.set_title('2-class Precision-Recall curve: '
                   'AP={0:0.2f}'.format(average_precision))
plt.show()
|
import xml.etree.ElementTree
import csv
def joinXMLWithCSV(XMLFileName, csvFileName):
    """Join product metadata from the XML export with price rows from the CSV
    and write the merged rows (with a header) to prices_and_products.csv."""
    products = extractProductsFromXMLFile(XMLFileName)
    products_and_prices_csv_writer = createCSVWriter('prices_and_products.csv');
    addFirstRow(products_and_prices_csv_writer, ['customer','Local code','Name','Price','Description','URL','LEN Code'])
    compareCodesAndWriteProducts(products, csvFileName, products_and_prices_csv_writer)
def extractProductsFromXMLFile(XMLFileName):
    """Parse the XML file and return its first <Products> element."""
    root = xml.etree.ElementTree.parse(XMLFileName).getroot()
    return root.findall('Products')[0]
def createCSVWriter(csvToCreateName):
    """Create a '|'-delimited csv.writer over a newly opened UTF-8 file.

    NOTE(review): only the writer is returned — the underlying file handle is
    never closed, so flushing relies on interpreter shutdown/GC.
    """
    products_and_prices_csv_writer = csv.writer(open(csvToCreateName, 'w', newline='', encoding='utf-8'),
                                                delimiter='|')
    return products_and_prices_csv_writer
def addFirstRow(products_and_prices_csv_writer, firstRow):
    """Write the header row to the output CSV."""
    products_and_prices_csv_writer.writerow(firstRow)
def compareCodesAndWriteProducts(products, csvFileName, csvWriter):
    """For each <Product>, look up its price row by local code and write the
    merged record.

    PERF: the original re-read the whole CSV for every product (O(n*m));
    the price rows are now loaded once into a dict. First occurrence of a
    code wins, matching the original `break`-on-first-match behavior.
    """
    prices_by_code = {}
    with open(csvFileName) as csvFile:
        for row in csv.reader(csvFile, delimiter=';'):
            prices_by_code.setdefault(row[1], row)
    for product in products.findall('Product'):
        code = product.attrib.get('LocalCode')
        name = product.findall('Name')[0].text
        row = prices_by_code.get(code)
        if row is not None:
            csvWriter.writerow([row[0], row[1], name, row[5], row[3], row[4], row[2]])
|
#!/usr/bin/env python3
import matplotlib.pyplot as plt
import numpy
from scipy.optimize import curve_fit
# Load measured decay counts (CSV columns: time, decays) and preview them
# on a log-scaled y axis.
time, decays = numpy.loadtxt('decay.txt', unpack=True, delimiter=',')
print(time)
print(decays)
plt.plot(time, decays, 'ko')
plt.semilogy()
plt.show()
def f(t, l, n0):
    """Exponential decay model: N(t) = n0 * exp(-l * t)."""
    return numpy.exp(-t * l) * n0
# Fit the decay model to the data and overlay the fitted curve on the
# log-scaled plot; pars holds the fitted (l, n0).
pars, _ = curve_fit(f, time, decays)
print(pars)
plt.plot(time, decays, 'ko')
plt.plot(time, f(time, *pars), 'r-')
plt.semilogy()
plt.show()
|
# Read "n k" on the first line, then n integers on the second line, and
# report whether k occurs among them. (Python 2: raw_input.)
header = raw_input().split()
count = int(header[0])
target = int(header[1])
tokens = raw_input().split()
values = [int(tokens[i]) for i in range(count)]
if target in values:
    print("yes")
else:
    print("no")
|
import tensorflow as tf
import tensorflow.keras.backend as K
from tensorflow.keras.layers import GlobalAveragePooling2D
from tensorflow.keras.layers import GlobalMaxPooling2D
from tensorflow.keras.layers import Dense
from tensorflow.keras.layers import Reshape
from tensorflow.keras.layers import Multiply
from tensorflow.keras.layers import Add
from tensorflow.keras.layers import Activation
from tensorflow.keras.layers import Lambda
from tensorflow.keras.layers import Concatenate
from tensorflow.keras.layers import Conv2D
from model.layers import _normalization
from model.layers import _activation
def _se_block(inputs, se_ratio=16, name=None):
    """Squeeze-and-Excitation block: channel-wise reweighting of `inputs`.

    Global average pool -> bottleneck Dense (channels // se_ratio) ->
    expand Dense -> sigmoid gate, applied multiplicatively per channel.
    `name` must be provided — it prefixes every layer name.
    """
    channel = K.int_shape(inputs)[-1]
    x = GlobalAveragePooling2D(name=name+'_gap')(inputs)
    x = Dense(channel//se_ratio, name=name+'_fc1')(x)
    x = _activation(x, activation='relu', name=name+'_relu')
    x = Dense(channel, name=name+'_fc2')(x)
    x = _activation(x, activation='sigmoid', name=name+'_sigmoid')
    x = Reshape([1, 1, channel], name=name+'_reshape')(x)
    x = Multiply(name=name+'_multiply')([inputs, x])
    return x
def _cbam_block(inputs, ratio=8, name=None):
    """CBAM attention: channel attention followed by spatial attention.

    `name` must be provided — it prefixes the named layers.
    """
    def _channel_attention(_inputs, cbam_ratio=8):
        # Shared two-layer MLP applied to both the average- and max-pooled
        # channel descriptors.
        channel = K.int_shape(_inputs)[-1]
        shared_layer_one = Dense(channel // cbam_ratio,
                                 activation='relu',
                                 kernel_initializer='he_normal',
                                 use_bias=True,
                                 bias_initializer='zeros',
                                 name=name+'_sl1')
        shared_layer_two = Dense(channel,
                                 kernel_initializer='he_normal',
                                 use_bias=True,
                                 bias_initializer='zeros',
                                 name=name+'_sl2')
        avg_pool = GlobalAveragePooling2D(name=name+'_gap')(_inputs)
        avg_pool = Reshape((1, 1, channel))(avg_pool)
        avg_pool = shared_layer_one(avg_pool)
        avg_pool = shared_layer_two(avg_pool)
        max_pool = GlobalMaxPooling2D(name=name+'_gmp')(_inputs)
        max_pool = Reshape((1, 1, channel))(max_pool)
        max_pool = shared_layer_one(max_pool)
        max_pool = shared_layer_two(max_pool)
        # Sum the two descriptors, squash to [0, 1], and gate the channels.
        cbam_feature = Add()([avg_pool, max_pool])
        cbam_feature = _activation(cbam_feature, activation='sigmoid', name=name+'_sigmoid')
        return Multiply()([_inputs, cbam_feature])
    def _spatial_attention(_inputs, kernel_size=7):
        # NOTE(review): `channel` is computed but unused here; kept as-is.
        channel = K.int_shape(_inputs)[-1]
        cbam_feature = _inputs
        # Per-pixel channel statistics (mean and max) -> conv -> sigmoid mask.
        avg_pool = Lambda(lambda x: K.mean(x, axis=-1, keepdims=True), name=name+'_sap')(cbam_feature)
        max_pool = Lambda(lambda x: K.max(x, axis=-1, keepdims=True), name=name+'_smp')(cbam_feature)
        concat = Concatenate()([avg_pool, max_pool])
        cbam_feature = Conv2D(1, kernel_size,
                              strides=1,
                              padding='same',
                              activation='sigmoid',
                              kernel_initializer='he_normal',
                              use_bias=False,
                              name=name+'_conv')(concat)
        return Multiply()([_inputs, cbam_feature])
    x = _channel_attention(inputs, ratio)
    x = _spatial_attention(x)
    return x
#Michael Pu
#2016/11/21
#ICS2O1
#Ms.Strelkovska
#Looping Through Lists Exercises 1
#Get User Input
listInputs = []
for ordinal in ["first", "second"]:
    # Fixed "Seperate" -> "Separate" in the user-facing prompt.
    print("Enter the items for the %s list. Separate list items with a comma (,)." % ordinal)
    listInputs.append(input())
#Separate Input Into Lists
lists = []
for stringList in listInputs:
    # Strip leading/trailing spaces from each item.  The previous
    # tempList[tempList.index(item)] pattern was O(n^2) and fragile with
    # duplicate items; a comprehension does the same in one pass.
    lists.append([item.strip(" ") for item in stringList.split(",")])
# Report whether the two lists share at least one item.
match = False
for listItem in lists[0]:
    if listItem in lists[1]:
        match = True
        break
print(match)
|
import random
def play():
    """Run one interactive game of hangman with 8 lives."""
    words = ["python", "java", "kotlin", "javascript"]
    word = random.choice(words)
    guessed = set()
    lives = 8
    print("H A N G M A N")
    while lives > 0:
        print()
        revealed = generate_display(word, guessed)
        print(revealed)
        if revealed == word:
            # BUG FIX: this message was a bare string expression (a no-op),
            # so winning was never announced before the loop broke.
            print("You guessed the word!")
            break
        guess = input("Input a letter: ")
        if not valid_guess(guess, guessed):
            continue
        guessed.add(guess)
        if guess not in word:
            print("That letter doesn't appear in the word")
            lives -= 1
    # lives > 0 means the loop ended via `break` (word fully revealed).
    if lives > 0:
        print("You survived!")
    else:
        print("You lost!")
def valid_guess(guess, guessed):
    """Validate a hangman guess.

    Returns True only for a single lowercase English letter that has not
    been guessed before; otherwise prints the reason and returns False.
    """
    alphabet = "abcdefghijklmnopqrstuvwxyz"
    if len(guess) != 1:
        print("You should input a single letter")
    elif guess not in alphabet:
        print("Please enter a lowercase English letter ")
    elif guess in guessed:
        print("You've already guessed this letter")
    else:
        return True
    return False
def generate_display(word, found):
    """Render *word* with letters not in *found* masked by '-'."""
    return "".join(c if c in found else "-" for c in word)
play()  # start one game immediately when the module is executed
|
# Generated by Django 2.1.5 on 2019-03-09 17:44
from django.db import migrations, models
class Migration(migrations.Migration):
    """Swap six single-column date indexes for composite (date, bbl) indexes.

    Auto-generated by Django 2.1.5.  For each affected model the old
    date-only index is removed and re-created with ``bbl`` as a second
    column, so queries filtering on the date *and* bbl can be served by
    one index.  Index names are Django-generated hashes — do not edit.
    """
    dependencies = [
        ('datasets', '0011_more_indexes_opposite'),
    ]
    operations = [
        migrations.RemoveIndex(
            model_name='eviction',
            name='datasets_ev_execute_df309c_idx',
        ),
        migrations.RemoveIndex(
            model_name='acrisreallegal',
            name='datasets_ac_documen_43e391_idx',
        ),
        migrations.RemoveIndex(
            model_name='doblegacyfiledpermit',
            name='datasets_do_dobrund_8199f1_idx',
        ),
        migrations.RemoveIndex(
            model_name='lispenden',
            name='datasets_li_filedda_d10e72_idx',
        ),
        migrations.RemoveIndex(
            model_name='dobissuedpermit',
            name='datasets_do_issueda_182d33_idx',
        ),
        migrations.RemoveIndex(
            model_name='housinglitigation',
            name='datasets_ho_caseope_06acde_idx',
        ),
        migrations.AddIndex(
            model_name='eviction',
            index=models.Index(fields=['executeddate', 'bbl'], name='datasets_ev_execute_5a8679_idx'),
        ),
        migrations.AddIndex(
            model_name='acrisreallegal',
            index=models.Index(fields=['documentid', 'bbl'], name='datasets_ac_documen_c65051_idx'),
        ),
        migrations.AddIndex(
            model_name='doblegacyfiledpermit',
            index=models.Index(fields=['dobrundate', 'bbl'], name='datasets_do_dobrund_64fc28_idx'),
        ),
        migrations.AddIndex(
            model_name='lispenden',
            index=models.Index(fields=['fileddate', 'bbl'], name='datasets_li_filedda_c549ca_idx'),
        ),
        migrations.AddIndex(
            model_name='dobissuedpermit',
            index=models.Index(fields=['issuedate', 'bbl'], name='datasets_do_issueda_c5f2b2_idx'),
        ),
        migrations.AddIndex(
            model_name='housinglitigation',
            index=models.Index(fields=['caseopendate', 'bbl'], name='datasets_ho_caseope_8e1ddf_idx'),
        ),
    ]
|
# flake8: noqa
from mstrio.server.project import *
from mstrio.utils import helper
# Emit a deprecation warning at import time: this module was renamed from
# `mstrio.server.application` to `mstrio.server.project` as of 11.3.4.101.
helper.deprecation_warning(
    '`mstrio.server.application`',
    '`mstrio.server.project`',
    '11.3.4.101', # NOSONAR
    False)
|
from typing import List
class Solution:
    """LeetCode 452 — Minimum Number of Arrows to Burst Balloons."""

    def findMinArrowShots(self, points: List[List[int]]) -> int:
        """Return the minimum number of arrows needed to burst all balloons.

        Each balloon is an interval [start, end].  After sorting by start,
        balloons whose starts fall inside the running overlap window can
        share one arrow; the window shrinks to the smallest end seen.
        """
        points.sort()
        number = len(points)
        overlap_end = None
        for start, end in points:
            # BUG FIX: the original truthiness test `if overlap_end and ...`
            # treated a window ending at coordinate 0 as "no window",
            # over-counting arrows.  Compare against None explicitly.
            if overlap_end is not None and overlap_end >= start:
                number -= 1
                overlap_end = min(overlap_end, end)
            else:
                overlap_end = end
        return number
# Runtime: 464 ms, faster than 43.87% of Python3 online submissions for Minimum Number of Arrows to Burst Balloons.
# Memory Usage: 19.2 MB, less than 97.86% of Python3 online submissions for Minimum Number of Arrows to Burst Balloons.
|
import numpy as np
import cv2
from constants import NUMPY_IMAGES_DATA_PATH, NUMPY_LABEL_DATA_PATH, FACE_SIZE, EMOTIONS, MODEL_CHECKPOINT, MODEL_PATH, FACE_CASCADE_PATH
import tflearn
from tflearn.layers.core import input_data, dropout, fully_connected
from tflearn.layers.conv import conv_2d, max_pool_2d
from tflearn.layers.estimator import regression
from os.path import isfile
class FaceMe:
    """Emotion classifier wrapping a pre-trained tflearn CNN.

    Loads the checkpointed model from MODEL_PATH at construction time and
    predicts a probability per label in EMOTIONS for a grayscale face.
    """

    def __init__(self):
        print("[:)] Initializing app")
        # Defining the model: two conv/pool stages, a third conv with
        # dropout, then two fully-connected layers ending in softmax.
        net = input_data(shape=[None, FACE_SIZE, FACE_SIZE, 1])
        net = conv_2d(net, 64, 5, activation='relu')
        net = max_pool_2d(net, 3, strides=2)
        net = conv_2d(net, 64, 5, activation='relu')
        net = max_pool_2d(net, 3, strides=2)
        net = conv_2d(net, 128, 4, activation='relu')
        net = dropout(net, 0.3)
        net = fully_connected(net, 3072, activation='relu')
        net = fully_connected(net, len(EMOTIONS), activation='softmax')
        net = regression(net, optimizer='momentum',
                         loss='categorical_crossentropy')
        self.model = tflearn.DNN(net,
                                 checkpoint_path=MODEL_CHECKPOINT,
                                 max_checkpoints=1,
                                 tensorboard_verbose=2)
        self.model.load(MODEL_PATH)
        print("[:)] Model loaded from ", MODEL_PATH)

    def predict(self, image):
        """Return the per-emotion probabilities for one grayscale face
        image, or None when the model or the image is missing."""
        if self.model is None:
            print("[:(] Cannot find the model")
            return None
        if image is None:
            print("[:(] Image is none")
            return None
        batch = image.reshape([-1, FACE_SIZE, FACE_SIZE, 1])
        return self.model.predict(batch)[0]
def apply_offsets(face_coordinates, offsets):
    """Expand an (x, y, width, height) box by (x_off, y_off) margins.

    Returns the expanded box as (x1, x2, y1, y2) edge coordinates.
    """
    left, top, width, height = face_coordinates
    margin_x, margin_y = offsets
    x1 = left - margin_x
    x2 = left + width + margin_x
    y1 = top - margin_y
    y2 = top + height + margin_y
    return x1, x2, y1, y2
def draw_bounding_box(face_coordinates, image_array, color):
    """Draw a 2-px rectangle around (x, y, w, h) on image_array, in place."""
    left, top, width, height = face_coordinates
    cv2.rectangle(image_array, (left, top),
                  (left + width, top + height), color, 2)
def draw_text(coordinates, image_array, text, color, x_offset=0, y_offset=0,
              font_scale=2, thickness=2):
    """Draw anti-aliased *text* near the first two coords, in place."""
    anchor_x, anchor_y = coordinates[:2]
    position = (anchor_x + x_offset, anchor_y + y_offset)
    cv2.putText(image_array, text, position, cv2.FONT_HERSHEY_SIMPLEX,
                font_scale, color, thickness, cv2.LINE_AA)
def show_summary(result):
    """Print the relative frequency of each emotion tally in *result*.

    result -- dict mapping emotion label -> observation count.
    Non-dict input is ignored.  An empty dict or an all-zero tally is
    also ignored (the original code raised ZeroDivisionError on 0/0).
    """
    if not isinstance(result, dict):
        return
    total = sum(result.values())
    if total == 0:
        # Nothing observed; avoid dividing by zero below.
        return
    print("[:)] Emotion statistics")
    for key, count in result.items():
        print(key, ":", count / total)
if __name__ == "__main__":
    # Run emotion detection over a video file, drawing boxes/labels on each
    # frame and accumulating a per-emotion tally for the final summary.
    fm = FaceMe()
    emotion_offsets = (30, 30)
    face_cascade = cv2.CascadeClassifier(FACE_CASCADE_PATH)
    # NOTE(review): hard-coded absolute video path — consider taking it
    # from argv or a constant so the script runs outside this machine.
    cap = cv2.VideoCapture('/Users/roshanalwis/Documents/Projects/Python/FaceMe/ODD.mp4')
    result = {}
    while cap.isOpened(): # True:
        ret, bgr_image = cap.read()
        if bgr_image is None:
            break
        # Gray for detection/classification, RGB for annotation.
        gray_image = cv2.cvtColor(bgr_image, cv2.COLOR_BGR2GRAY)
        rgb_image = cv2.cvtColor(bgr_image, cv2.COLOR_BGR2RGB)
        faces = face_cascade.detectMultiScale(gray_image, scaleFactor=1.1, minNeighbors=5, minSize=(30, 30),
                                              flags=cv2.CASCADE_SCALE_IMAGE)
        for face_coordinates in faces:
            # Expand the detected box; skip faces clipped by the frame edge.
            x1, x2, y1, y2 = apply_offsets(face_coordinates, emotion_offsets)
            if x1 < 0 or x2 < 0 or y1 < 0 or y2 < 0:
                continue
            gray_face = gray_image[y1:y2, x1:x2]
            try:
                # 48x48 matches the model input (FACE_SIZE presumably 48 —
                # confirm against constants.py).
                gray_face = cv2.resize(gray_face, (48, 48))
            except:
                continue
            # gray_face = preprocess_input(gray_face, True)
            # gray_face = np.expand_dims(gray_face, 0)
            # gray_face = np.expand_dims(gray_face, -1)
            prediction = fm.predict(gray_face)
            prediction_text = EMOTIONS[np.argmax(prediction)]
            # Tally predictions for the end-of-run summary.
            if prediction_text in result.keys():
                result[prediction_text] += 1
            else:
                result[prediction_text] = 1
            draw_bounding_box(face_coordinates, rgb_image, 1)
            draw_text(face_coordinates, rgb_image, prediction_text,
                      200, 0, -45, 1, 1)
        bgr_image = cv2.cvtColor(rgb_image, cv2.COLOR_RGB2BGR)
        cv2.imshow('window_frame', bgr_image)
        # 'q' quits the playback loop.
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
    show_summary(result)
    cap.release()
    cv2.destroyAllWindows()
|
from pyasn1.type import univ
from pyasn1.codec.der import encoder as DERencoder, decoder as DERdecoder
from j2735 import *
from Plausability import nmea2deg
import datetime
BSMcounter = 0
def createJ2735BSM(status, nmealatitude,nmealongitude, altitude, speed, vehicleWidth, vehicleLength):
    """Build and DER-encode a J2735 BasicSafetyMessage.

    Converts NMEA lat/long to degrees, stamps the current sub-second time,
    and fills the remaining BSM components.  Uses and increments the
    module-level BSMcounter as the msgCnt sequence number (wraps at 127).
    Returns the DER-encoded bytes.
    """
    # - Fixed TemporaryID
    # - Fixed Accuracy
    # - speed in m/s
    # - Fixed Heading
    # - Fixed Angle
    # - Fixed Acceleration
    BSM = BasicSafetyMessage()
    BSM.setComponentByName('msgID',2) #basicSafetyMessage
    global BSMcounter
    BSM.setComponentByName('msgCnt',BSMcounter)
    BSM.setComponentByName('id',status)
    nowTime = datetime.datetime.now()
    # First four digits of the microsecond field serve as the secMark —
    # NOTE(review): this is not standard J2735 milliseconds; confirm.
    timeMS = int(str(nowTime.microsecond)[0:4])
    #print "[createJ2735BSM]\tConverting to Degrees " + nmealatitude + " - " + nmealongitude
    newlat = nmea2deg(nmealatitude)
    newlon = nmea2deg(nmealongitude)
    #print "[createJ2735BSM]\tAfter Conversion " + str(newlat) + " - " + str(newlon)
    # Drop the decimal point so the coordinate encodes as one integer string.
    newlat = str(newlat).replace(".","")
    newlon = str(newlon).replace(".","")
    BSM.setComponentByName('secMark',timeMS)
    BSM.setComponentByName('lat',newlat)
    BSM.setComponentByName('long',newlon)
    BSM.setComponentByName('elev',altitude)
    BSM.setComponentByName('accuracy',"0000")
    # Speed in m/s scaled to an integer (cm/s).
    newSpeed = int(float(speed)*100)
    #newSpeedHex = hex(newSpeed)
    #newSpeedHex = newSpeedHex.split("x")
    transAndSpeed = TransmissionAndSpeed()
    transAndSpeed.setComponentByName('state',7)
    transAndSpeed.setComponentByName('speed',newSpeed)
    BSM.setComponentByName('speed',transAndSpeed)
    BSM.setComponentByName('heading',0)
    BSM.setComponentByName('angle',"0")
    BSM.setComponentByName('accelSet',"acceler")
    BSM.setComponentByName('brakes',"nn")
    vehicleSize = VehicleSize()
    vehicleSize.setComponentByName('width',vehicleWidth)
    vehicleSize.setComponentByName('length',vehicleLength)
    BSM.setComponentByName('size',vehicleSize)
    #print(BSM.prettyPrint())
    # Increase BSM sequencenumber and Prevent overflow
    BSMcounter = BSMcounter+1
    if(BSMcounter==127):
        BSMcounter = 0
    encodedMessage = DERencoder.encode(BSM)
    return encodedMessage
def createALaCarte(appID,initTS, recvTS, sourceIP, destinationIP, destPort ,content):
    """Build and DER-encode a J2735 ALaCarte message carrying *content*.

    msgID is fixed to 1; the remaining components are taken verbatim from
    the arguments.  Returns the DER-encoded bytes.
    """
    message = ALaCarte()
    # Fill components in the same order the original code used.
    for component, value in (('msgID', 1),
                             ('appID', appID),
                             ('initTS', initTS),
                             ('recvTS', recvTS),
                             ('source', sourceIP),
                             ('destination', destinationIP),
                             ('destPort', destPort),
                             ('appData', content)):
        message.setComponentByName(component, value)
    return DERencoder.encode(message)
|
import unittest
import pytest
import os
from .. import field
from .. import board
from .. import run
# https://flask.palletsprojects.com/en/1.0.x/testing/
# try:
# sys.path.append("..")
# import field
# import board
# import run
# except:
# from .. import field
# from .. import board
# from .. import run
# Absolute directory of this test module; used to locate board fixture files.
__location__ = os.path.realpath(
    os.path.join(os.getcwd(), os.path.dirname(__file__)))
class TestBoard(unittest.TestCase):
    """Minesweeper tests for the field and board modules.

    Covers Field state transitions, board construction for the named
    sizes, bomb counting, file-based board fixtures, recursive digging,
    bomb removal (dig_bomb) and the win condition.  ASCII diagrams in the
    comments show the fixture layouts ('B' = bomb).
    """
    def test_Field(self):
        """Field exposes coords/condition and supports bomb + flag toggling."""
        f1 = field.Field(1, 4, False)
        self.assertEqual("UNTOUCH", f1.get_condition())
        f1Dict = f1.toJson()
        self.assertEqual(f1Dict["cordX"], 1)
        self.assertEqual(f1Dict["cordY"], 4)
        self.assertEqual(f1Dict["condition"], "UNTOUCH")
        f1.set_Bomb()
        self.assertTrue(f1.is_Bomb())
        f1.flag()
        self.assertEqual("FLAG", f1.get_condition())
        f1.unFlag()
        self.assertEqual("UNTOUCH", f1.get_condition())
    def test_Field2(self):
        """Digging sets DUG and setBombCount shows up in toJson()."""
        f1 = field.Field(2, 5, False)
        f1.dig()
        f1Dict = f1.toJson()
        self.assertEqual("DUG", f1.get_condition())
        self.assertEqual("DUG", f1Dict["condition"])
        f1.setBombCount(2)
        f1Dict2 = f1.toJson()
        self.assertEqual(2, f1Dict2["bomb_count"])
    def test_field_string(self):
        """toString(): '-' untouched, 'F' flagged, ' ' dug, digit = bomb count."""
        f1 = field.Field(2, 5, False)
        self.assertEqual("-", f1.toString())
        f1.flag()
        self.assertEqual("F", f1.toString())
        f1.dig()
        self.assertEqual(" ", f1.toString())
        f1.setBombCount(1)
        self.assertEqual("1", f1.toString())
    def test_create_board(self):
        """Named sizes produce 9x9, 16x16 and 30x24 boards."""
        b1 = board.Board("small")
        b1Dict = b1.toJson()
        self.assertEqual(b1Dict["cordX"], 9)
        self.assertEqual(b1Dict["cordY"], 9)
        fields_len = len(b1Dict["fieldList"])
        self.assertEqual(81, fields_len)
        b2 = board.Board("medium")
        b2Dict = b2.toJson()
        self.assertEqual(b2Dict["cordX"], 16)
        self.assertEqual(b2Dict["cordY"], 16)
        fields_len2 = len(b2Dict["fieldList"])
        self.assertEqual(256, fields_len2)
        b3 = board.Board("large")
        b3Dict = b3.toJson()
        self.assertEqual(b3Dict["cordX"], 30)
        self.assertEqual(b3Dict["cordY"], 24)
        fields_len3 = len(b3Dict["fieldList"])
        self.assertEqual(720, fields_len3)
    def test_create_board2(self):
        """Linear field index maps row-major to (x, y) coordinates."""
        b4 = board.Board("small")
        f4 = b4.get_field(0)
        self.assertEqual(0, f4.getX())
        self.assertEqual(0, f4.getY())
        f5 = b4.get_field(9)
        self.assertEqual(0, f5.getX())
        self.assertEqual(1, f5.getY())
        f6 = b4.get_field(17)
        self.assertEqual(8, f6.getX())
        self.assertEqual(1, f6.getY())
    def test_count_bombs(self):
        # 0.16 bombs per field
        b1 = board.Board("small")
        bomb_count = 0
        for i in range(9*9):
            f = b1.get_field(i)
            if f.is_Bomb():
                bomb_count += 1
        self.assertEqual(13, bomb_count)
        # 0.16 bombs per field
        b2 = board.Board("medium")
        bomb_count2 = 0
        for i in range(16*16):
            f = b2.get_field(i)
            if f.is_Bomb():
                bomb_count2 += 1
        self.assertEqual(41, bomb_count2)
    def test_parse_board(self):
        """Boards loaded from a fixture file place bombs at the marked cells."""
        file1 = os.path.join(__location__, 'boards/board2')
        b2 = board.Board("any", file1)
        self.assertEqual(4, b2.getXandY()[0])
        self.assertEqual(5, b2.getXandY()[1])
        bomb_count = 0
        for i in range(4*5):
            f = b2.get_field(i)
            if f.is_Bomb():
                bomb_count += 1
        f1 = b2.get_field(0)
        f2 = b2.get_field(16)
        f3 = b2.get_field(19)
        f4 = b2.get_field(18)
        self.assertEqual(3, bomb_count)
        self.assertTrue(f1.is_Bomb())
        self.assertTrue(f2.is_Bomb())
        self.assertTrue(f3.is_Bomb())
        self.assertFalse(f4.is_Bomb())
    def test_return_index(self):
        """(x, y) with board dims converts back to the linear index."""
        i1 = board.Board.return_index(0, 0, 2, 3)
        self.assertEqual(0, i1)
        i5 = board.Board.return_index(1, 1, 3, 2)
        self.assertEqual(4, i5)
    # B 1 0
    # 1 1 0
    def test_count_right(self):
        """Neighbor bomb counts on a 2x3 fixture (bombs report 0)."""
        file1 = os.path.join(__location__, 'boards/board4')
        b2 = board.Board("any", file1)
        answer_string = "0 1 0 1 1 0"
        answer = answer_string.split(" ")
        for i in range(2*3):
            f = b2.get_field(i)
            a = int(answer[i])
            self.assertEqual(a, f.getBombCount())
    # B 2 B 1 0
    # 1 2 1 1 0
    # 1 1 1 0 0
    # 1 B 1 1 1
    # 2 3 2 3 B
    # B 2 B 3 B
    def test_count_full(self):
        """Neighbor bomb counts on the full 5x6 fixture board."""
        file1 = os.path.join(__location__, 'boards/board3')
        b2 = board.Board("any", file1)
        answer_string = "0 2 0 1 0 " + "1 2 1 1 0 " + "1 1 1 0 0 " + "1 0 1 1 1 " + "2 3 2 3 1 " + "0 2 0 3 1"
        answer = answer_string.split(" ")
        for i in range(5*6):
            f = b2.get_field(i)
            a = int(answer[i])
            self.assertEqual(a, f.getBombCount())
    # 0 1
    # 2 3
    # 3 4
    def test_return_x_y(self):
        """Linear index with board dims converts to (x, y)."""
        x = board.Board.return_x_y(0, 2, 3)[0]
        y = board.Board.return_x_y(0, 2, 3)[1]
        self.assertEqual(0, x)
        self.assertEqual(0, y)
        x = board.Board.return_x_y(1, 2, 3)[0]
        y = board.Board.return_x_y(1, 2, 3)[1]
        self.assertEqual(1, x)
        self.assertEqual(0, y)
        # 0 1
        # 2 3
        # 4 5
        x = board.Board.return_x_y(3, 2, 3)[0]
        y = board.Board.return_x_y(3, 2, 3)[1]
        self.assertEqual(1, x)
        self.assertEqual(1, y)
        # 0 1 2
        # 3 4 5
        # 6 7 8
        x = board.Board.return_x_y(8, 3, 3)[0]
        y = board.Board.return_x_y(8, 3, 3)[1]
        self.assertEqual(2, x)
        self.assertEqual(2, y)
    # 1 0 0
    # 0 0 0
    def test_dig_board(self):
        """Digging one field marks it DUG and reports how many were opened."""
        file1 = os.path.join(__location__, 'boards/board4')
        b1 = board.Board("any", file1)
        qty1 = b1.dig(5)
        b1Dict = b1.toJson()
        fl = b1Dict["fieldList"][5]["condition"]
        self.assertEqual("DUG", fl)
        self.assertEqual(4, qty1)
    # 0 0 0 0
    def test_dig_recursive(self):
        """Digging a zero-count field floods to all connected fields."""
        file1 = os.path.join(__location__, 'boards/board6')
        b1 = board.Board("any", file1)
        answ = "- - - -"
        self.assertEqual(answ, b1.toString())
        qty1 = b1.dig(0)
        fields = b1.toJson()["fieldList"]
        # print("fields", fields)
        # NOTE(review): `fields[f]` with `f` drawn from iterating `fields`
        # only works if fieldList is a dict keyed by its iteration items
        # (e.g. indices); with a plain list this would raise TypeError —
        # confirm against Board.toJson().
        for f in fields:
            print("f", f)
            self.assertEqual("DUG", fields[f]["condition"])
        answ = "       "
        self.assertEqual(answ, b1.toString())
        self.assertEqual(4, qty1)
    #    5 6
    # 0 0 0 0 0
    # 0 0 0 0 0
    # 0 0 0 0 0   # 2 2
    # 0 0 0 1 1
    # 1 2 1 2 B
    # B 2 B 2 1
    def test_dig_recursive_full(self):
        """Flood fill stops at numbered boundary cells; bombs stay hidden."""
        file1 = os.path.join(__location__, 'boards/board7_1')
        b1 = board.Board("any", file1)
        answ1 = "- - - - -\n" + "- - - - -\n" + "- - - - -\n" + "- - - - -\n"\
            + "- - - - -\n" + "- - - - -"
        self.assertEqual(answ1, b1.toString())
        qty1 = b1.dig(13)
        answ2 = "         \n" + "         \n" + "         \n" + "      1 1\n"\
            + "1 2 1 2 -\n" + "- - - - -"
        self.assertEqual(answ2, b1.toString())
        self.assertEqual(24, qty1)
    #    5 6
    # B 2 B 1 0
    # 1 2 1 1 0
    # 1 1 1 0 0
    # 1 B 1 1 1 //<-
    # 2 3 2 3 B
    # B 2 B 3 B
    def test_dig_bomb1(self):
        """dig_bomb removes the bomb, digs the cell and recounts neighbors."""
        file1 = os.path.join(__location__, 'boards/board3')
        b1 = board.Board("any", file1)
        qty1 = b1.dig(9)
        answ = "- - - 1  \n" + "- - 1 1  \n" + "- - 1    \n" \
            + "- - 1 1 1\n" + "- - - - -\n" + "- - - - -"
        self.assertEqual(answ, b1.toString())
        self.assertEqual(11, qty1)
        b1.dig_bomb(16)
        f1 = b1.get_field(16)
        self.assertFalse(f1.is_Bomb())
        self.assertEqual("DUG", f1.get_condition())
        self.assertEqual(0, f1.getBombCount())
        #    5 6
        # B 2 B 1 0
        # 1 2 1 1 0
        # 0 0 0 0 0
        # 0 0 0 1 1 //<-
        # 1 2 1 3 B
        # B 2 B 3 B
        answ2 = "- - - 1  \n" + "1 2 1 1  \n" + "         \n" + \
            "      1 1\n" + "1 2 1 3 -\n" + "- - - - -"
        self.assertEqual(answ2, b1.toString())
        b1.dig_bomb(0)
        f2 = b1.get_field(0)
        self.assertFalse(f2.is_Bomb())
        self.assertEqual("DUG", f2.get_condition())
        self.assertEqual(0, f2.getBombCount())
        #    5 6
        # B 2 B 1 0
        # 1 2 1 1 0
        # 0 0 0 0 0
        # 0 0 0 1 1 //<-
        # 1 2 1 3 B
        # B 2 B 3 B
        #
        answ3 = "  1 - 1  \n" + "  1 1 1  \n" + "         \n" + \
            "      1 1\n" + "1 2 1 3 -\n" + "- - - - -"
        # print("b1.toString()", b1.toString())
        # print("b1.toJson()", b1.toJson())
        self.assertEqual(answ3, b1.toString())
    # 0 0 0 0
    # 0 0 1 1
    # 0 1 2 B
    # 0 1 B 2
    # 0 1 1 1
    def test_dig_bomb3(self):
        """dig_bomb on an inner bomb triggers a further recursive flood."""
        file1 = os.path.join(__location__, 'boards/board8')
        b1 = board.Board("any", file1)
        b1.dig(0)
        answ = "       \n" + "    1 1\n" + "  1 2 -\n" + "  1 - -\n" + "  1 - -"
        # print("from board", b1.toString())
        self.assertEqual(answ, b1.toString())
        # print("START DIG")
        b1.dig_bomb(14)
        # print("DIG 14!", b1.toString())
        answ1 = "       \n" + "    1 1\n" + "  1   -\n" + "  1 1 1\n" + "       "
        self.assertEqual(answ1, b1.toString())
    # B 1 0
    # 1 1 0
    def test_win_board(self):
        """Game is won when all safe cells are dug and bombs are flagged."""
        file1 = os.path.join(__location__, 'boards/board4')
        b1 = board.Board("any", file1)
        gameWin = b1.toJson()["gameWin"]
        self.assertFalse(gameWin)
        b1.dig(2)
        gameWin1 = b1.toJson()["gameWin"]
        self.assertFalse(gameWin1)
        b1.dig(3)
        b1.dig(4)
        b1.dig(5)
        gameWin2 = b1.toJson()["gameWin"]
        self.assertFalse(gameWin2)
        b1.flag(0)
        gameWin3 = b1.toJson()["gameWin"]
        self.assertTrue(gameWin3)
    # B 1 0
    # 1 1 0
    def test_win_board2(self):
        """Winning also works when the last bomb is removed via dig_bomb."""
        file1 = os.path.join(__location__, 'boards/board4')
        b1 = board.Board("any", file1)
        gameWin = b1.toJson()["gameWin"]
        self.assertFalse(gameWin)
        b1.dig(5)
        b1.dig_bomb(0)
        gameWin1 = b1.toJson()["gameWin"]
        print("b1.toJson, gameWin", gameWin, b1.toJson())
        self.assertTrue(gameWin1)
# Allow running this test module directly with verbose output.
if __name__ == '__main__':
    unittest.main(verbosity=2)
|
from math import log
from random import choice
# optimal number of exploration rounds per arm for explore-then-commit
def optimal_m(n, delta):
    """Return the ETC exploration length m(n, delta), clamped to >= 1."""
    raw = 4 / (delta ** 2) * log(n * (delta ** 2) / 4) + 1
    return max(1, int(raw))
# the upper bound of regret
def regret_upper_bound(delta, n):
    """Minimax regret bound for ETC with gap *delta* over *n* rounds.

    Returns the tighter of the trivial linear bound n*delta and the
    logarithmic bound delta + (4/delta)(1 + max(0, log(n delta^2 / 4))).
    """
    linear_bound = n * delta
    log_term = max(0, log(n * (delta ** 2) / 4))
    logarithmic_bound = delta + 4 / delta * (1 + log_term)
    return min(linear_bound, logarithmic_bound)
'''
Explore-then-commit Algorithm:
input:
bandit -- the bandit to play
n -- number of total rounds
m -- number of explore rounds
'''
def ETC(bandit, n, m):
    """Explore-then-commit algorithm.

    Plays every arm of *bandit* exactly *m* times, commits to the arm with
    the highest empirical reward (ties broken uniformly at random), and
    plays it for the remaining n - m*k rounds.

    Args:
        bandit: bandit to play; exposes `K` (arm count) and `pull(i)`.
        n: number of total rounds.
        m: number of explore rounds per arm.

    Returns:
        The index of the committed arm (new, backward-compatible: the
        original returned None).
    """
    k = bandit.K
    results = [0] * k
    # Exploration: play each arm a fixed m number of times.
    for i in range(k):
        for _ in range(m):
            results[i] += bandit.pull(i)
    # Randomly choose among the arms tied for the best empirical reward.
    best = max(results)
    options = [i for i, x in enumerate(results) if x == best]
    opt = choice(options)
    # Commitment: play the chosen arm for the remaining rounds.
    for _ in range(m * k, n):
        bandit.pull(opt)
    return opt
|
# -*- coding: utf-8 -*-
from selenium import webdriver
import time
import pandas as pd
from pyquery import PyQuery as pq
# df_all = pd.DataFrame(columns=['title', 'content'])
def crawl_webPage(n):
    """Collect essay URLs and titles from the first *n* listing pages.

    Fetches pages over the network via pyquery.  Returns a pair of
    parallel lists (page_url, title_list).
    """
    web_url = 'https://www.99zuowen.com/yilunwen/chuzhong/'
    page_url = []
    title_list = []
    for page_no in range(1, n + 1):
        # BUG-RISK FIX: the inner loop previously reused `i`, shadowing the
        # outer page counter; distinct names make the intent clear.
        doc = pq(web_url + '826-' + str(page_no) + '.html')
        for link in doc('.article li a').items():
            if link.attr('title') is not None:
                page_url.append(link.attr('href'))
                # strip('<b>') removes the characters '<', 'b', '>' from both
                # ends of the title (behavior kept from the original).
                title_list.append(link.attr('title').strip('<b>'))
    return page_url, title_list
def crawl_content(page, title):
    """Download each essay page and write (title, content) rows to zuowen.csv.

    Args:
        page: list of essay page URLs.
        title: list of titles parallel to *page*.

    Network I/O via pyquery; writes ./zuowen.csv (utf-8 with BOM).
    """
    content_list = []
    for done, url in enumerate(page, start=1):
        doc = pq(url)
        # Concatenate every paragraph of the essay body.
        content_list.append(''.join(p.text() for p in doc('.content div p').items()))
        # Progress indicator every 50 pages.
        if done % 50 == 0:
            print(done)
    print('title:', len(title))
    print('content:', len(content_list))
    data = {'title': title, 'content': content_list}
    df_all = pd.DataFrame(data)
    df_all.to_csv('./zuowen.csv', index=False, encoding='utf_8_sig')
def main():
    # Crawl 9 listing pages, then download and export every essay to CSV.
    page, title = crawl_webPage(9)
    print(len(page))
    crawl_content(page, title)
main()  # run the crawl when the module is executed
|
"""Server URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from rest_framework import routers
from Auth.views import UserViewSet, ObtainAuthToken
from KittyKrawler.views import SaveView
from django.contrib.auth import views as auth_views
# DRF router providing /api/user/, /api/save/ and /api/token/ endpoints.
router = routers.DefaultRouter()
router.register(r'user', UserViewSet)
router.register(r'save', SaveView)
# NOTE(review): `base_name` was renamed `basename` in DRF 3.9+ — keep in
# mind before upgrading rest_framework.
router.register(r'token', ObtainAuthToken, base_name='token')
urlpatterns = [
    # REST API and Django's built-in password-reset flow.
    url(r'^api/', include(router.urls)),
    url(r'^password_reset/$', auth_views.password_reset, name='password_reset'),
    url(r'^password_reset/done/$', auth_views.password_reset_done, name='password_reset_done'),
    url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
        auth_views.password_reset_confirm, name='password_reset_confirm'),
    url(r'^reset/done$', auth_views.password_reset_complete, name='password_reset_complete'),
    url(r'^admin/', admin.site.urls),
    url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
    # Catch-all: everything else is handled by the website app.
    url(r'^', include('website.urls'))
]
# To obtain a security token:
# curl -k -H "Content-Type: application/json" -X POST -d '{"username":"[username]","password":"[password]"}' https://byteme.online/auth/token/
# Returns:
# { 'token' : '9944b09199c62bcf9418ad846dd0e4bbdfc6ee4b' }
# Then make more requests like:
# curl -k -H 'Authorization: Token 9944b09199c62bcf9418ad846dd0e4bbdfc6ee4b' -X GET https://byteme.online/auth/user/ |
import asyncio
import unittest
from decimal import Decimal
from hummingbot.connector.gateway.gateway_in_flight_order import GatewayInFlightOrder
from hummingbot.core.data_type.common import OrderType, TradeType
from hummingbot.core.data_type.in_flight_order import OrderState, OrderUpdate
s_decimal_0 = Decimal("0")
class GatewayInFlightOrderUnitTests(unittest.TestCase):
    """Unit tests for GatewayInFlightOrder.

    Covers the approval-request life cycle, defaults on freshly created
    trade orders, transaction-hash propagation through OrderUpdate
    misc_updates, and JSON round-tripping.

    NOTE(review): several orders below are built with
    ``trading_pair=self.quote_asset`` ("HBOT") even though
    ``cls.trading_pair`` ("COINALPHA-HBOT") exists — looks like the full
    pair was intended; confirm before changing.
    """
    @classmethod
    def setUpClass(cls) -> None:
        # Shared fixtures: asset names, ids and a fixed nonce.
        super().setUpClass()
        cls.ev_loop = asyncio.get_event_loop()
        cls.base_asset = "COINALPHA"
        cls.quote_asset = "HBOT"
        cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}"
        cls.client_order_id = "someClientOrderId"
        cls.exchange_order_id = "someTxHash"
        cls.nonce = 1
    def test_order_life_cycle_of_token_approval_requests(self):
        """PENDING_APPROVAL order is an approval request until APPROVED."""
        order: GatewayInFlightOrder = GatewayInFlightOrder(
            client_order_id=self.client_order_id,
            trading_pair=self.quote_asset,
            order_type=OrderType.LIMIT,
            trade_type=TradeType.BUY,
            creation_timestamp=1652324823,
            initial_state=OrderState.PENDING_APPROVAL,
        )
        # Assert that order is in fact a Approval Request
        self.assertTrue(order.is_approval_request)
        self.assertTrue(order.is_pending_approval)
        order_update: OrderUpdate = OrderUpdate(
            trading_pair=order.trading_pair,
            update_timestamp=1652324824,
            new_state=OrderState.APPROVED,
            client_order_id=order.client_order_id,
            exchange_order_id=self.exchange_order_id,
        )
        order.update_with_order_update(order_update=order_update)
        self.assertFalse(order.is_pending_approval)
    def test_order_life_cycle_of_trade_orders(self):
        """Fresh trade order has default nonce, no exchange id, no cancel hash."""
        order: GatewayInFlightOrder = GatewayInFlightOrder(
            client_order_id=self.client_order_id,
            trading_pair=self.quote_asset,
            order_type=OrderType.LIMIT,
            trade_type=TradeType.BUY,
            price=Decimal("1"),
            amount=Decimal("1000"),
            creation_timestamp=1652324823,
            initial_state=OrderState.PENDING_CREATE,
        )
        # Nonce is not provided upon creation
        self.assertEqual(order.nonce, -1)
        # Exchange Order Id for GatewayInFlightOrder is only assigned after a TradeUpdate
        self.assertIsNone(order.exchange_order_id)
        # CancelTxHash is not initialized on creation
        self.assertIsNone(order.cancel_tx_hash)
    def test_update_creation_transaction_hash_with_order_update(self):
        """creation_transaction_hash is picked up from misc_updates."""
        order: GatewayInFlightOrder = GatewayInFlightOrder(
            client_order_id=self.client_order_id,
            trading_pair=self.quote_asset,
            order_type=OrderType.LIMIT,
            trade_type=TradeType.BUY,
            price=Decimal("1"),
            amount=Decimal("1000"),
            creation_timestamp=1652324823,
            initial_state=OrderState.PENDING_CREATE,
            creation_transaction_hash=None,
        )
        self.assertIsNone(order.creation_transaction_hash)
        desired_creation_transaction_hash = "someTransactionHash"
        order_update = OrderUpdate(
            trading_pair=self.trading_pair,
            update_timestamp=1652324823 + 1,
            new_state=OrderState.OPEN,
            client_order_id=self.client_order_id,
            exchange_order_id="someExchangeOrderID",
            misc_updates={
                "creation_transaction_hash": desired_creation_transaction_hash,
            }
        )
        order.update_with_order_update(order_update=order_update)
        self.assertEqual(desired_creation_transaction_hash, order.creation_transaction_hash)
    def test_update_cancelation_transaction_hash_with_order_update(self):
        """cancelation_transaction_hash in misc_updates sets cancel_tx_hash."""
        order: GatewayInFlightOrder = GatewayInFlightOrder(
            client_order_id=self.client_order_id,
            trading_pair=self.quote_asset,
            order_type=OrderType.LIMIT,
            trade_type=TradeType.BUY,
            price=Decimal("1"),
            amount=Decimal("1000"),
            creation_timestamp=1652324823,
            initial_state=OrderState.PENDING_CREATE,
        )
        self.assertIsNone(order.creation_transaction_hash)
        desired_cancelation_transaction_hash = "someTransactionHash"
        order_update = OrderUpdate(
            trading_pair=self.trading_pair,
            update_timestamp=1652324823 + 1,
            new_state=OrderState.OPEN,
            client_order_id=self.client_order_id,
            exchange_order_id="someExchangeOrderID",
            misc_updates={
                "cancelation_transaction_hash": desired_cancelation_transaction_hash,
            }
        )
        order.update_with_order_update(order_update=order_update)
        self.assertEqual(desired_cancelation_transaction_hash, order.cancel_tx_hash)
    def test_to_and_from_json(self):
        """to_json/from_json round-trip preserves equality."""
        base_order = GatewayInFlightOrder(
            client_order_id=self.client_order_id,
            trading_pair=self.quote_asset,
            order_type=OrderType.LIMIT,
            trade_type=TradeType.BUY,
            price=Decimal("1"),
            amount=Decimal("1000"),
            creation_timestamp=1652324823,
            initial_state=OrderState.PENDING_CREATE,
        )
        base_order.last_update_timestamp = 1652324824
        order_json = base_order.to_json()
        derived_order = GatewayInFlightOrder.from_json(order_json)
        self.assertEqual(base_order, derived_order)
|
#!/usr/bin/env python2
'''
not sure if original awk produced best/intended matrix scores:
see also donor. Not consistent behaviour.
replicated original at this point
### org awk code below ###
#USAGE= $BIN/preparetrimatrixstart4parameter.awk 4 5 6 7 $DATA/set2/sites/starts.geneid.dimatrix
BEGIN {
st = ARGV[1];
nd = ARGV[2];
rd = ARGV[3];
th = ARGV[4];
ARGV[1] = ARGV[2] = ARGV[3] = ARGV[4] = "";
}
{
if (($1 == st && $2 !~ /A$/) ||
($1 == nd && $2 != "AT") ||
($1 == rd && $2 != "TG") ||
($1 == th && $2 !~ /^G/))
{
print $1, $2, -9999;
} else
if (($1 == st && $2 ~ /A$/) ||\
($1 == nd && $2 == "AT") ||
($1 == rd && $2 == "TG"))
print $1, $2, 0;
else
print;
}
'''
import sys
# Python 2 script (print statements).  Positional args mirror the original
# awk script: four matrix positions that receive special scores, then the
# input dimatrix file.  Each input line is: <position> <dinucleotide> <score>.
A_offset = int(sys.argv[1])
B_offset = int(sys.argv[2])
C_offset = int(sys.argv[3])
D_offset = int(sys.argv[4])
in_fn = sys.argv[5]
#print pre_offset, new_offset, pos_offset, in_fn
for line in open(in_fn).readlines():
    sl = line.split()
    col_one = int(sl[0])
    seq = sl[1]
    col_three = float(sl[2])
    #print sl
    #print pre_offset, new_offset, pos_offset,
    #print col_one, seq, seq[-1], col_three
    # Forbidden dinucleotides at the four special positions get -9999.
    #-9999 vals
    if ((col_one == A_offset) and (seq[-1] != "A" )):
        print col_one, seq, -9999
    elif ((col_one == B_offset) and (seq != "AT")):
        print col_one, seq, -9999
    elif ((col_one == C_offset) and (seq != "TG" )):
        print col_one, seq, -9999
    elif ((col_one == D_offset) and (seq[0] != "G" )):
        print col_one, seq, -9999
    # The mandatory dinucleotides score 0 (neutral).
    #zeros
    elif ((col_one == A_offset) and (seq[-1] == "A")):
        print col_one, seq, 0
    elif ((col_one == B_offset) and (seq == "AT")):
        print col_one, seq, 0
    elif ((col_one == C_offset) and (seq == "TG")):
        print col_one, seq, 0
    # NOTE: the D_offset zero-case is deliberately absent — it replicates
    # the original awk behavior (see the module docstring).
    #
    #elif ((col_one == D_offset ) and (seq[0] == "G")):
    #    print col_one, seq, 0
    else:
        print col_one, seq, col_three
|
1,IO:计算机输入是input,输出是output,因此我们把输入输出称为input/output,简写IO。python的IO是input/print.
2,计算机类型
整形
浮点型
字符串:单引号双引号需要用\转义eg:str='I\'m ok';
特殊 \n 换行 \t 制表符 \自身转义
r''内的字符串默认不转义
''''...'''提示符 换行
布尔值
空值:None
3,变量:变量名必须是大小写英文、数字、和_的组合,且不能用数字开头
4,常量:通常用大写的变量名来表示常量
5,字符编码:ord('a')获取字符整数表示;chr(65)把编码转换成对应的字符
Python的字符串类型是str,在内存中以Unicode表示,一个字符对应若干字节,如果在网络上传输,或者保存到磁盘上,就需要把str变为以字节为单位的bytes
'A'.encode('utf-8') 对str类型转换成bytes类型
b'A'.decode('utf-8')把bytes类型转换成str类型
str类型:字符为单位;bytes类型:字节为单位
注释:#!/usr/bin/env python3 这是一个python可执行程序
注释:# -*- coding: utf-8 -*-告诉python解释器,按照utf-8读取源代码
格式化:%用来格式化字符串
常见占位符:%d 整数%f 浮点数%s字符串%x十六进制数
eg:'hello,%s,成绩提高了%.2f%%' %('小明',23.342)
format()方法
eg:'Hello,{0},成绩提高了{1:.2f}%'.format('xiaoming ,23.342')
6, list()列表数据类型(相当于数组)
追加元素:classmates.append('') classmates.insert(index,'')
删除元素:classmates.pop() classmates.pop(index)
tuple()和list()类似都是python内置的有序集合,但是一旦初始化无法更改,没有append、insert、pop方法
特别:只有一个值时,末尾一定要加逗号 (2,)
7,if判断:只要非零数值、非空字符串、非空list就判断为True
在python中str类型和int类型不能够直接比较eg if 200>'100':
8,循环:for in 针对list和tuple
while 中break循环过程中直接退出循环和continue提前结束本轮循环,直接开始下一轮循环,都必须配合if语句使用
9,dict键值储存d={'Michale':95,'lee':85,'lucy':78} print(d['lee'] )
判断key是否存在 方法一:'Michael' in d 方法二: d.get('Michael')
添加:d['Jack']=75 直接添加
删除:d.pop('Jack') 使用pop方法
注意:dict内部存放的顺序和key放入的顺序是没有关系的
和list比较,dict特点:查找和插入的速度极快,不会随着key的增加变慢,但是需要占用大量内存,内存浪费多,是用空间换时间的方法
key:是不可变的,通过hash算法计算位置,只可以用字符串和整数等,不可以用list
10,set:创建set需要一个list作为输入的集合s=set([1,3,4,5,3,4]),
添加元素:add(key)
删除元素: remove(key)
交集,并集 s1&s2 ;s1|s2
11,不可变对象:对于不可变对象,调用对象自身的任意方法,也不会改变该对象自身的内容。相反,这些方法会创建新的对象并返回。
eg:'abc'.replace('a','A')
12,内置函数:
abs()
max() arguments可以是多个参数,也可以是list、tuple,set
int()
bool()
float()
str()
hex()整数转换十六进制数
13,自定义函数:def return关键字
空函数 :def nop(): pass
对参数类型做检查
def my_abs(x):
if not isinstance(x,(int,float)):
raise TypeError('bad operand type')
if x<0:
return -x
else:
return x
python函数返回多个值其实就是返回一个tuple
参数:位置参数
默认参数:默认参数必须指向不变对象
可变参数:在参数前加*,参数则会收到一个tuple
关键字参数:def person(name,age,**kw): **kw 传入一个dict
命名关键字参数:def person(name,age,*,city,address): *后只接收city、address作为关键字参数
在python中参数 传入的顺序是:必选参数、默认参数、可变参数、命名关键字参数、关键字参数
14,切片
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from briefcase.models import DocumentStatus, DocumentType, Document
class DocumentStatusAdmin(admin.ModelAdmin):
    """Admin for DocumentStatus; slug is pre-filled from the name field."""
    prepopulated_fields = {'slug': ('name',)}
class DocumentTypeAdmin(admin.ModelAdmin):
    """Default admin for DocumentType (no customizations)."""
    pass
class DocumentAdmin(admin.ModelAdmin):
    """Admin for Document: joined list queries and automatic authorship."""
    list_display = ('__unicode__', 'type', 'status', 'added_by', 'added_at', 'updated_at',)
    list_display_links = ('__unicode__',)
    list_filter = ('type', 'status',)
    def queryset(self, request):
        u"""
        Forces a JOIN to DocumentType and User models.
        Cannot be achieved by setting list_select_related=True, because
        the foreign key fields have null=True. We have an OUTER JOIN here.
        """
        # NOTE(review): ModelAdmin.queryset was renamed get_queryset in
        # Django 1.6; this override is only effective on older Django.
        qs = super(DocumentAdmin, self).queryset(request)
        return qs.select_related('type', 'added_by')
    def save_model(self, request, obj, form, change):
        # Stamp the creating user on first save only (not on edits).
        if not change:
            obj.added_by = request.user
        super(DocumentAdmin, self).save_model(request, obj, form, change)
# Hook the briefcase models into the Django admin with their custom options.
admin.site.register(DocumentStatus, DocumentStatusAdmin)
admin.site.register(DocumentType, DocumentTypeAdmin)
admin.site.register(Document, DocumentAdmin)
|
from rest_framework import serializers
from .models import *
#class UsuarioSerializer(serializers.ModelSerializer):
# class Meta:
# model = Usuario
# fields = ('id','username', 'password',)
class PosicionUsuarioSerializer(serializers.ModelSerializer):
    """Serializes a user's position (username plus lat/long coordinates)."""
    class Meta:
        model = PosicionUsuario
        fields = ('id', 'username', 'latitud', 'longitud',)
class PosicionBusSerializer(serializers.ModelSerializer):
    """Serializes one bus position sample (date, time, lat/long)."""
    class Meta:
        model = PosicionBus
        # Bug fix: DRF requires the attribute to be named `fields` (a
        # ModelSerializer without `fields`/`exclude` raises an assertion);
        # the empty-string entry was presumably meant to be 'id' — confirm
        # against the PosicionBus model.
        fields = ('id', 'fecha', 'hora', 'latitud', 'longitud',)
class PublicacionSerializer(serializers.ModelSerializer):
    """Serializes a user publication (post) with its date and description."""
    class Meta:
        model = Publicacion
        # Bug fix: `field` is ignored by DRF; the attribute must be `fields`,
        # otherwise ModelSerializer raises at instantiation time.
        fields = ('id', 'username', 'fecha', 'descripcion',)
class ConductorSerializer(serializers.ModelSerializer):
    """Serializes a bus driver (name and national ID)."""
    class Meta:
        model = Conductor
        # Bug fix: `field` → `fields` (DRF requires `fields` or `exclude`).
        fields = ('id', 'nombre', 'cedula',)
class BusSerializer(serializers.ModelSerializer):
    """Serializes a bus and its capacity/identification attributes."""
    class Meta:
        model = Bus
        # Bug fix: `field` → `fields` (DRF requires `fields` or `exclude`).
        fields = ('nDisco', 'conductor', 'tipo', 'marca', 'placa', 'modelo', 'cSentados', 'cParados',)
class HorarioSerializer(serializers.ModelSerializer):
    """Serializes a schedule entry, nesting related objects one level deep."""
    class Meta:
        model = Horario
        depth = 1
        # Bug fix: `field` → `fields` (DRF requires `fields` or `exclude`).
        fields = ('bus', 'conductor', 'creador', 'fecha',)
def posicionbus(queryset):
    """Serialize pairs of bus-position records into plain dictionaries.

    Each item of *queryset* is expected to be an indexable pair of objects
    exposing ``bus_id``, ``fecha``, ``latitud`` and ``longitud``.  Returns a
    list with one dict per pair, or an empty list for a falsy queryset.
    """
    if not queryset:
        return []

    def _point(record):
        # Flatten one position record to the wire format.
        return {
            'disco': record.bus_id,
            'fecha': str(record.fecha),
            'lat': record.latitud,
            'lon': record.longitud,
        }

    serialized = []
    for pair in queryset:
        serialized.append({
            'position_first': _point(pair[0]),
            'position_second': _point(pair[1]),
        })
    return serialized
|
import os
import tempfile
import mock
import pytest
@pytest.fixture(scope="session", autouse=True)
def _httpie_config_dir():
    """Point HTTPIE_CONFIG_DIR at a session-wide temporary directory."""
    # HTTPie reads HTTPIE_CONFIG_DIR from the environment, but only once, on
    # first package import (these tests run HTTPie in-process, not as a
    # subprocess).  To keep the user's real configuration untouched, the
    # variable must therefore be patched before the first import and the very
    # same directory must be reused for the entire session — otherwise later
    # tests would look for the credentials file in a different place.
    with tempfile.TemporaryDirectory() as config_dir, \
            mock.patch.dict(os.environ, {"HTTPIE_CONFIG_DIR": config_dir}):
        yield config_dir
|
"""lectur URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.conf import settings
from django.contrib import admin
from lectur_app import views;
from lectur_app.views import Home, Register,RegisterProfile, UpdateProfile , Prueba, Comunidades, VistaComunidad, Explora, Destacados, Inicio_sesion, Perfil, VistaForo, VistaCategoriaForo, VistaTemaForo, CrearComunidad, CrearTaller, RegistrarEspacio, VistaComentario, CrearTema, Vistajax;
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib.auth.decorators import login_required
from django.conf.urls.static import static
# URL routes: admin, auth, community/forum pages, and AJAX/comment endpoints.
# View functions that require a session are wrapped in login_required here.
urlpatterns = [
    url(r'^admin/', admin.site.urls),
    url(r'^$', Home.as_view(), name='index'),
    url(r'^registrate/$', Register.as_view(), name='registrar'),
    url(r'^register/perfil/$', RegisterProfile.as_view(), name='registrar_perfil'),
    url(r'^update/perfil/$', UpdateProfile.as_view(), name='actualizar_perfil'),
    url(r'^prueba/$', Prueba.as_view(), name='Prueba'),
    url(r'^comunidades/$', Comunidades.as_view(), name='todas_comunidades'),
    url(r'^comunidades/(?P<comunidad>[-_\w]+)/$', VistaComunidad.as_view(), name='comunidad'),
    url(r'^comunidades/(?P<comunidad>[-_\w]+)/(?P<tipo>[-_\w]+)/$', VistaComunidad.as_view(), name='comunidad_dos'),
    url(r'^explora/$', Explora.as_view(), name='explora_comunidades'),
    url(r'^destacados/$', Destacados.as_view(), name='destacados'),
    url(r'^inicia_sesion/$', Inicio_sesion.as_view(), name='inicia_sesion'),
    url(r'^cerrar_sesion/$', views.cerrar_sesion, name='cerrar_sesion'),
    url(r'^perfil/(?P<nombre>[-_\w]+)/$', Perfil.as_view(), name='vista_perfil'),
    url(r'^foro/(?P<foro>[-_\w]+)/$', VistaForo.as_view(), name='foro'),
    url(r'^foro/(?P<foro>[-_\w]+)/(?P<categoria>[-_\w]+)/$', VistaCategoriaForo.as_view(), name='foro_categoria'),
    url(r'^foro/(?P<foro>[-_\w]+)/(?P<categoria>[-_\w]+)/(?P<tema>[-_\w]+)/$', VistaTemaForo.as_view(), name='foro_tema'),
    url(r'^crea/comunidad/$', CrearComunidad.as_view(), name='crear_comunidad'),
    url(r'^crea/taller/(?P<comunidad>[-_\w]+)/$', CrearTaller.as_view(), name='crear_taller'),
    url(r'^crea/tema/(?P<comunidad>[-_\w]+)/(?P<categoria>[-_\w]+)/$', CrearTema.as_view(), name='crear_tema'),#tema
    url(r'^crea/espacio/(?P<comunidad>[-_\w]+)/$', RegistrarEspacio.as_view(), name='crear_espacio'),
    url(r'^comentar/foro/(?P<username>[-_\w]+)/(?P<pk>[0-9]+)/$', login_required(views.register_comment), name='crear_comentario'),
    url(r'^vista-comentario/(?P<pk>[0-9]+)/$', VistaComentario.as_view(), name='ver_comentario'),
    url(r'^respuestajax/(?P<respuesta>[ -_\w]+)/$', Vistajax.as_view(), name='vista_ajax'),
    url(r'^registrar-usuario-comunidad/(?P<username>[-_\w]+)/(?P<comunidad>[-_\w]+)/$', login_required(views.registrarUsuarioComunidad), name='registrar_usuario_comunidad'),
    url(r'^ver-notificaciones/(?P<username>[-_\w]+)/$', login_required(views.ver_notificaciones), name='ver_notificaciones'),
    url(r'^asistir-actividad/(?P<username>[-_\w]+)/(?P<taller>[-_\w]+)/$', login_required(views.unirActividad), name='unir_actividad'),
]
# Serve user-uploaded media in development.
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
# Read three integers and report the smallest, middle and largest one.
# Bug fix: the original did not parse — `elif a <c` had no colon or body,
# several `if`/`else` lines were missing colons, and the final assignments
# (`srednia =`, `najwieksza =`) had no right-hand side.  Sorting the three
# values gives the intended ordering directly.
print("Podaj 1 liczbe")
a = int(input())
print("Podaj 2 liczbe")
b = int(input())
print("Podaj 3 liczbe")
c = int(input())
najmniejsza, srednia, najwieksza = sorted((a, b, c))
print("najmniejsza:", najmniejsza)
print("srednia:", srednia)
print("najwieksza:", najwieksza)
|
from django.shortcuts import render
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from django.contrib.auth.decorators import login_required
# Create your views here.
# NOTE(review): login_url='pages/login' is a relative path fragment, not a
# named URL or absolute path — confirm it resolves as intended.
@login_required(login_url='pages/login')
def Base(request):
    """Render the base page (login required)."""
    return render(request, 'pages/base.html', {})
def dashboard(request):
    """Render the dashboard page."""
    return render(request, 'pages/dashboard.html', {})
def testing(request):
    """Render the testing page."""
    return render(request, 'pages/testing.html', {})
def allprojects(request):
    """Render the all-projects listing page."""
    return render(request, 'pages/allprojects.html', {})
def conference(request):
    """Render the conference page."""
    return render(request, 'pages/conference.html', {})
def createproject(request):
    """Render the create-project page."""
    return render(request, 'pages/createproject.html', {})
def calendar(request):
    """Render the calendar page."""
    # NOTE(review): this view name shadows the stdlib `calendar` module within
    # this module's namespace.
    return render(request, 'pages/calendar.html', {})
def cost(request):
    """Render the cost page."""
    return render(request, 'pages/cost.html', {})
def login(request):
    """Render the login page."""
    # Bug fix: removed the dead local `context = ()` — it was never used
    # (the render call passes a fresh empty dict, like the other views).
    return render(request, 'pages/login.html', {})
def profile(request):
    """Render the profile page."""
    return render(request, 'pages/profile.html', {})
def reportcreation(request):
    """Render the report-creation page."""
    return render(request, 'pages/reportcreation.html', {})
def risk(request):
    """Render the risk page."""
    return render(request, 'pages/risk.html', {})
def skillgap(request):
    """Render the skill-gap page."""
    return render(request, 'pages/skillgap.html', {})
def task(request):
    """Render the task page."""
    return render(request, 'pages/task.html', {})
def datavisual(request):
    """Render the data-visualisation page."""
    return render(request, 'pages/datavisual.html', {})
def discussion(request):
    """Render the discussion page."""
    return render(request, 'pages/discussion.html', {})
|
import sys
from PyQt5 import QtCore,QtWidgets,Qt
from PyQt5.QtWidgets import QApplication, QWidget, QLabel, QGridLayout
from PyQt5.QtGui import QMovie,QPixmap
# Build a fixed-size medication-reminder window: a header banner, a pill
# image, and a dosage label arranged in a grid.
app = QApplication(sys.argv)
window = QWidget()
window.setFixedSize(720,480)
# Set the window background to Light Cyan.  (translated from Thai)
window.setStyleSheet("background: #B0E2FF;")
grid = QGridLayout()
# Header banner: red rounded box with white text.
i1 = QLabel()
i1.setStyleSheet(
    "background-color: #CD5C5C;"+
    "border-radius: 15px;"+
    "font-size: 30px;"+
    "color: 'white';"+
    "font: 'TH Sarabun New';"
)
i1.setText("! ! แจ้งเตือนการทานยา ! !")
i1.setAlignment(QtCore.Qt.AlignCenter)
# NOTE(review): "icon\medicine.png" relies on '\m' not being a recognised
# escape sequence (a SyntaxWarning on recent Python); prefer a raw string or
# os.path.join — confirm the asset path.
i2pic = QPixmap("icon\medicine.png")
i2 = QLabel()
i2.setPixmap(i2pic)
i2.setAlignment(QtCore.Qt.AlignCenter)
# Dosage label styled like the header banner.
i3 = QLabel()
i3.setStyleSheet("background-color: #CD5C5C;"+
    "border-radius: 15px;"+
    "font-size: 30px;"+
    "color: 'white';"+
    "font: 'TH Sarabun New';"
)
i3.setText("ยาฆ่าเชื้อ<br>1 เม็ด")
i3.setAlignment(QtCore.Qt.AlignCenter)
# Layout: banner spans both columns; image and label sit side by side.
grid.addWidget(i1,0,0,1,2)
grid.addWidget(i2,1,0,4,1)
grid.addWidget(i3,1,1,4,1)
window.setLayout(grid)
window.show()
sys.exit(app.exec_())
#082.py
class MyClass:
    """Expose the length of any sized sequence via a method (exercise 082)."""

    def get_length(self, seq):
        """Return how many items *seq* contains."""
        size = len(seq)
        return size
# Demo: print the length of a DNA string using the class above.
obj = MyClass()
seq = "ACGTACGT"
print(obj.get_length(seq))
|
# -*- coding: utf-8 -*-
# @Author : nJcx
# @Email : njcx86@gmail.com
from pyspark.streaming.kafka import TopicAndPartition
from settings import RedisPasswd, RedisHost
from utils import RedisTool
# OffsetRange objects captured from the most recent batch (written by
# store_offset_ranges, read by update_offset_ranges).
g_offset_ranges = []
# Shared Redis client used to persist per-partition Kafka offsets.
redis_utils = RedisTool(RedisPasswd, RedisHost)
def get_offset_ranges(topic):
    """Load saved Kafka offsets for *topic* from Redis.

    Returns a dict mapping TopicAndPartition -> offset, or None when no
    offsets have been stored yet for this topic.
    """
    ranges = None
    key = '{topic}:offsets'.format(topic=topic)
    if redis_utils.exists(key):
        # Redis hash: partition number (as string) -> last stored offset.
        mapping = redis_utils.hgetall(key)
        ranges = dict()
        for k, v in mapping.items():
            tp = TopicAndPartition(topic, int(k))
            # NOTE(review): `long` exists only on Python 2; this module appears
            # to target Python 2 (pyspark.streaming.kafka) — confirm before
            # running under Python 3, where this would raise NameError.
            ranges[tp] = long(v)
    return ranges
def update_offset_ranges(rdd):
    """Persist the offsets captured by store_offset_ranges to Redis.

    The *rdd* argument is unused; it is accepted so this function can be
    passed directly as a DStream.foreachRDD callback.
    """
    for o in g_offset_ranges:
        key = '{topic}:offsets'.format(topic=o.topic)
        # One hash field per partition: partition -> end offset of the batch.
        redis_utils.hset(key, o.partition, o.untilOffset)
def store_offset_ranges(rdd):
    """Capture the batch's Kafka offset ranges into module state.

    Must run before update_offset_ranges in the transform chain; returns the
    RDD unchanged so it can be used inside DStream.transform.
    """
    global g_offset_ranges
    g_offset_ranges = rdd.offsetRanges()
    return rdd
from __future__ import unicode_literals
import importlib
from django.utils.translation import ugettext_lazy as _
from django.db import models
from django.utils.functional import cached_property
from django.utils.encoding import python_2_unicode_compatible
from model_utils import Choices
from autoslug import AutoSlugField
from autoslug.settings import slugify as default_slugify
from model_utils.models import TimeStampedModel
from formulator.conf import settings
if settings.FORMULATOR_CRISPY_ENABLED:
from crispy_forms.helper import FormHelper
from crispy_forms import layout
def custom_slugify(value):
    """Slugify *value* using underscores so the slug is a valid identifier."""
    return default_slugify(value).replace('-', '_')
@python_2_unicode_compatible
class BaseModel(TimeStampedModel):
    """Abstract base adding created/modified timestamps and a debug __str__."""
    def __str__(self):
        return '<%s:%s>' % (self.__class__.__name__, self.pk)
    class Meta:
        abstract=True
# Projects may substitute their own base model via settings; fall back to ours.
BaseModelClass = settings.FORMULATOR_BASE_MODEL or BaseModel
@python_2_unicode_compatible
class Form(BaseModelClass):
    """Database-defined form.

    Stores the HTML <form> attributes and can build a concrete Django Form
    subclass (optionally with a crispy-forms helper) from its related Field,
    FieldSet and Choice rows via form_class_factory().
    """
    # (db value, attribute name, display) triples for the enctype attribute.
    ENCTYPES = Choices((0, 'urlencoded', 'application/x-www-form-urlencoded'),
                       (1, 'multipart', 'multipart/form-data'),
                       (2, 'plain', 'text/plain'))
    METHODS = Choices((0, 'get', 'GET'), (1, 'post', 'POST'))
    name = models.CharField(max_length=100, help_text='Name of the Form type')
    # common HTML form attributes
    form_name = models.CharField(max_length=100, blank=True)
    form_action = models.CharField(max_length=250, blank=True)
    form_method = models.IntegerField(choices=METHODS, default=METHODS.post)
    form_id = AutoSlugField(populate_from='name')
    form_class = models.CharField(max_length=250, blank=True)
    form_accept_charset = models.CharField(max_length=100, blank=True)
    form_autocomplete = models.BooleanField(default=False)
    form_novalidate = models.BooleanField(default=False)
    form_enctype = models.IntegerField(choices=ENCTYPES, default=ENCTYPES.urlencoded)
    form_target = models.CharField(max_length=50, blank=True)
    @cached_property
    def fieldsets(self):
        # Prefetch fields to avoid N+1 queries when building the layout below.
        return self.fieldset_set.all().prefetch_related('field_set')
    def form_class_factory(self, form_class=None, attrs=None):
        """Build and return a Django form class from this Form's rows.

        *form_class* is the base class to subclass (project default when
        None); *attrs* seeds the class dict and receives the generated form
        fields keyed by field_id (dashes replaced with underscores).
        """
        if attrs is None:
            attrs = {}
        if form_class is None:
            form_class = settings.FORMULATOR_DEFAULT_FORM_CLASS
        # again make sure that we have everything we need to create a class
        self.full_clean()
        for field in self.field_set.all():
            attrs[field.field_id.replace('-', '_')] = field.formfield_instance_factory()
        # set choices
        # NOTE(review): this scans *all* Choice rows and re-checks membership
        # per choice — looks O(n*m); confirm whether filtering by
        # field__form=self was intended.
        for choice in Choice.objects.all():
            if choice.field in self.field_set.all():
                attrs[choice.field.field_id.replace('-', '_')].choices.append((choice.key, choice.value))
        if settings.FORMULATOR_CRISPY_ENABLED:
            # Build one crispy Fieldset layout per FieldSet row.
            layouts = []
            for fieldset in self.fieldsets:
                fieldset_fields = fieldset.fields
                fieldset_layout = layout.Fieldset(fieldset.safe_legend, *[f.field_id for f in fieldset_fields])
                layouts.append(fieldset_layout)
                for field in fieldset_fields:
                    attrs[field.field_id] = field.formfield_instance_factory()
            helper = getattr(form_class, 'helper', FormHelper())
            helper.form_id = self.form_id
            helper.form_action = self.form_action
            # NOTE(review): assumes Choices indexing maps the stored int to
            # 'GET'/'POST' — confirm against model_utils.Choices semantics.
            helper.form_method = self.METHODS[self.form_method]
            helper.attrs = {}
            if self.form_accept_charset:
                helper.attrs['accept-charset'] = self.form_accept_charset
            if self.form_autocomplete:
                helper.attrs['autocomplete'] = self.form_autocomplete
            if self.form_novalidate:
                helper.attrs['novalidate'] = self.form_novalidate
            if self.form_enctype:
                helper.attrs['enctype'] = self.form_enctype
            if self.form_target:
                helper.attrs['target'] = self.form_target
            helper.layout = layout.Layout(*layouts)
            attrs['helper'] = helper
        return type(str(self.form_id), (form_class,), attrs)
    def __str__(self):
        return '%s %s: %s' % (self.__class__.__name__, self.pk, self.name)
class FieldSet(BaseModelClass):
    """Groups a Form's fields under a positioned <fieldset> with a legend."""
    form = models.ForeignKey(Form)
    position = models.PositiveIntegerField(default=0, db_index=True)
    name = models.CharField(max_length=100)
    legend = models.CharField(max_length=200)

    @cached_property
    def safe_legend(self):
        """Return the legend, falling back to the title-cased name on error."""
        try:
            return self.legend
        # Bug fix: the bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; catch Exception instead.
        except Exception:
            return self.name.title()

    @cached_property
    def fields(self):
        """All fields attached to this fieldset (reverse FK queryset)."""
        return self.field_set.all()

    class Meta:
        ordering = ['form', 'position']
        unique_together = ['form', 'position']
@python_2_unicode_compatible
class Field(BaseModelClass):
    """
    Stores the information for a django form field.
    """
    form = models.ForeignKey(Form)
    position = models.PositiveIntegerField(default=0, db_index=True)
    fieldset = models.ForeignKey(FieldSet, null=True)
    label = models.CharField(
        max_length=200,
        help_text=_("""A verbose name for this field, for use in displaying this
                       field in a form. By default, Django will use a "pretty"
                       version of the form field name, if the Field is part of a
                       Form. """)
    )
    # Stable identifier derived from the label; underscores keep it usable as
    # a Python attribute name (see custom_slugify).
    field_id = AutoSlugField(unique_with='form', populate_from='label', slugify=custom_slugify, sep='_')
    # Dotted path of the form-field class, e.g. 'django.forms.CharField'.
    field_type = models.CharField(max_length=100, choices=settings.FORMULATOR_FIELDS)
    max_length = models.IntegerField(blank=True, null=True)
    placeholder = models.CharField(max_length=255, blank=True)
    required = models.BooleanField(default=True)
    help_text = models.TextField(
        blank=True,
        help_text=_("An optional string to use as 'help text' for this Field.")
    )
    initial = models.CharField(
        max_length=200,
        blank=True,
        help_text=_("""A value to use in this Field's initial display. This value
                       is *not* used as a fallback if data isn't given. """)
    )
    widget = models.CharField(
        max_length=100,
        choices=settings.FORMULATOR_WIDGETS,
        blank=True,
        help_text=_("""A Widget class, or instance of a Widget class, that should
                       be used for this Field when displaying it. Each Field has a
                       default Widget that it'll use if you don't specify this. In
                       most cases, the default widget is TextInput.""")
    )
    show_hidden_initial = models.BooleanField(
        default=False,
        help_text=_('Boolean that specifies whether the field is hidden.'))
    class Meta:
        ordering = ['form', 'position']
    def formfield_instance_factory(self, field_class=None, field_attrs=None, widget_attrs=None):
        """Returns an instance of a form field"""
        # Get the field class for this particular field
        # if field_class is None:
        #     for cls, n in settings.FORMULATOR_FIELDS:
        #         if n == self.field_type:
        #             field_class = cls
        if field_class is None:
            field_class = self.field_type
        # Extra constructor/widget kwargs stored as key/value rows.
        if field_attrs is None:
            field_attrs = dict(self.fieldattribute_set.values_list('key', 'value'))
        if widget_attrs is None:
            widget_attrs = dict(self.widgetattribute_set.values_list('key', 'value'))
        # Resolve the dotted path to the actual field class.
        module_name, class_name = field_class.rsplit(".", 1)
        module = importlib.import_module(module_name)
        field = getattr(module, class_name)
        # Get the widget class for this particular field
        if not self.widget:
            widget = getattr(field, 'widget', None)
        else:
            module_name, class_name = self.widget.rsplit(".", 1)
            module = importlib.import_module(module_name)
            widget = getattr(module, class_name)
        field_attrs.update({
            'required': self.required,
            'label': self.label,
            'initial': self.initial,
            'help_text': self.help_text,
            'show_hidden_initial': self.show_hidden_initial,
        })
        # NOTE(review): max_length is placed into the *widget* attrs, not the
        # field constructor kwargs — confirm this is intentional.
        if self.max_length:
            widget_attrs['max_length'] = self.max_length
        if self.placeholder:
            widget_attrs['placeholder'] = self.placeholder
        if widget:
            field_attrs['widget'] = widget(attrs=widget_attrs)
        return field(**field_attrs)
    def __str__(self):
        return '%s %s: %s' % (self.__class__.__name__, self.pk, self.field_id)
class FieldAttribute(BaseModelClass):
    """Extra keyword argument passed to the form-field constructor."""
    field = models.ForeignKey(Field)
    key = models.CharField(max_length=100)
    value = models.CharField(max_length=100, blank=True)
class WidgetAttribute(BaseModelClass):
    """Extra HTML attribute passed to the field's widget."""
    field = models.ForeignKey(Field)
    key = models.CharField(max_length=100)
    value = models.CharField(max_length=100)
class Choice(BaseModelClass):
    """One (key, value) option for a choice-type field, ordered by position."""
    field = models.ForeignKey(Field)
    position = models.PositiveIntegerField(default=0, db_index=True)
    key = models.CharField(max_length=100)
    value = models.CharField(max_length=100, blank=True)
    class Meta:
        ordering = ['field', 'position']
|
#author unknown
import os
import hashlib
def find_duplicate_files(starting_directory=r'F:\Books'):
    """Walk *starting_directory* and return (duplicate, original) path pairs.

    Two files are considered duplicates when their sampled hashes match
    (see sample_hash_file); the more recently edited copy is reported as
    the duplicate.
    """
    # Bug fix: the default was 'F:\Books' in a normal string literal; '\B' is
    # an invalid escape sequence (SyntaxWarning on modern Python) — use a raw
    # string so the path is explicit.
    files_seen_already = {}
    queue = [starting_directory]
    # we'll track tuples of (duplicate_file, original_file)
    duplicates = []
    while queue:
        current_path = queue.pop()
        # if it's a directory, put its contents in our queue
        if os.path.isdir(current_path):
            for path in os.listdir(current_path):
                queue.append(os.path.join(current_path, path))
        # if it's a file:
        else:
            # hash a few samples of its contents
            file_hash = sample_hash_file(current_path)
            # get its last edited time
            current_last_edited_time = os.path.getmtime(current_path)
            if file_hash in files_seen_already:
                existing_last_edited_time, existing_path = files_seen_already[file_hash]
                if current_last_edited_time > existing_last_edited_time:
                    # current file is the newer copy, so it is the dupe
                    duplicates.append((current_path, existing_path))
                else:
                    # the previously seen file is the dupe; remember the
                    # newer file's info for future comparisons
                    duplicates.append((existing_path, current_path))
                    files_seen_already[file_hash] = \
                        (current_last_edited_time, current_path)
            else:
                # first time we see this hash: record path and mtime so we
                # can tell later if another file is a dupe of it
                files_seen_already[file_hash] = \
                    (current_last_edited_time, current_path)
    return duplicates
def sample_hash_file(path):
    """Return a SHA-512 hex digest built from up to three 4000-byte samples.

    Hashes the start of the file, a chunk from the middle, and — only when
    the file is large enough that it would not overlap the other samples —
    a chunk from the end.  Fast approximate fingerprint, not a full hash.
    """
    num_bytes_to_read_per_sample = 4000
    total_bytes = os.path.getsize(path)
    hasher = hashlib.sha512()
    # (this "with" block ensures that our file gets closed when we're done)
    with open(path, 'rb') as fh:
        # first bytes
        sample = fh.read(num_bytes_to_read_per_sample)
        hasher.update(sample)
        # middle bytes
        # Bug fix: `total_bytes / 2` is a float on Python 3 and seek() then
        # raises TypeError; use floor division for an integer offset.
        fh.seek(total_bytes // 2)
        sample = fh.read(num_bytes_to_read_per_sample)
        hasher.update(sample)
        # last bytes — but only if our file is big enough
        if total_bytes > num_bytes_to_read_per_sample * 3:
            fh.seek(-num_bytes_to_read_per_sample, os.SEEK_END)
            sample = fh.read(num_bytes_to_read_per_sample)
            hasher.update(sample)
    return hasher.hexdigest()
def main():
    """Print duplicate-file pairs found under the default directory."""
    # NOTE(review): this dict loop looks like a leftover demo — it prints
    # 4098 and 4139 before the scan results.
    tel = {'jack': 4098, 'sape': 4139}
    for value in tel.values():
        print(value)
    dup = find_duplicate_files()
    for value in dup:
        print(value);
if __name__=="__main__":main()
|
# -*- coding:utf-8 -*-
"""Main script."""
from brain_games.engine import run
from brain_games.games import brain_even
def main():
    """Start brain_even game."""
    # Delegate to the shared game engine with the brain_even question module.
    run(brain_even)
if __name__ == '__main__':
    main()
|
from lantz.drivers.keysight.Keysight_66322A import Keysight_33622A
from lantz.drivers.keysight.arbseq_class import Arbseq_Class
class PulseSequence(object):
    """Container for building an arbitrary-waveform pulse sequence.

    Bug fix: the original did not parse — build_pulse's body was literal
    pseudo-code ("[off(0) for 1 sec, ...]", "for loop over items in pulse")
    and off() referenced undefined names `name`/`timestep`.  The pseudo-code
    is preserved in comments until the real algorithm is written.
    """

    def __init__(self, name, timestep):
        self.name = name          # stored so helper methods can use it
        self.timestep = timestep
        self.repeat_unit = None
        self.arbseq = None

    def build_pulse(self):
        """Assemble the pulse from on/off segments (not yet implemented).

        Original sketch:
            pulse = [off(0) for 1 sec, on(amp, trigger) for 2 sec,
                     on(amp) for 5 sec, off(0) for 5 sec]
            loop over items in pulse, find the smallest segment
        """
        raise NotImplementedError('build_pulse is not implemented yet')

    def off(self, time):
        """Return an Arbseq segment representing an off period."""
        # NOTE(review): `time` is currently unused; presumably it should set
        # the segment duration — confirm against Arbseq_Class's API.
        arbseq = Arbseq_Class(self.name, self.timestep)
        return arbseq
# Bug fix: the guard compared against 'main', which never matches
# (__name__ is '__main__' when run as a script), and the original call
# passed `off(1e-6)` where `off` is undefined at module scope.
if __name__ == '__main__':
    pulse = PulseSequence('Test Pulse', 1e-9)
    pulse.build_pulse()
import testc
# Exercise the testc helper, first as a plain object with an explicit
# summary() call, then twice as a context manager (which presumably reports
# on exit — confirm against testc's implementation).
my_test = testc.test()
my_test.equal(4, 5)   # deliberately failing comparison
my_test.equal(5, 5)
my_test.output("Hello", "World", 42, expected="Hello World 42")
my_test.summary()
with testc.test() as test:
    test.equal(4, 5)
    test.equal(5, 5)
    test.output("Hello", "World", 42, expected="Hello World 42")
with testc.test() as test:
    test.equal(4, 5)
    test.output("Hello", "World", 42, expected="Hello World 42")
|
'''
Created on Sep 22, 2018
@author: l0t0y
'''
from labs.module02.TempSensorEmulator import TempSensorEmulator
from time import sleep
# Run the temperature sensor emulator on a daemon thread; the constructor
# argument (5) is presumably the emission interval/count — confirm against
# TempSensorEmulator.
TS = TempSensorEmulator(5)
TS.daemon = True
print("Daemon Thread Starting...")
TS.setEnableTempEmulator(True)
TS.start()
# Keep the main thread alive; the daemon thread dies with the process.
while(True):
    sleep(5)
    pass
|
# -*- coding: utf-8 -*-
"""
Created on Mon Jun 06 10:17:03 2016
@author: v-wujin
"""
from sklearn import datasets
# Bug fix: sklearn.cross_validation was deprecated in 0.18 and removed in
# 0.20; the same functions live in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.model_selection import cross_val_score
from sklearn.metrics import accuracy_score
from sklearn import svm
import numpy as np
#Import dataset
iris = datasets.load_iris()
X = iris.data
Y = iris.target
#Split into training and test sets (40% held out for testing)
X_train, X_test, y_train, y_test = train_test_split(X, Y, test_size = 0.4)
#Observations in dataset
len(X)
#Observations in test set
len(X_test)
#Observations in training set
len(X_train)
#SVM classifier
#Create SVM object
clf = svm.SVC()
#Fit classification model with training data
clf.fit(X_train, y_train)
#Predict data in training set
svm_pred = clf.predict(X_train)
#Evaluate accuracy of training set
accuracy_score(y_train, svm_pred)
#0.978
#Predict data in test set
svm_pred2 = clf.predict(X_test)
#Evaluate accuracy of test set
accuracy_score(y_test, svm_pred2)
#0.983
#Accuracy of validation set using k-fold cross validation
#5 folds
accu = cross_val_score(clf, X_train, y_train, scoring = 'accuracy', cv = 5)
#Take average of all cross validation folds
np.mean(accu)
#0.966
#Standard deviation of all cross validation folds
np.std(accu)
#0.043
# Read a point/circle description and report which vertical half-plane the
# b coordinate falls in (nothing is printed when b is exactly zero).
coord_a = float(input("insira a coordenada a: "))
coord_b = float(input("insira a coordenada b "))
raio = float(input("insira o raio r: "))
if coord_b > 0:
    print("Superiores")
if coord_b < 0:
    print("Inferiores")
|
#!/usr/bin/env python
#
# Even more functional-like...
import sys
def num_layers(num_rows, num_cols):
    """Number of rectangular "onion" layers in a num_rows x num_cols matrix."""
    # Bug fix: `/` keeps Python 2 floor semantics under Python 3 only when
    # written as `//`; plain `/` would return a float on Python 3 and break
    # every caller that uses this as an integer count.
    return min(num_rows, num_cols) // 2
def len_layer(num_rows, num_cols, layer):
    """Cell count (perimeter) of the given layer, counted from the outside in."""
    inner_rows = num_rows - 2 * layer
    inner_cols = num_cols - 2 * layer
    # Perimeter of the inner rectangle left after peeling `layer` rings.
    return 2 * (inner_rows + inner_cols - 2)
def get_area_row_col(num_rows, num_cols, row, col):
    """Classify (row, col) into one of four edge areas of its layer.

    1 = top edge, 2 = right edge, 3 = bottom edge, 4 = left edge.
    """
    # Bug fix: the original used `/`, which floors on Python 2 (the dialect
    # this file targets — see raw_input in main) but yields a float on
    # Python 3, changing the comparisons for odd dimensions.  `//` preserves
    # the Python 2 behaviour on both versions.
    if row < num_rows // 2:
        # upper half of the matrix
        if col < num_cols // 2 and col < row:
            return 4
        elif col >= num_cols - row:
            return 2
        else:
            return 1
    else:
        # lower half of the matrix
        if col < num_cols // 2 and col < num_rows - row - 1:
            return 4
        elif col > num_cols - num_rows + row:
            return 2
        else:
            return 3
def get_layer_index(num_rows, num_cols, row, col):
    """Map a (row, col) cell to its (layer, index) position along that layer."""
    area = get_area_row_col(num_rows, num_cols, row, col)
    if area == 1:
        # top edge: index counts rightwards from the layer's corner
        return row, col - row
    if area == 2:
        # right edge: continue counting downwards
        layer = num_cols - col - 1
        return layer, num_cols - 3 * layer + row - 1
    if area == 3:
        # bottom edge: continue counting leftwards
        layer = num_rows - row - 1
        return layer, 2 * num_cols + num_rows - 3 - 5 * layer - col
    # area == 4 — left edge: counting upwards closes the ring
    return col, 2 * (num_cols + num_rows) - 4 - 7 * col - row
def get_area_layer_index(num_rows, num_cols, layer, index):
    """Classify a (layer, index) position into its edge area (1..4)."""
    # Cumulative index where each edge of the ring ends.
    edge_ends = (
        (1, num_cols - 2 * layer),                      # top
        (2, num_cols + num_rows - 4 * layer - 1),       # right
        (3, 2 * num_cols + num_rows - 6 * layer - 2),   # bottom
    )
    for area, end in edge_ends:
        if index < end:
            return area
    return 4                                            # left
def get_row_col(num_rows, num_cols, layer, index):
    """Inverse of get_layer_index: map (layer, index) back to (row, col)."""
    area = get_area_layer_index(num_rows, num_cols, layer, index)
    if area == 1:
        # top edge
        return layer, layer + index
    if area == 2:
        # right edge
        return index - num_cols + 3 * layer + 1, num_cols - layer - 1
    if area == 3:
        # bottom edge
        return num_rows - layer - 1, 2 * num_cols + num_rows - 3 - 5 * layer - index
    # area == 4 — left edge
    return 2 * (num_cols + num_rows) - 4 - 7 * layer - index, layer
def main():
    """Read a matrix and a rotation amount, print the rotated matrix."""
    # NOTE(review): raw_input exists only on Python 2 — this file targets
    # Python 2; confirm before running under Python 3.
    (num_rows, num_cols, rotation) = (int(x) for x in raw_input().split())
    # The layer arithmetic assumes the smaller dimension is even.
    assert min(num_rows, num_cols) % 2 == 0
    matrix = []
    for row in range(num_rows):
        line = [int(x) for x in raw_input().split()]
        assert len(line) == num_cols
        matrix.append(line)
    for row in range(num_rows):
        for col in range(num_cols):
            # Rotate each cell along its own layer by `rotation` positions.
            layer, idx = get_layer_index(num_rows, num_cols, row, col)
            idx = (idx + rotation) % len_layer(num_rows, num_cols, layer)
            rot_row, rot_col = get_row_col(num_rows, num_cols, layer, idx)
            sys.stdout.write("%d " % matrix[rot_row][rot_col])
        sys.stdout.write("\n")
if __name__ == '__main__':
    main()
|
from django.contrib.auth import get_user_model
from django.contrib.auth.models import User
from .models import Purchase, Car, Equipment
from django.test import TestCase, Client
from django.urls import reverse
class IndexViewTests(TestCase):
    """The index page renders with an empty car list when no data exists."""
    def test_no_products(self):
        response = self.client.get(reverse('index'))
        self.assertEqual(response.status_code, 200)
        self.assertQuerysetEqual(response.context['cars'], [])
class PurchaseTest(TestCase):
    """The purchases page renders with an empty purchase list by default."""
    def test_no_purchase(self):
        response = self.client.get(reverse('mypurchase'))
        self.assertEqual(response.status_code, 200)
        self.assertQuerysetEqual(response.context['purchase'], [])
class ConfigTest(TestCase):
    """Configurator view renders against a seeded car/equipment fixture."""
    def setUp(self):
        # Fixture: one user, one car, one equipment row attached to the car.
        self.user = get_user_model().objects.create_user(
            username='testuser',
            password='testpassword',
            email='testemail@test.ru'
        )
        Car.objects.create(
            id=1,
            name='tiguan',
            manufacturer = 'testman',
            count=100
        )
        Equipment.objects.create(
            id=1,
            name='test',
            description='testsdescr',
            cost=800000,
            car_id=1
        )
    def test_no_configuration(self):
        # NOTE(review): the meaning of the two trailing path segments is not
        # visible from this file — confirm against the configurator URL conf.
        response = self.client.get("/configurator/1/0/0")
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['Compl'], 1)
class BuyTest(TestCase):
    """Hitting the buy URL creates exactly one Purchase row."""
    def setUp(self):
        # Fixture: one user and one car available for purchase.
        self.user = get_user_model().objects.create_user(
            username='testuser',
            password='testpassword',
            email='testemail@test.ru'
        )
        Car.objects.create(
            id=1,
            name='tiguan',
            manufacturer = 'testman',
            count=100
        )
    def test_no_buy(self):
        # NOTE(review): a GET performing a purchase is a side-effectful GET —
        # confirm the view's design.
        response = self.client.get("/buy/1/100000")
        self.assertEqual(response.status_code, 200)
        buy = Purchase.objects.all()
        self.assertEqual(buy.count(), 1)
class LogInTest(TestCase):
    """Posting valid credentials logs the user in."""
    def setUp(self):
        self.credentials = {
            'username': 'testuser',
            'password': 'secret'
        }
        User.objects.create_user(**self.credentials)
    def test_login(self):
        # follow=True so the post-login redirect resolves to a 200.
        response = self.client.post('/accounts/login/', self.credentials, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertTrue(response.context['user'].is_authenticated)
class RegisterTest(TestCase):
    """Posting a valid registration form creates one user and redirects."""
    def setUp(self):
        self.credentials = {
            'username': 'testuser',
            'email':'testuser@mail.ru',
            'password1': '400120Pav98',
            'password2': '400120Pav98'
        }
    def test_registration(self):
        response = self.client.post(reverse('register'), self.credentials)
        # 302: successful registration redirects.
        self.assertEqual(response.status_code, 302)
        users = get_user_model().objects.all()
        self.assertEqual(users.count(), 1)
|
# -------------------------------------------------------------------------------
# Name: AutStageE.py
# Description: Data preliminary processing (Delete features from a feature class based on an expression)
# Purpose: Automated prediction system for vegetation cover based on MODIS- NDVI satellite data and neural networks
# Author: Sohaib K. M. Abujayyab
# Created: 11/02/2019
# Requirements: None
# -------------------------------------------------------------------------------
# Import system modules
import arcpy, os
from arcpy import env
arcpy.env.overwriteOutput = True
arcpy.env.addOutputsToMap = True
# Tool parameters: semicolon-separated tile feature classes, an unused second
# parameter, the fields to range-check, and the lower/upper thresholds.
ListOftiles = arcpy.GetParameterAsText(0)
a = arcpy.GetParameterAsText(1)
# List of Fields
ListOfFields = arcpy.GetParameterAsText(2) # list of selected fields
str1 = arcpy.GetParameterAsText(3)
str2 = arcpy.GetParameterAsText(4)
# Set local variables
# NOTE(review): the same layer name is reused for every tile; with
# overwriteOutput=True this presumably replaces the previous layer — confirm.
tempLayer = "pointsLayer"
for feature in ListOftiles.split(';'):
    arcpy.AddMessage("Tile " + str(feature))
    s = len(ListOfFields.split(';')) - 1
    count =0
    arcpy.AddMessage("count of fields " + str(s))
    Expression4 = ""
    Expression55 = ""
    # Build one OR-joined where-clause selecting rows where any field falls
    # outside the (str1, str2) range; the last field omits the trailing OR.
    for field in ListOfFields.split(';'):
        # Set local variables
        Expression1 = '"' + str(field) + '"' + "<=" + str1
        Expression2 = '"' + field + '"' + ">=" + str2
        if count == s:
            Expression3 = Expression1 + " OR " + Expression2
            Expression4 = Expression4 + Expression3
        else:
            Expression3 = Expression1 + " OR " + Expression2 + " OR "
            Expression4 = Expression4 + Expression3
        count = count + 1
    # Execute MakeFeatureLayer
    arcpy.MakeFeatureLayer_management(feature, tempLayer)
    # Execute SelectLayerByAttribute to determine which features to delete
    arcpy.SelectLayerByAttribute_management(tempLayer, "NEW_SELECTION",
                                            Expression4)
    # Execute GetCount and if some features have been selected, then
    # execute DeleteFeatures to remove the selected features.
    arcpy.DeleteFeatures_management(tempLayer)
# Emit JavaScript `require('./Images/N.jpg'),` lines for N in 83..863,
# for pasting into the image manifest below.
# Bug fix: the Python 2 print statement fails to parse on Python 3; calling
# print(...) with a single string argument behaves identically on both.
for i in range(83, 864):
    print("require('./Images/" + str(i) + ".jpg'),")
// Static manifest of bundled images. Each entry must stay a literal
// require() call so the JS bundler (Metro/webpack) can resolve the
// assets at build time -- do not replace with a computed loop.
const images = [
require('./Images/0.jpg'),
require('./Images/1.jpg'),
require('./Images/2.jpg'),
require('./Images/3.jpg'),
require('./Images/4.jpg'),
require('./Images/5.jpg'),
require('./Images/6.jpg'),
require('./Images/7.jpg'),
require('./Images/8.jpg'),
require('./Images/9.jpg'),
require('./Images/10.jpg'),
require('./Images/11.jpg'),
// NOTE(review): the entry for 12.jpg is missing (11 jumps to 13) -- confirm
// whether Images/12.jpg is intentionally absent or was dropped by mistake.
require('./Images/13.jpg'),
require('./Images/14.jpg'),
require('./Images/15.jpg'),
require('./Images/16.jpg'),
require('./Images/17.jpg'),
require('./Images/18.jpg'),
require('./Images/19.jpg'),
require('./Images/20.jpg'),
require('./Images/21.jpg'),
require('./Images/22.jpg'),
require('./Images/23.jpg'),
require('./Images/24.jpg'),
require('./Images/25.jpg'),
require('./Images/26.jpg'),
require('./Images/27.jpg'),
require('./Images/28.jpg'),
require('./Images/29.jpg'),
require('./Images/30.jpg'),
require('./Images/31.jpg'),
require('./Images/32.jpg'),
require('./Images/33.jpg'),
require('./Images/34.jpg'),
require('./Images/35.jpg'),
require('./Images/36.jpg'),
require('./Images/37.jpg'),
require('./Images/38.jpg'),
require('./Images/39.jpg'),
require('./Images/40.jpg'),
require('./Images/41.jpg'),
require('./Images/42.jpg'),
require('./Images/43.jpg'),
require('./Images/44.jpg'),
require('./Images/45.jpg'),
require('./Images/46.jpg'),
require('./Images/47.jpg'),
require('./Images/48.jpg'),
require('./Images/49.jpg'),
require('./Images/50.jpg'),
require('./Images/51.jpg'),
require('./Images/52.jpg'),
require('./Images/53.jpg'),
require('./Images/54.jpg'),
require('./Images/55.jpg'),
require('./Images/56.jpg'),
require('./Images/57.jpg'),
require('./Images/58.jpg'),
require('./Images/59.jpg'),
require('./Images/60.jpg'),
require('./Images/61.jpg'),
require('./Images/62.jpg'),
require('./Images/63.jpg'),
require('./Images/64.jpg'),
require('./Images/65.jpg'),
require('./Images/66.jpg'),
require('./Images/67.jpg'),
require('./Images/68.jpg'),
require('./Images/69.jpg'),
require('./Images/70.jpg'),
require('./Images/71.jpg'),
require('./Images/72.jpg'),
require('./Images/73.jpg'),
require('./Images/74.jpg'),
require('./Images/75.jpg'),
require('./Images/76.jpg'),
require('./Images/77.jpg'),
require('./Images/78.jpg'),
require('./Images/79.jpg'),
require('./Images/80.jpg'),
require('./Images/81.jpg'),
require('./Images/82.jpg'),
require('./Images/83.jpg'),
require('./Images/84.jpg'),
require('./Images/85.jpg'),
require('./Images/86.jpg'),
require('./Images/87.jpg'),
require('./Images/88.jpg'),
require('./Images/89.jpg'),
require('./Images/90.jpg'),
require('./Images/91.jpg'),
require('./Images/92.jpg'),
require('./Images/93.jpg'),
require('./Images/94.jpg'),
require('./Images/95.jpg'),
require('./Images/96.jpg'),
require('./Images/97.jpg'),
require('./Images/98.jpg'),
require('./Images/99.jpg'),
require('./Images/100.jpg'),
require('./Images/101.jpg'),
require('./Images/102.jpg'),
require('./Images/103.jpg'),
require('./Images/104.jpg'),
require('./Images/105.jpg'),
require('./Images/106.jpg'),
require('./Images/107.jpg'),
require('./Images/108.jpg'),
require('./Images/109.jpg'),
require('./Images/110.jpg'),
require('./Images/111.jpg'),
require('./Images/112.jpg'),
require('./Images/113.jpg'),
require('./Images/114.jpg'),
require('./Images/115.jpg'),
require('./Images/116.jpg'),
require('./Images/117.jpg'),
require('./Images/118.jpg'),
require('./Images/119.jpg'),
require('./Images/120.jpg'),
require('./Images/121.jpg'),
require('./Images/122.jpg'),
require('./Images/123.jpg'),
require('./Images/124.jpg'),
require('./Images/125.jpg'),
require('./Images/126.jpg'),
require('./Images/127.jpg'),
require('./Images/128.jpg'),
require('./Images/129.jpg'),
require('./Images/130.jpg'),
require('./Images/131.jpg'),
require('./Images/132.jpg'),
require('./Images/133.jpg'),
require('./Images/134.jpg'),
require('./Images/135.jpg'),
require('./Images/136.jpg'),
require('./Images/137.jpg'),
require('./Images/138.jpg'),
require('./Images/139.jpg'),
require('./Images/140.jpg'),
require('./Images/141.jpg'),
require('./Images/142.jpg'),
require('./Images/143.jpg'),
require('./Images/144.jpg'),
require('./Images/145.jpg'),
require('./Images/146.jpg'),
require('./Images/147.jpg'),
require('./Images/148.jpg'),
require('./Images/149.jpg'),
require('./Images/150.jpg'),
require('./Images/151.jpg'),
require('./Images/152.jpg'),
require('./Images/153.jpg'),
require('./Images/154.jpg'),
require('./Images/155.jpg'),
require('./Images/156.jpg'),
require('./Images/157.jpg'),
require('./Images/158.jpg'),
require('./Images/159.jpg'),
require('./Images/160.jpg'),
require('./Images/161.jpg'),
require('./Images/162.jpg'),
require('./Images/163.jpg'),
require('./Images/164.jpg'),
require('./Images/165.jpg'),
require('./Images/166.jpg'),
require('./Images/167.jpg'),
require('./Images/168.jpg'),
require('./Images/169.jpg'),
require('./Images/170.jpg'),
require('./Images/171.jpg'),
require('./Images/172.jpg'),
require('./Images/173.jpg'),
require('./Images/174.jpg'),
require('./Images/175.jpg'),
require('./Images/176.jpg'),
require('./Images/177.jpg'),
require('./Images/178.jpg'),
require('./Images/179.jpg'),
require('./Images/180.jpg'),
require('./Images/181.jpg'),
require('./Images/182.jpg'),
require('./Images/183.jpg'),
require('./Images/184.jpg'),
require('./Images/185.jpg'),
require('./Images/186.jpg'),
require('./Images/187.jpg'),
require('./Images/188.jpg'),
require('./Images/189.jpg'),
require('./Images/190.jpg'),
require('./Images/191.jpg'),
require('./Images/192.jpg'),
require('./Images/193.jpg'),
require('./Images/194.jpg'),
require('./Images/195.jpg'),
require('./Images/196.jpg'),
require('./Images/197.jpg'),
require('./Images/198.jpg'),
require('./Images/199.jpg'),
require('./Images/200.jpg'),
require('./Images/201.jpg'),
require('./Images/202.jpg'),
require('./Images/203.jpg'),
require('./Images/204.jpg'),
require('./Images/205.jpg'),
require('./Images/206.jpg'),
require('./Images/207.jpg'),
require('./Images/208.jpg'),
require('./Images/209.jpg'),
require('./Images/210.jpg'),
require('./Images/211.jpg'),
require('./Images/212.jpg'),
require('./Images/213.jpg'),
require('./Images/214.jpg'),
require('./Images/215.jpg'),
require('./Images/216.jpg'),
require('./Images/217.jpg'),
require('./Images/218.jpg'),
require('./Images/219.jpg'),
require('./Images/220.jpg'),
require('./Images/221.jpg'),
require('./Images/222.jpg'),
require('./Images/223.jpg'),
require('./Images/224.jpg'),
require('./Images/225.jpg'),
require('./Images/226.jpg'),
require('./Images/227.jpg'),
require('./Images/228.jpg'),
require('./Images/229.jpg'),
require('./Images/230.jpg'),
require('./Images/231.jpg'),
require('./Images/232.jpg'),
require('./Images/233.jpg'),
require('./Images/234.jpg'),
require('./Images/235.jpg'),
require('./Images/236.jpg'),
require('./Images/237.jpg'),
require('./Images/238.jpg'),
require('./Images/239.jpg'),
require('./Images/240.jpg'),
require('./Images/241.jpg'),
require('./Images/242.jpg'),
require('./Images/243.jpg'),
require('./Images/244.jpg'),
require('./Images/245.jpg'),
require('./Images/246.jpg'),
require('./Images/247.jpg'),
require('./Images/248.jpg'),
require('./Images/249.jpg'),
require('./Images/250.jpg'),
require('./Images/251.jpg'),
require('./Images/252.jpg'),
require('./Images/253.jpg'),
require('./Images/254.jpg'),
require('./Images/255.jpg'),
require('./Images/256.jpg'),
require('./Images/257.jpg'),
require('./Images/258.jpg'),
require('./Images/259.jpg'),
require('./Images/260.jpg'),
require('./Images/261.jpg'),
require('./Images/262.jpg'),
require('./Images/263.jpg'),
require('./Images/264.jpg'),
require('./Images/265.jpg'),
require('./Images/266.jpg'),
require('./Images/267.jpg'),
require('./Images/268.jpg'),
require('./Images/269.jpg'),
require('./Images/270.jpg'),
require('./Images/271.jpg'),
require('./Images/272.jpg'),
require('./Images/273.jpg'),
require('./Images/274.jpg'),
require('./Images/275.jpg'),
require('./Images/276.jpg'),
require('./Images/277.jpg'),
require('./Images/278.jpg'),
require('./Images/279.jpg'),
require('./Images/280.jpg'),
require('./Images/281.jpg'),
require('./Images/282.jpg'),
require('./Images/283.jpg'),
require('./Images/284.jpg'),
require('./Images/285.jpg'),
require('./Images/286.jpg'),
require('./Images/287.jpg'),
require('./Images/288.jpg'),
require('./Images/289.jpg'),
require('./Images/290.jpg'),
require('./Images/291.jpg'),
require('./Images/292.jpg'),
require('./Images/293.jpg'),
require('./Images/294.jpg'),
require('./Images/295.jpg'),
require('./Images/296.jpg'),
require('./Images/297.jpg'),
require('./Images/298.jpg'),
require('./Images/299.jpg'),
require('./Images/300.jpg'),
require('./Images/301.jpg'),
require('./Images/302.jpg'),
require('./Images/303.jpg'),
require('./Images/304.jpg'),
require('./Images/305.jpg'),
require('./Images/306.jpg'),
require('./Images/307.jpg'),
require('./Images/308.jpg'),
require('./Images/309.jpg'),
require('./Images/310.jpg'),
require('./Images/311.jpg'),
require('./Images/312.jpg'),
require('./Images/313.jpg'),
require('./Images/314.jpg'),
require('./Images/315.jpg'),
require('./Images/316.jpg'),
require('./Images/317.jpg'),
require('./Images/318.jpg'),
require('./Images/319.jpg'),
require('./Images/320.jpg'),
require('./Images/321.jpg'),
require('./Images/322.jpg'),
require('./Images/323.jpg'),
require('./Images/324.jpg'),
require('./Images/325.jpg'),
require('./Images/326.jpg'),
require('./Images/327.jpg'),
require('./Images/328.jpg'),
require('./Images/329.jpg'),
require('./Images/330.jpg'),
require('./Images/331.jpg'),
require('./Images/332.jpg'),
require('./Images/333.jpg'),
require('./Images/334.jpg'),
require('./Images/335.jpg'),
require('./Images/336.jpg'),
require('./Images/337.jpg'),
require('./Images/338.jpg'),
require('./Images/339.jpg'),
require('./Images/340.jpg'),
require('./Images/341.jpg'),
require('./Images/342.jpg'),
require('./Images/343.jpg'),
require('./Images/344.jpg'),
require('./Images/345.jpg'),
require('./Images/346.jpg'),
require('./Images/347.jpg'),
require('./Images/348.jpg'),
require('./Images/349.jpg'),
require('./Images/350.jpg'),
require('./Images/351.jpg'),
require('./Images/352.jpg'),
require('./Images/353.jpg'),
require('./Images/354.jpg'),
require('./Images/355.jpg'),
require('./Images/356.jpg'),
require('./Images/357.jpg'),
require('./Images/358.jpg'),
require('./Images/359.jpg'),
require('./Images/360.jpg'),
require('./Images/361.jpg'),
require('./Images/362.jpg'),
require('./Images/363.jpg'),
require('./Images/364.jpg'),
require('./Images/365.jpg'),
require('./Images/366.jpg'),
require('./Images/367.jpg'),
require('./Images/368.jpg'),
require('./Images/369.jpg'),
require('./Images/370.jpg'),
require('./Images/371.jpg'),
require('./Images/372.jpg'),
require('./Images/373.jpg'),
require('./Images/374.jpg'),
require('./Images/375.jpg'),
require('./Images/376.jpg'),
require('./Images/377.jpg'),
require('./Images/378.jpg'),
require('./Images/379.jpg'),
require('./Images/380.jpg'),
require('./Images/381.jpg'),
require('./Images/382.jpg'),
require('./Images/383.jpg'),
require('./Images/384.jpg'),
require('./Images/385.jpg'),
require('./Images/386.jpg'),
require('./Images/387.jpg'),
require('./Images/388.jpg'),
require('./Images/389.jpg'),
require('./Images/390.jpg'),
require('./Images/391.jpg'),
require('./Images/392.jpg'),
require('./Images/393.jpg'),
require('./Images/394.jpg'),
require('./Images/395.jpg'),
require('./Images/396.jpg'),
require('./Images/397.jpg'),
require('./Images/398.jpg'),
require('./Images/399.jpg'),
require('./Images/400.jpg'),
require('./Images/401.jpg'),
require('./Images/402.jpg'),
require('./Images/403.jpg'),
require('./Images/404.jpg'),
require('./Images/405.jpg'),
require('./Images/406.jpg'),
require('./Images/407.jpg'),
require('./Images/408.jpg'),
require('./Images/409.jpg'),
require('./Images/410.jpg'),
require('./Images/411.jpg'),
require('./Images/412.jpg'),
require('./Images/413.jpg'),
require('./Images/414.jpg'),
require('./Images/415.jpg'),
require('./Images/416.jpg'),
require('./Images/417.jpg'),
require('./Images/418.jpg'),
require('./Images/419.jpg'),
require('./Images/420.jpg'),
require('./Images/421.jpg'),
require('./Images/422.jpg'),
require('./Images/423.jpg'),
require('./Images/424.jpg'),
require('./Images/425.jpg'),
require('./Images/426.jpg'),
require('./Images/427.jpg'),
require('./Images/428.jpg'),
require('./Images/429.jpg'),
require('./Images/430.jpg'),
require('./Images/431.jpg'),
require('./Images/432.jpg'),
require('./Images/433.jpg'),
require('./Images/434.jpg'),
require('./Images/435.jpg'),
require('./Images/436.jpg'),
require('./Images/437.jpg'),
require('./Images/438.jpg'),
require('./Images/439.jpg'),
require('./Images/440.jpg'),
require('./Images/441.jpg'),
require('./Images/442.jpg'),
require('./Images/443.jpg'),
require('./Images/444.jpg'),
require('./Images/445.jpg'),
require('./Images/446.jpg'),
require('./Images/447.jpg'),
require('./Images/448.jpg'),
require('./Images/449.jpg'),
require('./Images/450.jpg'),
require('./Images/451.jpg'),
require('./Images/452.jpg'),
require('./Images/453.jpg'),
require('./Images/454.jpg'),
require('./Images/455.jpg'),
require('./Images/456.jpg'),
require('./Images/457.jpg'),
require('./Images/458.jpg'),
require('./Images/459.jpg'),
require('./Images/460.jpg'),
require('./Images/461.jpg'),
require('./Images/462.jpg'),
require('./Images/463.jpg'),
require('./Images/464.jpg'),
require('./Images/465.jpg'),
require('./Images/466.jpg'),
require('./Images/467.jpg'),
require('./Images/468.jpg'),
require('./Images/469.jpg'),
require('./Images/470.jpg'),
require('./Images/471.jpg'),
require('./Images/472.jpg'),
require('./Images/473.jpg'),
require('./Images/474.jpg'),
require('./Images/475.jpg'),
require('./Images/476.jpg'),
require('./Images/477.jpg'),
require('./Images/478.jpg'),
require('./Images/479.jpg'),
require('./Images/480.jpg'),
require('./Images/481.jpg'),
require('./Images/482.jpg'),
require('./Images/483.jpg'),
require('./Images/484.jpg'),
require('./Images/485.jpg'),
require('./Images/486.jpg'),
require('./Images/487.jpg'),
require('./Images/488.jpg'),
require('./Images/489.jpg'),
require('./Images/490.jpg'),
require('./Images/491.jpg'),
require('./Images/492.jpg'),
require('./Images/493.jpg'),
require('./Images/494.jpg'),
require('./Images/495.jpg'),
require('./Images/496.jpg'),
require('./Images/497.jpg'),
require('./Images/498.jpg'),
require('./Images/499.jpg'),
require('./Images/500.jpg'),
require('./Images/501.jpg'),
require('./Images/502.jpg'),
require('./Images/503.jpg'),
require('./Images/504.jpg'),
require('./Images/505.jpg'),
require('./Images/506.jpg'),
require('./Images/507.jpg'),
require('./Images/508.jpg'),
require('./Images/509.jpg'),
require('./Images/510.jpg'),
require('./Images/511.jpg'),
require('./Images/512.jpg'),
require('./Images/513.jpg'),
require('./Images/514.jpg'),
require('./Images/515.jpg'),
require('./Images/516.jpg'),
require('./Images/517.jpg'),
require('./Images/518.jpg'),
require('./Images/519.jpg'),
require('./Images/520.jpg'),
require('./Images/521.jpg'),
require('./Images/522.jpg'),
require('./Images/523.jpg'),
require('./Images/524.jpg'),
require('./Images/525.jpg'),
require('./Images/526.jpg'),
require('./Images/527.jpg'),
require('./Images/528.jpg'),
require('./Images/529.jpg'),
require('./Images/530.jpg'),
require('./Images/531.jpg'),
require('./Images/532.jpg'),
require('./Images/533.jpg'),
require('./Images/534.jpg'),
require('./Images/535.jpg'),
require('./Images/536.jpg'),
require('./Images/537.jpg'),
require('./Images/538.jpg'),
require('./Images/539.jpg'),
require('./Images/540.jpg'),
require('./Images/541.jpg'),
require('./Images/542.jpg'),
require('./Images/543.jpg'),
require('./Images/544.jpg'),
require('./Images/545.jpg'),
require('./Images/546.jpg'),
require('./Images/547.jpg'),
require('./Images/548.jpg'),
require('./Images/549.jpg'),
require('./Images/550.jpg'),
require('./Images/551.jpg'),
require('./Images/552.jpg'),
require('./Images/553.jpg'),
require('./Images/554.jpg'),
require('./Images/555.jpg'),
require('./Images/556.jpg'),
require('./Images/557.jpg'),
require('./Images/558.jpg'),
require('./Images/559.jpg'),
require('./Images/560.jpg'),
require('./Images/561.jpg'),
require('./Images/562.jpg'),
require('./Images/563.jpg'),
require('./Images/564.jpg'),
require('./Images/565.jpg'),
require('./Images/566.jpg'),
require('./Images/567.jpg'),
require('./Images/568.jpg'),
require('./Images/569.jpg'),
require('./Images/570.jpg'),
require('./Images/571.jpg'),
require('./Images/572.jpg'),
require('./Images/573.jpg'),
require('./Images/574.jpg'),
require('./Images/575.jpg'),
require('./Images/576.jpg'),
require('./Images/577.jpg'),
require('./Images/578.jpg'),
require('./Images/579.jpg'),
require('./Images/580.jpg'),
require('./Images/581.jpg'),
require('./Images/582.jpg'),
require('./Images/583.jpg'),
require('./Images/584.jpg'),
require('./Images/585.jpg'),
require('./Images/586.jpg'),
require('./Images/587.jpg'),
require('./Images/588.jpg'),
require('./Images/589.jpg'),
require('./Images/590.jpg'),
require('./Images/591.jpg'),
require('./Images/592.jpg'),
require('./Images/593.jpg'),
require('./Images/594.jpg'),
require('./Images/595.jpg'),
require('./Images/596.jpg'),
require('./Images/597.jpg'),
require('./Images/598.jpg'),
require('./Images/599.jpg'),
require('./Images/600.jpg'),
require('./Images/601.jpg'),
require('./Images/602.jpg'),
require('./Images/603.jpg'),
require('./Images/604.jpg'),
require('./Images/605.jpg'),
require('./Images/606.jpg'),
require('./Images/607.jpg'),
require('./Images/608.jpg'),
require('./Images/609.jpg'),
require('./Images/610.jpg'),
require('./Images/611.jpg'),
require('./Images/612.jpg'),
require('./Images/613.jpg'),
require('./Images/614.jpg'),
require('./Images/615.jpg'),
require('./Images/616.jpg'),
require('./Images/617.jpg'),
require('./Images/618.jpg'),
require('./Images/619.jpg'),
require('./Images/620.jpg'),
require('./Images/621.jpg'),
require('./Images/622.jpg'),
require('./Images/623.jpg'),
require('./Images/624.jpg'),
require('./Images/625.jpg'),
require('./Images/626.jpg'),
require('./Images/627.jpg'),
require('./Images/628.jpg'),
require('./Images/629.jpg'),
require('./Images/630.jpg'),
require('./Images/631.jpg'),
require('./Images/632.jpg'),
require('./Images/633.jpg'),
require('./Images/634.jpg'),
require('./Images/635.jpg'),
require('./Images/636.jpg'),
require('./Images/637.jpg'),
require('./Images/638.jpg'),
require('./Images/639.jpg'),
require('./Images/640.jpg'),
require('./Images/641.jpg'),
require('./Images/642.jpg'),
require('./Images/643.jpg'),
require('./Images/644.jpg'),
require('./Images/645.jpg'),
require('./Images/646.jpg'),
require('./Images/647.jpg'),
require('./Images/648.jpg'),
require('./Images/649.jpg'),
require('./Images/650.jpg'),
require('./Images/651.jpg'),
require('./Images/652.jpg'),
require('./Images/653.jpg'),
require('./Images/654.jpg'),
require('./Images/655.jpg'),
require('./Images/656.jpg'),
require('./Images/657.jpg'),
require('./Images/658.jpg'),
require('./Images/659.jpg'),
require('./Images/660.jpg'),
require('./Images/661.jpg'),
require('./Images/662.jpg'),
require('./Images/663.jpg'),
require('./Images/664.jpg'),
require('./Images/665.jpg'),
require('./Images/666.jpg'),
require('./Images/667.jpg'),
require('./Images/668.jpg'),
require('./Images/669.jpg'),
require('./Images/670.jpg'),
require('./Images/671.jpg'),
require('./Images/672.jpg'),
require('./Images/673.jpg'),
require('./Images/674.jpg'),
require('./Images/675.jpg'),
require('./Images/676.jpg'),
require('./Images/677.jpg'),
require('./Images/678.jpg'),
require('./Images/679.jpg'),
require('./Images/680.jpg'),
require('./Images/681.jpg'),
require('./Images/682.jpg'),
require('./Images/683.jpg'),
require('./Images/684.jpg'),
require('./Images/685.jpg'),
require('./Images/686.jpg'),
require('./Images/687.jpg'),
require('./Images/688.jpg'),
require('./Images/689.jpg'),
require('./Images/690.jpg'),
require('./Images/691.jpg'),
require('./Images/692.jpg'),
require('./Images/693.jpg'),
require('./Images/694.jpg'),
require('./Images/695.jpg'),
require('./Images/696.jpg'),
require('./Images/697.jpg'),
require('./Images/698.jpg'),
require('./Images/699.jpg'),
require('./Images/700.jpg'),
require('./Images/701.jpg'),
require('./Images/702.jpg'),
require('./Images/703.jpg'),
require('./Images/704.jpg'),
require('./Images/705.jpg'),
require('./Images/706.jpg'),
require('./Images/707.jpg'),
require('./Images/708.jpg'),
require('./Images/709.jpg'),
require('./Images/710.jpg'),
require('./Images/711.jpg'),
require('./Images/712.jpg'),
require('./Images/713.jpg'),
require('./Images/714.jpg'),
require('./Images/715.jpg'),
require('./Images/716.jpg'),
require('./Images/717.jpg'),
require('./Images/718.jpg'),
require('./Images/719.jpg'),
require('./Images/720.jpg'),
require('./Images/721.jpg'),
require('./Images/722.jpg'),
require('./Images/723.jpg'),
require('./Images/724.jpg'),
require('./Images/725.jpg'),
require('./Images/726.jpg'),
require('./Images/727.jpg'),
require('./Images/728.jpg'),
require('./Images/729.jpg'),
require('./Images/730.jpg'),
require('./Images/731.jpg'),
require('./Images/732.jpg'),
require('./Images/733.jpg'),
require('./Images/734.jpg'),
require('./Images/735.jpg'),
require('./Images/736.jpg'),
require('./Images/737.jpg'),
require('./Images/738.jpg'),
require('./Images/739.jpg'),
require('./Images/740.jpg'),
require('./Images/741.jpg'),
require('./Images/742.jpg'),
require('./Images/743.jpg'),
require('./Images/744.jpg'),
require('./Images/745.jpg'),
require('./Images/746.jpg'),
require('./Images/747.jpg'),
require('./Images/748.jpg'),
require('./Images/749.jpg'),
require('./Images/750.jpg'),
require('./Images/751.jpg'),
require('./Images/752.jpg'),
require('./Images/753.jpg'),
require('./Images/754.jpg'),
require('./Images/755.jpg'),
require('./Images/756.jpg'),
require('./Images/757.jpg'),
require('./Images/758.jpg'),
require('./Images/759.jpg'),
require('./Images/760.jpg'),
require('./Images/761.jpg'),
require('./Images/762.jpg'),
require('./Images/763.jpg'),
require('./Images/764.jpg'),
require('./Images/765.jpg'),
require('./Images/766.jpg'),
require('./Images/767.jpg'),
require('./Images/768.jpg'),
require('./Images/769.jpg'),
require('./Images/770.jpg'),
require('./Images/771.jpg'),
require('./Images/772.jpg'),
require('./Images/773.jpg'),
require('./Images/774.jpg'),
require('./Images/775.jpg'),
require('./Images/776.jpg'),
require('./Images/777.jpg'),
require('./Images/778.jpg'),
require('./Images/779.jpg'),
require('./Images/780.jpg'),
require('./Images/781.jpg'),
require('./Images/782.jpg'),
require('./Images/783.jpg'),
require('./Images/784.jpg'),
require('./Images/785.jpg'),
require('./Images/786.jpg'),
require('./Images/787.jpg'),
require('./Images/788.jpg'),
require('./Images/789.jpg'),
require('./Images/790.jpg'),
require('./Images/791.jpg'),
require('./Images/792.jpg'),
require('./Images/793.jpg'),
require('./Images/794.jpg'),
require('./Images/795.jpg'),
require('./Images/796.jpg'),
require('./Images/797.jpg'),
require('./Images/798.jpg'),
require('./Images/799.jpg'),
require('./Images/800.jpg'),
require('./Images/801.jpg'),
require('./Images/802.jpg'),
require('./Images/803.jpg'),
require('./Images/804.jpg'),
require('./Images/805.jpg'),
require('./Images/806.jpg'),
require('./Images/807.jpg'),
require('./Images/808.jpg'),
require('./Images/809.jpg'),
require('./Images/810.jpg'),
require('./Images/811.jpg'),
require('./Images/812.jpg'),
require('./Images/813.jpg'),
require('./Images/814.jpg'),
require('./Images/815.jpg'),
require('./Images/816.jpg'),
require('./Images/817.jpg'),
require('./Images/818.jpg'),
require('./Images/819.jpg'),
require('./Images/820.jpg'),
require('./Images/821.jpg'),
require('./Images/822.jpg'),
require('./Images/823.jpg'),
require('./Images/824.jpg'),
require('./Images/825.jpg'),
require('./Images/826.jpg'),
require('./Images/827.jpg'),
require('./Images/828.jpg'),
require('./Images/829.jpg'),
require('./Images/830.jpg'),
require('./Images/831.jpg'),
require('./Images/832.jpg'),
require('./Images/833.jpg'),
require('./Images/834.jpg'),
require('./Images/835.jpg'),
require('./Images/836.jpg'),
require('./Images/837.jpg'),
require('./Images/838.jpg'),
require('./Images/839.jpg'),
require('./Images/840.jpg'),
require('./Images/841.jpg'),
require('./Images/842.jpg'),
require('./Images/843.jpg'),
require('./Images/844.jpg'),
require('./Images/845.jpg'),
require('./Images/846.jpg'),
require('./Images/847.jpg'),
require('./Images/848.jpg'),
require('./Images/849.jpg'),
require('./Images/850.jpg'),
require('./Images/851.jpg'),
require('./Images/852.jpg'),
require('./Images/853.jpg'),
require('./Images/854.jpg'),
require('./Images/855.jpg'),
require('./Images/856.jpg'),
require('./Images/857.jpg'),
require('./Images/858.jpg'),
require('./Images/859.jpg'),
require('./Images/860.jpg'),
require('./Images/861.jpg'),
require('./Images/862.jpg'),
require('./Images/863.jpg'),
]
|
class Stack(object):
    """Minimal LIFO stack backed by a Python list."""

    def __init__(self):
        self._data = []

    def push(self, data):
        """Place *data* on top of the stack."""
        self._data.append(data)

    def pop(self):
        """Remove and return the top item, or None when the stack is empty."""
        if not self._data:
            return None
        return self._data.pop()

    def size(self):
        """Return the number of stored items."""
        return len(self._data)
class QueueUsing2Stacks(object):
    """FIFO queue built from two LIFO stacks (inbox/outbox scheme).

    The original implementation shuffled every element into a temp stack
    and back on *each* dequeue (O(n) per call). The standard two-stack
    queue drains the inbox into the outbox only when the outbox runs dry,
    so each element is moved at most once: amortized O(1) per operation.
    External behavior is unchanged: dequeue returns items in insertion
    order and None when the queue is empty.
    """

    def __init__(self):
        self._inbox = []   # receives enqueued items (newest on top)
        self._outbox = []  # holds items in dequeue order (oldest on top)

    def enqueue(self, data):
        """Append *data* to the back of the queue."""
        self._inbox.append(data)

    def dequeue(self):
        """Remove and return the front item, or None when the queue is empty."""
        if not self._outbox:
            # Transfer pending items; the pop/append reversal restores FIFO order.
            while self._inbox:
                self._outbox.append(self._inbox.pop())
        if not self._outbox:
            return None
        return self._outbox.pop()
# Demo: items dequeue in the same order they were enqueued (FIFO).
q = QueueUsing2Stacks()
for value in (1, 2, 3):
    q.enqueue(value)
for _ in range(3):
    print(q.dequeue())
|
# import Module.Utility
# import Module.ClickOnButton
# import Module.clickOnMenuAndSubmenu
# import Module.enterText
# import Class.SeleniumDriver
# import Module.verifyTextOnScreen
# import Module.selectDropDownOption
#
#
# Class.SeleniumDriver.getWebDriver()
# Class.SeleniumDriver.openUrl()
# Class.SeleniumDriver.login()
# Class.SeleniumDriver.gotodefaultframe()
# Module.clickOnMenuAndSubmenu.clickOnMenuAndSubmenu("Take a Call","Customer")
# Class.SeleniumDriver.gotocorrectframe()
# Module.enterText.enterText("MSISDN","40720011515")
# Module.ClickOnButton.clickOnButton("Search")
# Module.verifyTextOnScreen.verifyTextOnScreen("Verify Customer")
# Module.selectDropDownOption.selectDropDownOption("ID Number(10)","National ID")
# Module.selectDropDownOption.selectDropDownOption("Currency","USD")
# Module.ClickOnButton.clickOnButton("Pass")
# Module.ClickOnButton.clickOnButton("Verify")
# Module.ClickOnButton.clickOnButton("Customer Info")
#
#
# print(Module.Utility.ReadDataFromJsonFile("tool","browserType"))
# print(Module.Utility.ReadDataFromJsonFile("sut","url"))
#
#import pytest
import Class.Automation
import time
import Module.Utility
# idtype,idvalue = Module.Utility.CheckIfDefinedElementExistInRepo("text1","MSISDN")
#
# print(idtype)
# print(idvalue)
# def test():
# obj = Class.Automation.Automation()
# obj.openURL()
# obj.login()
# obj.clickOnLink("PPVGE as Operator Administrator")
# obj.clickOnLink("Platform as Platform Support & IITC")
# obj.clickOnLink("Tools")
# obj.clickOnLink("All tasks")
# obj.verifyTextOnScreen("Tasks")
# obj.clickOnButton("Create task")
# obj.verifyTextOnScreen("Task detail")
# obj.enterTextArea("Description", "Test Task")
# obj.clickOnButton("Create")
# obj.verifyTextOnScreen("The task was successfully created!")
# obj.clickOnLink("See task detail")
# obj.verifyTextOnScreen("Details")
# obj.logout()
# print("Success")
#
#
#
# test()
|
class Solution:
    def numDecodings(self, s):
        """Count the decodings of digit string *s*, where '1'..'26' map to letters.

        Bottom-up DP: O(n) time, O(n) space.
        """
        if not s:
            return 0
        if len(s) == 1 and int(s) > 0:
            return 1
        # A leading zero can never start a valid encoding.
        if int(s[0]) == 0:
            return 0
        counts = [0] * (len(s) + 1)
        counts[0] = counts[1] = 1
        for pos in range(2, len(s) + 1):
            # Decode the last two digits as one letter when they form 10..26.
            two_digit = int(s[pos - 2:pos])
            if 10 <= two_digit <= 26:
                counts[pos] = counts[pos - 2]
            # Decode the last digit alone unless it is '0'.
            if s[pos - 1] != '0':
                counts[pos] += counts[pos - 1]
        return counts[-1]
|
# Manual practice replicating IF_curve_LIF.py originally written by Brian2 team.
# Small changes (plotted variables and added input current) made
# Same as One_LIF.py, but uses 1000 neurons.
from brian2 import *
# Simulation parameters. NOTE: the names below (tau, dvdtExt, v0, n) are
# referenced by name inside Brian2 string expressions -- do not rename.
n = 1000
duration = 1 * second
tau = 10*ms  # membrane time constant
dvdtExt = 1.0*mV/ms # Every 0.1 mV/ms pushes effective v0 threshold for spiking down by 1 mV
# LIF membrane equation; `(unless refractory)` freezes v during the refractory period.
eqs = '''
dv/dt = dvdtExt + (v0-v)/tau : volt (unless refractory)
Iext : amp
v0 : volt
'''
# NOTE(review): Iext is declared in eqs but never set or used in dv/dt --
# confirm whether the "added input current" was meant to appear in the equation.
group = NeuronGroup(n, eqs, threshold = 'v > 10*mV', reset = 'v = 0*mV', refractory = 5*ms)
#initial voltage:
group.v = 0*mV
# Assign different v0 to each neuron: ramps linearly 0..20 mV across the group.
group.v0 = '20*mV * i / (n-1)' # crashes if n=1
monitor = SpikeMonitor(group)
run(duration)
# Raster plot: one dot per spike (spike time vs. neuron index).
plot(monitor.t/ms, monitor.i, '.k')
xlabel('spike time (ms)')
ylabel('neuron index')
show()
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):
    """South schema migration: drop the `desc` column from website_session."""

    def forwards(self, orm):
        # Deleting field 'Session.desc'
        db.delete_column(u'website_session', 'desc')

    def backwards(self, orm):
        # Adding field 'Session.desc' back so the migration can be reversed.
        db.add_column(u'website_session', 'desc',
                      self.gf('django.db.models.fields.CharField')(default=None, max_length=50),
                      keep_default=False)

    # Frozen ORM state that South uses to build the `orm` argument above.
    models = {
        u'website.session': {
            'Meta': {'object_name': 'Session'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '25'})
        },
        u'website.speaker': {
            'Meta': {'object_name': 'Speaker'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'session': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'speakers'", 'to': u"orm['website.Session']"})
        }
    }

    complete_apps = ['website']
#! /usr/bin/env python
from time import sleep, time
from .test_sequoia import TestSequoia
import pytest
# Verify that there are at least N images added after an InitiateCapture with N
# cameras activated. Do this for all combinations of activated sensors.
number_of_cameras = 5
# TODO: Put this function in a separate module of test helpers.
def initiate_capture(sequoia):
    """Start a capture on the device, retrying while it reports busy.

    Retries up to 10 times with a one-second pause between attempts and
    fails the test (assert) if the device never answers 'OK'.
    """
    response = sequoia.initiate_capture()
    for _ in range(10):
        if response.ResponseCode == 'OK':
            break
        sleep(1)
        response = sequoia.initiate_capture()
    if response.ResponseCode != 'OK':
        print(response)
    assert response.ResponseCode == 'OK', \
        'Could not initiate capture after 10 tries.'
    return response
def set_valid_mask(sequoia, mask):
    """Set PhotoSensorEnableMask to *mask*; return False when the mask is invalid.

    A rejected (invalid) sensor combination is a legitimate outcome, not an
    error. While the device is merely busy, retry up to 10 times one second
    apart and fail the test (assert) if it never accepts the value.
    """
    payload = sequoia._UInt32.build(mask)
    response = sequoia.set_device_prop_value('PhotoSensorEnableMask', payload)
    if response.ResponseCode == 'InvalidDevicePropValue':
        return False
    attempts = 0
    while response.ResponseCode != 'OK' and attempts < 10:
        attempts += 1
        sleep(1)
        response = sequoia.set_device_prop_value('PhotoSensorEnableMask', payload)
    if response.ResponseCode != 'OK':
        print(response)
    assert response.ResponseCode == 'OK', \
        'Could not set PhotoSensorEnableMask {}'.format(bin(mask))
    return True
def set_keep_on(sequoia, mask):
    '''Turn masked sensors on: enable *mask*, then set PhotoSensorsKeepOn to 1.'''
    set_valid_mask(sequoia, mask)
    # Poll up to 10 times, one second apart, until the property reads 1.
    tries = 0
    keep_on = sequoia.get_device_prop_desc('PhotoSensorsKeepOn')
    while keep_on.CurrentValue != 1 and tries < 10:
        tries += 1
        sleep(1)
        sequoia.set_device_prop_value(
            'PhotoSensorsKeepOn',
            sequoia._UInt32.build(1)
        )
        keep_on = sequoia.get_device_prop_desc('PhotoSensorsKeepOn')
    assert keep_on.CurrentValue == 1, \
        'Could not turn sensors on after 10 tries (10 s).'
def unset_keep_on(sequoia):
    '''Turn the sensors back off by clearing PhotoSensorsKeepOn.

    Polls once per second (at most ten times), re-sending KeepOn=0 until
    the device reports the property cleared; fails the test otherwise.
    '''
    state = sequoia.get_device_prop_desc('PhotoSensorsKeepOn')
    for _ in range(10):
        if state.CurrentValue == 0:
            break
        sleep(1)
        sequoia.set_device_prop_value(
            'PhotoSensorsKeepOn',
            sequoia._UInt32.build(0)
        )
        state = sequoia.get_device_prop_desc('PhotoSensorsKeepOn')
    assert state.CurrentValue == 0, \
        'Could not turn sensors off after 10 tries (10 s).'
class TestSequoiaEnableCapture(TestSequoia):
    '''Capture tests driven by PhotoSensorEnableMask and PhotoSensorsKeepOn.'''

    def test_keep_on(self, sequoia):
        '''Verify a PhotoSensorsKeepOn does not block the Sequoia.'''
        with sequoia.session():
            # 31 == 0b11111: enable every sensor before forcing them on.
            set_keep_on(sequoia, 31)
            unset_keep_on(sequoia)

    def test_keep_on_capture(self, sequoia):
        '''Verify that a capture finishes with sensors on.'''
        with sequoia.session():
            # Capture image and wait for CaptureComplete
            set_keep_on(sequoia, 31)
            capture = initiate_capture(sequoia)
            tic = time()
            while True:
                evt = sequoia.event()
                # Only the CaptureComplete belonging to our own
                # InitiateCapture transaction ends the wait.
                if (
                    evt and
                    evt.EventCode == 'CaptureComplete' and
                    evt.TransactionID == capture.TransactionID
                ):
                    break
                assert time() - tic <= 40,\
                    'Waited for 40 seconds before giving up.\n'\
                    'No CaptureComplete received for InitiateCapture.'
            # Clear KeepOn afterwards so the device can power down sensors.
            keep_on = sequoia.get_device_prop_desc('PhotoSensorsKeepOn')
            if keep_on.CurrentValue == 1:
                sequoia.set_device_prop_value(
                    'PhotoSensorsKeepOn',
                    sequoia._UInt32.build(0)
                )
                sleep(5)
            unset_keep_on(sequoia)

    @pytest.mark.parametrize(
        ('mask'),
        range(2**number_of_cameras),
    )
    def test_enable_capture(self, mask, sequoia):
        '''Verify that a capture with N enabled sensors produces N images.'''
        with sequoia.session():
            # Clear KeepOn first so the enable mask takes effect.
            # NOTE(review): unlike unset_keep_on, no retry here — assumes
            # the single set succeeds; confirm this is intentional.
            keep_on = sequoia.get_device_prop_desc('PhotoSensorsKeepOn')
            if keep_on.CurrentValue == 1:
                sequoia.set_device_prop_value(
                    'PhotoSensorsKeepOn',
                    sequoia._UInt32.build(0)
                )
            # If mask is invalid, skip.
            if not set_valid_mask(sequoia, mask):
                return
            # Capture image and count the ObjectAdded events.
            capture = initiate_capture(sequoia)
            acquired = 0  # image objects (TIFF / EXIF_JPEG) seen so far
            n_added = 0   # every ObjectAdded event, image or not
            expected = bin(mask).count('1')  # one image per enabled sensor
            tic = time()
            while acquired < expected:
                # Check events
                evt = sequoia.event()
                # If object added verify is it is an image
                if (
                    evt and
                    evt.TransactionID == capture.TransactionID and
                    evt.EventCode == 'ObjectAdded'
                ):
                    n_added += 1
                    info = sequoia.get_object_info(evt.Parameter[0])
                    if (
                        info and
                        ('TIFF' in info.ObjectFormat or
                         'EXIF_JPEG' in info.ObjectFormat)
                    ):
                        acquired += 1
                # Otherwise if the capture is complete, tally up.
                elif evt and evt.EventCode == 'CaptureComplete':
                    # NOTE(review): pytest.config was removed in pytest 5.0;
                    # confirm the project pins an older pytest version.
                    if not pytest.config.getoption('--ideal'):
                        pytest.skip('Use --ideal to check for non-PTP errors.')
                    assert acquired == expected,\
                        '{} images were expected than received. '\
                        'This is not a violation of PTP.'\
                        .format('More' if acquired < expected else 'Less')
                    return
                # Allow for sixty second delays in events... Though the
                # asynchronous event may take an indefinite amount of time,
                # anything longer than about ten seconds indicates there's
                # something wrong.
                assert time() - tic <= 40,\
                    'Waited for 40 seconds before giving up.\n'\
                    'No CaptureComplete received.\n'\
                    'Failed with {} images ({} ObjectAdded) for mask {} {} {}'\
                    .format(acquired, n_added, mask, hex(mask), bin(mask))
|
# Four employee records with basic contact and payroll details.
employee1 = {
    "name": "Ron Swanson",
    "age": 55,
    "department": "Management",
    "phone": "555-1234",
    "salary": "$87,000",
}
employee2 = {
    "name": "Leslie Knope",
    "age": 45,
    "department": "Middle Management",
    "phone": "555-4321",
    "salary": "$70,000",
}
employee3 = {
    "name": "Andy Dwyer",
    "age": 63,
    "department": "Shoe Shining",
    "phone": "555-1122",
    "salary": "$50,000",
}
employee4 = {
    "name": "April Ludgate",
    "age": 35,
    "department": "Administration",
    "phone": "555-3345",
    "salary": "$60,000",
}

# employees is the company directory
employees = [employee1, employee2, employee3, employee4]

# Print one contact line per employee, e.g.
# "Ron Swanson in Management can be reached at 555-1234."
for person in employees:
    print("{} in {} can be reached at {}.".format(
        person["name"], person["department"], person["phone"]))
|
# Generated by Django 3.0.8 on 2020-12-10 11:13
from django.db import migrations, models
class Migration(migrations.Migration):
    '''Alter attachment.orignal_filename to a CharField of max_length=260.

    NOTE: 'orignal_filename' (sic) presumably mirrors a misspelled field
    name on the model — verify against the model before "fixing" it here,
    since the name must match for the migration to apply.
    '''

    dependencies = [
        ('common', '0007_remove_attachment_attachmentonly'),
    ]

    operations = [
        migrations.AlterField(
            model_name='attachment',
            name='orignal_filename',
            field=models.CharField(max_length=260),
        ),
    ]
|
# Generated by Django 3.0.7 on 2020-09-02 18:07
from django.db import migrations
class Migration(migrations.Migration):
    '''Set unique_together on ProductPackaging to
    (product, conditioning, type_packaging).'''

    dependencies = [
        ('product', '0004_migration_product'),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name='productpackaging',
            unique_together={('product', 'conditioning', 'type_packaging')},
        ),
    ]
|
import matplotlib.pyplot as plt

print("Aqui")

# Bar positions and heights for each of the three groups.
x, y = [100, 50], [2, 10]
x1, x2 = [70, 80], [5, 8]
y1, y2 = [60, 90], [4, 7]

# Draw the three bar series; each label shows up in the legend.
plt.bar(x, y, label="Grupo 1")
plt.bar(x1, x2, label="Grupo 2")
plt.bar(y1, y2, label="Grupo 3")

plt.title("Grafico")
plt.legend()
plt.show()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.