text
stringlengths
38
1.54M
#!/bin/python3
# https://www.hackerrank.com/challenges/sherlock-and-anagrams

import math
import os
import random
import re
import sys
import string


def sherlockAndAnagrams(s):
    """Count pairs of substrings of *s* that are anagrams of each other.

    Two substrings are anagrams iff their letter-frequency signatures
    match, so every substring is bucketed by its signature and the answer
    is the sum of C(k, 2) over bucket sizes k.

    The frequency vector is extended one letter at a time as the substring
    grows from each start index, so building all signatures is O(n^2)
    (the original recounted each substring from scratch: O(n^3)).
    """
    base = ord('a')
    buckets = {}
    for start in range(len(s)):
        counts = [0] * 26
        for letter in s[start:]:
            counts[ord(letter) - base] += 1
            # tuple is hashable; dict and list aren't
            key = tuple(counts)
            buckets[key] = buckets.get(key, 0) + 1
    # k substrings sharing a signature contribute k*(k-1)/2 anagram pairs.
    return sum(k * (k - 1) // 2 for k in buckets.values())


if __name__ == '__main__':
    fptr = open(os.environ['OUTPUT_PATH'], 'w')
    q = int(input())
    for q_itr in range(q):
        s = input()
        result = sherlockAndAnagrams(s)
        fptr.write(str(result) + '\n')
    fptr.close()
import numpy as np
import cv2 as cv
from scipy import stats
import loaddata
import Method


def double2uint8(I, ratio=1.0):
    # Scale an image by `ratio`, round, and clamp into the displayable
    # 0-255 range as uint8 (what cv.imshow / image writers expect).
    return np.clip(np.round(I*ratio), 0, 255).astype(np.uint8)


def search(img):
    """Build a rough binary mask (values 0/255) of chroma-outlier pixels
    in a BGR image.

    Works in YCrCb space: a pixel is kept when its chroma (Cr/Cb) deviates
    from the frame's most common chroma value; a neighbourhood vote
    (match) then fills isolated holes.
    """
    img_YCC = cv.cvtColor(img,cv.COLOR_BGR2YCrCb)
    Y,Cr,Cb = cv.split(img_YCC)
    # cv.imshow('Y',Y)
    # cv.imshow('U',Cr)
    # cv.imshow('V',Cb)
    # cv.waitKey()
    # cv.destroyAllWindows()
    r = np.zeros((img.shape[0],img.shape[1]))
    # Most frequent Cr/Cb value — presumably the background chroma.
    # NOTE(review): the [0][0][0] indexing relies on the legacy
    # scipy.stats.mode return shape; newer scipy versions changed it.
    Cr_most = stats.mode(Cr)[0][0][0]
    Cb_most = stats.mode(Cb)[0][0][0]
    # Cast to int before subtracting to avoid uint8 wrap-around.
    Cr_diff = abs(Cr.astype(int) - Cr_most)
    Cb_diff = abs(Cb.astype(int) - Cb_most)
    Y_mean = np.mean(Y)
    Y_diff = Y - Y_mean        # NOTE(review): computed but never used
    diff = Cr_diff + Cb_diff
    diff_mean = np.mean(diff)  # NOTE(review): computed but never used
    # Threshold: mild chroma outliers become foreground, dark pixels are
    # suppressed, then strong chroma outliers override the darkness cut.
    r[(diff >= 5)] = 255
    r[(Y < 60)] = 0
    r[diff >= 15] = 255
    # Fill pixels that have >= 3 white 4-connected neighbours.
    # NOTE(review): r is modified in place during this scan, so pixels
    # turned on here influence later match() checks in the same pass.
    for i in range(r.shape[0]):
        for j in range(r.shape[1]):
            if match(r, i, j):
                r[i,j] = 255
    return r


def match(r, i, j):
    # True when at least 3 of the 4-connected neighbours of (i, j) are
    # white (255); border pixels never match.
    row = r.shape[0]
    col = r.shape[1]
    score = 0
    if i==0 or i==row-1 or j==0 or j==col-1:
        return False
    if r[i+1][j]==255:
        score += 1
    if r[i-1][j]==255:
        score += 1
    if r[i][j+1]==255:
        score += 1
    if r[i][j-1]==255:
        score += 1
    if score >= 3:
        return True
    else:
        return False


if __name__ == '__main__':
    # Run the detector over the first 100 dataset items and store the
    # resulting masks under ./train_detect/.
    datas = loaddata.Dataset()
    for i in range(100):
        print(i)
        img = datas.getitem(i)
        r = search(img)
        datas.writeitem('./train_detect/',double2uint8(r),i)
        # cv.imshow('r',r)
        # cv.waitKey(0)
        # cv.destroyAllWindows()
#!/usr/bin/env python
# coding=utf-8
import os
import select  # fix: used in rev_data but was never imported
import time    # fix: used in run but was never imported

from PyQt4 import QtCore
from squery import socketQuery


class MainControl(QtCore.QThread):
    """Background thread that connects to the socket server and pumps
    incoming length-prefixed packets ('<length>\\n<payload>') to process().
    """

    def __init__(self, parent=None):
        QtCore.QThread.__init__(self)
        self.sock = socketQuery()
        self.log = self.sock.log

    def init_gui(self, params=None):
        # Avoid a mutable default argument; None means "no parameters".
        if params is None:
            params = {}
        # Fix: the original referenced the undefined name `param`.
        self.emit(QtCore.SIGNAL("exec_cmd(QString)"),
                  "self.init_gui(" + str(params) + ")")

    def rev_data(self):
        """Receive loop: read the ASCII length header, then the payload."""
        while True:
            try:
                r, w, e = select.select([self.sock.sd], [], [], 0.0001)
                if self.sock.sd in r:
                    full_data = ""
                    try:
                        length = ''
                        while True:
                            # recv packet length, one byte at a time, up to
                            # the terminating newline
                            data = self.sock.sd.recv(1)
                            if (data == "\n"):
                                break
                            length = length + data
                        len_data = 0
                        # recv packet data by length; recv may return a
                        # short read, so request only what remains.
                        # Fix: the original subtracted len(data) (size of
                        # the last chunk) instead of the running total.
                        while (len_data != int(length)):
                            data = self.sock.sd.recv(int(length) - len_data)
                            len_data += len(data)
                            full_data = full_data + data
                        if full_data:
                            self.process(full_data)
                    except Exception as ex:
                        self.log.error("recv_data error when recv %s" % str(ex))
            except Exception as ex:
                self.log.error("rev_data error when select data %s" % str(ex))

    def process(self, data):
        self.log.info("processing %s" % data)

    def run(self):
        # Retry the connection up to 10 times (2 s apart) before giving up.
        i = 0
        while True:
            try:
                i += 1
                self.sock.setup()  # test if sock server is ok...
                break
            except:
                if i < 10:
                    time.sleep(2)
                    continue
                else:
                    # Fix: os.exit does not exist; os._exit terminates the
                    # process immediately without cleanup handlers.
                    os._exit(-1)
        self.rev_data()


# if __name__ == "__main__": pass
from .version import __version__ from .f2p import f2p, f2p_list, f2p_word from .f2p import dictionary as f2p_dictionary
from pymongo import MongoClient
from jobqueue import JobQueue
import unittest

host = 'localhost'
port = 27017
pair = '%s:%d' % (host, port)


class TestJobQueue(unittest.TestCase):
    """Integration tests for JobQueue against a local MongoDB instance."""

    @classmethod
    def setUpClass(cls):
        # Start every run from an empty 'jobqueue' collection.
        client = MongoClient(host, port)
        client.pymongo_test.jobqueue.drop()
        cls.db = client.pymongo_test

    def test_init(self):
        """A fresh JobQueue is valid and cannot be created twice."""
        jq = JobQueue(self.db)
        self.assertTrue(jq.valid())
        self.assertRaises(Exception, jq._create)
        jq.clear_queue()

    def test_valid(self):
        """A non-capped collection is not a valid job queue."""
        jq = JobQueue(self.db)
        jq.db['jobqueue'].drop()
        jq._create(capped=False)
        self.assertFalse(jq.valid())
        self.assertRaises(Exception, jq._create)
        jq.clear_queue()

    def test_publish(self):
        """pub() stores a job; publishing without a queue raises."""
        jq = JobQueue(self.db)
        job = {'message': 'hello world!'}
        jq.pub(job)
        # Fix: assertEquals is a deprecated alias of assertEqual
        # (removed entirely in Python 3.12).
        self.assertEqual(jq.queue_count(), 1)
        jq.clear_queue()
        jq.q = None  # erase the queue
        self.assertRaises(Exception, jq.pub, job)

    def test_next(self):
        """next() returns the oldest published job's payload."""
        jq = JobQueue(self.db)
        self.assertRaises(Exception, jq.next)
        job = {'message': 'hello world!'}
        jq.pub(job)
        row = jq.next()
        self.assertEqual(row['data']['message'], 'hello world!')
        jq.clear_queue()

    # def test_iter(self):
    #     jq = JobQueue(self.db)
    #     job = {'message': 'hello world!'}
    #     jq.pub(job)
    #     for job in jq:
    #         if job:
    #             self.assertTrue(True, "Found job")
    #             jq.clear_queue()
    #             return
    #     self.assertEquals(False, "No jobs found!")
    #     jq.clear_queue()


if __name__ == '__main__':
    unittest.main()
from __future__ import unicode_literals
from django.core.validators import MinValueValidator, MaxValueValidator
from django.db import models
from home import choices


class Race(models.Model):
    """A race that armies can belong to."""

    name = models.CharField(max_length=64)

    def __str__(self):
        return self.name


class Army(models.Model):
    """An army/faction, optionally tied to a Race, with a display icon."""

    name = models.CharField(max_length=64)
    icon = models.ImageField(upload_to='faction', null=True)
    race = models.ForeignKey(Race, null=True)

    def __str__(self):
        return self.name


class Role(models.Model):
    """A battlefield role; listings are ordered by display_order."""

    name = models.CharField(max_length=64)
    icon = models.ImageField(upload_to='role', null=True)
    # Manual sort key for presentation; see Meta.ordering below.
    display_order = models.SmallIntegerField(default=0)

    class Meta:
        ordering = ['display_order']

    def __str__(self):
        return self.name


class Keyword(models.Model):
    """A free-form keyword label."""

    name = models.CharField(max_length=64)

    def __str__(self):
        return self.name


class Unit(models.Model):
    """A unit belonging to an army, with a role, power rating and image."""

    name = models.CharField(max_length=64)
    # Power rating; constrained to be non-negative.
    power = models.SmallIntegerField(default=0,
                                     validators=[MinValueValidator(0)])
    role = models.ForeignKey(Role, null=True)
    army = models.ForeignKey(Army, related_name='figurines', null=True)
    image = models.ImageField(upload_to='uploaded_images', null=True,
                              blank=True)

    def __str__(self):
        return self.name

    @property
    def profile(self):
        """Convenience accessor: the unit's first Profile (or None)."""
        return self.profiles.all().first()


class Weapon(models.Model):
    """A weapon name."""

    name = models.CharField(max_length=64)

    def __str__(self):
        return self.name


class DegressiveProfile(models.Model):
    """Three-step degrading characteristics keyed by damage bracket.

    Suffixes _1/_2/_3 are the brackets delimited by the life_* thresholds:
    as a model loses wounds, its movement/melee/range/attacks values step
    down to the next bracket's value.
    """

    life_1 = models.CharField(max_length=32,
                              choices=choices.LargeNumericalEnum.CHOICES,
                              default=choices.LargeNumericalEnum.ZERO)
    life_2 = models.CharField(max_length=32,
                              choices=choices.LargeNumericalEnum.CHOICES,
                              default=choices.LargeNumericalEnum.ZERO)
    life_3 = models.CharField(max_length=32,
                              choices=choices.LargeNumericalEnum.CHOICES,
                              default=choices.LargeNumericalEnum.ZERO)
    movement_1 = models.CharField(max_length=32,
                                  choices=choices.NumericalEnum.CHOICES,
                                  default=choices.NumericalEnum.ZERO)
    movement_2 = models.CharField(max_length=32,
                                  choices=choices.NumericalEnum.CHOICES,
                                  default=choices.NumericalEnum.ZERO)
    movement_3 = models.CharField(max_length=32,
                                  choices=choices.NumericalEnum.CHOICES,
                                  default=choices.NumericalEnum.ZERO)
    melee_1 = models.CharField(max_length=32,
                               choices=choices.DiceSuccessEnum.CHOICES,
                               default=choices.DiceSuccessEnum.NONE)
    melee_2 = models.CharField(max_length=32,
                               choices=choices.DiceSuccessEnum.CHOICES,
                               default=choices.DiceSuccessEnum.NONE)
    melee_3 = models.CharField(max_length=32,
                               choices=choices.DiceSuccessEnum.CHOICES,
                               default=choices.DiceSuccessEnum.NONE)
    range_1 = models.CharField(max_length=32,
                               choices=choices.DiceSuccessEnum.CHOICES,
                               default=choices.DiceSuccessEnum.NONE)
    range_2 = models.CharField(max_length=32,
                               choices=choices.DiceSuccessEnum.CHOICES,
                               default=choices.DiceSuccessEnum.NONE)
    range_3 = models.CharField(max_length=32,
                               choices=choices.DiceSuccessEnum.CHOICES,
                               default=choices.DiceSuccessEnum.NONE)
    attacks_1 = models.CharField(max_length=32,
                                 choices=choices.NumericalDiceEnum.CHOICES,
                                 default=choices.NumericalDiceEnum.ZERO)
    attacks_2 = models.CharField(max_length=32,
                                 choices=choices.NumericalDiceEnum.CHOICES,
                                 default=choices.NumericalDiceEnum.ZERO)
    attacks_3 = models.CharField(max_length=32,
                                 choices=choices.NumericalDiceEnum.CHOICES,
                                 default=choices.NumericalDiceEnum.ZERO)


class Profile(models.Model):
    """A unit's statline; characteristic values are enum-backed strings.

    `degressive` optionally points at a DegressiveProfile carrying the
    bracketed (wound-dependent) values; SET_NULL keeps the Profile when
    the DegressiveProfile is deleted.
    """

    name = models.CharField(max_length=64)
    movement = models.CharField(max_length=32,
                                choices=choices.DegressiveNumericalEnum.CHOICES,
                                default=choices.DegressiveNumericalEnum.ZERO)
    melee = models.CharField(max_length=32,
                             choices=choices.DegressiveDiceSuccessEnum.CHOICES,
                             default=choices.DegressiveDiceSuccessEnum.NONE)
    range = models.CharField(max_length=32,
                             choices=choices.DegressiveDiceSuccessEnum.CHOICES,
                             default=choices.DegressiveDiceSuccessEnum.NONE)
    strength = models.CharField(max_length=32,
                                choices=choices.NumericalEnum.CHOICES,
                                default=choices.NumericalEnum.ZERO)
    toughness = models.CharField(max_length=32,
                                 choices=choices.NumericalEnum.CHOICES,
                                 default=choices.NumericalEnum.ZERO)
    life = models.CharField(max_length=32,
                            choices=choices.DegressiveLargeNumericalEnum.CHOICES,
                            default=choices.DegressiveLargeNumericalEnum.ZERO)
    attacks = models.CharField(max_length=32,
                               choices=choices.DegressiveNumericalDiceEnum.CHOICES,
                               default=choices.DegressiveNumericalDiceEnum.ZERO)
    command = models.CharField(max_length=32,
                               choices=choices.NumericalEnum.CHOICES,
                               default=choices.NumericalEnum.ZERO)
    armor = models.CharField(max_length=32,
                             choices=choices.DiceSuccessEnum.CHOICES,
                             default=choices.DiceSuccessEnum.NONE)
    invulnerability = models.CharField(max_length=32,
                                       choices=choices.DiceSuccessEnum.CHOICES,
                                       default=choices.DiceSuccessEnum.NONE)
    # Points cost; at least 1.
    points = models.SmallIntegerField(default=1,
                                      validators=[MinValueValidator(1)])
    unit = models.ForeignKey(Unit, null=True, related_name='profiles')
    degressive = models.ForeignKey(DegressiveProfile, null=True,
                                   related_name='base_profiles',
                                   on_delete=models.SET_NULL)

    def __str__(self):
        return self.name
import sys


def combine_lex_cols(source_files_name):
    """Merge two parallel syllable files into a tab-separated lexicon.

    Reads ``<source_files_name>.syl_en`` and ``<source_files_name>.syl_vie``
    line by line and writes ``<source_files_name>.lex`` where each line is
    ``<english>\\t<vietnamese>``. Pairing stops at the shorter file.

    Fixes: files are now opened with ``with`` so they are closed even if an
    exception occurs, and lines are paired with ``zip`` instead of indexing
    (which crashed with IndexError when the Vietnamese file was shorter).
    """
    with open(source_files_name + ".syl_en", "r") as syl_en_file:
        syls_en = [line.strip() for line in syl_en_file]
    with open(source_files_name + ".syl_vie", "r") as syl_vie_file:
        syls_vie = [line.strip() for line in syl_vie_file]
    with open(source_files_name + ".lex", "w") as syl_lex_file:
        for en, vie in zip(syls_en, syls_vie):
            syl_lex_file.write(en + "\t" + vie + "\n")
# -*- coding: utf-8 -*-
"""Coverage and daily-progress statistics over Submission queries."""
from apollo.settings import TIMEZONE
from collections import defaultdict
from datetime import datetime
from dateutil.rrule import rrule, DAILY
from logging import getLogger
from pytz import timezone
from sqlalchemy import and_, false, func, or_, not_
from sqlalchemy.orm import aliased, Load
from sqlalchemy.dialects.postgresql import array
from apollo.core import db
from apollo.locations.models import Location, LocationPath, LocationTypePath
from apollo.submissions.models import Submission
import pandas as pd

logger = getLogger(__name__)


def get_coverage(query, form, group=None, location_type=None):
    """Dispatch: per-group/per-location coverage when a group and location
    type are given, otherwise form-wide (global) coverage."""
    if group is None and location_type is None:
        return _get_global_coverage(query, form)
    else:
        return _get_group_coverage(query, form, group, location_type)


def _get_coverage_results(query, depth):
    """Count submissions grouped by their ancestor location at `depth`.

    Returns a list of (location_id, location_name, count) tuples.
    """
    ancestor_location = aliased(Location)
    location_closure = aliased(LocationPath)

    # Join each submission's location to its ancestor at the requested
    # closure-table depth, then count submissions per ancestor.
    dataset = query.join(
        location_closure,
        location_closure.descendant_id == Submission.location_id
    ).join(
        ancestor_location,
        ancestor_location.id == location_closure.ancestor_id
    ).filter(
        location_closure.depth == depth
    ).with_entities(
        ancestor_location,
        func.count(Submission.id)
    ).options(
        Load(ancestor_location).load_only('id', 'name_translations')
    ).group_by(ancestor_location.id).all()

    return [(item[0].id, item[0].name, item[1]) for item in dataset]


def _get_group_coverage(query, form, group, location_type):
    """Per-location Complete/Conflict/Missing/Partial/Offline counts for a
    single question group, aggregated at `location_type`'s depth."""
    coverage_list = []

    # check that we have data
    if not (db.session.query(query.exists()).scalar() and form and location_type):  # noqa
        return coverage_list

    group_tags = form.get_group_tags(group['name'])

    # get the location closure table depth from a sample submission's
    # location type up to the requested ancestor type
    sample_sub = query.first()
    sub_location_type = sample_sub.location.location_type
    try:
        depth_info = LocationTypePath.query.filter_by(
            ancestor_id=location_type.id,
            descendant_id=sub_location_type.id).one()
    except Exception:
        # TODO: replace with the proper SQLA exception classes
        return coverage_list

    # get conflict submissions first: reachable submissions whose
    # conflicts overlap this group's tags
    if group_tags:
        conflict_query = query.filter(
            Submission.conflicts != None,
            Submission.conflicts.has_any(array(group_tags)),
            Submission.unreachable == False)  # noqa
    else:
        conflict_query = query.filter(false())

    # missing: none of the group's tags answered, and not conflicting
    if group_tags:
        missing_query = query.filter(
            ~Submission.data.has_any(array(group_tags)),
            or_(
                Submission.conflicts == None,
                ~Submission.conflicts.has_any(array(group_tags))),
            Submission.unreachable == False)  # noqa
    else:
        missing_query = query

    # complete: every tag in the group answered (and not conflicting)
    if group_tags:
        complete_query = query.filter(
            or_(
                Submission.conflicts == None,  # noqa
                ~Submission.conflicts.has_any(array(group_tags))),
            Submission.data.has_all(array(group_tags)))
    else:
        complete_query = query.filter(false())

    # partial: some but not all of the group's tags answered
    if group_tags:
        partial_query = query.filter(
            or_(
                Submission.conflicts == None,
                ~Submission.conflicts.has_any(array(group_tags))),
            ~Submission.data.has_all(array(group_tags)),
            Submission.data.has_any(array(group_tags)),
            Submission.unreachable == False)  # noqa
    else:
        partial_query = query.filter(false())

    # offline: unreachable submissions that are not fully answered
    if group_tags:
        offline_query = query.filter(
            and_(
                Submission.unreachable == True,  # noqa
                not_(
                    and_(
                        Submission.data.has_all(array(group_tags)),
                        Submission.unreachable == True
                    )
                )
            ))
    else:
        offline_query = query.filter(false())

    # merge the five per-status aggregates into one record per location
    dataset = defaultdict(dict)
    for loc_id, loc_name, count in _get_coverage_results(
            complete_query, depth_info.depth):
        dataset[loc_name].update({
            'Complete': count,
            'id': loc_id,
            'name': loc_name
        })
    for loc_id, loc_name, count in _get_coverage_results(
            conflict_query, depth_info.depth):
        dataset[loc_name].update({
            'Conflict': count,
            'id': loc_id,
            'name': loc_name
        })
    for loc_id, loc_name, count in _get_coverage_results(
            missing_query, depth_info.depth):
        dataset[loc_name].update({
            'Missing': count,
            'id': loc_id,
            'name': loc_name
        })
    for loc_id, loc_name, count in _get_coverage_results(
            partial_query, depth_info.depth):
        dataset[loc_name].update({
            'Partial': count,
            'id': loc_id,
            'name': loc_name
        })
    for loc_id, loc_name, count in _get_coverage_results(
            offline_query, depth_info.depth):
        dataset[loc_name].update({
            'Offline': count,
            'id': loc_id,
            'name': loc_name
        })

    # emit records sorted by location name, zero-filling absent statuses
    for name in sorted(dataset.keys()):
        loc_data = dataset.get(name)
        loc_data.setdefault('Complete', 0)
        loc_data.setdefault('Conflict', 0)
        loc_data.setdefault('Missing', 0)
        loc_data.setdefault('Partial', 0)
        loc_data.setdefault('Offline', 0)
        coverage_list.append(loc_data)

    return coverage_list


def _get_global_coverage(query, form):
    """Form-wide Complete/Conflict/Missing/Partial/Offline counts, one
    record per question group."""
    coverage_list = []

    # check that we have data
    if not (db.session.query(query.exists()).scalar() and form):
        return coverage_list

    groups = form.data['groups']
    if not groups:
        return coverage_list

    for group in groups:
        group_tags = form.get_group_tags(group['name'])

        # same status partitions as _get_group_coverage, but counted
        # globally instead of per ancestor location
        if group_tags:
            conflict_query = query.filter(
                Submission.conflicts != None,
                Submission.conflicts.has_any(array(group_tags)),
                Submission.unreachable == False)  # noqa
        else:
            conflict_query = query.filter(false())
        if group_tags:
            missing_query = query.filter(
                or_(
                    ~Submission.conflicts.has_any(array(group_tags)),
                    Submission.conflicts == None),
                ~Submission.data.has_any(array(group_tags)),
                Submission.unreachable == False)  # noqa
        else:
            missing_query = query
        if group_tags:
            complete_query = query.filter(
                or_(
                    Submission.conflicts == None,  # noqa
                    ~Submission.conflicts.has_any(array(group_tags))),
                Submission.data.has_all(array(group_tags)))
        else:
            complete_query = query.filter(false())
        if group_tags:
            partial_query = query.filter(
                or_(
                    Submission.conflicts == None,
                    ~Submission.conflicts.has_any(array(group_tags))),
                ~Submission.data.has_all(array(group_tags)),
                Submission.data.has_any(array(group_tags)),
                Submission.unreachable == False)  # noqa
        else:
            partial_query = query.filter(false())
        if group_tags:
            offline_query = query.filter(
                and_(
                    Submission.unreachable == True,  # noqa
                    not_(
                        and_(
                            Submission.data.has_all(array(group_tags)),
                            Submission.unreachable == True
                        )
                    )
                ))
        else:
            offline_query = query.filter(false())

        data = {
            'Complete': complete_query.count(),
            'Conflict': conflict_query.count(),
            'Missing': missing_query.count(),
            'Partial': partial_query.count(),
            'Offline': offline_query.count(),
            'name': group['name'],
            'slug': group['slug']
        }
        coverage_list.append(data)

    return coverage_list


def get_daily_progress(query, event):
    """Count submissions per calendar day across the event window.

    Returns {date: count, ..., 'total': n}; days before the event start
    (resp. after the end) are folded into the first (resp. last) day.
    """
    query_with_entities = query.with_entities(Submission.participant_updated)
    df = pd.read_sql(
        query_with_entities.statement,
        query_with_entities.session.bind,
        index_col='participant_updated',
        parse_dates=['participant_updated']).tz_localize(TIMEZONE)
    df['count'] = 1

    # midnight (local tz) of the event's first and last day
    tz = timezone(TIMEZONE)
    start = tz.localize(datetime.combine(
        event.start.astimezone(tz), datetime.min.time()))
    end = tz.localize(
        datetime.combine(event.end.astimezone(tz), datetime.min.time()))

    # seed the frame with zero rows at start/end so resample('D') always
    # covers the full window, then count per day
    df_resampled = df.loc[df.index.notnull()].append(
        pd.DataFrame({'count': 0}, index=[start])).append(
        pd.DataFrame({'count': 0}, index=[end])).resample('D').sum()
    progress = df_resampled.truncate(before=start, after=end)
    # fold out-of-window submissions into the boundary days
    progress.loc[progress.index == start.strftime(
        '%Y-%m-%d'), 'count'] = int(
            df_resampled[df_resampled.index <= start].sum())
    progress.loc[progress.index == end.strftime(
        '%Y-%m-%d'), 'count'] = int(
            df_resampled[df_resampled.index >= end].sum())

    dp = {
        idx.date(): int(progress.loc[idx]['count'])
        for idx in progress.index
    }
    dp.update({'total': progress['count'].sum()})
    return dp


def event_days(event):
    """Calendar dates (local tz) from event start to end, inclusive."""
    tz = timezone(TIMEZONE)
    start = tz.localize(datetime.combine(
        event.start.astimezone(tz), datetime.min.time()))
    end = tz.localize(
        datetime.combine(event.end.astimezone(tz), datetime.min.time()))
    dates = [d.date() for d in rrule(DAILY, dtstart=start, until=end)]
    return dates


def get_stratified_daily_progress(query, event, location_type):
    """Daily progress (as in get_daily_progress) broken down by ancestor
    location of type `location_type`.

    Returns [{'name': location_name, 'data': {date: count, ..., 'total': n}}].
    """
    response = []
    ancestor_location = aliased(Location)
    location_closure = aliased(LocationPath)

    # resolve the closure depth between the submissions' location type
    # and the requested ancestor type
    sample_sub = query.first()
    sub_location_type = sample_sub.location.location_type
    depth_info = LocationTypePath.query.filter_by(
        ancestor_id=location_type.id,
        descendant_id=sub_location_type.id).first()

    if depth_info:
        _query = query.join(
            location_closure,
            location_closure.descendant_id == Submission.location_id).join(
            ancestor_location,
            ancestor_location.id == location_closure.ancestor_id
        ).filter(
            location_closure.depth == depth_info.depth
        ).with_entities(
            ancestor_location.name,
            ancestor_location.code,
            Submission.participant_updated
        ).options(
            Load(ancestor_location).load_only(
                'id', 'code', 'name_translations')
        ).group_by(ancestor_location.id, Submission.participant_updated)

        df = pd.read_sql(
            _query.statement,
            _query.session.bind,
            index_col=['participant_updated'],
            parse_dates=['participant_updated']).tz_localize(TIMEZONE)
        df['count'] = 1

        tz = timezone(TIMEZONE)
        start = tz.localize(datetime.combine(
            event.start.astimezone(tz), datetime.min.time()))
        end = tz.localize(
            datetime.combine(event.end.astimezone(tz), datetime.min.time()))

        # make sure every location of the requested type appears, even
        # with zero submissions
        # NOTE(review): the name column is accessed as 'getter' here —
        # presumably the label the hybrid name attribute gets in the SQL
        # result; confirm against the generated query.
        locations = Location.query.filter(
            Location.location_set == location_type.location_set,
            Location.location_type == location_type)
        for location in locations:
            if location.name not in df.loc[df.index.notnull()]['getter'].unique():  # noqa
                df = df.append(pd.DataFrame(
                    {'getter': location.name, 'count': 0, 'code': location.code},  # noqa
                    index=[start]))

        # per-location daily counts, ordered by location code
        df2 = df.loc[df.index.notnull()].groupby(['getter', 'code']).resample('D').sum()  # noqa
        df2 = df2.sort_index(level='code')
        df2.index = df2.index.droplevel('code')

        for location in df2.index.get_level_values(0).unique():  # noqa
            # same boundary-folding logic as get_daily_progress
            df_resampled = df2.loc[location].append(
                pd.DataFrame({'count': 0}, index=[start])).append(
                pd.DataFrame({'count': 0}, index=[end])).resample(
                'D').sum()
            progress = df_resampled.truncate(before=start, after=end)
            progress.loc[progress.index == start.strftime(
                '%Y-%m-%d'), 'count'] = int(
                    df_resampled[df_resampled.index <= start].sum())
            progress.loc[progress.index == end.strftime(
                '%Y-%m-%d'), 'count'] = int(
                    df_resampled[df_resampled.index >= end].sum())
            dp = {
                idx.date(): int(progress.loc[idx]['count'])
                for idx in progress.index
            }
            dp.update({'total': progress['count'].sum()})
            response.append({'name': location, 'data': dp})

    return response
# Exploratory mapping script: loads subbasin/gage/stream shapefiles,
# reprojects them to EPSG:4326 and renders an annotated watershed map
# (mosmmap.pdf). Notebook-style: many lines are bare inspection
# expressions that only show output in an interactive session.
from __future__ import (absolute_import, division, print_function)

import matplotlib as mpl
import matplotlib.pyplot as plt
# The two statements below are used mainly to set up a plotting
# default style that's better than the default from Matplotlib 1.x
# Matplotlib 2.0 supposedly has better default styles.
import seaborn as sns
plt.style.use('bmh')

from pathlib import Path
data_folder = Path('D:/OneDrive/AAMOSM2018/0828mapdata')
#data_folder = Path('C:/Users/langzx/OneDrive/AAMOSM2018/0828mapdata')

from shapely.geometry import Point, Polygon
from matplotlib.lines import Line2D
import pandas as pd
import geopandas as gpd
from geopandas import GeoSeries, GeoDataFrame
#import pyepsg

# bare expression: version check (interactive only)
mpl.__version__, pd.__version__, gpd.__version__

from shapely.wkt import loads
from fiona.crs import from_epsg
from pyproj import Proj
#Proj('init=epsg:4326', preserve_flags=True)

# Determine the CRS of the GeoDataFrame
GeoSeries([loads('POINT(1 2)'), loads('POINT(1.5 2.5)'), loads('POINT(2 3)')])

subbasin = gpd.read_file(data_folder/"subbasins.shp")
subbasin_p4 = subbasin.to_crs(epsg=4326)
subbasin.crs
subbasin.crs = from_epsg(4326)
subbasin_p4 = subbasin.to_crs(epsg = 4326)
# NOTE(review): the CRS is (re)assigned several times below; only the
# last assignment matters.
subbasin.crs = {'init' :'epsg:4326'}
subbasin.crs
subbasin['geometry']
#subbasin.plot( color='red', markersize=100, figsize=(4, 4))
gage = gpd.read_file(data_folder/"gage2.shp")
gs = GeoSeries([Point(-120, 45), Point(-121.2, 46), Point(-122.9, 47.5),Point(-122.9, 47.5),Point(-122.9, 47.5)])
#subbasin['Watershed_Zone'] = subbasin.watershed.map(str) + " " + subbasin.Zone
#subbasin = gpd.read_file("C:\\Users\langzx\\Onedrive\\AAMOSM2018\\newmapdata\\subs1.shp")
stream = gpd.read_file(data_folder/ "LeSueur_Streams.shp")

## project
subbasin.crs = {'init' :'epsg:4326'}
subbasin.crs
#subbasin_p4 = subbasin.to_crs(epsg=4326)
gage.crs = {'init' :'epsg:4326'}
#gage_p4.plot(marker='*', color='red', markersize=100, figsize=(4, 4))
#gage2_p4 = gage2.to_crs
stream.crs= {'init' :'epsg:4326'}

len(subbasin.columns.values)
subbasin.columns.values
type(subbasin['geometry'])
# representative_point() guarantees a label anchor inside each polygon
subbasin['coords'] = subbasin['geometry'].apply(lambda x: x.representative_point().coords[:])
subbasin['coords'] = [coords[0] for coords in subbasin['coords']]

# ---- first draft of the map ----
f, ax = plt.subplots(1, figsize=(20, 20))
ax.set_title('')
# Other nice categorical color maps (cmap) include 'Set2' and 'Set3'
subbasin.plot(ax = ax, column = 'watershed', linewidth=0.8, cmap='summer_r',edgecolor='#B3B3B3', legend = True)
stream.plot(ax = ax, edgecolor='blue')
gage.plot(ax = ax, marker='*', color='red', markersize=400)
ax.set_ylim([43.6, 44.3])
plt.axis('equal');

##Annotation
# NOTE(review): the `s=` keyword of annotate is the pre-3.3 Matplotlib
# signature; newer versions expect `text=`.
for idx, row in subbasin.iterrows():
    ax.annotate(s=row['Zone'], xy=row['coords'], verticalalignment='center')
#plt.axis('equal');
# NOTE(review): `cmap` (the callable plt.cm.summer_r) is only defined
# further down in this script — running top to bottom this raises
# NameError here.
custom_lines = [Line2D([0], [0], color=cmap(0.), lw=20),
                Line2D([0], [0], color=cmap(.5), lw=20),
                Line2D([0], [0], color=cmap(1.), lw=20),
                Line2D([0], [0], marker='*', color='w',markersize=20, markerfacecolor='r'),
                Line2D([0], [0], color='w',markersize=20, markerfacecolor='w'),
                Line2D([0], [0], color='w',markersize=20, markerfacecolor='w'),
                Line2D([0], [0], color='w',markersize=20, markerfacecolor='w')]
legend = ax.legend(custom_lines, ['COB', 'LES', 'MAP','Gages','Zone 1 = Upland','Zone 2 = Transitional','Zone 3 = Incised'], fontsize=20)
legend.get_frame().set_facecolor('white')

# NOTE(review): DataFrame.ix was removed in pandas 1.0 (use .loc/.iloc).
type(subbasin.ix[23, 'geometry'])
subbasin['coords'] = subbasin['geometry'].apply(lambda x: x.representative_point().coords[:])
subbasin['coords'] = [coords[0] for coords in subbasin['coords']]
subbasin.plot( cmap='summer_r', legend=True, k=3, figsize=(12, 15))
for idx, row in subbasin.iterrows():
    plt.annotate(s=row['Zone'], xy=row['coords'], horizontalalignment='center')
gage.plot( marker='*', color='red', markersize=200)
stream.plot(ax=ax, edgecolor='blue')

subbasin.plot(column='watershed', cmap='summer_r', k=3, figsize=(8, 10));
subbasin.plot(color='white', edgecolor='black', figsize=(8, 8));

# Visualize
ax = subbasin.plot()
# NOTE(review): `gdf` is never defined anywhere in this script — these
# lines raise NameError as written.
gdf.columns.values
gdf.plot(cmap='Set2', figsize=(10, 10));
ax = subbasin.plot(color='white', edgecolor='black')
ax.set_axis_off()
gdf.plot(ax = ax, edgecolor='white')
plt.show()

##################
# ---- final figure, saved to mosmmap.pdf ----
plt.figure(figsize=(20,5))
cmap = plt.cm.summer_r
t = ("Zone 1 = Upland\n"
     "Zone 2 = Transitional\n"
     "Zone 3 = Incised")
f, ax = plt.subplots(1, figsize=(20,20))
ax.set_facecolor('white')
ax.set_ylim([43.6, 44.3])
ax.grid(False)
# Other nice categorical color maps (cmap) include 'Set2' and 'Set3'
type(subbasin.ix[23, 'geometry'])
subbasin['coords'] = subbasin['geometry'].apply(lambda x: x.representative_point().coords[:])
subbasin['coords'] = [coords[0] for coords in subbasin['coords']]
subbasin.plot(ax=ax,column='watershed',linewidth=0.8, cmap='summer_r',edgecolor='#B3B3B3', legend = True)
gage.plot(ax = ax, marker='*', color='red', markersize=200)
stream.plot(ax = ax, edgecolor='blue')
for idx, row in subbasin.iterrows():
    ax.annotate(s=row['Zone'], xy=row['coords'], verticalalignment='center')
#plt.axis('equal');
custom_lines = [Line2D([0], [0], color=cmap(0.), lw=20),
                Line2D([0], [0], color=cmap(.5), lw=20),
                Line2D([0], [0], color=cmap(1.), lw=20),
                Line2D([0], [0], marker='*', color='w',markersize=20, markerfacecolor='r'),
                Line2D([0], [0], color='w',markersize=20, markerfacecolor='w'),
                Line2D([0], [0], color='w',markersize=20, markerfacecolor='w'),
                Line2D([0], [0], color='w',markersize=20, markerfacecolor='w')]
legend = ax.legend(custom_lines, ['COB', 'LES', 'MAP','Gages','Zone 1 = Upland','Zone 2 = Transitional','Zone 3 = Incised'], fontsize=20)
legend.get_frame().set_facecolor('white')
#ax.text(0.97, 0.87, t,
#        verticalalignment='top', horizontalalignment='right',
#        transform=ax.transAxes,
#        fontsize=15,wrap=True)
#ax.text(0.5,0.5,u'\u25B2 \nN ', ha='center', fontsize=20, family='Arial')
plt.savefig('mosmmap.pdf', bbox_inches='tight')

subbasin.crs
# Visualize
ax = subbasin.plot()
gdf.columns.values
gdf.plot(cmap='Set2', figsize=(10, 10));
ax = subbasin.plot(color='white', edgecolor='black')
ax.set_axis_off()
gdf.plot(ax = ax, edgecolor='white')
plt.show()
import warnings
import sys
import pandas as pd
import numpy as np
import mlflow
import mlflow.pyfunc
import fbprophet
from fbprophet import Prophet
from fbprophet.diagnostics import cross_validation
from fbprophet.diagnostics import performance_metrics
from stream import db_mem

# NOTE(review): import-time side effect — points every run in this
# process at a local MLflow tracking server.
mlflow.set_tracking_uri("http://localhost:5000")

import logging
logging.basicConfig(level=logging.WARN)
logger = logging.getLogger(__name__)


class FbProphetWrapper(mlflow.pyfunc.PythonModel):
    """pyfunc wrapper so a fitted Prophet model can be logged to and
    served from MLflow."""

    def __init__(self, model):
        # `model` is a fitted fbprophet.Prophet instance.
        self.model = model
        super().__init__()

    def load_context(self, context):
        # The import ensures fbprophet is importable when the model is
        # re-loaded in a serving environment; nothing else to restore.
        from fbprophet import Prophet
        return

    def predict(self, context, model_input):
        # Delegate straight to Prophet.predict; `model_input` is the
        # future dataframe Prophet expects.
        return self.model.predict(model_input)


def train_model(df, _name, _entity, _index):
    """Fit a Prophet model on `df`, cross-validate it, log metrics and
    the wrapped model to MLflow, then register the resulting model URI
    for `_entity` via db_mem.gerarModel.

    df: Prophet training frame (ds/y columns expected by Prophet.fit).
    _name: artifact key used when logging the model.
    """
    warnings.filterwarnings("ignore")
    np.random.seed(40)

    # Useful for multiple runs (only doing one run in this sample notebook)
    with mlflow.start_run():
        m = Prophet()
        m.fit(df)

        # Evaluate Metrics
        print("Executando a validação cruzada...")
        df_cv = cross_validation(m, initial='10 days', horizon="4 days", period="2 days")
        print("Processando a performance do dados...")
        df_p = performance_metrics(df_cv)

        # Print out metrics
        print("Prophet model :")
        print(" CV: \n%s" % df_cv.head())
        print(" Perf: \n%s" % df_p.head())

        # Log parameter, metrics, and model to MLflow.
        # NOTE(review): the loop re-logs the same metric keys once per
        # daily horizon bucket, so MLflow records them as successive
        # steps of one run.
        df_p_mean = df_p.groupby(pd.Grouper(key='horizon', freq='D')).mean()
        for index, row in df_p_mean.iterrows():
            mlflow.log_metric("rmse", row.rmse)
            mlflow.log_metric("mape", row.mape)
            mlflow.log_metric("mse", row.mse)
            mlflow.log_metric("mae", row.mae)

        mlflow.pyfunc.log_model(_name, python_model=FbProphetWrapper(m))
        model_uri = "runs:/{run_id}/{key}".format(run_id=mlflow.active_run().info.run_id, key=_name)
        print("Logged model with URI: {uri}".format(uri=model_uri))
        db_mem.gerarModel(_entity, model_uri, _index)
from datetime import datetime

from django.contrib.auth.decorators import login_required, permission_required
from django.contrib.auth.mixins import LoginRequiredMixin, PermissionRequiredMixin
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render, redirect
from django.contrib.auth.models import User
from django.views import View

from .models import Announcement
from .forms import AnnouncementForm
from account.models import Account, Announcer
from account.utils import prepare_context
from schedule.models import Station


# Create your views here.
def dashboard(request):
    """Landing page: announcements newest-first plus all stations."""
    context = prepare_context(request, show_navbar=True)
    # Fix: the prepared context used to be discarded by a plain
    # re-assignment; merge the page data into it instead.
    context.update({
        'show_navbar': True,
        'announcement': Announcement.objects.order_by("-announce_time"),
        'stations': Station.objects.all()
    })
    return render(request, 'dashboard.html', context=context)


class ViewAnnouncement(LoginRequiredMixin, PermissionRequiredMixin, View):
    """List all announcements; flags whether the viewer is an announcer."""
    permission_required = 'announcement.view_announcement'

    def get(self, request):
        context = prepare_context(request, show_navbar=True)
        user = request.user
        context['user'] = user
        account = Account.objects.get(user_id_id=user.id)
        # announcer = Announcer.objects.get(user_id=account.id)
        context['announcement'] = Announcement.objects.all()
        context['is_announcer'] = account.user_type == 'AN'
        return render(request, 'announcement/viewAnnouncement.html', context=context)


class CreateAnnouncement(LoginRequiredMixin, PermissionRequiredMixin, View):
    """Create an announcement on behalf of the logged-in announcer."""
    permission_required = 'announcement.add_announcement'

    def get(self, request):
        context = prepare_context(request, show_navbar=True)
        user = request.user
        # Fix: `account` was referenced below without ever being fetched
        # (NameError); resolve it the same way post() does.
        account = Account.objects.get(user_id_id=user.id)
        # announcer = Announcer.objects.get(user_id=account.id)
        context['announcement'] = Announcement.objects.filter(
            announcer_user_id=account.id)
        context['user'] = user
        context['announcement_form'] = AnnouncementForm()
        return render(request, 'announcement/createAnnouncement.html', context=context)

    def post(self, request):
        context = prepare_context(request, show_navbar=True)
        announcement_form = AnnouncementForm(request.POST)
        user = request.user
        account = Account.objects.get(user_id_id=user.id)
        if announcement_form.is_valid():
            data_form = announcement_form.clean()
            # Fix: a cleaned CharField yields '' when left blank, never
            # None, so the old `is None` check could never fire.
            if not data_form['announce_text']:
                context['error'] = {
                    'errorMsg': 'Please fill in the announcement form'
                }
            else:
                announcement = Announcement.objects.create(
                    announce_text=data_form['announce_text'],
                    announce_time=datetime.now(),
                    view_count=0,
                    announcer_user_id=account.id
                )
                announcement.save()
                return redirect('/announcement/')
        context['announcement_form'] = announcement_form
        return render(request, 'announcement/createAnnouncement.html', context=context)


class EditAnnouncement(LoginRequiredMixin, PermissionRequiredMixin, View):
    """Edit an existing announcement's text and view counter."""
    permission_required = 'announcement.change_announcement'

    def get(self, request, announcement_id):
        context = prepare_context(request, show_navbar=True)
        # announcer = Announcer.objects.get(user_id=account.id)
        context['announcement_form'] = AnnouncementForm()
        context['announcement'] = Announcement.objects.get(
            pk=announcement_id)
        return render(request, 'announcement/editAnnouncement.html', context=context)

    def post(self, request, announcement_id):
        context = prepare_context(request, show_navbar=True)
        announcement = Announcement.objects.get(pk=announcement_id)
        text = request.POST.get('announce_text')
        count = request.POST.get('view_count')
        if text != announcement.announce_text:
            announcement.announce_text = text
        # Fix: POST values are strings, so the old str-vs-int comparison
        # was always True and assigned a str to the integer field; coerce
        # before comparing and storing.
        if count is not None and count.isdigit() and int(count) != announcement.view_count:
            announcement.view_count = int(count)
        announcement.announce_time = datetime.now()
        announcement.save()
        return redirect('/announcement/')


@login_required()
@permission_required('announcement.delete_announcement')
def DeleteAnnouncement(request, announcement_id):
    """Delete the announcement and bounce back to the referring page."""
    announcement = Announcement.objects.get(pk=announcement_id)
    announcement.delete()
    return HttpResponseRedirect(request.META.get('HTTP_REFERER'))
from google.appengine.ext import db


class FileMetadata(db.Model):
    """Datastore record describing a single user-uploaded blob.

    Tracks who uploaded a file, when, where it came from, and links to the
    MapReduce outputs derived from it.  Entities are keyed as
    'user..date..blob_key' so a contiguous key-range scan yields every file
    belonging to one user; '..' serves as the separator because that
    character pair can never occur inside an e-mail address.
    """

    # Separator inside composite key names (illegal in an e-mail address).
    __SEP = ".."
    # Sorts immediately after __SEP, so it upper-bounds a user's key range.
    __NEXT = "./"

    owner = db.UserProperty()
    filename = db.StringProperty()
    uploadedOn = db.DateTimeProperty()
    source = db.StringProperty()
    blobkey = db.StringProperty()
    wordcount_link = db.StringProperty()
    index_link = db.StringProperty()
    phrases_link = db.StringProperty()

    @staticmethod
    def getFirstKeyForUser(username):
        """Return the lowest key that could belong to *username*.

        Pair with getLastKeyForUser to bound a table scan.  The returned
        key itself never addresses real user data.
        """
        return db.Key.from_path("FileMetadata", username + FileMetadata.__SEP)

    @staticmethod
    def getLastKeyForUser(username):
        """Return the highest key that could belong to *username*.

        Pair with getFirstKeyForUser to bound a table scan.  The returned
        key itself never addresses real user data.
        """
        return db.Key.from_path("FileMetadata", username + FileMetadata.__NEXT)

    @staticmethod
    def getKeyName(username, date, blob_key):
        """Build the composite key name 'user..date..blob_key' for one item."""
        sep = FileMetadata.__SEP
        return sep.join((username, str(date), blob_key))
# -*- coding: utf-8 -*- # flake8: noqa from .tslibs import ( iNaT, NaT, Timestamp, Timedelta, OutOfBoundsDatetime, Period)
from django.urls import path
from .views import Pizzatypeview,Pizzasizeview,Pizzatoppingview,makepizza\
    ,PizzaToppingDeleteView,filterpizza,getpizzabyid,PizzaSizeDeleteView,PizzaTypeDeleteView

# URL routing for the pizza API.
urlpatterns = [
    # List/create endpoints for the three building blocks of a pizza.
    path('pizzatype/',Pizzatypeview.as_view()),
    path('pizzasize/',Pizzasizeview.as_view()),
    path('pizzatopping/',Pizzatoppingview.as_view()),
    # Compose/list complete pizzas.
    path('makepizza/',makepizza.as_view()),
    # Deletion is keyed by the natural string value, not by pk.
    path('deletetopping/<str:topping>', PizzaToppingDeleteView.as_view()),
    path('deletesize/<str:size>', PizzaSizeDeleteView.as_view()),
    path('deletetype/<str:type>', PizzaTypeDeleteView.as_view()),
    # Retrieve a single pizza by numeric id.
    path('makepizza/<int:id>', getpizzabyid.as_view()),
    # Filtering: the viewset maps GET onto the named handler method.
    path('filtertype/<str:type>', filterpizza.as_view({'get': 'get_selected_pizza_type'})),
    path('filtersize/<str:size>', filterpizza.as_view({'get': 'get_selected_pizza_size'})),
]
def decode(instructions, posmax):
    """Binary-space-partition a boarding-pass fragment into a seat index.

    'F'/'L' keep the lower half of the current range, 'B'/'R' the upper
    half.  *posmax* is the highest index (127 for rows, 7 for columns).
    The original used float midpoints and builtin-shadowing names
    (``min``/``max``); this version is pure integer arithmetic with the
    same results for power-of-two ranges.
    """
    lo, hi = 0, posmax
    for step in instructions[:-1]:
        mid = (lo + hi + 1) // 2
        if step in ('F', 'L'):
            hi = mid - 1
        else:
            lo = mid
    # The final character picks one endpoint of the two-seat range.
    return lo if instructions[-1] in ('F', 'L') else hi


def getMySeat(occupiedSeats):
    """Return the one free seat whose neighbours are occupied.

    Leading free seats (before the first occupied one) form a chain from
    id 0 and are skipped; the first free seat that does NOT extend that
    chain is the gap we are looking for.
    """
    occupied = set(occupiedSeats)  # O(1) membership instead of O(n) list scans
    previousUnoccupiedId = -1
    for row in range(128):
        for col in range(8):
            seat_id = row * 8 + col
            if seat_id in occupied:
                continue
            if (seat_id - 1) != previousUnoccupiedId:
                return seat_id
            previousUnoccupiedId = seat_id


def main(path="Z:\donnees\developpement\Python\AdventOfCode\day5.txt"):
    """Read boarding passes from *path* and print the three puzzle answers."""
    occupiedSeats = []
    highestID = 0
    # `with` replaces the original open/close pair, so the handle is
    # released even if decoding raises.
    with open(path, "r") as f:
        for line in f:
            boarding = list(line.rstrip("\n"))
            row = decode(boarding[:7], 127)
            col = decode(boarding[7:], 7)
            seat_id = 8 * row + col
            if seat_id > highestID:
                highestID = seat_id
            occupiedSeats.append(seat_id)
    print("HighestId = {0}".format(highestID))
    print("Nomber of seats Occupied = {0}".format(len(occupiedSeats)))
    print("My seat ID = {0}".format(getMySeat(occupiedSeats)))


if __name__ == '__main__':
    # Guarding the I/O keeps the module importable (e.g. for testing).
    main()
""" @author: Vuong Quoc Viet @version: 1.0 @since: Sep 27, 2017 """ from database.dynamodb.list_rating import ListRating class Rating: def __init__(self): self.__connection = ListRating() def get_list(self): return self.__connection.get_rating()
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat May  4 00:20:09 2019

@author: xie

Reads n, then two n-element integer lists, and prints how many matched
odd/even pairs can be formed across the two lists.
"""

n = int(input())
a = list(map(int, input().split()))
b = list(map(int, input().split()))

# Tally odd elements per list; the rest are even.
odd_a = sum(1 for v in a if v % 2 == 1)
odd_b = sum(1 for v in b if v % 2 == 1)
even_a = n - odd_a
even_b = n - odd_b

# Pair a's odds with b's evens and vice versa.
print(min(odd_a, even_b) + min(odd_b, even_a))
from django.shortcuts import render
from .forms import BlogPostModelForm
from .models import BlogPost
from django.http import HttpResponse

# Create your views here.


def list_blogs(request):
    """Show every blog post."""
    posts = BlogPost.objects.all()
    return render(request, "blog_list.html", {"blogs": posts})


def create_blog(request):
    """Render the creation form; persist the post when it validates."""
    form = BlogPostModelForm(request.POST or None)
    if form.is_valid():
        form.save()
        # Present a blank form so the page is ready for the next post.
        form = BlogPostModelForm()
    return render(request, "create_blog.html", {"form": form})


def read_blog(request, slug):
    """Show a single post looked up by slug."""
    matches = BlogPost.objects.filter(slug=slug)
    blog = matches.first() if matches.count() >= 1 else matches
    return render(request, "readblog.html", {"blog": blog})


def update_blog(request, slug):
    """Edit an existing post in place."""
    blog = BlogPost.objects.get(slug=slug)
    form = BlogPostModelForm(request.POST or None, instance=blog)
    if form.is_valid():
        form.save()
    return render(request, "update_blog.html", {"blog": blog, "form": form})
#!/usr/bin/env python
"""Count monotone lattice paths from (0, 0) to (n, n).

Ported from Python 2 (``print`` statements no longer parse on Python 3).
Only right (+x) and down (+y) moves are taken; the left/bottom guards are
therefore unreachable with the current recursion but kept for symmetry.
"""

# 3x3 scratch grid; only printed, never written by the path counter.
grid = [[0 for _ in range(3)] for _ in range(3)]


def print_grid(n):
    """Print grid[0..n][0..n] one cell per line."""
    for i in range(0, n + 1):
        for j in range(0, n + 1):
            print("grid[%d][%d] = %d" % (i, j, grid[i][j]))


def robot_paths(x, y, n):
    """Return the number of right/down paths from (x, y) to (n, n).

    For (0, 0) -> (n, n) this is the central binomial coefficient
    C(2n, n).  Traces every visit to stdout, as the original did.
    """
    print("at grid[%d][%d]" % (x, y))

    # Reached the target corner: exactly one path (the empty one).
    if x == n and y == n:
        print("\treached end")
        return 1

    # Out-of-bounds guards; with right/down moves only the first and last
    # can actually fire.
    if y >= n + 1:
        print("\ttop")
        return 0
    elif y <= -1:
        print("\tbot")
        return 0
    elif x <= -1:
        print("\tleft")
        return 0
    elif x >= n + 1:
        print("\tright")
        return 0

    return robot_paths(x + 1, y, n) + robot_paths(x, y + 1, n)


print_grid(2)
count = robot_paths(0, 0, 2)
print(count)
import fileinput

# Buffer every input line first, then classify each number by parity.
buffered = [entry for entry in fileinput.input()]

for entry in buffered:
    print("Bob" if int(entry) % 2 == 0 else "Alice")
# The implementation is adopted from U-2-Net, made publicly available under the Apache 2.0 License
# source code available via https://github.com/xuebinqin/U-2-Net
from .u2net import U2NET
# Loop drills: counting up/down with for and while.

# 1..10
for i in range(1, 11):
    print(i)

# odd numbers below 20
for j in range(1, 20, 2):
    print(j)

# 10 down to 1
for k in range(10, 0, -1):
    print(k)

# multiples of 3 below 33
for l in range(3, 33, 3):
    print(l)
else:
    # for/else: runs because the loop above finishes without a break.
    print("process terminated")

# while basic loop
i = 1
while i <= 10:
    print(i)
    i = i + 1

# write a program: print 1 to n
n = int(input("enter the number"))
i = 1
while i <= n:
    print(i)
    i = i + 1

# write a program to print from 10 to 1
i = 10
while i >= 1:
    print(i)
    i = i - 1

# sum of the first n numbers; `total` replaces the original `sum`,
# which shadowed the builtin sum()
n = int(input("enter the number"))
i = 1
total = 0
while i <= n:
    total = total + i
    i = i + 1
print("sum=", total)
# Add teams here to collect data from for rotogrinders.py teams = ['TEN', 'IND', 'ARI', 'NEP', 'CLE', 'JAC', 'MIA', 'NYJ', 'LVR', 'ATL', 'NYG', 'CIN', 'LAC', 'BUF', 'CAR', 'MIN', 'BAL', 'PIT']
from setuptools import setup


def _read_requirements(path):
    """Return the non-empty, non-comment requirement lines from *path*.

    Replaces ``pip.req.parse_requirements``, which was never a stable
    interface and was removed from pip's public API (pip >= 10), making
    this setup.py uninstallable on modern toolchains.  Lines starting
    with '#' (comments) or '-' (pip options such as -r/-e) are skipped.
    """
    with open(path) as fp:
        return [
            line.strip()
            for line in fp
            if line.strip() and not line.strip().startswith(('#', '-'))
        ]


reqs = _read_requirements('requirements/main.txt')

setup(
    name='txes2',
    version='0.1.6',
    description="An Elasticsearch client for Twisted",
    keywords='twisted elasticsearch',
    author='Lex Toumbourou',
    author_email='lextoumbourou@gmail.com',
    url='https://github.com/lextoumbourou/txes2',
    license='BSD License',
    packages=['txes2'],
    include_package_data=True,
    zip_safe=False,
    install_requires=reqs,
    classifiers=[
        'License :: OSI Approved :: BSD License',
        'Programming Language :: Python',
    ]
)
# coding: utf-8

# Cross Validation
"""Train a BiLSTM-CRF slot tagger on one ATIS cross-validation fold.

Pipeline: load fold 1, dump aligned word/NE/label columns to text files,
extract template-based unigram features, then train for up to 50 epochs
with per-epoch F1 evaluation on the validation split.
"""

import os
import gzip
import pickle

import numpy as np
import tensorflow as tf

from utils.utils import FeatureExtractor, LSTMCRFeeder, conll_format
from model.BiLSTMCRF import BiLSTMCRFModel


def atisfold(fold):
    """Load one of the five ATIS CV folds: (train, valid, test, dicts)."""
    assert fold in range(5)
    fp = gzip.open('data/atis.fold' + str(fold) + '.pkl.gz')
    # iso-8859-1 decoding for byte strings inside the pickle — presumably
    # the files were produced under Python 2; confirm if regenerating them.
    train_set, valid_set, test_set, dicts = pickle.load(fp, encoding='iso-8859-1')
    return train_set, valid_set, test_set, dicts


def atisfull():
    """Load the full (non-CV) ATIS pickle: (train, test, dicts)."""
    with open('data/atis.pkl', 'rb') as fp:
        train_set, test_set, dicts = pickle.load(fp, encoding='iso-8859-1')
    return train_set, test_set, dicts


train_set, valid_set, test_set, dicts = atisfold(1)
# train_set, test_set, dicts = atisfull()

# Index dictionaries: words, named-entity tables and slot labels.
w2idx, ne2idx, la2idx = dicts['words2idx'], dicts['tables2idx'], dicts['labels2idx']

# Inverted lookups (id -> string) for writing readable data files.
idx2w = {w2idx[k]: k for k in w2idx}
idx2ne = {ne2idx[k]: k for k in ne2idx}
idx2la = {la2idx[k]: k for k in la2idx}

train_x, train_ne, train_la = train_set
val_x, val_ne, val_la = valid_set
test_x, test_ne, test_la = test_set


def dump_data(prefix, x, ne, la):
    """Write aligned word/NE/label columns; sentences separated by blank lines."""
    wlength = 35  # fixed column width
    with open('data/%s.data' % prefix, 'w') as fp:
        for sw, se, sl in zip(x, ne, la):
            for a, b, c in zip(sw, se, sl):
                fp.write(idx2w[a].ljust(wlength) + idx2ne[b].ljust(wlength) + idx2la[c].ljust(wlength) + '\n')
            fp.write('\n')


print('Dump data...')
dump_data('train', train_x, train_ne, train_la)
dump_data('valid', val_x, val_ne, val_la)
dump_data('test', test_x, test_ne, test_la)

print('Load data...')
fe = FeatureExtractor()
fe.parse_template('data/template')
template_vocab_dir = 'dev/template.vocabs'
if os.path.exists(template_vocab_dir):
    # Reuse previously built feature vocabularies.
    fe.construct_vocabs_from_file(template_vocab_dir)
else:
    os.mkdir(template_vocab_dir)
    fe.construct_vocabs_from_data('data/train.data')
    fe.save_vocabs(template_vocab_dir)

'''
[train_size, max_length, feat_size]
feat_size: All unigram features
'''
train_feats = fe.extract_features('data/train.data')
val_feats = fe.extract_features('data/valid.data')
test_feats = fe.extract_features('data/test.data')

print('Load model...')

num_classes = len(la2idx.keys())
# Pad every sequence to the longest sentence seen in train/test.
max_length = max(
    max(map(len, train_la)),
    max(map(len, test_la)),
)

vocab_size = len(w2idx)

model = BiLSTMCRFModel(False, fe.feat_size, vocab_size, 128, 256, num_classes, max_length, 0.00001, 0.001, 1.0)

print('Start training...')

max_epoch = 50

# saver = tf.train.Saver()
# feeder = LSTMCRFeeder(train_x, train_feats, train_la, max_length, model.feat_size, 64)
# tokens, feats, labels = feeder.feed()

sess = tf.Session()
sess.run(tf.global_variables_initializer())

# Batches of 64, padded to max_length.
feeder = LSTMCRFeeder(train_x, train_feats, train_la, max_length, model.feat_size, 64)

# Pretrained word embeddings.
emb = np.load('data/emb.npy')
model.init_embedding(sess, emb)

pre_f1 = []
for epoch in range(1, max_epoch + 1):
    loss = 0
    for step in range(feeder.step_per_epoch):
        tokens, feats, labels = feeder.feed()
        step_loss, l2 = model.train_step(sess, tokens, feats, labels)
        loss += step_loss
        print('epoch: %d, size: %d/%d, step_loss: %f, epoch_loss: %f' %
              (epoch, feeder.offset, feeder.size, step_loss, loss)
              )

    # Per-epoch evaluation on the validation fold.
    tokens, feats = feeder.test(val_x, val_feats)
    pred = model.test(sess, tokens, feats)
    f1 = conll_format(val_x, val_la, pred, idx2w, idx2la, 'valid')
    '''
    if len(pre_f1) < 5:
        pre_f1.append(f1)
    else:
        # Early Stop
        avg = sum(pre_f1) / len(pre_f1)
        if avg > f1:
            print("\nFind best epoch = %d, best F1 = %f" % (epoch, f1))
            break
        else:
            pre_f1.pop(0)
            pre_f1.append(f1)

    print("\nEpoch %d, F1 = %f" % (epoch, f1))
    '''
    # Early stopping above is disabled; just record the (epoch, F1) history.
    pre_f1.append((epoch, f1))
    print("\nEpoch %d, F1 = %f" % (epoch, f1))

    # tokens, feats, length = feeder.predict(test_x[0], test_feats[0])
    # labels = test_la[0]
    # pred, scores = model.decode(sess, tokens, feats, length, 10)
    # print('{:<20} {}'.format('golden', labels.tolist()))

    # saver.save(sess, 'checkpoints/model.ckpt', global_step=model.global_step)
    print('')
    feeder.next_epoch()

'''
print('Predict...')

tokens, feats = feeder.test(test_x, test_feats)
pred = model.test(sess, tokens, feats)

print('Dump conll format...')
conll_format(test_x, test_la, pred, idx2w, idx2la, 'test')

compare = np.array(list(map(lambda zz: np.array_equal(zz[0], zz[1]), zip(test_la, pred))))
error_idx = np.where(compare == False)[0]


def eval(idx):
    tokens, feats, length = feeder.predict(test_x[idx], test_feats[idx])
    model.decode(sess, tokens, feats, length)
    print('{:<20} {}'.format('golden', test_la[idx].tolist()))


def count_in(topK):
    total = len(error_idx)
    total_in = 0
    for idx in error_idx:
        tokens, feats, length = feeder.predict(test_x[idx], test_feats[idx])
        pred, scores = model.decode(sess, tokens, feats, length, topK)
        golden_la = test_la[idx].tolist()
        if golden_la in pred:
            total_in += 1

    print('{}/{} {}'.format(total_in, total, total_in / total))
'''
from django.contrib.auth import get_user_model
from django.db import models

from webapp.validators import MinLengthValidators, str_value


class BaseModel(models.Model):
    """Abstract base adding created/updated timestamps to every model."""

    created_at = models.DateTimeField(auto_now_add=True)  # set once on insert
    updated_at = models.DateTimeField(auto_now=True)      # refreshed on every save

    class Meta:
        abstract = True


class Status(models.Model):
    """Workflow status a task (List) can be in."""

    name = models.CharField(max_length=40, verbose_name='Статус')

    def __str__(self):
        return f'{self.name}'


class Types(models.Model):
    """Category tag attachable to tasks."""

    type = models.CharField(max_length=50, verbose_name='Типы')

    class Meta:
        db_table = 'types'
        verbose_name = 'Тип'
        verbose_name_plural = 'Типы'

    def __str__(self):
        return f'{self.type}'


class List(BaseModel):
    """A task belonging to a project."""

    project = models.ForeignKey(
        'webapp.Project',
        on_delete=models.CASCADE,
        related_name='lists',
        verbose_name='Проект',
        null=False,
        blank=False
    )
    title = models.CharField(max_length=20, blank=False, null=False, verbose_name='заголовка',
                             validators=[MinLengthValidators(2), str_value])
    description = models.TextField(max_length=300, null=False, blank=False, verbose_name='Описание',
                                   validators=[MinLengthValidators(10), str_value])
    # PROTECT: a status still referenced by tasks cannot be deleted.
    status = models.ForeignKey('webapp.Status', null=False, blank=False, related_name='lists',
                               verbose_name='Status', on_delete=models.PROTECT)
    types = models.ManyToManyField('webapp.Types', related_name='lists', blank=False)
    about_list = models.TextField(max_length=3000, null=True, blank=True,
                                  validators=[MinLengthValidators(5), str_value])

    class Meta:
        db_table = 'the_lists'
        verbose_name = 'Задача'
        verbose_name_plural = 'Задачи'

    def __str__(self):
        return f'{self.id}, {self.status},{self.created_at}'


class Project(BaseModel):
    """A project grouping tasks and participating users."""

    begin_at = models.DateField(blank=False, null=False, verbose_name='Дата начало')
    end_at = models.DateField(blank=True, null=True, verbose_name='Дата окончания')
    # Fix: ``null=True`` has no effect on a ManyToManyField (Django raises
    # warning fields.W340), so it was removed; ``blank=True`` alone already
    # allows an empty selection in forms.
    user = models.ManyToManyField(get_user_model(), blank=True, related_name='projects',
                                  verbose_name='Пользователь')
    title = models.CharField(max_length=30, blank=False, null=False, verbose_name='заголовка',
                             validators=[MinLengthValidators(2), str_value])
    description = models.TextField(max_length=300, null=False, blank=False, verbose_name='Описание',
                                   validators=[MinLengthValidators(10), str_value])

    class Meta:
        db_table = 'project'
        verbose_name = 'Проект'
        verbose_name_plural = 'Проекты'
        permissions = [
            ('add_delete_user', 'удаление, добавление пользователя из проекта'),
        ]
""" In this simple RPG game, the hero fights the goblin. He has the options to: 1. fight goblin 2. do nothing - in which case the goblin will attack him anyway 3. flee """ from time import sleep from random import random, choice from characters import * from party import * from battle import * from store import * battle_engine = Battle() shopping_engine = Store() print("Welcome to another land. What is your name?") user_name = input("> ") user_hero = Hero(user_name) medic = Medic() brock = HumanShield() hero_party = Party([user_hero, medic, brock]) enemies = [Goblin, Wizard, Zombie, Shadow] while Battle.battles_remaining > 0: enemy_party = [] while len(enemy_party) < 3: enemy_party.append(choice(enemies)()) enemy_party = Party(enemy_party) print("Encountered enemy party!") print("Prepare to battle a %s, %s, and %s!" % (enemy_party.party[0].name, enemy_party.party[1].name, enemy_party.party[2].name)) if not battle_engine.do_battle(hero_party, enemy_party): print("Game Over...") break shopping_engine.go_shopping(user_hero) print("You Win!")
from model import RNNLM_Model
import os
import pickle
import tensorflow as tf
import numpy as np
import argparse
import sys
sys.path.append('..')
from config import get_configs, experiment_path

parser = argparse.ArgumentParser()
parser.add_argument("--experiment", "-e", type=int, default=None,
                    help="Which experiment dump to use")
# NOTE(review): argparse's type=bool treats any non-empty string (even
# "False") as True.  Left unchanged to preserve the CLI; consider
# action="store_true" in a follow-up.
parser.add_argument("--verbose", "-v", type=bool, default=False,
                    help="Also dump a text version of each parameter")
args = parser.parse_args()


def dump_trained_weights(experiment, verbose):
    """Restore the trained RNNLM for *experiment* and dump its weights.

    Rebuilds the graph (only variable values are checkpointed, not the
    graph itself), restores the checkpoint, pulls the named weight
    tensors out as numpy arrays and hands them to dump_weights.
    """
    config = get_configs(experiment)

    # Still need to load the model to build the graph; the graph is not saved.
    RNNLM_Model(config)

    init = tf.global_variables_initializer()
    saver = tf.train.Saver()

    with tf.Session() as session:
        session.run(init)
        saver.restore(session, os.path.join(experiment_path, str(experiment),
                                            "tf_dump", 'rnnlm.weights'))

        # LSTM gate weights/biases plus embedding and output projection.
        # (The original listed 'HMo' twice — a copy/paste duplicate; dict
        # assignment made it harmless, and it is dropped here.)
        dump_vars = ['HMi', 'HMf', 'HMo', 'HMg',
                     'IMi', 'IMf', 'IMo', 'IMg',
                     'bi', 'bf', 'bo', 'bg', 'LM', 'b2']
        if config.share_embedding:
            dump_vars += ['PM']
        else:
            dump_vars += ['UM']

        if config.V_table:
            # Per-segment embedding tables replace the single 'LM' matrix.
            dump_vars.remove('LM')
            for i, seg in enumerate(config.embedding_seg):
                if i != 0:
                    dump_vars += ['VT{}'.format(i)]
                dump_vars += ['LM{}'.format(i)]

        weight_dict = tf_weights_to_np_weights_dict(session, dump_vars)

        if config.D_softmax:
            # Instead save the full patched embedding: split each block in
            # "LM" into a list of matrices, one per embedding segment.
            blocks = []
            col_s = 0
            for size, s, e in config.embedding_seg:
                if e is None:
                    e = weight_dict['LM'].shape[0]
                blocks.append(weight_dict['LM'][s:e, col_s:col_s + size])
                col_s += size
            weight_dict['LM'] = blocks

        weight_dump_dir = os.path.join(experiment_path, str(experiment), "weights")
        dump_weights(weight_dict, weight_dump_dir, verbose)


def tf_weights_to_np_weights_dict(session, names):
    """Evaluate, for each name, the first matching non-optimizer variable.

    A lookup typically matches variables like
    [<tf.Variable 'Hammer/LSTM/HMi:0' shape=(512, 512) dtype=float32_ref>,
     <tf.Variable 'Hammer/training/Hammer/LSTM/HMi/Adam:0' ...>,
     <tf.Variable 'Hammer/training/Hammer/LSTM/HMi/Adam_1:0' ...>]
    so Adam slot variables are excluded by filtering out 'training'.
    """
    weights = {}  # renamed from `dict`, which shadowed the builtin
    for name in names:
        matches = [v for v in tf.global_variables()
                   if name in v.name and 'training' not in v.name]
        print(matches)
        weights[name] = session.run(matches[0])
    return weights


def dump_weights(weights_dict, dump_dir, verbose):
    """Pickle the whole weights dict; optionally dump each matrix as text."""
    if not os.path.exists(dump_dir):
        os.makedirs(dump_dir)

    # `with` ensures the pickle handle is flushed and closed (the original
    # leaked the file object returned by open()).
    with open(os.path.join(dump_dir, "lstm_weights.pkl"), "wb") as fh:
        pickle.dump(weights_dict, fh)
    print("lstm weights dict dumped in {}".format(dump_dir))

    if verbose:
        for name, m in weights_dict.items():
            if type(m) is list:
                # Segmented embedding: one text file per block.
                for i, item in enumerate(m):
                    sub_name = '{}{}'.format(name, i)
                    print("dumped {} rows {}".format(sub_name, item.shape))
                    np.savetxt(os.path.join(dump_dir, sub_name + '.txt'), item)
            else:
                print("dumped {} rows {}".format(name, m.shape))
                np.savetxt(os.path.join(dump_dir, name + '.txt'), m)
        build_embedding_with_word(dump_dir)


def build_embedding_with_word(dump_dir):
    """Prefix every embedding row of LM.txt with its word id -> embedding.txt."""
    with open(os.path.join(dump_dir, "embedding.txt"), 'w') as output:
        with open(os.path.join(dump_dir, "LM.txt"), 'r') as f:
            for i, line in enumerate(f.readlines()):
                output.write('{} {}'.format(i, line))
    print("embedding with word id is dumped")


if __name__ == "__main__":
    if args.experiment is not None:
        dump_trained_weights(args.experiment, args.verbose)
print("Tablas de multiplicar del 1 al 12") for i in range(1, 13): for o in range(1, 13): mult = i * o print("{} * {} = {}".format(i, o, mult))
from django.db import models

# Create your models here.


class Post(models.Model):
    """A simple blog post."""

    title = models.CharField(max_length=250)
    description = models.TextField()
    # Bug fix: the field records creation time, but auto_now=True re-stamps
    # it on *every* save().  auto_now_add=True sets it exactly once, at
    # insert time, which matches the field's name and intent.
    createdAt = models.DateTimeField(auto_now_add=True)
from PyQt5.QtWidgets import QWidget, QMessageBox, QInputDialog
from PyQt5 import uic
from question import QuestionScreen
from new_question import NewQuestionScreen
from editor import EditScreen
from console import ConsoleScreen
import sys
import sqlite3


class HomeScreen(QWidget):  # home screen
    """Main menu: start, create, edit or delete tests stored in database.sqlite."""

    def __init__(self):
        super().__init__()
        # Shared SQLite connection/cursor reused by every handler below.
        self.con = sqlite3.connect("database.sqlite")
        self.cur = self.con.cursor()
        # Next free test id = number of distinct tests + 1 (1 when empty).
        self.test_counter = 1 if not self.cur.execute("SELECT DISTINCT test_id FROM qwe").fetchall() else \
            len(self.cur.execute("SELECT DISTINCT test_id FROM qwe").fetchall()) + 1
        self.initUI()

    def initUI(self):
        # Load the Designer layout and wire each button to its handler.
        uic.loadUi('home.ui', self)
        self.start.clicked.connect(self.start_func)
        self.create_test.clicked.connect(self.createFunc)
        self.pushButton_3.clicked.connect(self.ex)
        self.edit_btn.clicked.connect(self.edit)
        self.edit_2.clicked.connect(self.del_all)
        self.pushButton_4.clicked.connect(self.data_base)

    def start_func(self):
        """Validate the chosen test id / user name, then open the test screen."""
        class IdException(Exception):
            pass

        class NameException(Exception):
            pass

        try:
            # fetchall() yields 1-tuples, hence the (value,) comparison.
            if (self.idSpinBox.value(),) not in self.cur.execute("SELECT DISTINCT test_id FROM qwe").fetchall():
                raise IdException
            elif self.lineEdit.text() == '':
                raise NameException
        except IdException:
            QMessageBox.question(self, 'Error', "Теста с таким id ещё не создано!", QMessageBox.Ok,
                                 QMessageBox.Ok)
        except NameException:
            QMessageBox.question(self, 'Error', "Поле имя не может быть пустым!", QMessageBox.Ok,
                                 QMessageBox.Ok)
        else:
            buttonReply = QMessageBox.question(self, 'Подтверждение',
                                               f"Вы хотите пройти тест с id {self.idSpinBox.value()} "
                                               f"под именем {self.lineEdit.text()}?",
                                               QMessageBox.No | QMessageBox.Yes, QMessageBox.No)
            if buttonReply == QMessageBox.Yes:
                self.st = QuestionScreen(self.idSpinBox.value(), self.lineEdit.text(), self)
                self.st.move(self.x(), self.y())
                self.st.show()
                self.hide()

    def edit(self):
        """Pick an existing test id and open it in the editor screen."""
        tests = [str(x[0]) for x in self.cur.execute("SELECT DISTINCT test_id FROM qwe").fetchall()]
        if len(tests):
            test_id, ok_pressed = QInputDialog.getItem(
                self, "Редактировние", "Выберите тест:", tests, 0, False)
            if ok_pressed:
                self.ed = EditScreen(test_id, self)
                self.ed.move(self.x(), self.y())
                self.ed.show()
                self.hide()

    def createFunc(self):
        """Ask for an author name and open the test-creation screen."""
        self.name, self.ok = QInputDialog.getText(self, "Введите имя", "Как вас зовут?")
        if self.ok:
            self.st = NewQuestionScreen(self.name, self)
            self.st.move(self.x(), self.y())
            self.st.show()
            self.hide()

    def ex(self):
        """Confirm, then close the DB connection and quit the application."""
        buttonReply = QMessageBox.question(self, 'Exit', "Вы точно собираетесь выйти?",
                                           QMessageBox.Yes | QMessageBox.No, QMessageBox.No)
        if buttonReply == QMessageBox.Yes:
            self.con.close()
            sys.exit(0)

    def del_all(self):
        """Confirm, then wipe both the questions (qwe) and results (res) tables."""
        buttonReply = QMessageBox.question(self, 'DELETE ALL', "Вы уверены, что хотите удалить ВСЁ?",
                                           QMessageBox.Yes | QMessageBox.No, QMessageBox.No)
        if buttonReply == QMessageBox.Yes:
            self.cur.execute("DELETE FROM qwe")
            self.cur.execute("DELETE FROM res")
            self.con.commit()
            # Hide/show forces showEvent to refresh the on-screen counters.
            self.hide()
            self.show()

    def data_base(self):
        # Open the raw SQL console window.
        self.cn = ConsoleScreen(self)
        self.cn.show()

    def showEvent(self, ev):
        """Refresh the name field and test counters whenever the window appears."""
        self.lineEdit.setText('')
        self.test_counter = len(self.cur.execute("SELECT DISTINCT test_id FROM qwe").fetchall()) + 1
        self.label_6.setText(
            f'Тестов в системе: {str(len(self.cur.execute("SELECT DISTINCT test_id FROM qwe").fetchall()))}')
        return QWidget.showEvent(self, ev)
import json
import logging
from typing import Optional, TypedDict

from redis import Redis
from solders.rpc.responses import RpcConfirmedTransactionStatusWithSignature

from src.solana.solana_client_manager import SolanaClientManager

logger = logging.getLogger(__name__)


class CachedProgramTxInfo(TypedDict):
    # Signature of latest transaction that has been processed
    signature: str
    # Slot corresponding to tx signature
    slot: int
    # Block time of latest transaction on chain
    timestamp: int


# Cache the latest value committed to DB in redis
# Used for quick retrieval in health check
def cache_latest_sol_db_tx(
    redis: Redis, cache_key: str, latest_tx: CachedProgramTxInfo
):
    """Store the newest DB-committed transaction info under *cache_key* as JSON."""
    try:
        redis.set(cache_key, json.dumps(latest_tx))
    except Exception as e:
        # Log with full context, then propagate — callers decide how to recover.
        logger.error(
            f"cache_solana_program.py | Failed to cache key {cache_key} latest processed transaction {latest_tx}, {e}"
        )
        raise e


def get_latest_sol_db_tx(redis: Redis, cache_key: str):
    """Read back the JSON written by cache_latest_sol_db_tx; None on a cache miss."""
    value = redis.get(cache_key)
    latest_sol_db: Optional[CachedProgramTxInfo] = json.loads(value) if value else None
    return latest_sol_db


# Function that ensures we always cache the latest known transaction in redis
# Performed outside of lock acquisition
# Ensures a lock held for a long time (usually during catchup scenarios)
# does not prevent a refresh of latest known transaction
def fetch_and_cache_latest_program_tx_redis(
    solana_client_manager: SolanaClientManager,
    redis: Redis,
    program: str,
    cache_key: str,
):
    # Only the single most recent signature for the program is needed.
    transactions_history = solana_client_manager.get_signatures_for_address(
        program, before=None, limit=1
    )
    transactions_array = transactions_history.value
    if transactions_array:
        # Cache latest transaction from chain
        cache_latest_sol_play_program_tx(
            redis, program, cache_key, transactions_array[0]
        )


# Cache the latest chain tx value in redis
# Represents most recently seen value from the sol program
def cache_latest_sol_play_program_tx(
    redis: Redis,
    program: str,
    cache_key: str,
    tx: RpcConfirmedTransactionStatusWithSignature,
):
    """Serialize an on-chain signature status into the CachedProgramTxInfo shape."""
    try:
        sig = tx.signature
        slot = tx.slot
        timestamp = tx.block_time
        redis.set(
            cache_key,
            json.dumps({"signature": str(sig), "slot": slot, "timestamp": timestamp}),
        )
    except Exception as e:
        logger.error(
            f"cache_solana_program.py |Failed to cache sol program {program} latest transaction {tx}, {e}"
        )
        raise e


def get_cache_latest_sol_program_tx(redis: Redis, cache_key: str):
    """Read back the on-chain value cached above; None on a cache miss.

    NOTE(review): body is identical to get_latest_sol_db_tx apart from the
    key passed in — kept separate, presumably for call-site clarity.
    """
    value = redis.get(cache_key)
    latest_sol_db: Optional[CachedProgramTxInfo] = json.loads(value) if value else None
    return latest_sol_db
import turtle
import pandas

# US-states quiz: the player names states, which get written onto the map
# image at their CSV coordinates; typing "Exit" saves the remaining states.
screen = turtle.Screen()
image = "blank_states_img.gif"
screen.addshape(image)
turtle.shape(image)

# def get_mouse_click_coor(x,y):
#     print(x,y)
#
# turtle.onscreenclick(get_mouse_click_coor)

# is_game_on = True
# while is_game_on:

# CSV columns: state, x, y (screen coordinates for each state's label).
data = pandas.read_csv("50_states.csv")
all_states = data["state"].to_list()
print(all_states)

# print(data[data["state"] == guess])
# print(map_locaton.x)
# print(map_locaton.y)
# print(map_locaton.state)

guessed_state = []
while len(guessed_state) < 50:
    # NOTE(review): textinput returns None if the dialog is cancelled, which
    # would crash .title() below — confirm the desired behaviour.
    answer_state = screen.textinput(title=f"{len(guessed_state)}/50 states correct ",
                                    prompt="whats another states name?")
    guess = answer_state.title()
    map_location = data[data["state"] == guess]
    # NOTE(review): every guess (even wrong ones and 'Exit') is appended, so
    # the "correct" counter in the title also counts misses.
    guessed_state.append(guess)
    if guess == 'Exit':
        # Dump the states not yet guessed for later study, then stop.
        missing_state = []
        for state in all_states:
            if state not in guessed_state:
                missing_state.append(state)
        print(missing_state)
        new_states = pandas.DataFrame(missing_state)
        new_states.to_csv("new_states_to_learn.csv")
        break
    if guess in all_states:
        # Write the state's name at its map position with an invisible turtle.
        t = turtle.Turtle()
        t.hideturtle()
        t.pu()
        t.goto(int(map_location.x), int(map_location.y))
        t.write(map_location.state.item())

# screen will remain even after completing the execution
# turtle.mainloop()
screen.exitonclick()
#
# @lc app=leetcode.cn id=51 lang=python3
#
# [51] N皇后
#
from typing import List


# @lc code=start
class Solution:
    def solveNQueens(self, n: int) -> List[List[str]]:
        """Return every distinct n-queens placement as a list of boards.

        Backtracks row by row; columns and both diagonal families are
        tracked in O(1) with flag arrays.
        """
        cols = [0] * n                      # cols[c] == 1: column c attacked
        hill_diagonals = [0] * (2 * n - 1)  # "/" diagonals: row + col is constant
        dale_diagonals = [0] * (2 * n - 1)  # "\" diagonals: row - col is constant
                                            # (negative indices wrap; keys stay unique)
        res = []

        def could_place(row, col):
            # A queen fits iff its column and both diagonals are free.
            return not cols[col] and not hill_diagonals[row + col] and not dale_diagonals[row - col]

        def place_queen(row, col):
            cols[col] = 1
            hill_diagonals[row + col] = 1
            dale_diagonals[row - col] = 1

        def remove_queen(row, col):
            cols[col] = 0
            hill_diagonals[row + col] = 0
            dale_diagonals[row - col] = 0

        def backtrack(n, row, cur_state):
            # cur_state[i] is the column of the queen already placed on row i.
            if row >= n:
                res.append(cur_state)
                return
            for col in range(n):
                if could_place(row, col):
                    place_queen(row, col)
                    backtrack(n, row + 1, cur_state + [col])
                    remove_queen(row, col)

        def gr(n):
            # Render each solution in res as n strings of '.' with one 'Q'.
            board = []
            for s in res:
                for c in s:
                    board.append("." * c + "Q" + "." * (n - c - 1))
            return [board[i:i + n] for i in range(0, len(board), n)]

        backtrack(n, 0, [])
        return gr(n)
# @lc code=end


print(Solution().solveNQueens(4))
# -*- coding:utf-8 -*-
"""Word-ladder search: shortest chain of single-letter edits between words.

Ported to Python 3 — the original used the Python 2 ``print`` statement
and therefore failed to even parse on a modern interpreter.
"""
from collections import defaultdict
from collections import deque


class Change:
    def countChanges(self, dic, n, s, t):
        """Return the minimum number of one-letter edits turning *s* into *t*
        while staying inside the word list *dic*, or -1 when *t* is
        unreachable.  *n* is unused and kept only for interface compatibility.
        """
        newDic = createNewDic(dic, len(s))
        # The path includes both endpoints, so edits = len(path) - 1.  An
        # empty path therefore yields -1 (previously bfsSearch returned
        # False on failure, making len(False) raise TypeError).
        return len(self.bfsSearch(newDic, s, t)) - 1

    def bfsSearch(self, dic, source, target):
        """Breadth-first search from *source*; return the full word path to
        *target* (endpoints included), or [] when no path exists."""
        queue = deque()
        queue.append(source)
        visitedSet = set()
        visitedSet.add(source)
        backMap = {}  # word -> predecessor, for path reconstruction
        while len(queue) != 0:
            strNow = queue.popleft()
            oneEditedStrs = getOneEditedWord(strNow, dic, visitedSet)
            for oneEditS in oneEditedStrs:
                if oneEditS == target:
                    backMap[oneEditS] = strNow
                    return getPath(backMap, target)
                queue.append(oneEditS)
                visitedSet.add(oneEditS)
                backMap[oneEditS] = strNow
        return []  # target unreachable


def getOneEditedWord(s, dic, visitedSet):
    """All unvisited dictionary words exactly one letter away from *s*."""
    res = []
    for i in range(0, len(s)):
        reStr = s[:i] + "*" + s[i+1:]
        matchStrs = dic[reStr]
        for matchStr in matchStrs:
            if matchStr == s or matchStr in visitedSet:
                continue
            res.append(matchStr)
    return res


def getPath(backMap, target):
    """Reconstruct the source -> target path by walking predecessor links."""
    if target is None:
        return []
    paths = getPath(backMap, backMap.get(target))
    paths.append(target)
    return paths


def createNewDic(strArr, strLen):
    """Index the words of length *strLen* by every wildcard pattern they match."""
    dic = defaultdict(list)
    for s in strArr:
        if len(s) == strLen:
            putStrInDic(s, dic)
    return dic


def putStrInDic(s, dic):
    # Register *s* under each of its one-wildcard patterns.
    for string in differentStrs(s):
        dic[string].append(s)


def differentStrs(s):
    """All patterns made by replacing one character of *s* with '*'."""
    strList = list(s)
    res = []
    for i in range(0, len(s)):
        cStrList = strList[:]
        cStrList[i] = "*"
        res.append("".join(cStrList))
    return res


if __name__ == '__main__':
    print(Change().countChanges(
        ["vvz", "bbaa", "f", "bbba", "bbaa", "baoa", "btoa", "bbba", "dcki",
         "bbbb", "ge", "atoj", "baaa", "btoj", "ae"], 15, "atoj", "bbbb"))
class Parent:
    # Shared base for the three instruction-format assemblers.
    def convertToBvalue(self, intValue, fill_size):
        # Unsigned binary string of intValue, zero-padded on the left to
        # fill_size digits.  NOTE(review): zfill pads but never truncates,
        # so values wider than fill_size produce longer strings.
        return bin(int(intValue))[2:].zfill(fill_size)


class RFormat(Parent):
    """Assembles and simulates MIPS R-type instructions (register ALU ops
    and shifts), writing the 32-bit encoding to destinationFile and a
    human-readable trace to executionOutput."""

    def __init__(self,destinationFile,executionOutput):
        """
        destinationFile File with binary code
        """
        self.destinationFile = destinationFile
        self.executionOutput = executionOutput
        # funct field for each supported mnemonic.
        self.ops = {
            'and': "100100",
            'or' : "100101",
            "add": '100000',
            "sub": '100010',
            'sll': "000000",
            'srl': "000010",
            'sra': "000011",
            'slt': "101010",
            'nor': "100111"
        }
        # Fixed fields of the R-type encoding: opcode and (for non-shift
        # instructions) the shamt field.
        self.binary_format = {
            "funct": "000000",
            "shamt": "00000"
        }

    #Assmebler execution function
    def assemble(self,command_arr,registers):
        """Encode one R-type instruction (command_arr = [op, $d, $s, $t])
        and apply its effect to the `registers` mapping.

        NOTE(review): `registers` values are project objects with .value
        and .binVal attributes -- confirm their exact contract.
        """
        #binary format
        #add $d, $s, $t => 0000 00ss ssst tttt dddd d000 0010 0000
        reg_t = command_arr[3]
        reg_s = command_arr[2]
        reg_d = command_arr[1]
        cmd = command_arr[0]
        reg_h = None  # shift amount, only set for sll/srl/sra
        if cmd not in ['srl','sra','sll']:
            # opcode | rs | rt | rd | shamt | funct
            self.destinationFile.write(f"{self.binary_format['funct']}{registers[reg_s].binVal}{registers[reg_t].binVal}{registers[reg_d].binVal}{self.binary_format['shamt']}{self.ops[cmd]}\n")
        else:
            #sll $d, $t, h => 0000 0000000 t tttt dddd dhhh hh00 0000
            # For shifts the third operand is the shift amount, and the
            # second operand plays the role of rt.
            reg_h = reg_t
            reg_t_1 = reg_s
            self.destinationFile.write(
                f"00000000000{registers[reg_t_1].binVal}{registers[reg_d].binVal}{self.convertToBvalue(int(reg_h),5)}{self.ops[cmd]}\n")
        #line execution
        if cmd == "and":
            registers[reg_d].value = (registers[reg_s].value & registers[reg_t].value)
            self.executionOutput.write(f"{self.convertToBvalue(registers[reg_s].value,32)} {cmd} {self.convertToBvalue(registers[reg_t].value,32)} = {self.convertToBvalue(registers[reg_d].value,32)}\n")
        elif cmd == "or":
            registers[reg_d].value = (registers[reg_s].value | registers[reg_t].value)
            self.executionOutput.write(f"{self.convertToBvalue(registers[reg_s].value,32)} {cmd} {self.convertToBvalue(registers[reg_t].value,32)} = {self.convertToBvalue(registers[reg_d].value,32)}\n")
        elif cmd == "add":
            registers[reg_d].value = registers[reg_s].value + registers[reg_t].value
        elif cmd == "sub":
            registers[reg_d].value = registers[reg_s].value - registers[reg_t].value
        elif cmd == "slt":
            # NOTE(review): stores a Python bool rather than 0/1 -- confirm
            # downstream formatting tolerates that.
            registers[reg_d].value = registers[reg_s].value < registers[reg_t].value
        elif (cmd == 'sll'):
            # Shift left by appending reg_h zero bits to the significant
            # bits of rs, then re-pad to 32 bits.
            registers[reg_d].value = (self.convertToBvalue(registers[reg_s].value,5).lstrip("0") + "0"*int(reg_h)).zfill(32)
            self.executionOutput.write(
                f"{self.convertToBvalue(registers[reg_s].value,32)} {cmd} {reg_h} = {registers[reg_d].value}\n")
            registers[reg_d].value = int(registers[reg_d].value, 2)
        elif (cmd == 'srl'):
            # Logical right shift: drop the low reg_h bits, zero-fill left.
            registers[reg_d].value = (self.convertToBvalue(registers[reg_s].value,5)[:-int(reg_h)]).zfill(32)
            self.executionOutput.write(f"{self.convertToBvalue(registers[reg_s].value,32)} {cmd} {reg_h} = {registers[reg_d].value}\n")
            registers[reg_d].value = int(registers[reg_d].value,2)
        elif (cmd == 'sra'):
            # Arithmetic right shift: replicate the 32-bit sign bit into
            # the vacated positions.
            rsbval = self.convertToBvalue(registers[reg_s].value,5)
            registers[reg_d].value = rsbval.zfill(32)[0]*(int(reg_h)) + (rsbval[:-int(reg_h)]).zfill(32-int(reg_h))
            self.executionOutput.write(
                f"{self.convertToBvalue(registers[reg_s].value,32)} {cmd} {reg_h} = {registers[reg_d].value}\n")
            registers[reg_d].value = int(registers[reg_d].value, 2)
        elif (cmd == 'nor'):
            # nor via (rs | rt) XOR 0xFFFFFFFF (bitwise NOT in 32 bits).
            registers[reg_d].value = bin((registers[reg_s].value | registers[reg_t].value) ^ 0b11111111111111111111111111111111)[2:]
            self.executionOutput.write(f"{self.convertToBvalue(registers[reg_s].value,32)} {cmd} {self.convertToBvalue(registers[reg_t].value,32)} = {registers[reg_d].value}\n")
            registers[reg_d].value = int(registers[reg_d].value, 2)
            #registers[reg_d].value = self.convertToBvalue(((registers[reg_s].value|registers[reg_t].value) ^ 31), 5)
        # Trace line for the ops that did not already log above
        # (add / sub / slt print decimal operands instead of binary).
        if cmd not in ['srl','sra','sll','nor','and','or']:
            self.executionOutput.write(f"{registers[reg_s].value} {cmd} {registers[reg_t].value} = {registers[reg_d].value}\n")
            #self.executionOutput.write(f"{self.convertToBvalue(registers[reg_s].value,32)} {cmd} {self.convertToBvalue(registers[reg_t].value,32)} = {self.convertToBvalue(registers[reg_d].value,32)}\n")


class JFormat(Parent):
    """Assembles the MIPS j (jump) instruction."""

    def __init__(self, destinationFile, executionOutput):
        """
        destinationFile: File with binary code
        """
        self.destinationFile = destinationFile
        self.executionOutput = executionOutput
        # opcode for j.
        self.op = "000010"

    def assemble(self,target_idx):
        # opcode | 26-bit target index.
        self.destinationFile.write(f"{self.op}{self.convertToBvalue(target_idx,26)} \n")


class IFormat(Parent):
    """Assembles and simulates MIPS I-type instructions (immediates,
    branches, loads and stores)."""

    def __init__(self,destinationFile,executionOutput):
        """
        destination : destination file to write
        """
        self.destinationFile = destinationFile
        self.executionOutput = executionOutput
        # opcode table for the supported mnemonics.
        self.ops = {
            'beq': "000100",
            'bne': "000101",
            'addi': "001000",
            'andi': "001100",
            'ori': "001101",
            'slti': "001010",
            'lui': "001111",
            'lw': "100011",
            'sw': "101011"
        }

    #for beq and bne commands
    def compareCompStatement(self,command,registers):
        #beq $s, $t, offset
        #0001 00ss ssst tttt iiii iiii iiii iiii
        # NOTE(review): equality is returned for BOTH beq and bne; unless
        # the caller inverts the result for bne, bne branches on equality
        # -- confirm against the caller.
        if command[0] =='beq' or command[0] =='bne':
            return float(registers[command[1]].value) == float(registers[command[2]].value)
        else:
            raise ValueError("No comparison operator with this name")

    #writing assembly bin command
    def writeAssemblyComp(self,command,registers,offset):
        # opcode | rs | rt | 16-bit branch offset.
        self.destinationFile.write(f"{self.ops[command[0]]}{registers[command[1]].binVal}{registers[command[2]].binVal}{self.convertToBvalue(offset,16)}\n")

    def assemble(self,command_arr,registers,memory):
        """Encode one I-type instruction and apply its effect to the
        register file and, for lw/sw, the `memory` mapping."""
        #binary format
        cmd = command_arr[0]
        if cmd not in ['lui','lw','sw']:
            # op $t, $s, imm  =>  opcode | rs | rt | imm16
            imm = command_arr[3]
            reg_s = command_arr[2]
            reg_t = command_arr[1]
            self.destinationFile.write(f"{self.ops[cmd]}{registers[reg_s].binVal}{registers[reg_t].binVal}{self.convertToBvalue(imm,16)}\n")
            # COMMAND EXECUTION
            if cmd == 'addi':
                registers[reg_t].value = registers[reg_s].value + int(imm)
                self.executionOutput.write(f"{registers[reg_s].value} {cmd} {imm} = {registers[reg_t].value}\n")
            elif cmd == 'andi':
                # NOTE(review): unlike the other ALU ops this leaves rt as a
                # 32-char binary *string*, not an int -- confirm intended.
                registers[reg_t].value = self.convertToBvalue(registers[reg_s].value & int(imm), 32)
                self.executionOutput.write(
                    f"{self.convertToBvalue(registers[reg_s].value,32)} {cmd} {self.convertToBvalue(imm,32)} = {self.convertToBvalue(registers[reg_t].value,32)}\n")
            elif cmd == 'ori':
                #self.convertToBvalue(registers[reg_s], 32)[:16] + self.convertToBvalue(registers[reg_s] | int(imm), 16)[16:]
                # NOTE(review): convertToBvalue never truncates, so if
                # (rs | imm) needs more than 16 bits the concatenated string
                # exceeds 32 bits -- confirm operand ranges.
                registers[reg_t].value = self.convertToBvalue(registers[reg_s].value, 32)[:16] + self.convertToBvalue(registers[reg_s].value | int(imm), 16)
                registers[reg_t].value = int(registers[reg_t].value,2)
                self.executionOutput.write(f"{self.convertToBvalue(registers[reg_s].value,32)} {cmd} {self.convertToBvalue(imm,32)} = {self.convertToBvalue(registers[reg_t].value,32)}\n")
            elif cmd == 'slti':
                # rt = 1 if rs < imm else 0, built as a 32-char string for
                # the trace and converted back to int afterwards.
                registers[reg_t].value="0"*31 + str(int(registers[reg_s].value< int(imm)))
                self.executionOutput.write(
                    f"{registers[reg_s].value} {cmd} {imm} = {registers[reg_t].value}\n")
                registers[reg_t].value = int(registers[reg_t].value, 2)
        elif cmd == 'lui':
            # lui $t, imm  =>  opcode | 00000 | rt | imm16
            reg_t = command_arr[1]
            imm = command_arr[2]
            self.destinationFile.write(f"{self.ops[cmd]}00000{registers[reg_t].binVal}{self.convertToBvalue(imm,16)}\n")
            #command execution
            # NOTE(review): real lui replaces the register with imm << 16;
            # this *adds* imm << 16 to the existing value -- confirm intent.
            registers[reg_t].value = registers[reg_t].value + int(imm)*(2**16)
            self.executionOutput.write(f"reg_t {cmd} {imm}= {registers[reg_t].value}\n")
        elif cmd in ['sw','lw']:
            # lw/sw $t, offset($b)
            reg_t = command_arr[1]
            mix = self._parseToExtract(command_arr[2])
            offset = mix[0]
            reg_b = mix[1]
            if cmd == 'lw':
                registers[reg_t].value = memory[registers[reg_b].value + int(offset)]
                self.destinationFile.write(f"100011{registers[reg_b].binVal}{registers[reg_t].binVal}{self.convertToBvalue(offset,16)}\n")
            else:
                memory[registers[reg_b].value + int(offset)] = registers[reg_t].value
                self.destinationFile.write(f"101011{registers[reg_b].binVal}{registers[reg_t].binVal}{self.convertToBvalue(offset,16)}\n")

    def _parseToExtract(self,word):
        # "8($t0)" -> ["8", "$t0"]  (offset string, base-register name)
        return word.replace('(', ' ').replace(')', ' ').split()
import os
import fitz
import pandas as pd
import shutil


def extract_content(path, mode):
    """Extract text and images from PDF(s) into a fresh ./content tree.

    path -- a single PDF file (mode == 0) or a directory containing PDFs
    mode -- 0 for single-file extraction, anything else scans `path`
            for *.pdf files

    Each PDF gets its own content/<stem>/ folder with per-page .txt files,
    per-page PNG images and an image_log.csv of image paths and bboxes.
    """
    # Start from a clean output tree on every run.
    if os.path.isdir('content'):  # if already exists remove it
        shutil.rmtree('content')
    os.mkdir('content')

    if mode == 0:
        _extract_pdf(path, path)
    else:
        # traverse files
        for file in os.listdir(path):
            if file.endswith('.pdf'):
                _extract_pdf(path + '/' + file, file)


def _extract_pdf(pdf_path, name_source):
    """Extract one PDF (the two mode branches were duplicated verbatim;
    this helper is the single shared implementation).

    pdf_path    -- path to open with fitz
    name_source -- string whose stem names the output folder
    """
    folder = 'content/' + (name_source.split('/')[-1]).split('.')[0]
    os.mkdir(folder)
    doc = fitz.open(pdf_path)

    # log files: one row per extracted image.
    image_path = []
    image_coord = []

    for page_no in range(len(doc)):
        page = doc.loadPage(page_no)
        # Page text -> content/<stem>/<page>.txt (now closed via `with`).
        with open(folder + '/' + str(page_no) + '.txt', 'w') as txt_file:
            txt_file.write(page.getText('text'))

        # Image bounding boxes come from the structured text dict;
        # block type 1 marks an image block.
        for block in page.getText('dict')['blocks']:
            if block['type'] == 1:
                image_coord.append(block['bbox'])

        # Export each raster image on the page as an RGB PNG.
        image_no = 0
        for image in doc.getPageImageList(page_no):
            image_no += 1
            customxref = image[0]  # first tuple entry is the xref
            pic = fitz.Pixmap(doc, customxref)
            finalpic = fitz.Pixmap(fitz.csRGB, pic)
            png_name = folder + '/' + str(page_no) + '_' + str(image_no) + '.png'
            finalpic.writePNG(png_name)
            image_path.append(png_name)

    image_coord = [list(coord) for coord in image_coord]
    image_log = pd.concat([pd.Series(image_path), pd.DataFrame(image_coord)],
                          axis=1)
    image_log.columns = ['path', 'x1', 'y1', 'x2', 'y2']
    image_log.to_csv(folder + '/' + 'image_log.csv', index=None)
import cv2
import numpy as np

# Key code cv2.waitKey returns for the Escape key.
ESC_KEY = 27

# Capture from the default camera at 720p and mirror the stream to disk.
cam = cv2.VideoCapture(0)
cam.set(cv2.CAP_PROP_FRAME_WIDTH, 1280)
cam.set(cv2.CAP_PROP_FRAME_HEIGHT, 720)

fourcc = cv2.VideoWriter_fourcc(*'XVID')
out = cv2.VideoWriter('output.avi', fourcc, 20.0, (1280, 720))

while True:
    ret, frame = cam.read()
    # Fixed: bail out on a failed grab BEFORE using the frame -- on
    # failure `frame` is None and write()/imshow() would raise; the
    # original only checked `ret` after both calls.
    if not ret:
        break
    out.write(frame)
    cv2.imshow('Video Stream', frame)
    key = cv2.waitKey(1)
    if key == ESC_KEY:
        break

cam.release()
out.release()
cv2.destroyAllWindows()
import kmeans_create
import kmeans_from_txt
import kmeans_tune
import sys
import lfa

# Maps the `kmeans <subcommand>` argument onto its runner.
_KMEANS_ACTIONS = {
    'create': kmeans_create.run,
    'load': kmeans_from_txt.run,
    'tune': kmeans_tune.run,
}

if __name__ == '__main__':
    args = sys.argv[1:]
    if not args:
        print('Options include: lfa, kmeans create, kmeans load, kmeans tune')
    elif args[0] == 'lfa':
        lfa.run()
    elif args[0] == 'kmeans':
        if len(args) < 2:
            print('Specify create, load, or tune.')
        else:
            action = _KMEANS_ACTIONS.get(args[1])
            if action is None:
                print('Unrecognized command.')
            else:
                action()
    else:
        print('Unrecognized command.')
from collections import namedtuple
from os.path import join, dirname, abspath
import json

# Mandatory attributes every ship definition must supply.
ship_schema_fields = ['shield', 'armor', 'hull', 'firepower', 'size',
    'weapon_size', 'multishot', 'sensor_strength',]
# Optional beneficial effects (default to 0 / shield value below).
buff_effects = ['shield_recharge', 'armor_local_repair', 'remote_shield',
    'remote_armor',]
# XXX damage will become a debuff.
# Optional hostile effects.
debuff_effects = ['target_painter', 'tracking_disruption', 'ECM', 'web',]
ship_optional_fields = buff_effects + debuff_effects

ShipSchema = namedtuple('ShipSchema',
    ['name'] + ship_schema_fields + ship_optional_fields)

# I don't quite like this method of providing default values, but also
# don't like overloading the __new__ method...
# schema - the full schema.
# attributes - ShipAttributes
# debuffs - debuffs.
_Ship = namedtuple('Ship', ['schema', 'attributes', 'debuffs'])

def Ship(schema, attributes, debuffs=None):
    """Factory for _Ship; defaults debuffs to an empty mapping."""
    if not debuffs:
        # XXX debuffs will become a tuple.
        # debuffs = ShipDebuffs(*([False] * len(debuff_effects)))
        debuffs = {}
    return _Ship(schema, attributes, debuffs)

ShipAttributes = namedtuple('ShipAttributes', ['shield', 'armor', 'hull'])
ShipDebuffs = namedtuple('ShipDebuffs', debuff_effects)

def ship_size_sort_key(obj):
    """Sort key: order schemata by their resolved numeric size."""
    return obj.size

class ShipLibrary(object):
    """Loads, validates and indexes ship schemata from a JSON library."""

    _required_keys = {
        '': ['sizes', 'ships',],  # top level keys
        'ships': ship_schema_fields,
    }

    def __init__(self, library_filename=None):
        if library_filename:
            self.load(library_filename)

    def _check_missing_keys(self, key_id, value):
        """
        value - the dict to be validated
        Returns the set of keys required for section `key_id` that are
        absent from value.
        """
        required_keys = set(self._required_keys[key_id])
        provided_keys = set(value.keys())
        return required_keys - provided_keys

    def load(self, filename):
        """Read a JSON library file and populate the ship tables."""
        with open(filename) as fd:
            raw_data = json.load(fd)
        self._load(raw_data)

    def _load(self, raw_data):
        """Validate raw_data and build size_data, ship_data and
        ordered_ship_data.  Raises ValueError on missing keys or on a
        multishot target that is not a defined ship.
        """
        missing = self._check_missing_keys('', raw_data)
        if missing:
            raise ValueError(', '.join(missing) + ' not found')

        self.size_data = {}
        self.size_data.update(raw_data['sizes'])

        raw_ship_names = raw_data['ships'].keys()
        self.ship_data = {}

        for ship_name, data in raw_data['ships'].items():
            missing = self._check_missing_keys('ships', data)
            if missing:
                raise ValueError("%s does not have %s attribute" % (
                    ship_name, ', '.join(missing)))
            # Resolve symbolic size names into their numeric values.
            data['size'] = self.size_data[data['size']]
            data['weapon_size'] = self.size_data[data['weapon_size']]
            # going to want to depreciate these in the future
            data['shield_recharge'] = data.get('shield_recharge', data['shield'])
            data['armor_local_repair'] = data.get('armor_local_repair', 0)
            data['remote_shield'] = data.get('remote_shield', 0)
            # Fixed: this previously read data.get('remote_shield', 0),
            # silently copying the remote-shield value into remote_armor.
            data['remote_armor'] = data.get('remote_armor', 0)
            data['target_painter'] = data.get('target_painter', 0)
            data['tracking_disruption'] = data.get('tracking_disruption', 0)
            data['ECM'] = data.get('ECM', 0)
            data['web'] = data.get('web', 0)
            self.ship_data[ship_name] = ShipSchema(ship_name, **data)

            multishot_list = data['multishot']
            for multishot_target in multishot_list:
                if multishot_target not in raw_ship_names:
                    raise ValueError(
                        multishot_target + " does not exist as a shiptype")

        self.ordered_ship_data = sorted(
            self.ship_data.values(), key=ship_size_sort_key)

    def get_ship_schemata(self, ship_name):
        """Return the ShipSchema for ship_name (KeyError if unknown)."""
        return self.ship_data[ship_name]
from django.shortcuts import render
from django.views import View
from datetime import datetime, date, timedelta
import random

from Travel.models.flights import Flight
from Travel.models.flight_booking import Flight_booking
from Travel.models.hotels import Hotel


class Flight_Final_Summary_View(View):
    """Finalizes a flight booking: collects passenger details from the
    query string, records the booking, decrements seat vacancies and
    suggests up to three hotels at the destination.

    NOTE(review): this mutates the database from a GET request (refresh
    re-books); POST would match HTTP semantics -- confirm before changing
    the URL contract.
    """

    def _collect_tickets(self, request, prefix, count, label):
        """Read `count` passengers' `<prefix>_passenger_*_<i>` fields from
        the query string and return (name, gender, dob, phone, label)
        tuples, dob reformatted from yyyy-mm-dd to dd/mm/yy.

        (The economy and business loops in the original were duplicated
        verbatim except for this prefix/label pair.)
        """
        tickets = []
        for i in range(1, int(count) + 1):
            name = request.GET.get(prefix + '_passenger_name_' + str(i))
            gender = request.GET.get(prefix + '_passenger_gender_' + str(i))
            dob = request.GET.get(prefix + '_passenger_dob_' + str(i))
            phone = request.GET.get(prefix + '_passenger_phone_' + str(i))
            # converting date to required format
            dob_date = date(*map(int, dob.split('-')))
            dob_formatted = datetime.combine(
                dob_date, datetime.min.time()).strftime("%d/%m/%y")
            tickets.append((name, gender, dob_formatted, phone, label))
        return tickets

    def get(self, request):
        economy_number = request.session.get('flight_economy_number')
        business_number = request.session.get('flight_business_number')

        economy_tickets = self._collect_tickets(
            request, 'economy', economy_number, 'Economy')
        business_tickets = self._collect_tickets(
            request, 'business', business_number, 'Business')

        flight_id = request.session.get('flight_id')
        flight_instance = Flight.get_flight_through_id(flight_id)
        flight_booked = Flight.get_flight_through_id_but_queryset(flight_id)
        flight_date = request.session.get('flight_date')
        # converting date to required format
        now = date(*map(int, flight_date.split('-')))

        total_price = (int(economy_number) * float(flight_instance.economy_price)) + \
            (int(business_number) * float(flight_instance.business_price))

        user = request.user
        flight_booking_instance = Flight_booking(
            user_email=user.email,
            user_fname=user.first_name,
            user_lname=user.last_name,
            flight=flight_instance,
            total_price=total_price,
            economy_number=int(economy_number),
            business_number=int(business_number))
        flight_booking_instance.save()

        # Reserve the seats just booked.
        flight_instance.economy_vacancy -= int(economy_number)
        flight_instance.business_vacancy -= int(business_number)
        flight_instance.save()

        flight_min_time = datetime.min.time()
        flight_date_adjust = datetime.combine(now, flight_min_time)
        flight_date_formatted = datetime.strftime(
            flight_date_adjust, "%A; %d %b. %Y")

        # Suggest up to three random hotels at the destination for a
        # default 4-night stay starting on the flight date.
        recent_destination_id = flight_instance.destination.id
        recent_date_early = request.session.get('flight_date')
        recent_date = datetime.strptime(recent_date_early, '%Y-%m-%d')
        final_date = recent_date + timedelta(days=4)
        recent_date = recent_date.strftime('%Y-%m-%d')
        final_date = final_date.strftime('%Y-%m-%d')
        hotel_possible_id = Hotel.objects.filter(
            location=recent_destination_id).values_list('id', flat=True)
        hotel_possible_id_list = random.sample(
            list(hotel_possible_id), min(len(hotel_possible_id), 3))
        hotel_possible = Hotel.objects.filter(id__in=hotel_possible_id_list)

        flight_booking_data = {'hotel_possible': hotel_possible,
                               'recent_date': recent_date,
                               'final_date': final_date,
                               'total_price': total_price,
                               'flight_date_formatted': flight_date_formatted,
                               'flight_booked': flight_booked,
                               'economy_tickets': economy_tickets,
                               'business_tickets': business_tickets,
                               'economy_number': economy_number,
                               'business_number': business_number}
        return render(request, 'flight_final_summary.html', flight_booking_data)
import unittest
import os.path

import sudoku.io
import sudoku.coloring


def _load(rel_path):
    """Read the sample sudoku graph stored at rel_path (relative to this file)."""
    return sudoku.io.read(os.path.join(os.path.dirname(__file__), rel_path))


class ColoringTests(unittest.TestCase):
    """Smoke tests: each coloring strategy must run on a sample puzzle."""

    def test_welsh_powell(self):
        sudoku.coloring.welsh_powell(_load('../rsc/9/sample-2.sdk'))

    def test_sequencial_coloring(self):
        sudoku.coloring.sequencial_coloring(_load('../rsc/25/sample-0.sdk'))

    def test_class_coloring(self):
        sudoku.coloring.class_coloring(_load('../rsc/9/sample-2.sdk'))

    def test_class_coloring_backtracking(self):
        sudoku.coloring.class_coloring_backtracking(_load('../rsc/9/sample-2.sdk'))

    def test_dsatur(self):
        sudoku.coloring.dsatur(_load('../rsc/9/solved-0.sdk'))

    def test_bfs_heuristic(self):
        sudoku.coloring.bfs_heuristic(_load('../rsc/9/sample-2.sdk'), 100000)

    def test_dfs_heuristic(self):
        sudoku.coloring.dfs_heuristic(_load('../rsc/9/sample-2.sdk'), 100000)
# Count "good" observatories: those strictly higher than every
# observatory they share a road with (ties disqualify both ends).
n, m = map(int, input().split())
h = list(map(int, input().split()))

# bad[i]: number of roads on which observatory i fails to be the
# strictly higher endpoint.
bad = [0] * n
for _ in range(m):
    a, b = (int(v) - 1 for v in input().split())
    if h[a] > h[b]:
        bad[b] += 1
    elif h[a] < h[b]:
        bad[a] += 1
    else:
        bad[a] += 1
        bad[b] += 1

print(bad.count(0))
__author__ = 'dhruv and alex m'
from grt.core import GRTMacro
import wpilib
import threading

#constants = Constants()


class StraightMacro(GRTMacro):
    """
    Drive Macro; drives forwards a certain distance
    while maintaining orientation

    A wpilib PIDController holds the robot's fused heading at the value
    captured when the macro is enabled, trimming left/right drivetrain
    power around the fixed POWER setting.
    NOTE(review): the docstrings mention a distance argument, but the
    constructor takes none -- the macro drives until disabled or it
    times out; confirm the intended contract.
    """
    DT_NO_TARGET_TURN_RATE = .2  # NOTE(review): unused in this class
    DT_KP = .03  # heading-hold proportional gain
    DT_KI = 0
    DT_KD = 0
    DT_ABS_TOL = 5  # PID on-target tolerance (heading units)
    DT_OUTPUT_RANGE = .25  # NOTE(review): unused; setOutputRange below hard-codes +/-0.4
    POWER = -.7  # base drive power applied to both sides

    def __init__(self, dt, navx, timeout=None):
        """
        Pass drivetrain, distance to travel (ft), and timeout (secs)

        dt -- drivetrain exposing set_dt_output(left, right)
        navx -- sensor exposing fused_heading
        """
        super().__init__(timeout)
        self.dt = dt
        self.enabled = False
        self.navx = navx
        self.setpoint = None
        # PID input = fused heading, output = steering correction fed to
        # set_output below.
        self.pid_controller = wpilib.PIDController(self.DT_KP, self.DT_KI, self.DT_KD, self.get_input, self.set_output)
        self.pid_controller.setAbsoluteTolerance(self.DT_ABS_TOL)
        self.pid_controller.reset()
        # Heading wraps at 360 degrees, so treat the input as circular.
        self.pid_controller.setInputRange(0.0, 360.0)
        self.pid_controller.setContinuous(True)
        self.pid_controller.setOutputRange(-.4, .4)
        # Start the macro's background thread immediately on construction.
        self.run_threaded()

    def enable(self):
        # Lock in the current heading as the setpoint to hold while driving.
        self.setpoint = self.navx.fused_heading
        self.pid_controller.setSetpoint(self.setpoint)
        self.pid_controller.enable()
        self.enabled = True

    def disable(self):
        # Stop correcting and bring the drivetrain to rest.
        self.pid_controller.disable()
        self.setpoint = None
        self.enabled = False
        self.dt.set_dt_output(0, 0)

    def set_output(self, output):
        """
        :param output: (-.5, .5)
        :return:

        PID output callback: skew left/right power to steer back toward
        the stored heading; drive straight once within tolerance.
        """
        if self.enabled:
            if not self.pid_controller.onTarget():
                self.dt.set_dt_output(self.POWER + output, self.POWER -output)
            else:
                self.dt.set_dt_output(self.POWER, self.POWER)
            print("Setpoint: ", self.pid_controller.getSetpoint())
            print("Output: ", output)

    def get_input(self):
        # PID input callback: current fused heading from the navX sensor.
        print("Input: ", self.navx.fused_heading)
        return self.navx.fused_heading
# Dynamic-programming practice problems (LeetCode), one function each.


def findLength(A, B):
    """Longest common contiguous subarray of A and B (LeetCode 718).

    dp[i][j] = length of the common suffix of B[:i] and A[:j].
    """
    res = 0
    dp = [[0 for _ in range(len(A) + 1)] for _ in range(len(B) + 1)]
    for i in range(1, len(B) + 1):
        for j in range(1, len(A) + 1):
            if B[i - 1] == A[j - 1]:
                dp[i][j] = dp[i - 1][j - 1] + 1
                res = max(res, dp[i][j])
    return res


def climbStairs(n):
    """Ways to climb n stairs taking 1 or 2 steps (LeetCode 70):
    the Fibonacci recurrence."""
    dp = {1: 1, 2: 2}
    for i in range(3, n + 1):  # loop body only runs for n >= 3
        dp[i] = dp[i - 1] + dp[i - 2]
    return dp[n]


def maxSubArray(nums):
    """Maximum subarray sum (LeetCode 53), Kadane's algorithm.

    Fixed: the original overwrote the caller's list in place; this keeps
    the running values in locals and leaves `nums` untouched.
    """
    best = cur = nums[0]
    for x in nums[1:]:
        cur = max(cur + x, x)
        best = max(best, cur)
    return best


def maxProfit(prices):
    """Best Time to Buy and Sell Stock I (LeetCode 121): max profit from
    a single buy/sell pair.

    Fixed: mislabeled as problem II and brute-forced all (i, j) pairs in
    O(n^2); tracking the minimum price so far gives the same answer in
    one pass.
    """
    profit = 0
    lowest = float('inf')
    for price in prices:
        lowest = min(lowest, price)
        profit = max(profit, price - lowest)
    return profit


def maxProfitt(prices):
    """Best Time to Buy and Sell Stock II (LeetCode 122): unlimited
    transactions -- sum every positive day-to-day price increase."""
    profit = 0
    for i in range(1, len(prices)):
        gain = prices[i] - prices[i - 1]
        if gain > 0:
            profit += gain
    return profit


def rob1(nums):
    """House Robber (LeetCode 198): max sum of non-adjacent elements."""
    if not nums:
        return 0
    if len(nums) == 1:
        return nums[0]
    dp = [0] * len(nums)
    dp[0] = nums[0]
    dp[1] = max(nums[0], nums[1])
    for i in range(2, len(nums)):
        dp[i] = max(dp[i - 2] + nums[i], dp[i - 1])
    return dp[len(nums) - 1]


def massage(nums):
    """The Masseuse (LCCI 17.16): same recurrence as rob1, written with a
    two-slot index offset so no explicit base cases are needed."""
    if not nums:
        return 0
    if len(nums) == 1:
        return nums[0]
    dp = [0] * (len(nums) + 2)
    res = 0
    for i in range(2, len(nums) + 2):
        dp[i] = max(dp[i - 2] + nums[i - 2], dp[i - 1])
        res = max(res, dp[i])
    return res


def generate(numRows):
    """Pascal's triangle (LeetCode 118): the first numRows rows."""
    rows = [[0] * width for width in range(1, numRows + 1)]
    for i in range(numRows):
        rows[i][0] = rows[i][-1] = 1
        # Interior cells are the sum of the two cells above.
        for j in range(1, i):
            rows[i][j] = rows[i - 1][j - 1] + rows[i - 1][j]
    return rows
def addNumbers():  # Default function
    """No-argument function: prints a greeting and a fixed sum."""
    print("Hi")
    print(5 + 10)


def subNumbers(x, y):  # Parameterised function
    """Print the difference x - y.

    Fixed: the original printed x + y despite the name."""
    print(x - y)


def mulNumbers(x, y):
    """Return the product of x and y.

    Fixed: the original ignored both parameters (rebinding them to
    unrelated values) and returned None, so `product` below was always
    None."""
    return x * y


addNumbers()
subNumbers(10, 20)
# Fixed: the original called mulNumbers with three arguments, which
# raised TypeError at module load.
product = mulNumbers(10, 20)
print("The product is", product)
import numpy as np

# Bytes per float32 sample; used as the byte-offset multiplier when the
# (disabled) crop code below seeks into the raw mesh files layer by layer.
nbit = 4 # float bit

# The triple-quoted block below is disabled one-off code that cropped a
# sub-window out of each refinement level of the original AWP mesh files;
# kept for reference so the cropped mesh_0/mesh_1/mesh_2 can be
# regenerated.  NOTE(review): per-level dimensions (nx, ny, window sizes)
# scale by 3 between levels -- confirm against the source mesh before
# re-enabling.
"""
nx, ny, nz, nvar = 19440, 14904, 160, 3
mx, my = nx * 2 // 3 + 1197, ny * 2 // 3
endx, endy = mx + 2592, my + 2160
nlayer = nx * ny * nvar
with open("mesh_0", "wb") as fid:
    for i in range(nz):
        data = np.fromfile("../mesh_large_8m_orig.bin_0", dtype='float32', count=nlayer, offset=nlayer * i * nbit).reshape(ny, nx, nvar)
        data[my : endy, mx : endx, :].tofile(fid)

nx, ny, nz, nvar = 19440 // 3, 14904 // 3, 380, 3
mx, my = nx * 2 // 3 + 1197 // 3, ny * 2 // 3
endx, endy = mx + 2592 // 3, my + 2160 // 3
nlayer = nx * ny * nvar
with open("mesh_1", "wb") as fid:
    for i in range(nz):
        data = np.fromfile("../mesh_large_8m_orig.bin_1", dtype='float32', count=nlayer, offset=nlayer * i * nbit).reshape(ny, nx, nvar)
        data[my : endy, mx : endx, :].tofile(fid)

nx, ny, nz, nvar = 19440 // 9, 14904 // 9, 60, 3
mx, my = nx * 2 // 3 + 1197 // 9, ny * 2 // 3
endx, endy = mx + 2592 // 9, my + 2160 // 9
nlayer = nx * ny * nvar
with open("mesh_2", "wb") as fid:
    for i in range(nz):
        data = np.fromfile("../mesh_large_8m_orig.bin_2", dtype='float32', count=nlayer, offset=nlayer * i * nbit).reshape(ny, nx, nvar)
        data[my : endy, mx : endx, :].tofile(fid)
"""

from awp_processing.check import check_mesh_cont

# Verify grid continuity across the two refinement interfaces of the
# cropped meshes (window 2592 x 2160 at the finest level, /3 per level).
check_mesh_cont("mesh_0", "mesh_1", 2592, 2160, 160)
check_mesh_cont("mesh_1", "mesh_2", 2592 // 3, 2160 // 3, 380)
import sys
import re

# Duplicate each non-comment GTrack record into _A and _B haplotype
# copies; comment/header lines pass through once, unchanged.
# (Fixed: the original used Python 2 print statements, a syntax error
# under Python 3.)

gtrack_fname = str(sys.argv[1])

with open(gtrack_fname) as gf:
    gtrack_file = gf.readlines()

for line in gtrack_file:
    line = line.rstrip()
    if line.startswith("#"):
        print(line)
    else:
        # Tag the chromosome name with the haplotype suffix.
        line1 = re.sub(r'(chr\w+)', r'\1_A', line)
        line2 = re.sub(r'(chr\w+)', r'\1_B', line)
        print(line1)
        print(line2)
# Imports from bokeh.plotting import figure from bokeh.io import curdoc from bokeh.models.annotations import Label,LabelSet from bokeh.models import ColumnDataSource from bokeh.models.widgets import Select, Slider from bokeh.layouts import layout from bokeh.models import Range1d # Create column datasource cds_original = ColumnDataSource(dict(average_grade = [7,9,10,5], exam_grade=[9,9,7,4], student_name = ['Simon','Jason','Peter','Aaron'])) cds = ColumnDataSource(dict(average_grade = [7,9,10,5], exam_grade=[9,9,7,4], student_name = ['Simon','Jason','Peter','Aaron'])) # generate figure f = figure(x_range = Range1d(start=0,end=10), y_range = Range1d(start=0,end=10)) f.xaxis.axis_label = "average grade" f.yaxis.axis_label = "exam grade" # Plot glyph f.circle( x = 'average_grade', y = 'exam_grade', source = cds, size = 8, ) # Create filtering function def filter_grades(attr, old, new): cds.data={ key:[value for i,value in enumerate(cds_original.data[key]) if cds_original.data["exam_grade"][i] >= slider.value] for key in cds_original.data } # UI widget functionality def update_labels(attr, old, new): labels.text = select.value # Add labels for bokeh glyphs labels=LabelSet( x='average_grade', y='exam_grade', text='student_name', source=cds, ) f.add_layout(labels) # add description description = Label( x=1, y=1, text="This graph shows average grades and exam grades for 3rd grade students", render_mode="css" ) f.add_layout(description) options = [('average_grade','average grade'), ('exam_grade','exam grade'), ('student_name','student name')] select=Select(title = "Select title to view: ", options = options, ) slider=Slider(start=0, end=10, value=0, step=0.5, title="Above Exam Grade", ) select.on_change("value",update_labels) slider.on_change("value",filter_grades) ui = layout([[select,slider]]) curdoc().add_root(f) curdoc().add_root(ui)
""" https://leetcode.com/problems/range-addition-ii/#/description Given an m * n matrix M initialized with all 0's and several update operations. Operations are represented by a 2D array, and each operation is represented by an array with two positive integers a and b, which means M[i][j] should be added by one for all 0 <= i < a and 0 <= j < b. You need to count and return the number of maximum integers in the matrix after performing all the operations. Example 1: Input: m = 3, n = 3 operations = [[2,2],[3,3]] Output: 4 Explanation: Initially, M = [[0, 0, 0], [0, 0, 0], [0, 0, 0]] After performing [2,2], M = [[1, 1, 0], [1, 1, 0], [0, 0, 0]] After performing [3,3], M = [[2, 2, 1], [2, 2, 1], [1, 1, 1]] So the maximum integer in M is 2, and there are four of it in M. So return 4. Note: 1) The range of m and n is [1,40000]. 2) The range of a is [1,m], and the range of b is [1,n]. 3) The range of operations size won't exceed 10,000. Algorithm: when you take a critical look at the matrix after the updates have been done, you'll realize that the ops with min values for row and col will have the maximum integer. And the count will be row * col. Special case line 44: when ops == [] there will be no updates and the maxinmum integer will be 0 and the count will be m * n https://discuss.leetcode.com/topic/90547/java-solution-find-min """ def max_count(m, n, ops): if not ops: return m * n row, col = float('inf'), float('inf') for op in ops: row = min(row, op[0]) col = min(col, op[1]) return row * col if __name__ == '__main__': print max_count(3, 3, [[2,2],[3,3]]) print('\n') print max_count(26, 17, [[20,10],[26,11],[2,11], [4,16],[2,3],[23,13],[7,15], [11,11],[25,13],[11,13],[13,11], [13,16],[26,17]] )
# Generated by Django 3.1.7 on 2021-04-08 00:02 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('back', '0005_auto_20210407_1900'), ] operations = [ migrations.AlterField( model_name='compra', name='Placa', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='back.carros', unique=True), ), migrations.AlterField( model_name='venta', name='Placa', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='back.carros', unique=True), ), ]
#frontend
# Tkinter GUI for a small Customer Management System (CMS).
# Persistence is delegated to the local ``back1`` module (addrec/srchrec/
# modirec/deltrec/view/count); this file only builds windows and widgets.
# A plain-file login/register screen (bottom of the file) gates access to
# the main CMS window.
from tkinter import *
import tkinter.messagebox
import back1
import os


class cust:
    """Main CMS window: one button per CRUD operation, each of which opens
    a separate top-level window holding its own form."""

    def __init__(self, root):
        self.root = root
        self.root.title("CMS")
        self.root.geometry("700x700")
        self.root.configure(bg="black")
        # Shared form variables, bound via ``textvariable`` to the Entry
        # widgets of the "add" and "modify" forms.
        self.id = StringVar()
        self.name = StringVar()
        self.age = StringVar()
        self.mob = StringVar()
        self.address = StringVar()
        self.salary = StringVar()
        self.gender = StringVar()
        self.doj = StringVar()
        self.x = StringVar()
        self.y = 0
        # Main-menu buttons, one per operation.
        btn = Button(root, text="ADD CUSTOMER", font=12, command=self.add)
        btn.place(x=230, y=50)
        btn1 = Button(root, text="SEARCH CUSTOMER", font=12, command=self.search)
        btn1.place(x=230, y=150)
        btn2 = Button(root, text="MODIFY CUSTOMER", font=12, command=self.modify)
        btn2.place(x=230, y=250)
        btn3 = Button(root, text="DELETE CUSTOMER", font=12, command=self.delete)
        btn3.place(x=230, y=350)
        btn4 = Button(root, text="SHOW CUSTOMERS", font=12, command=self.show)
        btn4.place(x=230, y=450)
        btn5 = Button(root, text="EXIT", font=12, command=self.iexit)
        btn5.place(x=230, y=550)

    def iexit(self):
        """Ask for confirmation, then close the application window."""
        iexit = tkinter.messagebox.askyesno("CMS", "Do you want to exit?")
        if iexit > 0:
            # NOTE(review): destroys the module-level ``root`` created in
            # the __main__ block, not ``self.root`` -- confirm intent.
            root.destroy()
            return

    def adata(self):
        """Insert a new record through back1, after scanning existing ids."""
        x = self.entr1.get()
        for ro in back1.count():
            y = ''.join(ro)
            if y == x:
                print("hmmm")
                tkinter.messagebox.showinfo("error", "enter a unique id")
        # NOTE(review): the record is still inserted even when a duplicate
        # id was reported above -- the loop warns but does not abort.
        back1.addrec(self.entr1.get(), self.entr2.get(), self.entr3.get(),
                     self.entr4.get(), self.entr5.get(),
                     self.entr6.get(), self.entr7.get(), self.entr8.get())

    def deli(self):
        """Delete the record whose id is typed into the delete form."""
        back1.deltrec(self.entr12.get())

    def modi(self):
        """Update a record with the values from the modify form."""
        back1.modirec(self.entr11.get(), self.entr22.get(), self.entr33.get(),
                      self.entr44.get(), self.entr55.get(),
                      self.entr66.get(), self.entr77.get(), self.entr88.get())

    def add(self):
        """Open the "add customer" form window."""
        win = Tk()
        win.geometry("800x800")
        win.title("add")

        def clr():
            # Clear every field of the add form.
            self.entr1.delete(0, END)
            self.entr2.delete(0, END)
            self.entr3.delete(0, END)
            self.entr4.delete(0, END)
            self.entr5.delete(0, END)
            self.entr6.delete(0, END)
            self.entr7.delete(0, END)
            self.entr8.delete(0, END)

        lb = Label(win, text="ID", bg="yellow", font=32)
        lb.grid(row=0, column=0, pady=40)
        lb2 = Label(win, text="NAME", bg="yellow", font=32)
        lb2.grid(row=1, column=0, pady=10)
        lb3 = Label(win, text="AGE", bg="yellow", font=32)
        lb3.grid(row=2, column=0, pady=40)
        lb4 = Label(win, text="MOBILE NO", bg="yellow", font=32)
        lb4.grid(row=3, column=0, pady=40)
        lb5 = Label(win, text="ADDRESS", bg="yellow", font=32)
        lb5.grid(row=4, column=0, pady=40)
        lb6 = Label(win, text="SALARY", bg="yellow", font=32)
        lb6.grid(row=5, column=0, pady=40)
        lb7 = Label(win, text="GENDER", bg="yellow", font=32)
        lb7.grid(row=6, column=0, pady=40)
        lb8 = Label(win, text="DATE OF JOINING", bg="yellow", font=32)
        lb8.grid(row=7, column=0, pady=40)
        # One Entry per field, mirrored into the shared StringVars.
        self.entr1 = Entry(win, font=32, textvariable=self.id)
        self.entr1.grid(row=0, column=1)
        self.entr2 = Entry(win, font=32, textvariable=self.name)
        self.entr2.grid(row=1, column=1)
        self.entr3 = Entry(win, font=32, textvariable=self.age)
        self.entr3.grid(row=2, column=1)
        self.entr4 = Entry(win, font=32, textvariable=self.mob)
        self.entr4.grid(row=3, column=1)
        self.entr5 = Entry(win, font=32, textvariable=self.address)
        self.entr5.grid(row=4, column=1)
        self.entr6 = Entry(win, font=32, textvariable=self.salary)
        self.entr6.grid(row=5, column=1)
        self.entr7 = Entry(win, font=32, textvariable=self.gender)
        self.entr7.grid(row=6, column=1)
        self.entr8 = Entry(win, font=32, textvariable=self.doj)
        self.entr8.grid(row=7, column=1)
        btn1 = Button(win, text="ADD", font=12, command=self.adata)
        btn1.place(x=600, y=300)
        btn2 = Button(win, text="CLEAR", font=12, command=clr)
        btn2.place(x=600, y=400)
        # win.mainloop()

    def modify(self):
        """Open the "modify customer" form window."""
        win = Tk()
        win.geometry("800x800")
        win.title("modify")
        lb = Label(win, text="ID", bg="yellow", font=32)
        lb.grid(row=0, column=0, pady=40)
        lb2 = Label(win, text="NAME", bg="yellow", font=32)
        lb2.grid(row=1, column=0, pady=10)
        lb3 = Label(win, text="AGE", bg="yellow", font=32)
        lb3.grid(row=2, column=0, pady=40)
        lb4 = Label(win, text="MOBILE NO", bg="yellow", font=32)
        lb4.grid(row=3, column=0, pady=40)
        lb5 = Label(win, text="ADDRESS", bg="yellow", font=32)
        lb5.grid(row=4, column=0, pady=40)
        lb6 = Label(win, text="SALARY", bg="yellow", font=32)
        lb6.grid(row=5, column=0, pady=40)
        lb7 = Label(win, text="GENDER", bg="yellow", font=32)
        lb7.grid(row=6, column=0, pady=40)
        lb8 = Label(win, text="DATE OF JOINING", bg="yellow", font=32)
        lb8.grid(row=7, column=0, pady=40)
        # NOTE(review): ``self.entr11`` is also reassigned by search() --
        # whichever window was opened last wins; confirm this is intended.
        self.entr11 = Entry(win, font=32, textvariable=self.id)
        self.entr11.grid(row=0, column=1)
        self.entr22 = Entry(win, font=32, textvariable=self.name)
        self.entr22.grid(row=1, column=1)
        self.entr33 = Entry(win, font=32, textvariable=self.age)
        self.entr33.grid(row=2, column=1)
        self.entr44 = Entry(win, font=32, textvariable=self.mob)
        self.entr44.grid(row=3, column=1)
        self.entr55 = Entry(win, font=32, textvariable=self.address)
        self.entr55.grid(row=4, column=1)
        self.entr66 = Entry(win, font=32, textvariable=self.salary)
        self.entr66.grid(row=5, column=1)
        self.entr77 = Entry(win, font=32, textvariable=self.gender)
        self.entr77.grid(row=6, column=1)
        self.entr88 = Entry(win, font=32, textvariable=self.doj)
        self.entr88.grid(row=7, column=1)
        btn0 = Button(win, text="MODIFY", font=12, command=self.modi)
        btn0.place(x=500, y=400)
        # win.mainloop()

    def search(self):
        """Open the "search customer" window: a form plus a results list."""
        win = Tk()
        win.geometry("800x800")
        win.title("search")

        def sir():
            # Run the search and repopulate the results Listbox.
            labe.delete(0, END)
            for row in back1.srchrec(self.entr10.get(), self.entr11.get(),
                                     self.entr12.get(), self.entr13.get(),
                                     self.entr14.get(), self.entr15.get(),
                                     self.entr16.get(), self.entr17.get()):
                labe.insert(END, row, str(""))

        labe = Listbox(win, width=45, height=30, bg="white")
        labe.place(x=500, y=10)
        lb = Label(win, text="ID", bg="yellow", font=32)
        lb.place(x=10, y=30)
        lb2 = Label(win, text="NAME", bg="yellow", font=32)
        lb2.place(x=10, y=90)
        lb3 = Label(win, text="AGE", bg="yellow", font=32)
        lb3.place(x=10, y=150)
        lb4 = Label(win, text="MOBILE NO", bg="yellow", font=32)
        lb4.place(x=10, y=210)
        lb5 = Label(win, text="ADDRESS", bg="yellow", font=32)
        lb5.place(x=10, y=270)
        lb6 = Label(win, text="SALARY", bg="yellow", font=32)
        lb6.place(x=10, y=330)
        lb7 = Label(win, text="GENDER", bg="yellow", font=32)
        lb7.place(x=10, y=390)
        lb8 = Label(win, text="DATE OF JOINING", bg="yellow", font=32)
        lb8.place(x=10, y=450)
        # NOTE(review): entr11/entr12 are also used by modify()/delete();
        # these assignments overwrite those widgets.
        self.entr10 = Entry(win, font=32)
        self.entr10.place(x=200, y=30)
        self.entr11 = Entry(win, font=32)
        self.entr11.place(x=200, y=90)
        self.entr12 = Entry(win, font=32)
        self.entr12.place(x=200, y=150)
        self.entr13 = Entry(win, font=32)
        self.entr13.place(x=200, y=210)
        self.entr14 = Entry(win, font=32)
        self.entr14.place(x=200, y=270)
        self.entr15 = Entry(win, font=32)
        self.entr15.place(x=200, y=330)
        self.entr16 = Entry(win, font=32)
        self.entr16.place(x=200, y=390)
        self.entr17 = Entry(win, font=32)
        self.entr17.place(x=200, y=450)
        btn = Button(win, text="SEARCH", font=12, command=sir)
        btn.place(x=370, y=700)
        # win.mainloop()

    def delete(self):
        """Open the "delete customer" window (delete by id)."""
        win = Tk()
        win.geometry("800x800")
        win.title("delete")
        lb = Label(win, text="ID", bg="yellow", font=32)
        lb.grid(row=0, column=0, pady=40)
        self.entr12 = Entry(win, font=32)
        self.entr12.grid(row=0, column=1)
        btn = Button(win, text="DELETE", font=12, command=self.deli)
        btn.place(x=370, y=200)
        # win.mainloop()

    def show(self):
        """Open a window listing every record returned by back1.view()."""
        win = Tk()
        win.geometry("800x800")
        win.title("show")
        win.configure(bg="black")
        lab = Listbox(win, width=45, height=30, bg="white")
        lab.pack()

        def disp():
            # Refresh the Listbox with all records.
            lab.delete(0, END)
            for row in back1.view():
                lab.insert(END, row, str(""))

        btn = Button(win, text="show", font=12, command=disp)
        btn.place(x=370, y=500)


# --------------------------------------------------------------------------
# Login / registration screens (file-per-user storage in the CWD).
# --------------------------------------------------------------------------

def register():
    """Open the registration window (username + password entries)."""
    global register_screen
    register_screen = Toplevel(main_screen)
    register_screen.title("Register")
    register_screen.geometry("300x250")
    global username
    global password
    global username_entry
    global password_entry
    username = StringVar()
    password = StringVar()
    Label(register_screen, text="Please enter details below", bg="blue").pack()
    Label(register_screen, text="").pack()
    username_lable = Label(register_screen, text="Username * ")
    username_lable.pack()
    username_entry = Entry(register_screen, textvariable=username)
    username_entry.pack()
    password_lable = Label(register_screen, text="Password * ")
    password_lable.pack()
    password_entry = Entry(register_screen, textvariable=password, show='*')
    password_entry.pack()
    Label(register_screen, text="").pack()
    Button(register_screen, text="Register", width=10, height=1, bg="blue",
           command=register_user).pack()


# Designing window for login
def login():
    """Open the login window (username + password entries)."""
    global login_screen
    login_screen = Toplevel(main_screen)
    login_screen.title("Login")
    login_screen.geometry("300x250")
    Label(login_screen, text="Please enter details below to login").pack()
    Label(login_screen, text="").pack()
    global username_verify
    global password_verify
    username_verify = StringVar()
    password_verify = StringVar()
    global username_login_entry
    global password_login_entry
    Label(login_screen, text="Username * ").pack()
    username_login_entry = Entry(login_screen, textvariable=username_verify)
    username_login_entry.pack()
    Label(login_screen, text="").pack()
    Label(login_screen, text="Password * ").pack()
    password_login_entry = Entry(login_screen, textvariable=password_verify, show='*')
    password_login_entry.pack()
    Label(login_screen, text="").pack()
    Button(login_screen, text="Login", width=10, height=1, command=login_verify).pack()


# Implementing event on register button
def register_user():
    """Persist a new user as a plain file named after the username.

    NOTE(review): the password is stored in plain text on disk -- this is
    insecure and should be hashed (e.g. via hashlib/secrets) in production.
    """
    username_info = username.get()
    password_info = password.get()
    file = open(username_info, "w")
    file.write(username_info + "\n")
    file.write(password_info)
    file.close()
    username_entry.delete(0, END)
    password_entry.delete(0, END)
    Label(register_screen, text="Registration Success", fg="green",
          font=("calibri", 11)).pack()


# Implementing event on login button
def login_verify():
    """Check the typed credentials against the per-user files in the CWD."""
    username1 = username_verify.get()
    password1 = password_verify.get()
    username_login_entry.delete(0, END)
    password_login_entry.delete(0, END)
    list_of_files = os.listdir()
    if username1 in list_of_files:
        file1 = open(username1, "r")
        verify = file1.read().splitlines()
        if password1 in verify:
            login_sucess()
        else:
            password_not_recognised()
    else:
        user_not_found()


# Designing popup for login success
def login_sucess():
    """Show the "Login Success" popup."""
    global login_success_screen
    login_success_screen = Toplevel(login_screen)
    login_success_screen.title("Success")
    login_success_screen.geometry("150x100")
    Label(login_success_screen, text="Login Success").pack()
    Button(login_success_screen, text="OK", command=delete_login_success).pack()


# Designing popup for login invalid password
def password_not_recognised():
    """Show the "Invalid Password" popup."""
    global password_not_recog_screen
    password_not_recog_screen = Toplevel(login_screen)
    password_not_recog_screen.title("Success")
    password_not_recog_screen.geometry("150x100")
    Label(password_not_recog_screen, text="Invalid Password ").pack()
    Button(password_not_recog_screen, text="OK",
           command=delete_password_not_recognised).pack()


# Designing popup for user not found
def user_not_found():
    """Show the "User Not Found" popup."""
    global user_not_found_screen
    user_not_found_screen = Toplevel(login_screen)
    user_not_found_screen.title("Success")
    user_not_found_screen.geometry("150x100")
    Label(user_not_found_screen, text="User Not Found").pack()
    Button(user_not_found_screen, text="OK",
           command=delete_user_not_found_screen).pack()


# Deleting popups
def delete_login_success():
    # Closing the success popup also tears down login and main screens,
    # which releases main_screen.mainloop() and lets the CMS window open.
    login_success_screen.destroy()
    login_screen.destroy()
    main_screen.destroy()


def delete_password_not_recognised():
    password_not_recog_screen.destroy()


def delete_user_not_found_screen():
    user_not_found_screen.destroy()


# Designing Main(first) window
def main_account_screen():
    """Show the initial Login/Register chooser and block in its mainloop."""
    global main_screen
    main_screen = Tk()
    main_screen.geometry("300x250")
    main_screen.title("Account Login")
    Label(text="Select Your Choice", bg="blue", width="300", height="2",
          font=("Calibri", 13)).pack()
    Label(text="").pack()
    Button(text="Login", height="2", width="30", command=login).pack()
    Label(text="").pack()
    Button(text="Register", height="2", width="30", command=register).pack()
    main_screen.mainloop()


if __name__ == '__main__':
    # Login flow first; when it succeeds its windows are destroyed and the
    # main CMS window is constructed.
    main_account_screen()
    root = Tk()
    application = cust(root)
    root.mainloop()
# -*- encoding: utf-8 -*-

"""
auth_confirmation_email.py  - create safe email to confirm user
"""

from log_config import log, pprint, pformat
log.debug ("... loading token_required ...")

from flask import current_app as app

from itsdangerous import URLSafeTimedSerializer

### + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ###
### !!! - DEPRECATED -
### + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ###

### cf : https://realpython.com/handling-email-confirmation-in-flask/


def generate_confirmation_token(email):
    """Return a signed, timestamped token embedding *email*.

    Uses the app's SECRET_KEY for signing and SECURITY_PASSWORD_SALT as
    the namespace salt, so tokens are only valid for this application.
    """
    serializer = URLSafeTimedSerializer(app.config['SECRET_KEY'])
    return serializer.dumps(email, salt=app.config['SECURITY_PASSWORD_SALT'])


def confirm_token(token, expiration=3600):
    """Validate *token* and return the embedded email, or False.

    token      -- value previously produced by generate_confirmation_token
    expiration -- maximum token age in seconds (default 1 hour)

    Returns False when the token is tampered with, expired, or otherwise
    undecodable.  The previous bare ``except:`` also swallowed
    SystemExit/KeyboardInterrupt; narrowed to Exception so process-control
    signals propagate.
    """
    serializer = URLSafeTimedSerializer(app.config['SECRET_KEY'])
    try:
        email = serializer.loads(
            token,
            salt=app.config['SECURITY_PASSWORD_SALT'],
            max_age=expiration
        )
    except Exception:
        return False
    return email
from mac_vendor_lookup import AsyncMacLookup, MacLookup
import asyncio


class maclookup():
    """Thin synchronous facade over mac_vendor_lookup's async API."""

    def __init__(self):
        # Async client for lookups, sync client for vendor-list updates.
        self.async_mac = AsyncMacLookup()
        self.mac = MacLookup()

    def UpdateVendorList(self):
        """Refresh the on-disk OUI vendor database (blocking network call)."""
        print("Updating MAC address vendor list")
        self.mac.update_vendors()
        print("MAC address vendor list has been updated")

    def lookup(self, addr):
        """Return the vendor string for MAC address *addr*.

        Returns None when the lookup fails (unknown vendor, malformed
        address, ...).  Previously the failure path fell through and
        returned None implicitly after printing; made explicit here.
        """
        try:
            loop = asyncio.get_event_loop()
            vendor = loop.run_until_complete(self._lookup(addr))
            return vendor
        except Exception as e:
            # Best-effort: log the error and the offending address.
            print(e)
            print(addr)
            return None

    async def _lookup(self, mac_addr):
        # Delegate to the async backend; awaited via run_until_complete.
        return await self.async_mac.lookup(mac_addr)


if __name__ == "__main__":
    addr = "98:ED:5C:FF:EE:01"
    mac = maclookup()
    vendor = mac.lookup(addr)
    print(vendor)
from tools.chaojiying import Chaojiying_Client
from lxml import etree
import requests
import tools.dict_match


class JS:
    """Scrapes traffic-violation records for a vehicle from ezdrving.com.

    Flow: fetch a JSESSIONID -> download the CAPTCHA image -> solve it via
    the Chaojiying OCR service -> POST the query form -> parse the HTML
    result table into a list of dicts.
    """

    async def get_data(self, info):
        """Entry point: return a result dict for vehicle *info*.

        info -- dict with keys hphm (plate), hpzl (plate type), fdjh
        (engine no.), cjh (frame no.).

        NOTE(review): declared ``async`` but never awaits anything, and the
        __main__ block calls it without awaiting -- ``rs`` there would be a
        coroutine object.  Confirm how callers invoke this.
        """
        try:
            return self.get_jsessionid(info)
        except Exception as e:
            print(e)
            return {"code": 500, "msg": "暂无法查询!"}

    def get_jsessionid(self, info):
        """Hit the landing page to obtain a session cookie, then continue."""
        url = 'http://www.ezdrving.com/wxjswy/index.htm'
        headers = {
            "Host": "www.ezdrving.com",
            "Connection": "keep-alive",
            "User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36 MicroMessenger/6.5.2.501 NetType/WIFI WindowsWechat QBCore/3.43.884.400 QQBrowser/9.0.2524.400",
        }
        rs = requests.post(url=url, headers=headers)
        # First item of Set-Cookie is "JSESSIONID=...".
        jssionid = rs.headers.get("Set-Cookie").split(";")[0]
        # print(jssionid)
        meta = {"info": info, "jssionid": jssionid}
        return self.get_capture(meta)

    def get_capture(self, meta):
        """Download the CAPTCHA for this session and solve it with Chaojiying."""
        url = 'http://www.ezdrving.com/wxjswy/portalLogin/verify.htm'
        headers = {
            "Cookie": meta["jssionid"] + ";"
        }
        rs = requests.get(url=url, headers=headers)
        # with open("yzm.jpeg", "wb+") as f:
        #     f.write(rs.content)
        cjy = Chaojiying_Client()
        # im = open('yzm.jpeg', 'rb').read()
        im = rs.content
        # 1902 is the Chaojiying CAPTCHA type code used by this site.
        result = cjy.PostPic(im, 1902)
        if result.get("err_no") == -1005:
            # OCR-service error: surface its message instead of querying.
            print(result.get("err_str"))
            return result.get("err_str")
        else:
            yzm = result.get("pic_str")
            # yzm = input("请输入验证码:")
            meta["yzm"] = yzm
            return self.parse_data(meta)

    def parse_data(self, meta):
        """POST the query form and parse the violations table.

        Returns {"code": 200, "msg": ..., "result": [record, ...]} on
        success, or a code-500 dict on any failure.
        """
        url = 'http://www.ezdrving.com/wxjswy/illegal/doQueryOut.htm'
        # %E8%8B%8F is the URL-encoded province character prefixed to the
        # plate number (the first char of hphm is dropped and replaced).
        data = f'plateType={meta["info"]["hpzl"]}&plateNO=%E8%8B%8F{meta["info"]["hphm"][1:]}&engineNO={meta["info"]["fdjh"]}&verify={meta["yzm"]}'
        headers = {
            "Host": "www.ezdrving.com",
            "Content-Type": "application/x-www-form-urlencoded",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.109 Safari/537.36",
            "Cookie": meta["jssionid"]
        }
        try:
            rs = requests.post(url=url, headers=headers, data=data)
            # print(rs.text)
            html = etree.HTML(rs.text)
            trs = html.xpath('//table/tr')
            # print(len(trs))
            result = {"code": 200, "msg": "成功!", "result": []}
            # First row is the table header; each remaining row is one record.
            for x in trs[1:]:
                single = {}
                single["hphm"] = meta["info"]["hphm"]
                single["hpzl"] = meta["info"]["hpzl"]
                single["fdjh"] = meta["info"]["fdjh"]
                single["cjh"] = meta["info"]["cjh"]
                # Positional cells: assumes the site's column layout --
                # TODO confirm against a live response.
                single["xh"] = x.xpath('td/text()')[1]
                single["wfsj"] = x.xpath('td/text()')[-2]
                single["wfdz"] = x.xpath('td/text()')[3]
                single["wfxwzt"] = x.xpath('td/text()')[-4]
                # Map the violation text to "points-description" via the
                # local lookup table; skip enrichment when unmapped.
                xw2 = tools.dict_match.js(single["wfxwzt"])
                if xw2:
                    single["wfjfs"] = int(xw2.split("-")[0])
                    single["wfxw"] = xw2.split("-")[1]
                # Fine amount: last cell minus its trailing currency char.
                single["fkje"] = int(x.xpath('td/text()')[-1][:-1])
                result["result"].append(single)
            return result
        except Exception as e:
            print(e)
            return {"code": 500, "msg": "暂无法查询!"}


if __name__ == '__main__':
    p = JS()
    info = {"hphm": "苏H5011M", "hpzl": "02", "cjh": "HFFML6", "fdjh": "000716"}
    rs = p.get_data(info)
    print(rs)
known_chains = {"STEEM" : {"chain_id" : "0" * int(256 / 4), "core_symbol" : "STEEM", "prefix" : "STM"}, }
import pytest
from selenium.webdriver.common.by import By


class HomePage:
    """Page Object for the ProtoCommerce home/registration form page.

    Holds the expected test data, the By-locator tuples, and one
    ``*E()`` accessor method per element that resolves the locator
    against the live driver on each call.
    """

    #contructor:
    def __init__(self, driver):
        # Selenium WebDriver shared by all element accessors.
        self.driver = driver

    #test data
    upperNavigatorFirstETxt = "ProtoCommerce"
    upperNavigatorSecondETxt = "Home"
    upperNavigatorThirdETxt = "Shop"
    nameAlertMinChar = "Name should be at least 2 characters"
    nameAlertEmptyChar = "Name is required"
    genderDropDown = ['Male', 'Female']
    formE2eName = "Itay"
    formE2eEmail = "itayzisman@gmail.com"
    formE2ePassword = "itay1234"
    successMsgAlert = "Success! The Form has been submitted successfully!"

    #locators:
    # (By strategy, selector) tuples, unpacked with * in find_element.
    protoCommerce = (By.CLASS_NAME, "navbar-brand")
    homeBtnLink = (By.LINK_TEXT, "Home")
    shopBtnLink = (By.LINK_TEXT, "Shop")
    successMsg = (By.XPATH, "//div[@class='alert alert-success alert-dismissible']")
    nameField = (By.XPATH, "//label[text()='Name']")
    nameInput = (By.NAME, "name")
    nameAlert = (By.XPATH, "//div[@class='alert alert-danger']")
    email = (By.XPATH, "//form/div[2]/label")
    emailInput = (By.XPATH, "//form/div[2]/input")
    password = (By.XPATH, "//form/div[3]/label")
    passwordInput = (By.XPATH, "//form/div[3]/input")
    checkBox = (By.XPATH, "//form/div[4]/input")
    checkBoxText = (By.XPATH, "//form/div[4]/label")
    gender = (By.XPATH, "//form/div[5]/label")
    genderSelect = (By.ID, "exampleFormControlSelect1")
    employmentStatus = (By.XPATH, "//form/div[6]/label")
    studentRadioBtn = (By.ID, "inlineRadio1")
    employedRadioBtn = (By.ID, "inlineRadio2")
    entrepreneurRadioBtn = (By.ID, "inlineRadio3")
    student = (By.XPATH, "//form/div[6]/div[1]/label")
    dateOfBirth = (By.XPATH, "//form/div[7]/label")
    dateOfBirthInput = (By.XPATH, "//form/div[7]/input")
    submitBtn = (By.XPATH, "//input[@value='Submit']")

    #elements:
    # Each accessor re-finds the element so callers always get a fresh,
    # non-stale reference.
    def protoCommerceE(self):
        return self.driver.find_element(*HomePage.protoCommerce)

    def homeBtnLinkE(self):
        return self.driver.find_element(*HomePage.homeBtnLink)

    def shopBtnLinkE(self):
        return self.driver.find_element(*HomePage.shopBtnLink)

    def nameFieldE(self):
        return self.driver.find_element(*HomePage.nameField)

    def nameInputE(self):
        return self.driver.find_element(*HomePage.nameInput)

    def nameAlertE(self):
        return self.driver.find_element(*HomePage.nameAlert)

    def emailE(self):
        return self.driver.find_element(*HomePage.email)

    def emailInputE(self):
        return self.driver.find_element(*HomePage.emailInput)

    def passwordE(self):
        return self.driver.find_element(*HomePage.password)

    def passwordInputE(self):
        return self.driver.find_element(*HomePage.passwordInput)

    def checkBoxE(self):
        return self.driver.find_element(*HomePage.checkBox)

    def checkBoxTextE(self):
        return self.driver.find_element(*HomePage.checkBoxText)

    def genderE(self):
        return self.driver.find_element(*HomePage.gender)

    def genderSelectE(self):
        return self.driver.find_element(*HomePage.genderSelect)

    def employmentStatusE(self):
        return self.driver.find_element(*HomePage.employmentStatus)

    def studentRadioBtnE(self):
        return self.driver.find_element(*HomePage.studentRadioBtn)

    def employedRadioBtnE(self):
        return self.driver.find_element(*HomePage.employedRadioBtn)

    def entrepreneurRadioBtnE(self):
        return self.driver.find_element(*HomePage.entrepreneurRadioBtn)

    def dateOfBirthE(self):
        return self.driver.find_element(*HomePage.dateOfBirth)

    def dateOfBirthInputE(self):
        return self.driver.find_element(*HomePage.dateOfBirthInput)

    def submitBtnE(self):
        return self.driver.find_element(*HomePage.submitBtn)

    def successMsgE(self):
        return self.driver.find_element(*HomePage.successMsg)
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the botocore instrumentation's SNS extension: span naming,
messaging attributes, and trace-context injection into message attributes."""
import contextlib
from typing import Any, Dict
from unittest import mock

import botocore.session
from botocore.awsrequest import AWSResponse
from moto import mock_sns

from opentelemetry.instrumentation.botocore import BotocoreInstrumentor
from opentelemetry.semconv.trace import (
    MessagingDestinationKindValues,
    SpanAttributes,
)
from opentelemetry.test.test_base import TestBase
from opentelemetry.trace import SpanKind
from opentelemetry.trace.span import Span


class TestSnsExtension(TestBase):
    def setUp(self):
        """Instrument botocore and build an SNS client with dummy creds."""
        super().setUp()
        BotocoreInstrumentor().instrument()

        session = botocore.session.get_session()
        session.set_credentials(
            access_key="access-key", secret_key="secret-key"
        )
        self.client = session.create_client("sns", region_name="us-west-2")
        self.topic_name = "my-topic"

    def tearDown(self):
        """Undo the instrumentation installed in setUp."""
        super().tearDown()
        BotocoreInstrumentor().uninstrument()

    def _create_topic(self, name: str = None) -> str:
        """Create a topic (default: self.topic_name) and return its ARN.

        Clears the span exporter so the creation call's span does not
        pollute the assertions of the test that follows.
        """
        if name is None:
            name = self.topic_name

        response = self.client.create_topic(Name=name)

        self.memory_exporter.clear()
        return response["TopicArn"]

    @contextlib.contextmanager
    def _mocked_aws_endpoint(self, response):
        """Patch botocore's endpoint layer to return *response* verbatim."""
        response_func = self._make_aws_response_func(response)
        with mock.patch(
            "botocore.endpoint.Endpoint.make_request", new=response_func
        ):
            yield

    @staticmethod
    def _make_aws_response_func(response):
        # Shape matches Endpoint.make_request: (http_response, parsed_dict).
        def _response_func(*args, **kwargs):
            return AWSResponse("http://127.0.0.1", 200, {}, "{}"), response

        return _response_func

    def assert_span(self, name: str) -> Span:
        """Assert exactly one PRODUCER span named *name* and return it."""
        spans = self.memory_exporter.get_finished_spans()
        self.assertEqual(1, len(spans))
        span = spans[0]

        self.assertEqual(SpanKind.PRODUCER, span.kind)
        self.assertEqual(name, span.name)
        self.assertEqual(
            "aws.sns", span.attributes[SpanAttributes.MESSAGING_SYSTEM]
        )

        return span

    def assert_injected_span(self, message_attrs: Dict[str, Any], span: Span):
        """Assert *span*'s context was injected as a traceparent attribute."""
        # traceparent: <ver>-<trace-id>-<span-id>-<flags>
        trace_parent = message_attrs["traceparent"]["StringValue"].split("-")
        span_context = span.get_span_context()

        self.assertEqual(span_context.trace_id, int(trace_parent[1], 16))
        self.assertEqual(span_context.span_id, int(trace_parent[2], 16))

    @mock_sns
    def test_publish_to_topic_arn(self):
        self._test_publish_to_arn("TopicArn")

    @mock_sns
    def test_publish_to_target_arn(self):
        self._test_publish_to_arn("TargetArn")

    def _test_publish_to_arn(self, arg_name: str):
        """Publish via *arg_name* and check topic destination attributes."""
        target_arn = self._create_topic(self.topic_name)

        self.client.publish(
            **{
                arg_name: target_arn,
                "Message": "Hello message",
            }
        )

        span = self.assert_span(f"{self.topic_name} send")
        self.assertEqual(
            MessagingDestinationKindValues.TOPIC.value,
            span.attributes[SpanAttributes.MESSAGING_DESTINATION_KIND],
        )
        self.assertEqual(
            self.topic_name,
            span.attributes[SpanAttributes.MESSAGING_DESTINATION],
        )

    @mock_sns
    def test_publish_to_phone_number(self):
        """SMS publishes are named/labelled with the generic phone target."""
        phone_number = "+10000000000"
        self.client.publish(
            PhoneNumber=phone_number,
            Message="Hello SNS",
        )

        span = self.assert_span("phone_number send")
        self.assertEqual(
            phone_number, span.attributes[SpanAttributes.MESSAGING_DESTINATION]
        )

    @mock_sns
    def test_publish_injects_span(self):
        """publish() must inject the trace context into MessageAttributes."""
        message_attrs = {}
        topic_arn = self._create_topic()
        self.client.publish(
            TopicArn=topic_arn,
            Message="Hello Message",
            MessageAttributes=message_attrs,
        )

        span = self.assert_span(f"{self.topic_name} send")
        self.assert_injected_span(message_attrs, span)

    def test_publish_batch_to_topic(self):
        """publish_batch injects the context into every entry's attributes."""
        topic_arn = f"arn:aws:sns:region:000000000:{self.topic_name}"
        message1_attrs = {}
        message2_attrs = {}
        mock_response = {
            "Successful": [
                {"Id": "1", "MessageId": "11", "SequenceNumber": "1"},
                {"Id": "2", "MessageId": "22", "SequenceNumber": "2"},
            ],
            "Failed": [],
        }

        # publish_batch not implemented by moto so mock the endpoint instead
        with self._mocked_aws_endpoint(mock_response):
            self.client.publish_batch(
                TopicArn=topic_arn,
                PublishBatchRequestEntries=[
                    {
                        "Id": "1",
                        "Message": "Hello message 1",
                        "MessageAttributes": message1_attrs,
                    },
                    {
                        "Id": "2",
                        "Message": "Hello message 2",
                        "MessageAttributes": message2_attrs,
                    },
                ],
            )

        span = self.assert_span(f"{self.topic_name} send")
        self.assertEqual(
            MessagingDestinationKindValues.TOPIC.value,
            span.attributes[SpanAttributes.MESSAGING_DESTINATION_KIND],
        )
        self.assertEqual(
            self.topic_name,
            span.attributes[SpanAttributes.MESSAGING_DESTINATION],
        )

        self.assert_injected_span(message1_attrs, span)
        self.assert_injected_span(message2_attrs, span)
import csv
import sys
import re

# Matches a field that is entirely an optionally-signed integer.
_INT_RE = re.compile(r"[-+]?\d+$")


def coerce_ints(row):
    """Return *row* with every integer-looking string field cast to int."""
    return [int(x) if _INT_RE.match(x) else x for x in row]


def process(paths, writer):
    """Rewrite each CSV in *paths* to *writer* with an index and state column.

    For every input file: skip its header row, then for each data row emit
    [row_index, state, *row] where ``state`` is the 5th '/'-separated
    segment of column 9 (presumably a URL/path -- TODO confirm).

    Fixes vs. the original script: ``reader.next()`` was Python-2-only
    (AttributeError on Python 3) -- replaced with the portable ``next()``
    builtin; input files are now closed via ``with``.
    """
    for path in paths:
        with open(path) as handle:
            rows = csv.reader(handle)
            next(rows)  # discard header row
            for i, row in enumerate(rows):
                row = coerce_ints(row)
                state = row[9].split('/')[4]
                writer.writerow([i, state] + row)


if __name__ == '__main__':
    process(sys.argv[1:], csv.writer(sys.stdout))
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 15 14:29:00 2018

by Meena Sirisha

Exploratory script: draw a shuffled sample of 1..100, print simple
statistics, and compute the median of the sorted sample.
"""
import random


def median(l):
    """Print and return the median of the already-sorted list *l*.

    l -- non-empty sorted sequence of numbers.

    Bug fix: the original function had the branches inverted (it printed a
    single middle element for even lengths and averaged two elements for
    odd lengths, with rounding errors on the indices).  Correct behavior:
    odd length -> middle element; even length -> mean of the two middles.
    """
    n = len(l)
    if n % 2 == 0:
        result = (l[n // 2 - 1] + l[n // 2]) / 2
    else:
        result = l[n // 2]
    print(result)
    return result


if __name__ == '__main__':
    # 100 values sampled from range(1, 101) == a random permutation of 1..100.
    a = random.sample(range(1, 101), 100)
    print(a)
    print(min(a))
    print(max(a))
    b = sorted(a)
    print(b)
    median(b)
#!/usr/bin/env python2.6 """ This is a Python version of the ForestMoon Dynamixel library originally written in C# by Scott Ferguson. The Python version was created by Patrick Goebel (mailto:patrick@pirobot.org) for the Pi Robot Project which lives at http://www.pirobot.org. The original license for the C# version is as follows: This software was written and developed by Scott Ferguson. The current version can be found at http://www.forestmoon.com/Software/. This free software is distributed under the GNU General Public License. See http://www.gnu.org/licenses/gpl.html for details. This license restricts your usage of the software in derivative works. * * * * * Dynamixel interface """ import defs import time import dynamixel_network import re import operator from defs import DEVICE AX12 = DEVICE['AX12'] AXS1 = DEVICE['AXS1'] AXS1PropNames = {} class SensorModule(object): """ Dynamixel AX-S1 class """ def __init__(self, ident, dyn_net): """ Constructor ident - the id for this dynamixel dyn_net - the parent dynamixel network """ self._id = ident self._dyn_net = dyn_net self.cache = {} self.changed = False self._synchronized = True self.registers = AXS1 # Add properties to this class for each register # This allows introspection by dir() for register in self.registers: # Split the Register Name prop = self._transformName(register) AXS1PropNames[prop] = register self.__dict__[prop] = property() def _transformName(self, register): ''' Returns a register name formated for use as a property ''' words = [a.lower() for a in re.split(r'([A-Z][a-z]*)', register) if a] prop = "_".join(words) return prop def __getattribute__(self, name): # Abuse global scope a bit to save a ton of work if name in AXS1PropNames.keys(): regName = AXS1PropNames[name] return self._get_register_value(AXS1[regName]) else: return super(SensorModule, self).__getattribute__(name) def __setattr__(self, name, value): # Abuse global scope a bit to save a ton of work if name in AXS1PropNames.keys(): regName 
= AXS1PropNames[name] self.set_register_value(AXS1[regName], value) else: return super(SensorModule, self).__setattr__(name, value) def _no_cache(self, register): """ deteremine if a register value should be cached register - register returns True if should not be cached """ return register in [AXS1.CurrentTemperature, AXS1.CurrentVoltage, AXS1.BuzzerTime, AXS1.LeftIrSensorValue, AXS1.CenterIrSensorValue, AXS1.RightIrSensorValue, AXS1.LeftLumin, AXS1.CenterLumin, AXS1.RightLumin, AXS1.ObstacleDetectedFlag, AXS1.LuminDetectedFlag, AXS1.SoundValue, AXS1.SoundValueMax, AXS1.SoundDetectedCount, AXS1.SoundDetectedTime, AXS1.IrRemoconRxData0, AXS1.IrRemoconRxData1] def __getitem__(self, register): """ Get a cache value register - register to retrieve returns value or -1 if not in cache """ data = -1 if register in self.cache: data = self.cache[register] return data def __setitem__(self, register, value): """ Set a cache value register - register to retrieve """ self.cache[register] = value def _get_register_value(self, register): """ Get a register value from the cache, if present, or by reading the value from the Dynamixel reg - register to read return the register value""" register_length = self.register_length(register) if self._no_cache(register): return self._dyn_net.read_register(self._id, register, register_length) else: value = self[register] if value == -1: return self._dyn_net.read_register(self._id, register, register_length) else: return value def register_length(self, register): """ Returns if the logical register is 1 or 2 bytes in length """ # Get this value from the module enum desc = AXS1.description(value = register) return desc['registerLen'] def set_register_value(self, register, value): """Set a register value and record in the cache, if applicable. 
register - register value - byte or word value """ if register in [AXS1.ModelNumber, AXS1.FirmwareVersion, AXS1.CurrentVoltage, AXS1.CurrentTemperature]: raise ValueError("Cannot set register") register_length = self.register_length(register) # Write to module but not cache if self._no_cache(register): self._dyn_net.write_register(self._id, register, register_length, value, False) return # Previously cached, same value. No-op if self[register] == value: return # Can be cached, new value, write to both module and cache self._dyn_net.write_register(self._id, register, register_length, value, False) self[register] = value def read_all(self): """ Read all register values into the cache """ registers = [] # Create tuples of (RegisterAddress, RegisterLength) for i in sorted(AXS1.values()): description = AXS1.description(value = i) regLen = description['registerLen'] registers.append((i, regLen)) # Get all relevant values values = self._dyn_net.read_registers(self._id, registers) # Put those into the cache for i in range(len(values)): addr = registers[i][0] val = values[i] self[addr] = val def _get_current_voltage(self): """getter""" volts = self._get_register_value(AXS1.CurrentVoltage) return volts / 10.0 class Dynamixel (object): """ Dynamixel AX-12+ class """ def __init__(self, ident, dyn_net): """ Constructor ident - the id for this dynamixel dyn_net - the parent dynamixel network """ self._id = ident self._dyn_net = dyn_net self.cache = {} self.changed = False self._synchronized = True def __getitem__(self, register): """ Get a cache value register - register to retrieve returns value or -1 if not in cache """ data = -1 if register in self.cache: data = self.cache[register] return data def __setitem__(self, register, value): """ Set a cache value register - register to retrieve """ self.cache[register] = value def __str__(self): return "Dyn %d" % (self._id) def _no_cache(self, register): """ deteremine if a register value should be cached register - register returns 
True if should not be cached """ return register in [AX12.CurrentLoad, AX12.CurrentPosition, AX12.CurrentSpeed, AX12.CurrentTemperature, AX12.CurrentVoltage, AX12.Moving, AX12.TorqueEnable] def _get_register_value(self, register): """ Get a register value from the cache, if present, or by reading the value from the Dynamixel reg - register to read return the register value""" register_length = self.register_length(register) if register in [AX12.GoalPosition, AX12.MovingSpeed]: return self[register] if self._no_cache(register): return self._dyn_net.read_register(self._id, register, register_length) else: value = self[register] if value == -1: return self._dyn_net.read_register(self._id, register, register_length) else: return value def set_register_value(self, register, value): """Set a register value and record in the cache, if applicable. register - register value - byte or word value """ if register in [AX12.GoalPosition, AX12.MovingSpeed]: if self._synchronized: if register == AX12.MovingSpeed and value == 0: value = 1 print "Moving speed %d " % (value) self[register] = value self.changed = True elif register in [AX12.ModelNumber, AX12.FirmwareVersion, AX12.CurrentPosition, AX12.CurrentSpeed, AX12.CurrentLoad, AX12.CurrentVoltage, AX12.CurrentTemperature, AX12.Moving]: raise ValueError("Cannot set register") register_length = self.register_length(register) if self._no_cache(register): self._dyn_net.write_register(self._id, register, register_length, value, False) return if self[register] == value: return self._dyn_net.write_register(self._id, register, register_length, value, False) self[register] = value def register_length(self, register): """ Returns if the logical register is 1 or 2 bytes in length """ # Get this value from the module enum desc = AX12.description(value = register) return desc['registerLen'] def read_all(self): """ Read all register values into the cache """ registers = [] # Create tuples of (RegisterAddress, RegisterLength) for i in 
sorted(AX12.values()): description = AX12.description(value = i) regLen = description['registerLen'] registers.append((i, regLen)) # Get all relevant values values = self._dyn_net.read_registers(self._id, registers) # Put those into the cache for i in range(len(values)): addr = registers[i][0] val = values[i] self[addr] = val def reset(self, ident): """Resets a dynamixel ident - id to reset Note: This function should be used carefully, if at all. It resets the unit ID to 1, so careful planning is required to to avoid collisions on a network with more than one Dynamixel. """ self._dyn_net.write_instruction(ident, defs.INSTRUCTION.Reset, None) self._dyn_net.await_packet(ident, 0) def reset_registers(self): """Reset register values to factory default""" self._dyn_net.dynamixel_id_change(self, 1) self.reset(self._id) self._id = 1 time.sleep(0.3) self.read_all() def stop(self): """ Stop the Dynamixel from moving. There is no direct way to command a Dynamixel to stop. And there is no way to set the speed to 0, since the value 0 is specially interpreted to mean 'as fast as possibly'. The best we can do is command it to move to its current position and set the speed to 1 to slow it down as much as possible. If there is any significant lag between receiving the CurrentPosition and setting it and the speed, there will be some residual movement as the Dynamixel moves to its observed but stale CurrentPosition. If the Dynamixel is in Sychronized mode, a call to 'DynamixelNextwork.Synchronize' will be required to complete the operation. 
""" self.goal_position = self.current_position self.moving_speed = 1 ########################################################################### # Properties def _get_synchronized(self): """getter""" return self._synchronized def _set_synchronized(self, value): """ setter """ self._synchronized = value synchronized = property(_get_synchronized, _set_synchronized) def _get_goal_position(self): """getter""" return self._get_register_value(AX12.GoalPosition) def _set_goal_position(self, value): """ setter """ self.set_register_value(AX12.GoalPosition, value) goal_position = property(_get_goal_position, _set_goal_position) def _get_moving_speed(self): """getter""" return self._get_register_value(AX12.MovingSpeed) def _set_moving_speed(self, value): """ setter """ self.set_register_value(AX12.MovingSpeed, value) moving_speed = property(_get_moving_speed, _set_moving_speed) def _get_alarm_led(self): """getter""" return self._get_register_value(AX12.AlarmLED) def _set_alarm_led(self, value): """ setter """ self.set_register_value(AX12.AlarmLED, value) alarm_led = property(_get_alarm_led, _set_alarm_led) def _get_alarm_shutdown(self): """getter""" return self._get_register_value(AX12.AlarmShutdown) def _set_alarm_shutdown(self, value): """ setter """ self.set_register_value(AX12.AlarmShutdown, value) alarm_shutdown = property(_get_alarm_shutdown, _set_alarm_shutdown) def _get_baud_rate(self): """getter""" return self._get_register_value(AX12.BaudRate) def _set_baud_rate(self, value): """ setter """ self.set_register_value(AX12.BaudRate, value) baud_rate = property(_get_baud_rate, _set_baud_rate) def _get_cw_angle_limit(self): """getter""" return self._get_register_value(AX12.CWAngleLimit) def _set_cw_angle_limit(self, value): """ setter """ self.set_register_value(AX12.CWAngleLimit, value) cw_angle_limit = property(_get_cw_angle_limit, _set_cw_angle_limit) def _get_ccw_angle_limit(self): """getter""" return self._get_register_value(AX12.CCWAngleLimit) def 
_set_ccw_angle_limit(self, value): """ setter """ self.set_register_value(AX12.CCWAngleLimit, value) ccw_angle_limit = property(_get_ccw_angle_limit, _set_ccw_angle_limit) def _get_ccw_compliance_margin(self): """getter""" return self._get_register_value(AX12.CCWComplianceMargin) def _set_ccw_compliance_margin(self, value): """ setter """ self.set_register_value(AX12.CCWComplianceMargin, value) ccw_compliance_margin = property(_get_ccw_compliance_margin, _set_ccw_compliance_margin) def _get_cw_compliance_margin(self): """getter""" return self._get_register_value(AX12.CWComplianceMargin) def _set_cw_compliance_margin(self, value): """ setter """ self.set_register_value(AX12.CWComplianceMargin, value) cw_compliance_margin = property(_get_cw_compliance_margin, _set_cw_compliance_margin) def _get_ccw_compliance_slope(self): """getter""" return self._get_register_value(AX12.CCWComplianceSlope) def _set_ccw_compliance_slope(self, value): """ setter """ self.set_register_value(AX12.CCWComplianceSlope, value) ccw_compliance_slope = property(_get_ccw_compliance_slope, _set_ccw_compliance_slope) def _get_cw_compliance_slope(self): """getter""" return self._get_register_value(AX12.CWComplianceSlope) def _set_cw_compliance_slope(self, value): """ setter """ self.set_register_value(AX12.CWComplianceSlope, value) cw_compliance_slope = property(_get_cw_compliance_slope, _set_cw_compliance_slope) def _get_current_load(self): """getter""" val = self._get_register_value(AX12.CurrentLoad) if (val & 0x400) != 0: return -(val & 0x3FF) return val current_load = property(_get_current_load) def _get_current_position(self): """getter""" return self._get_register_value(AX12.CurrentPosition) current_position = property(_get_current_position) def _get_current_speed(self): """getter""" val = self._get_register_value(AX12.CurrentSpeed) if (val & 0x400) != 0: return -(val & 0x3FF) return val current_speed = property(_get_current_speed) def _get_current_temperature(self): """getter""" return 
self._get_register_value(AX12.CurrentTemperature) current_temperature = property(_get_current_temperature) def _get_current_voltage(self): """getter""" volts = self._get_register_value(AX12.CurrentVoltage) return volts / 10.0 current_voltage = property(_get_current_voltage) def _get_torque_enable(self): """getter""" return (self._get_register_value(AX12.TorqueEnable) != 0) def _set_torque_enable(self, value): """ setter """ self.set_register_value(AX12.TorqueEnable, 1 if value else 0) torque_enable = property(_get_torque_enable, _set_torque_enable) def _get_firmware_version(self): """getter""" return self._get_register_value(AX12.FirmwareVersion) firmware_version = property(_get_firmware_version) def _get_id(self): """getter""" return self._id def _set_id(self, value): """change id of the dynamixel""" broadcast_id = dynamixel_network.DynamixelInterface.BROADCAST_ID if value < 0 or value >= broadcast_id: raise ValueError("Id must be in range 0 to 253") if value == self._id: return self._dyn_net.dynamixel_id_change(self, value) registerLength = self.register_length(AX12.Id) self._dyn_net.write_register(self._id, AX12.Id, registerLength, value, False) self._id = value id = property(_get_id, _set_id) def _get_led(self): """getter""" return (self._get_register_value(AX12.LED) != 0) def _set_led(self, value): """setter""" self.set_register_value(AX12.LED, 1 if value else 0) led = property(_get_led, _set_led) def _get_lock(self): """getter""" return (self._get_register_value(AX12.Lock) != 0) lock = property(_get_lock) def _get_temperature_limit(self): """getter""" return self._get_register_value(AX12.TemperatureLimit) def _set_temperature_limit(self, value): """setter""" self.set_register_value(AX12.TemperatureLimit, value) temperature_limit = property(_get_temperature_limit, _set_temperature_limit) def _get_max_torque(self): """getter""" return self._get_register_value(AX12.MaxTorque) def _set_max_torque(self, value): """setter""" self.set_register_value(AX12.MaxTorque, 
value) max_torque = property(_get_max_torque, _set_max_torque) def _get_high_voltage_limit(self): """getter""" return self._get_register_value(AX12.HighVoltageLimit)/10.0 def _set_high_voltage_limit(self, value): """setter""" adj_value = int(round(value* 10.0)) self.set_register_value(AX12.HighVoltageLimit, adj_value) high_voltage_limit = property(_get_high_voltage_limit, _set_high_voltage_limit) def _get_low_voltage_limit(self): """getter""" return self._get_register_value(AX12.LowVoltageLimit)/10.0 def _set_low_voltage_limit(self, value): """setter""" adj_value = int(round(value * 10.0)) self.set_register_value(AX12.LowVoltageLimit, adj_value) low_voltage_limit = property(_get_low_voltage_limit, _set_low_voltage_limit) def _get_model_number(self): """getter""" return self._get_register_value(AX12.ModelNumber) model_number = property(_get_model_number) def _get_moving(self): """getter""" is_moving = (self._get_register_value(AX12.Moving) != 0) return self.changed or is_moving moving = property(_get_moving) def _get_punch(self): """getter""" return self._get_register_value(AX12.Punch) def _set_punch(self, value): """setter""" self.set_register_value(AX12.Punch, value) punch = property(_get_punch, _set_punch) def _get_registered_instruction(self): """getter""" result = self._get_register_value(AX12.RegisteredInstruction) return result != 0 def _set_registered_instruction(self, value): """setter""" self.set_register_value(AX12.RegisteredInstruction, 1 if value else 0) registered_instruction = property(_get_registered_instruction, _set_registered_instruction) def _get_return_delay(self): """getter""" return self._get_register_value(AX12.ReturnDelay) * 2 def _set_return_delay(self, value): """setter""" self.set_register_value(AX12.ReturnDelay, value / 2) return_delay = property(_get_return_delay, _set_return_delay) def _get_status_return_level(self): """getter""" return self._get_register_value(AX12.StatusReturnLevel) def _set_status_return_level(self, value): 
"""setter""" self.set_register_value(AX12.StatusReturnLevel, value) status_return_level = property(_get_status_return_level, _set_status_return_level) def _get_torque_limit(self): """getter""" return self._get_register_value(AX12.TorqueLimit) def _set_torque_limit(self, value): """setter""" self.set_register_value(AX12.TorqueLimit, value) torque_limit = property(_get_torque_limit, _set_torque_limit)
from aiogram.types import InlineKeyboardMarkup, InlineKeyboardButton

# Language-selection inline keyboard: Ukrainian gets its own row,
# Russian and English share the second row.
_uk_row = [InlineKeyboardButton(text="Українська", callback_data="lang_uk")]
_ru_en_row = [
    InlineKeyboardButton(text="Русский", callback_data="lang_ru"),
    InlineKeyboardButton(text="English", callback_data="lang_en"),
]

choose_language = InlineKeyboardMarkup(inline_keyboard=[_uk_row, _ru_en_row])
# -*- coding: utf-8 -*- # @Author: gzliuxin # @Email: gzliuxin@corp.netease.com # @Date: 2017-07-14 19:47:51 import json from poco import Poco from poco.agent import PocoAgent from poco.utils.simplerpc.utils import sync_wrapper from poco.freezeui.hierarchy import FrozenUIHierarchy, FrozenUIDumper from poco.utils.airtest import AirtestInput, AirtestScreen from poco.utils.simplerpc.rpcclient import RpcClient from poco.utils.simplerpc.transport.ws import WebSocketClient __all__ = ['CocosJsPoco'] DEFAULT_ADDR = "ws://localhost:5003" class CocosJsPocoAgent(PocoAgent): def __init__(self, addr=DEFAULT_ADDR): # init airtest env try: # new version from airtest.core.api import connect_device, device as current_device if not current_device(): connect_device("Android:///") except ImportError: # old version from airtest.cli.runner import device as current_device from airtest.core.main import set_serialno if not current_device(): set_serialno() # cocos games poco sdk listens on Android localhost:5003 localport = addr.split(":")[-1] current_device().adb.forward("tcp:%s" % localport, "tcp:5003", False) self.conn = WebSocketClient(addr) self.c = RpcClient(self.conn) self.c.DEBUG = False # self.c.run(backend=True) self.c.wait_connected() hierarchy = FrozenUIHierarchy(Dumper(self.c)) screen = AirtestScreen() input = AirtestInput() super(CocosJsPocoAgent, self).__init__(hierarchy, input, screen, None) class Dumper(FrozenUIDumper): def __init__(self, rpcclient): super(Dumper, self).__init__() self.rpcclient = rpcclient @sync_wrapper def dumpHierarchy(self): return self.rpcclient.call("dump") class CocosJsPoco(Poco): """docstring for CocosJsPoco""" def __init__(self, addr=DEFAULT_ADDR, **options): agent = CocosJsPocoAgent(addr) if 'action_interval' not in options: options['action_interval'] = 0.5 super(CocosJsPoco, self).__init__(agent, **options) def on_pre_action(self, action, proxy, args): # airteset log用 from airtest.core.api import snapshot snapshot(msg=unicode(proxy)) # test code 
def dump():
    """Fetch one UI-hierarchy dump straight over the websocket RPC endpoint.

    Opens a fresh connection to DEFAULT_ADDR, issues the JSON-RPC "dump"
    request, and returns the decoded ``result`` payload.
    """
    from websocket import create_connection
    connection = create_connection(DEFAULT_ADDR, timeout=2)
    connection.send('{"jsonrpc": "2.0", "params": {}, "method": "dump", "id": 0}')
    raw_reply = connection.recv()
    connection.close()
    reply = json.loads(raw_reply)
    return reply["result"]
#!/usr/bin/env python
"""Run a sample query against BigQuery and print the resulting rows."""
from oauth2client.client import GoogleCredentials
from googleapiclient.discovery import build

project_id = 'reddit-corpus-analysis'

# Grab the application's default credentials from the environment.
credentials = GoogleCredentials.get_application_default()

# Construct the service object for interacting with the BigQuery API.
bigquery_service = build('bigquery', 'v2', credentials=credentials)

fh_reddit_tables = """
[fh-bigquery:reddit_comments.2007],
[fh-bigquery:reddit_comments.2008],
[fh-bigquery:reddit_comments.2009],
[fh-bigquery:reddit_comments.2010],
[fh-bigquery:reddit_comments.2011],
[fh-bigquery:reddit_comments.2012],
[fh-bigquery:reddit_comments.2013],
[fh-bigquery:reddit_comments.2014],
[fh-bigquery:reddit_comments.2015_01],
[fh-bigquery:reddit_comments.2015_02],
[fh-bigquery:reddit_comments.2015_03],
[fh-bigquery:reddit_comments.2015_04],
[fh-bigquery:reddit_comments.2015_05],
[fh-bigquery:reddit_comments.2015_06],
[fh-bigquery:reddit_comments.2015_07],
[fh-bigquery:reddit_comments.2015_08]
"""

query_request = bigquery_service.jobs()

# Flip to False to count rows over the Reddit comment tables instead of the
# public Shakespeare sample.
shakespeare = True

_shakespeare_sql = (
    'SELECT TOP(corpus, 10) as title, '
    'COUNT(*) as unique_words '
    'FROM [publicdata:samples.shakespeare];')
_reddit_sql = 'select count(*) from %s' % fh_reddit_tables

query_data = {'query': _shakespeare_sql if shakespeare else _reddit_sql}

query_response = query_request.query(
    projectId=project_id, body=query_data).execute()

print('Query Results:')
for row in query_response['rows']:
    print('\t'.join(field['v'] for field in row['f']))
import torch
import numpy as np
from scipy.linalg import sqrtm


class Preprocessor:
    """Abstract base class for preprocessing actions.

    Subclasses are invoked with an instance of a ``_Recording`` subclass and
    operate on it in place. Modifications to the data itself should live in a
    :any:`BaseTransform` subclass, obtained via :meth:`get_transform`.
    """

    def __call__(self, recording, **kwargs):
        """Preprocess a single recording, possibly modifying it in place.

        Parameters
        ----------
        recording :
            The recording to preprocess.
        kwargs : dict
            Extra keyword arguments that particular ``_Recording`` subclasses
            may require.
        """
        raise NotImplementedError()

    def get_transform(self):
        """Return the transform associated with this preprocessor.

        Intended to be called after the preprocessor has been applied to a
        dataset (e.g. through ``preprocess``).

        Returns
        -------
        transform : BaseTransform
        """
        raise NotImplementedError()
from flask_wtf import Form
from wtforms import StringField, SubmitField, DecimalField, IntegerField
from wtforms.validators import DataRequired
from wtforms.fields.simple import PasswordField

# NOTE(review): flask_wtf.Form was renamed FlaskForm in Flask-WTF 0.13; this
# alias is deprecated — confirm the pinned Flask-WTF version before upgrading.


class LoginForm(Form):
    """Login form: numeric user id plus password, all fields required."""
    user_id = IntegerField('ID:', validators=[DataRequired()])
    password = PasswordField('Password:', validators=[DataRequired()])
    submit = SubmitField('Login')


class BooklistForm(Form):
    """Book-list page form exposing only a "New" button."""
    submit = SubmitField('New')


class BookForm(Form):
    """Form for creating/editing a book record (ISBN, title, price)."""
    isbn = IntegerField('ISBN: ', validators=[DataRequired()])
    title = StringField('Title:', validators=[DataRequired()])
    price = DecimalField('Price: ', validators=[DataRequired()])
#from os import stat_result from tkinter import* #import 뒤에 *를 꼭 붙여주세요... 저거 안해서 못한 거였어... root = Tk() root.title ('Filp the switch') #root.iconbitmap ('C:\Users\USER\Documents\GitHub\NadoPython\Personal\Jess.Kim\Toggle_on.jpg') root.geometry("500x300") my_label = Label(root, text="Switch On", fg="green", font=("Helvetica", 32)) my_label.pack(pady=20) #창까지는 띄웠는데 아래 내용에 오류가 있음 ''' on = PhotoImage(file="C:\Users\USER\Documents\GitHub\NadoPython\Personal\Jess.Kim\Toggle_on.jpg") off = PhotoImage(file="C:\Users\USER\Documents\GitHub\NadoPython\Personal\Jess.Kim\Toggle_off.jpg") on_button = Button(root, image=on) on_button.pack(pady=50) ''' root.mainloop()
from functools import wraps

from django.http import HttpResponse
from django.shortcuts import redirect


def unauthenticated_user(view_func):
    """Allow a view only for anonymous users.

    Authenticated users are redirected to the 'info' page; everyone else is
    passed through to the wrapped view.
    """
    @wraps(view_func)  # preserve the wrapped view's name/docstring
    def check_user(request, *args, **kwargs):
        if request.user.is_authenticated:
            return redirect('info')
        return view_func(request, *args, **kwargs)
    return check_user


def allowed_users(allowed_roles=()):
    """Restrict a view to users whose first group is in *allowed_roles*.

    allowed_roles - iterable of group names permitted to access the view.
    Users outside those groups receive a plain "not authorized" response.
    (Default changed from a mutable [] to an immutable tuple — membership
    semantics are identical.)
    """
    def decorator(view_func):
        @wraps(view_func)
        def check_user(request, *args, **kwargs):
            group = None
            # Only the user's FIRST group is consulted.
            if request.user.groups.exists():
                group = request.user.groups.all()[0].name
            if group in allowed_roles:
                return view_func(request, *args, **kwargs)
            return HttpResponse("<h1>You Are not Authorized</h1>")
        return check_user
    return decorator


def admin_only(view_func):
    """Route a view by role: admins reach it, students go to 'info'.

    NOTE(review): users in any other group (or with no group) fall through
    and get an implicit None response, which Django rejects — confirm
    whether a redirect or 403 was intended.
    """
    @wraps(view_func)
    def check_user(request, *args, **kwargs):
        group = None
        if request.user.groups.exists():
            group = request.user.groups.all()[0].name
        if group == "admin":
            return view_func(request, *args, **kwargs)
        if group == "student":
            return redirect('info')
        return None  # made the original implicit fall-through explicit
    return check_user
'''
Created on Jan 4, 2019

@author: sumit
'''
import os
import xlsxwriter
import json
from logging import raiseExceptions
try:
    from Lib import LibUtils
except:
    # NOTE(review): deliberately best-effort — LibUtils may be importable a
    # different way in some environments; kept broad to preserve behaviour.
    pass
    #import LibUtils


def DirCheck(folder):
    '''
    Return True if *folder* is a non-empty path naming an existing directory,
    else False.
    :param folder: directory path to test
    '''
    return bool(folder and os.path.isdir(folder))


def GetFiles(folder=""):
    '''
    Return a dict mapping media file names (MP4/JPG/360) in *folder* to their
    full paths.
    :param folder: directory to scan
    '''
    return {name: os.path.join(folder, name)
            for name in os.listdir(folder)
            if name.endswith(('.MP4', '.mp4', '.JPG', '.360'))}


def GetMp4Files(folder=""):
    '''
    Return the list of MP4/360 file names in *folder*.
    :param folder: directory to scan
    '''
    return [f for f in os.listdir(folder) if f.endswith(('.MP4', '.mp4', '.360'))]


def GetLRVFiles(folder=""):
    '''
    Return the list of LRV file names in *folder*.
    :param folder: directory to scan
    '''
    return [f for f in os.listdir(folder) if f.endswith('.LRV')]


def GetJPGFiles(folder=""):
    '''
    Return the list of JPG file names in *folder*.
    :param folder: directory to scan
    '''
    return [f for f in os.listdir(folder) if f.endswith('.JPG')]


def CreateFolder(folder="", foldercount=1, foldername='Report'):
    '''
    Create (or reuse) a directory named <foldername>_<foldercount> under
    *folder* and return its path. An existing non-empty folder (ignoring
    dot-files) triggers a retry with the next counter value.
    :param folder: parent directory
    :param foldercount: starting numeric suffix
    :param foldername: base name for the created folder
    '''
    reportfolder = os.path.join(folder, foldername + '_' + str(foldercount))
    try:
        if not DirCheck(reportfolder):
            os.makedirs(reportfolder)
            return reportfolder
        # Reuse the folder when it is empty (ignoring dot-files).
        if [f for f in os.listdir(reportfolder) if not f.startswith('.')] == []:
            return reportfolder
        # BUG FIX: propagate foldername; the original recursion silently
        # reverted to the default 'Report' name.
        return CreateFolder(folder, foldercount=int(foldercount) + 1,
                            foldername=foldername)
    except OSError:
        # BUG FIX: the original concatenated str + int here, raising
        # TypeError instead of printing the error message.
        print('Error: Creating directory. ' + str(foldercount))


def initExcel(reportfolder, file='Report'):
    '''
    Create a new Excel workbook named <file>.xlsx inside *reportfolder* and
    return the open xlsxwriter Workbook.
    :param reportfolder: destination directory
    :param file: workbook base name (without extension)
    '''
    # BUG FIX: use os.path.join instead of a hard-coded backslash so the
    # path is valid on non-Windows platforms too.
    workbook = xlsxwriter.Workbook(os.path.join(reportfolder, file + '.xlsx'))
    return workbook


def GeneralReport(filename, logfile, checkmetadata):
    '''
    Build a {key: value} report for *filename* from the parsed log data.
    :param filename: name recorded under the 'name' key
    :param logfile: parsed json data (as returned by Loadjson)
    :param checkmetadata: iterable of metadata keys to extract
    '''
    result = {'name': filename}
    for key in checkmetadata:
        # LibUtils.JsonValue returns {} when the key is absent.
        value = LibUtils.JsonValue(logfile, key)
        result[key] = "Data Not Found in json" if value == {} else value
    return result


def deinitExcel(workbook):
    '''Close the workbook, flushing it to disk.'''
    workbook.close()


def _parse_key_value_lines(f):
    '''
    Fallback parser: rewind *f* and build {key: [values]} from lines of the
    form "key: value", stripping quotes/commas/whitespace from both sides.
    '''
    data = {}
    f.seek(0)
    for line in f:
        if ':' not in line:
            continue
        parts = line.split(':')
        key = str(parts[0].strip(' ,\n,\"'))
        value = str(parts[1].strip('\,,\n, ,\"'))
        if key not in data:
            data[key] = []
        data[key].append(value)
    return data


def Loadjson(file, status='Y'):
    '''
    Parse *file* as JSON when status == 'Y'; otherwise, or when JSON parsing
    fails, fall back to line-based "key: value" parsing.
    :param file: path to the log/json file
    :param status: 'Y' to attempt strict JSON first
    '''
    # BUG FIX: use a context manager so the file handle is always closed;
    # the duplicated fallback-parsing block is factored into a helper.
    with open(file) as f:
        try:
            if status == 'Y':
                json_data = json.load(f)
            else:
                json_data = _parse_key_value_lines(f)
        except:
            json_data = _parse_key_value_lines(f)
    return json_data


if __name__ == '__main__':
    pass
import sys

from pyspark.sql import SparkSession
from pyspark.sql.functions import lit, col


def demo_df():
    """Small ETL demo: read the planes CSV, add a constant column, drop rows
    with a null model, persist as a Delta table and show the result.

    Relies on the module-level ``spark``, ``database`` and ``planes_file``
    globals set up in the ``__main__`` block.
    """
    # Extract
    df = spark.read.format("csv").option("header", "true").load(planes_file)
    # Transform
    df = df.withColumn("NewCol", lit(0))
    df = df.filter(col("model").isNotNull())
    # Load
    df.write.format("delta").mode("overwrite").saveAsTable(
        "{}.planes".format(database))
    # Verify
    spark.sql("SELECT * FROM planes").show()


if __name__ == '__main__':
    app_name = sys.argv[1]
    app_config_file = sys.argv[2]
    print(app_name)
    print(app_config_file)

    spark = SparkSession.builder.appName(app_name).getOrCreate()

    # Extract the job configuration from a (multiline) JSON file.
    c_json = spark.read.option("multiline", "true").json(app_config_file)
    database = c_json.select("database").first()[0]
    planes_file = c_json.select("planes_file").first()[0]

    demo_df()
import os import re testfile = open('D:\/regex_sum_238824.txt', 'r') total_number = [] for line in testfile: number_array = re.findall('[0-9]+', line) if len(number_array) > 0: total_number = total_number + number_array else: continue total_sum_number = 0 for numbers in total_number: total_sum_number = total_sum_number + int(numbers) print total_sum_number
import logging
import time
from collections import defaultdict

from p4pktgen.config import Config


class Statistic(object):
    """Base class for a named statistic."""

    def __init__(self, name):
        self.name = name


class Counter(Statistic):
    """A named, monotonically increasing integer counter."""

    def __init__(self, name):
        super(Counter, self).__init__(name)
        self.counter = 0

    def inc(self):
        """Increment the counter by one."""
        self.counter += 1


class Timer(Statistic):
    """Accumulates wall-clock time across start()/stop() intervals."""

    def __init__(self, name):
        super(Timer, self).__init__(name)
        self.start_time = None
        self.time = 0

    def start(self):
        """Begin an interval; the timer must not already be running."""
        # BUG FIX: identity comparison with None (was '== None').
        assert self.start_time is None
        self.start_time = time.time()

    def stop(self):
        """End the current interval and add its duration to the total."""
        self.time += time.time() - self.start_time
        self.start_time = None

    def get_time(self):
        """Return the total accumulated time in seconds."""
        return self.time

    def __repr__(self):
        return '{}: {}s'.format(self.name, self.get_time())


class Average(Statistic):
    """Tracks a running average; get_avg() is None before any sample."""

    def __init__(self, name):
        super(Average, self).__init__(name)
        self.sum = 0.0
        self.counter = 0

    def record(self, val):
        """Add one sample to the average."""
        self.sum += val
        self.counter += 1

    def get_avg(self):
        """Return the mean of all samples, or None when none were recorded."""
        if self.counter == 0:
            return None
        return self.sum / self.counter


class Statistics:
    """Borg-style shared-state collector of run statistics and log files."""

    __shared_state = {}

    def __init__(self):
        # Borg pattern: every instance shares the same attribute dict.
        self.__dict__ = self.__shared_state

    def init(self):
        """(Re)initialise all statistics; call once per run."""
        self.num_control_path_edges = 0
        self.avg_full_path_len = Average('full_path_len')
        self.avg_unsat_path_len = Average('unsat_path_len')
        self.count_unsat_paths = Counter('unsat_paths')
        self.timing_file = None
        self.breakdown_file = None
        # BUG FIX: initialise edge_file unconditionally so record()/cleanup()
        # can test it safely when statistics recording is disabled.
        self.edge_file = None
        if Config().get_record_statistics():
            self.timing_file = open('timing.log', 'w')
            self.breakdown_file = open('breakdown.log', 'w')
            self.edge_file = open('edge.log', 'w')
        self.start_time = time.time()
        self.stats = defaultdict(int)
        self.stats_per_control_path_edge = defaultdict(int)
        self.last_time_printed_stats_per_control_path_edge = self.start_time
        self.record_count = 0
        self.num_test_cases = 0
        self.num_solver_calls = 0
        self.solver_time = Timer('solver_time')
        self.num_covered_edges = 0
        self.num_done = 0

    def record(self, result, record_path, path_solver):
        """Record one path result, appending to the timing/edge logs when
        *record_path* is set and to the breakdown log every 100 records."""
        self.record_count += 1
        current_time = time.time()
        if record_path:
            self.timing_file.write(
                '{},{}\n'.format(result, current_time - self.start_time))
            self.timing_file.flush()
            self.edge_file.write('{},{},{}\n'.format(
                self.num_test_cases, self.num_covered_edges,
                current_time - self.start_time))
            self.edge_file.flush()
        # Guard on the file handle so this is safe when recording is disabled.
        if self.breakdown_file is not None and self.record_count % 100 == 0:
            # BUG FIX: the format string had six placeholders for seven
            # arguments, silently dropping count_unsat_paths; also use
            # self.solver_time rather than constructing a fresh Borg instance.
            self.breakdown_file.write('{},{},{},{},{},{},{}\n'.format(
                current_time - self.start_time, self.num_solver_calls,
                path_solver.total_switch_time, self.solver_time,
                self.avg_full_path_len.get_avg(),
                self.avg_unsat_path_len.get_avg(),
                self.count_unsat_paths.counter))
            self.breakdown_file.flush()

    def log_control_path_stats(self, stats_per_control_path_edge,
                               num_control_path_edges):
        """Log how often each control-path edge appeared in SUCCESS cases,
        plus a histogram of edge coverage counts."""
        logging.info(
            "Number of times each of %d control path edges has occurred"
            " in a SUCCESS test case:", num_control_path_edges)
        num_edges_with_count = defaultdict(int)
        num_edges_with_counts = 0
        for e in sorted(stats_per_control_path_edge.keys(), key=id):
            num_edges_with_counts += 1
            cnt = stats_per_control_path_edge[e]
            num_edges_with_count[cnt] += 1
            logging.info("    %d %s" % (cnt, e))
        # Edges never seen get a zero-count bucket.
        num_edges_without_counts = num_control_path_edges - num_edges_with_counts
        num_edges_with_count[0] += num_edges_without_counts
        logging.info("Number of control path edges covered N times:")
        for c in sorted(num_edges_with_count.keys()):
            logging.info("    %d edges occurred in %d SUCCESS test cases"
                         "" % (num_edges_with_count[c], c))

    def dump(self):
        """Print a summary of the headline counters to stdout."""
        print('num_control_path_edges', self.num_control_path_edges)
        print('num_test_cases', self.num_test_cases)
        print('num_solver_calls', self.num_solver_calls)
        print('num_covered_edges', self.num_covered_edges)
        print('num_done', self.num_done)
        print(self.solver_time)

    def cleanup(self):
        """Close any log files opened by init()."""
        if self.timing_file is not None:
            self.timing_file.close()
        if self.breakdown_file is not None:
            self.breakdown_file.close()
        # BUG FIX: edge.log was opened alongside the others but never closed.
        if self.edge_file is not None:
            self.edge_file.close()
# -*- coding: utf-8 -*-

# Form implementation generated from reading ui file 'DepthEncodingDialog.ui'
#
# Created by: PyQt5 UI code generator 5.11.2
#
# WARNING! All changes made in this file will be lost!

from PyQt5 import QtCore, QtGui, QtWidgets


class Ui_DepthEncodingDialog(object):
    # Auto-generated UI class for the depth-encoding settings dialog.
    # Widgets: source/encoding combo boxes, min/max spin boxes, start button.

    def setupUi(self, DepthEncodingDialog):
        # Build and position all child widgets of the dialog.
        DepthEncodingDialog.setObjectName("DepthEncodingDialog")
        DepthEncodingDialog.resize(346, 250)
        self.widget = QtWidgets.QWidget(DepthEncodingDialog)
        self.widget.setGeometry(QtCore.QRect(10, 0, 331, 250))
        self.widget.setObjectName("widget")
        self.start_depth_encoding = QtWidgets.QPushButton(self.widget)
        self.start_depth_encoding.setGeometry(QtCore.QRect(90, 195, 137, 22))
        self.start_depth_encoding.setObjectName("start_depth_encoding")
        self.depth_source_type = QtWidgets.QComboBox(self.widget)
        self.depth_source_type.setGeometry(QtCore.QRect(190, 20, 131, 22))
        self.depth_source_type.setObjectName("depth_source_type")
        self.depth_source_type.addItem("")
        self.depth_source_type.addItem("")
        self.depth_source_type.addItem("")
        self.label = QtWidgets.QLabel(self.widget)
        self.label.setGeometry(QtCore.QRect(10, 20, 131, 16))
        self.label.setObjectName("label")
        self.label_2 = QtWidgets.QLabel(self.widget)
        self.label_2.setGeometry(QtCore.QRect(10, 60, 121, 16))
        self.label_2.setObjectName("label_2")
        self.depth_encoding_type = QtWidgets.QComboBox(self.widget)
        self.depth_encoding_type.setGeometry(QtCore.QRect(190, 60, 131, 22))
        self.depth_encoding_type.setObjectName("depth_encoding_type")
        self.depth_encoding_type.addItem("")
        self.depth_encoding_type.addItem("")
        self.depth_encoding_type.addItem("")
        self.depth_encoding_type.addItem("")
        # NOTE(review): the widgets below set their text directly instead of
        # via retranslateUi — presumably hand-added after generation; confirm
        # they survive a regeneration of this file.
        self.encoding_min_label = QtWidgets.QLabel(self.widget)
        self.encoding_min_label.setText("Encoding Min")
        self.encoding_min_label.setGeometry(QtCore.QRect(10, 100, 121, 22))
        self.encoding_max_label = QtWidgets.QLabel(self.widget)
        self.encoding_max_label.setText("Encoding Max")
        self.encoding_max_label.setGeometry(QtCore.QRect(10, 140, 121, 22))
        self.depth_encoding_min = QtWidgets.QSpinBox(self.widget)
        self.depth_encoding_min.setRange(-1000000, 1000000)
        self.depth_encoding_min.setValue(-1)
        self.depth_encoding_min.setGeometry(QtCore.QRect(190, 98, 50, 22))
        self.depth_encoding_max = QtWidgets.QSpinBox(self.widget)
        self.depth_encoding_max.setRange(-1000000, 1000000)
        self.depth_encoding_max.setValue(-1)
        self.depth_encoding_max.setGeometry(QtCore.QRect(190, 136, 50, 22))

        self.retranslateUi(DepthEncodingDialog)
        # Default selections: EXR source, Jet Mapping encoding.
        self.depth_source_type.setCurrentIndex(1)
        self.depth_encoding_type.setCurrentIndex(1)
        QtCore.QMetaObject.connectSlotsByName(DepthEncodingDialog)

    def retranslateUi(self, DepthEncodingDialog):
        # Assign all user-visible strings (translation-aware).
        _translate = QtCore.QCoreApplication.translate
        DepthEncodingDialog.setWindowTitle(_translate("DepthEncodingDialog", "Data Format"))
        self.start_depth_encoding.setText(_translate("DepthEncodingDialog", "Start Depth Encoding"))
        self.depth_source_type.setItemText(0, _translate("DepthEncodingDialog", "None"))
        self.depth_source_type.setItemText(1, _translate("DepthEncodingDialog", "EXR"))
        self.depth_source_type.setItemText(2, _translate("DepthEncodingDialog", "IMAGE"))
        self.label.setText(_translate("DepthEncodingDialog", "Depth Source Type:"))
        self.label_2.setText(_translate("DepthEncodingDialog", "Encoding Type"))
        self.depth_encoding_type.setItemText(0, _translate("DepthEncodingDialog", "None"))
        self.depth_encoding_type.setItemText(1, _translate("DepthEncodingDialog", "Jet Mapping"))
        self.depth_encoding_type.setItemText(2, _translate("DepthEncodingDialog", "Surface Normal"))
        self.depth_encoding_type.setItemText(3, _translate("DepthEncodingDialog", "HHA"))
from setuptools import setup # with open('requirements.txt') as f: # dependency_links = [] # install_requires = [] # for line in f.read().splitlines(): # if 'ssh://' in line: # # make a pip line into setup line # dependency_links.append(line.replace('-e ', '') + '-0') # install_requires.append(line.split('egg=')[1]) # else: # install_requires.append(line) # pip line # -e git@github.com:matt-land/fake_subdep_module.git#egg=fakesubdepmodule # setup dependency_links list # git@github.com:matt-land/fake_subdep_module.git#egg=fakesubdepmodule-0 # setup install_requires line # fakesubdepmodule-0 setup( name='fakedepmodule', py_modules=[''], version='0.0.0', description='helps test', url='git@github.com:matt-land/fake_dep_module.git', author='No One', author_email='noone@nowhere.com', install_requires=[ 'fakesubdepmodule' ], dependency_links=[ 'git+https://github.com/matt-land/fake_subdep_module.git#egg=fakesubdepmodule-0' ] )
# -*- coding: utf-8 -*- # Generated by Django 1.10 on 2016-09-01 15:29 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('stc', '0008_auto_20160830_2314'), ] operations = [ migrations.AlterField( model_name='empresa', name='id', field=models.CharField(max_length=10, primary_key=True, serialize=False), ), ]
# A Class for addresses
class Address():
    """A postal address; addresses order by their ZIP code string."""

    def __init__(self, houseN, street, city, zipCode, state="Alabama", aptNum=None):
        """Store the address components.

        houseN  - house number (any printable value)
        street  - street name
        city    - city name
        zipCode - ZIP code as a string (used for ordering)
        state   - state name, defaults to "Alabama"
        aptNum  - optional apartment number
        """
        self.houseNum = houseN
        self.street = street
        self.aptNum = aptNum
        self.city = city
        self.state = state
        self.zipCode = zipCode

    def __str__(self):
        """Return a two-line mailing-label style representation."""
        s = str(self.houseNum) + ' ' + self.street + '\n'
        s += self.city + ' ' + self.state + ' ' + self.zipCode + '\n'
        return s

    def __lt__(self, other):
        """Order addresses by ZIP code (lexicographic string comparison)."""
        return self.zipCode < other.zipCode

    def comesBefore(self, other):
        """True if this address's ZIP code sorts before *other*'s."""
        return self.zipCode < other.zipCode

    def comesAfter(self, other):
        """True if this address's ZIP code sorts after *other*'s."""
        return other < self


if __name__ == '__main__':
    # Driver test program to test Address (guarded so importing the module
    # has no side effects).
    addr = Address(42, '15th', 'Tuscaloosa', '35404', "Alabama")
    print(addr)
    addr2 = Address(543, "18th Street", "Tuscaloosa", "35405")
    print(addr2)
    print(addr.comesBefore(addr2))
    print('This should be True.')
    print(addr2.comesBefore(addr))
    print("This should be False.")
    print(addr.comesAfter(addr2))
    print('This should be False. ')
    print(addr2.comesAfter(addr))
    print("This should be True. ")
    # BUG FIX: removed the trailing "if self == other:" statement, which
    # referenced undefined names at module level and raised NameError.
''' IBEHS 1P10 Mini Milestone 10 Individual File Date: January 24th 2020 ''' """ GIVEN Functions """ ## This function calculates the gear ratio of your gearing mechanism ## This is a repeat of Mini-Milestone 5 (Wk-8), Objective #1 def calc_GR(gear_list1, gear_list2): ratio1 = gear_list1[-1] / gear_list1[0] ratio2 = gear_list2[-1] / gear_list2[0] gear_ratio = ratio1 * ratio2 return gear_ratio ## This function calculates the required time (in seconds) to rotate the forefinger 90° ## This is a repeat of Mini-Milestone 4 (Wk-6&7), combining Objective #2 and Objective #4 def calc_elapsed_time(input_speed, gear_ratio): FOREFINGER_ROTATION_ANGLE = 90 num_finger_revolutions = FOREFINGER_ROTATION_ANGLE / 360 num_motor_revolutions = gear_ratio * num_finger_revolutions rev_per_sec = input_speed / 60 elapsed_time = num_motor_revolutions / rev_per_sec return elapsed_time def find_gear_config(GR, Gear_Number): Gear_List = [] FL_input = 12 SL_input = 12 for Gear1 in Gear_Number: FL_output = Gear1 for Gear2 in Gear_Number: SL_output = Gear2 calc_gear_ratio = round(FL_output/FL_input * SL_output/SL_input,3) if calc_gear_ratio == round(GR,3): print("HI") return [FL_input,FL_output,SL_input,SL_output]
import numpy as np
import numpy.linalg as la

#print(streets)
#print(n_intersections)
#print(intersection_inflow)

# NOTE(review): ``streets``, ``n_intersections``, ``intersection_inflow``,
# ``plot_street_network`` and ``m_echelon`` are not defined in this chunk --
# they must come from elsewhere (a driver script or notebook); confirm.
plot_street_network(streets,intersection_inflow,title="initial")

# Tolerance below which a row norm is treated as zero.
t = 10**(-10)
m = n_intersections
n = len(streets)

# Incidence-style flow matrix: one row per intersection, one column per
# street. Assumes streets[j][1] is street j's start intersection and
# streets[j][2] its end -- TODO confirm against the data format.
flow_matrix = np.zeros((m,n))
#print(flow_matrix.shape)
for i in range(m):
    for j in range(n):
        if streets[j][1] == i:
            flow_matrix[i][j] = -1
        if streets[j][2] == i:
            flow_matrix[i][j] = 1
#print(flow_matrix)

# Reduce to echelon form; the number of (numerically) nonzero leading rows
# of U gives the rank, and n - rank the nullspace dimension.
M, U = m_echelon(flow_matrix)
#print(U)
rank = 0
dim_null = 0
# NOTE(review): if every row of U is nonzero this loop indexes past the end
# of U -- confirm m_echelon always produces at least one near-zero row.
while la.norm(U[rank],2) > t:
    rank += 1
dim_null = n - rank
#print(dim_null)
from __future__ import annotations
from dataclasses import dataclass, field, replace
from typing import Tuple, Optional
import random


@dataclass(frozen=True)
class Card:
    """A card, identified only by its mana cost."""
    cost: int


def make_default_deck() -> Tuple:
    """Build the standard 20-card deck (fixed cost distribution, 0..8)."""
    costs = (0, 0, 1, 1, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 5, 5, 6, 6, 7, 8)
    return tuple([Card(cost) for cost in costs])


def new_default_deck() -> Deck:
    """Factory used as Player's default deck."""
    return Deck()


@dataclass(frozen=True)
class Deck:
    """Immutable ordered pile of cards; all operations return new Decks."""
    cards: Tuple[Card, ...] = field(default_factory=make_default_deck)

    def draw(self, count: int = 1) -> Tuple[Tuple[Card, ...], Deck]:
        """Return (drawn cards, remaining deck); drawing past the end yields
        fewer cards (tuple slicing never raises)."""
        return self.cards[:count], Deck(self.cards[count:])

    def shuffle(self) -> Deck:
        """Return a new Deck with the same cards in random order."""
        cards = list(self.cards)
        random.shuffle(cards)
        return Deck(tuple(cards))


@dataclass(frozen=True)
class Player:
    """Immutable player state; mutators return updated copies."""
    health: int = 30
    mana: int = 0
    max_mana: int = 0
    hand: Tuple = ()
    deck: Deck = field(default_factory=new_default_deck)

    def draw(self, count: int = 1) -> Player:
        """Move ``count`` cards from the deck into the hand.

        Fix: the original discarded the post-draw deck, so drawn cards were
        never removed from the deck (hands and deck went out of sync).
        """
        hand, deck = self.deck.draw(count)
        return replace(self, hand=self.hand + hand, deck=deck)

    def increase_max_mana(self) -> Player:
        """Raise the mana cap by one, clamped at 10."""
        max_mana = min(self.max_mana + 1, 10)
        return replace(self, max_mana=max_mana)

    def fill_mana(self) -> Player:
        """Refill current mana to the cap."""
        return replace(self, mana=self.max_mana)


def new_player():
    """Factory used as GameState's default for both players."""
    return Player()


class GameNotStartedException(Exception):
    """Raised when a turn is taken before GameState.start()."""
    pass


@dataclass(frozen=True)
class GameState:
    """Full game state; ``current_player`` is identical (``is``) to either
    ``player`` or ``opponent``, or None before the game starts."""
    player: Player = field(default_factory=new_player)
    opponent: Player = field(default_factory=new_player)
    current_player: Optional[Player] = None

    def is_active(self, player: Player) -> bool:
        # Identity check on purpose: current_player aliases one of the two.
        return self.current_player is player

    def start(self) -> GameState:
        """Pick a random starting player; the starter draws 3 cards, the
        other draws 4, and the starter becomes current_player."""
        current_player = random.choice((self.player, self.opponent))
        player = self.player.draw(3 if current_player is self.player else 4)
        opponent = self.opponent.draw(3 if current_player is self.opponent else 4)
        # Re-identify the starter among the freshly drawn Player objects.
        if len(player.hand) == 3:
            return GameState(player=player, opponent=opponent, current_player=player)
        else:
            return GameState(player=player, opponent=opponent, current_player=opponent)

    def start_new_turn(self) -> GameState:
        """Begin the active player's turn: +1 max mana, refill, draw one.

        Raises GameNotStartedException when start() has not been called.
        """
        if self.current_player is None:
            raise GameNotStartedException()
        if self.is_active(self.player):
            player = "player"
        else:
            player = "opponent"
        current_player = self.current_player.increase_max_mana().fill_mana().draw()
        if player == "player":
            return GameState(
                player=current_player,
                opponent=self.opponent,
                current_player=current_player,
            )
        else:
            return GameState(
                player=self.player,
                opponent=current_player,
                current_player=current_player,
            )
# Amount of food purchased, converted from kilograms to grams.
food = int(input()) * 1000
eaten_food = 0

# Keep reading daily portions (grams) until the dog is adopted.
while True:
    gr_food = input()
    if gr_food == 'Adopted':
        break
    eaten_food += int(gr_food)

# Positive (or zero) balance means the purchased food sufficed.
leftover = food - eaten_food
if leftover >= 0:
    print(f'Food is enough! Leftovers: {leftover} grams.')
else:
    print(f'Food is not enough. You need {abs(leftover)} grams more.')
"""10. Write a Python program to get the difference between the two lists""" SampleList1 = list("список") print("Sample list #1", SampleList1) SampleList2 = list("лист") print("Sample list #2", SampleList2) print("The difference (1 - 2):", list(set(SampleList1) - set(SampleList2)))
"""Depth-aware image re-projection experiment.

Loads an RGBZ OpenEXR render, maps the depth channel to per-pixel
"radii", builds monotone left/right coordinate mappings from them, and
composites blurred and score-weighted warped layers into PNG outputs
(blurred<n>.png, gram<n>.png).
"""
from imageio import imread, imsave  # NOTE(review): imread is unused here
from itertools import islice, product
from scipy import ndimage
import numpy as np
import OpenEXR  # non-Windows: pip install openexr; Windows: https://www.lfd.uci.edu/~gohlke/pythonlibs/#openexr
import skimage.transform  # pip install scikit-image


def assertEqual(a, b, threshold=1e-6, limit=3):
    """Raise AssertionError when a and b differ by more than ``threshold``
    anywhere, reporting at most ``limit`` offending entries."""
    a, b = np.broadcast_arrays(a, b)
    indices = np.where(np.abs(b - a) > threshold)
    if not indices[0].size:
        return
    examples = "\n".join(
        "a[{}]={} != {}".format(
            ", ".join(str(x) for x in index), a[index], b[index]
        )
        for index in islice(zip(*indices), limit)
    ) + ("..." if len(indices[0]) > limit else "")
    raise AssertionError(
        "arrays with shape {} differ by more than {}\n{}".format(
            a.shape, threshold, examples
        )
    )


class Arange:
    """Callable that returns read-only views of a cached, growing
    ``np.arange`` buffer, avoiding reallocation in multiGet."""

    def __init__(self):
        self.cache = np.arange(0)

    def __call__(self, stop):
        if stop > len(self.cache):
            self.cache = np.arange(stop)
            # Read-only: callers share views of the one cached buffer.
            self.cache.setflags(write=False)
        return self.cache[:stop]


arange = Arange()


def multiGet(a, *indices):
    """Batched fancy indexing: apply ``indices`` to the trailing dims of
    ``a`` independently at every leading ("batch") position."""
    a = np.asarray(a)
    extraShape = a.shape[:a.ndim - len(indices)]
    # Broadcastable index arrays for the leading (batch) dimensions.
    extraIndices = tuple(
        arange(n).reshape(
            (n,) + (1,) * (len(extraShape) - i)
        )
        for i, n in enumerate(extraShape)
    )
    return a[extraIndices + tuple(indices)]


# Inline self-test (runs at import time).
assertEqual(
    multiGet(
        np.arange(16).reshape((2, 2, 2, 2)),
        [[0, 1], [0, 1]],
        [[0, 1], [1, 0]]
    ),
    [[[0, 3], [5, 6]], [[8, 11], [13, 14]]]
)


def searchsorted(a, v, **kwargs):
    """Row-wise np.searchsorted: the last axis of ``a`` holds sorted values
    and the search runs independently over the leading axes of ``v``."""
    a = np.asarray(a)
    _, v = np.broadcast_arrays(a[..., :1], v)
    out = np.empty_like(v, dtype=int)
    for i in product(
        [...],
        *(
            range(n) if n > 1 else [slice(None)]
            for n in a[..., :1].shape
        ),
    ):
        out[i] = np.searchsorted(np.squeeze(a[i]), v[i], **kwargs)
    return out


# Inline self-test (runs at import time).
assertEqual(
    searchsorted(
        [[[0, 1, 2]], [[1, 2, 3]]],
        [
            [[[0.5, 0.5], [1.5, 1.5]]],
            [[[2.5, 2.5], [1.5, 1.5]]]
        ]
    ),
    [
        [[[1, 1], [2, 2]], [[0, 0], [1, 1]]],
        [[[3, 3], [2, 2]], [[2, 2], [1, 1]]]
    ]
)


def useMap(abMap, a):
    """Evaluate a piecewise-linear map at fractional positions ``a``.

    abMap[..., k] is the mapped value at integer position k; ``a`` is
    clamped into [0, len-1] before interpolation.
    """
    a = np.clip(a, 0, abMap.shape[-1] - 1)
    ai = np.floor(a)
    np.minimum(ai, abMap.shape[-1] - 2, out=ai)
    aj = ai + 1
    bi = multiGet(abMap, ai.astype(int))
    bj = multiGet(abMap, aj.astype(int))
    # Linear interpolation between the bracketing samples.
    return bi * (aj - a) + bj * (a - ai)


def unmap(abMap, b):
    """Inverse of useMap for monotone non-decreasing maps: return fractional
    positions whose mapped value is ``b`` (clamped into the map's range)."""
    b = np.clip(b, abMap[..., :1], abMap[..., -1:])
    aj = searchsorted(abMap, b, side="right")
    np.minimum(aj, abMap.shape[-1] - 1, out=aj)
    ai = aj - 1
    bi, bj = multiGet(abMap, ai), multiGet(abMap, aj)
    return (ai * (bj - b) + aj * (b - bi)) / (bj - bi)


# Inline self-tests for the map pair (run at import time).
testMap = np.array([0, 1, 3, 6])
assertEqual(
    useMap(testMap, [0.9, 1.9, 2.9]),
    [0.9, 2.8, 5.7]
)
assertEqual(
    unmap(testMap, [0.9, 2.8, 5.7]),
    [0.9, 1.9, 2.9]
)
assert unmap(np.array([0, 1, 1, 2]), 1) == 2
assertEqual(useMap(np.array([0, 1]), [-1, 3]), [0, 1])
assertEqual(unmap(np.array([0, 1]), [-1, 3]), [0, 1])
assertEqual(
    useMap(np.array([[0, 1], [2, 3]]), [[0.1], [0.9]]),
    [[0.1], [2.9]]
)
assertEqual(
    unmap(np.array([[0, 1], [2, 3]]), [[0.1], [2.9]]),
    [[0.1], [0.9]]
)


def readDepthFile(path, channelNames="RGBZ"):
    """Read float32 channels from an OpenEXR file into a (C, H, W) array.

    Also writes two debug PNGs when the conventional channels are present:
    a brightened/gamma'd RGB preview and a normalized inverse-depth image.
    """
    depthFile = OpenEXR.InputFile(str(path))
    header = depthFile.header()
    for channelName in channelNames:
        channelHeader = header["channels"][channelName]
        assert channelHeader.type.v == 2  # float32
        assert (channelHeader.xSampling, channelHeader.ySampling) == (1, 1)
    viewBox = header["dataWindow"]
    width = viewBox.max.x - viewBox.min.x + 1
    height = viewBox.max.y - viewBox.min.y + 1
    channels = np.empty((len(channelNames), height, width))
    for i, channelName in enumerate(channelNames):
        buffer = depthFile.channel(channelName)
        assert len(buffer) == height * width * np.dtype(np.float32).itemsize
        channels[i] = np.frombuffer(buffer, dtype=np.float32).reshape(
            (height, width)
        )
    if channelNames[:3] == "RGB":
        image = np.moveaxis(channels[:3], 0, 2)
        # Brighten slightly, then approximate gamma correction (sqrt).
        image *= 0.8
        image **= 0.5
        imsave(str(path) + ".png", np.round(np.clip(image, 0, 1) * 255).astype(np.uint8))
    if channelNames[-1] == "Z":
        depthMap = channels[-1]
        # Normalize so the nearest depth maps to 255 and the farthest to 0.
        imsave(str(path) + ".z.png", np.round((depthMap - np.max(depthMap)) / (np.min(depthMap) - np.max(depthMap)) * 255).astype(np.uint8))
    return channels


def adjustRange(a, old1, old2, new1, new2, out=None):
    """Linearly remap ``a`` from [old1, old2] to [new1, new2] (no clamping);
    writes into ``out`` when given."""
    factor = (new2 - new1) / (old2 - old1)
    out = np.multiply(a, factor, out=out)
    out += new1 - old1 * factor
    return out


def getGaussian(length, sigmasInFrame=3):
    """Return a Gaussian window sampled over +-sigmasInFrame standard
    deviations; scaled so the samples approximate a unit-area density."""
    x = np.linspace(-sigmasInFrame, sigmasInFrame, length, endpoint = True)
    np.square(x, out = x)
    x *= -0.5
    np.exp(x, out = x)
    x *= sigmasInFrame / (0.5 * length * np.sqrt(2 * np.pi))
    return x


# ---- Script body: load one test case and build the warped composites. ----
testCase = 5
channels = readDepthFile("zmap{}.exr".format(testCase)).astype(float)
_, height, cWidth = channels.shape
# height //= 2
# cWidth //= 2
# "unit": a resolution-relative length scale used for all sigmas/radii.
unit = np.sqrt(height * cWidth) / 10
print(unit)
channels = np.moveaxis(
    skimage.transform.resize(
        np.moveaxis(channels, 0, 2), (height, cWidth)
    ),
    2, 0
)
# Map the depth channel onto per-pixel radii.
# NOTE(review): 0.726/0.804 look like visually tuned magic constants --
# confirm their provenance before changing.
cRadii = channels[-1]
adjustRange(cRadii, np.min(cRadii), np.max(cRadii), 0.726 * unit, 0.804 * unit, out=cRadii)
cStart = int(np.floor(np.min(cRadii[:, 0])))
width = cStart + cWidth - 1 + int(np.ceil(np.min(cRadii[:, -1])))
# Left map: cumulative max keeps the column mapping monotone.
clMap = np.arange(cStart, cStart + cWidth) - cRadii
np.maximum.accumulate(clMap, axis=1, out=clMap)  # pylint: disable=no-member
lcMap = unmap(clMap, np.arange(width))
del clMap
lRadii = useMap(cRadii, lcMap)
del lcMap
# Right map: reversed cumulative min, the mirror of the left map.
crMap = np.arange(cStart, cStart + cWidth) + cRadii
np.minimum.accumulate(  # pylint: disable=no-member
    crMap[:, ::-1], axis=1, out=crMap[:, ::-1]
)
rcMap = unmap(crMap, np.arange(width))
del crMap
rRadii = useMap(cRadii, rcMap)
del rcMap
# Split the image into a blurred base and a high-frequency residual.
cImage = channels[:3]
np.clip(cImage, 0, 1, out=cImage)
cBlurred = np.empty_like(cImage)
# NOTE(review): ndimage.filters is a deprecated alias in newer SciPy;
# ndimage.gaussian_filter is the current spelling.
for channel, blurredChannel in zip(cImage, cBlurred):
    ndimage.filters.gaussian_filter(
        channel, sigma=0.05 * unit, output=blurredChannel
    )
cImage -= cBlurred
adjustRange(cBlurred, 0, 1, 0.15, 0.8, out=cBlurred)
# Accumulate warped blurred layers (bc*) and their double-advanced
# counterparts (ad*), plus per-pixel weights for normalization.
bcImage = np.zeros((cImage.shape[0], height, width))
bcMagnitudes = np.zeros((height, width))
adImage = np.zeros((cImage.shape[0], height, width))
adMagnitudes = np.zeros((height, width))
lrMap = np.arange(width) + 2 * lRadii
lcMap = np.arange(-cStart, width - cStart) + lRadii
lcMask = lcMap < cWidth - 1
bcImage += useMap(cBlurred, lcMap) * lcMask
bcMagnitudes += lcMask
radii = useMap(lRadii, lrMap)
lrMap += radii
lcMap = lrMap - cStart
del lrMap
lcMask = lcMap < cWidth - 1
adImage += useMap(cBlurred, lcMap) * lcMask
adMagnitudes += lcMask
del lcMap
del lcMask
rlMap = np.arange(width) - 2 * rRadii
rcMap = np.arange(-cStart, width - cStart) - rRadii
rcMask = rcMap >= 0
bcImage += useMap(cBlurred, rcMap) * rcMask
bcMagnitudes += rcMask
radii = useMap(rRadii, rlMap)
rlMap -= radii
rcMap = rlMap - cStart
del rlMap
rcMask = rcMap >= 0
adImage += useMap(cBlurred, rcMap) * rcMask
adMagnitudes += rcMask
del rcMap
del rcMask
bcImage /= bcMagnitudes
adImage /= adMagnitudes
# Unsharp-style combination of near and far blurred layers.
blurred = 1.5 * bcImage - 0.5 * adImage
imsave("blurred{}.png".format(testCase), np.round(np.clip(np.moveaxis(blurred, 0, 2), 0, 1) * 255).astype(np.uint8))
# Per-column scores: squared mean residual, center-weighted, smoothed, and
# sharply peaked (power 10) so only the strongest columns contribute.
cScores = np.mean(cImage, axis=0)
cScores *= cScores
cScores *= getGaussian(cWidth, sigmasInFrame=2.5)
cScores = ndimage.filters.gaussian_filter(cScores, sigma=0.13 * unit)
np.power(cScores, 10, out=cScores)
magnitudes = np.zeros((height, width))
iterations = 8
layerCount = 2 * iterations  # NOTE(review): unused
merged = np.zeros((cImage.shape[0], height, width))
# March left-to-right, repeatedly advancing by the local radius and
# accumulating score-weighted residual samples.
lrMap = np.broadcast_to(np.arange(width), (height, width)).astype(float)
for i in range(iterations):
    radii = useMap(lRadii, lrMap)
    lrMap += radii
    lcMap = lrMap - cStart
    lrMap += radii
    lcMask = lcMap < cWidth - 1
    weights = useMap(cScores, lcMap)
    weights *= lcMask
    merged += useMap(cImage, lcMap) * weights
    magnitudes += weights
# Mirror pass, marching right-to-left.
rlMap = np.broadcast_to(np.arange(width), (height, width)).astype(float)
for i in range(iterations):
    radii = useMap(rRadii, rlMap)
    rlMap -= radii
    rcMap = rlMap - cStart
    rlMap -= radii
    rcMask = rcMap >= 0
    weights = useMap(cScores, rcMap)
    weights *= rcMask
    merged += useMap(cImage, rcMap) * weights
    magnitudes += weights
merged /= magnitudes
merged += blurred
imsave("gram{}.png".format(testCase), np.round(np.clip(np.moveaxis(merged, 0, 2), 0, 1) * 255).astype(np.uint8))
import datetime
from chemical_analysis.utils import truncate
import psycopg2
import logging
import os

LOGGER = logging.getLogger('django')


def format_filter_response(data):
    """Collect the distinct filter options present in ``data``.

    data: iterable of dict-like rows; keys used: plant, plant_name,
    truck_type, i2_taluka_desc, city_code, city_desc.
    Returns a dict of sets; 'plant' and 'city' entries are
    "code-description" strings.
    """
    response = dict()
    response['plant'] = set()
    response['truck_type'] = set()
    response['i2_taluka_desc'] = set()
    response['city'] = set()
    for each in data:
        # NOTE(review): str(None) == 'None' is truthy, so this guard only
        # filters empty strings, not missing values -- confirm intent.
        if str(each.get('plant_name')):
            response['plant'].add(str(each.get('plant')) + "-" + str(each.get('plant_name')))
        response['truck_type'].add(each.get('truck_type'))
        if str(each.get('i2_taluka_desc')):
            response['i2_taluka_desc'].add(each.get('i2_taluka_desc'))
        response['city'].add(str(each.get('city_code')) + "-" + str(each.get('city_desc')))
    return response


def format_dashboard_response(data, current_data, route):
    """Shape similarity/PTPK rows, the currently selected route, and route
    impact rows into the dashboard payload.

    data         -- objects with similarity/PTPK attributes (one per city)
    current_data -- object for the selected plant/city pair, or falsy
    route        -- objects with per-route impact attributes
    Returns {'otherData', 'simiPTPK', 'routeData', and (when current_data)
    'source', 'destination', 'ptpkPred', 'currPtpk', 'currQuantity'}.
    """
    response = dict()
    response['otherData'] = list()
    response['simiPTPK'] = list()
    response['routeData'] = list()
    for each_data in data:
        # Map-marker data: source and destination coordinates plus labels.
        other_data = dict()
        other_data['source_location'] = [each_data.source_lat, each_data.source_long]
        other_data['location'] = [each_data.latitude, each_data.longitude]
        other_data['simi_route'] = each_data.simi_city
        other_data['city'] = str(each_data.simi_city_code) + "-" + str(each_data.type)
        # Table data: road-mix percentages, elevation stats and PTPK figures,
        # truncated (not rounded) to fixed decimal places for display.
        simi_ptpk = dict()
        simi_ptpk['nh_per'] = truncate(each_data.nh_per_ref, 2)
        simi_ptpk['sh_per'] = each_data.sh_per_ref
        simi_ptpk['hilly_per'] = each_data.hilly_per_ref
        simi_ptpk['plain_per'] = truncate(each_data.plain_per_ref, 0)
        simi_ptpk['mean_ele'] = truncate(each_data.mean_ele, 0)
        simi_ptpk['sd_ele'] = truncate(each_data.sd_ele, 0)
        simi_ptpk['simi_route'] = each_data.simi_city
        simi_ptpk['ptpk'] = truncate(each_data.ptpk, 2)
        simi_ptpk['ptpkPred'] = truncate(each_data.ptpk_pred, 2)
        simi_ptpk['latitude'] = each_data.latitude
        simi_ptpk['longitude'] = each_data.longitude
        simi_ptpk['source_lat'] = each_data.source_lat
        simi_ptpk['source_long'] = each_data.source_long
        simi_ptpk['avgQuantity'] = truncate(each_data.quantity, 0)
        simi_ptpk['avgLead'] = truncate(each_data.lead, 0)
        simi_ptpk['avgTravelTime'] = truncate(each_data.onward_travel, 2)
        simi_ptpk['avg_simi_coeff'] = truncate(each_data.simi_coeff, 2)
        simi_ptpk['total_restriction_time'] = each_data.total_restriction_time
        simi_ptpk['city'] = str(each_data.simi_city_code) + "-" + str(each_data.type) + " " + str(each_data.i2_taluka_desc)
        simi_ptpk['integration_flag'] = each_data.integration_flag
        simi_ptpk['cluster_flag'] = each_data.cluster_flag
        simi_ptpk['union_flag'] = each_data.union_flag
        response['otherData'].append(other_data)
        response['simiPTPK'].append(simi_ptpk)
    for each_data in route:
        route_data = dict()
        route_data['route_1'] = each_data.route_1
        route_data['impact'] = truncate(each_data.impact, 0)
        route_data['lead'] = truncate(each_data.lead, 0)
        route_data['quantity'] = truncate(each_data.quantity, 0)
        route_data['simiCoeff'] = truncate(each_data.simi_coeff, 2)
        route_data['PTPK'] = truncate(each_data.ptpk, 2)
        response['routeData'].append(route_data)
    if current_data:
        response['source'] = current_data.full_plant_name
        response['destination'] = current_data.simi_city_name
        response['ptpkPred'] = truncate(current_data.ptpk_pred, 2)
        response['currPtpk'] = truncate(current_data.ptpk, 2)
        response['currQuantity'] = truncate(current_data.quantity, 2)
    return response


# Module-level cached connection reused across calls.
# NOTE(review): a single shared psycopg2 connection is not safe for
# concurrent callers -- confirm this module is used single-threaded.
con = None


def lit_database_connection():
    """Return a cached psycopg2 connection to the LIT database.

    An existing connection is probed with ``SELECT 1`` and replaced when
    the probe fails. Credentials come from the LIT_* environment
    variables. Returns None when connecting fails (the exception is
    logged, not raised).
    """
    global con
    try:
        if con:
            try:
                with con.cursor() as cur:
                    cur.execute('SELECT 1')
                LOGGER.info("old connection is live")
            # NOTE(review): Exception already subsumes OperationalError,
            # so the tuple is redundant -- kept as-is.
            except (psycopg2.OperationalError, Exception):
                con.close()
                con = psycopg2.connect(dbname=os.environ.get('LIT_DBNAME'), host=os.environ.get('LIT_HOST'),
                                       user=os.environ.get('LIT_USER'), password=os.environ.get('LIT_PASSWORD'),
                                       port=os.environ.get('LIT_PORT'))
                LOGGER.info("unable to use last connection, new lit connection is created")
        else:
            con = psycopg2.connect(dbname=os.environ.get('LIT_DBNAME'), host=os.environ.get('LIT_HOST'),
                                   user=os.environ.get('LIT_USER'), password=os.environ.get('LIT_PASSWORD'),
                                   port=os.environ.get('LIT_PORT'))
            LOGGER.info("LIT DB connection is created first time")
    except Exception as ex:
        LOGGER.exception("not able to establish lit connection")
    return con


def get_month_no_from_name(month_name):
    """Convert an abbreviated month name (e.g. 'Jan') to its number (1-12)."""
    long_month_name = month_name
    long_month_name = long_month_name.strip()
    datetime_object = datetime.datetime.strptime(long_month_name, "%b")
    month_number = datetime_object.month
    return month_number


# Lazily created shared connection pool (10-20 connections).
threaded_postgresql_pool = None


def get_lit_connection_pool():
    """Return the shared ThreadedConnectionPool for the LIT database,
    creating it on first use from the LIT_* environment variables."""
    # Importing psycopg2.pool loads the submodule; it is then referenced
    # through the psycopg2 namespace below.
    from psycopg2 import pool
    global threaded_postgresql_pool
    if not threaded_postgresql_pool:
        threaded_postgresql_pool = psycopg2.pool.ThreadedConnectionPool(10, 20,
                                                                       user=os.environ.get('LIT_USER'),
                                                                       password=os.environ.get('LIT_PASSWORD'),
                                                                       host=os.environ.get('LIT_HOST'),
                                                                       port=os.environ.get('LIT_PORT'),
                                                                       database=os.environ.get('LIT_DBNAME'))
    return threaded_postgresql_pool
from sklearn import svm
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split


def getData():
    """Load samples from data.txt.

    Each non-empty line is a comma-separated row whose LAST field is the
    class label; everything before it is the feature vector.

    Returns:
        (x, y): list of feature rows (strings) and list of labels.
    """
    x = []
    y = []
    # Fix: close the file (the original leaked the handle) and skip blank
    # lines -- a trailing newline used to produce an empty row and crash
    # or pollute the data.
    with open("data.txt") as fh:
        for line in fh.read().split("\n"):
            if not line.strip():
                continue
            fields = line.split(",")
            label = fields.pop()  # label is the last field
            x.append(fields)
            y.append(label)
    return x, y


x, y = getData()

# Hold out 25% of the samples for evaluation (fixed seed for repeatability).
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.25, random_state=42)

# clf = svm.SVR()  # Regression SVM
clf = svm.SVC(kernel='rbf')  # alternate kernels: linear, poly, sigmoid
# http://scikit-learn.org/stable/modules/generated/sklearn.svm.SVC
clf.fit(x_train, y_train)  # Train

# Predict on the held-out data and report the fraction classified correctly.
y_predict = clf.predict(x_test)
print(accuracy_score(y_test, y_predict))
__author__ = 'diegopinheiro'
__email__ = 'diegompin@gmail.com'
__github__ = 'https://github.com/diegompin'

import geopandas as geo
from mhs.src.dao.mhs.documents_mhs import *
# from mhs.src.dao.base_dao import DAOMongo
from mhs.src.dao.base_dao import BaseDAO
import pandas as pd
from shapely.geometry.point import Point
from shapely.geometry import mapping, shape
import matplotlib.pyplot as plt
import numpy as np
from mhs.config import MHSConfigManager
from mhs.src.library.network.community_detection import CommunityDetection
from mhs.src.library.file.utils import check_folder
from shapely.geometry import mapping, shape
import itertools as ite


class PlotSCA(object):
    """Plot Shared Care Area (SCA) choropleth maps for California.

    On construction, loads the California ZCTA-county rows (FIPS state
    '06') and the matching shapefile rows from the Mongo documents.
    """

    def __init__(self):
        # self.dao = DAOMongo()
        # self.__dao__sca__ = BaseDAO(SharedCareArea)
        # self.__dao_zcta_county__ = BaseDAO(ZctaCountyDocument)
        # self.__dao_shapefile__ = BaseDAO(ShapefileDocument)
        # dict_match_zcta = {
        #     ZctaCountyDocument.geocode_fips_state: '06'
        # }
        # self.df_zcta_ca = pd.DataFrame(self.__dao_zcta_county__.obtain_pipeline(dict_match=dict_match_zcta))
        # '06' is the California FIPS state code.
        self.df_zcta_ca = pd.DataFrame(
            [i.get_data() for i in ZctaCountyDocument.objects(**{ZctaCountyDocument.geocode_fips_state.name: '06'})])
        self.df_shapefile_ca = pd.DataFrame([i.get_data() for i in ShapefileDocument.objects()])
        # Keep only shapefile rows whose FIPS code appears in the CA ZCTA set.
        self.df_shapefile_ca = self.df_shapefile_ca.loc[
            self.df_shapefile_ca[ShapefileDocument.geocode_fips.name].isin(
                self.df_zcta_ca[ZctaCountyDocument.geocode_fips.name])]
        # self.conf = MHSConfigManager()
        # self.path_plots = self.conf.plots

    def plot(self, method, type_discharge, type_community_detection, year, path_plots=None):
        """Render one SCA map for the given method/discharge/community
        detection/year combination and save it as a PDF under path_plots
        (defaults to the current directory)."""
        plt.style.use('classic')
        # def get_cm(N):
        #     cmap = plt.cm.get_cmap('viridis')
        #     colors = cmap([int(i) for i in np.linspace(0, cmap.N, N)])
        #     # from matplotlib.colors import ListedColormap
        #     # cmap = ListedColormap(colors, name='Sequential')
        #     cmap = ListedColormap(colors, name='my', N=N)
        #     return cmap
        crs = {'init': 'epsg:2163'}
        fig, ax = plt.subplots(1, figsize=(20, 20))
        # ax = fig.add_subplot(111)
        ax.axis('off')
        # dict_match_sca = {
        #     SharedCareArea.method: method,
        #     SharedCareArea.type_discharge: type_discharge,
        #     SharedCareArea.type_community_detection: type_community_detection,
        #     SharedCareArea.year: year
        #
        # }
        # Fetch the SCA assignments matching the requested combination.
        sca_objs = SharedCareArea.objects(**{
            SharedCareArea.method.name: method,
            SharedCareArea.type_discharge.name: type_discharge,
            SharedCareArea.type_community_detection.name: type_community_detection,
            SharedCareArea.year.name: year
        })
        df_sca = pd.DataFrame([i.get_data() for i in sca_objs])
        df_sca[SharedCareArea.sca_id.name] = df_sca[SharedCareArea.sca_id.name].apply(lambda x: int(x))
        geo_ca = geo.GeoDataFrame(self.df_shapefile_ca, crs=crs)
        geo_ca = geo_ca.rename(columns={ShapefileDocument.geocode_geometry.name: 'geometry'})
        # Join the SCA labels onto the shapes via FIPS code <-> ZCTA.
        geo_ca_sca = geo_ca.merge(df_sca, left_on=ShapefileDocument.geocode_fips.name,
                                  right_on=SharedCareArea.zcta.name)
        # geo_ca.plot(linewidth=0, ax=ax, edgecolor='0', color='grey')
        N = len(geo_ca_sca[SharedCareArea.sca_id.name].unique())
        # cmap = get_cm(N)
        # geo_ca_sca.plot(column=SharedCareArea.sca_id.name, linewidth=1, ax=ax, edgecolor='0', color='white')
        # Merge all ZCTAs of an SCA into one polygon for coloring/labels.
        df_shapefile_annotation = geo_ca_sca.dissolve(by=SharedCareArea.sca_id.name)
        df_shapefile_annotation = df_shapefile_annotation.reset_index()

        def max_list(l, k):
            # Spread the SCA ids across the list by striding i*p mod n.
            # NOTE(review): when len(l) < k, p == 0 and every entry becomes
            # l[0] -- confirm that is acceptable for small maps.
            n = len(l)
            p = int(n / k)
            i = 0
            l_prime = []
            while (i < n):
                m = i * p % n
                l_prime.append(l[m])
                i += 1
            return l_prime

        df_shapefile_annotation[SharedCareArea.sca_id.name] = max_list(df_shapefile_annotation[SharedCareArea.sca_id.name], k=9)
        # df_shapefile_annotation = df_shapefile_annotation.sort_values(by=SharedCareArea.sca_id.name)
        df_shapefile_annotation.plot(column=SharedCareArea.sca_id.name, linewidth=1, ax=ax, edgecolor='grey',
                                     cmap=plt.cm.get_cmap('Pastel1'))
        # Label each dissolved SCA at its centroid.
        for k, v in zip(df_shapefile_annotation.centroid.index, df_shapefile_annotation.centroid.values):
            plt.annotate(s=k, xy=v.coords[0], horizontalalignment='center', size=20, color='black')
        # path_plots = self.path_plots
        if not path_plots:
            path_plots = '.'
        folder = f'{path_plots}'
        check_folder(folder)
        filename = '_'.join([str(s) for s in [method, type_discharge, type_community_detection, year]])
        # NOTE(review): ``filename`` is computed but not used below; the
        # literal "(unknown)" in the output path looks like a redaction of
        # an f-string placeholder (likely {filename}) -- confirm.
        # plt.tight_layout()
        plt.savefig(f'{folder}/map_(unknown).pdf', transparent=True)
        plt.close()

# Scratch driver kept commented out by the author (runs plots over all
# discharge/community-detection/year combinations).
#
# self = PlotSCA()
#
# method = 'HU'
#
# list_type_discharge = [HospitalDischargeDocument.TYPE_DISCHARGE_INPATIENT_FROM_ED, HospitalDischargeDocument.TYPE_DISCHARGE_ED_ONLY]
# list_type_community_detection_all = list(CommunityDetection.get_types())
# list_years = list([2018])
#
#
# combination = list(ite.product(list_type_discharge, list_years, list_type_community_detection_all ))
# for (type_discharge, year, type_community_detection) in combination:
#     # metric = MetricsSharedCareArea.TYPE_LOCALIZATION_INDEX
#     # normalized = False
#     # type_community_detection = CommunityDetection.TYPE_INFOMAP
#     # type_discharge = HospitalDischargeDocument.TYPE_DISCHARGE_INPANTIENT
#
#     self.plot(method=method,
#               type_discharge=type_discharge,
#               type_community_detection=type_community_detection,
#               year=year)
#
#
#
#
#
# crs = {'init': 'epsg:2163'}
#
# fig, ax = plt.subplots(1, figsize=(20, 20))
#
# ax = fig.add_subplot(111)
# ax.axis('off')
# # dict_match_sca = {
# #     SharedCareArea.method: method,
# #     SharedCareArea.type_discharge: type_discharge,
# #     SharedCareArea.type_community_detection: type_community_detection,
# #     SharedCareArea.year: year
# #
# # }
# df_sca = pd.DataFrame(self.__dao__sca__.obtain_pipeline(dict_match=dict_match_sca))
# geo_ca = geo.GeoDataFrame(self.df_shapefile_ca, crs=crs)
# geo_ca = geo_ca.rename(columns={ShapefileDocument.geocode_geometry.db_field: 'geometry'})
#
# geo_ca_sca = geo_ca.merge(df_sca, left_on=ShapefileDocument.geocode_fips.db_field,
#                           right_on=SharedCareArea.zcta.db_field)
#
# geo_ca.plot(linewidth=2, ax=ax, edgecolor='0', color='grey')
# N = len(geo_ca_sca[SharedCareArea.sca_id.db_field].unique())
# # cmap = get_cm(N)
#
# geo_ca_sca.plot(column=SharedCareArea.sca_id.db_field, linewidth=0, ax=ax, edgecolor='0')
#
# # df_shapefile_annotation = geo_ca_sca.dissolve(by=SharedCareArea.sca_id.db_field).centroid
# # for k, v in zip(df_shapefile_annotation.index, df_shapefile_annotation.values):
# #     plt.annotate(s=k, xy=v.coords[0],
# #                  horizontalalignment='center', size=20, color='grey')
# # # [v.coords[0] for v in geo_facility_hf['geometry']]
# # for k,v,z in zip(geo_facility_hf['facility_name'], geo_facility_hf['geometry'], geo_facility_hf['count']):
# #     plt.annotate(s=k, xy=v.coords[0], horizontalalignment='center', size=1)
# # plt.legend()
#
# path_plots = self.path_plots
# folder = f'{path_plots}/complenet2020/maps'
# check_folder(folder)
# # filename = f'{cols_groupby[0]}_{metric}_{normalized}_{method}_{type_community_detection}_{type_discharge}_{year}'
# filename = '_'.join([str(s) for s in
#                      [method, type_discharge, type_community_detection, year]])
#
# plt.tight_layout()
# plt.savefig(f'{folder}/map_(unknown).pdf')
# plt.close()
#
# shfi = geo.read_file('/Volumes/MHS/data_acquisition/CENSUS/SHAPEFILE/CENSUS_SHAPEFILE_ZCTA.shp')
from sklearn import datasets
# Fix: train_test_split, Sequential, Dense and Activation were used below
# but never imported, so the script crashed with NameError.
from sklearn.model_selection import train_test_split
import numpy as np
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Activation

# NOTE(review): fetch_mldata was removed in scikit-learn 0.22; on newer
# versions use datasets.fetch_openml('mnist_784') -- confirm the installed
# scikit-learn version before running.
mnist = datasets.fetch_mldata('MNIST original', data_home='.')

n = len(mnist.data)
N = 10000  # number of images sampled from the full dataset

# Select N images at random (indices: plural of index; permutation shuffles
# the order).
indices = np.random.permutation(range(n))[:N]

X = mnist.data[indices]
y = mnist.target[indices]
# np.eye(10)[a]: pick rows of the 10x10 identity matrix keyed by the digit
# -> 1-of-K (one-hot) encoding of the labels.
Y = np.eye(10)[y.astype(int)]

# 80% of the images/labels for training, 20% held out for evaluation.
# Simple example => http://docs.pyq.jp/python/machine_learning/tips/train_test_split.html
X_train, X_test, Y_train, Y_test = train_test_split(X, Y, train_size=0.8)

# Multilayer perceptron implemented with Keras.
# Input layer: 784 units, hidden layer: 200 units.
'''
Model definition
'''
n_in = len(X[0])   # input layer: 784 units
n_hidden = 200     # hidden layer: 200 units
n_out = len(Y[0])  # 10 output classes (digits)

model = Sequential()
model.add(Dense(n_hidden, input_dim=n_in))
model.add(Activation('sigmoid'))
model.add(Dense(n_out))
model.add(Activation('softmax'))
from project.extensions import db
import datetime


class Recipe(db.Model):
    """Recipe ('receitas') table: content, authorship and rating counters."""
    __tablename__ = 'receitas'

    ID = db.Column(db.Integer, primary_key=True, nullable=False)
    titulo = db.Column(db.String(200), nullable=False)        # title
    ingredientes = db.Column(db.Text, nullable=False)         # ingredients
    modo_preparo = db.Column(db.Text, nullable=False)         # preparation steps
    latest_change_date = db.Column(db.DateTime, nullable=False)
    texto = db.Column(db.Text, nullable=True)                 # free-form description
    tempo_preparo = db.Column(db.String(200), nullable=True)  # preparation time
    imagem = db.Column(db.Text, nullable=True)                # image data/URL -- TODO confirm format
    autor = db.Column(db.Integer, db.ForeignKey('users.ID'), nullable=False)  # author user id
    reviews = db.Column(db.Integer, nullable=False)           # review count
    stars = db.Column(db.Integer, nullable=False)             # star total/average -- TODO confirm semantics

    def get_id(self):
        return self.ID

    def as_dict(self):
        # Serialize every column except datetimes (not JSON-serializable as-is).
        return {c.name: getattr(self, c.name) for c in self.__table__.columns if type(getattr(self, c.name)) is not datetime.datetime}


class FavoriteRecipes(db.Model):
    """Join table marking a recipe as a user's favorite (soft-deletable)."""
    __tablename__ = 'favorite_recipes'

    ID = db.Column(db.Integer, primary_key=True, nullable=False)
    user = db.Column(db.Integer, db.ForeignKey('users.ID'), nullable=False)
    recipe = db.Column(db.Integer, db.ForeignKey('receitas.ID'), nullable=False)
    active = db.Column(db.Boolean, nullable=False)  # False = favorite removed

    def get_id(self):
        return self.ID

    def is_active(self):
        return self.active

    def as_dict(self):
        return {c.name: getattr(self, c.name) for c in self.__table__.columns}


class ReviewRecipe(db.Model):
    """A user's score for a recipe (soft-deletable)."""
    __tablename__ = 'review_recipe'

    ID = db.Column(db.Integer, primary_key=True, nullable=False)
    score = db.Column(db.Integer, nullable=False)
    user = db.Column(db.Integer, db.ForeignKey('users.ID'), nullable=False)
    recipe = db.Column(db.Integer, db.ForeignKey('receitas.ID'), nullable=False)
    active = db.Column(db.Boolean, nullable=False)  # False = review retracted

    def get_id(self):
        return self.ID

    def is_active(self):
        return self.active

    def as_dict(self):
        return {c.name: getattr(self, c.name) for c in self.__table__.columns}


class User(db.Model):
    """Application user; implements the flask-praetorian user-class contract."""
    __tablename__ = 'users'

    ID = db.Column(db.Integer, primary_key=True, nullable=False)
    nome = db.Column(db.Text(100), unique=False, nullable=False)  # display name
    email = db.Column(db.Text(100), unique=True, nullable=False)  # login identifier
    senha = db.Column(db.Text, nullable=False)                    # hashed password
    roles = db.Column(db.Text)                                    # comma-separated role names
    is_active = db.Column(db.Boolean, default=True, server_default="true")
    receitas = db.relationship('Recipe', backref='users', lazy=True)  # authored recipes

    @property
    def identity(self):
        """
        *Required Attribute or Property*

        flask-praetorian requires that the user class has an ``identity`` instance
        attribute or property that provides the unique id of the user instance
        """
        return self.ID

    @property
    def rolenames(self):
        """
        *Required Attribute or Property*

        flask-praetorian requires that the user class has a ``rolenames`` instance
        attribute or property that provides a list of strings that describe the roles
        attached to the user instance
        """
        try:
            return self.roles.split(",")
        except Exception:
            # roles may be NULL; an empty role list is the safe default.
            return []

    @property
    def password(self):
        """
        *Required Attribute or Property*

        flask-praetorian requires that the user class has a ``password`` instance
        attribute or property that provides the hashed password assigned to the user
        instance
        """
        return self.senha

    @classmethod
    def lookup(cls, username):
        """
        *Required Method*

        flask-praetorian requires that the user class implements a ``lookup()``
        class method that takes a single ``username`` argument and returns a user
        instance if there is one that matches or ``None`` if there is not.
        """
        return User.query.filter_by(email=username).one_or_none()

    @classmethod
    def identify(cls, id):
        """
        *Required Method*

        flask-praetorian requires that the user class implements an ``identify()``
        class method that takes a single ``id`` argument and returns user instance if
        there is one that matches or ``None`` if there is not.
        """
        return User.query.get(id)

    def is_valid(self):
        return self.is_active

    def as_dict(self):
        # The password is omitted: it is stored as bytes and is not serializable.
        return {
            'UserID' : self.ID,
            'nome' : self.nome,
            'email' : self.email
        }
#!/usr/bin/env python # encoding: utf-8 """ @author: zhanghe @software: PyCharm @file: __init__.py.py @time: 2017/4/19 下午2:24 """ from logging.config import dictConfig from flask import Flask app = Flask(__name__) app.config.from_object('config')
# Generated by Django 2.1.7 on 2019-06-15 18:41 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('HiPage', '0017_auto_20190615_1514'), ] operations = [ migrations.AddField( model_name='good_get', name='Name', field=models.CharField(default='', max_length=150), ), migrations.AddField( model_name='good_get', name='Photo', field=models.ImageField(default=None, upload_to=''), ), migrations.AddField( model_name='good_get', name='Price', field=models.IntegerField(default='0'), ), ]
import wx
from apartament_controller import ApartamentController
from AddBillPanel import *
from EditBillPanel import *
from DeleteAllApartamentBillsPanel import *
from DeleteCertainBillsPanel import *
from FindApartamentsByCostPanel import *
from FindCertainBillsOfAllApartaments import *
from validator import IntValidator
from DeleteBillsOfApartamentsInRange import *
from ClearCertainBillsOfAllApartaments import *
from FindCostOfBills import *

# Single controller instance shared by every panel in the window.
apartamentController = ApartamentController()


class windowClass(wx.Frame):
    """Main window: lays out all bill-management panels plus the
    Show bills / Undo buttons. (Python 2 / wxPython code.)"""

    def __init__(self, parent, title):
        super(windowClass, self).__init__(parent, title=title, size=(800, 700))
        # Fix: removed a leftover debug statement that printed the
        # character-code sum of a greeting string on every startup.
        addBillPanel = AddBillPanel(self, apartamentController, (0, 0), (300, 300))
        editBillPanel = EditBillPanel(self, apartamentController, (305, 0), (150, 300))
        DeleteAllApartamentBillsPanel(self, apartamentController, (460, 0), (200, 100))
        DeleteCertainBillsPanel(self, apartamentController, (460, 105), (240, 100))
        FindApartamentsByCostPanel(self, apartamentController, (0, 305), (250, 100))
        FindCertainBillsOfAllApartaments(self, apartamentController, (255, 305), (250, 100))
        ClearCertainBillsOfAllApartaments(self, apartamentController, (510, 305), (250, 100))
        DeleteBillsOfApartamentsInRange(self, apartamentController, (5, 410), (250, 200))
        FindCostOfBills(self, apartamentController, (255, 410), (250, 100))

        wx.Button(addBillPanel, label='Show bills', pos=(200, 10)).Bind(wx.EVT_BUTTON, self.OnShowBills)
        wx.Button(addBillPanel, label='Undo', pos=(200, 40)).Bind(wx.EVT_BUTTON, self.OnUndo)

        self.Show()

    def OnUndo(self, e):
        """Undo the last controller operation and confirm via a dialog."""
        apartamentController.undo()
        dlg = wx.MessageDialog(None, "Done", "Successfully undone", wx.OK | wx.ICON_INFORMATION)
        dlg.ShowModal()
        dlg.Destroy()

    def OnShowBills(self, e):
        """Display every apartament's bills in a message dialog."""
        dlg = wx.MessageDialog(None, apartamentController.getAllApartamentsAndBills(), "Bills",
                               wx.OK | wx.ICON_INFORMATION)
        dlg.ShowModal()
        dlg.Destroy()


app = wx.App()
windowClass(None, title="Epic window")
app.MainLoop()
#!/usr/bin/env python
# coding: UTF-8


class Card(object):
    """A playing card built from a suit index and a value index.

    Attributes
    ----------
    value : int
        Index into ``values`` (2-13 are face values; 14 is the ace,
        displayed as 1).
    suit : int
        Index into ``suits`` (0-3).
    """

    # Index-aligned numbers so that values[2] -> 2 ... values[14] -> 1 (ace).
    # Slots 0 and 1 are unused placeholders.
    values = [None, None, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 1]
    # Suit symbols, indexed by the ``suit`` attribute.
    suits = ["♠︎", "❤︎", "♦︎", "♣️"]

    def __init__(self, v, s):
        self.value = v
        self.suit = s

    def __repr__(self):
        # e.g. Card(2, 1) -> "❤︎2"; __repr__ gives a readable card string.
        return self.suits[self.suit] + str(self.values[self.value])
from django.shortcuts import render, HttpResponse

from blog.models import Post


# Create your views here.
def index(request):
    """Render the blog post list page with every stored Post."""
    all_posts = Post.objects.all()
    return render(request, 'blog/post_list.html', {'posts': all_posts})
import sys

import torch
import torch.nn as nn
import torch.nn.functional as F
import pytorch_lightning as pl
import numpy as np

sys.path.append("./")
# FIX: plain import instead of exec("from models import pblm"); exec hid
# the dependency from linters/IDEs and offered no benefit here.
from models import pblm

# Channel count of the first conv layer (the second layer uses 2*capacity).
capacity = 64


class Encoder(nn.Module):
    """Two-layer strided-conv encoder; each layer halves H and W."""

    def __init__(self):
        super(Encoder, self).__init__()
        c = capacity
        # Each stride-2 conv halves the spatial size (28x28 -> 14x14 -> 7x7;
        # the 48x48 smoke-test input below gives 24x24 -> 12x12).
        self.conv1 = nn.Conv2d(in_channels=1, out_channels=c,
                               kernel_size=4, stride=2, padding=1)
        self.conv2 = nn.Conv2d(in_channels=c, out_channels=c * 2,
                               kernel_size=4, stride=2, padding=1)

    def forward(self, x):
        x = F.relu(self.conv1(x))
        x = F.relu(self.conv2(x))
        return x


class Decoder(nn.Module):
    """Mirror of Encoder: two transposed convs, each doubling H and W."""

    def __init__(self):
        super(Decoder, self).__init__()
        c = capacity
        self.conv2 = nn.ConvTranspose2d(
            in_channels=c * 2, out_channels=c, kernel_size=4, stride=2, padding=1)
        self.conv1 = nn.ConvTranspose2d(
            in_channels=c, out_channels=1, kernel_size=4, stride=2, padding=1)

    def forward(self, x):
        x = F.relu(self.conv2(x))
        # No activation on the final reconstruction layer (raw outputs).
        x = self.conv1(x)
        return x


class Autoencoder(pblm.PrebuiltLightningModule):
    """Convolutional autoencoder built from Encoder + Decoder with MSE loss."""

    def __init__(self):
        super(Autoencoder, self).__init__(__class__.__name__)
        self.encoder = Encoder()
        self.decoder = Decoder()
        self.criterion = nn.MSELoss()

    def forward(self, x):
        x = self.encoder(x)
        x = self.decoder(x)
        return x


if __name__ == "__main__":
    vae = Autoencoder()
    # FIX: print the smoke-test output shape; the original wrapped the
    # expression in parentheses, which silently discarded the value.
    print(vae.forward(torch.rand(1, 1, 48, 48)).shape)
from sys import stdin
# FIX: import from the public 'collections' module, not the private
# CPython implementation module '_collections'.
from collections import deque


def BFS(n, m):
    """BFS from (0,0) to (n-1,m-1) moving only down/right; prints the
    number of cells on the shortest such path (including both ends).

    NOTE(review): only down/right moves are explored, exactly as in the
    original; mazes that require moving up/left print nothing.
    """
    global queue
    queue.append((0, 0, 1))
    while queue:
        # FIX: idiomatic instance method call instead of deque.popleft(queue).
        row, col, cnt = queue.popleft()
        if row == n - 1 and col == m - 1:
            print(cnt)
            return
        # Mark cells as visited (0) when enqueued so the same cell is never
        # queued twice; with uniform step cost this cannot change the
        # shortest count, but it prevents exponential queue growth.
        if row + 1 < n and maze[row + 1][col] == 1:
            maze[row + 1][col] = 0
            queue.append((row + 1, col, cnt + 1))
        if col + 1 < m and maze[row][col + 1] == 1:
            maze[row][col + 1] = 0
            queue.append((row, col + 1, cnt + 1))


n, m = map(int, stdin.readline().split())
maze = [[0] * m for i in range(n)]
res = 0
queue = deque()
for i in range(n):
    row = stdin.readline()
    for j in range(m):
        maze[i][j] = int(row[j])
BFS(n, m)
from django.apps import AppConfig


class TorchboxCoreAppConfig(AppConfig):
    """Django application configuration for the Torchbox core app."""

    # Dotted module path of the app.
    name = "tbx.core"
    # Short label used in model/table references; must stay "torchbox"
    # so existing migrations and references keep resolving.
    label = "torchbox"
    # Human-readable name shown in the admin.
    verbose_name = "Torchbox"
# Desafio 35 - Curso em Video Python
# Checks whether three given line-segment lengths can form a triangle.
# By Rafabr
import sys
import time
import os
import random


def _abort(message):
    """Clear the screen, show *message*, pause briefly, then exit."""
    os.system('clear')
    print(message)
    time.sleep(0.5)
    sys.exit()


os.system('clear')
print('\nDesafio 35')
print('Este programa verifica se 3 comprimentos de retas informados formam um triângulo.\n\n')

try:
    retas = input(
        "Informe 3 medidas de reta separadas por virgula ','(Ex.: 4,5,6): ").strip().split(',')
    r1 = float(retas[0])
    r2 = float(retas[1])
    r3 = float(retas[2])
except ValueError:
    _abort('Voçe não digitou valores válidos!')
except IndexError:
    _abort('Voçe não digitou 3 valores!')

os.system('clear')

if len(retas) > 3:
    _abort('Voçe digitou mais que 3 valores!')

if min(r1, r2, r3) < 0:
    _abort('Voçe digitou valores negativos!')

# Triangle inequality: every side must be strictly shorter than the
# sum of the other two.
if r1 < r2 + r3 and r2 < r1 + r3 and r3 < r2 + r1:
    print(f'As retas de medidas {retas}, formam um triângulo!')
else:
    print(f'As retas de medidas {retas}, NÃO formam um triângulo!')

print('\n---Fim da execução---\n')
#!/usr/bin/env python3
"""Convert a raw float32 binary file to float16.

Usage: script.py INPUT  (writes the converted data to INPUT--fp16)
"""
import sys

import numpy as np


def convert(src, dst):
    """Read raw float32 values from *src* and write them to *dst* as float16.

    Values outside float16 range overflow to +/-inf per NumPy casting rules.
    """
    data = np.fromfile(src, dtype=np.float32)
    data.astype(np.float16).tofile(dst)


def main():
    # Same CLI contract as before: one positional input path; the output
    # path is derived by appending "--fp16".
    src = sys.argv[1]
    convert(src, src + "--fp16")


# FIX: guard the script entry point so importing the module no longer
# reads sys.argv and touches the filesystem as a side effect.
if __name__ == "__main__":
    main()
"""QC dose-calibrator constancy check.

Reads a semicolon-separated export containing paired measured/theoretical
activities, fits an exponential decay to the measured values on a semilog
scale, and saves a comparison plot into the 'Verwerkte QCs' directory.
"""
import os
from scipy import stats
import matplotlib.pyplot as plt
import numpy as np
import csv

# --------------------------------------------------------------------------
# TO FILL IN (configuration)
filename = "02072021.csv"  # file name with the measurement data
time = 36  # time in minutes between subsequent measurements
theo_time = 6.0067  # theoretical half life of isotope in hours, Tc99m = 6.0067 h

# --------------------------------------------------------------------------
# Initialisation
rows = []           # raw csv rows
nmeas = []          # measurement time points converted to hours
date = []           # timestamp fields found in the rows
activity_all = []   # alternating measured/theoretical activities, in MBq

# Output directory for the generated plot.
if not os.path.exists('Verwerkte QCs'):
    os.mkdir('Verwerkte QCs')

# --------------------------------------------------------------------------
# Reading csv file
with open(filename, 'r') as csvfile:
    csvreader = csv.reader(csvfile, delimiter=';')
    # extracting each data row one by one
    for row in csvreader:
        rows.append(row)

# Walk the rows looking for measurement number `nval` (stored as a string in
# the first column); collect its timestamp and activity values.
nval = '1'
rowvalue = 0
for row in rows:
    if not row == []:
        if row[0] == nval:
            # Convert the 1-based measurement index to elapsed hours.
            nmeas.append(int(nval) * time / 60)
            for value in row:
                # A 15-character field with '/' at position 1 is taken to be
                # a date. NOTE(review): assumes a fixed-width date format like
                # 'D/MM/YYYY hh:mm' — confirm against the export.
                if len(value) == 15 and value[1] == '/':
                    date.append(value)
                # Activity fields end in 'GBq'/'MBq'; the first five chars
                # hold the number with a decimal comma; GBq is rescaled to
                # MBq. NOTE(review): assumes the numeric part is exactly
                # 5 characters wide — confirm against the export format.
                if len(value) > 6 and value[-3:] == 'GBq':
                    nactivity = float(value[:5].replace(',', '.')) * 1000
                    activity_all.append(nactivity)
                if len(value) > 6 and value[-3:] == 'MBq':
                    nactivity = float(value[:5].replace(',', '.'))
                    activity_all.append(nactivity)
            nval = str(int(nval) + 1)
    rowvalue += 1

# Splitting activity into measured (even indices) and expected (odd indices);
# relies on the two activities appearing in strict alternation per row.
activity_meas = []
activity_theor = []
for i in range(len(activity_all)):
    if (i % 2 == 0):
        activity_meas.append(activity_all[i])
    else:
        activity_theor.append(activity_all[i])

# --------------------------------------------------------------------------
# Making plot: linear regression on log(activity) gives the decay constant;
# the practical half-life follows from ln(2) / decay constant.
fig1, ax1 = plt.subplots(1, 1, sharex=True, sharey=True)
x1 = np.linspace(0, nmeas[-1], len(nmeas))
slope, intercept, r_value, p_value, std_err = stats.linregress(x1, np.log(activity_meas))
slope2, intercept2, r_value2, p_value2, std_err2 = stats.linregress(x1, np.log(activity_theor))
pract_time = np.log(2) / -slope  # fitted (practical) half-life in hours
rsqrt = r_value ** 2             # r-squared of the fit on the measured data

# For the calculation of the deviation per point.
delta_x = []     # % deviation of each measurement from the fitted line
delta_xx = []    # % deviation of each measurement from the theoretical value
x_reflist = []   # fitted reference activity at each time point
for i in range(len(x1)):
    x_ref = np.exp(slope * x1[i] + intercept)
    x_reflist.append(x_ref)
    delta_x.append(np.abs((x_ref - activity_meas[i]) / (x_ref)) * 100)
    delta_xx.append(np.abs((activity_theor[i] - activity_meas[i]) / (activity_theor[i])) * 100)
max_x = np.argmax(delta_x)  # index of the worst-deviating point

ax1.semilogy(nmeas, activity_meas, 'purple',
             label=f'Gemeten activiteit met t1/2 = {"{:0.3f}".format(pract_time)} h'
                   '\n'
                   r'$\rightarrow$'
                   f' afwijking van {"{:0.2f}".format(np.abs(theo_time-pract_time)/theo_time*100)} % tov {theo_time} h')
#ax1.semilogy(nmeas, activity_theor, 'b.', label='Theoretical activity')
#ax1.semilogy(nmeas, np.exp(slope2*x1+intercept2), 'r--', label='Theoretical activity')
ax1.plot(nmeas, np.exp(slope * x1 + intercept), color='orange', linestyle='dashed',
         label=f'Lineare regressie (op semilog) met r² = {"{:0.3f}".format(rsqrt)}')
ax1.scatter(nmeas[max_x], np.exp(slope * x1[max_x] + intercept), color='r',
            label=f'Maximal deviation of {"{:0.3f}".format(np.max(delta_x))} %'
                  f'\n Extra: Avg. deviation {"{:0.3f}".format(np.average(delta_x))} %')
ax1.set_title('QC Dosis Calibrator')
ax1.set_ylabel('Activiteit [MBq]')
ax1.set_xlabel('Tijd [h]')
ax1.legend()
# NOTE(review): this f-string yields the literal file name '(unknown).pdf';
# it looks like a placeholder — confirm the intended output name.
fig1.savefig(os.path.join('Verwerkte QCs', f'(unknown).pdf'))
"""Packaging configuration for the tdwrapper distribution."""
from setuptools import setup

# Runtime dependencies.
# NOTE(review): 'subprocess32' is a Python 2 backport of subprocess;
# confirm it is still required on the targeted interpreter.
REQUIRES = [
    'subprocess32',
    'pandas',
]

setup(
    name='tdwrapper',
    version='0.0.1',
    description='Teradata utility wrapper for Python',
    url='https://github.com/changhyeoklee/tdwrapper',
    author='Changhyeok Lee',
    author_email='Changhyeoklee@gmail.com',
    license='MIT',
    packages=['tdwrapper'],
    install_requires=REQUIRES,
    zip_safe=False,
)
from midge.models.response_code import ResponseCode

# Numeric code -> symbolic message table.
# NOTE(review): the names mirror MQTT/mosquitto-style MOSQ_ERR_* error
# constants — confirm against the broker library in use.
_ERROR_MESSAGES = {
    -1: "AGAIN",
    0: "SUCCESS",
    1: "NOMEM",
    2: "PROTOCOL",
    3: "INVAL",
    4: "NO_CONN",
    5: "CONN_REFUSED",
    6: "NOT_FOUND",
    7: "CONN_LOST",
    8: "TLS",
    9: "PAYLOAD_SIZE",
    10: "NOT_SUPPORTED",
    11: "AUTH",
    12: "ACL_DENIED",
    13: "UNKNOWN",
    14: "ERRNO",
    15: "QUEUE_SIZE",
}

# Lookup table keyed BOTH by numeric code and by message string, each mapping
# to the same ResponseCode instance. Replaces 16 hand-written constructor
# lines plus 16 hand-written alias lines with a comprehension and a loop.
ErrorCodesMaps = {
    code: ResponseCode(code=code, message=message)
    for code, message in _ERROR_MESSAGES.items()
}
# Snapshot the values before aliasing so we don't mutate while iterating.
for _rc in list(ErrorCodesMaps.values()):
    ErrorCodesMaps[_rc.message] = _rc