seq_id stringlengths 4 11 | text stringlengths 113 2.92M | repo_name stringlengths 4 125 ⌀ | sub_path stringlengths 3 214 | file_name stringlengths 3 160 | file_ext stringclasses 18
values | file_size_in_byte int64 113 2.92M | program_lang stringclasses 1
value | lang stringclasses 93
values | doc_type stringclasses 1
value | stars int64 0 179k ⌀ | dataset stringclasses 3
values | pt stringclasses 78
values |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
27297456381 | '''
2) Faça um programa que exiba na tela os 20 primeiros números quadrados perfeitos, da seguinte forma:
1 ** 2 = 1
2 ** 2 = 4
3 ** 2 = 9
4 ** 2 = 16
'''
# Print the first 20 perfect squares, one per line, as "n ** 2 = n*n".
for num in range(1, 21):
    quad = num**2
    print(f'{num} ** 2 = {quad}') #important: for the whole loop to be shown, the print must be inside the FOR
| ibellmartins/aulas-python | exercícios - AULA PRÁTICA/FOR/ex2.py | ex2.py | py | 321 | python | pt | code | 0 | github-code | 36 |
39279563732 | import sys
from PyQt5 import QtCore
from PyQt5.QtWidgets import QDialog, QApplication, QPushButton, QVBoxLayout, QShortcut
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
import matplotlib.pyplot as plt
import pandas as pd
from readFitsSlim import Spectra
class Window(QDialog):
    """Dialog for manually labelling spectra as STAR / GALAXY / UNKNOWN.

    One spectrum is plotted at a time; pressing a button (or keys 1/2/3)
    records the label and advances to the next spectrum.  Once all spectra
    are labelled, the results are written to 'spectralTrainingSet.csv'.
    """
    def __init__(self, parent=None):
        super(Window, self).__init__(parent)
        self.initUI()
    def initUI(self):
        """Build the UI: load the spectra, the plot canvas and the buttons."""
        self.setWindowTitle('Spectral Classification')
        self.setWindowFlags(
            QtCore.Qt.Window |
            QtCore.Qt.CustomizeWindowHint |
            QtCore.Qt.WindowTitleHint |
            QtCore.Qt.WindowCloseButtonHint |
            QtCore.Qt.WindowStaysOnTopHint)
        # One label per classified spectrum, in presentation order.
        self.classification = []
        # Load the spectra; only the first 20 are presented in a session.
        self.spec = Spectra('/data2/cpb405/DR1/*.fits')
        self.spec.specList = self.spec.specList[:20]
        # Index of the spectrum currently displayed.
        self.index = 0
        self.figure = plt.figure()
        self.canvas = FigureCanvas(self.figure)
        # Classification buttons; keys 1/2/3 trigger them via shortcuts.
        self.starButton = QPushButton('Star')
        self.starButton.setStyleSheet("background-color: rgb(31, 119, 180);")
        self.starButton.clicked.connect(self.STAR)
        QShortcut(QtCore.Qt.Key_1, self.starButton, self.starButton.animateClick)
        self.galaxyButton = QPushButton('Galaxy')
        self.galaxyButton.setStyleSheet("background-color: rgb(31, 119, 180);")
        self.galaxyButton.clicked.connect(self.GALAXY)
        QShortcut(QtCore.Qt.Key_2, self.galaxyButton, self.galaxyButton.animateClick)
        self.unknownButton = QPushButton('Unknown')
        self.unknownButton.setStyleSheet("background-color: rgb(31, 119, 180);")
        self.unknownButton.clicked.connect(self.UNKNOWN)
        QShortcut(QtCore.Qt.Key_3, self.unknownButton, self.unknownButton.animateClick)
        # set the layout
        layout = QVBoxLayout()
        layout.addWidget(self.canvas)
        layout.addWidget(self.starButton)
        layout.addWidget(self.galaxyButton)
        layout.addWidget(self.unknownButton)
        self.setLayout(layout)
        self.plot()
    def plot(self):
        """Plot the current spectrum, then advance; save the CSV when done.

        After the last spectrum has been shown, the collected labels are
        paired with the spectra designations (assumes spec.desig aligns
        one-to-one with specList -- TODO confirm) and written to
        'spectralTrainingSet.csv' before the window closes.
        """
        self.figure.clear()
        ax = self.figure.add_subplot(111)
        ax.plot(self.spec.specList[self.index].wavelength,self.spec.specList[self.index].flux)
        ax.set_xlabel('Wavelength [Angstroms]')
        ax.set_ylabel('Flux')
        ax.set_yscale('log')
        if self.index < (len(self.spec.specList)-1):
            self.index += 1
        else:
            print(self.classification)
            df = pd.DataFrame(columns=['designation','class'])
            for i in range(len(self.classification)):
                df.loc[len(df)] = [self.spec.desig[i],self.classification[i]]
            df.to_csv('spectralTrainingSet.csv')
            self.close()
        # refresh canvas
        self.canvas.draw()
    def STAR(self):
        """Record a STAR label for the current spectrum and show the next."""
        self.classification.append('STAR')
        self.plot()
    def GALAXY(self):
        """Record a GALAXY label for the current spectrum and show the next."""
        self.classification.append('GALAXY')
        self.plot()
    def UNKNOWN(self):
        """Record an UNKNOWN label for the current spectrum and show the next."""
        self.classification.append('UNKNOWN')
        self.plot()
        print(self.classification)
# Launch the Qt application and block until the window is closed.
if __name__ == '__main__':
    app = QApplication(sys.argv)
    main = Window()
    main.show()
    sys.exit(app.exec_())
| grd349/LearningLAMOST | Chris/Temp_Model/SpectraUI.py | SpectraUI.py | py | 3,429 | python | en | code | 1 | github-code | 36 |
72393817384 | import sys
import time
import random
import pygame as pg
pg.init()
WIDTH, HEIGHT = 800, 600
FPS = 60
window = pg.display.set_mode((WIDTH, HEIGHT))
clock = pg.time.Clock()
"""Set the window icon and the game title"""
pg.display.set_caption('Flappy bird')
pg.display.set_icon(pg.image.load(r'images/icon.png'))
"""Load the images"""
img_bg = pg.image.load(r'images/background.png')
img_bird = pg.image.load(r'images/bird.png')
img_pipe_top = pg.image.load(r'images/pipe_top.png')
img_pipe_bottom = pg.image.load(r'images/pipe_bottom.png')
"""Load the sounds"""
pg.mixer.music.load(r'sounds/music.mp3') # The music is loaded here but not yet played
pg.mixer.music.set_volume(0.1) # Music volume
pg.mixer.music.play(-1) # Start the music; -1 loops it forever
sound_fall = pg.mixer.Sound(r'sounds/fall.wav')
"""Player mechanics"""
player_position_y, player_speed_y, player_acceleration_y = HEIGHT // 2, 0, 0
player = pg.Rect(WIDTH // 3, player_position_y, 34, 24)
frame = 0
state = 'start'
"""Load the fonts"""
min_font = pg.font.Font(None, 35)
max_font = pg.font.Font(None, 80)
pipes = list()
backgrounds = list()
lives = 3
scores = 0
pipes_scores = list()
"""Pipe movement speed"""
pipe_speed = 3
"""Add the first background tile before the loop"""
backgrounds.append(pg.Rect(0, 0, 288, 600))
play = True
while play:
    for event in pg.event.get():
        if event.type == pg.QUIT:
            play = False
    screen = window.get_rect()
    """Advance the animation frame number"""
    frame = (frame + 0.2) % 4
    """Move the pipes"""
    for pipe in reversed(pipes):
        pipe.x -= pipe_speed # Subtract pipe_speed instead of a hard-coded 3
        """Drop the pipe once it has left the screen"""
        if pipe.right < screen.left:
            pipes.remove(pipe)
    """Move the background"""
    for bg in reversed(backgrounds):
        bg.x -= pipe_speed // 2 # Integer division is required when moving the background
        """Drop the background tile once it has left the screen"""
        if bg.right < screen.left:
            backgrounds.remove(bg)
    if backgrounds[-1].right <= screen.right:
        backgrounds.append(pg.Rect(backgrounds[-1].right, 0, 288, 600))
    """Handle left mouse button / space key presses"""
    press = pg.mouse.get_pressed()
    keys = pg.key.get_pressed()
    click = press[0] or keys[pg.K_SPACE]
    if click:
        player_acceleration_y = -2
    else:
        player_acceleration_y = 0
    """Game state machine"""
    if state == 'start':
        if click:
            state = 'play'
        """Reset position, speed and acceleration"""
        player_position_y += (
            HEIGHT // 2 - player_position_y)
        player.y = player_position_y
        player_speed_y = 0
        player_acceleration_y = 0
    elif state == 'play':
        """Falling mechanics"""
        player_position_y += player_speed_y
        player_speed_y = (player_speed_y + player_acceleration_y + 1) * 0.98
        player.y = player_position_y
        """Top up the pipe list"""
        if len(pipes) == 0 or pipes[-1].x < screen.width - 200:
            correction = random.randint(-60, 60)
            pipes.append(pg.Rect(screen.width, screen.top, 52, 200 + correction))
            pipes.append(pg.Rect(screen.width, screen.bottom - 200 + correction, 52, 200))
        """Detect the bird hitting the top or the bottom of the screen"""
        # NOTE(review): unlike the pipe collision below, scores/pipes_scores
        # are NOT reset on this death path -- confirm whether intended.
        if player.top <= screen.top or player.bottom >= screen.bottom:
            sound_fall.play() # Play the fall sound once
            state = 'fall'
            time.sleep(1)
        """Bird colliding with the pipes"""
        for pipe in pipes:
            if player.colliderect(pipe):
                sound_fall.play() # Play the fall sound once
                state = 'fall'
                pipes_scores.clear()
                scores = 0
                time.sleep(1)
            """Detect flying past a pipe (score each pipe only once)"""
            if pipe.right <= player.left and pipe not in pipes_scores:
                pipes_scores.append(pipe)
                scores += 5
                pipe_speed = 3 + scores // 100 # Every 100 points adds 1 to the speed
    elif state == 'fall':
        pipes.clear()
        """Subtract a life"""
        lives -= 1
        if lives > 0:
            state = 'start'
        else:
            state = 'game over'
    else: # Game Over
        play = False
    """Drawing"""
    # window.fill(pg.Color('black')) # No need to clear the screen
    for bg in backgrounds:
        window.blit(img_bg, bg)
    """Draw the pipes (must come before the player so the bird stays on top on collision)"""
    for pipe in pipes:
        """Display the pipes as images"""
        if pipe.y == 0:
            rect = img_pipe_top.get_rect(bottomleft=pipe.bottomleft)
            window.blit(img_pipe_top, rect)
        else:
            rect = img_pipe_bottom.get_rect(topleft=pipe.topleft)
            window.blit(img_pipe_bottom, rect)
    image = img_bird.subsurface(34 * int(frame), 0, 34, 24)
    """Tilt the bird up and down with its vertical speed"""
    image = pg.transform.rotate(image, -player_speed_y * 2)
    window.blit(image, player)
    """Draw the score and the lives"""
    score_text = min_font.render(f'Очки: {scores}', True, pg.Color('black'))
    window.blit(score_text, (screen.left + 10, screen.top + 10))
    lives_text = min_font.render(f'Жизни: {lives}', True, pg.Color('black'))
    window.blit(lives_text, (screen.left + score_text.get_rect().width + 30, screen.top + 10))
    pg.display.update()
    clock.tick(FPS)
pg.quit()
| ArtemTroshkin/FlappyBird | main.py | main.py | py | 6,760 | python | ru | code | 0 | github-code | 36 |
22355485380 | '''
Project 2 - Simple BlackJack Game
- You will use Object Oriented Programming.
- We will use a computer dealer and a human player, starting with a normal deck of cards.
1. Start with deck of cards
2. Player places a bet, coming from their 'bankroll'
3. Dealer starts with 1 card face up and 1 card face down. Player starts with 2 cards face up.
4. Player goes first, can either:
- Hit: receive another card
- Stay: stop receiving cards
If player sum > 21, player busts and dealer collects money.
5. If player stays and sum is under 21, dealer then hits until they either beat the player by hitting 21
or the dealer busts > 21
6. If player wins, their bet is doubled and added to 'bankroll'
Special Rules:
- Face Cards (Jack, Queen, King) value = 10
- Aces can count as either 1 or 11, players choice
Classes >> Card, Deck, Hand, Bankroll
'''
import random
suits = ('Hearts', 'Diamonds', 'Spades', 'Clubs')
ranks = ('Two', 'Three', 'Four', 'Five', 'Six', 'Seven', 'Eight', 'Nine', 'Ten', 'Jack', 'Queen', 'King', 'Ace')
# Blackjack value of each rank: face cards count 10, Ace counts 11 here.
values = {'Two': 2, 'Three': 3, 'Four': 4, 'Five': 5, 'Six': 6, 'Seven': 7, 'Eight': 8,
          'Nine': 9, 'Ten': 10, 'Jack': 10, 'Queen': 10, 'King': 10, 'Ace': 11}
class Card():
    """A single playing card: a suit, a rank and its blackjack value."""
    def __init__(self, suit: str, rank: str) -> None:
        self.suit = suit
        self.rank = rank
        # Look the numeric value up once at construction time.
        self.value = values[rank]
    def __str__(self):
        return f'{self.rank} of {self.suit}'
class Deck():
    '''
    A full 52-card deck.  It can be shuffled, and dealt cards are
    removed from the deck.
    '''
    def __init__(self) -> None:
        # One card for every (suit, rank) combination, in declaration order.
        self.all_cards = [Card(suit, rank) for suit in suits for rank in ranks]
    def shuffle(self) -> None:
        """Shuffle the remaining cards in place."""
        random.shuffle(self.all_cards)
    def deal_one(self) -> Card:
        """Remove and return the top card of the deck."""
        return self.all_cards.pop()
new_deck = Deck()
class Bankroll():
    """Tracks a player's money.

    The amount is exposed directly as the ``balance`` attribute.  The
    former ``balance()`` accessor method was removed: the instance
    attribute of the same name shadowed it, so ``inst.balance`` was the
    int and ``inst.balance()`` raised ``TypeError`` -- the method was
    dead, broken code.
    """
    def __init__(self, balance: int) -> None:
        self.balance = balance
    def add(self, num: int) -> None:
        """Increase the balance by ``num``."""
        self.balance += num
    def subtract(self, num: int) -> None:
        """Decrease the balance by ``num``."""
        self.balance -= num
    def __str__(self) -> str:
        return f'Your total bankroll is currently: {self.balance}'
class Hand():
    """The cards held by one player, plus a cached total value."""
    def __init__(self):
        self.hand = []
        self.value = 0
    def add_card(self,card: Card):
        """Take one more card into the hand."""
        self.hand.append(card)
    def get_value(self) -> int:
        """Recompute and return the total value of all cards held."""
        self.value = sum(card.value for card in self.hand)
        return self.value
    def show_hand(self,player: str):
        """Return a display string for the full hand of ``player``."""
        mycards = [str(card) for card in self.hand]
        self.value = sum(card.value for card in self.hand)
        if player == 'player':
            return f'Player Hand: {mycards}, Total Value: {self.value}'
        return f'Dealer Hand: {mycards}, Total Value: {self.value}'
    def show_one(self):
        """Return a display string showing only the dealer's newest card."""
        lastcard = self.hand[-1]
        mycards = [str(lastcard)]
        # Matches the original behaviour: value reflects the shown card only.
        self.value = lastcard.value
        return f'Dealer Hand: {mycards}, Total Value: {self.value}'
# GAME FUNCTIONS
def setup_game():
    """Greet the player, shuffle the shared deck and return both bankrolls."""
    print('Welcome to Black Jack!\n')
    new_deck.shuffle()
    # Starting balances: 100 for the player, 1000 for the dealer.
    return Bankroll(100).balance, Bankroll(1000).balance
def bet(player_balance: int):
    """Prompt for a wager and validate it against the player's balance.

    Returns:
        (player_bet, not_valid_bet): the amount entered (0 when the input
        was not a whole number) and a flag telling the caller to re-prompt.
    """
    # Robustness fix: the original crashed with ValueError on non-numeric
    # input and silently accepted zero/negative bets.
    try:
        player_bet = int(input('Place your bet: '))
    except ValueError:
        print('\n')
        print('Please enter a whole number!\n')
        return 0, True
    print('\n')
    if player_bet > player_balance:
        print('Bet too large!\n')
        not_valid_bet = True
    elif player_bet <= 0:
        print('Bet must be positive!\n')
        not_valid_bet = True
    else:
        not_valid_bet = False
    return player_bet, not_valid_bet
def check_bust(player: str) -> bool:
    """Return True (and announce the winner) when ``player``'s hand exceeds 21.

    Reads the module-level ``player_hand`` / ``dealer_hand`` globals.
    """
    hand, winner = (player_hand, 'Dealer') if player == 'player' else (dealer_hand, 'Player')
    if hand.get_value() > 21:
        print(f'BUST! {winner} Wins!\n')
        return True
    return False
def check_blackjack(player: str) -> bool:
    """Return True (and announce the winner) when ``player``'s hand is exactly 21.

    Reads the module-level ``player_hand`` / ``dealer_hand`` globals.
    """
    hand, winner = (player_hand, 'Player') if player == 'player' else (dealer_hand, 'Dealer')
    if hand.get_value() == 21:
        print(f'BLACKJACK! {winner} Wins!\n')
        return True
    return False
# MAIN METHOD
if __name__ == '__main__':
    player_balance, dealer_balance = setup_game()
    game_on = True
    while game_on:
        print(f'Dealer Bankroll: {dealer_balance}')
        print(f'Player Bankroll: {player_balance}\n')
        # Keep prompting until the player enters an affordable bet.
        not_valid_bet = True
        while not_valid_bet:
            player_bet, not_valid_bet = bet(player_balance)
        # Fresh hands each round: two cards each; the dealer shows one.
        player_hand = Hand()
        dealer_hand = Hand()
        player_hand.add_card(new_deck.deal_one())
        player_hand.add_card(new_deck.deal_one())
        dealer_hand.add_card(new_deck.deal_one())
        dealer_hand.add_card(new_deck.deal_one())
        print(dealer_hand.show_one())
        print(player_hand.show_hand('player') + '\n')
        # Player's turn: hit until they stand, bust, or reach 21.
        hit = True
        while hit:
            player_choice = str(input('Would you like to hit? Yes or No: ')).upper()
            if player_choice == 'YES':
                player_hand.add_card(new_deck.deal_one())
                print(player_hand.show_hand('player') + '\n')
                if check_bust('player') == True:
                    player_balance -= player_bet
                    dealer_balance += player_bet
                    hit = False
                # Player blackjack pays 3:2 on the bet.
                if check_blackjack('player') == True:
                    player_balance += (player_bet*1.5) + player_bet
                    dealer_balance -= player_bet*1.5
                    hit = False
            else:
                hit = False
        # NOTE(review): if the player already busted or hit blackjack above,
        # the round is settled, yet the dealer still plays below and can
        # settle the same bet a second time -- confirm intended flow.
        print(dealer_hand.show_hand('dealer') + '\n')
        #check for dealer blackjack
        # NOTE(review): this `break` exits the outer game loop, ending the
        # whole session after a dealer natural instead of starting a new round.
        if check_blackjack('dealer') == True:
            player_balance -= player_bet
            dealer_balance += player_bet
            break
        # Dealer must hit until reaching at least 17.
        while dealer_hand.get_value() < 17:
            dealer_hand.add_card(new_deck.deal_one())
            print(dealer_hand.show_hand('dealer') + '\n')
            if check_bust('dealer') == True:
                player_balance += player_bet
                dealer_balance -= player_bet
                break
            if check_blackjack('dealer') == True:
                player_balance -= player_bet
                dealer_balance += player_bet
                break
        # NOTE(review): after a dealer bust the busted total (>21) is still
        # compared here, so 'Dealer Wins!' fires and cancels the payout made
        # inside the loop above -- verify the settlement logic.
        if dealer_hand.get_value() > player_hand.get_value():
            print('Dealer Wins!')
            player_balance -= player_bet
            dealer_balance += player_bet
        elif dealer_hand.get_value() == player_hand.get_value():
            # On a tie the dealer draws one more card instead of a push.
            dealer_hand.add_card(new_deck.deal_one())
            if check_bust('dealer') == True:
                player_balance += player_bet
                dealer_balance -= player_bet
                break
            if check_blackjack('dealer') == True:
                player_balance -= player_bet
                dealer_balance += player_bet
                break
        else:
            print('Player Wins!')
            player_balance += player_bet
            dealer_balance -= player_bet
        # NOTE(review): 3:2 payouts make the balance fractional and losses can
        # push it below zero, so `== 0` may never trigger -- confirm.
        if player_balance == 0:
            print('You are broke! Game Over.')
            game_on = False
        play_again = str(input('Would you like to play again? Yes or No: ')).upper()
        if play_again == 'NO':
            game_on = False
| stephenv13/BlackJackGame | BlackJackGame.py | BlackJackGame.py | py | 8,346 | python | en | code | 0 | github-code | 36 |
2476075869 | import random
rock = """
_______
---' ____)
(_____)
(_____)
(____)
---.__(___)
"""
paper = """
_______
---' ____)____
______)
_______)
_______)
---.__________)
"""
scissors = """
_______
---' ____)____
______)
__________)
(____)
---.__(___)
"""
# Write your code below this line 👇
# Ask the player for a move (0=rock, 1=paper, 2=scissors) and draw the
# computer's move at random.
user_choice = int(
    input("What do you choose? Type 0 for Rock, 1 for Paper or 2 for Scissors.\n")
)
computer_choice = random.randint(0, 2)
def print_choice(choice):
    """Show the ASCII art for the player's choice (0=rock, 1=paper, 2=scissors)."""
    art_by_choice = {0: ('Rock', rock), 1: ('Paper', paper), 2: ('Scissors', scissors)}
    if choice in art_by_choice:
        name, art = art_by_choice[choice]
        print(f"You chose {name}:\n{art}")
    else:
        print("Invalid choice.")
    return
def print_computer_choice(choice):
    """Show the ASCII art for the computer's choice (0=rock, 1=paper, 2=scissors)."""
    art_by_choice = {0: ('Rock', rock), 1: ('Paper', paper), 2: ('Scissors', scissors)}
    if choice in art_by_choice:
        name, art = art_by_choice[choice]
        print(f"Computer chose {name}:\n{art}")
    else:
        print("Invalid choice.")
    return
def print_result(user_choice, computer_choice):
    """Announce the round outcome: rock(0) < paper(1) < scissors(2) < rock."""
    # (user, computer) pairs, from the user's point of view.
    winning_pairs = {(0, 2), (1, 0), (2, 1)}
    losing_pairs = {(0, 1), (1, 2), (2, 0)}
    if user_choice == computer_choice:
        print("It's a draw.")
    elif (user_choice, computer_choice) in winning_pairs:
        print("You win.")
    elif (user_choice, computer_choice) in losing_pairs:
        print("You lose.")
    else:
        print("Invalid choice.")
    return
# Validate the player's input before showing the round.
if user_choice >= 0 and user_choice <= 2:
    print_choice(user_choice)
    print_computer_choice(computer_choice)
    print_result(user_choice, computer_choice)
else:
    print("Invalid choice.")
| devProMaleek/learning-python | day-4-random-list/rock-paper-scissors.py | rock-paper-scissors.py | py | 1,963 | python | en | code | 0 | github-code | 36 |
9193307146 | import os
import copy
import pytorch_lightning as pl
from pytorch_lightning import profiler
import pytorch_lightning.core.lightning as lightning
from pytorch_lightning.callbacks.model_checkpoint import ModelCheckpoint
import torch.nn as nn
from pytorch_lightning.loggers import WandbLogger
from datetime import datetime
from lightly.embedding._callback import CustomModelCheckpoint
class BaseEmbedding(lightning.LightningModule):
    """All trainable embeddings must inherit from BaseEmbedding.
    """
    def __init__(self,
                 model,
                 criterion,
                 optimizer,
                 dataloader,
                 scheduler=None):
        """ Constructor
        Args:
            model: (torch.nn.Module)
            criterion: (torch.nn.Module)
            optimizer: (torch.optim.Optimizer)
            dataloader: (torch.utils.data.DataLoader)
            scheduler: optional learning-rate scheduler; when None only the
                optimizer is returned from configure_optimizers.
        """
        super(BaseEmbedding, self).__init__()
        self.model = model
        self.criterion = criterion
        self.optimizer = optimizer
        self.dataloader = dataloader
        self.scheduler = scheduler
        # Path of the best checkpoint, set after train_embedding() finishes.
        self.checkpoint = None
        self.cwd = os.getcwd()
        self.checkpoint_callback = None
        self.init_checkpoint_callback()
        self.save_hyperparameters()
    def forward(self, x0, x1):
        """Forward both augmented views through the wrapped model."""
        return self.model(x0, x1)
    def training_step(self, batch, batch_idx):
        """Compute and log the contrastive loss for one batch of two views."""
        # get the two image transformations
        (x0, x1), _, _ = batch
        # forward pass of the transformations
        y0, y1 = self(x0, x1)
        # calculate loss
        loss = self.criterion(y0, y1)
        # log loss and return
        self.log('loss', loss)
        return loss
    def configure_optimizers(self):
        """Return the optimizer, plus the scheduler when one was given."""
        if self.scheduler is None:
            return self.optimizer
        else:
            return [self.optimizer], [self.scheduler]
    def train_dataloader(self):
        """Return the dataloader passed in at construction time."""
        return self.dataloader
    def train_embedding(self, **kwargs):
        """ Train the model on the provided dataset.
        Args:
            **kwargs: pylightning_trainer arguments, examples include:
                min_epochs: (int) Minimum number of epochs to train
                max_epochs: (int) Maximum number of epochs to train
                gpus: (int) number of gpus to use
                weights_summary: (str) how to print a summary of the model and weights (number, size)
        Returns:
            A trained encoder, ready for embedding datasets.
        """
        # The run is logged to Weights & Biases under a timestamped project.
        project_name=datetime.today().strftime('%Y-%m-%d_%H-%M')
        wandb_logger = WandbLogger(project=project_name)
        trainer = pl.Trainer(**kwargs, callbacks=[self.checkpoint_callback], profiler="pytorch", logger=wandb_logger)
        trainer.fit(self)
        # Remember the absolute path of the best checkpoint for later use.
        self.checkpoint = self.checkpoint_callback.best_model_path
        self.checkpoint = os.path.join(self.cwd, self.checkpoint)
    def embed(self, *args, **kwargs):
        """Must be implemented by classes which inherit from BaseEmbedding.
        """
        raise NotImplementedError()
    def init_checkpoint_callback(self,
                                 save_last=False,
                                 save_top_k=0,
                                 monitor='loss',
                                 dirpath=None):
        """Initializes the checkpoint callback.
        Args:
            save_last:
                Whether or not to save the checkpoint of the last epoch.
            save_top_k:
                Save the top_k model checkpoints.
            monitor:
                Which quantity to monitor.
            dirpath:
                Where to save the checkpoint.
        """
        # NOTE(review): this string-prefix version check also matches e.g.
        # '1.10'/'1.20' (prefixes '1.1'/'1.2') -- confirm the intended range.
        if pl.__version__[:3] in ['1.0', '1.1', '1.2']:
            # initialize custom model checkpoint
            self.checkpoint_callback = CustomModelCheckpoint()
            self.checkpoint_callback.save_last = save_last
            self.checkpoint_callback.save_top_k = save_top_k
            self.checkpoint_callback.monitor = monitor
            dirpath = self.cwd if dirpath is None else dirpath
            self.checkpoint_callback.dirpath = dirpath
        else:
            self.checkpoint_callback = ModelCheckpoint(
                dirpath=self.cwd if dirpath is None else dirpath,
                filename='lightly_epoch_{epoch:d}',
                save_last=save_last,
                save_top_k=save_top_k,
                monitor=monitor,
                auto_insert_metric_name=False)
| tibe97/thesis-self-supervised-learning | lightly/embedding/_base.py | _base.py | py | 4,499 | python | en | code | 2 | github-code | 36 |
37635556780 | # The Hamming distance between two integers is the number of positions at which the corresponding bits are different.
# Now your job is to find the total Hamming distance between all pairs of the given numbers.
# Example:
# Input: 4, 14, 2
# Output: 6
# Explanation: In binary representation, the 4 is 0100, 14 is 1110, and 2 is 0010 (just
# showing the four bits relevant in this case). So the answer will be:
# HammingDistance(4, 14) + HammingDistance(4, 2) + HammingDistance(14, 2) = 2 + 2 + 2 = 6.
# Note:
# Elements of the given array are in the range of 0 to 10^9
# Length of the array will not exceed 10^4.
class Solution:
    def totalHammingDistance(self, nums: list[int]) -> int:
        """Return the sum of Hamming distances over all pairs in nums.

        For each bit position, if ``cnt`` of the numbers have that bit set,
        that position differs in exactly ``cnt * (len(nums) - cnt)`` pairs.
        Runs in O(30 * n) instead of the O(n^2) pairwise approach.

        Fix: the original annotated ``nums: List[int]`` without importing
        ``List`` from ``typing``, raising NameError when the class was
        defined; the builtin generic ``list[int]`` needs no import.
        """
        # 30 bits suffice because inputs are bounded by 10^9 < 2^30.
        bit_cnt = [0] * 30
        for n in nums:
            for i in range(30):
                bit_cnt[i] += (n >> i) & 1
        total = len(nums)
        # Each bit position contributes cnt set * (total - cnt) unset pairs.
        return sum(cnt * (total - cnt) for cnt in bit_cnt)
2483212505 | '''
Descripttion:
version:
Author: WGQ
Date: 2021-11-11 14:40:28
LastEditors: WGQ
LastEditTime: 2021-11-12 17:58:46
'''
from . import adminApi
import time
from fastapi import Query, Depends, Body, Form,Request
from playhouse.shortcuts import model_to_dict
from model.RModel import *
from common import Func, Utils
from utils import UserAuthUtil
@adminApi.post('/country/save', tags=['Admin-Country'],summary="新增/编辑Country")
async def save(req:Request,countryId:int = Form(0),countryName:str=Form(...),countryCode3:str=Form(...),countryCode2:str=Form(...),countryTimezoneUtc:int=Form(...),signInUser: dict = Depends(UserAuthUtil.verifyToken)):
try:
if countryId > 0:
RCountry.update(c_name = countryName,c_code3 = countryCode3,c_code2 = countryCode2, c_timezone_utc = countryTimezoneUtc ).where(RCountry.c_id == countryId).execute()
else:
cty = RCountry.create(c_name = countryName, c_code3 = countryCode3,c_code2 = countryCode2, c_timezone_utc = countryTimezoneUtc )
countryId = cty.c_id
return Func.jsonResult({"countryId":countryId})
except Exception as e:
return Func.jsonResult({"countryId":countryId},"发生错误,出现冲突",100000500)
@adminApi.get('/country/list', tags=['Admin-Country'],summary="Country列表")
async def countryList(signInUser: dict = Depends(UserAuthUtil.verifyToken)):
countries = RCountry.select().where(RCountry.c_status == 1).order_by(RCountry.c_id.desc()).dicts()
countryList = []
for _country in countries:
countryList.append({
"countryId":_country['c_id'],
"countryName":_country['c_name'],
"countryCode3":_country['c_code3'],
"countryCode2":_country['c_code2'],
"countryTimezoneUtc":_country['c_timezone_utc'],
})
return Func.jsonResult({"countryList":countryList})
@adminApi.delete('/country/remove', tags=['Admin-Country'],summary="删除Country")
async def remove(countryId:int = Query(...,description="CountryID"), signInUser: dict = Depends(UserAuthUtil.verifyToken)):
RCountry.update(c_status = 0).where(RCountry.c_id == countryId).execute()
return Func.jsonResult({"countryId":countryId},"adx removed")
| foreversun52/cgserver | adminapi/Country.py | Country.py | py | 2,267 | python | en | code | 0 | github-code | 36 |
15744675717 | import functools
import hashlib
import os
import sys
import time
from typing import NamedTuple
from git_command import git_require
from git_command import GitCommand
from git_config import RepoConfig
from git_refs import GitRefs
# File names used inside the superproject checkout directory.
_SUPERPROJECT_GIT_NAME = "superproject.git"
_SUPERPROJECT_MANIFEST_NAME = "superproject_override.xml"
class SyncResult(NamedTuple):
    """Return the status of sync and whether caller should exit.

    Returned by Superproject.Sync().
    """
    # Whether the superproject sync was successful.
    success: bool
    # Whether the caller should exit.
    fatal: bool
class CommitIdsResult(NamedTuple):
    """Return the commit ids and whether caller should exit.

    Returned by Superproject._GetAllProjectsCommitIds().
    """
    # A dictionary with the projects/commit ids on success, otherwise None.
    commit_ids: dict
    # Whether the caller should exit.
    fatal: bool
class UpdateProjectsResult(NamedTuple):
    """Return the overriding manifest file and whether caller should exit.

    Returned by Superproject.UpdateProjectsRevisionId().
    """
    # Path name of the overriding manifest file if successful, otherwise None.
    manifest_path: str
    # Whether the caller should exit.
    fatal: bool
class Superproject:
    """Get commit ids from superproject.
    Initializes a local copy of a superproject for the manifest. This allows
    lookup of commit ids for all projects. It contains _project_commit_ids which
    is a dictionary with project/commit id entries.
    """
    def __init__(
        self,
        manifest,
        name,
        remote,
        revision,
        superproject_dir="exp-superproject",
    ):
        """Initializes superproject.
        Args:
            manifest: A Manifest object that is to be written to a file.
            name: The unique name of the superproject
            remote: The RemoteSpec for the remote.
            revision: The name of the git branch to track.
            superproject_dir: Relative path under |manifest.subdir| to checkout
                superproject.
        """
        self._project_commit_ids = None
        self._manifest = manifest
        self.name = name
        self.remote = remote
        self.revision = self._branch = revision
        self._repodir = manifest.repodir
        self._superproject_dir = superproject_dir
        self._superproject_path = manifest.SubmanifestInfoDir(
            manifest.path_prefix, superproject_dir
        )
        self._manifest_path = os.path.join(
            self._superproject_path, _SUPERPROJECT_MANIFEST_NAME
        )
        # md5 only derives a unique per-remote directory name; it is not
        # used for anything security-sensitive.
        git_name = hashlib.md5(remote.name.encode("utf8")).hexdigest() + "-"
        self._remote_url = remote.url
        self._work_git_name = git_name + _SUPERPROJECT_GIT_NAME
        self._work_git = os.path.join(
            self._superproject_path, self._work_git_name
        )
        # The following are command arguemnts, rather than superproject
        # attributes, and were included here originally. They should eventually
        # become arguments that are passed down from the public methods, instead
        # of being treated as attributes.
        self._git_event_log = None
        self._quiet = False
        self._print_messages = False
    def SetQuiet(self, value):
        """Set the _quiet attribute."""
        self._quiet = value
    def SetPrintMessages(self, value):
        """Set the _print_messages attribute."""
        self._print_messages = value
    @property
    def project_commit_ids(self):
        """Returns a dictionary of projects and their commit ids."""
        return self._project_commit_ids
    @property
    def manifest_path(self):
        """Returns the manifest path if the path exists or None."""
        return (
            self._manifest_path if os.path.exists(self._manifest_path) else None
        )
    def _LogMessage(self, fmt, *inputs):
        """Logs message to stderr and _git_event_log."""
        message = f"{self._LogMessagePrefix()} {fmt.format(*inputs)}"
        if self._print_messages:
            print(message, file=sys.stderr)
        self._git_event_log.ErrorEvent(message, fmt)
    def _LogMessagePrefix(self):
        """Returns the prefix string to be logged in each log message"""
        return (
            f"repo superproject branch: {self._branch} url: {self._remote_url}"
        )
    def _LogError(self, fmt, *inputs):
        """Logs error message to stderr and _git_event_log."""
        self._LogMessage(f"error: {fmt}", *inputs)
    def _LogWarning(self, fmt, *inputs):
        """Logs warning message to stderr and _git_event_log."""
        self._LogMessage(f"warning: {fmt}", *inputs)
    def _Init(self):
        """Sets up a local Git repository to get a copy of a superproject.
        Returns:
            True if initialization is successful, or False.
        """
        if not os.path.exists(self._superproject_path):
            os.mkdir(self._superproject_path)
        if not self._quiet and not os.path.exists(self._work_git):
            print(
                "%s: Performing initial setup for superproject; this might "
                "take several minutes." % self._work_git
            )
        cmd = ["init", "--bare", self._work_git_name]
        p = GitCommand(
            None,
            cmd,
            cwd=self._superproject_path,
            capture_stdout=True,
            capture_stderr=True,
        )
        retval = p.Wait()
        if retval:
            self._LogWarning(
                "git init call failed, command: git {}, "
                "return code: {}, stderr: {}",
                cmd,
                retval,
                p.stderr,
            )
            return False
        return True
    def _Fetch(self):
        """Fetches a superproject for the manifest based on |_remote_url|.
        This runs git fetch which stores a local copy the superproject.
        Returns:
            True if fetch is successful, or False.
        """
        if not os.path.exists(self._work_git):
            self._LogWarning("git fetch missing directory: {}", self._work_git)
            return False
        if not git_require((2, 28, 0)):
            self._LogWarning(
                "superproject requires a git version 2.28 or later"
            )
            return False
        # Shallow, tag-less, blob-less fetch: only the tree of the tracked
        # branch tip is needed to read submodule commit ids.
        cmd = [
            "fetch",
            self._remote_url,
            "--depth",
            "1",
            "--force",
            "--no-tags",
            "--filter",
            "blob:none",
        ]
        # Check if there is a local ref that we can pass to --negotiation-tip.
        # If this is the first fetch, it does not exist yet.
        # We use --negotiation-tip to speed up the fetch. Superproject branches
        # do not share commits. So this lets git know it only needs to send
        # commits reachable from the specified local refs.
        rev_commit = GitRefs(self._work_git).get(f"refs/heads/{self.revision}")
        if rev_commit:
            cmd.extend(["--negotiation-tip", rev_commit])
        if self._branch:
            cmd += [self._branch + ":" + self._branch]
        p = GitCommand(
            None,
            cmd,
            cwd=self._work_git,
            capture_stdout=True,
            capture_stderr=True,
        )
        retval = p.Wait()
        if retval:
            self._LogWarning(
                "git fetch call failed, command: git {}, "
                "return code: {}, stderr: {}",
                cmd,
                retval,
                p.stderr,
            )
            return False
        return True
    def _LsTree(self):
        """Gets the commit ids for all projects.
        Works only in git repositories.
        Returns:
            data: data returned from 'git ls-tree ...' instead of None.
        """
        if not os.path.exists(self._work_git):
            self._LogWarning(
                "git ls-tree missing directory: {}", self._work_git
            )
            return None
        data = None
        branch = "HEAD" if not self._branch else self._branch
        # -z produces NUL-terminated entries so paths containing unusual
        # characters parse safely in _GetAllProjectsCommitIds.
        cmd = ["ls-tree", "-z", "-r", branch]
        p = GitCommand(
            None,
            cmd,
            cwd=self._work_git,
            capture_stdout=True,
            capture_stderr=True,
        )
        retval = p.Wait()
        if retval == 0:
            data = p.stdout
        else:
            self._LogWarning(
                "git ls-tree call failed, command: git {}, "
                "return code: {}, stderr: {}",
                cmd,
                retval,
                p.stderr,
            )
        return data
    def Sync(self, git_event_log):
        """Gets a local copy of a superproject for the manifest.
        Args:
            git_event_log: an EventLog, for git tracing.
        Returns:
            SyncResult
        """
        self._git_event_log = git_event_log
        if not self._manifest.superproject:
            self._LogWarning(
                "superproject tag is not defined in manifest: {}",
                self._manifest.manifestFile,
            )
            return SyncResult(False, False)
        _PrintBetaNotice()
        should_exit = True
        if not self._remote_url:
            self._LogWarning(
                "superproject URL is not defined in manifest: {}",
                self._manifest.manifestFile,
            )
            return SyncResult(False, should_exit)
        if not self._Init():
            return SyncResult(False, should_exit)
        if not self._Fetch():
            return SyncResult(False, should_exit)
        if not self._quiet:
            print(
                "%s: Initial setup for superproject completed." % self._work_git
            )
        return SyncResult(True, False)
    def _GetAllProjectsCommitIds(self):
        """Get commit ids for all projects from superproject and save them.
        Commit ids are saved in _project_commit_ids.
        Returns:
            CommitIdsResult
        """
        sync_result = self.Sync(self._git_event_log)
        if not sync_result.success:
            return CommitIdsResult(None, sync_result.fatal)
        data = self._LsTree()
        if not data:
            self._LogWarning(
                "git ls-tree failed to return data for manifest: {}",
                self._manifest.manifestFile,
            )
            return CommitIdsResult(None, True)
        # Parse lines like the following to select lines starting with '160000'
        # and build a dictionary with project path (last element) and its commit
        # id (3rd element).
        #
        # 160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00
        # 120000 blob acc2cbdf438f9d2141f0ae424cec1d8fc4b5d97f\tbootstrap.bash\x00 # noqa: E501
        commit_ids = {}
        for line in data.split("\x00"):
            ls_data = line.split(None, 3)
            if not ls_data:
                break
            if ls_data[0] == "160000":
                commit_ids[ls_data[3]] = ls_data[2]
        self._project_commit_ids = commit_ids
        return CommitIdsResult(commit_ids, False)
    def _WriteManifestFile(self):
        """Writes manifest to a file.
        Returns:
            manifest_path: Path name of the file into which manifest is written
                instead of None.
        """
        if not os.path.exists(self._superproject_path):
            self._LogWarning(
                "missing superproject directory: {}", self._superproject_path
            )
            return None
        manifest_str = self._manifest.ToXml(
            groups=self._manifest.GetGroupsStr(), omit_local=True
        ).toxml()
        manifest_path = self._manifest_path
        try:
            with open(manifest_path, "w", encoding="utf-8") as fp:
                fp.write(manifest_str)
        except OSError as e:
            self._LogError("cannot write manifest to : {} {}", manifest_path, e)
            return None
        return manifest_path
    def _SkipUpdatingProjectRevisionId(self, project):
        """Checks if a project's revision id needs to be updated or not.
        Revision id for projects from local manifest will not be updated.
        Args:
            project: project whose revision id is being updated.
        Returns:
            True if a project's revision id should not be updated, or False,
        """
        path = project.relpath
        if not path:
            return True
        # Skip the project with revisionId.
        if project.revisionId:
            return True
        # Skip the project if it comes from the local manifest.
        return project.manifest.IsFromLocalManifest(project)
    def UpdateProjectsRevisionId(self, projects, git_event_log):
        """Update revisionId of every project in projects with the commit id.
        Args:
            projects: a list of projects whose revisionId needs to be updated.
            git_event_log: an EventLog, for git tracing.
        Returns:
            UpdateProjectsResult
        """
        self._git_event_log = git_event_log
        commit_ids_result = self._GetAllProjectsCommitIds()
        commit_ids = commit_ids_result.commit_ids
        if not commit_ids:
            return UpdateProjectsResult(None, commit_ids_result.fatal)
        projects_missing_commit_ids = []
        for project in projects:
            if self._SkipUpdatingProjectRevisionId(project):
                continue
            path = project.relpath
            commit_id = commit_ids.get(path)
            if not commit_id:
                projects_missing_commit_ids.append(path)
        # If superproject doesn't have a commit id for a project, then report an
        # error event and continue as if do not use superproject is specified.
        if projects_missing_commit_ids:
            self._LogWarning(
                "please file a bug using {} to report missing "
                "commit_ids for: {}",
                self._manifest.contactinfo.bugurl,
                projects_missing_commit_ids,
            )
            return UpdateProjectsResult(None, False)
        for project in projects:
            if not self._SkipUpdatingProjectRevisionId(project):
                project.SetRevisionId(commit_ids.get(project.relpath))
        manifest_path = self._WriteManifestFile()
        return UpdateProjectsResult(manifest_path, False)
@functools.lru_cache(maxsize=10)
def _PrintBetaNotice():
"""Print the notice of beta status."""
print(
"NOTICE: --use-superproject is in beta; report any issues to the "
"address described in `repo version`",
file=sys.stderr,
)
@functools.lru_cache(maxsize=None)
def _UseSuperprojectFromConfiguration():
    """Returns the user choice of whether to use superproject.

    Consults the per-user git config first; an unexpired (or non-expiring)
    choice wins.  Otherwise the system config may auto-enroll the user for
    two weeks.  The result is cached for the life of the process.
    """
    user_cfg = RepoConfig.ForUser()
    time_now = int(time.time())

    user_value = user_cfg.GetBoolean("repo.superprojectChoice")
    if user_value is not None:
        user_expiration = user_cfg.GetInt("repo.superprojectChoiceExpire")
        # The stored choice is honored when it has no expiration (None or
        # non-positive) or the expiration is still in the future.
        if (
            user_expiration is None
            or user_expiration <= 0
            or user_expiration >= time_now
        ):
            # TODO(b/190688390) - Remove prompt when we are comfortable with the
            # new default value.
            if user_value:
                print(
                    (
                        "You are currently enrolled in Git submodules "
                        "experiment (go/android-submodules-quickstart). Use "
                        "--no-use-superproject to override.\n"
                    ),
                    file=sys.stderr,
                )
            else:
                print(
                    (
                        "You are not currently enrolled in Git submodules "
                        "experiment (go/android-submodules-quickstart). Use "
                        "--use-superproject to override.\n"
                    ),
                    file=sys.stderr,
                )
            return user_value

    # We don't have an unexpired choice, ask for one.
    system_cfg = RepoConfig.ForSystem()
    system_value = system_cfg.GetBoolean("repo.superprojectChoice")
    if system_value:
        # The system configuration is proposing that we should enable the
        # use of superproject. Treat the user as enrolled for two weeks.
        #
        # TODO(b/190688390) - Remove prompt when we are comfortable with the new
        # default value.
        userchoice = True
        time_choiceexpire = time_now + (86400 * 14)
        user_cfg.SetString(
            "repo.superprojectChoiceExpire", str(time_choiceexpire)
        )
        user_cfg.SetBoolean("repo.superprojectChoice", userchoice)
        print(
            "You are automatically enrolled in Git submodules experiment "
            "(go/android-submodules-quickstart) for another two weeks.\n",
            file=sys.stderr,
        )
        return True

    # For all other cases, we would not use superproject by default.
    return False
def PrintMessages(use_superproject, manifest):
    """Returns a boolean if error/warning messages are to be printed.

    Args:
        use_superproject: option value from optparse.
        manifest: manifest to use.
    """
    user_made_a_choice = use_superproject is not None
    manifest_defines_superproject = bool(manifest.superproject)
    return user_made_a_choice or manifest_defines_superproject
def UseSuperproject(use_superproject, manifest):
    """Returns a boolean if use-superproject option is enabled.

    Args:
        use_superproject: option value from optparse.
        manifest: manifest to use.

    Returns:
        Whether the superproject should be used.
    """
    if not manifest.superproject:
        # This (sub) manifest does not have a superproject definition.
        return False
    if use_superproject is not None:
        # An explicit command-line choice always wins.
        return use_superproject
    client_value = manifest.manifestProject.use_superproject
    if client_value is not None:
        return client_value
    # manifest.superproject is known truthy here (first guard), so the
    # original trailing `elif manifest.superproject ... else: return False`
    # had an unreachable else branch; fall through to the user's config.
    return _UseSuperprojectFromConfiguration()
| GerritCodeReview/git-repo | git_superproject.py | git_superproject.py | py | 17,995 | python | en | code | 267 | github-code | 36 |
25966299187 |
import Tkinter as tk
import ScrolledText
import numpy as np
import matplotlib as mpl
import matplotlib.backends.tkagg as tkagg
from matplotlib.backends.backend_agg import FigureCanvasAgg
import sklearn.gaussian_process as skgp
import evaluatorGUI as eg
import matplotlib.pyplot as plt
import scipy.optimize
import time
def draw_figure(canvas, figure, loc=(0, 0)):
    """ Draw a matplotlib figure onto a Tk canvas

    from https://matplotlib.org/gallery/user_interfaces/embedding_in_tk_canvas_sgskip.html
    loc: location of top-left corner of figure on canvas in pixels.
    Inspired by matplotlib source: lib/matplotlib/backends/backend_tkagg.py

    Returns the tk.PhotoImage holding the rendered pixels; the caller MUST
    keep a reference to it or Tk will garbage-collect the image.
    """
    # Render the figure off-screen with the AGG backend.
    figure_canvas_agg = FigureCanvasAgg(figure)
    figure_canvas_agg.draw()
    figure_x, figure_y, figure_w, figure_h = figure.bbox.bounds
    figure_w, figure_h = int(figure_w), int(figure_h)
    photo = tk.PhotoImage(master=canvas, width=figure_w, height=figure_h)

    # Position: convert from top-left anchor to center anchor
    canvas.create_image(loc[0] + figure_w/2, loc[1] + figure_h/2, image=photo)

    # Unfortunately, there's no accessor for the pointer to the native renderer,
    # so reach into the private _renderer to blit the AGG pixels into the photo.
    tkagg.blit(photo, figure_canvas_agg.get_renderer()._renderer, colormode=2)

    # Return a handle which contains a reference to the photo object
    # which must be kept live or else the picture disappears
    return photo
class PointSelector(tk.Frame):
    """A 410x410 canvas on which the user clicks to add (x, y) data points.

    Canvas pixels are mapped to coordinates in roughly [-1, 1] x [-1, 1].
    After each click the `changepoints(xlist, ylist)` callback is invoked;
    it defaults to a no-op and is replaced by the owner (see GPvisualizer).
    """
    def __init__(self,master):
        tk.Frame.__init__(self,master)
        # Callback fired with the full point lists after every click.
        self.changepoints=lambda x,y:None
        self.inputcanvas=tk.Canvas(self,width=410,height=410,borderwidth=1,relief=tk.RAISED,background="white")
        self.inputcanvas.bind("<Button-1>", self.__inputcanvasmouseclick)
        # Accumulated point coordinates in data space.
        self.xlist=[]
        self.ylist=[]
        self.inputcanvas.pack(side=tk.TOP)

    def __inputcanvasmouseclick(self, event):
        """Record a clicked point, mark it on the canvas, fire the callback."""
        x = event.x
        y = event.y
        # Clamp clicks to the drawable area (5..405 px).
        if x < 5:
            x = 5
        if x > 405:
            x = 405
        if y < 5:
            y = 5
        if y > 405:
            y = 405
        # Pixel -> data coordinates: center at (205, 205), 200 px per unit.
        xc = (x - 205) / 200.0
        yc = (205 - y) / 200.0
        self.xlist.append(xc)
        self.ylist.append(yc)
        self.lastx=xc
        self.lasty=yc
        # Draw a small dot where the user clicked.
        self.inputcanvas.create_oval(x-1,y-1,x+1,y+1)
        self.changepoints(self.xlist,self.ylist)
class GPdisplay(tk.Frame):
    """Canvas + text log that fits a Gaussian process to the current points
    and displays the posterior mean with a +/- one-std band.

    `updatePoints` and `updateGP` each trigger a refit and redraw once both
    a GP and at least one point are available.
    """
    def __init__(self,master):
        tk.Frame.__init__(self,master)
        self.dispcanvas=tk.Canvas(self,width=410,height=410,borderwidth=1, relief=tk.RAISED, background="white")
        self.dispcanvas.pack(side=tk.TOP)
        # Current training data (set via updatePoints).
        self.x=[]
        self.y=[]
        # Current sklearn GaussianProcessRegressor (set via updateGP).
        self.gp=None
        self.log = ScrolledText.ScrolledText(self, width=50, height=15)
        self.log.pack(side=tk.TOP)

    def updatePoints(self,x,y):
        """Replace the training points and redraw."""
        self.x=x
        self.y=y
        self.updateDisplay()

    def updateGP(self,gp):
        """Replace the regressor and redraw."""
        self.gp=gp
        self.updateDisplay()

    def updateDisplay(self):
        """Refit the GP on the current points and redraw plot + fit log."""
        self.log.delete(1.0,tk.END)
        if len(self.x)>0 and self.gp is not None:
            self.dispcanvas.delete("all")
            # Time the fit so it can be reported in the log pane.
            start=time.time()
            self.gp.fit(np.array(self.x).reshape(-1,1),np.array(self.y).reshape(-1,1))
            stop=time.time()
            self.log.insert(tk.END,"log marginal likelihood: "+str(self.gp.log_marginal_likelihood())+"\nparams: \n "+"\n ".join([param+" : "+str(val) for param,val in self.gp.get_params(True).items()]))
            self.log.insert(tk.END,"\ntime: "+str(stop-start))
            # kernel_ holds the hyperparameters after optimization.
            self.log.insert(tk.END,"\nfinal params:"+"\n ".join([param+" : "+str(val) for param,val in self.gp.kernel_.get_params(True).items()]))
            # Predict over a dense grid in [-1, 1) for the mean/uncertainty band.
            mean, std = self.gp.predict(np.arange(-1, 1, .01).reshape(-1, 1), return_std=True)
            fig=mpl.figure.Figure(figsize=(4, 3))
            ax=fig.add_axes([0, 0, 1, 1])
            ax.plot(np.arange(-1, 1, .01), mean)
            ax.fill_between(np.arange(-1, 1, .01), np.squeeze(mean) - std, np.squeeze(mean) + std, alpha=.1)
            ax.scatter(self.x, self.y, c="red", s=50)
            ax.set_xlim([-1,1])
            ax.set_ylim([-1,1])
            fig_x, fig_y = 0, 0
            # Keep the PhotoImage reference on self or Tk drops the picture.
            self.fig_photo = draw_figure(self.dispcanvas, fig, loc=(fig_x, fig_y))
            fig_w, fig_h = self.fig_photo.width(), self.fig_photo.height()

    def dispGP(self):
        """Show the current fit in a standalone (blocking) pyplot window."""
        self.gp.fit(np.array(self.x).reshape(-1, 1), np.array(self.y).reshape(-1, 1))
        mean,std=self.gp.predict(np.arange(-1,1,.01).reshape(-1,1),return_std=True)
        plt.figure(figsize=(16,9))
        plt.plot(np.arange(-1,1,.01),mean)
        plt.fill_between(np.arange(-1,1,.01),np.squeeze(mean)-std,np.squeeze(mean)+std,alpha=.1)
        plt.scatter(self.x,self.y,c="red",s=50)
        plt.xlim(-1,1)
        plt.ylim(-2,2)
        plt.show()
class GPselector(tk.Frame):
    """Control panel for building a GaussianProcessRegressor.

    Kernel-type buttons swap the parameter panel; "Update" builds a regressor
    from the current fields and hands it to `changeGP` (a callback replaced
    by the owner, see GPvisualizer).

    NOTE(review): the kernel parameter panels are positional — the indices
    used in updateGP must match the field order set in setMatern/setRBF/
    setRBFnoise exactly.
    """
    def __init__(self,master):
        tk.Frame.__init__(self,master)
        # Callback receiving the freshly built regressor.
        self.changeGP=lambda x:None
        buttonpanel = tk.Frame(self)
        buttonpanel.pack(side=tk.LEFT)
        updateButton=tk.Button(buttonpanel,text="Update",command=self.updateGP)
        updateButton.pack(side=tk.TOP)
        # Parameters shared by every kernel type.
        self.generalparamselect=eg.ParameterPanel(self,[("alpha: ",tk.DoubleVar,.0000001),("restarts: ",tk.IntVar,25),("optimize: ",tk.BooleanVar,True)])
        self.generalparamselect.pack(side=tk.LEFT)
        buttonpanel=tk.Frame(self)
        buttonpanel.pack(side=tk.LEFT)
        tk.Button(buttonpanel, text="Matern", command=self.setMatern).pack(side=tk.TOP)
        tk.Button(buttonpanel, text="RBF", command=self.setRBF).pack(side=tk.TOP)
        tk.Button(buttonpanel, text="RBFnoise", command=self.setRBFnoise).pack(side=tk.TOP)
        # Kernel-specific panel; starts out configured for Matern.
        self.paramselect=eg.ParameterPanel(self,[("nu: ",tk.DoubleVar,1.5),("length_scale: ",tk.DoubleVar,1.0),("length_scale_min",tk.DoubleVar,1e-5),("length_scale_max",tk.DoubleVar,1e5)])
        self.paramselect.pack(side=tk.LEFT)
        self.kerneltype="Matern"

    def updateGP(self):
        """Build a regressor from the current fields and fire changeGP."""
        generalparams=self.generalparamselect.getparameters()
        params=self.paramselect.getparameters()
        if self.kerneltype=="Matern":
            # params: [nu, length_scale, length_scale_min, length_scale_max]
            kernel=skgp.kernels.Matern(nu=params[0],length_scale=params[1],length_scale_bounds=(params[2],params[3]))
        elif self.kerneltype=="RBF":
            # params: [length_scale, length_scale_min, length_scale_max]
            kernel=skgp.kernels.RBF(length_scale=params[0],length_scale_bounds=(params[1],params[2]))
        elif self.kerneltype=="RBFnoise":
            # params: [length_scale, noise_level, noise weight, min, max]
            kernel=skgp.kernels.RBF(length_scale=params[0],length_scale_bounds=(params[3],params[4]))+params[2]*skgp.kernels.WhiteKernel(noise_level=params[1])
        else:
            raise ValueError("Unrecognized kernel type: "+str(self.kerneltype))
        # optimizer=None disables hyperparameter optimization entirely.
        gp=skgp.GaussianProcessRegressor(kernel=kernel,n_restarts_optimizer=generalparams[1],alpha=generalparams[0],optimizer="fmin_l_bfgs_b" if generalparams[2] else None)
        self.changeGP(gp)

    def setMatern(self):
        """Swap in the Matern parameter panel."""
        self.paramselect.pack_forget()
        self.paramselect=eg.ParameterPanel(self,[("nu: ",tk.DoubleVar,1.5),("length_scale: ",tk.DoubleVar,1.0),("length_scale_min",tk.DoubleVar,1e-5),("length_scale_max",tk.DoubleVar,1e5)])
        self.paramselect.pack(side=tk.LEFT)
        self.kerneltype="Matern"

    def setRBF(self):
        """Swap in the RBF parameter panel."""
        self.paramselect.pack_forget()
        self.paramselect=eg.ParameterPanel(self,[("length_scale: ",tk.DoubleVar,1.0),("length_scale_min",tk.DoubleVar,1e-5),("length_scale_max",tk.DoubleVar,1e5)])
        self.paramselect.pack(side=tk.LEFT)
        self.kerneltype="RBF"

    def setRBFnoise(self):
        """Swap in the RBF + white-noise parameter panel."""
        self.paramselect.pack_forget()
        self.paramselect=eg.ParameterPanel(self,[("length_scale: ",tk.DoubleVar,1.5),("noise_level: ",tk.DoubleVar,1.0),("noise weight",tk.DoubleVar,1.0),("length_scale_min",tk.DoubleVar,1e-5),("length_scale_max",tk.DoubleVar,1e5)])
        self.paramselect.pack(side=tk.LEFT)
        self.kerneltype="RBFnoise"
class GPvisualizer(tk.Frame):
    """Top-level frame: point entry, GP display and kernel selection, wired up."""
    def __init__(self,master):
        tk.Frame.__init__(self,master)
        # Build the three panels left-to-right.
        selector=PointSelector(self)
        selector.pack(side=tk.LEFT)
        display=GPdisplay(self)
        display.pack(side=tk.LEFT)
        chooser=GPselector(self)
        chooser.pack(side=tk.LEFT)
        # Route UI events: new points refresh the plot, kernel choices
        # rebuild the regressor.
        selector.changepoints=display.updatePoints
        chooser.changeGP=display.updateGP
if __name__=="__main__":
master = tk.Tk()
GPvisualizer(master).pack(side=tk.TOP)
tk.mainloop() | Hampswitch/ReciprocationGUI | reciprocation/GPvisualizer.py | GPvisualizer.py | py | 8,347 | python | en | code | 0 | github-code | 36 |
8309674493 | from bson import ObjectId
# noinspection PyProtectedMember
from motor.motor_asyncio import AsyncIOMotorCollection
from task_tracker_backend.dataclasses import UserData
from task_tracker_backend.task_factory import TaskFactory
class User:
    """Facade over one user document and that user's tasks collection."""

    def __init__(
        self, users_collection: AsyncIOMotorCollection, tasks_collection: AsyncIOMotorCollection, _id: ObjectId
    ) -> None:
        # Keep the raw collections private; access goes through properties.
        self.__users_collection = users_collection
        self.__tasks_collection = tasks_collection
        self.__id = _id

    @property
    def id(self) -> ObjectId:
        return self.__id

    @property
    def task_factory(self) -> TaskFactory:
        # A fresh factory per access, bound to this user's id.
        return TaskFactory(self.__tasks_collection, self.__id)

    @property
    async def data(self) -> UserData:
        # Strip Mongo's _id and the password hash before handing data out.
        projection = {'_id': 0, 'password': 0}
        document = await self.__users_collection.find_one({'_id': self.__id}, projection)
        return UserData.parse_obj(document)

    async def update_data(self, data: UserData) -> None:
        payload = data.dict()
        await self.__users_collection.update_one({'_id': self.__id}, {'$set': payload})
| smthngslv/task-tracker-backend | task_tracker_backend/user.py | user.py | py | 1,031 | python | en | code | 0 | github-code | 36 |
14231652942 | '''
File name: Isonet_star_app.py
Author: Hui Wang (EICN)
Date created: 4/21/2021
Date last modified: 06/01/2021
Python Version: 3.6.5
'''
import sys,os
import logging
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtWidgets import QTableWidgetItem,QMessageBox
from PyQt5.QtCore import QProcess
#Isonet packages
from IsoNet.gui.isonet_gui import Ui_MainWindow ##need to change in the package
from IsoNet.gui.model_star import Model, setTableWidget #need to change in the package
from IsoNet.util.metadata import MetaData,Label,Item
class MainWindowUIClass( Ui_MainWindow ):
    def __init__( self ):
        '''Initialize the super class and the application state.

        Sets up the star-file backed Model, the (initially idle) worker
        process handle, and the name of the persisted GUI-settings file.
        '''
        super().__init__()
        self.model = Model()
        #reset process as None
        # NOTE(review): start_process() checks self.mw.p, not self.p —
        # confirm which attribute actually owns the QProcess handle.
        self.p = None
        # Last chunk written to the log pane; used to rewrite progress bars.
        self.previous_log_line = ""
        self.setting_file = ".isonet.setting"
        # check for pid in last running
        #if os.path.isfile(self.model.pid_file):
        #    os.remove(self.model.pid_file)
    def setupUi( self, MW ):
        ''' Setup the UI of the super class, and add here code
        that relates to the way we want our UI to operate.

        Populates the tomogram table from the star file, connects every
        button/cell signal to its slot, installs folder icons, restores the
        saved settings and loads the persistent log file into the log pane.
        '''
        super().setupUi( MW )
        #load default content in tomograms.star
        setTableWidget(self.tableWidget, self.model.md)
        #set up functions when cells be clicked
        #self.tableWidget.cellPressed[int, int].connect(self.browseSlotTable)
        self.tableWidget.cellDoubleClicked[int, int].connect(self.browseSlotTable)
        self.tableWidget.cellChanged[int,int].connect(self.updateMDItem)
        #self.tableWidget.horizontalHeaderItem(1).setToolTip("Header 0");
        #for i,lab in enumerate(self.model.header):
        #    self.tableWidget.horizontalHeaderItem(i-1).setToolTip(self.get_toolTip(lab))
        # Log both to the GUI process stdout and (via dataReady) the log pane.
        logging.basicConfig(format='%(asctime)s, %(levelname)-8s %(message)s',
        datefmt="%m-%d %H:%M:%S",level=logging.INFO,handlers=[logging.StreamHandler(sys.stdout)])
        ########################
        # connect function to buttons
        ########################
        '''
        self.pushButton_insert.setStyleSheet("background-color : lightblue")
        self.pushButton_delete.setStyleSheet("background-color : lightblue")
        self.pushButton_open_star.setStyleSheet("background-color : lightblue")
        self.pushButton_3dmod.setStyleSheet("background-color : lightblue")
        self.button_deconov_dir.setStyleSheet("background-color : lightblue")
        self.button_mask_dir.setStyleSheet("background-color : lightblue")
        self.button_subtomo_dir.setStyleSheet("background-color : lightblue")
        self.button_result_dir_refine.setStyleSheet("background-color : lightblue")
        self.button_result_dir_predict.setStyleSheet("background-color : lightblue")
        self.button_subtomo_star_refine.setStyleSheet("background-color : lightblue")
        self.button_pretrain_model_refine.setStyleSheet("background-color : lightblue")
        self.button_tomo_star_predict.setStyleSheet("background-color : lightblue")
        self.button_pretrain_model_predict.setStyleSheet("background-color : lightblue")
        self.button_continue_iter.setStyleSheet("background-color : lightblue")
        self.pushButton_deconv.setStyleSheet("background-color : lightblue")
        self.pushButton_generate_mask.setStyleSheet("background-color : lightblue")
        self.pushButton_extract.setStyleSheet("background-color : lightblue")
        self.pushButton_refine.setStyleSheet("background-color : lightblue")
        self.pushButton_predict.setStyleSheet("background-color : lightblue")
        self.pushButton_predict_3dmod.setStyleSheet("background-color : lightblue")
        '''
        self.pushButton_insert.clicked.connect(self.copyRow)
        self.pushButton_delete.clicked.connect(self.removeRow)
        self.pushButton_open_star.clicked.connect(self.open_star)
        self.pushButton_3dmod.clicked.connect(self.view_3dmod)
        self.button_deconov_dir.clicked.connect(lambda: self.browseFolderSlot("deconv_dir"))
        self.button_mask_dir.clicked.connect(lambda: self.browseFolderSlot("mask_dir"))
        self.button_subtomo_dir.clicked.connect(lambda: self.browseFolderSlot("subtomo_dir"))
        self.button_result_dir_refine.clicked.connect(lambda: self.browseFolderSlot("result_dir_refine"))
        self.button_result_dir_predict.clicked.connect(lambda: self.browseFolderSlot("result_dir_predict"))
        self.button_subtomo_star_refine.clicked.connect(lambda: self.browseSlot("subtomo_star_refine"))
        self.button_pretrain_model_refine.clicked.connect(lambda: self.browseSlot("pretrain_model_refine"))
        self.button_tomo_star_predict.clicked.connect(lambda: self.browseSlot("tomo_star_predict"))
        self.button_pretrain_model_predict.clicked.connect(lambda: self.browseSlot("pretrain_model_predict"))
        self.button_continue_iter.clicked.connect(lambda: self.browseSlot("continue_from"))
        self.pushButton_deconv.clicked.connect(self.deconvolve)
        self.pushButton_generate_mask.clicked.connect(self.make_mask)
        self.pushButton_extract.clicked.connect(self.extract_subtomo)
        self.pushButton_refine.clicked.connect(self.refine)
        self.pushButton_predict.clicked.connect(self.predict)
        self.pushButton_predict_3dmod.clicked.connect(self.view_predict_3dmod)
        self.actionGithub.triggered.connect(self.openGithub)
        #########################
        #set icon location
        #########################
        #get the root path for isonet
        isonet_path = os.popen("which isonet.py").read()
        tmp = isonet_path.split("bin/isonet.py")
        root_path = tmp[0]
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap(root_path+"gui/icons/icon_folder.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.button_deconov_dir.setIcon(icon)
        self.button_mask_dir.setIcon(icon)
        self.button_subtomo_star_refine.setIcon(icon)
        self.button_subtomo_dir.setIcon(icon)
        self.button_pretrain_model_refine.setIcon(icon)
        self.button_result_dir_refine.setIcon(icon)
        self.button_tomo_star_predict.setIcon(icon)
        self.button_pretrain_model_predict.setIcon(icon)
        self.button_result_dir_predict.setIcon(icon)
        self.button_continue_iter.setIcon(icon)
        # Restore previously saved GUI field values.
        self.read_setting()
        ###Set up log file monitor###
        import datetime
        now = datetime.datetime.now()
        #create a empty log file
        if not self.model.isValid(self.model.log_file):
            os.system("echo {} > {}".format(now.strftime("%Y-%m-%d %H:%M:%S"), self.model.log_file))
        self.textBrowser_log.setText(self.model.getLogContent(self.model.log_file))
        self.textBrowser_log.moveCursor(QtGui.QTextCursor.End)
        #self.horizontalLayout_48.hide()
        #for widgets in self.horizontalLayout_44.children():
        #print(widgets.widget())
        #for widget in widgets.children():
        #print(widget)
        #    widget.hide()
        ####################
        #self.log_watcher = QtCore.QFileSystemWatcher([self.model.log_file])
        #self.log_watcher.fileChanged.connect(self.update_log)
    #connect to all the main function button to run the process in the background
    #cmd is the command need to be excuted, and btn pass the button object
    def start_process(self, cmd, btn):
        """Run `cmd` in a background QProcess, turning `btn` into a Stop button.

        Only one background job may run at a time: pressing the running
        job's own button (now labelled "Stop") kills the process; pressing
        any other action button pops a warning.
        NOTE(review): this uses self.mw.p while __init__ sets self.p —
        confirm which attribute owns the process handle.
        """
        if self.mw.p is None:  # No process running.
            self.mw.p = QProcess()
            #change the status of the current botton
            if btn.text() in ["Deconvolve","Generate Mask","Extract","Refine","Predict"]:
                # Remember the label so it can be restored when the job ends.
                self.model.btn_pressed_text = btn.text()
                btn.setText("Stop")
                btn.setStyleSheet('QPushButton {color: red;}')
            else:
                btn.setEnabled(False)
            self.mw.p.readyReadStandardOutput.connect(self.dataReady)
            self.mw.p.finished.connect(lambda: self.process_finished(btn))
            self.mw.p.start(cmd)
        elif btn.text() =="Stop":
            if self.mw.p:
                self.mw.p.kill()
            else:
                if self.model.btn_pressed_text:
                    btn.setText(self.model.btn_pressed_text)
        else:
            self.warn_window("Already runing another job, please wait until it finished!")
    def process_finished(self, btn):
        """Slot for QProcess.finished: restore the button and reload the table.

        Args:
            btn: the action button that launched the finished job.
        """
        if btn.text() == "Stop":
            # Restore the original action label remembered by start_process().
            if self.model.btn_pressed_text:
                btn.setText(self.model.btn_pressed_text)
            #btn.setText("Refine")
            self.model.btn_pressed_text = None
            btn.setStyleSheet("QPushButton {color: black;}")
        else:
            btn.setEnabled(True)
        # The job may have rewritten the star file; refresh the in-memory
        # copy and the table view, then mark the process slot free.
        self.model.read_star()
        setTableWidget(self.tableWidget, self.model.md)
        self.mw.p = None
#link to log window to display output of stdout
def dataReady(self):
cursor = self.textBrowser_log.textCursor()
#cursor.movePosition(cursor.End)
# have transfer byte string to unicode string
import string
printable = set(string.printable)
printable.add(u'\u2588')
txt = str(self.mw.p.readAll(),'utf-8')
#txt += self.mw.p.errorString()
printable_txt = "".join(list(filter(lambda x: x in printable, txt)))
if '[' in self.previous_log_line and '[' in printable_txt:
cursor.movePosition(cursor.StartOfLine, cursor.MoveAnchor)
cursor.movePosition(cursor.End, cursor.KeepAnchor)
cursor.removeSelectedText()
cursor.deletePreviousChar()
cursor.insertText(printable_txt)
f = open(self.model.log_file, 'a+')
f.write(printable_txt)
f.close()
self.previous_log_line = printable_txt
#self.textBrowser_log.ensureCursorVisible()
verScrollBar = self.textBrowser_log.verticalScrollBar()
scrollIsAtEnd = verScrollBar.maximum() - verScrollBar.value()
if scrollIsAtEnd <=100:
verScrollBar.setValue(verScrollBar.maximum()) # Scrolls to the bottom
#self.textBrowser_log.moveCursor(QtGui.QTextCursor.End)
    def removeRow(self):
        """Delete every selected table row and persist the change to the star file."""
        #print(self.tableWidget.selectionModel().selectedIndexes()[0].row())
        #print(self.tableWidget.selectionModel().selectedIndexes()[0].column())
        indices = self.tableWidget.selectionModel().selectedRows()
        if indices:
            # Remove bottom-up so earlier removals don't shift pending indices.
            for index in sorted(indices,reverse=True):
                self.tableWidget.removeRow(index.row())
            self.updateMD()
    def copyRow(self):
        """Insert new table row(s) and persist the change.

        Empty table: one row of per-column defaults. Rows selected: one copy
        per selected row. No selection: a copy of the last row. Generated
        columns (deconv/mask/corrected names, mask boundary) always reset
        to "None" instead of being copied.
        """
        rowCount = self.tableWidget.rowCount()
        columnCount = self.tableWidget.columnCount()
        if rowCount <=0 :
            # Empty table: seed the first row with column defaults.
            self.tableWidget.insertRow(self.tableWidget.rowCount())
            for j in range(columnCount):
                #self.model.md._setItemValue(it,Label(self.model.header[j+1]),self.tableWidget.item(i, j).text())
                #print(self.default_value(self.model.header[j+1]))
                self.tableWidget.setItem(0, j, QTableWidgetItem(self.default_value(self.model.header[j+1])))
                #print(self.tableWidget.item(0, j).text())
        else:
            indices = self.tableWidget.selectionModel().selectedRows()
            if indices:
                # Append one copy of each selected row.
                for index in sorted(indices):
                    self.tableWidget.insertRow(self.tableWidget.rowCount())
                    rowCount = self.tableWidget.rowCount()
                    for j in range(columnCount):
                        if self.model.header[j+1] in ["rlnDeconvTomoName","rlnMaskName","rlnCorrectedTomoName","rlnMaskBoundary"]:
                            self.tableWidget.setItem(rowCount-1, j, QTableWidgetItem("None"))
                            #self.tableWidget.cellChanged[rowCount-1, j].connect(self.updateMD)
                        else:
                            self.tableWidget.setItem(rowCount-1, j, QTableWidgetItem(self.tableWidget.item(index.row(), j).text()))
            else:
                # No selection: duplicate the last row.
                self.tableWidget.insertRow(self.tableWidget.rowCount())
                rowCount = self.tableWidget.rowCount()
                for j in range(columnCount):
                    if self.model.header[j+1] in ["rlnDeconvTomoName","rlnMaskName","rlnCorrectedTomoName","rlnMaskBoundary"]:
                        self.tableWidget.setItem(rowCount-1, j, QTableWidgetItem("None"))
                    elif not self.tableWidget.item(rowCount-2, j) is None:
                        self.tableWidget.setItem(rowCount-1, j, QTableWidgetItem(self.tableWidget.item(rowCount-2, j).text()))
        self.updateMD()
def default_value(self, label):
switcher = {
"rlnMicrographName": "None",
"rlnPixelSize": "1",
"rlnDefocus": "0",
"rlnNumberSubtomo":"100",
"rlnSnrFalloff":"1",
"rlnDeconvStrength": "1",
"rlnDeconvTomoName":"None",
"rlnMaskBoundary":"None",
"rlnMaskDensityPercentage": "50",
"rlnMaskStdPercentage": "50",
"rlnMaskName": "None"
}
return switcher.get(label, "None")
def switch_btn(self, btn):
switcher = {
"mask_dir": self.lineEdit_mask_dir,
"deconv_dir": self.lineEdit_deconv_dir,
"subtomo_dir": self.lineEdit_subtomo_dir,
"result_dir_refine": self.lineEdit_result_dir_refine,
"result_dir_predict": self.lineEdit_result_dir_predict,
"subtomo_star_refine":self.lineEdit_subtomo_star_refine,
"pretrain_model_refine":self.lineEdit_pretrain_model_refine,
"tomo_star_predict": self.lineEdit_tomo_star_predict,
"pretrain_model_predict":self.lineEdit_pretrain_model_predict,
"continue_from": self.lineEdit_continue_iter
}
return switcher.get(btn, "Invaid btn name")
def file_types(self, item):
switcher = {
"rlnMicrographName":"mrc or rec file (*.mrc *.rec) ;; All Files (*)",
"rlnDeconvTomoName":"mrc or rec file (*.mrc *.rec) ;; All Files (*)",
"rlnMaskName":"mrc or rec file (*.mrc *.rec) ;; All Files (*)",
"rlnMaskBoundary": "mod file (*.mod) ;; All Files (*)"
}
return switcher.get(item, "Invaid file types")
def get_toolTip(self,label):
switcher = {
"rlnMicrographName": "Your tomogram filenames",
"rlnPixelSize": "pixel size of your input tomograms",
"rlnDefocus": "estimated defocus value around 0 degree",
"rlnNumberSubtomo":"number of subtomograms to be extraced",
"rlnSnrFalloff":"SNR fall rate with the frequency",
"rlnDeconvStrength": "(1.0) Strength of the deconvolution",
"rlnDeconvTomoName":"automaticly saved deconved tomogram filename",
"rlnMaskBoundary":"model file that define your mask boundary(optional)",
"rlnMaskDensityPercentage": "The approximate percentage of pixels to keep based on their local pixel density",
"rlnMaskStdPercentage": "The approximate percentage of pixels to keep based on their local standard deviation",
"rlnMaskName": "automaticly saved mask tomogram filename"
}
return switcher.get(label, "None")
    def updateMD ( self ):
        """Rebuild self.model.md from the current table contents and write it
        to the tomogram star file.

        Empty or invalid cells are restored from the previous metadata
        snapshot (or "None" for rlnMaskBoundary), in both the star data and
        the visible table.
        """
        star_file = self.model.tomogram_star
        rowCount = self.tableWidget.rowCount()
        columnCount = self.tableWidget.columnCount()
        # Snapshot of the old items so bad cells can be rolled back.
        data = self.model.md._data
        self.model.md = MetaData()
        self.model.md.addLabels('rlnIndex')
        for j in range(columnCount):
            self.model.md.addLabels(self.model.header[j+1])
            #self.model.md.addLabels(self.tableWidget.horizontalHeaderItem(j).text())
        for i in range(rowCount):
            #TODO check the folder contains only tomograms.
            it = Item()
            self.model.md.addItem(it)
            # Row index is regenerated, not taken from the table.
            self.model.md._setItemValue(it,Label('rlnIndex'),str(i+1))
            for j in range(columnCount):
                try:
                    #print("update:",Label(self.model.header[j+1]),self.tableWidget.item(i, j).text())
                    if len(self.tableWidget.item(i, j).text()) <1:
                        # Empty cell: restore the previous value
                        # ("None" for the optional mask boundary column).
                        if self.model.header[j+1] != "rlnMaskBoundary":
                            previous_value = getattr(data[i],self.model.header[j+1])
                        else:
                            previous_value = "None"
                        self.model.md._setItemValue(it,Label(self.model.header[j+1]),previous_value)
                        self.tableWidget.setItem(i, j, QTableWidgetItem(str(previous_value)))
                    else:
                        self.model.md._setItemValue(it,Label(self.model.header[j+1]),self.tableWidget.item(i, j).text())
                        #self.model.md._setItemValue(it,Label(self.tableWidget.horizontalHeaderItem(j).text()),self.tableWidget.item(i, j).text())
                # NOTE(review): bare except — consider narrowing to Exception.
                except:
                    # Any conversion/access failure: roll the cell back.
                    previous_value = getattr(data[i],self.model.header[j+1])
                    self.model.md._setItemValue(it,Label(self.model.header[j+1]),previous_value)
                    self.tableWidget.setItem(i, j, QTableWidgetItem(str(previous_value)))
                    #print("error in seeting values for {}! set it to previous value automatically.".format(self.tableWidget.horizontalHeaderItem(j).text()))
        self.model.md.write(star_file)
def updateMDItem ( self, i, j ):
try:
current_value = self.tableWidget.item(i, j).text()
#self.model.md._setItemValue(self.mnodel.md._data[i],Label(self.model.header[j+1]),current_value)
#for row,it in enumerate(self.model.md):
# print(i,j)
# if row == i:
# self.model.md._setItemValue(it,Label(self.tableWidget.horizontalHeaderItem(j).text()),self.tableWidget.item(i, j).text())
self.updateMD()
except:
pass
    def browseSlot( self , btn ):
        ''' Called when the user presses the Browse button

        Opens a file dialog (with a filter chosen per target field) and
        stores the selected path — relativized against the cwd — in the
        line edit resolved via switch_btn(). Cancelling leaves the field
        unchanged.
        '''
        lineEdit = self.switch_btn(btn)
        pwd = os.getcwd().replace("\\","/")
        options = QtWidgets.QFileDialog.Options()
        options |= QtWidgets.QFileDialog.DontUseNativeDialog
        # Pick a name filter matching the kind of file this field expects.
        flt = "All Files (*)"
        if btn == "continue_from":
            flt = "json file (*.json);;All Files (*)"
        if btn == "subtomo_star_refine" or btn == "tomo_star_predict":
            flt = "star file (*.star);;All Files (*)"
        if btn == "pretrain_model_refine" or btn == "pretrain_model_predict":
            flt = "model file (*.h5);;All Files (*)"
        fileName, _ = QtWidgets.QFileDialog.getOpenFileName(
            None,
            "Choose File",
            "",
            flt,
            options=options)
        if fileName:
            #self.model.setFileName( fileName )
            #######
            #cmd = "echo choose file: {} >> log.txt ".format(fileName)
            #os.system(cmd)
            #self.logWindow.append("choose file: {}".format(fileName) )
            simple_name = self.model.sim_path(pwd,fileName)
            lineEdit.setText( simple_name )
            #self.logWindow.moveCursor(QtGui.QTextCursor.End)
            #######
            #self.refreshAll()
        #self.debugPrint( "Browse button pressed" )
def browseFolderSlot( self , btn):
'''
Called when the user presses the Browse folder button
TODO: add file name filter
'''
lineEdit = self.switch_btn(btn)
try:
pwd = os.getcwd().replace("\\","/")
dir_path=QtWidgets.QFileDialog.getExistingDirectory(None,"Choose Directory",pwd)
#self.model.setFolderName( dir_path )
#cmd = "echo choose folder: {} >> log.txt ".format(dir_path)
#os.system(cmd)
#self.logWindow.append("choose folder: {}".format(dir_path) )
#pwd = os.getcwd().replace("\\","/")
simple_path = self.model.sim_path(pwd,dir_path)
lineEdit.setText( simple_path )
#self.logWindow.moveCursor(QtGui.QTextCursor.End)
#self.refreshAll()
except:
##TODO: record to log.
pass
def browseSlotTable( self , i, j):
''' Called when the user presses the Browse folder button
'''
if self.model.header[j+1] in ["rlnMicrographName", "rlnMaskBoundary","rlnDeconvTomoName","rlnMaskName"]:
try:
options = QtWidgets.QFileDialog.Options()
options |= QtWidgets.QFileDialog.DontUseNativeDialog
fileName, _ = QtWidgets.QFileDialog.getOpenFileName(
None,
"Choose File",
"",
self.file_types(self.model.header[j+1]),
options=options)
if not fileName:
fileName = self.tableWidget.item(i, j).text()
pwd = os.getcwd().replace("\\","/")
simple_path = self.model.sim_path(pwd,fileName)
self.tableWidget.setItem(i, j, QTableWidgetItem(simple_path))
except:
##TODO: record to log.
pass
else:
pass
def deconvolve( self ):
tomogram_star = self.model.tomogram_star
cmd = "isonet.py deconv {} ".format(tomogram_star)
if self.lineEdit_deconv_dir.text():
cmd = "{} --deconv_folder {}".format(cmd, self.lineEdit_deconv_dir.text())
if self.lineEdit_tomo_index_deconv.text():
cmd = "{} --tomo_idx {}".format(cmd, self.lineEdit_tomo_index_deconv.text())
if self.lineEdit_ncpu.text():
cmd = "{} --ncpu {}".format(cmd, self.lineEdit_ncpu.text())
if self.lineEdit_highpassnyquist.text():
cmd = "{} --highpassnyquist {}".format(cmd, self.lineEdit_highpassnyquist.text())
if self.lineEdit_chunk_size.text():
cmd = "{} --chunk_size {}".format(cmd, self.lineEdit_chunk_size.text())
if self.lineEdit_overlap.text():
cmd = "{} --overlap {}".format(cmd, self.lineEdit_overlap.text())
self.save_setting()
if self.checkBox_only_print_command_prepare.isChecked() and self.pushButton_deconv.text() == 'Deconvolve':
print(cmd)
#logging.info(cmd)
else:
self.start_process(cmd,self.pushButton_deconv)
def make_mask( self ):
#print("#####making mask############")
tomogram_star = self.model.tomogram_star
cmd = "isonet.py make_mask {} ".format(tomogram_star)
if self.lineEdit_mask_dir.text():
cmd = "{} --mask_folder {}".format(cmd, self.lineEdit_mask_dir.text())
if self.lineEdit_patch_size.text():
cmd = "{} --patch_size {}".format(cmd, self.lineEdit_patch_size.text())
if not self.checkBox_use_deconv_mask.isChecked():
cmd = "{} --use_deconv_tomo {}".format(cmd, False)
if self.lineEdit_tomo_index_mask.text():
cmd = "{} --tomo_idx {}".format(cmd, self.lineEdit_tomo_index_mask.text())
if self.lineEdit_z_crop.text():
cmd = "{} --z_crop {}".format(cmd, self.lineEdit_z_crop.text())
self.save_setting()
if self.checkBox_only_print_command_prepare.isChecked() and self.pushButton_generate_mask.text() == 'Generate Mask':
print(cmd)
else:
self.start_process(cmd,self.pushButton_generate_mask)
    def extract_subtomo( self ):
        """Assemble and run the 'isonet.py extract' command from the GUI fields.

        Non-empty line edits add CLI flags; an unchecked 'use deconv' box
        passes an explicit ``--use_deconv_tomo False``.  Settings are saved
        first, then the command is printed (dry-run) or started.
        """
        tomogram_star = self.model.tomogram_star
        cmd = "isonet.py extract {} ".format(tomogram_star)
        if self.lineEdit_subtomo_dir.text():
            cmd = "{} --subtomo_folder {}".format(cmd, self.lineEdit_subtomo_dir.text())
        if self.lineEdit_subtomo_star_extract.text():
            cmd = "{} --subtomo_star {}".format(cmd, self.lineEdit_subtomo_star_extract.text())
        if self.lineEdit_cube_size_extract.text():
            cmd = "{} --cube_size {}".format(cmd, self.lineEdit_cube_size_extract.text())
        if not self.checkBox_use_deconv_extract.isChecked():
            cmd = "{} --use_deconv_tomo {}".format(cmd, False)
        if self.lineEdit_tomo_index_extract.text():
            cmd = "{} --tomo_idx {}".format(cmd, self.lineEdit_tomo_index_extract.text())
        self.save_setting()
        if self.checkBox_only_print_command_prepare.isChecked() and self.pushButton_extract.text() == 'Extract':
            print(cmd)
        else:
            self.start_process(cmd,self.pushButton_extract)
    def refine( self ):
        """Assemble and run the 'isonet.py refine' command from the GUI fields.

        Every non-empty line edit on the refine tab adds one CLI flag;
        unchecked normalization/batch-norm boxes pass explicit False values
        and the noise-mode combo is only forwarded when it is not the
        default 'noFilter'.  Settings are saved first, then the command is
        printed (dry-run checkbox) or launched via start_process.
        """
        # fall back to the conventional star-file name when the field is empty
        subtomo_star = self.lineEdit_subtomo_star_refine.text() if self.lineEdit_subtomo_star_refine.text() else "subtomo.star"
        cmd = "isonet.py refine {} ".format(subtomo_star)
        if self.lineEdit_gpuID_refine.text():
            cmd = "{} --gpuID {}".format(cmd, self.lineEdit_gpuID_refine.text())
        if self.lineEdit_pretrain_model_refine.text():
            cmd = "{} --pretrained_model {}".format(cmd, self.lineEdit_pretrain_model_refine.text())
        if self.lineEdit_continue_iter.text():
            cmd = "{} --continue_from {}".format(cmd, self.lineEdit_continue_iter.text())
        if self.lineEdit_result_dir_refine.text():
            cmd = "{} --result_dir {}".format(cmd, self.lineEdit_result_dir_refine.text())
        if self.lineEdit_preprocessing_ncpus.text():
            cmd = "{} --preprocessing_ncpus {}".format(cmd, self.lineEdit_preprocessing_ncpus.text())
        if self.lineEdit_iteration.text():
            cmd = "{} --iterations {}".format(cmd, self.lineEdit_iteration.text())
        if self.lineEdit_batch_size.text():
            cmd = "{} --batch_size {}".format(cmd, self.lineEdit_batch_size.text())
        if self.lineEdit_epoch.text():
            cmd = "{} --epochs {}".format(cmd, self.lineEdit_epoch.text())
        if self.lineEdit_steps_per_epoch.text():
            cmd = "{} --steps_per_epoch {}".format(cmd, self.lineEdit_steps_per_epoch.text())
        if self.lineEdit_lr.text():
            cmd = "{} --learning_rate {}".format(cmd, self.lineEdit_lr.text())
        if self.lineEdit_noise_level.text():
            cmd = "{} --noise_level {}".format(cmd, self.lineEdit_noise_level.text())
        if self.lineEdit_noise_start_iter.text():
            cmd = "{} --noise_start_iter {}".format(cmd, self.lineEdit_noise_start_iter.text())
        if not self.comboBox_noise_mode.currentText() == "noFilter":
            cmd = "{} --noise_mode {}".format(cmd, self.comboBox_noise_mode.currentText())
        if self.lineEdit_drop_out.text():
            cmd = "{} --drop_out {}".format(cmd, self.lineEdit_drop_out.text())
        if self.lineEdit_network_depth.text():
            cmd = "{} --unet_depth {}".format(cmd, self.lineEdit_network_depth.text())
        if self.lineEdit_convs_per_depth.text():
            cmd = "{} --convs_per_depth {}".format(cmd, self.lineEdit_convs_per_depth.text())
        if self.lineEdit_kernel.text():
            cmd = "{} --kernel {}".format(cmd, self.lineEdit_kernel.text())
        if self.lineEdit_filter_base.text():
            cmd = "{} --filter_base {}".format(cmd, self.lineEdit_filter_base.text())
        if self.checkBox_pool.isChecked():
            cmd = "{} --pool {}".format(cmd, True)
        if not self.checkBox_batch_normalization.isChecked():
            cmd = "{} --batch_normalization {}".format(cmd, False)
        if not self.checkBox_normalization_percentile.isChecked():
            cmd = "{} --normalize_percentile {}".format(cmd, False)
        self.save_setting()
        if self.checkBox_only_print_command_refine.isChecked() and self.pushButton_refine.text() == 'Refine':
            print(cmd)
        else:
            self.start_process(cmd,self.pushButton_refine)
    def predict( self ):
        """Assemble and run the 'isonet.py predict' command from the GUI fields.

        Aborts with a warning dialog when no valid trained model path has
        been entered.  The GPU id defaults to '0,1,2,3' when the field is
        empty.  Settings are saved, then the command is printed (dry-run)
        or launched via start_process.
        """
        tomo_star = self.lineEdit_tomo_star_predict.text() if self.lineEdit_tomo_star_predict.text() else "tomograms.star"
        gpuID = self.lineEdit_gpuID_predict.text() if self.lineEdit_gpuID_predict.text() else '0,1,2,3'
        cmd = "isonet.py predict {}".format(tomo_star)
        # the model path is mandatory -- bail out early if missing/invalid
        if self.lineEdit_pretrain_model_predict.text() and self.model.isValid(self.lineEdit_pretrain_model_predict.text()):
            cmd = "{} {}".format(cmd, self.lineEdit_pretrain_model_predict.text())
        else:
            self.warn_window("no trained model detected")
            return
        # if self.lineEdit_gpuID_predict.text():
        #     cmd = "{} --gpuID {}".format(cmd, self.lineEdit_gpuID_predict.text())
        cmd = "{} --gpuID {}".format(cmd,gpuID)
        if self.lineEdit_tomo_index_predict.text():
            cmd = "{} --tomo_idx {}".format(cmd, self.lineEdit_tomo_index_predict.text())
        if self.lineEdit_result_dir_predict.text():
            cmd = "{} --output_dir {}".format(cmd, self.lineEdit_result_dir_predict.text())
        if self.lineEdit_cube_size_predict.text():
            cmd = "{} --cube_size {}".format(cmd, self.lineEdit_cube_size_predict.text())
        if self.lineEdit_crop_size_predict.text():
            cmd = "{} --crop_size {}".format(cmd, self.lineEdit_crop_size_predict.text())
        if not self.checkBox_use_deconv_predict.isChecked():
            cmd = "{} --use_deconv_tomo {}".format(cmd, False)
        self.save_setting()
        if self.checkBox_only_print_command_predict.isChecked() and self.pushButton_predict.text() == "Predict":
            print(cmd)
        else:
            self.start_process(cmd,self.pushButton_predict)
    def view_3dmod(self):
        """Open the selected table items' .mrc/.rec files with 3dmod.

        Selected items are grouped by table row: each row gets its own
        3dmod invocation (chained with '; 3dmod'), and a non-"None" value
        in the rlnMaskBoundary column is appended as that row's model
        file.  Warns when nothing selected resolves to a volume file.
        """
        slected_items = self.tableWidget.selectedItems()
        if len(slected_items) > 0:
            cmd = "3dmod"
            model_file=""
            previous_i = -1
            for item in slected_items:
                i = item.row()
                j = item.column()
                # row changed: flush the previous row's command and start a new one
                if previous_i != -1 and i != previous_i:
                    cmd = "{} {} {}".format(cmd,model_file,"; 3dmod")
                    model_file=""
                item_text = self.tableWidget.item(i, j).text()
                if item_text[-4:] == '.mrc' or item_text[-4:] == '.rec':
                    cmd = "{} {}".format(cmd,item_text)
                # header is offset by one relative to column index (TODO confirm)
                if self.model.header[j+1]=="rlnMaskBoundary" and item_text != "None":
                    model_file = "{}".format(item_text)
                previous_i = i
            cmd = "{} {}".format(cmd,model_file)
            #print(cmd)
            if cmd != "3dmod":
                os.system(cmd)
            else:
                self.warn_window("selected items are not mrc or rec file(s)")
        else:
            self.warn_window("selected items are not mrc or rec file(s)")
def view_predict_3dmod(self):
try:
result_dir_predict = self.lineEdit_result_dir_predict.text()
if len(result_dir_predict) < 1:
result_dir_predict = 'corrected_tomos'
list_file = os.listdir(result_dir_predict)
cmd = "3dmod"
for f in list_file:
if f[-4:] == ".mrc" or f[-4:] == ".rec":
cmd = "{} {}/{}".format(cmd,result_dir_predict,f)
if cmd != "3dmod":
os.system(cmd)
else:
self.warn_window("no mrc or rec file(s) detected in results folder: {}!".format(result_dir_predict))
except Exception:
print('pass')
def open_star( self ):
options = QtWidgets.QFileDialog.Options()
options |= QtWidgets.QFileDialog.DontUseNativeDialog
fileName, _ = QtWidgets.QFileDialog.getOpenFileName(
None,
"Choose File",
"",
"Star file (*.star)",
options=options)
if fileName:
try:
tomo_file = self.model.sim_path(self.model.pwd, fileName)
read_result = self.model.read_star_gui(tomo_file)
if read_result == 1:
self.warn_window("The input star file is not legid!")
else:
setTableWidget(self.tableWidget, self.model.md)
except:
print("warning")
pass
def read_setting(self):
if os.path.exists(self.setting_file):
data = {}
try:
with open(self.setting_file) as f:
for line in f:
(k, v) = line.split(":")
data[k] = v.strip()
self.lineEdit_deconv_dir.setText(data['deconv_dir'])
self.lineEdit_tomo_index_deconv.setText(data['tomo_index_deconv'])
self.lineEdit_preprocessing_ncpus.setText(data['preprocessing_ncpus'])
self.lineEdit_chunk_size.setText(data['chunk_size'])
self.lineEdit_highpassnyquist.setText(data['highpassnyquist'])
self.lineEdit_overlap.setText(data['overlap'])
self.lineEdit_mask_dir.setText(data['mask_dir'])
self.lineEdit_tomo_index_mask.setText(data['tomo_index_mask'])
self.checkBox_use_deconv_mask.setChecked(data['use_deconv_mask'] == 'True')
#self.checkBox_use_deconv_mask.setChecked(data['use_deconv_mask'])
self.lineEdit_patch_size.setText(data['patch_size'])
self.lineEdit_z_crop.setText(data['z_crop'])
self.lineEdit_subtomo_dir.setText(data['subtomo_dir'])
self.lineEdit_subtomo_star_extract.setText(data['subtomo_star_extract'])
self.checkBox_use_deconv_extract.setChecked(data['use_deconv_extract'] == 'True')
self.lineEdit_cube_size_extract.setText(data['cube_size_extract'])
self.lineEdit_tomo_index_extract.setText(data['tomo_index_extract'])
self.lineEdit_subtomo_star_refine.setText(data['subtomo_star_refine'])
self.lineEdit_gpuID_refine.setText(data['gpuID_refine'])
self.lineEdit_pretrain_model_refine.setText(data['pretrain_model_refine'])
self.lineEdit_continue_iter.setText(data['continue_iter'])
self.lineEdit_result_dir_refine.setText(data['result_dir_refine'])
self.lineEdit_ncpu.setText(data['ncpu'])
self.lineEdit_epoch.setText(data['epoch'])
self.lineEdit_iteration.setText(data['iteration'])
self.lineEdit_lr.setText(data['lr'])
self.lineEdit_steps_per_epoch.setText(data['steps_per_epoch'])
self.lineEdit_batch_size.setText(data['batch_size'])
self.lineEdit_noise_level.setText(data['noise_level'])
self.lineEdit_noise_start_iter.setText(data['noise_start_iter'])
self.comboBox_noise_mode.setCurrentText(data['noise_mode'])
self.lineEdit_drop_out.setText(data['drop_out'])
self.lineEdit_network_depth.setText(data['network_depth'])
self.lineEdit_convs_per_depth.setText(data['convs_per_depth'])
self.lineEdit_kernel.setText(data['kernel'])
self.lineEdit_filter_base.setText(data['filter_base'])
self.checkBox_pool.setChecked(data['pool'] == 'True')
self.checkBox_batch_normalization.setChecked(data['batch_normalization'] == 'True')
self.checkBox_normalization_percentile.setChecked(data['normalization_percentile'] == 'True')
self.lineEdit_tomo_star_predict.setText(data['tomo_star_predict'])
self.lineEdit_gpuID_predict.setText(data['gpuID_predict'])
self.lineEdit_tomo_index_predict.setText(data['tomo_index_predict'])
self.lineEdit_pretrain_model_predict.setText(data['pretrain_model_predict'])
self.lineEdit_cube_size_predict.setText(data['cube_size_predict'])
self.lineEdit_result_dir_predict.setText(data['result_dir_predict'])
self.lineEdit_crop_size_predict.setText(data['crop_size_predict'])
self.checkBox_use_deconv_predict.setChecked(data['use_deconv_predict'] == 'True')
except:
print("error reading {}!".format(self.setting_file))
    def save_setting(self):
        """Persist every GUI field to the settings file, one ``key:value`` per line.

        Checkboxes are written as the strings 'True'/'False', which is what
        read_setting compares against.  Errors are reported but not raised,
        so a failed save never interrupts a running job.
        """
        param = {}
        param['deconv_dir'] = self.lineEdit_deconv_dir.text()
        param['tomo_index_deconv'] = self.lineEdit_tomo_index_deconv.text()
        param['preprocessing_ncpus'] = self.lineEdit_preprocessing_ncpus.text()
        param['chunk_size'] = self.lineEdit_chunk_size.text()
        param['highpassnyquist'] = self.lineEdit_highpassnyquist.text()
        param['overlap'] = self.lineEdit_overlap.text()
        param['mask_dir'] = self.lineEdit_mask_dir.text()
        param['tomo_index_mask'] = self.lineEdit_tomo_index_mask.text()
        param['use_deconv_mask'] = self.checkBox_use_deconv_mask.isChecked()
        param['patch_size'] = self.lineEdit_patch_size.text()
        param['z_crop'] = self.lineEdit_z_crop.text()
        param['subtomo_dir'] = self.lineEdit_subtomo_dir.text()
        param['subtomo_star_extract'] = self.lineEdit_subtomo_star_extract.text()
        param['use_deconv_extract'] = self.checkBox_use_deconv_extract.isChecked()
        param['cube_size_extract'] = self.lineEdit_cube_size_extract.text()
        param['tomo_index_extract'] = self.lineEdit_tomo_index_extract.text()
        param['subtomo_star_refine'] = self.lineEdit_subtomo_star_refine.text()
        param['gpuID_refine'] = self.lineEdit_gpuID_refine.text()
        param['pretrain_model_refine'] = self.lineEdit_pretrain_model_refine.text()
        param['continue_iter'] = self.lineEdit_continue_iter.text()
        param['result_dir_refine'] = self.lineEdit_result_dir_refine.text()
        param['ncpu'] = self.lineEdit_ncpu.text()
        param['epoch'] = self.lineEdit_epoch.text()
        param['iteration'] = self.lineEdit_iteration.text()
        param['lr'] = self.lineEdit_lr.text()
        param['steps_per_epoch'] = self.lineEdit_steps_per_epoch.text()
        param['batch_size'] = self.lineEdit_batch_size.text()
        param['noise_level'] = self.lineEdit_noise_level.text()
        param['noise_start_iter'] = self.lineEdit_noise_start_iter.text()
        param['noise_mode'] = self.comboBox_noise_mode.currentText()
        param['drop_out'] = self.lineEdit_drop_out.text()
        param['network_depth'] = self.lineEdit_network_depth.text()
        param['convs_per_depth'] = self.lineEdit_convs_per_depth.text()
        param['kernel'] = self.lineEdit_kernel.text()
        param['filter_base'] = self.lineEdit_filter_base.text()
        param['pool'] = self.checkBox_pool.isChecked()
        param['batch_normalization'] = self.checkBox_batch_normalization.isChecked()
        param['normalization_percentile'] = self.checkBox_normalization_percentile.isChecked()
        param['tomo_star_predict'] = self.lineEdit_tomo_star_predict.text()
        param['gpuID_predict'] = self.lineEdit_gpuID_predict.text()
        param['tomo_index_predict'] = self.lineEdit_tomo_index_predict.text()
        param['pretrain_model_predict'] = self.lineEdit_pretrain_model_predict.text()
        param['cube_size_predict'] = self.lineEdit_cube_size_predict.text()
        param['result_dir_predict'] = self.lineEdit_result_dir_predict.text()
        param['crop_size_predict'] = self.lineEdit_crop_size_predict.text()
        param['use_deconv_predict'] = self.checkBox_use_deconv_predict.isChecked()
        try:
            with open(self.setting_file, 'w') as f:
                for key, value in param.items():
                    f.write("{}:{}\n".format(key,value))
        except:
            print("error writing {}!".format(self.setting_file))
def openGithub(self):
import webbrowser
webbrowser.open(self.model.github_addr)
def warn_window(self,text):
msg = QMessageBox()
msg.setWindowTitle("Warning!")
msg.setText(text)
msg.setStandardButtons(QMessageBox.Ok)
msg.setIcon(QMessageBox.Warning)
msg.exec_()
class MyWindow(QtWidgets.QMainWindow):
    """Main window that confirms exit and optionally keeps or kills a running job.

    ``self.p`` holds the subprocess of the currently running job (or None);
    it is set elsewhere by the UI class that owns this window.
    """
    def __init__(self):
        super().__init__()
        # handle of the background job process; None when nothing is running
        self.p = None
    def closeEvent(self, event):
        """Ask for confirmation before closing; when a job is running, let the
        user keep it in the background (Yes), kill it (No), or cancel (Cancel).

        The event is ignored first and only accepted on a confirming answer,
        so Cancel / No leaves the window open.
        """
        if self.p:
            result = QtWidgets.QMessageBox.question(self,
                          "Confirm Exit...",
                          "Do you want to continue the existing job in the background?",
                          QtWidgets.QMessageBox.Yes| QtWidgets.QMessageBox.No | QtWidgets.QMessageBox.Cancel)
            event.ignore()
            if result == QtWidgets.QMessageBox.Yes:
                # leave the job running and close the window
                event.accept()
            if result == QtWidgets.QMessageBox.No:
                # stop the job, then close
                self.p.kill()
                event.accept()
            #kill the old process
        else:
            result = QtWidgets.QMessageBox.question(self,
                          "Confirm Exit...",
                          "Do you want to exit? ",
                          QtWidgets.QMessageBox.Yes| QtWidgets.QMessageBox.No )
            event.ignore()
            if result == QtWidgets.QMessageBox.Yes:
                event.accept()
            if result == QtWidgets.QMessageBox.No:
                pass
            #kill the old process
#kill the old process
stylesheet = """
QWidget #tab, #tab_2, #tab_3{
background-color: rgb(253,247,226)
}
QTabWidget{
background: rgb(144,160,187)
}
QPushButton {
background: rgb(239,221,241)
}
"""
def main():
    """Entry point: create the Qt application, apply the global stylesheet,
    build the main window with its generated UI, and run the event loop.
    """
    app = QtWidgets.QApplication(sys.argv)
    app.setStyleSheet(stylesheet)
    main_window = MyWindow()
    #MainWindow = QtWidgets.QMainWindow()
    ui = MainWindowUIClass()
    ui.setupUi(main_window)
    main_window.show()
    sys.exit(app.exec_())


# Guarded so importing this module (e.g. for tests) no longer launches the GUI.
if __name__ == "__main__":
    main()
| IsoNet-cryoET/IsoNet | gui/Isonet_star_app.py | Isonet_star_app.py | py | 43,739 | python | en | code | 49 | github-code | 36 |
27370619161 | import matplotlib.pyplot as plt
from random_walk import RandomWalk
# cd Documents/python_work/data_visualization
# Generate and display random walks until the user answers 'n'.
while True:
    # Create instance of RandomWalk with 5000 points.
    rw = RandomWalk(5000)
    rw.fill_walk()
    # Set the size of the interactive window.
    plt.figure(dpi=128, figsize=(10, 5))
    # Point indices for a colour gradient -- only used by the commented-out
    # scatter call below; harmless leftover for the plt.plot path.
    point_numbers = list(range(rw.num_points))
    # plt.scatter(rw.x_values, rw.y_values, c=point_numbers, cmap=plt.cm.Blues,
    #     s=1)
    plt.plot(rw.x_values, rw.y_values, c='blue')
    # Start and end points (green and red, respectively).
    plt.scatter(0, 0, c='green', s=100)
    plt.scatter(rw.x_values[-1], rw.y_values[-1], c='red', s=100)
    # Remove axes (currently disabled).
    # plt.axes().get_xaxis().set_visible(False)
    # plt.axes().get_yaxis().set_visible(False)
    plt.show()
    keep_running = input("Make another walk? (y/n): ")
    if keep_running == 'n':
        break
| nazeern/python_crash_course | data_visualization/rw_visual.py | rw_visual.py | py | 923 | python | en | code | 0 | github-code | 36 |
73945056424 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
ROS Node to accept commands of "wheel_command" and run motors using the Pololu
DRV8835 Raspberry Pi Hat
"""
import rospy
from std_msgs.msg import Float32
from basic_motors_and_sensors.msg import WheelCommands
from pololu_drv8835_rpi import motors, MAX_SPEED # MAX_SPEED is 480 (hard-coded)
# Register this process with the ROS master before any publishers/subscribers.
rospy.init_node("motor_node", anonymous=False)
def listener():
    """Subscribe to /wheel_command and block until the node shuts down."""
    sub = rospy.Subscriber("/wheel_command", WheelCommands, set_wheel_command)
    rospy.spin() # keep the node from exiting
def set_wheel_command(msg_in):
    """Set wheel voltages when a new command comes in"""
    wheel_command_left, wheel_command_right = (msg_in.left, msg_in.right)
    print(f"Got wheel commands l:{wheel_command_left} r:{wheel_command_right}")
    # Left command is negated -- presumably the left motor is mounted
    # mirrored; TODO confirm against the robot's wiring.
    motors.setSpeeds(-int(wheel_command_left), int(wheel_command_right))
if __name__ == "__main__":
    try:
        listener()
    except rospy.ROSInterruptException:
        # normal shutdown path (Ctrl-C / node shutdown)
        pass
    finally:
        # Always stop both motors on exit.  The original duplicated the stop
        # calls in the except branch and after the try, and skipped them
        # entirely if any other exception escaped; 'finally' covers all paths.
        motors.motor1.setSpeed(0)
        motors.motor2.setSpeed(0)
| macuser47/ME439_Robot | src/basic_motors_and_sensors/src/motor_node.py | motor_node.py | py | 1,092 | python | en | code | 0 | github-code | 36 |
6433676498 | import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import torchvision
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
import numpy as np
import math
import yaml
import pickle
import pprint
import os
import logging
import sys
import data_loaders
import nets
import losses
import utils
import setup
# Run on GPU when available; tensors and models below are moved to this device.
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
# Fix RNG seeds for reproducible training runs.
torch.manual_seed(787)
torch.cuda.manual_seed(787)
'''
#SECTION VAE Helpers
'''
def sample_n_frames(init_frames, ts, dt, ae, mu, sigma, n_generate=64):
    """Generate the next ``n_generate`` frames conditioned on ``init_frames``.

    Encodes the conditioning frames, rolls the latent state forward with the
    learned SDE (drift ``mu``, diffusion ``sigma``) in steps of ``dt``
    starting at time ``ts[-1]``, then decodes each sampled latent back to a
    frame.  Also writes a plot of the latent trajectory to
    ``savepath/latent_traj.png`` as a side effect.

    Returns (z_samples, frames): the CPU latent trajectory and the decoded
    frames (on ``device``).
    """
    # function to generate the next n frames conditioned on the input
    with torch.no_grad():
        # Get the latent variables
        q_mu, q_sigma, det = ae.encode(init_frames)
        _, z, _ = ae.get_increments(q_mu, q_sigma)
        z_samples = torch.zeros((n_generate, z.shape[1]))
        # start the rollout from the last encoded latent state
        z = z[-1,:].unsqueeze(0)
        # sample in z according to the learned SDE
        for i in range(n_generate):
            z_n = ae.get_next_z(z, ts[-1].unsqueeze(0) + i*dt, dt, mu, sigma)
            z_samples[i,:] = z_n.clone()
            z = z_n
    global savepath
    plots_list = [z_samples.detach().cpu().numpy()]
    plot_titles = ['Latent Traj']
    utils.plot_subplots(plots_list, plot_titles, os.path.join(savepath, 'latent_traj.png'), plot_type='plot', axis=True)
    # every generated latent is decoded against the FIRST conditioning frame;
    # the repeat assumes 4-D image input (B,C,H,W) -- TODO confirm for 1-D data
    conditional_frame = init_frames[0].unsqueeze(0).repeat(z_samples.size(0),1,1,1)
    if det is not None:
        # append the deterministic latent component before decoding
        in_z = torch.cat((z_samples.to(device), det[-1].repeat(z_samples.shape[0],1)), dim = 1)
    else:
        in_z = z_samples.to(device)
    frames = ae.decode(in_z, x=conditional_frame)
    return z_samples, frames
def plot_images(ae, mu, sigma, frames, frames_hat, dt, ts, l2_small):
    """Write diagnostic image grids and sample movies for the current model.

    Samples two latent trajectories (one with the learned ``sigma``, one with
    ``2*sigma``), builds comparison grids of original vs reconstructed vs
    sampled frames, and saves PNGs/GIFs under the global ``savepath``.
    ``l2_small`` selects the output filename for the single-step samples
    ('train_sample_best.png' when the L2 loss improved).
    """
    # plot latent trajectories
    # plot next frame reconstructions
    z_samples, sampled_frames = sample_n_frames(frames[:2],
                                                ts[:2],
                                                dt,
                                                ae.eval(),
                                                mu,
                                                sigma)
    # second rollout with doubled diffusion to visualize sample diversity
    _, sampled_frames2 = sample_n_frames(frames[:2],
                                         ts[:2],
                                         dt,
                                         ae.eval(),
                                         mu,
                                         sigma*2)
    # create the image grids
    im_grid_hat_single = torchvision.utils.make_grid(frames_hat[:64].detach().cpu(), pad_value=1, normalize=True)
    im_grid_hat = torchvision.utils.make_grid(sampled_frames[:64].detach().cpu(), pad_value=1, normalize=True)
    im_grid = torchvision.utils.make_grid(frames[:64].detach().cpu(), pad_value=1, normalize=True)
    # interleave rows of originals and samples for side-by-side comparison
    odd_rows = []
    for row in range(4):
        odd_rows.append(frames[row*8:(row+1)*8])
        odd_rows.append(sampled_frames[row*8:(row+1)*8])
    comp_grid = torchvision.utils.make_grid(torch.cat(odd_rows), pad_value=1, normalize=True)
    plots_list = [comp_grid.cpu().numpy().transpose((1,2,0))]
    plot_titles = ['Comparison']
    utils.plot_subplots(plots_list, plot_titles, os.path.join(savepath, 'train_comparison.png'))
    plots_list = [im_grid_hat.numpy().transpose((1,2,0))]
    plot_titles = ['Sampled (trajectory)']
    utils.plot_subplots(plots_list, plot_titles, os.path.join(savepath, 'train_sample_traj.png'))
    # save the images
    plots_list = [im_grid.numpy().transpose((1,2,0)),im_grid_hat_single.numpy().transpose((1,2,0))]
    plot_titles = ['Original','Sampled (single)']
    if l2_small:
        utils.plot_subplots(plots_list, plot_titles, os.path.join(savepath, 'train_sample_best.png'))
    else:
        utils.plot_subplots(plots_list, plot_titles, os.path.join(savepath, 'train_sample.png'))
    # save the movies
    utils.save_gif(sampled_frames.detach().cpu(), os.path.join(savepath, 'movies/train_sample_traj.gif'))
    utils.save_gif(sampled_frames2.detach().cpu(), os.path.join(savepath, 'movies/train_sample_traj2.gif'))
def save_nets(ae, mu, sigma, suffix):
    '''
    Routine to save the current state of our network
    ae : autoencoder network
    mu : latent drift network (nets.MLP/Lin/Well) or a plain parameter/object
    sigma : latent diffusion network (nets.MLP) or a plain parameter/object
    suffix : str that defines how we want to save the network
    Networks are saved as state dicts under savepath/saved_nets/; other
    mu/sigma objects are pickled.  Note the pickles land in savepath itself,
    not in saved_nets/ -- kept as-is so existing loaders keep working.
    '''
    # save all the networks
    torch.save(ae.state_dict(), os.path.join(savepath,'saved_nets/ae_{}.pth'.format(suffix)))
    # isinstance (instead of type(...) ==) also covers subclasses of the nets classes
    if isinstance(mu, (nets.MLP, nets.Lin, nets.Well)):
        torch.save(mu.state_dict(), os.path.join(savepath,'saved_nets/mu_{}.pth'.format(suffix)))
    else:
        with open(os.path.join(savepath,'mu_{}.pkl'.format(suffix)),'wb') as f:
            pickle.dump(mu, f)
    if isinstance(sigma, nets.MLP):
        torch.save(sigma.state_dict(), os.path.join(savepath,'saved_nets/sigma_{}.pth'.format(suffix)))
    else:
        with open(os.path.join(savepath,'sigma_{}.pkl'.format(suffix)),'wb') as f:
            pickle.dump(sigma, f)
def train(ae,
mu,
sigma,
dt,
train_data,
val_data,
optimizer,
scheduler,
n_epochs,
data_params,
**kwargs):
'''
The main training routine:
ae : neural network (torch.Module subclass) that represents our autoencoder
mu : network or parameter that describes the latent drift
sigma : network or parameter that describes the latent diffusion
dt : time step
train_data : dataloader with the training data
val_data : dataloader with validation data
optimizer : optimization algorithm torch.optim
scheduler : lr decay schedule
n_epochs : number of epochs to run
data_params : parameters associated with the dataset
returns statistics with respect to training
'''
global savepath
global loss_type
train_dataset = train_data.dataset.dataset
val_dataset = val_data.dataset.dataset
try:
inner_num = data_params['inner_iter']
except:
inner_num = 1
if n_epochs > 1000:
reserve_epoch = 499
else:
reserve_epoch = 49
# plotting parameters
l2_small = True
l2_small_valid = True
losses_train = []
losses_valid = []
try:
plot_freq = data_params['plot_freq']
except KeyError:
plot_freq = 50
try:
plot_train = data_params['plot_train']
except KeyError:
plot_train = True
# setup the stats dict
stats = {'kl': np.Inf,
'l2' : np.Inf,
'l2_valid': np.Inf,
'kl_valid': np.Inf,
'mu_mse': 0,
'mu_mse_valid': 0,
'mu_rel': 0,
'mu_rel_valid': 0,
'sde_mse': 0,
'sde_mse_valid': 0,
'sde_rel': 0,
'sde_rel_valid': 0,
'val_cond_met': False}
for epoch in range(n_epochs):
ae.train()
mu.train()
#sigma.train()
for idx, (frames, ts) in enumerate(train_data):
# save a gif of the data
if len(frames.shape) > 2:
if idx == 0 and epoch == 0:
utils.save_gif(frames.detach().cpu(), os.path.join(savepath, 'orig_data.gif'))
# transfer the data to the device
# the rest is boilerplate
frames = frames.float().to(device)
ts = ts.float().to(device)
for _ in range(inner_num):
optimizer.zero_grad()
kl_loss, l2_loss,\
frames_hat, mu_hat, q_mu, sigma_hat_full, q_sigma_full, inc, z = ae.step(frames, ts, dt, mu, sigma)
kl_loss1, l2_loss1,\
_, _, _, _, _, _, _ = ae.step(frames, ts, dt, mu, sigma, plus_one=True)
sigma.data = sigma / sigma.norm(2) * torch.ones(z.shape[1]).norm(2)
loss = kl_loss + kl_loss1 + l2_loss + l2_loss1 + 20*sigma.norm(1)
losses_train.append((kl_loss.item(), l2_loss.item()))
loss.backward()
optimizer.step()
# And that's the end of the train routine
'''
PLOT SECTION
This is still quite messy and needs to be refactored,
but this is all visualization calls
'''
if kl_loss < stats['kl']:
stats['kl'] = kl_loss.item()
stats['mu'] = mu_hat.mean().item()
if plot_train and (epoch % plot_freq) == 0 and idx == 0:
if l2_loss < stats['l2']:
l2_small = True
stats['l2'] = l2_loss.item()
else:
l2_small = False
if len(frames.shape) > 2:
plot_images(ae, mu, sigma, frames, frames_hat, dt, ts, l2_small)
# plot mu hat
mu_hat_samples, hat_domain = utils.plot_mu_hat(mu, sigma, q_mu, ts, os.path.join(savepath, 'mu_hat_plot.png'))
if len(frames.shape) < 3:
plots = [frames.cpu(), frames_hat.detach().cpu()]
names = ['Original', 'Sampled']
utils.plot_subplots(plots, names, os.path.join(savepath, 'train_recon.png'), plot_type='plot', axis=True)
_, sampled_frames = sample_n_frames(frames[:2], ts[:2], dt, ae, mu, sigma, n_generate=1000)
plots = [frames.cpu(), sampled_frames.detach().cpu()]
names = ['Original', 'Sampled']
utils.plot_subplots(plots, names, os.path.join(savepath, 'train_sampled.png'), plot_type='plot', axis=True)
if frames.shape[1] == 1:
with torch.no_grad():
inx = torch.linspace(frames.min().item(), frames.max().item()).unsqueeze(1)
oned_enc = ae.encode(inx.cuda(0))[0].detach().data.clone().cpu()
enc_scale = ( inx.log() / oned_enc ).mean()
enc_shift = (inx.log() - enc_scale * oned_enc).mean()
plt.plot(inx.detach().cpu(), enc_scale * oned_enc.cpu(), label='encoder')
plt.plot(inx.detach().cpu(), inx.log().detach().cpu(),label='log')
plt.legend()
plt.savefig(os.path.join(savepath, 'encoder_plot.pdf'))
plt.close('all')
'''
AFFINE TRANSFORM SECTION
'''
# calculate the affine map between xt and z
current_run = train_dataset.xt_orig[idx*z.shape[0]:(idx+1)*z.shape[0]]
scale = (train_dataset.xt_orig.max() - train_dataset.xt_orig.min())
q_mu = q_mu[:, :train_dataset.xt_orig.shape[1]]
z = z[:, :train_dataset.xt_orig.shape[1]]
if not 'stocks' in savepath:
# if this is the stocks dataset, don't compute the scaling since there is none
if data_params['affine']:
transformed_xt, Q, b, sde_mse, sde_rel = utils.calc_affine(
current_run,
z.detach().cpu().numpy(),
savepath,
affine=data_params['affine'])
if z.shape[1] == mu_hat.shape[1]:
mu_residuals, mu_rel, mu_crlb = utils.compare_mu2(
mu,
q_mu,
ts,
Q,
b,
dt,
train_dataset,
os.path.join(savepath,'mu_comp_scaled.png'),
affine=data_params['affine'],
loss_type=loss_type)
else:
mu_residuals = torch.Tensor([np.NaN]).numpy()
mu_crlb = torch.Tensor([np.NaN]).numpy()
mu_rel = torch.Tensor([np.NaN]).numpy()
else:
q_max = q_mu.max()
q_min = q_mu.min()
if loss_type == 'exact':
q_scaled = ((q_mu - q_min ) / (q_max - q_min) * (scale) ).detach().cpu().numpy()
#q_scaled = q_mu.detach().cpu().numpy() / np.sqrt(scale)
else:
q_scaled = q_mu.detach().cpu().numpy() / scale
transformed_xt, Q, b, sde_mse, sde_rel = utils.calc_affine(
current_run,
q_scaled,
#z.detach().cpu().numpy() / scale,
savepath,
affine=data_params['affine'])
if z.shape[1] == mu_hat.shape[1]:
mu_residuals, mu_rel, mu_crlb = utils.compare_mu2(
mu,
q_mu,
ts,
Q,
b,
dt,
train_dataset,
os.path.join(savepath,'mu_comp_scaled.png'),
affine=data_params['affine'],
loss_type=loss_type)
else:
mu_residuals = torch.Tensor([np.NaN]).numpy()
mu_crlb = torch.Tensor([np.NaN]).numpy()
mu_rel = torch.Tensor([np.NaN]).numpy()
stats['sde_mse'] = sde_mse.copy()
stats['sde_rel'] = sde_rel.copy()
# compare the estimated mu to the true mu with the affine map q
stats['mu_mse'] = mu_residuals.copy()
stats['mu_rel'] = mu_rel.copy()
stats['mu_crlb'] = mu_crlb.copy()
else:
mu_residuals = torch.Tensor([np.NaN]).numpy()
mu_crlb = torch.Tensor([np.NaN]).numpy()
mu_rel = torch.Tensor([np.NaN]).numpy()
stats['sde_mse'] = torch.Tensor([np.NaN]).numpy()
stats['sde_rel'] = torch.Tensor([np.NaN]).numpy()
# compare the estimated mu to the true mu with the affine map Q
stats['mu_mse'] = torch.Tensor([np.NaN]).numpy()
stats['mu_rel'] = torch.Tensor([np.NaN]).numpy()
stats['mu_crlb'] = torch.Tensor([np.NaN]).numpy()
# plot and print
print('Epoch {} iter {}'.format(epoch, idx))
print('L2 loss {}'.format(l2_loss.item()))
print('KL loss {}'.format(kl_loss.item()))
plots_list = [(q_mu[1:]-q_mu[:-1]).detach().cpu().numpy(), mu_hat.detach().cpu().numpy()]
plot_titles = ['q_mu', 'mu_hat']
utils.plot_subplots(plots_list, plot_titles, os.path.join(savepath, 'mu_comp.png'), plot_type='plot', axis=True)
if scheduler:
if type(scheduler) == torch.optim.lr_scheduler.ReduceLROnPlateau:
scheduler.step(l2_loss)
else:
scheduler.step()
if (epoch % plot_freq) == 0:
# save all the networks
#if len(frames.shape) < 3:
# utils.plot_mu_hat(mu, None, z, ts, os.path.join(savepath, 'mu_hat_est.pdf'))
save_nets(ae, mu, sigma, 'latest')
with open(os.path.join(savepath, 'latent.pkl'), 'wb') as f:
#lat_d = {'q_mu' : q_mu.detach().cpu().numpy(), 'ts' : ts, 'xt_orig' : dataset.xt_orig}
lat_d = {'q_mu' : transformed_xt, 'ts' : ts, 'xt_orig' : train_dataset.xt_orig}
pickle.dump(lat_d, f)
if type(sigma) == nn.Parameter:
print('Update sigma_hat')
print(sigma)
stats['sigma_hat'] = (sigma.sort(descending=True)[0]).detach().cpu().numpy()
if (epoch % plot_freq) == 0 and plot_train:
'''
EVAL
'''
# with our validataion data, see how well we're predicting
with torch.no_grad():
ae.eval()
# first, compute how well we predict the next step on the validation data
for idxt, (frames_test, ts_test) in enumerate(val_data):
frames_test = frames_test.float().to(device)
ts_test = ts_test.float().to(device)
kl_loss_test, l2_loss_test,\
frames_hat_test, mu_hat_test, q_mu_test, sigma_hat_full, q_sigma_full, \
inc_test, z_test = ae.step(frames_test,
ts_test,
dt,
mu,
sigma)
losses_valid.append((kl_loss_test.item(), l2_loss_test.item()))
q_mu_test = q_mu_test[:,:train_dataset.xt_orig.shape[1]]
z_test = z_test[:,:train_dataset.xt_orig.shape[1]]
if len(frames_hat_test.shape) < 3 and l2_loss_test < stats['l2_valid']:
stats['l2_valid'] = l2_loss_test.item()
stats['kl_valid'] = kl_loss_test.item()
l2_small_valid = True
stats['val_cond_met'] = True
save_nets(ae, mu, sigma, 'best_val')
plots = [frames_test.cpu(), frames_hat_test.detach().cpu()]
names = ['Original', 'Sampled']
utils.plot_subplots(plots, names, os.path.join(savepath, 'valid_recon.png'), plot_type='plot')
# if the l2 and kl are sufficiently small, save these as our current best networks
if ((l2_loss_test < stats['l2_valid'] and epoch > reserve_epoch) or ('dna' in savepath)) and ('z={}'.format(train_dataset.xt_orig.shape[1]) in savepath):
stats['val_cond_met'] = True
#stats['l2_valid'] = kl_loss_test.item()*l2_loss_test.item()
stats['l2_valid'] = l2_loss_test.item()
stats['kl_valid'] = kl_loss_test.item()
l2_small_valid = True
save_nets(ae, mu, sigma, 'best_val')
# Compute the mapping over the training data since we want to see the fit within the whole time series
frames = torch.Tensor(train_dataset.frames).float().to(device)[:z.shape[0]]
ts = torch.Tensor(train_dataset.ts).float().to(device)[:z.shape[0]]
kl_loss, l2_loss,\
frames_hat, mu_hat, q_mu, sigma_hat_full, q_sigma_full, inc, z = ae.step(frames,
ts,
dt,
mu,
sigma)
if 'gbm' in savepath:
gbm = True
else:
gbm = False
# compare the estimated mu to the true mu with the affine map Q
current_run = train_dataset.xt_orig[:z.shape[0]]
scale = train_dataset.xt_orig.max() - train_dataset.xt_orig.min()
if gbm:
scale = (np.log(train_dataset.xt_orig[:]).max() - np.log(train_dataset.xt_orig[:]).min())
if len(frames.shape) < 3:
plots = [frames.cpu(), frames_hat.detach().cpu()]
names = ['Original', 'Sampled']
utils.plot_subplots(plots, names, os.path.join(savepath, 'valid_recon.png'), plot_type='plot')
continue
if data_params['affine']:
transformed_xt, Q, b, sde_mse, sde_rel = utils.calc_affine(current_run,
z.detach().cpu().numpy(),
savepath,
affine=data_params['affine'])
mu_mse, mu_rel, mu_crlb = utils.compare_mu2(mu,
q_mu,
ts,
Q,
b,
dt,
train_dataset,
os.path.join(savepath,'mu_comp_best_val.png'),
affine=data_params['affine'],
loss_type=loss_type)
else:
q_max = q_mu.max()
q_min = q_mu.min()
if loss_type == 'exact':
q_scaled = ((q_mu - q_min ) / (q_max - q_min) * (scale) ).detach().cpu().numpy()
else:
q_scaled = q_mu.detach().cpu().numpy() / scale
transformed_xt, Q, b, sde_mse, sde_rel = utils.calc_affine(
current_run,
q_scaled,
#z.detach().cpu().numpy() / scale,
savepath,
affine=data_params['affine'],
gbm=gbm)
mu_mse, mu_rel, mu_crlb = utils.compare_mu2(mu,
q_mu,
ts,
Q,
b,
dt,
train_dataset,
os.path.join(savepath,'mu_comp_best_val.png'),
affine=data_params['affine'],
loss_type=loss_type)
stats['mu_mse_val'] = mu_mse.copy()
stats['mu_rel_val'] = mu_rel.copy()
stats['mu_crlb_val'] = mu_crlb.copy()
stats['sde_mse_valid'] = sde_mse.copy()
stats['sde_rel_valid'] = sde_rel.copy()
else:
l2_small_valid = False
plt.plot(torch.arange(sigma.shape[0]).detach().cpu().numpy(), (sigma.sort(descending=True)[0]).detach().cpu().numpy())
plt.savefig(os.path.join(savepath, 'sigma_hat.pdf'))
plt.close('all')
if 'dna' in savepath or 'balls' in savepath:
stats['val_cond_met'] = True
save_nets(ae, mu, sigma, 'best_val')
im_grid_test = torchvision.utils.make_grid(frames_test[:64].detach().cpu(), pad_value=1, normalize=True)
im_grid_hat_single_test = torchvision.utils.make_grid(frames_hat_test[:64].detach().cpu(), pad_value=1, normalize=True)
# sample the frames for the next n images
_, sampled_frames_test = sample_n_frames(frames_test[:2], ts_test[:2], dt, ae.eval(), mu, sigma)
_, sampled_frames_test2 = sample_n_frames(frames_test[:2], ts_test[:2], dt, ae.eval(), mu, sigma*2)
im_grid_hat_test = torchvision.utils.make_grid(sampled_frames_test[:64].detach().cpu(), pad_value=1, normalize=True)
odd_rows = []
for row in range(4):
odd_rows.append(frames_test[row*8:(row+1)*8])
odd_rows.append(sampled_frames_test[row*8:(row+1)*8])
comp_grid = torchvision.utils.make_grid(torch.cat(odd_rows), pad_value=1, normalize=True)
plots_list = [comp_grid.cpu().numpy().transpose((1,2,0))]
plot_titles = ['Comparison']
utils.plot_subplots(plots_list,
plot_titles,
os.path.join(savepath, 'valid_comparison.png'))
if val_dataset.xt.shape[1] < 10:
utils.calc_affine(
val_dataset.xt[:z_test.shape[0]],
np.sqrt(dt)*z_test.detach().cpu().numpy(),
savepath, suffix='test')
plots_list = [im_grid_test.numpy().transpose((1,2,0)), im_grid_hat_single_test.numpy().transpose((1,2,0))]
plot_titles = ['Original','Sampled (single)']
if l2_small_valid:
utils.plot_subplots(plots_list, plot_titles, os.path.join(savepath, 'valid_sample_best.png'))
else:
utils.plot_subplots(plots_list, plot_titles, os.path.join(savepath, 'valid_sample.png'))
plots_list = [im_grid_hat_test.numpy().transpose((1,2,0))]
plot_titles = ['Sampled (trajectory)']
if l2_small_valid:
utils.plot_subplots(plots_list, plot_titles, os.path.join(savepath, 'valid_sample_traj_best.png'))
else:
utils.plot_subplots(plots_list, plot_titles, os.path.join(savepath, 'valid_sample_traj.png'))
if len(sampled_frames_test.shape) > 2:
utils.save_gif(sampled_frames_test.detach().cpu(), os.path.join(savepath, 'movies/valid_sample_traj.gif'))
utils.save_gif(sampled_frames_test2.detach().cpu(), os.path.join(savepath, 'movies/valid_sample_traj_2.gif'))
plt.figure(figsize=(10,5))
plt.subplot(1,2,1)
plt.title('NLL')
plt.plot([kp[0] for kp in losses_train])
plt.subplot(1,2,2)
plt.title('l2')
plt.yscale('log')
plt.plot([kp[1] for kp in losses_train])
plt.savefig(os.path.join(savepath, 'losses_train.png'))
plt.close('all')
plt.figure(figsize=(10,5))
plt.subplot(1,2,1)
plt.title('NLL')
plt.plot([kp[0] for kp in losses_valid])
plt.subplot(1,2,2)
plt.title('l2')
plt.yscale('log')
plt.plot([kp[1] for kp in losses_valid])
plt.savefig(os.path.join(savepath, 'losses_valid.png'))
plt.close('all')
return stats
def get_parser():
    """Build the command-line argument parser for this training script."""
    from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter

    cli = ArgumentParser(
        description=__doc__,
        formatter_class=ArgumentDefaultsHelpFormatter,
    )
    cli.add_argument(
        "-f",
        "--file",
        dest="filename",
        metavar="FILE",
        required=True,
        help="experiment definition file",
    )
    return cli
if __name__ == '__main__':
    import shutil
    # Load the experiment configuration from the YAML file given on the CLI.
    args = get_parser().parse_args()
    yaml_filepath = args.filename
    with open(yaml_filepath, 'r') as f:
        cfg = yaml.load(f, yaml.SafeLoader)
    # NOTE(review): `global` at module level is a no-op; savepath/loss_type are
    # module globals read by train() -- confirm that is the intent.
    global savepath
    all_stats = {'config':cfg, 'runs':[]}
    # Optional config keys with defaults: number of independent runs and the
    # number of restarts ("tries") per run.
    try:
        n_runs = cfg['n_runs']
    except KeyError:
        n_runs = 5
    try:
        n_tries = cfg['n_tries']
    except KeyError:
        n_tries = 1
    print(n_tries)
    for run in range(n_runs):
        # Results directory encodes the key hyperparameters of this run.
        savepath = 'results/{}_d={}w={}z={}det={}lat={}loss={}sigma={}/run{}'.format(
            cfg['head'],
            cfg['dataset']['name'],
            cfg['ae']['net']['width'],
            cfg['ae']['net']['latent_dim'],
            cfg['ae']['net']['add_det'],
            cfg['sde']['type'],
            cfg['ae']['net']['loss'],
            cfg['ae']['net']['sigma_type'],
            run)
        global loss_type
        loss_type = cfg['ae']['net']['loss']
        #if os.path.isfile(os.path.join(savepath, 'data.pkl')):
        #    os.remove(os.path.join(savepath, 'data.pkl'))
        # Create the output directory tree (results, movies, checkpoints).
        if not os.path.exists(savepath):
            os.makedirs(savepath)
        if not os.path.exists(os.path.join(savepath,'movies')):
            os.makedirs(os.path.join(savepath,'movies'))
        if not os.path.exists(os.path.join(savepath,'saved_nets')):
            os.makedirs(os.path.join(savepath,'saved_nets'))
        # Log to both stdout and a per-run log file.
        log_format = "%(asctime)s %(message)s"
        logging.basicConfig(
            stream=sys.stdout,
            level=logging.INFO,
            format=log_format,
            datefmt="%m/%d %I:%M:%S %p",
        )
        fh = logging.FileHandler(os.path.join(savepath, "log.txt"))
        fh.setFormatter(logging.Formatter(log_format))
        logging.getLogger().addHandler(fh)
        logging.info("config = %s", cfg)
        pp = pprint.PrettyPrinter(indent=4)
        pp.pprint(cfg)
        best_loss = np.Inf
        for t_num in range(n_tries):
            # Retry training until the validation condition was met at least
            # once (dna/stocks datasets are exempt from the retry loop).
            while True:
                initialized = setup.setup(cfg, savepath)
                stats = train(**initialized)
                val_cond_met = stats['val_cond_met']
                if val_cond_met or 'dna' in cfg['head'] or 'stocks' in cfg['head']:
                    break
            # Keep a per-try copy of the best-validation checkpoints.
            src_ae = os.path.join(savepath,'saved_nets/ae_best_val.pth')
            dst_ae = os.path.join(savepath,'saved_nets/ae_best_val_{}.pth'.format(t_num))
            src_mu = os.path.join(savepath,'saved_nets/mu_best_val.pth')
            dst_mu = os.path.join(savepath,'saved_nets/mu_best_val_{}.pth'.format(t_num))
            shutil.copyfile(src_ae, dst_ae)
            shutil.copyfile(src_mu, dst_mu)
            print('=========== End of Training ===========')
            print('Printing results for try {}'.format(t_num))
            print('STAT: L2 on Train: {}'.format(stats['l2']))
            print('STAT: KL on Train: {}'.format(stats['kl']))
            print('STAT: L2 on Validation: {}'.format(stats['l2_valid']))
            print('STAT: KL on Validation: {}'.format(stats['kl_valid']))
            print('STAT: mu mse on Validation: {}'.format(stats['mu_mse']))
            print('STAT: SDE mse on Validation: {}'.format(stats['sde_mse']))
            print('========== End of Results ============')
            # Track the best try (by combined validation loss) as "_bt" copies.
            if stats['kl_valid'] + stats['l2_valid'] < best_loss:
                best_loss = stats['kl_valid'] + stats['l2_valid']
                src_ae = os.path.join(savepath,'saved_nets/ae_best_val.pth')
                dst_ae = os.path.join(savepath,'saved_nets/ae_best_val_bt.pth')
                src_mu = os.path.join(savepath,'saved_nets/mu_best_val.pth')
                dst_mu = os.path.join(savepath,'saved_nets/mu_best_val_bt.pth')
                shutil.copyfile(src_ae, dst_ae)
                shutil.copyfile(src_mu, dst_mu)
            all_stats['runs'].append(stats)
            print(stats)
            # Persist cumulative statistics after every try.
            with open(os.path.join(savepath,'saved_stats.pkl'), 'wb') as f:
                pickle.dump(all_stats, f)
| alluly/ident-latent-sde | train.py | train.py | py | 31,019 | python | en | code | 3 | github-code | 36 |
4430086553 | '''Given a string S, you need to remove all the duplicates.
That means, the output string should contain each character only once.
The respective order of characters should remain same, as in the input string.
Sample Input 1 :
ababacd
Sample Output 1 :
abcd
'''
from collections import OrderedDict
def uniqueChar(s):
    """Return s with duplicate characters removed, preserving the order of
    first occurrence.

    Example: "ababacd" -> "abcd".
    """
    # dict preserves insertion order (Python 3.7+), so fromkeys() both
    # de-duplicates and keeps first-occurrence order.  The original built a
    # count per character it never used and concatenated the result string
    # one character at a time (quadratic); join() does it in one pass.
    return ''.join(dict.fromkeys(s))
# Main
# Read one line from stdin and print it with duplicate characters removed.
s=input()
print(uniqueChar(s))
| Riyachauhan11/Python-learning-Concepts | dictionaries/Extract Unique characters.py | Extract Unique characters.py | py | 553 | python | en | code | 0 | github-code | 36 |
43810341653 | import matplotlib.pyplot as plt
from matplotlib import gridspec
import numpy as np
# Length of the 1-D world the robot moves in (number of discrete positions).
distance = 40


def create_poles(poles):
    """Stem-plot a marker of height 1 at each pole position.

    poles -- iterable of integer positions in [0, distance).
    """
    y = np.zeros(distance)
    for p in poles:
        y[p] = 1
    x = range(distance)
    # BUG FIX: `use_line_collection` was deprecated in Matplotlib 3.6 and
    # removed in 3.8 (True has been the default behaviour since 3.3), so
    # passing it crashes on current Matplotlib.  Omitting it keeps the same
    # rendered output on all supported versions.
    plt.stem(x, y)
def plot_robot_measurement(poles, pos, gs):
    """Draw the bottom panel: a zoomed-in view around the robot.

    poles -- iterable of pole positions (passed through to create_poles).
    pos   -- robot position along the 1-D track.
    gs    -- 3x1 GridSpec whose last row this panel occupies.
    """
    plt.subplot(gs[2:3, 0])
    plt.yticks([])
    plt.xticks([])
    # View window spans from just behind to a few cells ahead of the robot.
    plt.xlim([pos - 1.5, pos + 3.5])
    plt.ylim([-0.1, 1.1])
    # Green triangle slightly ahead of the blue robot dot -- presumably the
    # robot's heading/sensor marker; confirm against the simulator.
    plt.plot([pos + 0.2], [0.6], 'g<', markersize=40)
    plt.plot([pos], [0.4], 'bo', markersize=40)
    create_poles(poles)
def plot_simple(particles, poles, pos=None, j=None):
    """Show particles (colored by their own `color`, skipping zero-belief
    ones) and poles; optionally add the robot-measurement panel.

    particles -- objects with .pos, .belief and .color attributes.
    poles     -- pole positions.
    pos       -- robot position; when given, the bottom panel is drawn.
    j         -- optional iteration number used as the plot title.
    """
    gs = gridspec.GridSpec(3, 1)
    # Plot Main Display (top two rows of the grid).
    plt.subplot(gs[0:2, 0])
    if j is not None:
        plt.title(str(j))
    plt.yticks([])
    plt.xlim([-0.9, distance + 0.9])
    for particle in particles:
        # Particles with zero belief are not drawn.
        if particle.belief == 0:
            continue
        plt.plot([particle.pos], [0.5], '*', color=particle.color)
    create_poles(poles)
    # Plot Robot Measurement (bottom row), only if a robot position is known.
    if pos is not None:
        plot_robot_measurement(poles, pos, gs)
    plt.show(block=True)
def plot(
        particles,
        poles,
        pos,
        resampled_particles=None,
        j=None,
        autorun=False):
    """Show particles, optional resampled particles, the robot, and poles.

    particles           -- objects with a .pos attribute (drawn in blue).
    poles               -- pole positions.
    pos                 -- true robot position (drawn in red).
    resampled_particles -- optional second particle set (drawn in green).
    j                   -- optional iteration number used as the title.
    autorun             -- when True, show non-blocking for ~1s then close
                           (for animated runs); otherwise block on show().
    """
    gs = gridspec.GridSpec(3, 1)
    # Plot Main Display (top two rows of the grid).
    plt.subplot(gs[0:2, 0])
    if j is not None:
        plt.title(str(j))
    plt.yticks([])
    plt.xlim([-0.9, distance + 0.9])
    for particle in particles:
        plt.plot([particle.pos], [0.5], 'b*', label="Particles")
    if resampled_particles is not None:
        for particle in resampled_particles:
            plt.plot([particle.pos], [0.25], 'g*', label="Resampled")
    plt.plot([pos], [0.65], 'r*', label="Robot")
    # Remove duplicates in legend (because of way I plotted one at a time).
    handles, labels = plt.gca().get_legend_handles_labels()
    by_label = dict(zip(labels, handles))
    plt.legend(by_label.values(), by_label.keys(), loc='upper right')
    create_poles(poles)
    # Plot Robot Measurement (bottom row).
    if pos is not None:
        plot_robot_measurement(poles, pos, gs)
    if autorun:
        if j == 0:
            # Not sure why this is needed but it is.
            plt.pause(1)
        plt.show(block=False)
        plt.pause(1)
        plt.close()
    else:
        plt.show()
def print_particle_error(robot, particles):
    """Print the position error of the highest-weight particle and the sum
    of all particle weights.

    robot     -- object with a .pos attribute (ground-truth position).
    particles -- sequence of objects with .pos and .weight attributes.
    """
    # Build the weight list with a comprehension instead of the original
    # repeated `weights += [..]` list concatenation.
    weights = [particle.weight for particle in particles]
    best_particle = weights.index(max(weights))
    print("Error: " +
          str(round(abs(particles[best_particle].pos - robot.pos), 2)))
    print("Weight Sum: " + str(sum(weights)))
    print()
| WuStangDan/localization | assignment3/sim/plot.py | plot.py | py | 2,643 | python | en | code | 3 | github-code | 36 |
class Solution:
    def longestSubarray(self, nums: List[int]) -> int:
        """Return the length of the longest subarray of 1s obtainable after
        deleting exactly one element of nums (LeetCode 1493).

        Sliding window that tolerates at most one 0; the answer for a window
        [left, right] is its size minus the one deleted element, i.e.
        right - left.  Runs in O(n) time, O(1) space -- the original kept a
        full frequency dict where a single zero counter suffices.
        """
        best = 0
        left = 0
        zeros = 0  # number of 0s inside the current window
        for right, value in enumerate(nums):
            if value == 0:
                zeros += 1
            # Shrink until the window holds at most one 0 (the deletion slot).
            while zeros > 1:
                if nums[left] == 0:
                    zeros -= 1
                left += 1
            best = max(best, right - left)
        return best
72174120105 | from dymond_game import game
class Main:
    """Placeholder entry-point class (currently holds no state)."""

    def __init__(self):
        # Zero-argument super() is equivalent to super(Main, self) in Py3.
        super().__init__()
if __name__ == '__main__':
    # Launch the game with a hard-coded debug configuration.  Volumes are
    # 0.0-1.0 fractions; "resolucion" is [width, height] in pixels.
    game_test = game.Game("test", {
        "resolucion": [1600, 900],
        "sfx_volume": 0.5,
        "mus_volume": 0.09
    })
    game_test.run()
| YukiTheThicc/Black_Dymonds | debug.py | debug.py | py | 285 | python | en | code | 0 | github-code | 36 |
3581561120 | # Import necessary libraries
import openai
import subprocess
import sys
import json
import html
import re
import ssl
import os
import math
import glob
import pprint
import nltk
import pdb
import requests
import time
import random
from PIL import Image, ImageDraw, ImageFont
from PIL import UnidentifiedImageError
# BUG FIX: nltk.data.find() raises LookupError when the resource is missing
# (it never returns a falsy value), so the original
# `if not nltk.data.find(...)` crashed on machines without the tokenizer
# instead of downloading it.  EAFP handles both cases correctly.
try:
    nltk.data.find('tokenizers/punkt')
except LookupError:
    nltk.download('punkt', quiet=True)
# Mapping of dispensary subdomains to their WordPress multisite blog IDs.
sitelist = [
    { "subdomain": "alamo", "site_id": 29 },
    { "subdomain": "burlingame", "site_id": 30 },
    { "subdomain": "campbell", "site_id": 7 },
    { "subdomain": "castrovalley", "site_id": 25 },
    { "subdomain": "concord", "site_id": 31 },
    { "subdomain": "danville", "site_id": 9 },
    { "subdomain": "dublin", "site_id": 8 },
    { "subdomain": "hillsborough", "site_id": 12 },
    { "subdomain": "lafayette", "site_id": 13 },
    { "subdomain": "livermore", "site_id": 14 },
    { "subdomain": "orinda", "site_id": 34 },
    { "subdomain": "pittsburg", "site_id": 28 },
    { "subdomain": "pleasanthill", "site_id": 35 },
    { "subdomain": "sanramon", "site_id": 33 },
    { "subdomain": "walnutcreek", "site_id": 32 }
]


def get_site_id(subdomain):
    """Return the multisite blog ID for `subdomain`, or None if unknown."""
    # next() with a default replaces the manual search loop.
    return next((site["site_id"] for site in sitelist
                 if site["subdomain"] == subdomain), None)
# Command-line arguments: target location (first) and the product SKU to
# propagate (second).
location = sys.argv[1]
sku = sys.argv[2]
# Initialize an empty dictionary for credentials
# NOTE(review): `credentials` is never populated below -- looks unused.
credentials = {}
# Define the file path to the credentials file (relative to this script).
creds_file_path = os.path.join(
    os.path.dirname(os.path.abspath(__file__)),  # Get the directory of the current file
    "../creds2.txt"  # Append the relative path to the credentials file
)
# Start from a clean slate: drop any product dump left by a previous run.
if os.path.exists('product.json'):
    os.remove('product.json')
class Location:
    """Container for one dispensary site's WooCommerce/OpenAI credentials."""

    def __init__(self, website, user, city, phone, consumer_key,
                 consumer_secret, api_key):
        # Assign every constructor argument to the attribute of the same name.
        names = ('website', 'user', 'city', 'phone',
                 'consumer_key', 'consumer_secret', 'api_key')
        values = (website, user, city, phone,
                  consumer_key, consumer_secret, api_key)
        for name, value in zip(names, values):
            setattr(self, name, value)
def scp_file_to_remote(local_file, remote_file):
    """Launch a background `scp` copying local_file to remote_file.

    Fire-and-forget: Popen returns immediately, so only failures to spawn
    the process are reported here, not failures of the transfer itself.
    """
    command = ["scp", local_file, remote_file]
    try:
        subprocess.Popen(command)
        print("File transfer initiated.")
    except Exception as error:
        print("Error while copying the file:", error)
def download_image(url, filename):
    """Stream the image at `url` into a local file named `filename`.

    Network errors are printed rather than raised, matching the
    best-effort style of the rest of this script.
    """
    try:
        response = requests.get(url, stream=True)
        response.raise_for_status()
        with open(filename, "wb") as file:
            for chunk in response.iter_content(chunk_size=8192):
                file.write(chunk)
        # BUG FIX: the success message was an f-string with no placeholder
        # that printed the literal text "(unknown)" -- report the real file.
        print(f"Image downloaded successfully: {filename}")
    except requests.exceptions.RequestException as e:
        print(f"Error downloading image: {str(e)}")
def add_watermark_and_save(image_path, watermark_text, output_path):
    """Overlay a semi-transparent grey watermark in the lower-right corner
    of the image at `image_path` and save the result to `output_path`.

    Any failure (missing file, missing font, bad image) is printed rather
    than raised.
    """
    try:
        # Open the image
        image = Image.open(image_path).convert("RGBA")
        # Define the watermark text and font style
        font = ImageFont.truetype("font.ttf", 40)
        # Create a transparent overlay and draw the watermark text
        overlay = Image.new("RGBA", image.size, (0, 0, 0, 0))
        draw = ImageDraw.Draw(overlay)
        # BUG FIX: textbbox() returns (left, top, right, bottom); the original
        # took [:2], i.e. the (left, top) offsets, not the text size, which
        # misplaced the watermark.  Width/height are right-left / bottom-top.
        left, top, right, bottom = draw.textbbox((0, 0), watermark_text, font=font)
        text_width = right - left
        text_height = bottom - top
        # Position the watermark in the lower right corner with a 10px margin.
        position = (image.width - text_width - 10, image.height - text_height - 10)
        draw.text(position, watermark_text, font=font, fill=(128, 128, 128, 128))
        # Composite the image and watermark overlay
        watermarked = Image.alpha_composite(image, overlay)
        # Save the watermarked image with the specified output path
        watermarked.save(output_path)
        print(f"Watermarked image saved as {output_path}")
    except Exception as e:
        print(f"Error: {str(e)}")
def makeunique(new_unique_product_name):
    """Ask the chat model for a slightly different name meaning the same
    thing and return it with HTML tags/entities stripped.

    BUG FIX: the original computed the API response and silently discarded
    it (the function had no return statement), making every call useless.
    It now returns the cleaned suggestion, mirroring the sibling
    generate_new_* helpers.
    """
    ai_response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[
            {
                "role": "system",
                "content": "You are a helpful budtender who knows all about the cannabis industry.",
            },
            {
                "role": "user",
                "content": f"Use this product name '{new_unique_product_name}'. Use this phrase to come up with a slightly different name that means the same thing."
                f"Come up with a new name that is max 70 chars long and will rank well with regard to SEO. If there is a mention of price. Change it to some other descriptive language instead."
            },
        ]
    )
    new_name = ai_response['choices'][0]['message']['content'].strip()
    return html.unescape(re.sub('<.*?>', '', new_name))
def generate_new_product_name(sku):
    """Ask the chat model for an SEO-friendly product title (max 70 chars,
    words joined by underscores) and return it with HTML tags/entities
    stripped.

    NOTE(review): despite taking `sku`, the prompt is built from the
    module-global `product['slug']` set by the fetch loop below -- `sku`
    itself never appears in the prompt; confirm this is intended.
    """
    ai_response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[
            {
                "role": "system",
                "content": "You are a helpful budtender who knows all about the cannabis industry.",
            },
            {
                "role": "user",
                "content": f"Use this product slug '{product['slug']}' to rewrite the product title. The slug contains words separated by a -."
                f"Use them to come up with a new name that is max 70 chars long and will rank well with regard to SEO. If there is a mention of price. Change it to some other descriptive language. Dont put spaces in the names. Use underscores to separate words."
            },
        ]
    )
    new_product_name = ai_response['choices'][0]['message']['content'].strip()
    # Strip any HTML tags and decode entities the model may have emitted.
    new_product_name = html.unescape(re.sub('<.*?>', '', new_product_name))
    return new_product_name
def generate_new_image_name(image_name):
    """Ask the chat model for a date-free, underscore-separated image name
    (max 70 chars) and return it with HTML tags/entities stripped."""
    chat_messages = [
        {
            "role": "system",
            "content": "You are a creative AI assistant and California Budtender for a delivery service.",
        },
        {
            "role": "user",
            "content": f"I have an image with the name '{image_name}'. Please suggest a new name for the image that does not use dates or times in the name. Limit the name to 70 characters. Dont put spaces in the names. Use underscores to separate words."
        },
    ]
    ai_response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=chat_messages,
    )
    suggestion = ai_response['choices'][0]['message']['content'].strip()
    return html.unescape(re.sub('<.*?>', '', suggestion))
def remove_keys(images_data):
    """Strip WooCommerce-managed metadata from the first four image dicts.

    The first four entries keep everything except ids/dates/alt text (so
    they can be re-uploaded to another site); any further entries are
    replaced with empty dicts.  Returns a new list.
    """
    dropped = {
        'date_created',
        'date_created_gmt',
        'date_modified',
        'date_modified_gmt',
        'id',
        'alt',
    }
    cleaned = []
    for position, entry in enumerate(images_data):
        if position < 4:
            cleaned.append({key: value for key, value in entry.items()
                            if key not in dropped})
        else:
            cleaned.append({})
    return cleaned
def generate(new_pics_prompt):
    """Generate one 256x256 image for the prompt via the OpenAI Images API
    and return its URL."""
    response = openai.Image.create(
        prompt=new_pics_prompt,
        n=1,
        size="256x256",
    )
    return response["data"][0]["url"]
locations = []
# Parse the INI-like credentials file: a "[website]" header opens a new site
# section, followed by "key = value" lines.  A finished section is flushed
# into `locations` when the next header (or end of file) is reached.
with open(creds_file_path) as f:
    # Initialize variables for parsing the file
    website = None
    user = None
    city = None
    phone = None
    consumer_key = None
    consumer_secret = None
    # NOTE(review): the API key is stored on the openai module itself, not a
    # local -- every section overwrites the same global.
    openai.api_key = None
    for line in f:
        line = line.strip()  # Remove trailing and leading whitespace
        if line.startswith("[") and line.endswith("]"):
            # New section header: flush the previous section if complete.
            if website and user and city and phone and consumer_key and consumer_secret and openai.api_key:
                locations.append(Location(website, user, city, phone, consumer_key, consumer_secret, openai.api_key))
            website = line[1:-1].lstrip()  # Remove the brackets and any leading whitespace
            user = None
            city = None
            phone = None
            consumer_key = None
            consumer_secret = None
            openai.api_key = None
        elif website and " = " in line:
            key, value = line.split(" = ")
            if key == "user":
                user = value
            elif key == "city":
                city = value
            elif key == "phone":
                phone = value
            elif key.lower().endswith("_consumer_key"):
                consumer_key = value
            elif key.lower().endswith("_consumer_secret"):
                consumer_secret = value
            elif key == "openai.api_key":
                openai.api_key = value
                # Keep a plain module-level copy of the API key as well.
                aikey = value
            elif key == "website":
                website = value
    # Append the final section (the loop above only flushes on a new header).
    locations.append(
        Location(website, user, city, phone, consumer_key,
                 consumer_secret, openai.api_key)
    )
# Fetches the first product dataset (from the first/source site) to be edited
# and pushed to the other sites.
for locationa in locations[:1]:
    base_url = "https://" + locationa.website + "/wp-json/wc/v3/products"
    consumer_key = locationa.website + "_consumer_key:" + locationa.consumer_key
    consumer_secret = locationa.website + "_consumer_secret:" + locationa.consumer_secret
    city = locationa.city
    phone = locationa.phone
    website = locationa.website
    aikey = openai.api_key
    # WooCommerce REST auth is (consumer_key, consumer_secret) basic auth.
    auth = (
        locationa.consumer_key,
        locationa.consumer_secret,
    )
    # Look up the product by SKU on the source site.
    response = requests.get(f'{base_url}', auth=auth, params={'sku': sku})
    response.raise_for_status()
    product = response.json()[0]
    # NOTE(review): source_product aliases product (same dict, not a copy).
    source_product = product
    source_product['images'] = remove_keys(source_product['images'])
    source_images = source_product['images'][:4]
    imagecounter = 0
    for item in source_images:
        imagecounter = imagecounter + 1
        print("Image:",imagecounter)
        #source_product_name = product['name'].strip()
        # Rewrite image URLs from the source site (alamo, multisite id 29)
        # to the first destination (burlingame, id 30).
        item['src'] = item['src'].replace("/29/","/30/")
        item['src'] = item['src'].replace("alamo","burlingame")
        #imgcnt = 0
        #pprint.pprint(source_images)
        #source_image_url = item['src']
    # for item in source_images:
    #     source_product_name = product['name'].strip()
    #     print("Source Product\n",source_product_name)
    #     print(website, aikey)
    #     print("Source Images")
    #     imgcnt = 0
    #     pprint.pprint(source_images)
    #     source_image_url = item['src']
    #     new_product_name = generate_new_product_name(sku)
    #     print("New name suggestion:", new_product_name)
seq = 0
# Fetches all but the first product and applies the updated first-site
# product details (name, images) to each destination site.
print("Destination Products\n")
for locationb in locations[1:]:
    seq = seq + 1
    base_url = "https://" + locationb.website + "/wp-json/wc/v3/products"
    consumer_key = locationb.website + "_consumer_key:" + locationb.consumer_key
    consumer_secret = locationb.website + "_consumer_secret:" + locationb.consumer_secret
    # Normalize city/phone for use in the watermark text.
    city = locationb.city
    city = city.replace('"', '')
    phone = locationb.phone
    phone = phone.replace(' ', '').replace('-', '').replace('"', '').replace('(', '').replace(')', '')
    website = locationb.website
    aikey = openai.api_key
    auth = (
        locationb.consumer_key,
        locationb.consumer_secret,
    )
    # Look up the same SKU on the destination site.
    response = requests.get(f'{base_url}', auth=auth, params={'sku': sku})
    response.raise_for_status()
    product = response.json()[0]
    #source_product = product
    source_product['images'] = remove_keys(source_product['images'])
    product['images'] = source_product['images']
    msgg = "#" + str(seq) + " " + str(sku)
    print(msgg)
    subdomain = website.split('.')[0]
    print("Domain: ", subdomain)
    site_id = get_site_id(subdomain)
    print("Site ID:", site_id)
    print(city, "Doap")
    print(city, " Ca ", phone)
    print("Sku: ", sku)
    # First AI call: generate new product name (then strip characters that
    # would break slugs/filenames).
    product['name'] = generate_new_product_name(sku).replace('"','').replace('"','').replace("'","").replace(" ","_").replace("(","").replace(")","").replace(",","").replace("$","")
    print("New dest product name: ", product['name'])
    print("New Images")
    imgcnt = 0
    for item in source_images:
        imgcnt = imgcnt + 1
        itemname = item['name'].replace('-',' ').capitalize()
        print("Image #", imgcnt)
        # NOTE(review): itemname is computed twice and never used below.
        itemname = item['name'].replace('-',' ').capitalize()
        # print("Image #", imgcnt)
        # Second AI call: a unique, sanitized name for this image.
        new_unique_product_name = generate_new_image_name(product['name']).replace('"','').replace('"','').replace("'","").replace("!","").replace("(","").replace(")","").replace(",","").replace("→","")
        new_unique_file_name = new_unique_product_name
        item['name'] = new_unique_product_name
        # print(item['name'], " : ", item['src'])
        source_image_url = item['src']
        source_image_filename = os.path.basename(source_image_url)
        new_unique_file_name = new_unique_file_name + ".png"
        download_image(source_image_url, source_image_filename)
        print("Source image url: ", source_image_url)
        # Translate the public URL into the server-side document root path,
        # swapping in this destination's multisite id (path segment 7).
        replaced_url = source_image_url.replace("https://alamo.", "/var/www/")
        stripped_path = "/".join(replaced_url.split("/")[:-1])
        print("Orig file path: ", stripped_path)
        new_path = stripped_path.split("/")
        new_path[7] = str(site_id)
        new_path = "/".join(new_path)
        print("New remote file path: ", new_path)
        #item['src'] = "https://" + subdomain + ".doap.com/" + stripped_path + "/" + new_unique_file_name
        item['src'] = "https://" + subdomain + ".doap.com/" + stripped_path + "/" + new_unique_file_name
        item['src'] = item['src'].replace("/var/www/doap.com/","")
        # Watermark the downloaded image with this site's city and phone,
        # then push it to the web server over scp.
        watermark_text = city + " Doap " + phone
        add_watermark_and_save(source_image_filename, watermark_text, new_unique_file_name)
        local_file = '/Users/dmenache/Nextcloud/Projects/doap-api/ai_product_updater/' + new_unique_file_name
        remote_server = 'dmenache@debian.doap.com'
        testpath = stripped_path.replace("https://burlingame.","/var/www/")
        remote_file = f'{remote_server}:{testpath}/{new_unique_file_name}'
        scp_file_to_remote(local_file, remote_file)
        # NOTE(review): debugger breakpoint left in -- halts every image.
        pdb.set_trace()
        #pprint.pprint(item)
    #pprint.pprint(source_images)
    product['images'] = source_images
    #pprint.pprint(product)
    # pprint.pprint(product)
    # De-duplicate any doubled site prefix introduced by the URL rewriting.
    for image in product['images']:
        image['src'] = image['src'].replace('https://burlingame.doap.com/https://burlingame.doap.com/', 'https://burlingame.doap.com/')
    print("product[images]",product['images'])
    print("source_images",source_images)
    print("product[images]",product['images'])
    # NOTE(review): this break stops after the FIRST destination site --
    # presumably a debugging leftover; confirm before production use.
    break
pprint.pprint(product)
# NOTE(review): second leftover breakpoint before the final update call.
pdb.set_trace()
update_url = f'{base_url}/{product["id"]}'
update_response = requests.put(update_url, json=product, auth=auth)
update_response.raise_for_status()
| menached/ai_product_updater | t1.py | t1.py | py | 14,935 | python | en | code | 0 | github-code | 36 |
34682609282 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Jul 16 16:18:01 2019
@author: cacquist
"""
# coding: utf-8
# In[1]:
# ------------------------------------------------------------------------
# date : 12.04.2018
# author : Claudia Acquistapace
# goal : routine to read 1D meteogram for a given date and site ( Joyce ) and extract data for the site and also
# level2 variables for the site Store them in a ncdf file to be copied on ostro for comparison 1to1 with observations from the ground
# DAYS WITH BOUNDARY LAYER CLOUDS OF INTEREST:
# - 20130502 (folder 20130502-default )
# - 20130505 (folder 20130505-default-redone_v1)
# - 20130511 (folder 20130511-default )
# - 20160603 (folder 20160603-default-redone_v2 )
# ------------------------------------------------------------------------
# In[1]:
# ---- importing libraries
import numpy as np
import matplotlib
import scipy
import numpy.ma as ma
import pandas as pd
import netCDF4 as nc4
import glob
from netCDF4 import Dataset
import matplotlib.dates as mdates
import xarray as xr
from myFunctions import f_closest
import matplotlib.pyplot as plt
from myFunctions import f_calcPblHeightRN
from myFunctions import f_calcWvariance
from myFunctions import f_runningMeanSkewnessVarianceStd_W
from myFunctions import f_PBLClass
from myFunctions import f_calcCloudBaseTopPBLcloudsV2
from myFunctions import f_calcCloudBaseTopPBLclouds
from myFunctions import f_calcPblHeightTW
from myFunctions import f_cloudmask
from myFunctions import f_calcWindSpeed_Dir
from myFunctions import f_calculateCloudBaseTopThickness
def f_processModelOutput(path_icon, \
iconFilename, \
modelInputParameters, \
date, \
humanInfo, \
debuggingFlag, \
verboseFlag, \
pathDebugFig, \
pathOut, \
domSel):
print('processing meteograms for the '+date)
# ---- reading datafile selected
data = Dataset(path_icon+iconFilename, mode='r')
time = data.variables['time'][:].copy()
datetime_ICON = nc4.num2date(data.variables['time'][:],data.variables['time'].units)
Qi = data.variables['QI'][:].copy()
Qc = data.variables['QC'][:].copy()
T = data.variables['T'][:].copy() # in [K]
zonalWind = data.variables['U'][:].copy()
merWind = data.variables['V'][:].copy()
vertWind = data.variables['W'][:].copy()
LWP = data.variables['TQC'][:].copy()
IWV = data.variables['TQV'][:].copy()
thetaV = data.variables['THETAV'][:].copy()
height = data.variables['height'][:].copy()
P = data.variables['P'][:].copy() # [Pa]
RH = data.variables['REL_HUM'][:].copy()
q = data.variables['QV_DIA'][:].copy() # [kg/kg]
Hsurf = float(data.station.split('_hsurf=')[-1].split('\n')[0])
height2 = data.variables['height_2'][:].copy()
rho = data.variables['RHO'][:].copy()
SWSurfFlux = data.variables['SOBS'][:].copy() # shortwave net flux at surface
LWSurfFlux = data.variables['THBS'][:].copy() # longwave net flux at surface
LHFL = data.variables['LHFL'][:].copy() # latent heat flux (surface)
SHFL = data.variables['SHFL'][:].copy() # sensible heat flux (surface)
TempSurf = data.variables['T_S'][:]
#print(Hsurf)
#print(height2[-1])
#print(height2[0])
#print(len(height2))
# subtracting from model height arrays the height of the ground level at JOYCE
# and make it comparable with the observations
height2 = height2 - np.repeat(Hsurf, len(height2))
height = height -np.repeat(Hsurf, len(height))
# --- reading dimension of height and time arrays
dimTime = len(datetime_ICON)
dimHeight = len(height2)
if verboseFlag == 1:
print('variable extracted from the data')
print('data loaded for '+date)
print('dimension for height_2 :', dimHeight)
print('dimension for time :', dimTime)
# ------------------------------------------------------------------
# plot meridional and zonal wind for checking fields
# ------------------------------------------------------------------
if debuggingFlag == 1:
if verboseFlag == 1:
print('no plots')
# =============================================================================
# fig, ax = plt.subplots(figsize=(14,6))
# ax.xaxis.set_major_formatter(mdates.DateFormatter("%H:%M"))
# ax.xaxis.set_minor_formatter(mdates.DateFormatter("%H:%M"))
# ax.spines["top"].set_visible(False)
# ax.spines["right"].set_visible(False)
# ax.get_xaxis().tick_bottom()
# ax.get_yaxis().tick_left()
# ax.xaxis_date()
# ax.set_xlabel("time [hh:mm]", fontsize=16)
# ax.set_ylabel("Fluxes at the surface [W/m2]", fontsize=16)
# plt.plot(datetime_ICON, SWSurfFlux, label='Shortwave net flux')
# plt.plot(datetime_ICON, LWSurfFlux, label='Longwave net flux')
# plt.legend()
# plt.savefig(pathDebugFig+'surface_LWSW_surfFlux_iconlem_'+date+'.png', format='png')
#
# =============================================================================
# =============================================================================
# fig, ax = plt.subplots(figsize=(14,6))
# ax.xaxis.set_major_formatter(mdates.DateFormatter("%H:%M"))
# ax.xaxis.set_minor_formatter(mdates.DateFormatter("%H:%M"))
# ax.spines["top"].set_visible(False)
# ax.spines["right"].set_visible(False)
# ax.get_xaxis().tick_bottom()
# ax.get_yaxis().tick_left()
# ax.xaxis_date()
# ax.set_xlabel("time [hh:mm]", fontsize=16)
# ax.set_ylabel("Latent/sensible heat fluxes at the surface [W/m2]", fontsize=16)
# plt.plot(datetime_ICON, LHFL, label='Latent heat flux')
# plt.plot(datetime_ICON, SHFL, label='Sensible heat flux')
# plt.legend()
# plt.savefig(pathDebugFig+'surface_LatentSensible_heatFlux_iconlem_'+date+'.png', format='png')
# =============================================================================
if verboseFlag == 1:
print('end of plotting graphs for debugging in debugging mode')
# ------------------------------------------------------------------
# defining constants needed for calculations
# ------------------------------------------------------------------
Rw = 462.
Rl = 287.
g = 9.81
P_0 = 100*1000.
const = 0.286 # R/Cp
P_0 = 100*1000.
const = 0.286 # R/Cp
Lv = 2260 # J / kg
Cp = 1005.7 # /K Kg
# ------------------------------------------------------------------
# derivation of water vapor mixing ratio
# ------------------------------------------------------------------
r = np.zeros((dimTime, dimHeight))
for itempo in range(dimTime):
for ih in range(dimHeight):
r[itempo,ih] = q[itempo,ih]/(1. - q[itempo,ih] )
if verboseFlag == 1:
print('water vapor mixing ratio calculated')
# ------------------------------------------------------------------
# --- calculating cloud mask for ice and liquid clouds using thresholds on Qi, Qc
# ------------------------------------------------------------------
QcThreshold = modelInputParameters['QcThresholdVar']
QiThreshold = modelInputParameters['QiThresholdVar']
cloudMask = f_cloudmask(time,height2,Qc,Qi,QiThreshold,QcThreshold)
# =============================================================================
#
# for indT in range(len(datetime_ICON)):#
# if (~np.isnan(CBMatrix_ICON[indT,0]) == True) and (~np.isnan(CTMatrix_ICON[indT,0])== True):
#
# indCB = f_closest(height, CBMatrix_ICON[indT,0])
# indCT = f_closest(height, CTMatrix_ICON[indT,0])
#
# if (indCB == 0) or (indCT == 0):
# CT_array_ICON[indT] = np.nan
# CB_array_ICON[indT] = np.nan
# else:
# CT_array_ICON[indT] = height[indCT] # saving cloud top height
# CB_array_ICON[indT] = height[indCB] # saving cloud base height
#
# =============================================================================
# calculating cloud base , cloud top and cloud thickness for all clouds and for pbl clouds
clouds, PBLclouds = f_calculateCloudBaseTopThickness(cloudMask, datetime_ICON, height2, humanInfo)
# deriving lowest cloud base and corresponding cloud top for PBL clouds
CBarr = np.zeros(dimTime)
CBarr.fill(np.nan)
CTarr = np.zeros(dimTime)
CTarr.fill(np.nan)
iPBL = 0
for itime in range(dimTime):
if iPBL < len(PBLclouds.time.values):
if clouds.time.values[itime] == PBLclouds.time.values[iPBL]:
CBarray = PBLclouds.cloudBase.values[iPBL, :]
if CBarray.size - np.count_nonzero(np.isnan(CBarray)) != 0:
minCB = np.nanmin(PBLclouds.cloudBase.values[iPBL, :])
CBarr[itime] = minCB
indexLevelMin = np.nanargmin(PBLclouds.cloudBase.values[iPBL, :])
CTarr[itime] = PBLclouds.cloudTop[iPBL, indexLevelMin]
iPBL = iPBL + 1
print('cloud base and cloud top for ICON-LEM calculated ')
# ------------------------------------------------------------------
# ---- calculating potential temperature and equivalent potential temperature
# ------------------------------------------------------------------
theta = np.zeros((dimTime, dimHeight))
theta_e = np.zeros((dimTime, dimHeight))
theta.fill(np.nan)
theta_e.fill(np.nan)
for iTime in range(dimTime):
for iHeight in range(dimHeight):
if height[iHeight] < Hsurf:
theta[iTime, iHeight] = 0.
else:
theta[iTime, iHeight] = T[iTime, iHeight] * (float(P_0)/float(P[iTime, iHeight]))**(const)
if verboseFlag == 1:
print('potential temperature calculated')
for iTime in range(dimTime):
for iHeight in range(dimHeight):
lv = (2500.-2.42*(T[iTime, iHeight]-273.15))*1000. # latent heat of vaporization in J/kg
theta_e[iTime, iHeight] = theta[iTime, iHeight]+(lv*r[iTime, iHeight]/Cp)* (np.power(100000./P[iTime, iHeight], Rl/Cp)) # equivalent potential temperature in K
if verboseFlag == 1:
print('equivalent potential temperature calculated')
# ------------------------------------------------------------------
# --- Calculating Boundary layer height using the richardson number derivation according to Seidel Et al, 2010
# ------------------------------------------------------------------
device = 'mod'
PBLHeightArrRN = f_calcPblHeightRN(thetaV,zonalWind,merWind,height2,time, device)
if verboseFlag == 1:
print('height of the PBL (RN) calculated')
# ------------------------------------------------------------------
# --- calculation of the variance, std, skewness of the vertical velocity using a running mean window
# ------------------------------------------------------------------
timeWindowSk = modelInputParameters['timeWindowSkVar']
runningWindow = modelInputParameters['runningWindowVar']
resultDyn = f_runningMeanSkewnessVarianceStd_W(time, timeWindowSk, runningWindow, height2, vertWind)
# output of the function : varianceW, stdWmatrix, SKmatrix
varianceWmatrix = resultDyn[0]
stdWmatrix = resultDyn[1]
SKmatrix = resultDyn[2]
if verboseFlag == 1:
print('variance, std and skewness of w calculated')
print('std max = '+str(np.nanmax(stdWmatrix)))
# ------------------------------------------------------------------
# --- Calculating Boundary layer height using the threshold on variance of w ()
# ------------------------------------------------------------------
device = 'mod'
sigmaW = stdWmatrix
sigmaThreshold = modelInputParameters['SigmaWThresStd'] # m/s, threshold for std of w from Schween et al, 2014.AMT
PBLHeightArrTW = f_calcPblHeightTW(sigmaW,sigmaThreshold,height2,time, device)
if verboseFlag == 1:
print('height of the PBL (TW) calculated')
# ------------------------------------------------------------------
# --- Calculating variance over the timewindow using running mean
# ------------------------------------------------------------------
#timewindow = modelInputParameters['timewindowVar']
#varianceWmatrix = f_calcWvariance(vertWind,time,height2,timewindow)
#if verboseFlag == 1:
# print('variance of vertical velocity calculated')
# ------------------------------------------------------------------
# --- calculation of the connection of the turbulence to the surface.
# ------------------------------------------------------------------
#Turbulence is connected to the surface if checks if variance at 200 m of height is greater than 0.03 for turbulence
# calculating the time serie of difference of the sigmaW and the threshold value at 200 m height
deltaSigma = np.subtract(varianceWmatrix, 0.03)[:,f_closest(height,200.)]
connection2Surface = [] # array indicating connection of the turbulence to the surface
# calculating connection to the surface. =0 ( not connected, if sigmaW(200)-sigmaGround)<0,
# =1 (connected thus turbulent, if sigmaW(200)-sigmaGround)>0)
for itime in range(dimTime):
if deltaSigma[itime] < 0.:
connection2Surface.append(0)
else:
connection2Surface.append(1)
if verboseFlag == 1:
print('connection of turbulence with the surface calculated')
# ------------------------------------------------------------------
# ---- calculation of the stability array
# ------------------------------------------------------------------
stabilityArr = []
# difference of temperature between 150m and closest level to surface
deltaT = np.subtract(T, T[f_closest(height,Hsurf),:])[:,f_closest(height,150.)]
for itime in range(dimTime):
#print(Tarray[indRef]-Tarray[indGround])
if deltaT[itime] < 0.3:
stabilityArr.append(1)
else:
stabilityArr.append(0)
if verboseFlag == 1:
print('stability at the surface calculated')
# ------------------------------------------------------------------
# --- Calculation of wind shear as done for PBL ( running mean over 30 min of sqrt(Delta U^2 + delta V^2))/delta H
# where variations are calculated over 5 range gates
# ------------------------------------------------------------------
windData = f_calcWindSpeed_Dir(datetime_ICON, height2, zonalWind, merWind)
windSpeed = windData['windSpeed']
windDirection = windData['windDirection']
# =============================================================================
# --- calculating shear of horizontal wind
u_rm = np.zeros((len(datetime_ICON), len(height2)))
v_rm = np.zeros((len(datetime_ICON), len(height2)))
# --- defining running mean values of zonal and meridional wind
for indH in range(0,len(height2)):
zonal = pd.Series(zonalWind[:,indH])
mer = pd.Series(merWind[:,indH])
#u_rm[:,indH] = pd.rolling_mean(zonalWind[:,indH], window=200)
#v_rm[:,indH] = pd.rolling_mean(merWind[:,indH], window=200)
u_rm[:,indH] = zonal.rolling(200).mean()
v_rm[:,indH] = mer.rolling(200).mean()
#
# calculating wind shear and horizontal wind
shear_ICON = np.zeros((len(datetime_ICON), len(height2)))
for indT in range(0,len(datetime_ICON)):
for indH in range(0,len(height2)):
if (indH < 2.) or (indH > len(height2)-3):
shear_ICON[indT, indH] = 0.
else:
deltaV = (np.absolute(v_rm[indT, indH+2] - v_rm[indT, indH-2]))**2
deltaU = (np.absolute(u_rm[indT, indH+2] - u_rm[indT, indH-2]))**2
deltaH = np.absolute(height[indH+2] - height[indH-2])
shear_ICON[indT, indH] = (np.sqrt(deltaU + deltaV))/deltaH
# =============================================================================
if verboseFlag == 1:
print('horizontal wind speed, direction and shear calculated')
# ------------------------------------------------------------------
# ----calculating boundary layer classification (version from submitted paper)
# ------------------------------------------------------------------
ylim = np.repeat(3000, dimTime) # defining array of heights up to which PBL classification is calculated
gradWindThr = 0.01
SigmaWThres = 0.2
outputClass = f_PBLClass(datetime_ICON, \
height2, \
gradWindThr, \
SigmaWThres, \
ylim, \
cloudMask, \
varianceWmatrix, \
SKmatrix, \
stabilityArr, \
connection2Surface, \
shear_ICON, \
CBarr)
PBLclass = outputClass[0]
if verboseFlag == 1:
print('PBL classification calculated')
if debuggingFlag == 1:
print('dimensions of PBL class')
print(np.shape(PBLclass))
# =============================================================================
# # plotting classification
# fig, ax = plt.subplots(figsize=(10,4))
# ax.xaxis.set_major_formatter(mdates.DateFormatter("%H:%M"))
# ax.xaxis.set_minor_formatter(mdates.DateFormatter("%H:%M"))
# ax.spines["top"].set_visible(False)
# ax.spines["right"].set_visible(False)
# ax.get_xaxis().tick_bottom()
# ax.get_yaxis().tick_left()
# ax.xaxis_date()
# cax = ax.pcolormesh(datetime_ICON, height2, PBLclass.transpose(), vmin=0., vmax=6., cmap=plt.cm.get_cmap("jet", 7))
# ax.set_ylim(Hsurf,3000.) # limits of the y-axes
# #ax.set_xlim(0,24) # limits of the x-axes
# ax.set_title("PBL classification", fontsize=14)
# ax.set_xlabel("time [UTC]", fontsize=12)
# ax.set_ylabel("height [m]", fontsize=12)
# cbar = fig.colorbar(cax, ticks=[0, 1, 2, 3, 4, 5, 6], orientation='vertical')
# cbar.ticks=([0,1,2,3,4,5,6])
# cbar.ax.set_yticklabels(['no class','in cloud','non turb','cloud driven','convective', 'intermittent','wind shear'])
# cbar.set_label(label="PBL type",size=12)
# cbar.ax.tick_params(labelsize=12)
# cbar.aspect=20
# plt.savefig(pathDebugFig+'PBLclassification_iconlem_'+date+'.png', format='png')
# =============================================================================
# ------------------------------------------------------------------
# --- calculation of the LCL
# ------------------------------------------------------------------
# determining P, T and RH at the surface
Psurf = data.variables['P_SFC'][:].copy()
Tsurf = data.variables['T2M'][:].copy()
RHsurf = RH[:,149]
LCLarray = []
from myFunctions import lcl
for iTime in range(dimTime):
LCLarray.append(lcl(Psurf[iTime],Tsurf[iTime],RHsurf[iTime]/100.))
if verboseFlag == 1:
print('LCL calculated')
# ------------------------------------------------------------------
# calculate LTS index for lower tropospheric stability (Wood and Bretherton, 2006)
# ------------------------------------------------------------------
LTS = np.zeros(dimTime)
H700 = np.zeros(dimTime)
Pthr = 700 * 100. # Pressure level of 700 Hpa used as a reference
# calculating height of the surface
indSurf = 146# f_closest(height,Hsurf)
for iTime in range(dimTime):
indP700 = f_closest(P[iTime,:],Pthr)
LTS[iTime] = theta[iTime, indP700] - theta[iTime, indSurf]
H700[iTime] = height[indP700]
if verboseFlag == 1:
print('LTS calculated')
#print(theta[4500, indP700])
#print(theta[4500, indSurf])
#print(theta[4500, :])
# ------------------------------------------------------------------
# ---- calculating liquid potential temperature
# ------------------------------------------------------------------
theta_liquid = np.zeros((dimTime, dimHeight))
theta_liquid.fill(np.nan)
for iTime in range(dimTime):
for iHeight in range(dimHeight):
if height[iHeight] < Hsurf:
theta_liquid[iTime, iHeight] = 0.
else:
theta_liquid[iTime, iHeight] = theta[iTime, iHeight] - (Lv/Cp)* Qc[iTime, iHeight]
if verboseFlag == 1:
print('liquid potential temperature calculated')
# ------------------------------------------------------------------
# ------- saving mean outputs as ncdf file
# ------------------------------------------------------------------
f = nc4.Dataset(pathOut+'icon_lem_derivedproperties'+date+'.nc','w', format='NETCDF4') # creates a netCDF file for writing
tempgrp = f.createGroup('Temp_data') # creates a group: A netCDF group is basically a directory or folder within the netCDF dataset
# specify dimensions of the data ( each dimension of multidimensiona array needs to be given a name and a length)
tempgrp.createDimension('dimH', len(height2)) # dimension for height
tempgrp.createDimension('dimHlong', len(height)) # dimension for height
tempgrp.createDimension('dimHsurf', 1) # dimension for scalar values
tempgrp.createDimension('dimT', len(datetime_ICON)) # dimension for time
tempgrp.createDimension('NclassesPBL', 8) # dimension for the number of cloud layers found
tempgrp.createDimension('dimHlarger', len(height)) # dimension for height
tempgrp.createDimension('nchar', 5)
# preallocating netCDF variables for data storage
varHeight2 = tempgrp.createVariable('height2', 'f4', 'dimH')
varHeight = tempgrp.createVariable('height', 'f4', 'dimHlong')
vardomain = tempgrp.createVariable('domain', 'S1', 'nchar')
vartime = tempgrp.createVariable('datetime_ICON', 'f4', 'dimT')
varLTS = tempgrp.createVariable('LTS', 'f4', 'dimT')
varPBLheight = tempgrp.createVariable('PBLHeightArrRN', 'f4', 'dimT')
varPBLheight2 = tempgrp.createVariable('PBLHeightArrTW', 'f4', 'dimT')
varCloudLayers = tempgrp.createVariable('NcloudLayers', 'f4', 'dimT')
varHsurf = tempgrp.createVariable('HeightSurface', 'f4', 'dimHsurf')
varLCL = tempgrp.createVariable('LCLarray', 'f4', 'dimT')
varLWP = tempgrp.createVariable('LWP', 'f4', 'dimT')
varIWV = tempgrp.createVariable('IWV', 'f4', 'dimT')
varLHFL = tempgrp.createVariable('LHFL', 'f4', 'dimT')
varSHFL = tempgrp.createVariable('SHFL', 'f4', 'dimT')
varLWSF = tempgrp.createVariable('LWSurfFlux', 'f4', 'dimT')
varSWSF = tempgrp.createVariable('SWSurfFlux', 'f4', 'dimT')
# PBL class and connected flags, LTS clouds, SW clouds, PBL height, CB height
varPBL_class = tempgrp.createVariable('PBLclass', 'f4', ('dimT','dimH'))
varflagCloud = tempgrp.createVariable('flagCloud', 'f4', ('dimT','dimH'))
varQc = tempgrp.createVariable('Qc', 'f4', ('dimT','dimH'))
varQi = tempgrp.createVariable('Qi', 'f4', ('dimT','dimH'))
varflagTurb = tempgrp.createVariable('flagTurb', 'f4', ('dimT','dimH'))
varflagcloudDriven = tempgrp.createVariable('flagcloudDriven', 'f4', ('dimT','dimH'))
varflagInstability = tempgrp.createVariable('flagInstability', 'f4',('dimT','dimH'))
varflagWindShear = tempgrp.createVariable('flagWindShear', 'f4', ('dimT','dimH'))
varflagSurfDriven = tempgrp.createVariable('flagSurfaceDriven', 'f4', ('dimT','dimH'))
varvarianceW = tempgrp.createVariable('varianceW', 'f4', ('dimT','dimH'))
varHwind = tempgrp.createVariable('windSpeed', 'f4', ('dimT','dimH'))
varWindDirection = tempgrp.createVariable('windDirection', 'f4', ('dimT','dimH'))
varShearHwind = tempgrp.createVariable('shearHwind', 'f4', ('dimT','dimH'))
varcloudMask = tempgrp.createVariable('cloudMask', 'f4', ('dimT','dimH'))
varthetaPot = tempgrp.createVariable('theta', 'f4', ('dimT','dimH'))
varskewnessW = tempgrp.createVariable('skewnessW', 'f4', ('dimT','dimH'))
varstdWmatrix = tempgrp.createVariable('stdWmatrix', 'f4', ('dimT','dimH'))
varMixingRatio = tempgrp.createVariable('r', 'f4', ('dimT','dimH'))
varthetaL = tempgrp.createVariable('theta_liquid', 'f4', ('dimT','dimH'))
varthetaPot_e = tempgrp.createVariable('theta_e', 'f4', ('dimT','dimH'))
varw = tempgrp.createVariable('vertWind', 'f4', ('dimT','dimHlarger'))
varP = tempgrp.createVariable('P', 'f4', ('dimT','dimH'))
varRH = tempgrp.createVariable('RH', 'f4', ('dimT','dimH'))
varQ = tempgrp.createVariable('q', 'f4', ('dimT','dimH'))
varT = tempgrp.createVariable('T', 'f4', ('dimT','dimH'))
varMerWind = tempgrp.createVariable('merWind', 'f4', ('dimT','dimH'))
varZonWind = tempgrp.createVariable('zonalWind', 'f4', ('dimT','dimH'))
varRho = tempgrp.createVariable('rho', 'f4', ('dimT','dimH'))
varT_surf = tempgrp.createVariable('TempSurf', 'f4', 'dimT')
# passing data into the variables
varHeight2[:] = height2
varHeight[:] = height
vardomain = domSel
vartime[:] = time
varLTS[:] = LTS
varPBLheight[:] = PBLHeightArrRN
varPBLheight2[:] = PBLHeightArrTW
varHsurf = Hsurf
varLCL[:] = LCLarray
varLWP[:] = LWP
varIWV[:] = IWV
varLHFL[:] = LHFL
varSHFL[:] = SHFL
varLWSF[:] = LWSurfFlux
varSWSF[:] = SWSurfFlux
varPBL_class[:,:] = PBLclass
varflagCloud[:] = outputClass[1]
varflagTurb[:] = outputClass[2]
varflagcloudDriven[:] = outputClass[3]
varflagInstability[:] = outputClass[4]
varflagWindShear[:] = outputClass[5]
varflagSurfDriven[:] = outputClass[6]
varvarianceW[:,:] = varianceWmatrix
varHwind[:,:] = windSpeed
varWindDirection[:,:] = windDirection
varShearHwind[:,:] = shear_ICON
varcloudMask[:,:] = cloudMask
varthetaPot[:,:] = theta
varskewnessW[:,:] = SKmatrix
varstdWmatrix[:,:] = stdWmatrix
varMixingRatio[:,:] = r
varthetaL[:,:] = theta_liquid
varthetaPot_e[:,:] = theta_e
varw[:,:] = vertWind
varP[:,:] = P
varRH[:,:] = RH
varQ[:,:] = q
varT[:,:] = T
varMerWind[:,:] = merWind
varZonWind[:,:] = zonalWind
varRho[:,:] = rho
varQc[:,:] = Qc
varQi[:,:] = Qi
varT_surf[:] = TempSurf
#Add global attributes
f.description = "icon lem model derived physical quantities and PBL classification"
f.history = "Created by Claudia Acquistapace cacquist@meteo.uni-koeln.de - University of Cologne"
#Add local attributes to variable instances
varPBL_class.units = '1=in cloud, 2=non turb, 3=cloud driven, 4=convective, 5=intermittent, 6=wind shear'
vartime.units = 'seconds since '+date[0:4]+'-'+date[4:6]+'-'+date[6:8]+' 00:00:00'
# closing ncdf file
f.close()
print('File Saved ') | ClauClouds/PBL_paper_repo | f_processModelOutput.py | f_processModelOutput.py | py | 29,461 | python | en | code | 1 | github-code | 36 |
16630075934 | import random
from GradientDescent.data_utils import get_points
def current_loss_4_MBGD(w_current, b_current, x, y, seed_list):
    """Return the mean squared error of the line y = w*x + b over the
    sampled indices in `seed_list`."""
    total = sum((w_current * x[i] + b_current - y[i]) ** 2 for i in seed_list)
    return total / float(len(seed_list))
def step_gradient(w_current, b_current, x, y, lr, seed_list):
    """Apply one gradient-descent step for the MSE loss of y = w*x + b,
    averaged over the mini-batch indices, and return the updated (w, b)."""
    batch_size = float(len(seed_list))
    # residual (prediction error) paired with its input for each sampled point
    residuals = [(w_current * x[i] + b_current - y[i], x[i]) for i in seed_list]
    grad_w = sum(err * xi for err, xi in residuals)
    grad_b = sum(err for err, _ in residuals)
    return (w_current - lr * grad_w / batch_size,
            b_current - lr * grad_b / batch_size)
def gradient_descent_runner(w_starter, b_starter, x, y, _length, lr, steps, batch_size):
    """Run `steps` gradient updates of (w, b) on one fixed random mini-batch.

    Note: the batch is sampled once up front, not re-sampled per step.
    """
    batch = random.sample(range(_length), batch_size)
    w, b = w_starter, b_starter
    for _ in range(steps):
        w, b = step_gradient(w, b, x, y, lr, batch)
    return w, b
def main():
    """Entry point: load points from data.csv and run mini-batch gradient
    descent, reporting the loss before and after training."""
    lr = 0.0001
    w_starter = 0.0001
    b_starter = 0.0001
    steps = 2000
    filepath = 'data.csv'
    x, y, _length = get_points(filepath)
    batch_size = 4
    # fixed batch of sample indices, used only to report the loss
    seed_list = random.sample(range(_length), batch_size)
    #print(seed_list)
    start_loss = current_loss_4_MBGD(w_starter, b_starter, x, y, seed_list)
    print("Starting mini-batch gradient descent at w = {0}, b = {1}, loss = {2}".format(
        w_starter, b_starter, start_loss))
    print("Running...")
    w_current, b_current = gradient_descent_runner(
        w_starter, b_starter, x, y, _length, lr, steps, batch_size)
    end_loss = current_loss_4_MBGD(w_current, b_current, x, y, seed_list)
    print("After {0} iterations w = {1}, b = {2}, loss = {3}".format(
        steps, w_current, b_current, end_loss))
# run the demo only when executed as a script
if __name__ == '__main__':
    main()
74481651624 | from copy import deepcopy
from config.irl_config import IRLConfig
from config.rl_config import RLConfig
from env_design.envs import ENV_MAKERS
class ConfigBuilder(dict):
    """Dict-backed container that assembles RL and IRL run configurations.

    The builder itself is a dict: generic run options (num_gpus, num_workers,
    env) are stored as items, while algorithm-specific config objects are kept
    as attributes and merged in by the ``build_*`` methods.
    """

    def __init__(
            self,
            num_gpus=0,
            num_workers=0,
            rl_algo=None,
            irl_algo=None,
            env=None,
            # additional overriding args
            **kwargs
    ):
        super(ConfigBuilder, self).__init__()
        self.rl_algo = rl_algo
        self.irl_algo = irl_algo
        self.rl_config = RLConfig(env, rl_algo, irl_algo)
        self.irl_config = IRLConfig(env, irl_algo)
        self.update(
            num_gpus=num_gpus,
            num_workers=num_workers,
            env=env,
        )
        # raw CLI overrides; applied selectively by the build_* methods below
        self.cli_args = kwargs

    def build_base_rl(
            self,
            env_params,
            **kwargs,
    ):
        """Return the base RL config dict, merged (in order) with this
        builder's items, the env params, extra kwargs, and any CLI override
        whose key already exists in the config."""
        # NOTE(review): this uses self.rl_config.rl_config, while build() calls
        # self.rl_config.pre_build() directly — confirm which is intended.
        base = self.rl_config.rl_config.pre_build()
        base.update(
            **self
        )
        if env_params is not None:
            base.update(env_config=env_params.get())
        base.update(**kwargs)
        # CLI args only override keys that already exist in the config
        for cli_arg in self.cli_args:
            if cli_arg in base:
                base[cli_arg] = self.cli_args[cli_arg]
        return base

    def build_base_irl(
            self
    ):
        """Return the IRL config, overridden by this builder's items and
        matching CLI args, then post-processed."""
        base = self.irl_config.pre_build()
        base.update(
            **self
        )
        for cli_arg in self.cli_args:
            if cli_arg in base:
                base[cli_arg] = self.cli_args[cli_arg]
        base.postprocess()
        return base

    def build(
            self,
            env_params=None,  # Mandatory, to ensure proper initialization
            *args,
            **kwargs
    ):
        """Merge the RL and IRL configs into a deep copy of this builder and
        return the result as a plain dict."""
        new = deepcopy(self)
        rl = self.rl_config.pre_build()
        if env_params is not None:
            rl.update(
                env_config=env_params.get()
            )
        irl = self.build_base_irl()
        new.update(**rl)
        new.update(**irl)
        return dict(new)
| Ojig/Environment-Design-for-IRL | ed_airl/config/builder.py | builder.py | py | 2,019 | python | en | code | 0 | github-code | 36 |
73744164585 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals, print_function
from django.db import migrations
import ielex.lexicon.models as models
def forwards_func(apps, schema_editor):
    """Forward data migration: recompute clade links for every language."""
    print('Updating clades for all languages..')
    Language = apps.get_model('lexicon', 'Language')
    for language in Language.objects.all():
        models.Language.updateClades(language)
def reverse_func(apps, schema_editor):
    """Reverse data migration: remove every LanguageClade row created by
    forwards_func."""
    LanguageClade = apps.get_model('lexicon', 'LanguageClade')
    print('Deleting all LanguageClade entries..')
    # A Manager has no .delete(); bulk deletion must go through a QuerySet.
    # The original `objects.delete()` raised AttributeError on reversal.
    LanguageClade.objects.all().delete()
class Migration(migrations.Migration):
    # data migration: must run after 0048 so the schema it touches exists
    dependencies = [('lexicon', '0048_auto_20160502_1338')]

    operations = [
        migrations.RunPython(forwards_func, reverse_func),
    ]
| lingdb/CoBL-public | ielex/lexicon/migrations/0049_update_languageClade.py | 0049_update_languageClade.py | py | 748 | python | en | code | 3 | github-code | 36 |
72596157545 | from command.management_commands.management_command import ManagementCommand
from terminal.confirm import Confirm
class DelCommand(ManagementCommand):
    """Delete a DNA sequence from the collection after user confirmation."""

    def __init__(self):
        super().__init__()
        self.__confirm = Confirm()

    def execute(self, *args):
        """Delete the sequence named/identified by args[0]; ask first."""
        if not args:
            raise Exception("Exception: sequence id or name is required")
        target = self.get_sequence(args[0])
        prompt = "Do you really want to delete {}: {}?".format(
            target.get_name(), target.get_dna_sequence())
        if not self.__confirm.run(prompt):
            return "Delete cancelled"
        self.get_dna_collection().remove_sequence(target)
        return "Deleted: {}".format(target)
| AyalaGottfried/DNA-Analyzer-System | command/management_commands/del_command.py | del_command.py | py | 851 | python | en | code | 2 | github-code | 36 |
35629134265 | from get_txt1 import GroundTruth
import time
import os
import csv
def down_sample_txt(folder_path, down_sample_rate=0.1, out_dir='./part_label/'):
    """Down-sample every CSV found under `folder_path` (recursively).

    Keeps one row in every int(1/down_sample_rate) rows and writes the first
    three columns of each kept row, comma-separated, to
    ``<out_dir>/<csv basename>.txt``.

    Fixes over the original: the csv file handle is closed (was leaked),
    paths are joined with os.path.join (plain concatenation broke for
    sub-directories yielded by os.walk), and the output directory is created
    if missing.  `out_dir` defaults to the previously hard-coded location,
    so existing callers are unaffected.
    """
    skip_number = int(1 / down_sample_rate)
    os.makedirs(out_dir, exist_ok=True)
    for dirpath, _dirnames, filenames in os.walk(folder_path):
        for file_name_ori in filenames:
            if 'csv' not in file_name_ori:
                continue
            src_path = os.path.join(dirpath, file_name_ori)
            print(src_path)
            content = []
            with open(src_path, 'r', newline='') as csv_file:
                # 1-based row counter: keep every skip_number-th row
                for count, item in enumerate(csv.reader(csv_file), start=1):
                    if count % skip_number == 0:
                        content.append(item)
            name = os.path.join(out_dir, file_name_ori.split('.')[0])
            print(name)
            with open(name + '.txt', 'w') as f:
                for row in content:
                    f.write(str(row[0]) + ',' + str(row[1]) + ',' + str(row[2]) + '\n')
def main():
    """Down-sample all label CSVs under ./label/ into ./part_label/."""
    folder_path = './label/'
    down_sample_txt(folder_path)
    # --- earlier ground-truth matching run, kept for reference ---
    # data_path='./CODE/Downloads/2014-05-06-12-54-54/'
    # files_path=[]
    # header='./CODE/Downloads/'
    # files_path.append('2014-12-02-15-30-08')
    # files_path.append('2014-12-10-18-10-50')
    # files_path.append('2015-11-13-10-28-08')
    # for data_path_ori in files_path:
    #     data_path=header+data_path_ori+'/'
    #     print('processing ',data_path)
    #     data1_ground_truth=GroundTruth()
    #     data1_ground_truth.load_data(data_path)
    #     data1_ground_truth.save_match_file_list('./'+data_path_ori+'.csv')
    #     # print(data1_ground_truth.data_list1)
    #     print('number of useful data is',len(data1_ground_truth.match_file_list))


if __name__ == '__main__':
    start_time = time.time()
    main()
    end_time = time.time()
    # report elapsed wall-clock time ("花费时间" = "time spent")
    print('花费时间 ', (end_time-start_time), 's')
42778925613 | import json
import logging
from io import BytesIO
from typing import Optional
import pandas as pd
import requests
from pydantic import Field, SecretStr
from toucan_connectors.common import ConnectorStatus
from toucan_connectors.toucan_connector import ToucanConnector, ToucanDataSource
class NetExplorerDataSource(ToucanDataSource):
    # path of the spreadsheet/CSV inside NetExplorer, e.g. 'dir/sub/file.xlsx'
    file: str
    # sheet name for Excel files; defaults to 0 (the first sheet)
    sheet: Optional[str] = 0
class NetExplorerConnector(ToucanConnector):
    """Connector that downloads a CSV or Excel file stored on a NetExplorer
    instance and loads it into a pandas DataFrame."""

    data_source_model: NetExplorerDataSource

    # NOTE(review): pydantic's Field expects `title=`; `Title=` is passed as an
    # unrecognised keyword — confirm the UI label is actually picked up.
    instance_url: str = Field(
        None,
        Title='Instance URL',
        placeholder='exemple.netexplorer.pro',
    )
    user: str
    password: SecretStr

    def _retrieve_token(self):
        """POST credentials to /api/auth and return the bearer token."""
        login_url = f'https://{self.instance_url}/api/auth'
        data = json.dumps({'user': self.user, 'password': self.password.get_secret_value()})
        headers = {'Content-Type': 'application/json'}
        resp = requests.post(login_url, data=data, headers=headers)
        return resp.json()['token']

    def _retrieve_folders(self, token):
        """Return the instance's full folder tree (depth=-1) as parsed JSON."""
        folders_url = f'https://{self.instance_url}/api/folders?depth=-1'
        headers = {'Authorization': f'Bearer {token}'}
        resp = requests.get(folders_url, data={}, headers=headers)
        return resp.json()

    def _retrieve_file_id(self, folders, data_source):
        """Walk the folder tree along data_source.file and return the file id.

        Raises ValueError when any path component (or the file itself) is
        missing from the tree.
        """
        basedir = data_source.file.split('/')[0]
        path = data_source.file.split('/')[1:]
        _id = None

        def _search(iterate_on, compare_to, for_id=False):
            # returns None when nothing matches, which trips the asserts below
            for element in iterate_on:
                if element['name'] == compare_to:
                    return element['id'] if for_id else element['content']

        try:
            # Search among base directories
            folders = _search(folders, basedir)
            # Search among paths
            for elem in path:
                if elem.endswith(('xlsx', 'xls', 'csv')):
                    _id = _search(folders['files'], elem, True)
                    assert _id
                else:
                    folders = _search(folders['folders'], elem)
                    assert folders
        except AssertionError:
            raise ValueError('Unable to find the file')
        return _id

    def _retrieve_file(self, token, _id):
        """Download the file content and return it as an in-memory buffer."""
        download_url = f'https://{self.instance_url}/api/file/{_id}/download'
        headers = {'Authorization': f'Bearer {token}'}
        resp = requests.get(download_url, data={}, headers=headers)
        return BytesIO(resp.content)

    def _retrieve_data(self, data_source: NetExplorerDataSource) -> pd.DataFrame:
        """Fetch the configured file and parse it into a DataFrame."""
        logging.getLogger(__name__).debug('_retrieve_data')
        # normalize user input: bare host and a path without surrounding slashes
        self.instance_url = self.instance_url.replace('https://', '').strip('/')
        data_source.file = data_source.file.strip('/')
        token = self._retrieve_token()
        folders = self._retrieve_folders(token)
        _id = self._retrieve_file_id(folders, data_source)
        data = self._retrieve_file(token, _id)
        df = pd.DataFrame()
        if data_source.file.endswith('csv'):
            df = pd.read_csv(data)
        else:
            df = pd.read_excel(data, sheet_name=data_source.sheet)
        return df

    def get_status(self) -> ConnectorStatus:
        """
        Test the Net Explorer's connexion.
        :return: a ConnectorStatus with the current status
        """
        try:
            self._retrieve_token()
            return ConnectorStatus(status=True)
        except Exception:
            return ConnectorStatus(status=False, error='Unable to connect')
| ToucanToco/toucan-connectors | toucan_connectors/net_explorer/net_explorer_connector.py | net_explorer_connector.py | py | 3,536 | python | en | code | 16 | github-code | 36 |
4108381477 | from sys import stdin
# Reads T test cases from stdin; each case is a family tree followed by a
# generation number `gen`.  For each person, counts the descendants exactly
# `gen` generations below, then prints the top descendant counts (whole tie
# groups, names sorted alphabetically) until at least 3 names are printed
# or 3 count groups are exhausted.
input = stdin.readline  # shadow builtin input() with the faster readline
lines = int(input())
for w in range(1, lines + 1):
    tree = {}
    num, gen = [int(x) for x in input().split()]
    for _ in range(num):
        # each line: name, child count, then the children's names
        person, number, *descendants = input().split()
        tree[person] = descendants
    print(f"Tree {w}:")
    # fit maps descendant-count -> list of people with that count
    fit = {}
    for person in tree:
        # walk the tree level by level down to generation `gen`
        queue = [[person]]
        counter = 0
        people = 0
        while queue and counter < gen:
            counter += 1
            queued = queue.pop()
            added = []
            for x in queued:
                if x in tree:
                    added.extend(tree[x])
                    if counter == gen:
                        # only count at the target generation
                        people += len(tree[x])
            if added:
                queue.append(added)
        if people > 0:
            if people not in fit:
                fit[people] = []
            fit[people].append(person)
    keys = list(fit.keys())
    keys.sort(reverse=True)
    printed = 0
    for i in range(0, min(3, len(keys))):
        if printed >= 3:
            break
        for x in sorted(fit[keys[i]]):
            printed += 1
            print(f"{x} {keys[i]}")
    if w != lines:
        # blank line between consecutive test cases
        print()
14862191628 | # console_test.py
''' This module is to test the functionality of the TVShow class in a console
This is not the main executable, use main_gui.py as "__main__"
'''
from myLib.tv_class import TVShow, sqlite3
def menu():
    '''Print the application banner for the console session.'''
    banner = (
        'Joseph Fitzgibbons, FitzgibbonsP13, Final Project',
        'Welcome to my SUPER AMAZING EPISODE SHUFFLER!!!!',
    )
    for line in banner:
        print(line)
    # instructions and commands
def show_db():
    '''Print every (show, season, episode) row stored in showsWatched.

    Fix: `conn` is initialized to None so the `finally` block no longer
    raises NameError when TVShow.db_path() or sqlite3.connect() fails
    before `conn` is assigned.
    '''
    conn = None
    try:
        path = TVShow.db_path()  # this is a call to a "static" class method
        conn = sqlite3.connect(path)
        curs = conn.cursor()
        curs.execute('SELECT * FROM showsWatched')
        rows = curs.fetchall()
        conn.commit()
        print()
        if not rows:
            print('Database is empty.')
        for row in rows:
            # show names are stored with '+' in place of spaces
            print('Show: {} Season {}, Episode {}'.format(row[0].replace('+', ' '), row[1], row[2]))
    except sqlite3.Error as _e:
        print(str(_e))
    except Exception as _e:
        print(str(_e))
    finally:
        if conn is not None:
            conn.close()
def full_clear_db():
    '''Delete every row from the showsWatched table.

    Fix: `conn` is initialized to None so the `finally` block no longer
    raises NameError when TVShow.db_path() or sqlite3.connect() fails
    before `conn` is assigned.
    '''
    conn = None
    try:
        path = TVShow.db_path()
        conn = sqlite3.connect(path)
        curs = conn.cursor()
        curs.execute('DELETE FROM showsWatched')
        conn.commit()
    except sqlite3.Error as _e:
        print(_e)
    finally:
        if conn is not None:
            conn.close()
def main():
    '''Main interactive loop: prompt for a show name or a command and
    report the next season/episode combination to watch.'''
    menu()
    show = TVShow()
    answer = 'yes'
    while answer == 'yes' or answer == 'y':
        print('\nEnter that name of a show to watch, or enter "clear" to '\
            'delete show from database,\n"show list" to see '\
            'everything in the database, or "exit".'
            )
        user_input = input('>>>> ')
        # command keywords first; anything else is treated as a show name
        if user_input == 'show list':
            show_db()
            continue
        elif user_input == 'clear':
            # clear_db() returns an error string on failure
            flag = show.clear_db()
            if isinstance(flag, str):
                print(flag)
            else:
                print('Show successfully cleared from the database!')
            continue
        elif user_input == 'delete':
            full_clear_db()
            continue
        elif user_input == 'exit':
            break
        show.set_name(user_input)
        # this function will return a string if something went wrong getting everything
        flag = show.get_info()
        if isinstance(flag, str) is True:
            print(flag)
            continue
        combo = show.next_combo()
        if isinstance(combo, str) is True:
            print(combo)
            continue
        else:
            print('Goto Season %s Episode %s' % (combo[0], combo[1]))
        answer = input('Do another? y/n: ')
    print('Goodbye. ~ Joe Fitzgibbons')


if __name__ == '__main__':
    main()
| fitzypop/random-episode | python/proof_of_concept/console_test.py | console_test.py | py | 2,990 | python | en | code | 0 | github-code | 36 |
37373287317 | import math
import torch
from torch import nn
import torch.nn.functional as F
class SelfAttentionLayer(nn.Module):
    '''
    Multi-head scaled dot-product self-attention (no output projection;
    attention dropout is currently disabled — see commented lines).
    '''
    def __init__(self, hidden_size, num_attention_heads, dropout_prob):
        super().__init__()
        self.hidden_size = hidden_size
        self.num_attention_heads = num_attention_heads
        self.attention_head_size = hidden_size // num_attention_heads
        # hidden size must split evenly across heads
        assert self.hidden_size % self.num_attention_heads == 0
        self.query = nn.Linear(self.hidden_size, self.attention_head_size * self.num_attention_heads)
        self.key = nn.Linear(self.hidden_size, self.attention_head_size * self.num_attention_heads)
        self.value = nn.Linear(self.hidden_size, self.attention_head_size * self.num_attention_heads)
        # self.dropout = nn.Dropout(dropout_prob)

    def transpose_for_scores(self, x):
        # (B, L, H*D) -> (B, H, L, D): split the last dim into heads
        new_x_shape = x.size()[:-1] + (self.num_attention_heads, self.attention_head_size)
        x = x.view(*new_x_shape)
        return x.permute(0, 2, 1, 3)

    def compute_qkv(self, hidden_states):
        """Project the input into query, key and value tensors."""
        q = self.query(hidden_states)
        k = self.key(hidden_states)
        v = self.value(hidden_states)
        return q, k, v

    def forward(self, hidden_states, attention_mask=None):
        q, k, v = self.compute_qkv(hidden_states)
        # (B, L, H*D) -> (B, H, L, D)
        query_layer = self.transpose_for_scores(q)
        key_layer = self.transpose_for_scores(k)
        value_layer = self.transpose_for_scores(v)
        # scale queries up front instead of scaling the score matrix
        query_layer = query_layer / math.sqrt(self.attention_head_size)
        # [BSZ, NAT, L, L]
        attention_scores = torch.matmul(query_layer, key_layer.transpose(-1, -2))
        if attention_mask is not None:
            # mask==0 marks padding: push those scores to -1e8 so softmax zeroes them
            attention_scores = attention_scores.float().masked_fill_((1-attention_mask.unsqueeze(1).unsqueeze(1)).to(torch.bool), float(-1e8)) # remove padding token
        # softmax in float32 for stability, then cast back to the value dtype
        attention_probs = F.softmax(attention_scores, dim=-1, dtype=torch.float32).type_as(value_layer)
        # attention_probs = self.dropout(attention_probs)
        context_layer = torch.matmul(attention_probs, value_layer)
        # (B, H, L, D) -> (B, L, H*D): merge the heads back together
        context_layer = context_layer.permute(0, 2, 1, 3).contiguous()
        new_context_layer_shape = context_layer.size()[:-2] + (self.hidden_size,)
        context_layer = context_layer.view(*new_context_layer_shape)
        return context_layer
class FFNIntermediate(nn.Module):
    """Expansion half of the feed-forward block: linear projection from
    hidden_size to intermediate_size followed by GELU."""

    def __init__(self, hidden_size, intermediate_size):
        super().__init__()
        self.dense = nn.Linear(hidden_size, intermediate_size)
        self.intermediate_act_fn = nn.GELU()

    def forward(self, hidden_states):
        return self.intermediate_act_fn(self.dense(hidden_states))
class FFNOutput(nn.Module):
    """Contraction half of the feed-forward block: project back to
    hidden_size, add the residual, then LayerNorm.  Dropout is disabled
    (dropout_prob is kept only for interface compatibility)."""

    def __init__(self, intermediate_size, hidden_size, dropout_prob):
        super().__init__()
        self.dense = nn.Linear(intermediate_size, hidden_size)
        self.LayerNorm = nn.LayerNorm(hidden_size, eps=1e-12)
        # self.dropout = nn.Dropout(dropout_prob)

    def forward(self, hidden_states, input_tensor):
        projected = self.dense(hidden_states)
        return self.LayerNorm(projected + input_tensor)
class FFNLayer(nn.Module):
    """Full position-wise feed-forward block: expand + GELU, then project
    back with residual add and LayerNorm."""

    def __init__(self, hidden_size, intermediate_size, dropout_prob):
        super().__init__()
        self.intermediate_layer = FFNIntermediate(hidden_size, intermediate_size)
        self.output_layer = FFNOutput(intermediate_size, hidden_size, dropout_prob)

    def forward(self, hidden_states):
        expanded = self.intermediate_layer(hidden_states)
        return self.output_layer(expanded, hidden_states)
class TransformerLayer(nn.Module):
    """One encoder layer: self-attention followed by the feed-forward block."""

    def __init__(self, hidden_size, num_attention_heads, intermediate_size, dropout_prob):
        super().__init__()
        self.sa_layer = SelfAttentionLayer(hidden_size, num_attention_heads, dropout_prob)
        self.ffn_layer = FFNLayer(hidden_size, intermediate_size, dropout_prob)

    def forward(self, hidden_states, attention_mask=None):
        attended = self.sa_layer(hidden_states, attention_mask)
        return self.ffn_layer(attended)
15859509196 | # -*- coding: utf-8 -*-
# importing the libraries
from matplotlib.pyplot import text
import yfinance as yf
import pandas as pd
import numpy as np
import os.path
import telegram
pd.options.mode.chained_assignment = None
# pick a stock
wege = yf.Ticker('WEGE3.SA')
# choose the data interval
# NOTE(review): period='id' is not a valid yfinance period — likely '1d'; confirm.
wege_dia = wege.history(period='id', interval='5m')
# keep only the closing price
wege_dia = wege_dia.Close
# convert the Series to a DataFrame
df_wege_dia = pd.DataFrame(wege_dia)
# reset index
df_wege_dia.reset_index(inplace=True)
# take the last traded price
wege_dia_ultimo_preco = df_wege_dia.tail(1)
# rename the columns
wege_dia_ultimo_preco.rename(columns={'Datetime':'data_pregao', 'Close':'preco_fechamento'}, inplace=True)
# normalize the date
wege_dia_ultimo_preco['data_pregao']=pd.to_datetime(wege_dia_ultimo_preco['data_pregao'], format='%Y-%m-%d')
# use the historical data frame, keeping only closing price and trading date
if os.path.isfile('wege.csv'):
    df_wege = pd.read_csv('wege.csv', delimiter=';')
else:
    df = pd.read_csv('all_bovesta.csv', delimiter=';') # put your Bovespa export file here
    df_wege = df[df['silga_acao']=='WEGE3']
    df_wege = df_wege[['data_pregao', 'preco_fechamento']]
# normalize the date
df_wege['data_pregao']=pd.to_datetime(df_wege['data_pregao'], format='%Y-%m-%d')
# select today's rows so they can be recomputed
df_remove = df_wege.loc[(df_wege['data_pregao'] == pd.to_datetime('today').normalize())]
# NOTE(review): this drops *all* rows (df_wege.index), leaving an empty frame;
# the intent was almost certainly df_wege.drop(df_remove.index) — confirm.
df_wege = df_wege.drop(df_wege.index)
# append today's row
df_wege_total = df_wege.append(wege_dia_ultimo_preco)
# adjust today's date
df_wege_total['data_pregao']=pd.to_datetime(df_wege_total['data_pregao'], utc=True).dt.date
df_wege_total.to_csv('wege.csv', sep=';', index=False)
# compute MACD: fast EMA minus slow EMA, plus the 9-period signal line
rapidaMME=df_wege_total.preco_fechamento.ewm(span=12).mean()
lentaMME = df_wege_total.preco_fechamento.ewm(span=26).mean()
MACD= rapidaMME - lentaMME
sinal=MACD.ewm(span=9).mean()
df_wege_total['MACD'] = MACD
df_wege_total['sinal'] = sinal
# re-index by date and drop the data_pregao column
df_wege_total = df_wege_total.set_index(pd.DatetimeIndex(df_wege_total['data_pregao'].values))
df_wege_total = df_wege_total.drop('data_pregao',1)
# flag each row as buy ('C') or sell ('V') from the MACD/signal crossover
df_wege_total['flag']=''
df_wege_total['preco_compra']=np.nan
df_wege_total['preco_venda']=np.nan
for i in range(1, len(df_wege_total.sinal)):
    if df_wege_total['MACD'][i] > df_wege_total['sinal'][i]:
        # NOTE(review): compares against lowercase 'c' but flags are stored as
        # 'C', so this branch never matches and preco_compra is recorded on
        # every buy day, not only on transitions — confirm intent.
        if df_wege_total['flag'][i-1] == 'c':
            df_wege_total['flag'][i]='C'
        else:
            df_wege_total['flag'][i]='C'
            df_wege_total['preco_compra'][i] = df_wege_total['preco_fechamento'][i]
    elif df_wege_total['MACD'][i] < df_wege_total['sinal'][i]:
        if df_wege_total['flag'][i-1] =='V':
            df_wege_total['flag'][i]='V'
        else:
            df_wege_total['flag'][i]='V'
            df_wege_total['preco_venda'][i] = df_wege_total['preco_fechamento'][i]
# look at the last two days' signals
hoje = df_wege_total.flag[-1]
ontem = df_wege_total.flag[-2]
flag= hoje
preco_fechamento = round(df_wege_total.preco_fechamento.tail(1)[-1],2)
print(flag, preco_fechamento)
# NOTE(security): hard-coded bot token and chat id committed to source —
# move them to environment variables and revoke this token.
my_token = '1840232813:AAHxoVmcDWHK3jAxiTWsMqTsiw9vTHaICpY'
chat_id = '-476980685'
def envia_mensagem(msg, chat_id, token=my_token):
    # NOTE(review): python-telegram-bot exposes send_message, not SendMessage;
    # as written this raises AttributeError at call time — confirm intended API.
    bot=telegram.Bot(token = token)
    bot.SendMessage(chat_id = chat_id, text=msg)
msg = f'WEGE3 (WEGE), {flag} preço de fechamento: {preco_fechamento}'
# only notify when yesterday's and today's signals agree
if ontem == hoje:
    envia_mensagem(msg, chat_id, my_token)
| bertuci/compra_e_venda_acoes | bot_MACD/macd_bot.py | macd_bot.py | py | 3,561 | python | pt | code | 1 | github-code | 36 |
38192330286 | import os
from django.conf import settings
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from django.template.loader import get_template
from xhtml2pdf import pisa
from ..models import *
from django.contrib.auth.models import User
from django.contrib.staticfiles import finders
def link_callback(uri, rel):
    """
    Convert HTML URIs to absolute system paths so xhtml2pdf can access those
    resources (images, CSS, fonts) while rendering the PDF.

    Args:
        uri: the src/href value found in the rendered HTML.
        rel: relative path of the document (unused; required by pisa's API).

    Returns:
        An absolute filesystem path, or the unchanged URI when it does not
        live under STATIC_URL/MEDIA_URL (e.g. an external http:// URL).

    Raises:
        Exception: if the URI resolves under STATIC/MEDIA but no file exists.
    """
    # First try the staticfiles finders (works in DEBUG without collectstatic).
    result = finders.find(uri)
    if result:
        if not isinstance(result, (list, tuple)):
            result = [result]
        result = [os.path.realpath(path) for path in result]
        path = result[0]
    else:
        sUrl = settings.STATIC_URL    # Typically /static/
        sRoot = settings.STATIC_ROOT  # Typically /home/userX/project_static/
        mUrl = settings.MEDIA_URL     # Typically /media/
        mRoot = settings.MEDIA_ROOT   # Typically /home/userX/project_static/media/

        if uri.startswith(mUrl):
            path = os.path.join(mRoot, uri.replace(mUrl, ""))
        elif uri.startswith(sUrl):
            path = os.path.join(sRoot, uri.replace(sUrl, ""))
        else:
            # Not a static/media URI: hand it back unchanged.
            return uri

    # make sure that file exists
    if not os.path.isfile(path):
        # BUGFIX: the original message blamed the URI prefix, but this branch
        # is reached when the resolved file is missing on disk.
        raise Exception(
            'media file for URI "%s" does not exist at "%s" '
            '(URI must start with %s or %s)' % (uri, path, settings.STATIC_URL, settings.MEDIA_URL)
        )
    return path
def admission_letter(request):
    """Render the admission-letter template for every registration as a PDF."""
    # Data the template needs: college branding plus all registration rows.
    signature = CollegeSettings.objects.first()
    entry = Registration.objects.all()
    template_path = 'KCHS/registration/admission_letter.html'
    context = {'logo': signature,
               'registration': entry}

    # Render the HTML first, then feed it to pisa for conversion.
    rendered_html = get_template(template_path).render(context)

    # Serve the PDF inline; switch to 'attachment; filename=...' to force a
    # download instead of in-browser display.
    response = HttpResponse(content_type='application/pdf')
    response['Content-Disposition'] = 'filename="FieldApplicationLetter.pdf"'

    conversion = pisa.CreatePDF(rendered_html, dest=response)
    if conversion.err:
        return HttpResponse('We had some errors <pre>' + rendered_html + '</pre>')
    return response
| luggiestar/kahama | KCHS/views/download_pdf_files_views.py | download_pdf_files_views.py | py | 2,550 | python | en | code | 0 | github-code | 36 |
10663838847 | # -*- coding: utf-8 -*-
"""
Created on Tue Apr 26 15:20:29 2016
@author: neo
"""
#cat1 = 'new_candidate2.cat'
#cat1 = 'icrf1.cat'
cat1 = '331_sou.cat'
cat2 = 'icrf2.cat'
#cat3 = 'MFV247.cat'
#cat4 = 'AMS260.cat'
#cat = 'common_source.cat'
#fcat = open('../catalog/'+cat,'w')
#cat1 = 'common_source.cat'
# Read both catalogues; "with" guarantees the files are closed even on error
# (the originals were closed manually and leaked on exceptions).
with open('../catalog/' + cat1, 'r') as f1:
    sou1 = f1.readlines()
with open('../catalog/' + cat2, 'r') as f2:
    # A set gives O(1) membership tests instead of the original O(n) scan per
    # line (O(n*m) overall).
    sou2 = set(f2.readlines())
# Sources present in both catalogues, kept in cat1 order, trailing newline
# stripped (same output as the original index-based comprehension).
com = [line.strip('\n') for line in sou1 if line in sou2]
print(len(com))
#for i in range(len(com)):
# print>>fcat, com[i]
#fcat.close() | Niu-Liu/thesis-materials | sou-selection/progs/Catalog_comparasion.py | Catalog_comparasion.py | py | 819 | python | en | code | 0 | github-code | 36 |
26599759147 | from PyQt5 import QtWidgets
from PyQt5 import QtCore
from PyQt5.QtCore import pyqtSlot
from PyQt5.QtWidgets import QHeaderView
from db.models import *
from gui.widgets.custom_widgets import DialogWithDisablingOptions
class MainWidget(QtWidgets.QWidget):
    """Main screen: a league table on the left and action buttons on the right.

    ``model`` is a Qt table model that must expose ``get_league(index)`` and
    ``refresh()`` (used by the slots below).
    """
    def __init__(self, parent, model):
        super().__init__(parent)
        # Left side: the league table (takes 2/3 of the horizontal space).
        self.layout = QtWidgets.QHBoxLayout(self)
        self.league_list = QtWidgets.QTableView(self)
        self.league_list.setModel(model)
        self.league_list.setSelectionBehavior(QtWidgets.QAbstractItemView.SelectRows)
        self.league_list.clearSelection()
        self.league_list.resizeColumnsToContents()
        self.league_list.horizontalHeader().setSectionResizeMode(QHeaderView.Stretch)
        self.layout.addWidget(self.league_list, 2)
        # Right side: a vertical column of action buttons (1/3 of the space).
        self.button_layout = QtWidgets.QVBoxLayout(self)
        self.new_league_button = QtWidgets.QPushButton("New league", self)
        self.button_layout.addWidget(self.new_league_button)
        self.new_league_button.clicked.connect(self.on_new_league)
        self.new_round_button = QtWidgets.QPushButton("New round", self)
        self.button_layout.addWidget(self.new_round_button)
        self.new_round_button.clicked.connect(self.on_new_round)
        self.del_button = QtWidgets.QPushButton("Delete league", self)
        self.button_layout.addWidget(self.del_button)
        self.del_button.clicked.connect(self.on_delete)
        self.results_overview_button = QtWidgets.QPushButton("Results overview", self)
        self.button_layout.addWidget(self.results_overview_button)
        self.results_overview_button.clicked.connect(self.on_load)
        self.quit_button = QtWidgets.QPushButton("Quit", self)
        self.button_layout.addWidget(self.quit_button)
        self.quit_button.clicked.connect(self.on_quit)
        self.button_layout.addStretch()
        # Branding label pinned to the bottom of the button column.
        self.jsolutions_label = QtWidgets.QLabel("JSolutions")
        self.jsolutions_label.setAlignment(QtCore.Qt.AlignCenter)
        self.button_layout.addWidget(self.jsolutions_label)
        self.button_layout.setAlignment(QtCore.Qt.AlignTop)
        self.button_layout.setAlignment(self.jsolutions_label, QtCore.Qt.AlignBottom)
        self.layout.addLayout(self.button_layout, 1)
        self.setLayout(self.layout)
    def get_selected_league(self):
        """Return the league for the single selected row, or None.

        A valid row selection yields exactly two indexes (the table has two
        columns) belonging to the same row; anything else is treated as no
        selection.
        """
        selected_indexes = self.league_list.selectedIndexes()
        if len(selected_indexes) != 2 or (selected_indexes[0].row() != selected_indexes[1].row()):
            return None
        else:
            return self.league_list.model().get_league(selected_indexes[0])
    @pyqtSlot()
    def on_quit(self):
        """Exit the application event loop."""
        QtCore.QCoreApplication.instance().quit()
    @pyqtSlot()
    def on_load(self):
        """Open the results-overview window for the selected league."""
        league = self.get_selected_league()
        if league is None:
            return
        # No rounds played yet -> nothing to show.
        if league.max_round == 0 or league.max_round is None:
            QtWidgets.QMessageBox.warning(self, "Zero rounds error", "Zero rounds have been played in this league. "
                                                                     "Unable to show the results overview.")
        else:
            # Imported lazily to avoid a circular import with gui.windows.
            from gui.windows import LeagueOverviewWindow
            win = LeagueOverviewWindow(league, parent=self)
            win.show()
    @pyqtSlot()
    def on_delete(self):
        """Delete the selected league (and its results) after confirmation."""
        league = self.get_selected_league()
        if league is None:
            return
        # Re-fetch from the DB so we operate on a live ORM instance.
        league = League.get_by_name(league.name)
        reply = QtWidgets.QMessageBox.question(self, "Message", f"Are you sure you want to delete {league.name} league?",
                                               QtWidgets.QMessageBox.Yes, QtWidgets.QMessageBox.No)
        if reply == QtWidgets.QMessageBox.Yes:
            # Delete dependent results first, then the league row itself.
            Result.delete_all_from_league(league)
            league.delete_instance()
            self.league_list.model().refresh()
        else:
            return
    @pyqtSlot()
    def on_new_league(self):
        """Prompt for a new, unique league name and create the league."""
        # Existing names are passed so the dialog can reject duplicates.
        league_names = list(map(lambda x: x.name, League.get_all()))
        dialog = DialogWithDisablingOptions("New league", "Please enter valid league name:", league_names)
        if dialog.exec_():
            league = League.create(name=dialog.ret_str)
            league.save()
            self.league_list.model().refresh()
    @pyqtSlot()
    def on_new_round(self):
        """Open the round-input window for the selected league."""
        league = self.get_selected_league()
        if league is None:
            return
        # Imported lazily to avoid a circular import with gui.windows.
        from gui.windows import InputWindow
        new_round_win = InputWindow(self, league)
        new_round_win.show()
    def refresh_leagues_overview(self):
        """Re-query the model so the table reflects external changes."""
        self.league_list.model().refresh()
| jsaric/quiz-manager | gui/widgets/main_widget.py | main_widget.py | py | 4,612 | python | en | code | 0 | github-code | 36 |
31005247537 | # ATRIBUTOS:
# dia: int
# mes: int
# anio: int
class Fecha:
    """A calendar date parsed from a 'dd/mm/yyyy' string.

    Attributes: dia, mes, anio (all ints). str() renders them back without
    leading zeros, e.g. '1/5/2013'.
    """

    def __init__(self, x):
        # Split the textual date and keep each component as an int.
        partes = x.split("/")
        self.dia = int(partes[0])
        self.mes = int(partes[1])
        self.anio = int(partes[2])

    def __str__(self):
        # Leading zeros are not preserved: '01/05/2013' prints as '1/5/2013'.
        return f"{self.dia}/{self.mes}/{self.anio}"

    def __sub__(self, x):
        # Stub: date subtraction is not implemented yet (evaluates to None).
        pass

    def siguiente(self):
        """Return a new Fecha exactly one day after this one."""
        manana = Fecha(str(self))
        manana.dia += 1
        if manana.dia > Fecha.dias_mes(manana.mes, manana.anio):
            # Rolled past the end of the month; possibly past the year too.
            manana.dia = 1
            manana.mes += 1
            if manana.mes > 12:
                manana.mes = 1
                manana.anio += 1
        return manana

    @staticmethod
    def dias_mes(mes, anio):
        """Number of days in month *mes* (1-12) of year *anio*."""
        if mes == 2 and Fecha.bisiesto(anio):
            return 29
        return (31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31)[mes - 1]

    @staticmethod
    def bisiesto(anio):
        """True iff *anio* is a Gregorian leap year."""
        if anio % 400 == 0:
            return True
        return anio % 4 == 0 and anio % 100 != 0
# Quick demo: print a date, then the day that follows it.
f = Fecha("01/05/2013")
print(f)
g = f.siguiente()
print(g)
| matias1lol/semestre-1 | semeste-2/Fecha.py | Fecha.py | py | 867 | python | es | code | 0 | github-code | 36 |
44850642568 | #!/usr/bin/env python3
#
# Add metadata from Apple Podcasts to cached mp3s
# so they sync to Garmin Watches with appropriate
# metadata
# ---------------
# Michael Oliver, 2022, MIT License
#
# Standing on the shoulders of giants:
# Modified prior art and inspiration by Douglas Watson
# https://douglas-watson.github.io/post/2020-05_export_podcasts/
#
# Intended for use as a cron job or to be run before Garmin Express
#
# Queries the Apple Podcasts database for episodes that have been
# downloaded, then updates the metadata embeded in those files
# so that the mp3's have the correct metadata
#
# https://mcoliver.com
import os
import urllib.parse
import sqlite3
SQL = """
SELECT p.ZAUTHOR, p.ZTITLE, e.ZTITLE, e.ZASSETURL, e.ZPUBDATE
from ZMTEPISODE e
join ZMTPODCAST p
on e.ZPODCASTUUID = p.ZUUID
where ZASSETURL NOTNULL;
"""
def check_imports():
''' Prompts for password to install dependencies, if needed '''
try:
import mutagen
except ImportError:
os.system(
"""osascript -e 'do shell script "/usr/bin/pip3 install mutagen" with administrator privileges'""")
def get_downloaded_episodes(db_path):
'''Run SQL Query'''
return sqlite3.connect(db_path).execute(SQL).fetchall()
def main(db_path):
    '''Iterate over downloaded episodes and rewrite their ID3 tags in place.

    Despite the module banner, nothing is re-encoded: only the artist/album/
    title tags are updated. Relies on MP3, EasyID3 and HeaderNotFoundError
    being imported by the __main__ block before main() is called.
    '''
    # zpubdate is unpacked from the query row but intentionally unused here.
    for author, podcast, title, path, zpubdate \
            in get_downloaded_episodes(db_path):
        # ZASSETURL is a file:// URL: strip the scheme, then URL-decode.
        src_path = urllib.parse.unquote(path[len('file://'):])
        print(f"Updating: {src_path}")
        if os.path.exists(src_path):
            try:
                mp3 = MP3(src_path, ID3=EasyID3)
                if mp3.tags is None:
                    mp3.add_tags()
                mp3.tags['artist'] = author
                mp3.tags['album'] = podcast
                mp3.tags['title'] = title
                mp3.save()
            except HeaderNotFoundError:
                # mutagen could not parse an MP3 stream from the file.
                print(f"Corrupted file: {podcast} - {title}")
                continue
            except IsADirectoryError:
                # Asset path points at a directory (per the message: a movie).
                print(
                    f"Failed to export {podcast} - {title}, media file is a movie")
                continue
            except FileNotFoundError:
                print("File does not exist. skipping")
                continue
        else:
            print (f"File does not Exist {src_path}")
if __name__ == "__main__":
db_path = os.path.expanduser(
"~/Library/Group Containers/243LU875E5.groups.com.apple.podcasts/Documents/MTLibrary.sqlite")
check_imports()
from mutagen.mp3 import MP3, HeaderNotFoundError
from mutagen.easyid3 import EasyID3
main(db_path)
| mcoliver/fixPodcastMetadata | fixPodcastMetadata.py | fixPodcastMetadata.py | py | 2,652 | python | en | code | 4 | github-code | 36 |
12685763897 | from moduleBaseClass import ModuleBaseClass
from StringIO import StringIO
from PIL import Image
class Module(ModuleBaseClass):
    """Detector module for Windows bitmap (BMP) payloads."""

    def __init__(self):
        # BMP files begin with the two magic bytes 'BM' (0x42 0x4D).
        # BUGFIX: the original 'x42\x4d' was missing the first backslash, so
        # it matched the literal text "x42M" instead of the BMP signature.
        self.header = '\x42\x4d'
        self.name = 'bmp'

    def final_check(self, raw):
        """Return True iff *raw* parses as an image PIL can open."""
        try:
            Image.open(StringIO(raw))
            return True
        # BUGFIX: a bare except also swallowed KeyboardInterrupt/SystemExit;
        # Exception still covers every PIL parse failure.
        except Exception:
            return False
| tengwar/xorstuff | modules/bmp.py | bmp.py | py | 360 | python | en | code | 0 | github-code | 36 |
70077372585 | import pandas as pd
import lxml.html
import requests
import shelve
import os, sys
import logging
# Module-level setup: logging, the working directory, the source spreadsheet
# and the shelve database used to persist scraped Book objects.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger()
# Ensure 'database' exists and is a directory (replace a same-named file).
if not os.path.exists('database'):
    os.mkdir('database')
elif not os.path.isdir('database'):
    os.remove('database')
    os.mkdir('database')
# Springer's free-textbook catalogue; one row per book.
xlsx = 'database/Free+English+textbooks.xlsx'
xfile = pd.ExcelFile(xlsx)
df = xfile.parse()
# Persistent map: subject name -> list of Book instances (opened without
# writeback, so values read from it are copies - see load_data()).
books = shelve.open('database/serial')
class Book:
    """One Springer textbook: scrapes its download links, cover image and
    per-subject HTML template, and registers itself in the `books` shelf.

    Identity is the spreadsheet row index: __eq__ compares idx only.
    """
    # Base URL that relative pdf/epub hrefs are joined onto.
    PARENT = 'https://link.springer.com'
    def __init__(self, idx, title, edition, subject, url):
        # '/' would break filesystem paths derived from the title.
        self.title = title.replace('/', '_')
        self.idx = idx
        self.name = f'{self.title}, {edition}'
        self.subject = self._process(subject)
        self._image_url = None
        self._url = url
        self.pdf = None
        self.epub = None
    def __repr__(self):
        return f'{self.idx}: {self.name}'
    def __eq__(self, other):
        # NOTE(review): __eq__ without __hash__ makes Book unhashable-ish
        # (hash falls back to None in py3 only for explicit __hash__ = None;
        # here default hashing no longer matches equality) - confirm intended.
        return self.idx == other.idx
    def _process(self, subject):
        """Register this book under its primary subject and ensure the
        subject's HTML template exists; returns the subject name."""
        # Only the first ';'-separated classification is kept.
        subject = subject.split(';')[0]
        book = self
        try:
            # NOTE(review): `books` is a shelve opened without writeback, so
            # books[subject] returns a *copy*; this append is never persisted.
            # load_data() later re-appends and reassigns, which is what
            # actually writes through - confirm this double bookkeeping.
            books[subject].append(book)
        except (KeyError, AttributeError):
            books[subject] = []
            books[subject].append(book)
        self._make_template(book, subject)
        return subject
    def _make_template(self, book, subject):
        """Write templates/<subject>.html once per subject (no-op if the
        extensionless path already exists)."""
        path = os.path.join('templates', subject)
        # NOTE(review): existence is checked on `path` but the file is written
        # to `path + '.html'`, so the guard never fires - verify.
        if os.path.exists(path):
            return
        else:
            html = '''{% extends "base.html" %}
            {% block content %}
            <hr>
            <a href="{{ url_for('index') }}"><< BACK TO INDEX</a>
            <hr>
            <h1><center>{{ subject }} Books</center></h1>
            {% for book in books[subject] %}
            <hr>
            <h3>{{ book.name }}</h3>
            <img src="static/images/{{ book.image}}" />
            <p><u>DOWNLOAD</u>:
            {% if book.pdf %}
            <a href="{{ book.pdf }}">PDF</a>
            {% endif %}
            {% if book.epub %}
            <a href="{{ book.epub }}">EPUB</a>
            {% endif %}
            {% if not book.pdf and not book.epub %}
            <em>unavailable.</em>
            {% endif %}
            </p>
            {% endfor %}
            {% endblock %}'''
            with open(path + '.html', 'w') as fhand:
                fhand.write(html)
    def _scrape(self):
        """Fetch (or load from static/cache) the book's landing page, extract
        pdf/epub links, and download the cover image."""
        name = self.name.replace(' ', '_') + '.html'
        path = os.path.join('static', 'cache', name)
        if os.path.exists(path):
            # Cached copy: avoid re-hitting Springer.
            with open(path, 'rb') as fhand:
                html = fhand.read()
            html = lxml.html.fromstring(html)
        else:
            response = requests.get(self._url)
            content = response.content
            with open(path, 'wb') as fhand:
                fhand.write(content)
            html = lxml.html.fromstring(content)
        try:
            xpath, epub = self.__get_xpaths(html)
        except IndexError:
            # Download anchors missing from the page layout.
            print(f'Error: {self.idx} {self.name}'
                  ' server access point missing')
            return False
        else:
            self.__make_links(xpath, epub)
        finally:
            # Always attempt the cover image, even when links were missing.
            self.image = self.name.replace(' ', '_').replace('\\', '_') + '.jpeg'
            path = os.path.join('static', 'images', self.image)
            # NOTE(review): existence is tested on self.image (bare name),
            # but the file lives at static/images/<name> - verify.
            if not os.path.exists(self.image):
                self.__set_image_url(html)
                try:
                    image = requests.get(self._image_url).content
                # NOTE(review): bare except hides real failures (including
                # _image_url being "" or None); narrow when touched next.
                except:
                    image = ""
                finally:
                    with open(path, 'wb') as fhand:
                        fhand.write(image)
    def __get_xpaths(self, html):
        """Locate the pdf anchor (and epub anchor on the two-button layout);
        raises IndexError when neither layout matches."""
        epub = None
        xpath = html.xpath('//*[@id="main-content"]/article[1]/'
                           'div/div/div[2]/div/div/a')
        if not bool(xpath):
            # Alternate page layout: separate pdf and epub buttons.
            xpath = html.xpath(
                '//*[@id="main-content"]/article[1]/div/div/div[2]/div[1]/a'
            )
            epub = html.xpath(
                '//*[@id="main-content"]/article[1]/div/div/div[2]/div[2]/a'
            )
            epub = epub[0]
        xpath = xpath[0]
        return xpath, epub
    def __make_links(self, xpath, epub):
        """Join the scraped hrefs onto PARENT to form absolute URLs."""
        stub = xpath.get('href')
        self.pdf = __class__.PARENT + stub
        if epub is not None:
            stub = epub.get('href')
            self.epub = __class__.PARENT + stub
    def __set_image_url(self, html):
        """Extract the cover-image src; empty string when no links exist."""
        if self.pdf or self.epub:
            img_xpath = html.xpath(
                '//*[@id="main-content"]/article[1]/div/aside[1]/'
                'div/div/div/img'
            )
            img_xpath = None if not len(img_xpath) else img_xpath[0]
            # NOTE(review): if the xpath matched nothing, img_xpath is None
            # here and .get raises AttributeError - verify against real pages.
            self._image_url = img_xpath.get('src')
        else:
            self._image_url = ""
def load_data():
    """Build a Book per spreadsheet row, scrape the ones not yet shelved,
    and persist each under its subject key in the `books` shelf.

    The shelf is closed by the ``else: books.close()`` clause attached to
    this loop in the original file.
    """
    for idx, row in df.iterrows():
        book = Book(idx,
                    df['Book Title'].iloc[idx],
                    df['Edition'].iloc[idx],
                    df['Subject Classification'].iloc[idx],
                    df['OpenURL'].iloc[idx])
        # Shelf reads return copies: fetch once, mutate, then reassign.
        # (Replaces the original try/assert control flow with an explicit
        # membership check; Book.__eq__ compares row indexes.)
        shelved = books.get(book.subject, [])
        if book in shelved:
            logger.info(f' SKIPPING : {book.name}')
            continue
        book._scrape()
        shelved.append(book)
        books[book.subject] = shelved
        logger.info(f' SERIALIZED : {book.name}')
| chris-hamberg/springer_books_web | scraper.py | scraper.py | py | 5,483 | python | en | code | 0 | github-code | 36 |
16249768953 | from app import create_celery_app
from app.lib.flask_mailplus import send_templated_msg
celery = create_celery_app()
@celery.task()
def test_mail_func(email,message):
ctx = {'email':email, 'message':message}
send_templated_msg(subject='[Snake Eyes], contact',
sender = email,
recipients= "",
reply_to = email,
template = "contact/mail/index",ctx=ctx)
return None | sbansa1/SnakeEyes | app/blueprints/contact/tasks.py | tasks.py | py | 471 | python | en | code | 0 | github-code | 36 |
18036429357 | from heapq import heappush, heappop
class Solution:
    def kSmallestPairs(self, nums1: List[int], nums2: List[int], k: int) -> List[List[int]]:
        """LeetCode 373: the k pairs (nums1[i], nums2[j]) with the smallest
        sums, found by expanding a min-heap frontier over index pairs.

        Assumes both arrays are non-empty and sorted ascending (the LeetCode
        contract) - TODO confirm at call sites. Note the pairs are emitted as
        tuples despite the List[List[int]] annotation.
        """
        visited = set()  # index pairs already pushed, to avoid duplicates
        heap = []        # min-heap of (pair sum, i, j)
        output = []
        # Seed with the globally smallest pair (both arrays sorted).
        heappush(heap, (nums1[0]+nums2[0], 0, 0))
        visited.add((0, 0))
        while len(output) < k and heap:
            _, i, j = heappop(heap)
            output.append((nums1[i], nums2[j]))
            # Expand the two grid neighbours of (i, j): next in each array.
            if i+1 < len(nums1) and (i+1, j) not in visited:
                heappush(heap, (nums1[i+1]+nums2[j], i+1, j))
                visited.add((i+1, j))
            if j+1 < len(nums2) and (i, j+1) not in visited:
                heappush(heap, (nums1[i]+nums2[j+1], i, j+1))
                visited.add((i, j+1))
return output | LittleCrazyDog/LeetCode | 373-find-k-pairs-with-smallest-sums/373-find-k-pairs-with-smallest-sums.py | 373-find-k-pairs-with-smallest-sums.py | py | 779 | python | en | code | 2 | github-code | 36 |
496529237 | import datetime
import sys
import uuid
import pandas as pd
import pytest
from dagster_gcp import (
bigquery_resource,
bq_create_dataset,
bq_delete_dataset,
bq_solid_for_queries,
import_df_to_bq,
import_gcs_paths_to_bq,
)
from dagster_pandas import DataFrame
from google.cloud import bigquery
from google.cloud.exceptions import NotFound
from dagster import (
DagsterExecutionStepExecutionError,
InputDefinition,
List,
ModeDefinition,
Nothing,
OutputDefinition,
Path,
execute_pipeline,
pipeline,
solid,
)
from dagster.config.validate import validate_config
from dagster.core.definitions import create_environment_type
from dagster.seven import mock
def dataset_exists(name):
    '''Check if dataset exists - ensures we have properly cleaned up after tests and haven't leaked
    any datasets'''
    client = bigquery.Client()
    try:
        client.get_dataset(client.dataset(name))
    except NotFound:
        return False
    return True
def get_dataset():
    '''Creates unique dataset names of the form: test_ds_83791a53
    '''
    suffix = str(uuid.uuid4()).replace('-', '_')
    return 'test_ds_%s' % suffix
def bq_modes():
    """Single pipeline mode wiring in the BigQuery resource under 'bigquery'."""
    mode = ModeDefinition(resource_defs={'bigquery': bigquery_resource})
    return [mode]
def test_simple_queries():
    """Integration test: two queries through a one-solid pipeline against
    live BigQuery (requires GCP credentials); checks exact result frames."""
    @pipeline(mode_defs=bq_modes())
    def bq_pipeline():
        bq_solid_for_queries(
            [
                # Toy example query
                'SELECT 1 AS field1, 2 AS field2;',
                # Test access of public BQ historical dataset (only processes ~2MB here)
                # pylint: disable=line-too-long
                '''SELECT *
                FROM `weathersource-com.pub_weather_data_samples.sample_weather_history_anomaly_us_zipcode_daily`
                ORDER BY postal_code ASC, date_valid_std ASC
                LIMIT 1''',
            ]
        ).alias('bq_query_solid')()
    res = execute_pipeline(bq_pipeline).result_for_solid('bq_query_solid')
    assert res.success
    # The solid yields one DataFrame per query, in order.
    values = res.output_value()
    for df in values:
        assert isinstance(df, pd.DataFrame)
    assert values[0].to_dict('list') == {'field1': [1], 'field2': [2]}
    assert values[1].to_dict('list') == {
        'postal_code': ['02101'],
        'country': ['US'],
        'date_valid_std': [datetime.date(2014, 1, 1)],
        'doy_std': [1],
        'avg_temperature_air_2m_f': [25.05],
        'avg_temperature_anomaly_air_2m_f': [-7.81],
        'tot_precipitation_in': [0.0],
        'tot_precipitation_anomaly_in': [-0.28],
        'tot_snowfall_in': [0.0],
        'tot_snowfall_anomaly_in': [-1.36],
        'avg_wind_speed_10m_mph': [7.91],
        'avg_wind_speed_10m_anomaly_mph': [-1.85],
    }
# pylint: disable=line-too-long
def test_bad_config():
    """Each invalid query_job_config fragment must produce the exact
    validation error message (no pipeline execution happens here)."""
    configs_and_expected_errors = [
        (
            # Create disposition must match enum values
            {'create_disposition': 'this is not a valid create disposition'},
            'Value not in enum type BQCreateDisposition',
        ),
        (
            # Dataset must be of form project_name.dataset_name
            {'default_dataset': 'this is not a valid dataset'},
            'Value at path root:solids:test:config:query_job_config:default_dataset is not valid. Expected "_Dataset"',
        ),
        (
            # Table must be of form project_name.dataset_name.table_name
            {'destination': 'this is not a valid table'},
            'Value at path root:solids:test:config:query_job_config:destination is not valid. Expected "_Table"',
        ),
        (
            # Priority must match enum values
            {'priority': 'this is not a valid priority'},
            'Value not in enum type BQPriority',
        ),
        (
            # Schema update options must be a list
            {'schema_update_options': 'this is not valid schema update options'},
            'Value at path root:solids:test:config:query_job_config:schema_update_options must be list. Expected: [BQSchemaUpdateOption]',
        ),
        (
            {'schema_update_options': ['this is not valid schema update options']},
            'Value not in enum type BQSchemaUpdateOption',
        ),
        (
            {'write_disposition': 'this is not a valid write disposition'},
            'Value not in enum type BQWriteDisposition',
        ),
    ]
    @pipeline(mode_defs=bq_modes())
    def test_config_pipeline():
        bq_solid_for_queries(['SELECT 1']).alias('test')()
    # Validate each fragment against the pipeline's config schema.
    env_type = create_environment_type(test_config_pipeline)
    for config_fragment, error_message in configs_and_expected_errors:
        config = {'solids': {'test': {'config': {'query_job_config': config_fragment}}}}
        result = validate_config(env_type, config)
        assert result.errors[0].message == error_message
def test_create_delete_dataset():
    """Integration test of the create/delete dataset solids against live
    BigQuery, including the exists_ok / not_found_ok failure paths."""
    dataset = get_dataset()
    @pipeline(mode_defs=bq_modes())
    def create_pipeline():
        bq_create_dataset.alias('create_solid')()
    config = {'solids': {'create_solid': {'config': {'dataset': dataset, 'exists_ok': True}}}}
    assert execute_pipeline(create_pipeline, config).result_for_solid('create_solid').success
    # Re-creating the same dataset with exists_ok=False must fail.
    config = {'solids': {'create_solid': {'config': {'dataset': dataset, 'exists_ok': False}}}}
    with pytest.raises(DagsterExecutionStepExecutionError) as exc_info:
        execute_pipeline(create_pipeline, config)
    assert 'Dataset "%s" already exists and exists_ok is false' % dataset in str(
        exc_info.value.user_exception
    )
    @pipeline(mode_defs=bq_modes())
    def delete_pipeline():
        bq_delete_dataset.alias('delete_solid')()
    # Delete should succeed
    config = {'solids': {'delete_solid': {'config': {'dataset': dataset}}}}
    assert execute_pipeline(delete_pipeline, config).result_for_solid('delete_solid').success
    # Delete non-existent with "not_found_ok" should succeed
    config = {'solids': {'delete_solid': {'config': {'dataset': dataset, 'not_found_ok': True}}}}
    assert execute_pipeline(delete_pipeline, config).result_for_solid('delete_solid').success
    # Delete non-existent with "not_found_ok" False should fail
    config = {'solids': {'delete_solid': {'config': {'dataset': dataset, 'not_found_ok': False}}}}
    with pytest.raises(DagsterExecutionStepExecutionError) as exc_info:
        execute_pipeline(delete_pipeline, config)
    assert 'Dataset "%s" does not exist and not_found_ok is false' % dataset in str(
        exc_info.value.user_exception
    )
    # Leak check: the dataset must be gone from the project.
    assert not dataset_exists(dataset)
# See: https://github.com/dagster-io/dagster/issues/1711
@pytest.mark.skip
def test_pd_df_load():
    """Round-trip a pandas DataFrame through BigQuery (create -> load ->
    query -> delete) and check the pyarrow/fastparquet error path.
    Skipped pending the dagster issue linked above."""
    dataset = get_dataset()
    table = '%s.%s' % (dataset, 'df')
    test_df = pd.DataFrame({'num1': [1, 3], 'num2': [2, 4]})
    create_solid = bq_create_dataset.alias('create_solid')
    load_solid = import_df_to_bq.alias('load_solid')
    query_solid = bq_solid_for_queries(['SELECT num1, num2 FROM %s' % table]).alias('query_solid')
    delete_solid = bq_delete_dataset.alias('delete_solid')
    @solid(
        input_defs=[InputDefinition('success', Nothing)], output_defs=[OutputDefinition(DataFrame)]
    )
    def return_df(_context):  # pylint: disable=unused-argument
        return test_df
    config = {
        'solids': {
            'create_solid': {'config': {'dataset': dataset, 'exists_ok': True}},
            'load_solid': {'config': {'destination': table}},
            'delete_solid': {'config': {'dataset': dataset, 'delete_contents': True}},
        }
    }
    @pipeline(mode_defs=bq_modes())
    def bq_pipeline():
        delete_solid(query_solid(load_solid(return_df(create_solid()))))
    result = execute_pipeline(bq_pipeline, config)
    assert result.success
    values = result.result_for_solid('query_solid').output_value()
    assert values[0].to_dict() == test_df.to_dict()
    # BQ loads should throw an exception if pyarrow and fastparquet aren't available
    with mock.patch.dict(sys.modules, {'pyarrow': None, 'fastparquet': None}):
        with pytest.raises(DagsterExecutionStepExecutionError) as exc_info:
            result = execute_pipeline(bq_pipeline, config)
        assert (
            'loading data to BigQuery from pandas DataFrames requires either pyarrow or fastparquet'
            ' to be installed' in str(exc_info.value.user_exception)
        )
    # The mocked run failed mid-pipeline, so clean the dataset up explicitly.
    cleanup_config = {
        'solids': {'delete_solid': {'config': {'dataset': dataset, 'delete_contents': True}}}
    }
    @pipeline(mode_defs=bq_modes())
    def cleanup():
        delete_solid()
    assert execute_pipeline(cleanup, cleanup_config).success
    assert not dataset_exists(dataset)
# See: https://github.com/dagster-io/dagster/issues/1711
@pytest.mark.skip
def test_gcs_load():
    """Load a public GCS CSV into BigQuery via import_gcs_paths_to_bq and
    query the first row back. Skipped pending the dagster issue above."""
    dataset = get_dataset()
    table = '%s.%s' % (dataset, 'df')
    create_solid = bq_create_dataset.alias('create_solid')
    query_solid = bq_solid_for_queries(
        [
            'SELECT string_field_0, string_field_1 FROM %s ORDER BY string_field_0 ASC LIMIT 1'
            % table
        ]
    ).alias('query_solid')
    delete_solid = bq_delete_dataset.alias('delete_solid')
    @solid(
        input_defs=[InputDefinition('success', Nothing)], output_defs=[OutputDefinition(List[Path])]
    )
    def return_gcs_uri(_context):  # pylint: disable=unused-argument
        return ["gs://cloud-samples-data/bigquery/us-states/us-states.csv"]
    config = {
        'solids': {
            'create_solid': {'config': {'dataset': dataset, 'exists_ok': True}},
            'import_gcs_paths_to_bq': {
                'config': {
                    'destination': table,
                    'load_job_config': {
                        'autodetect': True,
                        'skip_leading_rows': 1,
                        'source_format': 'CSV',
                        'write_disposition': 'WRITE_TRUNCATE',
                    },
                }
            },
            'delete_solid': {'config': {'dataset': dataset, 'delete_contents': True}},
        }
    }
    @pipeline(mode_defs=bq_modes())
    def bq_pipeline():
        delete_solid(query_solid(import_gcs_paths_to_bq(return_gcs_uri(create_solid()))))
    result = execute_pipeline(bq_pipeline, config)
    assert result.success
    # Alphabetically first US state in the sample file.
    values = result.result_for_solid('query_solid').output_value()
    assert values[0].to_dict() == {'string_field_0': {0: 'Alabama'}, 'string_field_1': {0: 'AL'}}
    assert not dataset_exists(dataset)
| helloworld/continuous-dagster | deploy/dagster_modules/libraries/dagster-gcp/dagster_gcp_tests/bigquery_tests/test_solids.py | test_solids.py | py | 10,520 | python | en | code | 2 | github-code | 36 |
12673510789 | from pathlib import Path
import numpy as np
import torch
from torch.utils.data import Dataset
from torchvision import transforms
import torchvision.transforms.functional as TF
from PIL import Image
from src.draw_utils import save_img_with_kps
from src.readers.image_reader import ImageReader
from typing import Dict
from torch import Tensor
from typing import Tuple
class Phase0PointsDataset(Dataset):
MEAN = [0.5, 0.5, 0.5]
STD = [0.2, 0.2, 0.2]
IMG_SIZE = 768
TRANSFORMS = transforms.Compose(
[
transforms.Resize(IMG_SIZE),
transforms.ToTensor(),
transforms.Normalize(mean=MEAN, std=STD),
]
)
def __init__(self, reader: ImageReader, augment: bool = False):
assert isinstance(reader, ImageReader)
self.reader = reader
self.augment = augment
def __len__(self):
return len(self.reader)
def __getitem__(self, idx: int) -> Dict[str, Tensor]:
sample = self.reader[idx]
img = sample.phase_0_image
kps = torch.tensor(sample.phase_0_points).to(dtype=torch.float32)
if self.augment:
if np.random.rand() < 0.5:
img, kps = Phase0PointsDataset.color_augment(img, kps)
if np.random.rand() < 0.5:
img, kps = Phase0PointsDataset.rotate(img, kps)
if np.random.rand() < 0.5:
img, kps = Phase0PointsDataset.perspective_augment(img, kps)
if np.random.rand() < 0.5:
img, kps = Phase0PointsDataset.crop_augment(img, kps)
kps = kps / torch.tensor([img.width, img.height])
kps = kps.flatten()
img_tensor = Phase0PointsDataset.TRANSFORMS(img)
sample_t = {
"img": img_tensor,
"kps": kps,
}
return sample_t
@staticmethod
def color_augment(img: Image.Image, kps: Tensor) -> Tuple[Image.Image, Tensor]:
img = TF.adjust_brightness(img, 0.7 + np.random.rand() * 1.5)
img = TF.adjust_contrast(img, 0.5 + np.random.rand() * 1.5)
img = TF.adjust_gamma(img, gamma=0.5 + np.random.rand(), gain = 0.5 + np.random.rand())
img = TF.adjust_hue(img, -0.5 + np.random.rand())
img = TF.adjust_saturation(img, np.random.rand() * 1.5)
return img, kps
@staticmethod
def rotate(img: Image.Image, kps: Tensor) -> Tuple[Image.Image, Tensor]:
rotation_angle_deg = np.random.rand() * 30 - 15
rotation_angle_rad = np.deg2rad(rotation_angle_deg)
rotation_matrix = np.array(
[
[np.cos(rotation_angle_rad), -np.sin(rotation_angle_rad)],
[np.sin(rotation_angle_rad), np.cos(rotation_angle_rad)],
]
)
rot_torch = torch.from_numpy(rotation_matrix.astype(np.float32))
img = TF.rotate(img, np.rad2deg(rotation_angle_rad))
center = torch.tensor([img.width, img.height]) / 2
kps = kps - center
kps = torch.matmul(kps, rot_torch)
kps = kps + center
return img, kps
@staticmethod
def perspective_augment(img: Image.Image, kps: Tensor) -> Tuple[Image.Image, Tensor]:
topleft = kps[0]
topright = kps[1]
bottomleft = kps[2]
bottomright = kps[3]
startpoints = [
topleft.to(dtype=torch.int32).tolist(),
topright.to(dtype=torch.int32).tolist(),
bottomright.to(dtype=torch.int32).tolist(),
bottomleft.to(dtype=torch.int32).tolist(),
]
a = min([torch.linalg.norm(topleft - topright) * 0.1, torch.linalg.norm(topleft - bottomleft) * 0.1])
new_topleft = topleft + (-a + np.random.rand() * 2*a)
new_topleft = torch.clip(new_topleft, torch.tensor([0, 0]), torch.tensor([img.width, img.height]))
new_topright = topright + (-a + np.random.rand() * 2*a)
new_topright = torch.clip(new_topright, torch.tensor([0, 0]), torch.tensor([img.width, img.height]))
new_bottomleft = bottomleft + (-a + np.random.rand() * 2*a)
new_bottomleft = torch.clip(new_bottomleft, torch.tensor([0, 0]), torch.tensor([img.width, img.height]))
new_bottomright = bottomright + (-a + np.random.rand() * 2*a)
new_bottomright = torch.clip(new_bottomright, torch.tensor([0, 0]), torch.tensor([img.width, img.height]))
endpoints = [
new_topleft.to(dtype=torch.int32).tolist(),
new_topright.to(dtype=torch.int32).tolist(),
new_bottomright.to(dtype=torch.int32).tolist(),
new_bottomleft.to(dtype=torch.int32).tolist(),
]
img = transforms.functional.perspective(img, startpoints, endpoints)
kps = torch.stack([new_topleft, new_topright, new_bottomleft, new_bottomright])
return img, kps
@staticmethod
def crop_augment(img: Image.Image, kps: Tensor) -> Tuple[Image.Image, Tensor]:
kps_x0 = kps[:, 0].min().item()
kps_x1 = kps[:, 0].max().item()
kps_y0 = kps[:, 1].min().item()
kps_y1 = kps[:, 1].max().item()
crop_x0 = int(kps_x0 * np.random.rand())
crop_x1 = int(kps_x1 + np.random.rand() * (img.width - kps_x1))
crop_y0 = int(kps_y0 * np.random.rand())
crop_y1 = int(kps_y1 + np.random.rand() * (img.height - kps_y1))
# make square
crop_1 = max(crop_x1 - crop_x0, crop_y1 - crop_y0)
crop_y1 = crop_y0 + crop_1
crop_x1 = crop_x0 + crop_1
img = img.crop((crop_x0, crop_y0, crop_x1, crop_y1))
kps = kps - torch.tensor([crop_x0, crop_y0])
return img, kps
@staticmethod
def img_from_tensor(img_tensor: Tensor) -> Image.Image:
img: np.ndarray = img_tensor.permute(1, 2, 0).numpy()
img = (
img * np.array(Phase0PointsDataset.STD) + np.array(Phase0PointsDataset.MEAN)
) * 255
img = img.astype(np.uint8)
img = Image.fromarray(img)
return img
def show(self, idx: int, out_folder: Path, repeat_idx=0, verbose: bool = False):
    """Render sample *idx* with its keypoints drawn and save it under *out_folder*."""
    sample_t = self[idx]
    img_tensor = sample_t["img"]
    kps_tensor = sample_t["kps"]
    img = Phase0PointsDataset.img_from_tensor(img_tensor)
    # Keypoints are stored normalized; scale back to pixel coordinates.
    kps = kps_tensor.reshape(-1, 2).numpy() * Phase0PointsDataset.IMG_SIZE
    filename = out_folder / f"sample_{idx}_{repeat_idx}.jpg"
    save_img_with_kps(img, kps, filename, circle_radius=10, verbose=verbose)
| AvanDavad/receipt_extractor | src/datasets/phase0points_dataset.py | phase0points_dataset.py | py | 6,430 | python | en | code | 0 | github-code | 36 |
33653170551 | #Reference Files
#https://lotr.fandom.com/wiki/Quest_of_the_Ring
#https://www.asciiart.eu/books/lord-of-the-rings
import random
import time
#Title page/main menu
def main():
    """Title screen: draw the banner and route to start() or exit."""
    print(" _____ _ _ __ _ _ ______ _")
    print("|_ _| (_) | / _| | | | | | ___ (_)")
    print(" | |_ __ __ _ _| | ___ | |_ | |_| |__ ___ | |_/ /_ _ __ __ _ ___ ")
    print(" | | '__/ _` | | | / _ \| _| | __| '_ \ / _ \ | /| | '_ \ / _` / __|")
    print(" | | | | (_| | | | | (_) | | | |_| | | | __/ | |\ \| | | | | (_| \__ \ ")
    print(" \_/_| \__,_|_|_| \___/|_| \__|_| |_|\___| \_| \_|_|_| |_|\__, |___/")
    print(" __/ | ")
    print(" |___/")
    print("\n")
    print("Welcome to the Hobbiton Trail")
    print("Travel from the Shire to Mordor to deliver the one ring!")
    print("1.) Start")
    print("2.) Exit")
    option = input("-->")
    if option == "1" or option == "start" or option == "Start":
        start()
    elif option == "2" or option == "Exit" or option == "exit":
        quit()
    else:
        # Unrecognized input redraws the title screen (recursive retry).
        main()
#asking name of main player "Ring-bearer"
def start():
    """Ask for the ring-bearer's name, with banter for one-letter names."""
    print("\n")
    global player_name
    player_name = input("What is your name, young hobbit?: ")
    # NOTE(review): if the input is empty the while condition is False and the
    # function returns silently, ending the game without a message.
    while len(player_name)>0:
        if len(player_name)>1:
            print(str(player_name)+"? A fine hobbit name.")
            companion()
        elif len(player_name)==1:
            player_name_choice = input(str(player_name)+"? Truly? Only one letter? Must be Dwarvish. Are you certain that's your name? (y/n):")
            if player_name_choice == "y" or player_name_choice == "Y":
                print("\n")
                print("Very well..."+(str(player_name))+"... *grumble*... I'm still going to treat you like any other hobbit!")
                companion()
            elif player_name_choice == "n" or player_name_choice == "N":
                player_name = input("What is your name: ")
                companion()
        else:
            # NOTE(review): unreachable — the while guard guarantees len >= 1.
            print("Did you write anything? My eyes must be going. Could you write that again for me?")
            player_name = input("What is your name: ")
            companion()
#asking name of companion "gardener"
def companion():
    """Ask for the companion's name and add it to the fellowship.

    BUG FIX: the one-letter branch tested ``player_name_choice`` (a variable
    from start()'s scope, undefined here -> NameError); it now tests
    ``companion_name_choice``.
    """
    print("\n")
    global companion_name
    companion_name = input("This is a dangerous journey to undertake alone, who will be joining you?: ")
    while len(companion_name)>0:
        if len(companion_name)>1:
            print(str(companion_name)+"? A worthy companion.")
            f.companions.append(companion_name)
            mode()
        elif len(companion_name)==1:
            companion_name_choice = input(str(companion_name)+"? *sigh* Another dwarf? There's sure to be too much singing on this journey. Are you sure that's who you wish to travel with? (y/n):")
            if companion_name_choice == "y" or companion_name_choice == "Y":
                print("Very well..."+(str(companion_name))+"... *sigh* ... Save me from the stubbornness of Dwarves.")
                f.companions.append(companion_name)
                mode()
            elif companion_name_choice == "n" or companion_name_choice == "N":
                companion_name = input("Who will be joining you?: ")
                f.companions.append(companion_name)
                mode()
#starting variables to refer back to/modify
# NOTE(review): this class is instantiated once (f = party()) and then the
# name `party` is shadowed by the function def below; `fellowship` creates a
# useless local and its parameter shadows the builtin `object`.
class party():
    def fellowship(object):
        disbanded = False
    def ofthering(self):
        # Reads the (never-set) instance/class attribute `disbanded`;
        # raises AttributeError if called before anything assigns it.
        if self.disbanded:
            continue_choice = input("the fellowship is disbanded, but not yet broken. Try again? (y/n): ")
            if continue_choice == "y" or continue_choice =="Y":
                start()
            else:
                main()
# Global game state: a single party instance with ad-hoc attributes.
f = party()
f.companions = []   # names of everyone travelling with the ring-bearer
f.weapons = 2
f.water = 0         # set by mode() according to difficulty
f.ponies = 2
f.miles = 1535      # miles remaining to Mordor
f.day = 0
f.food = 0          # set by mode() according to difficulty
f.daysleft = 1      # countdown used by the Morgul-blade subplot
def party():
    """Offer a restart when the fellowship has been disbanded.

    BUG FIX: the original read ``disbanded == False`` — a comparison against
    an undefined name, which raised NameError on every call. It is now an
    assignment, so the function runs (and, with the flag False, returns
    without prompting).
    """
    disbanded = False
    if disbanded == True:
        continue_choice = input("The fellowship is broken, but not yet disbanded. Try again? (y/n): ")
        if continue_choice == "y" or continue_choice == "Y":
            start()
        elif continue_choice == "n" or continue_choice == "N":
            main()
#main and starting stats for main player. Death statement.
def ringbearer(r):
    """Initialize the ring-bearer's status flags on *r* and, if dead, offer a restart.

    BUG FIX: the original used ``==`` (comparison) instead of ``=``
    (assignment) for all four flags, which raised AttributeError on fresh
    objects and never set anything. Defaults are applied only when the
    attribute is missing, so an externally-set ``alive = False`` still
    triggers the game-over prompt.
    """
    r.alive = getattr(r, "alive", True)
    r.fleshwound = getattr(r, "fleshwound", False)
    r.RingMad = getattr(r, "RingMad", False)
    r.LazyHobbit = getattr(r, "LazyHobbit", False)
    if r.alive == False:
        print("\n")
        continue_choice = input("The ring-bearer has fallen. But not all hope is yet lost. Try again? (y/n?): ")
        if continue_choice == "y" or continue_choice == "Y":
            start()
        elif continue_choice == "n" or continue_choice == "N":
            main()
#main and starting stats for companion character. Death statement.
def gardener(g):
    """Initialize the companion's status flags on *g* and, if dead, play the farewell.

    BUG FIXES: (1) the original used ``==`` instead of ``=`` for the flags,
    raising AttributeError on fresh objects; (2) the farewell printed the
    literal text "player_name"/"companion_name" instead of the chosen names.
    """
    g.alive = getattr(g, "alive", True)
    g.fleshwound = getattr(g, "fleshwound", False)
    g.LazyHobbit = getattr(g, "LazyHobbit", False)
    if g.alive == False:
        print("\n")
        print("Your companion has fallen... their final words are whispered out.")
        time.sleep(3)
        print("\n")
        print("'I made a promise, "+str(player_name)+". A promise.")
        time.sleep(1)
        print("Don't you leave them, "+str(companion_name)+".")
        time.sleep(1)
        print("And I don't mean to.")
        time.sleep(2)
        print("I don't mean to.'")
        time.sleep(3)
        continue_choice = input ("The journey cannot continue without your dearest friend. Do you attempt the journey again? (y/n?): ")
        if continue_choice == "y" or continue_choice == "Y":
            start()
        elif continue_choice == "n" or continue_choice == "N":
            main()
#choose difficulty level
def mode():
    """Ask for a difficulty level and set the starting food/water accordingly."""
    print("\n")
    print("The road ahead is long and arduous, though perhaps I can ease your way.")
    mode_choice = input("How difficult of a journey would you like to undertake? (easy, normal, hard, impossible): ")
    # Difficulty -> starting amount of both food and water.
    rations = {"easy": 1000, "normal": 500, "hard": 300, "impossible": 100}
    if mode_choice in rations:
        amount = rations[mode_choice]
        f.food = amount
        f.water = amount
        intro()
    else:
        print("please check your spelling and try again")
        mode()
#Intro text
def intro():
    """Print the ring-verse scroll and wait for a key before chapter 1."""
    print("\n")
    print("You have a difficult road ahead of you my young hobbit.")
    time.sleep(1)
    print("You carry with you one of the great rings of power.")
    time.sleep(1)
    print("THE great ring of power.")
    time.sleep(1)
    print(" _______________________")
    print(" _______________________------------------- `|")
    print(" /:--__ |")
    print("||< > | ___________________________/")
    print("| \__/_________________------------------- |")
    print("| |")
    print(" | THE LORD OF THE RINGS |")
    print(" | |")
    print(" | Three Rings for the Elven-kings under the sky, |")
    print(" | Seven for the Dwarf-lords in their halls of stone, |")
    print(" | Nine for Mortal Men doomed to die, |")
    print(" | One for the Dark Lord on his dark throne |")
    print(" | In the Land of Mordor where the Shadows lie. |")
    print(" | One Ring to rule them all, One Ring to find them, |")
    print(" | One Ring to bring them all and in the darkness bind them |")
    print(" | In the Land of Mordor where the Shadows lie. |")
    print(" | ____________________|_")
    print(" | ___________________------------------------- `|")
    print(" |/`--_ |")
    print(" ||[ ]|| ___________________/")
    print(" \===/___________________--------------------------")
    print("\n")
    print("\n")
    print("\n")
    print("\n")
    time.sleep(2)
    proceed = input("To proceed on your journey, select any key and hit enter: ")
    # NOTE(review): any input except a single space proceeds; empty input also
    # proceeds because "" != " ".
    if proceed !=" ":
        chp1()
    else: #easter egg because I'm a dork
        print("\n")
        print("Any key except that one! Fool of a Took! Hold on, I'll fix this.")
        time.sleep(2)
        print("...")
        time.sleep(1)
        print("...")
        time.sleep(1)
        print("*grunt*")
        time.sleep(1)
        print("...")
        time.sleep(.5)
        print("Ahh, there we go.")
        time.sleep(2)
        chp1()
#Chapter 1 text
def chp1():
    """Chapter 1: leaving the Shire; prints the gate art and starts travel leg 1."""
    print("\n")
    print("It's a dangerous business, "+str(player_name)+", going out your door.")
    time.sleep(1)
    print("You step onto the road, and if you don't keep your feet,")
    time.sleep(1)
    print("there's no knowing where you might be swept off to")
    time.sleep(1)
    print("\n")
    print(" . .:.:.:.:. .:\ /:. .:.:.:.:. ,")
    print(" .-._ `..:.:. . .:.:`- -':.:. . .:.:.,' _.-.")
    print(" .:.:.`-._`-._..-''_...---..._``-.._.-'_.-'.:.:.")
    print(" .:.:. . .:_.`' _..-''._________,``-.._ `.._:. . .:.:.")
    print(" .:.:. . . ,-'_.-'' ||_-(O)-_|| ``-._`-. . . .:.:.")
    print(" .:. . . .,'_.' '---------' `._`.. . . .:.:")
    print(" :.:. . . ,',' _________ `.`. . . .:.:")
    print(" `.:.:. .,',' _.-''_________``-._ `._. _.':")
    print(" -._ `._./ / ,'_.-'' , ``-._`. ,' '`:..' _.-:")
    print(" .:.:`-.._' / ,',' `.`. /' ' \\.-':.:.:")
    print(" :.:. . ./ / ,',' , `.`. / ' ' '\\. .:.::")
    print(":.:. . ./ / / / , \ \ : ' ' ' \\. .:.:")
    print(".:. . ./ / / / , , \ \ : ' ' ' '::. .:.")
    print(":. . .: : o / / \ ;' ' ' ' ':: . .:")
    print(".:. . | | /_\ : : , , : ' ' ' ' ' :: .:.")
    print(":. . .| | ((<)) | |, , , |\'__',-._.' ' ||. .:")
    print(".:.:. | | `-' | |---....____ | ,---\/--/ ' ||:.:.")
    print("------| | : : ,. ```--..._ , |'' ' ' ' ' ||----")
    print("_...--. | , \ \ ,. `-._ , /: ' ' ' ' ' ;;..._")
    print(":.:. .| | -O- \ \ ,. `._ / /:' ' ' ' ':: .:.:")
    print(".:. . | |_(`__ \ \ `. / / :' ' ' ' ';;. .:.")
    print(":. . .<' (_) `> `.`. ,. ,. ,',' \ ' ' ' ;;. . .:")
    print(".:. . |):-.--'( `.`-._ ,. _,-',' \ ' ' '//| . .:.")
    print(":. . .;)()(__)(___________`-._`-.._______..-'_.-'_________\' ' //_:. . .:")
    print(".:.:,' \/\/--\/--------------------------------------------`._',;'`. `.:.::")
    print(":.,' ,' ,' ,' / / / ,-------------------. \ \ \ `. `.`. `..:")
    print(",' ,' ' / / / / // \\ \ \ \ \ ` `...:")
    print("\n")
    time.sleep(1)
    print("And so "+(str(player_name))+" set forth on their journey,")
    print("accompanied by their loyal friend and gardener "+(str(companion_name))+",")
    print("to the city of Bree, to meet with Gandalf.")
    print("\n")
    time.sleep(2)
    trav1()
#stats to display during travel/in future GUI
def init_stat():
    """Print the party's current status block (days, miles, supplies)."""
    print("CURRENT STATUS")
    print(f"Days on the road: {f.day}")
    print(f"Miles to Mordor: {f.miles}")
    print("Companions: " + ", ".join(f.companions))
    print(f"Food remaining: {f.food}")
    print(f"Water remaining: {f.water}")
    print(f"Ponies: {f.ponies}")
    print(f"Weapons: {f.weapons}")
    print("\n")
#Actions that players can potentially take
def progress():
    """Advance one day of travel: roll events, check death, consume supplies."""
    travelevents()
    deathcheck()
    # Daily distance, boosted 5 miles per pony.
    miles_traveled = random.randint(30,60)
    f.day +=1
    f.miles = f.miles - miles_traveled + f.ponies*5
    # Everyone (player + companions + ponies) eats and drinks one unit.
    f.food = f.food - 1 - len(f.companions) - f.ponies
    f.water = f.water - 1 - len(f.companions) - f.ponies
##random travel events that can occur during progress
def travelevents():
    """Roll a 1-in-20 random encounter for the day's travel."""
    event = random.randint(1,20)
    if event == 1:
        print("\n")
        print("The Nazgul are on your trail. You must hide to avoid them, this costs you one day of travel and supplies.")
        f.day +=1
        f.food = f.food - 1 - len(f.companions) - f.ponies
        f.water = f.water - 1 - len(f.companions) - f.ponies
    elif event == 2:
        print("\n")
        print("An Uruk-Hai war party is traveling nearby. You run for cover, dropping some food.")
        f.food = f.food - 10
    elif event == 3:
        print("\n")
        print("The eye of Sauron has noticed you. Lie low for a while until his gaze passes. This costs you one day of travel and supplies.")
        f.day +=1
        f.food = f.food - 1 - len(f.companions) - f.ponies
        f.water = f.water - 1 - len(f.companions) - f.ponies
    elif event ==4:
        print("\n")
        print("A member of your party has been bitten by a gigantic spider. You use water to cleanse the wound.")
        f.water = f.water - 10
    elif event ==5:
        print("\n")
        print("The Nazgul have spotted you! Do you fight or do you run?")
        fight_flight = input("(fight/run): ")
        if fight_flight == "fight":
            # NOTE(review): both while loops below never terminate cleanly —
            # the first prints forever when weapons <= 0 (nothing changes),
            # and the second only exits once repeated brawl==2 rolls burn all
            # weapons; they were presumably meant to be `if`.
            while f.weapons <= 0:
                print("You have no weapons! The Nazgul overpower you and kill the party in their quest for the ring.")
                # NOTE(review): `==` is a no-op comparison, and `r` is not a
                # defined global — this never records the death.
                r.alive == False
            while f.weapons >0:
                brawl = random.randint(1,3)
                if brawl ==1:
                    print("\n")
                    print("You successfully fight off the Nazgul")
                if brawl ==2:
                    print("\n")
                    print("You succsessfully fend off the Nazgul for now, but one of your weapons is broken in the fight")
                    f.weapons -= 1
                else:
                    print("\n")
                    print("One of the Nazgul has managed to stab you. "+(str(player_name))+" has an injury.")
                    r.fleshwound = True
                    # NOTE(review): ringbearer() requires an argument; this
                    # call raises TypeError.
                    ringbearer()
        else:
            runchance = random.randint(1,3)
            if runchance ==1:
                print("\n")
                print("You drop some of your food and water in your escape.")
                f.food -= 15
                f.water -= 15
            else:
                print("\n")
                print("You got away safely.")
def forage():
    """Spend a day foraging; usually gains food and water, sometimes nothing."""
    print("\n")
    f.day += 1
    eventf = random.randint(1,5)
    foraged = random.randint(10,21)
    if eventf == 1:
        print("You've found mushrooms!")
        f.food = f.food + foraged
        f.water = f.water + foraged
    elif eventf == 2:
        print("You've found potatoes!")
        f.food = f.food + foraged
        f.water = f.water + foraged
    elif eventf == 3:
        # Slightly less nutritious than a clean find.
        print("You've found... something. It appears to be edible.")
        f.food = f.food + foraged - 5
        f.water = f.water + foraged
    elif eventf ==4:
        print("The land here appears barren, there's no food to be found.")
    else:
        print("You've found mushrooms! In your haste you eat them before identifying them and realize they are poisoned. You waste 5 water rinsing your mouth.")
        f.water = f.water - 5
def hunt():
    """Spend a day hunting; bigger food gains than foraging, but no water."""
    print("\n")
    f.day +=1
    # NOTE(review): eventh can be 6, which silently does nothing.
    eventh = random.randint(1,6)
    caught = random.randint(20,40)
    if eventh ==1:
        print("You've caught a deer!")
        f.food = f.food + caught
    if eventh ==2:
        print("You've caught some rabbits!")
        f.food = f.food + caught
    if eventh ==3:
        print("You caught a troll... better him than us... and better than starving... I guess.")
        f.food = f.food + caught
    if eventh ==4:
        print("The Uruk-Hai marched through here recently. There is no game to be found.")
    if eventh ==5:
        injury = random.randint(1,3)
        if injury ==1:
            print("You were injured trying to bring down an animal! You now have a fleshwound!")
            # NOTE(review): `r`/`g` are not defined globals and
            # ringbearer()/gardener() require an argument — these branches
            # raise at runtime.
            r.fleshwound = True
            ringbearer()
        else:
            print("Your companion, "+(str(companion_name))+", was injured trying to bring down an animal! They now have a fleshwound!")
            g.fleshwound = True
            gardener()
def rest():
    """Spend a day resting; may heal wounds, ring-madness and laziness."""
    print("\n")
    f.day +=1
    f.food = f.food - 1 - len(f.companions) - f.ponies
    f.water = f.water - 1 - len(f.companions) - f.ponies
    eventr = random.randint(1,3)
    # NOTE(review): `r` and `g` are not defined globals here, so these
    # attribute reads raise NameError at runtime.
    if eventr ==1 or eventr ==2:
        if r.fleshwound == True or g.fleshwound == True:
            print("You've rested and tended to your injuries. None of your party have fleshwounds.")
            r.fleshwound = False
            g.fleshwound = False
        if r.RingMad == True :
            print("During your rest, your companions were able to talk some sense into you. The corruption of the ring seems to have passed for now.")
            r.RingMad = False
        # NOTE(review): the `else` below pairs with the LazyHobbit `if`, so
        # "fully rested" prints only when nobody was lazy — likely intended
        # to cover the no-ailments case instead.
        if r.LazyHobbit == True or g.LazyHobbit == True:
            print("Given a day to laze around, the hobbits appear to be cured of their laziness.")
            r.LazyHobbit = False
            g.LazyHobbit = False
        else:
            print("Your party is fully rested")
    else:
        print("Your party takes the day to rest, but the elements prevent you from feeling re-energized.")
def helpchoices():
    """Print the explanation of each travel-menu action."""
    lines = (
        "Travel: make progress on your journey. Consuming food and water based on the size of your party. Ponies can help you travel faster.",
        "Forage: gather food and water to sustain your party.",
        "Hunt: hunt nearby creatures for increased food. Provides more food than foraging, but does not provide water.",
        "Rest: take one day to rest your party, with the potential to heal injuries and detrimental status effects.",
    )
    print("\n")
    for line in lines:
        print(line)
    print("\n")
def quit_check():
    """Confirm before quitting; anything but y/Y resumes the game."""
    check = input("are you sure you want to quit? (y/n): ")
    if check =="y" or check =="Y":
        print("...One does not simply walk into Mordor...")
        quit()
    else:
        print("And so we continue on our journey.")
def deathcheck():
    """End the game when supplies or the Morgul-blade countdown run out."""
    if f.food == 0 or f.water == 0:
        # NOTE(review): supplies can skip past 0 into negatives (daily
        # consumption subtracts more than 1), so `== 0` misses most deaths;
        # `<= 0` was probably intended.
        print("You have run out of resources. You cannot possibly go on.")
        # NOTE(review): `==` is a no-op and `r` is undefined; ringbearer()
        # also requires an argument — this path raises instead of ending.
        r.alive == False
        ringbearer()
    elif f.daysleft == 0:
        print("You couldn't find help in time. You didn't make it.")
        r.alive == False
        ringbearer()
#Travel marker for chapter 1
def trav1():
    """Travel leg 1 (Shire -> Bree): loop the action menu until 120 miles are covered."""
    print("\n")
    print("The Shire ---------> Bree")
    init_stat()
    # NOTE(review): options1() recursively re-enters trav1(), so this while
    # loop effectively acts as an `if`; long games grow the call stack.
    while 1415 < f.miles <=1535:
        options1()
    else:
        chap2()
#Travel options for chapter 1
def options1():
    """Chapter-1 action menu; dispatches to the chosen activity."""
    print("what would you like to do?")
    select = input("travel, forage, hunt, rest, help, quit: ")
    if select == "travel":
        progress()
        trav1()
    elif select == "forage":
        forage()
        trav1()
    elif select == "hunt":
        hunt()
        trav1()
    elif select == "rest":
        print("\n")
        print("Lazy hobbits, we've barely gotten started!")
        rest()
        trav1()
    elif select =="help":
        helpchoices()
    elif select=="quit":
        quit_check()
    else:
        # NOTE(review): the re-prompted value is discarded — the function
        # returns without acting on it.
        print("Please check your spelling and try again.")
        select = input("travel, forage, hunt, rest, help, quit: ")
#Chapter 2 text/story
def chap2():
    """Chapter 2: Pippin, Merry and Strider join; starts travel leg 2."""
    print("\n")
    print("On the road to Bree you find your friends Pippin and Merry stealing vegetables from Farmer Maggot's fields.")
    time.sleep(1)
    print("They join you on your journey to Bree, bringing the vegetables they have found with them.")
    f.food += 50
    f.companions.append("Pippin")
    f.companions.append("Merry")
    time.sleep(2)
    print("\n")
    print("Pippin and Merry and joined the party.")
    print("\n")
    init_stat()
    print("\n")
    time.sleep(2)
    print("As you continue toward Bree your party is pursued by mysterious black riders.")
    time.sleep(1)
    print("\n")
    time.sleep(1)
    print("Fortunately you are able to hide from these dangerous riders and make it to the Inn of the Prancing Pony to meet Gandalf.")
    time.sleep(5)
    print("\n")
    print("\n")
    print(" _____,,;;;`; The Inn ;';;;,,_____")
    print(",~( ) , )~~\| of the |/~~( , ( )~;")
    print("' / / --`--, Prancing .--'-- \ \ `")
    print(" / \ | ' Pony ` | / |")
    print("\n")
    time.sleep(3)
    print("Unfortunately, Gandalf is unable to meet you in Bree,")
    print("however, you do encounter a ranger by the name of Strider.")
    print("Your part leaves with Strider to make their way to Rivendell")
    print("\n")
    print("Strider joins the party. Bringing his own weapons and rations.")
    f.companions.append("Strider")
    f.food += 30
    f.weapons += 3
    f.ponies += 1
    trav2()
#travel for chapter 2
def trav2():
    """Travel leg 2 (Bree -> Rivendell): loop the menu until 215 more miles are covered."""
    print("\n")
    print("Bree ----------> Rivendell")
    init_stat()
    while 1320 < f.miles <=1535:
        options2()
    else:
        chap3()
#Slightly modified text and options for chapter 2
def options2():
    """Chapter-2 action menu with Strider flavor text."""
    print("\n")
    print("what would you like to do?")
    select = input("travel, forage, hunt, rest, help, quit: ")
    # NOTE(review): unlike options1(), these are independent `if`s and there
    # is no fallback for unrecognized input — it silently returns.
    if select == "travel":
        progress()
        trav2()
    if select == "forage":
        forage()
        trav2()
    if select == "hunt":
        print("\n")
        print("Strider grabs his bow and leaves the camp to hunt.")
        hunt()
        trav2()
    if select == "rest":
        print("\n")
        print("As the hobbits sit down to second breakfast, Strider stairs at you in disbelief.")
        print("'What are you doing??'")
        print("\n")
        time.sleep(2)
        print("We'd better get back on the road.")
        rest()
        trav2()
    if select=="help":
        helpchoices()
    if select =="quit":
        quit_check()
# Module-level Morgul-blade countdown; note f.daysleft is a separate counter.
daysleft = 4
#morgal blade death timer
def mbtimer():
    """Kill the ring-bearer when the Morgul-blade countdown reaches zero."""
    # NOTE(review): this reads the module-level `daysleft` (never decremented
    # globally) rather than f.daysleft used by progress_swift/slow; `FALSE`
    # is an undefined name, `==` is a no-op, and ringbearer() requires an
    # argument — the whole branch raises at runtime if ever reached.
    if daysleft == 0:
        r.alive == FALSE
        ringbearer()
#Chapter 3 text/story
def chap3():
    """Chapter 3: Weathertop ambush and the Morgul-blade wound; choose a route to Rivendell."""
    print("\n")
    print("Strider successfully leads your party through the Midgewater Marshes.")
    print("As you make your journey to Rivendell, you rest at the ruin of Weathertop.")
    print("There, you are waylayed by five of the Nazgul.")
    time.sleep(2)
    print("\n")
    print("In an attempt to hide from them, "+(str(player_name))+" put on the ring.")
    print("Oh no! "+(str(player_name))+" is stabbed with a poisoned Morgul blade by their leader.")
    print("\n")
    print(" .---.")
    print(" |---|")
    print(" |---|")
    print(" |---|")
    print(" .---^ - ^---.")
    print(" :___________:")
    print(" | |//")
    print(" | |//|")
    print(" | |//|")
    print(" | |//|")
    print(" | |//|")
    print(" | |//|")
    print(" | |.-|")
    print(" |.-'**|")
    print(" \***/")
    print(" \*/")
    print(" V")
    print("\n")
    time.sleep(2)
    print("Strider managed to fight away the Nazgul with fire and blade.")
    time.sleep(1)
    print("\n")
    print("Strider is unable to heal "+(str(player_name))+"'s wound.")
    print("A poisoned tip of the Ringwraith's blade remains in their flesh, moving toweard their heart.")
    time.sleep(1)
    print("\n")
    print((str(player_name))+" doesn't have much time left.")
    print("\n")
    print("The elf, Arwen, appears. She offers aid, and hurries the party on to Rivendell.")
    print("\n")
    print("Arwen has joined your party. She brings her horse, Asfaloth, weapons, and supplies.")
    f.companions.append("Arwen")
    f.food += 30
    f.water += 30
    f.ponies += 1
    init_stat()
    print("\n")
    print("Arwen urges that there isn't much time left for "+(str(player_name)))
    print("'I'll carry them, I can move faster than the group,' she urges")
    time.sleep(1)
    print("\n")
    print("Should "+(str(player_name))+" travel with Arwen? Or remain with the larger group?")
    print("Note, if you don't make it to Rivendell in 4 days, "+(str(player_name))+" will surely die.")
    print("1.) Travel with Arwen")
    print("2.) Travel with the full group")
    option = input("-->")
    if option == "1" or option == "Arwen":
        # Riding with Arwen: the rest of the party stays behind.
        f.companions.remove(companion_name)
        f.companions.remove("Strider")
        f.companions.remove("Pippin")
        f.companions.remove("Merry")
        f.ponies = 1
        f.daysleft = 5
        trav3a()
    elif option == "2" or option == "group" or option == "party":
        trav3b()
    else:
        # NOTE(review): the re-prompted value is discarded and the function
        # returns — invalid input here soft-locks the chapter.
        print("please check your input and try again")
        option = input("-->")
#traveling with Arwen
def trav3a():
    """Travel leg 3 (with Arwen): Weathertop -> Rivendell."""
    print("\n")
    print("Weathertop ----------> Rivendell")
    init_stat()
    while 1067 < f.miles <=1320:
        options3a()
    else:
        chap4()
#traveling with the party
def trav3b():
    """Travel leg 3 (with the full party): Weathertop -> Rivendell."""
    print("\n")
    print("Weathertop ----------> Rivendell")
    init_stat()
    # NOTE(review): options3b() re-enters trav2(), not trav3b(), so the game
    # falls back to the chapter-2 loop after one action here.
    while 1067 < f.miles <=1320:
        options3b()
    else:
        chap4()
#swift travel with Arwen, no stopping for supplies
def options3a():
    """Arwen's forced-march menu: travel only, no foraging or rest."""
    print("\n")
    print("We must make haste. Do not tarry.")
    select = input("travel, help, quit: ")
    if select == "travel":
        progress_swift()
        trav3a()
    if select=="help":
        helpchoices()
    if select =="quit":
        quit_check()
#travel options with the party.
def options3b():
    """Chapter-3 party menu; actions also tick the Morgul-blade countdown."""
    print("\n")
    print("what would you like to do?")
    select = input("travel, forage, hunt, rest, help, quit: ")
    # NOTE(review): every branch returns to trav2() (chapter 2's loop) instead
    # of trav3b(), and the bare `daysleft +=1`/`-=1` below modify a local
    # without a `global` declaration -> UnboundLocalError at runtime.
    if select == "travel":
        progress_slow()
        trav2()
    if select == "forage":
        print("\n")
        print("Arwen and Strider urge you to hurry. Though if you can find the Kingsfoil plant, it might help prevent the poison from spreading.")
        forage()
        print((str(companion_name))+" manages to find Kingsfoil. Perhaps "+(str(player_name))+" might make it another day.")
        daysleft +=1
        trav2()
    if select == "hunt":
        print("\n")
        print("Strider grabs his bow and leaves the camp to hunt.")
        daysleft -=1
        hunt()
        trav2()
    if select == "rest":
        print("\n")
        print("The hobbits are exhausted and stop to rest.")
        print("\n")
        daysleft -=1
        rest()
        trav2()
    if select=="help":
        helpchoices()
    if select =="quit":
        quit_check()
#Arwen speed
def progress_swift():
    """One day of fast travel with Arwen (50-80 miles), ticking the blade countdown."""
    f.daysleft -=1
    mbtimer()
    deathcheck()
    miles_traveled = random.randint(50,80)
    f.day +=1
    f.miles = f.miles - miles_traveled + f.ponies*5
    f.food = f.food - 1 - len(f.companions) - f.ponies
    f.water = f.water - 1 - len(f.companions) - f.ponies
#party speed - slightly increased for fairness and a possibility to make it
def progress_slow():
    """One day of travel with the full party (40-70 miles), ticking the blade countdown."""
    f.daysleft -=1
    mbtimer()
    deathcheck()
    miles_traveled = random.randint(40,70)
    f.day +=1
    f.miles = f.miles - miles_traveled + f.ponies*5
    f.food = f.food - 1 - len(f.companions) - f.ponies
    f.water = f.water - 1 - len(f.companions) - f.ponies
def chap4():
    """Chapter 4 stub: not yet written; prints a placeholder and exits."""
    print("placeholder")
    quit()
# Entry point: launch the title screen.
main()
| sciangela/totr | totr.py | totr.py | py | 28,218 | python | en | code | 1 | github-code | 36 |
39497612599 | """
Module to handle a local InfoKinds with unique name.
NOTE: this is taken from python-common in nomad-lab-base.
It is copied here to remove the dependency from nomad-lab-base.
For more info on python-common visit:
https://gitlab.mpcdf.mpg.de/nomad-lab/python-common
The author of this code is: Dr. Fawzi Roberto Mohamed
E-mail: mohamed@fhi-berlin.mpg.de
"""
from past.builtins import cmp
from future import standard_library
standard_library.install_aliases()
from builtins import str
from builtins import range
from builtins import object
import logging
from ai4materials.external import compact_sha
import json
import os, re
from ai4materials.external.json_support import jsonCompactS, jsonCompactD, jsonIndentD
from io import open
class InfoKindEl(object):
    """Info kind (typically from a file, without shas but with locally unique names).

    Fixes relative to the original:
    * ``RAISE_IF_EXTRA_ARGS`` raised even when *no* extra arguments were
      passed; it now raises only if extra arguments are actually present.
    * ``prepare`` used a Python-2 cmp-style ``list.sort(lambda ...)``, which
      is a TypeError on Python 3; it now sorts with a key function.
    * ``__unicode__`` referenced the unimported Python-2 ``StringIO`` module
      and called ``serialize`` without its required arguments; it now
      serializes via ``io.StringIO`` and ``toDict``.
    """
    __slots__ = ["name","description","kindStr","units","superNames","dtypeStr", "repeats", "shape", "extra_args"]

    # Policies for constructor keyword arguments not covered by __slots__.
    IGNORE_EXTRA_ARGS = 1
    ADD_EXTRA_ARGS = 2
    RAISE_IF_EXTRA_ARGS = 3

    def __init__(self, name, description, kindStr = "type_document_content", units = None, superNames = None,
            dtypeStr = None, shape = None, extraArgsHandling = ADD_EXTRA_ARGS, repeats = None, **extra_args):
        if superNames is None:
            superNames = []
        self.name = name
        self.description = description
        self.kindStr = kindStr
        self.superNames = superNames
        self.units = units
        self.dtypeStr = dtypeStr
        if dtypeStr in ["None", "null"]:
            self.dtypeStr = None
        self.shape = shape
        self.repeats = repeats
        if extraArgsHandling == self.ADD_EXTRA_ARGS:
            self.extra_args = extra_args
        elif extraArgsHandling == self.IGNORE_EXTRA_ARGS:
            self.extra_args = {}
        elif extra_args:
            # RAISE_IF_EXTRA_ARGS with actual extras present.
            raise Exception("extra arguments to InfoKindEl:" + str(extra_args))
        else:
            self.extra_args = {}

    def __eq__(o1, o2):
        """Structural equality; superNames beyond the first compare order-insensitively."""
        if not (o1.name == o2.name and o1.description == o2.description and o1.kindStr == o2.kindStr and
                o1.units == o2.units and o1.shape == o2.shape):
            return False
        if o1.dtypeStr != o2.dtypeStr:
            return False
        if o1.repeats != o2.repeats:
            return False
        if o1.extra_args != o2.extra_args:
            return False
        if o1.superNames == o2.superNames:
            return True
        if len(o1.superNames) != len(o2.superNames):
            return False
        # The first superName is ordered; the rest are compared as sets.
        if o1.superNames[0] != o2.superNames[0]:
            return False
        a1 = sorted(o1.superNames[1:])
        a2 = sorted(o2.superNames[1:])
        return a1 == a2

    def __cmp__(k1, k2):
        # Python-2 legacy total ordering (uses cmp from past.builtins);
        # retained for backward compatibility, not used by Python 3 sorting.
        c = cmp(k1.name, k2.name)
        if c != 0: return c
        c = cmp(k1.kindStr, k2.kindStr)
        if c != 0: return c
        c = cmp(k1.description, k2.description)
        if c != 0: return c
        if len(k1.superNames) > 0:
            if len(k2.superNames) > 0:
                c = cmp(k1.superNames[0], k2.superNames[0])
                if c != 0: return c
                s1 = k1.superNames[1:]
                s2 = k2.superNames[1:]
                c = cmp(s1, s2)
                if c != 0: return c
            else:
                return 1
        elif len(k2.superNames) > 0:
            return -1
        c = cmp(k1.units, k2.units)
        if c != 0: return c
        c = cmp(k1.dtypeStr, k2.dtypeStr)
        if c != 0: return c
        c = cmp(k1.repeats, k2.repeats)
        if c != 0: return c
        c = cmp(k1.shape, k2.shape)
        if c != 0: return c
        if k1.extra_args == k2.extra_args:
            return 0
        if k1.extra_args is None:
            return 1
        if k2.extra_args is None:
            return -1
        extraK1 = sorted(k1.extra_args.keys())
        extraK2 = sorted(k2.extra_args.keys())
        i = 0
        while (i < len(extraK1) and i < len(extraK2)):
            kk1 = extraK1[i]
            kk2 = extraK2[i]
            c = cmp(kk1, kk2)
            if c != 0: return c
            c = cmp(k1.extra_args[kk1], k2.extra_args[kk2])
            if c != 0: return c
            i += 1
        return cmp(len(extraK1), len(extraK2))

    def __ne__(o1, o2):
        return not o1.__eq__(o2)

    def prepare(self, env):
        """Canonicalize: sort all superNames but the first by their gid."""
        if len(self.superNames) > 1:
            tail = sorted(self.superNames[1:],
                          key=lambda x: env.gidOf(x, precalculatedGid=True))
            self.superNames[1:] = tail

    def evalGid(self, env):
        """Compute this kind's gid from its canonical serialization."""
        self.prepare(env)
        sha = env.newSha()
        self.serialize(env, sha.update, precalculatedGid = True, selfGid = False)
        return 'p' + sha.b64digest()[:28]

    def serialize(self, env, writeOut, subGids = True, addExtraArgs = True, precalculatedGid = False, selfGid = True):
        """Write this kind as compact JSON through *writeOut*."""
        d = self.toDict(env, subGids = subGids, addExtraArgs = addExtraArgs, precalculatedGid = precalculatedGid, selfGid = selfGid)
        jsonCompactD(d, writeOut)

    def toDict(self, env = None, addExtraArgs = True, inlineExtraArgs = True , selfGid = False, subGids = False, precalculatedGid = False):
        """Dictionary representation; gids require *env*. Optional fields are omitted when unset."""
        res = {
            "description": self.description,
            "name": self.name,
            "superNames": self.superNames,
        }
        try:
            # The default kindStr is implicit and not serialized.
            if self.kindStr != "type_document_content":
                if self.kindStr is None or self.kindStr == "":
                    res["kindStr"] = "MetaType"
                else:
                    res["kindStr"] = self.kindStr
            if env:
                if selfGid:
                    res["gid"] = env.gidOf(self.name, precalculatedGid = precalculatedGid)
                if subGids:
                    res["superGids"] = [ env.gidOf(sName, precalculatedGid = precalculatedGid) for sName in self.superNames ]
            elif subGids or selfGid:
                raise Exception("env required in toDict for subGids or selfGid")
            if self.units is not None:
                res["units"] = self.units
            if self.dtypeStr is not None:
                res["dtypeStr"] = self.dtypeStr
            if self.repeats is not None:
                res["repeats"] = self.repeats
            if self.shape is not None:
                res["shape"] = self.shape
            if addExtraArgs:
                if inlineExtraArgs:
                    res.update(self.extra_args)
                else:
                    res["extraArgs"] = self.extra_args
        except:
            logging.exception("error in InfoKindEl.toDict, partial dict is %s", res)
        return res

    def __unicode__(self):
        import io
        buf = io.StringIO()
        jsonCompactD(self.toDict(), buf.write)
        return buf.getvalue()
class RelativeDependencySolver(object):
    """Dependency loader that resolves 'relativePath' entries against the
    source file's directory, caching one InfoKindEnv per absolute path."""

    def __init__(self):
        # absolute path -> InfoKindEnv already loaded from that file
        self.deps = {}

    def __call__(self, infoKindEnv, source, dep):
        if "relativePath" not in dep:
            raise Exception('Invalid dependency for relativeDependencySolver there must be a relativePath')
        basePath = source.get('path')
        baseDir = os.path.dirname(os.path.abspath(basePath)) if basePath else os.getcwd()
        depPath = os.path.realpath(os.path.join(baseDir, dep['relativePath']))
        cached = self.deps.get(depPath)
        if cached is not None:
            return cached
        # Register the (still empty) env before loading so cyclic references
        # resolve to the same instance.
        depEnv = InfoKindEnv(path=depPath, dependencyLoader=infoKindEnv.dependencyLoader)
        self.deps[depPath] = depEnv
        loaded = None
        with open(depPath, encoding="utf-8") as fh:
            try:
                loaded = json.load(fh)
            except:
                logging.exception("Error while loading dependency %s" % fh)
                raise
        if loaded:
            depEnv.fromJsonList(loaded, name=os.path.basename(depPath),
                                source={'path': depPath}, dependencyLoad=False)
        return depEnv
class InfoKindEnv(object):
"""An environment keeping locally unique InfoKinds and their gids"""
    def __init__(self, infoKinds = None, name = None, description = None, newSha = compact_sha.sha512, gids = None,
            dependencyLoader = None, path = None, uri = None, deps = None):
        """Create an environment, optionally pre-populated with *infoKinds*, *gids* and *deps*."""
        self.newSha = newSha
        # clear() initializes self.gids / self.infoKinds / self.deps.
        self.clear()
        self.name = name
        self.description = description
        self.dependencyLoader = dependencyLoader
        if dependencyLoader is None:
            self.dependencyLoader = RelativeDependencySolver()
        self.path = path
        self.uri = uri
        if not infoKinds is None:
            for ik in infoKinds:
                self.addInfoKindEl(ik)
        if not gids is None:
            # NOTE: replaces (not merges) the gid cache created by clear().
            self.gids = gids
        if deps:
            self.deps = deps
def __str__(self):
if self.path:
return "InfoKindEnv loaded from {}".format(self.path)
    def clear(self):
        """Drop all known kinds, cached gids and dependencies."""
        self.gids = {}       # name -> gid cache
        self.infoKinds = {}  # name -> InfoKindEl
        self.deps = []       # dependency InfoKindEnv instances
def depNames(self):
res = set()
for dep in self.deps:
for name in dep.infoKinds.keys():
res.add(name)
return res
def noDepNames(self):
return set(self.infoKinds.keys()).difference(self.depNames())
    def embedDeps(self):
        """Copy all dependency kinds into this env.

        Returns a dict with "hidden" (dependency kinds shadowed by a
        *different* local definition) and "duplicate" (names defined both
        locally and identically in a dependency)."""
        hidden = []
        duplicate = set()
        for dep in self.deps:
            for name, ikEl in dep.infoKinds.items():
                oldVal=self.infoKinds.get(name, None)
                if oldVal is None:
                    self.infoKinds[name] = ikEl
                elif ikEl != oldVal:
                    hidden.append(ikEl)
                else:
                    duplicate.add(name)
        return { "hidden": hidden, "duplicate": duplicate }
def addInfoKindEl(self, infoKind):
if infoKind.name in self.infoKinds and infoKind != self.infoKinds[infoKind.name]:
raise Exception('InfoKindEnv has collision for name {0}: {1} vs {2}'
.format(infoKind.name, infoKind, self.infoKinds[infoKind.name]))
self.infoKinds[infoKind.name] = infoKind
    def addDependenciesFrom(self, infoKindEnv):
        """Pull in, from *infoKindEnv*, every kind transitively referenced by
        the kinds already defined here; return the set of names that could
        not be resolved."""
        toAdd = set(self.infoKinds.keys())
        missing = set()
        # Worklist over names: resolve locally, else copy from infoKindEnv,
        # else record as missing.
        while len(toAdd):
            ikName = toAdd.pop()
            ik = self.infoKinds.get(ikName,None)
            if ik is None:
                depInfoKindEl = infoKindEnv.infoKinds.get(ikName, None)
                if depInfoKindEl:
                    self.infoKinds[ikName] = depInfoKindEl
                    # Re-queue so its own superNames get scanned next round.
                    toAdd.add(ikName)
                else:
                    missing.add(ikName)
            else:
                for dep in ik.superNames:
                    if not dep in self.infoKinds:
                        toAdd.add(dep)
        return missing
def gidOf(self, name, precalculatedGid=False):
res = self.gids.get(name,None)
if res is None:
if precalculatedGid:
raise Exception("non precalculated gid for %s" % name)
res = self.calcGid(name)
return res
    def calcGid(self, name):
        """Compute (and cache) the gid of *name*, first resolving the gids of
        everything it transitively depends on; raises on unknown names or
        dependency cycles."""
        inProgress = []   # names whose dependencies are still being resolved
        toDo = [name]
        hasPending = False
        # NOTE(review): the outer `for i in range(2)` re-runs the worklist once
        # with the leftover inProgress entries — presumably to finish nodes
        # deferred in the first pass; verify against upstream python-common.
        for i in range(2):
            while len(toDo) > 0:
                if not hasPending and inProgress:
                    now = inProgress.pop()
                else:
                    now = toDo.pop()
                if now in self.gids and now in inProgress:
                    inProgress.remove(now)
                hasPending = False
                nowVal = self.infoKinds.get(now, None)
                if nowVal is None:
                    raise Exception("while calculating gid of %r found unknown key %r" % (name, now))
                for subName in nowVal.superNames:
                    if subName in self.gids:
                        continue
                    hasPending = True
                    if subName in toDo:
                        toDo.remove(subName)
                    if subName in inProgress:
                        raise Exception('found loop to %s evaluating %s, currently in progress: %s' % (subName, now, inProgress))
                    toDo.append(subName)
                if not hasPending:
                    # All dependencies resolved: this gid can be evaluated.
                    self.gids[now] = nowVal.evalGid(self)
                    if now in inProgress:
                        inProgress.remove(now)
                else:
                    if now in inProgress:
                        raise Exception('found loop to %s, currently in progress: %s' % (now, inProgress))
                    inProgress.append(now)
            toDo = list(inProgress)
        return self.gids[name]
def keyDependsOnKey(self, k1Name, k2Name):
        """partial ordering given by the dependencies
        1: k1Name depends on k2Name
        0: k1Name == k2Name
        -1: k2Name depends on k1Name
        None: no dependency

        Performs two interleaved breadth-first walks over the superNames
        (and kindStr) closures of both keys, stopping as soon as one key is
        reached from the other.  Note: when the two kinds share identical
        superNames lists the method falls through and implicitly returns
        None (no dependency).
        """
        if k1Name == k2Name: return 0
        k1 = self.infoKinds[k1Name]
        k2 = self.infoKinds[k2Name]
        if k1.superNames != k2.superNames:
            allSuperK1 = set()
            toDoK1 = list(k1.superNames)
            allSuperK2 = set()
            toDoK2 = list(k2.superNames)
            while (len(toDoK1) > 0 or len(toDoK2) > 0):
                if len(toDoK1) > 0:
                    el1Name = toDoK1.pop()
                    if k2Name == el1Name:
                        return 1
                    el1 = self.infoKinds[el1Name]
                    # the kindStr itself may name another kind; follow it too
                    if el1.kindStr in self and not el1.kindStr in allSuperK1:
                        toDoK1.append(el1.kindStr)
                    for subEl in el1.superNames:
                        if not subEl in allSuperK1:
                            toDoK1.append(subEl)
                    allSuperK1.update(el1.superNames)
                if len(toDoK2) > 0:
                    el2Name = toDoK2.pop()
                    if k1Name == el2Name:
                        return -1
                    el2 = self.infoKinds[el2Name]
                    if el2.kindStr in self and not el2.kindStr in allSuperK2:
                        toDoK2.append(el2.kindStr)
                    for subEl in el2.superNames:
                        if not subEl in allSuperK2:
                            toDoK2.append(subEl)
                    allSuperK2.update(el2.superNames)
            return None
def __contains__(self, name):
        """True when an InfoKindEl with the given name is registered in this environment."""
        return name in self.infoKinds
def __len__(self):
        """Return the number of InfoKindEl entries stored in this environment."""
        return len(self.infoKinds)
def __getitem__(self, name):
        """Return a dict representation of the entry named *name*, or None if absent.

        Note: unlike normal mapping semantics this returns None instead of
        raising KeyError for unknown names.
        """
        ikEl = self.infoKinds.get(name, None)
        if ikEl:
            return ikEl.toDict(self)
        return None
def infoKindEls(self):
        """Return all registered InfoKindEl objects as a fresh list."""
        return list(self.infoKinds.values())
def infoKindEl(self, name):
        """Return the InfoKindEl registered under *name*, or None if it does not exist."""
        return self.infoKinds.get(name, None)
def calcGids(self):
        """Compute and cache a gid for every registered kind that lacks one."""
        for name in self.infoKinds:
            if name not in self.gids:
                self.gids[name] = self.calcGid(name)
def serialize(self, writeOut, subGids = True, selfGid = True):
        """Write this environment as nomad_meta_info_1_0 JSON through *writeOut*.

        Emits the description, then the dependency list (preferring a
        relativePath entry when a dependency lives under this file's
        directory, else its uri), then every meta info that is not supplied
        by a dependency.  Output order follows sortedIKs().
        """
        infoKinds = self.sortedIKs()
        writeOut("""{
  "type": "nomad_meta_info_1_0",
  "description": """)
        if self.description:
            jsonIndentD(self.description, writeOut, extraIndent = 4)
        else:
            writeOut('""')
        writeOut(',\n')
        # resolve the base directory used to relativize dependency paths
        if not self.path:
            baseDir = os.getcwd()
        else:
            baseDir = os.path.normpath(os.path.dirname(os.path.abspath(self.path)))
        depKeys = set()
        if self.deps:
            writeOut('  "dependencies": [ ')
            depColon = False
            for d in self.deps:
                path = d.path
                uri = d.uri
                # keys provided by dependencies are skipped in "metaInfos" below
                depKeys.update(d.infoKinds.keys())
                if path:
                    path = os.path.normpath(os.path.abspath(path))
                    if path.startswith(baseDir) or not uri:
                        if depColon:
                            writeOut(", ")
                        else:
                            depColon = True
                        jsonIndentD({"relativePath": os.path.relpath(path, baseDir)}, writeOut, extraIndent = 4)
                        continue
                if uri:
                    if depColon:
                        writeOut(", ")
                    else:
                        depColon = True
                    jsonIndentD({"uri": uri}, writeOut, extraIndent = 4)
                    continue
                raise Exception("Dependency on serializable %s" % d)
            writeOut('],\n')
        addColon = False
        writeOut('  "metaInfos": [ ')
        for ik in infoKinds:
            if ik.name in depKeys:
                continue
            if addColon:
                writeOut(", ")
            else:
                addColon = True
            jsonIndentD(ik.toDict(env = self, subGids = subGids, selfGid = selfGid), writeOut, extraIndent = 4, check_circular = True)
        writeOut("]\n}\n")
def sortedIKs(self):
        """Return the registered InfoKindEl values sorted by name.

        Sorting on ``name.lower() + name`` orders case-insensitively first
        and breaks ties deterministically on the exact spelling.

        Fix: the original used the Python 2 ``list.sort(cmp_function)``
        calling convention (with the removed builtin ``cmp``), which raises
        a TypeError on Python 3; it is replaced by an equivalent ``key``.
        """
        infoKinds = list(self.infoKinds.values())
        infoKinds.sort(key=lambda ik: ik.name.lower() + ik.name)
        return infoKinds # self.sortAndComplete(infoKinds, ignoreMissing = True)
def toJsonList(self, withGids):
        """Serialize every entry to a dict, ordered by self.compareKeys.

        When *withGids* is true the environment itself is passed so gids
        are embedded in the dictionaries.

        Fixes two Python 3 defects in the original: ``list.sort`` was given
        a comparison function positionally (TypeError on Python 3), and the
        comparison accessed ``x.name`` although the items being sorted are
        the key strings themselves (AttributeError).
        """
        import functools
        names = sorted(self.infoKinds.keys(),
                       key=functools.cmp_to_key(lambda a, b: self.compareKeys(a, b)))
        return [self.infoKinds[n].toDict(self,
            self if withGids else None) for n in names]
def verifyGids(self, preserveAbsent=False):
        """Recompute all gids and report entries whose value changed.

        Returns a dict mapping each differing name to (oldGid, newGid);
        names that vanished map to (oldGid, None) unless *preserveAbsent*
        is true, in which case their old gid is kept in self.gids instead.
        """
        previous = self.gids
        self.gids = {}
        self.calcGids()
        diffs = {}
        for name, oldGid in previous.items():
            current = self.gids.get(name)
            if current is None:
                if preserveAbsent:
                    self.gids[name] = oldGid
                else:
                    diffs[name] = (oldGid, None)
            elif oldGid != current:
                diffs[name] = (oldGid, current)
        return diffs
def fromJsonList(self, jsonDict, name, source, extraArgsHandling = InfoKindEl.ADD_EXTRA_ARGS, dependencyLoad=False):
        """Populate this environment from a parsed nomad_meta_info JSON dict.

        Loads declared dependencies through self.dependencyLoader, then
        instantiates an InfoKindEl per "metaInfos" entry, recording any
        redefinitions.  Returns a warnings dict with at least
        "overwritten" and "hasWarnings" keys.
        """
        typeStr = jsonDict.get("type","nomad_meta_info_1_0")
        typeRe = re.compile(r"nomad_meta_info_(?P<major>[0-9]+)_(?P<minor>[0-9]+)$")
        self.name = name
        m = typeRe.match(typeStr)
        if not m:
            raise Exception("unexpected type '%s', expected nomad_meta_info_1_0" % typeStr)
        if int(m.group("major")) != 1:
            # NOTE(review): the format string has no argument applied and
            # "expeced" is a typo in the emitted message.
            raise Exception("Unsupported major version %s, expeced 1")
        dependencies = jsonDict.get("dependencies",[])
        jsonList = jsonDict.get("metaInfos",[])
        self.description = jsonDict.get("description","")
        overwritten = []
        gidToCheck = {}
        deps = []  # NOTE(review): never used; self.deps is appended to below
        for d in dependencies:
            if self.dependencyLoader is None:
                raise Exception("no dependencyLoader while loading local_in")
            dep = self.dependencyLoader(self, source, d)
            if dep:
                self.deps.append(dep)
        index = -1
        for ii in jsonList:
            index += 1
            val = dict(ii)
            if not "name" in ii:
                raise Exception("InfoKind at %d is without name: %s" % (index, ii) )
            oldVal=self.infoKinds.get(ii['name'],None)
            gid=None
            if "gid" in ii:
                # gid is stored separately, not passed to the constructor
                gid = ii['gid']
                del val['gid']
            if "superGids" in ii:
                if not "superNames" in ii:
                    raise Exception("superGids without superNames in fromJsonList")
                superNames = ii["superNames"]
                superGids = ii["superGids"]
                if len(superNames) != len(superGids):
                    raise Exception("superGids incompatible with superNames in fromJsonList: %s vs %s" % (ii["superGids"], ii["superNames"]))
                toCheck = {}
                for i in range(len(superNames)):
                    assert not superNames[i] in toCheck.keys(), "duplicate superName %r in %r" % (superNames[i], ii["name"])
                    toCheck[superNames[i]] = superGids[i]
                # remembered for the consistency check after loading
                gidToCheck[ii["name"]] = toCheck
                del val['superGids']
            val['extraArgsHandling'] = extraArgsHandling
            ikEl = InfoKindEl(**val)
            if not oldVal is None and ikEl != oldVal:
                overwritten.append((oldVal, ikEl))
            if gid:
                self.gids[ii['name']] = gid
            self.infoKinds[ikEl.name] = ikEl
        res = { "overwritten": overwritten }
        if not dependencyLoad:
            res.update(self.embedDeps())
        # verify declared superGids against the freshly computed gids
        for childName, gids in gidToCheck.items():
            for name, gid in gids.items():
                if self.gidOf(name) != gid:
                    # NOTE(review): ``ii`` here is the loop variable leaking
                    # from the metaInfos loop (its last entry), not the child
                    # being checked — the message may name the wrong entry.
                    raise Exception("incompatible superGid for superName %s of %s (%s vs %s)" % (name, ii["name"], gid, self.gidOf(name)))
        if res.get("overwritten", False) or res.get("duplicate", False) or res.get("hidden", False):
            res["hasWarnings"] = True
        else:
            res["hasWarnings"] = res.get("hasWarnings", False)
        return res
def sortAndComplete(self, propsToSort, ignoreMissing = False):
        """builds a list of properties in propsToSort, so that all the dependecies of each
        property are present before them

        Iterative topological sort over superNames and kindStr dependencies.
        With ignoreMissing=True unknown dependency names are skipped instead
        of raising.  Cycles always raise.
        """
        toDo = list(propsToSort)
        done = set()
        deps = []
        res = []
        while len(toDo)>0:
            pAtt = toDo.pop()
            nameAtt = pAtt.name
            if nameAtt in done:
                continue
            # deps acts as an explicit DFS stack of names being resolved
            deps = [nameAtt]
            while len(deps)>0:
                nameAtt = deps[-1]
                pAtt = self.infoKinds.get(nameAtt, None)
                if pAtt is None:
                    if ignoreMissing:
                        deps.pop()
                        done.add(nameAtt)
                        continue
                    else:
                        raise Exception("missing dependent InfoKindEl {0} following chain {1}".format(nameAtt, pAtt))
                hasDepsToDo = False
                kindStr = pAtt.kindStr
                kindType = self.infoKindEl(kindStr)
                for superName in pAtt.superNames:
                    if not superName in done:
                        if superName in deps:
                            raise Exception("circular dependency {0}, {1}".format(deps,superName))
                        deps.append(superName)
                        hasDepsToDo = True
                # a kindStr naming another registered kind is also a dependency
                if kindType and not kindStr in done:
                    if kindStr in deps:
                        raise Exception("circular dependency in kindStr {0}, {1}".format(deps,kindStr))
                    deps.append(kindStr)
                    hasDepsToDo = True
                if not hasDepsToDo:
                    # all dependencies emitted: this entry can be output now
                    deps.pop()
                    res.append(pAtt)
                    done.add(nameAtt)
        return res
def metaInfoNameWithAllSuper(self, name):
        """Return a set with *name* and the names of all its transitive supers."""
        seen = {name}
        frontier = [name]
        while frontier:
            current = frontier.pop()
            for superName in self.infoKinds[current].superNames:
                if superName not in seen:
                    seen.add(superName)
                    frontier.append(superName)
        return seen
def firstAncestorsByType(self, name):
        """Returns the first acestors of each type separated in roots and children.
        (scala conversion, could be improved a bit)

        For every kindStr found among the ancestors of *name*, partitions
        those ancestors into (rootNames, childNames): ancestors of the same
        kind that are reachable from another ancestor of that kind are
        "children", the rest are "roots".
        """
        metaInfoNames = self.metaInfoNameWithAllSuper(name)
        metaInfoNames.remove(name)
        mInfo = list(metaInfoNames)
        # edges maps an ancestor's index to the indexes of its direct supers
        # NOTE(review): mInfo.index(x) raises ValueError if a superName is
        # not itself among the collected ancestors — confirm that cannot
        # happen for well-formed environments.
        edges = {}
        for i, metaName in enumerate(mInfo):
            metaInfo = self.infoKinds[metaName]
            edges[i] = [mInfo.index(x) for x in metaInfo.superNames]
        # group ancestor indexes by their kindStr
        typeGroups = {}
        for mIdx, metaName in enumerate(mInfo):
            kindStr = self.infoKinds[metaName].kindStr
            tNow = typeGroups.get(kindStr, None)
            if tNow is None:
                typeGroups[kindStr] = [mIdx]
            else:
                tNow.append(mIdx)
        # find, per kind, the ancestors reachable from another same-kind ancestor
        childsByType = {}
        toDo = set(range(len(mInfo)))
        while (toDo):
            now = toDo.pop()
            kindNow = self.infoKinds[mInfo[now]].kindStr
            toDo2 = set(edges[now])
            known2 = set(edges[now])
            while (toDo2):
                now2 = toDo2.pop()
                if (self.infoKinds[mInfo[now2]].kindStr == kindNow):
                    childs = childsByType.get(kindNow, None)
                    if childs:
                        childs.add(now2)
                    else:
                        childsByType[kindNow] = set([now2])
                    if now2 in toDo:
                        toDo.remove(now2)
                for el in edges[now2]:
                    if not el in known2:
                        toDo2.add(el)
                        known2.add(el)
        res = {}
        for typeName, allChilds in typeGroups.items():
            childs = childsByType.get(typeName, set())
            allForKind = set(allChilds)
            rootNames = [mInfo[x] for x in (allForKind - childs)]
            childNames = [mInfo[x] for x in childs]
            res[typeName] = (rootNames, childNames)
        return res
def loadJsonFile(filePath, dependencyLoader = None, extraArgsHandling = InfoKindEl.ADD_EXTRA_ARGS, uri = None):
    """Load an InfoKindEnv from the JSON file at *filePath*.

    Returns (env, warnings), where warnings is the dict produced by
    InfoKindEnv.fromJsonList.  Any failure is logged and re-raised.
    """
    env = InfoKindEnv(dependencyLoader = dependencyLoader, path = filePath, uri = uri)
    try:
        with open(filePath, encoding="utf-8") as f:
            o = json.load(f)
        warnings = env.fromJsonList(o, name = os.path.basename(filePath), source = {'path': filePath}, extraArgsHandling = extraArgsHandling)
    except:
        logging.exception("Error while loading file %s" % filePath)
        raise
    return env, warnings
def load_metainfo(filename, dependencyLoader=None, extraArgsHandling=InfoKindEl.ADD_EXTRA_ARGS, uri=None):
    """Loads a metainfo environment for a filename. The filename should not
    contain the full path, as the full path is resolved here and not by the
    caller.

    Args:
        filename: filename as a string.
    Returns:
        Tuple containing the metainfo environment, and any possible warnings
        that were encountered in the loading.
    """
    # resolve relative to the nomad-meta-info checkout expected to sit
    # four directories above this module
    path = os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "../../../../nomad-meta-info/meta_info/nomad_meta_info/{}".format(filename)))
    return loadJsonFile(path, dependencyLoader, extraArgsHandling, uri)
def loadJsonStream(fileStream, name = None, dependencyLoader = None, extraArgsHandling = InfoKindEl.ADD_EXTRA_ARGS, filePath = None, uri = None):
    """Load an InfoKindEnv from an already-open JSON stream.

    *filePath* and *name* default to the stream's .name attribute when
    available.  Returns (env, warnings); failures are logged and re-raised.

    Fix: the original used bare ``except:`` clauses, which also swallow
    KeyboardInterrupt/SystemExit; they are narrowed to the exceptions that
    can actually occur.
    """
    if filePath is None:
        try:
            filePath = fileStream.name
        except AttributeError:
            # not all streams expose a .name (e.g. StringIO)
            filePath = None
    if name is None and not filePath is None:
        name = os.path.basename(filePath)
    env = InfoKindEnv(dependencyLoader = dependencyLoader, name = name, path = filePath, uri = uri)
    try:
        o = json.load(fileStream)
        warnings = env.fromJsonList(o, source = {'path': filePath}, extraArgsHandling = extraArgsHandling)
    except Exception:
        logging.exception("Error while loading file %s" % filePath)
        raise
    return env, warnings
| angeloziletti/ai4materials | ai4materials/external/local_meta_info.py | local_meta_info.py | py | 27,845 | python | en | code | 36 | github-code | 36 |
41737860574 | from functools import cmp_to_key
def custom_split(s):
    """Split a packet body on top-level commas, leaving bracketed lists intact.

    Example: "1,[2,3],4" -> ["1", "[2,3]", "4"].  Returns [] for "".
    """
    if not s:
        return []
    parts = []
    depth = 0
    start = 0
    for i, ch in enumerate(s):
        if ch == '[':
            depth += 1
        elif ch == ']':
            depth -= 1
        elif ch == ',' and depth == 0:
            parts.append(s[start:i])
            start = i + 1
    parts.append(s[start:])
    return parts
def compare(l1, l2):
    """Compare two packets (lists of element strings, as produced by custom_split).

    Returns 1 when l1 < l2 (correct order), -1 when l1 > l2, 0 when equal,
    following Advent of Code 2022 day 13 rules: integers compare
    numerically, lists compare element-wise, and an integer facing a list
    is promoted to a one-element list.
    """
    len1 = len(l1)
    len2 = len(l2)
    cnt = 0
    while cnt < len1 and cnt < len2:
        len1 = len(l1)
        len2 = len(l2)
        if l1[cnt][0] != '[' and l2[cnt][0] != '[': # both values are integers
            if int(l1[cnt]) < int(l2[cnt]): return 1
            if int(l1[cnt]) > int(l2[cnt]): return -1
        else:
            # at least one side is a list: promote the scalar side and recurse
            if l1[cnt][0] == '[':
                l1_new = custom_split(l1[cnt][1:-1])
            else:
                temp = l1[cnt]
                l1_new = []
                l1_new.append(temp)
            if l2[cnt][0] == '[':
                l2_new = custom_split(l2[cnt][1:-1])
            else:
                temp = l2[cnt]
                l2_new = []
                l2_new.append(temp)
            compare_res = compare(l1_new,l2_new)
            if compare_res == 1: return 1
            if compare_res == -1: return -1
        cnt += 1
    # shared prefix equal: the shorter list comes first
    if not cnt < len1 and cnt < len2: return 1
    if cnt < len1 and not cnt < len2: return -1
    return 0
def custom_sort(a, b):
    """Predicate form of compare(): True when *a* sorts strictly before *b*."""
    return compare(a, b) == 1
# Advent of Code 2022 day 13, part 2: sort all packets together with the
# two divider packets and multiply the dividers' 1-based positions.
packets = []
DIVIDER_PACKET_1 = ["[[2]]"]
DIVIDER_PACKET_2 = ["[[6]]"]
with open('input.txt') as f:
    lines = [line.rstrip('\n') for line in f]
cnt = 1
sum = 0  # NOTE(review): unused, and shadows the builtin sum()
# input comes in groups of three lines: packet, packet, blank separator
while 3*(cnt-1) < len(lines):
    packet_one = lines[3*(cnt-1)]
    packet_two = lines[3*(cnt-1)+1]
    # strip the outermost brackets before splitting into elements
    list_one = custom_split(packet_one[1:-1])
    list_two = custom_split(packet_two[1:-1])
    packets.append(list_one)
    packets.append(list_two)
    cnt += 1
packets.append(DIVIDER_PACKET_1)
packets.append(DIVIDER_PACKET_2)
# compare() returns 1 for "in order", so reverse=True yields ascending packets
packets = sorted(packets, key=cmp_to_key(compare), reverse=True)
cnt = 1
decoder_key = 1
for packet in packets:
    print(packet)
    if packet == DIVIDER_PACKET_1: decoder_key *= cnt
    if packet == DIVIDER_PACKET_2: decoder_key *= cnt
    cnt += 1
print("decoder_key: ", decoder_key) | Jiggzawyr/advent-of-code-2022 | Day 13 Distress Signal/part2.py | part2.py | py | 2,368 | python | en | code | 0 | github-code | 36 |
29099062357 | from PyQt5.QAxContainer import *
from PyQt5.QtCore import *
from config.errCode import *
from config.kiwoomType import RealType
from config.slack import Slack
from PyQt5.QtTest import *
import os
class Kiwoom(QAxWidget):
def __init__(self):
        """Connect to the Kiwoom OpenAPI, log in, and load account state.

        Runs the full startup sequence synchronously via Qt event loops:
        login, deposit query, holdings, open orders, saved portfolio,
        screen-number assignment and real-time data registration.
        """
        super().__init__() # == QAxWidget.__init__()
        print('class: GetAccountInfo -- api[kiwoom] connected')
        self.slack = Slack()
        self.realType = RealType()
        #_____event loop______#
        self.login_event_loop = QEventLoop() # event loop start - for login
        self.detail_account_info_event_loop = QEventLoop()
        self.calculator_event_loop = QEventLoop()
        #_____account_rel_____#
        self.account_stock_dict = {}
        self.not_signed_stock_dict = {}
        self.portfolio_stock_dict = {}
        self.account_num = None
        self.deposit = 0 # account deposit (KRW)
        self.use_money = 0
        self.use_money_percent = 0.5
        self.output_deposit = 0
        self.total_profit_loss_money = 0 # total valuation profit/loss amount
        self.total_profit_loss_rate = 0.0 # total profit rate (%)
        #___for_calculate_stock__#
        self.calcul_data=[]
        #_____screen num______#
        self.screen_my_info="2000"
        self.screen_calculate_stock='4000'
        self.screen_real_stock='5000' # screen number allocated per stock
        self.screen_order_stock='6000' # per-stock screen number used for orders
        self.screen_start_stop_real = '1000'
        #_____initial setting___#
        self.get_ocx_instance() # 1. take control of the Kiwoom OpenAPI OCX
        self.event_slots() # 2. wire up the TR event slots
        self.real_event_slots() # 2+. wire up the real-time event slots
        self.signal_login_commConnect() # 3. request login
        self.get_account_info() # 4. fetch the account number
        self.detail_account_info() # 5. request deposit details
        self.detail_account_mystock() # 6. request account holdings
        self.not_concluded_account() # 7. request open (unfilled) orders
        ##self.calculator_fnc() # stock screening (disabled)
        self.read_code() # 8. load previously screened stocks
        self.screen_number_setting() # 9. allocate screen numbers
        # register for market open/close events only ('0' = initial registration)
        self.dynamicCall('SetRealReg(QString, QString, QString, QString)',
                        self.screen_start_stop_real,
                        '',
                        self.realType.REALTYPE['장시작시간']['장운영구분'],
                        '0') # only the market-open event uses 0; all other real-time registrations use '1'
        # register every portfolio stock for real-time trade events
        for code in self.portfolio_stock_dict.keys():
            screen_num = self.portfolio_stock_dict[code]['스크린번호']
            fids = self.realType.REALTYPE['주식체결']['체결시간']
            self.dynamicCall('SetRealReg(QString, QString, QString, QString)',
                            screen_num,
                            code,
                            fids,
                            '1') # real-time per-stock registration uses '1'
            print('CODE : {}, SCREEN : {}, FID : {}'.format(code, screen_num, fids))
def get_code_list_by_market(self, market_code):
        """Return every ticker code of the given market as a list of strings."""
        raw = self.dynamicCall('GetCodeListByMarket(QString)', market_code)
        # the API returns 'code;code;...;' so the trailing empty item is dropped
        return raw.split(';')[:-1]
def calculator_fnc(self):
        """Run the daily-candle screening over KOSDAQ tickers.

        Skips the first 100 codes, then for each remaining code disconnects
        the previous real-time screen and requests its daily chart data.
        """
        codes = self.get_code_list_by_market('10')[100:]
        total = len(codes)
        print('코스닥 종목 수 : {}'.format(total))
        for idx, code in enumerate(codes):
            self.dynamicCall('DisconnectRealData(QString)', self.screen_calculate_stock)
            print('{} / {} : KOSDAQ Stock Code : {} is updating.. '.format(idx+1, total, code))
            self.day_kiwoom_db(code=code)
def day_kiwoom_db(self, code=None, date=None, sPrevNext='0'):
        """Request daily candle data (opt10081) for *code* and block until handled.

        sPrevNext='2' continues a paged request.  The 3.6 s wait respects
        the API rate limit.
        """
        QTest.qWait(3600) # keep the Qt event loop alive; only delay execution
        self.dynamicCall('SetInputValue(QString, QString)', '종목코드', code)
        self.dynamicCall('SetInputValue(QString, QString)', '수정주가구분', '1')
        if date != None:
            self.dynamicCall('SetInputValue(QString, QString)', '기준일자', date)
        self.dynamicCall('CommRqData(QString, QString, int, QString)',\
            '주식일봉차트조회', 'opt10081', sPrevNext, self.screen_calculate_stock)
        # block here until trdata_slot releases the loop
        self.calculator_event_loop.exec_()
##___api controller____##
def get_ocx_instance(self):
        """Attach this QAxWidget to the Kiwoom OpenAPI OCX control."""
        self.setControl("KHOPENAPI.KHOpenAPICtrl.1") # control the Kiwoom API registered as an .ocx COM component
##___group of event slots__##
def event_slots(self): # slot : receives data back when an event fires
        """Connect login and TR (transaction) events to their handler slots."""
        self.OnEventConnect.connect(self.login_slot) # receive the response to the login request
        self.OnReceiveTrData.connect(self.trdata_slot) # receive responses to transaction requests
###___slots__###
def login_slot(self, err_code):
        """Handle the login response: report the result and release the login loop."""
        print(errors(err_code)[1]) # print the error description for the login response
        self.login_event_loop.exit() # release the event loop blocked in signal_login_commConnect
def real_event_slots(self):
        """Connect the real-time data event to its handler slot."""
        self.OnReceiveRealData.connect(self.realdata_slot)
def trdata_slot(self, sScrNo, sRQName, sTrCode, sRecordName, sPrevNext):
        """Dispatch TR (transaction) responses from the Kiwoom OpenAPI.

        Handles four request types by sRQName: deposit details, account
        holdings, open (unfilled) orders and daily candle data, then
        releases the event loop that was blocking on the request.

        Fix: in the buy-signal branch the original assigned the undefined
        name ``true`` (``pass_success = true``), raising NameError the
        moment a buy signal was actually detected; corrected to ``True``.
        """
        if sRQName == '예수금상세현황요청':
            deposit = self.dynamicCall('GetCommData(QString, QString, int, QString)',
                                    sTrCode, sRQName, 0, '예수금')
            self.deposit = int(deposit)
            use_money = float(self.deposit)*self.use_money_percent
            self.use_money = int(use_money)
            self.use_money = self.use_money/4 # split the budget so 4+ stocks can be bought
            # deposit
            output_deposit = self.dynamicCall('GetCommData(QString, QString, int, QString)',
                                    sTrCode, sRQName, 0, '예수금')
            self.output_deposit = int(output_deposit)
            print('예수금 : {}'.format(self.output_deposit))
            # withdrawable amount
            can_exit = self.dynamicCall('GetCommData(QString, QString, int, QString)',
                                    sTrCode, sRQName, 0, '출금가능금액')
            self.can_exit = int(can_exit)
            print('출금가능금액 : {}'.format(self.can_exit))
            self.detail_account_info_event_loop.exit()
        elif sRQName == '계좌평가잔고내역요청':
            total_buy_money = self.dynamicCall('GetCommData(QString, QString, int, QString)',
                                        sTrCode, sRQName, 0, '총매입금액')
            self.total_buy_money = int(total_buy_money)
            total_profit_loss_money = self.dynamicCall('GetCommData(QString, QString, int, QString)',
                                        sTrCode, sRQName, 0, '총평가손익금액')
            self.total_profit_loss_money = int(total_profit_loss_money)
            total_profit_loss_rate = self.dynamicCall('GetCommData(QString, QString, int, QString)',
                                        sTrCode, sRQName, 0, '총수익률(%)')
            self.total_profit_loss_rate = float(total_profit_loss_rate)
            print('[계좌평가잔고내역요청(싱글)]\n총매입액: {}\n총평가손익:{}\n총수익률(%):{}'.format(\
                self.total_buy_money, self.total_profit_loss_money, self.total_profit_loss_rate ))
            # iterate the held stocks (max 20 rows per page)
            rows = self.dynamicCall('GetRepeatCnt(QString, QString)',sTrCode, sRQName) # at most 20
            for i in range(rows):
                code = self.dynamicCall('GetCommData(QString, QString, int, QString)',
                                        sTrCode, sRQName, i, '종목번호') # ticker of each held stock, in order
                code = code.strip()[1:]
                code_name = self.dynamicCall('GetCommData(QString, QString, int, QString)',
                                        sTrCode, sRQName, i, '종목명')
                code_name = code_name.strip()
                count_stock = self.dynamicCall('GetCommData(QString, QString, int, QString)',
                                        sTrCode, sRQName, i, '보유수량')
                count_stock = int(count_stock)
                buy_price = self.dynamicCall('GetCommData(QString, QString, int, QString)',
                                        sTrCode, sRQName, i, '매입가')
                buy_price = int(buy_price)
                profit_rate = self.dynamicCall('GetCommData(QString, QString, int, QString)',
                                        sTrCode, sRQName, i, '수익률(%)')
                profit_rate = float(profit_rate)
                current_price = self.dynamicCall('GetCommData(QString, QString, int, QString)',
                                        sTrCode, sRQName, i, '현재가')
                current_price = int(current_price)
                total_buy_price = self.dynamicCall('GetCommData(QString, QString, int, QString)',
                                        sTrCode, sRQName, i, '매입금액')
                total_buy_price = int(total_buy_price)
                count_can_sell_stock = self.dynamicCall('GetCommData(QString, QString, int, QString)',
                                        sTrCode, sRQName, i, '매매가능수량')
                count_can_sell_stock = int(count_can_sell_stock)
                mystockMonit = '[보유종목정보(멀티)]\n종목번호: {} | 종목명: {} | 보유수량: {} | 매입가: {} | 수익률(%): {} | 현재가: {} | 매입금액: {} | 매매가능수량: {}'.\
                    format(code, code_name, count_stock, buy_price, profit_rate, current_price, total_buy_price, count_can_sell_stock)
                print(mystockMonit)
                # self.slack.notification(
                #     text=mystockMonit)
                self.account_stock_dict[code]={}
                self.account_stock_dict[code].update({
                    'name':code_name,
                    'count':count_stock,
                    'buy_price':buy_price,
                    'profit_rate':profit_rate,
                    'current_price':current_price,
                    'total_buy_price':total_buy_price,
                    'count_sell':count_can_sell_stock
                })
                print('보유 종목 : {} - {}'.format(code_name,code))
            if sPrevNext == '2':
                # more pages remain: request the next 20 holdings
                print('현재 조회한 종목 수 : 20')
                print('다음 페이지를 조회합니다')
                self.detail_account_mystock(sPrevNext='2')
            else:
                print('현재 조회한 종목 수 : {}'.format(rows))
                print('최종페이지입니다.')
                self.detail_account_info_event_loop.exit()
        elif sRQName == '실시간미체결요청':
            rows = self.dynamicCall('GetRepeatCnt(QString, QString)', sTrCode, sRQName)
            for i in range(rows):
                code = self.dynamicCall('GetCommData(QString, QString, int, QString)', sTrCode, sRQName, i, '종목코드')
                code_name = self.dynamicCall('GetCommData(QString, QString, int, QString)', sTrCode, sRQName, i, '종목명')
                order_no = self.dynamicCall('GetCommData(QString, QString, int, QString)', sTrCode, sRQName, i, '주문번호')
                order_status = self.dynamicCall('GetCommData(QString, QString, int, QString)', sTrCode, sRQName, i, '주문상태')
                order_quantity = self.dynamicCall('GetCommData(QString, QString, int, QString)', sTrCode, sRQName, i, '주문수량')
                order_price = self.dynamicCall('GetCommData(QString, QString, int, QString)', sTrCode, sRQName, i, '주문가격')
                order_sector = self.dynamicCall('GetCommData(QString, QString, int, QString)', sTrCode, sRQName, i, '주문구분')
                not_signed_quantity = self.dynamicCall('GetCommData(QString, QString, int, QString)', sTrCode, sRQName, i, '미체결수량')
                ok_quantity = self.dynamicCall('GetCommData(QString, QString, int, QString)', sTrCode, sRQName, i, '체결량')
                code = code.strip()
                code_name = code_name.strip()
                order_no = order_no.strip()
                order_status = order_status.strip()
                order_quantity = int(order_quantity.strip())
                order_price = int(order_price.strip())
                order_sector = order_sector.strip().lstrip('+').lstrip('-')
                not_signed_quantity = int(not_signed_quantity.strip())
                ok_quantity = int(ok_quantity.strip())
                if order_no in self.not_signed_stock_dict:
                    pass
                else:
                    self.not_signed_stock_dict[order_no]={}
                self.not_signed_stock_dict[order_no].update({
                    'code':code,
                    'code_name':code_name,
                    'order_status':order_status,
                    'order_quantity':order_quantity,
                    'order_price':order_price,
                    'order_sector':order_sector,
                    'not_signed_quantity':not_signed_quantity,
                    'ok_quantity':ok_quantity
                })
                not_signed = '미체결 종목 : {}(주문번호:{})'.format(code_name, order_no)
                print(not_signed)
                # self.slack.notification(text=not_signed)
        elif '주식일봉차트조회' == sRQName:
            print('일봉 데이터 요청중..')
            code = self.dynamicCall('GetCommData(QString, QString, int, QString)',\
                sTrCode, sRQName, 0, '종목코드')
            code = code.strip()
            rows = self.dynamicCall('GetRepeatCnt(QString, QString)', sTrCode, sRQName)
            print('데이터 >> {} , {}개'.format(code, rows))
            # data = self.dynamicCall('GetCommDataEx(QString, QString)', sTrCode, sRQName)
            # [['', '현재가', '거래량', '거래대금', '날짜', '시가', '고가',' 저가],
            #  ['', '현재가', '거래량', '거래대금', '날짜', '시가', '고가',' 저가],
            #  ...]
            # collect each candle row: [,close,volume,amount,date,open,high,low,]
            # self.slack.notification(text="['', '현재가', '거래량', '거래대금', '날짜', '시가', '고가',' 저가]")
            for i in range(rows):
                data = []
                current_price = self.dynamicCall('GetCommData(QString, QString, int, QString)', sTrCode, sRQName, i, '현재가')
                trade_count = self.dynamicCall('GetCommData(QString, QString, int, QString)', sTrCode, sRQName, i, '거래량')
                trade_amount = self.dynamicCall('GetCommData(QString, QString, int, QString)', sTrCode, sRQName, i, '거래대금')
                date = self.dynamicCall('GetCommData(QString, QString, int, QString)', sTrCode, sRQName, i, '일자')
                start_price = self.dynamicCall('GetCommData(QString, QString, int, QString)', sTrCode, sRQName, i, '시가')
                high_price = self.dynamicCall('GetCommData(QString, QString, int, QString)', sTrCode, sRQName, i, '고가')
                low_price = self.dynamicCall('GetCommData(QString, QString, int, QString)', sTrCode, sRQName, i, '저가')
                data.append("")
                data.append(current_price.strip())
                data.append(trade_count.strip())
                data.append(trade_amount.strip())
                data.append(date.strip())
                data.append(start_price.strip())
                data.append(high_price.strip())
                data.append(low_price.strip())
                data.append("")
                self.calcul_data.append(data.copy())
            if sPrevNext == '2':
                # more candle pages remain: keep fetching
                self.day_kiwoom_db(code=code, sPrevNext=sPrevNext)
            else:
                ## 120-day moving-average screening (example strategy)
                print('상장 기간(총 일수) : {}'.format(len(self.calcul_data)))
                pass_success = False
                # need at least 120 candles to draw the moving average
                if self.calcul_data == None or len(self.calcul_data) < 120:
                    pass_success = False
                else:
                    # enough data (>= 120 days)
                    total_price = 0
                    for value in self.calcul_data[:120]: # most recent first: sum the last 120 closes
                        total_price += int(value[1])
                    moving_avg_price = total_price / 120
                    bottom_stock_price = False
                    check_price = None
                    # does today's candle straddle the 120-day average? (low <= avg <= high)
                    if int(self.calcul_data[0][7]) <= moving_avg_price and moving_avg_price <= int(self.calcul_data[0][6]):
                        code_nm = self.dynamicCall('GetMasterCodeName(QString)', code)
                        msg = '[매수신호] 오늘 {} ({}) 주가 - 120 이평선에 걸쳐있음'.format(code_nm, code)
                        print(msg)
                        self.slack.notification(text=msg)
                        bottom_stock_price = True
                        check_price = int(self.calcul_data[0][6]) # today's high
                    past_price = None
                    # scan backwards: was the price once above its 120-day average?
                    if bottom_stock_price == True:
                        moving_avg_price_past = 0
                        price_top_moving = False
                        idx = 1
                        while True:
                            if len(self.calcul_data[idx:]) < 120: # keep checking there are 120 days of data
                                print('데이터 부족함(120일치 데이터 필요)')
                                break
                            else:
                                total_price = 0
                                for value in self.calcul_data[idx:idx+120]:
                                    total_price += int(value[1]) # accumulate past closes
                                moving_avg_price_past = total_price / 120
                                if moving_avg_price_past <= int(self.calcul_data[idx][6]) and idx <= 20:
                                    price_top_moving = False
                                    break
                                elif int(self.calcul_data[idx][7]) > moving_avg_price_past and idx > 20:
                                    print('120일 이평선 위에 있는 일봉 확인')
                                    price_top_moving = True
                                    past_price = int(self.calcul_data[idx][7])
                                    break
                            idx += 1
                        if price_top_moving == True:
                            if moving_avg_price > moving_avg_price_past and check_price > past_price:
                                print('매수신호 포착')
                                pass_success = True  # BUGFIX: was the undefined name `true` (NameError)
                if pass_success == True:
                    print('포착된 종목 저장..')
                    code_nm = self.dynamicCall('GetMasterCodeName(QString)', code)
                    msg = '{},{},{}\n'.format(code, code_nm, str(self.calcul_data[0][1]))
                    f = open('files/condition_stock.txt', 'a', encoding='utf8')
                    f.write('%s,%s,%s\n' % (code, code_nm, str(self.calcul_data[0][1])))
                    f.close()
                    self.slack.notification(text=msg)
                elif pass_success == False:
                    code_nm = self.dynamicCall('GetMasterCodeName(QString)', code)
                    msg = '{} -{} | 조회 | 매수신호 포착되지 않음'.format(code, code_nm)
                    print(msg)
                    self.slack.notification(text=msg)
                self.calcul_data.clear()
                self.calculator_event_loop.exit()
            self.calculator_event_loop.exit()
        self.stop_screen_cancel(self.screen_my_info)
        self.detail_account_info_event_loop.exit()
##_____request_login_____##
def signal_login_commConnect(self):
        """Request login to the Kiwoom API and block until login_slot fires."""
        self.dynamicCall("CommConnect()") # login request signal
        self.login_event_loop.exec() ## wait here until the login response arrives
##_____request_account____##
def get_account_info(self):
        """Fetch the login's account list and store the second entry in self.account_num."""
        account_list = self.dynamicCall("GetLoginInfo(QString)", "ACCNO") # request account number signal
        account_num = account_list.split(';')[1] # second entry (paper-trading account)
        self.account_num = account_num
        print("account : {}".format(account_num))
def detail_account_info(self, sPrevNext='0'): # first request uses sPrevNext='0'
        """Request deposit details (opw00001) and block until trdata_slot replies."""
        print('예수금 요청중..')
        self.dynamicCall('SetInputValue(QString, QString)','계좌번호', self.account_num)
        self.dynamicCall('SetInputValue(QString, QString)','비밀번호', '0000')
        self.dynamicCall('SetInputValue(QString, QString)','비밀번호입력매체구분','0000')
        self.dynamicCall('SetInputValue(QString, QString)','조회구분','1')
        self.dynamicCall('CommRqData(QString, QString, int, QString)',\
            '예수금상세현황요청','opw00001',sPrevNext, self.screen_my_info)
        # fresh loop each request; released in the matching trdata_slot branch
        self.detail_account_info_event_loop = QEventLoop()
        self.detail_account_info_event_loop.exec_()
def stop_screen_cancel(self, sScrNo=None):
        """Disconnect all real-time data registered on screen *sScrNo*."""
        self.dynamicCall('DisconnectRealData(QString)',sScrNo)
def detail_account_mystock(self, sPrevNext='0'): # single-record data
        """Request the account holdings report (opw00018) and block for the reply."""
        self.dynamicCall('SetInputValue(QString, QString)', '계좌번호', self.account_num)
        self.dynamicCall('SetInputValue(QString, QString)', '비밀번호', '0000') # paper trading uses 0000
        self.dynamicCall('SetInputValue(QString, QString)', '비밀번호입력매체구분', '00')
        self.dynamicCall('SetInputValue(QString, QString)', '조회구분', '1') # 1: aggregated, 2: individual
        self.dynamicCall('CommRqData(QString, QString, int, QString)', '계좌평가잔고내역요청', 'opw00018', sPrevNext, self.screen_my_info)
        self.detail_account_info_event_loop.exec_()
def not_concluded_account(self, sPrevNext='0'):
        """Request open (unfilled) orders (opt10075) and block for the reply."""
        print('미체결 종목 요청중..')
        self.dynamicCall('SetInputValue(QString, QString)', '계좌번호', self.account_num)
        self.dynamicCall('SetInputValue(QString, QString)', '체결구분', '1')
        self.dynamicCall('SetInputValue(QString, QString)', '매매구분', '0')
        self.dynamicCall('CommRqData(QString, QString, int, QString)',
                        '실시간미체결요청','opt10075', sPrevNext, self.screen_my_info)
        self.detail_account_info_event_loop.exec_()
def read_code(self):
        """Load previously screened stocks into self.portfolio_stock_dict.

        Reads files/condition_stock.txt where each line is
        'code,name,price'.  Fix: the file is now opened with a context
        manager, so the handle is closed even if a line fails to parse
        (the original leaked it on any exception).
        """
        file_path = 'files/condition_stock.txt'
        if os.path.exists(file_path):
            with open(file_path, 'r', encoding='utf8') as f:
                lines = f.readlines()
            for line in lines:
                if line != '':
                    ls = line.split(',')
                    stock_code = ls[0]
                    stock_name = ls[1]
                    stock_price = abs(int(ls[2].split('\n')[0]))
                    self.portfolio_stock_dict.update({
                        stock_code: {'종목명':stock_name,'현재가':stock_price}
                    })
        print(self.portfolio_stock_dict)
def screen_number_setting(self):
        """Assign real-time and order screen numbers to every tracked stock.

        Collects codes from account holdings, open orders and the saved
        portfolio, then allocates at most 50 codes per screen number.
        """
        screen_overwrite = []
        # stocks held in the account
        for code in self.account_stock_dict.keys():
            if code not in screen_overwrite:
                screen_overwrite.append(code)
        # stocks with open (unfilled) orders
        for order_number in self.not_signed_stock_dict.keys():
            # BUGFIX: trdata_slot stores entries under the English key
            # 'code'; the original read '종목코드' and raised KeyError
            # whenever any open order existed.
            code = self.not_signed_stock_dict[order_number]['code']
            if code not in screen_overwrite:
                screen_overwrite.append(code)
        # stocks from the saved portfolio
        for code in self.portfolio_stock_dict.keys():
            if code not in screen_overwrite:
                screen_overwrite.append(code)
        # allocate screen numbers (at most 50 codes per screen; the API
        # allows up to 100 requests per screen number)
        cnt = 0
        for code in screen_overwrite:
            real_screen = int(self.screen_real_stock)
            order_screen = int(self.screen_order_stock)
            if (cnt % 50) == 0: # start a new real-time screen every 50 codes
                real_screen += 1 # 5000 => 5001
                self.screen_real_stock = str(real_screen)
            if (cnt % 50) == 0:
                order_screen += 1 # 6000 -> 6001
                self.screen_order_stock = str(order_screen)
            if code in self.portfolio_stock_dict.keys():
                self.portfolio_stock_dict[code].update({
                    '스크린번호':str(self.screen_real_stock),
                    '주문용스크린번호':str(self.screen_order_stock)
                })
            elif code not in self.portfolio_stock_dict.keys():
                self.portfolio_stock_dict.update({
                    code: {'스크린번호': str(self.screen_real_stock),
                           '주문용스크린번호': str(self.screen_order_stock)}
                })
            cnt += 1
        print(self.portfolio_stock_dict)
def realdata_slot(self, sCode, sRealType, sRealData):
        """Handle real-time events: market open/close state and per-trade ticks.

        For '주식체결' (trade) events the latest quote fields are cached in
        self.portfolio_stock_dict[sCode].
        """
        if sRealType == '장시작시간':
            fid = self.realType.REALTYPE[sRealType]['장운영구분']
            chk = self.dynamicCall('GetCommRealData(QString, int)',
                                sCode, fid)
            # market phase: 0 pre-open, 3 open, 2 closing auction, 4 closed
            if chk == '0':
                print('장 시작 전')
            elif chk == '3':
                print('장 시작')
            elif chk == '2':
                print('장 종료, 동시호가 전환')
            elif chk == '4':
                print('장 종료 (3:30)')
        elif sRealType == '주식체결':
            currtime = self.dynamicCall('GetCommRealData(QString,int)',
                            sCode, self.realType.REALTYPE[sRealType]['체결시간'])
            currprice = self.dynamicCall('GetCommRealData(QString,int)',
                            sCode, self.realType.REALTYPE[sRealType]['현재가'])
            currprice = abs(int(currprice))  # the API signs prices; keep the magnitude
            addprice = self.dynamicCall('GetCommRealData(QString,int)',
                            sCode, self.realType.REALTYPE[sRealType]['전일대비'])
            addprice = abs(int(addprice))
            perprice = self.dynamicCall('GetCommRealData(QString,int)',
                            sCode, self.realType.REALTYPE[sRealType]['등락율'])
            perprice = float(perprice)
            bestsellprice = self.dynamicCall('GetCommRealData(QString,int)',
                            sCode, self.realType.REALTYPE[sRealType]['(최우선)매도호가'])
            bestsellprice = abs(int(bestsellprice))
            bestbuyprice = self.dynamicCall('GetCommRealData(QString,int)',
                            sCode, self.realType.REALTYPE[sRealType]['(최우선)매수호가'])
            bestbuyprice = abs(int(bestbuyprice))
            amount = self.dynamicCall('GetCommRealData(QString,int)',
                            sCode, self.realType.REALTYPE[sRealType]['거래량'])
            amount = abs(int(amount))
            if sCode not in self.portfolio_stock_dict:
                self.portfolio_stock_dict.update({sCode:{}})
            # cache the latest tick for this stock
            self.portfolio_stock_dict[sCode].update({
                '체결시간':currtime,
                '현재가':currprice,
                '전일대비':addprice,
                '등락율':perprice,
                '(최우선)매도호가':bestsellprice,
                '(최우선)매수호가':bestbuyprice,
                '거래량':amount
            })
            print(self.portfolio_stock_dict[sCode])
self.slack.notification(text = self.portfolio_stock_dict[sCode]) | sw-song/kiwoom | test_api/kiwoom.py | kiwoom.py | py | 29,513 | python | en | code | 0 | github-code | 36 |
41510671413 | #!/usr/bin/env python3
"""
Session Authentication Module
"""
from api.v1.auth.auth import Auth
from api.v1.views.users import User
import uuid
from typing import TypeVar
class SessionAuth(Auth):
    """Session-based authentication.

    Maps session IDs to user IDs in an in-memory class attribute, so
    sessions are shared by all instances and lost on process restart.
    """

    # In-memory store: session_id (str) -> user_id (str).
    user_id_by_session_id = {}

    def create_session(self, user_id: str = None) -> str:
        """Create and register a new session ID for *user_id*.

        Args:
            user_id: id of the user.
        Returns:
            The new session ID, or None when *user_id* is missing or
            not a string.
        """
        if user_id is None or not isinstance(user_id, str):
            return None
        session_id: str = str(uuid.uuid4())
        self.user_id_by_session_id[session_id] = user_id
        return session_id

    def user_id_for_session_id(self, session_id: str = None) -> str:
        """Return the user ID bound to *session_id*, or None.

        Bug fix: the previous version fell through to ``return user_id``
        without ever assigning the name when *session_id* was invalid,
        raising UnboundLocalError instead of returning None.
        """
        if session_id is None or not isinstance(session_id, str):
            return None
        return self.user_id_by_session_id.get(session_id)

    def current_user(self, request=None):
        """Return the User instance for the session cookie on *request*."""
        session_cookie = self.session_cookie(request)
        user_id = self.user_id_for_session_id(session_cookie)
        return User.get(user_id)

    def destroy_session(self, request=None):
        """Delete the session bound to *request* (logout).

        Returns:
            True when a valid session existed and was removed,
            False otherwise.
        """
        if request is None:
            return False
        session_token = self.session_cookie(request)
        if not session_token:
            return False
        if not self.user_id_for_session_id(session_token):
            return False
        del self.user_id_by_session_id[session_token]
        return True
| tommyokoyo/alx-backend-user-data | 0x02-Session_authentication/api/v1/auth/session_auth.py | session_auth.py | py | 2,157 | python | en | code | 0 | github-code | 36 |
5818279983 | from Algorithms.Usefull_elements import Step, intersection, addition, get_edges, invert_Graph, vertex_list_to_str, hsv_to_hex, replace_color
import copy
from collections import defaultdict
def algorithm_depth_first_search(matrix):
    """Topological sort of a digraph via depth-first search, with visualisation steps.

    *matrix* is a square 0/1 adjacency matrix.  Returns
    ``[alg_input, steps, alg_result]``: the input step (graph + matrix table),
    the intermediate visualisation steps, and the result step (graph laid out
    by levels) — or an error step when the graph contains a cycle.
    """
    mass = list() # adjacency lists (successors of each vertex)
    vertex_mark = dict() # vertex -> mark (None = unvisited, False = in progress, True = ordered)
    vertex = list() # vertices not yet ordered
    stack = list() # stack of finished vertices (reverse topological order)
    all_vertex = [] # list of all vertices
    steps = [] # list of visualisation steps
    alg_result = [] # the result step
    edges = [] # list of edges (get_edges returns a dict keyed by (u, v) tuples — used via .keys() below)
    route = [] # current DFS route (used in the step captions)
    loop = False # no cycle found so far
    # nested function implementing the depth-first search
    def dfs(prev_ver, cur_ver):
        print(f' Текущая вершина: {cur_ver}')
        #h_step.node_options[cur_ver] = replace_color(h_step.node_options[cur_ver], "#DC143C") # recolour the node along the route
        h_step.node_options[cur_ver] += ', borderWidth: 3, color: {border: "#DC143C", background: "#1E90FF", highlight: { border: "#DC143C" }}'; # highlight the node border along the route
        vertex_mark[cur_ver] = False # vertex is being visited
        while mass[cur_ver]: # while there are unprocessed successors
            # h_step.edge_options[(cur_ver, mass[cur_ver][0])] += replace_color(h_step.edge_options[(cur_ver, mass[cur_ver][0])], "#DC143C") # tint the edge
            if vertex_mark[mass[cur_ver][0]] == None: # MAY BE A LOOP or vertex_mark[mass[cur_ver][0]] == False
                h_step.edge_options[(cur_ver, mass[cur_ver][0])] += ', "color": "#DC143C", width: 3' # tint the edge
            if vertex_mark[mass[cur_ver][0]] == None:
                print(f' Переходим к смежной вершине: {mass[cur_ver][0]}')
                route.append(cur_ver) # add the vertex to the route
                # descend into the first adjacent vertex
                if not dfs(cur_ver, mass[cur_ver][0]): # a cycle was detected
                    return False
                print(f' Возвращаемся к вершине {cur_ver}')
                h_step.text = f'<p class="mb-2 text-gray-500 dark:text-gray-400">Возвращаемся к вершине {cur_ver}</p>' + h_step.text
                print(f' Текущая вершина: {cur_ver}')
                mass[cur_ver].pop(0) # drop the processed successor
            elif vertex_mark[mass[cur_ver][0]]:
                mass[cur_ver].pop(0) # drop the processed successor
            else:
                return False # a cycle was detected
        print(f'Смежных непомеченных вершин нет, помещаем в стек вершину {cur_ver}')
        vertex_mark[cur_ver] = True # the vertex order is now fixed
        stack.append(cur_ver) # push the vertex onto the stack
        vertex.remove(cur_ver) # exclude the vertex from further visits
        for ver in route:
            h_step.text += f'{ver}->'
        if route:
            route.pop()
            h_step.text += f'{cur_ver}</p><p class="mb-2 text-gray-500 dark:text-gray-400">Вершина {cur_ver} не имеет смежных вершин, добавляем её в стек {stack}</p>' # final text of the step
        else:
            h_step.text = f'<p class="mb-2 text-gray-500 dark:text-gray-400">Возвращаемся к вершине {cur_ver}</p><p class="mb-2 text-gray-500 dark:text-gray-400">Некуда шагать!</p><p class="mb-2 text-gray-500 dark:text-gray-400">Вершина {cur_ver} не имеет смежных вершин, добавляем её в стек {stack}</p>' # final text of the step
        h_step.step_label = f'Добавление вершины x<sub>{cur_ver}</sub> в стек' # step title
        h_step.node_options[cur_ver] += ', borderWidth: 1, "color": "#00FA9A"' # recolour the finished vertex
        new_step = copy.deepcopy(h_step)
        h_step.text = '<p class="mb-2 text-gray-500 dark:text-gray-400">Маршрут обхода: ' # step text
        if prev_ver != cur_ver and (prev_ver, cur_ver) in edges:
            h_step.edge_options[(prev_ver, cur_ver)] += ', "color": "#1E90FF", width: 1' # restore the edge colour
        # print(new_step.edge_options)
        steps.append(new_step) # append the step
        new_step = Step(True, True) # create a fresh step
        return True
    # initialisation
    size_of_matrix = len(matrix) # matrix size
    for i in range(size_of_matrix):
        # map every vertex to an (empty) mark
        vertex_mark.update({i: None})
        # build the list of unmarked vertices
        vertex.append(i)
        # build the adjacency lists
        neighbor = list() # successors of vertex i
        for j in range(size_of_matrix):
            if matrix[i][j] == 1:
                neighbor.append(j)
        mass.append(neighbor)
    edges = get_edges(matrix) # edge list
    all_vertex = vertex.copy()
    print(f'Вершины: {all_vertex}')
    # the input graph
    first_step = Step(True, True) # first step (the input graph)
    first_step.text = '<p class="mb-2 text-gray-500 dark:text-gray-400">Это граф по введённой матрице</p>' # step text
    first_step.nodes = all_vertex # vertex list
    first_step.edges = edges # edge list
    # common edge options
    for edge in edges.keys():
        first_step.edge_options[edge] = 'label: "1"'
        first_step.edge_options[edge] += ', "color": "#1E90FF"'
    print(f'рёбра: {first_step.edge_options}')
    for i in all_vertex: # node labels
        first_step.node_options[i] = f'label: "x{i}"'
        first_step.node_options[i] += ', shape: "circle"'
        first_step.node_options[i] += ', "color": "#1E90FF"'
    # choose the starting vertex of the traversal
    h_step = copy.deepcopy(first_step) # working copy used while stepping
    print(vertex)
    while vertex:
        new_step = copy.deepcopy(first_step) # fresh step
        h_step.text = '<p class="mb-2 text-gray-500 dark:text-gray-400">Маршрут обхода: ' # step text
        if not dfs(0, vertex[0]): # run the DFS
            loop = True
            print('Выполнение алгоритма прервано из-за наличия контура')
            break
    print(f'Вершины в стеке:', list(map(lambda el: el, stack)))
    if not loop:
        print('Алгоритм успешно завершен')
        result_step = copy.deepcopy(first_step)
        result_step.text = f'<p class="mb-2 text-gray-500 dark:text-gray-400">Стек - {stack} ({stack[-1]} - вершина стека)</p>'
        result_step.text += '<p class="mb-2 text-gray-500 dark:text-gray-400">Это граф, разбитый на уровни</p>' # step text
        stack.reverse() # reverse so the vertices follow level order
        for ver in stack: # assign levels to vertices
            result_step.node_options[ver] = f'label: "x{ver}"'
            result_step.node_options[ver] += ', shape: "circle"'
            result_step.node_options[ver] += ', "color": "#1E90FF"'
            result_step.node_options[ver] += f', level: {stack.index(ver)}'
        neighbor_ver = [] # pairs of vertices on neighbouring levels
        for i in range(len(stack)-1):
            neighbor_ver.append(tuple([stack[i], stack[i+1]]))
        print(f'Пары смежных вершин: {neighbor_ver}')
        result_step.general_options += ', layout: { hierarchical: { direction: "LR", levelSeparation: 100} }'
        flag = True
        for edge in edges.keys():
            # result_step.edge_options[edge] = 'smooth: { "enabled": true, "type": "curvedCCW", "forceDirection": "none" }, width: 1'
            if edge in neighbor_ver:
                result_step.edge_options[edge] = 'smooth: { "enabled": true, "type": "dynamic", roundness: 0 }, width: 1'
            elif flag:
                result_step.edge_options[edge] = 'smooth: { "enabled": true, "type": "curvedCW", roundness: 0.5 }, width: 1'
                flag = False
            else:
                result_step.edge_options[edge] = 'smooth: { "enabled": true, "type": "curvedCCW", roundness: 0.5 }, width: 1'
                flag = True
        alg_result.append(result_step)
    else:
        print('ОШИБКА')
        result_step = Step(True, True)
        result_step.text = '<p class="mb-2 text-gray-500 dark:text-gray-400"">АЛГОРИТМ ПРЕРВАН ИЗ-ЗА НАЛИЧИЯ КОНТУРА В ГРАФЕ!</p>' # step text
        alg_result.append(result_step)
    # add the matrix table to the input data
    alg_input = Step(True, True, True)
    alg_input.text = copy.deepcopy(first_step.text)
    alg_input.nodes = copy.deepcopy(first_step.nodes)
    alg_input.edges = copy.deepcopy(first_step.edges)
    alg_input.edge_options = copy.deepcopy(first_step.edge_options)
    alg_input.node_options = copy.deepcopy(first_step.node_options)
    first_line = []
    first_line.append('')
    for i in range(size_of_matrix):
        first_line.append(f'x<sub>{i}</sub>')
    alg_input.matrix.append(list(first_line))
    for i in range(size_of_matrix):
        next_line = []
        next_line.append(f'x<sub>{i}</sub>')
        next_line += (list(matrix[i]))
        alg_input.matrix.append(list(next_line))
    for i in range(1, size_of_matrix+1):
        alg_input.matrix[i][i] = -1
    return [ alg_input, steps, alg_result ]
#########################################################################################################################
#########################################################################################################################
#########################################################################################################################
#########################################################################################################################
#########################################################################################################################
#########################################################################################################################
#########################################################################################################################
#########################################################################################################################
# Topological sorting — Demoucron's algorithm
# (splits the vertices of a DAG into levels)
def demukron(matrix):
    """Topological sort of a digraph with Demoucron's (level) algorithm.

    *matrix* is a square 0/1 adjacency matrix (mutated in place: processed
    rows are zeroed).  Returns ``[alg_input, steps, alg_result]``: the input
    step (graph + matrix table), one step per formed level, and the result
    step (graph laid out by levels) — or an error step when a cycle is found.
    """
    vertex_level = dict() # vertex -> level
    vertex = set() # vertices without a level yet
    all_vertex = [] # list of all vertices
    edges = [] # list of edges (get_edges returns a dict keyed by (u, v) tuples — used via .keys() below)
    steps = [] # visualisation steps
    alg_result = [] # the result step
    # the algorithm itself
    def dm(vertex):
        step = Step(False, True, True) # first step
        # build the input matrix table
        first_line = []
        first_line.append('')
        for i in range(size_of_matrix):
            first_line.append(f'x<sub>{i}</sub>')
        step.matrix.append(list(first_line))
        for i in range(size_of_matrix):
            next_line = []
            next_line.append(f'x<sub>{i}</sub>')
            next_line += (list(matrix[i]))
            step.matrix.append(list(next_line))
        for i in range(1, size_of_matrix+1):
            step.matrix[i][i] = -1
        # build the levels
        level = 0
        while vertex:
            step = copy.deepcopy(step)
            step.text = '<p class="mb-2 text-gray-500 dark:text-gray-400">'
            flag = False # no level found yet
            level_v = set() # vertices of the level being built
            for i in vertex: # scan the matrix column
                sum = 0
                # count the incoming edges (NOTE: shadows the built-in sum here)
                for j in range(len(matrix)):
                    sum += matrix[j][i]
                if sum == 0:
                    level_v.add(i) # add the vertex to the level
                    vertex_level[i] = level # record the vertex level
                    flag = True # a level was found
            if flag:
                print(f'Вершины {level} уровня: ', set(map(lambda el: el, level_v)))
            else:
                return False # no level could be formed (cycle)
            for i in level_v:
                matrix[i] = list(map(lambda el: 0, matrix[i])) # zero out the row
            # mark the removed rows in the table
            for ver in level_v:
                for i in range(1, size_of_matrix+1):
                    step.matrix[ver+1][i] = -1
                step.text += f'Вершина x<sub>{ver}</sub> не имеет входящих рёбер<br/>'
            step.text += f'Формируем уровень N<sub>{level}</sub> = ' + '{ '
            for ver in level_v:
                step.text += f'x<sub>{ver}</sub> '
            step.text += '}<br/>'
            for ver in level_v:
                step.text += f'Порядковая функция O(x<sub>{ver}</sub>) = {level}<br/>'
            step.text += '</p>'
            step.step_label = f'Формирование уровня N <sub>{level}</sub>'
            steps.append(step)
            print(f'матрица {matrix}')
            vertex -= level_v # drop the vertices whose level is now fixed
            level += 1
        return True
    # initialisation
    for i in range(len(matrix)):
        # map every vertex to an (empty) level
        vertex_level.update({i: None})
        # build the set of vertices without a level
        vertex.add(i)
    edges = get_edges(matrix) # edge list
    all_vertex = vertex.copy() # vertex list
    # the input graph
    alg_input = Step(True, True, True) # first step (the input graph)
    alg_input.text = '<p class="mb-2 text-gray-500 dark:text-gray-400">Это граф по введённой матрице</p>' # step text
    alg_input.nodes = all_vertex # vertex list
    alg_input.edges = edges # edge list
    # common edge options
    for edge in edges.keys():
        alg_input.edge_options[edge] = 'label: "1"'
        alg_input.edge_options[edge] += ', "color": "#1E90FF"'
    print(f'рёбра: {alg_input.edge_options}')
    for i in all_vertex: # node labels
        alg_input.node_options[i] = f'label: "x{i}"'
        alg_input.node_options[i] += ', shape: "circle"'
        alg_input.node_options[i] += ', "color": "#1E90FF"'
    # add the matrix table to the input data
    size_of_matrix = len(matrix)
    first_line = []
    first_line.append('')
    for i in range(size_of_matrix):
        first_line.append(f'x<sub>{i}</sub>')
    alg_input.matrix.append(list(first_line))
    for i in range(size_of_matrix):
        next_line = []
        next_line.append(f'x<sub>{i}</sub>')
        next_line += (list(matrix[i]))
        alg_input.matrix.append(list(next_line))
    for i in range(1, size_of_matrix+1):
        alg_input.matrix[i][i] = -1
    res = dm(vertex) # run the algorithm
    if res:
        print('Алгоритм успешно завершен')
        print(f'Вершины по уровням: {vertex_level}')
        result_step = copy.deepcopy(alg_input)
        result_step.matrix = []
        result_step.text = f'<p class="mb-2 text-gray-500 dark:text-gray-400">Разделение вершин по уровням - {vertex_level})</p>'
        result_step.text += '<p class="mb-2 text-gray-500 dark:text-gray-400">Это граф, разбитый на уровни</p>' # step text
        for ver, level in vertex_level.items(): # assign levels to the vertices
            result_step.node_options[ver] = f'label: "x{ver}"'
            result_step.node_options[ver] += ', shape: "circle"'
            result_step.node_options[ver] += ', "color": "#1E90FF"'
            result_step.node_options[ver] += f', level: {level}'
        neighbor_ver = [] # pairs of vertices on neighbouring levels
        # NOTE(review): the comprehension targets below shadow the outer
        # `vertex`/`vertex_level` names; this works because the iterable is
        # evaluated first, but it is fragile.
        sorted_levels = sorted(set(vertex_level.values())) # unique level values, sorted
        for level in sorted_levels[:-1]: # walk the levels, excluding the last one
            current_level_vertices = [vertex for vertex, vertex_level in vertex_level.items() if vertex_level == level] # vertices of the current level
            next_level_vertices = [vertex for vertex, vertex_level in vertex_level.items() if vertex_level == level + 1] # vertices of the next level
            neighbor_pairs = [(v1, v2) for v1 in current_level_vertices for v2 in next_level_vertices] # pairs of neighbouring vertices
            neighbor_ver.extend(neighbor_pairs) # collect the pairs
        result_step.general_options += ', layout: { hierarchical: { direction: "LR", levelSeparation: 100, nodeSpacing: 150} }'
        print(edges)
        print(neighbor_ver)
        flag = True
        for edge in edges.keys():
            # result_step.edge_options[edge] = 'smooth: { "enabled": true, "type": "curvedCCW", "forceDirection": "none" }, width: 1'
            if edge in neighbor_ver:
                result_step.edge_options[edge] = 'smooth: { "enabled": true, "type": "dynamic", roundness: 0 }, width: 1'
            elif flag:
                result_step.edge_options[edge] = 'smooth: { "enabled": true, "type": "curvedCW", roundness: 0.5 }, width: 1'
                flag = False
            else:
                result_step.edge_options[edge] = 'smooth: { "enabled": true, "type": "curvedCCW", roundness: 0.5 }, width: 1'
                flag = True
        sorted_dict = defaultdict(list)
        for vertex, level in vertex_level.items():
            sorted_dict[level].append(vertex)
        sorted_dict = dict(sorted(sorted_dict.items()))
        result_step.text = '<p class="mb-2 text-gray-500 dark:text-gray-400">'
        for level, ver in sorted_dict.items():
            result_step.text += f'Уровень N<sub>{level}</sub> = ' + '{ '
            for x in (ver):
                result_step.text += f'x<sub>{x}</sub> '
            result_step.text += '}<br/>'
        result_step.text += '</p>'
        alg_result.append(result_step)
    else:
        print('Выполнение алгоритма прервано из-за наличия контура')
        result_step = Step()
        result_step.text = '<p class="mb-2 text-gray-500 dark:text-gray-400"">АЛГОРИТМ ПРЕРВАН ИЗ-ЗА НАЛИЧИЯ КОНТУРА В ГРАФЕ!</p>' # step text
        alg_result.append(result_step)
return [ alg_input, steps, alg_result ] | VelandMerl/graph_bauman_centuary_presents | Algorithms/Topological_Sort.py | Topological_Sort.py | py | 21,006 | python | ru | code | 1 | github-code | 36 |
70677287784 | """
Filename: plot_zonal_mean.py
Author: Damien Irving, irving.damien@gmail.com
Description:
"""
# Import general Python modules
import sys, os, pdb
import argparse
import numpy
import matplotlib.pyplot as plt
from matplotlib import gridspec
import iris
import iris.plot as iplt
from iris.experimental.equalise_cubes import equalise_attributes
import seaborn
# Import my modules
# Walk up from the current directory to locate the 'ocean-analysis' repo root,
# then put its 'modules' directory on sys.path so the local helper modules can
# be imported no matter where inside the repo the script is launched from.
cwd = os.getcwd()
repo_dir = '/'
for directory in cwd.split('/')[1:]:
    repo_dir = os.path.join(repo_dir, directory)
    if directory == 'ocean-analysis':
        break
modules_dir = os.path.join(repo_dir, 'modules')
sys.path.append(modules_dir)
try:
    import general_io as gio
    import timeseries
    import grids
    import convenient_universal as uconv
except ImportError:
    raise ImportError('Must run this script from anywhere within the ocean-analysis git repo')
# Define functions
# Line colour used for each experiment in every panel of the figure.
experiment_colors = {}
experiment_colors['historical'] = 'green'
experiment_colors['piControl'] = 'black'
experiment_colors['historicalAA'] = 'blue'
experiment_colors['historicalGHG'] = 'red'
experiment_colors['historicalnoAA'] = 'orange'
# Integer codes compared against the basin array when selecting a basin.
# NOTE(review): presumably the CMIP basin-file convention — confirm they
# match the files passed via the --*_basin_file options.
basins = {'atlantic': 2,
          'pacific': 3,
          'indian': 5}
def scale_data(cube, var, reverse_sign=False):
    """Scale the cube data in place and return (cube, units).

    Precipitation-minus-evaporation fluxes (kg m-2 s-1) are converted to
    mm/day; other variables keep their native units.  Consistency fix:
    the units are now always returned as a string (previously the else
    branch returned the cube's units object while the P-E branch
    returned a str).

    Args:
      cube: data cube (anything with .data and .units); mutated in place.
      var: variable standard_name.
      reverse_sign: multiply the data by -1 (e.g. for models with the
        wrong wind stress sign convention).
    """
    if var == 'precipitation_minus_evaporation_flux':
        cube.data = cube.data * 86400  # kg m-2 s-1 == mm s-1 of water -> mm/day
        units = 'mm/day'
    else:
        units = str(cube.units)
    if reverse_sign:
        cube.data = cube.data * -1
    return cube, units
def set_plot_grid(tas_trend=False):
    """Return a one-column GridSpec for the figure panels.

    Three rows (climatology, difference, time trend); a fourth row is
    appended when a tas-scaled trend panel is requested.  The top
    (climatology) panel is twice the height of the others.

    Args:
        tas_trend (bool): Include a panel for the tas trend?
    """
    heights = [2, 1, 1, 1] if tas_trend else [2, 1, 1]
    return gridspec.GridSpec(len(heights), 1, height_ratios=heights)
def calculate_climatology(cube, time_bounds, experiment):
    """Collapse a cube to its time-mean climatology.

    Every experiment except piControl is first restricted to
    *time_bounds*; the control run is averaged over its full
    (branch-aligned) period.
    """
    if experiment != 'piControl':
        cube = cube.extract(gio.get_time_constraint(time_bounds))
    climatology = cube.collapsed('time', iris.analysis.MEAN)
    climatology.remove_coord('time')
    return climatology
def calc_linear_trend(data, xaxis):
    """Return the linear trend (slope) of *data* against *xaxis*.

    ``polyfit`` returns [a, b] for y = a + bx; the slope b is returned.
    Series whose first element is masked return the fill value so the
    caller can re-mask them afterwards.

    Robustness fix: use ``numpy.ma.getmaskarray`` so a series whose mask
    is the scalar ``nomask`` (nothing masked) no longer raises on
    ``mask[0]``.
    """
    if numpy.ma.getmaskarray(data)[0]:
        return data.fill_value
    return numpy.polynomial.polynomial.polyfit(xaxis, data, 1)[-1]
def get_trend_cube(cube, xaxis='time'):
    """Return a cube of linear trends along the leading (time) axis.

    When *xaxis* is the string 'time' the trend is calculated per year;
    otherwise *xaxis* is a cube (e.g. global mean tas) and the trend is
    taken against its data, with the units suffix adjusted accordingly.

    Args:
      cube (iris.cube.Cube)
      xaxis (iris.cube.Cube or the string 'time')
    """
    dim_names = [coord.name() for coord in cube.dim_coords]
    assert dim_names[0] == 'time'
    if xaxis == 'time':
        trend_data = timeseries.calc_trend(cube, per_yr=True)
        unit_suffix = ' yr-1'
    else:
        raw = numpy.ma.apply_along_axis(calc_linear_trend, 0, cube.data, xaxis.data)
        trend_data = numpy.ma.masked_values(raw, cube.data.fill_value)
        unit_suffix = ' ' + str(xaxis.units) + '-1'
    trend_cube = cube[0, ::].copy()
    trend_cube.data = trend_data
    trend_cube.remove_coord('time')
    trend_cube.units = str(cube.units) + unit_suffix
    return trend_cube
def get_scale_factor(tas_cube):
    """Return the total linear warming over the tas timeseries.

    The per-year trend (K/yr) multiplied by the number of timesteps
    gives the linear temperature change across the whole period.
    """
    trend_per_year = get_trend_cube(tas_cube)
    return trend_per_year.data * tas_cube.shape[0]
def plot_climatology(climatology_dict, var, units, legloc, aggregation='Zonal mean'):
    """Plot the climatology curve for every available experiment.

    Experiments with no data (None entries) are skipped.  Adds the
    legend and the y-axis label.
    """
    experiments = ['historical', 'historicalGHG', 'historicalAA',
                   'historicalnoAA', 'piControl']
    for experiment in experiments:
        clim_cube = climatology_dict[experiment]
        if clim_cube:
            iplt.plot(clim_cube, color=experiment_colors[experiment],
                      alpha=0.8, label=experiment)
    plt.legend(loc=legloc)
    plt.ylabel('%s %s (%s)' %(aggregation, var.replace('_', ' '), units) )
def check_lats(climatology_dict, experiment):
    """Snap an experiment cube onto the control latitude axis.

    Regridding can leave tiny floating point differences between the
    latitude axes; when the axes differ by less than 1e-4 degrees the
    experiment axis is overwritten with the control one, otherwise an
    AssertionError is raised.
    """
    exp_cube = climatology_dict[experiment]
    control_cube = climatology_dict['piControl']
    exp_lats = exp_cube.coord('latitude')
    control_lats = control_cube.coord('latitude')
    if not control_lats == exp_lats:
        max_diff = numpy.abs(exp_lats.points - control_lats.points).max()
        assert max_diff < 0.0001, "%s and control have very different latitude axes" %(experiment)
        exp_cube.coord('latitude').points = control_lats.points
        exp_cube.coord('latitude').bounds = control_lats.bounds
    assert exp_cube.coord('latitude') == control_cube.coord('latitude'), \
      "Problem with %s latitude axis" %(experiment)
    return exp_cube
def plot_difference(climatology_dict):
    """Plot each experiment's climatology minus the piControl climatology.

    Latitude axes are first snapped onto the control axis (check_lats)
    so the cube subtraction is well defined.
    """
    assert climatology_dict['piControl'], 'must have control data for difference plot'
    for experiment in ('historical', 'historicalGHG', 'historicalAA', 'historicalnoAA'):
        if not climatology_dict[experiment]:
            continue
        climatology_dict[experiment] = check_lats(climatology_dict, experiment)
        diff_cube = climatology_dict[experiment] - climatology_dict['piControl']
        iplt.plot(diff_cube, color=experiment_colors[experiment], alpha=0.8)
    plt.ylabel('Experiment - piControl')
def plot_trend(trend_dict, units, scaled=False):
    """Plot the zonal trend curve for every available experiment.

    Args:
      trend_dict: experiment name -> trend cube (or None, skipped).
      units: data units used in the axis label.
      scaled: True when the trends have been scaled by total warming.
    """
    for experiment in ('historical', 'historicalGHG', 'historicalAA',
                       'historicalnoAA', 'piControl'):
        trend_cube = trend_dict[experiment]
        if trend_cube:
            iplt.plot(trend_cube, color=experiment_colors[experiment], alpha=0.8)
    if scaled:
        plt.ylabel('Trend ($%s \enspace yr^{-1}$) scaled by warming' %(units) )
    else:
        plt.ylabel('Trend ($%s \enspace yr^{-1}$)' %(units) )
def read_data(inargs):
    """Gather the per-experiment command line file options into dicts.

    Returns a (file_dict, tas_dict, area_dict, basin_dict) tuple, each
    mapping experiment name -> the matching argparse option value (None
    when the option was not supplied; there is no piControl tas file).
    """
    experiments = (('historical', 'historical'),
                   ('historicalGHG', 'historicalghg'),
                   ('historicalAA', 'historicalaa'),
                   ('historicalnoAA', 'historicalnoaa'),
                   ('piControl', 'picontrol'))
    file_dict = {name: getattr(inargs, attr + '_files')
                 for name, attr in experiments}
    # No --picontrol_tas_file option exists, hence the None default.
    tas_dict = {name: getattr(inargs, attr + '_tas_file', None)
                for name, attr in experiments}
    area_dict = {name: getattr(inargs, attr + '_area_file')
                 for name, attr in experiments}
    basin_dict = {name: getattr(inargs, attr + '_basin_file')
                  for name, attr in experiments}
    return file_dict, tas_dict, area_dict, basin_dict
def get_areacello_data(cube):
    """Compute cell areas for a rectilinear cube.

    Latitude/longitude bounds are guessed when missing, the cell areas
    are derived from them, and the result is masked wherever the cube
    data is masked.
    """
    dim_names = [coord.name() for coord in cube.dim_coords]
    assert 'latitude' in dim_names
    assert 'longitude' in dim_names
    for axis in ('latitude', 'longitude'):
        if not cube.coord(axis).has_bounds():
            cube.coord(axis).guess_bounds()
    weights = iris.analysis.cartography.area_weights(cube)
    return numpy.ma.masked_where(numpy.ma.getmask(cube.data), weights)
def area_ajustment(data_cube, area_file, metadata_dict):
    """Multiply a data cube by its cell areas (in place).

    The areas come from *area_file* (areacello) when supplied, otherwise
    they are computed from the cube's own grid.  The units string is
    updated by cancelling an existing 'm-2' or appending ' m2'.

    NOTE(review): the misspelt name ('ajustment') is kept because
    callers use it.
    """
    if area_file:
        area_cube = iris.load_cube(area_file[0])
        area_data = uconv.broadcast_array(area_cube.data, [1, 2], data_cube.shape)
        metadata_dict[area_file[0]] = area_cube.attributes['history']
    else:
        area_data = get_areacello_data(data_cube)
    data_cube.data = data_cube.data * area_data
    old_units = str(data_cube.units)
    if 'm-2' in old_units:
        units = old_units.replace('m-2', "")
    else:
        units = old_units + ' m2'
    return data_cube, units, metadata_dict
def main(inargs):
    """Run the program.

    For each experiment: load and concatenate the input cubes, align the
    piControl segment with the historical run via the branch time, take
    annual means, optionally mask/select an ocean basin, compute the zonal
    climatology and trends, then draw the multi-panel figure.
    """
    file_dict, tas_dict, area_dict, basin_dict = read_data(inargs)
    metadata_dict = {}
    climatology_dict = {}
    time_trend_dict = {}
    tas_scaled_trend_dict = {}
    branch_dict = {}
    # NOTE: 'historical' must be processed before 'piControl' (the control
    # branch/offset info is recorded in branch_dict first) — the list order
    # below guarantees that.
    for experiment in ['historical', 'historicalGHG', 'historicalAA', 'historicalnoAA', 'piControl']:
        filenames = file_dict[experiment]
        if not filenames:
            climatology_dict[experiment] = None
            time_trend_dict[experiment] = None
            tas_scaled_trend_dict[experiment] = None
        else:
            print(experiment)
            try:
                time_constraint = gio.get_time_constraint(inargs.total_time)
            except (AttributeError, TypeError):
                # No --total_time given: no temporal restriction.
                time_constraint = iris.Constraint()
            with iris.FUTURE.context(cell_datetime_objects=True):
                cube = iris.load(filenames, gio.check_iris_var(inargs.var))
                # Merge cubes
                metadata_dict[filenames[0]] = cube[0].attributes['history']
                equalise_attributes(cube)
                iris.util.unify_time_units(cube)
                cube = cube.concatenate_cube()
                cube = gio.check_time_units(cube)
                # Time extraction and branch time info
                coord_names = [coord.name() for coord in cube.dim_coords]
                assert coord_names[0] == 'time'
                if 'historical' in experiment:
                    # Record how much was trimmed from the front so the same
                    # offset can be applied to the control run.
                    original_time_length = cube.shape[0]
                    cube = cube.extract(time_constraint)
                    new_time_length = cube.shape[0]
                    branch_time_index_offset = original_time_length - new_time_length
                    branch_time = cube.attributes['branch_time']
                    time_length = cube.shape[0]
                    branch_dict[experiment] = (branch_time, time_length, branch_time_index_offset)
                elif experiment == 'piControl':
                    # Slice the control run to the period overlapping the
                    # historical run, starting at the branch time.
                    branch_time, time_length, branch_time_index_offset = branch_dict['historical']
                    start_index, error = uconv.find_nearest(cube.coord('time').points, float(branch_time) + 15.5, index=True)
                    if abs(error) > 15:
                        print("WARNING: Large error of %f in locating branch time" %(error))
                        start_index = 0
                    start_index = start_index + branch_time_index_offset
                    cube = cube[start_index:start_index+time_length, ::]
                # Temporal smoothing
                cube = timeseries.convert_to_annual(cube, full_months=True)
            # Mask marginal seas
            if basin_dict[experiment]:
                basin_cube = iris.load_cube(basin_dict[experiment])
                cube = uconv.mask_marginal_seas(cube, basin_cube)
            # Regrid and select basin
            cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(cube)
            if not inargs.basin == 'globe':
                if basin_dict[experiment] and not regrid_status:
                    ndim = cube.ndim
                    basin_array = uconv.broadcast_array(basin_cube.data, [ndim - 2, ndim - 1], cube.shape)
                else:
                    basin_array = uconv.create_basin_array(cube)
                # Keep only cells that are unmasked AND in the chosen basin.
                cube.data.mask = numpy.where((cube.data.mask == False) & (basin_array == basins[inargs.basin]), False, True)
            # Scale
            cube, units = scale_data(cube, inargs.var, reverse_sign=inargs.reverse_sign)
            # Zonal statistic
            if inargs.area_adjust:
                if regrid_status:
                    # The supplied area file matches the native grid only.
                    area_dict[experiment] = None
                cube, units, metadata_dict = area_ajustment(cube, area_dict[experiment], metadata_dict)
                zonal_cube = cube.collapsed('longitude', iris.analysis.SUM)
                aggregation = 'Zonally integrated'
            else:
                zonal_cube = cube.collapsed('longitude', iris.analysis.MEAN)
                aggregation = 'Zonal mean'
            zonal_cube.remove_coord('longitude')
            # Climatology and trends
            climatology_dict[experiment] = calculate_climatology(zonal_cube, inargs.climatology_time, experiment)
            time_trend_dict[experiment] = get_trend_cube(zonal_cube)
            if tas_dict[experiment]:
                # NOTE(review): 'air_temperature' & time_constraint relies on
                # iris.Constraint combining with a name string — confirm this
                # behaves with the installed iris version.
                tas_cube = iris.load_cube(tas_dict[experiment], 'air_temperature' & time_constraint)
                scale_factor = get_scale_factor(tas_cube)
                print(experiment, 'warming:', scale_factor)
                tas_scaled_trend_dict[experiment] = time_trend_dict[experiment] * (1. / abs(scale_factor))
                metadata_dict[tas_dict[experiment][0]] = tas_cube.attributes['history']
            else:
                tas_scaled_trend_dict[experiment] = None
    # Create the plots
    tas_scaled_trend_flag = tas_scaled_trend_dict['historicalGHG'] and tas_scaled_trend_dict['historicalAA']
    fig = plt.figure(figsize=[15, 20])
    gs = set_plot_grid(tas_trend=tas_scaled_trend_flag)
    ax_main = plt.subplot(gs[0])
    plt.sca(ax_main)
    plot_climatology(climatology_dict, inargs.var, units, inargs.legloc, aggregation)
    plt.title('%s (%s), %s' %(inargs.model, inargs.run, inargs.basin) )
    ax_diff = plt.subplot(gs[1])
    plt.sca(ax_diff)
    plot_difference(climatology_dict)
    ax_time_trend = plt.subplot(gs[2])
    plt.sca(ax_time_trend)
    plot_trend(time_trend_dict, units)
    if tas_scaled_trend_flag:
        ax_tas_trend = plt.subplot(gs[3])
        plt.sca(ax_tas_trend)
        plot_trend(tas_scaled_trend_dict, units, scaled=True)
    plt.xlabel('latitude')
    plt.savefig(inargs.outfile, bbox_inches='tight')
    gio.write_metadata(inargs.outfile, file_info=metadata_dict)
if __name__ == '__main__':
    extra_info ="""
author:
    Damien Irving, irving.damien@gmail.com
note:
"""
    description=''
    parser = argparse.ArgumentParser(description=description,
                                     epilog=extra_info,
                                     argument_default=argparse.SUPPRESS,
                                     formatter_class=argparse.RawDescriptionHelpFormatter)
    # Positional arguments.
    parser.add_argument("outfile", type=str, help="Output file name")
    parser.add_argument("var", type=str, help="Variable standard_name")
    parser.add_argument("model", type=str, help="Model name")
    parser.add_argument("run", type=str, help="Run (e.g. r1)")
    parser.add_argument("basin", type=str, choices=('atlantic', 'pacific', 'indian', 'globe'), help="Ocean basin")
    # Per-experiment data files.
    parser.add_argument("--historical_files", type=str, default=None, nargs='*',
                        help="Input files for the historical experiment")
    parser.add_argument("--historicalghg_files", type=str, default=None, nargs='*',
                        help="Input files for the historicalGHG experiment")
    parser.add_argument("--historicalaa_files", type=str, default=None, nargs='*',
                        help="Input files for the historicalAA experiment")
    parser.add_argument("--historicalnoaa_files", type=str, default=None, nargs='*',
                        help="Input files for the historicalnoAA experiment")
    parser.add_argument("--picontrol_files", type=str, default=None, nargs='*',
                        help="Input files for the piControl experiment")
    # Global mean surface temperature files (for warming-scaled trends).
    parser.add_argument("--historical_tas_file", type=str, default=None, nargs='*',
                        help="Global mean surface temperature file for historical experiment")
    parser.add_argument("--historicalghg_tas_file", type=str, default=None, nargs='*',
                        help="Global mean surface temperature file for historicalGHG experiment")
    parser.add_argument("--historicalaa_tas_file", type=str, default=None, nargs='*',
                        help="Global mean surface temperature file for historicalAA experiment")
    parser.add_argument("--historicalnoaa_tas_file", type=str, default=None, nargs='*',
                        help="Global mean surface temperature file for historicalnoAA experiment")
    # Cell area files (used with --area_adjust).
    parser.add_argument("--historical_area_file", type=str, default=None, nargs='*',
                        help="Cell area file for historical experiment")
    parser.add_argument("--historicalghg_area_file", type=str, default=None, nargs='*',
                        help="Cell area file for historicalGHG experiment")
    parser.add_argument("--historicalaa_area_file", type=str, default=None, nargs='*',
                        help="Cell area file for historicalAA experiment")
    parser.add_argument("--historicalnoaa_area_file", type=str, default=None, nargs='*',
                        help="Cell area file for historicalnoAA experiment")
    parser.add_argument("--picontrol_area_file", type=str, default=None, nargs='*',
                        help="Cell area file for piControl experiment")
    # Basin files (for marginal sea masking and basin selection).
    parser.add_argument("--historical_basin_file", type=str, default=None, nargs='*',
                        help="Cell basin file for historical experiment")
    parser.add_argument("--historicalghg_basin_file", type=str, default=None, nargs='*',
                        help="Cell basin file for historicalGHG experiment")
    parser.add_argument("--historicalaa_basin_file", type=str, default=None, nargs='*',
                        help="Cell basin file for historicalAA experiment")
    parser.add_argument("--historicalnoaa_basin_file", type=str, default=None, nargs='*',
                        help="Cell basin file for historicalnoAA experiment")
    parser.add_argument("--picontrol_basin_file", type=str, default=None, nargs='*',
                        help="Cell basin file for piControl experiment")
    # Analysis options.
    parser.add_argument("--area_adjust", action="store_true", default=False,
                        help="Adjust plots for area [default=False]")
    parser.add_argument("--reverse_sign", action="store_true", default=False,
                        help="Multiple the data by -1 (CCSM4 has wrong sign for wind stress) [default=False]")
    parser.add_argument("--climatology_time", type=str, nargs=2, metavar=('START_DATE', 'END_DATE'),
                        default=('1986-01-01', '2005-12-31'), help="Time period for climatology [default = entire]")
    parser.add_argument("--total_time", type=str, nargs=2, metavar=('START_DATE', 'END_DATE'),
                        default=None, help="Time period for entire analysis. Must go right to end of experiment for control overlap period to be calculated correctly. [default = entire]")
    parser.add_argument("--legloc", type=int, default=8,
                        help="Legend location")
    args = parser.parse_args()
    main(args)
| DamienIrving/ocean-analysis | visualisation/plot_zonal_mean.py | plot_zonal_mean.py | py | 19,621 | python | en | code | 9 | github-code | 36 |
71738073384 | import unittest
from selenium import webdriver
from data.constants import Constants
from helpers.keywords import Helpers
from pom.pages.login import Login
from pom.pages.project import Project
from pom.locators.base_loc import BaseLoc
from selenium.webdriver.chrome.options import Options
from webdriver_manager.chrome import ChromeDriverManager
from pom.pages.home import Home
from pom.locators.sidebar_loc import SideBarLoc
class ProjectTest(unittest.TestCase):
    """End-to-end Selenium tests for Todoist project creation/deletion.

    Each test runs against the live "prod" URL with a real user account, so
    these are integration tests: they need network access, Chrome and valid
    credentials in ``Constants``.
    """
    def setUp(self):
        """Start a headless Chrome session, log in and open the inbox."""
        options = Options()
        options.headless = True
        print("\n========== PROJECTS TESTS ==========")
        self.driver = webdriver.Chrome(ChromeDriverManager().install(), options=options)
        self.driver.maximize_window()
        self.driver.get(Constants.url["prod"])
        self.driver.find_element(*BaseLoc.sign_in_lnk).click()
        Login.login_form(self, Constants.credentials["users"]["real"]["user"], Constants.credentials["users"]["real"]["pass"])
        Helpers.click_visible_element(self, SideBarLoc.inbox_li_btn)
    def test_create_project(self):
        """Create one non-favourite project with board ("panel") view."""
        Project.create_projects(self, 1, Constants.project_data["name"], False, Constants.project_data["view"]["panel"])
        Home.validate_project(self)
    def test_create_project_fav(self):
        """Create one favourite project with list view."""
        Project.create_projects(self, 1, Constants.project_data["name"], True, Constants.project_data["view"]["list"])
        Home.validate_project(self)
    def test_create_projects(self):
        """Create three favourite projects with list view."""
        Project.create_projects(self, 3, Constants.project_data["name"], True, Constants.project_data["view"]["list"])
        Home.validate_project(self)
    def test_delete_all_projects(self):
        """Clean-up test: remove every project in the account."""
        Project.delete_all_projects(self)
    def tearDown(self):
        # brief pause so the last UI action settles before quitting the browser
        Helpers.wait_seconds(self, 3)
        self.driver.quit()
if __name__ == "__main__":
    unittest.main()
| jaime-contreras-98/todoist-python-selenium | tests/e2e/test/test_projects.py | test_projects.py | py | 1,846 | python | en | code | 0 | github-code | 36 |
9369243974 | import imaplib
import email
from time import sleep
from random import randint
import importlib
from src.Analyser import mark_email
from src.Email import Email
import numpy as np
from goto import with_goto
from src.save import save
ai = importlib.import_module("Neural_Network", package=None)
"""
Fonction qui efface tous les fichiers emailX.txt
De nouveaux fichiers seront créés, avec X commençant à 1
"""
def efface_old_mail():
    """Delete the numbered ``email<N>.txt`` working files left by a previous
    session, starting at ``email1.txt`` and stopping at the first missing
    file.  New files will be created again from index 1."""
    import os
    index = 1
    while True:
        filename = "email" + str(index) + ".txt"
        if not os.path.exists(filename):
            print("Fichiers supprimés")
            break
        os.remove(filename)
        index += 1
# Module-level watcher state: last seen inbox message count and the index of
# the next emailN.txt file.  NOTE(review): run() defines locals with the same
# names, so these module-level values appear to be unused leftovers — confirm.
old_mes_nb = -1
x = 1
# remove stale emailN.txt files from any previous session before starting
efface_old_mail()
@with_goto
def run():
    """Interactive mailbox watcher.

    Prompts for Gmail credentials, then polls the inbox over IMAP forever.
    Each new message is dumped to ``email<N>.txt``, scored by the neural
    network, saved, and moved to the ``sur``/``moyen``/``danger`` IMAP folder
    according to its danger score.  Control flow uses the third-party
    ``goto`` module (labels ``connexion``/``start``/``end``).
    """
    print('start')
    label.connexion
    ### Ask for the email address to monitor and its password ###
    connexion_pos = False
    while not connexion_pos:
        adresse = input("Adresse mail: ")
        mdp = input("Mot de passe: ")
        if("@gmail.com" in adresse):
            connexion_pos = True
        else:
            print("Adresse mail non valide\n")
            continue
    label.start
    try:
        old_mes_nb = -1
        x = 1
        ### Connect to the mailbox over IMAP/SSL ###
        try :
            mail = imaplib.IMAP4_SSL('imap.gmail.com')
            mail.login(adresse, mdp)
        except Exception:
            ### Connection failed: jump back to the credential prompt ###
            print("Echec connexion\n")
            goto.connexion
        while True:
            mail.list()
            mail.select('inbox')
            result, data = mail.uid('search', None, "ALL")
            i = len(data[0].split())
            new_mes_nb = i
            if (old_mes_nb == -1):
                old_mes_nb = new_mes_nb
            ### A new message arrived in the mailbox ###
            if (new_mes_nb > old_mes_nb):
                print("\n---NOUVEAU MESSAGE : %i---" % x)
                latest_email_uid = data[0].split()[new_mes_nb - 1]
                result, email_data = mail.uid('fetch', latest_email_uid, '(RFC822)')
                raw_email = email_data[0][1]
                raw_email_string = raw_email.decode('utf-8')
                email_message = email.message_from_string(raw_email_string)
                ### Dump every MIME part into a text file, then build an ###
                ### Email object from that file for the analyser ###
                for part in email_message.walk():
                    save_string = r"email" + str(x) + ".txt"
                    myfile = open(save_string, 'a')
                    myfile.write(str(part))
                    mailo = Email(save_string)
                    myfile.close()
                ### Move the email into the 'traitement' (processing) folder ###
                cible_dossier = 'traitement'
                result_move, err_mes = mail.uid('move', latest_email_uid, cible_dossier)
                if (result_move == 'OK'):
                    print("Mail déplacé avec succès")
                else:
                    print(err_mes)
                mail.select(cible_dossier)
                result, data = mail.uid('search', None, "ALL")
                latest_email_uid = data[0].split()[- 1]
                ### Analyse the message and assign its danger level ###
                mark = mark_email(mailo)
                marks = np.array([mark])
                sortie_traitement = ai.analyse_mail(marks)[0][0]
                save(mailo, marks=mark, grade=sortie_traitement.item())
                print("Résultat traitement :", sortie_traitement)
                if (sortie_traitement >= 0.6): ### Safe message ###
                    result_move, err_mes = mail.uid('move', latest_email_uid, "sur")
                    if (result_move == 'OK'):
                        print("Mail déplacé dans sur")
                    else:
                        print(err_mes)
                elif (sortie_traitement >= 0.4 and sortie_traitement < 0.6): ### Uncertain message ###
                    result_move, err_mes = mail.uid('move', latest_email_uid, "moyen")
                    if (result_move == 'OK'):
                        print("Mail déplacé dans moyen")
                    else:
                        print(err_mes)
                else: ### Dangerous message ###
                    result_move, err_mes = mail.uid('move', latest_email_uid, "danger")
                    if (result_move == 'OK'):
                        print("Mail déplacé dans danger")
                    else:
                        print(err_mes)
                x += 1
                old_mes_nb = new_mes_nb
                print("Analyse effectuée")
            elif (new_mes_nb < old_mes_nb):### Some messages were deleted: resync the counter ###
                old_mes_nb = new_mes_nb
    except TimeoutError:### IMAP connection timed out: jump back to refresh the connection ###
        goto.start
    except KeyboardInterrupt:
        goto.end
    label.end
    mail.logout()
    print("Good bye")
run()
| PtspluS/Phising-Analising | src/Recevoir_email_complet.py | Recevoir_email_complet.py | py | 5,361 | python | fr | code | 1 | github-code | 36 |
3650801558 | from flask import Blueprint, jsonify, g, request
from wrappers.auth_required import auth_required, rate_limited
from models.jobs import TOPJob
from utils.json_helper import jsonify_payload
bp = Blueprint("management", __name__, url_prefix="/management")
@bp.route("/jobs", methods=["GET"])
@auth_required
def get_jobs():
    """List every job owned by the authenticated user.

    Returns a JSON payload of the form ``{'jobs': [...]}``.
    """
    print('user info: ', g.user_id)
    # serialize each job exactly once — the original built the serialized
    # list twice (once for the debug print, once for the response)
    serialized = [job.serialize() for job in TOPJob.find_by_user_id(g.user_id)]
    print('jobs: ', serialized)
    return jsonify_payload({'jobs': serialized})
@bp.route("/jobs", methods=["POST"])
@auth_required
@rate_limited
def create_job():
    """Create a job, or update it when the payload contains ``job_id``.

    Expects a JSON body with ``job_name``, ``job_description`` and an
    optional ``job_id``.  Returns ``{'job': <serialized job>}``.
    """
    payload = request.get_json()
    # NOTE(review): these prints leak request payloads and subscription data
    # into the logs — consider removing or downgrading to debug logging.
    print('payload: ', payload)
    print('subscription data: ', g.subscription)
    job_name = payload.get('job_name')
    job_description = payload.get('job_description')
    job_id = payload.get('job_id')
    if not job_id:
        print('creating job')
        job = TOPJob(job_name, job_description, g.user_id)
        job.save()
    else:
        print('updating job ', job_id)
        # NOTE(review): no ownership check — any authenticated user can update
        # any job_id (IDOR); also find_by_id may return None for a bad id,
        # which would crash on .update().  Verify and guard both cases.
        job = TOPJob.find_by_id(job_id)
        job.update(job_name, job_description)
    return jsonify_payload({'job': job.serialize()})
25548663168 | #!/usr/bin/python3
def list_division(my_list_1, my_list_2, list_length):
    """Divide my_list_1 by my_list_2 element by element.

    Produces a list of exactly ``list_length`` results; whenever an element
    cannot be divided (missing index, non-numeric operand, zero divisor) the
    matching message is printed and 0 is stored in its place.
    """
    result = []
    for i in range(list_length):
        value = 0
        try:
            a = my_list_1[i]          # indexing raises IndexError naturally
            b = my_list_2[i]
            if not (isinstance(a, (int, float)) and
                    isinstance(b, (int, float))):
                raise TypeError("wrong type")
            value = a / b             # raises ZeroDivisionError naturally
        except IndexError:
            print("out of range")
        except TypeError:
            print("wrong type")
        except ZeroDivisionError:
            print("division by zero")
        finally:
            # the original appended inside every handler and carried a dead
            # ``finally: pass``; appending here covers every path exactly once
            result.append(value)
    return (result)
| LeaderSteve84/alx-higher_level_programming | 0x05-python-exceptions/4-list_division.py | 4-list_division.py | py | 1,051 | python | en | code | 0 | github-code | 36 |
317579586 | #!/bin/python3
import math
import os
import random
import re
import sys
def setVisit(M, pos):
    """Mark the grid cell at ``pos`` (x, y) as visited by storing 2."""
    col, row = pos
    M[row][col] = 2
def fillRegion(M, pos, size):
    """Flood-fill the 8-connected region of 1s containing ``pos``.

    Visited cells are overwritten with 2 (same convention as setVisit) and
    the number of cells in the region is returned; 0 when ``pos`` is out of
    bounds or not a 1.  Implemented with an explicit stack because the
    original recursion exceeds Python's default recursion limit on regions
    deeper than ~1000 cells.
    """
    n, m = size
    stack = [pos]
    count = 0
    while stack:
        x, y = stack.pop()
        if x < 0 or y < 0 or x >= m or y >= n:
            continue
        if M[y][x] != 1:
            continue
        M[y][x] = 2  # mark visited
        count += 1
        # push all 8 neighbours (and the cell itself, which is now skipped)
        for dy in (-1, 0, 1):
            for dx in (-1, 0, 1):
                stack.append((x + dx, y + dy))
    return count
# Complete the connectedCell function below.
def connectedCell(matrix):
    """Return the size of the largest 8-connected region of 1s in ``matrix``.

    Runs on a copy, so the input matrix is left untouched.  Uses an
    explicit stack instead of the recursive fillRegion helper so that very
    large regions cannot exceed Python's recursion limit.
    """
    n = len(matrix)
    m = len(matrix[0])
    grid = [list(row) for row in matrix]  # work on a copy, like the original
    best = 0
    for sy in range(n):
        for sx in range(m):
            if grid[sy][sx] != 1:
                continue
            # flood-fill the region starting here, counting its cells
            grid[sy][sx] = 2
            stack = [(sx, sy)]
            size = 0
            while stack:
                x, y = stack.pop()
                size += 1
                for dy in (-1, 0, 1):
                    for dx in (-1, 0, 1):
                        nx, ny = x + dx, y + dy
                        if 0 <= nx < m and 0 <= ny < n and grid[ny][nx] == 1:
                            grid[ny][nx] = 2
                            stack.append((nx, ny))
            best = max(best, size)
    return best
if __name__ == '__main__':
    # HackerRank harness: read the grid dimensions and rows from stdin and
    # write the answer to the file named by the OUTPUT_PATH env variable.
    fptr = open(os.environ['OUTPUT_PATH'], 'w')
    n = int(input())
    m = int(input())
    matrix = []
    for _ in range(n):
        matrix.append(list(map(int, input().rstrip().split())))
    result = connectedCell(matrix)
    fptr.write(str(result) + '\n')
    fptr.close()
| DStheG/hackerrank | HackerRank/connected-cell-in-a-grid.py | connected-cell-in-a-grid.py | py | 1,014 | python | en | code | 0 | github-code | 36 |
13231147951 | import json
import os
import subprocess
import sys
from pathlib import Path
import youtube_dl
ydl_opts_download = {
"format": "bestaudio/best",
"cachedir": False,
"outtmpl": "%(id)s%(ext)s",
"postprocessors": [
{
"key": "FFmpegExtractAudio",
"preferredcodec": "mp3",
"preferredquality": "192",
}
],
}
def download_single_yt(url_list):
    """Download every YouTube URL in ``url_list`` as mp3 (per
    ``ydl_opts_download``), then retry the ones that raised.

    Returns 0 once every URL has been downloaded.
    NOTE(review): a URL that fails permanently makes this retry forever
    (preserved from the original behaviour) — consider a retry cap.
    """
    skipped = []
    for url in url_list:
        try:
            with youtube_dl.YoutubeDL(ydl_opts_download) as ydl:
                ydl.download([url])
        except Exception:
            # the original bare ``except`` also swallowed KeyboardInterrupt
            skipped.append(url)
    if not skipped:
        return 0
    # the original dropped this return value, so callers always received None
    return download_single_yt(skipped)
def ytdownload(link):
    """Resolve a YouTube playlist ``link`` and download every entry.

    Metadata is fetched first (``download=False``); the entry page URLs are
    then handed to download_single_yt(), which performs the mp3 downloads.
    """
    with youtube_dl.YoutubeDL(
        {
            "outtmpl": "%(id)s%(ext)s",
            "quiet": True,
        }
    ) as ydl:
        result = ydl.extract_info(link, download=False)
    if "entries" in result:
        # Can be a playlist or a list of videos
        video = result["entries"]
        playlist_urls = [
            result["entries"][i]["webpage_url"] for i, item in enumerate(video)
        ]
        download_single_yt(playlist_urls)
    # NOTE(review): a plain single-video link has no "entries" key, so it is
    # silently skipped here — confirm whether that is intended.
    print("-" * 15)
def download(title, link, out_folder, i):
    """Download one soundtrack into ``out_folder/<NN - title>/``.

    Spotify links go through the spotdl CLI; YouTube links through
    ytdownload().  The working directory is changed into the target folder
    for the duration of the download and restored afterwards.
    """
    print("downloading ", title, " OTS")
    os.chdir("./" + out_folder)
    # zero-pad the track index so folders sort correctly (01, 02, ... 10)
    fname = str(i).zfill(2) + " - " + title
    os.mkdir(fname)
    os.chdir("./" + fname)
    if "spotify" in link.lower():
        # each CLI token must be its own argv element — the original passed
        # "--output-format wav" as one string, which spotdl's parser rejects
        subprocess.check_call(["spotdl", link, "--output-format", "wav"])
    elif "youtube" in link.lower():
        ytdownload(link)
    os.chdir("..")
    os.chdir("..")
def download_all(json_source, out_folder):
    """Download every soundtrack listed in the ``json_source`` manifest.

    Each entry must provide "title" and "link" keys; spaces are replaced by
    underscores before creating the per-movie folders inside ``out_folder``.
    """
    print("open file...")
    # ``with`` closes the manifest — the original leaked the file handle
    with open(json_source) as file:
        movies = json.load(file)
    print("creating main folder...")
    ost = Path(out_folder)
    if not ost.exists():
        ost.mkdir()
    for i, movie in enumerate(movies):
        link = movie["link"].replace(" ", "_")
        title = movie["title"].replace(" ", "_")
        download(title, link, out_folder, i + 1)
    print("--- DONE ---")
# download_all("ESC.json", "ESC")
| RiccardoPeron/competitions-music-analysis | Functions/downloader.py | downloader.py | py | 2,304 | python | en | code | 0 | github-code | 36 |
38716086802 | #!/usr/bin/env python3
def _first_repeated_frequency(deltas):
    """Return the first running total reached twice while cycling *deltas*."""
    seen = set()
    total = 0
    index = 0
    count = len(deltas)
    while total not in seen:
        seen.add(total)
        total += deltas[index]
        index = (index + 1) % count
    return total


with open('input.txt', 'r') as f:
    adjustments = [int(line) for line in f]

print("Part 1:", sum(adjustments))
print("Part 2:", _first_repeated_frequency(adjustments))
| lvaughn/advent | 2018/1/freq.py | freq.py | py | 339 | python | en | code | 1 | github-code | 36 |
28986176573 | # coding: utf-8
"""
Yapily API
To access endpoints that require authentication, use your application key and secret created in the Dashboard (https://dashboard.yapily.com) # noqa: E501
The version of the OpenAPI document: 0.0.358
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import yapily
from yapily.models.bulk_user_delete_details import BulkUserDeleteDetails # noqa: E501
from yapily.rest import ApiException
class TestBulkUserDeleteDetails(unittest.TestCase):
    """BulkUserDeleteDetails unit test stubs (openapi-generator output)."""
    def setUp(self):
        pass
    def tearDown(self):
        pass
    def make_instance(self, include_optional):
        """Build a BulkUserDeleteDetails fixture.

        ``include_optional`` is a boolean: when False only required params
        are included, when True both required and optional params are
        included.
        """
        # model = yapily.models.bulk_user_delete_details.BulkUserDeleteDetails()  # noqa: E501
        if include_optional :
            return BulkUserDeleteDetails(
                id = '0',
                invalid_application_user_ids = [
                    '0'
                    ],
                invalid_user_uuids = [
                    '0'
                    ],
                status = 'IN_PROGRESS',
                started_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
                users = [
                    yapily.models.user_delete_response.UserDeleteResponse(
                        id = '0',
                        delete_status = 'SUCCESS',
                        creation_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
                        user_consents = [
                            yapily.models.consent_delete_response.ConsentDeleteResponse(
                                id = '0',
                                delete_status = 'SUCCESS',
                                institution_id = '0',
                                institution_consent_id = '0',
                                creation_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), )
                            ], )
                    ],
                links = {
                    'key' : '0'
                }
            )
        else :
            return BulkUserDeleteDetails(
        )
    def testBulkUserDeleteDetails(self):
        """Smoke-test model construction with and without optional fields.

        NOTE(review): generated stub — it only checks construction does not
        raise; no assertions are made on the built instances.
        """
        inst_req_only = self.make_instance(include_optional=False)
        inst_req_and_optional = self.make_instance(include_optional=True)
if __name__ == '__main__':
    unittest.main()
| alexdicodi/yapily-sdk-python | sdk/test/test_bulk_user_delete_details.py | test_bulk_user_delete_details.py | py | 2,749 | python | en | code | null | github-code | 36 |
71148994983 | from utils import connector
async def declare_queue(queue_name, durable=False):
    """Declare *queue_name* on a freshly acquired channel.

    ``durable=True`` makes the queue survive broker restarts.
    """
    channel = await connector.Connector().get_channel()
    await channel.queue_declare(queue=queue_name, durable=durable)
async def bind_queue(queue_name, exchange_name, routing_key):
    """Bind *queue_name* to *exchange_name* under *routing_key*."""
    channel = await connector.Connector().get_channel()
    await channel.queue_bind(queue=queue_name,
                             exchange=exchange_name,
                             routing_key=routing_key)
async def declare_exchange(exchange_name, exchange_type="direct", durable=False):
    """Declare *exchange_name* of the given type on a fresh channel.

    passive=False: actually create the exchange rather than raising when it
    does not already exist.
    internal=False: publishers may use the exchange directly; an internal
    exchange would only be reachable through bindings from other exchanges
    (useful e.g. as a dead-letter exchange for queues).
    """
    channel = await connector.Connector().get_channel()
    await channel.exchange_declare(exchange=exchange_name,
                                   exchange_type=exchange_type,
                                   durable=durable,
                                   passive=False,
                                   internal=False)
| Yuriy-Leonov/python-rabbitmq-example | utils/funcs.py | funcs.py | py | 1,256 | python | en | code | 0 | github-code | 36 |
43189726204 | import pyqtgraph as pg
from pyqtgraph.Qt import QtCore, QtGui
from .. import default_config
import numpy
class CustomViewBox(pg.ViewBox):
    """pyqtgraph ViewBox specialised for the STORM/confocal viewer.

    Adds registration-marker placement on click, confocal-image dragging,
    freehand ROI drawing, and a screen-anchored scale bar.
    NOTE(review): several handlers use ``self.StormDisplay`` and
    ``self.main_window``, which are only set from outside (setConfocalImage
    and the owning window) — __init__ only sets the misspelled
    ``StromDisplay`` attribute; confirm the wiring before relying on it.
    """
    def __init__(self, *args, **kwds):
        pg.ViewBox.__init__(self, *args, **kwds)
        self.StromDisplay=None
        self.ChannelNum=0
        self.ScaleBar = []
        self.ScaleSize = 0
        self.ScaleText = ''
        self.Window = []
        self.FreehandRoi = []
        self.DrawnRoi = []
        self.StormRegistrationChannel = -1
        self.ConfRegistrationChannel = -1
        # NOTE(review): DrawnRoi is re-initialised twice (also above)
        self.DrawnRoi = []
        self.ConfocalOffset = [0, 0]
        self.StormMarkerRois = []
        self.ConfMarkerRois = []
        self.PanMode = default_config.viewer_input_mode
        self.ClickMode ='Norm'
        self.AffineTransform = []
    ## in 'Reg' click mode place registration markers; otherwise defer to
    ## the stock ViewBox click handling (right-click zoom-out etc.)
    def mouseClickEvent(self, ev):
        """Place up to 3 STORM then 3 confocal registration markers per click."""
        if self.ClickMode == 'Reg':
            Current = self.mapToView(ev.pos())
            Marker= pg.ROI([0, 0])
            if len(self.StormMarkerRois)<3:
                self.StormMarkerRois.append(Marker)
                Marker.addFreeHandle([Current.x(),Current.y()])
                # reshape the handle into a square so STORM markers are
                # visually distinct from the confocal ones
                Handle=Marker.getHandles()[0]
                Handle.sides=4
                Handle.startAng=0
                Handle.buildPath()
                Handle.generateShape()
                self.StormDisplay.plot_widget.addItem(Marker)
            else:
                if len(self.ConfMarkerRois)<3:
                    self.ConfMarkerRois.append(Marker)
                    Marker.addFreeHandle([Current.x(),Current.y()])
                    self.StormDisplay.plot_widget.addItem(Marker)
                else:
                    # all 6 markers placed: leave registration mode
                    self.ClickMode='Norm'
        else:
            pg.ViewBox.mouseClickEvent(self, ev)
    def SetRegistrationChannelStorm(self,StormChannelNum):
        """Select which STORM channel is used for registration correlation."""
        self.StormRegistrationChannel=StormChannelNum
    def SetRegistrationChannelConf(self,ConfChannelNum):
        """Select which confocal channel is used for registration correlation."""
        self.ConfRegistrationChannel=ConfChannelNum
    def mouseDragEvent(self, ev):
        """Dispatch drags by PanMode: 'Pan' (default view panning),
        'Conf' (drag the confocal overlay relative to the STORM data) or
        'Roi' (freehand ROI drawing)."""
        if self.PanMode == 'Pan':
            pg.ViewBox.mouseDragEvent(self, ev)
        elif self.PanMode == 'Conf':
            cursorOffset = ev.screenPos() - ev.lastScreenPos()
            # scale to pixel coordinates
            XTrans = cursorOffset[0] * self.viewPixelSize()[0] / 8
            YTrans = cursorOffset[1] * self.viewPixelSize()[1] / 8
            self.ConfocalOffset = [self.ConfocalOffset[0] + YTrans, self.ConfocalOffset[1] + XTrans]
            # move every displayed confocal channel by the same offset
            for CN in range(4):
                if self.StormDisplay.DisplayedConfocalChannel[CN] != 0:
                    self.StormDisplay.DisplayedConfocalChannel[CN].translate(YTrans, XTrans)
            #move the registration markers if there are any:
            Scale=1000.0/self.StormDisplay.ConfocalSizeMultiplier
            for RoiInd in range(len(self.main_window.viewer.display.Viewbox.ConfMarkerRois)):
                Marker= pg.ROI([0, 0])
                OldPoints=self.ConfMarkerRois[RoiInd].getLocalHandlePositions()[0][1]
                self.StormDisplay.plot_widget.removeItem(self.ConfMarkerRois[RoiInd])
                self.ConfMarkerRois[RoiInd]=Marker
                Marker.addFreeHandle([OldPoints.x()+XTrans*Scale * self.StormDisplay.ConfocalMetaData['SizeX'],OldPoints.y()+YTrans*Scale * self.StormDisplay.ConfocalMetaData['SizeY']])
                self.StormDisplay.plot_widget.addItem(Marker)
            #calcualte correlation between confocal and storm channel
            #if event is finished display registration correlation
            if ev.isFinish():
                #if the displayed channels exist:
                if self.ConfRegistrationChannel!=-1 and self.StormRegistrationChannel!=-1:
                    #if the channels are displayed:
                    if self.StormDisplay.DisplayedConfocalChannel[self.ConfRegistrationChannel]!=0 and self.StormDisplay.DisplayedStormChannel[self.StormRegistrationChannel]!=0:
                        #maybe rescale the images if really slow;Or precalculate an image and just index from it
                        Im1=self.StormDisplay.DisplayedConfocalChannel[self.ConfRegistrationChannel]
                        Im2=self.StormDisplay.DisplayedStormChannel[self.StormRegistrationChannel]
                        Scale=1000.0/self.StormDisplay.ConfocalSizeMultiplier
                        Correlation=0
                        # sum the confocal intensity under every STORM point
                        for ind in range(len(Im2.getData()[0])):
                            IndX=(int(Im2.getData()[0][ind])/(Scale * self.StormDisplay.ConfocalMetaData['SizeX']))-self.ConfocalOffset[1]
                            IndY=(int(Im2.getData()[1][ind])/(Scale * self.StormDisplay.ConfocalMetaData['SizeY']))-self.ConfocalOffset[0]
                            if IndX>-1 and IndX<Im1.image.shape[1] and IndY>-1 and IndY<Im1.image.shape[0]:
                                Correlation+=Im1.image[IndY,IndX]
                        Msg=self.main_window.status_bar.currentMessage()
                        Msg=str.split(str(Msg),' Correlation:')[0]
                        #find a possible norm of correlation
                        #mean might be a more representative value for normalization:numpy.mean(Im1.image)
                        MaxCorr=len(Im2.getData()[0])*Im1.image.max()
                        self.main_window.status_bar.showMessage(Msg+' Correlation: '+ str(float(Correlation)/float(MaxCorr)) )
                    else:
                        Msg=self.main_window.status_bar.currentMessage()
                        Msg=str.split(str(Msg),' Correlation:')[0]
                        self.main_window.status_bar.showMessage(Msg+' Correlation: The selected channels are not displayed' )
                #print signal.correlate2d(Im1,Im2)
                if self.main_window.viewer.display.ConfocalSizeMultiplier==1:
                    Scale=1000*self.main_window.viewer.display.ConfocalSizeMultiplier
                else:
                    Scale=10*self.main_window.viewer.display.ConfocalSizeMultiplier
                # reflect the accumulated offset in the spin boxes (nm)
                self.main_window.doubleSpinBox_confocal_display_offset_x.setValue(
                    int(self.ConfocalOffset[1] * Scale * self.main_window.viewer.display.ConfocalMetaData['SizeX']))
                self.main_window.doubleSpinBox_confocal_display_offset_y.setValue(
                    int(self.ConfocalOffset[0] * Scale * self.main_window.viewer.display.ConfocalMetaData['SizeX']))
            ev.accept()
            pos = ev.pos()
            modifiers = QtGui.QApplication.keyboardModifiers()
            # Ctrl + left drag: rubber-band rectangle selection
            if modifiers == QtCore.Qt.ControlModifier and ev.button() == QtCore.Qt.LeftButton:
                if ev.isFinish():
                    # self.traj_widget.update_selection_infos()
                    self.rbScaleBox.hide()
                else:
                    rect_box = QtCore.QRectF(pg.Point(ev.buttonDownPos(ev.button())), pg.Point(pos))
                    rect_box = self.childGroup.mapRectFromParent(rect_box)
                    self.update_selection(rect_box)
                    self.traj_widget.update_selection_infos()
                    self.updateScaleBox(ev.buttonDownPos(), ev.pos())
        elif self.PanMode == 'Roi':
            # append a white line segment from the previous to the current
            # cursor position; the segments accumulate into a freehand curve
            Current = self.mapToView(ev.pos())
            Prev = self.mapToView(ev.lastPos())
            r1 = pg.QtGui.QGraphicsLineItem(Prev.x(), Prev.y(), Current.x(), Current.y())
            r1.setPen(pg.mkPen('w'))
            self.DrawnRoi.append(r1)
            self.addItem(r1)
            self.FreehandRoi.append(Current)
            # closing curve on finish
            if ev.isFinish():
                Current = self.mapToView(ev.buttonDownPos())
                Prev = self.mapToView(ev.pos())
                r1 = pg.QtGui.QGraphicsLineItem(Prev.x(), Prev.y(), Current.x(), Current.y())
                r1.setPen(pg.mkPen('w'))
                self.DrawnRoi.append(r1)
                self.addItem(r1)
                self.FreehandRoi.append(Current)
            ev.accept()
            pos = ev.pos()
            modifiers = QtGui.QApplication.keyboardModifiers()
            if modifiers == QtCore.Qt.ControlModifier and ev.button() == QtCore.Qt.LeftButton:
                if ev.isFinish():
                    # self.traj_widget.update_selection_infos()
                    self.rbScaleBox.hide()
                else:
                    rect_box = QtCore.QRectF(pg.Point(ev.buttonDownPos(ev.button())), pg.Point(pos))
                    rect_box = self.childGroup.mapRectFromParent(rect_box)
                    self.update_selection(rect_box)
                    self.traj_widget.update_selection_infos()
                    self.updateScaleBox(ev.buttonDownPos(), ev.pos())
    def deleteFreehandROI(self, roi):
        """Remove every freehand ROI segment and reset the point list."""
        for r in self.DrawnRoi:
            self.removeItem(r)
        self.FreehandRoi = []
        self.DrawnRoi = []
        # NOTE(review): rebinding the local ``roi`` has no effect on callers
        roi = None
    def deleteActiveContourROI(self, DrawnElements):
        """Remove every drawn active-contour element from the view."""
        for r in DrawnElements:
            self.removeItem(r)
    def deleteActiveContourROI3d(self, DrawnElements):
        """Same as deleteActiveContourROI, kept separate for the 3D path."""
        for r in DrawnElements:
            self.removeItem(r)
    def deleteEllipseROI(self, roi):
        """Remove a single ellipse ROI item from the view."""
        self.removeItem(roi)
    def updateMatrix(self, changed=None):
        """Redraw the scale bar anchored to the view corner on zoom/pan,
        then defer to the stock ViewBox matrix update.

        NOTE(review): ``changed`` is ignored — the super call always passes
        changed=None; confirm against the pyqtgraph version in use.
        """
        # keep scale bar at same position
        if self.ScaleBar != []:
            ViewRange = self.viewRange()
            # anchor 5% in from the bottom-left corner of the visible range
            XLength = (ViewRange[0][1] - ViewRange[0][0]) * 0.05
            YLength = (ViewRange[1][1] - ViewRange[1][0]) * 0.05
            Xpos = ViewRange[0][0] + XLength
            Ypos = ViewRange[1][0] + YLength
            self.ScaleBar.clear()
            self.Window.removeItem(self.ScaleText)
            self.ScaleBar = self.Window.plot(x=[Xpos, Xpos + self.ScaleSize], y=[Ypos, Ypos], symbol='o')
            PosX = Xpos
            PosY = Ypos + YLength * 0.1
            self.ScaleText = pg.TextItem(text=str(self.ScaleSize) + ' nm', color=(200, 200, 200))
            self.Window.addItem(self.ScaleText)
            self.ScaleText.setPos(PosX, PosY)
        pg.ViewBox.updateMatrix(self, changed=None)
    def setScaleBar(self, ScaleBar, Window, Size, Text):
        """Register an externally created scale bar (plot item, owning
        window, length in nm, and its text item)."""
        self.ScaleBar = ScaleBar
        self.Window = Window
        self.ScaleSize = Size
        self.ScaleText = Text
    def deleteScaleBar(self):
        """Remove the scale bar and its label, if one is displayed."""
        if self.ScaleBar != []:
            self.ScaleBar.clear()
            self.Window.removeItem(self.ScaleText)
            self.ScaleBar = []
            self.ScaleSize = 0
            self.ScaleText = ''
    def setWindow(self, Window):
        """Set the plot window the scale bar is drawn into."""
        self.Window = Window
    def deleteConfocalImage(self):
        # NOTE(review): clears the misspelled ``StromDisplay`` attribute,
        # not the ``StormDisplay`` set by setConfocalImage — confirm intent
        self.StromDisplay = None
    def setConfocalImage(self, StormDisplay, ChannelNum):
        """Attach the display object and active channel used by the handlers."""
        self.StormDisplay = StormDisplay
        self.ChannelNum = ChannelNum
| KatonaLab/vividstorm | controllers/viewer/CustomViewBox.py | CustomViewBox.py | py | 10,817 | python | en | code | 0 | github-code | 36 |
24304374080 | from argparse import ArgumentParser
from gitrello import Gitrello
import github
import trello
import settings
if __name__ == "__main__":
    # CLI: --repo <repository name> --pr_id <pull request number>
    parser = ArgumentParser()
    parser.add_argument('--pr_id', required=True)
    parser.add_argument('--repo', required=True)
    args = parser.parse_args()
    # authenticated GitHub user and the Trello board configured in settings
    g = github.Github(settings.GITHUB_TOKEN).get_user()
    client = trello.TrelloClient(api_key=settings.API_KEY, token=settings.API_TOKEN)
    board = client.get_board(settings.BOARD_ID)
    # NOTE(review): raises IndexError when no repo matches args.repo
    repo = [x for x in g.get_repos() if x.name == args.repo][0]
    pull = repo.get_pull(int(args.pr_id))
    # create one Trello card for the pull request
    gitrello = Gitrello(pull, board)
    card = gitrello.create_card()
72640402983 | import os, argparse, traceback, glob, random, itertools, time, torch, threading, queue
import numpy as np
import torch.optim as optim
from models.tacotron import post_CBHG
from torch.nn import L1Loss
from torch.utils.data import Dataset, DataLoader
from torch.nn.utils.rnn import pad_sequence
from util.hparams import *
data_dir = './data'
mel_list = sorted(glob.glob(os.path.join(data_dir + '/mel', '*.npy')))
spec_list = sorted(glob.glob(os.path.join(data_dir + '/spec', '*.npy')))
mel_len = np.load(os.path.join(data_dir + '/mel_len.npy'))
def DataGenerator():
    """Endless generator of (mel, spec) training batches.

    Each cycle samples ``batch_group`` clip indices without replacement,
    splits them into ``batch_size`` chunks, and yields zero-padded tensor
    pairs loaded from the precomputed .npy files.  ``mel_len[i][1]`` maps a
    length-sorted index back to the file index, so batches group clips of
    similar length and padding stays small.
    """
    while True:
        idx_list = np.random.choice(len(mel_list), batch_group, replace=False)
        idx_list = sorted(idx_list)
        idx_list = [idx_list[i : i + batch_size] for i in range(0, len(idx_list), batch_size)]
        random.shuffle(idx_list)
        for idx in idx_list:
            random.shuffle(idx)
            mel = [torch.from_numpy(np.load(mel_list[mel_len[i][1]])) for i in idx]
            spec = [torch.from_numpy(np.load(spec_list[mel_len[i][1]])) for i in idx]
            # pad every clip in the batch to the longest one
            mel = pad_sequence(mel, batch_first=True)
            spec = pad_sequence(spec, batch_first=True)
            yield [mel, spec]
class Generator(threading.Thread):
    """Prefetch thread: pulls items from *generator* into a bounded queue so
    the consumer never waits on data preparation.  The thread starts itself
    on construction and puts a ``None`` sentinel when the source is done."""
    def __init__(self, generator):
        threading.Thread.__init__(self)
        self.queue = queue.Queue(8)  # small buffer bounds memory use
        self.generator = generator
        self.start()
    def run(self):
        source = iter(self.generator)
        while True:
            try:
                item = next(source)
            except StopIteration:
                break
            self.queue.put(item)
        self.queue.put(None)  # sentinel: source exhausted
    def next(self):
        """Return the next prefetched item; raise StopIteration at the end."""
        item = self.queue.get()
        if item is None:
            raise StopIteration
        return item
def train(args):
    """Train the post-CBHG network (Tacotron stage 2): mel -> linear spec.

    Resumes from ``args.checkpoint`` when given, then loops forever saving a
    checkpoint every ``checkpoint_step`` steps under ./ckpt/<args.name>/2.
    Requires a CUDA device.
    """
    train_loader = Generator(DataGenerator())
    model = post_CBHG(K=8, conv_dim=[256, mel_dim]).cuda()
    optimizer = optim.Adam(model.parameters())
    step, epochs = 0, 0
    if args.checkpoint is not None:
        ckpt = torch.load(args.checkpoint)
        model.load_state_dict(ckpt['model'])
        optimizer.load_state_dict(ckpt['optimizer'])
        # NOTE(review): the trailing comma makes a 1-tuple, which is then
        # unpacked on the next line — works, but looks unintentional
        step = ckpt['step'],
        step = step[0]
        epoch = ckpt['epoch']
        print('Load Status: Epoch %d, Step %d' % (epoch, step))
    torch.backends.cudnn.benchmark = True
    try:
        # NOTE(review): counting starts at ``epochs`` (always 0) — the
        # resumed ``epoch`` value is not used here; confirm intent
        for epoch in itertools.count(epochs):
            for _ in range(batch_group):
                start = time.time()
                mel, target = train_loader.next()
                mel = mel.float().cuda()
                target = target.float().cuda()
                pred = model(mel)
                loss = L1Loss()(pred, target)
                model.zero_grad()
                loss.backward()
                optimizer.step()
                step += 1
                print('step: {}, loss: {:.5f}, {:.3f} sec/step'.format(step, loss, time.time() - start))
                if step % checkpoint_step == 0:
                    save_dir = './ckpt/' + args.name + '/2'
                    torch.save({
                        'model': model.state_dict(),
                        'optimizer': optimizer.state_dict(),
                        'step': step,
                        'epoch': epoch
                    }, os.path.join(save_dir, 'ckpt-{}.pt'.format(step)))
    except Exception as e:
        # print the traceback instead of crashing the training process
        traceback.print_exc()
if __name__ == "__main__":
    # CLI: -n/--name is the run name (checkpoint folder), -c/--checkpoint
    # optionally resumes from a saved checkpoint file.
    parser = argparse.ArgumentParser()
    parser.add_argument('--checkpoint', '-c', default=None)
    parser.add_argument('--name', '-n', required=True)
    args = parser.parse_args()
    # make sure the checkpoint folder exists before training starts
    save_dir = os.path.join('./ckpt/' + args.name, '2')
    os.makedirs(save_dir, exist_ok=True)
    train(args)
| chldkato/Tacotron-pytorch | train2.py | train2.py | py | 3,633 | python | en | code | 6 | github-code | 36 |
10571851448 | import copy
from node import Node
import heapq
class Search():
    """Best-first search over pancake-stack states.

    The frontier is a heap of ``(total_cost, order_added, node)`` tuples;
    ``order_added`` breaks cost ties in insertion order.
    """
    def __init__(self, unsorted_stack, search_type):
        self.unsorted_stack = unsorted_stack
        self.frontier = []  # heap of (total cost, insertion order, node)
        self.visited = []
        self.order_added = 0
        self.root = Node(unsorted_stack, None, self.order_added, search_type)
        # call total_cost() so the priority is a number, consistent with every
        # other frontier entry (the original stored the bound method object)
        self.frontier.append((self.root.total_cost(), 0, self.root))
        self.order_added += 1
    def find_duplicate(self, state):
        """Return the frontier entry holding ``state``, or None."""
        for entry in self.frontier:  # renamed: original shadowed builtin tuple
            if entry[2].state == state:
                return entry
        return None
    def expand_frontier(self, node):
        """Push a child of ``node`` for every possible flip level."""
        for level in range(2, len(self.unsorted_stack)):
            child = copy.deepcopy(node)
            child.flip(level)
            child.parent = node
            child.order_added = self.order_added
            dup = self.find_duplicate(child.state)
            if child.state not in self.visited:
                if dup is None:
                    # state is new: push a proper (cost, order, node) entry
                    heapq.heappush(self.frontier,
                                   (child.total_cost(), child.order_added, child))
                    self.order_added += 1
                elif dup[0] > child.total_cost():
                    # cheaper route to a known state: replace the entry with a
                    # (cost, order, node) tuple — the original stored the bare
                    # Node here, which broke later heappop/[2] indexing — and
                    # restore the heap invariant after the in-place mutation
                    self.frontier[self.frontier.index(dup)] = (
                        child.total_cost(), child.order_added, child)
                    heapq.heapify(self.frontier)
    def find_solution(self):
        """Pop the cheapest node until a sorted state (heuristic 0) appears."""
        while self.frontier:
            # explore cheapest node in frontier
            node = heapq.heappop(self.frontier)[2]
            self.visited.append(node.state)  # record as visited
            if node.get_heuristic() == 0:  # if sorted, return sorted stack
                return node
            self.expand_frontier(node)
| JosephCarpenter/Informed-Search-Algorithm | search.py | search.py | py | 1,905 | python | en | code | 0 | github-code | 36 |
70781957544 | from django.conf.urls import url, include
from rest_framework.urlpatterns import format_suffix_patterns
from devup.views import UpList, UpDetail, UpCreate, UpUpdate
# URL namespace used for {% url 'devup:...' %} reversing.
app_name = 'devup'
urlpatterns = [
    url(r'^up_list$', UpList.as_view(), name='up_list'),
    url(r'^up_create$', UpCreate.as_view(), name='up_create'),
    url(r'^up_detail/(?P<pk>[-\w]+)/$', UpDetail.as_view(), name='up_detail'),
    # NOTE(review): the bare-pk route below duplicates 'up_detail' with a
    # different name — confirm the overlap is intentional.
    url(r'^(?P<pk>[-\w]+)/$', UpDetail.as_view(), name='detail'),
    url(r'^(?P<pk>\d+)/update$', UpUpdate.as_view(), name='update'),
]
| maherrub/aot | devup/urls.py | urls.py | py | 539 | python | en | code | 0 | github-code | 36 |
73701609385 | import math
import torch
import torch.nn as nn
import torch.nn.functional as F
class Bottleneck(nn.Module):
    """DenseNet bottleneck unit: BN-ReLU-1x1conv then BN-ReLU-3x3conv,
    concatenating *growthRate* new feature maps onto the input."""
    def __init__(self, nChannels, growthRate, dropout_rate):
        super(Bottleneck, self).__init__()
        self.dropout_rate = dropout_rate
        # the 1x1 convolution expands to 4 * growthRate intermediate channels
        hidden = 4 * growthRate
        self.bn1 = nn.BatchNorm2d(nChannels)
        self.conv1 = nn.Conv2d(nChannels, hidden, kernel_size=1, bias=False)
        self.bn2 = nn.BatchNorm2d(hidden)
        self.conv2 = nn.Conv2d(hidden, growthRate, kernel_size=3, padding=1,
                               bias=False)
    def forward(self, x):
        new_features = self.conv1(F.relu(self.bn1(x)))
        new_features = self.conv2(F.relu(self.bn2(new_features)))
        if self.dropout_rate > 0:
            new_features = F.dropout(new_features, p=self.dropout_rate,
                                     training=self.training)
        return torch.cat((x, new_features), 1)
class SingleLayer(nn.Module):
    """Plain DenseNet unit: BN -> ReLU -> 3x3 conv, with the *growthRate*
    new feature maps concatenated onto the input."""
    def __init__(self, nChannels, growthRate, dropout_rate):
        super(SingleLayer, self).__init__()
        self.dropout_rate = dropout_rate
        self.bn1 = nn.BatchNorm2d(nChannels)
        self.conv1 = nn.Conv2d(nChannels, growthRate, kernel_size=3, padding=1,
                               bias=False)
    def forward(self, x):
        new_features = self.conv1(F.relu(self.bn1(x)))
        if self.dropout_rate > 0:
            new_features = F.dropout(new_features, p=self.dropout_rate,
                                     training=self.training)
        return torch.cat((x, new_features), 1)
class Transition(nn.Module):
    """DenseNet transition: BN -> ReLU -> 1x1 conv (channel squeeze), then
    2x2 average pooling to halve the spatial resolution."""
    def __init__(self, nChannels, nOutChannels):
        super(Transition, self).__init__()
        self.bn1 = nn.BatchNorm2d(nChannels)
        self.conv1 = nn.Conv2d(nChannels, nOutChannels, kernel_size=1,
                               bias=False)
    def forward(self, x):
        squeezed = self.conv1(F.relu(self.bn1(x)))
        return F.avg_pool2d(squeezed, 2)
class Model(nn.Module):
    """DenseNet-style image classifier.

    The ``depth`` budget is split evenly into ``n_layers`` dense blocks
    joined by Transition layers (1x1 conv + 2x2 avg-pool).  With
    ``bottleneck`` each unit is a Bottleneck and transitions halve the
    channel count (reduction 0.5); otherwise SingleLayer units are used with
    no channel reduction.  forward() returns log-probabilities over
    ``nClasses``.
    """
    def __init__(self, nClasses=256, growthRate=12, depth=40, bottleneck=False,
                 dropout_rate=0.0, n_layers=3, **kwargs):
        super().__init__()
        self.n_layers = n_layers
        print("n_layers", n_layers)
        # dense blocks per layer
        nDenseBlocks = (depth - 4) // n_layers
        if bottleneck:
            nDenseBlocks //= 2
        if bottleneck:
            reduction = 0.5
        else:
            reduction = 1.0
        # initial convolution
        nChannels = 2 * growthRate
        self.conv1 = nn.Conv2d(3, nChannels, kernel_size=3, padding=1,
                               bias=False)
        for layer_n in range(1, n_layers + 1):
            dense_layer = self._make_dense(
                nChannels, growthRate, nDenseBlocks, bottleneck, dropout_rate)
            setattr(self, f'dense{layer_n}', dense_layer)
            nChannels += nDenseBlocks * growthRate
            # no transition after the last dense block
            if layer_n < n_layers:
                nOutChannels = int(math.floor(nChannels * reduction))
                trainsition_layer = Transition(nChannels, nOutChannels)
                setattr(self, f'trans{layer_n}', trainsition_layer)
                nChannels = nOutChannels
        self.bn1 = nn.BatchNorm2d(nChannels)
        self.fc = nn.Linear(nChannels, nClasses)
        # He initialisation for convolutions; standard BN/linear init
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
            elif isinstance(m, nn.Linear):
                m.bias.data.zero_()
    def _make_dense(self, nChannels, growthRate, nDenseBlocks, bottleneck, dropout_rate):
        """Stack ``nDenseBlocks`` dense units, growing channels by growthRate each."""
        layers = []
        for i in range(int(nDenseBlocks)):
            if bottleneck:
                layers.append(Bottleneck(nChannels, growthRate, dropout_rate))
            else:
                layers.append(SingleLayer(nChannels, growthRate, dropout_rate))
            nChannels += growthRate
        return nn.Sequential(*layers)
    def forward(self, x):
        out = self.conv1(x)
        for i in range(1, self.n_layers):
            dense_layer = getattr(self, f'dense{i}')
            trans_layer = getattr(self, f'trans{i}')
            out = trans_layer(dense_layer(out))
        last_dense_layer = getattr(self, f'dense{self.n_layers}')
        out = last_dense_layer(out)
        # global average pool over the remaining spatial extent
        out = F.avg_pool2d(F.relu(self.bn1(out)), out.size()[-1])
        out = torch.squeeze(torch.squeeze(out, 2), 2)
        # explicit dim=1: the implicit-dim log_softmax is deprecated and
        # warns; for this (batch, classes) tensor dim=1 matches old behaviour
        out = F.log_softmax(self.fc(out), dim=1)
        return out
    def save(self, path):
        """Serialize model weights to ``path``."""
        torch.save(self.state_dict(), path)
    def load(self, path):
        """Load weights previously written by :meth:`save`."""
        state_dict = torch.load(path)
        self.load_state_dict(state_dict)
| ikhlestov/caltech-ml-courses | models/model_dense.py | model_dense.py | py | 4,801 | python | en | code | 0 | github-code | 36 |
6797727811 | # django imports
from django import template
import itertools
import datetime
import pytz
import dateutil
register = template.Library()
@register.filter
def group_by_date(dates, timezone):
tz = pytz.timezone(timezone)
dates_parser = []
for day in dates:
try:
new_date = pytz.utc.localize(dateutil.parser.parse(day))
except ValueError:
new_date = dateutil.parser.parse(day)
dates_parser.append(new_date)
days = [
tz.normalize(day.replace(tzinfo=pytz.utc)) for day in dates_parser
]
days2 = [
list(group) for k, group in itertools.groupby(
days, key=datetime.datetime.toordinal,
)
]
return [(day[0].date, day) for day in days2]
| tomasgarzon/exo-services | service-exo-mail/mail/templatetags/group_by.py | group_by.py | py | 747 | python | en | code | 0 | github-code | 36 |
36588084295 | # 에라토스테네스의 체
import sys
input = sys.stdin.readline
n, k = map(int, input().split())
nums = [i for i in range(2, n+1)]
t = 0
while True:
m = nums[0]
for n in nums:
if n%m == 0:
ans = n
nums.remove(n)
t += 1
if t == k:
break
if t == k:
break
print(ans)
| meatsby/algorithm | etc/bootcampprep/codingtestprep/day2/6.py | 6.py | py | 356 | python | en | code | 0 | github-code | 36 |
12560528102 | from flask import Flask, jsonify, request, redirect, Response, render_template
import requests
from config import api_key, cam_names, rover_det
app = Flask(__name__)
@app.route('/')
def home():
return render_template("index.html")
@app.route('/rover', methods = ['POST'])
def rover():
rov_name = request.form['optrover']
return redirect(f'/{rov_name}.html?emsg=OK')
@app.route('/<rov_name>.html')
def pic_criteria(rov_name):
err_msg = request.args.get('emsg')
if err_msg == None:
err_msg = ""
rov_det = rover_det[rov_name]
rov_pic = rov_det["rov_pic"]
st_date = rov_det["landing_date"]
end_date = rov_det["max_date"]
cameras = rov_det["cameras"]
cam_list = {}
for cam in cameras:
cam_list.update({cam:cam_names[cam]})
return render_template('pic_criteria.html', rname=rov_name, rpic=rov_pic, sdat=st_date, edat=end_date, clist=cam_list, emsg=err_msg)
@app.route('/img_criteria', methods = ['POST'])
def imgcrit():
rov_name = request.args.get('rov_name')
form_date = request.form['date']
try:
form_cam = request.form['optcam']
except:
form_cam = ""
return redirect(f'/list.html?rov_name={rov_name}&img_date={form_date}&sel_cam={form_cam}')
@app.route('/list.html')
def img_list():
opts=""
rov_name = request.args.get('rov_name')
img_date = request.args.get('img_date')
sel_cam = request.args.get('sel_cam')
opts = "earth_date=" + img_date
if sel_cam != "":
opts += "&camera=" + sel_cam
opts += "&api_key=" + api_key
emsg = ""
api_list = requests.get(f'https://api.nasa.gov/mars-photos/api/v1/rovers/{rov_name}/photos?{opts}')
if api_list.text == "":
emsg = 'No images for that camera and date. Please try again.'
return redirect(f'/{rov_name}.html?emsg="No images for that camera and date. Please try again."')
else:
api_list = eval(api_list.text)
img_list = api_list["photos"]
max_rows = len(img_list)
rend_list = {}
for i in range(1, max_rows+1):
row_cam = img_list[i-1]["camera"]["full_name"]
row_url = img_list[i-1]["img_src"]
row_date = img_list[i-1]["earth_date"]
dictval = {
"camera":row_cam,
"img_url":row_url,
"earth_date":row_date,
}
rend_list.update({i:dictval})
return render_template('list.html', rname=rov_name, ilist=rend_list, alist=api_list)
if __name__ == '__main__':
app.run(debug=True) | brianr0922/mars_rover | main.py | main.py | py | 2,308 | python | en | code | 0 | github-code | 36 |
6783941465 | """Change column distance_bin to distance_cat
Revision ID: 2524785502b4
Revises: c137e7385dd7
Create Date: 2020-03-20 16:47:15.648707
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '2524785502b4'
down_revision = 'c137e7385dd7'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('race', sa.Column('distance_cat', sa.String(), nullable=True))
op.create_foreign_key(None, 'race', 'runner_contact', ['runner_contact_id'], ['id'])
op.drop_column('race', 'distance_bin')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('race', sa.Column('distance_bin', sa.VARCHAR(), autoincrement=False, nullable=True))
op.drop_constraint(None, 'race', type_='foreignkey')
op.drop_column('race', 'distance_cat')
# ### end Alembic commands ###
| dcjohnson24/gugs_db | migrations/versions/2524785502b4_change_column_distance_bin_to_distance_.py | 2524785502b4_change_column_distance_bin_to_distance_.py | py | 980 | python | en | code | 0 | github-code | 36 |
37634895040 | # A critical point in a linked list is defined as either a local maxima or a local minima.
# A node is a local maxima if the current node has a value strictly greater than the previous node and the next node.
# A node is a local minima if the current node has a value strictly smaller than the previous node and the next node.
# Note that a node can only be a local maxima/minima if there exists both a previous node and a next node.
# Given a linked list head, return an array of length 2 containing [minDistance, maxDistance] where minDistance is the minimum distance between any two distinct critical points and maxDistance is the maximum distance between any two distinct critical points. If there are fewer than two critical points, return [-1, -1].
# Example 1:
# Input: head = [3,1]
# Output: [-1,-1]
# Explanation: There are no critical points in [3,1].
# Example 2:
# Input: head = [5,3,1,2,5,1,2]
# Output: [1,3]
# Explanation: There are three critical points:
# - [5,3,1,2,5,1,2]: The third node is a local minima because 1 is less than 3 and 2.
# - [5,3,1,2,5,1,2]: The fifth node is a local maxima because 5 is greater than 2 and 1.
# - [5,3,1,2,5,1,2]: The sixth node is a local minima because 1 is less than 5 and 2.
# The minimum distance is between the fifth and the sixth node. minDistance = 6 - 5 = 1.
# The maximum distance is between the third and the sixth node. maxDistance = 6 - 3 = 3.
# Example 3:
# Input: head = [1,3,2,2,3,2,2,2,7]
# Output: [3,3]
# Explanation: There are two critical points:
# - [1,3,2,2,3,2,2,2,7]: The second node is a local maxima because 3 is greater than 1 and 2.
# - [1,3,2,2,3,2,2,2,7]: The fifth node is a local maxima because 3 is greater than 2 and 2.
# Both the minimum and maximum distances are between the second and the fifth node.
# Thus, minDistance and maxDistance is 5 - 2 = 3.
# Note that the last node is not considered a local maxima because it does not have a next node.
# Constraints:
# The number of nodes in the list is in the range [2, 105].
# 1 <= Node.val <= 105
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, val=0, next=None):
# self.val = val
# self.next = next
class Solution:
def nodesBetweenCriticalPoints(self, head: Optional[ListNode]) -> List[int]:
prev = None
min_dis = float("inf")
max_dis = float("-inf")
first_critical = None
last_critical = None
cur = head
i = 0
while cur:
if prev and cur.next:
if (cur.next.val > cur.val < prev.val) or (prev.val< cur.val > cur.next.val ):
if last_critical is not None:
min_dis = min(min_dis,i-last_critical)
last_critical = i
else:
last_critical = i
if first_critical is not None:
max_dis = max(max_dis,i-first_critical)
else:
first_critical = i
prev,cur = cur,cur.next
i+=1
return [-1 if min_dis == float("inf") else min_dis, -1 if max_dis==float("-inf") else max_dis ]
| sunnyyeti/Leetcode-solutions | 2058 Find the Minimum and Maximum Number of Nodes Between Critical Points.py | 2058 Find the Minimum and Maximum Number of Nodes Between Critical Points.py | py | 3,223 | python | en | code | 0 | github-code | 36 |
22727851226 | user_input = input("vorodi: ").split(",")
print(user_input)
file_obj = open("my_file.txt","w+")
for item in range(len(user_input)):
shomarandeh = "shomare:__"+str(item+1) +" |"
name = "name:"+user_input[item]
char_count = "|" +"char count "+ str(len(user_input[item]))
to_wirte = "{0:<6s}{1:^24s}{02:<24s}\n".format(shomarandeh,name,char_count)
data = file_obj.write(to_wirte)
file_obj.close() | mahdi76-karaj/Example | hafteh_9/4/1/tamrin.py | tamrin.py | py | 413 | python | en | code | 0 | github-code | 36 |
32519507649 | from fastapi import FastAPI, Request, HTTPException, status, Depends ,File, UploadFile
from fastapi.templating import Jinja2Templates
from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm
from fastapi.staticfiles import StaticFiles
from starlette.responses import HTMLResponse
from tortoise.contrib.fastapi import register_tortoise
from tortoise.signals import post_save
from tortoise import BaseDBAsyncClient
from typing import Optional, Type, List
from PIL import Image
import secrets
from datetime import datetime
from emails import *
from db_models import User, Business, Product
from pydentic_models import (
user_pydenticIn,
user_pydentic,
business_pydentic,
business_pydenticIn,
product_pydenticIn,
product_pydentic
)
from authentication import get_hashed_password, verify_token, token_generator
app = FastAPI()
templates = Jinja2Templates(directory='templates')
oauth2_scheme = OAuth2PasswordBearer(tokenUrl='token')
app.mount('/static', StaticFiles(directory='static'), name='static')
async def get_current_user(token: str=Depends(oauth2_scheme)):
try:
payload = jwt.decode(token, config_credentials['SECRET'], algorithms=['HS256'])
user = await User.get(id=payload.get('id'))
except:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail='Invalid token',
headers={'WWW-Authenticate': 'Bearer'}
)
return await user
@app.post('/products')
async def get_products(product: product_pydenticIn, user: user_pydentic=Depends(get_current_user)):
product = product.dict(exclude_unset=True)
if product['original_price'] > 0:
product['percentage_discount'] = ((product['original_price'] - product['new_price']) / product['original_price']) * 100
product_obj = await Product.create(**product, business=user)
product_obj = await product_pydenticIn.from_tortoise_orm(product_obj)
return {
'status': 'OK',
'data': product_obj
}
else:
return {
'status': 'ERROR'
}
@app.get('/products')
async def get_product():
response = await product_pydentic.from_queryset(Product.all())
return {
'status': 'OK',
'data': response
}
@app.get('/products/{id}')
async def get_product(id: int):
product = await Product.get(id=id)
business = await product.business
owner = await business.owner
response = await product_pydentic.from_queryset_single(Product.get(id=id))
return {
'status': 'OK',
'data': {
'product_details': product,
'business_details': {
'name': business.name,
'city': business.city,
'region': business.region,
'description': business.description,
'logo': business.logo,
'owner_id': owner.id,
'email': owner.email,
'join_date': owner.join_date.strtime('%b %d %Y'),
},
}
}
@app.delete('/products/{id}')
async def delete_product(id: int, user: user_pydentic=Depends(get_current_user)):
product = await Product.get(id=id)
business = await product.business
owner = await business.owner
if user == owner:
product.delete()
return {
'status': 'OK'
}
return HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail='Invalid token',
headers={'WWW-Authenticate': 'Bearer'}
)
@app.post('/user/me')
async def user_login(user: user_pydenticIn=Depends(get_current_user)):
business = await Business.get(owner=user)
logo = business.logo
logo_path = 'localhost:8000/static/images/' + logo
return {
'status': 'OK',
'data': {
'username': user.username,
'email': user.email,
'verified': user.is_verified,
'join_date': user.join_date.strtime('%b %d %Y'),
'logo': logo_path
}
}
@app.post('/token')
async def generate_token(request_form: OAuth2PasswordRequestForm = Depends()):
token = await token_generator(request_form.username, request_form.password)
return {'access_token': token,
'token_type': 'bearer'}
@app.get('verification/', response_class=HTMLResponse)
async def email_verification(request: Request, token: str):
user = await verify_token(token)
if user and not user.is_verified:
user.is_verified = True
await user.save()
return templates.TemplateResponse(
'verification.html',
{'request': request,
'username': user.username},
)
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail='Invalid token',
headers={'WWW-Authenticate': 'Bearer'}
)
@post_save(User)
async def create_business(sender: 'Type[User]', instance: User, created:bool,
using_db: 'Optional[BaseDBAsyncClient]', updated_fields: List[str]) -> None :
if created:
business_obj = await Business.create(
name=instance.username,
owner = instance,
)
await business_pydentic.from_tortoise_orm(business_obj)
await send_email([instance.email], instance)
@app.post('/registration')
async def user_registration(user: user_pydenticIn):
user_info = user.dict(exclude_unset=True)
user_info['password'] = get_hashed_password(user_info['password'])
user_obj = await User.create(**user_info)
new_user = await user_pydentic.from_tortoise_orm(user_obj)
return {
'status': 'OK',
'data': f'Hello, {new_user.username}, thanks for your registration, check your email'
}
@app.get('/')
def index():
return {'Message': 'Hello World!'}
@app.post('/uploadfile/profile')
async def create_upload_file(file: UploadFile=File(...), user: user_pydentic=Depends(get_current_user)):
FILEPATH = './static/images/'
filename = file.filename
extension = filename.split('.')[1]
if extension not in ['jpg', 'png']:
return {
'status': 'ERROR',
'detail': 'File extension not allowed'
}
token_name = f'{secrets.token_hex(10)}.{extension}'
generated_name = FILEPATH + token_name
file_content = await file.read()
with open(generated_name, 'wb') as file:
file.write(file_content)
img = Image.open(generated_name)
img = img.resize(size=(200, 200))
img.save(generated_name)
file.close()
business = await Business.get(owner=user)
owner = await business.owner
if owner == user:
business.logo = token_name
await business.save()
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail='Not Authenticated to perform this action',
headers={'WWW-Authenticate': 'Bearer'}
)
file_url = 'localhost:8000' + generated_name[1:]
return {
'status': 'OK',
'filename': file_url
}
@app.post('/uploadfile/product/{id}')
async def create_upload_file(id: int, file: UploadFile=File(...),
user: user_pydentic=Depends(get_current_user)):
FILEPATH = './static/images/'
filename = file.filename
extension = filename.split('.')[1]
if extension not in ['jpg', 'png']:
return {
'status': 'ERROR',
'detail': 'File extension not allowed'
}
token_name = f'{secrets.token_hex(10)}.{extension}'
generated_name = FILEPATH + token_name
file_content = await file.read()
with open(generated_name, 'wb') as file:
file.write(file_content)
img = Image.open(generated_name)
img = img.resize(size=(200, 200))
img.save(generated_name)
file.close()
product = await Product.get(id=id)
business = await Product.business
owner = await business.owner
if owner == user:
product.product_image = token_name
await product.save()
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail='Not Authenticated to perform this action',
headers={'WWW-Authenticate': 'Bearer'}
)
file_url = 'localhost:8000' + generated_name[1:]
return {
'status': 'OK',
'filename': file_url
}
@app.put('/product/{id}')
async def update_product(id: int, update_info: product_pydenticIn, user: user_pydentic=Depends(get_current_user)):
product = await Product.get(id=id)
business = await product.business
owner = await business.owner
update_info = update_info.dict(exclude_unset=True)
update_info['date_published'] = datetime.utcnow()
if owner == user and update_info['original_price'] >= 0:
update_info['percentage_discount'] = ((update_info['original_price'] - update_info['new_price']) / update_info['original_price']) * 100
product = await product.update_from_dict(update_info)
await product.save()
response = await product_pydentic.from_tortoise_orm(product)
return {
'status': 'OK',
'data': response
}
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail='Not Authenticated to perform this action',
headers={'WWW-Authenticate': 'Bearer'}
)
@app.put('/business/{id}')
async def update_business(id: int, update_business: business_pydenticIn, user: user_pydentic=Depends(get_current_user)):
update_business = update_business.dict()
business = await Business.get(id=id)
owner = await business.owner
if user == owner:
await business.update_from_dict(update_business)
await business.save()
response = await business_pydentic.from_tortoise_orm(business)
return {
'status': 'OK',
'data': response
}
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail='Not Authenticated to perform this action',
headers={'WWW-Authenticate': 'Bearer'}
)
register_tortoise(
app,
db_url='sqlite://database.sqlite3',
modules={'models': ['db_models']},
generate_schemas=True,
add_exception_handlers=True,
) | AlexBabilya/E-Commerce | main.py | main.py | py | 10,643 | python | en | code | 1 | github-code | 36 |
3407130666 | #!/usr/bin/env python
"""A script to normalized interview transcripts.
It outputs a single text file with cleaned lines
one sentence per line"""
import argparse
import re
import string
import spacy
fillers = [
"eh",
"m",
"mm",
"mmm",
"ah",
"ahm",
"ehm",
"yy",
"y",
"aha",
"a-ha",
"aa",
"e",
"ee",
"łyy",
"ym",
"yym",
"ymm",
"yyym",
"oh",
"am",
"oo",
"hm",
"em",
"emm",
"eem",
"yyo",
"ii",
"nnn",
"nn",
"no",
"mhm",
"am",
"amm",
"aam",
"eey",
"eeyy",
"mmyy",
"yhm",
"ymhm",
"mmy",
"yynn",
"li",
"cc",
]
nlp = spacy.load("pl_core_news_lg")
punctuation = string.punctuation + '…' +'–' + '’' + "‘"
def get_data(path: str) -> list:
"""reads .txt file into a list of strings"""
list_of_lines = []
with open(path, "r") as source:
for line in source:
if line == False:
continue
else:
line = line.lstrip().rstrip()
list_of_lines.append(line)
return list_of_lines
def write_data(data: list, path: str):
"""writes data line by line into a file"""
with open(path, "w") as sink:
for sentence in data:
print(sentence, file=sink)
def remove_fillers(line: str) -> str:
"""removes filler expresisons"""
tokens = line.split()
for word in tokens:
if word in fillers:
tokens.remove(word)
no_fillers = " ".join(word for word in tokens)
return no_fillers
def pre_tokenization(data: list) -> list:
"""data normalization to be performed before sentence tokenization"""
cleaned = []
for line in data:
# replace ';' and '%' with 'ł'
add_l = re.sub(r"[;%]", "ł", line)
# replace the elipses with whitespaces to account for certain types of stutters
no_elipses = re.sub(r"[…]", " ", add_l)
# replace two period elipses with whitespace
two_periods = re.sub(r"\.\.", " ", no_elipses)
# remove hyphenated stutters
no_stutters = re.sub(r"\b[a-zA-ZżźćńółęąśŻŹĆĄŚĘŁÓŃ]+-+\W", "", two_periods)
# remove digits and numbers
no_numbers = re.sub(r"(?:[+-]|\()?\$?\d+(?:,\d+)*(?:\.\d+)?\)?", "", no_stutters)
# remove bracketed content
no_brackets = re.sub(r"\[.*?\]", "", no_numbers)
# remove content in parentheses
no_parens = re.sub(r"\(.*?\)", "", no_brackets)
# remove all duplicate words
# retain only the first word
no_duplicates = re.sub(r"\b(\w+)(?:\W+\1\b)+", r"\1", no_parens)
# append only non-empty strings
if no_duplicates:
cleaned.append(no_duplicates)
return cleaned
def make_continuous(data: list) -> string:
"""joins a list of strings into one long string"""
one_line = " ".join(data)
return one_line
def post_tokenization(data: list) -> list:
"""data normaization to be performed after sentence tokenization"""
cleaned = []
for sentence in data:
# casefold and strip
casefolded = sentence.lower().lstrip().rstrip()
# standardize quotation marks
standard_quotes = re.sub(r"[„”“]", '"', casefolded)
# remove punctuation
no_polish_punctuation = standard_quotes.translate(str.maketrans("", "", punctuation))
# remove the hyphens
no_hyphens = re.sub(r"-", " ", no_polish_punctuation)
# remove the fillers
no_fillers = remove_fillers(no_hyphens)
# remove duplicates left over after the fillers were removed
# leave only the first word
no_duplicates = re.sub(r"\b(\w+)(?:\W+\1\b)+", r"\1", no_fillers)
# remove multiple white spaces
single_spaces = re.sub(" +", ' ', no_duplicates)
if single_spaces:
cleaned.append(single_spaces)
return cleaned
def main(args: argparse.Namespace) -> None:
lines = get_data(args.input)
cleaned = pre_tokenization(lines)
one_line = make_continuous(cleaned)
to_tokenize = nlp(one_line)
sent_tokenized = [sentence.text for sentence in to_tokenize.sents]
cleaned_again = post_tokenization(sent_tokenized)
write_data(cleaned_again, args.output)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("--input", help="path to source file")
parser.add_argument("--output", help="path to output")
main(parser.parse_args())
| zoobereq/He-write-age | data cleaning/normalize.py | normalize.py | py | 4,556 | python | en | code | 3 | github-code | 36 |
31058563267 | #important topics
#inheritance and incapsulation
#here varbale will be default public
#if you want to make variable private use __variable
class Person:
__age=89
def __init__(self):
self.first_name="Ram"
self.last_name="Thapa"
self.age=23
p=Person()
print(p.first_name)
print(Person.__age);#it will give error because it is private
#getter and setter are used to access the private variable
class Person:
__age=89
def __init__(self):
self.first_name="Ram"
self.last_name="Thapa"
self.age=23
#getter is used to get the value of the private variable
def get_age(self):
return self.__age
#setter is used to set the value of the private variable
def set_age(self,age):
if age>0:
self.__age=age
else:
print("age should be greater than 0")
print(Person.get_age())
#getter has one parameter which is self
#setter have two parameter one is self and other is new value
| subin131/Python-OOP | OOP/June30Python.py | June30Python.py | py | 1,052 | python | en | code | 0 | github-code | 36 |
32884223298 | #Import random
import random
#Create the function below:
inte=random.randint(1,5)
entero=inte
row=[]
matrix=[]
def matrixBuilder(entero):
for i in range(entero):
row.append(1)
for i in range(entero):
global matrix
matrix.append(row)
return matrix
print(matrixBuilder(entero))
| sergioadll/python-loops | exercises/15.1-Matrix_Builder/app.py | app.py | py | 311 | python | en | code | 0 | github-code | 36 |
43606735550 | import os
etl_repo_home="/opt/pentaho/repositories/grip-pentaho-di-reports/ETL/reports"
#Creating Base directory
mifid2_base_dir="mifid"
mifid2tr_rg_dirs=['mifid2-tr\mifid2-tr-reports-trade',
'mifid2-tr\mifid2-tr-reports-correction',
'mifid2-tr\mifid2-tr-reports-automation']
mifid2cpr_rg_dirs=['mifid2-cpr\mifid2-cpr-reports-position',
'mifid2-cpr\mifid2-cpr-reports-correction',
'mifid2-cpr\Mifid2-cpr-reports-automation']
try:
os.chdir(etl_repo_home)
except:
print("Not able to Change the directory to ETL repository Home")
exit()
if os.path.isdir(mifid2_base_dir):
os.chdir(mifid2_base_dir)
else:
try:
os.makedirs(mifid2_base_dir)
os.chdir(mifid2_base_dir)
except:
print("Not able to Change the directory to ETL repository Home")
exit()
for dirlist in mifid2tr_rg_dirs,mifid2cpr_rg_dirs:
for dir in dirlist:
if os.path.isdir(dir):
print('Directory Already Exist : ',dir)
else:
os.makedirs(dir)
| Kulamanipradhan0/Python | Module/Os/CreateDirectoryMifidRG.py | CreateDirectoryMifidRG.py | py | 1,125 | python | en | code | 0 | github-code | 36 |
23251362882 | import os
from zipfile import ZipFile, ZIP_DEFLATED
class Zip():
"""Zip up all the contents of a directory into the output file."""
def __init__(self, input_directory, output_file):
self.input_directory = input_directory
self.output_file = output_file
def zip(self):
try:
zip_file = ZipFile(self.output_file, 'w', ZIP_DEFLATED)
for root, dirs, files in os.walk(self.input_directory, topdown=True):
dir_prefix = root[len(os.path.commonprefix([self.input_directory, root]))+1:]
if len(dirs) is 0 and len(files) is 0:
zip_file.write(root, dir_prefix[:-1])
else:
for name in files:
zip_file.write(os.path.join(root, name), os.path.join(dir_prefix, name))
finally:
zip_file.close()
| thewtex/odt-respace | source/odt_respace/zip.py | zip.py | py | 809 | python | en | code | 1 | github-code | 36 |
70797390505 | import copy
import matplotlib.colors as colors
import matplotlib.pyplot as plt
import nibabel as nib
import numpy as np
from matplotlib import cm
from util.util import info, crop_center, error, print_timestamped
# Fixed hex palette of visually distinct base colors, one per main cluster
# (consumed by colormap_fusion below, which caps n_clusters at its length).
different_colors = ["#FF0000", "#008000", "#0000FF", "#FFD700",  # Red, green, blue, gold
                    "#00BFFF", "#DDA0DD", "#808080", "#800000",  # Light blue, magenta, gray, maroon
                    "#808000", "#00FF00", "#FFFF00", "#800080",  # Olive, lime, yellow, purple
                    "#008080", "#000080"]  # Teal, navy
class PlotHandler:
    """Creates and manages the output folders of a run and saves all plots.

    Depending on ``args.sliced`` the plots are written either as 2D PNG
    slices or as full NIfTI volumes (plus a PNG preview of the chosen slice).
    """

    def __init__(self, args, options, complementary=False):
        """Build the output folder tree for the current phase/experiment.

        Args:
            args: experiment arguments (mapping names, clustering settings,
                slicing options, experiment/test-set names, ...).
            options: run options providing ``output_data_folder``, ``phase``
                and ``prefix``.
            complementary: if True, the folder suffix is "_complementary"
                instead of the method/cluster-derived suffix.
        """
        self.output_data_folder = options.output_data_folder
        self.phase = options.phase  # 0 search, 1 train, 2 test
        self.prefix = options.prefix
        # Names of the two MRI modalities being mapped (used in file names).
        self.mapping_source = args.mapping_source
        self.mapping_target = args.mapping_target
        # Maps the internal visual keys ('source', 'target', ...) to the
        # modality names used in the plot file names.
        self.plot_names_dict = {}
        self.plot_names_dict['source'] = self.mapping_source
        self.plot_names_dict['target'] = self.mapping_target
        self.sliced = args.sliced
        self.chosen_slice = args.chosen_slice
        self.plot_only_results = args.plot_only_results
        self.nifti_image_extension = ".nii.gz"
        # Fixed (height, width) all 2D plots are center-cropped to.
        self.target_shape = (210, 180)
        # 2D runs save PNGs; 3D runs save NIfTI volumes (plus PNG previews).
        if self.sliced:
            self.image_extension = ".png"
        else:
            self.image_extension = self.nifti_image_extension
        # Set plot folder
        base = self.output_data_folder / args.experiment_name
        if complementary:
            specific_name = "_complementary"
        else:
            specific_name = "_" + args.method + "_main" + str(args.main_clusters) + "_sub" + str(args.sub_clusters)
        # Outside of training, the post-processing choice is part of the name.
        if self.phase != 1:
            specific_name += "_" + str(args.postprocess)
        # Phase-dependent plot folder: search (or testing a search model),
        # training, or testing.
        if self.phase == 0 or (self.phase == 2 and args.model_phase == "search"):
            self.plot_folder = base / (args.test_set + "_search" + specific_name)
        elif self.phase == 1:
            self.plot_folder = base / (self.prefix + specific_name)
        else:
            self.plot_folder = base / (args.test_set + specific_name)
        if self.phase != 1 or not self.plot_only_results:
            self.plot_folder.mkdir(parents=True, exist_ok=True)
        self.train_folder = None
        self.labels_folder = None
        info("The plots for the current run will be saved in " + str(base) + ".")
        # Train/label subfolders are only needed when intermediate plots are
        # requested and we are not in the test phase.
        if not args.plot_only_results and self.phase != 2:
            self.train_folder = self.plot_folder / "train"
            self.labels_folder = self.plot_folder / "labels"
            self.train_folder.mkdir(parents=True, exist_ok=True)
            self.labels_folder.mkdir(parents=True, exist_ok=True)
            print(
                "The plots of the training images for the current run will be saved in " + str(self.train_folder) + ".")
            print("The plots of the labels for the current run will be saved in " + str(self.labels_folder) + ".")

    def plot_reference(self, reference_mri, model_folder, mris_shape, affine, method, main, sub):
        """Save the reference label volume of a trained model as NIfTI."""
        plot_nifti(reference_mri,
                   model_folder / (
                           "lab_" + method + "_main" + str(main) + "_sub" + str(sub) + self.nifti_image_extension),
                   mris_shape,
                   affine)

    def plot_train(self, visuals, patient_name, mris_shape, affine):
        """Save the training images (source/target) for one patient.

        Skipped entirely when only final results are requested. In sliced
        mode a cropped grayscale PNG is saved; otherwise a PNG preview of
        ``chosen_slice`` plus the full NIfTI volume.
        """
        if not self.plot_only_results:
            for label, image in visuals.items():
                filename = self.train_folder / (patient_name + "_" + self.plot_names_dict[label] + self.image_extension)
                if self.sliced:
                    reshaped = image.reshape(mris_shape)
                    cropped = crop_center(reshaped, self.target_shape)
                    plot_image(cropped,
                               filename,
                               colormap=cm.get_cmap('gray'),
                               mris_shape=mris_shape,
                               plotbar=False)
                else:
                    # PNG preview of the chosen slice ...
                    reshaped = image.reshape(mris_shape)
                    cropped = crop_center(reshaped[:, :, self.chosen_slice], self.target_shape)
                    plot_image(cropped,
                               str(filename).split(".")[0] + ".png",
                               colormap=cm.get_cmap('gray'),
                               plotbar=False)
                    # ... plus the full volume.
                    plot_nifti(image,
                               filename,
                               mris_shape,
                               affine=affine)

    def plot_results(self, visuals, patient_name, smoothing, mris_shape, affine):
        """Save the result images (including learned targets) for one patient.

        Visual keys containing 'truth' are skipped here; each other visual
        goes into a per-patient subfolder of the plot folder.
        """
        # Register file-name suffixes for the learned outputs.
        self.plot_names_dict['learned_target'] = self.mapping_target + "_learned"
        self.plot_names_dict['learned_target_smoothed'] = self.mapping_target + "_learned_" + smoothing
        for label, image in visuals.items():
            if 'truth' not in label:
                folder = self.plot_folder / patient_name
                folder.mkdir(parents=True, exist_ok=True)
                filename = folder / (patient_name + "_" + self.plot_names_dict[label] + self.image_extension)
                if self.sliced:
                    reshaped = image.reshape(mris_shape)
                    cropped = crop_center(reshaped, self.target_shape)
                    plot_image(cropped,
                               filename,
                               colormap=cm.get_cmap('gray'),
                               mris_shape=mris_shape,
                               plotbar=False)
                else:
                    reshaped = image.reshape(mris_shape)
                    cropped = crop_center(reshaped[:, :, self.chosen_slice], self.target_shape)
                    plot_image(cropped,
                               str(filename).split(".")[0] + ".png",
                               colormap=cm.get_cmap('gray'),
                               plotbar=False)
                # NOTE: unlike plot_train, the NIfTI is written in both modes.
                plot_nifti(image,
                           filename,
                           mris_shape,
                           affine=affine)

    def plot_shaded_labels(self, patient_name, labels1, labels2, method, main_clusters, mris_shape, affine):
        """Save the cluster label maps of source and target with a fused colormap.

        The labels are plotted as shaded images (one color shade per cluster,
        via ``colormap_fusion``) and additionally written as NIfTI volumes.
        """
        m1_filename = self.labels_folder / (
                patient_name + "_" + self.mapping_source + "_labels_" + method + self.image_extension)
        m2_filename = self.labels_folder / (
                patient_name + "_" + self.mapping_target + "_labels_" + method + self.image_extension)
        if self.sliced:
            reshaped1 = labels1.reshape(mris_shape)
            cropped1 = crop_center(reshaped1, self.target_shape)
            plot_image(cropped1,
                       m1_filename,
                       shaded_labels=1.0,
                       colormap=colormap_fusion(main_clusters),
                       mris_shape=mris_shape,
                       plotbar=False)
            reshaped2 = labels2.reshape(mris_shape)
            cropped2 = crop_center(reshaped2, self.target_shape)
            plot_image(cropped2,
                       m2_filename,
                       shaded_labels=1.0,
                       colormap=colormap_fusion(main_clusters),
                       mris_shape=mris_shape,
                       plotbar=False)
        else:
            reshaped1 = labels1.reshape(mris_shape)
            cropped1 = crop_center(reshaped1[:, :, self.chosen_slice], self.target_shape)
            plot_image(cropped1,
                       str(m1_filename).split(".")[0] + ".png",
                       shaded_labels=1.0,
                       colormap=colormap_fusion(main_clusters),
                       plotbar=False)
            reshaped2 = labels2.reshape(mris_shape)
            cropped2 = crop_center(reshaped2[:, :, self.chosen_slice], self.target_shape)
            plot_image(cropped2,
                       str(m2_filename).split(".")[0] + ".png",
                       shaded_labels=1.0,
                       colormap=colormap_fusion(main_clusters),
                       plotbar=False)
        # Label volumes are always written as NIfTI, in both modes.
        plot_nifti(labels1,
                   m1_filename,
                   mris_shape,
                   affine=affine)
        plot_nifti(labels2,
                   m2_filename,
                   mris_shape,
                   affine=affine)

    def print_tumour(self, tumour, patient_name, mris_shape, affine):
        """Save the ground-truth tumour mask for one patient.

        Skipped when only final results are requested. In sliced mode
        ``tumour`` is passed through unchanged (plot_image reshapes a flat
        array itself); otherwise the chosen slice is cropped for the PNG
        preview and the full volume is written as NIfTI.
        """
        if not self.plot_only_results:
            folder = self.plot_folder / patient_name
            folder.mkdir(parents=True, exist_ok=True)
            filename = folder / (patient_name + "_truth_tumour" + self.image_extension)
            if self.sliced:
                plot_image(tumour,
                           filename,
                           mris_shape=mris_shape,
                           plotbar=False)
            else:
                reshaped = tumour.reshape(mris_shape)
                cropped = crop_center(reshaped[:, :, self.chosen_slice], self.target_shape)
                plot_image(cropped,
                           str(filename).split(".")[0] + ".png",
                           plotbar=False)
                plot_nifti(tumour,
                           filename,
                           mris_shape,
                           affine=affine)
def plot_image(image,
               filename,
               colormap=None,
               mris_shape=None,
               shaded_labels=None,
               one_int_bounds=False,
               plotbar=True,
               white_bg=False,
               verbose=True):
    """Render *image* as a 2D matplotlib figure and save it to *filename*.

    Args:
        image: 1D (reshaped via mris_shape) or 2D array to display.
        filename: output path passed to plt.savefig.
        colormap: matplotlib colormap; defaults to a fresh copy of 'viridis'.
        mris_shape: target shape when the image arrives flattened.
        shaded_labels: when set, treat values as cluster labels in
            [0, shaded_labels] and derive colorbar bounds from the colormap.
        one_int_bounds: use integer tick bounds when the max value is small.
        plotbar: whether to add a colorbar.
        white_bg: save with an opaque background instead of transparent.
        verbose: log the saved path.
    """
    # Fix for a mutable-default bug: the previous default colormap object was
    # created once at import time and set_under() below mutated that shared
    # object for every later call. Build a fresh copy per call instead.
    if colormap is None:
        colormap = copy.copy(cm.get_cmap('viridis'))
    if plotbar:
        res_size1 = 6
        res_size2 = 5
    else:
        res_size1 = res_size2 = 5
    fig = plt.figure(figsize=(res_size1, res_size2), dpi=300)
    if len(image.shape) == 1:
        if mris_shape is not None:
            image = image.reshape(mris_shape)
        else:
            error("The image cannot be reshaped and showed with imshow")
    elif len(image.shape) > 2:
        error("The image has a shape greater than 2. You might have forgotten to slice it.")
    # Rotate/flip into the display orientation used throughout the project.
    image = np.rot90(image, k=-1)
    image = np.flip(image, axis=1)
    if shaded_labels is None:
        max_lin = np.max(image)
        bounds = None
    else:
        # The fused colormap stacks 256 shades per cluster (see colormap_fusion).
        n_clusters = int(colormap.N / 256)
        max_lin = shaded_labels
        bounds = np.linspace(0, max_lin, n_clusters + 1)
        if one_int_bounds and max_lin < 15:
            bounds = range(int(max_lin) + 1)
    # Tiny epsilon so exact zeros fall below vmin and render as 'under' (white).
    min_val = np.min(image) + 0.1e-10
    if min_val > max_lin:
        min_val = max_lin
    if colormap != cm.get_cmap('gray'):
        colormap.set_under('w')
    plt.axis('off')
    plt.xticks([])
    plt.yticks([])
    sc = plt.imshow(image,
                    cmap=colormap,
                    vmin=min_val,
                    vmax=max_lin)
    if plotbar:
        plt.colorbar(sc, cax=None, ticks=bounds)
    plt.savefig(filename, bbox_inches='tight', transparent=not white_bg)
    if verbose:
        print_timestamped("Saved in " + str(filename))
    plt.close(fig)
def plot_nifti(image, filename, mris_shape=None, affine=None, verbose=True):
    """Write *image* to *filename* as a NIfTI volume via nibabel.

    A flat array is reshaped with mris_shape first; if the shape is missing
    the project-wide error() handler is invoked. When no affine is supplied,
    a fixed default (as used by the project's reference space) is applied.
    """
    default_affine = np.array([[-1., 0., 0., -0.],
                               [0., -1., 0., 239.],
                               [0., 0., 1., 0.],
                               [0., 0., 0., 1.]])
    affine = default_affine if affine is None else affine
    if len(image.shape) == 1:
        if mris_shape is None:
            error("The image cannot be reshaped, please add the shape.")
        else:
            image = image.reshape(mris_shape)
    nib.save(nib.Nifti1Image(image, affine=affine), filename)
    if verbose:
        print_timestamped("Saved in " + str(filename))
def colormap_fusion(n_clusters):
    """Build a ListedColormap with 256 white-to-colour shades per cluster.

    Shades start at 20% intensity so that even the faintest label is visible.
    Errors out when more clusters are requested than base colours exist.
    """
    if n_clusters > len(different_colors):
        error("The number of clusters is greater than the available size of colours.")
    shade_blocks = [
        shaded_color_map(different_colors[idx])(np.linspace(0.20, 1, 256))
        for idx in range(n_clusters)
    ]
    return colors.ListedColormap(np.vstack(shade_blocks))
def full_colormap_fusion(n_clusters):
    """Build a flat ListedColormap with one solid colour per cluster."""
    if n_clusters > len(different_colors):
        error("The number of clusters is greater than the available size of colours.")
    return colors.ListedColormap(different_colors[:n_clusters])
def shaded_color_map(rgb_color):
    """Return a linear colormap fading from white to *rgb_color*."""
    endpoints = ["white", rgb_color]
    return colors.LinearSegmentedColormap.from_list("", endpoints)
| giuliabaldini/brainclustering | util/plot_handler.py | plot_handler.py | py | 12,669 | python | en | code | 0 | github-code | 36 |
35962565077 | import numpy as np
import scipy.stats as stats
def gauss(x, mz, sigma, A):
    """Gaussian of amplitude A centred at mz with width sigma, evaluated at x."""
    exponent = -((x - mz) ** 2) / (2.0 * sigma ** 2)
    return A * np.exp(exponent)
def sig(respwr, mz):
    """Gaussian sigma for a peak at m/z *mz* given resolving power *respwr*.

    FWHM = mz / respwr, and sigma = FWHM / 2.355 (FWHM-to-sigma factor).
    """
    fwhm = mz / respwr
    return fwhm / 2.355
def peaklist(filename):
    """Parse an MSP-style peak-list text file into a float array.

    Header lines containing 'Name:', 'MW:', 'Comment:' or 'Num' and blank
    lines are skipped; every remaining line is split on tabs and its last
    column dropped (it carries the trailing newline). 'Num peaks:' counts
    are parsed for validation but not returned (legacy behaviour).

    Returns:
        numpy.ndarray of the remaining columns as floats.
    """
    rows = []
    pknums = []  # peak counts from 'Num peaks:' headers (parsed but unused)
    # 'with' guarantees the file is closed even if parsing raises.
    with open(filename) as f:
        for line in f:
            if 'Num peaks:' in line:
                parts = line.split(' ')
                pknums.append(int(parts.pop(-1)))
            if ('Name:' not in line and 'MW:' not in line and 'Comment:' not in line
                    and 'Num' not in line and line != '\n'):
                fields = line.split('\t')
                fields.pop(-1)  # drop last column (trailing newline/annotation)
                rows.append(fields)
    # np.float was removed in NumPy 1.24; builtin float is the replacement.
    return np.array(rows).astype(float)
# specsim.genMS(mz, win, pkvar = 0.2, noiselevel = 5e-3, bsline = False,
# baselevel = 0.05, rp = 4000, points = 30000)
# Generate mass spectrum with dict input mz
# Inputs:
# mz - type: dict, format {'mz1':[m/z, int],.....} where m/z is the peak center
# and int is the signal height on a scale [0,1]
# win - spectral window on which to create spectrum, m/z 100 - m/z 1000 recomm.
#
# pkvar - scale of random samples used to scale positive peak heights, 0.2
# typical
#
# noiselevel - height of electronics noise
#
# bsline - if True, raised exponential baseline is added at beginning of
# spectrum, default is False.
# baselevel - coefficient to determine prominence of raised baseline
# realistic values ~ 0.05 - 0.2
# misalign - option to add slight random shift to m/z access, simulates ppm
# error for peak center values
# rp - resolving power of the spectrum, recommended value = 4,000
#
# points - number of data points to use, recommended = 30,000
# RETURNS:
# sigsim - vector of simulated intensity
# x - vector of m/z values over which sigsim is calculated
def genMS(mz, win, pkvar = 0.2, noiselevel = 5e-3, bsline = False,
          baselevel = 0.05, misalign = False, rp = 4000, points = 30000):
    """Simulate a mass spectrum of Gaussian peaks over the window *win*.

    Args:
        mz: dict of {'name': [center_mz, intensity]} peak definitions.
        win: (mz_start, mz_end) spectral window.
        pkvar: std-dev of the multiplicative per-point peak-height jitter.
        noiselevel: amplitude of the additive Gaussian electronics noise.
        bsline / baselevel: add a decaying-exponential baseline of this height.
        misalign: add a small random perturbation simulating ppm error.
        rp: resolving power used to derive each peak's sigma via sig().
        points: number of samples across the window.

    Returns:
        (x, sigsim): m/z axis and simulated intensity arrays.

    Note: this function consumes np.random draws in a fixed order, so the
    exact output for a given seed depends on the statement order below.
    """
    mz0 = win[0]
    mzf = win[1]
    x = np.linspace(mz0,mzf, points)
    sigsim = np.zeros(points)
    # Add random shift to m/z axis
    if misalign == True:
        # Generate spectrum with Gaussian peaks and small random peak shift.
        # NOTE(review): the random offset is applied inside sig(...), i.e. it
        # perturbs only the peak-width calculation, not the peak centre passed
        # to gauss() — confirm this matches the intended ppm-error model.
        for key in mz:
            sigsim += gauss(x, mz[key][0], sig(rp,mz[key][0]+
                                               np.random.normal(0,0.005)), mz[key][1])
    else:
        # Generate spectrum with Gaussian peaks
        for key in mz:
            sigsim += gauss(x, mz[key][0], sig(rp,mz[key][0]), mz[key][1])
    # Add baseline rise at low mass end
    if bsline == True:
        baseline = baselevel*np.exp(-1*np.linspace(0,5,points))
        sigsim += baseline
    # Add peak variance: only strictly-positive samples are jittered, so the
    # number of random draws here depends on the data generated above.
    for i in range(points):
        if sigsim[i] > 0:
            sigsim[i] *= abs(1 + np.random.normal(0,pkvar))
    # Add electronic baseline noise
    noise = np.random.normal(0,1,points)*noiselevel
    sigsim += noise
    return x, sigsim
def file_sz(fname):
    """Return (line_count, column_count) for a space-separated data file.

    The column count is the number of space-separated tokens on the last
    line minus one (the final token carries the trailing newline).
    """
    with open(fname) as handle:
        count = 0
        for last_line in handle:
            count += 1
    tokens = last_line.split(' ')[:-1]
    return count, len(tokens)
def importgendat(fileloc):
    """Load a space-separated spectrum file generated by genMS.

    Returns:
        (mz, signal): first column as the m/z axis and the remaining
        columns as the intensity matrix.
    """
    points, n = file_sz(fileloc)
    arr = np.empty((points, n))
    # 'with' fixes the previous file-handle leak (the file was never closed).
    with open(fileloc) as f:
        for i, line in enumerate(f):
            fields = line.split(' ')
            fields.pop(-1)  # drop the trailing token (carries the newline)
            # np.float was removed in NumPy 1.24; builtin float is the replacement.
            arr[i, :] = np.array(fields).astype(float)
    mz = arr[:, 0]
    signal = arr[:, 1:]
    return mz, signal
| kbenham4102/MSGen | specsim.py | specsim.py | py | 3,523 | python | en | code | 0 | github-code | 36 |
42258313707 | # This program adds two numbers
# Beginner exercises: arithmetic, lists, tuples, dicts and sets.
num1 = 1
num2 = 2
# Add two numbers; named 'total' so the builtin sum() is not shadowed.
total = num1 + num2
# Display the sum
print('The sum is:', total)
# Finding the average of 2 numbers.
A = 1
B = 2
avg = (A + B) / 2
print("average of A & B =", avg)
# Membership test: does "apple" exist in the list?
my_list = ["apple", "banana", "cherry"]
if "apple" in my_list:
    print("apple Exists")
else:
    print("apple dose not exist")
# Change the value of the first item to "mango".
my_list[0]="mango"
print("after changing value",my_list)
# Insert a new element 'watermelon' into the list.
my_list.append("watermelon")
print("after adding new element",my_list)
# Tuples allow duplicate values; a one-item tuple needs a trailing comma.
names=('valr','valr','karthi', 'balaji','valr')
print('names',names)
animals=('Valar',)
print('list of animals',animals)
# Create a dictionary and read the value of the first key.
useless = {1:'Amma',2:'Amma1', 'valar':'very useless'}
print("list of useless things",useless)
print('first element',useless[1])
# Dictionaries do NOT keep duplicate keys: the last value for key 1 wins.
useless = {1:'Amma',1:'Amma', 'valar':'very useless'}
print('check for duplicate',useless)
# Create a set and delete one element.
a={1,2,3,4,5}
a.remove(5)
print("removing the last element",a)
# Common elements of two sets via intersection.
a = {1, 2, 3, 4, 5}
b = {4, 5, 6, 7, 8}
print("intersection",a.intersection(b))
| karthickr1503/python_class | assignment.py | assignment.py | py | 1,760 | python | en | code | 0 | github-code | 36 |
33154135207 | #!/usr/bin/python3
#encoding: UTF-8
import lxml.etree as ET
import markdown as MD
import lib as LIB
#-------------------------------------------------------------------------------
def xpath_list(from_node, xpath):
    """
    Return all nodes matching xpath from from_node as dom node list.
    Non-Element inputs yield an empty list instead of raising.
    """
    if not isinstance(from_node, ET._Element):
        return []
    return from_node.xpath(xpath)
#-------------------------------------------------------------------------------
def xpath_num_sorted(from_node, xpath, xp_key):
    """
    Return all nodes matching xpath from from_node, numerically sorted by the
    value found at xp_key (an xpath relative to each matched item).
    """
    matches = xpath_list(from_node, xpath)
    return sorted(matches, key=lambda node: xpath_int(node, xp_key))
#-------------------------------------------------------------------------------
def xpath_alpha_sorted(from_node, xpath, xp_key):
    """
    Return all nodes matching xpath from from_node, alphabetically sorted by
    the value found at xp_key (an xpath relative to each matched item).
    """
    matches = xpath_list(from_node, xpath)
    return sorted(matches, key=lambda node: xpath_plain(node, xp_key))
#-------------------------------------------------------------------------------
def xpath_node(from_node, xpath):
    """
    Return the first node matching xpath from from_node as a dom node.
    """
    matches = xpath_list(from_node, xpath)
    return LIB.first(matches)
#-------------------------------------------------------------------------------
def xpath_plain(from_node, xpath):
    """
    Return the first node matching xpath from from_node as plain text.
    """
    matches = xpath_list(from_node, xpath)
    return LIB.first_str(matches)
#-------------------------------------------------------------------------------
def xpath_int(from_node, xpath):
    """
    Return the first node matching xpath from from_node as an integer.
    """
    text = LIB.first_str(xpath_list(from_node, xpath))
    return int(text)
#-------------------------------------------------------------------------------
def xpath_md(from_node, xpath):
    """
    Return the first node matching xpath, rendered from Markdown to HTML.
    /!\ just for simple paragraphs (no images, no extern refs...)
    """
    text = LIB.first_str(xpath_list(from_node, xpath))
    return MD.markdown(text)
#-------------------------------------------------------------------------------
def add_error(meta, fct, errlevel, errno, errmsg, line, text):
    """
    Append a new <error> element to the meta node of an XML document.
    Returns the created node, or None when no meta node is given.
    """
    if meta is None:
        return None
    attributes = {
        "fct": fct,
        "errlevel": errlevel,
        "errno": errno,
        "errmsg": errmsg,
        "line": str(line),
    }
    err_node = ET.SubElement(meta, "error", attributes)
    err_node.text = str(text)
    return err_node
#-------------------------------------------------------------------------------
def add_external_ref(meta, ext_ref, from_ref):
    """
    Append a new <external> reference element to the meta node.
    Returns the created node, or None when no meta node is given.
    """
    if meta is None:
        return None
    return ET.SubElement(meta, "external", {"to_ref": ext_ref, "from_ref": from_ref})
#-------------------------------------------------------------------------------
def add_attachment(meta, to_path, ext, file):
    """
    Append a new <attachment> element to the meta node, unless an attachment
    with the same to_path already exists. Returns the created node or None.
    """
    if meta is None:
        return None
    duplicates = xpath_list(meta, "attachment[@to_path='" + to_path + "']")
    if duplicates:
        return None
    return ET.SubElement(meta, "attachment", {"to_path": to_path, "ext": ext, "file": file})
#-------------------------------------------------------------------------------
def extend_to_connected(all_docs, ref_set):
    """
    Collect all references transitively reachable from ref_set via the
    <external from_ref -> to_ref> links, iterating to a fixed point
    (iterative replacement for the previous recursive formulation).
    """
    connected = set(ref_set)
    grew = True
    while grew:
        expanded = set(connected)
        for ref in connected:
            to_refs = xpath_list(all_docs, ".//external[@from_ref='" + ref + "']/@to_ref")
            expanded |= set(to_refs)
        grew = len(expanded) != len(connected)
        connected = expanded
    return connected
#-------------------------------------------------------------------------------
def add_dict_as_xml(parent, a_dict):
    """
    Recursively append child elements to parent mirroring the dict structure.

    Keys are lower-cased into tag names; str values become text nodes, lists
    fan out into repeated sibling elements, nested dicts recurse, and any
    other value type produces an empty element. Returns parent (None-safe).
    """
    if parent is None:
        return parent
    for raw_name, value in a_dict.items():
        tag = raw_name.lower()
        if isinstance(value, str):      # simple string
            ET.SubElement(parent, tag).text = value
        elif isinstance(value, list):   # array
            for item in value:
                ET.SubElement(parent, tag).text = item
        elif isinstance(value, dict):   # dictionary
            add_dict_as_xml(ET.SubElement(parent, tag), value)
        else:                           # anything else: empty element
            ET.SubElement(parent, tag)
    return parent
| echopen/PRJ-medtec_kit | doc/doc_builder/src/xml_helper.py | xml_helper.py | py | 4,938 | python | en | code | 17 | github-code | 36 |
16068140151 | from unittest.mock import patch
from family_foto import Role, add_user
def mock_user(test_case, user_name, role_name, active=None) -> None:
    """
    Mocks a user on current_user.

    :param test_case: test case class, where the user should be mocked
    :param user_name: name of the mocked user
    :param role_name: string of the role name
    :param active: if the user is already approved
    """
    # Patch flask-login's user lookup; the patcher is stored on the test case
    # so it can be stopped in tearDown.
    test_case.patcher = patch('flask_login.utils._get_user')
    test_case.mock_current_user = test_case.patcher.start()
    role = Role.query.filter_by(name=role_name).first()
    user = add_user(user_name, '1234', [role])
    mock = test_case.mock_current_user
    mock.return_value = user
    mock.id = user.id
    mock.active = active if active is not None else user.active
| Segelzwerg/FamilyFoto | tests/test_utils/mocking.py | mocking.py | py | 820 | python | en | code | 8 | github-code | 36 |
19499350817 | from sqlite3 import *
from typing import Union
class DB:
    """Thin async-facing wrapper around a local SQLite database of chat users.

    Note: the methods are declared async for caller convenience but execute
    synchronous sqlite3 calls on a single shared connection/cursor.
    """

    def __init__(self):
        # One shared connection/cursor; the table is created lazily on startup.
        self.db = connect("app.db")
        self.cr = self.db.cursor()
        self.cr.execute("create table if not exists `users`(user_id INTEGER, username TEXT, chat_name TEXT, "
                        "chat_username TEXT, chat_id INTEGER)")

    def __del__(self):
        # Best-effort close when the wrapper is garbage-collected.
        self.db.close()

    async def get_user(
            self, user_id=None, username=None
    ) -> list:
        """Fetch rows by user_id when given (truthy), otherwise by username."""
        if user_id:
            self.cr.execute("select * from `users` where user_id = ?", (user_id,))
        else:
            self.cr.execute("select * from `users` where username = ?", (username,))
        return self.cr.fetchall()

    async def get_all(
            self
    ) -> list:
        """Fetch every stored row."""
        self.cr.execute("select * from users")
        return self.cr.fetchall()

    async def insert_user(
            self,
            user_id: Union[str, int],
            username: str,
            chat_id: Union[str, int],
            chat_name: str,
            chat_username: str
    ):
        """Insert a (user, chat) pair unless that exact pair already exists."""
        self.cr.execute("select * from `users` where user_id = ? and chat_id = ?", (user_id, chat_id))
        if self.cr.fetchall():
            return
        self.cr.execute(
            "insert into `users`(user_id, username, chat_id, chat_name, chat_username) values(?, ?, ?, ?, ?)",
            (user_id, username, chat_id, chat_name, chat_username)
        )
        self.db.commit()
| cytoo/TgGroupScanner | bot/mods/sql.py | sql.py | py | 1,485 | python | en | code | 18 | github-code | 36 |
27337287373 | import os
# redis db connection and cookie-pool sizing
REDIS_URL = os.environ.get('REDIS_URL') or 'http://redis:6379/1'
# NOTE(review): redis connection URLs normally use the 'redis://' scheme —
# confirm the consumer of REDIS_URL really expects 'http://'.
REDIS_SET_NAME = 'cookiejars'
COOKIES_POOL_SIZE = 10
# query-result cache lifetime in seconds (24 hours)
RESULT_EXPIRE = 24*60*60
# for selenium: chromedriver binary and the site the driver starts on
CHROME_DRIVER_PATH = '/Users/apple/phantomjs-2.1.1-macosx/bin/chromedriver'
CHROME_BASE_URL = "https://www.sogou.com/"
# for server: bind address, port and worker-process count
IP = '0.0.0.0'
PORT = 6789
WORKER = 4
# download: per-request timeout (seconds) and retry count
TIMEOUT = 10
RETRY = 3
| WeiEast/mobile_query | config/config.py | config.py | py | 414 | python | en | code | 0 | github-code | 36 |
43660032355 | '''
a = 2
b = 4
if a > b:
print ("Hola")
elif b > a:
print ("Hola2")
# Una tupla no se incrementa, es estatico, un arreglo si.
# (1,2,3) TUPLA
# [1,2,3...] ARREGLO/LISTA
c = (2,3,4)
print(type(c))
d = [3,6,4]
print(type(d))
'''
# ------------------------------------- EJERCICIO -------------------------------------
'''
Averiguar si un ano es bisiesto o no. Si el ano no es divisible entre 4, es un anio comun. Si el anio no es divisible por 100, es un ano bisiesto
o si no, si el numero no es divisible entre 400, es un ano comun, si no, es un ano bisiesto.
Tip, usa != y %.
'''
'''
ano = int (input("Dame el ano "))
if ano < 1582:
print("No gregoriano")
elif ano % 4 != 0:
print("Common year")
elif ano % 100 != 0:
print ("Leap year")
elif ano % 400 != 0:
print ("Common year")
else:
print ("Leap year")
# Las asignaciones se hacen en paralelo, no en serie.
# No do while.
'''
'''
a = 1
i = 1
while a > 0:
i = i+1
print(i)
'''
# Count even/odd entries read from the user until a 0 sentinel is entered.
even_numbers = 0
odd_numbers = 0
number = int(input("Enter a number"))
while number != 0:
    # Classify BEFORE reading the next value: previously the first entry was
    # never classified and the terminating 0 was (wrongly) counted as even.
    if number % 2 == 0:
        even_numbers += 1
    else:
        odd_numbers += 1
    number = int(input("Enter a number"))
print ("Odd numbers:", odd_numbers)
print ("Even numbers:", even_numbers)

# Countdown demo: a non-zero int is truthy, so the loop stops at 0.
counter = 3
while counter:
    print (counter)
    counter -= 1
print ("outside the loop")
| UP210878/up210878_dsa | Notes, tests, misc/M3_1.py | M3_1.py | py | 1,441 | python | es | code | 0 | github-code | 36 |
8574459364 | from django.shortcuts import render, redirect, get_object_or_404
from users.models import Profile
from .models import *
from addproject.models import *
from datetime import datetime
from django.shortcuts import render, redirect
from addproject.models import *
import json
import datetime
from django.http import JsonResponse
from addproject.models import *
from users import *
import sys, os
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from addproject import models
from django.core.paginator import Paginator
def showmain(request):
    """Render the main page: today's deadlines, followed projects and recent posts."""
    # Only the logged-in user's entries ending today, nearest deadline first.
    calendar = Calendar.objects.filter(writer=request.user, endday__contains=datetime.date.today(
    )).order_by('endday')
    projects = Project.objects.filter(followers=request.user)
    posts = Post.objects.all().order_by('-day')
    # Removed a dead 'Profile.objects.all()' queryset that was never used.
    return render(request, 'mateapp/mainpage.html', {'calendar': calendar, 'projects': projects, 'posts': posts})
def showevent(request):
    """AJAX endpoint: return up to two of the user's events for one day of the current month.

    The POST body is a JSON-encoded day number. The response keeps the legacy
    payload shape with numbered keys (title1/title2, startday1/..., color2)
    and a status of "exist1", "exist2" or "null". Non-POST requests now get a
    "null" payload instead of crashing on an unbound context variable.
    """

    def _event_fields(entry, slot):
        """Map one Calendar entry (or None) onto the numbered context keys."""
        if entry is None:
            return {
                "title" + slot: None, "startday" + slot: None, "endday" + slot: None,
                "starttime" + slot: None, "endtime" + slot: None,
                "place" + slot: None, "body" + slot: None, "color" + slot: None,
            }
        return {
            "title" + slot: entry.title, "startday" + slot: entry.startday,
            "endday" + slot: entry.endday, "starttime" + slot: entry.starttime,
            "endtime" + slot: entry.endtime, "place" + slot: entry.place,
            "body" + slot: entry.body, "color" + slot: entry.color,
        }

    context = {"status": "null"}
    if request.method == 'POST':
        date_num = json.loads(request.body)
        today = datetime.date.today()
        events = Calendar.objects.filter(
            writer=request.user,
            endday__contains=datetime.date(today.year, today.month, int(date_num))
        ).order_by('endday')
        count = events.count()
        if count >= 1:
            context = {"status": "exist1" if count == 1 else "exist2"}
            context.update(_event_fields(events[0], "1"))
            context.update(_event_fields(events[1] if count >= 2 else None, "2"))
    return JsonResponse(context)
def login(request):
    """Show the main page for authenticated users, otherwise the login form."""
    if not request.user.is_authenticated:
        return render(request, 'account/login.html')
    projects = Project.objects.all()
    return render(request, 'mateapp/mainpage.html', {'projects': projects})
def create_schedule(request):
    """GET: render the schedule form; POST: persist a new Calendar entry and return to the calendar."""
    projecttitles = Project.objects.filter(writer=request.user)
    if request.method != 'POST':
        new_schedule = Calendar.objects.all()
        return render(request, 'mateapp/create_schedule.html',
                      {'new_schedule': new_schedule, 'projecttitles': projecttitles})
    entry = Calendar()
    entry.title = request.POST['title']
    entry.writer = request.user
    entry.body = request.POST['body']
    entry.startday = request.POST.get('startday')
    entry.endday = request.POST.get('endday')
    entry.starttime = request.POST.get('starttime')
    entry.endtime = request.POST.get('endtime')
    entry.place = request.POST['place']
    entry.save()
    return redirect('mateapp:calendar')
def calendar(request):
    """Render the calendar page with the current user's schedule entries.

    The template historically receives the same data under several names
    ('calendar', 'calendars', 'schedules_list', 'todos_list'); all keys are
    kept for backward compatibility, but they now share one queryset instead
    of issuing four identical DB filters.
    """
    user_schedules = Calendar.objects.filter(writer=request.user)
    projects = Project.objects.all()
    context = {
        'projects': projects,
        'todos_list': [user_schedules],
        'calendar': user_schedules,
        'schedules_list': [user_schedules],
        'calendars': user_schedules,
    }
    return render(request, 'mateapp/calendar.html', context)
# 리스트 자체를 렌더링함
def timetable(request):
    """Store the uploaded timetable image on the requesting user's profile, then go home."""
    if request.method == "POST":
        # Load only this user's profile and persist just the changed field.
        profile = Profile.objects.get(user=request.user)
        profile.timetable = request.FILES.get('timetable')
        profile.save(update_fields=['timetable'])
    return redirect('mateapp:showmain')
def project_detail(request, project_id):
    """Render a project's page with its posts, paginated 4 per page."""
    projects = Project.objects.filter(followers=request.user)
    project = Project.objects.get(pk=project_id)
    posts = Post.objects.all()
    project_posts = Post.objects.filter(project=project)
    # Removed an unused 'Comment.objects.filter()' queryset that was never referenced.
    page = int(request.GET.get('p', 1))
    boards = Paginator(project_posts, 4).get_page(page)
    return render(request, 'mateapp/project.html',
                  {'boards': boards, 'projects': projects, 'project': project,
                   'posts': posts, 'post': project_posts})
# 포스트가 갖고 있는 숫자가 가장 높은걸 필터로 찾아서 오늘 날짜와 비교해서 출력함
# 게시물 CRUD
def create_post(request, project_id):
    """Create a post under the given project from the submitted form.

    Always redirects back to the project page: previously non-POST requests
    fell through and the view returned None, which Django rejects with a
    server error. Unused querysets (all projects/posts, today's date) were
    also dropped.
    """
    project = Project.objects.get(pk=project_id)
    if request.method == "POST":
        Post.objects.create(
            title=request.POST['title'],
            user=request.user,
            project=project,
            body=request.POST['body'],
        )
    return redirect('mateapp:project_detail', project_id)
def create_comment(request, project_id, post_id):
    """Attach a comment to a post, then return to the post detail page.

    Uses a single get_object_or_404 lookup (the previous extra .get() calls
    raised 500 instead of 404 for missing objects) and redirects for non-POST
    requests instead of returning None.
    """
    if request.method == "POST":
        post = get_object_or_404(Post, pk=post_id)  # 404 instead of 500 when missing
        Comment.objects.create(content=request.POST['content'], post=post, user=request.user)
        # NOTE(review): the form's uploaded 'file' (request.FILES.get('file'))
        # was read but never stored before; confirm whether comment
        # attachments were meant to be saved somewhere.
    return redirect('mateapp:post_detail', project_id, post_id)
# id는 식별값이기 때문에 무조건 존재하는 필드임
def post_detail(request, project_id, post_id):
    """Render a single post with its comments, paginated 4 per page."""
    project = Project.objects.get(pk=project_id)
    post = Post.objects.get(pk=post_id)
    comments = Comment.objects.filter(post=post)
    page_number = int(request.GET.get('p', 1))
    boards = Paginator(comments, 4).get_page(page_number)
    return render(request, 'mateapp/project_post.html',
                  {'boards': boards, 'project': project, 'post': post, 'comments': comments})
| SeongJoon-K/Runningmate | runningmate/mateapp/views.py | views.py | py | 9,157 | python | en | code | null | github-code | 36 |
15121359842 | import sys
import os
sys.path.append(os.path.join(sys.path[0], '../../bindings/python/'))
sys.path.append(os.path.join(sys.path[0], '../../bin/'))
import pySmartIdEngine
def output_recognition_result(result):
    """Pretty-print a SmartID RecognitionResult to stdout.

    Prints the document type, every matched template zone, raw segmentation
    field names, recognized string/image fields (with an accept flag) and
    whether the result is terminal. The exact format-string spacing is kept
    as-is to preserve the console column alignment.
    """
    print('Document type: %s' % result.GetDocumentType())
    print('Match results:')
    match_results = result.GetMatchResults()
    for i in range(len(match_results)):
        match_result_quad = match_results[i].GetQuadrangle()
        print('    Template Type = %s' % match_results[i].GetTemplateType())
        # Four (x, y) corners of the matched zone quadrangle.
        print('    Zone = { (%f, %f), (%f, %f), (%f, %f), (%f, %f) }' % (
            match_result_quad.GetPoint(0).x, match_result_quad.GetPoint(0).y,
            match_result_quad.GetPoint(1).x, match_result_quad.GetPoint(1).y,
            match_result_quad.GetPoint(2).x, match_result_quad.GetPoint(2).y,
            match_result_quad.GetPoint(3).x, match_result_quad.GetPoint(3).y))
    print('Segmentation results:')
    segmentation_results = result.GetSegmentationResults()
    for i in range(len(segmentation_results)):
        raw_fields_names = segmentation_results[i].GetRawFieldsNames()
        for j in range(len(raw_fields_names)):
            print('    Raw field %s' % raw_fields_names[j])
    print('String fields:')
    string_field_names = result.GetStringFieldNames()
    for i in range(len(string_field_names)):
        field = result.GetStringField(string_field_names[i])
        # '[+]' marks fields the engine accepted, '[-]' fields it rejected.
        is_accepted = ' [+] ' if field.IsAccepted() else ' [-] '
        print('    %-20s %s %s' % (field.GetName(), is_accepted, field.GetUtf8Value()))
    print('Image fields:')
    image_field_names = result.GetImageFieldNames()
    for i in range(len(image_field_names)):
        field = result.GetImageField(image_field_names[i])
        is_accepted = ' [+] ' if field.IsAccepted() else ' [-] '
        print('    %-20s %s W: %d H: %d' % (field.GetName(), is_accepted,
                                            field.GetValue().GetWidth(), field.GetValue().GetHeight()))
    print('Result terminal: %s' % (' [+] ' if result.IsTerminal() else ' [-] '))
def main():
    """Demonstrate the four ways to feed an image into a SmartID session.

    Expects three CLI arguments (image path, config bundle, document type),
    then recognizes the same image via: an Image object, a file path, a raw
    binary buffer, and a Base64 string, printing each result.
    """
    if len(sys.argv) != 4:
        print('Usage: %s <path-to-image-file> <path-to-config-bundle> <document-type>' % sys.argv[0])
        print('Example: %s ../testdata/image.png ../data-zip/bundle_mrz.zip mrz.*' % sys.argv[0])
        sys.exit(-1)
    image_path, config_path, document_types = sys.argv[1:]
    print('RecognitionEngine Version: %s' % pySmartIdEngine.RecognitionEngine.GetVersion())
    print('Creating RecognitionEngine from %s' % config_path)
    engine = pySmartIdEngine.RecognitionEngine(config_path)
    session_settings = engine.CreateSessionSettings()
    # Restrict recognition to the requested document type(s).
    set_enabled_document_types = pySmartIdEngine.StringVector()
    set_enabled_document_types.append(document_types)
    session_settings.SetEnabledDocumentTypes(set_enabled_document_types)
    # load image
    testImage = pySmartIdEngine.Image()
    # This works only in trial and full versions
    # testImage = pySmartIdEngine.Image(image_path)
    # get file as binary buffer
    # NOTE(review): binaryRGBfile is never closed — consider a 'with open(...)'.
    binaryRGBfile = open(image_path, "rb")
    size = os.path.getsize(image_path)
    binaryRGBfileBuffer = bytearray(size)
    binaryRGBfile.readinto(binaryRGBfileBuffer)
    # get raw RGB data from file
    # You can take image buffer directly from Image object, works in trial and full versions
    # bufferSizeForRGB = testImage.GetHeight()*testImage.GetStride()*testImage.GetChannels()
    # bufferForRGB = bytearray(bufferSizeForRGB)
    # testImage.CopyToBuffer(bufferForRGB, bufferSizeForRGB)
    # get file as Base64 string (pre-generated next to this script)
    f = open(os.path.join(sys.path[0], "base64.txt"), 'r')
    base64BufferString = f.readline()
    f.close()
    # You can take base64 buffer directly from Image object
    # base64BufferSize = testImage.GetRequiredBase64BufferLength()
    # base64Buffer = bytearray(base64BufferSize)
    # testImage.CopyBase64ToBuffer(base64Buffer, base64BufferSize)
    # base64BufferString = base64Buffer.decode()
    # base64BufferString = str(base64BufferString) #explicit conversion need for Python 2
    # or you can take base64 string directly by
    # base64BufferString = testImage.GetBase64String()
    # start recognition session; Reset() clears session state between inputs
    session = engine.SpawnSession(session_settings)
    resultFromImage = session.ProcessImage(testImage)
    session.Reset()
    resultFromImageFile = session.ProcessImageFile(image_path)
    session.Reset()
    resultFromImageData = session.ProcessImageData(binaryRGBfileBuffer, size)
    session.Reset()
    resultFromBase64 = session.ProcessImageDataBase64(base64BufferString)
    session.Reset()
    # resultFromRawBuffer = session.ProcessSnapshot(bufferForRGB, bufferSizeForRGB, testImage.GetWidth(), testImage.GetHeight(), testImage.GetStride(), testImage.GetChannels())
    # print result for each input path
    output_recognition_result(resultFromImage)
    output_recognition_result(resultFromImageFile)
    output_recognition_result(resultFromImageData)
    output_recognition_result(resultFromBase64)
    # output_recognition_result(resultFromRawBuffer)
if __name__ == '__main__':
    main()
| SmartEngines/SmartIDReader-Server-SDK | samples/python/smartid_sample.py | smartid_sample.py | py | 5,119 | python | en | code | 5 | github-code | 36 |
75174790505 | from vtk import *
# input data, every row is for a different item
# Per-item data: position, orientation vector and RGB colour of each cylinder.
positions = [[0, 0, 0],[1.5, 0, 0]]
orientations = [[1.0, 0.0, 0.0],[0.0, 1.0, 1.0]]
colors = [[255, 0, 0],
          [0, 255, 255]]
# NOTE(review): 'heights' is defined but never used below — the glyph source
# height is hard-coded to 0.8; confirm whether per-item heights were intended.
heights = [1,
           2]
# rendering of those two defined cylinders
# Build a polydata with one point per cylinder.
points = vtkPoints()
points.InsertNextPoint(*positions[0])
points.InsertNextPoint(*positions[1])
polydata = vtkPolyData()
polydata.SetPoints(points)
# Attach per-point RGB colours as scalars (drives ColorByScalar below).
color_def = vtkUnsignedCharArray()
color_def.SetNumberOfComponents(3)
color_def.SetNumberOfTuples(polydata.GetNumberOfPoints())
color_def.InsertTuple3(0, *colors[0])
color_def.InsertTuple3(1, *colors[1])
polydata.GetPointData().SetScalars(color_def)
# Attach per-point normals (drives UseNormal orientation below).
pointNormalsArray = vtkDoubleArray()
pointNormalsArray.SetNumberOfComponents(3)
pointNormalsArray.SetNumberOfTuples(polydata.GetNumberOfPoints())
pointNormalsArray.SetTuple(0, orientations[0])
pointNormalsArray.SetTuple(1, orientations[1])
polydata.GetPointData().SetNormals(pointNormalsArray)
# The glyph template: a single cylinder shape copied to every point.
cyl_source = vtkCylinderSource()
cyl_source.SetResolution(10)
cyl_source.SetHeight(0.8)
cyl_source.SetRadius(0.1)
cyl_source.Update()
# Glyph filter: place one cylinder per point, coloured and oriented per point.
glyph = vtkGlyph3D()
glyph.SetInputData(polydata)
glyph.SetSourceConnection(cyl_source.GetOutputPort())
glyph.SetColorModeToColorByScalar()
glyph.SetVectorModeToUseNormal()
glyph.ScalingOff()
# Standard mapper/actor/renderer/interactor pipeline.
mapper = vtkPolyDataMapper()
mapper.SetInputConnection(glyph.GetOutputPort())
actor = vtkActor()
actor.SetMapper(mapper)
ren = vtkRenderer()
ren.AddActor(actor)
# NOTE(review): 'vtk.vtkRenderWindow()' mixes the 'vtk.' prefix with the
# star import above — this only works if the name 'vtk' is also in scope;
# consider a plain 'import vtk'.
renwin = vtk.vtkRenderWindow()
renwin.AddRenderer(ren)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renwin)
renwin.Render()
iren.Initialize()
iren.Start()
| squeakus/bitsandbytes | vtk/glyphpos.py | glyphpos.py | py | 1,632 | python | en | code | 2 | github-code | 36 |
3929257072 | import os
import picamera
import numpy as np
from picamera.array import PiMotionAnalysis
# A simple demo of sub-classing PiMotionAnalysis to construct a motion detector
MOTION_MAGNITUDE = 60 # minimum per-vector magnitude (0-255 after clipping) to count toward motion
MOTION_VECTORS = 10 # number of vectors above MOTION_MAGNITUDE required to report motion
class MyMotionDetector(PiMotionAnalysis):
    """Motion detector fed by the camera's H.264 motion-vector stream."""

    def analyse(self, a):
        """Report motion when enough vectors exceed the magnitude threshold.

        `a` is the structured motion-vector array supplied by
        PiMotionAnalysis, with per-macro-block 'x'/'y' components.
        """
        # Vector magnitude via Pythagoras' theorem. np.float was removed in
        # NumPy 1.24; builtin float (-> float64) is the documented replacement.
        magnitudes = np.sqrt(
            np.square(a['x'].astype(float)) +
            np.square(a['y'].astype(float))
        ).clip(0, 255).astype(np.uint8)
        # Count the number of vectors with a magnitude greater than our threshold.
        vector_count = (magnitudes > MOTION_MAGNITUDE).sum()
        if vector_count > MOTION_VECTORS:
            print('Detected motion!')
# Record to /dev/null while routing the motion-vector side stream into the
# detector; recording runs until interrupted (Ctrl+C) and is always stopped.
with picamera.PiCamera() as camera:
    camera.resolution = (1280, 720)
    camera.framerate = 24
    with MyMotionDetector(camera) as motion_detector:
        camera.start_recording(
            os.devnull, format='h264', motion_output=motion_detector)
        try:
            # wait_recording(1) services the encoder and re-raises its errors.
            while True:
                camera.wait_recording(1)
        finally:
            camera.stop_recording()
| waveform80/picamera_demos | motion_detect.py | motion_detect.py | py | 1,230 | python | en | code | 12 | github-code | 36 |
33452924265 | from tkinter import *
from tkinter import ttk
from tkinter import messagebox
import pandas as pd
import matplotlib.pyplot as plt
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
fcff_df = pd.read_excel('FCFF_analysis_filtered.xlsx', index_col=[0])
sgx_df = pd.read_csv('myData.csv', index_col=[1])
class StonksApp:
    def __init__(self, master):
        """Build the app window and render the first stock from the FCFF sheet.

        Args:
            master: the Tk root (or Toplevel) window this app attaches to.
        """
        self.master = master
        master.title("StonkApp")
        # Initialise app variables: start on the first ticker in fcff_df.
        self.idx = 0
        self.current_stock = fcff_df.index[self.idx]
        # Set up frame with charts and stats for the current stock.
        self.update_main_frame(self.generate_info_dict())
        # Set up frame for app buttons.
        self.update_buttons_frame()
def plot_chart(self, row: int, column: int, *args: pd.DataFrame, columnspan: int=2, title: str="", xlabel: str="", ylabel: str=""):
""" Function to plot graphs on same chart from dataframes passed into the function as arguments
:param row, column, and columnspan: variables for Tkinter grid styling
:param title, xlablel, ylabel: variables for matplotlib chart
:param *args: dataframes to be plotted onto chart
"""
# Setting up of chart
figure = plt.Figure(figsize=(6,5), dpi=70)
ax = figure.add_subplot(111)
line_graph = FigureCanvasTkAgg(figure, self.main_frame)
line_graph.get_tk_widget().grid(row=row, column=column, columnspan=columnspan)
# Plotting graphs
for df in args:
df.plot(kind='line', legend=True, ax=ax)
# Chart styling
ax.set_title(title)
ax.set_xlabel(xlabel)
ax.set_ylabel(ylabel)
def generate_info_dict(self) -> dict:
""" Function to generate a dictionary of info name and info value pairs to be displayed in app
:return: info_dict
"""
info_dict = {}
# Show name of stock
trading_code = self.current_stock.replace(".SI", "")
self.trading_name = sgx_df.loc[trading_code, "Trading Name"]
info_dict["Name"] = self.trading_name
# Show sector of stock
self.sector = sgx_df.loc[trading_code, "Sector"]
info_dict["Sector"] = self.sector
# Show wacc of stock
self.wacc = fcff_df.loc[self.current_stock, "WACC"]
info_dict["WACC"] = self.wacc
# Show fcf of stock
self.fcff = fcff_df.loc[self.current_stock, "FCFF"]
self.shares_out = fcff_df.loc[self.current_stock, "Shares outstanding"]
self.fcf = self.fcff/self.shares_out
info_dict["FCF"] = self.fcf
# Show fair value stat
self.fair_value = fcff_df.loc[self.current_stock, "Fair value"]
info_dict["Fair value"] = self.fair_value
# Show percentage undervalued stat
self.percentage_undervalued = fcff_df.loc[self.current_stock, "Percentage undervalued"]
info_dict["Percentage undervalued"] = self.percentage_undervalued
return info_dict
def update_main_frame(self, info_dict: dict):
""" Function to populate main frame """
self.main_frame = Frame(self.master)
self.main_frame.grid(row=0, column=0)
# Update variables
self.IS_df = pd.read_csv(f"Database/{self.current_stock}/IS.csv", index_col=[0])
self.BS_df = pd.read_csv(f"Database/{self.current_stock}/BS.csv", index_col=[0])
self.CF_df = pd.read_csv(f"Database/{self.current_stock}/CF.csv", index_col=[0])
# Graphs to be plotted
self.revenue_df = self.IS_df.loc["Revenue"]
self.revenue_df = self.revenue_df.astype(float)
self.operating_income_df = self.IS_df.loc["Operating Income"]
self.operating_income_df=self.operating_income_df.astype(float)
# Plot graph of revenue and operating income
self.plot_chart(0, 0, self.revenue_df, self.operating_income_df, title="", xlabel="Year", ylabel="")
# Display useful information
for i, key in enumerate(info_dict):
Label(self.main_frame, text= f"{key}: \n{info_dict[key]}", font='Helvetica 10').grid(row=(i//2)+1, column=i%2)
def update_buttons_frame(self):
""" Function to populate button frame with back, next, like, and watchlist buttons """
""" Arranges layout of buttons """
self.button_frame = Frame(self.master)
# Back button
self.back_button = Button(self.button_frame, text="Back", command=lambda: self.next(self.idx - 1))
self.back_button.grid(row=0, column=0, pady="10", padx="10")
# Next button
self.next_button = Button(self.button_frame, text="Next", command=lambda: self.next(self.idx + 1))
self.next_button.grid(row=0, column=1)
# Like button
self.like_button = Button(self.button_frame, text="Like", command=self.like)
self.like_button.grid(row=1, column=0, pady="5", padx="10")
# Toggle like button if stock is in watchlist
self.toggle_like_button()
# Watchlist button
self.watchlist_button = Button(self.button_frame, text="Watchlist", command=self.watchlist)
self.watchlist_button.grid(row=1, column=1, pady="5", padx="10")
# Frame palcement
self.button_frame.grid(row=1, column=0)
def toggle_like_button(self):
""" Toggle like button based on whether self.current_stock is in watchlist """
with open("Cache/watchlist.txt", "r") as watchlist:
lines = watchlist.readlines()
if str(self.current_stock + '\n') in lines:
self.like_button.config(relief="sunken")
else:
self.like_button.config(relief="raised")
""" Functions to make buttons interactable """
def next(self, idx):
""" Function for next button to show next or previous stock """
# Update variables
self.idx = idx
self.current_stock = fcff_df.index[self.idx]
self.update_main_frame(self.generate_info_dict())
# Toggle like button based on whether stock is in watchlist
self.toggle_like_button()
def like(self):
""" Function for like button to add stock to watchlist """
if self.like_button.config('relief')[-1] == 'sunken':
self.like_button.config(relief="raised")
with open("Cache/watchlist.txt", "r") as f:
lines = f.readlines()
with open("Cache/watchlist.txt", "w") as f:
for line in lines:
if line.strip("\n") != self.current_stock:
f.write(line)
else:
with open("Cache/watchlist.txt", "a") as myfile:
myfile.write(f"{self.current_stock}\n")
self.like_button.config(relief="sunken")
def watchlist(self):
""" Function to see stocks in watchlist """
def view_watchlist_stock(stock):
""" Function for view button to look at selected stock """
watchlist_window.destroy()
self.master.deiconify()
self.update_main_frame(self.generate_info_dict())
self.current_stock = stock
self.update_buttons_frame()
#update self.idx to that of stock
self.idx = list(fcff_df.index).index(stock)
def delete_watchlist_stock(stock):
""" Function for delete button to delete selected stock """
with open("Cache/watchlist.txt", "r") as f:
lines = f.readlines()
with open("Cache/watchlist.txt", "w") as f:
for line in lines:
if line.strip("\n") != stock:
f.write(line)
idx = Lines.index(stock+'\n')
labels[idx].destroy()
view_buttons[idx].destroy()
delete_buttons[idx].destroy()
if len(lines) == 1:
Label(second_frame, text='Watchlist is currently empty', font='Helvetica 10').grid(column=0)
#untoggle like button on main window if stock on that window is removed from watchlist
if stock == self.current_stock:
self.update_buttons_frame()
def search():
""" Function for search button to search for a specified stock by its full ticker """
search_ticker = search_entry.get()
if search_ticker in fcff_df.index:
view_watchlist_stock(search_ticker)
else:
messagebox.showerror("Error","Sorry the ticker you entered was not found within this spreadsheet")
return
def on_closing():
""" Function to make main window reappear on closing of watchlist window """
watchlist_window.destroy()
self.master.deiconify()
def back_to_main_button_command():
""" Function to get back to main app when button is clicked"""
watchlist_window.destroy()
self.master.deiconify()
# Create new window over current window
self.master.withdraw() # hide main window
watchlist_window = Toplevel(self.master)
watchlist_window.protocol("WM_DELETE_WINDOW", on_closing) # make main window reappear on closing
watchlist_window.title("Watchlist")
watchlist_window.geometry("400x500")
# Create search bar
search_frame = Frame(watchlist_window)
search_frame.pack()
search_entry = Entry(search_frame)
search_entry.pack(side=LEFT)
search_button = Button(search_frame, text='Search', command=search)
search_button.pack(side=LEFT)
# Create a button to get back to main app
back_to_main_button = Button(watchlist_window, text="Back to main app", command=back_to_main_button_command)
back_to_main_button.pack(pady=5)
##### scroll button #####
# Create A Main Frame
main_frame = Frame(watchlist_window)
main_frame.pack(fill=BOTH, expand=1)
# Create A Canvas
my_canvas = Canvas(main_frame)
my_canvas.pack(side=LEFT, fill=BOTH, expand=1)
# Add A Scrollbar To The Canvas
my_scrollbar = ttk.Scrollbar(main_frame, orient=VERTICAL, command=my_canvas.yview)
my_scrollbar.pack(side=RIGHT, fill=Y)
# Configure The Canvas
my_canvas.configure(yscrollcommand=my_scrollbar.set)
my_canvas.bind('<Configure>', lambda e: my_canvas.configure(scrollregion=my_canvas.bbox("all")))
def _on_mouse_wheel(event):
my_canvas.yview_scroll(-1 * int((event.delta / 120)), "units")
my_canvas.bind_all("<MouseWheel>", _on_mouse_wheel)
# Create ANOTHER Frame INSIDE the Canvas
second_frame = Frame(my_canvas)
# Add that New frame To a Window In The Canvas
my_canvas.create_window((0,0), window=second_frame, anchor="nw")
##### end of scroll bar #####
# Get list of stocks in watchlist
file1 = open('Cache/watchlist.txt', 'r')
Lines = file1.readlines()
if len(Lines) == 0:
Label(second_frame, text='Watchlist is currently empty', font='Helvetica 10').grid(column=0)
labels = [] # Create empty lists to reference which ones to delete later on
view_buttons = []
delete_buttons = []
# Display stocks in watchlist, with buttons to view or delete stock
for i in range(len(Lines)):
watchlist_stock_label = Label(second_frame, text=Lines[i], font='Helvetica 10')
watchlist_stock_label.grid(row=i, column=0)
watchlist_stock_button = Button(second_frame, text='View', command=lambda i=i: view_watchlist_stock(Lines[i].strip()))
watchlist_stock_button.grid(row=i, column=1)
delete_watchlist_stock_button = Button(second_frame, text='Remove', command=lambda i=i:delete_watchlist_stock(Lines[i].strip()))
delete_watchlist_stock_button.grid(row=i, column=2)
labels.append(watchlist_stock_label)
view_buttons.append(watchlist_stock_button)
delete_buttons.append(delete_watchlist_stock_button)
def settings(self):
pass
if __name__ == "__main__":
root = Tk()
StonksApp(root)
root.mainloop()
| yuliangod/StonksApp | 03_FCFFapp.py | 03_FCFFapp.py | py | 12,301 | python | en | code | 0 | github-code | 36 |
40597432388 | """General purpose tools get fenced code blocks from Markdown."""
from dataclasses import dataclass
from operator import attrgetter
from pathlib import Path
from typing import List, Optional
import phmutest.direct
import phmutest.reader
import phmutest.select
from phmutest.direct import Marker
class FCBChooser:
    """Select Markdown fenced code blocks that satisfy caller criteria."""

    def __init__(self, markdown_filename: str):
        """Collect every fenced code block from the Markdown file.

        Args:
            markdown_filename:
                Path to the Markdown file as a string.
        """
        self.all_blocks = phmutest.select.configure_block_roles(
            skips=[], markdown_file=Path(markdown_filename)
        )

    def select(
        self, *, label: str = "", info_string: Optional[str] = None, contains: str = ""
    ) -> List[str]:
        """Return the contents of every FCB that satisfies all criteria.

        Args:
            label
                FCB has phmutest label directive 'label'. Empty string means
                select all FCBs (default).
            info_string
                FCB info string matches 'info_string'. Empty string means match
                FCBs with no info string. None means select all FCBs (default).
            contains
                FCB contents have substring 'contains'. Empty string means
                select all FCBs (default).

        Returns:
            List of strings, in file order, of the contents of selected FCBs.
            Empty list if no matches are found.
            Fenced code block strings typically end with a newline.
        """
        # Apply each active filter in turn; an empty/None criterion is a no-op.
        candidates = list(self.all_blocks)
        if label:
            candidates = [
                block
                for block in candidates
                if any(
                    d.type == Marker.LABEL and d.value == label
                    for d in block.directives
                )
            ]
        if info_string is not None:
            candidates = [b for b in candidates if b.info_string == info_string]
        if contains:
            candidates = [b for b in candidates if contains in b.contents]
        # Present survivors in file order.
        candidates.sort(key=attrgetter("line"))
        return [b.contents for b in candidates]

    def contents(self, label: str = "") -> str:
        """Return contents of the fenced code block carrying 'label'.

        This works the same as phmdoctest.tool.FCBChooser.contents().

        Args:
            label
                FCB has phmutest label directive 'label'.

        Returns:
            Contents of the labeled fenced code block as a string
            or empty string if the label is not found. Fenced code block
            strings typically end with a newline.
        """
        matches = self.select(label=label)
        if not matches:
            return ""
        return matches[0]
@dataclass
class LabeledFCB:
    """Information about a fenced code block that has a label directive.

    Fix: the original placed this string *after* the fields, where it is a
    discarded expression statement rather than the class docstring; it now
    actually populates ``LabeledFCB.__doc__``.
    """

    label: str  # the label directive's value
    line: str  # Markdown file line number of block contents
    contents: str  # fenced code block contents
def labeled_fenced_code_blocks(markdown_filename: str) -> List[LabeledFCB]:
    """Return Markdown fenced code blocks that have label directives.

    Label directives are placed immediately before a fenced code block
    in the Markdown source file. The directive can be placed before any
    fenced code block.

    The label directive is the HTML comment `<!--phmutest-label VALUE-->`
    where VALUE is a string with no embedded whitespace.
    The space before VALUE must be present.
    If there is more than one label directive on the block, the
    label value that occurs earliest in the file is used.

    Args:
        markdown_filename
            Path to the Markdown file.

    Returns:
        List of LabeledFCB objects.

        LabeledFCB is has these fields:

        - label is the value of a label directive
          placed in a HTML comment before the fenced code block.
        - line is the line number in the Markdown file where the block
          starts.
        - contents is the fenced code block contents as a string.
    """
    result: List[LabeledFCB] = []
    for node in phmutest.reader.fcb_nodes(markdown_filename):
        # First LABEL directive (earliest in the file) wins for this block.
        label_directive = next(
            (
                d
                for d in phmutest.direct.get_directives(node)
                if d.type == Marker.LABEL
            ),
            None,
        )
        if label_directive is not None:
            result.append(
                LabeledFCB(
                    label=label_directive.value,
                    line=str(label_directive.line),
                    contents=node.payload,
                )
            )
    return result
def fenced_code_blocks(markdown_filename: str) -> List[str]:
    """Return the contents of every Markdown fenced code block, in file order.

    Args:
        markdown_filename
            Path to the Markdown file.

    Returns:
        List of strings, one for the contents of each Markdown
        fenced code block.
    """
    return [node.payload for node in phmutest.reader.fcb_nodes(markdown_filename)]
| tmarktaylor/phmutest | src/phmutest/tool.py | tool.py | py | 5,339 | python | en | code | 0 | github-code | 36 |
70677233704 | """
Filename: calc_volcello.py
Author: Damien Irving, irving.damien@gmail.com
Description: Calculate the CMIP5 volcello variable
"""
# Import general Python modules
import sys, os, pdb
import argparse
import numpy
import iris
# Import my modules
# Walk up from the current working directory to locate the repo root
# ('ocean-analysis') and put its modules/ directory on the import path,
# so the script can be run from anywhere inside the repository.
cwd = os.getcwd()
repo_dir = '/'
for directory in cwd.split('/')[1:]:
    repo_dir = os.path.join(repo_dir, directory)
    if directory == 'ocean-analysis':
        break
modules_dir = os.path.join(repo_dir, 'modules')
sys.path.append(modules_dir)
try:
    import general_io as gio
    import convenient_universal as uconv
    import spatial_weights
except ImportError:
    raise ImportError('Must run this script from anywhere within the ocean-analysis git repo')
# Define functions
def construct_volume_cube(volume_data, data_cube, global_atts):
    """Build the volcello cube by re-labelling a copy of the data cube.

    Args:
        volume_data: grid-cell volume values (m3) matching data_cube's shape.
        data_cube: template cube supplying coordinates/grid.
        global_atts: attribute dict to attach, or a falsy value to keep
            the template's attributes.
    """
    cube = data_cube.copy()
    cube.data = volume_data
    # CMIP metadata for the ocean grid-cell volume variable.
    metadata = {
        'standard_name': 'ocean_volume',
        'long_name': 'Ocean Grid-Cell Volume',
        'var_name': 'volcello',
        'units': 'm3',
        'cell_methods': (),
    }
    for attr_name, attr_value in metadata.items():
        setattr(cube, attr_name, attr_value)
    if global_atts:
        cube.attributes = global_atts
    return cube
def main(inargs):
    """Run the program."""
    # Depth data: load the dummy variable only to recover the grid and
    # layer-thickness information.
    data_cube = iris.load_cube(inargs.dummy_file, inargs.dummy_var)
    dim_coord_names = [coord.name() for coord in data_cube.dim_coords]
    aux_coord_names = [coord.name() for coord in data_cube.aux_coords]  # unused; kept for reference
    assert dim_coord_names[0] == 'time'
    depth_name = dim_coord_names[1]
    # Cell volume does not vary with time, so a single time slice suffices.
    data_cube = data_cube[0, ::]
    data_cube.remove_coord('time')
    depth_data = spatial_weights.get_depth_array(data_cube, depth_name)
    # Area data: read from file if supplied, otherwise derive from the grid.
    if inargs.area_file:
        area_cube = iris.load_cube(inargs.area_file, 'cell_area')
        gio.check_global_ocean_area(area_cube.data.sum())
        # Broadcast the 2D (lat, lon) areas up to the 3D depth shape.
        area_data = uconv.broadcast_array(area_cube.data, [1, 2], depth_data.shape)
    else:
        area_data = spatial_weights.area_array(data_cube)
    volume_data = depth_data * area_data
    if inargs.sftof_file:
        # Scale by the sea-area fraction (sftof is expressed in percent).
        sftof_cube = iris.load_cube(inargs.sftof_file)
        assert sftof_cube.data.max() == 100
        sftof_data = uconv.broadcast_array(sftof_cube.data, [1, 2], depth_data.shape)
        volume_data = volume_data * (sftof_data / 100.0)
    # Mask land cells using the dummy variable's missing-data mask.
    volume_data = numpy.ma.asarray(volume_data)
    data = numpy.ma.masked_invalid(data_cube.data)
    volume_data.mask = data.mask
    global_atts = area_cube.attributes if inargs.area_file else None
    volume_cube = construct_volume_cube(volume_data, data_cube, global_atts)
    volume_cube.attributes['history'] = gio.write_metadata()
    # Sanity check: the total should match the known global ocean volume.
    gio.check_global_ocean_volume(volume_cube.data.sum())
    iris.save(volume_cube, inargs.outfile)
if __name__ == '__main__':

    extra_info ="""
author:
    Damien Irving, irving.damien@gmail.com

"""

    # Command-line interface; SUPPRESS keeps unset optionals out of the
    # parsed namespace defaults.
    description='Calculate the CMIP volcello variable'
    parser = argparse.ArgumentParser(description=description,
                                     epilog=extra_info,
                                     argument_default=argparse.SUPPRESS,
                                     formatter_class=argparse.RawDescriptionHelpFormatter)

    parser.add_argument("dummy_file", type=str, help="Dummy file (for depth information)")
    parser.add_argument("dummy_var", type=str, help="Dummy variable")
    parser.add_argument("outfile", type=str, help="Output file name")

    parser.add_argument("--sftof_file", type=str, default=None,
                        help="Sea area fraction file name")
    parser.add_argument("--area_file", type=str, default=None,
                        help="Area file name (required for curvilinear grids, optional otherwise)")

    args = parser.parse_args()
    main(args)
| DamienIrving/ocean-analysis | data_processing/calc_volcello.py | calc_volcello.py | py | 3,841 | python | en | code | 9 | github-code | 36 |
39056791379 | from obspy import read
from numpy import r_,ones,zeros
# Input directory (un-shifted records) and output directory for the
# time-corrected waveforms.
path=u'/Users/dmelgar/Slip_inv/Chiapas_hernandez_new/data/waveforms/before_delta_t/'
outpath='/Users/dmelgar/Slip_inv/Chiapas_hernandez_new/data/waveforms/'
def delay_st(st, delta):
    """Return the first trace of ``st`` time-shifted by ``delta`` seconds.

    Positive delta delays the record (pad the start with the first sample,
    drop the tail); negative delta advances it (drop the head, pad the end
    with the last sample). The output length always equals the input length.

    Args:
        st: stream-like sequence whose first element has ``.data`` (array)
            and ``.stats.delta`` (sample interval in seconds).
        delta: time shift in seconds.
    """
    d = st[0].data
    npts = int(abs(delta) / st[0].stats.delta)
    if npts == 0:
        # BUG FIX: shifts smaller than one sample (including delta == 0)
        # previously fell into the delay branch, where d[0:-0] is the empty
        # slice and an empty array was returned. Return the data unchanged
        # (copied, to match the original's return-a-new-array behaviour).
        return d.copy()
    if delta < 0:
        # Advance: drop npts leading samples, hold the last value at the end.
        d = r_[d[npts:-1], d[-1] * ones(npts + 1)]
    else:
        # Delay: hold the first value at the start, drop npts trailing samples.
        d = r_[ones(npts) * d[0], d[0:-npts]]
    return d
# Apply a per-station timing correction and write the shifted SAC file.
# Station 43413: advance the record by 3 minutes.
sta='43413'
delta=-3*60.
e=read(path+sta+'.sac')
e[0].data=delay_st(e,delta)
e.write(outpath+sta+'.sac',format='SAC')

# Station huat: advance by 2 minutes.
sta='huat'
delta=-2*60.
e=read(path+sta+'.sac')
e[0].data=delay_st(e,delta)
e.write(outpath+sta+'.sac',format='SAC')

# Station ptan: advance by 2 minutes.
sta='ptan'
delta=-2*60.
e=read(path+sta+'.sac')
e[0].data=delay_st(e,delta)
e.write(outpath+sta+'.sac',format='SAC')
| Ogweno/mylife | chiapas2017/delay_waveforms_tsunami.py | delay_waveforms_tsunami.py | py | 781 | python | en | code | 0 | github-code | 36 |
70819605545 | #! /usr/bin/env python
from ppclass import pp
u = pp()
u.file = "/home/aymeric/Big_Data/DATAPLOT/diagfired.nc"
u.var = "u"
u.t = "0.5,0.8"
u.z = "10,20"
u.getdefineplot(extraplot=2) # prepare 2 extraplots (do not show)
u.p[0].proj = "ortho"
u.p[0].title = "$u$"
u.makeplot()
v = pp()
v << u # NB: initialize v object with u object's attributes
v.var = "v"
v.get()
v.plotin = u # plotin must be defined before .defineplot()
v.defineplot()
v.p[1].proj = "ortho"
v.p[1].title = "$v$"
v.makeplot() # plot within the previous one (do not show)
wind = u**2 + v**2
wind = wind**0.5
wind.plotin = v
wind.filename = "windspeed"
wind.defineplot()
wind.p[2].proj = "ortho"
wind.p[2].title = "$\sqrt{u^2+v^2}$"
wind.makeplot() # plot within the previous one (show because complete)
## in this case it is not possible to make another plot afterwards...
| aymeric-spiga/planetoplot | examples/ppclass_reference/windspeed.py | windspeed.py | py | 847 | python | en | code | 10 | github-code | 36 |
74050723304 | from parlai_internal.projects.param_sweep_utils.param_sweep import run_grid
import time
import os
# Sweep identity derived from this file's name and the current timestamp;
# results land under /checkpoint/<projects path><date>.
SCRIPT_NAME = os.path.basename(__file__).replace(".py", "")
TODAY = format(time.asctime().replace(":", "-").replace(" ", "_")[:-14])
SWEEP_NAME = f"{SCRIPT_NAME}{TODAY}"
here_path = os.path.realpath(__file__).replace(".py", "")
projects = here_path[here_path.find("/projects") :]
SAVEROOT = "/checkpoint/" + projects + TODAY
HOURS = 23
GPUS = 8

# Task-oriented dialogue teachers, grouped by data quality (see inline notes).
TEACHERS_NO_GSGD_GOOD = [
    "fb:taskmaster1:SystemTeacher",
    "parlai_fb.tasks.taskmaster2.formatted_agents:SystemTeacher",
    "fb:taskmaster3:SystemTeacher",
    "fb:msr_e2e:SystemTeacher",
    "parlai_fb.tasks.taskmaster2.formatted_agents:UserSimulatorTeacher",
    "fb:taskmaster3:UserSimulatorTeacher",
    "fb:msr_e2e:UserSimulatorTeacher",
    "fb:multiwoz_tod:UserSimulatorTeacher",
    "fb:multidogo:UserSimulatorTeacher",
]

TEACHERS_NO_GSGD_FUNKY = [
    "fb:metalwoz_internal:SystemTeacher",  # also without the STANDARD_ whatevers, so could be interesting.
    "fb:multiwoz_tod:SystemTeacher",  # API responses makes no sense
    "fb:multidogo:SystemTeacher",  # API responses make no sense
    "fb:metalwoz_internal:UserSimulatorTeacher",  # also without the STANDARD_ whatevers, so could be interesting.
    "fb:taskmaster1:UserSimulatorTeacher",  # no goals
]

TEACHER_GSGD = [
    "parlai_fb.tasks.google_sgd_rl_splits.agents:InDomainUserSimulatorTeacher",
    "parlai_fb.tasks.google_sgd_rl_splits.agents:InDomainSystemTeacher",
]

ALL_TEACHERS = TEACHER_GSGD + TEACHERS_NO_GSGD_GOOD + TEACHERS_NO_GSGD_FUNKY
ALL_TEACHERS_NO_GSGD = TEACHERS_NO_GSGD_GOOD + TEACHERS_NO_GSGD_FUNKY
ALL_GOOD_TEACHERS = TEACHER_GSGD + TEACHERS_NO_GSGD_GOOD

# Multitask mixtures actually swept (commented entries were tried/parked).
TEACHER_OPTIONS = [
    ",".join(ALL_TEACHERS),
    #    ",".join(ALL_TEACHERS_NO_GSGD),
    ",".join(ALL_GOOD_TEACHERS),
    #    ",".join(TEACHERS_NO_GSGD_GOOD),
    ",".join(TEACHER_GSGD),
]

print(TEACHER_OPTIONS[0])

# Define param grid
grid = {
    # dataset params
    "-t": TEACHER_OPTIONS,
    "--api-descriptions": [True, False],
    # other params
    "--model": ["parlai_fb.agents.bart.r3f:R3fFirstTurnHistoryRepeatAgent"],
    "--fp16": [True],
    "--label-truncate": [512],
    "--log-every-n-secs": [30],
    "--lr-scheduler": ["invsqrt"],
    "--max-lr-steps": [-1],
    "--max-train-steps": [-1],
    "--optimizer": ["adam"],
    "--save-after-valid": [True],
    "--text-truncate": [512],
    "--warmup-updates": [1000],
    "--fp16-impl": ["mem_efficient"],
    "--gradient-clip": [0.1],
    "--skip-generation": [True],
    "-vp": [8],
    # Stop shortly before the slurm job time limit so checkpoints flush.
    "--max-train-time": [HOURS * 60 * 60 - 30 * 60],
    "--load-from-checkpoint": ["true"],
    "-vmt": ["token_em -vmm max"],
    "--multitask-weights": ["stochastic"],
    # Sweeping params
    "--batchsize": [4],
    "--update-freq": [8],
    "-lr": [1e-4],
    "-vstep": [1000],
}
if __name__ == "__main__":
run_grid(
grid=grid,
name_keys={},
sweep_name=SWEEP_NAME,
saveroot=SAVEROOT,
prefix="python -u -m parlai.scripts.distributed_train",
partition="learnlab",
jobtime=f"{HOURS}:00:00",
gpus=8,
nodes=1,
create_model_file=True,
requeue=True,
include_job_id=False,
volta32=True,
hashname=True,
mem_gb=400,
email_updates=True,
wandb=True,
)
| facebookresearch/ParlAI | projects/tod_simulator/sweeps/pretrain_all.py | pretrain_all.py | py | 3,356 | python | en | code | 10,365 | github-code | 36 |
class Solution:
    def smallestDivisor(self, nums: List[int], threshold: int) -> int:
        """Return the smallest divisor d >= 1 such that
        sum(ceil(x / d) for x in nums) <= threshold.

        Binary search over d: the ceiling-sum is non-increasing in d, so the
        feasible region is [answer, max(nums)].
        """

        def fits(divisor: int) -> bool:
            # Sum of ceilings, bailing out early once the threshold is
            # exceeded.
            # BUG FIX: the original returned True as soon as the *partial*
            # sum was <= threshold -- i.e. almost always after the first
            # element, collapsing the search to divisor 1. The partial sum
            # only grows, so only *failure* can be concluded early.
            total = 0
            for x in nums:
                total += math.ceil(x / divisor)
                if total > threshold:
                    return False
            return total <= threshold

        lo, hi = 1, max(nums)
        while lo < hi:
            mid = (lo + hi) // 2
            if fits(mid):
                hi = mid
            else:
                lo = mid + 1
        return lo
12171333516 | # 2016년 요일 찾기
# 2016년 1월 1일은 금요일
# SUN,MON,TUE,WED,THU,FRI,SAT
from datetime import datetime
def solution(a, b):
    """Return the weekday abbreviation ('MON'..'SUN') for 2016-a-b.

    a is the month (1-12) and b is the day of the month; 2016 is fixed.
    """
    # datetime.weekday(): Monday == 0 ... Sunday == 6.
    weekday_names = ('MON', 'TUE', 'WED', 'THU', 'FRI', 'SAT', 'SUN')
    return weekday_names[datetime(2016, a, b).weekday()]
# 1년 중 첫 시작 요일을 알기 때문에
# 이전 달까지 모두 더하고, 일 수를 더한 후 7로 나눠주면 요일을 알 수 있다.
# 조건이 1년 내이고, 첫번째 날의 요일을 알기 때문에 이런 연산 방법이 더 적합할 것 같다.
def solution1(a, b):
    """Weekday for 2016-a-b by day-count arithmetic (2016-01-01 was FRI).

    Counts the days elapsed since Jan 1 (2016 is a leap year, hence the 29
    for February) and rotates through the week starting from Friday.
    """
    days_in_month = (31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31)
    day_names = ('FRI', 'SAT', 'SUN', 'MON', 'TUE', 'WED', 'THU')
    offset = sum(days_in_month[:a - 1]) + (b - 1)
    return day_names[offset % 7]
print(solution(5,24))
| hi-rev/TIL | Programmers/level_1/date.py | date.py | py | 899 | python | ko | code | 0 | github-code | 36 |
25951961583 | import sys
import base64, time, datetime
# Per-tag converters: each maps an *already-processed* ElementTree element
# to a Python value (children's .text have been replaced by parsed values
# by the time the parent's end event fires).
callbacks = {
    'array': lambda x: [v.text for v in x],
    'dict': lambda x:
        dict((x[i].text, x[i+1].text) for i in range(0, len(x), 2)),
    'key': lambda x: x.text or "",
    'string': lambda x: x.text or "",
    'data': lambda x: base64.b64decode(x.text),
    'date': lambda x:
        datetime.datetime(
            *(time.strptime(x.text, "%Y-%m-%dT%H:%M:%SZ")[0:6])),
    'real': lambda x: float(x.text),
    'integer': lambda x: int(x.text),
    'true': lambda x: True,
    'false': lambda x: False,
}


def _xml_plist_parse(xml_input, _iterparse):
    """Parse a plist document and return the top-level Python value.

    Works bottom-up on end events: each element's text is overwritten with
    its converted value, so parents see already-parsed children.
    """
    parser = _iterparse(xml_input)
    for action, element in parser:
        callback = callbacks.get(element.tag)
        if callback:
            data = callback(element)
            element.clear()
            element.text = data
        elif element.tag != 'plist':
            raise IOError("unknown plist tag: %s" % element.tag)
    # iterparse exposes the document root after iteration; the plist's
    # single child now carries the fully-converted value in .text.
    return parser.root[0].text


def parse_using_etree(xml_input):
    """Parse ``xml_input`` with the pure-Python ElementTree parser."""
    from xml.etree.ElementTree import iterparse as py_iterparse
    # BUG FIX: the result was previously discarded (implicit None return),
    # which also made the ``assert parse_using_cetree(...)`` in __main__
    # fail unconditionally.
    return _xml_plist_parse(xml_input, py_iterparse)


def parse_using_cetree(xml_input):
    """Parse ``xml_input`` with the C-accelerated parser where available."""
    try:
        # BUG FIX: xml.etree.cElementTree was removed in Python 3.9;
        # plain ElementTree has been C-accelerated since 3.3, so fall
        # back to it instead of crashing with ImportError.
        from xml.etree.cElementTree import iterparse as c_iterparse
    except ImportError:
        from xml.etree.ElementTree import iterparse as c_iterparse
    return _xml_plist_parse(xml_input, c_iterparse)
if __name__ == '__main__':
    # Driver: parse the plist file named on the command line with the
    # C-accelerated parser; assert that a truthy value came back.
    xmlin = open(sys.argv[1])
    try:
        assert parse_using_cetree(xmlin)
    finally:
        xmlin.close()
| ishikawa/python-plist-parser | tools/performance/etree_parser.py | etree_parser.py | py | 1,444 | python | en | code | 11 | github-code | 36 |
69905738984 | from django.shortcuts import render, redirect
from django.contrib import messages
from .forms import UserRegisterForm
def register(request):
    """Display the sign-up form; create the user on a valid POST.

    GET (or any non-POST) renders an empty form. A valid POST saves the
    new user, flashes a success message, and redirects to the goods list;
    an invalid POST re-renders the bound form with its errors.
    """
    if request.method != 'POST':
        form = UserRegisterForm()
    else:
        form = UserRegisterForm(request.POST)
        if form.is_valid():
            form.save()
            username = form.cleaned_data.get('username')
            messages.success(request, f'User {username} has been successfully created')
            return redirect('all-goods')
    context = {
        'title': 'Registration page',
        'form': form,
    }
    return render(request, 'users/registration.html', context)
}) | AlekanderOst/python-webstore-drakkar | users/views.py | views.py | py | 648 | python | en | code | 0 | github-code | 36 |
def adding():
    """Prompt for an item and append it to the 'list' file."""
    user_input = input("Enter the item you want to add: ")
    # 'with' guarantees the handle is closed even if the write raises
    # (the original relied on an explicit close with no try/finally).
    with open("list", 'a') as file:
        file.write(f"{user_input}\n")
    print("\nSuccessfully added to the list")
def viewing():
    """Print the entire contents of the 'list' file."""
    # The original never closed the handle; use a context manager instead.
    with open("list", 'r') as file:
        r = file.read()
    print(f"\n{r}")
def delete():
    """Prompt for an item and remove every matching line from the file."""
    input_ = input("Enter the item you want to delete: ")
    # Read everything first, then rewrite -- the original opened the write
    # handle while the read handle was still open and closed neither.
    with open("list", 'r') as file:
        lst = file.readlines()
    with open("list", 'w') as new_file:
        for item in lst:
            if item != f"{input_}\n":
                new_file.write(item)
    print("\nSuccessfully deleted from the list")
def what_to_do():
    """Prompt for a menu choice and dispatch to add/delete/view."""
    # Normalise case once so 'A'/'D'/'V' all work -- the original lowered
    # only the 'v' comparison. Prompt typo fixed ("delete and item").
    input_ = input(
        "Enter 'a' to add an item; Enter 'd' to delete an item; "
        "Enter 'v' to view the list\n"
    ).lower()
    # Each handler prints its own status and returns None; the original
    # wrapped the calls in print(), emitting a spurious "None" line.
    if input_ == "a":
        adding()
    elif input_ == "d":
        delete()
    elif input_ == "v":
        viewing()


what_to_do()
41204678431 | ############################################################
############################################################
#Reverse a linked list from position m to n. Do it in-place and in one-pass.
#
#For example:
#Given 1->2->3->4->5->NULL, m = 2 and n = 4,
#
#return 1->4->3->2->5->NULL.
############################################################
############################################################
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
    # @param A : head node of linked list
    # @param B : integer, 1-based position where the reversal starts
    # @param C : integer, 1-based position where the reversal ends
    # @return the head node in the linked list
    def reverseBetween(self, A, B, C):
        """Reverse the sublist occupying positions B..C, in place, one pass.

        Uses head-insertion: each node following the segment's first node is
        unlinked and re-inserted at the segment's front, so no new nodes are
        allocated and nodes outside [B, C] keep their order.
        """
        if B == C:
            return A
        # Walk to the node just before position B (None when B == 1).
        prev = None
        current = A
        for _ in range(B - 1):
            prev = current
            current = current.next
        # 'current' is the segment's first node and will become its tail;
        # head-insert the next C-B nodes in front of it one at a time.
        segment_tail = current
        for _ in range(C - B):
            moved = segment_tail.next
            segment_tail.next = moved.next
            moved.next = current
            current = moved
        # Re-attach the reversed segment to the part before position B.
        if prev is None:
            return current
        prev.next = current
        return A
| Ankan-Das/Python-DS | Linked-List/Interviewbit/Reverse-link-list.py | Reverse-link-list.py | py | 1,399 | python | en | code | 0 | github-code | 36 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.