blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 281 | content_id stringlengths 40 40 | detected_licenses listlengths 0 57 | license_type stringclasses 2
values | repo_name stringlengths 6 116 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 313
values | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 18.2k 668M ⌀ | star_events_count int64 0 102k | fork_events_count int64 0 38.2k | gha_license_id stringclasses 17
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 107
values | src_encoding stringclasses 20
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 4 6.02M | extension stringclasses 78
values | content stringlengths 2 6.02M | authors listlengths 1 1 | author stringlengths 0 175 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
b8f4ee041f600aa5ba18e1c0dae926a19c493bbb | 42bd938432bdd5c8bc12ac6887cdc2678159a0dc | /apps/user/services/list.py | db8a6833e481788a3c961947a07482365434f5b8 | [] | no_license | Rhyanz46/halal_be_halal | 73aca58c8e1d3f8fc6572ca45044d0e249294297 | b6e6760b9802c21e13f357bca9e8314de3771785 | refs/heads/master | 2022-12-10T21:05:20.768102 | 2020-09-10T13:40:36 | 2020-09-10T13:40:36 | 233,027,107 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,516 | py | from flask_jwt_extended import get_jwt_identity, jwt_required
from ..models import User
# from apps.category_access.models import CategoryAccess
@jwt_required
def user_list(page):
current_user = User.query.filter_by(id=get_jwt_identity()).first()
if not current_user:
return {"message": "user authentication is wrong"}, 400
# ca = CategoryAccess.query.filter_by(id=current_user.category_access_id).first()
# if not ca:
# return {"message": "you permission is not setup"}, 403
#
# if not ca.root_access:
# return {"message": "only for root access can do this"}, 400
if not page:
page = 1
try:
page = int(page)
except:
return {"message": "page param must be integer"}, 400
users = User.query.paginate(page=page, per_page=20)
if not users.total:
return {"message": "belum ada pekerja, kamu bisa mendaftarkan pekerja baru"}
result = []
for user in users.items:
# ca_ = CategoryAccess.query.filter_by(id=user.category_access_id).first()
data = user.__serialize__()
# data.update({"category_access_name": ca_.name})
result.append(data)
meta = {
"total_data": users.total,
"total_pages": users.pages,
"total_data_per_page": users.per_page,
"next": "?page={}".format(users.next_num) if users.has_next else None,
"prev": "?page={}".format(users.prev_num) if users.has_prev else None
}
return {"data": result, "meta": meta}
| [
"rianariansaputra@gmail.com"
] | rianariansaputra@gmail.com |
85fd333d2b6f43110d9c7b7171b122dfcdc0a466 | e19527d95fb2105a09bc1435146a1148bfe01476 | /utils/general.py | 122f37a50fc4e4ba87c3765b807a74616dfeb9fd | [] | no_license | shuaih7/ishop_ocr | 7da1bc8f3f764853d7c0151e784b821cc3d4b58c | 57e80d336f1362adefeb57a13fa4ca4d2cfd265f | refs/heads/main | 2023-02-22T15:50:36.294246 | 2021-01-28T03:46:36 | 2021-01-28T03:46:36 | 329,258,528 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,469 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
'''
Created on 01.24.2021
Created on 01.24.2021
Author: haoshaui@handaotech.com
'''
import os
import cv2
import sys
import numpy as np
abs_path = os.path.abspath(os.path.dirname(__file__))
sys.path.append(abs_path)
def draw_results(image, results, isClosed=True, size=0.6, color=(0,255,0), thickness=3):
    """Draw OCR results onto an image: a polygon outline plus its text.

    Each element of ``results`` is expected to look like
    ``(points, (text, ...))``; the text is rendered at the polygon's
    first vertex.  Returns the annotated image.
    """
    label_thickness = max(1, thickness - 1)
    for item in results:
        polygon = np.array(item[0], dtype=np.int32)
        anchor = (int(polygon[0][0]), int(polygon[0][1]))
        polygon = polygon.reshape((-1, 1, 2))
        image = cv2.polylines(image, [polygon], isClosed=isClosed,
                              color=color, thickness=thickness)
        image = cv2.putText(image, item[1][0], anchor,
                            fontFace=cv2.FONT_HERSHEY_SIMPLEX,
                            fontScale=size, color=color,
                            thickness=label_thickness)
    return image
def draw_polylines(image, polylines, texts=None, isClosed=True, size=0.6, color=(0,255,0), thickness=3):
    """Draw a batch of polygons on an image, optionally labelling each one.

    ``texts``, when given, must be parallel to ``polylines``; each label is
    drawn at the first vertex of its polygon.  Returns the annotated image.
    """
    label_thickness = max(1, thickness - 1)
    polygons = np.array(polylines, dtype=np.int32)
    for idx, polygon in enumerate(polygons):
        anchor = (int(polygon[0][0]), int(polygon[0][1]))
        reshaped = polygon.reshape((-1, 1, 2))
        image = cv2.polylines(image, [reshaped], isClosed=isClosed,
                              color=color, thickness=thickness)
        if texts is not None:
            image = cv2.putText(image, texts[idx], anchor,
                                fontFace=cv2.FONT_HERSHEY_SIMPLEX,
                                fontScale=size, color=color,
                                thickness=label_thickness)
    return image
def draw_texts(image, texts, positions, size=0.6, color=(0,255,0), thickness=3):
    """Render each string in ``texts`` at the matching (x, y) in ``positions``."""
    label_thickness = max(1, thickness - 1)
    for position, text in zip(positions, texts):
        anchor = (int(position[0]), int(position[1]))
        image = cv2.putText(image, text, anchor,
                            fontFace=cv2.FONT_HERSHEY_SIMPLEX,
                            fontScale=size, color=color,
                            thickness=label_thickness)
    return image
def draw_boxes(image, boxes=None, scale=(1.0,1.0), color=(255,0,0), thickness=2):
    """Draw axis-aligned rectangles on an image.

    Each box is (x1, y1, x2, y2); x-coordinates are multiplied by
    ``scale[1]`` and y-coordinates by ``scale[0]`` before drawing.
    Returns the image unchanged when no boxes are given.

    Fix: the original used a mutable default argument (``boxes=[]``);
    ``None`` is the safe idiomatic default and is backward-compatible.
    """
    if boxes is None or len(boxes) == 0:
        return image
    row_scale, col_scale = scale[0], scale[1]
    for box in boxes:
        start_point = (int(box[0] * col_scale), int(box[1] * row_scale))
        end_point = (int(box[2] * col_scale), int(box[3] * row_scale))
        image = cv2.rectangle(image, start_point, end_point,
                              color=color, thickness=thickness)
    return image
def create_background(size, seed=0):
    """Write a uniform image of shape ``size`` filled with value ``seed``
    to ``<module dir>/icon/background.jpg``."""
    canvas = np.full(size, seed, dtype=np.uint8)
    out_path = os.path.join(abs_path, "icon", "background.jpg")
    cv2.imwrite(out_path, canvas)
def transparent_background(img_file, save_name, thresh=10):
    """Make near-white pixels of ``img_file`` transparent and save the
    result as a 4-channel (BGRA) image at ``save_name``.

    A pixel becomes fully transparent when its grayscale value exceeds
    ``255 - thresh``; everything else stays fully opaque.
    """
    bgr = cv2.imread(img_file, cv2.IMREAD_COLOR)
    gray = cv2.imread(img_file, cv2.IMREAD_GRAYSCALE)
    height, width = bgr.shape[0], bgr.shape[1]
    bgra = np.zeros((height, width, 4), dtype=np.uint8)
    # Opaque by default; clear out the near-white background pixels.
    alpha = np.ones(gray.shape, dtype=np.uint8) * 255
    alpha[gray > (255 - thresh)] = 0
    bgra[:, :, :3] = bgr
    bgra[:, :, -1] = alpha
    cv2.imwrite(save_name, bgra)
    print("Done")
def resize_image(img_file, save_name, size=(100,100)):
    """Resize ``img_file`` to ``size`` (cubic interpolation) and save it
    at ``save_name``."""
    source = cv2.imread(img_file, -1)
    resized = cv2.resize(source, size, interpolation=cv2.INTER_CUBIC)
    cv2.imwrite(save_name, resized)
    print("Done")
if __name__ == "__main__":
    # Ad-hoc manual test driver with hard-coded developer paths (Windows).
    # Adjust img_file/save_name before running on another machine.
    #create_background((352,352))
    img_file = r"C:\Users\shuai\Documents\GitHub\FabricUI\FabricUI\icon\folder.jpg"
    save_name = r"C:\Users\shuai\Documents\GitHub\FabricUI\FabricUI\icon\folder_icon.png"
    #resize_image(img_file, save_name)
    transparent_background(img_file, save_name)
| [
"shuaih7@gmail.com"
] | shuaih7@gmail.com |
f46082a4187149ad6505842b4c7f3768800d9dfe | b73668c3bb3c02a19308c2aa68a8dba5f69eee02 | /pam20_python/pamaware.py | ab267491d7be20082b6000e34ead9f5f95aaa202 | [] | no_license | romarco1983/pam20 | ba6dcc98e8f6ede434c703eaeaee3543be61b43c | 7123ad8647ab01372f55d773d78d90545b359847 | refs/heads/master | 2023-02-03T12:20:31.485421 | 2020-12-21T20:33:37 | 2020-12-21T20:33:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 299 | py | #!/usr/bin/python
#-*- coding: utf-8-*-
import pam
p=pam.pam()
# Prompt for credentials (prompts are in Catalan: "Nom usuari" = username).
# NOTE(review): input() echoes the password to the terminal; getpass.getpass()
# would be the safer choice — confirm before changing behavior.
userName=input("Nom usuari: ")
userPasswd=input("Passwd: ")
# Authenticate against the system PAM stack; result is stored on the pam object.
p.authenticate(userName, userPasswd)
print('{} {}'.format(p.code,p.reason))
# PAM code 0 means success; on success print the numbers 1..10 as a demo action.
if p.code == 0:
    for i in range(1,11):
        print(i)
else:
    print("Error autenticacio")
| [
"romarco1983@gmail.com"
] | romarco1983@gmail.com |
e889c6566264f92d3ca45e7fb7238cb734823742 | 967a09db2131c3c49af18fc909481058ac7bc27b | /package/lib/boltlinux/package/deb2bolt/debianpackagecache.py | 3705616a5a2a591a111794643e09cd0f3e2d813b | [
"MIT"
] | permissive | pombredanne/distro-tools-1 | c0530981f132faabf61abbf0adbe20b5f393d501 | 38b6c3d90ab4580523243c44adb0a47acb46ec72 | refs/heads/master | 2023-02-26T21:44:54.446827 | 2021-02-08T21:39:51 | 2021-02-08T21:39:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,545 | py | # -*- encoding: utf-8 -*-
#
# The MIT License (MIT)
#
# Copyright (c) 2019 Tobias Koch <tobias.koch@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
import hashlib
import logging
import os
import re
from boltlinux.error import BoltError
from boltlinux.ffi.libarchive import ArchiveFileReader
from boltlinux.miscellaneous.downloader import Downloader
from boltlinux.package.boltpack.debianpackagemetadata import \
DebianPackageMetaData, DebianPackageVersion
from boltlinux.package.deb2bolt.inrelease import InReleaseFile
LOGGER = logging.getLogger(__name__)
class DebianPackageDict(dict):
    """A dict keyed by raw version strings that presents its keys as
    ``DebianPackageVersion`` objects on iteration/``keys()``/``items()``."""

    def keys(self):
        for raw_version in dict.keys(self):
            yield DebianPackageVersion(raw_version)

    def items(self):
        for raw_version, meta in dict.items(self):
            yield DebianPackageVersion(raw_version), meta

    def __iter__(self):
        for raw_version in dict.keys(self):
            yield DebianPackageVersion(raw_version)
class DebianPackageCache:
    """On-disk cache of Debian source/binary package metadata.

    Downloads the ``InRelease``, ``Sources.gz`` and ``Packages.gz`` indexes
    for a release from the Debian mirrors listed in ``sources_list``, stores
    them under ``cache_dir`` as content-addressed blobs behind symlinks, and
    parses them into ``self.source`` / ``self.binary`` dicts mapping
    package name -> DebianPackageDict (version -> metadata).
    """

    # Bit flags selecting which index types update()/_parse_package_list() touch.
    SOURCE = 1
    BINARY = 2

    def __init__(self, release, arch="amd64", pockets=None, cache_dir=None,
            security_enabled=True, updates_enabled=False, keyring=None):
        """Configure mirror URLs and the on-disk cache location.

        release          -- Debian release codename (e.g. "buster").
        arch             -- binary architecture to fetch indexes for.
        pockets          -- archive areas; defaults to main/contrib/non-free.
        cache_dir        -- cache root; defaults to ./pkg-cache.
        security_enabled -- also track the security.debian.org suite.
        updates_enabled  -- also track the <release>-updates suite.
        keyring          -- optional GPG keyring used to verify InRelease files.
        """
        self.release = release
        self.arch = arch

        if not pockets:
            pockets = ["main", "contrib", "non-free"]

        self.pockets = pockets

        if not cache_dir:
            cache_dir = os.path.realpath(os.path.join(
                os.getcwd(), "pkg-cache"))

        self._cache_dir = cache_dir
        self._keyring = keyring

        self.sources_list = [
            (
                "release",
                "http://ftp.debian.org/debian/dists/{}"
                .format(release)
            )
        ]

        if security_enabled:
            self.sources_list.append(
                (
                    "security",
                    "http://security.debian.org/debian-security/dists/{}/updates" # noqa:
                    .format(release)
                )
            )
        #end if

        if updates_enabled:
            self.sources_list.append(
                (
                    "updates",
                    "http://ftp.debian.org/debian/dists/{}-updates"
                    .format(release)
                )
            )
        #end if

        self.source = {}
        self.binary = {}
    #end function

    def open(self):
        """Load previously downloaded index files into memory."""
        self._parse_package_list()

    def update(self, what=SOURCE|BINARY): # noqa:
        """Refresh the cached index files from the network, then re-parse.

        For every (component, pocket, type) combination the matching
        Sources.gz/Packages.gz is downloaded only when the hash recorded in
        the suite's InRelease file differs from the cached blob's tag, and
        each download is verified against that sha256.
        """
        pkg_types = []

        if what & self.SOURCE:
            pkg_types.append("source")
        if what & self.BINARY:
            pkg_types.extend(["binary-{}".format(self.arch), "binary-all"])

        LOGGER.info("updating package cache (this may take a while).")

        for component, base_url in self.sources_list:
            inrelease = self._load_inrelease_file(component, base_url)

            for pocket in self.pockets:
                for type_ in pkg_types:
                    cache_dir = os.path.join(self._cache_dir, "dists",
                        self.release, component, pocket, type_)

                    if not os.path.isdir(cache_dir):
                        os.makedirs(cache_dir)

                    if type_ == "source":
                        filename = "{}/source/Sources.gz".format(pocket)
                        target = os.path.join(cache_dir, "Sources.gz")
                    else:
                        filename = "{}/{}/Packages.gz".format(pocket, type_)
                        target = os.path.join(cache_dir, "Packages.gz")
                    #end if

                    try:
                        sha256sum = inrelease.hash_for_filename(filename)
                        source = "{}/{}".format(
                            base_url, inrelease.by_hash_path(filename)
                        )
                    except KeyError:
                        raise BoltError(
                            "no such entry '{}' in Release file, mistyped "
                            "command line parameter?".format(filename)
                        )
                    #end try

                    # First 16 hex chars of the sha256 name the cached blob.
                    new_tag = sha256sum[:16]

                    # Check if resource has changed.
                    if not os.path.islink(target):
                        old_tag = ""
                    else:
                        old_tag = os.path.basename(os.readlink(target))

                    if old_tag == new_tag:
                        continue

                    digest = hashlib.sha256()

                    # Download file into symlinked blob.
                    try:
                        self._download_tagged_http_resource(
                            source, target, tag=new_tag, digest=digest
                        )
                    except BoltError as e:
                        raise BoltError(
                            "failed to retrieve {}: {}".format(source, str(e))
                        )
                    #end try

                    # Remove old blob.
                    if old_tag:
                        os.unlink(
                            os.path.join(os.path.dirname(target), old_tag)
                        )
                    #end if

                    # Verify signature trail through sha256sum.
                    if digest.hexdigest() != sha256sum:
                        raise BoltError(
                            "wrong hash for '{}'.".format(source)
                        )
                    #end if
                #end for
            #end for
        #end for

        self._parse_package_list(what=what)
    #end function

    # PRIVATE

    def _parse_package_list(self, what=SOURCE|BINARY): # noqa:
        """Parse the cached Sources.gz/Packages.gz files into the in-memory
        ``self.source``/``self.binary`` maps and return them as a tuple.

        Missing index files are skipped silently; existing maps for the
        selected types are cleared before reloading.
        """
        pkg_types = []

        if what & self.SOURCE:
            pkg_types.append("source")
            self.source.clear()
        if what & self.BINARY:
            pkg_types.extend(["binary-{}".format(self.arch), "binary-all"])
            self.binary.clear()

        LOGGER.info("(re)loading package cache, please hold on.")

        for component, base_url in self.sources_list:
            for pocket in self.pockets:
                for type_ in pkg_types:
                    if type_ == "source":
                        meta_gz = "Sources.gz"
                        cache = self.source
                    else:
                        meta_gz = "Packages.gz"
                        cache = self.binary
                    #end if

                    meta_file = os.path.join(self._cache_dir, "dists",
                        self.release, component, pocket, type_, meta_gz)

                    if not os.path.exists(meta_file):
                        continue

                    with ArchiveFileReader(meta_file, raw=True) as archive:
                        try:
                            next(iter(archive))
                        except StopIteration:
                            # The archive is empty.
                            continue

                        buf = archive\
                            .read_data()\
                            .decode("utf-8")

                        # Pool URLs in the metadata are relative to the part
                        # of the mirror URL before "/dists/".
                        pool_base = re.match(
                            r"^(?P<pool_base>https?://.*?)/dists/.*$",
                            base_url
                        ).group("pool_base")

                        # Index entries are blank-line-separated stanzas.
                        for chunk in re.split(r"\n\n+", buf,
                                flags=re.MULTILINE):
                            chunk = chunk.strip()
                            if not chunk:
                                continue

                            meta_data = DebianPackageMetaData(
                                chunk, base_url=pool_base)

                            pkg_name = meta_data["Package"]
                            pkg_version = meta_data["Version"]

                            # setdefault keeps the first entry seen for a
                            # given (name, version) pair.
                            cache\
                                .setdefault(pkg_name, DebianPackageDict())\
                                .setdefault(pkg_version, meta_data)
                        #end for
                    #end with
                #end for
            #end for
        #end for

        return (self.source, self.binary)
    #end function

    def _download_tagged_http_resource(self, source_url, target_file, tag="",
            digest=None, connection_timeout=30):
        """Download ``source_url`` into a blob named after ``tag`` next to
        ``target_file``, then atomically point ``target_file`` at it.

        A partial download (blob + "$") is removed on failure. ``digest``,
        when given, is updated with the downloaded bytes by the downloader.
        """
        downloader = Downloader()

        if not tag:
            tag = downloader.tag(source_url)

        blob_file = os.path.join(os.path.dirname(target_file), tag)

        try:
            with open(blob_file + "$", "wb+") as f:
                for chunk in downloader.get(source_url, digest=digest):
                    f.write(chunk)
        except Exception:
            if os.path.exists(blob_file + "$"):
                os.unlink(blob_file + "$")
            raise
        #end try

        # Atomically rename blob.
        os.rename(blob_file + "$", blob_file)
        # Create temporary symlink to new blob.
        os.symlink(os.path.basename(blob_file), target_file + "$")
        # Atomically rename symlink (hopefully).
        os.rename(target_file + "$", target_file)
    #end function

    def _load_inrelease_file(self, component, base_url):
        """Fetch (if changed) and parse the suite's InRelease file.

        When a keyring was configured, the file's GPG signature is verified
        and a BoltError is raised on failure. Returns an InReleaseFile.
        """
        cache_dir = os.path.join(
            self._cache_dir, "dists", self.release, component
        )
        if not os.path.isdir(cache_dir):
            os.makedirs(cache_dir)

        downloader = Downloader()

        source = "{}/{}".format(base_url, "InRelease")
        target = os.path.join(cache_dir, "InRelease")

        if not os.path.islink(target):
            old_tag = ""
        else:
            old_tag = os.path.basename(os.readlink(target))

        new_tag = downloader.tag(source)

        if old_tag != new_tag:
            self._download_tagged_http_resource(source, target, tag=new_tag)
            if old_tag:
                os.unlink(os.path.join(cache_dir, old_tag))
        #end if

        inrelease = InReleaseFile.load(os.path.join(cache_dir, new_tag))

        if self._keyring:
            if not os.path.exists(self._keyring):
                raise BoltError(
                    "keyring file '{}' not found, cannot check '{}' signature."
                    .format(self._keyring, target)
                )
            #end if

            if not inrelease.valid_signature(keyring=self._keyring):
                raise BoltError(
                    "unable to verify the authenticity of '{}'"
                    .format(target)
                )
            #end if
        #end if

        return inrelease
    #end function

#end class
| [
"tobias.koch@gmail.com"
] | tobias.koch@gmail.com |
df0df3a114e599c36a4d9a1fef81af871183c836 | c82a04b8aa975b1596e48e13deaf5f11a2ae94ba | /test.py | 99b9847323d2a912600184ba1f913a0369ba9259 | [
"MIT"
] | permissive | budsus/CodeSearchNet | 466e6d06b8b0f08f418906151af6018cc7253ca1 | d79d0fde2569e4ed7ab0454e3b019fba3d6c7b90 | refs/heads/master | 2023-03-17T07:48:40.451414 | 2019-12-12T13:08:47 | 2019-12-12T13:08:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 432 | py | import torch
# Demo: compute the Mean Reciprocal Rank (MRR) of random scores.
# x holds 10 rows of scores over 5 classes; labels picks the "true" class per row.
x = torch.randn(10, 5)
print(x)
labels = torch.LongTensor([1,2,3,3,0,0,0,0,0,0])
n_classes = x.shape[-1]
# One-hot mask selecting each row's true-class column.
one_hot = torch.nn.functional.one_hot(labels, n_classes)
print(one_hot)
print(x * one_hot)
# Extract the true-class score and broadcast it across all columns.
compare = (x * one_hot).sum(-1).unsqueeze(-1).repeat(1, n_classes)
print(compare)
# Count how many scores are >= the true score; that count is the rank
# (the true score compares equal to itself, so rank starts at 1).
compared_scores = x >= compare
print(compared_scores)
# Reciprocal rank per row, then the mean over rows.
rr = 1 / compared_scores.float().sum(-1)
print(rr)
mrr = rr.mean()
print(mrr)
"bentrevett@gmail.com"
] | bentrevett@gmail.com |
4add977a3570e82af6b1561f73a00e5896c9ac0f | b7f6cfda4fe68bde9d3896c5f29190c496b426d4 | /c11/a2/A2/run_amazon.py | 036216081c49e6426088d74288f4aec4bdeafa58 | [] | no_license | aribshaikh/UofT-Projects | bca513e153fa30860c832fe9af48409ac79a9433 | 97c5b3ed782da53e4ce49c7526a6e47ad5bfc5ee | refs/heads/main | 2023-03-06T20:06:59.927356 | 2021-02-16T19:08:25 | 2021-02-16T19:08:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,999 | py | """
CSCC11 - Introduction to Machine Learning, Fall 2020, Assignment 2
B. Chan, E. Franco, D. Fleet
This script runs an experiment on the Amazon dataset.
It fetches hyperparameters AMAZON_HYPERPARAMETERS from hyperparameters.py
and check model's train, validation, and test accuracies over 10 different seeds.
NOTE: As a rule of thumb, each seed should take no longer than 5 minutes.
"""
import _pickle as pickle
import numpy as np
from experiments import run_experiment
from hyperparameters import AMAZON_HYPERPARAMETERS
def main(final_hyperparameters):
    """Run the Amazon-review experiment over 10 fixed seeds and report
    mean/std of train, validation and (optionally) test accuracy.

    final_hyperparameters -- when True, also load and evaluate the test
                             split (only do this once hyperparameters are
                             final, to avoid test-set tuning).
    """
    # The pickle holds scipy sparse matrices; densify for the model.
    with open("./datasets/amazon_sparse.pkl", "rb") as f:
        amazon_data = pickle.load(f)

    train_X = amazon_data['Xtr'].toarray()
    train_y = amazon_data['Ytr'].toarray()
    test_X, test_y = None, None
    if final_hyperparameters:
        test_X = amazon_data['Xte'].toarray()
        test_y = amazon_data['Yte'].toarray()

    # Split dataset into training and validation
    # (fixed RandomState(0) so the split is reproducible).
    # NOTE(review): perm[1200] falls into neither split — train takes
    # perm[:1200] and validation takes perm[1201:]; looks like an
    # off-by-one, confirm against the assignment spec.
    perm = np.random.RandomState(0).permutation(train_X.shape[0])
    validation_X = train_X[perm[1201:], :]
    validation_y = train_y[perm[1201:]]
    train_X = train_X[perm[:1200], :]
    train_y = train_y[perm[:1200]]

    # You can try different seeds and check the model's performance!
    seeds = np.random.RandomState(0).randint(low=0, high=65536, size=(10))

    train_accuracies = []
    validation_accuracies = []
    test_accuracies = []

    AMAZON_HYPERPARAMETERS["debug"] = False
    AMAZON_HYPERPARAMETERS["num_classes"] = 50
    for seed in seeds:
        # Each run gets its own RNG so the 10 runs are independent but repeatable.
        AMAZON_HYPERPARAMETERS["rng"] = np.random.RandomState(seed)
        train_accuracy, validation_accuracy, test_accuracy = run_experiment(AMAZON_HYPERPARAMETERS,
                                                                            train_X,
                                                                            train_y,
                                                                            validation_X,
                                                                            validation_y,
                                                                            test_X,
                                                                            test_y)
        print(f"Seed: {seed} - Train Accuracy: {train_accuracy} - Validation Accuracy: {validation_accuracy} - Test Accuracy: {test_accuracy}")
        train_accuracies.append(train_accuracy)
        validation_accuracies.append(validation_accuracy)
        test_accuracies.append(test_accuracy)

    print(f"Train Accuracies - Mean: {np.mean(train_accuracies)} - Standard Deviation: {np.std(train_accuracies, ddof=0)}")
    print(f"Validation Accuracies - Mean: {np.mean(validation_accuracies)} - Standard Deviation: {np.std(validation_accuracies, ddof=0)}")
    print(f"Test Accuracies - Mean: {np.mean(test_accuracies)} - Standard Deviation: {np.std(test_accuracies, ddof=0)}")
if __name__ == "__main__":
    # Set to True only for the final run: it loads and scores the test split.
    final_hyperparameters = False
    main(final_hyperparameters=final_hyperparameters)
| [
"jeffersonli.li@mail.utoronto.ca"
] | jeffersonli.li@mail.utoronto.ca |
1646bc77d5dd1916fa665c900bf2713e662ed359 | 3a0feb4707165acab188164a655236017b455020 | /submark/cli.py | e856469cbb5f6a441213ea05df1e009248889fe1 | [
"MIT"
] | permissive | okken/submark | 71039cb036d986a9c45047f27cec67f6f4b8831e | f3d5247b15267d8f87283a660962d1cb3b89f691 | refs/heads/master | 2020-06-14T23:36:59.553407 | 2019-09-06T19:47:03 | 2019-09-06T19:47:03 | 195,155,294 | 22 | 9 | MIT | 2019-08-24T01:30:50 | 2019-07-04T02:26:17 | Python | UTF-8 | Python | false | false | 118 | py | """
"""
from .submark import convert_list
import fileinput
def main():
    """Read input via fileinput (stdin or file arguments), convert it with
    convert_list, and print the result."""
    source_lines = fileinput.input()
    print(convert_list(source_lines))
| [
"1568356+okken@users.noreply.github.com"
] | 1568356+okken@users.noreply.github.com |
7d7d54e116753c2861b526fd438f53a485720cc3 | 7713bd03f9a6404f079efeddcc74a6da8a395a26 | /controllers/utility.py | ba47f088893a15b851462462d3a62a8f40b61bad | [] | no_license | ShikharKannoje/pScan | bb59d8d17bc3a0281ab29ee15a220e0522a9b735 | 1254fe876064f8a04d6d84b6551110ca7a40fda2 | refs/heads/master | 2020-03-09T05:54:22.263816 | 2018-04-08T09:55:17 | 2018-04-08T09:55:17 | 128,623,894 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,922 | py | from modules import *
def rendertxt(fname):
    """
    Parse the voice-report text file produced by Praat into a flat feature
    vector, persist the named features to MongoDB (db.train), delete the
    text file, and return the feature list.

    The returned list has 27 entries in a fixed order:
      [0]     bias term (always 1)
      [1:6]   jitter features
      [6:12]  shimmer features
      [12:15] harmonicity (AC, NTH, HTN)
      [15:20] pitch features
      [20:24] pulse features
      [24:27] voicing features
    NOTE(review): the original docstring says "numpy array" but a plain
    Python list is returned — confirm callers expect a list.

    The parsing is strictly positional: it relies on the exact line layout
    of Praat's voice report, skipping header/section lines with bare
    f.readline() calls.
    """
    data = ["" for i in range(27)]
    dbData = defaultdict(float)
    # for bias
    data[0] = 1
    dbData["Bias"] = 1
    with open(fname) as f:
        # Skip the two report header lines.
        f.readline()
        f.readline()
        # for Pitch features
        for i in range(15, 20):
            tmp = f.readline()
            # Each line is "<name>: <value> Hz"; strip units and spaces.
            tmp = tmp.strip(" ").split(":")
            tmp[1] = tmp[1].replace(" ","").replace("Hz","")
            data[i] = float(tmp[1])
            dbData[tmp[0]] = float(tmp[1])
        # for Pulses features
        f.readline()
        for i in range(20, 24):
            tmp = f.readline()
            tmp = tmp.strip(" ").split(":")
            tmp[1] = tmp[1].replace(" ","").replace("seconds","")
            data[i] = float(tmp[1])
            dbData[tmp[0]] = float(tmp[1])
        # for Voicing features
        f.readline()
        for i in range(24, 27):
            tmp = f.readline()
            # Debug output left in by the original author (Python 2 prints).
            print tmp
            tmp = tmp.strip(" ").split(":")
            print tmp
            print tmp[1]
            # Voicing values may be "x%" or "x (y)"; try the percent form
            # first and fall back to the first whitespace-separated token.
            try:
                data[i] = float(tmp[1].split("%")[0])
                dbData[tmp[0]] = float(tmp[1].split("%")[0])
            except ValueError:
                data[i] = float(tmp[1].strip(" ").split(" ")[0])
                dbData[tmp[0]] = float(tmp[1].strip(" ").split(" ")[0])
        # for Jitter features
        f.readline()
        for i in range(1, 6):
            tmp = f.readline()
            tmp = tmp.strip(" ").split(":")
            tmp[1] = tmp[1].replace(" ","").replace("seconds","").replace("%", "")
            data[i] = float(tmp[1])
            dbData[tmp[0]] = float(tmp[1])
        # for Shimmer features
        f.readline()
        for i in range(6, 12):
            tmp = f.readline()
            tmp = tmp.strip(" ").split(":")
            tmp[1] = tmp[1].replace(" ","").replace("dB","").replace("%", "")
            data[i] = float(tmp[1])
            dbData[tmp[0]] = float(tmp[1])
        # for Harmonicity of the voiced parts features
        # Three fixed lines: autocorrelation, noise-to-harmonics, harmonics-to-noise.
        f.readline()
        tmp = f.readline()
        tmp = tmp.strip(" ").split(":")
        tmp[1] = tmp[1].replace(" ","").replace("dB","").replace("%", "")
        data[12] = float(tmp[1])
        dbData["AC"] = float(tmp[1])
        tmp = f.readline()
        tmp = tmp.strip(" ").split(":")
        tmp[1] = tmp[1].replace(" ","").replace("dB","").replace("%", "")
        data[13] = float(tmp[1])
        dbData["NTH"] = float(tmp[1])
        tmp = f.readline()
        tmp = tmp.strip(" ").split(":")
        tmp[1] = tmp[1].replace(" ","").replace("dB","").replace("%", "")
        data[14] = float(tmp[1])
        dbData["HTN"] = float(tmp[1])
    # Persist the named feature dict for later training runs.
    db.train.insert(dbData)
    # remove the text file
    os.system("rm " + fname)
    return data
def getAttributes(fname):
    """
    Extract voice features from an uploaded audio file.

    Converts the upload to mp3 via pydub, runs the Praat "Voice" script on
    it to produce a text report, and returns the parsed feature vector
    from rendertxt().

    NOTE(review): fname.split(".")[0] and the fname[:-3]/fname[:-4] slices
    assume a filename with exactly one dot and a 3-letter extension —
    confirm upstream upload handling guarantees this.
    NOTE(review): fname is interpolated into an os.system() shell command;
    if fname can be attacker-controlled this is a shell-injection risk.
    """
    txtName = "/root/pScan-Web/uploads/" + fname.split(".")[0] + ".txt"
    from pydub import AudioSegment
    print("/root/pScan-Web/uploads/" + fname)
    AudioSegment.from_file("/root/pScan-Web/uploads/" + fname).export("/root/pScan-Web/uploads/" + fname[:-3]+"mp3", format="mp3")
    os.system("praat --run /root/pScan-Web/Voice \"" + fname[:-4] +"\" > " + txtName)
    return rendertxt(txtName)
def mongoTolist(cls = True):
    """
    Fetch all training documents from MongoDB (db.train) and return them
    as a numpy array of feature rows.

    cls -- when True (default), append the "Class" label as the last
           column of each row; when False the row ends at "UPDRS".

    Documents missing any expected key are skipped silently (KeyError),
    matching the original behaviour.

    Fix: the original duplicated the full 28-key extraction in both
    branches; a single ordered key list removes the duplication and keeps
    the two variants from drifting apart.
    """
    # Feature keys in the exact column order expected by the model.
    fields = [
        "Bias", "Jitter (local)", "Jitter (local, absolute)",
        "Jitter (rap)", "Jitter (ppq5)", "Jitter (ddp)",
        "Shimmer (local)", "Shimmer (local, dB)", "Shimmer (apq3)",
        "Shimmer (apq5)", "Shimmer (apq11)", "Shimmer (dda)", "AC",
        "NTH", "HTN", "Median pitch", "Mean pitch", "Standard deviation",
        "Minimum pitch", "Maximum pitch", "Number of pulses",
        "Number of periods", "Mean period", "Standard deviation of period",
        "Fraction of locally unvoiced frames", "Number of voice breaks",
        "Degree of voice breaks", "UPDRS",
    ]
    if cls:
        fields = fields + ["Class"]
    data = db.train.find({})
    retData = []
    for d in data:
        try:
            retData.append([d[k] for k in fields])
        except KeyError:
            pass
    return np.array(retData)
def updateTheta():
    """
    Reload the weight matrices from theta1.txt (28 rows) and theta2.txt
    (12 rows) and replace the copies stored in the database (db.theta).
    """
    def read_matrix(path, rows):
        # Each line holds one space-separated row of floats.
        with open(path) as f:
            return [map(float, f.readline().strip("\n").strip(" ").split(" "))
                    for _ in range(rows)]

    t1 = read_matrix("theta1.txt", 28)
    t2 = read_matrix("theta2.txt", 12)
    # Drop the previous weights, then store the fresh ones.
    db.theta.remove({})
    db.theta.insert({"theta1" : t1})
    db.theta.insert({"theta2" : t2})
| [
"shikharkannoje09@gmail.com"
] | shikharkannoje09@gmail.com |
d41843063e71c0eb70cad9cfdc310dcb0056ae21 | 9bc286509d70f564cf7f0551fb0d8ca3fa230a40 | /wheel/main/helper.py | 324ea8d033ecbc5a1f8a9d379948f8e836ab3a24 | [] | no_license | m1ck/SocialWheel | 0cc43eed0613dad79e64b249ea9b3bf30b711899 | 151a4310e16f046fac903f3075a7ad0f31fbc197 | refs/heads/master | 2016-08-03T02:56:08.745610 | 2011-10-13T18:39:48 | 2011-10-13T18:39:48 | 283,036 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,753 | py | #!/usr/bin/python
import sys
import re
import string
import types
import exceptions
import math
class Error(Exception):
    """Base exception for this module; its string form is the stored message."""

    def __init__(self, message):
        # Keep the message on the instance so __str__ can echo it back.
        self.message_ = message

    def __str__(self):
        return self.message_
class InvalidInputException(Error):
    """Raised when a caller supplies malformed input to makeCloud."""

    def __init__(self, input, message=''):
        # Remember the offending value and build a descriptive message.
        self.input_ = input
        full_message = ("The following invalid input was given: "
                        + str(self.input_) + "(" + message + ")")
        Error.__init__(self, full_message)
def makeCloud(steps, input):
    """Bucket weighted tags into ``steps`` logarithmic size classes.

    input -- list of (tag, weight) tuples.
    steps -- number of size buckets (> 0).

    Returns a list of {'trend': tag, 'num': bucket} dicts, where bucket is
    "1".."steps" on a logarithmic scale of the tag weights (useful for
    tag-cloud font sizing).

    Raises InvalidInputException when input is not a non-empty list of
    tuples or steps <= 0.

    Fixes: the original used the Python-2-only ``raise Exc, msg`` syntax,
    which also passed the message string as the exception's ``input``
    argument, and ``types.ListType``/``types.TupleType``, which no longer
    exist in Python 3.
    """
    if not isinstance(input, list) or len(input) <= 0 or steps <= 0:
        raise InvalidInputException(
            input, "Please be sure steps > 0 and your input list is not empty.")
    weights = []
    for item in input:
        if not isinstance(item, tuple):
            raise InvalidInputException(item, "Be sure input list holds tuples.")
        weights.append(item[1])
    maxWeight = float(max(weights))
    minWeight = float(min(weights))
    delta = (maxWeight - minWeight) / float(steps)
    # Bucket boundaries on a log scale; the +2 shift keeps log() defined
    # for zero/small weights (matches the per-tag scaling below).
    thresholds = [(100 * math.log((minWeight + i * delta) + 2), i)
                  for i in range(steps + 1)]
    results = []
    for tag in input:
        scaled = 100 * math.log(tag[1] + 2)
        # Assign each tag to the first bucket whose threshold it fits under.
        for threshold, bucket in thresholds[1:int(steps) + 1]:
            if scaled <= threshold:
                results.append({'trend': str(tag[0]), 'num': str(bucket)})
                break
    return results
"""Porter Stemming Algorithm
This is the Porter stemming algorithm, ported to Python from the
version coded up in ANSI C by the author. It may be be regarded
as canonical, in that it follows the algorithm presented in
Porter, 1980, An algorithm for suffix stripping, Program, Vol. 14,
no. 3, pp 130-137,
only differing from it at the points maked --DEPARTURE-- below.
See also http://www.tartarus.org/~martin/PorterStemmer
The algorithm as described in the paper could be exactly replicated
by adjusting the points of DEPARTURE, but this is barely necessary,
because (a) the points of DEPARTURE are definitely improvements, and
(b) no encoding of the Porter stemmer I have seen is anything like
as exact as this version, even with the points of DEPARTURE!
Vivake Gupta (v@nano.com)
Release 1: January 2001
Further adjustments by Santiago Bruno (bananabruno@gmail.com)
to allow word input not restricted to one word per line, leading
to:
release 2: July 2008
"""
class PorterStemmer:
def __init__(self):
"""The main part of the stemming algorithm starts here.
b is a buffer holding a word to be stemmed. The letters are in b[k0],
b[k0+1] ... ending at b[k]. In fact k0 = 0 in this demo program. k is
readjusted downwards as the stemming progresses. Zero termination is
not in fact used in the algorithm.
Note that only lower case sequences are stemmed. Forcing to lower case
should be done before stem(...) is called.
"""
self.b = "" # buffer for word to be stemmed
self.k = 0
self.k0 = 0
self.j = 0 # j is a general offset into the string
def cons(self, i):
"""cons(i) is TRUE <=> b[i] is a consonant."""
if self.b[i] == 'a' or self.b[i] == 'e' or self.b[i] == 'i' or self.b[i] == 'o' or self.b[i] == 'u':
return 0
if self.b[i] == 'y':
if i == self.k0:
return 1
else:
return (not self.cons(i - 1))
return 1
def m(self):
"""m() measures the number of consonant sequences between k0 and j.
if c is a consonant sequence and v a vowel sequence, and <..>
indicates arbitrary presence,
<c><v> gives 0
<c>vc<v> gives 1
<c>vcvc<v> gives 2
<c>vcvcvc<v> gives 3
....
"""
n = 0
i = self.k0
while 1:
if i > self.j:
return n
if not self.cons(i):
break
i = i + 1
i = i + 1
while 1:
while 1:
if i > self.j:
return n
if self.cons(i):
break
i = i + 1
i = i + 1
n = n + 1
while 1:
if i > self.j:
return n
if not self.cons(i):
break
i = i + 1
i = i + 1
def vowelinstem(self):
"""vowelinstem() is TRUE <=> k0,...j contains a vowel"""
for i in range(self.k0, self.j + 1):
if not self.cons(i):
return 1
return 0
def doublec(self, j):
"""doublec(j) is TRUE <=> j,(j-1) contain a double consonant."""
if j < (self.k0 + 1):
return 0
if (self.b[j] != self.b[j-1]):
return 0
return self.cons(j)
def cvc(self, i):
"""cvc(i) is TRUE <=> i-2,i-1,i has the form consonant - vowel - consonant
and also if the second c is not w,x or y. this is used when trying to
restore an e at the end of a short e.g.
cav(e), lov(e), hop(e), crim(e), but
snow, box, tray.
"""
if i < (self.k0 + 2) or not self.cons(i) or self.cons(i-1) or not self.cons(i-2):
return 0
ch = self.b[i]
if ch == 'w' or ch == 'x' or ch == 'y':
return 0
return 1
def ends(self, s):
"""ends(s) is TRUE <=> k0,...k ends with the string s."""
length = len(s)
if s[length - 1] != self.b[self.k]: # tiny speed-up
return 0
if length > (self.k - self.k0 + 1):
return 0
if self.b[self.k-length+1:self.k+1] != s:
return 0
self.j = self.k - length
return 1
def setto(self, s):
"""setto(s) sets (j+1),...k to the characters in the string s, readjusting k."""
length = len(s)
self.b = self.b[:self.j+1] + s + self.b[self.j+length+1:]
self.k = self.j + length
def r(self, s):
"""r(s) is used further down."""
if self.m() > 0:
self.setto(s)
def step1ab(self):
"""step1ab() gets rid of plurals and -ed or -ing. e.g.
caresses -> caress
ponies -> poni
ties -> ti
caress -> caress
cats -> cat
feed -> feed
agreed -> agree
disabled -> disable
matting -> mat
mating -> mate
meeting -> meet
milling -> mill
messing -> mess
meetings -> meet
"""
if self.b[self.k] == 's':
if self.ends("sses"):
self.k = self.k - 2
elif self.ends("ies"):
self.setto("i")
elif self.b[self.k - 1] != 's':
self.k = self.k - 1
if self.ends("eed"):
if self.m() > 0:
self.k = self.k - 1
elif (self.ends("ed") or self.ends("ing")) and self.vowelinstem():
self.k = self.j
if self.ends("at"): self.setto("ate")
elif self.ends("bl"): self.setto("ble")
elif self.ends("iz"): self.setto("ize")
elif self.doublec(self.k):
self.k = self.k - 1
ch = self.b[self.k]
if ch == 'l' or ch == 's' or ch == 'z':
self.k = self.k + 1
elif (self.m() == 1 and self.cvc(self.k)):
self.setto("e")
def step1c(self):
"""step1c() turns terminal y to i when there is another vowel in the stem."""
if (self.ends("y") and self.vowelinstem()):
self.b = self.b[:self.k] + 'i' + self.b[self.k+1:]
def step2(self):
"""step2() maps double suffices to single ones.
so -ization ( = -ize plus -ation) maps to -ize etc. note that the
string before the suffix must give m() > 0.
"""
if self.b[self.k - 1] == 'a':
if self.ends("ational"): self.r("ate")
elif self.ends("tional"): self.r("tion")
elif self.b[self.k - 1] == 'c':
if self.ends("enci"): self.r("ence")
elif self.ends("anci"): self.r("ance")
elif self.b[self.k - 1] == 'e':
if self.ends("izer"): self.r("ize")
elif self.b[self.k - 1] == 'l':
if self.ends("bli"): self.r("ble") # --DEPARTURE--
# To match the published algorithm, replace this phrase with
# if self.ends("abli"): self.r("able")
elif self.ends("alli"): self.r("al")
elif self.ends("entli"): self.r("ent")
elif self.ends("eli"): self.r("e")
elif self.ends("ousli"): self.r("ous")
elif self.b[self.k - 1] == 'o':
if self.ends("ization"): self.r("ize")
elif self.ends("ation"): self.r("ate")
elif self.ends("ator"): self.r("ate")
elif self.b[self.k - 1] == 's':
if self.ends("alism"): self.r("al")
elif self.ends("iveness"): self.r("ive")
elif self.ends("fulness"): self.r("ful")
elif self.ends("ousness"): self.r("ous")
elif self.b[self.k - 1] == 't':
if self.ends("aliti"): self.r("al")
elif self.ends("iviti"): self.r("ive")
elif self.ends("biliti"): self.r("ble")
elif self.b[self.k - 1] == 'g': # --DEPARTURE--
if self.ends("logi"): self.r("log")
# To match the published algorithm, delete this phrase
def step3(self):
"""step3() dels with -ic-, -full, -ness etc. similar strategy to step2."""
if self.b[self.k] == 'e':
if self.ends("icate"): self.r("ic")
elif self.ends("ative"): self.r("")
elif self.ends("alize"): self.r("al")
elif self.b[self.k] == 'i':
if self.ends("iciti"): self.r("ic")
elif self.b[self.k] == 'l':
if self.ends("ical"): self.r("ic")
elif self.ends("ful"): self.r("")
elif self.b[self.k] == 's':
if self.ends("ness"): self.r("")
def step4(self):
"""step4() takes off -ant, -ence etc., in context <c>vcvc<v>."""
if self.b[self.k - 1] == 'a':
if self.ends("al"): pass
else: return
elif self.b[self.k - 1] == 'c':
if self.ends("ance"): pass
elif self.ends("ence"): pass
else: return
elif self.b[self.k - 1] == 'e':
if self.ends("er"): pass
else: return
elif self.b[self.k - 1] == 'i':
if self.ends("ic"): pass
else: return
elif self.b[self.k - 1] == 'l':
if self.ends("able"): pass
elif self.ends("ible"): pass
else: return
elif self.b[self.k - 1] == 'n':
if self.ends("ant"): pass
elif self.ends("ement"): pass
elif self.ends("ment"): pass
elif self.ends("ent"): pass
else: return
elif self.b[self.k - 1] == 'o':
if self.ends("ion") and (self.b[self.j] == 's' or self.b[self.j] == 't'): pass
elif self.ends("ou"): pass
# takes care of -ous
else: return
elif self.b[self.k - 1] == 's':
if self.ends("ism"): pass
else: return
elif self.b[self.k - 1] == 't':
if self.ends("ate"): pass
elif self.ends("iti"): pass
else: return
elif self.b[self.k - 1] == 'u':
if self.ends("ous"): pass
else: return
elif self.b[self.k - 1] == 'v':
if self.ends("ive"): pass
else: return
elif self.b[self.k - 1] == 'z':
if self.ends("ize"): pass
else: return
else:
return
if self.m() > 1:
self.k = self.j
def step5(self):
"""step5() removes a final -e if m() > 1, and changes -ll to -l if
m() > 1.
"""
self.j = self.k
if self.b[self.k] == 'e':
a = self.m()
if a > 1 or (a == 1 and not self.cvc(self.k-1)):
self.k = self.k - 1
if self.b[self.k] == 'l' and self.doublec(self.k) and self.m() > 1:
self.k = self.k -1
def stem(self, p, i, j):
"""In stem(p,i,j), p is a char pointer, and the string to be stemmed
is from p[i] to p[j] inclusive. Typically i is zero and j is the
offset to the last character of a string, (p[j+1] == '\0'). The
stemmer adjusts the characters p[i] ... p[j] and returns the new
end-point of the string, k. Stemming never increases word length, so
i <= k <= j. To turn the stemmer into a module, declare 'stem' as
extern, and delete the remainder of this file.
"""
# copy the parameters into statics
self.b = p
self.k = j
self.k0 = i
if self.k <= self.k0 + 1:
return self.b # --DEPARTURE--
# With this line, strings of length 1 or 2 don't go through the
# stemming process, although no mention is made of this in the
# published algorithm. Remove the line to match the published
# algorithm.
self.step1ab()
self.step1c()
self.step2()
self.step3()
self.step4()
self.step5()
return self.b[self.k0:self.k+1]
| [
"mickcarlo@gmail.com"
] | mickcarlo@gmail.com |
e82199ee6a0782c9298340d3eb954b98610f5eb2 | 66eff0c4dc986a354c08bd1681219d3870651d44 | /Python/QuickSort.py | cb1e420d6adcdee35cdb364788e88b6440cc77af | [] | no_license | Visorgood/CodeBasics | bdd0199821911d602722621d9a7b86bffb9951ef | d015e84f4f61f596a9a87a1a58aa97b6f06641fa | refs/heads/master | 2021-01-22T05:47:56.332189 | 2019-06-06T10:40:14 | 2019-06-06T10:40:14 | 11,051,956 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 469 | py | import random
def Swap(A, i, j):
t = A[i]
A[i] = A[j]
A[j] = t
def Partition(A, l, r):
Swap(A, l, random.randint(l, r))
p = A[l]
i = l + 1
for j in range(l + 1, r + 1):
if A[j] < p:
Swap(A, i, j)
i = i + 1
Swap(A, l, i - 1)
return i - 1
def QuickSortRecursive(A, l, r):
if l >= r:
return
i = Partition(A, l, r)
QuickSortRecursive(A, l, i - 1)
QuickSortRecursive(A, i + 1, r)
def QuickSort(array):
QuickSortRecursive(array, 0, len(array) - 1) | [
"slavanw@gmail.com"
] | slavanw@gmail.com |
0120f6c83eece5376403bb9d5185c14bf366b81b | 54f78d59d8cd6d4504cbfa884ad148eac24dbff1 | /alcohall/selections/urls.py | ce0dfafab8b893efccc9f2f7d012793f3347de82 | [] | no_license | pirozhki-2020/backend | 51cc311bf4bb9f0be175d483777c29a696faf527 | 1113bc9f7d6b05b893c7d91c4b503f5b73e3d1e2 | refs/heads/master | 2022-12-10T12:14:35.541832 | 2020-05-30T15:23:45 | 2020-05-30T15:23:45 | 248,353,241 | 0 | 0 | null | 2022-11-22T06:01:43 | 2020-03-18T22:05:18 | Python | UTF-8 | Python | false | false | 249 | py | from django.urls import path
from alcohall.selections import views
urlpatterns = [
path("list", views.SelectionsListView.as_view()),
path("get", views.SelectionGetView.as_view()),
path("create", views.SelectionCreateView.as_view()),
]
| [
"i@artbakulev.com"
] | i@artbakulev.com |
84da4aa3040aff4ba9500cd44c727c2d0862c3a3 | 2771cf964bb56fa7e1326e008622065c6b9ce065 | /portal/views.py | 5896b16c57eb13c15f4bedb4e46b3db9ce0522fd | [] | no_license | chrisivanortega/kenya | 5ad9e820d475eafc91100a6537e46b65d704593f | e6da3e006141aea2015b5764311595304f569965 | refs/heads/master | 2020-05-27T17:44:21.986729 | 2019-05-29T17:01:36 | 2019-05-29T17:01:36 | 188,727,763 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,403 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import render
# Create your views here.
from django.http import HttpResponse,HttpResponseRedirect
from models import Usuarios,Registro
import json
from dicttoxml import dicttoxml
from django.core import serializers
from datetime import date, datetime
from decorators import user_is_entry_author
from reportlab.pdfgen import canvas
from reportlab.lib.pagesizes import letter
from reportlab.pdfbase import pdfmetrics
from reportlab.pdfbase.ttfonts import TTFont
from datetime import datetime, timedelta
def index(request):
return render(request, 'home.html', {})
def registro(request):
if request.method == 'POST':
r = Registro()
nombre = request.POST.get('nombre')
correo = request.POST.get('correo')
carrera = request.POST.get('carrera')
nocontrol = request.POST.get('nocontrol')
r.nombre = nombre
r.correo = correo
r.fecha = datetime.now()
r.carrera = carrera
r.nocontrol = nocontrol
r.save()
return render(request, 'registrado.html',{})
return render(request, 'registro.html',{})
def registros(request):
registros = Registro.objects.filter().order_by('-fecha')
return render(request, 'registros.html', {"registros":registros})
def login(request):
if request.method == 'POST':
username = request.POST.get("username")
password = request.POST.get("password")
if password == 'admin' and username == 'admin':
request.session['name'] = {"username":username,"admin":True}
return render(request, 'login.html', {"session_var":request.session})
return render(request, 'login.html', {"session_var":request.session})
def logout(request):
request.session['name'] = None
return HttpResponseRedirect('/portal')
@user_is_entry_author
def admins(request):
action = request.GET.get('accion')
if action == 'borrar':
id = request.GET.get("id")
Registro.objects.filter(pk=id).delete()
if action == 'generarpdf':
id = request.GET.get("idd")
reg = Registro.objects.filter(pk=id).first()
response = HttpResponse(content_type='application/pdf')
response['Content-Disposition'] = 'attachment; filename="itt.pdf"'
doc = canvas.Canvas(response)
#doc = canvas.Canvas("form.pdf", pagesize=letter)
doc.setLineWidth(.3)
doc.setFont('Helvetica', 12)
#-----------------------#
# | | #
# | | #
#-----------------------#
# cuadro grande
doc.drawString(300,750 , 'Formato de solicitud de')
doc.drawString(300,740 , 'mantenimiento correctivo:')
doc.line(5,790,590,790)
doc.line(5,700,590,700)
# cuadritos
doc.line(250,790,250,700)
doc.line(450,790,450,700)
doc.line(450,770,590,770)
doc.setFont('Helvetica', 8)
doc.drawString(460,780 , 'Responsable: Centro de computo')
doc.line(450,750,590,750)
doc.line(450,730,590,730)
# lineas divisorias
doc.line(5,790,5,700)
doc.line(590,790,590,700)
#-----------------------#
# | #
# | #
#-----------------------#
# cuadro grande
doc.line(5,690,590,690)
doc.line(5,600,590,600)
# cuadritos
doc.setFont('Helvetica', 8)
doc.drawString(30,674 , 'DATOS DE SOLICITANTE:')
doc.drawString(120,662 , str(reg.nombre))
doc.drawString(325,674 , 'REGISTRO DE LA SOLICITUD:')
doc.drawString(350,654 , 'Folio:')
doc.drawString(325,640 , 'Fecha:')
doc.drawString(325,620 , 'Ingeniero de soporte asignado:')
doc.drawString(10,662 , 'Nombre de usuario:')
doc.line(5,670,595,670)
doc.drawString(10,652 , 'Departamento:')
doc.line(5,660,300,660)
doc.drawString(120,652 , str(reg.carrera))
doc.drawString(10,642 , 'No de control:')
doc.line(5,650,300,650)
doc.drawString(120,642 , str(reg.nocontrol))
doc.drawString(10,632 , 'Nombre del jefe inmediato:')
doc.line(5,640,300,640)
doc.drawString(10,622 , 'No de serie del equipo:')
doc.line(5,630,595,630)
doc.drawString(10,612 , 'No de inventario del equipo:')
doc.line(5,620,300,620)
doc.line(5,610,300,610)
# lineas divisorias
doc.line(5,690,5,600)
doc.line(590,690,590,600)
doc.line(300,690,300,600)
# cuadro mas grande de abajo
doc.drawString(200,590 , 'INFORMACION DEL SERVICIO SOLICITADO:')
doc.line(5,600,590,600)
doc.line(5,600,5,30)
doc.line(5,30,590,30)
doc.line(590,600,590,30)
doc.line(5,580,590,580)
#ilera 1
cx1 = 20
cy1 = 560
cx2 = 25
cy2 = 560
doc.drawString(35,555 , 'EQUIPO DE COMPUTO:')
doc.drawString(35,550 , 'no enciende')
doc.line(cx1,cy1,cx2,cy2)
doc.line(cx1,cy1,cx2-5,cy2-5)
doc.line(cx1,cy1-5,cx2,cy2-5)
doc.line(cx1+5,cy1,cx2,cy2-5)
cx1
cy1 -= 40
cx2
cy2 -= 40
doc.drawString(35,515 , 'EQUIPO DE COMPUTO:')
doc.drawString(35,509 , 'configurar/instalar')
doc.line(cx1,cy1,cx2,cy2)
doc.line(cx1,cy1,cx2-5,cy2-5)
doc.line(cx1,cy1-5,cx2,cy2-5)
doc.line(cx1+5,cy1,cx2,cy2-5)
cx1
cy1 -= 40
cx2
cy2 -= 40
doc.drawString(35,475 , 'EQUIPO DE COMPUTO:')
doc.drawString(35,470 , 'reubicar')
doc.line(cx1,cy1,cx2,cy2)
doc.line(cx1,cy1,cx2-5,cy2-5)
doc.line(cx1,cy1-5,cx2,cy2-5)
doc.line(cx1+5,cy1,cx2,cy2-5)
cx1
cy1 -= 40
cx2
cy2 -= 40
doc.line(cx1,cy1,cx2,cy2)
doc.drawString(35,435 , 'CPU:')
doc.drawString(35,428 , 'Se reinicia/ esta lento/ ase ruido')
doc.drawString(35,419 , 'se apaga/ muestra pantalla azul')
doc.drawString(35,412 , 'No arranca sistema operativo')
doc.line(cx1,cy1,cx2-5,cy2-5)
doc.line(cx1,cy1-5,cx2,cy2-5)
doc.line(cx1+5,cy1,cx2,cy2-5)
cx1
cy1 -= 40
cx2
cy2 -= 40
doc.drawString(35,395 , 'IMPRESORA:')
doc.line(cx1,cy1,cx2,cy2)
doc.line(cx1,cy1,cx2-5,cy2-5)
doc.line(cx1,cy1-5,cx2,cy2-5)
doc.line(cx1+5,cy1,cx2,cy2-5)
cx1
cy1 -= 40
cx2
cy2 -= 40
doc.drawString(35,355 , 'UNIDAD DE CD:')
doc.line(cx1,cy1,cx2,cy2)
doc.line(cx1,cy1,cx2-5,cy2-5)
doc.line(cx1,cy1-5,cx2,cy2-5)
doc.line(cx1+5,cy1,cx2,cy2-5)
cx1
cy1 -= 40
cx2
cy2 -= 40
doc.drawString(35,315 , 'CONTRASENA DE USUARIO:')
doc.line(cx1,cy1,cx2,cy2)
doc.line(cx1,cy1,cx2-5,cy2-5)
doc.line(cx1,cy1-5,cx2,cy2-5)
doc.line(cx1+5,cy1,cx2,cy2-5)
cx1 = 20
cy1 = 80
cx2 = 25
cy2 = 80
doc.drawString(35,80 , 'OTROS:')
doc.line(cx1,cy1,cx2,cy2)
doc.line(cx1,cy1,cx2-5,cy2-5)
doc.line(cx1,cy1-5,cx2,cy2-5)
doc.line(cx1+5,cy1,cx2,cy2-5)
# ilera 2
cx1 = 220
cy1 = 560
cx2 = 225
cy2 = 560
doc.drawString(230,555 , 'CARPETAS:')
doc.drawString(230,550 , 'compartir/ problemas de acceso/permisos:')
doc.line(cx1,cy1,cx2,cy2)
doc.line(cx1,cy1,cx2-5,cy2-5)
doc.line(cx1,cy1-5,cx2,cy2-5)
doc.line(cx1+5,cy1,cx2,cy2-5)
cx1
cy1 -= 40
cx2
cy2 -= 40
doc.drawString(230,515 , 'INFORMACIO:')
doc.drawString(230,510 , 'acceso/Recuperar/ respaldar')
doc.line(cx1,cy1,cx2,cy2)
doc.line(cx1,cy1,cx2-5,cy2-5)
doc.line(cx1,cy1-5,cx2,cy2-5)
doc.line(cx1+5,cy1,cx2,cy2-5)
cx1
cy1 -= 40
cx2
cy2 -= 40
doc.drawString(230,475 , 'MONITOR:')
doc.drawString(230,470 , 'sin senal/con lineas/instable')
doc.line(cx1,cy1,cx2,cy2)
doc.line(cx1,cy1,cx2-5,cy2-5)
doc.line(cx1,cy1-5,cx2,cy2-5)
doc.line(cx1+5,cy1,cx2,cy2-5)
cx1
cy1 -= 40
cx2
cy2 -= 40
doc.drawString(230,435 , 'TECLADO:')
doc.drawString(230,430 , 'revicion / limpieza')
doc.line(cx1,cy1,cx2,cy2)
doc.line(cx1,cy1,cx2-5,cy2-5)
doc.line(cx1,cy1-5,cx2,cy2-5)
doc.line(cx1+5,cy1,cx2,cy2-5)
cx1
cy1 -= 40
cx2
cy2 -= 40
doc.drawString(230,395 , 'MOUSE:')
doc.drawString(230,390 , 'revicion/ adquisicion')
doc.line(cx1,cy1,cx2,cy2)
doc.line(cx1,cy1,cx2-5,cy2-5)
doc.line(cx1,cy1-5,cx2,cy2-5)
doc.line(cx1+5,cy1,cx2,cy2-5)
cx1
cy1 -= 40
cx2
cy2 -= 40
doc.drawString(230,355 , 'DICTAMEN TECNICO:')
doc.drawString(230,350 , 'baja/ adquisicion')
doc.line(cx1,cy1,cx2,cy2)
doc.line(cx1,cy1,cx2-5,cy2-5)
doc.line(cx1,cy1-5,cx2,cy2-5)
doc.line(cx1+5,cy1,cx2,cy2-5)
cx1
cy1 -= 40
cx2
cy2 -= 40
doc.drawString(230,315 , 'ANTIVIRUS:')
doc.drawString(230,310 , 'instalar/actualizar/eliminar')
doc.line(cx1,cy1,cx2,cy2)
doc.line(cx1,cy1,cx2-5,cy2-5)
doc.line(cx1,cy1-5,cx2,cy2-5)
doc.line(cx1+5,cy1,cx2,cy2-5)
# ilera 3
cx1 = 420
cy1 = 560
cx2 = 425
cy2 = 560
doc.drawString(430,555 , 'INTERNET:')
doc.drawString(430,550 , 'sin acceso/ falata/ lento:')
doc.line(cx1,cy1,cx2,cy2)
doc.line(cx1,cy1,cx2-5,cy2-5)
doc.line(cx1,cy1-5,cx2,cy2-5)
doc.line(cx1+5,cy1,cx2,cy2-5)
cx1
cy1 -= 40
cx2
cy2 -= 40
doc.drawString(430,515 , 'INTERNET:')
doc.drawString(430,510 , 'acceso apagina web')
doc.drawString(430,505 , 'acceso a sistema SII')
doc.line(cx1,cy1,cx2,cy2)
doc.line(cx1,cy1,cx2-5,cy2-5)
doc.line(cx1,cy1-5,cx2,cy2-5)
doc.line(cx1+5,cy1,cx2,cy2-5)
cx1
cy1 -= 40
cx2
cy2 -= 40
doc.drawString(430,475 , 'DIRECCION IP:')
doc.drawString(430,470 , 'duplicada/ asignar/configurar')
doc.line(cx1,cy1,cx2,cy2)
doc.line(cx1,cy1,cx2-5,cy2-5)
doc.line(cx1,cy1-5,cx2,cy2-5)
doc.line(cx1+5,cy1,cx2,cy2-5)
cx1
cy1 -= 40
cx2
cy2 -= 40
doc.drawString(430,435 , 'CORREO INSTITUCIONAL ')
doc.drawString(430,430 , 'ELETRONICO: ')
doc.line(cx1,cy1,cx2,cy2)
doc.line(cx1,cy1,cx2-5,cy2-5)
doc.line(cx1,cy1-5,cx2,cy2-5)
doc.line(cx1+5,cy1,cx2,cy2-5)
cx1
cy1 -= 40
cx2
cy2 -= 40
doc.drawString(430,395 , 'PORTAL:')
doc.drawString(430,390 , 'tectijuana.edu.mx')
doc.line(cx1,cy1,cx2,cy2)
doc.line(cx1,cy1,cx2-5,cy2-5)
doc.line(cx1,cy1-5,cx2,cy2-5)
doc.line(cx1+5,cy1,cx2,cy2-5)
cx1
cy1 -= 40
cx2
cy2 -= 40
doc.drawString(430,355 , 'SOFTWARE:')
doc.drawString(430,350 , 'instalar/revisar')
doc.drawString(430,345 , 'configurar/ actualizar')
doc.line(cx1,cy1,cx2,cy2)
doc.line(cx1,cy1,cx2-5,cy2-5)
doc.line(cx1,cy1-5,cx2,cy2-5)
doc.line(cx1+5,cy1,cx2,cy2-5)
print "test"
doc.save()
return response
if action == 'generarxml':
qs_json = []
registros = Registro.objects.filter()
for q in registros:
qs_json.append({
"name":q.nombre,
"nocontrol":q.nocontrol,
"carrera":q.carrera
})
xml = dicttoxml(qs_json, custom_root='registros', attr_type=False)
return HttpResponse(xml, content_type='text/xml')
return render(request, 'admin.html')
| [
"chrisivanortega@gmail.com"
] | chrisivanortega@gmail.com |
3f1e7c2be5e4aad81dc3c4cc8973865624a09628 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03645/s021155194.py | 844491319f19594eb094e23da7af0f647cb6eb7c | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 370 | py | n, m = map(int, input().split())
root_map = dict()
root_map[1] = set()
root_map[n] = set()
for i in range(m):
a, b = map(int, input().split())
if a == 1 or a == n:
root_map[a].add(b)
if b == 1 or b == n:
root_map[b].add(a)
for i in root_map[1]:
if i in root_map[n]:
print("POSSIBLE")
break
else:
print("IMPOSSIBLE")
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
cbdd7d86ad60323cff773157e6c5e90ac6b311d9 | 588a76323a92adbd0014adf23ff668416c47afe2 | /pythonDataPreproccessing/chapter_4_DataPreproccessing/4-4_line_rate_construct.py | b4f241861385db840f9cd900a0c51a0f65d3d10b | [] | no_license | QiuSYang/machine_learning | f696458e4f7d46704b02974497ddd01dcdef8356 | f617707236015f9ad7df0c54e8b8a086a165053a | refs/heads/master | 2021-04-02T03:30:47.920782 | 2021-02-24T06:33:59 | 2021-02-24T06:33:59 | 248,239,140 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 429 | py | #-*- coding: utf-8 -*-
#线损率属性构造
import pandas as pd
#参数初始化
inputfile= './datasets/electricity_data.xls' #供入供出电量数据
outputfile = './datasets/electricity_data_new.xls' #属性构造后数据文件
data = pd.read_excel(inputfile) #读入数据
data[u'线损率'] = (data[u'供入电量'] - data[u'供出电量'])/data[u'供入电量']
data.to_excel(outputfile, index = False) #保存结果
| [
"15962230327@163.com"
] | 15962230327@163.com |
99221f1a5e39ed957ef5ed3c5e9d9f75b4b1f8be | 0b1438bcdb152a6c1135a890ad5a4b13470a4110 | /Stepik/ListOfNumbersByItsCounts.py | 08bca2209bc7ce1594724a89467dc1df951ad9cb | [] | no_license | IuriiBaikov/Python_Education | 8b73d96de7973891d579b596297a67ee9973f880 | 64a73fe3bd1b1e4785daf531a8b23ad2d38ceb39 | refs/heads/master | 2020-03-21T18:41:25.586087 | 2018-07-02T15:20:38 | 2018-07-02T15:20:38 | 138,907,526 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,274 | py | inputCount = int(input())
iterator = 1
outputList = []
while iterator <= inputCount:
for x in range(iterator):
outputList.append(iterator)
iterator += 1
for iterator in range(inputCount):
print(int(outputList[iterator]), end="")
# Напишите программу, которая считывает с консоли числа (по одному в строке) до тех пор, пока сумма введённых чисел не будет равна 0 и сразу после этого выводит сумму квадратов всех считанных чисел.
# Гарантируется, что в какой-то момент сумма введённых чисел окажется равной 0, после этого считывание продолжать не нужно.
# В примере мы считываем числа 1, -3, 5, -6, -10, 13; в этот момент замечаем, что сумма этих чисел равна нулю и выводим сумму их квадратов, не обращая внимания на то, что остались ещё не прочитанные значения.
# Sample Input:
# 1
# -3
# 5
# -6
# -10
# 13
# 4
# -8
# Sample Output:
# 340 | [
"medvedyc@gmail.com"
] | medvedyc@gmail.com |
7c72c43070e4acce3ead564e4440967fa1d1feb2 | fbe6049056c594356a4cc117db48c62d3b08d940 | /users/migrations/0001_initial.py | 07944a1e78764cb1b3da7122c1adfbb7f1c36d76 | [] | no_license | travel-sniper/travel-sniper | 28cb35d887c793cf1e751bb0647f3577d28bf3e7 | fce2c1b4dc4c9d1f7b0a96867afb96213f3c63a0 | refs/heads/master | 2023-04-29T19:53:53.202380 | 2019-08-13T19:16:16 | 2019-08-13T19:16:16 | 201,499,846 | 0 | 0 | null | 2023-04-21T20:35:49 | 2019-08-09T16:03:11 | HTML | UTF-8 | Python | false | false | 2,867 | py | # Generated by Django 2.2.3 on 2019-07-10 19:28
import django.contrib.auth.models
import django.contrib.auth.validators
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0011_update_proxy_permissions'),
]
operations = [
migrations.CreateModel(
name='CustomUser',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')),
('first_name', models.CharField(blank=True, max_length=30, verbose_name='first name')),
('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')),
('email', models.EmailField(blank=True, max_length=254, verbose_name='email address')),
('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'verbose_name_plural': 'users',
'verbose_name': 'user',
'abstract': False,
},
managers=[
('objects', django.contrib.auth.models.UserManager()),
],
),
]
| [
"marco@X303"
] | marco@X303 |
d29107f3723074523c4eb7d4bfc103ebcad4be7d | bfd21d41e2faa9419871e8b67fca13b0b21d5146 | /star_pattern_right_pyramid.py | 4393eb148336fd5b4ee17a922911bc3731e4e813 | [] | no_license | harshalkumeriya/Practice-Coding | 6a02c576bdfba0711bda6d65d9f7c81e12c80a2a | 3deba229d210f45a4abb94d4016c3bd342d8057d | refs/heads/master | 2022-11-16T00:58:19.155607 | 2020-07-08T05:44:17 | 2020-07-08T05:44:17 | 277,999,558 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 91 | py | n = int(5)
for i in range(1,n+1):
print((n - i) * ' ', end = '')
print((i*'*'))
| [
"harshal.kumeriya183@gmail.com"
] | harshal.kumeriya183@gmail.com |
cb7d9dc5b51d2dec2a71ba930093a556b73d551a | 2537756cb251008eacd247db82a26c43967e1381 | /backend/api/v1beta1/python_http_client/kfp_server_api_v1beta1/api/healthz_service_api.py | 3df82be4ce94b2cc598fc68c18a0079435e393e2 | [
"Apache-2.0"
] | permissive | TheMichaelHu/pipelines | 074edf3b08e2877a996061bacdf4b9563270cc5b | 4c6abe5afcc1b0511f0d0ab91b7ec4522da5a1df | refs/heads/master | 2022-12-10T18:34:08.540736 | 2022-11-02T00:31:45 | 2022-11-02T00:31:45 | 241,268,119 | 0 | 0 | Apache-2.0 | 2020-02-18T04:00:31 | 2020-02-18T04:00:30 | null | UTF-8 | Python | false | false | 5,374 | py | # coding: utf-8
"""
Kubeflow Pipelines API
This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition.
Contact: kubeflow-pipelines@google.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from kfp_server_api_v1beta1.api_client import ApiClient
from kfp_server_api_v1beta1.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class HealthzServiceApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def get_healthz(self, **kwargs): # noqa: E501
"""Get healthz data. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_healthz(async_req=True)
>>> result = thread.get()
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: V1beta1GetHealthzResponse
"""
kwargs['_return_http_data_only'] = True
return self.get_healthz_with_http_info(**kwargs) # noqa: E501
def get_healthz_with_http_info(self, **kwargs): # noqa: E501
"""Get healthz data. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_healthz_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(V1beta1GetHealthzResponse, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_healthz" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/apis/v1beta1/healthz', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1GetHealthzResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
| [
"noreply@github.com"
] | noreply@github.com |
37879554ca69ceec2c2b1e5c49b2ad93e0e5788a | b0c024fbc86bbb7a351a0eab4d7e400a8f7d020d | /Day-02/codes/leapyear.py | 5d9740889d212396c35483379a7114a255ed569d | [] | no_license | jdee77/100DaysOfPython | 381dad179e01b660dfc8c065e5e0cead7c1ccca4 | c5dd14aea9e2042076d9057d8cfbe1db81fbb848 | refs/heads/main | 2023-05-12T19:46:22.014127 | 2021-05-30T12:13:57 | 2021-05-30T12:13:57 | 371,087,592 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 427 | py | print("LEAP YEAR")
year = int(input("Enter the year :"))
leapYear = False
# a year is leap year if its evenly divisible by 4
# except evenly divisible by 100
# unless not divisible by 400
if year % 100 == 0:
if year % 400 == 0:
leapYear = True
else:
if year % 4 == 0:
leapYear = True
if leapYear:
print(f"{year} is leap year.")
else:
print(f"{year} is not a leap year.") | [
"noreply@github.com"
] | noreply@github.com |
d8220c1a931d3e11b15c1e71a3741d7958e4b74c | e50be2bff113a2fbf6755bf2fdbae0f6c841835a | /utils.py | 826be00c746bf0b8c513e08d59c062116dd11733 | [] | no_license | afrobeard/demo | b254bab2743fab02f432f5b95d3283c936d10e14 | dc8edef3af0607716ad2bd48f641ba933a433267 | refs/heads/master | 2021-01-01T16:19:19.767672 | 2013-04-07T13:19:55 | 2013-04-07T13:19:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 415 | py | import logging
from logging.handlers import RotatingFileHandler
def addFileLogger(logger_object, log_name, level=2):
rotating_file_handler = RotatingFileHandler(log_name)
rotating_file_handler.setFormatter(logging.Formatter(
'%(asctime)s %(levelname)s: %(message)s '
'[in %(pathname)s:%(lineno)d]'
))
logger_object.addHandler(rotating_file_handler)
logger_object.level = level
| [
"iqbaltalaat@gmail.com"
] | iqbaltalaat@gmail.com |
cb7d4cbbaf41c053f8e9a14a858598473cb7e0c6 | 828d18dc4338b50d1cdc3600a0418fc6b279e268 | /locallibrary/catalog/migrations/0004_auto_20190308_1337.py | 6859b19d729128c86aa12505f4d7820124ddb87e | [] | no_license | dmm4613/w4-MDN-django-tutorial | 614659378761fb1155ef9f10440f9e8c28f8e18e | 57a705aebcca82bd3fbd2f28d50bd37fd2a9339b | refs/heads/master | 2020-04-26T19:38:36.685199 | 2019-03-09T02:59:20 | 2019-03-09T02:59:20 | 173,781,696 | 0 | 0 | null | 2019-03-04T16:33:01 | 2019-03-04T16:33:01 | null | UTF-8 | Python | false | false | 423 | py | # Generated by Django 2.1.7 on 2019-03-08 18:37
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('catalog', '0003_bookinstance_borrower'),
]
operations = [
migrations.AlterModelOptions(
name='bookinstance',
options={'ordering': ['due_back'], 'permissions': (('can_mark_returned', 'Set book as returned'),)},
),
]
| [
"dmm4613@gmail.com"
] | dmm4613@gmail.com |
ce130fabb65adf7bb4667f876aa1dff1d65a672d | 0d67ec5869b0c2aa20f1989563cf504afe2fca4b | /python_stack/Django/Users/apps/users/models.py | e973146ad5fc13cf3b4dacd9180c5377453d5a53 | [] | no_license | starxfighter/Python | 17a9bdfbaaa41dc7bea16bc216a24e04860d551a | 9e053e0d41ee8d7ed0159923147951de789f90d6 | refs/heads/master | 2022-11-11T16:52:20.007227 | 2018-04-30T17:16:31 | 2018-04-30T17:16:31 | 127,670,297 | 0 | 1 | null | 2022-10-25T03:47:23 | 2018-04-01T20:51:26 | Python | UTF-8 | Python | false | false | 416 | py | from __future__ import unicode_literals
from django.db import models
class Users(models.Model):
first_name = models.CharField(max_length=255)
last_name = models.CharField(max_length=255)
email_address = models.CharField(max_length=255)
age=models.IntegerField(default=0)
created_at = models.DateTimeField(auto_now_add = True)
update_at = models.DateTimeField(auto_now = True)
| [
"dosburn@rocketmail.com"
] | dosburn@rocketmail.com |
9e6bd5b2d0d96c3f8b22a5041c619c8588645fcd | 4b90c356173952cb3a645f5a4024e1de50f91e12 | /polls/migrations/0001_initial.py | bd72ff7775edd83be049571d3b1466bd584b496f | [
"MIT"
] | permissive | marcopuccio/mpbb | 1aa5549f82eb7c949092c542e249ce89e58a490b | 18e303308865493886af7667c79720eee766641c | refs/heads/master | 2021-10-20T03:58:22.693558 | 2019-02-25T16:12:34 | 2019-02-25T16:12:34 | 64,941,544 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,341 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2016-08-04 04:29
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Choice',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('choice_text', models.CharField(max_length=200)),
('votes', models.IntegerField(default=0)),
],
),
migrations.CreateModel(
name='Question',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('question_text', models.CharField(max_length=200)),
('pub_date', models.DateTimeField(default=datetime.datetime.now, verbose_name='date published')),
('total_votes', models.IntegerField(default=0)),
],
),
migrations.AddField(
model_name='choice',
name='question',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='polls.Question'),
),
]
| [
"mpuccio90@gmail.com"
] | mpuccio90@gmail.com |
b8ea6089fbf982c699ef0f102f4a0842d32f6a53 | 24caa6710105a060fab2e17147e6d56609939011 | /03-Python_Data_Science_Toolbox_(Part_1)/02-Default_arguments,_variable-length_arguments_and_scope/01-Pop_quiz_on_understanding_scope.py | 359f983d8c5327a8ca9e09fa52071fdeceb8fece | [] | no_license | inverseundefined/DataCamp | 99607022ad3f899d7681ad1f70fcedab290e269a | 7226b6b6f41888c3610a884db9a226e013d37e56 | refs/heads/master | 2022-01-10T00:53:21.714908 | 2019-07-24T13:27:49 | 2019-07-24T13:27:49 | 198,280,648 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,022 | py | '''
Pop quiz on understanding scope
In this exercise, you will practice what you've learned about scope in functions. The variable num has been predefined as 5, alongside the following function definitions:
def func1():
num = 3
print(num)
def func2():
global num
double_num = num * 2
num = 6
print(double_num)
Try calling func1() and func2() in the shell, then answer the following questions:
What are the values printed out when you call func1() and func2()?
What is the value of num in the global scope after calling func1() and func2()?
Instructions
50 XP
Possible Answers
func1() prints out 3, func2() prints out 6, and the value of num in the global scope is 3.
func1() prints out 3, func2() prints out 3, and the value of num in the global scope is 3.
func1() prints out 3, func2() prints out 10, and the value of num in the global scope is 10.
-> func1() prints out 3, func2() prints out 10, and the value of num in the global scope is 6.
Take Hint (-15 XP)
'''
| [
"inversedrivenundefined@gmail.com"
] | inversedrivenundefined@gmail.com |
7decda344d3588c536191fa56afabfb8700e9140 | 7821eb838dcf8540efe7348c784359fbf959a8ab | /stacktester/tests/test_server_addresses.py | e9a325a6644806fbf824d14f18d6ba56967e7ab9 | [] | no_license | ameade/stacktester | 7162282e1ddfe286bed7a559cad526b615036bd1 | 9bf949106b801550c990d2db55790a74c22a02c4 | refs/heads/master | 2020-12-24T17:36:12.956807 | 2011-07-06T13:29:34 | 2011-07-06T13:29:34 | 1,937,352 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,853 | py |
import json
import os
import unittest2 as unittest
from stacktester import openstack
from stacktester import exceptions
class ServerAddressesTest(unittest.TestCase):
@classmethod
def setUpClass(self):
self.os = openstack.Manager()
self.image_ref = self.os.config.env.image_ref
self.flavor_ref = self.os.config.env.flavor_ref
def setUp(self):
server = {
'name' : 'testserver',
'imageRef' : self.image_ref,
'flavorRef' : self.flavor_ref,
}
created_server = self.os.nova.create_server(server)
self.server_id = created_server['id']
self.os.nova.wait_for_server_status(self.server_id, 'ACTIVE')
def tearDown(self):
self.os.nova.delete_server(self.server_id)
def test_list_addresses(self):
"""Ensure address information is available"""
url = '/servers/%s' % self.server_id
response, body = self.os.nova.request('GET', url)
self.assertEqual(response.status, 200)
_body = json.loads(body)
self.assertTrue('addresses' in _body['server'].keys())
# KNOWN-ISSUE lp761652
#self.assertEqual(_body['server']['addresses'].keys(), ['private'])
url = '/servers/%s/ips' % self.server_id
response, body = self.os.nova.request('GET', url)
# KNOWN-ISSUE lp761652
#self.assertEqual(response.status, 200)
#_body = json.loads(body)
#self.assertEqual(_body.keys(), ['addresses'])
#self.assertEqual(_body['addresses'].keys(), ['private'])
url = '/servers/%s/ips/private' % self.server_id
response, body = self.os.nova.request('GET', url)
# KNOWN-ISSUE lp761652
#self.assertEqual(response.status, 200)
#_body = json.loads(body)
#self.assertEqual(_body.keys(), ['private'])
| [
"brian.waldon@rackspace.com"
] | brian.waldon@rackspace.com |
725d302931f49195c6ccb7617c31d139349b1e00 | 70d8bc92825682a0ff975ae2972352dbe57e1087 | /move.py | c57381fb388d8e166561a086c610f862dd9fceee | [] | no_license | cmkolb8/112-Term-Project | c501394bba158204c6dfad2ecaabee42f4c0b21d | fa268e8e9071c4063d350fb96e2ead3c9895f171 | refs/heads/master | 2023-01-29T14:11:54.301039 | 2020-12-09T17:00:03 | 2020-12-09T17:00:03 | 320,020,096 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,077 | py | import math
#This file moves the player when the arrow keys are pressed
#algorithm learned from https://lodev.org/cgtutor/raycasting3.html
def left(mode):
yDir = mode.yDir
xDir = mode.xDir
#uses rotation matrix in order to rotate
mode.yDir = mode.xDir * math.sin(mode.rotate) + mode.yDir * math.cos(mode.rotate)
mode.xDir = mode.xDir * math.cos(mode.rotate) - yDir * math.sin(mode.rotate)
planex = mode.xCameraPlane
#changes plane with the rotation matrix
mode.xCameraPlane = mode.xCameraPlane * math.cos(mode.rotate) - mode.yCameraPlane * math.sin(mode.rotate)
mode.yCameraPlane = planex * math.sin(mode.rotate) + mode.yCameraPlane * math.cos(mode.rotate)
#algorithm learned from https://lodev.org/cgtutor/raycasting3.html
def right(mode):
yDir = mode.yDir
xDir = mode.xDir
mode.yDir = mode.xDir * math.sin(-1 * mode.rotate) + mode.yDir * math.cos(-1 * mode.rotate)
mode.xDir = mode.xDir * math.cos(-1 * mode.rotate) - yDir * math.sin(-1 * mode.rotate)
planex = mode.xCameraPlane
mode.xCameraPlane = mode.xCameraPlane * math.cos(-1 * mode.rotate) - mode.yCameraPlane * math.sin(-1 * mode.rotate)
mode.yCameraPlane = planex * math.sin(-1 * mode.rotate) + mode.yCameraPlane * math.cos(-1 * mode.rotate)
#algorithm learned from https://lodev.org/cgtutor/raycasting3.html
def up(mode):
#checks if there is a block ahead, if not moves forward
if(mode.map[int(mode.xPos + mode.xDir * mode.speed)][int(mode.yPos)] == 0):
mode.xPos += mode.xDir * mode.speed
if(mode.map[int(mode.xPos)][int(mode.yPos + mode.yDir * mode.speed)] == 0):
mode.yPos += mode.yDir * mode.speed
#algorithm learned from https://lodev.org/cgtutor/raycasting3.html
def down(mode):
#checks if there is a block behind, if not moves backward
if(mode.map[int(mode.xPos - mode.xDir * mode.speed)][int(mode.yPos)] == 0):
mode.xPos -= mode.xDir * mode.speed
if(mode.map[int(mode.xPos)][int(mode.yPos - mode.yDir * mode.speed)] == 0):
mode.yPos -= mode.yDir * mode.speed
| [
"caitlinkolb@Caitlins-MBP.attlocal.net"
] | caitlinkolb@Caitlins-MBP.attlocal.net |
ebc40e7127a4ef794cb3fda51781c11a38c0c8f7 | 79b3cbfc92b476bcf49fbd79f19f24442035448d | /accounts/models.py | 96bedd4705e0e1451e0fc1ec176568f6e402504b | [] | no_license | ScottWoodbyrne/blog-site | 1a1679e8c51a484189ef56d46858852660c03080 | e871b6d22d6aef9c231bfc14ed48b0fba670850e | refs/heads/master | 2020-04-06T08:04:25.713014 | 2016-09-01T12:51:59 | 2016-09-01T12:51:59 | 65,009,690 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,265 | py | from __future__ import unicode_literals
from django.contrib.auth.models import AbstractUser, UserManager
from django.db import models
from django.utils import timezone
# Create your models here.
class AccountUserManager(UserManager):
def _create_user(self, username, email, password, is_staff, is_superuser, **extra_fields):
now = timezone.now()
if not email:
raise ValueError('The given username must be set')
email = self.normalize_email(email)
user = self.model(username=email, email=email, is_staff=is_staff, is_active=True,is_superuser=is_superuser,date_joined=now,**extra_fields)
user.set_password(password)
user.save(using=self._db)
return user
class User(AbstractUser):
stripe_id = models.CharField(max_length=40, default='')
subscription_end = models.DateTimeField(default=timezone.now)
cancel_at_period_end = models.BooleanField(default= False)
objects = AccountUserManager()
def is_subscribed(self, magazine):
try:
purchase = self.purchases.get(magazine__pk=magazine.pk)
except Exception:
return False
if purchase.subscription_end > timezone.now():
return False
return True
| [
"scott.woodbyrne@gmail.com"
] | scott.woodbyrne@gmail.com |
8e8665d33a8f3df1f93560af494176e055b876a4 | 81207a57ae84b2b786b373d9eaa89e04ca662473 | /scripts/update_index.py | b4fa72f135adfbde5960f9e2c3f51b20f42df2a6 | [
"MIT"
] | permissive | ncarkaci/acoustid-server | 9a9187db34c25a4eedbe297564f9d13f05b9c907 | bb0098016d210be8d04ee64d9b42ed80bb947280 | refs/heads/master | 2020-07-22T18:25:46.258746 | 2019-09-05T11:05:01 | 2019-09-05T11:05:01 | 207,288,602 | 1 | 0 | MIT | 2019-09-09T10:58:51 | 2019-09-09T10:58:51 | null | UTF-8 | Python | false | false | 421 | py | #!/usr/bin/env python
# Copyright (C) 2011-2012 Lukas Lalinsky
# Distributed under the MIT license, see the LICENSE file for details.
from contextlib import closing
from acoustid.script import run_script
from acoustid.data.fingerprint import update_fingerprint_index
def main(script, opts, args):
with closing(script.engine.connect()) as db:
update_fingerprint_index(db, script.index)
run_script(main)
| [
"lalinsky@gmail.com"
] | lalinsky@gmail.com |
8c0cb938f99334d265cbad6a0c70bae39c93449d | 2030fe0cc1c938024b8fd9662a3f2035b5ec80dc | /aula1_ex3.py | 4001d3086458b5f6ce02df718ee0fb29024c0c4a | [] | no_license | luannaserqueira/Python | 0f26e67dd9f88d4e80a6e977e641ae390dd880f8 | 799e051389786dd667a399f68cb153bbd1645856 | refs/heads/master | 2020-04-02T20:25:42.696230 | 2018-10-26T17:23:06 | 2018-10-26T17:23:06 | 154,768,787 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 260 | py | # Aula 1 ex3
# Ache os zeros da função: y=3x^2-4x-10
a=3
b=-4
c=-10
delta=b**2-(4*a*c)
print(delta)
x1=(-b+(math.sqrt(delta)))/2*a
print(x1)
x2=(-b-(math.sqrt(delta)))/2*a
print(x2)
print('Os zeros da função são ',x1,' e ',x2,'')
| [
"noreply@github.com"
] | noreply@github.com |
24524f83587d385ff97aec5e49d9379dfb3f883b | b8085ef607da70023214f105eb27bdbc713e596f | /Day2/Slots.py | db6ff0a4f8e7383e149a01736bdb559e14f236c2 | [] | no_license | artheadsweden/python_adv_april19 | 893c9ec76e8505a580439b7a2fd7aa2776503c77 | 04eecd25d4a291dddd608d94968b217fed7b88d8 | refs/heads/master | 2020-05-07T13:41:15.545033 | 2019-04-11T18:47:22 | 2019-04-11T18:47:22 | 180,559,955 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 682 | py | from pympler import asizeof
class NoSlots:
def __init__(self, name, identifier):
self.name = name
self.identifier = identifier
class WithSlots:
__slots__ = ['name', 'identifier']
def __init__(self, name, identifier):
self.name = name
self.identifier = identifier
def main():
no_slots = [NoSlots(str(n), n) for n in range(100_000)]
size1 = round(asizeof.asizeof(no_slots)/1024/1024, 2)
print("No slots", size1, "mb")
with_slots = [WithSlots(str(n), n) for n in range(100_000)]
size2 = round(asizeof.asizeof(with_slots)/1024/1024, 2)
print("With slots", size2, "mb")
if __name__ == '__main__':
main()
| [
"joakim@arthead.se"
] | joakim@arthead.se |
a4b5a402aeb3a7cf27f5d22baeece8d959ee1e53 | 0f33bdb8bd92699bdf671db478ddbd80e8ed1601 | /venv/Scripts/pip-script.py | aec802255ac000144cbe063eecf469b5e9688ca3 | [] | no_license | FairTraxx/SOUQ-Scrapper | 07c4985b434e6249e568aa6aac4d975f5abdd19f | 076b592925c5e871bf16b0523dceeefc9fbbd642 | refs/heads/master | 2020-09-11T13:47:23.699669 | 2019-11-16T11:41:15 | 2019-11-16T11:41:15 | 222,086,509 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 418 | py | #!"C:\Users\medoo\PycharmProjects\Souq tracker\venv\Scripts\python.exe"
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip'
__requires__ = 'pip==19.0.3'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==19.0.3', 'console_scripts', 'pip')()
)
| [
"fairtraxx@protonmail.com"
] | fairtraxx@protonmail.com |
0bbf33158003dd02f3f3be544e07a57be5a695b2 | 0a666622f5b91bfe888ac876ca70799c9e18c0e9 | /sdk/AsposeEmailCloudSdk/models/mapi_contact_get_request.py | fd60cfb1c9947962e0846fcac2ece4c5e86b9020 | [
"MIT"
] | permissive | aspose-email-cloud/aspose-email-cloud-python | fd9ad21b9de863544f9462179e29271b3f592e19 | c5c13839cbbbfa5b6617bd1aedf3cf30cd664227 | refs/heads/master | 2023-08-16T22:11:14.755974 | 2021-09-21T18:40:45 | 2021-09-21T18:40:45 | 119,378,720 | 1 | 0 | MIT | 2022-12-08T01:06:16 | 2018-01-29T12:19:40 | Python | UTF-8 | Python | false | false | 2,612 | py | # coding: utf-8
# ----------------------------------------------------------------------------
# <copyright company="Aspose" file="mapi_contact_get_request.py">
# Copyright (c) 2018-2020 Aspose Pty Ltd. All rights reserved.
# </copyright>
# <summary>
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
# </summary>
# ----------------------------------------------------------------------------
from AsposeEmailCloudSdk.models import *
class MapiContactGetRequest(object):
"""
Request model for mapi_contact_get operation.
Initializes a new instance.
:param format: Contact document format. Enum, available values: VCard, WebDav, Msg
:type format: str
:param file_name: Contact document file name.
:type file_name: str
:param folder: Path to folder in storage.
:type folder: str
:param storage: Storage name.
:type storage: str
"""
def __init__(self, format: str, file_name: str, folder: str = None, storage: str = None):
"""
Request model for mapi_contact_get operation.
Initializes a new instance.
:param format: Contact document format. Enum, available values: VCard, WebDav, Msg
:type format: str
:param file_name: Contact document file name.
:type file_name: str
:param folder: Path to folder in storage.
:type folder: str
:param storage: Storage name.
:type storage: str
"""
self.format = format
self.file_name = file_name
self.folder = folder
self.storage = storage
| [
"marat.gumerof@aspose.com"
] | marat.gumerof@aspose.com |
2b9f243412cb139759c08fda7fb3fbd777ee70aa | 68f1a981bc1a882c4417806eda0ff5731b9dc67b | /webserver-v3/app/routesv2.py | 833b7840814cd32297a95638d74f282f6fb4e942 | [
"LicenseRef-scancode-warranty-disclaimer",
"BSD-3-Clause"
] | permissive | whittinghama/comp3215 | 3ca477320708525effc43605e0cdef750e4a16f0 | 9d8b4e11214275987c9b39199018157c81e25ee1 | refs/heads/master | 2022-03-30T11:34:45.706281 | 2020-01-11T15:52:11 | 2020-01-11T15:52:11 | 224,467,824 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,900 | py | import time
import serial
from flask import render_template, request, jsonify
from app import app
#https://blog.miguelgrinberg.com/post/the-flask-mega-tutorial-part-i-hello-world
#https://blog.miguelgrinberg.com/post/the-flask-mega-tutorial-part-ii-templates
#http://www.varesano.net/blog/fabio/serial%20rs232%20connections%20python
#https://pyserial.readthedocs.io/en/latest/pyserial.html
@app.route('/')
@app.route('/index')
def index():
ser = serial.Serial(
port= 'COM8',
baudrate=115200,
#parity=serial.PARITY_ODD,
#stopbits=serial.STOPBITS_TWO,
#bytesize=serial.SEVENBITS
)
#ser.close()
#ser.open()
#ser.isOpen()
ser.write('CONCHECK' + '\n') #connection check
print('CONCHECK DONE')
time.sleep(.1)
ser.write('CONREPLY' + '\n') #connection reply
print('CONREPLY DONE')
time.sleep(.1)
while(ser.in_waiting != 0):
line = ser.readline()
if 'STATE' in line: #look for correct response line and throw away the others
response = line.split(',')
host_no = response[1]
states = response[2]
print(host_no)
print(states)
#ser.close()
startup = {'host' : host_no, 'startup_state' : states}
return render_template('index.html', startup=startup)
#print(states[0])
#print(states[1])
#data = ser2.read(2)
#if data == '66':
# ser2.write('20110')
#startup_data = ser.read(5)
#states = str(startup_data[1:5])
#print(startup_data[0])
#print(startup_data[1:5])
#startup = {'host' : startup_data[0], 'startup_state' : startup_data[1:5]}
#ser.close()
#, startup=startup, states=states
@app.route('/buttonpress',methods=['POST'])
def buttonpress():
button_id=request.args.get('button_id')
button_state=request.args.get('button_state')
#print(button_id)
#print(button_state)
if button_state == "1":
packet = 'LEDON ' + str(button_id) + "\n";
print(packet)
elif button_state == "0":
packet = 'LEDOFF ' + str(button_id) + "\n";
print(packet)
ser = serial.Serial(
port= 'COM8',
baudrate=115200,
#parity=serial.PARITY_ODD,
#stopbits=serial.STOPBITS_TWO,
#bytesize=serial.SEVENBITS
)
ser.close()
ser.open()
ser.isOpen()
flag = 1
ser.write('CONCHECK' + '\n') #connection check
time.sleep(.1)
ser.write('CONREPLY' + '\n') #connection reply
time.sleep(.1)
while(ser.in_waiting != 0):
line = ser.readline()
print(line)
if 'STATE' in line: #look for correct response line and throw away the others
response = line.split(',')
states = response[2]
host = response[1]
if states[int(button_id) - 1] == '0':
flag = 0
print('dont do it')
break
while flag == 1:
ser.write(packet)
time.sleep(.1)
while(ser.in_waiting != 0):
line = ser.readline()
print(line)
if 'AK' in line:
flag = 0
break
ser.close()
ser.open()
print(flag)
#time.sleep(1)
#ser.write(packet)
#out = ''
#time.sleep(1)
#while ser.inWaiting() > 0:
#out += ser2.read(2)
#if out != '':
# print(out)
#ser2.write('1111')
#onlineoffline = ser.readline()
#print(onlineoffline)
#onlineoffline2 = ser.readline()
#print(onlineoffline2)
ser.close()
#ser2.close()
return jsonify({'reply':'success', 'id' : button_id, 'state' : button_state, 'onlineoffline' : states, 'host' : host})
| [
"noreply@github.com"
] | noreply@github.com |
2db7d19023f853c2bbac3ee8b826a77f640a2f7b | 69d82895b49a37f2acd42fd78c1e02441d9fcd40 | /blog/migrations/0004_post_views.py | 7e73e3f4661865d496f698c3743ee38e1171a4be | [] | no_license | Chandrapalsingh12/demo | ca806bb9b6ad3ee7125cbf381968e1f178eb6320 | 3752c858ed6aad95cbcce61e36744c4acd917ed7 | refs/heads/main | 2023-07-03T09:26:16.298498 | 2021-08-11T08:18:31 | 2021-08-11T08:18:31 | 394,915,044 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 377 | py | # Generated by Django 3.1.6 on 2021-03-03 11:56
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('blog', '0003_auto_20210227_0909'),
]
operations = [
migrations.AddField(
model_name='post',
name='views',
field=models.IntegerField(default=0),
),
]
| [
"cscssingh123@gmail.com"
] | cscssingh123@gmail.com |
e04f14296efc87f773affec296ff647456e771a1 | a628dfe8f012aad0d3d8bd5ccb3fac9e696b3a25 | /model_dynamic_prog.py | 8ef911b76761d5317d0d3ba2b4d2f6234fc80f8b | [] | no_license | tybian/transformer_tts | df20cc92aebed5eb9b1bec587668ae4fbf54d4c6 | 25c8a64d5b03cdbfd33646f58366643f130c73a1 | refs/heads/master | 2023-02-09T14:02:20.912552 | 2021-01-06T13:44:15 | 2021-01-06T13:44:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 37,063 | py | import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
def get_output_mask(out_flag):
    """Boolean padding mask for the decoder output sequence.

    Args:
        out_flag: (N, L) tensor of flags, 0 at padded positions.

    Returns:
        (N, L) bool tensor, True where the position is padding.
    """
    return out_flag == 0
def get_attn_mask(seq_flag):
    """Build query- and key-side attention masks from sequence flags.

    True means "masked", False means "attendable".

    Args:
        seq_flag: (N, L) tensor, 0 at padded positions.

    Returns:
        qmask: (N, L, 1) bool tensor masking padded query positions.
        kmask: (N, 1, L) bool tensor masking padded key/value positions.
    """
    padded = seq_flag == 0
    return padded.unsqueeze(-1), padded.unsqueeze(1)
def get_causal_mask(seq):
    """Mask out future positions for autoregressive (causal) attention.

    Args:
        seq: (N, _, L) tensor; only its last-dim size and device are used.

    Returns:
        (1, L, L) bool tensor, True (masked) strictly above the diagonal,
        i.e. position i may only attend to positions <= i.
    """
    size = seq.size(-1)
    upper = torch.triu(torch.ones((1, size, size), device=seq.device), diagonal=1)
    return upper.bool()
class PositionalEncoding(nn.Module):
    """Sinusoidal positional encoding with a learnable scale `alpha`."""

    def __init__(self, n_position, d_hid):
        super(PositionalEncoding, self).__init__()
        # Learnable weight on the positional term (initialized to 1).
        self.alpha = nn.Parameter(torch.ones(1))
        # Fixed (non-trainable) table; registered so it moves with the module.
        self.register_buffer(
            "pos_table", self._get_sinusoid_encoding_table(n_position, d_hid)
        )

    def _get_sinusoid_encoding_table(self, n_position, d_hid):
        # angle[p, j] = p / 10000^(2*(j//2)/d_hid), computed in float64.
        positions = np.arange(n_position, dtype=np.float64)[:, np.newaxis]
        divisors = np.power(10000, 2 * (np.arange(d_hid) // 2) / d_hid)
        table = positions / divisors
        table[:, 0::2] = np.sin(table[:, 0::2])  # even dims -> sine
        table[:, 1::2] = np.cos(table[:, 1::2])  # odd dims  -> cosine
        # (1, n_position, d_hid) float32 table.
        return torch.FloatTensor(table).unsqueeze(0)

    def forward(self, x):
        """Add (scaled) positional encodings to x of shape (N, L, d_hid)."""
        pos = self.pos_table[:, : x.size(1)].clone().detach()
        return x + self.alpha * pos

    def infer_dp(self, x, idx):
        """Add the encoding of the single position `idx` to a one-step input."""
        pos = self.pos_table[:, idx : idx + 1, :].clone().detach()
        return x + self.alpha * pos
class ScaledDotProductAttention(nn.Module):
    """Scaled dot-product attention with an optional inference-time mode
    that forces head (0, 0) to advance (near-)monotonically over the key
    axis between decoding steps.

    Args:
        temperature: divisor applied to the dot products (here sqrt(d_k),
            set by the caller).
        attn_dropout: dropout rate applied to the attention weights.
    """

    def __init__(self, temperature, attn_dropout=0.1):
        super(ScaledDotProductAttention, self).__init__()
        self.temperature = temperature
        self.dropout = nn.Dropout(attn_dropout)

    def init_prev_attn(self, memory, flag=False):
        # Reset the monotonic-forcing state before a new inference run.
        # `memory` is accepted but not used in this method.
        # `flag` enables forcing inside infer_dp; prev_pos == -1 marks
        # "no attended position chosen yet".
        self.force = flag
        self.prev_pos = -1
        self.cnt = 0  # NOTE(review): appears unused elsewhere in this class.

    def force_monotonous(self, attn, attn_energy):
        # Force a specific head attention to be monotonous.
        # Note: the head forced should be the guided head in training
        # Operates only on batch 0, head 0; other heads are left untouched.
        if self.prev_pos == -1:
            # First step: just record where the head attends.
            head_attn = attn[0, 0, :, :]
            curr_pos = torch.argmax(head_attn, dim=-1).item()
            self.prev_pos = curr_pos
        else:
            head_attn = attn[0, 0, :, :]
            head_energy = attn_energy[0, 0, :, :]
            curr_pos = torch.argmax(head_attn, dim=-1).item()
            # Allow the peak to move within [prev_pos - 1, prev_pos + 2];
            # anything outside is treated as an alignment jump.
            if (curr_pos < self.prev_pos - 1) or (curr_pos > self.prev_pos + 2):
                # Rebuild the attention row: keep the energies inside the
                # allowed window, flatten everything else to a large
                # negative value so softmax sends it to ~0.
                head_energy_new = head_energy.new_full(head_energy.size(), -10000.0)
                win_enrg = head_energy[:, self.prev_pos - 1 : self.prev_pos + 3]
                head_energy_new[:, self.prev_pos - 1 : self.prev_pos + 3] = win_enrg
                # 0.5 is a temperature for more smoothing results
                head_attn_new = F.softmax(head_energy_new * 0.5, dim=-1)
                # Write the corrected distribution back in place.
                attn[0, 0, :, :] = head_attn_new
                self.prev_pos = torch.argmax(head_attn_new, dim=-1).item()
            else:
                self.prev_pos = curr_pos
        return attn

    def forward(self, q, k, v, qmask=None, kmask=None):
        """Standard scaled dot-product attention (training path).

        q/k/v: (N, H, L, D) tensors; masks are broadcast over heads.
        """
        attn = torch.matmul(q / self.temperature, k.transpose(2, 3))

        # masking for key side: a large negative value so softmax ~ 0 there
        if kmask is not None:
            attn = attn.masked_fill(kmask, -10000.0)

        attn = F.softmax(attn, dim=-1)
        attn = self.dropout(attn)
        output = torch.matmul(attn, v)

        # masking for query side: zero out all-padding positions, only so
        # the returned attention map plots cleanly (output is already computed)
        if qmask is not None:
            attn = attn.masked_fill(qmask, 0.0)

        # output (N, H, Lq, Dv), attn (N, H, Lq, Lk)
        return output, attn

    def infer_dp(self, q, k, v, qmask=None, kmask=None):
        """Inference path: same as forward, but keeps the pre-softmax
        energies around so force_monotonous can rebuild the guided head's
        distribution when self.force is enabled (see init_prev_attn)."""
        attn_energy = torch.matmul(q / self.temperature, k.transpose(2, 3))

        # masking for key side: a large negative value so softmax ~ 0 there
        if kmask is not None:
            attn_energy = attn_energy.masked_fill(kmask, -10000.0)

        attn = F.softmax(attn_energy, dim=-1)
        attn = self.dropout(attn)

        if self.force is True:
            attn = self.force_monotonous(attn, attn_energy)
            output = torch.matmul(attn, v)
        else:
            output = torch.matmul(attn, v)

        # masking for query side: zero padded queries for clean plotting
        if qmask is not None:
            attn = attn.masked_fill(qmask, 0.0)

        # output (N, H, Lq, Dv), attn (N, H, Lq, Lk)
        return output, attn
class MultiHeadAttention(nn.Module):
    """Multi-head attention built on ScaledDotProductAttention.

    Projects queries/keys/values into `n_head` subspaces, attends in each
    head, concatenates the heads, projects back to d_model, then applies
    dropout, a residual connection and layer normalization.
    """

    def __init__(self, n_head, d_model, d_k, d_v, dropout=0.1):
        super(MultiHeadAttention, self).__init__()

        self.n_head = n_head
        self.d_k = d_k
        self.d_v = d_v

        # Head projections (no bias), fused across heads.
        self.w_qs = nn.Linear(d_model, n_head * d_k, bias=False)
        self.w_ks = nn.Linear(d_model, n_head * d_k, bias=False)
        self.w_vs = nn.Linear(d_model, n_head * d_v, bias=False)
        self.fc = nn.Linear(n_head * d_v, d_model, bias=False)

        self.attention = ScaledDotProductAttention(temperature=d_k ** 0.5)

        self.dropout = nn.Dropout(dropout)
        self.layer_norm = nn.LayerNorm(d_model, eps=1e-6)

    def forward(self, q, k, v, qmask=None, kmask=None):
        """Attend and return (output (N, Lq, d_model), attn (N, H, Lq, Lk))."""
        n_head, d_k, d_v = self.n_head, self.d_k, self.d_v
        batch = q.size(0)
        len_q, len_k, len_v = q.size(1), k.size(1), v.size(1)

        residual = q

        # Project, split heads, move head axis forward:
        # (B, L, d_model) -> (B, H, L, d_k/d_v)
        q = self.w_qs(q).view(batch, len_q, n_head, d_k).transpose(1, 2)
        k = self.w_ks(k).view(batch, len_k, n_head, d_k).transpose(1, 2)
        v = self.w_vs(v).view(batch, len_v, n_head, d_v).transpose(1, 2)

        # Insert a singleton head axis so the masks broadcast over heads.
        if qmask is not None:
            qmask = qmask.unsqueeze(1)
        if kmask is not None:
            kmask = kmask.unsqueeze(1)

        out, attn = self.attention(q, k, v, qmask=qmask, kmask=kmask)

        # (B, H, Lq, d_v) -> (B, Lq, H*d_v), then back to d_model.
        out = out.transpose(1, 2).contiguous().view(batch, len_q, -1)
        out = self.dropout(self.fc(out))
        out = self.layer_norm(out + residual)
        return out, attn

    def infer_dp(self, q, k, v, qmask=None, kmask=None):
        """Same as forward, but routes through the attention module's
        infer_dp path (which may force monotonic alignment at inference)."""
        n_head, d_k, d_v = self.n_head, self.d_k, self.d_v
        batch = q.size(0)
        len_q, len_k, len_v = q.size(1), k.size(1), v.size(1)

        residual = q

        q = self.w_qs(q).view(batch, len_q, n_head, d_k).transpose(1, 2)
        k = self.w_ks(k).view(batch, len_k, n_head, d_k).transpose(1, 2)
        v = self.w_vs(v).view(batch, len_v, n_head, d_v).transpose(1, 2)

        if qmask is not None:
            qmask = qmask.unsqueeze(1)
        if kmask is not None:
            kmask = kmask.unsqueeze(1)

        out, attn = self.attention.infer_dp(q, k, v, qmask=qmask, kmask=kmask)

        out = out.transpose(1, 2).contiguous().view(batch, len_q, -1)
        out = self.dropout(self.fc(out))
        out = self.layer_norm(out + residual)
        return out, attn
class FastSelfAttention(nn.Module):
    def __init__(self, n_position, n_head, d_model, d_conv, kernel_size, dropout=0.1):
        """Fast (lightweight) causal self-attention layer.

        Args:
            n_position: maximum sequence length supported by the
                pre-computed causal averaging table.
            n_head: number of heads the channels are split into.
            d_model: model (channel) dimension of the input/output.
            d_conv: per-head channel dimension; assumed to satisfy
                d_model == n_head * d_conv by the reshapes in
                _restricted_attention — TODO confirm at call sites.
            kernel_size: width of the restricted attention window.
            dropout: dropout rate (stored as a float, applied via
                F.dropout in the attention and output projections).
        """
        super().__init__()
        # self.glu = nn.Linear(d_model, 2 * d_model)
        # Cross-channel mixing projection applied after the local attention.
        self.fc = nn.Linear(d_model, d_model, bias=False)
        self.sigmoid = nn.Sigmoid()
        self.layer_norm = nn.LayerNorm(d_model, eps=1e-6)
        # Causal running-average matrix, registered as a (non-trainable)
        # buffer so it follows the module across devices / state dicts.
        self.register_buffer("mask_table", self._get_average_table(n_position))

        # For the LConv
        self.n_head = n_head
        self.d_conv = d_conv
        self.d_model = d_model
        self.dropout = dropout
        """
        self.kernels = kernels
        self.attn_scores = nn.ParameterList([])
        for kernel_size in kernels:
            attn_score = nn.Parameter(torch.ones(n_head, 1, kernel_size))
            self.attn_scores.append(attn_score)
        self.attn_select = nn.Parameter(torch.ones(len(kernels)))
        """
        # For the restricted attention
        # predict_energy maps each per-head frame (d_conv) to kernel_size
        # dynamic energies plus kernel_size gate values (2 * kernel_size).
        self.kernel_size = kernel_size
        self.predict_energy = nn.Linear(d_conv, kernel_size * 2)
        # Static (learned) per-head attention energies over the window.
        self.stt_energy = nn.Parameter(torch.ones(1, n_head, 1, kernel_size))
def _get_average_table(self, n_position):
average_table = 1 - torch.triu(torch.ones((n_position, n_position)), diagonal=1,)
for idx in range(n_position):
average_table[idx, :] = average_table[idx, :] / (idx + 1)
return average_table
    def _restricted_attention(self, input_):
        """Windowed (restricted) causal attention over the full sequence.

        Each position t attends over a `kernel_size`-wide causal window
        (positions t - kernel_size + 1 .. t) of `input_`.  The attention
        energies are a learned static term (`stt_energy`) plus a gated,
        input-dependent term predicted by `predict_energy`.

        Args:
            input_: (B, L, C) tensor with C == n_head * d_conv.

        Returns:
            (B, L, C) tensor of attended features.
        """
        n_head, d_conv = self.n_head, self.d_conv
        sz_b, len_conv = input_.size(0), input_.size(1)

        # Split heads: (B, L, C) -> (B*H, L, C//H)
        conv_input = input_.view(sz_b, len_conv, n_head, d_conv)
        conv_input = conv_input.transpose(1, 2).contiguous()
        conv_input = conv_input.view(sz_b * n_head, len_conv, d_conv)

        # (B*H, n, d//H) ---> (B*H, n, k*2)
        # First k channels: dynamic energies; last k channels: their gates.
        predict_ctrl = self.predict_energy(conv_input)
        dyn_energy = predict_ctrl[:, :, : self.kernel_size]
        gate = predict_ctrl[:, :, self.kernel_size :]
        stt_energy = self.stt_energy.expand(
            sz_b, n_head, 1, self.kernel_size
        ).contiguous()
        stt_energy = stt_energy.view(sz_b * n_head, 1, self.kernel_size)
        # Combine: static energy modulated by a sigmoid-gated dynamic term.
        energy = stt_energy + self.sigmoid(gate) * dyn_energy

        # expand the energy to # (B*H, n, n+k-1), filled with a large
        # negative value so out-of-window columns vanish under softmax.
        # NOTE: Tensor.new(...) is legacy API; new_full is the modern form.
        energy_expanded = energy.new(
            sz_b * n_head, len_conv, len_conv + self.kernel_size - 1
        ).fill_(-10000.0)
        # copy the energy to the expanded form.  The row stride is
        # len_conv + kernel_size = row length + 1, so each successive row's
        # k-slot window starts one column further right: row r occupies
        # columns r .. r + k - 1, building a banded (diagonal) layout.
        energy_expanded.as_strided(
            (sz_b * n_head, len_conv, self.kernel_size),
            (
                len_conv * (len_conv + self.kernel_size - 1),
                len_conv + self.kernel_size,
                1,
            ),
        ).copy_(energy)

        # get final expanded energy, (B*H, n, n): after dropping the first
        # k-1 columns, row r's window lands on input positions r-k+1 .. r.
        # NOTE(review): for the first k-1 rows part of the softmax mass
        # falls on the dropped (pre-sequence) columns, so the kept weights
        # sum to < 1 there — looks intentional (soft edge); confirm.
        attn_weight = F.softmax(energy_expanded, dim=2)
        attn_weight = attn_weight[:, :, self.kernel_size - 1 :]
        attn_weight = F.dropout(attn_weight, p=self.dropout, training=self.training)

        # (B*H, n, d//H): weighted sum over each causal window.
        conv_loc = torch.bmm(attn_weight, conv_input)
        # Merge heads back: (B*H, L, C//H) -> (B, L, C)
        conv_loc = conv_loc.view(sz_b, n_head, len_conv, d_conv)
        conv_loc = conv_loc.transpose(1, 2).contiguous()
        conv_loc = conv_loc.view(sz_b, len_conv, -1)
        return conv_loc
def _restricted_attention_dp(self, input_, local_hidden):
""" Restricted attention with `Dynamic Programming`.
Arguments:
input_ {Tensor} -- Shape: (B, 1, C).
local_hidden {Tensor} -- Shape: (B, kernel_size - 1, C).
Returns:
list -- Results.
"""
# Concat local context
conv_input = torch.cat((local_hidden, input_), dim=1)
local_hidden = conv_input[:, -(self.kernel_size - 1) :, :]
n_head, d_conv = self.n_head, self.d_conv
sz_b, len_conv = conv_input.size(0), conv_input.size(1)
conv_input = conv_input.view(sz_b, len_conv, n_head, d_conv)
conv_input = conv_input.transpose(1, 2).contiguous()
conv_input = conv_input.view(sz_b * n_head, len_conv, d_conv)
# (B*H, 1, C//H) ---> (B*H, 1, kernel_size*2)
predict_ctrl = self.predict_energy(conv_input)
predict_ctrl = predict_ctrl[:, -1:, :]
dyn_energy = predict_ctrl[:, :, : self.kernel_size]
gate = predict_ctrl[:, :, self.kernel_size :]
stt_energy = self.stt_energy.squeeze(0)
energy = stt_energy + self.sigmoid(gate) * dyn_energy
# Get corresponding weights
attn_weight = F.softmax(energy, dim=2)
attn_weight = F.dropout(attn_weight, p=self.dropout, training=self.training)
# (B*H, 1, C//H) = (B*H, 1, kernel_size) x (B*H, kernel_size, C//H)
conv_loc = torch.bmm(attn_weight, conv_input)
conv_loc = conv_loc.view(sz_b, n_head, 1, d_conv)
conv_loc = conv_loc.transpose(1, 2).contiguous()
conv_loc = conv_loc.view(sz_b, 1, -1)
return conv_loc, local_hidden
    def forward(self, input_):
        """Training-time pass of fast self-attention over a full sequence.

        `input_` is treated as (batch, time, channels) -- dim 1 is the time
        axis that the causal average mask is applied over (TODO confirm
        against callers).  Returns (output, None); the None stands in for
        an attention map to match the (output, attn) convention elsewhere.
        """
        residual = input_
        # 0 - Get average info from input, the avg_mask is already a causal mask
        avg = torch.matmul(
            self.mask_table[: input_.size(1), : input_.size(1)].clone().detach(), input_,
        )
        # 1 - Apply gating unit for more flexibility
        # (disabled in the original source; kept below for reference)
        """
        sig = self.glu(avg)
        avg = sig[:, :, : self.d_model] * self.sigmoid(sig[:, :, self.d_model :])
        """
        # 2 - Apply LConv for the average info
        # (older lightweight-convolution variant, also disabled)
        """
        n_head, d_conv = self.n_head, self.d_conv
        sz_b, len_conv = conv_input.size(0), conv_input.size(1)
        conv_input = conv_input.view(sz_b, len_conv, n_head, d_conv)
        conv_input = conv_input.transpose(1, 3).contiguous()
        conv_input = conv_input.view(sz_b * d_conv, n_head, len_conv)
        conv_loc = conv_input.new_zeros(sz_b * d_conv, n_head, len_conv)
        attn_select = F.softmax(self.attn_select, dim=-1)
        for loc_idx in range(len(self.attn_scores)):
            attn_weight = F.softmax(self.attn_scores[loc_idx], dim=-1)
            attn_weight = F.dropout(attn_weight, p=self.dropout, training=self.training)
            conv_temp = F.conv1d(
                conv_input,
                attn_weight,
                padding=self.kernels[loc_idx] - 1,
                groups=n_head,
            )
            conv_temp = conv_temp[:, :, :len_conv]
            conv_loc += attn_select[loc_idx] * conv_temp
        conv_loc = conv_loc.view(sz_b, d_conv, n_head, len_conv)
        conv_loc = conv_loc.transpose(1, 3).contiguous()
        conv_loc = conv_loc.view(sz_b, len_conv, -1)
        """
        conv_loc = self._restricted_attention(avg)
        # 3 - Apply FC for information flow across channels
        v = F.dropout(self.fc(conv_loc), p=self.dropout, training=self.training)
        v += residual
        v = self.layer_norm(v)
        return v, None
    def infer_dp(self, input_, input_idx, accum_hidden, local_hidden):
        """ The fast self-attention module with `Dynamic Programming`.
        Arguments:
            input_ {Tensor} -- Shape: (B, 1, C).
            input_idx {int} -- Integer number.
            accum_hidden {Tensor} -- Shape: (B, 1, C).
            local_hidden {Tensor} -- Shape: (B, kernel_size - 1, C).
        Returns:
            list -- Results.
        """
        residual = input_
        # 0 - Average pooling for the sequence
        # A running sum lets each step compute the causal average in O(1)
        # instead of re-pooling the whole prefix.
        accum_hidden = input_ + accum_hidden
        avg = accum_hidden / (input_idx + 1)
        # 1 - Apply gating unit for more flexibility
        # (disabled in the original source; kept below for reference)
        """
        sig = self.glu(avg)
        avg = sig[:, :, : self.d_model] * self.sigmoid(sig[:, :, self.d_model :])
        """
        # 2 - Apply restricted attention
        conv_loc, local_hidden = self._restricted_attention_dp(avg, local_hidden)
        # 3 - Apply FC for information flow across channels
        v = F.dropout(self.fc(conv_loc), p=self.dropout, training=self.training)
        v += residual
        v = self.layer_norm(v)
        # None mirrors the attention-map slot returned by forward().
        return v, None, accum_hidden, local_hidden
class PositionwiseFeedForward(nn.Module):
    """Position-wise FFN: expand -> ReLU -> project, with dropout, a
    residual connection and post-LayerNorm."""

    def __init__(self, d_in, d_hid, dropout=0.1):
        super().__init__()
        # Expand to the hidden width, then project back to the model width.
        self.w_1 = nn.Linear(d_in, d_hid)
        self.w_2 = nn.Linear(d_hid, d_in)
        self.layer_norm = nn.LayerNorm(d_in, eps=1e-6)
        self.dropout = nn.Dropout(dropout)

    def forward(self, x):
        # FFN(x) = W2 * relu(W1 * x), regularized by dropout.
        hidden = F.relu(self.w_1(x))
        projected = self.dropout(self.w_2(hidden))
        # Residual connection followed by layer normalization.
        return self.layer_norm(projected + x)
class EncoderLayer(nn.Module):
    """
    One Transformer encoder layer: multi-head self-attention followed by a
    position-wise feed-forward network.
    """

    def __init__(self, d_model, d_inner, n_head, d_k, d_v, dropout=0.1):
        super().__init__()
        self.multi_head_attn = MultiHeadAttention(
            n_head, d_model, d_k, d_v, dropout=dropout
        )
        self.ffn = PositionwiseFeedForward(d_model, d_inner, dropout=dropout)

    def forward(self, input, enc_qmask=None, enc_kmask=None):
        # Self-attention: queries, keys and values all come from `input`.
        attn_out, enc_attn = self.multi_head_attn(
            input, input, input, qmask=enc_qmask, kmask=enc_kmask
        )
        # Channel mixing, then hand back the attention map for inspection.
        return self.ffn(attn_out), enc_attn
class Encoder(nn.Module):
    """Stack of `hparams.n_layers` EncoderLayer blocks."""

    def __init__(self, hparams):
        super().__init__()
        # Per-head key/value width: the model width split across heads.
        head_dim = hparams.d_model // hparams.n_head
        self.layer_stack = nn.ModuleList(
            [
                EncoderLayer(
                    hparams.d_model,
                    hparams.d_inner,
                    hparams.n_head,
                    head_dim,
                    head_dim,
                    dropout=0.1,
                )
                for _ in range(hparams.n_layers)
            ]
        )

    def forward(self, input, enc_qmask=None, enc_kmask=None, return_attns=False):
        # Positional dropout on the (position-encoded) input.
        output = F.dropout(input, p=0.1, training=self.training)
        attn_maps = []
        for layer in self.layer_stack:
            output, head_attn = layer(output, enc_qmask=enc_qmask, enc_kmask=enc_kmask)
            if return_attns:
                attn_maps.append(head_attn)
        if return_attns:
            return output, attn_maps
        return output
class EncoderPrenet(nn.Module):
    """Text-side prenet: embedding -> Conv1d/BatchNorm stack -> linear
    projection -> positional encoding."""

    def __init__(self, hparams):
        super().__init__()
        kernel = hparams.eprenet_kernel_size
        pad = int((kernel - 1) / 2)
        chans = hparams.eprenet_chans
        # First conv maps from the embedding width; the rest keep `chans`.
        in_channels = [hparams.d_embed] + [chans] * (
            hparams.eprenet_n_convolutions - 1
        )
        self.convolutions = nn.ModuleList(
            [
                nn.Sequential(
                    nn.Conv1d(
                        in_ch,
                        chans,
                        kernel_size=kernel,
                        stride=1,
                        padding=pad,
                    ),
                    nn.BatchNorm1d(chans),
                )
                for in_ch in in_channels
            ]
        )
        self.project = nn.Linear(chans, hparams.d_model)
        self.txt_embed = nn.Embedding(hparams.n_symbols, hparams.d_embed, padding_idx=0,)
        self.position = PositionalEncoding(hparams.n_position, hparams.d_model)

    def forward(self, txt_seq):
        # Embed, then move channels first: (N, L) -> (N, C, L) for Conv1d.
        x = self.txt_embed(txt_seq).transpose(1, 2)
        for conv in self.convolutions:
            x = F.dropout(F.relu(conv(x)), p=0.5, training=self.training)
        # Back to (N, L, C), project to the model width, add positions.
        x = self.project(x.transpose(1, 2))
        return self.position(x)
class DecoderLayer(nn.Module):
    """
    One decoder layer: masked fast self-attention, encoder-decoder
    multi-head attention, then a position-wise feed-forward network.
    """

    def __init__(self, n_position, d_model, d_inner, n_head, d_k, d_v, dropout=0.1):
        super().__init__()
        self.masked_fast_attn = FastSelfAttention(
            n_position, n_head, d_model, d_v, kernel_size=15, dropout=dropout
        )
        self.multi_head_attn = MultiHeadAttention(
            n_head, d_model, d_k, d_v, dropout=dropout
        )
        self.ffn = PositionwiseFeedForward(d_model, d_inner, dropout=dropout)

    def forward(
        self, input, enc_output, dec_qmask=None, dec_kmask=None, dec_enc_kmask=None,
    ):
        # Causal self-attention over the decoder states.
        hidden, dec_attn = self.masked_fast_attn(input)
        # Cross-attention over the encoder memory.
        hidden, dec_enc_attn = self.multi_head_attn(
            hidden, enc_output, enc_output, qmask=dec_qmask, kmask=dec_enc_kmask,
        )
        return self.ffn(hidden), dec_attn, dec_enc_attn

    def infer_dp(
        self,
        input_,
        input_idx,
        enc_output,
        accum_hidden,
        local_hidden,
        dec_enc_kmask=None,
    ):
        """Single incremental decoding step; threads the DP hidden caches."""
        hidden, dec_attn, accum_hidden, local_hidden = self.masked_fast_attn.infer_dp(
            input_, input_idx, accum_hidden, local_hidden
        )
        hidden, dec_enc_attn = self.multi_head_attn.infer_dp(
            hidden, enc_output, enc_output, qmask=None, kmask=dec_enc_kmask
        )
        return self.ffn(hidden), dec_attn, dec_enc_attn, accum_hidden, local_hidden
class Decoder(nn.Module):
    """Stack of DecoderLayer blocks, with an incremental (DP) decoding path."""
    def __init__(self, hparams):
        super(Decoder, self).__init__()
        self.layer_stack = nn.ModuleList(
            [
                DecoderLayer(
                    hparams.n_position,
                    hparams.d_model,
                    hparams.d_inner,
                    hparams.n_head,
                    hparams.d_model // hparams.n_head,
                    hparams.d_model // hparams.n_head,
                    dropout=0.1,
                )
                for _ in range(hparams.n_layers)
            ]
        )
    def forward(
        self, input, dec_qmask, dec_kmask, enc_output, dec_enc_kmask, return_attns=False,
    ):
        dec_attn_list, dec_enc_attn_list = [], []
        # positional dropout
        output = F.dropout(input, p=0.1, training=self.training)
        for layer in self.layer_stack:
            output, dec_attn, dec_enc_attn = layer(
                output,
                enc_output,
                dec_qmask=dec_qmask,
                dec_kmask=dec_kmask,
                dec_enc_kmask=dec_enc_kmask,
            )
            # Attention maps are only collected when requested.
            dec_attn_list += [dec_attn] if return_attns else []
            dec_enc_attn_list += [dec_enc_attn] if return_attns else []
        if return_attns:
            return output, dec_attn_list, dec_enc_attn_list
        return output
    def init_dp(self, memory):
        # init the hidden states used for dynamic programming.
        # accum_hiddens: running sums used by the causal average pooling in
        # FastSelfAttention.infer_dp.  local_hiddens: the last
        # (kernel_size - 1) frames per layer; the literal 14 matches the
        # kernel_size=15 hard-coded in DecoderLayer -- keep them in sync.
        B, C = memory.size(0), memory.size(2)
        layer_num = len(self.layer_stack)
        self.accum_hiddens = [memory.new_zeros(B, 1, C) for _ in range(layer_num)]
        self.local_hiddens = [memory.new_zeros(B, 14, C) for _ in range(layer_num)]
    def infer_dp(self, input_, input_idx, enc_output, dec_enc_kmask, return_attns=False):
        """One-step incremental decoding; updates the per-layer DP caches
        initialized by init_dp()."""
        dec_attn_list, dec_enc_attn_list = [], []
        # Positional dropout
        output = F.dropout(input_, p=0.1, training=self.training)
        for layer_id, layer in enumerate(self.layer_stack):
            output, dec_attn, dec_enc_attn, accum_hidden, local_hidden = layer.infer_dp(
                output,
                input_idx,
                enc_output,
                self.accum_hiddens[layer_id],
                self.local_hiddens[layer_id],
                dec_enc_kmask=dec_enc_kmask,
            )
            # Persist the updated caches for the next time step.
            self.accum_hiddens[layer_id] = accum_hidden
            self.local_hiddens[layer_id] = local_hidden
            dec_attn_list += [dec_attn] if return_attns else []
            dec_enc_attn_list += [dec_enc_attn] if return_attns else []
        if return_attns:
            return output, dec_attn_list, dec_enc_attn_list
        return output
class DecoderPrenet(nn.Module):
    """Mel-side prenet: two Linear+ReLU layers with always-on dropout,
    a projection to the model width, and positional encoding."""

    def __init__(self, d_input, d_prenet, d_model, n_position):
        super().__init__()
        # Layer widths: d_input -> d_prenet -> d_prenet.
        layer_dims = [(d_input, d_prenet), (d_prenet, d_prenet)]
        self.layers = nn.ModuleList(
            [nn.Linear(d_in, d_out) for d_in, d_out in layer_dims]
        )
        self.project = nn.Linear(d_prenet, d_model)
        self.position = PositionalEncoding(n_position, d_model)

    def forward(self, mel_seq):
        # (N, C, L) mel input -> (N, L, C) so Linear acts on channels.
        x = mel_seq.transpose(1, 2)
        for layer in self.layers:
            # Dropout is unconditionally active (training=True), even in
            # eval mode -- this matches the original behaviour.
            x = F.dropout(F.relu(layer(x)), p=0.5, training=True)
        return self.position(self.project(x))

    def infer_dp(self, input_, input_idx):
        # Single-frame step: (N, C, 1) -> (N, 1, C).
        x = input_.transpose(1, 2)
        for layer in self.layers:
            x = F.dropout(F.relu(layer(x)), p=0.5, training=True)
        return self.position.infer_dp(self.project(x), input_idx)
class Postnet(nn.Module):
    """
    Residual-refinement conv stack over the predicted mel spectrogram:
    tanh-activated Conv1d/BatchNorm blocks, with a linear (no tanh) block
    at the end.
    """

    def __init__(self, hparams):
        super().__init__()
        kernel = hparams.dpostnet_kernel_size
        pad = int((kernel - 1) / 2)
        chans = hparams.dpostnet_chans

        def conv_block(in_ch, out_ch):
            # Conv1d + BatchNorm pair used for every layer of the stack.
            return nn.Sequential(
                nn.Conv1d(
                    in_ch,
                    out_ch,
                    kernel_size=kernel,
                    stride=1,
                    padding=pad,
                ),
                nn.BatchNorm1d(out_ch),
            )

        blocks = [conv_block(hparams.d_mel, chans)]
        blocks += [
            conv_block(chans, chans)
            for _ in range(1, hparams.dpostnet_n_convolutions - 1)
        ]
        blocks.append(conv_block(chans, hparams.d_mel))
        self.convolutions = nn.ModuleList(blocks)

    def forward(self, x):
        # tanh + dropout on every convolution except the last one...
        for conv in self.convolutions[:-1]:
            x = F.dropout(torch.tanh(conv(x)), p=0.5, training=self.training)
        # ...and a linear activation (dropout only) on the final block.
        return F.dropout(self.convolutions[-1](x), p=0.5, training=self.training)
class Transformer(nn.Module):
    """Transformer-based text-to-speech model.

    Text tokens flow through EncoderPrenet + Encoder; the Decoder then
    autoregressively predicts mel frames (fed back through DecoderPrenet)
    together with per-frame stop tokens, and a convolutional Postnet
    refines the mel output via a residual connection.
    """

    def __init__(self, hparams):
        super(Transformer, self).__init__()
        self.encoder_prenet = EncoderPrenet(hparams)
        self.encoder = Encoder(hparams)
        self.decoder_prenet = DecoderPrenet(
            hparams.d_mel * hparams.n_frames_per_step,
            hparams.dprenet_size,
            hparams.d_model,
            hparams.n_position,
        )
        self.decoder = Decoder(hparams)
        self.d_mel = hparams.d_mel
        self.n_frames_per_step = hparams.n_frames_per_step
        self.stop_threshold = hparams.stop_threshold
        self.max_decoder_steps = hparams.max_decoder_steps
        self.infer_trim = hparams.infer_trim
        self.mel_linear = nn.Linear(
            hparams.d_model, hparams.d_mel * hparams.n_frames_per_step,
        )
        self.stop_linear = nn.Linear(hparams.d_model, hparams.n_frames_per_step,)
        self.postnet = Postnet(hparams)

    def parse_output(self, outputs, in_flag=None, out_flag=None):
        """Mask padded positions in the [mel, mel_postnet, stop, ...] outputs.

        When `out_flag` is given, mel frames past each utterance's true
        length are zeroed and the corresponding stop logits are forced to a
        large positive value (sigmoid ~= 1, i.e. "stopped").  No-op when
        `out_flag` is None.
        """
        if out_flag is not None:
            mel_num = outputs[0].size(1)
            mask = get_output_mask(out_flag)
            # (mel_num, N, L) ---> (N, mel_num, L)
            mask = mask.expand(mel_num, mask.size(0), mask.size(1))
            mask = mask.permute(1, 0, 2)
            # For r frames per step the decoder length L_r may exceed the
            # target length L; pad the mask with ones for the extra frames.
            N, C, L_r = outputs[0].size()
            L = mask.size(2)
            mask_r = mask.new_ones(N, C, L_r)
            mask_r[:, :, :L] = mask
            outputs[0] = outputs[0].masked_fill(mask_r, 0.0)
            outputs[1] = outputs[1].masked_fill(mask_r, 0.0)
            outputs[2] = outputs[2].masked_fill(mask_r[:, 0, :], 1e3)
        return outputs, in_flag, out_flag

    def forward(self, inputs):
        """Teacher-forced training pass.

        `inputs` is (src_seq, src_flag, trg_seq, trg_flag); returns the
        parse_output() triple over [mel, mel_postnet, stop, attentions...].
        """
        # parse input
        src_seq, src_flag, trg_seq, trg_flag = inputs
        src_qmask, src_kmask = get_attn_mask(src_flag)
        trg_qmask, trg_kmask = get_attn_mask(trg_flag)
        # Combine padding and causal masks for the decoder keys.
        trg_cmask = get_causal_mask(trg_seq)
        trg_kmask = trg_kmask | trg_cmask
        # encoder
        src_input = self.encoder_prenet(src_seq)
        enc_output, enc_attn_list = self.encoder(
            src_input, src_qmask, src_kmask, return_attns=True
        )
        # decoder
        trg_input = self.decoder_prenet(trg_seq)
        dec_output, dec_attn_list, dec_enc_attn_list = self.decoder(
            trg_input, trg_qmask, trg_kmask, enc_output, src_kmask, return_attns=True,
        )
        mel_output = self.mel_linear(dec_output)
        stop_output = self.stop_linear(dec_output)
        # reshape to original format: (N, L, C) -> (N, C, L), (N, L, 1) -> (N, L)
        mel_output = mel_output.transpose(1, 2)
        stop_output = stop_output.squeeze(-1)
        # postnet and residual connection
        mel_output_postnet = self.postnet(mel_output)
        mel_output_postnet = mel_output_postnet + mel_output
        # parse output
        outputs = self.parse_output(
            [
                mel_output,
                mel_output_postnet,
                stop_output,
                enc_attn_list,
                dec_attn_list,
                dec_enc_attn_list,
            ],
            src_flag,
            trg_flag,
        )
        return outputs

    def inference(self, inputs):
        """Plain autoregressive inference (re-runs the decoder over the
        whole prefix every step); see inference_dp for the O(L) variant."""
        src_seq = inputs[0]
        src_qmask, src_kmask = None, None
        trg_qmask = None
        # encoder
        # enc_output (b, enc_l, d_model), enc_attn_list [(b, h, enc_l, enc_l)]
        src_input = self.encoder_prenet(src_seq)
        enc_output, enc_attn_list = self.encoder(
            src_input, src_qmask, src_kmask, return_attns=True
        )
        # create the go frame (b, d_mel, 1)
        trg_go = enc_output.new_full((enc_output.size(0), self.d_mel), 1.0)
        trg_go = trg_go.unsqueeze(-1)
        while True:
            # trg_go (b, d_mel, dec_l), trg_input (b, dec_l, d_model)
            trg_input = self.decoder_prenet(trg_go)
            trg_kmask = get_causal_mask(trg_go)
            # dec_attn_list [(b, h, dec_l, dec_l)]
            # dec_enc_attn_list [(b, h, dec_l, enc_l)]
            dec_output, dec_attn_list, dec_enc_attn_list = self.decoder(
                trg_input,
                trg_qmask,
                trg_kmask,
                enc_output,
                src_kmask,
                return_attns=True,
            )
            mel_output = self.mel_linear(dec_output)
            stop_output = self.stop_linear(dec_output)
            # reshape to original format
            # mel_output (b, d_mel, dec_l), stop_output (b, dec_l)
            mel_output = mel_output.transpose(1, 2)
            stop_output = stop_output.squeeze(-1)
            eos = torch.sigmoid(stop_output).detach().cpu().numpy()
            if True in (eos >= self.stop_threshold):
                break
            elif mel_output.size(-1) == self.max_decoder_steps:
                print("Warning! Reached max decoder steps")
                break
            # feed the newest predicted frame back as input
            trg_go = torch.cat((trg_go, mel_output[:, :, -1:]), dim=-1)
        # Remove the added go frame and delete the potential noise frames
        mel_output = trg_go[:, :, 1:]
        if self.infer_trim > 0:
            mel_output = mel_output[:, :, : -self.infer_trim]
        mel_output_postnet = self.postnet(mel_output)
        mel_output_postnet = mel_output + mel_output_postnet
        outputs = self.parse_output(
            [
                mel_output,
                mel_output_postnet,
                stop_output,
                enc_attn_list,
                dec_attn_list,
                dec_enc_attn_list,
            ]
        )
        return outputs[0]

    def parse_outputs_dp(self, mel_outputs, stop_outputs):
        """ Parse the outputs from recurrent decoding with `Dynamic Programming`.
        Arguments:
            mel_outputs {list} -- One (B, C, 1) tensor per decoded time step.
            stop_outputs {list} -- One (B, 1) stop-token tensor per time step.
        Returns:
            tuple -- (mel (B, C, L), stop (B, L)), concatenated along time.
        """
        mel_outputs = torch.cat(mel_outputs, dim=-1)
        stop_outputs = torch.cat(stop_outputs, dim=-1)
        return mel_outputs, stop_outputs

    def inference_dp(self, input_, force_layer_list=None):
        """ Inference with `Dynamic Programming` (incremental decoding).
        Arguments:
            input_ {list} -- Parsed input data for the network, i.e.,
                (txt_padded, txt_flag, ref_padded, mel_flag)
            force_layer_list {list} -- Decoder-layer indices for which
                init_prev_attn is called with True (forcing behaviour is
                defined in the attention module).
        Returns:
            list -- Results
        """
        # `None` (not `[]`) as the default avoids a shared mutable default.
        if force_layer_list is None:
            force_layer_list = []
        src_seq = input_[0]
        tgr_idx = 0
        src_qmask, src_kmask = None, None
        # Forward pass for encoder
        # enc_output (b, enc_l, d_model), enc_attn_list [(b, h, enc_l, enc_l)]
        src_input = self.encoder_prenet(src_seq)
        enc_output, enc_attn_list = self.encoder(
            src_input, src_qmask, src_kmask, return_attns=True
        )
        # Forward pass for decoder
        # create the go frame (b, d_mel, 1) and init the decoder for DP
        self.decoder.init_dp(enc_output)
        for layer_id in range(len(self.decoder.layer_stack)):
            self.decoder.layer_stack[
                layer_id
            ].multi_head_attn.attention.init_prev_attn(
                enc_output, layer_id in force_layer_list
            )
        trg_input = enc_output.new_full((enc_output.size(0), self.d_mel, 1), 1.0)
        mel_outputs, stop_outputs, alignments = [], [], []
        # Decoding
        while True:
            trg_input = self.decoder_prenet.infer_dp(trg_input, tgr_idx)
            dec_output, _, dec_enc_attn_list = self.decoder.infer_dp(
                trg_input, tgr_idx, enc_output, src_kmask, return_attns=True
            )
            mel_output = self.mel_linear(dec_output)
            stop_output = self.stop_linear(dec_output)
            # reshape to original format
            # mel_output (b, d_mel, 1), stop_output (b, 1)
            mel_output = mel_output.transpose(1, 2)
            stop_output = stop_output.squeeze(-1)
            # torch.sigmoid replaces the deprecated F.sigmoid (and matches
            # the plain inference() path).
            eos = torch.sigmoid(stop_output).detach().cpu().numpy()
            if True in (eos >= self.stop_threshold):
                break
            elif len(mel_outputs) == self.max_decoder_steps:
                print("Warning! Reached max decoder steps")
                break
            mel_outputs += [mel_output]
            stop_outputs += [stop_output]
            alignments += [dec_enc_attn_list]
            trg_input = mel_output
            tgr_idx += 1
        # Drop the trailing frames that tend to contain noise
        if self.infer_trim > 0:
            mel_outputs = mel_outputs[: -self.infer_trim]
            stop_outputs = stop_outputs[: -self.infer_trim]
            alignments = alignments[: -self.infer_trim]
        # Parse for postnet forward
        mel_outputs, stop_outputs = self.parse_outputs_dp(mel_outputs, stop_outputs)
        mel_outputs_postnet = self.postnet(mel_outputs)
        mel_outputs_postnet = mel_outputs + mel_outputs_postnet
        outputs = [
            mel_outputs,
            mel_outputs_postnet,
            stop_outputs,
            enc_attn_list,
            "mode_dp",
            alignments,
        ]
        return outputs
| [
"zhaowei.0817@163.com"
] | zhaowei.0817@163.com |
85658af6a7b79e5450b577beccbc06522bd0f00d | 25c1bba5c9954ab757fed0ce3236cd6b3bd50c59 | /BUILD.cr.py | 47ccf5633ca37f891f4761834ecae7183d4632fb | [] | no_license | craftr-build/craftr-chaiscript | c09e32e7ddd72c75d482cd3b627f2183cceaf379 | 09e6434016915c9745e3c841076ad193cdebb9dd | refs/heads/master | 2021-09-04T17:35:54.491031 | 2018-01-20T14:31:21 | 2018-01-20T14:31:21 | 118,172,093 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,092 | py |
# Craftr build script for ChaiScript.
# NOTE: the `import ... from '...'` lines are Craftr's module DSL, not
# standard Python syntax; this file is executed by Craftr, not CPython.
import craftr, {fmt, glob, path} from 'craftr'
import cxx from '@craftr/cxx'
# Build either from a local checkout (chaiscript.source_dir) or from a
# release archive downloaded for the pinned chaiscript.gitref tag.
source_dir = craftr.options.get('chaiscript.source_dir')
gitref = craftr.options.get('chaiscript.gitref', 'v6.0.0')
if not source_dir:
  url = fmt('https://github.com/ChaiScript/ChaiScript/archive/{gitref}.zip')
  source_dir = path.join(craftr.get_source_archive(url), 'ChaiScript-' + gitref.lstrip('v'))
# Translate build options into preprocessor defines.
defines = []
if craftr.options.get('chaiscript.no_threads', True):
  defines.append('CHAISCRIPT_NO_THREADS')
if craftr.options.get('chaiscript.no_protect_dividebyzero', False):
  defines.append('CHAISCRIPT_NO_PROTECT_DIVIDEBYZERO')
# Include-path-only target that consumers depend on.
cxx.prebuilt(
  name = 'chaiscript',
  includes = [path.join(source_dir, 'include')],
  defines = defines
)
# Static library built from the static_libs sources (built on demand).
cxx.library(
  name = 'chaiscript-static',
  public_deps = [':chaiscript'],
  explicit = True,
  srcs = glob('static_libs/*.cpp', parent=source_dir),
  cpp_std = 'c++11',
  options = dict(
    msvc_compile_flags = ['/bigobj']
  )
)
# The main interpreter binary (built on demand).
cxx.binary(
  name = 'main',
  deps = [':chaiscript-static'],
  explicit = True,
  srcs = [path.join(source_dir, 'src/main.cpp')]
)
| [
"rosensteinniklas@gmail.com"
] | rosensteinniklas@gmail.com |
250c0e4235499578ff3824e0c66b41dfcf1bdd93 | 991b68c35ac97aaf944380eb7b60e09a037412cc | /RuuvitagProject/main.py | 6d1b711349ac9531b25e925d479e635e4c0c0620 | [] | no_license | JamieLeon/Python | 8caa71e43f0920de2f71d5e6aef54675c53f0fd9 | f4f615986418a4f39082ac28352e12458f7335ab | refs/heads/master | 2023-05-05T09:44:28.807527 | 2021-05-21T12:27:05 | 2021-05-21T12:27:05 | 369,527,251 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,071 | py | from AWSIoTPythonSDK.MQTTLib import AWSIoTMQTTClient
import time
import argparse
import json
from ruuvitag_sensor.ruuvitag import RuuviTag
from datetime import datetime, timedelta


# General message notification callback
def customOnMessage(message):
    """Print any message received on a subscribed topic."""
    print("Received a new message: ")
    print(message.payload)
    print("from topic: ")
    print(message.topic)
    print("--------------\n\n")


# Suback callback.  This was commented out in the original file even
# though subscribeAsync() below references it, which raised a NameError.
def customSubackCallback(mid, data):
    print("Received SUBACK packet id: ")
    print(mid)
    print("Granted QoS: ")
    print(data)
    print("++++++++++++++\n\n")


# Puback callback.  Same story: referenced by publishAsync() below.
def customPubackCallback(mid):
    print("Received PUBACK packet id: ")
    print(mid)
    print("++++++++++++++\n\n")


parser = argparse.ArgumentParser()

# AWS IoT endpoint and credential locations.
host = "af11qzz9ui86-ats.iot.eu-west-2.amazonaws.com"
rootCAPath = "/home/pi/AWSIoT/root-ca.pem"
certificatePath = "/home/pi/AWSIoT/certificate.pem.crt"
privateKeyPath = "/home/pi/AWSIoT/private.pem.key"
useWebsocket = False
clientId = "RaspberryPi"
topic = "Ruuvitag"

if (not certificatePath or not privateKeyPath):
    parser.error("Missing credentials for authentication.")
    exit(2)

myAWSIoTMQTTClient = AWSIoTMQTTClient(clientId)
myAWSIoTMQTTClient.configureEndpoint(host, 8883)
myAWSIoTMQTTClient.configureCredentials(rootCAPath, privateKeyPath, certificatePath)

# AWSIoTMQTTClient connection configuration
myAWSIoTMQTTClient.configureAutoReconnectBackoffTime(1, 32, 20)
myAWSIoTMQTTClient.configureOfflinePublishQueueing(-1)  # Infinite offline Publish queueing
myAWSIoTMQTTClient.configureDrainingFrequency(2)  # Draining: 2 Hz
myAWSIoTMQTTClient.configureConnectDisconnectTimeout(10)  # 10 sec
myAWSIoTMQTTClient.configureMQTTOperationTimeout(5)  # 5 sec
myAWSIoTMQTTClient.onMessage = customOnMessage

# Connect and subscribe to AWS IoT.  We subscribe once (the original
# duplicated this call); the general message callback handles inbound data.
myAWSIoTMQTTClient.connect()
myAWSIoTMQTTClient.subscribeAsync(topic, 1, ackCallback=customSubackCallback)
time.sleep(2)

mac = 'EC:73:06:99:F2:FC'
sensor = RuuviTag(mac)

# Publish one sensor reading to the topic every 15 minutes, forever.
loopCount = 0
while True:
    data = sensor.update()
    now = datetime.now()
    exp = now + timedelta(hours=3)
    # Build the payload as a dict and serialize with json.dumps: the
    # original hand-concatenated string was not valid JSON (no braces,
    # unterminated value) and crashed on json.loads every iteration.
    payload = {
        "timestamp": int(now.timestamp()),
        "expiration_date": int(exp.timestamp()),
        "humidity": str(data['humidity']),
        "temperature": str(data['temperature']),
    }
    msg = json.dumps(payload)
    myAWSIoTMQTTClient.publishAsync(topic, msg, 1, ackCallback=customPubackCallback)
    loopCount += 1
    time.sleep(900)
"jamie.leon1@yahoo.co.uk"
] | jamie.leon1@yahoo.co.uk |
b77761e27e1ee73cd77ae1da875ba26db9728c06 | 15a9f5666b543a40363e6f5bc74d55d8030d608f | /connecting/admin.py | 3d7628c48803019473816df5bf12adcdfca05c96 | [] | no_license | Amaya54/connect_newUI | 63db9f0816cbfae42de3127c16c1e7cb4e3c3e3f | 9d178868361e0adf5d1478ff7eb0c61a926fcec2 | refs/heads/master | 2016-09-07T18:51:27.987911 | 2015-05-10T06:02:05 | 2015-05-10T06:02:05 | 35,359,135 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 254 | py | from django.contrib import admin
from connecting.models import *
# Register your models here.
@admin.register(connectDetails)
class connectAdmin(admin.ModelAdmin):
    """Admin list view configuration for connectDetails records."""
    list_display = ('connectId', 'postId', 'userId', 'doc', 'exchangeFlag')
"amaya.das@gmail.com"
] | amaya.das@gmail.com |
1fd28a32175794c8a1b0959d713aa53612b0f54d | 71f72e8953164c8912fe66a12a800043ff2ecec5 | /data.py | 3148097f9c5353df8176533c7ad456d6b0c5f05b | [] | no_license | iliakplv/ml-math | eabe8e8ec0985b0402745799a9b72cf333ecefdd | c437b3e464de1b3ef259e6c7d158d14ef2b96df7 | refs/heads/master | 2020-04-06T09:47:34.262656 | 2019-02-17T06:35:20 | 2019-02-17T06:35:20 | 157,356,685 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,873 | py | import csv
import random
# One-hot encodings for the three Iris class names.
_LABEL_CODES = {
    'Iris-setosa': [1, 0, 0],
    'Iris-versicolor': [0, 1, 0],
    'Iris-virginica': [0, 0, 1],
}


def encode_label(label):
    """Return the one-hot vector for an Iris class name.

    Raises:
        ValueError: if `label` is not one of the three known classes.
            (ValueError subclasses Exception, so existing callers that
            caught the old generic Exception still work.)
    """
    if label not in _LABEL_CODES:
        raise ValueError('Unknown label: %r' % (label,))
    # Return a copy so callers cannot mutate the shared template lists.
    return list(_LABEL_CODES[label])
def feature_column(features, column):
    """Return the values found at index ``column`` in every feature row."""
    extracted = []
    for row in features:
        extracted.append(row[column])
    return extracted
def normalise_features(features):
    """Min-max scale every column of `features` to [0, 1], in place.

    Generalized from the original hard-coded 4 Iris columns to any
    (consistent) number of columns.  A constant column (max == min) is
    mapped to 0.0 instead of raising ZeroDivisionError, and an empty
    dataset is a no-op.
    """
    if not features:
        return
    n_columns = len(features[0])
    for col in range(n_columns):
        column_values = [row[col] for row in features]
        col_min = min(column_values)
        col_max = max(column_values)
        col_range = col_max - col_min
        for row in features:
            # Guard against a degenerate (constant) column.
            row[col] = (row[col] - col_min) / col_range if col_range else 0.0
def get_training_data():
    """
    Read Iris dataset from 'data.csv'
    Encode labels (one-hot vectors)
    Shuffle dataset
    Normalise feature values
    :return: features, labels
    """
    with open('data.csv') as csv_file:
        rows = list(csv.reader(csv_file, delimiter=','))
    random.shuffle(rows)
    # Columns 0-3 are the numeric features, column 4 is the class name.
    features = [[float(value) for value in row[:4]] for row in rows]
    labels = [encode_label(row[4]) for row in rows]
    normalise_features(features)
    return features, labels
if __name__ == '__main__':
    # Quick manual check: load the dataset and dump it to stdout.
    xs, ys = get_training_data()
    print(xs)
    print(ys)
| [
"iliakplv@gmail.com"
] | iliakplv@gmail.com |
44f036ce09dcbc8edee1182f56fc3dce125c9ecb | 1ff3aa4614c4e2ee0e30234fdbd4264b3d9e9dbe | /1-7/test.py | f66d20e9068cae00cd127dde815071807e3c33a5 | [] | no_license | Wk20017/webSpider | 0065ec618f3d3631b11f15c8ddacf2440c9078bd | 7cbd232d231c1dbabb3525627c7bd96eb5df2cfa | refs/heads/master | 2023-01-10T21:41:25.361580 | 2020-11-13T13:04:53 | 2020-11-13T13:04:53 | 296,341,628 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 407 | py | from urllib import request, parse
# POST a small form to httpbin.org and print the echoed response.
url = 'http://httpbin.org/post'
headers = {
    'User-Agent': 'Mozilla/4.0 (compatible; MSIE 5.5; windows NT)',
    'Host': 'httpbin.org'
}
# Renamed from `dict` so the builtin type is not shadowed.
form_fields = {
    'name': 'Germey'
}
data = bytes(parse.urlencode(form_fields), encoding='utf8')
req = request.Request(url=url, headers=headers, data=data, method='POST')
response = request.urlopen(req)
print(response.read().decode('utf-8'))
| [
"wkk972509350@gmail.com"
] | wkk972509350@gmail.com |
8e830639fc2ef0cc682f1d742ee537d47985f00f | a643c2ed78b48e4cacf140776fbedd0191881e18 | /samples/openapi3/client/3_0_3_unit_test/python/unit_test_api/paths/request_body_post_array_type_matches_arrays_request_body/post.py | 0ea29561b94824f85ffa27f8c2c1bf9e99c5e0c7 | [
"Apache-2.0"
] | permissive | padamstx/openapi-generator | 5ae41f68a4f9349d76c1db81b9ff82e18e5b4b7c | 00604aff594864447c134ddb1982565136e27857 | refs/heads/master | 2023-03-08T20:11:36.318959 | 2022-09-28T16:34:17 | 2022-09-28T16:34:17 | 160,528,958 | 0 | 1 | Apache-2.0 | 2023-02-24T16:13:11 | 2018-12-05T14:17:50 | Java | UTF-8 | Python | false | false | 5,206 | py | # coding: utf-8
"""
Generated by: https://openapi-generator.tech
"""
from dataclasses import dataclass
import typing_extensions
import urllib3
from urllib3._collections import HTTPHeaderDict
from unit_test_api import api_client, exceptions
from datetime import date, datetime # noqa: F401
import decimal # noqa: F401
import functools # noqa: F401
import io # noqa: F401
import re # noqa: F401
import typing # noqa: F401
import typing_extensions # noqa: F401
import uuid # noqa: F401
import frozendict # noqa: F401
from unit_test_api import schemas # noqa: F401
from unit_test_api.model.array_type_matches_arrays import ArrayTypeMatchesArrays
from . import path
# body param
# The JSON request body is validated directly against the
# ArrayTypeMatchesArrays schema (generated code).
SchemaForRequestBodyApplicationJson = ArrayTypeMatchesArrays
# Descriptor for the (required) application/json request body.
request_body_array_type_matches_arrays = api_client.RequestBody(
    content={
        'application/json': api_client.MediaType(
            schema=SchemaForRequestBodyApplicationJson),
    },
    required=True,
)
@dataclass
class ApiResponseFor200(api_client.ApiResponse):
    # HTTP 200 carries no body or headers for this endpoint.
    response: urllib3.HTTPResponse
    body: schemas.Unset = schemas.unset
    headers: schemas.Unset = schemas.unset


_response_for_200 = api_client.OpenApiResponse(
    response_cls=ApiResponseFor200,
)
# Maps HTTP status code (as a string) to its response deserializer.
_status_code_to_response = {
    '200': _response_for_200,
}
class BaseApi(api_client.Api):
    # Generated by openapi-generator; shared implementation behind the two
    # public entry-point classes below.

    def _post_array_type_matches_arrays_request_body_oapg(
        self: api_client.Api,
        body: typing.Union[SchemaForRequestBodyApplicationJson, ],
        content_type: str = 'application/json',
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: bool = False,
    ) -> typing.Union[
        ApiResponseFor200,
        api_client.ApiResponseWithoutDeserialization
    ]:
        """
        :param skip_deserialization: If true then api_response.response will be set but
            api_response.body and api_response.headers will not be deserialized into schema
            class instances
        """
        used_path = path.value

        _headers = HTTPHeaderDict()
        # TODO add cookie handling

        if body is schemas.unset:
            raise exceptions.ApiValueError(
                'The required body parameter has an invalid value of: unset. Set a valid value instead')
        _fields = None
        _body = None
        # Serialize the body for the chosen content type; the serializer
        # yields either multipart 'fields' or a raw 'body'.
        serialized_data = request_body_array_type_matches_arrays.serialize(body, content_type)
        _headers.add('Content-Type', content_type)
        if 'fields' in serialized_data:
            _fields = serialized_data['fields']
        elif 'body' in serialized_data:
            _body = serialized_data['body']
        response = self.api_client.call_api(
            resource_path=used_path,
            method='post'.upper(),
            headers=_headers,
            fields=_fields,
            body=_body,
            stream=stream,
            timeout=timeout,
        )
        if skip_deserialization:
            api_response = api_client.ApiResponseWithoutDeserialization(response=response)
        else:
            # Pick the deserializer by HTTP status; unknown statuses fall
            # back to the undeserialized raw response.
            response_for_status = _status_code_to_response.get(str(response.status))
            if response_for_status:
                api_response = response_for_status.deserialize(response, self.api_client.configuration)
            else:
                api_response = api_client.ApiResponseWithoutDeserialization(response=response)

        if not 200 <= response.status <= 299:
            raise exceptions.ApiException(api_response=api_response)

        return api_response
class PostArrayTypeMatchesArraysRequestBody(BaseApi):
    # this class is used by api classes that refer to endpoints with operationId fn names

    def post_array_type_matches_arrays_request_body(
        self: BaseApi,
        body: typing.Union[SchemaForRequestBodyApplicationJson, ],
        content_type: str = 'application/json',
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: bool = False,
    ) -> typing.Union[
        ApiResponseFor200,
        api_client.ApiResponseWithoutDeserialization
    ]:
        # Thin delegate: all work happens in the shared BaseApi helper.
        return self._post_array_type_matches_arrays_request_body_oapg(
            body=body,
            content_type=content_type,
            stream=stream,
            timeout=timeout,
            skip_deserialization=skip_deserialization
        )
class ApiForpost(BaseApi):
    # this class is used by api classes that refer to endpoints by path and http method names

    def post(
        self: BaseApi,
        body: typing.Union[SchemaForRequestBodyApplicationJson, ],
        content_type: str = 'application/json',
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: bool = False,
    ) -> typing.Union[
        ApiResponseFor200,
        api_client.ApiResponseWithoutDeserialization
    ]:
        # Thin delegate: all work happens in the shared BaseApi helper.
        return self._post_array_type_matches_arrays_request_body_oapg(
            body=body,
            content_type=content_type,
            stream=stream,
            timeout=timeout,
            skip_deserialization=skip_deserialization
        )
| [
"noreply@github.com"
] | noreply@github.com |
47b13cbf68cba49d07c499ee6026f47fc228aece | 353def93fa77384ee3a5e3de98cfed318c480634 | /.history/week02/1/proxy/proxy/spiders/maoyan_20200705155519.py | 5832d2f7ffe5ee7f1c5b3c601dddf5c249d1eb51 | [] | no_license | ydbB/Python001-class01 | d680abc3ea1ccaeb610751e3488421417d381156 | ad80037ccfc68d39125fa94d2747ab7394ac1be8 | refs/heads/master | 2022-11-25T11:27:45.077139 | 2020-07-19T12:35:12 | 2020-07-19T12:35:12 | 272,783,233 | 0 | 0 | null | 2020-06-16T18:28:15 | 2020-06-16T18:28:15 | null | UTF-8 | Python | false | false | 1,400 | py | import scrapy
from proxy.items import ProxyItem
import lxml.etree
class MaoyanSpider(scrapy.Spider):
    """Scrapy spider for the Maoyan movie board (top-100 list)."""
    name = 'maoyan'
    allowed_domains = ['maoyan.com']
    start_urls = ['http://maoyan.com/']
    # NOTE(review): this assignment is truncated in the source — `header` has
    # no right-hand side, so the file does not parse as-is.  It is presumably
    # a dict of HTTP request headers (e.g. a User-Agent); restore it before
    # running the spider.
    header =
    # def parse(self, response):
    #     pass
    def start_requests(self):
        # Fetch the "board/4" (top-100) page with the custom headers.
        url = f'https://maoyan.com/board/4'
        yield scrapy.Request(url=url,headers=self.header,callback=self.parse)
    def parse(self, response):
        # Extract film entries from the board page and follow each film link.
        selector = lxml.etree.HTML(response.text)
        item =ProxyItem()
        for i in range(0,10):
            # NOTE(review): '[i]' inside these XPath strings is literal text —
            # the loop variable is never interpolated, so every iteration
            # evaluates the same XPath.  Likely intended dd[<i+1>].
            # NOTE(review): lxml's xpath() returns a list; calling .get()/.text
            # directly on it looks inconsistent — verify against the lxml API.
            link = selector.xpath('//*[@id="app"]/div/div/div[1]/dl/dd[i]/div/div/div[1]/p[1]/a').get('href')
            name = selector.xpath('//*[@id="app"]/div/div/div[1]/dl/dd[i]/div/div/div[1]/p[1]/a').get('title')
            time = selector.xpath('//*[@id="app"]/div/div/div[1]/dl/dd[i]/div/div/div[1]/p[3]').text
            item['films_name'] = name
            item['release_time'] = time
            print(link)
            yield scrapy.Request(url=link, headers = self.header, meta={'item':item},callback=self.parse1)
    def parse1(self, response):
        # Detail page: pull the film type and emit the completed item.
        item = response.meta['item']
        selector = lxml.etree.HTML(response.text)
        type = selector.xpath('/html/body/div[3]/div/div[2]/div[1]/ul/li[1]').text.replace('\n',' ')
        print(type)
        item['films_type'] = type
        print(item)
        yield item
| [
"31039587+ydbB@users.noreply.github.com"
] | 31039587+ydbB@users.noreply.github.com |
379be89df60db83ebb933ca5c3a731bc0ff4aa09 | df7ce684cb7904f79c70b2756b11dc0846664369 | /viriato/projects/models/milestone.py | e98593daae8d6c38a527b6a222a2786a5c581a1f | [] | no_license | mlouro/django-viriato | 0c0e76241e4c1478312db7afd19964d3766720bc | c1ed88ab5c436198082ac04de2fcfc57455b769e | refs/heads/master | 2016-09-05T20:43:17.673548 | 2009-10-19T18:58:10 | 2009-10-19T18:58:10 | 260,345 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 870 | py | # -*- coding: utf-8 -*-
from django.db import models
from django.contrib import admin
from project import Project
from django.contrib.auth.models import User
from django.forms import ModelForm
from datetime import datetime
# Milestone
class Milestone(models.Model):
    """A dated deliverable inside a project; tasks attach to it via FK."""
    title = models.CharField(max_length=200)
    end_date = models.DateTimeField()                          # due date of the milestone
    project = models.ForeignKey(Project)                       # owning project
    completed = models.BooleanField()
    created = models.DateTimeField(blank=False, auto_now_add=True)   # set once on insert
    modified = models.DateTimeField(blank=False, auto_now=True)      # refreshed on every save
    class Meta:
        app_label = 'projects'
        # Custom read permission; add/change/delete come from Django itself.
        permissions = (
            ("view_milestone", "Can view milestones"),
        )
    def __unicode__(self):
        # Python 2 string representation used by the admin and shell.
        return self.title
    def get_parent_tasks(self):
        # Top-level tasks of this milestone (no parent task), with related
        # objects prefetched to avoid N+1 queries.
        return self.task_set.filter(parent=None).select_related()
"mlouro@gmail.com"
] | mlouro@gmail.com |
c36cb49992f854c8dec5b85a42e7a89d1dadb3e3 | 1901305018a10c5acdf92b79a79724b09a3f49d1 | /FisherLDA.py | 8fa274e5b6ef0cc04fc6dd059f35a3ab49f79c1a | [] | no_license | liuchen11/RobustFisherLDA | 3e060180dbed4cad3a85cddc12001685aa5128b8 | 0fae198095532403444afc0086c8cfdd9326f028 | refs/heads/master | 2021-01-19T04:44:59.941195 | 2016-06-12T11:45:07 | 2016-06-12T11:45:07 | 60,423,672 | 5 | 2 | null | null | null | null | UTF-8 | Python | false | false | 9,250 | py | from load import loader as loader
from sklearn.preprocessing import LabelEncoder
import numpy as np
from log import log
from matplotlib import pyplot as plt
import util
def computeMeanVec(X, y, uniqueClass):
    """
    Step 1: compute the d-dimensional mean vector of every class.

    X -- (n, d) sample matrix; y -- length-n label vector with labels 1..K.
    uniqueClass -- the distinct labels (only its length is used).
    Returns the list of K class-mean vectors, in label order.
    """
    np.set_printoptions(precision=4)
    means = []
    for label in range(1, len(uniqueClass) + 1):
        class_mean = np.mean(X[y == label], axis=0)
        means.append(class_mean)
        log('Mean Vector class %s: %s\n' % (label, class_mean))
    return means
def computeWithinScatterMatrices(X, y, feature_no, uniqueClass, mean_vectors):
    """
    Step 2.1: accumulate the within-class scatter matrix
    S_W = sum_c sum_{x in class c} (x - mu_c)(x - mu_c)^T.
    """
    S_W = np.zeros((feature_no, feature_no))
    for label, mean_vec in zip(range(1, len(uniqueClass) + 1), mean_vectors):
        mu = mean_vec.reshape(feature_no, 1)
        class_scatter = np.zeros((feature_no, feature_no))
        for sample in X[y == label]:
            diff = sample.reshape(feature_no, 1) - mu
            class_scatter += diff.dot(diff.T)
        S_W += class_scatter
    log('within-class Scatter Matrix: {}\n'.format(S_W))
    return S_W
def computeBetweenClassScatterMatrices(X, y, feature_no, mean_vectors):
    """
    Step 2.2: accumulate the between-class scatter matrix
    S_B = sum_c n_c (mu_c - mu)(mu_c - mu)^T, where mu is the overall mean.
    """
    overall_mean = np.mean(X, axis=0).reshape(feature_no, 1)
    S_B = np.zeros((feature_no, feature_no))
    for idx, mean_vec in enumerate(mean_vectors):
        n_c = X[y == idx + 1, :].shape[0]          # class sample count
        centered = mean_vec.reshape(feature_no, 1) - overall_mean
        S_B += n_c * centered.dot(centered.T)
    log('between-class Scatter Matrix: {}\n'.format(S_B))
    return S_B
def computeEigenDecom(S_W, S_B, feature_no):
    """
    Step 3: solve the generalized eigenvalue problem for S_W^-1 * S_B.

    Returns (eig_vals, eig_vecs) from np.linalg.eig, after verifying each
    eigenpair numerically.
    """
    # Regularize: add a very small value to the diagonal before inversion so
    # a singular S_W can still be inverted.
    # BUG FIX: the original used `10^-6`, which is bitwise XOR in Python
    # (10 ^ -6 == -16), not the intended tiny constant 1e-6 — it *subtracted*
    # 16 from every diagonal entry instead of adding a small ridge.
    m = 1e-6
    eig_vals, eig_vecs = np.linalg.eig(np.linalg.inv(S_W + np.eye(S_W.shape[1]) * m).dot(S_B))
    for i in range(len(eig_vals)):
        eigvec_sc = eig_vecs[:, i].reshape(feature_no, 1)
        log('\nEigenvector {}: \n{}'.format(i + 1, eigvec_sc.real))
        log('Eigenvalue {:}: {:.2e}'.format(i + 1, eig_vals[i].real))
    # Sanity check: A v = lambda v for every returned pair.
    for i in range(len(eig_vals)):
        eigv = eig_vecs[:, i].reshape(feature_no, 1)
        np.testing.assert_array_almost_equal(np.linalg.inv(S_W + np.eye(S_W.shape[1]) * m).dot(S_B).dot(eigv),
                                             eig_vals[i] * eigv,
                                             decimal=6, err_msg='', verbose=True)
    log('Eigenvalue Decomposition OK')
    return eig_vals, eig_vecs
def selectFeature(eig_vals, eig_vecs, feature_no):
    """
    Step 4: select linear discriminants for the new feature subspace.

    Sorts the eigenpairs by decreasing |eigenvalue| and stacks the two
    leading eigenvectors into the (feature_no, 2) projection matrix W.
    """
    pairs = [(np.abs(val), eig_vecs[:, idx]) for idx, val in enumerate(eig_vals)]
    pairs.sort(key=lambda pair: pair[0], reverse=True)
    log('Eigenvalues in decreasing order:\n')
    for pair in pairs:
        log(pair[0])
    log('Variance explained:\n')
    eig_total = sum(eig_vals)
    for rank, pair in enumerate(pairs):
        log('eigenvalue {0:}: {1:.2%}'.format(rank + 1, (pair[0] / eig_total).real))
    # Keep the two eigenvectors with the largest eigenvalues.
    W = np.hstack((pairs[0][1].reshape(feature_no, 1),
                   pairs[1][1].reshape(feature_no, 1)))
    log('Matrix W: \n{}'.format(W.real))
    return W
def transformToNewSpace(X, W, sample_no, mean_vectors, uniqueClass):
    """
    Step 5: project the samples and the class means onto the subspace
    spanned by the columns of W.

    sample_no is unused; it is kept for interface compatibility.
    Returns (X_trans, mean_vecs_trans).
    """
    X_trans = X.dot(W)
    mean_vecs_trans = [mean_vectors[k].dot(W) for k in range(len(uniqueClass))]
    return X_trans, mean_vecs_trans
def computeErrorRate(X_trans, mean_vecs_trans, y):
    """
    Evaluate 1-D threshold classifiers on the projected data.

    For a projected component, the threshold is the midpoint of the
    projected class means; a sample is assigned to the class whose mean
    lies on the same side.  The component for the second largest
    eigenvalue is evaluated for logging only; the result for the largest
    eigenvalue (component 0) is returned as (accuracy, threshold).
    """
    def _evaluate(component, label_suffix):
        # Accuracy of thresholding projected column `component`; the
        # suffix keeps the log messages identical to the original output.
        uniqueClass = np.unique(y)
        threshold = 0
        for i in range(len(uniqueClass)):
            threshold += mean_vecs_trans[i][component]
        threshold /= len(uniqueClass)
        log("threshold: {}".format(threshold))
        errors = 0
        for i, label in enumerate(uniqueClass):
            tmp = X_trans[y == label, component]
            # Count the samples that fall on the wrong side of the threshold
            # relative to this class's projected mean.
            num = len(tmp[tmp < threshold]) if mean_vecs_trans[i][component] > threshold else len(tmp[tmp >= threshold])
            log("error rate in class {} = {}".format(i, num * 1.0 / len(tmp)))
            errors += num
        errorRate = errors * 1.0 / X_trans.shape[0]
        log("Error rate{} = {}".format(label_suffix, errorRate))
        log("Accuracy{} = {}".format(label_suffix, 1 - errorRate))
        return 1 - errorRate, threshold
    # Log-only evaluation of the second component ...
    _evaluate(1, " for the second largest eigenvalue")
    # ... and the returned evaluation of the leading component.
    return _evaluate(0, "")
def plot_step_lda(X_trans, y, label_dict, uniqueClass, dataset, threshold):
    """
    Scatter-plot the samples projected onto the first two linear
    discriminants, one marker/colour per class, with the decision
    threshold drawn as a vertical line.  Shows the figure interactively.
    """
    ax = plt.subplot(111)
    # One scatter series per class; markers/colours support two classes.
    for label,marker,color in zip(range(1, len(uniqueClass)+1),('^', 's'),('blue', 'red')):
        plt.scatter(x=X_trans[:,0].real[y == label],
                    y=X_trans[:,1].real[y == label],
                    marker=marker,
                    color=color,
                    alpha=0.5,
                    label=label_dict[label]
                    )
    plt.xlabel('LDA1')
    plt.ylabel('LDA2')
    leg = plt.legend(loc='upper right', fancybox=True)
    leg.get_frame().set_alpha(0.5)
    plt.title('Fisher LDA: {} data projection onto the first 2 linear discriminants'.format(dataset))
    # plot the threshold line (vertical, at the LDA1 decision threshold)
    [bottom, up] = ax.get_ylim()
    plt.axvline(threshold.real, linewidth=2, color='g')
    # hide axis ticks
    plt.tick_params(axis="both", which="both", bottom="off", top="off",
                    labelbottom="on", left="off", right="off", labelleft="on")
    # remove axis spines
    ax.spines["top"].set_visible(False)
    ax.spines["right"].set_visible(False)
    ax.spines["bottom"].set_visible(False)
    ax.spines["left"].set_visible(False)
    plt.grid()
    plt.show()
def mainFisherLDAtest(dataset='sonar', alpha=0.5):
    """
    Run the full Fisher LDA pipeline on one data set.

    dataset -- base name of the data set (expects <dataset>/<dataset>.data).
    alpha -- fraction of the samples used for training (rest is test data).
    Returns the classification accuracy on the held-out split.
    """
    # load data and split it into train/test portions
    path = dataset + '/' + dataset + '.data'
    load = loader(path)
    [X, y] = load.load()
    [X, y, testX, testY] = util.divide(X, y, alpha)
    X = np.array(X)
    testX = np.array(testX)
    feature_no = X.shape[1] # define the dimension
    sample_no = X.shape[0] # define the sample number
    # preprocessing: encode raw labels as consecutive integers 1..K
    enc = LabelEncoder()
    label_encoder = enc.fit(y)
    y = label_encoder.transform(y) + 1
    testY = label_encoder.transform(testY) + 1
    uniqueClass = np.unique(y) # define how many class in the outputs
    label_dict = {} # define the label name
    for i in range(1, len(uniqueClass)+1):
        label_dict[i] = "Class"+str(i)
    log(label_dict)
    # Step 1: Computing the d-dimensional mean vectors for different class
    mean_vectors = computeMeanVec(X, y, uniqueClass)
    # Step 2: Computing the Scatter Matrices
    S_W = computeWithinScatterMatrices(X, y, feature_no, uniqueClass, mean_vectors)
    S_B = computeBetweenClassScatterMatrices(X, y, feature_no, mean_vectors)
    # Step 3: Solving the generalized eigenvalue problem for the matrix S_W^-1 * S_B
    eig_vals, eig_vecs = computeEigenDecom(S_W, S_B, feature_no)
    # Step 4: Selecting linear discriminants for the new feature subspace
    W = selectFeature(eig_vals, eig_vecs, feature_no)
    # Step 5: Transforming the samples onto the new subspace
    # (note: the *test* samples are projected here)
    X_trans, mean_vecs_trans = transformToNewSpace(testX, W, sample_no, mean_vectors, uniqueClass)
    # Step 6: compute error rate on the projected test data
    accuracy, threshold = computeErrorRate(X_trans, mean_vecs_trans, testY)
    # plot (disabled)
    #plot_step_lda(X_trans, testY, label_dict, uniqueClass, dataset, threshold)
    return accuracy
if __name__ == "__main__":
    # Script entry point (Python 2 — note the print statement below).
    dataset = ['ionosphere', 'sonar'] # choose the dataset
    alpha = 0.6 # choose the train data percentage
    accuracy = mainFisherLDAtest(dataset[1], alpha)
    print accuracy
| [
"locky1218@gmail.com"
] | locky1218@gmail.com |
4b21f558ab50a8365c5dc1d50a6927abcfbca3d4 | 019353a6c263f8da4631e6a1c0c8094e749440b3 | /lab3/ex3_another_attempt_of_refactoring.py | ab9a554a8c9c0bb6aa8b66e1746a2f8fc97c2cb7 | [] | no_license | veratsurkis/infa_2020_postnikov | 544b15583c9c57af2fb3e3bc6176647a2962b37d | 3b7009f063c6b9b016308d49abdf41943513ad9e | refs/heads/main | 2023-01-05T15:02:25.033602 | 2020-10-29T07:52:30 | 2020-10-29T07:52:30 | 300,522,328 | 0 | 0 | null | 2020-10-02T06:29:58 | 2020-10-02T06:29:57 | null | UTF-8 | Python | false | false | 10,510 | py | import pygame
from pygame.draw import *
import numpy as np
# Initialise all imported pygame modules before any display/draw calls.
pygame.init()
# Function draws a whitebear
def whitebear(Surface, x, y, size):
    """
    Draw the whole white bear scene part (body parts, lake, fishing rod).

    Surface -- target surface; (x, y) -- centre of the bear's body;
    size -- 1/9 of the body's horizontal size.
    The call order defines the paint (z-) order and must be preserved.
    """
    head(Surface, x, y, size)
    body(Surface, x, y, size)
    leg(Surface, x, y, size)
    lake(Surface, x, y, size)
    fishing_rod(Surface, x, y, size)
    arm(Surface, x, y, size)
# Function draws a fish
def fish(Surface, x, y, size):
    """
    Draw a complete fish at (x, y) with the given size.
    Fins are drawn first so the body overlaps their bases (paint order
    matters and must be preserved).
    """
    fins_of_fish(Surface, x, y, size)
    tail_of_fish(Surface, x, y, size)
    body_of_fish(Surface, x, y, size)
    eyes_of_fish(Surface, x, y, size)
# Functions draw different parts of whitebear
def head(Surface, x, y, size):
    '''
    Draw the white bear's head.

    Surface is the area where the bear is drawn.
    x is the horizontal coordinate of the centre of the bear's body.
    y is the vertical coordinate of the centre of the bear's body.
    size is a parameter equal to 1/9 of the body's horizontal size.
    '''
    # Each filled white shape is followed by the same ellipse drawn with
    # width 1 in black to give it an outline.
    ellipse(Surface, white, (x + size*4, y - size*2.1, size*6, size*3))
    ellipse(Surface, black, (x + size*4, y - size*2.1, size*6, size*3), 1)
    # Small ellipse at the upper-left of the head (presumably the ear).
    ellipse(Surface, white, (x + size*4.5, y - size*2, size*1, size*1.2))
    ellipse(Surface, black, (x + size*4.5, y - size*2, size*1, size*1.2), 1)
    # Two small filled black circles (presumably eye and nose).
    ellipse(Surface, black, (x + size*6.5, y - size*1.3, size*0.5, size*0.5))
    ellipse(Surface, black, (x + size*9.7, y - size*1, size*0.5, size*0.5))
    # Arc across the lower head (presumably the mouth line).
    arc(Surface, black, (x + size*4, y - size*1.6, size*6, size*2), 4.5, 0, 1)
def body(Surface, x, y, size):
    '''
    Draw the white bear's body (torso).

    Surface is the area where the bear is drawn.
    x is the horizontal coordinate of the centre of the bear's body.
    y is the vertical coordinate of the centre of the bear's body.
    size is a parameter equal to 1/9 of the body's horizontal size.
    '''
    # Filled white ellipse with a 1-px black outline on top.
    ellipse(Surface, white, (x, y, size*9, size*15))
    ellipse(Surface, black, (x, y, size*9, size*15), 1)
def leg (Surface, x, y, size):
    '''
    Draw the white bear's legs.

    Surface is the area where the bear is drawn.
    x is the horizontal coordinate of the centre of the bear's body.
    y is the vertical coordinate of the centre of the bear's body.
    size is a parameter equal to 1/9 of the body's horizontal size.
    '''
    # Upper leg: filled white ellipse plus 1-px black outline.
    ellipse(Surface, white, (x + size*4, y + size*10, size*6, size*5))
    ellipse(Surface, black, (x + size*4, y + size*10, size*6, size*5), 1)
    # Foot: flatter ellipse drawn over the leg.
    ellipse(Surface, white, (x + size*8, y + size*13.5, size*5, size*2))
    ellipse(Surface, black, (x + size*8, y + size*13.5, size*5, size*2), 1)
def lake(Surface, x, y, size):
    '''
    Draw the lake (ice hole) next to the bear.

    Surface is the area where the bear is drawn.
    x is the horizontal coordinate of the centre of the bear's body.
    y is the vertical coordinate of the centre of the bear's body.
    size is a parameter equal to 1/9 of the body's horizontal size.
    '''
    # Outer light-grey rim with a black outline ...
    ellipse(Surface, (200, 200, 200), (x + size*15, y + size*10 - size*2, size*10, size*4))
    ellipse(Surface, black, (x + size*15, y + size*10 - size*2, size*10, size*4), 1)
    # ... and the teal water surface inside it, with a darker outline.
    ellipse(Surface, (0, 150, 150), (x + size*16, y + size*8.75, size*8, size*3.2))
    ellipse(Surface, (0, 50, 50), (x + size*16, y + size*8.75, size*8, size*3.2), 1)
def fishing_rod(Surface, x, y, size):
    '''
    Draw the bear's fishing rod.

    Surface is the area where the bear is drawn.
    x is the horizontal coordinate of the centre of the bear's body.
    y is the vertical coordinate of the centre of the bear's body.
    size is a parameter equal to 1/9 of the body's horizontal size.
    '''
    # Rod: a polyline from the bear's paw up and to the right.
    lines(Surface, black, False, [
        (x + size*9.5, y + size*7),
        (x + size*13.5, y + size*2.3),
        (x + size*15, y + size*1),
        (x + size*18, y + size*-1.5),
        (x + size*21, y + size*-2.8),
        ], int(size*0.2))
    # Fishing line: vertical segment from the rod tip down into the lake.
    line(Surface, black, (x + size*21, y + size*-2.8), (x + size*21, y + size*10), 1)
def arm(Surface, x, y, size):
    '''
    Draw the bear's arm (drawn last so it overlaps the rod handle).

    Surface is the area where the bear is drawn.
    x is the horizontal coordinate of the centre of the bear's body.
    y is the vertical coordinate of the centre of the bear's body.
    size is a parameter equal to 1/9 of the body's horizontal size.
    '''
    # Filled white ellipse with a 1-px black outline.
    ellipse(Surface, white, (x + size*7, y + size*4, size*5, size*2))
    ellipse(Surface, black, (x + size*7, y + size*4, size*5, size*2), 1)
# Functions draw different parts of fish
def fins_of_fish(Surface, x, y, size):
    '''
    Draw the three red fins of the fish, each as a filled polygon
    followed by the same polygon with a 1-px black outline.

    Surface is the area where the fish is drawn.
    x, y are the coordinates of the centre of the fish.
    size is the size of the fish.
    '''
    # Top fin (negative y offsets: above the body centre line).
    polygon(Surface, red, [
        (x + size*4, y - size*0.6),
        (x + size*3.7, y - size*0.9),
        (x + size*3, y - size*1.1),
        (x + size*2.5, y - size*1.2),
        (x + size*4.2, y - size*1.4),
        (x + size*4.55, y - size*1.1),
        (x + size*4.5, y - size*0.6),
    ])
    polygon(Surface, black, [
        (x + size*4, y - size*0.6),
        (x + size*3.7, y - size*0.9),
        (x + size*3, y - size*1.1),
        (x + size*2.5, y - size*1.2),
        (x + size*4.2, y - size*1.4),
        (x + size*4.55, y - size*1.1),
        (x + size*4.5, y - size*0.6),
    ], 1)
    # Lower rear fin (positive y offsets: below the centre line).
    polygon(Surface, red, [
        (x + size*3, y + size*0.3),
        (x + size*3.4, y + size*0.3),
        (x + size*3.5, y + size*0.7),
        (x + size*2.8, y + size*0.8),
        (x + size*3, y + size*0.55),
    ])
    polygon(Surface, black, [
        (x + size*3, y + size*0.3),
        (x + size*3.4, y + size*0.3),
        (x + size*3.5, y + size*0.7),
        (x + size*2.8, y + size*0.8),
        (x + size*3, y + size*0.55),
    ], 1)
    # Lower front fin.
    polygon(Surface, red, [
        (x + size*4.8, y + size*0.2),
        (x + size*4.9, y + size*0.1),
        (x + size*5.2, y + size*0.25),
        (x + size*5.8, y + size*0.3),
        (x + size*4.8, y + size*0.7),
        (x + size*4.7, y + size*0.55),
    ])
    polygon(Surface, black, [
        (x + size*4.8, y + size*0.2),
        (x + size*4.9, y + size*0.1),
        (x + size*5.2, y + size*0.25),
        (x + size*5.8, y + size*0.3),
        (x + size*4.8, y + size*0.7),
        (x + size*4.7, y + size*0.55),
    ], 1)
def tail_of_fish(Surface, x, y, size):
    """
    Draw the fish's tail: a grey quadrilateral anchored at (x, y) with a
    1-px black outline drawn over the fill.
    """
    polygon(Surface, grey, [
        (x, y),
        (x + size*0.5, y + size*0.1),
        (x + size*2, y),
        (x + size*0.5, y + size)
    ])
    polygon(Surface, black, [
        (x, y),
        (x + size*0.5, y + size*0.1),
        (x + size*2, y),
        (x + size*0.5, y + size)
    ], 1)
def body_of_fish(Surface, x, y, size):
    """
    Draw the fish's body: a grey eight-sided polygon (drawn over the fins
    and tail) followed by its 1-px black outline.
    """
    polygon(Surface, grey, [
        (x + size*2, y),
        (x + size*3.25, y - size*0.72),
        (x + size*4.5, y - size*0.9),
        (x + size*5.25, y - size*0.75),
        (x + size*6, y - size*0.4),
        (x + size*5.4, y + size*0.1),
        (x + size*4.5, y + size*0.4),
        (x + size*3, y + size*0.4)
    ])
    polygon(Surface, black, [
        (x + size*2, y),
        (x + size*3.25, y - size*0.72),
        (x + size*4.5, y - size*0.9),
        (x + size*5.25, y - size*0.75),
        (x + size*6, y - size*0.4),
        (x + size*5.4, y + size*0.1),
        (x + size*4.5, y + size*0.4),
        (x + size*3, y + size*0.4)
    ], 1)
def eyes_of_fish(Surface, x, y, size):
    """
    Draw the fish's eye: blue iris, black pupil, and a small white
    highlight, painted in that order.
    """
    circle(Surface, (0, 121, 255), (int(x + size*5), int(y - size*0.42)), int(size*0.27))
    circle(Surface, black, (int(x + size*5), int(y - size*0.42)), int(size*0.1))
    circle(Surface, white, (int(x + size*4.9), int(y - size*0.43)), int(size*0.07))
# Constants:
FPS = 30
white = (235, 235, 235)
black = (0, 0, 0)
chromakey = (0, 255, 0)      # transparent colour for intermediate surfaces
grey = (230, 220, 255)
red = (255, 120, 115)
yellow = (255, 255, 100)
backcolor = (0, 255, 255)
zoom = 12                    # global scale factor for the whole scene
# Background: cyan sky over a white "snow" band with a thin outline.
main_background = pygame.display.set_mode((40*zoom, 60*zoom))
main_background.fill(backcolor)
rect(main_background, white, (0, 34*zoom, 40*zoom, 60*zoom))
rect(main_background, black, (0, 34*zoom, 40*zoom, 60*zoom), 1)
# Surfaces with Sun: a semi-transparent ring with a cross ...
sun_surface_1 = pygame.Surface((30*zoom, 60*zoom))
sun_surface_1.fill(chromakey)
sun_surface_1.set_colorkey(chromakey)
sun_surface_1.set_alpha(150)
circle(sun_surface_1, yellow, (15*zoom, 15*zoom), 15*zoom)
circle(sun_surface_1, backcolor, (15*zoom, 15*zoom), 12*zoom)
rect(sun_surface_1, yellow, (14*zoom, 0, 2*zoom, 30*zoom))
rect(sun_surface_1, yellow, (0, 14*zoom, 30*zoom, 2*zoom))
main_background.blit(sun_surface_1, (10*zoom, -zoom))
# ... plus a second, more opaque layer of small circles for the core/rays.
sun_surface_2 = pygame.Surface((30*zoom, 60*zoom))
sun_surface_2.fill(chromakey)
sun_surface_2.set_colorkey(chromakey)
sun_surface_2.set_alpha(175)
circle(sun_surface_2, yellow, (15*zoom, 15*zoom), int(2*zoom))
circle(sun_surface_2, yellow, (15*zoom, 2*zoom), int(1.2*zoom))
circle(sun_surface_2, yellow, (28*zoom, 15*zoom), int(1.2*zoom))
circle(sun_surface_2, yellow, (2*zoom, 15*zoom), int(1.1*zoom))
circle(sun_surface_2, yellow, (15*zoom, 28*zoom), int(1*zoom))
main_background.blit(sun_surface_2, (10*zoom, -zoom))
# Surface with fishes and bear No. 1: one fish is drawn once and reused
# via rotate/flip/scale blits at different positions.
background = pygame.Surface((60*zoom, 60*zoom))
background.fill(chromakey)
background.set_colorkey(chromakey)
fish_surface = pygame.Surface((40*zoom, 40*zoom))
fish_surface.fill(chromakey)
fish_surface.set_colorkey(chromakey)
fish(fish_surface, 2*zoom, 8*zoom, 2*zoom)
background.blit(pygame.transform.rotate(fish_surface, -10), (20*zoom, 38*zoom))
background.blit(pygame.transform.flip(fish_surface, True, False), (1*zoom, 40*zoom))
background.blit(pygame.transform.flip(fish_surface, True, False), (4*zoom, 45*zoom))
background.blit(fish_surface, (17*zoom, 43*zoom))
background.blit(pygame.transform.scale(pygame.transform.flip(fish_surface, True, False), (20*zoom, 20*zoom)), (17*zoom, 32*zoom))
background.blit(pygame.transform.scale(pygame.transform.flip(fish_surface, False, True), (20*zoom, 20*zoom)), (22*zoom, 20*zoom))
background.blit(pygame.transform.scale(pygame.transform.flip(fish_surface, False, True), (20*zoom, 20*zoom)), (32*zoom, 21*zoom))
# The bear is composed on its own surface, merged into the fish scene ...
bear_surface = pygame.Surface((60*zoom, 60*zoom))
bear_surface.fill(chromakey)
bear_surface.set_colorkey(chromakey)
whitebear(bear_surface, 5*zoom, 6*zoom, 1.5*zoom)
background.blit(bear_surface, (- 3*zoom, 20*zoom))
# ... and the whole scene is stamped onto the window at several scales.
main_background.blit(pygame.transform.scale(pygame.transform.flip(background, True, False), (20*zoom, 20*zoom)), (20*zoom, 23*zoom))
main_background.blit(pygame.transform.scale(pygame.transform.flip(background, True, False), (23*zoom, 23*zoom)), (3*zoom, 25*zoom))
main_background.blit(pygame.transform.scale(background, (25*zoom, 25*zoom)), (0, 35*zoom))
main_background.blit(pygame.transform.scale(pygame.transform.flip(background, True, False), (45*zoom, 45*zoom)), (2*zoom, 31*zoom))
pygame.display.update()
# Event loop: keep the window open until it is closed.
clock = pygame.time.Clock()
finished = False
while not finished:
    clock.tick(FPS)
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            finished = True
pygame.quit()
"veratsurkis@mail.ru"
] | veratsurkis@mail.ru |
2e700a8b104f941e835ad3727e5fea3b40320152 | 81e1c4617881ae8a9e32d08d9c25bd3dbd4501a5 | /code/Jiachen/test.py | b38b38ec72980968b99fb1e26f61fc69d12ff62c | [] | no_license | Jiacli/NLP-QA | 1a67c3a5c0e25311d498450ef9d9747804c9f5af | 69a8ced0603a4ad453fba31b7396ccb2fdcfb499 | refs/heads/master | 2021-01-11T19:34:42.777688 | 2015-04-14T05:55:23 | 2015-04-14T05:55:23 | 30,160,177 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 573 | py | #!/usr/bin/env python
import os
from nltk.parse import stanford
os.environ['STANFORD_PARSER'] = r'C:\Users\Jiachen\Documents\GitHub\AEIOU\code'
os.environ['STANFORD_MODELS'] = r'C:\Users\Jiachen\Documents\GitHub\AEIOU\code'
os.environ['JAVAHOME'] = r'C:\Root\Tools\Java\jdk1.8.0_25\bin'
parser = stanford.StanfordParser(model_path=r"C:\Users\Jiachen\Documents\GitHub\AEIOU\code\englishPCFG.ser.gz")
sentences = parser.raw_parse("he drops more than six courses, it's amazing!")
print sentences
print sentences[0].pos()
# GUI
for sentence in sentences:
sentence.draw() | [
"jiacli.jk@gmail.com"
] | jiacli.jk@gmail.com |
7e37f6248c7ce6abe30827214c081a6ba02303b1 | 4b773e01344ed7e24b7a3fac775f6c0496a97d6f | /apsys2016/figure/aveUncover.py | 170f3d4e3ed847ff6c0c5b7a948464626c0f338f | [] | no_license | iCodeIN/bigsecurity | 3865802da90447ceb4a92f579bc42704e0b9df3b | f6061a6168cc9b0e8769b9539a5dcbc323bc6086 | refs/heads/master | 2022-01-05T07:39:42.746257 | 2019-01-25T03:16:22 | 2019-01-25T03:16:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,109 | py | import matplotlib.pyplot as plt
import matplotlib.dates as mdates
from matplotlib.ticker import MultipleLocator, FormatStrFormatter
from matplotlib.ticker import FixedLocator
import numpy as np
# Plot the degree of aveUncover (%) against 1/phi on log-log axes and
# save the result as aveUncover.pdf.
phiList = [10, 100, 1000]
overlapList = [3.87E-03, 8.77E-04, 9.86E-05]
fig, ax = plt.subplots()
ax.plot(phiList, overlapList, 'b-x', linewidth=2.0, markersize=14, mew=4)
# Extend the x range slightly past the data so the end markers are visible.
xmin = 9
xmax = 1100
ax.set_xlim(xmin, xmax)
for tick in ax.xaxis.get_major_ticks():
    tick.label.set_fontsize(18)
ax.set_xscale("log", nonposx='clip')
plt.xlabel('$1/\phi$', fontsize=24)
for tick in ax.yaxis.get_major_ticks():
    tick.label.set_fontsize(18)
# NOTE(review): `nonposx` is passed to set_yscale here; the matching keyword
# for the y axis would be `nonposy` — confirm against the matplotlib
# version in use.
ax.set_yscale("log", nonposx='clip')
plt.ylabel('Degree of aveUncover (%)', fontsize=24)
# Widen the margins so the large axis labels are not clipped.
plt.gcf().subplots_adjust(bottom=0.15)
plt.gcf().subplots_adjust(left=0.15)
ax.grid(True)
fig.savefig('aveUncover.pdf')
plt.close(fig)
"songlinhai0543@gmail.com"
] | songlinhai0543@gmail.com |
b3425b7ae724a9576651dd48c81a63a043675bfe | 926d93f74d83f30e34bde9a169b7811335ee0c0d | /Support Vector Machine/Support_Vector_Machine.py | 6c792240f173a4bcda0d0b3484df74698ebd554f | [
"MIT"
] | permissive | shreshthtuli/ML-Assignments | 27aa80d585ac3d6506df31e32e29c6cda1d29804 | 64228fdb602e21064b18dd5aa4e5f7de7e8df2ec | refs/heads/master | 2020-04-18T12:55:59.606823 | 2019-04-07T12:46:54 | 2019-04-07T12:46:54 | 167,548,423 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 11,893 | py | """
MIT License
Copyright (c) 2019 Shreshth Tuli
Machine Learning Model : Support Vector Machine
"""
import matplotlib
matplotlib.use('Agg')
import numpy as np
import matplotlib.pyplot as plt
import cvxpy as cvx
import math
from svmutil import *
from sklearn import svm, datasets
from sklearn.model_selection import train_test_split
from sklearn.metrics import confusion_matrix
from sklearn.utils.multiclass import unique_labels
from sklearn.metrics import classification_report
import time
def plot_confusion_matrix(cm, classes):
    """
    Render the confusion matrix `cm` as a heat map with per-cell counts.

    cm -- square integer matrix of counts; classes -- axis tick labels.
    Returns the matplotlib Axes holding the plot.
    """
    fig, ax = plt.subplots()
    im = ax.imshow(cm, interpolation='nearest', cmap=plt.cm.Blues)
    ax.figure.colorbar(im, ax=ax)
    ax.set(xticks=np.arange(cm.shape[1]), yticks=np.arange(cm.shape[0]),
           xticklabels=classes, yticklabels=classes, title='Confusion matrix',
           ylabel='True label', xlabel='Predicted label')
    # White text on dark cells, black on light ones, for readability.
    thresh = cm.max() / 2.
    for i in range(cm.shape[0]):
        for j in range(cm.shape[1]):
            ax.text(j, i, format(cm[i, j], 'd'), ha="center", va="center",
                    color="white" if cm[i, j] > thresh else "black")
    fig.tight_layout()
    return ax
def parseData(filename):
    """
    Read a comma-separated data file of pixel rows followed by a label.

    Pixels are scaled from [0, 255] to [0, 1]; labels are kept as-is.
    Returns (X, Y): X an (n, m) np.matrix of scaled features, Y an (n, 1)
    np.matrix of labels.
    """
    # `with` closes the file handle (the original leaked it), and
    # list(map(...)) keeps Python 2 behaviour while staying valid on
    # Python 3, where a bare map() is a lazy iterator.
    with open(filename) as f:
        read = np.matrix([list(map(float, line.strip().split(','))) for line in f])
    X = read[:, 0:-1] / 255   # scale pixel intensities to [0, 1]
    Y = read[:, -1]           # last column holds the class label
    return X, Y
def convertLinear(X, Y, d, e, changeY=True):
    """
    Extract the two-class subset {d, e} from a multi-class data set.

    X -- (n, m) feature matrix; Y -- (n, 1) label matrix.
    When changeY is True, labels d/e are remapped to -1/+1.
    Returns (retX, retY): selected rows as an ndarray and their
    (possibly remapped) labels as a column matrix.
    """
    mask = np.logical_or(Y == d, Y == e)
    retY = Y[mask]
    if changeY:
        retY[retY == d] = -1
        retY[retY == e] = 1
    selected_rows = np.where(mask)[0]
    retX = np.array(X[selected_rows, :])
    return retX, retY.T
def savefig(x, name="Image"):
    # Render a flattened 28x28 grey-scale image (784 pixels) and write it
    # to `name`.  NOTE(review): the 28x28 shape is hard-coded, so callers
    # must pass exactly 784 values.
    plt.imshow(x.reshape(28,28), cmap='gist_gray', interpolation='nearest')
    plt.savefig(name)
def linear_kernel(X, Y):
    """
    Build the Gram matrix for the SVM dual with a linear kernel:
    Q[i, j] = y_i * y_j * <x_i, x_j>.

    X -- (n, m) feature matrix; Y -- (n, 1) label matrix.
    Returns the (n, n) matrix Q.
    """
    # Compute the label-scaled feature matrix once (the original built it
    # twice, doubling the elementwise work).
    M = np.multiply(X, Y)
    return M * M.T
def gaussian_kernel(X, Y, gamma):
    """
    Build the Gram matrix for the SVM dual with an RBF (Gaussian) kernel:
    M[i][j] = y_i * y_j * exp(-gamma * ||x_i - x_j||^2).

    X -- (n, m) feature matrix; Y -- (n, 1) label matrix; gamma -- width.
    Returns an n x n list of lists of floats (callers wrap it in np.array).
    """
    n = Y.shape[0]
    # [0.0] * n replaces the Python-2-only `map(float, [0]) * n` row
    # initialiser, which breaks on Python 3 where map() is an iterator;
    # range replaces xrange for the same reason.  Behaviour is unchanged.
    M = [[0.0] * n for _ in range(n)]
    for i in range(n):
        for j in range(n):
            M[i][j] = Y.item(i) * Y.item(j) * (math.exp(-gamma * np.square(np.linalg.norm(X[i] - X[j])).item()))
    return M
def weight_vector(X, Y, a):
    """
    Recover the primal weight vector w = sum_i a_i * y_i * x_i from the
    dual coefficients.

    X -- (n, m) features; Y -- (n, 1) labels; a -- (n, 1) dual values.
    Returns w as a (1, m) matrix.
    """
    coeffs = np.multiply(Y, a)               # a_i * y_i, column vector
    weighted_rows = np.multiply(X, coeffs)   # scale each sample row
    return np.sum(weighted_rows, axis=0)
def gaussian_weight(X, Ya, Xn):
    # Evaluate an (averaged) RBF decision contribution for a sample Xn,
    # given Ya = the row vector of (alpha_i * y_i) coefficients.
    # NOTE(review): gaussian_kernel expects its second argument to be an
    # (n, 1) label matrix, but here it receives the training matrix X —
    # this looks inconsistent; verify before use.  The only call site in
    # this file (inside test()) is commented out.
    K = np.array(gaussian_kernel(Xn, X, 0.05))   # gamma hard-coded to 0.05
    print K.shape, Ya.T.shape
    res = K.dot(Ya.T)
    # Averages the contributions instead of summing — presumably a
    # normalisation choice; confirm against the SVM decision function.
    return np.sum(res)/res.size
def intercept(x, y, w):
    """
    Compute the SVM bias b = y_sv - w . x_sv from one support vector.

    x -- feature row of a support vector; y -- its label (+/-1);
    w -- (1, m) weight matrix.  Returns the scalar bias.
    """
    # reshape(1, -1) infers the feature count instead of hard-coding 784
    # (MNIST), so the helper works for any feature dimension; behaviour is
    # identical for 784-feature inputs.
    return (y - w * (x.reshape(1, -1).T)).item(0)
def gaussian_intercept(X, Y, alpha):
    """
    Estimate the SVM bias for the Gaussian kernel by averaging
    y_i - sum_j (alpha_j * y_j) * K[j, i] over all training points.
    gamma is fixed at 0.05, matching train()'s default.
    """
    gram = np.array(gaussian_kernel(X, X, 0.05))
    coeffs = np.multiply(Y, alpha.value)
    residuals = Y - coeffs.T.dot(gram).transpose()
    return np.sum(residuals) / residuals.size
def listify(X, Y):
    """
    Convert numpy matrices into the plain Python lists libsvm expects.

    X -- (n, m) feature matrix; Y -- (n, 1) label matrix.
    Returns (features, labels): a list of n float-lists and a list of
    n ints, in row order.
    """
    # Comprehensions (with range instead of the Python-2-only xrange)
    # replace the original index-accumulating loops; output is identical.
    labels = [int(Y.item(i)) for i in range(Y.shape[0])]
    features = [[X.item(i, j) for j in range(X.shape[1])]
                for i in range(X.shape[0])]
    return features, labels
def train(X, Y, kernel_type, C=1, gamma=0.05):
    """
    Solve the soft-margin SVM dual with CVXPY.

    X -- (n, m) feature matrix; Y -- (n, 1) label matrix with entries +/-1.
    kernel_type -- "linear" selects the linear kernel; anything else the RBF.
    C -- box constraint on the dual variables; gamma -- RBF width.
    Returns (w, b, alpha, count): primal weights, bias, the CVXPY dual
    variable, and the number of support vectors found.
    """
    alpha = cvx.Variable((Y.shape[0], 1)) # dual variable for optimization
    Q = linear_kernel(X, Y) if kernel_type == "linear" else gaussian_kernel(X, Y, gamma) # Gram matrix
    objective = cvx.Maximize(cvx.sum(alpha) - 0.5*(cvx.quad_form(alpha, Q))) # dual objective function
    constraints = [alpha >= 0, alpha <= C, alpha.T*Y == 0] # box + equality constraints
    cvx.Problem(objective, constraints).solve()
    index = np.zeros((alpha.value.size, 1)) # dense copy of the dual solution
    sv = 0; count = 0
    # A point counts as a support vector when its dual value lies in
    # (0.1, 1]; `sv` keeps the index of the *last* one, later used to
    # recover the bias for the linear kernel.
    # NOTE(review): both bounds are hard-coded — the upper bound only
    # matches C when C == 1, and 0.1 is an ad-hoc numerical cutoff.
    for i in xrange(alpha.size):
        index[i,0] = alpha[i].value
        if alpha[i].value > 0.1 and alpha[i].value <= 1:
            sv = i; count += 1
    w = weight_vector(X, Y, index)
    b = intercept(X[sv], Y[sv], w) if kernel_type == "linear" else gaussian_intercept(X, Y, alpha)
    return w, b, alpha, count
def test(w, b, d, e, filename, alpha, X, Y, kernel_type):
    """
    Score the binary classifier (w, b) for digits d vs e on the test file.
    Each misclassified sample is saved as an image under ./wrong/.
    Returns the accuracy in [0, 1].
    """
    testX, testY = parseData(filename)
    testX, testY = convertLinear(testX, testY, d, e, False)
    # Kept for parity with the original code: only used by the (disabled)
    # gaussian decision path.
    Ya = np.multiply(Y, alpha.value).T if kernel_type != "linear" else []
    n_correct = 0
    n_total = 0
    for i in range(testY.shape[0]):
        score = float(w*(testX[i].reshape(1, X.shape[1]).T)) + b
        predicted = e if score >= 0 else d
        if predicted == testY.item(i):
            n_correct += 1
        else:
            savefig(testX[i].reshape(1, X.shape[1]), "./wrong/wrong"+str(n_total)+"a"+str(int(testY.item(i)))+"p"+str(int(predicted))+".png")
        n_total += 1
    return float(n_correct) / float(n_total)
def train_multiclass_cvx(X, Y, kernel):
    """
    Train one-vs-one CVXPY SVMs for all 45 digit pairs (i, j), i < j.

    Returns (w, b): 10x10 upper-triangular tables of weight vectors
    (object array) and biases, indexed by the class pair.
    """
    weights = np.empty((10, 10), dtype=object)
    biases = np.empty((10, 10))
    for lo in range(10):
        for hi in range(lo + 1, 10):
            pairX, pairY = convertLinear(X, Y, lo, hi, True)
            weights[lo][hi], biases[lo][hi], _alpha, _nsv = train(pairX, pairY, kernel, 1, 0.05)
    return weights, biases
def classify_cvx(w, b, x):
    """
    Predict the digit for sample x by one-vs-one voting over the 45
    pairwise classifiers; ties on vote count are broken by the summed
    absolute decision values.  Returns (predicted_label, vote_counts).
    """
    votes = np.zeros(10)
    margin_sums = np.zeros(10)
    for lo in range(10):
        for hi in range(lo + 1, 10):
            decision = float(w[lo][hi] * (x.T)) + b[lo][hi]
            winner = hi if decision >= 0 else lo
            votes[winner] += 1
            margin_sums[winner] += math.fabs(decision)
    tied = np.argwhere(votes == np.amax(votes))
    if tied.size == 1:
        best = tied[0][0]
    else:
        # Same tie-break as before: the highest margin sum among the tied
        # classes, resolved to its first index overall.
        best = np.argwhere(margin_sums == np.amax(margin_sums[tied]))[0][0]
    return best, votes
def test_multiclass_cvx(w, b, X1, Y1):
    """
    Evaluate the one-vs-one CVXPY classifiers on the labelled set (X1, Y1).

    Returns (accuracy, predicted, actual): the fraction of correct
    predictions plus parallel lists of predicted and true labels.
    """
    correct = 0
    total = 0
    predicted = []
    actual = []
    for i in range(Y1.shape[0]):
        label, _wins = classify_cvx(w, b, X1[i])
        predicted.append(label)
        actual.append(Y1.item(i))
        if label == Y1.item(i):
            correct += 1
        total += 1
    # BUG FIX: the original returned the module-level global `test_labels`
    # (built for the libsvm code path) instead of the `actual` list
    # collected here — a NameError when run standalone, and the wrong
    # labels otherwise.
    return float(correct) / float(total), predicted, actual
def train_multiclass(X, Y, param):
    """
    Train one-vs-one libsvm models for all 45 digit pairs (i, j), i < j.

    param -- libsvm parameter string passed straight to svm_train.
    Returns a 10x10 object table of trained models indexed by class pair.
    """
    models = np.empty((10, 10), dtype=object)
    for lo in range(10):
        for hi in range(lo + 1, 10):
            pairX, pairY = convertLinear(X, Y, lo, hi, True)
            samples, labels = listify(pairX, pairY)
            models[lo][hi] = svm_train(labels, samples, param)
    return models
def classify(models, x):
    """
    Predict the digit for sample x by one-vs-one voting over the 45
    pairwise libsvm models; ties on vote count are broken by the summed
    absolute decision values.  Returns (predicted_label, vote_counts).
    """
    votes = np.zeros(10)
    margin_sums = np.zeros(10)
    for lo in range(10):
        for hi in range(lo + 1, 10):
            labels, _acc, decisions = svm_predict([1], x, models[lo][hi], "-q")
            winner = hi if labels[0] >= 0 else lo
            votes[winner] += 1
            margin_sums[winner] += math.fabs(decisions[0][0])
    tied = np.argwhere(votes == np.amax(votes))
    if tied.size == 1:
        best = tied[0][0]
    else:
        # Same tie-break as before: highest margin sum among the tied
        # classes, resolved to its first index overall.
        best = np.argwhere(margin_sums == np.amax(margin_sums[tied]))[0][0]
    return best, votes
def test_multiclass(models, X1, Y1):
    """Score the one-vs-one LibSVM models on data set (X1, Y1).

    Returns (accuracy, predicted_labels, actual_labels).
    """
    test_data, test_labels = listify(X1, Y1)
    right = 0
    seen = 0
    predicted = []
    for i in xrange(Y1.shape[0]):
        label, votes = classify(models, [test_data[i]])
        predicted.append(label)
        if label == test_labels[i]:
            right += 1
        seen += 1
    return float(right) / float(seen), predicted, test_labels
trainfile = "train.csv"
testfile = "test.csv"
# Read data from file
X, Y = parseData(trainfile)
print("Data parse complete...")
# Digit under test: the binary task below separates digit d from (d+1) % 10.
d = 0
print "D = ", d
print '\033[95m'+"---Binary Classification---"+'\033[0m'
########## BINARY CONVOPT ##########
# Binary classification using the hand-rolled convex-optimisation SVM.
print
print '\033[94m'+"ConvOpt results:"+'\033[0m'
Xd, Yd = convertLinear(X, Y, d, (d+1)%10)
# Linear SVM Model
start = time.time()
# train(...) returns (weights, bias, alphas, number of support vectors).
w, b, a, n = train(Xd, Yd, "linear", 1, 0)
end = time.time() - start
print "Accuracy (Linear Kernel) = ", test(w, b, d, (d+1)%10, testfile, a, Xd, Yd, "linear")*100
# print "Weight ", w
print "Bias ", b
print "nSV ", n
print "Time ", end
# Gaussian SVM Model
start = time.time()
# C = 1, gamma = 0.05 (same hyper-parameters as the LibSVM runs below).
w, b, a, n = train(Xd, Yd, "gaussian", 1, 0.05)
end = time.time() - start
print "Accuracy (Gaussian Kernel) = ", test(w, b, d, (d+1)%10, testfile, a, Xd, Yd, "gaussian")*100
# print "Weight ", w
print "Bias ", b
print "nSV ", n
print "Time ", end
########## BINARY LIBSVM ##########
# Same binary task, solved by LibSVM for comparison (accuracy and wall time).
print
print '\033[94m'+"LibSVM results:"+'\033[0m'
train_data, train_labels = listify(Xd, Yd)
Xt, Yt = parseData(testfile)
X1, Y1 = convertLinear(Xt, Yt, d, (d+1)%10, True)
test_data, test_labels = listify(X1, Y1)
# Linear SVM Model
start = time.time()
model = svm_train(train_labels, train_data,'-t 0 -c 1')
end = time.time() - start
[predicted_label, accuracy, decision_values] = svm_predict(test_labels, test_data, model, "-q")
print "Accuracy (Linear Kernel) = ", accuracy[0]
print "Time ", end
# print "Weight ", w
# Gaussian SVM Model
start = time.time()
model = svm_train(train_labels, train_data,'-g 0.05 -c 1')
end = time.time() - start
[predicted_label, accuracy, decision_values] = svm_predict(test_labels, test_data, model, "-q")
print "Accuracy (Gaussian Kernel) = ", accuracy[0]
print "Time ", end
########## MULTICLASS CONVOPT ##########
print '\033[95m'+"---Multiclass Classification---"+'\033[0m'
# Test data
Xtest, Ytest = parseData(testfile)
# Training accuracy
print
print '\033[94m'+"ConvOpt results:"+'\033[0m'
# Linear SVM Model
start = time.time()
# `end` measures training only; evaluation below is not included in the time.
w, b = train_multiclass_cvx(X, Y, 'linear')
end = time.time() - start
acc, pred, actual = test_multiclass_cvx(w, b, X, Y)
acc1, pred1, actual1 = test_multiclass_cvx(w, b, Xtest, Ytest)
print "Multiclass Training Accuracy (Linear Kernel) = ", acc*100
print "Multiclass Test Accuracy (Linear Kernel) = ", acc1*100
print "Time ", end
# Gaussian SVM Model
start = time.time()
w, b = train_multiclass_cvx(X, Y, 'gaussian')
end = time.time() - start
acc, pred, actual = test_multiclass_cvx(w, b, X, Y)
acc1, pred1, actual1 = test_multiclass_cvx(w, b, Xtest, Ytest)
print "Multiclass Training Accuracy (Gaussian Kernel) = ", acc*100
print "Multiclass Test Accuracy (Gaussian Kernel) = ", acc1*100
print "Time ", end
########## MULTICLASS LIBSVM ##########
print
print '\033[94m'+"LibSVM results:"+'\033[0m'
# Linear SVM Model
# '-t 0' selects the linear kernel; '-q' silences LibSVM's console output.
start = time.time()
models = train_multiclass(X, Y, '-t 0 -c 1 -q')
end = time.time() - start
acc, pred, actual = test_multiclass(models, X, Y)
acc1, pred1, actual1 = test_multiclass(models, Xtest, Ytest)
print "Multiclass Training Accuracy (Linear Kernel) = ", acc*100
print "Multiclass Test Accuracy (Linear Kernel) = ", acc1*100
print "Time ", end
# Gaussian SVM Model
start = time.time()
models = train_multiclass(X, Y, '-g 0.05 -c 1 -q')
end = time.time() - start
acc, pred, actual = test_multiclass(models, X, Y)
acc1, pred1, actual1 = test_multiclass(models, Xtest, Ytest)
print "Multiclass Training Accuracy (Gaussian Kernel) = ", acc*100
print "Multiclass Test Accuracy (Gaussian Kernel) = ", acc1*100
print "Time ", end
########## CONFUSION MATRIX ##########
# Confusion matrix of the last (Gaussian LibSVM) multiclass test run.
cm = confusion_matrix(actual1, pred1)
print '\033[94m'+"Confusion Matrix:"+'\033[0m'
print(cm)
plot_confusion_matrix(cm, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
plt.savefig("Confusion-Matrix")
########## VALIDATION ##########
# Hold out the first tenth of the training data as a validation split
# (Python 2 integer division: shape[0]/10 truncates).
Xv = X[0:X.shape[0]/10:1]
Yv = Y[0:Y.shape[0]/10:1]
Xtrain = X[X.shape[0]/10::1]
Ytrain = Y[Y.shape[0]/10::1]
print '\033[94m'+"Validation:"+'\033[0m'
# Sweep the C penalty for the Gaussian-kernel LibSVM models.
for i in [0.00001, 0.001, 1, 5, 10]:
    models = train_multiclass(Xtrain, Ytrain, '-g 0.05 -c '+str(i)+' -q')
    acc, pred, actual = test_multiclass(models, Xv, Yv)
    print "Validation Accuracy with C = ", i, " is : ", acc*100
    acc, pred, actual = test_multiclass(models, Xtest, Ytest)
    print "Test Accuracy with C = ", i, " is : ", acc*100
"shreshthtuli@gmail.com"
] | shreshthtuli@gmail.com |
c56180686a2cdd0b8dc7a67f71164d244a673cfb | 4ac7e253ff7b9637e872a38a1b1e33c22c9d8d70 | /test/pytest/testutil/fixtures.py | 109d97cab4069e1efe901f9327cb4f64576b172b | [
"Apache-2.0"
] | permissive | nnishiza/apache-websocket | ec3938ecba705be6f666b8c0f7599219f5d75f67 | 6968083264b90b89b1b9597a4ca03ba29e7ea2e1 | refs/heads/master | 2021-04-29T08:39:04.363552 | 2017-01-20T23:55:27 | 2017-01-20T23:55:27 | 77,671,760 | 0 | 0 | null | 2016-12-30T08:09:07 | 2016-12-30T08:09:07 | null | UTF-8 | Python | false | false | 3,097 | py | import autobahn.twisted.websocket as ws
import pytest
import urlparse
from twisted.internet import reactor
from twisted.internet.ssl import ClientContextFactory
from twisted.web import client
from .protocols import ProtocolFactory
class _UnsecureClientContextFactory(ClientContextFactory):
    """An SSL context factory that performs no cert checks."""
    def getContext(self, hostname, port):
        # hostname/port are intentionally ignored: any server certificate is
        # accepted, which is what the test suite needs for self-signed certs.
        return ClientContextFactory.getContext(self)
# Shared no-verification context, used by the `agent` fixture below.
_context_factory = _UnsecureClientContextFactory()
class _HTTP10Agent:
    """
    A hacky attempt at an HTTP/1.0 version of t.w.c.Agent. Unfortunately
    t.w.c.Agent only supports HTTP/1.1, so we have to create this ourselves. It
    uses the old HTTPClientFactory implementation in Twisted.
    Note that this only sort of implements Agent (it doesn't callback until the
    response is received, and it doesn't even return the full response from
    request()) and is really only useful for the purposes of these tests.
    """
    def __init__(self, reactor):
        # Reactor is injected so the test harness controls connections.
        self._reactor = reactor
    class _FakeResponse:
        # Minimal stand-in for an Agent response: only `.code` is available.
        def __init__(self, code):
            self.code = code
    def request(self, method, uri, headers=None, bodyProducer=None):
        # bodyProducer is accepted for Agent-signature compatibility but is
        # never used by this HTTP/1.0 implementation.
        url = urlparse.urlparse(uri, scheme='http')
        host = url.hostname
        port = url.port
        if port is None:
            # Default port derived from the scheme when the URI omits one.
            port = 443 if (url.scheme == 'https') else 80
        # Translate from Agent's Headers object back into a dict.
        if headers is not None:
            old_headers = {}
            for name, value_list in headers.getAllRawHeaders():
                # HTTPClientFactory wants one value per header; keep the first.
                old_headers[name] = value_list[0]
            headers = old_headers
        f = client.HTTPClientFactory(uri, method=method, headers=headers,
                                     timeout=2)
        def gotResponse(page):
            # addBoth: fires on success or failure; only the status survives.
            return _HTTP10Agent._FakeResponse(int(f.status))
        f.deferred.addBoth(gotResponse)
        if url.scheme == 'https':
            self._reactor.connectSSL(host, port, f, ClientContextFactory())
        else:
            self._reactor.connectTCP(host, port, f)
        return f.deferred
#
# Fixture Helper Functions
#
def fixture_connect(uri, protocol):
    """
    Connects to the given WebSocket URI using an instance of the provided
    WebSocketClientProtocol subclass.
    This is intended to be called by pytest fixtures; it will block until a
    connection is made and return the protocol instance that wraps the
    connection.
    """
    factory = ProtocolFactory(uri, protocol)
    # Short 1s handshake timeouts keep a dead server from hanging the suite.
    factory.setProtocolOptions(failByDrop=False,
                               openHandshakeTimeout=1,
                               closeHandshakeTimeout=1)
    ws.connectWS(factory, timeout=1)
    # pytest.blockon (presumably pytest-twisted -- confirm) spins the reactor
    # until each deferred fires, first for connect then for the WS handshake.
    protocol = pytest.blockon(factory.connected)
    pytest.blockon(protocol.opened)
    return protocol
#
# Fixtures
#
@pytest.fixture
def agent():
    """Returns a t.w.c.Agent for use by tests."""
    # Built with the no-verification SSL context so https tests accept any cert.
    return client.Agent(reactor, _context_factory)
@pytest.fixture
def agent_10():
    """Returns an HTTP/1.0 "Agent"."""
    return _HTTP10Agent(reactor)
| [
"champion.p@gmail.com"
] | champion.p@gmail.com |
92f388b243e714fb884b524b4a78321c244fc32f | 8b02fb816e0f2efec34157dfba5bf14c773a3b08 | /b2912a/SMU Control Tests/SMU Characterization/Speed Test 10.py | e89046645c6ad41b19ea29bde7c12e24bcad47e7 | [] | no_license | stevennoyce/CNT_IV | c0c6bd1438778c35c4cfca0301750e2555a14867 | 36bb27df3280af7bf365c860405235d54038e46f | refs/heads/master | 2018-10-22T14:20:00.175335 | 2018-09-18T21:19:03 | 2018-09-18T21:19:03 | 111,465,107 | 1 | 0 | null | 2018-09-10T11:43:17 | 2017-11-20T21:31:35 | C | UTF-8 | Python | false | false | 5,207 | py | import visa
import numpy as np
import time
from matplotlib import pyplot as plt
# Future ideas ---------------------
# List output for sin wave or similar
# inst.write(':SOUR:VOLT:MODE LIST')
# inst.write(':SOUR:LIST:VOLT 0,2,4,6,8,10,0')
# inst.write(':source:voltage:mode list')
# inst.write(':souce:list:voltage {}'.format(','.join(voltages)))
# np.fft.rfft(, norm='ortho')
# ----------------------------------
rm = visa.ResourceManager()
print(rm.list_resources())
# Open the first VISA resource found; presumably the B2912A SMU (per the
# repository path) -- confirm before running on a shared bench.
inst = rm.open_resource(rm.list_resources()[0], timeout=20000)
inst.write('*CLS') # Clear
inst.write('*RST') # Reset
inst.write(':system:lfrequency 60')
# Binary transfer format: 32-bit reals, byte-swapped, decoded as numpy arrays.
inst.write(':form real,32')
inst.write(':form:border swap')
inst.values_format.use_binary('d', False, np.array)
channels = [1,2]
Npoints = 10000
# Bias point: drain 10 mV on channel 1, gate -15 V on channel 2. Index 0 of
# voltageSetpoints is unused so the list can be indexed by channel number.
Vds = 0.01
Vgs = -15
voltageSetpoints = [None, Vds, Vgs]
for channel in channels:
	inst.write(':source{}:function:mode voltage'.format(channel))
	inst.write(':sense{}:function current'.format(channel))
	# inst.write(':sense{}:current:nplc 1'.format(channel))
	# inst.write(':sense{}:current:prot 10e-6'.format(channel))
	inst.write(':sense{}:current:range:auto on'.format(channel))
	inst.write(':sense{}:current:range 1E-6'.format(channel))
	# inst.write(':sense{}:current:range:auto:mode speed'.format(channel))
	# inst.write(':sense{}:current:range:auto:THR 80'.format(channel))
	inst.write(':sense{}:current:range:auto:LLIM 1E-8'.format(channel))
	inst.write(':source{}:voltage:mode sweep'.format(channel))
	inst.write(':source{}:voltage:start {}'.format(channel, voltageSetpoints[channel]))
	inst.write(':source{}:voltage:stop {}'.format(channel, voltageSetpoints[channel]))
	inst.write(':source{}:voltage:points {}'.format(channel, Npoints))
	# inst.write(':source2:voltage:start {}'.format(Vgs))
	# inst.write(':source2:voltage:stop {}'.format(Vgs))
	# inst.write(':source2:voltage:points {}'.format(Npoints))
	inst.write(':output{} on'.format(channel))
inst.write("*WAI")
time.sleep(0.1)
# Gently ramp drain then gate to their setpoints in 30 small steps, taking a
# throwaway measurement at each step.
for vds in np.linspace(0,Vds,30):
	inst.write(':source1:voltage {}'.format(vds))
	time.sleep(0.01)
	inst.query_binary_values(':measure? (@1:2)')
for vgs in np.linspace(0,Vgs,30):
	inst.write(':source2:voltage {}'.format(vgs))
	time.sleep(0.01)
	inst.query_binary_values(':measure? (@1:2)')
for channel in channels:
	# NOTE(review): the cosine list is dead code -- it is immediately
	# overwritten by a constant list held at the channel's setpoint.
	sinVoltages = 0.1*np.cos(np.linspace(0,2*np.pi,2500))
	sinVoltages = voltageSetpoints[channel]*np.linspace(1,1,2500)
	inst.write(':source{}:voltage:mode list'.format(channel))
	inst.write(':source{}:list:voltage {}'.format(channel, ','.join(map(str, sinVoltages))))
# Timer-triggered acquisition: presumably one sample every 10 us, Npoints
# samples per channel -- confirm against the instrument's SCPI manual.
for channel in channels:
	inst.write(':trig{}:source timer'.format(channel))
	inst.write(':trig{}:timer {}'.format(channel, 10e-6))
	inst.write(':trig{}:count {}'.format(channel, Npoints))
	# inst.write(':trig{}:ACQ:DEL 1E-4'.format(channel))
	# inst.write(':sense{}:current:APER 1E-4'.format(channel))
	# smu.write(':source{}:voltage:mode sweep'.format(channel))
	# smu.write(':source{}:voltage:start 0'.format(channel, src1start))
	# smu.write(':source{}:voltage:stop 0'.format(channel, src1stop))
	# smu.write(':source{}:voltage:points 100'.format(channel, points))
	# inst.write(':output{} on'.format(channel))
inst.write(':init (@1,2)')
startTime = time.time()
# Retrieve measurement result
currents = inst.query_binary_values(':fetch:array:current? (@1)')
voltages = inst.query_binary_values(':fetch:array:voltage? (@1)')
times = inst.query_binary_values(':fetch:array:time?')
currents2 = inst.query_binary_values(':fetch:array:current? (@2)')
voltages2 = inst.query_binary_values(':fetch:array:voltage? (@2)')
times2 = inst.query_binary_values(':fetch:array:time? (@2)')
endTime = time.time()
# Ramp gate then drain back to zero before finishing.
for vgs in np.linspace(Vgs,0,30):
	inst.write(':source2:voltage {}'.format(vgs))
	time.sleep(0.01)
	inst.query_binary_values(':measure? (@1:2)')
for vds in np.linspace(Vds,0,30):
	inst.write(':source1:voltage {}'.format(vds))
	time.sleep(0.01)
	inst.query_binary_values(':measure? (@1:2)')
# Report throughput two ways: wall-clock time around the fetch, and the
# instrument's own timestamp span.
totalTime = endTime - startTime
print('Total time: {} s'.format(totalTime))
print('Rate: {}'.format(Npoints/totalTime))
times = np.array(times)
totalTime = max(times) - min(times)
print('Total time: {} s'.format(totalTime))
print('Rate: {}'.format(len(times)/totalTime))
print('Rate: {:e}'.format(len(times)/totalTime))
print('Max is:')
print(np.max(currents))
plt.plot(times, currents)
plt.plot(times2, currents2)
plt.xlabel('Time [s]')
plt.ylabel('Current [A]')
plt.show()
exit()  # NOTE(review): everything below (PSD/spectrum plots) is unreachable.
plt.psd(currents, Fs=len(times)/totalTime, NFFT=2**16)
plt.psd(currents2, Fs=len(times)/totalTime, NFFT=2**16)
plt.show()
plt.magnitude_spectrum(currents, Fs=len(times)/totalTime)
plt.magnitude_spectrum(currents2, Fs=len(times)/totalTime)
plt.show()
plt.plot(np.linspace(0, len(times)/totalTime/2, len(times)//2 + 1), np.abs(np.fft.rfft(currents, norm='ortho')))
plt.plot(np.linspace(0, len(times)/totalTime/2, len(times)//2 + 1), np.abs(np.fft.rfft(currents2, norm='ortho')))
plt.show()
plt.semilogy(np.linspace(0, len(times)/totalTime/2, len(times)//2 + 1), np.abs(np.fft.rfft(currents, norm='ortho')))
plt.semilogy(np.linspace(0, len(times)/totalTime/2, len(times)//2 + 1), np.abs(np.fft.rfft(currents2, norm='ortho')))
plt.show()
| [
"steven.noyce@gmail.com"
] | steven.noyce@gmail.com |
1b1c02e75d0c463404a738766c2fe6e24d2476c7 | ad849c40e75d098e38db897154c63054e6f89fca | /models_class/model.py | 8ff2a80539b1abd9c6c4efeda44b6b17c4e7e9f6 | [] | permissive | vietnamican/Pytorch_Retinaface | 768a96eb7e48b002dc91cc97cc41473206903c59 | 8d69dd191e16421bb399f49c7706d6e154d4a80e | refs/heads/main | 2023-06-18T08:23:25.860727 | 2021-06-24T10:48:18 | 2021-06-24T10:48:18 | 366,045,702 | 1 | 0 | MIT | 2021-05-10T13:06:47 | 2021-05-10T13:06:46 | null | UTF-8 | Python | false | false | 2,929 | py | import torch
from torch import nn
from torchmetrics import Accuracy
from .base import ConvBatchNormRelu
from .base import Base
class Config(object):
    """Training configuration: data locations plus loader/schedule knobs."""
    dataroot = 'data/mrleye'
    # LaPa-derived train/val image and label directories.
    train_image_dir = '../LaPa_negpos_fusion/train/images'
    train_label_dir = '../LaPa_negpos_fusion/train/labels'
    val_image_dir = '../LaPa_negpos_fusion/val/images'
    val_label_dir = '../LaPa_negpos_fusion/val/labels'
    batch_size = 512
    pin_memory= True
    num_workers = 6
    device = 'gpu'
    max_epochs = 200
    # Fractions of max_epochs at which the LR scheduler decays (see IrisModel).
    steps = [0.5, 0.7, 0.9]
# Shared default configuration instance.
cfg = Config()
class IrisModel(Base):
    """Small CNN eye/iris classifier with 2 output classes.

    Three ConvBatchNormRelu stages followed by a 1x1 conv head, global
    average pooling and flatten, producing one logit per class.
    """
    def __init__(self, cfg=cfg):
        # NOTE(review): the default binds the module-level `cfg` instance at
        # import time; pass an explicit config to override it.
        super().__init__()
        self.conv1 = ConvBatchNormRelu(3, 10, kernel_size=3, padding=1, with_relu=False)
        self.maxpool1 = nn.MaxPool2d(kernel_size=2, stride=2)
        self.relu1 = nn.ReLU(inplace=True)
        self.conv2 = ConvBatchNormRelu(10, 20, kernel_size=3, padding=1, with_relu=False)
        self.maxpool2 = nn.MaxPool2d(kernel_size=2, stride=2)
        self.relu2 = nn.ReLU(inplace=True)
        self.conv3 = ConvBatchNormRelu(20, 50, kernel_size=3, padding=1, with_relu=False)
        # 1x1 conv maps the 50 feature channels down to the 2 output classes.
        self.conv4 = ConvBatchNormRelu(50, 2, kernel_size=1, padding=0, with_relu=False)
        self.avg_pool = nn.AdaptiveAvgPool2d((1, 1))
        self.flatten = nn.Flatten()
        self.criterion = nn.CrossEntropyLoss()
        self.cfg = cfg
        # NOTE(review): this single Accuracy metric is updated from both
        # training_step and validation_step, so its internal state mixes the
        # two phases -- consider separate train/val metric instances.
        self.val_acc = Accuracy()
    def forward(self, x):
        """Return raw class logits for a batch of input images x."""
        x = self.relu1(self.maxpool1(self.conv1(x)))
        x = self.relu2(self.maxpool2(self.conv2(x)))
        x = self.conv3(x)
        x = self.conv4(x)
        x = self.avg_pool(x)
        x = self.flatten(x)
        return x
    def _shared_step(self, batch, batch_dix):
        # A batch unpacks as (image, label, ...); any extras are ignored.
        eye, label, *_ = batch
        logit = self.forward(eye)
        loss = self.criterion(logit, label)
        return loss, logit
    def training_step(self, batch, batch_dix):
        """One training step: log train accuracy/loss and return the loss."""
        _, label, *_ = batch
        loss, logit = self._shared_step(batch, batch_dix)
        pred = logit.argmax(dim=1)
        self.log('train_acc', self.val_acc(pred, label))
        self.log('train_loss', loss)
        return loss
    def validation_step(self, batch, batch_dix):
        """One validation step: log val accuracy/loss and return the loss."""
        _, label, *_ = batch
        loss, logit = self._shared_step(batch, batch_dix)
        pred = logit.argmax(dim=1)
        self.log('val_acc', self.val_acc(pred, label))
        self.log('val_loss', loss)
        return loss
    def configure_optimizers(self):
        """SGD + MultiStepLR decaying 10x at the cfg.steps fractions of training."""
        optimizer = torch.optim.SGD(self.parameters(), lr=0.001, momentum=0.9, weight_decay=5e-4)
        max_epochs = self.cfg.max_epochs
        step0, step1, step2 = self.cfg.steps
        lr_scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, [max_epochs*step0, max_epochs*step1, max_epochs*step2], gamma=0.1)
        return {'optimizer': optimizer, 'lr_scheduler': lr_scheduler}
| [
"vietnamican@gmail.com"
] | vietnamican@gmail.com |
a66d33de13362abe85bb1eaea386c7fdb853db98 | d57b51ec207002e333b8655a8f5832ed143aa28c | /.history/l5/work/app_20200705183534.py | b04a220ca1102f3467a3b5ad0e580157e43c7a65 | [] | no_license | yevheniir/python_course_2020 | b42766c4278a08b8b79fec77e036a1b987accf51 | a152d400ab4f45d9d98d8ad8b2560d6f0b408c0b | refs/heads/master | 2022-11-15T07:13:24.193173 | 2020-07-11T15:43:26 | 2020-07-11T15:43:26 | 278,890,802 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 357 | py | from flask import Flask
from flask import render_template
app = Flask(__name__)
# In-memory scoreboard placeholder; not persisted, lost on restart.
scoreboard = [{"name": "Tester", "score": 10}, {"name": "Tester", "score": 11}]
@app.route("/<name>")
def hello(name):
    """Render the main page for /<name>.

    Bug fix: the URL rule captures a variable called ``name`` and Flask
    passes it to the view as the keyword argument ``name``, but the
    parameter used to be called ``n`` -- so every request to this route
    raised ``TypeError: hello() got an unexpected keyword argument 'name'``.
    """
    return render_template("index.html")
@app.route("/game")
def game():
    """Serve the game page (same template as the landing page)."""
    return render_template("index.html")
if __name__ == "__main__":
    # Run Flask's development server with default settings.
    app.run()
"yevheniira@intelink-ua.com"
] | yevheniira@intelink-ua.com |
fc09eccfe10dbeeeda3cb73e26619a48f4e21f7d | 88680d780cf0e6b07897fb0096a09e45f2cf2a4b | /userbot/plugins/pmpermit.py | 4de3efa0132af0808fc3005e0a35b29aff40e5f3 | [
"MIT"
] | permissive | phantombd/Subrozbot | a58f6fde972c48f3ae7a37d884c645e8b2bcd2ed | becd99e7438857a83215658419e09e23d103750d | refs/heads/master | 2022-12-15T14:56:13.962679 | 2020-08-30T06:53:26 | 2020-08-30T06:53:26 | 295,969,353 | 1 | 0 | MIT | 2020-09-16T08:20:31 | 2020-09-16T08:20:30 | null | UTF-8 | Python | false | false | 9,350 | py | import time
import asyncio
import io
import userbot.plugins.sql_helper.pmpermit_sql as pmpermit_sql
from telethon.tl.functions.users import GetFullUserRequest
from telethon import events, errors, functions, types
from userbot import ALIVE_NAME
from userbot.utils import admin_cmd
# Per-chat count of warning replies already sent to unapproved PM senders.
PM_WARNS = {}
# Last warning message we posted per chat; deleted before sending a new one.
PREV_REPLY_MESSAGE = {}
# Display name interpolated into the canned warning text below.
DEFAULTUSER = str(ALIVE_NAME) if ALIVE_NAME else "Set ALIVE_NAME in config vars in Heroku"
USER_BOT_WARN_ZERO = ("─────▄████▀█▄\n───▄█████████████████▄\n─▄█████.▼.▼.▼.▼.▼.▼▼▼▼\n▄███████▄.▲.▲▲▲▲▲▲▲▲\n████████████████████▀▀\n\n"
"**This is (@Subroz) Artificial Intelligence.\n**"
"You Were Spamming My Peru Master's Inbox, Henceforth Your Retarded Lame Ass Has Been Blocked By My Master's Userbot.\n\n"
"**NOW GO TO HELL!**")
USER_BOT_NO_WARN = ("[┈╭━━╮╭━━╮┈┈┈┈┈ \n┈┃╭╮┗┻━━┻━╮┈┈┈ \n┈┃╰┓╭━╮╭━╮┃┈┈┈ \n┈╰━┓┃▇┃┃▇┃┃┈┈┈ \n┈┈┈┃╱▔▔▔▔▔▔▔▇┈ \n┈┈┈┃▏┏┳┳┳┳┳━┛┈ \n┈┈┈┃╲╰┻┻┻┻┻┓┈┈](tg://user?id=973779397)\n\n"
"`Hello, This is SUBROZ SECURITY SERVICE for Peru users only.You have found your way here to my master,`"
f"{DEFAULTUSER}'s` inbox.\n\n"
"Leave your name, phone number, address and 50,00,00,00,000$ and hopefully you'll get a reply within 2 light years.`\n\n"
"** Send** `/start` ** so that we can decide why you're here.**")
if Var.PRIVATE_GROUP_ID is not None:
    @command(pattern="^.approve ?(.*)")
    async def approve_p_m(event):
        """.approve [reason] -- whitelist the current private chat for PMs.

        Clears any pending warning state for the chat, records the approval
        (with the optional reason) via pmpermit_sql, then edits and finally
        deletes its own status message.
        """
        if event.fwd_from:
            return
        replied_user = await event.client(GetFullUserRequest(event.chat_id))
        firstname = replied_user.user.first_name
        reason = event.pattern_match.group(1)
        chat = await event.get_chat()
        if event.is_private:
            if not pmpermit_sql.is_approved(chat.id):
                if chat.id in PM_WARNS:
                    # Forget the warning counter for this chat.
                    del PM_WARNS[chat.id]
                if chat.id in PREV_REPLY_MESSAGE:
                    # Remove the last warning message we posted there.
                    await PREV_REPLY_MESSAGE[chat.id].delete()
                    del PREV_REPLY_MESSAGE[chat.id]
                pmpermit_sql.approve(chat.id, reason)
                await event.edit("Approved to pm [{}](tg://user?id={})".format(firstname, chat.id))
        await asyncio.sleep(3)
        await event.delete()
    @command(pattern="^.block ?(.*)")
    async def approve_p_m(event):
        """.block -- disapprove the current private chat and block the user.

        NOTE(review): this handler (and the two below) reuse the function
        name `approve_p_m`; only the decorated objects are registered, so it
        works, but distinct names would read better.
        """
        if event.fwd_from:
            return
        replied_user = await event.client(GetFullUserRequest(event.chat_id))
        firstname = replied_user.user.first_name
        reason = event.pattern_match.group(1)
        chat = await event.get_chat()
        if event.is_private:
            if chat.id == 729596789:
                # Hard-coded protected id: refuse to block and stall instead.
                await event.edit("U Bitch Tryed To Block My BOSS, Now I Will Sleep For 100 Seconds")
                await asyncio.sleep(100)
            else:
                if pmpermit_sql.is_approved(chat.id):
                    pmpermit_sql.disapprove(chat.id)
                    await event.edit(" ███████▄▄███████████▄ \n▓▓▓▓▓▓█░░░░░░░░░░░░░░█\n▓▓▓▓▓▓█░░░░░░░░░░░░░░█\n▓▓▓▓▓▓█░░░░░░░░░░░░░░█\n▓▓▓▓▓▓█░░░░░░░░░░░░░░█\n▓▓▓▓▓▓█░░░░░░░░░░░░░░█\n▓▓▓▓▓▓███░░░░░░░░░░░░█\n██████▀▀▀█░░░░██████▀ \n░░░░░░░░░█░░░░█ \n░░░░░░░░░░█░░░█ \n░░░░░░░░░░░█░░█ \n░░░░░░░░░░░█░░█ \n░░░░░░░░░░░░▀▀ \n\n**This is Subroz AI..U HAVE BEEN BANNED DUE TO NONSENCE SHIT**..[{}](tg://user?id={})".format(firstname, chat.id))
                    await asyncio.sleep(3)
                    await event.client(functions.contacts.BlockRequest(chat.id))
    @command(pattern="^.disapprove ?(.*)")
    async def approve_p_m(event):
        """.disapprove -- remove the current private chat from the whitelist."""
        if event.fwd_from:
            return
        replied_user = await event.client(GetFullUserRequest(event.chat_id))
        firstname = replied_user.user.first_name
        reason = event.pattern_match.group(1)
        chat = await event.get_chat()
        if event.is_private:
            if chat.id == 973779397:
                # Hard-coded owner id can never be disapproved.
                await event.edit("Sorry, I Can't Disapprove My Master")
            else:
                if pmpermit_sql.is_approved(chat.id):
                    pmpermit_sql.disapprove(chat.id)
                    await event.edit("Disapproved [{}](tg://user?id={})".format(firstname, chat.id))
    @command(pattern="^.listapproved")
    async def approve_p_m(event):
        """.listapproved -- show every chat id currently approved for PMs.

        Lists longer than Telegram's message-length limit are uploaded as a
        text document instead of edited into the message.
        """
        if event.fwd_from:
            return
        approved_users = pmpermit_sql.get_all_approved()
        APPROVED_PMs = "Current Approved PMs\n"
        if len(approved_users) > 0:
            for a_user in approved_users:
                if a_user.reason:
                    APPROVED_PMs += f"👉 [{a_user.chat_id}](tg://user?id={a_user.chat_id}) for {a_user.reason}\n"
                else:
                    APPROVED_PMs += f"👉 [{a_user.chat_id}](tg://user?id={a_user.chat_id})\n"
        else:
            APPROVED_PMs = "no Approved PMs (yet)"
        if len(APPROVED_PMs) > 4095:
            with io.BytesIO(str.encode(APPROVED_PMs)) as out_file:
                out_file.name = "approved.pms.text"
                await event.client.send_file(
                    event.chat_id,
                    out_file,
                    force_document=True,
                    allow_cache=False,
                    caption="Current Approved PMs",
                    reply_to=event
                )
            await event.delete()
        else:
            await event.edit(APPROVED_PMs)
@bot.on(events.NewMessage(incoming=True))
async def on_new_private_message(event):
    """Gatekeeper for every incoming private message.

    Skips our own messages, non-private chats, bots, verified accounts,
    other userbots echoing our canned warning, and a few whitelisted texts;
    anything else from an unapproved chat is passed to do_pm_permit_action.
    """
    if event.from_id == bot.uid:
        return
    if Var.PRIVATE_GROUP_ID is None:
        # Feature is disabled unless a log group is configured.
        return
    if not event.is_private:
        return
    message_text = event.message.message
    chat_id = event.from_id
    current_message_text = message_text.lower()  # NOTE(review): unused
    if USER_BOT_NO_WARN == message_text:
        # userbot's should not reply to other userbot's
        # https://core.telegram.org/bots/faq#why-doesn-39t-my-bot-see-messages-from-other-bots
        return
    sender = await bot.get_entity(chat_id)
    if chat_id == bot.uid:
        # don't log Saved Messages
        return
    if sender.bot:
        # don't log bots
        return
    if sender.verified:
        # don't log verified accounts
        return
    if any([x in event.raw_text for x in ("/start", "1", "2", "3", "4", "5")]):
        return
    if not pmpermit_sql.is_approved(chat_id):
        # pm permit
        await do_pm_permit_action(chat_id, event)
async def do_pm_permit_action(chat_id, event):
    """Warn an unapproved PM sender; block them after the 5th warning.

    Keeps exactly one warning message alive per chat (older ones are
    deleted) and, once the counter reaches 5, posts the final notice,
    blocks the user and logs a #BLOCKED_PMs entry to the private group.
    """
    if chat_id not in PM_WARNS:
        PM_WARNS.update({chat_id: 0})
    if PM_WARNS[chat_id] == 5:
        r = await event.reply(USER_BOT_WARN_ZERO)
        await asyncio.sleep(3)
        await event.client(functions.contacts.BlockRequest(chat_id))
        if chat_id in PREV_REPLY_MESSAGE:
            await PREV_REPLY_MESSAGE[chat_id].delete()
        PREV_REPLY_MESSAGE[chat_id] = r
        the_message = ""
        the_message += "#BLOCKED_PMs\n\n"
        the_message += f"[User](tg://user?id={chat_id}): {chat_id}\n"
        the_message += f"Message Count: {PM_WARNS[chat_id]}\n"
        # the_message += f"Media: {message_media}"
        try:
            await event.client.send_message(
                entity=Var.PRIVATE_GROUP_ID,
                message=the_message,
                # reply_to=,
                # parse_mode="html",
                link_preview=False,
                # file=message_media,
                silent=True
            )
            return
        # NOTE(review): bare except silently swallows any logging failure.
        except:
            return
    r = await event.reply(USER_BOT_NO_WARN)
    PM_WARNS[chat_id] += 1
    if chat_id in PREV_REPLY_MESSAGE:
        await PREV_REPLY_MESSAGE[chat_id].delete()
    PREV_REPLY_MESSAGE[chat_id] = r
from userbot.utils import admin_cmd
import io
import userbot.plugins.sql_helper.pmpermit_sql as pmpermit_sql
from telethon import events
@bot.on(events.NewMessage(incoming=True, from_users=(973779397)))
async def hehehe(event):
    """Auto-approve the hard-coded owner id the first time they PM."""
    if event.fwd_from:
        return
    chat = await event.get_chat()
    if event.is_private:
        if not pmpermit_sql.is_approved(chat.id):
            pmpermit_sql.approve(chat.id, "**My BOSS🙈🙈**")
            await borg.send_message(chat, "**My BOSS is come....U are Lucky**")
| [
"noreply@github.com"
] | noreply@github.com |
3f55df74a54acc7c97942121f229c80f7848457f | fda6fae59f151f2c433b3bbbbfd02dac1390ca03 | /DisInV/disindb/models/brand_model.py | 3e568d21abc8091c2827fdf7c7998139c2bbaf81 | [] | no_license | kksweet8845/DisInV | 27424835d1a57e2f70ad0f0029f06dac1faa90ad | 638e613ac29c1bb852c3a6f3cffa5a812486fa85 | refs/heads/main | 2023-03-30T10:29:54.966348 | 2020-11-02T06:09:48 | 2020-11-02T06:09:48 | 309,267,706 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 287 | py | from django.db import models
class Brand(models.Model):
    """A product brand, persisted in the `brands` table."""
    # Human-readable brand name; required (blank=False) but defaults to ''.
    brand_name = models.CharField(
        max_length=20,
        blank=False,
        default=''
    )
    def __str__(self):
        """Readable representation used in the admin/shell."""
        return "brand_name: {}".format(self.brand_name)
    class Meta:
        # Explicit table name instead of Django's app_label_brand default.
        db_table = "brands"
"f74064054@mail.ncku.edu.tw"
] | f74064054@mail.ncku.edu.tw |
f32c32efd6824655e6ea2871c24a9a2b562f8933 | eb3683f9127befb9ef96d8eb801206cf7b84d6a7 | /stypy/sgmc/sgmc_cache/distutils/emxccompiler.py | 62c23b54b675b3edfdfdaf8c2e59d042e5b9440b | [] | no_license | ComputationalReflection/stypy | 61ec27333a12f76ac055d13f8969d3e0de172f88 | be66ae846c82ac40ba7b48f9880d6e3990681a5b | refs/heads/master | 2021-05-13T18:24:29.005894 | 2018-06-14T15:42:50 | 2018-06-14T15:42:50 | 116,855,812 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 186,190 | py |
# -*- coding: utf-8 -*-
"""
ORIGINAL PROGRAM SOURCE CODE:
1: '''distutils.emxccompiler
2:
3: Provides the EMXCCompiler class, a subclass of UnixCCompiler that
4: handles the EMX port of the GNU C compiler to OS/2.
5: '''
6:
7: # issues:
8: #
9: # * OS/2 insists that DLLs can have names no longer than 8 characters
10: # We put export_symbols in a def-file, as though the DLL can have
11: # an arbitrary length name, but truncate the output filename.
12: #
13: # * only use OMF objects and use LINK386 as the linker (-Zomf)
14: #
15: # * always build for multithreading (-Zmt) as the accompanying OS/2 port
16: # of Python is only distributed with threads enabled.
17: #
18: # tested configurations:
19: #
20: # * EMX gcc 2.81/EMX 0.9d fix03
21:
22: __revision__ = "$Id$"
23:
24: import os,sys,copy
25: from distutils.ccompiler import gen_preprocess_options, gen_lib_options
26: from distutils.unixccompiler import UnixCCompiler
27: from distutils.file_util import write_file
28: from distutils.errors import DistutilsExecError, CompileError, UnknownFileError
29: from distutils import log
30:
31: class EMXCCompiler (UnixCCompiler):
32:
33: compiler_type = 'emx'
34: obj_extension = ".obj"
35: static_lib_extension = ".lib"
36: shared_lib_extension = ".dll"
37: static_lib_format = "%s%s"
38: shared_lib_format = "%s%s"
39: res_extension = ".res" # compiled resource file
40: exe_extension = ".exe"
41:
42: def __init__ (self,
43: verbose=0,
44: dry_run=0,
45: force=0):
46:
47: UnixCCompiler.__init__ (self, verbose, dry_run, force)
48:
49: (status, details) = check_config_h()
50: self.debug_print("Python's GCC status: %s (details: %s)" %
51: (status, details))
52: if status is not CONFIG_H_OK:
53: self.warn(
54: "Python's pyconfig.h doesn't seem to support your compiler. " +
55: ("Reason: %s." % details) +
56: "Compiling may fail because of undefined preprocessor macros.")
57:
58: (self.gcc_version, self.ld_version) = \
59: get_versions()
60: self.debug_print(self.compiler_type + ": gcc %s, ld %s\n" %
61: (self.gcc_version,
62: self.ld_version) )
63:
64: # Hard-code GCC because that's what this is all about.
65: # XXX optimization, warnings etc. should be customizable.
66: self.set_executables(compiler='gcc -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall',
67: compiler_so='gcc -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall',
68: linker_exe='gcc -Zomf -Zmt -Zcrtdll',
69: linker_so='gcc -Zomf -Zmt -Zcrtdll -Zdll')
70:
71: # want the gcc library statically linked (so that we don't have
72: # to distribute a version dependent on the compiler we have)
73: self.dll_libraries=["gcc"]
74:
75: # __init__ ()
76:
77: def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
78: if ext == '.rc':
79: # gcc requires '.rc' compiled to binary ('.res') files !!!
80: try:
81: self.spawn(["rc", "-r", src])
82: except DistutilsExecError, msg:
83: raise CompileError, msg
84: else: # for other files use the C-compiler
85: try:
86: self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
87: extra_postargs)
88: except DistutilsExecError, msg:
89: raise CompileError, msg
90:
91: def link (self,
92: target_desc,
93: objects,
94: output_filename,
95: output_dir=None,
96: libraries=None,
97: library_dirs=None,
98: runtime_library_dirs=None,
99: export_symbols=None,
100: debug=0,
101: extra_preargs=None,
102: extra_postargs=None,
103: build_temp=None,
104: target_lang=None):
105:
106: # use separate copies, so we can modify the lists
107: extra_preargs = copy.copy(extra_preargs or [])
108: libraries = copy.copy(libraries or [])
109: objects = copy.copy(objects or [])
110:
111: # Additional libraries
112: libraries.extend(self.dll_libraries)
113:
114: # handle export symbols by creating a def-file
115: # with executables this only works with gcc/ld as linker
116: if ((export_symbols is not None) and
117: (target_desc != self.EXECUTABLE)):
118: # (The linker doesn't do anything if output is up-to-date.
119: # So it would probably better to check if we really need this,
120: # but for this we had to insert some unchanged parts of
121: # UnixCCompiler, and this is not what we want.)
122:
123: # we want to put some files in the same directory as the
124: # object files are, build_temp doesn't help much
125: # where are the object files
126: temp_dir = os.path.dirname(objects[0])
127: # name of dll to give the helper files the same base name
128: (dll_name, dll_extension) = os.path.splitext(
129: os.path.basename(output_filename))
130:
131: # generate the filenames for these files
132: def_file = os.path.join(temp_dir, dll_name + ".def")
133:
134: # Generate .def file
135: contents = [
136: "LIBRARY %s INITINSTANCE TERMINSTANCE" % \
137: os.path.splitext(os.path.basename(output_filename))[0],
138: "DATA MULTIPLE NONSHARED",
139: "EXPORTS"]
140: for sym in export_symbols:
141: contents.append(' "%s"' % sym)
142: self.execute(write_file, (def_file, contents),
143: "writing %s" % def_file)
144:
145: # next add options for def-file and to creating import libraries
146: # for gcc/ld the def-file is specified as any other object files
147: objects.append(def_file)
148:
149: #end: if ((export_symbols is not None) and
150: # (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):
151:
152: # who wants symbols and a many times larger output file
153: # should explicitly switch the debug mode on
154: # otherwise we let dllwrap/ld strip the output file
155: # (On my machine: 10KB < stripped_file < ??100KB
156: # unstripped_file = stripped_file + XXX KB
157: # ( XXX=254 for a typical python extension))
158: if not debug:
159: extra_preargs.append("-s")
160:
161: UnixCCompiler.link(self,
162: target_desc,
163: objects,
164: output_filename,
165: output_dir,
166: libraries,
167: library_dirs,
168: runtime_library_dirs,
169: None, # export_symbols, we do this in our def-file
170: debug,
171: extra_preargs,
172: extra_postargs,
173: build_temp,
174: target_lang)
175:
176: # link ()
177:
178: # -- Miscellaneous methods -----------------------------------------
179:
180: # override the object_filenames method from CCompiler to
181: # support rc and res-files
182: def object_filenames (self,
183: source_filenames,
184: strip_dir=0,
185: output_dir=''):
186: if output_dir is None: output_dir = ''
187: obj_names = []
188: for src_name in source_filenames:
189: # use normcase to make sure '.rc' is really '.rc' and not '.RC'
190: (base, ext) = os.path.splitext (os.path.normcase(src_name))
191: if ext not in (self.src_extensions + ['.rc']):
192: raise UnknownFileError, \
193: "unknown file type '%s' (from '%s')" % \
194: (ext, src_name)
195: if strip_dir:
196: base = os.path.basename (base)
197: if ext == '.rc':
198: # these need to be compiled to object files
199: obj_names.append (os.path.join (output_dir,
200: base + self.res_extension))
201: else:
202: obj_names.append (os.path.join (output_dir,
203: base + self.obj_extension))
204: return obj_names
205:
206: # object_filenames ()
207:
208: # override the find_library_file method from UnixCCompiler
209: # to deal with file naming/searching differences
210: def find_library_file(self, dirs, lib, debug=0):
211: shortlib = '%s.lib' % lib
212: longlib = 'lib%s.lib' % lib # this form very rare
213:
214: # get EMX's default library directory search path
215: try:
216: emx_dirs = os.environ['LIBRARY_PATH'].split(';')
217: except KeyError:
218: emx_dirs = []
219:
220: for dir in dirs + emx_dirs:
221: shortlibp = os.path.join(dir, shortlib)
222: longlibp = os.path.join(dir, longlib)
223: if os.path.exists(shortlibp):
224: return shortlibp
225: elif os.path.exists(longlibp):
226: return longlibp
227:
228: # Oops, didn't find it in *any* of 'dirs'
229: return None
230:
231: # class EMXCCompiler
232:
233:
234: # Because these compilers aren't configured in Python's pyconfig.h file by
235: # default, we should at least warn the user if he is using a unmodified
236: # version.
237:
238: CONFIG_H_OK = "ok"
239: CONFIG_H_NOTOK = "not ok"
240: CONFIG_H_UNCERTAIN = "uncertain"
241:
242: def check_config_h():
243:
244: '''Check if the current Python installation (specifically, pyconfig.h)
245: appears amenable to building extensions with GCC. Returns a tuple
246: (status, details), where 'status' is one of the following constants:
247: CONFIG_H_OK
248: all is well, go ahead and compile
249: CONFIG_H_NOTOK
250: doesn't look good
251: CONFIG_H_UNCERTAIN
252: not sure -- unable to read pyconfig.h
253: 'details' is a human-readable string explaining the situation.
254:
255: Note there are two ways to conclude "OK": either 'sys.version' contains
256: the string "GCC" (implying that this Python was built with GCC), or the
257: installed "pyconfig.h" contains the string "__GNUC__".
258: '''
259:
260: # XXX since this function also checks sys.version, it's not strictly a
261: # "pyconfig.h" check -- should probably be renamed...
262:
263: from distutils import sysconfig
264: import string
265: # if sys.version contains GCC then python was compiled with
266: # GCC, and the pyconfig.h file should be OK
267: if string.find(sys.version,"GCC") >= 0:
268: return (CONFIG_H_OK, "sys.version mentions 'GCC'")
269:
270: fn = sysconfig.get_config_h_filename()
271: try:
272: # It would probably better to read single lines to search.
273: # But we do this only once, and it is fast enough
274: f = open(fn)
275: try:
276: s = f.read()
277: finally:
278: f.close()
279:
280: except IOError, exc:
281: # if we can't read this file, we cannot say it is wrong
282: # the compiler will complain later about this file as missing
283: return (CONFIG_H_UNCERTAIN,
284: "couldn't read '%s': %s" % (fn, exc.strerror))
285:
286: else:
287: # "pyconfig.h" contains an "#ifdef __GNUC__" or something similar
288: if string.find(s,"__GNUC__") >= 0:
289: return (CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn)
290: else:
291: return (CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn)
292:
293:
294: def get_versions():
295: ''' Try to find out the versions of gcc and ld.
296: If not possible it returns None for it.
297: '''
298: from distutils.version import StrictVersion
299: from distutils.spawn import find_executable
300: import re
301:
302: gcc_exe = find_executable('gcc')
303: if gcc_exe:
304: out = os.popen(gcc_exe + ' -dumpversion','r')
305: try:
306: out_string = out.read()
307: finally:
308: out.close()
309: result = re.search('(\d+\.\d+\.\d+)',out_string)
310: if result:
311: gcc_version = StrictVersion(result.group(1))
312: else:
313: gcc_version = None
314: else:
315: gcc_version = None
316: # EMX ld has no way of reporting version number, and we use GCC
317: # anyway - so we can link OMF DLLs
318: ld_version = None
319: return (gcc_version, ld_version)
320:
"""
# ---------------------------------------------------------------------------
# Auto-generated type-inference program (stypy) for distutils/emxccompiler.py.
# Each statement of the original module is replayed against a
# ``module_type_store`` so that only the *types* flowing through the module
# are recorded, not runtime values.  The numbered names (str_3103,
# import_3105, sys_modules_3106, ...) are emitted by the stypy code generator
# and referenced positionally further down -- do not rename them by hand.
# ---------------------------------------------------------------------------
# Import the stypy library necessary elements
from stypy.type_inference_programs.type_inference_programs_imports import *
# Create the module type store
module_type_store = Context(None, __file__)
# ################# Begin of the type inference program ##################
# Module docstring of the analysed file, registered as a plain 'str' instance.
str_3103 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 5, (-1)), 'str', 'distutils.emxccompiler\n\nProvides the EMXCCompiler class, a subclass of UnixCCompiler that\nhandles the EMX port of the GNU C compiler to OS/2.\n')
# Assigning a Str to a Name (line 22):
# Assigning a Str to a Name (line 22):
str_3104 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 22, 15), 'str', '$Id$')
# Assigning a type to the variable '__revision__' (line 22)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 22, 0), '__revision__', str_3104)
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 24, 0))
# The analysed module's ``import os, sys, copy`` is replayed one module at a
# time: each is really imported here and then registered in the type store.
# Multiple import statement. import os (1/3) (line 24)
import os
import_module(stypy.reporting.localization.Localization(__file__, 24, 0), 'os', os, module_type_store)
# Multiple import statement. import sys (2/3) (line 24)
import sys
import_module(stypy.reporting.localization.Localization(__file__, 24, 0), 'sys', sys, module_type_store)
# Multiple import statement. import copy (3/3) (line 24)
import copy
import_module(stypy.reporting.localization.Localization(__file__, 24, 0), 'copy', copy, module_type_store)
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 25, 0))
# Each ``from distutils.X import ...`` below follows the same generated
# three-way pattern: analyse the target module; if analysable, import its
# type-inference twin and copy the requested names between type stores; if it
# is a binary ('pyd_module'), fall back to a real import; if analysis failed,
# bind the module name to the resulting StypyTypeError.
# NOTE(review): the hard-coded 'C:/Python27/lib/distutils/' path is baked in
# by the generator on the machine that produced this file.
# 'from distutils.ccompiler import gen_preprocess_options, gen_lib_options' statement (line 25)
update_path_to_current_file_folder('C:/Python27/lib/distutils/')
import_3105 = generate_type_inference_code_for_module(stypy.reporting.localization.Localization(__file__, 25, 0), 'distutils.ccompiler')
if (type(import_3105) is not StypyTypeError):
    if (import_3105 != 'pyd_module'):
        __import__(import_3105)
        sys_modules_3106 = sys.modules[import_3105]
        import_from_module(stypy.reporting.localization.Localization(__file__, 25, 0), 'distutils.ccompiler', sys_modules_3106.module_type_store, module_type_store, ['gen_preprocess_options', 'gen_lib_options'])
        nest_module(stypy.reporting.localization.Localization(__file__, 25, 0), __file__, sys_modules_3106, sys_modules_3106.module_type_store, module_type_store)
    else:
        from distutils.ccompiler import gen_preprocess_options, gen_lib_options
        import_from_module(stypy.reporting.localization.Localization(__file__, 25, 0), 'distutils.ccompiler', None, module_type_store, ['gen_preprocess_options', 'gen_lib_options'], [gen_preprocess_options, gen_lib_options])
else:
    # Assigning a type to the variable 'distutils.ccompiler' (line 25)
    module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 25, 0), 'distutils.ccompiler', import_3105)
remove_current_file_folder_from_path('C:/Python27/lib/distutils/')
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 26, 0))
# 'from distutils.unixccompiler import UnixCCompiler' statement (line 26)
update_path_to_current_file_folder('C:/Python27/lib/distutils/')
import_3107 = generate_type_inference_code_for_module(stypy.reporting.localization.Localization(__file__, 26, 0), 'distutils.unixccompiler')
if (type(import_3107) is not StypyTypeError):
    if (import_3107 != 'pyd_module'):
        __import__(import_3107)
        sys_modules_3108 = sys.modules[import_3107]
        import_from_module(stypy.reporting.localization.Localization(__file__, 26, 0), 'distutils.unixccompiler', sys_modules_3108.module_type_store, module_type_store, ['UnixCCompiler'])
        nest_module(stypy.reporting.localization.Localization(__file__, 26, 0), __file__, sys_modules_3108, sys_modules_3108.module_type_store, module_type_store)
    else:
        from distutils.unixccompiler import UnixCCompiler
        import_from_module(stypy.reporting.localization.Localization(__file__, 26, 0), 'distutils.unixccompiler', None, module_type_store, ['UnixCCompiler'], [UnixCCompiler])
else:
    # Assigning a type to the variable 'distutils.unixccompiler' (line 26)
    module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 26, 0), 'distutils.unixccompiler', import_3107)
remove_current_file_folder_from_path('C:/Python27/lib/distutils/')
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 27, 0))
# 'from distutils.file_util import write_file' statement (line 27)
update_path_to_current_file_folder('C:/Python27/lib/distutils/')
import_3109 = generate_type_inference_code_for_module(stypy.reporting.localization.Localization(__file__, 27, 0), 'distutils.file_util')
if (type(import_3109) is not StypyTypeError):
    if (import_3109 != 'pyd_module'):
        __import__(import_3109)
        sys_modules_3110 = sys.modules[import_3109]
        import_from_module(stypy.reporting.localization.Localization(__file__, 27, 0), 'distutils.file_util', sys_modules_3110.module_type_store, module_type_store, ['write_file'])
        nest_module(stypy.reporting.localization.Localization(__file__, 27, 0), __file__, sys_modules_3110, sys_modules_3110.module_type_store, module_type_store)
    else:
        from distutils.file_util import write_file
        import_from_module(stypy.reporting.localization.Localization(__file__, 27, 0), 'distutils.file_util', None, module_type_store, ['write_file'], [write_file])
else:
    # Assigning a type to the variable 'distutils.file_util' (line 27)
    module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 27, 0), 'distutils.file_util', import_3109)
remove_current_file_folder_from_path('C:/Python27/lib/distutils/')
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 28, 0))
# 'from distutils.errors import DistutilsExecError, CompileError, UnknownFileError' statement (line 28)
update_path_to_current_file_folder('C:/Python27/lib/distutils/')
import_3111 = generate_type_inference_code_for_module(stypy.reporting.localization.Localization(__file__, 28, 0), 'distutils.errors')
if (type(import_3111) is not StypyTypeError):
    if (import_3111 != 'pyd_module'):
        __import__(import_3111)
        sys_modules_3112 = sys.modules[import_3111]
        import_from_module(stypy.reporting.localization.Localization(__file__, 28, 0), 'distutils.errors', sys_modules_3112.module_type_store, module_type_store, ['DistutilsExecError', 'CompileError', 'UnknownFileError'])
        nest_module(stypy.reporting.localization.Localization(__file__, 28, 0), __file__, sys_modules_3112, sys_modules_3112.module_type_store, module_type_store)
    else:
        from distutils.errors import DistutilsExecError, CompileError, UnknownFileError
        import_from_module(stypy.reporting.localization.Localization(__file__, 28, 0), 'distutils.errors', None, module_type_store, ['DistutilsExecError', 'CompileError', 'UnknownFileError'], [DistutilsExecError, CompileError, UnknownFileError])
else:
    # Assigning a type to the variable 'distutils.errors' (line 28)
    module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 28, 0), 'distutils.errors', import_3111)
remove_current_file_folder_from_path('C:/Python27/lib/distutils/')
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 29, 0))
# 'from distutils import log' statement (line 29)
# NOTE(review): generated *bare* except -- any failure while importing
# ``distutils.log`` (not just ImportError) silently degrades ``log`` to
# UndefinedType.  Generator behaviour, deliberately left unchanged.
try:
    from distutils import log
except:
    log = UndefinedType
import_from_module(stypy.reporting.localization.Localization(__file__, 29, 0), 'distutils', None, module_type_store, ['log'], [log])
# Declaration of the 'EMXCCompiler' class
# Getting the type of 'UnixCCompiler' (line 31)
UnixCCompiler_3113 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 31, 20), 'UnixCCompiler')
class EMXCCompiler(UnixCCompiler_3113, ):
# Assigning a Str to a Name (line 33):
# Assigning a Str to a Name (line 34):
# Assigning a Str to a Name (line 35):
# Assigning a Str to a Name (line 36):
# Assigning a Str to a Name (line 37):
# Assigning a Str to a Name (line 38):
# Assigning a Str to a Name (line 39):
# Assigning a Str to a Name (line 40):
    # Type-inference twin of ``EMXCCompiler.__init__`` (original lines 42-73).
    @norecursion
    def __init__(type_of_self, localization, *varargs, **kwargs):
        """Replay EMXCCompiler's constructor against the stypy type store.

        Mirrors, step by step, what the real constructor does -- chain to
        ``UnixCCompiler.__init__``; unpack ``check_config_h()`` into
        ``status``/``details`` and warn when status is not CONFIG_H_OK;
        unpack ``get_versions()`` into ``self.gcc_version``/``self.ld_version``;
        register the gcc/ld command lines via ``self.set_executables``; and
        seed ``self.dll_libraries`` with ``['gcc']`` -- but every operation is
        performed on ``module_type_store`` so only *types* are tracked.

        The three generated defaults presumably correspond to the original
        ``verbose=0, dry_run=0, force=0`` parameters (Localization columns at
        lines 43-45) -- TODO confirm against the analysed source.
        """
        global module_type_store
        # Assign values to the parameters with defaults
        int_3114 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 43, 26), 'int')
        int_3115 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 44, 26), 'int')
        int_3116 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 45, 24), 'int')
        defaults = [int_3114, int_3115, int_3116]
        # Create a new context for function '__init__'
        module_type_store = module_type_store.open_function_context('__init__', 42, 4, False)
        # Assigning a type to the variable 'self' (line 43)
        module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 43, 4), 'self', type_of_self)
        # Passed parameters checking function
        arguments = process_argument_values(localization, type_of_self, module_type_store, 'EMXCCompiler.__init__', ['verbose', 'dry_run', 'force'], None, None, defaults, varargs, kwargs)
        if is_error_type(arguments):
            # Destroy the current context
            module_type_store = module_type_store.close_function_context()
            return
        # Initialize method data
        init_call_information(module_type_store, '__init__', localization, ['verbose', 'dry_run', 'force'], arguments)
        # Default return type storage variable (SSA)
        # Assigning a type to the variable 'stypy_return_type'
        module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'stypy_return_type', None)
        # ################# Begin of '__init__(...)' code ##################
        # Call to __init__(...): (line 47)
        # Processing the call arguments (line 47)
        # Getting the type of 'self' (line 47)
        self_3119 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 47, 32), 'self', False)
        # Getting the type of 'verbose' (line 47)
        verbose_3120 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 47, 38), 'verbose', False)
        # Getting the type of 'dry_run' (line 47)
        dry_run_3121 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 47, 47), 'dry_run', False)
        # Getting the type of 'force' (line 47)
        force_3122 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 47, 56), 'force', False)
        # Processing the call keyword arguments (line 47)
        kwargs_3123 = {}
        # Getting the type of 'UnixCCompiler' (line 47)
        UnixCCompiler_3117 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 47, 8), 'UnixCCompiler', False)
        # Obtaining the member '__init__' of a type (line 47)
        init___3118 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 47, 8), UnixCCompiler_3117, '__init__')
        # Calling __init__(args, kwargs) (line 47)
        init___call_result_3124 = invoke(stypy.reporting.localization.Localization(__file__, 47, 8), init___3118, *[self_3119, verbose_3120, dry_run_3121, force_3122], **kwargs_3123)
        # Tuple unpacking ``(status, details) = check_config_h()`` is lowered
        # by the generator into two subscript lookups plus two name bindings.
        # Assigning a Call to a Tuple (line 49):
        # Assigning a Subscript to a Name (line 49):
        # Obtaining the type of the subscript
        int_3125 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 49, 8), 'int')
        # Call to check_config_h(...): (line 49)
        # Processing the call keyword arguments (line 49)
        kwargs_3127 = {}
        # Getting the type of 'check_config_h' (line 49)
        check_config_h_3126 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 49, 28), 'check_config_h', False)
        # Calling check_config_h(args, kwargs) (line 49)
        check_config_h_call_result_3128 = invoke(stypy.reporting.localization.Localization(__file__, 49, 28), check_config_h_3126, *[], **kwargs_3127)
        # Obtaining the member '__getitem__' of a type (line 49)
        getitem___3129 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 49, 8), check_config_h_call_result_3128, '__getitem__')
        # Calling the subscript (__getitem__) to obtain the elements type (line 49)
        subscript_call_result_3130 = invoke(stypy.reporting.localization.Localization(__file__, 49, 8), getitem___3129, int_3125)
        # Assigning a type to the variable 'tuple_var_assignment_3095' (line 49)
        module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 49, 8), 'tuple_var_assignment_3095', subscript_call_result_3130)
        # Assigning a Subscript to a Name (line 49):
        # Obtaining the type of the subscript
        int_3131 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 49, 8), 'int')
        # Call to check_config_h(...): (line 49)
        # Processing the call keyword arguments (line 49)
        kwargs_3133 = {}
        # Getting the type of 'check_config_h' (line 49)
        check_config_h_3132 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 49, 28), 'check_config_h', False)
        # Calling check_config_h(args, kwargs) (line 49)
        check_config_h_call_result_3134 = invoke(stypy.reporting.localization.Localization(__file__, 49, 28), check_config_h_3132, *[], **kwargs_3133)
        # Obtaining the member '__getitem__' of a type (line 49)
        getitem___3135 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 49, 8), check_config_h_call_result_3134, '__getitem__')
        # Calling the subscript (__getitem__) to obtain the elements type (line 49)
        subscript_call_result_3136 = invoke(stypy.reporting.localization.Localization(__file__, 49, 8), getitem___3135, int_3131)
        # Assigning a type to the variable 'tuple_var_assignment_3096' (line 49)
        module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 49, 8), 'tuple_var_assignment_3096', subscript_call_result_3136)
        # Assigning a Name to a Name (line 49):
        # Getting the type of 'tuple_var_assignment_3095' (line 49)
        tuple_var_assignment_3095_3137 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 49, 8), 'tuple_var_assignment_3095')
        # Assigning a type to the variable 'status' (line 49)
        module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 49, 9), 'status', tuple_var_assignment_3095_3137)
        # Assigning a Name to a Name (line 49):
        # Getting the type of 'tuple_var_assignment_3096' (line 49)
        tuple_var_assignment_3096_3138 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 49, 8), 'tuple_var_assignment_3096')
        # Assigning a type to the variable 'details' (line 49)
        module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 49, 17), 'details', tuple_var_assignment_3096_3138)
        # Call to debug_print(...): (line 50)
        # Processing the call arguments (line 50)
        str_3141 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 50, 25), 'str', "Python's GCC status: %s (details: %s)")
        # Obtaining an instance of the builtin type 'tuple' (line 51)
        tuple_3142 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 51, 26), 'tuple')
        # Adding type elements to the builtin type 'tuple' instance (line 51)
        # Adding element type (line 51)
        # Getting the type of 'status' (line 51)
        status_3143 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 51, 26), 'status', False)
        add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 51, 26), tuple_3142, status_3143)
        # Adding element type (line 51)
        # Getting the type of 'details' (line 51)
        details_3144 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 51, 34), 'details', False)
        add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 51, 26), tuple_3142, details_3144)
        # Applying the binary operator '%' (line 50)
        result_mod_3145 = python_operator(stypy.reporting.localization.Localization(__file__, 50, 25), '%', str_3141, tuple_3142)
        # Processing the call keyword arguments (line 50)
        kwargs_3146 = {}
        # Getting the type of 'self' (line 50)
        self_3139 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 50, 8), 'self', False)
        # Obtaining the member 'debug_print' of a type (line 50)
        debug_print_3140 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 50, 8), self_3139, 'debug_print')
        # Calling debug_print(args, kwargs) (line 50)
        debug_print_call_result_3147 = invoke(stypy.reporting.localization.Localization(__file__, 50, 8), debug_print_3140, *[result_mod_3145], **kwargs_3146)
        # ``if status is not CONFIG_H_OK:`` opens an SSA branch context; the
        # matching join below merges the branch's type bindings back in.
        # Getting the type of 'status' (line 52)
        status_3148 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 52, 11), 'status')
        # Getting the type of 'CONFIG_H_OK' (line 52)
        CONFIG_H_OK_3149 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 52, 25), 'CONFIG_H_OK')
        # Applying the binary operator 'isnot' (line 52)
        result_is_not_3150 = python_operator(stypy.reporting.localization.Localization(__file__, 52, 11), 'isnot', status_3148, CONFIG_H_OK_3149)
        # Testing the type of an if condition (line 52)
        if_condition_3151 = is_suitable_condition(stypy.reporting.localization.Localization(__file__, 52, 8), result_is_not_3150)
        # Assigning a type to the variable 'if_condition_3151' (line 52)
        module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 52, 8), 'if_condition_3151', if_condition_3151)
        # SSA begins for if statement (line 52)
        module_type_store = SSAContext.create_ssa_context(module_type_store, 'if')
        # Call to warn(...): (line 53)
        # Processing the call arguments (line 53)
        str_3154 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 54, 16), 'str', "Python's pyconfig.h doesn't seem to support your compiler. ")
        str_3155 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 55, 17), 'str', 'Reason: %s.')
        # Getting the type of 'details' (line 55)
        details_3156 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 55, 33), 'details', False)
        # Applying the binary operator '%' (line 55)
        result_mod_3157 = python_operator(stypy.reporting.localization.Localization(__file__, 55, 17), '%', str_3155, details_3156)
        # Applying the binary operator '+' (line 54)
        result_add_3158 = python_operator(stypy.reporting.localization.Localization(__file__, 54, 16), '+', str_3154, result_mod_3157)
        str_3159 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 56, 16), 'str', 'Compiling may fail because of undefined preprocessor macros.')
        # Applying the binary operator '+' (line 55)
        result_add_3160 = python_operator(stypy.reporting.localization.Localization(__file__, 55, 42), '+', result_add_3158, str_3159)
        # Processing the call keyword arguments (line 53)
        kwargs_3161 = {}
        # Getting the type of 'self' (line 53)
        self_3152 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 53, 12), 'self', False)
        # Obtaining the member 'warn' of a type (line 53)
        warn_3153 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 53, 12), self_3152, 'warn')
        # Calling warn(args, kwargs) (line 53)
        warn_call_result_3162 = invoke(stypy.reporting.localization.Localization(__file__, 53, 12), warn_3153, *[result_add_3160], **kwargs_3161)
        # SSA join for if statement (line 52)
        module_type_store = module_type_store.join_ssa_context()
        # Tuple unpacking ``(self.gcc_version, self.ld_version) = get_versions()``,
        # lowered into two subscript lookups plus two member assignments.
        # Assigning a Call to a Tuple (line 58):
        # Assigning a Subscript to a Name (line 58):
        # Obtaining the type of the subscript
        int_3163 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 58, 8), 'int')
        # Call to get_versions(...): (line 59)
        # Processing the call keyword arguments (line 59)
        kwargs_3165 = {}
        # Getting the type of 'get_versions' (line 59)
        get_versions_3164 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 59, 12), 'get_versions', False)
        # Calling get_versions(args, kwargs) (line 59)
        get_versions_call_result_3166 = invoke(stypy.reporting.localization.Localization(__file__, 59, 12), get_versions_3164, *[], **kwargs_3165)
        # Obtaining the member '__getitem__' of a type (line 58)
        getitem___3167 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 58, 8), get_versions_call_result_3166, '__getitem__')
        # Calling the subscript (__getitem__) to obtain the elements type (line 58)
        subscript_call_result_3168 = invoke(stypy.reporting.localization.Localization(__file__, 58, 8), getitem___3167, int_3163)
        # Assigning a type to the variable 'tuple_var_assignment_3097' (line 58)
        module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 58, 8), 'tuple_var_assignment_3097', subscript_call_result_3168)
        # Assigning a Subscript to a Name (line 58):
        # Obtaining the type of the subscript
        int_3169 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 58, 8), 'int')
        # Call to get_versions(...): (line 59)
        # Processing the call keyword arguments (line 59)
        kwargs_3171 = {}
        # Getting the type of 'get_versions' (line 59)
        get_versions_3170 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 59, 12), 'get_versions', False)
        # Calling get_versions(args, kwargs) (line 59)
        get_versions_call_result_3172 = invoke(stypy.reporting.localization.Localization(__file__, 59, 12), get_versions_3170, *[], **kwargs_3171)
        # Obtaining the member '__getitem__' of a type (line 58)
        getitem___3173 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 58, 8), get_versions_call_result_3172, '__getitem__')
        # Calling the subscript (__getitem__) to obtain the elements type (line 58)
        subscript_call_result_3174 = invoke(stypy.reporting.localization.Localization(__file__, 58, 8), getitem___3173, int_3169)
        # Assigning a type to the variable 'tuple_var_assignment_3098' (line 58)
        module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 58, 8), 'tuple_var_assignment_3098', subscript_call_result_3174)
        # Assigning a Name to a Attribute (line 58):
        # Getting the type of 'tuple_var_assignment_3097' (line 58)
        tuple_var_assignment_3097_3175 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 58, 8), 'tuple_var_assignment_3097')
        # Getting the type of 'self' (line 58)
        self_3176 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 58, 9), 'self')
        # Setting the type of the member 'gcc_version' of a type (line 58)
        module_type_store.set_type_of_member(stypy.reporting.localization.Localization(__file__, 58, 9), self_3176, 'gcc_version', tuple_var_assignment_3097_3175)
        # Assigning a Name to a Attribute (line 58):
        # Getting the type of 'tuple_var_assignment_3098' (line 58)
        tuple_var_assignment_3098_3177 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 58, 8), 'tuple_var_assignment_3098')
        # Getting the type of 'self' (line 58)
        self_3178 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 58, 27), 'self')
        # Setting the type of the member 'ld_version' of a type (line 58)
        module_type_store.set_type_of_member(stypy.reporting.localization.Localization(__file__, 58, 27), self_3178, 'ld_version', tuple_var_assignment_3098_3177)
        # Call to debug_print(...): (line 60)
        # Processing the call arguments (line 60)
        # Getting the type of 'self' (line 60)
        self_3181 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 60, 25), 'self', False)
        # Obtaining the member 'compiler_type' of a type (line 60)
        compiler_type_3182 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 60, 25), self_3181, 'compiler_type')
        str_3183 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 60, 46), 'str', ': gcc %s, ld %s\n')
        # Obtaining an instance of the builtin type 'tuple' (line 61)
        tuple_3184 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 61, 26), 'tuple')
        # Adding type elements to the builtin type 'tuple' instance (line 61)
        # Adding element type (line 61)
        # Getting the type of 'self' (line 61)
        self_3185 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 61, 26), 'self', False)
        # Obtaining the member 'gcc_version' of a type (line 61)
        gcc_version_3186 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 61, 26), self_3185, 'gcc_version')
        add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 61, 26), tuple_3184, gcc_version_3186)
        # Adding element type (line 61)
        # Getting the type of 'self' (line 62)
        self_3187 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 62, 26), 'self', False)
        # Obtaining the member 'ld_version' of a type (line 62)
        ld_version_3188 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 62, 26), self_3187, 'ld_version')
        add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 61, 26), tuple_3184, ld_version_3188)
        # Applying the binary operator '%' (line 60)
        result_mod_3189 = python_operator(stypy.reporting.localization.Localization(__file__, 60, 46), '%', str_3183, tuple_3184)
        # Applying the binary operator '+' (line 60)
        result_add_3190 = python_operator(stypy.reporting.localization.Localization(__file__, 60, 25), '+', compiler_type_3182, result_mod_3189)
        # Processing the call keyword arguments (line 60)
        kwargs_3191 = {}
        # Getting the type of 'self' (line 60)
        self_3179 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 60, 8), 'self', False)
        # Obtaining the member 'debug_print' of a type (line 60)
        debug_print_3180 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 60, 8), self_3179, 'debug_print')
        # Calling debug_print(args, kwargs) (line 60)
        debug_print_call_result_3192 = invoke(stypy.reporting.localization.Localization(__file__, 60, 8), debug_print_3180, *[result_add_3190], **kwargs_3191)
        # Call to set_executables(...): (line 66)
        # Processing the call keyword arguments (line 66)
        str_3195 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 66, 38), 'str', 'gcc -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall')
        keyword_3196 = str_3195
        str_3197 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 67, 41), 'str', 'gcc -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall')
        keyword_3198 = str_3197
        str_3199 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 68, 40), 'str', 'gcc -Zomf -Zmt -Zcrtdll')
        keyword_3200 = str_3199
        str_3201 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 69, 39), 'str', 'gcc -Zomf -Zmt -Zcrtdll -Zdll')
        keyword_3202 = str_3201
        kwargs_3203 = {'linker_exe': keyword_3200, 'compiler_so': keyword_3198, 'linker_so': keyword_3202, 'compiler': keyword_3196}
        # Getting the type of 'self' (line 66)
        self_3193 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 66, 8), 'self', False)
        # Obtaining the member 'set_executables' of a type (line 66)
        set_executables_3194 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 66, 8), self_3193, 'set_executables')
        # Calling set_executables(args, kwargs) (line 66)
        set_executables_call_result_3204 = invoke(stypy.reporting.localization.Localization(__file__, 66, 8), set_executables_3194, *[], **kwargs_3203)
        # ``self.dll_libraries = ['gcc']`` -- a list type seeded with 'gcc'.
        # Assigning a List to a Attribute (line 73):
        # Assigning a List to a Attribute (line 73):
        # Obtaining an instance of the builtin type 'list' (line 73)
        list_3205 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 73, 27), 'list')
        # Adding type elements to the builtin type 'list' instance (line 73)
        # Adding element type (line 73)
        str_3206 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 73, 28), 'str', 'gcc')
        add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 73, 27), list_3205, str_3206)
        # Getting the type of 'self' (line 73)
        self_3207 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 73, 8), 'self')
        # Setting the type of the member 'dll_libraries' of a type (line 73)
        module_type_store.set_type_of_member(stypy.reporting.localization.Localization(__file__, 73, 8), self_3207, 'dll_libraries', list_3205)
        # ################# End of '__init__(...)' code ##################
        # Teardown call information
        teardown_call_information(localization, arguments)
        # Destroy the current context
        module_type_store = module_type_store.close_function_context()
    @norecursion
    def _compile(type_of_self, localization, *varargs, **kwargs):
        """Type-inference proxy for ``EMXCCompiler._compile`` (stypy-generated).

        Generated code: opens a fresh function context, registers the declared
        parameters ['obj', 'src', 'ext', 'cc_args', 'extra_postargs',
        'pp_opts'], then models the original method's control flow as SSA
        branches rather than real Python ``if``/``try`` statements:

        * if ``ext == '.rc'`` it types a ``self.spawn(['rc', '-r', src])``
          call (resource-compiler arm);
        * otherwise it types ``self.spawn(self.compiler_so + cc_args +
          [src, '-o', obj] + extra_postargs)`` (regular compile arm);
        * each arm's except branch binds ``msg`` to ``DistutilsExecError``
          and checks ``CompileError`` as a valid raise parameter.

        Returns the inferred ``stypy_return_type`` recorded in the type store.
        NOTE(review): statement order and the generated temp numbering
        (``*_3208``–``*_3239``) are produced by the stypy generator and must
        not be reordered by hand.
        """
        global module_type_store
        # Assign values to the parameters with defaults
        defaults = []
        # Create a new context for function '_compile'
        # (77, 4) is the line/column of the original def in emxccompiler.py.
        module_type_store = module_type_store.open_function_context('_compile', 77, 4, False)
        # Assigning a type to the variable 'self' (line 78)
        module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 78, 4), 'self', type_of_self)
        # Passed parameters checking function
        # Attach stypy bookkeeping metadata to the function object itself.
        EMXCCompiler._compile.__dict__.__setitem__('stypy_localization', localization)
        EMXCCompiler._compile.__dict__.__setitem__('stypy_type_of_self', type_of_self)
        EMXCCompiler._compile.__dict__.__setitem__('stypy_type_store', module_type_store)
        EMXCCompiler._compile.__dict__.__setitem__('stypy_function_name', 'EMXCCompiler._compile')
        EMXCCompiler._compile.__dict__.__setitem__('stypy_param_names_list', ['obj', 'src', 'ext', 'cc_args', 'extra_postargs', 'pp_opts'])
        EMXCCompiler._compile.__dict__.__setitem__('stypy_varargs_param_name', None)
        EMXCCompiler._compile.__dict__.__setitem__('stypy_kwargs_param_name', None)
        EMXCCompiler._compile.__dict__.__setitem__('stypy_call_defaults', defaults)
        EMXCCompiler._compile.__dict__.__setitem__('stypy_call_varargs', varargs)
        EMXCCompiler._compile.__dict__.__setitem__('stypy_call_kwargs', kwargs)
        EMXCCompiler._compile.__dict__.__setitem__('stypy_declared_arg_number', 7)
        arguments = process_argument_values(localization, type_of_self, module_type_store, 'EMXCCompiler._compile', ['obj', 'src', 'ext', 'cc_args', 'extra_postargs', 'pp_opts'], None, None, defaults, varargs, kwargs)
        if is_error_type(arguments):
            # Destroy the current context
            # Bad argument types: bail out early with the error type.
            module_type_store = module_type_store.close_function_context()
            return arguments
        # Initialize method data
        init_call_information(module_type_store, '_compile', localization, ['obj', 'src', 'ext', 'cc_args', 'extra_postargs', 'pp_opts'], arguments)
        # Default return type storage variable (SSA)
        # Assigning a type to the variable 'stypy_return_type'
        module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'stypy_return_type', None)
        # ################# Begin of '_compile(...)' code ##################
        # --- condition: ext == '.rc' (original line 78) ---
        # Getting the type of 'ext' (line 78)
        ext_3208 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 78, 11), 'ext')
        str_3209 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 78, 18), 'str', '.rc')
        # Applying the binary operator '==' (line 78)
        result_eq_3210 = python_operator(stypy.reporting.localization.Localization(__file__, 78, 11), '==', ext_3208, str_3209)
        # Testing the type of an if condition (line 78)
        if_condition_3211 = is_suitable_condition(stypy.reporting.localization.Localization(__file__, 78, 8), result_eq_3210)
        # Assigning a type to the variable 'if_condition_3211' (line 78)
        module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 78, 8), 'if_condition_3211', if_condition_3211)
        # SSA begins for if statement (line 78)
        module_type_store = SSAContext.create_ssa_context(module_type_store, 'if')
        # SSA begins for try-except statement (line 80)
        module_type_store = SSAContext.create_ssa_context(module_type_store, 'try-except')
        # --- then-arm: self.spawn(['rc', '-r', src]) (original line 81) ---
        # Call to spawn(...): (line 81)
        # Processing the call arguments (line 81)
        # Obtaining an instance of the builtin type 'list' (line 81)
        list_3214 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 81, 27), 'list')
        # Adding type elements to the builtin type 'list' instance (line 81)
        # Adding element type (line 81)
        str_3215 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 81, 28), 'str', 'rc')
        add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 81, 27), list_3214, str_3215)
        # Adding element type (line 81)
        str_3216 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 81, 34), 'str', '-r')
        add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 81, 27), list_3214, str_3216)
        # Adding element type (line 81)
        # Getting the type of 'src' (line 81)
        src_3217 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 81, 40), 'src', False)
        add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 81, 27), list_3214, src_3217)
        # Processing the call keyword arguments (line 81)
        kwargs_3218 = {}
        # Getting the type of 'self' (line 81)
        self_3212 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 81, 16), 'self', False)
        # Obtaining the member 'spawn' of a type (line 81)
        spawn_3213 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 81, 16), self_3212, 'spawn')
        # Calling spawn(args, kwargs) (line 81)
        spawn_call_result_3219 = invoke(stypy.reporting.localization.Localization(__file__, 81, 16), spawn_3213, *[list_3214], **kwargs_3218)
        # SSA branch for the except part of a try statement (line 80)
        # SSA branch for the except 'DistutilsExecError' branch of a try statement (line 80)
        # Storing handler type
        module_type_store.open_ssa_branch('except')
        # Getting the type of 'DistutilsExecError' (line 82)
        DistutilsExecError_3220 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 82, 19), 'DistutilsExecError')
        # Assigning a type to the variable 'msg' (line 82)
        module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 82, 12), 'msg', DistutilsExecError_3220)
        # Getting the type of 'CompileError' (line 83)
        CompileError_3221 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 83, 22), 'CompileError')
        # Models: raise CompileError, msg (must be a BaseException subtype).
        ensure_var_of_types(stypy.reporting.localization.Localization(__file__, 83, 16), CompileError_3221, 'raise parameter', BaseException)
        # SSA join for try-except statement (line 80)
        module_type_store = module_type_store.join_ssa_context()
        # SSA branch for the else part of an if statement (line 78)
        module_type_store.open_ssa_branch('else')
        # SSA begins for try-except statement (line 85)
        module_type_store = SSAContext.create_ssa_context(module_type_store, 'try-except')
        # --- else-arm: self.spawn(self.compiler_so + cc_args +
        #     [src, '-o', obj] + extra_postargs) (original lines 86-87) ---
        # Call to spawn(...): (line 86)
        # Processing the call arguments (line 86)
        # Getting the type of 'self' (line 86)
        self_3224 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 86, 27), 'self', False)
        # Obtaining the member 'compiler_so' of a type (line 86)
        compiler_so_3225 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 86, 27), self_3224, 'compiler_so')
        # Getting the type of 'cc_args' (line 86)
        cc_args_3226 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 86, 46), 'cc_args', False)
        # Applying the binary operator '+' (line 86)
        result_add_3227 = python_operator(stypy.reporting.localization.Localization(__file__, 86, 27), '+', compiler_so_3225, cc_args_3226)
        # Obtaining an instance of the builtin type 'list' (line 86)
        list_3228 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 86, 56), 'list')
        # Adding type elements to the builtin type 'list' instance (line 86)
        # Adding element type (line 86)
        # Getting the type of 'src' (line 86)
        src_3229 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 86, 57), 'src', False)
        add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 86, 56), list_3228, src_3229)
        # Adding element type (line 86)
        str_3230 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 86, 62), 'str', '-o')
        add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 86, 56), list_3228, str_3230)
        # Adding element type (line 86)
        # Getting the type of 'obj' (line 86)
        obj_3231 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 86, 68), 'obj', False)
        add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 86, 56), list_3228, obj_3231)
        # Applying the binary operator '+' (line 86)
        result_add_3232 = python_operator(stypy.reporting.localization.Localization(__file__, 86, 54), '+', result_add_3227, list_3228)
        # Getting the type of 'extra_postargs' (line 87)
        extra_postargs_3233 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 87, 27), 'extra_postargs', False)
        # Applying the binary operator '+' (line 86)
        result_add_3234 = python_operator(stypy.reporting.localization.Localization(__file__, 86, 73), '+', result_add_3232, extra_postargs_3233)
        # Processing the call keyword arguments (line 86)
        kwargs_3235 = {}
        # Getting the type of 'self' (line 86)
        self_3222 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 86, 16), 'self', False)
        # Obtaining the member 'spawn' of a type (line 86)
        spawn_3223 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 86, 16), self_3222, 'spawn')
        # Calling spawn(args, kwargs) (line 86)
        spawn_call_result_3236 = invoke(stypy.reporting.localization.Localization(__file__, 86, 16), spawn_3223, *[result_add_3234], **kwargs_3235)
        # SSA branch for the except part of a try statement (line 85)
        # SSA branch for the except 'DistutilsExecError' branch of a try statement (line 85)
        # Storing handler type
        module_type_store.open_ssa_branch('except')
        # Getting the type of 'DistutilsExecError' (line 88)
        DistutilsExecError_3237 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 88, 19), 'DistutilsExecError')
        # Assigning a type to the variable 'msg' (line 88)
        module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 88, 12), 'msg', DistutilsExecError_3237)
        # Getting the type of 'CompileError' (line 89)
        CompileError_3238 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 89, 22), 'CompileError')
        # Models: raise CompileError, msg (must be a BaseException subtype).
        ensure_var_of_types(stypy.reporting.localization.Localization(__file__, 89, 16), CompileError_3238, 'raise parameter', BaseException)
        # SSA join for try-except statement (line 85)
        module_type_store = module_type_store.join_ssa_context()
        # SSA join for if statement (line 78)
        module_type_store = module_type_store.join_ssa_context()
        # ################# End of '_compile(...)' code ##################
        # Teardown call information
        teardown_call_information(localization, arguments)
        # Storing the return type of function '_compile' in the type store
        # Getting the type of 'stypy_return_type' (line 77)
        stypy_return_type_3239 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 77, 4), 'stypy_return_type')
        module_type_store.store_return_type_of_current_context(stypy_return_type_3239)
        # Destroy the current context
        module_type_store = module_type_store.close_function_context()
        # Return type of the function '_compile'
        return stypy_return_type_3239
@norecursion
def link(type_of_self, localization, *varargs, **kwargs):
global module_type_store
# Assign values to the parameters with defaults
# Getting the type of 'None' (line 95)
None_3240 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 95, 25), 'None')
# Getting the type of 'None' (line 96)
None_3241 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 96, 24), 'None')
# Getting the type of 'None' (line 97)
None_3242 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 97, 27), 'None')
# Getting the type of 'None' (line 98)
None_3243 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 98, 35), 'None')
# Getting the type of 'None' (line 99)
None_3244 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 99, 29), 'None')
int_3245 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 100, 20), 'int')
# Getting the type of 'None' (line 101)
None_3246 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 101, 28), 'None')
# Getting the type of 'None' (line 102)
None_3247 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 102, 29), 'None')
# Getting the type of 'None' (line 103)
None_3248 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 103, 25), 'None')
# Getting the type of 'None' (line 104)
None_3249 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 104, 26), 'None')
defaults = [None_3240, None_3241, None_3242, None_3243, None_3244, int_3245, None_3246, None_3247, None_3248, None_3249]
# Create a new context for function 'link'
module_type_store = module_type_store.open_function_context('link', 91, 4, False)
# Assigning a type to the variable 'self' (line 92)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 92, 4), 'self', type_of_self)
# Passed parameters checking function
EMXCCompiler.link.__dict__.__setitem__('stypy_localization', localization)
EMXCCompiler.link.__dict__.__setitem__('stypy_type_of_self', type_of_self)
EMXCCompiler.link.__dict__.__setitem__('stypy_type_store', module_type_store)
EMXCCompiler.link.__dict__.__setitem__('stypy_function_name', 'EMXCCompiler.link')
EMXCCompiler.link.__dict__.__setitem__('stypy_param_names_list', ['target_desc', 'objects', 'output_filename', 'output_dir', 'libraries', 'library_dirs', 'runtime_library_dirs', 'export_symbols', 'debug', 'extra_preargs', 'extra_postargs', 'build_temp', 'target_lang'])
EMXCCompiler.link.__dict__.__setitem__('stypy_varargs_param_name', None)
EMXCCompiler.link.__dict__.__setitem__('stypy_kwargs_param_name', None)
EMXCCompiler.link.__dict__.__setitem__('stypy_call_defaults', defaults)
EMXCCompiler.link.__dict__.__setitem__('stypy_call_varargs', varargs)
EMXCCompiler.link.__dict__.__setitem__('stypy_call_kwargs', kwargs)
EMXCCompiler.link.__dict__.__setitem__('stypy_declared_arg_number', 14)
arguments = process_argument_values(localization, type_of_self, module_type_store, 'EMXCCompiler.link', ['target_desc', 'objects', 'output_filename', 'output_dir', 'libraries', 'library_dirs', 'runtime_library_dirs', 'export_symbols', 'debug', 'extra_preargs', 'extra_postargs', 'build_temp', 'target_lang'], None, None, defaults, varargs, kwargs)
if is_error_type(arguments):
# Destroy the current context
module_type_store = module_type_store.close_function_context()
return arguments
# Initialize method data
init_call_information(module_type_store, 'link', localization, ['target_desc', 'objects', 'output_filename', 'output_dir', 'libraries', 'library_dirs', 'runtime_library_dirs', 'export_symbols', 'debug', 'extra_preargs', 'extra_postargs', 'build_temp', 'target_lang'], arguments)
# Default return type storage variable (SSA)
# Assigning a type to the variable 'stypy_return_type'
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'stypy_return_type', None)
# ################# Begin of 'link(...)' code ##################
# Assigning a Call to a Name (line 107):
# Assigning a Call to a Name (line 107):
# Call to copy(...): (line 107)
# Processing the call arguments (line 107)
# Evaluating a boolean operation
# Getting the type of 'extra_preargs' (line 107)
extra_preargs_3252 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 107, 34), 'extra_preargs', False)
# Obtaining an instance of the builtin type 'list' (line 107)
list_3253 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 107, 51), 'list')
# Adding type elements to the builtin type 'list' instance (line 107)
# Applying the binary operator 'or' (line 107)
result_or_keyword_3254 = python_operator(stypy.reporting.localization.Localization(__file__, 107, 34), 'or', extra_preargs_3252, list_3253)
# Processing the call keyword arguments (line 107)
kwargs_3255 = {}
# Getting the type of 'copy' (line 107)
copy_3250 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 107, 24), 'copy', False)
# Obtaining the member 'copy' of a type (line 107)
copy_3251 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 107, 24), copy_3250, 'copy')
# Calling copy(args, kwargs) (line 107)
copy_call_result_3256 = invoke(stypy.reporting.localization.Localization(__file__, 107, 24), copy_3251, *[result_or_keyword_3254], **kwargs_3255)
# Assigning a type to the variable 'extra_preargs' (line 107)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 107, 8), 'extra_preargs', copy_call_result_3256)
# Assigning a Call to a Name (line 108):
# Assigning a Call to a Name (line 108):
# Call to copy(...): (line 108)
# Processing the call arguments (line 108)
# Evaluating a boolean operation
# Getting the type of 'libraries' (line 108)
libraries_3259 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 108, 30), 'libraries', False)
# Obtaining an instance of the builtin type 'list' (line 108)
list_3260 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 108, 43), 'list')
# Adding type elements to the builtin type 'list' instance (line 108)
# Applying the binary operator 'or' (line 108)
result_or_keyword_3261 = python_operator(stypy.reporting.localization.Localization(__file__, 108, 30), 'or', libraries_3259, list_3260)
# Processing the call keyword arguments (line 108)
kwargs_3262 = {}
# Getting the type of 'copy' (line 108)
copy_3257 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 108, 20), 'copy', False)
# Obtaining the member 'copy' of a type (line 108)
copy_3258 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 108, 20), copy_3257, 'copy')
# Calling copy(args, kwargs) (line 108)
copy_call_result_3263 = invoke(stypy.reporting.localization.Localization(__file__, 108, 20), copy_3258, *[result_or_keyword_3261], **kwargs_3262)
# Assigning a type to the variable 'libraries' (line 108)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 108, 8), 'libraries', copy_call_result_3263)
# Assigning a Call to a Name (line 109):
# Assigning a Call to a Name (line 109):
# Call to copy(...): (line 109)
# Processing the call arguments (line 109)
# Evaluating a boolean operation
# Getting the type of 'objects' (line 109)
objects_3266 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 109, 28), 'objects', False)
# Obtaining an instance of the builtin type 'list' (line 109)
list_3267 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 109, 39), 'list')
# Adding type elements to the builtin type 'list' instance (line 109)
# Applying the binary operator 'or' (line 109)
result_or_keyword_3268 = python_operator(stypy.reporting.localization.Localization(__file__, 109, 28), 'or', objects_3266, list_3267)
# Processing the call keyword arguments (line 109)
kwargs_3269 = {}
# Getting the type of 'copy' (line 109)
copy_3264 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 109, 18), 'copy', False)
# Obtaining the member 'copy' of a type (line 109)
copy_3265 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 109, 18), copy_3264, 'copy')
# Calling copy(args, kwargs) (line 109)
copy_call_result_3270 = invoke(stypy.reporting.localization.Localization(__file__, 109, 18), copy_3265, *[result_or_keyword_3268], **kwargs_3269)
# Assigning a type to the variable 'objects' (line 109)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 109, 8), 'objects', copy_call_result_3270)
# Call to extend(...): (line 112)
# Processing the call arguments (line 112)
# Getting the type of 'self' (line 112)
self_3273 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 112, 25), 'self', False)
# Obtaining the member 'dll_libraries' of a type (line 112)
dll_libraries_3274 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 112, 25), self_3273, 'dll_libraries')
# Processing the call keyword arguments (line 112)
kwargs_3275 = {}
# Getting the type of 'libraries' (line 112)
libraries_3271 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 112, 8), 'libraries', False)
# Obtaining the member 'extend' of a type (line 112)
extend_3272 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 112, 8), libraries_3271, 'extend')
# Calling extend(args, kwargs) (line 112)
extend_call_result_3276 = invoke(stypy.reporting.localization.Localization(__file__, 112, 8), extend_3272, *[dll_libraries_3274], **kwargs_3275)
# Evaluating a boolean operation
# Getting the type of 'export_symbols' (line 116)
export_symbols_3277 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 116, 13), 'export_symbols')
# Getting the type of 'None' (line 116)
None_3278 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 116, 35), 'None')
# Applying the binary operator 'isnot' (line 116)
result_is_not_3279 = python_operator(stypy.reporting.localization.Localization(__file__, 116, 13), 'isnot', export_symbols_3277, None_3278)
# Getting the type of 'target_desc' (line 117)
target_desc_3280 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 117, 13), 'target_desc')
# Getting the type of 'self' (line 117)
self_3281 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 117, 28), 'self')
# Obtaining the member 'EXECUTABLE' of a type (line 117)
EXECUTABLE_3282 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 117, 28), self_3281, 'EXECUTABLE')
# Applying the binary operator '!=' (line 117)
result_ne_3283 = python_operator(stypy.reporting.localization.Localization(__file__, 117, 13), '!=', target_desc_3280, EXECUTABLE_3282)
# Applying the binary operator 'and' (line 116)
result_and_keyword_3284 = python_operator(stypy.reporting.localization.Localization(__file__, 116, 12), 'and', result_is_not_3279, result_ne_3283)
# Testing the type of an if condition (line 116)
if_condition_3285 = is_suitable_condition(stypy.reporting.localization.Localization(__file__, 116, 8), result_and_keyword_3284)
# Assigning a type to the variable 'if_condition_3285' (line 116)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 116, 8), 'if_condition_3285', if_condition_3285)
# SSA begins for if statement (line 116)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'if')
# Assigning a Call to a Name (line 126):
# Assigning a Call to a Name (line 126):
# Call to dirname(...): (line 126)
# Processing the call arguments (line 126)
# Obtaining the type of the subscript
int_3289 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 126, 47), 'int')
# Getting the type of 'objects' (line 126)
objects_3290 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 126, 39), 'objects', False)
# Obtaining the member '__getitem__' of a type (line 126)
getitem___3291 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 126, 39), objects_3290, '__getitem__')
# Calling the subscript (__getitem__) to obtain the elements type (line 126)
subscript_call_result_3292 = invoke(stypy.reporting.localization.Localization(__file__, 126, 39), getitem___3291, int_3289)
# Processing the call keyword arguments (line 126)
kwargs_3293 = {}
# Getting the type of 'os' (line 126)
os_3286 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 126, 23), 'os', False)
# Obtaining the member 'path' of a type (line 126)
path_3287 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 126, 23), os_3286, 'path')
# Obtaining the member 'dirname' of a type (line 126)
dirname_3288 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 126, 23), path_3287, 'dirname')
# Calling dirname(args, kwargs) (line 126)
dirname_call_result_3294 = invoke(stypy.reporting.localization.Localization(__file__, 126, 23), dirname_3288, *[subscript_call_result_3292], **kwargs_3293)
# Assigning a type to the variable 'temp_dir' (line 126)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 126, 12), 'temp_dir', dirname_call_result_3294)
# Assigning a Call to a Tuple (line 128):
# Assigning a Subscript to a Name (line 128):
# Obtaining the type of the subscript
int_3295 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 128, 12), 'int')
# Call to splitext(...): (line 128)
# Processing the call arguments (line 128)
# Call to basename(...): (line 129)
# Processing the call arguments (line 129)
# Getting the type of 'output_filename' (line 129)
output_filename_3302 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 129, 33), 'output_filename', False)
# Processing the call keyword arguments (line 129)
kwargs_3303 = {}
# Getting the type of 'os' (line 129)
os_3299 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 129, 16), 'os', False)
# Obtaining the member 'path' of a type (line 129)
path_3300 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 129, 16), os_3299, 'path')
# Obtaining the member 'basename' of a type (line 129)
basename_3301 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 129, 16), path_3300, 'basename')
# Calling basename(args, kwargs) (line 129)
basename_call_result_3304 = invoke(stypy.reporting.localization.Localization(__file__, 129, 16), basename_3301, *[output_filename_3302], **kwargs_3303)
# Processing the call keyword arguments (line 128)
kwargs_3305 = {}
# Getting the type of 'os' (line 128)
os_3296 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 128, 40), 'os', False)
# Obtaining the member 'path' of a type (line 128)
path_3297 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 128, 40), os_3296, 'path')
# Obtaining the member 'splitext' of a type (line 128)
splitext_3298 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 128, 40), path_3297, 'splitext')
# Calling splitext(args, kwargs) (line 128)
splitext_call_result_3306 = invoke(stypy.reporting.localization.Localization(__file__, 128, 40), splitext_3298, *[basename_call_result_3304], **kwargs_3305)
# Obtaining the member '__getitem__' of a type (line 128)
getitem___3307 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 128, 12), splitext_call_result_3306, '__getitem__')
# Calling the subscript (__getitem__) to obtain the elements type (line 128)
subscript_call_result_3308 = invoke(stypy.reporting.localization.Localization(__file__, 128, 12), getitem___3307, int_3295)
# Assigning a type to the variable 'tuple_var_assignment_3099' (line 128)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 128, 12), 'tuple_var_assignment_3099', subscript_call_result_3308)
# Assigning a Subscript to a Name (line 128):
# Obtaining the type of the subscript
int_3309 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 128, 12), 'int')
# Call to splitext(...): (line 128)
# Processing the call arguments (line 128)
# Call to basename(...): (line 129)
# Processing the call arguments (line 129)
# Getting the type of 'output_filename' (line 129)
output_filename_3316 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 129, 33), 'output_filename', False)
# Processing the call keyword arguments (line 129)
kwargs_3317 = {}
# Getting the type of 'os' (line 129)
os_3313 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 129, 16), 'os', False)
# Obtaining the member 'path' of a type (line 129)
path_3314 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 129, 16), os_3313, 'path')
# Obtaining the member 'basename' of a type (line 129)
basename_3315 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 129, 16), path_3314, 'basename')
# Calling basename(args, kwargs) (line 129)
basename_call_result_3318 = invoke(stypy.reporting.localization.Localization(__file__, 129, 16), basename_3315, *[output_filename_3316], **kwargs_3317)
# Processing the call keyword arguments (line 128)
kwargs_3319 = {}
# Getting the type of 'os' (line 128)
os_3310 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 128, 40), 'os', False)
# Obtaining the member 'path' of a type (line 128)
path_3311 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 128, 40), os_3310, 'path')
# Obtaining the member 'splitext' of a type (line 128)
splitext_3312 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 128, 40), path_3311, 'splitext')
# Calling splitext(args, kwargs) (line 128)
splitext_call_result_3320 = invoke(stypy.reporting.localization.Localization(__file__, 128, 40), splitext_3312, *[basename_call_result_3318], **kwargs_3319)
# Obtaining the member '__getitem__' of a type (line 128)
getitem___3321 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 128, 12), splitext_call_result_3320, '__getitem__')
# Calling the subscript (__getitem__) to obtain the elements type (line 128)
subscript_call_result_3322 = invoke(stypy.reporting.localization.Localization(__file__, 128, 12), getitem___3321, int_3309)
# Assigning a type to the variable 'tuple_var_assignment_3100' (line 128)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 128, 12), 'tuple_var_assignment_3100', subscript_call_result_3322)
# Assigning a Name to a Name (line 128):
# Getting the type of 'tuple_var_assignment_3099' (line 128)
tuple_var_assignment_3099_3323 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 128, 12), 'tuple_var_assignment_3099')
# Assigning a type to the variable 'dll_name' (line 128)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 128, 13), 'dll_name', tuple_var_assignment_3099_3323)
# Assigning a Name to a Name (line 128):
# Getting the type of 'tuple_var_assignment_3100' (line 128)
tuple_var_assignment_3100_3324 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 128, 12), 'tuple_var_assignment_3100')
# Assigning a type to the variable 'dll_extension' (line 128)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 128, 23), 'dll_extension', tuple_var_assignment_3100_3324)
# Assigning a Call to a Name (line 132):
# Assigning a Call to a Name (line 132):
# Call to join(...): (line 132)
# Processing the call arguments (line 132)
# Getting the type of 'temp_dir' (line 132)
temp_dir_3328 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 132, 36), 'temp_dir', False)
# Getting the type of 'dll_name' (line 132)
dll_name_3329 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 132, 46), 'dll_name', False)
str_3330 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 132, 57), 'str', '.def')
# Applying the binary operator '+' (line 132)
result_add_3331 = python_operator(stypy.reporting.localization.Localization(__file__, 132, 46), '+', dll_name_3329, str_3330)
# Processing the call keyword arguments (line 132)
kwargs_3332 = {}
# Getting the type of 'os' (line 132)
os_3325 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 132, 23), 'os', False)
# Obtaining the member 'path' of a type (line 132)
path_3326 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 132, 23), os_3325, 'path')
# Obtaining the member 'join' of a type (line 132)
join_3327 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 132, 23), path_3326, 'join')
# Calling join(args, kwargs) (line 132)
join_call_result_3333 = invoke(stypy.reporting.localization.Localization(__file__, 132, 23), join_3327, *[temp_dir_3328, result_add_3331], **kwargs_3332)
# Assigning a type to the variable 'def_file' (line 132)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 132, 12), 'def_file', join_call_result_3333)
# Assigning a List to a Name (line 135):
# Assigning a List to a Name (line 135):
# Obtaining an instance of the builtin type 'list' (line 135)
list_3334 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 135, 23), 'list')
# Adding type elements to the builtin type 'list' instance (line 135)
# Adding element type (line 135)
str_3335 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 136, 16), 'str', 'LIBRARY %s INITINSTANCE TERMINSTANCE')
# Obtaining the type of the subscript
int_3336 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 137, 68), 'int')
# Call to splitext(...): (line 137)
# Processing the call arguments (line 137)
# Call to basename(...): (line 137)
# Processing the call arguments (line 137)
# Getting the type of 'output_filename' (line 137)
output_filename_3343 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 137, 50), 'output_filename', False)
# Processing the call keyword arguments (line 137)
kwargs_3344 = {}
# Getting the type of 'os' (line 137)
os_3340 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 137, 33), 'os', False)
# Obtaining the member 'path' of a type (line 137)
path_3341 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 137, 33), os_3340, 'path')
# Obtaining the member 'basename' of a type (line 137)
basename_3342 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 137, 33), path_3341, 'basename')
# Calling basename(args, kwargs) (line 137)
basename_call_result_3345 = invoke(stypy.reporting.localization.Localization(__file__, 137, 33), basename_3342, *[output_filename_3343], **kwargs_3344)
# Processing the call keyword arguments (line 137)
kwargs_3346 = {}
# Getting the type of 'os' (line 137)
os_3337 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 137, 16), 'os', False)
# Obtaining the member 'path' of a type (line 137)
path_3338 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 137, 16), os_3337, 'path')
# Obtaining the member 'splitext' of a type (line 137)
splitext_3339 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 137, 16), path_3338, 'splitext')
# Calling splitext(args, kwargs) (line 137)
splitext_call_result_3347 = invoke(stypy.reporting.localization.Localization(__file__, 137, 16), splitext_3339, *[basename_call_result_3345], **kwargs_3346)
# Obtaining the member '__getitem__' of a type (line 137)
getitem___3348 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 137, 16), splitext_call_result_3347, '__getitem__')
# Calling the subscript (__getitem__) to obtain the elements type (line 137)
subscript_call_result_3349 = invoke(stypy.reporting.localization.Localization(__file__, 137, 16), getitem___3348, int_3336)
# Applying the binary operator '%' (line 136)
result_mod_3350 = python_operator(stypy.reporting.localization.Localization(__file__, 136, 16), '%', str_3335, subscript_call_result_3349)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 135, 23), list_3334, result_mod_3350)
# Adding element type (line 135)
str_3351 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 138, 16), 'str', 'DATA MULTIPLE NONSHARED')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 135, 23), list_3334, str_3351)
# Adding element type (line 135)
str_3352 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 139, 16), 'str', 'EXPORTS')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 135, 23), list_3334, str_3352)
# Assigning a type to the variable 'contents' (line 135)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 135, 12), 'contents', list_3334)
# Getting the type of 'export_symbols' (line 140)
export_symbols_3353 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 140, 23), 'export_symbols')
# Testing the type of a for loop iterable (line 140)
is_suitable_for_loop_condition(stypy.reporting.localization.Localization(__file__, 140, 12), export_symbols_3353)
# Getting the type of the for loop variable (line 140)
for_loop_var_3354 = get_type_of_for_loop_variable(stypy.reporting.localization.Localization(__file__, 140, 12), export_symbols_3353)
# Assigning a type to the variable 'sym' (line 140)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 140, 12), 'sym', for_loop_var_3354)
# SSA begins for a for statement (line 140)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'for loop')
# Call to append(...): (line 141)
# Processing the call arguments (line 141)
str_3357 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 141, 32), 'str', ' "%s"')
# Getting the type of 'sym' (line 141)
sym_3358 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 141, 43), 'sym', False)
# Applying the binary operator '%' (line 141)
result_mod_3359 = python_operator(stypy.reporting.localization.Localization(__file__, 141, 32), '%', str_3357, sym_3358)
# Processing the call keyword arguments (line 141)
kwargs_3360 = {}
# Getting the type of 'contents' (line 141)
contents_3355 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 141, 16), 'contents', False)
# Obtaining the member 'append' of a type (line 141)
append_3356 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 141, 16), contents_3355, 'append')
# Calling append(args, kwargs) (line 141)
append_call_result_3361 = invoke(stypy.reporting.localization.Localization(__file__, 141, 16), append_3356, *[result_mod_3359], **kwargs_3360)
# SSA join for a for statement
module_type_store = module_type_store.join_ssa_context()
# Call to execute(...): (line 142)
# Processing the call arguments (line 142)
# Getting the type of 'write_file' (line 142)
write_file_3364 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 142, 25), 'write_file', False)
# Obtaining an instance of the builtin type 'tuple' (line 142)
tuple_3365 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 142, 38), 'tuple')
# Adding type elements to the builtin type 'tuple' instance (line 142)
# Adding element type (line 142)
# Getting the type of 'def_file' (line 142)
def_file_3366 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 142, 38), 'def_file', False)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 142, 38), tuple_3365, def_file_3366)
# Adding element type (line 142)
# Getting the type of 'contents' (line 142)
contents_3367 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 142, 48), 'contents', False)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 142, 38), tuple_3365, contents_3367)
str_3368 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 143, 25), 'str', 'writing %s')
# Getting the type of 'def_file' (line 143)
def_file_3369 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 143, 40), 'def_file', False)
# Applying the binary operator '%' (line 143)
result_mod_3370 = python_operator(stypy.reporting.localization.Localization(__file__, 143, 25), '%', str_3368, def_file_3369)
# Processing the call keyword arguments (line 142)
kwargs_3371 = {}
# Getting the type of 'self' (line 142)
self_3362 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 142, 12), 'self', False)
# Obtaining the member 'execute' of a type (line 142)
execute_3363 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 142, 12), self_3362, 'execute')
# Calling execute(args, kwargs) (line 142)
execute_call_result_3372 = invoke(stypy.reporting.localization.Localization(__file__, 142, 12), execute_3363, *[write_file_3364, tuple_3365, result_mod_3370], **kwargs_3371)
# Call to append(...): (line 147)
# Processing the call arguments (line 147)
# Getting the type of 'def_file' (line 147)
def_file_3375 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 147, 27), 'def_file', False)
# Processing the call keyword arguments (line 147)
kwargs_3376 = {}
# Getting the type of 'objects' (line 147)
objects_3373 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 147, 12), 'objects', False)
# Obtaining the member 'append' of a type (line 147)
append_3374 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 147, 12), objects_3373, 'append')
# Calling append(args, kwargs) (line 147)
append_call_result_3377 = invoke(stypy.reporting.localization.Localization(__file__, 147, 12), append_3374, *[def_file_3375], **kwargs_3376)
# SSA join for if statement (line 116)
module_type_store = module_type_store.join_ssa_context()
# Getting the type of 'debug' (line 158)
debug_3378 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 158, 15), 'debug')
# Applying the 'not' unary operator (line 158)
result_not__3379 = python_operator(stypy.reporting.localization.Localization(__file__, 158, 11), 'not', debug_3378)
# Testing the type of an if condition (line 158)
if_condition_3380 = is_suitable_condition(stypy.reporting.localization.Localization(__file__, 158, 8), result_not__3379)
# Assigning a type to the variable 'if_condition_3380' (line 158)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 158, 8), 'if_condition_3380', if_condition_3380)
# SSA begins for if statement (line 158)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'if')
# Call to append(...): (line 159)
# Processing the call arguments (line 159)
str_3383 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 159, 33), 'str', '-s')
# Processing the call keyword arguments (line 159)
kwargs_3384 = {}
# Getting the type of 'extra_preargs' (line 159)
extra_preargs_3381 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 159, 12), 'extra_preargs', False)
# Obtaining the member 'append' of a type (line 159)
append_3382 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 159, 12), extra_preargs_3381, 'append')
# Calling append(args, kwargs) (line 159)
append_call_result_3385 = invoke(stypy.reporting.localization.Localization(__file__, 159, 12), append_3382, *[str_3383], **kwargs_3384)
# SSA join for if statement (line 158)
module_type_store = module_type_store.join_ssa_context()
# Call to link(...): (line 161)
# Processing the call arguments (line 161)
# Getting the type of 'self' (line 161)
self_3388 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 161, 27), 'self', False)
# Getting the type of 'target_desc' (line 162)
target_desc_3389 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 162, 27), 'target_desc', False)
# Getting the type of 'objects' (line 163)
objects_3390 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 163, 27), 'objects', False)
# Getting the type of 'output_filename' (line 164)
output_filename_3391 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 164, 27), 'output_filename', False)
# Getting the type of 'output_dir' (line 165)
output_dir_3392 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 165, 27), 'output_dir', False)
# Getting the type of 'libraries' (line 166)
libraries_3393 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 166, 27), 'libraries', False)
# Getting the type of 'library_dirs' (line 167)
library_dirs_3394 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 167, 27), 'library_dirs', False)
# Getting the type of 'runtime_library_dirs' (line 168)
runtime_library_dirs_3395 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 168, 27), 'runtime_library_dirs', False)
# Getting the type of 'None' (line 169)
None_3396 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 169, 27), 'None', False)
# Getting the type of 'debug' (line 170)
debug_3397 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 170, 27), 'debug', False)
# Getting the type of 'extra_preargs' (line 171)
extra_preargs_3398 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 171, 27), 'extra_preargs', False)
# Getting the type of 'extra_postargs' (line 172)
extra_postargs_3399 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 172, 27), 'extra_postargs', False)
# Getting the type of 'build_temp' (line 173)
build_temp_3400 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 173, 27), 'build_temp', False)
# Getting the type of 'target_lang' (line 174)
target_lang_3401 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 174, 27), 'target_lang', False)
# Processing the call keyword arguments (line 161)
kwargs_3402 = {}
# Getting the type of 'UnixCCompiler' (line 161)
UnixCCompiler_3386 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 161, 8), 'UnixCCompiler', False)
# Obtaining the member 'link' of a type (line 161)
link_3387 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 161, 8), UnixCCompiler_3386, 'link')
# Calling link(args, kwargs) (line 161)
link_call_result_3403 = invoke(stypy.reporting.localization.Localization(__file__, 161, 8), link_3387, *[self_3388, target_desc_3389, objects_3390, output_filename_3391, output_dir_3392, libraries_3393, library_dirs_3394, runtime_library_dirs_3395, None_3396, debug_3397, extra_preargs_3398, extra_postargs_3399, build_temp_3400, target_lang_3401], **kwargs_3402)
# ################# End of 'link(...)' code ##################
# Teardown call information
teardown_call_information(localization, arguments)
# Storing the return type of function 'link' in the type store
# Getting the type of 'stypy_return_type' (line 91)
stypy_return_type_3404 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 91, 4), 'stypy_return_type')
module_type_store.store_return_type_of_current_context(stypy_return_type_3404)
# Destroy the current context
module_type_store = module_type_store.close_function_context()
# Return type of the function 'link'
return stypy_return_type_3404
@norecursion
def object_filenames(type_of_self, localization, *varargs, **kwargs):
global module_type_store
# Assign values to the parameters with defaults
int_3405 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 184, 36), 'int')
str_3406 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 185, 37), 'str', '')
defaults = [int_3405, str_3406]
# Create a new context for function 'object_filenames'
module_type_store = module_type_store.open_function_context('object_filenames', 182, 4, False)
# Assigning a type to the variable 'self' (line 183)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 183, 4), 'self', type_of_self)
# Passed parameters checking function
EMXCCompiler.object_filenames.__dict__.__setitem__('stypy_localization', localization)
EMXCCompiler.object_filenames.__dict__.__setitem__('stypy_type_of_self', type_of_self)
EMXCCompiler.object_filenames.__dict__.__setitem__('stypy_type_store', module_type_store)
EMXCCompiler.object_filenames.__dict__.__setitem__('stypy_function_name', 'EMXCCompiler.object_filenames')
EMXCCompiler.object_filenames.__dict__.__setitem__('stypy_param_names_list', ['source_filenames', 'strip_dir', 'output_dir'])
EMXCCompiler.object_filenames.__dict__.__setitem__('stypy_varargs_param_name', None)
EMXCCompiler.object_filenames.__dict__.__setitem__('stypy_kwargs_param_name', None)
EMXCCompiler.object_filenames.__dict__.__setitem__('stypy_call_defaults', defaults)
EMXCCompiler.object_filenames.__dict__.__setitem__('stypy_call_varargs', varargs)
EMXCCompiler.object_filenames.__dict__.__setitem__('stypy_call_kwargs', kwargs)
EMXCCompiler.object_filenames.__dict__.__setitem__('stypy_declared_arg_number', 4)
arguments = process_argument_values(localization, type_of_self, module_type_store, 'EMXCCompiler.object_filenames', ['source_filenames', 'strip_dir', 'output_dir'], None, None, defaults, varargs, kwargs)
if is_error_type(arguments):
# Destroy the current context
module_type_store = module_type_store.close_function_context()
return arguments
# Initialize method data
init_call_information(module_type_store, 'object_filenames', localization, ['source_filenames', 'strip_dir', 'output_dir'], arguments)
# Default return type storage variable (SSA)
# Assigning a type to the variable 'stypy_return_type'
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'stypy_return_type', None)
# ################# Begin of 'object_filenames(...)' code ##################
# Type idiom detected: calculating its left and rigth part (line 186)
# Getting the type of 'output_dir' (line 186)
output_dir_3407 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 186, 11), 'output_dir')
# Getting the type of 'None' (line 186)
None_3408 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 186, 25), 'None')
(may_be_3409, more_types_in_union_3410) = may_be_none(output_dir_3407, None_3408)
if may_be_3409:
if more_types_in_union_3410:
# Runtime conditional SSA (line 186)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'idiom if')
else:
module_type_store = module_type_store
# Assigning a Str to a Name (line 186):
# Assigning a Str to a Name (line 186):
str_3411 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 186, 44), 'str', '')
# Assigning a type to the variable 'output_dir' (line 186)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 186, 31), 'output_dir', str_3411)
if more_types_in_union_3410:
# SSA join for if statement (line 186)
module_type_store = module_type_store.join_ssa_context()
# Assigning a List to a Name (line 187):
# Assigning a List to a Name (line 187):
# Obtaining an instance of the builtin type 'list' (line 187)
list_3412 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 187, 20), 'list')
# Adding type elements to the builtin type 'list' instance (line 187)
# Assigning a type to the variable 'obj_names' (line 187)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 187, 8), 'obj_names', list_3412)
# Getting the type of 'source_filenames' (line 188)
source_filenames_3413 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 188, 24), 'source_filenames')
# Testing the type of a for loop iterable (line 188)
is_suitable_for_loop_condition(stypy.reporting.localization.Localization(__file__, 188, 8), source_filenames_3413)
# Getting the type of the for loop variable (line 188)
for_loop_var_3414 = get_type_of_for_loop_variable(stypy.reporting.localization.Localization(__file__, 188, 8), source_filenames_3413)
# Assigning a type to the variable 'src_name' (line 188)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 188, 8), 'src_name', for_loop_var_3414)
# SSA begins for a for statement (line 188)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'for loop')
# Assigning a Call to a Tuple (line 190):
# Assigning a Subscript to a Name (line 190):
# Obtaining the type of the subscript
int_3415 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 190, 12), 'int')
# Call to splitext(...): (line 190)
# Processing the call arguments (line 190)
# Call to normcase(...): (line 190)
# Processing the call arguments (line 190)
# Getting the type of 'src_name' (line 190)
src_name_3422 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 190, 61), 'src_name', False)
# Processing the call keyword arguments (line 190)
kwargs_3423 = {}
# Getting the type of 'os' (line 190)
os_3419 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 190, 44), 'os', False)
# Obtaining the member 'path' of a type (line 190)
path_3420 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 190, 44), os_3419, 'path')
# Obtaining the member 'normcase' of a type (line 190)
normcase_3421 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 190, 44), path_3420, 'normcase')
# Calling normcase(args, kwargs) (line 190)
normcase_call_result_3424 = invoke(stypy.reporting.localization.Localization(__file__, 190, 44), normcase_3421, *[src_name_3422], **kwargs_3423)
# Processing the call keyword arguments (line 190)
kwargs_3425 = {}
# Getting the type of 'os' (line 190)
os_3416 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 190, 26), 'os', False)
# Obtaining the member 'path' of a type (line 190)
path_3417 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 190, 26), os_3416, 'path')
# Obtaining the member 'splitext' of a type (line 190)
splitext_3418 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 190, 26), path_3417, 'splitext')
# Calling splitext(args, kwargs) (line 190)
splitext_call_result_3426 = invoke(stypy.reporting.localization.Localization(__file__, 190, 26), splitext_3418, *[normcase_call_result_3424], **kwargs_3425)
# Obtaining the member '__getitem__' of a type (line 190)
getitem___3427 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 190, 12), splitext_call_result_3426, '__getitem__')
# Calling the subscript (__getitem__) to obtain the elements type (line 190)
subscript_call_result_3428 = invoke(stypy.reporting.localization.Localization(__file__, 190, 12), getitem___3427, int_3415)
# Assigning a type to the variable 'tuple_var_assignment_3101' (line 190)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 190, 12), 'tuple_var_assignment_3101', subscript_call_result_3428)
# Assigning a Subscript to a Name (line 190):
# Obtaining the type of the subscript
int_3429 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 190, 12), 'int')
# Call to splitext(...): (line 190)
# Processing the call arguments (line 190)
# Call to normcase(...): (line 190)
# Processing the call arguments (line 190)
# Getting the type of 'src_name' (line 190)
src_name_3436 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 190, 61), 'src_name', False)
# Processing the call keyword arguments (line 190)
kwargs_3437 = {}
# Getting the type of 'os' (line 190)
os_3433 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 190, 44), 'os', False)
# Obtaining the member 'path' of a type (line 190)
path_3434 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 190, 44), os_3433, 'path')
# Obtaining the member 'normcase' of a type (line 190)
normcase_3435 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 190, 44), path_3434, 'normcase')
# Calling normcase(args, kwargs) (line 190)
normcase_call_result_3438 = invoke(stypy.reporting.localization.Localization(__file__, 190, 44), normcase_3435, *[src_name_3436], **kwargs_3437)
# Processing the call keyword arguments (line 190)
kwargs_3439 = {}
# Getting the type of 'os' (line 190)
os_3430 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 190, 26), 'os', False)
# Obtaining the member 'path' of a type (line 190)
path_3431 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 190, 26), os_3430, 'path')
# Obtaining the member 'splitext' of a type (line 190)
splitext_3432 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 190, 26), path_3431, 'splitext')
# Calling splitext(args, kwargs) (line 190)
splitext_call_result_3440 = invoke(stypy.reporting.localization.Localization(__file__, 190, 26), splitext_3432, *[normcase_call_result_3438], **kwargs_3439)
# Obtaining the member '__getitem__' of a type (line 190)
getitem___3441 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 190, 12), splitext_call_result_3440, '__getitem__')
# Calling the subscript (__getitem__) to obtain the elements type (line 190)
subscript_call_result_3442 = invoke(stypy.reporting.localization.Localization(__file__, 190, 12), getitem___3441, int_3429)
# Assigning a type to the variable 'tuple_var_assignment_3102' (line 190)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 190, 12), 'tuple_var_assignment_3102', subscript_call_result_3442)
# Assigning a Name to a Name (line 190):
# Getting the type of 'tuple_var_assignment_3101' (line 190)
tuple_var_assignment_3101_3443 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 190, 12), 'tuple_var_assignment_3101')
# Assigning a type to the variable 'base' (line 190)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 190, 13), 'base', tuple_var_assignment_3101_3443)
# Assigning a Name to a Name (line 190):
# Getting the type of 'tuple_var_assignment_3102' (line 190)
tuple_var_assignment_3102_3444 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 190, 12), 'tuple_var_assignment_3102')
# Assigning a type to the variable 'ext' (line 190)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 190, 19), 'ext', tuple_var_assignment_3102_3444)
# Getting the type of 'ext' (line 191)
ext_3445 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 191, 15), 'ext')
# Getting the type of 'self' (line 191)
self_3446 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 191, 27), 'self')
# Obtaining the member 'src_extensions' of a type (line 191)
src_extensions_3447 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 191, 27), self_3446, 'src_extensions')
# Obtaining an instance of the builtin type 'list' (line 191)
list_3448 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 191, 49), 'list')
# Adding type elements to the builtin type 'list' instance (line 191)
# Adding element type (line 191)
str_3449 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 191, 50), 'str', '.rc')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 191, 49), list_3448, str_3449)
# Applying the binary operator '+' (line 191)
result_add_3450 = python_operator(stypy.reporting.localization.Localization(__file__, 191, 27), '+', src_extensions_3447, list_3448)
# Applying the binary operator 'notin' (line 191)
result_contains_3451 = python_operator(stypy.reporting.localization.Localization(__file__, 191, 15), 'notin', ext_3445, result_add_3450)
# Testing the type of an if condition (line 191)
if_condition_3452 = is_suitable_condition(stypy.reporting.localization.Localization(__file__, 191, 12), result_contains_3451)
# Assigning a type to the variable 'if_condition_3452' (line 191)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 191, 12), 'if_condition_3452', if_condition_3452)
# SSA begins for if statement (line 191)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'if')
# Getting the type of 'UnknownFileError' (line 192)
UnknownFileError_3453 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 192, 22), 'UnknownFileError')
ensure_var_of_types(stypy.reporting.localization.Localization(__file__, 192, 16), UnknownFileError_3453, 'raise parameter', BaseException)
# SSA join for if statement (line 191)
module_type_store = module_type_store.join_ssa_context()
# Getting the type of 'strip_dir' (line 195)
strip_dir_3454 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 195, 15), 'strip_dir')
# Testing the type of an if condition (line 195)
if_condition_3455 = is_suitable_condition(stypy.reporting.localization.Localization(__file__, 195, 12), strip_dir_3454)
# Assigning a type to the variable 'if_condition_3455' (line 195)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 195, 12), 'if_condition_3455', if_condition_3455)
# SSA begins for if statement (line 195)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'if')
# Assigning a Call to a Name (line 196):
# Assigning a Call to a Name (line 196):
# Call to basename(...): (line 196)
# Processing the call arguments (line 196)
# Getting the type of 'base' (line 196)
base_3459 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 196, 41), 'base', False)
# Processing the call keyword arguments (line 196)
kwargs_3460 = {}
# Getting the type of 'os' (line 196)
os_3456 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 196, 23), 'os', False)
# Obtaining the member 'path' of a type (line 196)
path_3457 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 196, 23), os_3456, 'path')
# Obtaining the member 'basename' of a type (line 196)
basename_3458 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 196, 23), path_3457, 'basename')
# Calling basename(args, kwargs) (line 196)
basename_call_result_3461 = invoke(stypy.reporting.localization.Localization(__file__, 196, 23), basename_3458, *[base_3459], **kwargs_3460)
# Assigning a type to the variable 'base' (line 196)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 196, 16), 'base', basename_call_result_3461)
# SSA join for if statement (line 195)
module_type_store = module_type_store.join_ssa_context()
# Getting the type of 'ext' (line 197)
ext_3462 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 197, 15), 'ext')
str_3463 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 197, 22), 'str', '.rc')
# Applying the binary operator '==' (line 197)
result_eq_3464 = python_operator(stypy.reporting.localization.Localization(__file__, 197, 15), '==', ext_3462, str_3463)
# Testing the type of an if condition (line 197)
if_condition_3465 = is_suitable_condition(stypy.reporting.localization.Localization(__file__, 197, 12), result_eq_3464)
# Assigning a type to the variable 'if_condition_3465' (line 197)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 197, 12), 'if_condition_3465', if_condition_3465)
# SSA begins for if statement (line 197)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'if')
# Call to append(...): (line 199)
# Processing the call arguments (line 199)
# Call to join(...): (line 199)
# Processing the call arguments (line 199)
# Getting the type of 'output_dir' (line 199)
output_dir_3471 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 199, 48), 'output_dir', False)
# Getting the type of 'base' (line 200)
base_3472 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 200, 44), 'base', False)
# Getting the type of 'self' (line 200)
self_3473 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 200, 51), 'self', False)
# Obtaining the member 'res_extension' of a type (line 200)
res_extension_3474 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 200, 51), self_3473, 'res_extension')
# Applying the binary operator '+' (line 200)
result_add_3475 = python_operator(stypy.reporting.localization.Localization(__file__, 200, 44), '+', base_3472, res_extension_3474)
# Processing the call keyword arguments (line 199)
kwargs_3476 = {}
# Getting the type of 'os' (line 199)
os_3468 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 199, 34), 'os', False)
# Obtaining the member 'path' of a type (line 199)
path_3469 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 199, 34), os_3468, 'path')
# Obtaining the member 'join' of a type (line 199)
join_3470 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 199, 34), path_3469, 'join')
# Calling join(args, kwargs) (line 199)
join_call_result_3477 = invoke(stypy.reporting.localization.Localization(__file__, 199, 34), join_3470, *[output_dir_3471, result_add_3475], **kwargs_3476)
# Processing the call keyword arguments (line 199)
kwargs_3478 = {}
# Getting the type of 'obj_names' (line 199)
obj_names_3466 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 199, 16), 'obj_names', False)
# Obtaining the member 'append' of a type (line 199)
append_3467 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 199, 16), obj_names_3466, 'append')
# Calling append(args, kwargs) (line 199)
append_call_result_3479 = invoke(stypy.reporting.localization.Localization(__file__, 199, 16), append_3467, *[join_call_result_3477], **kwargs_3478)
# SSA branch for the else part of an if statement (line 197)
module_type_store.open_ssa_branch('else')
# Call to append(...): (line 202)
# Processing the call arguments (line 202)
# Call to join(...): (line 202)
# Processing the call arguments (line 202)
# Getting the type of 'output_dir' (line 202)
output_dir_3485 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 202, 48), 'output_dir', False)
# Getting the type of 'base' (line 203)
base_3486 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 203, 44), 'base', False)
# Getting the type of 'self' (line 203)
self_3487 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 203, 51), 'self', False)
# Obtaining the member 'obj_extension' of a type (line 203)
obj_extension_3488 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 203, 51), self_3487, 'obj_extension')
# Applying the binary operator '+' (line 203)
result_add_3489 = python_operator(stypy.reporting.localization.Localization(__file__, 203, 44), '+', base_3486, obj_extension_3488)
# Processing the call keyword arguments (line 202)
kwargs_3490 = {}
# Getting the type of 'os' (line 202)
os_3482 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 202, 34), 'os', False)
# Obtaining the member 'path' of a type (line 202)
path_3483 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 202, 34), os_3482, 'path')
# Obtaining the member 'join' of a type (line 202)
join_3484 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 202, 34), path_3483, 'join')
# Calling join(args, kwargs) (line 202)
join_call_result_3491 = invoke(stypy.reporting.localization.Localization(__file__, 202, 34), join_3484, *[output_dir_3485, result_add_3489], **kwargs_3490)
# Processing the call keyword arguments (line 202)
kwargs_3492 = {}
# Getting the type of 'obj_names' (line 202)
obj_names_3480 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 202, 16), 'obj_names', False)
# Obtaining the member 'append' of a type (line 202)
append_3481 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 202, 16), obj_names_3480, 'append')
# Calling append(args, kwargs) (line 202)
append_call_result_3493 = invoke(stypy.reporting.localization.Localization(__file__, 202, 16), append_3481, *[join_call_result_3491], **kwargs_3492)
# SSA join for if statement (line 197)
module_type_store = module_type_store.join_ssa_context()
# SSA join for a for statement
module_type_store = module_type_store.join_ssa_context()
# Getting the type of 'obj_names' (line 204)
obj_names_3494 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 204, 15), 'obj_names')
# Assigning a type to the variable 'stypy_return_type' (line 204)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 204, 8), 'stypy_return_type', obj_names_3494)
# ################# End of 'object_filenames(...)' code ##################
# Teardown call information
teardown_call_information(localization, arguments)
# Storing the return type of function 'object_filenames' in the type store
# Getting the type of 'stypy_return_type' (line 182)
stypy_return_type_3495 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 182, 4), 'stypy_return_type')
module_type_store.store_return_type_of_current_context(stypy_return_type_3495)
# Destroy the current context
module_type_store = module_type_store.close_function_context()
# Return type of the function 'object_filenames'
return stypy_return_type_3495
    @norecursion
    def find_library_file(type_of_self, localization, *varargs, **kwargs):
        """Type-inference wrapper for ``EMXCCompiler.find_library_file``.

        Auto-generated by stypy: instead of executing the original method,
        this routine replays its statements at the *type* level inside a
        fresh SSA function context, mirroring the original logic:
        build ``'%s.lib'`` / ``'lib%s.lib'`` names, read
        ``os.environ['LIBRARY_PATH'].split(';')`` (falling back to ``[]``
        on KeyError), scan ``dirs + emx_dirs`` for an existing candidate,
        and return the inferred type stored in ``stypy_return_type``
        (a path string, or the type of ``None`` when nothing matched).

        NOTE(review): code below is machine-generated and order-dependent —
        do not hand-edit the numbered temporaries.
        """
        global module_type_store
        # Assign values to the parameters with defaults
        # (default for 'debug' — the trailing int parameter of the original signature)
        int_3496 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 210, 49), 'int')
        defaults = [int_3496]
        # Create a new context for function 'find_library_file'
        module_type_store = module_type_store.open_function_context('find_library_file', 210, 4, False)
        # Assigning a type to the variable 'self' (line 211)
        module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 211, 4), 'self', type_of_self)
        # Passed parameters checking function
        # Record call metadata on the method object so stypy can validate callers.
        EMXCCompiler.find_library_file.__dict__.__setitem__('stypy_localization', localization)
        EMXCCompiler.find_library_file.__dict__.__setitem__('stypy_type_of_self', type_of_self)
        EMXCCompiler.find_library_file.__dict__.__setitem__('stypy_type_store', module_type_store)
        EMXCCompiler.find_library_file.__dict__.__setitem__('stypy_function_name', 'EMXCCompiler.find_library_file')
        EMXCCompiler.find_library_file.__dict__.__setitem__('stypy_param_names_list', ['dirs', 'lib', 'debug'])
        EMXCCompiler.find_library_file.__dict__.__setitem__('stypy_varargs_param_name', None)
        EMXCCompiler.find_library_file.__dict__.__setitem__('stypy_kwargs_param_name', None)
        EMXCCompiler.find_library_file.__dict__.__setitem__('stypy_call_defaults', defaults)
        EMXCCompiler.find_library_file.__dict__.__setitem__('stypy_call_varargs', varargs)
        EMXCCompiler.find_library_file.__dict__.__setitem__('stypy_call_kwargs', kwargs)
        EMXCCompiler.find_library_file.__dict__.__setitem__('stypy_declared_arg_number', 4)
        arguments = process_argument_values(localization, type_of_self, module_type_store, 'EMXCCompiler.find_library_file', ['dirs', 'lib', 'debug'], None, None, defaults, varargs, kwargs)
        if is_error_type(arguments):
            # Destroy the current context
            module_type_store = module_type_store.close_function_context()
            return arguments
        # Initialize method data
        init_call_information(module_type_store, 'find_library_file', localization, ['dirs', 'lib', 'debug'], arguments)
        # Default return type storage variable (SSA)
        # Assigning a type to the variable 'stypy_return_type'
        module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'stypy_return_type', None)
        # ################# Begin of 'find_library_file(...)' code ##################
        # --- shortlib = '%s.lib' % lib ---
        # Assigning a BinOp to a Name (line 211):
        # Assigning a BinOp to a Name (line 211):
        str_3497 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 211, 19), 'str', '%s.lib')
        # Getting the type of 'lib' (line 211)
        lib_3498 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 211, 30), 'lib')
        # Applying the binary operator '%' (line 211)
        result_mod_3499 = python_operator(stypy.reporting.localization.Localization(__file__, 211, 19), '%', str_3497, lib_3498)
        # Assigning a type to the variable 'shortlib' (line 211)
        module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 211, 8), 'shortlib', result_mod_3499)
        # --- longlib = 'lib%s.lib' % lib ---
        # Assigning a BinOp to a Name (line 212):
        # Assigning a BinOp to a Name (line 212):
        str_3500 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 212, 18), 'str', 'lib%s.lib')
        # Getting the type of 'lib' (line 212)
        lib_3501 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 212, 32), 'lib')
        # Applying the binary operator '%' (line 212)
        result_mod_3502 = python_operator(stypy.reporting.localization.Localization(__file__, 212, 18), '%', str_3500, lib_3501)
        # Assigning a type to the variable 'longlib' (line 212)
        module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 212, 8), 'longlib', result_mod_3502)
        # --- try: emx_dirs = os.environ['LIBRARY_PATH'].split(';')  except KeyError: emx_dirs = [] ---
        # SSA begins for try-except statement (line 215)
        module_type_store = SSAContext.create_ssa_context(module_type_store, 'try-except')
        # Assigning a Call to a Name (line 216):
        # Assigning a Call to a Name (line 216):
        # Call to split(...): (line 216)
        # Processing the call arguments (line 216)
        str_3509 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 216, 56), 'str', ';')
        # Processing the call keyword arguments (line 216)
        kwargs_3510 = {}
        # Obtaining the type of the subscript
        str_3503 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 216, 34), 'str', 'LIBRARY_PATH')
        # Getting the type of 'os' (line 216)
        os_3504 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 216, 23), 'os', False)
        # Obtaining the member 'environ' of a type (line 216)
        environ_3505 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 216, 23), os_3504, 'environ')
        # Obtaining the member '__getitem__' of a type (line 216)
        getitem___3506 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 216, 23), environ_3505, '__getitem__')
        # Calling the subscript (__getitem__) to obtain the elements type (line 216)
        subscript_call_result_3507 = invoke(stypy.reporting.localization.Localization(__file__, 216, 23), getitem___3506, str_3503)
        # Obtaining the member 'split' of a type (line 216)
        split_3508 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 216, 23), subscript_call_result_3507, 'split')
        # Calling split(args, kwargs) (line 216)
        split_call_result_3511 = invoke(stypy.reporting.localization.Localization(__file__, 216, 23), split_3508, *[str_3509], **kwargs_3510)
        # Assigning a type to the variable 'emx_dirs' (line 216)
        module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 216, 12), 'emx_dirs', split_call_result_3511)
        # SSA branch for the except part of a try statement (line 215)
        # SSA branch for the except 'KeyError' branch of a try statement (line 215)
        module_type_store.open_ssa_branch('except')
        # Assigning a List to a Name (line 218):
        # Assigning a List to a Name (line 218):
        # Obtaining an instance of the builtin type 'list' (line 218)
        list_3512 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 218, 23), 'list')
        # Adding type elements to the builtin type 'list' instance (line 218)
        # Assigning a type to the variable 'emx_dirs' (line 218)
        module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 218, 12), 'emx_dirs', list_3512)
        # SSA join for try-except statement (line 215)
        module_type_store = module_type_store.join_ssa_context()
        # --- for dir in dirs + emx_dirs: ---
        # Getting the type of 'dirs' (line 220)
        dirs_3513 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 220, 19), 'dirs')
        # Getting the type of 'emx_dirs' (line 220)
        emx_dirs_3514 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 220, 26), 'emx_dirs')
        # Applying the binary operator '+' (line 220)
        result_add_3515 = python_operator(stypy.reporting.localization.Localization(__file__, 220, 19), '+', dirs_3513, emx_dirs_3514)
        # Testing the type of a for loop iterable (line 220)
        is_suitable_for_loop_condition(stypy.reporting.localization.Localization(__file__, 220, 8), result_add_3515)
        # Getting the type of the for loop variable (line 220)
        for_loop_var_3516 = get_type_of_for_loop_variable(stypy.reporting.localization.Localization(__file__, 220, 8), result_add_3515)
        # Assigning a type to the variable 'dir' (line 220)
        module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 220, 8), 'dir', for_loop_var_3516)
        # SSA begins for a for statement (line 220)
        module_type_store = SSAContext.create_ssa_context(module_type_store, 'for loop')
        # --- shortlibp = os.path.join(dir, shortlib) ---
        # Assigning a Call to a Name (line 221):
        # Assigning a Call to a Name (line 221):
        # Call to join(...): (line 221)
        # Processing the call arguments (line 221)
        # Getting the type of 'dir' (line 221)
        dir_3520 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 221, 37), 'dir', False)
        # Getting the type of 'shortlib' (line 221)
        shortlib_3521 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 221, 42), 'shortlib', False)
        # Processing the call keyword arguments (line 221)
        kwargs_3522 = {}
        # Getting the type of 'os' (line 221)
        os_3517 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 221, 24), 'os', False)
        # Obtaining the member 'path' of a type (line 221)
        path_3518 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 221, 24), os_3517, 'path')
        # Obtaining the member 'join' of a type (line 221)
        join_3519 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 221, 24), path_3518, 'join')
        # Calling join(args, kwargs) (line 221)
        join_call_result_3523 = invoke(stypy.reporting.localization.Localization(__file__, 221, 24), join_3519, *[dir_3520, shortlib_3521], **kwargs_3522)
        # Assigning a type to the variable 'shortlibp' (line 221)
        module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 221, 12), 'shortlibp', join_call_result_3523)
        # --- longlibp = os.path.join(dir, longlib) ---
        # Assigning a Call to a Name (line 222):
        # Assigning a Call to a Name (line 222):
        # Call to join(...): (line 222)
        # Processing the call arguments (line 222)
        # Getting the type of 'dir' (line 222)
        dir_3527 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 222, 36), 'dir', False)
        # Getting the type of 'longlib' (line 222)
        longlib_3528 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 222, 41), 'longlib', False)
        # Processing the call keyword arguments (line 222)
        kwargs_3529 = {}
        # Getting the type of 'os' (line 222)
        os_3524 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 222, 23), 'os', False)
        # Obtaining the member 'path' of a type (line 222)
        path_3525 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 222, 23), os_3524, 'path')
        # Obtaining the member 'join' of a type (line 222)
        join_3526 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 222, 23), path_3525, 'join')
        # Calling join(args, kwargs) (line 222)
        join_call_result_3530 = invoke(stypy.reporting.localization.Localization(__file__, 222, 23), join_3526, *[dir_3527, longlib_3528], **kwargs_3529)
        # Assigning a type to the variable 'longlibp' (line 222)
        module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 222, 12), 'longlibp', join_call_result_3530)
        # --- if os.path.exists(shortlibp): return shortlibp ---
        # Call to exists(...): (line 223)
        # Processing the call arguments (line 223)
        # Getting the type of 'shortlibp' (line 223)
        shortlibp_3534 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 223, 30), 'shortlibp', False)
        # Processing the call keyword arguments (line 223)
        kwargs_3535 = {}
        # Getting the type of 'os' (line 223)
        os_3531 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 223, 15), 'os', False)
        # Obtaining the member 'path' of a type (line 223)
        path_3532 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 223, 15), os_3531, 'path')
        # Obtaining the member 'exists' of a type (line 223)
        exists_3533 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 223, 15), path_3532, 'exists')
        # Calling exists(args, kwargs) (line 223)
        exists_call_result_3536 = invoke(stypy.reporting.localization.Localization(__file__, 223, 15), exists_3533, *[shortlibp_3534], **kwargs_3535)
        # Testing the type of an if condition (line 223)
        if_condition_3537 = is_suitable_condition(stypy.reporting.localization.Localization(__file__, 223, 12), exists_call_result_3536)
        # Assigning a type to the variable 'if_condition_3537' (line 223)
        module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 223, 12), 'if_condition_3537', if_condition_3537)
        # SSA begins for if statement (line 223)
        module_type_store = SSAContext.create_ssa_context(module_type_store, 'if')
        # Getting the type of 'shortlibp' (line 224)
        shortlibp_3538 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 224, 23), 'shortlibp')
        # Assigning a type to the variable 'stypy_return_type' (line 224)
        module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 224, 16), 'stypy_return_type', shortlibp_3538)
        # --- elif os.path.exists(longlibp): return longlibp ---
        # SSA branch for the else part of an if statement (line 223)
        module_type_store.open_ssa_branch('else')
        # Call to exists(...): (line 225)
        # Processing the call arguments (line 225)
        # Getting the type of 'longlibp' (line 225)
        longlibp_3542 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 225, 32), 'longlibp', False)
        # Processing the call keyword arguments (line 225)
        kwargs_3543 = {}
        # Getting the type of 'os' (line 225)
        os_3539 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 225, 17), 'os', False)
        # Obtaining the member 'path' of a type (line 225)
        path_3540 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 225, 17), os_3539, 'path')
        # Obtaining the member 'exists' of a type (line 225)
        exists_3541 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 225, 17), path_3540, 'exists')
        # Calling exists(args, kwargs) (line 225)
        exists_call_result_3544 = invoke(stypy.reporting.localization.Localization(__file__, 225, 17), exists_3541, *[longlibp_3542], **kwargs_3543)
        # Testing the type of an if condition (line 225)
        if_condition_3545 = is_suitable_condition(stypy.reporting.localization.Localization(__file__, 225, 17), exists_call_result_3544)
        # Assigning a type to the variable 'if_condition_3545' (line 225)
        module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 225, 17), 'if_condition_3545', if_condition_3545)
        # SSA begins for if statement (line 225)
        module_type_store = SSAContext.create_ssa_context(module_type_store, 'if')
        # Getting the type of 'longlibp' (line 226)
        longlibp_3546 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 226, 23), 'longlibp')
        # Assigning a type to the variable 'stypy_return_type' (line 226)
        module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 226, 16), 'stypy_return_type', longlibp_3546)
        # SSA join for if statement (line 225)
        module_type_store = module_type_store.join_ssa_context()
        # SSA join for if statement (line 223)
        module_type_store = module_type_store.join_ssa_context()
        # SSA join for a for statement
        module_type_store = module_type_store.join_ssa_context()
        # --- fall-through: return None ---
        # Getting the type of 'None' (line 229)
        None_3547 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 229, 15), 'None')
        # Assigning a type to the variable 'stypy_return_type' (line 229)
        module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 229, 8), 'stypy_return_type', None_3547)
        # ################# End of 'find_library_file(...)' code ##################
        # Teardown call information
        teardown_call_information(localization, arguments)
        # Storing the return type of function 'find_library_file' in the type store
        # Getting the type of 'stypy_return_type' (line 210)
        stypy_return_type_3548 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 210, 4), 'stypy_return_type')
        module_type_store.store_return_type_of_current_context(stypy_return_type_3548)
        # Destroy the current context
        module_type_store = module_type_store.close_function_context()
        # Return type of the function 'find_library_file'
        return stypy_return_type_3548
# -----------------------------------------------------------------------------
# Module-level registration (stypy-generated).
# Publishes the EMXCCompiler class in the module type store, then records each
# of its class-level string attributes (compiler_type, extensions, lib formats)
# exactly as the original emxccompiler.py declares them on lines 33-40, and
# finally the module-level CONFIG_H_* status constants (original lines 238-240).
# NOTE(review): order-dependent generated code — each set_type_of_member call
# mutates the shared module_type_store; do not reorder.
# -----------------------------------------------------------------------------
# Assigning a type to the variable 'EMXCCompiler' (line 31)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 31, 0), 'EMXCCompiler', EMXCCompiler)
# --- EMXCCompiler.compiler_type = 'emx' ---
# Assigning a Str to a Name (line 33):
str_3549 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 33, 20), 'str', 'emx')
# Getting the type of 'EMXCCompiler'
EMXCCompiler_3550 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'EMXCCompiler')
# Setting the type of the member 'compiler_type' of a type
module_type_store.set_type_of_member(stypy.reporting.localization.Localization(__file__, 0, 0), EMXCCompiler_3550, 'compiler_type', str_3549)
# --- EMXCCompiler.obj_extension = '.obj' ---
# Assigning a Str to a Name (line 34):
str_3551 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 34, 20), 'str', '.obj')
# Getting the type of 'EMXCCompiler'
EMXCCompiler_3552 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'EMXCCompiler')
# Setting the type of the member 'obj_extension' of a type
module_type_store.set_type_of_member(stypy.reporting.localization.Localization(__file__, 0, 0), EMXCCompiler_3552, 'obj_extension', str_3551)
# --- EMXCCompiler.static_lib_extension = '.lib' ---
# Assigning a Str to a Name (line 35):
str_3553 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 35, 27), 'str', '.lib')
# Getting the type of 'EMXCCompiler'
EMXCCompiler_3554 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'EMXCCompiler')
# Setting the type of the member 'static_lib_extension' of a type
module_type_store.set_type_of_member(stypy.reporting.localization.Localization(__file__, 0, 0), EMXCCompiler_3554, 'static_lib_extension', str_3553)
# --- EMXCCompiler.shared_lib_extension = '.dll' ---
# Assigning a Str to a Name (line 36):
str_3555 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 36, 27), 'str', '.dll')
# Getting the type of 'EMXCCompiler'
EMXCCompiler_3556 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'EMXCCompiler')
# Setting the type of the member 'shared_lib_extension' of a type
module_type_store.set_type_of_member(stypy.reporting.localization.Localization(__file__, 0, 0), EMXCCompiler_3556, 'shared_lib_extension', str_3555)
# --- EMXCCompiler.static_lib_format = '%s%s' ---
# Assigning a Str to a Name (line 37):
str_3557 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 37, 24), 'str', '%s%s')
# Getting the type of 'EMXCCompiler'
EMXCCompiler_3558 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'EMXCCompiler')
# Setting the type of the member 'static_lib_format' of a type
module_type_store.set_type_of_member(stypy.reporting.localization.Localization(__file__, 0, 0), EMXCCompiler_3558, 'static_lib_format', str_3557)
# --- EMXCCompiler.shared_lib_format = '%s%s' ---
# Assigning a Str to a Name (line 38):
str_3559 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 38, 24), 'str', '%s%s')
# Getting the type of 'EMXCCompiler'
EMXCCompiler_3560 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'EMXCCompiler')
# Setting the type of the member 'shared_lib_format' of a type
module_type_store.set_type_of_member(stypy.reporting.localization.Localization(__file__, 0, 0), EMXCCompiler_3560, 'shared_lib_format', str_3559)
# --- EMXCCompiler.res_extension = '.res' ---
# Assigning a Str to a Name (line 39):
str_3561 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 39, 20), 'str', '.res')
# Getting the type of 'EMXCCompiler'
EMXCCompiler_3562 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'EMXCCompiler')
# Setting the type of the member 'res_extension' of a type
module_type_store.set_type_of_member(stypy.reporting.localization.Localization(__file__, 0, 0), EMXCCompiler_3562, 'res_extension', str_3561)
# --- EMXCCompiler.exe_extension = '.exe' ---
# Assigning a Str to a Name (line 40):
str_3563 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 40, 20), 'str', '.exe')
# Getting the type of 'EMXCCompiler'
EMXCCompiler_3564 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'EMXCCompiler')
# Setting the type of the member 'exe_extension' of a type
module_type_store.set_type_of_member(stypy.reporting.localization.Localization(__file__, 0, 0), EMXCCompiler_3564, 'exe_extension', str_3563)
# --- CONFIG_H_OK = 'ok' ---
# Assigning a Str to a Name (line 238):
# Assigning a Str to a Name (line 238):
str_3565 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 238, 14), 'str', 'ok')
# Assigning a type to the variable 'CONFIG_H_OK' (line 238)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 238, 0), 'CONFIG_H_OK', str_3565)
# --- CONFIG_H_NOTOK = 'not ok' ---
# Assigning a Str to a Name (line 239):
# Assigning a Str to a Name (line 239):
str_3566 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 239, 17), 'str', 'not ok')
# Assigning a type to the variable 'CONFIG_H_NOTOK' (line 239)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 239, 0), 'CONFIG_H_NOTOK', str_3566)
# --- CONFIG_H_UNCERTAIN = 'uncertain' ---
# Assigning a Str to a Name (line 240):
# Assigning a Str to a Name (line 240):
str_3567 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 240, 21), 'str', 'uncertain')
# Assigning a type to the variable 'CONFIG_H_UNCERTAIN' (line 240)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 240, 0), 'CONFIG_H_UNCERTAIN', str_3567)
@norecursion
def check_config_h(localization, *varargs, **kwargs):
global module_type_store
# Assign values to the parameters with defaults
defaults = []
# Create a new context for function 'check_config_h'
module_type_store = module_type_store.open_function_context('check_config_h', 242, 0, False)
# Passed parameters checking function
check_config_h.stypy_localization = localization
check_config_h.stypy_type_of_self = None
check_config_h.stypy_type_store = module_type_store
check_config_h.stypy_function_name = 'check_config_h'
check_config_h.stypy_param_names_list = []
check_config_h.stypy_varargs_param_name = None
check_config_h.stypy_kwargs_param_name = None
check_config_h.stypy_call_defaults = defaults
check_config_h.stypy_call_varargs = varargs
check_config_h.stypy_call_kwargs = kwargs
arguments = process_argument_values(localization, None, module_type_store, 'check_config_h', [], None, None, defaults, varargs, kwargs)
if is_error_type(arguments):
# Destroy the current context
module_type_store = module_type_store.close_function_context()
return arguments
# Initialize method data
init_call_information(module_type_store, 'check_config_h', localization, [], arguments)
# Default return type storage variable (SSA)
# Assigning a type to the variable 'stypy_return_type'
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'stypy_return_type', None)
# ################# Begin of 'check_config_h(...)' code ##################
str_3568 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 258, (-1)), 'str', 'Check if the current Python installation (specifically, pyconfig.h)\n appears amenable to building extensions with GCC. Returns a tuple\n (status, details), where \'status\' is one of the following constants:\n CONFIG_H_OK\n all is well, go ahead and compile\n CONFIG_H_NOTOK\n doesn\'t look good\n CONFIG_H_UNCERTAIN\n not sure -- unable to read pyconfig.h\n \'details\' is a human-readable string explaining the situation.\n\n Note there are two ways to conclude "OK": either \'sys.version\' contains\n the string "GCC" (implying that this Python was built with GCC), or the\n installed "pyconfig.h" contains the string "__GNUC__".\n ')
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 263, 4))
# 'from distutils import sysconfig' statement (line 263)
try:
from distutils import sysconfig
except:
sysconfig = UndefinedType
import_from_module(stypy.reporting.localization.Localization(__file__, 263, 4), 'distutils', None, module_type_store, ['sysconfig'], [sysconfig])
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 264, 4))
# 'import string' statement (line 264)
import string
import_module(stypy.reporting.localization.Localization(__file__, 264, 4), 'string', string, module_type_store)
# Call to find(...): (line 267)
# Processing the call arguments (line 267)
# Getting the type of 'sys' (line 267)
sys_3571 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 267, 19), 'sys', False)
# Obtaining the member 'version' of a type (line 267)
version_3572 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 267, 19), sys_3571, 'version')
str_3573 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 267, 31), 'str', 'GCC')
# Processing the call keyword arguments (line 267)
kwargs_3574 = {}
# Getting the type of 'string' (line 267)
string_3569 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 267, 7), 'string', False)
# Obtaining the member 'find' of a type (line 267)
find_3570 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 267, 7), string_3569, 'find')
# Calling find(args, kwargs) (line 267)
find_call_result_3575 = invoke(stypy.reporting.localization.Localization(__file__, 267, 7), find_3570, *[version_3572, str_3573], **kwargs_3574)
int_3576 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 267, 41), 'int')
# Applying the binary operator '>=' (line 267)
result_ge_3577 = python_operator(stypy.reporting.localization.Localization(__file__, 267, 7), '>=', find_call_result_3575, int_3576)
# Testing the type of an if condition (line 267)
if_condition_3578 = is_suitable_condition(stypy.reporting.localization.Localization(__file__, 267, 4), result_ge_3577)
# Assigning a type to the variable 'if_condition_3578' (line 267)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 267, 4), 'if_condition_3578', if_condition_3578)
# SSA begins for if statement (line 267)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'if')
# Obtaining an instance of the builtin type 'tuple' (line 268)
tuple_3579 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 268, 16), 'tuple')
# Adding type elements to the builtin type 'tuple' instance (line 268)
# Adding element type (line 268)
# Getting the type of 'CONFIG_H_OK' (line 268)
CONFIG_H_OK_3580 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 268, 16), 'CONFIG_H_OK')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 268, 16), tuple_3579, CONFIG_H_OK_3580)
# Adding element type (line 268)
str_3581 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 268, 29), 'str', "sys.version mentions 'GCC'")
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 268, 16), tuple_3579, str_3581)
# Assigning a type to the variable 'stypy_return_type' (line 268)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 268, 8), 'stypy_return_type', tuple_3579)
# SSA join for if statement (line 267)
module_type_store = module_type_store.join_ssa_context()
# Assigning a Call to a Name (line 270):
# Assigning a Call to a Name (line 270):
# Call to get_config_h_filename(...): (line 270)
# Processing the call keyword arguments (line 270)
kwargs_3584 = {}
# Getting the type of 'sysconfig' (line 270)
sysconfig_3582 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 270, 9), 'sysconfig', False)
# Obtaining the member 'get_config_h_filename' of a type (line 270)
get_config_h_filename_3583 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 270, 9), sysconfig_3582, 'get_config_h_filename')
# Calling get_config_h_filename(args, kwargs) (line 270)
get_config_h_filename_call_result_3585 = invoke(stypy.reporting.localization.Localization(__file__, 270, 9), get_config_h_filename_3583, *[], **kwargs_3584)
# Assigning a type to the variable 'fn' (line 270)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 270, 4), 'fn', get_config_h_filename_call_result_3585)
# SSA begins for try-except statement (line 271)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'try-except')
# Assigning a Call to a Name (line 274):
# Assigning a Call to a Name (line 274):
# Call to open(...): (line 274)
# Processing the call arguments (line 274)
# Getting the type of 'fn' (line 274)
fn_3587 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 274, 17), 'fn', False)
# Processing the call keyword arguments (line 274)
kwargs_3588 = {}
# Getting the type of 'open' (line 274)
open_3586 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 274, 12), 'open', False)
# Calling open(args, kwargs) (line 274)
open_call_result_3589 = invoke(stypy.reporting.localization.Localization(__file__, 274, 12), open_3586, *[fn_3587], **kwargs_3588)
# Assigning a type to the variable 'f' (line 274)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 274, 8), 'f', open_call_result_3589)
# Try-finally block (line 275)
# Assigning a Call to a Name (line 276):
# Assigning a Call to a Name (line 276):
# Call to read(...): (line 276)
# Processing the call keyword arguments (line 276)
kwargs_3592 = {}
# Getting the type of 'f' (line 276)
f_3590 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 276, 16), 'f', False)
# Obtaining the member 'read' of a type (line 276)
read_3591 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 276, 16), f_3590, 'read')
# Calling read(args, kwargs) (line 276)
read_call_result_3593 = invoke(stypy.reporting.localization.Localization(__file__, 276, 16), read_3591, *[], **kwargs_3592)
# Assigning a type to the variable 's' (line 276)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 276, 12), 's', read_call_result_3593)
# finally branch of the try-finally block (line 275)
# Call to close(...): (line 278)
# Processing the call keyword arguments (line 278)
kwargs_3596 = {}
# Getting the type of 'f' (line 278)
f_3594 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 278, 12), 'f', False)
# Obtaining the member 'close' of a type (line 278)
close_3595 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 278, 12), f_3594, 'close')
# Calling close(args, kwargs) (line 278)
close_call_result_3597 = invoke(stypy.reporting.localization.Localization(__file__, 278, 12), close_3595, *[], **kwargs_3596)
# SSA branch for the except part of a try statement (line 271)
# SSA branch for the except 'IOError' branch of a try statement (line 271)
# Storing handler type
module_type_store.open_ssa_branch('except')
# Getting the type of 'IOError' (line 280)
IOError_3598 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 280, 11), 'IOError')
# Assigning a type to the variable 'exc' (line 280)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 280, 4), 'exc', IOError_3598)
# Obtaining an instance of the builtin type 'tuple' (line 283)
tuple_3599 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 283, 16), 'tuple')
# Adding type elements to the builtin type 'tuple' instance (line 283)
# Adding element type (line 283)
# Getting the type of 'CONFIG_H_UNCERTAIN' (line 283)
CONFIG_H_UNCERTAIN_3600 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 283, 16), 'CONFIG_H_UNCERTAIN')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 283, 16), tuple_3599, CONFIG_H_UNCERTAIN_3600)
# Adding element type (line 283)
str_3601 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 284, 16), 'str', "couldn't read '%s': %s")
# Obtaining an instance of the builtin type 'tuple' (line 284)
tuple_3602 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 284, 44), 'tuple')
# Adding type elements to the builtin type 'tuple' instance (line 284)
# Adding element type (line 284)
# Getting the type of 'fn' (line 284)
fn_3603 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 284, 44), 'fn')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 284, 44), tuple_3602, fn_3603)
# Adding element type (line 284)
# Getting the type of 'exc' (line 284)
exc_3604 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 284, 48), 'exc')
# Obtaining the member 'strerror' of a type (line 284)
strerror_3605 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 284, 48), exc_3604, 'strerror')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 284, 44), tuple_3602, strerror_3605)
# Applying the binary operator '%' (line 284)
result_mod_3606 = python_operator(stypy.reporting.localization.Localization(__file__, 284, 16), '%', str_3601, tuple_3602)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 283, 16), tuple_3599, result_mod_3606)
# Assigning a type to the variable 'stypy_return_type' (line 283)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 283, 8), 'stypy_return_type', tuple_3599)
# SSA branch for the else branch of a try statement (line 271)
module_type_store.open_ssa_branch('except else')
# Call to find(...): (line 288)
# Processing the call arguments (line 288)
# Getting the type of 's' (line 288)
s_3609 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 288, 23), 's', False)
str_3610 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 288, 25), 'str', '__GNUC__')
# Processing the call keyword arguments (line 288)
kwargs_3611 = {}
# Getting the type of 'string' (line 288)
string_3607 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 288, 11), 'string', False)
# Obtaining the member 'find' of a type (line 288)
find_3608 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 288, 11), string_3607, 'find')
# Calling find(args, kwargs) (line 288)
find_call_result_3612 = invoke(stypy.reporting.localization.Localization(__file__, 288, 11), find_3608, *[s_3609, str_3610], **kwargs_3611)
int_3613 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 288, 40), 'int')
# Applying the binary operator '>=' (line 288)
result_ge_3614 = python_operator(stypy.reporting.localization.Localization(__file__, 288, 11), '>=', find_call_result_3612, int_3613)
# Testing the type of an if condition (line 288)
if_condition_3615 = is_suitable_condition(stypy.reporting.localization.Localization(__file__, 288, 8), result_ge_3614)
# Assigning a type to the variable 'if_condition_3615' (line 288)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 288, 8), 'if_condition_3615', if_condition_3615)
# SSA begins for if statement (line 288)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'if')
# Obtaining an instance of the builtin type 'tuple' (line 289)
tuple_3616 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 289, 20), 'tuple')
# Adding type elements to the builtin type 'tuple' instance (line 289)
# Adding element type (line 289)
# Getting the type of 'CONFIG_H_OK' (line 289)
CONFIG_H_OK_3617 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 289, 20), 'CONFIG_H_OK')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 289, 20), tuple_3616, CONFIG_H_OK_3617)
# Adding element type (line 289)
str_3618 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 289, 33), 'str', "'%s' mentions '__GNUC__'")
# Getting the type of 'fn' (line 289)
fn_3619 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 289, 62), 'fn')
# Applying the binary operator '%' (line 289)
result_mod_3620 = python_operator(stypy.reporting.localization.Localization(__file__, 289, 33), '%', str_3618, fn_3619)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 289, 20), tuple_3616, result_mod_3620)
# Assigning a type to the variable 'stypy_return_type' (line 289)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 289, 12), 'stypy_return_type', tuple_3616)
# SSA branch for the else part of an if statement (line 288)
module_type_store.open_ssa_branch('else')
# Obtaining an instance of the builtin type 'tuple' (line 291)
tuple_3621 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 291, 20), 'tuple')
# Adding type elements to the builtin type 'tuple' instance (line 291)
# Adding element type (line 291)
# Getting the type of 'CONFIG_H_NOTOK' (line 291)
CONFIG_H_NOTOK_3622 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 291, 20), 'CONFIG_H_NOTOK')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 291, 20), tuple_3621, CONFIG_H_NOTOK_3622)
# Adding element type (line 291)
str_3623 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 291, 36), 'str', "'%s' does not mention '__GNUC__'")
# Getting the type of 'fn' (line 291)
fn_3624 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 291, 73), 'fn')
# Applying the binary operator '%' (line 291)
result_mod_3625 = python_operator(stypy.reporting.localization.Localization(__file__, 291, 36), '%', str_3623, fn_3624)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 291, 20), tuple_3621, result_mod_3625)
# Assigning a type to the variable 'stypy_return_type' (line 291)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 291, 12), 'stypy_return_type', tuple_3621)
# SSA join for if statement (line 288)
module_type_store = module_type_store.join_ssa_context()
# SSA join for try-except statement (line 271)
module_type_store = module_type_store.join_ssa_context()
# ################# End of 'check_config_h(...)' code ##################
# Teardown call information
teardown_call_information(localization, arguments)
# Storing the return type of function 'check_config_h' in the type store
# Getting the type of 'stypy_return_type' (line 242)
stypy_return_type_3626 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 242, 0), 'stypy_return_type')
module_type_store.store_return_type_of_current_context(stypy_return_type_3626)
# Destroy the current context
module_type_store = module_type_store.close_function_context()
# Return type of the function 'check_config_h'
return stypy_return_type_3626
# Assigning a type to the variable 'check_config_h' (line 242)
# Registers the just-defined type-inference shim under the original function
# name in the module-level type store so later lookups of 'check_config_h'
# resolve to it.
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 242, 0), 'check_config_h', check_config_h)
@norecursion
def get_versions(localization, *varargs, **kwargs):
    """Stypy type-inference shim for distutils.cygwinccompiler.get_versions.

    Machine-generated: opens a function-level type-inference context,
    mirrors the original function's imports, records the inferred type of
    every intermediate expression in the module type store, and finally
    returns the inferred return type (a tuple of gcc_version, ld_version).
    NOTE(review): do not hand-edit the numbered SSA temporaries (``*_3627``
    etc.) -- they are emitted by the stypy generator and must stay in order.
    """
    global module_type_store
    # Assign values to the parameters with defaults
    defaults = []
    # Create a new context for function 'get_versions'
    module_type_store = module_type_store.open_function_context('get_versions', 294, 0, False)
    # Passed parameters checking function
    get_versions.stypy_localization = localization
    get_versions.stypy_type_of_self = None
    get_versions.stypy_type_store = module_type_store
    get_versions.stypy_function_name = 'get_versions'
    get_versions.stypy_param_names_list = []
    get_versions.stypy_varargs_param_name = None
    get_versions.stypy_kwargs_param_name = None
    get_versions.stypy_call_defaults = defaults
    get_versions.stypy_call_varargs = varargs
    get_versions.stypy_call_kwargs = kwargs
    arguments = process_argument_values(localization, None, module_type_store, 'get_versions', [], None, None, defaults, varargs, kwargs)
    if is_error_type(arguments):
        # Destroy the current context
        module_type_store = module_type_store.close_function_context()
        return arguments
    # Initialize method data
    init_call_information(module_type_store, 'get_versions', localization, [], arguments)
    # Default return type storage variable (SSA)
    # Assigning a type to the variable 'stypy_return_type'
    module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'stypy_return_type', None)
    # ################# Begin of 'get_versions(...)' code ##################
    str_3627 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 297, (-1)), 'str', ' Try to find out the versions of gcc and ld.\n        If not possible it returns None for it.\n    ')
    stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 298, 4))
    # 'from distutils.version import StrictVersion' statement (line 298)
    # NOTE(review): the path below is hard-coded to the Windows Python 2.7
    # install this file was generated on; regenerating elsewhere changes it.
    update_path_to_current_file_folder('C:/Python27/lib/distutils/')
    import_3628 = generate_type_inference_code_for_module(stypy.reporting.localization.Localization(__file__, 298, 4), 'distutils.version')
    if (type(import_3628) is not StypyTypeError):
        if (import_3628 != 'pyd_module'):
            __import__(import_3628)
            sys_modules_3629 = sys.modules[import_3628]
            import_from_module(stypy.reporting.localization.Localization(__file__, 298, 4), 'distutils.version', sys_modules_3629.module_type_store, module_type_store, ['StrictVersion'])
            nest_module(stypy.reporting.localization.Localization(__file__, 298, 4), __file__, sys_modules_3629, sys_modules_3629.module_type_store, module_type_store)
        else:
            from distutils.version import StrictVersion
            import_from_module(stypy.reporting.localization.Localization(__file__, 298, 4), 'distutils.version', None, module_type_store, ['StrictVersion'], [StrictVersion])
    else:
        # Assigning a type to the variable 'distutils.version' (line 298)
        module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 298, 4), 'distutils.version', import_3628)
    remove_current_file_folder_from_path('C:/Python27/lib/distutils/')
    stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 299, 4))
    # 'from distutils.spawn import find_executable' statement (line 299)
    update_path_to_current_file_folder('C:/Python27/lib/distutils/')
    import_3630 = generate_type_inference_code_for_module(stypy.reporting.localization.Localization(__file__, 299, 4), 'distutils.spawn')
    if (type(import_3630) is not StypyTypeError):
        if (import_3630 != 'pyd_module'):
            __import__(import_3630)
            sys_modules_3631 = sys.modules[import_3630]
            import_from_module(stypy.reporting.localization.Localization(__file__, 299, 4), 'distutils.spawn', sys_modules_3631.module_type_store, module_type_store, ['find_executable'])
            nest_module(stypy.reporting.localization.Localization(__file__, 299, 4), __file__, sys_modules_3631, sys_modules_3631.module_type_store, module_type_store)
        else:
            from distutils.spawn import find_executable
            import_from_module(stypy.reporting.localization.Localization(__file__, 299, 4), 'distutils.spawn', None, module_type_store, ['find_executable'], [find_executable])
    else:
        # Assigning a type to the variable 'distutils.spawn' (line 299)
        module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 299, 4), 'distutils.spawn', import_3630)
    remove_current_file_folder_from_path('C:/Python27/lib/distutils/')
    stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 300, 4))
    # 'import re' statement (line 300)
    import re
    import_module(stypy.reporting.localization.Localization(__file__, 300, 4), 're', re, module_type_store)
    # Assigning a Call to a Name (line 302):
    # Assigning a Call to a Name (line 302):
    # Call to find_executable(...): (line 302)
    # Processing the call arguments (line 302)
    str_3633 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 302, 30), 'str', 'gcc')
    # Processing the call keyword arguments (line 302)
    kwargs_3634 = {}
    # Getting the type of 'find_executable' (line 302)
    find_executable_3632 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 302, 14), 'find_executable', False)
    # Calling find_executable(args, kwargs) (line 302)
    find_executable_call_result_3635 = invoke(stypy.reporting.localization.Localization(__file__, 302, 14), find_executable_3632, *[str_3633], **kwargs_3634)
    # Assigning a type to the variable 'gcc_exe' (line 302)
    module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 302, 4), 'gcc_exe', find_executable_call_result_3635)
    # Getting the type of 'gcc_exe' (line 303)
    gcc_exe_3636 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 303, 7), 'gcc_exe')
    # Testing the type of an if condition (line 303)
    if_condition_3637 = is_suitable_condition(stypy.reporting.localization.Localization(__file__, 303, 4), gcc_exe_3636)
    # Assigning a type to the variable 'if_condition_3637' (line 303)
    module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 303, 4), 'if_condition_3637', if_condition_3637)
    # SSA begins for if statement (line 303)
    module_type_store = SSAContext.create_ssa_context(module_type_store, 'if')
    # Assigning a Call to a Name (line 304):
    # Assigning a Call to a Name (line 304):
    # Call to popen(...): (line 304)
    # Processing the call arguments (line 304)
    # Getting the type of 'gcc_exe' (line 304)
    gcc_exe_3640 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 304, 23), 'gcc_exe', False)
    str_3641 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 304, 33), 'str', ' -dumpversion')
    # Applying the binary operator '+' (line 304)
    result_add_3642 = python_operator(stypy.reporting.localization.Localization(__file__, 304, 23), '+', gcc_exe_3640, str_3641)
    str_3643 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 304, 49), 'str', 'r')
    # Processing the call keyword arguments (line 304)
    kwargs_3644 = {}
    # Getting the type of 'os' (line 304)
    os_3638 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 304, 14), 'os', False)
    # Obtaining the member 'popen' of a type (line 304)
    popen_3639 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 304, 14), os_3638, 'popen')
    # Calling popen(args, kwargs) (line 304)
    popen_call_result_3645 = invoke(stypy.reporting.localization.Localization(__file__, 304, 14), popen_3639, *[result_add_3642, str_3643], **kwargs_3644)
    # Assigning a type to the variable 'out' (line 304)
    module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 304, 8), 'out', popen_call_result_3645)
    # Try-finally block (line 305)
    # Assigning a Call to a Name (line 306):
    # Assigning a Call to a Name (line 306):
    # Call to read(...): (line 306)
    # Processing the call keyword arguments (line 306)
    kwargs_3648 = {}
    # Getting the type of 'out' (line 306)
    out_3646 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 306, 25), 'out', False)
    # Obtaining the member 'read' of a type (line 306)
    read_3647 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 306, 25), out_3646, 'read')
    # Calling read(args, kwargs) (line 306)
    read_call_result_3649 = invoke(stypy.reporting.localization.Localization(__file__, 306, 25), read_3647, *[], **kwargs_3648)
    # Assigning a type to the variable 'out_string' (line 306)
    module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 306, 12), 'out_string', read_call_result_3649)
    # finally branch of the try-finally block (line 305)
    # Call to close(...): (line 308)
    # Processing the call keyword arguments (line 308)
    kwargs_3652 = {}
    # Getting the type of 'out' (line 308)
    out_3650 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 308, 12), 'out', False)
    # Obtaining the member 'close' of a type (line 308)
    close_3651 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 308, 12), out_3650, 'close')
    # Calling close(args, kwargs) (line 308)
    close_call_result_3653 = invoke(stypy.reporting.localization.Localization(__file__, 308, 12), close_3651, *[], **kwargs_3652)
    # Assigning a Call to a Name (line 309):
    # Assigning a Call to a Name (line 309):
    # Call to search(...): (line 309)
    # Processing the call arguments (line 309)
    str_3656 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 309, 27), 'str', '(\\d+\\.\\d+\\.\\d+)')
    # Getting the type of 'out_string' (line 309)
    out_string_3657 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 309, 45), 'out_string', False)
    # Processing the call keyword arguments (line 309)
    kwargs_3658 = {}
    # Getting the type of 're' (line 309)
    re_3654 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 309, 17), 're', False)
    # Obtaining the member 'search' of a type (line 309)
    search_3655 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 309, 17), re_3654, 'search')
    # Calling search(args, kwargs) (line 309)
    search_call_result_3659 = invoke(stypy.reporting.localization.Localization(__file__, 309, 17), search_3655, *[str_3656, out_string_3657], **kwargs_3658)
    # Assigning a type to the variable 'result' (line 309)
    module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 309, 8), 'result', search_call_result_3659)
    # Getting the type of 'result' (line 310)
    result_3660 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 310, 11), 'result')
    # Testing the type of an if condition (line 310)
    if_condition_3661 = is_suitable_condition(stypy.reporting.localization.Localization(__file__, 310, 8), result_3660)
    # Assigning a type to the variable 'if_condition_3661' (line 310)
    module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 310, 8), 'if_condition_3661', if_condition_3661)
    # SSA begins for if statement (line 310)
    module_type_store = SSAContext.create_ssa_context(module_type_store, 'if')
    # Assigning a Call to a Name (line 311):
    # Assigning a Call to a Name (line 311):
    # Call to StrictVersion(...): (line 311)
    # Processing the call arguments (line 311)
    # Call to group(...): (line 311)
    # Processing the call arguments (line 311)
    int_3665 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 311, 53), 'int')
    # Processing the call keyword arguments (line 311)
    kwargs_3666 = {}
    # Getting the type of 'result' (line 311)
    result_3663 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 311, 40), 'result', False)
    # Obtaining the member 'group' of a type (line 311)
    group_3664 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 311, 40), result_3663, 'group')
    # Calling group(args, kwargs) (line 311)
    group_call_result_3667 = invoke(stypy.reporting.localization.Localization(__file__, 311, 40), group_3664, *[int_3665], **kwargs_3666)
    # Processing the call keyword arguments (line 311)
    kwargs_3668 = {}
    # Getting the type of 'StrictVersion' (line 311)
    StrictVersion_3662 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 311, 26), 'StrictVersion', False)
    # Calling StrictVersion(args, kwargs) (line 311)
    StrictVersion_call_result_3669 = invoke(stypy.reporting.localization.Localization(__file__, 311, 26), StrictVersion_3662, *[group_call_result_3667], **kwargs_3668)
    # Assigning a type to the variable 'gcc_version' (line 311)
    module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 311, 12), 'gcc_version', StrictVersion_call_result_3669)
    # SSA branch for the else part of an if statement (line 310)
    module_type_store.open_ssa_branch('else')
    # Assigning a Name to a Name (line 313):
    # Assigning a Name to a Name (line 313):
    # Getting the type of 'None' (line 313)
    None_3670 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 313, 26), 'None')
    # Assigning a type to the variable 'gcc_version' (line 313)
    module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 313, 12), 'gcc_version', None_3670)
    # SSA join for if statement (line 310)
    module_type_store = module_type_store.join_ssa_context()
    # SSA branch for the else part of an if statement (line 303)
    module_type_store.open_ssa_branch('else')
    # Assigning a Name to a Name (line 315):
    # Assigning a Name to a Name (line 315):
    # Getting the type of 'None' (line 315)
    None_3671 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 315, 22), 'None')
    # Assigning a type to the variable 'gcc_version' (line 315)
    module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 315, 8), 'gcc_version', None_3671)
    # SSA join for if statement (line 303)
    module_type_store = module_type_store.join_ssa_context()
    # Assigning a Name to a Name (line 318):
    # Assigning a Name to a Name (line 318):
    # Getting the type of 'None' (line 318)
    None_3672 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 318, 17), 'None')
    # Assigning a type to the variable 'ld_version' (line 318)
    module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 318, 4), 'ld_version', None_3672)
    # Obtaining an instance of the builtin type 'tuple' (line 319)
    tuple_3673 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 319, 12), 'tuple')
    # Adding type elements to the builtin type 'tuple' instance (line 319)
    # Adding element type (line 319)
    # Getting the type of 'gcc_version' (line 319)
    gcc_version_3674 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 319, 12), 'gcc_version')
    add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 319, 12), tuple_3673, gcc_version_3674)
    # Adding element type (line 319)
    # Getting the type of 'ld_version' (line 319)
    ld_version_3675 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 319, 25), 'ld_version')
    add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 319, 12), tuple_3673, ld_version_3675)
    # Assigning a type to the variable 'stypy_return_type' (line 319)
    module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 319, 4), 'stypy_return_type', tuple_3673)
    # ################# End of 'get_versions(...)' code ##################
    # Teardown call information
    teardown_call_information(localization, arguments)
    # Storing the return type of function 'get_versions' in the type store
    # Getting the type of 'stypy_return_type' (line 294)
    stypy_return_type_3676 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 294, 0), 'stypy_return_type')
    module_type_store.store_return_type_of_current_context(stypy_return_type_3676)
    # Destroy the current context
    module_type_store = module_type_store.close_function_context()
    # Return type of the function 'get_versions'
    return stypy_return_type_3676
# Assigning a type to the variable 'get_versions' (line 294)
# Register the shim under the original name so later lookups resolve to it.
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 294, 0), 'get_versions', get_versions)
# ################# End of the type inference program ##################
# Collect every error/warning message accumulated while running the type
# inference program above; these are the module-level analysis results.
module_errors = stypy.errors.type_error.StypyTypeError.get_error_msgs()
module_warnings = stypy.errors.type_warning.TypeWarning.get_warning_msgs()
| [
"redondojose@uniovi.es"
] | redondojose@uniovi.es |
d9bc407174d0bdafdf9157cae4e049eddfe82f3e | 2dd3ac8e6fd58f01ba77bfeb07cdb779ed8fa621 | /day00/ex06/recipe.py | cc6e46cbe930138a53b0d47ad6457524a58e70d9 | [] | no_license | msoares-prog/42AI-BootcampPython | 3f17991220ab6c4196c1ad4f71b62b99348aa4e9 | 0b0a50d1676071067759027fb5882faee96b2482 | refs/heads/main | 2022-12-28T13:00:42.481659 | 2020-10-18T21:46:21 | 2020-10-18T21:46:21 | 302,148,545 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,396 | py | cookbook = {
"sandwich": {
'ingredients': ["ham", "bread", "cheese", "tomatoes"],
'meal': "It is a lunch",
'prep_time': 10
},
"cake": {
'ingredients': ['flour', 'sugar', 'eggs'],
'meal': "It is a dessert",
'prep_time': 60
},
"salad": {
'ingredients': ['avocado', 'arugula', 'tomatoes', 'spinach'],
'meal': "It is a lunch",
'prep_time': 15
}
}
def print_recipe(option):
message = f"Recipe for {option}:\n\
Ingredient list: {cookbook[option]['ingredients']}.\n\
{cookbook[option]['meal']}.\n\
Takes {cookbook[option]['prep_time']} minutes of cooking."
print(message)
print("")
menu()
def print_cookbook():
print("List of cookbooks:")
for key in list(cookbook.keys()):
print("." + key)
print("")
menu()
def add_recipe():
recipe = input("Please give the recipe's name: ")
cookbook[recipe] = {}
cookbook[recipe]['ingredients'] = \
input("Enter the ingredients(separate by one space): ").split(' ')
cookbook[recipe]['meal'] = input("Enter the meal's type: ")
cookbook[recipe]['prep_time'] = input("Preparation time in minutes: ")
print("")
menu()
def delete_recipe():
recipe = input("Please give the recipe's name to be DELETED: ")
cookbook.pop(recipe)
print(f"Recipe {recipe} deleted")
print("")
menu()
def menu():
while True:
option = input("Please select an option by typing the\
corresponding number:\n \
1: Add a recipe\n \
2: Delete a recipe\n \
3: Print a recipe\n \
4: Print the cookbook\n \
5: Quit\n")
print("")
if not option.isdigit():
print("This option does not exist, please type the corresponding \
number.\nTo exit, enter 5.\n")
else:
option = int(option)
if option == 1:
add_recipe()
elif option == 2:
delete_recipe()
elif option == 3:
recipe = input("Please enter the recipe's name to get \
its details:\n")
print_recipe(recipe)
elif option == 4:
print_cookbook()
elif option == 5:
print("Cookbook closed")
exit()
else:
print("This option does not exist, please type the corresponding \
number.\nTo exit, enter 5.\n")
menu() | [
"mariana.soares@simbioseventures.com"
] | mariana.soares@simbioseventures.com |
a1996577fbf1403c95237f518027cdfb35bcb597 | 75d0a9c4cb677653adf4e0322cec30b47eeb730d | /build/vision_opencv/image_geometry/catkin_generated/pkg.develspace.context.pc.py | e0d2b91e9c4adc65449a6b29a9578d91c46f2d2b | [] | no_license | daichi-kamba/sciurus17_ros_kamba | 376380c975215c9c14f382701ee3b5a57687d010 | 4ce7555ee3b39ab69211adcd05f5dfa79f792180 | refs/heads/main | 2023-08-03T17:55:28.525391 | 2021-09-10T08:12:46 | 2021-09-10T08:12:46 | 404,644,998 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,671 | py | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "/home/kamba/catkin_ws/src/vision_opencv/image_geometry/include;/opt/ros/kinetic/include/opencv-3.3.1-dev;/opt/ros/kinetic/include/opencv-3.3.1-dev/opencv".split(';') if "/home/kamba/catkin_ws/src/vision_opencv/image_geometry/include;/opt/ros/kinetic/include/opencv-3.3.1-dev;/opt/ros/kinetic/include/opencv-3.3.1-dev/opencv" != "" else []
PROJECT_CATKIN_DEPENDS = "sensor_msgs".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "-limage_geometry;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_calib3d3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_core3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_dnn3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_features2d3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_flann3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_highgui3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_imgcodecs3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_imgproc3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_ml3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_objdetect3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_photo3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_shape3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_stitching3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_superres3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_video3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_videoio3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_videostab3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_viz3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_aruco3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_bgsegm3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_bioinspired3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_ccalib3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_cvv3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_datasets3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_dpm3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_face3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_fuzzy3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_hdf3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_img_hash3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_line_descrip
tor3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_optflow3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_phase_unwrapping3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_plot3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_reg3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_rgbd3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_saliency3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_stereo3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_structured_light3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_surface_matching3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_text3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_tracking3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_xfeatures2d3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_ximgproc3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_xobjdetect3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_xphoto3.so.3.3.1".split(';') if 
"-limage_geometry;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_calib3d3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_core3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_dnn3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_features2d3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_flann3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_highgui3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_imgcodecs3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_imgproc3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_ml3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_objdetect3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_photo3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_shape3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_stitching3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_superres3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_video3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_videoio3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_videostab3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_viz3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_aruco3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_bgsegm3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_bioinspired3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_ccalib3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_cvv3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_datasets3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_dpm3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_face3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_fuzzy3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_hdf3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_img_hash3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_line_descriptor3.so.3.3.1;/opt/ros/kinetic/lib/
x86_64-linux-gnu/libopencv_optflow3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_phase_unwrapping3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_plot3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_reg3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_rgbd3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_saliency3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_stereo3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_structured_light3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_surface_matching3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_text3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_tracking3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_xfeatures2d3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_ximgproc3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_xobjdetect3.so.3.3.1;/opt/ros/kinetic/lib/x86_64-linux-gnu/libopencv_xphoto3.so.3.3.1" != "" else []
PROJECT_NAME = "image_geometry"
PROJECT_SPACE_DIR = "/home/kamba/catkin_ws/devel"
PROJECT_VERSION = "1.15.0"
| [
"is0406fi@ed.ritsumei.ac.jp"
] | is0406fi@ed.ritsumei.ac.jp |
ff6a89c9d8caa645d888b9e16e0ea2c34211bdcd | 27f48f7a8a91538904f82d711bb9f128c3810057 | /shop/vendor/forms.py | 1315f0efac7c8144f76088d0cc5e8dd7a0ef7f50 | [] | no_license | aallisha/ClothingStore | 1831218c8cc150cbc5a745c8025f3d79c871267a | 85327fa33b8fd32b06b070782e24b2ae49e210ac | refs/heads/master | 2023-04-05T18:57:16.629574 | 2021-04-12T06:45:01 | 2021-04-12T06:45:01 | 357,085,363 | 0 | 2 | null | 2021-04-27T18:26:19 | 2021-04-12T06:41:11 | Python | UTF-8 | Python | false | false | 211 | py | from django.forms import ModelForm
from product.models import Product
class ProductForm(ModelForm):
class Meta:
model = Product
fields = ['category', 'image','title','description','price']
| [
"48907175+aallisha@users.noreply.github.com"
] | 48907175+aallisha@users.noreply.github.com |
aaecf452371b0b9d14bd97d814ddd9714ead9f81 | 45478b97052795e08e883e97bafeea216148085a | /exam/views.py | 0b172af324052376b30b853512c0c2d1d3be9a87 | [] | no_license | prerna1428/onlinetest | a64cc29dc6f18d0a413e1230593ec61f26b29b32 | 6625b83460408647d1d3d1e15e13ea2f2396bb84 | refs/heads/master | 2020-05-16T00:05:50.533231 | 2019-04-21T07:26:10 | 2019-04-21T07:26:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | from django.shortcuts import render
# Create your views here.
def newExam(request):
return render(request,'addexam.html',{})
| [
"thapabikash48@gmail.com"
] | thapabikash48@gmail.com |
e7e4e084cf887e1b7d26dbb62a47cc39114a85d8 | 3c66f9ddd488507144df6c82c698fca27dc23fa4 | /NKT/NKTMan_Fianium.py | d3c6bcf5b95d1a0c2cbd4918f2b89ef45a5b1ce7 | [
"MIT"
] | permissive | EdwarDu/autools | 50347519cf67fa42ff334dbbe15a30ae2009ba46 | 9d58bea49c9dca3504d4da00c7b3fc7d618a1f4c | refs/heads/master | 2022-12-12T10:49:52.782537 | 2022-12-06T10:32:54 | 2022-12-06T10:32:54 | 177,474,285 | 0 | 0 | MIT | 2022-12-06T10:32:55 | 2019-03-24T21:58:29 | Python | UTF-8 | Python | false | false | 5,305 | py | import serial
from .NKTMan import NKTMan
import numpy as np
class NKTMan_Fianium(NKTMan):
"""
With specific register read/write for Fianium
"""
def __init__(self,
module_addr,
serial_name,
baudrate=9600,
parity=serial.PARITY_NONE,
stopbits=serial.STOPBITS_ONE,
host_addr=0x52):
super(NKTMan_Fianium, self).__init__(serial_name, baudrate, parity, stopbits, host_addr)
self.module_addr = module_addr
if self.get_module_type(module_addr) != 0x60:
raise IOError(f'Module at {module_addr} is not SuperK EXTREME (S4x2)/Fianium')
@property
def system_type(self):
raw_data = self.read_reg(self.module_addr, 0x6B)
return raw_data[0]
@property
def inlet_temperature(self):
raw_data = self.read_reg(self.module_addr, 0x11)
return np.frombuffer(raw_data, dtype='<i2')[0] / 10
@property
def emission(self):
raw_data = self.read_reg(self.module_addr, 0x30)
if raw_data[0] == 0:
return False
elif raw_data[0] == 3:
return True
else:
raise ValueError(f'Unknown emission status {raw_data[0]}')
@emission.setter
def emission(self, b_on: bool):
data = b'\x03' if b_on else b'\x00'
self.write_reg(self.module_addr, 0x30, data)
@property
def monitor_input2_gain(self):
raw_data = self.read_reg(self.module_addr, 0x33)
return raw_data[0]
@monitor_input2_gain.setter
def monitor_input2_gain(self, gain: int):
if not 0 <= gain <= 7:
raise ValueError(f'gain must by U8 [0, 7]')
else:
self.write_reg(self.module_addr, 0x33, np.array([gain, ], dtype='<u1').tobytes())
@property
def rf_switch(self):
raw_data = self.read_reg(self.module_addr, 0x34)
return raw_data[0]
@rf_switch.setter
def rf_switch(self, value: int):
if not 0 <= value <= 1:
raise ValueError(f'value must by U8 [0, 1]')
else:
self.write_reg(self.module_addr, 0x34, np.array([value, ], dtype='<u1').tobytes())
@property
def monitor_switch(self):
raw_data = self.read_reg(self.module_addr, 0x34)
return raw_data[0]
@monitor_switch.setter
def monitor_switch(self, value: int):
if not 0 <= value <= 255:
raise ValueError(f'value must by U8 [0, 1]')
else:
self.write_reg(self.module_addr, 0x34, np.array([value, ], dtype='<u1').tobytes())
@property
def crystal1_minimal_wavelength(self):
raw_data = self.read_reg(self.module_addr, 0x9)
return np.frombuffer(raw_data, dtype='<u4')[0]
@property
def crystal1_maximal_wavelength(self):
raw_data = self.read_reg(self.module_addr, 0x91)
return np.frombuffer(raw_data, dtype='<u4')[0]
@property
def crystal2_minimal_wavelength(self):
raw_data = self.read_reg(self.module_addr, 0xA0)
return np.frombuffer(raw_data, dtype='<u4')[0]
@property
def crystal2_maximal_wavelength(self):
raw_data = self.read_reg(self.module_addr, 0xA1)
return np.frombuffer(raw_data, dtype='<u4')[0]
@property
def serial_number(self):
return self.read_reg(self.module_addr, 0x65).decode('utf-8')
@property
def status(self):
raw_data = self.read_reg(self.module_addr, 0x66)
return np.frombuffer(raw_data, dtype='<u2')[0]
def status_interlock_off(self, status=None):
if status is None:
status = self.status
return (status & (0x0001 << 1)) != 0x0000
def status_interlock_loop_in(self, status=None):
if status is None:
status = self.status
return (status & (0x0001 << 2)) != 0x0000
def status_interlock_loop_out(self, status=None):
if status is None:
status = self.status
return (status & (0x0001 << 3)) != 0x0000
def status_supply_voltage_low(self, status=None):
if status is None:
status = self.status
return (status & (0x0001 << 5)) != 0x0000
def status_module_temp_range(self, status=None):
if status is None:
status = self.status
return (status & (0x0001 << 6)) != 0x0000
def status_shutter_sensor1(self, status=None):
if status is None:
status = self.status
return (status & (0x0001 << 8)) != 0x0000
def status_shutter_sensor2(self, status=None):
if status is None:
status = self.status
return (status & (0x0001 << 9)) != 0x0000
def status_new_crystal1_temperature(self, status=None):
if status is None:
status = self.status
return (status & (0x0001 << 10)) != 0x0000
def status_new_crystal2_temperature(self, status=None):
if status is None:
status = self.status
return (status & (0x0001 << 11)) != 0x0000
def status_error_code_present(self, status=None):
if status is None:
status = self.status
return (status & (0x0001 << 15)) != 0x0000
@property
def error_code(self):
return self.read_reg(self.module_addr, 0x67)[0]
| [
"duboyang.h@gmail.com"
] | duboyang.h@gmail.com |
a14cb2cae1fd69db8497611253f1cb936df5a192 | 23ec2d87fb96626992df44af73a7daa202be79a6 | /src/examples/connectfour/vs.py | 3810ad1a332014b09a368e147f4ae73e9ef179df | [] | no_license | ishikota/pymcts | 5d560ec7d0dcdf881a52c607adfdd384ae23e0c2 | 2d1ba191cadbbaab0ab922a785478210cf0709f4 | refs/heads/master | 2021-01-01T19:31:00.932984 | 2015-07-28T14:45:23 | 2015-07-28T14:45:23 | 39,330,236 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,146 | py | # add path to the src and test directory
import os
import sys
PARENT_PATH = os.getenv('PYMCTS_ROOT')
SRC_PATH = PARENT_PATH +"src/"
sys.path.append(SRC_PATH+"algorithm")
import mcts
import connectfour_model
import heuristic_model
# Clear the shell
os.system("clear")
# Setup for MCTS
model = heuristic_model.ConnectFour()
#model = connectfour_model.ConnectFour()
print '> Input the maximum number of iteration in MCTS...'
playout_num = int(raw_input())
_mcts = mcts.MCTS()
_mcts.set_playout(playout_num)
_mcts.show_progress = True
# start the game !!
print 'Let\'s ConnectFour !!'
model.display()
while True:
# Player turn
print '> Input the column to make a move...'
action = int(raw_input())-1
end_flg, score = model.is_terminal(1, action)
model.update(action)
model.display()
if end_flg:
print '\nYou win !!!\n'
break
# MCTS CPU Turn
root, action = _mcts.start(model)
print 'MCTS make a move on column '+str(action+1)
end_flg, score = model.is_terminal(-1, action)
model.update(action)
model.display()
if end_flg:
print '\nYou lose ...\n'
break
| [
"ishikota086@gmail.com"
] | ishikota086@gmail.com |
a3b172322b6b5285674b3384e9dc45f6733b5b84 | 7a47c29fd131bd574425303dc4c6e9f27134184a | /nlu/app/pickle_example.py | 86247b199f72b6b64c1192c2bc07197c36e799a4 | [] | no_license | conversational-pipeline/reusable-pipeline | 692b75b2f9ec31de0e5f3170f3a290457dd8873d | 1a8713ef2d2e8699e1cd9a0dc8f909c10ddabb18 | refs/heads/master | 2020-04-26T06:18:30.945634 | 2019-03-25T18:32:13 | 2019-03-25T18:32:13 | 173,360,252 | 2 | 0 | null | 2019-03-20T21:12:00 | 2019-03-01T19:56:46 | TypeScript | UTF-8 | Python | false | false | 4,330 | py | import pickle
word2idx = {'infusion': 0, 'cold': 165, 'anything': 1, 'be': 2, 'coffee': 3, 'water': 4, 'green': 5, 'shot': 7, 'medium': 8, 'forget': 9, 'swap': 10, 'grande': 12, 'give': 13, "we're": 14, 'macchiato': 15, 'shaken': 16, 'me': 17, 'is': 83, 'soy': 172, "she'd": 19, 'item': 21, 'should': 85, "they'd": 22, "they'll": 25, 'roast': 24, 'syrup': 28, 'good': 86, 'fat': 27, 'tea': 29, 'first': 30, 'little': 31, 'whole': 43, 'vanilla': 32, 'mocha': 33, 'light': 35, 'really': 201, 'he': 36, 'switch': 126, 'much': 37, 'set': 38, 'that': 39, 'very': 40, 'moment': 41, 'nitro': 42, 'single': 134, 'on': 44, 'eleven': 88, 'her': 45, 'every': 46, 'non': 47, 'thirteen': 48, 'dragonfruit': 49, 'iced': 50, 'the': 51, 'spice': 52, 'not': 53, 'pike': 54, 'large': 55, 'off': 56, 'of': 57, 'venti': 210, 'second': 59, 'a': 60, 'bye': 61, "everyone's": 62, 'ready': 216, 'passion': 171, 'time': 138, 'caffe': 209, 'none': 63, 'him': 64, 'bit': 65, "they're": 67, 'it': 68, "he'd": 176, "everbody's": 69, 'leave': 70, 'no': 93, "we'd": 71, 'lose': 72, 'need': 73, 'substitute': 75, 'classic': 76, 'hook': 78, 'drink': 79, 'finished': 80, 'drop': 81, 'creamer': 82, 'hold': 87, 'acai': 84, 'caramel': 95, 'heavy': 89, 'frappuccino': 96, 'thank': 90, 'lemon': 91, 'third': 92, 'five': 94, 'have': 142, 'small': 97, 'four': 11, 'they': 98, 'us': 100, 'your': 102, 'ten': 99, 'hot': 104, 'so': 108, 'bunch': 109, 'doppio': 112, 'solo': 111, 'fifth': 114, 'would': 115, 'black': 117, "we'll": 118, 'whip': 119, 'everything': 101, "she's": 122, 'eight': 121, 'she': 123, 'place': 124, 'minute': 125, 'tall': 146, 'lemonade': 128, 'flat': 103, 'replace': 129, 'help': 182, 'lime': 130, "that's": 131, 'I': 132, 'seven': 18, 'salted': 133, 'could': 135, 'any': 136, 'make': 107, 'milk': 137, 'cappuccino': 58, 'those': 139, 'white': 148, 'tango': 140, 'an': 141, 'sparkling': 143, 'brew': 144, 'percent': 20, 'can': 145, 'cascara': 147, 'hang': 149, 'dubble': 150, 'coconut': 110, 'for': 151, 'am': 153, 'equal': 154, 
'want': 155, 'pina': 113, 'ENDPAD': 233, 'from': 156, 'fourth': 157, 'two': 158, 'done': 159, 'are': 160, 'sweet': 161, 'colada': 162, "everything's": 152, 'take': 164, 'in': 166, 'UNKNOWN': 234, 'and': 167, 'espresso': 168, 'i': 169, "she'll": 170, "i'm": 173, 'to': 23, "that'll": 174, 'short': 175, 'you': 26, 'lots': 177, "he'll": 77, 'thing': 178, 'latte': 179, "i'd": 116, 'sugar': 180, 'hibiscus': 181, 'okay': 191, 'add': 183, "he's": 184, 'sans': 185, 'strawberry': 105, "i'll": 186, 'thanks': 187, 'lot': 188, 'fine': 189, 'order': 190, 'pumpkin': 120, 'may': 66, 'without': 192, 'fog': 193, 'everyone': 194, 'chocolate': 195, 'splenda': 196, 'change': 197, 'blonde': 198, 'pineapple': 199, 'like': 200, 'last': 34, 'get': 202, 'lieu': 203, 'pink': 204, 'mango': 205, 'twelve': 206, 'more': 207, 'with': 208, 'everbody': 211, 'up': 212, 'will': 213, 'appreciate': 127, 'berry': 214, 'cancel': 215, 'extra': 217, 'six': 218, 'wait': 219, 'one': 163, 'nine': 220, 'cream': 221, 'actually': 74, 'just': 222, 'we': 223, 'remove': 224, 'too': 225, 'teavana': 226, 'them': 227, 'three': 228, 'americano': 229, 'foam': 232, 'all': 230, 'skip': 106, 'do': 231, 'instead': 6}
tag2idx = {'I-END_OF_ORDER': 22, 'I-NEED_MORE_TIME': 23, 'B-TARGET': 24, 'B-PREPOSITION_TARGET_FORWARD': 36, 'B-POSITION': 0, 'I-SYRUP_AMT': 1, 'I-PRONOUN_ALL': 26, 'I-FLAVOR': 37, 'B-MILK': 27, 'B-PRONOUN_ALL': 28, 'B-ITEM': 8, 'B-WITH': 3, 'B-QUANTITY': 29, 'B-PREPOSITION_QUANTITY': 30, 'B-INSTEAD_OF': 39, 'B-PREPOSITION_TARGET_BACK': 31, 'B-REMOVE': 4, 'I-SIZE': 34, 'B-FOR': 6, 'I-SHOT': 7, 'I-ITEM': 38, 'O': 41, 'B-SIZE': 33, 'B-NEED_MORE_TIME': 32, 'I-POSITION': 9, 'B-SYRUP_AMT': 14, 'I-ADD': 11, 'B-SUBSTITUTE_WITH_TARGET': 13, 'I-SUBSTITUTE_WITH_TARGET': 2, 'B-FLAVOR': 15, 'B-ADD': 16, 'B-SYRUP': 17, 'B-SHOT': 18, 'B-END_OF_ORDER': 21, 'I-REMOVE': 35, 'I-MILK': 19, 'B-CONJUNCTION': 10, 'B-THE': 20, 'I-INSTEAD_OF': 5, 'I-TARGET': 12, 'I-SYRUP': 25, 'B-PRONOUN': 40}
groups2idx = {'B-ADD': 4, 'B-REMOVE': 6, 'B-SUBSTITUTE': 3, 'I-ADD': 0, 'I-REMOVE': 5, 'I-SUBSTITUTE': 2, 'None': 1, 'O': 7}
result = {
'word2idx': word2idx,
'tag2idx': tag2idx,
'groups2idx': groups2idx
}
with open("pickled_idx", "wb") as f:
pickle.dump(result, f)
with open("pickled_idx", "rb") as f:
result = pickle.load(f)
print(result)
| [
"michaelsethperel@microsoft.com"
] | michaelsethperel@microsoft.com |
213f42b8e3c626c96fdba83225479382cdd7034f | 544cfadc742536618168fc80a5bd81a35a5f2c99 | /tools/test/connectivity/acts/framework/acts/controllers/pdu_lib/synaccess/np02b.py | 655328feb4bede2c154cc3f44e04463ee9f339ee | [] | no_license | ZYHGOD-1/Aosp11 | 0400619993b559bf4380db2da0addfa9cccd698d | 78a61ca023cbf1a0cecfef8b97df2b274ac3a988 | refs/heads/main | 2023-04-21T20:13:54.629813 | 2021-05-22T05:28:21 | 2021-05-22T05:28:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,057 | py | #!/usr/bin/env python3
#
# Copyright 2019 - The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from acts import utils
from acts.controllers import pdu
import re
import telnetlib
import time
class PduDevice(pdu.PduDevice):
"""Implementation of pure abstract PduDevice object for the Synaccess np02b
Pdu.
"""
def __init__(self, host, username, password):
super(PduDevice, self).__init__(host, username, password)
self.tnhelper = _TNHelperNP02B(host)
def on_all(self):
""" Turns on both outlets on the np02b."""
self.tnhelper.cmd('ps 1')
self._verify_state({'1': True, '2': True})
def off_all(self):
""" Turns off both outlets on the np02b."""
self.tnhelper.cmd('ps 0')
self._verify_state({'1': False, '2': False})
def on(self, outlet):
""" Turns on specific outlet on the np02b.
Args:
outlet: string of the outlet to turn on ('1' or '2')
"""
self.tnhelper.cmd('pset %s 1' % outlet)
self._verify_state({outlet: True})
def off(self, outlet):
""" Turns off a specifc outlet on the np02b.
Args:
outlet: string of the outlet to turn off ('1' or '2')
"""
self.tnhelper.cmd('pset %s 0' % outlet)
self._verify_state({outlet: False})
def reboot(self, outlet):
""" Toggles a specific outlet on the np02b to off, then to on.
Args:
outlet: string of the outlet to reboot ('1' or '2')
"""
self.off(outlet)
self._verify_state({outlet: False})
self.on(outlet)
self._verify_state({outlet: True})
def status(self):
""" Returns the status of the np02b outlets.
Return:
a dict mapping outlet strings ('1' and '2') to:
True if outlet is ON
False if outlet is OFF
"""
res = self.tnhelper.cmd('pshow')
status_list = re.findall('(ON|OFF)', res)
status_dict = {}
for i, status in enumerate(status_list):
status_dict[str(i + 1)] = (status == 'ON')
return status_dict
def close(self):
"""Ensure connection to device is closed.
In this implementation, this shouldn't be necessary, but could be in
others that open on creation.
"""
self.tnhelper.close()
def _verify_state(self, expected_state, timeout=3):
"""Returns when expected_state is reached on device.
In order to prevent command functions from exiting until the desired
effect has occurred, this function verifys that the expected_state is a
subset of the desired state.
Args:
expected_state: a dict representing the expected state of one or
more outlets on the device. Maps outlet strings ('1' and/or '2')
to:
True if outlet is expected to be ON.
False if outlet is expected to be OFF.
timeout (default: 3): time in seconds until raising an exception.
Return:
True, if expected_state is reached.
Raises:
PduError if expected_state has not been reached by timeout.
"""
end_time = time.time() + timeout
while time.time() < end_time:
actual_state = self.status()
if expected_state.items() <= actual_state.items():
return True
time.sleep(.1)
raise pdu.PduError('Timeout while verifying state.\n'
'Expected State: %s\n'
'Actual State: %s' % (expected_state, actual_state))
class _TNHelperNP02B(object):
"""An internal helper class for Telnet with the Synaccess NP02B Pdu. This
helper is specific to the idiosyncrasies of the NP02B and therefore should
not be used with other devices.
"""
def __init__(self, host):
self._tn = telnetlib.Telnet()
self.host = host
self.tx_cmd_separator = '\n\r'
self.rx_cmd_separator = '\r\n'
self.prompt = '>'
"""
Executes a command on the device via telnet.
Args:
cmd_str: A string of the command to be run.
Returns:
A string of the response from the valid command (often empty).
"""
def cmd(self, cmd_str):
# Open session
try:
self._tn.open(self.host, timeout=3)
except:
raise pdu.PduError("Failed to open telnet session to host (%s)" %
self.host)
time.sleep(.1)
# Read to end of first prompt
cmd_str.strip(self.tx_cmd_separator)
self._tn.read_eager()
time.sleep(.1)
# Write command and read all output text
self._tn.write(utils.ascii_string(cmd_str + self.tx_cmd_separator))
res = self._tn.read_until(utils.ascii_string(self.prompt), 2)
# Parses out the commands output
if res is None:
raise pdu.PduError("Command failed: %s" % cmd_str)
res = res.decode()
if re.search('Invalid', res):
raise pdu.PduError("Command Invalid: %s" % cmd_str)
res = res.replace(self.prompt, '')
res = res.replace(self.tx_cmd_separator, '')
res = res.replace(self.rx_cmd_separator, '')
res = res.replace(cmd_str, '')
# Close session
self._tn.close()
time.sleep(0.5)
return res
def close(self):
self._tn.close() | [
"rick_tan@qq.com"
] | rick_tan@qq.com |
6680e050b3c46d287a898764310ee185d8ad11be | 22bda291803362ca4af353a8ed034110b9cb6d8e | /DeepCDN_UI/urls.py | 966624b8ec307063087e5260119b7459f2d16b4d | [] | no_license | QwertyJacob/DeepCDN_UI | cb5f9b4e86a1e7e365451f29f5f0fc29a500fb14 | d63981ae14c30129e5b5426e2640d72f0869e25b | refs/heads/master | 2021-02-10T02:13:26.270979 | 2020-03-03T12:05:17 | 2020-03-03T12:05:17 | 244,344,773 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 812 | py | """DeepCDN_UI URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('admin/', admin.site.urls),
path('dashboard/', include('dashboard.urls')),
]
| [
"jesusfernando.cevallosmoreno@jc.elis.org"
] | jesusfernando.cevallosmoreno@jc.elis.org |
f35ed62ce5d2bcd6961d7c14df34fb173be14a4a | 7924db73ecec5fe3eeab8f023d1856814618c718 | /fragcode.py | c2e6f79315ca8321df4dee683c17386447435864 | [] | no_license | mohmhm1/Miscellaneous | 2f59894644be2b7c7ee7d5c6ccb0ad0a7c34fc23 | 634c26c1107e5cb5ac09663a8d2f177e1e556407 | refs/heads/master | 2021-01-19T13:26:12.570648 | 2017-04-12T23:45:16 | 2017-04-12T23:45:16 | 88,088,425 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 679 | py |
entry = raw_input("Please enter your chemical structure....")
mol_database = ["NH4+","COOH","COSH","COSeH","SO3H","SO2H","NH2","NH","NHNH2"]
mol = ""
itr = 0
inc = 0
L = list(entry)
print "Parsed substructures"
while itr < len(entry):
itr += 1
L = [entry[i:i + itr] for i in range(0, len(entry), itr)]
print L
for structures in L:
frag_code = ""
if structures in mol_database:
frag_code += "1"
mol += "1"
print "STRUCTURES FOUND!: " + structures
else:
frag_code += "0"
mol += "0"
print " Fingerprint for " + entry + ":"
print mol
mol = ""
| [
"noreply@github.com"
] | noreply@github.com |
62cb88b22f9988b968329a59721e0b8ca6c8328b | a26c1b336a2ea2178c14c7d16d300863a748bc12 | /realtime_demo.py | 708abeec7094070580813722755e62af86e74a4b | [] | no_license | ashu9999/Deep-Advertisement-Application | 98491d27833129e374e5d4d31a603908fb192897 | c00b59d38bd1a4ab1d1427f50ca890b0fe66a2a3 | refs/heads/master | 2023-01-09T16:45:12.050368 | 2020-11-07T12:29:35 | 2020-11-07T12:29:35 | 310,294,068 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,008 | py | """
Face detection
"""
import cv2
import os
from time import sleep
import numpy as np
import argparse
from wide_resnet import WideResNet
from keras.utils.data_utils import get_file
import os, random
class FaceCV(object):
"""
Singleton class for face recongnition task
"""
CASE_PATH = ".\\pretrained_models\\haarcascade_frontalface_alt.xml"
WRN_WEIGHTS_PATH = ".\\pretrained_models\\weights.18-4.06.hdf5"
def __new__(cls, weight_file=None, depth=16, width=8, face_size=64):
if not hasattr(cls, 'instance'):
cls.instance = super(FaceCV, cls).__new__(cls)
return cls.instance
def __init__(self, depth=16, width=8, face_size=64):
self.face_size = face_size
self.model = WideResNet(face_size, depth=depth, k=width)()
model_dir = os.path.join(os.getcwd(), "pretrained_models").replace("//", "\\")
fpath = get_file('weights.18-4.06.hdf5',
self.WRN_WEIGHTS_PATH,
cache_subdir=model_dir)
self.model.load_weights(fpath)
@classmethod
def draw_label(cls, image, point, label, font=cv2.FONT_HERSHEY_SIMPLEX,
font_scale=1, thickness=2):
size = cv2.getTextSize(label, font, font_scale, thickness)[0]
x, y = point
cv2.rectangle(image, (x, y - size[1]), (x + size[0], y), (255, 0, 0), cv2.FILLED)
cv2.putText(image, label, point, font, font_scale, (255, 255, 255), thickness)
    def crop_face(self, imgarray, section, margin=40, size=64):
        """
        Crop a margin-expanded face region from a frame and resize it square.

        :param imgarray: full image as an H x W x 3 numpy array
        :param section: face detected area (x, y, w, h), or None to use the whole image
        :param margin: percentage of min(w, h) added on every side so the crop
            includes the full head, not just the detected face box
        :param size: the result image resolution will be (size x size)
        :return: tuple of (resized image in numpy array with shape (size x size x 3),
            the rectangle actually cropped, as (x, y, w, h) after clamping)
        """
        img_h, img_w, _ = imgarray.shape
        if section is None:
            # No detection supplied: treat the entire frame as the face region.
            section = [0, 0, img_w, img_h]
        (x, y, w, h) = section
        # Margin is interpreted as a percentage of the smaller box side.
        margin = int(min(w,h) * margin / 100)
        x_a = x - margin
        y_a = y - margin
        x_b = x + w + margin
        y_b = y + h + margin
        # Spill over the top/left edge: shift the box right/down by the overshoot.
        # NOTE(review): these two branches clamp to img_w-1 / img_h-1 while the
        # bottom/right branches below clamp to img_w / img_h — looks like an
        # off-by-one inconsistency; confirm which bound is intended.
        if x_a < 0:
            x_b = min(x_b - x_a, img_w-1)
            x_a = 0
        if y_a < 0:
            y_b = min(y_b - y_a, img_h-1)
            y_a = 0
        # Spill over the bottom/right edge: shift the box left/up by the overshoot.
        if x_b > img_w:
            x_a = max(x_a - (x_b - img_w), 0)
            x_b = img_w
        if y_b > img_h:
            y_a = max(y_a - (y_b - img_h), 0)
            y_b = img_h
        cropped = imgarray[y_a: y_b, x_a: x_b]
        resized_img = cv2.resize(cropped, (size, size), interpolation=cv2.INTER_AREA)
        # cv2.resize already returns an ndarray; this conversion is kept as-is.
        resized_img = np.array(resized_img)
        return resized_img, (x_a, y_a, x_b - x_a, y_b - y_a)
def detect_face(self):
face_cascade = cv2.CascadeClassifier(self.CASE_PATH)
# 0 means the default video capture device in OS
video_capture = cv2.VideoCapture(0)
# infinite loop, break by key ESC
while True:
if not video_capture.isOpened():
sleep(5)
# Capture frame-by-frame
ret, frame = video_capture.read()
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
faces = face_cascade.detectMultiScale(
gray,
scaleFactor=1.2,
minNeighbors=10,
minSize=(self.face_size, self.face_size)
)
if faces is not ():
# placeholder for cropped faces
face_imgs = np.empty((len(faces), self.face_size, self.face_size, 3))
for i, face in enumerate(faces):
face_img, cropped = self.crop_face(frame, face, margin=40, size=self.face_size)
(x, y, w, h) = cropped
cv2.rectangle(frame, (x, y), (x + w, y + h), (255, 200, 0), 2)
face_imgs[i,:,:,:] = face_img
if len(face_imgs) > 0:
# predict ages and genders of the detected faces
results = self.model.predict(face_imgs)
predicted_genders = results[0]
ages = np.arange(0, 101).reshape(101, 1)
predicted_ages = results[1].dot(ages).flatten()
# draw results
for i, face in enumerate(faces):
label = "{}, {}".format(int(predicted_ages[i]),
"F" if predicted_genders[i][0] > 0.5 else "M")
print(int(predicted_ages[i]),predicted_genders[i][0])
if predicted_genders[i][0] < 0.5 and ((int(predicted_ages[i])>25) and (int(predicted_ages[i])<30)) :
print("Hello")
filename=random.choice(os.listdir("25-30/"))
cap = cv2.VideoCapture("25-30/"+filename)
# Read until video is completed
while(cap.isOpened()):
# Capture frame-by-frame
ret, frame1 = cap.read()
if ret == True:
# Display the resulting frame
cv2.imshow('Frame',frame1)
# Press Q on keyboard to exit
if cv2.waitKey(25) & 0xFF == ord('q'):
break
else:
break
elif predicted_genders[i][0] < 0.5 and ((int(predicted_ages[i])>30) and (int(predicted_ages[i])<35)) :
print("Hello")
filename=random.choice(os.listdir("30-35/"))
cap = cv2.VideoCapture("30-35/"+filename)
# Read until video is completed
while(cap.isOpened()):
# Capture frame-by-frame
ret, frame1 = cap.read()
if ret == True:
# Display the resulting frame
cv2.imshow('Frame',frame1)
# Press Q on keyboard to exit
if cv2.waitKey(25) & 0xFF == ord('q'):
break
else:
break
elif predicted_genders[i][0] > 0.5 and ((int(predicted_ages[i])>25) and (int(predicted_ages[i])<30)) :
print("Hello")
filename=random.choice(os.listdir("F25-30/"))
cap = cv2.VideoCapture("F25-30/"+filename)
# Read until video is completed
while(cap.isOpened()):
# Capture frame-by-frame
ret, frame1 = cap.read()
if ret == True:
# Display the resulting frame
cv2.imshow('Frame',frame1)
# Press Q on keyboard to exit
if cv2.waitKey(25) & 0xFF == ord('q'):
break
else:
break
# When everything done, release the video capture object
cap.release()
# Closes all the frames
cv2.destroyAllWindows()
self.draw_label(frame, (face[0], face[1]), label)
else:
print('No faces')
cv2.imshow('Keras Faces', frame)
if cv2.waitKey(5) == 27: # ESC key press
break
# When everything is done, release the capture
video_capture.release()
cv2.destroyAllWindows()
def get_args():
    """Parse command-line options controlling the WideResNet architecture.

    :returns: argparse.Namespace with ``depth`` (int) and ``width`` (int)
    """
    parser = argparse.ArgumentParser(
        description="This script detects faces from web cam input, "
                    "and estimates age and gender for the detected faces.",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument("--depth", type=int, default=16,
                        help="depth of network")
    parser.add_argument("--width", type=int, default=8,
                        help="width of network")
    return parser.parse_args()
def main():
    """Entry point: build the detector from CLI options and start the webcam loop."""
    options = get_args()
    detector = FaceCV(depth=options.depth, width=options.width)
    detector.detect_face()


if __name__ == "__main__":
    main()
| [
"kmr.ashutosh9999@gmail.com"
] | kmr.ashutosh9999@gmail.com |
61c90a5a68de5d9fddb0ef91c1c3666064a8f85e | 7bededcada9271d92f34da6dae7088f3faf61c02 | /pypureclient/flasharray/FA_2_22/models/pod_replica_link_lag_response.py | 4b80aac36f529a563527baa650fb2b54f0d5839c | [
"BSD-2-Clause"
] | permissive | PureStorage-OpenConnect/py-pure-client | a5348c6a153f8c809d6e3cf734d95d6946c5f659 | 7e3c3ec1d639fb004627e94d3d63a6fdc141ae1e | refs/heads/master | 2023-09-04T10:59:03.009972 | 2023-08-25T07:40:41 | 2023-08-25T07:40:41 | 160,391,444 | 18 | 29 | BSD-2-Clause | 2023-09-08T09:08:30 | 2018-12-04T17:02:51 | Python | UTF-8 | Python | false | false | 3,922 | py | # coding: utf-8
"""
FlashArray REST API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 2.22
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re
import six
import typing
from ....properties import Property
if typing.TYPE_CHECKING:
from pypureclient.flasharray.FA_2_22 import models
class PodReplicaLinkLagResponse(object):
    """Generated Swagger model wrapping a list of pod replica link lag objects.

    Attributes:
        swagger_types (dict): attribute name -> attribute type.
        attribute_map (dict): attribute name -> json key in the definition.
    """
    swagger_types = {'items': 'list[PodReplicaLinkLag]'}
    attribute_map = {'items': 'items'}
    required_args = {}

    def __init__(
        self,
        items=None,  # type: List[models.PodReplicaLinkLag]
    ):
        """
        Keyword args:
            items (list[PodReplicaLinkLag]): A list of pod replica link lag objects.
        """
        # Only assign when provided so that to_dict() can skip unset attributes.
        if items is not None:
            self.items = items

    def __setattr__(self, key, value):
        # Only declared model attributes may be set.
        if key not in self.attribute_map:
            raise KeyError("Invalid key `{}` for `PodReplicaLinkLagResponse`".format(key))
        self.__dict__[key] = value

    def __getattribute__(self, item):
        attr = object.__getattribute__(self, item)
        # Lazy Property placeholders behave like missing attributes.
        if isinstance(attr, Property):
            raise AttributeError
        return attr

    def __getitem__(self, key):
        if key in self.attribute_map:
            return object.__getattribute__(self, key)
        raise KeyError("Invalid key `{}` for `PodReplicaLinkLagResponse`".format(key))

    def __setitem__(self, key, value):
        if key in self.attribute_map:
            object.__setattr__(self, key, value)
        else:
            raise KeyError("Invalid key `{}` for `PodReplicaLinkLagResponse`".format(key))

    def __delitem__(self, key):
        if key in self.attribute_map:
            object.__delattr__(self, key)
        else:
            raise KeyError("Invalid key `{}` for `PodReplicaLinkLagResponse`".format(key))

    def keys(self):
        return self.attribute_map.keys()

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        for attr in self.swagger_types:
            if not hasattr(self, attr):
                continue
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    elem.to_dict() if hasattr(elem, "to_dict") else elem
                    for elem in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    k: (v.to_dict() if hasattr(v, "to_dict") else v)
                    for k, v in value.items()
                }
            else:
                result[attr] = value
        if issubclass(PodReplicaLinkLagResponse, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        return isinstance(other, PodReplicaLinkLagResponse) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| [
"noreply@github.com"
] | noreply@github.com |
a44b1e142384963a8199905ffd28e41de93d8139 | 37129a07acb8aae9e87e35d42fb9aa6c958f76cb | /cedula/Cedula.py | 92275150749657f8f6c141a97c5c475c900f0edc | [] | no_license | dignacio/python | 1fedead3e9228c6c3b10d692fc007dfb740ae657 | 753a714fb9a5c4509e8b4061661b6ddcbe347beb | refs/heads/master | 2021-04-15T05:57:47.750646 | 2018-11-13T19:56:55 | 2018-11-13T19:56:55 | 126,255,131 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,902 | py | # -*- coding: utf-8 -*-
"""
/***************************************************************************
Cedula
A QGIS plugin
Cedula Catastral
-------------------
begin : 2018-03-20
git sha : $Format:%H$
copyright : (C) 2018 by Worknest
email : dignacio.lopezo@gmail.com
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from PyQt5.QtCore import QSettings, QTranslator, qVersion, QCoreApplication
from PyQt5.QtGui import QIcon, QCursor, QPixmap
from PyQt5.QtWidgets import QAction, QMessageBox
# Initialize Qt resources from file resources.py
from .resources import *
# Import the code for the dialog
from .Cedula_dialog import CedulaDialog
from .Cedula_MainWindow import CedulaMainWindow
import os.path
from qgis.core import QgsMessageLog, QgsProject
class Cedula:
    """QGIS Plugin Implementation.

    Lets the user pick a parcel ("predio") on the map with a custom cursor and
    open cadastral record ("cedula") windows for it. Up to five
    CedulaMainWindow instances are tracked in self.dlg.lista.
    """
    def __init__(self, iface):
        """Constructor.
        :param iface: An interface instance that will be passed to this class
            which provides the hook by which you can manipulate the QGIS
            application at run time.
        :type iface: QgsInterface
        """
        # Save reference to the QGIS interface
        self.iface = iface
        # initialize plugin directory
        self.plugin_dir = os.path.dirname(__file__)
        # initialize locale
        locale = QSettings().value('locale/userLocale')[0:2]
        locale_path = os.path.join(
            self.plugin_dir,
            'i18n',
            'Cedula_{}.qm'.format(locale))
        if os.path.exists(locale_path):
            self.translator = QTranslator()
            self.translator.load(locale_path)
            if qVersion() > '4.3.3':
                QCoreApplication.installTranslator(self.translator)
        # Create the dialog (after translation) and keep reference
        self.dlg = CedulaDialog()
        # Declare instance attributes
        self.actions = []
        self.menu = self.tr(u'&Cedula')
        # TODO: We are going to let the user set this up in a future iteration
        self.toolbar = self.iface.addToolBar(u'Cedula')
        self.toolbar.setObjectName(u'Cedula')
        # button event wiring (alternative handlers left commented out)
        #self.dlg.pushButton.clicked.connect(self.abreVentana)
        #self.dlg.pushButton_2.clicked.connect(self.imprimeStatus)
        self.dlg.pushButton.clicked.connect(self.cambiaCursor)
        self.dlg.variable = {'uno': 1, 'dos': 2}
        # open CedulaMainWindow instances, keyed by a stringified counter
        self.dlg.lista = {}
        self.contador = 0
        # parcel layer; assigned in cambiaCursor()
        self.dlg.predio = None
        self.canvas = iface.mapCanvas()
        # custom red cross-hair cursor shown while picking a parcel on the map
        self.cursorRedondo = QCursor(QPixmap(["16 16 3 1",
        " c None",
        ". c #FF0000",
        "+ c #FFFFFF",
        "                ",
        "       +.+      ",
        "      ++.++     ",
        "     +.....+    ",
        "    +.     .+   ",
        "   +.   .   .+  ",
        "  +.    .    .+ ",
        " ++.    .    .++",
        " ... ...+... ...",
        " ++.    .    .++",
        "  +.    .    .+ ",
        "   +.   .   .+  ",
        "    +.     .+   ",
        "    ++.....+    ",
        "      ++.++     ",
        "       +.+      "]))
    def imprimeStatus(self):
        """Debug helper: print visibility and window title of each open window."""
        for key, value in self.dlg.lista.items():
            #print(key, value) windowTitle
            print(str(self.dlg.lista[key].isVisible()),str(self.dlg.lista[key].windowTitle()))
    def cambiaCursor(self):
        """Switch to the selection tool with the custom cursor and start
        listening for a selection on the 'predios.geom' parcel layer."""
        self.dlg.predio = QgsProject.instance().mapLayersByName('predios.geom')[0]
        print(self.canvas)
        self.iface.actionSelect().trigger()
        self.canvas.setCursor(self.cursorRedondo)
        self.dlg.predio.selectionChanged.connect(self.touchPredio)
        # disabled until the selection handler fires, to avoid double-connects
        self.dlg.pushButton.setEnabled(False)
    def abreVentana(self):
        """Open a new CedulaMainWindow (max 5 at a time) and register it in
        self.dlg.lista under the current counter value.

        NOTE(review): closed windows are never removed from self.dlg.lista,
        so the 5-window limit counts windows ever opened — confirm intent.
        """
        if len(self.dlg.lista) == 5:
            self.msg = QMessageBox()
            self.msg.setText("Ha completado el numero maximo de Cedulas abiertas")
            self.msg.setIcon(QMessageBox().Warning)
            self.msg.setWindowTitle("titulo")
            self.msg.show()
            result = self.msg.exec_()
            #self.contador -= 1
            return
        #self.window = None
        #self.window = CedulaMainWindow()
        #self.window.pushButton.clicked.connect(self.hasAlgo)
        #self.window.show()
        #self.window1 = CedulaMainWindow()
        #self.window1.pushButton.clicked.connect(self.hasAlgo)
        #self.window1.show()
        self.dlg.lista[str(self.contador)] = CedulaMainWindow(str(self.contador))
        #self.dlg.lista[str(self.contador)].pushButton.clicked.connect(self.hasAlgo)
        #self.dlg.lista[str(self.contador)].closeEvent(self,event)
        self.dlg.lista[str(self.contador)].setWindowTitle(str(self.contador))
        #self.dlg.lista[str(self.contador)].setAttribute(55, True)
        self.dlg.lista[str(self.contador)].show()
        self.contador += 1
        # print('imprime algo x2')
        # QgsMessageLog.logMessage("message", "name")
        # print('imprime algo x3')
    def hasAlgo(self):
        """Debug helper formerly wired to the window buttons.

        NOTE(review): `texto` is always non-empty, so the early return below
        makes everything after it dead code — confirm before relying on it.
        """
        texto = "Sin descripcion enviada"
        #self.msg = QMessageBox()
        #self.msg.setText("mensaje")
        #self.msg.setIcon(QMessageBox().Critical)
        #self.msg.setWindowTitle("titulo")
        #self.msg.show()
        #result = self.msg.exec_()
        #QgsMessageLog.logMessage("message", "name")
        #print('entro')
        if len(texto) != 0:
            return
        if "0" in self.dlg.lista:
            #print(str(self.dlg.lista["0"].isActiveWindow()) + str(self.dlg.lista["0"].windowTitle()) + str(self.dlg.lista["0"].close()))
            print(self.dlg.lista["0"].key, self.dlg.lista["0"].value)
            #self.dlg.lista["0"].lineEdit.setText("texto")
        if "1" in self.dlg.lista:
            #print(str(self.dlg.lista["1"].isActiveWindow()) + str(self.dlg.lista["1"].windowTitle()) + str(self.dlg.lista["1"].close()))
            print(self.dlg.lista["1"].key, self.dlg.lista["1"].value)
            #self.dlg.lista["1"].lineEdit.setText("otro texto")
        if "2" in self.dlg.lista:
            #print(str(self.dlg.lista["2"].isActiveWindow()) + str(self.dlg.lista["2"].windowTitle()) + str(self.dlg.lista["2"].close()))
            print(self.dlg.lista["2"].key, self.dlg.lista["2"].value)
            #self.dlg.lista["2"].lineEdit.setText("otro texto del dos")
        if "3" in self.dlg.lista:
            #print(str(self.dlg.lista["3"].isActiveWindow()) + str(self.dlg.lista["3"].windowTitle()) + str(self.dlg.lista["3"].close()))
            print(self.dlg.lista["3"].key, self.dlg.lista["3"].value)
            #self.dlg.lista["3"].lineEdit.setText("otro texto del tres")
        if "4" in self.dlg.lista:
            #print(str(self.dlg.lista["4"].isActiveWindow()) + str(self.dlg.lista["4"].windowTitle()) + str(self.dlg.lista["4"].close()))
            print(self.dlg.lista["4"].key, self.dlg.lista["4"].value)
            #self.dlg.lista["4"].lineEdit.setText("otro texto del tres")
        if "5" in self.dlg.lista:
            #print(str(self.dlg.lista["5"].isActiveWindow()) + str(self.dlg.lista["5"].windowTitle()) + str(self.dlg.lista["5"].close()))
            print(self.dlg.lista["5"].key, self.dlg.lista["5"].value)
            #self.dlg.lista["5"].lineEdit.setText("otro texto del tres")
    # - EVENT HANDLERS -
    # PREDIOS (parcels) layer: selectionChanged handler
    def touchPredio(self):
        """Print the cadastral key of each selected parcel, then disconnect
        this handler and re-enable the button.

        NOTE(review): isSignalConnected() is called without the QMetaMethod
        argument PyQt normally requires — verify this works at runtime.
        """
        campos = self.dlg.predio.fields()
        print('entrap')
        features = self.dlg.predio.selectedFeatures()
        for f in features:
            print(f["cve_cat"])
        print(self.dlg.predio.isSignalConnected())
        self.dlg.predio.selectionChanged.disconnect()
        print(self.dlg.predio.isSignalConnected())
        self.dlg.pushButton.setEnabled(True)
    # CONDOMINIOS_VERTICALES (vertical condominiums) layer: selectionChanged handler
    # NOTE(review): self.condV is never assigned in this file — verify wiring.
    def touchCondV(self):
        print('entrav')
        features = self.condV.selectedFeatures()
        for f in features:
            print(f.id())
    # CLAVES_CONDOMINIOS_VERTICALES (vertical condominium keys) layer: selectionChanged handler
    # NOTE(review): self.cveCondV is never assigned in this file — verify wiring.
    def touchCvesCondV(self):
        print('entracvev')
        features = self.cveCondV.selectedFeatures()
        for f in features:
            print(f.id())
    # CONDOMINIOS_HORIZONTALES (horizontal condominiums) layer: selectionChanged handler
    # NOTE(review): self.condH is never assigned in this file — verify wiring.
    def touchCondH(self):
        print('entrah')
        features = self.condH.selectedFeatures()
        for f in features:
            print(f.id())
    # noinspection PyMethodMayBeStatic
    def tr(self, message):
        """Get the translation for a string using Qt translation API.
        We implement this ourselves since we do not inherit QObject.
        :param message: String for translation.
        :type message: str, QString
        :returns: Translated version of message.
        :rtype: QString
        """
        # noinspection PyTypeChecker,PyArgumentList,PyCallByClass
        return QCoreApplication.translate('Cedula', message)
    def add_action(
        self,
        icon_path,
        text,
        callback,
        enabled_flag=True,
        add_to_menu=True,
        add_to_toolbar=True,
        status_tip=None,
        whats_this=None,
        parent=None):
        """Add a toolbar icon to the toolbar.
        :param icon_path: Path to the icon for this action. Can be a resource
            path (e.g. ':/plugins/foo/bar.png') or a normal file system path.
        :type icon_path: str
        :param text: Text that should be shown in menu items for this action.
        :type text: str
        :param callback: Function to be called when the action is triggered.
        :type callback: function
        :param enabled_flag: A flag indicating if the action should be enabled
            by default. Defaults to True.
        :type enabled_flag: bool
        :param add_to_menu: Flag indicating whether the action should also
            be added to the menu. Defaults to True.
        :type add_to_menu: bool
        :param add_to_toolbar: Flag indicating whether the action should also
            be added to the toolbar. Defaults to True.
        :type add_to_toolbar: bool
        :param status_tip: Optional text to show in a popup when mouse pointer
            hovers over the action.
        :type status_tip: str
        :param parent: Parent widget for the new action. Defaults None.
        :type parent: QWidget
        :param whats_this: Optional text to show in the status bar when the
            mouse pointer hovers over the action.
        :returns: The action that was created. Note that the action is also
            added to self.actions list.
        :rtype: QAction
        """
        icon = QIcon(icon_path)
        action = QAction(icon, text, parent)
        action.triggered.connect(callback)
        action.setEnabled(enabled_flag)
        if status_tip is not None:
            action.setStatusTip(status_tip)
        if whats_this is not None:
            action.setWhatsThis(whats_this)
        if add_to_toolbar:
            self.toolbar.addAction(action)
        if add_to_menu:
            self.iface.addPluginToMenu(
                self.menu,
                action)
        self.actions.append(action)
        return action
    def initGui(self):
        """Create the menu entries and toolbar icons inside the QGIS GUI."""
        icon_path = ':/plugins/Cedula/icon.png'
        self.add_action(
            icon_path,
            text=self.tr(u'Cedula Catastral'),
            callback=self.run,
            parent=self.iface.mainWindow())
    def onClosePlugin(self):
        """Cleanup necessary items here when plugin dockwidget is closed"""
        # disconnects
        self.dlg.closingPlugin.disconnect(self.onClosePlugin)
        print('cerramos el plugin')
    def unload(self):
        """Removes the plugin menu item and icon from QGIS GUI."""
        for action in self.actions:
            self.iface.removePluginMenu(
                self.tr(u'&Cedula'),
                action)
            self.iface.removeToolBarIcon(action)
        # remove the toolbar
        del self.toolbar
    def run(self):
        self.dlg.pushButton.setEnabled(True)
        #self.dlg.closingPlugin.connect(self.onClosePlugin)
        """Run method that performs all the real work"""
        # show the dialog
        self.dlg.show()
        # Run the dialog event loop
        result = self.dlg.exec_()
        # See if OK was pressed
        if result:
            # Do something useful here - delete the line containing pass and
            # substitute with your code.
            # pass
            print("CERRAR")
        # get the layers to use
        # parcel layer
        #self.predio.selectionChanged.disconnect(self.touchPredio)
| [
"dignacio.lopezo@gmail.com"
] | dignacio.lopezo@gmail.com |
ae7be395bf195fbe7533ea1b7fcf07c26d7d12d2 | a63c8f6455b705eaff3d5a55fe83a35d0fb8b0fa | /configures/Parameter_setting.py | ad70828258c8aaa972c75811dc58ac4295ce397f | [] | no_license | Azure-Whale/Portfolio-management-tools | d82516449eb97375f8ea4ce15ddd348e7ec65538 | 4abd97077a1f2237f2d8bf485a05f5955d8376c7 | refs/heads/master | 2023-04-13T04:06:25.906416 | 2023-04-04T13:03:07 | 2023-04-04T13:03:07 | 277,915,533 | 1 | 1 | null | 2023-04-04T13:03:08 | 2020-07-07T20:28:32 | Python | UTF-8 | Python | false | false | 781 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
@File : Parameter_setting.py
@Time : 6/24/2020 5:34 PM
@Author : Kazuo
@Email : azurewhale1127@gmail.com
@Software: PyCharm
'''
from datetime import datetime
"""Hyper parameters"""
'''Init DB date range'''
datetime_object = datetime.strptime('2020-05-10', '%Y-%m-%d')
init_start = datetime_object.date()
datetime_object = datetime.strptime('2020-05-20', '%Y-%m-%d')
init_end = datetime_object.date()
'''Calculate Date range'''
datetime_object = datetime.strptime('2020-04-29', '%Y-%m-%d')
start = datetime_object.date()
datetime_object = datetime.strptime('2020-05-20', '%Y-%m-%d')
end = datetime_object.date()
# Investment inforamtion
investment = 1000000
portfolio_path = 'import/portfolio.xlsx'
# db set
init = False | [
"Azure-Whale@github.com"
] | Azure-Whale@github.com |
4000723d47874d224628ea2b6026b6b070215a9b | fa00a20686cd5fbb72b6e396a7d2234277c4af24 | /trancesection/views.py | ad370f24e30d3694e2cd9a9fb8da45a0128a8f45 | [
"MIT"
] | permissive | xuorig/trancesection | 1f70236e65400aca90a01b827a24a0121fd6ef49 | a7d353bf1bc3bd4598a2b70ee9413cff7d497fc4 | refs/heads/master | 2021-01-10T19:51:28.352422 | 2015-03-17T21:28:57 | 2015-03-17T21:28:57 | 15,836,384 | 0 | 0 | null | 2014-01-14T00:16:28 | 2014-01-12T04:29:54 | CSS | UTF-8 | Python | false | false | 1,681 | py | from trancesection import app
from flask import render_template, jsonify, request
from scrapers import Scraper, AbgtScraper
from trancesection.models import Podcast,Episode,Track
from trancesection import init_db
import trancesection.matchmaking as mm
import soundcloud
# SoundCloud API client used by the matchmaking helpers.
# NOTE(review): the client id is hardcoded in source — rotate it if abused and
# consider loading it from configuration instead.
sc_client = soundcloud.Client(client_id='e926a44d2e037d8e80e98008741fdf91')
@app.route('/')
@app.route('/index')
def index():
    """Front page: the ten most recent episodes plus a handful of podcasts."""
    recent = Episode.query.order_by(Episode.created_on).limit(10).all()
    episodes = [(Podcast.query.get(ep.podcast_id).name, ep) for ep in recent]
    podcasts = Podcast.query.limit(6).all()
    return render_template('index.html', podcasts=podcasts, episodes=episodes)
@app.route('/podcasts')
def podcasts():
    """List every podcast."""
    return render_template('podcasts.html', podcasts=Podcast.query.all())
@app.route('/podcasts/<podcast>')
def podcast(podcast):
    """Show one podcast (looked up by slug) and its episodes."""
    pc = Podcast.query.filter_by(slug=podcast).first()
    return render_template('podcast.html', pc=pc, episodes=pc.episodes)
@app.route('/podcasts/<podcast>/<episode>')
def episode(podcast, episode):
    """Show a single episode with its track list.

    NOTE(review): the episode is looked up by number alone, which assumes
    episode numbers are globally unique across podcasts — confirm the schema.
    (Removed a leftover `print tracks` debug statement.)
    """
    epi = Episode.query.filter_by(number=episode).first()
    pc = Podcast.query.filter_by(slug=podcast).first()
    pcname = pc.name
    tracks = epi.trax.all()
    return render_template('episode.html', episode=epi, pcname=pcname, tracks=tracks)
@app.route('/tracks/<trackname>/')
def track(trackname):
    """Show a single track, looked up by slug."""
    return render_template('track.html', tr=Track.query.filter_by(slug=trackname).first())
@app.route('/_find_match')
def find_match():
    """AJAX endpoint: return a SoundCloud embed (JSON) for the requested track name.

    (Removed a leftover `print track_embed` debug statement.)
    """
    track_name = request.args.get('track_name', '')
    track_embed = dict(html=mm.find_match(track_name))
    return jsonify(track_embed)
| [
"marc-andre.giroux2@usherbrooke.ca"
] | marc-andre.giroux2@usherbrooke.ca |
2b64ebb97713e9334fb0bac1dc5a95611fc6a03f | 9ad2a5d0f7131b9056a0e98aa08b634b34191fa0 | /offline.py | b1c29d2f0f44eb224438b6c4bd845fd37909e20e | [] | no_license | benabbes-slimane-takiedine/Face_recognition_ALTP | 253af05b2cc62df97a9188dc17e92952ffe6d944 | 48a77b2fd785b5d9aad3bf333ffbc2026f13b451 | refs/heads/master | 2023-06-23T10:38:56.550387 | 2021-07-23T01:36:37 | 2021-07-23T01:36:37 | 388,634,672 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,738 | py | import numpy as np
import os
import cv2 as cv
def get_surounding_bits(img):
    """Return the 8 neighbours of the centre of a 3x3 patch, clockwise
    starting from the top-left element."""
    return [
        img[0][0], img[0][1], img[0][2],
        img[1][2],
        img[2][2], img[2][1], img[2][0],
        img[1][0],
    ]
def cal_histogram(img):
    """Return a 16-bin histogram of *img* over the value range [0, 15]."""
    counts, _edges = np.histogram(img, bins=16, range=(0, 15))
    return counts
def cs_altp(center, suroundings, k=0.1):
    """Centre-symmetric adjusted LTP code of one 3x3 neighbourhood.

    Each of the 4 opposite pixel pairs is compared against the threshold
    t = center * k and coded -1 / 0 / 1. Returns three 4-character strings:
    [ternary code, upper binary pass (-1 -> 0), lower binary pass (-1 -> 1)].
    """
    t = center * k
    codes = []
    for i in range(4):
        diff = int(suroundings[i]) - int(suroundings[i + 4])
        if diff < -t:
            codes.append(-1)
        elif diff > t:
            codes.append(1)
        else:
            codes.append(0)
    ternary = ''.join(str(c) for c in codes)
    upper = ''.join('0' if c == -1 else str(c) for c in codes)
    lower = ''.join('1' if c == -1 else '0' for c in codes)
    return [ternary, upper, lower]
def _histograms_for_dir(dir_name):
    """Compute the CS-ALTP descriptor for every image in *dir_name*.

    For each image: build the upper-pass and lower-pass CS-ALTP code images
    (4-bit codes, values 0-15), take a 16-bin histogram of each, and
    concatenate them into a 32-value descriptor.

    :returns: list of [descriptor, filename] pairs
    """
    path = os.path.join(dir_name, '')
    entries = []
    for name in os.listdir(path):
        img = cv.imread(os.path.join(path, name))
        img = cv.cvtColor(img, cv.COLOR_RGB2GRAY)
        upper_codes = np.zeros(img.shape, dtype='uint8')
        lower_codes = np.zeros(img.shape, dtype='uint8')
        height = img.shape[0]
        width = img.shape[1]
        # Borders are skipped: every code needs a full 3x3 neighbourhood.
        for h in range(1, height - 1):
            for w in range(1, width - 1):
                window = img[h - 1:h + 2, w - 1:w + 2]
                suroundings = get_surounding_bits(window)
                result = cs_altp(window[1, 1], suroundings, 0.1)
                upper_codes[h, w] = int(result[1], 2)
                lower_codes[h, w] = int(result[2], 2)
        descriptor = list(cal_histogram(upper_codes)) + list(cal_histogram(lower_codes))
        entries.append([descriptor, name])
    return entries


def part_1():
    """Build CS-ALTP descriptors for the training ('Faces') and test ('tst')
    image folders and save them to disk as numpy arrays
    ('histos_of_360.npy' and 'histos_of_40.npy').

    The two folders were previously processed by two duplicated loops; both
    now share _histograms_for_dir().
    """
    histograms = _histograms_for_dir('Faces')
    histogramstst = _histograms_for_dir('tst')
    # saving the result histograms in arrays on disk
    path_data = os.path.join('', '')
    np.save(path_data + 'histos_of_360', np.array(histograms))
    np.save(path_data + 'histos_of_40', np.array(histogramstst))
    print('finished 1/3')
| [
"noreply@github.com"
] | noreply@github.com |
d0b3943dc7b79e7aaa19525cd9192680fe0d56bd | eac3fb72ebf0bae220a741986a777a8c7c1ae60b | /ex3.py | 49e75e81fc433b0f4138e1cd2a010a221533c31f | [] | no_license | renvasanth/Learning | a1591765c55a6e847fbb0ccd6936c7bd72d4284f | 7ecdb79cf17b26aa43ec68909c5d8a400e5bca47 | refs/heads/master | 2021-01-10T05:06:52.834647 | 2015-11-19T07:25:01 | 2015-11-19T07:25:01 | 43,491,185 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 477 | py | print " I will now count my chickens:"
print "Hens", 25 + 30 / 6
print "Roosters", 100 - 25 * 3 % 4
print "Now I will count the eggs:"
print 3 + 2 + 1 - 5 + 4 % 2 - 1 / 4 + 6
print "Is it true that 3 + 2 < 5 - 7?"
print 3 + 2 < 5 - 7
print "What is 3 + 2?", 3 + 2
print "What is 5 - 7?", 5 - 7
print "Oh, that's why it's False."
print "How about some more."
print "Is it greater?", 5 > -2
print "Is it greater or equal?", 5 >= -2
print "Is it less or equal?", 5 <= -2
| [
"renvasanth@yahoo.co.in"
] | renvasanth@yahoo.co.in |
da64bfaf14f0546a07b4a4164b5f97b7f9e7119d | a7a7946c9032d638c3a6432c2ce8f3d9cf7532a3 | /bin/shopify_api.py | e8abb535e9c3d72ff6735dfda4e5295f8987f985 | [
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] | permissive | cosito-bonito/shopify_python_api | 0e797d2720296bfbe7c7c304e0a0434fc9bfa38e | eaf4684ae4da4e9cfe8fe459b76e70c1610a34b9 | refs/heads/master | 2022-04-22T20:26:35.451970 | 2020-04-28T13:15:42 | 2020-04-28T13:15:42 | 263,233,088 | 0 | 1 | MIT | 2020-05-12T04:34:58 | 2020-05-12T04:34:57 | null | UTF-8 | Python | false | false | 416 | py | #!/usr/bin/env python
"""shopify_api.py wrapper script for running it the source directory"""
import sys
import os.path
# Use the development rather than installed version
project_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, project_root)
with open(os.path.join(project_root, 'scripts', 'shopify_api.py')) as f:
code = compile(f.read(), f.name, 'exec')
exec(code)
| [
"Dylan.Smith@shopify.com"
] | Dylan.Smith@shopify.com |
8aeb1860ef47d39a2c88759972b762e32d56d78e | 91dc5e01081bc50812692b8c3d30c2c4e5200ccc | /tests/test_urljoin.py | 40d78c80c48e9862e50445b186489419eb52d03c | [
"MIT"
] | permissive | datasets-org/urljoin | 11a3db083d08bd97f9bc7e8df208deddea5b0745 | 383dea3b0c9e92a2e15eb58b0c31177dfd007839 | refs/heads/master | 2021-08-31T15:07:48.626475 | 2017-12-21T20:46:26 | 2017-12-21T20:46:26 | 115,023,262 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 767 | py | from urljoin import url_path_join
def test_url_path_join():
    """A joined path segment has its trailing slash stripped by default."""
    joined = url_path_join("http://example.com/", "/path/")
    assert joined == "http://example.com/path"
def test_url_path_join_nested():
    """Nested path components inside one segment are preserved."""
    joined = url_path_join("http://example.com/", "/path/path")
    assert joined == "http://example.com/path/path"
def test_url_path_join_single():
    """A bare base URL loses its trailing slash."""
    joined = url_path_join("http://example.com/")
    assert joined == "http://example.com"
def test_url_path_join_trailing_slash():
    """trailing_slash=True keeps a slash at the end of the result."""
    joined = url_path_join("http://example.com/", "/path/", trailing_slash=True)
    assert joined == "http://example.com/path/"
def test_url_path_join_multiple():
    """Several segments are joined in order with single slashes."""
    joined = url_path_join("http://example.com/", "/path/", "a", "b")
    assert joined == "http://example.com/path/a/b"
| [
"tivvit@seznam.cz"
] | tivvit@seznam.cz |
c2c3d0c9f2cc57dc9f5b814e773d9bfae23f9703 | 6cc84bffe7f6669d9ac4d98cd72f17033667f12a | /venv/Scripts/easy_install-3.6-script.py | b91608f6e7757be7e5a9a1faa47816d993da4f76 | [] | no_license | AscernNSU/MP3-FourierSpectrum | efc31f2bd8d66eebe89c66cc99be14668aba374f | 1ca6923df190669d2f69e0ac87208dbbd74a443d | refs/heads/master | 2022-04-11T16:21:59.414193 | 2020-03-29T15:51:13 | 2020-03-29T15:51:13 | 250,780,373 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 449 | py | #!D:\Python\MP3-FourierSpectrum\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==40.8.0','console_scripts','easy_install-3.6'
__requires__ = 'setuptools==40.8.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==40.8.0', 'console_scripts', 'easy_install-3.6')()
)
| [
"alexey.romanov.nsu@gmail.com"
] | alexey.romanov.nsu@gmail.com |
288b4bd41f49b1124f0b189c46fb7fc1cba2ea02 | 066f812b051afffbe1a05630a728d15bab9f02bc | /django_503/models.py | 53c13d2c325313df137b454d59365c4a456316b9 | [
"MIT"
] | permissive | soul4code/django-503 | 04714af1a72813d5f6f1691eada97773adbe9c30 | 48f30e176f334988dafb48dff7c604b7f72ab290 | refs/heads/master | 2021-12-14T00:08:07.899188 | 2015-07-16T18:31:09 | 2015-07-16T18:31:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 219 | py | from django.db import models
from django.utils.translation import ugettext as _
class Config(models.Model):
    """A single boolean flag stored under a unique string key."""
    key = models.CharField(verbose_name=_('Key'), max_length=100, unique=True)
    value = models.BooleanField(verbose_name=_('Value'))
| [
"baryshev@gmail.com"
] | baryshev@gmail.com |
133fc33a335e1265586ef671d075c0c7376d22a6 | 1e6a3a58d1217e2fd6ea48a6cfe6546b2c6e58d4 | /Phishing_Legitimate_Full/Experimentación Logistic Regression.py | 484b8704d906d5dcf27470e5351b24ef5f649b02 | [] | no_license | eduard0mv/ExperimentacionTesis | e4b3d6c0c0037e5395123dd55fce2ef639d775ad | 8eaad627cf716f96dde786d53332dd441cab455d | refs/heads/main | 2023-03-08T06:19:27.490173 | 2021-02-01T16:52:47 | 2021-02-01T16:52:47 | 335,010,002 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,058 | py | # -*- coding: utf-8 -*-
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score,precision_score,recall_score,f1_score
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import GridSearchCV
from sklearn.model_selection import cross_val_score
ds = pd.read_csv('Phishing_Legitimate_Full.csv')
y = ds.loc[:,'CLASS_LABEL']
X = ds.loc[:,ds.columns!='CLASS_LABEL']
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.2, random_state = 100)
clf=LogisticRegression(solver='newton-cg',penalty='none',max_iter=1627)
scores_acc = cross_val_score(clf, X,y, cv=10, scoring='accuracy')
scores_pre = cross_val_score(clf, X,y, cv=10, scoring='precision')
scores_rec = cross_val_score(clf, X,y, cv=10, scoring='recall')
scores_f1 = cross_val_score(clf, X,y, cv=10, scoring='f1')
print (scores_acc.mean())
print (scores_pre.mean())
print (scores_rec.mean())
print (scores_f1.mean())
clf.fit(X_train,y_train)
y_pred_lr=clf.predict(X_test)
print ("\nLa exactitud de Regresion Logistica es: ",
accuracy_score(y_test,y_pred_lr))
print ("La precision de Regresion Logistica es: ",
precision_score(y_test,y_pred_lr))
print ("La recuperacion de Regresion Logistica es: ",
recall_score(y_test,y_pred_lr))
print ("El valor F de Regresion Logistica es: ",
f1_score(y_test,y_pred_lr))
parameters = {'penalty': ["l1","l2","elasticnet","none"],
'solver': ["newton-cg","lbfgs","liblinear","sag","saga"]}
grid_clf = GridSearchCV(clf,parameters,cv=10)
grid_clf.fit(X_train,y_train)
estimator = grid_clf.best_estimator_
y_pred = estimator.predict(X_test)
print (accuracy_score(y_test,y_pred))
print (precision_score(y_test,y_pred,average='weighted'))
print (recall_score(y_test,y_pred,average='weighted'))
print (f1_score(y_test,y_pred,average='weighted'))
#print (grid_clf.best_score_)
print (grid_clf.best_estimator_)
print (grid_clf.best_params_)
| [
"eduardo.moncada7398@gmail.com"
] | eduardo.moncada7398@gmail.com |
f43ed25f781212d1b64771578d20444c472a816a | 9ed7808e6083363cb7158575548fae4d47c945b4 | /Battery_Testing_Software/labphew/core/base/__init__.py | 559ae4a782097f3c12dbae45ab60a692e95db287 | [
"Apache-2.0"
] | permissive | Elena-Kyprianou/FAIR-Battery | 79bc844ad77c100568f3867586228a090fb8d84d | 85a1f3b55b7025d1c7176b96a0b68193687285d7 | refs/heads/main | 2023-08-25T21:35:00.061579 | 2021-10-18T20:40:20 | 2021-10-18T20:40:20 | 419,263,403 | 1 | 0 | Apache-2.0 | 2021-10-20T09:21:39 | 2021-10-20T09:21:39 | null | UTF-8 | Python | false | false | 100 | py | # from .operator_base import OperatorBase
# from .view_base import MonitorWindowBase, ScanWindowBase | [
"cinbarker@gmail.com"
] | cinbarker@gmail.com |
9a09e23a306ac2775dccaa67e15f9ed859cc7f88 | 9d278285f2bc899ac93ec887b1c31880ed39bf56 | /ondoc/plus/migrations/0019_plusproposer_merchant_code.py | ff1def92b9ab218ad60d2f57d45ff8b56f588bab | [] | no_license | ronit29/docprime | 945c21f8787387b99e4916cb3ba1618bc2a85034 | 60d4caf6c52a8b70174a1f654bc792d825ba1054 | refs/heads/master | 2023-04-01T14:54:10.811765 | 2020-04-07T18:57:34 | 2020-04-07T18:57:34 | 353,953,576 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 406 | py | # Generated by Django 2.0.5 on 2019-09-05 13:02
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('plus', '0018_auto_20190905_1603'),
]
operations = [
migrations.AddField(
model_name='plusproposer',
name='merchant_code',
field=models.CharField(max_length=100, null=True),
),
]
| [
"akashs@docprime.com"
] | akashs@docprime.com |
bc47db2fbd5e552d18ef8b17070331d9bf86f0a9 | 595c69f717fc3ceb4e0701cc433f6d7f927b6fdb | /Hogworts/Page_Object/Pages/main.py | 7f61ff32429f5e1477ac8b6a1941e7c335deb355 | [
"MIT"
] | permissive | archerckk/PyTest | d6462ebf46c6dbd5bb3ce03666aad0c2665367cd | 610dd89df8d70c096f4670ca11ed2f0ca3196ca5 | refs/heads/master | 2022-03-26T21:09:25.891745 | 2021-06-14T01:39:36 | 2021-06-14T01:39:36 | 129,497,345 | 0 | 0 | null | 2020-01-14T10:57:49 | 2018-04-14T08:23:03 | Python | UTF-8 | Python | false | false | 533 | py | from Page_Object.Pages.base_page import Base_Page
from Page_Object.Pages.login import Login
from Page_Object.Pages.register import Register
from selenium.webdriver.common.by import By
class Main(Base_Page):
_base_url='https://work.weixin.qq.com/'
def goto_register(self):
self.find(By.CSS_SELECTOR,'.index_head_info_pCDownloadBtn').click()
return Register(self._driver)
def goto_login(self):
self.find(By.CSS_SELECTOR,'.index_top_operation_loginBtn').click()
return Login(self._driver) | [
"archerckk@163.com"
] | archerckk@163.com |
defc1b7d74de6d1d58c5993550f7e8e9ad068c89 | 0f0a7adfae45e07a896c5cd5648ae081d4ef7790 | /python数据结构/python黑马数据结构/排序于搜索/桶排序.py | 12d443dea83de14b64c2fafd0db8a034651882fd | [] | no_license | renlei-great/git_window- | e2c578544c7a8bdd97a7a9da7be0464d6955186f | 8bff20a18d7bbeeaf714aa49bf15ab706153cc28 | refs/heads/master | 2021-07-19T13:09:01.075494 | 2020-06-13T06:14:37 | 2020-06-13T06:14:37 | 227,722,554 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,463 | py | lista = [12, 4, 5, 6, 22, 3, 43, 654, 765, 7, 234]
# 桶排序
"""
桶排序就是找出最大值和最小值,在这个区间进行分桶,然后将数组中的数按区间装桶,然后在对每个桶进行排序
"""
def pail_sort(alist):
"""桶排序"""
n = len(alist)
min_cur, max_cur = 0, 0
cur = 1
# 找出最大
while cur < n:
if alist[min_cur] > alist[cur]:
min_cur = cur
cur += 1
cur = 1
# 找出最小
while cur < n:
if alist[max_cur] < alist[cur]:
max_cur = cur
cur += 1
min_number, max_number = alist[min_cur], alist[max_cur]
# 初始化桶,和桶的区间,分出3个桶
for i in range(1,4):
number_name = 'number' + str(i)
pail_name = 'pail' + str(i)
number = max_number // i
setattr(pail_sort, pail_name, [])
setattr(pail_sort, number_name, number)
# 往桶里封装
for i in alist:
if i <= getattr(pail_sort, 'number1') and i > getattr(pail_sort, 'number2'):
pail_sort.__dict__['pail1'].append(i)
elif i < getattr(pail_sort, 'number2') and i > getattr(pail_sort, 'number3'):
pail_sort.__dict__['pail2'].append(i)
elif i < getattr(pail_sort, 'number3'):
pail_sort.__dict__['pail3'].append(i)
# 对每个桶进行排序后拼接返回
sort_pail = []
for i in range(3,0, -1):
sort_pail += marge_sort(pail_sort.__dict__['pail' + str(i)])
return sort_pail
def marge_sort(alist):
"""归并排序"""
n = len(alist)
if n <= 1:
return alist
mid = n // 2
left_li = marge_sort(alist[:mid])
right_li = marge_sort(alist[mid:])
left_cur, right_cur = 0, 0
result = []
while left_cur < len(left_li) and right_cur < len(right_li):
if left_li[left_cur] < right_li[right_cur]:
result.append(left_li[left_cur])
left_cur += 1
elif left_li[left_cur] > right_li[right_cur]:
result.append(right_li[right_cur])
right_cur += 1
elif left_li[left_cur] == right_li[right_cur]:
result.append(left_li[left_cur])
left_cur += 1
result.append(right_li[right_cur])
right_cur += 1
result += left_li[left_cur:] + right_li[right_cur:]
return result
if __name__ == "__main__":
new_li = pail_sort(lista)
# new_li = marge_sort(lista)
print(new_li)
| [
"1415977534@qq.com"
] | 1415977534@qq.com |
3f2a7f8ca8c8b949eb087d6b60465bf94f7e9e90 | ac01d8bdab2140eae6332613142b784484877b78 | /main.py | c4d5cd5bb342e7b9329bf9ee51a5c37957b7ec15 | [] | no_license | 2020668/api_automation_course | eb19322485fdb7db4b9586597895c3ac97727e96 | 33da9f5f1f17de5a5892d28a9f6feea09e8c4adc | refs/heads/master | 2022-12-29T22:32:02.321058 | 2020-10-15T03:24:32 | 2020-10-15T03:24:32 | 304,195,531 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,271 | py | # -*- coding: utf-8 -*-
"""
=================================
Author: keen
Created on: 2019/9/2
E-mail:keen2020@outlook.com
=================================
"""
import unittest
import os
import time
from library.HTMLTestRunnerNew import HTMLTestRunner
from common.config import conf
from common.constant import CASE_DIR, REPORT_DIR
from common.send_email import SendEmail
_title = conf.get('report', 'title')
_description = conf.get('report', 'description')
_tester = conf.get('report', 'tester')
report_name = conf.get('report', 'report_name')
report_name = time.strftime("%Y%m%d%H%M%S", time.localtime()) + "_" + report_name
mail_title = conf.get('mail', 'mail_title')
mail_message = conf.get('mail', 'mail_message')
file_path = os.path.join(REPORT_DIR, report_name)
suite = unittest.TestSuite() # 创建测试集合
loader = unittest.TestLoader()
suite.addTest(loader.discover(CASE_DIR))
with open(file_path, 'wb') as f:
runner = HTMLTestRunner(
stream=f,
verbosity=2,
title=_title,
description=_description,
tester=_tester
)
runner.run(suite)
# 发送email
# SendEmail.send_qq_file_mail(mail_title, mail_message, file_path)
# SendEmail.send_outlook_file_mail(mail_title, mail_message, file_path)
| [
"keen2020@outlook.com"
] | keen2020@outlook.com |
163c19a57d3f954bdad45a05c1c4f19e3be6b9da | 22af3b24f10f18f22b3c81ee59d25484bb4b8845 | /Intro to Algorithm/counting_sort.py | 8da0c2b1b8ae2eea1da4c8bdffa25587a83cb1ad | [] | no_license | zhou-1/Algorithm | 8f46159f67e70776cb211b274a3ea0aed64470b0 | a005715c4448efddf224edf214f42f3814234524 | refs/heads/master | 2021-05-05T08:57:41.499438 | 2021-04-17T15:12:33 | 2021-04-17T15:12:33 | 119,095,099 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 484 | py | def counting(data):
counts = [0 for i in xrange(max(data)+1)]
for el in data:
counts[el] += 1
for index in xrange(1, len(counts)):
counts[index] = counts[index-1] + counts[index]
L = [0 for loop in xrange(len(data)+1)]
for el in data:
index = counts[el] - 1
L[index] = el
counts[el] -= 1
return L
data = [27, 4, 15, 9, 110, 0, 13, 25, 1, 17, 802, 66, 25, 45, 97, 9]
assert sorted(data) == counting(data)
| [
"noreply@github.com"
] | noreply@github.com |
8d7f9c7e120b85f5ce675ea201d2ae03c6e55998 | 2b642643fa8c3fc9f340d90451afd197accd29e3 | /obsapis/views/admin.py | 5d570ffe4863520f2990ff4ada05ef2ed165fd1b | [] | no_license | orochvilato/obsapis | aade4c3f10c9c2eeb74c7d94ff4ed656d3798100 | c71364859ea539876923f0d899fe6ed51e17e105 | refs/heads/master | 2022-12-24T07:37:31.940355 | 2019-10-11T11:03:19 | 2019-10-11T11:03:19 | 110,365,503 | 0 | 0 | null | 2022-12-07T23:42:36 | 2017-11-11T17:30:38 | Python | UTF-8 | Python | false | false | 835 | py | # -*- coding: utf-8 -*-
from obsapis import app,use_cache,mdb
from flask import request
from obsapis.tools import image_response,json_response,cache_function, getdot, strip_accents, logitem
import re
import random
import datetime
import pygal
from obsapis.config import cache_pages_delay
from obsapis.controllers.admin.imports.documents import import_docs
from obsapis.controllers.admin.updates.scrutins import updateScrutinsTexte
from obsapis.controllers.admin.updates.deputes import updateDeputesContacts
@app.route('/admin/updateScrutinsRefs')
def view_updateScrutinsRefs():
import_docs()
updateScrutinsTexte()
return "ok"
@app.route('/admin/updateDeputesContacts')
def view_updateDeputesContacts():
return json_response(updateDeputesContacts())
#@app.route('/charts/participationgroupes')
#def votesgroupes():
| [
"olivierrochvilato@yahoo.com"
] | olivierrochvilato@yahoo.com |
a0485c4cb332ebd75e227c8399d966b35342cc60 | 623065fb8f2fec97c7a4e201bff7ff1d9578e457 | /imgviz/data/kitti/__init__.py | afb8eb994cbe1b8a3520b78d531e100de2e1bc1e | [] | no_license | bigdatasciencegroup/imgviz | 4759c4264a43e9d37429489cc63a8a00fbb489d5 | cec9f1e3cc02cac46d11a99c63c696b8743ba6f1 | refs/heads/master | 2020-08-21T23:39:44.038394 | 2019-09-09T13:55:57 | 2019-09-09T13:55:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 700 | py | import os.path as osp
import numpy as np
here = osp.dirname(osp.abspath(__file__))
def read_pose_file(filename):
with open(filename, 'r') as f:
transforms = []
for one_line in f:
one_line = one_line.split(' ')
Rt = [float(pose) for pose in one_line] + [0, 0, 0, 1]
Rt = np.reshape(np.array(Rt), (4, 4))
assert abs(Rt[3].sum() - 1) < 1e-5
transforms.append(Rt)
return transforms
def kitti_odometry():
# http://www.cvlibs.net/datasets/kitti/eval_odometry.php
pose_file = osp.join(here, 'odometry/00.txt')
transforms = read_pose_file(pose_file)
data = {'transforms': transforms}
return data
| [
"www.kentaro.wada@gmail.com"
] | www.kentaro.wada@gmail.com |
d9ebe87b47ad3410683fd11d9ff44c615a8a5493 | a99616f6c09f7c5eced19540f3a92b5edb4ef02d | /big-O/frank_burkholder/lecture_big-O/find_anagrams.py | 734d9cfaf949156a2a86d835979bc2c22245231c | [] | no_license | wmarshall484/DSI_LECTURES_2 | 27d33755f3d93e216f63e87fc05730862e217718 | eb67298353f763da82ea6498e5c254e2b246faf8 | refs/heads/master | 2022-12-07T23:31:00.375596 | 2019-11-01T15:25:33 | 2019-11-01T15:25:33 | 220,568,205 | 1 | 0 | null | 2022-11-23T04:29:05 | 2019-11-09T00:20:08 | Jupyter Notebook | UTF-8 | Python | false | false | 5,560 | py | import numpy as np
import time
from collections import defaultdict
import matplotlib.pyplot as plt
import argparse
plt.rcParams.update({'font.size': 14})
def find_anagrams_m1(lst):
time_start = time.time()
result = []
for word1 in lst:
for word2 in lst:
if word1 != word2 and sorted(word1) == sorted(word2):
if word1 not in result:
result.append(word1)
if word2 not in result:
result.append(word2)
time_end = time.time()
time_elapsed = time_end - time_start
return (time_elapsed, result)
def find_anagrams_m2(lst):
time_start = time.time()
result = []
d = defaultdict(list)
for word in lst:
d[str(sorted(word))].append(word)
for key, value in d.items():
if len(value) > 1:
result.extend(value)
time_end = time.time()
time_elapsed = time_end - time_start
return (time_elapsed, result)
def make_anagram_lst(all_anagrams, n_lst):
return [list(np.random.choice(all_anagrams, size=n, replace=False)) for n
in n_lst]
def plot_computation_time(n_lst, times_lst, title, label, color, fname,
keepopen=False):
fig = plt.figure(figsize=(8, 6))
ax = fig.add_subplot(1, 1, 1)
ax.plot(n_lst, times_lst, 'o', color=color, label=label)
ax.set_xlabel('n, number of words in input')
ax.set_ylabel('computation time (s)')
ax.set_xlim((0, max(n_lst)*1.1))
ax.set_title(title)
if keepopen == 'True':
return ax
else:
ax.legend(loc='upper left', frameon=False)
plt.tight_layout()
plt.savefig(fname, dpi=100)
plt.close()
return None
def plot_fit(x, y, p, fname, ax):
if len(p) == 2:
label = f'fit: {p[0]:0.1e}n + {p[1]:0.1e}'
else:
label = f'fit: {p[0]:0.1e}n^2 + {p[1]:0.1e}n + {p[2]:0.1e}'
ax.plot(x, y, 'k:', label=label)
ax.legend(loc='upper left', frameon=False)
plt.tight_layout()
plt.savefig(fname, dpi=100)
plt.close()
def get_averaged_computation_times(all_anagrams, n_lst, num_times):
result_shape = (num_times, len(n_lst))
computation_times_m1 = np.zeros(result_shape)
computation_times_m2 = np.zeros(result_shape)
for i in range(num_times):
print(f"\nWord list {i+1}")
anagrams_lst = make_anagram_lst(all_anagrams, n_lst)
print("Method 1 - double for")
anagram_results_m1 = [find_anagrams_m1(anagrams) for anagrams in anagrams_lst]
print("Method 2 - use dictionary")
anagram_results_m2 = [find_anagrams_m2(anagrams) for anagrams in anagrams_lst]
computation_times_m1[i] = np.array([results[0] for results in anagram_results_m1])
computation_times_m2[i] = np.array([results[0] for results in anagram_results_m2])
comp_time_m1_avg = computation_times_m1.mean(axis=0)
comp_time_m2_avg = computation_times_m2.mean(axis=0)
return (comp_time_m1_avg, comp_time_m2_avg)
if __name__ == '__main__':
ap = argparse.ArgumentParser()
ap.add_argument("-f", "--file", required=True, help="text file containing words")
ap.add_argument("-m", "--mode", required=True, help="""mode determining
output. single: computation times based on one random
selection from word list, average: computation times based
on average of 10 random selections from word list""")
ap.add_argument("-t", "--fit", required=True, help="""True or False: plot
polynomial fit on plots""")
args = vars(ap.parse_args())
all_anagrams = np.loadtxt(args['file'], dtype=str)
max_n = all_anagrams.shape[0]
n_step = max_n // 10
n_lst = list(range(n_step, max_n, n_step))
if args['mode'] == 'single':
print("\nFinding computation times based on one selection from word list.")
anagrams_lst = make_anagram_lst(all_anagrams, n_lst)
print("Method 1 - double for")
anagram_results_m1 = [find_anagrams_m1(anagrams) for anagrams in anagrams_lst]
print("Method 2 - use dictionary")
anagram_results_m2 = [find_anagrams_m2(anagrams) for anagrams in anagrams_lst]
comp_times_m1 = [results[0] for results in anagram_results_m1]
comp_times_m2 = [results[0] for results in anagram_results_m2]
else:
print("\nFinding computation times based on ten selections from word list.")
comp_times_m1, comp_times_m2 = get_averaged_computation_times(all_anagrams,
n_lst, num_times=10)
if args['fit'] == 'True':
p_m1 = np.polyfit(n_lst, comp_times_m1, deg=2)
p_m2 = np.polyfit(n_lst, comp_times_m2, deg=1)
x = np.linspace(0, max(n_lst)*1.1)
y_m1 = np.polyval(p_m1, x)
y_m2 = np.polyval(p_m2, x)
print("\nPlotting")
fname = 'm1_plot.png'
ax = plot_computation_time(n_lst, comp_times_m1, title='method 1, double for',
label='m1: double for', color='blue', fname=fname,
keepopen=args['fit'])
if args['fit'] == 'True':
plot_fit(x, y_m1, p_m1, fname, ax)
fname = 'm2_plot.png'
ax = plot_computation_time(n_lst, comp_times_m2, title='method 2, use dictionary',
label='m2: use dict', color='green', fname=fname,
keepopen=args['fit'])
if args['fit'] == 'True':
plot_fit(x, y_m2, p_m2, fname, ax)
print('\nComplete')
| [
"frank.burkholder@gmail.com"
] | frank.burkholder@gmail.com |
a8b44fa0be0fcec467b480ed13e5e1ddc5008900 | fc00b177802c49cf04dc6a8e430093bc14ae9b53 | /venv/Lib/site-packages/mypy/typeshed/stdlib/2and3/ctypes/__init__.pyi | 616d9df9283314885fca81c82384f607e3cd5fbd | [] | permissive | artisakov/vigilant-journey | 9c8264d36da5745374a0d08b0b0288a70f978a11 | 4fed9026071a64489d26422ba7cd1a9b9cb05e16 | refs/heads/master | 2022-11-16T03:10:06.418221 | 2020-07-16T07:33:06 | 2020-07-16T07:33:06 | 238,490,887 | 0 | 1 | MIT | 2020-03-01T10:12:22 | 2020-02-05T16:03:07 | HTML | UTF-8 | Python | false | false | 11,771 | pyi | # Stubs for ctypes
from array import array
from typing import (
Any, Callable, ClassVar, Iterator, Iterable, List, Mapping, Optional, Sequence, Sized, Text,
Tuple, Type, Generic, TypeVar, overload,
)
from typing import Union as _UnionT
import sys
_T = TypeVar('_T')
_DLLT = TypeVar('_DLLT', bound=CDLL)
_CT = TypeVar('_CT', bound=_CData)
RTLD_GLOBAL: int = ...
RTLD_LOCAL: int = ...
DEFAULT_MODE: int = ...
class CDLL(object):
_func_flags_: ClassVar[int] = ...
_func_restype_: ClassVar[_CData] = ...
_name: str = ...
_handle: int = ...
_FuncPtr: Type[_FuncPointer] = ...
def __init__(
self,
name: Optional[str],
mode: int = ...,
handle: Optional[int] = ...,
use_errno: bool = ...,
use_last_error: bool = ...,
winmode: Optional[int] = ...,
) -> None: ...
def __getattr__(self, name: str) -> _FuncPointer: ...
def __getitem__(self, name: str) -> _FuncPointer: ...
if sys.platform == 'win32':
class OleDLL(CDLL): ...
class WinDLL(CDLL): ...
class PyDLL(CDLL): ...
class LibraryLoader(Generic[_DLLT]):
def __init__(self, dlltype: Type[_DLLT]) -> None: ...
def __getattr__(self, name: str) -> _DLLT: ...
def __getitem__(self, name: str) -> _DLLT: ...
def LoadLibrary(self, name: str) -> _DLLT: ...
cdll: LibraryLoader[CDLL] = ...
if sys.platform == 'win32':
windll: LibraryLoader[WinDLL] = ...
oledll: LibraryLoader[OleDLL] = ...
pydll: LibraryLoader[PyDLL] = ...
pythonapi: PyDLL = ...
# Anything that implements the read-write buffer interface.
# The buffer interface is defined purely on the C level, so we cannot define a normal Protocol
# for it. Instead we have to list the most common stdlib buffer classes in a Union.
_WritableBuffer = _UnionT[bytearray, memoryview, array, _CData]
# Same as _WritableBuffer, but also includes read-only buffer types (like bytes).
_ReadOnlyBuffer = _UnionT[_WritableBuffer, bytes]
class _CDataMeta(type):
# By default mypy complains about the following two methods, because strictly speaking cls
# might not be a Type[_CT]. However this can never actually happen, because the only class that
# uses _CDataMeta as its metaclass is _CData. So it's safe to ignore the errors here.
def __mul__(cls: Type[_CT], other: int) -> Type[Array[_CT]]: ... # type: ignore
def __rmul__(cls: Type[_CT], other: int) -> Type[Array[_CT]]: ... # type: ignore
class _CData(metaclass=_CDataMeta):
_b_base: int = ...
_b_needsfree_: bool = ...
_objects: Optional[Mapping[Any, int]] = ...
@classmethod
def from_buffer(cls: Type[_CT], source: _WritableBuffer, offset: int = ...) -> _CT: ...
@classmethod
def from_buffer_copy(cls: Type[_CT], source: _ReadOnlyBuffer, offset: int = ...) -> _CT: ...
@classmethod
def from_address(cls: Type[_CT], address: int) -> _CT: ...
@classmethod
def from_param(cls: Type[_CT], obj: Any) -> _UnionT[_CT, _CArgObject]: ...
@classmethod
def in_dll(cls: Type[_CT], library: CDLL, name: str) -> _CT: ...
class _PointerLike(_CData): ...
_ECT = Callable[[Optional[Type[_CData]],
_FuncPointer,
Tuple[_CData, ...]],
_CData]
_PF = _UnionT[
Tuple[int],
Tuple[int, str],
Tuple[int, str, Any]
]
class _FuncPointer(_PointerLike, _CData):
restype: _UnionT[Type[_CData], Callable[[int], None], None] = ...
argtypes: Sequence[Type[_CData]] = ...
errcheck: _ECT = ...
@overload
def __init__(self, address: int) -> None: ...
@overload
def __init__(self, callable: Callable[..., Any]) -> None: ...
@overload
def __init__(self, func_spec: Tuple[_UnionT[str, int], CDLL],
paramflags: Tuple[_PF, ...] = ...) -> None: ...
@overload
def __init__(self, vtlb_index: int, name: str,
paramflags: Tuple[_PF, ...] = ...,
iid: pointer[c_int] = ...) -> None: ...
def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
class ArgumentError(Exception): ...
def CFUNCTYPE(restype: Optional[Type[_CData]],
*argtypes: Type[_CData],
use_errno: bool = ...,
use_last_error: bool = ...) -> Type[_FuncPointer]: ...
if sys.platform == 'win32':
def WINFUNCTYPE(restype: Optional[Type[_CData]],
*argtypes: Type[_CData],
use_errno: bool = ...,
use_last_error: bool = ...) -> Type[_FuncPointer]: ...
def PYFUNCTYPE(restype: Optional[Type[_CData]],
*argtypes: Type[_CData]) -> Type[_FuncPointer]: ...
class _CArgObject: ...
# Any type that can be implicitly converted to c_void_p when passed as a C function argument.
# (bytes is not included here, see below.)
_CVoidPLike = _UnionT[_PointerLike, Array[Any], _CArgObject, int]
# Same as above, but including types known to be read-only (i. e. bytes).
# This distinction is not strictly necessary (ctypes doesn't differentiate between const
# and non-const pointers), but it catches errors like memmove(b'foo', buf, 4)
# when memmove(buf, b'foo', 4) was intended.
_CVoidConstPLike = _UnionT[_CVoidPLike, bytes]
def addressof(obj: _CData) -> int: ...
def alignment(obj_or_type: _UnionT[_CData, Type[_CData]]) -> int: ...
def byref(obj: _CData, offset: int = ...) -> _CArgObject: ...
_PT = TypeVar('_PT', bound=_PointerLike)
def cast(obj: _UnionT[_CData, _CArgObject], type: Type[_PT]) -> _PT: ...
def create_string_buffer(init_or_size: _UnionT[int, bytes],
size: Optional[int] = ...) -> Array[c_char]: ...
c_buffer = create_string_buffer
def create_unicode_buffer(init_or_size: _UnionT[int, Text],
size: Optional[int] = ...) -> Array[c_wchar]: ...
if sys.platform == 'win32':
def DllCanUnloadNow() -> int: ...
def DllGetClassObject(rclsid: Any, riid: Any, ppv: Any) -> int: ... # TODO not documented
def FormatError(code: int) -> str: ...
def GetLastError() -> int: ...
def get_errno() -> int: ...
if sys.platform == 'win32':
def get_last_error() -> int: ...
def memmove(dst: _CVoidPLike, src: _CVoidConstPLike, count: int) -> None: ...
def memset(dst: _CVoidPLike, c: int, count: int) -> None: ...
def POINTER(type: Type[_CT]) -> Type[pointer[_CT]]: ...
# The real ctypes.pointer is a function, not a class. The stub version of pointer behaves like
# ctypes._Pointer in that it is the base class for all pointer types. Unlike the real _Pointer,
# it can be instantiated directly (to mimic the behavior of the real pointer function).
class pointer(Generic[_CT], _PointerLike, _CData):
_type_: ClassVar[Type[_CT]] = ...
contents: _CT = ...
def __init__(self, arg: _CT = ...) -> None: ...
@overload
def __getitem__(self, i: int) -> _CT: ...
@overload
def __getitem__(self, s: slice) -> List[_CT]: ...
@overload
def __setitem__(self, i: int, o: _CT) -> None: ...
@overload
def __setitem__(self, s: slice, o: Iterable[_CT]) -> None: ...
def resize(obj: _CData, size: int) -> None: ...
if sys.version_info < (3,):
def set_conversion_mode(encoding: str, errors: str) -> Tuple[str, str]: ...
def set_errno(value: int) -> int: ...
if sys.platform == 'win32':
def set_last_error(value: int) -> int: ...
def sizeof(obj_or_type: _UnionT[_CData, Type[_CData]]) -> int: ...
def string_at(address: _CVoidConstPLike, size: int = ...) -> bytes: ...
if sys.platform == 'win32':
def WinError(code: Optional[int] = ...,
desc: Optional[str] = ...) -> WindowsError: ...
def wstring_at(address: _CVoidConstPLike, size: int = ...) -> str: ...
class _SimpleCData(Generic[_T], _CData):
value: _T = ...
def __init__(self, value: _T = ...) -> None: ...
class c_byte(_SimpleCData[int]): ...
class c_char(_SimpleCData[bytes]):
def __init__(self, value: _UnionT[int, bytes] = ...) -> None: ...
class c_char_p(_PointerLike, _SimpleCData[Optional[bytes]]):
def __init__(self, value: Optional[_UnionT[int, bytes]] = ...) -> None: ...
class c_double(_SimpleCData[float]): ...
class c_longdouble(_SimpleCData[float]): ...
class c_float(_SimpleCData[float]): ...
class c_int(_SimpleCData[int]): ...
class c_int8(_SimpleCData[int]): ...
class c_int16(_SimpleCData[int]): ...
class c_int32(_SimpleCData[int]): ...
class c_int64(_SimpleCData[int]): ...
class c_long(_SimpleCData[int]): ...
class c_longlong(_SimpleCData[int]): ...
class c_short(_SimpleCData[int]): ...
class c_size_t(_SimpleCData[int]): ...
class c_ssize_t(_SimpleCData[int]): ...
class c_ubyte(_SimpleCData[int]): ...
class c_uint(_SimpleCData[int]): ...
class c_uint8(_SimpleCData[int]): ...
class c_uint16(_SimpleCData[int]): ...
class c_uint32(_SimpleCData[int]): ...
class c_uint64(_SimpleCData[int]): ...
class c_ulong(_SimpleCData[int]): ...
class c_ulonglong(_SimpleCData[int]): ...
class c_ushort(_SimpleCData[int]): ...
class c_void_p(_PointerLike, _SimpleCData[Optional[int]]): ...
class c_wchar(_SimpleCData[Text]): ...
class c_wchar_p(_PointerLike, _SimpleCData[Optional[Text]]):
def __init__(self, value: Optional[_UnionT[int, Text]] = ...) -> None: ...
class c_bool(_SimpleCData[bool]):
def __init__(self, value: bool = ...) -> None: ...
if sys.platform == 'win32':
class HRESULT(_SimpleCData[int]): ... # TODO undocumented
class py_object(_SimpleCData[_T]): ...
class _CField:
offset: int = ...
size: int = ...
class _StructUnionMeta(_CDataMeta):
_fields_: Sequence[_UnionT[Tuple[str, Type[_CData]], Tuple[str, Type[_CData], int]]] = ...
_pack_: int = ...
_anonymous_: Sequence[str] = ...
def __getattr__(self, name: str) -> _CField: ...
class _StructUnionBase(_CData, metaclass=_StructUnionMeta):
def __init__(self, *args: Any, **kw: Any) -> None: ...
def __getattr__(self, name: str) -> Any: ...
def __setattr__(self, name: str, value: Any) -> None: ...
class Union(_StructUnionBase): ...
class Structure(_StructUnionBase): ...
class BigEndianStructure(Structure): ...
class LittleEndianStructure(Structure): ...
class Array(Generic[_CT], _CData):
_length_: ClassVar[int] = ...
_type_: ClassVar[Type[_CT]] = ...
raw: bytes = ... # Note: only available if _CT == c_char
value: Any = ... # Note: bytes if _CT == c_char, Text if _CT == c_wchar, unavailable otherwise
# TODO These methods cannot be annotated correctly at the moment.
# All of these "Any"s stand for the array's element type, but it's not possible to use _CT
# here, because of a special feature of ctypes.
# By default, when accessing an element of an Array[_CT], the returned object has type _CT.
# However, when _CT is a "simple type" like c_int, ctypes automatically "unboxes" the object
# and converts it to the corresponding Python primitive. For example, when accessing an element
# of an Array[c_int], a Python int object is returned, not a c_int.
# This behavior does *not* apply to subclasses of "simple types".
# If MyInt is a subclass of c_int, then accessing an element of an Array[MyInt] returns
# a MyInt, not an int.
# This special behavior is not easy to model in a stub, so for now all places where
# the array element type would belong are annotated with Any instead.
def __init__(self, *args: Any) -> None: ...
@overload
def __getitem__(self, i: int) -> Any: ...
@overload
def __getitem__(self, s: slice) -> List[Any]: ...
@overload
def __setitem__(self, i: int, o: Any) -> None: ...
@overload
def __setitem__(self, s: slice, o: Iterable[Any]) -> None: ...
def __iter__(self) -> Iterator[Any]: ...
# Can't inherit from Sized because the metaclass conflict between
# Sized and _CData prevents using _CDataMeta.
def __len__(self) -> int: ...
| [
"60698561+artisakov@users.noreply.github.com"
] | 60698561+artisakov@users.noreply.github.com |
76ae4c570eb191bcd3cae53ad96285f51605bdae | 0f4ced644d3ee8db4361a178c69e97b5ddfaf1f6 | /tests/test_video_ops.py | ca62a0790020b8a15521398e27a3edb8155427ad | [
"MIT"
] | permissive | tabital0/video_packaging_platform | e29001e6de701f3c4bc2ecb4ed5bf59dc929868e | d0bff522de10fab6a2847f5433f88015e1210d7d | refs/heads/master | 2022-12-16T08:32:57.401681 | 2020-09-06T12:29:43 | 2020-09-06T12:29:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,258 | py | import os
import subprocess
import sys
import json
from test_database import *
import ntpath
import string
import secrets
import base64
import unittest
# further this exercise, we should consider a file normalisation function
class test_video_ops(unittest.TestCase):
def setUp(self):
self.working_dir = os.getcwd()
self.bin_dir = os.getcwd() + "/../bin"
self.storage_dir = os.getcwd() + "/../storage/"
self.input_file1 = os.getcwd() + "/TEST_VIDEOS/BigBuckBunny.mp4"
self.input_file2 = os.getcwd() + "/TEST_VIDEOS/BigBuckBunny_10sec.mp4"
self.input_file3 = os.getcwd() + "/TEST_VIDEOS/BigBuckBunny_10sec_frag.mp4"
self.input_file4 = os.getcwd() + "/TEST_VIDEOS/BigBuckBunny_10sec_frag.mp4"
self.output_dir = os.getcwd() + "/../output/"
self.key = "hyN9IKGfWKdAwFaE5pm0qg"
self.kid = "oW5AK5BW43HzbTSKpiu3SQ"
self.video_track_number = 2
def test_video_ingest(self):
os.chdir(self.bin_dir)
""" Export the Video metadata into a JSON """
try:
subprocess.check_output("./mp4info {} --format json > out.json".
format(self.input_file1), shell=True)
except subprocess.CalledProcessError as e:
output = ("ERROR - Corrupted or wrong file, please review the file. Details:"
+ '\n' + '\n', e)
return output
""" Check the metadata and search for video tracks """
with open('out.json') as f:
data = json.load(f)
items = data.get('tracks')
video_found_flag = 0
for item in items:
if item.get('type') == 'Video':
video_found_flag = 1
video_track_number = (item.get('id'))
os.chdir(self.working_dir)
""" When a video track is found, return the Track ID and put file into storage """
try:
subprocess.check_output("mv {}".format(self.input_file1) + " {}".
format(self.storage_dir), shell=True)
file_name = ntpath.basename(self.input_file1)
# DATABASE - we add 1 as confirmation process went good!
output = ("OK - File " + self.input_file1 +
" has been processed and moved to storage", video_track_number, 1)
return output
except subprocess.CalledProcessError as e:
output = ("\nERROR - can't move the file to storage\n\n", e)
return output
raise
if video_found_flag == 0:
output = ("ERROR - An error has been occured, file doesn't contain an audio track ")
return output
def test_video_fragment(self):
output_code = ''.join(secrets.choice(string.ascii_uppercase +
string.digits) for _ in range(6))
output_file_path = self.output_dir + output_code + "/" + output_code + ".mp4"
os.chdir(self.output_dir)
os.mkdir(output_code, mode=0o0755)
os.chdir(self.bin_dir)
""" Then the video fragmentation process uses its output as name encryption """
fragment_custom_command = ("./mp4fragment " + str(self.input_file2) + " " +
output_file_path)
try:
# subprocess.check_output(fragment_custom_command, shell=True)
output = ("OK - File " + str(self.input_file2) + " has been fragmented and is ready to encrypt\n\n",
str(output_file_path), 1)
os.chdir(self.working_dir)
return output
except subprocess.CalledProcessError as e:
output = ("\nERROR - can't fragment the video file" +
self.input_file2 + "\n\n", e)
os.chdir(self.working_dir)
return output
raise
def test_video_encrypt(self):
os.chdir(self.bin_dir)
string1 = (self.key + "==")
video_key = (base64.b64decode(string1).hex())
string2 = (self.kid + "==")
video_kid = (base64.b64decode(string2).hex())
output_file_path = (os.path.splitext(self.input_file3)[0]) + "_enc.mp4"
encrypt_custom_command = ("./mp4encrypt --method MPEG-CBCS --key " +
str(self.video_track_number) + ":" + video_key +
":random " + "--property " +
str(self.video_track_number) + ":KID:" + video_kid +
" " + str(self.input_file3) + " " + output_file_path)
try:
subprocess.check_output(encrypt_custom_command, shell=True)
output = ("\nOK - File" + str(self.input_file3) +
" has been encrypted with key:" + self.key + "kid:" + self.kid,
output_file_path, 1)
os.chdir(self.working_dir)
return output
except subprocess.CalledProcessError as e:
output = ("\nERROR - can't encrypt the video file" +
self.input_file3 + "\n\n", e)
os.chdir(self.working_dir)
return output
raise
def test_video_dash(self):
os.chdir(self.bin_dir)
path, file = os.path.split(self.input_file4)
dash_custom_command = ("./mp4dash " + self.input_file4 + " -o " +
path + "/dash/")
try:
subprocess.check_output(dash_custom_command, shell=True)
dash_output = path + "/dash/stream.mpd"
output = ("OK - File" + self.input_file4 + " has been processed into " +
dash_output, dash_output, 1)
os.chdir(self.working_dir)
return output
except subprocess.CalledProcessError as e:
output = ("\nERROR - can't generate the mpd file" +
self.input_file4 + "\n\n", e)
os.chdir(self.working_dir)
return output
raise
# Script entry point: discover and run this module's unittest test cases.
if __name__ == '__main__':
    unittest.main()
| [
"javierbrines@FIGURIN.local"
] | javierbrines@FIGURIN.local |
0f0b6b2cec570037f97c8c7bbc64d189cf1d44a3 | 462e0b4a7b271c288e99f6c9ac5882b7dd27e6a5 | /Question.py | 14955ff3dd21a9617b0f849c29cd0b11386fb2a6 | [] | no_license | dennyhong96/learn-python | 7c08f2fc7cfce86043fcd5b969b086d98d6f8cea | 2177d8fa8495b0d3468e11e5407c47adec6e7ab8 | refs/heads/master | 2022-11-17T16:41:39.206535 | 2020-07-10T01:54:21 | 2020-07-10T01:54:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 197 | py | class Question:
def __init__(self, prompt, answer):
self.prompt = prompt
self.answer = answer
def check_anser(self, user_answer):
return user_answer == self.answer
| [
"haiyanghong@Haiyangs-iMac.local"
] | haiyanghong@Haiyangs-iMac.local |
11610e5e284f31663f91e07ce29bae98c90f16af | b5025befdf74fff3071252abaa4db09479f2d763 | /Gopi_bhagam/Docker_files/python_server/server.py | f1ed0a5272466add01816a31b758c3502317d10a | [] | no_license | sidv/Assignments | d2fcc643a2963627afd748ff4d690907f01f71d8 | d50d668264e2a31581ce3c0544f9b13de18da2b3 | refs/heads/main | 2023-07-30T02:17:19.392164 | 2021-09-23T04:47:56 | 2021-09-23T04:47:56 | 392,696,356 | 1 | 20 | null | 2021-09-23T08:14:11 | 2021-08-04T13:20:43 | Python | UTF-8 | Python | false | false | 241 | py | from flask import Flask
# NOTE(review): the original used typographic quotes and `_name_`, which are
# syntax errors; normalized below to runnable Flask code.
app = Flask(__name__)


@app.route("/")
def hello():
    """Root endpoint."""
    return "hi from sid "


# The original registered "/" twice; the second handler is moved to "/data"
# so both endpoints are actually reachable.
@app.route("/data")
def data():
    """Data endpoint."""
    return "this is data "


if __name__ == "__main__":
    app.run(host='0.0.0.0', port=4000)
| [
"gopibhagam023@gmail.com"
] | gopibhagam023@gmail.com |
465e3e933794157e4094f168a1ad18f93d8fd7b7 | 8b5966d1851c54dcb114eeb7303a634ac2caf933 | /test.py | 78d43a510e55d8190ed60e5ea5e996e7e6797b77 | [] | no_license | Berthot/PySpark | 7a2d5303774a18740674d601bed2dba5d2ce96a3 | 6de38a38cd3115635865630b4f42d376c432150f | refs/heads/master | 2023-04-26T23:39:45.481961 | 2021-05-29T00:05:17 | 2021-05-29T00:05:17 | 369,671,424 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 215 | py | from typing import Tuple
from SparkBD import SparkBD
original = SparkBD("test")
tuples = [('test', 1), ('test', 1), ('test2', 2)]
original.get_rdd_by_tuple(tuples)
original.save_rdd_to_file("test", coalesce=1)
| [
"matheus.berthot@gmail.com"
] | matheus.berthot@gmail.com |
d44657beb1f44a8371ce7aee8adb41b3592dd400 | 72f72aa7b92114beffeac32963400df5899ef034 | /Coinfection_Transmission_Model.py | 270e28bec4fc0bc79e14d7bffaa6c7f2e44c6735 | [] | no_license | hdobrovo/Two_virus_epidemiology | 47b1432af8cfd7cf04647e3e995c99c069a36a40 | 499751321a8c3987540efa10e3ab00b338963d4f | refs/heads/main | 2023-01-16T02:33:26.851878 | 2020-11-06T18:44:53 | 2020-11-06T18:44:53 | 310,678,984 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,303 | py | import matplotlib.pyplot as plt
import numpy as np
from scipy import integrate
from pylab import genfromtxt;
import matplotlib.ticker as ticker
from scipy import stats
import matplotlib as mpl
# Global matplotlib style: thick axes, serif font, inward ticks on all four sides.
mpl.rcParams['axes.linewidth'] = 3.1  # (original set this twice; duplicate removed)
plt.rcParams['font.family'] = "Times New Roman"
mpl.rcParams['xtick.direction'] = 'in'
mpl.rcParams['ytick.direction'] = 'in'
mpl.rcParams['xtick.top'] = True
mpl.rcParams['ytick.right'] = True
mpl.rcParams['xtick.bottom'] = True
mpl.rcParams['ytick.left'] = True
def as_si(x, ndp):
    """Format *x* in scientific notation as a LaTeX snippet.

    *ndp* is the number of decimal places in the mantissa, e.g.
    as_si(1234.0, 2) -> '1.23\\times 10^{3}'.
    """
    mantissa, _, exponent = '{0:0.{1}e}'.format(x, ndp).partition('e')
    return r'{0}\times 10^{{{1}}}'.format(mantissa, int(exponent))
data1 = np.loadtxt("data_Flu_Single.txt")
Inf_flu = data1[:,3]
t_flu = data1[:,0]
data2 = np.loadtxt("data_SARS_Single.txt")
Inf_sars = data2[:,3]
t_sars = data2[:,0]
#parameters
N=331002651
####IAV
#b1, k1, d1=0.001, 0.125, 0.11
#b1, k1, d1=1.0, 0.25, 0.2
b1, k1, d1=0.5/N, 0.25, 0.2 ### Influenza
b2, k2, d2=0.41/N, 0.2, 0.1 ### SARS-CoV-2
d3= 0.1
# initial conditions
S0, E01, E02, E03, I01, I02, I03, RS01, RS02, RL01, RL02, RI01, RI02, R0, I04= N, 0, 0, 0, 100, 100, 0, 0,0,0,0,0,0,0,0
# initial condition vector
y0 = [S0, E01, E02, E03, I01, I02, I03, RS01, RS02, RL01, RL02, RI01, RI02, R0, I04]
#Time : how long to simulate the model
t= np.linspace(0,1000,1000)
#the model equations
def funct(y, t):
    """RHS of the two-virus (Flu / SARS-CoV-2) coinfection ODE system.

    *y* packs 15 compartments in the order used by y0:
    [S, E1, E2, E3, I1, I2, I3, RS1, RS2, RL1, RL2, RI1, RI2, R, I4].
    Rates (b1, k1, d1, b2, k2, d2, d3) are read from module globals.
    Returns the 15 derivatives in the same order, as expected by
    scipy.integrate.odeint.
    """
    (S, E1, E2, E3, I1, I2, I3,
     RS1, RS2, RL1, RL2, RI1, RI2, R, I4) = y
    ############ Susceptible
    f0 = - b1*S*(I1+I3+RI2+I4) - b2*S*(I2+I3+E3+RI1)
    ####### Exposed
    f1 = b1*S*(I1+I3+RI2+I4) - k1*E1 - b2*E1*(I2+I3+RI1)
    f2 = b2*S*(I2+I3+E3+RI1) - k2*E2 - b1*E2*(I1+I3+RI2+I4)
    f3 = b1*(E2+I2)*(I1+I3+RI2+I4) - k1*E3
    ####### Infected
    f4 = k1*E1 - d1*I1 - b2*E1*(I2+I3+RI1)
    # BUGFIX: original read b1*I2*(I1*I3+RI2+I4); the product I1*I3 was a
    # typo for the sum I1+I3 -- every other force-of-infection term for the
    # flu virus (cf. f0, f2, f8, f10) uses I1+I3+RI2+I4.
    f5 = k2*E2 - d2*I2 - b1*I2*(I1+I3+RI2+I4)
    f6 = k1*E3 - d3*I3
    f14 = b2*E1*(I2+I3+RI1) - d1*I4
    ####### Recovered-susceptible
    f7 = d1*I1 - b2*RS1*(I2+I3+E3+RI1)
    f8 = d2*I2 - b1*RS2*(I1+I3+RI2+I4)
    ############ Recovered-exposed
    f9 = b2*RS1*(I2+I3+E3+RI1) - k2*RL1
    f10 = b1*RS2*(I1+I3+RI2+I4) - k1*RL2
    ############ Recovered-infected
    f11 = k2*RL1 - d2*RI1
    f12 = k1*RL2 - d1*RI2
    ############ Fully recovered
    f13 = d2*RI1 + d1*RI2 + d3*I3 + d1*I4
    return [f0, f1, f2, f3, f4, f5, f6, f7, f8, f9, f10, f11, f12, f13, f14]
#Solve the model(integrate)------------------------------------
ds = integrate.odeint(funct,y0,t)
# Full trajectory: time plus all 15 compartments.
data=np.column_stack((t,ds[:,0],ds[:,1],ds[:,2],ds[:,3],ds[:,4],ds[:,5],ds[:,6],ds[:,7],ds[:,8],ds[:,9],ds[:,10],ds[:,11],ds[:,12],ds[:,13],ds[:,14]))
np.savetxt("data_coinfection_dynamics_immune.dat",data)
# Infected-compartment views, including per-virus totals (I1+RI2, I2+RI1).
data=np.column_stack((t,ds[:,4],ds[:,5],ds[:,6],ds[:,11],ds[:,12],ds[:,4]+ds[:,12],ds[:,5]+ds[:,11],ds[:,14]))
np.savetxt("infected_immune.dat",data)
# Hospitalisation proxy: 10% of flu cases, 30% of CoV-2 cases -- confirm rates.
data=np.column_stack((t,(ds[:,4]+ds[:,12])*0.1,(ds[:,5]+ds[:,11])*0.3,((ds[:,4]+ds[:,12]+ds[:,14])*0.1+(ds[:,5]+ds[:,11]+ds[:,6])*0.3)))
np.savetxt("hospital_immune.dat",data)
##Plot
def _plot_log10(curves, labels, ylabel, x_major=90, legend_fontsize=34):
    """Draw one figure in the shared house style used by all seven plots.

    curves: list of (x, y, plot_kwargs) triples; each y is plotted on a
        log10 scale in the order given (legend order follows plot order).
    labels: legend labels, one per curve (or per solid curve group).
    ylabel: y-axis label (LaTeX allowed).
    x_major: spacing of the major x ticks (days).
    (The original's commented-out plt.savefig(...) calls are omitted.)
    """
    fig = plt.figure(figsize=(12, 9))
    ax = fig.add_subplot(111)
    for x, y, kwargs in curves:
        plt.plot(x, np.log10(y), **kwargs)
    plt.xlim(1, 500)
    plt.ylim(0, 14)
    plt.legend(labels, loc='best', fontsize=legend_fontsize)
    plt.xlabel('Time (days)', fontsize=45)
    plt.ylabel(ylabel, fontsize=45)
    plt.tick_params(axis='both', which='major', labelsize=45)
    plt.tick_params(axis='both', which='minor', labelsize=45)
    ax.xaxis.set_major_locator(ticker.MultipleLocator(x_major))
    ax.yaxis.set_major_locator(ticker.MultipleLocator(2))
    ax.tick_params(direction='in', length=8, width=2, colors='k',
                   grid_color='k')
    plt.show()

# Exposed compartments.
_plot_log10(
    [(t, ds[:, 1], dict(color='r', linewidth=6)),
     (t, ds[:, 2], dict(color='b', linewidth=6)),
     (t, ds[:, 3], dict(color='c', linewidth=6))],
    ('Exposed to Flu', 'Exposed to CoV-2', 'First exposed to\nCoV-2, then to Flu'),
    r'Exposed population (log$_{10}$)')

# Infected compartments, with single-infection baselines overlaid (dotted).
_plot_log10(
    [(t, ds[:, 4] + ds[:, 12], dict(color='r', linewidth=6)),
     (t, ds[:, 5] + ds[:, 11], dict(color='b', linewidth=6)),
     (t, ds[:, 6], dict(color='c', linewidth=6)),
     (t_flu, Inf_flu, dict(color='pink', linestyle=':', linewidth=5)),
     (t_sars, Inf_sars, dict(color='skyblue', linestyle=':', linewidth=5))],
    ('Flu', 'CoV-2', 'Coinfected', 'Single infection: Flu', 'Single infection: CoV-2'),
    r'Infected population (log$_{10}$)', legend_fontsize=30)

# Total infected, with vs. without coinfection.
_plot_log10(
    [(t, ds[:, 4] + ds[:, 5] + ds[:, 6] + ds[:, 11] + ds[:, 12],
      dict(color='darkorange', linestyle='-', linewidth=5)),
     (t_flu, Inf_flu + Inf_sars, dict(color='darkorange', linestyle=':', linewidth=5))],
    ('Total infected with coinfection', 'Total infected without coinfection'),
    r'Infected population (log$_{10}$)', x_major=120, legend_fontsize=30)

# Recovered-but-susceptible compartments.
_plot_log10(
    [(t, ds[:, 7], dict(color='k', linewidth=6)),
     (t, ds[:, 8], dict(color='gray', linewidth=6))],
    ('Recovered from Flu,\nsusceptible to CoV-2', 'Recovered from CoV-2,\nsusceptible to Flu'),
    r'Recovered susceptible (log$_{10}$)')

# Recovered-then-exposed compartments.
_plot_log10(
    [(t, ds[:, 9], dict(color='maroon', linestyle='-', linewidth=6)),
     (t, ds[:, 10], dict(color='darkviolet', linestyle='-', linewidth=6))],
    ('Recovered from Flu,\nexposed to CoV-2', 'Recovered from CoV-2,\nexposed to Flu'),
    r'Recovered exposed (log$_{10}$)')

# Recovered-then-infected compartments.
_plot_log10(
    [(t, ds[:, 11], dict(color='maroon', linewidth=6)),
     (t, ds[:, 12], dict(color='darkviolet', linewidth=6))],
    ('Recovered from Flu,\ninfected with CoV-2', 'Recovered from CoV-2,\ninfected with Flu'),
    r'Recovered infected (log$_{10}$)')

# Susceptible and fully recovered populations.
_plot_log10(
    [(t, ds[:, 0], dict(color='g', linewidth=6)),
     (t, ds[:, 13], dict(color='m', linewidth=6))],
    ('Susceptible', 'Recovered'),
    r'Population (log$_{10}$)')
| [
"noreply@github.com"
] | noreply@github.com |
450918b101bf80f9b8506b378865dcdefa98b895 | f580680389877a9dc4e24c5558b6452af890fcc1 | /Pert-3.py | 08fa1d798deaa2aa77256a225e4502991175d514 | [] | no_license | diviandari/ninjaxpress-training | b899e92959eb73848eefbb0dedfab0bb6f0f271d | b8b525e02e2fd8340f75ae4e12ed556ff7966ab2 | refs/heads/main | 2023-08-22T09:00:20.485133 | 2021-10-27T10:03:25 | 2021-10-27T10:03:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,549 | py | #FOR LOOP -> Pengulangan print / formula
#nama_buah = ['apel', 'jeruk', 'strawberry', 'anggur', 'lemon', 'markisa']
#for buah in nama_buah:
#print (buah)
#print ("---")
#print ('done')
#FOR RANGE -> pengulangan dengan range angka
# range (start, stop, set)
#for i in range (1, 20, 2):
#print(i)
# QUIZ
#nama_pelanggan = ['divi', 'listya', 'nadi', 'nita', 'lala']
#for pelanggan in nama_pelanggan:
#print (pelanggan)
#Jawaban Quiz yang bener
#count = int(input("Berapa Data: "))
#nama_pelanggan = []
#umur_pelanggan = []
#for i in range(count):
#print("Data ke{}". format(i+1))
#nama = input("Nama : ")
#umur = int(input("Umur: "))
#nama_pelanggan.append(nama)
#umur_pelanggan.append(umur)
#for i in range(len(nama_pelanggan)):
#print('Pelanggan {} berusia {}'. format(nama_pelanggan[i], umur_pelanggan[i]))
# CONTINUE -> ngeprint kalo sesuai statement
#for i in range(5):
#if i == 2:
#continue
#print(i)
# BREAK -> stop ngeprint kalo kondisi sesuai
#for i in range(5):
#if i == 2:
#break
#print(i)
# CONTINUE WITH 2 CONDITIONS
#for i in range(5):
#if i == 2 or i == 3:
#continue
#print(i)
# NESTED LOOP
#for i in range(3):
# print("i: {}". format(i))
# for j in range (3):
# print("j: {}". format(j))
# 5x5 grid demo: print "row.col" cells separated by spaces, one row per line.
for row in range(5):
    for col in range(5):
        print("{}.{}".format(row + 1, col + 1), end=" ")
    print()

x = [1,2,3,4,5]
y = [2,4,3,5,6]
# Count (i, j) pairs with equal values, i.e. common elements of x and y
# (counted with multiplicity over all pairs).
z = sum(1 for i in x for j in y if i == j)
print(z)
| [
"diviandari.sabitha@ninjavan.co"
] | diviandari.sabitha@ninjavan.co |
a4b92786d5aa6b4d9494220d99effef090b94214 | f74e64920aa5f163606d5654991829aed858033e | /search.py | 5d8ca7b140716bb5266ade220bc6befd78a78e36 | [] | no_license | DerKom/FSIPractica1 | abceb15224195cc417c50db1c39b34167519f988 | dc427648d3b8ddd7cf8a20eddbf564a9dfc2b91b | refs/heads/master | 2020-03-17T19:28:44.115246 | 2018-05-17T20:21:15 | 2018-05-17T20:21:15 | 133,864,017 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,593 | py | """Search (Chapters 3-4)
The way to use this code is to subclass Problem to create a class of problems,
then create problem instances and solve them with calls to the various search
functions."""
from __future__ import generators
from utils import *
import random
import sys
# ______________________________________________________________________________
class Problem:
    """The abstract class for a formal problem. You should subclass this and
    implement the method successor, and possibly __init__, goal_test, and
    path_cost. Then you will create instances of your subclass and solve them
    with the various search functions."""
    def __init__(self, initial, goal=None):
        """The constructor specifies the initial state, and possibly a goal
        state, if there is a unique goal. Your subclass's constructor can add
        other arguments."""
        self.initial = initial
        self.goal = goal
    def successor(self, state):
        """Given a state, return a sequence of (action, state) pairs reachable
        from this state. If there are many successors, consider an iterator
        that yields the successors one at a time, rather than building them
        all at once. Iterators will work fine within the framework."""
        # `abstract` comes from utils (star import above); presumably it
        # raises to signal "not implemented" -- confirm in utils.py.
        abstract
    def goal_test(self, state):
        """Return True if the state is a goal. The default method compares the
        state to self.goal, as specified in the constructor. Implement this
        method if checking against a single self.goal is not enough."""
        return state == self.goal
    def path_cost(self, c, state1, action, state2):
        """Return the cost of a solution path that arrives at state2 from
        state1 via action, assuming cost c to get up to state1. If the problem
        is such that the path doesn't matter, this function will only look at
        state2. If the path does matter, it will consider c and maybe state1
        and action. The default method costs 1 for every step in the path."""
        return c + 1
    def value(self):
        """For optimization problems, each state has a value. Hill-climbing
        and related algorithms try to maximize this value."""
        abstract
# ______________________________________________________________________________
class Node:
    """A node in a search tree.

    Holds the problem state, a pointer to the parent node (the node this one
    was expanded from), the action that produced it, the cumulative
    path_cost (a.k.a. g), and its depth. Two paths to one state yield two
    distinct Node objects. Best-first/A* search may attach f and h values.
    You will not need to subclass this class.
    """
    def __init__(self, state, parent=None, action=None, path_cost=0):
        """Create a search-tree node derived from *parent* by *action*."""
        self.state = state
        self.parent = parent
        self.action = action
        self.path_cost = path_cost
        self.depth = parent.depth + 1 if parent else 0
    def __repr__(self):
        return "<Node %s>" % (self.state,)
    def path(self):
        """Return the list of nodes from this node back up to the root."""
        current, chain = self, [self]
        while current.parent:
            chain.append(current.parent)
            current = current.parent
        return chain
    def expand(self, problem):
        """Return the list of child nodes reachable from this node. [Fig. 3.8]"""
        children = []
        for act, nxt in problem.successor(self.state):
            cost = problem.path_cost(self.path_cost, self.state, act, nxt)
            children.append(Node(nxt, self, act, cost))
        return children
# ______________________________________________________________________________
## Uninformed Search algorithms
def search_Ramacot(problem):
    """Graph search over *problem* using the fringe built by
    applyRamacotAlgorithm()."""
    return graph_search(problem, applyRamacotAlgorithm())
def search_Ramacot_heurisitic(problem):
    """Graph search over *problem* using the problem-aware (heuristic)
    Ramacot fringe."""
    return graph_search(problem, applyRamacotAlgorithm_Heuristic(problem))
def tree_search(problem, fringe):
    """Search through the successors of *problem* to find a goal state.

    *fringe* must be an empty queue; its pop order determines the search
    strategy. Repeated paths to a state are not detected. [Fig. 3.8]
    Returns the goal Node, or None if the fringe is exhausted.
    """
    fringe.append(Node(problem.initial))
    while fringe:
        candidate = fringe.pop()
        if problem.goal_test(candidate.state):
            return candidate
        fringe.extend(candidate.expand(problem))
    return None
def breadth_first_tree_search(problem):
    """Search the shallowest nodes in the search tree first. [p 74]"""
    # FIFOQueue (from utils) pops in insertion order -> breadth-first.
    return tree_search(problem, FIFOQueue())
def depth_first_tree_search(problem):
    """Search the deepest nodes in the search tree first. [p 74]"""
    # Stack (from utils) pops the newest entry first -> depth-first.
    return tree_search(problem, Stack())
def graph_search(problem, fringe):
    """Search through the successors of *problem* to find a goal.

    *fringe* should be an empty queue; its pop order determines the
    strategy. Already-expanded states are never expanded again, so if two
    paths reach a state only the first one found is used. [Fig. 3.18]
    Returns the goal Node, or None on failure.
    """
    closed = set()  # states already expanded (was a dict used as a set)
    fringe.append(Node(problem.initial))
    while fringe:
        node = fringe.pop()
        if problem.goal_test(node.state):
            return node
        if node.state not in closed:
            closed.add(node.state)
            fringe.extend(node.expand(problem))
    return None
def breadth_first_graph_search(problem):
    """Search the shallowest nodes in the search tree first. [p 74]"""
    return graph_search(problem, FIFOQueue())  # FIFOQueue -> fringe (breadth-first pop order)
def depth_first_graph_search(problem):
    """Search the deepest nodes in the search tree first. [p 74]"""
    # Stack (from utils) pops the newest entry first -> depth-first.
    return graph_search(problem, Stack())
def depth_limited_search(problem, limit=50):
    """Depth-first search that treats nodes at depth == limit as leaves. [Fig. 3.12]

    Returns the goal Node on success, the string 'cutoff' when the depth
    limit pruned some subtree (a deeper search might still succeed), or
    None on outright failure.
    """
    def recursive_dls(node, problem, limit):
        cutoff_occurred = False
        if problem.goal_test(node.state):
            return node
        elif node.depth == limit:
            return 'cutoff'
        else:
            for successor in node.expand(problem):
                result = recursive_dls(successor, problem, limit)
                if result == 'cutoff':
                    # Remember the cutoff but keep trying the siblings.
                    cutoff_occurred = True
                elif result != None:
                    return result
            if cutoff_occurred:
                return 'cutoff'
            else:
                return None
    # Body of depth_limited_search:
    return recursive_dls(Node(problem.initial), problem, limit)
def iterative_deepening_search(problem):
    """Repeat depth-limited search with limit 0, 1, 2, ... [Fig. 3.13]

    Returns the first non-'cutoff' result (a goal Node or None).
    (Python 2 code: xrange / sys.maxint.)
    """
    for depth in xrange(sys.maxint):
        result = depth_limited_search(problem, depth)
        # BUGFIX: compare by equality, not identity -- `is not 'cutoff'`
        # relied on CPython string interning, an implementation detail.
        if result != 'cutoff':
            return result
#______________________________________________________________________________
# Informed (Heuristic) Search
def best_first_graph_search(problem, f):
    """Search the nodes with the lowest f scores first.
    You specify the function f(node) that you want to minimize; for example,
    if f is a heuristic estimate to the goal, then we have greedy best
    first search; if f is node.depth then we have depth-first search.
    There is a subtlety: the line "f = memoize(f, 'f')" means that the f
    values will be cached on the nodes as they are computed. So after doing
    a best first search you can examine the f values of the path returned."""
    # memoize and PriorityQueue come from utils (star import above).
    f = memoize(f, 'f')
    return graph_search(problem, PriorityQueue(min, f))
greedy_best_first_graph_search = best_first_graph_search
# Greedy best-first search is accomplished by specifying f(n) = h(n).
def astar_search(problem, h=None):
    """A* search: best-first graph search with f(n) = g(n) + h(n).

    *h* defaults to problem.h. Uses the pathmax trick
    f(n) = max(parent f, g(n) + h(n)) so f is non-decreasing along a path.
    (Originally left as an exercise stub -- "to be implemented by the
    student"; implemented here.)
    """
    h = memoize(h or problem.h, 'h')  # memoize/infinity come from utils
    def f(n):
        return max(getattr(n, 'f', -infinity), n.path_cost + h(n))
    return best_first_graph_search(problem, f)
# _____________________________________________________________________________
# The remainder of this file implements examples for the search algorithms.
# ______________________________________________________________________________
# Graphs and Graph Problems
class Graph:
    """A graph connects nodes (vertices) by edges (links). Each edge can also
    have a length associated with it. The constructor call is something like:
        g = Graph({'A': {'B': 1, 'C': 2})
    this makes a graph with 3 nodes, A, B, and C, with an edge of length 1 from
    A to B, and an edge of length 2 from A to C. You can also do:
        g = Graph({'A': {'B': 1, 'C': 2}, directed=False)
    This makes an undirected graph, so inverse links are also added. The graph
    stays undirected; if you add more links with g.connect('B', 'C', 3), then
    inverse link is also added. You can use g.nodes() to get a list of nodes,
    g.get('A') to get a dict of links out of A, and g.get('A', 'B') to get the
    length of the link from A to B. 'Lengths' can actually be any object at
    all, and nodes can be any hashable object."""
    def __init__(self, dict=None, directed=True):
        # NOTE: parameter name `dict` shadows the builtin inside this method.
        self.dict = dict or {}
        self.directed = directed
        if not directed:
            self.make_undirected()
    def make_undirected(self):
        """Make a digraph into an undirected graph by adding symmetric edges."""
        for a in self.dict.keys():
            for (b, distance) in self.dict[a].items():
                self.connect1(b, a, distance)
    def connect(self, A, B, distance=1):
        """Add a link from A and B of given distance, and also add the inverse
        link if the graph is undirected."""
        self.connect1(A, B, distance)
        if not self.directed: self.connect1(B, A, distance)
    def connect1(self, A, B, distance):
        """Add a link from A to B of given distance, in one direction only."""
        self.dict.setdefault(A, {})[B] = distance
    def get(self, a, b=None):
        """Return a link distance or a dict of {node: distance} entries.
        .get(a,b) returns the distance or None;
        .get(a) returns a dict of {node: distance} entries, possibly {}."""
        # Side effect: setdefault inserts an empty adjacency dict for an
        # unknown node `a`.
        links = self.dict.setdefault(a, {})
        if b is None:
            return links
        else:
            return links.get(b)
    def nodes(self):
        """Return a list of nodes in the graph."""
        return self.dict.keys()
def UndirectedGraph(dict=None):
    """Build a Graph where every edge (including future ones) goes both ways."""
    # (parameter name `dict` shadows the builtin; kept for caller compatibility)
    return Graph(dict=dict, directed=False)
def RandomGraph(nodes=range(10), min_links=2, width=400, height=300,
                curvature=lambda: random.uniform(1.1, 1.5)):
    """Construct a random graph, with the specified nodes, and random links.
    The nodes are laid out randomly on a (width x height) rectangle.
    Then each node is connected to the min_links nearest neighbors.
    Because inverse links are added, some nodes will have more connections.
    The distance between nodes is the hypotenuse times curvature(),
    where curvature() defaults to a random number between 1.1 and 1.5."""
    g = UndirectedGraph()
    g.locations = {}
    ## Build the cities
    for node in nodes:
        g.locations[node] = (random.randrange(width), random.randrange(height))
    ## Build roads from each city to at least min_links nearest neighbors.
    for i in range(min_links):
        for node in nodes:
            if len(g.get(node)) < min_links:
                here = g.locations[node]
                # Nearest not-yet-linked other node; infinity excludes self
                # and existing neighbours (distance/argmin/infinity: utils).
                def distance_to_node(n):
                    if n is node or g.get(node, n): return infinity
                    return distance(g.locations[n], here)
                neighbor = argmin(nodes, distance_to_node)
                d = distance(g.locations[neighbor], here) * curvature()
                g.connect(node, neighbor, int(d))
    return g
# Romania road map (single-letter city codes); edge values are distances.
romania = UndirectedGraph(Dict(
    A=Dict(Z=75, S=140, T=118),
    B=Dict(U=85, P=101, G=90, F=211),
    C=Dict(D=120, R=146, P=138),
    D=Dict(M=75),
    E=Dict(H=86),
    F=Dict(S=99),
    H=Dict(U=98),
    I=Dict(V=92, N=87),
    L=Dict(T=111, M=70),
    O=Dict(Z=71, S=151),
    P=Dict(R=97),
    R=Dict(S=80),
    U=Dict(V=142)))
# (x, y) coordinates consumed by the straight-line-distance heuristic.
romania.locations = Dict(
    A=(91, 492), B=(400, 327), C=(253, 288), D=(165, 299),
    E=(562, 293), F=(305, 449), G=(375, 270), H=(534, 350),
    I=(473, 506), L=(165, 379), M=(168, 339), N=(406, 537),
    O=(131, 571), P=(320, 368), R=(233, 410), S=(207, 457),
    T=(94, 410), U=(456, 350), V=(509, 444), Z=(108, 531))
# Australia adjacency map (unit-length links).
australia = UndirectedGraph(Dict(
    T=Dict(),
    SA=Dict(WA=1, NT=1, Q=1, NSW=1, V=1),
    NT=Dict(WA=1, Q=1),
    NSW=Dict(Q=1, V=1)))
australia.locations = Dict(WA=(120, 24), NT=(135, 20), SA=(135, 30),
                           Q=(145, 20), NSW=(145, 32), T=(145, 42), V=(145, 37))
class GPSProblem(Problem):
    """The problem of searching in a graph from one node to another."""
    def __init__(self, initial, goal, graph):
        Problem.__init__(self, initial, goal)
        self.graph = graph
    def successor(self, A):
        """Return a list of (action, result) pairs."""
        # The action and the resulting state are both the neighbour node.
        return [(B, B) for B in self.graph.get(A).keys()]
    def path_cost(self, cost_so_far, A, action, B):
        # NOTE: `or infinity` also triggers when the stored distance is 0,
        # not only when the link is missing (get returns None).
        return cost_so_far + (self.graph.get(A, B) or infinity)
    def h(self, node):
        """h function is straight-line distance from a node's state to goal."""
        locs = getattr(self.graph, 'locations', None)
        if locs:
            return int(distance(locs[node.state], locs[self.goal]))
        else:
            return infinity
| [
"mac_alicante@hotmail.com"
] | mac_alicante@hotmail.com |
ab7997be71a3305122dda1051466a0965060a26c | 940636e2e948808e59f9e0d65f1227bc9b95b063 | /form_basic/admin.py | a2a13446053dcbceb919692224933e5ce8fcd455 | [] | no_license | ccw88u/django_form | f3d1b109992c99d3ba09c5974e58bd7a4f1b44d7 | 47be54ff84fdb99bc8848392590116fa90193c1e | refs/heads/master | 2021-09-06T17:30:05.855684 | 2018-02-09T02:30:17 | 2018-02-09T02:30:17 | 118,993,599 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 576 | py | from django.contrib import admin
from form_basic.models import Reguser, website, website_subject
# Register your models here.
class ReguserAdmin(admin.ModelAdmin):
list_display = ('first_name', 'last_name', 'email')
admin.site.register(Reguser, ReguserAdmin)
class admindispfmt1_website_subject(admin.ModelAdmin):
list_display = ('subject_name', )
admin.site.register(website_subject, admindispfmt1_website_subject)
class admindispfmt1_website(admin.ModelAdmin):
list_display = ('title', 'subject', 'uri')
admin.site.register(website, admindispfmt1_website)
| [
"ccw88u@gmail.com"
] | ccw88u@gmail.com |
41894e7590dde3aa44f8c38b7453e8c364d924f5 | cd8f7ecd20c58ce1ae0fe3840f7c7ee961aa5819 | /Binary Tree Zigzag Level Order Traversal.py | 5ffebb0274f92ac415a122c9c02b477d302ff3ff | [
"Apache-2.0"
] | permissive | sugia/leetcode | 9b0f2a3521b088f8f7e5633c2c6c17c76d33dcaf | 6facec2a54d1d9f133f420c9bce1d1043f57ebc6 | refs/heads/master | 2021-06-05T07:20:04.099488 | 2021-02-24T07:24:50 | 2021-02-24T07:24:50 | 29,124,136 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,303 | py | '''
Given a binary tree, return the zigzag level order traversal of its nodes' values. (ie, from left to right, then right to left for the next level and alternate between).
For example:
Given binary tree [3,9,20,null,null,15,7],
3
/ \
9 20
/ \
15 7
return its zigzag level order traversal as:
[
[3],
[20,9],
[15,7]
]
'''
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
    def zigzagLevelOrder(self, root):
        """Return node values level by level, alternating direction:
        the first level left-to-right, the second right-to-left, and so on.

        :type root: TreeNode
        :rtype: List[List[int]]
        """
        if not root:
            return []
        levels = []
        frontier = [root]
        left_to_right = True
        while frontier:
            values = [node.val for node in frontier]
            levels.append(values if left_to_right else values[::-1])
            left_to_right = not left_to_right
            frontier = [child
                        for node in frontier
                        for child in (node.left, node.right)
                        if child]
        return levels
| [
"noreply@github.com"
] | noreply@github.com |
7b7e7d9c330becf08fba57b89a602ca1e454287c | e02684654840ebe9a0030ccd89eea5e10678842d | /nn.py | 6b516abae250778dac37a5888a7066113ecf9498 | [] | no_license | zeynepoguz/ClassificationText | 1830f19b4b523f5b254d0188a842f7b7688192ba | 9e739a0faea56a29d2cf458a4d2af1f859e09aef | refs/heads/master | 2020-03-26T05:37:11.129254 | 2018-08-16T10:50:49 | 2018-08-16T10:50:49 | 144,565,768 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,693 | py | import textacy
import keras
from numpy import array
from numpy import argmax
from keras.utils import to_categorical
import textacy.keyterms
from keras.preprocessing.text import text_to_word_sequence
from keras.layers import Embedding
from keras.models import Sequential
from keras.layers import Flatten, Dense, Activation
from keras import preprocessing
import numpy as np
from sklearn.model_selection import train_test_split
# Load the corpus, hash-encode it at word level, and inspect the one-hot form.
print("-------------------------------------------------------\n")

# Raw string: the original literal relied on invalid escape sequences
# (\M, \P, \K, \s), which CPython passes through with a warning; r"" yields
# the identical path without the warning.  ``with`` closes the handle.
with open(r"C:\MyProjects\PythonProjects\Keras\sherlock.txt", encoding='utf-8') as corpus_file:
    texts = corpus_file.read().lower()
print(texts)

# Vocabulary size = number of distinct words in the corpus.
words = set(text_to_word_sequence(texts))
vocab_size = len(words)

# NOTE(review): one_hot hashes words into [1, vocab_size), so distinct words
# may collide; a Tokenizer would give a collision-free index — confirm intent.
txt = keras.preprocessing.text.one_hot(texts, vocab_size, filters='!"#$%&()*+,-./:;<=>?@[\]^_`{|}~',
                                       lower=True, split=' ')
print("-------\n")
print(txt)

data = array(txt)
print(data)
# One-hot encode the hashed word indices (rows = words, cols = index classes).
encoded = to_categorical(data)
print(encoded)
# Invert the encoding for the first word: argmax recovers its integer index.
inverted = argmax(encoded[0])
print("****************************************")
print(inverted)
| [
"noreply@github.com"
] | noreply@github.com |
f2b27d4fedcc3b3ab8497f7917ec528b3d71273a | 822ed0b7e32ebbb09539cc0b3e99b01168f62f89 | /computerMove.py | bc4b6dab5824060beb6b170a3ed1ec37a5b1b57b | [] | no_license | sawasthi77/tutorials | f4d882e12c440a57b67101301b83159d4a585026 | bb22674112cc99dcfde0c7e1e40b1eca3dc4b277 | refs/heads/master | 2020-07-24T07:47:46.134158 | 2020-05-09T08:43:27 | 2020-05-09T08:53:10 | 207,851,386 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,128 | py | import random
# One round of rock-paper-scissors: the computer draws a random move and the
# outcome is scored against the player's move.  ``playerMove`` and the
# ``wins``/``losses``/``ties`` counters are defined elsewhere in the program.
randomMove = str(random.randint(1, 3))
if randomMove == '1':
    computerMove = 'r'
    print('ROCK')
    if playerMove == computerMove:
        print('It is a tie')
        ties += 1
    elif playerMove == 's' and computerMove == 'r':
        # scissors lose to rock
        print('You loose!! Try again')
        losses += 1
    elif playerMove == 'p' and computerMove == 'r':
        # paper covers rock — this winning branch was missing in the original
        print('You win !')
        wins += 1
elif randomMove == '2':
    computerMove = 'p'
    print('PAPER')
    if playerMove == computerMove:
        print('It is a tie')
        ties += 1
    elif playerMove == 's' and computerMove == 'p':
        # scissors cut paper
        print('You win !')
        wins += 1
    elif playerMove == 'r' and computerMove == 'p':
        # rock is covered by paper
        print('You loose!! Try again')
        losses += 1
elif randomMove == '3':
    computerMove = 's'
    print('SCISSORS')
    if playerMove == computerMove:
        print('It is a tie')
        ties += 1
    elif playerMove == 'r' and computerMove == 's':
        # rock crushes scissors
        print('You Win !')
        wins += 1
    elif playerMove == 'p' and computerMove == 's':
        # paper is cut by scissors — the original wrongly counted this as a
        # win and carried an unreachable duplicate branch for the loss
        print('You loose!! Try again')
        losses += 1
| [
"saumyaawasthi187@gmail.com"
] | saumyaawasthi187@gmail.com |
519290a303338a6a8fe7363c928e363289bcdeb6 | b840b9d7a408d076f903b3c5f1f75ee255fda23b | /pca_rbf_svm.py | c625280b74d0f63003de9c2191e005489a0a1be9 | [] | no_license | hzh0512/10701-singlecell | 13f858da59c5b2ae3764b743cfdd80e584857ce8 | 4eec081fd3b3f286f1941b431e76713ed54e0312 | refs/heads/master | 2021-10-08T19:38:53.614162 | 2018-12-16T20:12:38 | 2018-12-16T20:12:38 | 162,035,493 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,786 | py | import pandas as pd
import os, timeit, socket, time, pickle
from sklearn.svm import SVC, LinearSVC
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import accuracy_score
from sklearn.decomposition import PCA
from sklearn.externals import joblib
from sklearn.model_selection import GridSearchCV
from sklearn.ensemble import AdaBoostClassifier
from sklearn.ensemble import BaggingClassifier
data_path = './'
train_file = os.path.join(data_path, 'train_data.h5')
test_file = os.path.join(data_path, 'test_data.h5')
pca_n_components = 40
encoding_method = 'pca'
def load_data():
    """Load train/test RPKM matrices and labels from HDF5 and project them.

    The pre-fitted encoder (PCA by default, per ``encoding_method`` and
    ``pca_n_components``) is unpickled from ``models/`` and applied to both
    feature matrices.

    Returns:
        (X_train, X_test, y_train, y_test) with the X arrays already
        transformed into the encoder's reduced feature space.
    """
    print('start loading data...')
    # Context managers close the HDF5 stores even if a key lookup raises.
    with pd.HDFStore(train_file) as train_data:
        X_train = train_data['rpkm'].values
        y_train = train_data['labels'].values

    with pd.HDFStore(test_file) as test_data:
        X_test = test_data['rpkm'].values
        y_test = test_data['labels'].values

    print("start %s transformation..." % encoding_method)
    cache_file_name = "models/%s_%d_std.model" % (encoding_method, pca_n_components)
    # The original passed a bare open() into pickle.load, leaking the handle.
    with open(cache_file_name, "rb") as cache_file:
        encoder = pickle.load(cache_file)
    X_train = encoder.transform(X_train)
    X_test = encoder.transform(X_test)
    return X_train, X_test, y_train, y_test
def do_SVM(X_train, X_test, y_train, y_test):
    """Fit an RBF-kernel SVM on the training split and report test accuracy
    along with wall-clock training and prediction times."""
    t_fit_start = timeit.default_timer()
    print('training...')
    classifier = SVC(kernel='rbf', C=1, gamma=0.01, random_state=10701,
                     decision_function_shape='ovr', cache_size=1000)
    classifier.fit(X_train, y_train)
    t_fit_end = timeit.default_timer()

    print('testing...')
    predictions = classifier.predict(X_test)
    t_pred_end = timeit.default_timer()

    print("finish SVM")
    print("train time: %s" % str(t_fit_end - t_fit_start))
    print("testing time %s" % str(t_pred_end - t_fit_end))
    accuracy = accuracy_score(y_test, predictions)
    print("accuracy: {:.4f}".format(accuracy))
    # joblib.dump(classifier, "models/svm_{:.4f}.pkl".format(accuracy))
def do_ada_boost(X_train, X_test, y_train, y_test):
    """Train AdaBoost over RBF-SVM base estimators, report test accuracy,
    and persist the fitted ensemble to ``models/Adaboost_50.model``.

    The base SVC needs ``probability=True`` so SAMME.R can use class
    probabilities.
    """
    start_time = timeit.default_timer()
    print('training...')
    clf = SVC(kernel='rbf', C=1, gamma=0.01, probability=True, random_state=10701,
              decision_function_shape='ovr', cache_size=1000)
    model = AdaBoostClassifier(clf, n_estimators=50, algorithm='SAMME.R')
    model.fit(X_train, y_train)
    mid_time = timeit.default_timer()
    print('testing...')
    y_pred = model.predict(X_test)
    end_time = timeit.default_timer()
    print("finish Adaboost")
    print("train time: %s" % str(mid_time - start_time))
    print("testing time %s" % str(end_time - mid_time))
    acc = accuracy_score(y_test, y_pred)
    print("accuracy using Adaboost is %g" % acc)
    # Bug fix: the original called pickle.dump(mode, "models/Adaboost_50.model"),
    # where ``mode`` is an undefined name (NameError) and pickle.dump requires a
    # writable file object, not a path string.
    with open("models/Adaboost_50.model", "wb") as model_file:
        pickle.dump(model, model_file)
def do_bagging_boost(X_train, X_test, y_train, y_test):
    """Train a bagging ensemble of RBF-SVMs and report test accuracy."""
    start_time = timeit.default_timer()
    print('training...')
    clf = SVC(kernel='rbf', C=1, gamma=0.01, random_state=10701,
              decision_function_shape='ovr', cache_size=1000)
    bdt = BaggingClassifier(clf)
    bdt.fit(X_train, y_train)
    mid_time = timeit.default_timer()
    print('testing...')
    y_pred = bdt.predict(X_test)
    end_time = timeit.default_timer()
    print("finish bagging boost")
    print("train time: %s" % str(mid_time - start_time))
    print("testing time %s" % str(end_time - mid_time))
    acc = accuracy_score(y_test, y_pred)
    # Bug fix: the original log line said "Adaboost" here (copy-paste from
    # do_ada_boost), mislabeling the bagging result.
    print("accuracy using bagging is %g" % acc)
def main():
    # Load the encoder-projected splits, then run the currently selected
    # classifier; swap the commented calls to compare SVM / AdaBoost / bagging.
    X_train, X_test, y_train, y_test = load_data()
    # do_SVM(X_train, X_test, y_train, y_test)
    do_ada_boost(X_train, X_test, y_train, y_test)
    # do_bagging_boost(X_train, X_test, y_train, y_test)
# do_bagging_boost(X_train, X_test, y_train, y_test)
if __name__ == '__main__':
main()
| [
"hzh0512@gmail.com"
] | hzh0512@gmail.com |
81ba4fd53b36f660673c2f5859aaefd8ae71a2e4 | bda16393b0c67d24f8071668ad8ad50d527a6bee | /the_basics/task8.py | f8de388b051241c239b179687571dc9b24926683 | [] | no_license | MaximDick/PythonCourse | 88910c6706944d38b25fbc1b50716ec05f5758ce | 7a5be59b38d1f0959423ee3efa877201cc73aaad | refs/heads/master | 2021-02-28T13:59:29.782929 | 2020-04-24T20:16:42 | 2020-04-24T20:16:42 | 245,703,053 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 306 | py | """N школьников поделили K яблок поровну, не делящийся остаток остался в корзинке.
Сколько яблок осталось в корзинке?"""
# Read the class size (n) and apple count (k); the apples are split evenly
# among the pupils, so the basket keeps the remainder k mod n.
n = int(input("Введите n:"))  # number of pupils ("enter n")
k = int(input("Введите k:"))  # number of apples ("enter k")
m = k % n
print(m)
| [
"maxim1994barca@gmail.com"
] | maxim1994barca@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.