blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2
values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220
values | src_encoding stringclasses 30
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 2 10.3M | extension stringclasses 257
values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
c25023e9994785ae6cf50dba0ce6282030295e61 | a1ec1552bc0096405069215fcfd338e573b6c520 | /app4/views.py | 8d90a60803822417b04e8d38e36c639c01e6cfab | [] | no_license | BittuSoni7759/login-page | 5fbcd73f9390178994647a1bf5afe989547f8f19 | 351471d3345ae9583f284a7ba8418ef4fb734a40 | refs/heads/master | 2023-07-17T17:13:09.574972 | 2021-09-02T16:42:38 | 2021-09-02T16:42:38 | 402,494,074 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 209 | py | from django.contrib import auth
from django.shortcuts import redirect, render
from django.contrib import auth
# Create your views here.
def logout(request):
    """Log the requesting user out (clears their session) and redirect to '/'."""
    auth.logout(request)
    return redirect('/')
| [
"bittusoni302@gmail.com"
] | bittusoni302@gmail.com |
255089f5cf3db143f9c2043f0f21b85c69954fa5 | 9d5b1c022c8b6ce0c648bc6875b6844464533dd9 | /setup.py | e26c7a746adfaf6623f4628dbc8e80f0c3fc5362 | [
"Apache-2.0"
] | permissive | cfranken/isofit | 596db6d8351be50f38cbc8eb164a7276943efc72 | a67a26fe59fe0eb3fd5fe3503736294e17172f82 | refs/heads/master | 2020-03-30T21:42:48.136041 | 2018-10-03T16:22:16 | 2018-10-03T16:22:16 | 151,639,225 | 0 | 1 | Apache-2.0 | 2018-10-04T21:39:54 | 2018-10-04T21:39:54 | null | UTF-8 | Python | false | false | 1,895 | py | #! /usr/bin/env python3
#
# Copyright 2018 California Institute of Technology
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ISOFIT: Imaging Spectrometer Optimal FITting
# Author: David R Thompson, david.r.thompson@jpl.nasa.gov
#
from io import open
from setuptools import setup, find_packages
# Pull the long description straight from the README so PyPI renders it.
with open('README.rst', 'r') as f:
    long_description = f.read()

lic = 'Apache Software License (http://www.apache.org/licenses/LICENSE-2.0)'

# Minimum tested versions of the runtime/test dependencies.
requirements = [
    'scipy>=1.1.0',
    'numba>=0.38.0',
    'matplotlib>=2.2.2',
    'scikit-learn>=0.19.1',
    'spectral>=0.19',
    'pytest>=3.5.1',
    'pep8>=1.7.1',
    'xxhash>=1.2.0',
]

setup(
    name='isofit',
    version='0.5.0',
    url='http://github.com/davidraythompson/isofit/',
    license=lic,
    author='David R. Thompson, Winston Olson-Duvall, and Team',
    author_email='david.r.thompson@jpl.nasa.gov',
    description='Imaging Spectrometer Optimal FITting',
    long_description=long_description,
    long_description_content_type='text/x-rst',
    packages=find_packages(),
    install_requires=requirements,
    python_requires='>=3',
    platforms='any',
    classifiers=[
        'Programming Language :: Python :: 3',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: OS Independent',
    ],
)
| [
"david.r.thompson@jpl.nasa.gov"
] | david.r.thompson@jpl.nasa.gov |
1276594023e619789a3cbf1e47b43fb77928a023 | 8d820b07c3af6f47d481a27826c80612791bffec | /Efficiency.py | a1d7152bde2f80e37d92a26e8ac83380b803518d | [
"MIT"
] | permissive | tybtab/radwatch-analysis | b01506f9399013f8aa2a28a4d62f214a0d805e5e | f8b9bfecac1ca7b5a761bb943f0b36ed7fbedc15 | refs/heads/master | 2021-01-12T04:51:14.078487 | 2018-05-03T21:47:13 | 2018-05-03T21:47:13 | 77,803,846 | 0 | 0 | null | 2017-01-02T01:01:09 | 2017-01-02T01:01:09 | null | UTF-8 | Python | false | false | 2,887 | py | import numpy as np
import matplotlib.pyplot as plt
import csv
class Efficiency(object):
    """
    Efficiency calibration of a detector.

    Reads (energy, efficiency, uncertainty) rows from a CSV file, fits a
    degree-4 polynomial to log(efficiency) versus log(energy / 1460), and
    plots the measured points together with the fitted curve.
    """
    def __init__(self, path='Efficiency.csv'):
        self.path = path            # input CSV: energy, efficiency, uncertainty per row
        self.rows = []              # raw CSV rows (strings)
        self.energy = []            # energies parsed from column 0
        self.efficiency = []        # [value, uncertainty] pairs
        self.values = []            # efficiency values only
        self.unc = []               # efficiency uncertainties only
        self.x = []                 # log(energy / 1460)
        self.y = []                 # log(efficiency)
        self.space = np.linspace(1, 2700, 540)  # energy grid for the fitted curve
        self.z = []                 # polynomial coefficients from np.polyfit
        self.fit = []               # fitted curve in log space
        self.new_fit = []           # fitted curve in linear space

    def open(self):
        """Read all rows from the CSV file at ``self.path``.

        NOTE: the method name shadows the builtin at class level, but the call
        below still resolves to the builtin ``open``.
        """
        with open(self.path, newline='') as f:
            reader = csv.reader(f)
            self.rows = [r for r in reader]

    def sort(self):
        """Split each CSV row into an energy and an [efficiency, unc] pair."""
        for row in self.rows:
            self.energy.append(float(row[0]))
            self.efficiency.append([float(row[1]), float(row[2])])

    def unwrap_lst_unc(self):
        """Separate the [value, uncertainty] pairs into two parallel lists."""
        for pair in self.efficiency:
            self.values.append(pair[0])
            self.unc.append(pair[1])

    def mutate(self):
        """Fit a degree-4 polynomial to log(efficiency) vs log(energy/1460)."""
        for e in self.energy:
            self.x.append(np.log(e / 1460))
        for v in self.values:
            self.y.append(np.log(v))
        self.z = np.polyfit(np.asarray(self.x), np.asarray(self.y), 4)

    def normal(self, x):
        """Return the energy normalized to the 1460 keV reference, in log space."""
        return np.log(x / 1460)

    def func3(self, x):
        """Evaluate the fitted polynomial (log-efficiency) at energy ``x``."""
        return (self.z[0] * self.normal(x)**4) + (self.z[1] * self.normal(x)**3) + (self.z[2] * self.normal(x)**2) + (self.z[3] * self.normal(x)) + (self.z[4])

    def new_func(self, x):
        """Evaluate the fitted efficiency (linear space) at energy ``x``."""
        # BUG FIX: the original called the bare name ``func3`` (NameError);
        # the method must be invoked through ``self``.
        return np.exp(self.func3(x))

    def fitting(self):
        """Evaluate the fit over ``self.space`` in log and linear space."""
        for e in self.space:
            self.fit.append(self.func3(e))
        for logval in self.fit:
            self.new_fit.append(np.exp(logval))

    def plotter(self):
        """Plot the measured efficiencies (with error bars) and the fitted curve."""
        plt.title('Efficiency Curve')
        plt.xlabel('Energy (keV)')
        plt.ylabel('Efficiency')
        # BUG FIX: modern matplotlib requires the string 'none' to suppress the
        # data line; passing the None object is rejected.
        plt.errorbar(self.energy, self.values, fmt='none', yerr=self.unc)
        plt.plot(self.energy, self.values, 'ro')
        plt.grid()
        plt.plot(self.space, self.new_fit)
        plt.legend(('Data Points', 'Fitted Curve'), loc='upper right')
        plt.ylim(0, 0.002)
        plt.show()

    def main(self):
        """Run the full pipeline: read, parse, fit and plot."""
        self.open()
        self.sort()
        self.unwrap_lst_unc()
        self.mutate()
        self.fitting()
        self.plotter()
if __name__ == '__main__':
    # Run the full calibration pipeline (read CSV, fit, plot) as a script.
    eff = Efficiency()
    eff.main()
| [
"noreply@github.com"
] | tybtab.noreply@github.com |
1b02cbd81e6d0d70e6c61416944602b6e863075c | e15b2ebbb9bf30a50d1e720624e9853aa269fc05 | /CoverSlasher/items.py | 563945642bcb4b389d94c4ea6bdfbf3d8b5cf0e0 | [] | no_license | Hodo7amShichiYA/WnacgCoverSlasher | e42ce1ec438558c2890d1bf34f9a192eb1ab4f81 | 5734d58caedb3defff622bb45de6cd073f8b656d | refs/heads/master | 2020-04-27T16:03:02.225473 | 2019-03-08T09:05:06 | 2019-03-08T09:05:06 | 174,470,262 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 326 | py | # -*- coding: utf-8 -*-
import scrapy
class CoverslasherItem(scrapy.Item):
    """Scrapy item carrying image-download fields for the cover spider."""
    # define the fields for your item here like:
    # name = scrapy.Field()
    image_urls = scrapy.Field() # URLs of the images to save
    images = scrapy.Field() # information about the saved images
    image_names = scrapy.Field() # information about the saved images (duplicated comment in original; presumably the image names)
| [
"unconfigured@null.spigotmc.org"
] | unconfigured@null.spigotmc.org |
7f09a812196c0f573db33b0a5ed6d9b411928ff8 | 19f18997f10e66fb96738d50c8f83a62b252f05a | /tests/numerical/fit/test_fit.py | 9683412f1e1206b30c28b4ca7251e57146c0975d | [
"MIT"
] | permissive | hauchenjiang/Copulas | 97036e66a9266299d097d2dcba83b340c6ef844b | 64b02ebd944f150bcc5706574a739d7b75d3751f | refs/heads/master | 2021-03-01T09:41:08.624375 | 2020-03-06T14:08:38 | 2020-03-06T14:08:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 932 | py | import glob
import json
import os
import numpy as np
import pandas as pd
import pytest
from copulas import get_instance
BASE = os.path.dirname(__file__)
# One parametrized test case per JSON config found under test_cases/<group>/.
TESTS = glob.glob(BASE + '/test_cases/*/*.json')
@pytest.mark.parametrize("config_path", TESTS)
def test_fit(config_path):
    """Fit the configured model on its CSV input and compare the fitted
    parameters against every reference output within the configured rtol."""
    with open(config_path, 'r') as config_file:
        config = json.load(config_file)

    # Setup: build the model under test and load its training data.
    test_spec = config['test']
    instance = get_instance(test_spec['class'], **test_spec['kwargs'])
    data = pd.read_csv(os.path.join(BASE, 'input', config['input']))

    # Run the fit on the raw values.
    instance.fit(data.values)

    # Assert: every expected parameter matches within the relative tolerance.
    params = instance.to_dict()
    rtol = config['settings']['rtol']
    for other, expected in config['output'].items():
        for key, exp in expected.items():
            obs = params[key]
            assert np.isclose(exp, obs, rtol=rtol), \
                "Mismatch against {} on {}".format(other, config_path)
| [
"noreply@github.com"
] | hauchenjiang.noreply@github.com |
7264625e70a356884089103665bb18b428c082f9 | 0b1dc117fa9775edad8af175a1bd58d863fc3e58 | /HMM.py | 4fe6472df76d3bc89a82cbaedb2839f451b889d9 | [] | no_license | long-johnson/HMM_Python | e5b2e4bfea646466985d8ce49c0a510a46069733 | 6bf8d76c413680172d206993dc80864983c3e0c8 | refs/heads/master | 2020-04-15T14:29:07.932793 | 2018-04-06T16:55:31 | 2018-04-06T16:55:31 | 48,696,638 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,467 | py | # -*- coding: utf-8 -*-
"""
Various functions that apply both to Disrete HMM amd Gaussian HMM
"""
import numpy as np
import DiscreteHMM as dhmm
import GaussianHMM as ghmm
def calc_symmetric_distance(hmm1, hmm2, T, K=1, seed=None):
    """ Calculate symmetric distance between two hmms based on the logprobs.
    refer to formula (89) from "Rabiner, 1989 - A tutorial on hidden Markov..."

    Parameters
    ----------
    T : int
        length of sequences to evaluate on
    K : int
        number of sequences to evaluate on
    seed : optional
        seed forwarded to both models' sequence generators
    """
    seqs1, _ = hmm1.generate_sequences(K, T, seed=seed)
    seqs2, _ = hmm2.generate_sequences(K, T, seed=seed)
    # Average of the two asymmetric distances (formula 89).
    return np.abs(_calc_distance(hmm1, hmm2, seqs2) +
                  _calc_distance(hmm2, hmm1, seqs1)) / 2


def _calc_distance(hmm1, hmm2, seqs2):
    """ Calculate asymmetric distance between hmm1 and hmm2.
    refer to formula (88) from "Rabiner, 1989 - A tutorial on hidden Markov..."

    Generalized: the normalization now uses the true total number of
    observations, so sequences of varying lengths are handled correctly
    (the original assumed every sequence had the length of the first one).
    """
    p12 = hmm1.calc_loglikelihood(seqs2)
    p22 = hmm2.calc_loglikelihood(seqs2)
    # Total number of elements across all sequences (lengths may differ).
    n_elements = sum(len(seq) for seq in seqs2)
    return (p22 - p12) / n_elements
| [
"razoru93@gmail.com"
] | razoru93@gmail.com |
a03639d7fa672ef3cad188728c3cb687f9110537 | 882180e75d746c9c0f4833b4e77de4dfde02a590 | /asr/common/credential.py | 5df7afed594c197355aa0faf71a88a8ff457aa54 | [] | no_license | taopin/Audio_to_Intent | fbbd0362cef3a3b6e3e768ca3589e46e9ff3a73c | 76b56eac4421c822ffa6994a08e2b6d22d6f9f4e | refs/heads/main | 2023-08-22T01:11:37.675002 | 2021-10-20T07:06:15 | 2021-10-20T07:06:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 161 | py | # -*- coding: utf-8 -*-
class Credential:
    """Simple container for a secret_id / secret_key API credential pair."""

    def __init__(self, secret_id, secret_key):
        # Keep both halves of the credential for later request authentication.
        self.secret_id, self.secret_key = secret_id, secret_key
| [
"lihai19@mails.tsinghua.edu.cn"
] | lihai19@mails.tsinghua.edu.cn |
30a48182d41cbecb7caba66c3aa0354a0b546438 | be91bc1add0d1f8af572eb84e4e95d76e2a4a587 | /Views/Affichage/see.py | efae27c2ac7f981f029020eb515583f29e978c38 | [
"MIT"
] | permissive | Mleandra/automata-brains | d03ae64b9cd4acf23b4645a51afb027dde596d2e | 6c2a7714d1fcb16763084a33a2f0f1364d4f8eb8 | refs/heads/master | 2020-12-26T18:22:22.981632 | 2020-02-01T09:52:39 | 2020-02-01T09:52:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,631 | py | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'UI/see.ui'
#
# Created by: PyQt5 UI code generator 5.14.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
from Models.Automate import Automate
from Views.Affichage.makeView import SeeEtatWindow, SeeAlphabetWindow, ReconnaissanceWindow, \
SeeCentralWindow
class Ui_Form(object):
    """Qt Designer-generated form (hand-edited) that displays an Automate:
    a central transition view on top and alphabet / states / recognition
    sub-views on the bottom row."""
    def __init__(self, automate:Automate):
        self.automate = automate

    def setupUi(self, Form):
        """Build the widget tree and wire the automaton's change signal."""
        Form.setObjectName("Form")
        Form.resize(470, 368)
        self.verticalLayout = QtWidgets.QVBoxLayout(Form)
        self.verticalLayout.setObjectName("verticalLayout")
        self.groupBox = QtWidgets.QGroupBox(Form)
        self.groupBox.setFlat(True)
        self.groupBox.setObjectName("groupBox")
        self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.groupBox)
        self.verticalLayout_2.setObjectName("verticalLayout_2")
        self.transition = SeeCentralWindow(self.automate)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(1)
        sizePolicy.setHeightForWidth(self.transition.sizePolicy().hasHeightForWidth())
        self.transition.setSizePolicy(sizePolicy)
        self.transition.setObjectName("transition")
        self.verticalLayout_2.addWidget(self.transition)
        self.horizontalLayout = QtWidgets.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.alphabet = SeeAlphabetWindow(self.automate)
        self.alphabet.setObjectName("alphabet")
        self.horizontalLayout_2 = QtWidgets.QHBoxLayout(self.alphabet)
        self.horizontalLayout_2.setObjectName("horizontalLayout_2")
        self.horizontalLayout.addWidget(self.alphabet)
        self.etats = SeeEtatWindow(self.automate)
        self.etats.setObjectName("etats")
        self.horizontalLayout_3 = QtWidgets.QHBoxLayout(self.etats)
        self.horizontalLayout_3.setObjectName("horizontalLayout_3")
        self.horizontalLayout.addWidget(self.etats)
        self.reconnaitre = ReconnaissanceWindow()
        self.reconnaitre.setObjectName("reconnaitre")
        self.horizontalLayout_4 = QtWidgets.QHBoxLayout(self.reconnaitre)
        self.horizontalLayout_4.setObjectName("horizontalLayout_4")
        self.horizontalLayout.addWidget(self.reconnaitre)
        self.verticalLayout_2.addLayout(self.horizontalLayout)
        self.verticalLayout.addWidget(self.groupBox)

        self.retranslateUi(Form)
        QtCore.QMetaObject.connectSlotsByName(Form)
        # Keep the group-box title in sync with the automaton's state.
        self.automate.automate_modifier.connect(self.set_state)

    def retranslateUi(self, Form):
        """Set all translatable texts (standard Qt Designer hook)."""
        _translate = QtCore.QCoreApplication.translate
        Form.setWindowTitle(_translate("Form", "Form"))
        self.groupBox.setTitle(_translate("Form", f"{self.automate.nom} ({self.automate.type} {'complet' if self.automate.est_complet else 'incomplet'})"))

    def set_state(self):
        """Refresh the group-box title after the automaton was modified."""
        self.groupBox.setTitle(f"{self.automate.nom} ({self.automate.type} {'complet' if self.automate.est_complet else 'incomplet'})")

    def switch_transition(self):
        """Show the transition view in the main area."""
        self.verticalLayout_2.addWidget(self.transition)

    def switch_reconnaissance(self):
        """Swap the transition view for the recognition view."""
        # BUG FIX: the widget created in setupUi is stored as
        # ``self.reconnaitre``; ``self.reconnaissance`` never exists, so this
        # method always raised AttributeError before.
        self.verticalLayout_2.replaceWidget(self.transition, self.reconnaitre)
if __name__ == "__main__":
import sys
app = QtWidgets.QApplication(sys.argv)
Form = QtWidgets.QWidget()
ui = Ui_Form()
ui.setupUi(Form)
Form.show()
sys.exit(app.exec_())
| [
"yvesjordan06@gmail.com"
] | yvesjordan06@gmail.com |
e074aa8e9acc9aa8608f15b10dc2257a9cfb6cdf | 55b44516aad78bf34606051238d2f549d8134591 | /lecture_test/linked_list.py | 9b80c0cbf3b0664013f7fee6cc538ad987cc50ca | [] | no_license | hmtbgc/cs61a | 470e22eb1b2a30292a654300be6efbc84d2814ce | aaf89e6f13553069a06973f62330da42fbfd82a6 | refs/heads/master | 2020-06-17T17:39:07.773216 | 2019-09-24T11:20:26 | 2019-09-24T11:20:26 | 195,995,397 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,144 | py | class link:
    empty = ()  # class-level sentinel shared by all nodes: marks the end of a list
    def __init__(self, first, rest=empty):
        # ``rest`` defaults to the empty-tuple sentinel, i.e. a one-node list.
        self.first = first
        self.rest = rest
a = link(1, link(2, link(1)))
b = link(3, link(2, link(1)))
combined = link(a, link(b))
def sum(lnk):
    """Recursively total the values stored in a linked list.

    NOTE: shadows the built-in ``sum`` within this module.
    """
    empty = ()
    if lnk.rest == empty:
        return lnk.first
    return lnk.first + sum(lnk.rest)
def display_link(lnk):
    """Return the list's values concatenated into one string (no separator)."""
    def build(node):
        # Base case: last node -> just its value; otherwise prepend and recurse.
        if node.rest == ():
            return str(node.first)
        return str(node.first) + build(node.rest)
    return build(lnk)
def map(f, lnk):
    """Return a NEW linked list with ``f`` applied to every element.

    The input list is left unchanged. NOTE: shadows the built-in ``map``.
    """
    empty = ()
    if lnk.rest == empty:
        return link(f(lnk.first))
    return link(f(lnk.first), map(f, lnk.rest))
def map_v2(f, lnk):
    """Apply ``f`` to every element IN PLACE (mutates the list, returns None).

    An equivalent iterative version would simply walk the list with a while
    loop; this recursive form mutates the head and then recurses on the tail.
    """
    if lnk == ():
        return
    lnk.first = f(lnk.first)
    map_v2(f, lnk.rest)
def insert(num, lnk):
    """Splice a new node holding ``num`` in at index 1 of ``lnk``, in place."""
    # Build the new node pointing at the old tail, then hook it in.
    lnk.rest = link(num, lnk.rest)
"hmtbgc@163.com"
] | hmtbgc@163.com |
850f7be79f76e3958f35634f22f146e00d19e140 | be7dd069d4803f8f2367b890ef6364e75a49ae70 | /NG/station/Get_StnUniqueId.py | 55c53b7f4df1f389071877ea4824ba26f844350c | [] | no_license | ShervanGharari/Application-Production-Support | 3e4f6eedf37fdbacd79674d01015a6ba2963aa06 | 23f736354500c70e0e11907d4d1254a4f03d77a6 | refs/heads/main | 2023-01-11T10:00:19.339521 | 2020-10-21T14:46:02 | 2020-10-21T14:46:02 | 306,076,505 | 0 | 0 | null | 2020-11-23T21:09:06 | 2020-10-21T15:57:57 | null | UTF-8 | Python | false | false | 632 | py | ##############################################
###
import requests
def getStationUniqueId(stn, token, server_pub):
stnIdUrl = server_pub + "GetLocationData?LocationIdentifier=" + str(stn) + "&token=" + token
exists = False
try:
r = requests.get(stnIdUrl)
exists = True
print "Station exists: " + str(stn)
except:
print "Station does not exists: " + str(stn)
# get the unique Id of the station
try:
r = requests.get(stnIdUrl)
UniqueId = r.json()['UniqueId']
return UniqueId
except:
print "Unable to get Unique Id for station: " + str(stn) | [
"56357108+yanxuYX@users.noreply.github.com"
] | 56357108+yanxuYX@users.noreply.github.com |
91f92c326775e661700467fed42718d9b09d1adb | e53d8488ffea72db3f3618f5639f2ddfa929f11b | /perpustakaan/migrations/0002_delete_buku.py | 63677b59c0a8886eaae0991e502bc4c513f0449e | [] | no_license | writerlab/perpus | 60473fa2b51d67617525bfc25b75656141e9529b | 9cf7676a543c3414ac2e7fca88c3d26ac403be3b | refs/heads/master | 2023-07-15T05:27:23.859264 | 2021-08-29T02:02:34 | 2021-08-29T02:02:34 | 262,490,216 | 5 | 10 | null | null | null | null | UTF-8 | Python | false | false | 288 | py | # Generated by Django 2.2.12 on 2020-05-11 07:13
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('perpustakaan', '0001_initial'),
]
operations = [
migrations.DeleteModel(
name='Buku',
),
]
| [
"netspytux@gmail.com"
] | netspytux@gmail.com |
08666a6231cbdd222deec01b14ea64c45f9cabcf | 684c550fb172b7406c2dd363bd9d6c16d128337b | /PFT_Exercises/exercise-pexpect/tests/test_paths.py | 0da271afa1065cb072c7d89f370701cf390d3f64 | [] | no_license | kristoffern/practical-python-for-testers | 5a5b6185faf318e4642234e67059ab449e2b144a | bcb0a6e6098c9d4dda4d7f3951d9b440c2b5badf | refs/heads/main | 2023-01-29T07:19:32.834336 | 2020-12-12T22:27:07 | 2020-12-12T22:27:07 | 310,860,512 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,404 | py | class HappyPath:
questions = ('Installation path \[\/opt\/esconfs\]:',
'Full or light installation \[Full\/Light\]:',
'Deploy web server \[Y\/N\]:',
'On which port \[80\]:',
'Admin user account \[admin\]:',
'Admin acount password \[\]:',
'Backup previous data \[Y\/N\]:',
'Backup path \[~\/esconfs_backup\/\]:',
'Install with these setting \[Y\/N\]:')
answers = ('/opt/esconfs',
'Full',
'Y',
'80',
'admin',
'secret',
'Y',
'\n',
'Y')
class UnhappyPath:
    """Prompt/answer script deviating from the defaults (port 8080)."""

    # Regex patterns matching each installer prompt, in order.
    questions = (
        r'Installation path \[\/opt\/esconfs\]:',
        r'Full or light installation \[Full\/Light\]:',
        r'Deploy web server \[Y\/N\]:',
        r'On which port \[80\]:',
        r'Admin user account \[admin\]:',
        r'Admin acount password \[\]:',
        r'Backup previous data \[Y\/N\]:',
        r'Backup path \[~\/esconfs_backup\/\]:',
        r'Install with these setting \[Y\/N\]:',
    )

    # Reply sent for each prompt above; '\n' just accepts the default.
    answers = (
        '/opt/esconfs',
        'Full',
        'Y',
        '8080',
        'admin',
        'secret',
        'Y',
        '\n',
        'Y',
    )
| [
"kristoffer.nordstrom@northerntest.se"
] | kristoffer.nordstrom@northerntest.se |
165c5d9b380d90a46b79d0e3afc8af7a6ba40b19 | 72401e9527c11e0fa025c9992e7c27c04197c575 | /notebook/__init__.py | 667761b9f9ea22aed67fca523176f0db605bb39f | [] | no_license | mirobeka/notebook | f7d39cc1f976810d45c029f0402a93144634b21a | 6838de1c482cc084f95e6abf2534153fc1d07972 | refs/heads/master | 2021-01-10T06:42:52.736498 | 2016-03-11T10:58:10 | 2016-03-11T10:58:10 | 51,080,119 | 3 | 4 | null | null | null | null | UTF-8 | Python | false | false | 564 | py | from flask import Flask
from notebook.api import notebook_api
from notebook.frontend import client_application
API_VERSION='v1'
def create_app(cfgfile=None, cfgobj=None):
    """Application factory: build, configure and return the Flask app.

    Args:
        cfgfile: optional path to a config file, loaded via
            ``app.config.from_pyfile``.
        cfgobj: optional object or import string, loaded via
            ``app.config.from_object``.

    Fix: both parameters were previously accepted but silently ignored.
    """
    app = Flask(__name__)
    # NOTE(review): hard-coded secret key should come from configuration or
    # the environment in production.
    app.secret_key = '\xfe\x060|\xfb\xf3\xe9F\x0c\x93\x95\xc4\xbfJ\x12gu\xf1\x0cP\xd8\n\xd5'

    # Apply the optional configuration sources (object first, file overrides).
    if cfgobj is not None:
        app.config.from_object(cfgobj)
    if cfgfile is not None:
        app.config.from_pyfile(cfgfile)

    # register flask blueprints for front end and rest api
    app.register_blueprint(
        client_application,
        url_prefix="")
    app.register_blueprint(
        notebook_api,
        url_prefix="/api/{}".format(API_VERSION))
    return app
| [
"mirobeka@gmail.com"
] | mirobeka@gmail.com |
006d147bfedac319f1d2109e4b368c3011fffe09 | 1de336061eb27dee989f459404c86dae1bf78d5e | /nsnitro/nsnitro.py | c9c48e9ef15e6adc685d59710fedda96d2cd10b8 | [
"Apache-2.0"
] | permissive | zojoncj/cleanthehead | 4b0cea4f5a484e084afd38b8ed2c0f97df68121c | 6f008e1f32e168d41bc5fbe77156b78057ff735f | refs/heads/master | 2016-09-06T00:26:57.650032 | 2013-08-13T23:01:54 | 2013-08-13T23:01:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,051 | py | """ Citrix Netscaler Nitro API accessor """
import urllib, urllib2
from nsutil import *
__version__ = "0.0.2"
class NSNitro:
    """ Main class: thin wrapper around the Citrix NetScaler Nitro v1 REST API.

    Holds connection settings and a session cookie obtained by login(), and
    exposes raw POST/PUT/GET/DELETE helpers returning NSNitroResponse objects.
    NOTE: Python 2 code (urllib2, ``except X, e`` syntax, print statements).
    """
    # Placeholder defaults; all of these are overwritten in __init__ / login().
    __ip = "1.2.3.4"
    __user = "api_user"
    __password = "api_user"
    __baseurl = "http://1.2.3.4/nitro/v1/config/"
    __sessionid = ""
    __loggedin = False
    __initialized = False
    __contenttype = "application/x-www-form-urlencoded"
    __postheaders = {'Cookie' : 'sessionid='+__sessionid, 'Content-type' : __contenttype}

    def __init__(self, ip, user, password, useSSL=False):
        """ Constructor: ip - LB ip, user - LB username, password - LB password,
        useSSL - use https instead of http when True """
        self.__ip = ip
        self.__user = user
        self.__password = password
        self.__baseurl = "%s://%s/nitro/v1/config/" % ('https' if useSSL else 'http',ip)
        self.__initialized = True

    def get_url(self):
        """ Returns base url for nitro API. Mostly useful for debugging """
        if not self.__initialized:
            raise NSNitroError("Not initialized.")
        return self.__baseurl

    def get_sessionid(self):
        """ Returns sessionID that LB gave us after logging in """
        if not self.__initialized or not self.__loggedin:
            raise NSNitroError("Not initialized or not logged in.")
        return self.__sessionid

    def login(self):
        """ Logins to the LB using the credentials give to constructor.

        On success, caches the session id and rebuilds the POST headers so
        that subsequent requests carry the session cookie.
        """
        if not self.__initialized:
            raise NSNitroError("Not initialized.")
        payload = {"object":json.dumps({"login":{"username":self.__user,"password":self.__password}})}
        try:
            nsresponse = self.post(payload)
            # NOTE(review): post() already raises on a failed response, so
            # this check is redundant (but harmless).
            if nsresponse.failed:
                raise NSNitroError(nsresponse.message)
            self.__sessionid = nsresponse.get_response_field('sessionid')
            self.__postheaders = {'Cookie' : 'sessionid='+self.__sessionid, 'Content-type' : self.__contenttype}
            self.__loggedin = True
            return True
        except SyntaxError:
            raise NSNitroError("Could not parse LB response.")
        except urllib2.URLError, ue:
            raise NSNitroError("Error logging in!" + ue.message)

    def post(self, payload):
        """Send a form-encoded POST to the base URL; return an NSNitroResponse.

        Raises NSNitroError on HTTP errors or when the response reports failure.
        """
        try:
            payload_encoded = urllib.urlencode(payload)
            req = urllib2.Request(self.__baseurl, payload_encoded, self.__postheaders)
            response = urllib2.urlopen(req)
        except urllib2.HTTPError, e:
            # NOTE(review): e.message is deprecated in Python 2.6+.
            raise NSNitroError("Could not send post request: %s, %s" % (e.code, e.message))
        nsresponse = NSNitroResponse(response.read())
        if nsresponse.failed:
            raise NSNitroError(nsresponse.message)
        return nsresponse

    def put(self, payload):
        """Send a JSON-encoded PUT (with session cookie) to the base URL."""
        try:
            opener = urllib2.build_opener(urllib2.HTTPHandler)
            request = urllib2.Request(self.__baseurl, json.dumps(payload))
            request.add_header('Cookie', 'sessionid='+self.__sessionid)
            # urllib2 has no native PUT; override the method resolver.
            request.get_method = lambda: 'PUT'
            response = opener.open(request)
        except urllib2.HTTPError, e:
            raise NSNitroError("Could not send put request: %s, %s" % (e.code, e.message))
        nsresponse = NSNitroResponse(response.read())
        if nsresponse.failed:
            raise NSNitroError(nsresponse.message)
        return nsresponse

    def get(self, url):
        """GET the given resource URL with the session cookie attached."""
        try:
            opener = urllib2.build_opener()
            opener.addheaders.append(('Cookie', 'sessionid='+self.__sessionid))
            response = opener.open(url)
        except urllib2.HTTPError, e:
            # NOTE(review): "reponse" typo in the printed message (runtime
            # string, left unchanged here).
            print "Got reponse code: %s from the server" % e.code
            raise NSNitroError("Could not get resource: %s, %s" % (e.code, e.message))
        nsresponse = NSNitroResponse(response.read())
        if nsresponse.failed:
            raise NSNitroError(nsresponse.message)
        return nsresponse

    def delete(self, url):
        """Send a DELETE for the given resource URL with the session cookie."""
        try:
            opener = urllib2.build_opener()
            req = urllib2.Request(url)
            req.add_header('Cookie', 'sessionid='+self.__sessionid)
            req.get_method = lambda: 'DELETE'
            response = urllib2.urlopen(req)
        except urllib2.HTTPError, e:
            raise NSNitroError("Could not send delete request: %s, %s" % (e.code, e.message))
        nsresponse = NSNitroResponse(response.read())
        if nsresponse.failed:
            raise NSNitroError(nsresponse.message)
        return nsresponse

    def logout(self):
        """End the session via the 'logout' header and drop the session id.

        NOTE(review): __loggedin is not reset to False here, and __sessionid is
        deleted rather than cleared, so get_sessionid() afterwards raises
        AttributeError instead of the intended NSNitroError.
        """
        try:
            opener = urllib2.build_opener()
            req = urllib2.Request(self.__baseurl)
            req.add_header('Cookie','sessionid='+self.__sessionid)
            req.add_header('logout','{}')
            response = urllib2.urlopen(req)
        except urllib2.HTTPError,e:
            raise NSNitroError("Could not send logout request: %s, %s" % (e.code, e.message))
        nsresponse = NSNitroResponse(response.read())
        if nsresponse.failed:
            raise NSNitroError(nsresponse.message)
        del self.__sessionid
        return nsresponse.get_json_response()
| [
"Josh.Zojonc@Oregonstate.edu"
] | Josh.Zojonc@Oregonstate.edu |
2a930f8fc17f6a4af9fdfaeb6ff31fb3020b1552 | 6be845bf70a8efaf390da28c811c52b35bf9e475 | /windows/Resources/Python/Core/Lib/lib2to3/main.py | 21120209a72ee4a781dc2b7ce9223426acd4d8bd | [] | no_license | kyeremalprime/ms | 228194910bf2ed314d0492bc423cc687144bb459 | 47eea098ec735b2173ff0d4e5c493cb8f04e705d | refs/heads/master | 2020-12-30T15:54:17.843982 | 2017-05-14T07:32:01 | 2017-05-14T07:32:01 | 91,180,709 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 6,385 | py | # uncompyle6 version 2.9.10
# Python bytecode 2.7 (62211)
# Decompiled from: Python 3.6.0b2 (default, Oct 11 2016, 05:27:10)
# [GCC 6.2.0 20161005]
# Embedded file name: main.py
"""
Main program for 2to3.
"""
from __future__ import with_statement
import sys
import os
import difflib
import logging
import shutil
import optparse
from . import refactor
def diff_texts(a, b, filename):
"""Return a unified diff of two strings."""
a = a.splitlines()
b = b.splitlines()
return difflib.unified_diff(a, b, filename, filename, '(original)', '(refactored)', lineterm='')
class StdoutRefactoringTool(refactor.MultiprocessRefactoringTool):
"""
Prints output to stdout.
"""
def __init__(self, fixers, options, explicit, nobackups, show_diffs):
self.nobackups = nobackups
self.show_diffs = show_diffs
super(StdoutRefactoringTool, self).__init__(fixers, options, explicit)
def log_error(self, msg, *args, **kwargs):
self.errors.append((msg, args, kwargs))
self.logger.error(msg, *args, **kwargs)
def write_file(self, new_text, filename, old_text, encoding):
if not self.nobackups:
backup = filename + '.bak'
if os.path.lexists(backup):
try:
os.remove(backup)
except os.error as err:
self.log_message("Can't remove backup %s", backup)
try:
os.rename(filename, backup)
except os.error as err:
self.log_message("Can't rename %s to %s", filename, backup)
write = super(StdoutRefactoringTool, self).write_file
write(new_text, filename, old_text, encoding)
if not self.nobackups:
shutil.copymode(backup, filename)
def print_output(self, old, new, filename, equal):
if equal:
self.log_message('No changes to %s', filename)
else:
self.log_message('Refactored %s', filename)
if self.show_diffs:
diff_lines = diff_texts(old, new, filename)
try:
if self.output_lock is not None:
with self.output_lock:
for line in diff_lines:
print line
sys.stdout.flush()
else:
for line in diff_lines:
print line
except UnicodeEncodeError:
warn("couldn't encode %s's diff for your terminal" % (
filename,))
return
return
def warn(msg):
print >> sys.stderr, 'WARNING: %s' % (msg,)
def main(fixer_pkg, args=None):
"""Main program.
Args:
fixer_pkg: the name of a package where the fixers are located.
args: optional; a list of command line arguments. If omitted,
sys.argv[1:] is used.
Returns a suggested exit status (0, 1, 2).
"""
parser = optparse.OptionParser(usage='2to3 [options] file|dir ...')
parser.add_option('-d', '--doctests_only', action='store_true', help='Fix up doctests only')
parser.add_option('-f', '--fix', action='append', default=[], help='Each FIX specifies a transformation; default: all')
parser.add_option('-j', '--processes', action='store', default=1, type='int', help='Run 2to3 concurrently')
parser.add_option('-x', '--nofix', action='append', default=[], help='Prevent a transformation from being run')
parser.add_option('-l', '--list-fixes', action='store_true', help='List available transformations')
parser.add_option('-p', '--print-function', action='store_true', help='Modify the grammar so that print() is a function')
parser.add_option('-v', '--verbose', action='store_true', help='More verbose logging')
parser.add_option('--no-diffs', action='store_true', help="Don't show diffs of the refactoring")
parser.add_option('-w', '--write', action='store_true', help='Write back modified files')
parser.add_option('-n', '--nobackups', action='store_true', default=False, help="Don't write backups for modified files")
refactor_stdin = False
flags = {}
options, args = parser.parse_args(args)
if not options.write and options.no_diffs:
warn("not writing files and not printing diffs; that's not very useful")
if not options.write and options.nobackups:
parser.error("Can't use -n without -w")
if options.list_fixes:
print 'Available transformations for the -f/--fix option:'
for fixname in refactor.get_all_fix_names(fixer_pkg):
print fixname
if not args:
return 0
if not args:
print >> sys.stderr, 'At least one file or directory argument required.'
print >> sys.stderr, 'Use --help to show usage.'
return 2
if '-' in args:
refactor_stdin = True
if options.write:
print >> sys.stderr, "Can't write to stdin."
return 2
if options.print_function:
flags['print_function'] = True
level = logging.DEBUG if options.verbose else logging.INFO
logging.basicConfig(format='%(name)s: %(message)s', level=level)
avail_fixes = set(refactor.get_fixers_from_package(fixer_pkg))
unwanted_fixes = set((fixer_pkg + '.fix_' + fix for fix in options.nofix))
explicit = set()
if options.fix:
all_present = False
for fix in options.fix:
if fix == 'all':
all_present = True
else:
explicit.add(fixer_pkg + '.fix_' + fix)
if all_present:
requested = avail_fixes.union(explicit) if 1 else explicit
else:
requested = avail_fixes.union(explicit)
fixer_names = requested.difference(unwanted_fixes)
rt = StdoutRefactoringTool(sorted(fixer_names), flags, sorted(explicit), options.nobackups, not options.no_diffs)
if rt.errors or refactor_stdin:
rt.refactor_stdin()
else:
try:
rt.refactor(args, options.write, options.doctests_only, options.processes)
except refactor.MultiprocessingUnsupported:
print >> sys.stderr, "Sorry, -j isn't supported on this platform."
return 1
rt.summarize()
return int(bool(rt.errors)) | [
"kyeremalprime@gmail.com"
] | kyeremalprime@gmail.com |
36818cd817e1f1d2b3e8c9cfb87439327c2222d9 | beac72992bc0f5080e3bb8363dff09e1dbef7897 | /selenium_base_site.py | cc0b77853b2c78129595ee6901cc69ccd609b924 | [] | no_license | nanites2000/Selenium | 27f45212af44d75fd53fe5f409248abfead7d83a | 02b01824adde5509b4f8991bc6a6c6ac9181e7fc | refs/heads/main | 2023-03-08T11:48:57.170860 | 2021-02-18T08:09:49 | 2021-02-18T08:09:49 | 339,977,438 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,155 | py | from seleniumbase import BaseCase
import time
class MyTestClass(BaseCase):
def test_demo_site(self):
self.open("https://seleniumbase.io/demo_page")
# Assert the title of the current web page
self.assert_title("Web Testing Page")
# Assert that the element is visible on the page
self.assert_element("tbody#tbodyId")
# Assert that the text appears within a given element
self.assert_text("Demo Page", "h1")
# Type/update text in text fields on the page
self.type("#myTextInput", "This is Automated")
time.sleep(2)
self.type("textarea.area1", "Testing Time!\n")
time.sleep(2)
self.type('[name="preText2"]', "Typing Text!")
# Verify that a hover dropdown link changes page text
self.assert_text("Automation Practice", "h3")
self.hover_and_click("#myDropdown", "#dropOption2")
self.assert_text("Link Two Selected", "h3")
# Verify that a button click changes text on the page
self.assert_text("This Text is Green", "#pText")
self.click("#myButton")
self.assert_text("This Text is Purple", "#pText")
# Assert that the given SVG is visible on the page
self.assert_element('svg[name="svgName"]')
# Verify that a slider control updates a progress bar
self.assert_element('progress[value="50"]')
self.press_right_arrow("#myslider", times=5)
self.assert_element('progress[value="100"]')
# Verify that a "select" option updates a meter bar
self.assert_element('meter[value="0.25"]')
self.select_option_by_text("#mySelect", "Set to 75%")
self.assert_element('meter[value="0.75"]')
# Assert an element located inside an iFrame
self.assert_false(self.is_element_visible("img"))
self.switch_to_frame("#myFrame1")
self.assert_true(self.is_element_visible("img"))
self.switch_to_default_content()
# Assert text located inside an iFrame
self.assert_false(self.is_text_visible("iFrame Text"))
self.switch_to_frame("#myFrame2")
self.assert_true(self.is_text_visible("iFrame Text"))
self.switch_to_default_content()
# Verify that clicking a radio button selects it
self.assert_false(self.is_selected("#radioButton2"))
self.click("#radioButton2")
self.assert_true(self.is_selected("#radioButton2"))
# Verify that clicking a checkbox makes it selected
self.assert_false(self.is_selected("#checkBox1"))
self.click("#checkBox1")
self.assert_true(self.is_selected("#checkBox1"))
# Verify clicking on multiple elements with one call
self.assert_false(self.is_selected("#checkBox2"))
self.assert_false(self.is_selected("#checkBox3"))
self.assert_false(self.is_selected("#checkBox4"))
self.click_visible_elements("input.checkBoxClassB")
self.assert_true(self.is_selected("#checkBox2"))
self.assert_true(self.is_selected("#checkBox3"))
self.assert_true(self.is_selected("#checkBox4"))
# Verify that clicking an iFrame checkbox selects it
self.assert_false(self.is_element_visible(".fBox"))
self.switch_to_frame("#myFrame3")
self.assert_true(self.is_element_visible(".fBox"))
self.assert_false(self.is_selected(".fBox"))
self.click(".fBox")
self.assert_true(self.is_selected(".fBox"))
self.switch_to_default_content()
# Assert link text
self.assert_link_text("seleniumbase.com")
self.assert_link_text("SeleniumBase on GitHub")
self.assert_link_text("seleniumbase.io")
# Click link text
self.click_link("SeleniumBase Demo Page")
# Assert exact text
self.assert_exact_text("Demo Page", "h1")
# Highlight a page element (Also asserts visibility)
self.highlight("h2")
# Assert no broken links (Can be slow if many links)
# self.assert_no_404_errors()
# Assert no JavaScript errors (Can also detect 404s)
# self.assert_no_js_errors() | [
"nanites2000@yahoo.com"
] | nanites2000@yahoo.com |
a125ce982cc39eab106b53f576d0f13157a6e8b6 | ce4c00268aeb86b9d2f59216359f5ee076d39830 | /checkout/migrations/0001_initial.py | 34da110e344676011a150b28a9329b22b610d90f | [] | no_license | Code-Institute-Submissions/MichaelOsarumwense-Treasure_Hair_Collections_MayResub | 17b2bd6a13d1133e72c5e2cbf8d3812bc0801216 | add5ad99e426a383cf6a0a61206c9cba439efd1c | refs/heads/master | 2023-05-09T06:41:39.579440 | 2021-05-28T10:58:41 | 2021-05-28T10:58:41 | 372,469,561 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,345 | py | # Generated by Django 3.1.6 on 2021-03-05 22:44
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('products', '0002_auto_20210302_1126'),
]
operations = [
migrations.CreateModel(
name='Order',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('order_number', models.CharField(editable=False, max_length=32)),
('full_name', models.CharField(max_length=50)),
('email', models.EmailField(max_length=254)),
('phone_number', models.CharField(max_length=20)),
('country', models.CharField(max_length=40)),
('postcode', models.CharField(blank=True, max_length=20, null=True)),
('town_or_city', models.CharField(max_length=40)),
('street_address1', models.CharField(max_length=80)),
('street_address2', models.CharField(blank=True, max_length=80, null=True)),
('county', models.CharField(blank=True, max_length=80, null=True)),
('date', models.DateTimeField(auto_now_add=True)),
('delivery_cost', models.DecimalField(decimal_places=2, default=0, max_digits=6)),
('order_total', models.DecimalField(decimal_places=2, default=0, max_digits=10)),
('grand_total', models.DecimalField(decimal_places=2, default=0, max_digits=10)),
],
),
migrations.CreateModel(
name='OrderLineItem',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('product_size', models.CharField(blank=True, max_length=2, null=True)),
('quantity', models.IntegerField(default=0)),
('lineitem_total', models.DecimalField(decimal_places=2, editable=False, max_digits=6)),
('order', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lineitems', to='checkout.order')),
('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='products.product')),
],
),
]
| [
"sanmicad@gmail.com"
] | sanmicad@gmail.com |
bffa3b52e28fc2e8b8fef91856bc09c4cace489a | 0a01dd11c0a6aab857b49ce614208a93ff76d2f0 | /textract_demo_compute_score.py | 93ad3d1990341069b5406ff026a133c9885ca7c7 | [] | no_license | OkkarMin/CloudInterestGroup_Textract_Demo | a8f6b9571309fd286338a6e909df10c881336a76 | 4ea96f343f7751487b4843c54aeb032da92b3b74 | refs/heads/main | 2023-03-07T00:03:27.574707 | 2021-02-20T01:34:21 | 2021-02-20T01:34:21 | 340,531,876 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,035 | py | import pandas as pd
ANSWER_KEY = ('B ', 'C ', 'B ')
okkar_answers_csv = pd.read_csv('./okkar_answers.csv', header=None)
surabhi_answers_csv = pd.read_csv('./surabhi_answers.csv', header=None)
# print(okkar_answers_csv)
# print(surabhi_answers_csv)
okkar_answers = okkar_answers_csv.iloc[0][1], okkar_answers_csv.iloc[1][1], okkar_answers_csv.iloc[2][1]
surabhi_answers = surabhi_answers_csv.iloc[0][1], surabhi_answers_csv.iloc[1][1], surabhi_answers_csv.iloc[2][1]
# print(okkar_answers)
# print(surabhi_answers)
def show_result(student_answer):
if student_answer == ANSWER_KEY:
print('Good job! 100 % correct 😄')
return
# which question did this student answered wrongly?
wrong_answers = [student_answer[i] for i in range(
3) if student_answer[i] != ANSWER_KEY[i]]
for q_num, answer in enumerate(wrong_answers):
print(
f'Question {q_num} is wrong ❌ Correct: {ANSWER_KEY[q_num]} Answered {answer}')
# show_result(okkar_answers)
show_result(surabhi_answers)
| [
"omin001@e.ntu.edu.sg"
] | omin001@e.ntu.edu.sg |
a981cc944965a3bd08ddd26784a406d00a5bc05c | 84465be859a221db02e7f897dfcb7fcbddef0087 | /Packs/ServiceNow/Integrations/ServiceNowv2/ServiceNowv2.py | 479f7b7aac05133d12c7fd3674d1d20289e1cc1f | [
"MIT"
] | permissive | JT-NL/content | ba9325da86aeb5b021846e2e05ed8d2182c2d367 | 98b6a5447f88c8a759dddce215e2e842fb4916d5 | refs/heads/master | 2023-06-22T03:38:32.339175 | 2023-06-13T09:26:09 | 2023-06-13T09:26:09 | 280,732,243 | 0 | 0 | null | 2020-07-18T20:16:57 | 2020-07-18T20:16:56 | null | UTF-8 | Python | false | false | 124,395 | py | import re
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
import shutil
from typing import Callable, Dict, Iterable, List, Tuple
import mimetypes
# disable insecure warnings
import urllib3
urllib3.disable_warnings()
INCIDENT = 'incident'
SIR_INCIDENT = 'sn_si_incident'
COMMAND_NOT_IMPLEMENTED_MSG = 'Command not implemented'
DATE_FORMAT = '%Y-%m-%d %H:%M:%S'
DATE_FORMAT_OPTIONS = {
'MM-dd-yyyy': '%m-%d-%Y %H:%M:%S',
'dd/MM/yyyy': '%d/%m/%Y %H:%M:%S',
'dd-MM-yyyy': '%d-%m-%Y %H:%M:%S',
'dd.MM.yyyy': '%d.%m.%Y %H:%M:%S',
'yyyy-MM-dd': '%Y-%m-%d %H:%M:%S'
}
TICKET_STATES = {
'incident': {
'1': '1 - New',
'2': '2 - In Progress',
'3': '3 - On Hold',
'4': '4 - Awaiting Caller',
'5': '5 - Awaiting Evidence',
'6': '6 - Resolved',
'7': '7 - Closed',
'8': '8 - Canceled'
},
'problem': {
'1': '1 - Open',
'2': '2 - Known Error',
'3': '3 - Pending Change',
'4': '4 - Closed/Resolved'
},
'change_request': {
'-5': '-5 - New',
'-4': '-4 - Assess',
'-3': '-3 - Authorize',
'-2': '-2 - Scheduled',
'-1': '-1 - Implement',
'0': '0 - Review',
'3': '3 - Closed',
'4': '4 - Canceled'
},
'sc_task': {
'-5': '-5 - Pending',
'1': '1 - Open',
'2': '2 - Work In Progress',
'3': '3 - Closed Complete',
'4': '4 - Closed Incomplete',
'7': '7 - Closed Skipped'
},
'sc_request': {
'1': '1 - Approved',
'3': '3 - Closed',
'4': '4 - Rejected'
},
SIR_INCIDENT: {
'3': 'Closed',
'7': 'Cancelled',
'10': 'Draft',
'16': 'Analysis',
'18': 'Contain',
'19': 'Eradicate'
}
}
TICKET_TYPE_TO_CLOSED_STATE = {INCIDENT: '7',
'problem': '4',
'change_request': '3',
'sc_task': '3',
'sc_request': '3',
SIR_INCIDENT: '3'}
TICKET_APPROVAL = {
'sc_req_item': {
'waiting_for_approval': 'Waiting for approval',
'approved': 'Approved',
'requested': 'Requested',
'rejected': 'Rejected',
'not requested': 'Not Yet Requested'
}
}
TICKET_PRIORITY = {
'1': '1 - Critical',
'2': '2 - High',
'3': '3 - Moderate',
'4': '4 - Low',
'5': '5 - Planning'
}
TICKET_IMPACT = {
'1': '1 - Enterprise',
'2': '2 - Region / Market',
'3': '3 - Ministry',
'4': '4 - Department / Function',
'5': '5 - Caregiver'
}
BUSINESS_IMPACT = {
'1': '1 - Critical',
'2': '2 - High',
'3': '3 - Non-Critical'
}
SNOW_ARGS = ['active', 'activity_due', 'opened_at', 'short_description', 'additional_assignee_list', 'approval_history',
'approval', 'approval_set', 'assigned_to', 'assignment_group',
'business_duration', 'business_service', 'business_stc', 'change_type', 'category', 'caller',
'calendar_duration', 'calendar_stc', 'caller_id', 'caused_by', 'close_code', 'close_notes',
'closed_at', 'closed_by', 'cmdb_ci', 'comments', 'comments_and_work_notes', 'company', 'contact_type',
'correlation_display', 'correlation_id', 'delivery_plan', 'delivery_task', 'description', 'due_date',
'expected_start', 'follow_up', 'group_list', 'hold_reason', 'impact', 'incident_state',
'knowledge', 'location', 'made_sla', 'notify', 'order', 'parent', 'parent_incident', 'priority',
'problem_id', 'reassignment_count', 'reopen_count', 'resolved_at', 'resolved_by', 'rfc',
'severity', 'sla_due', 'state', 'subcategory', 'sys_tags', 'sys_updated_by', 'sys_updated_on',
'time_worked', 'title', 'type', 'urgency', 'user_input', 'watch_list', 'work_end', 'work_notes',
'work_notes_list', 'work_start', 'business_criticality', 'risk_score']
SIR_OUT_FIELDS = ['attack_vector', 'affected_user', 'change_request', 'incident', 'parent_security_incident',
'substate']
# Every table in ServiceNow should have those fields
DEFAULT_RECORD_FIELDS = {
'sys_id': 'ID',
'sys_updated_by': 'UpdatedBy',
'sys_updated_on': 'UpdatedAt',
'sys_created_by': 'CreatedBy',
'sys_created_on': 'CreatedAt'
}
MIRROR_DIRECTION = {
'None': None,
'Incoming': 'In',
'Outgoing': 'Out',
'Incoming And Outgoing': 'Both'
}
def arg_to_timestamp(arg: Any, arg_name: str, required: bool = False) -> int:
"""
Converts an XSOAR argument to a timestamp (seconds from epoch).
This function is used to quickly validate an argument provided to XSOAR
via ``demisto.args()`` into an ``int`` containing a timestamp (seconds
since epoch). It will throw a ValueError if the input is invalid.
If the input is None, it will throw a ValueError if required is ``True``,
or ``None`` if required is ``False``.
Args:
arg: argument to convert
arg_name: argument name.
required: throws exception if ``True`` and argument provided is None
Returns:
returns an ``int`` containing a timestamp (seconds from epoch) if conversion works
returns ``None`` if arg is ``None`` and required is set to ``False``
otherwise throws an Exception
"""
if arg is None:
if required is True:
raise ValueError(f'Missing "{arg_name}"')
if isinstance(arg, str) and arg.isdigit():
# timestamp is a str containing digits - we just convert it to int
return int(arg)
if isinstance(arg, str):
# we use dateparser to handle strings either in ISO8601 format, or
# relative time stamps.
# For example: format 2019-10-23T00:00:00 or "3 days", etc
date = dateparser.parse(arg, settings={'TIMEZONE': 'UTC'})
if date is None:
# if d is None it means dateparser failed to parse it
raise ValueError(f'Invalid date: {arg_name}')
return int(date.timestamp())
if isinstance(arg, (int, float)):
# Convert to int if the input is a float
return int(arg)
raise ValueError(f'Invalid date: "{arg_name}"')
def get_server_url(server_url: str) -> str:
url = server_url
url = re.sub('/[/]+$/', '', url)
url = re.sub('/$', '', url)
return url
def get_item_human_readable(data: dict) -> dict:
"""Get item human readable.
Args:
data: item data.
Returns:
item human readable.
"""
item = {
'ID': data.get('sys_id', ''),
'Name': data.get('name', ''),
'Description': data.get('short_description', ''),
'Price': data.get('price', ''),
'Variables': []
}
variables = data.get('variables')
if variables and isinstance(variables, list):
for var in variables:
if var:
pretty_variables = {
'Question': var.get('label', ''),
'Type': var.get('display_type', ''),
'Name': var.get('name', ''),
'Mandatory': var.get('mandatory', '')
}
item['Variables'].append(pretty_variables)
return item
def create_ticket_context(data: dict, additional_fields: list | None = None) -> Any:
"""Create ticket context.
Args:
data: ticket data.
additional_fields: additional fields to extract from the ticket
Returns:
ticket context.
"""
context = {
'ID': data.get('sys_id'),
'Summary': data.get('short_description'),
'Number': data.get('number'),
'CreatedOn': data.get('sys_created_on'),
'Active': data.get('active'),
'AdditionalComments': data.get('comments'),
'CloseCode': data.get('close_code'),
'OpenedAt': data.get('opened_at')
}
if additional_fields:
for additional_field in additional_fields:
if camelize_string(additional_field) not in context.keys():
# in case of a nested additional field (in the form of field1.field2)
nested_additional_field_list = additional_field.split('.')
if value := dict_safe_get(data, nested_additional_field_list):
context[additional_field] = value
# These fields refer to records in the database, the value is their system ID.
closed_by = data.get('closed_by')
if closed_by:
if isinstance(closed_by, dict):
context['ResolvedBy'] = closed_by.get('value', '')
else:
context['ResolvedBy'] = closed_by
opened_by = data.get('opened_by')
if opened_by:
if isinstance(opened_by, dict):
context['OpenedBy'] = opened_by.get('value', '')
context['Creator'] = opened_by.get('value', '')
else:
context['OpenedBy'] = opened_by
context['Creator'] = opened_by
assigned_to = data.get('assigned_to')
if assigned_to:
if isinstance(assigned_to, dict):
context['Assignee'] = assigned_to.get('value', '')
else:
context['Assignee'] = assigned_to
# Try to map fields
priority = data.get('priority')
if priority:
if isinstance(priority, dict):
context['Priority'] = TICKET_PRIORITY.get(str(int(priority.get('value', ''))),
str(int(priority.get('value', '')))),
else:
context['Priority'] = TICKET_PRIORITY.get(priority, priority)
state = data.get('state')
if state:
context['State'] = state
return createContext(context, removeNull=True)
def get_ticket_context(data: Any, additional_fields: list | None = None) -> Any:
"""Manager of ticket context creation.
Args:
data: ticket data. in the form of a dict or a list of dict.
additional_fields: additional fields to extract from the ticket
Returns:
ticket context. in the form of a dict or a list of dict.
"""
if not isinstance(data, list):
return create_ticket_context(data, additional_fields)
tickets = []
for d in data:
tickets.append(create_ticket_context(d, additional_fields))
return tickets
def get_ticket_human_readable(tickets, ticket_type: str, additional_fields: list | None = None) -> list:
"""Get ticket human readable.
Args:
tickets: tickets data. in the form of a dict or a list of dict.
ticket_type: ticket type.
additional_fields: additional fields to extract from the ticket
Returns:
ticket human readable.
"""
if not isinstance(tickets, list):
tickets = [tickets]
ticket_severity = {
'1': '1 - High',
'2': '2 - Medium',
'3': '3 - Low'
}
result = []
for ticket in tickets:
hr = {
'Number': ticket.get('number'),
'System ID': ticket.get('sys_id'),
'Created On': ticket.get('sys_created_on'),
'Created By': ticket.get('sys_created_by'),
'Active': ticket.get('active'),
'Close Notes': ticket.get('close_notes'),
'Close Code': ticket.get('close_code'),
'Description': ticket.get('description'),
'Opened At': ticket.get('opened_at'),
'Due Date': ticket.get('due_date'),
# This field refers to a record in the database, the value is its system ID.
'Resolved By': ticket.get('closed_by', {}).get('value') if isinstance(ticket.get('closed_by'), dict)
else ticket.get('closed_by'),
'Resolved At': ticket.get('resolved_at'),
'SLA Due': ticket.get('sla_due'),
'Short Description': ticket.get('short_description'),
'Additional Comments': ticket.get('comments')
}
# Try to map the fields
impact = ticket.get('impact', '')
if impact:
hr['Impact'] = ticket_severity.get(impact, impact)
urgency = ticket.get('urgency', '')
if urgency:
hr['Urgency'] = ticket_severity.get(urgency, urgency)
severity = ticket.get('severity', '')
if severity:
hr['Severity'] = ticket_severity.get(severity, severity)
priority = ticket.get('priority', '')
if priority:
hr['Priority'] = TICKET_PRIORITY.get(priority, priority)
state = ticket.get('state', '')
if state:
mapped_state = state
if ticket_type in TICKET_STATES:
mapped_state = TICKET_STATES[ticket_type].get(state, mapped_state)
hr['State'] = mapped_state
approval = ticket.get('approval', '')
if approval:
mapped_approval = approval
if ticket_type in TICKET_APPROVAL:
mapped_approval = TICKET_APPROVAL[ticket_type].get(ticket.get('approval'), mapped_approval)
# Approval will be added to the markdown only in the necessary ticket types
hr['Approval'] = mapped_approval
if additional_fields:
for additional_field in additional_fields:
# in case of a nested additional field (in the form of field1.field2)
nested_additional_field_list = additional_field.split('.')
hr[additional_field] = dict_safe_get(ticket, nested_additional_field_list)
result.append(hr)
return result
def get_ticket_fields(args: dict, template_name: dict = {}, ticket_type: str = '') -> dict:
"""Inverse the keys and values of those dictionaries
to map the arguments to their corresponding values in ServiceNow.
Args:
args: Demisto args
template_name: ticket template name
ticket_type: ticket type
Returns:
ticket fields.
"""
ticket_severity = {
'1': '1 - High',
'2': '2 - Medium',
'3': '3 - Low'
}
inv_severity = {v: k for k, v in ticket_severity.items()}
inv_priority = {v: k for k, v in TICKET_PRIORITY.items()}
inv_business_impact = {v: k for k, v in BUSINESS_IMPACT.items()}
states = TICKET_STATES.get(ticket_type)
inv_states = {v: k for k, v in states.items()} if states else {}
approval = TICKET_APPROVAL.get(ticket_type)
inv_approval = {v: k for k, v in approval.items()} if approval else {}
fields_to_clear = argToList(
args.get('clear_fields', [])) # This argument will contain fields to allow their value empty
# This is for updating null fields for update_remote_system function for example: assigned_to.
for arg in args.keys():
if not args[arg]:
fields_to_clear.append(arg)
demisto.debug(f'Fields to clear {fields_to_clear}')
ticket_fields = {}
for arg in SNOW_ARGS:
input_arg = args.get(arg)
if arg in fields_to_clear:
if input_arg:
raise DemistoException(f"Could not set a value for the argument '{arg}' and add it to the clear_fields. \
You can either set or clear the field value.")
ticket_fields[arg] = ""
elif input_arg:
if arg in ['impact', 'urgency', 'severity']:
ticket_fields[arg] = inv_severity.get(input_arg, input_arg)
elif arg == 'priority':
ticket_fields[arg] = inv_priority.get(input_arg, input_arg)
elif arg == 'state':
ticket_fields[arg] = inv_states.get(input_arg, input_arg)
elif arg == 'approval':
ticket_fields[arg] = inv_approval.get(input_arg, input_arg)
elif arg == 'change_type':
# this change is required in order to use type 'Standard' as well.
ticket_fields['type'] = input_arg
elif arg == 'business_criticality':
ticket_fields[arg] = inv_business_impact.get(input_arg, input_arg)
else:
ticket_fields[arg] = input_arg
elif template_name and arg in template_name:
ticket_fields[arg] = template_name[arg]
return ticket_fields
def generate_body(fields: dict = {}, custom_fields: dict = {}) -> dict:
"""Generates a body from fields and custom fields.
Args:
fields: fields data.
custom_fields: custom fields data.
Returns:
body object for SNOW requests.
"""
body = {}
if fields:
for field in fields:
body[field] = fields[field]
if custom_fields:
for field in custom_fields:
# custom fields begin with "u_"
if field.startswith('u_'):
body[field] = custom_fields[field]
else:
body['u_' + field] = custom_fields[field]
return body
def split_fields(fields: str = '', delimiter: str = ';') -> dict:
"""Split str fields of Demisto arguments to SNOW request fields by the char ';'.
Args:
fields: fields in a string representation.
delimiter: the delimiter to use to separate the fields.
Returns:
dic_fields object for SNOW requests.
"""
dic_fields = {}
if fields:
if '=' not in fields:
raise Exception(
f"The argument: {fields}.\nmust contain a '=' to specify the keys and values. e.g: key=val.")
arr_fields = fields.split(delimiter)
for f in arr_fields:
field = f.split('=', 1) # a field might include a '=' sign in the value. thus, splitting only once.
if len(field) > 1:
dic_fields[field[0]] = field[1]
return dic_fields
def split_notes(raw_notes, note_type, time_info):
notes: List = []
# The notes should be in this form:
# '16/05/2023 15:49:56 - John Doe (Additional comments)\nsecond note first line\n\nsecond line\n\nthird
# line\n\n2023-05-10 15:41:38 - פלוני אלמוני (Additional comments)\nfirst note first line\n\nsecond line\n\n
delimiter = '([0-9]{1,4}(?:\/|-)[0-9]{1,2}(?:\/|-)[0-9]{1,4}.*\((?:Additional comments|Work notes)\))'
notes_split = list(filter(None, re.split(delimiter, raw_notes)))
for note_info, note_value in zip(notes_split[::2], notes_split[1::2]):
created_on, _, created_by = note_info.partition(" - ")
created_by = created_by.split(' (')[0]
if not created_on or not created_by:
raise Exception(f'Failed to extract the required information from the following note: {note_info} - {note_value}')
# convert note creation time to UTC
try:
display_date_format = time_info.get('display_date_format')
created_on_UTC = datetime.strptime(created_on, display_date_format) + time_info.get('timezone_offset')
except ValueError as e:
raise Exception(f'Failed to convert {created_on} to a datetime object. Error: {e}')
if time_info.get('filter') and created_on_UTC < time_info.get('filter'):
# If a time_filter was passed and the note was created before this time, do not return it.
demisto.debug(f'Using time filter: {time_info.get("filter")}. Not including note: {note_info} - {note_value}.')
continue
note_dict = {
"sys_created_on": created_on_UTC.strftime(DATE_FORMAT),
"value": note_value.strip(),
"sys_created_by": created_by,
"element": note_type
}
notes.append(note_dict)
return notes
def convert_to_notes_result(full_response, time_info):
"""
Converts the response of a ticket to the response format when making a query for notes only.
"""
if not full_response or 'result' not in full_response or not full_response.get('result'):
return []
timezone_offset = get_timezone_offset(full_response, time_info.get('display_date_format'))
time_info['timezone_offset'] = timezone_offset
all_notes = []
raw_comments = full_response.get('result', {}).get('comments', {}).get('display_value', '')
if raw_comments:
comments = split_notes(raw_comments, 'comments', time_info=time_info)
all_notes.extend(comments)
raw_work_notes = full_response.get('result', {}).get('work_notes', {}).get('display_value', '')
if raw_work_notes:
work_notes = split_notes(raw_work_notes, 'work_notes', time_info=time_info)
all_notes.extend(work_notes)
return {'result': all_notes}
class Client(BaseClient):
"""
Client to use in the ServiceNow integration. Overrides BaseClient.
"""
def __init__(self, server_url: str, sc_server_url: str, cr_server_url: str, username: str,
password: str, verify: bool, fetch_time: str, sysparm_query: str,
sysparm_limit: int, timestamp_field: str, ticket_type: str, get_attachments: bool,
incident_name: str, oauth_params: dict | None = None, version: str | None = None, look_back: int = 0,
use_display_value: bool = False, display_date_format: str = ''):
"""
Args:
server_url: SNOW server url
sc_server_url: SNOW Service Catalog url
cr_server_url: SNOW Change Management url
username: SNOW username
password: SNOW password
oauth_params: (optional) the parameters for the ServiceNowClient that should be used to create an
access token when using OAuth2 authentication.
verify: whether to verify the request
fetch_time: first time fetch for fetch_incidents
sysparm_query: system query
sysparm_limit: system limit
timestamp_field: timestamp field for fetch_incidents
ticket_type: default ticket type
get_attachments: whether to get ticket attachments by default
incident_name: the ServiceNow ticket field to be set as the incident name
look_back: defines how much backwards (minutes) should we go back to try to fetch incidents.
"""
oauth_params = oauth_params if oauth_params else {}
self._base_url = server_url
self._sc_server_url = sc_server_url
self._cr_server_url = cr_server_url
self._version = version
self._verify = verify
self._username = username
self._password = password
self._proxies = handle_proxy(proxy_param_name='proxy', checkbox_default_value=False)
self.use_oauth = True if oauth_params else False
self.fetch_time = fetch_time
self.timestamp_field = timestamp_field
self.ticket_type = ticket_type
self.get_attachments = get_attachments
self.incident_name = incident_name
self.sys_param_query = sysparm_query
self.sys_param_limit = sysparm_limit
self.sys_param_offset = 0
self.look_back = look_back
self.use_display_value = use_display_value
self.display_date_format = DATE_FORMAT_OPTIONS.get(display_date_format)
if self.use_display_value:
assert self.display_date_format, 'A display date format must be selected in the instance configuration when ' \
'using the `Use Display Value` option.'
if self.use_oauth: # if user selected the `Use OAuth` checkbox, OAuth2 authentication should be used
self.snow_client: ServiceNowClient = ServiceNowClient(credentials=oauth_params.get('credentials', {}),
use_oauth=self.use_oauth,
client_id=oauth_params.get('client_id', ''),
client_secret=oauth_params.get('client_secret', ''),
url=oauth_params.get('url', ''),
verify=oauth_params.get('verify', False),
proxy=oauth_params.get('proxy', False),
headers=oauth_params.get('headers', ''))
else:
self._auth = (self._username, self._password)
def generic_request(self, method: str, path: str, body: Optional[Dict] = None, headers: Optional[Dict] = None,
sc_api: bool = False, cr_api: bool = False):
"""Generic request to ServiceNow api.
Args:
(Required Arguments)
method (str) required: The HTTP method, for example, GET, POST, and so on.
path (str) required: The API endpoint.
(Optional Arguments)
body (dict): The body to send in a 'POST' request. Default is None.
header (dict): requests headers. Default is None.
sc_api: Whether to send the request to the Service Catalog API
cr_api: Whether to send the request to the Change Request REST API
Returns:
Resposne object or Exception
"""
return self.send_request(path, method, body, headers=headers, sc_api=sc_api, cr_api=cr_api)
def send_request(self, path: str, method: str = 'GET', body: dict | None = None, params: dict | None = None,
headers: dict | None = None, file=None, sc_api: bool = False, cr_api: bool = False,
no_record_found_res: dict = {'result': []}):
"""Generic request to ServiceNow.
Args:
path: API path
method: request method
body: request body
params: request params
headers: request headers
file: request file
sc_api: Whether to send the request to the Service Catalog API
cr_api: Whether to send the request to the Change Request REST API
Returns:
response from API
"""
body = body if body is not None else {}
params = params if params is not None else {}
if sc_api:
url = f'{self._sc_server_url}{path}'
elif cr_api:
url = f'{self._cr_server_url}{path}'
else:
url = f'{self._base_url}{path}'
if not headers:
headers = {
'Accept': 'application/json',
'Content-Type': 'application/json'
}
max_retries = 3
num_of_tries = 0
while num_of_tries < max_retries:
if file:
# Not supported in v2
url = url.replace('/v2', '/v1')
try:
file_entry = file['id']
file_name = file['name']
shutil.copy(demisto.getFilePath(file_entry)['path'], file_name)
with open(file_name, 'rb') as f:
file_info = (file_name, f, self.get_content_type(file_name))
if self.use_oauth:
access_token = self.snow_client.get_access_token()
headers.update({
'Authorization': f'Bearer {access_token}'
})
res = requests.request(method, url, headers=headers, data=body, params=params,
files={'file': file_info}, verify=self._verify, proxies=self._proxies)
else:
res = requests.request(method, url, headers=headers, data=body, params=params,
files={'file': file_info}, auth=self._auth,
verify=self._verify, proxies=self._proxies)
shutil.rmtree(demisto.getFilePath(file_entry)['name'], ignore_errors=True)
except Exception as err:
raise Exception('Failed to upload file - ' + str(err))
else:
if self.use_oauth:
access_token = self.snow_client.get_access_token()
headers.update({
'Authorization': f'Bearer {access_token}'
})
res = requests.request(method, url, headers=headers, data=json.dumps(body) if body else {},
params=params, verify=self._verify, proxies=self._proxies)
else:
res = requests.request(method, url, headers=headers, data=json.dumps(body) if body else {},
params=params, auth=self._auth, verify=self._verify, proxies=self._proxies)
if "Instance Hibernating page" in res.text:
raise DemistoException(
"A connection was established but the instance is in hibernate mode.\n"
"Please wake your instance and try again.")
try:
json_res = res.json()
except Exception as err:
if res.status_code == 201:
return "The ticket was successfully created."
if not res.content:
return ''
raise Exception(f'Error parsing reply - {str(res.content)} - {str(err)}')
if 'error' in json_res:
error = json_res.get('error', {})
if res.status_code == 401:
demisto.debug(f'Got status code 401 - {json_res}. Retrying ...')
else:
if isinstance(error, dict):
message = json_res.get('error', {}).get('message')
details = json_res.get('error', {}).get('detail')
if message == 'No Record found':
return no_record_found_res
else:
raise Exception(f'ServiceNow Error: {message}, details: {details}')
else:
raise Exception(f'ServiceNow Error: {error}')
if res.status_code < 200 or res.status_code >= 300:
if res.status_code != 401 or num_of_tries == (max_retries - 1):
raise Exception(
f'Got status code {str(res.status_code)} with url {url} with body {str(res.content)}'
f' with headers {str(res.headers)}')
else:
break
num_of_tries += 1
return json_res
def get_content_type(self, file_name):
"""Get the correct content type for the POST request.
Args:
file_name: file name
Returns:
the content type - image with right type for images , and general for other types..
"""
file_type = None
if not file_name:
demisto.debug("file name was not supllied, uploading with general type")
else:
file_type, _ = mimetypes.guess_type(file_name)
return file_type or '*/*'
def get_table_name(self, ticket_type: str = '') -> str:
"""Get the relevant table name from th client.
Args:
ticket_type: ticket type
Returns:
the ticket_type if given or the client ticket type
"""
if ticket_type:
return ticket_type
return self.ticket_type
def get_template(self, template_name: str) -> dict:
"""Get a ticket by sending a GET request.
Args:
template_name: ticket template name
Returns:
the ticket template
"""
query_params = {'sysparm_limit': 1, 'sysparm_query': f'name={template_name}'}
result = self.send_request('table/sys_template', 'GET', params=query_params)
if len(result['result']) == 0:
raise ValueError("Incorrect template name.")
template = result['result'][0].get('template', '').split('^')
dic_template = {}
for i in range(len(template) - 1):
template_value = template[i].split('=')
if len(template_value) > 1:
dic_template[template_value[0]] = template_value[1]
return dic_template
def get_ticket_attachments(self, ticket_id: str, sys_created_on: Optional[str] = None) -> dict:
"""Get ticket attachments by sending a GET request.
Args:
ticket_id: ticket id
sys_created_on: string, when the attachment was created
Returns:
Response from API.
"""
query = f'table_sys_id={ticket_id}'
if sys_created_on:
query += f'^sys_created_on>{sys_created_on}'
return self.send_request('attachment', 'GET', params={'sysparm_query': query})
def get_ticket_attachment_entries(self, ticket_id: str, sys_created_on: Optional[str] = None) -> list:
"""Get ticket attachments, including file attachments
by sending a GET request and using the get_ticket_attachments class function.
Args:
ticket_id: ticket id
sys_created_on: string, when the attachment was created
Returns:
Array of attachments entries.
"""
entries = []
links = [] # type: List[Tuple[str, str]]
headers = {
'Accept': 'application/json',
'Content-Type': 'application/json'
}
attachments_res = self.get_ticket_attachments(ticket_id, sys_created_on)
if 'result' in attachments_res and len(attachments_res['result']) > 0:
attachments = attachments_res['result']
links = [(attachment.get('download_link', ''), attachment.get('file_name', ''))
for attachment in attachments]
for link in links:
if self.use_oauth:
access_token = self.snow_client.get_access_token()
headers.update({'Authorization': f'Bearer {access_token}'})
file_res = requests.get(link[0], headers=headers, verify=self._verify, proxies=self._proxies)
else:
file_res = requests.get(link[0], auth=(self._username, self._password), verify=self._verify,
proxies=self._proxies)
if file_res is not None:
entries.append(fileResult(link[1], file_res.content))
return entries
def get(self, table_name: str, record_id: str, custom_fields: dict = {}, number: str | None = None,
no_record_found_res: dict = {'result': []}) -> dict:
"""Get a ticket by sending a GET request.
Args:
table_name: the table name
record_id: the record ID
custom_fields: custom fields of the record to query
number: record number
Returns:
Response from API.
"""
query_params = {} # type: Dict
if record_id:
path = f'table/{table_name}/{record_id}'
elif number:
path = f'table/{table_name}'
query_params = {
'number': number
}
elif custom_fields:
path = f'table/{table_name}'
query_params = custom_fields
else:
# Only in cases where the table is of type ticket
raise ValueError('servicenow-get-ticket requires either ticket ID (sys_id) or ticket number.')
return self.send_request(path, 'GET', params=query_params, no_record_found_res=no_record_found_res)
def update(self, table_name: str, record_id: str, fields: dict = {}, custom_fields: dict = {},
input_display_value: bool = False) -> dict:
"""Updates a ticket or a record by sending a PATCH request.
Args:
table_name: table name
record_id: record id
fields: fields to update
custom_fields: custom_fields to update
input_display_value: whether to set field values using the display value or the actual value.
Returns:
Response from API.
"""
body = generate_body(fields, custom_fields)
query_params = {'sysparm_input_display_value': input_display_value}
return self.send_request(f'table/{table_name}/{record_id}', 'PATCH', params=query_params, body=body)
def create(self, table_name: str, fields: dict = {}, custom_fields: dict = {},
input_display_value: bool = False):
"""Creates a ticket or a record by sending a POST request.
Args:
table_name: table name
record_id: record id
fields: fields to update
custom_fields: custom_fields to update
input_display_value: whether to set field values using the display value or the actual value.
Returns:
Response from API.
"""
body = generate_body(fields, custom_fields)
query_params = {'sysparm_input_display_value': input_display_value}
return self.send_request(f'table/{table_name}', 'POST', params=query_params, body=body)
def delete(self, table_name: str, record_id: str) -> dict:
"""Deletes a ticket or a record by sending a DELETE request.
Args:
table_name: table name
record_id: record id
Returns:
Response from API.
"""
return self.send_request(f'table/{table_name}/{record_id}', 'DELETE')
def add_link(self, ticket_id: str, ticket_type: str, key: str, link: str) -> dict:
"""Adds a link to a ticket by sending a PATCH request.
Args:
ticket_id: ticket ID
ticket_type: ticket type
key: link key
link: link str
Returns:
Response from API.
"""
return self.send_request(f'table/{ticket_type}/{ticket_id}', 'PATCH', body={key: link})
    def add_comment(self, ticket_id: str, ticket_type: str, key: str, text: str) -> dict:
        """Adds a comment or work note to a ticket by sending a PATCH request.

        Args:
            ticket_id: ticket ID
            ticket_type: ticket type
            key: journal field to write to ('comments' or 'work_notes')
            text: the comment text to add

        Returns:
            Response from API.
        """
        return self.send_request(f'table/{ticket_type}/{ticket_id}', 'PATCH', body={key: text})
def upload_file(self, ticket_id: str, file_id: str, file_name: str, ticket_type: str) -> dict:
"""Adds a file to a ticket by sending a POST request.
Args:
ticket_id: ticket ID
file_id: file ID
file_name: file name
ticket_type: ticket type
Returns:
Response from API.
"""
body = {
'table_name': ticket_type,
'table_sys_id': ticket_id,
'file_name': file_name
}
return self.send_request('attachment/upload', 'POST', headers={'Accept': 'application/json'},
body=body, file={'id': file_id, 'name': file_name})
def add_tag(self, ticket_id: str, tag_id: str, title: str, ticket_type: str) -> dict:
"""Adds a tag to a ticket by sending a POST request.
Args:
ticket_id: ticket id
tag_id: tag id
title: tag title
ticket_type: ticket type
Returns:
Response from API.
"""
body = {'label': tag_id, 'table': ticket_type, 'table_key': ticket_id, 'title': title}
return self.send_request('/table/label_entry', 'POST', body=body)
def query(self, table_name: str, sys_param_limit: str, sys_param_offset: str, sys_param_query: str,
system_params: dict = {}, sysparm_fields: Optional[str] = None) -> dict:
"""Query records by sending a GET request.
Args:
table_name: table name
sys_param_limit: limit the number of results
sys_param_offset: offset the results
sys_param_query: the query
system_params: system parameters
sysparm_fields: Comma-separated list of field names to return in the response.
Returns:
Response from API.
"""
query_params = {'sysparm_limit': sys_param_limit, 'sysparm_offset': sys_param_offset}
if sys_param_query:
query_params['sysparm_query'] = sys_param_query
if system_params:
query_params.update(system_params)
if sysparm_fields:
query_params['sysparm_fields'] = sysparm_fields
demisto.debug(f'Running query records with the params: {query_params}')
return self.send_request(f'table/{table_name}', 'GET', params=query_params)
def get_table_fields(self, table_name: str) -> dict:
"""Get table fields by sending a GET request.
Args:
table_name: table name
Returns:
Response from API.
"""
return self.send_request(f'table/{table_name}?sysparm_limit=1', 'GET')
def get_item_details(self, id_: str) -> dict:
"""Get item details from service catalog by sending a GET request to the Service Catalog API.
Args:
id_: item id
Returns:
Response from API.
"""
return self.send_request(f'servicecatalog/items/{id_}', 'GET', sc_api=True)
def create_item_order(self, id_: str, quantity: str, variables: dict = {}) -> dict:
"""Create item order in the service catalog by sending a POST request to the Service Catalog API.
Args:
id_: item id
quantity: order quantity
variables: order variables
Returns:
Response from API.
"""
body = {'sysparm_quantity': quantity, 'variables': variables}
return self.send_request(f'servicecatalog/items/{id_}/order_now', 'POST', body=body, sc_api=True)
def document_route_to_table_request(self, queue_id: str, document_table: str, document_id: str) -> dict:
"""Routes a document(ticket/incident) to a queue by sending a GET request.
Args:
queue_id: Queue ID.
document_table: Document table.
document_id: Document ID.
Returns:
Response from API.
"""
body = {'document_sys_id': document_id, 'document_table': document_table}
return self.send_request(f'awa/queues/{queue_id}/work_item', 'POST', body=body)
    def create_co_from_template(self, template: str):
        """Creates a standard change request from template by sending a POST request.

        Args:
            template: identifier of the standard change template to instantiate.

        Returns:
            Response from API.
        """
        return self.send_request(f'change/standard/{template}', 'POST', body={},
                                 cr_api=True)
    def get_co_tasks(self, sys_id: str) -> dict:
        """Gets the tasks of a change request by sending a GET request to the Change Request REST API.

        Args:
            sys_id: sys_id of the change request.

        Returns:
            Response from API.
        """
        return self.send_request(f'change/{sys_id}/task', 'GET', cr_api=True)
def get_ticket_command(client: Client, args: dict):
    """Retrieve a single ticket and, optionally, its attachments.

    Args:
        client: Client object with request.
        args: Usually demisto.args()

    Returns:
        Demisto Outputs.
    """
    ticket_type = client.get_table_name(str(args.get('ticket_type', '')))
    ticket_id = str(args.get('id', ''))
    number = str(args.get('number', ''))
    get_attachments = args.get('get_attachments', 'false')
    fields_delimiter = args.get('fields_delimiter', ';')
    custom_fields = split_fields(str(args.get('custom_fields', '')), fields_delimiter)
    additional_fields = argToList(str(args.get('additional_fields', '')))

    response = client.get(ticket_type, ticket_id, generate_body({}, custom_fields), number)
    if not response or 'result' not in response:
        return 'Ticket was not found.'

    raw_result = response['result']
    if isinstance(raw_result, list):
        if not raw_result:
            return 'Ticket was not found.'
        ticket = raw_result[0]
    else:
        ticket = raw_result

    # Attachment entries are fetched first so the ticket entry is appended last.
    entries = [] if get_attachments.lower() == 'false' else client.get_ticket_attachment_entries(ticket.get('sys_id'))

    hr = get_ticket_human_readable(ticket, ticket_type, additional_fields)
    context = get_ticket_context(ticket, additional_fields)

    headers = ['System ID', 'Number', 'Impact', 'Urgency', 'Severity', 'Priority', 'State', 'Approval',
               'Created On', 'Created By', 'Active', 'Close Notes', 'Close Code', 'Description', 'Opened At',
               'Due Date', 'Resolved By', 'Resolved At', 'SLA Due', 'Short Description', 'Additional Comments']
    if additional_fields:
        headers.extend(additional_fields)

    entries.append({
        'Type': entryTypes['note'],
        'Contents': response,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'HumanReadable': tableToMarkdown('ServiceNow ticket', hr, headers=headers, removeNull=True),
        'EntryContext': {
            'Ticket(val.ID===obj.ID)': context,
            'ServiceNow.Ticket(val.ID===obj.ID)': context
        },
        'IgnoreAutoExtract': True
    })
    return entries
def update_ticket_command(client: Client, args: dict) -> Tuple[Any, Dict, Dict, bool]:
    """Update a ticket.

    Args:
        client: Client object with request.
        args: Usually demisto.args()

    Returns:
        Demisto Outputs.
    """
    delimiter = args.get('fields_delimiter', ';')
    custom_fields = split_fields(str(args.get('custom_fields', '')), delimiter)
    ticket_type = client.get_table_name(str(args.get('ticket_type', '')))
    ticket_id = str(args.get('id', ''))
    extra_fields = split_fields(str(args.get('additional_fields', '')), delimiter)
    extra_field_keys = list(extra_fields.keys())
    input_display_value = argToBoolean(args.get('input_display_value', 'false'))

    fields = get_ticket_fields(args, ticket_type=ticket_type)
    fields.update(extra_fields)

    result = client.update(ticket_type, ticket_id, fields, custom_fields, input_display_value)
    if not result or 'result' not in result:
        raise Exception('Unable to retrieve response.')
    ticket = result['result']

    hr_ = get_ticket_human_readable(ticket, ticket_type, extra_field_keys)
    human_readable = tableToMarkdown(f'ServiceNow ticket updated successfully\nTicket type: {ticket_type}',
                                     t=hr_, removeNull=True)

    # make the modified fields the user inserted as arguments show in the context
    if extra_fields:
        extra_field_keys = list(set(extra_field_keys).union(set(args.keys())))
    else:
        extra_field_keys = list(args.keys())
    entry_context = {'ServiceNow.Ticket(val.ID===obj.ID)': get_ticket_context(ticket, extra_field_keys)}

    return human_readable, entry_context, result, True
def create_ticket_command(client: Client, args: dict) -> Tuple[str, Dict, Dict, bool]:
    """Create a ticket.

    Args:
        client: Client object with request.
        args: Usually demisto.args()

    Returns:
        Demisto Outputs.
    """
    fields_delimiter = args.get('fields_delimiter', ';')
    custom_fields = split_fields(str(args.get('custom_fields', '')), fields_delimiter)
    template = args.get('template')
    ticket_type = client.get_table_name(str(args.get('ticket_type', '')))
    additional_fields = split_fields(str(args.get('additional_fields', '')), fields_delimiter)
    additional_fields_keys = list(additional_fields.keys())
    input_display_value = argToBoolean(args.get('input_display_value', 'false'))

    if template:
        # Resolve the template name into a dict of field defaults before merging.
        template = client.get_template(template)
    fields = get_ticket_fields(args, template, ticket_type)
    if additional_fields:
        fields.update(additional_fields)

    result = client.create(ticket_type, fields, custom_fields, input_display_value)

    if not result or 'result' not in result:
        # client.create may return a plain success-message string instead of a dict
        # (send_request returns one on HTTP 201 with no body); detect it by substring.
        # NOTE(review): this relies on 'result' not being a substring of the message.
        if 'successfully' in result:
            return result, {}, {}, True
        raise Exception('Unable to retrieve response.')
    ticket = result['result']

    hr_ = get_ticket_human_readable(ticket, ticket_type, additional_fields_keys)
    headers = ['System ID', 'Number', 'Impact', 'Urgency', 'Severity', 'Priority', 'State', 'Approval',
               'Created On', 'Created By', 'Active', 'Close Notes', 'Close Code', 'Description', 'Opened At',
               'Due Date', 'Resolved By', 'Resolved At', 'SLA Due', 'Short Description', 'Additional Comments']
    if additional_fields:
        headers.extend(additional_fields_keys)
    human_readable = tableToMarkdown('ServiceNow ticket was created successfully.', t=hr_,
                                     headers=headers, removeNull=True)

    # make the modified fields the user inserted as arguments show in the context
    if additional_fields:
        additional_fields_keys = list(set(additional_fields_keys).union(set(args.keys())))
    else:
        additional_fields_keys = list(args.keys())

    created_ticket_context = get_ticket_context(ticket, additional_fields_keys)
    entry_context = {
        'Ticket(val.ID===obj.ID)': created_ticket_context,
        'ServiceNow.Ticket(val.ID===obj.ID)': created_ticket_context
    }

    return human_readable, entry_context, result, True
def delete_ticket_command(client: Client, args: dict) -> Tuple[str, Dict, Dict, bool]:
    """Delete a ticket.

    Args:
        client: Client object with request.
        args: Usually demisto.args()

    Returns:
        Demisto Outputs.
    """
    ticket_id = str(args.get('id', ''))
    ticket_type = client.get_table_name(str(args.get('ticket_type', '')))
    api_response = client.delete(ticket_type, ticket_id)
    message = f'Ticket with ID {ticket_id} was successfully deleted.'
    return message, {}, api_response, True
def query_tickets_command(client: Client, args: dict) -> Tuple[str, Dict, Dict, bool]:
    """Query tickets.

    Args:
        client: Client object with request.
        args: Usually demisto.args()

    Returns:
        Demisto Outputs.
    """
    limit = args.get('limit', client.sys_param_limit)
    offset = args.get('offset', client.sys_param_offset)
    query = str(args.get('query', ''))
    system_params = split_fields(args.get('system_params', ''))
    additional_fields = argToList(str(args.get('additional_fields')))
    ticket_type = client.get_table_name(str(args.get('ticket_type', '')))

    result = client.query(ticket_type, limit, offset, query, system_params)
    if not result or 'result' not in result or len(result['result']) == 0:
        return 'No ServiceNow tickets matched the query.', {}, {}, True
    tickets = result.get('result', {})

    hr_ = get_ticket_human_readable(tickets, ticket_type, additional_fields)
    context = get_ticket_context(tickets, additional_fields)

    headers = ['System ID', 'Number', 'Impact', 'Urgency', 'Severity', 'Priority', 'State', 'Created On', 'Created By',
               'Active', 'Close Notes', 'Close Code', 'Description', 'Opened At', 'Due Date', 'Resolved By',
               'Resolved At', 'SLA Due', 'Short Description', 'Additional Comments']
    if additional_fields:
        headers.extend(additional_fields)

    human_readable = tableToMarkdown('ServiceNow tickets', t=hr_, headers=headers, removeNull=True)
    entry_context = {
        'Ticket(val.ID===obj.ID)': context,
        'ServiceNow.Ticket(val.ID===obj.ID)': context
    }
    return human_readable, entry_context, result, True
def add_link_command(client: Client, args: dict) -> Tuple[str, Dict, Dict, bool]:
    """Add a link.

    Args:
        client: Client object with request.
        args: Usually demisto.args()

    Returns:
        Demisto Outputs.
    """
    ticket_id = str(args.get('id', ''))
    post_as_comment = args.get('post-as-comment', 'false').lower() == 'true'
    key = 'comments' if post_as_comment else 'work_notes'
    link_argument = str(args.get('link', ''))
    text = args.get('text', link_argument)
    # The [code] wrapper lets ServiceNow render the anchor tag as HTML.
    link = f'[code]<a class="web" target="_blank" href="{link_argument}" >{text}</a>[/code]'
    ticket_type = client.get_table_name(str(args.get('ticket_type', '')))

    result = client.add_link(ticket_id, ticket_type, key, link)
    if not result or 'result' not in result:
        raise Exception('Unable to retrieve response.')

    headers = ['System ID', 'Number', 'Impact', 'Urgency', 'Severity', 'Priority', 'State', 'Created On', 'Created By',
               'Active', 'Close Notes', 'Close Code', 'Description', 'Opened At', 'Due Date', 'Resolved By',
               'Resolved At', 'SLA Due', 'Short Description', 'Additional Comments']
    hr_ = get_ticket_human_readable(result['result'], ticket_type)
    human_readable = tableToMarkdown('Link successfully added to ServiceNow ticket', t=hr_,
                                     headers=headers, removeNull=True)
    return human_readable, {}, result, True
def add_comment_command(client: Client, args: dict) -> Tuple[str, Dict, Dict, bool]:
    """Add a comment.

    Args:
        client: Client object with request.
        args: Usually demisto.args()

    Returns:
        Demisto Outputs.
    """
    ticket_id = str(args.get('id', ''))
    post_as_comment = args.get('post-as-comment', 'false').lower() == 'true'
    key = 'comments' if post_as_comment else 'work_notes'
    text = str(args.get('comment', ''))
    ticket_type = client.get_table_name(str(args.get('ticket_type', '')))

    result = client.add_comment(ticket_id, ticket_type, key, text)
    if not result or 'result' not in result:
        raise Exception('Unable to retrieve response.')

    headers = ['System ID', 'Number', 'Impact', 'Urgency', 'Severity', 'Priority', 'State', 'Created On', 'Created By',
               'Active', 'Close Notes', 'Close Code',
               'Description', 'Opened At', 'Due Date', 'Resolved By', 'Resolved At', 'SLA Due', 'Short Description',
               'Additional Comments']
    hr_ = get_ticket_human_readable(result['result'], ticket_type)
    human_readable = tableToMarkdown('Comment successfully added to ServiceNow ticket', t=hr_,
                                     headers=headers, removeNull=True)
    return human_readable, {}, result, True
def upload_file_command(client: Client, args: dict) -> Tuple[str, Dict, Dict, bool]:
    """Upload a file.

    Args:
        client: Client object with request.
        args: Usually demisto.args()

    Returns:
        Demisto Outputs.
    """
    ticket_type = client.get_table_name(str(args.get('ticket_type', '')))
    ticket_id = str(args.get('id', ''))
    file_id = str(args.get('file_id', ''))
    file_name = args.get('file_name')
    if not file_name:
        # Fall back to the name recorded for the war-room file entry.
        file_name = demisto.getFilePath(file_id).get('name')

    result = client.upload_file(ticket_id, file_id, file_name, ticket_type)
    if not result or 'result' not in result or not result['result']:
        raise Exception('Unable to upload file.')
    uploaded = result.get('result', {})

    hr_ = {
        'Filename': uploaded.get('file_name'),
        'Download link': uploaded.get('download_link'),
        'System ID': uploaded.get('sys_id')
    }
    human_readable = tableToMarkdown(f'File uploaded successfully to ticket {ticket_id}.', t=hr_)
    context = {
        'ID': ticket_id,
        'File': {
            'Filename': uploaded.get('file_name'),
            'Link': uploaded.get('download_link'),
            'SystemID': uploaded.get('sys_id')
        }
    }
    entry_context = {
        'ServiceNow.Ticket(val.ID===obj.ID)': context,
        'Ticket(val.ID===obj.ID)': context
    }
    return human_readable, entry_context, result, True
def add_tag_command(client: Client, args: dict) -> Tuple[str, Dict, Dict, bool]:
    """Add tag to a ticket.

    Args:
        client: Client object with request.
        args: Usually demisto.args()

    Returns:
        Demisto Outputs.
    """
    ticket_id = str(args.get('id', ''))
    tag_id = str(args.get('tag_id', ''))
    title = str(args.get('title', ''))
    ticket_type = client.get_table_name(str(args.get('ticket_type', '')))

    result = client.add_tag(ticket_id, tag_id, title, ticket_type)
    if not result or 'result' not in result:
        raise Exception(f'Could not add tag {title} to ticket {ticket_id}.')
    tag_info = result.get('result', {})

    hr_ = {
        'Title': tag_info.get('title'),
        'Ticket ID': tag_info.get('id_display'),
        'Ticket Type': tag_info.get('id_type'),
        'Tag ID': tag_info.get('sys_id'),
    }
    human_readable = tableToMarkdown(f'Tag {tag_id} was added successfully to ticket {ticket_id}.', t=hr_)
    context = {
        'ID': ticket_id,
        'TagTitle': tag_info.get('title'),
        'TagID': tag_info.get('sys_id'),
    }
    entry_context = {'ServiceNow.Ticket(val.ID===obj.ID)': context}
    return human_readable, entry_context, result, True
def get_ticket_notes_command(client: Client, args: dict) -> Tuple[str, Dict, Dict, bool]:
    """Get the ticket's notes (comments and work notes).

    Args:
        client: Client object with request.
        args: Usually demisto.args()

    Returns:
        Demisto Outputs.
    """
    ticket_id = args.get('id')
    sys_param_limit = args.get('limit', client.sys_param_limit)
    sys_param_offset = args.get('offset', client.sys_param_offset)

    use_display_value = argToBoolean(args.get('use_display_value', client.use_display_value))
    if use_display_value:  # make query using sysparm_display_value=all (requires less permissions)
        # NOTE(review): `assert` is stripped under `python -O`; raising would be safer.
        assert client.display_date_format, 'A display date format must be selected in the instance configuration when' \
            ' retrieving notes using the display value option.'
        ticket_type = client.get_table_name(str(args.get('ticket_type', client.ticket_type)))
        path = f'table/{ticket_type}/{ticket_id}'
        query_params = {'sysparm_limit': sys_param_limit, 'sysparm_offset': sys_param_offset, 'sysparm_display_value': 'all'}
        full_result = client.send_request(path, 'GET', params=query_params)
        # Reshape the raw table response into the journal-notes format, converting
        # display-value timestamps via the instance's configured date format.
        result = convert_to_notes_result(full_result, time_info={'display_date_format': client.display_date_format})
    else:
        # Query the journal table directly for this ticket's comment/work-note entries.
        sys_param_query = f'element_id={ticket_id}^element=comments^ORelement=work_notes'
        result = client.query('sys_journal_field', sys_param_limit, sys_param_offset, sys_param_query)

    if not result or 'result' not in result:
        return f'No comment found on ticket {ticket_id}.', {}, {}, True

    headers = ['Value', 'CreatedOn', 'CreatedBy', 'Type']

    # Journal entries with element == 'work_notes' are work notes; everything else is a comment.
    mapped_notes = [{
        'Value': note.get('value'),
        'CreatedOn': note.get('sys_created_on'),
        'CreatedBy': note.get('sys_created_by'),
        'Type': 'Work Note' if note.get('element', '') == 'work_notes' else 'Comment'
    } for note in result['result']]

    if not mapped_notes:
        return f'No comment found on ticket {ticket_id}.', {}, {}, True

    ticket = {
        'ID': ticket_id,
        'Note': mapped_notes
    }

    human_readable = tableToMarkdown(f'ServiceNow notes for ticket {ticket_id}', t=mapped_notes, headers=headers,
                                     headerTransform=pascalToSpace, removeNull=True)
    entry_context = {'ServiceNow.Ticket(val.ID===obj.ID)': createContext(ticket, removeNull=True)}

    return human_readable, entry_context, result, True
def get_record_command(client: Client, args: dict) -> Tuple[str, Dict, Dict, bool]:
    """Get a record.

    Args:
        client: Client object with request.
        args: Usually demisto.args()

    Returns:
        Demisto Outputs.
    """
    table_name = str(args.get('table_name', ''))
    record_id = str(args.get('id', ''))
    fields = str(args.get('fields', ''))

    result = client.get(table_name, record_id)

    if not result or 'result' not in result:
        return f'ServiceNow record with ID {record_id} was not found.', {}, {}, True

    if isinstance(result['result'], list):
        if len(result['result']) == 0:
            return f'ServiceNow record with ID {record_id} was not found.', {}, result, True
        record = result['result'][0]
    else:
        record = result['result']

    if fields:
        list_fields = argToList(fields)
        if 'sys_id' not in list_fields:
            # ID is added by default
            list_fields.append('sys_id')
        # filter the record for the required fields
        record = dict([kv_pair for kv_pair in list(record.items()) if kv_pair[0] in list_fields])
        for k, v in record.items():
            if isinstance(v, dict):
                # For objects that refer to a record in the database, take their value(system ID).
                record[k] = v.get('value', record[k])
        # Rename sys_id to ID for the context output.
        record['ID'] = record.pop('sys_id')
        human_readable = tableToMarkdown('ServiceNow record', record, removeNull=True)
        entry_context = {'ServiceNow.Record(val.ID===obj.ID)': createContext(record)}
    else:
        # Without an explicit field list, keep only the standard record fields,
        # mapped to their context-friendly names.
        mapped_record = {DEFAULT_RECORD_FIELDS[k]: record[k] for k in DEFAULT_RECORD_FIELDS if k in record}
        human_readable = tableToMarkdown(f'ServiceNow record {record_id}', mapped_record, removeNull=True)
        entry_context = {'ServiceNow.Record(val.ID===obj.ID)': createContext(mapped_record)}

    return human_readable, entry_context, result, True
def create_record_command(client: Client, args: dict) -> Tuple[Any, Dict[Any, Any], Any, bool]:
    """Create a record.

    Args:
        client: Client object with request.
        args: Usually demisto.args()

    Returns:
        Demisto Outputs.
    """
    table_name = str(args.get('table_name', ''))
    fields_str = str(args.get('fields', ''))
    custom_fields_str = str(args.get('custom_fields', ''))
    input_display_value = argToBoolean(args.get('input_display_value', 'false'))
    delimiter = args.get('fields_delimiter', ';')

    fields = split_fields(fields_str, delimiter) if fields_str else {}
    custom_fields = split_fields(custom_fields_str, delimiter) if custom_fields_str else {}

    result = client.create(table_name, fields, custom_fields, input_display_value)
    if not result or 'result' not in result:
        return 'Could not create record.', {}, {}, True
    record = result.get('result', {})

    # Keep only the standard record fields, mapped to context-friendly names.
    mapped_record = {DEFAULT_RECORD_FIELDS[k]: record[k] for k in DEFAULT_RECORD_FIELDS if k in record}
    human_readable = tableToMarkdown('ServiceNow record created successfully', mapped_record, removeNull=True)
    entry_context = {'ServiceNow.Record(val.ID===obj.ID)': createContext(mapped_record)}
    return human_readable, entry_context, result, True
def update_record_command(client: Client, args: dict) -> Tuple[Any, Dict[Any, Any], Dict[Any, Any], bool]:
    """Update a record.

    Args:
        client: Client object with request.
        args: Usually demisto.args()

    Returns:
        Demisto Outputs.
    """
    table_name = str(args.get('table_name', ''))
    record_id = str(args.get('id', ''))
    fields_str = str(args.get('fields', ''))
    custom_fields_str = str(args.get('custom_fields', ''))
    input_display_value = argToBoolean(args.get('input_display_value', 'false'))
    delimiter = args.get('fields_delimiter', ';')

    fields = get_ticket_fields(args, ticket_type=table_name)
    if fields_str:
        fields.update(split_fields(fields_str, delimiter))
    custom_fields = split_fields(custom_fields_str, delimiter) if custom_fields_str else {}

    result = client.update(table_name, record_id, fields, custom_fields, input_display_value)
    if not result or 'result' not in result:
        return 'Could not retrieve record.', {}, {}, True
    record = result.get('result', {})

    # Keep only the standard record fields, mapped to context-friendly names.
    mapped_record = {DEFAULT_RECORD_FIELDS[k]: record[k] for k in DEFAULT_RECORD_FIELDS if k in record}
    human_readable = tableToMarkdown(f'ServiceNow record with ID {record_id} updated successfully',
                                     t=mapped_record, removeNull=True)
    entry_context = {'ServiceNow.Record(val.ID===obj.ID)': createContext(mapped_record)}
    return human_readable, entry_context, result, True
def delete_record_command(client: Client, args: dict) -> Tuple[str, Dict[Any, Any], Dict, bool]:
    """Delete a record.

    Args:
        client: Client object with request.
        args: Usually demisto.args()

    Returns:
        Demisto Outputs.
    """
    record_id = str(args.get('id', ''))
    table_name = str(args.get('table_name', ''))
    api_response = client.delete(table_name, record_id)
    message = f'ServiceNow record with ID {record_id} was successfully deleted.'
    return message, {}, api_response, True
def query_table_command(client: Client, args: dict) -> Tuple[str, Dict, Dict, bool]:
    """Query a table.

    Args:
        client: Client object with request.
        args: Usually demisto.args()

    Returns:
        Demisto Outputs.
    """
    table_name = str(args.get('table_name', ''))
    sys_param_limit = args.get('limit', client.sys_param_limit)
    sys_param_query = str(args.get('query', ''))
    system_params = split_fields(args.get('system_params', ''))
    sys_param_offset = args.get('offset', client.sys_param_offset)

    fields = args.get('fields')
    if fields and 'sys_id' not in fields:
        fields = f'{fields},sys_id'  # ID is added by default

    result = client.query(table_name, sys_param_limit, sys_param_offset, sys_param_query, system_params,
                          sysparm_fields=fields)
    if not result or 'result' not in result or len(result['result']) == 0:
        return 'No results found', {}, {}, False
    table_entries = result.get('result', {})

    if fields:
        fields = argToList(fields)
        # Filter the records according to the given fields.
        # Dotted field names are flattened ('.' -> '_') so they are valid context keys.
        records = [{k.replace('.', '_'): v for k, v in r.items() if k in fields} for r in table_entries]
        for record in records:
            # Rename sys_id to ID for the context output.
            record['ID'] = record.pop('sys_id')
            for k, v in record.items():
                if isinstance(v, dict):
                    # For objects that refer to a record in the database, take their value (system ID).
                    record[k] = v.get('value', v)
        human_readable = tableToMarkdown('ServiceNow records', records, removeNull=True)
        entry_context = {'ServiceNow.Record(val.ID===obj.ID)': createContext(records)}
    else:
        # Without an explicit field list, keep only the standard record fields.
        mapped_records = [{DEFAULT_RECORD_FIELDS[k]: r[k] for k in DEFAULT_RECORD_FIELDS if k in r}
                          for r in table_entries]
        human_readable = tableToMarkdown('ServiceNow records', mapped_records, removeNull=True)
        entry_context = {'ServiceNow.Record(val.ID===obj.ID)': createContext(mapped_records)}

    return human_readable, entry_context, result, False
def query_computers_command(client: Client, args: dict) -> Tuple[Any, Dict[Any, Any], Dict[Any, Any], bool]:
    """Query computers (cmdb_ci_computer table).

    Args:
        client: Client object with request.
        args: Usually demisto.args()

    Returns:
        Demisto Outputs.
    """
    table_name = 'cmdb_ci_computer'
    computer_id = args.get('computer_id', None)
    computer_name = args.get('computer_name', None)
    asset_tag = args.get('asset_tag', None)
    computer_query = args.get('query', {})
    offset = args.get('offset', client.sys_param_offset)
    limit = args.get('limit', client.sys_param_limit)

    # An explicit ID wins; otherwise name/asset-tag override any free-form query.
    if computer_id:
        result = client.get(table_name, computer_id)
    else:
        if computer_name:
            computer_query = f'name={computer_name}'
        elif asset_tag:
            computer_query = f'asset_tag={asset_tag}'

        result = client.query(table_name, limit, offset, computer_query)

    if not result or 'result' not in result:
        return 'No computers found.', {}, {}, False

    computers = result.get('result', {})
    if not isinstance(computers, list):
        computers = [computers]

    if len(computers) == 0:
        return 'No computers found.', {}, {}, False

    # Numeric install_status codes mapped to their display names; unknown codes
    # fall through to the raw value below.
    computer_statuses = {
        '1': 'In use',
        '2': 'On order',
        '3': 'On maintenance',
        '6': 'In stock/In transit',
        '7': 'Retired',
        '100': 'Missing'
    }
    mapped_computers = [{
        'ID': computer.get('sys_id'),
        'AssetTag': computer.get('asset_tag'),
        'Name': computer.get('name'),
        'DisplayName': f"{computer.get('asset_tag', '')} - {computer.get('name', '')}",
        'SupportGroup': computer.get('support_group'),
        'OperatingSystem': computer.get('os'),
        # Reference fields may arrive as {'value': sys_id, ...} dicts or plain strings.
        'Company': computer.get('company', {}).get('value')
        if isinstance(computer.get('company'), dict) else computer.get('company'),
        'AssignedTo': computer.get('assigned_to', {}).get('value')
        if isinstance(computer.get('assigned_to'), dict) else computer.get('assigned_to'),
        'State': computer_statuses.get(computer.get('install_status', ''), computer.get('install_status')),
        # NOTE(review): assumes 'cost'/'cost_cc' are always strings — .rstrip() would
        # raise if the API ever returned them as numbers or None.
        'Cost': f"{computer.get('cost', '').rstrip()} {computer.get('cost_cc', '').rstrip()}",
        'Comments': computer.get('comments')
    } for computer in computers]

    headers = ['ID', 'AssetTag', 'Name', 'DisplayName', 'SupportGroup', 'OperatingSystem', 'Company', 'AssignedTo',
               'State', 'Cost', 'Comments']
    human_readable = tableToMarkdown('ServiceNow Computers', t=mapped_computers, headers=headers,
                                     removeNull=True, headerTransform=pascalToSpace)
    entry_context = {'ServiceNow.Computer(val.ID===obj.ID)': createContext(mapped_computers, removeNull=True)}

    return human_readable, entry_context, result, False
def query_groups_command(client: Client, args: dict) -> Tuple[Any, Dict[Any, Any], Dict[Any, Any], bool]:
    """Search ServiceNow groups by sys_id, by exact name, or by a free-form query.

    Args:
        client: Client object with request.
        args: Usually demisto.args()

    Returns:
        Demisto Outputs.
    """
    table_name = 'sys_user_group'
    group_id = args.get('group_id')
    group_name = args.get('group_name')
    group_query = args.get('query', {})
    offset = args.get('offset', client.sys_param_offset)
    limit = args.get('limit', client.sys_param_limit)

    if group_id:
        result = client.get(table_name, group_id)
    else:
        # An explicit name overrides any free-form query.
        if group_name:
            group_query = f'name={group_name}'
        result = client.query(table_name, limit, offset, group_query)

    if not result or 'result' not in result:
        return 'No groups found.', {}, {}, False
    groups = result.get('result', {})
    if not isinstance(groups, list):
        groups = [groups]
    if not groups:
        return 'No groups found.', {}, {}, False

    mapped_groups = []
    for group in groups:
        manager = group.get('manager')
        mapped_groups.append({
            'ID': group.get('sys_id'),
            'Description': group.get('description'),
            'Name': group.get('name'),
            'Active': group.get('active'),
            # Reference fields arrive as {'value': ...} dicts; plain values pass through.
            'Manager': manager.get('value') if isinstance(manager, dict) else manager,
            'Updated': group.get('sys_updated_on'),
        })

    headers = ['ID', 'Description', 'Name', 'Active', 'Manager', 'Updated']
    human_readable = tableToMarkdown('ServiceNow Groups', t=mapped_groups, headers=headers,
                                     removeNull=True, headerTransform=pascalToSpace)
    entry_context = {'ServiceNow.Group(val.ID===obj.ID)': createContext(mapped_groups, removeNull=True)}
    return human_readable, entry_context, result, False
def query_users_command(client: Client, args: dict) -> Tuple[Any, Dict[Any, Any], Dict[Any, Any], bool]:
    """Search ServiceNow users by sys_id, by username, or by a free-form query.

    Args:
        client: Client object with request.
        args: Usually demisto.args()

    Returns:
        Demisto Outputs.
    """
    table_name = 'sys_user'
    user_id = args.get('user_id')
    user_name = args.get('user_name')
    user_query = args.get('query', {})
    offset = args.get('offset', client.sys_param_offset)
    limit = args.get('limit', client.sys_param_limit)

    if user_id:
        result = client.get(table_name, user_id)
    else:
        # An explicit username overrides any free-form query.
        if user_name:
            user_query = f'user_name={user_name}'
        result = client.query(table_name, limit, offset, user_query)

    if not result or 'result' not in result:
        return 'No users found.', {}, {}, False
    users = result.get('result', {})
    if not isinstance(users, list):
        users = [users]
    if not users:
        return 'No users found.', {}, {}, False

    mapped_users = []
    for user in users:
        # rstrip keeps "First Last" tidy when either part is blank.
        full_name = f"{user.get('first_name', '').rstrip()} {user.get('last_name', '').rstrip()}"
        mapped_users.append({
            'ID': user.get('sys_id'),
            'Name': full_name,
            'UserName': user.get('user_name'),
            'Email': user.get('email'),
            'Created': user.get('sys_created_on'),
            'Updated': user.get('sys_updated_on'),
        })

    headers = ['ID', 'Name', 'UserName', 'Email', 'Created', 'Updated']
    human_readable = tableToMarkdown('ServiceNow Users', t=mapped_users, headers=headers, removeNull=True,
                                     headerTransform=pascalToSpace)
    entry_context = {'ServiceNow.User(val.ID===obj.ID)': createContext(mapped_users, removeNull=True)}
    return human_readable, entry_context, result, False
def list_table_fields_command(client: Client, args: dict) -> Tuple[Any, Dict[Any, Any], Dict[Any, Any], bool]:
    """List the field (column) names of a given ServiceNow table.

    Field names are derived from the keys of the first record returned for the
    table, so the table must contain at least one record.

    Args:
        client: Client object with request.
        args: Usually demisto.args(); expects 'table_name'.

    Returns:
        Demisto Outputs.
    """
    table_name = str(args.get('table_name', ''))
    result = client.get_table_fields(table_name)
    if not result or 'result' not in result:
        return 'Table was not found.', {}, {}, False
    if len(result['result']) == 0:
        return 'Table contains no records.', {}, {}, False
    # Iterate keys only — the sample record's values are irrelevant here.
    fields = [{'Name': field} for field in result['result'][0]]
    human_readable = tableToMarkdown(f'ServiceNow Table fields - {table_name}', fields)
    entry_context = {'ServiceNow.Field': createContext(fields)}
    return human_readable, entry_context, result, False
def get_table_name_command(client: Client, args: dict) -> Tuple[Any, Dict[Any, Any], Dict[Any, Any], bool]:
    """Find ServiceNow tables whose label matches the given label.

    Args:
        client: Client object with request.
        args: Usually demisto.args()

    Returns:
        Demisto Outputs.
    """
    label = args.get('label')
    offset = args.get('offset', client.sys_param_offset)
    limit = args.get('limit', client.sys_param_limit)
    table_query = f'label={label}'

    # Table metadata lives in the sys_db_object dictionary table.
    result = client.query('sys_db_object', limit, offset, table_query)
    if not result or 'result' not in result:
        return 'Table was not found.', {}, {}, False
    tables = result.get('result', {})
    if len(tables) == 0:
        return 'Table was not found.', {}, {}, False

    mapped_tables = [{
        'ID': tbl.get('sys_id'),
        'Name': tbl.get('name'),
        'SystemName': tbl.get('sys_name')
    } for tbl in tables]
    headers = ['ID', 'Name', 'SystemName']
    human_readable = tableToMarkdown(f'ServiceNow Tables for label - {label}', t=mapped_tables,
                                     headers=headers, headerTransform=pascalToSpace)
    entry_context = {'ServiceNow.Table(val.ID===obj.ID)': createContext(mapped_tables)}
    return human_readable, entry_context, result, False
def query_items_command(client: Client, args: dict) -> Tuple[Any, Dict[Any, Any], Dict[Any, Any], bool]:
    """Search the service catalog for items, optionally filtered by a name substring.

    Args:
        client: Client object with request.
        args: Usually demisto.args()

    Returns:
        Demisto Outputs.
    """
    table_name = 'sc_cat_item'
    limit = args.get('limit', client.sys_param_limit)
    offset = args.get('offset', client.sys_param_offset)
    name = str(args.get('name', ''))
    # LIKE query when a name filter was supplied; otherwise fetch unfiltered.
    items_query = f'nameLIKE{name}' if name else ''

    result = client.query(table_name, limit, offset, items_query)
    if not result or 'result' not in result:
        return 'No items were found.', {}, {}, True
    items = result.get('result', {})
    items_list = items if isinstance(items, list) else [items]
    if not items_list:
        return 'No items were found.', {}, {}, True

    mapped_items = [get_item_human_readable(catalog_item) for catalog_item in items_list]
    headers = ['ID', 'Name', 'Price', 'Description']
    human_readable = tableToMarkdown('ServiceNow Catalog Items', mapped_items, headers=headers,
                                     removeNull=True, headerTransform=pascalToSpace)
    entry_context = {'ServiceNow.CatalogItem(val.ID===obj.ID)': createContext(mapped_items, removeNull=True)}
    return human_readable, entry_context, result, True
def get_item_details_command(client: Client, args: dict) -> Tuple[Any, Dict[Any, Any], Dict[Any, Any], bool]:
    """Fetch a single catalog item (including its variables) by ID.

    Args:
        client: Client object with request.
        args: Usually demisto.args()

    Returns:
        Demisto Outputs.
    """
    id_ = str(args.get('id', ''))
    result = client.get_item_details(id_)
    if not result or 'result' not in result:
        return 'Item was not found.', {}, {}, True

    mapped_item = get_item_human_readable(result.get('result', {}))
    human_readable = tableToMarkdown('ServiceNow Catalog Item', t=mapped_item, headers=['ID', 'Name', 'Description'],
                                     removeNull=True, headerTransform=pascalToSpace)
    # Append a second table for the item's order variables, when present.
    item_variables = mapped_item.get('Variables')
    if item_variables:
        human_readable += tableToMarkdown('Item Variables', t=item_variables,
                                          headers=['Question', 'Type', 'Name', 'Mandatory'],
                                          removeNull=True, headerTransform=pascalToSpace)
    entry_context = {'ServiceNow.CatalogItem(val.ID===obj.ID)': createContext(mapped_item, removeNull=True)}
    return human_readable, entry_context, result, True
def create_order_item_command(client: Client, args: dict) -> Tuple[Any, Dict[Any, Any], Dict[Any, Any], bool]:
    """Order a catalog item, with an optional quantity and order variables.

    Args:
        client: Client object with request.
        args: Usually demisto.args()

    Returns:
        Demisto Outputs.
    """
    id_ = str(args.get('id', ''))
    quantity = str(args.get('quantity', '1'))
    # Order variables arrive as a 'key=value;key=value' string.
    variables = split_fields(str(args.get('variables', '')))

    result = client.create_item_order(id_, quantity, variables)
    if not result or 'result' not in result:
        return 'Order item was not created.', {}, {}, True
    order_item = result.get('result', {})
    mapped_item = {
        'ID': order_item.get('sys_id'),
        'RequestNumber': order_item.get('request_number')
    }
    human_readable = tableToMarkdown('ServiceNow Order Request', mapped_item,
                                     removeNull=True, headerTransform=pascalToSpace)
    entry_context = {'ServiceNow.OrderRequest(val.ID===obj.ID)': createContext(mapped_item, removeNull=True)}
    return human_readable, entry_context, result, True
def document_route_to_table(client: Client, args: dict) -> Tuple[Any, Dict[Any, Any], Dict[Any, Any], bool]:
    """Route a document (by table and ID) into a work-item queue.

    Args:
        client: Client object with request.
        args: Usually demisto.args()

    Returns:
        Demisto Outputs.
    """
    queue_id = str(args.get('queue_id', ''))
    document_table = str(args.get('document_table', ''))
    document_id = str(args.get('document_id', ''))

    result = client.document_route_to_table_request(queue_id, document_table, document_id)
    if not result or 'result' not in result:
        return 'Route to table was not found.', {}, {}, True

    route = result.get('result', {})
    context = {
        'DisplayName': route.get('display_name'),
        'DocumentID': route.get('document_id'),
        'DocumentTable': route.get('document_table'),
        'QueueID': route.get('queue'),
        'WorkItemID': route.get('sys_id')
    }
    headers = ['DisplayName', 'DocumentID', 'DocumentTable', 'QueueID', 'WorkItemID']
    human_readable = tableToMarkdown('ServiceNow Queue', t=context, headers=headers, removeNull=True,
                                     headerTransform=pascalToSpace)
    entry_context = {'ServiceNow.WorkItem(val.WorkItemID===obj.WorkItemID)': createContext(context, removeNull=True)}
    return human_readable, entry_context, result, True
def get_ticket_file_attachments(client: Client, ticket: dict) -> list:
    """
    Extract file attachment from a service now ticket.

    Returns a list of {'path', 'name'} dicts, or [] when attachment fetching is
    disabled in the instance configuration.
    """
    attachments: list = []
    if not client.get_attachments:
        return attachments
    file_entries = client.get_ticket_attachment_entries(ticket.get('sys_id', ''))
    if isinstance(file_entries, list):
        for entry in file_entries:
            # An error entry aborts the whole fetch rather than being silently dropped.
            if entry['Type'] == entryTypes['error']:
                raise Exception(f"Error getting attachment: {str(entry.get('Contents', ''))}")
            attachments.append({
                'path': entry.get('FileID', ''),
                'name': entry.get('File', '')
            })
    return attachments
def get_mirroring():
    """
    Build the mirroring metadata dict attached to each fetched ticket
    (direction, entry tags, and the integration instance name).
    """
    params = demisto.params()
    mirror_tags = [
        params.get('comment_tag'),  # comment tag to service now
        params.get('comment_tag_from_servicenow'),
        params.get('file_tag'),  # file tag to service now
        params.get('file_tag_from_service_now'),
        params.get('work_notes_tag'),  # work not tag to service now
        params.get('work_notes_tag_from_servicenow'),
    ]
    return {
        'mirror_direction': MIRROR_DIRECTION.get(params.get('mirror_direction')),
        'mirror_tags': mirror_tags,
        'mirror_instance': demisto.integrationInstance(),
    }
def fetch_incidents(client: Client) -> list:
    """Fetch ServiceNow tickets and convert them into XSOAR incidents.

    Uses the shared look-back helpers (get_fetch_run_time_range /
    filter_incidents_by_duplicates_and_limit / update_last_run_object) to
    deduplicate tickets across runs and re-scan a configurable past window.

    Args:
        client: Client object with request.

    Returns:
        The list of incident dicts created in this round (also persisted via setLastRun).
    """
    query_params = {}
    incidents = []
    last_run = demisto.getLastRun()
    demisto.debug("ServiceNowv2 - Start fetching")
    # Compute the [start, end] fetch window, honoring the configured look-back.
    start_snow_time, end_snow_time = get_fetch_run_time_range(
        last_run=last_run, first_fetch=client.fetch_time, look_back=client.look_back, date_format=DATE_FORMAT
    )
    snow_time_as_date = datetime.strptime(start_snow_time, DATE_FORMAT)
    # A limit stored by the look-back mechanism overrides the instance default.
    fetch_limit = last_run.get('limit') or client.sys_param_limit
    query = ''
    if client.sys_param_query:
        query += f'{client.sys_param_query}^'
    # get the tickets which occurred after the 'start_snow_time'
    query += f'ORDERBY{client.timestamp_field}^{client.timestamp_field}>{start_snow_time}'
    if query:
        query_params['sysparm_query'] = query
    query_params['sysparm_limit'] = fetch_limit  # type: ignore[assignment]
    demisto.debug(f"ServiceNowV2 - Last run: {json.dumps(last_run)}")
    demisto.debug(f"ServiceNowV2 - Query sent to the server: {str(query_params)}")
    tickets_response = client.send_request(f'table/{client.ticket_type}', 'GET', params=query_params).get('result', [])
    count = 0
    skipped_incidents = 0
    severity_map = {'1': 3, '2': 2, '3': 1}  # Map SNOW severity to Demisto severity for incident creation
    # remove duplicate incidents which were already fetched
    tickets_response = filter_incidents_by_duplicates_and_limit(
        incidents_res=tickets_response, last_run=last_run, fetch_limit=client.sys_param_limit, id_field='sys_id'
    )
    for ticket in tickets_response:
        # Attach mirroring metadata (direction, tags, instance) to every ticket.
        ticket.update(get_mirroring())
        if client.timestamp_field not in ticket:
            raise ValueError(f"The timestamp field [{client.timestamp_field}] does not exist in the ticket")
        # NOTE(review): '>' lets one extra ticket through once count == fetch_limit
        # (count is incremented after appending) — confirm this off-by-one is intended.
        if count > fetch_limit:
            break
        try:
            # Skip tickets created before the window start (possible with look-back re-scans).
            if datetime.strptime(ticket[client.timestamp_field], DATE_FORMAT) < snow_time_as_date:
                skipped_incidents += 1
                demisto.debug(
                    f"ServiceNowV2 - -Skipping incident with sys_id={ticket.get('sys_id')} and date="
                    f"{ticket.get(client.timestamp_field)} because its creation time is smaller than the last fetch.")
                continue
            # Replace reference dicts (assignee/caller/group) with readable values.
            parse_dict_ticket_fields(client, ticket)
        except Exception as e:
            # Best-effort: a ticket that fails parsing is still appended below.
            demisto.debug(f"Got the following error: {e}")
        incidents.append({
            'name': f"ServiceNow Incident {ticket.get(client.incident_name)}",
            'labels': [
                {'type': _type, 'value': value if isinstance(value, str) else json.dumps(value)}
                for _type, value in ticket.items()
            ],
            'details': json.dumps(ticket),
            'severity': severity_map.get(ticket.get('severity', ''), 0),
            'attachment': get_ticket_file_attachments(client=client, ticket=ticket),
            'occurred': ticket.get(client.timestamp_field),
            'sys_id': ticket.get('sys_id'),
            'rawJSON': json.dumps(ticket)
        })
        count += 1
    # Persist dedup state and next-window bounds for the following run.
    last_run = update_last_run_object(
        last_run=last_run,
        incidents=incidents,
        fetch_limit=client.sys_param_limit,
        start_fetch_time=start_snow_time,
        end_fetch_time=end_snow_time,
        look_back=client.look_back,
        created_time_field='occurred',
        id_field='sys_id',
        date_format=DATE_FORMAT
    )
    demisto.debug(f"ServiceNowV2 - Last run after incidents fetching: {json.dumps(last_run)}")
    demisto.debug(f"ServiceNowV2 - Number of incidents before filtering: {len(tickets_response)}")
    demisto.debug(f"ServiceNowV2 - Number of incidents after filtering: {len(incidents)}")
    demisto.debug(f"ServiceNowV2 - Number of incidents skipped: {skipped_incidents}")
    for ticket in incidents:
        # the occurred time requires to be in ISO format.
        occurred = datetime.strptime(ticket.get('occurred'), DATE_FORMAT).isoformat()  # type: ignore[arg-type]
        ticket['occurred'] = f"{occurred}Z"
    demisto.setLastRun(last_run)
    return incidents
def test_instance(client: Client):
    """
    Run the shared instance-testing logic. Raises on any misconfiguration, which
    makes test_module / oauth_test_module fail.
    """
    # Validate fetch_time parameter is valid (parse_date_range raises a descriptive
    # error message when it is not).
    parse_date_range(client.fetch_time, DATE_FORMAT)
    result = client.send_request(f'table/{client.ticket_type}', params={'sysparm_limit': 1}, method='GET')
    if 'result' not in result:
        raise Exception('ServiceNow error: ' + str(result))
    ticket = result.get('result')
    # Fetch-specific field checks apply only when fetching is enabled and a sample exists.
    if not (ticket and demisto.params().get('isFetch')):
        return
    if isinstance(ticket, list):
        ticket = ticket[0]
    if client.timestamp_field not in ticket:
        raise ValueError(f"The timestamp field [{client.timestamp_field}] does not exist in the ticket.")
    if client.incident_name not in ticket:
        raise ValueError(f"The field [{client.incident_name}] does not exist in the ticket.")
def test_module(client: Client, *_) -> Tuple[str, Dict[Any, Any], Dict[Any, Any], bool]:
    """
    Validate the instance configuration under basic authentication.
    """
    # The regular test button is incompatible with OAuth 2.0.
    oauth_message = ('Test button cannot be used when using OAuth 2.0. Please use the !servicenow-oauth-login '
                     'command followed by the !servicenow-oauth-test command to test the instance.')
    if client.use_oauth:
        raise Exception(oauth_message)
    if client._version == 'v2' and client.get_attachments:
        raise DemistoException('Retrieving incident attachments is not supported when using the V2 API.')
    test_instance(client)
    return 'ok', {}, {}, True
def oauth_test_module(client: Client, *_) -> Tuple[str, Dict[Any, Any], Dict[Any, Any], bool]:
    """
    Validate the instance configuration when OAuth 2.0 authentication is used.
    """
    # This command only makes sense when the OAuth checkbox is enabled.
    if not client.use_oauth:
        raise Exception('!servicenow-oauth-test command should be used only when using OAuth 2.0 authorization.\n '
                        'Please select the `Use OAuth Login` checkbox in the instance configuration before running '
                        'this command.')
    test_instance(client)
    return '### Instance Configured Successfully.\n', {}, {}, True
def login_command(client: Client, args: Dict[str, Any]) -> Tuple[str, Dict[Any, Any], Dict[Any, Any], bool]:
    """
    Login the user using OAuth authorization
    Args:
        client: Client object with request.
        args: Usually demisto.args(); expects 'username' and 'password'.
    Returns:
        Demisto Outputs.
    Raises:
        Exception: When the instance is not configured for OAuth 2.0.
    """
    # Verify that the user checked the `Use OAuth` checkbox:
    if not client.use_oauth:
        raise Exception('!servicenow-oauth-login command can be used only when using OAuth 2.0 authorization.\n Please '
                        'select the `Use OAuth Login` checkbox in the instance configuration before running this '
                        'command.')
    username = args.get('username', '')
    password = args.get('password', '')
    try:
        client.snow_client.login(username, password)
        hr = '### Logged in successfully.\n A refresh token was saved to the integration context. This token will be ' \
             'used to generate a new access token once the current one expires.'
    except Exception as e:
        # Fix: the original message joined "for" + "correct" without a space
        # ("see ? forcorrect usage"); a trailing space restores readable output.
        return_error(
            f'Failed to login. Please verify that the provided username and password are correct, and that you '
            f'entered the correct client id and client secret in the instance configuration (see ? for '
            f'correct usage when using OAuth).\n\n{e}')
    return hr, {}, {}, True
def check_assigned_to_field(client: Client, assigned_to: dict) -> Optional[str]:
    """Resolve an 'assigned_to' reference dict to the user's email, or '' when it
    is empty or the referenced user does not exist in ServiceNow."""
    if assigned_to:
        user_result = client.get('sys_user', assigned_to.get('value'),  # type: ignore[arg-type]
                                 no_record_found_res={'result': {}})
        user = user_result.get('result', {})
        if user:
            return user.get('email')
        demisto.debug(f'Could not assign user {assigned_to.get("value")} since it does not exist in ServiceNow')
    return ''
def parse_dict_ticket_fields(client: Client, ticket: dict) -> dict:
    """Replace reference-field dicts on a ticket (assignment group, assignee,
    caller) with human-readable values, mutating the ticket in place."""
    # Assignment group reference -> group name.
    assignment_group = ticket.get('assignment_group', {})
    if assignment_group:
        group_result = client.get('sys_user_group', assignment_group.get('value'), no_record_found_res={'result': {}})
        ticket['assignment_group'] = group_result.get('result', {}).get('name')

    # Assignee reference -> user email ('' when unresolvable).
    ticket['assigned_to'] = check_assigned_to_field(client, ticket.get('assigned_to', {}))

    # Caller reference -> user email.
    caller = ticket.get('caller_id', {})
    if caller:
        user_result = client.get('sys_user', caller.get('value'), no_record_found_res={'result': {}})
        ticket['caller_id'] = user_result.get('result', {}).get('email')

    return ticket
def get_timezone_offset(full_response, display_date_format):
    """
    Compute the offset between the SNOW instance's local timezone and UTC from the
    display_value/value pair of sys_created_on in a full (sysparm_display_value='all')
    ticket response.
    """
    try:
        local_raw = full_response.get('result', {}).get('sys_created_on', {}).get('display_value', '')
        local_time = datetime.strptime(local_raw, display_date_format)
    except Exception as e:
        raise Exception(f'Failed to get the display value offset time. ERROR: {e}')
    try:
        utc_time = full_response.get('result', {}).get('sys_created_on', {}).get('value', '')
        utc_time = datetime.strptime(utc_time, DATE_FORMAT)
    except ValueError as e:
        raise Exception(f'Failed to convert {utc_time} to datetime object. ERROR: {e}')
    return utc_time - local_time
def get_remote_data_command(client: Client, args: Dict[str, Any], params: Dict) -> Union[List[Dict[str, Any]], str]:
    """
    get-remote-data command: Returns an updated incident and entries
    Args:
        client: XSOAR client to use
        args:
            id: incident id to retrieve
            lastUpdate: when was the last time we retrieved data
        params: integration parameters (entry tags, close behavior, display-value settings)
    Returns:
        List[Dict[str, Any]]: first entry is the incident (which can be completely empty) and the new entries.
        Returns the string 'Ticket was not found.' when the remote ticket no longer exists.
    """
    ticket_id = args.get('id', '')
    demisto.debug(f'Getting update for remote {ticket_id}')
    last_update = arg_to_timestamp(
        arg=args.get('lastUpdate'),
        arg_name='lastUpdate',
        required=True
    )
    demisto.debug(f'last_update is {last_update}')
    ticket_type = client.ticket_type
    result = client.get(ticket_type, ticket_id)
    if not result or 'result' not in result:
        return 'Ticket was not found.'
    if isinstance(result['result'], list):
        if len(result['result']) == 0:
            return 'Ticket was not found.'
        ticket = result['result'][0]
    else:
        ticket = result['result']
    ticket_last_update = arg_to_timestamp(
        arg=ticket.get('sys_updated_on'),
        arg_name='sys_updated_on',
        required=False
    )
    demisto.debug(f'ticket_last_update is {ticket_last_update}')
    # An empty ticket dict tells the server there are no incident-field changes to mirror.
    if last_update > ticket_last_update:
        demisto.debug('Nothing new in the ticket')
        ticket = {}
    else:
        demisto.debug(f'ticket is updated: {ticket}')
        # Replace reference dicts (assignee/caller/group) with readable values.
        parse_dict_ticket_fields(client, ticket)
    # get latest comments and files
    entries = []
    file_entries = client.get_ticket_attachment_entries(ticket_id, datetime.fromtimestamp(last_update))  # type: ignore
    if file_entries:
        for file in file_entries:
            # Skip files this integration mirrored out itself, to avoid echo loops.
            if '_mirrored_from_xsoar' not in file.get('File'):
                file['Tags'] = [params.get('file_tag_from_service_now')]
                entries.append(file)
    if client.use_display_value:
        # Display-value mode: pull the full record and extract notes from it.
        ticket_type = client.get_table_name(client.ticket_type)
        path = f'table/{ticket_type}/{ticket_id}'
        query_params = {'sysparm_limit': client.sys_param_limit, 'sysparm_offset': client.sys_param_offset,
                        'sysparm_display_value': 'all'}
        full_result = client.send_request(path, 'GET', params=query_params)
        try:
            comments_result = convert_to_notes_result(full_result, time_info={'display_date_format': client.display_date_format,
                                                                              'filter': datetime.fromtimestamp(last_update)})
        except Exception as e:
            # Best-effort: mirror the ticket even when note extraction fails.
            demisto.debug(f'Failed to retrieve notes using display value. Continuing without retrieving notes.\n Error: {e}')
            comments_result = {'result': []}
    else:
        # Journal mode: query sys_journal_field for comments/work notes since last_update.
        sys_param_limit = args.get('limit', client.sys_param_limit)
        sys_param_offset = args.get('offset', client.sys_param_offset)
        sys_param_query = f'element_id={ticket_id}^sys_created_on>' \
                          f'{datetime.fromtimestamp(last_update)}^element=comments^ORelement=work_notes'
        comments_result = client.query('sys_journal_field', sys_param_limit, sys_param_offset, sys_param_query)
        demisto.debug(f'Comments result is {comments_result}')
    if not comments_result or 'result' not in comments_result:
        demisto.debug(f'Pull result is {ticket}')
        return [ticket] + entries
    for note in comments_result.get('result', []):
        # Skip notes carrying the XSOAR footer — they originated here (echo avoidance).
        if 'Mirrored from Cortex XSOAR' not in note.get('value'):
            comments_context = {'comments_and_work_notes': note.get('value')}
            # Choose entry tags by note element; merge with the note's own tags when present.
            if (tagsstr := note.get('tags', 'none')) == 'none':
                if note.get('element') == 'comments':
                    tags = [params.get('comment_tag_from_servicenow', 'CommentFromServiceNow')]
                else:
                    tags = [params.get('work_notes_tag_from_servicenow', 'WorkNoteFromServiceNow')]
            else:
                if str(note.get('element')) == 'comments':
                    tags = tagsstr + params.get('comment_tag_from_servicenow', 'CommentFromServiceNow')
                    tags = argToList(tags)
                else:
                    tags = tagsstr + params.get('work_notes_tag_from_servicenow', 'WorkNoteFromServiceNow')
                    tags = argToList(tags)
            entries.append({
                'Type': note.get('type'),
                'Category': note.get('category'),
                'Contents': f"Type: {note.get('element')}\nCreated By: {note.get('sys_created_by')}\n"
                            f"Created On: {note.get('sys_created_on')}\n{note.get('value')}",
                'ContentsFormat': note.get('format'),
                'Tags': tags,
                'Note': True,
                'EntryContext': comments_context
            })
    # Handle closing ticket/incident in XSOAR
    close_incident = params.get('close_incident')
    if close_incident != 'None':
        server_close_custom_state = params.get('server_close_custom_state', '')
        ticket_state = ticket.get('state', '')
        # The first condition is for closing the incident if the ticket's state is in the
        # `Mirrored XSOAR Ticket custom close state code` parameter, which is configured by the user in the
        # integration configuration.
        if (ticket_state and ticket_state in server_close_custom_state) \
                or (ticket.get('closed_at') and close_incident == 'closed') \
                or (ticket.get('resolved_at') and close_incident == 'resolved'):
            demisto.debug(f'SNOW ticket changed state- should be closed in XSOAR: {ticket}')
            entries.append({
                'Type': EntryType.NOTE,
                'Contents': {
                    'dbotIncidentClose': True,
                    'closeNotes': ticket.get("close_notes"),
                    'closeReason': converts_state_close_reason(ticket_state, server_close_custom_state)
                },
                'ContentsFormat': EntryFormat.JSON
            })
    demisto.debug(f'Pull result is {ticket}')
    return [ticket] + entries
def converts_state_close_reason(ticket_state: Optional[str], server_close_custom_state: Optional[str]):
    """
    determine the XSOAR incident close reason based on the Service Now ticket state.
    if 'Mirrored XSOAR Ticket custom close state code' parameter is set, the function will try to use it to
    determine the close reason (should be corresponding to a user-defined list of close reasons in the server configuration).
    then it will try using 'closed' or 'resolved' state, if set using 'Mirrored XSOAR Ticket closure method' parameter.
    otherwise, it will use the default 'out of the box' server incident close reason.
    Args:
        ticket_state: Service now ticket state
        server_close_custom_state: server close custom state parameter ("code=label,code=label" format)
    Returns:
        The XSOAR state
    """
    custom_label = ''
    # if custom state parameter is set and ticket state is returned from incident is not empty
    if server_close_custom_state and ticket_state:
        demisto.debug(f'trying to close XSOAR incident using custom states: {server_close_custom_state}, with \
received state code: {ticket_state}')
        # parse custom state parameter into a dictionary of custom state codes and their names (label)
        server_close_custom_state_dict = dict(item.split("=") for item in server_close_custom_state.split(","))
        if ticket_state in server_close_custom_state_dict:
            # check if state code is in the parsed dictionary
            if custom_state_label := server_close_custom_state_dict.get(ticket_state):
                custom_label = custom_state_label
    if custom_label:
        demisto.debug(f'incident should be closed using custom state. State Code: {ticket_state}, Label: {custom_label}')
        return custom_label
    elif ticket_state in ['6', '7']:  # default states for closed (6) and resolved (7)
        # NOTE(review): both default states map to the single label 'Resolved' — confirm intended.
        demisto.debug(f'incident should be closed using default state. State Code: {ticket_state}')
        return 'Resolved'
    demisto.debug(f'incident is closed using default close reason "Other". State Code: {ticket_state}')
    return 'Other'
def update_remote_system_command(client: Client, args: Dict[str, Any], params: Dict[str, Any]) -> str:
    """
    This command pushes local changes to the remote system.
    Args:
        client: XSOAR Client to use.
        args:
            args['data']: the data to send to the remote system
            args['entries']: the entries to send to the remote system
            args['incident_changed']: boolean telling us if the local incident indeed changed or not
            args['remote_incident_id']: the remote incident id
        params:
            entry_tags: the tags to pass to the entries (to separate between comments and work_notes)
    Returns: The remote incident id - ticket_id
    """
    parsed_args = UpdateRemoteSystemArgs(args)
    if parsed_args.delta:
        demisto.debug(f'Got the following delta keys {str(list(parsed_args.delta.keys()))}')
    ticket_type = client.ticket_type
    ticket_id = parsed_args.remote_incident_id
    closure_case = get_closure_case(params)
    is_custom_close = False
    close_custom_state = params.get('close_custom_state', None)
    if parsed_args.incident_changed:
        demisto.debug(f'Incident changed: {parsed_args.incident_changed}')
        # When the XSOAR incident was closed, translate closure into the remote state field.
        if parsed_args.inc_status == IncidentStatus.DONE:
            if closure_case and ticket_type in {'sc_task', 'sc_req_item', SIR_INCIDENT}:
                parsed_args.data['state'] = '3'
            # These ticket types are closed by changing their state.
            if closure_case == 'closed' and ticket_type == INCIDENT:
                parsed_args.data['state'] = '7'  # Closing incident ticket.
            elif closure_case == 'resolved' and ticket_type == INCIDENT:
                parsed_args.data['state'] = '6'  # resolving incident ticket.
            if close_custom_state:  # Closing by custom state
                demisto.debug(f'Closing by custom state = {close_custom_state}')
                is_custom_close = True
                parsed_args.data['state'] = close_custom_state
        fields = get_ticket_fields(parsed_args.data, ticket_type=ticket_type)
        if closure_case:
            # closed_at/resolved_at are server-managed when closing via state; drop them.
            fields = {key: val for key, val in fields.items() if key != 'closed_at' and key != 'resolved_at'}
        demisto.debug(f'Sending update request to server {ticket_type}, {ticket_id}, {fields}')
        result = client.update(ticket_type, ticket_id, fields)
        # Handle case of custom state doesn't exist, reverting to the original close state
        if is_custom_close and demisto.get(result, 'result.state') != close_custom_state:
            fields['state'] = TICKET_TYPE_TO_CLOSED_STATE[ticket_type]
            demisto.debug(f'Given custom state doesn\'t exist - Sending second update request to server with '
                          f'default closed state: {ticket_type}, {ticket_id}, {fields}')
            result = client.update(ticket_type, ticket_id, fields)
        demisto.info(f'Ticket Update result {result}')
    entries = parsed_args.entries
    if entries:
        demisto.debug(f'New entries {entries}')
        for entry in entries:
            demisto.debug(f'Sending entry {entry.get("id")}, type: {entry.get("type")}')
            # Mirroring files as entries
            if is_entry_type_mirror_supported(entry.get('type')):
                path_res = demisto.getFilePath(entry.get('id'))
                full_file_name = path_res.get('name')
                file_name, file_extension = os.path.splitext(full_file_name)
                if not file_extension:
                    file_extension = ''
                # The suffix marks the file as XSOAR-originated so get-remote-data won't echo it back.
                if params.get('file_tag_from_service_now') not in entry.get('tags', []):
                    client.upload_file(ticket_id, entry.get('id'), file_name + '_mirrored_from_xsoar' + file_extension,
                                       ticket_type)
            else:
                # Mirroring comment and work notes as entries
                tags = entry.get('tags', [])
                key = ''
                if params.get('work_notes_tag') in tags:
                    key = 'work_notes'
                elif params.get('comment_tag') in tags:
                    key = 'comments'
                # Sometimes user is an empty str, not None, therefore nothing is displayed in ServiceNow
                user = entry.get('user', 'dbot') or 'dbot'
                text = f"({user}): {str(entry.get('contents', ''))}\n\n Mirrored from Cortex XSOAR"
                client.add_comment(ticket_id, ticket_type, key, text)
    return ticket_id
def get_closure_case(params: Dict[str, Any]):
    """
    Pick the incident closing method, honoring both the newer
    'close_ticket_multiple_options' parameter and the legacy 'close_ticket' flag.
    Args:
        params: the integration params dict.
    Returns: None if no closure method is specified, otherwise the closure method (str).
    """
    selected = params.get('close_ticket_multiple_options')
    # Any value other than the literal string 'None' (including a missing key) wins outright.
    if selected != 'None':
        return selected
    # Legacy boolean parameter falls back to the 'closed' method.
    if params.get('close_ticket'):
        return 'closed'
    return None
def is_entry_type_mirror_supported(entry_type):
    """
    Args:
        entry_type (int)
    Return:
        True if the entry type supports mirroring otherwise False
    """
    # File-like entry types are the only ones mirrored as attachments.
    return entry_type in {EntryType.FILE, EntryType.ENTRY_INFO_FILE, EntryType.IMAGE,
                          EntryType.VIDEO_FILE, EntryType.STATIC_VIDEO_FILE}
def get_mapping_fields_command(client: Client) -> GetMappingFieldsResponse:
    """
    Returns the list of fields for an incident type.
    Args:
        client: XSOAR client to use
    Returns: Dictionary with keys as field names
    """
    scheme = SchemeTypeMapping(type_name=client.ticket_type)
    demisto.debug(f'Collecting incident mapping for incident type - "{client.ticket_type}"')
    # sn_si_incident tickets expose extra SIR-specific fields on top of the common args.
    if client.ticket_type == SIR_INCIDENT:
        fields = SNOW_ARGS + SIR_OUT_FIELDS
    else:
        fields = SNOW_ARGS
    for field_name in fields:
        scheme.add_field(field_name)
    response = GetMappingFieldsResponse()
    response.add_scheme_type(scheme)
    return response
def get_modified_remote_data_command(
        client: Client,
        args: Dict[str, str],
        update_timestamp_field: str = 'sys_updated_on',
        mirror_limit: str = '100',
) -> GetModifiedRemoteDataResponse:
    """Return the sys_ids of tickets modified since the last mirror update.

    Args:
        client: Client object with request.
        args: Usually demisto.args(); must contain 'lastUpdate'.
        update_timestamp_field: Ticket field used to detect modifications.
        mirror_limit: Maximum number of records to pull.

    Returns:
        GetModifiedRemoteDataResponse holding the modified ticket sys_ids.

    Raises:
        ValueError: If 'lastUpdate' cannot be parsed as a date.
    """
    remote_args = GetModifiedRemoteDataArgs(args)
    parsed_date = dateparser.parse(remote_args.last_update, settings={'TIMEZONE': 'UTC'})
    # Raise explicitly rather than assert — asserts are stripped under `python -O`.
    if parsed_date is None:
        raise ValueError(f'could not parse {remote_args.last_update}')
    last_update = parsed_date.strftime(DATE_FORMAT)
    demisto.debug(f'Running get-modified-remote-data command. Last update is: {last_update}')
    result = client.query(
        table_name=client.ticket_type,
        sys_param_limit=mirror_limit,
        sys_param_offset=str(client.sys_param_offset),
        sys_param_query=f'{update_timestamp_field}>{last_update}',
        sysparm_fields='sys_id',
    )
    modified_records_ids = []
    if result and (modified_records := result.get('result')):
        modified_records_ids = [record.get('sys_id') for record in modified_records if 'sys_id' in record]
    return GetModifiedRemoteDataResponse(modified_records_ids)
def add_custom_fields(params):
    """Extend the module-level SNOW_ARGS field list with user-configured custom fields.

    Args:
        params: integration parameters; ``custom_fields`` is a comma-separated list.
    """
    global SNOW_ARGS
    # Mutate the shared list in place so every consumer of SNOW_ARGS sees the additions.
    SNOW_ARGS.extend(argToList(params.get('custom_fields')))
def get_tasks_from_co_human_readable(data: dict, ticket_type: str) -> dict:
    """Map a raw change-request task record to its human-readable form.

    Args:
        data: raw task record in display-value shape, i.e. every field is a
            dict holding at least a 'value' key.
        ticket_type: ticket type, used to resolve the numeric state code to a label.

    Returns:
        dict with 'ID', 'Name', 'Description', 'State' and 'Variables' keys.
    """
    state_labels = TICKET_STATES.get(ticket_type, {})
    raw_state = data.get('state', {}).get('value')
    # States arrive as numeric strings (possibly float-formatted); normalise
    # to the integer string used as the lookup key, falling back to the code itself.
    state_key = str(int(raw_state))
    task = {
        'ID': data.get('sys_id', {}).get('value', ''),
        'Name': data.get('number', {}).get('value', ''),
        'Description': data.get('short_description', {}).get('value', ''),
        'State': state_labels.get(state_key, state_key),
        'Variables': [],
    }
    raw_variables = data.get('variables')
    if isinstance(raw_variables, list):
        task['Variables'] = [
            {
                'Question': var.get('label', ''),
                'Type': var.get('display_type', ''),
                'Name': var.get('name', ''),
                'Mandatory': var.get('mandatory', ''),
            }
            for var in raw_variables
            if var
        ]
    return task
def get_tasks_for_co_command(client: Client, args: dict) -> CommandResults:
    """List the tasks attached to a change request.

    Args:
        client: Client object with request.
        args: demisto.args(); 'id' is the change-request sys_id.

    Returns:
        CommandResults with a tasks table, or a "not found" / "no items" message.
    """
    co_sys_id = str(args.get('id', ''))
    response = client.get_co_tasks(co_sys_id)
    if not response or 'result' not in response:
        return CommandResults(
            outputs_prefix="ServiceNow.Tasks",
            readable_output='Item was not found.',
            raw_response=response
        )
    raw_items = response.get('result', {})
    # A single task comes back as a bare dict; normalise to a list.
    records = raw_items if isinstance(raw_items, list) else [raw_items]
    if not records:
        return CommandResults(
            outputs_prefix="ServiceNow.Tasks",
            readable_output='No items were found.',
            raw_response=response
        )
    mapped_items = [get_tasks_from_co_human_readable(record, client.ticket_type) for record in records]
    table_headers = ['ID', 'Name', 'State', 'Description']
    readable = tableToMarkdown('ServiceNow Catalog Items', mapped_items, headers=table_headers,
                               removeNull=True, headerTransform=pascalToSpace)
    context = {'ServiceNow.Tasks(val.ID===obj.ID)': createContext(mapped_items, removeNull=True)}
    return CommandResults(
        outputs_prefix="ServiceNow.Tasks",
        outputs=context,
        readable_output=readable,
        raw_response=response
    )
def create_co_from_template_command(client: Client, args: dict) -> CommandResults:
    """Create a change request from a ServiceNow template.

    Args:
        client: Client object with request.
        args: demisto.args(); 'template' identifies the template to instantiate.

    Returns:
        CommandResults describing the newly created ticket.

    Raises:
        Exception: when the API response is empty or malformed.
    """
    template_id = args.get('template', "")
    response = client.create_co_from_template(template_id)
    if not response or 'result' not in response:
        raise Exception('Unable to retrieve response.')
    ticket = response['result']
    hr_table = get_co_human_readable(ticket=ticket, ticket_type='change_request')
    table_headers = ['System ID', 'Number', 'Impact', 'Urgency', 'Severity', 'Priority', 'State', 'Approval',
                     'Created On', 'Created By', 'Active', 'Close Notes', 'Close Code', 'Description', 'Opened At',
                     'Due Date', 'Resolved By', 'Resolved At', 'SLA Due', 'Short Description', 'Additional Comments']
    readable = tableToMarkdown('ServiceNow ticket was created successfully.', t=hr_table,
                               headers=table_headers, removeNull=True)
    ticket_context = get_ticket_context(ticket)
    # The same context is published under both the generic and the integration-specific path.
    context = {
        'Ticket(val.ID===obj.ID)': ticket_context,
        'ServiceNow.Ticket(val.ID===obj.ID)': ticket_context
    }
    return CommandResults(
        outputs_prefix="ServiceNow.Ticket",
        outputs=context,
        readable_output=readable,
        raw_response=response
    )
def get_co_human_readable(ticket: dict, ticket_type: str, additional_fields: Iterable = tuple()) -> dict:
    """Build the human-readable summary row for a change-request ticket.

    Args:
        ticket: ticket record in display-value shape (each field is a dict
            with 'value' and optionally 'display_value' keys).
        ticket_type: ticket type, used to resolve the numeric state code.
        additional_fields: extra field names copied through verbatim.

    Returns:
        dict keyed by human-readable column names.
    """
    def value_of(field_name, key='value'):
        # Every display-value field is nested one level down under 'value'/'display_value'.
        return ticket.get(field_name, {}).get(key, '')

    state_labels = TICKET_STATES.get(ticket_type, {})
    state_code = str(int(value_of('state')))
    priority_code = str(int(value_of('priority')))
    readable = {
        'System ID': value_of('sys_id'),
        'Number': value_of('number'),
        'Impact': TICKET_IMPACT.get(str(int(value_of('impact'))), ''),
        'Business Impact': BUSINESS_IMPACT.get(str(value_of('business_criticality')), ''),
        'Urgency': value_of('urgency', 'display_value'),
        'Severity': value_of('severity'),
        'Priority': TICKET_PRIORITY.get(priority_code, priority_code),
        'State': state_labels.get(state_code, state_code),
        'Approval': value_of('approval_history'),
        'Created On': value_of('sys_created_on'),
        'Created By': value_of('sys_created_by'),
        'Active': value_of('active'),
        'Close Notes': value_of('close_notes'),
        'Close Code': value_of('close_code'),
        'Description': value_of('description'),
        'Opened At': value_of('opened_at'),
        'Due Date': value_of('due_date'),
        'Resolved By': value_of('closed_by'),
        'Resolved At': value_of('closed_at'),
        'SLA Due': value_of('sla_due'),
        'Short Description': value_of('short_description'),
        'Additional Comments': value_of('comments')
    }
    for extra_field in additional_fields:
        readable[extra_field] = value_of(extra_field)
    return readable
def generic_api_call_command(client: Client, args: Dict) -> Union[str, CommandResults]:
    """Make a direct call to the ServiceNow API.

    Args:
        client: Client object with request.
        args: command arguments:
            method (str, required): HTTP method — one of GET, POST, PATCH, DELETE.
            path (str, required): the API endpoint path.
            body (dict, optional): JSON body to send with the request.
            headers (dict, optional): extra request headers.
            sc_api (bool, optional): route the call through the Service Catalog API.
            cr_api (bool, optional): route the call through the Change Request API.

    Returns:
        CommandResults wrapping the raw response on success, otherwise an error string.
    """
    methods = ("GET", "POST", "PATCH", "DELETE")
    method = str(args.get("method"))
    # Validate the method before parsing the other arguments, so an unsupported
    # method is reported instead of failing on unrelated JSON parsing.
    if method.upper() not in methods:
        return f"{method} method not supported.\nTry something from {', '.join(methods)}"
    path = str(args.get("path"))
    headers = json.loads(str(args.get("headers", {})))
    try:
        body: Dict = json.loads(str(args.get("body", {})))
    except ValueError:
        # Not valid JSON — pass the raw string through unchanged.
        body = args.get("body", "")
    sc_api: bool = argToBoolean(args.get("sc_api", False))
    cr_api: bool = argToBoolean(args.get("cr_api", False))
    response = client.generic_request(method=method, path=path, body=body, headers=headers, sc_api=sc_api, cr_api=cr_api)
    if response is not None:
        human_readable: str = f"Request for {method} method is successful"
        return CommandResults(
            outputs_prefix="ServiceNow.Generic.Response",
            outputs=response,
            readable_output=human_readable,
        )
    return f"Request for {method} method is not successful"
def main():
    """Integration entry point.

    Parses and validates the integration parameters, builds the ServiceNow
    Client, and dispatches the current command to its handler.
    """
    command = demisto.command()
    LOG(f'Executing command {command}')
    params = demisto.params()
    verify = not params.get('insecure', False)
    use_oauth = params.get('use_oauth', False)
    oauth_params = {}
    if use_oauth:  # if the `Use OAuth` checkbox was checked, client id & secret should be in the credentials fields
        # Basic-auth username/password are deliberately blanked; OAuth uses
        # client_id/client_secret taken from the credentials fields instead.
        username = ''
        password = ''
        client_id = params.get('credentials', {}).get('identifier')
        client_secret = params.get('credentials', {}).get('password')
        oauth_params = {
            'credentials': {
                'identifier': username,
                'password': password
            },
            'client_id': client_id,
            'client_secret': client_secret,
            'url': params.get('url'),
            'headers': {
                'Content-Type': 'application/json',
                'Accept': 'application/json'
            },
            'verify': verify,
            'proxy': params.get('proxy'),
            'use_oauth': use_oauth
        }
    else:  # use basic authentication
        username = params.get('credentials', {}).get('identifier')
        password = params.get('credentials', {}).get('password')

    # Build the REST path prefixes; an explicit API version is inserted when configured.
    version = params.get('api_version')
    if version:
        api = f'/api/now/{version}/'
        sc_api = f'/api/sn_sc/{version}/'
        cr_api = f'/api/sn_chg_rest/{version}/'
    else:
        api = '/api/now/'
        sc_api = '/api/sn_sc/'
        cr_api = '/api/sn_chg_rest/'
    server_url = params.get('url')
    sc_server_url = f'{get_server_url(server_url)}{sc_api}'       # Service Catalog API base
    cr_server_url = f'{get_server_url(server_url)}{cr_api}'       # Change Request API base
    server_url = f'{get_server_url(server_url)}{api}'             # Table API base

    # Fetch / mirroring configuration.
    fetch_time = params.get('fetch_time', '10 minutes').strip()
    sysparm_query = params.get('sysparm_query')
    sysparm_limit = int(params.get('fetch_limit', 10))
    timestamp_field = params.get('timestamp_field', 'opened_at')
    ticket_type = params.get('ticket_type', INCIDENT)
    incident_name = params.get('incident_name', 'number') or 'number'
    get_attachments = params.get('get_attachments', False)
    update_timestamp_field = params.get('update_timestamp_field', 'sys_updated_on') or 'sys_updated_on'
    mirror_limit = params.get('mirror_limit', '100') or '100'
    look_back = arg_to_number(params.get('look_back')) or 0
    use_display_value = argToBoolean(params.get('use_display_value', False))
    display_date_format = params.get('display_date_format', '')
    add_custom_fields(params)

    # Mirroring tags must differ per direction, otherwise entries would be
    # re-mirrored back to their origin in an endless loop.
    file_tag_from_service_now, file_tag_to_service_now = (
        params.get('file_tag_from_service_now'), params.get('file_tag')
    )
    if file_tag_from_service_now == file_tag_to_service_now:
        raise Exception(
            f'File Entry Tag To ServiceNow and File Entry Tag '
            f'From ServiceNow cannot be the same name [{file_tag_from_service_now}].'
        )

    comment_tag_from_servicenow, comment_tag = (
        params.get('comment_tag_from_servicenow'), params.get('comment_tag')
    )
    if comment_tag_from_servicenow == comment_tag:
        raise Exception(
            f'Comment Entry Tag To ServiceNow and Comment Entry Tag '
            f'From ServiceNow cannot be the same name [{comment_tag_from_servicenow}].'
        )

    work_notes_tag_from_servicenow, work_notes_tag = (
        params.get('work_notes_tag_from_servicenow'), params.get('work_notes_tag')
    )
    if work_notes_tag_from_servicenow == work_notes_tag:
        raise Exception(
            f'Work note Entry Tag To ServiceNow and Work Note Entry Tag '
            f'From ServiceNow cannot be the same name [{work_notes_tag_from_servicenow}].'
        )

    # When True, the error is re-raised instead of converted into return_error
    # (e.g. fetch-incidents must surface exceptions to the server as-is).
    raise_exception = False
    try:
        client = Client(server_url=server_url, sc_server_url=sc_server_url, cr_server_url=cr_server_url,
                        username=username, password=password, verify=verify, fetch_time=fetch_time,
                        sysparm_query=sysparm_query, sysparm_limit=sysparm_limit,
                        timestamp_field=timestamp_field, ticket_type=ticket_type, get_attachments=get_attachments,
                        incident_name=incident_name, oauth_params=oauth_params, version=version, look_back=look_back,
                        use_display_value=use_display_value, display_date_format=display_date_format)
        # Commands in this table share the (md, context, raw, ignore_auto_extract)
        # return convention; commands with other signatures are dispatched below.
        commands: Dict[str, Callable[[Client, Dict[str, str]], Tuple[str, Dict[Any, Any], Dict[Any, Any], bool]]] = {
            'test-module': test_module,
            'servicenow-oauth-test': oauth_test_module,
            'servicenow-oauth-login': login_command,
            'servicenow-update-ticket': update_ticket_command,
            'servicenow-create-ticket': create_ticket_command,
            'servicenow-delete-ticket': delete_ticket_command,
            'servicenow-query-tickets': query_tickets_command,
            'servicenow-add-link': add_link_command,
            'servicenow-add-comment': add_comment_command,
            'servicenow-upload-file': upload_file_command,
            'servicenow-add-tag': add_tag_command,
            'servicenow-get-ticket-notes': get_ticket_notes_command,
            'servicenow-get-record': get_record_command,
            'servicenow-update-record': update_record_command,
            'servicenow-create-record': create_record_command,
            'servicenow-delete-record': delete_record_command,
            'servicenow-query-table': query_table_command,
            'servicenow-list-table-fields': list_table_fields_command,
            'servicenow-query-computers': query_computers_command,
            'servicenow-query-groups': query_groups_command,
            'servicenow-query-users': query_users_command,
            'servicenow-get-table-name': get_table_name_command,
            'servicenow-query-items': query_items_command,
            'servicenow-get-item-details': get_item_details_command,
            'servicenow-create-item-order': create_order_item_command,
            'servicenow-document-route-to-queue': document_route_to_table,
        }
        args = demisto.args()
        if command == 'fetch-incidents':
            raise_exception = True
            incidents = fetch_incidents(client)
            demisto.incidents(incidents)
        elif command == 'servicenow-get-ticket':
            demisto.results(get_ticket_command(client, args))
        elif command == "servicenow-generic-api-call":
            return_results(generic_api_call_command(client, args))
        elif command == 'get-remote-data':
            return_results(get_remote_data_command(client, demisto.args(), demisto.params()))
        elif command == 'update-remote-system':
            return_results(update_remote_system_command(client, demisto.args(), demisto.params()))
        elif demisto.command() == 'get-mapping-fields':
            return_results(get_mapping_fields_command(client))
        elif demisto.command() == 'get-modified-remote-data':
            return_results(get_modified_remote_data_command(client, args, update_timestamp_field, mirror_limit))
        elif demisto.command() == 'servicenow-create-co-from-template':
            return_results(create_co_from_template_command(client, demisto.args()))
        elif demisto.command() == 'servicenow-get-tasks-for-co':
            return_results(get_tasks_for_co_command(client, demisto.args()))
        elif command in commands:
            md_, ec_, raw_response, ignore_auto_extract = commands[command](client, args)
            return_outputs(md_, ec_, raw_response, ignore_auto_extract=ignore_auto_extract)
        else:
            raise_exception = True
            raise NotImplementedError(f'{COMMAND_NOT_IMPLEMENTED_MSG}: {demisto.command()}')
    except Exception as err:
        LOG(err)
        LOG.print_log()
        if not raise_exception:
            return_error(f'Unexpected error: {str(err)}', error=traceback.format_exc())
        else:
            raise
from ServiceNowApiModule import *  # noqa: E402

# Run the integration when executed directly; the extra names cover execution
# contexts where __name__ is '__builtin__' (py2) or 'builtins' (py3).
if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
| [
"noreply@github.com"
] | JT-NL.noreply@github.com |
a0861006392cf9759614b170ba1c9488db09f588 | 92a52850355048ecd7b63bae88c567d2db2ebd11 | /polls/form.py | 84c94eca320cc46e6b3363f9e77deb9f6490452a | [] | no_license | nicoladilillo/Polls-project | 5854274b43f6aab63e84996ee8cc3798bef64de0 | 8caa3c1d02e0519e654ea82cd4f2bec8aa4a0d26 | refs/heads/master | 2020-05-21T13:32:16.362262 | 2016-10-05T16:50:29 | 2016-10-05T16:50:29 | 64,008,918 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 276 | py | from django import forms
from .models import Choice
class ChoiceForm(forms.ModelForm):
    """ModelForm for creating/editing a poll Choice (text only)."""

    class Meta:
        model = Choice
        fields = ['choice_text']

    def clean_choice_text(self):
        # Django only invokes per-field hooks named clean_<fieldname>; the field
        # here is 'choice_text', so the hook must be named clean_choice_text
        # (the previous name, clean_choice, was never called by the framework).
        choice_text = self.cleaned_data.get('choice_text')
        return choice_text
| [
"nicoladilillo98@gmail.com"
] | nicoladilillo98@gmail.com |
984979cab35366e04976af46b617235c3f4d653f | caa31e24557d8badf6f2f80784a5b5ec2e68e9fc | /passportd/manage.py | 10e2e02b10cc0dd925f3b249ca3f60fd09639d68 | [] | no_license | memoia/peek-passportd | 6ec9f937e30a64f808052e950dcbc29061ca3e86 | 1f80b3a9c59d632739ecc824a2340611e66f2296 | refs/heads/master | 2020-12-24T14:26:46.067503 | 2014-11-03T05:23:53 | 2014-11-03T05:23:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 252 | py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "passportd.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| [
"melnick@gmail.com"
] | melnick@gmail.com |
da8a1919f3968f26d8f1b0079484e3cd70f24251 | 4693f529c117d347566a428a4ada6887b28ad6b0 | /landmark_based_retrieval.py | ef1b3b001d827c4f2e4422e6b66ffe308a9ff888 | [] | no_license | cdolea331/XSEDEClothingRetrievalPrototype | 6a1815b2395d44a421a7d78ef0ce4deb078cf08f | cefa4acf1b118975f00e44ea10ff4c5506536f0b | refs/heads/master | 2020-05-15T06:34:57.831103 | 2019-04-19T04:37:11 | 2019-04-19T04:37:11 | 182,126,602 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,793 | py | from scipy.spatial.distance import cosine
import csv
import cv2
import numpy as np
def execute(input_path = "testSpread.csv", output_path = "results.csv", database_path = 'Landmark_BRIEF_features.csv',
            image_base_path = 'Data/blouseData/', keypoint_size = 64.0, landmark_constant = 1.0, feature_coefficient = 0.0025):
    """Rank database garments by similarity to the first garment in input_path.

    Reads one query row from input_path (column 0: image filename; columns 2+:
    landmark strings of the form "x_y[_...]" — only the first two underscore
    fields are used as coordinates), computes BRIEF descriptors at the visible
    landmarks, then scores every row of database_path as
    landmark_constant * L1(landmark cosines) + feature_coefficient * L1(descriptor cosines),
    writing (filename, score) pairs sorted ascending to output_path.

    Args:
        input_path: CSV with the query garment (header row is skipped).
        output_path: CSV the sorted results are written to.
        database_path: precomputed landmark/BRIEF feature CSV (header skipped).
        image_base_path: directory prefix prepended to image filenames.
        keypoint_size: diameter passed to cv2.KeyPoint for BRIEF sampling.
        landmark_constant: weight of the landmark-position term.
        feature_coefficient: weight of the BRIEF-descriptor term.
    """
    inputFile = open(input_path, "r", newline='')
    outputFile = open(output_path, "w", newline='')
    writer = csv.writer(outputFile)
    reader = csv.reader(inputFile)
    brief = cv2.xfeatures2d.BriefDescriptorExtractor_create(bytes=32)
    next(reader)  # skip header
    line = next(reader)
    # subject = [filename, landmarks, descriptors]
    subject = []
    subject.append(line[0])
    landmarks = []
    for coordinates in line[2:]:
        landmarks.append(coordinates.split("_")[:2])
    for coordinates in landmarks:
        for i in range(len(coordinates)):
            coordinates[i] = float(coordinates[i])
    subject.append(landmarks)
    # Landmarks with x == -1 mark missing/occluded points and get no keypoint.
    features = []
    for coordinates in landmarks:
        if coordinates[0] != -1:
            features.append(cv2.KeyPoint(x=coordinates[0], y=coordinates[1], _size=keypoint_size))
    image = cv2.imread(image_base_path + subject[0], 3)
    kp, des = brief.compute(image, features)
    subject.append(des)
    inputFile.close()
    inputFile = open(database_path, "r", newline='')
    reader = csv.reader(inputFile)
    next(reader)  # skip header
    line = next(reader)
    l = 0
    nonEmpty = True
    results = [];
    while nonEmpty:
        print(line[0])
        # comparison = [filename, landmarks, descriptors], parsed from the
        # stringified python lists stored in the database CSV.
        comparison = []
        image = cv2.imread(image_base_path + line[0], 3)
        comparison.append(line[0])
        line[1] = line[1][1:-1]
        line[1] = line[1].split("],")
        for i in range(len(line[1])):
            line[1][i] = line[1][i].replace('[', '')
            line[1][i] = line[1][i].replace(']', '')
            line[1][i] = line[1][i].split(', ')
            line[1][i] = [float(line[1][i][0]), float(line[1][i][1])]
        # sys.exit()
        comparison.append(line[1])
        # Landmark term: L1 norm of per-landmark cosine distances.
        landmark_cosines = []
        for i in range(len(subject[1])):
            landmark_cosines.append(cosine(subject[1][i], comparison[1][i]))
        landmark_similarity = np.linalg.norm(landmark_cosines, ord=1)
        # Descriptor strings were written via numpy's repr; undo the bracket /
        # whitespace formatting before converting back to floats.
        line[2] = line[2][1:-1]
        line[2] = line[2].split("]\n")
        for i in range(len(line[2])):
            line[2][i] = line[2][i].replace('[', '')
            line[2][i] = line[2][i].replace(']', '')
            line[2][i] = line[2][i].replace('\n', '')
            # print(line[2][i])
            line[2][i] = line[2][i].replace('  ', ' ')
            line[2][i] = line[2][i].replace('  ', ' ')
            line[2][i] = line[2][i][1:]
            line[2][i] = line[2][i].split(' ')
            descriptions = []
            try:
                for entry in line[2][i]:
                    descriptions.append(float(entry))
            except ValueError:
                print(line[2])
                print(line[2][i])
            line[2][i] = descriptions
        comparison.append(line[2])
        feature_cosines = []
        # print(line[2][0])
        # sys.exit()
        # Descriptors exist only for visible landmarks, so the two descriptor
        # lists are walked with independent cursors and advanced whenever one
        # side's landmark is missing.
        subject_index = 0
        comparison_index = 0
        for i in range(len(subject[2])):
            if subject[1][i][0] != -1 and comparison[1][i][0] != -1:
                try:
                    feature_cosines.append(cosine(subject[2][subject_index], comparison[2][comparison_index]))
                except ValueError:
                    pass
            # NOTE(review): the two branches below test subject[2]/comparison[2]
            # (descriptor values) for -1; presumably subject[1]/comparison[1]
            # (landmark x-coordinates) were intended — confirm.
            elif subject[2][i][0] == -1 and comparison[2][i][0] == -1:
                pass
            elif subject[2][i][0] == -1:
                comparison_index += 1
            else:
                subject_index += 1
        feature_similarity = np.linalg.norm(feature_cosines, ord=1)
        overall_similarity = (landmark_similarity * landmark_constant) + (feature_coefficient * feature_similarity)
        results.append([line[0], overall_similarity])
        l += 1
        try:
            line = next(reader)
        except Exception as e:
            nonEmpty = False
    # NaN scores (e.g. zero-vector cosines) are forced to -1 so they sort first.
    for i in range(len(results)):
        if np.isnan(results[i][1]):
            results[i][1] = -1
    results = sorted(results, key=lambda t: t[1])
    for entry in results:
        writer.writerow(entry)
    inputFile.close()
    outputFile.close()
if __name__ == "__main__":
    # Run with the default paths when invoked as a script.
    execute()
"noreply@github.com"
] | cdolea331.noreply@github.com |
e27718431a89a408acd5f5b2dd57f5ace29e91f3 | 325bf15ed9ae7c7e871bc99fcd8c694cd5dfe35b | /AnyTextFileIntoPdf-Prgrmm Rslt/AnyTextTextIntoPdf.py | 2e83a30fe9e40eb51d3e7018db9cadbd1ca960f0 | [] | no_license | HaziqyWqjiq/MyProjectPython | e78b0c63a56777fd01c05182391f4d14ad70d4d4 | 1cb4094e0f0dd370a86a53a8d39a86538ff0e7b9 | refs/heads/main | 2023-08-24T01:09:30.089877 | 2021-10-11T05:02:52 | 2021-10-11T05:02:52 | 415,782,588 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 251 | py | # text to pdf
# pip install fpdf / sudo pip3 install fpdf
from fpdf import FPDF
pdf= FPDF()
pdf.add_page()
pdf.set_font("Arial",size=20)
pdf.cell(50,100,txt="Hello EPELY ONE")
pdf.output("pdftext.pdf")
print("Done! You can check it now") | [
"noreply@github.com"
] | HaziqyWqjiq.noreply@github.com |
8a1d66a24fa772635a9c8347ee08db7f9861fc2d | eefc46d29e5e9900607d0d89fec697d3fbd75ea1 | /directories/__init__.py | c9bf3897ae277ed6fd255dabc04012a0d85b5b8f | [] | no_license | rogfrich/start | 81ad17a5e9196ffa61b749edb4377086b9b2600b | 82ab0862613d29ceb9ab723d768ec21fd1c24e2d | refs/heads/main | 2023-08-20T09:01:10.375250 | 2021-10-24T18:13:20 | 2021-10-24T18:13:20 | 418,110,783 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 156 | py | from pathlib import Path
directories = {
'STANDARD_CODE_DIR': Path('/Users/rich/code'),
'TEMPLATE_DIRECTORY': Path('/Users/rich/start_template')
}
| [
"richard.a.cairns@gmail.com"
] | richard.a.cairns@gmail.com |
f1aff488cc397a61c84e4bf306f6c1cf8961a77a | 4557846c914c259bd520f6c8bd358e8c5fb8e820 | /bin/__init__.py | b78fa2a4678d9ddf5c8ca9c763a457facdb431cf | [] | no_license | ninthDevilHAUNSTER/EasyEnglishTerminal | 7c854475b6f472d296a1f8ee25c9f5c9b961a641 | 3f68209b5ff9b3e35e9660b60b91ebddd99ac33b | refs/heads/master | 2020-12-11T23:13:57.678840 | 2020-01-15T02:59:37 | 2020-01-15T02:59:37 | 233,981,587 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 41 | py | from bin.base import EasyEnglishTerminal
| [
"luotianhan@126.com"
] | luotianhan@126.com |
f224b46e3b5fabe1b29f2a1d2b9bb2378445b6da | 46bdc6843e31718653c0f7e27c890f31ad9ac10c | /ssh_by_napalm.py | 4aeb188c3fe9543c0f6cd471069b36914548732a | [] | no_license | ovod88/python_automation_scripts | 5a4696bb6b7b59f4fc963e7d0178e7c8e8e2e4a5 | 70c89c836c3d5156702c3d7a3f6df236cde3ff9c | refs/heads/master | 2020-03-19T03:32:56.823892 | 2018-08-06T12:49:51 | 2018-08-06T12:49:51 | 135,737,908 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 539 | py | #!/usr/bin/python3
import json
from napalm import get_network_driver
driver = get_network_driver('ios')
iosl3 = driver('192.168.122.151', 'ovod88', 'taon88')
iosl3.open()
iosl3_output = iosl3.get_facts()
print(iosl3_output['hostname'])
output = json.dumps(iosl3_output, sort_keys=True, indent=4)
print(output)
# iosl3_output = iosl3.get_interfaces()
# print(json.dumps(iosl3_output, sort_keys=True, indent=4))
# iosl3_output = iosl3.get_interfaces_counters()
# print(json.dumps(iosl3_output, sort_keys=True, indent=4))
iosl3.close() | [
"ovod88@bigmir.net"
] | ovod88@bigmir.net |
a6df67081dc937ae93372cc8c5755bee0ab8b591 | 43be8c11bf743e4c1f09bf86ba1a6c9c8e3b776f | /DeepTradeRiskEsti_TXF/datasetLoader/chart.py | c74c54b1836f121ff5b61bf13739d1c5534d355e | [
"Apache-2.0"
] | permissive | kangchihlun/DeepTradeRiskEsti_TXF | 1f95cd1bf98823af8c454634982dafa9d5a24158 | 2d0297a2b50f8e66d718bdc2a8b0473b71f8053a | refs/heads/master | 2021-07-11T17:42:17.251373 | 2017-10-14T04:22:53 | 2017-10-14T04:22:53 | 106,895,590 | 14 | 3 | null | null | null | null | UTF-8 | Python | false | false | 18,366 | py | # coding:UTF-8
import numpy
import talib
import math
from sklearn.preprocessing import scale
from sklearn.preprocessing import minmax_scale
class ChartFeature(object):
    """Computes technical-indicator feature rows from price/volume series.

    Each selector name maps to one or more feature rows (via talib indicators
    scaled to [-1, 1]); ``moving_extract`` then slides a window over the rows
    to produce (features, labels) training pairs.
    """

    def __init__(self, selector):
        # selector: iterable of feature-type names to extract, in order.
        self.selector = selector
        # BUGFIX: the original literal was missing the comma after "TBA_ROCP",
        # so implicit string concatenation produced "TBA_ROCPTXFTWSE_RATIO_ROCP"
        # and both "TBA_ROCP" and "TXFTWSE_RATIO_ROCP" were silently rejected
        # as unsupported feature types.
        self.supported = {
            "ROCP", "MACD", "RSI", "VROCP", "BOLL", "MA",
            "VMA", "PRICE_VOLUME",
            "TXFTWSE_DIFF_ROCP", "EXFFXF_DIFF_ROCP", "UDV_ROCP", "BAV_ROCP", "TBA_ROCP",
            "TXFTWSE_RATIO_ROCP", "EXFFXF_RATIO_ROCP", "UDV_RATIO_ROCP", "BAV_RATIO_ROCP", "TBA_RATIO_ROCP"
        }
        self.feature = []

    def moving_extract(self, window=30, close_prices=None,
                       TXFTWSE_DIFF=None, EXFFXF_DIFF=None, UDV=None, BAV=None, TBA=None,
                       TXFTWSE_RATIO=None, EXFFXF_RATIO=None, UDV_RATIO=None, BAV_RATIO=None, TBA_RATIO=None,
                       volumes=None, with_label=True, flatten=True):
        """Extract features, then slide a window to build (x, y) pairs.

        The label y is the clipped (to [-1, 1]) percentage move from the
        window's last close to the most extreme close within the following
        window-sized lookahead range.

        Returns:
            (moving_features, moving_labels, rows) where rows is the number of
            feature rows per timestep.
        """
        self._window_ = window  # consumed by extract_by_type (BOLL period)
        self.extract(
            close_prices=close_prices,
            TXFTWSE_DIFF=TXFTWSE_DIFF,
            EXFFXF_DIFF=EXFFXF_DIFF,
            UDV=UDV,
            BAV=BAV,
            TBA=TBA,
            TXFTWSE_RATIO=TXFTWSE_RATIO,
            EXFFXF_RATIO=EXFFXF_RATIO,
            UDV_RATIO=UDV_RATIO,
            BAV_RATIO=BAV_RATIO,
            TBA_RATIO=TBA_RATIO,
            volumes=volumes
        )
        feature_arr = numpy.asarray(self.feature)
        p = 0
        rows = feature_arr.shape[0]
        print("feature dimension: %s" % rows)
        moving_features = []
        moving_labels = []
        dataLen = feature_arr.shape[1]
        while p + window < dataLen:
            x = feature_arr[:, p:p + window]
            # Lookahead range: the next `window` closes (clipped at series end).
            tgt_idx = min([p + window * 2, dataLen - 1])
            fRngArr = close_prices[p + window: tgt_idx]
            if (len(fRngArr)):
                fRngMax = max(fRngArr)
                fRngMin = min(fRngArr)
                curClose_idx = max([p + window - 1, 0])
                curClose = close_prices[curClose_idx]
                if (curClose > 0.01):  # guard against near-zero closes
                    difUp = fRngMax - curClose
                    difDw = fRngMin - curClose
                    # Label against whichever extreme is farther from the current close.
                    tgtFutureClose = fRngMax if abs(difUp) > abs(difDw) else fRngMin
                    p_change = (tgtFutureClose - curClose) / curClose * 100.0
                    p_change = max(-1, min(p_change, 1))
                    y = p_change
                    if flatten:
                        x = x.flatten("F")  # column-major: timestep-contiguous
                    moving_features.append(numpy.nan_to_num(x))
                    moving_labels.append(y)
            p += 1
        return numpy.asarray(moving_features), moving_labels, rows

    def extract(self, close_prices=None,
                TXFTWSE_DIFF=None, EXFFXF_DIFF=None, UDV=None, BAV=None, TBA=None,
                TXFTWSE_RATIO=None, EXFFXF_RATIO=None, UDV_RATIO=None, BAV_RATIO=None, TBA_RATIO=None,
                volumes=None):
        """Populate self.feature with one or more rows per selected feature type."""
        self.feature = []
        for feature_type in self.selector:
            if feature_type in self.supported:
                print("extracting feature : %s" % feature_type)
                self.extract_by_type(feature_type, close_prices,
                                     TXFTWSE_DIFF, EXFFXF_DIFF, UDV, BAV, TBA,
                                     TXFTWSE_RATIO, EXFFXF_RATIO, UDV_RATIO, BAV_RATIO, TBA_RATIO,
                                     volumes)
            else:
                print("feature type not supported: %s" % feature_type)
        return self.feature

    @staticmethod
    def normalise_windows(window_data):
        # Normalise each window relative to its first element.
        # Marked @staticmethod: the original definition lacked `self`, so
        # calling it on an instance would have bound the instance to
        # window_data. It is not used elsewhere in this class.
        normalised_data = []
        for window in window_data:
            normalised_window = [((float(p) / float(window[0])) - 1) for p in window]
            normalised_data.append(normalised_window)
        return normalised_data

    def extract_by_type(self, feature_type, close_prices=None,
                        TXFTWSE_DIFF=None, EXFFXF_DIFF=None, UDV=None, BAV=None, TBA=None,
                        TXFTWSE_RATIO=None, EXFFXF_RATIO=None, UDV_RATIO=None, BAV_RATIO=None, TBA_RATIO=None,
                        volumes=None):
        """Append the feature row(s) for a single feature type to self.feature.

        Most branches follow the same pattern: compute an indicator with talib,
        replace NaNs, minmax-scale to [-1, 1], append.
        """
        if feature_type == 'ROCP':
            rocp = numpy.nan_to_num(talib.ROCP(close_prices, timeperiod=1))
            rocp_n = minmax_scale(rocp, feature_range=(-1, 1))
            # NOTE(review): the scaled copy (rocp_n) is computed but the raw
            # rocp is appended — confirm which was intended.
            self.feature.append(rocp)
        if feature_type == 'OROCP':
            # NOTE(review): 'OROCP'/'HROCP'/'LROCP' are not in self.supported and
            # open_prices/high_prices/low_prices are not parameters — dead branches.
            orocp = talib.ROCP(open_prices, timeperiod=1)
            self.feature.append(orocp)
        if feature_type == 'HROCP':
            hrocp = talib.ROCP(high_prices, timeperiod=1)
            self.feature.append(hrocp)
        if feature_type == 'LROCP':
            lrocp = talib.ROCP(low_prices, timeperiod=1)
            self.feature.append(lrocp)
        if feature_type == 'MACD':
            macd, signal, hist = talib.MACD(close_prices, fastperiod=12, slowperiod=26, signalperiod=9)
            macd = numpy.nan_to_num(macd)
            norm_macd = minmax_scale(macd, feature_range=(-1, 1))
            self.feature.append(norm_macd)
            signal = numpy.nan_to_num(signal)
            norm_signal = minmax_scale(signal, feature_range=(-1, 1))
            self.feature.append(norm_signal)
            hist = numpy.nan_to_num(hist)
            norm_hist = minmax_scale(hist, feature_range=(-1, 1))
            self.feature.append(norm_hist)
        if feature_type == 'RSI':
            rsi6 = talib.RSI(close_prices, timeperiod=6)
            rsi12 = talib.RSI(close_prices, timeperiod=12)
            rsi24 = talib.RSI(close_prices, timeperiod=24)
            # NOTE(review): the three ROCP-of-RSI series below are computed but
            # never appended — confirm whether they were meant as features.
            rsi6rocp = talib.ROCP(rsi6 + 100., timeperiod=1)
            rsi12rocp = talib.ROCP(rsi12 + 100., timeperiod=1)
            rsi24rocp = talib.ROCP(rsi24 + 100., timeperiod=1)
            rsi6 = numpy.nan_to_num(rsi6)
            rsi6 = rsi6 / 100.0 - 0.5  # center RSI (0..100) around zero
            norm_rsi6 = minmax_scale(rsi6, feature_range=(-1, 1))
            self.feature.append(norm_rsi6)
            rsi12 = numpy.nan_to_num(rsi12)
            rsi12 = rsi12 / 100.0 - 0.5
            norm_rsi12 = minmax_scale(rsi12, feature_range=(-1, 1))
            self.feature.append(norm_rsi12)
            rsi24 = numpy.nan_to_num(rsi24)
            rsi24 = rsi24 / 100.0 - 0.5
            norm_rsi24 = minmax_scale(rsi24, feature_range=(-1, 1))
            self.feature.append(norm_rsi24)
        if feature_type == 'VROCP':
            norm_volumes = minmax_scale(volumes, feature_range=(-1, 1))
            self.feature.append(norm_volumes)
        if feature_type == 'BOLL':
            # Bollinger bands over the extraction window, expressed as a
            # fraction of the close price.
            upperband, middleband, lowerband = talib.BBANDS(close_prices, timeperiod=self._window_, nbdevup=2, nbdevdn=2, matype=0)
            upperband = (upperband - close_prices) / close_prices
            upperband = numpy.nan_to_num(upperband)
            norm_upperband = minmax_scale(upperband, feature_range=(-1, 1))
            self.feature.append(norm_upperband)
            middleband = (middleband - close_prices) / close_prices
            middleband = numpy.nan_to_num(middleband)
            norm_middleband = minmax_scale(middleband, feature_range=(-1, 1))
            self.feature.append(norm_middleband)
            lowerband = (lowerband - close_prices) / close_prices
            lowerband = numpy.nan_to_num(lowerband)
            norm_lowerband = minmax_scale(lowerband, feature_range=(-1, 1))
            self.feature.append(norm_lowerband)
        if feature_type == 'MA':
            # Moving-average distance from close, for periods 5/10/20/30/60.
            ma5 = talib.MA(close_prices, timeperiod=5)
            ma5_clo = (ma5 - close_prices) / close_prices
            ma5_clo = numpy.nan_to_num(ma5_clo)
            norm_ma5_clo = minmax_scale(ma5_clo, feature_range=(-1, 1))
            self.feature.append(norm_ma5_clo)
            ma10 = talib.MA(close_prices, timeperiod=10)
            ma10_clo = (ma10 - close_prices) / close_prices
            ma10_clo = numpy.nan_to_num(ma10_clo)
            norm_ma10_clo = minmax_scale(ma10_clo, feature_range=(-1, 1))
            self.feature.append(norm_ma10_clo)
            ma20 = talib.MA(close_prices, timeperiod=20)
            ma20_clo = (ma20 - close_prices) / close_prices
            ma20_clo = numpy.nan_to_num(ma20_clo)
            norm_ma20_clo = minmax_scale(ma20_clo, feature_range=(-1, 1))
            self.feature.append(norm_ma20_clo)
            ma30 = talib.MA(close_prices, timeperiod=30)
            ma30_clo = (ma30 - close_prices) / close_prices
            ma30_clo = numpy.nan_to_num(ma30_clo)
            norm_ma30_clo = minmax_scale(ma30_clo, feature_range=(-1, 1))
            self.feature.append(norm_ma30_clo)
            ma60 = talib.MA(close_prices, timeperiod=60)
            ma60_clo = (ma60 - close_prices) / close_prices
            ma60_clo = numpy.nan_to_num(ma60_clo)
            norm_ma60_clo = minmax_scale(ma60_clo, feature_range=(-1, 1))
            self.feature.append(norm_ma60_clo)
        if feature_type == 'VMA':
            ma5 = talib.MA(volumes, timeperiod=5)
            ma5_clo = ((ma5 - volumes) / (volumes + 1))
            ma5_clo = numpy.nan_to_num(ma5_clo)
            norm_ma5_clo = minmax_scale(ma5_clo, feature_range=(-1, 1))
            self.feature.append(norm_ma5_clo)
            # NOTE(review): the two branches below compute ma10/ma20 but reuse
            # ma5 in the distance expression (likely copy-paste). Left as-is
            # because changing it would alter the feature values any trained
            # model was fitted on — confirm before fixing.
            ma10 = talib.MA(volumes, timeperiod=10)
            ma10_clo = ((ma5 - volumes) / (volumes + 1))
            ma10_clo = numpy.nan_to_num(ma10_clo)
            norm_ma10_clo = minmax_scale(ma10_clo, feature_range=(-1, 1))
            self.feature.append(norm_ma10_clo)
            ma20 = talib.MA(volumes, timeperiod=20)
            ma20_clo = ((ma5 - volumes) / (volumes + 1))
            ma20_clo = numpy.nan_to_num(ma20_clo)
            norm_ma20_clo = minmax_scale(ma20_clo, feature_range=(-1, 1))
            self.feature.append(norm_ma20_clo)
        if feature_type == 'PRICE_VOLUME':
            # Interaction term: price change rate times volume change rate.
            rocp = talib.ROCP(close_prices, timeperiod=1)
            norm_volumes = (volumes - numpy.mean(volumes)) / math.sqrt(numpy.var(volumes))
            vrocp = talib.ROCP(norm_volumes + numpy.max(norm_volumes) - numpy.min(norm_volumes), timeperiod=1)
            pv = rocp * vrocp * 100
            pv = numpy.nan_to_num(pv)
            norm_pv = minmax_scale(pv, feature_range=(-1, 1))
            self.feature.append(norm_pv)
        if feature_type == 'TXFTWSE_DIFF_ROCP':
            norm_volumes = minmax_scale(TXFTWSE_DIFF, feature_range=(-1, 1))
            self.feature.append(norm_volumes)
        if feature_type == 'TXFTWSE_RATIO_ROCP':
            norm_volumesr = minmax_scale(TXFTWSE_RATIO, feature_range=(-1, 1))
            self.feature.append(norm_volumesr)
        if feature_type == 'EXFFXF_DIFF_ROCP':
            norm_volumes = minmax_scale(EXFFXF_DIFF, feature_range=(-1, 1))
            self.feature.append(norm_volumes)
        if feature_type == 'EXFFXF_RATIO_ROCP':
            norm_volumes = minmax_scale(EXFFXF_RATIO, feature_range=(-1, 1))
            self.feature.append(norm_volumes)
        if feature_type == 'UDV_ROCP':
            UDV = numpy.nan_to_num(UDV)
            norm_volumes = minmax_scale(UDV, feature_range=(-1, 1))
            self.feature.append(norm_volumes)
        if feature_type == 'UDV_RATIO_ROCP':
            # Ratio series plus its MA5/MA10/MA20 relative distances.
            norm_volumes = minmax_scale(UDV_RATIO, feature_range=(-1, 1))
            self.feature.append(norm_volumes)
            ma5 = talib.MA(UDV_RATIO, timeperiod=5)
            ma5_clo = (ma5 - UDV_RATIO) / UDV_RATIO
            ma5_clo = numpy.nan_to_num(ma5_clo)
            norm_ma5_clo = minmax_scale(ma5_clo, feature_range=(-1, 1))
            self.feature.append(norm_ma5_clo)
            ma10 = talib.MA(UDV_RATIO, timeperiod=10)
            ma10_clo = (ma10 - UDV_RATIO) / UDV_RATIO
            ma10_clo = numpy.nan_to_num(ma10_clo)
            norm_ma10_clo = minmax_scale(ma10_clo, feature_range=(-1, 1))
            self.feature.append(norm_ma10_clo)
            ma20 = talib.MA(UDV_RATIO, timeperiod=20)
            ma20_clo = (ma20 - UDV_RATIO) / UDV_RATIO
            ma20_clo = numpy.nan_to_num(ma20_clo)
            norm_ma20_clo = minmax_scale(ma20_clo, feature_range=(-1, 1))
            self.feature.append(norm_ma20_clo)
        if feature_type == 'BAV_ROCP':
            BAV = numpy.nan_to_num(BAV)
            norm_volumes = minmax_scale(BAV, feature_range=(-1, 1))
            self.feature.append(norm_volumes)
        if feature_type == 'BAV_RATIO_ROCP':
            BAV_RATIO = numpy.nan_to_num(BAV_RATIO)
            norm_volumes = minmax_scale(BAV_RATIO, feature_range=(-1, 1))
            self.feature.append(norm_volumes)
            ma5 = talib.MA(BAV_RATIO, timeperiod=5)
            ma5_clo = (ma5 - BAV_RATIO) / BAV_RATIO
            ma5_clo = numpy.nan_to_num(ma5_clo)
            norm_ma5_clo = minmax_scale(ma5_clo, feature_range=(-1, 1))
            self.feature.append(norm_ma5_clo)
            ma10 = talib.MA(BAV_RATIO, timeperiod=10)
            ma10_clo = (ma10 - BAV_RATIO) / BAV_RATIO
            ma10_clo = numpy.nan_to_num(ma10_clo)
            norm_ma10_clo = minmax_scale(ma10_clo, feature_range=(-1, 1))
            self.feature.append(norm_ma10_clo)
            ma20 = talib.MA(BAV_RATIO, timeperiod=20)
            ma20_clo = (ma20 - BAV_RATIO) / BAV_RATIO
            ma20_clo = numpy.nan_to_num(ma20_clo)
            norm_ma20_clo = minmax_scale(ma20_clo, feature_range=(-1, 1))
            self.feature.append(norm_ma20_clo)
        if feature_type == 'TBA_ROCP':
            norm_volumes = minmax_scale(TBA, feature_range=(-1, 1))
            self.feature.append(norm_volumes)
        if feature_type == 'TBA_RATIO_ROCP':
            norm_volumes = minmax_scale(TBA_RATIO, feature_range=(-1, 1))
            self.feature.append(norm_volumes)
            ma5 = talib.MA(TBA_RATIO, timeperiod=5)
            ma5_clo = (ma5 - TBA_RATIO) / TBA_RATIO
            ma5_clo = numpy.nan_to_num(ma5_clo)
            norm_ma5_clo = minmax_scale(ma5_clo, feature_range=(-1, 1))
            self.feature.append(norm_ma5_clo)
            ma10 = talib.MA(TBA_RATIO, timeperiod=10)
            ma10_clo = (ma10 - TBA_RATIO) / TBA_RATIO
            ma10_clo = numpy.nan_to_num(ma10_clo)
            norm_ma10_clo = minmax_scale(ma10_clo, feature_range=(-1, 1))
            self.feature.append(norm_ma10_clo)
            ma20 = talib.MA(TBA_RATIO, timeperiod=20)
            ma20_clo = (ma20 - TBA_RATIO) / TBA_RATIO
            ma20_clo = numpy.nan_to_num(ma20_clo)
            norm_ma20_clo = minmax_scale(ma20_clo, feature_range=(-1, 1))
            self.feature.append(norm_ma20_clo)
def extract_feature(raw_data,selector, window=30, with_label=True, flatten=True):
chart_feature = ChartFeature(selector)
closes = raw_data.Close.values # len(closes)
volumes = raw_data.TotalVolume.values # len(volumes)
txftwse_dif = raw_data.txfTwseDiff.values # len(txftwse_dif)
exffxf_diff = raw_data.exfFxfDiff.values # len(exffxf_diff)
udv=raw_data.uvdv.values # len(udv)
bav=raw_data.bvav.values # len(bav)
tba=raw_data.tbta.values # len(tba)
txftwse_ratio = raw_data.txfTwseRatio.values # len(txftwse_dif)
exffxf_ratio = raw_data.exfFxfRatio.values # len(exffxf_diff)
udv_ratio=raw_data.uvdv_ratio.values # len(udv)
bav_ratio=raw_data.bvavRatio.values # len(bav)
tba_ratio=raw_data.tbtaRatio.values # len(tba)
if with_label:
moving_features,moving_labels,numRows = chart_feature.moving_extract(
window=window,
close_prices=closes,
TXFTWSE_DIFF=txftwse_dif,
EXFFXF_DIFF=exffxf_diff,
UDV=udv,
BAV=bav,
TBA=tba,
TXFTWSE_RATIO=txftwse_ratio,
EXFFXF_RATIO=exffxf_ratio,
UDV_RATIO=udv_ratio,
BAV_RATIO=bav_ratio,
TBA_RATIO=tba_ratio,
volumes=volumes,
with_label=with_label,
flatten=flatten
)
return moving_features, moving_labels,numRows
| [
"noreply@github.com"
] | kangchihlun.noreply@github.com |
f92227c51ec1996e3e31c2e0073f8916609625b5 | e4bab7fc4e8eacb62ad35b4b58b9a5093bae44c7 | /spec/rift/data/models/tenant.py | a7a4d5803847af6a42bab44665e8cd139f2cfdba | [
"Apache-2.0"
] | permissive | mkam/Rift | 972d5c571ead01480519509b783ec70b0636d10f | 802892f7c119845e0f2ec5b0798463f210e7061f | refs/heads/master | 2021-06-01T12:33:58.147207 | 2015-08-27T19:35:51 | 2015-08-27T19:35:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 414 | py | import uuid
from specter import Spec, expect
from rift.data.models.tenant import Tenant
class TenantModel(Spec):
def can_convert_to_dictionary(self):
tmp_uuid = str(uuid.uuid4())
tenant = Tenant(name=tmp_uuid, tenant_id=tmp_uuid)
tenant_dict = tenant.as_dict()
test_dict = Tenant.build_tenant_from_dict(tenant_dict).as_dict()
expect(tenant_dict).to.equal(test_dict)
| [
"john.vrbanac@linux.com"
] | john.vrbanac@linux.com |
c6181955ae958e8e09f7d70d6cabc46465b949a8 | 9930f08717594022e0f7fde2a96baaa7fcfce784 | /assignment3_prime number or not.py | c6ba297b1ad4aab747f95704e77a94145abc75b2 | [] | no_license | dinesh5555/python_assignments | 72bd2d1cc35a92a01826536eeb4107953d8d73c7 | 33fbcbe1de8f92bd6ffe07fa66640ce1ab84a756 | refs/heads/master | 2022-11-11T18:42:41.621053 | 2020-07-03T09:12:49 | 2020-07-03T09:12:49 | 276,854,185 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 250 | py | #!/usr/bin/env python
# coding: utf-8
# In[19]:
num=int(input("enter a number"))
if num>1:
for i in range(2,num):
if (num%i)==0:
print(num,"is not a prime number")
break
else:
print(num,"is a prime number")
| [
"noreply@github.com"
] | dinesh5555.noreply@github.com |
72613cfdfc84957ce2f78b079aaeccfcf9368423 | 9345020ae1720c401cfa0c47fd71cc5dfb303a02 | /test.py | 444b7d84e3a0f1680807e7772e30ec0660b3d11a | [] | no_license | Syun1/travis-test | efd21e27ff1fb8c05c9d483f9871bf8e573bb166 | 9ee8965763fd93790c4550fec1a0012dc0c3af87 | refs/heads/master | 2021-01-17T17:36:38.434361 | 2016-06-11T16:15:46 | 2016-06-11T16:15:46 | 60,602,954 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 120 | py | #!/usr/bin/python
import sys
res = "NG"
if sys.argv[1] == sys.argv[2];
res = "OK"
print sys.argv[1], sys.argv[2], res
| [
"a1607so@aiit.ac.jp"
] | a1607so@aiit.ac.jp |
ff457fa453332ea9f015cb50a8cbf848dcb3297b | dec76e0a8db7e8f8f43965df75f43202cd83ba8f | /caffemodel/feature_extraction.py | 1663a5a81cc6f7b39c3ccc31f40730f173e722fa | [] | no_license | yasutomo57jp/deeplearning_samples | ff88748ab04d10e19d71c4ca14dfaa2741fdd33c | d60b3dfb4d00ba0c4422f8faeaf81e9cd278a154 | refs/heads/master | 2021-01-19T10:48:47.147573 | 2017-12-21T05:54:21 | 2017-12-21T05:54:21 | 60,152,419 | 5 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,592 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# 使い方:
# 画像をあるフォルダに保存しておく(images)
from __future__ import print_function
import sys
import os
import os.path
import pickle
import cv2
import numpy as np
from chainer import Variable
from chainer.functions import caffe
from chainer import cuda
if len(sys.argv) < 3:
print("usage: %s imagedir outputfile" % sys.argv[0])
quit()
inputpath = sys.argv[1]
outputfile = sys.argv[2]
# Caffeのモデル読み込み(時間がかかる)
print("loading model ... ", end="", file=sys.stderr)
model = caffe.CaffeFunction("bvlc_reference_caffenet.caffemodel")
model.to_gpu()
print("done", file=sys.stderr)
# 平均画像の読み込み
mean = np.load("ilsvrc_2012_mean.npy") # 3x255x255 の画像
def load_images(inputpath, mean):
imglist = []
filenames = []
for root, dirs, files in os.walk(inputpath):
for fn in sorted(files):
filenames.append(fn)
bn, ext = os.path.splitext(fn)
if ext not in [".bmp", ".jpg", ".png"]:
continue
filename = os.path.join(root, fn)
# 評価用画像の読み込み(255x255サイズのカラー画像)
# チャンネル,高さ,幅に入れ替え
testimage = np.asarray(cv2.imread(filename), dtype=np.float64)
testimage = cv2.resize(testimage, (256,256))
testimage = testimage.transpose(2, 0, 1)
# 平均を引いた画像
testimage = testimage - mean
# 画像サイズを bvlc_reference_caffenet 用の 3x227x227 に揃えて配列にする
start = (255 - 227) // 2
stop = start + 227
imglist.append(testimage[:, start:stop, start:stop])
imgsdata = np.asarray(imglist, dtype=np.float32)
return imgsdata, filenames
print("loading images ... ", end="", file=sys.stderr)
testimages, filenames = load_images(inputpath, mean)
print("done", file=sys.stderr)
batchsize = 1
results = []
for i in range(0, len(testimages), batchsize):
# chainer用の変数にする
x = Variable(cuda.cupy.asarray(testimages[i:i+batchsize]))
# ネットワークを通す
y = model(inputs={"data": x}, outputs=["fc7"], train=False)
# 結果を受け取る
outputs = cuda.to_cpu(y[0].data) # 1000クラスそれぞれのスコア
for j, out in enumerate(outputs):
print(filenames[i+j], out, sep=",")
results.append((filenames[i+j], out))
with open(outputfile, "wb") as fout:
pickle.dump(results, fout)
| [
"kawanishi@nagoya-u.jp"
] | kawanishi@nagoya-u.jp |
8cf41bf2c38bace65ba3431ba16d3ed20a36db90 | d722194fb13eb1edfcdbfd6db36112949549bde2 | /config/email_config.py | 9502ceeb10867d2ef3d1d9a99ccd8a22b179af0b | [] | no_license | testervic/UiTesting | 2547e9e381bfb8154eec38fe633602a6ace19453 | ecaf0fff7e225092bd4ec9fb58284767ec299469 | refs/heads/master | 2020-08-31T16:08:18.041147 | 2019-10-31T09:58:16 | 2019-10-31T09:58:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 525 | py | #coding:utf-8
email_config = {
#发件人
"sender" : 'zw.vic@qq.com',
#收件人,多个收件人按;隔开
"receiver" : 'zw.vic@qq.com',
#邮件发送服务器
"smtpserver" : 'smtp.qq.com',
#发件人账号
"username" : 'zw.vic@qq.com',
#发件人密码
"password" : '',
#邮件标题
"mail_title" : '自动化测试报告',
#邮件内容
"mail_body" : '测试一下邮件到达情况',
#附件,list
"attachment": ['HTMLReport.html',r'F:\work\aaaa\test.txt']
} | [
"469858846@qq.com"
] | 469858846@qq.com |
c41afe920dbadeda0752df57557343c313996051 | 877a1368d26279afb6a9912e28dbe9e6a8ec06c3 | /ejercicios-basicos/02-binary_search.py | bc043844b1e2af4773659ecb446c98656699f123 | [] | no_license | alirioangel/python-excersices | 17f66f54e9b53453cacaa674f94b4423fa51c1fc | 71e10d7b30c7757ba05e05ecd0ab9ebe14cb40d7 | refs/heads/master | 2021-01-01T22:56:35.938398 | 2020-02-29T19:56:43 | 2020-02-29T19:56:43 | 239,377,371 | 0 | 0 | null | 2020-02-09T21:34:54 | 2020-02-09T21:30:44 | Python | UTF-8 | Python | false | false | 739 | py | def binary_search(numbers, number_to_find, low, high):
if low > high:
return False
mid = (low + high) // 2
if numbers[mid] == number_to_find:
return True
elif numbers[mid] > number_to_find:
return binary_search(numbers, number_to_find, low, mid - 1)
else:
return binary_search(numbers, number_to_find, mid + 1, high)
if __name__ == "__main__":
numbers = [1, 3, 4, 5, 6, 9, 10, 11, 25, 27, 28, 34, 36, 49, 51]
number_to_find = int(input("Ingresa un número: "))
result = binary_search(numbers, number_to_find, 0, len(numbers) - 1)
if result is True:
print("El número sí está en la lista.")
else:
print("El número NO está en la lista.")
| [
"alirioangelarenas@gmail.com"
] | alirioangelarenas@gmail.com |
e9a734e265312168e05ed8b6875b10202afd3554 | e49a224c4cca17839b29beaa16a9fcac8ac72df9 | /datasaver.py | 523240e24787d3cb89de03840dc7f067aabb7908 | [] | no_license | adnansabbir/LocationTracker | 63491895d40a91b60e9b98bc037f73a659e10035 | 96cb639fffd920b460428580c1339b645f59c260 | refs/heads/master | 2020-05-07T18:59:42.843413 | 2019-04-18T06:05:04 | 2019-04-18T06:05:04 | 180,791,836 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 535 | py | def saveData(list):
file = open('tracker_data', 'w+')
for data in list:
file.write("{}\n".format(data))
file.close()
def getData():
try:
file = open('tracker_data', 'r')
data_list = file.read().split('\n')
data = list(filter(None, data_list))
file.close()
except:
file = open('tracker_data', 'w+')
file = open('tracker_data', 'r')
data_list = file.read().split('\n')
data = list(filter(None, data_list))
file.close()
return data
| [
"adnansabbir2012@gmail.com"
] | adnansabbir2012@gmail.com |
7f78820ec7b0fb9e06f0c1b1bdf198ef2b2cabe4 | 77900cdd9a815caf1cd04705321ca93f5072179f | /Project2/Project2/.history/blog/views_20211114212413.py | 9aa8a5a017e67a14ca1de498d7a75944f750328a | [] | no_license | Bom19990111/helloword_python | 717799d994223d65de5adaeabecf396ff2bc1fb7 | 2ee2e67a60043f03c1ce4b070470c7d2dcdc72a7 | refs/heads/master | 2023-09-06T04:17:02.057628 | 2021-11-21T20:00:46 | 2021-11-21T20:00:46 | 407,063,273 | 0 | 1 | null | 2021-11-21T20:00:47 | 2021-09-16T07:18:35 | Python | UTF-8 | Python | false | false | 486 | py | from django.shortcuts import get_object_or_404, render
from .models import Blog
# Create your views here.
def all_blogs(request):
blogs = Blog.objects.filter(status=1).order_by('-created_on')
return render(request, 'blog/all_blogs.html', {'blogs': blogs})
def detail(request, slug):
blog = get_object_or_404(Blog, slug_title=slug)
return render(request, 'movies_details.html', {'blog': blog, 'blogs': app_movies})
return render(request, 'blog/detail.html')
| [
"phanthituyngoc1995@gmail.com"
] | phanthituyngoc1995@gmail.com |
f5085859358ee2e26a21ac501859baaff4b6d32d | 99ff45e2a5619c5a7db814ad860c9df03b30e53d | /h2_cars.py | bbbee34a754987133c7ccdced3e75eca414cc6b9 | [] | no_license | michael2112/sql | 55ea4029b01e01e3c93bcb9d6f643cb993b4dab0 | 813c65ecc96843e9ebac9783ce840b1be1a61c5a | refs/heads/master | 2020-07-05T22:45:09.466474 | 2020-05-14T00:38:31 | 2020-05-14T00:38:31 | 152,000,935 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 418 | py | import sqlite3
with sqlite3.connect("cars.db") as connection:
c = connection.cursor()
cars = [
('Ford', 'F150', 600000),
('Ford', 'Escort', 2700000),
('Ford', 'Explorer', 2100000),
('Honda', 'Accord', 1500000),
('Honda', 'Pilot', 1500000)
]
c.executemany("""
INSERT INTO inventory
VALUES (?,?,?)
""", cars)
| [
"mike@senseoflifesolutions.com"
] | mike@senseoflifesolutions.com |
5261a0e23701194a9ed6e03c317ebc9374941a35 | d14175ef247fcb0357c8fd517e1413dbbca0fa57 | /api/src/main.py | e8fed0aa0cfc7228696f8e76a849d25cd3eb0550 | [
"MIT"
] | permissive | Makai-Stern/shoppingify-api | 9ae729f860c6d92a28a897b46602ca44b0717ffa | 51913fbc51fc051ce9f3b9833aa0f398492dae29 | refs/heads/master | 2023-08-24T03:47:43.291257 | 2021-10-21T12:13:57 | 2021-10-21T12:13:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 597 | py | import uvicorn
from database import Base, engine
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles
from .config import initialize_routes
# Create db
Base.metadata.create_all(bind=engine)
app = FastAPI()
app.mount("/static", StaticFiles(directory="static"), name="static")
initialize_routes(app)
# Middleware
origins = ["http://localhost:3000", "http://192.168.0.121:3000"]
app.add_middleware(
CORSMiddleware,
allow_origins=origins,
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
| [
"makaistern@live.seminolestate.edu"
] | makaistern@live.seminolestate.edu |
57b6bba8bd5f82d7a8c4fc860ac7c792709b1230 | d46e2dac9a1a38413bcd9e4a517f575b18ccc96d | /store/migrations/0005_orders.py | 12cba2f3fa1082257c9ebb747878f9e0bdebd049 | [] | no_license | sabbir80/E_shop | cd1f9df89347df3d292edf22437c3bf36fcc3f59 | 098f866fdc50bcf3000386375d0b94ff9a0e64c5 | refs/heads/main | 2023-03-14T09:56:52.267221 | 2021-02-25T06:33:41 | 2021-02-25T06:33:41 | 342,146,218 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 933 | py | # Generated by Django 3.1.5 on 2021-02-14 18:27
import datetime
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('store', '0004_customer'),
]
operations = [
migrations.CreateModel(
name='Orders',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('quantity', models.IntegerField(default=1)),
('price', models.IntegerField()),
('date', models.DateField(default=datetime.datetime(2021, 2, 15, 0, 27, 5, 948948))),
('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='store.customer')),
('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='store.product')),
],
),
]
| [
"sabbirahmed4003@gmail.com"
] | sabbirahmed4003@gmail.com |
69f5eeb74738e2af87590e319e2a5c8ae41e1a01 | da3f97ded7d1c310f61c0346c88f995a79f6859a | /Solutions/RobotSim/Source/SimNode.py | 0e413efffb944e04fbc9e5aa702c34c9144bdb7f | [] | no_license | myxiaoxiao/kaixinba | a68a75ad6f12f6668b94e22f12f26c4329905a5c | 691ced946671167650f90ebaabf720d51548d860 | refs/heads/main | 2023-05-31T03:56:53.938720 | 2021-07-03T08:36:18 | 2021-07-03T08:36:18 | 367,508,806 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,465 | py | from Service import Service
import json
import sys
import requests
import time
class SimNode(Service):
def __init__(self, name, thisPort, nodePort):
Service.__init__(self, name, thisPort, nodePort)
self.mySimMap = {}
def Execute(self, parameters):
if 'content' in parameters and 'url' in parameters:
url = parameters['url']
package = json.loads(parameters['content'])
# if url == 'Connect':
# if package.get('setting'):
# setting = package['setting']
# self.mySim.Initialize(setting)
# return None
# if url == 'ChangeBatch':
# if package.get('wip') and package.get('batch'):
# wip = package['wip']
# batch = package['batch']
# rule = self.mySim.GetRule()
# rule.ChangeBatch(wip, batch)
# return None
# if url == 'ChangePriority':
# if package.get('wip') and package.get('priority'):
# wip = package['wip']
# priority = package['priority']
# rule = self.mySim.GetRule()
# rule.ChangePriority(wip, priority)
# self.mySim.Change()
# return None
# if url == 'RunStep':
# self.mySim.RunStep()
# return None
# if url == "GetOfflineData":
# OfflineData = self.mySim.GetOfflineData()
# return OfflineData
# if url == "GetOnlineData":
# onlineData = self.mySim.GetOnlineData()
# return onlineData
# if url == "GetTick":
# return self.mySim.tick
# # 备忘录 2020-02-27
# # 添加读取在制品、机床表的API,以便客户端获取仿真数据
# if url == "GetWipData":
# shop = self.mySim.GetShop()
# return shop.GetWipData()
# if url == "GetMachineData":
# shop = self.mySim.GetShop()
# return shop.GetMachineData()
# if url == "GetFinishedData":
# return self.mySim.GetFinishedMap()
return None
if __name__ == '__main__':
# servicePort = sys.argv[1]
# nodePort = sys.argv[2]
# serviceName = sys.argv[3]
servicePort = "21563"
nodePort = "21560"
serviceName = "MyRobot"
print(serviceName + ' STARTED')
print('PORT : ' + servicePort)
services = SimNode(serviceName, servicePort, nodePort)
# print('ADD APIS...')
# print('API : Connect')
# services.Append('Connect')
# print('API : ChangeBatch')
# services.Append('ChangeBatch')
# print('API : ChangePriority')
# services.Append('ChangePriority')
# print('API : RunStep')
# services.Append('RunStep')
# print('API : GetOfflineData')
# services.Append('GetOfflineData')
# print('API : GetOnlineData')
# services.Append('GetOnlineData')
# print('API : GetWipData')
# services.Append('GetWipData')
# print('API : GetMachineData')
# services.Append('GetMachineData')
# print('API : GetFinishedData')
# services.Append('GetFinishedData')
# print('API : GetTick')
# services.Append('GetTick')
print('RUNNING')
services.Run() | [
"guxiaofenghsl@126.com"
] | guxiaofenghsl@126.com |
d5a5fd248b40c965635e9177916a802d3a8d852a | ce2ac32d1e4e83a27db7b2f6c30c1a36d29698d0 | /A1/vae.py | 600f59fdf0d0d9f8d01653ee8f33c31dd387c397 | [] | no_license | ym1495/DeepLearningAssignments | 42f75cae4b91399720b1811d66fd0824ea284f2c | 86f93f5e94571f5aec08e7bdcc46ebe92c2b1c1c | refs/heads/master | 2021-01-19T07:47:23.955569 | 2017-03-14T20:55:24 | 2017-03-14T20:55:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 21,958 | py | from __future__ import print_function
import pickle
import numpy as np
import argparse
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torchvision import datasets, transforms
from torch.autograd import Variable
import pickle
import torchvision
import matplotlib.pyplot as plt
class CVAE(nn.Module):
    """Convolutional variational auto-encoder for 1x28x28 images (e.g. MNIST).

    Encoder: three strided convolutions down to a 64x2x2 feature map,
    then a linear layer producing mean and log-variance of a 200-dim
    Gaussian latent.  Decoder mirrors the encoder with transposed
    convolutions and a final sigmoid, so reconstructions lie in [0, 1].
    """

    def __init__(self):
        super(CVAE, self).__init__()
        self.conv1 = nn.Conv2d(1, 16, kernel_size=5, stride=2)
        self.conv2 = nn.Conv2d(16, 32, kernel_size=3, stride=2)
        self.conv3 = nn.Conv2d(32, 64, kernel_size=3, stride=2)
        # Encoder output shape for a 28x28 input: (channels, height, width).
        self.flat_dim = [64, 2, 2]
        self.h = self.flat_dim[0] * self.flat_dim[1] * self.flat_dim[2]
        self.latent_vars = 200
        # fc1 emits [mu, log_sigma] concatenated along the feature axis.
        self.fc1 = nn.Linear(self.h, self.latent_vars * 2)
        self.fc2 = nn.Linear(self.latent_vars, self.h)
        self.dconv1 = nn.ConvTranspose2d(64, 32, kernel_size=3, stride=2)
        self.dconv2 = nn.ConvTranspose2d(32, 16, kernel_size=3, stride=2, output_padding=1)
        self.dconv3 = nn.ConvTranspose2d(16, 1, kernel_size=5, stride=2, output_padding=1)

    def encode(self, x):
        """Map images to latent samples via the reparameterization trick.

        Side effect: stores self.mu and self.log_sig so a caller can
        build the KL term of the ELBO after calling forward().
        """
        x = F.relu(self.conv1(x))
        x = F.relu(self.conv2(x))
        x = F.relu(self.conv3(x))
        x = x.view(-1, x.size()[1] * x.size()[2] * x.size()[3])
        z = self.fc1(x)
        self.mu = z[:, 0:self.latent_vars]
        self.log_sig = z[:, self.latent_vars:]
        # Reparameterization: z = mu + sigma * eps, eps ~ N(0, I).
        eps = Variable(torch.randn(self.log_sig.size()))
        return self.mu + torch.exp(self.log_sig / 2) * eps

    def decode(self, z):
        """Map latent vectors back to (batch, 1, 28, 28) images in [0, 1]."""
        x = F.relu(self.fc2(z))
        x = x.view(-1, self.flat_dim[0], self.flat_dim[1], self.flat_dim[2])
        x = F.relu(self.dconv1(x))
        x = F.relu(self.dconv2(x))
        x = self.dconv3(x)
        # torch.sigmoid replaces the deprecated F.sigmoid (same result).
        x = torch.sigmoid(x)
        return (x)

    def forward(self, x):
        """Encode, sample, and reconstruct; returns the reconstruction only."""
        z = self.encode(x)
        x_hat = self.decode(z)
        return (x_hat)

    def sample(self, n):
        """Draw n images by decoding latent samples from the N(0, I) prior."""
        z = Variable(torch.randn((n, self.latent_vars)))
        return (self.decode(z))
class CNN(nn.Module):
    """Simple convolutional classifier for 1x28x28 images (10 classes).

    Two strided convolutions with dropout, then two fully connected
    layers; returns per-class log-probabilities (log-softmax).
    """

    def __init__(self):
        super(CNN, self).__init__()
        self.conv1 = nn.Conv2d(1, 16, kernel_size=5, stride=2)
        self.conv2 = nn.Conv2d(16, 32, kernel_size=5, stride=2)
        # conv3 / conv3_drop are not used by forward(); kept so that
        # previously saved state_dicts still load without key errors.
        self.conv3 = nn.Conv2d(10, 20, kernel_size=5, stride=2)
        # Encoder output shape for a 28x28 input: (channels, height, width).
        self.flat_dim = [32, 4, 4]
        self.conv2_drop = nn.Dropout2d()
        self.conv3_drop = nn.Dropout2d()
        self.fc1 = nn.Linear(self.flat_dim[0] * self.flat_dim[1] * self.flat_dim[2], 100)
        self.fc2 = nn.Linear(100, 10)

    def forward(self, x):
        """Return (batch, 10) log-probabilities for (batch, 1, 28, 28) input."""
        x = F.relu(self.conv1(x))
        x = F.relu(self.conv2(x))
        x = self.conv2_drop(x)
        x = x.view(-1, self.flat_dim[0] * self.flat_dim[1] * self.flat_dim[2])
        x = F.relu(self.fc1(x))
        x = F.dropout(x, training=self.training)
        # NOTE(review): ReLU before log-softmax clamps negative logits to
        # zero; kept as-is to preserve the trained behavior, but unusual.
        x = F.relu(self.fc2(x))
        # dim=1 is explicit (same as the old implicit default for 2-D input).
        return F.log_softmax(x, dim=1)
class CVAE2(nn.Module):
    """Convolutional VAE with batch-normalised encoder and decoder.

    The encoder compresses a (batch, 1, 28, 28) image into the mean and
    log-variance of a 10-dimensional Gaussian posterior; the decoder
    mirrors it with transposed convolutions.  The reconstruction is
    returned as raw values (no final sigmoid), so pair it with a loss
    that accepts logits.
    """

    def __init__(self):
        super(CVAE2, self).__init__()
        arc = [32, 32, 32]
        # Batch-norm layers for the forward (encoder) path ...
        self.bnf1 = nn.BatchNorm2d(arc[0])
        self.bnf2 = nn.BatchNorm2d(arc[1])
        self.bnf3 = nn.BatchNorm2d(arc[2])
        # ... and for the backward (decoder) path.
        self.bnb1 = nn.BatchNorm2d(arc[2])
        self.bnb2 = nn.BatchNorm2d(arc[1])
        self.bnb3 = nn.BatchNorm2d(arc[0])
        self.conv1 = nn.Conv2d(1, arc[0], kernel_size=5, stride=2)
        self.conv2 = nn.Conv2d(arc[0], arc[1], kernel_size=5, stride=2)
        self.conv3 = nn.Conv2d(arc[1], arc[2], kernel_size=3)
        self.flat_dim = self.get_flat_dim()
        self.h = self.flat_dim[0] * self.flat_dim[1] * self.flat_dim[2]
        self.latent_vars = 10
        self.fc1 = nn.Linear(self.h, self.latent_vars * 2)
        self.fc2 = nn.Linear(self.latent_vars, self.h)
        self.dconv1 = nn.ConvTranspose2d(arc[2], arc[1], kernel_size=3)
        self.dconv2 = nn.ConvTranspose2d(arc[1], arc[0], kernel_size=5, stride=2, output_padding=1)
        self.dconv3 = nn.ConvTranspose2d(arc[0], 1, kernel_size=5, stride=2, output_padding=1)

    def get_flat_dim(self):
        """Infer the encoder output shape by pushing a dummy batch through it."""
        probe = Variable(torch.randn(64, 1, 28, 28))
        for conv, bn in ((self.conv1, self.bnf1),
                         (self.conv2, self.bnf2),
                         (self.conv3, self.bnf3)):
            probe = F.relu(bn(conv(probe)))
        return probe.size()[1:]

    def encode(self, x):
        """Return (mu, log_sigma) of the approximate posterior for images x."""
        feats = x
        for conv, bn in ((self.conv1, self.bnf1),
                         (self.conv2, self.bnf2),
                         (self.conv3, self.bnf3)):
            feats = F.relu(bn(conv(feats)))
        flat = feats.view(-1, feats.size()[1] * feats.size()[2] * feats.size()[3])
        stats = self.fc1(flat)
        # First half of the features is the mean, second half the log-variance.
        return stats[:, :self.latent_vars], stats[:, self.latent_vars:]

    def decode(self, z):
        """Map latent codes z back to (batch, 1, 28, 28) reconstructions."""
        out = self.fc2(z)
        out = out.view(-1, self.flat_dim[0], self.flat_dim[1], self.flat_dim[2])
        out = F.relu(self.bnb1(out))
        out = F.relu(self.bnb2(self.dconv1(out)))
        out = F.relu(self.bnb3(self.dconv2(out)))
        return self.dconv3(out)

    def forward(self, x):
        """Encode, reparameterize, decode; returns (x_hat, mu, log_sigma)."""
        mu, log_sig = self.encode(x)
        noise = Variable(torch.randn(log_sig.size()))
        z = mu + torch.exp(log_sig / 2) * noise
        return self.decode(z), mu, log_sig

    def sample(self, n):
        """Decode n latent vectors drawn from the standard normal prior."""
        prior_draw = Variable(torch.randn((n, self.latent_vars)))
        return self.decode(prior_draw)
class CVAE2_Pool(nn.Module):
    """Convolutional VAE whose encoder max-pools with remembered indices.

    The pooling indices produced by encode() are required by the
    MaxUnpool2d layers in decode(), so decoding is only possible for a
    batch that was just encoded.  Reconstructions are raw values (no
    final sigmoid).
    """

    def __init__(self):
        super(CVAE2_Pool, self).__init__()
        arc = [64, 128, 128]
        self.arc = arc
        self.bnf1 = nn.BatchNorm2d(arc[0])
        self.bnf2 = nn.BatchNorm2d(arc[1])
        self.bnf3 = nn.BatchNorm2d(arc[2])
        self.bnb1 = nn.BatchNorm2d(arc[2])
        self.bnb2 = nn.BatchNorm2d(arc[1])
        self.bnb3 = nn.BatchNorm2d(arc[0])
        self.poolf1 = nn.MaxPool2d(2, return_indices=True)
        self.poolf2 = nn.MaxPool2d(2, return_indices=True)
        self.poolf3 = nn.MaxPool2d(2, return_indices=True)
        self.unpool1 = nn.MaxUnpool2d(2)
        self.unpool2 = nn.MaxUnpool2d(2)
        self.unpool3 = nn.MaxUnpool2d(2)
        self.conv1 = nn.Conv2d(1, arc[0], kernel_size=5)
        self.conv2 = nn.Conv2d(arc[0], arc[1], kernel_size=3)
        self.conv3 = nn.Conv2d(arc[1], arc[2], kernel_size=3)
        self.flat_dim = self.get_flat_dim()
        self.h = self.flat_dim[0] * self.flat_dim[1] * self.flat_dim[2]
        self.latent_vars = 10
        self.fc1 = nn.Linear(self.h, self.latent_vars * 2)
        self.fc2 = nn.Linear(self.latent_vars, self.h)
        self.dconv1 = nn.ConvTranspose2d(arc[2], arc[1], kernel_size=3)
        self.dconv2 = nn.ConvTranspose2d(arc[1], arc[0], kernel_size=3)
        self.dconv3 = nn.ConvTranspose2d(arc[0], 1, kernel_size=5)

    def get_flat_dim(self):
        """Infer the encoder output shape with a dummy batch.

        relu(max_pool(x)) equals max_pool(relu(x)) shape-wise and
        value-wise, so this matches encode()'s layout.
        """
        x = Variable(torch.randn(64, 1, 28, 28))
        x = F.relu(F.max_pool2d(self.bnf1(self.conv1(x)), 2))
        x = F.relu(F.max_pool2d(self.bnf2(self.conv2(x)), 2))
        x = F.relu(F.max_pool2d(self.bnf3(self.conv3(x)), 2))
        return (x.size()[1:])

    def encode(self, x):
        """Return (mu, log_sigma, pooling_indices) for a batch of images."""
        x, id1 = self.poolf1(F.relu(self.bnf1(self.conv1(x))))
        x, id2 = self.poolf2(F.relu(self.bnf2(self.conv2(x))))
        x, id3 = self.poolf3(F.relu(self.bnf3(self.conv3(x))))
        x = x.view(-1, x.size()[1] * x.size()[2] * x.size()[3])
        z = self.fc1(x)
        mu = z[:, 0:self.latent_vars]
        log_sig = z[:, self.latent_vars:]
        return mu, log_sig, [id1, id2, id3]

    def decode(self, z, ids):
        """Decode latent codes using the pooling indices saved by encode().

        output_size is given spatially (no batch/channel dims) so the
        decoder works for any batch size, not just a batch of 64.
        """
        x = self.fc2(z)
        x = x.view(-1, self.flat_dim[0], self.flat_dim[1], self.flat_dim[2])
        x = F.relu(self.bnb1(x))
        x = self.unpool1(x, ids[2], output_size=[3, 3])
        x = F.relu(self.bnb2(self.dconv1(x)))
        x = self.unpool2(x, ids[1], output_size=[10, 10])
        x = F.relu(self.bnb3(self.dconv2(x)))
        x = self.unpool3(x, ids[0], output_size=[24, 24])
        x = self.dconv3(x)
        return (x)

    def forward(self, x):
        """Encode, reparameterize, decode; returns (x_hat, mu, log_sigma)."""
        mu, log_sig, ids = self.encode(x)
        eps = Variable(torch.randn(log_sig.size()))
        z = mu + torch.exp(log_sig / 2) * eps
        x_hat = self.decode(z, ids)
        return (x_hat, mu, log_sig)

    def sample(self, n):
        """Unsupported: the decoder needs pooling indices from encode().

        The previous implementation called decode() without the required
        `ids` argument and crashed with a TypeError; fail with a clear
        message instead.
        """
        raise NotImplementedError(
            "CVAE2_Pool.sample requires pooling indices from encode(); "
            "sampling from the prior alone is not supported.")
class DCVAE2_Pool(nn.Module):
    """Denoising convolutional VAE with index-remembering max pooling.

    Identical architecture to CVAE2_Pool, plus optional additive
    Gaussian corruption of each encoder layer's input during training
    (controlled by self.noise_std; the default of 0 disables it).
    """

    def __init__(self):
        super(DCVAE2_Pool, self).__init__()
        arc = [64, 128, 128]
        self.bnf1 = nn.BatchNorm2d(arc[0])
        self.bnf2 = nn.BatchNorm2d(arc[1])
        self.bnf3 = nn.BatchNorm2d(arc[2])
        self.bnb1 = nn.BatchNorm2d(arc[2])
        self.bnb2 = nn.BatchNorm2d(arc[1])
        self.bnb3 = nn.BatchNorm2d(arc[0])
        self.poolf1 = nn.MaxPool2d(2, return_indices=True)
        self.poolf2 = nn.MaxPool2d(2, return_indices=True)
        self.poolf3 = nn.MaxPool2d(2, return_indices=True)
        self.unpool1 = nn.MaxUnpool2d(2)
        self.unpool2 = nn.MaxUnpool2d(2)
        self.unpool3 = nn.MaxUnpool2d(2)
        self.conv1 = nn.Conv2d(1, arc[0], kernel_size=5)
        self.conv2 = nn.Conv2d(arc[0], arc[1], kernel_size=3)
        self.conv3 = nn.Conv2d(arc[1], arc[2], kernel_size=3)
        self.flat_dim = self.get_flat_dim()
        self.h = self.flat_dim[0] * self.flat_dim[1] * self.flat_dim[2]
        self.latent_vars = 10
        self.fc1 = nn.Linear(self.h, self.latent_vars * 2)
        self.fc2 = nn.Linear(self.latent_vars, self.h)
        self.dconv1 = nn.ConvTranspose2d(arc[2], arc[1], kernel_size=3)
        self.dconv2 = nn.ConvTranspose2d(arc[1], arc[0], kernel_size=3)
        self.dconv3 = nn.ConvTranspose2d(arc[0], 1, kernel_size=5)
        # Std of the additive input noise used while training; 0 = off.
        self.noise_std = 0

    def get_flat_dim(self):
        """Infer the encoder output shape with a dummy batch."""
        x = Variable(torch.randn(64, 1, 28, 28))
        x = F.relu(F.max_pool2d(self.bnf1(self.conv1(x)), 2))
        x = F.relu(F.max_pool2d(self.bnf2(self.conv2(x)), 2))
        x = F.relu(F.max_pool2d(self.bnf3(self.conv3(x)), 2))
        return (x.size()[1:])

    def encode(self, x):
        """Return (mu, log_sigma, pooling_indices).

        In training mode each layer input is corrupted with additive
        Gaussian noise of std self.noise_std (denoising-autoencoder
        style); corruption is skipped entirely when the std is 0.
        """
        noise = self.noise_std if self.training else 0

        def corrupt(t):
            # Identity when noise == 0 (avoids allocating a randn tensor).
            return t if noise == 0 else t + Variable(noise * torch.randn(t.size()))

        x, id1 = self.poolf1(F.relu(self.bnf1(self.conv1(corrupt(x)))))
        x, id2 = self.poolf2(F.relu(self.bnf2(self.conv2(corrupt(x)))))
        x, id3 = self.poolf3(F.relu(self.bnf3(self.conv3(corrupt(x)))))
        x = x.view(-1, x.size()[1] * x.size()[2] * x.size()[3])
        z = self.fc1(x)
        mu = z[:, 0:self.latent_vars]
        log_sig = z[:, self.latent_vars:]
        return mu, log_sig, [id1, id2, id3]

    def decode(self, z, ids):
        """Decode latent codes using the pooling indices saved by encode().

        output_size is given spatially so decoding works for any batch
        size (the old code hardcoded a batch of 64 and channel count 32,
        which did not even match this class's channel widths).
        """
        x = self.fc2(z)
        x = x.view(-1, self.flat_dim[0], self.flat_dim[1], self.flat_dim[2])
        x = F.relu(self.bnb1(x))
        x = self.unpool1(x, ids[2], output_size=[3, 3])
        x = F.relu(self.bnb2(self.dconv1(x)))
        x = self.unpool2(x, ids[1], output_size=[10, 10])
        x = F.relu(self.bnb3(self.dconv2(x)))
        x = self.unpool3(x, ids[0], output_size=[24, 24])
        x = self.dconv3(x)
        return (x)

    def forward(self, x):
        """Encode, reparameterize, decode; returns (x_hat, mu, log_sigma)."""
        mu, log_sig, ids = self.encode(x)
        eps = Variable(torch.randn(log_sig.size()))
        z = mu + torch.exp(log_sig / 2) * eps
        x_hat = self.decode(z, ids)
        return (x_hat, mu, log_sig)

    def sample(self, n):
        """Unsupported: the decoder needs pooling indices from encode().

        The previous implementation called decode() without the required
        `ids` argument and crashed with a TypeError.
        """
        raise NotImplementedError(
            "DCVAE2_Pool.sample requires pooling indices from encode(); "
            "sampling from the prior alone is not supported.")
class DCVAE2_Pool_Deeper(nn.Module):
    """Deeper denoising convolutional VAE that doubles as a classifier.

    The 10-dimensional latent mean is reused as the class logits, so
    cost() mixes a supervised NLL term with a reconstruction MSE term
    weighted by cost_rec.  Training-time inputs to every encoder layer
    are corrupted with additive Gaussian noise of std self.noise_std.
    """

    def __init__(self, cost_rec=1.):
        super(DCVAE2_Pool_Deeper, self).__init__()
        # Weight of the reconstruction term in cost()/unsup_cost().
        self.cost_rec = cost_rec
        arc = [32, 64, 64, 128]
        # Store the real channel widths (the old code left self.arc empty).
        self.arc = arc
        self.bnf1 = nn.BatchNorm2d(arc[0])
        self.bnf2 = nn.BatchNorm2d(arc[1])
        self.bnf3 = nn.BatchNorm2d(arc[2])
        self.bnf4 = nn.BatchNorm2d(arc[3])
        self.bnb1 = nn.BatchNorm2d(arc[3])
        self.bnb2 = nn.BatchNorm2d(arc[2])
        self.bnb3 = nn.BatchNorm2d(arc[1])
        self.bnb4 = nn.BatchNorm2d(arc[0])
        self.poolf1 = nn.MaxPool2d(2, return_indices=True)
        self.poolf2 = nn.MaxPool2d(2, return_indices=True)
        self.unpool1 = nn.MaxUnpool2d(2)
        self.unpool2 = nn.MaxUnpool2d(2)
        # unpool3 is unused by decode(); kept for state_dict compatibility.
        self.unpool3 = nn.MaxUnpool2d(2)
        self.conv1 = nn.Conv2d(1, arc[0], kernel_size=5)
        self.conv2 = nn.Conv2d(arc[0], arc[1], kernel_size=3)
        self.conv3 = nn.Conv2d(arc[1], arc[2], kernel_size=3)
        self.conv4 = nn.Conv2d(arc[2], arc[3], kernel_size=3)
        self.flat_dim = self.get_flat_dim()
        self.h = self.flat_dim[0] * self.flat_dim[1] * self.flat_dim[2]
        self.latent_vars = 10
        self.fc1 = nn.Linear(self.h, self.latent_vars * 2)
        self.fc2 = nn.Linear(self.latent_vars, self.h)
        self.dconv1 = nn.ConvTranspose2d(arc[3], arc[2], kernel_size=3)
        self.dconv2 = nn.ConvTranspose2d(arc[2], arc[1], kernel_size=3)
        self.dconv3 = nn.ConvTranspose2d(arc[1], arc[0], kernel_size=3)
        self.dconv4 = nn.ConvTranspose2d(arc[0], 1, kernel_size=5)
        # Std of the additive denoising noise used during training.
        self.noise_std = .2

    def get_flat_dim(self):
        """Infer the encoder output shape with a dummy batch.

        Uses max pooling for the final reduction where encode() uses
        average pooling; both produce the same output shape.
        """
        x = Variable(torch.randn(64, 1, 28, 28))
        x = F.relu(self.bnf1(self.conv1(x)))
        x, id1 = self.poolf1(x)
        x = F.relu(self.bnf2(self.conv2(x)))
        x = F.relu(self.bnf3(self.conv3(x)))
        x, id2 = self.poolf2(x)
        x = F.relu(self.bnf4(self.conv4(x)))
        x, i = F.max_pool2d(x, 2, return_indices=True)
        return (x.size()[1:])

    def encode(self, x):
        """Return (mu, log_sigma, pooling_indices) for a batch of images.

        Training-time layer inputs get additive Gaussian corruption of
        std self.noise_std; corruption is skipped when the std is 0 or
        the module is in eval mode.
        """
        noise = self.noise_std if self.training else 0

        def corrupt(t):
            # Identity when noise == 0 (avoids allocating a randn tensor).
            return t if noise == 0 else t + Variable(noise * torch.randn(t.size()))

        x = F.relu(self.bnf1(self.conv1(corrupt(x))))
        x, id1 = self.poolf1(x)
        x = F.relu(self.bnf2(self.conv2(corrupt(x))))
        x = F.relu(self.bnf3(self.conv3(corrupt(x))))
        x, id2 = self.poolf2(x)
        x = F.relu(self.bnf4(self.conv4(corrupt(x))))
        x = F.avg_pool2d(x, 2)
        x = x.view(-1, x.size()[1] * x.size()[2] * x.size()[3])
        z = self.fc1(x)
        mu = z[:, 0:self.latent_vars]
        log_sig = z[:, self.latent_vars:]
        return mu, log_sig, [id1, id2]

    def avg_unpool(self, x):
        """Approximate inverse of the final 2x2 average pool.

        Replicates each 1x1 activation over a 2x2 window; valid because
        the encoder's last avg_pool reduces the map to 1x1.
        """
        return x.expand(x.size()[0], x.size()[1], 2, 2)

    def decode(self, z, ids):
        """Decode latent codes using the pooling indices saved by encode().

        output_size is given spatially (no batch/channel dims) so
        decoding works for any batch size, not just a batch of 64.
        """
        x = self.fc2(z)
        x = x.view(-1, self.flat_dim[0], self.flat_dim[1], self.flat_dim[2])
        x = self.avg_unpool(x)
        x = F.relu(self.bnb1(x))
        x = F.relu(self.bnb2(self.dconv1(x)))
        x = self.unpool1(x, ids[1], output_size=[8, 8])
        x = F.relu(self.bnb3(self.dconv2(x)))
        x = F.relu(self.bnb4(self.dconv3(x)))
        x = self.unpool2(x, ids[0], output_size=[24, 24])
        x = self.dconv4(x)
        return (x)

    def forward(self, x):
        """Encode, reparameterize, decode; returns (x_hat, mu, log_sigma)."""
        mu, log_sig, ids = self.encode(x)
        eps = Variable(torch.randn(log_sig.size()))
        z = mu + torch.exp(log_sig / 2) * eps
        x_hat = self.decode(z, ids)
        return (x_hat, mu, log_sig)

    def cost(self, x, y):
        """Supervised loss: NLL over the latent mean used as class logits,
        plus self.cost_rec times the mean squared reconstruction error."""
        x_hat, mu, log_sig = self.forward(x)
        y_hat = F.log_softmax(mu, dim=1)
        class_loss = F.nll_loss(y_hat, y)
        recon_loss = ((x_hat - x) ** 2).mean()
        loss = class_loss + recon_loss * self.cost_rec
        return (loss)

    def unsup_cost(self, x):
        """Unsupervised loss: weighted mean squared reconstruction error."""
        x_hat, mu, log_sig = self.forward(x)
        recon_loss = ((x_hat - x) ** 2).mean() * self.cost_rec
        return (recon_loss)

    def predict(self, x):
        """Return (batch, 10) class log-probabilities from the latent mean."""
        x_hat, mu, log_sig = self.forward(x)
        return F.log_softmax(mu, dim=1)

    def sample(self, n):
        """Unsupported: the decoder needs pooling indices from encode().

        The previous implementation called decode() without the required
        `ids` argument and crashed with a TypeError.
        """
        raise NotImplementedError(
            "DCVAE2_Pool_Deeper.sample requires pooling indices from "
            "encode(); sampling from the prior alone is not supported.")
class DCVAE2_Pool_Deeper_Ladder(nn.Module):
def __init__(self, cost_rec = 1., cost_m = .1):
super(DCVAE2_Pool_Deeper_Ladder, self).__init__()
self.cost_rec = cost_rec
self.cost_m = cost_m
self.arc =[]
arc = [32 , 64, 64, 128]
self.bnf1 = nn.BatchNorm2d(arc[0])
self.bnf2 = nn.BatchNorm2d(arc[1])
self.bnf3 = nn.BatchNorm2d(arc[2])
self.bnf4 = nn.BatchNorm2d(arc[3])
self.bnb1 = nn.BatchNorm2d(arc[3])
self.bnb2 = nn.BatchNorm2d(arc[2])
self.bnb3 = nn.BatchNorm2d(arc[1])
self.bnb4 = nn.BatchNorm2d(arc[0])
self.poolf1 = nn.MaxPool2d(2,return_indices=True)
self.poolf2 = nn.MaxPool2d(2,return_indices=True)
self.unpool1 = nn.MaxUnpool2d(2)
self.unpool2 = nn.MaxUnpool2d(2)
self.unpool3 = nn.MaxUnpool2d(2)
self.conv1 = nn.Conv2d(1, arc[0], kernel_size=5)
self.conv2 = nn.Conv2d(arc[0], arc[1], kernel_size=3)
self.conv3 = nn.Conv2d(arc[1], arc[2], kernel_size=3)
self.conv4 = nn.Conv2d(arc[2], arc[3], kernel_size=3)
self.flat_dim = self.get_flat_dim()
self.h = self.flat_dim[0] * self.flat_dim[1] * self.flat_dim[2]
self.latent_vars = 10
self.fc1 = nn.Linear(self.h, self.latent_vars * 2)
self.fc2 = nn.Linear(self.latent_vars, self.h)
self.dconv1 = nn.ConvTranspose2d(arc[3], arc[2], kernel_size=3)
self.dconv2 = nn.ConvTranspose2d(arc[2], arc[1], kernel_size=3)
self.dconv3 = nn.ConvTranspose2d(arc[1], arc[0], kernel_size=3)
self.dconv4 = nn.ConvTranspose2d(arc[0], 1, kernel_size=5)
self.noise_std = .2
def get_flat_dim(self):
x = Variable(torch.randn(64, 1, 28, 28))
x = F.relu(self.bnf1(self.conv1(x)))
x, id1 = self.poolf1(x)
x = F.relu(self.bnf2(self.conv2(x)))
x = F.relu(self.bnf3(self.conv3(x)))
x, id2 = self.poolf2(x)
x = F.relu(self.bnf4(self.conv4(x)))
x,i = F.max_pool2d(x,2,return_indices=True)
return (x.size()[1:])
def encode(self, x):
if self.training:
noise = self.noise_std
else:
noise = 0
z1 = F.relu(self.bnf1(self.conv1(x+ Variable(noise * torch.randn(x.size())))))
z1, id1 = self.poolf1(z1)
z2 = F.relu(self.bnf2(self.conv2(z1+ Variable(noise * torch.randn(z1.size())))))
z3 = F.relu(self.bnf3(self.conv3(z2+ Variable(noise * torch.randn(z2.size())))))
z3 ,id2 = self.poolf2(z3)
z4 = F.relu(self.bnf4(self.conv4(z3+ Variable(noise * torch.randn(z3.size())))))
x = F.avg_pool2d(z4,2)
x = x.view(-1, x.size()[1] * x.size()[2] * x.size()[3])
z = self.fc1(x)
mu = z[:, 0:self.latent_vars]
log_sig = z[:, self.latent_vars:]
return mu, log_sig , [z1,z2,z3,z4] ,[id1,id2]
def avg_unpool(self,x):
x = x.expand(x.size()[0],x.size()[1],2,2)
return x
def decode(self, z , ids):
x = self.fc2(z)
x = x.view(-1, self.flat_dim[0], self.flat_dim[1], self.flat_dim[2])
x = self.avg_unpool(x)
h1 = self.bnb1(x)
x = F.relu(h1)
h2 = self.bnb2(self.dconv1(x))
x = F.relu(h2)
x = self.unpool1(x,ids[1] , output_size=torch.Size([64, 64, 8, 8]))
h3 =self.bnb3(self.dconv2(x))
x = F.relu(h3)
h4 = self.bnb4(self.dconv3(x))
x = F.relu(h4)
x = self.unpool2(x,ids[0] , output_size=torch.Size([64, 32, 24, 24]))
x = self.dconv4(x)
return (x,[h1,h2,h3,h4])
def forward(self, x):
mu, log_sig , h_encode , ids = self.encode(x)
eps = Variable(torch.randn(log_sig.size()))
z = mu + torch.exp(log_sig / 2) * eps
x_hat , h_decode = self.decode(z,ids)
h_decode.reverse()
return (x_hat, mu, log_sig , h_decode , h_encode)
def cost(self, x, y):
x_hat, mu, log_sig, h_decode, h_encode = self.forward(x)
y_hat = F.log_softmax(mu)
class_loss = F.nll_loss(y_hat,y)
recon_loss = ((x_hat - x)**2).mean()
h_loss = Variable(torch.zeros(1))
for h_d, h_e in zip(h_decode, h_encode):
h_loss += ((h_d - h_e) ** 2).mean()
loss = class_loss + self.cost_rec * recon_loss + self.cost_m *h_loss
return(loss)
def unsup_cost(self, x):
x_hat, mu, log_sig, h_decode, h_encode = self.forward(x)
y_hat = F.log_softmax(mu)
recon_loss = ((x_hat - x)**2).mean()
h_loss = Variable(torch.zeros(1))
for h_d, h_e in zip(h_decode, h_encode):
h_loss += ((h_d - h_e) ** 2).mean()
loss = self.cost_rec * recon_loss + self.cost_m *h_loss
return(loss)
def predict(self,x):
x_hat, mu, log_sig, h_decode, h_encode = self.forward(x)
return F.log_softmax(mu)
def sample(self, n):
z = Variable(torch.randn((n, self.latent_vars)))
return (self.decode(z))
model = DCVAE2_Pool_Deeper_Ladder()
#print(model.encode(Variable(torch.randn(64,1,28,28)))[0].size() )
#print(model.forward(Variable(torch.randn(64,1,28,28)))[0].size() )
print(model.cost(Variable(torch.randn(64,1,28,28)) , Variable(torch.ones(64)).long()) )
#model.forward(Variable(torch.randn(64,1,28,28)))
#model.encode(Variable(torch.randn(64,1,28,28)))
#print(model.get_flat_dim())
#a = model(Variable(torch.randn(64,1,28,28)))[0]
#print(a.size())
| [
"zz1409@nyu.edu"
] | zz1409@nyu.edu |
09506cc6d8d44d47a476bb388c88c914536aac64 | 56aba02e07581eb3de286fe361d3e590af5aee03 | /black-box-attack/VGGVox/draw.py | c6e933f47ebe9669503e6ba1885aa16af6a3060a | [] | no_license | zhanglei1949/AttackSpeakerVerificationSystem | 51daf281a7fe9b919fd6ef85703a3ef0a2e664c9 | e698dec73132b420b16beb3ae544fe3eaffdddb8 | refs/heads/master | 2020-06-13T01:02:34.452105 | 2019-08-01T06:46:42 | 2019-08-01T06:46:42 | 194,480,818 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,692 | py | import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
fig,ax = plt.subplots()
#ori_distance = [
## [0.83, 0.85, 0.78, 0.80, 1],
# [0.86, 0.82, 0.78, 1, 0.80],
# [0.82, 0.96, 1, 0.78, 0.78],
# [0.91, 1, 0.95, 0.82, 0.85],
# [1, 0.91, 0.82, 0.86, 0.83]
# #1 2 3 4 5
# ]
adv_distance = [
#target
#source
[0, 0.53, 0.39, 0.48, 0.36],
[0.25, 0, 0.23, 0.25, 0.20],
[0.43, 0.51, 0, 0.42, 0.37],
[0.51, 0.33, 0.44, 0, 0.38],
[0.40, 0.38, 0.58, 0.34, 0]
]
ori_distance = [
[0, 0.91, 0.82, 0.86, 0.83],
[0.91, 0, 0.95, 0.82, 0.85],
[0.82, 0.95, 0, 0.78, 0.78],
[0.86, 0.82, 0.78, 0, 0.80],
[0.83, 0.85, 0.78, 0.80, 0]
]
ori_score = np.ones((5,5)) - ori_distance
#adv_distance = [
# [0.36, 0.19, 0.37, 0.38, 1],
# [0.48, 0.25, 0.42, 1, 0.34],
# [0.39, 0.23, 1, 0.44, 0.58],
# [0.53, 1, 0.52, 0.33, 0.38],
# [1, 0.25, 0.43, 0.51, 0.40]
# ]
adv_score = np.ones((5,5)) - adv_distance
ylabels = ['id-84', 'id-174', 'id-251', 'id-422', 'id-652']
xlabels = ['id-84', 'id-174', 'id-251', 'id-422', 'id-652']
#ylabels = ['id-84', 'id-174', 'id-251', 'id-422', 'id652']
#ylabels = [ 'id-652', 'id-422','id-251', 'id-174', 'id-84']
#cmap = sns.cubehelix_palette(start = 1.5, rot = 3, gamma=0.8, as_cmap = True)
#cmpa = sns.cubehelix_palette(8, as_cmap = True)
mask = [
[0, 0, 0, 0, 1],
[0, 0, 0, 1, 0],
[0, 0, 1, 0, 0],
[0, 1, 0, 0, 0],
[1, 0, 0, 0, 0]
]
#plt.figure(figsize=(10,10))
#sns.heatmap(adv_score, linewidth = 0.15, annot = True, mask = mask, cmap = 'Greys' , vmax=1, vmin=0)
sns.heatmap(adv_score, linewidth = 0.2, annot = True, cmap = 'Greys' , vmax=1, vmin=0, cbar=0, square = 1, annot_kws={'fontsize' : 16})
#ax.set_xticks(np.arange(5))
#ax.set_yticks(np.arange(5))
ax.set_xticklabels(xlabels, fontdict={'fontsize':12})
#ax.set_xticklabels(xlabels)
ax.set_yticklabels(ylabels, fontdict={'fontsize':12})
#ax.set_yticklabels(ylabels)
ax.xaxis.set_ticks_position('top')
ax.xaxis.set_label_position('top')
ax.tick_params(axis='both', which='both', labelsize=10)
plt.xlabel('Target speaker', fontdict={'fontsize':20}, labelpad=10)
plt.ylabel('Source speaker', fontdict={'fontsize':20}, labelpad=10)
#plt.xticks([])
#plt.yticks([])
#plt.gca().invert_yaxis()
fig.savefig('adv_score_grey_1.pdf', bbox_inches='tight')
#print((np.sum(adv_score) - 5)/20)
#print((np.sum(ori_score) - 5)/20)
| [
"zhanglei1949@sjtu.edu.cn"
] | zhanglei1949@sjtu.edu.cn |
61fc1d8eec5d3b9b963a62790170ce775fa5ca8f | 20e294239b399b25f2268d33bfa5d1688176003d | /pycrawler101/case_studies/tw104.py | 533acf98c5c12c10bc5ba22577c02315450c1d38 | [] | no_license | ppkliu/PyCrawler101-201510 | 569e6e8eea682259d70e442e2a7d00f925b0fde5 | dd92f57e54190f9c805a5a51be1dae3c2ce5192c | refs/heads/master | 2021-01-12T20:55:33.597995 | 2015-10-23T17:02:12 | 2015-10-23T17:02:12 | 46,966,547 | 1 | 0 | null | 2015-11-27T07:44:22 | 2015-11-27T07:44:21 | null | UTF-8 | Python | false | false | 2,997 | py | '''
Created on Oct 15, 2015
@author: c3h3
'''
import requests
from pyquery import PyQuery
from datetime import datetime
import pandas as pd
def getData(search_kw):
page = 1
url = "http://www.104.com.tw/jobbank/joblist/auto_joblist.cfm?auto=1&jobsource=n104bank1&ro=0&keyword={skw}&order=1&asc=0&page={page}&psl=N_B".format(skw=search_kw.replace(" ","+"),page=page)
res = requests.get(url)
S = PyQuery(res.text)
max_pages = int(PyQuery(S("#box_page_bottom_2 li > a")[-1]).text())
data = []
data.extend(S(".j_cont").map(lambda i,e: {"name":PyQuery(e)(".job_name").text(),
"url":PyQuery(e)("a").attr("href"),
"meta":dict(PyQuery(e)("meta").map(lambda ii,ee:(PyQuery(ee).attr("itemprop"),PyQuery(ee).attr("content")))),
"area":PyQuery(e)(".area_summary").text(),
"company_name":PyQuery(e)(".compname_summary").text(),
"company_meta":PyQuery(e)(".compname_summary span").attr("title"),
"candidates_summary":PyQuery(e)(".candidates_summary").text(),
"requirement":PyQuery(e)(".requirement").text(),
"joblist_summary":PyQuery(e)(".joblist_summary").text(),
"searched_keyword":search_kw,
"crawledAt":datetime.utcnow()}))
for page in range(2,max_pages+1):
url = "http://www.104.com.tw/jobbank/joblist/auto_joblist.cfm?auto=1&jobsource=n104bank1&ro=0&keyword={skw}&order=1&asc=0&page={page}&psl=N_B".format(skw=search_kw.replace(" ","+"),page=page)
res = requests.get(url)
S = PyQuery(res.text)
data.extend(S(".j_cont").map(lambda i,e: {"name":PyQuery(e)(".job_name").text(),
"url":PyQuery(e)("a").attr("href"),
"meta":dict(PyQuery(e)("meta").map(lambda ii,ee:(PyQuery(ee).attr("itemprop"),PyQuery(ee).attr("content")))),
"area":PyQuery(e)(".area_summary").text(),
"company_name":PyQuery(e)(".compname_summary").text(),
"company_meta":PyQuery(e)(".compname_summary span").attr("title"),
"candidates_summary":PyQuery(e)(".candidates_summary").text(),
"requirement":PyQuery(e)(".requirement").text(),
"joblist_summary":PyQuery(e)(".joblist_summary").text(),
"searched_keyword":search_kw,
"crawledAt":datetime.utcnow()}))
df = pd.DataFrame(data)
return df | [
"c3h3.tw@gmail.com"
] | c3h3.tw@gmail.com |
fad5e4306896158984d3b46ba9134d7ba1da6344 | 8c3abe504a135f926848d386a237376fb8c29717 | /hello_get.py | 771db69869aeb89fe70c498aeb21c49aef126715 | [] | no_license | hamyco/python_flask_test | 8be347fc267bef12451435bab9a0bb5b0c285dd1 | d6f8d0723065ddd8ee00ea68c856189a55eb6926 | refs/heads/master | 2023-06-06T18:19:19.267035 | 2021-07-07T08:36:01 | 2021-07-07T08:36:01 | 354,491,308 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 286 | py | from flask import Flask, render_template, request
app = Flask(__name__)
@app.route('/hello')
def hello():
name = request.args.get('name')
return render_template('hello.html', title='flask test', name=name)
## おまじない
if __name__ == "__main__":
app.run(debug=True) | [
"haimin.hu@linecorp.com"
] | haimin.hu@linecorp.com |
8706b1e17ec269573b2ea81317988e43cf44c58e | 2b81ac592b57a3477ae70cda12b83cd41e8a9d7e | /yeanay/ideology/views.py | 5a1fc67a334055c8d6ba2d13497cda44216e9f5e | [] | no_license | yeanay/yeanay.io | 1b4b893fa3d6ca6c1b8c946725ec80e6e6a530e4 | 4e709f10c6698c587c694ef2b76bf1334bc4a87b | refs/heads/master | 2021-01-20T11:29:41.851566 | 2013-05-24T20:13:40 | 2013-05-24T20:13:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,677 | py | from django.http import HttpResponse
from ideology.models import Ideology, Legislator, LegislatorSession
from django.db.models import Avg
import json
def congress(request, congress_num, cham):
full_congress = Ideology.objects.filter(congress=congress_num,
chamber=cham)
json_dict = []
print len(full_congress)
for mem in full_congress:
json_dict.append({'name': mem.name,
'district': mem.district,
'party_id': mem.party_id,
'state_id': mem.state_id,
'congress': mem.congress,
'ideology': mem.first_dimension})
return HttpResponse(json.dumps(json_dict), mimetype="application/json")
def polarization_over_time(request):
"""Returns JSON of polarization (distance between republicans and
dems over time)"""
party_avgs = Ideology.objects.values('congress', 'chamber', 'party_id') \
.annotate(party_avg = Avg('first_dimension')) \
.filter(party_id__in = [100, 200]) \
.order_by('congress', 'chamber', 'party_id').all()
polarization_dict = []
polar_grps = [(party_avgs[x], party_avgs[x+1]) for x in range(0, len(party_avgs), 2)]
for dem, rep in polar_grps:
polar = {}
polar['chamber'] = dem['chamber']
polar['congress'] = dem['congress']
polar['polarization'] = rep['party_avg'] - dem['party_avg']
polarization_dict.append(polar)
json_result = json.dumps(polarization_dict)
return HttpResponse(json_result, mimetype="application/json")
| [
"chrbrown@sas.upenn.edu"
] | chrbrown@sas.upenn.edu |
46a9d01adeb5180d9d78056ca7b5bbda3cd4b32e | c17ab2964a82403aad044e1674fd4389e64a4ab1 | /Transversal2/main.py | d57080a3f82ecb24b3b5a7cdea056efe6f811e9b | [] | no_license | alfonsopajueloaguirre/PythonExercises | bacaec1366b9ac5aa6d5119e4888571c5048819b | 98195035a56f3bfa30e6687bd0d7f9d9bcec8a17 | refs/heads/master | 2023-02-20T13:29:35.365089 | 2021-01-24T13:27:52 | 2021-01-24T13:27:52 | 332,299,903 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,343 | py | RUTA_DATOS = "Transversal/datos.txt"
RUTA_RESULTADOS = "Transversal/resultados.txt"
class Pais():
def __init__(self, nombre, lista_jugador):
self.nombre = nombre
self.lista_jugador = lista_jugador
class Jugador():
def __init__(self, nombre, apellido, edad, mano):
self.nombre = nombre
self.apellido = apellido
self.edad = edad
self.mano = mano
class Grupo():
def __init__(self, letra, lista_resultados):
self.letra = letra
self.lista_resultados = lista_resultados
def leer_datos():
try:
with open(RUTA_DATOS, 'r', encoding='utf-8-sig') as fichero:
lineas = fichero.read().splitlines()
paises = []
for linea in lineas:
campos = linea.split(' ')
nombre_pais = campos.pop(0)
jugadores = []
for i in range(5):
jugadores.append(Jugador(*campos[i*4:i*4+4]))
paises.append(Pais(nombre_pais, jugadores))
return paises
except:
print("Error no se encuentra el archivo de datos.txt")
def leer_resultados():
try:
with open(RUTA_RESULTADOS, "r", encoding="utf-8-sig") as fichero:
lineas = fichero.read().splitlines()
resultados = []
for linea in lineas:
| [
"46894176+alfonsopajueloaguirre@users.noreply.github.com"
] | 46894176+alfonsopajueloaguirre@users.noreply.github.com |
084e537b07673218e37ad8d96b7609c358645a1a | b5d9762757279f30affc5c9e9be87860723b975c | /lib/speed_riak.py | eebfe14eca9480687dd503b4a8b897c56cd2227f | [] | no_license | cwoodruf/sfu-cmpt415-summer2014-cloud-db-test-framework | 59c0ef69fde3e2c2e6a9732baf7368195cd20e56 | effaa38a523e1b97877a545d44e0430669031ffc | refs/heads/master | 2020-05-05T03:15:18.299849 | 2014-08-21T17:57:06 | 2014-08-21T17:57:06 | 32,189,658 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,197 | py | import riak
import nanotime
import time
testprops = {
"basic": {'n_val': 3, 'r': 1, 'pr': 1, 'w': 1, 'dw': 1, 'pw': 1, 'allow_mult': True},
"lww_all": {'n_val': 3, 'r': 'all', 'pr': 'all', 'w': 'all', 'dw': 'all', 'pw': 'all', 'allow_mult': False},
"lww_quorum": {'n_val': 3, 'r': 'quorum', 'pr': 'quorum', 'w': 'quorum', 'dw': 'quorum', 'pw': 'quorum', 'allow_mult': False},
"lww_sloppy_quorum": {'n_val': 3, 'r': 'quorum', 'pr': 1, 'w': 'quorum', 'dw': 1, 'pw': 1, 'allow_mult': False},
"crdt": {'n_val': 3, 'r': 1, 'pr': 0, 'w': 1, 'dw': 0, 'pw': 0, 'allow_mult': True},
}
initwait = 10
bucket = None
client = None
def resolver(fetched):
# this doesn't seem to return a single value:
# print max(fetched.siblings, lambda x: x.last_modified)
latest = None
for s in fetched.siblings:
if latest == None or latest.last_modified < s.last_modified:
latest = s
fetched.siblings = [latest]
return fetched.siblings
def setup(st):
global testprops, bucket, client, initwait
if 'port' in st.props:
port = st.props['port']
else:
port = 8087
print "connecting to "+st.props['host']+" on port "+str(port)
client = riak.RiakClient(host=st.props['host'], pb_port=port, protocol='pbc')
client.resolver = resolver
# properties for a bucket should be set once
bucket = client.bucket('speedtest')
if st.test == None or st.test not in testprops:
props = testprops['basic']
print "setup: using basic properties "+str(props)
else:
props = testprops[st.test]
print "setup: using custom properties for "+str(st.test)+": "+str(props)
bucket.set_properties(props)
# print "waiting "+str(initwait)+"s "
# time.sleep(initwait)
def cleanup(st):
pass
def read(props):
global bucket
# you can make secondary indexes but how do you get a range of keys?
# possible to make secondary index on keys?
if 'range' in props:
fetched = [bucket.get(str(k)) for k in range(props['range'][0], props['range'][1])]
else:
fetched = bucket.get(props['key'])
return fetched
def write(props):
global bucket
fetched = bucket.get(props['key'])
if fetched == None:
return bucket.new(props['key'],props['value'])
else:
fetched.data = props['value']
return fetched.store()
| [
"cwoodruf@sfu.ca"
] | cwoodruf@sfu.ca |
73a769860358682887712313fed38e62177e3612 | 55815c281f6746bb64fc2ba46d074ca5af966441 | /medium/299.py | 4b6e196e858ad7ee70a73d1e40dbf8f868f06bf8 | [] | no_license | brandoneng000/LeetCode | def5107b03187ad7b7b1c207d39c442b70f80fc2 | c7a42753b2b16c7b9c66b8d7c2e67b683a15e27d | refs/heads/master | 2023-08-30T23:38:04.845267 | 2023-08-30T08:42:57 | 2023-08-30T08:42:57 | 199,584,584 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 670 | py | import collections
class Solution:
def getHint(self, secret: str, guess: str) -> str:
cows = 0
bulls = 0
for i in range(len(secret)):
if secret[i] == guess[i]:
bulls += 1
secret_counter = collections.Counter(secret)
guess_counter = collections.Counter(guess)
cows = sum(secret_counter.values()) - sum((secret_counter - guess_counter).values()) - bulls
return f"{bulls}A{cows}B"
def main():
sol = Solution()
print(sol.getHint(secret = "1807", guess = "7810"))
print(sol.getHint(secret = "1123", guess = "0111"))
if __name__ == '__main__':
main() | [
"brandoneng000@gmail.com"
] | brandoneng000@gmail.com |
b512390af438cdc73eb5ce37c35ba0cdb58628f9 | 1041bfa8fe0d145ad196c93eb08b58bf17581864 | /extra/dynamic/bellmanford.py | e18b9b4e5a2bd29feee6dd131ac5fd703409473d | [] | no_license | arshadansari27/aoa | 9bc2eb15adc320a83c844aacecaf6a4e57a4a642 | 93cb4cadd8fa0dcc6ce91ebaa8b7b907d151998e | refs/heads/master | 2021-01-25T07:11:39.712833 | 2014-05-09T18:30:34 | 2014-05-09T18:30:34 | 17,389,674 | 0 | 1 | null | 2014-05-09T18:30:34 | 2014-03-04T03:54:24 | C | UTF-8 | Python | false | false | 1,670 | py | import sys
def bellman(edges, source):
distance = {}
predecessor = {}
for v in edges.keys():
if v is source:
distance[v] = 0
else:
distance[v] = sys.maxint
predecessor[v] = None
for k in edges.keys():
for i in edges.keys():
for j in edges[i].keys():
if edges[i][j] == 0 or i == j:
continue
alpha = distance[i] + edges[i][j]
#print i, j, alpha, distance[j]
if alpha < distance[j]:
distance[j] = alpha
predecessor[j] = i
print distance
for u in edges.keys():
for v in edges[u].keys():
if edges[u][v] == 0: continue
if distance[u] + edges[u][v] < distance[v]:
raise Exception("There is a negetive cycle")
print "Distance:", "[%s]" % ", ".join(["%s -> %s" % (u, v) for u, v in distance.iteritems()])
print "Predecessor:", "[%s]" % ", ".join(["%s -> %s" % (u, v) for u, v in predecessor.iteritems()])
def display(edges):
graph = [[edges[u][v] for v in xrange(1, len(edges[u]) + 1)] for u in xrange(1, len(edges.keys()) + 1)]
print "\n".join([" ".join(["%4d" % u for u in v]) for v in graph])
if __name__ == '__main__':
vertices = [x for x in range(1, 6)]
edges = {
1: {1: 0, 2: 0, 3: 6, 4: 3, 5: 0},
2: {1: 3, 2: 0, 3: 0, 4: 0, 5: 0},
3: {1: 0, 2: 0, 3: 0, 4: 2, 5: 0},
4: {1: 0, 2: 1, 3: 1, 4: 0, 5: 0},
5: {1: 0, 2: 4, 3: 0, 4: 2, 5: 0}
}
display(edges)
bellman(edges, 5)
| [
"arshadansari27@gmail.com"
] | arshadansari27@gmail.com |
ca33541472a33f07b0de2f75d8adbb76ccd1ca6d | 89239bc5be24719a990ff6158cb085478c40d265 | /DemoDB2.py | 36c261f3f65ce4dbfd5a36b1a0f2cfbf324c62ae | [] | no_license | Giyoung-93/python0405 | ac8ea3fa7bacca9010ab22bcca569d799f532858 | 97775b2b9a9bd1d8c24f2fd2ae4c123ca56a1697 | refs/heads/master | 2023-04-06T21:21:00.865797 | 2021-04-08T08:16:25 | 2021-04-08T08:16:25 | 354,692,291 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,103 | py | # DemoDB2.py
#SQLite를 사용하는 데모(로컬 데이터베이스)
import sqlite3
#처음에는 데이터베이스파일에 저장
con = sqlite3.connect("c:\\work2\\sample.db")
#SQL구문을 실행하는 것은 대부분 커서 객체
cur = con.cursor()
#저장소(테이블)을 만들기: 테이블 스키마(뼈대)
cur.execute("create table PhoneBook (Name text, PhoneNum text);")
#1건 입력
cur.execute("insert into PhoneBook values ('derick', '010-111');")
#입력 파라메터 처리(python format {0},{1})
#텍스트박스(GUI, Web Page)에서 입력을 받아서 처리
name = "gildong"
phoneNumber = "010-222"
cur.execute("insert into PhoneBook values (?, ?);", (name, phoneNumber))
#다중의 레코드(행, row)를 입력받는 경우: 2차원 행열데이터
datalist = (("tom","010-123"), ("dsp","010-567"))
cur.executemany("insert into PhoneBook values (?, ?);", datalist)
#검색
cur.execute("select * from PhoneBook;")
for row in cur:
print(row)
#커밋(작업을 정상적으로 완료. log --> db에도 기록)
#데이터를 변경(입력, 수정, 삭제)
con.commit() | [
"zxzx0520@gmail.com"
] | zxzx0520@gmail.com |
e51b95bc426d5a961262e9a81ab085a478d9b3f1 | 8a0a0485d6fde177241545408ab2289d915b5551 | /django/apps/email.py | d9447c03acd0ad3517db74e05f616d14b32a644b | [] | no_license | pabnas/pavimentos | 9af96f09b7352350c07ddfe8bb5dbccb93238cb8 | d72b641e94821c564a22783655efbba8e7cd2489 | refs/heads/main | 2023-06-24T04:15:05.597826 | 2021-07-22T00:30:00 | 2021-07-22T00:30:00 | 383,232,358 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,399 | py | from django.core.mail import EmailMultiAlternatives
from django.core.mail import EmailMessage
import img2pdf
from PIL import Image
import os
"""https://www.roytuts.com/how-to-send-attachments-with-email-using-python/"""
def Enviar_correo2(subject, to, text):
subject = subject
from_email = "pointdato@gmail.com"
to = to
text_content = text
html_content = 'This is an HTML message.'
msg = EmailMultiAlternatives(subject, text_content, from_email, [to])
msg.attach_alternative(html_content, "text/html")
msg.send(fail_silently=False)
def Enviar_correo(subject,mensaje,to,archivo=""):
msg = EmailMessage(
subject=subject,
body=mensaje,
from_email='pointdato@gmail.com',
to=to,
reply_to=['pointdato@gmail.com'],
headers={'Message-ID': 'foo'},
)
if archivo != "":
msg.attach_file(archivo)
msg.send(fail_silently=False)
def img_to_pdf(ruta):
# storing image path
img_path = ruta
# storing pdf path
pdf_path = "Factura.pdf"
# opening image
image = Image.open(img_path)
# converting into chunks using img2pdf
pdf_bytes = img2pdf.convert(image.filename)
# opening or creating pdf file
file = open(pdf_path, "wb")
# writing pdf files with chunks
file.write(pdf_bytes)
# closing image file
image.close()
# closing pdf file
file.close()
| [
"santibarry1930@gmail.com"
] | santibarry1930@gmail.com |
3b6866906136844824c0913c1cf8fbe9dee69e42 | 01bc372702bbc235c43fdd4ed684b8a328b8cc2e | /ninja_gold/apps/my_app/urls.py | 843ddc906ead8cad87afb4c707e0fb06c66cc63c | [] | no_license | ariasamandi/django_2 | 21a53ea4bacdf632e81e13c23f99d3f29986dccd | 87f5d7bf3aba45573a1d7b64c0930d26a9ee713b | refs/heads/master | 2020-03-15T02:26:34.084669 | 2018-05-09T01:16:51 | 2018-05-09T01:16:51 | 131,906,419 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 190 | py | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.index),
url(r'^process_money/(?P<loc>\w+)$', views.process),
url(r'^reset$', views.reset),
] | [
"ariasamandi@gmail.com"
] | ariasamandi@gmail.com |
3e63aa4464698d9c254acbc385a426964835fadf | f939133a4051461e83f66699542ab6c92852640f | /conditional-sentences.py | 3c8fe579505aa9ddff3458bdaa3cbbd559c15aa6 | [] | no_license | altarimig/week3-session3 | cc7e0f4fa72340f98e98a0ad54363fc49de0200f | 3634319f06e8c1e8f8e57c596bfd4758f604d1d1 | refs/heads/master | 2022-09-03T08:35:27.071142 | 2020-05-27T21:46:14 | 2020-05-27T21:46:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 362 | py | """palabras = ["gato", "perro", "ventana", "defenestrado"]
for val in palabras:
if val == "ventana":
break
print(val)
print("Final!!!")"""
##### RETO #####
lista = [1, 3, 20, 10, 50, 100, 31, 1000]
for val in lista:
if val == 3:
print(val)
if val % 2 == 0:
print(f"El valor {val} es par")
if val == 50:
break | [
"miguel.altamirano@pachaqtec.edu.pe"
] | miguel.altamirano@pachaqtec.edu.pe |
3cd07eb10c18113ece98a000337eb0ace652e3c9 | 36e8b5c6c34e1fbad37922d714c3d0dbd699d448 | /examples/01_plotting/plot_demo_more_plotting.py | 218e169a4965c897f5f7e8133d900c503c0673af | [
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference",
"BSD-2-Clause"
] | permissive | markushs/nilearn | c87da146c7109c86d5d794570e68e716c5a72bfc | 4a6012da5be83310a08bf1fc15d9189c3a0bc425 | refs/heads/master | 2020-05-02T14:07:07.143134 | 2016-06-09T09:19:07 | 2016-06-09T09:20:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,888 | py | """
More plotting tools from nilearn
================================
In this example, we demonstrate how to use plotting options from
nilearn essential in visualizing brain image analysis results.
We emphasize the use of parameters such as `display_mode` and `cut_coords`
with plotting function :func:`nilearn.plotting.plot_stat_map`. Also,
we show how to use features such as `add_edges`, `add_contours` and
`add_markers`, which are essential when visualizing regions of interest
or mask images overlaid on a subject-specific anatomical/EPI image.
The display features shown here are inherited from the
:class:`nilearn.plotting.displays.OrthoSlicer` class.
The parameter `display_mode` is used to draw brain slices along given
specific directions, where directions can be one of 'ortho',
'x', 'y', 'z', 'xy', 'xz', 'yz'. whereas parameter `cut_coords`
is used to specify a limited number of slices to visualize along given
specific slice direction. The parameter `cut_coords` can also be used
to draw the specific cuts in the slices by giving its particular
coordinates in MNI space accordingly with particular slice direction.
This helps us point to the specific location of activation in the brain slices.
See :ref:`plotting` for more details.
"""
###############################################################################
# First, we retrieve data from the general-purpose datasets shipped with
# nilearn (downloaded on first use and then cached locally)
from nilearn import datasets
# Haxby dataset: provides a subject anatomical image, 4D EPI (functional)
# images and a ventral-temporal mask image
haxby_dataset = datasets.fetch_haxby(n_subjects=1)
haxby_anat_filename = haxby_dataset.anat[0]
haxby_mask_filename = haxby_dataset.mask_vt[0]
haxby_func_filename = haxby_dataset.func[0]
# Localizer dataset: provides statistical contrast maps; we keep the
# "left vs right button press" map of the second fetched subject (index 1)
localizer_dataset = datasets.fetch_localizer_contrasts(
    ["left vs right button press"],
    n_subjects=2,
    get_anats=True)
localizer_anat_filename = localizer_dataset.anats[1]
localizer_cmap_filename = localizer_dataset.cmaps[1]
########################################
# Now, we show from here how to visualize the retrieved datasets using plotting
# tools from nilearn.
from nilearn import plotting
########################################
# Visualizing the contrast map in the three orthogonal views - 'sagittal',
# 'coronal' and 'axial' - with the cut position in each view chosen
# manually, and with a colorbar shown on the right side of the plots.
# The first argument is a path to the filename of a contrast map, the
# optional argument `display_mode` is given as string 'ortho' to visualize
# the map in the three directions x, y, z, and the optional `cut_coords`
# argument is here a list of integers giving the coordinate of each cut
# in the order [x, y, z]. By default the `colorbar` argument is set to
# True in plot_stat_map.
plotting.plot_stat_map(localizer_cmap_filename, display_mode='ortho',
                       cut_coords=[36, -27, 60],
                       title="display_mode='ortho', cut_coords=[36, -27, 60]")

########################################
# Visualizing the contrast map in a single 'axial' view, with the number
# of slices limited to at most 5; the coordinates at which to cut the
# slices are then selected automatically.
# Here the `display_mode` argument is the string 'z' (axial direction) and
# `cut_coords` is the integer 5 (not a list), meaning that at most 5
# automatically-placed cuts are drawn.
plotting.plot_stat_map(localizer_cmap_filename, display_mode='z', cut_coords=5,
                       title="display_mode='z', cut_coords=5")

########################################
# Visualizing the contrast map in another single view, 'sagittal', and
# showing how to select two slices of particular interest manually by
# giving the coordinate at which to cut each slice.
# For the sagittal view, `display_mode` is the string 'x' and the
# coordinates are given as integers in a list.
plotting.plot_stat_map(localizer_cmap_filename, display_mode='x',
                       cut_coords=[-36, 36],
                       title="display_mode='x', cut_coords=[-36, 36]")

########################################
# Now the contrast map is visualized in the 'coronal' view with a single
# cut whose coordinate is located automatically.
# For the coronal view, `display_mode` is given as string 'y' and
# `cut_coords` as the integer 1 (not a list) to request a single cut.
plotting.plot_stat_map(localizer_cmap_filename, display_mode='y', cut_coords=1,
                       title="display_mode='y', cut_coords=1")

########################################
# Now the contrast map is shown without a colorbar on the right side.
# The argument `colorbar` should be given as False to draw the plot
# without a colorbar.
plotting.plot_stat_map(localizer_cmap_filename, display_mode='z',
                       cut_coords=1, colorbar=False,
                       title="display_mode='z', cut_coords=1, colorbar=False")

########################################
# Now we visualize the contrast map with two views - 'sagittal' and
# 'axial' - and the coordinate of the cut in each view is given manually.
# The argument display_mode='xz' combines 'x' (sagittal) and 'z' (axial).
# The `cut_coords` argument must match the number of requested views, so
# two integers are given in a list, one cut per view.
plotting.plot_stat_map(localizer_cmap_filename, display_mode='xz',
                       cut_coords=[36, 60],
                       title="display_mode='xz', cut_coords=[36, 60]")

########################################
# Visualizing the contrast map with 'coronal' and 'sagittal' views, the
# cut coordinates again being given manually, one per view.
# display_mode='yx' combines the coronal ('y') and sagittal ('x') views;
# the coordinates follow the order of the letters in `display_mode`.
plotting.plot_stat_map(localizer_cmap_filename, display_mode='yx',
                       cut_coords=[-27, 36],
                       title="display_mode='yx', cut_coords=[-27, 36]")

########################################
# Visualizing the contrast map with 'coronal' and 'axial' views, with
# manual positioning of the cut coordinate in each view.
plotting.plot_stat_map(localizer_cmap_filename, display_mode='yz',
                       cut_coords=[-27, 60],
                       title="display_mode='yz', cut_coords=[-27, 60]")
###############################################################################
# In the second part, we demonstrate the various add_* features of
# nilearn display objects; each of these is helpful when projecting brain
# imaging results for further interpretation.

# Import the image processing tools for basic processing of the
# functional brain image
from nilearn import image

# Compute the voxel-wise mean of the functional image across the time
# dimension; mean_haxby_img now holds a 3D functional image
mean_haxby_img = image.mean_img(haxby_func_filename)

########################################
# Now let us see how to use `add_edges`, a method useful for checking
# coregistration by overlaying the anatomical image as edges (red) on top
# of the mean functional image (background), both from the same subject.

# First, we call the `plot_anat` plotting function with a background image
# as first argument - in this case the mean fMRI image.
display = plotting.plot_anat(mean_haxby_img, title="add_edges")

# We can now use the add_edges method of the returned display object.
# Its first argument is the anatomical image; by default the edges are
# drawn in red 'r' (green 'g' and blue 'b' are other possible colors).
display.add_edges(haxby_anat_filename)

########################################
# Plotting the outline of a mask (red) on top of the mean EPI image with
# `add_contours`. This method is useful for region-specific
# interpretation of brain images.

# As seen before, we call `plot_anat` with a background image as first
# argument - again the mean fMRI image - and the `cut_coords` argument as
# a list to cut manually at coordinates inside the masked brain region.
display = plotting.plot_anat(mean_haxby_img, title="add_contours",
                             cut_coords=[28, -34, -22])
# Now use `add_contours` on the display object, with the path to a mask
# image from the Haxby dataset as first argument, the `levels` argument
# as a list of values selecting the contour level to display, and the
# `colors` argument given as 'r' to draw the outline in red.
# See help on matplotlib.pyplot.contour to use more options with this
# method.
display.add_contours(haxby_mask_filename, levels=[0.5], colors='r')
########################################
# Plotting outline of the mask (blue) with color fillings using same method
# `add_contours`.
display = plotting.plot_anat(mean_haxby_img,
title="add_contours with filled=True",
cut_coords=[28, -34, -22])
# By default, no color fillings will be shown using `add_contours`. To see
# contours with color fillings use argument filled=True. contour colors are
# changed to blue 'b' with alpha=0.7 sets the transparency of color fillings.
# See help on matplotlib.pyplot.contourf to use more options given that filled
# should be True
display.add_contours(haxby_mask_filename, filled=True, alpha=0.7,
levels=[0.5], colors='b')
#########################################
# Plotting seed regions of interest as spheres using new feature `add_markers`
# with MNI coordinates of interest.
display = plotting.plot_anat(mean_haxby_img, title="add_markers",
cut_coords=[28, -34, -22])
# Coordinates of seed regions should be specified in first argument and second
# argument `marker_color` denotes color of the sphere in this case yellow 'y'
# and third argument `marker_size` denotes size of the sphere
coords = [(28, -34, -22)]
display.add_markers(coords, marker_color='y', marker_size=100)
###############################################################################
# Finally, we save the plots to file in two different ways.
# Contrast maps plotted with `plot_stat_map` can be saved using the built-in
# `output_file` parameter, given as a filename + extension string. Valid
# extensions are .png, .pdf and .svg.
plotting.plot_stat_map(localizer_cmap_filename,
title='Using plot_stat_map output_file',
output_file='plot_stat_map.png')
########################################
# Another way of saving plots is using 'savefig' option from display object
display = plotting.plot_stat_map(localizer_cmap_filename,
title='Using display savefig')
display.savefig('plot_stat_map_from_display.png')
# In non-interactive settings make sure you close your displays
display.close()
plotting.show()
| [
"dkamalakarreddy@gmail.com"
] | dkamalakarreddy@gmail.com |
ccaac9073472d8c60c37e12201d498a543b585a0 | b5b601dec6ea846ef16de5759ea53f7eccf28f5f | /usingsqlite.py | 110b6a0283ac070aa90e95f6d5576d515f0895b7 | [] | no_license | cool51/cs50project1 | cf4b77571ce15ec0ba66d488f50fe54b2f0ae8ad | 2a76a461682d861d0908d4b8647a98f8a18f562a | refs/heads/master | 2022-04-26T16:35:45.277641 | 2020-04-27T10:22:27 | 2020-04-27T10:22:27 | 258,726,482 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 693 | py | from flask import Flask,render_template,request
from sqlalchemy import create_engine
db = create_engine('sqlite:///newemployees.sqlite')
# echo output to console
db.echo = True
conn = db.connect()
conn.execute("""
CREATE TABLE newemployee12 (id INTEGER PRIMARY KEY,name STRING(100) NOT NULL,birthday DATE NOT NULL)""")
conn.execute("INSERT INTO employee VALUES (NULL, 'marcos mango',('1990-09-06'));")
conn.execute("INSERT INTO employee VALUES (NULL, 'rosie rinn',('1980-09-06') );")
conn.execute("INSERT INTO employee VALUES (NULL, 'mannie moon',('1970-07-06') );")
for row in conn.execute("SELECT * FROM employee"):
print (row)
# give connection back to the connection pool
conn.close() | [
"noreply@github.com"
] | cool51.noreply@github.com |
031e39b6c9583ff76a7cfdfdd76a02cb702cf810 | 0e7c5d081b113463d59728de827239e11967bf61 | /base/serializers.py | 91846fe25a265d6cc88db163ccd86c7205bf4a63 | [] | no_license | fhrryDeveloper/React-Django-Shope-backend | cab1d54e718a7a1186b182ec936eea8f1d2481af | d89892454cda060a743612e8d206d11aea26fa05 | refs/heads/main | 2023-04-08T01:02:33.801097 | 2021-04-21T23:55:05 | 2021-04-21T23:55:05 | 360,339,679 | 11 | 0 | null | 2021-04-21T23:58:32 | 2021-04-21T23:58:31 | null | UTF-8 | Python | false | false | 3,799 | py | from rest_framework import serializers
from django.contrib.auth.models import User
from rest_framework_simplejwt.tokens import RefreshToken
from .models import Shop, Product, User, Order, OrderItem, ShippingAddress, Review, Review_Shop
# UserSerializer
class UserSerializer(serializers.ModelSerializer):
    """Read-only representation of a user account.

    ``_id``, ``name`` and ``isAdmin`` are computed fields derived from the
    underlying Django ``User`` instance.
    """
    name = serializers.SerializerMethodField(read_only=True)
    _id = serializers.SerializerMethodField(read_only=True)
    isAdmin = serializers.SerializerMethodField(read_only=True)

    class Meta:
        model = User
        fields = ['id', '_id', 'username', 'email', 'name', 'isAdmin']

    def get__id(self, obj):
        # Expose the primary key under the ``_id`` alias as well.
        return obj.id

    def get_isAdmin(self, obj):
        # Django's staff flag doubles as this API's "admin" flag.
        return obj.is_staff

    def get_name(self, obj):
        # Fall back to the e-mail address when no first name was set.
        name = obj.first_name
        if name == '':
            name = obj.email
        return name
# UserSerializerWithToken
class UserSerializerWithToken(UserSerializer):
    """UserSerializer extended with a freshly generated JWT access token."""
    token = serializers.SerializerMethodField(read_only=True)

    class Meta:
        model = User
        fields = ['id', '_id', 'username', 'email', 'name', 'isAdmin', 'token']

    def get_token(self, obj):
        # Issue a new refresh token for the user and return its access part.
        token = RefreshToken.for_user(obj)
        return str(token.access_token)
#########################################################################
# ProductSerializer
class ProductSerializer(serializers.ModelSerializer):
    """Serialize a Product with all model fields plus its nested reviews."""
    reviews = serializers.SerializerMethodField(read_only=True)

    class Meta:
        model = Product
        fields = '__all__'

    def get_reviews(self, obj):
        # Reverse FK lookup: all Review rows pointing at this product.
        reviews = obj.review_set.all()
        serializer = ReviewSerializer(reviews, many=True)
        return serializer.data
############ #############
# ShopSerializer
class ShopSerializer(serializers.ModelSerializer):
    """Serialize a Shop with all model fields plus its nested shop reviews."""
    reviews = serializers.SerializerMethodField(read_only=True)

    class Meta:
        model = Shop
        fields = '__all__'

    def get_reviews(self, obj):
        # Reverse FK lookup: all Review_Shop rows pointing at this shop.
        reviews = obj.review_shop_set.all()
        serializer = Review_ShopSerializer(reviews, many=True)
        return serializer.data
############################################################################
# ReviewSerializer
class ReviewSerializer(serializers.ModelSerializer):
    """Flat serialization of a product Review (all model fields)."""
    class Meta:
        model = Review
        fields = '__all__'
# Review_ShopSerializer
class Review_ShopSerializer(serializers.ModelSerializer):
    """Flat serialization of a shop Review_Shop (all model fields)."""
    class Meta:
        model = Review_Shop
        fields = '__all__'
###############################################################################
# ShippingAddressSerializer
class ShippingAddressSerializer(serializers.ModelSerializer):
    """Flat serialization of a ShippingAddress (all model fields)."""
    class Meta:
        model = ShippingAddress
        fields = '__all__'
# OrderItemSerializer
class OrderItemSerializer(serializers.ModelSerializer):
    """Flat serialization of an OrderItem (all model fields)."""
    class Meta:
        model = OrderItem
        fields = '__all__'
# OrderSerializer
class OrderSerializer(serializers.ModelSerializer):
    """Serialize an Order together with its items, address and user."""
    orderItems = serializers.SerializerMethodField(read_only=True)
    shippingAddress = serializers.SerializerMethodField(read_only=True)
    user = serializers.SerializerMethodField(read_only=True)

    class Meta:
        model = Order
        fields = '__all__'

    def get_orderItems(self, obj):
        # Reverse FK lookup: every OrderItem row pointing at this order.
        items = obj.orderitem_set.all()
        serializer = OrderItemSerializer(items, many=True)
        return serializer.data

    def get_shippingAddress(self, obj):
        # A shipping address may not exist yet; the reverse one-to-one
        # accessor raises in that case.  Report False then, preserving the
        # original best-effort behaviour, but avoid the original bare
        # ``except:`` so system-exiting exceptions are no longer swallowed.
        try:
            address = ShippingAddressSerializer(
                obj.shippingaddress, many=False).data
        except Exception:
            address = False
        return address

    def get_user(self, obj):
        user = obj.user
        serializer = UserSerializer(user, many=False)
        return serializer.data
################################################################################ | [
"69484492+topdeveloper0729@users.noreply.github.com"
] | 69484492+topdeveloper0729@users.noreply.github.com |
610dffc1617cd22bd7b8e889c292a9d1ef1e3346 | 677002b757c0a1a00b450d9710a8ec6aeb9b9e9a | /tiago_public_ws/build/tiago_bringup/catkin_generated/pkg.develspace.context.pc.py | dd49ac14d12608a0d4daa23f54bf5fb2b0e9670f | [] | no_license | mrrocketraccoon/tiago_development | ce686c86459dbfe8623aa54cf4279021342887fb | a0539bdcf21b67ab902a4649b516dcb929c54042 | refs/heads/main | 2023-06-16T19:39:33.391293 | 2021-07-08T21:20:03 | 2021-07-08T21:20:03 | 384,249,894 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 393 | py | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "tiago_bringup"
PROJECT_SPACE_DIR = "/tiago_public_ws/devel/.private/tiago_bringup"
PROJECT_VERSION = "2.0.58"
| [
"ricardoxcm@hotmail.com"
] | ricardoxcm@hotmail.com |
4af74a933a0a3e003647d10696dc9afb71b9e739 | 080c13cd91a073457bd9eddc2a3d13fc2e0e56ae | /MY_REPOS/awesome-4-new-developers/tensorflow-master/tensorflow/python/framework/experimental/math_ops.py | bb168de21996ba1f8aa825afb66054bf16c1f338 | [
"Apache-2.0"
] | permissive | Portfolio-Projects42/UsefulResourceRepo2.0 | 1dccc8961a09347f124d3ed7c27c6d73b9806189 | 75b1e23c757845b5f1894ebe53551a1cf759c6a3 | refs/heads/master | 2023-08-04T12:23:48.862451 | 2021-09-15T12:51:35 | 2021-09-15T12:51:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,746 | py | # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Experimental impl for gen_math_ops.py using unified APIs, for testing."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework.experimental import _math_ops
from tensorflow.python.framework.experimental import context_stack as context
def add(a, b, name=None):
  """Return a + b via the unified-API backend in the default context."""
  ctx = context.get_default()
  return _math_ops.add(ctx, a, b, name)
def mat_mul(a, b, name=None):
  """Return the matrix product of a and b in the default context."""
  ctx = context.get_default()
  return _math_ops.mat_mul(ctx, a, b, name)
def neg(a, name=None):
  """Return the negation of a in the default context."""
  ctx = context.get_default()
  return _math_ops.neg(ctx, a, name)
def sub(a, b, name=None):
  """Return a - b via the unified-API backend in the default context."""
  ctx = context.get_default()
  return _math_ops.sub(ctx, a, b, name)
def mul(a, b, name=None):
  """Return a * b via the unified-API backend in the default context."""
  ctx = context.get_default()
  return _math_ops.mul(ctx, a, b, name)
def log1p(a, name=None):
  """Return log(1 + a) in the default context."""
  ctx = context.get_default()
  return _math_ops.log1p(ctx, a, name)
def div_no_nan(a, b, name=None):
  """Return a / b, with the backend's "no NaN" division semantics."""
  ctx = context.get_default()
  return _math_ops.div_no_nan(ctx, a, b, name)
| [
"bryan.guner@gmail.com"
] | bryan.guner@gmail.com |
af978b8588e00a513c8060288cad8f31eb5090f7 | b482937e2b452e8ff40ecaba349ba56214c795c8 | /Regression/PolynomialRegression/polynomial_regression.py | ed2f4a9a98a32e9de7bbe1e023633f2d122de5a3 | [] | no_license | machine1010/Machine-Learning | 93bed7002c17aa5bd14633bf125447708dc37845 | 41710af4153c08bccc1fbc8baad36c50e0393c23 | refs/heads/master | 2022-10-09T09:47:04.941382 | 2020-06-08T20:22:26 | 2020-06-08T20:22:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,780 | py | import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
def main():
    """Fit a degree-4 polynomial regression on the Position_Salaries data,
    plot the fitted curve against the raw points and predict one salary."""
    # Step 1: data preprocessing - read the CSV dataset
    print("Reading the data...")
    dataset = pd.read_csv("../../DataSets/PolynomialRegression/Position_Salaries.csv")
    print("Dataset contains")
    print(dataset.count())

    # Feature = position level (middle columns), outcome = salary (last column)
    print("Extracting features and outcome from dataset...")
    feature = dataset.iloc[:, 1:-1].values
    outcome = dataset.iloc[:, -1].values

    # Expand the feature into polynomial terms up to degree 4
    print("Converting the feature into polynomial form...")
    from sklearn.preprocessing import PolynomialFeatures
    polynomial_features = PolynomialFeatures(degree=4)
    polynomial_feature = polynomial_features.fit_transform(feature)

    # Step 2: fit an ordinary linear model on the polynomial terms
    print("Training the model")
    from sklearn.linear_model import LinearRegression
    regressor = LinearRegression()
    regressor.fit(polynomial_feature, outcome)

    # Dense grid over the feature range to draw a smooth prediction curve
    resolute_feature = np.arange(min(feature), max(feature), 0.001)
    resolute_feature = resolute_feature.reshape((len(resolute_feature), 1))

    # Scatter the raw data (red) and overlay the fitted curve (black)
    plt.scatter(feature, outcome, color='red')
    plt.plot(resolute_feature, regressor.predict(polynomial_features.fit_transform(resolute_feature)), color='black')
    plt.show()

    # Predict the salary for a position level of 6.5
    # (the variable holds the input level, not the salary itself)
    salary_to_predict = np.array(6.5, dtype=float).reshape(1, -1)
    print("The predicted salary is", end=' : ')
    print(regressor.predict(polynomial_features.fit_transform(salary_to_predict)))
# Run the demo only when executed as a script.
if __name__ == '__main__':
    main()
| [
"iftikharliaquat1995@gmail.com"
] | iftikharliaquat1995@gmail.com |
29d682db283e2dc08722ab6dd840796f0e982a94 | e67a0139092d3389fea0075de9ecf12ab209649f | /scripts/addons_extern/AF_3dview_specials/VIEW3D_MT_armature_specials.py | 47b9a6cae734433af79d7e3a2b6eef5aca78063f | [] | no_license | amagnoni/blenderpython | 9fe864d287f992b7cd71cd584fca4a501a6ac954 | d2fec1a35369b7b171e2f0999196b87e242e08f3 | refs/heads/master | 2021-01-18T11:28:55.372759 | 2015-10-17T20:16:57 | 2015-10-17T20:16:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,882 | py | # 3Dビュー > アーマチュア編集モード > 「W」キー
import bpy
import re
################
# オペレーター #
################
class CreateMirror(bpy.types.Operator):
    """Duplicate the selected bones mirrored across the armature's X axis."""
    bl_idname = "armature.create_mirror"
    bl_label = "Select bones mirroring."
    bl_description = "Mirrored at any axes selected bone."
    bl_options = {'REGISTER', 'UNDO'}

    def execute(self, context):
        # Only meaningful on an armature object in edit mode.
        obj = context.active_object
        if (obj.type == "ARMATURE"):
            if (obj.mode == "EDIT"):
                # Save the 3D-view state that is temporarily overridden below.
                preCursorCo = context.space_data.cursor_location[:]
                prePivotPoint = context.space_data.pivot_point
                preUseMirror = context.object.data.use_mirror_x
                # Mirror around the origin with X-mirror editing enabled.
                context.space_data.cursor_location = (0, 0, 0)
                context.space_data.pivot_point = 'CURSOR'
                context.object.data.use_mirror_x = True
                selectedBones = context.selected_bones[:]
                # Tag names with side suffixes, duplicate, flip across X and
                # rename the copies to the opposite side.
                bpy.ops.armature.autoside_names(type='XAXIS')
                bpy.ops.armature.duplicate()
                axis = (True, False, False)
                bpy.ops.transform.mirror(constraint_axis=axis)
                bpy.ops.armature.flip_names()
                # Collect the bones created by the duplicate operator (those
                # whose names do not match any originally selected bone).
                newBones = []
                for bone in context.selected_bones:
                    for pre in selectedBones:
                        if (bone.name == pre.name):
                            break
                    else:
                        newBones.append(bone)
                # Re-select the originals and recalculate their roll ...
                bpy.ops.armature.select_all(action='DESELECT')
                for bone in selectedBones:
                    bone.select = True
                    bone.select_head = True
                    bone.select_tail = True
                bpy.ops.transform.transform(mode='BONE_ROLL', value=(0, 0, 0, 0))
                # ... then leave only the new mirrored bones selected.
                bpy.ops.armature.select_all(action='DESELECT')
                for bone in newBones:
                    bone.select = True
                    bone.select_head = True
                    bone.select_tail = True
                # Restore the saved 3D-view state.
                context.space_data.cursor_location = preCursorCo[:]
                context.space_data.pivot_point = prePivotPoint
                context.object.data.use_mirror_x = preUseMirror
            else:
                self.report(type={"ERROR"}, message="Please perform in edit mode")
        else:
            self.report(type={"ERROR"}, message="Armature object is not")
        return {'FINISHED'}
class CopyBoneName(bpy.types.Operator):
    """Copy the active bone's name to the system clipboard."""
    bl_idname = "armature.copy_bone_name"
    bl_label = "Copy to Clipboard bone name"
    bl_description = "Copies the Clipboard the name of active bone"
    bl_options = {'REGISTER', 'UNDO'}

    # When True, prefix the bone name with the object name ("Object:Bone").
    isObject = bpy.props.BoolProperty(name="Object name", default=False)

    def execute(self, context):
        if (self.isObject):
            context.window_manager.clipboard = context.active_object.name + ":" + context.active_bone.name
        else:
            context.window_manager.clipboard = context.active_bone.name
        return {'FINISHED'}
class RenameBoneRegularExpression(bpy.types.Operator):
    """Regex search-and-replace over bone names (selected or all)."""
    bl_idname = "armature.rename_bone_regular_expression"
    bl_label = "Replace the bone names in regular expressions"
    bl_description = "In the bone name (of choice) to match regular expression replace"
    bl_options = {'REGISTER', 'UNDO'}

    # When True, rename every bone of the armature, not just the selection.
    isAll = bpy.props.BoolProperty(name="Including non-select all", default=False)
    # Regex pattern to search for and its replacement text.
    pattern = bpy.props.StringProperty(name="Replacement front (in regular expressions)", default="^")
    repl = bpy.props.StringProperty(name="Replacement", default="@")

    def execute(self, context):
        obj = context.active_object
        if (obj.type == "ARMATURE"):
            if (obj.mode == "EDIT"):
                bones = context.selected_bones
                if (self.isAll):
                    bones = obj.data.bones
                for bone in bones:
                    # Skip bones whose name makes re.sub raise (e.g. a
                    # pattern that is invalid for this input).
                    try:
                        new_name = re.sub(self.pattern, self.repl, bone.name)
                    except:
                        continue
                    bone.name = new_name
            else:
                self.report(type={"ERROR"}, message="Please perform in edit mode")
        else:
            self.report(type={"ERROR"}, message="Armature object is not")
        return {'FINISHED'}
class RenameOppositeBone(bpy.types.Operator):
    """Rename X-mirrored counterpart bones to the paired .L/.R name.

    For every selected bone, looks for a bone whose head and tail mirror the
    selection across the X axis (within ``threshold`` per axis) and gives it
    the matching opposite-side name.
    """
    bl_idname = "armature.rename_opposite_bone"
    bl_label = "Bones in the opposite position, rename."
    bl_description = "Bone is located opposite the X axis selection in bone \"1.R 1 longs.L \' of so versus the"
    bl_options = {'REGISTER', 'UNDO'}

    # Maximum per-axis distance for two bone ends to count as mirrored.
    threshold = bpy.props.FloatProperty(name="At the threshold", default=0.00001, min=0, soft_min=0, step=0.001, precision=5)

    def execute(self, context):
        obj = context.active_object
        if (obj.type == "ARMATURE"):
            if (obj.mode == "EDIT"):
                arm = obj.data
                # Tag the selection with side suffixes based on X position.
                bpy.ops.armature.autoside_names(type='XAXIS')
                selectedBones = context.selected_bones[:]
                bpy.ops.armature.select_all(action='DESELECT')
                bpy.ops.object.mode_set(mode='OBJECT')
                threshold = self.threshold
                for bone in selectedBones:
                    bone = arm.bones[bone.name]
                    # Mirror the bone's endpoints across the X axis.
                    head = (-bone.head_local[0], bone.head_local[1], bone.head_local[2])
                    tail = (-bone.tail_local[0], bone.tail_local[1], bone.tail_local[2])
                    for b in arm.bones:
                        # Accept the first bone whose head AND tail both lie
                        # within the threshold box around the mirrored ends.
                        if ( (head[0]-threshold) <= b.head_local[0] <= (head[0]+threshold)):
                            if ( (head[1]-threshold) <= b.head_local[1] <= (head[1]+threshold)):
                                if ( (head[2]-threshold) <= b.head_local[2] <= (head[2]+threshold)):
                                    if ( (tail[0]-threshold) <= b.tail_local[0] <= (tail[0]+threshold)):
                                        if ( (tail[1]-threshold) <= b.tail_local[1] <= (tail[1]+threshold)):
                                            if ( (tail[2]-threshold) <= b.tail_local[2] <= (tail[2]+threshold)):
                                                b.name = bone.name
                                                b.select = True
                                                b.select_head = True
                                                b.select_tail = True
                                                break
                bpy.ops.object.mode_set(mode='EDIT')
                # Flip the copied names to the opposite side suffix.
                bpy.ops.armature.flip_names()
            else:
                self.report(type={"ERROR"}, message="Please perform in edit mode")
        else:
            self.report(type={"ERROR"}, message="Armature object is not")
        # Bug fix: the original contained two consecutive
        # ``return {'FINISHED'}`` statements; the second was unreachable
        # dead code and has been removed.
        return {'FINISHED'}
# Menu
def menu(self, context):
    """Append this add-on's entries to the armature Specials (W) menu."""
    self.layout.separator()
    self.layout.prop(context.object.data, "use_mirror_x", icon="PLUGIN", text="X axis mirror edit")
    self.layout.operator(CreateMirror.bl_idname, icon="PLUGIN")
    self.layout.operator(RenameOppositeBone.bl_idname, icon="PLUGIN")
    self.layout.separator()
    self.layout.operator(CopyBoneName.bl_idname, icon="PLUGIN")
    self.layout.operator(RenameBoneRegularExpression.bl_idname, icon="PLUGIN")
| [
"meta.androcto1@gmail.com"
] | meta.androcto1@gmail.com |
8202d1b489625da9c60418e27fe1e6c60abb3afa | 1dfecf561529551a67a17d07a345cd61377aa36f | /edblog/urls.py | a88b2529f11b062d314aee7f5d5bcab9f7ce2bac | [] | no_license | emmanueldd/DjangoBlog | 7174a95e06792e412203c56bfbd193f439ba870b | 05aa56f8a00d0cb23e0953f5818190bcad791a98 | refs/heads/master | 2021-01-10T12:38:12.696188 | 2015-12-18T20:37:35 | 2015-12-18T20:37:35 | 48,249,574 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,253 | py | """edblog URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Import the include() function: from django.conf.urls import url, include
3. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import url, include, patterns
from django.contrib import admin
from . import settings
from blog import views, feed
# URL routes for the blog project.
urlpatterns = [
    url(r'^admin/', admin.site.urls),
    # Blog front page (list of entries).
    url(r'^$', views.BlogIndex.as_view(), name="index"),
    # RSS feed of the latest posts.
    url(r'^feed/$', feed.LatestPosts(), name="feed"),
    # A single entry, addressed by its slug.
    url(r'^entry/(?P<slug>\S+)$', views.BlogDetail.as_view(), name="entry_detail"),
    url('^markdown/', include( 'django_markdown.urls')),
    # Account registration and login.
    url(r'^register/$', views.register, name='register'),
    url(r'^login/$', views.user_login, name='login'),
]
| [
"e.derozin@gmail.com"
] | e.derozin@gmail.com |
c3b0806a1a10dc677613be45805ff9d836e2ce5b | 3d304f5b2f63edf882f0ca7f8164ac9a40300709 | /caffe2onnx/src/op_layer_info.py | e844fd2a3c8ee7a7370230a94b9b4c8a1f288e92 | [
"BSD-3-Clause"
] | permissive | fangichao/caffe2onnx | 107251660387cf0373a3528bdbb8958053428012 | 865730aed5e80474a224d81a8faebc788d6af1b4 | refs/heads/main | 2023-05-06T06:48:23.269035 | 2021-05-29T15:01:00 | 2021-05-29T15:01:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,383 | py | from onnx import TensorProto
from caffe2onnx.proto import caffe_upsample_pb2 as caffe_pb2
# caffe layers
Layer_CONCAT = caffe_pb2.V1LayerParameter.CONCAT #3
Layer_CONVOLUTION = caffe_pb2.V1LayerParameter.CONVOLUTION #4
Layer_DROPOUT = caffe_pb2.V1LayerParameter.DROPOUT #6
Layer_INNER_PRODUCT = caffe_pb2.V1LayerParameter.INNER_PRODUCT #14
Layer_LRN = caffe_pb2.V1LayerParameter.LRN #15
Layer_POOLING = caffe_pb2.V1LayerParameter.POOLING #17
Layer_RELU = caffe_pb2.V1LayerParameter.RELU #18
Layer_SOFTMAX = caffe_pb2.V1LayerParameter.SOFTMAX #20
Layer_ELTWISE = caffe_pb2.V1LayerParameter.ELTWISE #25
Layer_UPSAMPLE = caffe_pb2.V1LayerParameter.UPSAMPLE #40
op_pname={"Conv":["_W","_b"],
"BatchNorm":["_mean","_var"],
"Scale":["_scale","_b"],
"Reshape":["_shape"],
"InnerProduct":["_W","_B"],
"ConvTranspose":["_W","_b"],
"Upsample":["_Scale"],
"PRelu":["_slope"]
}
op_ptype={"Conv":[TensorProto.FLOAT,TensorProto.FLOAT],
"BatchNorm":[TensorProto.FLOAT,TensorProto.FLOAT],
"Scale":[TensorProto.FLOAT,TensorProto.FLOAT],
"Reshape":[TensorProto.INT64],
"InnerProduct":[TensorProto.FLOAT,TensorProto.FLOAT],
"ConvTranspose":[TensorProto.FLOAT, TensorProto.FLOAT],
"Upsample":[TensorProto.FLOAT],
"PRelu":[TensorProto.FLOAT]
} | [
"dmc5mod@yandex.ru"
] | dmc5mod@yandex.ru |
22ecb6d18655d8918bf94146342aeafbeebf2a4c | 7d98d6885d7bcc4d55be9acbbdda942b68c5cb9a | /generate_qsub.py | 6ec396e2f8569f7f66d68f4e789ced8ea3432140 | [] | no_license | ThibaultLatrille/RedQueen | 189cda344f31cd286b35a739fffad4f93c14dbad | 172a03c327a95dce567842e17fcb4a99aa99d1c4 | refs/heads/master | 2022-05-31T21:13:33.993411 | 2022-03-22T13:42:16 | 2022-03-22T13:42:16 | 50,335,290 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,052 | py | import os
dir = "./qsub"
i = 0
os.system('rm ' + dir + "/*")
params = ["-u 1.0 -v 10.0 -f power -a 0.01",
"-u 1.0 -v 10.0 -f poisson -a 10.0",
"-u 10.0 -v 10.0 -f poisson -a 10.0",
"-u 10.0 -v 1.0 -f poisson -a 10.0"]
for c, t, s in [[32, 100, 64], [32, 50, 32], [16, 30, 16], [8, 60, 32]]:
for param in params:
i += 1
file_name = "redqueen{}".format(i)
qsub = open(dir + "/{}.pbs".format(file_name), 'w')
qsub.writelines("#!/bin/bash\n")
qsub.writelines("#\n")
qsub.writelines("#PBS -q q1day\n")
qsub.writelines("#PBS -l nodes=1:ppn={0},mem=4gb\n".format(c))
qsub.writelines("#PBS -o /pandata/tlatrill/out_err/out{}\n".format(file_name))
qsub.writelines("#PBS -e /pandata/tlatrill/out_err/err" + file_name + "\n")
qsub.writelines("cd /panhome/tlatrill/RedQueen\n")
command = "python3 redQueen.py {0} -c {1} -t {2} -r 4 -s {3}".format(param, c, t, s)
qsub.writelines(command)
qsub.close()
print('Job completed')
| [
"thibault.latrille@ens-lyon.fr"
] | thibault.latrille@ens-lyon.fr |
32f86ace412325cb24bd5e915fe22f678c215b1b | 985f130e562e18d3106e2b641aece2dacfd348ee | /src/primal/contrib/flask/exceptions.py | f3a102a9af1b439c6745111ace5acb464039c302 | [] | no_license | voidfiles/primal | dcb71276ef1a2ba0522b7dc1c2b59484b4341fca | 5dbcc336abd9533e24530739fe273a686a2252e4 | refs/heads/master | 2021-05-14T02:10:03.291503 | 2018-01-07T20:00:58 | 2018-01-07T20:00:58 | 116,587,383 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 602 | py |
def format_error_message(message, info, slug):
return {
'error_message': message,
'error_info': info,
'error_slug': slug,
}
class APIException(Exception):
status_code = 400
def __init__(self, message, status_code=None, slug="non-specific",
info=None):
Exception.__init__(self)
self.message = message
if status_code is not None:
self.status_code = status_code
self.info = info
self.slug = slug
def to_dict(self):
return format_error_message(self.message, self.info, self.slug)
| [
"akessinger@bepress.com"
] | akessinger@bepress.com |
14258aac0b0a7e639801a834cddbdf0089e45ea8 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_240/ch140_2020_04_01_19_19_15_085921.py | 4c86827b303c447f074e6de350d868de44996204 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 204 | py | def faixa_notas(notas):
a = 0
b = 0
c = 0
for i in notas:
if i < 5:
a += 1
elif i <= 7:
b += 1
else:
c += 1
return [a ,b ,c] | [
"you@example.com"
] | you@example.com |
e9f2ba61dbeead5592485505beee0d62323ad63a | 058d6faeba874b6324d52463e059d6e2b2ec1889 | /mtgjson5/compiled_classes/mtgjson_tcgplayer_skus.py | 88632765876277f7f80e20ea02fa46cc0af6558d | [
"MIT"
] | permissive | 0az/mtgjson | f077b784586654494728d198adfc5201da94b9a9 | 64e4e0a452911418e608df932fbf12af5dcb1a35 | refs/heads/master | 2022-12-17T21:34:16.445461 | 2020-09-23T17:13:15 | 2020-09-23T17:13:15 | 298,050,106 | 0 | 0 | MIT | 2020-09-23T17:48:41 | 2020-09-23T17:48:40 | null | UTF-8 | Python | false | false | 1,513 | py | """
MTGJSON TcgplayerSkus Object
"""
import logging
import pathlib
from typing import Dict, List, Union
from ..providers import TCGPlayerProvider
from ..providers.tcgplayer import convert_sku_data_enum, get_tcgplayer_sku_data
from ..utils import generate_card_mapping
LOGGER = logging.getLogger(__name__)
class MtgjsonTcgplayerSkusObject:
    """
    MTGJSON TcgplayerSkus Object
    """

    # Mapping of MTGJSON card UUID -> list of TCGPlayer SKU dicts.
    enhanced_tcgplayer_skus: Dict[str, List[Dict[str, Union[int, str]]]]

    def __init__(self, all_printings_path: pathlib.Path) -> None:
        self.enhanced_tcgplayer_skus = {}

        # Build a TCGPlayer-product-id -> MTGJSON-uuid translation table
        # from the AllPrintings file.
        tcg_to_mtgjson_map = generate_card_mapping(
            all_printings_path, ("identifiers", "tcgplayerProductId"), ("uuid",)
        )

        for group in TCGPlayerProvider().get_tcgplayer_magic_set_ids():
            tcgplayer_sku_data = get_tcgplayer_sku_data(group)
            for product in tcgplayer_sku_data:
                product_id = str(product["productId"])

                key = tcg_to_mtgjson_map.get(product_id)
                if not key:
                    # Product exists on TCGPlayer but not in AllPrintings.
                    LOGGER.debug(f"Unable to translate TCGPlayer product {product_id}")
                    continue

                self.enhanced_tcgplayer_skus[key] = [
                    convert_sku_data_enum(sku) for sku in product["skus"]
                ]

    def to_json(self) -> Dict[str, List[Dict[str, Union[int, str]]]]:
        """
        Support json.dump()
        :return: JSON serialized object
        """
        return self.enhanced_tcgplayer_skus
| [
"noreply@github.com"
] | 0az.noreply@github.com |
1ac52a959bf11a33051c857d8c7123aba92e2e67 | 7172f674359425b0980ba117afc47a80ae80f33a | /Frontend.py | d588616498341e466fb0a01e1d19999e95472977 | [] | no_license | ManosGero/books | 104e94c3c2be925fc02340a63c59166fbbe2ec92 | 7585ec0107135145eedcb59d1e6d10493b05ba73 | refs/heads/master | 2020-08-26T19:00:51.021866 | 2020-03-26T16:15:39 | 2020-03-26T16:15:39 | 217,111,741 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,709 | py | from tkinter import *
import backend
def get_selected_row(event):
    """Remember the listbox row the user clicked and mirror it in the entries.

    Stores the selected record tuple in the module-level ``selected_tube``,
    copies its title/author/year/ISBN fields into the four entry widgets,
    and returns the record.  Returns None (and leaves previous state intact)
    when the event fires with nothing selected, instead of raising
    IndexError as the original did.
    """
    global selected_tube
    selection = list1.curselection()
    if not selection:
        return None
    selected_tube = list1.get(selection[0])
    # Fields 1..4 of the record are (title, author, year, isbn).
    for entry, value in ((e1, selected_tube[1]), (e2, selected_tube[2]),
                         (e3, selected_tube[3]), (e4, selected_tube[4])):
        entry.delete(0, END)
        entry.insert(END, value)
    return selected_tube
def view_command():
    """Refresh the listbox with every record from the database."""
    list1.delete(0,END)
    for row in backend.view():
        list1.insert(END,row)
def search_command():
    """Show only the records matching the filled-in entry fields."""
    list1.delete(0,END)
    for row in backend.search(title_text.get(),author_text.get(),year_text.get(),isbn_text.get()):
        list1.insert(END,row)
def add_command():
    """Insert a new record from the entry fields, then refresh the list."""
    backend.insert(title_text.get(),author_text.get(),year_text.get(),isbn_text.get())
    view_command()
def update_command():
    """Overwrite the selected record with the entry-field values.

    Deliberately best-effort: silently does nothing when no row has been
    selected yet (``selected_tube`` is then undefined).
    """
    try:
        list1.delete(0, END)
        backend.update(selected_tube[0],title_text.get(),author_text.get(),year_text.get(),isbn_text.get())
        view_command()
    except:
        pass
def delete_command():
    """Delete the selected record; deliberately a no-op when none selected."""
    try:
        backend.delete(selected_tube[0])
        view_command()
    except:
        pass
# --- main window ---
window = Tk()
window.title("Books")

# --- labels for the four record fields ---
l1= Label(window, text="Title" )
l1.grid(row=0, column=0)

# Typo fix: this label previously read "Arthor".
l2 = Label(window,text="Author")
l2.grid(row=0, column=2)

l3 = Label(window,text="Year")
l3.grid(row=1, column=0)

l4 = Label(window,text="ISBN")
l4.grid(row=1, column=2)

# --- entry widgets, each bound to a StringVar so values can be read back ---
title_text=StringVar()
e1=Entry(window,textvariable=title_text)
e1.grid(row=0,column=1)

author_text=StringVar()
e2=Entry(window,textvariable=author_text)
e2.grid(row=0,column=3)

year_text=StringVar()
e3=Entry(window,textvariable=year_text)
e3.grid(row=1,column=1)

isbn_text=StringVar()
e4=Entry(window,textvariable=isbn_text)
e4.grid(row=1,column=3)

# --- results listbox with vertical scrollbar ---
list1 =Listbox(window, height=6, width=35)
list1.grid(row = 2, column=0,rowspan=6, columnspan=2)
list1.bind("<<ListboxSelect>>",get_selected_row)

sb1 =Scrollbar(window)
sb1.grid(row=2,column=2,rowspan=6)

list1.configure(yscrollcommand=sb1.set)
sb1.configure(command=list1.yview)

# --- action buttons, one per CRUD operation ---
b1 = Button(window, text="View all", width=12, command=view_command)
b1.grid(row=2,column=3)

b2 = Button(window, text="Search entry", width=12, command=search_command)
b2.grid(row=3,column=3)

b3 = Button(window, text="Add Entry", width=12, command=add_command)
b3.grid(row=4,column=3)

b4 = Button(window, text="Update", width=12, command=update_command)
b4.grid(row=5,column=3)

b5 = Button(window, text="Delete", width=12, command=delete_command)
b5.grid(row=6,column=3)

b6 = Button(window, text="Close", width=12, command=window.destroy)
b6.grid(row=7,column=3)

window.mainloop()
window.mainloop() | [
"pli1723@gmail.com"
] | pli1723@gmail.com |
5fd485f6a42ad5549abb8f9b3500ace2a0e330fa | 79312d0487aa7e0eb9936a2c1cffcb63349822e1 | /Assignments/Day-1/p1.py | ae4387b9e351dac52c7fafaa0532da91cdecaf26 | [] | no_license | AswiniSankar/OB-Python-Training | 75a04fee81b1f76ea226b8599ba2360da51c3ac2 | 80ba00991e0e345c3b5680eb64a79e6f5bb1190a | refs/heads/master | 2023-06-03T03:00:01.280123 | 2021-06-20T10:10:43 | 2021-06-20T10:10:43 | 376,510,153 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 179 | py | # program to find maximum of three numbers
a, b, c = eval(input("enter three values"))
max = a if a > b and a > c else b if b > c else c
print("the maximum value is " + str(max))
| [
"aswinisuguna2@gmail.com"
] | aswinisuguna2@gmail.com |
9d4fa0a1977714a0290798e157f5b22310e8461f | 48e124e97cc776feb0ad6d17b9ef1dfa24e2e474 | /sdk/python/pulumi_azure_native/hdinsight/v20210601/_enums.py | edcf6d44105644a4c49b3c3c4900f7f15073a76f | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | bpkgoud/pulumi-azure-native | 0817502630062efbc35134410c4a784b61a4736d | a3215fe1b87fba69294f248017b1591767c2b96c | refs/heads/master | 2023-08-29T22:39:49.984212 | 2021-11-15T12:43:41 | 2021-11-15T12:43:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,314 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
from enum import Enum
__all__ = [
'DaysOfWeek',
'DirectoryType',
'JsonWebKeyEncryptionAlgorithm',
'OSType',
'PrivateIPAllocationMethod',
'PrivateLink',
'PrivateLinkServiceConnectionStatus',
'ResourceIdentityType',
'ResourceProviderConnection',
'Tier',
]
class DaysOfWeek(str, Enum):
    """
    Day of the week.
    """
    MONDAY = "Monday"
    TUESDAY = "Tuesday"
    WEDNESDAY = "Wednesday"
    THURSDAY = "Thursday"
    FRIDAY = "Friday"
    SATURDAY = "Saturday"
    SUNDAY = "Sunday"
class DirectoryType(str, Enum):
    """
    The directory type.
    """
    # The only directory service value exposed by this API version.
    ACTIVE_DIRECTORY = "ActiveDirectory"
class JsonWebKeyEncryptionAlgorithm(str, Enum):
    """
    Algorithm identifier for encryption, default RSA-OAEP.
    """
    # Member names are mechanically derived from the JSON Web Encryption
    # algorithm strings by the SDK generator, hence the odd casing.
    RS_A_OAEP = "RSA-OAEP"
    RS_A_OAE_P_256 = "RSA-OAEP-256"
    RSA1_5 = "RSA1_5"
class OSType(str, Enum):
    """
    The type of operating system.
    """
    # str-backed enum: members compare equal to their literal values.
    WINDOWS = "Windows"
    LINUX = "Linux"
class PrivateIPAllocationMethod(str, Enum):
    """
    The method that private IP address is allocated.
    """
    # Note: serialized values are lowercase, unlike most enums in this file.
    DYNAMIC = "dynamic"
    STATIC = "static"
class PrivateLink(str, Enum):
    """
    Indicates whether or not private link is enabled.
    """
    # On/off toggle expressed as strings rather than a boolean.
    DISABLED = "Disabled"
    ENABLED = "Enabled"
class PrivateLinkServiceConnectionStatus(str, Enum):
    """
    The status of the private link service connection.
    """
    APPROVED = "Approved"
    REJECTED = "Rejected"
    PENDING = "Pending"
    REMOVED = "Removed"
class ResourceIdentityType(str, Enum):
    """
    The type of identity used for the cluster. The type 'SystemAssigned, UserAssigned' includes both an implicitly created identity and a set of user assigned identities.
    """
    SYSTEM_ASSIGNED = "SystemAssigned"
    USER_ASSIGNED = "UserAssigned"
    # Combined form uses the exact string "SystemAssigned, UserAssigned"
    # (comma + space) expected by the service.
    SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned, UserAssigned"
    NONE = "None"
class ResourceProviderConnection(str, Enum):
    """
    The direction for the resource provider connection.
    """
    # str-backed enum: members serialize as their string values.
    INBOUND = "Inbound"
    OUTBOUND = "Outbound"
class Tier(str, Enum):
    """
    The cluster tier.
    """
    # str-backed enum: members compare equal to their literal values.
    STANDARD = "Standard"
    PREMIUM = "Premium"
| [
"noreply@github.com"
] | bpkgoud.noreply@github.com |
fb629e7fe336b2a8fb237488f2d1c749bfac8f70 | 4f67f6b5898a0969838b4f08890afde4ff853eae | /spider.py | 38b6d1cd62b43516b3e2607651d5323be9c0906f | [] | no_license | EmpGro/Maoyan100 | 31e5ec28b1a4f9167fd3ea95c046cec047346ebc | f20d667a9b7334edae4fcf7def9c813ac198c3dd | refs/heads/master | 2021-05-16T11:52:25.758576 | 2017-09-28T08:24:50 | 2017-09-28T08:24:50 | 105,122,008 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,726 | py | import requests
from requests.exceptions import RequestException
import re
import json
def get_one_page(url):
    """Fetch *url* and return the response body, or None on any failure."""
    try:
        response = requests.get(url)
    except RequestException:
        # Network-level problems (timeouts, DNS, ...) are treated as "no page".
        return None
    # Only a 200 response counts as a successful fetch.
    if response.status_code != 200:
        return None
    return response.text
def parse_one_page(html):
    """Yield one dict per movie entry found in the board page markup."""
    entry_re = re.compile(
        '<dd>.*?board-index.*?>(\d+)</i>'        # rank
        '.*?data-src="(.*?)".*?name"><a'         # poster image URL
        '.*?}">(.*?)</a>'                        # title
        '.*?star">\s*(.*?)\s*</p>'               # cast line
        '.*?releasetime">(.*?)</p'               # release-time line
        '.*?integer">(.*?)</i'                   # score, integer part
        '.*?fraction">(.*?)</i>.*?</dd>', re.S)  # score, fractional part
    for rank, image, title, cast, released, whole, frac in entry_re.findall(html):
        yield {
            'index': rank,
            'image': image,
            'title': title,
            'actor': cast[3:],        # drop the 3-char label prefix
            'times': released[5:15],  # keep only the 10-char date
            'ranks': whole + frac,
        }
def write_to_file(content):
    """Append *content* to result.txt as one JSON object per line.

    ensure_ascii=False together with the utf-8 file handle writes Chinese
    text verbatim instead of \\uXXXX escapes.
    """
    with open('result.txt', 'a', encoding='utf-8') as f:
        f.write(json.dumps(content, ensure_ascii=False) + '\n')
    # BUG FIX: the original called f.close() inside the `with` block; the
    # context manager already closes the file, so the call was redundant.
def one_page(offset):
    """Scrape the board page at the given *offset* and persist every entry."""
    page_url = 'http://maoyan.com/board/4?offset=' + str(offset)
    markup = get_one_page(page_url)
    for entry in parse_one_page(markup):
        write_to_file(entry)
def main():
    """Walk the ten board pages (offsets 0, 10, ..., 90)."""
    for offset in range(0, 100, 10):
        one_page(offset)
        print(offset, end=' ')
print(i*10, end=' ')
if __name__ == '__main__':
main()
| [
"linangfs@163.com"
] | linangfs@163.com |
a7e651038218c9e972e369896ec435d505b8823a | a4135d6b14b05cb1aacfaa9548e5bf9db1ef8585 | /pddlstream/language/stream.py | 8f7b912af7192e42f26c74ade003ef963f7c1a42 | [
"MIT"
] | permissive | yijiangh/pddlstream | e3b05a237c3e510b1fb2aad4d0bcd62c35f0f49b | 8d3782fc65a2c44c77ae34eba9769d855925c10e | refs/heads/master | 2023-03-11T00:17:24.716411 | 2018-11-02T19:02:42 | 2018-11-02T19:02:42 | 155,922,752 | 1 | 0 | MIT | 2018-11-02T21:21:11 | 2018-11-02T21:21:10 | null | UTF-8 | Python | false | false | 18,592 | py | import time
from collections import Counter, defaultdict, namedtuple, Sequence
from itertools import count
from pddlstream.algorithms.downward import make_preconditions, make_parameters
from pddlstream.language.constants import AND, get_prefix, get_args, is_parameter
from pddlstream.language.conversion import list_from_conjunction, remap_objects, \
substitute_expression, get_formula_operators, evaluation_from_fact, values_from_objects, obj_from_value_expression
from pddlstream.language.external import ExternalInfo, Result, Instance, External, DEBUG, get_procedure_fn, \
parse_lisp_list
from pddlstream.language.generator import get_next, from_fn
from pddlstream.language.object import Object, OptimisticObject, UniqueOptValue
from pddlstream.utils import str_from_object, get_mapping, irange
# Echo stream evaluations even when they produce no new values (next_results).
VERBOSE_FAILURES = True
# Value stored in `evaluations` for internally generated blocked facts.
INTERNAL = False
# Default for PartialInputs' unique flag (see StreamInfo.opt_gen_fn).
DEFAULT_UNIQUE = False
# Give negated streams a dedicated "-negative" blocked predicate name.
NEGATIVE_BLOCKED = True
# TODO: could also make only wild facts and automatically identify output tuples satisfying certified
# TODO: default effort cost of streams with more inputs to be higher (but negated are free)
# TODO: automatically convert to test streams on inputs
def get_empty_fn():
    """Return a procedure that ignores its arguments and produces nothing."""
    def _empty(*input_values):
        return None
    return _empty
def get_constant_fn(constant):
    """Return a procedure that maps any arguments to *constant*."""
    def _const(*input_values):
        return constant
    return _const
def get_identity_fn(indices):
    """Return a procedure selecting the arguments at *indices*, as a tuple."""
    def _select(*input_values):
        return tuple(input_values[i] for i in indices)
    return _select
##################################################
OptValue = namedtuple('OptValue', ['stream', 'inputs', 'values', 'output'])
class PartialInputs(object):
    """Optimistic-output policy: fabricate placeholder outputs conditioned
    on a subset of the stream's inputs (all of them when unique=True)."""
    def __init__(self, inputs='', unique=False): #, num=1):
        # inputs: whitespace-separated input parameter names to condition on.
        self.inputs = tuple(inputs.split())
        self.unique = unique
        #self.num = num
    def get_opt_gen_fn(self, stream):
        """Build the optimistic generator function for *stream*."""
        # unique=True conditions on every input parameter of the stream.
        inputs = stream.inputs if self.unique else self.inputs
        assert set(inputs) <= set(stream.inputs)
        # TODO: ensure no scoping error with inputs
        def gen_fn(*input_values):
            input_objects = tuple(map(Object.from_value, input_values))
            instance = stream.get_instance(input_objects)
            mapping = get_mapping(stream.inputs, input_objects)
            values = tuple(mapping[inp] for inp in inputs)
            assert(len(inputs) == len(values))
            # One OptValue per declared output, repeated num_optimistic times.
            #for _ in irange(self.num):
            for _ in irange(instance.num_optimistic):
                yield [tuple(OptValue(stream.name, inputs, values, out)
                             for out in stream.outputs)]
        return gen_fn
def get_constant_gen_fn(stream, constant):
    """Build a generator fn yielding a single output tuple of *constant*s."""
    def gen_fn(*input_values):
        # Sanity check: the caller must supply one value per declared input.
        assert len(stream.inputs) == len(input_values)
        yield [(constant,) * len(stream.outputs)]
    return gen_fn
##################################################
# def get_unique_fn(stream):
# # TODO: this should take into account the output number...
# def fn(*input_values):
# #input_objects = map(opt_obj_from_value, input_values)
# #stream_instance = stream.get_instance(input_objects)
# #output_values = tuple(UniqueOpt(stream_instance, i) for i in range(len(stream.outputs)))
# output_values = tuple(object() for _ in range(len(stream.outputs)))
# return [output_values]
# return fn
def get_debug_gen_fn(stream):
    # DEBUG stand-in: fabricate one DebugValue per declared output so that
    # planning can proceed without a real stream implementation.
    return from_fn(lambda *args: tuple(DebugValue(stream.name, args, o)
                                       for o in stream.outputs))
class DebugValue(object): # TODO: could just do an object
    """Placeholder output produced in DEBUG mode.

    repr drops the first character of the output parameter (e.g. '?q' ->
    '@q0', '@q1', ...), numbered globally per output parameter name."""
    _output_counts = defaultdict(count)
    _prefix = '@' # $ | @
    def __init__(self, stream, input_values, output_parameter):
        self.stream = stream
        self.input_values = input_values
        self.output_parameter = output_parameter
        # Class-level counter: indices are shared across all instances.
        self.index = next(self._output_counts[output_parameter])
    def __repr__(self):
        # Can also just return first letter of the prefix
        return '{}{}{}'.format(self._prefix, self.output_parameter[1:], self.index)
##################################################
class StreamInfo(ExternalInfo):
    """User-tunable metadata for a stream: optimistic-generation policy,
    negation-as-failure flag, and cost/effort hints."""
    def __init__(self, opt_gen_fn=PartialInputs(unique=DEFAULT_UNIQUE), eager=False,
                 p_success=None, overhead=None, negate=False, effort_fn=None, simultaneous=False):
        # TODO: could change frequency/priority for the incremental algorithm
        super(StreamInfo, self).__init__(eager, p_success, overhead, effort_fn)
        self.opt_gen_fn = opt_gen_fn  # policy producing optimistic outputs
        self.negate = negate          # treat the stream as a negated test
        self.simultaneous = simultaneous
        #self.order = 0
class StreamResult(Result):
    """One output tuple produced by a StreamInstance call, together with
    the certified facts it grounds."""
    def __init__(self, instance, output_objects, opt_index=None, call_index=None, list_index=None):
        super(StreamResult, self).__init__(instance, opt_index)
        self.output_objects = tuple(output_objects)
        # Parameter -> object mapping: outputs first, then the instance's inputs.
        self.mapping = get_mapping(self.external.outputs, self.output_objects)
        self.mapping.update(instance.mapping)
        # Certified facts with parameters substituted by concrete objects.
        self.certified = substitute_expression(self.external.certified, self.get_mapping())
        self.call_index = call_index
        self.list_index = list_index
    def get_mapping(self):
        return self.mapping
    def get_certified(self):
        return self.certified
    def get_tuple(self):
        return self.external.name, self.instance.input_objects, self.output_objects
    def remap_inputs(self, bindings):
        # Rebuild this result under a renaming of its input objects
        # (and of the objects appearing in its fluent facts).
        input_objects = remap_objects(self.instance.input_objects, bindings)
        fluent_facts = [(get_prefix(f),) + remap_objects(get_args(f), bindings)
                        for f in self.instance.fluent_facts]
        new_instance = self.external.get_instance(input_objects, fluent_facts=fluent_facts)
        new_instance.opt_index = self.instance.opt_index
        return self.__class__(new_instance, self.output_objects, self.opt_index)
    def is_successful(self):
        return True
    def __repr__(self):
        return '{}:{}->{}'.format(self.external.name,
                                  str_from_object(self.instance.input_objects),
                                  str_from_object(self.output_objects))
class StreamInstance(Instance):
    """A Stream grounded on specific input objects (plus any fluent facts).

    Owns the lazily created user generator, the optimistic results
    fabricated for planning, and the disable/enable bookkeeping."""
    def __init__(self, stream, input_objects, fluent_facts):
        super(StreamInstance, self).__init__(stream, input_objects)
        self._generator = None
        self.opt_index = stream.num_opt_fns
        self.fluent_facts = frozenset(fluent_facts)
        self.axiom_predicate = None
        self.disabled_axiom = None
        self.num_optimistic = 1
    def _check_output_values(self, new_values):
        # Validate that the user generator returned a sequence of tuples,
        # each of the declared output arity.
        if not isinstance(new_values, Sequence):
            raise ValueError('An output list for stream [{}] is not a sequence: {}'.format(self.external.name, new_values))
        for output_values in new_values:
            if not isinstance(output_values, Sequence):
                raise ValueError('An output tuple for stream [{}] is not a sequence: {}'.format(
                    self.external.name, output_values))
            if len(output_values) != len(self.external.outputs):
                raise ValueError('An output tuple for stream [{}] has length {} instead of {}: {}'.format(
                    self.external.name, len(output_values), len(self.external.outputs), output_values))
    def _check_wild_facts(self, new_facts):
        if not isinstance(new_facts, Sequence):
            raise ValueError('Output wild facts for wild stream [{}] is not a sequence: {}'.format(
                self.external.name, new_facts))
    def get_result(self, object_objects, opt_index=None, list_index=None):
        return self.external._Result(self, tuple(object_objects), opt_index=opt_index,
                                     call_index=self.num_calls, list_index=list_index)
    def use_unique(self):
        # opt_index 0 corresponds to the unique (non-shared) optimistic level.
        return self.opt_index == 0
    def get_fluent_values(self):
        # Convert fluent facts from Objects back to raw values for the user fn.
        return [(get_prefix(f),) + values_from_objects(get_args(f)) for f in self.fluent_facts]
    def _create_generator(self):
        if self._generator is None:
            input_values = self.get_input_values()
            #try:
            if self.external.is_fluent(): # self.fluent_facts
                # Fluent streams additionally receive the current fluent state.
                self._generator = self.external.gen_fn(*input_values, fluents=self.get_fluent_values())
            else:
                self._generator = self.external.gen_fn(*input_values)
            #except TypeError as err:
            #    print('Stream [{}] expects {} inputs'.format(self.external.name, len(input_values)))
            #    raise err
    def _next_outputs(self):
        # Returns (output_values, wild_facts); only wild streams yield both.
        self._create_generator()
        output, self.enumerated = get_next(self._generator, default=None)
        if output is None:
            return [], []
        if not self.external.is_wild:
            return output, []
        if len(output) != 2:
            raise RuntimeError('Wild stream [{}] does not generate pairs of output values and wild facts'.format(
                self.external.name))
        return output
    def next_results(self, accelerate=1, verbose=False):
        # TODO: prune repeated values
        all_new_values = []
        all_new_facts = []
        all_results = []
        start_calls = self.num_calls
        # accelerate > 1 keeps calling until something is produced (or done).
        for attempt in range(accelerate):
            if all_results or self.enumerated:
                break
            start_time = time.time()
            new_values, new_facts = self._next_outputs()
            self._check_output_values(new_values)
            self._check_wild_facts(new_facts)
            new_results = [self.get_result(map(Object.from_value, output_values), list_index=list_index)
                           for list_index, output_values in enumerate(new_values)]
            all_new_values.extend(new_values)
            all_new_facts.extend(new_facts)
            all_results.extend(new_results)
            self.update_statistics(start_time, new_results)
        if verbose and (VERBOSE_FAILURES or all_new_values):
            print('{}-{}) {}:{}->{}'.format(start_calls, self.num_calls, self.external.name,
                                            str_from_object(self.get_input_values()),
                                            str_from_object(all_new_values)))
        if verbose and all_new_facts:
            # TODO: format all_new_facts
            print('{}-{}) {}:{}->{}'.format(start_calls, self.num_calls, self.external.name,
                                            str_from_object(self.get_input_values()), all_new_facts))
        return all_results, list(map(obj_from_value_expression, all_new_facts))
    def next_optimistic(self):
        # TODO: compute this just once and store
        if self.enumerated or self.disabled:
            return []
        # TODO: (potentially infinite) sequence of optimistic objects
        # TODO: how do I distinguish between real and not real verifications of things?
        # TODO: resue these?
        self.opt_results = []
        output_set = set()
        for output_list in self.external.opt_gen_fn(*self.get_input_values()):
            self._check_output_values(output_list)
            for i, output_values in enumerate(output_list):
                output_objects = []
                for output_index, value in enumerate(output_values):
                    # TODO: maybe record history of values here?
                    unique = UniqueOptValue(self, len(self.opt_results), output_index) # object()
                    param = unique if self.use_unique() else value
                    output_objects.append(OptimisticObject.from_opt(value, param))
                output_objects = tuple(output_objects)
                # Deduplicate identical optimistic output tuples.
                if output_objects not in output_set:
                    output_set.add(output_objects) # No point returning the exact thing here...
                    self.opt_results.append(self.external._Result(self, output_objects, opt_index=self.opt_index,
                                                                  call_index=len(self.opt_results), list_index=0))
        return self.opt_results
    def get_blocked_fact(self):
        # Fact recorded in the evaluations to mark this instance as blocked.
        if self.external.is_fluent():
            assert self.axiom_predicate is not None
            return (self.axiom_predicate,) + self.input_objects
        return (self.external.blocked_predicate,) + self.input_objects
    def disable(self, evaluations, domain):
        #assert not self.disabled
        super(StreamInstance, self).disable(evaluations, domain)
        if not self.external.is_fluent(): # self.fluent_facts:
            if self.external.is_negated() and not self.successes:
                evaluations[evaluation_from_fact(self.get_blocked_fact())] = INTERNAL
            return
        # Fluent streams are blocked through a derived axiom conditioned on
        # both a fresh static fact and the fluent facts of this instance.
        if self.axiom_predicate is not None:
            return
        index = len(self.external.disabled_instances)
        self.external.disabled_instances.append(self)
        self.axiom_predicate = '_ax{}-{}'.format(self.external.blocked_predicate, index)
        evaluations[evaluation_from_fact(self.get_blocked_fact())] = INTERNAL
        # TODO: allow reporting back which components lead to failure
        import pddl
        static_fact = (self.axiom_predicate,) + self.external.inputs
        preconditions = [static_fact] + list(self.fluent_facts)
        self.disabled_axiom = pddl.Axiom(name=self.external.blocked_predicate,
                                         parameters=make_parameters(self.external.inputs),
                                         num_external_parameters=len(self.external.inputs),
                                         condition=make_preconditions(preconditions))
        domain.axioms.append(self.disabled_axiom)
    def enable(self, evaluations, domain):
        super(StreamInstance, self).enable(evaluations, domain)
        if self.axiom_predicate is not None: # TODO: re-enable?
            raise NotImplementedError(self)
    def __repr__(self):
        return '{}:{}->{}'.format(self.external.name, self.input_objects, self.external.outputs)
class Stream(External):
    """Declarative conditional generator: for input objects satisfying the
    domain facts, produces output objects grounding the certified facts."""
    _Instance = StreamInstance
    _Result = StreamResult
    def __init__(self, name, gen_fn, inputs, domain, outputs, certified, info, fluents=[], is_wild=False):
        # NOTE(review): fluents=[] is a mutable default; it is only read,
        # never mutated, in this class, so it is currently harmless.
        super(Stream, self).__init__(name, info, inputs, domain)
        self.outputs = tuple(outputs)
        self.certified = tuple(certified)
        self.constants.update(a for i in certified for a in get_args(i) if not is_parameter(a))
        # Validate the declaration: outputs must be unique parameters,
        # disjoint from inputs, and certified parameters must be declared.
        for p, c in Counter(self.outputs).items():
            if not is_parameter(p):
                raise ValueError('Output [{}] for stream [{}] is not a parameter'.format(p, name))
            if c != 1:
                raise ValueError('Output [{}] for stream [{}] is not unique'.format(p, name))
        for p in set(self.inputs) & set(self.outputs):
            raise ValueError('Parameter [{}] for stream [{}] is both an input and output'.format(p, name))
        certified_parameters = {a for i in certified for a in get_args(i) if is_parameter(a)}
        for p in (certified_parameters - set(self.inputs + self.outputs)):
            raise ValueError('Parameter [{}] for stream [{}] is not included within outputs'.format(p, name))
        for p in (set(self.outputs) - certified_parameters):
            print('Warning! Output [{}] for stream [{}] is not covered by a certified condition'.format(p, name))
        # TODO: automatically switch to unique if only used once
        self.gen_fn = get_debug_gen_fn(self) if gen_fn == DEBUG else gen_fn
        self.num_opt_fns = 1 if self.outputs else 0 # Always unique if no outputs
        if isinstance(self.info.opt_gen_fn, PartialInputs):
            if self.info.opt_gen_fn.unique:
                self.num_opt_fns = 0
            self.opt_gen_fn = self.info.opt_gen_fn.get_opt_gen_fn(self)
        else:
            self.opt_gen_fn = self.info.opt_gen_fn
        #self.bound_list_fn = None # TODO: generalize to a hierarchical sequence
        #self.opt_fns = [get_unique_fn(self), get_shared_fn(self)] # get_unique_fn | get_shared_fn
        self.fluents = [] if gen_fn == DEBUG else fluents
        if NEGATIVE_BLOCKED:
            self.blocked_predicate = '~{}-negative'.format(self.name) # Args are self.inputs
        else:
            self.blocked_predicate = '~{}'.format(self.name)
        self.disabled_instances = []
        self.is_wild = is_wild
        # Negated streams act as tests: no outputs, and every certified fact
        # must mention all inputs so the negation is well defined.
        if self.is_negated():
            if self.outputs:
                raise ValueError('Negated streams cannot have outputs: {}'.format(self.outputs))
            #assert len(self.certified) == 1 # TODO: is it okay to have more than one fact?
            for certified in self.certified:
                if not (set(self.inputs) <= set(get_args(certified))):
                    raise ValueError('Negated streams must have certified facts including all input parameters')
    def is_fluent(self):
        return self.fluents
    def is_negated(self):
        return self.info.negate
    def get_instance(self, input_objects, fluent_facts=frozenset()):
        # Instances are memoized per (inputs, fluent facts) pair.
        key = (tuple(input_objects), frozenset(fluent_facts))
        if key not in self.instances:
            self.instances[key] = self._Instance(self, input_objects, fluent_facts)
        return self.instances[key]
    def __repr__(self):
        return '{}:{}->{}'.format(self.name, self.inputs, self.outputs)
##################################################
def parse_stream(lisp_list, stream_map, stream_info):
    """Construct a Stream from its parsed declaration.

    lisp_list: the (:stream ...) / (:wild-stream ...) s-expression.
    stream_map: mapping from stream name to the implementing procedure.
    stream_info: mapping from stream name to StreamInfo overrides.
    """
    value_from_attribute = parse_lisp_list(lisp_list)
    assert set(value_from_attribute) <= {':stream', ':wild-stream', ':inputs',
                                         ':domain', ':fluents', ':outputs', ':certified'}
    is_wild = (':wild-stream' in value_from_attribute)
    name = value_from_attribute[':wild-stream'] if is_wild else value_from_attribute[':stream']
    domain = value_from_attribute.get(':domain', None)
    # TODO: dnf_from_positive_formula(value_from_attribute.get(':domain', []))
    # Only conjunctive domain/certified formulas are supported.
    if not (get_formula_operators(domain) <= {AND}):
        # TODO: allow positive DNF
        raise ValueError('Stream [{}] domain must be a conjunction'.format(name))
    certified = value_from_attribute.get(':certified', None)
    if not (get_formula_operators(certified) <= {AND}):
        raise ValueError('Stream [{}] certified must be a conjunction'.format(name))
    return Stream(name, get_procedure_fn(stream_map, name),
                  value_from_attribute.get(':inputs', []),
                  list_from_conjunction(domain),
                  value_from_attribute.get(':outputs', []),
                  list_from_conjunction(certified),
                  stream_info.get(name, StreamInfo()),
                  fluents=value_from_attribute.get(':fluents', []),
                  is_wild=is_wild)
| [
"caelan@mit.edu"
] | caelan@mit.edu |
dd63361c4e4d44981fa2db5ade5f1c511a995157 | 2726d731dbc75816e98d4ffefa78816959b9a3af | /04.26.51_1920.1080_421M/30.py | b2cf36fb591ebefb08b5bb717523463ab01dec88 | [] | no_license | suvrajeet01/python | b1280b840c3a1898ab872db1122f30cfd8fb48f1 | f3af690e4779ae3dc1ef6dfa58a133d6ae06f12f | refs/heads/master | 2022-09-10T22:01:46.743791 | 2020-05-18T17:18:22 | 2020-05-18T17:18:22 | 256,516,586 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,363 | py | #Modules and pip
#using modules in python
#modules are external python files that can be imported to current python file and all the functions and other stuffs from that file can be used in the current file after importing it
import useful_tools #importing external python file or module
print(useful_tools.roll_dice(10)) #importing function from external file
#find list of all supported modules
#goto google.com
#list of pyython modules
#click the docs website of the version using(3.8)
#huge list of python modules can be found here that can be imported and used according to needs
#third party modules can also be found outside of python docs
#module for a specific purpose can also be found out there in google written by other developers or peoples out there
#lot of useful and other modules may not be in module docs in python website or in lib folder in installation directory by default
#two types of modules
#built-in modules - built in with the interpreter
#external modules - are stored where python is installed in the system
#stored inside lib folder where python is installed
#the location of external modules can be found in modules docs
#click on the desired module to know it's location if present in install directory of python
#installing python modules that are not pre-installed
#find the module to be installed
#ex:- google ->python-docx
#goto python-docx website
#follow the install instruction
#can be installed using pip install python-docx
#pip(comes pre-installed with python 3 or later)
#it is a program used to install python modules(can be called as OR reffered to as a package manager)
#using pip user can
#install , manage , update , remove(uninstall) different python modules
#installing using pip
#open cmd for windows and terminal for mac/linux
#pip --version to check weather pip is installed or not
#to install a python module
#pip install module-name
#ex:- pip install python-docx
#these modules are stored inside python(install directory)/lib/site-packages
#to use the modules
#import module-name
#ex:-import docx
#to remove a python module
#pip uninstall python-module-name
#ex:-pip uninstall python-docx
| [
"suvrajeetbanerjee34767@gmail.com"
] | suvrajeetbanerjee34767@gmail.com |
c1b2e243d0b711a5d1b79c385fe8b6cb87d4e746 | 87e3e9630911138a38322384df02b300a8ca1211 | /Depth-first Search/100 - Same Tree.py | 94ec3005bbedfd16845e26bc1a15120c471ce447 | [] | no_license | JSantosha/LeetCode-Python | 238df566e3a4d6a54e2beb34c60b189773dabe00 | abbbc36788313f6087fbed5821f52ab161854894 | refs/heads/master | 2018-10-22T14:08:10.290364 | 2018-09-15T02:49:31 | 2018-09-15T02:49:31 | 126,122,322 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 683 | py | # Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
    def isSameTree(self, p, q):
        """
        :type p: TreeNode
        :type q: TreeNode
        :rtype: bool
        """
        # Two empty subtrees match; exactly one empty subtree cannot.
        if p is None or q is None:
            return p is q
        # Roots must agree, then both subtrees must match recursively.
        return (p.val == q.val
                and self.isSameTree(p.left, q.left)
                and self.isSameTree(p.right, q.right))
| [
"jillellavenkateswarlu@gmail.com"
] | jillellavenkateswarlu@gmail.com |
e7d32f77d9828fea0ee44db2553119cbf58a159e | 3082582055ee57d94286ccfe5736df0a2bd10b79 | /PycharmProjects/pythonProject10/hw15/models/models.py | 8bc8646d40939d5ed4127a91dba2afc42c180923 | [] | no_license | dmytroreva/cursor | 776dff6a1ef73e723924890915beba8b7770f51e | dc675bbdb1ea9723ca07fabd173e58d0b9efc0df | refs/heads/master | 2023-07-08T04:17:02.401738 | 2021-08-12T21:18:34 | 2021-08-12T21:18:34 | 341,545,473 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,757 | py | """Data models."""
from app import db
from helpers.serializers import Serializer
class User(db.Model, Serializer):
    """Data model for user accounts."""
    __tablename__ = 'users'
    # Surrogate primary key.
    id = db.Column(
        db.Integer,
        primary_key=True
    )
    username = db.Column(
        db.String(64),
        index=False,
        unique=True,
        nullable=False
    )
    email = db.Column(
        db.String(80),
        index=True,
        unique=True,
        nullable=False
    )
    # Account creation timestamp.
    created = db.Column(
        db.DateTime,
        index=False,
        unique=False,
        nullable=False
    )
    # Optional free-form profile text.
    bio = db.Column(
        db.Text,
        index=False,
        unique=False,
        nullable=True
    )
    admin = db.Column(
        db.Boolean,
        index=False,
        unique=False,
        nullable=False
    )
    # One-to-many: Article rows gain a .author backref to this user.
    articles = db.relationship("Article", backref='author', lazy=True)
    def __repr__(self):
        return '<User {}>'.format(self.username)
# Association table backing the many-to-many Article <-> Category relation.
article_categories = db.Table('article_categories',
                              db.Column("article_id", db.Integer, db.ForeignKey('articles.id')),
                              db.Column("category_id", db.Integer, db.ForeignKey("category.id"))
                              )
class Article(db.Model):
    """Data model for articles."""
    __tablename__ = 'articles'
    id = db.Column(
        db.Integer,
        primary_key=True
    )
    # Owning user; paired with the 'author' backref declared on User.articles.
    author_id = db.Column(db.Integer, db.ForeignKey('users.id'), nullable=False)
    # author = db.relationship("User", backref='articles', lazy=True)
    title = db.Column(
        db.String(255),
        nullable=False,
        index=True,
        unique=False
    )
    # URL-friendly identifier; not unique at the database level.
    slug = db.Column(
        db.String(50),
        nullable=False,
        index=True,
        unique=False
    )
    description = db.Column(
        db.Text,
        nullable=False,
        index=False,
        unique=False
    )
    short_description = db.Column(
        db.String(350),
        nullable=True,
        index=False,
        unique=False
    )
    # Optional image path/URL.
    img = db.Column(
        db.String(255),
        nullable=True,
        index=False,
        unique=False
    )
    categories = db.relationship("Category", secondary=article_categories, back_populates="articles")
    @property
    def serialize(self):
        # Plain-dict view of the public fields (author/categories excluded).
        return {
            "id": self.id,
            "title": self.title,
            "slug": self.slug,
            'description': self.description,
            "short_description": self.short_description,
            "img": self.img
        }
class Category(db.Model):
    """Data model for article categories."""
    # NOTE(review): no __tablename__ -- SQLAlchemy derives 'category',
    # which matches the 'category.id' FK in article_categories.
    id = db.Column(
        db.Integer,
        primary_key=True
    )
    title = db.Column(
        db.String(350)
    )
    articles = db.relationship("Article", secondary=article_categories, back_populates="categories")
"revadise@gmail.com"
] | revadise@gmail.com |
7d121d39dadb86ea8ec9d812b500334909cb0534 | 7cd63054a4d363aa143f154b33af400b350d9d11 | /NBprocessing/categorical/_general_functions_categorical.py | 3dfa60171dbb42714c83596ebfcdf365a2a5e737 | [
"MIT"
] | permissive | torreaopt/NBprocessing | 5b5dd2d659119728fbd2159add65c11f67000c04 | 3b020829e9c2ec0ef67b881a69637ac14e635e75 | refs/heads/master | 2022-12-28T20:49:15.208833 | 2020-10-07T11:17:45 | 2020-10-07T11:17:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 501 | py | import re
from NBprocessing.src import constance_object
def color_imbalanced(raw_input):
    """Return a CSS color directive flagging heavily imbalanced categories.

    Inputs of the form "<label>: <percent>" are colored red when the
    percentage is 90 or higher, black otherwise.  NOTE(review): the exact
    format of *raw_input* and of constance_object.OUTPUT is inferred from
    usage -- confirm against the callers.  The previous docstring (copied
    from a pandas styling example about negative values) was misleading.
    """
    color = constance_object.BLACK
    if raw_input != "0.00":
        match = re.search(r'(: )(\d+)', raw_input)
        # Guard against inputs without a ": <number>" part; the original
        # indexed the match unconditionally and raised TypeError on None.
        if match is not None and int(match[2]) >= 90:
            color = constance_object.RED
    return constance_object.OUTPUT.format(color)
| [
"nirbarazida@gmail.com"
] | nirbarazida@gmail.com |
3a86a7e83c8d88aa0d7e0a5952a56771bd01f41a | facb8b9155a569b09ba66aefc22564a5bf9cd319 | /wp2/merra_scripts/01_netCDF_extraction/merra902Combine/604-tideGauge.py | 6e967f2eb7f15063af6ad0c01079f2c24d39111e | [] | no_license | moinabyssinia/modeling-global-storm-surges | 13e69faa8f45a1244a964c5de4e2a5a6c95b2128 | 6e385b2a5f0867df8ceabd155e17ba876779c1bd | refs/heads/master | 2023-06-09T00:40:39.319465 | 2021-06-25T21:00:44 | 2021-06-25T21:00:44 | 229,080,191 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,376 | py | # -*- coding: utf-8 -*-
"""
Created on Tue Jun 17 11:28:00 2020
--------------------------------------------
Load predictors for each TG and combine them
--------------------------------------------
@author: Michael Tadesse
"""
import os
import pandas as pd
#define directories
# dir_name = 'F:\\01_erainterim\\01_eraint_predictors\\eraint_D3'
dir_in = "/lustre/fs0/home/mtadesse/merraLocalized"
dir_out = "/lustre/fs0/home/mtadesse/merraAllCombined"
def combine():
    """Merge the slp/wnd_u/wnd_v predictor CSVs for one tide gauge.

    Walks the per-gauge folders under dir_in, joins the three predictor
    tables on their 'date' column, and writes '<index>_<gauge>.csv' into
    dir_out.  NOTE(review): x/y hard-code a single gauge (index 604);
    widen the range to process more gauges.
    """
    os.chdir(dir_in)
    #get names
    tg_list_name = os.listdir()
    x = 604
    y = 605
    for tg in range(x, y):
        os.chdir(dir_in)
        tg_name = tg_list_name[tg]
        print(tg_name, '\n')
        #looping through each TG folder
        os.chdir(tg_name)
        #check for empty folders
        if len(os.listdir()) == 0:
            continue
        #defining the path for each predictor
        where = os.getcwd()
        csv_path = {'slp' : os.path.join(where, 'slp.csv'),\
                    "wnd_u": os.path.join(where, 'wnd_u.csv'),\
                    'wnd_v' : os.path.join(where, 'wnd_v.csv')}
        first = True
        for pr in csv_path.keys():
            print(tg_name, ' ', pr)
            #read predictor
            pred = pd.read_csv(csv_path[pr])
            #remove unwanted columns
            pred.drop(['Unnamed: 0'], axis = 1, inplace=True)
            #sort based on date as merra files are scrambled
            pred.sort_values(by = 'date', inplace=True)
            #give predictor columns a name
            pred_col = list(pred.columns)
            for pp in range(len(pred_col)):
                if pred_col[pp] == 'date':
                    continue
                pred_col[pp] = pr + str(pred_col[pp])
            pred.columns = pred_col
            #merge all predictors
            if first:
                pred_combined = pred
                first = False
            else:
                pred_combined = pd.merge(pred_combined, pred, on = 'date')
        #saving pred_combined
        os.chdir(dir_out)
        tg_name = str(tg)+"_"+tg_name;
        pred_combined.to_csv('.'.join([tg_name, 'csv']))
        os.chdir(dir_in)
        print('\n')
#run script
combine()
| [
"michaelg.tadesse@gmail.com"
] | michaelg.tadesse@gmail.com |
1ef315af20a8dc8e72738bc364065d2ebb3d3e16 | df6bb6714cb725edda5db2bed2d37b3c55b6118c | /ocrolib/morph.py | 3ed29c932247c8edfd61b56a6d9f244670142030 | [] | no_license | binfen/media_report_page_segment | 789128bcb8826ce1b06ff28c76198ee576e4dce8 | ac8da1efb013cb583fb25d36d44b3f947d5ca989 | refs/heads/master | 2020-03-23T14:46:06.464115 | 2018-07-20T10:28:15 | 2018-07-20T10:28:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,454 | py | ################################################################
### various add-ons to the SciPy morphology package
################################################################
from __future__ import print_function
from numpy import *
import pylab
from pylab import *
from scipy.ndimage import morphology,measurements,filters
from scipy.ndimage.morphology import *
from toplevel import *
@checks(ABINARY2)
def label(image,**kw):
    """Redefine the scipy.ndimage.measurements.label function to
    work with a wider range of data types. The default function
    is inconsistent about the data types it accepts on different
    platforms."""
    try:
        return measurements.label(image,**kw)
    except Exception:
        pass
    # Fall back to casting the image to other integer dtypes until one works.
    # BUG FIX: "unit64" was a typo for "uint64" (that cast always failed);
    # bare except clauses narrowed to Exception so KeyboardInterrupt and
    # SystemExit are no longer swallowed.
    types = ["int32","uint32","int64","uint64","int16","uint16"]
    for t in types:
        try:
            return measurements.label(array(image,dtype=t),**kw)
        except Exception:
            pass
    # let it raise the same exception as before
    return measurements.label(image,**kw)
@checks(AINT2)
def find_objects(image,**kw):
    """Redefine the scipy.ndimage.measurements.find_objects function to
    work with a wider range of data types. The default function
    is inconsistent about the data types it accepts on different
    platforms."""
    try:
        return measurements.find_objects(image,**kw)
    except Exception:
        pass
    # Fall back to casting the image to other integer dtypes until one works.
    # BUG FIX: "unit64" was a typo for "uint64" (that cast always failed);
    # bare except clauses narrowed to Exception so KeyboardInterrupt and
    # SystemExit are no longer swallowed.
    types = ["int32","uint32","int64","uint64","int16","uint16"]
    for t in types:
        try:
            return measurements.find_objects(array(image,dtype=t),**kw)
        except Exception:
            pass
    # let it raise the same exception as before
    return measurements.find_objects(image,**kw)
def check_binary(image):
    """Assert that *image* is binary: byte/int/bool dtype with values in [0, 1]."""
    is_binary_dtype = (image.dtype == 'B' or image.dtype == 'i'
                       or image.dtype == dtype('bool'))
    assert is_binary_dtype, \
        "array should be binary, is %s %s"%(image.dtype,image.shape)
    lo, hi = amin(image), amax(image)
    assert lo >= 0 and hi <= 1, \
        "array should be binary, has values %g to %g"%(lo, hi)
@checks(ABINARY2,uintpair)
def r_dilation(image,size,origin=0):
    """Dilation with rectangular structuring element using maximum_filter"""
    # A running maximum over a size-shaped box is dilation by that box.
    return filters.maximum_filter(image,size,origin=origin)
@checks(ABINARY2,uintpair)
def r_erosion(image,size,origin=0):
    """Erosion with rectangular structuring element using minimum_filter"""
    # A running minimum over a size-shaped box is erosion by that box.
    return filters.minimum_filter(image,size,origin=origin)
@checks(ABINARY2,uintpair)
def r_opening(image,size,origin=0):
    """Opening with rectangular structuring element using maximum/minimum filter"""
    # Opening = erosion followed by dilation; validates binary input first.
    check_binary(image)
    image = r_erosion(image,size,origin=origin)
    return r_dilation(image,size,origin=origin)
@checks(ABINARY2,uintpair)
def r_closing(image,size,origin=0):
    """Closing (dilation followed by erosion) with a rectangular
    structuring element, built on the maximum/minimum filters."""
    check_binary(image)
    # Bug fix: the `origin` argument was previously ignored (hard-coded to
    # 0 in both calls), unlike the matching r_opening implementation.
    image = r_dilation(image,size,origin=origin)
    return r_erosion(image,size,origin=origin)
@checks(ABINARY2,uintpair)
def rb_dilation(image,size,origin=0):
    """Binary dilation using linear filters.

    A uniform (mean) filter over a 0/1 image is > 0 exactly where the
    rectangular window touches at least one foreground pixel; cval=0
    treats everything beyond the border as background."""
    output = zeros(image.shape,'f')
    filters.uniform_filter(image,size,output=output,origin=origin,mode='constant',cval=0)
    return array(output>0, 'i')
@checks(ABINARY2,uintpair)
def rb_erosion(image,size,origin=0):
    """Binary erosion using linear filters.

    The mean over the rectangular window equals 1 exactly where the window
    is entirely foreground; cval=1 treats everything beyond the border as
    foreground so border pixels are not eroded by the padding."""
    output = zeros(image.shape,'f')
    filters.uniform_filter(image,size,output=output,origin=origin,mode='constant',cval=1)
    return array(output==1,'i')
@checks(ABINARY2,uintpair)
def rb_opening(image,size,origin=0):
    """Binary opening (erosion then dilation) using linear filters."""
    eroded = rb_erosion(image, size, origin=origin)
    return rb_dilation(eroded, size, origin=origin)
@checks(ABINARY2,uintpair)
def rb_closing(image,size,origin=0):
    """Binary closing (dilation then erosion) using linear filters."""
    dilated = rb_dilation(image, size, origin=origin)
    return rb_erosion(dilated, size, origin=origin)
@checks(GRAYSCALE,uintpair)
def rg_dilation(image,size,origin=0):
    """Grayscale dilation with maximum/minimum filters.

    Same filter as r_dilation, but with a grayscale input contract."""
    return filters.maximum_filter(image,size,origin=origin)
@checks(GRAYSCALE,uintpair)
def rg_erosion(image,size,origin=0):
    """Grayscale erosion with maximum/minimum filters.

    Same filter as r_erosion, but with a grayscale input contract."""
    return filters.minimum_filter(image,size,origin=origin)
@checks(GRAYSCALE,uintpair)
def rg_opening(image,size,origin=0):
    """Grayscale opening (erosion then dilation) with maximum/minimum filters."""
    # Bug fix: this previously called the binary r_erosion/r_dilation
    # wrappers, whose @checks(ABINARY2,...) contract rejects grayscale
    # input; use the grayscale variants (same underlying filters).
    image = rg_erosion(image,size,origin=origin)
    return rg_dilation(image,size,origin=origin)
@checks(GRAYSCALE,uintpair)
def rg_closing(image,size,origin=0):
    """Grayscale closing (dilation then erosion) with maximum/minimum filters."""
    # Bug fixes: (1) previously called the binary r_dilation/r_erosion
    # wrappers, whose @checks(ABINARY2,...) contract rejects grayscale
    # input; (2) the `origin` argument was ignored (hard-coded to 0).
    image = rg_dilation(image,size,origin=origin)
    return rg_erosion(image,size,origin=origin)
@checks(SEGMENTATION)
def showlabels(x,n=7):
    """Display a label image with pylab, cycling through n colormap values
    (label%n+1) so neighboring labels get distinct colors; the background
    (label 0) stays 0."""
    pylab.imshow(where(x>0,x%n+1,0),cmap=pylab.cm.gist_stern)
@checks(SEGMENTATION)
def spread_labels(labels,maxdist=9999999):
    """Spread the given labels into the background (zero) pixels: each
    background pixel receives the label of its nearest labeled pixel,
    but only within a Euclidean distance of maxdist; pixels farther away
    stay 0."""
    distances,features = morphology.distance_transform_edt(labels==0,return_distances=1,return_indices=1)
    # features holds, per pixel, the (row, col) index of the nearest
    # nonzero pixel; fold it into a linear index to gather those labels
    indexes = features[0]*labels.shape[1]+features[1]
    spread = labels.ravel()[indexes.ravel()].reshape(*labels.shape)
    spread *= (distances<maxdist)
    return spread
@checks(ABINARY2,ABINARY2)
def keep_marked(image,markers):
    """Given a marker image, keep only the connected components
    that overlap the markers."""
    labels, _ = label(image)
    # component labels that co-occur with any nonzero marker pixel
    marked_ids = unique(labels*(markers!=0))
    kept = in1d(labels.ravel(), marked_ids).reshape(labels.shape)
    return (image!=0)*kept
@checks(ABINARY2,ABINARY2)
def remove_marked(image,markers):
    """Given a marker image, remove all the connected components
    that overlap markers."""
    overlapping = keep_marked(image, markers)
    return image*(overlapping==0)
@checks(SEGMENTATION,SEGMENTATION)
def correspondences(labels1,labels2):
    """Given two labeled images, compute an array giving the correspondences
    between labels in the two images.

    Returns a 2xN array whose columns are the (label1, label2) pairs that
    co-occur at the same pixel position."""
    q = 100000
    # pack each pixel's pair of labels into one integer (label1*q+label2)
    # so unique() finds all co-occurring pairs in a single pass; this
    # encoding requires all labels in labels2 to be smaller than q
    assert amin(labels1)>=0 and amin(labels2)>=0
    assert amax(labels2)<q
    combo = labels1*q+labels2
    result = unique(combo)
    result = array([result//q,result%q])
    return result
@checks(ABINARY2,SEGMENTATION)
def propagate_labels_simple(regions,labels):
    """Given an image and a set of labels, apply the labels
    to all the regions in the image that overlap a label.

    If a region overlaps several labels, the last pair produced by
    correspondences() wins (no conflict handling; see propagate_labels)."""
    rlabels,_ = label(regions)
    cors = correspondences(rlabels,labels)
    # map each connected-component id to the label it overlaps
    outputs = zeros(amax(rlabels)+1,'i')
    for o,i in cors.T: outputs[o] = i
    outputs[0] = 0  # background stays background
    return outputs[rlabels]
@checks(ABINARY2,SEGMENTATION)
def propagate_labels(image,labels,conflict=0):
    """Given an image and a set of labels, apply the labels to all the regions in the image that overlap a label.
    Assign the value `conflict` to any labels that have a conflict."""
    rlabels,_ = label(image)
    cors = correspondences(rlabels,labels)
    outputs = zeros(amax(rlabels)+1,'i')
    # sentinel marking regions that overlap more than one input label
    oops = -(1<<30)
    for o,i in cors.T:
        if outputs[o]!=0:
            outputs[o] = oops
        else:
            outputs[o] = i
    outputs[outputs==oops] = conflict
    outputs[0] = 0  # background stays background
    return outputs[rlabels]
@checks(ABINARY2,True)
def select_regions(binary,f,min=0,nbest=100000):
    """Given a scoring function f over slice tuples (as returned by find_objects),
    keeps at most nbest regions whose scores is higher than min.

    Returns a binary image containing only the selected connected
    components.  NOTE: the parameter name `min` shadows the builtin within
    this function; kept for backward compatibility with keyword callers."""
    labels, n = label(binary)
    objects = find_objects(labels)
    scores = [f(o) for o in objects]
    best = argsort(scores)
    # keep[i+1] corresponds to component label i+1; keep[0] (background)
    # always stays 0
    keep = zeros(len(objects)+1,'i')
    if nbest > 0:
        # walk the nbest highest-scoring components, dropping any at or
        # below the score threshold
        for i in best[-nbest:]:
            if scores[i]<=min:
                continue
            keep[i+1] = 1
    return keep[labels]
@checks(SEGMENTATION)
def all_neighbors(image):
    """Given an image with labels, find all pairs of labels
    that are directly neighboring each other (4-connectivity).

    Returns the unique, sorted (small, large) label pairs.
    NOTE(review): roll() wraps around the array edges, so labels touching
    opposite borders can also be reported as neighbors."""
    q = 100000
    assert amax(image)<q
    assert amin(image)>=0
    # pack (label, shifted-label) pairs into single integers, one shift
    # per direction, and collect the unique combinations
    u = unique(q*image+roll(image,1,0))
    d = unique(q*image+roll(image,-1,0))
    l = unique(q*image+roll(image,1,1))
    r = unique(q*image+roll(image,-1,1))
    all = unique(r_[u,d,l,r])  # local name shadows the builtin `all`
    all = c_[all//q,all%q]
    # sort each pair so (a,b) and (b,a) collapse to one entry
    all = unique(array([sorted(x) for x in all]))
    return all
################################################################
### Iterate through the regions of a color image.
################################################################
@checks(SEGMENTATION)
def renumber_labels_ordered(a,correspondence=0):
    """Renumber the labels of the input array in numerical order so
    that they are arranged from 1...N

    If `correspondence` is true, also return the sorted list of original
    labels; index i of that list is the old label that became i."""
    assert amin(a)>=0
    assert amax(a)<=2**25
    labels = sorted(unique(ravel(a)))
    # lookup table from old label value to its rank among present labels
    renum = zeros(amax(labels)+1,dtype='i')
    renum[labels] = arange(len(labels),dtype='i')
    if correspondence:
        return renum[a],labels
    else:
        return renum[a]
@checks(SEGMENTATION)
def renumber_labels(a):
    """Alias for renumber_labels_ordered (kept for API compatibility)."""
    return renumber_labels_ordered(a)
def pyargsort(seq, cmp=None, key=lambda x: x):
    """Like numpy's argsort, but using the builtin Python sorting
    function.  Takes an optional `key` and an optional old-style `cmp`
    comparison function.

    Compatibility fix: the original signature used `cmp=cmp`, which only
    exists on Python 2 (and sorted() no longer accepts a cmp argument on
    Python 3).  cmp=None now means "natural ordering", which is what the
    builtin cmp produced; an explicit cmp function is adapted with
    functools.cmp_to_key, preserving the old behavior for all callers."""
    if cmp is None:
        return sorted(range(len(seq)), key=lambda i: key(seq[i]))
    import functools
    wrap = functools.cmp_to_key(cmp)
    return sorted(range(len(seq)), key=lambda i: wrap(key(seq[i])))
@checks(SEGMENTATION)
def renumber_by_xcenter(seg):
    """Given a segmentation (as a color image), change the labels
    assigned to each region such that when the labels are considered
    in ascending sequence, the x-centers of their bounding boxes
    are non-decreasing.  This is used for sorting the components
    of a segmented text line into left-to-right reading order."""
    # prepend a dummy bounding box so index i corresponds to label i
    # (label 0 is the background)
    objects = [(slice(0,0),slice(0,0))]+find_objects(seg)
    def xc(o):
        # if some labels of the segmentation are missing, we
        # return a very large xcenter, which will move them all
        # the way to the right (they don't show up in the final
        # segmentation anyway)
        if o is None: return 999999
        return mean((o[1].start,o[1].stop))
    xs = array([xc(o) for o in objects])
    order = argsort(xs)
    # build the old-label -> new-label mapping and apply it
    segmap = zeros(amax(seg)+1,'i')
    for i,j in enumerate(order): segmap[j] = i
    return segmap[seg]
@checks(SEGMENTATION)
def ordered_by_xcenter(seg):
    """Verify that the labels of a segmentation are ordered
    spatially (as determined by the x-center of their bounding
    boxes) in left-to-right reading order.

    Returns 1 if the x-centers are non-decreasing with label number,
    0 otherwise."""
    def xc(o): return mean((o[1].start,o[1].stop))
    # Robustness fix: find_objects returns None for label values that do
    # not occur in the image; those previously crashed xc().  Only the
    # components that are actually present take part in the check.
    objects = [(slice(0,0),slice(0,0))]+find_objects(seg)
    xs = array([xc(o) for o in objects if o is not None])
    for i in range(1,len(xs)):
        if xs[i-1]>xs[i]: return 0
    return 1
| [
"31203714@qq.com"
] | 31203714@qq.com |
9df9f831ab4c12e1d9076b116e537fc52979817f | 28068dd53ea6cf1b443af7ded3d9dd6341ea5ad9 | /occlusion_map.py | ff51e3e84e537e4bd7d97a1f38a4c90de65c3f45 | [] | no_license | bsaghafi/3DCNN-dMRI | 5a0d704a1d35120c0bf3df9335addda406dfb41f | 332e37853a7a68276e3b0b7168ecf8397d4fe701 | refs/heads/master | 2020-03-29T20:09:06.633036 | 2018-09-25T20:14:59 | 2018-09-25T20:14:59 | 150,298,085 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,283 | py | # Purpose: computes occlusion maps for the high impact exposure sample or (low)
# Inputs: delta FA maps (Post-Pre) and their classification labels, the 3DCNN model, the subject to compute the occlusion map for
# Outputs: occlusion map
# Date: 01/20/2018
# Author: Behrouz Saghafi
import numpy as np
np.random.seed(2016)
import warnings
warnings.filterwarnings("ignore")
from keras.preprocessing.image import ImageDataGenerator, array_to_img, img_to_array, load_img
from keras.layers.advanced_activations import ELU,LeakyReLU, PReLU
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Flatten, Activation
from keras.layers.convolutional import Convolution3D, MaxPooling3D, ZeroPadding3D
from keras.optimizers import SGD, Adam, Adadelta
from keras.callbacks import EarlyStopping
from keras.utils import np_utils
from keras.constraints import maxnorm
from keras.models import model_from_json
from keras.layers.normalization import BatchNormalization
from keras.regularizers import l2,l1
from keras.wrappers.scikit_learn import KerasClassifier
from keras import __version__ as keras_version
from keras.callbacks import EarlyStopping, ModelCheckpoint
import keras.backend as K
from sklearn.model_selection import GridSearchCV
from sklearn.model_selection import StratifiedKFold, cross_val_score, KFold, train_test_split
from sklearn.preprocessing import MinMaxScaler, StandardScaler, Normalizer
from sklearn.metrics import log_loss, accuracy_score
from sklearn.metrics import confusion_matrix
from sklearn.metrics import log_loss, accuracy_score, recall_score, roc_auc_score, f1_score, cohen_kappa_score
from sklearn.metrics import confusion_matrix
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import math
import scipy.io as sio
## Loading Train Data
import timeit
import warnings
warnings.filterwarnings("ignore")
from scipy import ndimage
import nibabel as nb
start=timeit.default_timer()
def get_binary_metrics(expected_labels, our_labels):
    """Print and return the standard binary-classification metrics for a
    set of predictions against ground truth.

    Returns (sensitivity, specificity, roc_auc, f1, accuracy, kappa)."""
    # sensitivity
    recall = recall_score(expected_labels, our_labels)
    print("%s: %.2f%%" % ('Sensitivity', recall * 100))
    # specificity = TN / (TN + FP), derived from the confusion matrix
    cm = confusion_matrix(expected_labels, our_labels)
    tn, fp, fn, tp = cm.ravel()
    specificity = tn / float(tn + fp)
    print("%s: %.2f%%" % ('Specificity', specificity * 100))
    print cm
    # roc_auc_score  (NOTE: 'sore' below is a typo in the printed label,
    # left as-is because it is runtime output)
    roc = roc_auc_score(expected_labels, our_labels)
    print("%s: %.2f%%" % ('ROC_AUC sore', roc * 100))
    # f1 score
    f1score = f1_score(expected_labels, our_labels)
    print("%s: %.2f%%" % ('F1 Score', f1score * 100))
    accuracy = accuracy_score(expected_labels, our_labels)
    print("%s: %.2f%%" % ('Accuracy', accuracy * 100))
    kappa=cohen_kappa_score(expected_labels, our_labels)
    print("%s: %.2f%%" % ('Kappa', kappa * 100))
    print '=========================='
    return recall, specificity, roc, f1score, accuracy, kappa
# X_train, X_test, y_train, y_test = load_data()
def load_data():
    """Load the precomputed delta-FA volumes (X) and their exposure-group
    labels (y) from hard-coded .npy paths on the cluster filesystem."""
    X=np.load('/project/bioinformatics/DLLab/Behrouz/dev/ITaKL/delta_FA_CD_new_24_36.npy')
    y=np.load('/project/bioinformatics/DLLab/Behrouz/dev/ITaKL/rwe_cp_new_24_36.npy')
    return X,y
# Reshaping the Data
def read_and_normalize_data():
    """Load the dataset, split it 80/20 (stratified, fixed random seed),
    reshape both splits to (samples, 1, x, y, z) for a one-channel 3-D
    CNN, and one-hot encode the labels into two classes.

    Returns (train_data, train_target, test_data, test_target, y_train).
    NOTE: despite the name, no normalization is applied any more - all of
    the scaling experiments were commented out and have been removed."""
    X, y = load_data()
    # (removed dead code: experiments with MinMaxScaler / Normalizer /
    # StandardScaler applied to the flattened volumes)
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.2, random_state = 7, stratify=y)
    print('Convert to numpy...')
    train_data = np.array(X_train, dtype=np.float64)
    # (removed dead code: per-voxel min/max scaling of the training split)
    print np.amax(train_data)
    print np.amin(train_data)
    print('Reshaping Train Data...')
    # add a singleton channel axis: (N, 1, x, y, z)
    train_data = train_data.reshape(train_data.shape[0], 1, train_data.shape[1], train_data.shape[2], train_data.shape[3])
    print('Train shape:', train_data.shape)
    print(train_data.shape[0], 'train samples')
    y_train = np.array(y_train, dtype=np.uint8)
    train_target = np_utils.to_categorical(y_train, 2)
    print('Convert to numpy...')
    test_data = np.array(X_test, dtype=np.float64)
    # (removed dead code: applying the training-split scaling to the test
    # split)
    print np.amax(test_data)
    print np.amin(test_data)
    print('Reshaping Test Data...')
    test_data = test_data.reshape(test_data.shape[0], 1, test_data.shape[1], test_data.shape[2], test_data.shape[3])
    print('Test shape:', test_data.shape)
    print(test_data.shape[0], 'Test samples')
    y_test = np.array(y_test, dtype=np.uint8)
    test_target = np_utils.to_categorical(y_test, 2)
    return train_data, train_target, test_data, test_target, y_train
# plotting training performance:
def plot_training_loss_acc(history):
    """Plot a Keras History object: log-loss (top) and accuracy (bottom)
    for the train and validation sets, show the figure, and save it to
    learning_curves.png."""
    fig, axs = plt.subplots(2, 1, sharex=True, sharey=False)
    fig.set_facecolor('white')
    axs[0].plot(history.history['loss'], label='Train Set', color='red')
    axs[0].plot(history.history['val_loss'], label='Validation Set', color='blue')
    axs[0].legend(loc='upper right')
    axs[0].set_ylabel('Log Loss')
    axs[0].set_title('Training Performance')
    axs[1].plot(history.history['acc'], label='Train Set', color='red')
    axs[1].plot(history.history['val_acc'], label='Validation Set', color='blue')
    axs[1].legend(loc='lower right')
    axs[1].set_ylabel('Accuracy')
    axs[1].set_xlabel('Epochs') # used for both subplots
    plt.show()
    fig.savefig('learning_curves.png', bbox_inches='tight')
# Creating Model
# X_train,X_test,y_train,y_test=load_data()
train_data, train_target, test_data, test_target, y_train = read_and_normalize_data()
kfold = StratifiedKFold(n_splits=5, shuffle=True, random_state=7)
print train_data.shape
print train_target.shape
bsize = 10
### load the model:
from keras.models import load_model
K.set_image_dim_ordering('th') # convert to theano dimension ordering
model=load_model('/project/bioinformatics/DLLab/Behrouz/dev/ITaKL/diffusion_model.h5')
split=['train:','test:']
data=[train_data,test_data]
target=[train_target,test_target]
# for i in range(2):
i=1
print split[i]
predicted_labels = model.predict(data[i], batch_size=bsize, verbose=0)
our_labels=np.argmax(predicted_labels, axis=1)
expected_labels=np.argmax(target[i], axis=1)
correct_classifications=np.nonzero(our_labels==expected_labels)
correct_probs=predicted_labels[correct_classifications]
confident_samples=np.argmax(correct_probs,axis=0)
# get_binary_metrics(expected_labels, our_labels)
print '\n'
np.save('probs',predicted_labels)
index_high=11
high_data_sample=test_data[index_high:index_high+1,:,:,:,:]
low_data_sample=test_data[1:2,:,:,:,:]
prob_H = model.predict(high_data_sample, batch_size=bsize, verbose=0)[:,1]
prob_L = model.predict(low_data_sample, batch_size=bsize, verbose=0)[:,0]
# FA_CD=nb.load('/project/radiology/ANSIR_lab/s174380/dev/DeepLearning/ITAKL/Cropped_downsampled_delta_FA/kids_hs022.nii')
# sample_data=np.reshape(high_data_sample,(48,60,52))
# sample_img=nb.Nifti1Image(sample_data,FA_CD.affine)
# nb.save(sample_img,'/project/radiology/ANSIR_lab/s174380/dev/DeepLearning/ITAKL/sample_img.nii')
FA_image=nb.load('/project/radiology/ANSIR_lab/s174380/dev/DeepLearning/ITAKL/FA_post_highly_impacted.nii')
FA=FA_image.get_data()
FA_mask=1.0*(FA>.02)
FA_mask_image=nb.Nifti1Image(FA_mask,np.eye(4))
nb.save(FA_mask_image,'/project/radiology/ANSIR_lab/s174380/dev/DeepLearning/ITAKL/Fa_mask.nii')
# FA_zoomed = ndimage.interpolation.zoom(FA, .5, order=3) # cubic interpolation
stepsize=2
fmap=np.zeros((48/stepsize,60/stepsize,52/stepsize))
## high exposure sample:
for x in range(48/stepsize):
for y in range(60/stepsize):
for z in range(52/stepsize):
print 'x=',x,'y=',y,'z=',z
dFA_temp=np.array(high_data_sample)
FA_temp=np.array(FA_mask)
FA_box=FA_temp[stepsize * x:stepsize * x + stepsize, stepsize * y:stepsize * y + stepsize, stepsize * z:stepsize * z + stepsize]
if np.sum(FA_box)==8:
dFA_temp[:, :, stepsize * x:stepsize * x + stepsize, stepsize * y:stepsize * y + stepsize, stepsize * z:stepsize * z + stepsize] = 2 * np.random.random((1, 1, stepsize,stepsize,stepsize)) - 1
prob_h = model.predict(dFA_temp, batch_size=bsize, verbose=0)[:, 1]
print 'prob_h=',prob_h
fmap[x,y,z]=(prob_H-prob_h)/prob_H
np.save('/project/bioinformatics/DLLab/Behrouz/dev/ITaKL/fmap_new'+str(index_high)+'.npy',fmap)
## low exposure sample:
# for x in range(48/stepsize):
# for y in range(60/stepsize):
# for z in range(52/stepsize):
# print 'x=',x,'y=',y,'z=',z
# dFA_temp=np.array(low_data_sample)
# FA_temp=np.array(FA_mask)
# FA_box=FA_temp[stepsize * x:stepsize * x + stepsize, stepsize * y:stepsize * y + stepsize, stepsize * z:stepsize * z + stepsize]
# if np.sum(FA_box)==8:
# dFA_temp[:, :, stepsize * x:stepsize * x + stepsize, stepsize * y:stepsize * y + stepsize, stepsize * z:stepsize * z + stepsize] = 2 * np.random.random((1, 1, stepsize,stepsize,stepsize)) - 1
# prob_l = model.predict(dFA_temp, batch_size=bsize, verbose=0)[:, 0]
# print 'prob_l=',prob_l
# fmap[x,y,z]=(prob_L-prob_l)/prob_L
# np.save('/project/radiology/ANSIR_lab/s174380/dev/DeepLearning/ITAKL/fmap_low.npy',fmap)
feat_Map = ndimage.interpolation.zoom(fmap, stepsize, order=0,mode='nearest') # nearest neighbor
fmap_image=nb.Nifti1Image(feat_Map,np.eye(4))
nb.save(fmap_image,'/project/bioinformatics/DLLab/Behrouz/dev/ITaKL/fmap_image_low.nii')
# for x in range(12):
# for y in range(15):
# for z in range(13):
# print 'x=',x,'y=',y,'z=',z
# hds=np.array(high_data_sample)
# orig=np.reshape(hds[:,:,4*x:4*x+4,4*y:4*y+4,4*z:4*z+4],(4*4*4))
# shuffled=np.random.permutation(orig)
# hds[:, :, 4 * x:4 * x + 4, 4 * y:4 * y + 4, 4 * z:4 * z + 4]=np.reshape(shuffled,(1,1,4,4,4))
# prob_h = model.predict(hds, batch_size=bsize, verbose=0)[:, 1]
# print 'prob_h=',prob_h
# fmap[x,y,z]=(prob_H-prob_h)/prob_H
# np.save('/project/radiology/ANSIR_lab/s174380/dev/DeepLearning/ITAKL/Omap_permuted.npy',fmap)
# fmap=np.zeros((12,15,13)) # 10x10x10 cubes with step of 10
# for x in range(12):
# for y in range(15):
# for z in range(13):
# print 'x=',x,'y=',y,'z=',z
# hds=np.array(high_data_sample)
# orig=np.reshape(hds[:,:,4*x:4*x+4,4*y:4*y+4,4*z:4*z+4],(4*4*4))
# shuffled=np.random.permutation(orig)
# hds[:, :, 4 * x:4 * x + 4, 4 * y:4 * y + 4, 4 * z:4 * z + 4]=np.reshape(shuffled,(1,1,4,4,4))
# prob_h = model.predict(hds, batch_size=bsize, verbose=0)[:, 1]
# print 'prob_h=',prob_h
# fmap[x,y,z]=(prob_H-prob_h)/prob_H
# np.save('/project/radiology/ANSIR_lab/s174380/dev/DeepLearning/ITAKL/Omap_permuted.npy',fmap)
# feat_Map = ndimage.interpolation.zoom(fmap, stepsize, order=0,mode='nearest') # nearest neighbor
# fmap_image=nb.Nifti1Image(feat_Map,np.eye(4))
# # nb.save(fmap_image,'/project/radiology/ANSIR_lab/s174380/dev/DeepLearning/ITAKL/fmap_image.nii')
# nb.save(fmap_image,'/project/radiology/ANSIR_lab/s174380/dev/DeepLearning/ITAKL/Omap_permuted_masked'+str(index_high)+'.nii')
stop = timeit.default_timer()
print 'Total run time in mins: {}'.format((stop - start) / 60) | [
"noreply@github.com"
] | bsaghafi.noreply@github.com |
2b73094cd52f82c9edc6aedc763c994ba6d9e6be | 6092f1215afb816ce253ce9177e9ed5985e3cbb3 | /Demo/009_patterns/pattern14.py | 86e805dfb84c886b7c864d14514d7e5bfbd968f2 | [] | no_license | Ataago/Python | 6060f39eddc226841a85ef1235e682cc34b76a84 | c0185090019f56e9a6a8769be1ae12cd5becba2c | refs/heads/master | 2021-04-30T01:47:51.582359 | 2020-06-10T20:36:02 | 2020-06-10T20:36:02 | 121,490,288 | 1 | 3 | null | null | null | null | UTF-8 | Python | false | false | 169 | py | """
Program to display the following pattern
example:
Enter number (n): 4
pattern:
4 4
3 3
2 2
1
""" | [
"ataago7@gmail.com"
] | ataago7@gmail.com |
23c4329d40500aec2c3c730b0cf2ec1d3e0aa6f5 | fc63bcaaebbb153ac2b8047d3f36b63b872a675f | /pazhur/bin/python-config | 11ec33560324f042f30a679693ef9cfdda367972 | [] | no_license | temi999/pazhur | 571dd5aa83edb0a2f691752c637ff63b6f387814 | 2ceb72d18707f60695b6c21149f95e5638e4d5af | refs/heads/master | 2022-05-03T23:28:36.046203 | 2019-12-03T21:01:31 | 2019-12-03T21:01:31 | 223,620,354 | 0 | 0 | null | 2022-04-22T22:48:03 | 2019-11-23T16:34:47 | Python | UTF-8 | Python | false | false | 2,360 | #!/home/vitaliy/pythonprojects/pazhur/pazhur/bin/python
import sys
import getopt
import sysconfig
valid_opts = ['prefix', 'exec-prefix', 'includes', 'libs', 'cflags',
'ldflags', 'help']
if sys.version_info >= (3, 2):
valid_opts.insert(-1, 'extension-suffix')
valid_opts.append('abiflags')
if sys.version_info >= (3, 3):
valid_opts.append('configdir')
def exit_with_usage(code=1):
    """Write a usage line listing every supported flag to stderr and exit
    with the given status code."""
    flag_list = '|'.join('--'+opt for opt in valid_opts)
    sys.stderr.write("Usage: {0} [{1}]\n".format(sys.argv[0], flag_list))
    sys.exit(code)
# Parse the command line; unknown flags (or no flags at all) print usage.
try:
    opts, args = getopt.getopt(sys.argv[1:], '', valid_opts)
except getopt.error:
    exit_with_usage()
if not opts:
    exit_with_usage()
pyver = sysconfig.get_config_var('VERSION')
getvar = sysconfig.get_config_var
opt_flags = [flag for (flag, val) in opts]
if '--help' in opt_flags:
    exit_with_usage(code=0)
# Print the requested pieces of build configuration, one per flag, in the
# order they appeared on the command line.
for opt in opt_flags:
    if opt == '--prefix':
        print(sysconfig.get_config_var('prefix'))
    elif opt == '--exec-prefix':
        print(sysconfig.get_config_var('exec_prefix'))
    elif opt in ('--includes', '--cflags'):
        flags = ['-I' + sysconfig.get_path('include'),
                 '-I' + sysconfig.get_path('platinclude')]
        if opt == '--cflags':
            flags.extend(getvar('CFLAGS').split())
        print(' '.join(flags))
    elif opt in ('--libs', '--ldflags'):
        abiflags = getattr(sys, 'abiflags', '')
        libs = ['-lpython' + pyver + abiflags]
        libs += getvar('LIBS').split()
        libs += getvar('SYSLIBS').split()
        # add the prefix/lib/pythonX.Y/config dir, but only if there is no
        # shared library in prefix/lib/.
        if opt == '--ldflags':
            if not getvar('Py_ENABLE_SHARED'):
                libs.insert(0, '-L' + getvar('LIBPL'))
            if not getvar('PYTHONFRAMEWORK'):
                libs.extend(getvar('LINKFORSHARED').split())
        print(' '.join(libs))
    elif opt == '--extension-suffix':
        ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
        if ext_suffix is None:
            # fall back to the pre-3.4 config variable name
            ext_suffix = sysconfig.get_config_var('SO')
        print(ext_suffix)
    elif opt == '--abiflags':
        if not getattr(sys, 'abiflags', None):
            exit_with_usage()
        print(sys.abiflags)
    elif opt == '--configdir':
        print(sysconfig.get_config_var('LIBPL'))
"temi999@github.com"
] | temi999@github.com | |
c82ac9545934cfb4af8dfdc42bb29a78cf7b1306 | 1a87ba6ca3e0636b85c67a7fc86c190322e6df2e | /widget/calendar.py | 0321482b6d80c152c99fcbe9c52623f1738c6378 | [] | no_license | Sincoyw/PyQt5 | f9fe8a0a3678dfdb96899767b76d1f3d4597d7c6 | dff5f0b6471341645e993144bb80568da3704b98 | refs/heads/master | 2020-03-26T05:19:49.126734 | 2018-08-14T12:07:42 | 2018-08-14T12:07:42 | 144,551,227 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 999 | py | #!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
ZetCode PyQt5 tutorial
This example shows a QCalendarWidget widget.
Author: Jan Bodnar
Website: zetcode.com
Last edited: August 2017
"""
from PyQt5.QtWidgets import (QWidget, QCalendarWidget,
QLabel, QApplication, QVBoxLayout)
from PyQt5.QtCore import QDate
import sys
class Example(QWidget):
    """Demo window containing a QCalendarWidget; clicking a date shows it
    in a label below the calendar."""

    def __init__(self):
        super().__init__()
        self.initUI()

    def initUI(self):
        """Build the calendar and date label, then show the window."""
        layout = QVBoxLayout(self)

        calendar = QCalendarWidget(self)
        calendar.setGridVisible(True)
        calendar.clicked[QDate].connect(self.showDate)
        layout.addWidget(calendar)

        self.lbl = QLabel(self)
        self.lbl.setText(calendar.selectedDate().toString())
        layout.addWidget(self.lbl)

        self.setLayout(layout)
        self.setGeometry(300, 300, 350, 300)
        self.setWindowTitle('Calendar')
        self.show()

    def showDate(self, date):
        """Slot for calendar clicks: display the picked date."""
        self.lbl.setText(date.toString())
if __name__ == '__main__':
    # create the Qt application, show the demo window, and enter the event loop
    app = QApplication(sys.argv)
    ex = Example()
sys.exit(app.exec_()) | [
"heaven_20062006@126.com"
] | heaven_20062006@126.com |
282178eb96083ed7af80ea573a1dde94a35cd474 | 1bad7d2b7fc920ecf2789755ed7f44b039d4134d | /ABC/141/D.py | d02d4346f436cb62c503781981a2c46b5f3839df | [] | no_license | kanekyo1234/AtCoder_solve | ce95caafd31f7c953c0fc699f0f4897dddd7a159 | e5ea7b080b72a2a2fd3fcb826cd10c4ab2e2720e | refs/heads/master | 2023-04-01T04:01:15.885945 | 2021-04-06T04:03:31 | 2021-04-06T04:03:31 | 266,151,065 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 125 | py | n,m=map(int,input().split())
a=list(map(int,input().split()))
for i in range(m):
a[a.index(max(a))]//=2
print(sum(a))
| [
"kanekyohunter.0314@softbank.ne.jp"
] | kanekyohunter.0314@softbank.ne.jp |
26f83b49b30e77c3576bfa0efc605f495d5ebbe3 | ffd77fb196b667ab8b6e36788d5e3996800fac03 | /bb_pc/pong2p.py | 0e479beb1a27b1cbc39acf3ab2a2104425252042 | [] | no_license | wozu-dichter/brainbats | 891f8a388bb416d4d0a46850e0d406864d8c1dc0 | f587b504bcbbad65023b22be097df130e2d770e0 | refs/heads/master | 2021-12-03T14:25:05.226628 | 2014-07-19T03:08:36 | 2014-07-19T03:08:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,784 | py | import pygame, sys
from numpy import *
from pygame.locals import *
import scipy
from pyeeg import bin_power
pygame.init()
import serial
from time import sleep
MAX_POLLS = 55
fpsClock= pygame.time.Clock()
window = pygame.display.set_mode((1024,600))
pygame.display.set_caption("Mindwave Viewer")
from parser import *
def sendConnect():
    """Send the Mindwave auto-connect command (0xc2) to every dongle and
    poll each parser until it reports 'connected', giving up after
    MAX_POLLS attempts."""
    for (port,parser) in parsers:
        print("Connecting to " + port)
        parser.write_serial("\xc2")
        sleep(1)
        parser.update()
        pollnum=0
        while (parser.dongle_state != "connected"):
            print(parser.dongle_state)
            # re-issue the connect command and re-read dongle state
            parser.write_serial("\xc2")
            sleep(1)
            parser.update()
            print("polling to connect...")
            pollnum += 1
            if (pollnum >= MAX_POLLS):
                break
def sendDisconnect():
    """Send the Mindwave disconnect command (0xc1) to every dongle."""
    for (port,parser) in parsers:
        print("Disconnecting " + port)
        parser.write_serial("\xc1")
        sleep(0.5)
def closeParsers():
    """Close the serial socket of every parser (call before exiting)."""
    for (port,parser) in parsers:
        parser.socket.close()
# scan for 2 attached Mindwave dongles
parsers = getParsers()
if (len(parsers) < 2):
print "Please connect two Mindwave dongles"
closeParsers()
pygame.quit()
sys.exit()
pvalues = []
# scan for available arduino ports.
arduinoPort = []
for pNum in range(256):
pName = "/dev/ttyACM"+str(pNum)
try:
s = serial.Serial(pName)
arduinoPort.append(pName)
s.close()
except serial.SerialException:
pass
arduino = serial.Serial(arduinoPort[0],9600)
blackColor = pygame.Color(0,0,0)
redColor = pygame.Color(255,0,0)
greenColor = pygame.Color(0,255,0)
deltaColor = pygame.Color(100,0,0)
thetaColor = pygame.Color(0,0,255)
alphaColor = pygame.Color(255,0,0)
betaColor = pygame.Color(0,255,00)
gammaColor = pygame.Color(0,255,255)
background_img = pygame.image.load("sdl_viewer_background.png")
font = pygame.font.Font("freesansbold.ttf",21)
raw_eeg = True
spectra = []
iteration = 0
null = 0
serialcounter = 0
serialdelay = 5
meditation_img = font.render("Meditation", False, redColor)
attention_img = font.render("Attention", False, redColor)
signal_img = font.render("Contact Quality ", False, redColor)
key_img = font.render("d t a a b b g g", False, redColor)
record_baseline = False
while True:
for (port,parser) in parsers:
parser.update()
if parser.sending_data:
pvalues.append(parser.current_attention)
print("APPENDING " + str(parser.current_attention) + " for port " + port)
else:
pvalues.append(0)
print("not sending data")
pass
arduino.write(str(pvalues[0]/10))
arduino.write(str(pvalues[1]/10))
print (pvalues[0]/10)," ",(pvalues[1]/10)
pvalues = []
window.blit(background_img,(0,0))
for (port,parser) in parsers:
p = parser
if p.sending_data:
iteration+=1
flen = 50
if len(p.raw_values)>=500:
spectrum, relative_spectrum = bin_power(p.raw_values[-p.buffer_len:], range(flen),512)
spectra.append(array(relative_spectrum))
if len(spectra)>30:
spectra.pop(0)
spectrum = mean(array(spectra),axis=0)
for i in range (flen-1):
value = float(spectrum[i]*1000)
if i<3:
color = deltaColor
elif i<8:
color = thetaColor
elif i<13:
color = alphaColor
elif i<30:
color = betaColor
else:
color = gammaColor
pygame.draw.rect(window, color, (25+i*10,200-value, 5,value))
else:
pass
pygame.draw.circle(window,redColor, (810,150),p.current_attention/2)
pygame.draw.circle(window,greenColor, (810,150),60/2,1)
pygame.draw.circle(window,greenColor, (810,150),100/2,1)
window.blit(attention_img, (760,210))
pygame.draw.circle(window,redColor, (650,150),p.current_meditation/2)
pygame.draw.circle(window,greenColor, (650,150),60/2,1)
pygame.draw.circle(window,greenColor, (650,150),100/2,1)
window.blit(meditation_img, (600,210))
serialcounter += 1
if (p.poor_signal < 50 and serialcounter > serialdelay):
pygame.draw.circle(window, greenColor, (150,400),60/2)
serialcounter = 0
elif serialcounter > serialdelay:
pygame.draw.circle(window, redColor, (150,400),60/2)
serialcounter = 0
window.blit(signal_img, (100,325))
if len(p.current_vector)>7:
m = max(p.current_vector)
if m == 0:
m = 0.01
for i in range(8):
value = p.current_vector[i] *100.0/m
pygame.draw.rect(window, redColor, (600+i*30,450-value, 6,value))
window.blit(key_img, (600, 275))
if raw_eeg:
lv = 0
for i,value in enumerate(p.raw_values[-1000:]):
v = value/ 255.0/ 5
pygame.draw.line(window, redColor, (i+25,500-lv),(i+25, 500-v))
lv = v
else:
img = font.render("Mindwave Headset is not sending data... Press F5 to autoconnect or F6 to disconnect.", False, redColor)
window.blit(img,(100,100))
for event in pygame.event.get():
if event.type==QUIT:
closeParsers()
pygame.quit()
sys.exit()
if event.type==KEYDOWN:
if event.key== K_SPACE:
resetGame()
elif event.key== K_F5:
sendConnect()
elif event.key== K_F6:
sendDisconnect()
elif event.key==K_ESCAPE:
closeParsers()
pygame.quit()
sys.exit()
pygame.display.update()
fpsClock.tick(30)
| [
"DrReeves@gmail.com"
] | DrReeves@gmail.com |
c716cba94127a5205a0509fb780da122e31af450 | 9997cdbc6cb1b5f69a6d66f30a74efe04afc5bca | /info/modules/passport/__init__.py | 5dca8b0586116c7e5392e9a6cc1efaf92245c20c | [] | no_license | Manphy/chuangjian1 | 3a2d65b93f1fc57e27638b40883a3656d9b8bdc6 | 3cc744913d22929f840bf28086b37c8fa94069d2 | refs/heads/master | 2020-04-14T15:40:31.699876 | 2019-01-04T07:55:24 | 2019-01-04T07:55:24 | 163,934,374 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 159 | py | from flask import Blueprint
# 创建蓝图,并设置蓝图前缀
passport_blu = Blueprint("passport ",__name__,url_prefix='/passport')
from . import views | [
"Manphy"
] | Manphy |
c0c926ef5d2c8bb9f664a67b74aed40261cd6810 | dc176045014a6d3fb44a40c437786c73a226dcc1 | /gpio/a2d/ldr.py | a8b4e1869d8f54999dbd00ed4d7037c550435117 | [] | no_license | recantha/redwing-pi | fdda28a7280878c8773a7a6154bac6b33245f660 | 7505d283d62d838e85a20b07df35115389bdf5d6 | refs/heads/master | 2021-01-02T08:09:56.009082 | 2013-05-04T08:07:24 | 2013-05-04T08:07:24 | 6,124,243 | 2 | 3 | null | null | null | null | UTF-8 | Python | false | false | 3,317 | py | #!/usr/bin/env python
import time
import os
import RPi.GPIO as GPIO
# Use Broadcom (BCM) GPIO numbering for every pin reference below.
GPIO.setmode(GPIO.BCM)
# Non-zero enables the verbose diagnostic prints in the main loop.
DEBUG = 1
# read SPI data from MCP3008 chip, 8 possible adc's (0 thru 7)
def readadc(adcnum, clockpin, mosipin, misopin, cspin):
        """Bit-bang one SPI transaction with an MCP3008 A/D converter.

        Returns the raw reading of channel `adcnum` (per the in-code
        comments: one empty bit, one null bit, then 10 data bits), or -1
        when the channel number is out of range.  The pin arguments are
        BCM GPIO numbers (GPIO.setmode(GPIO.BCM) is set at module level).
        The statement order below implements the MCP3008 wire protocol
        and must not be rearranged.
        """
        if ((adcnum > 7) or (adcnum < 0)):
                return -1
        # Idle the bus (CS high, clock low), then assert CS to open a frame.
        GPIO.output(cspin, True)
        GPIO.output(clockpin, False)  # start clock low
        GPIO.output(cspin, False)     # bring CS low
        commandout = adcnum
        commandout |= 0x18  # start bit + single-ended bit
        commandout <<= 3    # we only need to send 5 bits here
        for i in range(5):
                # Clock the command out, most-significant bit first.
                if (commandout & 0x80):
                        GPIO.output(mosipin, True)
                else:
                        GPIO.output(mosipin, False)
                commandout <<= 1
                GPIO.output(clockpin, True)
                GPIO.output(clockpin, False)
        adcout = 0
        # read in one empty bit, one null bit and 10 ADC bits
        for i in range(12):
                GPIO.output(clockpin, True)
                GPIO.output(clockpin, False)
                adcout <<= 1
                if (GPIO.input(misopin)):
                        adcout |= 0x1
        # Release chip select to end the frame.
        GPIO.output(cspin, True)
        adcout >>= 1       # first bit is 'null' so drop it
        return adcout
# change these as desired - they're the BCM pins connected from the
# SPI port on the ADC to the Cobbler
SPICLK = 18
SPIMISO = 23
SPIMOSI = 24
SPICS = 25
# set up the bit-banged SPI interface pins
GPIO.setup(SPIMOSI, GPIO.OUT)
GPIO.setup(SPIMISO, GPIO.IN)
GPIO.setup(SPICLK, GPIO.OUT)
GPIO.setup(SPICS, GPIO.OUT)
# ADC channel carrying the light-dependent-resistor reading.
# NOTE(review): the original comment said "adc #0" but the code reads
# channel 1 — confirm which channel the sensor is actually wired to.
ldr_port = 1;
last_read = 0       # last raw reading, used to detect changes
tolerance = 1       # minimum change (in ADC counts) treated as a real move
# Main polling loop (Python 2 print syntax): sample the ADC twice a second
# and, when the reading moved more than `tolerance`, map it to a 0-100
# volume level and push it to ALSA via amixer.
while True:
        # we'll assume that the reading didn't move
        reading_changed = False
        # read the analog pin
        port_reading = readadc(ldr_port, SPICLK, SPIMOSI, SPIMISO, SPICS)
        # how much has it changed since the last read?
        pot_adjust = abs(port_reading - last_read)
        if DEBUG:
                print "port_reading:", port_reading
                print "pot_adjust:", pot_adjust
                print "last_read", last_read
        if ( pot_adjust > tolerance ):
               reading_changed = True
        if DEBUG:
                print "reading_changed", reading_changed
        if ( reading_changed ):
                set_volume = port_reading / 10.24           # convert 10bit adc (0-1023) reading into a 0-100 volume level
                set_volume = round(set_volume)              # round out decimal value
                set_volume = int(set_volume)                # cast volume as integer
                print 'Volume = {volume}%' .format(volume = set_volume)
                # NOTE(review): shells out with sudo via os.system; the
                # interpolated value is a locally computed int so injection
                # risk is low, but a direct mixer API would be cleaner.
                set_vol_cmd = 'sudo amixer cset numid=1 -- {volume}% > /dev/null' .format(volume = set_volume)
                os.system(set_vol_cmd)  # set volume
                if DEBUG:
                        print "set_volume", set_volume
                        print "tri_pot_changed", set_volume
                # save the reading for the next loop iteration
                last_read = port_reading
        # hang out and do nothing for a half second
        time.sleep(0.5)
"mike@recantha.coo.uk"
] | mike@recantha.coo.uk |
e7b063dd4d020434b6ad9afd84f98811d55adaec | 36b5d2fec65ac9317749b76c4fd93a663935e669 | /test.py | cb7756d65834a92070d3de5cf02950a8a1057c3e | [
"MIT"
] | permissive | arminalgln/SolarPMU | 946bcc1b1347ab2c8f8ca90e956472255b72b126 | ddb6458b1c5f8fd20233a07065d9ebf5003c52ff | refs/heads/master | 2022-12-08T03:50:19.396486 | 2020-09-09T23:12:27 | 2020-09-09T23:12:27 | 293,867,329 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 536 | py | import tensorflow as tf
# NOTE(review): indexing [0] assumes at least one GPU is present;
# list_physical_devices('GPU') returns an empty list otherwise and the
# next line raises IndexError.
physical_devices = tf.config.list_physical_devices('GPU')
tf.config.experimental.set_memory_growth(physical_devices[0], enable=True)
#%%
# Batch of 32 sequences, 10 timesteps, 8 features.
inputs = tf.random.normal([32, 10, 8])
# LSTM returning only the last output (expected shape: (32, 4)).
lstm = tf.keras.layers.LSTM(4, activation='tanh')
output = lstm(inputs)
print(output.shape)
#%%
# Same size layer, but returning the whole sequence plus final states.
lstm = tf.keras.layers.LSTM(4, return_sequences=True, return_state=True)
whole_seq_output, final_memory_state, final_carry_state = lstm(inputs)
print(whole_seq_output.shape)
print(final_memory_state.shape)
print(final_carry_state.shape)
"armin.aligholian@gmail.com"
] | armin.aligholian@gmail.com |
161891b9fe83357fd142c4642604ab48c2402d95 | af3dfd01c3063b4f92cbef68fcd94b5493b139e3 | /Experiments/DataMining/FactoryProduct/train.py | 50a604f0d3c26c8a7d583526c6db6ac7087bc361 | [] | no_license | DarringZhang/GitRepository | 1869b640fa5c1fc9738c334de4a737c7109e3ddb | cd5bdc66925f49eb9b2c1922453ed922efee86a6 | refs/heads/master | 2020-04-03T18:25:26.036614 | 2020-01-09T03:00:21 | 2020-01-09T03:00:21 | 155,483,511 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,910 | py | import numpy as np
import pandas as pd
import catboost as cbt
from sklearn.metrics import accuracy_score, roc_auc_score,log_loss
import gc
import math
import time
from tqdm import tqdm
import datetime
from sklearn.model_selection import KFold,StratifiedKFold
from sklearn.preprocessing import LabelEncoder
import matplotlib.pyplot as plt
from datetime import datetime,timedelta
import warnings
import os
warnings.filterwarnings('ignore')
pd.options.display.max_columns = None
pd.options.display.max_rows = None

# --- load data -------------------------------------------------------------
train = pd.read_csv('first_round_training_data.csv')
test = pd.read_csv('first_round_testing_data.csv')
submit = pd.read_csv('submit_example.csv')
# pd.concat replaces DataFrame.append (removed in pandas >= 2.0); the result
# is identical after the index reset.
data = pd.concat([train, test]).reset_index(drop=True)

# Encode the four quality classes as integers 0..3.
dit = {'Excellent':0,'Good':1,'Pass':2,'Fail':3}
data['label'] = data['Quality_label'].map(dit)
train['label'] = train['Quality_label'].map(dit)

# Features are Parameter5..Parameter10; rows with a label are training rows.
feature_name = ['Parameter{0}'.format(i) for i in range(5, 11)]
tr_index = ~data['label'].isnull()
X_train = data[tr_index][feature_name].reset_index(drop=True)
y = data[tr_index]['label'].reset_index(drop=True).astype(int)
X_test = data[~tr_index][feature_name].reset_index(drop=True)
print(X_train.shape,X_test.shape)

# Out-of-fold probabilities for the training rows and seed-averaged test
# predictions (4 classes), using 5-fold stratified CV per random seed.
oof = np.zeros((X_train.shape[0],4))
prediction = np.zeros((X_test.shape[0],4))
seeds = [19970412, 2019 * 2 + 1024, 4096, 2048, 1024]
num_model_seed = 5
for model_seed in range(num_model_seed):
    print(model_seed + 1)
    oof_cat = np.zeros((X_train.shape[0],4))
    prediction_cat=np.zeros((X_test.shape[0],4))
    skf = StratifiedKFold(n_splits=5, random_state=seeds[model_seed], shuffle=True)
    for index, (train_index, test_index) in enumerate(skf.split(X_train, y)):
        print(index)
        train_x, test_x, train_y, test_y = X_train.iloc[train_index], X_train.iloc[test_index], y.iloc[train_index], y.iloc[test_index]
        gc.collect()
        cbt_model = cbt.CatBoostClassifier(iterations=800,learning_rate=0.01,verbose=300,
                                           early_stopping_rounds=200,loss_function='MultiClass')
        # Bug fix: early stopping must monitor the held-out fold. The
        # original passed eval_set=(train_x, train_y), so the monitored loss
        # never degraded and early_stopping_rounds was effectively dead.
        cbt_model.fit(train_x, train_y, eval_set=(test_x, test_y))
        oof_cat[test_index] += cbt_model.predict_proba(test_x)
        prediction_cat += cbt_model.predict_proba(X_test)/5   # /5 == n_splits
        gc.collect()
    oof += oof_cat / num_model_seed
    prediction += prediction_cat / num_model_seed
    # Per-seed diagnostics on the out-of-fold predictions.
    print('logloss',log_loss(pd.get_dummies(y).values, oof_cat))
    print('ac',accuracy_score(y, np.argmax(oof_cat,axis=1)))
    print('mae',1/(1 + np.sum(np.absolute(np.eye(4)[y] - oof_cat))/480))
print('logloss',log_loss(pd.get_dummies(y).values, oof))
print('ac',accuracy_score(y, np.argmax(oof,axis=1)))
print('mae',1/(1 + np.sum(np.absolute(np.eye(4)[y] - oof))/480))

# Build the submission: average class probabilities within each Group.
sub = test[['Group']]
prob_cols = [i for i in submit.columns if i not in ['Group']]
for i, f in enumerate(prob_cols):
    sub[f] = prediction[:, i]
for i in prob_cols:
    sub[i] = sub.groupby('Group')[i].transform('mean')
sub = sub.drop_duplicates()
sub.to_csv("submission.csv",index=False)
"1002970363@qq.com"
] | 1002970363@qq.com |
60b2c195bdcf461f2239e51c24e394fb45103c4e | 9c281920e9847a7c9123841a44cda887961eda17 | /algorithm-questions/searching/ternary_search.py | a6a7ff1e825b4971ddb22a77fe95281bed50fdd6 | [] | no_license | hhoangphuoc/data-structures-and-algorithms | e8b28729f7befc6f18f16140332b18ba40c995c0 | 79ccc0aa34d9e3eb508bfe64d1fcee9dd1ea5e98 | refs/heads/master | 2023-01-02T18:02:24.251679 | 2020-11-02T03:51:12 | 2020-11-02T03:51:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 914 | py | # coding: UTF-8
'''
Ternary search is similar to linear and binary search that is used to determine the position of an element in a list.
In binary search, the sorted array is divided into two parts while in ternary search the sorted array is divided into three
parts.
It is a divide and conquer algorithm where in each iteration, it neglects 2/3 of the part and repeats the same operation with
the remaining 1/3.
Complexity: (log N where base is 3)
'''
def ternary_search(left, right, key, array):
    """Recursive ternary search over a sorted ``array``.

    The interval [left, right] is split into three parts with two
    midpoints, and the search recurses into the only third that can
    contain ``key``; 2/3 of the interval is discarded per call, giving
    O(log_3 N) comparisons.

    :param left: lowest candidate index (inclusive)
    :param right: highest candidate index (inclusive)
    :param key: value to locate
    :param array: sequence sorted in ascending order
    :return: an index of ``key`` in ``array``, or -1 when absent
    """
    if left <= right:
        # Bug fix: floor division keeps the midpoints integral. Under
        # Python 3, '/' returns a float, which cannot index a list.
        midl = left + (right - left) // 3
        midr = right - (right - left) // 3

        if array[midl] == key:
            return midl
        if array[midr] == key:
            return midr
        if array[midl] > key:
            # key, if present, lies left of the first midpoint.
            return ternary_search(left, midl - 1, key, array)
        if array[midr] < key:
            # key, if present, lies right of the second midpoint.
            return ternary_search(midr + 1, right, key, array)
        # Otherwise it can only sit strictly between the two midpoints.
        return ternary_search(midl + 1, midr - 1, key, array)
    return -1
| [
"charul.agrl@gmail.com"
] | charul.agrl@gmail.com |
d8697fb977edd87fe504d6ffb8a9a877b8389dfc | 9be7dd059042e382bc68d2da0e9db929770c36a1 | /madgraph/aloha/aloha_object.py | 8da8fee9f9e7b8286a6b1d6d08387d081f5a6cb2 | [] | no_license | restrepo/SimplifiedDM-SSSFDM-Toolbox | ee891d51d252e3087e6287fb9e3ce055f55e1354 | bbdefde970b7016159f2f9f51eaf9cefc127f220 | refs/heads/master | 2021-01-12T17:15:48.830510 | 2017-11-11T22:13:54 | 2017-11-11T22:13:54 | 71,532,276 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 41,974 | py | ################################################################################
#
# Copyright (c) 2010 The MadGraph5_aMC@NLO Development team and Contributors
#
# This file is a part of the MadGraph5_aMC@NLO project, an application which
# automatically generates Feynman diagrams and matrix elements for arbitrary
# high-energy processes in the Standard Model and beyond.
#
# It is subject to the MadGraph5_aMC@NLO license which should accompany this
# distribution.
#
# For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch
#
################################################################################
## Diagram of Class
##
## Variable <--- aloha_lib.Variable
## |
## +- LorentzObject <--- Gamma
## |
## +- Sigma
## |
## +- P
##
## list <--- AddVariable
## |
## +- MultVariable <--- MultLorentz
##
## list <--- LorentzObjectRepresentation <-- ConstantObject
##
################################################################################
from __future__ import division
import aloha.aloha_lib as aloha_lib
import aloha
import cmath
#===============================================================================
# P (Momenta)
#===============================================================================
class L_P(aloha_lib.LorentzObject):
    """ Helas Object for an Impulsion (four-momentum of one particle).

    Carries exactly one Lorentz index and no spin index; each Lorentz
    component maps onto the external variable P<particle>_<mu>.
    """

    # Momenta are contracted first when expressions are simplified.
    contract_first = 1

    def __init__(self, name, lorentz1, particle):
        self.particle = particle
        aloha_lib.LorentzObject.__init__(self, name,[lorentz1], [],['P%s'%particle])
        # Record the momentum tag in the global kernel bookkeeping.
        aloha_lib.KERNEL.add_tag((name,))

    def create_representation(self):
        # One real (double) variable per Lorentz component.
        self.sub0 = aloha_lib.DVariable('P%s_0' % self.particle)
        self.sub1 = aloha_lib.DVariable('P%s_1' % self.particle)
        self.sub2 = aloha_lib.DVariable('P%s_2' % self.particle)
        self.sub3 = aloha_lib.DVariable('P%s_3' % self.particle)

        self.representation= aloha_lib.LorentzObjectRepresentation(
                                    {(0,): self.sub0, (1,): self.sub1, \
                                     (2,): self.sub2, (3,): self.sub3},
                                    self.lorentz_ind, [])
class P(aloha_lib.FactoryLorentz):
    """Factory wrapper producing L_P momentum objects."""

    object_class = L_P

    @classmethod
    def get_unique_name(cls, lorentz1, particle):
        # Key format: '_P^<particle>_<lorentz1>'
        return '_P^%s_%s' % (particle, lorentz1)
#===============================================================================
# Pslash
#===============================================================================
class L_PSlash(aloha_lib.LorentzObject):
    """Slashed momentum, i.e. the 4x4 matrix gamma^mu p_mu of one particle.

    Carries two spin indices and no free Lorentz index: the contraction
    with the momentum is done analytically, in the same basis as the
    commented gamma matrices below.
    """

    #gamma0 = [[0, 0, 1, 0], [0, 0, 0, 1], [1, 0, 0, 0], [0, 1, 0, 0]]
    #gamma1 = [[0, 0, 0, 1], [0, 0, 1, 0], [0, -1, 0, 0], [-1, 0, 0, 0]]
    #gamma2 = [[0, 0, 0, -complex(0,1)],[0, 0, complex(0,1), 0],
    #                   [0, complex(0,1), 0, 0], [-complex(0,1), 0, 0, 0]]
    #gamma3 = [[0, 0, 1, 0], [0, 0, 0, -1], [-1, 0, 0, 0], [0, 1, 0, 0]]
    #
    #gamma = [gamma0, gamma1, gamma2, gamma3]

    def __init__(self, name, spin1, spin2, particle):

        self.particle = particle
        aloha_lib.LorentzObject.__init__(self,name,[], [spin1, spin2])

    def create_representation(self):
        """Build the matrix gamma^mu p_mu from the particle's momentum."""
        # Real components of the four-momentum.
        p0 = aloha_lib.DVariable('P%s_0' % self.particle)
        p1 = aloha_lib.DVariable('P%s_1' % self.particle)
        p2 = aloha_lib.DVariable('P%s_2' % self.particle)
        p3 = aloha_lib.DVariable('P%s_3' % self.particle)

        # Entries follow from contracting the commented matrices with
        # p_mu = (p0, -p1, -p2, -p3), i.e. metric signature (+,-,-,-).
        gamma = {
             (0, 0): 0, (0, 1): 0, (0, 2): p0-p3, (0, 3): -1*p1+1j*p2,
             (1, 0): 0, (1, 1): 0, (1, 2): -1*p1-1j*p2, (1, 3): p0+p3,
             (2, 0): p0+p3, (2, 1): p1-1j*p2, (2, 2): 0, (2, 3): 0,
             (3, 0): p1+1j*p2, (3, 1): p0-p3, (3, 2): 0, (3, 3): 0}

        self.representation = aloha_lib.LorentzObjectRepresentation(gamma,
                                self.lorentz_ind,self.spin_ind)
class PSlash(aloha_lib.FactoryLorentz):
    """Factory wrapper producing L_PSlash (gamma . momentum) objects."""

    object_class = L_PSlash

    @classmethod
    def get_unique_name(cls, spin1, spin2, particle):
        # Key format: '_P<particle>/_<spin1>_<spin2>'
        return '_P%s/_%s_%s' % (particle, spin1,spin2)
#===============================================================================
# Mass
#===============================================================================
class L_Mass(aloha_lib.LorentzObject):
    """ Helas Object for a Mass (scalar: no Lorentz or spin index)."""

    def __init__(self, name, particle):
        self.particle = particle
        aloha_lib.LorentzObject.__init__(self, name,[], [])

    def create_representation(self):
        # Real (double) external variable M<particle>.
        mass = aloha_lib.DVariable('M%s' % self.particle)
        self.representation = aloha_lib.LorentzObjectRepresentation(
                                mass, self.lorentz_ind, self.spin_ind)
class Mass(aloha_lib.FactoryLorentz):
    """Factory wrapper producing L_Mass objects."""

    object_class = L_Mass

    @classmethod
    def get_unique_name(cls, particle):
        # Key format: '_M<particle>'
        return '_M%s' % particle
#===============================================================================
# Mass
#===============================================================================
class L_Coup(aloha_lib.LorentzObject):
    """ Helas Object for a coupling constant (scalar, no indices).

    Note: the original docstring said "Mass" — copy-paste leftover.
    """

    def __init__(self, name, nb):
        # nb: index of the coupling (COUP1, COUP2, ...).
        self.nb = nb
        aloha_lib.LorentzObject.__init__(self, name,[], [])

    def create_representation(self):
        # Uses aloha_lib.Variable here, whereas masses/widths/momenta use
        # DVariable.
        coup = aloha_lib.Variable('COUP%s' % self.nb)
        self.representation = aloha_lib.LorentzObjectRepresentation(
                                coup, self.lorentz_ind, self.spin_ind)
class Coup(aloha_lib.FactoryLorentz):
    """Factory wrapper producing L_Coup (coupling constant) objects."""

    object_class = L_Coup

    @classmethod
    def get_unique_name(cls, nb):
        # Key format: 'coup<nb>'
        return 'coup%s' % nb
#===============================================================================
# FCT
#===============================================================================
class L_FCT(aloha_lib.LorentzObject):
    """ Helas Object for an external function placeholder FCT<id>.

    Note: the original docstring said "Mass" — copy-paste leftover.
    """

    def __init__(self, name, id):
        # Numeric identifier of the function variable.
        self.fctid = id
        aloha_lib.LorentzObject.__init__(self, name,[], [])

    def create_representation(self):
        var = aloha_lib.Variable('FCT%s' % self.fctid)
        self.representation = aloha_lib.LorentzObjectRepresentation(
                                var, self.lorentz_ind, self.spin_ind)
class FCT(aloha_lib.FactoryLorentz):
    """Factory wrapper producing L_FCT objects."""

    object_class = L_FCT

    @classmethod
    def get_unique_name(cls, name):
        # Key format: '_FCT<name>'
        return '_FCT%s' % name
#===============================================================================
# OverMass2
#===============================================================================
class L_OverMass2(aloha_lib.LorentzObject):
    """ Helas Object for 1/M**2 (scalar, tagged so the propagator code
    can detect its presence)."""

    def __init__(self, name, particle):
        self.particle = particle
        aloha_lib.LorentzObject.__init__(self, name, [], [], tags=['OM%s' % particle])

    def create_representation(self):
        # Real (double) external variable OM<particle>.
        mass = aloha_lib.DVariable('OM%s' % self.particle)
        self.representation = aloha_lib.LorentzObjectRepresentation(
                                mass, self.lorentz_ind, self.spin_ind)
class OverMass2(aloha_lib.FactoryLorentz):
    """Factory wrapper producing L_OverMass2 (1/M**2) objects."""

    object_class = L_OverMass2

    @classmethod
    def get_unique_name(cls, particle):
        # Key format: '_OM2_<particle>'
        return '_OM2_%s' % particle
#===============================================================================
# Width
#===============================================================================
class L_Width(aloha_lib.LorentzObject):
    """ Helas Object for a particle Width (scalar, no indices).

    Note: the original docstring said "Impulsion" — copy-paste leftover.
    """

    def __init__(self, name, particle):
        self.particle = particle
        aloha_lib.LorentzObject.__init__(self, name, [], [])

    def create_representation(self):
        # Real (double) external variable W<particle>.
        width = aloha_lib.DVariable('W%s' % self.particle)
        self.representation= aloha_lib.LorentzObjectRepresentation(
                            width, self.lorentz_ind, self.spin_ind)
class Width(aloha_lib.FactoryLorentz):
    """Factory wrapper producing L_Width objects."""

    object_class = L_Width

    @classmethod
    def get_unique_name(cls, particle):
        # Key format: '_W<particle>'
        return '_W%s' % particle
#===============================================================================
# Param
#===============================================================================
class L_Param(aloha_lib.LorentzObject):
    """ Object for a Model Parameter (external scalar variable)."""

    def __init__(self, Lname, name):
        # name is the model-level variable name; it is used both as the
        # LorentzObject name and as the generated variable name.
        self.varname = name
        aloha_lib.LorentzObject.__init__(self, name, [], [])

    def create_representation(self):
        # ExtVariable marks the parameter as externally provided.
        param = aloha_lib.Variable( self.varname, aloha_lib.ExtVariable)
        self.representation= aloha_lib.LorentzObjectRepresentation(
                                                param, [], [])
class Param(aloha_lib.FactoryLorentz):
    """Factory wrapper producing L_Param (model parameter) objects."""

    object_class = L_Param

    @classmethod
    def get_unique_name(self, name):
        if name == 'Pi':
            # Bug fix: KERNEL is not defined in this module's namespace --
            # it lives in aloha_lib (cf. aloha_lib.KERNEL.add_tag in
            # L_P.__init__). The bare reference raised a NameError whenever
            # a Lorentz structure used the model parameter 'Pi'.
            aloha_lib.KERNEL.has_pi = True
        return 'Param_%s' % name
#===============================================================================
# Scalar
#===============================================================================
class L_Scalar(aloha_lib.LorentzObject):
    """ Helas Object for a Scalar wavefunction (single component S<p>_1).

    Note: the original docstring said "Spinor" — copy-paste leftover.
    """

    def __init__(self, name, particle):
        self.particle = particle
        aloha_lib.LorentzObject.__init__(self, name, [], [])

    def create_representation(self):
        rep = aloha_lib.Variable('S%s_1' % self.particle)
        self.representation= aloha_lib.LorentzObjectRepresentation(
                                                rep, [], [])
class Scalar(aloha_lib.FactoryLorentz):
    """Factory wrapper producing L_Scalar wavefunction objects."""

    object_class = L_Scalar

    @classmethod
    def get_unique_name(cls, particle):
        # Key format: '_S<particle>'
        return '_S%s' % particle
#===============================================================================
# Spinor
#===============================================================================
class L_Spinor(aloha_lib.LorentzObject):
    """ Helas Object for a Spinor (4 components F<p>_1..F<p>_4, one spin
    index).

    NOTE(review): the `prefactor` argument is accepted but never used in
    this class body.
    """

    # Spinors should be contracted first when simplifying expressions.
    contract_first = 1

    def __init__(self, name, spin1, particle, prefactor=1):
        self.particle = particle
        aloha_lib.LorentzObject.__init__(self, name,[], [spin1])

    def create_representation(self):
        self.sub0 = aloha_lib.Variable('F%s_1' % self.particle)
        self.sub1 = aloha_lib.Variable('F%s_2' % self.particle)
        self.sub2 = aloha_lib.Variable('F%s_3' % self.particle)
        self.sub3 = aloha_lib.Variable('F%s_4' % self.particle)

        self.representation= aloha_lib.LorentzObjectRepresentation(
                                    {(0,): self.sub0, (1,): self.sub1, \
                                     (2,): self.sub2, (3,): self.sub3},
                                    [],self.spin_ind)
class Spinor(aloha_lib.FactoryLorentz):
    """Factory wrapper producing L_Spinor wavefunction objects."""

    object_class = L_Spinor

    @classmethod
    def get_unique_name(cls, spin1, particle):
        # Key format: '_F<particle>_<spin1>'
        return '_F%s_%s' % (particle,spin1)
#===============================================================================
# Vector
#===============================================================================
class L_Vector(aloha_lib.LorentzObject):
    """ Helas Object for a Vector (4 components V<p>_1..V<p>_4, one
    Lorentz index)."""

    # Vectors should be contracted first when simplifying expressions.
    contract_first = 1

    def __init__(self, name, lorentz, particle):
        self.particle = particle
        aloha_lib.LorentzObject.__init__(self, name, [lorentz], [])

    def create_representation(self):
        self.sub0 = aloha_lib.Variable('V%s_1' % self.particle)
        self.sub1 = aloha_lib.Variable('V%s_2' % self.particle)
        self.sub2 = aloha_lib.Variable('V%s_3' % self.particle)
        self.sub3 = aloha_lib.Variable('V%s_4' % self.particle)

        self.representation= aloha_lib.LorentzObjectRepresentation(
                                    {(0,): self.sub0, (1,): self.sub1, \
                                     (2,): self.sub2, (3,): self.sub3},
                                    self.lorentz_ind, [])
class Vector(aloha_lib.FactoryLorentz):
    """Factory wrapper producing L_Vector wavefunction objects."""

    object_class = L_Vector

    @classmethod
    def get_unique_name(cls, lor, particle):
        # Key format: '_V<particle>_<lor>'
        return '_V%s_%s' % (particle, lor)
#===============================================================================
# Spin3/2
#===============================================================================
class L_Spin3Half(aloha_lib.LorentzObject):
    """ Helas Object for a Spin 3/2 wavefunction (16 components
    R<p>_1..R<p>_16, one Lorentz index and one spin index).

    Note: the original docstring said "Spin2" — copy-paste leftover.
    """

    def __init__(self, name, lorentz, spin, particle):
        self.particle = particle
        aloha_lib.LorentzObject.__init__(self, name, [lorentz], [spin])

    def create_representation(self):
        # Component (mu, s) maps to R<particle>_<4*mu + s + 1>.
        self.sub00 = aloha_lib.Variable('R%s_1' % self.particle)
        self.sub01 = aloha_lib.Variable('R%s_2' % self.particle)
        self.sub02 = aloha_lib.Variable('R%s_3' % self.particle)
        self.sub03 = aloha_lib.Variable('R%s_4' % self.particle)

        self.sub10 = aloha_lib.Variable('R%s_5' % self.particle)
        self.sub11 = aloha_lib.Variable('R%s_6' % self.particle)
        self.sub12 = aloha_lib.Variable('R%s_7' % self.particle)
        self.sub13 = aloha_lib.Variable('R%s_8' % self.particle)

        self.sub20 = aloha_lib.Variable('R%s_9' % self.particle)
        self.sub21 = aloha_lib.Variable('R%s_10' % self.particle)
        self.sub22 = aloha_lib.Variable('R%s_11' % self.particle)
        self.sub23 = aloha_lib.Variable('R%s_12' % self.particle)

        self.sub30 = aloha_lib.Variable('R%s_13' % self.particle)
        self.sub31 = aloha_lib.Variable('R%s_14' % self.particle)
        self.sub32 = aloha_lib.Variable('R%s_15' % self.particle)
        self.sub33 = aloha_lib.Variable('R%s_16' % self.particle)

        rep = {(0,0): self.sub00, (0,1): self.sub01, (0,2): self.sub02, (0,3): self.sub03,
               (1,0): self.sub10, (1,1): self.sub11, (1,2): self.sub12, (1,3): self.sub13,
               (2,0): self.sub20, (2,1): self.sub21, (2,2): self.sub22, (2,3): self.sub23,
               (3,0): self.sub30, (3,1): self.sub31, (3,2): self.sub32, (3,3): self.sub33}

        self.representation= aloha_lib.LorentzObjectRepresentation( rep, \
                                    self.lorentz_ind, self.spin_ind)
class Spin3Half(aloha_lib.FactoryLorentz):
    """Factory wrapper producing L_Spin3Half wavefunction objects."""

    object_class = L_Spin3Half

    @classmethod
    def get_unique_name(cls, lor, spin, part):
        # Key format: 'Spin3Half<part>^<lor>_<spin>'
        return 'Spin3Half%s^%s_%s' % (part, lor, spin)
#===============================================================================
# Spin2
#===============================================================================
class L_Spin2(aloha_lib.LorentzObject):
    """ Helas Object for a Spin2 wavefunction (16 components
    T<p>_1..T<p>_16, two Lorentz indices)."""

    def __init__(self, name, lorentz1, lorentz2, particle):
        self.particle = particle
        aloha_lib.LorentzObject.__init__(self, name, [lorentz1, lorentz2], [])

    def create_representation(self):
        # Component (mu, nu) maps to T<particle>_<4*mu + nu + 1>.
        self.sub00 = aloha_lib.Variable('T%s_1' % self.particle)
        self.sub01 = aloha_lib.Variable('T%s_2' % self.particle)
        self.sub02 = aloha_lib.Variable('T%s_3' % self.particle)
        self.sub03 = aloha_lib.Variable('T%s_4' % self.particle)

        self.sub10 = aloha_lib.Variable('T%s_5' % self.particle)
        self.sub11 = aloha_lib.Variable('T%s_6' % self.particle)
        self.sub12 = aloha_lib.Variable('T%s_7' % self.particle)
        self.sub13 = aloha_lib.Variable('T%s_8' % self.particle)

        self.sub20 = aloha_lib.Variable('T%s_9' % self.particle)
        self.sub21 = aloha_lib.Variable('T%s_10' % self.particle)
        self.sub22 = aloha_lib.Variable('T%s_11' % self.particle)
        self.sub23 = aloha_lib.Variable('T%s_12' % self.particle)

        self.sub30 = aloha_lib.Variable('T%s_13' % self.particle)
        self.sub31 = aloha_lib.Variable('T%s_14' % self.particle)
        self.sub32 = aloha_lib.Variable('T%s_15' % self.particle)
        self.sub33 = aloha_lib.Variable('T%s_16' % self.particle)

        rep = {(0,0): self.sub00, (0,1): self.sub01, (0,2): self.sub02, (0,3): self.sub03,
               (1,0): self.sub10, (1,1): self.sub11, (1,2): self.sub12, (1,3): self.sub13,
               (2,0): self.sub20, (2,1): self.sub21, (2,2): self.sub22, (2,3): self.sub23,
               (3,0): self.sub30, (3,1): self.sub31, (3,2): self.sub32, (3,3): self.sub33}

        self.representation= aloha_lib.LorentzObjectRepresentation( rep, \
                                    self.lorentz_ind, [])
class Spin2(aloha_lib.FactoryLorentz):
    """Factory wrapper producing L_Spin2 wavefunction objects."""

    object_class = L_Spin2

    @classmethod
    def get_unique_name(cls, lor1, lor2, part):
        # Key format: 'Spin2^<part>_<lor1>_<lor2>'
        return 'Spin2^%s_%s_%s' % (part, lor1, lor2)
#===============================================================================
# Gamma
#===============================================================================
class L_Gamma(aloha_lib.LorentzObject):
    """ Gamma Matrices gamma^mu, stored as one dict keyed by
    (lorentz, spin1, spin2).

    The commented lists below show the equivalent matrix-by-matrix form.
    """
    #gamma0 = [[0, 0, 1, 0], [0, 0, 0, 1], [1, 0, 0, 0], [0, 1, 0, 0]]
    #gamma1 = [[0, 0, 0, 1], [0, 0, 1, 0], [0, -1, 0, 0], [-1, 0, 0, 0]]
    #gamma2 = [[0, 0, 0, -complex(0,1)],[0, 0, complex(0,1), 0],
    #                   [0, complex(0,1), 0, 0], [-complex(0,1), 0, 0, 0]]
    #gamma3 = [[0, 0, 1, 0], [0, 0, 0, -1], [-1, 0, 0, 0], [0, 1, 0, 0]]
    #
    #gamma = [gamma0, gamma1, gamma2, gamma3]

    gamma = { #Gamma0
             (0, 0, 0): 0, (0, 0, 1): 0, (0, 0, 2): 1, (0, 0, 3): 0,
             (0, 1, 0): 0, (0, 1, 1): 0, (0, 1, 2): 0, (0, 1, 3): 1,
             (0, 2, 0): 1, (0, 2, 1): 0, (0, 2, 2): 0, (0, 2, 3): 0,
             (0, 3, 0): 0, (0, 3, 1): 1, (0, 3, 2): 0, (0, 3, 3): 0,
             #Gamma1
             (1, 0, 0): 0, (1, 0, 1): 0, (1, 0, 2): 0, (1, 0, 3): 1,
             (1, 1, 0): 0, (1, 1, 1): 0, (1, 1, 2): 1, (1, 1, 3): 0,
             (1, 2, 0): 0, (1, 2, 1): -1, (1, 2, 2): 0, (1, 2, 3): 0,
             (1, 3, 0): -1, (1, 3, 1): 0, (1, 3, 2): 0, (1, 3, 3): 0,
             #Gamma2
             (2, 0, 0): 0, (2, 0, 1): 0, (2, 0, 2): 0, (2, 0, 3): -1j,
             (2, 1, 0): 0, (2, 1, 1): 0, (2, 1, 2): 1j, (2, 1, 3): 0,
             (2, 2, 0): 0, (2, 2, 1): 1j, (2, 2, 2): 0, (2, 2, 3): 0,
             (2, 3, 0): -1j, (2, 3, 1): 0, (2, 3, 2): 0, (2, 3, 3): 0,
             #Gamma3
             (3, 0, 0): 0, (3, 0, 1): 0, (3, 0, 2): 1, (3, 0, 3): 0,
             (3, 1, 0): 0, (3, 1, 1): 0, (3, 1, 2): 0, (3, 1, 3): -1,
             (3, 2, 0): -1, (3, 2, 1): 0, (3, 2, 2): 0, (3, 2, 3): 0,
             (3, 3, 0): 0, (3, 3, 1): 1, (3, 3, 2): 0, (3, 3, 3): 0
             }

    def __init__(self, name, lorentz, spin1, spin2):
        aloha_lib.LorentzObject.__init__(self,name,[lorentz], [spin1, spin2])

    def create_representation(self):
        self.representation = aloha_lib.LorentzObjectRepresentation(self.gamma,
                                self.lorentz_ind,self.spin_ind)
class Gamma(aloha_lib.FactoryLorentz):
    """Factory wrapper producing L_Gamma (gamma matrix) objects."""

    object_class = L_Gamma

    @classmethod
    def get_unique_name(cls, lor, spin1, spin2):
        # Key format: 'Gamma^<lor>_<spin1>_<spin2>'
        return 'Gamma^%s_%s_%s' % (lor, spin1, spin2)
#===============================================================================
# Sigma
#===============================================================================
class L_Sigma(aloha_lib.LorentzObject):
    """ Sigma Matrices: the antisymmetric sigma^{mu,nu} tensor built from
    the gamma matrices.

    The ``sigma`` dict is keyed by (mu, nu, spin1, spin2); the commented
    construction below shows how the table was generated (diagonal
    mu == nu blocks are zero, and sigma^{nu,mu} = -sigma^{mu,nu}).
    """
    #zero = [[0,0,0,0]]*4
    #i = complex(0,1)
    #sigma01 = [[ 0, -i, 0, 0], [-i, 0, 0, 0], [0, 0, 0, i], [0, 0, i, 0]]
    #sigma02 = [[ 0, -1, 0, 0], [1, 0, 0, 0], [0, 0, 0, 1], [0, 0, -1, 0]]
    #sigma03 = [[-i, 0, 0, 0], [0, i, 0, 0], [0, 0, i, 0], [0, 0, 0, -i]]
    #sigma12 = [[1, 0, 0, 0], [0, -1, 0, 0], [0, 0, 1, 0], [0, 0, 0, -1]]
    #sigma13 = [[0, i, 0, 0], [-i, 0, 0, 0], [0, 0, 0, i], [0, 0, -i, 0]]
    #sigma23 = [[0, 1, 0, 0], [1, 0, 0, 0], [0, 0, 0, 1], [0, 0, 1, 0]]
    #def inv(matrice):
    #    out=[]
    #    for i in range(4):
    #        out2=[]
    #        out.append(out2)
    #        for j in range(4):
    #            out2.append(-1*matrice[i][j])
    #    return out
    #
    #sigma =[[zero, sigma01, sigma02, sigma03], \
    #        [inv(sigma01), zero, sigma12, sigma13],\
    #        [inv(sigma02), inv(sigma12), zero, sigma23],\
    #        [inv(sigma03), inv(sigma13), inv(sigma23), zero]]
    sigma={(0, 2, 0, 1): -0.5, (3, 1, 2, 0): 0, (3, 2, 3, 1): 0, (1, 3, 1, 3): 0,
           (2, 3, 3, 2): 0.5, (2, 1, 3, 1): 0, (0, 2, 2, 1): 0, (3, 1, 0, 0): 0,
           (2, 3, 3, 1): 0, (3, 3, 1, 2): 0, (3, 1, 0, 3): 0, (1, 1, 0, 3): 0,
           (0, 1, 2, 2): 0, (3, 2, 3, 2): -0.5, (2, 1, 0, 1): 0, (3, 3, 3, 3): 0,
           (1, 1, 2, 2): 0, (2, 2, 3, 2): 0, (2, 1, 2, 1): 0, (0, 1, 0, 3): 0,
           (2, 1, 2, 2): -0.5, (1, 2, 2, 1): 0, (2, 2, 1, 3): 0, (0, 3, 1, 3): 0,
           (3, 0, 3, 2): 0, (1, 2, 0, 1): 0, (3, 0, 3, 1): 0, (0, 0, 2, 2): 0,
           (1, 2, 0, 2): 0, (2, 0, 0, 3): 0, (0, 0, 2, 1): 0, (0, 3, 3, 2): 0,
           (3, 0, 1, 1): -0.5j, (3, 2, 0, 1): -0.5, (1, 0, 1, 0): 0.5j, (0, 0, 0, 1): 0,
           (0, 2, 1, 1): 0, (3, 1, 3, 2): 0.5j, (3, 2, 2, 1): 0, (1, 3, 2, 3): 0.5j,
           (1, 0, 3, 0): 0, (3, 2, 2, 2): 0, (0, 2, 3, 1): 0, (1, 0, 3, 3): 0,
           (2, 3, 2, 1): 0, (0, 2, 3, 2): -0.5, (3, 1, 1, 3): 0, (1, 1, 1, 3): 0,
           (1, 3, 0, 2): 0, (2, 3, 0, 1): 0.5, (1, 1, 1, 0): 0, (2, 3, 0, 2): 0,
           (3, 3, 0, 3): 0, (1, 1, 3, 0): 0, (0, 1, 3, 3): 0, (2, 2, 0, 1): 0,
           (2, 1, 1, 0): 0, (3, 3, 2, 2): 0, (2, 3, 1, 0): 0.5, (2, 2, 2, 3): 0,
           (0, 3, 0, 3): 0, (0, 1, 1, 2): 0, (0, 3, 0, 0): -0.5j, (2, 3, 1, 1): 0,
           (1, 2, 3, 0): 0, (2, 0, 1, 3): 0, (0, 0, 3, 1): 0, (0, 3, 2, 0): 0,
           (2, 3, 1, 2): 0, (2, 0, 1, 0): -0.5, (1, 2, 1, 0): 0, (3, 0, 0, 2): 0,
           (1, 0, 0, 2): 0, (0, 0, 1, 1): 0, (1, 2, 1, 3): 0, (2, 3, 1, 3): 0,
           (2, 0, 3, 0): 0, (0, 0, 1, 2): 0, (1, 3, 3, 3): 0, (3, 2, 1, 0): -0.5,
           (1, 3, 3, 0): 0, (1, 0, 2, 3): -0.5j, (0, 2, 0, 0): 0, (3, 1, 2, 3): -0.5j,
           (3, 2, 3, 0): 0, (1, 3, 1, 0): -0.5j, (3, 2, 3, 3): 0, (0, 2, 2, 0): 0,
           (2, 3, 3, 0): 0, (3, 3, 1, 3): 0, (0, 2, 2, 3): 0.5, (3, 1, 0, 2): 0,
           (1, 1, 0, 2): 0, (3, 3, 1, 0): 0, (0, 1, 2, 3): 0.5j, (1, 1, 0, 1): 0,
           (2, 1, 0, 2): 0, (0, 1, 2, 0): 0, (3, 3, 3, 0): 0, (1, 1, 2, 1): 0,
           (2, 2, 3, 3): 0, (0, 1, 0, 0): 0, (2, 2, 3, 0): 0, (2, 1, 2, 3): 0,
           (1, 2, 2, 2): 0.5, (2, 2, 1, 0): 0, (0, 3, 1, 2): 0, (0, 3, 1, 1): 0.5j,
           (3, 0, 3, 0): 0, (1, 2, 0, 3): 0, (2, 0, 0, 2): 0, (0, 0, 2, 0): 0,
           (0, 3, 3, 1): 0, (3, 0, 1, 0): 0, (2, 0, 0, 1): 0.5, (3, 2, 0, 2): 0,
           (3, 0, 1, 3): 0, (1, 0, 1, 3): 0, (0, 0, 0, 0): 0, (0, 2, 1, 2): 0,
           (3, 1, 3, 3): 0, (0, 0, 0, 3): 0, (1, 3, 2, 2): 0, (3, 1, 3, 0): 0,
           (3, 2, 2, 3): -0.5, (1, 3, 2, 1): 0, (1, 0, 3, 2): -0.5j, (2, 3, 2, 2): 0,
           (0, 2, 3, 3): 0, (3, 1, 1, 0): 0.5j, (1, 3, 0, 1): 0.5j, (1, 1, 1, 1): 0,
           (2, 1, 3, 2): 0, (2, 3, 0, 3): 0, (3, 3, 0, 2): 0, (1, 1, 3, 1): 0,
           (3, 3, 0, 1): 0, (2, 1, 3, 3): 0.5, (0, 1, 3, 2): 0.5j, (1, 1, 3, 2): 0,
           (2, 1, 1, 3): 0, (3, 0, 2, 1): 0, (0, 1, 3, 1): 0, (3, 3, 2, 1): 0,
           (2, 2, 2, 2): 0, (0, 1, 1, 1): 0, (2, 2, 2, 1): 0, (0, 3, 0, 1): 0,
           (3, 0, 2, 2): -0.5j, (1, 2, 3, 3): -0.5, (0, 0, 3, 2): 0, (0, 3, 2, 1): 0,
           (2, 0, 1, 1): 0, (2, 2, 0, 0): 0, (0, 3, 2, 2): 0.5j, (3, 0, 0, 3): 0,
           (1, 0, 0, 3): 0, (1, 2, 1, 2): 0, (2, 0, 3, 1): 0, (1, 0, 0, 0): 0,
           (0, 0, 1, 3): 0, (2, 0, 3, 2): 0.5, (3, 2, 1, 3): 0, (1, 3, 3, 1): 0,
           (1, 0, 2, 0): 0, (2, 2, 0, 2): 0, (0, 2, 0, 3): 0, (3, 1, 2, 2): 0,
           (1, 3, 1, 1): 0, (3, 1, 2, 1): 0, (2, 2, 0, 3): 0, (3, 0, 0, 1): 0,
           (1, 3, 1, 2): 0, (2, 3, 3, 3): 0, (0, 2, 2, 2): 0, (3, 1, 0, 1): -0.5j,
           (3, 3, 1, 1): 0, (1, 1, 0, 0): 0, (2, 1, 0, 3): 0, (0, 1, 2, 1): 0,
           (3, 3, 3, 1): 0, (2, 1, 0, 0): -0.5, (1, 1, 2, 0): 0, (3, 3, 3, 2): 0,
           (0, 1, 0, 1): -0.5j, (1, 1, 2, 3): 0, (2, 2, 3, 1): 0, (2, 1, 2, 0): 0,
           (0, 1, 0, 2): 0, (1, 2, 2, 3): 0, (2, 0, 2, 1): 0, (2, 2, 1, 1): 0,
           (1, 2, 2, 0): 0, (2, 2, 1, 2): 0, (0, 3, 1, 0): 0, (3, 0, 3, 3): 0.5j,
           (2, 1, 3, 0): 0, (1, 2, 0, 0): 0.5, (0, 0, 2, 3): 0, (0, 3, 3, 0): 0,
           (2, 0, 0, 0): 0, (3, 2, 0, 3): 0, (0, 3, 3, 3): -0.5j, (3, 0, 1, 2): 0,
           (1, 0, 1, 2): 0, (3, 2, 0, 0): 0, (0, 2, 1, 3): 0, (1, 0, 1, 1): 0,
           (0, 0, 0, 2): 0, (0, 2, 1, 0): 0.5, (3, 1, 3, 1): 0, (3, 2, 2, 0): 0,
           (1, 3, 2, 0): 0, (1, 0, 3, 1): 0, (2, 3, 2, 3): 0.5, (0, 2, 3, 0): 0,
           (3, 1, 1, 1): 0, (2, 3, 2, 0): 0, (1, 3, 0, 0): 0, (3, 1, 1, 2): 0,
           (1, 1, 1, 2): 0, (1, 3, 0, 3): 0, (2, 3, 0, 0): 0, (2, 0, 2, 0): 0,
           (3, 3, 0, 0): 0, (1, 1, 3, 3): 0, (2, 1, 1, 2): 0, (0, 1, 3, 0): 0,
           (3, 3, 2, 0): 0, (2, 1, 1, 1): 0.5, (2, 0, 2, 2): 0, (3, 3, 2, 3): 0,
           (0, 1, 1, 0): -0.5j, (2, 2, 2, 0): 0, (0, 3, 0, 2): 0, (3, 0, 2, 3): 0,
           (0, 1, 1, 3): 0, (2, 0, 2, 3): -0.5, (1, 2, 3, 2): 0, (3, 0, 2, 0): 0,
           (0, 0, 3, 3): 0, (1, 2, 3, 1): 0, (2, 0, 1, 2): 0, (0, 0, 3, 0): 0,
           (0, 3, 2, 3): 0, (3, 0, 0, 0): 0.5j, (1, 2, 1, 1): -0.5, (1, 0, 0, 1): 0.5j,
           (0, 0, 1, 0): 0, (2, 0, 3, 3): 0, (3, 2, 1, 2): 0, (1, 3, 3, 2): -0.5j,
           (1, 0, 2, 1): 0, (3, 2, 1, 1): 0, (0, 2, 0, 2): 0, (1, 0, 2, 2): 0}

    def __init__(self, name, lorentz1, lorentz2, spin1, spin2):
        aloha_lib.LorentzObject.__init__(self, name, [lorentz1, lorentz2], \
                                                               [spin1, spin2])

    def create_representation(self):
        self.representation = aloha_lib.LorentzObjectRepresentation(self.sigma,
                                self.lorentz_ind,self.spin_ind)
class Sigma(aloha_lib.FactoryLorentz):
    """Factory wrapper producing L_Sigma (sigma tensor) objects."""

    object_class = L_Sigma

    @classmethod
    def get_unique_name(cls, lorentz1, lorentz2, spin1, spin2):
        # Key format: 'Sigma_[<s1>,<s2>]^[<l1>,<l2>]'
        return 'Sigma_[%s,%s]^[%s,%s]' % (spin1, spin2, lorentz1, lorentz2)
#===============================================================================
# Gamma5
#===============================================================================
class L_Gamma5(aloha_lib.LorentzObject):
    """Chirality matrix gamma^5 = diag(-1, -1, 1, 1), keyed by
    (spin1, spin2)."""

    # gamma5 = [[-1, 0, 0, 0], [0, -1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]]
    # (original comment had a stray fifth entry in the first row)
    gamma5 = {(0,0): -1, (0,1): 0, (0,2): 0, (0,3): 0,\
              (1,0): 0, (1,1): -1, (1,2): 0, (1,3): 0,\
              (2,0): 0, (2,1): 0, (2,2): 1, (2,3): 0,\
              (3,0): 0, (3,1): 0, (3,2): 0, (3,3): 1}

    def __init__(self, name, spin1, spin2):
        aloha_lib.LorentzObject.__init__(self, name, [], [spin1, spin2])

    def create_representation(self):
        self.representation = aloha_lib.LorentzObjectRepresentation(self.gamma5,
                                self.lorentz_ind,self.spin_ind)
class Gamma5(aloha_lib.FactoryLorentz):
    """Factory producing/caching L_Gamma5 objects under a canonical name."""

    object_class = L_Gamma5

    @classmethod
    def get_unique_name(cls, spin1, spin2):
        # Key is derived from the two spin indices only.
        return '_'.join(['Gamma5', str(spin1), str(spin2)])
#===============================================================================
# Conjugate Matrices
#===============================================================================
class L_C(aloha_lib.LorentzObject):
    """Charge-conjugation matrix on two spinor indices (antisymmetric)."""
    #[0, -1, 0, 0] [1,0,0,0] [0,0,0,1],[0,0,-1,0]
    Cmetrix = {(0,0): 0, (0,1): -1, (0,2): 0, (0,3): 0,\
               (1,0): 1, (1,1): 0, (1,2): 0, (1,3): 0,\
               (2,0): 0, (2,1): 0, (2,2): 0, (2,3): 1,\
               (3,0): 0, (3,1): 0, (3,2): -1, (3,3): 0}

    def __init__(self, name, spin_list):
        # spin_list is automatically ordered. The sign for the symmetrization
        # is set in the Factory routine
        aloha_lib.LorentzObject.__init__(self, name, [], spin_list)

    def create_representation(self):
        self.representation = aloha_lib.LorentzObjectRepresentation(self.Cmetrix,
                                self.lorentz_ind,self.spin_ind)
class C(aloha_lib.FactoryLorentz):
    """Factory for the conjugation matrix.

    The two spin indices are stored sorted; when sorting reversed the
    caller's order, a -1 prefactor is attached (the matrix is antisymmetric,
    see L_C.Cmetrix).
    """

    object_class = L_C

    def __new__(cls, spin1, spin2):
        spin_list = [spin1, spin2]
        spin_list.sort()
        # +1 if (spin1, spin2) was already sorted, -1 if it was swapped.
        sign = give_sign_perm(spin_list, [spin1, spin2])
        name = cls.get_unique_name(spin_list)
        if sign == 1:
            return aloha_lib.FactoryVar.__new__(cls, name, cls.object_class, spin_list)
        else:
            out = aloha_lib.FactoryVar.__new__(cls, name, cls.object_class, spin_list)
            out.prefactor = -1
            return out

    @classmethod
    def get_unique_name(cls, spin_list):
        # NOTE: takes the (sorted) list as a single argument, unlike the
        # other factories that take the indices separately.
        return "C_%s_%s" % tuple(spin_list)
#===============================================================================
# EPSILON
#===============================================================================
#Helpfull function
def give_sign_perm(perm0, perm1):
    """Return +1 if *perm1* is an even rearrangement of *perm0*, else -1.

    Both sequences must have the same length and contain the same elements;
    only the length is actually asserted.  *perm1* is not modified.
    """
    assert len(perm0) == len(perm1)
    work = list(perm1)  # scratch copy so the caller's list stays untouched
    position = dict((value, idx) for idx, value in enumerate(work))

    swaps = 0
    for target_idx, wanted in enumerate(perm0):
        current = work[target_idx]
        if wanted == current:
            continue
        # Bring `wanted` into place with one transposition, keeping the
        # value->index map in sync so lookups stay O(1).
        other_idx = position[wanted]
        work[target_idx], work[other_idx] = wanted, current
        position[wanted], position[current] = target_idx, other_idx
        swaps += 1

    # Even number of transpositions means equal parity.
    return 1 if swaps % 2 == 0 else -1
# Practical definition of Epsilon
class L_Epsilon(aloha_lib.LorentzObject):
    """ The fully anti-symmetric object in Lorentz-Space """

    def give_parity(self, perm):
        """return the parity of the permutation"""
        assert set(perm) == set([0,1,2,3])
        i1 , i2, i3, i4 = perm
        #formula found on wikipedia
        # NOTE(review): '/12' is true division on Python 3, so non-zero
        # entries are +-1.0 floats (not ints) -- confirm downstream is OK.
        return -self.sign * ((i2-i1) * (i3-i1) *(i4-i1) * (i3-i2) * (i4-i2) *(i4-i3))/12

    # DEFINE THE REPRESENTATION OF EPSILON
    def __init__(self, name, lorentz1, lorentz2, lorentz3, lorentz4):
        lorentz_list = [lorentz1 , lorentz2, lorentz3, lorentz4]
        #order_lor = list(lorentz_list)
        #order_lor.sort()
        #self.sign = give_sign_perm(order_lor, lorentz_list)
        # Index re-ordering is disabled (see commented code above): sign
        # is fixed to 1 for all instances.
        self.sign=1
        aloha_lib.LorentzObject.__init__(self, name, lorentz_list, [])

    def create_representation(self):
        # Build the full 4^4 component table once, then cache it on the
        # class so later instances reuse it.
        if not hasattr(self, 'epsilon'):
            # init all element to zero
            epsilon = dict( ((l1, l2, l3, l4), 0)
                            for l1 in range(4) \
                            for l2 in range(4) \
                            for l3 in range(4) \
                            for l4 in range(4))
            # update non trivial one
            epsilon.update(dict(
                ((l1, l2, l3, l4), self.give_parity((l1,l2,l3,l4)))
                for l1 in range(4) \
                for l2 in range(4) if l2 != l1\
                for l3 in range(4) if l3 not in [l1,l2]\
                for l4 in range(4) if l4 not in [l1,l2,l3]))
            L_Epsilon.epsilon = epsilon

        self.representation = aloha_lib.LorentzObjectRepresentation(self.epsilon,
                                self.lorentz_ind,self.spin_ind)
class Epsilon(aloha_lib.FactoryLorentz):
    """Factory producing/caching the rank-4 antisymmetric tensor L_Epsilon."""

    object_class = L_Epsilon

    @classmethod
    def get_unique_name(cls, l1, l2, l3, l4):
        # One cache entry per ordered index tuple.
        return '_EPSILON_' + '_'.join(str(index) for index in (l1, l2, l3, l4))
#===============================================================================
# Metric
#===============================================================================
class L_Metric(aloha_lib.LorentzObject):
    """Minkowski metric with signature (+, -, -, -) on two Lorentz indices."""
    metric = {(0,0): 1, (0,1): 0, (0,2): 0, (0,3): 0,\
              (1,0): 0, (1,1): -1, (1,2): 0, (1,3): 0,\
              (2,0): 0, (2,1): 0, (2,2): -1, (2,3): 0,\
              (3,0): 0, (3,1): 0, (3,2): 0, (3,3): -1}
    #[[1, 0, 0,0], [0, -1, 0, 0], [0, 0, -1, 0], [0, 0, 0, -1]]

    def __init__(self, name, lorentz1, lorentz2):
        # Two Lorentz indices, no spin indices.
        aloha_lib.LorentzObject.__init__(self,name,[lorentz1, lorentz2], [])

    def create_representation(self):
        self.representation = aloha_lib.LorentzObjectRepresentation(self.metric,
                                self.lorentz_ind,self.spin_ind)
class Metric(aloha_lib.FactoryLorentz):
    """Factory for the Minkowski metric.

    The cache key is index-order independent (eta is symmetric), so
    Metric(a, b) and Metric(b, a) share one entry.
    """

    object_class = L_Metric

    @classmethod
    def get_unique_name(cls, l1, l2):
        low, high = (l1, l2) if l1 < l2 else (l2, l1)
        return '_ETA_%s_%s' % (low, high)
#===============================================================================
# Identity
#===============================================================================
class L_Identity(aloha_lib.LorentzObject):
    """4x4 identity matrix acting on two spinor indices."""
    #identity = [[1, 0, 0,0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]]
    identity = {(0,0): 1, (0,1): 0, (0,2): 0, (0,3): 0,\
                (1,0): 0, (1,1): 1, (1,2): 0, (1,3): 0,\
                (2,0): 0, (2,1): 0, (2,2): 1, (2,3): 0,\
                (3,0): 0, (3,1): 0, (3,2): 0, (3,3): 1}

    def __init__(self, name, spin1, spin2):
        # No Lorentz indices, two spin indices.
        aloha_lib.LorentzObject.__init__(self, name, [],[spin1, spin2])

    def create_representation(self):
        self.representation = aloha_lib.LorentzObjectRepresentation(self.identity,
                                self.lorentz_ind,self.spin_ind)
class Identity(aloha_lib.FactoryLorentz):
    """Factory producing/caching the spinor-index identity L_Identity."""

    object_class = L_Identity

    @classmethod
    def get_unique_name(cls, spin1, spin2):
        return 'Id_' + str(spin1) + '_' + str(spin2)
#===============================================================================
# IdentityL
#===============================================================================
class L_IdentityL(aloha_lib.LorentzObject):
    """4x4 identity matrix acting on two *Lorentz* indices.

    Same component table as L_Identity, but the indices are declared as
    Lorentz indices instead of spin indices.
    """
    #identity = [[1, 0, 0,0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]]
    identity = {(0,0): 1, (0,1): 0, (0,2): 0, (0,3): 0,\
                (1,0): 0, (1,1): 1, (1,2): 0, (1,3): 0,\
                (2,0): 0, (2,1): 0, (2,2): 1, (2,3): 0,\
                (3,0): 0, (3,1): 0, (3,2): 0, (3,3): 1}

    def __init__(self, name, l1, l2):
        # Two Lorentz indices, no spin indices (contrast L_Identity).
        aloha_lib.LorentzObject.__init__(self, name, [l1,l2], [])

    def create_representation(self):
        self.representation = aloha_lib.LorentzObjectRepresentation(self.identity,
                                self.lorentz_ind,self.spin_ind)
class IdentityL(aloha_lib.FactoryLorentz):
    """Factory producing/caching the Lorentz-index identity.

    Fix: ``object_class`` previously pointed at ``L_Identity`` (the
    spinor-index identity), while this factory's cache name is 'IdL_...'
    and ``L_IdentityL`` -- which declares its two indices as Lorentz
    indices -- exists precisely for this factory and is otherwise unused.
    """

    object_class = L_IdentityL

    @classmethod
    def get_unique_name(self, l1, l2):
        return 'IdL_%s_%s' % (l1, l2)
#===============================================================================
# ProjM
#===============================================================================
class L_ProjM(aloha_lib.LorentzObject):
    """ A object for (1-gamma5)/2 """
    # Left-handed chiral projector: upper 2x2 block of the identity in the
    # chiral basis used by L_Gamma5 (diag(1, 1, 0, 0)).
    #projm = [[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]]
    projm= {(0,0): 1, (0,1): 0, (0,2): 0, (0,3): 0,\
            (1,0): 0, (1,1): 1, (1,2): 0, (1,3): 0,\
            (2,0): 0, (2,1): 0, (2,2): 0, (2,3): 0,\
            (3,0): 0, (3,1): 0, (3,2): 0, (3,3): 0}

    def __init__(self,name, spin1, spin2):
        """Initialize the object"""
        aloha_lib.LorentzObject.__init__(self, name, [], [spin1, spin2])

    def create_representation(self):
        self.representation = aloha_lib.LorentzObjectRepresentation(self.projm,
                                self.lorentz_ind,self.spin_ind)
class ProjM(aloha_lib.FactoryLorentz):
    """Factory producing/caching the left-handed projector L_ProjM."""

    object_class = L_ProjM

    @classmethod
    def get_unique_name(cls, spin1, spin2):
        return 'PROJM_' + str(spin1) + '_' + str(spin2)
#===============================================================================
# ProjP
#===============================================================================
class L_ProjP(aloha_lib.LorentzObject):
    """A object for (1+gamma5)/2 """
    # Right-handed chiral projector: lower 2x2 block of the identity in the
    # chiral basis used by L_Gamma5 (diag(0, 0, 1, 1)).
    #projp = [[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]]
    projp = {(0,0): 0, (0,1): 0, (0,2): 0, (0,3): 0,\
             (1,0): 0, (1,1): 0, (1,2): 0, (1,3): 0,\
             (2,0): 0, (2,1): 0, (2,2): 1, (2,3): 0,\
             (3,0): 0, (3,1): 0, (3,2): 0, (3,3): 1}

    def __init__(self,name, spin1, spin2):
        """Initialize the object"""
        aloha_lib.LorentzObject.__init__(self, name, [], [spin1, spin2])

    def create_representation(self):
        self.representation = aloha_lib.LorentzObjectRepresentation(self.projp,
                                self.lorentz_ind, self.spin_ind)
class ProjP(aloha_lib.FactoryLorentz):
    """Factory producing/caching the right-handed projector L_ProjP."""

    object_class = L_ProjP

    @classmethod
    def get_unique_name(cls, spin1, spin2):
        return 'PROJP_' + str(spin1) + '_' + str(spin2)
#===============================================================================
# Denominator Propagator
#===============================================================================
class DenominatorPropagator(aloha_lib.LorentzObject):
    """The Denominator of the Propagator"""

    def __new__(cls, particle):
        # One shared variable per particle, keyed by name.
        name = 'DenomP%s' % particle
        return aloha_lib.Variable.__new__(cls, name)

    def __init__(self, particle):
        # NOTE(review): relies on the instance evaluating falsy when freshly
        # created and truthy once initialised (so repeated construction of
        # the same named variable skips re-initialisation) -- behaviour is
        # defined in aloha_lib.Variable; confirm there.
        if self:
            return
        self.particle = particle
        aloha_lib.LorentzObject.__init__(self, [], [])

    def get_unique_name(self,*args):
        return 'DenomP%s' % self.particle

    def simplify(self):
        """Return the Denominator in a abstract way"""
        # p.p - m^2 + i*m*Gamma  (standard Breit-Wigner denominator form)
        mass = Mass(self.particle)
        width = Width(self.particle)
        denominator = P('i1', self.particle) * P('i1', self.particle) - \
                      mass * mass + complex(0,1) * mass* width
        return denominator

    def create_representation(self):
        """Create the representation for the Vector propagator"""
        object = self.simplify()
        self.representation = object.expand()
#===============================================================================
# Numerator Propagator
#===============================================================================
# Numerator factories for the various spins.  Each lambda builds an abstract
# expression from the symbolic objects defined above (Gamma, P, Mass, ...).
# NOTE(review): the fractional coefficients (-1/3, 1/2, 1/6) rely on
# Python 3 true division; under Python 2 without `from __future__ import
# division` they truncate to integers -- confirm the target interpreter.

# Spin-1/2 numerators: (pslash +- m); 'out' carries the opposite sign
# convention from 'in'.
SpinorPropagatorout = lambda spin1, spin2, particle: -1 * (Gamma('mu', spin1, spin2) * \
                    P('mu', particle) - Mass(particle) * Identity(spin1, spin2))
SpinorPropagatorin = lambda spin1, spin2, particle: (Gamma('mu', spin1, spin2) * \
                    P('mu', particle) + Mass(particle) * Identity(spin1, spin2))

# Spin-1 numerators (unitary gauge for the massive case).
VectorPropagator = lambda l1, l2, part: complex(0,1)*(-1 * Metric(l1, l2) + OverMass2(part) * \
                                    Metric(l1,'I3')* P('I3', part) * P(l2, part))
VectorPropagatorMassless= lambda l1, l2, part: complex(0,-1) * Metric(l1, l2)

# Spin-3/2 numerators; negative spin indices (-2, -3) are internal summed
# indices.
Spin3halfPropagatorin =  lambda mu, nu, s1, s2, part: (\
    -1/3 * (Gamma(mu,s1,-2) + Identity(s1, -2) * P(mu, part) * Mass(part) * OverMass2(part))* \
    (PSlash(-2,-3, part) - Identity(-2,-3) * Mass(part)) * \
    ( Gamma(nu, -3, s2)+ Mass(part) * OverMass2(part) * Identity(-3, s2) * P(nu, part) ) - \
    (PSlash(s1,s2, part) + Mass(part) * Identity(s1,s2)) * (Metric(mu, nu) - OverMass2(part) * P(mu, part) * P(nu,part)))
Spin3halfPropagatorout =  lambda mu, nu, s1, s2, part: ( \
    -1/3 * (Gamma(mu,s1,-2) - Identity(s1, -2) * P(mu, part) * Mass(part) * OverMass2(part))* \
    (-1*PSlash(-2,-3, part) - Identity(-2,-3) * Mass(part)) * \
    ( Gamma(nu, -3, s2)- Mass(part) * OverMass2(part) * Identity(-3, s2) * P(nu, part) ) - \
    (-1*PSlash(s1,s2, part)
     + Mass(part) * Identity(s1,s2)) * (Metric(mu, nu) - OverMass2(part) * P(mu, part) * P(nu,part)))
Spin3halfPropagatorMasslessOut = lambda mu, nu, s1, s2, part: Gamma(nu, s1,-1) * PSlash(-1,-2, part) * Gamma(mu,-2, s2)
Spin3halfPropagatorMasslessIn = lambda mu, nu, s1, s2, part: -1 * Gamma(mu, s1,-1) * PSlash(-1,-2, part) * Gamma(nu,-2, s2)

# Spin-2 numerators, built from symmetrised metric combinations.
Spin2masslessPropagator = lambda mu, nu, alpha, beta: 1/2 *( Metric(mu, alpha)* Metric(nu, beta) +\
                 Metric(mu, beta) * Metric(nu, alpha) - Metric(mu, nu) * Metric(alpha, beta))
Spin2Propagator =  lambda mu, nu, alpha, beta, part: Spin2masslessPropagator(mu, nu, alpha, beta) + \
                -1/2 * OverMass2(part) * (Metric(mu,alpha)* P(nu, part) * P(beta, part) + \
                                Metric(nu, beta) * P(mu, part) * P(alpha, part) + \
                                Metric(mu, beta) * P(nu, part) * P(alpha, part) + \
                                Metric(nu, alpha) * P(mu, part) * P(beta , part) )+ \
                1/6 * (Metric(mu,nu) + 2 * OverMass2(part) * P(mu, part) * P(nu, part)) * \
                      (Metric(alpha,beta) + 2 * OverMass2(part) * P(alpha, part) * P(beta, part))
| [
"restrepo@udea.edu.co"
] | restrepo@udea.edu.co |
139127929e231175ddfc0c3b4801f86a9cde7362 | feebe89417007ec9b37b6e63375376020c90fca7 | /chp07/test_chp07.py | a5c1ea808e55ae8ad026927fc9e2f8323ebc4b1c | [
"MIT"
] | permissive | rockman/learn-python-fluent | 94279b13d1f624e6fb2ee822d7f7245416cd6a2f | 02072daea62143471523679aca28af238a7f3ed4 | refs/heads/master | 2020-12-10T03:04:42.433133 | 2020-02-22T03:47:48 | 2020-02-22T03:47:48 | 233,488,845 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,357 | py |
from functools import singledispatch
import numbers
def averager():
    """Return a closure that tracks a running floor-division average.

    Each call to the returned function folds one more value into the
    running total and returns total // count so far.
    """
    total = 0
    count = 0

    def average(value):
        nonlocal total, count
        total += value
        count += 1
        return total // count

    return average
def test_closures():
    # The closure keeps its own history across calls: each value is folded
    # into the running floor-division average.
    avg = averager()
    assert avg(10) == 10
    assert avg(20) == 15
    assert avg(30) == 20
def counter(f):
    """Decorator: wrap *f* so each call returns ``(f(n), call_number)``.

    The call number is 1-based and private to each decorated function.
    """
    calls = [0]  # boxed int: mutated in place instead of via `nonlocal`

    def wrapper(n):
        calls[0] += 1
        return f(n), calls[0]

    return wrapper
def test_counter():
    # The wrapper pairs each result with a 1-based call counter.
    @counter
    def echo(n):
        return n

    assert echo(42) == (42, 1)
    assert echo('what') == ('what', 2)
assert echo('what') == ('what', 2)
@singledispatch
def funky(obj):
    # Fallback used when no registered overload matches the argument type.
    return f'object:{obj}'

@funky.register(str)
def _(s):
    return f'string:{s}'

@funky.register(numbers.Integral)
def _(n):
    # Registered against the Integral ABC, so it also covers int subclasses.
    return f'integral:{n}'
def test_singledispatch():
    # Dispatch is on the runtime type of the first argument: a function
    # object falls through to the generic branch, str and int hit their
    # registered overloads.
    assert funky(test_singledispatch).startswith('object:<function ')
    assert funky('hello') == 'string:hello'
    assert funky(42) == 'integral:42'
def star(n=3):
    """Parameterized decorator factory.

    ``@star(k)`` makes the wrapped zero-argument function return its
    result prefixed with k asterisks and a colon (default k is 3).
    """
    def decorate(f):
        def wrapper():
            return '%s:%s' % ('*' * n, f())
        return wrapper
    return decorate
def test_parameterized_decorator():
    # star must be *called* (even with no arguments) to produce the actual
    # decorator; the argument controls the number of asterisks.
    @star()
    def hello():
        return 'hello'

    @star(1)
    def world():
        return 'world'

    assert hello() == '***:hello'
    assert world() == '*:world'
| [
"rockman@users.noreply.github.com"
] | rockman@users.noreply.github.com |
103f8612b470238fdcec4f748e975de167f871bd | 9e401b884382ff048898f64231c3b6164dc0259b | /sieve/sieve_test.py | ea6ee6ef66694cbeb0ee73036df14986025910b1 | [] | no_license | rjmah/exercism-solutions-py | 3d701e74c5de968825a7e79225775481830713e4 | 6c1eaf5f1dfdd66f740c1775413a34ca4913716f | refs/heads/master | 2021-01-15T21:53:48.409165 | 2014-10-31T16:50:51 | 2014-10-31T16:50:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,161 | py | import unittest
from sieve import sieve
class SieveTest(unittest.TestCase):
    """Checks that sieve(limit) returns every prime up to and including limit."""

    def test_a_few_primes(self):
        expected = [2, 3, 5, 7]
        self.assertEqual(expected, sieve(10))

    def test_primes(self):
        # All 168 primes below 1000.
        expected = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, 127, 131, 137, 139, 149, 151, 157, 163, 167, 173, 179, 181, 191, 193, 197, 199, 211, 223, 227, 229, 233, 239, 241, 251, 257, 263, 269, 271, 277, 281, 283, 293, 307, 311, 313, 317, 331, 337, 347, 349, 353, 359, 367, 373, 379, 383, 389, 397, 401, 409, 419, 421, 431, 433, 439, 443, 449, 457, 461, 463, 467, 479, 487, 491, 499, 503, 509, 521, 523, 541, 547, 557, 563, 569, 571, 577, 587, 593, 599, 601, 607, 613, 617, 619, 631, 641, 643, 647, 653, 659, 661, 673, 677, 683, 691, 701, 709, 719, 727, 733, 739, 743, 751, 757, 761, 769, 773, 787, 797, 809, 811, 821, 823, 827, 829, 839, 853, 857, 859, 863, 877, 881, 883, 887, 907, 911, 919, 929, 937, 941, 947, 953, 967, 971, 977, 983, 991, 997]
        self.assertEqual(expected, sieve(1000))
if __name__ == '__main__':
unittest.main()
| [
"ryanjamesmah@gmail.com"
] | ryanjamesmah@gmail.com |
a95420f768dc166655e558de8bdc1f864cfa316d | 8c33124986c836c8ef2f30741a6fad86f8c1aa94 | /Python/pythonCourse/python3EssentialTraining/exerciseFiles/01QuickStart/hello.py | 460544036e50e91b6fb4355eb46be7832c20bbed | [] | no_license | kennykat/CourseWork-TTA | fe2f00409f34e97d5d56db74ba3324107f671491 | 18159f5edc33344f9cf5c9487ef55f7c7472de98 | refs/heads/master | 2021-01-18T23:50:19.375829 | 2018-05-26T20:03:39 | 2018-05-26T20:03:39 | 51,634,651 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 50 | py | #!/usr/local/bin/python3
print ("Hello, World!")
| [
"irahetakendra@gmail.com"
] | irahetakendra@gmail.com |
77b74a14ad5a1874eb757c258db26fc759163437 | 43e900f11e2b230cdc0b2e48007d40294fefd87a | /laioffer/remove-certain-characters.py | ba414a70cff9fca8d6bb41e33f8626f682e9c25a | [] | no_license | DarkAlexWang/leetcode | 02f2ed993688c34d3ce8f95d81b3e36a53ca002f | 89142297559af20cf990a8e40975811b4be36955 | refs/heads/master | 2023-01-07T13:01:19.598427 | 2022-12-28T19:00:19 | 2022-12-28T19:00:19 | 232,729,581 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 496 | py | class Solution:
def remove(self, string, t):
array = list(string)
uniq_t = set(t)
slow = 0
fast = 0
for fast in range(0, len(array)):
if array[fast] not in uniq_t:
array[slow] = array[fast]
slow += 1
res = ""
for i in range(slow):
res += array[i]
return res
if __name__ == "__main__":
solution = Solution()
res = solution.remove("aaabbbccc", "a")
print(res)
| [
"wangzhihuan0815@gmail.com"
] | wangzhihuan0815@gmail.com |
194439312eb994931c4e5cd372e342ca9dab1e8a | e177d6366c4e9572c4dbd6bdf66463fc0ec25b7c | /Python/gigasecond/gigasecond_test.py | dde3cc7143d916b58fdf90ee3f6a2d9bb8beb746 | [] | no_license | RafaelRampineli/exercism.io | a76e21d6036987eac4be678626b1b13df2d0bccc | 2d01aba240188289400d705698985c33e7401d42 | refs/heads/master | 2020-03-27T13:54:48.168686 | 2019-02-12T10:55:00 | 2019-02-12T10:55:00 | 146,634,095 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,441 | py | import unittest
from datetime import datetime
from gigasecond import add_gigasecond
# Tests adapted from `problem-specifications//canonical-data.json` @ v1.1.0
class GigasecondTest(unittest.TestCase):
    """Checks add_gigasecond: adding 10**9 seconds (~31.7 years) to a datetime."""

    def test_date_only_specification_of_time(self):
        self.assertEqual(
            add_gigasecond(datetime(2011, 4, 25)),
            datetime(2043, 1, 1, 1, 46, 40))

    def test_another_date_only_specification_of_time(self):
        self.assertEqual(
            add_gigasecond(datetime(1977, 6, 13)),
            datetime(2009, 2, 19, 1, 46, 40))

    def test_one_more_date_only_specification_of_time(self):
        self.assertEqual(
            add_gigasecond(datetime(1959, 7, 19)),
            datetime(1991, 3, 27, 1, 46, 40))

    def test_full_time_specified(self):
        self.assertEqual(
            add_gigasecond(datetime(2015, 1, 24, 22, 0, 0)),
            datetime(2046, 10, 2, 23, 46, 40))

    def test_full_time_with_day_roll_over(self):
        # One second before midnight: the gigasecond pushes past midnight.
        self.assertEqual(
            add_gigasecond(datetime(2015, 1, 24, 23, 59, 59)),
            datetime(2046, 10, 3, 1, 46, 39))

    def test_yourself(self):
        # customize this to test your birthday and find your gigasecond date:
        your_birthday = datetime(1970, 1, 1)
        your_gigasecond = datetime(2001, 9, 9, 1, 46, 40)

        self.assertEqual(add_gigasecond(your_birthday), your_gigasecond)
if __name__ == '__main__':
unittest.main()
| [
"rafael.rampineli@hotmail.com"
] | rafael.rampineli@hotmail.com |
87b5b1013121a670da7f12288049e9aa81b73e98 | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/synthetic/sieve-big-9528.py | 9b6ba89efb100cc35e49f496f579cb54f9ede179 | [] | no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 31,756 | py | # A resizable list of integers
class Vector(object):
items: [int] = None
size: int = 0
def __init__(self:"Vector"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector", idx: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector") -> int:
return self.size
# A resizable list of integers
# NOTE: the *2-suffixed fields/methods are synthetic duplicates; the extra
# parameters (item2, idx2, ...) are accepted but ignored, and every variant
# operates on self.items/self.size only.
class Vector2(object):
    items: [int] = None
    items2: [int] = None
    size: int = 0
    size2: int = 0

    def __init__(self:"Vector2"):
        self.items = [0]

    # Returns current capacity
    def capacity(self:"Vector2") -> int:
        return len(self.items)

    # Returns current capacity
    def capacity2(self:"Vector2") -> int:
        return len(self.items)

    # Increases capacity of vector by one element
    def increase_capacity(self:"Vector2") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Increases capacity of vector by one element
    def increase_capacity2(self:"Vector2") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Appends one item to end of vector
    def append(self:"Vector2", item: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()

        self.items[self.size] = item
        self.size = self.size + 1

    # Appends one item to end of vector
    def append2(self:"Vector2", item: int, item2: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()

        self.items[self.size] = item
        self.size = self.size + 1

    # Appends many items to end of vector
    def append_all(self:"Vector2", new_items: [int]) -> object:
        item:int = 0

        for item in new_items:
            self.append(item)

    # Appends many items to end of vector
    def append_all2(self:"Vector2", new_items: [int], new_items2: [int]) -> object:
        item:int = 0
        item2:int = 0

        for item in new_items:
            self.append(item)

    # Removes an item from the middle of vector
    def remove_at(self:"Vector2", idx: int) -> object:
        if idx < 0:
            return

        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1

        self.size = self.size - 1

    # Removes an item from the middle of vector
    def remove_at2(self:"Vector2", idx: int, idx2: int) -> object:
        if idx < 0:
            return

        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1

        self.size = self.size - 1

    # Retrieves an item at a given index
    def get(self:"Vector2", idx: int) -> int:
        return self.items[idx]

    # Retrieves an item at a given index
    def get2(self:"Vector2", idx: int, idx2: int) -> int:
        return self.items[idx]

    # Retrieves the current size of the vector
    def length(self:"Vector2") -> int:
        return self.size

    # Retrieves the current size of the vector
    def length2(self:"Vector2") -> int:
        return self.size
# A resizable list of integers
# NOTE: the *2/*3-suffixed fields/methods are synthetic duplicates; the
# extra parameters are accepted but ignored, and every variant operates on
# self.items/self.size only.
class Vector3(object):
    items: [int] = None
    items2: [int] = None
    items3: [int] = None
    size: int = 0
    size2: int = 0
    size3: int = 0

    def __init__(self:"Vector3"):
        self.items = [0]

    # Returns current capacity
    def capacity(self:"Vector3") -> int:
        return len(self.items)

    # Returns current capacity
    def capacity2(self:"Vector3") -> int:
        return len(self.items)

    # Returns current capacity
    def capacity3(self:"Vector3") -> int:
        return len(self.items)

    # Increases capacity of vector by one element
    def increase_capacity(self:"Vector3") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Increases capacity of vector by one element
    def increase_capacity2(self:"Vector3") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Increases capacity of vector by one element
    def increase_capacity3(self:"Vector3") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Appends one item to end of vector
    def append(self:"Vector3", item: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()

        self.items[self.size] = item
        self.size = self.size + 1

    # Appends one item to end of vector
    def append2(self:"Vector3", item: int, item2: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()

        self.items[self.size] = item
        self.size = self.size + 1

    # Appends one item to end of vector
    def append3(self:"Vector3", item: int, item2: int, item3: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()

        self.items[self.size] = item
        self.size = self.size + 1

    # Appends many items to end of vector
    def append_all(self:"Vector3", new_items: [int]) -> object:
        item:int = 0

        for item in new_items:
            self.append(item)

    # Appends many items to end of vector
    def append_all2(self:"Vector3", new_items: [int], new_items2: [int]) -> object:
        item:int = 0
        item2:int = 0

        for item in new_items:
            self.append(item)

    # Appends many items to end of vector
    def append_all3(self:"Vector3", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
        item:int = 0
        item2:int = 0
        item3:int = 0

        for item in new_items:
            self.append(item)

    # Removes an item from the middle of vector
    def remove_at(self:"Vector3", idx: int) -> object:
        if idx < 0:
            return

        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1

        self.size = self.size - 1

    # Removes an item from the middle of vector
    def remove_at2(self:"Vector3", idx: int, idx2: int) -> object:
        if idx < 0:
            return

        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1

        self.size = self.size - 1

    # Removes an item from the middle of vector
    def remove_at3(self:"Vector3", idx: int, idx2: int, idx3: int) -> object:
        if idx < 0:
            return

        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1

        self.size = self.size - 1

    # Retrieves an item at a given index
    def get(self:"Vector3", idx: int) -> int:
        return self.items[idx]

    # Retrieves an item at a given index
    def get2(self:"Vector3", idx: int, idx2: int) -> int:
        return self.items[idx]

    # Retrieves an item at a given index
    def get3(self:"Vector3", idx: int, idx2: int, idx3: int) -> int:
        return self.items[idx]

    # Retrieves the current size of the vector
    def length(self:"Vector3") -> int:
        return self.size

    # Retrieves the current size of the vector
    def length2(self:"Vector3") -> int:
        return self.size

    # Retrieves the current size of the vector
    def length3(self:"Vector3") -> int:
        return self.size
# A resizable list of integers
# NOTE: the *2/*3/*4-suffixed fields/methods are synthetic duplicates; the
# extra parameters are accepted but ignored, and every variant operates on
# self.items/self.size only.
class Vector4(object):
    items: [int] = None
    items2: [int] = None
    items3: [int] = None
    items4: [int] = None
    size: int = 0
    size2: int = 0
    size3: int = 0
    size4: int = 0

    def __init__(self:"Vector4"):
        self.items = [0]

    # Returns current capacity
    def capacity(self:"Vector4") -> int:
        return len(self.items)

    # Returns current capacity
    def capacity2(self:"Vector4") -> int:
        return len(self.items)

    # Returns current capacity
    def capacity3(self:"Vector4") -> int:
        return len(self.items)

    # Returns current capacity
    def capacity4(self:"Vector4") -> int:
        return len(self.items)

    # Increases capacity of vector by one element
    def increase_capacity(self:"Vector4") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Increases capacity of vector by one element
    def increase_capacity2(self:"Vector4") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Increases capacity of vector by one element
    def increase_capacity3(self:"Vector4") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Increases capacity of vector by one element
    def increase_capacity4(self:"Vector4") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Appends one item to end of vector
    def append(self:"Vector4", item: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()

        self.items[self.size] = item
        self.size = self.size + 1

    # Appends one item to end of vector
    def append2(self:"Vector4", item: int, item2: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()

        self.items[self.size] = item
        self.size = self.size + 1

    # Appends one item to end of vector
    def append3(self:"Vector4", item: int, item2: int, item3: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()

        self.items[self.size] = item
        self.size = self.size + 1

    # Appends one item to end of vector
    def append4(self:"Vector4", item: int, item2: int, item3: int, item4: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()

        self.items[self.size] = item
        self.size = self.size + 1

    # Appends many items to end of vector
    def append_all(self:"Vector4", new_items: [int]) -> object:
        item:int = 0

        for item in new_items:
            self.append(item)

    # Appends many items to end of vector
    def append_all2(self:"Vector4", new_items: [int], new_items2: [int]) -> object:
        item:int = 0
        item2:int = 0

        for item in new_items:
            self.append(item)

    # Appends many items to end of vector
    def append_all3(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
        item:int = 0
        item2:int = 0
        item3:int = 0

        for item in new_items:
            self.append(item)

    # Appends many items to end of vector
    def append_all4(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
        item:int = 0
        item2:int = 0
        item3:int = 0
        item4:int = 0

        for item in new_items:
            self.append(item)

    # Removes an item from the middle of vector
    def remove_at(self:"Vector4", idx: int) -> object:
        if idx < 0:
            return

        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1

        self.size = self.size - 1

    # Removes an item from the middle of vector
    def remove_at2(self:"Vector4", idx: int, idx2: int) -> object:
        if idx < 0:
            return

        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1

        self.size = self.size - 1

    # Removes an item from the middle of vector
    def remove_at3(self:"Vector4", idx: int, idx2: int, idx3: int) -> object:
        if idx < 0:
            return

        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1

        self.size = self.size - 1

    # Removes an item from the middle of vector
    def remove_at4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> object:
        if idx < 0:
            return

        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1

        self.size = self.size - 1

    # Retrieves an item at a given index
    def get(self:"Vector4", idx: int) -> int:
        return self.items[idx]

    # Retrieves an item at a given index
    def get2(self:"Vector4", idx: int, idx2: int) -> int:
        return self.items[idx]

    # Retrieves an item at a given index
    def get3(self:"Vector4", idx: int, idx2: int, idx3: int) -> int:
        return self.items[idx]

    # Retrieves an item at a given index
    def get4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> int:
        return self.items[idx]

    # Retrieves the current size of the vector
    def length(self:"Vector4") -> int:
        return self.size

    # Retrieves the current size of the vector
    def length2(self:"Vector4") -> int:
        return self.size

    # Retrieves the current size of the vector
    def length3(self:"Vector4") -> int:
        return self.size

    # Retrieves the current size of the vector
    def length4(self:"Vector4") -> int:
        return self.size
# A resizable list of integers
class Vector5(object):
items: [int] = None
items2: [int] = None
items3: [int] = None
items4: [int] = None
items5: [int] = None
size: int = 0
size2: int = 0
size3: int = 0
size4: int = 0
size5: int = 0
def __init__(self:"Vector5"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity3(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity4(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity5(self:"Vector5") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity3(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity4(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity5(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector5", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector5", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append3(self:"Vector5", item: int, item2: int, item3: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append4(self:"Vector5", item: int, item2: int, item3: int, item4: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append5(self:"Vector5", item: int, item2: int, item3: int, item4: int, item5: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector5", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector5", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all3(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all4(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
item4:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all5(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int], new_items5: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
item4:int = 0
item5:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector5", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector5", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at3(self:"Vector5", idx: int, idx2: int, idx3: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector5", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector5", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get3(self:"Vector5", idx: int, idx2: int, idx3: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length3(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length4(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length5(self:"Vector5") -> int:
return self.size
# A faster (but more memory-consuming) implementation of vector
class DoublingVector(Vector):
doubling_limit:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector2(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector2") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector2") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector3(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector4(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
doubling_limit4:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity4(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector5(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
doubling_limit4:int = 1000
doubling_limit5:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity4(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity5(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Makes a vector in the range [i, j)
def vrange(i:int, j:int) -> Vector:
v:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange2(i:int, j:int, i2:int, j2:int) -> Vector:
v:Vector = None
v2:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange3(i:int, j:int, i2:int, j2:int, i3:int, j3:int) -> Vector:
v:Vector = None
v2:Vector = None
v3:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange4(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int) -> Vector:
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange5(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int, i5:int, j5:int) -> Vector:
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v5:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
# Sieve of Eratosthenes (not really)
def sieve(v:Vector) -> object:
i:int = 0
j:int = 0
k:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
def sieve2(v:Vector, v2:Vector) -> object:
i:int = 0
i2:int = 0
j:int = 0
j2:int = 0
k:int = 0
k2:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
def sieve3(v:Vector, v2:Vector, v3:Vector) -> object:
i:int = 0
i2:int = 0
i3:int = 0
j:int = 0
j2:int = 0
j3:int = 0
k:int = 0
k2:int = 0
k3:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
def sieve4(v:Vector, v2:Vector, v3:Vector, v4:Vector) -> object:
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
j:int = 0
j2:int = 0
j3:int = 0
j4:int = 0
k:int = 0
k2:int = 0
k3:int = 0
k4:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
def sieve5(v:Vector, v2:Vector, v3:Vector, v4:Vector, v5:Vector) -> object:
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
j:int = 0
j2:int = 0
j3:int = 0
j4:int = 0
j5:int = 0
k:int = 0
k2:int = 0
k3:int = 0
k4:int = 0
$ID:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
# Input parameter
n:int = 50
n2:int = 50
n3:int = 50
n4:int = 50
n5:int = 50
# Data
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v5:Vector = None
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
# Crunch
v = vrange(2, n)
v2 = vrange(2, n)
v3 = vrange(2, n)
v4 = vrange(2, n)
v5 = vrange(2, n)
sieve(v)
# Print
while i < v.length():
print(v.get(i))
i = i + 1
| [
"647530+Virtlink@users.noreply.github.com"
] | 647530+Virtlink@users.noreply.github.com |
ceb0bf1e823ba32931903f316a3e63e9c298dbf2 | 86510b47b768d80127adcbd53b06fdff58fd95a4 | /python/problem_067.py | b77ec6c66cacef5160ffd9fc456950d5f2882f00 | [] | no_license | Kimbsy/project-euler | d018ad759ae599147e11431f818c9bfd3fc82f73 | e1eda2779b6499a6d33a848eacc5e1c15405bf70 | refs/heads/master | 2021-08-27T16:22:19.167892 | 2021-08-16T17:09:08 | 2021-08-16T17:09:08 | 50,948,043 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,052 | py | import csv
"""By starting at the top of the triangle below and moving to adjacent numbers
on the row below, the maximum total from top to bottom is 23.
3
7 4
2 4 6
8 5 9 3
That is, 3 + 7 + 4 + 9 = 23.
Find the maximum total from top to bottom in
data/problem067.py
a 15K text file containing a triangle with one-hundred rows.
NOTE: This is a much more difficult version of Problem 18. It is not possible to
try every route to solve this problem, as there are 2^99 altogether! If you
could check one trillion (10^12) routes every second it would take over twenty
billion years to check them all. There is an efficient algorithm to solve it.
"""
data = []
with open('data/problem_067.csv') as input_file:
for row in csv.reader(input_file):
int_row = [int(value) for value in row]
data.append(int_row)
for i in range(len(data) - 1, 0, -1):
row = data[i]
for j in range(0, len(row) - 1):
max_val = max([row[j], row[j + 1]])
data[i - 1][j] = data[i - 1][j] + max_val
row = data[i]
print(data[0][0]) | [
"lordkimber@gmail.com"
] | lordkimber@gmail.com |
e856c3512502cc8ddd31849054c4633d661bca3c | 9d6271fd3851acb797a5120e0d884130f7548833 | /kmeans.py | 4950fd689a0074d89dbcfb3e82ec63e3d12597e9 | [] | no_license | Wenbin94/toolbox | f5d69e1b3a158ad076562829e2d83738e282da04 | e88e1ba51e5a4c963626000b434072b6aa64e09d | refs/heads/master | 2020-08-22T02:50:57.779313 | 2019-10-08T10:57:52 | 2019-10-08T10:57:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,233 | py | '''
2019.10.8 ming71
功能: 对box进行kmeans聚类
注意:
- 停止条件是最小值索引不变而不是最小值不变,会造成早停,可以改
- 暂时仅支持voc标注
- 如需改动再重写get_all_boxes函数即可
'''
import numpy as np
import glob
import os
from decimal import Decimal
class Kmeans:
def __init__(self, cluster_number, all_boxes, save_path):
self.cluster_number = cluster_number
self.all_boxes = all_boxes
self.save_path = save_path
# 输入两个二维数组:所有box和种子点box
# 输出[num_boxes, k]的结果
def iou(self, boxes, clusters): # 1 box -> k clusters
n = boxes.shape[0]
k = self.cluster_number #类别
box_area = boxes[:, 0] * boxes[:, 1] #列表切片操作:取所有行0列和1列相乘 ,得到gt的面积的行向量
box_area = box_area.repeat(k) #行向量进行重复
box_area = np.reshape(box_area, (n, k))
cluster_area = clusters[:, 0] * clusters[:, 1] #种子点的面积行向量
cluster_area = np.tile(cluster_area, [1, n])
cluster_area = np.reshape(cluster_area, (n, k))
box_w_matrix = np.reshape(boxes[:, 0].repeat(k), (n, k))
cluster_w_matrix = np.reshape(np.tile(clusters[:, 0], (1, n)), (n, k))
min_w_matrix = np.minimum(cluster_w_matrix, box_w_matrix)
box_h_matrix = np.reshape(boxes[:, 1].repeat(k), (n, k))
cluster_h_matrix = np.reshape(np.tile(clusters[:, 1], (1, n)), (n, k))
min_h_matrix = np.minimum(cluster_h_matrix, box_h_matrix)
inter_area = np.multiply(min_w_matrix, min_h_matrix)
result = inter_area / (box_area + cluster_area - inter_area + 1e-16)
assert (result>0).all() == True , 'negtive anchors present , cluster again!'
return result
def avg_iou(self, boxes, clusters):
accuracy = np.mean([np.max(self.iou(boxes, clusters), axis=1)])
return accuracy
#注意:这里代码选择的停止聚类的条件是最小值的索引不变,而不是种子点的数值不变。这样的误差会大一点。
def kmeans(self, boxes, k, dist=np.median):
box_number = boxes.shape[0] # box个数
distances = np.empty((box_number, k)) # 初始化[box_number , k]二维数组,存放自定义iou距离(obj*anchor)
last_nearest = np.zeros((box_number,)) # [box_number , ]的标量
np.random.seed()
clusters = boxes[np.random.choice(
box_number, k, replace=False)] # 种子点随机初始化
# 种子点一旦重复会有计算错误,避免!
while True :
uniques_clusters = np.unique(clusters,axis=0)
if len(uniques_clusters)==len(clusters) :
break
clusters = boxes[np.random.choice(box_number, k, replace=False)]
# k-means
while True:
# 每轮循环,计算种子点外所有点各自到k个种子点的自定义距离,并且按照距离各个点找离自己最近的种子点进行归类;计算新的各类中心;然后下一轮循环
distances = 1 - self.iou(boxes, clusters) # iou越大,距离越小
current_nearest = np.argmin(distances, axis=1) # 展开为box_number长度向量,代表每个box当前属于哪个种子点类别(0,k-1)
if (last_nearest == current_nearest).all(): # 每个box的当前类别所属和上一次相同,不再移动聚类
break
#计算新的k个种子点坐标
for cluster in range(k):
clusters[cluster] = dist(boxes[current_nearest == cluster], axis=0) # 只对还需要聚类的种子点进行位移
last_nearest = current_nearest
return clusters
def result2txt(self, data):
f = open(self.save_path, 'w')
row = np.shape(data)[0]
for i in range(row):
if i == 0:
x_y = "%d,%d" % (data[i][0], data[i][1])
else:
x_y = ", %d,%d" % (data[i][0], data[i][1])
f.write(x_y)
f.close() #最终输出的是w1,h1,w2,h2,w3,h3,...
def clusters(self):
all_boxes = np.array(self.all_boxes) #返回全部gt的宽高二维数组
result = self.kmeans(all_boxes, k=self.cluster_number) #传入两个聚类参数:所有gt宽高的二维数组和种子点数,并返回聚类结果k*2
result = result[np.lexsort(result.T[0, None])] #将得到的三个anchor按照宽进行从小到大,重新排序
self.result2txt(result)
print("K anchors:\n {}".format(result))
print("Accuracy: {:.2f}%".format(
self.avg_iou(all_boxes, result) * 100))
# 返回所有label的box,形式为[[w1,h1],[w2,h2],...]
def get_all_boxes(path):
mode = 'voc'
boxes = []
labels = sorted(glob.glob(os.path.join(path, '*.*')))
for label in labels:
with open(label,'r') as f:
contents = f.read()
objects = contents.split('<object>')
objects.pop(0)
assert len(objects) > 0, 'No object found in ' + xml_path
for object in objects:
xmin = int(object[object.find('<xmin>')+6 : object.find('</xmin>')])
xmax = int(object[object.find('<xmax>')+6 : object.find('</xmax>')])
ymin = int(object[object.find('<ymin>')+6 : object.find('</ymin>')])
ymax = int(object[object.find('<ymax>')+6 : object.find('</ymax>')])
box_w = xmax - xmin
box_h = ymax - ymin
boxes.append((box_w,box_h))
return boxes
if __name__ == "__main__":
cluster_number = 9 # 种子点个数,即anchor数目
label_path = r'/py/datasets/ship/tiny_ships/yolo_ship/train_labels'
save_path = r'/py/yolov3/cfg/anchor-cluster.txt'
all_boxes = get_all_boxes(label_path)
kmeans = Kmeans(cluster_number, all_boxes,save_path)
kmeans.clusters()
| [
"chaser.ming@gmail.com"
] | chaser.ming@gmail.com |
7f23b4f5caab5d1fc6f4597acfb6856c18aa205f | ca2ac853bcf65c040e4a1e70fafa1048945559a4 | /target_offer/链表中环的入口节点.py | bc63c46acc6d87fdd291dd593a560599a606384a | [] | no_license | tianwen0110/find-job | a287989c1dd9a4b965894049b3593530188b54ee | 88e6ac37ed411b62574bcc8bcbfcfd56055d17e3 | refs/heads/master | 2020-06-21T05:15:32.327040 | 2020-06-08T12:52:58 | 2020-06-08T12:52:58 | 197,353,258 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,407 | py | '''
一个链表中包含环,请找出该链表的环的入口结点。
'''
class ListNode:
def __init__(self, x):
self.val = x
self.next = None
# 没有考虑自环,只需加一次判断
class solution(object):
def meetnode(self, head):
if head == None:
return None
pslow = head.next
if pslow == None:
return None
pfast = pslow.next
if pfast == None:
return None
while pslow != pfast:
pslow = pslow.next
pfast = pfast.next
if pfast == None:
return None
pfast = pfast.next
if pfast == None:
return None
i = 1
pfast = pfast.next
# 这里没有考虑自环,只需加一次判断
while pslow != pfast:
pfast = pfast.next
i = i + 1
pfast = head
pslow = head
for j in range(i):
pfast = pfast.next
i = 0
while pfast != pslow:
pfast = pfast.next
pslow = pslow.next
return pfast.val
node1 = ListNode(1)
node2 = ListNode(2)
node3 = ListNode(3)
node4 = ListNode(4)
node5 = ListNode(5)
node6 = ListNode(6)
node1.next = node2
node2.next = node3
node3.next = node4
node4.next = node5
node5.next = node6
node6.next = node1
s = solution()
print(s.meetnode(node1))
| [
"841310406@qq.com"
] | 841310406@qq.com |
2c2523b87fc17bc946905833fc696df850396c2f | 0ec9d7334902af5170f85886d1e55807e49cd573 | /find_max.py | fd57543f0a8ecc3b8da32c504a552f17f3af2b41 | [] | no_license | IsiceraDew/python_learning | 3484a904660019b03906a2309170cea70a6dadbc | cd3ae3f56f3a4f9946a6f164a5f5fcbbaf8ac180 | refs/heads/master | 2021-08-22T03:39:25.139578 | 2017-11-29T05:13:49 | 2017-11-29T05:13:49 | 109,782,983 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 455 | py | """
Write a function that determines the maximal value in a non-empty list of integers.
E.g., find_max([1,2,3,4,5,6]) == 6
find_max([4, 1, 10, 2]) == 10
"""
def find_max(x):
i = 0
max_so_far = x[0]
while i < len(x):
if x[i] > max_so_far :
max_so_far = x[i]
i = i + 1
return max_so_far
print "This should be 6:", find_max([1,2,6])
print "This should be 10:", find_max([4, 1, 10, 2])
print "This should be 200:", find_max([200, 4, 1, 10, 2])
| [
"mugenxero@gmail.com"
] | mugenxero@gmail.com |
33580815c9068387ec17fb3985c09c0b2dbc4331 | 7027a90b73d774394c309fd7518599dc9364bb10 | /test/functional/test_framework/bignum.py | c31bc3ba7ee578cbdacc8a58e6eb13e22b4c8f9e | [
"MIT"
] | permissive | IDC-Group/VHKD | a5a5b1b9b275a9fadbf8b9c714c8358ee8f7c46a | 0256ddf1477439ebc84e97132d3673aa61c39b73 | refs/heads/master | 2020-03-21T09:12:22.738342 | 2018-06-23T09:46:17 | 2018-06-23T09:46:17 | 138,387,759 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,224 | py | #!/usr/bin/env python3
#
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Big number routines.
This file is copied from python-vhkdlib.
"""
import struct
# generic big endian MPI format
def bn_bytes(v, have_ext=False):
ext = 0
if have_ext:
ext = 1
return ((v.bit_length()+7)//8) + ext
def bn2bin(v):
s = bytearray()
i = bn_bytes(v)
while i > 0:
s.append((v >> ((i-1) * 8)) & 0xff)
i -= 1
return s
def bn2mpi(v):
have_ext = False
if v.bit_length() > 0:
have_ext = (v.bit_length() & 0x07) == 0
neg = False
if v < 0:
neg = True
v = -v
s = struct.pack(b">I", bn_bytes(v, have_ext))
ext = bytearray()
if have_ext:
ext.append(0)
v_bin = bn2bin(v)
if neg:
if have_ext:
ext[0] |= 0x80
else:
v_bin[0] |= 0x80
return s + ext + v_bin
# vhkd-specific little endian format, with implicit size
def mpi2vch(s):
r = s[4:] # strip size
r = r[::-1] # reverse string, converting BE->LE
return r
def bn2vch(v):
return bytes(mpi2vch(bn2mpi(v)))
| [
"support@idcm.io"
] | support@idcm.io |
65b8808ec3e1a0d27451e396ee0d6a134cdabb91 | a98cab2f9c24a85a5f46b2cbec7506b79f4ea634 | /app/src/models/sentence_model.py | 1588e64f2771d2064366357aaa9e173d0246e6a2 | [] | no_license | DIS-SIN/ODSC-2019 | b8b8d10b41d95925219a0be36b5ef8b541396681 | c2a606471452e358f0e245841e78f562c570bbf5 | refs/heads/master | 2020-05-17T18:27:08.591454 | 2019-04-30T03:02:35 | 2019-04-30T03:02:35 | 183,884,786 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | from neomodel import (
StructuredNode,
StringProperty,
DateTimeProperty,
UniqueIdProperty
)
from datetime import datetime
class Sentence(StructuredNode):
nodeId = UniqueIdProperty()
sentence = StringProperty(required=True)
addedOn = DateTimeProperty(default_now=True)
updatedOn = DateTimeProperty()
sentimentScore = StringProperty()
magnitudeScore = StringProperty()
def pre_save(self):
self.updatedOn = datetime.utcnow()
| [
"omardev9898@gmail.com"
] | omardev9898@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.