index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
29,039
|
GulaJan/currency_converter
|
refs/heads/master
|
/api.py
|
#!/usr/bin/python3.6
# -*- coding: utf-8 -*-
# Author: Jan Gula
# Date: 02/2018
# File: API using Flask
import sys
from flask import Flask, request, jsonify
from currency_converter import fetch_rates, convert_to_output_currency, recognize_symbol
app = Flask(__name__)
@app.route('/currency_converter')
def api():
    """Currency-conversion endpoint.

    Query parameters:
        amount          -- positive number to convert (required)
        input_currency  -- source currency code or symbol (required)
        output_currency -- target currency code or symbol (optional; when
                           omitted, the amount is converted to every known
                           currency)

    Returns JSON {'input': {...}, 'output': {...}} on success, or
    {'error': {'code': ..., 'message': ...}} on validation/recognition
    failure.  NOTE(review): the error responses reuse 201/202 as both the
    app-level error code and the HTTP status — these look like an
    app-specific convention, not standard HTTP semantics; confirm before
    changing.
    """
    amount = request.args.get('amount')
    input_currency = request.args.get('input_currency')
    output_currency = request.args.get('output_currency')
    err_msg = ""
    if not amount:
        err_msg = 'Amount required'
    # Supposedly fastest way to check if a string is a number, benchmark results https://i.stack.imgur.com/DFoK6.png
    # Problem discussed here https://stackoverflow.com/questions/354038/how-do-i-check-if-a-string-is-a-number-float
    elif not(amount.replace('.','',1).isdigit()):
        err_msg = 'Amount has to be a positive number'
    elif not input_currency:
        err_msg = 'Input currency required'
    # Basic validation failed: report error code 201 without fetching rates.
    if err_msg:
        response = jsonify({'error': {'code' : '201', 'message': err_msg}})
        response.status_code = 201
        return response
    rates = fetch_rates()
    # recognize_symbol maps symbols like '$' to ISO codes and raises
    # KeyError when it cannot resolve the input.
    try:
        input_currency = recognize_symbol(input_currency, rates)
        if output_currency:
            output_currency = recognize_symbol(output_currency, rates)
    except KeyError:
        err_msg = 'Input or output symbol was not recognized'
    if err_msg:
        response = jsonify({'error': {'code' : '202', 'message': err_msg}})
        response.status_code = 202
        return response
    # convert_to_output_currency signals an unknown currency by letting
    # UnboundLocalError escape from the rate arithmetic.
    try:
        converted_val = convert_to_output_currency(amount, input_currency, output_currency, rates)
    except UnboundLocalError:
        response = jsonify({'error': {'code' : '202', 'message': 'Input or output currency was not recognized'}})
        response.status_code = 202
        return response
    if output_currency:
        # Single-target conversion returns a Decimal: round + stringify here.
        converted_val = str(round(converted_val, 2))
        output = {output_currency : converted_val}
    else:
        # No target given: converted_val is already a {code: amount} dict.
        output = converted_val
    return jsonify({'input': {'amount': str(amount), 'currency': input_currency}, 'output': output })
if __name__ == '__main__' :
    # Development server bound to localhost on Flask's default port.
    app.run('127.0.0.1', 5000)
#Set for localhost listening on the default port 5000
#To reach from outside of localhost use these settings:
#app.run('0.0.0.0')
|
{"/cli.py": ["/currency_converter.py", "/constants.py"], "/api.py": ["/currency_converter.py"], "/currency_converter.py": ["/constants.py"]}
|
29,040
|
GulaJan/currency_converter
|
refs/heads/master
|
/currency_converter.py
|
#!/usr/bin/python3.6
# -*- coding: utf-8 -*-
# Author: Jan Gula
# Date: 02/2018
# File: Shared functions for both CLI and API
import urllib.request
import xmltodict
import json
import decimal
from constants import decipher_symbol
def fetch_rates():
    """Download the daily ECB reference rates and return them as a dict.

    Returns a mapping of currency code -> rate relative to EUR.  Rates
    parsed from the XML are strings; 'EUR' itself is added explicitly
    (as int 1) because it is the base currency of the feed and absent
    from the rate list.

    NOTE(review): no error handling — a network failure or a changed XML
    layout propagates (URLError / KeyError) to the caller.
    """
    url = "http://www.ecb.europa.eu/stats/eurofxref/eurofxref-daily.xml"
    xml_content = xmltodict.parse(urllib.request.urlopen(url).read())
    currency_rates_dict = {}
    # gesmes:Envelope/Cube/Cube/Cube is the triple nesting the ECB feed uses
    # for the per-currency entries.
    for item in xml_content['gesmes:Envelope']['Cube']['Cube']['Cube']:
        currency_rates_dict[item['@currency']] = item['@rate']
        # currency = key and rates = value
    currency_rates_dict.update({'EUR':1})
    return currency_rates_dict
def calculate_result(amount, input_currency, output_currency, currency_rates):
    """Convert *amount* between two currencies using EUR-based rates.

    The rate table is relative to EUR, so the amount is first normalised
    to EUR (divide by the input rate) and then scaled by the output rate.
    Returns a ``decimal.Decimal``.
    """
    in_rate = currency_rates.get(input_currency)
    out_rate = currency_rates.get(output_currency)
    eur_amount = decimal.Decimal(amount) / decimal.Decimal(in_rate)
    return eur_amount * decimal.Decimal(out_rate)
def recognize_symbol(currency, rates):
    """Resolve *currency* to a code present in *rates*.

    A currency that already has a (truthy) rate is returned unchanged;
    otherwise ``decipher_symbol`` is consulted to map a symbol (e.g. '$')
    to a code.  Raises ``KeyError`` when neither lookup succeeds.
    """
    if rates.get(currency):
        return currency
    deciphered = decipher_symbol(currency)
    if not deciphered:
        raise KeyError
    return deciphered
def convert_to_output_currency(amount, input_currency, output_currency, filtered_rates):
    """Convert *amount* of *input_currency*.

    When *output_currency* is truthy, return the converted value as a
    ``decimal.Decimal`` (the caller rounds/formats it).  When it is falsy,
    return a dict mapping every currency code in *filtered_rates* (plus
    'EUR', the feed's base) to the converted amount rounded to two
    decimal places, as strings.

    An unknown currency surfaces as ``UnboundLocalError`` from
    ``calculate_result``; callers rely on that exception type.  The
    original ``except UnboundLocalError: raise UnboundLocalError`` blocks
    were no-op handlers that replaced the original exception with a bare
    new one (losing args/traceback) — removed; the same exception type
    still propagates unchanged.
    """
    if output_currency:
        return calculate_result(amount, input_currency, output_currency, filtered_rates)
    all_currencies = {}
    # If no output is set we have to explicitly add EUR because it's the
    # base in our data source (absent from the rate entries themselves).
    convert_to_euro = calculate_result(amount, input_currency, 'EUR', filtered_rates)
    all_currencies['EUR'] = str(round(convert_to_euro, 2))
    for currency_code in filtered_rates:
        converted_value = calculate_result(amount, input_currency, currency_code, filtered_rates)
        all_currencies[currency_code] = str(round(converted_value, 2))
    return all_currencies
|
{"/cli.py": ["/currency_converter.py", "/constants.py"], "/api.py": ["/currency_converter.py"], "/currency_converter.py": ["/constants.py"]}
|
29,058
|
Wiatrogon/pyimgui
|
refs/heads/master
|
/doc/source/gen_example.py
|
# -*- coding: utf-8 -*-
from inspect import cleandoc
import os
import glfw
import OpenGL.GL as gl
from PIL import Image
import imgui
from imgui.impl import GlfwImpl
def render_snippet(
    source,
    file_path,
    title="",
    width=200,
    height=200,
    auto_window=False,
    auto_layout=False,
    output_dir='.',
):
    """Render an imgui *source* snippet into a PNG at *file_path*.

    Creates an invisible GLFW window with an OpenGL 3.3 core context,
    renders a single imgui frame into an offscreen framebuffer and saves
    the pixels with PIL.  ``source`` is executed with ``exec`` — only
    trusted documentation snippets may be passed here.
    """
    code = compile(source, '<str>', 'exec')
    window_name = "minimal ImGui/GLFW3 example"
    if not glfw.init():
        print("Could not initialize OpenGL context")
        exit(1)
    # OS X supports only forward-compatible core profiles from 3.2
    glfw.window_hint(glfw.CONTEXT_VERSION_MAJOR, 3)
    glfw.window_hint(glfw.CONTEXT_VERSION_MINOR, 3)
    glfw.window_hint(glfw.OPENGL_PROFILE, glfw.OPENGL_CORE_PROFILE)
    glfw.window_hint(glfw.OPENGL_FORWARD_COMPAT, gl.GL_TRUE)
    # note: creating context without window is tricky so made window invisible
    glfw.window_hint(glfw.VISIBLE, False)
    window = glfw.create_window(
        int(width), int(height), window_name, None, None
    )
    # bugfix: validate the handle BEFORE using it — the original called
    # make_context_current() on a potentially-NULL window first.
    if not window:
        glfw.terminate()
        print("Could not initialize Window")
        exit(1)
    glfw.make_context_current(window)
    imgui_ctx = GlfwImpl(window)
    imgui_ctx.enable()
    glfw.poll_events()
    # render target for framebuffer
    texture = gl.glGenTextures(1)
    gl.glBindTexture(gl.GL_TEXTURE_2D, texture)
    gl.glTexImage2D(gl.GL_TEXTURE_2D, 0, gl.GL_RGBA, width, height, 0, gl.GL_RGB, gl.GL_UNSIGNED_BYTE, None)
    gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MAG_FILTER, gl.GL_NEAREST)
    gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MIN_FILTER, gl.GL_NEAREST)
    # create new framebuffer
    offscreen_fb = gl.glGenFramebuffers(1)
    gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, offscreen_fb)
    # attach texture to framebuffer
    gl.glFramebufferTexture2D(gl.GL_FRAMEBUFFER, gl.GL_COLOR_ATTACHMENT0, gl.GL_TEXTURE_2D, texture, 0)
    imgui_ctx.new_frame()
    with imgui.styled(imgui.STYLE_ALPHA, 1):
        imgui.core.set_next_window_size(0, 0)
        if auto_layout:
            imgui.set_next_window_size(width - 10, height - 10)
            imgui.set_next_window_centered()
        if auto_window:
            imgui.set_next_window_size(width - 10, height - 10)
            imgui.set_next_window_centered()
            imgui.begin("Example: %s" % title)
        # snippet sees this module's globals (imgui, gl, ...) as its namespace
        exec(code, locals(), globals())
        if auto_window:
            imgui.end()
    gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, offscreen_fb)
    gl.glClearColor(1, 1, 1, 0)
    gl.glClear(gl.GL_COLOR_BUFFER_BIT)
    imgui.render()
    # retrieve pixels from framebuffer and write to file
    pixels = gl.glReadPixels(0, 0, width, height, gl.GL_RGBA, gl.GL_UNSIGNED_BYTE)
    image = Image.frombytes('RGBA', (width, height), pixels)
    # note: glReadPixels returns lines "bottom to top" but PIL reads bytes
    # top to bottom
    image = image.transpose(Image.FLIP_TOP_BOTTOM)
    image.save(os.path.join(output_dir, file_path))
    glfw.terminate()
if __name__ == "__main__":
    # Smoke test: render a tiny two-line snippet to example_snippet.png
    # in the current directory.
    example_source = cleandoc(
        """
        imgui.text("Bar")
        imgui.text_colored("Eggs", 0.2, 1., 0.)
        """
    )
    render_snippet(example_source, 'example_snippet.png')
|
{"/doc/source/gen_example.py": ["/imgui/__init__.py", "/imgui/impl/__init__.py"], "/imgui/impl/_glfw.py": ["/imgui/__init__.py"], "/imgui/impl/__init__.py": ["/imgui/impl/_glfw.py"]}
|
29,059
|
Wiatrogon/pyimgui
|
refs/heads/master
|
/imgui/__init__.py
|
# -*- coding: utf-8 -*-
VERSION = (0, 0, 0)  # PEP 386
__version__ = ".".join([str(x) for x in VERSION])
from imgui.core import *  # noqa
from imgui import core
# Byte offsets/sizes describing the vertex/index buffer layout exposed by
# the compiled core module; renderer back-ends (e.g. the GLFW impl) feed
# these to glVertexAttribPointer / glDrawElements.
VERTEX_BUFFER_POS_OFFSET = core.vertex_buffer_vertex_pos_offset()
VERTEX_BUFFER_UV_OFFSET = core.vertex_buffer_vertex_uv_offset()
VERTEX_BUFFER_COL_OFFSET = core.vertex_buffer_vertex_col_offset()
VERTEX_SIZE = core.vertex_buffer_vertex_size()
INDEX_SIZE = core.index_buffer_index_size()
# ==== Condition constants (redefines for autodoc)
#: Set the variable always
ALWAYS = core.ALWAYS
#: Only set the variable on the first call per runtime session
ONCE = core.ONCE
#: Only set the variable if the window doesn't exist in the .ini file
FIRST_USE_EVER = core.FIRST_USE_EVER
#: Only set the variable if the window is appearing after being inactive
#: (or the first time)
APPEARING = core.APPEARING
# === Key map constants (redefines for autodoc)
#: for tabbing through fields
KEY_TAB = core.KEY_TAB
#: for text edit
KEY_LEFT_ARROW = core.KEY_LEFT_ARROW
#: for text edit
#: bugfix: was mistakenly aliased to core.KEY_UP_ARROW (copy-paste error),
#: which made right-arrow report as up-arrow in key maps.
KEY_RIGHT_ARROW = core.KEY_RIGHT_ARROW
#: for text edit
KEY_UP_ARROW = core.KEY_UP_ARROW
#: for text edit
KEY_DOWN_ARROW = core.KEY_DOWN_ARROW
KEY_PAGE_UP = core.KEY_PAGE_UP
KEY_PAGE_DOWN = core.KEY_PAGE_DOWN
#: for text edit
KEY_HOME = core.KEY_HOME
#: for text edit
KEY_END = core.KEY_END
#: for text edit
KEY_DELETE = core.KEY_DELETE
#: for text edit
KEY_BACKSPACE = core.KEY_BACKSPACE
#: for text edit
KEY_ENTER = core.KEY_ENTER
#: for text edit
KEY_ESCAPE = core.KEY_ESCAPE
#: for text edit CTRL+A: select all
KEY_A = core.KEY_A
#: for text edit CTRL+C: copy
KEY_C = core.KEY_C
#: for text edit CTRL+V: paste
KEY_V = core.KEY_V
#: for text edit CTRL+X: cut
KEY_X = core.KEY_X
#: for text edit CTRL+Y: redo
KEY_Y = core.KEY_Y
#: for text edit CTRL+Z: undo
KEY_Z = core.KEY_Z
# === Style var constants (redefines for autodoc)
#: associated type: ``float``
STYLE_ALPHA = core.STYLE_ALPHA
#: associated type: ``Vec2``
STYLE_WINDOW_PADDING = core.STYLE_WINDOW_PADDING
#: associated type: ``float``
STYLE_WINDOW_ROUNDING = core.STYLE_WINDOW_ROUNDING
#: associated type: ``Vec2``
STYLE_WINDOW_MIN_SIZE = core.STYLE_WINDOW_MIN_SIZE
#: associated type: ``float``
STYLE_CHILD_WINDOW_ROUNDING = core.STYLE_CHILD_WINDOW_ROUNDING
#: associated type: ``Vec2``
STYLE_FRAME_PADDING = core.STYLE_FRAME_PADDING
#: associated type: ``float``
STYLE_FRAME_ROUNDING = core.STYLE_FRAME_ROUNDING
#: associated type: ``Vec2``
STYLE_ITEM_SPACING = core.STYLE_ITEM_SPACING
#: associated type: ``Vec2``
STYLE_ITEM_INNER_SPACING = core.STYLE_ITEM_INNER_SPACING
#: associated type: ``float``
STYLE_INDENT_SPACING = core.STYLE_INDENT_SPACING
#: associated type: ``float``
STYLE_GRAB_MIN_SIZE = core.STYLE_GRAB_MIN_SIZE
# Guarded: only present in newer compiled core modules.
if hasattr(core, 'STYLE_BUTTON_TEXT_ALIGN'):
    #: associated type: flags ImGuiAlign_*
    STYLE_BUTTON_TEXT_ALIGN = core.STYLE_BUTTON_TEXT_ALIGN
# === Window flag constants (redefines for autodoc)
#: Disable title-bar
WINDOW_NO_TITLE_BAR = core.WINDOW_NO_TITLE_BAR
#: Disable user resizing with the lower-right grip
WINDOW_NO_RESIZE = core.WINDOW_NO_RESIZE
#: Disable user moving the window
WINDOW_NO_MOVE = core.WINDOW_NO_MOVE
#: Disable scrollbars (window can still scroll with mouse or programatically)
WINDOW_NO_SCROLLBAR = core.WINDOW_NO_SCROLLBAR
#: Disable user vertically scrolling with mouse wheel
WINDOW_NO_SCROLL_WITH_MOUSE = core.WINDOW_NO_SCROLL_WITH_MOUSE
#: Disable user collapsing window by double-clicking on it
WINDOW_NO_COLLAPSE = core.WINDOW_NO_COLLAPSE
#: Resize every window to its content every frame
WINDOW_ALWAYS_AUTO_RESIZE = core.WINDOW_ALWAYS_AUTO_RESIZE
#: Show borders around windows and items
WINDOW_SHOW_BORDERS = core.WINDOW_SHOW_BORDERS
#: Never load/save settings in .ini file
WINDOW_NO_SAVED_SETTINGS = core.WINDOW_NO_SAVED_SETTINGS
#: Disable catching mouse or keyboard inputs
WINDOW_NO_INPUTS = core.WINDOW_NO_INPUTS
#: Has a menu-bar
WINDOW_MENU_BAR = core.WINDOW_MENU_BAR
#: Allow horizontal scrollbar to appear (off by default)
WINDOW_HORIZONTAL_SCROLLING_BAR = core.WINDOW_HORIZONTAL_SCROLLING_BAR
#: Disable taking focus when transitioning from hidden to visible state
WINDOW_NO_FOCUS_ON_APPEARING = core.WINDOW_NO_FOCUS_ON_APPEARING
#: Disable bringing window to front when taking focus (e.g. clicking on it or
#: programatically giving it focus)
WINDOW_NO_BRING_TO_FRONT_ON_FOCUS = core.WINDOW_NO_BRING_TO_FRONT_ON_FOCUS
#: Always show vertical scrollbar (even if ContentSize.y < Size.y)
WINDOW_ALWAYS_VERTICAL_SCROLLBAR = core.WINDOW_ALWAYS_VERTICAL_SCROLLBAR
#: Always show horizontal scrollbar (even if ContentSize.x < Size.x)
WINDOW_ALWAYS_HORIZONTAL_SCROLLBAR = core.WINDOW_ALWAYS_HORIZONTAL_SCROLLBAR
#: Ensure child windows without border uses style.WindowPadding (ignored by
#: default for non-bordered child windows, because more convenient)
WINDOW_ALWAYS_USE_WINDOW_PADDING = core.WINDOW_ALWAYS_USE_WINDOW_PADDING
|
{"/doc/source/gen_example.py": ["/imgui/__init__.py", "/imgui/impl/__init__.py"], "/imgui/impl/_glfw.py": ["/imgui/__init__.py"], "/imgui/impl/__init__.py": ["/imgui/impl/_glfw.py"]}
|
29,060
|
Wiatrogon/pyimgui
|
refs/heads/master
|
/imgui/impl/_glfw.py
|
# -*- coding: utf-8 -*-
import glfw
import OpenGL.GL as gl
import imgui
import ctypes
class GlfwImpl(object):
    """Basic GLFW3 integration implementation.

    Bridges GLFW input callbacks to the imgui IO state and renders imgui
    draw data through an OpenGL 3.3 core-profile pipeline.
    """

    VERTEX_SHADER_SRC = """
    #version 330
    uniform mat4 ProjMtx;
    in vec2 Position;
    in vec2 UV;
    in vec4 Color;
    out vec2 Frag_UV;
    out vec4 Frag_Color;
    void main() {
        Frag_UV = UV;
        Frag_Color = Color;
        gl_Position = ProjMtx * vec4(Position.xy, 0, 1);
    }
    """

    FRAGMENT_SHADER_SRC = """
    #version 330
    uniform sampler2D Texture;
    in vec2 Frag_UV;
    in vec4 Frag_Color;
    out vec4 Out_Color;
    void main() {
        Out_Color = Frag_Color * texture(Texture, Frag_UV.st);
    }
    """

    def __init__(self, window):
        """Store the GLFW *window* handle; GL objects are created lazily."""
        self.window = window
        self.io = imgui.get_io()
        self._shader_handle = None
        self._vert_handle = None
        self._fragment_handle = None
        self._attrib_location_tex = None
        self._attrib_proj_mtx = None
        self._attrib_location_position = None
        self._attrib_location_uv = None
        self._attrib_location_color = None
        self._vbo_handle = None
        self._elements_handle = None
        self._vao_handle = None
        self._font_texture = None

    def enable(self):
        """Install GLFW callbacks and set up GL/font/key-map state."""
        # setup input callbacks
        # todo: add some option to have additional callbacks
        glfw.set_key_callback(self.window, self.keyboard_callback)
        glfw.set_cursor_pos_callback(self.window, self.mouse_callback)
        glfw.set_window_size_callback(self.window, self.resize_callback)
        glfw.set_char_callback(self.window, self.char_callback)
        glfw.set_scroll_callback(self.window, self.scroll_callback)
        # todo: maybe it is not necessary
        self.io.delta_time = 1.0 / 60.0
        self.io.display_size = glfw.get_framebuffer_size(self.window)
        # setup default font
        self.io.fonts.get_tex_data_as_rgba32()
        self.io.fonts.add_font_default()
        self._create_device_objects()
        self._map_keys()
        # todo: add option to set new_frame callback/implementation
        self.io.render_callback = self.render

    def _map_keys(self):
        """Map imgui's logical keys onto GLFW key codes."""
        key_map = self.io.key_map
        key_map[imgui.KEY_TAB] = glfw.KEY_TAB
        key_map[imgui.KEY_LEFT_ARROW] = glfw.KEY_LEFT
        key_map[imgui.KEY_RIGHT_ARROW] = glfw.KEY_RIGHT
        key_map[imgui.KEY_UP_ARROW] = glfw.KEY_UP
        key_map[imgui.KEY_DOWN_ARROW] = glfw.KEY_DOWN
        key_map[imgui.KEY_PAGE_UP] = glfw.KEY_PAGE_UP
        key_map[imgui.KEY_PAGE_DOWN] = glfw.KEY_PAGE_DOWN
        key_map[imgui.KEY_HOME] = glfw.KEY_HOME
        key_map[imgui.KEY_END] = glfw.KEY_END
        key_map[imgui.KEY_DELETE] = glfw.KEY_DELETE
        key_map[imgui.KEY_BACKSPACE] = glfw.KEY_BACKSPACE
        key_map[imgui.KEY_ENTER] = glfw.KEY_ENTER
        key_map[imgui.KEY_ESCAPE] = glfw.KEY_ESCAPE
        key_map[imgui.KEY_A] = glfw.KEY_A
        key_map[imgui.KEY_C] = glfw.KEY_C
        key_map[imgui.KEY_V] = glfw.KEY_V
        key_map[imgui.KEY_X] = glfw.KEY_X
        key_map[imgui.KEY_Y] = glfw.KEY_Y
        key_map[imgui.KEY_Z] = glfw.KEY_Z

    def keyboard_callback(self, window, key, scancode, action, mods):
        """Forward key press/release state and modifier keys to imgui IO."""
        # perf: local for faster access
        io = self.io
        if action == glfw.PRESS:
            io.keys_down[key] = True
        elif action == glfw.RELEASE:
            io.keys_down[key] = False
        io.key_ctrl = (
            io.keys_down[glfw.KEY_LEFT_CONTROL] or
            io.keys_down[glfw.KEY_RIGHT_CONTROL]
        )
        io.key_alt = (
            io.keys_down[glfw.KEY_LEFT_ALT] or
            io.keys_down[glfw.KEY_RIGHT_ALT]
        )
        io.key_shift = (
            io.keys_down[glfw.KEY_LEFT_SHIFT] or
            io.keys_down[glfw.KEY_RIGHT_SHIFT]
        )
        io.key_super = (
            io.keys_down[glfw.KEY_LEFT_SUPER] or
            io.keys_down[glfw.KEY_RIGHT_SUPER]
        )

    def char_callback(self, window, char):
        """Forward printable character input to imgui."""
        io = imgui.get_io()
        # imgui accepts 16-bit characters; ignore out-of-range codepoints
        if 0 < char < 0x10000:
            io.add_input_character(char)

    def resize_callback(self, window, width, height):
        """Track window resizes in imgui's display size."""
        self.io.display_size = width, height

    def mouse_callback(self, *args, **kwargs):
        # Mouse position is polled in new_frame(); nothing to do here.
        pass

    def scroll_callback(self, window, x_offset, y_offset):
        """Forward vertical scrolling to imgui (horizontal is ignored)."""
        self.io.mouse_wheel = y_offset

    def new_frame(self):
        """Poll window/input state and start a new imgui frame."""
        # todo: consider moving to init
        if not self._font_texture:
            self._create_device_objects()
        io = imgui.get_io()
        w, h = glfw.get_window_size(self.window)
        dw, dh = glfw.get_framebuffer_size(self.window)
        io.display_size = w, h
        # hi-DPI: framebuffer may be larger than the logical window size
        io.display_fb_scale = float(dw)/w, float(dh)/h
        io.delta_time = 1.0/60
        if glfw.get_window_attrib(self.window, glfw.FOCUSED):
            io.mouse_pos = glfw.get_cursor_pos(self.window)
        else:
            io.mouse_pos = -1, -1
        # bugfix (py3k compat): xrange does not exist on Python 3;
        # range(3) behaves identically here on both 2 and 3.
        for i in range(3):
            io.mouse_down[i] = glfw.get_mouse_button(self.window, i)
        imgui.new_frame()

    def _create_device_objects(self):
        """Compile shaders, create buffers/VAO and the font texture."""
        # save state
        last_texture = gl.glGetIntegerv(gl.GL_TEXTURE_BINDING_2D)
        last_array_buffer = gl.glGetIntegerv(gl.GL_ARRAY_BUFFER_BINDING)
        last_vertex_array = gl.glGetIntegerv(gl.GL_VERTEX_ARRAY_BINDING)
        self._shader_handle = gl.glCreateProgram()
        # note: no need to store shader parts handles after linking
        vertex_shader = gl.glCreateShader(gl.GL_VERTEX_SHADER)
        fragment_shader = gl.glCreateShader(gl.GL_FRAGMENT_SHADER)
        gl.glShaderSource(vertex_shader, self.VERTEX_SHADER_SRC)
        gl.glShaderSource(fragment_shader, self.FRAGMENT_SHADER_SRC)
        gl.glCompileShader(vertex_shader)
        gl.glCompileShader(fragment_shader)
        gl.glAttachShader(self._shader_handle, vertex_shader)
        gl.glAttachShader(self._shader_handle, fragment_shader)
        gl.glLinkProgram(self._shader_handle)
        # todo: remove shader parts after linking
        self._attrib_location_tex = gl.glGetUniformLocation(self._shader_handle, "Texture")
        self._attrib_proj_mtx = gl.glGetUniformLocation(self._shader_handle, "ProjMtx")
        self._attrib_location_position = gl.glGetAttribLocation(self._shader_handle, "Position")
        self._attrib_location_uv = gl.glGetAttribLocation(self._shader_handle, "UV")
        self._attrib_location_color = gl.glGetAttribLocation(self._shader_handle, "Color")
        self._vbo_handle = gl.glGenBuffers(1)
        self._elements_handle = gl.glGenBuffers(1)
        self._vao_handle = gl.glGenVertexArrays(1)
        gl.glBindVertexArray(self._vao_handle)
        gl.glBindBuffer(gl.GL_ARRAY_BUFFER, self._vbo_handle)
        gl.glEnableVertexAttribArray(self._attrib_location_position)
        gl.glEnableVertexAttribArray(self._attrib_location_uv)
        gl.glEnableVertexAttribArray(self._attrib_location_color)
        # vertex layout comes from the compiled imgui core module
        gl.glVertexAttribPointer(self._attrib_location_position, 2, gl.GL_FLOAT, gl.GL_FALSE, imgui.VERTEX_SIZE, ctypes.c_void_p(imgui.VERTEX_BUFFER_POS_OFFSET))
        gl.glVertexAttribPointer(self._attrib_location_uv, 2, gl.GL_FLOAT, gl.GL_FALSE, imgui.VERTEX_SIZE, ctypes.c_void_p(imgui.VERTEX_BUFFER_UV_OFFSET))
        gl.glVertexAttribPointer(self._attrib_location_color, 4, gl.GL_UNSIGNED_BYTE, gl.GL_TRUE, imgui.VERTEX_SIZE, ctypes.c_void_p(imgui.VERTEX_BUFFER_COL_OFFSET))
        self._create_font_texture()
        # restore state
        gl.glBindTexture(gl.GL_TEXTURE_2D, last_texture)
        gl.glBindBuffer(gl.GL_ARRAY_BUFFER, last_array_buffer)
        gl.glBindVertexArray(last_vertex_array)

    def _create_font_texture(self):
        """Upload the imgui font atlas as a GL texture."""
        # save texture state
        last_texture = gl.glGetIntegerv(gl.GL_TEXTURE_BINDING_2D)
        width, height, pixels = self.io.fonts.get_tex_data_as_rgba32()
        self._font_texture = gl.glGenTextures(1)
        gl.glBindTexture(gl.GL_TEXTURE_2D, self._font_texture)
        gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MIN_FILTER, gl.GL_LINEAR)
        gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MAG_FILTER, gl.GL_LINEAR)
        gl.glTexImage2D(gl.GL_TEXTURE_2D, 0, gl.GL_RGBA, width, height, 0, gl.GL_RGBA, gl.GL_UNSIGNED_BYTE, pixels)
        self.io.fonts.texture_id = self._font_texture
        gl.glBindTexture(gl.GL_TEXTURE_2D, last_texture)

    def render(self, draw_data):
        """Render imgui *draw_data*, saving and restoring all touched GL state."""
        # perf: local for faster access
        io = self.io
        display_width, display_height = self.io.display_size
        fb_width = int(display_width * io.display_fb_scale[0])
        fb_height = int(display_height * io.display_fb_scale[1])
        if fb_width == 0 or fb_height == 0:
            return
        draw_data.scale_clip_rects(*io.display_fb_scale)
        # backup GL state
        # todo: provide cleaner version of this backup-restore code
        last_program = gl.glGetIntegerv(gl.GL_CURRENT_PROGRAM)
        last_texture = gl.glGetIntegerv(gl.GL_TEXTURE_BINDING_2D)
        last_active_texture = gl.glGetIntegerv(gl.GL_ACTIVE_TEXTURE)
        last_array_buffer = gl.glGetIntegerv(gl.GL_ARRAY_BUFFER_BINDING)
        last_element_array_buffer = gl.glGetIntegerv(gl.GL_ELEMENT_ARRAY_BUFFER_BINDING)
        last_vertex_array = gl.glGetIntegerv(gl.GL_VERTEX_ARRAY_BINDING)
        last_blend_src = gl.glGetIntegerv(gl.GL_BLEND_SRC)
        last_blend_dst = gl.glGetIntegerv(gl.GL_BLEND_DST)
        last_blend_equation_rgb = gl.glGetIntegerv(gl.GL_BLEND_EQUATION_RGB)
        last_blend_equation_alpha = gl.glGetIntegerv(gl.GL_BLEND_EQUATION_ALPHA)
        last_viewport = gl.glGetIntegerv(gl.GL_VIEWPORT)
        last_scissor_box = gl.glGetIntegerv(gl.GL_SCISSOR_BOX)
        last_enable_blend = gl.glIsEnabled(gl.GL_BLEND)
        last_enable_cull_face = gl.glIsEnabled(gl.GL_CULL_FACE)
        last_enable_depth_test = gl.glIsEnabled(gl.GL_DEPTH_TEST)
        last_enable_scissor_test = gl.glIsEnabled(gl.GL_SCISSOR_TEST)
        gl.glEnable(gl.GL_BLEND)
        gl.glBlendEquation(gl.GL_FUNC_ADD)
        gl.glBlendFunc(gl.GL_SRC_ALPHA, gl.GL_ONE_MINUS_SRC_ALPHA)
        gl.glDisable(gl.GL_CULL_FACE)
        gl.glDisable(gl.GL_DEPTH_TEST)
        gl.glEnable(gl.GL_SCISSOR_TEST)
        gl.glActiveTexture(gl.GL_TEXTURE0)
        gl.glViewport(0, 0, int(fb_width), int(fb_height))
        # orthographic projection flipping y so imgui's top-left origin works
        ortho_projection = [
            [ 2.0/display_width, 0.0,                 0.0,  0.0],
            [ 0.0,               2.0/-display_height, 0.0,  0.0],
            [ 0.0,               0.0,                 -1.0, 0.0],
            [-1.0,               1.0,                 0.0,  1.0]
        ]
        gl.glUseProgram(self._shader_handle)
        gl.glUniform1i(self._attrib_location_tex, 0)
        gl.glUniformMatrix4fv(self._attrib_proj_mtx, 1, gl.GL_FALSE, ortho_projection)
        gl.glBindVertexArray(self._vao_handle)
        for commands in draw_data.commands_lists:
            idx_buffer_offset = 0
            gl.glBindBuffer(gl.GL_ARRAY_BUFFER, self._vbo_handle)
            # todo: check this (sizes)
            gl.glBufferData(gl.GL_ARRAY_BUFFER, commands.vtx_buffer_size * imgui.VERTEX_SIZE, ctypes.c_void_p(commands.vtx_buffer_data), gl.GL_STREAM_DRAW)
            gl.glBindBuffer(gl.GL_ELEMENT_ARRAY_BUFFER, self._elements_handle)
            # todo: check this (sizes)
            gl.glBufferData(gl.GL_ELEMENT_ARRAY_BUFFER, commands.idx_buffer_size * imgui.INDEX_SIZE, ctypes.c_void_p(commands.idx_buffer_data), gl.GL_STREAM_DRAW)
            # todo: allow to iterate over _CmdList
            for command in commands.commands:
                gl.glBindTexture(gl.GL_TEXTURE_2D, command.texture_id)
                # todo: use named tuple
                x, y, w, z = command.clip_rect
                gl.glScissor(int(x), int(fb_height - w), int(z - x), int(w - y))
                if imgui.INDEX_SIZE == 2:
                    gltype = gl.GL_UNSIGNED_SHORT
                else:
                    gltype = gl.GL_UNSIGNED_INT
                gl.glDrawElements(gl.GL_TRIANGLES, command.elem_count, gltype, ctypes.c_void_p(idx_buffer_offset))
                idx_buffer_offset += command.elem_count * imgui.INDEX_SIZE
        # restore modified GL state
        gl.glUseProgram(last_program)
        gl.glActiveTexture(last_active_texture)
        gl.glBindTexture(gl.GL_TEXTURE_2D, last_texture)
        gl.glBindVertexArray(last_vertex_array)
        gl.glBindBuffer(gl.GL_ARRAY_BUFFER, last_array_buffer)
        gl.glBindBuffer(gl.GL_ELEMENT_ARRAY_BUFFER, last_element_array_buffer)
        gl.glBlendEquationSeparate(last_blend_equation_rgb, last_blend_equation_alpha)
        gl.glBlendFunc(last_blend_src, last_blend_dst)
        if last_enable_blend:
            gl.glEnable(gl.GL_BLEND)
        else:
            gl.glDisable(gl.GL_BLEND)
        if last_enable_cull_face:
            gl.glEnable(gl.GL_CULL_FACE)
        else:
            gl.glDisable(gl.GL_CULL_FACE)
        if last_enable_depth_test:
            gl.glEnable(gl.GL_DEPTH_TEST)
        else:
            gl.glDisable(gl.GL_DEPTH_TEST)
        if last_enable_scissor_test:
            gl.glEnable(gl.GL_SCISSOR_TEST)
        else:
            gl.glDisable(gl.GL_SCISSOR_TEST)
        gl.glViewport(last_viewport[0], last_viewport[1], last_viewport[2], last_viewport[3])
        gl.glScissor(last_scissor_box[0], last_scissor_box[1], last_scissor_box[2], last_scissor_box[3])
|
{"/doc/source/gen_example.py": ["/imgui/__init__.py", "/imgui/impl/__init__.py"], "/imgui/impl/_glfw.py": ["/imgui/__init__.py"], "/imgui/impl/__init__.py": ["/imgui/impl/_glfw.py"]}
|
29,061
|
Wiatrogon/pyimgui
|
refs/heads/master
|
/doc/source/custom_directives.py
|
# -*- coding: utf-8 -*-
from docutils import nodes
from docutils.parsers.rst import Directive
from docutils.parsers.rst import directives
import os
import re
from hashlib import sha1
from sphinx.ext.autodoc import AutodocReporter
try:
from gen_example import render_snippet
except ImportError:
render_snippet = None
VISUAL_EXAMPLES_DIR = "visual_examples"
# todo: maybe should be more generic from sphinx conf
SOURCE_DIR = os.path.join(os.path.dirname(__file__))
def flag(argument):
    """Reimplement directives.flag to return True instead of None.

    Check for a valid flag option (no argument) and return ``True``.
    (Directive option conversion function.)

    Raise ``ValueError`` if an argument is found.

    Note: the original docstring still said "return ``None``" — copied
    verbatim from docutils and contradicting this function's entire
    purpose; corrected to match the actual behavior.
    """
    if argument and argument.strip():
        raise ValueError('no argument is allowed; "%s" supplied' % argument)
    else:
        return True
class WrapsDirective(Directive):
    """``.. wraps::`` directive: render the wrapped C++ API signature."""
    has_content = True

    def run(self):
        head = nodes.paragraph()
        head.append(nodes.inline("Wraps API:", "Wraps API: "))
        source = '\n'.join(self.content.data)
        literal_node = nodes.literal_block(source, source)
        # bugfix: the attribute key was misspelled 'laguage', so the
        # highlight language was never applied (cf. VisualDirective.run()).
        literal_node['language'] = 'C++'
        return [head, literal_node]
class VisualDirective(Directive):
    """``.. visual-example::`` directive: code snippet plus rendered image.

    Shows the snippet as a literal block, then the PNG produced by
    ``render_snippet`` (when available and ``render_examples`` is on).
    """
    has_content = True
    final_argument_whitespace = True
    option_spec = {
        'title': directives.unchanged,
        'width': directives.positive_int,
        'height': directives.positive_int,
        'auto_window': flag,
        'auto_layout': flag,
    }

    def run(self):
        source = '\n'.join(self.content.data)
        literal = nodes.literal_block(source, source)
        literal['language'] = 'python'
        # docutils document model is insane!
        head1 = nodes.paragraph()
        head1.append(nodes.inline("Example:", "Example: "))
        head2 = nodes.paragraph()
        head2.append(
            nodes.section("foo", nodes.inline("Outputs:", "Outputs: "))
        )
        directive_nodes = [
            head1,
            literal,
            head2,
            self.get_image_node(source)
        ]
        return directive_nodes

    def name_source_snippet(self, source):
        """Derive a stable image file name for *source*."""
        env = self.state.document.settings.env
        if (
            isinstance(self.state.reporter, AutodocReporter) and
            self.state.parent and self.state.parent.parent
        ):
            # If it is generated by autodoc then autogenerate title from
            # the function/method/class signature
            # note: hacky assumption that this is a signature node
            signature_node = self.state.parent.parent.children[0]
            signature = signature_node['names'][0]
            occurence = env.new_serialno(signature)
            name = signature + '_' + str(occurence)
        else:
            # If we could not guess then use explicit title or hexdigest
            # bugfix: hashlib.sha1 requires bytes, not str, on Python 3
            name = self.options.get(
                'title', sha1(source.encode('utf-8')).hexdigest()
            )
        return self.phrase_to_filename(name)

    def phrase_to_filename(self, phrase):
        """Convert phrase to normalized file name."""
        # remove non-word characters
        name = re.sub(r"[^\w\s\.]", '', phrase.strip().lower())
        # replace whitespace with underscores
        name = re.sub(r"\s+", '_', name)
        return name + '.png'

    def get_image_node(self, source):
        """Render *source* (if enabled) and return an image node for it."""
        file_name = self.name_source_snippet(source)
        file_path = os.path.join(VISUAL_EXAMPLES_DIR, file_name)
        env = self.state.document.settings.env
        if render_snippet and env.config['render_examples']:
            try:
                render_snippet(
                    source, file_path,
                    output_dir=SOURCE_DIR, **self.options
                )
            except:  # noqa: deliberately broad — log the snippet, then re-raise
                print("problematic code:\n%s" % source)
                raise
        img = nodes.image()
        img['uri'] = "/" + file_path
        return img
def setup(app):
    """Sphinx extension entry point: register config value and directives."""
    # render_examples toggles actual GL rendering of visual-example blocks;
    # off by default so plain doc builds need no OpenGL context.
    app.add_config_value('render_examples', False, 'html')
    app.add_directive('wraps', WrapsDirective)
    app.add_directive('visual-example', VisualDirective)
    return {'version': '0.1'}
|
{"/doc/source/gen_example.py": ["/imgui/__init__.py", "/imgui/impl/__init__.py"], "/imgui/impl/_glfw.py": ["/imgui/__init__.py"], "/imgui/impl/__init__.py": ["/imgui/impl/_glfw.py"]}
|
29,062
|
Wiatrogon/pyimgui
|
refs/heads/master
|
/ci/completion.py
|
# -*- coding: utf-8 -*-
from inspect import cleandoc
import sys
import re
import fileinput
try:
from urllib import quote
except ImportError:
from urllib.parse import quote
# shields.io badge URL with the completion percentage interpolated in.
BASE_URL = 'https://img.shields.io/badge/completion-%s-blue.svg'
# Markdown badge markup: an image (the shields.io badge URL goes into %s)
# wrapped in a link to the repository.
# bugfix: the template had lost its '![completion](%s)' image part, so
# 'BADGE_TEMPLATE % badge_url' raised TypeError (no conversion specifier);
# restored consistently with BADGE_RE below.
BADGE_TEMPLATE = "[![completion](%s)](https://github.com/swistakm/pyimgui)"
# Completion bookkeeping: any non-blank, non-comment line marked with
# either symbol counts toward the total; only check-marked lines are done.
ALL_RE = re.compile(r'(?!(^\s*$)|(^\s*#)).*[✗✓]')
DONE_RE = re.compile(r'(?!(^\s*$)|(^\s*#)).*✓')
# Matches an existing completion-badge line in the README for replacement.
BADGE_RE = re.compile(r'\[!\[completion\]\(.*\)\](\(.*\))?\s*$')
if __name__ == "__main__":
    # CLI: python completion.py PXD_FILE [README]
    if len(sys.argv) == 2:
        pxd_file_name, out_file_name = sys.argv[1], None
    elif len(sys.argv) == 3:
        pxd_file_name, out_file_name = sys.argv[1:]
    else:
        pxd_file_name, out_file_name = None, None
        # exit() raises SystemExit, so nothing below runs on bad usage.
        exit(cleandoc(
            """Usage: python %s PXD_FILE [README]
            Estimate completion status and print result.
            Note: if README argument is provided it will
            try to update it with completion badge looking
            for existing markdown badge markup.
            """ % __file__
        ))
    with open(pxd_file_name) as pyx_file:
        lines = pyx_file.readlines()
    # Count tracked (✗/✓) vs done (✓) lines in the .pxd file.
    all_count = len(list(filter(ALL_RE.match, lines)))
    done_count = len(list(filter(DONE_RE.match, lines)))
    result = "%d%% (%s of %s)" % (
        float(done_count)/all_count * 100,
        done_count, all_count
    )
    badge_url = BASE_URL % quote(result)
    badge_md = BADGE_TEMPLATE % badge_url
    if out_file_name:
        # fileinput with inplace=True redirects stdout into the file,
        # so the summary prints must stay outside this block.
        output = fileinput.input(files=(out_file_name,), inplace=True)
        try:
            for line in output:
                if BADGE_RE.match(line):
                    # Replace the existing badge line with the fresh one.
                    sys.stdout.write(badge_md + "\n")
                else:
                    sys.stdout.write(line)
        finally:
            fileinput.close()
    print("Estimated: %s" % result)
    print("Badge: %s" % badge_md)
|
{"/doc/source/gen_example.py": ["/imgui/__init__.py", "/imgui/impl/__init__.py"], "/imgui/impl/_glfw.py": ["/imgui/__init__.py"], "/imgui/impl/__init__.py": ["/imgui/impl/_glfw.py"]}
|
29,063
|
Wiatrogon/pyimgui
|
refs/heads/master
|
/imgui/impl/__init__.py
|
# -*- coding: utf-8 -*-
from imgui.impl._glfw import GlfwImpl
__all__ = [
'GlfwImpl'
]
|
{"/doc/source/gen_example.py": ["/imgui/__init__.py", "/imgui/impl/__init__.py"], "/imgui/impl/_glfw.py": ["/imgui/__init__.py"], "/imgui/impl/__init__.py": ["/imgui/impl/_glfw.py"]}
|
29,064
|
harshkheskani/rate_the_game
|
refs/heads/main
|
/rate_the_game_app/forms.py
|
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 18 15:37:29 2021
@author: Harvey
"""
from django import forms
from rate_the_game_app.models import UserProfile, Game, Review, Category
from django.contrib.auth.models import User
class UserForm(forms.ModelForm):
    """Registration form for Django's built-in ``User`` model."""
    # Override so the password field renders as a masked input.
    password = forms.CharField(widget=forms.PasswordInput())

    class Meta:
        model = User
        fields = ('username', 'email', 'password')
class UserProfileForm(forms.ModelForm):
    """Companion form for the site-specific profile (avatar picture only)."""

    class Meta:
        model = UserProfile
        fields = ('picture',)
# class Contact(forms.Form):
# first_name = forms.CharField(max_length = 50)
# last_name = forms.CharField(max_length = 50)
# email_address = forms.EmailField(max_length = 150)
# message = forms.CharField(widget = forms.Textarea, max_length = 2000)
def should_be_empty(value):
    """Honeypot validator: reject any non-empty value.

    Used on hidden form fields that humans leave blank; a bot filling
    the field in triggers a ``ValidationError``.
    """
    if not value:
        return
    raise forms.ValidationError('Field is not empty')
class ContactForm(forms.Form):
    """Contact form with a hidden honeypot field to deter spam bots."""
    name = forms.CharField(max_length=80)
    message = forms.CharField(widget=forms.Textarea)
    email = forms.EmailField()
    # Honeypot: hidden from humans, so any submitted value marks a bot
    # (see should_be_empty above).
    forcefield = forms.CharField(
        required=False, widget=forms.HiddenInput, label="Leave empty", validators=[should_be_empty])
class GameForm(forms.ModelForm):
    """Form for adding a game; the slug is generated, not user-entered."""
    title = forms.CharField(max_length=Game.TITLE_MAX_LENGTH, help_text="Please enter the name of the game.")
    slug = forms.CharField(widget=forms.HiddenInput(), required=False)
    # category information is already passed via the view so is not required in the form

    class Meta:
        model = Game
        exclude = ('category',)
class ReviewForm(forms.ModelForm):
    """Form for leaving a score + comment review on a game."""
    score = forms.IntegerField(help_text="Please enter a score between 1 and 10 for this game.")
    comment = forms.CharField(max_length=Review.REVIEW_MAX_LENGTH,widget=forms.Textarea,help_text="Please leave a comment to finish your review")
    # username and game title information is already passed via the view so is not required in the form

    class Meta:
        model = Review
        exclude = ('user','game',)

    def __init__(self, *args, **kwargs):
        # The view passes the current user via kwargs; pop it before the
        # base ModelForm sees it (ModelForm does not accept a 'user' kwarg).
        self.user = kwargs.pop('user',None)
        super(ReviewForm, self).__init__(*args, **kwargs)
|
{"/rate_the_game_app/forms.py": ["/rate_the_game_app/models.py"], "/rate_the_game_app/admin.py": ["/rate_the_game_app/models.py"], "/rate_the_game_app/views.py": ["/rate_the_game_app/forms.py", "/rate_the_game_app/models.py"], "/population_script.py": ["/rate_the_game_app/models.py"]}
|
29,065
|
harshkheskani/rate_the_game
|
refs/heads/main
|
/rate_the_game_app/urls.py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Mar 13 12:47:37 2021
@author: Harvey
"""
from django.urls import path
from rate_the_game_app import views

# Namespace used when reversing, e.g. reverse('rate_the_game_app:index').
app_name = 'rate_the_game_app'

# URL table for the app; slugs identify categories and games, so every
# game route carries its category's slug as well.
urlpatterns = [
    path('', views.index, name='index'),
    # Same view as '' — the home page is also reachable at /index/.
    path('index/', views.index, name = 'index_page'),
    path('register/', views.register, name='register'),
    path('login/', views.user_login, name='login'),
    path('category/', views.show_list, name='show_list'),
    path('category/<slug:category_name_slug>/', views.show_category, name='show_category'),
    path('category/<slug:category_name_slug>/add_game/', views.add_game, name='add_game'),
    path('category/<slug:category_name_slug>/<slug:game_name_slug>/', views.show_game, name='show_game'),
    path('category/<slug:category_name_slug>/<slug:game_name_slug>/add_review/', views.add_review, name='add_review'),
    path('logout/', views.user_logout, name='logout'),
    path('my_account/', views.my_account, name='my_account'),
    path('contact/', views.contact_form, name='contact'),
]
|
{"/rate_the_game_app/forms.py": ["/rate_the_game_app/models.py"], "/rate_the_game_app/admin.py": ["/rate_the_game_app/models.py"], "/rate_the_game_app/views.py": ["/rate_the_game_app/forms.py", "/rate_the_game_app/models.py"], "/population_script.py": ["/rate_the_game_app/models.py"]}
|
29,066
|
harshkheskani/rate_the_game
|
refs/heads/main
|
/rate_the_game_app/admin.py
|
from django.contrib import admin
from rate_the_game_app.models import UserProfile, Category, Game, Review

# Expose all four app models in the Django admin with default ModelAdmin options.
admin.site.register(UserProfile)
admin.site.register(Category)
admin.site.register(Game)
admin.site.register(Review)
|
{"/rate_the_game_app/forms.py": ["/rate_the_game_app/models.py"], "/rate_the_game_app/admin.py": ["/rate_the_game_app/models.py"], "/rate_the_game_app/views.py": ["/rate_the_game_app/forms.py", "/rate_the_game_app/models.py"], "/population_script.py": ["/rate_the_game_app/models.py"]}
|
29,067
|
harshkheskani/rate_the_game
|
refs/heads/main
|
/rate_the_game_app/models.py
|
from django.db import models
from django.contrib.auth.models import User
from django.template.defaultfilters import slugify
from django.core.validators import MaxValueValidator
# Create your models here.
class UserProfile(models.Model):
    """Extension of the built-in User with site-specific fields (avatar picture)."""

    # One profile per auth user; deleting the user deletes the profile.
    user = models.OneToOneField(User, on_delete=models.CASCADE, related_name='profile')
    # additional attribute for the user model; optional upload.
    picture = models.ImageField(upload_to='profile_images', blank=True)

    def __str__(self):
        return self.user.username
#category data structure
# category data structure
class Category(models.Model):
    """A game category (Action, RPG, ...) addressed by a URL slug."""

    NAME_MAX_LENGTH = 128

    name = models.CharField(max_length=NAME_MAX_LENGTH, unique=True)
    # Slug used so that there is a lower chance of failure of URL mapping.
    slug = models.SlugField(unique=True)

    def save(self, *args, **kwargs):
        # Regenerate the slug from the name on every save.
        self.slug = slugify(self.name)
        super(Category, self).save(*args, **kwargs)

    class Meta:
        verbose_name_plural = 'Categories'

    def __str__(self):
        return self.name
#game data structure
# game data structure
class Game(models.Model):
    """A game belonging to exactly one Category, addressed by a URL slug."""

    TITLE_MAX_LENGTH = 128

    category = models.ForeignKey(Category, on_delete=models.CASCADE)
    title = models.CharField(max_length=TITLE_MAX_LENGTH)
    # Slug used so that there is a lower chance of failure of URL mapping.
    # NOTE(review): slug is globally unique but derived from the title, so two
    # games with the same title in different categories would collide — confirm
    # whether that is intended.
    slug = models.SlugField(unique=True)

    def save(self, *args, **kwargs):
        # Regenerate the slug from the title on every save.
        self.slug = slugify(self.title)
        super(Game, self).save(*args, **kwargs)

    def __str__(self):
        return self.title
#Review data structure
# Review data structure
class Review(models.Model):
    """One user's score and comment for one game."""

    REVIEW_MAX_LENGTH = 2000

    user = models.ForeignKey(UserProfile, on_delete=models.CASCADE)
    # NOTE(review): only a MaxValueValidator(10) is applied even though the UI
    # asks for 1-10; there is no minimum-value validation — confirm intent.
    score = models.IntegerField(validators=[MaxValueValidator(10)],null=True)
    comment = models.CharField(max_length=REVIEW_MAX_LENGTH,blank=True)
    game = models.ForeignKey(Game, on_delete=models.CASCADE)

    class Meta:
        verbose_name_plural = 'Reviews'

    def __str__(self):
        return '{} {}'.format(self.game,self.user)
|
{"/rate_the_game_app/forms.py": ["/rate_the_game_app/models.py"], "/rate_the_game_app/admin.py": ["/rate_the_game_app/models.py"], "/rate_the_game_app/views.py": ["/rate_the_game_app/forms.py", "/rate_the_game_app/models.py"], "/population_script.py": ["/rate_the_game_app/models.py"]}
|
29,068
|
harshkheskani/rate_the_game
|
refs/heads/main
|
/rate_the_game_app/views.py
|
from django.shortcuts import render
from rate_the_game_app.forms import UserForm, UserProfileForm, ContactForm, GameForm, ReviewForm
from django.http import HttpResponse, HttpResponseRedirect
from django.contrib.auth import authenticate, login, logout
from django.shortcuts import redirect
from django.contrib.auth.decorators import login_required
from django.urls import reverse
from rate_the_game_app.models import Category, Game, Review, UserProfile
from django.core.mail import send_mail, BadHeaderError
def index(request):
    """Render the site home page."""
    template_name = 'rate_the_game_app/index.html'
    return render(request, template_name)
# def contact(request):
# if request.method == "POST":
# form = contactForm(request.POST)
# if form.is_valid():
# subject = "Website Inquiry"
# body = {
# 'first_name': form.cleaned_data['first_name'],
# 'last_name': form.cleaned_data['last_name'],
# 'email': form.cleaned_data['email_address'],
# 'message': form.cleaned_data['message'],
# }
# message = "\n".join(body.values())
# try:
# send_mail(subject,message,'admin@example,com',['admin@example.com'])
# except BadHeaderError:
# return HttpResponse('Invalid header found.')
# return redirect ("rate_the_game_app:index")
# form = contactForm()
# return render(request, "rate_the_game_app/contact.html", {'form:':form})
def contact_form(request):
    """Render the contact page and e-mail valid submissions to the site owner."""
    if request.method != 'POST':
        # First visit: show an unbound form.
        return render(request, 'rate_the_game_app/contact.html', {'form': ContactForm()})
    form = ContactForm(request.POST)
    if form.is_valid():
        subject = f'Message from {form.cleaned_data["name"]}'
        message = form.cleaned_data["message"]
        sender = form.cleaned_data["email"]
        recipients = ['hkheskani01@gmail.com']
        try:
            send_mail(subject, message, sender, recipients)
        except BadHeaderError:
            return HttpResponse('Invalid header found')
        return HttpResponse('Success...Your email has been sent')
    # Invalid submission: re-render with the bound form so errors display.
    return render(request, 'rate_the_game_app/contact.html', {'form': form})
@login_required
def my_account(request):
    """Show the logged-in user's account page with every review they wrote.

    BUG FIX: the original caught ``Review.DoesNotExist``, but
    ``Review.objects.filter(...)`` never raises it (it returns an empty
    queryset). The call that can actually raise is ``request.user.profile``
    when the user has no ``UserProfile`` yet, so that exception is handled.
    """
    context_dict = {}
    try:
        reviews = Review.objects.filter(user=request.user.profile)
        context_dict['reviews'] = reviews
    except UserProfile.DoesNotExist:
        # Logged-in user without a profile row: show the page with no reviews.
        context_dict['reviews'] = None
    return render(request, 'rate_the_game_app/my_account.html', context=context_dict)
def register(request):
    """Handle user sign-up with the paired UserForm and UserProfileForm.

    GET renders two blank forms; POST validates both, creates the User
    (hashing the password) and its UserProfile, then re-renders the page
    with ``registered`` telling the template whether sign-up succeeded.
    """
    # Boolean to tell the template whether the registration worked;
    # set False initially, changed to True on success.
    registered = False
    # If it's an HTTP POST, we want to process the form data.
    if request.method == 'POST':
        # Try to grab info from the form, using both UserForm AND UserProfileForm.
        user_form = UserForm(request.POST)
        profile_form = UserProfileForm(request.POST)
        # If the two forms are valid...
        if user_form.is_valid() and profile_form.is_valid():
            # Save the user's form data to the database.
            user = user_form.save()
            # Now hash the password with the set method and update the user object.
            user.set_password(user.password)
            user.save()
            # Now sort out the UserProfile instance.
            # We need to set the user attribute ourselves, so commit=False
            # delays saving the model until it is ready, for integrity.
            profile = profile_form.save(commit=False)
            profile.user = user
            # Did the user give a picture? If so it comes from request.FILES,
            # not request.POST, and goes onto the UserProfile model.
            if 'picture' in request.FILES:
                profile.picture = request.FILES['picture']
            # Now save the UserProfile model instance.
            profile.save()
            # Update the flag so the template shows the success state.
            registered = True
        else:
            # Invalid form(s): surface the problems on the console.
            print(user_form.errors, profile_form.errors)
    else:
        # Not an HTTP POST, so render the form using two blank ModelForm
        # instances ready for input.
        user_form = UserForm()
        profile_form = UserProfileForm()
    return render(request,
                  'rate_the_game_app/register.html',
                  context = {'user_form': user_form,
                             'profile_form': profile_form,
                             'registered': registered})
def user_login(request):
    """Authenticate a user from the login form and start their session.

    On success redirects to the account page; on failure returns a plain
    error response. GET requests just render the login form.

    SECURITY FIX: the original printed the submitted password in plaintext
    on a failed login; credentials must never be written to logs/console.
    """
    if request.method == 'POST':
        # request.POST.get returns None for a missing field instead of
        # raising KeyError like request.POST['...'] would.
        username = request.POST.get('username')
        password = request.POST.get('password')
        # Django returns a User object when the combination is valid, else None.
        user = authenticate(username=username, password=password)
        if user:
            # Account exists and credentials match — but is it still active?
            if user.is_active:
                login(request, user)
                return redirect(reverse('rate_the_game_app:my_account'))
            else:
                # Inactive account — no log in!
                return HttpResponse("Your Rate>The>Game account is disabled.")
        else:
            # Bad login details — log the username only, never the password.
            print(f"Invalid login details: {username}")
            return HttpResponse("Invalid login details supplied.")
    else:
        # No POST request (most likely HTTP GET): display the login form.
        # No context variables to pass.
        return render(request, 'rate_the_game_app/login.html')
#User login_required() to ensure only those logged in can access
# login_required guarantees only authenticated users reach this view.
@login_required
def user_logout(request):
    """End the current session and send the user back to the home page."""
    logout(request)
    home_url = reverse('rate_the_game_app:index')
    return redirect(home_url)
#list of categories page
def show_list(request):
    """List every game category on one page."""
    context = {'categories': Category.objects.all()}
    return render(request, 'rate_the_game_app/list.html', context=context)
def show_category(request, category_name_slug):
    """Show one category and the games filed under it.

    An unknown slug renders the same template with both context values None.
    """
    context = {'category': None, 'games': None}
    try:
        category = Category.objects.get(slug=category_name_slug)
    except Category.DoesNotExist:
        pass
    else:
        # Pass the category and its games through to the template.
        context['category'] = category
        context['games'] = Game.objects.filter(category=category)
    return render(request, 'rate_the_game_app/category.html', context=context)
def show_game(request, game_name_slug, category_name_slug):
    """Show one game, its category and all its reviews.

    BUG FIXES: the original only caught ``Game.DoesNotExist``, so a bad
    category slug raised an uncaught ``Category.DoesNotExist`` (HTTP 500);
    and the failure path set ``context_dict['game']`` while the success path
    (and thus the template) uses ``'curGame'``. Both exception types are now
    caught and the failure path sets 'curGame' (keeping the legacy 'game'
    key too for backward compatibility).
    """
    context_dict = {}
    try:
        game = Game.objects.get(slug=game_name_slug)
        category = Category.objects.get(slug=category_name_slug)
        reviews = Review.objects.filter(game=game)
        # Pass the game, category and review information to the template.
        context_dict['curGame'] = game
        context_dict['reviews'] = reviews
        context_dict['category'] = category
    except (Game.DoesNotExist, Category.DoesNotExist):
        # Unknown game or category slug: render the template with empty data.
        context_dict['curGame'] = None
        context_dict['game'] = None
        context_dict['reviews'] = None
        context_dict['category'] = None
    return render(request, 'rate_the_game_app/game.html', context=context_dict)
@login_required
def add_game(request, category_name_slug):
    """Let a logged-in user add a game to the category it was opened from.

    The category comes from the URL slug, so the form does not include it.
    Redirects home when the category does not exist, and back to the
    category page after a successful save.

    Idiom fixes: ``category == None`` is now ``category is None``, and the
    inner ``if category:`` was removed because the early-return guard
    already guarantees it is non-None.
    """
    # Identify the category the game belongs to, i.e. the category from
    # which the "add game" button was pressed.
    try:
        category = Category.objects.get(slug=category_name_slug)
    except Category.DoesNotExist:
        category = None
    # Unknown category: nothing to attach the game to, so redirect home.
    if category is None:
        return redirect('/rate_the_game_app/')
    form = GameForm()
    if request.method == 'POST':
        form = GameForm(request.POST)
        if form.is_valid():
            # Assign the category automatically so the form needn't include it.
            game = form.save(commit=False)
            game.category = category
            game.title = form.cleaned_data['title']
            game.save()
            # Redirect back to the category page this game was created from.
            return redirect(reverse('rate_the_game_app:show_category',
                                    kwargs={'category_name_slug': category_name_slug}))
        else:
            print(form.errors)
    # Pass the category data to the template for reference.
    context_dict = {'form':form, 'category':category}
    return render(request, 'rate_the_game_app/add_game.html', context=context_dict)
@login_required
def add_review(request, game_name_slug, category_name_slug):
    """Let a logged-in user post a review for a game.

    BUG FIX: the game was previously looked up with
    ``Game.objects.get(slug=category_name_slug)`` (the in-code comment even
    said "ignore the wrong way round!"). Django passes URL slugs as keyword
    arguments, so the category slug can only match a game whose slug happens
    to equal it; the game's own slug from the URL is the correct key.
    """
    # Identify which game is being reviewed from its URL slug.
    try:
        game = Game.objects.get(slug=game_name_slug)
    except Game.DoesNotExist:
        game = None
    # If the game does not exist, redirect to the home page.
    if game is None:
        return redirect('/rate_the_game_app/')
    form = ReviewForm()
    if request.method == 'POST':
        form = ReviewForm(request.POST, user=request.user)
        # User and game are supplied here so the client never enters them.
        if form.is_valid():
            if game:
                review = form.save(commit=False)
                # Attach the reviewer's profile (created on first review).
                profile = UserProfile.objects.get_or_create(user=request.user)[0]
                review.user = profile
                review.game = game
                review.save()
                # Redirect back to the game page this review belongs to.
                cat_slug = game.category.slug
                return redirect(reverse('rate_the_game_app:show_game',
                                        kwargs={'game_name_slug':game_name_slug,
                                                'category_name_slug':cat_slug}))
        else:
            print(form.errors)
    # Pass the game and category details to the template for reference.
    context_dict = {'form':form, 'game':game,'category':category_name_slug}
    return render(request, 'rate_the_game_app/add_review.html', context=context_dict)
|
{"/rate_the_game_app/forms.py": ["/rate_the_game_app/models.py"], "/rate_the_game_app/admin.py": ["/rate_the_game_app/models.py"], "/rate_the_game_app/views.py": ["/rate_the_game_app/forms.py", "/rate_the_game_app/models.py"], "/population_script.py": ["/rate_the_game_app/models.py"]}
|
29,069
|
harshkheskani/rate_the_game
|
refs/heads/main
|
/rate_the_game_app/templatetags/rate_the_game_app_template_tags.py
|
# -*- coding: utf-8 -*-
"""
Created on Fri Mar 26 12:57:58 2021
@author: Harvey
"""
from django import template
|
{"/rate_the_game_app/forms.py": ["/rate_the_game_app/models.py"], "/rate_the_game_app/admin.py": ["/rate_the_game_app/models.py"], "/rate_the_game_app/views.py": ["/rate_the_game_app/forms.py", "/rate_the_game_app/models.py"], "/population_script.py": ["/rate_the_game_app/models.py"]}
|
29,070
|
harshkheskani/rate_the_game
|
refs/heads/main
|
/rate_the_game_app/templatetags/__init__.py
|
# -*- coding: utf-8 -*-
"""
Created on Fri Mar 26 12:57:27 2021
@author: Harvey
"""
|
{"/rate_the_game_app/forms.py": ["/rate_the_game_app/models.py"], "/rate_the_game_app/admin.py": ["/rate_the_game_app/models.py"], "/rate_the_game_app/views.py": ["/rate_the_game_app/forms.py", "/rate_the_game_app/models.py"], "/population_script.py": ["/rate_the_game_app/models.py"]}
|
29,071
|
harshkheskani/rate_the_game
|
refs/heads/main
|
/population_script.py
|
import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'rate_the_game.settings')
import django
django.setup()
from rate_the_game_app.models import Category, Game, Review, UserProfile, User
#make sure to run these after code changes or pulling the repo onto a new machine so that your system is aware,
#manage.py makemigrations
#manage.py migrate
def populate():
    """Seed the database with demo users, categories, games and reviews.

    Safe to re-run: users are only created when the username is free, and
    the helpers below use get_or_create for categories/games/reviews.

    NOTE(review): several scores below are floats (e.g. 8.5) although
    Review.score is an IntegerField — confirm the backend coerces these as
    intended.
    """
    users = ['harvey2001', 'ricky2051', 'harsh2801', 'UofG2019', 'WAD2510']
    for username in users:
        # Skip usernames that already exist so reruns do not crash.
        duplicate = User.objects.filter(username=username)
        if not (duplicate.exists()):
            q = User.objects.create_user(
                username = username,
                password = "password"
            )
            p = UserProfile.objects.create(user=q)
            q.save()
            p.save()
    # One list of {title, user, score, comment} dicts per category.
    action = [
        {'title':'Tekken 3', 'user':'harvey2001', 'score':7, 'comment':'A lot of fun would reccommend'},
        {'title':'Farcry 5', 'user':'ricky2051', 'score':7, 'comment':'Amazing story!'},
        {'title':'Super Mario Galaxy', 'user':'harsh2801', 'score':4, 'comment':'Had a blast while playing!'},
        {'title':'Super Smash Bros Brawl', 'user':'UofG2019', 'score':9, 'comment':'Got super heated when playing with friends'},
        {'title':'Half-Life 2', 'user':'WAD2510', 'score':5, 'comment':'It was alright, quite repetitative'},
    ]
    adventure = [
        {'title':'The Last Of Us', 'user':'WAD2510', 'score':10, 'comment':'It was extremely immersive, and beautiful story'},
        {'title':'Super Mario World', 'user':'ricky2051', 'score':8.5, 'comment':'Made me feel nostalgic, still as same as I remeber it!'},
        {'title':'Pokemon: Diamond and Pearl', 'user':'harsh2801', 'score':9.5, 'comment':'So much fun, would wish for it to be longer'},
        {'title':'Zelda: Breath of the Wild', 'user':'UofG2019', 'score':8, 'comment':'Beautiful visuals'},
        {'title':'Cyberpunk 2077', 'user':'harvey2001', 'score':6.5, 'comment':'So much potential, but fell a little short!'},
    ]
    casual = [
        {'title':'Minecraft', 'user':'harvey2001', 'score':7.5, 'comment':'A classic!'},
        {'title':'Stardew Valley', 'user':'ricky2051', 'score':9, 'comment':'Something very relaxing about it'},
        {'title':'Portal 2', 'user':'harsh2801', 'score':8, 'comment':'After 7 years so much fun'},
        {'title':'Terreria', 'user':'WAD2510', 'score':7, 'comment':'Not as func as minecraft but hey'},
        {'title':'Rocket League', 'user':'UofG2019', 'score':10, 'comment':'Insanity!'},
    ]
    indie = [
        {'title':'Cuphead', 'user':'UofG2019', 'score':7.5, 'comment':'Great graphics but super difficult'},
        {'title':'Super Meat Boy', 'user':'ricky2051', 'score':6, 'comment':'Way too short! but still fun'},
        {'title':'Rust', 'user':'harsh2801', 'score':8, 'comment':'So much fun with friends'},
        {'title':'Overcooked 2', 'user':'harvey2001', 'score':5, 'comment':'Fun but repetative'},
        {'title':'Totally Accurate Battle Simulator', 'user':'WAD2510', 'score':8.5, 'comment':'It great new take on the indie genre!'},
    ]
    massively_multiplayer = [
        {'title':'Battlefield 3', 'user':'harvey2001', 'score':7.5, 'comment':'Bautiful!'},
        {'title':'Among Us', 'user':'ricky2051', 'score':8.5, 'comment':'ave to lie to my friends but still fun!'},
        {'title':'Fall Guys', 'user':'harsh2801', 'score':4.5, 'comment':'Got boring fast!'},
        {'title':'Fortnite', 'user':'WAD2510', 'score':8, 'comment':'A really creative game, but hard to learn'},
        {'title':'Valorant', 'user':'UofG2019', 'score':8, 'comment':'A mix between CS:GO and overwatch'},
    ]
    racing = [
        {'title':'Need for Speed: Most Wanted', 'user':'harvey2001', 'score':10, 'comment':'Forever classic'},
        {'title':'Mario Kart', 'user':'ricky2051', 'score':10, 'comment':'GOAT'},
        {'title':'Forza Horizon', 'user':'harsh2801', 'score':7, 'comment':'Amzing vizuals'},
        {'title':'Trackmania', 'user':'UofG2019', 'score':7, 'comment':'Throwback!'},
        {'title':'Gran Turismo 5', 'user':'WAD2510', 'score':4, 'comment':'Quite oring, feels limited for a racing game'},
    ]
    rpg = [
        {'title':'Dark Souls', 'user':'UofG2019', 'score':7, 'comment':'Really engaging, but quite dark'},
        {'title':'The Elder Scrolls V: Skyrim', 'user':'ricky2051', 'score':9.5, 'comment':'Intense as hell'},
        {'title':'The Witcher 3: Wild Hunt', 'user':'harsh2801', 'score':10, 'comment':'Stunning game, amazing story'},
        {'title':'Fallout 4', 'user':'WAD2510', 'score':6, 'comment':'Was hopinng for more but still fun'},
        {'title':'South Park: The Stick of Truth', 'user':'harvey2001', 'score':8.5, 'comment':'Way too funny!'},
    ]
    simulation = [
        {'title':'Kerbal Space Program', 'user':'harvey2001', 'score':5.5, 'comment':'Extremely interesting'},
        {'title':'Euro Truck Simulator 2', 'user':'ricky2051', 'score':5, 'comment':'Really realistic'},
        {'title':'Planet Coaster', 'user':'WAD2510', 'score':7, 'comment':'Such a goofy game'},
        {'title':'The Sims 4', 'user':'UofG2019', 'score':8.5, 'comment':'Classic!'},
        {'title':'Microsoft Flight Simulator', 'user':'harsh2801', 'score':9.5, 'comment':'Insanely immersive, super realistic'},
    ]
    sports = [
        {'title':'FIFA 21', 'user':'harsh2801', 'score':7.5, 'comment':'Would like something newer but still classic fifa'},
        {'title':'NBA2K21', 'user':'UofG2019', 'score':9, 'comment':'MyCarrear was amazing!'},
        {'title':"Tony Hawk's Pro Skater 1 + 2", 'user':'ricky2051', 'score':8.5, 'comment':'It beena while, but worth the wait'},
        {'title':'Madden NFL 21', 'user':'harvey2001', 'score':7, 'comment':'Hmm its the same every year, but still fun'},
        {'title':'NFL2K21', 'user':'WAD2510', 'score':7, 'comment':'New player designs are amazing'},
    ]
    strategy = [
        {'title':'Civilization VI', 'user':'harsh2801', 'score':9.5, 'comment':'The possibilities are endless'},
        {'title':'Plague Inc.', 'user':'WAD2510', 'score':7.5, 'comment':'Every game is unique'},
        {'title':'Evil Genius 2: World Domination', 'user':'UofG2019', 'score':8.5, 'comment':'So much fun!'},
        {'title':'Stellaris', 'user':'ricky2051', 'score':8, 'comment':'Love the sci-fi vibe'},
        {'title':'XCOM 2', 'user':'harvey2001', 'score':8.5, 'comment':'Really unique take on a strategy game'},
    ]
    # Category name -> its game/review data.
    categories = {'Action': {'games':action},
                  'Adventure': {'games':adventure},
                  'Casual': {'games':casual},
                  'indie':{'games':indie},
                  'massively_multiplayer':{'games':massively_multiplayer},
                  'racing':{'games':racing},
                  'rpg':{'games':rpg},
                  'simulation':{'games':simulation},
                  'sports':{'games':sports},
                  'strategy':{'games':strategy},
                  }
    # Create each category, then its games and one review per game.
    for cat, cat_data in categories.items():
        c = add_cat(cat)
        for q in cat_data["games"]:
            add_game(c, q["title"])
            #curUser = add_user(q["user"])
            add_review(c, q["title"],q["user"], q["score"], q["comment"] )
    # Print what ended up in the database as a sanity check.
    for c in Category.objects.all():
        for q in Game.objects.filter(category=c):
            print(f"- {c}: {q}")
def add_game (cat, title):
    """Get or create the Game *title* inside category *cat*.

    NOTE(review): get_or_create returns an ``(object, created)`` tuple, so
    ``q`` here is a tuple — unlike add_cat, which unpacks ``[0]``. The caller
    ignores the return value, but confirm before relying on it.
    """
    q = Game.objects.get_or_create(category=cat, title=title)
    #q.user=user
    #q.score=score
    #q.comment=comment
    return q
def add_review (cat, title, user, score, comment):
    """Get or create a Review by *user* (a username) for the game *title* in *cat*.

    Assumes the user and game were already created earlier in populate().
    """
    # Resolve the username to its UserProfile via the User queryset.
    h = User.objects.filter(username=user)
    prof = UserProfile.objects.get(user__in=h, )
    game = Game.objects.get(category=cat, title=title)
    # Returns an (object, created) tuple; the caller ignores it.
    q = Review.objects.get_or_create(user = prof, game=game, score=score, comment=comment)
    return q
def add_cat(name):
    """Return the Category called *name*, creating it on first use."""
    category, _created = Category.objects.get_or_create(name=name)
    # Explicit save keeps the slug in sync (Category.save slugifies the name).
    category.save()
    return category
# Entry point: run only when executed directly, not on import.
if __name__=="__main__":
    print("Starting RTG population")
    populate()
|
{"/rate_the_game_app/forms.py": ["/rate_the_game_app/models.py"], "/rate_the_game_app/admin.py": ["/rate_the_game_app/models.py"], "/rate_the_game_app/views.py": ["/rate_the_game_app/forms.py", "/rate_the_game_app/models.py"], "/population_script.py": ["/rate_the_game_app/models.py"]}
|
29,072
|
harshkheskani/rate_the_game
|
refs/heads/main
|
/rate_the_game_app/migrations/0005_auto_20210406_1811.py
|
# Generated by Django 2.2.17 on 2021-04-06 17:11
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated by Django 2.2.17: relaxes Review.comment and Review.score.

    Do not hand-edit generated migrations except to resolve conflicts.
    """

    dependencies = [
        ('rate_the_game_app', '0004_review'),
    ]

    operations = [
        # comment may now be left blank.
        migrations.AlterField(
            model_name='review',
            name='comment',
            field=models.CharField(blank=True, max_length=2000),
        ),
        # score may now be NULL; maximum of 10 enforced by validator.
        migrations.AlterField(
            model_name='review',
            name='score',
            field=models.IntegerField(null=True, validators=[django.core.validators.MaxValueValidator(10)]),
        ),
    ]
|
{"/rate_the_game_app/forms.py": ["/rate_the_game_app/models.py"], "/rate_the_game_app/admin.py": ["/rate_the_game_app/models.py"], "/rate_the_game_app/views.py": ["/rate_the_game_app/forms.py", "/rate_the_game_app/models.py"], "/population_script.py": ["/rate_the_game_app/models.py"]}
|
29,073
|
timpinkerton/testFlaskApp
|
refs/heads/master
|
/app.py
|
from flask import Flask, render_template, redirect, url_for
from data import Articles
# Creating an instance of the Flask class.
app = Flask(__name__)

# NOTE(review): this rebinds the imported Articles *function* to its own
# return value (a list of dicts), shadowing the function name. It works for
# this module, but Articles() can never be called again afterwards.
Articles = Articles()
# home page route
@app.route('/')
def index():
    """Serve the site home page."""
    template = 'home.html'
    return render_template(template)
# route to about page
@app.route('/about')
def about():
    """Serve the about page."""
    template = '/about.html'
    return render_template(template)
@app.route('/articles')
def articles():
    """List all articles, handing the module-level data to the template."""
    context = {'articles': Articles}
    return render_template('/articles.html', **context)
@app.route('/article/<string:id>/')
def article(id):
    """Show the page for a single article; id arrives as a string from the URL."""
    context = {'id': id}
    return render_template('/article.html', **context)
# Entry point: run the dev server only when executed directly.
if __name__ == '__main__':
    # debug=True automatically restarts the server when code changes.
    app.run(debug = True)
|
{"/app.py": ["/data.py"]}
|
29,074
|
timpinkerton/testFlaskApp
|
refs/heads/master
|
/data.py
|
def Articles():
    """Return the hard-coded list of article dicts used by the demo app.

    Each dict carries id, title, body, author and create_date keys.

    BUG FIX: the third article reused id 1; ids must be unique because the
    /article/<id>/ route identifies articles by id.
    """
    articles = [
        {
            'id': 1,
            'title': 'Article Number One',
            'body': 'lorem ispummmm yuuummmmm',
            'author': 'Me',
            'create_date': '04-29-2017'
        },
        {
            'id': 2,
            'title': 'Article Number Too',
            'body': 'lorem ispummmm yuuummmmm um um',
            'author': 'You',
            'create_date': '04-28-2017'
        },
        {
            'id': 3,  # was a duplicate of id 1
            'title': 'Article Number Tree',
            'body': 'lorem ispummmm yuuummmmy ummy ummy',
            'author': 'Us',
            'create_date': '04-30-2017'
        }
    ]
    return articles
|
{"/app.py": ["/data.py"]}
|
29,075
|
umutcoskun/django-tokenauth
|
refs/heads/master
|
/tokenauth/management/commands/generatetokens.py
|
from django.conf import settings
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
from tokenauth.models import Token
# NOTE(review): if USER_AUTH_MODEL is set, this rebinds User to the settings
# value — presumably a dotted "app.Model" string — after which the
# User.objects access below would fail. Confirm; django.contrib.auth's
# get_user_model() is the usual mechanism here.
if hasattr(settings, 'USER_AUTH_MODEL'):
    User = settings.USER_AUTH_MODEL
class GenerateTokensCommand(BaseCommand):
    """Management command: wipe all tokens and issue a fresh one per user.

    NOTE(review): Django only discovers management commands whose class is
    named exactly ``Command``; as written this is likely invisible to
    manage.py — confirm and rename if so.
    """

    def handle(self, *args, **kwargs):
        """Delete every existing Token, then create one for each user."""
        users = User.objects.all()
        # Destructive: every previously issued token is invalidated first.
        Token.objects.all().delete()
        for user in users:
            Token.objects.create(user=user)
|
{"/tokenauth/management/commands/generatetokens.py": ["/tokenauth/models.py"], "/tokenauth/views.py": ["/tokenauth/auth.py"], "/tokenauth/urls.py": ["/tokenauth/views.py"], "/tokenauth/auth.py": ["/tokenauth/models.py"], "/tokenauth/signals.py": ["/tokenauth/models.py"], "/tokenauth/admin.py": ["/tokenauth/models.py"], "/tokenauth/apps.py": ["/tokenauth/signals.py"]}
|
29,076
|
umutcoskun/django-tokenauth
|
refs/heads/master
|
/tokenauth/views.py
|
from django.conf import settings
from django.shortcuts import redirect
from django.views.generic import View
from tokenauth.auth import login
class Login(View):
    """Log the visitor in from a ?token=... parameter, then redirect.

    Idiom fix: the local variable ``next`` shadowed the builtin of the same
    name; renamed to ``next_url`` (an internal local, so no caller changes).
    """

    def get(self, request):
        """Authenticate via token, then honour ?next= or the configured URL."""
        login(request)
        # A 'next' query parameter overrides the configured post-login target.
        next_url = request.GET.get('next', None)
        if next_url:
            return redirect(next_url)
        return redirect(settings.LOGIN_REDIRECT_URL)
|
{"/tokenauth/management/commands/generatetokens.py": ["/tokenauth/models.py"], "/tokenauth/views.py": ["/tokenauth/auth.py"], "/tokenauth/urls.py": ["/tokenauth/views.py"], "/tokenauth/auth.py": ["/tokenauth/models.py"], "/tokenauth/signals.py": ["/tokenauth/models.py"], "/tokenauth/admin.py": ["/tokenauth/models.py"], "/tokenauth/apps.py": ["/tokenauth/signals.py"]}
|
29,077
|
umutcoskun/django-tokenauth
|
refs/heads/master
|
/tokenauth/urls.py
|
from django.conf.urls import url
from tokenauth.views import Login

# Single route: /login consumes the ?token=... parameter via the Login view.
urlpatterns = [
    url(r'^login$', Login.as_view(), name='login'),
]
|
{"/tokenauth/management/commands/generatetokens.py": ["/tokenauth/models.py"], "/tokenauth/views.py": ["/tokenauth/auth.py"], "/tokenauth/urls.py": ["/tokenauth/views.py"], "/tokenauth/auth.py": ["/tokenauth/models.py"], "/tokenauth/signals.py": ["/tokenauth/models.py"], "/tokenauth/admin.py": ["/tokenauth/models.py"], "/tokenauth/apps.py": ["/tokenauth/signals.py"]}
|
29,078
|
umutcoskun/django-tokenauth
|
refs/heads/master
|
/tokenauth/auth.py
|
from uuid import UUID
from django.conf import settings
from django.contrib.auth import login as auth_login
from tokenauth.models import Token
def login(request, silence=False):
    """Authenticate the request's user from a token passed as a GET parameter.

    The parameter name defaults to 'token' and can be overridden with
    settings.TOKENAUTH_PARAMETER_NAME. Raises ValueError for a missing
    (unless ``silence``), malformed, or unknown token; raises Exception when
    the token belongs to a superuser and TOKENAUTH_ALLOW_ADMINS is not set.

    Fixes: renamed the local ``hex`` (shadowed the builtin), chained the
    re-raised exceptions for better tracebacks, and corrected the grammar of
    the unknown-token message ('does not exists' -> 'does not exist').
    """
    param_name = 'token'
    if hasattr(settings, 'TOKENAUTH_PARAMETER_NAME'):
        param_name = settings.TOKENAUTH_PARAMETER_NAME
    token_hex = request.GET.get(param_name, None)
    if not token_hex:
        if not silence:
            raise ValueError('You should provide a token.')
        return
    try:
        # Ensure uuid4 format.
        value = UUID(token_hex, version=4)
    except ValueError as err:
        raise ValueError('Invalid token format.') from err
    try:
        token = Token.objects.get(uuid=value.hex)
    except Token.DoesNotExist:
        raise ValueError('The token does not exist.') from None
    allow_admins = getattr(settings, 'TOKENAUTH_ALLOW_ADMINS', False)
    if not allow_admins and token.user.is_superuser:
        raise Exception('Super users cannot login via token.')
    auth_login(request, token.user)
|
{"/tokenauth/management/commands/generatetokens.py": ["/tokenauth/models.py"], "/tokenauth/views.py": ["/tokenauth/auth.py"], "/tokenauth/urls.py": ["/tokenauth/views.py"], "/tokenauth/auth.py": ["/tokenauth/models.py"], "/tokenauth/signals.py": ["/tokenauth/models.py"], "/tokenauth/admin.py": ["/tokenauth/models.py"], "/tokenauth/apps.py": ["/tokenauth/signals.py"]}
|
29,079
|
umutcoskun/django-tokenauth
|
refs/heads/master
|
/tokenauth/signals.py
|
from django.conf import settings
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from tokenauth.models import Token
# BUG FIX: the original read `user = settings.user_auth_model`, which raises
# AttributeError whenever USER_AUTH_MODEL *is* set (hasattr checks the
# uppercase name, the access used lowercase) and bound a never-used local.
# Rebinding User mirrors the sibling modules (models.py, generatetokens.py).
if hasattr(settings, 'USER_AUTH_MODEL'):
    User = settings.USER_AUTH_MODEL

def create_user_token(sender, instance, created, **kwargs):
    """post_save receiver: issue a Token for every newly created user."""
    if created:
        token = Token(user=instance)
        token.save()

# Wire the receiver to user creation events.
post_save.connect(create_user_token, sender=User)
|
{"/tokenauth/management/commands/generatetokens.py": ["/tokenauth/models.py"], "/tokenauth/views.py": ["/tokenauth/auth.py"], "/tokenauth/urls.py": ["/tokenauth/views.py"], "/tokenauth/auth.py": ["/tokenauth/models.py"], "/tokenauth/signals.py": ["/tokenauth/models.py"], "/tokenauth/admin.py": ["/tokenauth/models.py"], "/tokenauth/apps.py": ["/tokenauth/signals.py"]}
|
29,080
|
umutcoskun/django-tokenauth
|
refs/heads/master
|
/tokenauth/models.py
|
import uuid
from django.conf import settings
from django.contrib.auth.models import User
from django.db import models
# NOTE(review): presumably USER_AUTH_MODEL is a dotted "app.Model" string;
# OneToOneField below accepts such lazy references so the field still
# resolves, but any User.objects use elsewhere would not. Confirm.
if hasattr(settings, 'USER_AUTH_MODEL'):
    User = settings.USER_AUTH_MODEL
class Token(models.Model):
    """A per-user login token; the random UUID primary key is the secret itself."""

    # Random uuid4 primary key doubles as the token value handed to users.
    uuid = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    # One token per user; deleting the user deletes the token.
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    date_created = models.DateTimeField(auto_now_add=True)

    class Meta:
        # Reachable from a user as user.token.
        default_related_name = 'token'

    def __str__(self):
        return '<Token {} {}>'.format(self.user.username, self.uuid)
|
{"/tokenauth/management/commands/generatetokens.py": ["/tokenauth/models.py"], "/tokenauth/views.py": ["/tokenauth/auth.py"], "/tokenauth/urls.py": ["/tokenauth/views.py"], "/tokenauth/auth.py": ["/tokenauth/models.py"], "/tokenauth/signals.py": ["/tokenauth/models.py"], "/tokenauth/admin.py": ["/tokenauth/models.py"], "/tokenauth/apps.py": ["/tokenauth/signals.py"]}
|
29,081
|
umutcoskun/django-tokenauth
|
refs/heads/master
|
/tokenauth/admin.py
|
from django.contrib import admin
from tokenauth.models import Token
@admin.register(Token)
class TokenAdmin(admin.ModelAdmin):
    """Admin listing for tokens: show uuid, owner and creation time."""

    list_display = ('uuid', 'user', 'date_created')
    # Only the uuid column links through to the change page.
    list_display_links = ('uuid', )
|
{"/tokenauth/management/commands/generatetokens.py": ["/tokenauth/models.py"], "/tokenauth/views.py": ["/tokenauth/auth.py"], "/tokenauth/urls.py": ["/tokenauth/views.py"], "/tokenauth/auth.py": ["/tokenauth/models.py"], "/tokenauth/signals.py": ["/tokenauth/models.py"], "/tokenauth/admin.py": ["/tokenauth/models.py"], "/tokenauth/apps.py": ["/tokenauth/signals.py"]}
|
29,082
|
umutcoskun/django-tokenauth
|
refs/heads/master
|
/tokenauth/__init__.py
|
# Point Django at the app config whose ready() hook wires up the signals.
default_app_config = 'tokenauth.apps.TokenauthConfig'
|
{"/tokenauth/management/commands/generatetokens.py": ["/tokenauth/models.py"], "/tokenauth/views.py": ["/tokenauth/auth.py"], "/tokenauth/urls.py": ["/tokenauth/views.py"], "/tokenauth/auth.py": ["/tokenauth/models.py"], "/tokenauth/signals.py": ["/tokenauth/models.py"], "/tokenauth/admin.py": ["/tokenauth/models.py"], "/tokenauth/apps.py": ["/tokenauth/signals.py"]}
|
29,083
|
umutcoskun/django-tokenauth
|
refs/heads/master
|
/tokenauth/apps.py
|
from django.apps import AppConfig
class TokenauthConfig(AppConfig):
    """App config whose ready() hook connects the token-creation signal."""

    name = 'tokenauth'

    def ready(self):
        # Imported for its side effect: connects create_user_token to post_save.
        import tokenauth.signals
|
{"/tokenauth/management/commands/generatetokens.py": ["/tokenauth/models.py"], "/tokenauth/views.py": ["/tokenauth/auth.py"], "/tokenauth/urls.py": ["/tokenauth/views.py"], "/tokenauth/auth.py": ["/tokenauth/models.py"], "/tokenauth/signals.py": ["/tokenauth/models.py"], "/tokenauth/admin.py": ["/tokenauth/models.py"], "/tokenauth/apps.py": ["/tokenauth/signals.py"]}
|
29,101
|
volkodava/CarND-Vehicle-Detection
|
refs/heads/master
|
/tools/window_search.py
|
import shutil
import scipy.misc
from tqdm import tqdm
from experiments import *
# csvsql --query "select expected, actual, input_size, result_size, window_size, filepath from windows" work/windows.csv | csvlook
# Ground-truth fixtures for the window-size sweep: for each test image, the
# number of visible cars and their hand-labelled bounding boxes as
# ((x1, y1), (x2, y2)) pixel corners.
screenshots = [
    {
        "location": "../test_images/test1.jpg",
        "cars_num": 2,
        "coords": [((820, 420), (940, 480)), ((1070, 410), (1260, 510))]
    },
    {
        "location": "../test_images/test001.jpg",
        "cars_num": 1,
        "coords": [((1120, 390), (1270, 520))]
    },
    {
        "location": "../test_images/test2.jpg",
        "cars_num": 0,
        "coords": []
    },
    {
        "location": "../test_images/test002.jpg",
        "cars_num": 2,
        "coords": [((990, 400), (1270, 580)), ((870, 410), (950, 450))]
    },
    {
        "location": "../test_images/test3.jpg",
        "cars_num": 1,
        "coords": [((870, 420), (960, 460))]
    },
    {
        "location": "../test_images/test003.jpg",
        "cars_num": 2,
        "coords": [((770, 420), (860, 470)), ((1220, 420), (1279, 480))]
    },
    {
        "location": "../test_images/test4.jpg",
        "cars_num": 2,
        "coords": [((820, 420), (940, 480)), ((1050, 410), (1240, 490))]
    },
    {
        "location": "../test_images/test5.jpg",
        "cars_num": 2,
        "coords": [((810, 420), (940, 470)), ((1120, 410), (1279, 500))]
    },
    {
        "location": "../test_images/test6.jpg",
        "cars_num": 2,
        "coords": [((810, 420), (940, 480)), ((1020, 420), (1200, 480))]
    }
]
# Vertical search band as fractions of the frame height.
top_y = 0.53
bottom_y = 0.9
# Heatmap vote threshold; 0 keeps every positive window.
heatmap_threshold = 0
# Range of sliding-window edge lengths (pixels) swept by the experiment.
xy_window_min = 32
xy_window_max = 256
xy_overlap = 0.75
cars, notcars, (sample_height, sample_width, sample_depth) = read_all_data(cars_path="../work/cars.pkl",
                                                                           notcars_path="../work/notcars.pkl")
orient = 16  # HOG orientations
pix_per_cell = 32  # HOG pixels per cell
# Model artifacts are keyed by the HOG parameters they were trained with.
linear_svc_path = "../work/models/linear_svc_orient_%s__pix_per_cell_%s.pkl" % (orient, pix_per_cell)
standard_scaler_path = "../work/models/standard_scaler_orient_%s__pix_per_cell_%s.pkl" % (orient, pix_per_cell)
svc, X_scaler = load_trained_model(linear_svc_path, standard_scaler_path)
log_dir = "../work/window_log"
# Start each run with an empty log directory.
shutil.rmtree(log_dir, ignore_errors=True)
os.makedirs(log_dir, exist_ok=True)
def find_cars(image, xy_window=xy_window_min, xy_overlap=xy_overlap, debug=False):
    """Run a single-scale sliding-window car search over *image*.

    Windows of edge length ``xy_window`` are swept over the horizontal band
    defined by the module-level ``top_y``/``bottom_y`` fractions, classified
    with the module-level SVC + scaler, accumulated into a heatmap,
    thresholded, and grouped into labelled detections.

    Returns ``(input_windows, result_windows, found_cars)``: one
    ((x1, y1), (x2, y2)) box per detection in ``found_cars``, each box's
    [width, height] in ``result_windows``, and ``xy_window`` repeated per
    detection in ``input_windows``.
    """
    height, width = image.shape[:2]
    y_start = int(height * top_y)
    y_stop = int(height * bottom_y)
    slide_windows = slide_window(image, x_start_stop=[None, None], y_start_stop=[y_start, y_stop],
                                 xy_window=(xy_window, xy_window),
                                 xy_overlap=(xy_overlap, xy_overlap))
    hot_windows = search_windows(image, slide_windows, svc, X_scaler,
                                 sample_height, sample_width, color_space,
                                 spatial_size, hist_bins, hist_range, orient,
                                 pix_per_cell, cell_per_block, hog_channel,
                                 block_norm, transform_sqrt, vis, feature_vec, spatial_feat, hist_feat, hog_feat)
    # np.float was removed in NumPy 1.24; the builtin float is equivalent here.
    heat = np.zeros_like(image[:, :, 0]).astype(float)
    # Add heat to each box in box list
    heat = add_heat(heat, hot_windows)
    # Apply threshold to help remove false positives
    heat = apply_heat_threshold(heat, heatmap_threshold)
    # Visualize the heatmap when displaying
    heatmap = np.clip(heat, 0, 255)
    # Find final boxes from heatmap using label function
    labels = label(heatmap)
    found_cars = []
    input_windows = []
    result_windows = []
    # Iterate through all detected cars (label ids start at 1)
    for car_number in range(1, labels[1] + 1):
        # Find pixels with each car_number label value
        nonzero = (labels[0] == car_number).nonzero()
        # Identify x and y values of those pixels
        nonzeroy = np.array(nonzero[0])
        nonzerox = np.array(nonzero[1])
        # Define a bounding box based on min/max x and y
        bbox = ((np.min(nonzerox), np.min(nonzeroy)), (np.max(nonzerox), np.max(nonzeroy)))
        found_cars.append(bbox)
        input_windows.append(xy_window)
        result_windows.append([abs(bbox[0][0] - bbox[1][0]), abs(bbox[0][1] - bbox[1][1])])
    if debug:
        image_boxes = draw_boxes(image, found_cars)
        plt.imshow(image_boxes)
        plt.show()
    return input_windows, result_windows, found_cars
if __name__ == "__main__":
    # CSV log of every window-size experiment; truncated at startup.
    result_file = os.path.join(log_dir, "windows.csv")
    open(result_file, "w").close()
    with open(result_file, "a") as f:
        writer = csv.writer(f, quoting=csv.QUOTE_NONNUMERIC)
        writer.writerow(
            (
                "expected", "actual",
                "input_size", "result_size",
                "window_size", "location",
                "input_coords", "result_coords",
                "filename",
                "error"
            )
        )
        # Sweep window sizes from xy_window_min towards xy_window_max in
        # steps of `inc`, over every ground-truth screenshot.
        inc = 3
        num_of_inc = int((xy_window_max - xy_window_min) / inc)
        num_of_experiments = num_of_inc * len(screenshots)
        with tqdm(total=num_of_experiments) as pbar:
            for screenshot in screenshots:
                xy_window = xy_window_min
                location = screenshot["location"]
                expected_car_num = screenshot["cars_num"]
                expected_coords = screenshot["coords"]
                original_filename = os.path.basename(location)
                # splitted_original_filename = original_filename.split(".")
                # basename = splitted_original_filename[:-1]
                # file_extension = splitted_original_filename[-1]
                # Expected box sizes as [width, height] for the CSV log.
                input_size = []
                for bbox in expected_coords:
                    input_size.append([abs(bbox[0][0] - bbox[1][0]), abs(bbox[0][1] - bbox[1][1])])
                fname, image = read_image(location)
                for idx in range(num_of_inc):
                    actual_car_num = 0
                    result_size = ""
                    window_size = str([xy_window, xy_window])
                    result_coords = ""
                    input_coords = str(expected_coords)
                    error = ""
                    filename = "%s_%s" % (idx, original_filename)
                    filepath = os.path.join(log_dir, filename)
                    found_cars = None
                    try:
                        input_windows, result_windows, found_cars = find_cars(image, xy_window=xy_window)
                        actual_car_num = len(found_cars)
                        result_size = str(result_windows)
                        result_coords = str(found_cars)
                    except Exception as exc:
                        # Record the failure in the CSV rather than abort the sweep.
                        error = str(exc)
                    if actual_car_num > 0:
                        # Save an annotated frame for visual inspection.
                        debug_image = draw_boxes(image, found_cars)
                        scipy.misc.toimage(debug_image).save(filepath)
                    # Only log rows that detected something or failed.
                    if actual_car_num > 0 or error:
                        writer.writerow(
                            (
                                expected_car_num, actual_car_num,
                                input_size, result_size,
                                window_size, location,
                                input_coords, result_coords,
                                filename,
                                error
                            )
                        )
                        f.flush()
                    xy_window += inc
                    pbar.update(1)
|
{"/tools/window_search.py": ["/experiments.py"], "/experiments.py": ["/common_functions.py"], "/tools/RoiEditorUi.py": ["/experiments.py"], "/tools/ModelTestUi.py": ["/experiments.py"], "/tools/model_experiments.py": ["/experiments.py"], "/tools/FeatureEditorUi.py": ["/experiments.py"]}
|
29,102
|
volkodava/CarND-Vehicle-Detection
|
refs/heads/master
|
/experiments.py
|
import glob
import os
import time
from collections import deque
import scipy.misc
from moviepy.video.io.VideoFileClip import VideoFileClip
from scipy.ndimage.measurements import label
from sklearn.externals import joblib
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.svm import LinearSVC
from common_functions import *
output_path = "output_images"
# Feature-extraction configuration shared by training and detection.
color_space = "HSV"  # Can be RGB, HSV, LUV, HLS, YUV, YCrCb, LAB
orient = 32  # HOG orientations
pix_per_cell = 16  # HOG pixels per cell
cell_per_block = 2  # HOG cells per block
hog_channel = "ALL"  # Can be "0", "1", "2", or "ALL"
block_norm = "L2"  # Can be "L1", "L1-sqrt", "L2", "L2-Hys"
spatial_size = None  # Spatial binning dimensions
hist_bins = -1  # Number of histogram bins
hist_range = None  # Histogram range
spatial_feat = False  # Spatial features on or off
hist_feat = False  # Histogram features on or off
hog_feat = True  # HOG features on or off
transform_sqrt = True
vis = False
feature_vec = True
# Minimum heatmap votes for a window to survive false-positive filtering.
heatmap_threshold = 2
# retrain = False
retrain = False
xy_overlap = 0.75
# Video processing params
QUEUE_LENGTH = 5  # frames of detections kept for temporal smoothing
# width, height: boxes smaller than this are discarded when grouping
window_size_threshold = (32, 32)
# image region for slide windows
# http://htmlcolorcodes.com/color-names/
# One entry per sliding-window scale: vertical search band (as frame-height
# fractions), window edge length in pixels, and the debug draw color.
slide_window_config = [
    {
        "top_y": 0.53,
        "bottom_y": 0.9,
        "xy_window": 130,
        "color": (255, 0, 0)  # red
    }, {
        "top_y": 0.53,
        "bottom_y": 0.9,
        "xy_window": 120,
        "color": (0, 255, 0)  # green
    }, {
        "top_y": 0.53,
        "bottom_y": 0.9,
        "xy_window": 110,
        "color": (0, 0, 255)  # blue
    }, {
        "top_y": 0.53,
        "bottom_y": 0.9,
        "xy_window": 100,
        "color": (255, 20, 147)  # deep pink
    }, {
        "top_y": 0.53,
        "bottom_y": 0.9,
        "xy_window": 90,
        "color": (255, 165, 0)  # orange
    }, {
        "top_y": 0.53,
        "bottom_y": 0.9,
        "xy_window": 80,
        "color": (255, 255, 0)  # yellow
    }
]
# saved model / cached sample locations
linear_svc_path = "output_images/linear_svc.pkl"
standard_scaler_path = "output_images/standard_scaler.pkl"
cars_path = "work/cars.pkl"
notcars_path = "work/notcars.pkl"
def cars_notcars_available(cars_path=cars_path, notcars_path=notcars_path):
    """Return True when both pickled sample archives exist on disk."""
    return all(os.path.exists(p) for p in (cars_path, notcars_path))
def save_cars_notcars(cars, notcars,
                      cars_path=cars_path, notcars_path=notcars_path):
    """Pickle the car and non-car sample sets to disk."""
    for payload, target in ((cars, cars_path), (notcars, notcars_path)):
        joblib.dump(payload, target)
def load_cars_notcars(cars_path=cars_path, notcars_path=notcars_path):
    """Load the pickled car / non-car sample sets; returns (cars, notcars)."""
    cars = joblib.load(cars_path)
    notcars = joblib.load(notcars_path)
    return cars, notcars
def trained_model_available(linear_svc_path=linear_svc_path,
                            standard_scaler_path=standard_scaler_path):
    """Return True when both the classifier and scaler pickles exist."""
    return all(os.path.exists(p) for p in (linear_svc_path, standard_scaler_path))
def save_trained_model(linear_svc, standard_scaler,
                       linear_svc_path=linear_svc_path,
                       standard_scaler_path=standard_scaler_path):
    """Pickle the fitted classifier and its feature scaler to disk."""
    for model, target in ((linear_svc, linear_svc_path),
                          (standard_scaler, standard_scaler_path)):
        joblib.dump(model, target)
def load_trained_model(linear_svc_path=linear_svc_path,
                       standard_scaler_path=standard_scaler_path):
    """Load the pickled classifier and scaler; returns (svc, scaler)."""
    classifier = joblib.load(linear_svc_path)
    scaler = joblib.load(standard_scaler_path)
    return classifier, scaler
def read_train_data(cars_search_pattern, notcars_search_pattern, sample_size=-1):
    """Collect car / non-car training images matching the glob patterns.

    All images are validated against the shape of the first car sample.
    Returns (cars_images, notcars_images, (height, width, depth)).
    """
    cars = list(glob.iglob(cars_search_pattern, recursive=True))
    notcars = list(glob.iglob(notcars_search_pattern, recursive=True))
    # NOTE(review): the default sample_size of -1 slices off the LAST sample
    # (list[0:-1]) rather than keeping everything — confirm this is intended.
    cars = cars[:sample_size]
    notcars = notcars[:sample_size]
    _, car_sample = read_image(cars[0])
    height, width, depth = car_sample.shape
    cars_images = validate_images_shape(cars, (height, width, depth))
    notcars_images = validate_images_shape(notcars, (height, width, depth))
    return cars_images, notcars_images, (height, width, depth)
def rescale_to_0_1(image):
    """Normalize pixel values to the [0, 1] float range.

    Images whose maximum is already <= 1 are returned unchanged (same
    object); otherwise the image is converted to float32 and divided by 255.
    """
    if np.max(image) <= 1:
        return image
    return np.float32(image) / 255
def train_classifier(cars, notcars,
                     color_space="RGB", spatial_size=(32, 32),
                     hist_bins=32, hist_range=(0, 256), orient=9,
                     pix_per_cell=8, cell_per_block=2,
                     hog_channel="0", block_norm="L2-Hys",
                     transform_sqrt=True, vis=False, feature_vec=True,
                     spatial_feat=True, hist_feat=True, hog_feat=True,
                     retrain=False, debug=True,
                     linear_svc_path=linear_svc_path,
                     standard_scaler_path=standard_scaler_path):
    """Train (or load from disk) a LinearSVC car / not-car classifier.

    Returns (svc, X_scaler, score, feature_vector_length). On the load path
    (retrain=False and both pickles exist) score and feature_vector_length
    are -1, since no evaluation happens.
    """
    if not retrain and trained_model_available(linear_svc_path, standard_scaler_path):
        print("Model loaded from backup")
        # Splat keeps the 4-tuple shape identical on both return paths.
        return (*load_trained_model(linear_svc_path, standard_scaler_path), -1, -1)
    car_features = extract_features(cars, color_space=color_space,
                                    spatial_size=spatial_size,
                                    hist_bins=hist_bins, hist_range=hist_range,
                                    orient=orient, pix_per_cell=pix_per_cell,
                                    cell_per_block=cell_per_block,
                                    hog_channel=hog_channel, block_norm=block_norm,
                                    transform_sqrt=transform_sqrt, vis=vis, feature_vec=feature_vec,
                                    spatial_feat=spatial_feat, hist_feat=hist_feat, hog_feat=hog_feat)
    notcar_features = extract_features(notcars, color_space=color_space,
                                       spatial_size=spatial_size,
                                       hist_bins=hist_bins, hist_range=hist_range,
                                       orient=orient, pix_per_cell=pix_per_cell,
                                       cell_per_block=cell_per_block,
                                       hog_channel=hog_channel, block_norm=block_norm,
                                       transform_sqrt=transform_sqrt, vis=vis, feature_vec=feature_vec,
                                       spatial_feat=spatial_feat, hist_feat=hist_feat, hog_feat=hog_feat)
    X = np.vstack((car_features, notcar_features)).astype(np.float64)
    # Fit a per-column scaler
    X_scaler = StandardScaler().fit(X)
    # Apply the scaler to X
    scaled_X = X_scaler.transform(X)
    # Define the labels vector: 1 for cars, 0 for non-cars
    y = np.hstack((np.ones(len(car_features)), np.zeros(len(notcar_features))))
    # Split up data into randomized training and test sets
    X_train, X_test, y_train, y_test = train_test_split(
        scaled_X, y, test_size=0.2, random_state=0)
    feature_vector_length = len(X_train[0])
    if debug:
        print("Using:", orient, "orientations", pix_per_cell,
              "pixels per cell and", cell_per_block, "cells per block")
        print("Feature vector length:", feature_vector_length)
    # Use a linear SVC
    svc = LinearSVC()
    # Check the training time for the SVC
    t = time.time()
    svc.fit(X_train, y_train)
    t2 = time.time()
    # Check the score of the SVC
    score = round(svc.score(X_test, y_test), 4)
    if debug:
        print(round(t2 - t, 2), "Seconds to train SVC...")
        print("Test Accuracy of SVC = ", score)
    # Persist both artifacts so future calls can skip training.
    save_trained_model(svc, X_scaler, linear_svc_path, standard_scaler_path)
    return svc, X_scaler, score, feature_vector_length
# Define a single function that can extract features using hog sub-sampling and make predictions
def find_car_windows(image, ystart, ystop, scale, svc, X_scaler, orient, pix_per_cell, cell_per_block, spatial_size,
                     hist_bins, trg_color_space=cv2.COLOR_RGB2YCrCb):
    """HOG-subsampling window search.

    Computes HOG once over the [ystart, ystop) strip (optionally downscaled
    by ``scale``), then slides a 64px window across the HOG grid and
    classifies each patch with ``svc``. Returns a list of
    ((x1, y1), (x2, y2)) windows predicted positive, in original-image
    coordinates.
    """
    windows = []
    img_tosearch = image[ystart:ystop, :, :]
    ctrans_tosearch = cv2.cvtColor(img_tosearch, trg_color_space)
    if scale != 1:
        imshape = ctrans_tosearch.shape
        # np.int was removed in NumPy 1.20+; the builtin int truncates identically.
        ctrans_tosearch = cv2.resize(ctrans_tosearch, (int(imshape[1] / scale), int(imshape[0] / scale)))
    ch1 = ctrans_tosearch[:, :, 0]
    ch2 = ctrans_tosearch[:, :, 1]
    ch3 = ctrans_tosearch[:, :, 2]
    # Define blocks and steps as above
    nxblocks = (ch1.shape[1] // pix_per_cell) - cell_per_block + 1
    nyblocks = (ch1.shape[0] // pix_per_cell) - cell_per_block + 1
    nfeat_per_block = orient * cell_per_block ** 2
    # 64 was the orginal sampling rate, with 8 cells and 8 pix per cell
    window = 64
    nblocks_per_window = (window // pix_per_cell) - cell_per_block + 1
    cells_per_step = 2  # Instead of overlap, define how many cells to step
    nxsteps = (nxblocks - nblocks_per_window) // cells_per_step
    nysteps = (nyblocks - nblocks_per_window) // cells_per_step
    # Compute individual channel HOG features for the entire image
    hog1 = get_hog_features(ch1, orient, pix_per_cell, cell_per_block, feature_vec=False)
    hog2 = get_hog_features(ch2, orient, pix_per_cell, cell_per_block, feature_vec=False)
    hog3 = get_hog_features(ch3, orient, pix_per_cell, cell_per_block, feature_vec=False)
    for xb in range(nxsteps):
        for yb in range(nysteps):
            ypos = yb * cells_per_step
            xpos = xb * cells_per_step
            # Extract HOG for this patch
            hog_feat1 = hog1[ypos:ypos + nblocks_per_window, xpos:xpos + nblocks_per_window].ravel()
            hog_feat2 = hog2[ypos:ypos + nblocks_per_window, xpos:xpos + nblocks_per_window].ravel()
            hog_feat3 = hog3[ypos:ypos + nblocks_per_window, xpos:xpos + nblocks_per_window].ravel()
            hog_features = np.hstack((hog_feat1, hog_feat2, hog_feat3))
            xleft = xpos * pix_per_cell
            ytop = ypos * pix_per_cell
            # Extract the image patch
            subimg = cv2.resize(ctrans_tosearch[ytop:ytop + window, xleft:xleft + window], (64, 64))
            # Get color features
            spatial_features = bin_spatial(subimg, size=spatial_size)
            hist_features = color_hist(subimg, nbins=hist_bins)
            # Scale features and make a prediction
            test_features = X_scaler.transform(
                np.hstack((spatial_features, hist_features, hog_features)).reshape(1, -1))
            # test_features = X_scaler.transform(np.hstack((shape_feat, hist_feat)).reshape(1, -1))
            test_prediction = svc.predict(test_features)
            if test_prediction == 1:
                # Map the window back to original-image coordinates.
                xbox_left = int(xleft * scale)
                ytop_draw = int(ytop * scale)
                win_draw = int(window * scale)
                windows.append(((xbox_left, ytop_draw + ystart),
                                (xbox_left + win_draw, ytop_draw + win_draw + ystart)))
    return windows
def read_all_data(force=False, cars_search_pattern="work/vehicles/**/*.png",
                  notcars_search_pattern="work/non-vehicles/**/*.png",
                  cars_path=cars_path, notcars_path=notcars_path):
    """Return (cars, notcars, (height, width, depth)) training data.

    Uses the pickled backup when *force* is False and both pickles exist;
    otherwise reads the images from disk and refreshes the backup.
    """
    if not force and cars_notcars_available(cars_path, notcars_path):
        cars, notcars = load_cars_notcars(cars_path, notcars_path)
        h, w, d = cars[0].shape
        print("Cars/NotCars loaded from backup")
        return cars, notcars, (h, w, d)
    cars, notcars, sample_shape = read_train_data(cars_search_pattern=cars_search_pattern,
                                                  notcars_search_pattern=notcars_search_pattern)
    save_cars_notcars(cars, notcars, cars_path, notcars_path)
    return cars, notcars, sample_shape
def group_windows(image, windows,
                  heatmap_threshold=heatmap_threshold,
                  window_size_threshold=window_size_threshold):
    """Merge raw detection windows into grouped bounding boxes via a heatmap.

    Returns (heatmap_img, grouped_bboxes): a rendered heatmap image (via
    matplotlib) and the labelled, size-filtered boxes.
    """
    # np.float was removed in NumPy 1.24; the builtin float is equivalent here.
    heat = np.zeros_like(image[:, :, 0]).astype(float)
    # Add heat to each box in box list
    heat = add_heat(heat, windows)
    # Apply threshold to help remove false positives
    heat = apply_heat_threshold(heat, heatmap_threshold)
    # Visualize the heatmap when displaying
    heatmap = np.clip(heat, 0, 255)
    # plot and convert to image heatmap
    plt.close()
    plt.imshow(heatmap, cmap="hot")
    plt.axis("off")
    plt.tight_layout()
    heatmap_img = plot_to_image()
    # Find final boxes from heatmap using label function
    labels = label(heatmap)
    grouped_bboxes = group_bboxes(labels, window_size_threshold)
    return heatmap_img, grouped_bboxes
def to_grayscale(image):
    """Collapse an H x W x C image to H x W by averaging over axis 2."""
    channel_mean = np.mean(image, axis=2)
    return channel_mean
def show_images(images, labels, cols, figsize=(16, 8), title=None):
    """Display *images* in a grid of *cols* columns with per-image *labels*.

    Grayscale (2-D after squeeze) images are shown with the gray colormap.
    """
    assert len(images) == len(labels)
    # Integer row count: the original float division (len/cols + 1) breaks
    # plt.subplot on modern matplotlib, which requires integral geometry.
    # Floor division + 1 matches the old truncated value.
    rows = len(images) // cols + 1
    plt.figure(figsize=figsize)
    for idx, image in enumerate(images):
        plt.subplot(rows, cols, idx + 1)
        image = image.squeeze()
        if len(image.shape) == 2:
            plt.imshow(image, cmap="gray")
        else:
            plt.imshow(image)
        plt.title(labels[idx])
        plt.axis("off")
    if title is not None:
        plt.suptitle(title, fontsize=16)
    plt.tight_layout(pad=3.0)
    plt.show()
def combine_images_horiz(a, b):
    """Place image *b* to the right of image *a* on a black uint8 canvas.

    The canvas is as tall as the taller input and exactly wide enough for
    both; uncovered pixels stay zero.
    """
    assert len(a.shape) == 3, "Height, width, depth required"
    assert len(a.shape) == len(b.shape), "Shape of images must be equal"
    h_left, w_left, d_left = a.shape[:3]
    h_right, w_right, d_right = b.shape[:3]
    assert d_left == d_right, "Depth must be the same for both images"
    canvas = np.zeros(
        shape=(max(h_left, h_right), w_left + w_right, d_left), dtype=np.uint8)
    canvas[:h_left, :w_left] = a
    canvas[:h_right, w_left:w_left + w_right] = b
    return canvas
def combine_images_vert(a, b):
    """Place image *b* below image *a* on a black uint8 canvas.

    The canvas is as wide as the wider input and exactly tall enough for
    both; uncovered pixels stay zero.
    """
    assert len(a.shape) == 3, "Height, width, depth required"
    assert len(a.shape) == len(b.shape), "Shape of images must be equal"
    h_top, w_top, d_top = a.shape[:3]
    h_bot, w_bot, d_bot = b.shape[:3]
    assert d_top == d_bot, "Depth must be the same for both images"
    canvas = np.zeros(
        shape=(h_top + h_bot, max(w_top, w_bot), d_top), dtype=np.uint8)
    canvas[:h_top, :w_top] = a
    canvas[h_top:h_top + h_bot, :w_bot] = b
    return canvas
def combine_3_images(main, first, second):
    """Stack *main* above a half-height strip holding *first* (left) and
    *second* (right), both resized to fit.

    Returns *main* unchanged if any input is None.
    """
    if main is None or first is None or second is None:
        return main
    height, width, depth = main.shape
    strip_height = height // 2
    left_width = width // 2
    right_width = width - left_width
    canvas = np.zeros((height + strip_height, width, depth), dtype=np.uint8)
    # main frame on top
    canvas[:height, :width, :] = main
    # first image bottom-left
    canvas[height:height + strip_height, :left_width, :] = \
        cv2.resize(first, (left_width, strip_height))
    # second image bottom-right
    canvas[height:height + strip_height, left_width:left_width + right_width, :] = \
        cv2.resize(second, (right_width, strip_height))
    return canvas
def convert_hog(image, block_norm=block_norm, cell_per_block=cell_per_block, hog_channel=hog_channel, orient=orient,
                pix_per_cell=pix_per_cell, transform_sqrt=transform_sqrt):
    """Render the HOG visualization of *image*.

    For hog_channel == "ALL" the per-channel visualizations are scaled to
    uint8 and concatenated horizontally; otherwise the single channel's raw
    visualization is returned. The result is squeezed to 2-D where possible.
    """
    if hog_channel != "ALL":
        # Single channel: return the raw visualization unscaled.
        _, single_hog = get_hog_features(image[:, :, int(hog_channel)], orient,
                                         pix_per_cell, cell_per_block, block_norm=block_norm,
                                         transform_sqrt=transform_sqrt, vis=True,
                                         feature_vec=False)
        return single_hog.squeeze()
    combined = None
    for channel in range(image.shape[2]):
        _, channel_hog = get_hog_features(image[:, :, channel],
                                          orient, pix_per_cell, cell_per_block, block_norm=block_norm,
                                          transform_sqrt=transform_sqrt, vis=True,
                                          feature_vec=False)
        if len(channel_hog.shape) == 2:
            channel_hog = np.expand_dims(channel_hog, axis=2)
        channel_hog = np.uint8(channel_hog * 255)
        combined = channel_hog if combined is None else combine_images_horiz(combined, channel_hog)
    return combined.squeeze()
class LaneProcessor:
    """Per-frame vehicle detector used by tag_video.

    Keeps a deque of the last QUEUE_LENGTH frames' grouped boxes so the final
    detections are smoothed over time.
    """

    def __init__(self, sample_height, sample_width, svc, X_scaler,
                 window_size_threshold, output_dir, debug):
        self.debug = debug
        self.sample_height = sample_height
        self.sample_width = sample_width
        self.svc = svc
        self.X_scaler = X_scaler
        self.count = 0  # frame counter, drawn onto each output frame
        # Map window size -> debug draw color, from the module-level config.
        self.color_configs = {}
        for config in slide_window_config:
            self.color_configs[config["xy_window"]] = config["color"]
        # shutil.rmtree(output_dir, ignore_errors=True)
        # os.makedirs(output_dir, exist_ok=True)
        self.window_size_threshold = window_size_threshold
        self.output_dir = output_dir
        self.grouped_windows = deque(maxlen=QUEUE_LENGTH)

    def aggregate(self, grouped_windows, values):
        # Push this frame's boxes into the history deque, then flatten the
        # whole history into one list of boxes.
        if grouped_windows is not None:
            values.append(grouped_windows)
        if len(values) > 0:
            grouped_windows = []
            list(grouped_windows.extend(value) for value in values)
        return grouped_windows

    def process(self, image):
        """Detect cars in one frame; return the annotated composite frame."""
        height, width = image.shape[:2]
        # Collect sliding windows for every configured scale/band.
        all_slide_windows = []
        for config in slide_window_config:
            top_y = config["top_y"]
            bottom_y = config["bottom_y"]
            xy_window = config["xy_window"]
            y_start = int(height * top_y)
            y_stop = int(height * bottom_y)
            all_slide_windows = all_slide_windows + slide_window(image, x_start_stop=[None, None],
                                                                 y_start_stop=[y_start, y_stop],
                                                                 xy_window=(xy_window, xy_window),
                                                                 xy_overlap=(xy_overlap, xy_overlap))
        hot_windows = search_windows(image, all_slide_windows, self.svc, self.X_scaler,
                                     self.sample_height, self.sample_width, color_space,
                                     spatial_size, hist_bins, hist_range, orient,
                                     pix_per_cell, cell_per_block, hog_channel,
                                     block_norm, transform_sqrt, vis, feature_vec,
                                     spatial_feat, hist_feat, hog_feat)
        window_img = draw_boxes(image, hot_windows, color_configs=self.color_configs)
        # First grouping: this frame only, with the configured threshold.
        heatmap_img, grouped_bboxes = group_windows(image, hot_windows,
                                                    heatmap_threshold,
                                                    self.window_size_threshold)
        # Second grouping: over the recent-frame history, no thresholding.
        aggregated_bboxes = self.aggregate(grouped_bboxes, self.grouped_windows)
        heatmap_img, grouped_bboxes = group_windows(image, aggregated_bboxes,
                                                    heatmap_threshold=0,
                                                    window_size_threshold=(0, 0))
        window_grouped_img = draw_boxes(image, grouped_bboxes)
        # Composite: grouped detections on top, raw windows + heatmap below.
        result_image = combine_3_images(window_grouped_img, window_img, heatmap_img)
        cv2.putText(result_image, "#%s" % self.count, (80, 40),
                    cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), lineType=cv2.LINE_AA, thickness=2)
        # for debugging
        if self.debug:
            scipy.misc.toimage(image).save(os.path.join(self.output_dir, "%s_orig.png" % self.count))
            scipy.misc.toimage(result_image).save(os.path.join(self.output_dir, "%s_res.png" % self.count))
        self.count += 1
        return result_image
def tag_video(finput, foutput, sample_height, sample_width,
              linear_svc_path, standard_scaler_path,
              window_size_threshold=(32, 32),
              subclip_secs=None, output_dir="./work/debug_video"):
    """Run LaneProcessor over video *finput* and write the result to *foutput*.

    NOTE(review): despite their names, ``linear_svc_path`` and
    ``standard_scaler_path`` are forwarded to LaneProcessor's ``svc`` and
    ``X_scaler`` parameters, and the __main__ caller in this file passes
    fitted model objects, not file paths — the names look stale; confirm
    before renaming.
    """
    detector = LaneProcessor(sample_height, sample_width,
                             linear_svc_path, standard_scaler_path,
                             window_size_threshold, output_dir, debug=False)
    video_clip = VideoFileClip(finput)
    if subclip_secs is not None:
        # Process only the (start, end) seconds slice of the clip.
        video_clip = video_clip.subclip(*subclip_secs)
    out_clip = video_clip.fl_image(detector.process)
    out_clip.write_videofile(foutput, audio=False)
if __name__ == "__main__":
    # Load the repo's static test images (currently only used by the
    # commented-out show_images call below).
    test_images_fnames = [path for path in glob.iglob("test_images/*.jpg", recursive=True)]
    image_paths, images = read_images(test_images_fnames)
    images_to_show = images
    labels_to_show = image_paths
    # show_images(images_to_show, labels=labels_to_show, cols=len(images_to_show) // 2,
    #             title="Input")
    # tag_video("project_video.mp4", os.path.join(output_path, "out_project_video.mp4"), subclip_secs=(38, 42))
    # Read in cars and notcars
    cars, notcars, (sample_height, sample_width, sample_depth) = read_all_data()
    # cars = cars[:10]
    # notcars = notcars[:10]
    print("Train samples loaded.")
# csvsql --query "select * from model_orient__pix_per_cell_params order by score desc, feature_vector_length" work/models/model_orient__pix_per_cell_params.csv | csvlook
#
# orient = [8, 12, 16, 24, 32] # HOG orientations
# pix_per_cell = [8, 12, 16, 24, 32] # HOG pixels per cell
#
# models_dir = os.path.join("work", "models")
# shutil.rmtree(models_dir, ignore_errors=True)
# os.makedirs(models_dir, exist_ok=True)
#
# result_file = os.path.join(models_dir, "model_orient__pix_per_cell_params.csv")
# open(result_file, "w").close()
#
# with open(result_file, "a") as f:
# writer = csv.writer(f, quoting=csv.QUOTE_NONNUMERIC)
# writer.writerow(
# (
# "score", "orient", "pix_per_cell", "feature_vector_length",
# "linear_svc_file", "standard_scaler_file",
# "error"
# )
# )
#
# for orient_param in orient:
# for pix_per_cell_param in pix_per_cell:
# name = "orient_%s__pix_per_cell_%s" % (orient_param, pix_per_cell_param)
#
# linear_svc_file = "linear_svc_%s.pkl" % name
# standard_scaler_file = "standard_scaler_%s.pkl" % name
#
# linear_svc_path = os.path.join(models_dir, linear_svc_file)
# standard_scaler_path = os.path.join(models_dir, standard_scaler_file)
#
# score = -1
# feature_vector_length = -1
# error = ""
# try:
# svc, X_scaler, score, feature_vector_length = train_classifier(rescale_to_0_1(cars),
# rescale_to_0_1(notcars),
# color_space=color_space,
# spatial_size=spatial_size,
# hist_bins=hist_bins,
# hist_range=hist_range,
# orient=orient_param,
# pix_per_cell=pix_per_cell_param,
# cell_per_block=cell_per_block,
# hog_channel=hog_channel,
# block_norm=block_norm,
# transform_sqrt=transform_sqrt,
# vis=vis,
# feature_vec=feature_vec,
# spatial_feat=spatial_feat,
# hist_feat=hist_feat,
# hog_feat=hog_feat,
# retrain=retrain,
# linear_svc_path=linear_svc_path,
# standard_scaler_path=standard_scaler_path)
# except Exception as exc:
# error = str(exc)
#
# writer.writerow(
# (
# score, orient_param, pix_per_cell_param, feature_vector_length,
# linear_svc_file, standard_scaler_file,
# error
# )
# )
# f.flush()
# tag_video("project_video.mp4", os.path.join(output_path, "out_project_video.mp4"), subclip_secs=(38, 42))
# self.linear_svc_path = "../work/models/linear_svc_orient_%s__pix_per_cell_%s.pkl" % (orient, pix_per_cell)
# self.standard_scaler_path = "../work/models/standard_scaler_orient_%s__pix_per_cell_%s.pkl" % (orient, pix_per_cell)
# linear_svc_path = "work/models/linear_svc_orient_%s__pix_per_cell_%s.pkl" % (orient, pix_per_cell)
# standard_scaler_path = "work/models/standard_scaler_orient_%s__pix_per_cell_%s.pkl" % (orient, pix_per_cell)
#
# svc, X_scaler = load_trained_model(linear_svc_path, standard_scaler_path)
svc, X_scaler, _, _ = train_classifier(rescale_to_0_1(cars),
rescale_to_0_1(notcars),
color_space=color_space,
spatial_size=spatial_size,
hist_bins=hist_bins,
hist_range=hist_range,
orient=orient,
pix_per_cell=pix_per_cell,
cell_per_block=cell_per_block,
hog_channel=hog_channel,
block_norm=block_norm,
transform_sqrt=transform_sqrt,
vis=vis,
feature_vec=feature_vec,
spatial_feat=spatial_feat,
hist_feat=hist_feat,
hog_feat=hog_feat,
retrain=retrain,
linear_svc_path=linear_svc_path,
standard_scaler_path=standard_scaler_path)
tag_video("project_video.mp4", os.path.join(output_path, "out_project_video_tmp.mp4"), sample_height, sample_width,
svc, X_scaler, window_size_threshold, output_dir="./output_images")
# tag_video("project_video.mp4", os.path.join(output_path, output_file), sample_height, sample_width,
# svc, X_scaler, subclip_secs=(10, 11))
|
{"/tools/window_search.py": ["/experiments.py"], "/experiments.py": ["/common_functions.py"], "/tools/RoiEditorUi.py": ["/experiments.py"], "/tools/ModelTestUi.py": ["/experiments.py"], "/tools/model_experiments.py": ["/experiments.py"], "/tools/FeatureEditorUi.py": ["/experiments.py"]}
|
29,103
|
volkodava/CarND-Vehicle-Detection
|
refs/heads/master
|
/tools/RoiEditorUi.py
|
from skimage.viewer import CollectionViewer
from skimage.viewer.plugins import Plugin
from skimage.viewer.widgets import CheckBox, Slider
from experiments import *
# ROI polygon coefficients
# Default slider values for the editor below.
# to find big cars
top_y = 0.53
bottom_y = 0.9
xy_window = 192
xy_overlap = 0.75
# to find small cars
# top_y = 0.53
# bottom_y = 0.9
# xy_window = 89
# xy_overlap = 0.5
class RoiEditorUi:
    """Interactive skimage viewer for tuning the sliding-window ROI.

    Sliders control the vertical search band (top_y / bottom_y), the window
    size and the overlap; the filter redraws the window grid on each image.
    Keyboard: ctrl+r resets the sliders to the module defaults, ctrl+p prints
    the current values.
    """

    def __init__(self, search_pattern):
        self.plugin = Plugin(image_filter=self.image_filter, dock="bottom")
        self.show_origin_checkbox = CheckBox("show_orig", value=False, alignment="left")
        self.top_y_slider = Slider('top_y', 0, 1, value=top_y)
        self.bottom_y_slider = Slider('bottom_y', 0, 1, value=bottom_y)
        self.xy_window_slider = Slider('xy_window', 0, 512, value=xy_window, value_type='int')
        self.xy_overlap_slider = Slider('xy_overlap', 0, 1, value=xy_overlap)
        self.plugin += self.show_origin_checkbox
        self.plugin += self.top_y_slider
        self.plugin += self.bottom_y_slider
        self.plugin += self.xy_window_slider
        self.plugin += self.xy_overlap_slider
        fnames = [path for path in glob.iglob(search_pattern, recursive=True)]
        self.fnames, self.images = read_images(fnames)
        self.viewer = CollectionViewer(self.images)
        self.viewer.connect_event("key_press_event", self.on_press)
        self.viewer += self.plugin
        print("Done")

    def image_filter(self, image, *args, **kwargs):
        # Called by the plugin with the current widget values in kwargs.
        image = np.copy(image)
        show_orig = kwargs["show_orig"]
        top_y = kwargs["top_y"]
        bottom_y = kwargs["bottom_y"]
        xy_window = kwargs["xy_window"]
        xy_overlap = kwargs["xy_overlap"]
        car_index = self.viewer.slider.val
        # Stamp the current file name on the frame.
        cv2.putText(image, self.fnames[car_index], (10, 100), cv2.FONT_HERSHEY_SIMPLEX, 2, (0, 255, 0), 2, cv2.LINE_AA)
        if show_orig:
            return image
        height, width = image.shape[:2]
        y_start = int(height * top_y)
        y_stop = int(height * bottom_y)
        slide_windows = slide_window(image, x_start_stop=[None, None], y_start_stop=[y_start, y_stop],
                                     xy_window=(xy_window, xy_window),
                                     xy_overlap=(xy_overlap, xy_overlap))
        image = draw_boxes(image, slide_windows)
        # draw red start/stop position on image
        cv2.line(image, (0, y_start), (width, y_start), (255, 0, 0), 2)
        cv2.line(image, (0, y_stop), (width, y_stop), (255, 0, 0), 2)
        return image

    def on_press(self, event):
        # Keyboard shortcuts: ctrl+r resets, ctrl+p prints current values.
        if event.key == "ctrl+r":
            self.on_reset()
        elif event.key == "ctrl+p":
            self.on_print()

    def on_print(self, args=None):
        print("""
top_y = {}
bottom_y = {}
xy_window = {}
xy_overlap = {}
""".format(round(self.top_y_slider.val, 3), round(self.bottom_y_slider.val, 3),
           self.xy_window_slider.val, round(self.xy_overlap_slider.val, 3)
           ))

    def on_reset(self, args=None):
        print("Reset")
        self.update_val(self.top_y_slider, top_y)
        self.update_val(self.bottom_y_slider, bottom_y)
        self.update_val(self.xy_window_slider, xy_window)
        self.update_val(self.xy_overlap_slider, xy_overlap)
        self.plugin.filter_image()

    def show(self):
        self.viewer.show()

    def update_val(self, comp, newval):
        # Keep the slider value and its text box in sync.
        comp.val = newval
        comp.editbox.setText("%s" % newval)
        return newval
if __name__ == "__main__":
    # Launch the ROI editor over the repo's test images.
    RoiEditorUi("../test_images/test*.jpg").show()
|
{"/tools/window_search.py": ["/experiments.py"], "/experiments.py": ["/common_functions.py"], "/tools/RoiEditorUi.py": ["/experiments.py"], "/tools/ModelTestUi.py": ["/experiments.py"], "/tools/model_experiments.py": ["/experiments.py"], "/tools/FeatureEditorUi.py": ["/experiments.py"]}
|
29,104
|
volkodava/CarND-Vehicle-Detection
|
refs/heads/master
|
/tools/ModelTestUi.py
|
from skimage.viewer import CollectionViewer
from skimage.viewer.plugins import Plugin
from skimage.viewer.widgets import CheckBox, Slider
from experiments import *
orient = 16  # HOG orientations
pix_per_cell = 32  # HOG pixels per cell
# ROI polygon coefficients
top_y = 0.53
bottom_y = 0.9
heatmap_threshold = 2
# Default size / overlap for each of the four sliding-window scales, plus a
# fixed debug color per scale.
xy_window_red = 130
xy_overlap_red = 0.75
xy_window_green = 120
xy_overlap_green = 0.75
xy_window_blue = 110
xy_overlap_blue = 0.75
xy_window_yellow = 100
xy_overlap_yellow = 0.75
xy_window_red_color = (255, 0, 0)
xy_window_green_color = (0, 255, 0)
xy_window_blue_color = (0, 0, 255)
xy_window_yellow_color = (255, 255, 0)
class ModelTestUi:
    """Interactive skimage viewer for tuning the sliding-window vehicle search.

    Loads a pre-trained classifier and feature scaler, runs the window search
    over a collection of test images, and exposes every search parameter as a
    widget so the effect of a change is visible immediately.

    Keyboard shortcuts: ctrl+r resets all widgets to the module-level
    defaults, ctrl+p prints the current values in copy-pasteable form.
    """

    def __init__(self, search_pattern):
        # Right-docked control panel; image_filter() re-runs on every widget change.
        self.plugin = Plugin(image_filter=self.image_filter, dock="right")
        self.show_origin_checkbox = CheckBox("show_orig", value=False, alignment="left")
        self.use_first_window_config_checkbox = CheckBox("use_first_window_config", value=True, alignment="left")
        self.top_y_slider = Slider('top_y', 0, 1, value=top_y)
        self.bottom_y_slider = Slider('bottom_y', 0, 1, value=bottom_y)
        self.heatmap_threshold_slider = Slider('heatmap_threshold', 0, 10, value=heatmap_threshold, value_type='int')
        self.xy_window_red_slider = Slider('xy_window_red', 0, 256, value=xy_window_red, value_type='int')
        self.xy_overlap_red_slider = Slider('xy_overlap_red', 0, 1, value=xy_overlap_red)
        self.xy_window_green_slider = Slider('xy_window_green', 0, 256, value=xy_window_green, value_type='int')
        self.xy_overlap_green_slider = Slider('xy_overlap_green', 0, 1, value=xy_overlap_green)
        self.xy_window_blue_slider = Slider('xy_window_blue', 0, 256, value=xy_window_blue, value_type='int')
        self.xy_overlap_blue_slider = Slider('xy_overlap_blue', 0, 1, value=xy_overlap_blue)
        self.xy_window_yellow_slider = Slider('xy_window_yellow', 0, 256, value=xy_window_yellow, value_type='int')
        self.xy_overlap_yellow_slider = Slider('xy_overlap_yellow', 0, 1, value=xy_overlap_yellow)
        self.plugin += self.show_origin_checkbox
        self.plugin += self.use_first_window_config_checkbox
        self.plugin += self.top_y_slider
        self.plugin += self.bottom_y_slider
        self.plugin += self.heatmap_threshold_slider
        self.plugin += self.xy_window_red_slider
        self.plugin += self.xy_overlap_red_slider
        self.plugin += self.xy_window_green_slider
        self.plugin += self.xy_overlap_green_slider
        self.plugin += self.xy_window_blue_slider
        self.plugin += self.xy_overlap_blue_slider
        self.plugin += self.xy_window_yellow_slider
        self.plugin += self.xy_overlap_yellow_slider
        # Training samples are loaded to recover the classifier's input patch shape.
        self.cars, self.notcars = load_cars_notcars(cars_path="../work/cars.pkl", notcars_path="../work/notcars.pkl")
        self.sample_height, self.sample_width, self.sample_depth = self.cars[0].shape
        # The model file names encode the HOG configuration they were trained with.
        linear_svc_path = "../work/models/linear_svc_orient_%s__pix_per_cell_%s.pkl" % (orient, pix_per_cell)
        standard_scaler_path = "../work/models/standard_scaler_orient_%s__pix_per_cell_%s.pkl" % (orient, pix_per_cell)
        self.svc, self.X_scaler = load_trained_model(linear_svc_path, standard_scaler_path)
        fnames = [path for path in glob.iglob(search_pattern, recursive=True)]
        _, self.images = read_images(fnames)
        self.viewer = CollectionViewer(self.images)
        self.viewer.connect_event("key_press_event", self.on_press)
        self.viewer += self.plugin
        print("Done")

    def image_filter(self, image, *args, **kwargs):
        """Viewer callback: run the search pipeline with the current widget values.

        Returns the original image when show_orig is checked; otherwise a
        horizontal strip of the pipeline stages (ROI image, candidate
        windows, hot windows, heatmap, grouped boxes).
        """
        image = np.copy(image)
        show_orig = kwargs["show_orig"]
        use_first_window_config = kwargs["use_first_window_config"]
        top_y = kwargs["top_y"]
        bottom_y = kwargs["bottom_y"]
        heatmap_threshold = kwargs["heatmap_threshold"]
        xy_window_red = kwargs["xy_window_red"]
        xy_overlap_red = kwargs["xy_overlap_red"]
        xy_window_green = kwargs["xy_window_green"]
        xy_overlap_green = kwargs["xy_overlap_green"]
        xy_window_blue = kwargs["xy_window_blue"]
        xy_overlap_blue = kwargs["xy_overlap_blue"]
        xy_window_yellow = kwargs["xy_window_yellow"]
        xy_overlap_yellow = kwargs["xy_overlap_yellow"]
        if show_orig:
            return image
        height, width = image.shape[:2]
        # Search only the horizontal band [top_y, bottom_y] (fractions of height).
        y_start = int(height * top_y)
        y_stop = int(height * bottom_y)
        slide_windows_red = slide_window(image, x_start_stop=[None, None], y_start_stop=[y_start, y_stop],
                                         xy_window=(xy_window_red, xy_window_red),
                                         xy_overlap=(xy_overlap_red, xy_overlap_red))
        all_slide_windows = slide_windows_red
        if not use_first_window_config:
            # Multi-scale search: add the green/blue/yellow window configurations.
            slide_windows_green = slide_window(image, x_start_stop=[None, None], y_start_stop=[y_start, y_stop],
                                               xy_window=(xy_window_green, xy_window_green),
                                               xy_overlap=(xy_overlap_green, xy_overlap_green))
            slide_windows_blue = slide_window(image, x_start_stop=[None, None], y_start_stop=[y_start, y_stop],
                                              xy_window=(xy_window_blue, xy_window_blue),
                                              xy_overlap=(xy_overlap_blue, xy_overlap_blue))
            slide_windows_yellow = slide_window(image, x_start_stop=[None, None], y_start_stop=[y_start, y_stop],
                                                xy_window=(xy_window_yellow, xy_window_yellow),
                                                xy_overlap=(xy_overlap_yellow, xy_overlap_yellow))
            all_slide_windows = all_slide_windows + slide_windows_green
            all_slide_windows = all_slide_windows + slide_windows_blue
            all_slide_windows = all_slide_windows + slide_windows_yellow
        # Classify every candidate window; feature parameters come from experiments.py.
        hot_windows = search_windows(image, all_slide_windows, self.svc, self.X_scaler,
                                     self.sample_height, self.sample_width, color_space,
                                     spatial_size, hist_bins, hist_range, orient,
                                     pix_per_cell, cell_per_block, hog_channel,
                                     block_norm, transform_sqrt, vis, feature_vec, spatial_feat, hist_feat, hog_feat)
        image_boxes = draw_boxes(image, all_slide_windows)
        window_img = draw_boxes(image, hot_windows, color_configs={
            xy_window_red: xy_window_red_color,
            xy_window_green: xy_window_green_color,
            xy_window_blue: xy_window_blue_color,
            xy_window_yellow: xy_window_yellow_color
        })
        heatmap_img, grouped_bboxes = group_windows(image, hot_windows,
                                                    heatmap_threshold)
        window_grouped_img = draw_boxes(image, grouped_bboxes)
        # draw red start/stop position on image
        cv2.line(image, (0, y_start), (width, y_start), (255, 0, 0), 2)
        cv2.line(image, (0, y_stop), (width, y_stop), (255, 0, 0), 2)
        result = combine_images_horiz(image, image_boxes)
        result = combine_images_horiz(result, window_img)
        result = combine_images_horiz(result, heatmap_img)
        result = combine_images_horiz(result, window_grouped_img)
        return result

    def on_press(self, event):
        """Keyboard handler: ctrl+r restores defaults, ctrl+p prints values."""
        if event.key == "ctrl+r":
            self.on_reset()
        elif event.key == "ctrl+p":
            self.on_print()

    def on_print(self, args=None):
        """Print the current widget values in copy-pasteable form."""
        # NOTE: the template's whitespace is part of the printed output.
        print("""
        top_y = {}
        bottom_y = {}
        heatmap_threshold = {}
        xy_window_red = {}
        xy_overlap_red = {}
        xy_window_green = {}
        xy_overlap_green = {}
        xy_window_blue = {}
        xy_overlap_blue = {}
        xy_window_yellow = {}
        xy_overlap_yellow = {}
        """.format(round(self.top_y_slider.val, 3), round(self.bottom_y_slider.val, 3), self.heatmap_threshold_slider.val,
                   self.xy_window_red_slider.val, round(self.xy_overlap_red_slider.val, 3),
                   self.xy_window_green_slider.val, round(self.xy_overlap_green_slider.val, 3),
                   self.xy_window_blue_slider.val, round(self.xy_overlap_blue_slider.val, 3),
                   self.xy_window_yellow_slider.val, round(self.xy_overlap_yellow_slider.val, 3),
                   ))

    def on_reset(self, args=None):
        """Restore every widget to its module-level default and refresh the view."""
        print("Reset")
        self.update_val(self.top_y_slider, top_y)
        self.update_val(self.bottom_y_slider, bottom_y)
        self.update_val(self.heatmap_threshold_slider, heatmap_threshold)
        self.update_val(self.xy_window_red_slider, xy_window_red)
        self.update_val(self.xy_overlap_red_slider, xy_overlap_red)
        self.update_val(self.xy_window_green_slider, xy_window_green)
        self.update_val(self.xy_overlap_green_slider, xy_overlap_green)
        self.update_val(self.xy_window_blue_slider, xy_window_blue)
        self.update_val(self.xy_overlap_blue_slider, xy_overlap_blue)
        self.update_val(self.xy_window_yellow_slider, xy_window_yellow)
        self.update_val(self.xy_overlap_yellow_slider, xy_overlap_yellow)
        # Re-run the image filter so the display reflects the restored values.
        self.plugin.filter_image()

    def show(self):
        """Open the viewer window (blocks until it is closed)."""
        self.viewer.show()

    def update_val(self, comp, newval):
        """Set slider widget *comp* to *newval*, sync its edit box, and return *newval*."""
        comp.val = newval
        comp.editbox.setText("%s" % newval)
        return newval
if __name__ == "__main__":
    # Launch the model test UI against the project's test images.
    ModelTestUi("../test_images/test*.jpg").show()
|
{"/tools/window_search.py": ["/experiments.py"], "/experiments.py": ["/common_functions.py"], "/tools/RoiEditorUi.py": ["/experiments.py"], "/tools/ModelTestUi.py": ["/experiments.py"], "/tools/model_experiments.py": ["/experiments.py"], "/tools/FeatureEditorUi.py": ["/experiments.py"]}
|
29,105
|
volkodava/CarND-Vehicle-Detection
|
refs/heads/master
|
/tools/model_experiments.py
|
import csv
from tqdm import tqdm
from experiments import *
# Handy queries for inspecting the results file produced by run_test():
# csvsql --query "select * from __results order by score desc" work/__results.csv | csvlook
# csvsql --query "select * from __results where score > 0.98 order by score desc" work/__results.csv | csvlook
# csvsql --query "select * from __results order by score desc limit 10" work/__results.csv | csvlook
# Seed values for the Fibonacci-style parameter sweeps built by sequence().
first_val = 5
second_val = 8
# Number of car / not-car samples drawn for each training run.
sample_size = 1000
# NOTE: executed at import time — loads the full dataset and subsamples it randomly.
cars, notcars, (sample_height, sample_width, sample_depth) = read_all_data()
selected_cars_indices = np.random.choice(len(cars), size=sample_size, replace=False)
selected_notcars_indices = np.random.choice(len(notcars), size=sample_size, replace=False)
cars = np.array(cars)[selected_cars_indices]
notcars = np.array(notcars)[selected_notcars_indices]
def sequence(n, ainit=0, binit=1):
    """Fibonacci-style sequence seeded with (ainit, binit), truncated below *n*.

    Each new term is the sum of the previous two; every term that is strictly
    less than *n* is collected. Used to build parameter sweeps, e.g.
    sequence(96, 5, 8) -> [8, 13, 21, 34, 55, 89].
    """
    terms = []
    prev, curr = ainit, binit
    while curr < n:
        terms.append(curr)
        prev, curr = curr, prev + curr
    return terms
# Grid-search configuration: HOG-only features in the YCrCb colour space.
# run_test() sweeps the four list-valued entries (orient, pix_per_cell,
# block_norm, transform_sqrt); the remaining entries stay fixed per run.
config_ycrcb = {
    "label": "ycrcb",
    "color_space": "YCrCb",
    "orient": sequence(96, first_val, second_val),
    "pix_per_cell": sequence(34, first_val, second_val),
    "cell_per_block": 2,
    "hog_channel": "ALL",
    "block_norm": ["L1", "L1-sqrt", "L2", "L2-Hys"],
    "transform_sqrt": [True, False],
    # Spatial/histogram features are disabled for this sweep.
    "spatial_size": None,
    "hist_bins": None,
    "hist_range": None,
    "spatial_feat": False,
    "hist_feat": False,
    "hog_feat": True,
    "vis": False,
    "feature_vec": True,
    "heatmap_threshold": 1,
    "retrain": True
}
fnames = [path for path in glob.iglob("test_images/test*.jpg", recursive=True)]
_, images = read_images(fnames)
def run_test(config):
    """Grid-search the HOG training parameters and log each run to work/__results.csv.

    Iterates the cross product of the config's orient / pix_per_cell /
    block_norm / transform_sqrt lists, trains a classifier for each
    combination, saves the model and scaler, and appends one CSV row per run.
    A failing combination is recorded in the "error" column (score 0) instead
    of aborting the sweep.
    """
    print("----------------------", flush=True)
    print(config["label"], flush=True)
    print("----------------------", flush=True)
    print("Cars size:", len(cars), flush=True)
    print("Notcars size:", len(notcars), flush=True)
    orients = config.get("orient")
    pix_per_cells = config.get("pix_per_cell")
    block_norms = config.get("block_norm")
    transform_sqrts = config.get("transform_sqrt")
    experiments_num = len(orients) * len(pix_per_cells) * len(block_norms) * len(transform_sqrts)
    print("Experiments Num:", experiments_num, flush=True)
    result_file = os.path.join("work", "__results.csv")
    # Truncate any previous results file before appending the header.
    open(result_file, "w").close()
    with open(result_file, "a") as f:
        writer = csv.writer(f, quoting=csv.QUOTE_NONNUMERIC)
        writer.writerow(("score", "orient", "pix_per_cell", "block_norm", "transform_sqrt", "error"))
        with tqdm(total=experiments_num) as pbar:
            for orient in orients:
                for pix_per_cell in pix_per_cells:
                    for block_norm in block_norms:
                        for transform_sqrt in transform_sqrts:
                            # Unique key naming the model files for this combination.
                            key = "%s_orient_%s_pix_per_cell_%s_block_norm_%s_transform_sqrt_%s" % \
                                  (config["label"], orient, pix_per_cell, block_norm, transform_sqrt)
                            score = 0
                            error = ""
                            try:
                                svc, X_scaler, score = train_classifier(rescale_to_0_1(cars), rescale_to_0_1(notcars),
                                                                        color_space=config.get("color_space"),
                                                                        spatial_size=config.get("spatial_size"),
                                                                        hist_bins=config.get("hist_bins"),
                                                                        hist_range=config.get("hist_range"),
                                                                        orient=orient, pix_per_cell=pix_per_cell,
                                                                        cell_per_block=config.get("cell_per_block"),
                                                                        hog_channel=config.get("hog_channel"),
                                                                        block_norm=block_norm,
                                                                        transform_sqrt=transform_sqrt,
                                                                        vis=config.get("vis"),
                                                                        feature_vec=config.get("feature_vec"),
                                                                        spatial_feat=config.get("spatial_feat"),
                                                                        hist_feat=config.get("hist_feat"),
                                                                        hog_feat=config.get("hog_feat"),
                                                                        retrain=config.get("retrain"), debug=False)
                                linear_svc_path = os.path.join("work", "__linear_svc_%s.pkl" % key)
                                standard_scaler_path = os.path.join("work", "__standard_scaler_%s.pkl" % key)
                                save_trained_model(svc, X_scaler, linear_svc_path, standard_scaler_path)
                            except Exception as exc:
                                # Record the failure but keep the grid search going.
                                error = str(exc)
                            writer.writerow((float(score), int(orient), int(pix_per_cell),
                                             str(block_norm), int(transform_sqrt), str(error)))
                            # Flush per row so partial results survive a crash.
                            f.flush()
                            pbar.update(1)
if __name__ == "__main__":
    # Run the full grid search for the YCrCb configuration.
    run_test(config_ycrcb)
|
{"/tools/window_search.py": ["/experiments.py"], "/experiments.py": ["/common_functions.py"], "/tools/RoiEditorUi.py": ["/experiments.py"], "/tools/ModelTestUi.py": ["/experiments.py"], "/tools/model_experiments.py": ["/experiments.py"], "/tools/FeatureEditorUi.py": ["/experiments.py"]}
|
29,106
|
volkodava/CarND-Vehicle-Detection
|
refs/heads/master
|
/tools/FeatureEditorUi.py
|
from skimage.viewer import CollectionViewer
from skimage.viewer.plugins import Plugin
from skimage.viewer.widgets import CheckBox, ComboBox, Slider
from experiments import *
# Default widget values restored by on_reset() (ctrl+r). The *_index values
# refer to positions in the ComboBox option lists built in FeatureEditorUi.__init__.
color_space_index = 0
orient = 9
pix_per_cell = 8
cell_per_block = 2
block_norm_index = 0
transform_sqrt = True
spatial_size = 32
hist_bins = 32
hist_range_start = 0
hist_range_end = 256
hog_channel_index = 0
class FeatureEditorUi:
    """Interactive viewer for exploring feature-extraction parameters.

    Displays random car / not-car training samples and visualises the
    colour-space conversion, HOG, spatial-binning and histogram features for
    the current widget configuration.

    Keyboard shortcuts: ctrl+r resets all widgets to the module-level
    defaults, ctrl+p prints the current values.
    """

    def __init__(self, image_scale=(512, 512), size=20):
        self.image_scale = image_scale
        # Number of random samples drawn from each of the car / not-car sets.
        self.size = size
        # Right-docked control panel; image_filter() re-runs on every widget change.
        self.plugin = Plugin(image_filter=self.image_filter, dock="right")
        self.configurations = ["Color Space", "HOG", "Spatial", "Hist", "Extract All"]
        self.color_spaces = ["YCrCb", "LAB", "HSV", "LUV", "YUV", "HLS", "RGB"]
        self.image_collection = ["cars", "notcars"]
        self.block_norms = ["L1", "L1-sqrt", "L2", "L2-Hys"]
        self.hog_channel = ["ALL", "0", "1", "2"]
        self.image_collection_combobox = ComboBox("cars_notcars", self.image_collection)
        self.show_origin_checkbox = CheckBox("show_orig", value=False, alignment="left")
        self.configuration_combobox = ComboBox("configuration", self.configurations)
        self.colorspace_combobox = ComboBox("color_space", self.color_spaces)
        self.orient_slider = Slider("orient", 1, 50, value=orient, value_type="int")
        self.pix_per_cell_slider = Slider("pix_per_cell", 1, 256, value=pix_per_cell, value_type="int")
        self.cell_per_block_slider = Slider("cell_per_block", 1, 256, value=cell_per_block, value_type="int")
        self.block_norm_combobox = ComboBox("block_norm", self.block_norms)
        self.transform_sqrt_checkbox = CheckBox("transform_sqrt", value=transform_sqrt, alignment="left")
        self.spatial_size_slider = Slider("spatial_size", 1, 256, value=spatial_size, value_type="int")
        self.hist_bins_slider = Slider("hist_bins", 1, 256, value=hist_bins, value_type="int")
        self.hist_range_start_slider = Slider("hist_range_start", 0, 256, value=hist_range_start, value_type="int")
        self.hist_range_end_slider = Slider("hist_range_end", 0, 256, value=hist_range_end, value_type="int")
        self.hog_channel_combobox = ComboBox("hog_channel", self.hog_channel)
        self.plugin += self.image_collection_combobox
        self.plugin += self.show_origin_checkbox
        self.plugin += self.configuration_combobox
        self.plugin += self.colorspace_combobox
        self.plugin += self.orient_slider
        self.plugin += self.pix_per_cell_slider
        self.plugin += self.cell_per_block_slider
        self.plugin += self.block_norm_combobox
        self.plugin += self.transform_sqrt_checkbox
        self.plugin += self.spatial_size_slider
        self.plugin += self.hist_bins_slider
        self.plugin += self.hist_range_start_slider
        self.plugin += self.hist_range_end_slider
        self.plugin += self.hog_channel_combobox
        self.cars_images, self.notcars_images = load_cars_notcars(cars_path="../work/cars.pkl",
                                                                  notcars_path="../work/notcars.pkl")
        self.sample_height, self.sample_width, self.sample_depth = self.cars_images[0].shape
        # Work with a small random subset of each collection to keep the UI fast.
        self.rnd_cars_indices = np.random.choice(len(self.cars_images), size=self.size, replace=False)
        self.rnd_notcars_indices = np.random.choice(len(self.notcars_images), size=self.size, replace=False)
        self.cars_images = np.uint8(self.cars_images)[self.rnd_cars_indices]
        self.notcars_images = np.uint8(self.notcars_images)[self.rnd_notcars_indices]
        # Tracks which collection the viewer currently displays.
        self.cars_selected = True
        self.viewer = CollectionViewer(self.cars_images)
        self.viewer.connect_event("key_press_event", self.on_press)
        self.viewer += self.plugin
        print("Done")

    def image_filter(self, image, *args, **kwargs):
        """Viewer callback: visualise the selected feature for the current sample."""
        image = np.copy(image)
        show_orig = kwargs["show_orig"]
        cars_notcars = kwargs["cars_notcars"]
        color_space = kwargs["color_space"]
        configuration = kwargs["configuration"]
        orient = kwargs["orient"]
        pix_per_cell = kwargs["pix_per_cell"]
        cell_per_block = kwargs["cell_per_block"]
        block_norm = kwargs["block_norm"]
        transform_sqrt = kwargs["transform_sqrt"]
        spatial_size = kwargs["spatial_size"]
        hist_bins = kwargs["hist_bins"]
        hist_range_start = kwargs["hist_range_start"]
        hist_range_end = kwargs["hist_range_end"]
        hog_channel = kwargs["hog_channel"]
        car_index = self.viewer.slider.val
        # Swap the displayed collection when the combobox selection changed.
        if cars_notcars == "cars" and not self.cars_selected:
            self.viewer.image_collection = self.cars_images
            self.viewer.update_index(None, 0)
            self.cars_selected = True
        elif cars_notcars == "notcars" and self.cars_selected:
            self.viewer.image_collection = self.notcars_images
            self.viewer.update_index(None, 0)
            self.cars_selected = False
        if show_orig:
            return image
        # Map the combobox label to the OpenCV conversion code (None means keep RGB).
        target_color_space = cv2.COLOR_RGB2HSV
        if color_space == "RGB":
            target_color_space = None
        elif color_space == "YCrCb":
            target_color_space = cv2.COLOR_RGB2YCrCb
        elif color_space == "LAB":
            target_color_space = cv2.COLOR_RGB2LAB
        elif color_space == "LUV":
            target_color_space = cv2.COLOR_RGB2LUV
        elif color_space == "YUV":
            target_color_space = cv2.COLOR_RGB2YUV
        elif color_space == "HLS":
            target_color_space = cv2.COLOR_RGB2HLS
        converted_image = image
        converted_car_image = self.cars_images[car_index]
        converted_notcars_image = self.notcars_images[car_index]
        if target_color_space is None:
            # image already in RGB
            pass
        else:
            converted_image = cv2.cvtColor(image, target_color_space)
            converted_car_image = cv2.cvtColor(converted_car_image, target_color_space)
            converted_notcars_image = cv2.cvtColor(converted_notcars_image, target_color_space)
        if configuration == "Color Space":
            return combine_images_vert(converted_car_image, converted_notcars_image)
        if configuration == "HOG":
            hog_car_image = convert_hog(converted_car_image, block_norm, cell_per_block, hog_channel, orient,
                                        pix_per_cell, transform_sqrt)
            hog_notcar_image = convert_hog(converted_notcars_image, block_norm, cell_per_block, hog_channel, orient,
                                           pix_per_cell, transform_sqrt)
            hog_car_image = np.expand_dims(hog_car_image, axis=2)
            hog_notcar_image = np.expand_dims(hog_notcar_image, axis=2)
            return combine_images_vert(hog_car_image, hog_notcar_image).squeeze()
        if configuration == "Spatial":
            spatial_car_image = self.show_spatial(converted_car_image, spatial_size)
            spatial_notcar_image = self.show_spatial(converted_notcars_image, spatial_size)
            return combine_images_vert(spatial_car_image, spatial_notcar_image)
        if configuration == "Hist":
            hist_car_image = self.show_hist(converted_car_image, hist_bins, hist_range_end, hist_range_start,
                                            "Car")
            hist_notcar_image = self.show_hist(converted_notcars_image, hist_bins, hist_range_end, hist_range_start,
                                               "NotCar")
            return combine_images_vert(hist_car_image, hist_notcar_image)
        if configuration == "Extract All":
            return self.show_extract_all(car_index, block_norm, cars_notcars, cell_per_block, color_space,
                                         converted_image, hist_bins, hist_range_end, hist_range_start, hog_channel,
                                         orient, pix_per_cell, spatial_size, transform_sqrt)
        return converted_image

    def show_extract_all(self, car_index, block_norm, cars_notcars, cell_per_block, color_space, converted_image,
                         hist_bins,
                         hist_range_end, hist_range_start, hog_channel, orient, pix_per_cell, spatial_size,
                         transform_sqrt):
        """Extract all feature types for every sample and plot the raw vs.
        normalized feature vector of the current car image.

        Returns the rendered plot, or *converted_image* unchanged when no
        features were produced or the viewer shows the not-car collection.
        """
        print("Run extract all features")
        cars_images, notcars_images = self.cars_images, self.notcars_images
        # cars_images, notcars_images, _ = read_all_data()
        # cars_images = cars_images[:self.size]
        # notcars_images = notcars_images[:self.size]
        car_features = extract_features(rescale_to_0_1(cars_images), color_space=color_space,
                                        spatial_size=(spatial_size, spatial_size),
                                        hist_bins=hist_bins, hist_range=(hist_range_start, hist_range_end),
                                        orient=orient, pix_per_cell=pix_per_cell,
                                        cell_per_block=cell_per_block,
                                        hog_channel=hog_channel, block_norm=block_norm,
                                        transform_sqrt=transform_sqrt, vis=False, feature_vec=True,
                                        spatial_feat=True, hist_feat=True, hog_feat=True)
        print("Car features extracted")
        notcar_features = extract_features(rescale_to_0_1(notcars_images), color_space=color_space,
                                           spatial_size=(spatial_size, spatial_size),
                                           hist_bins=hist_bins, hist_range=(hist_range_start, hist_range_end),
                                           orient=orient, pix_per_cell=pix_per_cell,
                                           cell_per_block=cell_per_block,
                                           hog_channel=hog_channel, block_norm=block_norm,
                                           transform_sqrt=transform_sqrt, vis=False, feature_vec=True,
                                           spatial_feat=True, hist_feat=True, hog_feat=True)
        print("NotCar features extracted")
        if len(car_features) > 0 and cars_notcars == "cars":
            X = np.vstack((car_features, notcar_features)).astype(np.float64)
            # Fit a per-column scaler
            X_scaler = StandardScaler().fit(X)
            # Apply the scaler to X
            scaled_X = X_scaler.transform(X)
            # Plot an example of raw and scaled features
            plt.close()
            fig = plt.figure(figsize=(12, 4))
            plt.subplot(131)
            plt.imshow(cars_images[car_index])
            plt.title('Original Image')
            plt.subplot(132)
            plt.plot(X[car_index].squeeze())
            plt.title('Raw Features')
            plt.subplot(133)
            plt.plot(scaled_X[car_index].squeeze())
            plt.title('Normalized Features')
            fig.tight_layout()
            all_features = plot_to_image()
            return all_features
        else:
            print("Features NOT found!!!")
            return converted_image

    def show_hist(self, converted_image, hist_bins, hist_range_end, hist_range_start, title=""):
        """Plot the colour-histogram feature vector and return the plot as an image."""
        hist_features = color_hist(converted_image, nbins=hist_bins, bins_range=(hist_range_start, hist_range_end))
        plt.close()
        plt.plot(hist_features.squeeze())
        plt.title(title)
        plt.ylim([0, np.max(hist_features)])
        hist_image = plot_to_image()
        print("Hist Number of features: ", len(hist_features))
        return hist_image

    def show_spatial(self, converted_image, spatial_size):
        """Plot the spatial-binning feature vector and return the plot as an image."""
        spatial_features = bin_spatial(converted_image, size=(spatial_size, spatial_size))
        plt.close()
        plt.plot(spatial_features.squeeze())
        plt.ylim([0, np.max(spatial_features)])
        spatial_image = plot_to_image()
        print("Spatial Number of features: ", len(spatial_features))
        return spatial_image

    def on_press(self, event):
        """Keyboard handler: ctrl+r restores defaults, ctrl+p prints values."""
        if event.key == "ctrl+r":
            self.on_reset()
        elif event.key == "ctrl+p":
            self.on_print()

    def on_print(self, args=None):
        """Print the current widget values in copy-pasteable form."""
        # NOTE: the template's whitespace is part of the printed output.
        print("""
        color_space_index = {}
        orient = {}
        pix_per_cell = {}
        cell_per_block = {}
        block_norm_index = {}
        transform_sqrt = {}
        spatial_size = {}
        hist_bins = {}
        hist_range_start = {}
        hist_range_end = {}
        hog_channel_index = {}
        """.format(self.colorspace_combobox.index, self.orient_slider.val, self.pix_per_cell_slider.val,
                   self.cell_per_block_slider.val, self.block_norm_combobox.index, self.transform_sqrt_checkbox.val,
                   self.spatial_size_slider.val, self.hist_bins_slider.val, self.hist_range_start_slider.val,
                   self.hist_range_end_slider.val, self.hog_channel_combobox.index
                   ))

    def on_reset(self, args=None):
        """Restore every widget to its module-level default and refresh the view."""
        print("Reset")
        self.update_combobox(self.colorspace_combobox, color_space_index)
        self.update_val(self.orient_slider, orient)
        self.update_val(self.pix_per_cell_slider, pix_per_cell)
        self.update_val(self.cell_per_block_slider, cell_per_block)
        self.update_combobox(self.block_norm_combobox, block_norm_index)
        self.update_checkbox(self.transform_sqrt_checkbox, transform_sqrt)
        self.update_val(self.spatial_size_slider, spatial_size)
        self.update_val(self.hist_bins_slider, hist_bins)
        self.update_val(self.hist_range_start_slider, hist_range_start)
        self.update_val(self.hist_range_end_slider, hist_range_end)
        self.update_combobox(self.hog_channel_combobox, hog_channel_index)
        # Re-run the image filter so the display reflects the restored values.
        self.plugin.filter_image()

    def show(self):
        """Open the viewer window (blocks until it is closed)."""
        self.viewer.show()

    def update_checkbox(self, comp, newval):
        """Set checkbox *comp* to *newval* and return it."""
        comp.val = newval
        return newval

    def update_combobox(self, comp, index):
        """Select option *index* in combobox *comp* and return the index."""
        comp.index = index
        return index

    def update_val(self, comp, newval):
        """Set slider widget *comp* to *newval*, sync its edit box, and return *newval*."""
        comp.val = newval
        comp.editbox.setText("%s" % newval)
        return newval
if __name__ == "__main__":
    # Launch the feature editor with default window scale and sample count.
    FeatureEditorUi().show()
|
{"/tools/window_search.py": ["/experiments.py"], "/experiments.py": ["/common_functions.py"], "/tools/RoiEditorUi.py": ["/experiments.py"], "/tools/ModelTestUi.py": ["/experiments.py"], "/tools/model_experiments.py": ["/experiments.py"], "/tools/FeatureEditorUi.py": ["/experiments.py"]}
|
29,107
|
volkodava/CarND-Vehicle-Detection
|
refs/heads/master
|
/common_functions.py
|
import io
import cv2
import matplotlib.image as mpimg
import matplotlib.pyplot as plt
import numpy as np
from PIL import Image
from mpl_toolkits.mplot3d import Axes3D
from skimage.feature import hog
def validate_images_shape(fnames, expected_shape):
    """Load every file in *fnames*, asserting each image matches *expected_shape*.

    Returns the list of loaded images in input order.
    """
    loaded = []
    for path in fnames:
        _, img = read_image(path)
        assert expected_shape == img.shape
        loaded.append(img)
    return loaded
def read_image(fname):
    """Read *fname* into a uint8 array; returns (fname, None) when *fname* is None."""
    if fname is None:
        return fname, None
    image = mpimg.imread(fname)
    if fname.endswith(".png"):
        # mpimg scales .png data to [0, 1]; bring it back to the 0-255 range.
        image = image * 255
    return fname, np.uint8(image)
def read_images(fnames):
    """Read many images at once; yields (filenames, images) as two zipped tuples."""
    assert isinstance(fnames, (list, tuple, np.ndarray)), "Files must be list/tuple/ndarray"
    return zip(*(read_image(fname) for fname in fnames))
def show_images(images, labels, cols, figsize=(16, 8), title=None):
    """Show *images* in a grid of *cols* columns, one label per image.

    Grayscale (2-D) images are drawn with the gray colormap; an optional
    *title* becomes the figure's suptitle. Blocks until the window is closed.
    """
    assert len(images) == len(labels)
    # Integer row count: the original float division ((len / cols) + 1) made
    # plt.subplot fail on Matplotlib >= 3.x, which requires integer subplot
    # geometry. Floor-divide to keep the historical layout.
    rows = (len(images) // cols) + 1
    plt.figure(figsize=figsize)
    for idx, image in enumerate(images):
        plt.subplot(rows, cols, idx + 1)
        image = image.squeeze()
        if len(image.shape) == 2:
            plt.imshow(image, cmap="gray")
        else:
            plt.imshow(image)
        plt.title(labels[idx])
        plt.axis("off")
    if title is not None:
        plt.suptitle(title, fontsize=16)
    plt.tight_layout(pad=3.0)
    plt.show()
def plot_to_image():
    """Render the current Matplotlib figure into an RGB uint8 array (H, W, 3)."""
    buffer = io.BytesIO()
    plt.savefig(buffer, format="png")
    buffer.seek(0)
    pil_image = Image.open(buffer).convert("RGB")
    buffer.close()
    width, height = pil_image.size
    # close plot before return to stop from adding more information from outer scope
    plt.close()
    return np.array(pil_image.getdata(), np.uint8).reshape(height, width, 3)
# Thin wrapper around skimage's hog() with this project's defaults.
def get_hog_features(img, orient, pix_per_cell, cell_per_block, block_norm="L2-Hys",
                     transform_sqrt=True, vis=False, feature_vec=True):
    """Compute HOG features for a single-channel image.

    Returns the feature array, or a (features, hog_image) pair when *vis* is
    True — skimage's hog() already returns the pair in that case, so one call
    covers both branches.
    """
    # `visualise` matches the skimage version pinned by this project;
    # newer releases spell it `visualize`.
    return hog(img, orientations=orient,
               pixels_per_cell=(pix_per_cell, pix_per_cell),
               cells_per_block=(cell_per_block, cell_per_block),
               block_norm=block_norm, transform_sqrt=transform_sqrt,
               visualise=vis, feature_vector=feature_vec)
# Spatially-binned colour feature: downsample the image and flatten it.
def bin_spatial(img, size=(32, 32)):
    """Resize *img* to *size* and return the flattened pixel values."""
    return cv2.resize(img, size).ravel()
# Colour-histogram feature.
# NEED TO CHANGE bins_range if reading .png files with mpimg!
def color_hist(img, nbins=32, bins_range=(0, 256)):
    """Concatenated per-channel histogram counts of a 3-channel image."""
    channel_counts = [
        np.histogram(img[:, :, channel], bins=nbins, range=bins_range)[0]
        for channel in range(3)
    ]
    return np.concatenate(channel_counts)
# Single-image variant of extract_features(): builds one feature vector from
# the optional spatial, histogram and HOG components.
def single_img_features(img, color_space="RGB", spatial_size=(32, 32),
                        hist_bins=32, hist_range=(0, 256), orient=9,
                        pix_per_cell=8, cell_per_block=2,
                        hog_channel="0", block_norm="L2-Hys",
                        transform_sqrt=True, vis=False, feature_vec=True,
                        spatial_feat=True, hist_feat=True, hog_feat=True):
    """Build the combined feature vector for one image.

    Each component (spatial binning, colour histogram, HOG) is included only
    when its *_feat flag is set; the parts are concatenated in that order.
    """
    # Convert out of RGB first so every component sees the same colour space.
    feature_image = convert_color(img, color_space)
    feature_parts = []
    if spatial_feat:
        feature_parts.append(bin_spatial(feature_image, size=spatial_size))
    if hist_feat:
        feature_parts.append(color_hist(feature_image, nbins=hist_bins, bins_range=hist_range))
    if hog_feat:
        if hog_channel == "ALL":
            # One HOG vector per channel, concatenated.
            hog_features = []
            for channel in range(feature_image.shape[2]):
                hog_features.extend(
                    get_hog_features(feature_image[:, :, channel],
                                     orient, pix_per_cell, cell_per_block, block_norm=block_norm,
                                     transform_sqrt=transform_sqrt, vis=vis, feature_vec=feature_vec))
        else:
            hog_features = get_hog_features(feature_image[:, :, int(hog_channel)], orient,
                                            pix_per_cell, cell_per_block, block_norm=block_norm,
                                            transform_sqrt=transform_sqrt, vis=vis, feature_vec=feature_vec)
        feature_parts.append(hog_features)
    return np.concatenate(feature_parts)
def convert_color(img, color_space):
    """Convert an RGB image to *color_space*.

    Returns a copy of the input for "RGB", the converted image for a known
    colour space, and None for an unrecognised name (matching the original
    fall-through behaviour).
    """
    if color_space == "RGB":
        return np.copy(img)
    conversion_codes = {
        "HSV": cv2.COLOR_RGB2HSV,
        "LUV": cv2.COLOR_RGB2LUV,
        "HLS": cv2.COLOR_RGB2HLS,
        "YUV": cv2.COLOR_RGB2YUV,
        "YCrCb": cv2.COLOR_RGB2YCrCb,
        "LAB": cv2.COLOR_RGB2LAB,
    }
    code = conversion_codes.get(color_space)
    if code is None:
        # Unknown colour space: preserve the legacy "no result" contract.
        return None
    return cv2.cvtColor(img, code)
# Batch variant of single_img_features(): one feature vector per input image.
def extract_features(imgs, color_space="RGB", spatial_size=(32, 32),
                     hist_bins=32, hist_range=(0, 256), orient=9,
                     pix_per_cell=8, cell_per_block=2,
                     hog_channel="0", block_norm="L2-Hys",
                     transform_sqrt=True, vis=False, feature_vec=True,
                     spatial_feat=True, hist_feat=True, hog_feat=True):
    """Return the list of feature vectors for *imgs* (see single_img_features)."""
    return [
        single_img_features(image, color_space=color_space, spatial_size=spatial_size,
                            hist_bins=hist_bins, hist_range=hist_range, orient=orient,
                            pix_per_cell=pix_per_cell, cell_per_block=cell_per_block,
                            hog_channel=hog_channel, block_norm=block_norm,
                            transform_sqrt=transform_sqrt, vis=vis, feature_vec=feature_vec,
                            spatial_feat=spatial_feat, hist_feat=hist_feat, hog_feat=hog_feat)
        for image in imgs
    ]
# Classify every candidate window (the output of slide_window()) and keep the hits.
def search_windows(image, windows, clf, scaler, sample_height, sample_width,
                   color_space="RGB", spatial_size=(32, 32), hist_bins=32,
                   hist_range=(0, 256), orient=9,
                   pix_per_cell=8, cell_per_block=2,
                   hog_channel="0", block_norm="L2-Hys",
                   transform_sqrt=True, vis=False, feature_vec=True,
                   spatial_feat=True, hist_feat=True, hog_feat=True):
    """Run classifier *clf* over *windows*; return the windows predicted positive.

    Each window is cropped from *image*, resized to the classifier's training
    sample size, converted to a scaled feature vector and scored.
    """
    positive_windows = []
    for window in windows:
        (x0, y0), (x1, y1) = window
        # Crop and rescale the patch to the training-sample dimensions.
        patch = cv2.resize(image[y0:y1, x0:x1], (sample_width, sample_height))
        features = single_img_features(patch, color_space=color_space,
                                       spatial_size=spatial_size,
                                       hist_bins=hist_bins, hist_range=hist_range,
                                       orient=orient, pix_per_cell=pix_per_cell,
                                       cell_per_block=cell_per_block,
                                       hog_channel=hog_channel, block_norm=block_norm,
                                       transform_sqrt=transform_sqrt, vis=vis, feature_vec=feature_vec,
                                       spatial_feat=spatial_feat, hist_feat=hist_feat, hog_feat=hog_feat)
        # The scaler expects a 2-D (1, n_features) array.
        scaled_features = scaler.transform(np.array(features).reshape(1, -1))
        if clf.predict(scaled_features) == 1:
            positive_windows.append(window)
    return positive_windows
# Define a function that takes an image,
# start and stop positions in both x and y,
# window size (x and y dimensions),
# and overlap fraction (for both x and y)
def slide_window(img, x_start_stop=(None, None), y_start_stop=(None, None),
                 xy_window=(64, 64), xy_overlap=(0.5, 0.5)):
    """Generate sliding-window coordinates over a region of *img*.

    Parameters
    ----------
    img : array with a .shape of at least (rows, cols)
        Only the shape is read; pixel data is never touched.
    x_start_stop, y_start_stop : pair of int or None
        Search region bounds; a None entry defaults to the image edge.
    xy_window : (int, int)
        Window width and height in pixels.
    xy_overlap : (float, float)
        Fractional overlap between adjacent windows in x and y.

    Returns
    -------
    list of ((startx, starty), (endx, endy)) window corner pairs.
    """
    # Resolve bounds into locals instead of writing back into the argument:
    # the original version mutated its mutable default lists, so a second
    # call with defaults silently reused the first image's dimensions.
    x_start = x_start_stop[0] if x_start_stop[0] is not None else 0
    x_stop = x_start_stop[1] if x_start_stop[1] is not None else img.shape[1]
    y_start = y_start_stop[0] if y_start_stop[0] is not None else 0
    y_stop = y_start_stop[1] if y_start_stop[1] is not None else img.shape[0]
    # Span of the region to be searched.
    xspan = x_stop - x_start
    yspan = y_stop - y_start
    # Pixels advanced per step in x/y.  Plain int() replaces np.int, which
    # was removed in NumPy 1.24.
    nx_pix_per_step = int(xy_window[0] * (1 - xy_overlap[0]))
    ny_pix_per_step = int(xy_window[1] * (1 - xy_overlap[1]))
    # Number of whole windows that fit in each direction.
    nx_buffer = int(xy_window[0] * xy_overlap[0])
    ny_buffer = int(xy_window[1] * xy_overlap[1])
    nx_windows = int((xspan - nx_buffer) / nx_pix_per_step)
    ny_windows = int((yspan - ny_buffer) / ny_pix_per_step)
    # Enumerate window positions row by row.  A plain double loop is kept
    # because downstream code consumes the windows one at a time anyway.
    window_list = []
    for ys in range(ny_windows):
        for xs in range(nx_windows):
            startx = xs * nx_pix_per_step + x_start
            endx = startx + xy_window[0]
            starty = ys * ny_pix_per_step + y_start
            endy = starty + xy_window[1]
            window_list.append(((startx, starty), (endx, endy)))
    return window_list
# Define a function to draw bounding boxes
def draw_boxes(img, bboxes, color=(0, 0, 255), thick=5, color_configs=None):
    """Return a copy of *img* with a rectangle drawn for every bbox.

    When *color_configs* is given it must map a (square) window side length
    to a color; each box is then colored by its own size instead of the
    fixed *color*.
    """
    canvas = np.copy(img)
    for p1, p2 in bboxes:
        box_color = color
        if color_configs is not None:
            # Boxes are ((startx, starty), (endx, endy)); size-keyed colors
            # only make sense for square windows, hence the assertion.
            width = abs(p1[0] - p2[0])
            height = abs(p1[1] - p2[1])
            assert width == height, "X:%s == Y:%s" % (width, height)
            box_color = color_configs[width]
        cv2.rectangle(canvas, p1, p2, box_color, thick)
    return canvas
def add_heat(heatmap, bbox_list):
    """Increment *heatmap* in place by 1 inside every box; return it.

    Each box takes the form ((x1, y1), (x2, y2)); overlapping boxes
    accumulate, producing higher heat where detections agree.
    """
    for (x1, y1), (x2, y2) in bbox_list:
        heatmap[y1:y2, x1:x2] += 1
    return heatmap
def apply_heat_threshold(heatmap, threshold):
    """Zero every heatmap cell at or below *threshold* (in place); return it."""
    cold = heatmap <= threshold
    heatmap[cold] = 0
    return heatmap
def group_bboxes(labels, window_size_threshold=(32, 32)):
    """Turn a label map into bounding boxes, discarding tiny ones.

    *labels* is a (label_array, count) pair as returned by
    scipy.ndimage.label(); presumably each label marks one detected car.
    Boxes narrower than window_size_threshold[0] or shorter than
    window_size_threshold[1] are dropped as noise.
    """
    min_w, min_h = window_size_threshold
    label_map, n_labels = labels[0], labels[1]
    boxes = []
    for idx in range(1, n_labels + 1):
        # Row/column coordinates of every pixel carrying this label.
        rows, cols = np.nonzero(label_map == idx)
        top_left = (np.min(cols), np.min(rows))
        bottom_right = (np.max(cols), np.max(rows))
        if (bottom_right[0] - top_left[0] >= min_w
                and bottom_right[1] - top_left[1] >= min_h):
            boxes.append((top_left, bottom_right))
    return boxes
def plot3d(pixels, colors_rgb,
           axis_labels=list("RGB"), axis_limits=((0, 255), (0, 255), (0, 255))):
    """Scatter-plot image pixels in a 3D color space; return the Axes3D."""
    figure = plt.figure(figsize=(8, 8))
    axes = Axes3D(figure)
    # Fix the viewing cube and label each color channel.
    for set_lim, limits in zip((axes.set_xlim, axes.set_ylim, axes.set_zlim),
                               axis_limits):
        set_lim(*limits)
    axes.tick_params(axis="both", which="major", labelsize=14, pad=8)
    for set_label, label in zip((axes.set_xlabel, axes.set_ylabel,
                                 axes.set_zlabel), axis_labels):
        set_label(label, fontsize=16, labelpad=16)
    # One point per pixel, colored by its own RGB value.
    axes.scatter(
        pixels[:, :, 0].ravel(),
        pixels[:, :, 1].ravel(),
        pixels[:, :, 2].ravel(),
        c=colors_rgb.reshape((-1, 3)), edgecolors="none")
    # Hand the Axes3D back for further manipulation by the caller.
    return axes
|
{"/tools/window_search.py": ["/experiments.py"], "/experiments.py": ["/common_functions.py"], "/tools/RoiEditorUi.py": ["/experiments.py"], "/tools/ModelTestUi.py": ["/experiments.py"], "/tools/model_experiments.py": ["/experiments.py"], "/tools/FeatureEditorUi.py": ["/experiments.py"]}
|
29,135
|
Rincmol/sam-backend-main
|
refs/heads/main
|
/Sam/migrations/0032_alter_customer_updated_at.py
|
# Generated by Django 3.2.7 on 2021-10-27 06:47
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('Sam', '0031_alter_customer_created_at'),
]
operations = [
migrations.AlterField(
model_name='customer',
name='updated_at',
field=models.DateField(null=True),
),
]
|
{"/Sam/forms.py": ["/Sam/models.py"], "/Sam/migrations/0007_employee.py": ["/Sam/models.py"], "/Sam/migrations/0006_job.py": ["/Sam/models.py"], "/Sam/views.py": ["/Sam/models.py", "/Sam/forms.py"], "/Sam/migrations/0001_initial.py": ["/Sam/models.py"]}
|
29,136
|
Rincmol/sam-backend-main
|
refs/heads/main
|
/Sam/migrations/0033_auto_20211028_1741.py
|
# Generated by Django 3.2.7 on 2021-10-28 12:11
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('Sam', '0032_alter_customer_updated_at'),
]
operations = [
migrations.AddField(
model_name='supplier',
name='created_at',
field=models.DateField(default=datetime.datetime.now, editable=False),
),
migrations.AddField(
model_name='supplier',
name='updated_at',
field=models.DateField(null=True),
),
]
|
{"/Sam/forms.py": ["/Sam/models.py"], "/Sam/migrations/0007_employee.py": ["/Sam/models.py"], "/Sam/migrations/0006_job.py": ["/Sam/models.py"], "/Sam/views.py": ["/Sam/models.py", "/Sam/forms.py"], "/Sam/migrations/0001_initial.py": ["/Sam/models.py"]}
|
29,137
|
Rincmol/sam-backend-main
|
refs/heads/main
|
/Sam/migrations/0014_credit_receipt_sales_return.py
|
# Generated by Django 3.2.3 on 2021-10-13 14:03
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('Sam', '0013_cash'),
]
operations = [
migrations.CreateModel(
name='Credit',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('invoice_number', models.TextField(max_length=100)),
('date', models.CharField(max_length=100)),
('internal_ref_no', models.TextField(max_length=100)),
('due_on', models.TextField(max_length=100)),
('user_id', models.TextField(max_length=100)),
('credit_limit_amt', models.TextField(max_length=100)),
('customer_id', models.TextField(max_length=100)),
('customer_name', models.TextField(max_length=100)),
('item_id1', models.TextField(max_length=100)),
('item_id2', models.TextField(max_length=100)),
('item_details1', models.TextField(max_length=100)),
('item_details2', models.TextField(max_length=100)),
('price1_1', models.TextField(max_length=100)),
('price1_2', models.TextField(max_length=100)),
('price2_1', models.TextField(max_length=100)),
('price2_2', models.TextField(max_length=100)),
('quantity1', models.TextField(max_length=100)),
('quantity2', models.TextField(max_length=100)),
('quantity3', models.TextField(max_length=100)),
('quantity4', models.TextField(max_length=100)),
('amount1', models.TextField(max_length=100)),
('amount2', models.TextField(max_length=100)),
('sales_ex1', models.TextField(max_length=100)),
('sales_ex2', models.TextField(max_length=100)),
('job1', models.TextField(max_length=100)),
('job2', models.TextField(max_length=100)),
('labour_charge', models.TextField(max_length=100)),
('other_charge', models.TextField(max_length=100)),
('total1', models.TextField(max_length=100)),
('total2', models.TextField(max_length=100)),
('total3', models.TextField(max_length=100)),
('total4', models.TextField(max_length=100)),
('total5', models.TextField(max_length=100)),
('total6', models.TextField(max_length=100)),
('discount', models.TextField(max_length=100)),
('tax', models.TextField(max_length=100)),
],
),
migrations.CreateModel(
name='Receipt',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('receipt_number', models.TextField(max_length=100)),
('date', models.CharField(max_length=100)),
('internal_ref_no', models.TextField(max_length=100)),
('due_on', models.TextField(max_length=100)),
('credit_limit_amt', models.TextField(max_length=100)),
('user_id', models.TextField(max_length=100)),
('customer_id', models.TextField(max_length=100)),
('customer_name', models.TextField(max_length=100)),
('si_no1', models.TextField(max_length=100)),
('si_no2', models.TextField(max_length=100)),
('si_no3', models.TextField(max_length=100)),
('invoice_no1', models.TextField(max_length=100)),
('invoice_no2', models.TextField(max_length=100)),
('invoice_no3', models.TextField(max_length=100)),
('invoice_date1', models.TextField(max_length=100)),
('invoice_date2', models.TextField(max_length=100)),
('invoice_date3', models.TextField(max_length=100)),
('duedate1', models.TextField(max_length=100)),
('duedate2', models.TextField(max_length=100)),
('duedate3', models.TextField(max_length=100)),
('invoice_amt1', models.TextField(max_length=100)),
('invoice_amt2', models.TextField(max_length=100)),
('invoice_amt3', models.TextField(max_length=100)),
('received_amt1', models.TextField(max_length=100)),
('received_amt2', models.TextField(max_length=100)),
('received_amt3', models.TextField(max_length=100)),
('outstanding1', models.TextField(max_length=100)),
('outstanding2', models.TextField(max_length=100)),
('outstanding3', models.TextField(max_length=100)),
('discount1', models.TextField(max_length=100)),
('discount2', models.TextField(max_length=100)),
('discount3', models.TextField(max_length=100)),
('balance_amt1', models.TextField(max_length=100)),
('balance_amt2', models.TextField(max_length=100)),
('balance_amt3', models.TextField(max_length=100)),
('tick_space1', models.TextField(max_length=100)),
('tick_space2', models.TextField(max_length=100)),
('tick_space3', models.TextField(max_length=100)),
('partial1', models.TextField(max_length=100)),
('partial2', models.TextField(max_length=100)),
('partial3', models.TextField(max_length=100)),
('total1', models.TextField(max_length=100)),
('total2', models.TextField(max_length=100)),
('total3', models.TextField(max_length=100)),
('total4', models.TextField(max_length=100)),
('total5', models.TextField(max_length=100)),
('total6', models.TextField(max_length=100)),
('on_account', models.TextField(max_length=100)),
('discount', models.TextField(max_length=100)),
],
),
migrations.CreateModel(
name='Sales_Return',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('invoice_number', models.TextField(max_length=100)),
('date', models.CharField(max_length=100)),
('internal_ref_no', models.TextField(max_length=100)),
('user_id', models.TextField(max_length=100)),
('customer_id', models.TextField(max_length=100)),
('customer_name', models.TextField(max_length=100)),
('item_id1', models.TextField(max_length=100)),
('item_id2', models.TextField(max_length=100)),
('item_details1', models.TextField(max_length=100)),
('item_details2', models.TextField(max_length=100)),
('price1_1', models.TextField(max_length=100)),
('price1_2', models.TextField(max_length=100)),
('price2_1', models.TextField(max_length=100)),
('price2_2', models.TextField(max_length=100)),
('quantity1', models.TextField(max_length=100)),
('quantity2', models.TextField(max_length=100)),
('quantity3', models.TextField(max_length=100)),
('quantity4', models.TextField(max_length=100)),
('amount1', models.TextField(max_length=100)),
('amount2', models.TextField(max_length=100)),
('sales_ex1', models.TextField(max_length=100)),
('sales_ex2', models.TextField(max_length=100)),
('job1', models.TextField(max_length=100)),
('job2', models.TextField(max_length=100)),
('labour_charge', models.TextField(max_length=100)),
('other_charge', models.TextField(max_length=100)),
('total1', models.TextField(max_length=100)),
('total2', models.TextField(max_length=100)),
('total3', models.TextField(max_length=100)),
('total4', models.TextField(max_length=100)),
('total5', models.TextField(max_length=100)),
('total6', models.TextField(max_length=100)),
('discount', models.TextField(max_length=100)),
('tax', models.TextField(max_length=100)),
],
),
]
|
{"/Sam/forms.py": ["/Sam/models.py"], "/Sam/migrations/0007_employee.py": ["/Sam/models.py"], "/Sam/migrations/0006_job.py": ["/Sam/models.py"], "/Sam/views.py": ["/Sam/models.py", "/Sam/forms.py"], "/Sam/migrations/0001_initial.py": ["/Sam/models.py"]}
|
29,138
|
Rincmol/sam-backend-main
|
refs/heads/main
|
/Sam/migrations/0019_item_statement_job_masterdata_job_statement_stock_adjustment_stock_balance_stock_masterdata.py
|
# Generated by Django 3.2.3 on 2021-10-25 11:51
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('Sam', '0018_ledger_journal_ledger_masterdata'),
]
operations = [
migrations.CreateModel(
name='Item_Statement',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.TextField(max_length=100)),
('item_id', models.TextField(max_length=100)),
('item_name', models.TextField(max_length=100)),
('period', models.TextField(max_length=100)),
],
),
migrations.CreateModel(
name='job_Masterdata',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.TextField(max_length=100)),
('reportdate', models.TextField(max_length=100)),
],
),
migrations.CreateModel(
name='job_Statement',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.TextField(max_length=100)),
('job', models.TextField(max_length=100)),
('job_id', models.TextField(max_length=100)),
('period', models.TextField(max_length=100)),
],
),
migrations.CreateModel(
name='Stock_Adjustment',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.TextField(max_length=100)),
('reportdate', models.TextField(max_length=100)),
],
),
migrations.CreateModel(
name='Stock_Balance',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.TextField(max_length=100)),
('reportdate', models.TextField(max_length=100)),
],
),
migrations.CreateModel(
name='Stock_Masterdata',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.TextField(max_length=100)),
('reportdate', models.TextField(max_length=100)),
],
),
]
|
{"/Sam/forms.py": ["/Sam/models.py"], "/Sam/migrations/0007_employee.py": ["/Sam/models.py"], "/Sam/migrations/0006_job.py": ["/Sam/models.py"], "/Sam/views.py": ["/Sam/models.py", "/Sam/forms.py"], "/Sam/migrations/0001_initial.py": ["/Sam/models.py"]}
|
29,139
|
Rincmol/sam-backend-main
|
refs/heads/main
|
/Sam/migrations/0011_auto_20210930_0943.py
|
# Generated by Django 3.2.3 on 2021-09-30 04:13
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('Sam', '0010_auto_20210929_1808'),
]
operations = [
migrations.AlterField(
model_name='item',
name='image1',
field=models.ImageField(null=True, upload_to='images/', verbose_name=''),
),
migrations.AlterField(
model_name='item',
name='image2',
field=models.ImageField(null=True, upload_to='images/', verbose_name=''),
),
migrations.AlterField(
model_name='item',
name='image3',
field=models.ImageField(null=True, upload_to='images/', verbose_name=''),
),
migrations.AlterField(
model_name='item',
name='image4',
field=models.ImageField(null=True, upload_to='images/', verbose_name=''),
),
]
|
{"/Sam/forms.py": ["/Sam/models.py"], "/Sam/migrations/0007_employee.py": ["/Sam/models.py"], "/Sam/migrations/0006_job.py": ["/Sam/models.py"], "/Sam/views.py": ["/Sam/models.py", "/Sam/forms.py"], "/Sam/migrations/0001_initial.py": ["/Sam/models.py"]}
|
29,140
|
Rincmol/sam-backend-main
|
refs/heads/main
|
/Sam/migrations/0030_alter_supplier_masterdata_created_at.py
|
# Generated by Django 3.2.7 on 2021-10-27 05:24
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('Sam', '0029_alter_supplier_masterdata_updated_at'),
]
operations = [
migrations.AlterField(
model_name='supplier_masterdata',
name='created_at',
field=models.DateField(auto_now_add=True),
),
]
|
{"/Sam/forms.py": ["/Sam/models.py"], "/Sam/migrations/0007_employee.py": ["/Sam/models.py"], "/Sam/migrations/0006_job.py": ["/Sam/models.py"], "/Sam/views.py": ["/Sam/models.py", "/Sam/forms.py"], "/Sam/migrations/0001_initial.py": ["/Sam/models.py"]}
|
29,141
|
Rincmol/sam-backend-main
|
refs/heads/main
|
/Sam/forms.py
|
from django import forms
from .models import Item, Job, Receipt
class ItemForm(forms.ModelForm):
    """ModelForm exposing every field of the Item model."""
    class Meta:
        model = Item
        fields ="__all__"
class JobForm(forms.ModelForm):
    """ModelForm exposing every field of the Job model."""
    class Meta:
        model = Job
        fields ="__all__"
class SalesForm(forms.ModelForm):
    """ModelForm exposing every field of the Receipt model."""
    class Meta:
        model = Receipt
        fields ="__all__"
|
{"/Sam/forms.py": ["/Sam/models.py"], "/Sam/migrations/0007_employee.py": ["/Sam/models.py"], "/Sam/migrations/0006_job.py": ["/Sam/models.py"], "/Sam/views.py": ["/Sam/models.py", "/Sam/forms.py"], "/Sam/migrations/0001_initial.py": ["/Sam/models.py"]}
|
29,142
|
Rincmol/sam-backend-main
|
refs/heads/main
|
/Sam/migrations/0028_auto_20211026_1748.py
|
# Generated by Django 3.2.7 on 2021-10-26 12:18
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('Sam', '0027_auto_20211026_1647'),
]
operations = [
migrations.AlterField(
model_name='supplier_masterdata',
name='created_at',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='supplier_masterdata',
name='updated_at',
field=models.DateTimeField(auto_now=True, default=django.utils.timezone.now),
preserve_default=False,
),
]
|
{"/Sam/forms.py": ["/Sam/models.py"], "/Sam/migrations/0007_employee.py": ["/Sam/models.py"], "/Sam/migrations/0006_job.py": ["/Sam/models.py"], "/Sam/views.py": ["/Sam/models.py", "/Sam/forms.py"], "/Sam/migrations/0001_initial.py": ["/Sam/models.py"]}
|
29,143
|
Rincmol/sam-backend-main
|
refs/heads/main
|
/Sam/migrations/0012_asset_expences_income_liabilities.py
|
# Generated by Django 3.2.3 on 2021-10-12 10:30
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('Sam', '0011_auto_20210930_0943'),
]
operations = [
migrations.CreateModel(
name='Asset',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('asset_parent', models.TextField(max_length=100)),
('asset_child', models.TextField(max_length=100)),
],
),
migrations.CreateModel(
name='Expences',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('expenses_parent', models.TextField(max_length=100)),
('expenses_child', models.TextField(max_length=100)),
],
),
migrations.CreateModel(
name='Income',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('income_parent', models.TextField(max_length=100)),
('income_child', models.TextField(max_length=100)),
],
),
migrations.CreateModel(
name='Liabilities',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('liability_parent', models.TextField(max_length=100)),
('liability_child', models.TextField(max_length=100)),
],
),
]
|
{"/Sam/forms.py": ["/Sam/models.py"], "/Sam/migrations/0007_employee.py": ["/Sam/models.py"], "/Sam/migrations/0006_job.py": ["/Sam/models.py"], "/Sam/views.py": ["/Sam/models.py", "/Sam/forms.py"], "/Sam/migrations/0001_initial.py": ["/Sam/models.py"]}
|
29,144
|
Rincmol/sam-backend-main
|
refs/heads/main
|
/Sam/models.py
|
from django.db import models
import datetime
import os
# Create your models here.
def filepath(request, filename):
    """Build a timestamped upload path for *filename*.

    Prefixes the original name with the current timestamp so repeated
    uploads of the same file do not collide, and roots the result under
    'uploads/'.  *request* is accepted for the Django upload_to callback
    signature but is not used.
    """
    old_fname = filename
    # NOTE(review): '%Y%m%d%H%M%s' ends with lowercase '%s' (platform-
    # dependent epoch seconds); '%S' was probably intended -- confirm
    # before changing the stored-filename format.
    time_now = datetime.datetime.now().strftime('%Y%m%d%H%M%s')
    # Bug fix: the original line used a comma -- "%s%s", (timeNow, ...)
    # -- which built a (str, tuple) tuple instead of formatting, so
    # os.path.join raised TypeError on every upload.
    filename = "%s%s" % (time_now, old_fname)
    return os.path.join('uploads/', filename)
class Item(models.Model):
    """Inventory item master record.

    Prices, quantities and units are stored as free text rather than
    numeric fields; validation happens (if anywhere) in the forms/views.
    """
    item_name = models.TextField(max_length=100)
    item_desc = models.TextField(max_length=500, null=True)
    item_barcode = models.TextField(max_length=50)
    item_category = models.TextField(max_length=50)
    # Primary and secondary units of measure for the item.
    item_unit_prim = models.TextField(max_length=100)
    item_unit_sec = models.TextField(max_length=100)
    open_balance = models.TextField(max_length=100)
    buying_price = models.TextField(max_length=50)
    sell_price = models.TextField(max_length=50)
    # Up to four product photos, all optional.
    image1 = models.ImageField(upload_to='images/', null=True,verbose_name='')
    image2 = models.ImageField(upload_to='images/', null=True,verbose_name='')
    image3 = models.ImageField(upload_to='images/', null=True,verbose_name='')
    image4 = models.ImageField(upload_to='images/', null=True,verbose_name='')
class Customer(models.Model):
customer_name = models.TextField(max_length=100)
vat_reg_no = models.TextField(max_length=100)
cr_no = models.TextField(max_length=100)
expired_on = models.TextField(max_length=100)
land_phone = models.TextField(max_length=100)
mobile = models.TextField(max_length=100)
contact_person = models.TextField(max_length=100)
contact_mobile = models.TextField(max_length=100)
email = models.TextField(max_length=100)
address = models.TextField(max_length=100)
open_balance = models.TextField(max_length=100)
credit_lim_am = models.TextField(max_length=100)
credit_lim_dur = models.TextField(max_length=100)
created_at = models.DateField(default=datetime.datetime.now,editable=False)
updated_at = models.DateField(null=True)
class Supplier(models.Model):
customer_name = models.TextField(max_length=100)
vat_reg_no = models.TextField(max_length=100)
cr_no = models.TextField(max_length=100)
expired_on = models.TextField(max_length=100)
land_phone = models.TextField(max_length=100)
mobile = models.TextField(max_length=100)
contact_person = models.TextField(max_length=100)
contact_mobile = models.TextField(max_length=100)
email = models.TextField(max_length=100)
address = models.TextField(max_length=100)
open_balance = models.TextField(max_length=100)
credit_lim_am = models.TextField(max_length=100)
credit_lim_dur = models.TextField(max_length=100)
bank_acc_name = models.TextField(max_length=100)
bank_acc_no = models.TextField(max_length=100)
created_at = models.DateField(default=datetime.datetime.now,editable=False)
updated_at = models.DateField(null=True)
class User(models.Model):
mobile_no = models.TextField(max_length=100)
username = models.TextField(max_length=100)
password = models.TextField(max_length=100)
class Login(models.Model):
username = models.TextField(max_length=100)
password = models.TextField(max_length=100)
class Job(models.Model):
job_name = models.TextField(max_length=100)
job_desc = models.TextField(max_length=500,null=True)
imag1 = models.ImageField(upload_to='images/', null=True,blank=True)
imag2 = models.ImageField(upload_to='images/', null=True,blank=True)
imag3 = models.ImageField(upload_to='images/', null=True,blank=True)
imag4 = models.ImageField(upload_to='images/', null=True,blank=True)
class Employee(models.Model):
emp_name = models.TextField(max_length=100)
nationality = models.TextField(max_length=100)
birth_date = models.TextField(max_length=100)
joining_date = models.TextField(max_length=100)
designation = models.TextField(max_length=100)
department = models.TextField(max_length=100)
salary_categ = models.TextField(max_length=100)
passport_no = models.TextField(max_length=100)
expir = models.TextField(max_length=100)
id_no = models.TextField(max_length=100)
id_expir = models.TextField(max_length=100)
img1 = models.ImageField(upload_to='images/', null=True,blank=True)
img2 = models.ImageField(upload_to='images/', null=True,blank=True)
img3 = models.ImageField(upload_to='images/', null=True,blank=True)
img4 = models.ImageField(upload_to='images/', null=True,blank=True)
basic = models.TextField(max_length=100)
housing = models.TextField(max_length=100)
transportation = models.TextField(max_length=100)
food = models.TextField(max_length=100)
mobile = models.TextField(max_length=100)
other = models.TextField(max_length=100)
netpay = models.TextField(max_length=100)
class Group(models.Model):
group_name = models.TextField(max_length=100)
category = models.TextField(max_length=100)
class Ledger(models.Model):
ledger_name = models.TextField(max_length=100)
group_name = models.TextField(max_length=100)
category = models.TextField(max_length=100)
opening_bal = models.TextField(max_length=100)
class Asset(models.Model):
asset_parent = models.TextField(max_length=100)
asset_child = models.TextField(max_length=100)
class Liabilities(models.Model):
liability_parent = models.TextField(max_length=100)
liability_child = models.TextField(max_length=100)
class Income(models.Model):
income_parent = models.TextField(max_length=100)
income_child = models.TextField(max_length=100)
class Expences(models.Model):
expenses_parent = models.TextField(max_length=100)
expenses_child = models.TextField(max_length=100)
class Cash(models.Model):
invoice_number = models.TextField(max_length=100)
date = models.CharField(max_length=100)
internal_ref_no = models.TextField(max_length=100)
cash = models.TextField(max_length=100)
user_id = models.TextField(max_length=100)
account = models.TextField(max_length=100)
customer_id = models.TextField(max_length=100)
customer_name = models.TextField(max_length=100)
item_id1 = models.TextField(max_length=100)
item_id2 = models.TextField(max_length=100)
item_details1 = models.TextField(max_length=100)
item_details2 = models.TextField(max_length=100)
price1_1 = models.TextField(max_length=100)
price1_2 = models.TextField(max_length=100)
price2_1 = models.TextField(max_length=100)
price2_2 = models.TextField(max_length=100)
quantity1 = models.TextField(max_length=100)
quantity2 = models.TextField(max_length=100)
quantity3 = models.TextField(max_length=100)
quantity4 = models.TextField(max_length=100)
amount1 = models.TextField(max_length=100)
amount2 = models.TextField(max_length=100)
sales_ex1 = models.TextField(max_length=100)
sales_ex2 = models.TextField(max_length=100)
job1 = models.TextField(max_length=100)
job2 = models.TextField(max_length=100)
labour_charge = models.TextField(max_length=100)
other_charge = models.TextField(max_length=100)
total1 = models.TextField(max_length=100)
total2 = models.TextField(max_length=100)
total3 = models.TextField(max_length=100)
total4 = models.TextField(max_length=100)
total5 = models.TextField(max_length=100)
total6 = models.TextField(max_length=100)
discount = models.TextField(max_length=100)
tax = models.TextField(max_length=100)
class Credit(models.Model):
invoice_number = models.TextField(max_length=100)
date = models.CharField(max_length=100)
internal_ref_no = models.TextField(max_length=100)
due_on = models.TextField(max_length=100)
user_id = models.TextField(max_length=100)
credit_limit_amt = models.TextField(max_length=100)
customer_id = models.TextField(max_length=100)
customer_name = models.TextField(max_length=100)
item_id1 = models.TextField(max_length=100)
item_id2 = models.TextField(max_length=100)
item_details1 = models.TextField(max_length=100)
item_details2 = models.TextField(max_length=100)
price1_1 = models.TextField(max_length=100)
price1_2 = models.TextField(max_length=100)
price2_1 = models.TextField(max_length=100)
price2_2 = models.TextField(max_length=100)
quantity1 = models.TextField(max_length=100)
quantity2 = models.TextField(max_length=100)
quantity3 = models.TextField(max_length=100)
quantity4 = models.TextField(max_length=100)
amount1 = models.TextField(max_length=100)
amount2 = models.TextField(max_length=100)
sales_ex1 = models.TextField(max_length=100)
sales_ex2 = models.TextField(max_length=100)
job1 = models.TextField(max_length=100)
job2 = models.TextField(max_length=100)
labour_charge = models.TextField(max_length=100)
other_charge = models.TextField(max_length=100)
total1 = models.TextField(max_length=100)
total2 = models.TextField(max_length=100)
total3 = models.TextField(max_length=100)
total4 = models.TextField(max_length=100)
total5 = models.TextField(max_length=100)
total6 = models.TextField(max_length=100)
discount = models.TextField(max_length=100)
tax = models.TextField(max_length=100)
class Sales_Return(models.Model):
invoice_number = models.TextField(max_length=100)
date = models.CharField(max_length=100)
internal_ref_no = models.TextField(max_length=100)
user_id = models.TextField(max_length=100)
customer_id = models.TextField(max_length=100)
customer_name = models.TextField(max_length=100)
item_id1 = models.TextField(max_length=100)
item_id2 = models.TextField(max_length=100)
item_details1 = models.TextField(max_length=100)
item_details2 = models.TextField(max_length=100)
price1_1 = models.TextField(max_length=100)
price1_2 = models.TextField(max_length=100)
price2_1 = models.TextField(max_length=100)
price2_2 = models.TextField(max_length=100)
quantity1 = models.TextField(max_length=100)
quantity2 = models.TextField(max_length=100)
quantity3 = models.TextField(max_length=100)
quantity4 = models.TextField(max_length=100)
amount1 = models.TextField(max_length=100)
amount2 = models.TextField(max_length=100)
sales_ex1 = models.TextField(max_length=100)
sales_ex2 = models.TextField(max_length=100)
job1 = models.TextField(max_length=100)
job2 = models.TextField(max_length=100)
labour_charge = models.TextField(max_length=100)
other_charge = models.TextField(max_length=100)
total1 = models.TextField(max_length=100)
total2 = models.TextField(max_length=100)
total3 = models.TextField(max_length=100)
total4 = models.TextField(max_length=100)
total5 = models.TextField(max_length=100)
total6 = models.TextField(max_length=100)
discount = models.TextField(max_length=100)
tax = models.TextField(max_length=100)
class Receipt(models.Model):
receipt_number = models.TextField(max_length=100)
date = models.CharField(max_length=100)
internal_ref_no = models.TextField(max_length=100)
due_on = models.TextField(max_length=100)
credit_limit_amt = models.TextField(max_length=100)
user_id = models.TextField(max_length=100)
customer_id = models.TextField(max_length=100)
customer_name = models.TextField(max_length=100)
si_no1 = models.TextField(max_length=100)
si_no2 = models.TextField(max_length=100)
si_no3 = models.TextField(max_length=100)
invoice_no1 = models.TextField(max_length=100)
invoice_no2 = models.TextField(max_length=100)
invoice_no3 = models.TextField(max_length=100)
invoice_date1 = models.TextField(max_length=100)
invoice_date2 = models.TextField(max_length=100)
invoice_date3 = models.TextField(max_length=100)
duedate1 = models.TextField(max_length=100)
duedate2 = models.TextField(max_length=100)
duedate3 = models.TextField(max_length=100)
invoice_amt1 = models.TextField(max_length=100)
invoice_amt2 = models.TextField(max_length=100)
invoice_amt3 = models.TextField(max_length=100)
received_amt1 = models.TextField(max_length=100)
received_amt2 = models.TextField(max_length=100)
received_amt3 = models.TextField(max_length=100)
outstanding1 = models.TextField(max_length=100)
outstanding2 = models.TextField(max_length=100)
outstanding3 = models.TextField(max_length=100)
discount1 = models.TextField(max_length=100)
discount2 = models.TextField(max_length=100)
discount3 = models.TextField(max_length=100)
balance_amt1 = models.TextField(max_length=100)
balance_amt2 = models.TextField(max_length=100)
balance_amt3 = models.TextField(max_length=100)
tick_space1 = models.TextField(max_length=100)
tick_space2 = models.TextField(max_length=100)
tick_space3 = models.TextField(max_length=100)
partial1 = models.TextField(max_length=100)
partial2 = models.TextField(max_length=100)
partial3 = models.TextField(max_length=100)
total1 = models.TextField(max_length=100)
total2 = models.TextField(max_length=100)
total3 = models.TextField(max_length=100)
total4 = models.TextField(max_length=100)
total5 = models.TextField(max_length=100)
total6 = models.TextField(max_length=100)
on_account = models.TextField(max_length=100)
discount = models.TextField(max_length=100)
class PCash(models.Model):
    """Purchase cash invoice.

    Header data plus two denormalized line-item rows (suffix 1/2) stored as
    flat columns instead of a related line-item table.

    NOTE(review): every column is free text; ``max_length`` on a TextField is
    not enforced at the database level — CharField(max_length=100) is
    presumably intended. Confirm before changing (requires a migration).
    """
    # --- invoice header ---
    invoice_number = models.TextField(max_length=100)
    date = models.CharField(max_length=100)  # only CharField in this model; inconsistent with the rest
    internal_ref_no = models.TextField(max_length=100)
    cash = models.TextField(max_length=100)
    user_id = models.TextField(max_length=100)
    account = models.TextField(max_length=100)
    # --- supplier ---
    supp_id = models.TextField(max_length=100)
    supp_name = models.TextField(max_length=100)
    # --- flattened line items (rows 1 and 2) ---
    item_id1 = models.TextField(max_length=100)
    item_id2 = models.TextField(max_length=100)
    item_details1 = models.TextField(max_length=100)
    item_details2 = models.TextField(max_length=100)
    price1_1 = models.TextField(max_length=100)
    price1_2 = models.TextField(max_length=100)
    price2_1 = models.TextField(max_length=100)
    price2_2 = models.TextField(max_length=100)
    quantity1 = models.TextField(max_length=100)
    quantity2 = models.TextField(max_length=100)
    quantity3 = models.TextField(max_length=100)
    quantity4 = models.TextField(max_length=100)
    amount1 = models.TextField(max_length=100)
    amount2 = models.TextField(max_length=100)
    sales_ex1 = models.TextField(max_length=100)
    sales_ex2 = models.TextField(max_length=100)
    job1 = models.TextField(max_length=100)
    job2 = models.TextField(max_length=100)
    # --- charges and totals ---
    labour_charge = models.TextField(max_length=100)
    other_charge = models.TextField(max_length=100)
    total1 = models.TextField(max_length=100)
    total2 = models.TextField(max_length=100)
    total3 = models.TextField(max_length=100)
    total4 = models.TextField(max_length=100)
    total5 = models.TextField(max_length=100)
    total6 = models.TextField(max_length=100)
    discount = models.TextField(max_length=100)
    tax = models.TextField(max_length=100)
class PCredit(models.Model):
    """Purchase credit invoice.

    Same flattened two-row line-item layout as PCash, plus credit terms
    (``due_on``, ``credit_limit_amt``). All columns are free text.
    """
    # --- invoice header ---
    invoice_number = models.TextField(max_length=100)
    date = models.CharField(max_length=100)  # only CharField in this model; inconsistent with the rest
    internal_ref_no = models.TextField(max_length=100)
    due_on = models.TextField(max_length=100)
    user_id = models.TextField(max_length=100)
    credit_limit_amt = models.TextField(max_length=100)
    # --- supplier ---
    supp_id = models.TextField(max_length=100)
    supp_name = models.TextField(max_length=100)
    # --- flattened line items (rows 1 and 2) ---
    item_id1 = models.TextField(max_length=100)
    item_id2 = models.TextField(max_length=100)
    item_details1 = models.TextField(max_length=100)
    item_details2 = models.TextField(max_length=100)
    price1_1 = models.TextField(max_length=100)
    price1_2 = models.TextField(max_length=100)
    price2_1 = models.TextField(max_length=100)
    price2_2 = models.TextField(max_length=100)
    quantity1 = models.TextField(max_length=100)
    quantity2 = models.TextField(max_length=100)
    quantity3 = models.TextField(max_length=100)
    quantity4 = models.TextField(max_length=100)
    amount1 = models.TextField(max_length=100)
    amount2 = models.TextField(max_length=100)
    sales_ex1 = models.TextField(max_length=100)
    sales_ex2 = models.TextField(max_length=100)
    job1 = models.TextField(max_length=100)
    job2 = models.TextField(max_length=100)
    # --- charges and totals ---
    labour_charge = models.TextField(max_length=100)
    other_charge = models.TextField(max_length=100)
    total1 = models.TextField(max_length=100)
    total2 = models.TextField(max_length=100)
    total3 = models.TextField(max_length=100)
    total4 = models.TextField(max_length=100)
    total5 = models.TextField(max_length=100)
    total6 = models.TextField(max_length=100)
    discount = models.TextField(max_length=100)
    tax = models.TextField(max_length=100)
class PRSales_Return(models.Model):
    """Purchase (sales-return) document.

    Field layout mirrors PCredit — header with credit terms plus two
    flattened line-item rows. All columns are free text.
    """
    # --- document header ---
    invoice_number = models.TextField(max_length=100)
    date = models.CharField(max_length=100)  # only CharField in this model; inconsistent with the rest
    internal_ref_no = models.TextField(max_length=100)
    user_id = models.TextField(max_length=100)
    due_on = models.TextField(max_length=100)
    credit_limit_amt = models.TextField(max_length=100)
    # --- supplier ---
    supp_id = models.TextField(max_length=100)
    supp_name = models.TextField(max_length=100)
    # --- flattened line items (rows 1 and 2) ---
    item_id1 = models.TextField(max_length=100)
    item_id2 = models.TextField(max_length=100)
    item_details1 = models.TextField(max_length=100)
    item_details2 = models.TextField(max_length=100)
    price1_1 = models.TextField(max_length=100)
    price1_2 = models.TextField(max_length=100)
    price2_1 = models.TextField(max_length=100)
    price2_2 = models.TextField(max_length=100)
    quantity1 = models.TextField(max_length=100)
    quantity2 = models.TextField(max_length=100)
    quantity3 = models.TextField(max_length=100)
    quantity4 = models.TextField(max_length=100)
    amount1 = models.TextField(max_length=100)
    amount2 = models.TextField(max_length=100)
    sales_ex1 = models.TextField(max_length=100)
    sales_ex2 = models.TextField(max_length=100)
    job1 = models.TextField(max_length=100)
    job2 = models.TextField(max_length=100)
    # --- charges and totals ---
    labour_charge = models.TextField(max_length=100)
    other_charge = models.TextField(max_length=100)
    total1 = models.TextField(max_length=100)
    total2 = models.TextField(max_length=100)
    total3 = models.TextField(max_length=100)
    total4 = models.TextField(max_length=100)
    total5 = models.TextField(max_length=100)
    total6 = models.TextField(max_length=100)
    discount = models.TextField(max_length=100)
    tax = models.TextField(max_length=100)
class PReceipt(models.Model):
    """Purchase (supplier payment) receipt.

    Header data plus three denormalized invoice-allocation rows (suffix
    1/2/3): each row records one invoice being settled by this receipt.
    All columns are free text.
    """
    # --- receipt header ---
    receipt_number = models.TextField(max_length=100)
    date = models.CharField(max_length=100)  # only CharField in this model; inconsistent with the rest
    internal_ref_no = models.TextField(max_length=100)
    due_on = models.TextField(max_length=100)
    credit_limit_amt = models.TextField(max_length=100)
    user_id = models.TextField(max_length=100)
    # --- supplier ---
    supp_id = models.TextField(max_length=100)
    supp_name = models.TextField(max_length=100)
    # --- flattened invoice-allocation rows (1..3) ---
    si_no1 = models.TextField(max_length=100)
    si_no2 = models.TextField(max_length=100)
    si_no3 = models.TextField(max_length=100)
    invoice_no1 = models.TextField(max_length=100)
    invoice_no2 = models.TextField(max_length=100)
    invoice_no3 = models.TextField(max_length=100)
    invoice_date1 = models.TextField(max_length=100)
    invoice_date2 = models.TextField(max_length=100)
    invoice_date3 = models.TextField(max_length=100)
    duedate1 = models.TextField(max_length=100)
    duedate2 = models.TextField(max_length=100)
    duedate3 = models.TextField(max_length=100)
    invoice_amt1 = models.TextField(max_length=100)
    invoice_amt2 = models.TextField(max_length=100)
    invoice_amt3 = models.TextField(max_length=100)
    received_amt1 = models.TextField(max_length=100)
    received_amt2 = models.TextField(max_length=100)
    received_amt3 = models.TextField(max_length=100)
    outstanding1 = models.TextField(max_length=100)
    outstanding2 = models.TextField(max_length=100)
    outstanding3 = models.TextField(max_length=100)
    discount1 = models.TextField(max_length=100)
    discount2 = models.TextField(max_length=100)
    discount3 = models.TextField(max_length=100)
    balance_amt1 = models.TextField(max_length=100)
    balance_amt2 = models.TextField(max_length=100)
    balance_amt3 = models.TextField(max_length=100)
    tick_space1 = models.TextField(max_length=100)
    tick_space2 = models.TextField(max_length=100)
    tick_space3 = models.TextField(max_length=100)
    partial1 = models.TextField(max_length=100)
    partial2 = models.TextField(max_length=100)
    partial3 = models.TextField(max_length=100)
    # --- totals and summary ---
    total1 = models.TextField(max_length=100)
    total2 = models.TextField(max_length=100)
    total3 = models.TextField(max_length=100)
    total4 = models.TextField(max_length=100)
    total5 = models.TextField(max_length=100)
    total6 = models.TextField(max_length=100)
    on_account = models.TextField(max_length=100)
    discount = models.TextField(max_length=100)
# Report-parameter models: each of these persists the filter parameters a
# user submitted for one report type (they do not store report results).
# NOTE(review): max_length on a TextField is not enforced at the database
# level; CharField is presumably intended for all of these columns.
class Ledger_Statement(models.Model):
    """Parameters of a ledger-statement report request."""
    date = models.TextField(max_length=100)
    ledger_name = models.TextField(max_length=100)
    ledger_id = models.TextField(max_length=100)
    period = models.TextField(max_length=100)
class Ledger_Journal(models.Model):
    """Parameters of an all-journal-entries report request."""
    date = models.TextField(max_length=100)
    reportdate = models.TextField(max_length=100)
class Ledger_Masterdata(models.Model):
    """Parameters of a ledger master-data report request."""
    date = models.TextField(max_length=100)
    reportdate = models.TextField(max_length=100)
class Stock_Balance(models.Model):
    """Parameters of a stock-balance report request."""
    date = models.TextField(max_length=100)
    reportdate = models.TextField(max_length=100)
class Item_Statement(models.Model):
    """Parameters of an item-statement report request."""
    date = models.TextField(max_length=100)
    item_id = models.TextField(max_length=100)
    item_name = models.TextField(max_length=100)
    period = models.TextField(max_length=100)
class Stock_Adjustment(models.Model):
    """Parameters of a stock-adjustment report request."""
    date = models.TextField(max_length=100)
    reportdate = models.TextField(max_length=100)
class Stock_Masterdata(models.Model):
    """Parameters of a stock master-data report request."""
    date = models.TextField(max_length=100)
    reportdate = models.TextField(max_length=100)
class job_Statement(models.Model):
    """Parameters of a job-statement report request."""
    # NOTE(review): lowercase class name breaks PEP 8 PascalCase; renaming
    # would require a migration and caller updates, so left as-is.
    date = models.TextField(max_length=100)
    job = models.TextField(max_length=100)
    job_id = models.TextField(max_length=100)
    period = models.TextField(max_length=100)
class job_Masterdata(models.Model):
    """Parameters of a job master-data report request."""
    date = models.TextField(max_length=100)
    reportdate = models.TextField(max_length=100)
# Customer-report parameter models (filter parameters only, no results).
class Customer_Statement(models.Model):
    """Parameters of a customer account-statement report request."""
    date = models.TextField(max_length=100)
    report_period = models.TextField(max_length=100)
    customer_name = models.TextField(max_length=100)
    customer_id = models.TextField(max_length=100)
class Customer_Outstand(models.Model):
    """Parameters of a customer-outstanding report request."""
    date = models.TextField(max_length=100)
    report_date = models.TextField(max_length=100)
    customer_name = models.TextField(max_length=100)
    customer_id = models.TextField(max_length=100)
class Customer_Invoice(models.Model):
    """Parameters of a customer invoice-history report request."""
    report_date = models.TextField(max_length=100)
    invoice_no = models.TextField(max_length=100)
    customer_id = models.TextField(max_length=100)
    customer_name = models.TextField(max_length=100)
class Customer_Receipt(models.Model):
    """Parameters of a customer receipt-history report request."""
    customer_id = models.TextField(max_length=100)
    report_date = models.TextField(max_length=100)
    customer_name = models.TextField(max_length=100)
    receipt_no = models.TextField(max_length=100)
class Customer_Invoice_Receipt(models.Model):
    """Parameters of a customer invoice/receipts-register report request."""
    date = models.TextField(max_length=100)
    report_date = models.TextField(max_length=100)
class Customer_Masterdata(models.Model):
    """Parameters of a customer master-data report request."""
    date = models.TextField(max_length=100)
    report_date = models.TextField(max_length=100)
# Supplier-report parameter models (filter parameters only, no results).
class Supplier_Statement(models.Model):
    """Parameters of a supplier account-statement report request."""
    # NOTE(review): capitalised field names are inconsistent with the
    # customer_* models above; renaming would require a migration.
    Supplier_name = models.TextField(max_length=100)
    Supplier_id = models.TextField(max_length=100)
    date = models.TextField(max_length=100)
    report_period = models.TextField(max_length=100)
class Supplier_Outstand(models.Model):
    """Parameters of a supplier-outstanding report request."""
    date = models.TextField(max_length=100)
    report_date = models.TextField(max_length=100)
    Supplier_name = models.TextField(max_length=100)
    Supplier_id = models.TextField(max_length=100)
class Supplier_Invoice(models.Model):
    """Parameters of a supplier invoice-history report request."""
    report_date = models.TextField(max_length=100)
    invoice_no = models.TextField(max_length=100)
    Supplier_name = models.TextField(max_length=100)
    Supplier_id = models.TextField(max_length=100)
class payment_History(models.Model):
    """Parameters of a supplier payment-history report request."""
    Supplier_name = models.TextField(max_length=100)
    Supplier_id = models.TextField(max_length=100)
    report_date = models.TextField(max_length=100)
    voucher_no = models.TextField(max_length=100)
class Supplier_Invoice_Receipt(models.Model):
    """Parameters of a supplier invoice/receipts-register report request."""
    date = models.TextField(max_length=100)
    report_date = models.TextField(max_length=100)
class Supplier_Masterdata(models.Model):
    """Parameters of a supplier master-data report request."""
    date = models.TextField(max_length=100)
    report_date = models.TextField(max_length=100)
    # created_at is set once on insert; updated_at is nullable and, as
    # written, is never auto-updated (no auto_now) — callers must set it.
    created_at = models.DateField(auto_now_add=True,auto_now=False)
    updated_at = models.DateTimeField(null=True)
    # default=datetime.datetime.now,editable=False
|
{"/Sam/forms.py": ["/Sam/models.py"], "/Sam/migrations/0007_employee.py": ["/Sam/models.py"], "/Sam/migrations/0006_job.py": ["/Sam/models.py"], "/Sam/views.py": ["/Sam/models.py", "/Sam/forms.py"], "/Sam/migrations/0001_initial.py": ["/Sam/models.py"]}
|
29,145
|
Rincmol/sam-backend-main
|
refs/heads/main
|
/Sam/urls.py
|
from django.conf.urls import url
from django.contrib import admin
from django.urls import path, include
from . import views
from django.conf import settings
from django.conf.urls.static import static
# URL table for the Sam app. Each master-data entity follows the same CRUD
# pattern: goX (form page), Xcreate, Xview (list), editX/updateX, deleteX.
# NOTE(review): django.conf.urls.url is deprecated (removed in Django 4);
# migrate to django.urls.re_path when upgrading.
urlpatterns = [
    # Dashboard — NOTE(review): '' and 'go' both use name='go'; reverse('go')
    # resolves to whichever was registered last. Confirm this is intended.
    url(r'^$', views.go, name='go'),
    url(r'^go$', views.go, name='go'),
    # Customers — NOTE(review): 'cutomercreate' is a typo kept for template
    # compatibility.
    url(r'^gocust$', views.gocust, name='gocust'),
    url(r'^cutomercreate$', views.cutomercreate, name='cutomercreate'),
    url(r'^custview$', views.custview, name='custview'),
    url(r'^editcust/(?P<id>\d+)$', views.editcust, name='editcust'),
    url(r'^editcust/updatecust/(?P<id>\d+)$', views.updatecust, name='updatecust'),
    url(r'^deletecust/(?P<id>\d+)$', views.deletecust, name='deletecust'),
    # Suppliers
    url(r'^gosupp$', views.gosupp, name='gosupp'),
    url(r'^suppcreate$', views.suppcreate, name='suppcreate'),
    url(r'^suppview$', views.suppview, name='suppview'),
    url(r'^editsupp/(?P<id>\d+)$', views.editsupp, name='editsupp'),
    url(r'^editsupp/updatesupp/(?P<id>\d+)$', views.updatesupp, name='updatesupp'),
    url(r'^deletesupp/(?P<id>\d+)$', views.deletesupp, name='deletesupp'),
    # Items
    url(r'^goitem$', views.goitem, name='goitem'),
    url(r'^createitem$', views.createitem, name='createitem'),
    url(r'^itemview$', views.itemview, name='itemview'),
    url(r'^edititem/(?P<id>\d+)$', views.edititem, name='edititem'),
    url(r'^edititem/updateitem/(?P<id>\d+)$', views.updateitem, name='updateitem'),
    url(r'^deleteitem/(?P<id>\d+)$', views.deleteitem, name='deleteitem'),
    # Jobs
    url(r'^gojob$', views.gojob, name='gojob'),
    url(r'^createjob$', views.createjob, name='createjob'),
    url(r'^jobview$', views.jobview, name='jobview'),
    url(r'^editjob/(?P<id>\d+)$', views.editjob, name='editjob'),
    url(r'^editjob/updatejob/(?P<id>\d+)$', views.updatejob, name='updatejob'),
    url(r'^deletejob/(?P<id>\d+)$', views.deletejob, name='deletejob'),
    # Account groups
    url(r'^gogroup$', views.gogroup, name='gogroup'),
    url(r'^groupcreate$', views.groupcreate, name='groupcreate'),
    url(r'^groupview$', views.groupview, name='groupview'),
    url(r'^editgroup/(?P<id>\d+)$', views.editgroup, name='editgroup'),
    url(r'^editgroup/updategroup/(?P<id>\d+)$', views.updategroup, name='updategroup'),
    url(r'^deletegroup/(?P<id>\d+)$', views.deletegroup, name='deletegroup'),
    # Ledgers
    url(r'^goledger$', views.goledger, name='goledger'),
    url(r'^ledgercreate$', views.ledgercreate, name='ledgercreate'),
    url(r'^ledgerview$', views.ledgerview, name='ledgerview'),
    url(r'^editledger/(?P<id>\d+)$', views.editledger, name='editledger'),
    url(r'^editledger/updateledger/(?P<id>\d+)$', views.updateledger, name='updateledger'),
    url(r'^deleteledger/(?P<id>\d+)$', views.deleteledger, name='deleteledger'),
    # Employees / chart of accounts
    url(r'^goemp$', views.goemp, name='goemp'),
    url(r'^goaccount$', views.goaccount, name='goaccount'),
    url(r'^assetview$', views.assetview, name='assetview'),
    url(r'^assetcreate$', views.assetcreate, name='assetcreate'),
    url(r'^goliability$', views.goliability, name='goliability'),
    url(r'^liabilitycreate$', views.liabilitycreate, name='liabilitycreate'),
    url(r'^goincome$', views.goincome, name='goincome'),
    url(r'^incomecreate$', views.incomecreate, name='incomecreate'),
    url(r'^goexpences$', views.goexpences, name='goexpences'),
    url(r'^expencescreate$', views.expencescreate, name='expencescreate'),
    # Sales: cash, credit, returns, receipts
    url(r'^gosales$', views.gosales, name='gosales'),
    url(r'^gocashsale$', views.gocashsale, name='gocashsale'),
    url(r'^cashcreate$', views.cashcreate, name='cashcreate'),
    url(r'^cashview$', views.cashview, name='cashview'),
    url(r'^editcash/(?P<id>\d+)$', views.editcash, name='editcash'),
    url(r'^editcash/updatecash/(?P<id>\d+)$', views.updatecash, name='updatecash'),
    url(r'^deletecash/(?P<id>\d+)$', views.deletecash, name='deletecash'),
    url(r'^gocreditsale$', views.gocreditsale, name='gocreditsale'),
    url(r'^creditcreate$', views.creditcreate, name='creditcreate'),
    url(r'^creditview$', views.creditview, name='creditview'),
    url(r'^editcredit/(?P<id>\d+)$', views.editcredit, name='editcredit'),
    url(r'^editcredit/updatecredit/(?P<id>\d+)$', views.updatecredit, name='updatecredit'),
    url(r'^deletecredit/(?P<id>\d+)$', views.deletecredit, name='deletecredit'),
    url(r'^gosreturnsale$', views.gosreturnsale, name='gosreturnsale'),
    url(r'^sreturncreate$', views.sreturncreate, name='sreturncreate'),
    url(r'^sreturnview$', views.sreturnview, name='sreturnview'),
    url(r'^editsreturn/(?P<id>\d+)$', views.editsreturn, name='editsreturn'),
    url(r'^editsreturn/updatesreturn/(?P<id>\d+)$', views.updatesreturn, name='updatesreturn'),
    url(r'^deletesreturn/(?P<id>\d+)$', views.deletesreturn, name='deletesreturn'),
    url(r'^goreceipt$', views.goreceipt, name='goreceipt'),
    url(r'^receiptcreate$', views.receiptcreate, name='receiptcreate'),
    url(r'^receiptview$', views.receiptview, name='receiptview'),
    url(r'^editreceipt/(?P<id>\d+)$', views.editreceipt, name='editreceipt'),
    url(r'^editreceipt/updatereceipt/(?P<id>\d+)$', views.updatereceipt, name='updatereceipt'),
    url(r'^deletereceipt/(?P<id>\d+)$', views.deletereceipt, name='deletereceipt'),
    # Purchases: the same pattern with a 'p' prefix
    url(r'^gopsales$', views.gopsales, name='gopsales'),
    url(r'^gopcashsale$', views.gopcashsale, name='gopcashsale'),
    url(r'^pcashcreate$', views.pcashcreate, name='pcashcreate'),
    url(r'^pcashview$', views.pcashview, name='pcashview'),
    url(r'^editpcash/(?P<id>\d+)$', views.editpcash, name='editpcash'),
    url(r'^editpcash/updatepcash/(?P<id>\d+)$', views.updatepcash, name='updatepcash'),
    url(r'^deletepcash/(?P<id>\d+)$', views.deletepcash, name='deletepcash'),
    url(r'^gopcreditsale$', views.gopcreditsale, name='gopcreditsale'),
    url(r'^pcreditcreate$', views.pcreditcreate, name='pcreditcreate'),
    url(r'^pcreditview$', views.pcreditview, name='pcreditview'),
    url(r'^editpcredit/(?P<id>\d+)$', views.editpcredit, name='editpcredit'),
    url(r'^editpcredit/updatepcredit/(?P<id>\d+)$', views.updatepcredit, name='updatepcredit'),
    url(r'^deletepcredit/(?P<id>\d+)$', views.deletepcredit, name='deletepcredit'),
    url(r'^gopsreturnsale$', views.gopsreturnsale, name='gopsreturnsale'),
    url(r'^psreturncreate$', views.psreturncreate, name='psreturncreate'),
    url(r'^psreturnview$', views.psreturnview, name='psreturnview'),
    url(r'^editpsreturn/(?P<id>\d+)$', views.editpsreturn, name='editpsreturn'),
    url(r'^editpsreturn/updatepsreturn/(?P<id>\d+)$', views.updatepsreturn, name='updatepsreturn'),
    url(r'^deletepsreturn/(?P<id>\d+)$', views.deletepsreturn, name='deletepsreturn'),
    url(r'^gopreceipt$', views.gopreceipt, name='gopreceipt'),
    url(r'^preceiptcreate$', views.preceiptcreate, name='preceiptcreate'),
    url(r'^preceiptview$', views.preceiptview, name='preceiptview'),
    url(r'^editpreceipt/(?P<id>\d+)$', views.editpreceipt, name='editpreceipt'),
    url(r'^editpreceipt/updatepreceipt/(?P<id>\d+)$', views.updatepreceipt, name='updatepreceipt'),
    url(r'^deletepreceipt/(?P<id>\d+)$', views.deletepreceipt, name='deletepreceipt'),
    # Reports: goX renders the form, Xcreate runs/saves the report
    url(r'^goreports$', views.goreports, name='goreports'),
    url(r'^goledgerstmt$', views.goledgerstmt, name='goledgerstmt'),
    url(r'^ldgrstmtcreate$', views.ldgrstmtcreate, name='ldgrstmtcreate'),
    url(r'^goledgerjournal$', views.goledgerjournal, name='goledgerjournal'),
    url(r'^ldgrjournalcreate$', views.ldgrjournalcreate, name='ldgrjournalcreate'),
    url(r'^goledgermasterdata$', views.goledgermasterdata, name='goledgermasterdata'),
    url(r'^ldgrmasterdatacreate$', views.ldgrmasterdatacreate, name='ldgrmasterdatacreate'),
    url(r'^gostockbalance$', views.gostockbalance, name='gostockbalance'),
    url(r'^stkbalanceacreate$', views.stkbalanceacreate, name='stkbalanceacreate'),
    url(r'^goitemstms$', views.goitemstms, name='goitemstms'),
    url(r'^itemstmtcreate$', views.itemstmtcreate, name='itemstmtcreate'),
    url(r'^gostockadj$', views.gostockadj, name='gostockadj'),
    url(r'^stockadjcreate$', views.stockadjcreate, name='stockadjcreate'),
    url(r'^gostockmaster$', views.gostockmaster, name='gostockmaster'),
    url(r'^stockmastercreate$', views.stockmastercreate, name='stockmastercreate'),
    url(r'^gojobstms$', views.gojobstms, name='gojobstms'),
    url(r'^jobstmtcreate$', views.jobstmtcreate, name='jobstmtcreate'),
    url(r'^gojobmaster$', views.gojobmaster, name='gojobmaster'),
    url(r'^jobmastercreate$', views.jobmastercreate, name='jobmastercreate'),
    url(r'^gocuststms$', views.gocuststms, name='gocuststms'),
    url(r'^custstmscreate$', views.custstmscreate, name='custstmscreate'),
    url(r'^gocustouts$', views.gocustouts, name='gocustouts'),
    url(r'^custoutscreate$', views.custoutscreate, name='custoutscreate'),
    url(r'^gocustinvo$', views.gocustinvo, name='gocustinvo'),
    url(r'^custinvocreate$', views.custinvocreate, name='custinvocreate'),
    url(r'^gocustrecpt$', views.gocustrecpt, name='gocustrecpt'),
    url(r'^custrecptcreate$', views.custrecptcreate, name='custrecptcreate'),
    url(r'^gocustinvorecpt$', views.gocustinvorecpt, name='gocustinvorecpt'),
    url(r'^custinvorecptcreate$', views.custinvorecptcreate, name='custinvorecptcreate'),
    url(r'^gocustrmasterdata$', views.gocustrmasterdata, name='gocustrmasterdata'),
    url(r'^custrmasterdatacreate$', views.custrmasterdatacreate, name='custrmasterdatacreate'),
    # url(r'^CustomerMasterdataReport$', views.CustomerMasterdataReport, name='CustomerMasterdataReport'),
    url(r'^gosupstms$', views.gosupstms, name='gosupstms'),
    url(r'^supstmscreate$', views.supstmscreate, name='supstmscreate'),
    url(r'^gosupouts$', views.gosupouts, name='gosupouts'),
    url(r'^supoutscreate$', views.supoutscreate, name='supoutscreate'),
    url(r'^gosupinvo$', views.gosupinvo, name='gosupinvo'),
    url(r'^supinvocreate$', views.supinvocreate, name='supinvocreate'),
    url(r'^gosuprecpt$', views.gosuprecpt, name='gosuprecpt'),
    url(r'^suprecptcreate$', views.suprecptcreate, name='suprecptcreate'),
    url(r'^gosupinvorecpt$', views.gosupinvorecpt, name='gosupinvorecpt'),
    url(r'^supinvorecptcreate$', views.supinvorecptcreate, name='supinvorecptcreate'),
    url(r'^gosupmasterdata$', views.gosupmasterdata, name='gosupmasterdata'),
    url(r'^supmasterdatacreate$', views.supmasterdatacreate, name='supmasterdatacreate'),
]
# Serve user-uploaded media through Django only in development.
if settings.DEBUG:
    urlpatterns += static(settings.MEDIA_URL,document_root=settings.MEDIA_ROOT)
|
{"/Sam/forms.py": ["/Sam/models.py"], "/Sam/migrations/0007_employee.py": ["/Sam/models.py"], "/Sam/migrations/0006_job.py": ["/Sam/models.py"], "/Sam/views.py": ["/Sam/models.py", "/Sam/forms.py"], "/Sam/migrations/0001_initial.py": ["/Sam/models.py"]}
|
29,146
|
Rincmol/sam-backend-main
|
refs/heads/main
|
/Sam/migrations/0007_employee.py
|
# Generated by Django 3.2.7 on 2021-09-16 09:24
import Sam.models
from django.db import migrations, models
class Migration(migrations.Migration):
    """Create the Employee model: personal details, passport/ID expiry,
    four photo uploads, and a salary breakdown (all free-text columns).

    Applied migration — do not edit its operations retroactively.
    """

    dependencies = [
        ('Sam', '0006_job'),
    ]

    operations = [
        migrations.CreateModel(
            name='Employee',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('emp_name', models.TextField(max_length=100)),
                ('nationality', models.TextField(max_length=100)),
                ('birth_date', models.TextField(max_length=100)),
                ('joining_date', models.TextField(max_length=100)),
                ('designation', models.TextField(max_length=100)),
                ('department', models.TextField(max_length=100)),
                ('salary_categ', models.TextField(max_length=100)),
                ('passport_no', models.TextField(max_length=100)),
                ('expir', models.TextField(max_length=100)),
                ('id_no', models.TextField(max_length=100)),
                ('id_expir', models.TextField(max_length=100)),
                # Uploads routed through the Sam.models.filepath callable
                # (later repointed to 'images/' by migration 0010).
                ('img1', models.ImageField(blank=True, null=True, upload_to=Sam.models.filepath)),
                ('img2', models.ImageField(blank=True, null=True, upload_to=Sam.models.filepath)),
                ('img3', models.ImageField(blank=True, null=True, upload_to=Sam.models.filepath)),
                ('img4', models.ImageField(blank=True, null=True, upload_to=Sam.models.filepath)),
                ('basic', models.TextField(max_length=100)),
                ('housing', models.TextField(max_length=100)),
                ('transportation', models.TextField(max_length=100)),
                ('food', models.TextField(max_length=100)),
                ('mobile', models.TextField(max_length=100)),
                ('other', models.TextField(max_length=100)),
                ('netpay', models.TextField(max_length=100)),
            ],
        ),
    ]
|
{"/Sam/forms.py": ["/Sam/models.py"], "/Sam/migrations/0007_employee.py": ["/Sam/models.py"], "/Sam/migrations/0006_job.py": ["/Sam/models.py"], "/Sam/views.py": ["/Sam/models.py", "/Sam/forms.py"], "/Sam/migrations/0001_initial.py": ["/Sam/models.py"]}
|
29,147
|
Rincmol/sam-backend-main
|
refs/heads/main
|
/Sam/migrations/0006_job.py
|
# Generated by Django 3.2.7 on 2021-09-16 09:16
import Sam.models
from django.db import migrations, models
class Migration(migrations.Migration):
    """Create the Job model: name, optional description, and four image
    uploads routed through the Sam.models.filepath callable.

    Applied migration — do not edit its operations retroactively.
    """

    dependencies = [
        ('Sam', '0005_login'),
    ]

    operations = [
        migrations.CreateModel(
            name='Job',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('job_name', models.TextField(max_length=100)),
                ('job_desc', models.TextField(max_length=500, null=True)),
                ('imag1', models.ImageField(blank=True, null=True, upload_to=Sam.models.filepath)),
                ('imag2', models.ImageField(blank=True, null=True, upload_to=Sam.models.filepath)),
                ('imag3', models.ImageField(blank=True, null=True, upload_to=Sam.models.filepath)),
                ('imag4', models.ImageField(blank=True, null=True, upload_to=Sam.models.filepath)),
            ],
        ),
    ]
|
{"/Sam/forms.py": ["/Sam/models.py"], "/Sam/migrations/0007_employee.py": ["/Sam/models.py"], "/Sam/migrations/0006_job.py": ["/Sam/models.py"], "/Sam/views.py": ["/Sam/models.py", "/Sam/forms.py"], "/Sam/migrations/0001_initial.py": ["/Sam/models.py"]}
|
29,148
|
Rincmol/sam-backend-main
|
refs/heads/main
|
/Sam/migrations/0010_auto_20210929_1808.py
|
# Generated by Django 3.2.3 on 2021-09-29 12:38
from django.db import migrations, models
class Migration(migrations.Migration):
    """Repoint every ImageField on Employee, Item, and Job from the
    Sam.models.filepath callable to the static 'images/' upload path.

    Applied migration — do not edit its operations retroactively.
    """

    dependencies = [
        ('Sam', '0009_ledger'),
    ]

    operations = [
        # Employee.img1-4
        migrations.AlterField(
            model_name='employee',
            name='img1',
            field=models.ImageField(blank=True, null=True, upload_to='images/'),
        ),
        migrations.AlterField(
            model_name='employee',
            name='img2',
            field=models.ImageField(blank=True, null=True, upload_to='images/'),
        ),
        migrations.AlterField(
            model_name='employee',
            name='img3',
            field=models.ImageField(blank=True, null=True, upload_to='images/'),
        ),
        migrations.AlterField(
            model_name='employee',
            name='img4',
            field=models.ImageField(blank=True, null=True, upload_to='images/'),
        ),
        # Item.image1-4
        migrations.AlterField(
            model_name='item',
            name='image1',
            field=models.ImageField(blank=True, null=True, upload_to='images/'),
        ),
        migrations.AlterField(
            model_name='item',
            name='image2',
            field=models.ImageField(blank=True, null=True, upload_to='images/'),
        ),
        migrations.AlterField(
            model_name='item',
            name='image3',
            field=models.ImageField(blank=True, null=True, upload_to='images/'),
        ),
        migrations.AlterField(
            model_name='item',
            name='image4',
            field=models.ImageField(blank=True, null=True, upload_to='images/'),
        ),
        # Job.imag1-4
        migrations.AlterField(
            model_name='job',
            name='imag1',
            field=models.ImageField(blank=True, null=True, upload_to='images/'),
        ),
        migrations.AlterField(
            model_name='job',
            name='imag2',
            field=models.ImageField(blank=True, null=True, upload_to='images/'),
        ),
        migrations.AlterField(
            model_name='job',
            name='imag3',
            field=models.ImageField(blank=True, null=True, upload_to='images/'),
        ),
        migrations.AlterField(
            model_name='job',
            name='imag4',
            field=models.ImageField(blank=True, null=True, upload_to='images/'),
        ),
    ]
|
{"/Sam/forms.py": ["/Sam/models.py"], "/Sam/migrations/0007_employee.py": ["/Sam/models.py"], "/Sam/migrations/0006_job.py": ["/Sam/models.py"], "/Sam/views.py": ["/Sam/models.py", "/Sam/forms.py"], "/Sam/migrations/0001_initial.py": ["/Sam/models.py"]}
|
29,149
|
Rincmol/sam-backend-main
|
refs/heads/main
|
/Sam/views.py
|
from datetime import datetime
from django.shortcuts import HttpResponse
from django.db.models.fields import DateTimeField
from django.shortcuts import render, redirect
from Sam.models import Customer, Customer_Invoice, Customer_Invoice_Receipt, Customer_Masterdata, Customer_Outstand, Customer_Receipt, Customer_Statement, Supplier, Stock_Adjustment, Supplier_Invoice, Supplier_Invoice_Receipt, Supplier_Masterdata, Supplier_Outstand, Supplier_Statement, job_Masterdata, job_Statement, Stock_Masterdata, Ledger_Masterdata,Item_Statement, Stock_Balance, Group, Ledger, PCredit, PCash,Ledger_Statement, Ledger_Journal, PRSales_Return, Item, Job, Asset, Liabilities, Expences, Receipt, PReceipt, Income, Cash, Credit, Sales_Return, payment_History
from .forms import ItemForm, JobForm
from rest_framework.exceptions import AuthenticationFailed
def go(request):
    """Render the main dashboard."""
    return render(request,'Sam/dashboard.html')
def gocust(request):
    """Render the customer master page."""
    return render(request,'Sam/customer.html')
def goreports(request):
    """Render the reports landing page."""
    return render(request,'Sam/Report.html')
# Ledger / stock / job report views. Pattern: goX renders the report form,
# Xcreate saves the submitted parameters and redirects to the dashboard.
# NOTE(review): the *create views index request.POST directly (missing keys
# raise KeyError -> 500) and never check request.method — confirm every
# caller submits via POST with all fields present.
def goledgerstmt(request):
    """Render the ledger-statement report form."""
    return render(request,'Sam/ledger statement.html')
def ldgrstmtcreate(request):
    """Persist the submitted ledger-statement parameters, then go home."""
    ldgr2 = Ledger_Statement(date=request.POST['date'],ledger_name=request.POST['ledger_name'],ledger_id=request.POST['ledger_id'],period=request.POST['period'],)
    ldgr2.save()
    return redirect( '/')
def goledgerjournal(request):
    """Render the all-journal-entries report form."""
    return render(request,'Sam/All Journal Entry.html')
def ldgrjournalcreate(request):
    """Persist the submitted journal-report parameters, then go home."""
    ldgr2 = Ledger_Journal(date=request.POST['date'],reportdate=request.POST['reportdate'],)
    ldgr2.save()
    return redirect( '/')
def goledgermasterdata(request):
    """Render the ledger master-data report form."""
    return render(request,'Sam/Ledger Marsterdata.html')
def ldgrmasterdatacreate(request):
    """Persist the submitted ledger master-data parameters, then go home."""
    ldgr2 = Ledger_Masterdata(date=request.POST['date'],reportdate=request.POST['reportdate'],)
    ldgr2.save()
    return redirect( '/')
def gostockbalance(request):
    """Render the stock-balance report form."""
    return render(request,'Sam/Stock Balance.html')
def stkbalanceacreate(request):
    """Persist the submitted stock-balance parameters, then go home."""
    stk2 = Stock_Balance(date=request.POST['date'],reportdate=request.POST['reportdate'],)
    stk2.save()
    return redirect( '/')
def goitemstms(request):
    """Render the item-statement report form."""
    return render(request,'Sam/Item Statement.html')
def itemstmtcreate(request):
    """Persist the submitted item-statement parameters, then go home."""
    itm2 = Item_Statement(date=request.POST['date'],item_id=request.POST['item_id'],item_name=request.POST['item_name'],period=request.POST['period'],)
    itm2.save()
    return redirect( '/')
def gostockadj(request):
    """Render the stock-adjustment report form."""
    return render(request,'Sam/Stock Adjustment.html')
def stockadjcreate(request):
    """Persist the submitted stock-adjustment parameters, then go home."""
    stk2 = Stock_Adjustment(date=request.POST['date'], reportdate=request.POST['reportdate'], )
    stk2.save()
    return redirect('/')
def gostockmaster(request):
    """Render the stock master-data report form."""
    return render(request,'Sam/Stock Masterdata.html')
def stockmastercreate(request):
    """Persist the submitted stock master-data parameters, then go home."""
    stk2 = Stock_Masterdata(date=request.POST['date'], reportdate=request.POST['reportdate'], )
    stk2.save()
    return redirect('/')
def gojobstms(request):
    """Render the job-statement report form."""
    return render(request,'Sam/Job Statement.html')
def jobstmtcreate(request):
    """Persist the submitted job-statement parameters, then go home."""
    job2 = job_Statement(date=request.POST['date'],job=request.POST['job'],job_id=request.POST['job_id'],period=request.POST['period'],)
    job2.save()
    return redirect( '/')
def gojobmaster(request):
    """Render the job master-data report form."""
    return render(request,'Sam/Job Masterdate.html')
def jobmastercreate(request):
    """Persist the submitted job master-data parameters, then go home."""
    job2 = job_Masterdata(date=request.POST['date'], reportdate=request.POST['reportdate'], )
    job2.save()
    return redirect('/')
def gocuststms(request):
    """Render the customer account-statement report page."""
    return render(request, 'Sam/Customer AccountStatement.html')


def custstmscreate(request):
    """Customer account statement: list all receipts for the posted customer.

    Reads ``customer_id`` from POST (``.get`` tolerates a missing key) and
    re-renders the statement template with the matching Receipt rows.
    """
    cust_id = request.POST.get('customer_id')
    # Filter directly instead of .all() followed by .filter(): querysets are
    # lazy either way, so this is the same query minus a redundant step.
    report = Receipt.objects.filter(customer_id=cust_id)
    return render(request, 'Sam/Customer AccountStatement.html', {'report': report})


def gocustouts(request):
    """Render the customer-outstanding report page."""
    return render(request, 'Sam/Customer Outstanding.html')


def custoutscreate(request):
    """Customer outstanding report: same Receipt lookup as custstmscreate,
    rendered into the outstanding template.

    NOTE(review): this performs no outstanding-amount computation — it shows
    the raw receipts; confirm the template does the arithmetic.
    """
    cust_id = request.POST.get('customer_id')
    report = Receipt.objects.filter(customer_id=cust_id)
    return render(request, 'Sam/Customer Outstanding.html', {'report': report})
def gocustinvo(request):
return render(request,'Sam/Customer InvoiceHistory.html')
def custinvocreate(request):
cus1 = Customer_Invoice(invoice_no=request.POST['invoice_no'], report_date=request.POST['report_date'],customer_name= request.POST['customer_name'],customer_id=request.POST['customer_id'],)
cus1.save()
return redirect('/')
def gocustrecpt(request):
return render(request,'Sam/Customer ReceiptHistory.html')
def custrecptcreate(request):
cus1 = Customer_Receipt(receipt_no=request.POST['receipt_no'], report_date=request.POST['report_date'],customer_name= request.POST['customer_name'],customer_id=request.POST['customer_id'],)
cus1.save()
return redirect('/')
def gocustinvorecpt(request):
return render(request,'Sam/CustomerInvoice ReceiptsReg.html')
def custinvorecptcreate(request):
    """Store a Customer_Invoice_Receipt row from the posted form, then go home."""
    posted = request.POST
    record = Customer_Invoice_Receipt(
        date=posted['date'],
        report_date=posted['report_date'],
    )
    record.save()
    return redirect('/')
def gocustrmasterdata(request):
    """Show the customer master-data page."""
    template = 'Sam/Customer Masterdata.html'
    return render(request, template)
def custrmasterdatacreate(request):
    """Render the customer master-data report for the posted creation date.

    Cleanup: removed the large body of dead commented-out code (an abandoned
    Customer_Masterdata save path and an older report implementation) and the
    redundant `.all()` before `.filter()`.
    """
    dt = request.POST.get('date')
    report = Customer.objects.filter(created_at=dt)
    context = {'report': report}
    return render(request, 'Sam/Customer Masterdata.html', context)
def gosupstms(request):
    """Show the supplier account-statement page."""
    template = 'Sam/Supplier AccountStatement.html'
    return render(request, template)
def supstmscreate(request):
    """Render the supplier account statement filtered by the posted supplier id.

    Cleanup: removed dead commented-out Supplier_Statement code and the
    redundant `.all()` before `.filter()`.
    """
    custid = request.POST.get('Supplier_id')
    report = PReceipt.objects.filter(supp_id=custid)
    context = {'report': report}
    return render(request, 'Sam/Supplier AccountStatement.html', context)
def gosupouts(request):
    """Show the supplier outstanding page."""
    template = 'Sam/Supplier Outstanding.html'
    return render(request, template)
def supoutscreate(request):
    """Render the supplier outstanding report filtered by the posted supplier id.

    Cleanup: removed dead commented-out Supplier_Outstand code and the
    redundant `.all()` before `.filter()`.
    """
    custid = request.POST.get('Supplier_id')
    report = PReceipt.objects.filter(supp_id=custid)
    context = {'report': report}
    return render(request, 'Sam/Supplier Outstanding.html', context)
def gosupinvo(request):
    """Show the supplier invoice-history page."""
    template = 'Sam/Supplier InvoiceHistory.html'
    return render(request, template)
def supinvocreate(request):
    """Store a Supplier_Invoice row from the posted form, then go home."""
    posted = request.POST
    invoice = Supplier_Invoice(
        invoice_no=posted['invoice_no'],
        report_date=posted['report_date'],
        Supplier_name=posted['Supplier_name'],
        Supplier_id=posted['Supplier_id'],
    )
    invoice.save()
    return redirect('/')
def gosuprecpt(request):
    """Show the supplier payment-history page."""
    template = 'Sam/Payment History.html'
    return render(request, template)
def suprecptcreate(request):
    """Store a payment_History row from the posted form, then go home."""
    posted = request.POST
    payment = payment_History(
        voucher_no=posted['voucher_no'],
        report_date=posted['report_date'],
        Supplier_name=posted['Supplier_name'],
        Supplier_id=posted['Supplier_id'],
    )
    payment.save()
    return redirect('/')
def gosupinvorecpt(request):
    """Show the supplier invoice/receipt register page."""
    template = 'Sam/SupplierInvoice ReceiptReg.html'
    return render(request, template)
def supinvorecptcreate(request):
    """Store a Supplier_Invoice_Receipt row from the posted form, then go home."""
    posted = request.POST
    record = Supplier_Invoice_Receipt(
        date=posted['date'],
        report_date=posted['report_date'],
    )
    record.save()
    return redirect('/')
def gosupmasterdata(request):
    """Show the supplier master-data page."""
    template = 'Sam/Supplier Masterdata.html'
    return render(request, template)
def supmasterdatacreate(request):
    """Render the supplier master-data report for the posted creation date.

    Cleanup: removed dead commented-out code (an abandoned Supplier_Masterdata
    save path and an 18-line commented-out CustomerMasterdataReport function
    that followed this view) and the redundant `.all()` before `.filter()`.
    """
    dt = request.POST.get('date')
    report = Supplier.objects.filter(created_at=dt)
    context = {'report': report}
    return render(request, 'Sam/Supplier Masterdata.html', context)
def cutomercreate(request):
    """Create a Customer from the posted form and go home.

    NOTE(review): the name is misspelled ("cutomer") but is kept because URL
    configuration elsewhere references it.
    """
    field_names = (
        'customer_name', 'vat_reg_no', 'cr_no', 'expired_on', 'land_phone',
        'mobile', 'contact_person', 'contact_mobile', 'email', 'address',
        'open_balance', 'credit_lim_am', 'credit_lim_dur',
    )
    customer = Customer(**{name: request.POST[name] for name in field_names})
    customer.save()
    return redirect('/')
def custview(request):
    """List every Customer on the customer view page."""
    return render(request, 'Sam/customer view.html', {'cust': Customer.objects.all()})
def editcust(request, id):
    """Open the edit form for the Customer with the given primary key."""
    return render(request, 'Sam/edit customer.html', {'cust': Customer.objects.get(id=id)})
def updatecust(request, id):
    """Overwrite a Customer's fields from POST, stamp updated_at, show dashboard."""
    cust = Customer.objects.get(id=id)
    for field in ('customer_name', 'vat_reg_no', 'cr_no', 'expired_on',
                  'land_phone', 'mobile', 'contact_person', 'contact_mobile',
                  'email', 'address', 'open_balance', 'credit_lim_am',
                  'credit_lim_dur'):
        setattr(cust, field, request.POST[field])
    # Second-resolution timestamp, matching the model's display precision.
    cust.updated_at = datetime.now().replace(microsecond=0)
    cust.save()
    return render(request, 'Sam/dashboard.html')
def deletecust(request, id):
    """Remove the Customer with the given id, then show the dashboard."""
    Customer.objects.get(id=id).delete()
    return render(request, 'Sam/dashboard.html')
def gosupp(request):
    """Show the supplier entry page."""
    template = 'Sam/supplier.html'
    return render(request, template)
def suppcreate(request):
    """Create a Supplier from the posted form and go home."""
    field_names = (
        'customer_name', 'vat_reg_no', 'cr_no', 'expired_on', 'land_phone',
        'mobile', 'contact_person', 'contact_mobile', 'email', 'address',
        'open_balance', 'credit_lim_am', 'credit_lim_dur',
        'bank_acc_name', 'bank_acc_no',
    )
    supplier = Supplier(**{name: request.POST[name] for name in field_names})
    supplier.save()
    return redirect('/')
def suppview(request):
    """List every Supplier on the supplier view page."""
    return render(request, 'Sam/supplier view.html', {'supp': Supplier.objects.all()})
def editsupp(request, id):
    """Open the edit form for the Supplier with the given primary key."""
    return render(request, 'Sam/edit supplier.html', {'supp': Supplier.objects.get(id=id)})
def updatesupp(request, id):
    """Overwrite a Supplier's fields from POST, stamp updated_at, show dashboard."""
    supp = Supplier.objects.get(id=id)
    for field in ('customer_name', 'vat_reg_no', 'cr_no', 'expired_on',
                  'land_phone', 'mobile', 'contact_person', 'contact_mobile',
                  'email', 'address', 'open_balance', 'credit_lim_am',
                  'credit_lim_dur', 'bank_acc_name', 'bank_acc_no'):
        setattr(supp, field, request.POST[field])
    # Second-resolution timestamp, matching the model's display precision.
    supp.updated_at = datetime.now().replace(microsecond=0)
    supp.save()
    return render(request, 'Sam/dashboard.html')
def deletesupp(request, id):
    """Remove the Supplier with the given id, then show the dashboard."""
    Supplier.objects.get(id=id).delete()
    return render(request, 'Sam/dashboard.html')
def goitem(request):
    """Show the item entry page."""
    template = 'Sam/item.html'
    return render(request, template)
def createitem(request):
    """Create an Item from the submitted form, then redirect to 'go'.

    On GET no object is created and the redirect still happens, matching
    the original control flow. Cleanup: dropped the unused local binding of
    the created object and collected the POST values via a dict.
    """
    if request.method == "POST":
        posted = request.POST
        text_fields = ('item_name', 'item_desc', 'item_barcode',
                       'item_category', 'item_unit_prim', 'item_unit_sec',
                       'open_balance', 'buying_price', 'sell_price')
        data = {name: posted[name] for name in text_fields}
        # Images are optional uploads; .get() yields None when absent.
        for img in ('image1', 'image2', 'image3', 'image4'):
            data[img] = request.FILES.get(img)
        Item.objects.create(**data)
    return redirect('go')
def itemview(request):
    """List every Item on the item view page."""
    context = {'itmview': Item.objects.all()}
    return render(request, 'Sam/item view.html', context)
def edititem(request, id):
    """Open the edit form for the Item with the given primary key."""
    return render(request, 'Sam/edit item.html', {'itmview': Item.objects.get(id=id)})
def updateitem(request, id):
    """Apply the posted ItemForm to an existing Item; redirect home on success."""
    record = Item.objects.get(id=id)
    bound = ItemForm(request.POST, instance=record)
    if not bound.is_valid():
        # Invalid form: fall back to the dashboard with the unchanged item.
        return render(request, 'Sam/dashboard.html', {'itmview': record})
    bound.save()
    return redirect('/')
def deleteitem(request, id):
    """Remove the Item with the given id, then show the dashboard."""
    Item.objects.get(id=id).delete()
    return render(request, 'Sam/dashboard.html')
def gojob(request):
    """Show the job entry page."""
    template = 'Sam/job.html'
    return render(request, template)
def createjob(request):
    """Handle the job-creation form and render the dashboard.

    Bug fixed: the original called ``redirect(request, 'Sam/dashboard.html')``
    — ``redirect()`` does not accept the request object, so that call always
    raised and the bare ``except: pass`` silently swallowed the error, falling
    through to the final render anyway. The view now renders the dashboard
    directly after a (best-effort) save, preserving the observable outcome.
    """
    if request.method == "POST":
        form = JobForm(request.POST, request.FILES)
        if form.is_valid():
            try:
                form.save()
            except Exception:
                # Best-effort save kept from the original (bare except: pass),
                # narrowed so SystemExit/KeyboardInterrupt still propagate.
                pass
    else:
        form = JobForm()
    return render(request, 'Sam/dashboard.html', {'form': form})
def jobview(request):
    """List every Job on the job view page."""
    context = {'jobview': Job.objects.all()}
    return render(request, 'Sam/job view.html', context)
def editjob(request, id):
    """Open the edit form for the Job with the given primary key."""
    return render(request, 'Sam/edit job.html', {'jobview': Job.objects.get(id=id)})
def updatejob(request, id):
    """Apply the posted JobForm to an existing Job; redirect home on success."""
    record = Job.objects.get(id=id)
    bound = JobForm(request.POST, instance=record)
    if not bound.is_valid():
        # Invalid form: fall back to the dashboard with the unchanged job.
        return render(request, 'Sam/dashboard.html', {'jobview': record})
    bound.save()
    return redirect('/')
def deletejob(request, id):
    """Remove the Job with the given id, then show the dashboard."""
    Job.objects.get(id=id).delete()
    return render(request, 'Sam/dashboard.html')
def gogroup(request):
    """Show the group entry page."""
    template = 'Sam/group.html'
    return render(request, template)
def groupcreate(request):
    """Create a Group from the posted form and go home."""
    posted = request.POST
    group = Group(group_name=posted['group_name'], category=posted['category'])
    group.save()
    return redirect('/')
def groupview(request):
    """List every Group on the group view page."""
    return render(request, 'Sam/group view.html', {'grp': Group.objects.all()})
def editgroup(request, id):
    """Open the edit form for the Group with the given primary key."""
    return render(request, 'Sam/edit group.html', {'grp': Group.objects.get(id=id)})
def updategroup(request, id):
    """Overwrite a Group's fields from POST, then show the dashboard."""
    grp = Group.objects.get(id=id)
    for field in ('group_name', 'category'):
        setattr(grp, field, request.POST[field])
    grp.save()
    return render(request, 'Sam/dashboard.html')
def deletegroup(request, id):
    """Remove the Group with the given id, then show the dashboard."""
    Group.objects.get(id=id).delete()
    return render(request, 'Sam/dashboard.html')
def goledger(request):
    """Show the ledger entry page."""
    template = 'Sam/ledger.html'
    return render(request, template)
def ledgercreate(request):
    """Create a Ledger from the posted form and go home."""
    posted = request.POST
    ledger = Ledger(
        ledger_name=posted['ledger_name'],
        group_name=posted['group_name'],
        category=posted['category'],
        opening_bal=posted['opening_bal'],
    )
    ledger.save()
    return redirect('/')
def ledgerview(request):
    """List every Ledger on the ledger view page."""
    return render(request, 'Sam/ledger view.html', {'ldg': Ledger.objects.all()})
def editledger(request, id):
    """Open the edit form for the Ledger with the given primary key."""
    return render(request, 'Sam/edit ledger.html', {'ldg': Ledger.objects.get(id=id)})
def updateledger(request, id):
    """Overwrite a Ledger's fields from POST, then show the dashboard."""
    ldg = Ledger.objects.get(id=id)
    for field in ('ledger_name', 'group_name', 'category', 'opening_bal'):
        setattr(ldg, field, request.POST[field])
    ldg.save()
    return render(request, 'Sam/dashboard.html')
def deleteledger(request, id):
    """Remove the Ledger with the given id, then show the dashboard."""
    Ledger.objects.get(id=id).delete()
    return render(request, 'Sam/dashboard.html')
def goemp(request):
    """Show the employee page."""
    template = 'Sam/employee.html'
    return render(request, template)
def goaccount(request):
    """Show the chart-of-account page."""
    template = 'Sam/chart of account.html'
    return render(request, template)
def assetcreate(request):
    """Create an Asset from the posted form and go home."""
    posted = request.POST
    asset = Asset(asset_parent=posted['asset_parent'], asset_child=posted['asset_child'])
    asset.save()
    return redirect('/')
def assetview(request):
    """Show the add-new-asset page."""
    template = 'Sam/Add new asset.html'
    return render(request, template)
def goliability(request):
    """Show the add-new-liability page."""
    template = 'Sam/Add new liability.html'
    return render(request, template)
def liabilitycreate(request):
    """Create a Liabilities row from the posted form and go home."""
    posted = request.POST
    liability = Liabilities(
        liability_parent=posted['liability_parent'],
        liability_child=posted['liability_child'],
    )
    liability.save()
    return redirect('/')
def goincome(request):
    """Show the add-new-income page."""
    template = 'Sam/Add new income.html'
    return render(request, template)
def incomecreate(request):
    """Create an Income row from the posted form and go home."""
    posted = request.POST
    income = Income(
        income_parent=posted['income_parent'],
        income_child=posted['income_child'],
    )
    income.save()
    return redirect('/')
def goexpences(request):
    """Show the add-new-expenses page."""
    template = 'Sam/Add new expences.html'
    return render(request, template)
def expencescreate(request):
    """Create an Expences row from the posted form and go home."""
    posted = request.POST
    expense = Expences(
        expenses_parent=posted['expenses_parent'],
        expenses_child=posted['expenses_child'],
    )
    expense.save()
    return redirect('/')
def gosales(request):
    """Show the sales landing page."""
    template = 'Sam/Sales.html'
    return render(request, template)
def gocashsale(request):
    """Show the cash-sale entry page."""
    template = 'Sam/cash sale.html'
    return render(request, template)
def cashcreate(request):
    """Create a Cash sale from the submitted invoice form and go home."""
    field_names = (
        'invoice_number', 'date', 'internal_ref_no', 'cash', 'user_id',
        'account', 'customer_id', 'customer_name',
        'item_id1', 'item_id2', 'item_details1', 'item_details2',
        'price1_1', 'price1_2', 'quantity1', 'quantity2',
        'price2_1', 'price2_2', 'quantity3', 'quantity4',
        'amount1', 'amount2', 'sales_ex1', 'sales_ex2',
        'job1', 'job2', 'labour_charge', 'other_charge',
        'total1', 'total2', 'total3', 'total4', 'total5', 'total6',
        'discount', 'tax',
    )
    sale = Cash(**{name: request.POST[name] for name in field_names})
    sale.save()
    return redirect('/')
def cashview(request):
    """List every Cash sale on the cash-sales view page."""
    return render(request, 'Sam/show cash sales.html', {'csh': Cash.objects.all()})
def editcash(request, id):
    """Open the edit form for the Cash sale with the given primary key."""
    return render(request, 'Sam/edit cash sales.html', {'csh': Cash.objects.get(id=id)})
def updatecash(request, id):
    """Overwrite a Cash sale's fields from POST, then show the Sales page.

    Bug fixed: the original line ``csh.account = request.POST['account'],``
    had a trailing comma, so ``account`` was stored as a 1-tuple instead of
    the posted string value.
    """
    csh = Cash.objects.get(id=id)
    field_names = (
        'invoice_number', 'date', 'internal_ref_no', 'cash', 'user_id',
        'account', 'customer_id', 'customer_name',
        'item_id1', 'item_id2', 'item_details1', 'item_details2',
        'price1_1', 'price1_2', 'quantity1', 'quantity2',
        'price2_1', 'price2_2', 'quantity3', 'quantity4',
        'amount1', 'amount2', 'sales_ex1', 'sales_ex2',
        'job1', 'job2', 'labour_charge', 'other_charge',
        'total1', 'total2', 'total3', 'total4', 'total5', 'total6',
        'discount', 'tax',
    )
    for name in field_names:
        setattr(csh, name, request.POST[name])
    csh.save()
    return render(request, 'Sam/Sales.html')
def deletecash(request, id):
    """Remove the Cash sale with the given id, then show the Sales page."""
    Cash.objects.get(id=id).delete()
    return render(request, 'Sam/Sales.html')
def gocreditsale(request):
    """Show the credit-sale entry page."""
    template = 'Sam/credit sales.html'
    return render(request, template)
def creditcreate(request):
    """Create a Credit sale from the submitted invoice form and go home."""
    field_names = (
        'invoice_number', 'date', 'internal_ref_no', 'due_on', 'user_id',
        'credit_limit_amt', 'customer_id', 'customer_name',
        'item_id1', 'item_id2', 'item_details1', 'item_details2',
        'price1_1', 'price1_2', 'quantity1', 'quantity2',
        'price2_1', 'price2_2', 'quantity3', 'quantity4',
        'amount1', 'amount2', 'sales_ex1', 'sales_ex2',
        'job1', 'job2', 'labour_charge', 'other_charge',
        'total1', 'total2', 'total3', 'total4', 'total5', 'total6',
        'discount', 'tax',
    )
    sale = Credit(**{name: request.POST[name] for name in field_names})
    sale.save()
    return redirect('/')
def creditview(request):
    """List every Credit sale on the credit-sales view page."""
    return render(request, 'Sam/show credit sales.html', {'crd': Credit.objects.all()})
def editcredit(request, id):
    """Open the edit form for the Credit sale with the given primary key."""
    return render(request, 'Sam/edit credit sales.html', {'crd': Credit.objects.get(id=id)})
def updatecredit(request, id):
    """Overwrite a Credit sale's fields from POST, then show the Sales page.

    Bug fixed: the original line
    ``crd.credit_limit_amt = request.POST['credit_limit_amt'],`` had a
    trailing comma, so ``credit_limit_amt`` was stored as a 1-tuple instead
    of the posted string value.
    """
    crd = Credit.objects.get(id=id)
    field_names = (
        'invoice_number', 'date', 'internal_ref_no', 'due_on', 'user_id',
        'credit_limit_amt', 'customer_id', 'customer_name',
        'item_id1', 'item_id2', 'item_details1', 'item_details2',
        'price1_1', 'price1_2', 'quantity1', 'quantity2',
        'price2_1', 'price2_2', 'quantity3', 'quantity4',
        'amount1', 'amount2', 'sales_ex1', 'sales_ex2',
        'job1', 'job2', 'labour_charge', 'other_charge',
        'total1', 'total2', 'total3', 'total4', 'total5', 'total6',
        'discount', 'tax',
    )
    for name in field_names:
        setattr(crd, name, request.POST[name])
    crd.save()
    return render(request, 'Sam/Sales.html')
def deletecredit(request, id):
    """Remove the Credit sale with the given id, then show the Sales page."""
    Credit.objects.get(id=id).delete()
    return render(request, 'Sam/Sales.html')
def gosreturnsale(request):
    """Show the sales-return entry page."""
    template = 'Sam/sales return.html'
    return render(request, template)
def sreturncreate(request):
    """Create a Sales_Return from the submitted form and go home."""
    field_names = (
        'invoice_number', 'date', 'internal_ref_no', 'user_id',
        'customer_id', 'customer_name',
        'item_id1', 'item_id2', 'item_details1', 'item_details2',
        'price1_1', 'price1_2', 'quantity1', 'quantity2',
        'price2_1', 'price2_2', 'quantity3', 'quantity4',
        'amount1', 'amount2', 'sales_ex1', 'sales_ex2',
        'job1', 'job2', 'labour_charge', 'other_charge',
        'total1', 'total2', 'total3', 'total4', 'total5', 'total6',
        'discount', 'tax',
    )
    sale_return = Sales_Return(**{name: request.POST[name] for name in field_names})
    sale_return.save()
    return redirect('/')
def sreturnview(request):
    """List every Sales_Return on the sales-return view page."""
    return render(request, 'Sam/show sales return.html', {'rtn': Sales_Return.objects.all()})
def editsreturn(request, id):
    """Open the edit form for the Sales_Return with the given primary key."""
    return render(request, 'Sam/edit sales return.html', {'rtn': Sales_Return.objects.get(id=id)})
def updatesreturn(request, id):
    """Overwrite a Sales_Return's fields from POST, then show the Sales page."""
    rtn = Sales_Return.objects.get(id=id)
    field_names = (
        'invoice_number', 'date', 'internal_ref_no', 'user_id',
        'customer_id', 'customer_name',
        'item_id1', 'item_id2', 'item_details1', 'item_details2',
        'price1_1', 'price1_2', 'quantity1', 'quantity2',
        'price2_1', 'price2_2', 'quantity3', 'quantity4',
        'amount1', 'amount2', 'sales_ex1', 'sales_ex2',
        'job1', 'job2', 'labour_charge', 'other_charge',
        'total1', 'total2', 'total3', 'total4', 'total5', 'total6',
        'discount', 'tax',
    )
    for name in field_names:
        setattr(rtn, name, request.POST[name])
    rtn.save()
    return render(request, 'Sam/Sales.html')
def deletesreturn(request, id):
    """Remove the Sales_Return with the given id, then show the Sales page."""
    Sales_Return.objects.get(id=id).delete()
    return render(request, 'Sam/Sales.html')
def goreceipt(request):
    """Show the receipt entry page."""
    template = 'Sam/Receipt.html'
    return render(request, template)
def receiptcreate(request):
    """Create a Receipt from the submitted form and go home."""
    field_names = (
        'receipt_number', 'date', 'internal_ref_no', 'due_on',
        'credit_limit_amt', 'user_id', 'customer_id', 'customer_name',
        'invoice_no1', 'invoice_no2', 'invoice_no3',
        'invoice_date1', 'invoice_date2', 'invoice_date3',
        'duedate1', 'duedate2', 'duedate3',
        'invoice_amt1', 'invoice_amt2', 'invoice_amt3',
        'received_amt1', 'received_amt2', 'received_amt3',
        'outstanding1', 'outstanding2', 'outstanding3',
        'discount1', 'discount2', 'discount3',
        'balance_amt1', 'balance_amt2', 'balance_amt3',
        'tick_space1', 'tick_space2', 'tick_space3',
        'partial1', 'partial2', 'partial3',
        'total1', 'total2', 'total3', 'total4', 'total5', 'total6',
        'on_account', 'discount',
    )
    receipt = Receipt(**{name: request.POST[name] for name in field_names})
    receipt.save()
    return redirect('/')
def receiptview(request):
    """List every Receipt on the receipt view page."""
    return render(request, 'Sam/Show receipt.html', {'rpt': Receipt.objects.all()})
def editreceipt(request, id):
    """Open the edit form for the Receipt with the given primary key."""
    return render(request, 'Sam/edit receipt.html', {'rpt': Receipt.objects.get(id=id)})
def updatereceipt(request, id):
    """Overwrite a Receipt's fields from POST, then show the Sales page.

    Bug fixed: the original assignment list skipped ``duedate2``,
    ``duedate3`` and ``invoice_amt1`` (it jumped from ``duedate1`` straight
    to ``invoice_amt2``), so those three fields were never updated even
    though the create view (`receiptcreate`) accepts them. They are now
    included in the field list.
    """
    rpt = Receipt.objects.get(id=id)
    field_names = (
        'receipt_number', 'date', 'internal_ref_no', 'due_on',
        'credit_limit_amt', 'user_id', 'customer_id', 'customer_name',
        'invoice_no1', 'invoice_no2', 'invoice_no3',
        'invoice_date1', 'invoice_date2', 'invoice_date3',
        'duedate1', 'duedate2', 'duedate3',
        'invoice_amt1', 'invoice_amt2', 'invoice_amt3',
        'received_amt1', 'received_amt2', 'received_amt3',
        'outstanding1', 'outstanding2', 'outstanding3',
        'discount1', 'discount2', 'discount3',
        'balance_amt1', 'balance_amt2', 'balance_amt3',
        'tick_space1', 'tick_space2', 'tick_space3',
        'partial1', 'partial2', 'partial3',
        'total1', 'total2', 'total3', 'total4', 'total5', 'total6',
        'on_account', 'discount',
    )
    for name in field_names:
        setattr(rpt, name, request.POST[name])
    rpt.save()
    return render(request, 'Sam/Sales.html')
def deletereceipt(request, id):
    """Remove the Receipt with the given id, then show the Sales page."""
    Receipt.objects.get(id=id).delete()
    return render(request, 'Sam/Sales.html')
def gopsales(request):
    """Show the purchase landing page."""
    template = 'Sam/purchase.html'
    return render(request, template)
def gopcashsale(request):
    """Show the cash-purchase entry page."""
    template = 'Sam/cash purchase.html'
    return render(request, template)
def pcashcreate(request):
    """Create a PCash purchase from the submitted invoice form and go home."""
    field_names = (
        'invoice_number', 'date', 'internal_ref_no', 'cash', 'user_id',
        'account', 'supp_id', 'supp_name',
        'item_id1', 'item_id2', 'item_details1', 'item_details2',
        'price1_1', 'price1_2', 'quantity1', 'quantity2',
        'price2_1', 'price2_2', 'quantity3', 'quantity4',
        'amount1', 'amount2', 'sales_ex1', 'sales_ex2',
        'job1', 'job2', 'labour_charge', 'other_charge',
        'total1', 'total2', 'total3', 'total4', 'total5', 'total6',
        'discount', 'tax',
    )
    purchase = PCash(**{name: request.POST[name] for name in field_names})
    purchase.save()
    return redirect('/')
def pcashview(request):
    """List every PCash purchase on the cash-purchase view page."""
    return render(request, 'Sam/show cash purchase.html', {'csh': PCash.objects.all()})
def editpcash(request, id):
    """Open the edit form for the PCash purchase with the given primary key."""
    return render(request, 'Sam/edit cash purchase.html', {'csh': PCash.objects.get(id=id)})
def updatepcash(request, id):
    """Overwrite a PCash purchase's fields from POST, then show the purchase page.

    Bug fixed: the original line ``csh.account = request.POST['account'],``
    had a trailing comma, so ``account`` was stored as a 1-tuple instead of
    the posted string value.
    """
    csh = PCash.objects.get(id=id)
    field_names = (
        'invoice_number', 'date', 'internal_ref_no', 'cash', 'user_id',
        'account', 'supp_id', 'supp_name',
        'item_id1', 'item_id2', 'item_details1', 'item_details2',
        'price1_1', 'price1_2', 'quantity1', 'quantity2',
        'price2_1', 'price2_2', 'quantity3', 'quantity4',
        'amount1', 'amount2', 'sales_ex1', 'sales_ex2',
        'job1', 'job2', 'labour_charge', 'other_charge',
        'total1', 'total2', 'total3', 'total4', 'total5', 'total6',
        'discount', 'tax',
    )
    for name in field_names:
        setattr(csh, name, request.POST[name])
    csh.save()
    return render(request, 'Sam/purchase.html')
def deletepcash(request, id):
    """Remove the PCash purchase with the given id, then show the purchase page."""
    PCash.objects.get(id=id).delete()
    return render(request, 'Sam/purchase.html')
def gopcreditsale(request):
    """Show the credit-purchase entry page."""
    template = 'Sam/credit purchase.html'
    return render(request, template)
def pcreditcreate(request):
crd2 = PCredit(invoice_number=request.POST['invoice_number'],date=request.POST['date'],
internal_ref_no=request.POST['internal_ref_no'],due_on=request.POST['due_on'],
user_id=request.POST['user_id'],credit_limit_amt=request.POST['credit_limit_amt'],
supp_id=request.POST['supp_id'],supp_name=request.POST['supp_name'],
item_id1=request.POST['item_id1'],item_id2=request.POST['item_id2'],
item_details1=request.POST['item_details1'],item_details2=request.POST['item_details2'],
price1_1=request.POST['price1_1'],price1_2=request.POST['price1_2'],
quantity1=request.POST['quantity1'],quantity2=request.POST['quantity2'],
price2_1=request.POST['price2_1'], price2_2=request.POST['price2_2'],
quantity3=request.POST['quantity3'], quantity4=request.POST['quantity4'],
amount1=request.POST['amount1'], amount2=request.POST['amount2'],
sales_ex1=request.POST['sales_ex1'], sales_ex2=request.POST['sales_ex2'],
job1=request.POST['job1'], job2=request.POST['job2'],
labour_charge=request.POST['labour_charge'], other_charge=request.POST['other_charge'],
total1=request.POST['total1'], total2=request.POST['total2'],
total3=request.POST['total3'], total4=request.POST['total4'],
total5=request.POST['total5'], total6=request.POST['total6'],
discount=request.POST['discount'], tax=request.POST['tax'],)
crd2.save()
return redirect( '/')
def pcreditview(request):
    """List every credit-purchase row."""
    records = PCredit.objects.all()
    # NOTE(review): template name contains a typo ("puchase") -- it must match
    # the actual file on disk, so it is preserved as-is.
    return render(request, 'Sam/show credit puchase.html', {'crd': records})
def editpcredit(request, id):
    """Render the edit form for a single credit-purchase (PCredit) row."""
    record = PCredit.objects.get(id=id)
    return render(request, 'Sam/edit credit purchase.html', {'crd': record})
def updatepcredit(request, id):
    """Copy every POSTed form field onto an existing PCredit row and save it.

    Looks the credit-purchase record up by primary key, assigns each form
    value verbatim, persists the row, and re-renders the purchase page.
    """
    crd = PCredit.objects.get(id=id)
    # Field list mirrors pcreditcreate.  The original code assigned
    # ``request.POST['credit_limit_amt'],`` (trailing comma), storing a
    # 1-tuple instead of the string value -- fixed by the uniform loop below.
    fields = (
        'invoice_number', 'date', 'internal_ref_no', 'due_on', 'user_id',
        'credit_limit_amt', 'supp_id', 'supp_name',
        'item_id1', 'item_id2', 'item_details1', 'item_details2',
        'price1_1', 'price1_2', 'quantity1', 'quantity2',
        'price2_1', 'price2_2', 'quantity3', 'quantity4',
        'amount1', 'amount2', 'sales_ex1', 'sales_ex2',
        'job1', 'job2', 'labour_charge', 'other_charge',
        'total1', 'total2', 'total3', 'total4', 'total5', 'total6',
        'discount', 'tax',
    )
    for name in fields:
        setattr(crd, name, request.POST[name])
    crd.save()
    return render(request, 'Sam/purchase.html')
def deletepcredit(request, id):
    """Delete the PCredit row with the given primary key, then show purchases."""
    PCredit.objects.get(id=id).delete()
    return render(request, 'Sam/purchase.html')
def gopsreturnsale(request):
    """Show the (empty) purchase-return entry form."""
    return render(request, 'Sam/purchase return.html')
def psreturncreate(request):
    """Create a new purchase-return (PRSales_Return) row from the form.

    All model fields are text with POST keys of the same name, so the record
    is built with a single comprehension over the field list.
    """
    field_names = (
        'invoice_number', 'date', 'internal_ref_no', 'user_id', 'due_on',
        'credit_limit_amt', 'supp_id', 'supp_name',
        'item_id1', 'item_id2', 'item_details1', 'item_details2',
        'price1_1', 'price1_2', 'quantity1', 'quantity2',
        'price2_1', 'price2_2', 'quantity3', 'quantity4',
        'amount1', 'amount2', 'sales_ex1', 'sales_ex2',
        'job1', 'job2', 'labour_charge', 'other_charge',
        'total1', 'total2', 'total3', 'total4', 'total5', 'total6',
        'discount', 'tax',
    )
    record = PRSales_Return(**{name: request.POST[name] for name in field_names})
    record.save()
    return redirect('/')
def psreturnview(request):
    """List every purchase-return row."""
    records = PRSales_Return.objects.all()
    return render(request, 'Sam/show purchase return.html', {'rtn': records})
def editpsreturn(request, id):
    """Render the edit form for a single purchase-return row."""
    record = PRSales_Return.objects.get(id=id)
    return render(request, 'Sam/edit purchase return.html', {'rtn': record})
def updatepsreturn(request, id):
    """Copy every POSTed form field onto an existing purchase-return row.

    Looks the PRSales_Return record up by primary key, assigns each form
    value verbatim, persists the row, and re-renders the purchase page.
    """
    rtn = PRSales_Return.objects.get(id=id)
    # Field list mirrors psreturncreate.  The original code assigned
    # ``request.POST['credit_limit_amt'],`` (trailing comma), storing a
    # 1-tuple instead of the string value -- fixed by the uniform loop below.
    fields = (
        'invoice_number', 'date', 'internal_ref_no', 'user_id', 'due_on',
        'credit_limit_amt', 'supp_id', 'supp_name',
        'item_id1', 'item_id2', 'item_details1', 'item_details2',
        'price1_1', 'price1_2', 'quantity1', 'quantity2',
        'price2_1', 'price2_2', 'quantity3', 'quantity4',
        'amount1', 'amount2', 'sales_ex1', 'sales_ex2',
        'job1', 'job2', 'labour_charge', 'other_charge',
        'total1', 'total2', 'total3', 'total4', 'total5', 'total6',
        'discount', 'tax',
    )
    for name in fields:
        setattr(rtn, name, request.POST[name])
    rtn.save()
    return render(request, 'Sam/purchase.html')
def deletepsreturn(request, id):
    """Delete the purchase-return row with the given primary key."""
    PRSales_Return.objects.get(id=id).delete()
    return render(request, 'Sam/purchase.html')
def gopreceipt(request):
    """Show the (empty) purchase-receipt entry form."""
    return render(request, 'Sam/purchase receipt.html')
def preceiptcreate(request):
    """Create a new purchase-receipt (PReceipt) row from the submitted form.

    All model fields are text with POST keys of the same name, so the record
    is built with a single comprehension over the field list.
    """
    field_names = (
        'receipt_number', 'date', 'internal_ref_no', 'due_on',
        'credit_limit_amt', 'user_id', 'supp_id', 'supp_name',
        'invoice_no1', 'invoice_no2', 'invoice_no3',
        'invoice_date1', 'invoice_date2', 'invoice_date3',
        'duedate1', 'duedate2', 'duedate3',
        'invoice_amt1', 'invoice_amt2', 'invoice_amt3',
        'received_amt1', 'received_amt2', 'received_amt3',
        'outstanding1', 'outstanding2', 'outstanding3',
        'discount1', 'discount2', 'discount3',
        'balance_amt1', 'balance_amt2', 'balance_amt3',
        'tick_space1', 'tick_space2', 'tick_space3',
        'partial1', 'partial2', 'partial3',
        'total1', 'total2', 'total3', 'total4', 'total5', 'total6',
        'on_account', 'discount',
    )
    record = PReceipt(**{name: request.POST[name] for name in field_names})
    record.save()
    return redirect('/')
def preceiptview(request):
    """List every purchase-receipt row."""
    records = PReceipt.objects.all()
    return render(request, 'Sam/show purchase receipt.html', {'rpt': records})
def editpreceipt(request, id):
    """Render the edit form for a single purchase-receipt row."""
    record = PReceipt.objects.get(id=id)
    return render(request, 'Sam/edit purchase receipt.html', {'rpt': record})
def updatepreceipt(request, id):
    """Copy every POSTed form field onto an existing PReceipt row and save it.

    Looks the purchase-receipt record up by primary key, assigns each form
    value verbatim, persists the row, and re-renders the purchase page.
    """
    rpt = PReceipt.objects.get(id=id)
    # Field list mirrors preceiptcreate.  The original update skipped
    # 'duedate2', 'duedate3' and 'invoice_amt1' (the assignments jumped from
    # duedate1 straight to invoice_amt2), so edits to those fields were
    # silently dropped -- restored here.
    fields = (
        'receipt_number', 'date', 'internal_ref_no', 'due_on',
        'credit_limit_amt', 'user_id', 'supp_id', 'supp_name',
        'invoice_no1', 'invoice_no2', 'invoice_no3',
        'invoice_date1', 'invoice_date2', 'invoice_date3',
        'duedate1', 'duedate2', 'duedate3',
        'invoice_amt1', 'invoice_amt2', 'invoice_amt3',
        'received_amt1', 'received_amt2', 'received_amt3',
        'outstanding1', 'outstanding2', 'outstanding3',
        'discount1', 'discount2', 'discount3',
        'balance_amt1', 'balance_amt2', 'balance_amt3',
        'tick_space1', 'tick_space2', 'tick_space3',
        'partial1', 'partial2', 'partial3',
        'total1', 'total2', 'total3', 'total4', 'total5', 'total6',
        'on_account', 'discount',
    )
    for name in fields:
        setattr(rpt, name, request.POST[name])
    rpt.save()
    return render(request, 'Sam/purchase.html')
def deletepreceipt(request, id):
    """Delete the PReceipt row with the given primary key, then show purchases."""
    PReceipt.objects.get(id=id).delete()
    return render(request, 'Sam/purchase.html')
|
{"/Sam/forms.py": ["/Sam/models.py"], "/Sam/migrations/0007_employee.py": ["/Sam/models.py"], "/Sam/migrations/0006_job.py": ["/Sam/models.py"], "/Sam/views.py": ["/Sam/models.py", "/Sam/forms.py"], "/Sam/migrations/0001_initial.py": ["/Sam/models.py"]}
|
29,150
|
Rincmol/sam-backend-main
|
refs/heads/main
|
/Sam/migrations/0009_ledger.py
|
# Generated by Django 3.2.7 on 2021-09-16 09:28
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: creates the ``Ledger`` table.

    NOTE(review): generated file -- change the schema via new migrations,
    do not edit this one in place.
    """

    dependencies = [
        ('Sam', '0008_group'),
    ]

    operations = [
        migrations.CreateModel(
            name='Ledger',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('ledger_name', models.TextField(max_length=100)),
                ('group_name', models.TextField(max_length=100)),
                ('category', models.TextField(max_length=100)),
                # opening balance stored as text, like every other field here
                ('opening_bal', models.TextField(max_length=100)),
            ],
        ),
    ]
|
{"/Sam/forms.py": ["/Sam/models.py"], "/Sam/migrations/0007_employee.py": ["/Sam/models.py"], "/Sam/migrations/0006_job.py": ["/Sam/models.py"], "/Sam/views.py": ["/Sam/models.py", "/Sam/forms.py"], "/Sam/migrations/0001_initial.py": ["/Sam/models.py"]}
|
29,151
|
Rincmol/sam-backend-main
|
refs/heads/main
|
/Sam/migrations/0025_alter_supplier_masterdata_created_at.py
|
# Generated by Django 3.2.7 on 2021-10-26 11:15
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: makes ``Supplier_Masterdata.created_at``
    blank-able with a ``datetime.now`` default and non-editable."""

    dependencies = [
        ('Sam', '0024_alter_supplier_masterdata_updated_at'),
    ]

    operations = [
        migrations.AlterField(
            model_name='supplier_masterdata',
            name='created_at',
            # default is the callable datetime.now (evaluated per row), not a
            # fixed timestamp
            field=models.DateTimeField(blank=True, default=datetime.datetime.now, editable=False),
        ),
    ]
|
{"/Sam/forms.py": ["/Sam/models.py"], "/Sam/migrations/0007_employee.py": ["/Sam/models.py"], "/Sam/migrations/0006_job.py": ["/Sam/models.py"], "/Sam/views.py": ["/Sam/models.py", "/Sam/forms.py"], "/Sam/migrations/0001_initial.py": ["/Sam/models.py"]}
|
29,152
|
Rincmol/sam-backend-main
|
refs/heads/main
|
/Sam/migrations/0029_alter_supplier_masterdata_updated_at.py
|
# Generated by Django 3.2.7 on 2021-10-26 12:36
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: allows NULL in
    ``Supplier_Masterdata.updated_at``."""

    dependencies = [
        ('Sam', '0028_auto_20211026_1748'),
    ]

    operations = [
        migrations.AlterField(
            model_name='supplier_masterdata',
            name='updated_at',
            field=models.DateTimeField(null=True),
        ),
    ]
|
{"/Sam/forms.py": ["/Sam/models.py"], "/Sam/migrations/0007_employee.py": ["/Sam/models.py"], "/Sam/migrations/0006_job.py": ["/Sam/models.py"], "/Sam/views.py": ["/Sam/models.py", "/Sam/forms.py"], "/Sam/migrations/0001_initial.py": ["/Sam/models.py"]}
|
29,153
|
Rincmol/sam-backend-main
|
refs/heads/main
|
/Sam/migrations/0013_cash.py
|
# Generated by Django 3.2.3 on 2021-10-13 13:07
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: creates the ``Cash`` (cash sale) table.

    All business fields are stored as text; only ``date`` uses CharField.
    NOTE(review): generated file -- do not edit in place.
    """

    dependencies = [
        ('Sam', '0012_asset_expences_income_liabilities'),
    ]

    operations = [
        migrations.CreateModel(
            name='Cash',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('invoice_number', models.TextField(max_length=100)),
                ('date', models.CharField(max_length=100)),
                ('internal_ref_no', models.TextField(max_length=100)),
                ('cash', models.TextField(max_length=100)),
                ('user_id', models.TextField(max_length=100)),
                ('account', models.TextField(max_length=100)),
                ('customer_id', models.TextField(max_length=100)),
                ('customer_name', models.TextField(max_length=100)),
                ('item_id1', models.TextField(max_length=100)),
                ('item_id2', models.TextField(max_length=100)),
                ('item_details1', models.TextField(max_length=100)),
                ('item_details2', models.TextField(max_length=100)),
                ('price1_1', models.TextField(max_length=100)),
                ('price1_2', models.TextField(max_length=100)),
                ('price2_1', models.TextField(max_length=100)),
                ('price2_2', models.TextField(max_length=100)),
                ('quantity1', models.TextField(max_length=100)),
                ('quantity2', models.TextField(max_length=100)),
                ('quantity3', models.TextField(max_length=100)),
                ('quantity4', models.TextField(max_length=100)),
                ('amount1', models.TextField(max_length=100)),
                ('amount2', models.TextField(max_length=100)),
                ('sales_ex1', models.TextField(max_length=100)),
                ('sales_ex2', models.TextField(max_length=100)),
                ('job1', models.TextField(max_length=100)),
                ('job2', models.TextField(max_length=100)),
                ('labour_charge', models.TextField(max_length=100)),
                ('other_charge', models.TextField(max_length=100)),
                ('total1', models.TextField(max_length=100)),
                ('total2', models.TextField(max_length=100)),
                ('total3', models.TextField(max_length=100)),
                ('total4', models.TextField(max_length=100)),
                ('total5', models.TextField(max_length=100)),
                ('total6', models.TextField(max_length=100)),
                ('discount', models.TextField(max_length=100)),
                ('tax', models.TextField(max_length=100)),
            ],
        ),
    ]
|
{"/Sam/forms.py": ["/Sam/models.py"], "/Sam/migrations/0007_employee.py": ["/Sam/models.py"], "/Sam/migrations/0006_job.py": ["/Sam/models.py"], "/Sam/views.py": ["/Sam/models.py", "/Sam/forms.py"], "/Sam/migrations/0001_initial.py": ["/Sam/models.py"]}
|
29,154
|
Rincmol/sam-backend-main
|
refs/heads/main
|
/Sam/migrations/0021_supplier_masterdata_date_created.py
|
# Generated by Django 3.2.7 on 2021-10-26 10:55
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: adds ``Supplier_Masterdata.date_created``
    (later renamed to ``created_at`` in migration 0022)."""

    dependencies = [
        ('Sam', '0020_customer_invoice_customer_invoice_receipt_customer_masterdata_customer_outstand_customer_receipt_cus'),
    ]

    operations = [
        migrations.AddField(
            model_name='supplier_masterdata',
            name='date_created',
            field=models.DateTimeField(blank=True, default=datetime.datetime.now),
        ),
    ]
|
{"/Sam/forms.py": ["/Sam/models.py"], "/Sam/migrations/0007_employee.py": ["/Sam/models.py"], "/Sam/migrations/0006_job.py": ["/Sam/models.py"], "/Sam/views.py": ["/Sam/models.py", "/Sam/forms.py"], "/Sam/migrations/0001_initial.py": ["/Sam/models.py"]}
|
29,155
|
Rincmol/sam-backend-main
|
refs/heads/main
|
/Sam/migrations/0001_initial.py
|
# Generated by Django 3.2.7 on 2021-09-16 07:41
import Sam.models
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial auto-generated migration: creates the ``Item`` table.

    Image fields resolve their upload path through ``Sam.models.filepath``.
    NOTE(review): generated file -- do not edit in place.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Item',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('item_name', models.TextField(max_length=100)),
                ('item_desc', models.TextField(max_length=500, null=True)),
                ('item_barcode', models.TextField(max_length=50)),
                ('item_category', models.TextField(max_length=50)),
                ('item_unit_prim', models.TextField(max_length=100)),
                ('item_unit_sec', models.TextField(max_length=100)),
                ('open_balance', models.TextField(max_length=100)),
                ('buying_price', models.TextField(max_length=50)),
                ('sell_price', models.TextField(max_length=50)),
                ('image1', models.ImageField(blank=True, null=True, upload_to=Sam.models.filepath)),
                ('image2', models.ImageField(blank=True, null=True, upload_to=Sam.models.filepath)),
                ('image3', models.ImageField(blank=True, null=True, upload_to=Sam.models.filepath)),
                ('image4', models.ImageField(blank=True, null=True, upload_to=Sam.models.filepath)),
            ],
        ),
    ]
|
{"/Sam/forms.py": ["/Sam/models.py"], "/Sam/migrations/0007_employee.py": ["/Sam/models.py"], "/Sam/migrations/0006_job.py": ["/Sam/models.py"], "/Sam/views.py": ["/Sam/models.py", "/Sam/forms.py"], "/Sam/migrations/0001_initial.py": ["/Sam/models.py"]}
|
29,156
|
Rincmol/sam-backend-main
|
refs/heads/main
|
/Sam/migrations/0018_ledger_journal_ledger_masterdata.py
|
# Generated by Django 3.2.3 on 2021-10-25 11:17
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: creates the ``Ledger_Journal`` and
    ``Ledger_Masterdata`` report-parameter tables."""

    dependencies = [
        ('Sam', '0017_ledger_statement'),
    ]

    operations = [
        migrations.CreateModel(
            name='Ledger_Journal',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date', models.TextField(max_length=100)),
                ('reportdate', models.TextField(max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='Ledger_Masterdata',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date', models.TextField(max_length=100)),
                ('reportdate', models.TextField(max_length=100)),
            ],
        ),
    ]
|
{"/Sam/forms.py": ["/Sam/models.py"], "/Sam/migrations/0007_employee.py": ["/Sam/models.py"], "/Sam/migrations/0006_job.py": ["/Sam/models.py"], "/Sam/views.py": ["/Sam/models.py", "/Sam/forms.py"], "/Sam/migrations/0001_initial.py": ["/Sam/models.py"]}
|
29,157
|
Rincmol/sam-backend-main
|
refs/heads/main
|
/Sam/migrations/0027_auto_20211026_1647.py
|
# Generated by Django 3.2.7 on 2021-10-26 11:17
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: adds ``created_at`` / ``updated_at``
    timestamps to the ``Customer`` model."""

    dependencies = [
        ('Sam', '0026_alter_supplier_masterdata_created_at'),
    ]

    operations = [
        migrations.AddField(
            model_name='customer',
            name='created_at',
            field=models.DateTimeField(default=datetime.datetime.now, editable=False),
        ),
        migrations.AddField(
            model_name='customer',
            name='updated_at',
            field=models.DateTimeField(null=True),
        ),
    ]
|
{"/Sam/forms.py": ["/Sam/models.py"], "/Sam/migrations/0007_employee.py": ["/Sam/models.py"], "/Sam/migrations/0006_job.py": ["/Sam/models.py"], "/Sam/views.py": ["/Sam/models.py", "/Sam/forms.py"], "/Sam/migrations/0001_initial.py": ["/Sam/models.py"]}
|
29,158
|
Rincmol/sam-backend-main
|
refs/heads/main
|
/Sam/migrations/0003_supplier.py
|
# Generated by Django 3.2.7 on 2021-09-16 09:11
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: creates the ``Supplier`` table.

    NOTE(review): the first field is named ``customer_name`` even though this
    is the supplier table -- kept as generated; renaming requires a follow-up
    migration.
    """

    dependencies = [
        ('Sam', '0002_customer'),
    ]

    operations = [
        migrations.CreateModel(
            name='Supplier',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('customer_name', models.TextField(max_length=100)),
                ('vat_reg_no', models.TextField(max_length=100)),
                ('cr_no', models.TextField(max_length=100)),
                ('expired_on', models.TextField(max_length=100)),
                ('land_phone', models.TextField(max_length=100)),
                ('mobile', models.TextField(max_length=100)),
                ('contact_person', models.TextField(max_length=100)),
                ('contact_mobile', models.TextField(max_length=100)),
                ('email', models.TextField(max_length=100)),
                ('address', models.TextField(max_length=100)),
                ('open_balance', models.TextField(max_length=100)),
                ('credit_lim_am', models.TextField(max_length=100)),
                ('credit_lim_dur', models.TextField(max_length=100)),
                ('bank_acc_name', models.TextField(max_length=100)),
                ('bank_acc_no', models.TextField(max_length=100)),
            ],
        ),
    ]
|
{"/Sam/forms.py": ["/Sam/models.py"], "/Sam/migrations/0007_employee.py": ["/Sam/models.py"], "/Sam/migrations/0006_job.py": ["/Sam/models.py"], "/Sam/views.py": ["/Sam/models.py", "/Sam/forms.py"], "/Sam/migrations/0001_initial.py": ["/Sam/models.py"]}
|
29,159
|
Rincmol/sam-backend-main
|
refs/heads/main
|
/Sam/migrations/0017_ledger_statement.py
|
# Generated by Django 3.2.3 on 2021-10-25 06:44
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: creates the ``Ledger_Statement``
    report-parameter table."""

    dependencies = [
        ('Sam', '0016_auto_20211019_1145'),
    ]

    operations = [
        migrations.CreateModel(
            name='Ledger_Statement',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date', models.TextField(max_length=100)),
                ('ledger_name', models.TextField(max_length=100)),
                ('ledger_id', models.TextField(max_length=100)),
                ('period', models.TextField(max_length=100)),
            ],
        ),
    ]
|
{"/Sam/forms.py": ["/Sam/models.py"], "/Sam/migrations/0007_employee.py": ["/Sam/models.py"], "/Sam/migrations/0006_job.py": ["/Sam/models.py"], "/Sam/views.py": ["/Sam/models.py", "/Sam/forms.py"], "/Sam/migrations/0001_initial.py": ["/Sam/models.py"]}
|
29,160
|
Rincmol/sam-backend-main
|
refs/heads/main
|
/Sam/migrations/0020_customer_invoice_customer_invoice_receipt_customer_masterdata_customer_outstand_customer_receipt_cus.py
|
# Generated by Django 3.2.7 on 2021-10-26 07:22
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: creates twelve customer/supplier report
    tables (invoices, receipts, masterdata, outstanding, statements,
    payment history).

    NOTE(review): generated file -- do not edit in place.
    """

    dependencies = [
        ('Sam', '0019_item_statement_job_masterdata_job_statement_stock_adjustment_stock_balance_stock_masterdata'),
    ]

    operations = [
        migrations.CreateModel(
            name='Customer_Invoice',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('report_date', models.TextField(max_length=100)),
                ('invoice_no', models.TextField(max_length=100)),
                ('customer_id', models.TextField(max_length=100)),
                ('customer_name', models.TextField(max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='Customer_Invoice_Receipt',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date', models.TextField(max_length=100)),
                ('report_date', models.TextField(max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='Customer_Masterdata',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date', models.TextField(max_length=100)),
                ('report_date', models.TextField(max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='Customer_Outstand',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date', models.TextField(max_length=100)),
                ('report_date', models.TextField(max_length=100)),
                ('customer_name', models.TextField(max_length=100)),
                ('customer_id', models.TextField(max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='Customer_Receipt',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('customer_id', models.TextField(max_length=100)),
                ('report_date', models.TextField(max_length=100)),
                ('customer_name', models.TextField(max_length=100)),
                ('receipt_no', models.TextField(max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='Customer_Statement',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date', models.TextField(max_length=100)),
                ('report_period', models.TextField(max_length=100)),
                ('customer_name', models.TextField(max_length=100)),
                ('customer_id', models.TextField(max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='payment_History',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('Supplier_name', models.TextField(max_length=100)),
                ('Supplier_id', models.TextField(max_length=100)),
                ('report_date', models.TextField(max_length=100)),
                ('voucher_no', models.TextField(max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='Supplier_Invoice',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('report_date', models.TextField(max_length=100)),
                ('invoice_no', models.TextField(max_length=100)),
                ('Supplier_name', models.TextField(max_length=100)),
                ('Supplier_id', models.TextField(max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='Supplier_Invoice_Receipt',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date', models.TextField(max_length=100)),
                ('report_date', models.TextField(max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='Supplier_Masterdata',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date', models.TextField(max_length=100)),
                ('report_date', models.TextField(max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='Supplier_Outstand',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date', models.TextField(max_length=100)),
                ('report_date', models.TextField(max_length=100)),
                ('Supplier_name', models.TextField(max_length=100)),
                ('Supplier_id', models.TextField(max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='Supplier_Statement',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('Supplier_name', models.TextField(max_length=100)),
                ('Supplier_id', models.TextField(max_length=100)),
                ('date', models.TextField(max_length=100)),
                ('report_period', models.TextField(max_length=100)),
            ],
        ),
    ]
|
{"/Sam/forms.py": ["/Sam/models.py"], "/Sam/migrations/0007_employee.py": ["/Sam/models.py"], "/Sam/migrations/0006_job.py": ["/Sam/models.py"], "/Sam/views.py": ["/Sam/models.py", "/Sam/forms.py"], "/Sam/migrations/0001_initial.py": ["/Sam/models.py"]}
|
29,161
|
Rincmol/sam-backend-main
|
refs/heads/main
|
/Sam/migrations/0022_rename_date_created_supplier_masterdata_created_at.py
|
# Generated by Django 3.2.7 on 2021-10-26 11:08
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: renames ``Supplier_Masterdata.date_created``
    (added in 0021) to ``created_at``."""

    dependencies = [
        ('Sam', '0021_supplier_masterdata_date_created'),
    ]

    operations = [
        migrations.RenameField(
            model_name='supplier_masterdata',
            old_name='date_created',
            new_name='created_at',
        ),
    ]
|
{"/Sam/forms.py": ["/Sam/models.py"], "/Sam/migrations/0007_employee.py": ["/Sam/models.py"], "/Sam/migrations/0006_job.py": ["/Sam/models.py"], "/Sam/views.py": ["/Sam/models.py", "/Sam/forms.py"], "/Sam/migrations/0001_initial.py": ["/Sam/models.py"]}
|
29,172
|
ram0973/marnadi
|
refs/heads/master
|
/marnadi/utils/lazy.py
|
import weakref
from marnadi.utils import metaclass, import_module
class CachedDescriptor(object):
    """Data descriptor that memoizes one value per owner instance.

    Cached values live in a ``weakref.WeakKeyDictionary`` keyed by the owner
    instance, so entries vanish automatically when the owner is garbage
    collected.  Subclasses must implement :meth:`get_value`; they may also
    override :meth:`set_value` to transform assigned values before storage.
    """

    __slots__ = ('cache',)

    def __init__(self):
        self.cache = weakref.WeakKeyDictionary()

    def __get__(self, instance, instance_type=None):
        if instance is None:
            # Class-level access yields the descriptor object itself.
            return self
        try:
            return self.cache[instance]
        except KeyError:
            # First access for this instance: compute, store, return.
            computed = self.get_value(instance)
            self.cache[instance] = computed
            return computed

    def __set__(self, instance, value):
        self.cache[instance] = self.set_value(instance, value)

    def __delete__(self, instance):
        # Dropping the cache entry forces recomputation on next access.
        del self.cache[instance]

    def get_value(self, instance):
        """Compute the value for *instance* (subclass responsibility)."""
        raise NotImplementedError

    def set_value(self, instance, value):
        """Hook for transforming assigned values; identity by default."""
        return value
class CachedProperty(CachedDescriptor):
    """``property``-like descriptor whose getter result is cached per instance.

    Behaves like the builtin ``property`` but memoizes the getter's value in
    the inherited weak-keyed cache until the attribute is set or deleted.
    """

    __slots__ = ('get', 'set', '__doc__')

    def __init__(self, fget=None, fset=None, doc=None):
        super(CachedProperty, self).__init__()
        self.get = fget
        self.set = fset
        self.__doc__ = doc

    def get_value(self, instance):
        if self.get is None:
            raise AttributeError("unreadable attribute")
        return self.get(instance)

    def set_value(self, instance, value):
        # Without a registered setter, fall back to storing the raw value.
        if self.set is None:
            return super(CachedProperty, self).set_value(instance, value)
        return self.set(instance, value)

    def getter(self, fget):
        """Register *fget* as the getter; returns self for decorator use."""
        self.get = fget
        return self

    def setter(self, fset):
        """Register *fset* as the setter; returns self for decorator use."""
        self.set = fset
        return self


# Lowercase alias mirroring the functools naming convention.
cached_property = CachedProperty
class LazyMeta(type):
    """Metaclass that makes construction a pass-through for non-path values.

    Calling the class with something that is already an instance of it, or
    with any non-string value, returns the argument unchanged; only dotted
    path strings go through normal construction.
    """

    def __call__(cls, path):
        if isinstance(path, cls):
            return path
        if not isinstance(path, str):
            return path
        return super(LazyMeta, cls).__call__(path)
@metaclass(LazyMeta)
class Lazy(object):
    """Transparent proxy that defers importing its dotted-path target.

    Built from a string like ``"pkg.mod.attr"``; ``LazyMeta`` makes the
    constructor a no-op for non-string arguments, so ``Lazy(obj)`` returns
    ``obj`` unchanged.  The target is resolved on first real use and cached
    per proxy via ``cached_property``.
    """

    # '__weakref__' is required because cached_property keys its
    # WeakKeyDictionary cache on this instance; '__class__' in __slots__ is
    # unusual -- presumably to allow reassigning the proxy's class; verify
    # against callers before changing.
    __slots__ = '__path', '__weakref__', '__class__'

    def __init__(self, path):
        super(Lazy, self).__init__()
        self.__path = path  # dotted "module.attr" (or bare module) path

    def __call__(self, *args, **kwargs):
        # All protocol methods below simply delegate to the resolved target.
        return self.__obj(*args, **kwargs)

    def __iter__(self):
        return iter(self.__obj)

    def __len__(self):
        return len(self.__obj)

    def __str__(self):
        return str(self.__obj)

    def __unicode__(self):
        # Python 2 only: 'unicode' is undefined on Python 3, where the
        # interpreter never invokes __unicode__.
        return unicode(self.__obj)

    def __bytes__(self):
        return bytes(self.__obj)

    def __getitem__(self, item):
        return self.__obj[item]

    def __getattr__(self, attr):
        return getattr(self.__obj, attr)

    def __bool__(self):
        return bool(self.__obj)

    def __nonzero__(self):
        # Python 2 spelling of __bool__.
        return self.__bool__()

    @cached_property
    def __obj(self):
        # Resolve "pkg.mod.attr" -> attribute of pkg.mod, or "mod" -> module.
        path, _, attribute = self.__path.rpartition('.')
        if not path:
            # No dot: the whole string is the module name.
            path, attribute = attribute, path
        module = import_module(path)
        if attribute:
            return getattr(module, attribute)
        return module
|
{"/marnadi/utils/lazy.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/__init__.py": ["/marnadi/utils/__init__.py"], "/marnadi/response.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/http/cookies.py": ["/marnadi/utils/__init__.py"], "/tests/test_wsgi.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py", "/marnadi/wsgi.py"], "/marnadi/__init__.py": ["/marnadi/response.py", "/marnadi/route.py"], "/marnadi/http/__init__.py": ["/marnadi/http/cookies.py", "/marnadi/http/headers.py", "/marnadi/http/error.py", "/marnadi/http/data/__init__.py"], "/tests/test_route.py": ["/marnadi/__init__.py", "/marnadi/route.py"], "/marnadi/wsgi.py": ["/marnadi/__init__.py", "/marnadi/route.py", "/marnadi/utils/__init__.py"], "/marnadi/http/headers.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/decoders/application/json.py": ["/marnadi/http/__init__.py", "/marnadi/http/data/decoders/__init__.py"], "/tests/test_utils.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/error.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/utils/__init__.py": ["/marnadi/utils/lazy.py"], "/marnadi/http/data/decoders/__init__.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/route.py": ["/marnadi/utils/__init__.py"], "/tests/test_response.py": ["/marnadi/__init__.py", "/marnadi/wsgi.py"], "/marnadi/http/data/decoders/application/x_www_form_urlencoded.py": ["/marnadi/http/data/decoders/__init__.py"]}
|
29,173
|
ram0973/marnadi
|
refs/heads/master
|
/marnadi/http/data/__init__.py
|
import itertools
import collections
from marnadi.utils import Lazy, CachedDescriptor
from . import decoders
class Data(CachedDescriptor, collections.Mapping):
    """Descriptor mapping content types to lazily-loaded request decoders.

    Decoders may be given as ``(content_type, decoder)`` pairs and/or as
    keyword arguments; each decoder is wrapped in ``Lazy`` so it is only
    imported when actually used.
    """

    __slots__ = '_content_decoders',

    def __init__(self, *content_decoders, **kw_content_decoders):
        super(Data, self).__init__()
        pairs = itertools.chain(
            content_decoders,
            kw_content_decoders.items(),
        )
        self._content_decoders = dict(
            (content_type, Lazy(decoder))
            for content_type, decoder in pairs
        )

    def __getitem__(self, content_type):
        return self._content_decoders[content_type]

    def __iter__(self):
        return iter(self._content_decoders)

    def __len__(self):
        return len(self._content_decoders)

    def get_value(self, request):
        # Pick the decoder registered for the request's content type,
        # falling back to the generic Decoder, and decode the request.
        decoder_class = self.get(request.content_type, decoders.Decoder)
        return decoder_class(request)
|
{"/marnadi/utils/lazy.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/__init__.py": ["/marnadi/utils/__init__.py"], "/marnadi/response.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/http/cookies.py": ["/marnadi/utils/__init__.py"], "/tests/test_wsgi.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py", "/marnadi/wsgi.py"], "/marnadi/__init__.py": ["/marnadi/response.py", "/marnadi/route.py"], "/marnadi/http/__init__.py": ["/marnadi/http/cookies.py", "/marnadi/http/headers.py", "/marnadi/http/error.py", "/marnadi/http/data/__init__.py"], "/tests/test_route.py": ["/marnadi/__init__.py", "/marnadi/route.py"], "/marnadi/wsgi.py": ["/marnadi/__init__.py", "/marnadi/route.py", "/marnadi/utils/__init__.py"], "/marnadi/http/headers.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/decoders/application/json.py": ["/marnadi/http/__init__.py", "/marnadi/http/data/decoders/__init__.py"], "/tests/test_utils.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/error.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/utils/__init__.py": ["/marnadi/utils/lazy.py"], "/marnadi/http/data/decoders/__init__.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/route.py": ["/marnadi/utils/__init__.py"], "/tests/test_response.py": ["/marnadi/__init__.py", "/marnadi/wsgi.py"], "/marnadi/http/data/decoders/application/x_www_form_urlencoded.py": ["/marnadi/http/data/decoders/__init__.py"]}
|
29,174
|
ram0973/marnadi
|
refs/heads/master
|
/marnadi/response.py
|
import collections
import itertools
import logging
from marnadi import http
from marnadi.utils import to_bytes, cached_property, coroutine, metaclass
# Python 2 compatibility: rebind `str` to mean text (`unicode`) on
# Python 2; on Python 3 `unicode` does not exist and `str` is kept as-is.
try:
    str = unicode
except NameError:
    pass
@metaclass(http.Handler)
class Response(object):
    """Base HTTP response handler.

    Instances are iterable: iterating yields the encoded body chunks
    produced by ``iterator``. HTTP methods are exposed as ``http.Method``
    descriptors (``get``, ``post``, ...); ``__call__`` dispatches the
    request to the matching one.
    """

    logger = logging.getLogger('marnadi')

    # Default status line; subclasses/handlers may override.
    status = '200 OK'

    # Methods the framework knows how to dispatch at all; a request
    # outside this set yields "501 Not Implemented".
    supported_http_methods = set(
        ('OPTIONS', 'GET', 'HEAD', 'POST', 'PUT', 'PATCH', 'DELETE')
    )

    # Only use __slots__ when the stdlib Iterator base also defines it,
    # otherwise a __dict__ would exist anyway and slots gain nothing.
    if hasattr(collections.Iterator, '__slots__'):
        __slots__ = 'app', 'request', '__weakref__'

    # Class-level descriptors providing per-response headers and cookies.
    headers = http.Headers(
        ('Content-Type', http.Header('text/plain', charset='utf-8')),
    )
    cookies = http.Cookies()

    def __init__(self, app, request):
        self.app = app
        self.request = request

    def __call__(self, **kwargs):
        """Dispatch the request to the handler for its HTTP method.

        Raises http.Error 501 for methods the framework does not support
        at all, 405 for supported methods this handler does not implement.
        """
        if self.request.method not in self.supported_http_methods:
            raise http.Error(
                '501 Not Implemented',
                headers=(('Allow', ', '.join(self.allowed_http_methods)), )
            )
        callback = getattr(self, self.request.method.lower())
        if callback is None:
            raise http.Error(
                '405 Method Not Allowed',
                headers=(('Allow', ', '.join(self.allowed_http_methods)), )
            )
        return callback(**kwargs)

    def __iter__(self):
        return self

    def __next__(self):
        return next(self.iterator)

    def next(self):
        # Python 2 spelling of __next__.
        return self.__next__()

    @cached_property
    @coroutine
    def iterator(self):
        """Coroutine turning the handler's result into byte chunks.

        Primed by ``coroutine``; ``start`` sends the handler result in.
        Sets Content-Length only when the full length is knowable: for
        str/bytes/None results, or for sized iterables of length <= 1.
        """
        result = yield
        if result is None or isinstance(result, (str, bytes)):
            chunk = to_bytes(result)
            self.headers['Content-Length'] = len(chunk)
            yield chunk
        else:
            chunks = iter(result)
            first_chunk = to_bytes(next(chunks, b''))
            try:
                result_length = len(result)
            except TypeError:  # result doesn't support len()
                pass
            else:
                if result_length <= 1:
                    self.headers['Content-Length'] = len(first_chunk)
            yield first_chunk
            for chunk in chunks:
                # Encoding errors on later chunks are logged, not raised:
                # headers may already have been sent by then.
                yield to_bytes(chunk, error_callback=self.logger.exception)

    @classmethod
    def start(cls, *args, **params):
        """Instantiate the handler, run it, and return the response.

        Rebinds ``iterator`` so the first ``next()`` yields the chunk
        produced by sending the handler result into the coroutine, and
        subsequent ones continue the original coroutine.
        """
        try:
            response = cls(*args)
            result = response(**params)
            response.iterator = itertools.chain(
                (response.iterator.send(result), ),
                response.iterator
            )
            return response
        except http.Error:
            # Framework errors carry their own status/headers; re-raise.
            raise
        except Exception as error:
            cls.logger.exception(error)
            raise

    @property
    def allowed_http_methods(self):
        """Yield the supported methods this handler actually implements."""
        for method in self.supported_http_methods:
            if getattr(self, method.lower()):
                yield method

    @http.Method
    def options(self, **kwargs):
        # Default OPTIONS: advertise the implemented methods, empty body.
        self.headers['Allow'] = ', '.join(self.allowed_http_methods)

    # Placeholder descriptors: evaluate falsy until a handler defines the
    # method, which makes __call__ answer 405 for them.
    get = http.Method()
    head = http.Method()
    post = http.Method()
    put = http.Method()
    patch = http.Method()
    delete = http.Method()
|
{"/marnadi/utils/lazy.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/__init__.py": ["/marnadi/utils/__init__.py"], "/marnadi/response.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/http/cookies.py": ["/marnadi/utils/__init__.py"], "/tests/test_wsgi.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py", "/marnadi/wsgi.py"], "/marnadi/__init__.py": ["/marnadi/response.py", "/marnadi/route.py"], "/marnadi/http/__init__.py": ["/marnadi/http/cookies.py", "/marnadi/http/headers.py", "/marnadi/http/error.py", "/marnadi/http/data/__init__.py"], "/tests/test_route.py": ["/marnadi/__init__.py", "/marnadi/route.py"], "/marnadi/wsgi.py": ["/marnadi/__init__.py", "/marnadi/route.py", "/marnadi/utils/__init__.py"], "/marnadi/http/headers.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/decoders/application/json.py": ["/marnadi/http/__init__.py", "/marnadi/http/data/decoders/__init__.py"], "/tests/test_utils.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/error.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/utils/__init__.py": ["/marnadi/utils/lazy.py"], "/marnadi/http/data/decoders/__init__.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/route.py": ["/marnadi/utils/__init__.py"], "/tests/test_response.py": ["/marnadi/__init__.py", "/marnadi/wsgi.py"], "/marnadi/http/data/decoders/application/x_www_form_urlencoded.py": ["/marnadi/http/data/decoders/__init__.py"]}
|
29,175
|
ram0973/marnadi
|
refs/heads/master
|
/marnadi/http/cookies.py
|
import collections
import copy
import datetime
import locale
import time
import weakref
from marnadi.utils import cached_property, CachedDescriptor
class CookieJar(collections.MutableMapping):
    """Cookies - dict-like object allowing to get/set HTTP cookies.

    Reads reflect the cookies the client sent (the "Cookie" request
    header); writes and deletions queue "Set-Cookie" response headers and
    keep the local view in sync.
    """

    # NOTE(review): `collections.MutableMapping` (not `collections.abc`)
    # is presumably kept for Python 2 compatibility; the alias is gone on
    # Python >= 3.10 — confirm the supported interpreter range.
    if hasattr(collections.MutableMapping, '__slots__'):
        __slots__ = ('_response', 'domain', 'path', 'expires', 'secure',
                     'http_only', '__weakref__')

    def __init__(self, response, domain=None, path=None, expires=None,
                 secure=False, http_only=True, ):
        # Weak reference: the jar must not keep the response alive.
        self._response = weakref.ref(response)
        self.domain = domain
        self.path = path
        self.expires = expires
        self.secure = secure
        self.http_only = http_only

    # Identity-based hashing/comparison; MutableMapping would otherwise
    # compare by content and make instances unhashable.
    __hash__ = object.__hash__
    __eq__ = object.__eq__
    __ne__ = object.__ne__

    def __setitem__(self, cookie, value):
        """Queue a Set-Cookie header and mirror the change locally.

        Setting a cookie to None removes it (see ``set``).
        """
        self.set(cookie, value)
        if value is None:
            self.request_cookies.pop(cookie, None)
        else:
            self.request_cookies[cookie] = value

    def __delitem__(self, cookie):
        # Expire the cookie on the client and forget it locally; raises
        # KeyError if the client never sent it.
        self.remove(cookie)
        del self.request_cookies[cookie]

    def __getitem__(self, cookie):
        return self.request_cookies[cookie]

    def __iter__(self):
        return iter(self.request_cookies)

    def __len__(self):
        return len(self.request_cookies)

    @property
    def response(self):
        """Dereference the weakly-held response or fail loudly."""
        response = self._response()
        if response is not None:
            return response
        raise ValueError("CookieJar used outside of response scope")

    @cached_property
    def request_cookies(self):
        """Parse the request's "Cookie" header into a dict (cached).

        Missing or malformed headers yield an empty dict.
        """
        try:
            return dict(
                cookie.strip().split('=', 1)
                for cookie in self.response.request.headers['Cookie'].split(';')
            )
        except (KeyError, ValueError):
            return {}

    def clear(self, *cookies):
        """Remove the given cookies, or every known cookie if none given."""
        if cookies:
            for cookie in cookies:
                self.pop(cookie, None)
        else:
            super(CookieJar, self).clear()

    def remove(self, cookie, domain=None, path=None, secure=None,
               http_only=None):
        """Expire a cookie on the client by setting it into the past."""
        self.set(cookie, '', expires=datetime.datetime(1980, 1, 1),
                 domain=domain, path=path, secure=secure, http_only=http_only)

    def set(self, cookie, value, expires=None, domain=None, path=None,
            secure=None, http_only=None):
        """Append a Set-Cookie response header for `cookie`.

        Parameters left as None fall back to the jar-wide defaults.
        A None `value` removes the cookie instead. `expires` may be a
        datetime or a timedelta (relative to now).
        """
        if value is None:
            return self.remove(cookie, domain=domain, path=path,
                               secure=secure, http_only=http_only)
        domain = self.domain if domain is None else domain
        path = self.path if path is None else path
        expires = self.expires if expires is None else expires
        secure = self.secure if secure is None else secure
        http_only = self.http_only if http_only is None else http_only
        cookie_params = ['%s=%s' % (cookie, value)]
        # Explicit `if` statements instead of the original
        # `x and list.append(...)` short-circuit expressions.
        if domain is not None:
            cookie_params.append("Domain=%s" % domain)
        if path is not None:
            cookie_params.append("Path=%s" % path)
        if expires is not None:
            try:
                # try to use `expires` as timedelta
                expires += datetime.datetime.now()
            except TypeError:
                pass
            if isinstance(expires, datetime.datetime):
                struct_time = (
                    time.gmtime(time.mktime(expires.timetuple()))
                    if expires.tzinfo is None else
                    time.localtime(time.mktime(expires.utctimetuple()))
                )
                # NOTE(review): setlocale mutates process-global state and
                # is not thread-safe — confirm single-threaded use or move
                # to a locale-independent formatter.
                current_locale = locale.getlocale(locale.LC_TIME)
                locale.setlocale(locale.LC_TIME, 'C')
                expires = time.strftime("%a, %d %b %Y %H:%M:%S GMT",
                                        struct_time)
                locale.setlocale(locale.LC_TIME, current_locale)
            cookie_params.append("Expires=%s" % expires)
        if secure:
            cookie_params.append("Secure")
        if http_only:
            cookie_params.append("HttpOnly")
        self.response.headers.append(('Set-Cookie', '; '.join(cookie_params)))
class Cookies(CachedDescriptor):
    """Descriptor producing a per-response ``CookieJar``.

    The arguments given here become the jar-wide defaults applied to
    every cookie set through it.
    """

    __slots__ = 'domain', 'path', 'expires', 'secure', 'http_only'

    def __init__(self, domain=None, path=None, expires=None, secure=False,
                 http_only=True):
        super(Cookies, self).__init__()
        self.domain = domain
        self.path = path
        self.expires = expires
        self.secure = secure
        self.http_only = http_only

    def get_value(self, response):
        """Build the jar cached for this particular response."""
        # `expires` may be mutable (datetime/timedelta); hand each jar
        # its own copy so one response cannot affect another's default.
        jar = CookieJar(
            response=response,
            domain=self.domain,
            path=self.path,
            expires=copy.copy(self.expires),
            secure=self.secure,
            http_only=self.http_only,
        )
        return jar
|
{"/marnadi/utils/lazy.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/__init__.py": ["/marnadi/utils/__init__.py"], "/marnadi/response.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/http/cookies.py": ["/marnadi/utils/__init__.py"], "/tests/test_wsgi.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py", "/marnadi/wsgi.py"], "/marnadi/__init__.py": ["/marnadi/response.py", "/marnadi/route.py"], "/marnadi/http/__init__.py": ["/marnadi/http/cookies.py", "/marnadi/http/headers.py", "/marnadi/http/error.py", "/marnadi/http/data/__init__.py"], "/tests/test_route.py": ["/marnadi/__init__.py", "/marnadi/route.py"], "/marnadi/wsgi.py": ["/marnadi/__init__.py", "/marnadi/route.py", "/marnadi/utils/__init__.py"], "/marnadi/http/headers.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/decoders/application/json.py": ["/marnadi/http/__init__.py", "/marnadi/http/data/decoders/__init__.py"], "/tests/test_utils.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/error.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/utils/__init__.py": ["/marnadi/utils/lazy.py"], "/marnadi/http/data/decoders/__init__.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/route.py": ["/marnadi/utils/__init__.py"], "/tests/test_response.py": ["/marnadi/__init__.py", "/marnadi/wsgi.py"], "/marnadi/http/data/decoders/application/x_www_form_urlencoded.py": ["/marnadi/http/data/decoders/__init__.py"]}
|
29,176
|
ram0973/marnadi
|
refs/heads/master
|
/tests/test_wsgi.py
|
try:
import unittest2 as unittest
except ImportError:
import unittest
try:
from unittest import mock
except ImportError:
import mock
from marnadi import Response, Route, http
from marnadi.utils import Lazy
from marnadi.wsgi import App
# Module-level fixtures: a minimal handler bound to two top-level routes.
# The lazy-loading tests below reference these by dotted path, so they
# must stay importable at module scope.
_test_handler = Response
_test_route_1 = Route('a', _test_handler)
_test_route_2 = Route('b', _test_handler)
_test_routes = (
    _test_route_1,
    _test_route_2,
)
class AppTestCase(unittest.TestCase):
    """Tests for ``App.get_handler`` route resolution.

    Naming scheme: ``expected``/``unexpected`` refer to which handler the
    requested path should (not) resolve to; ``params_url`` means kwargs
    captured from ``{placeholders}`` in the path, ``params_route`` means
    kwargs declared on the Route; ``nested`` means subroutes.
    """

    # --- lazy route resolution ------------------------------------------

    def test_get_handler__explicit_lazy_route_match(self):
        lazy_route = Lazy('%s._test_route_1' % __name__)
        app = App([lazy_route])
        app.get_handler('a')

    def test_get_handler__implicit_lazy_route_match(self):
        # A plain dotted-path string must be promoted to Lazy by App.
        lazy_route = '%s._test_route_1' % __name__
        app = App([lazy_route])
        app.get_handler('a')

    def test_get_handler__explicit_lazy_subroute_match(self):
        route = Route('/', routes=(
            Lazy('%s._test_route_1' % __name__),
        ))
        app = App([route])
        app.get_handler('/a')

    def test_get_handler__implicit_lazy_subroute_match(self):
        route = Route('/', routes=(
            '%s._test_route_1' % __name__,
        ))
        app = App([route])
        app.get_handler('/a')

    # Distinct handler classes so assertions can tell which one matched.
    class expected_handler(Response):
        pass

    class unexpected_handler(Response):
        pass

    def _test_get_handler(
        self,
        routes,
        requested_path,
        expected_kwargs=None,
    ):
        """Assert `requested_path` resolves to expected_handler with
        exactly `expected_kwargs` as the partial's keyword arguments."""
        app = App(routes=routes)
        partial = app.get_handler(requested_path)
        actual_handler = partial.func.__self__
        self.assertIs(actual_handler, self.expected_handler)
        self.assertIsNot(actual_handler, self.unexpected_handler)
        self.assertDictEqual(expected_kwargs or {}, partial.keywords)

    # --- flat routes -----------------------------------------------------

    def test_get_handler__empty_route_handler_error(self):
        # A route without a handler must 404, not match.
        with self.assertRaises(http.Error) as context:
            self._test_get_handler(
                routes=(
                    Route('/'),
                ),
                requested_path='/',
            )
        self.assertEqual('404 Not Found', context.exception.status)

    def test_get_handler__expected(self):
        self._test_get_handler(
            routes=(
                Route('/foo', self.expected_handler),
            ),
            requested_path='/foo',
        )

    def test_get_handler__expected_params_url(self):
        self._test_get_handler(
            routes=(
                Route('/{foo}', self.expected_handler),
            ),
            requested_path='/foo',
            expected_kwargs=dict(foo='foo'),
        )

    def test_get_handler__expected_params_route(self):
        self._test_get_handler(
            routes=(
                Route('/foo', self.expected_handler, params=dict(foo='foo')),
            ),
            requested_path='/foo',
            expected_kwargs=dict(foo='foo'),
        )

    def test_get_handler__expected_params_url_route(self):
        self._test_get_handler(
            routes=(
                Route('/{baz}', self.expected_handler, params=dict(foo='foo')),
            ),
            requested_path='/baz',
            expected_kwargs=dict(foo='foo', baz='baz'),
        )

    def test_get_handler__expected_params_url_route_collision(self):
        # URL-captured kwargs win over Route params of the same name.
        self._test_get_handler(
            routes=(
                Route('/{foo}',
                      self.expected_handler, params=dict(foo='baz')),
            ),
            requested_path='/bar',
            expected_kwargs={'foo': 'bar'},
        )

    def test_get_handler__unexpected_error(self):
        with self.assertRaises(http.Error) as context:
            self._test_get_handler(
                routes=(
                    Route('/foo', self.unexpected_handler),
                ),
                requested_path='/',
            )
        self.assertEqual('404 Not Found', context.exception.status)

    def test_get_handler__unexpected_error2(self):
        with self.assertRaises(http.Error) as context:
            self._test_get_handler(
                routes=(
                    Route('/', self.unexpected_handler),
                ),
                requested_path='/foo',
            )
        self.assertEqual('404 Not Found', context.exception.status)

    # --- route ordering must not affect resolution -----------------------

    def test_get_handler__expected_unexpected(self):
        self._test_get_handler(
            routes=(
                Route('/', self.unexpected_handler),
                Route('/foo', self.expected_handler),
            ),
            requested_path='/foo',
        )

    def test_get_handler__unexpected_expected(self):
        self._test_get_handler(
            routes=(
                Route('/foo', self.expected_handler),
                Route('/', self.unexpected_handler),
            ),
            requested_path='/foo',
        )

    def test_get_handler__expected_unexpected2(self):
        self._test_get_handler(
            routes=(
                Route('/', self.expected_handler),
                Route('/foo', self.unexpected_handler),
            ),
            requested_path='/',
        )

    def test_get_handler__unexpected_expected2(self):
        self._test_get_handler(
            routes=(
                Route('/foo', self.unexpected_handler),
                Route('/', self.expected_handler),
            ),
            requested_path='/',
        )

    def test_get_handler__expected_unexpected_params_url(self):
        self._test_get_handler(
            routes=(
                Route('/{bar}', self.unexpected_handler),
                Route('/{foo}/', self.expected_handler),
            ),
            requested_path='/foo/',
            expected_kwargs=dict(foo='foo'),
        )

    def test_get_handler__unexpected_expected_params_url(self):
        self._test_get_handler(
            routes=(
                Route('/{foo}/', self.expected_handler),
                Route('/{bar}', self.unexpected_handler),
            ),
            requested_path='/foo/',
            expected_kwargs=dict(foo='foo'),
        )

    def test_get_handler__expected_unexpected_params_route(self):
        self._test_get_handler(
            routes=(
                Route('/', self.unexpected_handler, params=dict(bar='bar')),
                Route('/foo', self.expected_handler, params=dict(foo='foo')),
            ),
            requested_path='/foo',
            expected_kwargs=dict(foo='foo'),
        )

    def test_get_handler__unexpected_expected_params_route(self):
        self._test_get_handler(
            routes=(
                Route('/foo', self.expected_handler, params=dict(foo='foo')),
                Route('/', self.unexpected_handler, params=dict(bar='bar')),
            ),
            requested_path='/foo',
            expected_kwargs=dict(foo='foo'),
        )

    def test_get_handler__expected_unexpected_params_url_route(self):
        self._test_get_handler(
            routes=(
                Route('/{bar}', self.unexpected_handler, params=dict(z2=2)),
                Route('/{foo}/', self.expected_handler, params=dict(z1=1)),
            ),
            requested_path='/foo/',
            expected_kwargs=dict(foo='foo', z1=1),
        )

    def test_get_handler__unexpected_expected_params_url_route(self):
        self._test_get_handler(
            routes=(
                Route('/{foo}/', self.expected_handler, params=dict(z1=1)),
                Route('/{bar}', self.unexpected_handler, params=dict(z2=2)),
            ),
            requested_path='/foo/',
            expected_kwargs=dict(foo='foo', z1=1),
        )

    # --- nested (sub)routes ----------------------------------------------

    def test_get_handler__nested_expected(self):
        self._test_get_handler(
            routes=(
                Route('/foo', routes=(
                    Route('/bar', self.expected_handler),
                )),
            ),
            requested_path='/foo/bar',
        )

    def test_get_handler__nested_expected_params_url(self):
        self._test_get_handler(
            routes=(
                Route('/{foo}', routes=(
                    Route('/bar', self.expected_handler),
                )),
            ),
            requested_path='/foo/bar',
            expected_kwargs=dict(foo='foo'),
        )

    def test_get_handler__nested_expected_params_url2(self):
        self._test_get_handler(
            routes=(
                Route('/{foo}', routes=(
                    Route('/{bar}', self.expected_handler),
                )),
            ),
            requested_path='/foo/bar',
            expected_kwargs=dict(foo='foo', bar='bar'),
        )

    def test_get_handler__nested_expected_params_url2_collision(self):
        # Inner placeholder overrides the outer one of the same name.
        self._test_get_handler(
            routes=(
                Route('/{foo}', routes=(
                    Route('/{foo}', self.expected_handler),
                )),
            ),
            requested_path='/foo/bar',
            expected_kwargs=dict(foo='bar'),
        )

    def test_get_handler__nested_expected_params_route(self):
        self._test_get_handler(
            routes=(
                Route('/foo', routes=(
                    Route('/bar', self.expected_handler),
                ), params=dict(foo='foo')),
            ),
            requested_path='/foo/bar',
            expected_kwargs=dict(foo='foo'),
        )

    def test_get_handler__nested_expected_params_route2(self):
        self._test_get_handler(
            routes=(
                Route('/foo', routes=(
                    Route('/bar', self.expected_handler,
                          params=dict(bar='bar')),
                ), params=dict(foo='foo')),
            ),
            requested_path='/foo/bar',
            expected_kwargs=dict(foo='foo', bar='bar'),
        )

    def test_get_handler__nested_expected_params_route2_collision(self):
        # Inner Route params override outer ones of the same name.
        self._test_get_handler(
            routes=(
                Route('/foo', routes=(
                    Route('/bar', self.expected_handler,
                          params=dict(foo='bar')),
                ), params=dict(foo='foo')),
            ),
            requested_path='/foo/bar',
            expected_kwargs=dict(foo='bar'),
        )

    def test_get_handler__nested_expected_params_url2_route2(self):
        self._test_get_handler(
            routes=(
                Route('/{foo}', routes=(
                    Route('/{bar}', self.expected_handler,
                          params=dict(y='y')),
                ), params=dict(x='x')),
            ),
            requested_path='/foo/bar',
            expected_kwargs=dict(foo='foo', bar='bar', x='x', y='y'),
        )

    def test_get_handler__nested_expected_params_url2_route2_collision(self):
        self._test_get_handler(
            routes=(
                Route('/{foo}', routes=(
                    Route('/{bar}', self.expected_handler,
                          params=dict(foo='baz')),
                ), params=dict(bar='bar')),
            ),
            requested_path='/foo/baz',
            expected_kwargs=dict(foo='baz', bar='baz'),
        )

    def test_get_handler__nested_unexpected_expected(self):
        self._test_get_handler(
            routes=(
                Route('/foo', routes=(
                    Route('', self.unexpected_handler),
                    Route('/bar', self.expected_handler),
                )),
            ),
            requested_path='/foo/bar',
        )

    def test_get_handler__nested_expected_unexpected(self):
        self._test_get_handler(
            routes=(
                Route('/foo', routes=(
                    Route('/bar', self.expected_handler),
                    Route('', self.unexpected_handler),
                )),
            ),
            requested_path='/foo/bar',
        )

    def test_get_handler__nested_unexpected_expected_half(self):
        # A parent with its own handler still defers to a matching child.
        self._test_get_handler(
            routes=(
                Route('/foo', self.unexpected_handler, routes=(
                    Route('/bar', self.expected_handler),
                )),
            ),
            requested_path='/foo/bar',
        )

    def test_get_handler__nested_expected_unexpected_half(self):
        self._test_get_handler(
            routes=(
                Route('/foo', self.expected_handler, routes=(
                    Route('/bar', self.unexpected_handler),
                )),
            ),
            requested_path='/foo',
        )

    # --- two sibling trees with the same prefix --------------------------

    def test_get_handler__nested2_expected_unexpected(self):
        self._test_get_handler(
            routes=(
                Route('/foo', routes=(
                    Route('/baz', self.expected_handler),
                )),
                Route('/foo', routes=(
                    Route('/bar', self.unexpected_handler),
                )),
            ),
            requested_path='/foo/baz',
        )

    def test_get_handler__nested2_unexpected_expected(self):
        self._test_get_handler(
            routes=(
                Route('/foo', routes=(
                    Route('/bar', self.unexpected_handler),
                )),
                Route('/foo', routes=(
                    Route('/baz', self.expected_handler),
                )),
            ),
            requested_path='/foo/baz',
        )

    def test_get_handler__nested2_unexpected_expected_params_url(self):
        self._test_get_handler(
            routes=(
                Route('/foo/{kwarg1}', routes=(
                    Route('/bar', self.unexpected_handler),
                )),
                Route('/foo/{kwarg2}', routes=(
                    Route('/baz', self.expected_handler),
                )),
            ),
            requested_path='/foo/kwarg/baz',
            expected_kwargs=dict(kwarg2='kwarg'),
        )

    def test_get_handler__nested2_expected_unexpected_params_url(self):
        self._test_get_handler(
            routes=(
                Route('/foo/{kwarg2}', routes=(
                    Route('/baz', self.expected_handler),
                )),
                Route('/foo/{kwarg1}', routes=(
                    Route('/bar', self.unexpected_handler),
                )),
            ),
            requested_path='/foo/kwarg/baz',
            expected_kwargs=dict(kwarg2='kwarg'),
        )

    def test_get_handler__nested2_unexpected_expected_params_route(self):
        self._test_get_handler(
            routes=(
                Route('/foo', routes=(
                    Route('/bar', self.unexpected_handler, params=dict(f1=1)),
                )),
                Route('/foo', routes=(
                    Route('/baz', self.expected_handler, params=dict(f2=2)),
                )),
            ),
            requested_path='/foo/baz',
            expected_kwargs=dict(f2=2),
        )

    def test_get_handler__nested2_expected_unexpected_params_route(self):
        self._test_get_handler(
            routes=(
                Route('/foo', routes=(
                    Route('/baz', self.expected_handler, params=dict(f2=2)),
                )),
                Route('/foo', routes=(
                    Route('/bar', self.unexpected_handler, params=dict(f1=1)),
                )),
            ),
            requested_path='/foo/baz',
            expected_kwargs=dict(f2=2),
        )

    def test_get_handler__nested2_unexpected_expected_params_url_route(self):
        self._test_get_handler(
            routes=(
                Route('/foo/{kwarg1}', routes=(
                    Route('/bar', self.unexpected_handler, params=dict(f1=1)),
                )),
                Route('/foo/{kwarg2}', routes=(
                    Route('/baz', self.expected_handler, params=dict(f2=2)),
                )),
            ),
            requested_path='/foo/kwarg/baz',
            expected_kwargs=dict(kwarg2='kwarg', f2=2),
        )

    def test_get_handler__nested2_expected_unexpected_params_url_route(self):
        self._test_get_handler(
            routes=(
                Route('/foo/{kwarg2}', routes=(
                    Route('/baz', self.expected_handler, params=dict(f2=2)),
                )),
                Route('/foo/{kwarg1}', routes=(
                    Route('/bar', self.unexpected_handler, params=dict(f1=1)),
                )),
            ),
            requested_path='/foo/kwarg/baz',
            expected_kwargs=dict(kwarg2='kwarg', f2=2),
        )

    def test_get_handler__nested2_unexpected_expected_params_url2_route(self):
        self._test_get_handler(
            routes=(
                Route('/foo/{kwarg1}', routes=(
                    Route('/bar/{bar}', self.unexpected_handler,
                          params=dict(f1=1)),
                )),
                Route('/foo/{kwarg2}', routes=(
                    Route('/baz/{baz}', self.expected_handler,
                          params=dict(f2=2)),
                )),
            ),
            requested_path='/foo/kwarg/baz/baz',
            expected_kwargs=dict(kwarg2='kwarg', f2=2, baz='baz'),
        )

    def test_get_handler__nested2_expected_unexpected_params_url2_route(self):
        self._test_get_handler(
            routes=(
                Route('/foo/{kwarg2}', routes=(
                    Route('/baz/{baz}', self.expected_handler,
                          params=dict(f2=2)),
                )),
                Route('/foo/{kwarg1}', routes=(
                    Route('/bar/{bar}', self.unexpected_handler,
                          params=dict(f1=1)),
                )),
            ),
            requested_path='/foo/kwarg/baz/baz',
            expected_kwargs=dict(kwarg2='kwarg', f2=2, baz='baz'),
        )

    def test_get_handler__nested2_unexpected_expected_params_url_route2(self):
        self._test_get_handler(
            routes=(
                Route('/foo/{kwarg1}', routes=(
                    Route('/bar', self.unexpected_handler,
                          params=dict(f1=1)),
                ), params=dict(z1='z1')),
                Route('/foo/{kwarg2}', routes=(
                    Route('/baz', self.expected_handler,
                          params=dict(f2=2)),
                ), params=dict(z2='z2')),
            ),
            requested_path='/foo/kwarg/baz',
            expected_kwargs=dict(kwarg2='kwarg', f2=2, z2='z2'),
        )

    def test_get_handler__nested2_expected_unexpected_params_url_route2(self):
        self._test_get_handler(
            routes=(
                Route('/foo/{kwarg2}', routes=(
                    Route('/baz', self.expected_handler,
                          params=dict(f2=2)),
                ), params=dict(z2='z2')),
                Route('/foo/{kwarg1}', routes=(
                    Route('/bar', self.unexpected_handler,
                          params=dict(f1=1)),
                ), params=dict(z1='z1')),
            ),
            requested_path='/foo/kwarg/baz',
            expected_kwargs=dict(kwarg2='kwarg', f2=2, z2='z2'),
        )

    def test_get_handler__nested2_unexpected_expected_params_url2_route2(self):
        self._test_get_handler(
            routes=(
                Route('/foo/{kwarg1}', routes=(
                    Route('/bar/{bar}', self.unexpected_handler,
                          params=dict(f1=1)),
                ), params=dict(z1='z1')),
                Route('/foo/{kwarg2}', routes=(
                    Route('/baz/{baz}', self.expected_handler,
                          params=dict(f2=2)),
                ), params=dict(z2='z2')),
            ),
            requested_path='/foo/kwarg/baz/baz',
            expected_kwargs=dict(kwarg2='kwarg', f2=2, baz='baz', z2='z2'),
        )

    def test_get_handler__nested2_expected_unexpected_params_url2_route2(self):
        self._test_get_handler(
            routes=(
                Route('/foo/{kwarg2}', routes=(
                    Route('/baz/{baz}', self.expected_handler,
                          params=dict(f2=2)),
                ), params=dict(z2='z2')),
                Route('/foo/{kwarg1}', routes=(
                    Route('/bar/{bar}', self.unexpected_handler,
                          params=dict(f1=1)),
                ), params=dict(z1='z1')),
            ),
            requested_path='/foo/kwarg/baz/baz',
            expected_kwargs=dict(kwarg2='kwarg', f2=2, baz='baz', z2='z2'),
        )

    def test_get_handler__nested2_expected_unexpected_half_error(self):
        # Prefix match without a handler on the matched node must 404.
        with self.assertRaises(http.Error) as context:
            self._test_get_handler(
                routes=(
                    Route('/foo', routes=(
                        Route('/baz', self.expected_handler),
                    )),
                    Route('/foo', routes=(
                        Route('/bar', self.unexpected_handler),
                    )),
                ),
                requested_path='/foo',
            )
        self.assertEqual('404 Not Found', context.exception.status)

    def test_get_handler__nested2_unexpected_expected_half_error(self):
        with self.assertRaises(http.Error) as context:
            self._test_get_handler(
                routes=(
                    Route('/foo', routes=(
                        Route('/bar', self.unexpected_handler),
                    )),
                    Route('/foo', routes=(
                        Route('/baz', self.expected_handler),
                    )),
                ),
                requested_path='/foo',
            )
        self.assertEqual('404 Not Found', context.exception.status)

    def test_route(self):
        # The decorator form must register the route and return the
        # handler class unchanged.
        app = App()
        handler = app.route('/{foo}', params=dict(kwarg='kwarg'))(Response)
        self.assertIs(handler, Response)
        partial = app.get_handler('/foo')
        self.assertDictEqual(dict(kwarg='kwarg', foo='foo'), partial.keywords)
|
{"/marnadi/utils/lazy.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/__init__.py": ["/marnadi/utils/__init__.py"], "/marnadi/response.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/http/cookies.py": ["/marnadi/utils/__init__.py"], "/tests/test_wsgi.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py", "/marnadi/wsgi.py"], "/marnadi/__init__.py": ["/marnadi/response.py", "/marnadi/route.py"], "/marnadi/http/__init__.py": ["/marnadi/http/cookies.py", "/marnadi/http/headers.py", "/marnadi/http/error.py", "/marnadi/http/data/__init__.py"], "/tests/test_route.py": ["/marnadi/__init__.py", "/marnadi/route.py"], "/marnadi/wsgi.py": ["/marnadi/__init__.py", "/marnadi/route.py", "/marnadi/utils/__init__.py"], "/marnadi/http/headers.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/decoders/application/json.py": ["/marnadi/http/__init__.py", "/marnadi/http/data/decoders/__init__.py"], "/tests/test_utils.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/error.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/utils/__init__.py": ["/marnadi/utils/lazy.py"], "/marnadi/http/data/decoders/__init__.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/route.py": ["/marnadi/utils/__init__.py"], "/tests/test_response.py": ["/marnadi/__init__.py", "/marnadi/wsgi.py"], "/marnadi/http/data/decoders/application/x_www_form_urlencoded.py": ["/marnadi/http/data/decoders/__init__.py"]}
|
29,177
|
ram0973/marnadi
|
refs/heads/master
|
/marnadi/__init__.py
|
from marnadi.response import Response
from marnadi.route import Route
|
{"/marnadi/utils/lazy.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/__init__.py": ["/marnadi/utils/__init__.py"], "/marnadi/response.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/http/cookies.py": ["/marnadi/utils/__init__.py"], "/tests/test_wsgi.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py", "/marnadi/wsgi.py"], "/marnadi/__init__.py": ["/marnadi/response.py", "/marnadi/route.py"], "/marnadi/http/__init__.py": ["/marnadi/http/cookies.py", "/marnadi/http/headers.py", "/marnadi/http/error.py", "/marnadi/http/data/__init__.py"], "/tests/test_route.py": ["/marnadi/__init__.py", "/marnadi/route.py"], "/marnadi/wsgi.py": ["/marnadi/__init__.py", "/marnadi/route.py", "/marnadi/utils/__init__.py"], "/marnadi/http/headers.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/decoders/application/json.py": ["/marnadi/http/__init__.py", "/marnadi/http/data/decoders/__init__.py"], "/tests/test_utils.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/error.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/utils/__init__.py": ["/marnadi/utils/lazy.py"], "/marnadi/http/data/decoders/__init__.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/route.py": ["/marnadi/utils/__init__.py"], "/tests/test_response.py": ["/marnadi/__init__.py", "/marnadi/wsgi.py"], "/marnadi/http/data/decoders/application/x_www_form_urlencoded.py": ["/marnadi/http/data/decoders/__init__.py"]}
|
29,178
|
ram0973/marnadi
|
refs/heads/master
|
/marnadi/http/__init__.py
|
import functools
from .cookies import Cookies
from .headers import Headers, Header
from .error import Error
from .data import Data
class Handler(type):
    """Metaclass stamping each ``Method`` attribute with its own name.

    This lets ``Method`` descriptors know which HTTP method attribute
    they were assigned to without repeating the name at the call site.
    """

    def __new__(mcs, name, mro, attributes):
        for attr_name, attr_value in attributes.items():
            if isinstance(attr_value, Method):
                attr_value.name = attr_name
        return super(Handler, mcs).__new__(mcs, name, mro, attributes)

    def start(cls, *args, **kwargs):
        """Entry point stub; concrete handler classes must provide it."""
        raise NotImplementedError
class Method(object):
    """Descriptor representing a single HTTP method on a handler class.

    As a descriptor it returns a bound callable (or a falsy value when no
    implementation exists); used as a decorator via ``__call__`` it turns
    a plain function into a function-style handler.
    """

    __slots__ = 'name', 'func'

    class FunctionHandler(Handler):
        """Metaclass for classes that stand in for a decorated function.

        Calling the class invokes the wrapped function directly.
        """
        # Filled in when the replacement class is built in __call__.
        __function__ = NotImplemented
        __response__ = NotImplemented

        def __call__(cls, *args, **kwargs):
            return cls.__function__(*args, **kwargs)

    class classmethod(classmethod):
        """classmethod variant binding to the handler's response class."""

        def __get__(self, instance, instance_class=None):
            assert isinstance(instance_class, Method.FunctionHandler)
            func = getattr(self, '__func__', None)
            # Python 2.6 compatibility
            func = func or super(type(self), self).__get__(1).im_func
            # Bind the underlying function to the generated response
            # class instead of the FunctionHandler wrapper.
            return functools.partial(func, instance_class.__response__)

    def __init__(self, func=None, name=None):
        self.func = func
        # Fall back to the function's own name when none is given
        # (Handler.__new__ also overwrites this with the attribute name).
        self.name = name or func and func.__name__

    def __get__(self, response, response_class):
        # Class access yields the decorator form bound to the class;
        # instance access yields the bound implementation, or a falsy
        # value (None/False) when there is no implementation.
        if response is None:
            return functools.partial(self, response_class)
        return self.func and functools.partial(self.func, response)

    def __call__(self, response_class, callback):
        """Decorator: attach `callback` as this HTTP method's handler.

        For an already-wrapped function handler, just install the method
        on its response class. For a plain function, build a response
        subclass holding the method plus a FunctionHandler stand-in that
        keeps the original callable's calling convention.
        """
        method = staticmethod(callback)
        if isinstance(callback, self.FunctionHandler):
            setattr(callback.__response__, self.name, method)
            return callback
        attributes = dict(
            __module__=callback.__module__,
            __doc__=callback.__doc__,
            __slots__=(),
        )
        response = type(callback.__name__, (response_class, ), dict(
            {self.name: method},
            **attributes
        ))
        callback_replacement = self.FunctionHandler(
            callback.__name__,
            (),
            dict(
                attributes,
                __function__=method,
                __response__=response,
                start=self.FunctionHandler.classmethod(
                    response_class.start.__func__),
            ),
        )
        return callback_replacement
|
{"/marnadi/utils/lazy.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/__init__.py": ["/marnadi/utils/__init__.py"], "/marnadi/response.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/http/cookies.py": ["/marnadi/utils/__init__.py"], "/tests/test_wsgi.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py", "/marnadi/wsgi.py"], "/marnadi/__init__.py": ["/marnadi/response.py", "/marnadi/route.py"], "/marnadi/http/__init__.py": ["/marnadi/http/cookies.py", "/marnadi/http/headers.py", "/marnadi/http/error.py", "/marnadi/http/data/__init__.py"], "/tests/test_route.py": ["/marnadi/__init__.py", "/marnadi/route.py"], "/marnadi/wsgi.py": ["/marnadi/__init__.py", "/marnadi/route.py", "/marnadi/utils/__init__.py"], "/marnadi/http/headers.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/decoders/application/json.py": ["/marnadi/http/__init__.py", "/marnadi/http/data/decoders/__init__.py"], "/tests/test_utils.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/error.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/utils/__init__.py": ["/marnadi/utils/lazy.py"], "/marnadi/http/data/decoders/__init__.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/route.py": ["/marnadi/utils/__init__.py"], "/tests/test_response.py": ["/marnadi/__init__.py", "/marnadi/wsgi.py"], "/marnadi/http/data/decoders/application/x_www_form_urlencoded.py": ["/marnadi/http/data/decoders/__init__.py"]}
|
29,179
|
ram0973/marnadi
|
refs/heads/master
|
/tests/test_route.py
|
try:
import unittest2 as unittest
except ImportError:
import unittest
from marnadi import Route
from marnadi.route import Routes
class RoutesTestCase(unittest.TestCase):
    """Tests for `Routes`: nested route iterables must be flattened."""

    def test_empty(self):
        routes = []
        self.assertListEqual([], Routes(routes))

    def test_single_route(self):
        route = Route('/')
        routes = [route]
        self.assertListEqual([route], Routes(routes))

    def test_two_routes(self):
        route = Route('/')
        routes = [route] * 2
        self.assertListEqual([route] * 2, Routes(routes))

    def test_sequence_of_routes(self):
        route = Route('/')
        routes = [[route] * 2]
        self.assertListEqual([route] * 2, Routes(routes))

    def test_two_sequences_of_routes(self):
        route = Route('/')
        routes = [[route] * 2] * 2
        self.assertListEqual([route] * 4, Routes(routes))

    def test_mixed_routes_and_sequences(self):
        route = Route('/')
        routes = [route] * 2 + [[route] * 2] * 2
        self.assertListEqual([route] * 6, Routes(routes))
|
{"/marnadi/utils/lazy.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/__init__.py": ["/marnadi/utils/__init__.py"], "/marnadi/response.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/http/cookies.py": ["/marnadi/utils/__init__.py"], "/tests/test_wsgi.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py", "/marnadi/wsgi.py"], "/marnadi/__init__.py": ["/marnadi/response.py", "/marnadi/route.py"], "/marnadi/http/__init__.py": ["/marnadi/http/cookies.py", "/marnadi/http/headers.py", "/marnadi/http/error.py", "/marnadi/http/data/__init__.py"], "/tests/test_route.py": ["/marnadi/__init__.py", "/marnadi/route.py"], "/marnadi/wsgi.py": ["/marnadi/__init__.py", "/marnadi/route.py", "/marnadi/utils/__init__.py"], "/marnadi/http/headers.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/decoders/application/json.py": ["/marnadi/http/__init__.py", "/marnadi/http/data/decoders/__init__.py"], "/tests/test_utils.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/error.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/utils/__init__.py": ["/marnadi/utils/lazy.py"], "/marnadi/http/data/decoders/__init__.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/route.py": ["/marnadi/utils/__init__.py"], "/tests/test_response.py": ["/marnadi/__init__.py", "/marnadi/wsgi.py"], "/marnadi/http/data/decoders/application/x_www_form_urlencoded.py": ["/marnadi/http/data/decoders/__init__.py"]}
|
29,180
|
ram0973/marnadi
|
refs/heads/master
|
/marnadi/wsgi.py
|
import collections
import functools
import itertools
try:
from urllib import parse
except ImportError:
import urlparse as parse
from marnadi import http
from marnadi.route import Routes
from marnadi.utils import cached_property
try:  # 'collections.Mapping' alias was removed in Python 3.10
    from collections.abc import Mapping as _Mapping
except ImportError:  # Python 2 fallback
    _Mapping = collections.Mapping


class Request(_Mapping):
    """WSGI request.

    Read-only mapping wrapped around the WSGI environ dict, with
    convenience accessors for common CGI variables.

    Args:
        environ (dict): PEP-3333 WSGI environ dict.
    """

    if hasattr(_Mapping, '__slots__'):
        __slots__ = 'environ', '__weakref__'

    # identity semantics: Mapping would otherwise compare by content
    __hash__ = object.__hash__
    __eq__ = object.__eq__
    __ne__ = object.__ne__

    def __init__(self, environ):
        self.environ = environ

    def __getitem__(self, key):
        return self.environ[key]

    def __iter__(self):
        return iter(self.environ)

    def __len__(self):
        return len(self.environ)

    @property
    def input(self):
        # file-like object with the request body
        return self['wsgi.input']

    @property
    def method(self):
        return self['REQUEST_METHOD']

    @property
    def path(self):
        return self['PATH_INFO']

    @property
    def query_string(self):
        return self.get('QUERY_STRING')

    @property
    def remote_addr(self):
        return self.get('REMOTE_ADDR')

    @property
    def remote_host(self):
        return self.get('REMOTE_HOST')

    @property
    def content_length(self):
        return int(self.get('CONTENT_LENGTH', 0))

    @cached_property
    def content_type(self):
        """Parsed Content-Type header, or None when it is missing."""
        try:
            parts = iter(self['CONTENT_TYPE'].split(';'))
            return http.Header(next(parts).strip(), **dict(
                map(str.strip, part.split('=', 1))
                for part in parts
            ))
        except KeyError:
            pass

    @cached_property
    def headers(self):
        """HTTP request headers reconstructed from the environ dict."""
        return dict(
            (name.title().replace('_', '-'), value)
            for name, value in
            itertools.chain(
                (
                    (env_key, self[env_key])
                    for env_key in ('CONTENT_TYPE', 'CONTENT_LENGTH')
                    if env_key in self
                ),
                (
                    (env_key[5:], env_value)
                    for env_key, env_value in self.items()
                    if env_key.startswith('HTTP_')
                ),
            )
        )

    @cached_property
    def query(self):
        """Query-string parameters as a dict (empty when none present)."""
        # QUERY_STRING may be absent; the original passed None to
        # parse_qsl (which raises) while its dead `except KeyError`
        # branch showed the intent to return an empty dict -- default
        # to '' instead so a missing query string yields {}.
        return dict(parse.parse_qsl(
            self.query_string or '',
            keep_blank_values=True,
        ))

    # request-body decoders registered per MIME type (lazily imported)
    data = http.Data(
        (
            'application/json',
            'marnadi.http.data.decoders.application.json.Decoder',
        ),
        (
            'application/x-www-form-urlencoded',
            'marnadi.http.data.decoders' +
            '.application.x_www_form_urlencoded.Decoder',
        ),
    )
class App(object):
    """WSGI application class.

    Instance of this class used as entry point for WSGI requests. Using
    provided routes list it can determine which handler should be called.

    Args:
        routes (iterable): list of :class:`Route`.
    """

    __slots__ = 'routes', 'route_map'

    def __init__(self, routes=()):
        self.route_map = {}
        self.routes = Routes(routes)
        self.build_route_map()

    def __call__(self, environ, start_response):
        """PEP-3333 application callable."""
        try:
            request = self.make_request_object(environ)
            handler = self.get_handler(request.path)
            response = handler(self, request)
        except http.Error as error:
            # http.Error doubles as a minimal response object
            response = error
        start_response(
            response.status,
            list(response.headers.items(stringify=True))
        )
        return response

    @staticmethod
    def make_request_object(environ):
        return Request(environ)  # TODO make request_type as instance attribute

    def build_route_map(self, routes=None, parents=()):
        # recursively register named routes for reverse URL building
        routes = self.routes if routes is None else routes
        for route in routes:
            self.register_route(route, parents=parents)

    def register_route(self, route, parents=()):
        parents = parents + (route, )
        if route.name:
            # chain of ancestors is kept so the full path can be restored
            self.route_map[route.name] = parents
        self.build_route_map(route.routes, parents=parents)

    def route(self, path, **route_params):
        """Decorator registering a handler at `path` (see Routes.route)."""
        return self.routes.route(path, **route_params)

    def make_path(self, *route_name, **params):
        """Restore the URL path of a named route, filling placeholders."""
        assert len(route_name) == 1
        return ''.join(
            route.restore_path(**params)
            for route in self.route_map[route_name[0]]
        )

    def get_handler(self, path, routes=None, params=None):
        """Return handler according to the given path.

        Note:
            If you wish for example automatically redirect all requests
            without trailing slash in URL to URL with persisting one you may
            override this method by raising `http.Error` with 301 status and
            necessary 'Location' header when needed.
        """
        routes = routes or self.routes
        params = params or {}
        for route in routes:
            match = route.match(path)
            if not match:
                continue
            rest_path, route_params = match
            if not rest_path:
                if route.handler:
                    params.update(route_params)
                    return functools.partial(route.handler.start, **params)
            else:
                try:
                    return self.get_handler(
                        rest_path,
                        routes=route.routes,
                        params=dict(params, **route_params),
                    )
                except http.Error:
                    pass  # wrong way raises "404 Not Found" at the end
        raise http.Error('404 Not Found')  # matching route not found
|
{"/marnadi/utils/lazy.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/__init__.py": ["/marnadi/utils/__init__.py"], "/marnadi/response.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/http/cookies.py": ["/marnadi/utils/__init__.py"], "/tests/test_wsgi.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py", "/marnadi/wsgi.py"], "/marnadi/__init__.py": ["/marnadi/response.py", "/marnadi/route.py"], "/marnadi/http/__init__.py": ["/marnadi/http/cookies.py", "/marnadi/http/headers.py", "/marnadi/http/error.py", "/marnadi/http/data/__init__.py"], "/tests/test_route.py": ["/marnadi/__init__.py", "/marnadi/route.py"], "/marnadi/wsgi.py": ["/marnadi/__init__.py", "/marnadi/route.py", "/marnadi/utils/__init__.py"], "/marnadi/http/headers.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/decoders/application/json.py": ["/marnadi/http/__init__.py", "/marnadi/http/data/decoders/__init__.py"], "/tests/test_utils.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/error.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/utils/__init__.py": ["/marnadi/utils/lazy.py"], "/marnadi/http/data/decoders/__init__.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/route.py": ["/marnadi/utils/__init__.py"], "/tests/test_response.py": ["/marnadi/__init__.py", "/marnadi/wsgi.py"], "/marnadi/http/data/decoders/application/x_www_form_urlencoded.py": ["/marnadi/http/data/decoders/__init__.py"]}
|
29,181
|
ram0973/marnadi
|
refs/heads/master
|
/marnadi/http/headers.py
|
import collections
import itertools
from marnadi.utils import cached_property, CachedDescriptor
try:  # 'collections.Mapping' alias was removed in Python 3.10
    from collections.abc import Mapping as _HeaderMapping
except ImportError:  # Python 2 fallback
    from collections import Mapping as _HeaderMapping


class Header(_HeaderMapping):
    """HTTP header value with optional parameters.

    Compares equal (and hashes equal) to its bare value; mapping access
    exposes the parameters, e.g.
    ``Header('text/plain', charset='utf-8')['charset'] == 'utf-8'``.
    """

    __slots__ = 'value', 'params'

    def __init__(self, *value, **params):
        assert len(value) == 1
        self.value = value[0]
        self.params = params

    def __hash__(self):
        return hash(self.value)

    def __eq__(self, other):
        return self.value == other

    def __ne__(self, other):
        return self.value != other

    def __str__(self):
        return self.stringify()

    def __bytes__(self):
        value = self.stringify()
        if isinstance(value, bytes):  # python 2.x
            return value
        return value.encode(encoding='latin1')

    def __getitem__(self, item):
        return self.params[item]

    def __iter__(self):
        return iter(self.params)

    def __len__(self):
        return len(self.params)

    def __bool__(self):
        # a header is truthy even with no params (Mapping would say False)
        return True

    def __nonzero__(self):  # Python 2 truthiness protocol
        return self.__bool__()

    def stringify(self):
        """Render the header as ``value; key1=val1; key2=val2``."""
        if not self.params:
            return str(self.value)
        return '{value}; {params}'.format(
            value=self.value,
            params='; '.join(
                '%s=%s' % (attr_name, attr_value)
                for attr_name, attr_value in self.params.items()
            ),
        )
try:  # 'collections.Mapping' alias was removed in Python 3.10
    from collections.abc import Mapping as _MixinMapping
except ImportError:  # Python 2 fallback
    from collections import Mapping as _MixinMapping


class HeadersMixin(_MixinMapping):
    """Read-only mapping interface over a header-name -> [values] dict.

    Subclasses must provide `_headers` before the mixin is used.
    """

    if hasattr(_MixinMapping, '__slots__'):
        __slots__ = '__weakref__',

    def __getitem__(self, header):
        # header names are case-insensitive; stored Title-Cased
        return self._headers[header.title()]

    def __len__(self):
        return len(self._headers)

    def __iter__(self):
        return iter(self._headers)

    # identity semantics: Mapping would otherwise compare by content
    __hash__ = object.__hash__
    __eq__ = object.__eq__
    __ne__ = object.__ne__

    @cached_property
    def _headers(self):
        raise ValueError("This property must be set before using")

    def items(self, stringify=False):
        """Yield (header, value) pairs, one pair per individual value."""
        for header, values in self._headers.items():
            for value in values:
                yield header, str(value) if stringify else value

    def values(self, stringify=False):
        """Yield individual header values across all headers."""
        for values in self._headers.values():
            for value in values:
                yield str(value) if stringify else value
try:  # 'collections.MutableMapping' alias was removed in Python 3.10
    from collections.abc import MutableMapping as _MutableMapping
except ImportError:  # Python 2 fallback
    from collections import MutableMapping as _MutableMapping


class ResponseHeaders(HeadersMixin, _MutableMapping):
    """Mutable per-response headers, seeded from a defaults dict."""

    __slots__ = ()

    def __init__(self, default_headers):
        self._headers = default_headers

    def __delitem__(self, header):
        del self._headers[header.title()]

    def __setitem__(self, header, value):
        # replaces all existing values of the header with a single one
        self._headers[header.title()] = [value]

    def append(self, header_item):
        """Add one more value for a header without replacing existing."""
        header, value = header_item
        self._headers[header.title()].append(value)

    def extend(self, headers):
        for header in headers:
            self.append(header)

    def setdefault(self, header, default=None):
        # NOTE: returns the list of values, not a single value
        return self._headers.setdefault(header.title(), [default])

    def clear(self, *headers):
        """Remove the given headers, or all headers when none given."""
        if headers:
            for header in headers:
                try:
                    del self[header]
                except KeyError:
                    pass
        else:
            self._headers.clear()
class Headers(CachedDescriptor, HeadersMixin):
    """Class-level descriptor holding default headers.

    On per-instance access it hands out a fresh mutable
    `ResponseHeaders` seeded with a copy of the defaults.
    """

    __slots__ = ()

    def __init__(self, *default_headers, **kw_default_headers):
        super(Headers, self).__init__()
        self._headers = collections.defaultdict(list)
        for header, value in itertools.chain(
            default_headers,
            kw_default_headers.items(),
        ):
            self._headers[header.title()].append(value)

    def get_value(self, instance):
        # copy so per-response mutation never leaks into the defaults
        return ResponseHeaders(default_headers=self._headers.copy())
|
{"/marnadi/utils/lazy.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/__init__.py": ["/marnadi/utils/__init__.py"], "/marnadi/response.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/http/cookies.py": ["/marnadi/utils/__init__.py"], "/tests/test_wsgi.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py", "/marnadi/wsgi.py"], "/marnadi/__init__.py": ["/marnadi/response.py", "/marnadi/route.py"], "/marnadi/http/__init__.py": ["/marnadi/http/cookies.py", "/marnadi/http/headers.py", "/marnadi/http/error.py", "/marnadi/http/data/__init__.py"], "/tests/test_route.py": ["/marnadi/__init__.py", "/marnadi/route.py"], "/marnadi/wsgi.py": ["/marnadi/__init__.py", "/marnadi/route.py", "/marnadi/utils/__init__.py"], "/marnadi/http/headers.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/decoders/application/json.py": ["/marnadi/http/__init__.py", "/marnadi/http/data/decoders/__init__.py"], "/tests/test_utils.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/error.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/utils/__init__.py": ["/marnadi/utils/lazy.py"], "/marnadi/http/data/decoders/__init__.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/route.py": ["/marnadi/utils/__init__.py"], "/tests/test_response.py": ["/marnadi/__init__.py", "/marnadi/wsgi.py"], "/marnadi/http/data/decoders/application/x_www_form_urlencoded.py": ["/marnadi/http/data/decoders/__init__.py"]}
|
29,182
|
ram0973/marnadi
|
refs/heads/master
|
/marnadi/http/data/decoders/application/json.py
|
json = __import__('json') # import built-in module 'json'
from marnadi.http import Error
from marnadi.http.data.decoders import Decoder as BaseDecoder
class Decoder(BaseDecoder):
    """Decoder for 'application/json' request bodies.

    Raises http 400 when the body is not valid JSON.
    """

    __slots__ = ()

    def __call__(self, request):
        try:
            # base decoder yields the body as text; parse it as JSON
            return json.loads(super(Decoder, self).__call__(request))
        except ValueError:
            raise Error('400 Bad Request')
|
{"/marnadi/utils/lazy.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/__init__.py": ["/marnadi/utils/__init__.py"], "/marnadi/response.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/http/cookies.py": ["/marnadi/utils/__init__.py"], "/tests/test_wsgi.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py", "/marnadi/wsgi.py"], "/marnadi/__init__.py": ["/marnadi/response.py", "/marnadi/route.py"], "/marnadi/http/__init__.py": ["/marnadi/http/cookies.py", "/marnadi/http/headers.py", "/marnadi/http/error.py", "/marnadi/http/data/__init__.py"], "/tests/test_route.py": ["/marnadi/__init__.py", "/marnadi/route.py"], "/marnadi/wsgi.py": ["/marnadi/__init__.py", "/marnadi/route.py", "/marnadi/utils/__init__.py"], "/marnadi/http/headers.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/decoders/application/json.py": ["/marnadi/http/__init__.py", "/marnadi/http/data/decoders/__init__.py"], "/tests/test_utils.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/error.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/utils/__init__.py": ["/marnadi/utils/lazy.py"], "/marnadi/http/data/decoders/__init__.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/route.py": ["/marnadi/utils/__init__.py"], "/tests/test_response.py": ["/marnadi/__init__.py", "/marnadi/wsgi.py"], "/marnadi/http/data/decoders/application/x_www_form_urlencoded.py": ["/marnadi/http/data/decoders/__init__.py"]}
|
29,183
|
ram0973/marnadi
|
refs/heads/master
|
/tests/test_utils.py
|
import types
try:
import unittest2 as unittest
except ImportError:
import unittest
from marnadi.utils import Lazy
try:
str = unicode
except NameError:
pass
_test_tuple = ('foo', 'bar')
_test_list = ['foo', 'bar']
_test_set = set(_test_tuple)
_test_dict = {'foo': 'bar'}
_test_str = 'foo'
_test_bytes = b'foo'
_test_true = True
_test_false = False
def _test_function(*args, **kwargs):
return args, kwargs
class _TestClass:
pass
_test_instance = _TestClass()
class LazyTestCase(unittest.TestCase):
    """Tests for `Lazy`: transparent proxy resolving dotted import paths."""

    def test_lazy_true(self):
        lazy_true = Lazy('%s._test_true' % __name__)
        self.assertTrue(lazy_true)

    def test_lazy_false(self):
        lazy_true = Lazy('%s._test_false' % __name__)
        self.assertFalse(lazy_true)

    def test_lazy_tuple(self):
        lazy_tuple = Lazy('%s._test_tuple' % __name__)
        self.assertTupleEqual(_test_tuple, tuple(lazy_tuple))

    def test_length_of_lazy_tuple(self):
        lazy_tuple = Lazy('%s._test_tuple' % __name__)
        self.assertEqual(2, len(lazy_tuple))

    def test_lazy_list(self):
        lazy_list = Lazy('%s._test_list' % __name__)
        self.assertListEqual(_test_list, list(lazy_list))

    def test_lazy_set(self):
        lazy_set = Lazy('%s._test_set' % __name__)
        self.assertSetEqual(_test_set, set(lazy_set))

    def test_lazy_dict(self):
        lazy_dict = Lazy('%s._test_dict' % __name__)
        self.assertDictEqual(_test_dict, dict(lazy_dict))

    def test_lazy_str(self):
        lazy_str = Lazy('%s._test_str' % __name__)
        self.assertEqual(_test_str, str(lazy_str))

    def test_lazy_bytes(self):
        lazy_bytes = Lazy('%s._test_bytes' % __name__)
        self.assertEqual(_test_bytes, bytes(lazy_bytes))

    def test_lazy_isinstance(self):
        lazy_instance = Lazy('%s._test_instance' % __name__)
        self.assertIsInstance(lazy_instance, _TestClass)

    def test_lazy_class_instance(self):
        lazy_class = Lazy('%s._TestClass' % __name__)
        self.assertIsInstance(lazy_class(), _TestClass)

    def test_lazy_function__no_args(self):
        lazy_function = Lazy('%s._test_function' % __name__)
        self.assertEqual(lazy_function(), ((), {}))

    def test_lazy_function__args(self):
        lazy_function = Lazy('%s._test_function' % __name__)
        self.assertEqual(
            lazy_function('foo', 'bar'),
            (('foo', 'bar'), {}),
        )

    def test_lazy_function__kwargs(self):
        lazy_function = Lazy('%s._test_function' % __name__)
        self.assertEqual(
            lazy_function(foo='bar'),
            ((), {'foo': 'bar'}),
        )

    def test_lazy_function__args_kwargs(self):
        lazy_function = Lazy('%s._test_function' % __name__)
        self.assertEqual(
            lazy_function('foo', 'bar', foo='bar'),
            (('foo', 'bar'), {'foo': 'bar'}),
        )

    # passing an already-resolved object to Lazy must return it unchanged

    def test_lazy__explicit_class(self):
        self.assertIs(_TestClass, Lazy(_TestClass))

    def test_lazy__explicit_function(self):
        self.assertIs(_test_function, Lazy(_test_function))

    def test_lazy__explicit_instance(self):
        self.assertIs(Lazy(_test_instance), _test_instance)

    def test_lazy__explicit_dict(self):
        self.assertIs(_test_dict, Lazy(_test_dict))

    def test_lazy__explicit_list(self):
        self.assertIs(_test_list, Lazy(_test_list))

    def test_lazy__explicit_tuple(self):
        self.assertIs(_test_tuple, Lazy(_test_tuple))

    def test_lazy__explicit_set(self):
        self.assertIs(_test_set, Lazy(_test_set))

    def test_lazy__explicit_none(self):
        self.assertIsNone(Lazy(None))

    def test_lazy__explicit_lazy(self):
        lazy = Lazy('%s._test_instance' % __name__)
        self.assertIs(lazy, Lazy(lazy))

    def test_lazy__explicit_lazy_str(self):
        lazy_str = Lazy('%s._test_str' % __name__)
        self.assertIs(lazy_str, Lazy(lazy_str))

    # dotted paths naming modules/packages resolve to the module object

    def test_lazy__module(self):
        lazy = Lazy(__name__)
        self.assertIsInstance(lazy, types.ModuleType)
        self.assertEqual(__name__, lazy.__name__)

    def test_lazy__module_from_package(self):
        lazy = Lazy('marnadi.wsgi')
        self.assertIsInstance(lazy, types.ModuleType)
        self.assertEqual('marnadi.wsgi', lazy.__name__)

    def test_lazy__package(self):
        lazy = Lazy('marnadi')
        self.assertIsInstance(lazy, types.ModuleType)
        self.assertEqual('marnadi', lazy.__name__)

    def test_lazy__package_from_package(self):
        lazy = Lazy('marnadi.http')
        self.assertIsInstance(lazy, types.ModuleType)
        self.assertEqual('marnadi.http', lazy.__name__)
|
{"/marnadi/utils/lazy.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/__init__.py": ["/marnadi/utils/__init__.py"], "/marnadi/response.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/http/cookies.py": ["/marnadi/utils/__init__.py"], "/tests/test_wsgi.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py", "/marnadi/wsgi.py"], "/marnadi/__init__.py": ["/marnadi/response.py", "/marnadi/route.py"], "/marnadi/http/__init__.py": ["/marnadi/http/cookies.py", "/marnadi/http/headers.py", "/marnadi/http/error.py", "/marnadi/http/data/__init__.py"], "/tests/test_route.py": ["/marnadi/__init__.py", "/marnadi/route.py"], "/marnadi/wsgi.py": ["/marnadi/__init__.py", "/marnadi/route.py", "/marnadi/utils/__init__.py"], "/marnadi/http/headers.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/decoders/application/json.py": ["/marnadi/http/__init__.py", "/marnadi/http/data/decoders/__init__.py"], "/tests/test_utils.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/error.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/utils/__init__.py": ["/marnadi/utils/lazy.py"], "/marnadi/http/data/decoders/__init__.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/route.py": ["/marnadi/utils/__init__.py"], "/tests/test_response.py": ["/marnadi/__init__.py", "/marnadi/wsgi.py"], "/marnadi/http/data/decoders/application/x_www_form_urlencoded.py": ["/marnadi/http/data/decoders/__init__.py"]}
|
29,184
|
ram0973/marnadi
|
refs/heads/master
|
/marnadi/http/error.py
|
from marnadi.http import Header, Headers
from marnadi.utils import to_bytes
class Error(Exception):
    """HTTP error that doubles as a minimal WSGI response.

    Iterating the instance yields its single body chunk (bytes), so the
    WSGI layer can return it directly.
    """

    __slots__ = 'status', 'data', '__weakref__'

    default_status = '500 Internal Server Error'

    headers = Headers(
        ('Content-Type', Header('text/plain', charset='utf-8')),
    )

    def __init__(self, status=None, data=None, headers=()):
        self.status = status or self.default_status
        # body defaults to the status line itself
        self.data = to_bytes(data or status)
        self.update_headers(headers)

    def __len__(self):
        return 1

    def __iter__(self):
        yield self.data

    def update_headers(self, headers):
        self.headers.extend(headers)
        self.headers['Content-Length'] = len(self.data)
|
{"/marnadi/utils/lazy.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/__init__.py": ["/marnadi/utils/__init__.py"], "/marnadi/response.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/http/cookies.py": ["/marnadi/utils/__init__.py"], "/tests/test_wsgi.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py", "/marnadi/wsgi.py"], "/marnadi/__init__.py": ["/marnadi/response.py", "/marnadi/route.py"], "/marnadi/http/__init__.py": ["/marnadi/http/cookies.py", "/marnadi/http/headers.py", "/marnadi/http/error.py", "/marnadi/http/data/__init__.py"], "/tests/test_route.py": ["/marnadi/__init__.py", "/marnadi/route.py"], "/marnadi/wsgi.py": ["/marnadi/__init__.py", "/marnadi/route.py", "/marnadi/utils/__init__.py"], "/marnadi/http/headers.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/decoders/application/json.py": ["/marnadi/http/__init__.py", "/marnadi/http/data/decoders/__init__.py"], "/tests/test_utils.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/error.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/utils/__init__.py": ["/marnadi/utils/lazy.py"], "/marnadi/http/data/decoders/__init__.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/route.py": ["/marnadi/utils/__init__.py"], "/tests/test_response.py": ["/marnadi/__init__.py", "/marnadi/wsgi.py"], "/marnadi/http/data/decoders/application/x_www_form_urlencoded.py": ["/marnadi/http/data/decoders/__init__.py"]}
|
29,185
|
ram0973/marnadi
|
refs/heads/master
|
/marnadi/utils/__init__.py
|
import functools
try:
    # Python 2: text type is `unicode`
    unicode_str = unicode
except NameError:
    # Python 3: `str` is already the text type
    unicode_str = str

try:
    memoryview
except NameError:
    # interpreters without memoryview fall back to a plain bytes copy
    memoryview = bytes
def metaclass(mcs):
    """Class decorator recreating the decorated class with metaclass `mcs`.

    Portable alternative to the incompatible Python 2/3 metaclass
    syntaxes. Slot descriptors (accounting for private name mangling)
    plus ``__weakref__``/``__dict__`` are stripped from the copied
    attributes so the recreated class stays valid.
    """
    def _decorator(cls):
        attrs = dict(vars(cls))
        try:
            if isinstance(cls.__slots__, str):
                slots = (cls.__slots__, )
            else:
                slots = cls.__slots__
            for slot in slots:
                if slot.startswith('__') and not slot.endswith('__'):
                    # private slots are stored under their mangled name
                    slot = '_{cls}{slot}'.format(cls=cls.__name__, slot=slot)
                attrs.pop(slot, None)
        except AttributeError:
            pass  # class has no __slots__
        for prop in '__weakref__', '__dict__':
            attrs.pop(prop, None)
        return mcs(cls.__name__, cls.__bases__, attrs)
    return _decorator
class ReferenceType(type):
    """Metaclass making `Cls(obj)` a no-op when obj is already a Cls.

    Any other call signature constructs a new instance as usual.
    """

    def __call__(cls, *args, **kwargs):
        if len(args) == 1 and len(kwargs) == 0:
            if isinstance(args[0], cls):
                return args[0]
        return super(ReferenceType, cls).__call__(*args, **kwargs)
def to_bytes(obj, encoding='utf-8', error_callback=None):
    """Convert `obj` to bytes.

    Bytes-like objects are copied, None becomes b'', objects with
    ``__bytes__`` use it, everything else is stringified and encoded.
    Any conversion failure is reported to `error_callback` (if given)
    and then re-raised.
    """
    try:
        if isinstance(obj, (bytes, bytearray, memoryview)):
            return bytes(obj)
        if obj is None:
            return b''
        try:
            return obj.__bytes__()
        except AttributeError:
            # no __bytes__ protocol: stringify then encode
            return unicode_str(obj).encode(encoding)
    except Exception as error:
        if error_callback is not None:
            error_callback(error)
        raise
def coroutine(fn):
    """Decorator priming a generator-based coroutine.

    The wrapped generator is advanced to its first ``yield`` so callers
    may ``send`` into it immediately.
    """
    @functools.wraps(fn)
    def _fn(*args, **kwargs):
        co = fn(*args, **kwargs)
        co.send(None)  # prime: run up to the first yield
        return co
    return _fn
def import_module(path):
    """Import and return the module at dotted `path`.

    Unlike bare ``__import__``, returns the leaf module rather than the
    top-level package.
    """
    module = path.rpartition('.')[2]
    return __import__(path, fromlist=(module, ))
from .lazy import Lazy, CachedDescriptor, cached_property
|
{"/marnadi/utils/lazy.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/__init__.py": ["/marnadi/utils/__init__.py"], "/marnadi/response.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/http/cookies.py": ["/marnadi/utils/__init__.py"], "/tests/test_wsgi.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py", "/marnadi/wsgi.py"], "/marnadi/__init__.py": ["/marnadi/response.py", "/marnadi/route.py"], "/marnadi/http/__init__.py": ["/marnadi/http/cookies.py", "/marnadi/http/headers.py", "/marnadi/http/error.py", "/marnadi/http/data/__init__.py"], "/tests/test_route.py": ["/marnadi/__init__.py", "/marnadi/route.py"], "/marnadi/wsgi.py": ["/marnadi/__init__.py", "/marnadi/route.py", "/marnadi/utils/__init__.py"], "/marnadi/http/headers.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/decoders/application/json.py": ["/marnadi/http/__init__.py", "/marnadi/http/data/decoders/__init__.py"], "/tests/test_utils.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/error.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/utils/__init__.py": ["/marnadi/utils/lazy.py"], "/marnadi/http/data/decoders/__init__.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/route.py": ["/marnadi/utils/__init__.py"], "/tests/test_response.py": ["/marnadi/__init__.py", "/marnadi/wsgi.py"], "/marnadi/http/data/decoders/application/x_www_form_urlencoded.py": ["/marnadi/http/data/decoders/__init__.py"]}
|
29,186
|
ram0973/marnadi
|
refs/heads/master
|
/marnadi/http/data/decoders/__init__.py
|
from marnadi.http import Error
from marnadi.utils import metaclass
class DecoderType(type):
    """Metaclass turning `Decoder(request)` into instantiate-then-call.

    Calling a decoder class constructs a fresh (argument-less) instance
    and immediately invokes it with the request.
    """

    def __call__(cls, request):
        decoder = super(DecoderType, cls).__call__()
        return decoder(request)
@metaclass(DecoderType)
class Decoder(object):
    """Base request-body decoder: reads the body and decodes it to text.

    Raises http 400 when the body cannot be decoded with the charset
    declared by the request (or the default encoding).
    """

    __slots__ = ()

    default_encoding = 'utf-8'

    def __call__(self, request):
        return self.decode(
            data=request.input.read(request.content_length),
            encoding=self.get_encoding(request.content_type)
        )

    def get_encoding(self, content_type):
        # charset param of the Content-Type header, else the default
        return content_type and content_type.params.get(
            'charset') or self.default_encoding

    @staticmethod
    def decode(data, encoding):
        try:
            return data.decode(encoding)
        except UnicodeDecodeError:
            raise Error('400 Bad Request')
|
{"/marnadi/utils/lazy.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/__init__.py": ["/marnadi/utils/__init__.py"], "/marnadi/response.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/http/cookies.py": ["/marnadi/utils/__init__.py"], "/tests/test_wsgi.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py", "/marnadi/wsgi.py"], "/marnadi/__init__.py": ["/marnadi/response.py", "/marnadi/route.py"], "/marnadi/http/__init__.py": ["/marnadi/http/cookies.py", "/marnadi/http/headers.py", "/marnadi/http/error.py", "/marnadi/http/data/__init__.py"], "/tests/test_route.py": ["/marnadi/__init__.py", "/marnadi/route.py"], "/marnadi/wsgi.py": ["/marnadi/__init__.py", "/marnadi/route.py", "/marnadi/utils/__init__.py"], "/marnadi/http/headers.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/decoders/application/json.py": ["/marnadi/http/__init__.py", "/marnadi/http/data/decoders/__init__.py"], "/tests/test_utils.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/error.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/utils/__init__.py": ["/marnadi/utils/lazy.py"], "/marnadi/http/data/decoders/__init__.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/route.py": ["/marnadi/utils/__init__.py"], "/tests/test_response.py": ["/marnadi/__init__.py", "/marnadi/wsgi.py"], "/marnadi/http/data/decoders/application/x_www_form_urlencoded.py": ["/marnadi/http/data/decoders/__init__.py"]}
|
29,187
|
ram0973/marnadi
|
refs/heads/master
|
/marnadi/route.py
|
import re
from marnadi.utils import ReferenceType, metaclass, Lazy
class Route(object):
    """Single routing rule mapping a URL path template to a handler.

    Paths may contain ``{placeholder}`` segments (literal braces are
    written ``{{``/``}}``); per-placeholder regex patterns and value
    callbacks can be supplied via `patterns` and `callbacks`.
    """

    __slots__ = 'path', 'handler', 'params', 'pattern', 'name', 'callbacks', \
        'routes'

    placeholder_re = re.compile(r'\{([a-zA-Z_][a-zA-Z0-9_]*)\}')

    def __init__(self, path, handler=None, routes=(), name=None, params=None,
                 callbacks=None, patterns=None):
        self.path = path
        self.handler = Lazy(handler)
        self.routes = Routes(routes)
        self.name = name
        self.params = params or {}
        self.callbacks = callbacks or {}
        self.pattern = self.make_pattern(patterns)

    def __call__(self, *args, **kwargs):
        return self.handler(*args, **kwargs)

    def match(self, path):
        """Return (rest_of_path, params) when `path` matches, else None."""
        if self.pattern:
            match = self.pattern.match(path)
            if match:
                params = dict(
                    # pass each captured value through its callback, if any
                    (param, self.callbacks.get(param, lambda x: x)(value))
                    for param, value in match.groupdict().items()
                )
                return path[match.end(0):], dict(self.params, **params)
        elif path.startswith(self.path):
            # static path: plain prefix match
            return path[len(self.path):], self.params

    def make_pattern(self, patterns=None):
        """Compile the path into a regex, or None for static paths."""
        # strip escaped braces before looking for real placeholders
        unescaped_path = self.path.replace('{{', '').replace('}}', '')
        placeholders = self.placeholder_re.findall(unescaped_path)
        if not placeholders:
            return
        patterns = patterns or {}
        pattern = re.escape(self.path.replace('{{', '{').replace('}}', '}'))
        for placeholder in placeholders:
            pattern = pattern.replace(
                r'\{{{placeholder}\}}'.format(placeholder=placeholder),
                r'(?P<{name}>{pattern})'.format(
                    name=placeholder,
                    pattern=patterns.get(placeholder, r'\w+')
                ),
            )
        return re.compile(pattern)

    def restore_path(self, **params):
        """Inverse of matching: fill placeholders back into the template."""
        return self.path.format(**params)
@metaclass(ReferenceType)
class Routes(list):
    """Flat list of `Route` objects.

    Arbitrarily nested iterables of routes are recursively flattened;
    thanks to `ReferenceType`, ``Routes(routes_instance)`` returns the
    instance itself instead of copying it.
    """

    __slots__ = ()

    def __init__(self, seq=()):
        def unnest(routes):
            for route in map(Lazy, routes):
                if isinstance(route, Route):
                    yield route
                else:
                    # not a Route: assume an iterable of (nested) routes
                    for unnested in unnest(route):
                        yield unnested
        super(Routes, self).__init__(unnest(seq))

    def route(self, path, **route_params):
        """Decorator registering `handler` under `path`."""
        def _decorator(handler):
            self.append(Route(path, handler, **route_params))
            return handler
        return _decorator
|
{"/marnadi/utils/lazy.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/__init__.py": ["/marnadi/utils/__init__.py"], "/marnadi/response.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/http/cookies.py": ["/marnadi/utils/__init__.py"], "/tests/test_wsgi.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py", "/marnadi/wsgi.py"], "/marnadi/__init__.py": ["/marnadi/response.py", "/marnadi/route.py"], "/marnadi/http/__init__.py": ["/marnadi/http/cookies.py", "/marnadi/http/headers.py", "/marnadi/http/error.py", "/marnadi/http/data/__init__.py"], "/tests/test_route.py": ["/marnadi/__init__.py", "/marnadi/route.py"], "/marnadi/wsgi.py": ["/marnadi/__init__.py", "/marnadi/route.py", "/marnadi/utils/__init__.py"], "/marnadi/http/headers.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/decoders/application/json.py": ["/marnadi/http/__init__.py", "/marnadi/http/data/decoders/__init__.py"], "/tests/test_utils.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/error.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/utils/__init__.py": ["/marnadi/utils/lazy.py"], "/marnadi/http/data/decoders/__init__.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/route.py": ["/marnadi/utils/__init__.py"], "/tests/test_response.py": ["/marnadi/__init__.py", "/marnadi/wsgi.py"], "/marnadi/http/data/decoders/application/x_www_form_urlencoded.py": ["/marnadi/http/data/decoders/__init__.py"]}
|
29,188
|
ram0973/marnadi
|
refs/heads/master
|
/tests/test_response.py
|
import io
try:
import unittest2 as unittest
except ImportError:
import unittest
from marnadi import Response, Route
from marnadi.wsgi import Request, App
# Module-level handler fixtures; the "lazy" test cases below reference
# them by dotted import path, so the names must stay module-global.
handler_function = Response.get(lambda: 'foo')


def _get_hello(*args):
    return 'hello'


handler_class = type('MyHandler', (Response, ), {'get': _get_hello})
class ResponseTestCase(unittest.TestCase):
    """End-to-end tests driving a marnadi WSGI App with various handlers
    and asserting on status line, headers, and response body bytes."""

    def _handle_request(
        self,
        routes,
        environ,
        expected_result,
        expected_status="200 OK",
        expected_headers=None,
        unexpected_headers=None,
    ):
        """Build an App from `routes`, feed it `environ`, and assert the
        produced status, headers, and joined body bytes."""
        # WSGI start_response callback doubling as the header assertion.
        def start_response(status, headers):
            self.assertEqual(expected_status, status)
            for header in expected_headers or ():
                self.assertIn(header, headers)
            for header in unexpected_headers or ():
                self.assertNotIn(header, headers)
        app = App(routes=routes)
        actual_result = b''.join(app(environ, start_response))
        self.assertEqual(expected_result, actual_result)

    def test_as_function(self):
        """Handler given as a plain function wrapped by Response.get."""
        routes = (
            Route('/', handler_function),
        )
        environ = Request(dict(
            REQUEST_METHOD='GET',
            PATH_INFO='/',
        ))
        self._handle_request(
            routes=routes,
            environ=environ,
            expected_result=b'foo',
            expected_headers=(
                ('Content-Length', '3'),
            ),
        )

    def test_as_class(self):
        """Handler given as a Response subclass."""
        routes = (
            Route('/', handler_class),
        )
        environ = Request(dict(
            REQUEST_METHOD='GET',
            PATH_INFO='/',
        ))
        self._handle_request(
            routes=routes,
            environ=environ,
            expected_result=b'hello',
            expected_headers=(
                ('Content-Length', '5'),
            ),
        )

    def test_as_lazy_function(self):
        """Handler referenced lazily by dotted import path (function)."""
        routes = (
            Route('/', '%s.handler_function' % __name__),
        )
        environ = Request(dict(
            REQUEST_METHOD='GET',
            PATH_INFO='/',
        ))
        self._handle_request(
            routes=routes,
            environ=environ,
            expected_result=b'foo',
            expected_headers=(
                ('Content-Length', '3'),
            ),
        )

    def test_as_lazy_class(self):
        """Handler referenced lazily by dotted import path (class)."""
        routes = (
            Route('/', '%s.handler_class' % __name__),
        )
        environ = Request(dict(
            REQUEST_METHOD='GET',
            PATH_INFO='/',
        ))
        self._handle_request(
            routes=routes,
            environ=environ,
            expected_result=b'hello',
            expected_headers=(
                ('Content-Length', '5'),
            ),
        )

    def test_not_supported_method(self):
        """Unknown HTTP verb yields 501 with an Allow header."""
        routes = (
            Route('/', Response),
        )
        environ = Request(dict(
            REQUEST_METHOD='NOT_SUPPORTED_METHOD',
            PATH_INFO='/',
        ))
        self._handle_request(
            routes=routes,
            environ=environ,
            expected_status='501 Not Implemented',
            expected_result=b'501 Not Implemented',
            expected_headers=(
                ('Content-Type', 'text/plain; charset=utf-8'),
                ('Allow', 'OPTIONS'),
                ('Content-Length', '19'),
            ),
        )

    def test_not_allowed_method(self):
        """Known verb without a handler method yields 405."""
        routes = (
            Route('/', Response),
        )
        environ = Request(dict(
            REQUEST_METHOD='GET',
            PATH_INFO='/',
        ))
        self._handle_request(
            routes=routes,
            environ=environ,
            expected_status='405 Method Not Allowed',
            expected_result=b'405 Method Not Allowed',
            expected_headers=(
                ('Content-Type', 'text/plain; charset=utf-8'),
                ('Allow', 'OPTIONS'),
                ('Content-Length', '22'),
            ),
        )

    def test_post_application_json(self):
        """JSON request body is decoded before reaching the handler."""
        routes = (
            Route('/', type('', (Response, ), dict(
                post=lambda this: this.request.data,
            ))),
        )
        environ = Request({
            'REQUEST_METHOD': 'POST',
            'PATH_INFO': '/',
            'wsgi.input': io.BytesIO(b'"hello"'),
            'CONTENT_LENGTH': '7',
            'CONTENT_TYPE': 'application/json',
        })
        self._handle_request(
            routes=routes,
            environ=environ,
            expected_result=b'hello',
            expected_headers=(
                ('Content-Length', '5'),
            ),
        )

    def test_post_broken_application_json(self):
        """Malformed JSON body yields 400."""
        routes = (
            Route('/', type('', (Response, ), dict(
                post=lambda this: this.request.data,
            ))),
        )
        environ = Request({
            'REQUEST_METHOD': 'POST',
            'PATH_INFO': '/',
            'wsgi.input': io.BytesIO(b'"hello'),
            'CONTENT_LENGTH': '6',
            'CONTENT_TYPE': 'application/json',
        })
        self._handle_request(
            routes=routes,
            environ=environ,
            expected_status='400 Bad Request',
            expected_result=b'400 Bad Request',
            expected_headers=(
                ('Content-Type', 'text/plain; charset=utf-8'),
                ('Content-Length', '15'),
            ),
        )

    def test_post_application_x_www_form_urlencoded(self):
        """Form-encoded body is decoded into a dict."""
        routes = (
            Route('/', type('', (Response, ), dict(
                post=lambda this: this.request.data['hello'],
            ))),
        )
        environ = Request({
            'REQUEST_METHOD': 'POST',
            'PATH_INFO': '/',
            'wsgi.input': io.BytesIO(b'hello=world'),
            'CONTENT_LENGTH': '11',
            'CONTENT_TYPE': 'application/x-www-form-urlencoded',
        })
        self._handle_request(
            routes=routes,
            environ=environ,
            expected_result=b"world",
            expected_headers=(
                ('Content-Length', '5'),
            ),
        )

    def test_post(self, content_type=''):
        """Body with the given (or absent) content type passes through."""
        routes = (
            Route('/', type('', (Response, ), dict(
                post=lambda this: str(this.request.data),
            ))),
        )
        environ = Request({
            'REQUEST_METHOD': 'POST',
            'PATH_INFO': '/',
            'wsgi.input': io.BytesIO(b'hello'),
            'CONTENT_LENGTH': '5',
            'CONTENT_TYPE': content_type,
        })
        self._handle_request(
            routes=routes,
            environ=environ,
            expected_result=b'hello',
            expected_headers=(
                ('Content-Length', '5'),
            ),
        )

    def test_post_text_plain(self):
        # Same scenario as test_post but with an explicit text/plain type.
        self.test_post('text/plain')

    def test_post_broken_unicode(self):
        """Undecodable UTF-8 body yields 400."""
        routes = (
            Route('/', type('', (Response, ), dict(
                post=lambda this: this.request.data,
            ))),
        )
        environ = Request({
            'REQUEST_METHOD': 'POST',
            'PATH_INFO': '/',
            'wsgi.input': io.BytesIO(b'\xd0'),
            'CONTENT_LENGTH': '1',
        })
        self._handle_request(
            routes=routes,
            environ=environ,
            expected_status='400 Bad Request',
            expected_result=b'400 Bad Request',
            expected_headers=(
                ('Content-Type', 'text/plain; charset=utf-8'),
                ('Content-Length', '15'),
            ),
        )
|
{"/marnadi/utils/lazy.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/__init__.py": ["/marnadi/utils/__init__.py"], "/marnadi/response.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/http/cookies.py": ["/marnadi/utils/__init__.py"], "/tests/test_wsgi.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py", "/marnadi/wsgi.py"], "/marnadi/__init__.py": ["/marnadi/response.py", "/marnadi/route.py"], "/marnadi/http/__init__.py": ["/marnadi/http/cookies.py", "/marnadi/http/headers.py", "/marnadi/http/error.py", "/marnadi/http/data/__init__.py"], "/tests/test_route.py": ["/marnadi/__init__.py", "/marnadi/route.py"], "/marnadi/wsgi.py": ["/marnadi/__init__.py", "/marnadi/route.py", "/marnadi/utils/__init__.py"], "/marnadi/http/headers.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/decoders/application/json.py": ["/marnadi/http/__init__.py", "/marnadi/http/data/decoders/__init__.py"], "/tests/test_utils.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/error.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/utils/__init__.py": ["/marnadi/utils/lazy.py"], "/marnadi/http/data/decoders/__init__.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/route.py": ["/marnadi/utils/__init__.py"], "/tests/test_response.py": ["/marnadi/__init__.py", "/marnadi/wsgi.py"], "/marnadi/http/data/decoders/application/x_www_form_urlencoded.py": ["/marnadi/http/data/decoders/__init__.py"]}
|
29,189
|
ram0973/marnadi
|
refs/heads/master
|
/marnadi/http/data/decoders/application/x_www_form_urlencoded.py
|
try:
from urllib import parse
except ImportError:
import urlparse as parse
from marnadi.http.data.decoders import Decoder as BaseDecoder
class Decoder(BaseDecoder):
    """Decodes ``application/x-www-form-urlencoded`` request bodies.

    The raw payload produced by the base decoder is parsed as a query
    string; blank values are kept as empty strings.
    """

    __slots__ = ()

    def __call__(self, request):
        raw_payload = super(Decoder, self).__call__(request)
        pairs = parse.parse_qsl(raw_payload, keep_blank_values=True)
        return dict(pairs)
|
{"/marnadi/utils/lazy.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/__init__.py": ["/marnadi/utils/__init__.py"], "/marnadi/response.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/http/cookies.py": ["/marnadi/utils/__init__.py"], "/tests/test_wsgi.py": ["/marnadi/__init__.py", "/marnadi/utils/__init__.py", "/marnadi/wsgi.py"], "/marnadi/__init__.py": ["/marnadi/response.py", "/marnadi/route.py"], "/marnadi/http/__init__.py": ["/marnadi/http/cookies.py", "/marnadi/http/headers.py", "/marnadi/http/error.py", "/marnadi/http/data/__init__.py"], "/tests/test_route.py": ["/marnadi/__init__.py", "/marnadi/route.py"], "/marnadi/wsgi.py": ["/marnadi/__init__.py", "/marnadi/route.py", "/marnadi/utils/__init__.py"], "/marnadi/http/headers.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/data/decoders/application/json.py": ["/marnadi/http/__init__.py", "/marnadi/http/data/decoders/__init__.py"], "/tests/test_utils.py": ["/marnadi/utils/__init__.py"], "/marnadi/http/error.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/utils/__init__.py": ["/marnadi/utils/lazy.py"], "/marnadi/http/data/decoders/__init__.py": ["/marnadi/http/__init__.py", "/marnadi/utils/__init__.py"], "/marnadi/route.py": ["/marnadi/utils/__init__.py"], "/tests/test_response.py": ["/marnadi/__init__.py", "/marnadi/wsgi.py"], "/marnadi/http/data/decoders/application/x_www_form_urlencoded.py": ["/marnadi/http/data/decoders/__init__.py"]}
|
29,239
|
SonyPony/helmnet
|
refs/heads/main
|
/helmnet/spectral.py
|
from numpy import linspace
import torch
from torch import nn
import numpy as np
@torch.jit.script
def complex_mul(x, y):
    """Elementwise product of complex tensors stored in the [..., 2]
    layout, where the last axis holds the real and imaginary parts.

    Args:
        x (tensor): First operand
        y (tensor): Second operand
    """
    xr = x[..., 0]
    xi = x[..., 1]
    yr = y[..., 0]
    yi = y[..., 1]
    # (xr + i*xi) * (yr + i*yi)
    real_part = xr * yr - xi * yi
    imag_part = xr * yi + xi * yr
    return torch.stack([real_part, imag_part], dim=-1)
@torch.jit.script
def conj(x):
    """Complex conjugate of a tensor in the [..., 2] real/imag layout.

    A 1-D input is treated as a single complex number [re, im].
    """
    if x.dim() == 1:
        real = x[0]
        imag = x[1]
    else:
        real = x[..., 0]
        imag = x[..., 1]
    return torch.stack([real, -imag], dim=-1)
@torch.jit.script
def fast_laplacian_with_pml(u, kx, ky, kx_sq, ky_sq, ax, bx, ay, by):
    """Laplacian of `u` with PML absorbing boundaries, evaluated with one
    forward FFT and one batched inverse FFT.

    ax,bx are the 1/gamma and gamma'/gamma^3 coefficients in the laplacian
    operator of the paper, for the x axis (ay, by likewise for y).

    NOTE(review): all tensors use the [..., 2] real/imag layout and this
    relies on the legacy function-style torch.fft/torch.ifft API, which
    was removed in torch >= 1.8 — confirm the pinned torch version.
    """
    # Make 2d fourier transform of signal
    u_fft = torch.fft(u, signal_ndim=2, normalized=False)
    # get derivatives: multiply in the spectral domain by the precomputed
    # complex multipliers (first-order: kx/ky, second-order: kx_sq/ky_sq;
    # see FastLaplacianWithPML.init_variables for their construction).
    dx = complex_mul(u_fft, kx)
    dy = complex_mul(u_fft, ky)
    ddx = complex_mul(u_fft, kx_sq)
    ddy = complex_mul(u_fft, ky_sq)
    # All four inverse transforms batched into a single call.
    derivatives = torch.ifft(
        torch.stack([dx, dy, ddx, ddy], dim=0),
        signal_ndim=2,
        normalized=False,
    )
    dx = derivatives[0]
    dy = derivatives[1]
    ddx = derivatives[2]
    ddy = derivatives[3]
    # Modified laplacian: L = ax*dx' + ay*dy' + bx*dx'' + by*dy''
    return (
        complex_mul(ax, dx)
        + complex_mul(ay, dy)
        + complex_mul(bx, ddx)
        + complex_mul(by, ddy)
    )
'''
class FourierDerivative(nn.Module):
def __init__(self, size: int, direction="x"):
super().__init__()
# Defining the spectral 1d operator
k = 2 * np.pi * linspace(-0.5, 0.5, size, endpoint=False)
k = np.concatenate((k[size // 2 :], k[: size // 2]))
# Make it 2D on the right direction
if direction == "x":
kx = k
ky = kx * 0.0
kx, ky = np.meshgrid(kx, ky)
k = kx
else:
ky = k
kx = ky * 0.0
kx, ky = np.meshgrid(kx, ky)
k = ky
k_tensor = torch.from_numpy(k).unsqueeze(0).unsqueeze(3).float()
k_tensor = torch.cat([-k_tensor, k_tensor], dim=3)
# Save as parameter for automatic GPU loading, non learnable
self.k = torch.nn.Parameter(k_tensor, requires_grad=False)
def forward(self, x):
"""x must be [batch, x, y, real/imag]"""
# Move to fourier basis
Fx = torch.fft(x, signal_ndim=2, normalized=False)
# Make derivative
DFx = self.k * torch.flip(Fx, dims=[3])
# Back to spatial domain
Dx = torch.ifft(DFx, signal_ndim=2, normalized=False)
return Dx
'''
class FourierDerivative(nn.Module):
    """Spectral first-derivative operator along one axis of a 2D field
    stored in the [batch, x, y, real/imag] layout.

    The derivative is taken by multiplying the FFT of the input by i*k,
    implemented on the real/imag layout via a flip of the last axis and
    the precomputed multiplier self.k = [-k, k].
    """

    def __init__(self, size: int, direction="x"):
        """
        Args:
            size: side length of the square domain (grid points).
            direction: "x" or "y" — axis along which to differentiate.
        """
        super().__init__()
        # Defining the spectral 1d operator: angular frequencies,
        # reordered into FFT (wrap-around) order.
        k = 2 * np.pi * np.linspace(-0.5, 0.5, size, endpoint=False)
        k = np.concatenate((k[size // 2 :], k[: size // 2]))
        # Make it 2D on the right direction
        if direction == "x":
            kx = k
            ky = kx * 0.0
            kx, ky = np.meshgrid(kx, ky)
            k = kx
        else:
            ky = k
            kx = ky * 0.0
            kx, ky = np.meshgrid(kx, ky)
            k = ky
        k_tensor = torch.from_numpy(k).unsqueeze(0).unsqueeze(3).float()
        # Raw (single-channel) grid kept for reuse by
        # FastLaplacianWithPML.init_variables.
        self.k_tensor = k_tensor
        # [-k, k] together with the flip in forward() implements
        # multiplication by i*k on [real, imag] data.
        k_tensor = torch.cat([-k_tensor, k_tensor], dim=3)
        # Save as parameter for automatic GPU loading, non learnable
        self.k = torch.nn.Parameter(k_tensor, requires_grad=False)

    def forward(self, x):
        """x must be [batch, x, y, real/imag].

        NOTE(review): uses the legacy function-style torch.fft/torch.ifft
        API (removed in torch >= 1.8).
        """
        # BUG FIX: removed the unreachable statement `x[..., 1] *= -1`
        # and the dead commented-out block that followed this return.
        return torch.ifft(
            torch.fft(x, signal_ndim=2, normalized=False).flip(dims=[3]).mul(self.k),
            signal_ndim=2,
            normalized=False,
        )
class LaplacianWithPML(nn.Module):
    """Spectral Laplacian with a Perfectly Matched Layer (PML) on the
    domain edges, computed by chaining two gamma-scaled first derivatives
    per axis: L f = (gamma_x d/dx)^2 f + (gamma_y d/dy)^2 f."""

    def __init__(self, domain_size: int, PMLsize: int, k: float, sigma_max: float):
        """
        Args:
            domain_size: side length of the square domain (grid points).
            PMLsize: thickness of the absorbing layer in grid points.
            k: wavenumber entering the gamma functions.
            sigma_max: peak absorption at the outer domain edge.
        """
        super().__init__()
        # Settings
        self.PMLsize = PMLsize
        self.domain_size = domain_size
        self.sigma_max = sigma_max
        self.k = k
        # Calculating the gamma functions for the PML using
        # quadratic sigmas and
        # https://www.sciencedirect.com/science/article/pii/S0021999106004487
        self.gamma_x, self.gamma_y = self.get_gamma_functions()
        self.gamma_x = torch.nn.Parameter(self.gamma_x, requires_grad=False)
        self.gamma_y = torch.nn.Parameter(self.gamma_y, requires_grad=False)
        # Derivative operators
        self.dx = FourierDerivative(size=domain_size, direction="x")
        self.dy = FourierDerivative(size=domain_size, direction="y")

    def pure_derivatives(self, f):
        """Plain spectral derivatives of f along x and y, without PML."""
        # X direction
        dx = self.dx(f)
        dy = self.dy(f)
        return dx, dy

    def sigmas(self):
        # Absorption profiles built in get_gamma_functions (numpy arrays).
        return self.sigma_x, self.sigma_y

    def get_gamma_functions(self):
        """Builds the gamma functions for the PML
        Returns:
            torch.tensor, torch.tensor: The gamma_x and gamma_y required by the PML
        """
        # Quadratic absorption profile: maximal at the outer edge,
        # decaying to zero at the inner PML boundary, mirrored on both
        # sides of the domain.
        pml_coord = np.arange(self.PMLsize)
        sigma_outer = self.sigma_max * (np.abs(1 - pml_coord / self.PMLsize) ** 2)
        sigma = np.zeros((self.domain_size,))
        sigma[: self.PMLsize] = sigma_outer
        sigma[-self.PMLsize :] = np.flip(sigma_outer)
        sigma_x, sigma_y = np.meshgrid(sigma, sigma)
        self.sigma_x = sigma_x
        self.sigma_y = sigma_y
        # Making gammas: gamma = 1 / (1 + (i/k) * sigma)
        gamma_x = 1.0 / (np.ones_like(sigma_x) + (1j / self.k) * sigma_x)
        gamma_y = 1.0 / (np.ones_like(sigma_y) + (1j / self.k) * sigma_y)
        # Turning into tensors (the [..., 2] real/imag layout used by
        # complex_mul, with a leading batch axis)
        real = torch.from_numpy(np.real(gamma_x))
        imag = torch.from_numpy(np.imag(gamma_x))
        gamma_x = torch.stack([real, imag], dim=-1).unsqueeze(0)
        real = torch.from_numpy(np.real(gamma_y))
        imag = torch.from_numpy(np.imag(gamma_y))
        gamma_y = torch.stack([real, imag], dim=-1).unsqueeze(0)
        # Return
        return gamma_x.float(), gamma_y.float()

    def forward(self, f):
        """Apply the PML laplacian to f ([batch, x, y, real/imag])."""
        # X direction
        gx_f = complex_mul(self.gamma_x, self.dx(f))
        gxgx_f = complex_mul(self.gamma_x, self.dx(gx_f))
        # Y direction
        gy_f = complex_mul(self.gamma_y, self.dy(f))
        gygy_f = complex_mul(self.gamma_y, self.dy(gy_f))
        return gxgx_f + gygy_f
class FastLaplacianWithPML(nn.Module):
    """Faster variant of LaplacianWithPML: instead of chaining two
    derivative passes per axis, it precomputes the complex spectral
    multipliers and PML coefficient maps once and evaluates the modified
    laplacian L = ax dx' + bx dx'' + ay dy' + by dy'' with the scripted
    fast_laplacian_with_pml function (2 FFT calls per forward)."""

    def __init__(self, domain_size: int, PMLsize: int, k: float, sigma_max: float):
        """
        Args:
            domain_size: side length of the square domain (grid points).
            PMLsize: thickness of the absorbing layer in grid points.
            k: wavenumber entering the gamma functions.
            sigma_max: peak absorption at the outer domain edge.
        """
        super().__init__()
        self.init_variables(PMLsize, domain_size, sigma_max, k)

    def forward(self, x):
        """Apply the PML laplacian to x ([batch, x, y, real/imag])."""
        return fast_laplacian_with_pml(
            x,
            self.kx,
            self.ky,
            self.kx_sq,
            self.ky_sq,
            self.ax,
            self.bx,
            self.ay,
            self.by,
        )

    def sigmas(self):
        # Absorption profiles (registered as non-learnable parameters).
        return self.sigma_x, self.sigma_y

    def init_variables(self, PMLsize, domain_size, sigma_max, k):
        """Precompute the spectral multipliers (kx, ky, kx_sq, ky_sq) and
        PML coefficient tensors (ax, bx, ay, by)."""
        # Settings
        self.PMLsize = PMLsize
        self.domain_size = domain_size
        self.sigma_max = sigma_max
        self.k = k
        self.get_gamma_functions()
        # Derivative operators in fourier domain; only their raw k grids
        # are needed here — the modules themselves are deleted below.
        self.dx = FourierDerivative(size=domain_size, direction="x")
        self.dy = FourierDerivative(size=domain_size, direction="y")
        kx = self.dx.k_tensor
        ky = self.dy.k_tensor
        kx_sq = kx.pow(2)
        ky_sq = ky.pow(2)
        # Pack as [real, imag] complex multipliers for complex_mul:
        # first derivative = i*k, second derivative = -k^2.
        zeros = torch.zeros_like(kx)
        kx = torch.cat([zeros, kx], dim=-1)  # kx is imaginary
        ky = torch.cat([zeros, ky], dim=-1)
        kx_sq = torch.cat([-kx_sq, zeros], dim=-1)  # k_sq is negated
        ky_sq = torch.cat([-ky_sq, zeros], dim=-1)
        self.kx = torch.nn.Parameter(kx, requires_grad=False)
        self.ky = torch.nn.Parameter(ky, requires_grad=False)
        self.kx_sq = torch.nn.Parameter(kx_sq, requires_grad=False)
        self.ky_sq = torch.nn.Parameter(ky_sq, requires_grad=False)
        # Gamma functions
        del self.dx
        del self.dy

    def get_gamma_functions(self):
        """Builds the gamma functions for the PML using
        quadratic sigmas
        https://www.sciencedirect.com/science/article/pii/S0021999106004487
        Returns:
            torch.tensor, torch.tensor: The gamma_x and gamma_y required by the PML
        """
        # Constructing sigmas: quadratic profile, maximal at the outer
        # edge, mirrored on both sides of the domain.
        pml_coord = np.arange(self.PMLsize)
        sigma_outer = self.sigma_max * (np.abs(1 - pml_coord / self.PMLsize) ** 2)
        sigma = np.zeros((self.domain_size,))
        sigma[: self.PMLsize] = sigma_outer
        sigma[-self.PMLsize :] = np.flip(sigma_outer)
        sigma_x, sigma_y = np.meshgrid(sigma, sigma)
        self.sigma_x = torch.tensor(sigma_x).float()
        self.sigma_y = torch.tensor(sigma_y).float()
        # Making inverse gammas: 1/gamma = 1 + (i/k) * sigma
        inv_gamma_x = 1.0 / (
            np.ones_like(sigma_x) + (1j / self.k) * sigma_x
        )  # TODO: this works because w=c0=k=1
        inv_gamma_y = 1.0 / (np.ones_like(sigma_y) + (1j / self.k) * sigma_y)
        # Making gamma_prime: analytic derivative of the quadratic sigma;
        # NOTE(review): the right-hand side is negated via -np.flip —
        # presumably to account for the mirrored profile; confirm against
        # the referenced paper.
        sigma_prime = (
            -2 * self.sigma_max * (1 - pml_coord / self.PMLsize) / self.PMLsize
        )
        sigma = np.zeros((self.domain_size,))
        sigma[: self.PMLsize] = sigma_prime
        sigma[-self.PMLsize :] = -np.flip(sigma_prime)
        sigma_x_prime, sigma_y_prime = np.meshgrid(sigma, sigma)
        gamma_x_prime = (1j / self.k) * sigma_x_prime
        gamma_y_prime = (1j / self.k) * sigma_y_prime
        # Making coefficients for the modified laplacian as
        # L = ax dx' + bx dx'' + ay dy' + by dy''
        self.ax = -gamma_x_prime * (inv_gamma_x ** 3)
        self.bx = inv_gamma_x ** 2
        self.ay = -gamma_y_prime * (inv_gamma_y ** 3)
        self.by = inv_gamma_y ** 2
        # Turning into tensors ([..., 2] real/imag layout, batch axis)
        real = torch.from_numpy(np.real(self.ax))
        imag = torch.from_numpy(np.imag(self.ax))
        self.ax = torch.stack([real, imag], dim=-1).unsqueeze(0).float()
        real = torch.from_numpy(np.real(self.bx))
        imag = torch.from_numpy(np.imag(self.bx))
        self.bx = torch.stack([real, imag], dim=-1).unsqueeze(0).float()
        real = torch.from_numpy(np.real(self.ay))
        imag = torch.from_numpy(np.imag(self.ay))
        self.ay = torch.stack([real, imag], dim=-1).unsqueeze(0).float()
        real = torch.from_numpy(np.real(self.by))
        imag = torch.from_numpy(np.imag(self.by))
        self.by = torch.stack([real, imag], dim=-1).unsqueeze(0).float()
        # Make them parameters for automatic device assignment
        self.sigma_x = torch.nn.Parameter(self.sigma_x, requires_grad=False)
        self.sigma_y = torch.nn.Parameter(self.sigma_y, requires_grad=False)
        self.ax = torch.nn.Parameter(self.ax, requires_grad=False)
        self.bx = torch.nn.Parameter(self.bx, requires_grad=False)
        self.ay = torch.nn.Parameter(self.ay, requires_grad=False)
        self.by = torch.nn.Parameter(self.by, requires_grad=False)
|
{"/helmnet/architectures.py": ["/helmnet/dataloaders.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/source.py", "/helmnet/replaybuffer.py"], "/helmnet/__init__.py": ["/helmnet/architectures.py", "/helmnet/dataloaders.py", "/helmnet/hybridnet.py", "/helmnet/source.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/replaybuffer.py"], "/train.py": ["/helmnet/__init__.py"], "/test.py": ["/helmnet/__init__.py", "/helmnet/support_functions.py"], "/helmnet/hybridnet.py": ["/helmnet/architectures.py", "/helmnet/dataloaders.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/source.py", "/helmnet/replaybuffer.py"], "/produce_figures.py": ["/evaluate.py", "/helmnet/support_functions.py"], "/evaluate.py": ["/helmnet/__init__.py", "/helmnet/dataloaders.py"]}
|
29,240
|
SonyPony/helmnet
|
refs/heads/main
|
/helmnet/architectures.py
|
import torch
import torch.nn as nn
from torch.utils.data import DataLoader
from torch.nn.functional import hardtanh
from random import randint, choice
import pytorch_lightning as pl
import numpy as np
from helmnet.dataloaders import get_dataset
from helmnet.spectral import LaplacianWithPML
from helmnet.utils import load_settings, log_wavefield
from helmnet.source import Source
from helmnet.replaybuffer import ReplayBuffer, Experience
from torch.optim.lr_scheduler import ReduceLROnPlateau
def getActivationFunction(
    act_function_name: str, features=None, end=False
) -> nn.Module:
    """Returns the activation function module given
    the name

    Args:
        act_function_name (str): Name of the activation function, case insensitive
        features: Channel count, used only by 'relu_batchnorm' to size
            the BatchNorm2d layer.
        end (bool): When True, 'relu_batchnorm' omits the batch norm
            (used on final layers).

    Raises:
        NotImplementedError: Raised if the activation function is unknown

    Returns:
        nn.Module
    """
    # Normalize once instead of calling .lower() in every branch.
    name = act_function_name.lower()
    if name == "relu":
        return nn.ReLU(inplace=True)
    elif name == "celu":
        return nn.CELU(inplace=True)
    elif name == "relu_batchnorm":
        # BUG FIX: an unreachable `return nn.CELU(inplace=True)` that
        # followed this if/else (both branches already return) has been
        # removed.
        if end:
            return nn.ReLU(inplace=True)
        return nn.Sequential(nn.ReLU(inplace=True), nn.BatchNorm2d(features))
    elif name == "tanh":
        return nn.Tanh()
    elif name == "prelu":
        return nn.PReLU()
    elif name == "gelu":
        return nn.GELU()
    elif name == "tanhshrink":
        return nn.Tanhshrink()
    elif name == "softplus":
        return nn.Softplus()
    elif name == "leakyrelu":
        return nn.LeakyReLU(inplace=True)
    else:
        err = "Unknown activation function {}".format(act_function_name)
        raise NotImplementedError(err)
class OutConv(nn.Module):
    """Output projection: a 1x1 2D convolution remapping channel count."""

    def __init__(self, in_channels: int, out_channels: int):
        """
        Args:
            in_channels (int): Number of input channels
            out_channels (int): Number of output channels
        """
        super().__init__()
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=1)

    def forward(self, x):
        """Apply the 1x1 convolution to x ([batch, C, H, W])."""
        return self.conv(x)
class DoubleConv(nn.Module):
    """(convolution => actFunction) * 2 — two 3x3 convolutions with an
    activation between them; padding preserves spatial size."""

    def __init__(
        self,
        in_channels: int,
        out_channels: int,
        mid_channels=None,
        activation_fun="relu",
    ):
        super().__init__()
        mid = out_channels if mid_channels is None else mid_channels
        self.double_conv = nn.Sequential(
            nn.Conv2d(in_channels, mid, kernel_size=3, padding=1),
            getActivationFunction(activation_fun, mid),
            nn.Conv2d(mid, out_channels, kernel_size=3, padding=1),
        )

    def forward(self, x):
        return self.double_conv(x)
class CleanDoubleConv(nn.Module):
    """(convolution => actFunction) * 2 — non-residual twin of
    ResDoubleConv (same layer stack, plain pass-through forward)."""

    def __init__(
        self,
        in_channels: int,
        out_channels: int,
        mid_channels=None,
        activation_fun="relu",
    ):
        super().__init__()
        mid = out_channels if mid_channels is None else mid_channels
        self.double_conv = nn.Sequential(
            nn.Conv2d(in_channels, mid, kernel_size=3, padding=1),
            getActivationFunction(activation_fun, mid),
            nn.Conv2d(mid, out_channels, kernel_size=3, padding=1),
        )

    def forward(self, x):
        return self.double_conv(x)
class ResDoubleConv(nn.Module):
    """(convolution => actFunction) * 2 with a residual skip connection.

    The identity skip requires in_channels == out_channels.
    """

    def __init__(
        self,
        in_channels: int,
        out_channels: int,
        mid_channels=None,
        activation_fun="relu",
    ):
        super().__init__()
        mid = out_channels if mid_channels is None else mid_channels
        self.double_conv = nn.Sequential(
            nn.Conv2d(in_channels, mid, kernel_size=3, padding=1),
            getActivationFunction(activation_fun, mid),
            nn.Conv2d(mid, out_channels, kernel_size=3, padding=1),
        )

    def forward(self, x):
        # Residual: conv stack output plus the identity path.
        return self.double_conv(x) + x
class ConvGRUCell(nn.Module):
    """
    Basic CGRU cell: a convolutional GRU update over 2D feature maps.
    """

    def __init__(self, in_channels, hidden_channels, kernel_size, bias):
        super(ConvGRUCell, self).__init__()
        self.input_dim = in_channels
        self.hidden_dim = hidden_channels
        self.kernel_size = kernel_size
        # "Same" padding for odd kernel sizes.
        self.padding = kernel_size[0] // 2, kernel_size[1] // 2
        self.bias = bias

        # All three gates share the same convolution configuration:
        # [input, hidden] channels in, hidden channels out.
        def make_gate():
            return nn.Conv2d(
                in_channels=self.input_dim + self.hidden_dim,
                out_channels=self.hidden_dim,
                kernel_size=self.kernel_size,
                padding=self.padding,
                bias=self.bias,
            )

        self.update_gate = make_gate()
        self.reset_gate = make_gate()
        self.out_gate = make_gate()

    def forward(self, input_tensor, cur_state):
        """One GRU step; tensors are [batch, channel, height, width]."""
        h_cur = cur_state
        stacked = torch.cat([input_tensor, h_cur], dim=1)
        z = torch.sigmoid(self.update_gate(stacked))
        r = torch.sigmoid(self.reset_gate(stacked))
        candidate = torch.tanh(
            self.out_gate(torch.cat([input_tensor, h_cur * r], dim=1))
        )
        # Convex combination of old state and candidate, gated by z.
        return h_cur * (1 - z) + candidate * z
class EncoderBlock(nn.Module):
    """One encoder stage with an optional recurrent state.

    Produces the pre-downsampling feature map (for the skip connection)
    and a stride-2 downsampled output.
    """

    def __init__(
        self,
        num_features: int,
        state_size=2,
        activation_function="prelu",
        use_state=True,
        domain_size=0,
    ):
        super().__init__()
        self.state_size = state_size
        self.use_state = use_state
        self.domain_size = domain_size
        self.num_features = num_features
        # Feature transform; when stateful, the state channels are
        # concatenated to the input.
        self.conv_signal = DoubleConv(
            self.num_features + self.state_size * self.use_state,
            self.num_features,
            activation_fun=activation_function,
        )
        # Downward path (stride-2 convolution halves spatial size)
        self.down = nn.Conv2d(
            self.num_features, self.num_features, kernel_size=8, padding=3, stride=2
        )
        if self.use_state:
            # State update: maps [features, state] -> new state.
            self.conv_state = DoubleConv(
                self.num_features + self.state_size,
                self.state_size,
                activation_fun=activation_function,
            )
        self.state = None

    def set_state(self, state):
        self.state = state

    def get_state(self):
        return self.state

    def clear_state(self, x):
        """Reset the state to zeros sized for the batch in `x`."""
        # BUG FIX: the state was allocated with a hard-coded 2 channels,
        # which broke torch.cat in forward() for any state_size != 2;
        # allocate self.state_size channels instead.
        self.state = torch.zeros(
            [x.shape[0], self.state_size, self.domain_size, self.domain_size],
            device=x.device,
        )

    def forward(self, x):
        if self.use_state:
            if self.state is None:
                raise ValueError(
                    "You must set or clear the state before using this module"
                )
            x_and_state = torch.cat([x, self.state], 1)
            output = self.conv_signal(x_and_state)
            self.state = self.conv_state(torch.cat([output, self.state], 1))
        else:
            output = self.conv_signal(x)
        return output, self.down(output)
class ResNet(nn.Module):
    """Residual CNN baseline carrying a 2-channel state between calls by
    stacking it onto the input and splitting it off the output."""

    def __init__(
        self,
        activation_function: str,
        depth: int,
        domain_size: int,
        features: int,
        inchannels: int,
        state_channels: int,
        state_depth: int,
    ):
        super().__init__()
        # Hyperparameters
        self.activation_function = activation_function
        self.depth = depth
        self.domain_size = domain_size
        self.features = features
        self.inchannels = inchannels
        self.state_channels = state_channels
        self.state_depth = state_depth
        self.state = None
        # Input projection, residual trunk, output projection.
        layers = [nn.Conv2d(inchannels + 2, features, 7, padding=3)]
        for _ in range(self.depth):
            layers.append(ResDoubleConv(features, features, features * 2))
        layers.append(nn.Conv2d(features, 4, 7, padding=3))
        self.network = nn.Sequential(*layers)

    # The methods below exist only to mirror HybridNet's state API.
    def init_by_size(self):
        return

    def get_states(self, flatten=False):
        return

    def clear_states(self, x):
        self.state = None
        return

    def set_states(self, states, flatten=False):
        return

    def flatten_state(self, h_list):
        return

    def unflatten_state(self, h_flatten):
        return

    def forward(self, x):
        if self.state is None:
            # Lazily allocate a zero state matching the batch geometry.
            self.state = torch.zeros(
                (x.shape[0], 2, x.shape[2], x.shape[3]), device=x.device
            )
        y = self.network(torch.cat([x, self.state], 1))
        # First two output channels feed the next call as state.
        self.state = y[:, :2]
        return y[:, 2:]
class HybridNet(nn.Module):
    """Recurrent U-Net: an encoder/decoder with skip connections whose
    first `state_depth` encoder stages carry a per-resolution recurrent
    state, with helpers to pack all states into one flat tensor and back.
    """

    def __init__(
        self,
        activation_function: str,
        depth: int,
        domain_size: int,
        features: int,
        inchannels: int,
        state_channels: int,
        state_depth: int,
    ):
        super().__init__()
        # Hyperparameters
        self.activation_function = activation_function
        self.depth = depth
        self.domain_size = domain_size
        self.features = features
        self.inchannels = inchannels
        self.state_channels = state_channels
        self.state_depth = state_depth
        # Define states boundaries for packing and unpacking
        self.init_by_size()
        # Input layer
        self.inc = DoubleConv(
            self.inchannels, self.features, activation_fun=self.activation_function
        )
        # Encoding layer: one EncoderBlock per level; only the first
        # `state_depth` levels are stateful.
        self.enc = nn.ModuleList(
            [
                EncoderBlock(
                    self.features,
                    state_size=self.state_channels,
                    activation_function=self.activation_function,
                    use_state=d < self.state_depth,
                    domain_size=self.states_dimension[d],
                )
                for d in range(self.depth)
            ]
        )
        # Decode path: all but the last decoder take the concatenated
        # [upsampled, skip] features (hence the doubled channel count).
        self.decode = nn.ModuleList(
            [
                DoubleConv(
                    self.features + self.features * (i < self.depth),
                    self.features,
                    activation_fun=self.activation_function,
                )
                for i in range(self.depth + 1)
            ]
        )
        # Upsampling (stride-2 transposed convolutions)
        self.up = nn.ModuleList(
            [
                nn.ConvTranspose2d(
                    self.features,
                    self.features,
                    kernel_size=8,
                    padding=3,
                    output_padding=0,
                    stride=2,
                )
                for i in range(self.depth)
            ]
        )
        # Output layer
        self.outc = OutConv(self.features, 2)

    def init_by_size(self):
        # This helps to reshape the state to the correct dimensions:
        # per-level spatial sizes (halved at each level) and the flat
        # [start, end) offsets of each level in the packed state tensor.
        self.states_dimension = [self.domain_size // 2 ** x for x in range(self.depth)]
        self.total_state_length = sum(map(lambda x: x ** 2, self.states_dimension))
        self.state_boundaries = []
        for d in range(self.depth):
            if d == 0:
                self.state_boundaries.append([0, self.states_dimension[d] ** 2])
            else:
                self.state_boundaries.append(
                    [
                        self.state_boundaries[-1][-1],
                        self.state_boundaries[-1][-1] + self.states_dimension[d] ** 2,
                    ]
                )

    def get_states(self, flatten=False):
        """Collect every encoder state, optionally packed flat."""
        h = []
        for enc in self.enc:
            h.append(enc.get_state())
        if flatten:
            return self.flatten_state(h)
        else:
            return h

    def clear_states(self, x):
        """Zero every encoder state for the batch geometry of `x`."""
        for enc in self.enc:
            enc.clear_state(x)

    def set_states(self, states, flatten=False):
        # NOTE(review): when flatten is False this is a no-op — states
        # are only assigned after unflattening; confirm intended.
        if flatten:
            h = self.unflatten_state(states)
            for enc, state in zip(self.enc[: len(h)], h):
                enc.set_state(state)

    def flatten_state(self, h_list):
        """Pack per-level [B, C, H, H] states into one [B, C, L] tensor."""
        h = []
        for x in h_list:
            h.append(x.view(x.shape[0], x.shape[1], -1))
        return torch.cat(h, 2)

    def unflatten_state(self, h_flatten):
        """Inverse of flatten_state, using the precomputed boundaries."""
        h = []
        h_shape = h_flatten.shape
        for boundaries, size in zip(self.state_boundaries, self.states_dimension):
            h_d_flat = h_flatten[:, :, boundaries[0] : boundaries[1]]
            h.append(h_d_flat.view(h_shape[0], h_shape[1], size, size))
        return h

    def forward(self, x):
        # First feature transformation
        x = self.inc(x)
        # Downsampling tree and extracting new states
        inner_signals = []
        for d in range(self.depth):
            # Encode signal
            inner, x = self.enc[d](x)
            # Store signal (skip connection for the decode path)
            inner_signals.append(inner)
        # Upscaling
        x = self.decode[-1](x)
        for d in range(self.depth - 1, -1, -1):
            # Upscale
            x = self.up[d](x)
            # Concatenate inner path
            x = torch.cat([x, inner_signals[d]], 1)
            # Decode
            x = self.decode[d](x)
        # Output layer
        out = self.outc(x)
        return out
|
{"/helmnet/architectures.py": ["/helmnet/dataloaders.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/source.py", "/helmnet/replaybuffer.py"], "/helmnet/__init__.py": ["/helmnet/architectures.py", "/helmnet/dataloaders.py", "/helmnet/hybridnet.py", "/helmnet/source.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/replaybuffer.py"], "/train.py": ["/helmnet/__init__.py"], "/test.py": ["/helmnet/__init__.py", "/helmnet/support_functions.py"], "/helmnet/hybridnet.py": ["/helmnet/architectures.py", "/helmnet/dataloaders.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/source.py", "/helmnet/replaybuffer.py"], "/produce_figures.py": ["/evaluate.py", "/helmnet/support_functions.py"], "/evaluate.py": ["/helmnet/__init__.py", "/helmnet/dataloaders.py"]}
|
29,241
|
SonyPony/helmnet
|
refs/heads/main
|
/helmnet/source.py
|
import numpy as np
import torch
class Source:
    """Defines a (complex) monochromatic source.

    This is made to work easily with pytorch, so some outputs may have some
    extra dimension which appear counter-intuitive.
    """

    def __init__(
        self,
        image_size,
        omega=1,
        location=None,
        amplitude=1.0,
        phase=0.0,
        smooth=True,
    ):
        """Initializes source.

        Args:
            image_size (int): Image dimension (domain is image_size x image_size).
            omega (float, optional): Angular frequency of the source, i.e. 2*pi*f. Defaults to 1.
            location (list, optional): Source location as [row, col]. Defaults to [180, 50].
            amplitude (float, optional): Source amplitude. Defaults to 1.0.
            phase (float, optional): Source phase. Defaults to 0.0.
            smooth (bool, optional): If `True`, the source is smoothed in the spatial
                frequency domain using a Blackman window. Defaults to True.
        """
        self.L = image_size
        # Bug fix: the original signature used the mutable default
        # `location=[180, 50]`, which is shared across all instances and
        # can be mutated by one of them. Use None as sentinel instead.
        self.location = [180, 50] if location is None else location
        self.t = None
        self.omega = omega
        self.amplitude = amplitude
        self.phase = phase
        self.make_abs_spatial_map(smooth=smooth)

    def make_abs_spatial_map(self, smooth=True):
        """Defines the spatial amplitude map in absolute value.

        This should ideally be a complex map if one wants to have multiple
        monochromatic sources, however for the moment we are dealing only
        with single point sources.

        Args:
            smooth (bool, optional): If `True`, the source is smoothed in the spatial
                frequency domain using a Blackman window. Defaults to True.
        """
        # TODO: Make complex such that whatever spatial map can be defined.
        spatial_map = np.zeros((self.L, self.L))
        spatial_map[self.location[0], self.location[1]] = self.amplitude
        # Blackman smoothing in frequency
        sp_map_frequency = np.fft.fftshift(np.fft.fft2(spatial_map))
        if smooth:
            blackman = np.blackman(self.L)
            blackman_2d = np.outer(blackman, blackman)
            sp_map_frequency *= blackman_2d
        # This is a complex map and that's fine
        complex_spatial_map = np.fft.ifft2(np.fft.ifftshift(sp_map_frequency))
        self._abs_spatial_map = torch.from_numpy(np.abs(complex_spatial_map))

    def spatial_map(self, t: float):
        """Builds the complex spatial map at time t.

        Args:
            t (float): Time value

        Returns:
            torch.tensor: The source wavefield at time t, with shape
            (1, L, L, 2) where the last axis stacks real and imaginary parts.
        """
        curr_time = self.omega * t + self.phase
        with torch.no_grad():
            real = self._abs_spatial_map * np.cos(curr_time)
            imag = self._abs_spatial_map * np.sin(curr_time)
            source = torch.stack([real, imag], dim=2)
        return source.unsqueeze(0)
|
{"/helmnet/architectures.py": ["/helmnet/dataloaders.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/source.py", "/helmnet/replaybuffer.py"], "/helmnet/__init__.py": ["/helmnet/architectures.py", "/helmnet/dataloaders.py", "/helmnet/hybridnet.py", "/helmnet/source.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/replaybuffer.py"], "/train.py": ["/helmnet/__init__.py"], "/test.py": ["/helmnet/__init__.py", "/helmnet/support_functions.py"], "/helmnet/hybridnet.py": ["/helmnet/architectures.py", "/helmnet/dataloaders.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/source.py", "/helmnet/replaybuffer.py"], "/produce_figures.py": ["/evaluate.py", "/helmnet/support_functions.py"], "/evaluate.py": ["/helmnet/__init__.py", "/helmnet/dataloaders.py"]}
|
29,242
|
SonyPony/helmnet
|
refs/heads/main
|
/helmnet/__init__.py
|
from helmnet.architectures import (
OutConv,
DoubleConv,
CleanDoubleConv,
ResDoubleConv,
ConvGRUCell,
EncoderBlock,
ResNet,
HybridNet,
getActivationFunction,
)
from helmnet.dataloaders import EllipsesDataset, get_dataset
from helmnet.hybridnet import IterativeSolver
from helmnet.source import Source
from helmnet.spectral import LaplacianWithPML, FourierDerivative
from helmnet.utils import load_settings
from helmnet.replaybuffer import Experience, ReplayBuffer
__all__ = [
"CleanDoubleConv",
"ConvGRUCell",
"DoubleConv",
"EllipsesDataset",
"EncoderBlock",
"Experience",
"FourierDerivative",
"HybridNet",
"IterativeSolver",
"LaplacianWithPML",
"OutConv",
"ReplayBuffer",
"ResDoubleConv",
"ResNet",
"Source",
"getActivationFunction" "get_dataset",
"load_settings",
]
|
{"/helmnet/architectures.py": ["/helmnet/dataloaders.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/source.py", "/helmnet/replaybuffer.py"], "/helmnet/__init__.py": ["/helmnet/architectures.py", "/helmnet/dataloaders.py", "/helmnet/hybridnet.py", "/helmnet/source.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/replaybuffer.py"], "/train.py": ["/helmnet/__init__.py"], "/test.py": ["/helmnet/__init__.py", "/helmnet/support_functions.py"], "/helmnet/hybridnet.py": ["/helmnet/architectures.py", "/helmnet/dataloaders.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/source.py", "/helmnet/replaybuffer.py"], "/produce_figures.py": ["/evaluate.py", "/helmnet/support_functions.py"], "/evaluate.py": ["/helmnet/__init__.py", "/helmnet/dataloaders.py"]}
|
29,243
|
SonyPony/helmnet
|
refs/heads/main
|
/train.py
|
import pytorch_lightning as pl
from pytorch_lightning.loggers import TensorBoardLogger
from pytorch_lightning.callbacks import ModelCheckpoint
import torch
from helmnet import IterativeSolver, load_settings
import os
from argparse import ArgumentParser
if __name__ == "__main__":
# Parsing command line arguments
parser = ArgumentParser()
parser.add_argument(
"--distributed_backend",
type=str,
default="ddp",
help="Distributed training backend, see https://pytorch.org/tutorials/intermediate/ddp_tutorial.html",
)
parser.add_argument(
"--gpus",
type=str,
default="2,3,4,5,6,7",
help="IDs of the GPUs to use during training, separated by a comma",
)
parser.add_argument(
"--precision",
type=int,
default="32",
help="Bits precision to use for calculations, can be either 32 or 16",
)
parser.add_argument(
"--max_epochs",
type=int,
default=1000,
help="Number of total epochs for training",
)
parser.add_argument("--track_arg_norm", type=bool, default=True)
parser.add_argument("--terminate_on_nan", type=bool, default=True)
parser.add_argument("--check_val_every_n_epoch", type=int, default=2)
parser.add_argument("--limit_val_batches", type=float, default=1.0)
parser.add_argument("--num_sanity_val_steps", type=int, default=1)
parser.add_argument("--benchmark", type=bool, default=True)
# Loading setings file
settings = load_settings("experiments/base.json")
# Making model
solver = IterativeSolver(
batch_size=settings["training"]["train batch size"],
domain_size=settings["geometry"]["grid size"],
k=settings["source"]["omega"] / settings["medium"]["c0"],
omega=settings["source"]["omega"],
gradient_clip_val=settings["training"]["gradient clipping"],
learning_rate=settings["training"]["learning rate"],
loss=settings["training"]["loss"],
minimum_learning_rate=settings["training"]["minimum learning rate"],
optimizer=settings["training"]["optimizer"],
PMLsize=settings["geometry"]["PML Size"],
sigma_max=settings["geometry"]["sigma max"],
source_location=settings["source"]["location"],
source_amplitude=settings["source"]["amplitude"],
source_phase=settings["source"]["phase"],
source_smoothing=settings["source"]["smoothing"],
train_data_path=settings["medium"]["train_set"],
validation_data_path=settings["medium"]["validation_set"],
activation_function=settings["neural_network"]["activation function"],
depth=settings["neural_network"]["depth"],
features=settings["neural_network"]["channels per layer"],
max_iterations=settings["environment"]["max iterations"],
state_channels=settings["neural_network"]["state channels"],
state_depth=settings["neural_network"]["states depth"],
weight_decay=settings["training"]["weight_decay"],
)
# Create trainer
logger = TensorBoardLogger("logs", name="helmnet")
checkpoint_callback = ModelCheckpoint(
filepath=os.getcwd() + "/checkpoints/",
save_top_k=3,
verbose=True,
monitor="val_loss",
mode="min",
save_last=True,
)
# parser = pl.Trainer.add_argparse_args(parser)
args = parser.parse_args()
# Make trainer
trainer = pl.Trainer.from_argparse_args(
args, logger=logger, checkpoint_callback=checkpoint_callback
)
# Train network
trainer.fit(solver)
|
{"/helmnet/architectures.py": ["/helmnet/dataloaders.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/source.py", "/helmnet/replaybuffer.py"], "/helmnet/__init__.py": ["/helmnet/architectures.py", "/helmnet/dataloaders.py", "/helmnet/hybridnet.py", "/helmnet/source.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/replaybuffer.py"], "/train.py": ["/helmnet/__init__.py"], "/test.py": ["/helmnet/__init__.py", "/helmnet/support_functions.py"], "/helmnet/hybridnet.py": ["/helmnet/architectures.py", "/helmnet/dataloaders.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/source.py", "/helmnet/replaybuffer.py"], "/produce_figures.py": ["/evaluate.py", "/helmnet/support_functions.py"], "/evaluate.py": ["/helmnet/__init__.py", "/helmnet/dataloaders.py"]}
|
29,244
|
SonyPony/helmnet
|
refs/heads/main
|
/test.py
|
from helmnet import IterativeSolver
from helmnet.support_functions import fig_generic
import numpy as np
import torch
# Load the trained solver from its checkpoint.
solver = IterativeSolver.load_from_checkpoint(
    "checkpoints/trained_weights.ckpt", strict=False
)
solver.freeze()  # To evaluate the model without changing it
solver.to("cuda:0")

# Problem setup: homogeneous background with a rectangular slab whose
# speed of sound ramps linearly from 2 down to 1 across its width.
source_location = [30, 128]
sos_map = np.ones((256, 256))
gradient_slab = np.tile(np.linspace(2, 1, 210), (70, 1))
sos_map[100:170, 30:240] = gradient_slab

# Set model domain size (assumed square)
solver.set_domain_size(sos_map.shape[-1], source_location=source_location)

# Run example in kWave and pytorch, and produce figure
fig_generic(
    solver,
    sos_map,
    path="images/withgmres",
    source_location=source_location,
    omega=1,
    min_sos=1,
    cfl=0.1,
    roundtrips=10.0,
    mode="normal",
)
|
{"/helmnet/architectures.py": ["/helmnet/dataloaders.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/source.py", "/helmnet/replaybuffer.py"], "/helmnet/__init__.py": ["/helmnet/architectures.py", "/helmnet/dataloaders.py", "/helmnet/hybridnet.py", "/helmnet/source.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/replaybuffer.py"], "/train.py": ["/helmnet/__init__.py"], "/test.py": ["/helmnet/__init__.py", "/helmnet/support_functions.py"], "/helmnet/hybridnet.py": ["/helmnet/architectures.py", "/helmnet/dataloaders.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/source.py", "/helmnet/replaybuffer.py"], "/produce_figures.py": ["/evaluate.py", "/helmnet/support_functions.py"], "/evaluate.py": ["/helmnet/__init__.py", "/helmnet/dataloaders.py"]}
|
29,245
|
SonyPony/helmnet
|
refs/heads/main
|
/helmnet/hybridnet.py
|
import torch
import torch.nn as nn
from torch.utils.data import DataLoader
from torch.nn.functional import hardtanh
from random import choice
import pytorch_lightning as pl
from pytorch_lightning.metrics.regression import MeanAbsoluteError
import numpy as np
from helmnet.architectures import HybridNet
from helmnet.dataloaders import get_dataset
from helmnet.spectral import LaplacianWithPML, FastLaplacianWithPML
from helmnet.utils import log_wavefield
from helmnet.source import Source
from helmnet.replaybuffer import ReplayBuffer, Experience
from torch.optim.lr_scheduler import ReduceLROnPlateau
class IterativeSolver(pl.LightningModule):
    """Learned iterative solver for the Helmholtz equation.

    A recurrent network (`HybridNet` by default) proposes additive wavefield
    updates; the physics enters through the residual of the Helmholtz
    operator (PML Laplacian + k^2 * u - source). Training samples partially
    solved problems from a replay buffer and unrolls the network for a few
    steps per optimization step.
    """

    def __init__(
        self,
        domain_size: int,
        k: float,
        omega: float,
        PMLsize: int,
        sigma_max: float,
        source_location: list,
        train_data_path: str,
        validation_data_path: str,
        activation_function="relu",
        architecture="custom_unet",
        gradient_clip_val=0,
        batch_size=24,
        buffer_size=100,
        depth=4,
        features=8,
        learning_rate=1e-4,
        loss="mse",
        minimum_learning_rate=1e-4,
        optimizer="adam",
        weight_decay=0.0,
        max_iterations=100,
        source_amplitude=10,
        source_phase=0,
        source_smoothing=False,
        state_channels=2,
        state_depth=4,
        unrolling_steps=10,
    ):
        """Build the solver: replay buffer, metric, Laplacian, source, network."""
        super().__init__()
        # Saving hyperparameters (exposed afterwards as self.hparams.*)
        self.save_hyperparameters()
        # Derived modules
        self.replaybuffer = ReplayBuffer(self.hparams.buffer_size)
        self.metric = MeanAbsoluteError()
        self.set_laplacian()
        self.set_source()
        # Non linear function approximator
        self.init_f()

        # Custom weight initialization
        # TODO: Add this to the settings file
        def weights_init(m):
            if isinstance(m, torch.nn.Conv2d):
                torch.nn.init.xavier_normal_(m.weight, gain=0.02)
                # torch.nn.init.zeros_(m.bias)

        self.f.apply(weights_init)

    def init_f(self):
        """Instantiate the update network `self.f` from `hparams.architecture`.

        Raises:
            NotImplementedError: If the architecture name is unknown.
        """
        nn_name = self.hparams.architecture
        if nn_name == "custom_unet":
            self.f = HybridNet(
                activation_function=self.hparams.activation_function,
                depth=self.hparams.depth,
                domain_size=self.hparams.domain_size,
                features=self.hparams.features,
                # 6 input channels: 2 wavefield + 2 (scaled) residual + 2 PML sigmas,
                # matching the concatenation built in `single_step`.
                inchannels=6,
                state_channels=self.hparams.state_channels,
                state_depth=self.hparams.state_depth,
            )
        else:
            raise NotImplementedError("Unknown architecture {}".format(nn_name))

    def set_domain_size(self, domain_size, source_location=None, source_map=None):
        """Resize the solver to a new (square) domain.

        Rebuilds the Laplacian, re-creates the source (from a location or a
        precomputed map) and re-initializes the network's size-dependent
        internals.

        Args:
            domain_size (int): New side length of the square domain.
            source_location (list, optional): [row, col] of a point source.
            source_map (tensor, optional): Precomputed source map, used when
                `source_location` is None.
        """
        self.hparams.domain_size = domain_size
        self.f.domain_size = self.hparams.domain_size
        self.set_laplacian()
        if source_location is not None:
            self.set_multiple_sources([source_location])
        else:
            self.set_source_maps(source_map)
        self.f.init_by_size()
        for enc, size in zip(self.f.enc, self.f.states_dimension):
            enc.domain_size = size

    def set_laplacian(self):
        """
        self.Lap = LaplacianWithPML(
            domain_size=self.hparams.domain_size,
            PMLsize=self.hparams.PMLsize,
            k=self.hparams.k,
            sigma_max=self.hparams.sigma_max,
        ).to(self.device)
        """
        # NOTE: the triple-quoted string above is the previous
        # LaplacianWithPML-based implementation, kept for reference.
        # Build the PML Laplacian operator and cache the PML absorption
        # profiles (sigmas) as a float tensor of shape (2, H, W).
        self.Lap = FastLaplacianWithPML(
            domain_size=self.hparams.domain_size,
            PMLsize=self.hparams.PMLsize,
            k=self.hparams.k,
            sigma_max=self.hparams.sigma_max,
        ).to(self.device)
        sigmax, sigmay = self.Lap.sigmas()
        sigmax = torch.tensor(sigmax, device=self.device)
        sigmay = torch.tensor(sigmay, device=self.device)
        sigmax = sigmax.unsqueeze(0)
        sigmay = sigmay.unsqueeze(0)
        self.sigmas = torch.cat([sigmax, sigmay]).float()

    def set_source(self):
        """Create the default point source from hparams and store its map."""
        # Defining source
        source = Source(
            image_size=self.hparams.domain_size,
            omega=self.hparams.omega,
            location=self.hparams.source_location,
            amplitude=self.hparams.source_amplitude,
            phase=self.hparams.source_phase,
            smooth=self.hparams.source_smoothing,
        )
        # Source.spatial_map returns (1, H, W, 2); permute to (1, 2, H, W).
        sourceval = source.spatial_map(0).type(torch.FloatTensor).permute(0, 3, 1, 2)
        self.set_source_maps(sourceval)

    def set_source_maps(self, sourceval):
        """Store the source wavefield as a frozen (non-trainable) parameter."""
        self.source = nn.Parameter(
            sourceval.to(self.device),
            requires_grad=False,
        )

    def reset_source(self):
        """Restore the default source defined by the hyperparameters."""
        self.set_source()

    def set_multiple_sources(self, source_locations):
        """Build one source map per location and stack them along the batch axis.

        Args:
            source_locations (list): List of [row, col] source positions.
        """
        sourceval_array = []
        for loc in source_locations:
            # Defining source
            source = Source(
                image_size=self.hparams.domain_size,
                omega=self.hparams.omega,
                location=loc,
                amplitude=self.hparams.source_amplitude,
                phase=self.hparams.source_phase,
                smooth=self.hparams.source_smoothing,
            )
            sourceval_array.append(
                source.spatial_map(0).type(torch.FloatTensor).permute(0, 3, 1, 2)
            )
        sourceval = torch.cat(sourceval_array, 0)
        self.set_source_maps(sourceval)

    def on_after_backward(self):
        """Lightning hook: clip gradients by value after every backward pass."""
        if self.hparams.gradient_clip_val > 0:
            torch.nn.utils.clip_grad.clip_grad_value_(
                self.parameters(), self.hparams.gradient_clip_val
            )

    def get_random_source_loc(self):
        """Random source location on a circle"""
        # TODO: Make it more flexible, this is basically hard coded...
        # Circle centered in the domain, radius reaching just inside the PML.
        theta = 2 * np.pi * np.random.rand(1)
        L = self.hparams.domain_size // 2
        dL = L - self.hparams.PMLsize - 2
        source_location = np.array(
            [int(L + dL * np.cos(theta)), int(L + dL * np.sin(theta))]
        )
        return source_location

    def train_dataloader(self):
        """Fill the replay buffer with fresh problems and return the SoS loader."""
        # Making dataset of SoS
        sos_train = get_dataset(self.hparams.train_data_path)
        # Filling up experience replay
        # NOTE(review): iterates `range(len(self.replaybuffer))` — presumably
        # len() reports the buffer capacity even when empty; verify in ReplayBuffer.
        print("Filling up Replay buffer...")
        with torch.no_grad():
            for counter in range(len(self.replaybuffer)):
                self.reset_source()  # self.set_multiple_sources([self.get_random_source_loc()])
                sos_map = sos_train[counter].unsqueeze(0).to(self.device)
                k_sq, wavefield = self.get_initials(sos_map)
                self.f.clear_states(wavefield)
                h_states = self.f.get_states(flatten=True)
                residual = self.get_residual(wavefield, k_sq)
                exp = Experience(
                    wavefield[0],
                    h_states[0],
                    k_sq[0],
                    residual[0],
                    self.source[0],
                    counter * 10,
                )
                self.replaybuffer.append(exp, counter)
        # Return the dataloader of sos maps
        return DataLoader(
            sos_train,
            batch_size=self.hparams.batch_size,
            num_workers=min([self.hparams.batch_size, 32]),
            drop_last=True,
        )

    def val_dataloader(self):
        """Return the validation dataloader of speed-of-sound maps."""
        # Making dataset of SoS
        self.reset_source()
        # NOTE(review): path is hard-coded; hparams.validation_data_path exists
        # and is likely what was intended here — confirm before changing.
        sos_train = get_dataset("datasets/splitted_96/validation.ph")
        # Return the dataloader of sos maps
        return DataLoader(
            sos_train,
            batch_size=self.hparams.batch_size,
            num_workers=min([self.hparams.batch_size, 32]),
        )

    def configure_optimizers(self):
        """Set up Adam and a ReduceLROnPlateau scheduler monitoring train_loss.

        Raises:
            NotImplementedError: For optimizers other than Adam.
            ValueError: If the minimum LR exceeds the starting LR.
        """
        # TODO: Add adam betast to settings file
        if self.hparams.optimizer.lower() == "adam":
            optimizer = torch.optim.Adam(
                self.parameters(),
                lr=self.hparams.learning_rate,
                betas=(0.9, 0.95),
                weight_decay=self.hparams.weight_decay,
            )
        else:
            raise NotImplementedError(
                "The optimizer {} is not implemented".format(self.hparams.optimizer)
            )
        if self.hparams.minimum_learning_rate > self.hparams.learning_rate:
            raise ValueError(
                "Minimum learning rate ({}) must be smaller than the starting learning rate ({})".format(
                    self.hparams.minimum_learning_rate, self.hparams.learning_rate
                )
            )
        scheduler = {
            "scheduler": ReduceLROnPlateau(
                optimizer,
                mode="min",
                factor=0.5,
                patience=10,
                min_lr=self.hparams.minimum_learning_rate,
                verbose=True,
            ),
            "monitor": "train_loss",  # Default: val_loss
            "interval": "epoch",
            "frequency": 1,
        }
        return [optimizer], [scheduler]

    def loss_function(self, x):
        """Mean-squared value of `x` (the residual); only 'mse' is supported."""
        if self.hparams.loss == "mse":
            return x.pow(2).mean()
        else:
            raise NotImplementedError(
                "The loss function {} is not implemented".format(self.hparams.loss)
            )

    @staticmethod
    def test_loss_function(x):
        """Per-sample residual RMSE: reduces over (C, H, W), keeps the batch dim."""
        return x.pow(2).mean((1, 2, 3)).sqrt()

    def test_step(self, batch, batch_idx):
        """Run the full iteration budget on a test batch; keep all wavefields."""
        self.reset_source()
        with torch.no_grad():
            output = self.forward(
                batch,
                num_iterations=self.hparams.max_iterations,
                return_wavefields=True,
                return_states=False,
            )
        # Get loss: one RMSE per sample per iteration, stacked as (batch, iters)
        losses = [self.test_loss_function(x) for x in output["residuals"]]
        losses = torch.stack(losses, 1)
        return {
            "losses": losses,
            "wavefields": [x.cpu() for x in output["wavefields"]],
        }

    def validation_step(self, batch, batch_idx):
        """Validate with one random source per sample; return terminal loss."""
        self.set_multiple_sources(
            [self.get_random_source_loc() for _ in range(batch.shape[0])]
        )
        with torch.no_grad():
            output = self.forward(
                batch,
                num_iterations=self.hparams.max_iterations,
                return_wavefields=False,
                return_states=False,
            )
        # Get loss (RMSE of the final residual)
        loss = self.loss_function(output["residuals"][-1]).sqrt()
        # NaNs to Infs, due to Lightning bug: https://github.com/PyTorchLightning/pytorch-lightning/issues/2636
        loss[torch.isnan(loss)] = float("inf")
        # With return_wavefields=False, forward still appends the final
        # wavefield; map it from [-1, 1] to [0, 1] for image logging.
        sample_wavefield = (hardtanh(output["wavefields"][0][0]) + 1) / 2
        return {
            "loss": loss,
            "sample_wavefield": sample_wavefield,
            "batch_idx": batch_idx,
        }

    def validation_epoch_end(self, outputs):
        """Aggregate validation losses and log a sample wavefield image."""
        all_losses = torch.stack([x["loss"] for x in outputs]).mean()
        val_loss_mean = self.metric(all_losses, torch.zeros_like(all_losses))
        self.reset_source()
        # NOTE(review): `add_images` (plural) with dataformats="HW" for the real
        # part vs `add_image` for the imaginary part looks inconsistent — confirm.
        self.logger.experiment.add_images(
            "wavefield/val_real",
            outputs[0]["sample_wavefield"][0],
            self.trainer.global_step,
            dataformats="HW",
        )
        self.logger.experiment.add_image(
            "wavefield/val_imag",
            outputs[0]["sample_wavefield"][1],
            self.trainer.global_step,
            dataformats="HW",
        )
        return {
            "val_loss": val_loss_mean,
            "log": {"loss/val_terminal_loss": val_loss_mean},
        }

    def test_epoch_end(self, outputs):
        """Save per-iteration RMSE traces and wavefield evolutions to disk."""
        # Saving average losses
        print("Saving residual RMSE")
        x = []
        for o in outputs:
            x.append(o["losses"])
        all_losses = torch.cat(x, dim=0).cpu().numpy()
        np.save("results/evolution_of_model_RMSE_on_test_set", all_losses)
        # Save wavefield
        print("Saving wavefields")
        # Rearranged to (sample, iteration, C, H, W)
        wavefields = torch.cat(
            [torch.stack(x["wavefields"], 0) for x in outputs], 1
        ).permute(1, 0, 2, 3, 4)
        np.save("results/evolution_of_wavefields_on_test_set", wavefields.cpu().numpy())

    def training_epoch_end(self, outputs):
        """Average the training loss over the epoch (used by the LR scheduler)."""
        train_loss_mean = torch.stack([x["loss"] for x in outputs]).mean()
        return {"train_loss": train_loss_mean}

    def training_step(self, sos_batch, batch_idx):
        """One replay-buffer training step.

        Samples partially solved problems from the buffer, unrolls the
        network for `unrolling_steps`, backpropagates through the stacked
        residuals, and pushes the (detached) advanced problems back into
        the buffer — or replaces them with fresh ones from `sos_batch`
        when they have diverged or exceeded the iteration budget.
        """
        # Training phase: curriculum — allowed iterations grow with the epoch.
        maxiter = min([self.current_epoch * 20 + 1, self.hparams.max_iterations])
        # Sample from the buffer
        (
            wavefields,
            h_states,
            k_sqs,
            residual,
            sources,
            timesteps,
            indices,
        ) = self.replaybuffer.sample(self.hparams.batch_size)
        # Set the states and sources
        self.set_source_maps(sources)
        self.f.set_states(h_states, flatten=True)
        # Make N steps
        num_iterations = self.hparams.unrolling_steps
        output = self.n_steps(wavefields, k_sqs, residual, num_iterations, True, True)
        # Evaluate the loss function (will backward later)
        cat_res = torch.cat(output["residuals"])
        # stack_res = torch.stack(output["residuals"])
        loss_f = cat_res.pow(2)
        loss = 1e4 * loss_f.mean()  # TODO: Use settings loss and amplify
        rel_loss_f = loss_f.mean((1, 2, 3)).sqrt().mean()
        self.logger.experiment.add_scalar(
            "loss/train", rel_loss_f, self.trainer.global_step
        )
        # Add histogram of iteration lengths
        # NOTE(review): `// 50 == 0` is only true for epochs 0-49; if logging
        # every 50th epoch was intended, `% 50 == 0` would be needed — confirm.
        if self.trainer.current_epoch // 50 == 0:
            self.logger.experiment.add_histogram(
                "hyper/iterations", np.array(list(timesteps)), self.trainer.global_step
            )
        # Making detached clones
        wavefields = [x.detach() for x in output["wavefields"]]
        h_states = [x.detach() for x in output["states"]]
        # k_sq does not change across iterations; repeat it per unrolled step.
        k_sqs = [k_sqs for x in output["wavefields"]]
        residuals = [x.detach() for x in output["residuals"]]
        sources = [x.detach() for x in self.source]
        # Adding to RB if iterations are not more than allowed
        counter = 0
        terminal_logged = False
        middle_logged = False
        # Pick one random unrolled iteration to re-insert into the buffer.
        iteration = np.random.choice(len(residuals))
        for sample_idx in range(self.hparams.batch_size):
            new_timesteps = timesteps[sample_idx] + iteration + 1
            res = residuals[iteration][sample_idx]
            if res.pow(2).mean() < 1 and new_timesteps < maxiter:
                # Still converging and within budget: put it back.
                self.replaybuffer.append(
                    Experience(
                        wavefields[iteration][sample_idx],
                        h_states[iteration][sample_idx],
                        k_sqs[iteration][sample_idx],
                        residuals[iteration][sample_idx],
                        sources[sample_idx],
                        new_timesteps,
                    ),
                    indices[sample_idx],
                )
            else:
                # Diverged or done: replace with a fresh problem from the batch.
                with torch.no_grad():
                    self.reset_source()
                    ksq, wf = self.get_initials(choice(sos_batch).unsqueeze(0))
                    self.f.clear_states(wf)
                    h = self.f.get_states(flatten=True)
                    res = self.get_residual(wf, ksq)
                    self.replaybuffer.append(
                        Experience(wf[0], h[0], ksq[0], res[0], self.source[0], 0),
                        indices[sample_idx],
                    )
                    counter += 1
            # Log it as wavefield at 20 steps
            if not middle_logged and new_timesteps == 20:
                self.log_wavefield(wavefields[iteration][sample_idx], "20")
                with torch.no_grad():
                    middle_loss = self.loss_function(residuals[iteration][sample_idx])
                    self.logger.experiment.add_scalar(
                        "loss/step_20",
                        middle_loss.sqrt().item(),
                        self.trainer.global_step,
                    )
                middle_logged = True
            # Log terminal wavefield
            elif new_timesteps >= maxiter and not terminal_logged:
                self.log_wavefield(wavefields[iteration][sample_idx], "terminal")
                with torch.no_grad():
                    terminal_loss = self.loss_function(residuals[iteration][sample_idx])
                    self.logger.experiment.add_scalar(
                        "loss/terminal",
                        terminal_loss.sqrt().item(),
                        self.trainer.global_step,
                    )
                terminal_logged = True
        return {
            "loss": loss,
            "progress_bar": {
                "maxiter": maxiter,
                "unrolling": num_iterations,
                "new_sos": counter,
            },
        }

    def log_wavefield(self, wavefield, name):
        """Log real/imaginary parts of a wavefield to TensorBoard as images."""
        # Map from [-1, 1] to [0, 1] for display.
        wavefield = (hardtanh(wavefield) + 1) / 2
        self.logger.experiment.add_images(
            "wavefield/" + name + "_real",
            wavefield[0],
            self.trainer.global_step,
            dataformats="HW",
        )
        self.logger.experiment.add_image(
            "wavefield/" + name + "_imag",
            wavefield[1],
            self.trainer.global_step,
            dataformats="HW",
        )

    def get_initials(self, sos_maps: torch.tensor):
        """Gets the initial estimates for the wavefield and computes
        k_sq = (omega/c)**2.

        Args:
            sos_maps (tensor): Speed of sound map

        Returns:
            (tensor, tensor): k_sq and the zero-initialized wavefield
            (batch, 2, H, W).
        """
        # TODO: Make it trainable?
        k_sq = (self.hparams.omega / sos_maps) ** 2
        wavefield = torch.zeros(
            k_sq.shape[0], 2, k_sq.shape[2], k_sq.shape[3], device=k_sq.device
        )
        return k_sq, wavefield

    def apply_laplacian(self, x: torch.tensor):
        """Apply the PML Laplacian; permutes to channels-last and back."""
        return self.Lap(x.permute(0, 2, 3, 1)).permute(0, 3, 1, 2)

    def get_residual(self, x: torch.tensor, k_sq: torch.tensor):
        # TODO: This should be outside of the networ, as represents the
        # environment
        """Returns the residual wavefield

        Args:
            x (tensor): Current solution estimate for the Helmholtz equation
            k_sq (tensor): (omega/c)**2

        Returns:
            torch.tensor: the residual (Lap(x) + k_sq * x - source)
        """
        return self.apply_laplacian(x) + k_sq * x - self.source

    def single_step(
        self, wavefield: torch.tensor, k_sq: torch.tensor, residual: torch.tensor
    ):
        """One network update: predict a wavefield correction and re-evaluate
        the residual after applying it."""
        # Getting residual signal
        # residual = self.get_residual(wavefield, k_sq)
        sigmas = (
            self.sigmas.unsqueeze(0).repeat(wavefield.shape[0], 1, 1, 1).to(self.device)
        )
        # 6-channel input: wavefield (2) + scaled residual (2) + sigmas (2).
        input = torch.cat([wavefield, 1e3 * residual, sigmas], dim=1)
        # Predicting wavefield update
        d_wavefield = self.f(input)  # *100/self.current_iterations
        # Undo the 1e3 scaling applied to the residual input.
        up_wavefield = d_wavefield / 1e3 + wavefield
        new_residual = self.get_residual(up_wavefield, k_sq)
        # Impose Dirichlet BC on updated wavefield
        """
        dirichlet_mask = torch.zeros_like(up_wavefield)
        dirichlet_mask.requires_grad = False
        dirichlet_mask[:,:,1:-1,1:-1] = 1.
        up_wavefield = up_wavefield*dirichlet_mask
        """
        # NOTE(review): `get_residual` here is a local flag that shadows the
        # method of the same name; it is always True, so the else branch is dead.
        get_residual = True
        if get_residual:
            return up_wavefield, new_residual
        else:
            return up_wavefield

    def n_steps(
        self,
        wavefield,
        k_sq,
        residual,
        num_iterations,
        return_wavefields=False,
        return_states=False,
    ):
        """Unroll `single_step` for `num_iterations`, collecting residuals and,
        optionally, intermediate wavefields and network states.

        Returns:
            dict: wavefields, residuals, states, last_iteration.
        """
        # Initialize containers
        wavefields = []
        residuals = []
        states = []
        # Unroll N steps
        for current_iteration in range(num_iterations):
            # Update wavefield and get residual AFTER update
            wavefield, residual = self.single_step(
                wavefield, k_sq, residual
            )
            # Store
            residuals.append(residual)  # Last residual
            if return_wavefields:
                wavefields.append(wavefield)
            if return_states:
                states.append(self.f.get_states(flatten=True))
        # Add only last wavefield if none logged
        if not return_wavefields:
            wavefields.append(wavefield)
        return {
            "wavefields": wavefields,
            "residuals": residuals,
            "states": states,
            "last_iteration": current_iteration,
        }

    def fast_forward(self, sos_maps):
        """Run the full iteration budget, storing the first sample's wavefield
        at each step into a preallocated tensor."""
        # Finite horizon value
        num_iterations = self.hparams.max_iterations
        # Initialize inputs and network states
        k_sq, wavefield = self.get_initials(sos_maps)
        self.f.clear_states(wavefield)
        residual = self.get_residual(wavefield, k_sq)
        sigmas = (
            self.sigmas.unsqueeze(0).repeat(wavefield.shape[0], 1, 1, 1).to(self.device)
        )
        # Initialize containers
        # NOTE(review): output device is hard-coded to "cuda:1"; this breaks on
        # single-GPU or CPU setups — consider self.device instead.
        wavefields = torch.empty(
            [num_iterations] + list(wavefield.shape[1:]),
            device="cuda:1",
            dtype=torch.float32,
        )
        # Unroll N steps
        for current_iteration in range(num_iterations):
            # Loop
            wavefield, residual = self.single_step(wavefield, k_sq, residual)
            # Store
            wavefields[current_iteration] = wavefield[0]
        return wavefields

    def forward(
        self,
        sos_maps,
        return_wavefields=False,
        return_states=False,
        num_iterations=None,
        stop_if_diverge=False,
    ):
        """Solve for the given speed-of-sound maps from a zero wavefield.

        Initializes the problem, then unrolls `single_step`. Mirrors
        `n_steps` but performs its own initialization.

        Args:
            sos_maps (tensor): Speed-of-sound maps, (batch, 1, H, W).
            return_wavefields (bool): Keep the wavefield of every iteration.
            return_states (bool): Keep the flattened network states.
            num_iterations (int, optional): Defaults to hparams.max_iterations.
            stop_if_diverge (bool): Currently unused.

        Returns:
            dict: wavefields, residuals, states, last_iteration.
        """
        # Finite horizon value
        if num_iterations is None:
            num_iterations = self.hparams.max_iterations
        # Initialize inputs and network states
        k_sq, wavefield = self.get_initials(sos_maps)
        self.f.clear_states(wavefield)
        residual = self.get_residual(wavefield, k_sq)
        # Initialize containers
        wavefields = []
        residuals = []
        states = []
        # Unroll N steps
        for current_iteration in range(num_iterations):
            # Update wavefield and get residual AFTER update
            wavefield, residual = self.single_step(wavefield, k_sq, residual)
            # Store
            residuals.append(residual)  # Last residual
            if return_wavefields:
                wavefields.append(wavefield)
            if return_states:
                states.append(self.f.get_states(flatten=True))
        # Add only last wavefield if none logged
        if not return_wavefields:
            wavefields.append(wavefield)
        return {
            "wavefields": wavefields,
            "residuals": residuals,
            "states": states,
            "last_iteration": current_iteration,
        }

    def forward_variable_src(
        self,
        sos_maps,
        src_time_pairs,
        return_wavefields=False,
        return_states=False,
        num_iterations=None,
        stop_if_diverge=False,
    ):
        """Like `forward`, but the source map can change at given iterations.

        Args:
            src_time_pairs (dict): {"iteration": iterable of iteration indices,
                "src_maps": iterable of source maps} — when the current
                iteration is in "iteration", the next map is installed and the
                residual recomputed.
        """
        # Finite horizon value
        if num_iterations is None:
            num_iterations = self.hparams.max_iterations
        # Extract source insertion times
        new_src_times = src_time_pairs["iteration"]
        src_maps = iter(src_time_pairs["src_maps"])
        # Initialize inputs and network states
        k_sq, wavefield = self.get_initials(sos_maps)
        self.f.clear_states(wavefield)
        residual = self.get_residual(wavefield, k_sq)
        # Initialize containers
        wavefields = []
        residuals = []
        states = []
        # Unroll N steps
        for current_iteration in range(num_iterations):
            # Update source map if needed
            if current_iteration in new_src_times:
                self.set_source_maps(next(src_maps))
                # _, wavefield = self.get_initials(sos_maps)
                # self.f.clear_states(wavefield)
                residual = self.get_residual(wavefield, k_sq)
            # Update wavefield and get residual AFTER update
            wavefield, residual = self.single_step(wavefield, k_sq, residual)
            # Store
            residuals.append(residual)  # Last residual
            if return_wavefields:
                wavefields.append(wavefield)
            if return_states:
                states.append(self.f.get_states(flatten=True))
        # Add only last wavefield if none logged
        if not return_wavefields:
            wavefields.append(wavefield)
        return {
            "wavefields": wavefields,
            "residuals": residuals,
            "states": states,
            "last_iteration": current_iteration,
        }
|
{"/helmnet/architectures.py": ["/helmnet/dataloaders.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/source.py", "/helmnet/replaybuffer.py"], "/helmnet/__init__.py": ["/helmnet/architectures.py", "/helmnet/dataloaders.py", "/helmnet/hybridnet.py", "/helmnet/source.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/replaybuffer.py"], "/train.py": ["/helmnet/__init__.py"], "/test.py": ["/helmnet/__init__.py", "/helmnet/support_functions.py"], "/helmnet/hybridnet.py": ["/helmnet/architectures.py", "/helmnet/dataloaders.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/source.py", "/helmnet/replaybuffer.py"], "/produce_figures.py": ["/evaluate.py", "/helmnet/support_functions.py"], "/evaluate.py": ["/helmnet/__init__.py", "/helmnet/dataloaders.py"]}
|
29,246
|
SonyPony/helmnet
|
refs/heads/main
|
/produce_figures.py
|
from evaluate import Evaluation
from helmnet.support_functions import *
from matplotlib import pyplot as plt
import numpy as np
import os
from scipy.io import loadmat, savemat
import torch
from torchvision.utils import make_grid
from tqdm import tqdm
import subprocess
# Central configuration for figure generation: reference-result locations,
# the trained model checkpoint, the test dataset, and the GPU ids to use.
SETTINGS = {
    "gmres_results": "results/gmres_results.mat",  # GMRES reference solutions (MATLAB)
    "kwave_results": "results/kwave_results.mat",  # k-Wave reference solutions (MATLAB)
    "model_checkpoint": "checkpoints/trained_weights.ckpt",  # trained network weights
    "testset": "datasets/splitted_96/testset.ph",  # test dataset path
    "gpu": [0],  # GPU ids used for evaluation
}
def load_kwave_and_gmres():
    """Load the k-Wave and GMRES reference results from the paths in SETTINGS.

    Raises:
        FileNotFoundError: If either result file has not been produced yet.

    Returns:
        tuple: (kwave_results, gmres_results, gmres_residuals, gmres_tensors)
    """
    kwave_path = SETTINGS["kwave_results"]
    gmres_path = SETTINGS["gmres_results"]
    if not os.path.isfile(kwave_path):
        raise FileNotFoundError(
            "Can't find the k-Wave results in {}. Have you run 'matlab/parallel_kwave_solver.m'?".format(
                kwave_path
            )
        )
    if not os.path.isfile(gmres_path):
        raise FileNotFoundError(
            "Can't find the GMRES results in {}. Have you run 'matlab/parallel_sectral_gmres_solver.m'?".format(
                gmres_path
            )
        )
    # Load data
    print("Loading k-Wave and GMRES results... ", end="")
    kwave_results = loadmat(kwave_path)["P"]
    gmres_file = loadmat(gmres_path)
    gmres_results = gmres_file["P"]
    # Normalised to mimic the RMSE definition used by the network
    gmres_residuals = gmres_file["residuals"] / gmres_results.shape[-1]
    print("done!")
    # Stack real/imaginary parts and move that axis into position 2
    gmres_tensors = np.moveaxis(
        np.stack([gmres_results.real, gmres_results.imag]), 0, 2
    )
    return kwave_results, gmres_results, gmres_residuals, gmres_tensors
def load_model_results():
    """Load the network's results on the testset.

    Returns:
        tuple: (pytorch_tensors, traces) — the wavefield-evolution array and
        the per-sample residual-RMSE traces saved by `evaluate.py`.

    Raises:
        FileNotFoundError: If `evaluate.py` has not been run yet.
    """
    path = "results/evolution_of_wavefields_on_test_set.npy"
    if not os.path.isfile(path):
        raise FileNotFoundError(
            "Can't find the model results on the testset. Have you run 'python evaluate.py'?"
        )
    print("Loading model results, this may take some time... ", end="")
    # Fix: reuse `path` instead of repeating the literal, so the existence
    # check and the load can never drift apart.
    pytorch_tensors = np.load(path)
    traces_file = "results/evolution_of_model_RMSE_on_test_set.npy"
    traces = np.load(traces_file)
    print("done!")
    return pytorch_tensors, traces
def fig_samples_from_testset(evaluator, savepath="images/example_skulls"):
    """Save an 8x8 grid of speed-of-sound maps from the testset as a PNG."""
    print("Saving examples from testset in {}".format(savepath))
    sample_maps = [evaluator.testset[i] for i in range(8 * 8)]
    grid = make_grid(sample_maps, nrow=8)
    plt.figure(figsize=(8, 8), dpi=300)
    plt.imshow(grid.cpu().numpy()[0], vmin=1.0, vmax=2.0, cmap="inferno")
    plt.colorbar(fraction=0.02, pad=0.02)
    plt.axis("off")
    plt.savefig(savepath + ".png")
def fig_error_vs_residual(
    traces,
    l_infty_traces,
    path="images/error_vs_residual",
    iterations=1000,
    lines_color="darkgray",
    lines_alpha=0.1,
    mean_color="black",
    xscale="log",
    yscale="log",
    dpi=100,
):
    """Plot per-sample residual-vs-error curves with mean and median overlays.

    Args:
        traces: (samples, iterations) residual magnitudes of the model.
        l_infty_traces: (samples, iterations) fractional l-infinity errors;
            plotted as percent.
        path: output path without extension; a ``.png`` is written.
        iterations: unused; kept for interface compatibility.
        lines_color / lines_alpha: styling of the per-sample background curves.
        mean_color: color of the mean/median overlay curves.
        xscale / yscale: matplotlib axis scales.
        dpi: figure resolution.
    """
    print("Making Error vs Residual figure")
    plt.figure(dpi=dpi)
    # Faint per-sample curves in the background (return value intentionally
    # discarded; the original bound it to an unused variable).
    plt.plot(traces.T, 100 * l_infty_traces.T, color=lines_color, alpha=lines_alpha)
    mean_residual = np.mean(traces, 0)
    mean_error = np.mean(100 * l_infty_traces, 0)
    plt.plot(mean_residual, mean_error, color=mean_color, linestyle="--", label="Mean")
    median_residual = np.median(traces, 0)
    median_error = np.median(100 * l_infty_traces, 0)
    plt.plot(median_residual, median_error, color=mean_color, label="Median")
    plt.yscale(yscale)
    plt.xscale(xscale)
    plt.xlabel("Residual magnitude")
    # Raw string: "\e"/"\i" are invalid escape sequences in a normal literal.
    plt.ylabel(r"$\ell_\infty$ error (percent)")
    plt.ylim([0.1, 100])
    plt.xlim([1e-5, 1e-1])
    plt.grid()
    plt.legend()
    plt.savefig(path + ".png")
def fig_residual_and_error_traces(
    traces,
    l_infty_traces,
    gmres_traces,
    l_infty_traces_gmres,
    path="images/residual_and_l_inf",
    dpi=100,
    iterations=1000,
    lines_alpha=0.05,
    xscale="linear",
    yscale="log",
):
    """Two-panel figure: residual magnitude and l-infinity error vs iteration.

    Left: per-sample residual curves for GMRES (orange) and the learned solver
    (gray), with mean (dashed) and median (solid) overlays. Right: the
    corresponding l-infinity errors in percent.

    Args:
        traces: (samples, 1000) learned-solver residuals.
        l_infty_traces: (samples, 1000) fractional learned-solver errors.
        gmres_traces: (samples, K) GMRES residuals on K checkpoints.
        l_infty_traces_gmres: (samples, 11) fractional GMRES errors.
        path: output path without extension; a ``.png`` is written.
    """
    # GMRES curves are sampled on fewer points; spread them over the same x-range.
    gmres_x = np.linspace(1, 1000, gmres_traces.shape[1])
    w, h = plt.figaspect(1 / 3.0)
    fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(w, h), dpi=dpi)
    # --- Left panel: residual magnitude ---
    ax1.plot(gmres_x, gmres_traces.T, color="orange", alpha=lines_alpha)
    ax1.plot(gmres_x, np.mean(gmres_traces, 0), color="darkorange", linestyle="--")
    ax1.plot(gmres_x, np.median(gmres_traces, 0), color="darkorange", label="GMRES")
    ax1.plot(traces.T, color="darkgray", alpha=lines_alpha)
    ax1.plot(np.mean(traces, 0), color="black", linestyle="--")
    ax1.plot(np.median(traces, 0), color="black", label="Learned")
    ax1.set_yscale(yscale)
    ax1.set_xscale(xscale)
    ax1.set_title("Residual magnitude")
    ax1.set_xlabel("Number of iterations")
    ax1.set_ylim([0.00001, 0.1])
    ax1.set_xlim([1, 1000])
    ax1.grid()
    ax1.legend()
    # --- Right panel: l-infinity error (percent) ---
    x = np.linspace(1, 1001, 1000)
    ax2.plot(x, 100 * l_infty_traces.T, color="darkgray", alpha=lines_alpha)
    ax2.plot(x, np.mean(100 * l_infty_traces, 0), color="black", linestyle="--")
    ax2.plot(x, np.median(100 * l_infty_traces, 0), color="black", label="Learned")
    x = np.linspace(1, 1001, 11)
    ax2.plot(x, 100 * l_infty_traces_gmres.T, color="orange", alpha=lines_alpha)
    ax2.plot(
        x, np.mean(100 * l_infty_traces_gmres, 0), color="darkgoldenrod", linestyle="--"
    )
    ax2.plot(
        x,
        np.median(100 * l_infty_traces_gmres, 0),
        color="darkgoldenrod",
        label="GMRES",
    )
    ax2.set_yscale(yscale)
    # BUG FIX: this call used to target ax1 a second time instead of ax2,
    # leaving the right panel's x-scale at the matplotlib default.
    ax2.set_xscale(xscale)
    ax2.set_title(r"Error $\ell_\infty$ (percent)")
    ax2.set_xlabel("Number of iterations")
    ax2.set_yticks([0.01, 0.1, 1, 10, 100])
    ax2.set_yticklabels(["0.01", "0.1", "1", "10", "100"])
    ax2.set_ylim([0.1, 100])
    ax2.set_xlim([1, iterations])
    ax2.grid()
    plt.savefig(path + ".png")
def histograms(l_infty_pytorch, mse_pytorch, l_infty_gmres, mse_gmres, filename=None):
    """Plot error histograms and boxplots comparing the learned solver to GMRES.

    Args:
        l_infty_pytorch: per-sample l-infinity errors of the learned model.
        mse_pytorch: per-sample RMSE errors of the learned model.
        l_infty_gmres: per-sample l-infinity errors of GMRES.
        mse_gmres: per-sample RMSE errors of GMRES.
        filename: if given, the figure is saved to this path.
    """
    kwargs = dict(histtype="stepfilled", alpha=0.5, bins=50, ec="k")
    # Ticks are placed in log10 space but labelled in percent.
    x_ticks = np.array([0.0001, 0.001, 0.01, 0.1, 1])
    x_ticks_location = np.log10(x_ticks)
    x_tick_labels = 100 * x_ticks  # fixed typo: was "x_thicks_labels"
    fig, axes = plt.subplots(1, 3, figsize=(12, 3), dpi=300)
    # Panel 0: l-infinity error histograms.
    axes[0].hist(
        np.log10(l_infty_pytorch.cpu()), **kwargs, color="black", label="Learned"
    )
    axes[0].hist(np.log10(l_infty_gmres.cpu()), **kwargs, color="orange", label="GMRES")
    axes[0].set_xticks(x_ticks_location)
    axes[0].set_xticklabels(x_tick_labels)
    axes[0].set_xlim([-4, 0])
    # Raw strings: "\e"/"\%" are invalid escapes in a normal literal.
    axes[0].set_xlabel(r"$\ell_\infty$ error (\%)")
    axes[0].set_ylabel("Number of")
    axes[0].legend()
    # Panel 1: RMSE histograms.
    axes[1].hist(np.log10(mse_pytorch.cpu()), **kwargs, color="black")
    axes[1].hist(np.log10(mse_gmres.cpu()), **kwargs, color="orange")
    axes[1].set_xticks(x_ticks_location)
    axes[1].set_xticklabels(x_tick_labels)
    axes[1].set_xlim([-4, 0])
    axes[1].set_xlabel("RMSE error (x 100)")
    axes[1].set_ylabel("Number of")

    def _boxplot(values, position, color):
        # One boxplot of log10(values) at the given x position (was four
        # copy-pasted calls differing only in data/position/color).
        axes[2].boxplot(
            np.log10(values.cpu()),
            positions=(position,),
            patch_artist=True,
            boxprops=dict(facecolor="white", color=color),
            flierprops=dict(markerfacecolor=color, marker=".", markersize=1),
            medianprops=dict(color=color),
        )

    # Panel 2: paired boxplots (learned in black, GMRES in darkorange).
    _boxplot(l_infty_pytorch, 0.85, "black")
    _boxplot(l_infty_gmres, 1.15, "darkorange")
    _boxplot(mse_pytorch, 1.85, "black")
    _boxplot(mse_gmres, 2.15, "darkorange")
    axes[2].set_xlim([0.7, 2.3])
    axes[2].set_xticks([1, 2])
    axes[2].set_xticklabels([r"$\ell_\infty (\%)$", "RMSE (x100)"])
    axes[2].set_yticks(x_ticks_location)
    axes[2].set_yticklabels(x_tick_labels)
    axes[2].yaxis.tick_right()
    axes[2].set_title(r"$\ell_\infty$ and RMSE errors")
    if filename is not None:
        plt.savefig(filename)
def fig_skull_error_histograms_and_boxplot(
    pytorch_tensors,
    gmres_tensors,
    kwave_results,
    path="images/distribution_errors_global",
):
    """Compare final-frame errors of the learned solver and GMRES.

    Computes l-infinity and RMSE errors of the last frame of each solver's
    wavefield evolution against k-Wave and hands them to :func:`histograms`.
    """
    # Drop the last entry along axis 1 before comparing, as in the original.
    model_errors = last_frame_difference(pytorch_tensors[:, :-1], kwave_results)
    gmres_errors = last_frame_difference(gmres_tensors[:, :-1], kwave_results)
    l_infty_pytorch, mse_pytorch = model_errors
    l_infty_gmres, mse_gmres = gmres_errors
    histograms(
        l_infty_pytorch,
        mse_pytorch,
        l_infty_gmres,
        mse_gmres,
        filename=path + ".png",
    )
def fig_example(
    evaluator,
    sos_map,
    path,
    source_location=[82, 48],
    omega=1,
    min_sos=1,
    cfl=0.01,
    roundtrips=60.0,
    mode="normal",
    restart=10,
    max_iter=1000,
):
    """Thin wrapper: run :func:`fig_generic` with the evaluator's model."""
    fig_generic(
        evaluator.model,
        sos_map,
        path,
        source_location,
        omega,
        min_sos,
        cfl,
        roundtrips,
        mode,
        restart,
        max_iter,
    )
def fig_skull_example(evaluator, path="images/skull_example"):
    """Make the skull-example figures: final-field comparison plus a 4x4 grid
    of wavefield snapshots at logarithmically spaced iterations.

    If the cached example data is missing it is generated first via
    :func:`make_skull_example` (runs k-Wave through MATLAB).
    """
    if not os.path.isfile("examples/kwavedata512.mat"):
        print("Data for skull example not found, I'll generate it.")
        make_skull_example(evaluator)
    sos_map = loadmat("examples/problem_setup.mat")["sos"]
    kwave_wavefield = loadmat("examples/kwavedata512.mat")["p_kw"]
    # FIX: load pytorch_results.mat once instead of four separate loadmat calls.
    results = loadmat("examples/pytorch_results.mat")
    pytorch_wavefield = results["pytorch_wf"]
    l_infty = results["l_infty"]
    show_example_abs(
        sos_map,
        pytorch_wavefield,
        kwave_wavefield,
        100 * l_infty,
        trace_name=r"$\ell_\infty$ error \%",
    )
    plt.savefig(path + ".png")
    plt.close()
    # Sample iterations
    samples = results["samples"]
    iterations = results["iterations"][0]
    fig, axs = plt.subplots(4, 4, figsize=(18, 18), dpi=300)
    # FIX: one index drives the grid (the original kept two identical counters
    # and a leftover debug print of the plot number).
    for plotnum, ax in enumerate(axs.flat):
        ax.imshow(samples[plotnum], cmap="inferno")
        ax.set_title("Iteration {}".format(iterations[plotnum] + 1))
        ax.axis("off")
    plt.savefig(path + "_evolution.png")
if __name__ == "__main__":
    # Use LaTeX rendering so math labels (e.g. \ell_\infty) typeset correctly.
    plt.rcParams.update({
        "text.usetex": True,
        "font.family": "sans-serif",
        "font.sans-serif": ["Helvetica"]})
    # Load model.
    # FIX: the evaluator used to be constructed (and moved to the GPU) twice,
    # loading the checkpoint redundantly; once is enough.
    evaluator = Evaluation(
        path=SETTINGS["model_checkpoint"],
        testset=SETTINGS["testset"],
        gpus=SETTINGS["gpu"],
    )
    evaluator.move_model_to_gpu()
    # ----------------------------------------------------------------
    # Load GMRES and kWave results
    kwave_results, gmres_results, gmres_traces, gmres_tensors = load_kwave_and_gmres()
    # Load model results on testset
    pytorch_tensors, traces = load_model_results()
    # ----------------------------------------------------------------
    # Save examples of speed of sound maps from the testset
    fig_samples_from_testset(evaluator)
    # Evaluate error curves
    l_infty_traces, mse_traces = get_model_errors(pytorch_tensors, kwave_results)
    l_infty_traces_gmres, mse_traces_gmres = get_gmres_errors(
        gmres_results, kwave_results
    )
    # Residual vs error figure
    fig_error_vs_residual(traces, l_infty_traces)
    fig_residual_and_error_traces(
        traces, l_infty_traces, gmres_traces, l_infty_traces_gmres
    )
    # Histograms and boxplots
    fig_skull_error_histograms_and_boxplot(
        pytorch_tensors, gmres_tensors, kwave_results
    )
    # Make examples
    print(
        "--- Example images ---\nEach example may take a while to compute as it runs an accurate kWave simulation (cfl=0.01, roundtrips=60)"
    )
    # The first five testset samples (was five copy-pasted calls) ...
    for sample_index in range(5):
        fig_example(
            evaluator,
            (evaluator.testset[sample_index]).clone().numpy()[0],
            path="images/example_{}".format(sample_index),
        )
    # ... plus the worst-performing sample.
    fig_example(
        evaluator,
        (evaluator.testset[864]).clone().numpy()[0],
        path="images/worst_example",
    )
    # Rectangle example: a homogeneous map with one rectangular inclusion.
    sos_map = (evaluator.testset[0] * 0 + 1).numpy()[0]
    sos_map[20:60, 20:-20] = 2.0
    fig_example(evaluator, sos_map, path="images/rectangle", cfl=0.01, roundtrips=60)
    # Large example: a 5x5 grid of testset maps padded to 512x512.
    source_location = [450, 256]
    sos_maps = [evaluator.testset[n] for n in range(25)]
    sos_map = make_grid(sos_maps, nrow=5, padding=0)[0].numpy()
    sos_map[400:, 200:300] = 1.0  # Remove one
    sos_map = np.pad(sos_map, 16, mode="edge")  # Pad to 512x512
    evaluator.set_domain_size(sos_map.shape[-1], source_location=source_location)
    fig_example(
        evaluator,
        sos_map,
        "images/patches",
        source_location=source_location,
        cfl=0.1,
        roundtrips=100,
        mode="large",
        restart=25,
    )
    # Skull example
    fig_skull_example(evaluator)
|
{"/helmnet/architectures.py": ["/helmnet/dataloaders.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/source.py", "/helmnet/replaybuffer.py"], "/helmnet/__init__.py": ["/helmnet/architectures.py", "/helmnet/dataloaders.py", "/helmnet/hybridnet.py", "/helmnet/source.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/replaybuffer.py"], "/train.py": ["/helmnet/__init__.py"], "/test.py": ["/helmnet/__init__.py", "/helmnet/support_functions.py"], "/helmnet/hybridnet.py": ["/helmnet/architectures.py", "/helmnet/dataloaders.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/source.py", "/helmnet/replaybuffer.py"], "/produce_figures.py": ["/evaluate.py", "/helmnet/support_functions.py"], "/evaluate.py": ["/helmnet/__init__.py", "/helmnet/dataloaders.py"]}
|
29,247
|
SonyPony/helmnet
|
refs/heads/main
|
/helmnet/dataloaders.py
|
import random
from matplotlib import pyplot as plt
from tqdm import trange
import numpy as np
import torch
from torch.utils.data import Dataset
from scipy.io import savemat
def get_dataset(
    dataset_path: str, source_location="cuda:7", destination="cpu"
) -> Dataset:
    """Load a torch dataset, remapping its tensors between devices.

    Args:
        dataset_path (str): Path of the dataset. It must be a .ph file.
        source_location (str, optional): Device the dataset was saved from.
            Defaults to "cuda:7".
        destination (str, optional): Device the dataset must be mapped to.
            Defaults to "cpu".

    Returns:
        torch.Dataset
    """
    # map_location remaps storages saved on `source_location` to `destination`.
    device_map = {source_location: destination}
    return torch.load(dataset_path, map_location=device_map)
class EllipsesDataset(Dataset):
    """Dataset of oversimplified skulls.

    Each item is a speed-of-sound map containing one random closed,
    ellipsoid-like contour drawn on a background of speed 1.0.
    Typical usage: ``make_dataset()`` (or ``load_dataset()``) followed by
    ``sos_maps_to_tensor()`` before indexing.
    """

    def __init__(self):
        # Filled by sos_maps_to_tensor(); a (N, 1, H, W) float tensor.
        # Until then it is an empty list, so __len__/__getitem__ and
        # save_for_matlab() only work after that call.
        self.all_sos = []

    def make_dataset(self, num_ellipses=5000, imsize=128):
        # TODO: Add more control over the parameters of the generated
        # datasets, which at the moment are hard coded.
        """Generates a dataset of oversimplified skulls.

        Args:
            num_ellipses (int, optional): How many maps to make. Defaults to 5000.
            imsize (int, optional): Size of the speed of sound map. Possibly
                a power of two. The map is squared. Defaults to 128.
        """
        all_sos_maps = []
        for _ in trange(num_ellipses):
            all_sos_maps.append(self._make_ellipsoid(imsize))
        # Stored as a (num_ellipses, imsize, imsize) numpy array.
        self.all_sos_numpy = np.stack(all_sos_maps, axis=0)

    def load_dataset(self, filepath="data/ellipses.npy"):
        """Loads a dataset from a `npy` file

        Args:
            filepath (str, optional): Relative file path. Defaults to "data/ellipses.npy".
        """
        all_sos = np.load(filepath)
        self.all_sos_numpy = np.array(all_sos, np.float32)

    def save_dataset(self, filepath: str):
        """Saves a dataset as an `npy` file.

        Args:
            filepath (str): Path to save the file. Should start from the
                folder `data` to avoid confusion.
        """
        np.save(filepath, self.all_sos_numpy)

    def save_for_matlab(self, name):
        """Save the maps as a .mat file under ``datasets/``.

        NOTE: uses ``self.all_sos``, so sos_maps_to_tensor() must have been
        called first.
        """
        savemat("datasets/" + name, {"speeds_of_sound": self.all_sos.numpy()})

    def sos_maps_to_tensor(self):
        # TODO: This mway of moving things is likely going to create confusion.
        """Moves the maps to a cuda tensor and takes care of some shaping"""
        # (N, H, W) numpy -> (N, 1, H, W) float CPU tensor (channel dim added).
        self.all_sos = torch.from_numpy(self.all_sos_numpy).unsqueeze(1).float()

    @staticmethod
    def _make_ellipsoid(imsize=128):
        # cv2 is imported lazily so OpenCV is only required when generating data.
        import cv2
        """Internal method to make an ellipsoid speed of sound map.

        Args:
            imsize (int, optional): Size of the image. Defaults to 128.

        Returns:
            np.array: The speed of sound map with a random ellipsoid.
        """
        # The contour is a Fourier-perturbed circle: 4 harmonics with random
        # amplitudes and phases, sampled on 360 angular points.
        t = np.linspace(0, 2 * np.pi, num=360, endpoint=True)
        # Distribution parameters
        avg_amplitudes = np.array([1.0, 0.0, 0.0, 0.0])
        std_amplitudes = np.array([0.1, 0.05, 0.025, 0.01])
        avg_phase = np.array([0] * 4)
        std_phase = np.array([np.pi / 16] * 4)
        avg_thickness = 2
        std_thickness = 8
        # Generate sample
        a_x = (
            avg_amplitudes
            + np.random.randn(
                4,
            )
            * std_amplitudes
        )
        a_y = (
            avg_amplitudes
            + np.random.randn(
                4,
            )
            * std_amplitudes
        )
        ph_x = (
            avg_phase
            + np.random.randn(
                4,
            )
            * std_phase
        )
        ph_y = (
            avg_phase
            + np.random.randn(
                4,
            )
            * std_phase
        )
        x = 0.0
        y = 0.0
        # Sum the harmonics to get the (x, y) contour coordinates.
        for i in range(len(avg_amplitudes)):
            x = x + np.sin(t * (i + 1) + ph_x[i]) * a_x[i]
            y = y + np.cos(t * (i + 1) + ph_y[i]) * a_y[i]
        # Rescale coordinates from roughly [-2, 2] into [0, 1].
        x = (x + 2) / 4
        y = (y + 2) / 4
        # Transform into image
        thickness = int(
            avg_thickness
            + np.random.rand(
                1,
            )
            * std_thickness
        )
        img = np.zeros((imsize, imsize, 3), dtype="uint8")
        x = x * imsize
        y = y * imsize
        pts = np.expand_dims(np.array([x, y], np.int32).T, axis=0)
        # Draw the closed contour into the red channel with value 1.
        cv2.polylines(img, [pts], True, (1, 0, 0), thickness=thickness)
        # Fixing speed of sound
        # Wall amplitude is uniform in [0.5, 1), so the contour speed lands
        # in [1.5, 2) on a background of 1.0.
        rand_amplitude = (
            np.random.rand(
                1,
            )
            * 0.5
            + 0.5
        )
        img = np.array(img[:, :, 0], np.float32) * rand_amplitude
        sos = 1.0 + img
        return sos

    def __len__(self):
        # Number of maps; 0 until sos_maps_to_tensor() is called.
        return len(self.all_sos)

    def __getitem__(self, idx):
        # Returns a (1, H, W) tensor for the idx-th map.
        return self.all_sos[idx]
|
{"/helmnet/architectures.py": ["/helmnet/dataloaders.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/source.py", "/helmnet/replaybuffer.py"], "/helmnet/__init__.py": ["/helmnet/architectures.py", "/helmnet/dataloaders.py", "/helmnet/hybridnet.py", "/helmnet/source.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/replaybuffer.py"], "/train.py": ["/helmnet/__init__.py"], "/test.py": ["/helmnet/__init__.py", "/helmnet/support_functions.py"], "/helmnet/hybridnet.py": ["/helmnet/architectures.py", "/helmnet/dataloaders.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/source.py", "/helmnet/replaybuffer.py"], "/produce_figures.py": ["/evaluate.py", "/helmnet/support_functions.py"], "/evaluate.py": ["/helmnet/__init__.py", "/helmnet/dataloaders.py"]}
|
29,248
|
SonyPony/helmnet
|
refs/heads/main
|
/helmnet/support_functions.py
|
import numpy as np
import torch
from tqdm import tqdm
import os
from matplotlib import pyplot as plt
import subprocess
from scipy.io import loadmat, savemat
def last_frame_difference(stream, reference, mask=None):
    """Errors of the final frame of a wavefield stream against a reference.

    Args:
        stream: array/tensor shaped (batch, frames, 2, H, W); channels 0/1
            hold the real/imaginary parts. Only the last frame is compared.
        reference: complex reference wavefields, shape (batch, H, W)
            — presumably the k-Wave solutions; confirm against callers.
        mask: optional mask forwarded to :func:`difference_to_kwave`.

    Returns:
        tuple: (l_infty, mse) per-sample error tensors.
    """
    with torch.no_grad():
        # Recombine the real/imag channels of the final frame into a complex field.
        last_frame = stream[:, -1, 0] + 1j * stream[:, -1, 1]
        # FIX: torch.as_tensor instead of torch.tensor — the latter emits a
        # "copy construct from a tensor" UserWarning when handed an existing
        # tensor; no copy is needed since difference_to_kwave never mutates
        # its inputs in place.
        sample = torch.as_tensor(last_frame)
        reference = torch.as_tensor(reference)
        difference, _, _ = difference_to_kwave(sample, reference, mask=mask)
        # Per-sample worst-pixel error (topk over the flattened field).
        l_infty, _ = difference.reshape(difference.shape[0], -1).topk(1, 1)
        mse = difference.pow(2).mean([1, 2]).sqrt()
        return l_infty[:, 0], mse
def difference_to_kwave(sample, reference, mask=None, pml_size=10, source_index=(82, 48)):
    """Normalized absolute difference between wavefields and a k-Wave reference.

    Both fields are normalized by their value at ``source_index``, the
    reference is conjugated (opposite phase convention — presumably k-Wave's;
    confirm against the solver), and the PML border is cropped from the
    difference.

    Args:
        sample: complex tensor (batch, H, W) of predicted wavefields.
        reference: complex tensor (batch, H, W) of reference wavefields.
        mask: optional mask multiplied into both fields; when given, the
            difference is also divided by the per-sample max of |reference|.
        pml_size: border width (pixels) cropped from the difference.
        source_index: (row, col) used for normalization. Defaults to the
            previously hard-coded (82, 48), so existing callers are unchanged.

    Returns:
        tuple: (difference, normalized_sample, normalized_reference).
    """
    row, col = source_index
    # Normalizing to source wavefield
    sample = sample / sample[:, row, col].unsqueeze(1).unsqueeze(1)
    if torch.any(torch.isnan(sample)):
        # A zero source value yields 0/0 -> NaN; zero those pixels out.
        sample[torch.isnan(sample)] = 0.0
    reference = reference / reference[:, row, col].unsqueeze(1).unsqueeze(1)
    reference = torch.conj(reference)
    # Normalize error by maximum
    if mask is not None:
        sample = sample * mask
        reference = reference * mask
        max_vals = (
            torch.tensor([x.max() for x in reference.abs()])
            .unsqueeze(1)
            .unsqueeze(1)
            .to(reference.device)
        )
    else:
        max_vals = 1
    return (
        torch.abs(sample - reference)[:, pml_size:-pml_size, pml_size:-pml_size]
        / max_vals,
        sample,
        reference,
    )
def get_model_errors(pytorch_tensors, kwave_results, iterations=1000):
    """Compute (or load cached) per-iteration error curves of the model.

    Curves are cached in ``results/model_traces.npz`` and reused when present.

    Returns:
        tuple: (l_infty_traces, mse_traces) numpy arrays, one row per sample.
    """
    print("Getting model error curves...")
    cache_file = "results/model_traces.npz"
    if os.path.isfile(cache_file):
        cached = np.load(cache_file)
        return cached["l_infty_traces"], cached["mse_traces"]
    print("File not found: generating curves")
    mse_curves = []
    l_infty_curves = []
    # Process the samples in 50 chunks of 20 (progress bar tracks the chunks).
    for chunk in tqdm(range(50)):
        for offset in range(20):
            idx = offset + chunk * 20
            wavefields = torch.tensor(
                pytorch_tensors[idx, :, 0] + 1j * pytorch_tensors[idx, :, 1]
            ).cuda()
            # Tile the single reference so every iteration is compared at once.
            reference = (
                torch.tensor(kwave_results[idx]).repeat(iterations, 1, 1).cuda()
            )
            difference, _, _ = difference_to_kwave(wavefields, reference, None)
            l_inf, _ = difference.reshape(difference.shape[0], -1).topk(1, 1)
            rmse = difference.pow(2).mean([1, 2]).sqrt()
            mse_curves.append(rmse.cpu().numpy())
            l_infty_curves.append(l_inf.cpu().numpy())
    mse_traces = np.array(mse_curves)
    l_infty_traces = np.array(l_infty_curves)[:, :, 0]
    print("Saving")
    np.savez(cache_file, l_infty_traces=l_infty_traces, mse_traces=mse_traces)
    return l_infty_traces, mse_traces
def get_gmres_errors(gmres_results, kwave_results):
    """Compute (or load cached) per-checkpoint GMRES error curves.

    Curves are cached in ``results/gmres_traces.npz`` and reused when present.

    Returns:
        tuple: (l_infty_traces_gmres, mse_traces_gmres) numpy arrays.
    """
    print("Getting GMRES error curves")
    cache_file = "results/gmres_traces.npz"
    if os.path.isfile(cache_file):
        cached = np.load(cache_file)
        return cached["l_infty_traces_gmres"], cached["mse_traces_gmres"]
    print("File not found: generating curves")
    mse_curves = []
    l_infty_curves = []
    for idx in tqdm(range(gmres_results.shape[0])):
        solutions = torch.tensor(gmres_results[idx])
        # Each GMRES run holds 11 checkpoints; tile the reference to match.
        reference = torch.tensor(kwave_results[idx]).repeat(11, 1, 1)
        difference, _, _ = difference_to_kwave(solutions, reference, None)
        l_inf, _ = difference.reshape(difference.shape[0], -1).topk(1, 1)
        rmse = difference.pow(2).mean([1, 2]).sqrt()
        mse_curves.append(rmse.cpu().numpy())
        l_infty_curves.append(l_inf.cpu().numpy())
    mse_traces_gmres = np.array(mse_curves)
    l_infty_traces_gmres = np.array(l_infty_curves)[:, :, 0]
    print("Saving")
    np.savez(
        cache_file,
        l_infty_traces_gmres=l_infty_traces_gmres,
        mse_traces_gmres=mse_traces_gmres,
    )
    return l_infty_traces_gmres, mse_traces_gmres
def normalize_wavefield(wavefield, source_location):
    """Normalize a wavefield by its value at the source location.

    Args:
        wavefield: a single field (H, W) or a batch of fields (N, H, W).
            The 3D branch uses ``unsqueeze`` and therefore expects a torch
            tensor; the 2D branch also works on numpy arrays.
        source_location: (row, col) index of the source point.

    Returns:
        The wavefield divided by its (per-sample) source value.

    Raises:
        ValueError: if the wavefield is neither 2D nor 3D. (The original
            silently returned None in that case, hiding shape bugs.)
    """
    row, col = source_location[0], source_location[1]
    if len(wavefield.shape) == 2:
        return wavefield / wavefield[row, col]
    if len(wavefield.shape) == 3:
        # Broadcast the per-sample source value over both spatial dims.
        return wavefield / wavefield[:, row, col].unsqueeze(1).unsqueeze(1)
    raise ValueError(
        "wavefield must be 2D or 3D, got shape {}".format(tuple(wavefield.shape))
    )
def show_example(
    sos,
    model_field,
    kwave_field,
    traces,
    traces_name,
    source_location=[82, 48],
    filename=None,
    setticks=True,
):
    """Four-panel comparison figure: reference, prediction, difference, traces.

    Args:
        sos: speed-of-sound map, shown as an inset on the reference panel.
        model_field: predicted wavefield (complex, H x W).
        kwave_field: reference wavefield (complex, H x W); conjugated before
            normalization — presumably an opposite phase convention; confirm.
        traces: list of dicts with keys "x", "y", "color", "name", plotted on
            the fourth panel.
        traces_name: title of the fourth panel.
        source_location: (row, col) used to normalize both fields.
        filename: unused in this function; the caller saves the figure.
        setticks: if True, fix log-scale x ticks at 1/10/100/1000.
    """
    sos_map = sos
    # Both fields are normalized at the source so amplitudes are comparable.
    kwave_field = normalize_wavefield(np.conj(kwave_field), source_location)
    model_field = normalize_wavefield(model_field, source_location)
    fig, axs = plt.subplots(1, 4, figsize=(12, 2.2), dpi=300)
    raster1 = axs[0].imshow(np.real(kwave_field), vmin=-0.5, vmax=0.5, cmap="seismic")
    axs[0].axis("off")
    axs[0].set_title("Reference")
    fig.colorbar(raster1, ax=axs[0])
    # Inset showing the speed-of-sound map on top of the reference panel.
    ax = fig.add_axes([0.025, 0.6, 0.25, 0.25])
    raster2 = ax.imshow(sos_map, vmin=1, vmax=2, cmap="inferno")
    ax.axis("off")
    raster3 = axs[1].imshow(np.real(model_field), vmin=-0.5, vmax=0.5, cmap="seismic")
    axs[1].axis("off")
    axs[1].set_title("Prediction")
    fig.colorbar(raster3, ax=axs[1])
    # Crop an 8-pixel border from the difference, then pad back with zeros.
    error_field = (kwave_field - model_field)[8:-8, 8:-8]
    error_field = np.pad(error_field, 8)
    # NOTE(review): here 1e-20 is added *inside* abs(); show_example_large
    # adds it outside — probably meant to be the same; verify.
    raster4 = axs[2].imshow(
        np.log10(np.abs(error_field + 1e-20)), vmin=-4, vmax=-2, cmap="inferno"
    )
    axs[2].axis("off")
    axs[2].set_title("Difference")
    cbar = fig.colorbar(raster4, ax=axs[2])
    # Colorbar ticks placed in log10 space but labelled as percentages.
    cbar.set_ticks(np.log10([0.1, 0.01, 0.001, 0.0001]))
    cbar.set_ticklabels(["10\%", "1\%", "0.1\%", "0.01\%"])
    for trace in traces:
        axs[3].plot(trace["x"],trace["y"], color=trace["color"], label=trace["name"])
    axs[3].set_yscale("log")
    axs[3].set_xscale("log")
    axs[3].set_xlim([1, len(traces[0]["x"])])
    axs[3].set_title(traces_name)
    axs[3].set_xlabel("Iterations")
    axs[3].yaxis.tick_right()
    axs[3].grid(True)
    axs[3].legend()
    if setticks:
        axs[3].set_xticks([1, 10, 100, 1000])
        axs[3].set_xticklabels(["1", "10", "100", "1000"])
def show_example_large(
    sos,
    model_field,
    kwave_field,
    traces,
    traces_name,
    source_location=[82, 48],
    setticks=False,
    filename=None,
):
    """2x2 comparison figure for large-domain examples.

    Panels: reference wavefield (with speed-of-sound inset), prediction,
    log-scale difference, and the error traces.

    Args:
        sos: speed-of-sound map shown as an inset.
        model_field / kwave_field: complex wavefields; the reference is
            conjugated before normalization.
        traces: list of dicts with keys "x", "y", "color", "name".
        traces_name: title of the traces panel.
        source_location: (row, col) used to normalize both fields.
        setticks: if True, fix log-scale ticks and limits on the traces panel.
        filename: unused in this function; the caller saves the figure.
    """
    sos_map = sos
    kwave_field = normalize_wavefield(np.conj(kwave_field), source_location)
    model_field = normalize_wavefield(model_field, source_location)
    fig, axs = plt.subplots(2, 2, figsize=(12, 10), dpi=100)
    raster1 = axs[0, 0].imshow(
        np.real(kwave_field), vmin=-0.2, vmax=0.2, cmap="seismic"
    )
    axs[0, 0].axis("off")
    axs[0, 0].set_title("Reference")
    fig.colorbar(raster1, ax=axs[0, 0])
    # Inset with the speed-of-sound map.
    ax = fig.add_axes([0.117, 0.773, 0.10, 0.10])
    ax.imshow(sos_map, vmin=1, vmax=2, cmap="inferno")
    ax.axis("off")
    axs[0, 1].imshow(np.real(model_field), vmin=-0.2, vmax=0.2, cmap="seismic")
    axs[0, 1].axis("off")
    axs[0, 1].set_title("Prediction")
    # fig.colorbar(raster3, ax=axs[0,1])
    # Crop a 15-pixel border from the difference, then pad back with zeros.
    error_field = (kwave_field - model_field)[15:-15, 15:-15]
    error_field = np.pad(error_field, 15)
    raster4 = axs[1, 0].imshow(
        np.log10(np.abs(error_field) + 1e-20), vmin=-4, vmax=-2, cmap="inferno"
    )
    axs[1, 0].axis("off")
    axs[1, 0].set_title("Difference")
    cbar = fig.colorbar(raster4, ax=axs[1, 0])
    cbar.set_ticks(np.log10([0.1, 0.01, 0.001, 0.0001]))
    cbar.set_ticklabels([r"10\%", r"1\%", r"0.1\%", r"0.01\%"])
    for trace in traces:
        axs[1, 1].plot(trace["x"],trace["y"], color=trace["color"], label=trace["name"])
    axs[1, 1].set_yscale("log")
    axs[1, 1].set_xscale("log")
    # BUG FIX: used len(trace) — the *loop variable* left over after the loop,
    # a dict, so this was its key count (4) — instead of the number of
    # iteration points, as show_example() correctly does.
    axs[1, 1].set_xlim([1, len(traces[0]["x"])])
    axs[1, 1].set_title(traces_name)
    axs[1, 1].set_xlabel("Iterations")
    axs[1, 1].yaxis.tick_right()
    axs[1, 1].grid(True)
    axs[1, 1].legend()
    if setticks:
        axs[1, 1].set_xticks([1, 10, 100, 1000])
        axs[1, 1].set_xticklabels(["1", "10", "100", "1000"])
        axs[1, 1].set_yticks([0.0001, 0.001, 0.01, 0.1])
        axs[1, 1].set_ylim([0.00001, 0.01])
def show_example_abs(
    sos,
    model_field,
    kwave_field,
    trace,
    trace_name="Residual RMSE",
    setticks=False,
    filename=None,
):
    """2x2 figure comparing wavefield *magnitudes* (not real parts).

    Both fields are reduced to |field| and normalized by their own maximum,
    so no source location is needed (unlike show_example / show_example_large).

    Args:
        sos: speed-of-sound map shown as an inset.
        model_field / kwave_field: complex wavefields (H x W).
        trace: a single 1D error/residual curve for the fourth panel.
        trace_name: title of the trace panel.
        setticks: if True, fix log-scale ticks and limits on the trace panel.
        filename: unused in this function; the caller saves the figure.
    """
    sos_map = sos
    # Magnitude fields, each normalized to a [0, 1] range by its own max.
    kwave_field = np.abs(kwave_field)
    kwave_field /= np.amax(kwave_field)
    model_field = np.abs(model_field)
    model_field /= np.amax(model_field)
    fig, axs = plt.subplots(2, 2, figsize=(12, 10), dpi=100)
    raster1 = axs[0, 0].imshow(np.real(kwave_field), vmin=0, vmax=0.5, cmap="inferno")
    axs[0, 0].axis("off")
    axs[0, 0].set_title("Reference")
    fig.colorbar(raster1, ax=axs[0, 0])
    # Inset with the speed-of-sound map.
    ax = fig.add_axes([0.117, 0.773, 0.10, 0.10])
    raster2 = ax.imshow(sos_map, vmin=1, vmax=2, cmap="inferno")
    ax.axis("off")
    raster3 = axs[0, 1].imshow(np.real(model_field), vmin=0, vmax=0.5, cmap="inferno")
    axs[0, 1].axis("off")
    axs[0, 1].set_title("Prediction")
    # fig.colorbar(raster3, ax=axs[0,1])
    # Crop a 15-pixel border from the difference, then pad back with zeros.
    error_field = (kwave_field - model_field)[15:-15, 15:-15]
    error_field = np.pad(error_field, 15)
    raster4 = axs[1, 0].imshow(
        np.log10(np.abs(error_field) + 1e-20), vmin=-4, vmax=-2, cmap="inferno"
    )
    axs[1, 0].axis("off")
    axs[1, 0].set_title("Difference")
    cbar = fig.colorbar(raster4, ax=axs[1, 0])
    # Colorbar ticks placed in log10 space but labelled as percentages.
    cbar.set_ticks(np.log10([0.1, 0.01, 0.001, 0.0001]))
    cbar.set_ticklabels(["10\%", "1\%", "0.1\%", "0.01\%"])
    axs[1, 1].plot(trace, color="black")
    axs[1, 1].set_yscale("log")
    axs[1, 1].set_xscale("log")
    axs[1, 1].set_xlim([1, len(trace)])
    axs[1, 1].set_title(trace_name)
    axs[1, 1].set_xlabel("Iterations")
    axs[1, 1].yaxis.tick_right()
    axs[1, 1].grid(True)
    if setticks:
        axs[1, 1].set_xticks([1, 10, 100, 1000])
        axs[1, 1].set_xticklabels(["1", "10", "100", "1000"])
        axs[1, 1].set_yticks([0.0001, 0.001, 0.01, 0.1])
        axs[1, 1].set_ylim([0.00001, 0.01])
def make_skull_example(evaluator):
    """Generate the skull-example data: run k-Wave via MATLAB, then the model.

    Side effects: runs two external MATLAB scripts, reads
    ``examples/kwavedata512.mat`` and ``examples/problem_setup.mat``, and
    writes the model results to ``examples/pytorch_results.mat``.
    Requires MATLAB on PATH and a CUDA device (uses evaluator.gpus[0]).
    """
    print("----- Running kWave (output not shown)")
    command = [
        "matlab",
        ''' -nodisplay -nosplash -nodesktop -r "run('matlab/skull_example.m'); exit;"''',
    ]
    subprocess.run(command, capture_output=True)
    print("----- Solving with model")
    kwave_solution = loadmat("examples/kwavedata512.mat")["p_kw"]
    matlab_variables = loadmat("examples/problem_setup.mat")
    speedofsound = matlab_variables["sos"].astype(float)
    # Source map scaled by 10 — presumably the source amplitude convention
    # expected by the network; confirm against the training setup.
    src_map = 10 * matlab_variables["src"].astype(float)
    # Shape maps to (1, 1, H, W) / (1, H, W) as the model expects.
    sos_map = torch.tensor(speedofsound).unsqueeze(0).unsqueeze(0)
    source = torch.tensor(src_map).unsqueeze(0).float()
    evaluator.set_domain_size(sos_map.shape[-1], source_map=source)
    sos_map_tensor = torch.tensor(sos_map).to("cuda:" + str(evaluator.gpus[0])).float()
    with torch.no_grad():
        output = evaluator.model.forward(
            sos_map_tensor,
            num_iterations=3000,
            return_wavefields=True,
            return_states=False,
        )
    with torch.no_grad():
        losses = [evaluator.model.test_loss_function(x) for x in output["residuals"]]
    # Recombine the real/imag channels of each stored wavefield into complex.
    pytorch_wavefield = torch.cat(
        [(x[:, 0] + 1j * x[:, 1]).detach().cpu() for x in output["wavefields"]]
    ).cpu()
    kwave_wavefield = torch.tensor(kwave_solution, device=pytorch_wavefield.device)
    # Normalize both fields at the reference's peak-magnitude pixel.
    max_pt = torch.argmax(torch.abs(kwave_wavefield))
    row, col = max_pt // 512, max_pt - (max_pt // 512) * 512
    kwave_field_norm = normalize_wavefield(
        torch.conj(kwave_wavefield), source_location=[row, col]
    )
    model_field_norm = normalize_wavefield(
        pytorch_wavefield, source_location=[row, col]
    )
    # Per-iteration worst-pixel error, with a 15-pixel border cropped.
    difference = torch.abs(kwave_field_norm.unsqueeze(0) - model_field_norm)[
        :, 15:-15, 15:-15
    ]
    l_infty, indices = difference.reshape(difference.shape[0], -1).topk(1, 1)
    # Store some wavefields
    # 16 snapshot indices, logarithmically spaced over the 3000 iterations.
    iterations = np.rint(3000 ** np.linspace(0, 1, 16) - 1).tolist()
    iterations = list(map(int, iterations))
    samples = np.stack([model_field_norm[i].abs().cpu() for i in iterations])
    savemat(
        "examples/pytorch_results.mat",
        {
            "pytorch_wf": pytorch_wavefield[-1].cpu().numpy(),
            "res": np.array(losses),
            "l_infty": np.array(l_infty),
            "samples": samples,
            "iterations": iterations,
        },
    )
def fig_generic(
    solver,
    sos_map,
    path,
    source_location=[82, 48],
    omega=1,
    min_sos=1,
    cfl=0.01,
    roundtrips=60.0,
    mode="normal",
    restart=20,
    max_iter = 1000
):
    """Solve one problem with k-Wave, GMRES, and the model, then plot them.

    Writes the problem setup to /tmp/helmholtz_setup.mat, runs two external
    MATLAB scripts (each overwriting /tmp/helmholtz.mat), runs the neural
    solver, and saves the comparison figure as ``path + ".pgf"``.
    Requires MATLAB on PATH and a device-ready solver.

    Args:
        solver: the trained model (exposes forward / test_loss_function /
            hparams).
        sos_map: 2D speed-of-sound map.
        path: output path without extension.
        source_location: (row, col) of the source, used for normalization.
        omega / min_sos / cfl / roundtrips: simulation parameters forwarded
            to the MATLAB solvers.
        mode: "normal" (1x4 figure) or "large" (2x2 figure).
        restart: GMRES restart parameter.
        max_iter: iterations for both GMRES and the model's x-axis.
    """
    assert mode in ["normal", "large"]
    print("Making {}".format(path))
    flag = 0
    # Save data into matfile
    savemat(
        "/tmp/helmholtz_setup.mat",
        {
            "sos_map": sos_map,
            "source_location": source_location,
            "omega": omega,
            "min_sos": min_sos,
            "flag": flag,
            "cfl": cfl,
            "roundtrips": roundtrips,
            "pml_size": solver.hparams.PMLsize,
            "sigma_star": solver.hparams.sigma_max,
            "max_iter": max_iter,
            "restart": restart
        },
    )
    # Kept from a debugging variant that cached the GMRES result up front:
    #gmres_matfile = loadmat("/tmp/helmholtz.mat")
    # Solve with kWave
    print("Solving with kWave")
    command = [
        "matlab",
        ''' -nodisplay -nosplash -nodesktop -nojvm -r "run('matlab/solve_with_kwave.m'); exit;"''',
    ]
    subprocess.run(command, capture_output=True)
    matfile = loadmat("/tmp/helmholtz.mat")
    kwave_solution = matfile["p"]
    kwave_wavefield = torch.tensor(kwave_solution)
    # NOTE(review): kwave_field_norm is recomputed identically after the GMRES
    # run below; this first computation appears redundant.
    kwave_field_norm = normalize_wavefield(
        torch.conj(kwave_wavefield), source_location
    )
    # Solve with gmres
    print("Solving with GMRES")
    # The '#"""' markers below let the GMRES run be commented out quickly
    # when iterating on the figure with a cached /tmp/helmholtz.mat.
    #"""
    command = [
        "matlab",
        ''' -nodisplay -nosplash -nodesktop -r "run('matlab/solve_with_gmres.m'); exit;"''',
    ]
    subprocess.run(command, capture_output=True)
    #"""
    matfile = loadmat("/tmp/helmholtz.mat")#gmres_matfile# loadmat("/tmp/helmholtz.mat")
    gmres_solution = matfile["p"]
    gmres_error = matfile["rel_error"]
    # Finding GMRES error curve
    kwave_wavefield = torch.tensor(kwave_solution)
    kwave_field_norm = normalize_wavefield(torch.conj(kwave_wavefield), source_location)
    gmres_solutions = torch.tensor(gmres_solution)
    gmres_norm = normalize_wavefield(gmres_solutions, source_location)
    # Worst-pixel error per checkpoint, with a 10-pixel border cropped.
    gmres_difference = torch.abs(kwave_field_norm.unsqueeze(0) - gmres_norm)[:, 10:-10, 10:-10]
    l_infty_gmres, indices = gmres_difference.reshape(gmres_difference.shape[0], -1).topk(1, 1)
    # Solving with model
    print("Solving with Neural network")
    # Shape the map to (1, 1, H, W) and move it to the solver's device.
    sos_map_tensor = (
        torch.tensor(sos_map).unsqueeze(0).unsqueeze(0).to(solver.device)
    ).float()
    with torch.no_grad():
        output = solver.forward(
            sos_map_tensor,
            num_iterations=1000,
            return_wavefields=True,
            return_states=False,
        )
    # Find losses
    losses = [solver.test_loss_function(x) for x in output["residuals"]]
    # Recombine the real/imag channels of each stored wavefield into complex.
    pytorch_wavefield = torch.cat(
        [x[:, 0] + 1j * x[:, 1] for x in output["wavefields"]]
    )
    kwave_wavefield = torch.tensor(kwave_solution, device=pytorch_wavefield.device)
    kwave_field_norm = normalize_wavefield(
        torch.conj(kwave_wavefield), source_location
    )
    model_field_norm = normalize_wavefield(pytorch_wavefield, source_location)
    # Per-iteration worst-pixel error of the model, 10-pixel border cropped.
    difference = torch.abs(kwave_field_norm.unsqueeze(0) - model_field_norm)[
        :, 10:-10, 10:-10
    ]
    l_infty, indices = difference.reshape(difference.shape[0], -1).topk(1, 1)
    # Error curves for the figure's trace panel (in percent).
    traces = [
        {
            "name": "Proposed",
            "x": np.linspace(1,max_iter,max_iter, endpoint=True),
            "y": 100*l_infty.cpu(),
            "color": "black"
        },
        {
            "name": "GMRES",
            "x": np.linspace(1,max_iter, l_infty_gmres.shape[0], endpoint=True),
            "y": 100*l_infty_gmres,
            "color": "darkorange"
        }
    ]
    if mode == "normal":
        show_example(
            sos_map,
            pytorch_wavefield[-1].cpu(),
            kwave_wavefield.cpu(),
            traces,
            traces_name = "$\ell_\infty$ error %",
            source_location=source_location,
        )
    elif mode == "large":
        show_example_large(
            sos_map,
            pytorch_wavefield[-1].cpu(),
            kwave_wavefield.cpu(),
            traces,
            traces_name = "$\ell_\infty$ error %",
            source_location=source_location,
        )
    plt.savefig(path + ".pgf")
|
{"/helmnet/architectures.py": ["/helmnet/dataloaders.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/source.py", "/helmnet/replaybuffer.py"], "/helmnet/__init__.py": ["/helmnet/architectures.py", "/helmnet/dataloaders.py", "/helmnet/hybridnet.py", "/helmnet/source.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/replaybuffer.py"], "/train.py": ["/helmnet/__init__.py"], "/test.py": ["/helmnet/__init__.py", "/helmnet/support_functions.py"], "/helmnet/hybridnet.py": ["/helmnet/architectures.py", "/helmnet/dataloaders.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/source.py", "/helmnet/replaybuffer.py"], "/produce_figures.py": ["/evaluate.py", "/helmnet/support_functions.py"], "/evaluate.py": ["/helmnet/__init__.py", "/helmnet/dataloaders.py"]}
|
29,249
|
SonyPony/helmnet
|
refs/heads/main
|
/helmnet/utils.py
|
import json
import os
from matplotlib import pyplot as plt
import numpy as np
def load_settings(jsonpath: str, add_full_path=True):
    """Read a ``settings.json`` file.

    Args:
        jsonpath (str): Path to the JSON settings file.
        add_full_path (bool, optional): When True, also store the file path
            under ``"path"`` and its basename without extension under
            ``"name"``. Defaults to True.

    Returns:
        dict: The parsed settings.
    """
    with open(jsonpath) as fh:
        settings = json.load(fh)
    if add_full_path:
        settings["path"] = jsonpath
        basename = os.path.basename(jsonpath)
        settings["name"] = os.path.splitext(basename)[0]
    return settings
def show_wavefield(wf, component="real", crange=0, colorbar=True, colormap="seismic"):
    """Helper function to plot a wavefield

    Args:
        wf (np.array): Wavefield to be shown. The *first* dimension holds the
            two components: wf[0] is the real part and wf[1] the imaginary
            part (the code indexes wf[0]/wf[1] below).
        component (str, optional): Which component to plot: can be "real" or "imag".
            Defaults to "real".
        crange (float, optional): The colormap will display values in (-crange, crange).
            If 0, it is derived from the maximum absolute amplitude. Defaults to 0.
        colorbar (bool, optional): If a colorbar has to be used. Defaults to True.
        colormap (str, optional): What colormap to use. Defaults to 'seismic'.

    Raises:
        ValueError: if crange is negative or component is not "real"/"imag".
    """
    if crange == 0:
        # Peak magnitude / 20.
        # NOTE(review): np.sum here reduces the whole array to a scalar, so
        # the enclosing np.max is a no-op — possibly an axis argument was
        # intended; verify against the figures this produces.
        crange = np.sqrt(np.max(np.sum(wf[0] ** 2 + wf[1] ** 2))) / 20
    elif crange < 0:
        raise ValueError("The range must be a positive number")
    if component == "real":
        _show_image(
            wf[0], vmin=-crange, vmax=crange, colorbar=colorbar, colormap=colormap
        )
    elif component == "imag":
        _show_image(
            wf[1], vmin=-crange, vmax=crange, colorbar=colorbar, colormap=colormap
        )
    else:
        raise ValueError('The component field can be either "real" or "imag".')
def log_wavefield(wavefield, logger, windowname="Wavefield"):
    """Log a wavefield image (real and imaginary panels) to tensorboard."""
    wavefield = wavefield.cpu()
    fig = plt.figure(figsize=(6, 3))
    plt.title(windowname)
    # Real part on the left panel, imaginary part on the right.
    for position, part in enumerate(("real", "imag"), start=1):
        plt.subplot(1, 2, position)
        show_wavefield(wavefield, component=part, crange=1)
    plt.tight_layout()
    logger.add_figure(windowname, fig, 0)
    plt.close()
def _show_image(image, vmin=None, vmax=None, colorbar=True, colormap="hot"):
    """Render an image with optional colorbar and explicit color limits.

    Args:
        image: Array to display.
        vmin (optional): Lower color limit; image minimum when None.
        vmax (optional): Upper color limit; image maximum when None.
        colorbar (bool, optional): Whether to draw a colorbar. Defaults to True.
        colormap (str, optional): Matplotlib colormap name. Defaults to 'hot'.
    """
    lower = np.min(image) if vmin is None else vmin
    upper = np.max(image) if vmax is None else vmax
    plt.imshow(image, vmin=lower, vmax=upper, cmap=colormap, aspect="equal")
    if colorbar:
        plt.colorbar()
# A function to rasterize components of a matplotlib figure while keeping
# axes, labels, etc as vector components
# https://brushingupscience.wordpress.com/2017/05/09/vector-and-raster-in-one-with-matplotlib/
from inspect import getmembers, isclass
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
def rasterize_and_save(fname, rasterize_list=None, fig=None, dpi=None, savefig_kw=None):
    """Save a figure with raster and vector components

    This function lets you specify which objects to rasterize at the export
    stage, rather than within each plotting call. Rasterizing certain
    components of a complex figure can significantly reduce file size.

    Code from:
    https://gist.github.com/hugke729/78655b82b885cde79e270f1c30da0b5f

    Inputs
    ------
    fname : str
        Output filename with extension
    rasterize_list : list (or object)
        List of objects to rasterize (or a single object to rasterize)
    fig : matplotlib figure object
        Defaults to current figure
    dpi : int
        Resolution (dots per inch) for rasterizing
    savefig_kw : dict, optional
        Extra keywords to pass to matplotlib.pyplot.savefig

    If rasterize_list is not specified, then all contour, pcolor, and
    collects objects (e.g., ``scatter, fill_between`` etc) will be
    rasterized

    Note: does not work correctly with round=True in Basemap

    Example
    -------
    Rasterize the contour, pcolor, and scatter plots, but not the line

    >>> import matplotlib.pyplot as plt
    >>> from numpy.random import random
    >>> X, Y, Z = random((9, 9)), random((9, 9)), random((9, 9))
    >>> fig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(ncols=2, nrows=2)
    >>> cax1 = ax1.contourf(Z)
    >>> cax2 = ax2.scatter(X, Y, s=Z)
    >>> cax3 = ax3.pcolormesh(Z)
    >>> cax4 = ax4.plot(Z[:, 0])
    >>> rasterize_list = [cax1, cax2, cax3]
    >>> rasterize_and_save('out.svg', rasterize_list, fig=fig, dpi=300)
    """
    # Behave like pyplot and act on current figure if no figure is specified
    fig = plt.gcf() if fig is None else fig

    # BUG FIX: the previous signature used a mutable default (savefig_kw={})
    # that this function then mutated below (savefig_kw["dpi"] = dpi), so a
    # dpi passed on one call silently leaked into every subsequent call.
    # Work on a fresh copy instead; passing {} explicitly still works.
    savefig_kw = {} if savefig_kw is None else dict(savefig_kw)

    # Need to set_rasterization_zorder in order for rasterizing to work
    zorder = -5  # Somewhat arbitrary, just ensuring less than 0

    if rasterize_list is None:
        # Have a guess at stuff that should be rasterised
        types_to_raster = ["QuadMesh", "Contour", "collections"]
        rasterize_list = []
        print(
            """
    No rasterize_list specified, so the following objects will
    be rasterized: """
        )
        # Get all axes, and then get objects within axes
        for ax in fig.get_axes():
            for item in ax.get_children():
                if any(x in str(item) for x in types_to_raster):
                    rasterize_list.append(item)
        print("\n".join([str(x) for x in rasterize_list]))
    else:
        # Allow rasterize_list to be input as an object to rasterize
        if not isinstance(rasterize_list, list):
            rasterize_list = [rasterize_list]

    for item in rasterize_list:
        # Whether or not plot is a contour plot is important
        is_contour = isinstance(item, matplotlib.contour.QuadContourSet) or isinstance(
            item, matplotlib.tri.TriContourSet
        )

        # Whether or not collection of lines
        # This is commented as we seldom want to rasterize lines
        # is_lines = isinstance(item, matplotlib.collections.LineCollection)

        # Whether or not current item is list of patches
        all_patch_types = tuple(x[1] for x in getmembers(matplotlib.patches, isclass))
        try:
            is_patch_list = isinstance(item[0], all_patch_types)
        except TypeError:
            is_patch_list = False

        # Convert to rasterized mode and then change zorder properties
        if is_contour:
            curr_ax = item.ax.axes
            curr_ax.set_rasterization_zorder(zorder)
            # For contour plots, need to set each part of the contour
            # collection individually
            for contour_level in item.collections:
                contour_level.set_zorder(zorder - 1)
                contour_level.set_rasterized(True)
        elif is_patch_list:
            # For list of patches, need to set zorder for each patch
            for patch in item:
                curr_ax = patch.axes
                curr_ax.set_rasterization_zorder(zorder)
                patch.set_zorder(zorder - 1)
                patch.set_rasterized(True)
        else:
            # For all other objects, we can just do it all at once
            curr_ax = item.axes
            curr_ax.set_rasterization_zorder(zorder)
            item.set_rasterized(True)
            item.set_zorder(zorder - 1)

    # dpi is a savefig keyword argument, but treat it as special since it is
    # important to this function
    if dpi is not None:
        savefig_kw["dpi"] = dpi

    # Save resulting figure
    fig.savefig(fname, **savefig_kw)
|
{"/helmnet/architectures.py": ["/helmnet/dataloaders.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/source.py", "/helmnet/replaybuffer.py"], "/helmnet/__init__.py": ["/helmnet/architectures.py", "/helmnet/dataloaders.py", "/helmnet/hybridnet.py", "/helmnet/source.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/replaybuffer.py"], "/train.py": ["/helmnet/__init__.py"], "/test.py": ["/helmnet/__init__.py", "/helmnet/support_functions.py"], "/helmnet/hybridnet.py": ["/helmnet/architectures.py", "/helmnet/dataloaders.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/source.py", "/helmnet/replaybuffer.py"], "/produce_figures.py": ["/evaluate.py", "/helmnet/support_functions.py"], "/evaluate.py": ["/helmnet/__init__.py", "/helmnet/dataloaders.py"]}
|
29,250
|
SonyPony/helmnet
|
refs/heads/main
|
/setup.py
|
import setuptools

# Read the long description from the README so PyPI renders it on the
# project page. Explicit encoding keeps the read platform-independent
# (the default text encoding is locale-dependent).
with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()

setuptools.setup(
    name="helmnet",
    version="0.1.0",
    author="Antonio Stanziola",
    author_email="a.stanziola@ucl.ac.uk",
    description="",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://bug.medphys.ucl.ac.uk",
    packages=setuptools.find_packages(),
    python_requires=">=3.7",
)
|
{"/helmnet/architectures.py": ["/helmnet/dataloaders.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/source.py", "/helmnet/replaybuffer.py"], "/helmnet/__init__.py": ["/helmnet/architectures.py", "/helmnet/dataloaders.py", "/helmnet/hybridnet.py", "/helmnet/source.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/replaybuffer.py"], "/train.py": ["/helmnet/__init__.py"], "/test.py": ["/helmnet/__init__.py", "/helmnet/support_functions.py"], "/helmnet/hybridnet.py": ["/helmnet/architectures.py", "/helmnet/dataloaders.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/source.py", "/helmnet/replaybuffer.py"], "/produce_figures.py": ["/evaluate.py", "/helmnet/support_functions.py"], "/evaluate.py": ["/helmnet/__init__.py", "/helmnet/dataloaders.py"]}
|
29,251
|
SonyPony/helmnet
|
refs/heads/main
|
/evaluate.py
|
from helmnet import IterativeSolver
import pytorch_lightning as pl
from torch.utils.data import DataLoader
from helmnet.dataloaders import get_dataset
from scipy.io import savemat
import numpy as np
class Evaluation:
    """Evaluation harness for a trained IterativeSolver checkpoint.

    Loads the model from `path` and a test set from `testset`, keeps the
    model frozen in eval mode, and exposes helpers to run the Lightning
    test loop, export test indices for MATLAB/GMRES comparison, and run
    single examples.
    """

    def __init__(self, path, testset, gpus):
        self.path = path
        self.testset = get_dataset(testset)
        self.testloader = DataLoader(
            self.testset, batch_size=32, num_workers=32, shuffle=False
        )
        self.gpus = gpus
        self.model = self.get_model()
        self.model.eval()
        self.model.freeze()

    def move_model_to_gpu(self):
        """Move the model to the first GPU listed in `self.gpus`."""
        self.model.to("cuda:" + str(self.gpus[0]))

    def results_on_test_set(self):
        """Run the Lightning test loop over the full test loader."""
        trainer = pl.Trainer(gpus=self.gpus)
        trainer.test(self.model, self.testloader)

    def compare_to_gmres(self):
        """Export the test-set indices so an external GMRES run can use the
        same data split."""
        # self.testset.dataset.save_for_matlab('testset.mat')
        savemat("test_indices.mat", {"test_indices": np.array(self.testset.indices)})

    def single_example(self, idx, get_wavefield=True, get_states=True, iterations=1000):
        """Run the solver on a single test sample.

        Args:
            idx: Index into the test set.
            get_wavefield (bool): Whether to return intermediate wavefields.
            get_states (bool): Whether to return intermediate states.
            iterations (int): Number of solver iterations.

        Returns:
            (output, losses): the model's output dict and the test loss of
            every intermediate residual.
        """
        sos_map = self.testset[idx].unsqueeze(0).to("cuda:" + str(self.gpus[0]))
        output = self.model.forward(
            sos_map,
            num_iterations=iterations,
            return_wavefields=get_wavefield,
            # BUG FIX: this previously passed `get_wavefield`, so the
            # `get_states` flag was silently ignored.
            return_states=get_states,
        )
        # Get loss for each intermediate residual
        losses = [self.model.test_loss_function(x) for x in output["residuals"]]
        return output, losses

    def get_model(self, domain_size=None, source_location=None):
        """Load the checkpoint and rebuild a frozen solver from it.

        Args:
            domain_size (optional): Override the checkpoint's domain size.
            source_location (optional): Override the source location.

        Returns:
            The rebuilt, frozen IterativeSolver.
        """
        # Loading model and its hyperparams
        model = IterativeSolver.load_from_checkpoint(self.path, strict=False)
        hparams = model.hparams
        # Customizing hparams if needed
        if domain_size is not None:
            hparams["domain_size"] = domain_size
        if source_location is not None:
            hparams["source_location"] = source_location
        new_model = IterativeSolver(**hparams)
        # loading weights and final setup
        new_model.f.load_state_dict(model.f.state_dict())
        new_model.set_laplacian()
        new_model.set_source()
        new_model.freeze()
        print("--- MODEL HYPERPARAMETERS ---")
        print(new_model.hparams)
        return new_model

    def set_domain_size(self, domain_size, source_location=None, source_map=None):
        """Resize the model's domain and reinitialize laplacian/sources.

        Exactly one of `source_location` / `source_map` should be provided;
        `source_location` takes precedence when both are given.
        """
        self.model.hparams.domain_size = domain_size
        self.model.f.domain_size = self.model.hparams.domain_size
        self.model.set_laplacian()
        if source_location is not None:
            self.model.set_multiple_sources([source_location])
        else:
            self.model.set_source_maps(source_map)
        self.model.f.init_by_size()
        # Propagate the new size to every encoder stage.
        for enc, size in zip(self.model.f.enc, self.model.f.states_dimension):
            enc.domain_size = size
if __name__ == "__main__":
    # BUG FIX: ArgumentParser was used below without ever being imported,
    # so running this script raised a NameError. Import it locally, keeping
    # the dependency scoped to script execution.
    from argparse import ArgumentParser

    parser = ArgumentParser()
    parser.add_argument(
        "--model_checkpoint",
        type=str,
        default="checkpoints/trained_weights.ckpt",
        help="Checkpoint file with model weights",
    )
    parser.add_argument(
        "--test_set",
        type=str,
        default="datasets/splitted_96/testset.ph",
        help="Test-set file",
    )
    parser.add_argument(
        "--gpu",
        type=int,
        default=1,
        help="Which gpu to use",
    )
    args = parser.parse_args()

    evaluator = Evaluation(
        path=args.model_checkpoint, testset=args.test_set, gpus=[args.gpu]
    )
    # Making results on the test set
    evaluator.results_on_test_set()
|
{"/helmnet/architectures.py": ["/helmnet/dataloaders.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/source.py", "/helmnet/replaybuffer.py"], "/helmnet/__init__.py": ["/helmnet/architectures.py", "/helmnet/dataloaders.py", "/helmnet/hybridnet.py", "/helmnet/source.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/replaybuffer.py"], "/train.py": ["/helmnet/__init__.py"], "/test.py": ["/helmnet/__init__.py", "/helmnet/support_functions.py"], "/helmnet/hybridnet.py": ["/helmnet/architectures.py", "/helmnet/dataloaders.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/source.py", "/helmnet/replaybuffer.py"], "/produce_figures.py": ["/evaluate.py", "/helmnet/support_functions.py"], "/evaluate.py": ["/helmnet/__init__.py", "/helmnet/dataloaders.py"]}
|
29,252
|
SonyPony/helmnet
|
refs/heads/main
|
/helmnet/replaybuffer.py
|
import collections
import numpy as np
from torch.utils.data import IterableDataset
from torch import stack
import random
# The ReplayBuffer class and Experience object is built on top of this tutorial:
# https://towardsdatascience.com/en-lightning-reinforcement-learning-a155c217c3de
# One sample stored in the replay buffer. Field meanings inferred from the
# buffer's sample() unpacking; the first five are stacked as tensors there,
# `iteration` is passed through untouched.
Experience = collections.namedtuple(
    "Experience",
    field_names=[
        "wavefield",     # current wavefield estimate
        "hidden_state",  # recurrent hidden state at this step
        "k_sq",          # squared wavenumber map
        "residual",      # residual of the current estimate
        "source",        # source map
        "iteration",     # iteration index this sample was produced at
    ],
)
class ReplayBuffer:
    """Fixed-capacity experience store with index-addressed writes.

    Slots are pre-allocated and overwritten in place via `append`; `sample`
    draws distinct slots uniformly at random and stacks the tensor fields.
    """

    def __init__(self, capacity: int):
        # Pre-allocate every slot; entries are filled/overwritten by append().
        self.buffer = [None] * capacity
        self.capacity = capacity

    def __len__(self):
        # NOTE: reports the capacity, not how many slots have been filled.
        return self.capacity

    def append(self, experience, index):
        """Store `experience` in slot `index`, replacing its content."""
        self.buffer[index] = experience

    def sample(self, batch_size: int):
        """Draw `batch_size` distinct slots and stack their tensor fields.

        Returns a tuple (wavefields, h_states, k_sqs, residual, source,
        iterations, indices) where the first five are stacked along a new
        leading batch dimension.
        """
        indices = np.random.choice(self.capacity, batch_size, replace=False)
        picked = [self.buffer[t] for t in indices]
        wavefields, h_states, k_sqs, residual, source, iterations = zip(*picked)
        # Stack each tensor field into a single batched tensor.
        wavefields, h_states, k_sqs, residual, source = (
            stack(field, 0)
            for field in (wavefields, h_states, k_sqs, residual, source)
        )
        return (wavefields, h_states, k_sqs, residual, source, iterations, indices)
|
{"/helmnet/architectures.py": ["/helmnet/dataloaders.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/source.py", "/helmnet/replaybuffer.py"], "/helmnet/__init__.py": ["/helmnet/architectures.py", "/helmnet/dataloaders.py", "/helmnet/hybridnet.py", "/helmnet/source.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/replaybuffer.py"], "/train.py": ["/helmnet/__init__.py"], "/test.py": ["/helmnet/__init__.py", "/helmnet/support_functions.py"], "/helmnet/hybridnet.py": ["/helmnet/architectures.py", "/helmnet/dataloaders.py", "/helmnet/spectral.py", "/helmnet/utils.py", "/helmnet/source.py", "/helmnet/replaybuffer.py"], "/produce_figures.py": ["/evaluate.py", "/helmnet/support_functions.py"], "/evaluate.py": ["/helmnet/__init__.py", "/helmnet/dataloaders.py"]}
|
29,253
|
indifferentalex/botticelli
|
refs/heads/master
|
/botticelli/scene.py
|
class Scene:
    """A named scene paired with a detector callable.

    The detector receives a params mapping and must return a
    ``(detected, params)`` pair: a truth flag plus the (possibly updated)
    params to thread through subsequent checks.

    Attributes:
        name (string): Descriptive name of what the scene consists of.
        detector (function): Callable that checks whether the scene is present.
    """

    def __init__(self, name, detector):
        self.name = name
        self.detector = detector

    def detected(self, params):
        """Run the detector and return its (flag, params) result."""
        is_present, updated_params = self.detector(params)
        return (is_present, updated_params)
|
{"/examples/calibration.py": ["/botticelli/utilities/__init__.py"], "/botticelli/__init__.py": ["/botticelli/scene.py", "/botticelli/action.py", "/botticelli/trigger.py"], "/examples/utilities_calibration.py": ["/botticelli/__init__.py"], "/botticelli/utilities/__init__.py": ["/botticelli/utilities/canvas.py"]}
|
29,254
|
indifferentalex/botticelli
|
refs/heads/master
|
/botticelli/utilities/detector_inspector.py
|
import botticelli
# The detector inspector ignores params returned by the detectors (no state flow),
# all cases should be written explicity
def inspect(scenes_and_params):
    """Print the detection result of each (scene, params-list) pair.

    The inspector ignores params returned by the detectors (no state flow),
    so every case to check must be listed explicitly by the caller.

    Args:
        scenes_and_params: iterable of ``(scene, all_params)`` tuples, where
            ``all_params`` is falsy (scene checked once with empty params)
            or a list of params dicts, each checked in turn.
    """
    for scene, all_params in scenes_and_params:
        if not all_params:
            # BUG FIX: the original used Python-2-only print statements,
            # a SyntaxError under Python 3. Parenthesized single-argument
            # print behaves identically on both interpreters.
            print(scene.name + ": " + str(scene.detected({})[0]))
        else:
            for params in all_params:
                print(scene.name + str(params) + ": " + str(scene.detected(params)[0]))
|
{"/examples/calibration.py": ["/botticelli/utilities/__init__.py"], "/botticelli/__init__.py": ["/botticelli/scene.py", "/botticelli/action.py", "/botticelli/trigger.py"], "/examples/utilities_calibration.py": ["/botticelli/__init__.py"], "/botticelli/utilities/__init__.py": ["/botticelli/utilities/canvas.py"]}
|
29,255
|
indifferentalex/botticelli
|
refs/heads/master
|
/examples/calibration.py
|
from context import botticelli
from botticelli.utilities import detector_inspector
# NOTE(review): detector_inspector.inspect() takes a required
# `scenes_and_params` argument, so this zero-argument call raises a
# TypeError at runtime. The example looks incomplete — TODO confirm the
# intended scenes/params to pass here.
detector_inspector.inspect()
|
{"/examples/calibration.py": ["/botticelli/utilities/__init__.py"], "/botticelli/__init__.py": ["/botticelli/scene.py", "/botticelli/action.py", "/botticelli/trigger.py"], "/examples/utilities_calibration.py": ["/botticelli/__init__.py"], "/botticelli/utilities/__init__.py": ["/botticelli/utilities/canvas.py"]}
|
29,256
|
indifferentalex/botticelli
|
refs/heads/master
|
/botticelli/utilities/pypette.py
|
from botticelli import utilities as canvas
from pymouse import PyMouseEvent
import webcolors
import time
# Module-level cursor state, refreshed by set_mouse_pos() on each click:
# the current pointer position and the screen colour under it.
mouse_position = canvas.mouse_position()
color_at_mouse = canvas.get_color_at(
    int(mouse_position["x"]), int(mouse_position["y"]))
# https://stackoverflow.com/a/9694246
def closest_colour(requested_colour):
    """Return the CSS3 colour name nearest to an (r, g, b) triple.

    Nearness is squared Euclidean distance in RGB space; on ties the
    name seen last wins (matching the original dict-overwrite behavior).
    Adapted from https://stackoverflow.com/a/9694246
    """
    best_name = None
    best_distance = None
    for hex_value, name in webcolors.css3_hex_to_names.items():
        r_c, g_c, b_c = webcolors.hex_to_rgb(hex_value)
        distance = ((r_c - requested_colour[0]) ** 2
                    + (g_c - requested_colour[1]) ** 2
                    + (b_c - requested_colour[2]) ** 2)
        # <= keeps the last equal-distance name, like the original dict did.
        if best_distance is None or distance <= best_distance:
            best_distance = distance
            best_name = name
    return best_name
def get_colour_name(requested_colour):
    """Return (exact_name, closest_name) for an (r, g, b) triple.

    ``exact_name`` is None when the colour has no exact CSS name; in that
    case ``closest_name`` is the nearest named colour.
    """
    try:
        exact = webcolors.rgb_to_name(requested_colour)
        return exact, exact
    except ValueError:
        # No exact match: fall back to the nearest named colour.
        return None, closest_colour(requested_colour)
def set_mouse_pos():
    # Refresh the module-level cursor position and the pixel colour under it.
    global mouse_position, color_at_mouse
    mouse_position = canvas.mouse_position()
    # Re-read the colour at the freshly updated absolute position.
    color_at_mouse = canvas.get_color_at(
        int(abs_pos()["x"]), int(abs_pos()["y"]))
def abs_pos():
    # Last sampled absolute mouse position as {"x": ..., "y": ...}.
    return mouse_position
def rel_pos():
    """Return the mouse position as screen-relative fractions, 3 decimals."""
    absolute = abs_pos()
    # 1.0 * forces float division (this module targets Python 2).
    return {
        "x": round(absolute["x"] / (1.0 * canvas.screen_width), 3),
        "y": round(absolute["y"] / (1.0 * canvas.screen_height), 3)
    }
def formatted_color():
    """Human-readable absolute/relative position and RGB of the cursor pixel."""
    pos_abs = abs_pos()
    pos_rel = rel_pos()
    return "X: {0}/{1} Y: {2}/{3} RGB: {4}, {5}, {6}".format(
        pos_abs["x"], pos_rel["x"], pos_abs["y"], pos_rel["y"],
        color_at_mouse["r"], color_at_mouse["g"], color_at_mouse["b"])
def named_color():
    """Closest CSS3 colour name for the pixel under the cursor."""
    rgb = (color_at_mouse["r"], color_at_mouse["g"], color_at_mouse["b"])
    # The exact name (first element) is intentionally discarded.
    _, closest = get_colour_name(rgb)
    return closest
def detailed_information():
    """Formatted position/colour report followed by the colour's name."""
    return "{0} - {1}".format(formatted_color(), named_color())
def absolute_parameters():
    """Copy-pastable absolute " abs: (x, y, r, g, b)" tuple string."""
    pos = abs_pos()
    values = (pos["x"], pos["y"], color_at_mouse["r"],
              color_at_mouse["g"], color_at_mouse["b"])
    return " abs: (" + ", ".join(str(v) for v in values) + ")"
def relative_parameters():
    """Copy-pastable relative " rel: (x, y, r, g, b)" tuple string."""
    pos = rel_pos()
    values = (pos["x"], pos["y"], color_at_mouse["r"],
              color_at_mouse["g"], color_at_mouse["b"])
    return " rel: (" + ", ".join(str(v) for v in values) + ")"
def print_click_information():
    """Print the detailed colour report plus both parameter tuples.

    BUG FIX: the original used Python-2-only print statements, which are a
    SyntaxError under Python 3. A parenthesized single-argument print
    behaves identically on both interpreters.
    """
    print(detailed_information())
    print(absolute_parameters())
    print(relative_parameters())
class ColorPicker(PyMouseEvent):
    """Global mouse listener: a left-button press samples and prints the
    pixel under the cursor; releasing the left button stops the listener."""

    def __init__(self):
        PyMouseEvent.__init__(self)

    def click(self, x, y, button, press):
        # Button 1 is the left mouse button.
        if button == 1:
            if press:
                set_mouse_pos()
                print_click_information()
            else:
                # Left-button release ends the listener loop.
                self.stop()
color_picker = ColorPicker()
# Blocks until the listener's stop() is called.
color_picker.run()

# Keep the process alive without pegging a CPU core: the previous
# `while True: a = 5` busy-loop burned 100% of a core doing nothing.
# `time` is already imported at the top of this module.
while True:
    time.sleep(1)
|
{"/examples/calibration.py": ["/botticelli/utilities/__init__.py"], "/botticelli/__init__.py": ["/botticelli/scene.py", "/botticelli/action.py", "/botticelli/trigger.py"], "/examples/utilities_calibration.py": ["/botticelli/__init__.py"], "/botticelli/utilities/__init__.py": ["/botticelli/utilities/canvas.py"]}
|
29,257
|
indifferentalex/botticelli
|
refs/heads/master
|
/botticelli/__init__.py
|
from _version import __version__
from .scene import Scene
from .action import Action
from .trigger import Trigger
|
{"/examples/calibration.py": ["/botticelli/utilities/__init__.py"], "/botticelli/__init__.py": ["/botticelli/scene.py", "/botticelli/action.py", "/botticelli/trigger.py"], "/examples/utilities_calibration.py": ["/botticelli/__init__.py"], "/botticelli/utilities/__init__.py": ["/botticelli/utilities/canvas.py"]}
|
29,258
|
indifferentalex/botticelli
|
refs/heads/master
|
/botticelli/trigger.py
|
class Trigger:
    """Pairs a scene with the action to perform when that scene is seen.

    Triggers are passed into actions (possibly alongside other triggers) to
    describe "when scene X is detected, do action Y".

    Attributes:
        scene (botticelli.Scene): Scene whose detection fires the action.
        action (botticelli.Action): Action performed on detection.
    """

    def __init__(self, scene, action):
        self.scene, self.action = scene, action
|
{"/examples/calibration.py": ["/botticelli/utilities/__init__.py"], "/botticelli/__init__.py": ["/botticelli/scene.py", "/botticelli/action.py", "/botticelli/trigger.py"], "/examples/utilities_calibration.py": ["/botticelli/__init__.py"], "/botticelli/utilities/__init__.py": ["/botticelli/utilities/canvas.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.