index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
40,462
|
colobas/aics-shparam
|
refs/heads/main
|
/aicsshparam/shtools.py
|
import vtk
import pyshtools
import numpy as np
from typing import Tuple
from vtk.util import numpy_support
from skimage import transform as sktrans
from skimage import filters as skfilters
from skimage import morphology as skmorpho
from sklearn import decomposition as skdecomp
from scipy import interpolate as sciinterp
from scipy import stats as scistats
EPS = 1e-12
def get_mesh_from_image(
    image: np.ndarray,
    sigma: float = 0,
    lcc: bool = True,
    translate_to_origin: bool = True,
):
    """Converts a numpy array into a vtkImageData and then into a 3d mesh
    using vtkContourFilter. The input is assumed to be binary and the
    isosurface value is set to 0.5.
    Optionally the input can be pre-processed by i) extracting the largest
    connected component and ii) applying a gaussian smooth to it. In case
    smooth is used, the image is binarized using threshold 1/e.
    Also, points at the edge of the image are set to zero (background)
    to make sure the mesh forms a manifold.

    Parameters
    ----------
    image : np.ndarray
        Input array where the mesh will be computed on

    Returns
    -------
    mesh : vtkPolyData
        3d mesh in VTK format
    img_output : np.ndarray
        Input image after pre-processing
    centroid : tuple of floats
        x, y, z coordinates of the mesh centroid

    Other parameters
    ----------------
    lcc : bool, optional
        Whether or not to compute the mesh only on the largest
        connected component found in the input image, default is True.
    sigma : float, optional
        The degree of smooth to be applied to the input image, default
        is 0 (no smooth).
    translate_to_origin : bool, optional
        Whether or not to translate the mesh to the origin (0,0,0),
        default is True.

    Raises
    ------
    ValueError
        If no foreground voxels remain after pre-processing.
    """
    img = image.copy()
    # VTK requires YXZ
    img = np.swapaxes(img, 0, 2)
    # Extracting the largest connected component
    if lcc:
        img = skmorpho.label(img.astype(np.uint8))
        counts = np.bincount(img.flatten())
        # Label ids start at 1 (0 is background); keep the most frequent
        # foreground label. Use a fresh name so the `lcc` flag is not
        # shadowed by the label id.
        largest = 1 + np.argmax(counts[1:])
        img[img != largest] = 0
        img[img == largest] = 1
    # Smooth the input image and binarize it using threshold 1/e
    if sigma:
        img = skfilters.gaussian(img.astype(np.float32), sigma=(sigma, sigma, sigma))
        img[img < 1.0 / np.exp(1.0)] = 0
        img[img > 0] = 1
        if img.sum() == 0:
            raise ValueError(
                "No foreground voxels found after pre-processing. Try using sigma=0."
            )
    # Set image border to 0 so that the mesh forms a manifold
    img[[0, -1], :, :] = 0
    img[:, [0, -1], :] = 0
    img[:, :, [0, -1]] = 0
    img = img.astype(np.float32)
    if img.sum() == 0:
        raise ValueError(
            "No foreground voxels found after pre-processing."
            "Is the object of interest centered?"
        )
    # Create vtkImageData
    imgdata = vtk.vtkImageData()
    imgdata.SetDimensions(img.shape)
    img = img.transpose(2, 1, 0)
    img_output = img.copy()
    img = img.flatten()
    arr = numpy_support.numpy_to_vtk(img, array_type=vtk.VTK_FLOAT)
    arr.SetName("Scalar")
    imgdata.GetPointData().SetScalars(arr)
    # Create 3d mesh by contouring the binary volume at 0.5
    cf = vtk.vtkContourFilter()
    cf.SetInputData(imgdata)
    cf.SetValue(0, 0.5)
    cf.Update()
    mesh = cf.GetOutput()
    # Calculate the mesh centroid
    coords = numpy_support.vtk_to_numpy(mesh.GetPoints().GetData())
    centroid = coords.mean(axis=0, keepdims=True)
    # Bug fix: translation is now conditional on translate_to_origin,
    # which the original ignored (it always translated).
    if translate_to_origin:
        coords -= centroid
        mesh.GetPoints().SetData(numpy_support.numpy_to_vtk(coords))
    return mesh, img_output, tuple(centroid.squeeze())
def rotate_image_2d(image: np.array, angle: float, interpolation_order: int = 0):
    """Rotate a multichannel image in 2D by a given angle.

    The expected input shape is (C,Z,Y,X) and the rotation is applied
    clock-wise around the center of the image.

    Parameters
    ----------
    image : np.array
        Input array of shape (C,Z,Y,X).
    angle : float
        Angle in degrees
    interpolation_order : int
        Order of interpolation used during the image rotation

    Returns
    -------
    img_rot : np.array
        Rotated image
    """
    if image.ndim != 4:
        raise ValueError(
            f"Invalid shape {image.shape} of input image."
            "Expected 4 dimensional images as input."
        )
    if not isinstance(interpolation_order, int):
        raise ValueError("Only integer values are accepted for interpolation order.")
    # skimage rotates over the leading two axes, so move z to the back
    swapped = np.swapaxes(image, 1, 3)
    rotated = np.array(
        [
            sktrans.rotate(
                image=channel,
                angle=-angle,
                resize=True,
                order=interpolation_order,
                preserve_range=True,
            )
            for channel in swapped
        ]
    )
    # Restore the original axis order and input dtype
    return np.swapaxes(rotated, 1, 3).astype(swapped.dtype)
def align_image_2d(
    image: np.array, alignment_channel: int = None, make_unique: bool = False
):
    """Align a multichannel 3D image based on the channel
    specified by alignment_channel. The expected shape of
    image is (C,Z,Y,X) or (Z,Y,X).

    The rotation angle is obtained from a PCA of the (x,y)
    coordinates of the foreground voxels of the reference channel.

    Parameters
    ----------
    image : np.array
        Input array of shape (C,Z,Y,X) or (Z,Y,X).
    alignment_channel : int
        Index of the channel to be used as reference for alignment. The
        alignment will be propagated to all other channels.
    make_unique : bool
        Set true to make sure the alignment rotation is unique: the
        angle is disambiguated so the rotated x coordinates have
        non-negative skewness.

    Returns
    -------
    img_aligned : np.array
        Aligned image
    angle : float
        Angle (degrees) used to align the shape.

    Raises
    ------
    ValueError
        If the image is not 3D/4D, or if a 4D image is given without a
        valid integer alignment channel.
    """
    if image.ndim not in [3, 4]:
        raise ValueError(f"Invalid shape {image.shape} of input image.")
    if image.ndim == 4:
        if alignment_channel is None:
            raise ValueError(
                "An alignment channel must be provided with multichannel images."
            )
        if not isinstance(alignment_channel, int):
            raise ValueError("Number of alignment channel must be an integer")
    if image.ndim == 3:
        # Promote single-channel input to (1,Z,Y,X)
        alignment_channel = 0
        image = image.reshape(1, *image.shape)
    # (x,y) coordinates of the foreground voxels of the reference channel
    z, y, x = np.where(image[alignment_channel])
    xy = np.hstack([x.reshape(-1, 1), y.reshape(-1, 1)])
    # Principal directions of the xy point cloud
    pca = skdecomp.PCA(n_components=2)
    pca = pca.fit(xy)
    eigenvecs = pca.components_
    if make_unique:
        # Calculate angle with arctan2
        angle = 180.0 * np.arctan2(eigenvecs[0][1], eigenvecs[0][0]) / np.pi
        # Rotate x coordinates
        x_rot = (x - x.mean()) * np.cos(np.pi * angle / 180) + (y - y.mean()) * np.sin(
            np.pi * angle / 180
        )
        # Check the skewness of the rotated x coordinate
        xsk = scistats.skew(x_rot)
        if xsk < 0.0:
            # Flip 180 degrees so the skewness becomes non-negative,
            # making the chosen angle unique
            angle += 180
        # Map all angles to anti-clockwise
        angle = angle % 360
    else:
        # Calculate smallest angle; guard against a near-vertical
        # principal axis (division by ~0)
        angle = 0.0
        if np.abs(eigenvecs[0][0]) > EPS:
            angle = 180.0 * np.arctan(eigenvecs[0][1] / eigenvecs[0][0]) / np.pi
    # Apply skimage rotation clock-wise
    img_aligned = rotate_image_2d(image=image, angle=angle)
    return img_aligned, angle
def apply_image_alignment_2d(image: np.array, angle: float):
    """Apply a previously computed 2D alignment angle to a
    multichannel 3D image.

    Parameters
    ----------
    image : np.array
        Input array of shape (C,Z,Y,X) or (Z,Y,X).
    angle : float
        2D rotation angle in degrees

    Returns
    -------
    img_aligned : np.array
        Aligned image
    """
    if image.ndim not in [3, 4]:
        raise ValueError(f"Invalid shape {image.shape} of input image.")
    # Promote a single-channel (Z,Y,X) volume to (1,Z,Y,X)
    stack = image if image.ndim == 4 else image.reshape(1, *image.shape)
    return rotate_image_2d(image=stack, angle=angle)
def update_mesh_points(
    mesh: vtk.vtkPolyData, x_new: np.array, y_new: np.array, z_new: np.array
):
    """Replace the xyz coordinates of the points of the input mesh
    with the coordinates provided and recompute the surface normals.

    Parameters
    ----------
    mesh : vtkPolyData
        Mesh in VTK format to be updated.
    x_new, y_new and z_new : np.array
        Arrays containing the new coordinates.

    Returns
    -------
    mesh_updated : vtkPolyData
        Mesh with updated coordinates and freshly computed normal
        vectors.
    """
    new_coords = np.c_[x_new, y_new, z_new]
    mesh.GetPoints().SetData(numpy_support.numpy_to_vtk(new_coords))
    mesh.Modified()
    # The displaced surface invalidates the old normals; recompute them
    normal_filter = vtk.vtkPolyDataNormals()
    normal_filter.SetInputData(mesh)
    normal_filter.Update()
    return normal_filter.GetOutput()
def get_even_reconstruction_from_grid(
    grid: np.array, npoints: int = 512, centroid: Tuple = (0, 0, 0)
):
    """Converts a parametric 2D grid of type (lon,lat,rad) into
    a 3d mesh. lon in [0,2pi], lat in [0,pi]. The method uses
    a spherical mesh with an even distribution of points. The
    even distribution is obtained via the Fibonacci grid rule.

    Parameters
    ----------
    grid : np.array
        Input grid where the element grid[i,j] represents the
        radial coordinate at longitude i*2pi/grid.shape[0] and
        latitude j*pi/grid.shape[1].

    Returns
    -------
    mesh : vtkPolyData
        Mesh that represents the input parametric grid.

    Other parameters
    ----------------
    npoints : int
        Number of points in the initial spherical mesh.
    centroid : tuple of floats, optional
        x, y and z coordinates of the centroid where the mesh
        will be translated to, default is (0,0,0).
    """
    res_lat = grid.shape[0]
    res_lon = grid.shape[1]
    # Creates an interpolator for the radius as a function of (lon,lat)
    lon = np.linspace(start=0, stop=2 * np.pi, num=res_lon, endpoint=True)
    lat = np.linspace(start=0, stop=1 * np.pi, num=res_lat, endpoint=True)
    fgrid = sciinterp.RectBivariateSpline(lon, lat, grid.T)
    # Create x,y,z coordinates based on the Fibonacci Lattice
    # http://extremelearning.com.au/evenly-distributing-points-on-a-sphere/
    golden_ratio = 0.5 * (1 + np.sqrt(5))
    idxs = np.arange(0, npoints, dtype=np.float32)
    # Polar angle theta in [0,pi]; azimuth phi wrapped to [-pi,pi)
    fib_theta = np.arccos(2 * ((idxs + 0.5) / npoints) - 1)
    fib_phi = (2 * np.pi * (idxs / golden_ratio)) % (2 * np.pi) - np.pi
    # Convert to the (lon,lat) convention used by the interpolator
    fib_lat = fib_theta
    fib_lon = fib_phi + np.pi
    # Interpolated radius at each Fibonacci point
    fib_grid = fgrid.ev(fib_lon, fib_lat)
    # Assign to sphere: spherical -> cartesian, translated to centroid
    fib_x = centroid[0] + fib_grid * np.sin(fib_theta) * np.cos(fib_phi)
    fib_y = centroid[1] + fib_grid * np.sin(fib_theta) * np.sin(fib_phi)
    fib_z = centroid[2] + fib_grid * np.cos(fib_theta)
    # Add points (x,y,z) to a polydata
    points = vtk.vtkPoints()
    for (x, y, z) in zip(fib_x, fib_y, fib_z):
        points.InsertNextPoint(x, y, z)
    rec = vtk.vtkPolyData()
    rec.SetPoints(points)
    # Calculates the connections between points via Delaunay triangulation
    delaunay = vtk.vtkDelaunay3D()
    delaunay.SetInputData(rec)
    delaunay.Update()
    # Extract the outer surface of the Delaunay volume
    surface_filter = vtk.vtkDataSetSurfaceFilter()
    surface_filter.SetInputData(delaunay.GetOutput())
    surface_filter.Update()
    # Smooth the mesh to get a more even distribution of points
    NITER_SMOOTH = 128
    smooth = vtk.vtkSmoothPolyDataFilter()
    smooth.SetInputData(surface_filter.GetOutput())
    smooth.SetNumberOfIterations(NITER_SMOOTH)
    smooth.FeatureEdgeSmoothingOff()
    smooth.BoundarySmoothingOn()
    smooth.Update()
    rec.DeepCopy(smooth.GetOutput())
    # Compute normal vectors
    normals = vtk.vtkPolyDataNormals()
    normals.SetInputData(rec)
    normals.Update()
    mesh = normals.GetOutput()
    return mesh
def get_even_reconstruction_from_coeffs(
    coeffs: np.array, lrec: int = 0, npoints: int = 512, centroid: Tuple = (0, 0, 0)
):
    """Converts a set of spherical harmonic coefficients into
    a 3d mesh using the Fibonacci grid for generating a mesh
    with a more even distribution of points.

    Parameters
    ----------
    coeffs : np.array
        Input array of spherical harmonic coefficients. This
        array has dimensions 2xLxM, where the first dimension
        is 0 for cosine-associated coefficients and 1 for
        sine-associated coefficients. Second and third dimensions
        represent the expansion parameters (l,m).

    Returns
    -------
    mesh : vtkPolyData
        Mesh that represents the input coefficients.
    grid : np.array
        Parametric grid expanded from the coefficients.

    Other parameters
    ----------------
    lrec : int, optional
        Only coefficients l<lrec will be used for creating the
        mesh, default is 0 meaning all coefficients available
        in the matrix coefficients will be used.
    npoints : int, optional
        Number of points in the initial spherical mesh.
    centroid : tuple of floats, optional
        x, y and z coordinates the mesh will be translated to,
        default is (0,0,0). Forwarded to
        get_even_reconstruction_from_grid (previously not
        configurable from this function).

    Notes
    -----
    The mesh resolution is set by npoints and therefore not
    affected by lrec.
    """
    coeffs_ = coeffs.copy()
    # Zero out high-degree coefficients when truncation is requested
    if (lrec > 0) and (lrec < coeffs_.shape[1]):
        coeffs_[:, lrec:, :] = 0
    # Expand the (possibly truncated) coefficients into a parametric grid
    grid = pyshtools.expand.MakeGridDH(coeffs_, sampling=2)
    mesh = get_even_reconstruction_from_grid(grid, npoints, centroid)
    return mesh, grid
def get_reconstruction_from_grid(grid: np.array, centroid: Tuple = (0, 0, 0)):
    """Converts a parametric 2D grid of type (lon,lat,rad) into
    a 3d mesh. lon in [0,2pi], lat in [0,pi].

    Parameters
    ----------
    grid : np.array
        Input grid where the element grid[i,j] represents the
        radial coordinate at longitude i*2pi/grid.shape[0] and
        latitude j*pi/grid.shape[1].

    Returns
    -------
    mesh : vtkPolyData
        Mesh that represents the input parametric grid.

    Other parameters
    ----------------
    centroid : tuple of floats, optional
        x, y and z coordinates of the centroid where the mesh
        will be translated to, default is (0,0,0).
    """
    res_lat = grid.shape[0]
    res_lon = grid.shape[1]
    # Creates an initial spherical mesh with right dimensions.
    rec = vtk.vtkSphereSource()
    rec.SetPhiResolution(res_lat + 2)
    rec.SetThetaResolution(res_lon)
    rec.Update()
    rec = rec.GetOutput()
    grid_ = grid.T.flatten()
    # Update the points coordinates of the spherical mesh according to the
    # input grid. The two pole points are stored first (indices 0 and 1,
    # handled below), hence the k + 2 offset.
    for j, lon in enumerate(np.linspace(0, 2 * np.pi, num=res_lon, endpoint=False)):
        for i, lat in enumerate(
            np.linspace(np.pi / (res_lat + 1), np.pi, num=res_lat, endpoint=False)
        ):
            theta = lat
            phi = lon - np.pi
            k = j * res_lat + i
            # Spherical -> cartesian, translated to the centroid
            x = centroid[0] + grid_[k] * np.sin(theta) * np.cos(phi)
            y = centroid[1] + grid_[k] * np.sin(theta) * np.sin(phi)
            z = centroid[2] + grid_[k] * np.cos(theta)
            rec.GetPoints().SetPoint(k + 2, x, y, z)
    # Update coordinates of north and south pole points with the mean
    # radius of the first/last latitude ring respectively
    north = grid_[::res_lat].mean()
    south = grid_[(res_lat - 1) :: res_lat].mean()
    rec.GetPoints().SetPoint(0, centroid[0] + 0, centroid[1] + 0, centroid[2] + north)
    rec.GetPoints().SetPoint(1, centroid[0] + 0, centroid[1] + 0, centroid[2] - south)
    # Compute normal vectors
    normals = vtk.vtkPolyDataNormals()
    normals.SetInputData(rec)
    # Set splitting off to avoid output mesh from having different number of
    # points compared to input
    normals.SplittingOff()
    normals.Update()
    mesh = normals.GetOutput()
    return mesh
def get_reconstruction_from_coeffs(coeffs: np.array, lrec: int = 0):
    """Converts a set of spherical harmonic coefficients into
    a 3d mesh.

    Parameters
    ----------
    coeffs : np.array
        Input array of spherical harmonic coefficients. This
        array has dimensions 2xLxM, where the first dimension
        is 0 for cosine-associated coefficients and 1 for
        sine-associated coefficients. Second and third dimensions
        represent the expansion parameters (l,m).

    Returns
    -------
    mesh : vtkPolyData
        Mesh that represents the input parametric grid.
    grid : np.array
        Parametric grid expanded from the coefficients.

    Other parameters
    ----------------
    lrec : int, optional
        Degree of the reconstruction. If lrec<l, then only
        coefficients l<lrec will be used for creating the mesh.
        If lrec>l, then the mesh will be oversampled.
        Default is 0 meaning all coefficients
        available in the matrix coefficients will be used.

    Notes
    -----
    The mesh resolution is set by the size of the coefficients
    matrix and therefore not affected by lrec.
    """
    # Degree of the expansion
    lmax = coeffs.shape[1]
    if lrec == 0:
        lrec = lmax
    # Create array (oversampled if lrec > lmax)
    coeffs_ = np.zeros((2, lrec, lrec), dtype=np.float32)
    # Adjust lrec to the expansion degree
    if lrec > lmax:
        lrec = lmax
    # Copy coefficients
    coeffs_[:, :lrec, :lrec] = coeffs[:, :lrec, :lrec]
    # Expand into a grid
    grid = pyshtools.expand.MakeGridDH(coeffs_, sampling=2)
    # Get mesh
    mesh = get_reconstruction_from_grid(grid)
    return mesh, grid
def get_reconstruction_error(grid_input: np.array, grid_rec: np.array):
    """Return the mean squared error between two parametric grids.

    When applied to the input parametric grid and its corresponding
    reconstructed version, it gives an idea of the quality of the
    parametrization, with low values indicating a good parametrization.

    Parameters
    ----------
    grid_input : np.array
        Parametric grid
    grid_rec : np.array
        Reconstructed parametric grid

    Returns
    -------
    mse : float
        Mean square error
    """
    residual = grid_input - grid_rec
    return np.square(residual).mean()
def save_polydata(mesh: vtk.vtkPolyData, filename: str):
"""Saves a mesh as a vtkPolyData file.
Parameters
----------
mesh : vtkPolyData
Input mesh
filename : str
File path where the mesh will be saved
output_type : vtk or ply
Format of output polydata file
"""
# Output file format
output_type = filename.split(".")[-1]
if output_type not in ["vtk", "ply"]:
raise ValueError(
f"Output format {output_type} not supported. Please use vtk or ply."
)
if output_type == "vtk":
writer = vtk.vtkPolyDataWriter()
else:
writer = vtk.vtkPLYWriter()
writer.SetInputData(mesh)
writer.SetFileName(filename)
writer.Write()
|
{"/aicsshparam/shparam.py": ["/aicsshparam/__init__.py"]}
|
40,463
|
colobas/aics-shparam
|
refs/heads/main
|
/aicsshparam/tests/dummy_test.py
|
import numpy as np
def test_dummy():
    """Smoke test: an all-ones array with one bumped entry stays positive."""
    arr = np.ones((5, 5))
    arr[0, 0] = 2
    assert np.any(arr > 0)
|
{"/aicsshparam/shparam.py": ["/aicsshparam/__init__.py"]}
|
40,464
|
colobas/aics-shparam
|
refs/heads/main
|
/aicsshparam/bin/__init__.py
|
# This is necessary here for packaging
|
{"/aicsshparam/shparam.py": ["/aicsshparam/__init__.py"]}
|
40,465
|
colobas/aics-shparam
|
refs/heads/main
|
/aicsshparam/shparam.py
|
import warnings
import pyshtools
import numpy as np
from vtk.util import numpy_support
from skimage import transform as sktrans
from scipy import interpolate as spinterp
from . import shtools
def get_shcoeffs(
    image: np.array,
    lmax: int,
    sigma: float = 0,
    compute_lcc: bool = True,
    alignment_2d: bool = True,
    make_unique: bool = False,
):
    """Compute spherical harmonics coefficients that describe an object stored as
    an image.

    Calculates the spherical harmonics coefficients that parametrize the shape
    formed by the foreground set of voxels in the input image. The input image
    does not need to be binary and all foreground voxels (background=0) are used
    in the computation. Foreground voxels must form a single connected component.
    If you are sure that this is the case for the input image, you can set
    compute_lcc to False to speed up the calculation. In addition, the shape is
    expected to be centered in the input image.

    Parameters
    ----------
    image : ndarray
        Input image. Expected to have shape ZYX.
    lmax : int
        Order of the spherical harmonics parametrization. The higher the order
        the more shape details are represented.

    Returns
    -------
    coeffs_dict : dict
        Dictionary with the spherical harmonics coefficients and the mean square
        error between input and its parametrization
    grid_rec : ndarray
        Parametric grid representing sh parametrization
    image_ : ndarray
        Input image after pre-processing (lcc calculation, smooth and binarization).
    mesh : vtkPolyData
        Polydata representation of image_.
    grid_down : ndarray
        Parametric grid representing input object.
    transform : tuple of floats
        (xc, yc, zc, angle) if alignment_2d is True or
        (xc, yc, zc) if alignment_2d is False. (xc, yc, zc) are the coordinates
        of the shape centroid after alignment; angle is the angle used to align
        the image

    Other parameters
    ----------------
    sigma : float, optional
        The degree of smooth to be applied to the input image, default is 0 (no
        smooth)
    compute_lcc : bool, optional
        Whether to compute the largest connected component before applying the
        spherical harmonic parametrization, default is True. Set compute_lcc to
        False in case you are sure the input image contains a single connected
        component. It is crucial that parametrization is calculated on a single
        connected component object.
    alignment_2d : bool
        Whether the image should be aligned in 2d. Default is True.
    make_unique : bool
        Set true to make sure the alignment rotation is unique.

    Notes
    -----
    Alignment mode '2d' allows for keeping the z axis unchanged which might be
    important for some applications.

    Examples
    --------
    import numpy as np
    from aicsshparam import shparam, shtools

    img = np.ones((32,32,32), dtype=np.uint8)

    (coeffs, grid_rec), (image_, mesh, grid, transform) =
        shparam.get_shcoeffs(image=img, lmax=2)
    mse = shtools.get_reconstruction_error(grid, grid_rec)

    print('Coefficients:', coeffs)

    >>> Coefficients: {'shcoeffs_L0M0C': 18.31594310878251, 'shcoeffs_L0M1C': 0.0,
    'shcoeffs_L0M2C': 0.0, 'shcoeffs_L1M0C': 0.020438775421611564, 'shcoeffs_L1M1C':
    -0.0030960466571801513, 'shcoeffs_L1M2C': 0.0, 'shcoeffs_L2M0C':
    -0.0185688727281408, 'shcoeffs_L2M1C': -2.9925077712704384e-05,
    'shcoeffs_L2M2C': -0.009087503958673892, 'shcoeffs_L0M0S': 0.0,
    'shcoeffs_L0M1S': 0.0, 'shcoeffs_L0M2S': 0.0, 'shcoeffs_L1M0S': 0.0,
    'shcoeffs_L1M1S': 3.799611612562637e-05, 'shcoeffs_L1M2S': 0.0,
    'shcoeffs_L2M0S': 0.0, 'shcoeffs_L2M1S': 3.672543904347801e-07,
    'shcoeffs_L2M2S': 0.0002230857005948496}

    print('Error:', mse)

    >>> Error: 2.3738182456948795
    """
    if len(image.shape) != 3:
        raise ValueError(
            "Incorrect dimensions: {}. Expected 3 dimensions.".format(image.shape)
        )
    if image.sum() == 0:
        raise ValueError("No foreground voxels found. Is the input image empty?")
    # Binarize the input. We assume that everything that is not background will
    # be use for parametrization
    image_ = image.copy()
    image_[image_ > 0] = 1
    # Alignment
    if alignment_2d:
        # Align the points such that the longest axis of the 2d
        # xy max projected shape will be horizontal (along x)
        image_, angle = shtools.align_image_2d(image=image_, make_unique=make_unique)
        image_ = image_.squeeze()
    # Converting the input image into a mesh using regular marching cubes
    mesh, image_, centroid = shtools.get_mesh_from_image(image=image_, sigma=sigma)
    # Sanity check: the voxel at the centroid (indexed in ZYX order via
    # [::-1]) should belong to the object
    if not image_[tuple([int(u) for u in centroid[::-1]])]:
        warnings.warn(
            "Mesh centroid seems to fall outside the object. This indicates\
            the mesh may not be a manifold suitable for spherical harmonics\
            parameterization."
        )
    # Get coordinates of mesh points
    coords = numpy_support.vtk_to_numpy(mesh.GetPoints().GetData())
    x = coords[:, 0]
    y = coords[:, 1]
    z = coords[:, 2]
    # Returned transform: centroid plus, when aligned, the rotation angle
    transform = centroid + ((angle,) if alignment_2d else ())
    # Translate and update mesh normals
    mesh = shtools.update_mesh_points(mesh, x, y, z)
    # Cartesian to spherical coordinates convertion
    rad = np.sqrt(x ** 2 + y ** 2 + z ** 2)
    lat = np.arccos(np.divide(z, rad, out=np.zeros_like(rad), where=(rad != 0)))
    lon = np.pi + np.arctan2(y, x)
    # Creating a meshgrid data from (lon,lat,r)
    points = np.concatenate(
        [np.array(lon).reshape(-1, 1), np.array(lat).reshape(-1, 1)], axis=1
    )
    grid_lon, grid_lat = np.meshgrid(
        np.linspace(start=0, stop=2 * np.pi, num=256, endpoint=True),
        np.linspace(start=0, stop=1 * np.pi, num=128, endpoint=True),
    )
    # Interpolate the (lon,lat,r) data into a grid
    grid = spinterp.griddata(points, rad, (grid_lon, grid_lat), method="nearest")
    # Fit grid data with SH. Look at pyshtools for detail.
    coeffs = pyshtools.expand.SHExpandDH(grid, sampling=2, lmax_calc=lmax)
    # Reconstruct grid. Look at pyshtools for detail.
    grid_rec = pyshtools.expand.MakeGridDH(coeffs, sampling=2)
    # Resize the input grid to match the size of the reconstruction
    grid_down = sktrans.resize(grid, output_shape=grid_rec.shape, preserve_range=True)
    # Create (l,m) keys for the coefficient dictionary
    lvalues = np.repeat(np.arange(lmax + 1).reshape(-1, 1), lmax + 1, axis=1)
    keys = []
    for suffix in ["C", "S"]:
        for (l, m) in zip(lvalues.flatten(), lvalues.T.flatten()):
            keys.append(f"shcoeffs_L{l}M{m}{suffix}")
    coeffs_dict = dict(zip(keys, coeffs.flatten()))
    return (coeffs_dict, grid_rec), (image_, mesh, grid_down, transform)
|
{"/aicsshparam/shparam.py": ["/aicsshparam/__init__.py"]}
|
40,466
|
colobas/aics-shparam
|
refs/heads/main
|
/aicsshparam/__init__.py
|
# -*- coding: utf-8 -*-
"""Top-level package for aics-shparam."""
__author__ = "Matheus Viana"
__email__ = "matheus.viana@alleninstitute.org"
# Do not edit this string manually, always use bumpversion
# Details in CONTRIBUTING.md
__version__ = "0.1.1"
def get_module_version():
    """Return the package version string defined in ``__version__``."""
    return __version__
|
{"/aicsshparam/shparam.py": ["/aicsshparam/__init__.py"]}
|
40,467
|
fkchong/mydjsite02
|
refs/heads/master
|
/empexp/apps.py
|
from django.apps import AppConfig
class EmpexpConfig(AppConfig):
    """Django application configuration for the empexp app."""

    name = 'empexp'
|
{"/userprefbook/views.py": ["/userprefbook/forms.py", "/userprefbook/models.py"], "/empexp/models.py": ["/site02/models.py"], "/empexp/views.py": ["/empexp/forms.py"], "/userprefbook/admin.py": ["/userprefbook/models.py"], "/empexp/forms.py": ["/empexp/models.py"], "/userprefbook/forms.py": ["/userprefbook/models.py"]}
|
40,468
|
fkchong/mydjsite02
|
refs/heads/master
|
/empexp/migrations/0002_auto_20210627_1310.py
|
# Generated by Django 3.1.12 on 2021-06-27 05:10
from django.db import migrations
class Migration(migrations.Migration):
    """Remove the firstname and lastname fields from the experience model.

    NOTE(review): presumably these duplicated data already held on the
    related employee record (site02.models.employee) — verify against
    the app history before editing.
    """

    dependencies = [
        ('empexp', '0001_initial'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='experience',
            name='firstname',
        ),
        migrations.RemoveField(
            model_name='experience',
            name='lastname',
        ),
    ]
|
{"/userprefbook/views.py": ["/userprefbook/forms.py", "/userprefbook/models.py"], "/empexp/models.py": ["/site02/models.py"], "/empexp/views.py": ["/empexp/forms.py"], "/userprefbook/admin.py": ["/userprefbook/models.py"], "/empexp/forms.py": ["/empexp/models.py"], "/userprefbook/forms.py": ["/userprefbook/models.py"]}
|
40,469
|
fkchong/mydjsite02
|
refs/heads/master
|
/userprefbook/views.py
|
from django.shortcuts import render
from django.views.generic import FormView
from .forms import upbform, bookform
from .models import userprefbook
from django.contrib import messages
# Create your views here.
class upbformview(FormView):
    """Form view for submitting user book-preference entries.

    Renders userprefbook/index.html; on a valid submission the form is
    saved and an info message is flashed, then the user is redirected
    back to the same page.
    """

    model = userprefbook
    form_class = upbform
    template_name = "userprefbook/index.html"

    def get_success_url(self):
        # Redirect back to the page the form was submitted from
        return self.request.path

    def form_valid(self, form):
        # Persist the entry and notify the user
        form.save()
        messages.add_message(self.request, messages.INFO, 'Save successfully.')
        return super().form_valid(form)

    def form_invalid(self, form):
        # Attach a non-field error so the template can display it
        form.add_error(None, 'Error in form...')
        return super().form_invalid(form)
|
{"/userprefbook/views.py": ["/userprefbook/forms.py", "/userprefbook/models.py"], "/empexp/models.py": ["/site02/models.py"], "/empexp/views.py": ["/empexp/forms.py"], "/userprefbook/admin.py": ["/userprefbook/models.py"], "/empexp/forms.py": ["/empexp/models.py"], "/userprefbook/forms.py": ["/userprefbook/models.py"]}
|
40,470
|
fkchong/mydjsite02
|
refs/heads/master
|
/empexp/models.py
|
from django.db import models
from django.contrib.auth.models import User
from site02.models import employee
# Create your models here.
class experience(models.Model):
    """A single employment record tied to a user and an employee."""

    # Owning auth user; deleting the user deletes their experiences
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    # At most one experience row per employee record
    employee = models.OneToOneField(employee, on_delete=models.CASCADE)
    companyname = models.CharField(max_length=200)
    fromdate = models.DateField()
    todate = models.DateField()
    rolesandresponsibility = models.TextField()
    reasonofleaving = models.TextField()
    # Flags the user's current position
    currentexperience = models.BooleanField(default=True)

    def __str__(self):
        return f"{self.user.username}, {self.currentexperience}"
|
{"/userprefbook/views.py": ["/userprefbook/forms.py", "/userprefbook/models.py"], "/empexp/models.py": ["/site02/models.py"], "/empexp/views.py": ["/empexp/forms.py"], "/userprefbook/admin.py": ["/userprefbook/models.py"], "/empexp/forms.py": ["/empexp/models.py"], "/userprefbook/forms.py": ["/userprefbook/models.py"]}
|
40,471
|
fkchong/mydjsite02
|
refs/heads/master
|
/empexp/views.py
|
from django.shortcuts import render
from django.views.generic import FormView
from .forms import EmpExpModelForm
from django.contrib import messages
# Create your views here.
class MyEmpExpFormView(FormView):
    """Form view for submitting employment-experience entries.

    Renders empexp/index.html; on a valid submission the model form is
    saved, an info message is flashed, and the user is redirected back
    to the same page.
    """

    form_class = EmpExpModelForm
    template_name = "empexp/index.html"

    def get_success_url(self):
        # Redirect back to the page the form was submitted from
        return self.request.path

    def form_valid(self, form):
        # Persist the entry and notify the user
        form.save()
        messages.add_message(self.request, messages.INFO, 'Save successfully.')
        return super().form_valid(form)

    def form_invalid(self, form):
        # Attach a non-field error so the template can display it
        form.add_error(None, 'Error in form...')
        return super().form_invalid(form)
|
{"/userprefbook/views.py": ["/userprefbook/forms.py", "/userprefbook/models.py"], "/empexp/models.py": ["/site02/models.py"], "/empexp/views.py": ["/empexp/forms.py"], "/userprefbook/admin.py": ["/userprefbook/models.py"], "/empexp/forms.py": ["/empexp/models.py"], "/userprefbook/forms.py": ["/userprefbook/models.py"]}
|
40,472
|
fkchong/mydjsite02
|
refs/heads/master
|
/userprefbook/admin.py
|
from django.contrib import admin
from .models import book, userprefbook
# Register your models here.
# Expose both models in the Django admin interface.
admin.site.register(book)
admin.site.register(userprefbook)
|
{"/userprefbook/views.py": ["/userprefbook/forms.py", "/userprefbook/models.py"], "/empexp/models.py": ["/site02/models.py"], "/empexp/views.py": ["/empexp/forms.py"], "/userprefbook/admin.py": ["/userprefbook/models.py"], "/empexp/forms.py": ["/empexp/models.py"], "/userprefbook/forms.py": ["/userprefbook/models.py"]}
|
40,473
|
fkchong/mydjsite02
|
refs/heads/master
|
/userprefbook/migrations/0001_initial.py
|
# Generated by Django 3.1.12 on 2021-06-29 01:17
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the userprefbook app.

    Creates the book table and the userprefbook table, the latter with
    a many-to-many link to book and a foreign key to the auth user.
    """

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='book',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=200)),
                ('author', models.CharField(max_length=100)),
                ('publishyear', models.DateField()),
            ],
        ),
        migrations.CreateModel(
            name='userprefbook',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('remark', models.TextField(null=True)),
                ('createddate', models.DateField(auto_now_add=True)),
                ('bookname', models.ManyToManyField(to='userprefbook.book')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
|
{"/userprefbook/views.py": ["/userprefbook/forms.py", "/userprefbook/models.py"], "/empexp/models.py": ["/site02/models.py"], "/empexp/views.py": ["/empexp/forms.py"], "/userprefbook/admin.py": ["/userprefbook/models.py"], "/empexp/forms.py": ["/empexp/models.py"], "/userprefbook/forms.py": ["/userprefbook/models.py"]}
|
40,474
|
fkchong/mydjsite02
|
refs/heads/master
|
/site02/models.py
|
from django.db import models
# Create your models here.
class employee(models.Model):
    """Basic personal record for an employee."""

    firstname = models.CharField(max_length=100)
    lastname = models.CharField(max_length=100)
    sex = models.CharField(max_length=20)
    # Date of birth
    dob = models.DateField()

    def __str__(self):
        return f"{self.firstname}, {self.lastname}, {self.sex}, {self.dob}"
|
{"/userprefbook/views.py": ["/userprefbook/forms.py", "/userprefbook/models.py"], "/empexp/models.py": ["/site02/models.py"], "/empexp/views.py": ["/empexp/forms.py"], "/userprefbook/admin.py": ["/userprefbook/models.py"], "/empexp/forms.py": ["/empexp/models.py"], "/userprefbook/forms.py": ["/userprefbook/models.py"]}
|
40,475
|
fkchong/mydjsite02
|
refs/heads/master
|
/empexp/forms.py
|
from django import forms
from .models import experience
from django.core.exceptions import ValidationError
class EmpExpModelForm(forms.ModelForm):
    """Model form exposing all fields of the experience model."""

    class Meta:
        model = experience
        fields = "__all__"

    def clean_reasonofleaving(self):
        """Validate that the reason of leaving has at least 10 characters."""
        reasonofleaving = self.cleaned_data.get('reasonofleaving')
        # Guard against a missing value: .get() returns None when the
        # field is absent from cleaned_data, and len(None) would raise
        # TypeError instead of a proper validation error.
        if reasonofleaving is not None and len(reasonofleaving) < 10:
            raise ValidationError('Reason of leaving is too short.')
        return reasonofleaving
|
{"/userprefbook/views.py": ["/userprefbook/forms.py", "/userprefbook/models.py"], "/empexp/models.py": ["/site02/models.py"], "/empexp/views.py": ["/empexp/forms.py"], "/userprefbook/admin.py": ["/userprefbook/models.py"], "/empexp/forms.py": ["/empexp/models.py"], "/userprefbook/forms.py": ["/userprefbook/models.py"]}
|
40,476
|
fkchong/mydjsite02
|
refs/heads/master
|
/userprefbook/models.py
|
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class book(models.Model):
    """A book identified by name, author and publication date."""

    name = models.CharField(max_length=200)
    author = models.CharField(max_length=100)
    publishyear = models.DateField()

    def __str__(self):
        return f"{self.name}, {self.author}"
class userprefbook(models.Model):
    """A user's preferred books with an optional remark."""

    user = models.ForeignKey(User, on_delete=models.CASCADE)
    bookname = models.ManyToManyField(book)
    remark = models.TextField(null=True)
    # Set once when the row is created
    createddate = models.DateField(auto_now_add=True)

    def __str__(self):
        # NOTE(review): self.bookname is a ManyToMany manager, so this
        # interpolates the manager's repr rather than the related book
        # names — confirm this is intended.
        return f"{self.user.username}, {self.bookname}"
|
{"/userprefbook/views.py": ["/userprefbook/forms.py", "/userprefbook/models.py"], "/empexp/models.py": ["/site02/models.py"], "/empexp/views.py": ["/empexp/forms.py"], "/userprefbook/admin.py": ["/userprefbook/models.py"], "/empexp/forms.py": ["/empexp/models.py"], "/userprefbook/forms.py": ["/userprefbook/models.py"]}
|
40,477
|
fkchong/mydjsite02
|
refs/heads/master
|
/site02/views.py
|
from django.shortcuts import render, HttpResponse
# Create your views here.
def appindex(request):
    """App landing page: render app-about.html with placeholder status values."""
    #return HttpResponse("This is app index page.")
    progress = 'in progress'
    value = 1999
    context = {'status1':progress,'status2':value}
    return render(request, 'app-about.html', context)
def aboutpost(request):
    """Render aboutpost.html, echoing the ``Text1`` query parameter.

    Only GET carries the parameter; any other method falls back to an
    empty value so the template still renders. (The original bound
    ``progress``/``context`` only inside the GET branch, so a non-GET
    request raised UnboundLocalError; it also created an unused ``temp``.)
    """
    progress = None
    if request.method == 'GET':
        # 'Text1' is the input field name submitted by the about page form.
        progress = request.GET.get('Text1')
    value = 1999  # static placeholder shown alongside the submitted text
    context = {'status1': progress, 'status2': value}
    return render(request, 'aboutpost.html', context)
|
{"/userprefbook/views.py": ["/userprefbook/forms.py", "/userprefbook/models.py"], "/empexp/models.py": ["/site02/models.py"], "/empexp/views.py": ["/empexp/forms.py"], "/userprefbook/admin.py": ["/userprefbook/models.py"], "/empexp/forms.py": ["/empexp/models.py"], "/userprefbook/forms.py": ["/userprefbook/models.py"]}
|
40,478
|
fkchong/mydjsite02
|
refs/heads/master
|
/userprefbook/apps.py
|
from django.apps import AppConfig
class UserprefbookConfig(AppConfig):
    """App configuration for the userprefbook application."""
    name = 'userprefbook'
|
{"/userprefbook/views.py": ["/userprefbook/forms.py", "/userprefbook/models.py"], "/empexp/models.py": ["/site02/models.py"], "/empexp/views.py": ["/empexp/forms.py"], "/userprefbook/admin.py": ["/userprefbook/models.py"], "/empexp/forms.py": ["/empexp/models.py"], "/userprefbook/forms.py": ["/userprefbook/models.py"]}
|
40,479
|
fkchong/mydjsite02
|
refs/heads/master
|
/site02/apps.py
|
from django.apps import AppConfig
class Site02Config(AppConfig):
    """App configuration for the site02 application."""
    name = 'site02'
|
{"/userprefbook/views.py": ["/userprefbook/forms.py", "/userprefbook/models.py"], "/empexp/models.py": ["/site02/models.py"], "/empexp/views.py": ["/empexp/forms.py"], "/userprefbook/admin.py": ["/userprefbook/models.py"], "/empexp/forms.py": ["/empexp/models.py"], "/userprefbook/forms.py": ["/userprefbook/models.py"]}
|
40,480
|
fkchong/mydjsite02
|
refs/heads/master
|
/userprefbook/forms.py
|
from django import forms
from .models import book, userprefbook
from django.core.exceptions import ValidationError
class bookform(forms.ModelForm):
    """ModelForm over ``book`` exposing every model field."""

    class Meta:
        model = book
        fields = "__all__"

    def clean_name(self):
        # Per-field hook: reject titles shorter than 10 characters.
        name = self.cleaned_data.get('name')
        if len(name) < 10:
            raise ValidationError('Name is too short.')
        return name
class upbform(forms.ModelForm):
    """ModelForm for ``userprefbook`` with Bootstrap widgets and remark validation."""

    class Meta:
        model = userprefbook
        fields = ('user', 'bookname', 'remark')
        # widgets is a Meta option; it has no effect outside Meta.
        widgets = {
            'user': forms.Select(attrs={'class': 'form-control'}),
            'bookname': forms.Select(attrs={'class': 'form-control'}),
            'remark': forms.Textarea(attrs={'class': 'form-control', 'placeholder': 'Please enter remark'})
        }

    def clean_remark(self):
        # BUG FIX: Django only invokes clean_<fieldname> hooks. The original
        # method was named clean_name, which matches no field on this form,
        # so this remark validation never executed.
        remark = self.cleaned_data.get('remark')
        if len(remark) < 10:
            raise ValidationError('Remark is too short.')
        return remark
|
{"/userprefbook/views.py": ["/userprefbook/forms.py", "/userprefbook/models.py"], "/empexp/models.py": ["/site02/models.py"], "/empexp/views.py": ["/empexp/forms.py"], "/userprefbook/admin.py": ["/userprefbook/models.py"], "/empexp/forms.py": ["/empexp/models.py"], "/userprefbook/forms.py": ["/userprefbook/models.py"]}
|
40,481
|
fkchong/mydjsite02
|
refs/heads/master
|
/userprefbook/migrations/0003_userprefbook_remark.py
|
# Generated by Django 3.1.12 on 2021-06-29 05:40
from django.db import migrations, models
class Migration(migrations.Migration):
    """Re-add the nullable ``remark`` text field to userprefbook."""

    dependencies = [
        ('userprefbook', '0002_remove_userprefbook_remark'),
    ]

    operations = [
        migrations.AddField(
            model_name='userprefbook',
            name='remark',
            field=models.TextField(null=True),
        ),
    ]
|
{"/userprefbook/views.py": ["/userprefbook/forms.py", "/userprefbook/models.py"], "/empexp/models.py": ["/site02/models.py"], "/empexp/views.py": ["/empexp/forms.py"], "/userprefbook/admin.py": ["/userprefbook/models.py"], "/empexp/forms.py": ["/empexp/models.py"], "/userprefbook/forms.py": ["/userprefbook/models.py"]}
|
40,482
|
fkchong/mydjsite02
|
refs/heads/master
|
/userprefbook/migrations/0002_remove_userprefbook_remark.py
|
# Generated by Django 3.1.12 on 2021-06-29 05:37
from django.db import migrations
class Migration(migrations.Migration):
    """Remove the ``remark`` field from the userprefbook model."""

    dependencies = [
        ('userprefbook', '0001_initial'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='userprefbook',
            name='remark',
        ),
    ]
|
{"/userprefbook/views.py": ["/userprefbook/forms.py", "/userprefbook/models.py"], "/empexp/models.py": ["/site02/models.py"], "/empexp/views.py": ["/empexp/forms.py"], "/userprefbook/admin.py": ["/userprefbook/models.py"], "/empexp/forms.py": ["/empexp/models.py"], "/userprefbook/forms.py": ["/userprefbook/models.py"]}
|
40,483
|
IgorPodlawski/vending-machine-python
|
refs/heads/master
|
/Coin.py
|
class Coin:
    """A currency denomination identified by a symbol and a monetary value."""

    # Class-level defaults kept for backward compatibility with any code
    # reading Coin.symbol / Coin.value directly; __init__ shadows them.
    symbol = ''
    value = 0.00

    def __init__(self, symbol: str, value: float):
        self.symbol = symbol
        self.value = value

    def __repr__(self):
        # Debug-friendly representation (added; no caller relies on the default).
        return f"Coin(symbol={self.symbol!r}, value={self.value!r})"
|
{"/VendingMachine.py": ["/Product.py", "/Coin.py"], "/main.py": ["/VendingMachine.py"], "/tests/test_machine.py": ["/Product.py", "/Coin.py", "/VendingMachine.py"]}
|
40,484
|
IgorPodlawski/vending-machine-python
|
refs/heads/master
|
/VendingMachine.py
|
from Product import Product
from Coin import Coin
import random
class VendingMachine:
    """A vending machine holding product slots, inserted coins and a cash float."""

    # Class-level default, shadowed per instance in __init__.
    name = ''

    def __init__(self, name: str, **qwargs: dict):
        """Create a machine; the ``initialCash`` keyword overrides the 100.00 default."""
        self.name = name
        self.insertedCoinsSum = 0.00
        self.insertedCoins = []
        self.slots = []
        # Single lookup with an explicit default. The original truthiness
        # test (`if qwargs.get('initialCash'):`) silently replaced an
        # explicit initialCash=0 with 100.00.
        self.cash = qwargs.get('initialCash', 100.00)

    def fill(self, productsQuantity: int):
        """Append ``productsQuantity`` randomly generated products to the slots."""
        randomProductsNames = ['Mars', 'Snickers', 'Coffee', 'Coca-Cola', 'Pepsi 😂', 'iPhone']
        for _ in range(productsQuantity):
            product = Product(random.choice(randomProductsNames), random.randint(1, 9), random.choice([1.00, 1.50, 2.00, 3.00]))
            self.slots.append(product)

    def productsList(self):
        """Print one line per slot with name, quantity and price."""
        for product in self.slots:
            print('NAME:', product.name, 'QUANTITY:', product.quantity, 'PRICE:', product.price)

    def insertCoin(self, coin: Coin):
        """Record an inserted coin; its value is summed later by sumInsertedCoins."""
        self.insertedCoins.append(coin)

    def sumInsertedCoins(self):
        # NOTE(review): this accumulates into insertedCoinsSum on every call,
        # so calling it twice double-counts the same coins — confirm callers
        # invoke it exactly once per purchase.
        for coin in self.insertedCoins:
            self.insertedCoinsSum = self.insertedCoinsSum + coin.value

    def buyProduct(self, number: int):
        """Dispense slot ``number`` if funds and stock suffice, else return None."""
        product = self.slots[number]
        if (product.price <= self.insertedCoinsSum and product.quantity > 0):
            product.decrease()
            self.insertedCoinsSum = self.insertedCoinsSum - product.price
            return product
        else:
            return None
|
{"/VendingMachine.py": ["/Product.py", "/Coin.py"], "/main.py": ["/VendingMachine.py"], "/tests/test_machine.py": ["/Product.py", "/Coin.py", "/VendingMachine.py"]}
|
40,485
|
IgorPodlawski/vending-machine-python
|
refs/heads/master
|
/main.py
|
from VendingMachine import VendingMachine
def loop():
    """Interactive REPL for a demo vending machine; Ctrl+C / Ctrl+D exits."""
    print('press CTRL+C or CTLR+D to exit')
    vm = VendingMachine('hospital', initialCash = 130)
    vm.fill(32)
    while True:
        try:
            data = input('MACHINE> ')
            # Only the PRODUCTS command is implemented so far.
            if (data == 'PRODUCTS'):
                vm.productsList()
        except (KeyboardInterrupt, EOFError):
            print('goodbye!')
            break

if __name__ == "__main__":
    loop()
|
{"/VendingMachine.py": ["/Product.py", "/Coin.py"], "/main.py": ["/VendingMachine.py"], "/tests/test_machine.py": ["/Product.py", "/Coin.py", "/VendingMachine.py"]}
|
40,486
|
IgorPodlawski/vending-machine-python
|
refs/heads/master
|
/Product.py
|
class Product:
    """An item held in a vending-machine slot: name, stock count and price."""

    # Class-level defaults; every instance overrides them in __init__.
    name = ''
    quantity = 0
    price = 0.00

    def __init__(self, name: str, quantity: int, price: float):
        self.name = name
        self.quantity = quantity
        self.price = price

    def increase(self):
        """Add one unit to stock."""
        self.quantity += 1

    def decrease(self):
        """Remove one unit from stock."""
        self.quantity -= 1
|
{"/VendingMachine.py": ["/Product.py", "/Coin.py"], "/main.py": ["/VendingMachine.py"], "/tests/test_machine.py": ["/Product.py", "/Coin.py", "/VendingMachine.py"]}
|
40,487
|
IgorPodlawski/vending-machine-python
|
refs/heads/master
|
/tests/test_machine.py
|
from Product import Product
from Coin import Coin
from VendingMachine import VendingMachine
def test_foo():
    # Smoke test: verifies the pytest harness itself runs.
    assert 2 + 2

def test_product_incease():
    # increase() must add exactly one unit of stock.
    product = Product('Snickers', 0, 1.00)
    product.increase()
    assert 1 == product.quantity

def test_coin_defitinion():
    # Constructor must store symbol and value verbatim.
    dollar = Coin('DOLLAR', 1.00)
    assert dollar.symbol == 'DOLLAR' and dollar.value == 1.00
def test_vending_machine_init():
    # initialCash keyword must override the default cash float.
    vm = VendingMachine('hospital', initialCash = 130)
    assert vm.cash == 130 and vm.name == 'hospital'

def test_vending_machine_fill():
    # fill(n) must create exactly n slots.
    vm = VendingMachine('hospital', initialCash = 130)
    vm.fill(32)
    vm.productsList()
    assert len(vm.slots) == 32

def test_insert_coin():
    # Inserted coin must be stored unchanged.
    vm = VendingMachine('hospital', initialCash = 130)
    dollar = Coin('DOLLAR', 1.00)
    vm.insertCoin(dollar)
    insertedCoin = vm.insertedCoins[0]
    assert insertedCoin.symbol == 'DOLLAR' and insertedCoin.value == 1.00

def test_sum_inserted_coins():
    # 1.00 + 1.00 + 0.25 inserted -> 2.25 total.
    vm = VendingMachine('hospital', initialCash = 130)
    dollar = Coin('DOLLAR', 1.00)
    quarter = Coin('Q', 0.25)
    vm.insertCoin(dollar)
    vm.insertCoin(dollar)
    vm.insertCoin(quarter)
    vm.sumInsertedCoins()
    assert vm.insertedCoinsSum == 2.25
def test_product_bought():
    # With 3.25 inserted (max product price is 3.00) the purchase must succeed.
    vm = VendingMachine('hospital', initialCash = 130)
    vm.fill(32)
    dollar = Coin('DOLLAR', 1.00)
    quarter = Coin('Q', 0.25)
    vm.insertCoin(dollar)
    vm.insertCoin(dollar)
    vm.insertCoin(dollar)
    vm.insertCoin(quarter)
    vm.sumInsertedCoins()
    product = vm.slots[5]
    boughtProduct = vm.buyProduct(5)
    assert boughtProduct != None

def test_product_not_bought():
    # 0.25 inserted is below every product price (min 1.00) -> None.
    vm = VendingMachine('hospital', initialCash = 130)
    vm.fill(32)
    quarter = Coin('Q', 0.25)
    vm.insertCoin(quarter)
    vm.sumInsertedCoins()
    product = vm.slots[5]
    boughtProduct = vm.buyProduct(5)
    assert boughtProduct == None

def test_rest():
    # Buying must deduct the product price from the inserted-coin balance.
    vm = VendingMachine('hospital', initialCash = 130)
    vm.fill(32)
    dollar = Coin('DOLLAR', 1.00)
    quarter = Coin('Q', 0.25)
    vm.insertCoin(dollar)
    vm.insertCoin(dollar)
    vm.insertCoin(dollar)
    vm.insertCoin(quarter)
    vm.sumInsertedCoins()
    initialSum = vm.insertedCoinsSum
    product = vm.slots[5]
    boughtProduct = vm.buyProduct(5)
    assert vm.insertedCoinsSum != initialSum
|
{"/VendingMachine.py": ["/Product.py", "/Coin.py"], "/main.py": ["/VendingMachine.py"], "/tests/test_machine.py": ["/Product.py", "/Coin.py", "/VendingMachine.py"]}
|
40,509
|
mg-blvd/Images-Website-HW4
|
refs/heads/master
|
/txt_read_write_functions.py
|
'''Authors: Misael Guijarro && Jose Alfaro
Date: 04/02/2019
Class: CST 205
Teacher: Wes Modes
Description: Holds the function that will write to the text file that keeps track
of the images we used in the last refresh.'''
# this function literally reads the names that are in former_images.txt
# it then 'splits' the strings after the new line
def read_in_names():
    """Return the image names recorded at the last refresh.

    Reads former_images.txt and splits on newlines; the file's trailing
    newline yields a trailing empty string, matching the writer's format.
    """
    # Context manager closes the handle even if read() raises; the original
    # leaked the descriptor on error.
    with open("former_images.txt", "r") as f:
        read_names = f.read().split('\n')
    return read_names
#this function passes in name_list which is used to 'write' the name of the images
# this we are using in the current page. We will call these names back at the
#next refresh.
# Writes out the name of image followed by a new line.
def edit_text(name_list):
    """Persist up to three image names (one per line) to former_images.txt.

    These names are read back on the next refresh so images are not repeated.
    """
    with open("former_images.txt", 'w') as f:
        # Slicing tolerates lists shorter than 3; the original hard-coded
        # range(3) raised IndexError in that case.
        for name in name_list[:3]:
            f.write(name + '\n')
|
{"/hw4.py": ["/txt_read_write_functions.py"]}
|
40,510
|
mg-blvd/Images-Website-HW4
|
refs/heads/master
|
/hw4.py
|
'''Authors: Misael Guijarro && Jose Alfaro
Date: 04/02/2019
Class: CST 205
Teacher: Wes Modes
Description: The home page displays three random images. Once clicked they take you to
a separate page with the picture and a description of it.'''
from flask import Flask, render_template
from flask_bootstrap import Bootstrap
from txt_read_write_functions import read_in_names, edit_text
from image_info import image_info
from PIL import Image
import random
import ast
# Serve files in ./static at /static and enable Bootstrap-based templates.
app = Flask(__name__, static_url_path = "/static", static_folder = "static")
bootstrap = Bootstrap(app)
# this function is so that the picture does not get repeated on the home page
def assert_no_repeating(array1, array2):
    """Pick a random image dict whose title appears in neither list.

    NOTE(review): despite the name this does not assert — it redraws until
    an unused image is found. If every title in image_info is in array1 or
    array2 the loop never terminates; confirm image_info is large enough.
    """
    current = random.choice(image_info)
    while (current["title"] in array1 or current["title"] in array2):
        current = random.choice(image_info)
    return current
#this will route the function to be our home page
#this function allows the images to be displayed onto the page
# Function: inside of the for loop it is making sure that the images will not be repeated or reuse images from the last refresh
# by passing in the assert_no_repeating function and the read_in_names with the past_images variable.
# after completion it will use the edit_text function to pass in the names of the images that
# will be posted onto the home page to our text file. This will assure that different
#images are used after each refresh.
@app.route('/')
def main_page():
    """Home page: show three images that were not used on the last refresh."""
    past_images = read_in_names()
    current_images = []
    current_image_titles = []
    for i in range(3):
        # Exclude both last-refresh images and those already picked this time.
        current_images.append(assert_no_repeating(past_images, current_image_titles))
        current_image_titles.append(current_images[i]["title"])
    # Remember this refresh's titles so the next one avoids them.
    edit_text(current_image_titles)
    return render_template('home.html', image_list=current_images)
#this will route the function to be the pages where the individual image description is on after clicking on it
#this function will display the image resized onto another page with some description of it
# Function: it is passing in var so it could be later used in the home.html to make print out the random images with
# their respective 'locations' in the array in the previous function.
# The .literal_eval(var) makes a dictionary out of string output from html code.
# Images are being resized once clicked on and taken to new page.
# We get the dimensions od the image using PIL.
@app.route('/image_info/<var>')
def image_page(var):
    """Detail page for one image; ``var`` is a dict literal embedded in the URL.

    ast.literal_eval only accepts Python literals, so arbitrary code in the
    URL cannot execute (malformed input raises ValueError instead).
    """
    new_var = ast.literal_eval(var)
    # Context manager closes the image file handle; the original leaked it.
    with Image.open("static/" + new_var["id"] + ".jpg") as current_image:
        dimensions_w, dimensions_h = current_image.size
    return render_template('image_info.html', pic=new_var, w=dimensions_w, h=dimensions_h)
if __name__ == '__main__':
    print("Working!")
    # debug=True enables the reloader and interactive tracebacks;
    # not suitable for production.
    app.run(debug=True)
|
{"/hw4.py": ["/txt_read_write_functions.py"]}
|
40,525
|
SicParvisMagna95/rain_detection
|
refs/heads/master
|
/train.py
|
import model
import dataset
import torch
import torch.cuda as cuda
import torch.utils.data as Data
import torch.nn as nn
import torch.optim as Optim
import matplotlib.pyplot as plt
import os
from visdom import Visdom
import numpy as np
from loss import FocalLoss,FocalLoss_bce
# hyper-parameter
os.environ["CUDA_VISIBLE_DEVICES"] = "7"
EPOCHS = 40
BATCH_SIZE = 500

if __name__ == '__main__':
    # gpu
    gpu_avail = cuda.is_available()
    # model
    # net = model.Ztk_vgg()
    net = model.MobileNet()
    if cuda.device_count()>1:
        print('cuda count:',cuda.device_count())
        net = nn.DataParallel(net)
    # names = net.named_parameters()
    # for name,p in names:
    #     print(name)
    #     pass
    if gpu_avail:
        net = net.to("cuda")
    # make directory
    if not os.path.exists('./loss'):
        os.makedirs('./loss')
    model_saved_path = './model_saved_new_3/mobilenet_conv1x1'
    if not os.path.exists(model_saved_path):
        os.makedirs(model_saved_path)
    # data
    input_train_data = dataset.RainData(train=True)
    input_test_data = dataset.RainData(train=False)
    train_data_loader = Data.DataLoader(input_train_data,batch_size=BATCH_SIZE,shuffle=True)
    test_data_loader = Data.DataLoader(input_test_data,batch_size=BATCH_SIZE,shuffle=False)
    # optimizer
    optimizer = Optim.Adam(net.parameters())
    scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer,verbose=True,patience=1000)
    # loss function
    loss_func = nn.CrossEntropyLoss()
    # loss_func = FocalLoss(2,alpha=0.25)
    # loss_func = FocalLoss_bce(alpha=1, gamma=2)
    # visualize
    env = Visdom(port=10022,env='mobilenet')
    # initialize some variables
    accuracy,accuracy_old = 0,0
    train_loss_curve,val_loss_curve = [],[]
    iteration = 0
    # start training
    for epoch in range(EPOCHS):
        # loss_train_epoch = 0.0
        # loss_test_epoch = 0.0
        net.train()
        for i,(img, label) in enumerate(train_data_loader):
            net.train()
            # Global iteration index used for the visdom x-axis.
            iteration = int(epoch*(len(input_train_data.train_data)/BATCH_SIZE+1)+i+1)
            # pytorch has to do these type cvt
            # img---float32(float), label---int64(long)
            img = img.float()
            label = label.long()
            if gpu_avail:
                img = img.to('cuda')
                label = label.to('cuda')
            # with SummaryWriter(log_dir='./model_graph', comment='my_vgg') as w:
            #     w.add_graph(net, img)
            optimizer.zero_grad()
            out = net(img) # (Batch_size, 2)
            # value_range over channels (0.0, 1.0)
            loss = loss_func(out, label)
            loss.backward()
            # NOTE(review): optimizer.step() is commented out below, so the
            # computed gradients are never applied — scheduler.step(loss)
            # only adjusts the learning rate (ReduceLROnPlateau), it does
            # NOT update the weights. Confirm this is intentional.
            # optimizer.step()
            scheduler.step(loss)
            # loss_train_epoch += loss.item() * label.shape[0]
            loss_train_iteration = loss.item()
            # Every 200 training iterations: run validation and log/plot.
            if i % 200 == 0:
                # print(f"Epoch {epoch}\tIteration {i}\tTrain loss: {loss.item()}")
                # env.text(f"Epoch {epoch}\tIteration {i}\tTrain loss: {loss.item()}\n",
                #          win='log',env='mobilenet',
                #          append=False if iteration == 1 else True
                #          )
                # print(epoch, iteration)
                # env.line(Y=np.array([loss.item()]),X=np.array([iteration]),
                #          win='loss',env='mobilenet',
                #          opts=dict(title='loss',
                #                    xlabel='iteration',ylabel='loss',
                #                    legend=['Train loss'],
                #                    showlegend=True
                #                    ),
                #          update=None if iteration == 1 else 'append')
                # evaluation
                net.eval()
                correct = 0
                loss_test_iteration = 0.0
                for j, (img_val, label_val) in enumerate(test_data_loader):
                    img_val = img_val.float()
                    label_val = label_val.long()
                    if gpu_avail:
                        img_val = img_val.to("cuda")
                        label_val = label_val.to("cuda")
                    out_val = net(img_val) # (Batch_size, 2)
                    # output the accuracy
                    out_class = torch.argmax(out_val, dim=1)
                    count = torch.sum(out_class==label_val).item()
                    # print(count)
                    correct += count
                    loss_val = loss_func(out_val, label_val)
                    # loss_test_epoch += loss_val.item() * label_val.shape[0]
                    loss_test_iteration += loss_val.item() * label_val.shape[0]
                # loss_total_train = loss_train_iteration/len(input_train_data.train_data)
                # loss_total_train = loss_train_iteration
                loss_total_val = loss_test_iteration/len(input_test_data.train_data)
                accuracy = correct/len(input_test_data.train_data)
                # Checkpoint whenever validation accuracy improves.
                if accuracy_old<accuracy:
                    accuracy_old = accuracy
                    torch.save({'model':net,
                                'epoch':epoch,
                                'batch_size':BATCH_SIZE,
                                },f'{model_saved_path}/accuracy_{accuracy}.pkl')
                # record train_loss,val_loss
                # train_loss_curve.append(loss_total_train)
                train_loss_curve.append(loss_train_iteration)
                val_loss_curve.append(loss_total_val)
                env.line(Y=np.column_stack((loss_train_iteration,loss_total_val)),
                         # X=np.column_stack((epoch,epoch)),
                         X=np.array([iteration]),
                         win='loss', env='mobilenet',
                         opts=dict(title='Loss',
                                   xlabel='Iteration', ylabel='loss',
                                   legend=['Train loss','Validation loss'],
                                   markers=True,
                                   showlegend=True
                                   ),
                         update=None if iteration == 201 else 'append')
                env.line(Y=np.array([accuracy]),
                         # X=np.column_stack((epoch,epoch)),
                         X=np.array([iteration]),
                         win='accuracy', env='mobilenet',
                         opts=dict(title='Accuracy',
                                   xlabel='Iteration', ylabel='accuracy',
                                   legend=['Accuracy'],
                                   markers=True,
                                   showlegend=True
                                   ),
                         update=None if iteration == 1 else 'append')
                print(f'Iteration {iteration} finished! Train loss: {loss_train_iteration}\t'
                      f'Val loss: {loss_total_val}\t'
                      f'Accuracy: {accuracy}')
                env.text(f'Iteration {iteration} finished! Train loss: {loss_train_iteration}\t'
                         f'Val loss: {loss_total_val}\t'
                         f'Accuracy: {accuracy}\n\n',
                         win='log', env='mobilenet',
                         append=False if iteration == 1 else True)
                print()
                pass
        print(f'Epoch {epoch} finished!\n')
        env.text(f'Epoch {epoch} finished!\n',
                 win='log', env='mobilenet',
                 append=False if iteration == 1 else True)
    # NOTE(review): train_loss_curve/val_loss_curve gain one point per
    # evaluation (every 200 iterations), not per epoch, so the x-axis
    # range(1, EPOCHS+1) only matches when those counts coincide — confirm.
    plt.plot(range(1,EPOCHS+1), train_loss_curve, label='train loss')
    plt.plot(range(1,EPOCHS+1), val_loss_curve, label='val loss')
    plt.title('Mobilenet')
    plt.xlabel('Epoch')
    plt.ylabel('Loss')
    plt.legend()
    plt.savefig(f'./loss/loss_epoch{EPOCHS}')
    plt.show()
|
{"/train.py": ["/model.py", "/dataset.py", "/loss.py"], "/main.py": ["/dataset.py", "/model.py"]}
|
40,526
|
SicParvisMagna95/rain_detection
|
refs/heads/master
|
/main.py
|
import torch
import dataset
import model
import cv2
import os
import torch.nn as nn
import numpy as np
import time
import torch.cuda as cuda
import glob
if __name__ == '__main__':
    # Batch inference: run the saved rain-detection model over every .jpg in
    # img_dir and write per-pixel rain-probability maps next to it.
    gpu_avail = cuda.is_available()
    os.environ["CUDA_VISIBLE_DEVICES"] = "3"
    scale = 0.8  # input downscale factor before the network
    img_dir = '/home/zhangtk/data/test/0/'
    img_list_ = glob.glob(os.path.join(img_dir, '*.jpg'))
    img_list = [os.path.basename(i) for i in img_list_]
    save_dir = os.path.join(img_dir, 'result_mobilenet_conv1x1_new_930')
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)
    # PERF FIX: load the checkpoint ONCE before the loop; the original
    # re-ran torch.load for every image, dominating the measured runtime.
    ckpt = './model_saved_new/mobilenet_conv1x1/accuracy_0.9302765786860787.pkl'
    if gpu_avail:
        info = torch.load(ckpt)
    else:
        info = torch.load(ckpt, map_location='cpu')
    net = info['model']
    net = net.eval()
    start = time.time()
    for img_path in img_list:
        img = cv2.imread(os.path.join(img_dir, img_path))
        h = img.shape[0]
        w = img.shape[1]
        img = cv2.resize(img, (0, 0), fx=scale, fy=scale)
        img = torch.from_numpy(img)
        img = img.float()
        img = torch.unsqueeze(img, dim=0)
        img = img.permute(0, 3, 1, 2)  # NHWC -> NCHW
        if gpu_avail:
            img = img.to("cuda")
        out = net(img)
        out = torch.unsqueeze(out, dim=0)
        # Per-pixel class probabilities; channel 1 is the "rain" class.
        out = nn.Softmax2d()(out)
        out = out[0, 1, :, :]
        out = out.cpu().detach().numpy()
        # Resize the probability map back to the original resolution and
        # scale to 0-255 for writing as an image.
        out = cv2.resize(out, (w, h))
        out = out * 255
        print(img_path)
        cv2.imwrite(os.path.join(save_dir, img_path[:-4] + '_test.jpg'), out)
    end = time.time()
    print(end - start, ' s')
|
{"/train.py": ["/model.py", "/dataset.py", "/loss.py"], "/main.py": ["/dataset.py", "/model.py"]}
|
40,527
|
SicParvisMagna95/rain_detection
|
refs/heads/master
|
/model.py
|
import torch as torch
import torch.nn as nn
import torchvision
import torch.utils.model_zoo as model_zoo
__all__ = ['VGG', 'vgg11', 'vgg11_bn', 'vgg13', 'vgg13_bn', 'vgg16', 'vgg16_bn', 'vgg19_bn', 'vgg19',]
cfg = {
'A': [64, 'M', 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'],
'B': [64, 64, 'M', 128, 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'],
'D': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512, 'M', 512, 512, 512, 'M'],
'E': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 256, 'M', 512, 512, 512, 512, 'M', 512, 512, 512, 512, 'M'],
'my_vgg':[32, 32, 'M', 64, 64, 'M', 128, 128, 'M', 256, 256, 'M', 256, 256, 'M'],
'xx_vgg':[],
'my_mobilenet':[],
}
class VGG(nn.Module):
    """VGG-style feature extractor followed by a 1x1 conv emitting 2 logits.

    ``features`` is expected to reduce the input to (N, 256, 1, 1); the 1x1
    conv maps that to (N, 2, 1, 1) and squeeze drops the singleton dims.
    """

    def __init__(self, features, init_weights=True):
        super(VGG, self).__init__()
        self.features = features
        self.conv_sub = nn.Conv2d(in_channels=256, out_channels=2, kernel_size=1)
        # self.softmax = nn.Softmax2d()
        # self.linear = nn.Linear(256,10)
        if init_weights:
            self._initialize_weights()

    def forward(self, x):
        x = self.features(x) # (Batch_size, 256, 1, 1)
        x = self.conv_sub(x) # (Batch_size, 2, 1, 1)
        # NOTE(review): squeeze() with no dim also removes the batch axis
        # when Batch_size == 1, yielding shape (2,) — confirm callers cope.
        x = torch.squeeze(x) # (Batch_size, 2)
        return x

    def _initialize_weights(self):
        # Kaiming init for convs, unit scale/zero shift for BN, small
        # normal for linear layers.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
                if m.bias is not None:
                    nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.BatchNorm2d):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.Linear):
                nn.init.normal_(m.weight, 0, 0.01)
                nn.init.constant_(m.bias, 0)
def make_layers(cfg, batch_norm=False):
    """Build a VGG-style feature stack from a configuration list.

    Each integer entry adds a 3x3 same-padding conv (optionally followed by
    BatchNorm) plus an in-place ReLU; the sentinel 'M' adds a 2x2 max-pool.
    The channel count starts at 3 (RGB input).
    """
    modules = []
    channels = 3
    for entry in cfg:
        if entry == 'M':
            modules.append(nn.MaxPool2d(kernel_size=2, stride=2))
            continue
        modules.append(nn.Conv2d(channels, entry, kernel_size=3, padding=1))
        if batch_norm:
            modules.append(nn.BatchNorm2d(entry))
        modules.append(nn.ReLU(inplace=True))
        channels = entry
    return nn.Sequential(*modules)
def my_vgg(batch_norm=True, **kwargs):
    """Construct a VGG from the 'my_vgg' configuration (BatchNorm by default).

    The original ``def`` line ended with a stray backslash line-continuation
    after the colon; removed for clarity (behavior unchanged).
    """
    return VGG(make_layers(cfg['my_vgg'], batch_norm=batch_norm), **kwargs)
""""""""""""""""""""""""""""""""""""
"""============ZTK_vgg==========="""
""""""""""""""""""""""""""""""""""""
class Block1(nn.Module):
    """3x3 conv -> BatchNorm -> ReLU; default stride/padding keep spatial size."""

    # kernel_size=3*3 pad=1
    def __init__(self, in_channel, out_channel, stride=1, padding=1):
        super(Block1, self).__init__()
        self.conv = nn.Conv2d(in_channels=in_channel, out_channels=out_channel,
                              kernel_size=3, stride=stride, padding=padding)
        self.bn = nn.BatchNorm2d(out_channel)
        self.relu = nn.ReLU(inplace=True)
        self.block = nn.Sequential(self.conv, self.bn, self.relu)

    def forward(self, x):
        # Equivalent to self.block(x) — same three modules, same order.
        return self.relu(self.bn(self.conv(x)))
class Block2(nn.Module):
    """3x3 conv with no padding (shrinks H/W by 2) -> BatchNorm -> ReLU."""

    # kernel_size=3*3 pad=0
    def __init__(self, in_channel, out_channel):
        super(Block2, self).__init__()
        self.conv = nn.Conv2d(in_channels=in_channel, out_channels=out_channel,
                              kernel_size=3, stride=1, padding=0)
        self.bn = nn.BatchNorm2d(out_channel)
        self.relu = nn.ReLU(inplace=True)
        self.block = nn.Sequential(self.conv, self.bn, self.relu)

    def forward(self, x):
        # Equivalent to self.block(x) — same three modules, same order.
        return self.relu(self.bn(self.conv(x)))
class Ztk_vgg(nn.Module):
    """Custom VGG-like network: two padded conv stages with pooling, then
    three unpadded convs plus pooling, ending in a 1x1 conv to 2 logits."""

    def __init__(self, in_channel=3, num_class=2):
        super(Ztk_vgg,self).__init__()
        # Stage 1: two 3x3 same-padding convs, then 2x2 max-pool (H,W / 2).
        self.unit1 = Block1(in_channel, 32)
        self.unit2 = Block1(32, 32)
        self.maxpool1 = nn.MaxPool2d(kernel_size=2, stride=2)
        self.down_sample1 = nn.Sequential(self.unit1,self.unit2,self.maxpool1)
        # Stage 2: same pattern at 64 channels (H,W / 2 again).
        self.unit3 = Block1(32,64)
        self.unit4 = Block1(64,64)
        self.maxpool2 = nn.MaxPool2d(kernel_size=2, stride=2)
        self.down_sample2 = nn.Sequential(self.unit3,self.unit4,self.maxpool2)
        # Stage 3: three unpadded 3x3 convs (each trims 2 px) then pooling.
        self.unit5 = Block2(64,128)
        self.unit6 = Block2(128,256)
        self.unit7 = Block2(256,512)
        self.maxpool3 = nn.MaxPool2d(kernel_size=2, stride=2)
        self.down_sample3 = nn.Sequential(self.unit5,self.unit6,self.unit7,self.maxpool3)
        # 1x1 conv maps 512 channels to the 2 class logits.
        self.conv_1x1 = nn.Conv2d(512,2,kernel_size=1)
        self._initialize_weights()
        # self.feature = nn.Sequential(self.down_sample1,self.down_sample2,self.down_sample3)

    def forward(self, x):
        x = self.down_sample1(x)
        x = self.down_sample2(x)
        x = self.down_sample3(x)
        x = self.conv_1x1(x)
        # NOTE(review): squeeze() with no dim also drops the batch axis
        # when the batch size is 1 — confirm callers handle that.
        x = torch.squeeze(x)
        return x

    def _initialize_weights(self):
        # Kaiming init for convs, unit/zero init for BatchNorm.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
                if m.bias is not None:
                    nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.BatchNorm2d):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
""""""""""""""""""""""""""""""""""""
"""============MobileNet========="""
""""""""""""""""""""""""""""""""""""
class Depthwise_separable_conv(nn.Module):
    """Depthwise conv (groups == in_channels) followed by a 1x1 pointwise
    conv, each with its own BatchNorm and a shared in-place ReLU."""

    def __init__(self, in_channel, out_channel, stride=1, kernel_size=3, padding=1):
        super(Depthwise_separable_conv, self).__init__()
        self.depthwise = nn.Conv2d(in_channels=in_channel, out_channels=in_channel,
                                   kernel_size=kernel_size, padding=padding, stride=stride,
                                   groups=in_channel)
        self.pointwise = nn.Conv2d(in_channels=in_channel, out_channels=out_channel, kernel_size=1)
        self.bn_in = nn.BatchNorm2d(in_channel)
        self.bn_out = nn.BatchNorm2d(out_channel)
        self.relu = nn.ReLU(inplace=True)

    def forward(self, x):
        # depthwise -> BN -> ReLU, then pointwise -> BN -> ReLU.
        out = self.relu(self.bn_in(self.depthwise(x)))
        out = self.relu(self.bn_out(self.pointwise(out)))
        return out
class MobileNet(nn.Module):
    """MobileNet-style stack of depthwise-separable convs ending in a 1x1
    conv that emits 2 class logits (the ``out_channel`` parameter is kept
    for interface compatibility but is not used by the current head)."""

    def __init__(self, in_channel=3, out_channel=1000):
        super(MobileNet, self).__init__()
        self.conv1 = Block1(in_channel, 32, stride=2, padding=1)  # /2
        self.depth_point_conv1 = Depthwise_separable_conv(32, 64, stride=1, padding=1)  # unchanged
        self.depth_point_conv2 = Depthwise_separable_conv(64, 128, stride=2, padding=1)  # /2
        self.depth_point_conv3 = Depthwise_separable_conv(128, 128, stride=1, padding=1)  # unchanged
        self.depth_point_conv4 = Depthwise_separable_conv(128, 256, stride=1, padding=0)  # -2
        self.depth_point_conv5 = Depthwise_separable_conv(256, 256, stride=1, padding=1)  # unchanged
        self.depth_point_conv6 = Depthwise_separable_conv(256, 512, stride=1, padding=0)  # -2
        # BUG FIX: the original used `[module] * 5`, which repeats the SAME
        # module object five times — all five 512->512 layers shared one set
        # of weights. Build five independent layers instead.
        self.depth_point_conv7_block = [
            Depthwise_separable_conv(512, 512, stride=1, padding=1) for _ in range(5)
        ]
        self.depth_point_conv8 = Depthwise_separable_conv(512, 1024, stride=1, padding=0)  # -2
        self.depth_point_conv9 = Depthwise_separable_conv(1024, 1024, stride=2, padding=1)  # /2
        self.conv1x1 = nn.Conv2d(1024, 2, kernel_size=1)
        # The Sequential registers every layer (including the five unrolled
        # 512->512 blocks) as submodules, so they all receive gradients and
        # appear in state_dict.
        self.feature = nn.Sequential(self.conv1,
                                     self.depth_point_conv1,
                                     self.depth_point_conv2,
                                     self.depth_point_conv3,
                                     self.depth_point_conv4,
                                     self.depth_point_conv5,
                                     self.depth_point_conv6,
                                     *self.depth_point_conv7_block,
                                     self.depth_point_conv8,
                                     self.depth_point_conv9,
                                     self.conv1x1
                                     )
        self._initialize_weights()

    def forward(self, x):
        x = self.feature(x)
        # NOTE(review): squeeze() with no dim also drops the batch axis
        # when the batch size is 1 — confirm callers handle that.
        x = torch.squeeze(x)
        return x

    def _initialize_weights(self):
        # Kaiming init for convs, unit/zero init for BatchNorm.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
                if m.bias is not None:
                    nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.BatchNorm2d):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
|
{"/train.py": ["/model.py", "/dataset.py", "/loss.py"], "/main.py": ["/dataset.py", "/model.py"]}
|
40,528
|
SicParvisMagna95/rain_detection
|
refs/heads/master
|
/dataset.py
|
import torch
import torchvision.transforms as transforms
import torch.utils.data as Data
import os
import glob
import pickle
import numpy as np
class RainData(Data.Dataset):
    """Binary rain/no-rain patch dataset loaded from pickled imdb files.

    Each item is ``(image_array_as_float64, label_float)`` where the label
    is derived from the source directory name (*_True -> 1, *_False -> 0).
    """

    def __init__(self, train=True):
        super(RainData, self).__init__()
        root = '/data/rain/rain_detection_train/cropped/'
        self.train_data = []
        if train:
            data_path = [os.path.join(root, 'train_True', '32.imdb'),
                         os.path.join(root, 'train_False', '32.imdb')]
        else:
            data_path = [os.path.join(root, 'val_True', '32.imdb'),
                         os.path.join(root, 'val_False', '32.imdb')]
        for file_train in data_path:
            # Context manager closes the file even if pickle.load raises.
            with open(file_train, 'rb') as imdb:
                data = pickle.load(imdb)
            # Directory name encodes the label.
            if 'True' in file_train:
                self.train_data += [[i, 1] for i in data]
            else:
                self.train_data += [[i, 0] for i in data]

    def __len__(self):
        return len(self.train_data)

    def __getitem__(self, item):
        # BUG FIX: np.float was deprecated in NumPy 1.20 and later removed;
        # np.float64 is the equivalent concrete dtype.
        img = (self.train_data[item][0]).astype(np.float64)
        label = float((self.train_data[item][1]))
        return img, label
|
{"/train.py": ["/model.py", "/dataset.py", "/loss.py"], "/main.py": ["/dataset.py", "/model.py"]}
|
40,529
|
SicParvisMagna95/rain_detection
|
refs/heads/master
|
/mess/test_time.py
|
import cv2
import torch
import time
import torch.cuda as cuda
import os
# Quick benchmark: time `number` forward passes of a saved model on one image.
os.environ["CUDA_VISIBLE_DEVICES"] = "1"
gpu_avail = cuda.is_available()
if gpu_avail:
    info = torch.load('./model_saved/accuracy_0.9762699947266655.pkl')
    print('\nTest on gpu:')
else:
    info = torch.load('./model_saved/accuracy_0.9762699947266655.pkl', map_location='cpu')
    print('\nTest on cpu:')
net = info['model']
number = 100
# NOTE(review): the timed window below also includes image loading and
# preprocessing, and the model is never put into eval mode — confirm both
# are intended for this measurement.
start = time.time()
img = cv2.imread('./RECORD2.20000201000408Front.mp4.432.jpg')
img = torch.from_numpy(img)
img = img.float()
img = torch.unsqueeze(img,dim=0)
img = img.permute(0,3,1,2)  # NHWC -> NCHW
if gpu_avail:
    img = img.to('cuda')
for i in range(number):
    a = net(img)
end = time.time()
print(end-start,f's for {number} images')
|
{"/train.py": ["/model.py", "/dataset.py", "/loss.py"], "/main.py": ["/dataset.py", "/model.py"]}
|
40,530
|
SicParvisMagna95/rain_detection
|
refs/heads/master
|
/loss.py
|
import torch
import torch.nn as nn
import torch.nn.functional as F
# class FocalLoss(torch.nn.Module):
# def __init__(self, gamma=2):
# super().__init__()
# self.gamma = gamma
#
# def forward(self, log_pred_prob_onehot, target):
# pred_prob_oh = torch.exp(log_pred_prob_onehot)
# pt = pred_prob_oh.gather(1, target.data.view(-1, 1))
# modulator = (1 - pt) ** self.gamma
# mce = modulator * (-torch.log(pt))
#
# return mce.mean()
class FocalLoss(nn.Module):
r"""
This criterion is a implementation of Focal Loss, which is proposed in
Focal Loss for Dense Object Detection.
Loss(x, class) = - \alpha (1-softmax(x)[class])^gamma \log(softmax(x)[class])
The losses are averaged across observations for each mini-batch.
Args:
alpha(1D Tensor, Variable) : the scalar factor for this criterion
gamma(float, double) : gamma > 0; reduces the relative loss for well-classified examples (p > .5),
putting more focus on hard, misclassified examples
size_average(bool): By default, the losses are averaged over observations for each mini-batch.
However, if the field size_average is set to False, the losses are
instead summed for each mini-batch.
Input:
inputs: [N, C]
targets: [N, 1]
"""
def __init__(self, class_num, alpha=None, gamma=2, size_average=True):
super(FocalLoss, self).__init__()
if alpha is None:
self.alpha = torch.ones(class_num, 1)
else:
self.alpha = torch.tensor([alpha,1-alpha])
self.gamma = gamma
self.class_num = class_num
self.size_average = size_average
def forward(self, inputs, targets):
N = inputs.size(0) # batch_size
C = inputs.size(1) # 2
P = F.softmax(inputs,dim=1)
# class_mask = inputs.new(N, C).fill_(0)
class_mask = torch.zeros_like(inputs)
# ids = targets.view(-1, 1)
ids = targets.view(-1,1)
class_mask.scatter_(1, ids, 1) # 生成 one-hot 标签
# print(class_mask)
if inputs.is_cuda and not self.alpha.is_cuda:
self.alpha = self.alpha.to("cuda")
# alpha = self.alpha[ids.data.view(-1)]
alpha = self.alpha[targets]
probs = (P * class_mask).sum(1).view(-1, 1)
probs += 1e-8
log_p = probs.log()
# print('probs size= {}'.format(probs.size()))
# print(probs)
batch_loss = -alpha * (torch.pow((1 - probs), self.gamma)) * log_p
# print('-----bacth_loss------')
# print(batch_loss)
if self.size_average:
loss = batch_loss.mean()
else:
loss = batch_loss.sum()
return loss
class FocalLoss_bce(nn.Module):
def __init__(self, alpha=1, gamma=2, logits=False, reduce=True):
super(FocalLoss_bce, self).__init__()
self.alpha = alpha
self.gamma = gamma
self.logits = logits
self.reduce = reduce
def forward(self, inputs, targets):
targets = targets.float()
if self.logits:
BCE_loss = F.binary_cross_entropy_with_logits(F.softmax(inputs,dim=1)[:,1], targets, reduction='none')
else:
BCE_loss = F.binary_cross_entropy(F.softmax(inputs,dim=1)[:,1], targets, reduction='none')
pt = torch.exp(-BCE_loss)
F_loss = self.alpha * (1-pt)**self.gamma * BCE_loss
if self.reduce:
return torch.mean(F_loss)
else:
return F_loss
# if __name__ == '__main__':
# loss = FocalLoss(2)
#
# # conf_mask = torch.FloatTensor([0.0, 1.0, 0.0, 1.0, 1.0]) - 1
# # conf_data = torch.FloatTensor([-0.1, -0.9, 0.0, -0.2, -0.2])
# conf_data = torch.randn(3,2)
# conf_mask = torch.tensor([1,0,1], dtype=torch.long)
#
# print(loss(conf_data, conf_mask))
|
{"/train.py": ["/model.py", "/dataset.py", "/loss.py"], "/main.py": ["/dataset.py", "/model.py"]}
|
40,531
|
SicParvisMagna95/rain_detection
|
refs/heads/master
|
/mess/xml_jpg.py
|
import os
import xml.dom.minidom
import cv2
ImgPath = r'E:\data\rain_full\rain_imgs\match\img'
AnnoPath = r'E:\data\rain_full\rain_imgs\match\xml'
ImgLabelPath = r'E:\data\rain_full\rain_imgs\match\img_label'
imagelist = os.listdir(ImgPath)
for image in imagelist:
image_pre, ext = os.path.splitext(image)
imgfile = os.path.join(ImgPath, image)
xmlfile = os.path.join(AnnoPath, image_pre+'.xml')
# 打开xml文档
DOMTree = xml.dom.minidom.parse(xmlfile)
# 得到文档元素对象
collection = DOMTree.documentElement
# 读取图片
img = cv2.imread(imgfile)
# filenamelist = collection.getElementsByTagName("filename")
# filename = filenamelist[0].childNodes[0].data
# print(filename)
# 得到标签名为object的信息
objectlist = collection.getElementsByTagName("object")
for objects in objectlist:
# 每个object中得到子标签名为name的信息
# namelist = objects.getElementsByTagName('name')
# # 通过此语句得到具体的某个name的值
# objectname = namelist[0].childNodes[0].data
bndbox = objects.getElementsByTagName('bndbox')
for box in bndbox:
x1_list = box.getElementsByTagName('xmin')
x1 = int(x1_list[0].childNodes[0].data)
y1_list = box.getElementsByTagName('ymin')
y1 = int(y1_list[0].childNodes[0].data)
x2_list = box.getElementsByTagName('xmax')
x2 = int(x2_list[0].childNodes[0].data)
y2_list = box.getElementsByTagName('ymax')
y2 = int(y2_list[0].childNodes[0].data)
cv2.rectangle(img, (x1, y1), (x2, y2), (0, 0, 255), thickness=2)
# cv2.putText(img, objectname, (x1, y1), cv.FONT_HERSHEY_COMPLEX, 0.7, (0, 255, 0),
# thickness=2)
# cv2.imshow('head', img)
# cv2.waitKey()
cv2.imwrite(os.path.join(ImgLabelPath, image), img) #save picture
|
{"/train.py": ["/model.py", "/dataset.py", "/loss.py"], "/main.py": ["/dataset.py", "/model.py"]}
|
40,532
|
SicParvisMagna95/rain_detection
|
refs/heads/master
|
/mess/match.py
|
"""match the jpg with annotation"""
import os
import glob
import shutil
test_dir = r'E:\data\rain_full\rain_imgs\test'
test_dir_list = glob.glob(os.path.join(test_dir,'*'))
annotation_dir = r'E:\data\rain_full\Annotation'
annotation_list = os.listdir(annotation_dir)
match = []
for test_path in test_dir_list[1:]:
test_list = os.listdir(test_path)
for jpg in test_list:
jpg_xml = jpg[:-4] + '.xml'
if jpg_xml in annotation_list:
shutil.copy(os.path.join(test_path,jpg), r'E:\data\rain_full\rain_imgs\match\img')
shutil.copy(os.path.join(annotation_dir, jpg_xml), r'E:\data\rain_full\rain_imgs\match\xml')
match.append(jpg[:-4])
pass
pass
|
{"/train.py": ["/model.py", "/dataset.py", "/loss.py"], "/main.py": ["/dataset.py", "/model.py"]}
|
40,572
|
loyalist16/components
|
refs/heads/master
|
/ttf_ocr/baidu_ocr.py
|
# -*- coding: utf-8 -*-
# @Time : 2021/2/23 11:45
# @Author : loyalist
# @Email : lzl0118@foxmail.com
# @File : baidu_ocr.py
# @Desc : 百度的OCR项目
from aip import AipOcr
APP_ID = '**'
API_KEY = '**'
SECRET_KEY = '**'
client = AipOcr(APP_ID, API_KEY, SECRET_KEY)
def accurate_ocr(file):
'''
通用文字识别(高精度版)
'''
""" 读取图片 """
def get_file_content(filePath):
with open(filePath, 'rb') as fp:
return fp.read()
image = get_file_content(file)
""" 调用通用文字识别(高精度版) """
result = client.basicAccurate(image)
return result
|
{"/proxy/adsl/dial.py": ["/proxy/dbConnect.py"], "/txhk/core.py": ["/txhk/gap.py"]}
|
40,573
|
loyalist16/components
|
refs/heads/master
|
/proxy/dbConnect.py
|
# -*- coding: utf-8 -*-
# @Time : 2021/1/29 9:33
# @Author : loyalist
# @Email : lzl0118@foxmail.com
# @File : dbConnect.py
# @Desc : 连接各种数据库
import redis
import random
from proxy import setting
class MyRedisClient:
"""
redis连接客户端
"""
def __init__(self, host=setting.REDIS_HOST, port=setting.REDIS_PORT, passwd=setting.REDIS_PASSWORD,
proxy_key=setting.PROXY_KEY):
"""
初始化redis连接
:param str host: 主机
:param str port: 端口
:param str passwd: 密码
:param str proxy_key: 哈希键名
"""
self.db = redis.StrictRedis(host=host, port=port, password=passwd, decode_responses=True)
self.proxy_key = proxy_key
def set(self, name, proxy):
"""
设置代理
:param str name: 主机名称
:param str proxy: 代理
:return: 设置结果
:rtype: int
"""
return self.db.hset(self.proxy_key, name, proxy)
def get(self, name):
"""
获取代理
:param str name: 主机名称
:return: 代理
:rtype: str
"""
return self.db.hget(self.proxy_key, name)
def count(self):
"""
获取代理总数
:return: 代理总数
:rtype: int
"""
return self.db.hlen(self.proxy_key)
def remove(self, name):
"""
删除代理
:param str name: 主机名称
:return: 删除结果
:rtype: int
"""
return self.db.hdel(self.proxy_key, name)
def names(self):
"""
获取主机名称列表
:return: 名称列表
:rtype: list
"""
return self.db.hkeys(self.proxy_key)
def proxies(self):
"""
获取代理列表
:return: 代理列表
:rtype: list
"""
return self.db.hvals(self.proxy_key)
def random(self):
"""
随机获取代理
:return: 代理
:rtype: str
"""
proxies = self.proxies()
return random.choice(proxies)
def all(self):
"""
获取所有 主机:代理 键值对
:return: 所有代理
:rtype: dict
"""
return self.db.hgetall(self.proxy_key)
if __name__ == '__main__':
rc = MyRedisClient()
# p = rc.set('2', "153.35.238.242")
p = rc.names()
# p = rc.get('1')
print(p, type(p))
|
{"/proxy/adsl/dial.py": ["/proxy/dbConnect.py"], "/txhk/core.py": ["/txhk/gap.py"]}
|
40,574
|
loyalist16/components
|
refs/heads/master
|
/proxy/setting.py
|
# -*- coding: utf-8 -*-
# @Time : 2021/1/29 9:36
# @Author : loyalist
# @Email : lzl0118@foxmail.com
# @File : setting.py
# @Desc : 配置文件, 存储一些私密信息
# Redis 数据库 IP
REDIS_HOST = 'localhost'
# Redis 数据库密码,如无则填 None
REDIS_PASSWORD = None
# Redis 数据库端口
REDIS_PORT = 6379
# 代理池键名
PROXY_KEY = 'adsl'
|
{"/proxy/adsl/dial.py": ["/proxy/dbConnect.py"], "/txhk/core.py": ["/txhk/gap.py"]}
|
40,575
|
loyalist16/components
|
refs/heads/master
|
/txhk/gap.py
|
# -*- coding: utf-8 -*-
# @Time : 2021/3/11 16:22
# @Author : loyalist
# @Email : lzl0118@foxmail.com
# @File : gap.py
# @Desc : 获取到缺口所在位置的左边距
from PIL import Image
def get_pixel_list(file_path):
"""
获取图片像素列表
:param file_path: 文件路径
:return: 图片像素列表
"""
img = Image.open(file_path)
img = img.convert('RGBA')
return img.load()
def is_pixel_equal(img1_list, img2_list, x, y):
"""
判断两张图相同坐标处像素是否相同
:param PixelAccess img1: 图片1的像素列表
:param PixelAccess img2: 图片2的像素列表
:param int x: 横坐标
:param int y: 纵坐标
:return: bool
:rtype: bool
"""
pixel1 = img1_list[x, y]
pixel2 = img2_list[x, y]
threshold = 45
if abs(pixel1[0] - pixel2[0]) < threshold and abs(pixel1[1] - pixel2[1]) < threshold and abs(
pixel1[2] - pixel2[2]) < threshold:
return True
else:
return False
def get_left_offset(img1, img2):
"""
获取图片左边偏移量
:param img1: 全图
:param img2: 缺口图
:return: 缺口左边距离
"""
left = 60
top = 30
img1_list = img1.load()
img2_list = img2.load()
for x in range(left, img1.size[0]):
for y in range(top, img1.size[1]):
if not is_pixel_equal(img1_list, img2_list, x, y):
# 判断右边是否为白边
right_pixel = img2.load()[x + 1, y]
if right_pixel[0] > 200 and right_pixel[1] > 200 and right_pixel[2] > 200:
left = x + 1
return left
if __name__ == '__main__':
img1 = Image.open('full_img/penguin.png').convert('RGBA')
img2 = Image.open('cap_union_new_getcapbysig (2).jpg').convert('RGBA')
left = get_left_offset(img1, img2)
print(left)
|
{"/proxy/adsl/dial.py": ["/proxy/dbConnect.py"], "/txhk/core.py": ["/txhk/gap.py"]}
|
40,576
|
loyalist16/components
|
refs/heads/master
|
/ttf_ocr/run.py
|
# -*- coding: utf-8 -*-
# @Time : 2021/3/1 17:35
# @Author : loyalist
# @Email : lzl0118@foxmail.com
# @File : run.py
# @Desc : 识别图片中的文字
from baidu_ocr import accurate_ocr
from ttf2img import ttf2img
from loguru import logger
def get_fontMap(fontPath):
"""
获取 编码:字符 映射表
:param str fontPath: 字体文件路径
:return: 字体映射 或者 ocr识别有缺漏,则返回code_list和识别后的字体列表, 核实完后手动dict(zip())生成映射
:rtype: dict|tuple
"""
code_list, img_name = ttf2img(fontPath=fontPath)
result = accurate_ocr(img_name)
print(result)
words_result = [words.get('words') for words in result.get('words_result')]
words = ''.join(words_result)
words_list = list(words)
if len(words_list) == len(code_list):
font_map = dict(zip(code_list, words_list))
print(font_map)
return font_map
else:
error_words = min(words_result, key=len)
line = words_result.index(error_words) + 1
logger.error("第{}行, 字体有缺漏, 请核对 `{}` ", line, error_words)
return code_list, words_list
if __name__ == '__main__':
get_fontMap('4165b8d8.woff')
|
{"/proxy/adsl/dial.py": ["/proxy/dbConnect.py"], "/txhk/core.py": ["/txhk/gap.py"]}
|
40,577
|
loyalist16/components
|
refs/heads/master
|
/proxy/adsl/dial.py
|
# -*- coding: utf-8 -*-
# @Time : 2021/1/29 10:20
# @Author : loyalist
# @Email : lzl0118@foxmail.com
# @File : dial.py
# @Desc : 定时拨号, 在服务器上运行
import re
import subprocess
import time
import requests
from proxy.dbConnect import MyRedisClient
from requests.exceptions import ConnectionError, ReadTimeout
# 拨号网卡
ADSL_IFNAME = 'ppp0'
# 测试 URL
TEST_URL = 'https://www.baidu.com'
# 测试超时时间
TEST_TIMEOUT = 20
# 拨号间隔
ADSL_CYCLE = 100
# 拨号出错重试间隔
ADSL_ERROR_CYCLE = 5
# ADSL 命令
ADSL_BASH = 'adsl-stop;adsl-start'
# 代理运行端口
PROXY_PORT = 8888
# 客户端唯一标识
CLIENT_NAME = 'adsl1'
class Sender():
def get_ip(self, ifname=ADSL_IFNAME):
"""
获取本机 IP
:param str ifname: 网卡名称
:return:
"""
(status, output) = subprocess.getstatusoutput('ifconfig')
if status == 0:
pattern = re.compile(ifname + '.*?inet addr:(\d+\.\d+\.\d+\.\d+)?', re.S)
result = re.search(pattern, output)
if result:
ip = result.group(1)
return ip
def test_proxy(self, proxy):
"""
测试代理
:param proxy: 代理
:return: 测试结果
"""
try:
response = requests.get(TEST_URL, proxies={
'http': 'http://' + proxy,
'https': 'https://' + proxy
}, timeout=TEST_TIMEOUT)
if response.status_code == 200:
return True
except (ConnectionError, ReadTimeout):
return False
def remove_proxy(self):
"""
移除代理
:return: None
"""
self.redis = MyRedisClient()
self.redis.remove(CLIENT_NAME)
print('Successfully Removed Proxy')
def set_proxy(self, proxy):
"""
设置代理
:param str proxy: 代理
:return: None
"""
self.redis = MyRedisClient()
if self.redis.set(CLIENT_NAME, proxy):
print('Successfully Set Proxy', proxy)
def adsl(self):
"""
拨号主进程
:return: None
"""
while True:
print('ADSL Start, Remove Proxy, Please wait')
self.remove_proxy()
(status, output) = subprocess.getstatusoutput(ADSL_BASH)
if status == 0:
print('ADSL Successfully')
ip = self.get_ip()
if ip:
print('Now IP', ip)
print('Testing Proxy, Please Wait')
proxy = '{ip}:{port}'.format(ip=ip, port=PROXY_PORT)
if self.test_proxy(proxy):
print('Valid Proxy')
self.set_proxy(proxy)
print('Sleeping')
time.sleep(ADSL_CYCLE)
else:
print('Invalid Proxy')
else:
print('Get IP Failed, Re Dialing')
time.sleep(ADSL_ERROR_CYCLE)
else:
print('ADSL Failed, Please Check')
time.sleep(ADSL_ERROR_CYCLE)
def run():
sender = Sender()
sender.adsl()
if __name__ == '__main__':
run()
|
{"/proxy/adsl/dial.py": ["/proxy/dbConnect.py"], "/txhk/core.py": ["/txhk/gap.py"]}
|
40,578
|
loyalist16/components
|
refs/heads/master
|
/ttf_ocr/ttf2img.py
|
# -*- coding: utf-8 -*-
# @Time : 2021/2/23 11:00
# @Author : loyalist
# @Email : lzl0118@foxmail.com
# @File : ttf2img.py
# @Desc : 将ttf文件转为图片
from PIL import Image, ImageDraw, ImageFont
from fontTools.ttLib import TTFont
import numpy
def ttf2img(fontPath):
"""
字体文件转换为图片
:param str fontPath: 字体文件路径
:return: 编码列表, 图片路径名字
:rtype: tuple
"""
im = Image.new('RGB', (1800, 1000), (255, 255, 255))
dr = ImageDraw.Draw(im)
font = ImageFont.truetype(fontPath, 40)
tt_font = TTFont(fontPath)
code_list = tt_font.getGlyphOrder()[2:]
array_list = numpy.array_split(code_list, 15)
for i in range(15):
newList = [j.replace("uni", "\\u") for j in array_list[i]]
text = "".join(newList)
text = text.encode('utf-8').decode('unicode_escape')
dr.text((0, 50 * i), text, font=font, fill="#000000")
font_imgName = "font.jpg"
im.save(font_imgName)
return code_list, font_imgName
if __name__ == '__main__':
ttf2img('4165b8d8.woff')
|
{"/proxy/adsl/dial.py": ["/proxy/dbConnect.py"], "/txhk/core.py": ["/txhk/gap.py"]}
|
40,579
|
loyalist16/components
|
refs/heads/master
|
/txhk/core.py
|
# -*- coding: utf-8 -*-
# @Time : 2021/2/3 13:33
# @Author : loyalist
# @Email : lzl0118@foxmail.com
# @File : core.py
# @Desc : 腾讯滑块破解
import base64
import requests
import re
import time
import warnings
import execjs
from urllib.parse import quote
from txhk.gap import get_left_offset
from PIL import Image
warnings.filterwarnings('ignore')
class Txhk:
def __init__(self, ua, aid, refer_url, app_version):
"""
初始化
:param ua: User-Agent
:param aid: 滑块使用者aid
:param refer_url: 来源url
:param app_version: navigator.appVersion值
"""
self._ua = ua
self.aid = aid
self.refer_url = refer_url
self.app_version = app_version
@property
def ua(self):
return base64.b64encode(self._ua.encode()).decode()
def initCaptcha(self):
"""
初始化验证 获取sess, sid
:param str aid: 用户id
:param str ua: ua
:return: 结果
:rtype: dict
"""
url = "https://t.captcha.qq.com/cap_union_prehandle"
params = {
'aid': self.aid, 'protocol': 'https', 'accver': '1', 'showtype': 'popup',
'ua': self.ua,
'noheader': '1', 'fb': '1', 'enableDarkMode': '0', 'grayscale': '1', 'clientype': '2',
'lang': 'zh-CN',
'subsid': '1', 'callback': '_aq_716998'
}
headers = {
'Host': 't.captcha.qq.com',
'sec-ch-ua': '"Chromium";v="88", "Google Chrome";v="88", ";Not A Brand";v="99"',
'sec-ch-ua-mobile': '?0',
'User-Agent': self._ua,
'Accept': '*/*',
'Sec-Fetch-Site': 'cross-site',
'Sec-Fetch-Mode': 'no-cors',
'Sec-Fetch-Dest': 'script',
'Referer': self.refer_url, # 根据不同程序改变
'Accept-Language': 'zh-CN,zh-TW;q=0.9,zh;q=0.8,en-US;q=0.7,en;q=0.6'
}
response = requests.get(url, headers=headers, params=params, verify=False)
s = re.search('_aq_716998\((.*)\)', response.text).group(1)
result = eval(s)
# {"state":1,"ticket":"","capclass":"1","subcapclass":"15","src_1":"cap_union_new_show","src_2":"template/new_placeholder.html","src_3":"template/new_slide_placeholder.html","sess":"s0On0zvlytShIM4z6GoUJV5m0WYGy0Dj1O4bOxPpmlARN9P7uJNkP5UADIPDMQtEcA3uwzHdf_zq9Z4BSw8tM3NTTM9H5lA1MTdRSrbfCNsUx5rKJak4b2Zorv1zVNQZfwkEbTcSwuoEjzIFvyjZ0K_yjVY0YwceLjkN4O0SierPzIJ-BWI0uaWxoWCv1dY7w0E8cnx24D1Ek*","randstr":"","sid":"6762554139436920832"}
return result
def showCaptcha(self, sess, sid):
"""
验证码显示页面, 获取验证码id
:param str sess: initCaptcha 生成
:param str sid: initCaptcha 生成
:return: 请求url, nonce, tdc_url
:rtype: (str, str, str)
"""
self.rnd = '535739'
self.createIframeStart = f'{int(time.time() * 1000)}'
self.prehandleLoadTime = '273'
url = "https://t.captcha.qq.com/cap_union_new_show"
params = {
'aid': self.aid, 'protocol': 'https', 'accver': '1', 'showtype': 'popup',
'ua': self.ua,
'noheader': '1', 'fb': '1', 'enableDarkMode': '0', 'grayscale': '1', 'clientype': '2',
'sess': sess,
'fwidth': '0', 'sid': sid, 'forcestyle': 'undefined', 'tcScale': '1',
'rnd': self.rnd,
'TCapIframeLoadTime': 'undefined',
'prehandleLoadTime': self.prehandleLoadTime,
'createIframeStart': self.createIframeStart,
'subsid': '2'
}
headers = {
'Host': 't.captcha.qq.com',
'sec-ch-ua': '"Chromium";v="88", "Google Chrome";v="88", ";Not A Brand";v="99"',
'sec-ch-ua-mobile': '?0',
'Upgrade-Insecure-Requests': '1',
'User-Agent': self._ua,
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
'Sec-Fetch-Site': 'cross-site',
'Sec-Fetch-Mode': 'navigate',
'Sec-Fetch-Dest': 'iframe',
'Referer': self.refer_url,
'Accept-Language': 'zh-CN,zh-TW;q=0.9,zh;q=0.8,en-US;q=0.7,en;q=0.6'
}
response = requests.get(url, headers=headers, params=params, verify=False)
# img_id = re.search('image=(\d+)?', response.text).group(1)
nonce = re.search('nonce:"(.*?)"', response.text).group(1)
tdc_url = re.search('dcFileName:"(.*?)"', response.text).group(1)
return response.request.url, nonce, tdc_url
def resetSess(self, sess, sid, show_url):
"""
重置sess, 并获取初始滑动位置
:param str sess: initCaptcha 生成
:param str sid: initCaptcha 生成
:param str show_url: 验证码显示页面url
:return: 结果
:rtype: dict
"""
url = "https://t.captcha.qq.com/cap_union_new_getsig"
payload = f"aid={self.aid}&protocol=https&accver=1&showtype=popup&ua={quote(self.ua)}" \
f"&noheader=1&fb=1&enableDarkMode=0&grayscale=1&clientype=2&sess={sess}&fwidth=0&sid={sid}&forcestyle=undefined" \
f"&wxLang=&tcScale=1&uid=&cap_cd=&rnd={self.rnd}&TCapIframeLoadTime=undefined&prehandleLoadTime={self.prehandleLoadTime}&" \
f"createIframeStart={self.createIframeStart}&subsid=2&rand=99947704"
headers = {
'Host': 't.captcha.qq.com',
'sec-ch-ua': '"Chromium";v="88", "Google Chrome";v="88", ";Not A Brand";v="99"',
'Accept': 'application/json, text/javascript, */*; q=0.01',
'X-Requested-With': 'XMLHttpRequest',
'sec-ch-ua-mobile': '?0',
'User-Agent': self._ua,
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
'Origin': 'https://t.captcha.qq.com',
'Sec-Fetch-Site': 'same-origin',
'Sec-Fetch-Mode': 'cors',
'Sec-Fetch-Dest': 'empty',
'Referer': show_url,
'Accept-Language': 'zh-CN,zh-TW;q=0.9,zh;q=0.8,en-US;q=0.7,en;q=0.6'
}
response = requests.request("POST", url, headers=headers, data=payload, verify=False)
# {'ret': '0', 'sess': 's0RtRP3ArZ4B7NVpRRZnCjofZ7RDj0qBLHRme1xLyGllMt5ElSEollN9UD41KZcP9fdtyVDWfuUeXpyyJ14kI0g55UPi9srdReuc9GOFWwe4EvAwJtMoe_jwi2-81TEelOA3ERgbepgmobOrK-ckUaQUcNP5EeC0Gcj51wTN2BfPDikyvKdQtjNRKKUS4Exfjb7ojieTJsBb6YUZXYhI7R4_vfU8ejqRFpdQbMA8pIIbY*', 'cdnPic1': '/hycdn?index=1&image=937032639779710976', 'cdnPic2': '/hycdn?index=2&image=937032639779710976', 'initx': '68', 'inity': '66'}
return response.json()
def loadImage(self, sess, sid, show_url):
"""
下载验证码图片
:param sess: resetSess生成
:param sid: initCaptcha生成
:param referer: 验证码显示页面url
:return: None
"""
# 缺口图片
url1 = f"https://t.captcha.qq.com/cap_union_new_getcapbysig?aid={self.aid}&" \
f"sess={sess}&sid={sid}&img_index=1&subsid=7"
headers = {
'Host': 't.captcha.qq.com',
'Connection': 'keep-alive',
'sec-ch-ua': '"Chromium";v="88", "Google Chrome";v="88", ";Not A Brand";v="99"',
'sec-ch-ua-mobile': '?0',
'User-Agent': self._ua,
'Accept': 'image/avif,image/webp,image/apng,image/svg+xml,image/*,*/*;q=0.8',
'Sec-Fetch-Site': 'same-origin',
'Sec-Fetch-Mode': 'no-cors',
'Sec-Fetch-Dest': 'image',
'Referer': show_url,
'Accept-Language': 'zh-CN,zh-TW;q=0.9,zh;q=0.8,en-US;q=0.7,en;q=0.6'
}
response = requests.request("GET", url1, headers=headers, verify=False)
with open('topic7_1.png', 'wb') as f:
f.write(response.content)
# 小图片
url2 = f"https://t.captcha.qq.com/cap_union_new_getcapbysig?aid={self.aid}&" \
f"sess={sess}&sid={sid}&img_index=2&subsid=8"
response = requests.request("GET", url2, headers=headers, verify=False)
with open('topic7_2.png', 'wb') as f:
f.write(response.content)
# 完整图片
url3 = f"https://t.captcha.qq.com/cap_union_new_getcapbysig?aid={self.aid}&" \
f"sess={sess}&sid={sid}&img_index=0&subsid=9"
response = requests.request("GET", url3, headers=headers, verify=False)
with open('topic7_0.png', 'wb') as f:
f.write(response.content)
def get_tdc_js(self, tdc_url, show_url):
"""
获取到tdc_js文件
:param str tdc_url: tdc文件url
:param show_url: 验证码展示窗口url
:return: tdc_js文件内容
:rtype: str
"""
url = "https://t.captcha.qq.com/" + tdc_url
payload = {}
headers = {
'Host': 't.captcha.qq.com',
'sec-ch-ua': '"Chromium";v="88", "Google Chrome";v="88", ";Not A Brand";v="99"',
'sec-ch-ua-mobile': '?0',
'User-Agent': self._ua,
'Accept': '*/*',
'Sec-Fetch-Site': 'same-origin',
'Sec-Fetch-Mode': 'no-cors',
'Sec-Fetch-Dest': 'script',
'Referer': show_url,
'Accept-Language': 'zh-CN,zh-TW;q=0.9,zh;q=0.8,en-US;q=0.7,en;q=0.6'
}
response = requests.request("GET", url, headers=headers, data=payload, verify=False)
return response.text
def get_collect(self, tdc, location_href, start_pos, end_pos):
"""
获取加密参数collect
:param str tdc: 动态tdc文件
:param str location_href: 验证码的展示链接
:param list start_pos: 滑块开始滑动坐标
:param list end_pos: 滑块结束滑动坐标
:return: collect参数
:rtype: str
"""
jscode = """const {JSDOM} = require('jsdom');
// 重写require库, 过fs检测
var ori_require = require;
require = function (v) {
console.log("重写require", v);
if (v === 'fs') {
return false;
} else {
return ori_require(v);
}
}
"""
jscode += "const dom = new JSDOM('', {url: '%s',referrer: '%s',contentType: 'text/html',includeNodeLocations: true,storageQuota: 10000000})" % (
location_href, self.refer_url)
jscode += """
// 加载
dom.window.dispatchEvent(new dom.window.Event('load'));
// window设为全局
var window = global;
delete process.cwd;;
window.addEventListener = function (event, callback, flag) {
dom.window.addEventListener(event, callback, flag);
}
window.location = dom.window.location;
window.WebGLRenderingContext = function () {
}
WebGLRenderingContext.toString = function () {
return "function WebGLRenderingContext() { [native code] }";
}
document = dom.window.document;
"""
jscode += """// navigator
navigator = dom.window.navigator;
Object.defineProperties(navigator, {
appVersion: {value: "%s"},
platform: {value: "Win32"},
languages: {value: ["zh-CN", "zh-TW", "zh", "en-US", "en"]},
userAgent: {value: "%s"},
plugins: {value: [{name: "Chrome PDF Plugin"}, {name: "Chrome PDF Plugin"}, {name: "Chrome PDF Viewer"}]},
})
""" % (self.app_version, self._ua)
jscode += """navigator.getBattery = function () {
let num = 0.6;
return new Promise((resolve, reject) => {
})
}
// plugins: [{name: "Chrome PDF Plugin"}, {name: "Chrome PDF Plugin"}, {name: "Chrome PDF Viewer"}],
window.innerWidth = 360;
window.innerHeight = 360;
screen = dom.window.screen;
Object.defineProperties(screen, {
availHeight: {value: 1040},
availLeft: {value: 1920},
availTop: {value: 0},
availWidth: {value: 1920},
colorDepth: {value: 24},
height: {value: 1080},
pixelDepth: {value: 24},
width: {value: 1920},
orientation: {
value: {
angle: 0,
onchange: null,
type: "landscape-primary"
}
}
})
// canvas
dom.window.HTMLCanvasElement.prototype.toDataURL = function () {
return "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAMgAAADICAYAAACtWK6eAAAgAElEQVR4Xu2dCZgU1bn3f2dm2BFRUFAQBQU1GlFjjKKoMcYNl2gSzKJe4wIuNyYmMeaquZpEvXrNouZGCSrXm2i+iNkNJHFf0KhRiQoqKCICCrJFGNmZ+p5/dZ2emp7q6aru6pqunjrPM8/MdJ/lrfecf73Lec97DDVeHJwhwEeBPYHdgGHADsBAYOci5C8AlgPvAfp7HvAa8IrBLLZtHJw+wEeAvXy/twF6AT293/rb/qjpuoKf9d7/q4DZwKv2t8F8mKfPifc5MK3PUeNTmGryTK1R7+AICJ8CxgIHAzvFTOMaoBnoAWwbc9/57oTGaf1Y+bc92PDCQfRddQRbuU8kWMdTFgJ/B54EHsYYDRmqOBNwQlWss0pmMpHXe+QG1eCZg3MQcApwoicpqjFM1ft8Bvg9cL8nrgIH3Bc4BBjjvQLig78AoqF/jzEipWjJABJ+KXQaQBwcvUvPAs4B9ghL8gXMYBKvsgf9uYqP8UUe5q8czzEMDdtFpHoaT+U2DuVDNjOehxhGX/d/FelxdwF3Aq9H6tmrrFfDOOAkYB9fB7MXwZRH4JJxMHRA1J5Fiki6C2NEYms5b/Jtx5tXzp/KZPqwIWq/gfUv4Mu8w7bE2WcshBV0Ep8EOff2QTS0PNZm4TrmWG6f8LdKCXdwZE9cBEyM2tctzOI2XuUxTmQQvfgbiziW6Z0CkAs5lJ8BP4/6EB3VP9qToZKjzRUBxD+KSPwZxrzifpgBJNKMtZcgE35+MXAzhYA4b/JtwB/KBYmDo/fjpcDpkSj0Vdbb/B2amcpR9KGp3G4itSuUIMfxEEvpy1xPgkTqLGxluQcOXwQNj8Ct42CXyBIkaKS7gRuZcPsFXVqCnPGLPvRcPxV4h9snXFBqStoC5LzJx2CcuzHOUfz8/JdKNQ7zvYOzHXCVJzXCNClapzMB8n0O5Uo2M5mHgL5QTYC4HFgEPAK7j4NzB+SU0TgM/Fv+Muuo2X/Z+w9dVcUqGyC2oXH+xuSJtwSu0qDOrTrmmMfyiPQ+u3bagY8c9+7QM492pve9kYO4gX/yOv9yu5bdsC8DOIL785/dzBguZu92Qy9lXZt6qnA+H+F89uQopnE3R+ZtEGsnTOedfD/+fv12hCrInrHfv8QKt7/lyHObG0NlFvAyh7KazZAHSH/gaW8MvfJlSNg3va1naSj83lv8HOn1keNJznK3z2/reP3usAKGTYOP94RrT4R+8jwXlNXr4Ef3wxKvv72Hwa6DYN5SmHAU9GiCe2bQ9MRUvs9dzOAshrGS27gn39GH9GA8EziG2RzOXI7iEiZxD1M4hOkebcczq429YW2Qs3mKz3ma80CaeYifMNoFOrzEULev5e7LRfN/iztGYQmyZyxNflpv4Ui+xmn55rY/O87neDH/XL6x7wM+2eZV45hJRSXJxEmjWyXIxEmjccxDOOb0DtUoqWCOOYb1PcfzyzM/xLaTvdrScAR3nLf0ov2fPOWJxUumPLh0XP8lrHUX3UB65m0H2RJf42n3s4cYx2gGoM9+wIv5/4MAWihB7IK2ALFAOoId80Z04Wd+APmNe9vXd9k/D9LLmcV/uSAQUGSU+xe+fzHLkH/LB5K5HlgsYPT9u56BoYWtRTMdEMhkcOgzwfBFXx9+gPT2HFT9oO9RcFYTnA3s5+OSBceoHeHLOQcCMvRvmQ4Cig8gPCENYzJ7cAgbOJy/8yMGsdpt8jf24nTOdhe3iha1il3sS+nHEXyTESzPg0SLehKHcT5PuIvSLmi1k+HeTA+3zQU8zsWSiq73YAyf54V2jgL/+H5wncYE7mWyC7hCEKnNsVycB11hH7b+9HUfz6EzjIrlretCgNyLcU7rUL3KNWytZwFjnP4Y50Ln5xMPutnMuvUBZ1GDbIU3+aDdWz5oMdqFfBNjinqkSgGk0Ii3y0
fG/Ok84oJvN7Z2PVG5yWu1ZQr7lmV7EZvZ0kalsgDJLR3ydpD2DuVhFZDaS0BY4W5V5LZ3BJoC6eBSU9iHrXMMMNN7FN+Y2uL8jvcjc+yRWfD4q/DNAulyzwxY2RwIEJD2ewkXM4Wbvbe5FpOKFrp9836XafmFXQiioAVbDGh3MyVQavhfhhaAfjBJWmjRC2xvsh1+sKhtoYTxA1RS7UpO5jF+xODJqw1hVayczT0sOkD8qti6Xnd6aJwCnH3Jn/bu/+OlB4/RYtuT/u6buPAtrwcKAoP97AI+EqhmqV0pgPgNaj/T/TQcyuB2rlorVeQqFs0TgNvzHeTcvG0liFzKfiD4VS/v7e26h7Wp7i/Hg+uOjgIQqWf9CgDp61NuYgGl2aPTSg9bRcCRJAmQILhu3hwgzuMefuBJh5uY6i5kAaRwMaquBY5d8EFqkf8tvhvLXLVNKlqhelbAIPdfPyD0v1X5JH2stAhqZyWYn0apdFb9ct28YQDiW+OtAGl17d5W1AaxVOXQBY65GuNM+eGTB/1n8+KN9z63ZtmuP2UMZ/Aot3KoqzqlCSAHMpSH2dvdmm4tUQHyMU+aqAerPkmCTAPX5ogCEKlh24Or/vhtnIDl8ckZueEusQD16pQEiKJsxgM/Ym92Yh3jecpTueICSJAd0hFQ/OMuYWu+znhXAkgNFED8/wcBRZ9ZSfQ6g2MCiPsamXwbxjnC2hLFBifn7boJx9ww4u2tTpr34Bf2f4kVO1/IDP6Tj/F/zOV2DnNdsUkCpCMV6+s87dpAfekWKEGO5iFeoS9r2ninrNqzY4EEKfRi+QEgrknP9i9oa3OUI0HUjzbJ/TZO0MzMgK2b4aqj4BLPBb5hM0zOqZPFJYik0zddhRP2ZRveYzqPIMFUKCnsqHrD38bh+UVbSoJYgNj2QXaG/4n8joLX3LC7nMoXJL2KrVGrKu7Je3laQ6tYPinT1s3bKloObOfq9e+D5KTN7/q+3W3Ybx749FCpJlZNWcl69mVg3khOEiAdGelWdQvaDX8AOCZvOBca31KTgox0u9iteqW3vF7hawukhQWZPEvlAmRrz3v2fgeSxIJ0RM5I/29t7c+Ce58uaqTnVCwVSTZJEjkLfuWqgNoRHuR5nuSRsm/wILukFEA0wkyGcZbn9SsFENVXne8zjv6s4zp+n/eGWfC8xcA8Tap/GafyJZ5z6wnAP2Cc61iw6p08YJMm35Nb7559kXc0BaEsJwT+Ghxq4n3Zpl3BxuFeh9w3fctTLcfZXW3Vtd6pIO+Q3xVbLRtENAS5ef30FAJEis8J+Qe1b3r7gRb0294/fi9Wd2Cju9eUKwow9hvt8khZF7A8VQKdpEpUFcsvifweNAu0wpm1IFkPikn+1hhcUdChDaI+pPbJW6VnnYwFzk8Zyve4hBv5LTdwDFJXVG7m3jZGeymAWENeni6VQhdw0PoM8pb561nPmf3M2hnWRvHT6LNbvuaaD62e14F05OY9b/IxZcViOTgKiqg47KSYeAz7uaTTaTzMvXzKtXeilpzkqPPy+Rkw1ufF0uNOLJx2v5rl35sYSj8uYWoI71MauBhfLFYHT+tF3irMutOL3LfWtlBsVpSicFfF0td38aSO3P+/PhRGeE/bDiCthnrOIWBLTrLcxBS+FrCplzbeVR0gDo4OKD3ewUGlxHgWZEuEHVwnqA73TlKFbVP79aReKTpIaoyNU/NtYO45ANfO1eZiG4DoWIwc25IcuU28QoBszxSeY3bR02m1z5schUkA5AnvFEOn8cQfdnI8w8oKXNQSauvK7bTHiXngQhvKv1PveYz/oG1sq2JpD0Tc0LS2hpsUAgSmMJbZbq00l6oCxMGR9XZemhkk2ttuAqb9acqk/wQD8lxHLJp8LYK0lqoBxMHR2Y1JaWWMpVvhI+en/SFiod/AceUdZtYiiHyQJxaaK++kKgBxcLQJ8DLQWDmJndeDdjN0IGVL55FQeyMrZvurkcmS5T8aYwrjaC
J3lIYGJd28Do7cuXLrprrInSu3blYKOHAtcHlkrjyAMXXvIRdXOgSIg/PvwE8js6/GGvxPWS/KGnuIapJzM6BzpNHKVzFGrK3rUhQg3knAN71Q0tQyYZmXTMvv3U/tw1STcKV40BmT8EUs3Q1jxOK6LR0BRG8HJVdIdZEIVHKFrITgwL1eYG+Iql4VJYMQi+u2BALES7AQy5n0zuScPAujO5OAtI2tiJM/ebuo4WmXwS5W12UpBpBfVpJ9pFY4dQagVB5ZicCBUcBfaA1LKd30bowRq+uytAOIl7eqzRthAx+ymiVsoJkt7rlsWfeGRrrTh23YikE0JpSGJ+wsKAmUPw9buHbKyKndaIVtWIewzrZ2AzfZhM6B7B6uq6rVmuNlTlVw5i7eKIrCFc0x0fdZ4DeRHmCffN6tgGZnPcfgpkaGtDTRPGU0eoBEy/kz2WVzCwPKGT8IIPm9oC1sYgVvs84XwNZAowuOFlpwaHEftIEmBjCM3m6MdW0UbQhGT+qmvNZLvAdocJ8st+i0BRTjAqyIRQkARPRdA1wRmtCfY0zRPdi6AYiXDtT1SmxhI+/zJhtZh6GBfmzP1uzg/m3LWv7FKhaymY000o3tGEEPL61LaNZWoaJybSodQfRi38RSxkdGb55Ii4QAomf5s3c+K9xzbdcuzanXrp4A8i03+x6wjHkIAFr4AxlOT7YKZNNG1rpAkrTpQR8GMaoNiMLxNt5aP/RSOEbv1QLEr75E76W6LRIEiGIoHiRs3NalGCPWtyv1BBAdft5DtoYWfQtb2JrB9EdXdBQvq1hMM8voTm8GsDNreJ/VvE8vV+60fRN/yEpWsMBVz/TtNgW3G7zPG65KZ79bwhzX9hEN3enFSldi5Y6KCrySaluxnQvQlbzjtj2SFua5kk5HVXXar1SaUguMwmdUO9Gvo67FVCydLFTOqw+8vFnqQ+0USSu+FY5tF7iiBXU8V+1UdJ5FNoV+yxbS1SaShZu8/VzlxtKz6NYD3d5QzAbRmDrpqOR36kf2k1Iy6mx3kE9G2xlSK0WLtbtUT9lUBsGXB7T1dGyYA1uaodsQaOgFmxZDy3oanfVvn/3qIae0NLFq3gcsfuyTnrGqDOVFbJCrZ9N90Xp2BXo3OGxc1423fjma1jtVii05B3P2THZocNwH0wPiNLGhcRNLNrbQVGjvFNogZ82nZ9NK5I7o1r0ni2/dK69X50ec8Dy9TQMj8xzzH4SS2qQFLsNbC1wLP0qxIFB7SZQm9yqOXLF962+pY4N9Ru8m1rOUuS4wJbV60x8LEEknSSsVqXkCmIPj2kP9GMxaVqH2L9HIye7CyNlHuJJP9+60qobtn0ULSvffqI1+rP0h20Pzt7QIQNRGbXOOi9ZwNbvQNHc6pZTLV5YrFiD22K7GEL3ikeZMY2t/VteYqOh/TZP6VH/6Xy+IIIAIjOpLdW2/HfFBINSP2mgM28YPlB3glh1aY7YsQBr6QIvmQ21z7fZaNfUrBy+9Xv6RtW+u5g0LkiCAHPEoTSP6smtDA32jgONqh4bFz7Gb05RTaZwWWhocHKeRRv3W2C2GPn6DPMhIn/A8u7YY+re00Dzl4+0dBxfOZvDG9QzxA+QG4Nsa1C5KAUML2G93tF9c7T+R/bKEue5bXRKlj++emiW8jrxiKk10bwOgZpa7UkCAErAkISwtOUD1YSAj3HbypknaCDQCiXxqGut79HfzFYAMbi1sLSgt8mAVsS31xVSsIC+R3tBveGfT9bbV218eLxW94ef7vnNfVgUAEesHeVJGX9lFLcDJDBTdkjKqU9in/g8CiD7XOJI0kmAq4oEknIAiy0zf2f4ERPu5Tg/a5SAA6lIuJZzoCTuOgie75bBuAeJitw/0GAFGYIde62bdesarH71TJDTAokmjXdHbToK4i/wldnO2uJOyqftq3rr1ky7TSpbz/8mQzVsYLGCYLbx7x0HuA3LGS/TpvpHhpiH3Ni4FkIueZcDaRnbu3sAWp4
U3Jh/gvX0B0ffuC4wS0PwAUXTmnlp4eotvYl2gilTyCbwK1obxq1FWQqhKDuySkcPp5apCUmLeppkVrjdMBr+KBYjAMoiRdPMdrV3NUlaxyIWHVLB+DHLzj+SuWtICVgpQvd01+covVapEAYj1eOmtLQllwWHHkATQItMClNpjF7qVIAKVHzhqJ3VNNGuBKkFCoWorV7OApz6DACJQ6dBn4cVZllYrpbSgpVbpR3+LjkJVUOqdwCrpMBLO6+3lc/BULNMNeozMqVn50vL6uS82niqVyTEsv3N/9/q7NgAZtg9v+CRAJHBIJVu4mVFmMz2aGlkyaV/3LZgvZ7/OVo2r2VXSpBRAJjxPN6eJUc4Wem7cwJJfjGnt68LZ9N30oTupuVeGd+2ZG76st34OIOvpywAG5H3tpRZX2+9lh2jxakFbKWRVr570ddUoSZJt3IU92JUIGncz69nWvaIml8q8VcVqq47pO9tfAw2uKjiP3l66aX0r3V2LTUDRQstl5Oi4RAGI7qiRJNSbWhIqqEjCSMf3e8UsQIL2LKSyiQZNi2yfQtAJGGov1SYIIFKFtU9TqE6KTtGil5IkXRh3/ErvULL6Ei29dfkFHOsBpKEv9Gy/J/TRRZeP+8T7/7W0qYEVk/bLpYSxKlaDw4ctmvYmF8GRwKF+LniZbTZuYJemRloK3/qW+xNnMnJLC/1KAUT1JzzPsBbDdqJrx48x92rtXgBnPs2Q7j0YLPXLAiQftSvdXotSqkuQkV1qidnvJYGWepNi7Rhrf/RnR9c1LJVKdsZ27Mo6PmA58111TupVN9dIbAVIEFgLATKZ3r7jDdUESNi+ZVBLy9Bb1r6lg7xQlmvSFrRRGSRdbB1JEC3eIIBo3Q0PmKIw9Epq6UeajqSf1CupfdZR0TuXQuihOWCaoWkAdLcbla1D9m9+9KrPzT1yWhBA/ISZBraYLcyffEDeS1FyaVmgmUbWm83MnXyA+xZsU855kZ2Nw8AwADlrJv0bNzNctku3Prx56140S71a9CK7NzTRq2Udiy1A2oSpWU9SuTaIpTgHNEmJoa5E0P/yQEmt0m9JGGtvSOJ8wJJ2oLQSJAxAzqC3mx8wV8IsikL2hpUgYfuWCiMtwL/gOwJIUP0wNJZyTxejV5JF9AkUhfd6amnoMx9ARMrVc+DS4gDpvv6VaWe+us9VxQDSYtjc0kJLk3F1u7VD92eOfXOXQkgYgAQ5BIrtpMtRMHxrRjU69LJq1oTn2dppZLhsnM3bMtcCRMpm/jpJhZXIdRvWiyVpI5tDdoJsAbtnoj7Ul6SEPpcKpZ14SQhJEAFRRRJGYJE716pcfpDp8zAA2Y3erhO0dgBiJYhf9QkjQfy2QhSAhJEgssdkDwkUSmcq8EiNsrdei1bZzpIgMiF8KpZI6TMHHmyGjwVLELPhzXfPmT3ypCIA2dTSh/lNzTR4i7AxyJYoBpQwALnwFXbauJHtw0gQjWPVKatmvT+LIWrf4PCvyQcwz3j3kOduOfFK1H2Qf7HYfftr8fuN7vWsyQNHEkT1BB4Bwm9zyLiWca43VqFbOawE2cxI1zHcWsK+5f1PHlaCqI1d6OXaIEGbkdoTkQqlUszzZm2fIBVLNkvQfajWYeC3Qax9JGDIxsh5olpLgA3ifjkHJjbDLcEAYePbHL7w4uP3XHP/a4U2iH/RnvV3dmnqzgBJlJ49mCf1phgw7OfnPM229GbnppZ4bBD1mzfIu4FUvk0NDKGFnr23sOBnn2CFAHKsF7/Zhj7rUSq1ky4wLeMt17jv6fqRWjcGLQjk9pWxXighrKdL9oZULr9Bb4kJC5CZjOSkRAES1oslPd7vJOhIgsjjJseC3t5+l6zlhl3o6jMIIHbfptClbV3HVtVTf9aBUSxqQB44ec0KVCwBpE8z/GMA7NneBhFAhq+69+Jj3vvO/R0BxPVIrWWU3LKmkTVDRvNmKVWrlBdLrt5u69mtoYmmsBJEnDj7H+yu/ZjNDazoJq
+LYZO1cQQQJWX9cSF6tahlZMubFRSLJTetoPEB77nSoFgslvY1JEdUCiWMddPasYN27cMC5NeM5NJEAVLpPkixhSk7RPsW0n61+229b7IXJF1swuli+yBSz2SoWw+Y+rObgdpXsf1ZoKq+XOpW+krySqGQRy3ABrGS8z8GwHXBABnYPOPHn3vnjJ92BBDN+YTnGbgJhmnnpqkX7wbtaBeuy472QXo67CK3rdpEAci5zzCooRdDtmzEMQ00NDgsm3xALvGyAFL05KDe6pIOdgfbLnL9tjvZ+luGtlQrbeQVFn9oiSSF3QBUPauCyeVbCJ6oEuRaRjIpUYCIwkp20juK9/KHvvh30jWmpEQxCSJvmQAkb2XhTrrsEy1ou/VlVSi7026T1thddPUlsOh7v2vYA9aQAfDPXdpfLLrxbfqum3Xf6fNP/HYpgOhpznqGkU3dXD/4pq028MZPxrjis2gptZMulU27pX4bqFS4uz/0RMb5libm37Vf7jJNAUR3h7UmOC8gTZLiQ1a4LllF9toQd23OacH3ZltXsdL/QUUgy+1vbGznofLvuSjOant397zthlVYCTKRkUxPHCB6Ym3u6Y0r9ceGnOidKNtEb+xisVilAiIldeX2tRJDfUqiSFUv5uZVn9rjkHPAtpOEkFEeFN8sm0eqoo3b8sVguf3YPRe/uudTEW/cBRTe6i8b36bnhnlPnjXvqDPDAERqUY/NrsHVbfMmVt91kOe56QglQbFYLWxoNO6DD1QISRSAeNLMDT3ZYlg3/wPm2jAZAeR5QNcipbocALxQtSeI+UBS1ehMuGPdQqfrE9vHgr6AMZqSxMvZL7F7w2b6+nfywxBhY7MKd9UFEM2+4hNSXaQEuHENVSkZQIqy9RcKhGr37QKMCTBQKpucS56m15oeOS/Q5gbesWqQ7fWsR10n6cju3ejmjwUrNartt9FgCnfoBZDCHaJS/dXk9yUz4FVEtXWJFttnqKjzdDf+vHd7W+FTGBP7lPg39hQG0mR4y+6me7FVwxUAqdD3nZqYe/Verv4bWK6+OheP89jhNIzoy7CGBraxex/+BhlAii5PGaiKdpUuL8NV8y1BG/2innQjoAT18hlpa6ZQB6kCQETJObPZlrXs7HmbHBnV+lwBivqtEJaWdbxz5xjXUCta7J6KkX8oVwJjwzKAFGWhnCmSHAKKeK9o4DJSotc1OryHU+7NwuxYVQKIRpRh37uFHVugj9PSCgyzhTWrerHwvg4kh50OHYhqMW7EbrfNjntmfGGhyuYCLlOxxAZ5cnTOR4HykhpygdsTfcUsG70y7Uk9/S1HzJ7ARwPC1OscJUEX8lURIElys4sCREB42LtGR7fJtUZwxcN8hbXpgrexwKc84MTTc832ojSD/jxLGUBqa6pKW4S6lfD3gLZ9ckeqkiuSLLoi+hRyMeN1WH4AXOl7rjoCSB27eXUq7i5AmZllSdZCUTDhOdpD9lS0WqApBhokMO3N1/K4V8HNGwOVkbuo041C2RNKWR09dVxkDlbUQHc1KT+47JY6KDLlcn6MTtsojJuLJUNN4h6wWv1JgfmzexGW0nqlLSPv6V4mr+jJUqvFz7L6lQb7GbflnzFGU5L6IoDoVXthup9kGRfxPW5N+0UHoy6CuVeVnRey0+fwO8B/uVTUzbUIRcPdO53ZoQlQMPIV/ITVfCN0mxqtqEMHI/vBTdfCwym8duNA4FmXt9/AmJ/UKJcjkVX0wFSkXjqlspKw6ChL7ubBv5K7uDXVRdcO6Piayh1HwyU/gWYlMkpR0cHQbTkOYzQlqS+BR25r/6lkfOsNa8PLc1t9Om2d6qKoeX8qrEcbYcLP4M0UXbz8R+AkhmJMm5xVaZ0Xm7RByZvCpB6sgeecANweSIfNXFsDREYnQXuL7hm2giLQfOE8eGpy9D47ocU2F7Nm1S1GB6DqoliA6IynTuPUcFHIh+KqnyxK42lFAktr+KFaSRsPKPlSsfK5sfBbXfxV2y
cTjjqQ9x56ztRN0JoFSE5zrNmiXfAvlDzxkerrnoMC/grn4xs7w09+XdO78Tf2Y+Wlq03dhDzLBtFB8pIpVzoPOzLCw91ZrwCSlJm0rWx1MyOH4PJ9uon2b4AiBGuveI/R12BKX2NQe+S3o0gA+TjwXG3SOq2j4/KBJLcmr67NJwqkSkHBUW4bl12yU7Trn5LghvDtJniGAw3mH0mMWe0xBBAFBf1vtQeK3n94yeHv+zLwrj+IPmKntVAeaxnoYXJKWyLdvA21JUl0d4bu0AC+YjAKgkt9EUB0ncaltfUksjkU/Ra9lN8y+lixtpBpIS9DlKJNuYMUrl8bEcI+Sm40GPeumbQXAUR6zPG18yDyVh1e0iDviF6J+lqJ3Q3N168AU0LXbq14z85w+uOd7t1SjLLvEMF0gxlXxtPUXBMBREHK5b2uq/I4h3Xoyg0zZPmXeIbpvUp1lL5Ku1GFKXLDDPe9sXD1E2FqVq2OQkR9KbKeNphDqjZYgh0LIMpstG+CY3YwVPFNwCj0lX8NdJRRqlD3N8Bny+z39PPgns7bTJSPIXflkVtmGsz+ZT5JTTUTQNybbTufKoWPFL2LPjJ56qnWT4O0e6hvAoEXKYd4fBntx0+CZ5MPS9GIk9qSOMdgamBNheBbiSo1kjhOzkGdhbB5YSt/MB2ZSt3pii8Cv6rg2Z9tguNfgpXJ7gbpFE7Bka+FBmNvC63ggTq/qQCiBLBhbrisIrXaCMxF5cZZFJiSqqNTJ5O7B7CSMvlomCj3bzJFR70UAFNQVhiMT+NKhpZqjCKAdHKgYvUCRPRmG10NrlWrT2VIVqbkSssBP4UXkjlPUpjMxCN9rcG0T/Vf6XN1QnsBRJnR2qcfToQYmXbK3SWMVqdomejIZCqKNgo7zAcY8in+1A9OVn6voIzuIfsIUU2n6fV6CyqG+FOPhiAp9iqdDJDqL9/qQzDGOdGBA/lKdeq20vLHi+CkYsu30s7d9nqr7YYx7YJkHJwrDeaaWPvzTOgAABDGSURBVEbp5E46UcVKTgGqnhIX8+zppmhdv/E93SZbYd9uYFQRBajCrr3mX8WYdgh0cES5ANJJWkk8D2d76UQjPVkTujpugHgngyOAR70+JUW+X2H/N5wO3w4woSvs1vWoGNMuxNoDhyhfY6iPQ1Od5OZN3gkrR7IM9tZDupWvkth7KHTzfheoRFHRxVKzXoaBsebdki9+H4zxAndzXPCBQ/8uMZgaP4AXbvY6aaOwc7bx4t2KDMfgSLWCNgqvAK6L1Evbyt+YCD8q2MaroDt3N9eYNnuwBeBQ7/MMRt6X1JdOCDXp3ECQeIJZqjTvxUJN/gO4vswxdbXhM8tg/1i2JW7HGLEwXwLAoe9eMZjU7dMGcbgTghU7P5Sw8nDIMhdrR81KBStWctDl+hvhssLbNiM/w5MYI9aVAoe+f8ZgaigANvKz5ht0Qrh75wejVx5QXz7Di7YME+6uUzvlxGodtwdMryijfY5lxuQvSykiOezj1VW4e4IHpmrnOFPtUOKtqbAHpiQIfhQRoLpkbMnfYbuyD1YdjDFimVtKgENV6urAVIJHbivREyIuihDVyzvUG6LjqFXk75kd4citcqxGTew59dvwee9AbDT6jsGYfKBcCHCo97o6cptg0obaS6kQPS1EtNUVqra21qLunn8duDlU77lKF+wJt7bxzIZpfALGiEVhJYetWldJGxJK+1O7SXk6VZJIeihAsZxUaxcDyqcVpriZG8PmFnI7LEdyWErqJ+2Pp1POB2K/+L3tvNV2wEe41HRhVmLEOuVID/8QUcLZXvgp7F8yyleG+Bci2hx+it42mOERuVCz1W1mxQQSN9R+YtDSyU1jnsdKpIefFN3uclsI2q4dD5d3lN/Uzet6RgRvVdCgdePB0sNZgCTgyUpPaunENhPjvLooTHDCwTvB00EZst11Phlj2pzXDWmQF4KkbjxYfoCcBChxfZVK+i4nUCyFzjvEdwi4gLXnFk
1SX/4cfLWDAxq21w2LoLv/jgU3PO3fQ4SPhKXrZIP5U9jKtV7PShAlrlYC6yqVdF5v0/aKnhhZo3wfL8TYn7+rUi7gBX+BYfaWHvec8yUlAg+jEjrAYOI49hV13KrUz18v7uAoT58u0apCkdM+vRek5S55i/Hco1MFFvu77Gi76dkfw4GX6LDTFR2c54jqdLajP2cwn6jy0yXavR8gun5R1zDGX7Y/5wPen7J1/B0n16OOzekcU0XHd6usyLbhxpXAte35c9qvTltw7xd//fEiJwEr9aldbzAKrayb4geILvDVRb7xly/tP5/LZg5P5Q3NBdwo+6JpRePqzZ5k8Z1MzF80fc3Il82Vb7TLZVGmQV74NKcYTKV5WZLkUMmx/ACR01E3TcVfrhi6mGsW5yxDnZXSazh1Wd3asiX0Y4irks3/Fj9bw/Q48Tq46Apf3qpvDX3X/HBRGys9JnCInB0N5r0wdKWlTh4gIrhqeXrv2Go55zS3PZCgYyFKkH9nGjNNt05v0ccQMOQ0Pa/MXfIKVpBSGp4DKMjOZboS59pc62cPWGOmrMjfIRgjOP5uMGMqILsmmxYCRJrrD2Kn9BEDn+ygV21jS7m7v22K8NjpCOpQK2h9PHdsuY+xA9w/EV5LGBg6RHAicEqxyxBu8m7NPhXM7/L7X5XaHH6OfrdeMpn4H6oQIDoFplQY8ZbXDeweskuFbD3sJXjXhRMLQ7YLW00xSTrKMxb4lHftmRzcAqcubdLvjWE78+opI7tW5wne7wG5qwCSfoySVEu1/RWYp9E5oDjBoaFHG4xMtLoqbQDiqVnxX1u02pR/ybT2GKXwa8XN864NkZYr3SZ/fKdgTnQRrCSD1Bxt4Ot0tF6xyl3QRvsOmMsNXvpPXSAmi0xj2d+qrj4VWGh/KxZa7g2dCOygJP0YRUlRAviJjsz3cl25QV0/YGif5aQekBIEEEWzhY0RDceDSgASboSsVlgOuFe3xb4R81VD+xxZYUmq5XpBANH7Vxc09YyN8CgqVmyDZh0FckDhAXvFChBZcHsYWo/j1hPn2wHEU7OmAp+P7UFLGemxDZR1VJIDMoyOihUg9xnM+JLjprRCMYAo7eEvYnumIDdvbJ1nHUXiwJ19l3PumlhyAHnjnmkwVUnfGOm5qlS5GECUV1VXs+0dy7j+jcJYOsw6KZsDV+24iO8vHlp2+7YNZwH7GUxNJ6ys5FkDAeKpWcqfoS2myotCTe6ZWTenzCpnSCf2cMa+b3H3zBExUXCpwZSTiCim4avfTUcAkRiWFKn8bTPms/N46ne7Vv9xshFKcmDsqW8w47cjS9YrXWGRJz3kcK/bUhQgnhTRrrp21ysrw/5zPgt+kEmQyrgYT+tdvvsWC74fhwS5xmCUXruuSymAaItNUqRvRVzo/celfPgZ5RrPSmdzoM8flrD25MEVktHsSQ9dY1XXpUOAeFJEm4YlU2F0yCWzaCMtOykgIyudzYGGhRtxhlY6F/9jMDrgW/clDED28yKjKruU8fGeSzlsQyZFOnNJPdFjKYevr3QOPlQkm8FIs6j7UhIgnhSpPMr3K594jSnPKSIqK53FgbMPfI3/fbbSOajLqN1iUxIWINoXUc6ksrMfM/i6hbx3hWJps9JZHNjh2oUsubySOVBEv6RH3e57FE5NKIB4UqTCa+5fg3c+ApVMT2ctrHoYV8cGhkVKPRr01J8xmCqmh6o9RocGiAcSBUvrKFB55caBS/jWiko9KOWN3dVb/XDAEi5dXgnvbze0vV2qK7A0KkDkP5eqVU6qZfj4mXN47pdhj051Bf4n94wHnjGHf/yiXN7rRIxUq7eSI7g2RooEEE+KyL13S1nkd5uxivVjt0EXumQlOQ606PDCk6vYdOg2ZQ56scHEe0aoTEKSbhYZIB5I7ga+XBaxv91qIac2Z5ZIWcwrs9Hv+i7ks2vK5fk9BqOsQV2ylAsQqVhKW7lXZK4de+6j/OXOjlI4RO4ya1CCA8ed8y
h/vaMcnuveq6MNpjrpoFIwcWUBxJMi47w0B9Ees9uSl2neYR8q3cuNNmrXra0EFH3fe5lNg8u5lvkEQ+sNU12RiWUDxAOJUtZeE5lxF4x9hltnlL+nEnnALtzgwkOf4bYny+H1lQYTkLy0a/GyIoB4IPkN8NlIbOs2833m7b99ticSiWvRK2vvY9cX32fTfttHbPxbg/lcxDZ1WT0OgMj1+xdgVCQOnXbYa/z6yUrDHiIN2eUqf2Hsa9z7RFQezwWO64ou3aD1UTFAPClyOKBLU/IpLUsuRvNyC8+ObkB5pbISPweU1+sTL7Xg7BPFqa5rEU4ymMfjJyidPcYCEA8kymzR4QV47Vh02Klzefz30SRPOvmcPNWHnzKXJ34XlbenGYwy2mTF40BsAPFAcraXjjokg5fBH3Zcy8mbe4dskFULw4E/Nq3lM+/2hu3C1LZ1zjGYKVEadIW6sQLEA4lu7w5/xf2I781h3tXlhkB0hTmK/oy7Xj2Ht66KwtOvGUx50RHRqUtVi9gB4oHk8uD7jYrw5vrhs7ns7eibjqlidULE3rDLbL4zPwovrzCY6xKiLnXDVAUgHkjCx2w1vvIBM0b35SCnMXUcrCWCnzUtHPLSGrZ8NOx1d102xirstFUNIB5IjgOmhyJmh/+ez6zLhqP7drMSnQNKSr33DfN579ths8ccbzByz2elAw5UFSAeSBQD9EioWTjipDd59H5lUslKVA588oQ3eSw07440mEejDtEV61cdIB5IdGWNLncsvaN78a4LuPktZZjPSlgOfG3EAm6ZF4Zn7+s2E4PR1URZCcGBRADigUTZUe7xrrLpgLQFcMfe7e80DPEwXbKKm4x61kAoiQ9dQfTlrpKNJK61kBhAPJAoLOW/S8duPQPPHAx1dSV9XFPm6+dZpdGQMCgZi/hbXeOZhY9En4NEAWLJc3BCRAE/ACuOITPai0yqe1NUqNvysqjc6LjIt+gUgHjSROdJbuj40NU0WHhCHOmzK2BRDTZV2uiddOOoWFi06LDTZV39PEels9dpAPFAopOJUrk6OL77AEw9Js77rirlWee2vw8YX1JyyNaTStVlTwLGNUmdChCfyqVNxe8Uz5byDFw4bjU/Wxk+WjguDtVSPxdtu5pbp/XrwOYQIK7vqgkWqjFVNQEQnwEvkBTJu7UAjjphOQ/KY9MFy6f3Xs5Df+7IW3W7B44ul5qnmquhZgDikybK4CigBLtm9jhtKQ9OHRTDtT7V5Gt8fcve+PT4Jbx+b7Gkb0oHKqnRpTIexsfgjnuqOYB40kS5gAUS/bTPKr/1Tcv43TcGcKQT5TBQUjyNb5xHzBZO/fFKPvh6UNy6sqxf74Gjy+TKjY+54XqqSYD4pIk2F3XG5Kx2l/g0zFrLdSfOr9soYEXlXn7/cFr2Ljwro8tr7gKmZJt+4RZ5JbVqGiA+oCg+6988oLS9M1HnSX58zU51c+hKh52+ceXCgPMcUrYEjP8zmLq/2amSRR1n21QAxAcUGeiSJgKL74rqZXDYxLn88PejUnvGXWfIv6Vjsj8fVXASUFct/5/AYTB1fWFmnAs7rr5SBRAfUGSjfBE40fvp6X6nRBDj/30ONz65Z2pSCik1z6VjX2Pq/+zuS7CwHrjf+/l/Xek+jrgWdlz9pBIg/od3cBSlZ4FytPud8m6de/Fb3DTjoJrN4KiMh18/9BnuuGWEL2+V0rm6wDCYBXFNctZP+RxIPUAKwKL0mid5MRgHoTSnn75iOedPHcm45p06Pau8sqxP67uQSePf4MFrB3rpQOWmnaa0SQbzcvlTmbWsBgfqCiAFYFE287HAGOAQuj25E2Nue5cv/XUAJ6zascwbTqLPgfa2/7TNu/z62JU8fcEObBorpeop4GndtWIw+j8rNcqBugVIIb8dHBn4AsyhbPX8YWwtoPzhQ06ftR2HxHz77lM9lnL33sv482f68MGxK1hzwBPADA8QmaFdo2AIIqvLACTo4R0cxXYdzO
CZR2NeO5wVi/ei99wtDJ27gd0WO4xa2cRHVm2NTrHYk97zAQVzvLrNB8zddjNvDjEsGtWTtaMaGDBkNuz5OO/tJ1vi7wajTIVZSTEHujRAUjxvGekJcSADSEKMzoZJJwcygKRz3jKqE+JABpCEGJ0Nk04OZABJ57xlVCfEgQwgCTE6GyadHMgAks55y6hOiAMZQBJidDZMOjmQASSd85ZRnRAHMoAkxOhsmHRyIANIOuctozohDmQASYjR2TDp5EAGkHTOW0Z1QhzIAJIQo7Nh0smBDCDpnLeM6oQ4kAEkIUZnw6STAxlA0jlvGdUJcSADSEKMzoZJJwcygKRz3jKqE+JABpCEGJ0Nk04OZABJ57xlVCfEgQwgCTE6GyadHMgAks55y6hOiAMZQBJidDZMOjmQASSd85ZRnRAHMoAkxOhsmHRyIANIOuctozohDmQASYjR2TDp5EAGkHTOW0Z1QhzIAJIQo7Nh0smBDCDpnLeM6oQ4kAEkIUZnw6STAxlA0jlvGdUJcSADSEKMzoZJJwcygKRz3jKqE+JABpCEGJ0Nk04OZABJ57xlVCfEgQwgCTE6GyadHMgAks55y6hOiAMZQBJidDZMOjmQASSd85ZRnRAHMoAkxOhsmHRyIANIOuctozohDmQASYjR2TDp5EAGkHTOW0Z1QhzIAJIQo7Nh0smBDCDpnLeM6oQ4kAEkIUZnw6STAxlA0jlvGdUJcSADSEKMzoZJJwf+P7njubHn9k00AAAAAElFTkSuQmCC"
}
dom.window.HTMLCanvasElement.prototype.getContext = function (v) {
if (v === '2d') {
return {
isPointInPath: function (args) {
return false
},
rect: function (args) {
},
fillRect: function (args) {
},
fillText: function (args) {
},
beginPath: function () {
},
arc: function (args) {
},
closePath: function () {
},
fill: function () {
}
};
} else {
return {
getExtension: function (args) {
return {
UNMASKED_RENDERER_WEBGL: 1
}
},
getParameter: function (args) {
return "ANGLE(Intel(R)UHDGraphics630Direct3D11vs_5_0ps_5_0)"
}
};
}
}
// 监听事件
var md = new dom.window.Event('mousedown');
var mu = new dom.window.Event('mouseup');
var cl = new dom.window.Event('click');
var mv = new dom.window.Event('mousemove');
// 轨迹函数
function track(start_pos, end_pos) {
/* start_pos 点击的坐标
* end_pos 结束的坐标
* */
const distance = end_pos[0];
let click_pos = start_pos[0];
let a = 0.00035, // 加速度
current = 0, //当前位移
t = 0.02, //计算间隔
v = 0, //初速度
mid = distance * 4 / 5; // 减速阈值
while (current < distance) {
v0 = v; //初速度v0
v = v0 + a * t; //当前速度
move = v0 * t + 1 / 2 * a * t * t; // 移动距离
// console.log("每次移动: ", move)
if (parseInt(move) > 0) {
current += parseInt(move); //当前位移
if (current < mid) {
if (current % (parseInt(Math.random() * 5)) == 0) {
mv.pageX = current;
mv.pageY = start_pos[1];
document.dispatchEvent(mv);
}
if (current >= click_pos) {
md.pageX = current;
md.pageY = start_pos[1];
document.dispatchEvent(md);
click_pos += 9999;
}
} else {
mv.pageX = current;
mv.pageY = start_pos[1];
document.dispatchEvent(mv);
if (current == end_pos[0]) {
mu.pageX = cl.pageX = current;
mu.pageY = cl.pageY = start_pos[1];
document.dispatchEvent(mv);
document.dispatchEvent(cl);
}
}
}
}
}
"""
jscode += tdc
jscode += """function run(start_pos, end_pos) {
track(start_pos, end_pos);
collect = window.TDC.getData(!0);
return decodeURIComponent(collect);
}
"""
ctx = execjs.compile(jscode)
collect = ctx.call('run', start_pos, end_pos)
return collect
def verify(self, sess, sid, nonce, eks, show_url, ans, collect):
    """
    Submit the slider-captcha verification request.

    :param str sess: session token produced by resetSess
    :param str sid: captcha id produced by initCaptcha
    :param str nonce: nonce produced by showCaptcha
    :param str eks: dynamic variable extracted from the tdc script
    :param str show_url: captcha display url (sent as the Referer header)
    :param str ans: slider end coordinates, format: 'x,y;'
    :param str collect: encrypted environment-fingerprint parameter
    :return: verification result; errorCode == 0 means success
    :rtype: dict
    """
    url = "https://t.captcha.qq.com/cap_union_new_verify"
    payload = {
        'aid': self.aid, 'protocol': 'https', 'accver': '1', 'showtype': 'popup',
        'ua': self.ua,
        'noheader': '1', 'fb': '1', 'enableDarkMode': '0', 'grayscale': '1', 'clientype': '2',
        'sess': sess,
        'fwidth': '0',
        'sid': sid, 'forcestyle': 'undefined', 'wxLang': '', 'tcScale': '1', 'uid': '', 'cap_cd': '',
        'rnd': self.rnd, 'TCapIframeLoadTime': 'undefined', 'prehandleLoadTime': self.prehandleLoadTime,
        'createIframeStart': self.createIframeStart, 'subsid': '2', 'cdata': '0',
        'ans': ans,  # e.g. '320,107;'
        'collect': collect,
        'vsig': '', 'websig': '', 'subcapclass': '',
        # Server cross-checks the collect payload's length.
        'tlg': len(collect),
        'eks': eks,
        'nonce': nonce, 'vlg': '0_0_1'
        # 'vData': vData
    }
    headers = {
        'Host': 't.captcha.qq.com',
        'sec-ch-ua': '"Chromium";v="88", "Google Chrome";v="88", ";Not A Brand";v="99"',
        'Accept': 'application/json, text/javascript, */*; q=0.01',
        'X-Requested-With': 'XMLHttpRequest',
        'sec-ch-ua-mobile': '?0',
        'User-Agent': self._ua,
        'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
        'Origin': 'https://t.captcha.qq.com',
        'Sec-Fetch-Site': 'same-origin',
        'Sec-Fetch-Mode': 'cors',
        'Sec-Fetch-Dest': 'empty',
        'Referer': show_url,
        'Accept-Language': 'zh-CN,zh-TW;q=0.9,zh;q=0.8,en-US;q=0.7,en;q=0.6'
    }
    response = requests.request("POST", url, headers=headers, data=payload)
    return response.json()
def run(tx):
    """
    Execute the full slider-captcha solving flow.

    :param tx: Txhk client instance
    :return: verification result dict; errorCode == 0 means success
    """
    # Initialize the captcha session.
    init_obj = tx.initCaptcha()
    sess = init_obj['sess']
    sid = init_obj['sid']
    # Get the captcha display url, nonce, and tdc script url.
    location_href, nonce, tdc_url = tx.showCaptcha(sess, sid)
    # Reset sess to obtain the gap's initial position.
    r = tx.resetSess(sess, sid, location_href)
    sess = r.get('sess')
    # Download the puzzle images and locate the gap's left edge.
    tx.loadImage(sess, sid, location_href)
    img1 = Image.open('topic7_0.png').convert('RGBA')
    img2 = Image.open('topic7_1.png').convert('RGBA')
    left_offset = get_left_offset(img1, img2)
    ans = f"{left_offset - 20},{r.get('inity')};"  # 20 is the small image's left margin
    print(ans)
    # Drag start coordinates.
    start_pos = [68, 285]
    # Drag end coordinates; 44 is the small image's width, 12 the image's
    # distance from the window's left edge.
    end_pos = [(left_offset + 44) // 2 + 12, 285]
    # Fetch the tdc script.
    tdc = tx.get_tdc_js(tdc_url=tdc_url, show_url=location_href)
    # Patch the tdc script: force the detected power value to 200 and the
    # has-process-function check result to 0.
    tdc = tdc.replace('=254;', '=200;').replace('+=5293}', '+=0}')
    # Raw string: '\.' in a plain string is an invalid escape sequence
    # (SyntaxWarning on Python 3.12+); the matched pattern is unchanged.
    eks = re.search(r'\.info="(.*?)"', tdc).group(1)
    # Run the JS environment simulation to obtain the encrypted collect parameter.
    collect = tx.get_collect(tdc=tdc, location_href=location_href, start_pos=start_pos, end_pos=end_pos)
    result = tx.verify(sess=sess, sid=sid, nonce=nonce, eks=eks, show_url=location_href, ans=ans, collect=collect)
    return result
if __name__ == '__main__':
    # Build a Txhk client for the target site and solve one captcha end-to-end.
    tx = Txhk(
        ua="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.104 Safari/537.36",
        aid="2005597573",
        refer_url="http://www.glidedsky.com/",
        app_version="5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.82 Safari/537.36"
    )
    result = run(tx)
    print(result)
|
{"/proxy/adsl/dial.py": ["/proxy/dbConnect.py"], "/txhk/core.py": ["/txhk/gap.py"]}
|
40,761
|
phillipboy07/cves
|
refs/heads/master
|
/cve_db_app/forms.py
|
from django.forms import ModelForm,forms
from django import forms
from cve_db_app.models import Scan
class ScanUploadForm(ModelForm):
    """Form for uploading a scan CSV and associating it with a site.

    Inherits from ``ModelForm`` only: ``ModelForm`` is already a ``Form``
    subclass, so the previous extra ``forms.Form`` base was redundant.
    """

    # Extra (non-model) fields collected alongside the Scan model's 'name'.
    file = forms.FileField(required=False, label="Scan in CSV format")
    site = forms.CharField(required=True, label="Site Name that the Scan is based off of")

    class Meta:
        model = Scan
        fields = ['name']
        labels = {
            'name': 'Scan Name',
        }
|
{"/cve_db_app/forms.py": ["/cve_db_app/models.py"], "/cve_db_app/admin.py": ["/cve_db_app/models.py"], "/cve_db_app/views.py": ["/cve_db_app/forms.py", "/cve_db_app/models.py"]}
|
40,762
|
phillipboy07/cves
|
refs/heads/master
|
/cve_db_app/urls.py
|
from django.urls import path
from . import views
# URL routes for the cve_db_app. Every route now carries a name= kwarg
# (previously only 'index' had one) so templates and views can reverse()
# them instead of hard-coding paths; existing URLs are unchanged.
urlpatterns = [
    path('', views.index, name='index'),
    path('upload_scan', views.upload_scan, name='upload_scan'),
    path('cve_search', views.cve_search, name='cve_search'),
    path('ajax/get_cve_info/', views.ajax_get_cve_info, name='ajax_get_cve_info'),
]
|
{"/cve_db_app/forms.py": ["/cve_db_app/models.py"], "/cve_db_app/admin.py": ["/cve_db_app/models.py"], "/cve_db_app/views.py": ["/cve_db_app/forms.py", "/cve_db_app/models.py"]}
|
40,763
|
phillipboy07/cves
|
refs/heads/master
|
/cve_db_app/OLD_models.py
|
from django.db import models
from django.contrib.postgres.fields import HStoreField
# Create your models here.
class Scan(models.Model):
    """A vulnerability scan run (legacy model)."""
    # Timestamp of when the scan was run.
    scan_date = models.DateTimeField("scan date")
    # Raw scan results stored as a Postgres hstore key/value mapping.
    scan_results = HStoreField()
class Site(models.Model):
    """A physical/logical site that devices belong to (legacy model)."""
    site_name = models.CharField(max_length=200)
class Device(models.Model):
    """A scanned network device (legacy model)."""
    # Owning site; deleting the site deletes its devices.
    site = models.ForeignKey(Site, on_delete=models.CASCADE)
    device_name = models.CharField(max_length=200)
    dns_name = models.CharField(max_length=200)
    device_ip = models.GenericIPAddressField()
    device_version = models.CharField(max_length=200)
    device_net_bios = models.CharField(max_length=200)
# Potential Table for the processing the CVE scan data results
# class Threat(models.Model)
|
{"/cve_db_app/forms.py": ["/cve_db_app/models.py"], "/cve_db_app/admin.py": ["/cve_db_app/models.py"], "/cve_db_app/views.py": ["/cve_db_app/forms.py", "/cve_db_app/models.py"]}
|
40,764
|
phillipboy07/cves
|
refs/heads/master
|
/cve_db_app/apps.py
|
from django.apps import AppConfig
class CveDbAppConfig(AppConfig):
    """Django application configuration for the cve_db_app package."""
    name = 'cve_db_app'
|
{"/cve_db_app/forms.py": ["/cve_db_app/models.py"], "/cve_db_app/admin.py": ["/cve_db_app/models.py"], "/cve_db_app/views.py": ["/cve_db_app/forms.py", "/cve_db_app/models.py"]}
|
40,765
|
phillipboy07/cves
|
refs/heads/master
|
/cve_db_app/models.py
|
from django.db import models
from datetime import datetime
# Create your models here.
#constants
# Choices for Device.status: (stored value, human-readable label).
STATUS_CHOICES = (
    ('On', 'Online'),
    ('Off', 'Offline'),
)
class Scan(models.Model):
    """A vulnerability scan run; Vulnerability rows reference it."""
    # NOTE(review): blank=True without null=True on a DateTimeField means an
    # empty form value would try to store NULL in a NOT NULL column — confirm
    # callers always supply a date.
    date = models.DateTimeField(blank=True)
    name = models.CharField(max_length=200, blank=True)
class Site(models.Model):
    """A site that devices are associated with."""
    name = models.CharField(max_length=200, blank=True)
class Contact(models.Model):
    """Point of contact responsible for one or more devices."""
    first_name = models.CharField(max_length=200, blank=True)
    last_name = models.CharField(max_length=200, blank=True)
    gdit_group = models.CharField(max_length=200, blank=True)
    phone_number = models.CharField(max_length=200, blank=True)
    email_address = models.EmailField(max_length=200, blank=True)
# Asset Mgmt from Dashboard View (broken up into Device and Device Location)
# Asset Mgmt from Dashboard View (broken up into Device and Device Location)
class Device(models.Model):
    """A managed network device and its asset-management attributes."""
    # PROTECT: a Site/Contact cannot be deleted while devices reference it.
    site = models.ForeignKey(Site, on_delete=models.PROTECT)
    contact = models.ForeignKey(Contact, on_delete=models.PROTECT)
    dns_name = models.CharField(max_length=200, blank=True)
    ip = models.GenericIPAddressField()
    net_bios = models.CharField(max_length=200, blank=True)
    mac_address = models.CharField(max_length=200, blank=True)
    os_vendor = models.CharField(max_length=200, blank=True)
    os_type = models.CharField(max_length=200, blank=True)
    os_version = models.CharField(max_length=200, blank=True)
    os_revision = models.CharField(max_length=200, blank=True)
    switch = models.CharField(max_length=200, blank=True)
    port = models.CharField(max_length=200, blank=True)
    vlan = models.CharField(max_length=200, blank=True)
    # NOTE(review): null=True on CharField is discouraged by Django convention
    # (two "empty" states: NULL and ''); changing it requires a migration, so
    # it is only flagged here.
    make = models.CharField(max_length=200, null=True)
    model = models.CharField(max_length=200, null=True)
    serial_number = models.CharField(max_length=200, blank=True)
    asset_tag = models.CharField(max_length=200, blank=True)
    poc = models.CharField(max_length=200, blank=True)
    role = models.CharField(max_length=200, blank=True)
    function = models.CharField(max_length=200, blank=True)
    mission_criticality = models.CharField(max_length=200, blank=True)
    # Online/offline state; see STATUS_CHOICES above.
    status = models.CharField(
        max_length=200,
        choices=STATUS_CHOICES,
        default='On',
    )
# Asset Mgmt from Dashboard View (broken up into Device and Device Location)
class Device_Location(models.Model):
    """Physical location of a Device (building/floor/room/rack)."""
    # CASCADE: a device's location rows are removed with the device.
    device = models.ForeignKey(Device, on_delete=models.CASCADE)
    building = models.CharField(max_length=200, blank=True)
    physical_location = models.CharField(max_length=200, blank=True)
    floor = models.CharField(max_length=200, blank=True)
    room = models.CharField(max_length=200, blank=True)
    rack_row = models.CharField(max_length=200, blank=True)
    rack_name = models.CharField(max_length=200, blank=True)
    rack_unit = models.CharField(max_length=200, blank=True)
# Potential Table for the processing the CVE scan data results
class Vulnerability(models.Model):
    """One vulnerability finding from a scan, tied to a device.

    Almost every field is blank/null because scan exports are sparse;
    a row is deleted when its parent Scan or Device is deleted (CASCADE).
    """
    scan = models.ForeignKey(Scan,on_delete=models.CASCADE)
    device = models.ForeignKey(Device, on_delete=models.CASCADE)
    plugin = models.CharField(max_length=500,blank=True,null=True)
    plugin_name = models.CharField(max_length=200,blank=True,null=True)
    family = models.CharField(max_length=500,blank=True,null=True)
    severity = models.CharField(max_length=500,blank=True,null=True)
    protocol = models.CharField(max_length=500,blank=True,null=True)
    port = models.CharField(max_length=500,blank=True,null=True)
    exploit = models.CharField(max_length=50,blank=True,null=True)
    repository = models.CharField(max_length=500,blank=True,null=True)
    plugin_text = models.TextField(max_length=500, blank=True,null=True)
    cve = models.TextField(max_length=500,blank=True,null=True)
    # NOTE(review): max_length on DateTimeField is ignored by Django; harmless
    # but could be dropped in a future cleanup migration.
    first_discovered = models.DateTimeField(max_length=500,blank=True,null=True)
    last_observed = models.DateTimeField(max_length=500,blank=True,null=True)
    exploit_frameworks = models.CharField(max_length=500, blank=True,null=True)
    synopsis = models.CharField(max_length=500,blank=True,null=True)
    description = models.TextField(max_length=1000, blank=True,null=True)
    solution = models.CharField(max_length=500,blank=True,null=True)
    see_also = models.URLField(max_length=500, blank=True,null=True)
    risk_factor = models.CharField(max_length=50,blank=True,null=True)
    stig_severity = models.CharField(max_length=500, blank=True,null=True)
    cvss_base_score = models.DecimalField(blank=True, max_digits=5, decimal_places=2,null=True)
    cvss_temporal_score = models.DecimalField(blank=True, max_digits=5, decimal_places=2,null=True)
    cvss_vector = models.CharField(max_length=300,blank=True,null=True)
    cpe = models.TextField(max_length=500,blank=True,null=True)
    # TextField rather than BigIntegerField: exports can list multiple BIDs.
    #bid = models.BigIntegerField(blank=True,null=True)
    bid = models.TextField(max_length=1000, blank=True,null=True)
    cross_references = models.CharField(max_length=500,blank=True,null=True)
    vuln_publication_date = models.CharField(max_length=500,blank=True,null=True)
    patch_publication_date = models.CharField(max_length=500,blank=True,null=True)
    plugin_publication_date = models.DateTimeField(blank=True,null=True)
    plugin_modification_date = models.DateTimeField(blank=True,null=True)
    exploit_ease = models.CharField(max_length=500,blank=True,null=True)
    check_type = models.CharField(max_length=300,blank=True,null=True)
    version = models.CharField(max_length=300,blank=True,null=True)

    class Meta:
        # Avoid the auto-generated plural "Vulnerabilitys" in the admin.
        verbose_name_plural = "Vulnerabilities"
|
{"/cve_db_app/forms.py": ["/cve_db_app/models.py"], "/cve_db_app/admin.py": ["/cve_db_app/models.py"], "/cve_db_app/views.py": ["/cve_db_app/forms.py", "/cve_db_app/models.py"]}
|
40,766
|
phillipboy07/cves
|
refs/heads/master
|
/cve_db_app/admin.py
|
from django.contrib import admin
# Register your models here.
from .models import Site,Device,Scan,Vulnerability as Vuln
# Make every model editable through the Django admin; registration order
# matches the original individual register() calls.
for _model in (Site, Scan, Vuln, Device):
    admin.site.register(_model)
|
{"/cve_db_app/forms.py": ["/cve_db_app/models.py"], "/cve_db_app/admin.py": ["/cve_db_app/models.py"], "/cve_db_app/views.py": ["/cve_db_app/forms.py", "/cve_db_app/models.py"]}
|
40,767
|
phillipboy07/cves
|
refs/heads/master
|
/cve_db_app/migrations/0002_auto_20180812_0323.py
|
# Generated by Django 2.0.6 on 2018-08-12 03:23
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: re-declare Vulnerability.device with an explicit
    CASCADE delete rule. Do not edit applied migrations by hand."""

    dependencies = [
        ('cve_db_app', '0001_initial'),
    ]

    operations = [
        # Deleting a Device now cascades to its Vulnerability rows.
        migrations.AlterField(
            model_name='vulnerability',
            name='device',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='cve_db_app.Device'),
        ),
    ]
|
{"/cve_db_app/forms.py": ["/cve_db_app/models.py"], "/cve_db_app/admin.py": ["/cve_db_app/models.py"], "/cve_db_app/views.py": ["/cve_db_app/forms.py", "/cve_db_app/models.py"]}
|
40,768
|
phillipboy07/cves
|
refs/heads/master
|
/cve_db_app/migrations/0001_initial.py
|
# Generated by Django 2.0.6 on 2018-08-12 03:01
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Device',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('dns_name', models.CharField(blank=True, max_length=200)),
('ip', models.GenericIPAddressField()),
('net_bios', models.CharField(blank=True, max_length=200)),
('mac_address', models.CharField(blank=True, max_length=200)),
],
),
migrations.CreateModel(
name='Scan',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.DateTimeField(auto_now=True)),
('name', models.CharField(blank=True, max_length=200)),
],
),
migrations.CreateModel(
name='Site',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, max_length=200)),
],
),
migrations.CreateModel(
name='Vulnerability',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('plugin', models.IntegerField()),
('plugin_name', models.CharField(max_length=200)),
('family', models.CharField(max_length=200)),
('severity', models.CharField(max_length=200)),
('protocol', models.CharField(max_length=200)),
('port', models.CharField(max_length=200)),
('exploit', models.CharField(max_length=50)),
('repository', models.CharField(max_length=200)),
('plugin_text', models.TextField(blank=True, max_length=500)),
('cve', models.CharField(blank=True, max_length=100)),
('first_discovered', models.DateTimeField(max_length=200)),
('last_observed', models.DateTimeField(max_length=200)),
('exploit_frameworks', models.CharField(blank=True, max_length=200)),
('synopsis', models.CharField(max_length=200)),
('description', models.TextField(blank=True, max_length=500)),
('solution', models.CharField(max_length=200)),
('see_also', models.URLField(blank=True)),
('risk_factor', models.CharField(max_length=50)),
('stig_severity', models.CharField(blank=True, max_length=200)),
('cvss_base_score', models.DecimalField(blank=True, decimal_places=2, max_digits=5)),
('cvss_temporal_score', models.DecimalField(blank=True, decimal_places=2, max_digits=5)),
('cvss_vector', models.CharField(blank=True, max_length=100)),
('cpe', models.CharField(blank=True, max_length=100)),
('bid', models.BigIntegerField(blank=True)),
('cross_references', models.CharField(blank=True, max_length=200)),
('vuln_publication_date', models.CharField(max_length=200)),
('patch_publication_date', models.CharField(max_length=200)),
('plugin_publication_date', models.DateTimeField()),
('plugin_modification_date', models.DateTimeField()),
('exploit_ease', models.CharField(blank=True, max_length=200)),
('check_type', models.CharField(max_length=100)),
('version', models.CharField(max_length=100)),
('device', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='cve_db_app.Device')),
('scan', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='cve_db_app.Scan')),
],
options={
'verbose_name_plural': 'Vulnerabilities',
},
),
migrations.AddField(
model_name='device',
name='site',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='cve_db_app.Site'),
),
]
|
{"/cve_db_app/forms.py": ["/cve_db_app/models.py"], "/cve_db_app/admin.py": ["/cve_db_app/models.py"], "/cve_db_app/views.py": ["/cve_db_app/forms.py", "/cve_db_app/models.py"]}
|
40,769
|
phillipboy07/cves
|
refs/heads/master
|
/cve_db_app/views.py
|
from django.shortcuts import render,redirect
from django.http import HttpResponse
from django.http import JsonResponse
from django.template import loader
from django.utils import timezone
from cve_db_app.forms import ScanUploadForm
from cve_db_app.models import Scan, Device, Vulnerability, Site
#from django.contrib.auth.models import Device
from django.utils.dateparse import parse_date
from datetime import datetime
import csv, requests, json
# Create your views here.
def index(request):
template = loader.get_template('cve_db_app/index.html')
#empty dict because no data is being passed.
return HttpResponse(template.render({},request))
#TODO - place this in a helper class?!
def decode_utf8(input_iterator):
for l in input_iterator:
yield l.decode('utf-8')
def upload_scan(request):
if request.method == 'POST':
form = ScanUploadForm(request.POST)
if form.is_valid():
scan_entry = form.save(commit=False)
scan_entry.date = timezone.now()
scan_entry.save()
#Save the Site Info
site = Site(name=form.cleaned_data['site'])
site.save()
csv_file = request.FILES['file']
#check if the file uploaded is a CSV file
if not csv_file.name.endswith('.csv'):
return HttpResponse('<h1>No CSV file??</h1>')
#if file is too large, return
if csv_file.multiple_chunks():
return HttpResponse("Uploaded file is too big (%.2f MB)." % (csv_file.size/(1000*1000),))
#if we get this far lets process the CSV now..
reader = csv.DictReader(decode_utf8(csv_file))
for row in reader:
lower_row = {k.lower(): v for k, v in row.items()}
#TODO - replace with regex for the removal of all special chars
minus_spec_row = {k.replace("?", ""): v for k, v in lower_row.items()}
trimmed_row = {k.replace(" ", "_"): v for k, v in minus_spec_row.items()}
#factor in foreign keys
trimmed_row['scan_id'] = scan_entry.id
device = Device(ip = trimmed_row['ip_address'], site_id = site.id)
device.save()
trimmed_row['device_id'] = device.id
#pop out device specfic fields
device_fields = ('dns_name', 'ip_address', 'netbios_name','mac_address')
for key in device_fields:
if key in trimmed_row:
del trimmed_row[key]
#remove all unecessary data like N/A, empty values, etc
trimmed_row = {k:v for (k,v) in trimmed_row.items() if v != 'N/A' if v != 'n/a' if v != ''}
#remove commas from integers (bid)
for (k,v) in trimmed_row.items():
if (k=='bid'):
trimmed_row[k] = v.replace(',','')
#convert datetime strings to valid formats
datetime_fields = ('first_discovered','plugin_modification_date','last_observed','plugin_publication_date','vuln_publication_date','patch_publication_date')
for key in datetime_fields:
if (key in trimmed_row):
trimmed_row[key] = parse_date(trimmed_row[key])
#TODO - add Try Catch due to the data inconsistences
vuln = Vulnerability(**trimmed_row)
vuln.save()
#return render(request,'cve_db_app/detail.html',{'data':trimmed_row})
return HttpResponse('<h1>Scan has been sucessfully uploaded.</h1>')
else:
return HttpResponse(form.errors)
# template = loader.get_template('cve_db_app/index.html')
# #empty dict because no data is being passed.
# return HttpResponse(template.render({},request))
else:
form = ScanUploadForm()
return render(request,'cve_db_app/upload_scan.html',{'form':form})
def process_csv(request):
csv_file = request.FILES['file']
#check if the file uploaded is a CSV file
if not csv_file.name.endswith('.csv'):
return HttpResponse('<h1>No CSV file??</h1>')
#if file is too large, return
if csv_file.multiple_chunks():
return HttpResponse("Uploaded file is too big (%.2f MB)." % (csv_file.size/(1000*1000),))
#if we get this far lets process the CSV now..
file_data = csv_file.read().decode("utf-8")
lines = file_data.split("\n")
return HttpResponse("Uploaded file is too big (%.2f MB)." % (lines))
def cve_search(request):
if request.method == 'POST':
#insert CVE API call
CVE_SEARCHURL = "http://cve.circl.lu/api/cve/" + request.POST['cve-id']
r = requests.get(CVE_SEARCHURL)
if r.status_code != 200:
return HttpResponse('Error %s.' % r.text)
else:
parsed_json = json.loads(r.text)
data = {
'cve_id' : parsed_json['id'],
'cwe_id' : parsed_json['cwe'],
'cvss_score': parsed_json['cvss'],
'summary': parsed_json['summary']
}
return JsonResponse(data)
else:
return render(request,'cve_db_app/cve_search.html')
def ajax_get_cve_info(request):
#insert CVE API call
CVE_SEARCHURL = "http://cve.circl.lu/api/cve/" + request.GET['cve_id'];
r = requests.get(CVE_SEARCHURL)
if r.status_code != 200:
return HttpResponse('Error %s.' % r.text)
else:
parsed_json = json.loads(r.text)
data = {
'cve_id' : parsed_json['id'],
'cwe_id' : parsed_json['cwe'],
'cvss_score': parsed_json['cvss'],
'summary': parsed_json['summary']
}
return JsonResponse(data)
|
{"/cve_db_app/forms.py": ["/cve_db_app/models.py"], "/cve_db_app/admin.py": ["/cve_db_app/models.py"], "/cve_db_app/views.py": ["/cve_db_app/forms.py", "/cve_db_app/models.py"]}
|
40,778
|
Sanjivkumar100/i-30-workshop
|
refs/heads/main
|
/Pattern 3.py
|
row=int(input("Enter a row:"))
k=2*row-2
for i in range(0,row):
for j in range(0,k):
print(end=" ")
k-=1
for l in range(0,i+1):
print('*',end=' ')
print("\r")
|
{"/todolist.py": ["/todolistpriorty.py"], "/rsptrial.py": ["/rstfunc.py"]}
|
40,779
|
Sanjivkumar100/i-30-workshop
|
refs/heads/main
|
/todolist.py
|
from todolistpriorty import hightask,lowtask,medtask,edithig,editlow,editmed,deletehig,deletelow,deletemed
def addtask():
add={1:hightask,2:medtask,3:lowtask}
print("Choose the priority level\n\tPress 1 High\n\tPress 2 Medium \n\tPress 3 Low ")
while True:
options=int(input("Enter your choice: "))
if options in add:
action=add[options]
action()
elif options==0:
break;
def edittask():
edit={1:edithig,2:editmed,3:editlow}
print("Choose the priority level\n\tPress 1 High\n\tPress 2 Medium \n\tPress 3 Low ")
while True:
options=int(input("Enter your choice: "))
if options in edit:
action=edit[options]
action()
elif options==0:
break;
def delete():
delete={1:deletehig,2:deletemed,3:deletelow}
print("Choose the priority level\n\tPress 1 High\n\tPress 2 Medium \n\tPress 3 Low ")
while True:
options=int(input("Enter your choice: "))
if options in delete:
action=delete[options]
action()
elif options==0:
break;
actions={1:addtask,2:edittask,3:delete}
while True:
print("Todolist App\n\tPress 1 to add new task\n\tPress 2 to edit the task \n\tPress 3 to delete the task ")
choice=int(input("Enter your choice: "))
if choice in actions:
action=actions[choice]
action()
elif choice==0:
break;
|
{"/todolist.py": ["/todolistpriorty.py"], "/rsptrial.py": ["/rstfunc.py"]}
|
40,780
|
Sanjivkumar100/i-30-workshop
|
refs/heads/main
|
/task 4 13.py
|
l1=[]
l2=[]
l3=[]
def fl(n):
print("elements for first list")
for i in range(1,n+1):
inp=int(input(f"Enter element {i}:"))
l1.extend([inp])
def sl(n):
print("elements for Second list")
for i in range(1,n+1):
inp=int(input(f"Enter element {i}:"))
l2.extend([inp])
def ml(l1,l2):
for j in range(len(l1)):
if (l1[j]%2)!=0:
l3.extend([l1[j]])
for k in range(len(l2)):
if (l2[k]%2)==0:
l3.extend([l2[k]])
print(f"Merged list:{l3}")
n=int(input("Enter a range:"))
fl(n)
sl(n)
ml(l1,l2)
|
{"/todolist.py": ["/todolistpriorty.py"], "/rsptrial.py": ["/rstfunc.py"]}
|
40,781
|
Sanjivkumar100/i-30-workshop
|
refs/heads/main
|
/pattern 1.py
|
def plusrow():
print("+",end=' ')
for i in range(4):
print('-',end=' ')
print('+',end=' ')
for i in range(4):
print('-',end=' ')
print('+')
def otherrow():
print('|',end=' ')
for i in range(4):
print(' ',end=' ')
print('|',end=' ')
for i in range(4):
print(' ',end=' ')
print('|')
for i in range(1,12):
if i==1 or i==6 or i==11:
plusrow()
else:
otherrow()
|
{"/todolist.py": ["/todolistpriorty.py"], "/rsptrial.py": ["/rstfunc.py"]}
|
40,782
|
Sanjivkumar100/i-30-workshop
|
refs/heads/main
|
/task 4 6.py
|
c=str(input("Enter a character:"))
vowels=['A','a','E','e','I','i','o','O','u','U']
if c in vowels:
print("Vowel")
else:
print('Consonant')
|
{"/todolist.py": ["/todolistpriorty.py"], "/rsptrial.py": ["/rstfunc.py"]}
|
40,783
|
Sanjivkumar100/i-30-workshop
|
refs/heads/main
|
/task 4 7.py
|
n=int(input("Enter range:"))
add=0
for i in range(1,n+1):
add=add+i
print(add)
|
{"/todolist.py": ["/todolistpriorty.py"], "/rsptrial.py": ["/rstfunc.py"]}
|
40,784
|
Sanjivkumar100/i-30-workshop
|
refs/heads/main
|
/task 4 9.py
|
def fact(n):
factorial=1
for i in range(1,n+1):
factorial*=i
print(f"factoial of {n}:{factorial}")
a=int(input("Enter a number:"))
fact(a)
|
{"/todolist.py": ["/todolistpriorty.py"], "/rsptrial.py": ["/rstfunc.py"]}
|
40,785
|
Sanjivkumar100/i-30-workshop
|
refs/heads/main
|
/rstfunc.py
|
from random import randint
pscore=0
cscore=0
def maingame(n):
print("Enter rock or paper or scissors")
rounds=0
game=['rock','paper','scissors']
computer=game[randint(0,2)]
global pscore
global cscore
while(rounds!=n):
player=input("Player game:")
if player=='rock':
if computer==game[2]:
pscore+=1
print(f"Player score:{pscore}")
print(f"Computer score :{cscore}")
elif computer==game[1]:
cscore+=1
print(f"Player score:{pscore}")
print(f"Computer score :{cscore}")
else:
print("Same")
elif player=='scissors':
if computer==game[0]:
cscore+=1
print(f"Player score:{pscore}")
print(f"Computer score :{cscore}")
elif computer==game[1]:
pscore+=1
print(f"Player score:{pscore}")
print(f"Computer score :{cscore}")
else:
print("Same")
elif player=='paper':
if computer==game[0]:
pscore+=1
print(f"Player score:{pscore}")
print(f"Computer score :{cscore}")
elif computer==game[2]:
cscore+=1
print(f"Player score:{pscore}")
print(f"Computer score :{cscore}")
else:
print("Same")
rounds+=1
def score():
global pscore
global cscore
if pscore>cscore:
print("Player wins")
elif cscore>pscore:
print("Computer wins")
else:
print("Tie")
|
{"/todolist.py": ["/todolistpriorty.py"], "/rsptrial.py": ["/rstfunc.py"]}
|
40,786
|
Sanjivkumar100/i-30-workshop
|
refs/heads/main
|
/pattern 2.py
|
def starrow(n):
for i in range(n):
print('*',end='')
rows=int(input("Enter the numbers of rows:"))
for i in range(1,rows+1):
starrow(i)
print()
|
{"/todolist.py": ["/todolistpriorty.py"], "/rsptrial.py": ["/rstfunc.py"]}
|
40,787
|
Sanjivkumar100/i-30-workshop
|
refs/heads/main
|
/task 4 2.py
|
a=int(input("Enter a number:"))
if a%2==0:
print("even");
else:
print(odd)
|
{"/todolist.py": ["/todolistpriorty.py"], "/rsptrial.py": ["/rstfunc.py"]}
|
40,788
|
Sanjivkumar100/i-30-workshop
|
refs/heads/main
|
/task 5 1.py
|
def addcontact():
name=input('Enter name:')
phoneno=int(input("Enter phone number"))
contactbook[name]=phoneno
def deletecontact():
name=input("Enter the name to delete:")
del contactbook[name]
print(contactbook)
def editcontact():
name=input('Enter name:')
phoneno=int(input("Enter phone number"))
contactbook[name]=phoneno
def searchcontact():
name=input('Enter name:')
if name in contactbook:
print(f"Number={contactbook[name]}")
def show():
print(contactbook)
contactbook={}
directory={1:addcontact,2:deletecontact,3:editcontact,4:searchcontact,5:show}
choice=0
while True:
print("Contact directory App\n\tPress 1 to add new number\n\tPress 2 to delete the number \n\tPress 3 to edit the number\n\tPress 4 to search the number \n\tPress 5 to show the number")
choice=int(input("Enter your choice: "))
if choice in directory:
book=directory[choice]
book()
elif choice==0:
break;
|
{"/todolist.py": ["/todolistpriorty.py"], "/rsptrial.py": ["/rstfunc.py"]}
|
40,789
|
Sanjivkumar100/i-30-workshop
|
refs/heads/main
|
/task 4 10.py
|
n=int(input("Enter range:"))
for i in range(1,n+1):
if (i%2!=0):
print(" ",i," ")
|
{"/todolist.py": ["/todolistpriorty.py"], "/rsptrial.py": ["/rstfunc.py"]}
|
40,790
|
Sanjivkumar100/i-30-workshop
|
refs/heads/main
|
/task 4 14.py
|
l=[]
n=int(input("Enter range:"))
i=0
for i in range(i,n):
x= int(input("Enter a value: "))
l.append(x);
l.sort()
print(f"The second smallest number is {l[1]}")
|
{"/todolist.py": ["/todolistpriorty.py"], "/rsptrial.py": ["/rstfunc.py"]}
|
40,791
|
Sanjivkumar100/i-30-workshop
|
refs/heads/main
|
/task 4 5.py
|
def SI(p,r,t):
si=(p*r*t)/100
print(f"Simple interest:{si}")
p=float(input("Enter principle:"))
r=float(input("Enter rate:"))
t=float(input("Enter time:"))
SI(p,r,t)
|
{"/todolist.py": ["/todolistpriorty.py"], "/rsptrial.py": ["/rstfunc.py"]}
|
40,792
|
Sanjivkumar100/i-30-workshop
|
refs/heads/main
|
/Car.py
|
import turtle
bob=turtle.Turtle()
bob.color("black","red")
bob.begin_fill()
bob.forward(100)
bob.left(90)
bob.forward(100)
bob.right(90)
bob.forward(200)
bob.right(90)
bob.forward(100)
bob.left(90)
bob.forward(100)
bob.right(90)
bob.forward(100)
bob.right(90)
bob.forward(50)
bob.right(90)
bob.circle(50)
bob.left(90)
bob.forward(200)
bob.right(90)
bob.circle(50)
bob.left(90)
bob.forward(100)
bob.forward(50)
bob.right(90)
bob.forward(100)
bob.end_fill()
bob.color("black","yellow")
bob.begin_fill()
bob.right(90)
bob.forward(25)
bob.right(90)
bob.forward(25)
bob.right(90)
bob.forward(25)
bob.end_fill()
|
{"/todolist.py": ["/todolistpriorty.py"], "/rsptrial.py": ["/rstfunc.py"]}
|
40,793
|
Sanjivkumar100/i-30-workshop
|
refs/heads/main
|
/task 4 12.py
|
n=int(input("Enter a range:"))
even=[i for i in range(1,n+1) if i%2==0]
print(even)
|
{"/todolist.py": ["/todolistpriorty.py"], "/rsptrial.py": ["/rstfunc.py"]}
|
40,794
|
Sanjivkumar100/i-30-workshop
|
refs/heads/main
|
/rsptrial.py
|
from rstfunc import maingame,score
print("Rock Paper & Scissors game")
n=int(input("Enter number of rounds:"))
maingame(n)
score()
|
{"/todolist.py": ["/todolistpriorty.py"], "/rsptrial.py": ["/rstfunc.py"]}
|
40,795
|
Sanjivkumar100/i-30-workshop
|
refs/heads/main
|
/task 4 8.py
|
a=int(input("Start range:"))
b=int(input("End range:"))
add=0
for i in range(a,b+1):
add=add+i
print(add)
|
{"/todolist.py": ["/todolistpriorty.py"], "/rsptrial.py": ["/rstfunc.py"]}
|
40,796
|
Sanjivkumar100/i-30-workshop
|
refs/heads/main
|
/task 4 1.py
|
a=int(input("Enter first number:"))
b=int(input("Enter second number:"))
if a>b:
print(f"{b} is smaller than {a}")
else:
print(f"{a} is smaller than {b}")
|
{"/todolist.py": ["/todolistpriorty.py"], "/rsptrial.py": ["/rstfunc.py"]}
|
40,797
|
Sanjivkumar100/i-30-workshop
|
refs/heads/main
|
/task 4 4.py
|
marks=float(input("Enter student mark:"))
if marks>90:
print('grade:O')
elif marks>81 and marks<90:
print('grade:A')
elif marks>71and marks<80:
print('grade:B')
elif marks >61 and marks<70:
print('grade:C')
elif marks >51 and marks<60:
print('grade:D')
else:
print("U")
|
{"/todolist.py": ["/todolistpriorty.py"], "/rsptrial.py": ["/rstfunc.py"]}
|
40,798
|
Sanjivkumar100/i-30-workshop
|
refs/heads/main
|
/todolistpriorty.py
|
todolisthig=[]
todolistmed=[]
todolistlow=[]
def hightask():
task=input("Enter the new task:")
todolisthig.append(task)
print(todolisthig)
def medtask():
task=input("Enter the new task:")
todolistmed.append(task)
print(todolistmed)
def lowtask():
task=input("Enter the new task:")
todolistlow.append(task)
print(todolistlow)
def edithig():
newtask=input("Enter the new task")
oldtask=input("Enter old task:")
indexno=todolisthig.index(oldtask)
todolisthig[indexno]=newtask
print(todolisthig)
def editmed():
newtask=input("Enter the new task")
oldtask=input("Enter old task:")
indexno=todolistmed.index(oldtask)
todolistmed[indexno]=newtask
print(todolistmed)
def editlow():
newtask=input("Enter the new task")
oldtask=input("Enter old task:")
indexno=todolistlow.index(oldtask)
todolistlow[indexno]=newtask
print(todolistlow)
def deletehig():
task=input("Enter the task:")
todolisthig.remove(task)
print(todolisthig)
def deletemed():
task=input("Enter the task:")
todolisthig.remove(task)
print(todolistmed)
def deletelow():
task=input("Enter the task:")
todolistlow.remove(task)
print(todolistlow)
|
{"/todolist.py": ["/todolistpriorty.py"], "/rsptrial.py": ["/rstfunc.py"]}
|
40,807
|
clairewangjia/Text-Sentiment-Analysis
|
refs/heads/master
|
/Step2_fused_analyzer.py
|
import os
import pandas as pd
import numpy as np
from sklearn.model_selection import KFold
from sklearn.naive_bayes import MultinomialNB
from sklearn.neighbors import KNeighborsClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.linear_model import Lasso
from sklearn.ensemble import RandomForestClassifier
from sklearn.svm import SVC
from sklearn.metrics import classification_report, precision_score, recall_score, f1_score
import vader_sentiment_analyzer
import hashtag
data_dir = './data'
print("Loading data...")
x = pd.read_csv('./data/features_processed.csv')
x = x.values # convert pandas dataframe to ndarray
with open(os.path.join(data_dir, 'labels.txt'), 'r') as f:
y = np.array([ int(line.strip()) for line in f.readlines()])
print("----------------------------------------------------")
print("Training RF classifer with features: favourites, followers, friends, likes, lists, retweets, statuses")
print("----------------------------------------------------")
n = 3
prob_count= np.empty(shape=[0, n])
kf = KFold(n_splits=10)
avg_p = 0
avg_r = 0
macro_f1 = 0
for train, test in kf.split(x):
# model = MultinomialNB().fit(x[train], y[train])
# model = KNeighborsClassifier(n_neighbors=3).fit(x[train], y[train])
model = RandomForestClassifier(n_estimators = 300, max_features = 4, random_state=0).fit(x[train], y[train])
# model = LogisticRegression().fit(x[train], y[train])
# model = Lasso(alpha = 0.1).fit(x[train], y[train])
# model = svm.SVC(probability=True).fit(x[train], y[train])
prob = model.predict_proba(x[test])
predicts = model.predict(x[test])
prob_count = np.concatenate((prob_count, prob),axis=0)
# print(classification_report(y[test],predicts))
avg_p += precision_score(y[test],predicts, average='macro')
avg_r += recall_score(y[test],predicts, average='macro')
macro_f1 += f1_score(y[test],predicts, average='macro')
print('Feature importances of the above features in the RandomForestClassifier:')
print(model.feature_importances_)
print('probability: neg, neu, pos')
print(prob_count)
print('Average Precision of features_set_classifier is %f.' %(avg_p/10.0))
print('Average Recall of features_set_classifier is %f.' %(avg_r/10.0))
print('Average Macro-F1 of features_set_classifier is %f.' %(macro_f1/10.0))
print("----------------------------------------------------")
print("Training VADER sentiment classifer for tweets text and emoji")
prob_text = vader_sentiment_analyzer.main()
print("----------------------------------------------------")
print("Training ML TF-IDF classifer for hashtag")
print("----------------------------------------------------")
prob_hashtag = hashtag.main()
# fuse two models
print("----------------------------------------------------")
print("Combining 2 models in the rule-based late fusion model: text + social features")
print("----------------------------------------------------")
weights = []
for w1 in np.arange(0,0.8,0.01):
w2 = 1-w1
weights.append([w1,w2])
precisions = []
recalls = []
macrof1 = []
for i in range(len(weights)):
w_count, w_text = weights[i]
result_prob = w_count*prob_count + w_text*prob_text
result = np.argmax(result_prob, axis=1)
avg_p = precision_score(y, result, average='macro')
avg_r = recall_score(y, result, average='macro')
macro_f1 = f1_score(y,result, average='macro')
precisions.append(avg_p)
recalls.append(avg_r)
macrof1.append(macro_f1)
opt_id = np.argmax(macrof1)
print('Weight of social_feature_classifier: ' + str(weights[opt_id][0]) + ', Weight of vader_sentiment_analyzer: '+ str(weights[opt_id][1]))
print('Optimal Precision of late fusion model is %f.' %precisions[opt_id])
print('Optimal Recall of late fusion model is %f.' %recalls[opt_id])
print('Optimal Macro-F1 of late fusion model is %f.' %macrof1[opt_id])
# fuse 3 models
print("----------------------------------------------------")
print("Combining 3 models in the rule-based late fusion model: text + social features + hashtags ")
print("----------------------------------------------------")
weights = []
for w1 in np.arange(0,0.7,0.01):
for w2 in np.arange(0,0.7,0.01):
w3 = 1-w1-w2
weights.append([w1, w2, w3])
precisions = []
recalls = []
macrof1 = []
for i in range(len(weights)):
w_count, w_text, w_hashtag = weights[i]
result_prob = w_count*prob_count + w_text*prob_text + w_hashtag*prob_hashtag
result = np.argmax(result_prob, axis=1)
avg_p = precision_score(y, result, average='macro')
avg_r = recall_score(y, result, average='macro')
macro_f1 = f1_score(y,result, average='macro')
precisions.append(avg_p)
recalls.append(avg_r)
macrof1.append(macro_f1)
opt_id = np.argmax(macrof1)
print('Weight of social_feature_classifier: ' + str(weights[opt_id][0]))
print('Weight of vader_sentiment_analyzer: '+ str(weights[opt_id][1]))
print('Weight of hashtag_classifier: '+ str(weights[opt_id][2]))
print('Optimal Precision of late fusion model is %f.' %precisions[opt_id])
print('Optimal Recall of late fusion model is %f.' %recalls[opt_id])
print('Optimal Macro-F1 of late fusion model is %f.' %macrof1[opt_id])
|
{"/Step2_fused_analyzer.py": ["/hashtag.py"]}
|
40,808
|
clairewangjia/Text-Sentiment-Analysis
|
refs/heads/master
|
/Step1_social_hashtag_preprocess.py
|
# encoding=utf8
import sys
reload(sys)
sys.setdefaultencoding('utf8')
import os
import re
import nltk
# nltk.download()
from nltk.corpus import stopwords
import simplejson as json
import pickle
import numpy as np
import pandas as pd
def rm_html_tags(str):
html_prog = re.compile(r'<[^>]+>',re.S)
return html_prog.sub('', str)
def rm_html_escape_characters(str):
pattern_str = r'"|&|<|>| |"|&|<|>| |似|眼|格|+|值|尼'
escape_characters_prog = re.compile(pattern_str, re.S)
return escape_characters_prog.sub('', str)
def rm_at_user(str):
return re.sub(r'@[a-zA-Z_0-9]*', '', str)
def rm_url(str):
return re.sub(r'http[s]?:[/+]?[a-zA-Z0-9_\.\/]*', '', str)
def rm_repeat_chars(str):
return re.sub(r'(.)(\1){2,}', r'\1\1', str)
def rm_hashtag_symbol(str):
return re.sub(r'#', '', str)
def replace_emoticon(emoticon_dict, str):
for k, v in emoticon_dict.items():
str = str.replace(k, v)
return str
def rm_time(str):
return re.sub(r'[0-9][0-9]:[0-9][0-9]', '', str)
def rm_punctuation(current_tweet):
return re.sub(r'[^\w\s]','',current_tweet)
def pre_process(str, porter):
# do not change the preprocessing order only if you know what you're doing
str = str.lower()
str = rm_url(str)
str = rm_at_user(str)
str = rm_repeat_chars(str)
str = rm_hashtag_symbol(str)
str = rm_time(str)
str = rm_punctuation(str)
try:
str = nltk.tokenize.word_tokenize(str)
try:
str = [porter.stem(t) for t in str]
except:
print(str)
pass
except:
print(str)
pass
return str
if __name__ == "__main__":
data_dir = './data' ##Setting your own file path here.
x_filename = 'tweets.txt'
y_filename = 'labels.txt'
porter = nltk.PorterStemmer()
stops = set(stopwords.words('english'))
stops.add('rt')
##load and process samples
print('start extract social features...')
retweets = []
likes = []
friends = []
followers = []
lists = []
favourites = []
statuses = []
cnt = 0
with open(os.path.join(data_dir, x_filename)) as f:
for i, line in enumerate(f):
tweet_obj = json.loads(line.strip(), encoding='utf-8')
retweet = tweet_obj['retweet_count']
like = tweet_obj['favorite_count']
friend = tweet_obj['user']['friends_count']
follower = tweet_obj['user']['followers_count']
listed = tweet_obj['user']['listed_count']
favourite = tweet_obj['user']['favourites_count']
statuse = tweet_obj['user']['statuses_count']
retweets.append(retweet)
likes.append(like)
friends.append(friend)
followers.append(follower)
lists.append(listed)
favourites.append(favourite)
statuses.append(statuse)
tweets_df = pd.DataFrame(
{'retweets': retweets,
'likes': likes,
'friends': friends,
'followers': followers,
'lists': lists,
'favourites': favourites,
'statuses': statuses
})
print('Samples of extracted features...')
print tweets_df.head(5)
###Save df to csv
tweets_df.to_csv('./data/features_processed.csv', index=False)
print("Social features preprocessing is completed")
##load and process hashtags
print("Start extract and process hashtags...")
words_stat = {}
hashtags = []
cnt = 0
with open(os.path.join(data_dir, x_filename)) as f:
for i, line in enumerate(f):
postprocess_tweet = []
tweet_obj = json.loads(line.strip(), encoding='utf-8')
hashtag_dict = tweet_obj['entities']['hashtags']
if len(hashtag_dict) > 0:
hashtag_list = []
for h in hashtag_dict:
single_tag = h['text']
hashtag_list.append(single_tag)
hashtag = ' '.join(hashtag_list)
else:
hashtag = ""
words = pre_process(hashtag, porter)
for word in words:
if word not in stops:
postprocess_tweet.append(word)
if word in words_stat.keys():
words_stat[word][0] += 1
if i != words_stat[word][2]:
words_stat[word][1] += 1
words_stat[word][2] = i
else:
words_stat[word] = [1,1,i]
hashtags.append(' '.join(postprocess_tweet))
##saving the statistics of tf and df for each words into file
print("The number of unique words in hashtag data set is %i." %len(words_stat.keys()))
lowTF_words = set()
# with open(os.path.join(data_dir, 'words_statistics_hashtag.txt'), 'w', encoding='utf-8') as f:
# f.write('TF\tDF\tWORD\n')
for word, stat in sorted(words_stat.items(), key=lambda i: i[1], reverse=True):
if stat[0]<2:
lowTF_words.add(word)
print("The number of low frequency words of hashtags is %d." %len(lowTF_words))
# print(stops)
###Re-process samples, filter low frequency words...
fout = open(os.path.join(data_dir, 'hashtag_processed.txt'), 'w')
hashtag_new = []
for hashtag in hashtags:
words = hashtag.split(' ')
new = []
for w in words:
if w not in lowTF_words:
new.append(w)
new_hashtag = ' '.join(new)
hashtag_new.append(new_hashtag)
fout.write('%s\n' %new_hashtag)
fout.close()
print("Hashtag preprocessing is completed")
|
{"/Step2_fused_analyzer.py": ["/hashtag.py"]}
|
40,809
|
clairewangjia/Text-Sentiment-Analysis
|
refs/heads/master
|
/hashtag.py
|
import os
import numpy as np
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.model_selection import KFold
from sklearn.naive_bayes import MultinomialNB
from sklearn.neighbors import KNeighborsClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import classification_report, precision_score, recall_score, f1_score
from sklearn.linear_model import SGDClassifier
def main():
# if __name__ == "__main__":
data_dir = './data'
print("Loading data...")
with open(os.path.join(data_dir, 'hashtag_processed.txt'), 'r') as f_hashtag:
x_hashtag = f_hashtag.readlines()
with open(os.path.join(data_dir, 'labels.txt'), 'r') as f:
y = np.array(f.readlines())
print("Extract features...")
x_hashtag_feats = TfidfVectorizer().fit_transform(x_hashtag)
print(x_hashtag_feats.shape)
print("Start training and predict...")
n = 3
prob_hashtag= np.empty(shape=[0, n])
kf = KFold(n_splits=10)
avg_p = 0
avg_r = 0
macro_f1 = 0
for train, test in kf.split(x_hashtag_feats):
model = MultinomialNB().fit(x_hashtag_feats[train], y[train])
# model = KNeighborsClassifier(n_neighbors=7).fit(x_hashtag_feats[train], y[train])
# model = RandomForestClassifier(n_estimators=500, max_features=7, random_state=0).fit(x_hashtag_feats[train], y[train])
# model = LogisticRegression().fit(x_hashtag_feats[train], y[train])
# model = SGDClassifier(loss='log', penalty='l2', alpha=1e-3, random_state=0, max_iter=100).fit(x_hashtag_feats[train], y[train])
prob = model.predict_proba(x_hashtag_feats[test])
predicts = model.predict(x_hashtag_feats[test])
# print(classification_report(y[test],predicts))
prob_hashtag = np.concatenate((prob_hashtag, prob))
avg_p += precision_score(y[test],predicts, average='macro')
avg_r += recall_score(y[test],predicts, average='macro')
macro_f1 += f1_score(y[test],predicts, average='macro')
print('Average Precision of hashtag classifer is %f.' %(avg_p/10.0))
print('Average Recall of hashtag classifer is %f.' %(avg_r/10.0))
print('Average Macro-F1 of hashtag classifer is %f.' %(macro_f1/10.0))
return prob_hashtag
|
{"/Step2_fused_analyzer.py": ["/hashtag.py"]}
|
40,813
|
Chetan8462/hackowasp
|
refs/heads/main
|
/user/models.py
|
from django.db import models
from phonenumber_field.modelfields import PhoneNumberField
from django.contrib.auth.models import AbstractUser
from django.contrib.auth.base_user import BaseUserManager
from phonenumber_field.modelfields import PhoneNumberField
from home.models import *
class CustomUserManager(BaseUserManager):
def create_superuser(self, email, password=None):
user = self.model(email=email, is_staff=True, is_superuser=True)
user.set_password(password)
user.save()
return user
class User(AbstractUser):
    """Custom user that authenticates by email instead of username."""
    # Drop the inherited username column entirely; email is the identifier.
    username = None
    email = models.EmailField(verbose_name='Email Address', unique=True)
    # name = models.CharField(max_length=50)
    # contact_no = PhoneNumberField(help_text='Add country code before the contact no.', null=True)
    USERNAME_FIELD = 'email'
    # NOTE(review): nulling user_permissions/groups removes Django's default
    # authorization machinery entirely — confirm that is intentional.
    user_permissions = None
    groups = None
    REQUIRED_FIELDS = []
    objects = CustomUserManager()

    def __str__(self):
        return self.email
|
{"/user/forms.py": ["/user/models.py"], "/user/views.py": ["/user/forms.py", "/user/models.py"]}
|
40,814
|
Chetan8462/hackowasp
|
refs/heads/main
|
/user/forms.py
|
from django import forms
from django.contrib.auth.forms import UserCreationForm
from phonenumber_field.formfields import PhoneNumberField
from django.db import transaction
from .models import User
from django.contrib.auth import get_user_model
User = get_user_model()
class RegistrationForm(UserCreationForm):
    """Sign-up form for the email-based User model (email + two passwords)."""

    class Meta(UserCreationForm.Meta):
        model = User
        fields = ['email', 'password1','password2']
|
{"/user/forms.py": ["/user/models.py"], "/user/views.py": ["/user/forms.py", "/user/models.py"]}
|
40,815
|
Chetan8462/hackowasp
|
refs/heads/main
|
/user/views.py
|
from django.shortcuts import render, redirect
from django.contrib.auth.forms import UserCreationForm
from .forms import RegistrationForm
from django.contrib import messages
from django.contrib.auth import get_user_model
from .models import User
User = get_user_model()
def register(request):
    """Handle user sign-up.

    GET  -> render an empty registration form.
    POST -> validate; on success create the user and redirect to the
            login page, otherwise re-render the bound form so field
            errors are shown to the user.
    """
    if request.method == 'POST':
        form = RegistrationForm(request.POST)
        if form.is_valid():
            # form.save() already persists the user; the original called
            # user.save() a second time redundantly.
            form.save()
            return redirect('login')
    else:
        form = RegistrationForm()
    return render(request, 'user/register.html', {'form': form})
|
{"/user/forms.py": ["/user/models.py"], "/user/views.py": ["/user/forms.py", "/user/models.py"]}
|
40,817
|
raulium/jarvis
|
refs/heads/master
|
/vacation_mod.py
|
#!/usr/local/env python
# ============== CONFIG PARAMETERS
from config import SNOOZE_TIME, READING_TIME, BED_TIME
# ============== INTERNAL LIBRARIES
from timing_mod import snooze
from IFTTT_mod import IFTTT
from mac_mod import openApp, setLivingRoom, startRadio, setVolume, setDisplay, macTerm
# ============== EXTERNAL LIBRARIES
import os.path
def vMorningRoutine():
    """Vacation-mode morning: just sleep until the normal snooze alarm."""
    snooze(SNOOZE_TIME)
def vEveningRoutine():
    """Vacation-mode evening: run the lighting schedule with nobody home."""
    snooze(READING_TIME)
    # closeApp("Google Chrome")
    IFTTT("reading")
    snooze(BED_TIME)
    IFTTT("sunset")
    IFTTT("lights_off")
def away():
    """Mark the house as unoccupied by dropping a lock file.

    Bug fix: macTerm() concatenates its argument into a shell string, so
    passing a list (as the original did) raised TypeError. Use a plain
    command string, matching setVacation()/rmVacation().
    """
    if not checkAwayStatus():
        macTerm('touch /tmp/away.lock')
        # setLivingRoom()
        # setVolume(5)
def back():
    """Clear the away lock file.

    Bug fix: macTerm() expects a shell-command string, not a list (the
    original list argument raised TypeError on string concatenation).
    """
    if checkAwayStatus():
        macTerm('rm /tmp/away.lock')
        # setDisplay()
def checkAwayStatus():
    """Return True when the away lock file is present."""
    return os.path.exists('/tmp/away.lock')
def setVacation():
    """Enter vacation mode by creating the vacation lock file."""
    cmd = 'touch /tmp/vacation.lock'
    macTerm(cmd)
def rmVacation():
    """Leave vacation mode by removing the vacation lock file."""
    cmd = 'rm /tmp/vacation.lock'
    macTerm(cmd)
def checkVacationStatus():
    """True while the vacation lock file exists."""
    return os.path.exists('/tmp/vacation.lock')
|
{"/vacation_mod.py": ["/timing_mod.py", "/IFTTT_mod.py", "/mac_mod.py"], "/evening_mod.py": ["/interaction_mod.py", "/timing_mod.py", "/mac_mod.py", "/IFTTT_mod.py"], "/lab_mod.py": ["/web_mod.py"], "/math_mod.py": ["/interaction_mod.py"], "/interaction_mod.py": ["/gmail_mod.py", "/mac_mod.py"], "/main.py": ["/vacation_mod.py", "/interaction_mod.py", "/mac_mod.py", "/morning_mod.py", "/evening_mod.py", "/IFTTT_mod.py"], "/weather_mod.py": ["/web_mod.py", "/interaction_mod.py", "/timing_mod.py"], "/yoga_mod.py": ["/interaction_mod.py", "/mac_mod.py"]}
|
40,818
|
raulium/jarvis
|
refs/heads/master
|
/gmail_mod.py
|
from __future__ import print_function
# ============== CONFIG PARAMETERS
from config import EMAIL, USERNAME
# ============== INTERNAL LIBRARIES
# ============== EXTERNAL LIBRARIES
import httplib2
import os
import base64
from apiclient import discovery, errors
from oauth2client import client
from oauth2client import tools
from oauth2client.file import Storage
from email.MIMEText import MIMEText
# Parse oauth2client's standard CLI flags when argparse is available;
# tools.run_flow() consumes them during the OAuth browser dance.
try:
    import argparse
    flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()
except ImportError:
    flags = None
# Send-only Gmail scope; changing SCOPES requires deleting the cached token.
SCOPES = 'https://www.googleapis.com/auth/gmail.send'
CLIENT_SECRET_FILE = 'client_secret.json'
APPLICATION_NAME = 'GMAIL API Python Quickstart'
def get_credentials():
    """Load cached Gmail OAuth credentials, running the browser flow if
    they are missing or invalid.

    Returns an oauth2client Credentials object. The token is cached at
    /Users/<USERNAME>/.credentials/gmail-python-quickstart.json.
    """
    home_dir = os.path.expanduser('/Users/' + USERNAME + '/')
    credential_dir = os.path.join(home_dir, '.credentials')
    if not os.path.exists(credential_dir):
        os.makedirs(credential_dir)
    credential_path = os.path.join(credential_dir, 'gmail-python-quickstart.json')
    store = Storage(credential_path)
    credentials = store.get()
    if not credentials or credentials.invalid:
        flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)
        flow.user_agent = APPLICATION_NAME
        if flags:
            credentials = tools.run_flow(flow, store, flags)
        else:
            # tools.run() is the deprecated pre-argparse fallback.
            credentials = tools.run(flow, store)
        print('Storing credentials to ' + credential_path)
    return credentials
def create_message(sender, to, subject, message_text):
message = MIMEText(message_text)
message['to'] = to
message['from'] = sender
message['subject'] = subject
return {'raw': base64.urlsafe_b64encode(message.as_string())}
def send_message(service, user_id, message):
    """Send *message* (a dict with a 'raw' key) via the Gmail API *service*.

    user_id: Gmail user id, or the special value 'me'.
    Returns the sent message resource, or None when the API call failed
    (HttpError is printed, not re-raised).
    """
    try:
        message = (service.users().messages().send(userId=user_id, body=message)
                   .execute())
        print("Message Id: " + str(message['id']))
        return message
    except errors.HttpError as error:
        print('An error occurred: ' + str(error))
def sendGmail(mail_to, mail_subject, mail_body):
    """High-level helper: authorize, build, and send one email from EMAIL."""
    credentials = get_credentials()
    http = credentials.authorize(httplib2.Http())
    service = discovery.build('gmail', 'v1', http=http)
    sender = EMAIL
    to = mail_to
    subject = mail_subject
    message_text = mail_body
    compose_message = create_message(sender=sender, to=to, subject=subject, message_text=message_text)
    send_message(service=service, user_id='me', message=compose_message)
|
{"/vacation_mod.py": ["/timing_mod.py", "/IFTTT_mod.py", "/mac_mod.py"], "/evening_mod.py": ["/interaction_mod.py", "/timing_mod.py", "/mac_mod.py", "/IFTTT_mod.py"], "/lab_mod.py": ["/web_mod.py"], "/math_mod.py": ["/interaction_mod.py"], "/interaction_mod.py": ["/gmail_mod.py", "/mac_mod.py"], "/main.py": ["/vacation_mod.py", "/interaction_mod.py", "/mac_mod.py", "/morning_mod.py", "/evening_mod.py", "/IFTTT_mod.py"], "/weather_mod.py": ["/web_mod.py", "/interaction_mod.py", "/timing_mod.py"], "/yoga_mod.py": ["/interaction_mod.py", "/mac_mod.py"]}
|
40,819
|
raulium/jarvis
|
refs/heads/master
|
/morning_mod.py
|
#!/usr/bin/python
# ============== EXTERNAL LIBRARIES
import time
import random
from datetime import datetime
from atd import atd
# ============== CONFIG PARAMETERS\
from config import DOW, NAME, SNOOZE_TIME, NORMAL_TIME
# ============== INTERNAL LIBRARIES
from interaction_mod import GREETING, WAKE, say, laboratoryOptions, volunteerOptions, dayOffOptions, DAY_ASSES
from timing_mod import holidayDict, snooze, t_minus, getTime
from IFTTT_mod import IFTTT, IFTTTcmd
from lab_mod import labStatus
from mac_mod import setVolume, setDisplay, startMusic, openPage
from weather_mod import getSunsetDTO, newDailyReport
from math_mod import maths
from motivation_mod import daysToWeekend, weeksToDate, counterString, motivate
# =======================================================================
# [Good] Morning [Raul]. | The time is [Time], and [All is well]. | [It seems today will be a] [beautiful] [day of the week]
# The temperature is [temp], with an expected high of [high] and a low of [low].
# [Bad conditions]
# - conditions
# - dew point
# - dew point distance
# [Holiday condition]
# And may I also wish you a happy [holiday]
# NEW MORNING
def dayMessage():
    """Compose the spoken morning status sentence: day quality, weather
    report, any adverse conditions, and an optional holiday greeting."""
    weekday = datetime.today().weekday()
    day_evaluation, bad_conditions, weather_segment = newDailyReport()
    print("Bad Conditions: " + ', '.join(bad_conditions))
    day_segment = random.choice(DAY_ASSES) + day_evaluation + " " + DOW[weekday] + "."
    bad_segment = " "
    if bad_conditions:
        bad_segment = "Weather conditions to consider are: " + ', '.join(bad_conditions)
    holiday_segment = ""
    holiday_name = holidayDict()
    if holiday_name:
        holiday_segment = "And may I also wish you a happy " + holiday_name + "."
    return day_segment + ". " + weather_segment + ". " + bad_segment + ". " + holiday_segment + "."
def morningRoutine():
    """Full wake-up sequence: weekday/weekend snoozing, sunrise lights,
    music, spoken greeting, day-readiness dialogue, then the daily report.

    Bug fixes:
    - the original tested `holString`, a local variable of dayMessage(),
      which raised NameError here — we call holidayDict() directly;
    - `datetime(2018, 06, 22)` is a SyntaxError on Python 3 (leading
      zero in an int literal); the month is now written as 6.
    """
    WD = datetime.today().weekday()
    statusMessage = dayMessage()
    if WD >= 5:  # It's a weekend
        snooze(t_minus(SNOOZE_TIME, 30))
        IFTTT("sunrise")
        snooze(SNOOZE_TIME)
    else:
        if labStatus():  # Where "True" or "1" means closed
            # Only open the status page on non-holiday closures;
            # NOTE(review): grouping reconstructed from flattened source.
            if not holidayDict():
                openPage("http://www.ll.mit.edu/status/index.html")
                statusMessage = "The laboratory appears to be closed today. I've opened the lab's status page, if you care to learn more. " + statusMessage
            snooze(t_minus(SNOOZE_TIME, 30))
            IFTTT("sunrise")
            snooze(SNOOZE_TIME)
        else:
            snooze(t_minus(NORMAL_TIME, 30))
            IFTTT("sunrise")
            snooze(NORMAL_TIME)
    setVolume(5)
    setDisplay()
    # Schedule the evening light scenes at today's sunset via atd.
    sunset = getSunsetDTO()
    l = IFTTTcmd('living_room_lifx')
    w = IFTTTcmd('living_room_wink')
    atd.at(l, sunset)
    atd.at(w, sunset)
    startMusic()
    time.sleep(30)
    greeting_segment = random.choice(GREETING) + ", " + NAME + "."
    time_segment = "The time is " + getTime() + " " + random.choice(WAKE)
    say(greeting_segment + ". " + time_segment)
    time.sleep(40)
    myStatus = 0
    if datetime.today().weekday() <= 4:
        myStatus = laboratoryOptions()
    elif datetime.today().weekday() == 5:
        myStatus = volunteerOptions()
    else:
        myStatus = dayOffOptions()
    if myStatus < 1:
        # Day is on: run the wake-up math quiz, then the status report.
        maths()
        time.sleep(5)
        IFTTT("lights_on")
        IFTTT("wakeup")
        say(statusMessage)
        semesterEnd = datetime(2018, 6, 22)
        say(counterString(daysToWeekend(), "day") + " the weekend, and " + counterString(weeksToDate(semesterEnd), "week") + " the Chad returns.")
        say(motivate())
    else:
        say("Everything has been taken care of. Feel free to go back to bed.")
|
{"/vacation_mod.py": ["/timing_mod.py", "/IFTTT_mod.py", "/mac_mod.py"], "/evening_mod.py": ["/interaction_mod.py", "/timing_mod.py", "/mac_mod.py", "/IFTTT_mod.py"], "/lab_mod.py": ["/web_mod.py"], "/math_mod.py": ["/interaction_mod.py"], "/interaction_mod.py": ["/gmail_mod.py", "/mac_mod.py"], "/main.py": ["/vacation_mod.py", "/interaction_mod.py", "/mac_mod.py", "/morning_mod.py", "/evening_mod.py", "/IFTTT_mod.py"], "/weather_mod.py": ["/web_mod.py", "/interaction_mod.py", "/timing_mod.py"], "/yoga_mod.py": ["/interaction_mod.py", "/mac_mod.py"]}
|
40,820
|
raulium/jarvis
|
refs/heads/master
|
/evening_mod.py
|
#!/usr/local/env python
# ============== CONFIG PARAMETERS
from config import READING_TIME, BED_TIME
# ============== INTERNAL LIBRARIES
from interaction_mod import WARN, saiff
from timing_mod import getCurrentTime, snooze, t_minus
from mac_mod import notification, setLivingRoom, setDisplay
from IFTTT_mod import IFTTT
# ============== EXTERNAL LIBRARIES
import time
import random
def eveningRoutine():
    """Nightly wind-down: time notifications, a spoken reminder over the
    living-room speakers, reading-light scene, then lights out at bed time."""
    h, m, t = getCurrentTime()
    timestring = str(h) + ":" + str(m) + " " + t
    title = "Time Notification"
    msg = "It is " + timestring + "."
    notification(title, msg)
    # Sleep until 10 minutes before reading time, then re-read the clock.
    snooze(t_minus(READING_TIME, 10))
    h, m, t = getCurrentTime()
    timestring = str(h) + ":" + str(m) + " " + t
    title = "Time Notification"
    msg = "It is " + timestring + "."
    notification(title, msg)
    # NOTE(review): the notification fires twice in a row here — possibly
    # intentional emphasis, possibly a copy/paste duplicate; confirm.
    notification(title, msg)
    rmsg = random.choice(WARN) + " It is " + timestring + "."
    setLivingRoom()
    time.sleep(1)
    saiff(rmsg, 'evening')
    setDisplay()
    notification(title, "This computer will lock in 10 min.")
    snooze(READING_TIME)
    IFTTT("reading")
    snooze(t_minus(BED_TIME, 5))
    IFTTT("light_notice")
    snooze(BED_TIME)
    IFTTT("light_notice")
    IFTTT("sunset")
    IFTTT("lights_off")
|
{"/vacation_mod.py": ["/timing_mod.py", "/IFTTT_mod.py", "/mac_mod.py"], "/evening_mod.py": ["/interaction_mod.py", "/timing_mod.py", "/mac_mod.py", "/IFTTT_mod.py"], "/lab_mod.py": ["/web_mod.py"], "/math_mod.py": ["/interaction_mod.py"], "/interaction_mod.py": ["/gmail_mod.py", "/mac_mod.py"], "/main.py": ["/vacation_mod.py", "/interaction_mod.py", "/mac_mod.py", "/morning_mod.py", "/evening_mod.py", "/IFTTT_mod.py"], "/weather_mod.py": ["/web_mod.py", "/interaction_mod.py", "/timing_mod.py"], "/yoga_mod.py": ["/interaction_mod.py", "/mac_mod.py"]}
|
40,821
|
raulium/jarvis
|
refs/heads/master
|
/web_mod.py
|
#!/usr/local/env python
# ============== CONFIG PARAMETERS
# ============== INTERNAL LIBRARIES
# ============== EXTERNAL LIBRARIES
import re
import urllib
from BeautifulSoup import BeautifulSoup as bsp
class MyOpener(urllib.FancyURLopener):
    """URL opener with a desktop-browser User-Agent (Python 2 urllib)."""
    version = 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36'
def ripText(URL):
    """Fetch *URL*, take the first <p> element of the page body, strip its
    <p>/</p> tags, and return the text split into sentences on '.'."""
    page = bsp(urllib.urlopen(URL).read())
    first_paragraph = str(page.body.div.findAll('p')[0])
    for tag in ("<p>", "</p>"):
        first_paragraph = re.sub(tag, '', first_paragraph)
    return first_paragraph.split('.')
|
{"/vacation_mod.py": ["/timing_mod.py", "/IFTTT_mod.py", "/mac_mod.py"], "/evening_mod.py": ["/interaction_mod.py", "/timing_mod.py", "/mac_mod.py", "/IFTTT_mod.py"], "/lab_mod.py": ["/web_mod.py"], "/math_mod.py": ["/interaction_mod.py"], "/interaction_mod.py": ["/gmail_mod.py", "/mac_mod.py"], "/main.py": ["/vacation_mod.py", "/interaction_mod.py", "/mac_mod.py", "/morning_mod.py", "/evening_mod.py", "/IFTTT_mod.py"], "/weather_mod.py": ["/web_mod.py", "/interaction_mod.py", "/timing_mod.py"], "/yoga_mod.py": ["/interaction_mod.py", "/mac_mod.py"]}
|
40,822
|
raulium/jarvis
|
refs/heads/master
|
/lab_mod.py
|
#!/usr/local/env python
# ============== CONFIG PARAMETERS
# ============== INTERNAL LIBRARIES
from web_mod import ripText
# ============== EXTERNAL LIBRARIES
def labStatus():
    """Check status of the Laboratory, returning 0 if open & 1 if closed"""
    first_sentence = str(ripText("http://www.ll.mit.edu/status/index.html")[0])
    if "closed" in first_sentence:
        return 1
    if "open" in first_sentence:
        return 0
    # Falls through to an implicit None when neither keyword appears,
    # matching the existing behavior callers may rely on.
|
{"/vacation_mod.py": ["/timing_mod.py", "/IFTTT_mod.py", "/mac_mod.py"], "/evening_mod.py": ["/interaction_mod.py", "/timing_mod.py", "/mac_mod.py", "/IFTTT_mod.py"], "/lab_mod.py": ["/web_mod.py"], "/math_mod.py": ["/interaction_mod.py"], "/interaction_mod.py": ["/gmail_mod.py", "/mac_mod.py"], "/main.py": ["/vacation_mod.py", "/interaction_mod.py", "/mac_mod.py", "/morning_mod.py", "/evening_mod.py", "/IFTTT_mod.py"], "/weather_mod.py": ["/web_mod.py", "/interaction_mod.py", "/timing_mod.py"], "/yoga_mod.py": ["/interaction_mod.py", "/mac_mod.py"]}
|
40,823
|
raulium/jarvis
|
refs/heads/master
|
/math_mod.py
|
#!/usr/local/env python
# ============== EXTERNAL LIBRARIES
import random
from word2number import w2n
# ============== CONFIG PARAMETERS
# ============== INTERNAL LIBRARIES
from interaction_mod import POSITIVE, NEGATIVE, say, getReply
def do_math():  # COULD USE REWORK. WHAT HAPPENS IF YOU NEVER HEAR A REPLY? (BREAK!)
    """
    Pose one spoken multiplication question and grade the spoken reply.
    Returns 1 for a correct answer, 0 for a wrong or unparseable one.
    """
    left = random.randint(3, 9)
    right = random.randint(6, 9)
    say('What is ' + str(left) + ' multiplied by ' + str(right) + '?')
    heard = getReply()
    try:
        heard = w2n.word_to_num(heard)
    except ValueError:
        say(random.choice(NEGATIVE))
        return 0
    if heard == left * right:
        say(random.choice(POSITIVE))
        return 1
    say(random.choice(NEGATIVE))
    return 0
def maths():  # Maybe rename this?
    """
    Quiz the user until three problems in a row are answered correctly;
    any wrong answer resets the streak to zero.
    """
    say("How about a little exercise?")
    streak = 0
    while streak < 3:
        if do_math():
            streak += 1
        else:
            streak = 0
def main():
    """Entry point: run the maths quiz loop standalone."""
    maths()


if __name__ == '__main__':
    main()
|
{"/vacation_mod.py": ["/timing_mod.py", "/IFTTT_mod.py", "/mac_mod.py"], "/evening_mod.py": ["/interaction_mod.py", "/timing_mod.py", "/mac_mod.py", "/IFTTT_mod.py"], "/lab_mod.py": ["/web_mod.py"], "/math_mod.py": ["/interaction_mod.py"], "/interaction_mod.py": ["/gmail_mod.py", "/mac_mod.py"], "/main.py": ["/vacation_mod.py", "/interaction_mod.py", "/mac_mod.py", "/morning_mod.py", "/evening_mod.py", "/IFTTT_mod.py"], "/weather_mod.py": ["/web_mod.py", "/interaction_mod.py", "/timing_mod.py"], "/yoga_mod.py": ["/interaction_mod.py", "/mac_mod.py"]}
|
40,824
|
raulium/jarvis
|
refs/heads/master
|
/interaction_mod.py
|
#!/usr/local/env python
# ============== EXTERNAL LIBRARIES
import random
import time
from subprocess import Popen
import speech_recognition as sr
from gmail_mod import sendGmail
# ============== CONFIG PARAMETERS
from config import NAME, WORK_LIST_STRING, ILL_MSG, WFH_MSG, VOLUNTEER_EMAIL, VOLUNTEER_ILL_MSG, VOLUNTEER_WFH_MSG
# ============== INTERNAL LIBRARIES
from mac_mod import check_process
# ============== CUSTOM REACTIONS & INTERACTION SPEECH
# Phrase pools: one entry is picked at random so the assistant does not
# repeat itself verbatim every day. (Typos below are in the spoken
# strings and are preserved as-is — they are runtime output.)
GREETING = ["Good morning", "Top of the morning", "Rise and shine",
            "Another dawn, another day", "Time to get up"]
WAKE = ["This is your monring wake up call.", "It is time to start your day.",
        "which means it's that time again.", "So let's not be lazy.",
        "And all is well."]
LAZY = ["It is time to get up.", "You can't blame anyone but yourself.",
        "It will only get worse from here if you don't get up."]
POSITIVE = ["Excellent!", "Perfect!", "Splended!", "Good enough.",
            "Wonderful.", "Outstanding!", "Marvelous!", "Great!", "Fantastic!",
            "Okay."]
NEGATIVE = ["Incorrect", "No.", "That's offensive",
            "Inferior performance", "R you even trying", "I think not.",
            "This is sad."]
WARN = [NAME + ", it's that time again.", "It is time to get ready for bed.",
        "The bells are tolling, " + NAME + ".", "Early to bed, early to rise."]
# Keyword lists matched against the user's transcribed reply.
ILL_KEYS = ["not feeling well", "sick", "don't feel", "ill"]
WFH_KEYS = ["staying", "sleeping", "not going in", "day off", "working from home"]
DAY_ASSES = ["It seems today will be a", "It appears today will be a", "Today will be a",
             "It looks as though today might be a"]
# Day-quality adjectives; leading "n"/" " joins with the article in DAY_ASSES.
GREAT = ["n amazing", " great", " beautiful", " terrific", " perfect"]
GOOD = [" good", " pretty good", " plesant", " favorable"]
OKAY = ["n alright", " decent", "n okay", " satisfactory"]
BAD = [" terrible", "bad", "unfavorable", "n unsatisfactory", "n ugly"]
# ============== INTERACTION functions
def say(STRING):
    """Speak *STRING* via macOS `say` and block until playback finishes."""
    cmd = 'say -v Lee "' + STRING + '"'
    Popen(cmd, shell=True)
    saytrap()
def saiff(STRING, NAME):
    """Render *STRING* to /tmp/<NAME>.aiff with `say`, then play it.

    NOTE(review): rendering and playback are both fire-and-forget Popens;
    paiff() may start before the aiff is fully written — confirm timing.
    """
    cmd = 'say -v Lee "' + STRING + '" -o /tmp/' + NAME + '.aiff'
    Popen(cmd, shell=True)
    paiff(NAME)
def paiff(NAME):
    """Play /tmp/<NAME>.aiff with afplay and block until it finishes."""
    cmd = 'afplay /tmp/' + NAME + '.aiff'
    Popen(cmd, shell=True)
    aifftrap()
def saytrap():
    """Block until the `say` process exits.

    Bug fix: the original `while ...: continue` busy-waited at 100% CPU;
    sleep briefly between polls instead.
    """
    while check_process('say'):
        time.sleep(0.1)
def aifftrap():
    """Block until the `afplay` process exits.

    Bug fix: replaces a 100%-CPU busy-wait with a short polling sleep.
    """
    while check_process('afplay'):
        time.sleep(0.1)
def laboratoryOptions():
    """Morning dialogue for lab days.

    Returns 1 when the day is cancelled (sick email sent), 0 otherwise.

    Bug fixes: string comparisons used `is` (identity), which is not
    guaranteed to match equal strings — use `==`; the ready branch left
    the loop via `break`, making the function return None, which breaks
    the caller's `myStatus < 1` check — return 0 explicitly.
    """
    while 1:
        say("Are you ready to start your day?")
        reply = getReply()
        if reply == "ERR":
            say("I'm sorry, I didn't quite catch that.")
        elif reply == "NULL":
            mystring = NAME + " " + random.choice(LAZY)
            say(mystring)
        elif any(o in reply for o in ILL_KEYS):
            say("I'm sorry to hear that. Would you like me to contact the Laboratory?")
            confirm = getReply()
            if 'yes' in confirm:
                say("Very well. Sending message now.")
                sendGmail(WORK_LIST_STRING, "Sick Day", ILL_MSG)
                return 1
            if 'no' in confirm:
                say("Then you shouldn't say you're feeling ill.")
                return 0
        elif any(o in reply for o in WFH_KEYS):
            say("Would you like me to notify the Laboratory?")
            confirm = getReply()
            if 'yes' in confirm:
                say("Very well. Sending message now.")
                sendGmail(WORK_LIST_STRING, "WFH Today", WFH_MSG)
                return 0
            if 'no' in confirm:
                say("Then you should have said you were ready to start your day.")
                return 0
        else:
            say(random.choice(POSITIVE))
            return 0
def volunteerOptions():
    """Morning dialogue for museum volunteer days.

    Returns 1 when the museum was notified you're out, 0 otherwise.

    Bug fixes: `is` string comparisons replaced with `==`; the ready
    branch now returns 0 explicitly instead of breaking out and
    returning None (the caller compares the result with `< 1`).
    """
    while 1:
        say("Are you ready to start your day?")
        reply = getReply()
        if reply == "ERR":
            say("I'm sorry, I didn't quite catch that.")
        elif reply == "NULL":
            mystring = NAME + " " + random.choice(LAZY)
            say(mystring)
        elif any(o in reply for o in ILL_KEYS):
            say("I'm sorry to hear that. Would you like me to contact the Museum?")
            confirm = getReply()
            if 'yes' in confirm:
                say("Very well. Sending message now.")
                sendGmail(VOLUNTEER_EMAIL, "Out Sick", VOLUNTEER_ILL_MSG)
                return 1
            if 'no' in confirm:
                say("Then you shouldn't say you're feeling ill.")
                return 0
        elif any(o in reply for o in WFH_KEYS):
            say("Would you like me to notify the Museum?")
            confirm = getReply()
            if 'yes' in confirm:
                say("Very well. Sending message now.")
                sendGmail(VOLUNTEER_EMAIL, "Out Today", VOLUNTEER_WFH_MSG)
                return 1
            if 'no' in confirm:
                say("Then you should have said you were ready to start your day.")
                return 0
        else:
            say(random.choice(POSITIVE))
            return 0
def dayOffOptions():
    """Morning dialogue for free days; loops until a clear answer, then 0.

    Bug fix: `is "ERR"` / `is "NULL"` identity comparisons replaced with
    `==` — `is` on string literals is not a reliable equality test.
    """
    while 1:
        say("Are you ready to start your day?")
        reply = getReply()
        if reply == "ERR":
            say("I'm sorry, I didn't quite catch that.")
        elif reply == "NULL":
            mystring = NAME + " " + random.choice(LAZY)
            say(mystring)
        else:
            say(random.choice(POSITIVE))
            return 0
def getReply():
    """Listen on the microphone and transcribe one utterance with Sphinx.

    Returns the transcript, "NULL" on timeout/silence, or "ERR" when the
    audio could not be understood.

    Bug fix: r.listen() is what raises sr.WaitTimeoutError, but the
    original called it *outside* the try-block, so a 10 s timeout
    crashed the caller instead of returning "NULL".
    """
    r = sr.Recognizer()
    with sr.Microphone() as source:
        reply = "NULL"
        r.adjust_for_ambient_noise(source, duration=1)
        try:
            audio = r.listen(source, timeout=10)
            reply = r.recognize_sphinx(audio)
            print("R:\t" + str(reply))
        except sr.WaitTimeoutError:
            reply = "NULL"
        except sr.UnknownValueError:
            reply = "ERR"
        except sr.RequestError as e:
            print("Spinx error; {0}".format(e))
    return reply
# def getReply():
# try:
# r = sr.Recognizer()
# with sr.Microphone() as source:
# r.adjust_for_ambient_noise(source, duration=1)
# audio = r.listen(source, timeout=10)
# try:
# reply = r.recognize_google(audio)
# except LookupError:
# reply = "ERR" # Bad input condition
# except sr.WaitTimeoutError:
# reply = "NULL" # Nil heard
# except sr.UnknownValueError:
# reply = "ERR" # Bad input condition
# return reply # Return transcript of reply
def micTest():
    """Interactive microphone check: asks the user to say
    'authorization <word>' and echoes the word back to confirm the
    speech pipeline works end to end."""
    say("Voice authorization required.")
    r = getReply()
    if 'authorization' in r:
        r = r.split('authorization')
        r = r[1]
        time.sleep(0.2)
        say(r + " recieved.")
        say('Authorization accepted. Hello, ' + NAME + ".")
|
{"/vacation_mod.py": ["/timing_mod.py", "/IFTTT_mod.py", "/mac_mod.py"], "/evening_mod.py": ["/interaction_mod.py", "/timing_mod.py", "/mac_mod.py", "/IFTTT_mod.py"], "/lab_mod.py": ["/web_mod.py"], "/math_mod.py": ["/interaction_mod.py"], "/interaction_mod.py": ["/gmail_mod.py", "/mac_mod.py"], "/main.py": ["/vacation_mod.py", "/interaction_mod.py", "/mac_mod.py", "/morning_mod.py", "/evening_mod.py", "/IFTTT_mod.py"], "/weather_mod.py": ["/web_mod.py", "/interaction_mod.py", "/timing_mod.py"], "/yoga_mod.py": ["/interaction_mod.py", "/mac_mod.py"]}
|
40,825
|
raulium/jarvis
|
refs/heads/master
|
/timing_mod.py
|
#!/usr/local/env python
# ============== EXTERNAL LIBRARIES
from __future__ import print_function
import holidays
import time
from datetime import datetime, timedelta, date
# ============== INTERNAL LIBRARIES
# ============== CONFIG PARAMETERS
from config import MY_STATE
def getCurrentTime():
    """Return the current wall-clock time as (hour, minute, 'AM'/'PM').

    hour is 1-12; minute stays an int unless < 10, in which case it is a
    zero-padded string (preserved for existing callers that concatenate).

    Bug fixes: noon was reported as "12 ... AM" and midnight as hour 0;
    the hour is now folded into the proper 12-hour clock.
    """
    now = datetime.now()
    tod = "PM" if now.hour >= 12 else "AM"
    hour = now.hour % 12
    if hour == 0:
        hour = 12
    m = now.minute
    if m < 10:
        m = "0" + str(m)
    return hour, m, tod
def getTime():
    """Current time formatted as 'H:MM AM/PM'."""
    hour, minute, period = getCurrentTime()
    return "{}:{} {}".format(hour, minute, period)
def snooze(new_alarm):
    """Sleep until today's wall-clock time *new_alarm* ('H:MM AM'/'PM').

    Re-computes the remaining seconds after each sleep so the loop
    converges on the target time rather than overshooting.
    NOTE(review): '12:xx AM' becomes hour 12 (noon) and a 12 PM alarm
    becomes hour 24 (ValueError) — the 12 o'clock edge cases look wrong;
    confirm the configured alarm strings never use hour 12.
    """
    newhour = int(new_alarm.split(':')[0])
    newmin = int(new_alarm.split(':')[1].split(' ')[0])
    newperiod = new_alarm.split(' ')[1]
    year = datetime.now().year
    month = datetime.now().month
    day = datetime.now().day
    if newperiod == "PM":
        newhour += 12
    wait = 1
    while wait > 0:
        time.sleep(wait)
        wait = int((datetime(year, month, day, newhour, newmin) - datetime.now()).total_seconds())
def dto_to_string(MY_DTO):
    """Format a datetime as 'H:MM AM/PM' on a 12-hour clock.

    Bug fixes: minutes are zero-padded ('1:05 PM', not '1:5 PM'); noon
    is now PM and midnight is '12:MM AM' (the original labelled hour 12
    as AM and left midnight as hour 0).
    """
    hour = MY_DTO.hour
    period = "PM" if hour >= 12 else "AM"
    hour = hour % 12 or 12
    return "{}:{:02d} {}".format(hour, MY_DTO.minute, period)
def t_minus(time_string, minus):
    """Return *time_string* ('H:MM AM/PM') shifted *minus* minutes
    earlier, in the same string format.

    Bug fix: minutes are zero-padded so '8:05 AM' no longer renders as
    '8:5 AM' (snooze() parses either, but the string is also printed).
    NOTE(review): hour 12 in the result is labelled AM — quirky, but it
    deliberately mirrors snooze()'s parsing (which only adds 12 for PM),
    so a noon result round-trips correctly; do not "fix" in isolation.
    """
    h = int(time_string.split(':')[0])
    m = int(time_string.split(':')[1].split(' ')[0])
    t = time_string.split(' ')[1]
    year = datetime.now().year
    month = datetime.now().month
    day = datetime.now().day
    if t == "PM":
        if h < 12:
            h += 12
    else:
        if h == 12:
            h -= 12
    diff = datetime(year, month, day, h, m) - timedelta(minutes=minus)
    print(diff)
    if diff.hour > 12:
        th = diff.hour - 12
        tp = "PM"
    else:
        th = diff.hour
        tp = "AM"
    return "{}:{:02d} {}".format(th, diff.minute, tp)
def holidayDict():
    """Return the name of today's US holiday (for MY_STATE), or None."""
    today = datetime.now().date()
    us_holidays = holidays.UnitedStates(state=MY_STATE)
    return us_holidays.get(today) if today in us_holidays else None
def julianDate():
    """Day-of-year (1-366) for today."""
    today = date.today()
    return today.timetuple().tm_yday
def whatSeason(**keyword_parameters):
    """
    Returns integer 0-3 for the season of a given julian date, or current date if no
    julian is provided.
    0 = spring
    1 = summer
    2 = fall
    3 = winter

    Bug fix: range() excludes its stop value, so the original left days
    171, 263 and 354 uncovered and they fell through to winter; the
    season ranges are now contiguous.
    """
    if 'julian' in keyword_parameters:
        julian = keyword_parameters['julian']
    else:
        julian = julianDate()
    # "day of year" ranges for the northern hemisphere
    if julian in range(80, 172):
        return 0  # spring
    if julian in range(172, 264):
        return 1  # summer
    if julian in range(264, 355):
        return 2  # fall
    return 3  # winter = everything else
|
{"/vacation_mod.py": ["/timing_mod.py", "/IFTTT_mod.py", "/mac_mod.py"], "/evening_mod.py": ["/interaction_mod.py", "/timing_mod.py", "/mac_mod.py", "/IFTTT_mod.py"], "/lab_mod.py": ["/web_mod.py"], "/math_mod.py": ["/interaction_mod.py"], "/interaction_mod.py": ["/gmail_mod.py", "/mac_mod.py"], "/main.py": ["/vacation_mod.py", "/interaction_mod.py", "/mac_mod.py", "/morning_mod.py", "/evening_mod.py", "/IFTTT_mod.py"], "/weather_mod.py": ["/web_mod.py", "/interaction_mod.py", "/timing_mod.py"], "/yoga_mod.py": ["/interaction_mod.py", "/mac_mod.py"]}
|
40,826
|
raulium/jarvis
|
refs/heads/master
|
/main.py
|
#!/usr/bin/python
# ============== CONFIG PARAMETERS
from config import MASTERKEY, HOST_IP, BASE_PATH
# ============== INTERNAL LIBRARIES
from vacation_mod import vMorningRoutine, vEveningRoutine, checkVacationStatus, setVacation, rmVacation
from interaction_mod import say, micTest
from mac_mod import startMusic, setLivingRoom, setDisplay
from morning_mod import morningRoutine, dayMessage
from evening_mod import eveningRoutine
from IFTTT_mod import IFTTT
# ============== EXTERNAL LIBRARIES
import time
import sys
import logging
from flask import Flask, request, abort
from subprocess import Popen
from datetime import datetime
app = Flask(__name__)
def data_check():
    """Validate the POSTed JSON body: present and carrying MASTERKEY."""
    data = request.get_json(force=True)
    if data is None:
        app.logger.debug("No JSON recieved")
        return False
    app.logger.debug("JSON Recieved -- " + str(data))
    if MASTERKEY in data["key"]:
        return True
    app.logger.debug("Bad Key")
    return False
def apiReturn(myString):
    """Timestamped one-line success message for API route responses."""
    return "{}:\t{}\t -- SUCCESS!\n".format(datetime.now(), myString)
@app.route('/')
def index():
    # The API has no web root; everything goes through keyed POST routes.
    abort(404)
# @app.route('/away', methods=['POST'])
# def iaway():
# data = request.get_json(force=True)
#
# if data is not None:
# app.logger.debug("JSON Recieved -- " + str(data))
#
# if MASTERKEY in data["key"]:
# away()
# return str(datetime.now()) + ":\tTEST - SUCCESS!\n"
# else:
# abort(404)
# else:
# app.logger.debug("No JSON recieved")
# abort(404)
#
# @app.route('/back', methods=['POST'])
# def iback():
# data = request.get_json(force=True)
#
# if data is not None:
# app.logger.debug("JSON Recieved -- " + str(data))
#
# if MASTERKEY in data["key"]:
# back()
# return str(datetime.now()) + ":\tTEST - SUCCESS!\n"
# else:
# abort(404)
# else:
# app.logger.debug("No JSON recieved")
# abort(404)
@app.route('/load', methods=['POST'])
def loading():
    """Announce aloud that system services are loading (key-protected)."""
    f = "LOADING"
    status = data_check()
    if status:
        say("Loading system services.")
        return apiReturn(f)
    else:
        abort(404)
@app.route('/away', methods=['POST'])
def gone():
    """Activate vacation mode (idempotent: reports REDUNDANT if active)."""
    f = 'AWAY'
    status = data_check()
    if status:
        if checkVacationStatus():
            say('Vacation mode already active.')
            return apiReturn('REDUNDANT')
        setVacation()
        say("Vacation mode activated.")
        return apiReturn(f)
    else:
        abort(404)
@app.route('/back', methods=['POST'])
def here():
    """Deactivate vacation mode (idempotent: reports REDUNDANT if off)."""
    f = 'BACK'
    status = data_check()
    if status:
        if not checkVacationStatus():
            say("Vacation mode already deactivated.")
            return apiReturn("REDUNDANT")
        rmVacation()
        say("Vacation mode deactivated.")
        return apiReturn(f)
    else:
        abort(404)
@app.route('/test', methods=['POST'])
def testing():
    """Spoken smoke test confirming the server and TTS are alive."""
    f = "TESTING"
    status = data_check()
    if status:
        say("Test complete. All systems are functioning as expected.")
        return apiReturn(f)
    else:
        abort(404)
@app.route('/morning', methods=['POST'])
def morn_func():
    """Run the morning routine; the vacation variant when vacation is on."""
    f = "MORNING"
    status = data_check()
    if status:
        if checkVacationStatus():
            vMorningRoutine()
        else:
            morningRoutine()
        return apiReturn(f)
    else:
        abort(404)
@app.route('/evening', methods=['POST'])
def eve_func():
    """Run the evening routine; the vacation variant when vacation is on."""
    f = "EVENING"
    status = data_check()
    if status:
        if checkVacationStatus():
            vEveningRoutine()
        else:
            eveningRoutine()
        return apiReturn(f)
    else:
        abort(404)
@app.route('/movie', methods=['POST'])
def movie_time():
    """Dim the lights and trigger the movie-time IFTTT scene."""
    f = "MOVIE"
    status = data_check()
    if status:
        IFTTT('dim_lighting')
        IFTTT('movie_time')
        return apiReturn(f)
    else:
        abort(404)
@app.route('/music', methods=['POST'])
def music_time():
    """Start the Spotify morning playlist."""
    f = "MUSIC"
    status = data_check()
    if status:
        startMusic()
        return apiReturn(f)
    else:
        abort(404)
@app.route('/living', methods=['POST'])
def living():
    """Switch audio output to the living-room speakers."""
    f = "LIVINGROOM"
    status = data_check()
    if status:
        setLivingRoom()
        return apiReturn(f)
    else:
        abort(404)
@app.route('/display', methods=['POST'])
def display():
    """Switch audio output back to the Cinema Display."""
    f = "DISPLAY"
    status = data_check()
    if status:
        setDisplay()
        return apiReturn(f)
    else:
        abort(404)
@app.route('/mic', methods=['POST'])
def mic():
    """Run the interactive microphone/speech-recognition check."""
    f = "MIC"
    status = data_check()
    if status:
        micTest()
        return apiReturn(f)
    else:
        abort(404)
@app.route('/redalert', methods=['POST'])
def redalert():
    """Play the red-alert klaxon over the living-room speakers, then
    restore the display audio output."""
    f = "REDALERT"
    status = data_check()
    if status:
        setLivingRoom()
        time.sleep(3)
        IFTTT('dim_lighting')
        Popen(["afplay", BASE_PATH + "/git/jarvis/redalert.mp3"])
        IFTTT('klaxon')
        time.sleep(10)
        setDisplay()
        return apiReturn(f)
    else:
        abort(404)
@app.route('/weather', methods=['POST'])
def current():
    """Speak the current day/weather status message on demand."""
    f = "WEATHER"
    status = data_check()
    if status:
        msg = dayMessage()
        say(msg)
        return apiReturn(f)
    else:
        abort(404)
if __name__ == '__main__':
    # Debug-mode Flask server; threaded so long-running routines
    # (morning/evening) don't block the other routes.
    logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
    app.run(host=HOST_IP, threaded=True, debug=True)
|
{"/vacation_mod.py": ["/timing_mod.py", "/IFTTT_mod.py", "/mac_mod.py"], "/evening_mod.py": ["/interaction_mod.py", "/timing_mod.py", "/mac_mod.py", "/IFTTT_mod.py"], "/lab_mod.py": ["/web_mod.py"], "/math_mod.py": ["/interaction_mod.py"], "/interaction_mod.py": ["/gmail_mod.py", "/mac_mod.py"], "/main.py": ["/vacation_mod.py", "/interaction_mod.py", "/mac_mod.py", "/morning_mod.py", "/evening_mod.py", "/IFTTT_mod.py"], "/weather_mod.py": ["/web_mod.py", "/interaction_mod.py", "/timing_mod.py"], "/yoga_mod.py": ["/interaction_mod.py", "/mac_mod.py"]}
|
40,827
|
raulium/jarvis
|
refs/heads/master
|
/mac_mod.py
|
#!/usr/local/env python
# ============== CONFIG PARAMETERS
from __future__ import print_function
from config import BASE_PATH, EMAIL, ICLOUD, IPHONE_ID, HOME_COORD, USERNAME
# ============== INTERNAL LIBRARIES
# ============== EXTERNAL LIBRARIES
import time
import csv
from subprocess import Popen, PIPE
from pyicloud import PyiCloudService
def iCloudConnect():
    """Authenticate to iCloud, walking the interactive 2FA prompt if needed.

    Bug fix: the failure paths called sys.exit() but this module never
    imported sys (NameError) — import it locally. Also return the api
    handle so callers can actually use the session (backward compatible:
    the original implicitly returned None).
    """
    import sys

    api = PyiCloudService(EMAIL)
    if api.requires_2fa:
        import click
        print("Two-factor authentication required. Your trusted devices are:")
        devices = api.trusted_devices
        for i, device in enumerate(devices):
            print("  %s: %s" % (i, device.get('deviceName',
                  "SMS to %s" % device.get('phoneNumber'))))
        device = click.prompt('Which device would you like to use?', default=0)
        device = devices[device]
        if not api.send_verification_code(device):
            print("Failed to send verification code")
            sys.exit(1)
        code = click.prompt('Please enter validation code')
        if not api.validate_verification_code(device, code):
            print("Failed to verify verification code")
            sys.exit(1)
    return api
def check_process(PROCESS_NAME):
    """Return True when a process whose command column is exactly
    *PROCESS_NAME* appears in `ps -ef` output."""
    listing = Popen("ps -ef | grep " + PROCESS_NAME, stdout=PIPE, shell=True)
    out, err = listing.communicate()
    columns = ['UID', 'PID', 'PPID', 'C', 'STIME', 'TTY', 'TIME', 'CMD']
    rows = csv.DictReader(out.decode('ascii').splitlines(),
                          delimiter=' ', skipinitialspace=True,
                          fieldnames=columns)
    return any(row['CMD'] == PROCESS_NAME for row in rows)
def macTerm(CMD):
    """Run shell-command string *CMD* as USERNAME via sudo (fire-and-forget).

    CMD must be a string: it is concatenated into the sudo invocation,
    so passing a list raises TypeError.
    """
    runcmd = 'sudo -u ' + USERNAME + ' ' + CMD
    Popen(runcmd, shell=True)
def appleScript(CMD):
    """Execute AppleScript snippet *CMD* (already quoted) via osascript."""
    c = 'osascript -e ' + CMD
    macTerm(c)
def setVolume(VALUE):
    """Set the system output volume.

    Apple's `set Volume` scale runs 0 - 7 (real-valued), so 3.5 is the
    midpoint of the range.
    """
    appleScript('"set Volume {}"'.format(VALUE))
def setLivingRoom():
    """Route audio to the Living Room output at full volume."""
    setVolume(7)
    macTerm(BASE_PATH + 'AppleScripts/audioLivingRoom.applescript')
    # Give the output switch a moment to settle.
    time.sleep(2)
def setDisplay():
    """Route audio to the Cinema Display output at moderate volume."""
    setVolume(4)
    macTerm(BASE_PATH + 'AppleScripts/audioDisplay.applescript')
    # Give the output switch a moment to settle.
    time.sleep(2)
# THIS DOESN'T APPEAR TO WORK... INVESTIGATE
# def lockScreen():
# """Lock the screen"""
# Popen(['/System/Library/CoreServices/Menu Extras/User.menu/Contents/Resources/CGSession', '-suspend'])
def notification(TITLE, MSG):
    """Post MSG to Notification Center under TITLE with the Hero sound."""
    script = 'display notification "{0}" with title "{1}" sound name "Hero"'.format(MSG, TITLE)
    appleScript(script)
def ifMuted():
    """Return 1 if system audio output is muted, else 0.

    BUG FIX: Popen returns bytes on Python 3; the original split the raw
    bytes with a str separator, which raises TypeError. Decode first.
    """
    out, err = Popen(['osascript', '-e', 'get volume settings'], stdout=PIPE).communicate()
    # osascript output looks like:
    # "output volume:40, input volume:50, alert volume:75, output muted:false"
    fields = out.decode('ascii', 'replace').split(',')
    if 'true' in fields[3]:
        return 1
    else:
        return 0
# STOPPED WORKING -- FIX THIS
def startRadio():
    """Open the WGBH radio web player in Chrome (see openPage).

    NOTE(review): flagged upstream as no longer working -- the player URL
    is probably dead and needs replacing.
    """
    openPage("http://www.wgbh.org/audioPlayers/wgbh.cfm")
def startMusic():
    """Launch Spotify, bring it to the front, and start a fixed playlist."""
    openApp('Spotify')
    time.sleep(5)
    tell = "osascript -e 'tell application \"Spotify\"' -e '{}' -e 'end tell'"
    macTerm(tell.format('activate'))
    time.sleep(5)
    macTerm(tell.format('play track "spotify:user:spotify:playlist:37i9dQZEVXcR6wHtFF48p2"'))
def openPage(URL):
    """Open URL in Google Chrome."""
    Popen(['open', '-a', 'Google Chrome', URL])
def closeApp(APPNAME):
    """Politely quit an application via AppleScript."""
    Popen("osascript -e ' quit app \"{}\"'".format(str(APPNAME)), shell=True)
def openApp(APPNAME):
    """Launch an application by name as the configured user."""
    macTerm("open -a '{}'".format(str(APPNAME)))
'''
I really don't like this option. it doens't do well to identify the coordinates of the user
'''
def ishome():
    """Best-effort check of whether the tracked iPhone is within HOME_COORD.

    Returns True when the phone's reported coordinates fall inside the
    configured bounding box. When the phone has no location fix, the old
    behaviour of assuming "home" is kept. BUG FIX: if no device matching
    IPHONE_ID exists, the original left `place` as an empty dict and
    raised KeyError; we now return False ("not home") instead.
    """
    api = PyiCloudService(EMAIL, ICLOUD)
    place = None
    for device in api.devices:
        if IPHONE_ID in str(device):
            place = device.location()
            if place is None:
                # No location fix: preserve the original assume-home fallback.
                place = {'latitude': HOME_COORD['minlat'],
                         'longitude': HOME_COORD['minlon']}
            break
    if place is None:
        # Tracked device not found in the account at all.
        return False
    return (HOME_COORD['minlat'] <= place['latitude'] <= HOME_COORD['maxlat'] and
            HOME_COORD['minlon'] <= place['longitude'] <= HOME_COORD['maxlon'])
|
{"/vacation_mod.py": ["/timing_mod.py", "/IFTTT_mod.py", "/mac_mod.py"], "/evening_mod.py": ["/interaction_mod.py", "/timing_mod.py", "/mac_mod.py", "/IFTTT_mod.py"], "/lab_mod.py": ["/web_mod.py"], "/math_mod.py": ["/interaction_mod.py"], "/interaction_mod.py": ["/gmail_mod.py", "/mac_mod.py"], "/main.py": ["/vacation_mod.py", "/interaction_mod.py", "/mac_mod.py", "/morning_mod.py", "/evening_mod.py", "/IFTTT_mod.py"], "/weather_mod.py": ["/web_mod.py", "/interaction_mod.py", "/timing_mod.py"], "/yoga_mod.py": ["/interaction_mod.py", "/mac_mod.py"]}
|
40,828
|
raulium/jarvis
|
refs/heads/master
|
/IFTTT_mod.py
|
#!/usr/local/env python
# ============== CONFIG PARAMETERS
from config import IFTTT_KEY
# ============== INTERNAL LIBRARIES
# ============== EXTERNAL LIBRARIES
from subprocess import Popen
def IFTTT(EVENT):
    """Fire the IFTTT Maker webhook for EVENT."""
    endpoint = 'https://maker.ifttt.com/trigger/{}/with/key/{}'.format(EVENT, IFTTT_KEY)
    Popen(['curl', '-X', 'POST', endpoint])
def IFTTTcmd(EVENT):
    """Return the shell command that would fire the EVENT webhook."""
    endpoint = 'https://maker.ifttt.com/trigger/{}/with/key/{}'.format(EVENT, IFTTT_KEY)
    return 'curl -X POST ' + endpoint
|
{"/vacation_mod.py": ["/timing_mod.py", "/IFTTT_mod.py", "/mac_mod.py"], "/evening_mod.py": ["/interaction_mod.py", "/timing_mod.py", "/mac_mod.py", "/IFTTT_mod.py"], "/lab_mod.py": ["/web_mod.py"], "/math_mod.py": ["/interaction_mod.py"], "/interaction_mod.py": ["/gmail_mod.py", "/mac_mod.py"], "/main.py": ["/vacation_mod.py", "/interaction_mod.py", "/mac_mod.py", "/morning_mod.py", "/evening_mod.py", "/IFTTT_mod.py"], "/weather_mod.py": ["/web_mod.py", "/interaction_mod.py", "/timing_mod.py"], "/yoga_mod.py": ["/interaction_mod.py", "/mac_mod.py"]}
|
40,829
|
raulium/jarvis
|
refs/heads/master
|
/weather_mod.py
|
#!/usr/local/env python
# ============== EXTERNAL LIBRARIES
import email
import json
import math
import random
from datetime import datetime, timedelta
from web_mod import MyOpener
# ============== CONFIG PARAMETERS
from config import WUNDERGROUND_KEY
# ============== INTERNAL LIBRARIES
from interaction_mod import GREAT, GOOD, OKAY, BAD
from timing_mod import whatSeason
WEATHER_SCORE = {
# SPRING, SUMMER, FALL, WINTER
'HeavyBlowing Sand': [0.2, 0.2, 0.2, 0.2],
'Blowing Sand': [0.3, 0.3, 0.3, 0.3],
'LightBlowing Sand': [0.3, 0.3, 0.3, 0.3],
'HeavyBlowing Snow': [0.1, 0.0, 0.1, 0.4],
'Blowing Snow': [0.2, 0.0, 0.2, 0.5],
'LightBlowing Snow': [0.3, 0.0, 0.3, 0.6],
'HeavyBlowing Widespread Dust': [0.2, 0.2, 0.2, 0.2],
'Blowing Widespread Dust': [0.3, 0.3, 0.3, 0.3],
'LightBlowing Widespread Dust': [0.4, 0.4, 0.4, 0.4],
'Clear': [1, 1, 1, 1],
'HeavyDrizzle': [0.7, 0.8, 0.6, 0.5],
'Drizzle': [0.8, 0.9, 0.7, 0.6],
'LightDrizzle': [0.9, 0.9, 0.8, 0.7],
'HeavyDust Whirls': [0.1, 0.1, 0.1, 0.1],
'Dust Whirls': [0.2, 0.2, 0.2, 0.2],
'LightDust Whirls': [0.3, 0.3, 0.3, 0.3],
'HeavyFog': [0.8, 0.9, 0.7, 0.7],
'Fog': [0.9, 0.9, 0.8, 0.8],
'LightFog': [1, 1, 0.9, 0.9],
'HeavyFog Patches': [0.9, 0.9, 0.8, 0.8],
'Fog Patches': [1, 1, 0.9, 0.9],
'LightFog Patches': [1, 1, 0.9, 0.9],
'HeavyFreezing Drizzle': [0.1, 0.0, 0.1, 0.4],
'Freezing Drizzle': [0.2, 0.0, 0.2, 0.5],
'LightFreezing Drizzle': [0.3, 0.0, 0.3, 0.6],
'HeavyFreezing Fog': [0.1, 0.0, 0.1, 0.4],
'Freezing Fog': [0.2, 0.0, 0.2, 0.5],
'LightFreezing Fog': [0.3, 0.0, 0.3, 0.6],
'HeavyFreezing Rain': [0.1, 0.0, 0.1, 0.4],
'Freezing Rain': [0.2, 0.0, 0.2, 0.5],
'LightFreezing Rain': [0.3, 0.0, 0.3, 0.6],
'Funnel Cloud': [0.0, 0.0, 0.0, 0.0],
'HeavyHail': [0.3, 0.4, 0.3, 0.4],
'Hail': [0.4, 0.5, 0.4, 0.5],
'LightHail': [0.5, 0.6, 0.5, 0.6],
'HeavyHail Showers': [0.2, 0.3, 0.2, 0.3],
'Hail Showers': [0.3, 0.4, 0.3, 0.4],
'LightHail Showers': [0.4, 0.5, 0.4, 0.5],
'HeavyHaze': [0.3, 0.3, 0.3, 0.3],
'Haze': [0.4, 0.4, 0.4, 0.4],
'LightHaze': [0.5, 0.5, 0.5, 0.5],
'HeavyIce Crystals': [0.1, 0.0, 0.1, 0.4],
'Ice Crystals': [0.2, 0.0, 0.2, 0.5],
'LightIce Crystals': [0.3, 0.0, 0.3, 0.6],
'HeavyIce Pellet Showers': [0.1, 0.0, 0.1, 0.4],
'Ice Pellet Showers': [0.2, 0.0, 0.2, 0.5],
'LightIce Pellet Showers': [0.3, 0.0, 0.3, 0.6],
'HeavyIce Pellets': [0.1, 0.0, 0.1, 0.4],
'Ice Pellets': [0.2, 0.0, 0.2, 0.5],
'LightIce Pellets': [0.3, 0.0, 0.3, 0.6],
'HeavyLow Drifting Sand': [0.5, 0.5, 0.5, 0.5],
'Low Drifting Sand': [0.6, 0.6, 0.6, 0.6],
'LightLow Drifting Sand': [0.7, 0.7, 0.7, 0.7],
'HeavyLow Drifting Snow': [0.1, 0.0, 0.1, 0.4],
'Low Drifting Snow': [0.2, 0.0, 0.2, 0.5],
'LightLow Drifting Snow': [0.3, 0.0, 0.3, 0.6],
'HeavyLow Drifting Widespread Dust': [0.3, 0.3, 0.3, 0.3],
'Low Drifting Widespread Dust': [0.4, 0.4, 0.4, 0.4],
'LightLow Drifting Widespread Dust': [0.5, 0.5, 0.5, 0.5],
'HeavyMist': [0.6, 0.7, 0.5, 0.5],
'Mist': [0.7, 0.8, 0.6, 0.6],
'LightMist': [0.8, 0.9, 0.7, 0.7],
'Mostly Cloudy': [0.9, 1, 0.9, 0.9],
'Overcast': [0.8, 0.9, 0.7, 0.7],
'Partial Fog': [0.7, 0.8, 0.7, 0.7],
'Partly Cloudy': [1, 1, 0.9, 1],
'Patches of Fog': [0.7, 0.8, 0.7, 0.7],
'HeavyRain': [0.8, 0.9, 0.7, 0.6],
'Rain': [0.9, 0.9, 0.8, 0.7],
'LightRain': [0.9, 1, 0.9, 0.8],
'HeavyRain Mist': [0.8, 0.9, 0.7, 0.6],
'Rain Mist': [0.9, 0.9, 0.8, 0.7],
'LightRain Mist': [1, 1, 0.9, 0.8],
'HeavyRain Showers': [0.6, 0.7, 0.5, 0.5],
'Rain Showers': [0.7, 0.8, 0.6, 0.6],
'LightRain Showers': [0.8, 0.9, 0.7, 0.7],
'HeavySand': [0.3, 0.3, 0.3, 0.3],
'Sand': [0.4, 0.4, 0.4, 0.4],
'LightSand': [0.5, 0.5, 0.5, 0.5],
'HeavySandstorm': [0.2, 0.2, 0.2, 0.2],
'Sandstorm': [0.3, 0.3, 0.3, 0.3],
'LightSandstorm': [0.4, 0.4, 0.4, 0.4],
'Scattered Clouds': [1, 1, 1, 1],
'Shallow Fog': [0.7, 0.8, 0.7, 0.7],
'Small Hail': [0.5, 0.6, 0.5, 0.6],
'HeavySmall Hail Showers': [0.4, 0.5, 0.3, 0.5],
'Small Hail Showers': [0.5, 0.6, 0.4, 0.6],
'LightSmall Hail Showers': [0.6, 0.7, 0.5, 0.7],
'HeavySmoke': [0.3, 0.3, 0.3, 0.3],
'Smoke': [0.4, 0.4, 0.4, 0.4],
'LightSmoke': [0.5, 0.5, 0.5, 0.5],
'HeavySnow': [0.1, 0.0, 0.1, 0.4],
'Snow': [0.2, 0.0, 0.2, 0.5],
'LightSnow': [0.3, 0.0, 0.3, 0.6],
'HeavySnow Blowing Snow Mist': [0.1, 0.0, 0.1, 0.4],
'Snow Blowing Snow Mist': [0.2, 0.0, 0.2, 0.5],
'LightSnow Blowing Snow Mist': [0.3, 0.0, 0.3, 0.6],
'HeavySnow Grains': [0.2, 0.0, 0.2, 0.5],
'Snow Grains': [0.3, 0.0, 0.3, 0.6],
'LightSnow Grains': [0.4, 0.0, 0.4, 0.7],
'HeavySnow Showers': [0.0, 0.0, 0.0, 0.5],
'Snow Showers': [0.1, 0.0, 0.1, 0.6],
'LightSnow Showers': [0.2, 0.0, 0.2, 0.7],
'HeavySpray': [0.2, 0.2, 0.2, 0.2],
'Spray': [0.3, 0.3, 0.3, 0.3],
'LightSpray': [0.4, 0.4, 0.4, 0.4],
'Squalls': [0.5, 0.5, 0.5, 0.5],
'HeavyThunderstorm': [0.6, 0.7, 0.6, 0.5],
'Thunderstorm': [0.7, 0.8, 0.7, 0.6],
'LightThunderstorm': [0.8, 0.9, 0.8, 0.7],
'HeavyThunderstorms and Ice Pellets': [0.4, 0.5, 0.4, 0.5],
'Thunderstorms and Ice Pellets': [0.5, 0.6, 0.5, 0.6],
'LightThunderstorms and Ice Pellets': [0.6, 0.7, 0.6, 0.7],
'HeavyThunderstorms and Rain': [0.5, 0.6, 0.5, 0.4],
'Thunderstorms and Rain': [0.6, 0.7, 0.6, 0.5],
'LightThunderstorms and Rain': [0.7, 0.8, 0.7, 0.6],
'HeavyThunderstorms and Snow': [0.0, 0.0, 0.0, 0.3],
'Thunderstorms and Snow': [0.1, 0.0, 0.1, 0.4],
'LightThunderstorms and Snow': [0.2, 0.0, 0.2, 0.5],
'HeavyThunderstorms with Hail': [0.4, 0.5, 0.4, 0.5],
'Thunderstorms with Hail': [0.5, 0.6, 0.5, 0.6],
'LightThunderstorms with Hail': [0.6, 0.7, 0.6, 0.7],
'HeavyThunderstorms with Small Hail': [0.5, 0.6, 0.5, 0.6],
'Thunderstorms with Small Hail': [0.6, 0.7, 0.6, 0.7],
'LightThunderstorms with Small Hail': [0.7, 0.8, 0.7, 0.8],
'Unknown': [0.5, 0.5, 0.5, 0.5],
'Unknown Precipitation': [0.9, 0.9, 0.7, 0.6],
'HeavyVolcanic Ash': [0.0, 0.0, 0.0, 0.0],
'Volcanic Ash': [0.0, 0.0, 0.0, 0.0],
'LightVolcanic Ash': [0.0, 0.0, 0.0, 0.0],
'HeavyWidespread Dust': [0.2, 0.2, 0.2, 0.2],
'Widespread Dust': [0.3, 0.3, 0.3, 0.3],
'LightWidespread Dust': [0.4, 0.4, 0.4, 0.4]
}
# Comfort score keyed by the absolute gap (degrees F) between "feels like"
# and actual temperature, rounded up to an even number and capped at 20
# (see feels_like_distance).
DISTANCE_SCORE = {
    0: 1.0,
    2: 0.9,
    4: 0.8,
    6: 0.7,
    8: 0.6,
    10: 0.5,
    12: 0.4,
    14: 0.3,
    16: 0.2,
    18: 0.1,
    20: 0.0
}
def temperatureScore(x):
    """Quartic comfort curve mapping a temperature (F) to a score."""
    quartic = -3.044 * (10 ** -8) * (x ** 4)
    cubic = 3.81 * (10 ** -6) * (x ** 3)
    quadratic = -0.00028 * (x ** 2)
    linear = 0.028 * x
    return quartic + cubic + quadratic + linear - 0.17
def dew_point_formula(x):
    """Quadratic comfort curve for the dew point (F)."""
    square = -0.00036 * (x ** 2)
    slope = 0.04056 * x
    return square + slope - 0.144
def dew_distance_formula(x):
    """Quartic comfort curve for the temperature / dew-point spread (F)."""
    terms = [
        -0.1121 * (10 ** -6) * (x ** 4),
        0.00003 * (x ** 3),
        -0.00263 * (x ** 2),
        0.07037 * x,
        0.36842,
    ]
    return sum(terms)
def feels_like_distance(FEELS, TEMP):
    """Score the gap between apparent and actual temperature.

    The absolute gap is rounded up to an even number, capped at 20, and
    looked up in DISTANCE_SCORE.
    """
    gap = int(math.ceil(abs(FEELS - TEMP) / 2.0)) * 2
    return DISTANCE_SCORE[min(gap, 20)]
def dayCalculator(TEMP, FEELS, DEW, CONDITION):
    """Roll the weather sub-scores into a single day evaluation.

    Returns (evaluation, bad_conditions): a phrase drawn from the
    GREAT/GOOD/OKAY/BAD pools, plus the list of sub-scores below 0.3.
    """
    checks = (
        ("Temperature Score: ", lambda: temperatureScore(TEMP),
         "unfavorable temperature"),
        ("Feels-Like Distance: ", lambda: feels_like_distance(FEELS, TEMP),
         "unfavorable temperature disparity between what it feels like and what it is"),
        ("Dew Point: ", lambda: dew_point_formula(DEW),
         "unfavorable dew point"),
        ("Dew Point Distance: ", lambda: dew_distance_formula(TEMP - DEW),
         "unfavorable humidity disparity"),
        ("Season Condition: ", lambda: WEATHER_SCORE[CONDITION][whatSeason()],
         "unfavorable weather conditions"),
    )
    bad_conditions = []
    score = 1
    for label, compute, complaint in checks:
        value = compute()
        print(label + str(value))
        if value < 0.3:
            bad_conditions.append(complaint)
        score *= value
    # Map the combined score onto a canned spoken evaluation.
    if score >= 0.9:
        pool = GREAT
    elif score >= 0.5:
        pool = GOOD
    elif score >= 0.3:
        pool = OKAY
    else:
        pool = BAD
    return random.choice(pool), bad_conditions
def newDailyReport():
    """Build the morning weather report.

    Returns (day_score, bad_conditions, weather_segment): a spoken-style
    evaluation phrase, the list of unfavorable factors, and a narrated
    summary string of current conditions and the forecast.

    NOTE(review): email.Utils is the Python 2 spelling (Python 3 renamed
    it email.utils) -- confirm the target runtime.
    """
    data = getWeatherJSON('current')
    # Convert the RFC-822 observation timestamp into a local "HH:MM AM/PM".
    observation_time = datetime.fromtimestamp(
        email.Utils.mktime_tz(email.Utils.parsedate_tz(data['current_observation']['observation_time_rfc822'])))
    observation_time = observation_time.strftime("%I:%M %p")
    conditions = data['current_observation']['weather']
    feels_like = float(data['current_observation']['feelslike_f'])
    temp = float(data['current_observation']['temp_f'])
    # Second API hit for today's forecast high/low.
    data = getWeatherJSON('forecast')
    high = int(data['forecast']['simpleforecast']['forecastday'][0]['high']['fahrenheit'])
    low = int(data['forecast']['simpleforecast']['forecastday'][0]['low']['fahrenheit'])
    # Only mention "feels like" when it is noticeably colder than actual.
    if (feels_like < temp) and (temp - feels_like > 5):
        weather_string = "Though the temperature is " + str(temp) + ", it feels like " + str(
            feels_like) + " degrees. It's expected to reach " + str(high) + " today, with a low of " + str(low) + "."
    else:
        weather_string = "The temperature is " + str(temp) + " degrees, with a high of " + str(
            high) + " and a low of " + str(low) + "."
    weather_segment = "As of " + str(observation_time) + " it is currently " + conditions + ". " + weather_string
    # Third API hit: average hourly dew point feeds the day score.
    dew = dewPointAverage(getWeatherJSON('hourly'))
    day_score, bad_conditions = dayCalculator(temp, feels_like, dew, conditions)
    return day_score, bad_conditions, weather_segment
def dewPointAverage(data):
    """Average the hourly dew-point forecasts, truncated to whole degrees F."""
    readings = [int(float(hour['dewpoint']['english']))
                for hour in data['hourly_forecast']]
    return sum(readings) / len(readings)
# def dailyReport():
# data = getWeatherJSON('current')
#
# conditions = data['current_observation']['weather']
#
# feels_like = data['current_observation']['feelslike_f']
# temp = data['current_observation']['temp_f']
#
# observation_time = datetime.fromtimestamp(
# email.Utils.mktime_tz(email.Utils.parsedate_tz(data['current_observation']['observation_time_rfc822'])))
# observation_time = observation_time.strftime("%I:%M %p")
#
# data = getWeatherJSON('forecast')
#
# high = data['forecast']['simpleforecast']['forecastday'][0]['high']['fahrenheit']
# low = data['forecast']['simpleforecast']['forecastday'][0]['low']['fahrenheit']
#
# if feels_like < temp:
# temperature_string = "Though the temperature is " + str(temp) + ", it feels like " + str(
# feels_like) + " degrees. It's expected to reach " + str(high) + " today, with a low of " + str(low) + "."
# else:
# temperature_string = "The temperature is " + str(temp) + " degrees, with a high of " + str(
# high) + " and a low of " + str(low) + "."
#
# fullReport = "As of " + str(observation_time) + " it is currently " + conditions + ". " + temperature_string
# saiff(fullReport, "/tmp/DailyReport.aiff")
def getWeatherJSON(weather_type):
    """Fetch a Weather Underground JSON feed for Waltham, MA.

    weather_type must be one of 'current', 'forecast', 'astronomy',
    'hourly'. BUG FIX: any other value previously left `url` unbound and
    raised a confusing NameError; we now raise ValueError explicitly.
    """
    features = {
        'current': 'conditions',
        'forecast': 'forecast',
        'astronomy': 'astronomy',
        'hourly': 'hourly',
    }
    if weather_type not in features:
        raise ValueError("unknown weather_type: %r" % (weather_type,))
    url = ("http://api.wunderground.com/api/" + WUNDERGROUND_KEY +
           "/" + features[weather_type] + "/q/MA/waltham.json")
    agent = MyOpener()
    page = agent.open(url)
    return json.loads(page.read())
def getSunsetDTO():
    """Return today's sunset time minus a 40-minute lead, as a datetime."""
    phase = getWeatherJSON('astronomy')['sun_phase']['sunset']
    sunset = datetime.now().replace(hour=int(phase['hour']),
                                    minute=int(phase['minute']),
                                    second=0, microsecond=0)
    return sunset - timedelta(minutes=40)
|
{"/vacation_mod.py": ["/timing_mod.py", "/IFTTT_mod.py", "/mac_mod.py"], "/evening_mod.py": ["/interaction_mod.py", "/timing_mod.py", "/mac_mod.py", "/IFTTT_mod.py"], "/lab_mod.py": ["/web_mod.py"], "/math_mod.py": ["/interaction_mod.py"], "/interaction_mod.py": ["/gmail_mod.py", "/mac_mod.py"], "/main.py": ["/vacation_mod.py", "/interaction_mod.py", "/mac_mod.py", "/morning_mod.py", "/evening_mod.py", "/IFTTT_mod.py"], "/weather_mod.py": ["/web_mod.py", "/interaction_mod.py", "/timing_mod.py"], "/yoga_mod.py": ["/interaction_mod.py", "/mac_mod.py"]}
|
40,830
|
raulium/jarvis
|
refs/heads/master
|
/yoga_mod.py
|
#!/usr/local/env python
# ============== EXTERNAL LIBRARIES
import random
import time
# ============== CONFIG PARAMETERS
from config import NAME
# ============== INTERNAL LIBRARIES
from interaction_mod import POSITIVE, say
from mac_mod import setVolume
def stretch_Spinal():
    """Narrate a seated spinal-twist and side-reach stretch sequence."""
    say('Sit cross-legged.')
    time.sleep(5)
    for _ in range(2):
        for cue, pause in (('Twist left.', 10), ('Return to center.', 5),
                           ('Twist right.', 10), ('Return to center.', 5)):
            say(cue)
            time.sleep(pause)
    side_script = (
        ('Put your left hand on the floor. Inhale. Raise your right arm. While exhaling, reach left.', 10),
        ('Inhale, and relax a moment.', 5),
        ('Exhale and reach left, again.', 10),
        ('Time to switch.', 5),
        ('Put your right hand on the floor. Inhale. Raise your left arm. While exhaling, reach right.', 10),
        ('Inhale, and relax a moment.', 5),
        ('Exhale and reach right, again.', 10),
    )
    for cue, pause in side_script:
        say(cue)
        time.sleep(pause)
def stretch_Cat():
    """Narrate three rounds of cat/cow stretches on all fours."""
    say('Come to all fours')
    time.sleep(5)
    for _ in range(3):
        for cue, pause in (
                ('Curve your back toward the ceiling. Drawing your chin to your stomach.', 10),
                ('Inhale deeply', 5),
                ('Exhale and arch your back.', 5),
                ('Slowly raise your head to the ceiling.', 5)):
            say(cue)
            time.sleep(pause)
def stretch_Lunge():
    """Narrate three rounds of alternating lunges."""
    say('Come to all fours')
    time.sleep(5)
    for _ in range(3):
        for cue, pause in (('Lunge with your right foot', 5),
                           ("Don't forget to breathe.", 5),
                           ('Lunge with left foot.', 10)):
            say(cue)
            time.sleep(pause)
def fuckGreg2():
    """Run the full narrated stretching routine at reduced volume."""
    setVolume(3)
    say("I've been instructed to try to further motivate you, " + NAME + ".")
    time.sleep(5)
    say('Now for a real exercise. Stretching.')
    time.sleep(5)
    for routine in (stretch_Spinal, stretch_Cat, stretch_Lunge):
        routine()
    closing = random.choice(POSITIVE) + 'All done. I hope that woke you up.'
    say(closing)
|
{"/vacation_mod.py": ["/timing_mod.py", "/IFTTT_mod.py", "/mac_mod.py"], "/evening_mod.py": ["/interaction_mod.py", "/timing_mod.py", "/mac_mod.py", "/IFTTT_mod.py"], "/lab_mod.py": ["/web_mod.py"], "/math_mod.py": ["/interaction_mod.py"], "/interaction_mod.py": ["/gmail_mod.py", "/mac_mod.py"], "/main.py": ["/vacation_mod.py", "/interaction_mod.py", "/mac_mod.py", "/morning_mod.py", "/evening_mod.py", "/IFTTT_mod.py"], "/weather_mod.py": ["/web_mod.py", "/interaction_mod.py", "/timing_mod.py"], "/yoga_mod.py": ["/interaction_mod.py", "/mac_mod.py"]}
|
40,831
|
raulium/jarvis
|
refs/heads/master
|
/motivation_mod.py
|
#!/usr/local/env python
# ============== EXTERNAL LIBRARIES
from random import choice
from math import floor
from datetime import datetime
# ============== CONFIG PARAMETERS
from config import NAME
# ============== INTERNAL LIBRARIES
#?
def daysToWeekend():
    """Days remaining until Saturday; 0 on weekends."""
    weekday = datetime.today().weekday()
    return 5 - weekday if weekday < 5 else 0
def counterString(i, unit):
    """Build a countdown phrase like 'Only 3 more days until' (singular-aware)."""
    prefix = choice(["Only", "A mere", ""])
    filler = choice(["more", "additional", ""])
    noun = unit if i == 1 else unit + "s"
    return prefix + " " + str(i) + " " + filler + " " + noun + " until"
def weeksToDate(dto):
    """Whole weeks from now until the datetime dto (floored)."""
    remaining_days = (dto - datetime.today()).days
    return int(floor(remaining_days / 7))
def motivate():
    """Return one randomly chosen encouragement phrase."""
    phrases = ('You can do it.', "I have full confidence you'll make it.",
               "You'll be fine.", "Seize the day!", "You've got this.",
               "Don't forget to breathe.", "Just do it.",
               "Failure is not an option.", "Resistance is futile.",
               "Hang in there.")
    return choice(phrases)
|
{"/vacation_mod.py": ["/timing_mod.py", "/IFTTT_mod.py", "/mac_mod.py"], "/evening_mod.py": ["/interaction_mod.py", "/timing_mod.py", "/mac_mod.py", "/IFTTT_mod.py"], "/lab_mod.py": ["/web_mod.py"], "/math_mod.py": ["/interaction_mod.py"], "/interaction_mod.py": ["/gmail_mod.py", "/mac_mod.py"], "/main.py": ["/vacation_mod.py", "/interaction_mod.py", "/mac_mod.py", "/morning_mod.py", "/evening_mod.py", "/IFTTT_mod.py"], "/weather_mod.py": ["/web_mod.py", "/interaction_mod.py", "/timing_mod.py"], "/yoga_mod.py": ["/interaction_mod.py", "/mac_mod.py"]}
|
40,834
|
simshadows/dpcutipy
|
refs/heads/master
|
/errors.py
|
"""
errors.py
Author: simshadows
Defines errors for dpcutipy.
"""
from ctypes import c_int
# Error codes from dpcdefs.h revision 07/22/2004
_errors_src = """
const ERC ercNoError = 0;
const ERC ercConnReject = 3001;
const ERC ercConnType = 3002;
const ERC ercConnNoMode = 3003;
const ERC ercInvParam = 3004;
const ERC ercInvCmd = 3005;
const ERC ercUnknown = 3006;
const ERC ercJtagConflict = 3007;
const ERC ercNotImp = 3008;
const ERC ercNoMem = 3009;
const ERC ercTimeout = 3010;
const ERC ercConflict = 3011;
const ERC ercBadPacket = 3012;
const ERC ercInvOption = 3013;
const ERC ercAlreadyCon = 3014;
const ERC ercConnected = 3101;
const ERC ercNotInit = 3102;
const ERC ercCantConnect = 3103;
const ERC ercAlreadyConnect = 3104;
const ERC ercSendError = 3105;
const ERC ercRcvError = 3106;
const ERC ercAbort = 3107;
const ERC ercTimeOut = 3108;
const ERC ercOutOfOrder = 3109;
const ERC ercExtraData = 3110;
const ERC ercMissingData = 3111;
const ERC ercTridNotFound = 3201;
const ERC ercNotComplete = 3202;
const ERC ercNotConnected = 3203;
const ERC ercWrongMode = 3204;
const ERC ercWrongVersion = 3205;
const ERC ercDvctableDne = 3301;
const ERC ercDvctableCorrupt= 3302;
const ERC ercDvcDne = 3303;
const ERC ercDpcutilInitFail= 3304;
const ERC ercUnknownErr = 3305;
const ERC ercDvcTableOpen = 3306;
const ERC ercRegError = 3307;
const ERC ercNotifyRegFull = 3308;
const ERC ercNotifyNotFound = 3309;
const ERC ercOldDriverNewFw = 3310;
const ERC ercInvHandle = 3311;
const ERC ercInterfaceNotSupported = 3312;
"""
# Error documentation from DPCUTIL Programmer’s Manual revision 06/03/05
# https://reference.digilentinc.com/_media/dpcutil_programmers_reference_manual.pdf
_errors_doc = """
ercNoError 0 No error occurred in transaction
ercInvParam 3004 Invalid parameter sent in API call
ercInvCmd 3005 Internal error. Please report occurrence as a bug
ercUnknown 3006 Internal error. Please report occurrence as a bug
ercNoMem 3009 Not enough memory to carry out transaction
ercNotInit 3102 Communication device not initialized
ercCantConnect 3103 Can’t connect to communication module
ercAlreadyConnect 3104 Already connected to communication device
ercSendError 3105 Error occurred while sending data to communication device
ercRcvError 3106 Error occurred while receiving data from communication device
ercAbort 3107 Error occurred while trying to abort transaction(s)
ercOutOfOrder 3109 Completion out of order
ercExtraData 3110 Too much data received from communication device
ercMissingData 3111 Nothing to send or data/address mismatched pairs
ercTridNotFound 3201 Unable to find matching TRID in transaction queue
ercNotComplete 3202 Transaction being cleared is not complete
ercNotConnected 3203 Not connected to communication device
ercWrongMode 3204 Connected in wrong mode (JTAG or data transfer)
ercWrongVersion 3205 Internal error. Please report occurrence as a bug
ercDvctableDne 3301 Device table doesn’t exist (an empty one has been created)
ercDvctableCorrupt 3302 All or part of the device table is corrupted
ercDvcDne 3303 Device does not exist in device table
ercDpcutilInitFail 3304 DpcInit API call failed
ercDvcTableOpen 3306 Communications devices dialog box already open.
ercRegError 3307 Error occurred while accessing the registry
"""
# This is the user exception.
class DpcUtiPyException(Exception):
    """Exception wrapping DPCUTIL error codes.

    The class body parses _errors_src / _errors_doc once at class-creation
    time into code->name and code->description lookup tables, which
    __init__ uses to build a readable message.
    """
    ERCNOERROR = 0  # dpcdefs.h "ercNoError": the success code.
    _errors = {}
    # _errors[error_code] = error name string
    _error_descriptions = {}
    # _error_descriptions[error_code] = error description string
    # Parse the error code source and documentation
    for line in _errors_src.strip().splitlines():
        substrings = line.rsplit(maxsplit=1)
        # E.g. "const ERC ercNoError = 0;"
        # Becomes ["const ERC ercNoError =", "0;"]
        assert substrings[0][-1] == "="
        assert substrings[1][-1] == ";"
        assert len(substrings) == 2
        error_code = int(substrings[1][:-1])
        substrings = substrings[0][:-1].rsplit(maxsplit=1)
        # E.g. ["const ERC ercNoError =", "0;"]
        # Becomes ["const ERC", "ercNoError"]
        assert substrings[0] == "const ERC"
        assert len(substrings) == 2
        error_name = substrings[1]
        _errors[error_code] = error_name
    for line in _errors_doc.strip().splitlines():
        substrings = line.split(maxsplit=2)
        # E.g. "ercNoError 0 No error occurred in transaction"
        # Becomes ["ercNoError", "0", "No error occurred in transaction"]
        error_code = int(substrings[1])
        # Cross-check: the documented name must match the parsed source name.
        assert substrings[0].lower() == _errors[error_code].lower()
        assert len(substrings) == 3
        _error_descriptions[error_code] = substrings[2]
    # TODO: Clean up this spaghettified mess.
    #
    # ARGUMENTS:
    #     DpcUtiPyException()
    #         Constructions an exception with no args.
    #     DpcUtiPyException(msg:str)
    #         Constructs an exception with the message `msg` describing the error
    #         context.
    #     DpcUtiPyException(erc:int)
    #     DpcUtiPyException(erc:c_int)
    #         Constructs an exception with the DPCUTIL error code being used to
    #         describe the error, as defined in dpcdefs.h.
    #     DpcUtiPyException(msg:str, erc:int)
    #     DpcUtiPyException(msg:str, erc:c_int)
    #     DpcUtiPyException(erc:int, msg:str)
    #     DpcUtiPyException(erc:c_int, msg:str)
    #         Constructs an exception with a combination of both a context
    #         message and a DPCUTIL error code.
    # PRECONDITIONS:
    #     - If `msg` is supplied, then it is a non-empty string (after whitespace
    #       stripping).
    #     - Behaviour is undefined for argument combinations other than those
    #       documented above. This is assert-checked.
    def __init__(self, *args):
        assert len(args) < 3
        # These will remain None if unused.
        self.error_cntxt = None  # Error context
        self.erc = None  # Error code from dpcdefs.h
        self.error_name = None  # Error name from dpcdefs.h
        self.error_desc = None  # Error description from documentation
        buf = ""
        buf_msg = None
        buf_erc = None
        # Read Arguments
        if len(args) == 0:
            buf_msg = "Unknown error (no error code or context string given)."
        else:
            for arg in args:
                if isinstance(arg, int):
                    assert self.erc is None
                    self.erc = arg
                elif isinstance(arg, c_int):
                    assert self.erc is None
                    self.erc = arg.value
                else:  # Assumed to be a string
                    assert isinstance(arg, str) and (len(arg.strip()) > 0)
                    assert self.error_cntxt is None
                    buf_msg = self.error_cntxt = arg.strip()
        # Render the error-code half of the message, if a code was given.
        if not self.erc is None:
            template = "DPCUTIL error {name} ({erc}){desc}"
            erc = self.erc  # Easier reference
            name = None
            desc = None
            if erc in self._errors:
                name = self.error_name = self._errors[erc]
            else:
                name = "UNKNOWN_DPCUTIL_ERROR"
            if erc in self._error_descriptions:
                self.error_desc = self._error_descriptions[erc]
                desc = " " + self.error_desc
            else:
                desc = ""
            buf_erc = template.format(name=name, erc=str(erc), desc=desc)
        assert (buf_msg is None) or isinstance(buf_msg, str)
        assert (buf_erc is None) or isinstance(buf_erc, str)
        assert not buf_msg is buf_erc is None
        # Join context and code descriptions; suppress the code text when it
        # is the success code ercNoError.
        if not buf_msg is None:
            buf = buf_msg
        if (not buf_erc is None) and (self.erc != self.ERCNOERROR):
            if len(buf) > 0:
                if not buf.endswith("."):
                    buf += "."
                buf += " "
            buf += buf_erc
        return super(DpcUtiPyException, self).__init__(buf)
|
{"/testdpcutipy.py": ["/dpcutipy.py"], "/dpcutipy.py": ["/errors.py"]}
|
40,835
|
simshadows/dpcutipy
|
refs/heads/master
|
/testdpcutipy.py
|
"""
testdpcutipy.py
Author: simshadows
A very simple test file that just writes to a register, then reads and prints it back.
This also provides a simple demonstration of the operation of the dpcutipy library.
"""
import os
import sys
import random
import traceback
from dpcutipy import dpcutipy
# Pick a random register/value pair to round-trip through the device.
# NOTE(review): randint bounds are inclusive, so these allow 64 and 256 --
# possibly intended as 0-63 / 0-255; confirm against the register map.
reg_id = random.randint(0,64)
reg_val = random.randint(0,256)
dpcutipy.DvmgStartConfigureDevices()  # Comment back in if needed.
dev_id = dpcutipy.DvmgGetDefaultDev()
dev_name = dpcutipy.DvmgGetDevName(dev_id)
print("Default device: " + dev_name)
# Write the value, then read it back and print both for manual comparison.
dpcutipy.put_single_register(reg_id, reg_val, dev_name)
print("REGISTER {} WRITE: {}".format(str(reg_id), str(reg_val)))
ret = dpcutipy.get_single_register(reg_id, dev_name)
print("REGISTER {} READ: {}".format(str(reg_id), str(ret)))
print("DONE!")
|
{"/testdpcutipy.py": ["/dpcutipy.py"], "/dpcutipy.py": ["/errors.py"]}
|
40,836
|
simshadows/dpcutipy
|
refs/heads/master
|
/dpcutipy.py
|
"""
dpcutipy.py
Author: simshadows
dpcutipy is a glue code module for dpcutil
Design documentation used to build this module:
https://reference.digilentinc.com/_media/dpcutil_programmers_reference_manual.pdf
http://hamsterworks.co.nz/mediawiki/index.php/Module_18
"""
import os
import sys
import traceback
import collections
import atexit
from ctypes import *
from .errors import DpcUtiPyException
BUFFER_SIZE = 512
# Placeholder value for mutable ctypes buffers.
# TODO: Please secure for buffer overflow issues.
###############################################################################
# MODULE INITIALIZATION #######################################################
###############################################################################
_dpcutil = None
_cwd = os.path.dirname(__file__)
# Attempt to load dpcutil
# NOTE(review): cdll attribute access raises on failure, so the `is None`
# guard below never fires in practice -- see the TODO'd fallback code.
_dpcutil = cdll.dpcutil
# try:  # TODO: Make something more robust.
#     _dpcutil = cdll.dpcutil
# except Exception as e:
#     # Alternative Imports.
#     pass
#     # try:
#     #     _dpcutil = CDLL(os.path.join(_cwd, "dpcutil_sys32.dll"))
#     # except Exception as e0:
#     #     try:
#     #         _dpcutil = CDLL(os.path.join(_cwd, "dpcutil_syswow64.dll"))
#     #     except Exception as e1:
#     #         _dpcutil = None
if _dpcutil is None:
    raise DpcUtiPyException("Failed to import dpcutil.")
def _DpcInit():
    """Initialise the DPCUTIL DLL; raise DpcUtiPyException on failure."""
    erc = c_int(0)
    if not _dpcutil.DpcInit(byref(erc)):
        raise DpcUtiPyException(erc)
def _DpcTerm():
    """Shut the DPCUTIL DLL down (registered with atexit below)."""
    _dpcutil.DpcTerm()
# Initialise the DLL at import time and guarantee teardown at interpreter exit.
_DpcInit()
atexit.register(_DpcTerm)
###############################################################################
# MODULE BASIC INTERFACE ######################################################
###############################################################################
# For function behaviour, see DPCUTIL Programmers Reference Manual.
# PARAMETERS:
# hwnd (Optional)
# Handle to parent window. No type-checking is done, but this should be
# a relevant ctypes type, likely `c_void_p`.
def DvmgStartConfigureDevices(hwnd=None):
    """Open DPCUTIL's device-configuration dialog.

    hwnd: optional parent window handle (a ctypes pointer value).
    """
    erc = c_int(0)
    _dpcutil.DvmgStartConfigureDevices(hwnd, byref(erc))
    if erc.value != DpcUtiPyException.ERCNOERROR:
        raise DpcUtiPyException(erc)
# RETURNS:
# The index of the default device, as an integer.
def DvmgGetDefaultDev():
    """Return the index of the default device in DPCUTIL's device table."""
    erc = c_int(0)
    idx = _dpcutil.DvmgGetDefaultDev(byref(erc))
    if idx == -1:
        raise DpcUtiPyException("No devices in the device table.", erc)
    assert isinstance(idx, int)
    return idx
# RETURNS:
# Name of the queried device.
def DvmgGetDevName(device_id):
    """Return the human-readable name of the device at index device_id."""
    erc = c_int(0)
    name_buf = create_string_buffer(BUFFER_SIZE)
    if not _dpcutil.DvmgGetDevName(device_id, name_buf, byref(erc)):
        raise DpcUtiPyException(erc)
    name = name_buf.value.decode("ascii")
    assert isinstance(name, str)
    return name
# RETURNS:
# A class that represents the data transfer HANDLE object from the DLL API.
# This class contains methods that mimic the operations available to be
# performed on this HANDLE.
def DpcOpenData(dev_name):
    """Open a data-transfer connection to dev_name.

    Returns an EPPDataTransferHandle wrapping the DLL HANDLE, with methods
    mirroring the DPCUTIL data-transfer API.
    """
    return EPPDataTransferHandle(dev_name)
class EPPDataTransferHandle:
    """Wrapper around a DPCUTIL data-transfer HANDLE.

    Mirrors the operations DPCUTIL allows on the HANDLE; every call that
    reports failure raises DpcUtiPyException with the DLL error code.
    """
    def __init__(self, dev_name):
        self._chif = c_void_p(0)  # ctypes pointer to a HANDLE.
        erc = c_int(0)
        buf = dev_name.encode()  # str to bytes
        if not _dpcutil.DpcOpenData(byref(self._chif), buf, byref(erc), None):
            raise DpcUtiPyException(erc)
        return
    def DpcCloseData(self):
        """Close the underlying HANDLE."""
        erc = c_int(0)
        if not _dpcutil.DpcCloseData(self._chif, byref(erc)):
            raise DpcUtiPyException(erc)
        return
    def DpcPutReg(self, b_addr, b_data):
        """Write byte b_data to register b_addr (both 0x00-0xFF)."""
        assert isinstance(b_addr, int) and (0x00 <= b_addr <= 0xFF)
        assert isinstance(b_data, int) and (0x00 <= b_data <= 0xFF)
        b_addr = c_byte(b_addr)
        b_data = c_byte(b_data)
        erc = c_int(0)
        if not _dpcutil.DpcPutReg(self._chif, b_addr, b_data, byref(erc), None):
            raise DpcUtiPyException(erc)
        return
    def DpcGetReg(self, b_addr):
        """Read and return the byte at register b_addr as an int in 0x00-0xFF."""
        assert isinstance(b_addr, int) and (0x00 <= b_addr <= 0xFF)
        b_addr = c_byte(b_addr)
        b_data = c_byte(0)
        erc = c_int(0)
        if not _dpcutil.DpcGetReg(self._chif, b_addr, byref(b_data), byref(erc), None):
            raise DpcUtiPyException(erc)
        b_data = b_data.value
        # c_byte is signed; normalise negative values into 0-255.
        b_data = (b_data + 0x100) % 0x100
        assert isinstance(b_data, int) and (0x00 <= b_data <= 0xFF)
        return b_data
    def DpcPutRegSet(*args, **kwargs):
        raise NotImplementedError
    def DpcGetRegSet(*args, **kwargs):
        raise NotImplementedError
    def DpcPutRegRepeat(*args, **kwargs):
        raise NotImplementedError
    def DpcGetRegRepeat(*args, **kwargs):
        raise NotImplementedError
    # RETURNS:
    #     Nothing.
    # EXCEPTIONS:
    #     If DPCUTIL's DpcGetFirstError returns an error code, this method will
    #     raise the error code as an exception.
    # TODO:
    #     Reimplement error codes as enum, and make this method return the error
    #     as an enum. This may require refactoring.
    def DpcGetFirstError(self):
        ret = _dpcutil.DpcGetFirstError(self._chif)
        if ret != DpcUtiPyException.ERCNOERROR:
            raise DpcUtiPyException(ret)
        return
###############################################################################
# MODULE EXTENDED INTERFACE ###################################################
###############################################################################
def put_single_register(b_addr, b_data, dev_name):
    """Open *dev_name*, write byte *b_data* to register *b_addr*, then
    close the handle again (even when the write fails)."""
    xfer = DpcOpenData(dev_name)
    try:
        xfer.DpcPutReg(b_addr, b_data)
        # Surface any error the DLL queued during the transfer.
        xfer.DpcGetFirstError()
    finally:
        xfer.DpcCloseData()
    return
def get_single_register(b_addr, dev_name):
    """Open *dev_name*, read one byte from register *b_addr*, close the
    handle, and return the byte (or None if the read never completed)."""
    xfer = DpcOpenData(dev_name)
    value = None
    try:
        value = xfer.DpcGetReg(b_addr)
        # Surface any error the DLL queued during the transfer.
        xfer.DpcGetFirstError()
    finally:
        xfer.DpcCloseData()
    return value
|
{"/testdpcutipy.py": ["/dpcutipy.py"], "/dpcutipy.py": ["/errors.py"]}
|
40,883
|
ErrorMakerqwq/EE308
|
refs/heads/master
|
/main.py
|
'''
@Project :pythonProject1
@File :main.py
@Author :ErrorMaker
@Date :2021/9/21 22:21
'''
# ui
def ui():
    """Prompt the user for a file path and a run level; return (path, level)."""
    file_path = input('请输入文件路径(默认路径为File.c):')
    run_level = input('请输入运行等级:')
    return file_path, run_level
# choosing
def chooseingFuction(path, level):
    """Dispatch on the run level.

    Level '1' prints the keyword total only; level '2' additionally
    prints the case count for each switch statement.  Any other level
    prints a not-implemented message.
    """
    if level == '1':
        # level 1: keyword total only
        countingKwords(path)
    elif level == '2':
        # level 2: keyword total plus per-switch case counts
        found_keywords = countingKwords(path)
        case_counts = countingswtichStructure(found_keywords)
        print('case num: ', end='')
        for count in case_counts:
            print(count, " ", end='')
    else:
        print('no finishing......')
# Counting function
def countingKwords(path):
    """Count C keyword occurrences in the file at *path*.

    Each whitespace-separated token is checked against the module-level
    ``keywrds`` list; a token is credited to the FIRST keyword it
    contains as a substring (so e.g. "iffy" counts as "if" — kept for
    backward compatibility).  Prints the total and returns the matched
    keywords in file order.
    """
    total = 0
    matched = []
    with open(path) as stream:
        # split the whole file into whitespace-separated tokens
        tokens = stream.read().split()
    for token in tokens:  # 'token' — the original shadowed builtin 'str'
        for kw in keywrds:
            if kw in token:  # substring test, same semantics as before
                total = total + 1
                matched.append(kw)
                break  # credit only the first matching keyword per token
    print('total num:', total)
    return matched
# Counting switch structure
def countingswtichStructure(keywrdsList):
    """Count switch statements and the cases belonging to each.

    *keywrdsList* is the keyword sequence produced by countingKwords().
    For every "switch" keyword, counts the "case" keywords that follow
    it up to (but not including) the next "switch".  Prints the number
    of switches and returns the per-switch case counts, in order.
    """
    switch_total = 0
    case_counts = []
    # find every "switch" and scan forward for its cases
    for i, kw in enumerate(keywrdsList):
        if kw != "switch":
            continue
        switch_total = switch_total + 1
        cases = 0
        for later in keywrdsList[i + 1:]:
            if later == "case":
                cases = cases + 1
            elif later == "switch":
                break  # next switch starts; its cases are counted separately
        case_counts.append(cases)
    print('switch num:', switch_total)
    return case_counts
# Counting if-else and if-elseif-else structure
# def countingEifStructure(keywrdsList):  # TODO: not implemented yet
# The 32 ANSI C keywords that countingKwords() searches for.
keywrds = ["auto", "break", "case", "char", "const", "continue", "default"
    , "double", "do", "else", "enum", "extern", "float", "for", "goto", "if"
    , "int", "long", "register", "return", "short", "signed", "sizeof", "static"
    , "struct", "switch", "typedef", "union", "unsigned", "void", "volatile", "while"]
if __name__ == '__main__':
    # Entry point: prompt for the file path and run level, then dispatch.
    path,level=ui()
    chooseingFuction(path, level)
|
{"/test_main.py": ["/main.py"]}
|
40,884
|
ErrorMakerqwq/EE308
|
refs/heads/master
|
/test_main.py
|
import unittest
import main
class TestcountingKwords(unittest.TestCase):
    """Tests for main.countingKwords()."""

    def test_countingKwords(self):
        # "File.c" is presumably a sample fixture shipped alongside the
        # project that contains exactly 35 keyword hits — TODO confirm
        # the fixture exists in the working directory before running.
        self.assertEqual(len(main.countingKwords(path="File.c")), 35)
if __name__ == '__main__':
    # Run the test suite when this file is executed directly.
    unittest.main()
|
{"/test_main.py": ["/main.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.