index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
38,833
|
tinattwang/ApeSplitter
|
refs/heads/master
|
/run.py
|
#
# run.py for running ffmpeg commands by multiprocessing
#
# The MIT License (MIT)
#
# Copyright (c) 2016 Wang Tiantian
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# -*- coding: utf-8 -*-
#!/usr/bin/env python3
import os
import time
import multiprocessing
import subprocess
import makeFfmpegCmd
import fnmatch
import sys
def splitApe(srcFile, errorCallback, param):
    """Split the APE referenced by the .cue sheet *srcFile* into MP3s by
    running the ffmpeg commands produced by makeFfmpegCmd, throttled so
    that at most 3/4 of the CPU cores run ffmpeg concurrently.

    srcFile       -- path to the .cue sheet to process.
    errorCallback -- callable invoked with *param* on any failure; if it
                     is not callable, print is substituted and processing
                     aborts with exit code "1002".
    param         -- dict used to report errors ('src', 'exitCode', ...);
                     replaced by a fresh dict when not a dict.
    Error codes: "1002" bad callback, "1001" missing source file,
    otherwise the nonzero ffmpeg return code.
    """
    if not isinstance(param, dict):
        param = {}
    param['src'] = srcFile
    param['exitCode'] = ""
    # BUG FIX: validate the callback BEFORE it can ever be invoked.  The
    # original checked os.path.exists first and called errorCallback for a
    # missing file, raising TypeError when a non-callable was passed.
    if not callable(errorCallback):
        errorCallback = print
        param['exitCode'] += "1002"
        errorCallback(param)
        return
    if not os.path.exists(srcFile):
        param['exitCode'] += "1001"
        errorCallback(param)
        return
    cmds = makeFfmpegCmd.makeFfmpegCmd(srcFile)
    # Allow ffmpeg on at most three quarters of the available cores.
    maxProcess = multiprocessing.cpu_count() * 3 // 4
    curProcess = _countFfmpegProcesses()
    while True:
        if maxProcess > curProcess:
            for cmd in cmds:
                process = subprocess.Popen(cmd, shell=True,
                                           stdout=subprocess.PIPE,
                                           stderr=subprocess.PIPE)
                # communicate() already waits for termination, so the
                # original's extra process.wait() was redundant.
                stdout, stderr = process.communicate()
                exitCode = process.returncode
                if exitCode != 0:
                    param["srcFile"] = srcFile
                    param["cmd"] = cmd
                    param["exitCode"] = exitCode
                    errorCallback(param)
            break
        curProcess = _countFfmpegProcesses()
        time.sleep(5)


def _countFfmpegProcesses():
    """Count currently running ffmpeg processes via ps (Unix only)."""
    output = os.popen('ps aux |grep ffmpeg | grep -v grep | wc -l')
    try:
        return int(output.read())
    finally:
        output.close()
def splitDir(dir):
    """Recursively collect every .cue sheet under *dir* and split the APE
    file referenced by each one, reporting failures to stdout."""
    def _print(dict):
        # Error callback: dump each key/value of the failure report.
        for key, value in dict.items():
            print("[ error: ] %s : %s"%(key, value))
    # One shared report dict is reused across all sheets, as before.
    shared_param = {}
    cue_sheets = []
    for root, _dirnames, names in os.walk(dir):
        cue_sheets.extend(
            os.path.join(root, name) for name in fnmatch.filter(names, '*.cue'))
    for sheet in cue_sheets:
        splitApe(sheet, _print, shared_param)
# CLI entry point: `python run.py <directory>` walks <directory> for .cue
# sheets and splits the APE files they reference.
if __name__ == '__main__':
    if len(sys.argv) < 2:
        # No directory argument supplied; nothing to do.
        exit()
    dir = sys.argv[1]
    splitDir(dir)
|
{"/run.py": ["/makeFfmpegCmd.py"]}
|
38,834
|
tinattwang/ApeSplitter
|
refs/heads/master
|
/makeFfmpegCmd.py
|
#
# makeFfmpegCmd.py for generating the commands for slicing APE file to MP3 files.
#
# The MIT License (MIT)
#
# Copyright (c) 2016 Wang Tiantian
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# -*- coding: utf-8 -*-
#!/usr/bin/env python3
import os
def makeFfmpegCmd(file):
    """Parse the .cue sheet *file* and build one ffmpeg shell command per
    track that slices the referenced audio file into 320k MP3s.

    file -- path to the .cue sheet; the audio file named by its FILE
            entry is resolved relative to the sheet's directory.
    Returns a list of ffmpeg command strings (tagged with the sheet's
    album/artist/track metadata).
    """
    # BUG FIX: close the sheet deterministically instead of leaking the
    # file handle (the original called open() without ever closing).
    with open(file) as sheet:
        d = sheet.read().splitlines()
    general = {}
    tracks = []
    current_file = None  # set by the FILE entry; stays None if absent
    path = os.path.split(file)[0]
    cmds = []
    for line in d:
        # Album-level metadata (unindented entries).
        if line.startswith('REM GENRE '):
            general['genre'] = ' '.join(line.split(' ')[2:])
        if line.startswith('REM DATE '):
            general['date'] = ' '.join(line.split(' ')[2:])
        if line.startswith('PERFORMER '):
            general['artist'] = ' '.join(line.split(' ')[1:]).replace('"', '')
        if line.startswith('TITLE '):
            general['album'] = ' '.join(line.split(' ')[1:]).replace('"', '')
        if line.startswith('FILE '):
            current_file = os.path.join(path, ' '.join(line.split(' ')[1:-1]).replace('"', ''))
        # Track-level metadata (indented entries); each TRACK inherits the
        # album-level fields and may override artist/title.
        if line.startswith(' TRACK '):
            track = general.copy()
            track['track'] = int(line.strip().split(' ')[1], 10)
            tracks.append(track)
        if line.startswith(' TITLE '):
            tracks[-1]['title'] = ' '.join(line.strip().split(' ')[1:]).replace('"', '')
        if line.startswith(' PERFORMER '):
            tracks[-1]['artist'] = ' '.join(line.strip().split(' ')[1:]).replace('"', '')
        if line.startswith(' INDEX 01 '):
            t = list(map(int, ' '.join(line.strip().split(' ')[2:]).replace('"', '').split(':')))
            # BUG FIX: cue INDEX points are MM:SS:FF where FF counts CD
            # frames at 75 per second (Red Book), not hundredths.
            tracks[-1]['start'] = 60 * t[0] + t[1] + t[2] / 75.0
    # Each track lasts until the next one starts; the final track has no
    # explicit duration (ffmpeg runs to end of file).
    for i in range(len(tracks)):
        if i != len(tracks) - 1:
            tracks[i]['duration'] = tracks[i + 1]['start'] - tracks[i]['start']
    for track in tracks:
        metadata = {
            'artist': track['artist'],
            'title': track['title'],
            'album': track['album'],
            'track': str(track['track']) + '/' + str(len(tracks))
        }
        if 'genre' in track:
            metadata['genre'] = track['genre']
        if 'date' in track:
            metadata['date'] = track['date']
        cmd = 'ffmpeg'
        cmd += ' -i "%s"' % current_file
        cmd += ' -b:a 320k'
        cmd += ' -ss %.2d:%.2d:%.2d' % (track['start'] / 60 / 60, track['start'] / 60 % 60, int(track['start'] % 60))
        if 'duration' in track:
            cmd += ' -t %.2d:%.2d:%.2d' % (
                track['duration'] / 60 / 60, track['duration'] / 60 % 60, int(track['duration'] % 60))
        cmd += ' ' + ' '.join('-metadata %s="%s"' % (k, v) for (k, v) in metadata.items())
        cmd += ' "%s/%.2d - %s - %s.mp3"' % (path, track['track'], track['artist'], track['title'])
        cmds.append(cmd)
    return cmds
|
{"/run.py": ["/makeFfmpegCmd.py"]}
|
38,839
|
stoddabr/research_robotics_arm
|
refs/heads/master
|
/ResearchRobotics/arm_class.py
|
#!/usr/bin/python3
# coding=utf8
import sys
sys.path.append('/home/pi/ArmPi/')
import cv2
import time
import Camera
import threading
# from LABConfig import *
from ArmIK.Transform import *
from ArmIK.ArmMoveIK import *
import HiwonderSDK.Board as Board
from CameraCalibration.CalibrationConfig import *
from ResearchRobotics.perception_class import Perception
from ResearchRobotics.paw_class import Paw
# Refuse to run under Python 2 (this module relies on Python 3 semantics).
if sys.version_info.major == 2:
    print('Please run this program with python3!')
    sys.exit(0)
## TODO combine paw and color classes.
# NOTE(review): the bare `0` below is a no-op expression statement —
# looks like leftover/accidental text; confirm it can be deleted.
0
class Arm:
    """High-level pick-and-place controller coupling the vision subsystem
    (Perception, 'eye') with the manipulator (Paw)."""

    def __init__(self) -> None:
        # initialize encapsulated classes
        self.eye = Perception()
        self.paw = Paw()
        # (x, y, z) place coordinates of each color's drop-off square
        self.color_goal_coordinate = {
            'red': (-15 + 0.5, 12 - 0.5, 1.5),
            'green': (-15 + 0.5, 6 - 0.5, 1.5),
            'blue': (-15 + 0.5, 0 - 0.5, 1.5),
        }

    def colorSorting(self):
        """ grab colors and place them onto their spaces """
        # TODO multithread with Rossros
        target_colors = ('red',)
        while True:
            # self.setBuzzer(0.1)
            is_not_blind = self.eye.see()
            if is_not_blind:
                # find block
                loc, found_color = self.eye.detect(target_color=target_colors, print_loc=True)
                print('loc', loc, 'color', found_color)
                # mirror the detected color on the arm's LEDs
                if found_color:
                    self.set_rgb(found_color)
                key = self.eye.display()
                if key == 27:  # ESC in the display window stops sorting
                    break
                if loc:
                    # grab block, then place it on its color's square
                    self.paw.grabAtXY(*loc)
                    self.paw.placeAtXY(*self.color_goal_coordinate[found_color])
        self.eye.close()

    def exit(self):
        """ App exit gameplay call """
        # may be necessary for threading
        pass

    def reset(self):
        """ Reset class variables """
        # reset encapsulated classes
        self.paw.reset()
        self.eye.reset()

    def setBuzzer(self, timer):
        """ Activate buzzer to sound for 'timer' seconds """
        Board.setBuzzer(0)
        Board.setBuzzer(1)
        time.sleep(timer)
        Board.setBuzzer(0)

    def set_rgb(self, color):
        """Set the expansion-board RGB LEDs to *color* ('red'/'green'/'blue').

        Unknown colors are reported on stdout and the LEDs switch off.
        """
        # Table-driven replacement for the original copy-pasted if/elif
        # chain; behavior (including the unknown-color print) is unchanged.
        rgb = {
            'red': (255, 0, 0),
            'green': (0, 255, 0),
            'blue': (0, 0, 255),
        }.get(color)
        if rgb is None:
            print('set_rgb color not found')
            rgb = (0, 0, 0)
        Board.RGB.setPixelColor(0, Board.PixelColor(*rgb))
        Board.RGB.setPixelColor(1, Board.PixelColor(*rgb))
        Board.RGB.show()
# Demo entry point: run the color-sorting behavior until ESC is pressed.
if __name__ == '__main__':
    # Identifies locations of a block and labels it in the camera video display
    arm = Arm()
    arm.colorSorting()
|
{"/ResearchRobotics/arm_class.py": ["/ResearchRobotics/perception_class.py", "/ResearchRobotics/paw_class.py"]}
|
38,840
|
stoddabr/research_robotics_arm
|
refs/heads/master
|
/ResearchRobotics/perception.py
|
#!/usr/bin/env python3
import sys
import numpy as np
sys.path.append('/home/pi/ArmPi/')
import cv2
import time
import Camera
from math import log10
import params
from db_txt import *
# Set True to pop up intermediate images at each processing stage.
DEBUG = False
class Perception:
    """Camera-based detector that finds colored blocks and starfish via
    LAB color thresholding plus Hu-moment shape matching."""

    def __init__(self):
        # start capturing from camera
        self.my_camera = Camera.Camera()
        self.my_camera.camera_open()
        # always store current scene
        self.frame = None
        # |log10| Hu-moment templates for the two shapes of interest
        self.block_shape_logscale = np.array([ 0.7827677 , 5.01251839, 6.19969677, 7.33014752, 14.1640971, 10.04483551, 14.37753618])
        self.starfish_shape_logscale = np.array([0.64055716, 3.97346808, 4.47200431, 4.85410399, 9.66857547, 6.9555887 , 9.66677454])
        # L2 distance bounds for classification
        self.block_t = params.Perception.block_threshold
        self.starfish_t = params.Perception.starfish_threshold
        # LAB color ranges for thresholding
        self.color_ranges = params.Perception.color_ranges

    def get_frame(self):
        """Pull the newest camera frame and mirror it into the image DB."""
        self.frame = self.my_camera.frame
        updateDBImg(self.frame)

    def to_logscale(self, moment):
        """Map the 7 Hu moments to |log10| space so shapes can be compared
        across scale; zero-valued moments map to 0."""
        logscale = [log10(abs(m)) if m != 0 else 0 for m in moment]
        return abs(np.array(logscale))

    def color_threshold(self, color, img):
        """Binary mask of *img* keeping only pixels in *color*'s LAB range."""
        if DEBUG: show_image(img.copy(), 'img_prethreshold')
        # convert to LAB
        lab = cv2.cvtColor(img, cv2.COLOR_BGR2LAB)
        # mask image for the target color
        frame_mask = cv2.inRange(lab, self.color_ranges[color][0], self.color_ranges[color][1])
        if DEBUG: show_image(frame_mask.copy(), 'frame_mask_premorph')
        # smooth the mask (erosion -> dilation -> dilation -> erosion)
        opened = cv2.morphologyEx(frame_mask, cv2.MORPH_OPEN, np.ones((6, 6), np.uint8))
        closed = cv2.morphologyEx(opened, cv2.MORPH_CLOSE, np.ones((6, 6), np.uint8))
        return closed

    def get_contours(self, img):
        """findContours wrapper; [-2] keeps compatibility across cv2
        versions that return 2- or 3-tuples."""
        return cv2.findContours(img, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)[-2]

    def get_moments(self, contours):
        """Hu moments for each contour, index-parallel to *contours*."""
        return [cv2.HuMoments(cv2.moments(c)) for c in contours]

    def is_block(self, moment):
        """True if *moment*'s log-signature is L2-near the block template."""
        shape_diff = abs(self.to_logscale(moment) - self.block_shape_logscale)
        # return the comparison directly (original had if/else True/False)
        return bool((shape_diff ** 2).sum() < self.block_t)

    def is_starfish(self, moment):
        """True if *moment*'s log-signature is L2-near the starfish template."""
        shape_diff = abs(self.to_logscale(moment) - self.starfish_shape_logscale)
        return bool((shape_diff ** 2).sum() < self.starfish_t)

    def find_targets(self, image, color):
        """Find all block and starfish blobs of one *color* in *image*.

        Returns a list of dicts: {'x', 'y', 'color', 'type', 'angle'}.
        """
        blobs = []
        # copy the image so as not to overwrite self.frame
        processing_frame = image.copy()
        # smooth image
        frame_gb = cv2.GaussianBlur(processing_frame, (11, 11), 11)
        # threshold to single color
        frame_mono = self.color_threshold(color, frame_gb)
        if DEBUG: show_image(frame_mono.copy(), 'frame_mono')
        # find contours and hu moments on the edge image
        canny_output = cv2.Canny(frame_mono, 128, 255)
        if DEBUG: show_image(canny_output, 'canny_output')
        contours = self.get_contours(canny_output)
        contours = list(filter(
            lambda c: cv2.contourArea(c) > params.Perception.min_cnt_area,
            contours))
        hu_moments = self.get_moments(contours)
        if DEBUG:
            img_contours = image.copy()
            cv2.drawContours(img_contours, contours, -1, (0,255,0), 3)
            show_image(img_contours)
        # classify every remaining contour (enumerate replaces the manual
        # `count` counter, which stayed index-parallel to hu_moments)
        for count, cnt in enumerate(contours):
            M = cv2.moments(cnt)
            if M['m00'] != 0:
                # centroid of the blob
                cx = int(M['m10']/M['m00'])
                cy = int(M['m01']/M['m00'])
                # classify blob and store as dictionary
                if self.is_block(hu_moments[count]):
                    rect = cv2.minAreaRect(cnt)
                    blobs.append({'x': cx, 'y': cy, 'color': color, 'type': 'block', 'angle': rect[2]})
                # starfish are only ever green in this setup
                if self.is_starfish(hu_moments[count]) and color=='green':
                    blobs.append({'x': cx, 'y': cy, 'color': color, 'type': 'starfish', 'angle': 0})
        return blobs

    def get_all_targets(self):
        """Refresh the camera frame and return every detected blob in it."""
        self.get_frame()
        scene = []
        # check for empty image error
        if self.frame is not None:
            # find all blobs for each color
            for color in ['red', 'green', 'blue']:
                scene.extend(self.find_targets(self.frame, color))
        # NOTE(review): assumed the DB update runs unconditionally (the
        # dump's indentation is ambiguous) — confirm against the repo.
        updateBlockDB(scene)
        return scene
def label_scene(display_frame, scene):
    """Overlay a '. <type> (<color>)' caption at each detected object's
    centroid and return the annotated frame."""
    for detected in scene:
        caption = ". " + detected['type'] + " (" + detected['color'] + ")"
        position = (detected['x'], detected['y'])
        display_frame = cv2.putText(
            display_frame, caption, position,
            cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255,255,255), 2, cv2.LINE_AA)
    return display_frame
def show_image(img, name='img'):
    """Display *img* in the window *name* and pump the GUI event loop for
    1 ms so the window actually refreshes."""
    cv2.imshow(name, img)
    # The original captured the key and had `if key == 27: return`, which
    # was a no-op (the function returns immediately either way).
    cv2.waitKey(1)
# Manual smoke test: continuously detect all colored blocks/starfish,
# print the scene, and show the labelled camera image.
if __name__ == '__main__':
    count = 0
    perception_obj = Perception()
    while True:
        count = count + 1
        print()
        #perception_obj.get_frame()
        scene = perception_obj.get_all_targets()
        img = perception_obj.frame
        img = label_scene(img, scene)
        if img is not None:
            print(scene)
            show_image(img)
    # NOTE(review): the loop above has no break, so this cleanup appears
    # unreachable except via exception/Ctrl-C — confirm intended shutdown.
    perception_obj.my_camera.camera_close()
    cv2.destroyAllWindows()
|
{"/ResearchRobotics/arm_class.py": ["/ResearchRobotics/perception_class.py", "/ResearchRobotics/paw_class.py"]}
|
38,841
|
stoddabr/research_robotics_arm
|
refs/heads/master
|
/ResearchRobotics/calibrate_detect.py
|
# modified from
# https://opencv-python-tutroals.readthedocs.io/en/latest/py_tutorials/py_gui/py_trackbar/py_trackbar.html
import cv2
import numpy as np
import params
from perception import Perception, label_scene
# Module-level threshold state, seeded from params and then overwritten
# live by the trackbars inside main().
block_t = params.Perception.block_threshold
starfish_t = params.Perception.starfish_threshold
def nothing(x):
    """No-op callback required by cv2.createTrackbar; ignores *x*."""
    return None
def main():
    # Interactive tuner: two trackbars adjust the block/starfish Hu-moment
    # thresholds live while the labelled camera feed is displayed.
    global block_t, starfish_t
    cv2.namedWindow('image')
    perception_obj = Perception()
    # Make trackbars, with default values above
    cv2.createTrackbar('Block Threshold','image',block_t,255,nothing)
    cv2.createTrackbar('Starfish Threshold','image',starfish_t,255,nothing)
    print("Showing video. With the CV window in focus, press q to exit, p to pause.")
    while(1):
        # get current positions of trackbars
        block_t = cv2.getTrackbarPos('Block Threshold', 'image')
        starfish_t = cv2.getTrackbarPos('Starfish Threshold', 'image')
        # push the trackbar values into the live detector
        perception_obj.block_t = block_t
        perception_obj.starfish_t = starfish_t
        scene = perception_obj.get_all_targets()
        img = perception_obj.frame
        img = label_scene(img, scene)
        if img is not None:
            cv2.imshow('image',img)
        key = cv2.waitKey(66) # Delay for 66 ms
        if key == ord('q'): # Press q to exit, p to pause
            break
        if key == ord('p'):
            cv2.waitKey(-1) #wait until any key is pressed
    cv2.destroyAllWindows()
def detect_color(frame, lower, upper):
    """Return a binary mask of *frame* pixels within [lower, upper] in
    LAB color space."""
    converted = cv2.cvtColor(frame, cv2.COLOR_BGR2LAB)
    return cv2.inRange(converted, lower, upper)
if __name__=="__main__":
main()
|
{"/ResearchRobotics/arm_class.py": ["/ResearchRobotics/perception_class.py", "/ResearchRobotics/paw_class.py"]}
|
38,842
|
stoddabr/research_robotics_arm
|
refs/heads/master
|
/ResearchRobotics/run_server.py
|
import time
import os
from flask import Flask, send_file, request
from markupsafe import escape
import json
import random
import db_txt as db
def reset():
    """Seed both txt-file 'databases' with known default contents."""
    grasp_defaults = [0,0,False] # [angle, object_coords, is_grasp]
    blob_defaults = [ # TODO double check format
        {'x': '50','y': '50', 'angle': '270', 'color': 'orange', 'type':'starfish'},
        {'x': '200','y': '200', 'angle': '270', 'color': 'orange', 'type':'cube'},
    ]
    db.updateGraspDB(grasp_defaults)
    db.updateBlockDB(blob_defaults)
# test code
def run_testing_thread():
    """ will run thread that updates data every 5 seconds

    Starts a background thread that alternates the blob DB between two
    fixed fixtures every 5 seconds. Test/demo use only.
    """
    import threading # only used for testing
    blobsA = [ # TODO double check format
        {'x': '10','y': '10', 'angle': '270', 'color': 'orange', 'type':'cube'},
        {'x': '200','y': '150', 'angle': '60', 'color': 'yellow', 'type':'cube'},
    ]
    blobsB = [ # TODO double check format
        {'x': '50','y': '50', 'angle': '270', 'color': 'orange', 'type':'starfish'},
        {'x': '100','y': '100', 'angle': '60', 'color': 'yellow', 'type':'starfish'},
    ]
    def loopWriteToDB():
        # BUG FIX: the original called the bare name updateBlockDB, which
        # is undefined here (this module imports db_txt as db) and raised
        # NameError as soon as the thread ran. Also dropped the unused
        # local `fname`.
        while True:
            db.updateBlockDB(blobsA)
            time.sleep(5)
            db.updateBlockDB(blobsB)
            time.sleep(5)
    test_io_thread = threading.Thread(target = loopWriteToDB)
    test_io_thread.start()
# setup flask server and routes
# Flask application object; the route handlers below attach to it.
app = Flask(__name__)
@app.route('/grasp', methods=['GET', 'PUT'])
def grasp_request():
    """PUT stores the raw request body as grasp data; GET returns the
    stored grasp data as JSON."""
    if request.method != 'PUT':  # GET
        return json.dumps(db.getGraspDB())
    data = request.get_data(as_text=True)
    print('!!!!! put grasp data', data)
    db.updateGraspDB(data, as_text=True)
    return 'Got'
@app.route('/blobs', methods=['GET', 'PUT'])
def blobs_request():
    """PUT replaces the blob DB with the request JSON; GET returns the
    stored blobs as JSON."""
    if request.method != 'PUT':  # GET
        return json.dumps(db.getBlocksDB())
    data = request.get_json()
    print('!!!!! put grasp data', data)
    return db.updateBlockDB(data)
@app.route('/video_feed')
def video_feed():
    """Serve the latest scene snapshot written by the perception side."""
    return send_file('_scene.jpg', mimetype='image/jpg')
@app.route('/video_feed_test')
def video_feed_test():
    """Serve one of two canned test images, chosen at random."""
    candidates = ['_testA.jpg', '_testB.jpg']
    return send_file(random.choice(candidates), mimetype='image/jpg')
# Script entry point: seed the txt 'databases', then serve the API.
if __name__ == '__main__':
    reset() # reset data in 'server'
    #run_testing_thread() # comment this line during production
    # 0.0.0.0 exposes the server on every interface so other devices can reach it.
    app.run(host='0.0.0.0')
|
{"/ResearchRobotics/arm_class.py": ["/ResearchRobotics/perception_class.py", "/ResearchRobotics/paw_class.py"]}
|
38,843
|
stoddabr/research_robotics_arm
|
refs/heads/master
|
/ResearchRobotics/paw_class.py
|
import sys
sys.path.append('/home/pi/ArmPi/')
import cv2
import time
import Camera
import threading
from LABConfig import *
from ArmIK.Transform import *
from ArmIK.ArmMoveIK import *
import HiwonderSDK.Board as Board
from CameraCalibration.CalibrationConfig import *
import params
class Paw:
    """ controls the arm manipulator and joints """
    def __init__(self):
        """Initialize gripper state and move the arm to its home pose."""
        self.reset()
        self.resetPosition()

    def reset(self):
        """ Reset class variables """
        self.unreachable = False
        self.isRunning = False
        # gripper angle when grabbing an object (legacy: servo1)
        self.closeAngle = params.Paw.closeAngle
        self.openAngle = (self.closeAngle - 280) # legacy magic number
        self.neutralAngle = (self.closeAngle - 50) # legacy magic number
        self.AK = ArmIK()

    def resetPosition(self, reset_gripper=True): # legacy:initMove
        """ Move servo to neutral, initial position """
        if reset_gripper:
            Board.setBusServoPulse(1, self.neutralAngle, 300)
        self.AK.setPitchRangeMoving((0, 10, 10), -30, -30, -90, 1500)
        time.sleep(1.5)

    def updateCloseAngle(self, newCloseAngle):
        """ Updates the default variable for closing the gripper (eg 500) """
        self.closeAngle = newCloseAngle

    def close(self, closeAngle=None):
        """ Close the gripper paw, optionally set a custom close angle int (eg 500) """
        # BUG FIX: identity test `is None` instead of `== None` (PEP 8;
        # avoids surprises from operands with a custom __eq__).
        if closeAngle is None:
            closeAngle = self.closeAngle
        Board.setBusServoPulse(1, closeAngle, 500)
        time.sleep(0.5)

    def open(self):
        """ Open the gripper paw """
        Board.setBusServoPulse(1, self.openAngle, 500)
        time.sleep(0.5)

    def moveToXY(self, world_X, world_Y, world_Z=2):
        """ move arm to goal coordinates on mat using built-in inverse kinematics
        world_Z (height) is optional, default 2
        Returns False when the goal is unreachable, otherwise None after
        the move completes.
        """
        result = self.AK.setPitchRangeMoving((world_X, world_Y, world_Z), -90, -90, 0, 1000)
        # `is False` rather than `== False`: a successful result is
        # presumably a tuple, and identity makes the sentinel test explicit.
        if result is False:
            return False # object is unreachable
        time.sleep(result[2]/1000) # The third item of the return parameter is time

    def rotateGripper(self, world_angle):
        """ turn the gripper """
        Board.setBusServoPulse(2, world_angle, 500)
        time.sleep(0.5)

    def grabAtXY(self, world_X, world_Y, rotation_angle):
        """ grab object at coordinate goal location of the object """
        world_angle = getAngle(world_X, world_Y, rotation_angle)
        self.resetPosition()
        self.open()
        self.rotateGripper(world_angle)
        self.moveToXY(world_X, world_Y)
        self.close()
        self.resetPosition()

    def placeAtXY(self, world_X, world_Y, rotation_angle):
        """ place grabbed object at coordinate goal location """
        world_angle = getAngle(world_X, world_Y, rotation_angle)
        self.resetPosition(False)
        self.rotateGripper(world_angle)
        self.moveToXY(world_X, world_Y)
        self.open()
        self.resetPosition()

    def partyTime(self):
        """ spin 'n wave yo arm like you just dont care - """
        self.resetPosition(False)
        Board.setBusServoPulse(6, 100, 1000)
        time.sleep(1)
        Board.setBusServoPulse(6, 800, 1000)
        time.sleep(1)
        Board.setBusServoPulse(6, 100, 1000)
        time.sleep(1)
# Demo entry point for the manipulator alone.
if __name__ == '__main__':
    # will pick up a block on the red square, wave around, and put it back
    paw = Paw()
    red_block_home_coords = (-15 + 0.5, 12 - 0.5, 1.5) # x, y, angle on mat plane
    paw.grabAtXY(*red_block_home_coords)
    paw.partyTime()
    paw.placeAtXY(*red_block_home_coords)
|
{"/ResearchRobotics/arm_class.py": ["/ResearchRobotics/perception_class.py", "/ResearchRobotics/paw_class.py"]}
|
38,844
|
stoddabr/research_robotics_arm
|
refs/heads/master
|
/ResearchRobotics/calibrate_LAB.py
|
# modified from
# https://opencv-python-tutroals.readthedocs.io/en/latest/py_tutorials/py_gui/py_trackbar/py_trackbar.html
import cv2
import numpy as np
# Default LAB threshold presets per color: [lower (L,A,B), upper (L,A,B)].
# These seed the trackbars in main().
colors = { #LAB
    'red': [(0, 151, 100), (255, 255, 255)],
    'green': [(0, 0, 0), (255, 122, 255)],
    'blue': [(0, 0, 0), (255, 255, 122)],
}
def nothing(x):
    """Do-nothing trackbar callback for cv2.createTrackbar; ignores *x*."""
    return None
def main():
    # Interactive LAB color-range tuner: six trackbars adjust the lower and
    # upper L/A/B bounds while the raw frame and resulting mask are shown.
    cap = cv2.VideoCapture(0)
    # cap =cv2.VideoCapture('/home/tim/wacky_lighting.avi')
    cv2.namedWindow('image')
    cv2.namedWindow('mask')
    # color preset whose bounds seed the trackbars
    c = 'green'
    lower = np.array(colors[c][0])
    upper = np.array(colors[c][1])
    # Make trackbars, with default values above
    cv2.createTrackbar('L_lower','image',lower[0],255,nothing)
    cv2.createTrackbar('L_upper','image',upper[0],255,nothing)
    cv2.createTrackbar('A_lower','image',lower[1],255,nothing)
    cv2.createTrackbar('A_upper','image',upper[1],255,nothing)
    cv2.createTrackbar('B_lower','image',lower[2],255,nothing)
    cv2.createTrackbar('B_upper','image',upper[2],255,nothing)
    print("Showing video. With the CV window in focus, press q to exit, p to pause.")
    while(1):
        ret, img = cap.read()
        # stop when the capture source runs dry / fails
        if not ret: break
        # get current positions of four trackbars
        lower[0] = cv2.getTrackbarPos('L_lower', 'image')
        lower[1] = cv2.getTrackbarPos('A_lower', 'image')
        lower[2] = cv2.getTrackbarPos('B_lower', 'image')
        upper[0] = cv2.getTrackbarPos('L_upper', 'image')
        upper[1] = cv2.getTrackbarPos('A_upper', 'image')
        upper[2] = cv2.getTrackbarPos('B_upper', 'image')
        # Create mask by thresholding LAB image
        mask = detect_color(img, lower, upper)
        cv2.imshow('image',img)
        cv2.imshow('mask', mask)
        key = cv2.waitKey(66) # Delay for 66 ms
        if key == ord('q'): # Press q to exit, p to pause
            break
        if key == ord('p'):
            cv2.waitKey(-1) #wait until any key is pressed
    cv2.destroyAllWindows()
def detect_color(frame, lower, upper):
    """Binary mask of *frame* pixels falling inside [lower, upper] after
    conversion to LAB color space."""
    lab_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2LAB)
    return cv2.inRange(lab_frame, lower, upper)
if __name__=="__main__":
main()
|
{"/ResearchRobotics/arm_class.py": ["/ResearchRobotics/perception_class.py", "/ResearchRobotics/paw_class.py"]}
|
38,845
|
stoddabr/research_robotics_arm
|
refs/heads/master
|
/ResearchRobotics/test_motion.py
|
"""
test_motion.py
Script to test the reliability of motion. Picks up the red square and repeatedly moves it to a new spot.
"""
from paw_class import Paw
from perception_class import Perception
import numpy as np
from ArmIK.Transform import getAngle
class RandomMover:
    """Reliability test for the motion stack: detect a colored block,
    grab it, and drop it at a random reachable spot on the mat."""
    def __init__(self):
        # vision and manipulation subsystems
        self.eye = Perception()
        self.paw = Paw()
    def randomly_move_block(self, color):
        """Detect one *color* block, grab it, and place it at a random
        coordinate with a random gripper rotation."""
        is_not_blind = self.eye.see()
        if is_not_blind:
            # find block
            loc, found_color = self.eye.detect(target_color=color, print_loc=True)
            print('loc', loc, 'color', found_color)
            # see result in window and arm led
            key = self.eye.display()
            if key == 27: # ESC in the display window aborts this attempt
                return
            if loc:
                # grab block
                print("Grabbing!")
                self.paw.grabAtXY(*loc)
                # place block at random coord
                print("Placing!")
                angle = np.random.uniform(-180, 180)
                # NOTE(review): getAngle presumably maps a mat-plane angle to
                # a servo angle — confirm against ArmIK.Transform.
                world_angle = getAngle(loc[0], loc[1], angle)
                self.paw.placeAtXY(*randomCoord(loc[0], loc[1]), world_angle)
def randomCoord(x, y, min_dist=5, limits=(-10, 10, 12, 25)):
    """Return a uniformly random (rx, ry) inside *limits* that lies more
    than *min_dist* from (x, y).

    x, y     -- point to keep away from.
    min_dist -- exclusion radius around (x, y).
    limits   -- (x_min, x_max, y_min, y_max) sampling rectangle; default
                changed from a mutable list to a tuple (same values) to
                avoid the shared-mutable-default pitfall.
    NOTE: rejection-samples, so it can loop for a while if the exclusion
    disk covers most of the rectangle.
    """
    while True:
        rx = np.random.uniform(*limits[:2])
        ry = np.random.uniform(*limits[2:])
        if (x - rx) ** 2 + (y - ry) ** 2 > min_dist ** 2:
            return rx, ry
if __name__ == "__main__":
rm = RandomMover()
for _ in range(10):
rm.randomly_move_block('red')
|
{"/ResearchRobotics/arm_class.py": ["/ResearchRobotics/perception_class.py", "/ResearchRobotics/paw_class.py"]}
|
38,846
|
stoddabr/research_robotics_arm
|
refs/heads/master
|
/ResearchRobotics/perception_class.py
|
#!/usr/bin/python3
# coding=utf8
import sys
sys.path.append('/home/pi/ArmPi/')
import cv2
import time
import Camera
import threading
# from LABConfig import *
from ArmIK.Transform import *
from ArmIK.ArmMoveIK import *
import HiwonderSDK.Board as Board
from CameraCalibration.CalibrationConfig import *
if sys.version_info.major == 2:
print('Please run this program with python3!')
sys.exit(0)
class Perception:
def __init__(self) -> None:
print('Initializing perception')
self.camera = Camera.Camera()
self.camera.camera_open()
# initialize frame variables
self.latest_raw_img = np.zeros((5,5)) # blank 5x5 img
self.latest_display_img = np.zeros((5,5))
# constants used for preception
self.color_range = {
'red': [(0, 151, 100), (255, 255, 255)],
'green': [(0, 0, 0), (255, 115, 255)],
'blue': [(0, 0, 0), (255, 255, 110)],
'black': [(0, 0, 0), (56, 255, 255)],
'white': [(193, 0, 0), (255, 250, 255)],
}
self.range_rgb = {
'red': (0, 0, 255),
'blue': (255, 0, 0),
'green': (0, 255, 0),
'black': (0, 0, 0),
'white': (255, 255, 255),
}
self.size = (640, 480)
self.window_name = "Arm View"
print('Finished initializing perception')
def reset(self):
""" Reset class variables and other important stuff """
pass
def set_rgb(self, color):
""" Set the RGB light color of the expansion board
useful to make it consistent with the color to be tracked
"""
if color == "red":
Board.RGB.setPixelColor(0, Board.PixelColor(255, 0, 0))
Board.RGB.setPixelColor(1, Board.PixelColor(255, 0, 0))
Board.RGB.show()
elif color == "green":
Board.RGB.setPixelColor(0, Board.PixelColor(0, 255, 0))
Board.RGB.setPixelColor(1, Board.PixelColor(0, 255, 0))
Board.RGB.show()
elif color == "blue":
Board.RGB.setPixelColor(0, Board.PixelColor(0, 0, 255))
Board.RGB.setPixelColor(1, Board.PixelColor(0, 0, 255))
Board.RGB.show()
else:
Board.RGB.setPixelColor(0, Board.PixelColor(0, 0, 0))
Board.RGB.setPixelColor(1, Board.PixelColor(0, 0, 0))
Board.RGB.show()
def setTargetColor(self, target_color):
""" Update detection color for gripper to grab """
self.__target_color = target_color
return (True, ())
def getAreaMaxContour(self, contours):
""" Find the contour with the largest area
'contours' is a list of contours to be compared
"""
contour_area_temp = 0
contour_area_max = 0
area_max_contour = None
for c in contours: # Traverse all contours
contour_area_temp = math.fabs(cv2.contourArea(c)) # Calculate the contour area
if contour_area_temp > contour_area_max:
contour_area_max = contour_area_temp
# Only when the area is greater than 300, the contour of the largest area is effective to filter interference
if contour_area_temp > 300:
area_max_contour = c
return area_max_contour, contour_area_max # Return the largest contour
def display_img(self, img, wname='default window name'):
""" show image in window with name """
img_disp = img.copy()
cv2.imshow(wname, img_disp)
key = cv2.waitKey(0)
return key
def testFind(self, color_range):
""" find objects in color range
useful for prototpying other colors or objects
color_range is a 2d tuple of 3d tuples: [(0, 151, 100), (255, 255, 255)]
returns location and if a color was found
updates the display image used in self.display
"""
img = self.latest_raw_img.copy() # img to transform in search for block
img_copy = img.copy()
frame_resize = cv2.resize(img_copy, self.size, interpolation=cv2.INTER_NEAREST)
frame_gb = cv2.GaussianBlur(frame_resize, (11, 11), 11)
frame_lab = cv2.cvtColor(frame_gb, cv2.COLOR_BGR2LAB) # Convert image to LAB space
self.display_img(frame_lab, 'lab frame')
frame_mask = cv2.inRange(frame_lab, color_range[0], color_range[1]) # Perform bit operations on the original image and mask
self.display_img(frame_mask, 'mask range frame')
# process imag e to reduce noise and find contours
opened = cv2.morphologyEx(frame_mask, cv2.MORPH_OPEN, np.ones((6, 6), np.uint8)) # Open operation
closed = cv2.morphologyEx(opened, cv2.MORPH_CLOSE, np.ones((6, 6), np.uint8)) # Closed operation
contours = cv2.findContours(closed, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)[-2] # Find the outline
areaMaxContour, area_max = self.getAreaMaxContour(contours) # Find the largest contour
found_color = False
loc = False
if area_max > 2500: # Have found the largest area
# identify rectangular region around blob and blob center
rect = cv2.minAreaRect(areaMaxContour)
box = np.int0(cv2.boxPoints(rect))
roi = getROI(box) # Get roi area
img_centerx, img_centery = getCenter(rect, roi, self.size, square_length) # Get the center coordinates of the block
world_x, world_y = convertCoordinate(img_centerx, img_centery, self.size) # Convert to real world coordinates
# draw outline of object
red_color = (0, 151, 100)
cv2.drawContours(img, [box], -1, red_color, 2) # draw with red
cv2.putText(img, '(' + str(world_x) + ',' + str(world_y) + ')', (min(box[0, 0], box[2, 0]), box[2, 1] - 10),
cv2.FONT_HERSHEY_SIMPLEX, 0.5, red_color, 1) # Draw center point in red
# save values to return
loc = (world_x, world_y,rect[2])
found_color = True
self.latest_display_img = img # save image for display with overlays
return loc, found_color
def findBlock(self):
    """ using an image, return expected block location and frame overlay
    if no block was found, will return False as location

    Returns (loc, found_color):
    - loc: (world_x, world_y, world_angle) tuple, or False when no
      sufficiently large blob of a targeted color was found.
    - found_color: the matched color name (a key of self.color_range),
      or False.
    Side effect: stores the annotated frame in self.latest_display_img.
    """
    img_copy = self.latest_raw_img.copy()  # img to transform in search for block
    img = img_copy.copy()  # img to display
    img_h, img_w = img_copy.shape[:2]
    # draw calibration overlay: a cross-hair through the image center
    cv2.line(img, (0, int(img_h / 2)), (img_w, int(img_h / 2)), (0, 0, 200), 1)
    cv2.line(img, (int(img_w / 2), 0), (int(img_w / 2), img_h), (0, 0, 200), 1)
    # resize, blur, and convert to lab colorspace
    frame_resize = cv2.resize(img_copy, self.size, interpolation=cv2.INTER_NEAREST)
    frame_gb = cv2.GaussianBlur(frame_resize, (11, 11), 11)
    frame_lab = cv2.cvtColor(frame_gb, cv2.COLOR_BGR2LAB)  # Convert image to LAB space
    areaMaxContour, area_max = (0,0)
    found_color = False
    for i in self.color_range:
        if i in self.__target_color:
            # mask color range for targeted color
            detect_color = i
            frame_mask = cv2.inRange(frame_lab, self.color_range[detect_color][0], self.color_range[detect_color][1])  # Perform bit operations on the original image and mask
            # process image to reduce noise and find contours
            opened = cv2.morphologyEx(frame_mask, cv2.MORPH_OPEN, np.ones((6, 6), np.uint8))  # Open operation
            closed = cv2.morphologyEx(opened, cv2.MORPH_CLOSE, np.ones((6, 6), np.uint8))  # Closed operation
            contours = cv2.findContours(closed, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)[-2]  # Find the outline
            areaMaxContour, area_max = self.getAreaMaxContour(contours)  # Find the largest contour
            if area_max > 2500:
                found_color = i
                break  # found color, move on
    if area_max > 2500:  # Have found the largest area
        # detect_color is always bound here: area_max > 2500 can only
        # happen after at least one loop iteration assigned it.
        rect = cv2.minAreaRect(areaMaxContour)
        box = np.int0(cv2.boxPoints(rect))
        # getROI / getCenter / convertCoordinate / square_length are
        # module-level helpers — presumably from imports above this
        # chunk; confirm.
        roi = getROI(box)  # Get roi area
        get_roi = True  # NOTE(review): assigned but never read in this method
        img_centerx, img_centery = getCenter(rect, roi, self.size, square_length)  # Get the center coordinates of the block
        world_x, world_y = convertCoordinate(img_centerx, img_centery, self.size)  # Convert to real world coordinates
        cv2.drawContours(img, [box], -1, self.range_rgb[detect_color], 2)
        cv2.putText(img, '(' + str(world_x) + ',' + str(world_y) + ')', (min(box[0, 0], box[2, 0]), box[2, 1] - 10),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, self.range_rgb[detect_color], 1)  # Draw center point
        world_angle = rect[2]
        loc = (world_x, world_y, world_angle)
    else:
        loc = False
    self.latest_display_img = img  # save image for display with overlays
    return loc, found_color
def detect(self, target_color=None, print_loc=False):
    """Run block detection on the most recent frame.

    Optionally replaces the active target color first, and can echo
    the detection result for debugging. Returns whatever findBlock()
    returns.
    """
    if target_color:  # a new target was supplied — make it current
        self.__target_color = target_color
    result = self.findBlock()
    if print_loc:
        print('Found block at location: ', result)
    return result
def display(self):
    """Show the latest annotated frame in the preview window.

    Returns the key code reported by OpenCV, or -1 when no key was
    pressed during the 1 ms wait.
    """
    cv2.imshow(self.window_name, self.latest_display_img)
    # waitKey is what lets the HighGUI event loop actually paint the
    # window (and keeps the Python kernel from locking up).
    return cv2.waitKey(1)
def save(self):
    """ save last image
    useful for streaming images in flask request in another thread
    uses window name to set filename
    """
    # BUG FIX: the filename must come from the window name (a string).
    # The previous code concatenated '.jpg' onto the image array itself
    # (self.latest_display_img + '.jpg'), which fails on an ndarray and
    # contradicts the documented behavior above.
    cv2.imwrite(self.window_name + '.jpg', self.latest_display_img)
def see(self):
    """Grab a fresh frame from the camera.

    Returns True when a valid frame was captured (and a private copy
    stored in self.latest_raw_img), False when the camera yielded None.
    """
    frame = self.camera.frame
    if frame is None:
        return False
    # Copy so later camera writes cannot mutate our working frame.
    self.latest_raw_img = frame.copy()
    return True
def close(self):
    """ shutdown gracefully """
    # Release the camera device first, then tear down any OpenCV windows.
    self.camera.camera_close()
    cv2.destroyAllWindows()
if __name__ == '__main__':
    # Demo: locate a red block in the live camera feed and label it
    # in the video display window.
    eye = Perception()
    target_color = ('red', )
    eye.setTargetColor(target_color)
    while True:
        if eye.see():  # only run detection when a valid frame arrived
            eye.detect()
            key = eye.display()
            # eye.save()
            if key == 27:  # 27 == ESC key exits the demo loop
                break
    eye.close()
|
{"/ResearchRobotics/arm_class.py": ["/ResearchRobotics/perception_class.py", "/ResearchRobotics/paw_class.py"]}
|
38,847
|
stoddabr/research_robotics_arm
|
refs/heads/master
|
/ResearchRobotics/final_project.py
|
"""
final_project.py
script for the final project logic main loop
"""
import sys
import numpy as np
sys.path.append('/home/pi/ArmPi/')
from paw_class import Paw
from perception import Perception, show_image, label_scene
import numpy as np
from ArmIK.Transform import convertCoordinate
import db_txt as db
if __name__ == '__main__':
    # BUG FIX: cv2 was referenced below but never imported in this file,
    # so cv2.destroyAllWindows() would raise NameError. Import it here,
    # inside the entry-point guard, alongside the other runtime deps.
    import cv2

    count = 0
    perception_obj = Perception()
    paw_obj = Paw()
    final_pos = (-15 + 0.5, 12 - 0.5, 1.5)  # x block home
    img_size = (640, 480)
    try:
        while True:
            # perception code: detect targets and display the labeled scene
            count = count + 1
            scene = perception_obj.get_all_targets()
            img = perception_obj.frame
            img = label_scene(img, scene)
            if img is not None:
                print(scene)
                show_image(img)
            # arm code: act on a pending grasp request from the text "db"
            graspInfo = db.getGraspDB()
            if 'x' in graspInfo:  # if grasp info updated
                # pick up object from set position
                x_pos = graspInfo['x']
                y_pos = graspInfo['y']
                a_pos = graspInfo['angle']
                world_x, world_y = convertCoordinate(x_pos, y_pos, img_size)  # Convert to real world coordinates
                print("Grabbing!", world_x, world_y, a_pos)
                paw_obj.grabAtXY(world_x, world_y, a_pos)
                # place block at set coord
                print("Placing!")
                paw_obj.placeAtXY(*final_pos)
                # reset paw db so the same request is not re-executed
                db.clearGraspDB()
    except KeyboardInterrupt:
        pass  # Ctrl-C is the expected way to stop the loop
    finally:
        # BUG FIX: this cleanup was previously unreachable — the while
        # loop never breaks — so the camera was never released.
        perception_obj.my_camera.camera_close()
        cv2.destroyAllWindows()
|
{"/ResearchRobotics/arm_class.py": ["/ResearchRobotics/perception_class.py", "/ResearchRobotics/paw_class.py"]}
|
38,848
|
stoddabr/research_robotics_arm
|
refs/heads/master
|
/ResearchRobotics/params.py
|
"""
params.py
Global parameters for use in modules
"""
class Paw:
closeAngle = 600
class Perception:
color_ranges = { # HSV
'red': [(0, 151, 100), (255, 255, 255)],
'green': [(0, 0, 0), (255, 122, 255)],
'blue': [(0, 0, 0), (255, 255, 122)],
}
min_cnt_area = 100
block_threshold = 47
starfish_threshold = 6
# HSV color ranges are below, but LAB work better.
# color_ranges={
# 'red': [(0, 151, 100), (80, 255, 230)],
# 'green': [(23, 52, 60), (87, 255, 255)],
# 'blue': [(90, 116, 0), (141, 255, 255)],
# }
|
{"/ResearchRobotics/arm_class.py": ["/ResearchRobotics/perception_class.py", "/ResearchRobotics/paw_class.py"]}
|
38,849
|
stoddabr/research_robotics_arm
|
refs/heads/master
|
/ResearchRobotics/db_txt.py
|
import json
from cv2 import imwrite
# db io functions
db_address_blobs = '_blobs.txt'
db_address_grasp = '_grasp.txt'
db_address_img = '_scene.jpg'
# io helpers
def updateDB(data, file):
    """Serialize *data* as JSON into *file*, replacing its contents."""
    with open(file, 'w') as out:
        json.dump(data, out)
def updateDBTxt(data, file):
    """Write raw text *data* into *file*, replacing its contents."""
    with open(file, 'w') as out:
        out.write(data)
def getDB(file):
    """Load and return the JSON document stored in *file*."""
    with open(file) as src:
        return json.load(src)
def updateDBImg(frame):
    # Persist the current scene frame to the shared image file so other
    # processes can read it.
    imwrite(db_address_img,frame)
# helper functions
# TODO validation
def updateBlockDB(data):
    # Store the latest detected-block data as JSON in the blobs file.
    updateDB(data, db_address_blobs)
def getBlocksDB():
    # Read back the latest detected-block data written by updateBlockDB.
    return getDB(db_address_blobs)
def updateGraspDB(data, as_text=False):
    """Persist grasp data — raw text when as_text is True, JSON otherwise."""
    writer = updateDBTxt if as_text else updateDB
    writer(data, db_address_grasp)
def getGraspDB():
    # Read the latest grasp request (consumers check for an 'x' key).
    return getDB(db_address_grasp)
def clearGraspDB():
    """Reset the grasp file to a sentinel value so a stale request is
    not executed twice."""
    updateDB({'grasp': 'reset'}, db_address_grasp)
|
{"/ResearchRobotics/arm_class.py": ["/ResearchRobotics/perception_class.py", "/ResearchRobotics/paw_class.py"]}
|
38,949
|
observablehq/observable-jupyter
|
refs/heads/master
|
/tests/__init__.py
|
"""Unit test package for observable_jupyter_embed."""
|
{"/observable_jupyter/__init__.py": ["/observable_jupyter/jupyter_embed.py"], "/tests/test_embed.py": ["/observable_jupyter/__init__.py"]}
|
38,950
|
observablehq/observable-jupyter
|
refs/heads/master
|
/observable_jupyter/__init__.py
|
"""Top-level package for observable-jupyter-embed."""
__author__ = """Observable, Inc."""
__email__ = "support@observablehq.com"
__version__ = "0.1.7"
from .jupyter_embed import embed
|
{"/observable_jupyter/__init__.py": ["/observable_jupyter/jupyter_embed.py"], "/tests/test_embed.py": ["/observable_jupyter/__init__.py"]}
|
38,951
|
observablehq/observable-jupyter
|
refs/heads/master
|
/observable_jupyter/jupyter_embed.py
|
__all__ = ["embed"]
import json
import random
import html
import html.entities as entities
import pkg_resources
from typing import List, Dict
# Load the prebuilt JS bundles and logo shipped inside the package, once
# at import time.
iframe_bundle_fname = pkg_resources.resource_filename(
    "observable_jupyter", "iframe_bundle.js"
)
iframe_bundle_src = open(iframe_bundle_fname).read()
wrapper_bundle_fname = pkg_resources.resource_filename(
    "observable_jupyter", "wrapper_bundle.js"
)
wrapper_bundle_src = open(wrapper_bundle_fname).read()
logo_fname = pkg_resources.resource_filename("observable_jupyter", "logo.svg")
logo_src = open(logo_fname).read()
# Reverse mapping of HTML5 named entities (char -> entity name).
# NOTE(review): appears unused in this module chunk — confirm before removing.
escapes = {v: k for k, v in entities.html5.items()}
def escape(s):
    """Escape template string syntax.

    Backslash, backtick and dollar are the three characters with special
    meaning inside a JavaScript template literal; prefix each with a
    backslash in a single translation pass.
    """
    return s.translate(str.maketrans({"\\": r"\\", "`": r"\`", "$": r"\$"}))
# display/HTML are how the embed reaches the notebook front-end; outside
# a Jupyter environment we warn and re-raise the original ImportError.
try:
    from IPython.display import display, HTML
except ImportError:
    print("Expected a Jupyter environment.")
    raise
def embed(
    slug: str, cells: List[str] = None, inputs: Dict = None, display_logo=True
) -> None:
    """Embeds a set of cells or an entire Observable notebook.

    :param slug: notebook identifier; slugs starting with "d/" are shown
        as "embedded notebook" in the logo overlay.
    :param cells: names of cells to render; None embeds the whole notebook.
    :param inputs: mapping of cell name -> Python value injected into the
        notebook (serialized via jsonify / DataJSONEncoder).
    :param display_logo: when True, overlay an "Edit on Observable" link.
    """
    # A cell may be rendered *or* receive a value, never both.
    if cells and inputs and set(cells) & set(inputs):
        raise ValueError(
            f"specify cell names as output or input, not both: {set(cells) & set(inputs)}"
        )
    jsonified_inputs = jsonify(inputs or {})
    pretty_slug = "embedded notebook" if slug.startswith("d/") else slug
    if cells:
        for cell in cells:
            if not isinstance(cell, str):
                raise ValueError("Cell names should be strings.")
    # Brackets in Python f-strings are escaped by using two brackets: { -> {{, } -> }}
    # The complete HTML document rendered inside the sandboxed iframe.
    iframe_src = f"""<!DOCTYPE html>
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/@observablehq/inspector@3/dist/inspector.css">
<style>
body {{
margin: 0;
}}
</style>
<script>
{iframe_bundle_src}
</script>
<div style="overflow: auto;"></div>
<script type="module">
const inputs = {jsonified_inputs};
const slug = '{slug}';
const into = document.getElementsByTagName('div')[0];
const cells = {repr(cells) if cells else "undefined"}
ObservableJupyterIframe.embed(slug, into, cells, inputs).then(m => {{window.main = m;}});
ObservableJupyterIframe.monitor()
window.addEventListener('unload', () => {{
if (typeof window.main !== 'undefined') {{
window.main._runtime.dispose();
}}
}});
</script>
"""
    # Optional "Edit <notebook> on Observable" overlay linking back to
    # the source notebook.
    link_back = (
        f"""
<style>
.observable-logo {{
position: absolute;
bottom: 0;
right: 0;
margin-bottom: 5px;
margin-right: 1px;
transition: background-color 0.2s;
}}
.observable-logo svg {{
opacity: 0.5;
transition: opacity 0.2s;
}}
.observable-logo span {{
opacity: 0;
transition: opacity 0.2s;
padding-right: .2em;
padding-left: .2em;
}}
.observable-logo:hover {{
background-color: white;
}}
.observable-logo:hover span {{
opacity: .8;
}}
.observable-logo:hover svg {{
opacity: .8;
}}
.observable-link:hover ~ iframe {{
outline: solid 1px #E0E0E0;
box-shadow: 0 0 3px;
transition: box-shadow 0.2s;
}}
.observable-link ~ iframe {{
outline: none;
}}
/* Colab-only rule - untested */
body > .output-area > .output-body {{
margin-right: 2px;
}}
</style>
<a class="observable-link" href="https://observablehq.com/{slug}" target="_blank" style="text-decoration: none; color: inherit;">
<div class="observable-logo" style="display: flex; align-items: center; justify-content: flex-end;">
<span>Edit {pretty_slug} on Observable</span>
{logo_src}
</div>
</a>
"""
        if display_logo
        else ""
    )
    iframe_id = f"observable-embed-div-{str(random.random())[2:]}"
    # To sidestep the (apparently buggy?) parsing that Jupyter does
    # of script tags in template strings, add the script tags in JavaScript.
    # escape() guards backslash/backtick/dollar because the document is
    # injected below through a JS template literal.
    iframe_src_script_escaped = escape(
        iframe_src.replace("<script>", "OPENSCRIPT").replace("</script>", "CLOSESCRIPT")
    )
    iframe_wrapper = f"""<div style="text-align: right; position: relative">
{link_back}
<iframe id="{iframe_id}" sandbox="allow-scripts" style="overflow: auto; min-width: 100%; width: 0px;" frameBorder="0"></iframe>
</div>
<script>
{wrapper_bundle_src}
</script>
<script>
iframeSrc = `{iframe_src_script_escaped}`.replace(/OPENSCRIPT/gi, '<sc' + 'ript>').replace(/CLOSESCRIPT/gi, '</sc' + 'ript>')
document.getElementById('{iframe_id}').srcdoc = iframeSrc;
ObservableJupyterWrapper.listenToSize(document.getElementById('{iframe_id}'));
</script>
"""
    display(HTML(iframe_wrapper))
def jsonify(obj):
    # Serialize with the DataFrame-aware encoder defined in this module.
    return json.dumps(obj, cls=DataJSONEncoder)
class DataJSONEncoder(json.JSONEncoder):
    """JSON encoder that understands pandas DataFrames.

    DataFrames are emitted as a list of row-record objects. Any other
    unsupported type defers to the base class, which raises TypeError.
    """

    def default(self, obj):
        if type(obj).__name__ == "DataFrame":  # Pandas DataFrame (duck check avoids a hard pandas import)
            # BUG FIX: previously this returned json.dumps(...), so the
            # DataFrame arrived as a doubly-encoded JSON *string* instead
            # of a JSON array of record objects.
            return obj.to_dict(orient="records")
        # BUG FIX: falling off the end returned None, which silently
        # encoded every unknown type as null; defer to the base class so
        # unsupported types raise TypeError per the JSONEncoder contract.
        return super().default(obj)
|
{"/observable_jupyter/__init__.py": ["/observable_jupyter/jupyter_embed.py"], "/tests/test_embed.py": ["/observable_jupyter/__init__.py"]}
|
38,952
|
observablehq/observable-jupyter
|
refs/heads/master
|
/setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages # type: ignore
# The README doubles as the PyPI long description.
with open("README.md") as readme_file:
    readme = readme_file.read()

setup(
    author="Observable, Inc.",
    author_email="support@observablehq.com",
    python_requires=">=3.6",
    classifiers=[
        "Development Status :: 2 - Pre-Alpha",
        "License :: OSI Approved :: ISC License (ISCL)",
        "Natural Language :: English",
        "Programming Language :: Python :: 3",
    ],
    description="Embed Observable cells hosted on observablehq.com into Jupyter notebooks.",
    install_requires=[],
    license="ISC license",
    long_description=readme,
    long_description_content_type="text/markdown",
    # include_package_data ships the JS bundles / logo referenced at runtime
    include_package_data=True,
    keywords="observable_jupyter",
    name="observable_jupyter",
    packages=find_packages(include=["observable_jupyter", "observable_jupyter.*"]),
    setup_requires=[],
    url="https://github.com/observablehq/observable_jupyter",
    version="0.1.7",
    zip_safe=False,
)
|
{"/observable_jupyter/__init__.py": ["/observable_jupyter/jupyter_embed.py"], "/tests/test_embed.py": ["/observable_jupyter/__init__.py"]}
|
38,953
|
observablehq/observable-jupyter
|
refs/heads/master
|
/tests/test_embed.py
|
from observable_jupyter import embed
def test_observable_jupyter_embed():
    # Smoke test: the package imports and exposes its public entry point.
    assert embed is not None
|
{"/observable_jupyter/__init__.py": ["/observable_jupyter/jupyter_embed.py"], "/tests/test_embed.py": ["/observable_jupyter/__init__.py"]}
|
38,956
|
Joffreybvn/lidario
|
refs/heads/master
|
/lidario/__init__.py
|
from lidario.translator import Translator
from lidario.metadata_reader import MetadataReader
|
{"/lidario/__init__.py": ["/lidario/translator.py", "/lidario/metadata_reader.py"], "/lidario/metadata_reader.py": ["/lidario/io/__init__.py"], "/main.py": ["/lidario/__init__.py"], "/lidario/translator.py": ["/lidario/io/__init__.py"], "/lidario/io/__init__.py": ["/lidario/io/input_handler.py", "/lidario/io/output_handler.py"]}
|
38,957
|
Joffreybvn/lidario
|
refs/heads/master
|
/setup.py
|
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
# The directory containing this file
HERE = path.abspath(path.dirname(__file__))

# Get the long description from the README file
with open(path.join(HERE, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()

# This call to setup() does all the work.
setup(
    name="lidario",
    version="0.3.0",
    description="High-level python library to manipulate LIDAR raster and point cloud",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://lidario.readthedocs.io/",
    author="Joffrey Bienvenu",
    author_email="joffreybvn@gmail.com",
    license="MIT",
    classifiers=[
        "Development Status :: 4 - Beta",
        "Intended Audience :: Developers",
        "Intended Audience :: Education",
        "Intended Audience :: Science/Research",
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Operating System :: OS Independent",
        "Topic :: Scientific/Engineering :: GIS",
        "Topic :: Scientific/Engineering :: Image Processing",
        "Topic :: Utilities"
    ],
    # Explicit package list (no find_packages): lidario + its io subpackage.
    packages=["lidario", "lidario.io"],
    include_package_data=True,
    install_requires=["pandas", "numpy", "rasterio", "plyfile", "pytz"]
)
|
{"/lidario/__init__.py": ["/lidario/translator.py", "/lidario/metadata_reader.py"], "/lidario/metadata_reader.py": ["/lidario/io/__init__.py"], "/main.py": ["/lidario/__init__.py"], "/lidario/translator.py": ["/lidario/io/__init__.py"], "/lidario/io/__init__.py": ["/lidario/io/input_handler.py", "/lidario/io/output_handler.py"]}
|
38,958
|
Joffreybvn/lidario
|
refs/heads/master
|
/lidario/metadata_reader.py
|
from lidario.io import InputHandler
class MetadataReader:
    """
    Read raster metadata from a supported input source.

    :param input_type: Type of raster data provided: "**geotiff**" or "**mask**".

    - "geotiff": a .tif raster file.
    - "mask", a *rasterio.mask.mask()* result.

    :type input_type: str
    """

    def __init__(self, input_type):
        # Delegate parsing of the concrete input format to InputHandler.
        self.input_handler = InputHandler(input_type)

    def get_metadata(self, input_values):
        """
        Retrieve and return the metadata from a given "input_values".

        :param input_values: Data values to translate. Depend on the
            reader's "input_type" parameter: a .tif path for "geotiff",
            or the np.array returned by rasterio.mask.mask() for "mask".
        :return: A dictionary of the metadata.
        :rtype: dict
        """
        # First argument False => InputHandler returns metadata only,
        # without reading the raster band itself.
        return self.input_handler.load(False, input_values)
|
{"/lidario/__init__.py": ["/lidario/translator.py", "/lidario/metadata_reader.py"], "/lidario/metadata_reader.py": ["/lidario/io/__init__.py"], "/main.py": ["/lidario/__init__.py"], "/lidario/translator.py": ["/lidario/io/__init__.py"], "/lidario/io/__init__.py": ["/lidario/io/input_handler.py", "/lidario/io/output_handler.py"]}
|
38,959
|
Joffreybvn/lidario
|
refs/heads/master
|
/main.py
|
import lidario as lio
import rasterio
from rasterio.mask import mask
import numpy as np
# TODO:
# Translator return multiple data structure
# Object to retrieve only metadata
# Increase raster resolution
# dtm = rasterio.open("/media/becode/Elements/wallonia-ml/lidar/TIF Wallonie 2013-2014/DSM/DSM_BRABANT_WALLON/RELIEF_WALLONIE_MNS_2013_2014.tif")
if __name__ == '__main__':
    # Example kept for reference: reading metadata only.
    """
    reader = lio.MetadataReader("tif")
    metadata = reader.get_metadata("./tests/assets/1.tif")
    print(metadata)
    """
    #shape = [{'type': 'Polygon', 'coordinates': [[(182545.32672299084, 162803.11793349683), (182540.70250971318, 162801.0220066402), (182539.92463073373, 162803.0260931747), (182537.93350985483, 162802.24429725204), (182534.55473017017, 162809.74636963103), (182541.08707224843, 162812.70320118777), (182545.32672299084, 162803.11793349683)]]}]
    #mask_values = rasterio.mask.mask(dtm, shapes=shape, all_touched=True, crop=True)
    # Translate a sample GeoTIFF to a .ply file: positional args are
    # (input_values, out_file, out_format) per Translator.translate().
    translator = lio.Translator("tif", "ply", metadata=False)
    result = translator.translate("./tests/assets/1.tif", "points_ascii", "ascii")
    print(result)
|
{"/lidario/__init__.py": ["/lidario/translator.py", "/lidario/metadata_reader.py"], "/lidario/metadata_reader.py": ["/lidario/io/__init__.py"], "/main.py": ["/lidario/__init__.py"], "/lidario/translator.py": ["/lidario/io/__init__.py"], "/lidario/io/__init__.py": ["/lidario/io/input_handler.py", "/lidario/io/output_handler.py"]}
|
38,960
|
Joffreybvn/lidario
|
refs/heads/master
|
/lidario/translator.py
|
import numpy as np
from lidario.io import InputHandler, OutputHandler
class Translator:
    """
    Instantiate a Translator object which will handle the translation between
    given input and desired output type.

    :param input_type: Type of raster data provided: "**geotiff**" or "**mask**".

    - "geotiff": a .tif raster file.
    - "mask", a *rasterio.mask.mask()* result.

    :param output_type: Type of point cloud data to return: "**csv**",
        "**ply**", "**numpy**", "**pandas**", "**dictionary**", "**list**",
        "**tuple**".

    - "csv": a CSV file.
    - "ply": a .ply file.
    - "numpy": a Numpy array. Alternatives: "np", "array".
    - "dataframe": A Pandas dataframe: Alternatives: "pandas", "pd", "df".
    - "dictionary": A pure Python dictionary: Alternative: "dict".
    - "list" a pure Python list.
    - "tuple": a pure Python tuple.

    :param affine_transform: If True (default), apply an affine
        geo-transformation to the translated coordinates.
    :param metadata: If True, the "translate" method will return a tuple
        with the point cloud and the metadata. If False (default), it will
        only return the point cloud.
    :type input_type: str
    :type output_type: str
    :type affine_transform: bool, optional
    :type metadata: bool, optional
    """

    def __init__(self, input_type, output_type, affine_transform=True, metadata=False):
        # Handle the input and output files/objects
        self.input_handler = InputHandler(input_type)
        self.output_handler = OutputHandler(output_type)
        # True point cloud has to be geo-transformed
        self.affine_transform = affine_transform
        self.return_metadata = metadata

    def translate(self, input_values, out_file="output", out_format="binary",
                  no_data=None, decimal=None, transpose=False, band=1):
        """
        Translate a given "input_values" into a X, Y, Z point cloud.

        :param input_values: Data values to translate. Depend on the
            Translator's "input_type" parameter: a .tif path (str) for
            "geotiff", or the np.array returned by rasterio.mask.mask()
            for "mask".
        :param out_file: File name *stem* used when output_type is "csv"
            or "ply"; the saver appends the matching extension itself.
            Optional, default: "output".
            (BUG FIX: the previous default, "output1.csv", contradicted
            the documented default and produced files named
            "output1.csv.csv" once the saver appended its extension.)
        :param out_format: Data format to save the file: "**binary**"
            (default) or "**ascii**" (not recommended, may be slow). Used
            only when "ply" is specified as "output_type". Optional.
        :param no_data: Value to exclude from the translation. Defaults
            to the nodata value stored in the tif file (or -9999 when
            that is missing; "mask" input always defaults to -9999).
        :param decimal: Round the coordinate numbers to the given decimal.
            Default: None.
        :param transpose: If True, transpose the coordinates. Default: False.
        :param band: Band of the raster to translate ("geotiff" input
            only). Default: 1.
        :type input_values: str or np.array
        :type out_file: str, optional
        :type out_format: str, optional
        :type no_data: int, optional
        :type decimal: int, optional
        :type transpose: bool, optional
        :type band: int, optional
        :return: The translated point cloud, typed as specified (None for
            "csv"/"ply", which write files instead). If the Translator's
            "metadata" is True, a (point_cloud, metadata) tuple.
        """
        # Load the raster and metadata
        raster, metadata = self.input_handler.load(True, input_values, band)
        if no_data is None:
            no_data = metadata['nodata']
        # Create a (x, y, z) point cloud from raster data
        x, y, z = self.__create_xyz_points(raster, no_data)
        # Geo-transform the pixel indices into world coordinates
        if self.affine_transform:
            x, y = self.__affine_geo_transformation(x, y, metadata['transform'])
        # Round the numbers
        if decimal is not None:
            x, y, z = self.__round(x, y, z, decimal)
        # Save/convert through the configured output handler
        point_cloud = self.output_handler.save(x, y, z, out_file, out_format, transpose)
        # If self.return_metadata is True, return the metadata too
        if self.return_metadata:
            return point_cloud, metadata
        return point_cloud

    @staticmethod
    def __create_xyz_points(raster, no_data=-9999):
        """
        Infer x, y, z points from raster data.

        :param raster: Raster data as numpy array.
        :param no_data: No data value of the raster.
        :type raster: np.array
        :type no_data: int
        :return: Tuple of np.array containing the point cloud: (x, y, z).
        :rtype: tuple
        """
        # np.where yields (row, col) == (y, x) index arrays
        y, x = np.where(raster != no_data)
        z = np.extract(raster != no_data, raster)
        return x, y, z

    @staticmethod
    def __affine_geo_transformation(x, y, gtr):
        """
        Create affine geo-transformed x and y.

        An affine transformation preserves collinearity and ratios of
        distances. It replaces the point cloud into its original
        space of coordinates.

        :param x: X-array of coordinates.
        :param y: Y-array of coordinates.
        :param gtr: Affine geo-transformation data.
        :return: gtr_x, gtr_y, the geo-transformed x and y, as np.array.
        :rtype: tuple
        """
        # https://gdal.org/user/raster_data_model.html#affine-geotransform
        # Affine transformation rewritten for rasterio; the +0.5 shifts
        # from pixel corner to pixel center.
        gtr_x = gtr[2] + (x + 0.5) * gtr[0] + (y + 0.5) * gtr[1]
        gtr_y = gtr[5] + (x + 0.5) * gtr[3] + (y + 0.5) * gtr[4]
        return gtr_x, gtr_y

    @staticmethod
    def __round(x, y, z, decimal):
        # Round all three coordinate arrays to the requested precision.
        return np.around(x, decimal),\
               np.around(y, decimal),\
               np.around(z, decimal)
|
{"/lidario/__init__.py": ["/lidario/translator.py", "/lidario/metadata_reader.py"], "/lidario/metadata_reader.py": ["/lidario/io/__init__.py"], "/main.py": ["/lidario/__init__.py"], "/lidario/translator.py": ["/lidario/io/__init__.py"], "/lidario/io/__init__.py": ["/lidario/io/input_handler.py", "/lidario/io/output_handler.py"]}
|
38,961
|
Joffreybvn/lidario
|
refs/heads/master
|
/lidario/io/__init__.py
|
from lidario.io.input_handler import InputHandler
from lidario.io.output_handler import OutputHandler
|
{"/lidario/__init__.py": ["/lidario/translator.py", "/lidario/metadata_reader.py"], "/lidario/metadata_reader.py": ["/lidario/io/__init__.py"], "/main.py": ["/lidario/__init__.py"], "/lidario/translator.py": ["/lidario/io/__init__.py"], "/lidario/io/__init__.py": ["/lidario/io/input_handler.py", "/lidario/io/output_handler.py"]}
|
38,962
|
Joffreybvn/lidario
|
refs/heads/master
|
/lidario/io/output_handler.py
|
import pandas as pd
import numpy as np
from plyfile import PlyData, PlyElement
from typing import Callable, Iterable
class OutputHandler:
    """Dispatch point-cloud saving/conversion to the output format chosen
    at construction time."""

    def __init__(self, output_type):
        # Resolve the saver callable once; an unknown output_type raises
        # KeyError here, at construction time.
        self.saver: Callable = self.__create_saver(output_type)

    def save(self, x, y, z, out_file, out_format, transpose):
        """
        Execute the save function.
        """
        return self.saver(x, y, z, out_file=out_file, out_format=out_format,
                          transpose=transpose)

    def __create_saver(self, output_type) -> Callable:
        """
        Associate the right saver function to the "save" function of
        this class. This function is executed during the initialization
        phase.

        :param output_type: Type of point cloud to return. See
            "Translator" init() function for more details.
        :type: output_type: str
        :return: The save function associated with the given "output_type".
        """
        savers = {
            # Files
            "csv": self.__save_csv,
            "ply": self.__save_ply,
            # Pandas dataframe
            "dataframe": self.__save_dataframe,
            "pandas": self.__save_dataframe,
            "pd": self.__save_dataframe,
            "df": self.__save_dataframe,
            # Numpy array
            "numpy": self.__save_numpy,
            "np": self.__save_numpy,
            "array": self.__save_numpy,
            # Dictionary
            "dictionary": self.__save_dictionary,
            "dict": self.__save_dictionary,
            # Other Python data structures
            "list": self.__save_list,
            "tuple": self.__save_tuple
        }
        return savers[output_type]

    # File savings
    # -------------------------------------------------------------------------

    def __save_csv(self, x, y, z, transpose=False, out_file="output", **kwargs) -> None:
        """
        Create a CSV file from a given x, y, z point cloud.

        :return: None
        """
        # Create a pandas dataframe and save it to CSV.
        # NOTE(review): ".csv" is appended unconditionally, so an out_file
        # that already carries the extension becomes "name.csv.csv".
        out_file += ".csv"
        self.__save_dataframe(x, y, z, transpose).to_csv(out_file, index=False)

    def __save_ply(self, x, y, z, out_file="output", out_format="binary", **kwargs) -> None:
        """
        Create a .ply file from a given x, y, z point cloud.

        :return: None
        """
        def to_tuples(array: np.array) -> Iterable:
            """Return an iterable of tuple from a given (n, 3) np.array"""
            shape = array.shape[0]
            for i in range(shape):
                yield tuple(array[i])

        # Create a numpy array and set a custom dtype
        raw_array = self.__save_numpy(x, y, z)
        # Transform it with a custom dtype (32-bit floats per coordinate)
        np_array = np.fromiter(
            to_tuples(raw_array),
            dtype=[('x', 'f4'), ('y', 'f4'), ('z', 'f4')]
        )
        # Instantiate a PlyElement with the numpy array.
        # NOTE(review): the extended out_file (including ".ply") is also
        # used as the PlyElement *name* — confirm this is intentional.
        out_file += ".ply"
        ply = PlyElement.describe(np_array, out_file)
        # If out_type is "ascii", write a text file
        if out_format == "ascii":
            PlyData([ply], text=True).write(out_file)
        # Else, write a binary file
        else:
            PlyData([ply]).write(out_file)

    # Python data structure savings
    # -------------------------------------------------------------------------

    def __save_dictionary(self, x, y, z, transpose=False, **kwargs) -> dict:
        """
        Create a dictionary of points, from a previously created
        pandas dataframe. This function can be very slow.

        :return: A dictionary of the point cloud.
        :rtype: dict
        """
        # Return a dictionary of x, y and z
        return self.__save_dataframe(x, y, z, transpose).to_dict()

    def __save_list(self, x, y, z, transpose=False, **kwargs) -> list:
        """
        Create a (n, 3) np.array and convert it into a pure Python list of
        points [x, y, z]. If "transpose" is set to True, return a list of
        len = 3, with all x, y and z values stored separately.

        :return: A list of points [[x, y, z], ...]
        :rtype: list
        """
        # Create a numpy array and transform it into a list
        return self.__save_numpy(x, y, z, transpose).tolist()

    def __save_tuple(self, x, y, z, transpose=False, **kwargs) -> tuple:
        """
        Create a (n, 3) np.array and convert it into a pure Python tuple of
        points (x, y, z). If "transpose" is set to True, return a tuple of
        len = 3, with all x, y and z values stored separately.

        :return: A tuple of points ((x, y, z), ...)
        :rtype: tuple
        """
        # Create a numpy array and transform it into a tuple
        # https://www.geeksforgeeks.org/python-convert-list-of-lists-to-tuple-of-tuples/
        return tuple(map(tuple, self.__save_numpy(x, y, z, transpose)))

    # C-based data structure savings
    # -------------------------------------------------------------------------

    def __save_dataframe(self, x, y, z, transpose=False, **kwargs) -> pd.DataFrame:
        """
        Create a (n, 3) pandas dataframe of the points. By default,
        each point [x, y, z] is written on a new row. If transpose is
        set to True, points are written on columns.

        :return: A pandas dataframe of the point cloud.
        :rtype: pd.DataFrame
        """
        # Get a numpy array of [x, y, z]
        data = self.__save_numpy(x, y, z, transpose)
        # If transpose, set each [x, y, z] point on columns (horizontally)
        if transpose:
            return pd.DataFrame(data=data, index=['x', 'y', 'z'])
        # If not, set each [x, y, z] point on rows (vertically)
        return pd.DataFrame(data=data, columns=['x', 'y', 'z'])

    @staticmethod
    def __save_numpy(x, y, z, transpose=False, **kwargs) -> np.array:
        """
        Create a numpy array of shape (n, 3), filled with x, y and z.
        If "transpose" is set to True, return a numpy array of shape (3, n).

        :return: np.array matrix of shape (n, 3).
        :rtype: np.array
        """
        # Create a numpy array of shape (n, 3) with x, y, z
        np_array = np.column_stack((x, y, z))
        # If True, transpose the array to (3, n)
        if transpose:
            np_array = np.transpose(np_array)
        return np_array
|
{"/lidario/__init__.py": ["/lidario/translator.py", "/lidario/metadata_reader.py"], "/lidario/metadata_reader.py": ["/lidario/io/__init__.py"], "/main.py": ["/lidario/__init__.py"], "/lidario/translator.py": ["/lidario/io/__init__.py"], "/lidario/io/__init__.py": ["/lidario/io/input_handler.py", "/lidario/io/output_handler.py"]}
|
38,963
|
Joffreybvn/lidario
|
refs/heads/master
|
/lidario/io/input_handler.py
|
import rasterio
import numpy as np
class InputHandler:
    """Load raster data and/or metadata from a configurable source type."""

    def __init__(self, input_type):
        # Resolve the loader once; an unknown input_type raises KeyError
        # immediately, at construction time.
        self.loader = self.__create_loader(input_type)

    def load(self, raster, input_values, band=None):
        """
        Load the configured input.

        :param raster: If True, return a tuple of (raster, metadata).
            If False, return only the metadata.
        """
        # Both keyword aliases receive the same value; each concrete
        # loader reads the one it understands and drops the rest via
        # **kwargs.
        return self.loader(raster=raster,
                           input_raster=input_values,
                           rasterio_mask=input_values,
                           band=band)

    def __create_loader(self, input_type):
        # Map every accepted input_type spelling onto its loader.
        return {
            # Tif files
            "tif": self.__load_tif,
            "tiff": self.__load_tif,
            "geotiff": self.__load_tif,
            # Rasterio mask
            "mask": self.__load_rasterio_mask,
        }[input_type]

    @staticmethod
    def __load_tif(raster=True, input_raster=None, band=1, **kwargs):
        """
        Open a GeoTIFF with rasterio and return its band data and/or
        metadata (a rasterio DatasetReader's .meta dictionary).
        """
        reader = rasterio.open(input_raster)
        metadata = reader.meta
        # Fall back to -9999 when the file declares no nodata value
        if metadata['nodata'] is None:
            metadata['nodata'] = -9999
        if raster:
            return reader.read(band), metadata
        return metadata

    @staticmethod
    def __load_rasterio_mask(raster=True, rasterio_mask=None, **kwargs):
        """
        Unpack a rasterio.mask.mask() result into (raster, metadata).
        """
        # A mask result is (image, affine transform); it carries no
        # nodata value, so -9999 is used by convention.
        out_image, transform = rasterio_mask
        metadata = {'nodata': -9999, 'transform': transform}
        if raster:
            return np.squeeze(out_image), metadata
        return metadata
|
{"/lidario/__init__.py": ["/lidario/translator.py", "/lidario/metadata_reader.py"], "/lidario/metadata_reader.py": ["/lidario/io/__init__.py"], "/main.py": ["/lidario/__init__.py"], "/lidario/translator.py": ["/lidario/io/__init__.py"], "/lidario/io/__init__.py": ["/lidario/io/input_handler.py", "/lidario/io/output_handler.py"]}
|
38,974
|
JenBanks8585/Labs_CitySpireDS
|
refs/heads/main
|
/app/realtybasemodel.py
|
import os
import requests
from dotenv import load_dotenv
from fastapi import APIRouter, Depends
import sqlalchemy
from pydantic import BaseModel, SecretStr
from app import config
# FastAPI sub-router; mounting is commented out in app/main.py.
router = APIRouter()
# RapidAPI credentials. NOTE(review): this module never calls load_dotenv(),
# so 'api_key'/'host' must already be in the environment — confirm.
headers = {'x-rapidapi-key': os.getenv('api_key'),
           'x-rapidapi-host': os.getenv('host') }
class RentalList(BaseModel):
    """Request model for the /for_rent_list_base endpoint.

    Defaults are read from app config at import time.
    """
    api_key: SecretStr = config.settings.api_key  # not forwarded upstream
    city: str = "New York"
    state: str = "NY"
    prop_type: str = "condo"  # 'condo', 'single_family' or 'multi_family'
    limit: int = 5  # number of results to populate
@router.get('/for_rent_list_base')
async def for_rent_list_base(rentallist: RentalList):
    """
    Fetch properties for rent from the RapidAPI realty service.

    Parameters:
        rentallist: RentalList model (api_key, city, state, prop_type, limit)
    Returns:
        raw JSON payload from the upstream service

    NOTE(review): FastAPI treats a BaseModel parameter on a GET route as a
    request body, which many HTTP clients cannot send — confirm intended use.
    The api_key field is never forwarded to the upstream request.
    """
    url = os.getenv('url_list_for_rent')
    querystring = {"city": rentallist.city,
                   "state_code": rentallist.state,
                   "limit": rentallist.limit,
                   "offset": "0",
                   "sort":"relevance",
                   "prop_type": rentallist.prop_type}
    response_for_rent = requests.request("GET", url, params = querystring, headers = headers,)
    return response_for_rent.json()
|
{"/app/realty.py": ["/app/walk_score.py"]}
|
38,975
|
JenBanks8585/Labs_CitySpireDS
|
refs/heads/main
|
/app/realty.py
|
"""Realty Info"""
import os
import requests
from dotenv import load_dotenv
from fastapi import APIRouter, Depends
import sqlalchemy
from pydantic import BaseModel, SecretStr
from app import config
from app.walk_score import *
load_dotenv()  # pull RapidAPI credentials from .env into the environment
router = APIRouter()
# Shared headers for every RapidAPI request in this module.
headers = {'x-rapidapi-key': os.getenv('api_key'),
           'x-rapidapi-host': os.getenv('host') }
@router.get('/streamlined_rent_list')
async def streamlined_rent_list(api_key = config.settings.api_key,
                                city: str = "New York City",
                                state: str= "NY",
                                prop_type: str = "condo",
                                limit: int = 4):
    """
    Condensed rental listing enriched with walkability scores.

    Parameters:
        api_key
        city: str
        state: str
        prop_type: str ('condo', 'single_family', 'multi_family')
        limit: int number of results to populate
    Returns:
        list of dicts: address, lat, lon, city, state, photos, walk_score
    """
    url = os.getenv('url_list_for_rent')
    querystring = {"city": city,
                   "state_code": state,
                   "limit": limit,
                   "offset": "0",
                   "sort": "relevance",
                   "prop_type": prop_type}
    response_for_rent = requests.request("GET", url, params=querystring, headers=headers)
    response = response_for_rent.json()['properties']
    rental_list = []
    # Fix: the API may return fewer than `limit` properties; iterating over
    # range(limit) raised IndexError in that case. Also avoid shadowing the
    # `city`/`state` parameters inside the loop.
    for prop in response[:limit]:
        addr = prop['address']
        line = addr['line']
        prop_city = addr['city']
        prop_state = addr['state']
        lat = addr['lat']
        lon = addr['lon']
        photos = prop['photos']
        address = line + " " + prop_city + " " + prop_state
        walk_score = just_walk_score(address, lat, lon)
        rental_list.append({'address': address,
                            'lat': lat,
                            'lon': lon,
                            'city': prop_city,
                            'state': prop_state,
                            'photos': photos,
                            'walk_score': walk_score})
    return rental_list
@router.get('/for_rent_list')
async def for_rent_list(api_key = config.settings.api_key,
                        city: str = "New York City",
                        state: str= "NY",
                        prop_type: str = "condo",
                        limit: int = 4):
    """
    List properties for rent via the RapidAPI realty service.

    Parameters:
        api_key
        city / state / prop_type ('condo', 'single_family', 'multi_family')
        limit: number of results to populate
    Returns:
        the upstream 'properties' JSON array
    """
    query = {
        "city": city,
        "state_code": state,
        "limit": limit,
        "offset": "0",
        "sort":"relevance",
        "prop_type": prop_type,
    }
    rent_response = requests.request("GET", os.getenv('url_list_for_rent'),
                                     params=query, headers=headers)
    return rent_response.json()['properties']
@router.get('/for_rent_list/{property_id}')
async def property_detail(property_id: str = "O3599084026"):
    """
    Fetch detailed information for one property.

    Parameters:
        property_id
    Returns:
        the upstream 'properties' JSON payload
    """
    detail_response = requests.request(
        "GET",
        os.getenv('url_property_detail'),
        headers=headers,
        params={"property_id": property_id},
    )
    return detail_response.json()['properties']
@router.get('/for_sale_list')
async def for_sale_list(api_key = config.settings.api_key,
                        city = "New York City",
                        state= "NY",
                        limit = 4):
    """List properties for sale in the given city/state."""
    query = {"city": city, "limit": limit, "offset": "0",
             "state_code": state, "sort":"relevance"}
    sale_response = requests.request("GET", os.getenv('url_list_for_sale'),
                                     headers=headers, params=query)
    return sale_response.json()['properties']
|
{"/app/realty.py": ["/app/walk_score.py"]}
|
38,976
|
JenBanks8585/Labs_CitySpireDS
|
refs/heads/main
|
/app/walk_score.py
|
from walkscore import WalkScoreAPI
from pydantic import BaseModel
from fastapi import APIRouter
from app import config
import os
from dotenv import load_dotenv
load_dotenv()  # make the 'walk_api' key available via os.getenv
router = APIRouter()
def what_message(score):
    """Translate a Walk Score (0-100) into its standard description."""
    bands = (
        (90, 100, "daily errands do not require a car"),
        (70, 89, "most errands can be accomplished on foot"),
        (50, 69, "some errands can be accomplished on foot"),
        (25, 49, "most errands require a car"),
    )
    for low, high, text in bands:
        if low <= score <= high:
            return text
    return " almost all errands require a car"
@router.get('/walk_score')
async def get_just_walk_score(address: str = "7 New Port Beach, Louisiana",
                              lat: float = 39.5984,
                              lon: float = -74.2151
                              ):
    """Return the Walk Score and its human-readable message for a location."""
    api = WalkScoreAPI(api_key=os.getenv('walk_api'))
    score_result = api.get_score(longitude=lon,
                                 latitude=lat,
                                 address=address)
    return {"walk_score": score_result.walk_score,
            "walk_message": what_message(score_result.walk_score)}
def just_walk_score(address: str = "7 New Port Beach, Louisiana",
                    lat: float = 39.5984,
                    lon: float = -74.2151
                    ):
    """Like /walk_score, but also includes transit and bike scores."""
    api = WalkScoreAPI(api_key=os.getenv('walk_api'))
    score_result = api.get_score(longitude=lon,
                                 latitude=lat,
                                 address=address)
    return {
        "walk_score": score_result.walk_score,
        "walk_message": what_message(score_result.walk_score),
        "transit_score": score_result.transit_score,
        "bike_score": score_result.bike_score,
    }
|
{"/app/realty.py": ["/app/walk_score.py"]}
|
38,977
|
JenBanks8585/Labs_CitySpireDS
|
refs/heads/main
|
/app/main.py
|
from fastapi import FastAPI, Depends
import requests
import uvicorn
import json
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel, SecretStr
from app import realty, realtybasemodel, walk_score
description = """
To use these interactive docs:
- Click on an endpoint below
- Click the **Try it out** button
- Edit the Request body or any parameters
- Click the **Execute** button
- Scroll down to see the Server response Code & Details
"""
app = FastAPI(
title = "Jen's Realty API",
description = description,
docs_url = '/')
app.include_router(realty.router, tags=['Realty'])
#app.include_router(realtybasemodel.router, tags=['Realty Using BaseModel'])
app.include_router(walk_score.router, tags=['Walkability Score'])
app.add_middleware(
CORSMiddleware,
allow_origins=['*'],
allow_credentials=True,
allow_methods=['*'],
allow_headers=['*'],
)
if __name__ == '__main__':
uvicorn.run(app)
|
{"/app/realty.py": ["/app/walk_score.py"]}
|
38,978
|
daveoo-k/core
|
refs/heads/master
|
/szafka/migrations/0003_auto_20210306_2058.py
|
# Generated by Django 3.1.7 on 2021-03-06 19:58
from django.db import migrations, models
class Migration(migrations.Migration):
    """Switch the Szafka dimension fields to IntegerField.

    NOTE(review): `default=False` on an IntegerField stores 0 (bool is an int
    subclass); presumably `default=0` was intended — matches models.py, so the
    schema is consistent either way.
    """
    dependencies = [
        ('szafka', '0002_szafka_plyta'),
    ]
    operations = [
        migrations.AlterField(
            model_name='szafka',
            name='glebokosc',
            field=models.IntegerField(default=False),
        ),
        migrations.AlterField(
            model_name='szafka',
            name='szerokosc',
            field=models.IntegerField(default=False),
        ),
        migrations.AlterField(
            model_name='szafka',
            name='wysokosc',
            field=models.IntegerField(default=False),
        ),
    ]
|
{"/szafka/formularze.py": ["/szafka/models.py"]}
|
38,979
|
daveoo-k/core
|
refs/heads/master
|
/szafka/models.py
|
from django.db import models
# Create your models here.
from django.db import models
# Create your models here.
class Szafka(models.Model):
    """A cabinet: name, its dimensions and board thickness."""
    nazwa = models.CharField (max_length=300)  # cabinet name
    # NOTE(review): default=False on IntegerField stores 0 — `default=0` was
    # presumably intended (same value, clearer intent); changing it would
    # require a new migration, so left as-is.
    szerokosc = models.IntegerField(default=False)  # width
    wysokosc = models.IntegerField(default=False)   # height
    glebokosc = models.IntegerField(default=False)  # depth
    plyta = models.IntegerField(default=False)      # board (thickness/type)
|
{"/szafka/formularze.py": ["/szafka/models.py"]}
|
38,980
|
daveoo-k/core
|
refs/heads/master
|
/szafka/formularze.py
|
from django import forms
from .models import Szafka
class SzafkaForm(forms.Form):
    """Input form mirroring the Szafka model fields.

    NOTE(review): the inner class is named `meta` (lowercase) and the form
    derives from forms.Form, so the `model = Szafka` declaration is inert.
    A ModelForm with `class Meta` (plus `fields`) was presumably intended —
    confirm before changing, since ModelForm alters validation/save behavior.
    """
    nazwa = forms.CharField (max_length=300)  # cabinet name
    szerokosc = forms.IntegerField()  # width
    wysokosc = forms.IntegerField()   # height
    glebokosc = forms.IntegerField()  # depth
    plyta = forms.IntegerField()      # board
    class meta:
        model = Szafka
|
{"/szafka/formularze.py": ["/szafka/models.py"]}
|
38,983
|
lindtvedtsebastian/linear-regression
|
refs/heads/main
|
/main.py
|
import linreg.LinearRegression as lg
# Fit a univariate linear regression on the Oslo housing CSV
# (first column = feature, second column = target).
ln = lg.LinearRegression('data/boligOslo.csv')
# Train the model with 500 iterations at a learning rate of 0.000001
ln.train(iterations=500, learning_rate=0.000001)
# Scatter-plot the data with the fitted regression line
ln.plot_model_data()
# Visualize the loss over the training iterations
ln.plot_loss()
# Predict a value for y given input of X = 50 (prints the result)
ln.predict(50)
|
{"/main.py": ["/linreg/LinearRegression.py"]}
|
38,984
|
lindtvedtsebastian/linear-regression
|
refs/heads/main
|
/linreg/LinearRegression.py
|
import array
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn import preprocessing
# Default figure size for all plots produced by this module.
plt.rcParams['figure.figsize'] = (12.0, 9.0)
#plt.ticklabel_format(style='plain')
class LinearRegression:
    """Univariate linear regression (y = b1*x + b0) fitted by gradient descent.

    The CSV's first column is the feature, the second the target.
    """

    def __init__(self, filepath):
        self.data = pd.read_csv(filepath)
        self.x_name = self.data.columns[0]
        self.y_name = self.data.columns[1]
        self.x = self.data[self.x_name]
        self.y = self.data[self.y_name]
        self.b1 = 0  # slope
        self.b0 = 0  # intercept
        self.iterations = []  # iteration indices recorded during training
        self.costs = []       # loss value per iteration

    def plot_model_data(self):
        """Scatter-plot the data, plus the regression line once trained."""
        plt.xlabel(self.x_name)
        plt.ylabel(self.y_name)
        plt.suptitle(self.y_name + ' against ' + self.x_name)
        plt.scatter(self.data[self.x_name], self.data[self.y_name], c="black")
        # NOTE(review): a legitimately-zero coefficient is treated as
        # "untrained" by this guard — confirm acceptable.
        if self.b1 != 0 and self.b0 != 0:
            pred_y = self.b1 * self.x + self.b0
            plt.plot([min(self.x), max(self.x)], [min(pred_y), max(pred_y)], color='red')  # regression line
        plt.show()

    def plot_loss(self):
        """Plot the recorded loss against the iteration number."""
        plt.xlabel('Iterations')
        plt.ylabel('Loss')
        plt.suptitle('Loss over iteration')
        plt.plot(self.iterations, self.costs, linewidth=1)
        plt.show()

    def train(self, learning_rate=0.0001, iterations=500):
        """Fit b0/b1 by batch gradient descent, recording the loss per step."""
        n = float(len(self.x))  # number of samples
        self.b1 = 0  # reset so the model can be retrained with new
        self.b0 = 0  # hyper-parameters (learning rate, iterations, ...)
        for i in range(iterations):
            predicted_y = self.b1 * self.x + self.b0
            der_b1 = (-2 / n) * sum(self.x * (self.y - predicted_y))  # dLoss/db1
            der_b0 = (-2 / n) * sum(self.y - predicted_y)             # dLoss/db0
            self.b1 = self.b1 - learning_rate * der_b1
            self.b0 = self.b0 - learning_rate * der_b0
            self.iterations.append(i)
            self.costs.append(self.calc_loss(predicted_y))
        print(self.b1, self.b0)

    def calc_loss(self, pred_y):
        """Return the loss sum((y - pred_y)^2) / (2n) for predictions *pred_y*.

        Bug fix: the original compared the model's (freshly updated) prediction
        to *pred_y* instead of to the targets, squared the *sum* of residuals
        instead of summing squared residuals, and `/2*n` multiplied by n due
        to operator precedence instead of dividing by 2n.
        """
        n = len(self.x)
        return sum((self.y - pred_y) ** 2) / (2 * n)

    def predict(self, x):
        """Print the prediction b1*x + b0 for input *x* (model must be trained)."""
        if self.b1 != 0 and self.b0 != 0:
            predicted_y = self.b1 * x + self.b0
            print("The predicted value is: " + str(predicted_y))
        else:
            print("Please train the model before attempting to make any predictions!")
|
{"/main.py": ["/linreg/LinearRegression.py"]}
|
39,001
|
kirillkon1/Mathlab3
|
refs/heads/master
|
/Utils.py
|
import sys
import matplotlib.pyplot as plt
from Functions import *
from Solvers import *
def userRead():
    """Interactively read the integral, its bounds, the method and precision.

    Returns a tuple (integrand, epsilon, method instance, border mode).
    """
    integralNumber, methodNum = 0, 0
    print("Выберите интеграл:\n"
          "1) 4x^3 - 5x^2 + 6x - 7)dx x = 0..2\n"
          "2) (3^(x/3) + x/3 + 3)dx x = -3..3\n"
          "3) (ln(3x) + 3)dx x = 1..7\n"
          "4) 1/(x-2)\n"
          "5) sin(x)/x")
    while True:
        try:
            integralNumber = int(input())
            if 0 < integralNumber <= 5:
                break
            print("Неверный ввод")
        except Exception:
            print("Неверный ввод")
    a, b = find_borders(integralNumber)
    a, b, mode = changeBorder(a, b)
    print("Выберите метод решения интерграла:\n"
          "1) Метод прямоугольников\n"
          "2) Метод трапеций\n"
          "3) Метод Симпсона")
    while True:
        try:
            methodNum = int(input())
            if 0 < methodNum <= 3:
                break
            print("Неверный ввод")
        except Exception:
            print("Неверный ввод")
    func = find_func(integralNumber, a, b)
    print("Погрешность измерений. Введите кол-во знаков после запятой (Например '3' = 0.001 )")
    # Bug fix: the original read the precision ONCE before the loop and then
    # `continue`d without re-reading, looping forever on any out-of-range
    # value. Re-read inside the loop (and survive non-integer input).
    while True:
        try:
            epsilon_int = int(input())
        except Exception:
            print("Неверный ввод")
            continue
        if epsilon_int < 1:
            print('А в чем смысл? Выберите другую погрешность')
            continue
        if epsilon_int > 10:
            print("Слишком большая погрешность")
            continue
        break
    return func, 0.1 ** epsilon_int, find_method(methodNum), mode
def find_borders(num: int):
    """Default integration bounds (a, b) for integral number *num*."""
    defaults = {1: (0, 2), 2: (-3, 3), 3: (1, 7)}
    # Integrals 4 and 5 have no preset bounds; the user supplies them later.
    return defaults.get(num, (0, 0))
def find_rectangle_mode():
    """Ask which rectangle-rule variant to use; return 1, 2 or 3."""
    print("Выберите модификацию для метода прямоугольников :\n"
          "1) Правые прямоугольники\n"
          "2) Левые прямоугольники\n"
          "3) Метод средних")
    while True:
        try:
            choice = int(input())
        except Exception:
            print("Неверный ввод")
            continue
        if 0 < choice <= 3:
            return choice
        print("Неверный ввод")
def find_method(num: int) -> AbstractMethod:
    """Map a menu number to a solver (1=rectangles, 2=trapezoid, 3=Simpson)."""
    if num == 1:
        # The rectangle method needs its sub-mode, read interactively.
        return RectangleMethod(find_rectangle_mode())
    if num == 2:
        return TrapezoidMethod()
    # Any other value falls back to the abstract base (matches original).
    return SimpsonMethod() if num == 3 else AbstractMethod()
def find_func(num: int, a: float, b: float) -> AbstractFunction:
    """Instantiate the integrand selected by menu number *num* over [a, b]."""
    if num == 3:
        # ln(3x) is undefined for negative bounds: abort like the CLI does.
        if a < 0 or b < 0:
            print("Вышел за пределы")
            sys.exit()
        return TheThirdFunction(a, b)
    constructors = {1: TheFirstFunction,
                    2: TheSecondFunction,
                    4: TheFourthFunction,
                    5: TheFifthFunction}
    return constructors.get(num, AbstractFunction)(a, b)
def changeBorder(a, b):
    """Optionally let the user override the default bounds (a, b).

    Returns (a, b, mode) where mode is 'not' when unchanged, 'yes' otherwise.
    """
    print(f"По умолчанию значения границ интеграла равны: {a, b}. Желаете ли вы их изменить? (да/нет)")
    tmp = input()
    # Accept yes/no in either capitalization; anything else re-prompts.
    while True:
        if tmp == 'да' or tmp == 'нет' or tmp == 'Да' or tmp == 'Нет':
            break
        print("Неверный ввод")
        tmp = input()
    if tmp == 'нет':
        # NOTE(review): 'Нет' (capitalized) passes validation above but does
        # NOT match here, so it falls through to the change path — confirm.
        return a, b, 'not'
    print("Введите два новых числа через пробел")
    while True:
        try:
            tmp = input().split()
            # The float conversion runs before the length check; a short
            # input raises IndexError, caught below and re-prompted.
            tmp1, tmp2 = float(tmp[0]), float(tmp[1])
            if len(tmp) == 2:
                # Keep the bounds ordered: swap so tmp1 <= tmp2.
                if tmp1 > tmp2:
                    t = tmp2
                    tmp2 = tmp1
                    tmp1 = t
                break
            print("Неверный ввод")
            tmp = input().split()
        except Exception:
            print("Неверный ввод Exception")
    print(f"Новое значение границ интеграла: {tmp1, tmp2}.")
    return tmp1, tmp2, 'yes'
def toFixed(numObj, digits=0):
    """Format *numObj* as a fixed-point string with *digits* decimals."""
    return format(numObj, ".{}f".format(digits))
def drawImage(fun: AbstractFunction):
    """Plot the integrand on [-4, 4] with the axes crossing at the origin."""
    xs = np.linspace(-4, 4, 100)
    ys = fun.find(xs)
    axes = plt.gca()
    # Move the left/bottom spines to zero and hide the top/right frame.
    axes.spines['left'].set_position('zero')
    axes.spines['bottom'].set_position('zero')
    axes.spines['top'].set_visible(False)
    axes.spines['right'].set_visible(False)
    plt.plot(xs, ys)
    plt.show()
|
{"/Utils.py": ["/Functions.py", "/Solvers.py"], "/Solvers.py": ["/Functions.py"], "/main.py": ["/Functions.py", "/Utils.py"]}
|
39,002
|
kirillkon1/Mathlab3
|
refs/heads/master
|
/Solvers.py
|
import math
from abc import ABC, abstractmethod
from Functions import AbstractFunction
import private as private
class AbstractMethod(ABC):
    """Base interface for the numeric integration methods."""
    def __init__(self) -> None:
        super().__init__()
    @abstractmethod
    def solve(self, fun: AbstractFunction, n=-1) -> float:
        """Integrate *fun* over [fun.a, fun.b] using n subdivisions."""
        pass
    @abstractmethod
    def find_n(self, fun: AbstractFunction, eps: float) -> int:
        """Initial subdivision count from the method's error bound and eps."""
        pass
    def __str__(self) -> str:
        return "AbstractMethod"
class RectangleMethod(AbstractMethod):
    """Rectangle rule; mode 1/2/3 selects the variant (see find_rectangle_mode)."""
    def __init__(self, mode: int) -> None:
        super().__init__()
        if 1 <= mode <= 3:
            self.mode = mode
        else:
            # Out-of-range mode falls back to the midpoint variant.
            self.mode = 3
    def solve(self, fun: AbstractFunction, n=-1, rupture=False) -> float:
        """Integrate *fun* with n rectangles, splitting improper integrands."""
        h = (fun.b - fun.a) / n  # NOTE(review): unused; each sub-mode recomputes h
        # i separated it into to integrals
        if (fun.__str__() == 'sin(x)/x' or fun.__str__() == '1/(x-2)') and not rupture:
            if fun.a < fun.getPoint() < fun.b:
                # NOTE(review): fun1 and fun2 alias the SAME object — both
                # assignments mutate one instance, so the two recursive calls
                # integrate the same shrunken interval; a copy was presumably
                # intended — confirm before relying on these integrals.
                fun1 = fun
                fun1.b = fun.getPoint() - 0.0001
                fun2 = fun
                fun2.a = fun.getPoint() + 0.0001
                return self.solve(fun1, n, True) + self.solve(fun2, n, True)
        if self.mode == 1:
            return self.__leftMode(fun, n)
        elif self.mode == 2:
            return self.__rightMode(fun, n)
        else:
            return self.__centerMode(fun, n)
    @staticmethod
    def __leftMode(fun: AbstractFunction, n: int):
        # NOTE(review): j runs 0..n-2 so the first sample is at a - h; a left
        # rule normally samples a + j*h for j in 0..n-1 — verify intent.
        a, b = fun.a, fun.b
        h = (b - a) / n
        i = 0
        for j in range(n - 1):
            i += fun.find(a + (j - 1) * h)
        return i * h
    @staticmethod
    def __rightMode(fun: AbstractFunction, n: int):
        # Samples a + j*h for j in 0..n-1.
        a, b = fun.a, fun.b
        h = (b - a) / n
        i = 0
        for j in range(n):
            i += fun.find(a + j * h)
        return i * h
    @staticmethod
    def __centerMode(fun: AbstractFunction, n: int):
        # Midpoint samples a + h/2 + j*h; NOTE(review): j starts at 1, so the
        # first cell's midpoint is skipped — verify intent.
        a, b = fun.a, fun.b
        h = (b - a) / n
        i = 0
        for j in range(1, n):
            i += fun.find(a + h / 2 + j * h)
        return i * h
    def find_n(self, fun: AbstractFunction, eps: float) -> int:
        """Initial n from the second-derivative error bound, rounded to even."""
        tmp = int(math.pow(
            max(abs(fun.getSecondDerivative(fun.a)), abs(fun.getSecondDerivative(fun.b))) * math.pow((fun.b - fun.a),
                                                                                                     3) / (
                24 * eps), 1 / 2))
        return tmp + 1 if tmp % 2 == 1 else tmp + 2
    def __str__(self) -> str:
        return "RectangleMethod"
class TrapezoidMethod(AbstractMethod):
    """Composite trapezoid rule."""
    def __init__(self) -> None:
        super().__init__()
    def solve(self, fun: AbstractFunction, n=10, rupture=False) -> float:
        """Integrate *fun* over [fun.a, fun.b] with n trapezoids."""
        a, b = fun.a, fun.b
        h = (b - a) / n
        # Half-weight endpoint terms; NOTE(review): computed with the ORIGINAL
        # bounds, before the discontinuity split below discards them.
        i = fun.find(a) / 2 + fun.find(b) / 2
        if (fun.__str__() == 'sin(x)/x' or fun.__str__() == '1/(x-2)') and not rupture:
            if fun.a < fun.getPoint() < fun.b:
                # NOTE(review): fun1/fun2 alias the same object (see
                # RectangleMethod.solve) — a copy was presumably intended.
                fun1 = fun
                fun1.b = fun.getPoint() - 0.0001
                fun2 = fun
                fun2.a = fun.getPoint() + 0.0001
                return self.solve(fun1, n, True) + self.solve(fun2, n, True)
        for j in range(1, n):
            i = i + fun.find(a + j * h)
        return i * h
    def find_n(self, fun: AbstractFunction, eps: float) -> int:
        """Initial n from the trapezoid error bound, rounded up to even."""
        tmp = int(math.pow(
            max(abs(fun.getSecondDerivative(fun.a)), abs(fun.getSecondDerivative(fun.b))) * math.pow((fun.b - fun.a),
                                                                                                     3) / (
                12 * eps), 0.5))
        return tmp + 1 if tmp % 2 == 1 else tmp + 2
    def __str__(self) -> str:
        return "TrapezoidMethod"
class SimpsonMethod(AbstractMethod):
    """Composite Simpson rule."""
    def __init__(self) -> None:
        super().__init__()
    def solve(self, fun: AbstractFunction, n=-1, rupture=False) -> float:
        """Integrate *fun* with n subdivisions (n is kept even by find_n/main)."""
        a, b = fun.a, fun.b
        h = (b - a) / n
        if (fun.__str__() == 'sin(x)/x' or fun.__str__() == '1/(x-2)') and not rupture:
            if fun.a < fun.getPoint() < fun.b:
                # NOTE(review): fun1/fun2 alias the same object (see
                # RectangleMethod.solve) — a copy was presumably intended.
                fun1 = fun
                fun1.b = fun.getPoint() - 0.0001
                fun2 = fun
                fun2.a = fun.getPoint() + 0.0001
                return self.solve(fun1, n, True) + self.solve(fun2, n, True)
        i_odd = 0
        i_even = 0
        i = fun.find(a) + fun.find(b)
        # NOTE(review): range(1, n - 1) stops at j = n-2; the classic Simpson
        # sum includes j = n-1 — verify intent.
        for j in range(1, n - 1):
            if j % 2 == 0:
                i_even += fun.find(a + h * j)
            else:
                i_odd += fun.find(a + h * j)
        return (i + 4 * i_odd + 2 * i_even) * h / 3
    def find_n(self, fun: AbstractFunction, eps: float) -> int:
        """Initial n from the fourth-derivative error bound, rounded to even."""
        tmp = int(math.pow(
            max(abs(fun.getFourthDerivative(fun.a)), abs(fun.getFourthDerivative(fun.b))) * math.pow((fun.b - fun.a),
                                                                                                     5) / (
                12 * eps), 1 / 4))
        return tmp + 1 if tmp % 2 == 1 else tmp + 2
    def __str__(self) -> str:
        return "SimpsonMethod"
def find_Rn(fun: AbstractFunction, n=10):
    """Theoretical remainder estimate for each integration method.

    NOTE(review): `fun.__str__()` is compared against *method* names, but the
    argument is an integrand (AbstractFunction), so no branch ever matches and
    the function always returns None. It presumably should receive the method
    (or both method and integrand) — confirm before use; no caller is visible
    in this file.
    """
    if fun.__str__() == "RectangleMethod":
        x = max(fun.getSecondDerivative(fun.a), fun.getSecondDerivative(fun.b))
        return x * (fun.b - fun.a) ** 3 / (24 * n ** 2)
    if fun.__str__() == "TrapezoidMethod":
        x = max(fun.getSecondDerivative(fun.a), fun.getSecondDerivative(fun.b))
        return x * (fun.b - fun.a) ** 3 / (12 * n ** 2)
    if fun.__str__() == "SimpsonMethod":
        # NOTE(review): mixes fourth and second derivatives — likely a typo.
        x = max(fun.getFourthDerivative(fun.a), fun.getSecondDerivative(fun.b))
        return x * (fun.b - fun.a) ** 5 / (180 * n ** 4)
|
{"/Utils.py": ["/Functions.py", "/Solvers.py"], "/Solvers.py": ["/Functions.py"], "/main.py": ["/Functions.py", "/Utils.py"]}
|
39,003
|
kirillkon1/Mathlab3
|
refs/heads/master
|
/Functions.py
|
import math
import sys
from abc import ABC, abstractmethod
import numpy as np
class AbstractFunction(ABC):
    """Base class for integrands: bounds [a, b] plus derivative bounds for error estimates."""
    def __init__(self, a: float, b: float) -> None:
        super().__init__()
        self.a = a  # lower integration bound
        self.b = b  # upper integration bound
    def __str__(self) -> str:
        return super().__str__()
    @abstractmethod
    def find(self, x) -> float:
        """Evaluate the integrand at x."""
        return 0
    @abstractmethod
    def getSecondDerivative(self, x: float) -> float:
        """Second derivative at x (rectangle/trapezoid error bounds)."""
        return 0
    @abstractmethod
    def getFourthDerivative(self, x: float) -> float:
        """Fourth derivative at x (Simpson error bound)."""
        return 0
    @abstractmethod
    def getPoint(self):
        """Discontinuity location, or 'not' when the integrand has none."""
        pass
# integral (4x^3 - 5x^2 + 6x - 7)dx x = 0..2
# answer: 2/3 ~= 0.666667
class TheFirstFunction(AbstractFunction):
    """Polynomial integrand 4x^3 - 5x^2 + 6x - 7; exact value on 0..2 is 2/3."""
    def __init__(self, a: float, b: float) -> None:
        super().__init__(a, b)
    def __str__(self) -> str:
        return "4x^3 - 5x^2 + 6x - 7)dx x = {}..{}".format(self.a, self.b)
    def find(self, x) -> float:
        return 4 * x ** 3 - 5 * x ** 2 + 6 * x - 7
    def getSecondDerivative(self, x: float) -> float:
        # d^2/dx^2 (4x^3 - 5x^2 + 6x - 7) = 24x - 10
        return 24 * x - 10
    def getFourthDerivative(self, x: float) -> float:
        # The fourth derivative of a cubic vanishes everywhere.
        return 0
    def getPoint(self):
        # Continuous on the whole real line.
        return 'not'
# integral (3^(x/3) + x/3 + 3)dx x = -3..3
# answer: 25.282
class TheSecondFunction(AbstractFunction):
    """Integrand 3^(x/3) + x/3 + 3; reference value on -3..3 is ~25.282."""
    def __init__(self, a: float, b: float) -> None:
        super().__init__(a, b)
    def __str__(self) -> str:
        return "(3^(x/3) + x/3 + 3)dx x = {}..{}".format(self.a, self.b)
    def find(self, x) -> float:
        return 3 ** (x / 3) + x / 3 + 3
    def getSecondDerivative(self, x: float) -> float:
        # 3^(x/3 - 2) * ln^2(3)
        return pow(3, x / 3 - 2) * np.log(3) ** 2
    def getFourthDerivative(self, x: float) -> float:
        # 3^(x/3 - 4) * ln^4(3)
        return pow(3, x / 3 - 4) * np.log(3) ** 4
    def getPoint(self):
        # Continuous on the whole real line.
        return 'not'
# integral (ln(3x) + 3)dx x = 1..7
# answer: 32.213
class TheThirdFunction(AbstractFunction):
    """Integrand ln(3x) + 3; reference value on 1..7 is ~32.213."""
    def __init__(self, a: float, b: float) -> None:
        super().__init__(a, b)
    def __str__(self) -> str:
        return "(ln(3x) + 3)dx x = {}..{}".format(self.a, self.b)
    def find(self, x) -> float:
        # Domain guard: ln(3x) needs x > 0; a negative sample aborts the CLI.
        if x < 0:
            print("выход за пределы ОДЗ")
            sys.exit()
        return np.log(3 * x) + 3
    def getSecondDerivative(self, x: float) -> float:
        # d^2/dx^2 (ln(3x) + 3) = -1/x^2
        return -1 / (x ** 2)
    def getFourthDerivative(self, x: float) -> float:
        # d^4/dx^4 (ln(3x) + 3) = -6/x^4
        return -6 / (x ** 4)
    def getPoint(self):
        # No interior discontinuity on the valid domain.
        return 'not'
class TheFourthFunction(AbstractFunction):
    """Improper integrand 1/(x-2), singular at x = 2."""
    def __init__(self, a: float, b: float) -> None:
        super().__init__(a, b)
    def __str__(self) -> str:
        return "1/(x-2)"
    def find(self, x) -> float:
        # Step off the pole so evaluation never divides by zero.
        if x == 2:
            x += 0.00001
        return 1 / (x - 2)
    def getSecondDerivative(self, x: float) -> float:
        # d^2/dx^2 1/(x-2) = 2/(x-2)^3
        return 2 / pow(x - 2, 3)
    def getFourthDerivative(self, x: float) -> float:
        # d^4/dx^4 1/(x-2) = 24/(x-2)^5
        return 24 / pow(x - 2, 5)
    def getPoint(self):
        # Singularity used by the solvers to split the interval.
        return 2
class TheFifthFunction(AbstractFunction):
    """Integrand sin(x)/x with a removable singularity at x = 0."""
    def __init__(self, a: float, b: float) -> None:
        super().__init__(a, b)
    def __str__(self) -> str:
        return "sin(x)/x"
    def find(self, x: float) -> float:
        # Nudge past the 0/0 point instead of special-casing the limit (=1).
        if x == 0:
            x += 0.00001
        return math.sin(x) / x
    # Placeholders: the adaptive loop in main.py never consults these for
    # this integrand (original note: "don't use bcs it is not really needed").
    def getSecondDerivative(self, x: float) -> float:
        return 1
    def getFourthDerivative(self, x: float) -> float:
        return 1
    def getPoint(self):
        # Point the solvers split around.
        return 0
|
{"/Utils.py": ["/Functions.py", "/Solvers.py"], "/Solvers.py": ["/Functions.py"], "/main.py": ["/Functions.py", "/Utils.py"]}
|
39,004
|
kirillkon1/Mathlab3
|
refs/heads/master
|
/main.py
|
import sys
from Functions import *
from Utils import userRead, toFixed, drawImage
if __name__ == '__main__':
    # Read the integrand, target precision, solver and border mode interactively.
    fun, eps, method, mode = userRead()
    n = 2
    # drawImage(fun)
    res = method.solve(fun, n)
    res_prev = method.solve(fun, int(n / 2))
    # Runge-style refinement: double n until two successive results agree to eps.
    while abs(res - res_prev) > eps:
        n *= 2
        res_prev = res
        res = method.solve(fun, n)
        # Safety valve against non-converging integrals.
        if n > 10000000:
            print("Нельзя найти ответ")
            sys.exit()
    print(f"Ответ: {toFixed(res, -int(np.log10(eps)) * 2)} ± {toFixed(eps, -int(np.log10(eps)))}")
    print(f"\nИнтеграл: {fun}, \nМетод: {method}, \nКол-во делений: {n} ")
|
{"/Utils.py": ["/Functions.py", "/Solvers.py"], "/Solvers.py": ["/Functions.py"], "/main.py": ["/Functions.py", "/Utils.py"]}
|
39,005
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi/views.py
|
# coding: utf-8
import __init__
from django.http import HttpResponse
from funnel import funneldataOld
from funnel import funneldata
from eventRemain import eventsRemain
from eventRemainMap import eventsRemainMap
from eventRemain import eventsRemainCombine
from crossEvent import eventsCrossCombine
from crossEventMap import crossEventMap
from searchUser import search_user as rt_search
from eventSummary import eventSummary
from MongoDatas import userSample
from MongoDatas import userSample_lite
from MongoDatas import search_user
from MongoDatas import eventsSeries
import urllib
import json
from jhddgapi.settings import DEBUG
# global
# from IPtoLoc.__init__ import ipdataPath
# from IPtoLoc import IPtoAreaFinals
# global initarry
# if initarry is None:
# initarry = IPtoAreaFinals.load(ipdataPath)
# Create your views here.
def getEventsSeries(request, datatype, s_tm, e_tm, events_quote):
    """Per-event time series between s_tm and e_tm as a UTF-8 JSON HttpResponse.

    events_quote is a URL-quoted JSON list of event ids, e.g. ['dl', 'ac23'].
    NOTE(review): Python 2 APIs throughout (urllib.unquote, str.decode).
    """
    # ['dl', 'ac23']
    try:
        events_str = urllib.unquote(events_quote)
        events = json.loads(events_str)
        result = eventsSeries(datatype, s_tm, e_tm, events)
        # data = json.dumps(result, ensure_ascii=False)
        data = json.dumps(result).decode('unicode-escape').encode('utf8')
        return HttpResponse(data)
    except:
        import traceback
        exstr = traceback.format_exc()
        if DEBUG:
            print(exstr)
            return HttpResponse(exstr)
        else:
            print(exstr)  # implicit None return -> HTTP 500 in production
# Event funnel, production endpoint.
def getFunnel(request, datatype, params):
    '''
    Event funnel with per-event "map" attribute filters.
    :param request: Django request (unused beyond routing).
    :param datatype: product/dataset key, e.g. "biqu".
    :param params: URL-quoted JSON with startDay, endDay and funnel steps.
    :format url: http://101.201.145.120:8090/saasapi/funnel/biqu/{"startDay": "2016-11-10", "endDay": "2016-11-20", "funnel": [[{"id": "jhddg_every", "map": {}}], [{"id": "ac41", "map": {"og": "CSX"}}], [{"id": "ac23", "map": {}}, {"id": "ac11", "map": {}}], [{"id": "ac22", "map": {}}]]}/
    :return: UTF-8 JSON HttpResponse, or the traceback when DEBUG.
    '''
    try:
        params = urllib.unquote(params)
        data = json.loads(params)
        startDay = data["startDay"]
        endDay = data["endDay"]
        # if datatype not in ["biqu", "BIQU_ANDROID", "biqu_all", "feeling", "ncf_h5", "guaeng", "caiyu_ad"]:
        # NOTE(review): the Mongo branch is hard-disabled; all traffic goes
        # through ClickHouse now — the dead branch is kept for rollback.
        if False:
            result = funneldata(datatype, startDay, endDay, data)
        else:
            from saasapi_clickhouse.Funnel import Funnel
            funnel = Funnel()
            result = funnel.data(datatype, params)
        data = json.dumps(result).decode('unicode-escape').encode('utf8')
        return HttpResponse(data)
    except:
        import traceback
        exstr = traceback.format_exc()
        if DEBUG:
            print(exstr)
            return HttpResponse(exstr)
        else:
            print(exstr)  # implicit None return -> HTTP 500 in production
# Event funnel, production endpoint.
# Keeps the legacy interface format; "map" attribute filters not supported.
def getFunnelOld(request, datatype, s_tm, e_tm, events_quote):
    '''
    Legacy funnel endpoint (no per-event "map" attribute filters).
    :param request: Django request (unused beyond routing).
    :param datatype: product/dataset key, e.g. "feeling".
    :param s_tm: start day "YYYY-MM-DD".
    :param e_tm: end day "YYYY-MM-DD".
    :param events_quote: URL-quoted JSON list of funnel steps.
    :format url: http://101.201.145.120:8090/saasapi/eventserisesingle/feeling/2016-06-12/2016-06-17/[["in"], ["ac23", "ac11"], ["ac22"]]/
    :return: UTF-8 JSON HttpResponse, or the traceback when DEBUG.
    '''
    try:
        events_str = urllib.unquote(events_quote)
        events = json.loads(events_str)
        result = funneldataOld(datatype, s_tm, e_tm, events)
        # data = json.dumps(result, ensure_ascii=False)
        data = json.dumps(result).decode('unicode-escape').encode('utf8')
        return HttpResponse(data)
    except:
        import traceback
        exstr = traceback.format_exc()
        if DEBUG:
            print(exstr)
            return HttpResponse(exstr)
        else:
            print(exstr)  # implicit None return -> HTTP 500 in production
# Event retention.
def getEventsRemain(request, datatype, params):
    """Event retention report; params is URL-quoted JSON."""
    # ['dl', 'ac23']
    try:
        params = urllib.unquote(params)
        params = json.loads(params)
        # if datatype not in ["biqu", "BIQU_ANDROID", "biqu_all", "feeling", "ncf_h5", "guaeng", "caiyu_ad"]:
        # NOTE(review): Mongo branch hard-disabled, kept for rollback.
        if False:
            result = eventsRemainMap(datatype, params)
        else:
            from saasapi_clickhouse.EventRemain import EventRemain
            event_remain = EventRemain()
            result = event_remain.data(datatype, params)
        data = json.dumps(result).decode('unicode-escape').encode('utf8')
        return HttpResponse(data)
    except:
        import traceback
        exstr = traceback.format_exc()
        if DEBUG:
            print(exstr)
            return HttpResponse(exstr)
        else:
            print(exstr)  # implicit None return -> HTTP 500 in production
# Event retention (date range, new request format).
def getEventsRemainCombine(request, datatype, events_quote):
    """Retention over a date range; events_quote is URL-quoted JSON with
    startTime, endTime, events and lastDay."""
    # ['dl', 'ac23']
    try:
        data_str = urllib.unquote(events_quote)
        paramse = json.loads(data_str)
        begintm = paramse["startTime"]
        endtm = paramse["endTime"]
        events = paramse["events"]
        lastday = int(paramse["lastDay"])
        result = eventsRemainCombine(datatype, begintm, endtm, lastday, events)
        # data = json.dumps(result, ensure_ascii=False, sort_keys=True)
        data = json.dumps(result).decode('unicode-escape').encode('utf8')
        return HttpResponse(data)
    except:
        import traceback
        exstr = traceback.format_exc()
        if DEBUG:
            print(exstr)
            return HttpResponse(exstr)
        else:
            print(exstr)  # implicit None return -> HTTP 500 in production
def getSample(request, datatype, s_tm):
    """User sample for day s_tm as a UTF-8 JSON HttpResponse."""
    try:
        result = userSample(datatype, s_tm)
        # data = json.dumps(result, ensure_ascii=False)
        # data = json.dumps(result, ensure_ascii=False)
        data = json.dumps(result).decode('unicode-escape').encode('utf8')
        return HttpResponse(data)
    except:
        import traceback
        exstr = traceback.format_exc()
        if DEBUG:
            print(exstr)
            return HttpResponse(exstr)
        else:
            print(exstr)  # implicit None return -> HTTP 500 in production
def getSample_lite(request, datatype, s_tm):
    """Lightweight variant of getSample (fewer fields per user)."""
    try:
        result = userSample_lite(datatype, s_tm)
        # data = json.dumps(result, ensure_ascii=False)
        data = json.dumps(result).decode('unicode-escape').encode('utf8')
        return HttpResponse(data)
    except:
        import traceback
        exstr = traceback.format_exc()
        if DEBUG:
            print(exstr)
            return HttpResponse(exstr)
        else:
            print(exstr)  # implicit None return -> HTTP 500 in production
def search(request, datatype, dayStr, hour_s, hour_e, base_cond):
    """Search users on *dayStr* between hour_s and hour_e matching base_cond.

    base_cond is a JSON object of filter conditions.

    Bug fix: the original passed ensure_ascii=False to HttpResponse(), which
    has no such keyword — the resulting TypeError sent every successful
    search into the except branch.
    """
    try:
        base_cond = json.loads(base_cond)
        result = search_user(datatype, dayStr, int(hour_s), int(hour_e), base_cond)
        # Python 2: unescape \uXXXX sequences and emit UTF-8 bytes.
        data = json.dumps(result).decode('unicode-escape').encode('utf8')
        return HttpResponse(data)
    except:
        import traceback
        exstr = traceback.format_exc()
        if DEBUG:
            print(exstr)
            return HttpResponse(exstr)
        else:
            print(exstr)  # implicit None return -> HTTP 500 in production
# User sampling search endpoint, production.
def rtSample(request, datatype, conds):
    """Real-time user search; conds is a JSON filter object.

    Falls back to matching jhd_pushid when jhd_userkey yields no results.
    """
    try:
        import time
        a = time.time()  # crude request timing, printed below
        print("satrt", "-"*100)
        base_cond = json.loads(conds)
        result = rt_search(datatype, _filter=base_cond)
        # Retry with the push id when the user key finds nothing.
        if len(result) == 0 and "jhd_userkey" in base_cond:
            uid = base_cond.pop("jhd_userkey")
            base_cond["jhd_pushid"] = uid
            result = rt_search(datatype, _filter=base_cond)
        # data = json.dumps(result, ensure_ascii=False)
        data = json.dumps(result).decode('unicode-escape').encode('utf8')
        print("end", "-"*100, time.time()-a)
        return HttpResponse(data)
    except:
        import traceback
        exstr = traceback.format_exc()
        if DEBUG:
            print(exstr)
            return HttpResponse(exstr)
        else:
            print(exstr)  # implicit None return -> HTTP 500 in production
# Cross-event analysis (multi-day), production.
def getEventsCrossCombine(request, datatype, s_tm, e_tm, last_tm, events_quote):
    """Cross-event report from s_tm to e_tm over last_tm days;
    events_quote is a URL-quoted JSON event list."""
    try:
        events_str = urllib.unquote(events_quote)
        events = json.loads(events_str)
        last_tm = int(last_tm)
        result = eventsCrossCombine(datatype, s_tm, e_tm, last_tm, events)
        # data = json.dumps(result, ensure_ascii=False)
        data = json.dumps(result).decode('unicode-escape').encode('utf8')
        return HttpResponse(data)
    except:
        import traceback
        exstr = traceback.format_exc()
        if DEBUG:
            print(exstr)
            return HttpResponse(exstr)
        else:
            print(exstr)  # implicit None return -> HTTP 500 in production
# Cross-event analysis (supports "map" attribute filters), production.
def getCrossEventMap(request, datatype, params):
    """Cross-event report with attribute filters; params is URL-quoted JSON."""
    try:
        params = urllib.unquote(params)
        params = json.loads(params)
        # if datatype not in ["biqu", "BIQU_ANDROID", "biqu_all", "feeling", "ncf_h5", "guaeng", "caiyu_ad"]:
        # NOTE(review): Mongo branch hard-disabled, kept for rollback.
        if False:
            result = crossEventMap(datatype, params)
        else:
            from saasapi_clickhouse.CrossEvent import CrossEvent
            cross_event = CrossEvent()
            result = cross_event.data(datatype, params)
        data = json.dumps(result).decode('unicode-escape').encode('utf8')
        return HttpResponse(data)
    except:
        import traceback
        exstr = traceback.format_exc()
        if DEBUG:
            print(exstr)
            return HttpResponse(exstr)
        else:
            # return HttpResponse(params)
            print(exstr)  # implicit None return -> HTTP 500 in production
def getEventSummary(request, datatype, conds):
    """Event summary report; conds is URL-quoted JSON."""
    try:
        params = urllib.unquote(conds)
        params = json.loads(params)
        # result = eventSummary(datatype, params)
        # if datatype not in ["biqu", "BIQU_ANDROID", "biqu_all", "feeling", "ncf_h5", "guaeng", "caiyu_ad"]:
        # NOTE(review): Mongo branch hard-disabled, kept for rollback.
        if False:
            result = eventSummary(datatype, params)
        else:
            from saasapi_clickhouse.EventSummary import EventSummary
            event_summary = EventSummary()
            result = event_summary.data(datatype, params)
        data = json.dumps(result).decode('unicode-escape').encode('utf8')
        return HttpResponse(data)
    except:
        import traceback
        exstr = traceback.format_exc()
        if DEBUG:
            print(exstr)
            return HttpResponse(exstr)
        else:
            print(exstr)  # implicit None return -> HTTP 500 in production
def getRoundFlightInterval(request, params):
    """Round-flight interval report (ClickHouse); params is URL-quoted JSON."""
    try:
        from saasapi_clickhouse.RoundFlightInterval import RoundFlightInterval
        params = urllib.unquote(params)
        params = json.loads(params)
        query = RoundFlightInterval()
        result = query.query(params)
        data = json.dumps(result).decode('unicode-escape').encode('utf8')
        return HttpResponse(data)
    except:
        import traceback
        exstr = traceback.format_exc()
        if DEBUG:
            print(exstr)
            return HttpResponse(exstr)
        else:
            print(exstr)  # implicit None return -> HTTP 500 in production
def getRoundAirelines(request):
    """Airline list view: takes no parameters and returns
    RoundAireLine.query() serialized as UTF-8 JSON.  DEBUG mode echoes
    tracebacks to the caller."""
    try:
        from saasapi_clickhouse.RoundAireLine import RoundAireLine
        result = RoundAireLine().query()
        return HttpResponse(json.dumps(result).decode('unicode-escape').encode('utf8'))
    except:
        import traceback
        trace = traceback.format_exc()
        print(trace)
        if DEBUG:
            return HttpResponse(trace)
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,006
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi_clickhouse/views.py
|
# -*- coding: utf-8 -*-
import urllib
import json
from django.http import HttpResponse
from jhddgapi.settings import DEBUG
from saasapi_clickhouse.Funnel import Funnel
from saasapi_clickhouse.CrossEvent import CrossEvent
from saasapi_clickhouse.EventRemain import EventRemain
from saasapi_clickhouse.RoundFlightInterval import RoundFlightInterval
from saasapi_clickhouse.RoundAireLine import RoundAireLine
def get_funnel(request, datatype, params):
    """Funnel query view: decode URL-quoted JSON params and return the
    ClickHouse-backed Funnel result as UTF-8 JSON.  DEBUG mode echoes
    tracebacks to the caller."""
    try:
        decoded = json.loads(urllib.unquote(params))
        result = Funnel().data(datatype, decoded)
        return HttpResponse(json.dumps(result).decode('unicode-escape').encode('utf8'))
    except:
        import traceback
        trace = traceback.format_exc()
        print(trace)
        if DEBUG:
            return HttpResponse(trace)
def get_cross_event(request, datatype, params):
    """Cross-event (conversion) view backed by ClickHouse's CrossEvent
    helper.  DEBUG mode echoes tracebacks to the caller."""
    try:
        decoded = json.loads(urllib.unquote(params))
        result = CrossEvent().data(datatype, decoded)
        return HttpResponse(json.dumps(result).decode('unicode-escape').encode('utf8'))
    except:
        import traceback
        trace = traceback.format_exc()
        print(trace)
        if DEBUG:
            return HttpResponse(trace)
def get_event_remain(request, datatype, params):
    """Event-retention view backed by ClickHouse's EventRemain helper.
    DEBUG mode echoes tracebacks to the caller."""
    try:
        decoded = json.loads(urllib.unquote(params))
        result = EventRemain().data(datatype, decoded)
        return HttpResponse(json.dumps(result).decode('unicode-escape').encode('utf8'))
    except:
        import traceback
        trace = traceback.format_exc()
        print(trace)
        if DEBUG:
            return HttpResponse(trace)
def get_round_fligth_interval(request, params):
    """Round-trip flight interval view backed by ClickHouse.
    NOTE: the misspelled name ("fligth") is kept — URL routing
    references this function by name."""
    try:
        decoded = json.loads(urllib.unquote(params))
        result = RoundFlightInterval().query(decoded)
        return HttpResponse(json.dumps(result).decode('unicode-escape').encode('utf8'))
    except:
        import traceback
        trace = traceback.format_exc()
        print(trace)
        if DEBUG:
            return HttpResponse(trace)
def get_round_airelines(request):
    """Airline list view: no parameters, returns RoundAireLine.query()
    as UTF-8 JSON.  DEBUG mode echoes tracebacks to the caller."""
    try:
        result = RoundAireLine().query()
        return HttpResponse(json.dumps(result).decode('unicode-escape').encode('utf8'))
    except:
        import traceback
        trace = traceback.format_exc()
        print(trace)
        if DEBUG:
            return HttpResponse(trace)
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,007
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi/SaaSCommon/JHWrite.py
|
# -*- coding: utf-8 -*-
import os
import traceback
import gzip
import shutil
class JHWrite(object):
    """File-writing helper with a class-wide cache of open handles.

    Paths are normalized to forward slashes; files ending in ".gz" are
    written through gzip.  Callers write lines via write() and must call
    finished() to flush/close (and optionally compress) everything.
    """
    # Cache of open file objects keyed by normalized filename, shared class-wide.
    out_file = {}

    def __init__(self, filename, line, mode="w"):
        """Convenience constructor: immediately write *line* to *filename*."""
        self.write(filename, line, mode)

    @staticmethod
    def _gzip(sourcepath, destpath=None):
        """Compress *sourcepath* into *destpath* (default: sourcepath + ".gz")
        and delete the source.  A ".gz" source is decompressed first so it
        is re-compressed rather than double-wrapped."""
        destpath = sourcepath + ".gz" if destpath is None else destpath
        with JHWrite.fileobj(sourcepath, "rb") as f_in, \
                gzip.open(destpath, "wb") as f_out:
            shutil.copyfileobj(f_in, f_out)
        os.remove(sourcepath)

    @staticmethod
    def fileobj(filepath, mode="rb"):
        """Open *filepath* transparently: gzip.open for ".gz", plain open otherwise."""
        opener = gzip.open if filepath.endswith(".gz") else open
        return opener(filepath, mode)

    @staticmethod
    def combinefiles(files_path, destfile_path):
        """Concatenate every existing file in *files_path* into *destfile_path*,
        creating the destination directory if needed.

        Fixes two defects of the original: the copy loop was a lazy map()
        (a no-op on Python 3), and the source handles were never closed.
        """
        dirname = os.path.dirname(destfile_path)
        if dirname and not os.path.exists(dirname):
            os.makedirs(dirname)
        destfile = JHWrite.fileobj(destfile_path, "w")
        try:
            for filepath in files_path:
                if not os.path.exists(filepath):
                    continue
                src = JHWrite.fileobj(filepath)
                try:
                    shutil.copyfileobj(src, destfile)
                finally:
                    src.close()
        finally:
            destfile.close()

    @staticmethod
    def write(filename, line, mode="w"):
        """Write *line* (stripped, plus os.linesep) to *filename*, creating
        parent directories and caching the open handle in out_file until
        finished() is called.  Paths containing a space are silently skipped
        (original behavior, kept).  mode applies only on first open.

        The original mutated the global os.sep; we split on "/" directly
        (the path was just normalized to forward slashes above).
        """
        filename = filename.replace("\\", "/")
        path, file = filename.rsplit("/", 1)
        extension = file.rsplit(".", 1)[1]
        assert mode == "w" or mode == "a", "access file mode error!"
        if " " in path:
            return
        if not os.path.exists(path):
            os.makedirs(path)
        out = JHWrite.out_file.get(filename)
        if out is None:
            opener = gzip.open if extension == "gz" else open
            out = opener(filename, mode)
            JHWrite.out_file[filename] = out
        out.write(line.strip() + os.linesep)

    @staticmethod
    def finished(iszip=False):
        """Close every cached handle, optionally gzip non-.gz files, and
        clear the cache.  Iterates over a snapshot of the keys so entries
        can be deleted safely while looping (the original mutated the dict
        during iteration, a RuntimeError on Python 3)."""
        for filename in list(JHWrite.out_file.keys()):
            try:
                JHWrite.out_file[filename].close()
                if iszip and not filename.endswith(".gz"):
                    JHWrite._gzip(filename)
            except Exception:
                print(traceback.print_exc())
            try:
                # Removing the finished entry is mandatory so a later write()
                # reopens the file instead of using a closed handle.
                del JHWrite.out_file[filename]
            except KeyError:
                print(traceback.print_exc())
if __name__ == "__main__":
    # Ad-hoc manual experiments against hard-coded Windows paths (F:/...);
    # not part of the library API.
    print(len(os.linesep), os.linesep)
    # JHWrite._gzip(r"F:/testfile/a.txt.gz", r"F:/testfile/a_0.txt.gz")
    # NOTE(review): opens the gz file for writing and never closes it.
    JHWrite.fileobj(r"F:/testfile/c.txt.gz", "w")
    # for item in range(20, 30):
    #     item = str(item)
    #     JHWrite.write(r"F:/testfile/a.txt.gz", item, "a")
    # JHWrite.finished(True)
    # file = gzip.open()
    # file.write()
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,008
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi/QueryOperator.py
|
# -*- coding: utf-8 -*-
def query_operator(op, value):
    """Translate a comparison operator name into a MongoDB query fragment.

    String operators coerce value with str(); numeric operators coerce via
    int(float(value)) so "3.7"-style strings are accepted (and truncated).
    "is" and "nlike" pass the value through unchanged (original behavior,
    preserved).  Raises NotImplementedError for an unknown op.
    """
    def as_int(v):
        # Tolerates numeric strings like "3.7"; truncates toward zero.
        return int(float(v))

    handlers = {
        "is": lambda v: v,
        "nis": lambda v: {"$ne": str(v)},
        "like": lambda v: {"$regex": str(v)},
        # NOTE: "nlike" returns the raw value, exactly like "is".
        "nlike": lambda v: v,
        "startswith": lambda v: {"$regex": "^" + str(v)},
        "endswith": lambda v: {"$regex": str(v) + "$"},
        "eq": as_int,
        "ne": lambda v: {"$ne": as_int(v)},
        "lte": lambda v: {"$lte": as_int(v)},
        "lt": lambda v: {"$lt": as_int(v)},
        "gte": lambda v: {"$gte": as_int(v)},
        "gt": lambda v: {"$gt": as_int(v)},
    }
    try:
        handler = handlers[op]
    except KeyError:
        raise NotImplementedError("operator %s is invalid!" % (op, ))
    return handler(value)
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,009
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/dataservice/DBClient/MongoData.py
|
# -*- encoding: utf-8 -*-
from PyMongoClient import PyMongoClient
import time
import datetime
from SummaryDatatypes import DATATYPES
class MongoData(object):
    """Read-only user-metric queries (new / active users) over Mongo.

    Conventions shared by the query methods:
      * datatype / appkey -> Mongo database name (one per app key)
      * yyyymmddhh        -> hour-resolution timestamp string, e.g. "2016100613"
      * ver / pub         -> version / publish-channel filters; the literal
        string "all" disables the filter ({"$exists": True} matches any value)
    """
    def __init__(self):
        # Project-local connection wrapper (reads server config itself).
        self.client = PyMongoClient()
    # New users (first login within one specific hour).
    def newcomerCount(self, yyyymmddhh, datatype, ver, pub, collectionName="UserProfile"):
        """Count users whose firstLoginTime falls inside the given hour."""
        newcomer_num = 0
        ver = {"$exists": True} if ver == "all" else ver
        pub = {"$exists": True} if pub == "all" else pub
        # Hour window: [hh:00:00, next hour 00:00) as "%Y%m%d%H%M%S" strings.
        start_tm = datetime.datetime.strptime(yyyymmddhh, "%Y%m%d%H").strftime("%Y%m%d%H%M%S")
        end_tm = (datetime.datetime.strptime(yyyymmddhh, "%Y%m%d%H") + datetime.timedelta(hours=1)).strftime("%Y%m%d%H%M%S")
        query = {"comepub": pub, "comever": ver, "firstLoginTime": {"$gte": start_tm, "$lt": end_tm}}
        newcomer_num = self.client.find(datatype, collectionName, query).count()
        return newcomer_num
    # Same-day new-user trend, hourly (legacy variant).
    def newcomerCount_bak(self, users, appkey, collectionName="UserProfile", num=1):
        """Count how many of *users* were NOT already present *num* days ago."""
        users = list(set(users))
        partition_date = time.strftime("%Y%m%d", time.localtime(time.time()-86400*num))
        olduser_num = self.client.find(appkey, collectionName, {"jh_uid": {"$in": users}, "partition_date": partition_date}).count()
        newcomer_num = len(users) - olduser_num
        return newcomer_num
    def activeAddup(self, yyyymmddhh, datatype, ver, pub, collectionName="uvfile"):
        """Count active-user records for the day containing yyyymmddhh.
        The hour part of the input is ignored; uvfile stores tm as "YYYY-MM-DD"."""
        tm = "-".join([yyyymmddhh[:4], yyyymmddhh[4:6], yyyymmddhh[6:8]])
        ver = {"$exists": True} if ver == "all" else ver
        pub = {"$exists": True} if pub == "all" else pub
        activeuser_num = self.client.find(datatype, collectionName, {"tm": tm, "jhd_vr": ver, "jhd_pb": pub}).count()
        return activeuser_num
    def activeAddup_bak(self, yyyymmddhh, appkey, ver, pub, collectionName="UserEvent"):
        """Legacy aggregation: distinct active users from midnight up to the
        end of the given hour (returns None when the aggregation is empty)."""
        # query demo:
        # [
        # {$match: {$and: [{"partition_date": "20160929"}, {"jhd_ts": {$lte: 1475121599000.0}}, {"jhd_vr": "2.1.2"}, {
        # "jhd_pb": "appstore"}]}
        # },
        # {$group: {"_id": "$jhd_userkey"}},
        # {$group: {"total": {$sum: 1}, "_id": "alluser"}}
        # ]
        partition_date_con = {"partition_date": "".join([yyyymmddhh[:4], yyyymmddhh[4:6], yyyymmddhh[6:8]])}
        # Millisecond timestamp of the last second of the hour.
        end_ts = time.mktime(time.strptime(yyyymmddhh+"5959", "%Y%m%d%H%M%S")) * 1000
        ver_con = {"jhd_vr": {"$exists": True}} if ver == "all" else {"jhd_vr": ver}
        pub_con = {"jhd_pb": {"$exists": True}} if pub == "all" else {"jhd_pb": pub}
        ts_con = {"jhd_ts": {"$lte": end_ts}}
        query = []
        match = {"$match": {"$and": [ts_con, partition_date_con, ver_con, pub_con]}}
        group_1 = {"$group": {"_id": "$jhd_userkey"}}
        group_2 = {"$group": {"_id": "$all", "usercount": {"$sum": 1}}}
        query.append(match)
        query.append(group_1)
        query.append(group_2)
        conn = self.client.getConn()
        result = conn[appkey][collectionName].aggregate(query, allowDiskUse=True)
        # Single-row result: return the distinct user count from the first row.
        for item in result:
            return item["usercount"]
    def newcomerAddup(self, yyyymmddhh, datatype, ver, pub, collectionName="UserProfile"):
        """Count users whose first login is between 00:00:00 and hh:59:59
        of the day contained in yyyymmddhh."""
        newcomers = 0
        ver = {"$exists": True} if ver == "all" else ver
        pub = {"$exists": True} if pub == "all" else pub
        yyyymmdd = yyyymmddhh[:8]
        hh = yyyymmddhh[-2:]
        start_tm = "".join([yyyymmdd, "00", "0000"])
        end_tm = "".join([yyyymmdd, hh, "5959"])
        newcomers += self.client.find(datatype, collectionName, {"firstLoginTime": {"$gte": start_tm, "$lte": end_tm}, "comever": ver, "comepub": pub}).count()
        return newcomers
if __name__ == "__main__":
    # Manual smoke test against the live "biqu_all" database (Python 2 print
    # statements; requires a reachable Mongo server).
    tester = MongoData()
    yyyymmdd = "2016083018"
    users = set(["sdfas", "10:48:B1:1A:A6:B7"])
    print tester.newcomerCount("2016100613", "biqu_all", "all", "all")
    print tester.activeAddup("2016100613", "biqu_all", "all", "all")
    # print(tester.newcomerCount(users, "hbtv"))
    # print(tester.newcomerAddup(yyyymmdd, "hbtv"))
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,010
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/dataservice/DBClient/test.py
|
# Smoke test for ConfigParser round-tripping on Config.ini: dump the [db]
# section, then persist an updated db_pass value back to disk.
import ConfigParser
cf = ConfigParser.ConfigParser()
cf.read("Config.ini")
print(cf.sections())
print(cf.options("db"))
print(cf.items("db"))
cf.set("db", "db_pass", "xgmtes222222t")
# Write through a context manager so the handle is flushed and closed;
# the original passed a bare open() and leaked the file object.
with open("Config.ini", "w") as config_file:
    cf.write(config_file)
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,011
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi/tests.py
|
from django.test import TestCase
from MongoDatas import conn
# Create your tests here.
# Sample aggregation pipeline serialized as JSON.
# FIX: JSON requires double-quoted strings — the original literal used
# single quotes ('2016-06-16', '$jhd_userkey', 'all'), which json.loads
# rejects with a ValueError, so the script could never reach the final
# print.  Also renamed the variable: it shadowed the builtin `str`.
pipeline_json = '''[{"$match": {"$or": [{"tm": "2016-06-16", "item_count.in": {"$exists": true}}, \
{"tm": "2016-06-18", "item_count.ac23": {"$exists": true}}]}},\
{"$group": {"_id": "$jhd_userkey", "groupnum": {"$sum": 1}}},\
{"$match": {"groupnum": {"$gt": 1}}},\
{"$group": {"_id": "all", "total": {"$sum": 1}}}]'''
import json
print(pipeline_json)
json_obj = json.loads(pipeline_json)
print(json_obj)
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,012
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi_clickhouse/__init__.py
|
# -*- coding: utf-8 -*-
from os import sys, path
# Make the project root (the parent of this package's directory) importable
# so sibling packages resolve when this app is loaded standalone.
father_dir = path.abspath(path.join(path.dirname(path.abspath(__file__)), path.pardir))
sys.path.append(father_dir)
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,013
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi/migrations/0001_initial.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2017-05-10 11:36
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema migration: creates the api_cache table used to cache
    API responses keyed by request digest.  Auto-generated by Django 1.9.7;
    applied migrations should not be hand-edited."""
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='api_cache',
            fields=[
                # Surrogate primary key.
                ('rowid', models.AutoField(primary_key=True, serialize=False)),
                # When the cache row was inserted.
                ('inserttm', models.DateTimeField()),
                # Request digest — presumably the cache lookup key; confirm against CacheData.py.
                ('digest', models.CharField(max_length=512)),
                ('appkey', models.CharField(max_length=512)),
                ('api_id', models.CharField(max_length=512)),
                # Serialized request parameters.
                ('params', models.TextField()),
                # Cached response payload.
                ('data', models.TextField()),
                # Integer flag; semantics (valid/disabled?) not visible here — TODO confirm.
                ('enable', models.IntegerField()),
            ],
        ),
    ]
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,014
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi/DBClient/PyMongoClient.py
|
# -*- coding: utf-8 -*-
from __init__ import configPath
from pymongo import MongoClient
from pymongo.operations import *
from pymongo import ASCENDING
from pymongo import DESCENDING
import random
import ConfigParser
import itertools
class PyMongoClient(object):
    """pymongo.MongoClient wrapper with config-driven connection settings.

    NOTE(review): the class-level attributes below are read from the INI
    file at *import time*, but __init__ immediately overrides them
    per-instance from Config.mongo_server (selected by mongo_id) — the INI
    values appear to be legacy; confirm before removing.
    """
    cf = ConfigParser.ConfigParser()
    cf.read(configPath)
    mongo_ip = cf.get("mongodb", "mongo_ip")
    mongo_port = cf.getint("mongodb", "mongo_port")
    mongo_user = cf.get("mongodb", "mongo_user")
    mongo_passwd = cf.get("mongodb", "mongo_passwd")
    def __init__(self, mongo_id = 1, **kwargs):
        """Connect immediately; kwargs are forwarded to MongoClient
        (pool sizes and timeout options, see _getConn)."""
        self.load_mongo_config(mongo_id)
        self._conn = self._getConn(**kwargs)
    def load_mongo_config(self, mongo_id = 1):
        """Load host/port/credentials for server *mongo_id* from Config.mongo_server."""
        from Config import mongo_server
        mongo_info = mongo_server[mongo_id]
        self._url = mongo_info["mongo_ip"]
        self._port = mongo_info["mongo_port"]
        self._user = mongo_info["mongo_user"]
        self._passwd = mongo_info["mongo_passwd"]
    def _getConn(self, **kwargs):
        """Build the MongoClient; uses an authenticated URI when both user
        and password are non-empty, a plain host/port connection otherwise."""
        if self._passwd and self._user:
            connect_string = "mongodb://%(user)s:%(pwd)s@%(ip)s:%(port)s/" % {
                "user": self._user.strip(),
                "pwd": self._passwd.strip(),
                "ip": self._url.strip(),
                "port": str(self._port).strip(),
            }
            return MongoClient(connect_string,
                               document_class=dict,
                               tz_aware=False,
                               connect=True,
                               maxPoolSize = kwargs.get("maxPoolSize", 10),
                               socketTimeoutMS = kwargs.get("socketTimeoutMS", 500000),
                               connectTimeoutMS = kwargs.get("connectTimeoutMS", 20000),
                               serverSelectionTimeoutMS = kwargs.get("serverSelectionTimeoutMS", 300000),
                               waitQueueTimeoutMS = kwargs.get("waitQueueTimeoutMS", 10000),
                               waitQueueMultiple = kwargs.get("waitQueueMultiple", 2),
                               socketKeepAlive = kwargs.get("socketKeepAlive", False)
                               )
        else:
            return MongoClient(self._url,
                               int(self._port),
                               document_class=dict,
                               tz_aware=False,
                               connect=True,
                               maxPoolSize=kwargs.get("maxPoolSize", 10),
                               socketTimeoutMS=kwargs.get("socketTimeoutMS", 500000),
                               connectTimeoutMS=kwargs.get("connectTimeoutMS", 20000),
                               serverSelectionTimeoutMS=kwargs.get("serverSelectionTimeoutMS", 300000),
                               waitQueueTimeoutMS=kwargs.get("waitQueueTimeoutMS", 10000),
                               waitQueueMultiple=kwargs.get("waitQueueMultiple", 2),
                               socketKeepAlive=kwargs.get("socketKeepAlive", False)
                               )
    def getConn(self):
        """Expose the raw MongoClient for callers that need direct access."""
        return self._conn
    def findElemIn(self, dbname, collectionname, key, contains, conds, projection, step = 10000):
        """Yield documents whose *key* value is in *contains*, batching the
        $in list *step* items at a time so each query stays bounded.
        NOTE(review): mutates the caller's *conds* dict between batches."""
        length_total = len(contains)
        for part, index in zip(itertools.count(0), range(0, length_total, step)):
            part_x = contains[part*step: (part+1)*step]
            conds.update({key: {"$in": part_x}})
            cur = self._conn[dbname][collectionname].find(conds, projection)
            for item in cur:
                yield item
    def storeDaily(self, data, dbname, tablename, remove_dict):
        """Replace rows matching *remove_dict* with the values of *data*."""
        self._conn[dbname][tablename].remove(remove_dict) # delete old rows first
        for key in data: # insert the new rows
            tmp = data[key]
            # Random 0-1000 field added to every row — presumably for sampled
            # reads; TODO confirm against the readers of this collection.
            tmp["random"] = random.randint(0, 1000)
            self._conn[dbname][tablename].insert(tmp)
    def remove(self, dbname, tablename, remove_dict):
        """Delete every document matching *remove_dict*."""
        self._conn[dbname][tablename].remove(remove_dict) # delete
    def bulkWrite(self, dbName, collectionName, requests):
        """Apply *requests* (pymongo operations) in batches of 300.
        On any batch failure, returns the unsent tail (including the failed
        batch) so the caller can retry; returns None on full success."""
        step = 300
        collect = self._conn[dbName][collectionName]
        num = len(requests)
        for part, index in zip(itertools.count(0), range(0, num, step)):
            part_x = requests[part*step: (part+1)*step]
            try:
                collect.bulk_write(part_x)
            except:
                return requests[part*step:]
        return
        # return collect.bulk_write(requests)
    def find(self, dbName, collectionName, selector):
        """Plain find() pass-through returning a pymongo cursor."""
        collect = self._conn[dbName][collectionName]
        return collect.find(selector)
    def createIndex(self, dbName, collectionName, index):
        """Create a background index.  *index* is [(key, sort), ...] where
        sort >= 0 means ascending, negative means descending."""
        # index format: [(key, sort), ...]
        _index = []
        assert type(index) == type([])
        for key, _sort in index:
            assert isinstance(_sort, int)
            if _sort >= 0:
                _sort = ASCENDING
            else:
                _sort = DESCENDING
            _index.append((key, _sort))
        self._conn[dbName][collectionName].create_index(_index, background=True)
    def dropIndex(self, dbName, collectionName, index):
        """Drop an index by its conventional name ("key1_1_key2_-1") built
        from the same [(key, sort), ...] spec used by createIndex."""
        # index format: [(key, sort), ...]
        _index = []
        assert type(index) == type([])
        for key, _sort in index:
            assert isinstance(_sort, int)
            if _sort >= 0:
                _sort = "1"
            else:
                _sort = "-1"
            _index.append(key)
            _index.append(_sort)
        _index_name = "_".join(_index)
        self._conn[dbName][collectionName].drop_index(_index_name)
    def database_names(self):
        """List database names on the connected server."""
        return self._conn.database_names()
    def collection_names(self, tablename):
        """List user (non-system) collections in database *tablename*."""
        return self._conn[tablename].collection_names(include_system_collections=False)
if __name__ == "__main__":
    # Manual connectivity check (requires a reachable Mongo server).
    pmc = PyMongoClient()
    # pmc.dropIndex("biqu", "UserEvent", [("partition_date", -1)])
    # for item in pmc.findElemIn("jh", "UserIP", "_id", ["221.232.131.162"], {"city": {"$exists": True}}, {"_id": False}):
    #     print item
    # data = {
    #     "key1": {"values": ["sssss"]},
    #     "key2": {"values": [None, None]}
    # }
    client = pmc.getConn()
    print(client.database_names())
    # print(pmc.collection_names("biqu"))
    # db.collection_names(include_system_collections=False)
    # a = pmc.storeDaily(data, "hbtv", "test", {})
    # try:
    #     print("dddd", type(a), next(a))
    # except StopIteration:
    #     print("ssssss")
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,015
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi/crossEventMap.py
|
# coding: utf-8
from collections import OrderedDict
from pymongo import MongoClient
from config import mongo_ip
from config import mongo_port
import time
from MongoDatas import _sortByKey
from SaaSCommon.JHDecorator import fn_timer
from QueryOperator import query_operator
global conn
conn = MongoClient(mongo_ip, mongo_port)
# Production conversion endpoint (supports per-attribute "map" filters).
@fn_timer
def crossEventMap(datatype, params):
    """Cross-day event conversion.

    For each day in [startDay, endDay], count users who triggered the
    initial events (events[0]) and, of those, how many triggered the
    conversion events (events[1]) within the *windows*-day follow-up.
    Returns {day: [trigger_count, converted_count]} sorted by day, or an
    {"errinfo": ...} dict on bad parameters / ranges over 60 days.

    NOTE(review): relies on Python 2 semantics (dict.keys()[0] indexing
    and an eagerly-evaluated map()); not Python 3 safe as written.
    """
    global conn
    try:
        tm_str_s = params["startDay"]
        tm_str_e = params["endDay"]
        events = params["events"]
        # Initial (trigger) events.
        original_events = events[0]
        # Conversion (target) events.
        transform_events = events[1]
        last_num = int(params["windows"])
        tm_s_stamp = time.mktime(time.strptime(tm_str_s, "%Y-%m-%d"))
        tm_e_stamp = time.mktime(time.strptime(tm_str_e, "%Y-%m-%d"))
        attrs = params.get("attrs", {})
    except:
        return {"errinfo": "参数错误!"}
    if last_num > 60 or last_num < 0:
        return {"errinfo": "窗口期超出范围!"}
    num = (tm_e_stamp - tm_s_stamp)/86400
    if num > 60 or num < 0:
        return {"errinfo": "日期跨度超出范围!"}
    result = {}
    dbname = datatype
    collection_name = "uvfile"
    original_events_query = query_events(original_events)
    # transform_events_query = query_events(transform_events)
    query = []
    # Match stage: date range + optional attribute filters + trigger events.
    match = {"$match": OrderedDict()}
    match["$match"].setdefault("tm", {"$gte": tm_str_s, "$lte": tm_str_e})
    # Version filter: only a single version is supported; unwrap the list.
    if "jhd_vr" in attrs and isinstance(attrs["jhd_vr"], list):
        if len(attrs["jhd_vr"]) >= 1:
            attrs["jhd_vr"] = attrs["jhd_vr"][0]
        else:
            attrs.pop("jhd_vr")
    match["$match"].update(attrs)
    match["$match"].update(original_events_query)
    # Group stage: per-day trigger counts plus the matching user ids.
    group = {"$group": {}}
    group["$group"].setdefault("_id", "$tm")
    group["$group"].setdefault("user_count", {"$sum": 1})
    group["$group"].setdefault("uid", {"$push": "$jhd_userkey"})
    # Sort by day, descending.
    _sort = {"$sort": {"_id": -1}}
    query.append(match)
    query.append(group)
    query.append(_sort)
    # print original_events_query
    # print query
    query_result = [item for item in conn[dbname][collection_name].aggregate(query, allowDiskUse=True)]
    # For each day, count users converted inside the follow-up window.
    step_results = map(lambda item: _map_func(item, last_num, datatype, transform_events), query_result)
    for item in step_results:
        key = item.keys()[0]
        result.setdefault(key, item[key])
    return _sortByKey(result)
def _map_func(item, last_num, datatype, events):
    """For one day's aggregation row, count distinct users (from
    item["uid"]) who triggered *events* within the next *last_num* days.

    Returns {day: [trigger_count, converted_count]}.  Mutates *item* by
    deleting the bulky "uid" list once it has been consumed.
    """
    global conn
    tm = item["_id"]
    step_result = {tm: [item.get("user_count", 0)]}
    # End of the follow-up window, last_num days after tm.
    tm_e = time.strftime("%Y-%m-%d", time.localtime(time.mktime(time.strptime(tm, "%Y-%m-%d"))+86400*last_num))
    query = OrderedDict()
    # A 0-day window means "same day": include tm itself; otherwise the
    # window starts strictly after the trigger day.
    if last_num == 0:
        query.setdefault("tm", {"$gte": tm, "$lte": tm_e})
    else:
        query.setdefault("tm", {"$gt": tm, "$lte": tm_e})
    query.setdefault("jhd_userkey", {"$in": item["uid"]})
    transform_events_query = query_events(events)
    query.update(transform_events_query)
    query_result = conn[datatype]["uvfile"].distinct("jhd_userkey", query)
    del item["uid"]
    uv = len(query_result) if query_result else 0
    step_result[tm].append(uv)
    return step_result
def query_events(events):
    """Build a MongoDB $and-of-$or filter from a nested event selection.

    *events* is a list (AND level) of lists (OR level) of event dicts, e.g.
    [[{"id": "ac8", "attrs": [{"id": "id", "op": "like", "val": "135"}]}]].
    The pseudo-event id "jhddg_every" matches any record with an item_count
    field.  Attribute conditions are translated via query_operator and keyed
    as "item_count.<event>.maps.<attr>".
    """
    def _event_condition(event):
        # Translate one {"id": ..., "attrs": [...]} selector into a query dict.
        cond = {}
        event_id = event["id"]
        if event_id == "jhddg_every":
            cond["item_count"] = {"$exists": True}
            return cond
        attr_items = event["attrs"] if "attrs" in event else None
        if not attr_items:
            # No attribute filters: just require the event to exist.
            cond.setdefault("item_count.%(event)s" % {"event": event_id}, {"$exists": True})
            return cond
        for attr in attr_items:
            # Skip empty attribute entries and entries without an id.
            if not attr:
                continue
            if not attr["id"]:
                continue
            field = "item_count.%(event)s.maps.%(map_key)s" % {"event": event_id, "map_key": attr["id"]}
            cond.setdefault(field, query_operator(attr["op"], attr["val"]))
        return cond

    return {"$and": [{"$or": [_event_condition(e) for e in or_group]}
                     for or_group in events]}
if __name__ == "__main__":
    # Manual smoke test against the live "biqu" database (Python 2 print;
    # requires a reachable Mongo server).
    # print query_events([[{"id":"ac8","attrs":[{"id":"id","op":"like","val":"135"}, {"id":"id1","op":"eq","val":"135"}]},
    #     {"id": "ac9","attrs": [{"id": "id", "op": "like", "val": "135"}, {"id": "id1", "op": "eq", "val": "135"}]}],
    #     [{"id": "ac8",
    #     "attrs": [{"id": "id", "op": "like", "val": "135"}, {"id": "id1", "op": "eq", "val": "135"}]}]])
    print crossEventMap("biqu", {"windows":7,"endDay":"2017-01-08","events":[[[{"id":"ac36"}]],[[{"id":"jhddg_every"}]]],"startDay":"2017-01-01"})
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,016
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi/crossEvent.py
|
# coding: utf-8
from pymongo import MongoClient
from config import mongo_ip
from config import mongo_port
import time
from MongoDatas import _sortByKey
from SaaSCommon.JHDecorator import fn_timer
global conn
conn = MongoClient(mongo_ip, mongo_port)
# Production conversion endpoint (no "map" attribute filtering).
@fn_timer
def eventsCrossCombine(datatype, tm_str_s, tm_str_e, last_num, events = []):
    """Cross-day conversion for one event pair events == [trigger, target].

    For each day in [tm_str_s, tm_str_e] count users who triggered
    events[0], then count how many of them triggered events[1] within the
    next *last_num* days.  The pseudo-event "jhddg_every" matches any
    activity.  Returns {day: [trigger_count, converted_count]} sorted by
    day, or ["Out of date range"] when the span exceeds 90 days or is <= 0.

    NOTE(review): mutable default `events=[]` (never mutated here, but
    fragile) and Python 2-only idioms (dict.keys()[0], eager map()).
    """
    global conn
    tm_s_stamp = time.mktime(time.strptime(tm_str_s, "%Y-%m-%d"))
    tm_e_stamp = time.mktime(time.strptime(tm_str_e, "%Y-%m-%d"))
    num = (tm_e_stamp - tm_s_stamp)/86400
    if num > 90 or num <= 0:
        return ["Out of date range"]
    dbname = datatype
    collection_name = "uvfile"
    result = {}
    query = []
    # Match stage: date range plus the trigger event (unless "any activity").
    match = {"$match": {}}
    match["$match"].setdefault("tm", {"$gte": tm_str_s, "$lte": tm_str_e})
    if events[0] != "jhddg_every":
        match["$match"].setdefault("item_count.%s"%events[0], {"$exists": True})
    # Group stage: per-day trigger counts plus the matching user ids.
    group = {"$group": {}}
    group["$group"].setdefault("_id", "$tm")
    group["$group"].setdefault(events[0], {"$sum": 1})
    group["$group"].setdefault("uid", {"$push": "$jhd_userkey"})
    # Sort by day, descending.
    _sort = {"$sort": {"_id": -1}}
    query.append(match)
    query.append(group)
    query.append(_sort)
    query_result = [item for item in conn[dbname][collection_name].aggregate(query, allowDiskUse=True)]
    # For each day, count users converted inside the follow-up window.
    step_results = map(lambda item: _map_func(item, last_num, datatype, events), query_result)
    for item in step_results:
        key = item.keys()[0]
        result.setdefault(key, item[key])
    return _sortByKey(result)
def _map_func(item, last_num, datatype, events):
    """For one day's aggregation row, count how many of that day's users
    triggered events[1] within the following last_num days.

    Returns {day: [triggered_count, revisit_uv]}.  Mutates `item` by
    deleting its "uid" list once consumed.
    """
    global conn
    day = item["_id"]
    counts = [item.get(events[0], 0)]
    # End of the revisit window: day + last_num days, as "YYYY-mm-dd".
    window_end = time.strftime(
        "%Y-%m-%d",
        time.localtime(time.mktime(time.strptime(day, "%Y-%m-%d")) + 86400 * last_num))
    query = {
        "tm": {"$gt": day, "$lte": window_end},
        "jhd_userkey": {"$in": item["uid"]},
    }
    if events[1] != "jhddg_every":
        query["item_count.%s" % events[1]] = {"$exists": True}
    revisitors = conn[datatype]["uvfile"].distinct("jhd_userkey", query)
    del item["uid"]
    counts.append(len(revisitors) if revisitors else 0)
    return {day: counts}
if __name__ == "__main__":
    # Manual smoke test against a live MongoDB instance.
    # Fixed: the Python-2-only `print x` statement is replaced with the
    # parenthesized form, which is valid in both Python 2 and Python 3.
    print(eventsCrossCombine("biqu", "2016-09-20", "2016-10-27", 3, ["ac21", "ac27"]))
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,017
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi/urls.py
|
# coding: utf-8
"""jhddgapi URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from django.conf.urls import include
from saasapi import views
# URL routes for the Mongo-backed analytics API (regex patterns unchanged).
urlpatterns = [
    url(r'^eventserise/(?P<datatype>\w+)/(?P<s_tm>[\d-]+)/(?P<e_tm>[\d-]+)/(?P<events_quote>.+)/$', views.getEventsSeries),
    url(r'^eventserisesingle/(?P<datatype>\w+)/(?P<s_tm>[\d-]+)/(?P<e_tm>[\d-]+)/(?P<events_quote>.+)/$', views.getFunnelOld),  # event funnel (legacy)
    url(r'^funnel/(?P<datatype>\w+)/(?P<params>.+)/$', views.getFunnel),  # event funnel (supports map attrs), production
    # url(r'^eventsremain/(?P<datatype>\w+)/(?P<s_tm>[\d-]+)/(?P<last_tm>\d+)/(?P<events_quote>.+)/$', views.getEventsRemain),  # event retention (old route)
    url(r'^eventsremain/(?P<datatype>\w+)/(?P<params>.+)/$', views.getEventsRemain),  # event retention (supports map attrs), production
    url(r'^eventsremaincombine/(?P<datatype>\w+)/(?P<events_quote>.+)/$', views.getEventsRemainCombine),  # event retention (multi-day, new format)
    url(r'^eventslast/(?P<datatype>\w+)/(?P<s_tm>[\d-]+)/(?P<e_tm>[\d-]+)/(?P<last_tm>\d+)/(?P<events_quote>.+)/$', views.getEventsCrossCombine),  # cross events (across days)
    url(r'^crossevent/(?P<datatype>\w+)/(?P<params>.+)/$', views.getCrossEventMap),  # cross events (supports map attrs), production
    url(r'^usersample/(?P<datatype>\w+)/(?P<s_tm>[\d-]+)/$', views.getSample),
    url(r'^usersample_lite/(?P<datatype>\w+)/(?P<s_tm>[\d-]+)/$', views.getSample_lite),
    url(r'^search/(?P<datatype>\w+)/(?P<dayStr>[\d-]+)/(?P<hour_s>\d{1,2})/(?P<hour_e>\d{1,2})/(?P<base_cond>\{.*\})/$', \
        views.search),
    url(r'^rtsample/(?P<datatype>\w+)/(?P<conds>\{.*\})/$', views.rtSample),  # user sampling, search endpoint
    url(r'^eventsummary/(?P<datatype>\w+)/(?P<conds>\{.*\})/$', views.getEventSummary),  # summarize event data over a period
    url(r'^roundtrips/$', views.getRoundAirelines),  # round-trip flight data
    url(r'^roundtrip_interval/(?P<params>\{.*\})/$', views.getRoundFlightInterval),  # round-trip flight interval in days
]
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,018
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi/IPtoLoc/iploc_demo.py
|
# coding: utf-8
import IPtoAreaFinals
from __init__ import ipdataPath
# from SaaSConfig.config import ipdata_path
# Lazily-initialized IP database; loaded from ipdataPath on first getLoc call.
global initarry
initarry = None
def getLoc(ip):
    """Resolve an IP string to a ("unknown", province, city) tuple.

    The first element is a fixed placeholder; the other two come from
    IPtoAreaFinals.getlocid.  The IP database is loaded lazily on first use.
    """
    global initarry
    try:
        # Strip surrounding double quotes (IPs sometimes arrive quoted).
        ip = ip.strip("\"")
    except Exception:
        # Fixed: was `print(traceback.print_exc())`, which printed "None"
        # because print_exc() already writes the traceback and returns None.
        # Also narrowed the bare `except:` so SystemExit etc. are not eaten.
        import traceback
        traceback.print_exc()
    if initarry is None:
        initarry = IPtoAreaFinals.load(ipdataPath)
    locs = IPtoAreaFinals.getlocid(ip, initarry, type = "for_more")
    return ("unknown", locs[0], locs[1])
# prov, city = IPtoAreaFinals.getlocid("8.8.8.8", initarry, type = "for_more")
if __name__ == "__main__":
    # Manual smoke test: resolve a known public IP and show the tuple shape.
    import os
    print(os.getcwd())
    loc = getLoc("101.201.145.120")
    print(type(loc), loc)
    print(loc[0])
    print(loc[1])
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,019
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi_clickhouse/Funnel.py
|
# -*- coding: utf-8 -*-
import __init__
import time
import sys
import logging
import json
import datetime
import copy
import threading
from os import sys, path
from collections import OrderedDict
from Query import Query
from SaaSCommon.JHDecorator import fn_timer
from ClickHouseClient.ClickHouseClient import ClickHouseClient
from CacheDecorator import common_cache_decorator
# Python 2 hack: force the process default string encoding to UTF-8 so the
# Chinese text embedded in queries/log messages does not raise UnicodeError.
reload(sys)
sys.setdefaultencoding("utf-8")
# Append INFO-level logs to logs/api.log next to this module.
logging.basicConfig(level=logging.INFO,
                    format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
                    datefmt='%a, %d %b %Y %H:%M:%S',
                    filename=path.dirname(path.abspath(__file__)) + "/logs/" + "api.log",
                    filemode='a')
logger = logging.getLogger(__file__)
# Identifier for this API, used as a cache namespace (see common_cache_decorator).
global api_id
api_id = "funnel"
class Funnel(Query):
    '''
    Funnel analysis over ClickHouse event data (one sequenceMatch per step).

    Input params example:
    {
        "funnel":[[{"id":"jhddg_every"}],[{"id":"ac2"}],[{"id":"ac8","attrs":[{"id":"id","op":"like","val":"135"}]}],[{"id":"ac49","attrs":[{"id":"type","op":"nis","val":"经停"},{"id":"wf","op":"is","val":"0"},{"id":"st","op":"endswith","val":"0"}]}],[{"id":"ac50","attrs":[{"id":"name","op":"endswith","val":"亮"},{"id":"hbh","op":"startswith","val":"MU"}]}],[{"id":"ac53","attrs":[{"id":"op","op":"nis","val":"1"}]}],[{"id":"ac55","attrs":[{"id":"type","op":"is","val":"微信"}]}]],
        "endDay":"2017-01-12",
        "startDay":"2017-01-05",
        # optional: base user attributes
        "attrs": {...},
    }
    Output: {day: [uv per funnel step]}, e.g.:
    {
        "2016-06-17": [
            1717,
            1105,
            1105
        ],
        "2016-06-16": [
            1753,
            1123,
            1123
        ]
    }
    '''
    def __init__(self):
        # Step indexes of the wildcard "any event" (jhddg_every) steps;
        # filled in by fragment_sequenceMatch, consumed by run_query.
        self.every_event_indexes = []

    # NOTE: two earlier commented-out drafts of create_query_sql (an
    # array-join based multi-day variant and a single-day variant) plus a
    # commented-out alternate `query = ...` assignment were removed here for
    # readability -- see version control history.

    def create_query_sql(self, db_name, start_day, end_day, funnel, attrs=None):
        """Build the funnel SQL: one cumulative sequenceMatch column per
        funnel step, evaluated per user and summed per day (partition).

        :param db_name: appkey/datatype, used as the ClickHouse database name
        :param start_day: inclusive start date, "yyyy-mm-dd"
        :param end_day: inclusive end date, "yyyy-mm-dd"
        :param funnel: [[{id:..., attrs:[{id,op,val}, ...]}, ...(OR)], ...steps]
        :param attrs: base user attributes, e.g. {"jhd_pb": "appstore"}
        :return: SQL string
        """
        if attrs is None:
            attrs = {}
        columns_format = "sum(num_%(step)d) as step_%(step)d"
        query_format = "select partition, %(columns)s from ( \
            select partition, %(sequenceMatch)s \
            from %(db_name)s.userevent \
            %(where)s \
            group by partition, jhd_userkey) \
            group by partition"
        events = self.get_events(funnel)
        length, sequenceMatch = self.fragment_sequenceMatch(funnel)
        columns = ", ".join([columns_format % {"step": i+1} for i in range(0, length)])
        # NOTE(review): due to conditional-expression precedence, when
        # fragment_where(attrs) is non-empty this whole expression becomes
        # "where <attrs> and" WITHOUT the partition/event filters (dangling
        # "and", no date range).  Looks like a bug -- confirm before relying
        # on the attrs branch.
        _where = "where " + self.fragment_where(attrs) + ' and' if self.fragment_where(attrs) else "where " + \
            " partition between '%(start_day)s' and '%(end_day)s'" % {"start_day": start_day, "end_day": end_day} + \
            " and jhd_eventId in (%(map_events)s)" % {"map_events": "".join(["'", "', '".join(events), "'"])}
        query = query_format % {"columns": columns, "sequenceMatch": sequenceMatch, \
            "where": _where, "db_name": db_name}
        logger.info(query)
        return query

    def fragment_events(self, funnel):
        # Return "jhd_eventId in ('a','b',...)" covering every event id
        # appearing anywhere in the funnel.
        events = set()
        for step in funnel:
            for event_info in step:
                event_id = event_info["id"]
                events.add(event_id)
        return " ".join(["jhd_eventId", "in", "("+"'"+"','".join(list(events))+"'"+")"])

    def get_map_events(self, funnel):
        # Ids of events that carry map-attribute ("attrs") filters.
        events = set()
        for step in funnel:
            for event_info in step:
                event_id = event_info["id"]
                if "attrs" in event_info and len(event_info["attrs"]) > 0:
                    events.add(event_id)
        return list(events)

    def get_events(self, funnel):
        # Distinct event ids appearing anywhere in the funnel.
        events = set()
        for step in funnel:
            for event_info in step:
                event_id = event_info["id"]
                events.add(event_id)
        return list(events)

    def get_users_query(self, db_name, start_day, end_day, attrs = None):
        # SQL counting distinct users per day; used as the uv for wildcard
        # ("any event") funnel steps.
        if attrs == None:
            attrs = {}
        _where = " and " + self.fragment_where(attrs)
        sql_format = "select partition, count(DISTINCT(jhd_userkey)) as uv \
            from %(db_name)s.userevent \
            where (partition between toDate('%(start_day)s') and toDate('%(end_day)s'))%(_attrs_where)s \
            group by partition \
            order by partition"
        sql = sql_format % {"db_name": db_name, "start_day": start_day, "end_day": end_day, "_attrs_where": _where if bool(attrs) else " "}
        logger.info(sql)
        return sql

    # Build the sequenceMatch conditions
    def fragment_sequenceMatch(self, funnel):
        """Return (step_count, "seqMatch_1, seqMatch_2, ...").

        One cumulative sequenceMatch('(?1).*(?2)...') expression per funnel
        step; events inside a step are ORed, an event's map-attribute
        conditions are ANDed.  Side effect: wildcard-only steps are recorded
        in self.every_event_indexes and produce no condition here.
        Example fragment:
        sequenceMatch('(?1).*(?2).*(?3)')(jhd_opTime, jhd_eventId = jhd_eventId, jhd_eventId = 'ac86' or jhd_eventId = 'ac87', jhd_eventId = 'ac88') as num_0
        """
        sequenceMatch_format = "sequenceMatch('%(pattern)s')(jhd_opTime, %(cond)s) as num_%(step)d"
        sequenceMatch_lis = []
        pattern_format = "(?%(index)d)"
        cond_eventid_format = "jhd_eventId = '%(event_id)s'"
        cond_map_format = "%(visit_params)s(jhd_map, '%(mapkey)s') %(operator)s"
        cond_lis = []
        for step, funnel_step in enumerate(funnel):
            step_event_or = []
            # Build each event's condition, plus its map-key conditions (if any)
            for event_data in funnel_step:
                event_mapkey_and = []
                event_id = event_data["id"]
                # Special-case the wildcard event jhddg_every
                if event_id == "jhddg_every":
                    # "jhd_eventId = jhd_eventId" triggers the ClickHouse
                    # sequence_match_max_iterations > 1000000 bug, so match
                    # any event id with LIKE '%' instead.
                    cond_eventid = "jhd_eventId LIKE %(event_id)s" % {"event_id": "'%%'"}
                    event_mapkey_and.append(cond_eventid)
                    if len(funnel_step) == 1:
                        if step not in self.every_event_indexes:
                            self.every_event_indexes.append(step)
                        # wildcard-only step: handled separately, emit no cond
                        continue
                else:
                    cond_eventid = cond_eventid_format % {"event_id": event_id}
                    event_mapkey_and.append(cond_eventid)
                # Map-key conditions (if any); multiple conditions are ANDed
                if "attrs" in event_data and event_data["attrs"]:
                    for map_item in event_data["attrs"]:
                        mapkey = map_item["id"]
                        op = map_item["op"]
                        mapvalue = map_item["val"]
                        visit_params, operator = self.query_operator(op, mapvalue)
                        cond_map = cond_map_format % {"visit_params": visit_params, "mapkey": mapkey, "operator": operator}
                        event_mapkey_and.append(cond_map)
                # event id + map conditions for one event
                step_event_or.append("(" + " and ".join(event_mapkey_and) + ")")
            # events within one step are ORed together
            if bool(step_event_or):
                cond_lis.append("(" + " or ".join(step_event_or) + ")")
        for index, item in enumerate(cond_lis):
            pattern_lis = [pattern_format % {"index": i+1} for i in range(0, index+1)]
            pattern = ".*".join(pattern_lis)
            # Padding: sequenceMatch needs at least two conditions.
            cond = ", ".join(cond_lis[:index+1]) if index != 0 else ", ".join(cond_lis[:index+1] + ["(jhd_eventId <> '')"])
            sequenceMatch = sequenceMatch_format % {"pattern": pattern, "cond": cond, "step": index+1}
            sequenceMatch_lis.append(sequenceMatch)
        return (len(sequenceMatch_lis), ", ".join(sequenceMatch_lis))

    def funnel_length(self, funnel):
        # Number of concrete (non-wildcard-only) steps, i.e. the number of
        # sequenceMatch columns emitted.  NOTE: shares fragment_sequenceMatch's
        # side effect on self.every_event_indexes.
        return self.fragment_sequenceMatch(funnel)[0]

    def run_query(self, datatype, params, result):
        """Execute the funnel for the (usually single-day) range in params and
        merge rows into `result` ({day: [step uvs]}).

        Returns an {"errinfo": ...} dict on failure, None on success (the
        result dict is mutated in place).
        """
        try:
            params = params if isinstance(params, dict) else json.loads(params)
            start_day = params["startDay"]
            end_day = params["endDay"]
            funnel = params["funnel"]
            attrs = params.get("attrs", {})
            # The query range is capped at 60 days
            tm_s_stamp = time.mktime(time.strptime(start_day, "%Y-%m-%d"))
            tm_e_stamp = time.mktime(time.strptime(end_day, "%Y-%m-%d"))
            num = (tm_e_stamp - tm_s_stamp) / 86400
            if num > 60 or num < 0:
                return {"errinfo": "日期跨度超出范围!"}
        except:
            import sys
            import traceback
            exc_type, exc_value, exc_traceback = sys.exc_info()
            errinfo = traceback.format_exception(exc_type, exc_value, exc_traceback)
            logger.error(json.dumps(errinfo))
            return {"errinfo": "传递参数错误!"}
        try:
            try:
                # Normalize the generated SQL's encoding (Python 2 str/unicode)
                query = str(self.create_query_sql(datatype, start_day, end_day, funnel, attrs).decode("utf-8"))
            except:
                query = self.create_query_sql(datatype, start_day, end_day, funnel, attrs).decode("utf-8").encode("utf-8")
            funnel_length = self.funnel_length(funnel)
        except:
            import sys
            import traceback
            exc_type, exc_value, exc_traceback = sys.exc_info()
            errinfo = traceback.format_exception(exc_type, exc_value, exc_traceback)
            logger.error(json.dumps(errinfo))
            return {"errinfo": "生成查询错误!"}
        logger.info(query)
        try:
            client = ClickHouseClient()
            for row in client.select(datatype, query):
                day = row.partition.strftime("%Y-%m-%d")
                # NOTE(review): eval() over "row.step_N" -- getattr would be
                # safer; left untouched here.
                funnel_step_user = [eval("row.step_%d" % (step+1, )) for step in range(0, funnel_length)]
                result.setdefault(day, funnel_step_user)
            # Handle wildcard "any event" steps separately
            if len(self.every_event_indexes) != 0 and self.every_event_indexes[0] == 0:
                users_query = self.get_users_query(datatype, start_day, end_day, attrs)
                for row in client.select(datatype, users_query):
                    day = row.partition.strftime("%Y-%m-%d")
                    uv = row.uv
                    for index in self.every_event_indexes:
                        # wildcard at step one: use the day's total uv
                        if index == 0:
                            result[day].insert(index, uv)
                        # wildcard at a later step: copy the previous step's value
                        else:
                            result[day].insert(index, result[day][index-1])
        except:
            import sys
            import traceback
            exc_type, exc_value, exc_traceback = sys.exc_info()
            errinfo = traceback.format_exception(exc_type, exc_value, exc_traceback)
            logger.error(json.dumps(errinfo))
            return {"errinfo": "查询错误!"}

    @fn_timer
    @common_cache_decorator("funnel")
    def data(self, datatype, params, interval = 0):
        """Public entry point: fan out run_query one thread per day (at most
        ~10 alive), then return {day: [step uvs]} sorted by day descending.

        :param interval: seconds to sleep between spawning day-threads
        """
        result = OrderedDict([])
        _threads = []
        params = params if isinstance(params, dict) else json.loads(params)
        start_day = params["startDay"]
        end_day = params["endDay"]
        start_date = datetime.datetime.strptime(start_day, "%Y-%m-%d").date()
        end_date = datetime.datetime.strptime(end_day, "%Y-%m-%d").date()
        while start_date <= end_date:
            try:
                # One thread per single day: copy params, narrow both bounds.
                params_copy = copy.deepcopy(params)
                params_copy["startDay"] = start_date.strftime("%Y-%m-%d")
                params_copy["endDay"] = start_date.strftime("%Y-%m-%d")
                task = threading.Thread(target=self.run_query, args=(datatype, params_copy, result))
                task.start()
                _threads.append(task)
                # Throttle: with more than 10 threads alive, wait for one.
                # (isAlive is the Python 2 / <3.9 spelling.)
                if sum(map(lambda t: 1 if t.isAlive() else 0, _threads)) > 10:
                    for _t in _threads:
                        if _t.isAlive():
                            _t.join()
                            break
                time.sleep(interval)
                start_date += datetime.timedelta(days=1)
            # NOTE(review): a failure before the increment above would retry
            # the same date forever -- confirm this loop cannot spin.
            except:
                import sys
                import traceback
                exc_type, exc_value, exc_traceback = sys.exc_info()
                errinfo = traceback.format_exception(exc_type, exc_value, exc_traceback)
                logger.error(json.dumps(errinfo))
        for _thread in _threads:
            _thread.join()
        # Sort days in descending order (iteritems is Python 2 only).
        result_sorted = OrderedDict(sorted(result.iteritems(), key = lambda item: item[0], reverse=True))
        return result_sorted
if __name__ == "__main__":
    # Manual smoke test against a live ClickHouse instance (Python 2 prints).
    # (Several alternative funnel/attrs/query_info fixtures were kept here as
    # commented-out code; condensed for readability -- see version control
    # history.)
    query_info = {"funnel":[[{"id":"jhddg_every","name":"启动应用"}],[{"id":"h5_index_nav_ucenter","name":"首页下方点击“我的”按钮"}]],"endDay":"2017-05-08","startDay":"2017-05-01"}
    tester = Funnel()
    a = time.time()
    print json.dumps(tester.data("ncf_ws", query_info), ensure_ascii=False)
    print time.time() - a
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,020
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi_clickhouse/infi/clickhouse_orm/engines.py
|
class Engine(object):
    """Base class for ClickHouse table engines; subclasses emit the ENGINE
    clause used in CREATE TABLE statements."""
    def create_table_sql(self):
        # Subclasses must return the engine clause, e.g. "MergeTree(...)".
        raise NotImplementedError()
class MergeTree(Engine):
    """Builder for the MergeTree-family ENGINE clause.

    When both replica_table_path and replica_name are given, the engine is
    emitted as Replicated<Name>(path, name, ...).
    """

    def __init__(self, date_col, key_cols, sampling_expr=None,
                 index_granularity=8192, replica_table_path=None, replica_name=None):
        # Resolves the long-standing TODO: a replicated engine needs BOTH the
        # ZooKeeper table path and the replica name; supplying exactly one of
        # them would silently produce a broken clause, so fail fast instead.
        if (replica_table_path is None) != (replica_name is None):
            raise ValueError('replica_table_path and replica_name must be given together')
        self.date_col = date_col
        self.key_cols = key_cols
        self.sampling_expr = sampling_expr
        self.index_granularity = index_granularity
        self.replica_table_path = replica_table_path
        self.replica_name = replica_name

    def create_table_sql(self):
        """Return the ENGINE clause, e.g. "MergeTree(EventDate, (a, b), 8192)"."""
        name = self.__class__.__name__
        if self.replica_name:
            name = 'Replicated' + name
        params = self._build_sql_params()
        return '%s(%s)' % (name, ', '.join(params))

    def _build_sql_params(self):
        """Assemble the positional engine parameters in ClickHouse order:
        [replica path+name,] date column, [sampling expr,] key tuple,
        index granularity."""
        params = []
        if self.replica_name:
            params += ["'%s'" % self.replica_table_path, "'%s'" % self.replica_name]
        params.append(self.date_col)
        if self.sampling_expr:
            params.append(self.sampling_expr)
        params.append('(%s)' % ', '.join(self.key_cols))
        params.append(str(self.index_granularity))
        return params
class CollapsingMergeTree(MergeTree):
    """MergeTree variant that collapses row pairs by a sign column."""

    def __init__(self, date_col, key_cols, sign_col, sampling_expr=None,
                 index_granularity=8192, replica_table_path=None, replica_name=None):
        super(CollapsingMergeTree, self).__init__(
            date_col, key_cols, sampling_expr, index_granularity,
            replica_table_path, replica_name)
        self.sign_col = sign_col

    def _build_sql_params(self):
        """Append the sign column after the base MergeTree parameters."""
        base_params = super(CollapsingMergeTree, self)._build_sql_params()
        return base_params + [self.sign_col]
class SummingMergeTree(MergeTree):
    """MergeTree variant that pre-aggregates numeric columns on merge."""

    def __init__(self, date_col, key_cols, summing_cols=None, sampling_expr=None,
                 index_granularity=8192, replica_table_path=None, replica_name=None):
        super(SummingMergeTree, self).__init__(
            date_col, key_cols, sampling_expr, index_granularity,
            replica_table_path, replica_name)
        self.summing_cols = summing_cols

    def _build_sql_params(self):
        """Optionally append the tuple of summed columns to the base params."""
        base_params = super(SummingMergeTree, self)._build_sql_params()
        if self.summing_cols:
            base_params.append('(%s)' % ', '.join(self.summing_cols))
        return base_params
class Merge(Engine):
    """Engine reading from every table in `database` whose name matches
    `tablePattern` (a regex)."""

    def __init__(self, database, tablePattern):
        # `escape` lives at different paths depending on how the package is
        # installed, hence the import fallback.
        try:
            from infi.clickhouse_orm.utils import escape
        except:
            from src.infi.clickhouse_orm.utils import escape
        self.database = escape(database, True)
        self.tablePattern = escape(tablePattern, True)

    def create_table_sql(self):
        """Return e.g. "Merge('db', '^pattern')"."""
        clause_params = ', '.join(self._build_sql_params())
        return '%s(%s)' % (self.__class__.__name__, clause_params)

    def _build_sql_params(self):
        """Escaped database name followed by the escaped table regex."""
        return [str(self.database), str(self.tablePattern)]
class ReplacingMergeTree(Engine):
    """Engine that deduplicates rows sharing the same primary key, keeping
    the row with the greatest version-column value."""

    def __init__(self, date_col, key_cols, ver_col, index_granularity=8192):
        self.date_col = date_col
        self.key_cols = key_cols
        self.ver_col = ver_col
        self.index_granularity = index_granularity

    def create_table_sql(self):
        """Return e.g. "ReplacingMergeTree(EventDate, (OrderID, EventDate), 8192, ver)"."""
        clause = ', '.join(self._build_sql_params())
        return '%s(%s)' % (self.__class__.__name__, clause)

    def _build_sql_params(self):
        """Positional parameters: date column, key tuple, granularity, version column."""
        return [
            self.date_col,
            '(%s)' % ', '.join(self.key_cols),
            str(self.index_granularity),
            str(self.ver_col),
        ]
if __name__ == "__main__":
    # Manual smoke test: print a generated engine clause.
    # tester = Merge("test", "^user_event")
    tester = ReplacingMergeTree("partition", ("jhd_userkey", "jhd_opTime"), "jhd_userkey")
    print(tester.create_table_sql())
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,021
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi/eventRemainMap.py
|
# coding: utf-8
import time
import datetime
import threading
from collections import OrderedDict
from pymongo import MongoClient
from config import mongo_ip
from config import mongo_port
from MongoDatas import _sortByKey
from crossEventMap import query_events
from SaaSCommon.JHDecorator import fn_timer
# Single module-level MongoDB connection, shared by every query helper below.
global conn
conn = MongoClient(mongo_ip, mongo_port)
@fn_timer
def eventsRemainMap(datatype, params):
    """Compute day-by-day event retention over [startDay, endDay].

    params: {"startDay", "endDay", "remain" (window in days), "events"}.
    Spawns one worker thread per start day (thread_func appends a row per
    day into `result`) and returns the rows sorted ascending by "tm".
    On bad input returns an {"errinfo": ...} dict instead.
    """
    try:
        tm_s = params["startDay"]
        tm_e = params["endDay"]
        last_num = params["remain"]
        events = params["events"]
    except:
        return {"errinfo": "参数错误!"}
    span_days = (time.mktime(time.strptime(tm_e, "%Y-%m-%d"))
                 - time.mktime(time.strptime(tm_s, "%Y-%m-%d"))) / 86400
    if not 0 <= span_days <= 60:
        return {"errinfo": "日期跨度超出范围!"}
    if not 0 < last_num <= 60:
        return {"errinfo": "窗口期超出范围!"}
    cursor = datetime.datetime.strptime(tm_s, "%Y-%m-%d")
    final_day = datetime.datetime.strptime(tm_e, "%Y-%m-%d")
    today = datetime.datetime.today()
    result = []
    workers = []
    while cursor <= final_day:
        worker = threading.Thread(
            target=thread_func,
            args=(datatype, cursor.strftime("%Y-%m-%d"), last_num, today, events, result))
        worker.start()
        cursor += datetime.timedelta(days=1)
        workers.append(worker)
    for worker in workers:
        worker.join()
    return sorted(result, key=lambda entry: entry["tm"])
def thread_func(datatype, tm, last_num, today, events, result):
    """Worker: build one retention row for start day `tm` and append it to
    the shared `result` list.

    Row format: {"tm": day, "numbers": [...], "length": n} where numbers[i]
    is the revisit count on day tm+i (0 when no revisits on a past day,
    "" for days still in the future).
    """
    entry = {"tm": tm}
    # {day: revisit uv}, ascending by day; includes tm itself (day-0 uv).
    remain_by_day = eventsRemain(datatype, tm, last_num, events)
    base = datetime.datetime.strptime(tm, "%Y-%m-%d")
    today_str = today.strftime("%Y-%m-%d")
    numbers = []
    for offset in range(last_num + 1):
        day = (base + datetime.timedelta(days=offset)).strftime("%Y-%m-%d")
        if day in remain_by_day:
            numbers.append(remain_by_day[day])
        else:
            # past day with no revisits -> 0; future day -> blank
            numbers.append(0 if today_str > day else "")
    entry["numbers"] = numbers
    entry["length"] = len(numbers)
    result.append(entry)
def eventsRemain(datatype, tm_0, last_num, events):
    """Retention for one start day: users who triggered events[0] on tm_0,
    counted again for each later day (up to last_num days, capped at
    yesterday) on which they triggered events[1].

    Returns {day: uv} sorted by key; day tm_0 maps to the base cohort size.
    """
    global conn
    assert last_num >= 1
    assert type(events) == type([]) and len(events) >= 1
    today = datetime.datetime.today()
    # events format: [[{id:..., attrs:[{id,op,val}, ...]}, (OR)], [(AND)]]
    first_event = events[0]
    revisit_event = events[1]
    # Cohort: users who triggered the first event on tm_0.
    query = query_events(first_event)
    query.setdefault("tm", tm_0)
    rows = list(conn[datatype]["uvfile"].find(query, {"jhd_userkey": 1}))
    uids = tuple(row["jhd_userkey"] for row in rows)
    base = datetime.datetime.strptime(tm_0, "%Y-%m-%d")
    tm_s = (base + datetime.timedelta(days=1)).strftime("%Y-%m-%d")
    tm_e = (base + datetime.timedelta(days=last_num)).strftime("%Y-%m-%d")
    # Never look past yesterday.
    tm_e = min(tm_e, (today - datetime.timedelta(days=1)).strftime("%Y-%m-%d"))
    remained = eventIntersection(datatype, tm_s, tm_e, uids, revisit_event)
    result = {tm_0: len(uids)}
    for row in remained:
        day = row.pop("_id")
        eventid = list(row.keys())[0]
        result.setdefault(day, row[eventid])
    return _sortByKey(result)
def eventIntersection(datatype, tm_s, tm_e, uids, event):
    """Per-day counts of the given users triggering `event` in [tm_s, tm_e].

    Returns the raw aggregation rows [{"_id": day, "event": count}, ...]
    sorted by day descending.
    """
    global conn
    # OrderedDict keeps the $match keys in a deterministic order.
    match_stage = OrderedDict()
    match_stage["tm"] = {"$gte": tm_s, "$lte": tm_e}
    match_stage["jhd_userkey"] = {"$in": uids}
    match_stage.update(query_events(event))
    pipeline = [
        {"$match": match_stage},
        {"$group": {"_id": "$tm", "event": {"$sum": 1}}},
        {"$sort": {"_id": -1}},
    ]
    return list(conn[datatype]["uvfile"].aggregate(pipeline, allowDiskUse=True))
if __name__ == "__main__":
    # Manual smoke test against a live MongoDB instance (Python 2 prints).
    import time
    a = time.time()
    print eventsRemainMap("biqu", {"remain":20,"endDay":"2017-02-04","events":[[[{"id":"ac36"}]],[[{"id":"jhddg_every"}]]],"startDay":"2017-02-01"})
    # for i in range(1, 7):
    #     tm_s = (datetime.datetime.strptime("2016-09-01", "%Y-%m-%d") + datetime.timedelta(days=i)).strftime("%Y-%m-%d")
    #     print eventsRemain("feeling", tm_s, 15, ["ac11", "ac23"])
    # print time.time()-a
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,022
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi_clickhouse/SaaSCommon/Digest.py
|
# -*- coding: utf-8 -*-
from hashlib import md5
def create_degest(content):
    """Return the hexadecimal MD5 digest of *content* (sic: 'degest')."""
    hasher = md5()
    hasher.update(content)
    return hasher.hexdigest()
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,023
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi/CacheData.py
|
# -*- coding: utf-8 -*-
import __init__
import datetime
import logging
import json
from os import path
from saasapi.models import api_cache
# from django.core.exceptions import ObjectDoesNotExist
import os,django
os.environ["DJANGO_SETTINGS_MODULE"] = "jhddgapi.settings"
django.setup()
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
datefmt='%a, %d %b %Y %H:%M:%S',
filename=path.dirname(path.abspath(__file__)) + "/logs/" + "api.log",
filemode='a')
logger = logging.getLogger(__file__)
def get_data(**kwargs):
    """Look up the cached API payload for ``kwargs["digest"]``.

    Returns the ``data`` field (a JSON string) of the most recent enabled
    cache row, or ``'{}'`` when no row exists or the lookup fails.
    """
    try:
        # Parameterized query: the digest used to be interpolated via
        # %-string formatting, which allowed SQL injection.
        item = api_cache.objects.raw(
            "select rowid, data from saas_server.saasapi_api_cache "
            "where digest = %s and enable = 1 order by inserttm desc",
            [kwargs["digest"]])
        for _item in item:
            # Only the newest row matters; return on the first one.
            return _item.data
    except Exception:
        # ``except Exception`` (not bare ``except:``) so KeyboardInterrupt /
        # SystemExit still propagate; failures are logged, not raised.
        import sys
        import traceback
        exc_type, exc_value, exc_traceback = sys.exc_info()
        errinfo = traceback.format_exception(exc_type, exc_value, exc_traceback)
        logger.error(json.dumps(errinfo))
    return '{}'
# def save_data(digest, appkey, api_id, params, data):
def save_data(digest, appkey, api_id, params, data):
    """Insert a new enabled cache row; failures are logged, never raised.

    :param digest: MD5 lookup key derived from params + appkey + api_id
    :param appkey: application key the payload belongs to
    :param api_id: API identifier mixed into the digest
    :param params: serialized request parameters
    :param data: serialized response payload
    """
    try:
        obj = api_cache(inserttm=datetime.datetime.now(), digest=digest, appkey=appkey, api_id=api_id, params=params, data=data, enable=1)
        obj.save()
    except Exception:
        # ``except Exception`` (not bare ``except:``) so KeyboardInterrupt /
        # SystemExit still propagate.
        import sys
        import traceback
        exc_type, exc_value, exc_traceback = sys.exc_info()
        errinfo = traceback.format_exception(exc_type, exc_value, exc_traceback)
        logger.error(json.dumps(errinfo))
if __name__ == "__main__":
    # Smoke test.  save_data takes five arguments; the original call passed
    # only four, raising TypeError before ever reaching the database.
    save_data("test", "test", "test", "test", "test")
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,024
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/dataservice/DBClient/MysqlClient.py
|
#coding=utf-8
from __init__ import configPath
import MySQLdb
import ConfigParser
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
class MysqlClient(object):
    """Thin wrapper around a MySQLdb connection configured from the ini file.

    Connection parameters are read from the ``[mysqldb]`` section of the
    config file at ``configPath``.
    """
    # NOTE: these class attributes are evaluated once, at class-definition
    # (import) time; editing the config file afterwards has no effect.
    cf = ConfigParser.ConfigParser()
    cf.read(configPath)
    mysql_host = cf.get("mysqldb", "mysql_host")
    mysql_port = cf.getint("mysqldb", "mysql_port")
    mysql_user = cf.get("mysqldb", "mysql_user")
    mysql_passwd = cf.get("mysqldb", "mysql_passwd")
    def __init__(self, db, host=mysql_host, port=mysql_port, user=mysql_user, passwd=mysql_passwd):
        # Defaults are bound to the class-level config values at def time.
        self.db = db
        self.host = host
        self.port = port
        self.user = user
        self.passwd = passwd
        # Connect eagerly; _connectMysql is a property, hence no call parens.
        self.con, self.cur = self._connectMysql
    @property
    def _connectMysql(self):
        """Open a new connection + cursor to ``self.db`` (utf8 charset)."""
        conn = MySQLdb.connect(
            host=self.host,
            port=self.port,
            user=self.user,
            passwd=self.passwd,
            db=self.db,
            charset='utf8'
        )
        cur = conn.cursor()
        return conn, cur
    @property
    def connection(self):
        """Return the (connection, cursor) pair opened in __init__."""
        return self.con, self.cur
    def select(self, cmd):
        """Execute *cmd* and yield each row of the result set.

        Errors are printed, not raised; iteration then proceeds over
        whatever the cursor's fetchall() returns.
        """
        try:
            self.cur.execute(cmd)
            print("success: ", cmd)
        except:
            import traceback
            print(traceback.print_exc())
            print("faild: ", cmd)
        for item in self.cur.fetchall():
            yield item
    # def getAppkey(self):
    #     result = []
    #     sql = "select a.appkey, b.cdkey, a.plat from (select * from saas_meta.d_app where enable = 1 and (plat = 'android' or plat = 'ios')) a left join (select * from saas_meta.d_account where enable = 1) b on a.own = b.name_uid"
    #     self.cur.execute(sql)
    #     for item in self.cur.fetchall():
    #         appkey, dbname, plat = item[0], item[1], item[2]
    #         appkey = appkey
    #         result.append((dbname, appkey, plat))
    #     return result
    def getAppkey(self):
        """Return [(cdkey, appkey, plat)] for enabled non-h5 apps."""
        result = []
        sql = "select a.appkey, b.cdkey, a.plat from (select * from saas_meta.d_app where enable = 1 and (plat = 'android' or plat = 'ios' or plat = 'feeling' or plat = 'all')) a left join (select * from saas_meta.d_account where enable = 1) b on a.own = b.name_uid"
        self.cur.execute(sql)
        for item in self.cur.fetchall():
            appkey, dbname, plat = item[0], item[1], item[2]
            # appkey = appkey.lower()
            result.append((dbname, appkey, plat))
        return result
    def getAppkey_h5(self):
        """Return [(cdkey, appkey, plat)] for enabled h5 apps."""
        result = []
        sql = "select a.appkey, b.cdkey, a.plat from (select * from saas_meta.d_app where enable = 1 and plat = 'h5') a left join (select * from saas_meta.d_account where enable = 1) b on a.own = b.name_uid"
        self.cur.execute(sql)
        for item in self.cur.fetchall():
            appkey, dbname, plat = item[0], item[1], item[2]
            result.append((dbname, appkey, plat))
        return result
    def closeMysql(self):
        """Close the cursor and the underlying connection."""
        self.cur.close()
        self.con.close()
if __name__ == "__main__":
    # Ad-hoc smoke test against the live saas_meta database.
    tester = MysqlClient("saas_meta")
    # print(tester.con, tester.cur)
    print(tester.getAppkey_h5())
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,025
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi/SaaSCommon/JHDecorator.py
|
# -*- coding: utf-8 -*-
import time
from functools import wraps
def fn_timer(function):
    """Decorator that prints the wall-clock runtime of each call.

    The wrapped function's return value is passed through unchanged.
    """
    @wraps(function)
    def function_timer(*args, **kwargs):
        t0 = time.time()
        result = function(*args, **kwargs)
        t1 = time.time()
        # ``function.__name__`` works on both Python 2 and 3; the original
        # used the Python-2-only ``func_name`` attribute.
        print("@ %s, Total time running %s(%s): %s seconds" %
              (time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())), function.__name__, str(args), str(t1 - t0))
              )
        return result
    return function_timer
@fn_timer
def test(a):
    """Demo workload: sleeps one second so the timer prints ~1.0s."""
    import time
    time.sleep(1)
if __name__ == "__main__":
    test("ssss")
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,026
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/dataservice/DBClient/PostgreSqlClient.py
|
# --coding=utf8--
from __init__ import configPath
import ConfigParser
import psycopg2
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
class PostgreSqlClient(object):
    """Thin wrapper around a psycopg2 connection configured from the ini file.

    Connection parameters are read from the ``[postgre]`` section of the
    config file at ``configPath``.
    """
    # NOTE: evaluated once, at class-definition (import) time.
    cf = ConfigParser.ConfigParser()
    cf.read(configPath)
    host = cf.get("postgre", "host")
    port = cf.getint("postgre", "port")
    user = cf.get("postgre", "user")
    passwd = cf.get("postgre", "passwd")
    def __init__(self, db, host=host, port=port, user=user, passwd=passwd):
        # Defaults are bound to the class-level config values at def time.
        self.db = db
        self.host = host
        self.port = port
        self.user = user
        self.passwd = passwd
        # Connect eagerly; _connectPostgreSql is a property, no call parens.
        self.con, self.cur = self._connectPostgreSql
    @property
    def _connectPostgreSql(self):
        """Open a new connection + cursor to ``self.db``."""
        conn = psycopg2.connect(
            host=self.host,
            port=self.port,
            user=self.user,
            password=self.passwd,
            database=self.db
        )
        cur = conn.cursor()
        return conn, cur
    def query(self, cmd):
        """Execute a read-only statement and yield each result row.

        Only statements whose first token starts with "select"
        (case-insensitive) are allowed.
        NOTE(review): this guard uses ``assert``, which is stripped under
        ``python -O`` -- consider raising ValueError instead.
        """
        command = str(cmd).split()[0]
        assert command.lower().startswith("select")
        self.cur.execute(cmd)
        for item in self.cur.fetchall():
            yield item
    def close(self):
        """Close the cursor and the underlying connection."""
        self.cur.close()
        self.con.close()
# def deleteRecordsByTS(self, ts):
# # try:
# # sql = "delete from " + appkey + "_h5_rt" + " where tm = %s"
# # print(sql)
# # print(cur.execute(sql, [day]))
# # conn.commit()
# # except Exception, e:
# # print(e,)
# # conn.rollback()
# # finally:
# # cur.close()
# # conn.close()
if __name__ == '__main__':
    # Ad-hoc smoke test against a live PostgreSQL database.
    client = PostgreSqlClient("jh_10a0e81221095bdba91f7688941948a6")
    for item in client.query("select * from biqu_h5_event_detail"):
        print(item)
    import uuid, time, datetime
    print(uuid.uuid1())
    print(datetime.datetime.now())
    from SaaSMode.EventDetailH5 import H5EventDetail
    print(H5EventDetail().build())
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,027
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/dataservice/DevUsers.py
|
# -*- coding: utf-8 -*-
import time
from DBClient.MysqlClient import MysqlClient
# Module-level cache shared across calls to DevUsers(): {appkey: set(userkeys)}.
global result
result = {}
# Timestamp of the last refresh; drives DevUsers()'s one-hour TTL.
global myclock
myclock = time.time()
def DevUsers(datatype='all', iscache = False):
    """Return {appkey: [developer userkeys]} to exclude from analytics.

    Results come from the module-global cache ``result``, refreshed from
    MySQL when it is older than one hour, empty, or when *iscache* is
    truthy (despite its name, iscache=True forces a refresh).

    :param datatype: 'all' for every appkey; otherwise filtered with
        ``key in datatype`` (substring match when a string is passed).
    """
    global result
    global myclock
    # Refresh when the 1h TTL expired, the cache is empty, or forced.
    if ((time.time() - myclock) > 3600 or (not result)) or iscache:
        client = MysqlClient("saas_meta")
        con, cur = client.connection
        sql = "SELECT a.appkey, b.userkey FROM saas_exclude_appkey a LEFT JOIN saas_dev_userkey b on a.appkey = b.appkey WHERE a.`enable` = 1"
        cur.execute(sql)
        # NOTE(review): ``result`` is never cleared before a refresh, so
        # entries for appkeys removed from the table persist forever --
        # confirm whether stale keys should be dropped here.
        for item in cur.fetchall():
            appkey, userkey = item[0], item[1]
            result.setdefault(appkey, set()).add(userkey)
        myclock = time.time()
        client.closeMysql()
    else:
        pass
    # Project the cache onto the requested datatype, as lists.
    tmp = {}
    for key in result:
        if "all" in datatype:
            tmp.setdefault(key, list(result[key]))
        elif key in datatype:
            tmp.setdefault(key, list(result[key]))
    return tmp
if __name__ == "__main__":
    # Ad-hoc run; requires MySQL connectivity (Python 2 print statement).
    print DevUsers()
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,028
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi_clickhouse/CacheDecorator.py
|
# -*- coding: utf-8 -*-
import json
import datetime
import logging
import copy
import functools
import threading
from os import path
from collections import OrderedDict
from SaaSCommon.Digest import create_degest
from SaaSCommon.TasksRunner import TasksRunner
from CacheData import get_data
from CacheData import save_data
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
datefmt='%a, %d %b %Y %H:%M:%S',
filename=path.dirname(path.abspath(__file__)) + "/logs/" + "api.log",
filemode='a')
logger = logging.getLogger(__file__)
global tasks_runner
tasks_runner = TasksRunner()
def common_cache_decorator(api_id, result_transform = None, result_detransform = None, reverse = True):
    """Per-day caching decorator for API methods shaped ``f(self, appkey, params)``.

    Splits the [startDay, endDay] range into single days, pulls each day's
    cached payload (keyed by an MD5 of params + appkey + api_id), schedules
    background refresh tasks via ``tasks_runner``, then calls the wrapped
    function synchronously and persists its per-day results.

    :param api_id: identifier mixed into each day's cache digest
    :param result_transform: optional post-processing of the fresh result
    :param result_detransform: optional post-processing of the merged result
    :param reverse: sort order (by day key) of the returned OrderedDict
    """
    def common_cache_decorator_wraper(func):
        @functools.wraps(func)
        def func_wapper(*args, **kwargs):
            digest_dict = {}
            appkey = args[1]
            params = args[2]
            cache_data = {}
            ### Run Before Begin ###
            try:
                params = params if isinstance(params, dict) else json.loads(params)
                start_day = params["startDay"]
                end_day = params["endDay"]
                start_date = datetime.datetime.strptime(start_day, "%Y-%m-%d").date()
                end_date = datetime.datetime.strptime(end_day, "%Y-%m-%d").date()
                index_day = start_day
                while start_date <= end_date:
                    try:
                        # One digest per single-day parameter set.
                        params_copy = copy.deepcopy(params)
                        params_copy["startDay"] = start_date.strftime("%Y-%m-%d")
                        params_copy["endDay"] = start_date.strftime("%Y-%m-%d")
                        digest = create_degest(json.dumps(params_copy, separators=(',', ':')) + appkey + api_id)
                        digest_dict.setdefault(start_date.strftime("%Y-%m-%d"),
                                               [digest, json.dumps(params_copy, separators=(',', ':'))]
                                               )
                        cache_item = json.loads(get_data(digest=digest))
                        if start_date.strftime("%Y-%m-%d") in cache_item:
                            cache_data.update(**cache_item)
                            # NOTE(review): index_day is reset to start_day (not
                            # this day), and both branches schedule the same
                            # background recompute -- the synchronous call below
                            # therefore still covers the full range. Confirm
                            # whether this if/else split is intentional.
                            index_day = start_day
                            t = threading.Thread(target=func, args=(args[0], appkey, params_copy))
                            tasks_runner.put(t)
                        else:
                            t = threading.Thread(target=func, args=(args[0], appkey, params_copy))
                            tasks_runner.put(t)
                    except:
                        import sys
                        import traceback
                        exc_type, exc_value, exc_traceback = sys.exc_info()
                        errinfo = traceback.format_exception(exc_type, exc_value, exc_traceback)
                        logger.error(json.dumps(errinfo))
                    start_date += datetime.timedelta(days=1)
            except:
                import sys
                import traceback
                exc_type, exc_value, exc_traceback = sys.exc_info()
                errinfo = traceback.format_exception(exc_type, exc_value, exc_traceback)
                logger.error(json.dumps(errinfo))
            ### Run Before End ###
            # Synchronous computation for the (possibly narrowed) range.
            params_copy = copy.deepcopy(params)
            params_copy["startDay"] = index_day
            params_copy["endDay"] = end_date.strftime("%Y-%m-%d")
            data = func(args[0], appkey, params_copy, **kwargs)
            if "errinfo" in data:
                # Do not merge cached days into an error response.
                cache_data = {}
            else:
                data = result_transform(data) if result_transform else data
            ### Run After Begin ###
            try:
                # Persist each fresh day to the cache table in the background.
                for key in data:
                    digest = digest_dict[key][0]
                    params_copy = digest_dict[key][1]
                    _data = json.dumps({key: data[key]}, separators=(',', ':'))
                    task = threading.Thread(target=save_data, args=(digest, appkey, api_id, params_copy, _data))
                    tasks_runner.put(task)
            except:
                import sys
                import traceback
                exc_type, exc_value, exc_traceback = sys.exc_info()
                errinfo = traceback.format_exception(exc_type, exc_value, exc_traceback)
                logger.error(json.dumps(errinfo))
            ### Run After End ###
            # Cached days take precedence over freshly computed ones.
            data.update(**cache_data)
            # Python 2 iteritems(); keys are ISO dates, sorted lexicographically.
            result_sorted = OrderedDict(sorted(data.iteritems(), key=lambda item: item[0], reverse=reverse))
            result_sorted = result_detransform(result_sorted) if result_detransform else result_sorted
            return result_sorted
        return func_wapper
    return common_cache_decorator_wraper
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,029
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi/models.py
|
from __future__ import unicode_literals
from django.db import models
# Create your models here.
# Cache table for API responses (saas_server.saasapi_api_cache).
class api_cache(models.Model):
    rowid = models.AutoField(primary_key=True)
    # When the row was written; newest enabled row wins on lookup.
    inserttm = models.DateTimeField()
    # MD5 of params + appkey + api_id; lookup key for cached payloads.
    digest = models.CharField(max_length=512)
    appkey = models.CharField(max_length=512)
    api_id = models.CharField(max_length=512)
    # Serialized request parameters.
    params = models.TextField()
    # Serialized (JSON) response payload.
    data = models.TextField()
    # 1 = row is live; anything else disables it.
    enable = models.IntegerField()
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,030
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi_clickhouse/Query.py
|
# -*- coding: utf-8 -*-
class Query(object):
    """Builds SQL/ClickHouse query fragments from API filter parameters."""
    def __init__(self):
        pass
    # Build a WHERE fragment: "k1 = 'v1' and k2 = 'v2'".
    def fragment_where(self, attrs):
        '''
        :param attrs: {key: value} mapping; list values use their first element
        :return: " and "-joined equality conditions; falsy values are skipped
        '''
        fragment_format = "%s = '%s'"
        wheres = []
        for key, value in attrs.items():
            # Multi-valued parameters (e.g. repeated query args): take the
            # first.  Done on a local so the caller's dict is no longer
            # mutated (the original wrote the first element back into attrs).
            if isinstance(value, list):
                value = value[0]
            # Falsy values (None, "", 0) are silently dropped, as before.
            if not value:
                continue
            # SECURITY NOTE(review): value is interpolated verbatim -- if it
            # can contain user input it must be escaped/parameterized by the
            # caller before reaching the database.
            wheres.append(fragment_format % (key, value))
        return " and ".join(wheres)
    # Map an operator name to (ClickHouse JSON extractor, SQL comparison).
    def query_operator(self, op, value):
        """Return (extractor_function, comparison_fragment) for *op*/*value*.

        String operators use visitParamExtractString; numeric ones use
        visitParamExtractFloat.  Raises TypeError when *value* cannot be
        converted, NotImplementedError for an unknown *op*.
        """
        try:
            # The original broke the elif chain with a second bare ``if`` for
            # "nis"; harmless (the "is" branch returns) but fixed for clarity.
            if op == "is":
                return ("visitParamExtractString", "= '%s'" % (str(value),))
            elif op == "nis":
                return ("visitParamExtractString", "<> '%s'" % (str(value),))
            elif op == "like":
                return ("visitParamExtractString", "like '%%%s%%'" % (str(value),))
            elif op == "nlike":
                return ("visitParamExtractString", "not like '%%%s%%'" % (str(value),))
            elif op == "startswith":
                return ("visitParamExtractString", "like '%s%%'" % (str(value),))
            elif op == "endswith":
                return ("visitParamExtractString", "like '%%%s'" % (str(value),))
            elif op == "eq":
                return ("visitParamExtractFloat", "= %.10f" % (float(value),))
            elif op == "ne":
                return ("visitParamExtractFloat", "<> %.10f" % (float(value),))
            elif op == "lte":
                return ("visitParamExtractFloat", "<= %.10f" % (float(value),))
            elif op == "lt":
                return ("visitParamExtractFloat", "< %.10f" % (float(value),))
            elif op == "gte":
                return ("visitParamExtractFloat", ">= %.10f" % (float(value),))
            elif op == "gt":
                return ("visitParamExtractFloat", "> %.10f" % (float(value),))
        except (TypeError, ValueError):
            # Narrowed from a bare ``except:``: only conversion failures
            # should be reported as a bad op/value pair.
            raise TypeError("op: %s, value: %s" % (str(op), str(value)))
        raise NotImplementedError("operator %s is invalid!" % (str(op), ))
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,031
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi/UserSearch.py
|
from pymongo import MongoClient
from config import mongo_ip
from config import mongo_port
import json
import itertools
import time
from ipinfo import ipinfo_sina
from config import mongo_con_string
global conn
# conn = MongoClient(mongo_con_string)
conn = MongoClient(mongo_ip, mongo_port)
def search(datatype, col_name, conds):
    """Yield up to 10000 documents from conn[datatype][col_name] matching *conds*.

    NOTE(review): a final empty dict ``{}`` is always yielded after the
    results -- presumably an end-of-stream sentinel for consumers; confirm
    callers rely on it before removing.
    """
    global conn
    for item in conn[datatype][col_name].find(conds).limit(10000):
        yield item
    yield {}
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,032
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/tools/CoordinateToRegion_demo.py
|
# -*- encoding: utf-8-*-
from CoordinateToRegion.getRegion import getRegion
# Demo: resolve a coordinate to its administrative region.
# longitude
lon = 116.65
# latitude
lat = 40.13
# province, city, township
pro, city, xiangzhen = getRegion(lon, lat)
print pro, city, xiangzhen
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,033
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi/apps.py
|
from __future__ import unicode_literals
from django.apps import AppConfig
# Django application configuration for the ``saasapi`` app.
class SaasapiConfig(AppConfig):
    name = 'saasapi'
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,034
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi_clickhouse/config.py
|
### mongodb
# Historical hosts kept for reference:
# mongo_ip = "172.16.152.148"
# mongo_ip = "10.45.141.35"
# mongo_ip = "10.45.141.35"
mongo_ip = "101.201.145.120"
mongo_port = 27017
# Empty credentials: the connection string below is then effectively
# unauthenticated ("mongodb://:@host:port/").
mongo_user = ""
mongo_pwd = ""
# Example form: 'mongodb://tanteng:123456@localhost:27017/'
mongo_con_string = "mongodb://%(mongo_user)s:%(mongo_pwd)s@%(mongo_ip)s:%(mongo_port)s/" % \
                   {"mongo_ip": mongo_ip, "mongo_port": mongo_port, \
                    "mongo_user": mongo_user, "mongo_pwd": mongo_pwd}
if __name__ == "__main__":
    # Manual check of the connection string (Python 2 print statement).
    print mongo_con_string
    from pymongo import MongoClient
    find = {"tm": "2016-07-31"}
    con = MongoClient(mongo_con_string)
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,035
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi/SaaSCommon/JHOpen.py
|
# -*- coding: utf-8 -*-
import gzip
import os
class JHOpen(object):
    """Line reader that transparently handles plain-text and gzip files."""
    def __init__(self):
        pass
    @staticmethod
    def readLines(path):
        """Yield stripped lines from *path* (gzip-decompressed for ``.gz``).

        If the path does not exist a message is printed and the generator
        simply stops.  The original yielded a bare ``None`` in that case,
        which crashed consumers calling ``line.strip()``; it also never
        closed the file handle.
        """
        if not os.path.exists(path):
            print("Path is not exists: %s" % path)
            return
        opener = gzip.open if path.endswith(".gz") else open
        # Context manager guarantees the handle is closed, even when the
        # consumer abandons the generator early.
        with opener(path) as f:
            for line in f:
                yield line.strip()
if __name__ == "__main__":
    # Manual check against a local gzip file (Windows path).
    for line in JHOpen().readLines("C:/1652.log.gz"):
        print(line.strip())
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,036
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasjob/SchedulerManager.py
|
# -*- coding: utf-8 -*-
from importlib import import_module
import uuid
import time
# import copy
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.jobstores.mongodb import MongoDBJobStore
from apscheduler.jobstores.memory import MemoryJobStore
from apscheduler.triggers.cron import CronTrigger
from apscheduler.events import (EVENT_SCHEDULER_STARTED, EVENT_SCHEDULER_SHUTDOWN, EVENT_SCHEDULER_PAUSED,
EVENT_SCHEDULER_RESUMED, EVENT_EXECUTOR_ADDED, EVENT_EXECUTOR_REMOVED,
EVENT_JOBSTORE_ADDED, EVENT_JOBSTORE_REMOVED, EVENT_ALL_JOBS_REMOVED,
EVENT_JOB_ADDED, EVENT_JOB_REMOVED, EVENT_JOB_MODIFIED, EVENT_JOB_EXECUTED,
EVENT_JOB_ERROR, EVENT_JOB_MISSED, EVENT_JOB_SUBMITTED, EVENT_JOB_MAX_INSTANCES)
from apscheduler.executors.pool import ThreadPoolExecutor, ProcessPoolExecutor
# from abc import ABCMeta, abstractmethod
from JobCmd import JobCmd
from JobCmd import jobcmdcallable
from DBClient.PyMongoClient import PyMongoClient
global mongoclient
_mongoclient = PyMongoClient().getConn()
class SchedulerManager(object):
# __metaclass__ = ABCMeta
global _mongoclient
    def __init__(self):
        """Build and start the APScheduler background scheduler.

        Jobs in the 'mongo' store survive restarts; 'default' is in-memory.
        Both executor pools are sized 1, so jobs run strictly serially.
        """
        self.jobstores = {
            'mongo': MongoDBJobStore(collection='job1', database='saasjob', client=_mongoclient),
            'default': MemoryJobStore()
        }
        self.executors = {
            'default': ThreadPoolExecutor(1),
            'processpool': ProcessPoolExecutor(1)
        }
        self.job_defaults = {
            'coalesce': False,
            'misfire_grace_time': 1,
            'max_instances': 1
        }
        self._sched = BackgroundScheduler(jobstores=self.jobstores, executors=self.executors, job_defaults=self.job_defaults)
        # Listen for job-submitted events.
        self._sched.add_listener(self.when_job_submitted, EVENT_JOB_SUBMITTED)
        # Listen for job-completed events.
        self._sched.add_listener(self.when_job_executed, EVENT_JOB_EXECUTED)
        # Listen for job-error (crash) events.
        self._sched.add_listener(self.when_job_crashed, EVENT_JOB_ERROR)
        self._jobs = {}
        self._jobhandlers = {}  # format, key: jobid, value: jobhandler
        self._jobs_key = ["name", "func", "args", "kwargs"]
        # NOTE(review): start() is defined elsewhere in this class (not
        # visible here); the scheduler starts immediately on construction.
        self.start()
def cmd_valid(self, cmd):
cmd = cmd.strip()
if cmd.startswith("python"):
return True
else:
return False
    def get_job_trigger(self, _job):
        """Return the job's cron trigger as {field_name: value}, omitting defaults.

        Returns an empty dict when the job has no trigger.
        """
        # ('trigger', <CronTrigger (second='4', timezone='Asia/Shanghai')>)
        _trigger = self._get_job_attr(_job, "trigger")
        # options = ["%s='%s'" % (f.name, f) for f in self.fields if not f.is_default]
        if _trigger:
            return dict([(f.name, f.__str__()) for f in _trigger.fields if not f.is_default])
        else:
            return {}
# 获取job属性
def _get_job_attr(self, _job, attr):
try:
result = eval("_job.%s" % attr)
return result
except:
import traceback
print(traceback.print_exc())
return None
def when_job_submitted(self, event):
try:
job_id = event.job_id
if job_id not in self._jobhandlers and job_id in self._jobhandlers:
self._jobhandlers.setdefault(job_id, JobHandler(self._jobs[job_id]))
jobhandler = self._jobhandlers[event.job_id]
jobhandler.when_job_submitted()
print("%s submitted at %s" % (event.job_id, time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))))
except:
import traceback
print(traceback.print_exc())
def when_job_executed(self, event):
try:
job_id = event.job_id
if job_id not in self._jobhandlers:
self._jobhandlers.setdefault(job_id, JobHandler(self._jobs[job_id]))
jobhandler = self._jobhandlers[event.job_id]
jobhandler.when_job_executed()
print("%s executed at %s" % (event.job_id, time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))))
except:
import traceback
print(traceback.print_exc())
def when_job_crashed(self, event):
try:
if event.exception:
job_id = event.job_id
if job_id not in self._jobhandlers:
self._jobhandlers.setdefault(job_id, JobHandler(self._jobs[job_id]))
jobhandler = self._jobhandlers[event.job_id]
jobhandler.when_job_crashed()
print("%s crashed at %s" % (event.job_id, time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))))
except:
import traceback
print(traceback.print_exc())
    # Add a recurring job, crontab-style.
    def addCron(self, cmd, **params):
        """Schedule *cmd* as a cron job on the process-pool executor.

        :param cmd: shell command; must start with "python" (see cmd_valid)
        :param params: cron fields (second/minute/hour/day/day_of_week/
            month/year; "*" means unconstrained) plus optional ``data``
            passed to the job callable.
        :return: {"job_id": ...} on success, {"errinfo": "wrong cmd"} for
            an invalid command, or False when scheduling raised.
        """
        try:
            create_jobid = uuid.uuid4().hex
            if not self.cmd_valid(cmd):
                return {"errinfo": "wrong cmd"}
            jobcmdobj = JobCmd(cmd)
            data = params.get("data", {})
            jobcmdobj.set_jobid(create_jobid)
            # "*" from the API means "field unconstrained" -> None for APScheduler.
            s = params.get("second", None) if params.get("second", None) != "*" else None
            m = params.get("minute", None) if params.get("minute", None) != "*" else None
            h = params.get("hour", None) if params.get("hour", None) != "*" else None
            d = params.get("day", None) if params.get("day", None) != "*" else None
            dw = params.get("day_of_week", None) if params.get("day_of_week", None) != "*" else None
            mnth = params.get("month", None) if params.get("month", None) != "*" else None
            y = params.get("year", None) if params.get("year", None) != "*" else None
            _job = self._sched.add_job(jobcmdcallable,
                           'cron', year=y, month=mnth, day=d, day_of_week=dw, hour=h, minute=m, second=s,
                           args=[jobcmdobj, data],
                           executor="processpool",
                           jobstore="mongo",
                           id = create_jobid)
            self._jobhandlers.setdefault(create_jobid, JobHandler(_job))
            # Persist the job attributes.
            return {"job_id": create_jobid}
        except:
            import traceback
            print(traceback.print_exc(), cmd, params)
            return False
# 修改 job 属性
def modifyJobAttr(self, job_id, **changes):
try:
_job = self._sched.modify_job(job_id=job_id, **changes)
self._jobs[job_id] = _job
if job_id in self._jobhandlers:
self._jobhandlers[job_id].job = _job
else:
self._jobhandlers.setdefault(job_id, JobHandler(_job))
return True
except:
import traceback
print(traceback.print_exc(), job_id, changes)
return False
    def modifyJobData(self, job_id, data):
        """Merge `data` into the job's second positional argument (the
        user-supplied data dict) and resubmit the modified args to the
        scheduler. Returns True on success, False on failure."""
        try:
            args = self._get_job_attr(self._jobhandlers[job_id].job, "args")
            # args_copy = [item for item in args]
            # args[0] is the JobCmd object, args[1] the data dict (see addCron).
            for key in data:
                args[1][key] = data[key]
            _job= self._sched.modify_job(job_id, args=args)
            self._jobs[job_id] = _job
            if job_id in self._jobhandlers:
                self._jobhandlers[job_id].job = _job
            else:
                self._jobhandlers.setdefault(job_id, JobHandler(_job))
            return True
        except:
            import traceback
            print(traceback.print_exc(), job_id, data)
            return False
# 修改执行时间,crontab 格式
def modifyJobFreq(self, job_id, cronargs):
try:
_job = self._sched.reschedule_job(job_id, trigger='cron', **cronargs)
self._jobs[job_id] = _job
if job_id in self._jobhandlers:
self._jobhandlers[job_id].job = _job
else:
self._jobhandlers.setdefault(job_id, JobHandler(_job))
return True
except:
import traceback
print(traceback.print_exc(), job_id, cronargs)
return False
# 删除 job
def removeFromCron(self, job_id):
try:
self._sched.remove_job(job_id)
if job_id in self._jobhandlers:
self._jobhandlers.pop(job_id)
if job_id in self._jobs:
self._jobs.pop(job_id)
return True
except:
import traceback
print(traceback.print_exc(), job_id)
return False
def job_exists(self, job_id):
if job_id in self._jobhandlers or job_id in self._jobs:
if job_id not in self._jobhandlers and job_id in self._jobs:
self._jobhandlers[job_id] = JobHandler(self._jobs[job_id])
elif job_id in self._jobhandlers and job_id not in self._jobs:
self._jobs[job_id] = self._jobhandlers[job_id].job
return True
else:
return False
    # Look up task information by job id.
    def findCronJob(self, job_ids):
        """Return a list of status dicts, one per job id.

        Each entry mirrors JobHandler.jobhandlerattr plus derived fields
        (running_time, finished, completed_per); unknown ids yield
        {"job_id": ..., "errinfo": "no exists"}.
        """
        result = []
        # Keys copied from the handler's attribute dict into the response.
        _keys = [
            "cmd",
            "create_stamp",
            "is_running",
            "start_stamp",
            "hope_runtime",
            "is_success",
            "is_pause",
            "status",
            "name",
            "desc",
            "allowmodify"
        ]
        for job_id in job_ids:
            print("job_exists", self.job_exists(job_id))
            if self.job_exists(job_id):
                _jobhander = self._jobhandlers[job_id]
                job_info = _jobhander.jobhandlerattr
                cron_trigger = {}
                # cron_trigger = self.get_cron_trigger(_jobhander.job)
                tmp = {}
                tmp["job_id"] = job_id
                # running_time: elapsed time if running, else last full runtime.
                if job_info["is_running"]:
                    execute_time = time.time() - job_info["start_stamp"]
                    tmp["running_time"] = round(execute_time, 3)
                else:
                    tmp["running_time"] = round(job_info["hope_runtime"], 3)
                for key in _keys:
                    v = job_info.get(key, None)
                    if key == "is_running":
                        # Exposed to callers as the inverse flag "finished".
                        tmp["finished"] = False if job_info["is_running"] else True
                    else:
                        tmp[key] = v
                # Progress estimate: elapsed time over the expected runtime.
                if tmp["finished"]:
                    tmp["completed_per"] = 1.0
                else:
                    tmp["completed_per"] = round(tmp["running_time"]/max([tmp["running_time"], tmp["hope_runtime"]]), 3)
                # del tmp["hope_runtime"]
                # del tmp["is_success"]
                # del tmp["is_pause"]
                tmp.pop("hope_runtime")
                tmp.pop("is_success")
                tmp.pop("is_pause")
                _result = dict(tmp, **cron_trigger)
                print("_result", _result)
                # Status 3 == paused: report it as not started.
                if _result["status"] == 3:
                    _result["completed_per"] = 0
                    _result["running_time"] = 0
                    _result["start_stamp"] = None
                result.append(_result)
            else:
                result.append({"job_id": job_id, "errinfo": "no exists"})
        return result
def getAllJobInfo(self):
try:
result = self.findCronJob(set(self._jobhandlers.keys())|set(self._jobs.keys()))
return result
except:
import traceback
print(traceback.print_exc())
return False
def start_addition(self):
for _job in self._sched.get_jobs():
job_id = self._get_job_attr(_job, "id")
self._jobs.setdefault(job_id, _job)
def start(self):
try:
self._sched.start()
self._sched.pause()
self.start_addition()
self._sched.resume()
return True
except:
import traceback
print(traceback.print_exc())
return False
def stop(self, iswait = True):
try:
self._sched.shutdown(wait=iswait)
self._jobhandlers.clear()
return True
except:
import traceback
print(traceback.print_exc())
return False
def pause_job(self, job_id):
try:
self._sched.pause_job(job_id=job_id)
self._jobhandlers[job_id].ispause = True
self._jobhandlers[job_id].status = 3
self._jobhandlers[job_id].isrunning = False
return True
except:
import traceback
print(traceback.print_exc())
return False
def resume_job(self, job_id):
try:
self._sched.resume_job(job_id=job_id)
self._jobhandlers[job_id].ispause = False
self._jobhandlers[job_id].status = 1
return True
except:
import traceback
print(traceback.print_exc())
return False
class JobHandler(object):
    """Tracks the runtime state of one scheduled job.

    Wraps an APScheduler job object plus bookkeeping: running/paused flags,
    last start/end timestamps, the measured runtime, and a status code.
    """
    def __init__(self, job = None, handler = None):
        # `handler` is accepted for interface compatibility but unused.
        self.jobobj = job
        if job:
            # args[0] is the JobCmd object the job was created with.
            self.jobcmdobj = self.get_job_attr("args")[0]
            self.job_id = self.get_job_attr("id")
        else:
            self.jobcmdobj = None
            self.job_id = None
        # Runtime bookkeeping defaults.
        self.is_running = False
        self.end_stamp = 0
        self.start_stamp = 0
        self.hp_runningtime = 0
        self.is_crash = False
        self.is_pause = False
        self.is_success = True
        # Status code:
        # -1: initial state
        #  0: crashed
        #  1: finished, waiting for next run
        #  2: currently executing
        #  3: paused
        self.status = -1
    def when_job_submitted(self):
        """Mark the job as started (submitted to an executor)."""
        self.is_running = True
        self.start_stamp = time.time()
        self.status = 2
    def when_job_executed(self):
        """Mark the job as finished and record how long it ran."""
        self.is_running = False
        self.end_stamp = time.time()
        if self.status != 0 and self.is_success == True:
            self.status = 1
        self.hp_runningtime = self.end_stamp - self.start_stamp
    def when_job_crashed(self):
        """Mark the job as failed; keeps the previous runtime estimate."""
        self.is_success = False
        self.is_running = False
        self.end_stamp = time.time()
        self.status = 0
    @property
    def job(self):
        return self.jobobj
    @job.setter
    def job(self, _job):
        self.jobobj = _job
        if _job:
            self.jobcmdobj = self.get_job_attr("args")[0]
            self.job_id = self.get_job_attr("id")
        else:
            self.jobcmdobj = None
            self.job_id = None
        if self.jobcmdobj:
            self.cmd = self.jobcmdobj._cmd
    @property
    def isrunning(self):
        return self.is_running
    @isrunning.setter
    def isrunning(self, is_running):
        self.is_running = is_running
    @property
    def issuccess(self):
        return self.is_success
    @issuccess.setter
    def issuccess(self, is_success):
        self.is_success = is_success
    @property
    def ispause(self):
        return self.is_pause
    @ispause.setter
    def ispause(self, is_pause):
        self.is_pause = is_pause
    @property
    def exception(self):
        # NOTE: raises AttributeError if read before the setter was used.
        return self.exceptioninfo
    @exception.setter
    def exception(self, ex):
        self.exceptioninfo = ex
    @property
    def startstamp(self):
        # Bug fix: getter previously returned self.startstamp, recursing forever.
        return self.start_stamp
    @startstamp.setter
    def startstamp(self, start_stamp):
        self.start_stamp = start_stamp
    @property
    def endstamp(self):
        # Bug fix: getter previously returned self.endstamp, recursing forever.
        return self.end_stamp
    @endstamp.setter
    def endstamp(self, end_stamp):
        self.end_stamp = end_stamp
    @property
    def hprunningtime(self):
        return self.hp_runningtime
    @property
    def runningtime(self):
        """Whole seconds the job has been running, or 0 when idle."""
        if self.is_running:
            return int(time.time() - self.start_stamp)
        else:
            return 0
    # Read a single attribute of the wrapped job; None on any failure.
    def get_job_attr(self, attr):
        try:
            # Bug fix: replaced eval("self.jobobj.%s" % attr) with getattr —
            # all visible callers pass simple attribute names ("args", "id").
            return getattr(self.jobobj, attr)
        except:
            import traceback
            traceback.print_exc()
            return None
    # Read several job attributes; failed lookups come back as None.
    def get_job_attrs(self, attrs):
        return [self.get_job_attr(attr) for attr in attrs]
    @property
    def jobhandlerattr(self):
        """Merged dict: the JobCmd's own attributes plus this handler's state."""
        if self.jobcmdobj:
            job_attr = self.jobcmdobj.jobattr()
        else:
            job_attr = {}
        jobhandler_attr = {
            "job_id": self.job_id,           # job id
            "is_running": self.is_running,   # currently executing
            "is_pause": self.is_pause,       # paused
            "is_success": self.is_success,   # last run succeeded
            "start_stamp": self.start_stamp, # last start time
            "end_stamp": self.end_stamp,     # last completion time
            "hope_runtime": self.hp_runningtime,  # expected runtime
            "status": self.status,           # status code (see __init__)
        }
        # args[1] is the user-supplied data dict; pull optional metadata out
        # of it, falling back to safe defaults on any error.
        try:
            # Equivalent to the old tuple-wrap-then-unwrap: the raw .get value.
            jobhandler_attr["name"] = self.get_job_attr("args")[1].get("name", [""])
        except:
            jobhandler_attr["name"] = ""
        try:
            jobhandler_attr["desc"] = self.get_job_attr("args")[1].get("desc", [""])
        except:
            jobhandler_attr["desc"] = ""
        try:
            allowmodify_arg = self.get_job_attr("args")[1].get("allowmodify", [False])
            allowmodify = allowmodify_arg[0] if len(allowmodify_arg) == 1 else False
            jobhandler_attr["allowmodify"] = allowmodify
        except:
            jobhandler_attr["allowmodify"] = False
        return dict(job_attr, **jobhandler_attr)
if __name__ == "__main__":
    # Ad-hoc manual smoke test (Python 2 print syntax).
    tester = SchedulerManager()
    print tester._sched
    # print tester.convertStringToFunction("DBClient.PyMongoClient.PyMongoClient")
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,037
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/dataservice/DBClient/SummaryDatatypes.py
|
# -*- coding: utf-8 -*-
# Datatype groups: alias -> list of underlying datatype names.
# Presumably used to expand a combined app alias into its per-platform
# datatypes — confirm against the consumers of this module.
DATATYPES = {
    "biqu_all": ["biqu", "BIQU_ANDROID"]
}
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,038
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi/eventRemain.py
|
# coding: utf-8
import time
from pymongo import MongoClient
from config import mongo_ip
from config import mongo_port
from MongoDatas import _sortByKey
import datetime
import time
from SaaSCommon.JHDecorator import fn_timer
# Module-level MongoDB connection shared by the query helpers below.
# (The `global` statement is a no-op at module scope; kept for clarity.)
global conn
conn = MongoClient(mongo_ip, mongo_port)
@fn_timer
def eventsRemainCombine(datatype, tm_s, tm_e, last_num, events):
    """Build a day-by-day retention table for every cohort day in
    [tm_s, tm_e].

    :param datatype: Mongo database name.
    :param tm_s: first cohort date, "YYYY-MM-DD".
    :param tm_e: last cohort date, "YYYY-MM-DD" (span must be 1..90 days).
    :param last_num: number of retention days to report per cohort.
    :param events: [start_event, remain_event] (see eventsRemain).
    :return: list of {"tm": date, "numbers": [...], "length": n}; days with
             no revisits are padded with 0, future days with "".
    """
    tm_s_stamp = time.mktime(time.strptime(tm_s, "%Y-%m-%d"))
    tm_e_stamp = time.mktime(time.strptime(tm_e, "%Y-%m-%d"))
    num = (tm_e_stamp - tm_s_stamp) / 86400
    if num > 90 or num <= 0:
        return ["Out of date range"]
    tm = datetime.datetime.strptime(tm_s, "%Y-%m-%d")
    end_tm = datetime.datetime.strptime(tm_e, "%Y-%m-%d")
    today = datetime.datetime.today()
    result = []
    while tm <= end_tm:
        tmp = {"tm": tm.strftime("%Y-%m-%d")}
        # Retention counts for this cohort day: {date: revisit count}.
        event_remain_data = eventsRemain(datatype, tm.strftime("%Y-%m-%d"), last_num, events)
        data_default = {}
        # Pad missing days: 0 for past days with no revisits, "" for future days.
        for i in range(last_num + 1):
            curday = (tm + datetime.timedelta(days=i)).strftime("%Y-%m-%d")
            data_default.setdefault(curday, 0 if today.strftime("%Y-%m-%d") > curday else "")
        # Bug fix: dict.keys() has no .sort() under Python 3; sorted() works
        # on both Python 2 and 3.
        for key in sorted(data_default):
            tmp.setdefault("numbers", [])
            tmp["numbers"].append(event_remain_data[key] if key in event_remain_data else data_default[key])
        tmp["length"] = len(tmp["numbers"])
        tm += datetime.timedelta(days=1)
        result.append(tmp)
    return result
@fn_timer
def eventsRemain(datatype, tm_0, last_num, events):
    """Compute retention for one cohort day.

    Users who triggered events[0] on tm_0 form the cohort; for each of the
    following `last_num` days (capped at yesterday), count cohort members
    who triggered events[1].

    :param events: [start_event, remain_event]; "jhddg_every" means any event.
    :return: {date: count} sorted by date; tm_0 maps to the cohort size.
    """
    global conn
    assert last_num >= 1
    # Bug fix: events[1] is read below, so two entries are required (the old
    # assert allowed len(events) == 1 and then crashed with IndexError).
    assert isinstance(events, list) and len(events) >= 2
    today = datetime.datetime.today()
    start_event = events[0]
    remain_event = events[1]
    dbname = datatype
    collection_name = "uvfile"
    query = {"tm": tm_0}
    # "jhddg_every" is the "any event" wildcard: no per-event filter then.
    if start_event != "jhddg_every":
        query["item_count.%s" % start_event] = {"$exists": True}
    qurey_result = list(conn[dbname][collection_name].find(query, {"jhd_userkey": 1}))
    uids = tuple([item["jhd_userkey"] for item in qurey_result])
    tm_s = (datetime.datetime.strptime(tm_0, "%Y-%m-%d") + datetime.timedelta(days=1)).strftime("%Y-%m-%d")
    end_day = datetime.datetime.strptime(tm_0, "%Y-%m-%d") + datetime.timedelta(days=last_num)
    tm_e = end_day.strftime("%Y-%m-%d")
    # Never look past yesterday — today's data is still incomplete.
    tm_e = min(tm_e, (today - datetime.timedelta(days=1)).strftime("%Y-%m-%d"))
    events_remain = eventIntersection(dbname, tm_s, tm_e, uids, remain_event)
    result = {tm_0: len(uids)}
    for item in events_remain:
        tm = item.pop("_id")
        # Bug fix: dict.keys() is not indexable under Python 3;
        # next(iter(...)) works on both Python 2 and 3.
        eventid = next(iter(item))
        result.setdefault(tm, item[eventid])
    return _sortByKey(result)
def eventIntersection(datatype, tm_s, tm_e, uids, event):
    """For each day in [tm_s, tm_e], count how many of the given users
    triggered `event` ("jhddg_every" counts any activity).

    Returns the raw aggregation rows: [{"_id": date, event: count}, ...]
    sorted by date descending.
    """
    global conn
    # Stage 1: restrict to the date window and the cohort's user keys.
    match_stage = {"$match": {"tm": {"$gte": tm_s, "$lte": tm_e},
                              "jhd_userkey": {"$in": uids}}}
    if event != "jhddg_every":
        match_stage["$match"]["item_count.%s" % event] = {"$exists": True}
    # Stage 2: one bucket per day, counting matching user documents.
    group_stage = {"$group": {"_id": "$tm", event: {"$sum": 1}}}
    pipeline = [match_stage, group_stage, {"$sort": {"_id": -1}}]
    return [doc for doc in conn[datatype]["uvfile"].aggregate(pipeline, allowDiskUse=True)]
if __name__ == "__main__":
    # Ad-hoc manual test (Python 2 print syntax); requires a live MongoDB.
    import time
    a = time.time()
    print eventsRemainCombine("hbtv", "2016-09-11", "2016-09-18", 7, ["jhddg_every", "ac13"])
    # for i in range(1, 7):
    #     tm_s = (datetime.datetime.strptime("2016-09-01", "%Y-%m-%d") + datetime.timedelta(days=i)).strftime("%Y-%m-%d")
    #     print eventsRemain("feeling", tm_s, 15, ["ac11", "ac23"])
    # print time.time()-a
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,039
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/IPList.py
|
# -*- coding: utf-8 -*-
# Presumably an IP access whitelist (imported by dataservice/views.py per the
# project import graph) — confirm semantics against that caller.
IPAccess = [
    "0.0.0.0",
]
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,040
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/dataservice/DBClient/MysqlData.py
|
# -*- coding: utf-8 -*-
import MysqlClient
import json
class MysqlData(object):
    """Read customization parameters from the MySQL metadata databases."""
    def __init__(self):
        # Client bound to the "customize" database.
        self.client = MysqlClient.MysqlClient("customize")
    def getUserTimeDistributeParms(self):
        """Return {appkey: usertimedistribute dict} parsed from JSON stored
        in customize.d_customapp_params."""
        result = {}
        for item in self.client.select("select appkey, usertimedistribute from customize.d_customapp_params"):
            appkey, usertimedistribute = item
            usertimedistribute = json.loads(usertimedistribute)
            result.setdefault(appkey, usertimedistribute)
        return result
    def getCustomParms(self, datatype):
        """Return (cdkey, appkey, plat) for the first enabled android/ios app
        matching `datatype`, or None (implicitly) when no row matches.

        NOTE(review): the SQL is built with %-interpolation of `datatype`;
        if it can come from untrusted input this is injectable — prefer a
        parameterized query.
        """
        sql = "select a.appkey, b.cdkey, a.plat from \
            (select * from saas_meta.d_app where appkey = '%(appkey)s' and enable = 1 and (plat = 'android' or plat = 'ios')) a \
            left join \
            (select * from saas_meta.d_account where enable = 1) b \
            on a.own = b.name_uid" % {"appkey": datatype}
        # Returns on the first row: select order is (appkey, cdkey, plat).
        for item in self.client.select(sql):
            dbname, appkey, plat = item[1], item[0], item[2]
            return (dbname, appkey, plat)
if __name__ == "__main__":
    # Ad-hoc manual test (Python 2 print syntax); requires a live MySQL.
    tester = MysqlData()
    print tester.getCustomParms("biqu")
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,041
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi/funnel.py
|
# -*- coding: utf-8 -*-
import datetime
import time
from collections import OrderedDict
from QueryOperator import query_operator
from pymongo import MongoClient
from config import mongo_ip
from config import mongo_port
from MongoDatas import _sortByKey
from SaaSCommon.JHDecorator import fn_timer
# from config import mongo_con_string
# import json
# Module-level MongoDB connection shared by every funnel query below.
# (The `global` statement is a no-op at module scope; kept for clarity.)
global conn
conn = MongoClient(mongo_ip, mongo_port)
# Production funnel query (legacy; no "map" attribute filtering).
@fn_timer
def funneldataOld(datatype, s_tm, e_tm, events):
    # Kept for the old API; map-attribute filtering is not supported here.
    '''
    Count, per day, how many users completed each funnel prefix.

    :param datatype: Mongo database name.
    :param s_tm: start date "YYYY-MM-DD".
    :param e_tm: end date "YYYY-MM-DD" (inclusive).
    :param events: funnel steps; each step is an event id string or a list
                   of event ids (any one satisfies the step).
    :return: {date: [count_step1, count_steps12, ...]} sorted by date.
    :desc: the "any event" wildcard id is jhddg_every.
    '''
    dbname = datatype
    collection_name = "uvfile"
    s_stamp = time.mktime(time.strptime(s_tm, "%Y-%m-%d"))
    e_stamp = time.mktime(time.strptime(e_tm, "%Y-%m-%d"))
    result = {}
    # events = map(str, events)
    while s_stamp <= e_stamp:
        _tm = time.strftime("%Y-%m-%d", time.localtime(s_stamp))
        result.setdefault(_tm, [])
        # The i-th funnel value counts users who triggered steps 0..i.
        for i in range(0, len(events)):
            query = []
            match = {"$match": {"tm": _tm}}
            for event in events[0:i+1]:
                # Single-element list: unwrap and filter on that one event.
                if type(event) == type([]) and len(event) == 1:
                    event =event[0]
                    if event == "jhddg_every":
                        match["$match"].setdefault("item_count", {"$exists": True})
                    else:
                        match["$match"].setdefault("item_count.%s"%event, {"$exists": True})
                # Backward compatibility with the old API format (bare string).
                elif type(event) == type("") or type(event) == type(u""):
                    event = event
                    if event == "jhddg_every":
                        match["$match"].setdefault("item_count", {"$exists": True})
                    else:
                        match["$match"].setdefault("item_count.%s"%event, {"$exists": True})
                # Multi-event step: any of the listed events satisfies it.
                elif type(event) == type([]) and len(event) > 1:
                    value = event
                    tmp = {"$or": []}
                    for _key in value:
                        if _key == "jhddg_every":
                            tmp["$or"].append({"item_count": {"$exists": True}})
                        else:
                            tmp["$or"].append({"item_count.%s" % _key: {"$exists": True}})
                    match["$match"].setdefault("$and", []).append(tmp)
            query.append(match)
            # Projection keeps only the user key and the event timestamps.
            project = {"$project": {}}
            for event in events[0:i+1]:
                if type(event) == type([]) and len(event) == 1:
                    event = event[0]
                    if event == "jhddg_every":
                        project["$project"].setdefault("jhd_userkey", 1)
                    else:
                        project["$project"].setdefault("jhd_userkey", 1)
                        project["$project"].setdefault("item_count.%s.opatm"%event, 1)
                elif type(event) == type("") or type(event) == type(u""):
                    event = event
                    if event == "jhddg_every":
                        project["$project"].setdefault("jhd_userkey", 1)
                    else:
                        project["$project"].setdefault("jhd_userkey", 1)
                        project["$project"].setdefault("item_count.%s.opatm"%event, 1)
                elif type(event) == type([]) and len(event) > 1:
                    value = event
                    [project["$project"].setdefault("item_count.%s.opatm"%_key, 1) for _key in value]
            query.append(project)
            # group = {"$group": {"_id": "all", "count": {"$sum": 1}, "users": {"$push": "$jhd_userkey"}}}
            group = {"$group": {"_id": "all", "count": {"$sum": 1}}}
            query.append(group)
            for item in conn[dbname][collection_name].aggregate(query, allowDiskUse=True):
                result[_tm].append(item["count"])
            # No document matched: record 0 for this funnel step.
            if len(result[_tm]) != i+1:
                result[_tm].append(0)
        s_stamp += 86400
    return _sortByKey(result)
def operator_query(op, value):
    """Translate an (operator, value) pair from the funnel API into a
    MongoDB query fragment.

    String operators: is, nis, like, nlike, startswith, endswith.
    Numeric operators: eq, ne, lte, lt, gte, gt, in.

    :raises NotImplementedError: for an unknown operator.
    :raises ValueError/TypeError: when value cannot be coerced to int for a
        numeric operator (callers wrap this call in try/except).
    """
    if op == "is":
        return value
    if op == "nis":
        return {"$ne": str(value)}
    if op == "like":
        return {"$regex": str(value)}
    if op == "nlike":
        # NOTE(review): returns the raw value, i.e. no negation is applied;
        # behavior preserved — confirm whether {"$not": {"$regex": ...}}
        # was intended.
        return value
    if op == "startswith":
        return {"$regex": "^" + str(value)}
    if op == "endswith":
        return {"$regex": str(value) + "$"}
    if op == "eq":
        return int(value)
    if op == "ne":
        return {"$ne": int(value)}
    if op == "lte":
        return {"$lte": int(value)}
    if op == "lt":
        return {"$lt": int(value)}
    if op == "gte":
        return {"$gte": int(value)}
    if op == "gt":
        return {"$gt": int(value)}
    if op == "in":
        # Bug fix: Mongo's $in requires an array, and int(value) raised
        # TypeError whenever value was the expected list of candidates.
        if isinstance(value, (list, tuple, set)):
            return {"$in": [int(v) for v in value]}
        return {"$in": [int(value)]}
    raise NotImplementedError("operator %s is invalid!" % (op, ))
# Production funnel query (supports per-event attribute filters).
@fn_timer
def funneldata(datatype, s_tm, e_tm, params, cycle = "daily"):
    # Successor of funneldataOld; adds per-event attribute filtering.
    '''
    Count, per day, how many users completed each funnel prefix.

    :param datatype: Mongo database name.
    :param s_tm: start date "YYYY-MM-DD".
    :param e_tm: end date "YYYY-MM-DD" (inclusive; span limited to 60 days).
    :param params: {"funnel": [...steps...], "attrs": {...global filters...}};
                   each step is a list of {"id": event_id, "attrs": [...]}.
    :param cycle: accepted but unused in the visible code.
    :return: {date: [count_step1, count_steps12, ...]} sorted by date, or
             {"errinfo": ...} on invalid input.
    :desc: the "any event" wildcard id is jhddg_every.
    '''
    # operators for string: is, like, nlike, startswith, endswith
    # operators for number: eq, neq, lte, lt, gte, gt
    dbname = datatype
    collection_name = "uvfile"
    s_stamp = time.mktime(time.strptime(s_tm, "%Y-%m-%d"))
    e_stamp = time.mktime(time.strptime(e_tm, "%Y-%m-%d"))
    num = (e_stamp - s_stamp)/86400
    if num > 60 or num < 0:
        return {"errinfo": "日期跨度超出范围!"}
    eventsdata = params["funnel"]
    # funnel format: [[{"id": "ac41", "map": {"og": "CSX"}}], [{"id": "ac23", "map": {}}, {"id": "ac11", "map": {}}], [{"id": "ac22", "map": {}}]]
    events = []
    # events_seq_map[step][step_part][event_id] -> that event's attr filters.
    events_seq_map = {}
    for step, item in enumerate(eventsdata):
        tmp = []
        for step_part, event in enumerate(item):
            tmp.append(event["id"])
            events_seq_map.setdefault(step, {}).setdefault(step_part, {}).setdefault(event["id"], event.get("attrs", {}))
        events.append(tmp)
    attrs = params.get("attrs", {})
    # The app-version filter may arrive as a list; only the first entry is used.
    if "jhd_vr" in attrs and isinstance(attrs["jhd_vr"], list):
        if len(attrs["jhd_vr"]) >= 1:
            attrs["jhd_vr"] = attrs["jhd_vr"][0]
        else:
            attrs.pop("jhd_vr")
    result = {}
    # events = map(str, events)
    while s_stamp <= e_stamp:
        _tm = time.strftime("%Y-%m-%d", time.localtime(s_stamp))
        result.setdefault(_tm, [])
        # The i-th funnel value counts users who triggered steps 0..i.
        for i in range(0, len(events)):
            query = []
            match = {"$match": {"tm": _tm}}
            match["$match"].update(attrs)
            for step, event in enumerate(events[0:i+1]):
                if type(event) == type([]):
                    value = event
                    tmp = OrderedDict([
                        ("$or", [])
                    ])
                    for step_part, _key in enumerate(value):
                        event_mapkeys = events_seq_map[step][step_part][_key]
                        if not _key:
                            continue
                        if _key == "jhddg_every":
                            tmp["$or"].append({"item_count": {"$exists": True}})
                        else:
                            # Build attribute conditions for this event, if any.
                            map_conds = {}
                            for map_key in event_mapkeys:
                                id_key = map_key["id"]
                                if not id_key:
                                    continue
                                id_value = map_key["val"]
                                op = map_key["op"]
                                try:
                                    express = query_operator(op, id_value)
                                except:
                                    return {"errinfo": "类型错误!"}
                                map_conds.setdefault("item_count.%(event)s.maps.%(map_key)s" % {"event": _key, "map_key": id_key}, express)
                            if map_conds:
                                tmp["$or"].append(map_conds)
                            else:
                                tmp["$or"].append({"item_count.%s" % _key: {"$exists": True}})
                    if tmp["$or"]:
                        match["$match"].setdefault("$and", []).append(tmp)
            # print json.dumps(match)
            query.append(match)
            # Projection keeps only the user key and the event timestamps.
            project = {"$project": {}}
            for event in events[0:i+1]:
                if type(event) == type([]) and len(event) == 1:
                    event = event[0]
                    if not event:
                        continue
                    if event == "jhddg_every":
                        project["$project"].setdefault("jhd_userkey", 1)
                    else:
                        project["$project"].setdefault("jhd_userkey", 1)
                        project["$project"].setdefault("item_count.%s.opatm"%event, 1)
                elif type(event) == type("") or type(event) == type(u""):
                    event = event
                    if not event:
                        continue
                    if event == "jhddg_every":
                        project["$project"].setdefault("jhd_userkey", 1)
                    else:
                        project["$project"].setdefault("jhd_userkey", 1)
                        project["$project"].setdefault("item_count.%s.opatm"%event, 1)
                elif type(event) == type([]) and len(event) > 1:
                    value = event
                    if not event:
                        continue
                    [project["$project"].setdefault("item_count.%s.opatm"%_key, 1) for _key in value]
            if project["$project"]:
                query.append(project)
            # group = {"$group": {"_id": "all", "count": {"$sum": 1}, "users": {"$push": "$jhd_userkey"}}}
            group = {"$group": {"_id": "all", "count": {"$sum": 1}}}
            query.append(group)
            # import json
            # print json.dumps(query)
            for item in conn[dbname][collection_name].aggregate(query, allowDiskUse=True):
                result[_tm].append(item["count"])
            # No document matched: record 0 for this funnel step.
            if len(result[_tm]) != i+1:
                result[_tm].append(0)
        s_stamp += 86400
    return _sortByKey(result)
def group_by_week(data):
    """Group daily funnel rows into weeks. UNFINISHED: only computes
    boundary values; never builds or returns a result.

    NOTE(review): `min_day_weekday - datetime.timedelta(days=1)` subtracts
    a timedelta from an int and would raise TypeError if this ran —
    probably meant `min_day - datetime.timedelta(days=min_day_weekday - 1)`.
    Also `max_day_weekday` is computed from min_day, not max_day. Confirm
    before enabling.
    """
    # data format
    #{
    # "2016-06-17": [
    # 1717,
    # 1105,
    # 1105],
    # "2016-06-16": [
    # 1717,
    # 1105,
    # 1105],
    # }
    # Monday is 0 and Sunday is 6
    result = {}
    days = data.keys()
    min_day = datetime.datetime.strptime(min(days), "%Y-%m-%d")
    min_day_tumple = min_day.timetuple()
    min_day_weekday = min_day.isoweekday()
    max_day = datetime.datetime.strptime(max(days), "%Y-%m-%d")
    max_day_tumple = max_day.timetuple()
    max_day_weekday = min_day.isoweekday()
    week_start_day = (min_day - (min_day_weekday - datetime.timedelta(days=1))).strftime("%Y-%m-%d")
def group_by_month(result):
    """Group daily funnel rows into months. UNIMPLEMENTED stub."""
    pass
@fn_timer
def funneldata_bak(datatype, s_tm, e_tm, params):
    """Alternative funnel computation built on funnelstep: walks the date
    range from e_tm backwards and, for each day, evaluates every funnel
    prefix.

    :param params: {"funnel": [...steps...], "attrs": {...}} (see funnelstep).
    :return: OrderedDict {date: [count_step1, count_steps12, ...]}.
    """
    start_day = datetime.datetime.strptime(s_tm, "%Y-%m-%d")
    end_day = datetime.datetime.strptime(e_tm, "%Y-%m-%d")
    day_span = (end_day - start_day).days
    if day_span > 30:
        # Bug fix: `end_day - 30` subtracted an int from a datetime and
        # raised TypeError; a timedelta is required to cap the span.
        start_day = end_day - datetime.timedelta(days=30)
    eventsdata = params["funnel"]
    attrs = params.get("attrs", {})
    result = OrderedDict()
    while start_day <= end_day:
        tm = end_day.date().strftime("%Y-%m-%d")
        # For each funnel prefix [step0..step_{i-1}], count completing users.
        for i in range(1, len(eventsdata)+1):
            eventsdata_part = eventsdata[:i]
            funnel_count = funnelstep(datatype, tm, eventsdata_part, attrs=attrs)
            result.setdefault(tm, []).append(funnel_count)
        end_day = end_day - datetime.timedelta(days=1)
    # print(result)
    return result
# Count users who completed the whole funnel prefix (in order) on one day.
def funnelstep(datatype, tm, eventsdata, attrs = None):
    '''
    :param datatype: Mongo database name.
    :param tm: day "YYYY-MM-DD".
    :param eventsdata: [[{"id": "ac41", "map": {"og": "CSX"}}], [{"id": "ac23", "map": {}}, {"id": "ac11", "map": {}}], [{"id": "ac22", "map": {}}]]
    :param attrs: optional global filters forwarded to funnelstepRawData.
    :return: number of users whose event sequence contains the funnel
             steps in chronological order.
    '''
    if not attrs:
        attrs = {}
    # UserEvent partitions use the compact YYYYMMDD date form.
    tm_short = tm.replace("-", "")
    dbname = datatype
    collection_name = "UserEvent"
    events_steps = [] # [["in"], ["ac23", "ac11"], ["ac22"]]
    # event id -> list of "map" attribute filters for that event.
    event_group_condition = {}
    for events_step in eventsdata:
        event_onestep = []
        for item in events_step:
            eventid = item["id"]
            event_onestep.append(eventid)
            # Collect the per-event map filter conditions.
            # if item["map"]: # variant that skipped empty maps, kept disabled
            event_group_condition.setdefault(eventid, []).append(item.get("map", {}))
        events_steps.append(event_onestep)
    events_num = len(events_steps)
    raw_data = funnelstepRawData(datatype, tm_short, events_steps, attrs)
    # print(raw_data)
    # If the first-step cohort is empty, the whole funnel is 0.
    if raw_data[tm]["count"] == 0:
        return 0
    # One-step funnels (or "any event" followed by one step) need no
    # ordering check — the raw count already answers the question.
    if len(eventsdata) == 1 or (len(eventsdata) == 2 and eventsdata[0][0]["id"] == "jhddg_every"):
        return raw_data[tm]["count"]
    query = []
    users = raw_data[tm]["users"]
    # events = itertools.chain(reduce(operator.concat, events_steps, [])) # flattened events list
    match = {"$match": OrderedDict([("partition_date", tm_short), ("jhd_userkey", {"$in": users})])}
    # Restrict to the funnel's events (with their map filters, if any).
    or_part = {"$or": []}
    for _eventid in event_group_condition:
        if _eventid == "jhddg_every":
            continue
        user_map = event_group_condition[_eventid]
        for map_s in user_map:
            tmp = {}
            tmp["jhd_eventId"] = _eventid
            for key in map_s:
                tmp["jhd_map.%s"%key] = map_s[key]
            or_part["$or"].append(tmp)
    match["$match"].update(**or_part)
    sort_by = {"$sort": {"jhd_ts": 1}}
    group = {"$group": {"_id": "$jhd_userkey", "events": {"$push": "$jhd_eventId"}, "ts": {"$push": "$jhd_ts"}, "eventset": {"$addToSet": "$jhd_eventId"}}}
    # Per-user distinct event count, used to prune users below.
    project = {"$project": {"events": 1, "_id": 0, "ts": 1, "event_size": {"$size": "$eventset"}}}
    # Drop users who did not trigger the required number of distinct events.
    match_1 = {"$match": {"event_size": events_num}}
    query.append(match)
    # query.append(sort_by)
    query.append(group)
    query.append(project)
    query.append(match_1)
    import json
    # print("funnelstep query", json.dumps(query))
    funnel_count = 0
    for item in conn[dbname][collection_name].aggregate(query, allowDiskUse=True):
        ts = item["ts"]
        user_events = item["events"]
        # Sort the user's events by operation timestamp (ascending).
        # NOTE(review): duplicate timestamps collapse in this dict.
        events_ts = dict([(_ts, _event) for _ts, _event in zip(ts, user_events)])
        ts.sort()
        user_events = [events_ts[_ts] for _ts in ts]
        for events_onestep, step_index in zip(events_steps, range(0, len(events_steps))):
            iscontinue = True
            # Walk the funnel: each step must appear at or after the
            # position where the previous step was found.
            for eventid in events_onestep:
                try:
                    slice_index = user_events.index(eventid)
                    user_events = user_events[slice_index:]
                    break
                except ValueError:
                    iscontinue = False
                    break
            if not iscontinue:
                break
        # NOTE(review): this increments even when the ordering walk broke
        # out early — verify whether failed users should be counted (the
        # match_1 stage already filters on distinct-event count).
        funnel_count += 1
    return funnel_count
# Select the day's matching users (count + userkeys) from uvfile.
def funnelstepRawData(datatype, tm, events, attrs = None):
    '''
    :param datatype: Mongo database name.
    :param tm: day in compact "YYYYMMDD" form.
    :param events: step lists, e.g. [["in"], ["ac23", "ac11"], ["ac22"]].
    :param attrs: optional global filters; list values become $in conditions
                  (capped at 10 candidates each).
    :return: {date: {"count": n, "users": [...]}} keyed by "YYYY-MM-DD".
    :desc: the "any event" wildcard id is jhddg_every.
    '''
    if attrs == None:
        attrs = {}
    dbname = datatype
    collection_name = "uvfile"
    time_stamp = time.mktime(time.strptime(tm, "%Y%m%d"))
    result = {}
    # events = map(str, events)
    _tm = time.strftime("%Y-%m-%d", time.localtime(time_stamp))
    # result.setdefault(_tm, [])
    # Only the full prefix (all steps at once) is queried here.
    for i in [max(range(0, len(events)))]:
        query = []
        match = {"$match": OrderedDict([("tm", _tm)])}
        for attr in attrs:
            if not isinstance(attrs[attr], list):
                continue
            match["$match"].setdefault(attr, {"$in": attrs[attr][:10]})
        for event in events[0:i+1]:
            # Single-element list: unwrap and filter on that one event.
            if type(event) == type([]) and len(event) == 1:
                event = event[0]
                if event == "jhddg_every":
                    match["$match"].setdefault("item_count", {"$exists": True})
                else:
                    match["$match"].setdefault("item_count.%s"%event, {"$exists": True})
            # Backward compatibility with the old API format (bare string).
            elif type(event) == type("") or type(event) == type(u""):
                event = event
                if event == "jhddg_every":
                    match["$match"].setdefault("item_count", {"$exists": True})
                else:
                    match["$match"].setdefault("item_count.%s"%event, {"$exists": True})
            # Multi-event step: any of the listed events satisfies it.
            elif type(event) == type([]) and len(event) > 1:
                value = event
                tmp = {"$or": []}
                for _key in value:
                    if _key == "jhddg_every":
                        tmp["$or"].append({"item_count": {"$exists": True}})
                    else:
                        tmp["$or"].append({"item_count.%s" % _key: {"$exists": True}})
                match["$match"].setdefault("$and", []).append(tmp)
        query.append(match)
        # Projection keeps only the user key and the event timestamps.
        project = {"$project": {}}
        for event in events[0:i+1]:
            if type(event) == type([]) and len(event) == 1:
                event = event[0]
                if event == "jhddg_every":
                    project["$project"].setdefault("jhd_userkey", 1)
                else:
                    project["$project"].setdefault("jhd_userkey", 1)
                    project["$project"].setdefault("item_count.%s.opatm" % event, 1)
            elif type(event) == type("") or type(event) == type(u""):
                event = event
                if event == "jhddg_every":
                    project["$project"].setdefault("jhd_userkey", 1)
                else:
                    project["$project"].setdefault("jhd_userkey", 1)
                    project["$project"].setdefault("item_count.%s.opatm"%event, 1)
            elif type(event) == type([]) and len(event) > 1:
                value = event
                [project["$project"].setdefault("item_count.%s.opatm"%_key, 1) for _key in value]
        query.append(project)
        group = {"$group": {"_id": "all", "count": {"$sum": 1}, "users": {"$push": "$jhd_userkey"}}}
        query.append(group)
        # print("funnelstepRawData query", json.dumps(query))
        # Defaults so callers always find "count"/"users" even with no match.
        result.setdefault(_tm, {}).setdefault("count", 0)
        result.setdefault(_tm, {}).setdefault("users", set())
        for item in conn[dbname][collection_name].aggregate(query, allowDiskUse=True):
            result.setdefault(_tm, {})["count"] = item["count"]
            result.setdefault(_tm, {})["users"] = item["users"]
    return result
if __name__ == "__main__":
    # Ad-hoc manual tests (Python 2 print syntax); require a live MongoDB.
    # http://101.201.145.120:8090/saasapi/eventserisesingle/feeling/2016-09-14/2016-10-13/[%22ac44%22,%22ac17%22,%22ac46%22]/
    # print funnelRawData("biqu", "2016-11-05", "2016-11-13", [["jhddg_every"], ["ac11"], ["ac12"], ["ac13"]])
    # print funnelstep("biqu", "2016-11-13", [[{"id": "ac36", "map": {"og": "SHA"}}], [{"id": "ac9", "map": {}}]])
    a = time.time()
    # print funneldata("biqu", "2016-11-10", "2016-11-20", [[{"id": "ac36", "map": {"og": "SHA"}}], [{"id": "ac9", "map": {}}]])
    # print funneldata("guaeng", "2017-01-01", "2017-01-05", {"funnel": [[{"id": "ac12", "map": {"4": "56d32408c507b600509f48f6"}}], [{"id": "ac6", "map": {"18": "emoji"}}]], "attrs": {"jhd_vr": "2.2.3"}})
    # print funneldata("BIQU_ANDROID", "2016-12-31", "2017-01-07", {"funnel": [[{"id": "jhddg_every"}], [{"id": "ac7"}]], "attrs": {"jhd_vr": "1.1.4"}})
    # print funneldataOld("BIQU_ANDROID", "2016-12-31", "2017-01-07", [["ac1"], ["ac7"]])
    # print funneldata("biqu", "2016-11-10", "2016-11-20", {"funnel": [[{"id": "ac49", "map": {"og": {"$regex": "CSX"}}}], [{"id": "ac23", "map": {}}, {"id": "ac11", "map": {}}], [{"id": "ac22", "map": {}}]]})
    # print funneldata("biqu", "2017-01-10", "2017-01-11", {"attrs": {"jhd_vr": "2.1.4"}, "funnel":[[{"id":"ac9"}],[{"id":"ac36","attrs":[{"id":"og","op":"is","val":"BJS"}]}],[{"id":"ac65","attrs":[{"id":"id","op":"is","val":"123"},{"id":"op","op":"nlike","val":"123"}]},{"id":"ac83","attrs":[{"id":"id","op":"is","val":"想"}]}]]})
    print funneldata("BIQU_ANDROID", "2017-01-04", "2017-01-11", {"funnel":[[{"id":"jhddg_every"}],[{"id":"ac2"}],[{"id":"ac8","attrs":[{"id":"id","op":"like","val":"135"}]}],[{"id":"ac49","attrs":[{"id":"type","op":"nis","val":"经停"},{"id":"wf","op":"is","val":"0"},{"id":"st","op":"endswith","val":"0"}]}],[{"id":"ac50","attrs":[{"id":"name","op":"endswith","val":"亮"},{"id":"hbh","op":"startswith","val":"MU"}]}],[{"id":"ac53","attrs":[{"id":"op","op":"nis","val":"1"}]}],[{"id":"ac55","attrs":[{"id":"type","op":"is","val":"微信"}]}]],"endDay":"2017-01-12","startDay":"2017-01-05"})
    # print funneldata("biqu", "2016-11-10", "2016-11-20", [[{"id": "ac13", "map": {"10": "4"}}], [{"id": "ac44", "map": {}}]])
    print(time.time()-a)
    # print eventsSingle("feeling", "2016-09-14", "2016-10-13", ["ac44", "ac17", "ac46"])
    # print eventsSingle("feeling", "2016-09-01", "2016-09-01", ["in", {"$or": ["ac11"]}, "ac22"])
    # print eventsSingle("feeling", "2016-09-01", "2016-09-01", ["in", "ac11", "ac22"])
    # [["in"], ["ac23", "ac11"], ["ac22"]]
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,042
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi_clickhouse/infi/clickhouse_orm/database.py
|
import requests
from collections import namedtuple
from .models import ModelBase
from .utils import escape, parse_tsv, import_submodules
from math import ceil
import datetime
import logging
from string import Template
from six import PY3, string_types
# Lightweight container for one page of Database.paginate() results.
Page = namedtuple('Page', 'objects number_of_objects pages_total number page_size')
class DatabaseException(Exception):
    """Raised when the ClickHouse HTTP interface returns a non-200 response."""
    pass
class Database(object):
    """Thin client for one ClickHouse database over the HTTP interface.

    All model operations are scoped to *db_name*; requests are POSTed to
    *db_url* with optional username/password credentials.
    """

    def __init__(self, db_name, db_url='http://localhost:8123/', username=None, password=None, readonly=False):
        # readonly=True suppresses the CREATE DATABASE side effect below.
        self.db_name = db_name
        self.db_url = db_url
        self.username = username
        self.password = password
        self.readonly = readonly
        if not self.readonly:
            self._send('CREATE DATABASE IF NOT EXISTS `%s`' % db_name)

    def create_table(self, model_class):
        """Create the table backing *model_class* in this database."""
        # TODO check that model has an engine
        self._send(model_class.create_table_sql(self.db_name))

    def drop_table(self, model_class):
        """Drop the table backing *model_class*."""
        self._send(model_class.drop_table_sql(self.db_name))

    def drop_database(self):
        """Drop the entire database this instance is bound to."""
        self._send('DROP DATABASE `%s`' % self.db_name)

    def insert(self, model_instances, batch_size=1000):
        """Insert an iterable of model instances as TabSeparated rows.

        The request body is produced by a generator, so large inputs are
        streamed to ClickHouse in chunks of *batch_size* rows without being
        materialized in memory.
        """
        from six import next
        i = iter(model_instances)
        try:
            first_instance = next(i)
        except StopIteration:
            return  # model_instances is empty
        model_class = first_instance.__class__
        def gen():
            yield self._substitute('INSERT INTO $table FORMAT TabSeparated\n', model_class).encode('utf-8')
            yield (first_instance.to_tsv() + '\n').encode('utf-8')
            # Collect lines in batches of batch_size
            batch = []
            for instance in i:
                batch.append(instance.to_tsv())
                if len(batch) >= batch_size:
                    # Return the current batch of lines
                    yield ('\n'.join(batch) + '\n').encode('utf-8')
                    # Start a new batch
                    batch = []
            # Return any remaining lines in partial batch
            if batch:
                yield ('\n'.join(batch) + '\n').encode('utf-8')
        self._send(gen())

    def count(self, model_class, conditions=None):
        """Return the row count for the model's table, optionally filtered
        by a raw SQL *conditions* string."""
        query = 'SELECT count() FROM $table'
        if conditions:
            query += ' WHERE ' + conditions
        query = self._substitute(query, model_class)
        r = self._send(query)
        return int(r.text) if r.text else 0

    def select(self, query, model_class=None, settings=None):
        """Execute *query* and yield one model instance per result row.

        When *model_class* is omitted, an ad-hoc model is built from the
        column names/types reported by TabSeparatedWithNamesAndTypes.
        """
        query += ' FORMAT TabSeparatedWithNamesAndTypes'
        query = self._substitute(query, model_class)
        r = self._send(query, settings, True)
        lines = r.iter_lines()
        # First two response lines carry the column names and types.
        field_names = parse_tsv(next(lines))
        field_types = parse_tsv(next(lines))
        model_class = model_class or ModelBase.create_ad_hoc_model(zip(field_names, field_types))
        for line in lines:
            yield model_class.from_tsv(line, field_names)

    def paginate(self, model_class, order_by, page_num=1, page_size=100, conditions=None, settings=None):
        """Return one Page of rows ordered by *order_by*; page_num is 1-based."""
        count = self.count(model_class, conditions)
        pages_total = int(ceil(count / float(page_size)))
        offset = (page_num - 1) * page_size
        query = 'SELECT * FROM $table'
        if conditions:
            query += ' WHERE ' + conditions
        query += ' ORDER BY %s' % order_by
        query += ' LIMIT %d, %d' % (offset, page_size)
        query = self._substitute(query, model_class)
        return Page(
            objects=list(self.select(query, model_class, settings)),
            number_of_objects=count,
            pages_total=pages_total,
            number=page_num,
            page_size=page_size
        )

    def migrate(self, migrations_package_name, up_to=9999):
        """Apply all unapplied migrations from the package, in name order,
        stopping after the migration whose 4-digit prefix reaches *up_to*."""
        from .migrations import MigrationHistory
        logger = logging.getLogger('migrations')
        applied_migrations = self._get_applied_migrations(migrations_package_name)
        modules = import_submodules(migrations_package_name)
        unapplied_migrations = set(modules.keys()) - applied_migrations
        for name in sorted(unapplied_migrations):
            logger.info('Applying migration %s...', name)
            for operation in modules[name].operations:
                operation.apply(self)
            self.insert([MigrationHistory(package_name=migrations_package_name, module_name=name, applied=datetime.date.today())])
            if int(name[:4]) >= up_to:
                break

    def _get_applied_migrations(self, migrations_package_name):
        # Ensure the history table exists before querying it.
        from .migrations import MigrationHistory
        self.create_table(MigrationHistory)
        query = "SELECT module_name from $table WHERE package_name = '%s'" % migrations_package_name
        query = self._substitute(query, MigrationHistory)
        return set(obj.module_name for obj in self.select(query))

    def _send(self, data, settings=None, stream=False):
        """POST *data* (str/bytes/generator) to ClickHouse.

        Raises DatabaseException with the server's error text on non-200.
        """
        if PY3 and isinstance(data, string_types):
            data = data.encode('utf-8')
        params = self._build_params(settings)
        r = requests.post(self.db_url, params=params, data=data, stream=stream)
        if r.status_code != 200:
            raise DatabaseException(r.text)
        return r

    def _build_params(self, settings):
        # Merge optional ClickHouse settings with the auth credentials.
        params = dict(settings or {})
        if self.username:
            params['user'] = self.username
        if self.password:
            params['password'] = self.password
        return params

    def _substitute(self, query, model_class=None):
        '''
        Replaces $db and $table placeholders in the query.
        '''
        if '$' in query:
            mapping = dict(db="`%s`" % self.db_name)
            if model_class:
                mapping['table'] = "`%s`.`%s`" % (self.db_name, model_class.table_name())
            query = Template(query).substitute(mapping)
        return query
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,043
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi_clickhouse/EventSummary.py
|
# -*- coding: utf-8 -*-
import __init__
import sys
reload(sys)  # py2-only: required before setdefaultencoding below
sys.setdefaultencoding("utf-8")
from os import sys, path  # NOTE(review): re-binds `sys` to os.sys (same module object)
import time
import json
from collections import OrderedDict
from ClickHouseClient.ClickHouseClient import ClickHouseClient
import logging
from Query import Query
from SaaSCommon.JHDecorator import fn_timer
# Append API logs to logs/api.log next to this module.
logging.basicConfig(level=logging.INFO,
                    format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
                    datefmt='%a, %d %b %Y %H:%M:%S',
                    filename=path.dirname(path.abspath(__file__)) + "/logs/" + "api.log",
                    filemode='a')
logger = logging.getLogger(__file__)
class EventSummary(Query):
    """Builds and runs the per-event PV/UV summary query against ClickHouse."""

    def __init__(self):
        pass

    def create_query_sql(self, db_name, start_day, end_day, params, events, attrs=None):
        """Assemble the summary SQL: PV (row count) and UV (distinct userkey)
        per jhd_eventId over [start_day, end_day], filtered by *params*/*events*.

        NOTE(review): *attrs* is accepted but not used by this method.
        """
        if bool(events) == False:
            events = []
        if attrs is None:
            attrs = {}
        query_format = "select jhd_eventId as _id, sum(1) as pv, count(distinct jhd_userkey) as uv from %(db_name)s.userevent \
            where (partition between toDate('%(start_day)s') and toDate('%(end_day)s')) and jhd_eventId != ''%(_where)s \
            group by jhd_eventId"
        _where = self.fragment_where(params, events)
        query = query_format % {
            "db_name": db_name,
            "start_day": start_day,
            "end_day": end_day,
            # an empty params dict drops the extra WHERE fragment entirely
            "_where": _where if bool(params) else " ",
        }
        logger.info(query)
        return query

    def fragment_where(self, attrs, events):
        # Build the trailing " and ..." clause from event ids plus base attrs.
        fragments = []
        if bool(events):
            if len(events) == 1:
                # Pad to 2 items so str(tuple(...)) has no trailing comma.
                # NOTE(review): this mutates the caller's events list.
                events.append('')
            events_cond = "jhd_eventId in %(events_tuple)s" % {"events_tuple": str(tuple(map(str, events)))}
            fragments.append(events_cond)
        attrs_cond = super(EventSummary, self).fragment_where(attrs)
        fragments.append(attrs_cond)
        return " and " + " and ".join(fragments)

    @fn_timer
    def data(self, datatype, params):
        # Parse parameters
        try:
            params = params if isinstance(params, dict) else json.loads(params)
            events = params.pop("events", [])
            start_day = params.pop("startDay")
            end_day = params.pop("endDay")
            attrs_map = params.pop("attrs", [])
            # Cap the queried date span (original comment said 60 days;
            # the code below actually enforces 90).
            tm_s_stamp = time.mktime(time.strptime(start_day, "%Y-%m-%d"))
            tm_e_stamp = time.mktime(time.strptime(end_day, "%Y-%m-%d"))
            num = (tm_e_stamp - tm_s_stamp) / 86400
            if num > 90 or num < 0:
                return {"errinfo": "日期跨度超出范围!"}
        except:
            import sys
            import traceback
            exc_type, exc_value, exc_traceback = sys.exc_info()
            errinfo = traceback.format_exception(exc_type, exc_value, exc_traceback)
            logging.error(json.dumps(errinfo))
            return {"errinfo": "传递参数错误!"}
        # Build the query (py2 str/unicode normalization dance)
        try:
            try:
                query = str(self.create_query_sql(datatype, start_day, end_day, params, events, attrs_map).decode("utf-8"))
            except:
                query = self.create_query_sql(datatype, start_day, end_day, params, events, attrs_map).decode("utf-8").encode("utf-8")
        except:
            import sys
            import traceback
            exc_type, exc_value, exc_traceback = sys.exc_info()
            errinfo = traceback.format_exception(exc_type, exc_value, exc_traceback)
            logging.error(json.dumps(errinfo))
            return {"errinfo": "生成查询错误!"}
        # Execute and collect the result rows
        result = []
        try:
            client = ClickHouseClient()
            for row in client.select(datatype, query):
                key = row._id
                uv = row.uv
                pv = row.pv
                result.append({"_id": key, "uv": uv, "pv": pv})
        except:
            import sys
            import traceback
            exc_type, exc_value, exc_traceback = sys.exc_info()
            errinfo = traceback.format_exception(exc_type, exc_value, exc_traceback)
            logging.error(json.dumps(errinfo))
            return {"errinfo": "查询错误!"}
        return result
if __name__ == "__main__":
    # Manual smoke test (Python 2 print statement kept as-is).
    tester = EventSummary()
    # query_info = {"startDay": "2017-02-01", "endDay": "2017-02-14", "events": ["ac36"], "jhd_pb": "appstore", "jhd_vr": "2.1.2", " jhd_opType": "action"}
    query_info = {"startDay": "2017-03-20", "endDay": "2017-03-26", "jhd_opType": "action", "events": ["pc_dh"]}
    print json.dumps(tester.data("ncf_h5", query_info), ensure_ascii=False)
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,044
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi_clickhouse/ClickHouseClient/SubDatabase.py
|
# -*- coding: utf-8 -*-
import __init__
import ConfigParser
from infi.clickhouse_orm.database import Database
# Extends Database with a create_merge_table helper.
class SubDatabase(Database):
    """Database subclass that can also create a model's Merge table."""

    def create_merge_table(self, model_class):
        """Create the merge table defined by *model_class* in this database."""
        sql = model_class.create_merge_table_sql(self.db_name)
        self._send(sql)
if __name__ == "__main__":
    # Manual connectivity check: read ClickHouse credentials from the
    # project config and open the "biqu" database (py2 ConfigParser).
    from __init__ import configPath
    cf = ConfigParser.ConfigParser()
    cf.read(configPath)
    DB_URL = cf.get("clickhouse", "db_url")
    PASSWORD = cf.get("clickhouse", "password")
    db = SubDatabase(db_name="biqu", db_url=DB_URL, password=PASSWORD)
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,045
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi/eventSummary.py
|
# -*- coding: utf-8 -*-
from pymongo import MongoClient
from config import mongo_ip
from config import mongo_port
import json
import time
from SaaSCommon.JHDecorator import fn_timer
global conn  # no-op at module level; kept from the original
conn = MongoClient(mongo_ip, mongo_port)  # shared Mongo connection for this module
re_char = [".", "+", "?", "*", "$"]  # regex metachars escaped before building $regex patterns
@fn_timer
def eventSummary(datatype, querydata=None, mode="UserEventGroup"):
    """Aggregate per-event (or per-uri) UV counts from Mongo for a date range.

    :param datatype: appkey / Mongo database name.
    :param querydata: dict with "startDay"/"endDay" (yyyy-mm-dd) plus optional
        "events", "url_pattern", "jhd_pb", "jhd_vr", "jhd_opType" filters.
    :param mode: kept for backward compatibility; the collection is actually
        chosen from datatype/jhd_opType below, not from this argument.
    :return: list of {"_id": ..., "uv": ...} documents, ["Out of date range"]
        when the span exceeds 65 days, or [] for data before 2016-10-01.
    """
    # Fix: avoid the shared mutable-default-argument pitfall of querydata={}.
    if querydata is None:
        querydata = {}
    # BQ_H5 page views are summarized from the raw UserEvent collection;
    # everything else uses the pre-grouped UserEventGroup collection.
    if datatype == "BQ_H5" and querydata.get("jhd_opType", None) == "page":
        table = "UserEvent"
    else:
        table = "UserEventGroup"
    global conn
    tm_s = querydata["startDay"].replace("-", "")
    tm_e = querydata["endDay"].replace("-", "")
    tm_s_stamp = time.mktime(time.strptime(tm_s, "%Y%m%d"))
    tm_e_stamp = time.mktime(time.strptime(tm_e, "%Y%m%d"))
    num = (tm_e_stamp - tm_s_stamp) / 86400
    # Span limit of 65 days agreed on 2016-12-15.
    if num > 65 or num < 0:
        return ["Out of date range"]
    optype = querydata.get("jhd_opType", "action")
    if tm_s < '20161001':
        return []
    events = querydata.get("events", [])
    url_pattern = querydata.get("url_pattern", None)
    pub = querydata.get("jhd_pb", "")
    vr = querydata.get("jhd_vr", "")
    match = '''{"$match": {"partition_date": {"$gte": "%(tm_s)s", "$lte": "%(tm_e)s"}, "jhd_opType": "%(optype)s"}}''' % {
        "tm_s": tm_s,
        "tm_e": tm_e,
        "optype": optype
    }
    match = json.loads(match)
    if events:
        if datatype == "BQ_H5" and querydata.get("jhd_opType", None) == "page":
            match["$match"]["jhd_map.uri"] = {"$in": events}
        else:
            match["$match"]["jhd_eventId"] = {"$in": events}
    # Fix: the original had this check duplicated (nested `if url_pattern:`
    # directly inside an identical one) — collapsed to a single check.
    if url_pattern:
        # Escape regex metacharacters so the pattern matches literally.
        for re_c in re_char:
            url_pattern = url_pattern.strip().replace(re_c, "\\" + re_c)
        match["$match"]["jhd_map.uri"] = {"$regex": url_pattern}
    if pub and pub != "all":
        match["$match"]["jhd_pb"] = pub
    if vr and vr != "all":
        match["$match"]["jhd_vr"] = vr
    match = json.dumps(match)
    if table == "UserEventGroup":
        mongoquery = '''[
            %(match)s,
            {"$project": {"jhd_userkey": 1, "jhd_eventId": 1}},
            {"$group": {"_id": {"eventid": "$jhd_eventId", "uid": "$jhd_userkey"}}},
            {"$group": {"_id": "$_id.eventid", "uv": {"$sum": 1}}}
        ]''' % {"match": match}
    elif table == "UserEvent":
        # Only uris with uv >= 2 are returned for page summaries.
        # (A dead bare-string copy of this pipeline without the final $match
        # was removed from the original.)
        mongoquery = '''[
            %(match)s,
            {"$project": {"jhd_userkey": 1, "uri": "$jhd_map.uri"}},
            {"$group": {"_id": "$uri", "uids": {"$addToSet": "$jhd_userkey"} }},
            {"$project": {"_id": "$_id", "uv": {"$size": "$uids"}}},
            {"$match": {"uv": {"$gte": 2}}}
        ]''' % {"match": match}  # {"$sort": {"uv": -1}}
    print(mongoquery)
    query_result_cur = conn[datatype][table].aggregate(json.loads(mongoquery), allowDiskUse=True)
    result = [item for item in query_result_cur]
    return result
if __name__ == "__main__":
    # Manual smoke tests (Python 2 print statements); commented variants kept.
    # {"startDay": "2016-10-28", "endDay": "2016-11-20", "events": ["ac10"], " jhd_opType ": "action"}
    # print json.dumps(eventSummary("BQ_H5", querydata={"startDay": "2016-10-28", "endDay": "2016-11-20", "events": ["ac10"]}))
    # print json.dumps(eventSummary("BQ_H5", querydata={"startDay": "2016-12-01", "endDay": "2016-12-13", "jhd_opType": "page"}, mode = "UserEvent"))
    print json.dumps(eventSummary("BQ_H5", querydata={"endDay":"2016-12-15","startDay":"2016-10-11","jhd_opType":"page"}, mode = "UserEvent"))
    # print json.dumps(eventSummary("BQ_H5", querydata={"endDay":"2016-12-13","startDay":"2016-11-14","jhd_opType":"page","events":["https://m.biqu.panatrip.cn/app?openid=oqfl0uLI_EnV8gOowE-hyBYeFV28"]}, mode = "UserEvent"))
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,046
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi/views_biqu.py
|
# coding: utf-8
from django.http import HttpResponse
from funnel import funneldataOld
from funnel import funneldata
from eventRemain import eventsRemain
from eventRemainMap import eventsRemainMap
from eventRemain import eventsRemainCombine
from crossEvent import eventsCrossCombine
from crossEventMap import crossEventMap
from searchUser import search_user as rt_search
from eventSummary import eventSummary
from MongoDatas import userSample
from MongoDatas import userSample_lite
from MongoDatas import search_user
from MongoDatas import eventsSeries
import urllib
import json
from jhddgapi.settings import DEBUG
# global
# from IPtoLoc.__init__ import ipdataPath
# from IPtoLoc import IPtoAreaFinals
# global initarry
# if initarry is None:
# initarry = IPtoAreaFinals.load(ipdataPath)
# Create your views here.
def getEventsSeries(request, datatype, s_tm, e_tm, events_quote):
    """Decode the URL-quoted event list (e.g. ['dl', 'ac23']) and return the
    event series between s_tm and e_tm as UTF-8 JSON."""
    try:
        decoded = urllib.unquote(events_quote)
        event_list = json.loads(decoded)
        series = eventsSeries(datatype, s_tm, e_tm, event_list)
        # py2: unicode-escape round-trip yields UTF-8 bytes for the response
        body = json.dumps(series).decode('unicode-escape').encode('utf8')
        return HttpResponse(body)
    except:
        import traceback
        details = traceback.format_exc()
        print(details)
        if DEBUG:
            return HttpResponse(details)
# Event funnel, production endpoint
def getFunnel(request, datatype, params):
    '''
    :param request: Django request object
    :param datatype: appkey / database name
    :param params: URL-quoted JSON with startDay, endDay and the funnel steps
    :format url: http://101.201.145.120:8090/saasapi/funnel/biqu/{"startDay": "2016-11-10", "endDay": "2016-11-20", "funnel": [[{"id": "jhddg_every", "map": {}}], [{"id": "ac41", "map": {"og": "CSX"}}], [{"id": "ac23", "map": {}}, {"id": "ac11", "map": {}}], [{"id": "ac22", "map": {}}]]}/
    :return: HttpResponse with UTF-8 JSON; on error the traceback is returned
        only when DEBUG is on (otherwise the view returns None)
    '''
    try:
        params = urllib.unquote(params)
        data = json.loads(params)
        startDay = data["startDay"]
        endDay = data["endDay"]
        result = funneldata(datatype, startDay, endDay, data)
        # data = json.dumps(result, ensure_ascii=False)
        # py2: round-trip turns \uXXXX escapes into UTF-8 bytes
        data = json.dumps(result).decode('unicode-escape').encode('utf8')
        return HttpResponse(data)
    except:
        import traceback
        exstr = traceback.format_exc()
        if DEBUG:
            print(exstr)
            return HttpResponse(exstr)
        else:
            # NOTE(review): returns None here, which Django treats as an error
            print(exstr)
# Event funnel, production endpoint
# Keeps the legacy interface format; map-attribute filters are not supported
def getFunnelOld(request, datatype, s_tm, e_tm, events_quote):
    '''
    :param request: Django request object
    :param datatype: appkey / database name
    :param s_tm: start day, yyyy-mm-dd
    :param e_tm: end day, yyyy-mm-dd
    :param events_quote: URL-quoted JSON list of funnel steps
    :format url: http://101.201.145.120:8090/saasapi/eventserisesingle/feeling/2016-06-12/2016-06-17/[["in"], ["ac23", "ac11"], ["ac22"]]/
    :return: HttpResponse with UTF-8 JSON; on error the traceback is returned
        only when DEBUG is on (otherwise the view returns None)
    '''
    try:
        events_str = urllib.unquote(events_quote)
        events = json.loads(events_str)
        result = funneldataOld(datatype, s_tm, e_tm, events)
        # data = json.dumps(result, ensure_ascii=False)
        data = json.dumps(result).decode('unicode-escape').encode('utf8')
        return HttpResponse(data)
    except:
        import traceback
        exstr = traceback.format_exc()
        if DEBUG:
            print(exstr)
            return HttpResponse(exstr)
        else:
            print(exstr)
# Event retention
def getEventsRemain(request, datatype, params):
    """Decode URL-quoted retention params and return retention data as JSON.

    :param params: URL-quoted JSON event/retention spec, e.g. ['dl', 'ac23'].
    :return: HttpResponse with UTF-8 JSON; on error the traceback is returned
        only when DEBUG is on (otherwise the view returns None).
    """
    try:
        # Consistency fix: the original used the py2-only statement form
        # `print params`; every other view in this module uses print(...).
        print(params)
        events_str = urllib.unquote(params)
        events = json.loads(events_str)
        result = eventsRemainMap(datatype, events)
        # data = json.dumps(result, ensure_ascii=False, sort_keys=True)
        data = json.dumps(result).decode('unicode-escape').encode('utf8')
        return HttpResponse(data)
    except:
        import traceback
        exstr = traceback.format_exc()
        if DEBUG:
            print(exstr)
            return HttpResponse(exstr)
        else:
            print(exstr)
# Event retention over a date range (new request format)
def getEventsRemainCombine(request, datatype, events_quote):
    """Decode the combined retention request (startTime/endTime/lastDay/events)
    and return the retention data as UTF-8 JSON."""
    try:
        payload = json.loads(urllib.unquote(events_quote))
        begin_tm = payload["startTime"]
        end_tm = payload["endTime"]
        event_list = payload["events"]
        last_day = int(payload["lastDay"])
        combined = eventsRemainCombine(datatype, begin_tm, end_tm, last_day, event_list)
        body = json.dumps(combined).decode('unicode-escape').encode('utf8')
        return HttpResponse(body)
    except:
        import traceback
        details = traceback.format_exc()
        print(details)
        if DEBUG:
            return HttpResponse(details)
def getSample(request, datatype, s_tm):
    """Return the user sample for *datatype* on day *s_tm* as UTF-8 JSON."""
    try:
        sample = userSample(datatype, s_tm)
        body = json.dumps(sample).decode('unicode-escape').encode('utf8')
        return HttpResponse(body)
    except:
        import traceback
        details = traceback.format_exc()
        print(details)
        if DEBUG:
            return HttpResponse(details)
def getSample_lite(request, datatype, s_tm):
    """Lightweight variant of getSample: smaller user sample, same response shape."""
    try:
        sample = userSample_lite(datatype, s_tm)
        body = json.dumps(sample).decode('unicode-escape').encode('utf8')
        return HttpResponse(body)
    except:
        import traceback
        details = traceback.format_exc()
        print(details)
        if DEBUG:
            return HttpResponse(details)
def search(request, datatype, dayStr, hour_s, hour_e, base_cond):
    """Search users on *dayStr* within the [hour_s, hour_e] window.

    :param base_cond: JSON string of base match conditions.
    :return: HttpResponse with UTF-8 JSON results; on error the traceback is
        returned only when DEBUG is on (otherwise the view returns None).
    """
    try:
        base_cond = json.loads(base_cond)
        result = search_user(datatype, dayStr, int(hour_s), int(hour_e), base_cond)
        # data = json.dumps(result, ensure_ascii=False)
        data = json.dumps(result).decode('unicode-escape').encode('utf8')
        # BUG FIX: HttpResponse() has no ensure_ascii argument — passing it
        # raised TypeError on every successful search; escaping was already
        # handled by the decode/encode round-trip above.
        return HttpResponse(data)
    except:
        import traceback
        exstr = traceback.format_exc()
        if DEBUG:
            print(exstr)
            return HttpResponse(exstr)
        else:
            print(exstr)
# User sampling / search endpoint, production
def rtSample(request, datatype, conds):
    # Searches users matching the JSON-encoded conditions and logs the elapsed time.
    try:
        import time
        a = time.time()
        print("satrt", "-"*100)  # NOTE(review): "satrt" typo kept — runtime string
        base_cond = json.loads(conds)
        result = rt_search(datatype, _filter=base_cond)
        # data = json.dumps(result, ensure_ascii=False)
        data = json.dumps(result).decode('unicode-escape').encode('utf8')
        print("end", "-"*100, time.time()-a)
        return HttpResponse(data)
    except:
        import traceback
        exstr = traceback.format_exc()
        if DEBUG:
            print(exstr)
            return HttpResponse(exstr)
        else:
            print(exstr)
# Cross-event analysis across days, production endpoint
def getEventsCrossCombine(request, datatype, s_tm, e_tm, last_tm, events_quote):
    """Decode the quoted event spec and return cross-event data as UTF-8 JSON."""
    try:
        decoded = urllib.unquote(events_quote)
        event_spec = json.loads(decoded)
        window = int(last_tm)
        crossed = eventsCrossCombine(datatype, s_tm, e_tm, window, event_spec)
        body = json.dumps(crossed).decode('unicode-escape').encode('utf8')
        return HttpResponse(body)
    except:
        import traceback
        details = traceback.format_exc()
        print(details)
        if DEBUG:
            return HttpResponse(details)
# Cross-event analysis (supports map attrs), production endpoint
def getCrossEventMap(request, datatype, params):
    """Decode the quoted map-aware event spec and return cross-event data."""
    try:
        event_spec = json.loads(urllib.unquote(params))
        crossed = crossEventMap(datatype, event_spec)
        body = json.dumps(crossed).decode('unicode-escape').encode('utf8')
        return HttpResponse(body)
    except:
        import traceback
        details = traceback.format_exc()
        print(details)
        if DEBUG:
            return HttpResponse(details)
def getEventSummary(request, datatype, conds):
    """Decode the quoted query dict and return the per-event PV/UV summary."""
    try:
        print(datatype, conds)
        query = json.loads(urllib.unquote(conds))
        summary = eventSummary(datatype, query)
        body = json.dumps(summary).decode('unicode-escape').encode('utf8')
        return HttpResponse(body)
    except:
        import traceback
        details = traceback.format_exc()
        print(details)
        if DEBUG:
            return HttpResponse(details)
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,047
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi_clickhouse/CrossEvent.py
|
# -*- coding: utf-8 -*-
import __init__
import time
import threading
from collections import OrderedDict
import datetime
from os import sys, path
import json
import logging
from Query import Query
from ClickHouseClient.ClickHouseClient import ClickHouseClient
from CacheDecorator import common_cache_decorator
from SaaSCommon.JHDecorator import fn_timer
# Append API logs to logs/api.log next to this module.
logging.basicConfig(level=logging.INFO,
                    format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
                    datefmt='%a, %d %b %Y %H:%M:%S',
                    filename=path.dirname(path.abspath(__file__)) + "/logs/" + "api.log",
                    filemode='a')
logger = logging.getLogger(__file__)
class CrossEvent(Query):
    """Cross-event query: for each day D in a range, counts users who did the
    initial event(s) on D ('cur_day') and, of those, who did the conversion
    event(s) within the following window ('remain_day'). One query thread per day."""

    def __init__(self):
        pass

    def create_query_sql(self, db_name, start_day, end_day, events, attrs=None):
        '''
        :param db_name: appkey/datatype
        :param start_day: start date, format: yyyy-mm-dd
        :param end_day: end date, format: yyyy-mm-dd
        :param events: event ids and map attrs, format: [ [ [{id:…,attrs:[{id:…,op:…,val:…},{mapkey}]},{OR relation}], [{AND relation}] ], [conversion events (same format)] ]
        :param attrs: base attributes, e.g. version (jhd_vr), channel (jhd_pb)..., format: {"jhd_pb": "appstore", "jhd_vr": "1.0"}
        :return: assembled ClickHouse SQL string
        '''
        '''
        The "AND" relation between initial and conversion events is not implemented.
        '''
        start_day_1 = (datetime.datetime.strptime(start_day, "%Y-%m-%d") + datetime.timedelta(days=1)).strftime("%Y-%m-%d")
        if attrs is None:
            attrs = {}
        # sql_format = "select appoint, count(distinct(userkey)) as uv from ( \
        # Union of two joins: 'cur_day' = matching users active on start_day;
        # 'remain_day' = matching users who converted in (start_day, end_day].
        sql_format = "select appoint, count(distinct userkey) as uv from ( \
            select appoint, userkey from ( \
            select jhd_userkey as userkey from %(db_name)s.userevent prewhere partition = toDate('%(start_day)s') and jhd_opType = 'action' and %(map_cond_init)s%(_where)s group by jhd_userkey) \
            any inner join \
            (select 'cur_day' as appoint, jhd_userkey as userkey from %(db_name)s.userevent prewhere (partition between toDate('%(start_day)s') and toDate('%(start_day)s')) and jhd_opType = 'action' group by userkey) \
            using userkey \
            union all \
            select appoint, userkey from( \
            select jhd_userkey as userkey from %(db_name)s.userevent prewhere partition = toDate('%(start_day)s') and jhd_opType = 'action' and %(map_cond_init)s%(_where)s group by jhd_userkey) \
            any inner join \
            (select 'remain_day' as appoint, jhd_userkey as userkey from %(db_name)s.userevent prewhere (partition between toDate('%(start_day_1)s') and toDate('%(end_day)s')) and jhd_opType = 'action' and %(map_cond_conversion)s group by userkey) \
            using userkey) \
            group by appoint \
            order by appoint"
        _where = " and " + self.fragment_where(attrs)
        map_cond_init = self.map_conds(events[0])
        map_cond_conversion = self.map_conds(events[1])
        query = sql_format % {
            "db_name": db_name,
            "start_day": start_day,
            "yyyymmdd": start_day.replace("-", ""),  # NOTE(review): key unused by sql_format
            "start_day_1": start_day_1,
            "end_day": end_day,
            "_where": _where if bool(attrs) else " ",
            "map_cond_init": map_cond_init,
            "map_cond_conversion": map_cond_conversion
        }
        logger.info(query)
        return query

    def create_query_sql_bak(self, db_name, start_day, end_day, events, attrs=None):
        '''
        :param db_name: appkey/datatype
        :param start_day: start date, format: yyyy-mm-dd
        :param end_day: end date, format: yyyy-mm-dd
        :param events: event ids and map attrs, format: [ [ [{id:…,attrs:[{id:…,op:…,val:…},{mapkey}]},{OR relation}], [{AND relation}] ], [conversion events (same format)] ]
        :param attrs: base attributes, e.g. version (jhd_vr), channel (jhd_pb)..., format: {"jhd_pb": "appstore", "jhd_vr": "1.0"}
        :return: assembled ClickHouse SQL string
        '''
        '''
        The "AND" relation between initial and conversion events is not implemented.
        NOTE(review): appears to be a retained older variant (left join / sum(1));
        nothing in this class calls it.
        '''
        start_day_1 = (datetime.datetime.strptime(start_day, "%Y-%m-%d") + datetime.timedelta(days=1)).strftime("%Y-%m-%d")
        if attrs is None:
            attrs = {}
        # sql_format = "select appoint, count(distinct(userkey)) as uv from ( \
        sql_format = "select appoint, sum(1) as uv from ( \
            select appoint, userkey from \
            (select jhd_userkey as userkey from %(db_name)s.userevent where partition = toDate('%(start_day)s') and %(map_cond_init)s%(_where)s group by jhd_userkey) \
            all left join \
            ( \
            select 'cur_day' as appoint, jhd_userkey as userkey from %(db_name)s.userevent where (partition between toDate('%(start_day)s') and toDate('%(start_day)s')) group by userkey \
            union all \
            select 'remain_day' as appoint, jhd_userkey as userkey from %(db_name)s.userevent where (partition between toDate('%(start_day_1)s') and toDate('%(end_day)s')) and %(map_cond_conversion)s group by userkey) \
            using userkey) \
            group by appoint \
            order by appoint"
        _where = " and " + self.fragment_where(attrs)
        map_cond_init = self.map_conds(events[0])
        map_cond_conversion = self.map_conds(events[1])
        query = sql_format % {
            "db_name": db_name,
            "start_day": start_day,
            "start_day_1": start_day_1,
            "end_day": end_day,
            "_where": _where if bool(attrs) else " ",
            "map_cond_init": map_cond_init,
            "map_cond_conversion": map_cond_conversion
        }
        logger.info(query)
        return query

    def map_conds(self, data):
        '''
        :param data: format: [ [{id:…,attrs:[{id:…,op:…,val:…},{mapkey}]},{OR relation}], [{AND relation}] ]
        :return: SQL fragment — OR within each inner group, AND across groups
        '''
        cond_eventid_format = "jhd_eventId = '%(event_id)s'"
        cond_map_format = "%(visit_params)s(jhd_map, '%(mapkey)s') %(operator)s"
        events_and = []
        for index, event_array in enumerate(data):
            events_or = []
            for event_data in event_array:
                event_solo_and = []
                event_id = event_data["id"]
                if event_id == "jhddg_every":
                    # "any event" wildcard: match every non-empty event id
                    cond_eventid = "jhd_eventId != %(event_id)s" % {"event_id": "''"}
                    event_solo_and.append(cond_eventid)
                else:
                    event_solo_and.append(cond_eventid_format % {"event_id": event_id})
                if "attrs" in event_data and event_data["attrs"]:
                    # may contain multiple mapkey constraints
                    for map_item in event_data["attrs"]:
                        mapkey = map_item["id"]
                        op = map_item["op"]
                        mapvalue = map_item["val"]
                        visit_params, operator = self.query_operator(op, mapvalue)
                        cond_map = cond_map_format % {"visit_params": visit_params, "mapkey": mapkey, "operator": operator}
                        event_solo_and.append(cond_map)
                events_or.append("(" + " and ".join(event_solo_and) + ")")
            events_and.append(" or ".join(events_or))
        return " and ".join(events_and)

    @common_cache_decorator("cross_event")
    @fn_timer
    def data(self, datatype, params, interval = 0):
        """Run the cross-event query for each day in [startDay, endDay].

        :param datatype: appkey/database name
        :param params: dict with startDay, endDay, events, windows, optional attrs
        :param interval: seconds slept between launching per-day query threads
        :return: OrderedDict day -> [cur_day uv, remain_day uv], newest day
            first, or {"errinfo": ...} on bad input / query failure
        """
        result = OrderedDict([])
        try:
            tm_str_s = params["startDay"]
            tm_str_e = params["endDay"]
            events = params["events"]
            windows = int(params["windows"])
            start_day = datetime.datetime.strptime(tm_str_s, "%Y-%m-%d")
            end_day = datetime.datetime.strptime(tm_str_e, "%Y-%m-%d")
            attrs = params.get("attrs", {})
            num = (end_day - start_day).days
            if num > 60 or num < 0:
                return {"errinfo": "日期跨度超出范围!"}
            if windows > 60 or windows <= 0:
                return {"errinfo": "窗口期超出范围!"}
        except:
            import traceback
            exc_type, exc_value, exc_traceback = sys.exc_info()
            errinfo = traceback.format_exception(exc_type, exc_value, exc_traceback)
            logging.error(json.dumps(errinfo))
            return {"errinfo": "参数错误!"}
        try:
            threads = []
            # One thread per day; each writes its day's row into `result`.
            while start_day <= end_day:
                try:
                    query = self.create_query_sql(datatype, start_day.strftime("%Y-%m-%d"), (start_day + datetime.timedelta(days=windows)).strftime("%Y-%m-%d"), events, attrs=attrs)
                    # py2 str/unicode normalization
                    try:
                        query = str(query.decode("utf-8"))
                    except:
                        query = query.decode("utf-8").encode("utf-8")
                except:
                    import traceback
                    print traceback.print_exc()
                t = threading.Thread(target=self.submit, args=(datatype, query, result, start_day.strftime("%Y-%m-%d")))
                t.start()
                threads.append(t)
                time.sleep(interval)
                start_day += datetime.timedelta(days=1)
            for _thread in threads:
                _thread.join()
        except:
            import traceback
            exc_type, exc_value, exc_traceback = sys.exc_info()
            errinfo = traceback.format_exception(exc_type, exc_value, exc_traceback)
            logging.error(json.dumps(errinfo))
            return {"errinfo": "查询错误!"}
        # Re-order by day, newest first (py2: keys() returns a sortable list).
        result_sort = OrderedDict([])
        days = result.keys()
        days.sort(reverse=True)
        for day in days:
            result_sort.setdefault(day, result[day])
        return result_sort

    def submit(self, db_name, query, result, tm):
        # Worker: run one day's query; record [cur_day uv, remain_day uv] for tm.
        client = ClickHouseClient()
        item = [0, 0]  # NOTE(review): unused local, kept as-is
        for row in client.select(db_name, query):
            result.setdefault(tm, [0, 0])
            appoint = row.appoint
            uv = row.uv
            if appoint == "cur_day":
                result[tm][0] = uv
            elif appoint == "remain_day":
                result[tm][1] = uv
if __name__ == "__main__":
    # Manual smoke test (Python 2 print statements).
    tester = CrossEvent()
    # data = {"events":[[[{"id":"jhddg_every"}]],[[{"id":"ac36","attrs":[{"id":"og","op":"is","val":"SHA"}]}]]],"windows":"4","endDay":"2017-02-04","startDay":"2017-02-01"}
    data = {"endDay":"2017-05-01","events":[[[{"id":"jhddg_every","name":"任意事件"}]],[[{"id":"jhddg_every","name":"任意事件"}]]],"startDay":"2017-04-24","windows":7}
    import time
    a = time.time()
    print json.dumps(tester.data("ncf_ws", data), ensure_ascii=False)
    print time.time() - a
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,048
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi/searchUser.py
|
# coding: utf-8
from pymongo import MongoClient
from config import mongo_ip
from config import mongo_port
import json
import copy
import time
import itertools
# from ipinfo import ipinfo_sina
from IPtoLoc.iploc_demo import getLoc
from apple_jx import apple_jx
from SaaSCommon.JHDecorator import fn_timer
from DBClient.MysqlClient import MysqlClient
from DBClient.PyMongoClient import PyMongoClient
def get_mongo_conn(appkey):
    """Look up which Mongo shard serves *appkey* (via MySQL) and return a live connection."""
    lookup = MysqlClient()
    shard_id = lookup.get_mongoid(appkey)[0]
    lookup.closeMysql()
    return PyMongoClient(mongo_id=shard_id).getConn()
# data format
# example
# {
# "session": {
# "action": [
# "ac6"
# ],
# "seconds": 46002,
# "sessionsbegintm": "2016-07-21+23:59:59",
# "sessionendtm": "2016-07-21+23:59:59"
# },
# "jhd_pb": [
# "appstore"
# ],
# "jhd_netType": [
# "wifi"
# ],
# "lastActiveInterval": 0,
# "loc": [
# "中国",
# "广东",
# "广州"
# ],
# "jhd_userkey": "0a723853-1de2-4d37-8eee-faa1e0cedc58",
# "jhd_vr": "2.0.1",
# "jhd_os": [
# "iphone_9.3.2"
# ],
# "jhd_pushid": "56d97a3a816dfa005a38b738",
# "jhd_datatype": "feeling",
# "jhd_ua": "iPhone 6 Plus",
# "last7ActiveNum": 4,
# "jhd_ip": [
# "116.22.44.81"
# ],
# "lastOpaTime": "2016-07-21+12:13:45",
# "last30ActiveNum": 8
# }
# Fields copied into each session record as scalar values.
singlekey = ["jhd_datatype", "jhd_userkey", "jhd_pushid", "lastOpaTime"]
# Fields stored as lists in Mongo; only the latest entry is kept per session.
appendkey = ["jhd_pb", "jhd_vr", "jhd_os", "jhd_netType", "jhd_ua", "jhd_ip", "jhd_loc"]
# All per-user attribute keys recognised by _split_session().
# (Fixed: dropped the module-level `global` statements, which are no-ops.)
basic_items = list(itertools.chain(singlekey, appendkey))
# Current query day ("%Y-%m-%d"); rebound by search_user()/_split_session().
tm = None
@fn_timer
def search_user(datatype, table='uvfile', _filter={}):
    """Query Mongo for user sessions of app *datatype* matching *_filter*.

    Mutates *_filter* in place (normalises measure bounds, rewrites "actm",
    may swap jhd_userkey for jhd_pushid on a miss) and rebinds the module
    global ``tm`` to the queried day.

    NOTE(review): the ``_filter={}`` mutable default is never usable — the
    assert below rejects empty filters — but it is still an anti-pattern;
    confirm before changing the signature.

    :param datatype: appkey / database name.
    :param table: Mongo collection, default "uvfile".
    :param _filter: Mongo query dict; must contain "tm" ("%Y-%m-%d").
    :return: list of JSON-serialisable session dicts, newest first.
    """
    assert _filter, "None filter"
    global tm
    tm = _filter["tm"]
    print("start _filter", json.dumps(_filter))
    conn = get_mongo_conn(datatype)
    if _filter.get("lastOpaTime", None):
        # Missing start time defaults to "00:00:00"; missing end time to "23:59:59".
        starttm = ":".join([_filter["lastOpaTime"]["$gte"].split("+")[1], "00:00"]) if _filter["lastOpaTime"].get("$gte", None) else "00:00:00"
        endtm = ":".join([_filter["lastOpaTime"]["$lte"].split("+")[1], "00:00"]) if _filter["lastOpaTime"].get("$lte", None) else "23:59:59"
    else:
        # No lastOpaTime filter was supplied.
        starttm = "00:00:00"
        tm = _filter.get("tm", time.strftime("%Y%m%d", time.localtime(time.time()))).replace("-", "")
        # If the filtered day is not today, the whole day is eligible.
        if tm < time.strftime("%Y%m%d", time.localtime(time.time())):
            endtm = "23:59:59"
        else:
            # For today, cap at "now" (drops yesterday's events that only
            # arrived today with abnormal timestamps).
            endtm = time.strftime("%H:%M:%S", time.localtime(time.time()))
    # Normalise the "recently active days" measure filters.
    measure_keys = ["measure.last7ActiveNum", "measure.last30ActiveNum"]
    for measurekey in measure_keys:
        if _filter.get(measurekey, None):
            boundary_down_key = "$gte" if "$gte" in _filter[measurekey] else "$gt"
            boundary_up_key = "$lte" if "$lte" in _filter[measurekey] else "$lt"
            # boundary_down = _filter[measurekey][boundary_down_key] - 1
            # boundary_up = _filter[measurekey][boundary_up_key] - 1
            boundary_down = _filter[measurekey][boundary_down_key]
            boundary_up = _filter[measurekey][boundary_up_key]
            _filter[measurekey] = {boundary_down_key: boundary_down, boundary_up_key: boundary_up}
    # Optional intra-day time-window sampling: "actm" is rewritten into a
    # query on item_count.action.opatm and kept as HHMMSS bounds.
    filter_begin_tm = None
    filter_end_tm = None
    if _filter.get("actm", None):
        _filter["item_count.action.opatm"] = _filter["actm"]
        filter_begin_tm = _filter["actm"]["$gte"].replace(":", "") if "$gte" in _filter["actm"] else "000000"
        filter_end_tm = _filter["actm"]["$lte"].replace(":", "") if "$lte" in _filter["actm"] else "235959"
        del _filter["actm"]
    if _filter.get("jhd_pb", None) and type(_filter["jhd_pb"]) != type({}):
        _filter["jhd_pb"] = {"$regex": _filter["jhd_pb"], "$options": "i"}
    result = []
    get_result(conn, datatype, table, _filter, starttm, endtm, filter_begin_tm, filter_end_tm, result)
    if len(result) == 0 and "jhd_userkey" in _filter:
        # Nothing matched the userkey; retry treating the id as a push id.
        uid = _filter.pop("jhd_userkey")
        _filter["jhd_pushid"] = uid
        get_result(conn, datatype, table, _filter, starttm, endtm, filter_begin_tm, filter_end_tm, result)
    result_sorted = sorted(result, key=lambda _item: _item["session"]["sessionsbegintm"], reverse = True)
    tmp = _valid_result(result_sorted, _filter)
    # Keep only items that survive JSON serialisation.
    _tmp = []
    for item in tmp:
        try:
            json.dumps(item, ensure_ascii=False)
            _tmp.append(item)
        except:
            import traceback
            print(traceback.print_exc())
    return _tmp
def get_result(conn, datatype, table, _filter, starttm, endtm, filter_begin_tm, filter_end_tm, result):
    """Fetch up to 500 matching user docs, split each into sessions and extend *result*.

    When no specific user is targeted, only the last 4 sessions of each user
    are kept; with an optional HHMMSS window (filter_begin_tm/filter_end_tm)
    only sessions overlapping that window survive.

    Fixed: the inner loop reused the name ``item`` and shadowed the outer
    cursor variable (renamed to ``sess``); also ``print(traceback.print_exc())``
    printed a spurious ``None``.
    """
    for doc in conn[datatype][table].find(_filter).limit(500):
        try:
            userkey = doc["jhd_userkey"]
            if (not userkey) or len(userkey) < 10:
                continue
            # Known bogus / test device ids are skipped outright.
            if userkey in ["-1", "", "0", "00000000-0000-0000-0000-000000000000", "unknown", "739463", "000000000000000","111111111111111","352005048247251","012345678912345", "012345678901237", "88508850885050", "0123456789abcde","004999010640000", "862280010599525", "52443443484950", "355195000000017", "001068000000006", "358673013795895", "355692547693084", "004400152020000", "8552502717594321","113456798945455", "012379000772883", "111111111111119", "358701042909755", "358000043654134", "345630000000115", "356299046587760", "356591000000222","9774d56d682e549c"]:
                continue
            _sort_data(doc)
            _item = _split_session(doc, starttm, endtm)
            # Optionally keep only sessions overlapping the time-of-day window.
            _item_tmp = []
            if filter_begin_tm and filter_end_tm:
                for sess in _item:
                    sessionsbegintm = sess["session"]["sessionsbegintm"].replace("-", "").replace("+", "").replace(":", "")
                    sessionendtm = sess["session"]["sessionendtm"].replace("-", "").replace("+", "").replace(":", "")
                    if (sessionsbegintm[-6:] >= filter_begin_tm and sessionsbegintm[-6:] <= filter_end_tm) or \
                        (sessionendtm[-6:] >= filter_begin_tm and sessionendtm[-6:] <= filter_end_tm):
                        _item_tmp.append(sess)
                tmp = _item_tmp if ("jhd_userkey" in _filter or "jhd_pushid" in _filter) else _item_tmp[-4:]
                result += tmp
            else:
                tmp = _item if ("jhd_userkey" in _filter or "jhd_pushid" in _filter) else _item[-4:]
                result += tmp
        except:
            import traceback
            traceback.print_exc()
def _sort_data(data):
if data.get("item_count", None):
item_count = data["item_count"]
for key in item_count.keys():
# 事件序列排序
item_count[key]["opatm"].sort()
def _find_dic_key(data, _like):
for key in data:
if _like in key:
yield key
def _valid_result(result_sorted, _filter):
    """Post-filter session records before returning them to the caller.

    Pins jhd_vr to a single value, drops empty sessions (browse mode only),
    verifies the filtered events actually appear in the session, resolves a
    location, maps the Apple device model, and caps output at 500 rows.

    :param result_sorted: session dicts sorted by session begin time, newest first.
    :param _filter: the (already rewritten) Mongo filter; decides which checks apply.
    :return: list of cleaned session dicts.
    """
    tmp = []
    # print(result_sorted, len(result_sorted), "result_sorted")
    for item in result_sorted:
        try:
            # Collapse jhd_vr (a list in Mongo) to one version string.
            if "jhd_vr" in _filter and type(_filter.get("jhd_vr", {})) == type({}):
                vr = _filter.get("jhd_vr", {}).get("$regex", "")
                vrs = [_vr for _vr in item["jhd_vr"] if vr in _vr]
                item["jhd_vr"] = vrs[0] if len(vrs) else ""
            elif "jhd_vr" in _filter and type(_filter.get("jhd_vr", "")) == type(""):
                item["jhd_vr"] = _filter["jhd_vr"]
            else:
                item["jhd_vr"] = item.get("jhd_vr", [""])[-1]
            if "jhd_userkey" not in _filter and "jhd_pushid" not in _filter:
                # Browse mode: drop sessions with no duration or no actions.
                if not item.get("session", {"seconds": 0}).get("seconds", 0):
                    continue
                if not item.get("session", {"action": []}).get("action", []):
                    continue
            else:
                item.setdefault("session", {"action": []})
            session_keys = item.get("session", {}).keys()
            if ("action" not in session_keys and "page" not in session_keys and "in" not in session_keys):
                # print('''"action" not in session_keys and "page" not in session_keys and "in" not in session_keys''', "continue", session_keys)
                continue
            # Event ids the caller filtered on (strip the "jhf_" prefix).
            actions = [key.split(".")[1].split("_")[1] if key.split(".")[1].startswith("jhf_") else key.split(".")[1] for key in _find_dic_key(_filter, "item_count")]
            # "action" was injected by the visit-time filter; drop it again.
            if len(actions) == 1 and "action" in actions:
                del actions[actions.index("action")]
            # actions = [key.split(".")[1] for key in _find_dic_key(_filter, "item_count")]
            if not all([(action in item["session"].get("action", [])) for action in actions]):
                continue
            # item["loc"] = ipinfo_sina(item.get("jhd_ip", [""])[0])
            # Resolve location: prefer the stored jhd_loc, fall back to IP lookup.
            try:
                loc = item.get("jhd_loc", [None])[0]
            except:
                loc = None
            try:
                del item["jhd_loc"]
            except:
                pass
            if loc:
                if isinstance(loc, dict):
                    country, prov, city = "", loc.get("prov", ""), loc.get("city", "")
                else:
                    try:
                        country, prov, city = "unknown", loc.split("_")[0], loc.split("_")[1]
                    except:
                        country, prov, city = "unknown", "", ""
                item["loc"] = (country, prov, city)
            else:
                item["loc"] = getLoc(item.get("jhd_ip", [""])[0])
            # NOTE(review): comparing the *city* slot (index 2) against the
            # country name looks intentional (drops unresolved locations in
            # browse mode) -- confirm against getLoc()'s tuple layout.
            if u"中国" == item["loc"][2] and ("jhd_userkey" not in _filter and "jhd_pushid" not in _filter):
                continue
            # Map the raw device identifier to a marketing name when known.
            if item["jhd_ua"]:
                item["jhd_ua"] = apple_jx.get(item["jhd_ua"][0], item["jhd_ua"][0])
            tmp.append(item)
            if "jhd_userkey" not in _filter and "jhd_pushid" not in _filter:
                if len(tmp) >= 500:
                    break
        except:
            import traceback
            print(traceback.print_exc())
    return tmp
def _split_session(data, starttm = '00:00:00', endtm = '23:59:59'):
    """
    Split one user's daily document into sessions.

    Events from item_count are encoded as (opatime+priority, eventtype,
    eventid) triples, sorted, then cut into sessions at gaps > 600 seconds,
    at "end" events (backward cut), and at "in" events (forward cut).

    :param data: per-user document to split (must contain "tm")
    :param starttm: start of the time-of-day filter window
    :param endtm: end of the time-of-day filter window
    :return: list of session dicts, each carrying the user's basic attributes
    """
    global basic_items
    global tm
    result = []
    # Basic user attributes shared by every session of this user.
    sessoin_result = {}
    tm = data['tm']
    for key in data:
        # basic_items: plain user attributes from the uvfile document
        if key in basic_items:
            try:
                # Copy scalar values as-is; for list values keep only the last
                # element. NOTE(review): the parentheses look wrong --
                # `(cond and type(x)) != type([])` compares a boolean/type mix,
                # so "jhd_vr" takes the scalar branch; confirm intent.
                sessoin_result.setdefault(key, data[key]) \
                if ((key not in set(["jhd_vr"]) and type(data[key])) != type([])) \
                else sessoin_result.setdefault(key, data[key][-1:])
            except:
                import traceback
                print(traceback.print_exc())
        elif key == "measure":
            # Visit-frequency measures, defaulting to 1.
            sessoin_result.setdefault("last7ActiveNum", data[key].get("last7ActiveNum", 1))
            sessoin_result.setdefault("last30ActiveNum", data[key].get("last30ActiveNum", 1))
            sessoin_result.setdefault("lastActiveInterval", data[key].get("lastActiveInterval", 1))
        elif key == 'item_count':
            # Build (opatime, eventtype, event) triples from item_count.
            # tmp = [("".join([ct, "0"]) if data[key][_key]["eventtype"] != "end" else "".join([ct, "1"]), data[key][_key]["eventtype"], _key) \
            # for _key in data[key] for ct in data[key][_key]["opatm"] \
            # # 去掉action,page汇总数据
            # if _key not in ['action', 'page'] and (ct >= starttm and ct <= endtm)]
            tmp = []
            # Stable-sort encoding: at equal timestamps "in" (suffix 0) sorts
            # first and "end" (suffix 6) last.
            for _key in data[key]:
                # Drop repeated in/end events less than 30 seconds apart.
                in_pre = time.mktime(time.strptime('19880101000000', '%Y%m%d%H%M%S'))
                end_pre = time.mktime(time.strptime('19880101000000', '%Y%m%d%H%M%S'))
                for ct in data[key][_key]["opatm"]:
                    # if key == "in":
                    if _key == "in":
                        dayStr = (tm + ct).replace("-", "").replace(":", "")
                        in_cur = time.mktime(time.strptime(dayStr, '%Y%m%d%H%M%S'))
                        if in_cur - in_pre < 30:
                            in_pre = in_cur
                            continue
                        else:
                            in_pre = in_cur
                    # elif key == "end":
                    elif _key == "end":
                        dayStr = (tm + ct).replace("-", "").replace(":", "")
                        end_cur = time.mktime(time.strptime(dayStr, '%Y%m%d%H%M%S'))
                        if end_cur - end_pre < 30:
                            end_pre = end_cur
                            continue
                        else:
                            end_pre = end_cur
                    # Skip the action/page/duration summary keys inside the window.
                    if _key in ['action', 'page', 'jhf_dur'] and (ct >= starttm and ct <= endtm):
                        continue
                    elif _key.startswith("jhf_"): # skip custom (jhf_) actions
                        continue
                    if data[key][_key]["eventtype"] == "in":
                        tmp.append(("".join([ct, "0"]), data[key][_key]["eventtype"], _key))
                    elif data[key][_key]["eventtype"] == "end":
                        tmp.append(("".join([ct, "6"]), data[key][_key]["eventtype"], _key))
                    else:
                        tmp.append(("".join([ct, "5"]), data[key][_key]["eventtype"], _key))
            tmp = list(set(tmp))
            # Order by operation time (priority suffix breaks ties), then strip
            # the suffix again.
            opa_sort = sorted(tmp, key=lambda item: item[0], reverse=False)
            opa_sort = [(item[0][:-1], item[1], item[2]) for item in opa_sort]
            if not opa_sort:
                return []
            pre = 0 # previous timestamp (epoch seconds)
            ct_pre = None # previous timestamp (string)
            sessoin_opas = {}
            item = None
            # eventid = None
            for item in opa_sort:
                try:
                    ct, eventtype, eventid = item
                    if eventid.startswith("jhf"):
                        continue
                    # print ct, eventtype, eventid
                    # Absolute time of this event on the queried day.
                    cur = time.mktime(time.strptime("".join([tm.replace("-", ""), ct.replace(":", "")]), "%Y%m%d%H%M%S"))
                    # if pre == 0 and eventtype == "end":
                    # continue
                    if pre == 0:
                        sessoin_opas["seconds"] = sessoin_opas.setdefault("seconds", 0.0)
                        ct_pre = ct
                    else:
                        # Accumulate dwell time only for gaps <= 600 seconds.
                        if cur-pre <= 600:
                            if sessoin_opas:
                                sessoin_opas["seconds"] = sessoin_opas.setdefault("seconds", 0.0) + (cur - pre)
                            else:
                                sessoin_opas["seconds"] = sessoin_opas.setdefault("seconds", 0.0)
                        else:
                            sessoin_opas["seconds"] = sessoin_opas.setdefault("seconds", 0.0)
                    sessoin_opas.setdefault("sessionsbegintm", "+".join([tm, ct])) # set session begin time
                    # A gap > 600s or an "end" event closes the current session
                    # (backward cut), unless the current event is an "in".
                    if ((cur-pre > 600 and pre != 0) or eventtype == "end") and eventtype != "in":
                        # Snapshot the user's basic attributes for this session.
                        basic_data = copy.deepcopy(sessoin_result)
                        # Record the closing "end" event in the session itself.
                        if eventtype == "end":
                            sessoin_opas.setdefault(eventtype, []).append(eventid) \
                            if eventid not in sessoin_opas.get(eventtype, []) else sessoin_opas.setdefault(eventtype, [])
                        if cur-pre <= 600 and sessoin_opas != {}:
                            sessoin_opas.setdefault("sessionendtm", "+".join([tm, ct]))
                        else:
                            sessoin_opas.setdefault("sessionendtm", "+".join([tm, ct_pre]))
                        basic_data.setdefault("session", sessoin_opas)
                        # Discard sessions that would end in the future.
                        if sessoin_opas.get("sessionendtm", "2999-12-31+23:59:59") <= time.strftime("%Y-%m-%d+%H:%M:%S", time.localtime(time.time())):
                            result.append(basic_data)
                        sessoin_opas = {}
                    # An "in" event opens a new session (forward cut).
                    elif eventtype == "in":
                        if pre == 0:
                            ct_pre = ct
                        basic_data = copy.deepcopy(sessoin_result)
                        sessoin_opas.setdefault("sessionendtm", "+".join([tm, ct_pre]))
                        basic_data.setdefault("session", sessoin_opas)
                        if "+".join([tm, ct_pre]) <= time.strftime("%Y-%m-%d+%H:%M:%S", time.localtime(time.time())):
                            result.append(basic_data)
                        sessoin_opas = {}
                        sessoin_opas.setdefault("sessionsbegintm", "+".join([tm, ct])) # set session begin time
                        sessoin_opas.setdefault(eventtype, []).append(eventid) \
                        if eventid not in sessoin_opas.get(eventtype, []) else sessoin_opas.setdefault(eventtype, [])
                    if eventtype != "end":
                        # Group events by type, preserving first-seen order.
                        sessoin_opas.setdefault(eventtype, []).append(eventid) \
                        if eventid not in sessoin_opas.get(eventtype, []) else sessoin_opas.setdefault(eventtype, [])
                    pre = cur
                    ct_pre = ct
                except:
                    import traceback
                    print(traceback.print_exc(), item)
            # Flush the trailing (still open) session.
            ct, eventtype, eventid = item
            sessoin_opas.setdefault(eventtype, []).append(eventid) \
            if eventid not in sessoin_opas.get(eventtype, []) else sessoin_opas.setdefault(eventtype, [])
            basic_data = copy.deepcopy(sessoin_result)
            sessoin_opas.setdefault("sessionendtm", "+".join([tm, ct_pre]))
            basic_data.setdefault("session", sessoin_opas)
            if "sessionsbegintm" in basic_data.get("session", {}) and "+".join([tm, ct_pre]) <= time.strftime("%Y-%m-%d+%H:%M:%S", time.localtime(time.time())):
                result.append(basic_data)
    return result
if __name__ == "__main__":
    # Historical sample filters kept for reference; each assignment overrides
    # the previous one, so only the last `data` value is actually queried.
    data = '''{"tm": "2016-07-21", "jhd_vr": "2.0.1", "jhd_ua": {"$regex": "iphone"}, "item_count.ac23": {"$exists": true}, "firstLoginTime": {"$regex": "20160721"}, "lastOpaTime": {"$gte": "2016-07-21+09", "$lte": "2016-07-21+17"}}'''
    data = '''{"tm":"2016-10-08", "actm": {"$gte": "00:00:00", "$lte": "17:00:00"}}'''
    data = '''{"tm":"2016-10-08", "jhd_userkey": "434A4B68-B48C-4CB9-809E-54FDF25B1DA9", "item_count.ac34": {"$exists": true}}'''
    data = '''{"tm":"2016-10-10", "item_count.ac36": {"$exists": true}}'''
    data = '''{"tm":"2016-12-28","actm":{"$gte":"02:02:00"},"measure.firstLoginTime":{"$regex":"20161208"}}'''
    # data = '''{"tm":"2016-10-08", "item_count.ac34": {"$exists": true}}'''
    # data = '''{"tm":"2016-10-08"}'''
    data = '''{"tm":"2017-03-10"}'''
    _filter = json.loads(data)
    a = time.time()  # timing start, printed as elapsed seconds below
    data = search_user("jinjiedao", "uvfile", _filter)
    print(time.time()-a)
    print(json.dumps(data, ensure_ascii=False))
    print(len(data))
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,049
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasjob/__init__.py
|
# from SchedulerManager import SchedulerManager
# # global sched
# sched = SchedulerManager()
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,050
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi_clickhouse/SaaSCommon/TasksRunner.py
|
# -*- coding: utf-8 -*-
import time
import threading
import Queue
class TasksRunner(object):
tq = Queue.Queue(maxsize = -1)
def __init__(self):
t = threading.Thread(target=self.run, args=())
t.start()
def put(self, task):
self.__class__.tq.put(task)
def run(self):
while True:
print u"等待任务数量: ", self.__class__.tq.qsize()
try:
_task = self.__class__.tq.get(block=True, timeout=10)
_task.start()
_task.join()
except Queue.Empty:
time.sleep(1)
except Exception:
import traceback
print traceback.print_exc()
time.sleep(3)
if __name__ == "__main__":
    # Smoke test: "666" has no start()/join(), so the consumer's exception
    # branch fires after the first get(); then idle long enough to observe it.
    tester = TasksRunner()
    time.sleep(3)
    tester.put("666")
    time.sleep(30)
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,051
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/dataservice/DBClient/Data.py
|
class Data(object):
    """Placeholder data-access interface; concrete backends override these hooks."""
    def __init__(self):
        pass

    def newcomerCount(self, users, num = 1):
        # Stub: intended to count newcomers among *users*; not implemented yet.
        pass
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,052
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasjob/TaskScheduler_test.py
|
# -*- coding: utf-8 -*-
import time
from pytz import utc
from apscheduler.schedulers.blocking import BlockingScheduler
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.jobstores.mongodb import MongoDBJobStore
# from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
# from apscheduler.jobstores.memory import MemoryJobStore
from apscheduler.executors.pool import ThreadPoolExecutor, ProcessPoolExecutor
from JobCmd import jobcmdcallable
from JobCmd import JobCmd
from DBClient.PyMongoClient import PyMongoClient
# Module-level Mongo connection shared with the job store below.
# (`global` at module scope is a no-op; kept byte-identical.)
global mongoclient
mongoclient = PyMongoClient().getConn()
# Persist test jobs in Mongo so they survive process restarts.
jobstores = {
    'mongo': MongoDBJobStore(collection = 'jobtest', database = 'saasjob', client=mongoclient),
    # 'default': MemoryJobStore()
}
executors = {
    'default': ThreadPoolExecutor(20),
    'processpool': ProcessPoolExecutor(5)
}
job_defaults = {
    'coalesce': False,
    'misfire_grace_time': 1,
    'max_instances': 10
}
def test_job():
    """Sample job: prints a marker and rebuilds the global JobCmd."""
    # a = scheduler.get_job(job_id="666", jobstore=jobstores["mongo"])
    # b = scheduler.get_jobs(jobstore=jobstores["mongo"])
    # print a, b
    print 'hello world'
    global jobcmdobj
    jobcmdobj = JobCmd("ipconfig")
def test_job_b():
    """Sample job: dumps the pickled state of every scheduled job."""
    # a = scheduler.get_job(job_id="666", jobstore=jobstores["mongo"])
    # b = scheduler.get_jobs(jobstore=jobstores["mongo"])
    # print a, b
    for _job in scheduler.get_jobs():
        print type(_job.__getstate__()), _job.__getstate__()
# scheduler = BackgroundScheduler(jobstores=jobstores, executors=executors, job_defaults=job_defaults)
scheduler = BackgroundScheduler(jobstores=jobstores, executors=executors, job_defaults=job_defaults)
# scheduler.add_jobstore(jobstore=jobstores["mongo"], alias="mongo")
# scheduler = BackgroundScheduler(executors=executors, job_defaults=job_defaults)
# scheduler.add_job(test_job_b, 'interval', seconds=2, jobstore="mongo", executor="default")
# scheduler.add_job(jobcmdcallable, 'interval', seconds=2, jobstore="mongo", args=[jobcmdobj])
# scheduler.add_jobstore(jobstore=jobstores["mongo"], alias="mongo")
try:
    # Start paused so stored jobs load without firing, then idle for 200s.
    # test_job_b()
    # scheduler.add_job(test_job_b, 'interval', seconds=10, name="test_job_b", jobstore='mongo')
    print scheduler.get_jobs()
    scheduler.start(paused=True)
    print scheduler.get_jobs()
    time.sleep(200)
except SystemExit:
    pass
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,053
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/jhddgapi/views.py
|
from django.shortcuts import render
from django.http import HttpResponse
from MongoDatas import eventsSeries
from MongoDatas import eventsSingle
from MongoDatas import eventsRemain
import urllib
import json
# Create your views here.
def getEventsSeries(request, datatype, s_tm, e_tm, events_quote):
    """Django view: return the event series between *s_tm* and *e_tm* as JSON.

    :param events_quote: URL-quoted JSON list of event ids, e.g. ['dl', 'ac23'].

    Fixed: narrowed the bare ``except:`` to ``except Exception:`` so
    SystemExit/KeyboardInterrupt propagate. NOTE(review): returning the
    formatted traceback in the HTTP response leaks internals -- confirm this
    endpoint is internal-only.
    """
    try:
        events_str = urllib.unquote(events_quote)
        events = json.loads(events_str)
        result = eventsSeries(datatype, s_tm, e_tm, events)
        data = json.dumps(result, ensure_ascii=False, sort_keys=True)
        return HttpResponse(data)
    except Exception:
        import traceback
        exstr = traceback.format_exc()
        return HttpResponse(exstr)
def getEventsSeriesSingle(request, datatype, s_tm, e_tm, events_quote):
    """Django view: return the per-event (single) series between *s_tm* and
    *e_tm* as JSON.

    :param events_quote: URL-quoted JSON list of event ids, e.g. ['dl', 'ac23'].

    Fixed: narrowed the bare ``except:`` to ``except Exception:``.
    NOTE(review): the traceback is returned in the HTTP response -- confirm
    this endpoint is internal-only.
    """
    try:
        events_str = urllib.unquote(events_quote)
        events = json.loads(events_str)
        result = eventsSingle(datatype, s_tm, e_tm, events)
        data = json.dumps(result, ensure_ascii=False, sort_keys=True)
        return HttpResponse(data)
    except Exception:
        import traceback
        exstr = traceback.format_exc()
        return HttpResponse(exstr)
def getEventsRemain(request, datatype, s_tm, last_tm, events_quote):
    """Django view: return event retention starting at *s_tm* over *last_tm*
    days as JSON.

    :param events_quote: URL-quoted JSON list of event ids, e.g. ['dl', 'ac23'].

    Fixed: narrowed the bare ``except:`` to ``except Exception:``.
    NOTE(review): the traceback is returned in the HTTP response -- confirm
    this endpoint is internal-only.
    """
    try:
        events_str = urllib.unquote(events_quote)
        events = json.loads(events_str)
        result = eventsRemain(datatype, s_tm, int(last_tm), events)
        data = json.dumps(result, ensure_ascii=False, sort_keys=True)
        return HttpResponse(data)
    except Exception:
        import traceback
        exstr = traceback.format_exc()
        return HttpResponse(exstr)
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,054
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi_clickhouse/ClickHouseClient/ClickHouseClient.py
|
# -*- coding: utf-8 -*-
import ConfigParser
import logging
import sys
from SubDatabase import SubDatabase
# from infi.clickhouse_orm.database import Database
from __init__ import configPath
logger = logging.getLogger(__name__)
class ClickHouseClient(object):
    """Thin wrapper around the ClickHouse ORM: streaming select plus a bulk
    insert with bisecting retry. Connection settings come from the shared
    config file at import time."""
    cf = ConfigParser.ConfigParser()
    cf.read(configPath)
    DB_URL = cf.get("clickhouse", "db_url")
    PASSWORD = cf.get("clickhouse", "password")

    def __init__(self):
        pass
        # self.db = Database(db_name=db_name, db_url=self.__class__.DB_URL, password=self.__class__.PASSWORD)

    def database(self, db_name):
        """Return a SubDatabase handle bound to *db_name*."""
        db = SubDatabase(db_name=db_name, db_url=self.__class__.DB_URL, password=self.__class__.PASSWORD)
        return db

    def select(self, db_name, query):
        """Yield result rows of *query* executed against *db_name*."""
        db = self.database(db_name)
        for row in db.select(query):
            yield row

    # insert wrapper with bisecting retry on failure
    def insert(self, db, model_instances, bulksize = 1000, is_create = False, insert_count = 0, first_recu = True):
        """Bulk-insert *model_instances* into *db* in batches of *bulksize*.

        On a failed batch the cache is split in half and retried recursively
        (Python 2 integer division), isolating a single bad record which is
        then logged and dropped instead of aborting the whole batch.

        Fixed: the single-record failure path in the tail flush used a format
        string with two %s placeholders but one argument, raising TypeError
        while logging the original error.
        """
        cache = []
        table_name = None
        for elem in model_instances:
            if is_create == False:
                db.create_table(elem)
                db.create_merge_table(elem)
                is_create = True
            table_name = elem.table_name()
            cache.append(elem)
            if len(cache) >= bulksize:
                try:
                    # batch_size must be >= bulksize, otherwise the bisecting
                    # retry below would not cover a failing batch.
                    db.insert(cache, batch_size=bulksize)
                    insert_count += len(cache)
                    cache = []
                except Exception:
                    # Bisect and retry each half.
                    if len(cache) > 1:
                        self.insert(db, cache[:(len(cache) / 2)], bulksize = min(len(cache[:(len(cache) / 2)]), bulksize), is_create=is_create, insert_count=insert_count, first_recu=False)
                        self.insert(db, cache[(len(cache) / 2):], bulksize = min(len(cache[(len(cache) / 2):]), bulksize), is_create=is_create, insert_count=insert_count, first_recu=False)
                    elif len(cache) == 1:
                        # Single bad record: log it and drop it.
                        logger.error("%s" % (str(sys.exc_info()), ))
                    cache = []
        if cache:
            try:
                # batch_size must be >= bulksize (see above).
                db.insert(cache, batch_size=bulksize)
                insert_count += len(cache)
            except Exception:
                # Bisect and retry each half.
                if len(cache) > 1:
                    self.insert(db, cache[:(len(cache) / 2)], min(len(cache[:(len(cache) / 2)]) / 2, bulksize), is_create=is_create, insert_count=insert_count, first_recu=False)
                    self.insert(db, cache[(len(cache) / 2):], min(len(cache[(len(cache) / 2):]) / 2, bulksize), is_create=is_create, insert_count=insert_count, first_recu=False)
                elif len(cache) == 1:
                    logger.error("%s" % (str(sys.exc_info()), ))
        if first_recu:
            logger.info("%(db_name)s.%(table_name)s insert %(count)d records." % {
                "db_name": db.db_name,
                "table_name": table_name,
                "count": insert_count,
            })
if __name__ == "__main__":
    # Manual check: construct a client and show the database handle for "ncf".
    tester = ClickHouseClient()
    print tester.database("ncf")
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,055
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi/__init__.py
|
# -*- coding: utf-8 -*-
"""Package init: make the repository root importable when this package is
loaded directly (e.g. by scripts run from inside the package)."""
import sys
from os import path

# Fixed: was `from os import sys, path` -- importing sys through the os
# module is an accident of os's internals, not a supported API.
sys.path.append(path.dirname(path.dirname(path.abspath(__file__))))
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,056
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi_clickhouse/models.py
|
from __future__ import unicode_literals
from django.db import models
from saasapi.models import api_cache
# Create your models here.
# class APICache(models.Model):
# inserttm = models.DateTimeField()
# appkey = models.CharField(max_length=512)
# api_id = models.CharField(max_length=512)
# params = models.TextField()
# data = models.TextField()
# enable = models.IntegerField()
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,057
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasjob/JobHandler.py
|
import time
class JobHandler(object):
'''
{"jobobj": None, "jobcmdobj": None, "isrunning": False, "hprunningtime": 0, "startstamp": 0, "endstamp": 0}
'''
def __init__(self, job = None, handler = None):
    """Wrap an APScheduler job and track its execution state.

    :param job: scheduled job object; args[0] is expected to be the JobCmd
        (see get_job_attr usage below).
    :param handler: unused -- kept for interface compatibility.
    """
    self.jobobj = job
    if job:
        self.jobcmdobj = self.get_job_attr("args")[0]
        self.job_id = self.get_job_attr("id")
    else:
        self.jobcmdobj = None
        self.job_id = None
    # Execution-state bookkeeping (epoch stamps and flags).
    self.end_stamp = 0
    self.start_stamp = 0
    self.hp_runningtime = 0
    self.is_finished = False
    self.is_crashed = False
    self.is_paused = False
    # Status codes: -1 waiting, 0 crashed, 1 finished/awaiting next run,
    # 2 running, 3 paused; allowed transitions are listed verbatim below.
    '''
    # job 状态码,
    # -1:等待调度,
    # 0: 异常,
    # 1:执行完成,等待下次调度,
    # 2:正在执行,
    # 3:暂停
    转换路径:
    0 -> 2
    1 -> 2, 1 -> 3
    2 -> 3, 2 -> 1, 2 -> 0
    3 -> -1(1), 3 -> 2
    '''
    self.status = 1
def when_job_submitted(self):
self.start_stamp = time.time()
self.is_finished = False
self.status = 2
def when_job_executed(self):
self.end_stamp = time.time()
self.is_finished = True
self.is_success = True
if self.status == 2:
self.status = 1
self.hp_runningtime = self.end_stamp - self.start_stamp
def when_job_crashed(self):
self.end_stamp = time.time()
self.is_success = False
self.is_finished = True
self.is_crash = True
self.status = 0
def when_job_resume(self):
# from: 3
if self.status == 3:
self.status = 1
def when_job_pause(self):
# from: 0, 1, 2, 3
self.status = 3
@property
def job(self):
return self.jobobj
@job.setter
def job(self, _job):
self.jobobj = _job
if _job:
self.jobcmdobj = self.get_job_attr("args")[0]
self.job_id = self.get_job_attr("id")
else:
self.jobcmdobj = None
self.job_id = None
if self.jobcmdobj:
self.cmd = self.jobcmdobj.command
@property
def isRunning(self):
return self.is_running
# @isRunning.setter
# def isRunning(self, is_running):
# self.is_running = is_running
@property
def isSuccess(self):
return self.is_success
# @isSuccess.setter
# def isSuccess(self, is_success):
# self.is_success = is_success
@property
def isPause(self):
return self.is_pause
# @isPause.setter
# def isPause(self, is_pause):
# self.is_pause = is_pause
@property
def exception(self):
return self.exceptioninfo
# @exception.setter
# def exception(self, ex):
# self.exceptioninfo = ex
@property
def startStamp(self):
return self.startstamp
# @startStamp.setter
# def startStamp(self, start_stamp):
# self.start_stamp = start_stamp
@property
def endStamp(self):
return self.endstamp
# @endStamp.setter
# def endStamp(self, end_stamp):
# self.end_stamp = end_stamp
@property
def hpRunTime(self):
return self.hp_runningtime
@property
def runnTime(self):
if self.is_running:
return int(time.time() - self.start_stamp)
else:
return 0
# 获取job属性
def get_job_attr(self, attr):
try:
result = eval("self.jobobj.%s" % attr)
return result
except:
import traceback
print(traceback.print_exc())
return None
# 获取job属性
def get_job_attrs(self, attrs):
result = []
for attr in attrs:
_result = self.get_job_attr(attr)
result.append(_result)
return result
# 获取 trriger 属性
def get_job_trigger(self):
# ('trigger', <CronTrigger (second='4', timezone='Asia/Shanghai')>)
_trigger = self.get_job_attr("trigger")
# options = ["%s='%s'" % (f.name, f) for f in self.fields if not f.is_default]
if _trigger:
return dict([(f.name, f.__str__()) for f in _trigger.fields if not f.is_default])
else:
return {}
@property
def jobhandlerattr(self):
if self.jobcmdobj:
job_attr = self.jobcmdobj.jobattr()
else:
job_attr = {}
jobhandler_attr = {
"job_id": self.job_id, # job id
"is_running": self.is_running, # 是否正在运行
"is_pause": self.is_pause, # 是否暂停执行
"is_success": self.is_success, # 是否暂停执行
"start_stamp": self.start_stamp, # 最近一次开始执行时间
"end_stamp": self.end_stamp, # 最近一次执行完成时间
"hope_runtime": self.hp_runningtime, # 预计执行时间
"status": self.status, # 预计执行时间
}
try:
job_name_arg = self.get_job_attr("args")[1].get("name", [""]), # 任务名称
job_name = job_name_arg[0] if len(job_name_arg) == 1 else ""
jobhandler_attr["name"] = job_name
except:
jobhandler_attr["name"] = ""
try:
desc_arg = self.get_job_attr("args")[1].get("desc", [""]), # 任务说明
desc = desc_arg[0] if len(desc_arg) == 1 else ""
jobhandler_attr["desc"] = desc
except:
jobhandler_attr["desc"] = ""
try:
allowmodify_arg = self.get_job_attr("args")[1].get("allowmodify", [False]) # 是否允许修改
print("allowmodify_arg", allowmodify_arg)
allowmodify = allowmodify_arg[0] if len(allowmodify_arg) == 1 else False
jobhandler_attr["allowmodify"] = allowmodify
except:
jobhandler_attr["allowmodify"] = False
return dict(job_attr, **jobhandler_attr)
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,058
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi/IPtoLoc/IPtoAreaFinals.py
|
# coding:utf-8
import sys
# Python 2 legacy idiom: reload(sys) re-exposes setdefaultencoding so the
# process-wide default text encoding can be forced to UTF-8. This module is
# therefore Python 2 only (reload/setdefaultencoding do not exist in Py3).
reload(sys)
sys.setdefaultencoding('utf8')
__author__ = "wuxi<wuxi@ifeng.com>"
__date__ = "$2014-9-23 14:31:01$"
def ip2long(strIP):
    """Convert a dotted-quad IPv4 string (e.g. "1.2.3.4") to its integer form.

    BUGFIX/compat: uses int() instead of Python 2's long() — equivalent on
    Python 2 for these magnitudes, and required for Python 3.
    """
    ip = strIP.split('.')
    return (int(ip[0]) << 24) + (int(ip[1]) << 16) + (int(ip[2]) << 8) + int(ip[3])
def bsearch(longip, array, low, high):
    """Binary-search ``array`` (ascending [start, end, value, ...] records)
    for the record whose range contains ``longip``.

    Returns a 1-tuple (value,) on a hit, () on a miss.

    BUGFIX: the two recursion branches were swapped (descending upper/lower
    halves in the wrong direction), so lookups in the upper half always
    missed. The corrected direction matches bsearch_more, where the same fix
    was already applied (see its commented-out lines). Also uses int/// in
    place of Python 2's long().
    """
    if low > high:
        return ()
    mid = (low + high) // 2
    midrecord = array[mid]
    if longip >= midrecord[0] and longip <= midrecord[1]:
        return (midrecord[2],)
    elif low == high:
        return ()
    elif longip > midrecord[0]:
        # target lies above this record's range -> search the upper half
        return bsearch(longip, array, mid + 1, high)
    elif longip < midrecord[0]:
        # target lies below this record's range -> search the lower half
        return bsearch(longip, array, low, mid)
    else:
        return ()
def bsearch_more(longip, array, low, high):
    """Binary-search ascending [start, end, v2, v3, ...] records for the one
    containing ``longip``; return (record[2], record[3]) or () on a miss.

    Recursion direction here was already correct (the author's fix is visible
    in the original commented-out lines). Compat fix only: int/// instead of
    Python 2's long().
    """
    if low > high:
        return ()
    mid = (low + high) // 2
    midrecord = array[mid]
    if longip >= midrecord[0] and longip <= midrecord[1]:
        return (midrecord[2], midrecord[3])
    elif low == high:
        return ()
    elif longip > midrecord[0]:
        return bsearch_more(longip, array, mid + 1, high)
    elif longip < midrecord[0]:
        return bsearch_more(longip, array, low, mid)
    else:
        return ()
def bsearch_for(longip, array, low, high):
    """Binary-search ascending [start, end, _, value, ...] records for the one
    containing ``longip``; return (record[3],) or () on a miss.

    BUGFIX: recursion branches were swapped exactly as in bsearch (compare
    bsearch_more, where the direction was already corrected); lookups in the
    upper half always missed. Also int/// instead of Python 2's long().
    """
    if low > high:
        return ()
    mid = (low + high) // 2
    midrecord = array[mid]
    if longip >= midrecord[0] and longip <= midrecord[1]:
        return (midrecord[3],)
    elif low == high:
        return ()
    elif longip > midrecord[0]:
        # target above this record -> upper half
        return bsearch_for(longip, array, mid + 1, high)
    elif longip < midrecord[0]:
        # target below this record -> lower half
        return bsearch_for(longip, array, low, mid)
    else:
        return ()
def bsearch_formore(longip, array, low, high):
    """Binary-search ascending [start, end, _, v3, v4] records for the one
    containing ``longip``; return (record[3], record[4]) or () on a miss.

    BUGFIX: recursion branches were swapped (same defect as bsearch /
    bsearch_for; bsearch_more shows the corrected direction). Also int///
    instead of Python 2's long().
    """
    if low > high:
        return ()
    mid = (low + high) // 2
    midrecord = array[mid]
    if longip >= midrecord[0] and longip <= midrecord[1]:
        return (midrecord[3], midrecord[4])
    elif low == high:
        return ()
    elif longip > midrecord[0]:
        # target above this record -> upper half
        return bsearch_formore(longip, array, mid + 1, high)
    elif longip < midrecord[0]:
        # target below this record -> lower half
        return bsearch_formore(longip, array, low, mid)
    else:
        return ()
def getlocid(targetIP, iniarray, type = 'normal'):
    """Resolve ``targetIP`` against the loaded range table ``iniarray``.

    ``type`` selects the lookup flavour: 'normal' / 'more' / 'for_normal' /
    'for_more'. Misses come back as ('Unknown',) — or a pair of 'Unknown'
    for the two "more" flavours — instead of an empty tuple.
    """
    longip = ip2long(targetIP)
    last = len(iniarray) - 1
    if type == 'more':
        locid = bsearch_more(longip, iniarray, 0, last)
    elif type == 'for_normal':
        locid = bsearch_for(longip, iniarray, 0, last)
    elif type == 'for_more':
        locid = bsearch_formore(longip, iniarray, 0, last)
    elif type == 'normal':
        locid = bsearch(longip, iniarray, 0, last)
    else:
        locid = ()
    if not locid:
        locid = ('Unknown', 'Unknown') if type in ('more', 'for_more') else ('Unknown',)
    return locid
# ./ipdata.log
def load(iniFile):
    """Load the IP-range table from ``iniFile`` (e.g. ./ipdata.log).

    Each valid line has 5 whitespace-separated fields:
    start_ip end_ip field3 field4 field5; malformed lines are skipped.
    Returns a list of [int, int, str, str, str] records.

    BUGFIX: the file handle is now closed deterministically via a context
    manager (it previously leaked), and int() replaces Python 2's long().
    """
    recordlist = []
    with open(iniFile) as fh:
        for line in fh:
            items = line.split()
            if len(items) != 5:
                continue  # skip malformed rows rather than failing the load
            recordlist.append([int(items[0]), int(items[1]), items[2], items[3], items[4]])
    return recordlist
if __name__ == "__main__":
    # Example usage kept for reference; the module is import-only in practice.
    # iniarray = load("./ipdata.log")
    # result = getlocid("113.124.232.55", iniarray, type='for_more')
    # print(result)
    # for item in result:
    #     print(item)
    pass
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,059
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasjob/JobCmd.py
|
# -*- coding: utf-8 -*-
import os
import sys
import time
import uuid  # NOTE(review): unused in this module — confirm before removing
# Python 2 legacy idiom forcing the process-wide default encoding to UTF-8;
# this module is therefore Python 2 only.
reload(sys)
sys.setdefaultencoding("utf-8")
class JobCmd(object):
    """Wraps a shell command so the scheduler can run it as a job."""

    def __init__(self, cmd):
        self.jobid = None               # assigned later via set_jobid
        self.createstamp = time.time()  # job creation timestamp
        self._cmd = cmd                 # shell command line to execute
        self.statuscode = 0             # exit status of the last run; 0 = OK

    def set_jobid(self, jobid):
        """Record the scheduler-assigned job id."""
        self.jobid = jobid

    # Execute the command.
    def run(self):
        """Run the command via os.system.

        Raises RuntimeError when the internal status code is -1 (i.e. the
        call itself blew up). NOTE(review): a non-zero shell exit status is
        recorded in ``statuscode`` but does NOT raise — callers that rely on
        that behaviour would break if it changed, so it is kept.
        """
        try:
            self.statuscode = os.system(self._cmd)
        except Exception:
            # BUGFIX: was a bare ``except:`` and printed the None returned by
            # traceback.print_exc(); now narrowed and printed once.
            import traceback
            traceback.print_exc()
            self.statuscode = -1
        if self.statuscode == -1:
            raise RuntimeError('''%s execute faild.''' % self._cmd)

    # Job attributes.
    def jobattr(self):
        """Return a summary dict describing this job."""
        result = {
            "create_stamp": self.createstamp,
            "cmd": self._cmd,
            "job_id": self.jobid,
        }
        return result
def jobcmdcallable(jobcmdobj, *args, **kwargs):
    """Scheduler entry point: delegate execution to the command object.

    Extra positional/keyword arguments are accepted (the scheduler passes
    metadata alongside the command object) but intentionally ignored.
    """
    jobcmdobj.run()
if __name__ == "__main__":
    # BUGFIX: jobcmdcallable calls .run() on its argument, so passing the raw
    # string "ipconfig" raised AttributeError; wrap the command in a JobCmd.
    jobcmdcallable(JobCmd("ipconfig"))
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,060
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi/DBClient/Config.py
|
# -*- coding: utf-8 -*-
# Known MongoDB endpoints, keyed by an integer server id.
# NOTE(review): the "*" credentials look masked for publication — presumably
# real values come from deployment configuration; confirm before use.
mongo_server = {
    1:
    {
        # "mongo_ip": "10.45.141.35",
        "mongo_ip": "101.201.145.120",  # public endpoint; LAN address kept commented above
        "mongo_port": "27017",
        "mongo_user": None,    # no authentication configured for server 1
        "mongo_passwd": None,
    },
    2:
    {
        # "mongo_ip": "10.44.184.245",
        "mongo_ip": "10.44.184.245",
        "mongo_port": "25332",
        "mongo_user": "*",
        "mongo_passwd": "*",
    }
}
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,061
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi/IPtoLoc/__init__.py
|
# no other package
from os import path
import os
# Absolute path to the bundled IP-range data file, resolved relative to this
# package directory so lookups work regardless of the process's working dir.
ipdataPath = os.sep.join([path.dirname(path.abspath(__file__)), "ipdata.log"])
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,062
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasjob/SchedulerManager_dev.py
|
# -*- coding: utf-8 -*-
from importlib import import_module
import uuid
import time
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.jobstores.mongodb import MongoDBJobStore
from apscheduler.jobstores.memory import MemoryJobStore
from apscheduler.triggers.cron import CronTrigger
from apscheduler.events import (EVENT_SCHEDULER_STARTED, EVENT_SCHEDULER_SHUTDOWN, EVENT_SCHEDULER_PAUSED,
EVENT_SCHEDULER_RESUMED, EVENT_EXECUTOR_ADDED, EVENT_EXECUTOR_REMOVED,
EVENT_JOBSTORE_ADDED, EVENT_JOBSTORE_REMOVED, EVENT_ALL_JOBS_REMOVED,
EVENT_JOB_ADDED, EVENT_JOB_REMOVED, EVENT_JOB_MODIFIED, EVENT_JOB_EXECUTED,
EVENT_JOB_ERROR, EVENT_JOB_MISSED, EVENT_JOB_SUBMITTED, EVENT_JOB_MAX_INSTANCES)
from apscheduler.executors.pool import ThreadPoolExecutor, ProcessPoolExecutor
from JobHandler import JobHandler
from DBClient.PyMongoClient import PyMongoClient
# NOTE(review): a ``global`` statement at module level is a no-op — the name
# is already module-global; kept byte-identical here.
global _mongoclient
# Shared MongoDB connection handed to the scheduler's MongoDBJobStore.
_mongoclient = PyMongoClient().getConn()
class SchedulerManager(object):
    """Owns the APScheduler BackgroundScheduler plus per-job bookkeeping.

    ``_jobs`` maps job id -> apscheduler Job; ``_jobhandlers`` maps
    job id -> JobHandler tracking runtime state (status, timestamps).
    """
    global _mongoclient

    def __init__(self):
        self._jobs = {}
        self._jobhandlers = {}  # format, key: jobid, value: jobhandler
        self.create_scheduler()
        self.start()

    def create_scheduler(self):
        """Build the scheduler with its job stores, executors and listeners."""
        self.jobstores = {
            'mongo': MongoDBJobStore(collection='job1', database='saasjob', client=_mongoclient),
            'default': MemoryJobStore()
        }
        self.executors = {
            'default': ThreadPoolExecutor(20),
            'processpool': ProcessPoolExecutor(5)
        }
        self.job_defaults = {
            'coalesce': False,
            'misfire_grace_time': 1,
            'max_instances': 1
        }
        self._sched = BackgroundScheduler(jobstores=self.jobstores, executors=self.executors, job_defaults=self.job_defaults)
        # Listen for job submissions.
        self._sched.add_listener(self.when_job_submitted, EVENT_JOB_SUBMITTED)
        # # 添加 任务执行完成 事件监听
        # self._sched.add_listener(self.when_job_executed, EVENT_JOB_EXECUTED)
        # One listener covers both completion and error events; it only acts
        # when event.exception is set (successful runs are currently ignored
        # because the when_job_executed listener above stays commented out).
        self._sched.add_listener(self.when_job_crashed, EVENT_JOB_EXECUTED | EVENT_JOB_ERROR)

    def when_job_submitted(self, event):
        """EVENT_JOB_SUBMITTED listener: mark the job's handler as running."""
        try:
            job_id = event.job_id
            # BUGFIX: the original guard was
            # ``job_id not in self._jobhandlers and job_id in self._jobhandlers``
            # — a contradiction that is always False, so a handler was never
            # created lazily here. The second operand must test self._jobs.
            if job_id not in self._jobhandlers and job_id in self._jobs:
                self._jobhandlers.setdefault(job_id, JobHandler(self._jobs[job_id]))
            jobhandler = self._jobhandlers[event.job_id]
            jobhandler.when_job_submitted()
            print("%s submitted at %s" % (event.job_id, time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))))
        except Exception:
            import traceback
            traceback.print_exc()

    def when_job_executed(self, event):
        """EVENT_JOB_EXECUTED listener (currently not registered)."""
        try:
            if event.exception:
                job_id = event.job_id
                if job_id not in self._jobhandlers:
                    self._jobhandlers.setdefault(job_id, JobHandler(self._jobs[job_id]))
                jobhandler = self._jobhandlers[event.job_id]
                jobhandler.when_job_crashed()
                print("%s crashed at %s" % (event.job_id, time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))))
            else:
                job_id = event.job_id
                if job_id not in self._jobhandlers:
                    self._jobhandlers.setdefault(job_id, JobHandler(self._jobs[job_id]))
                jobhandler = self._jobhandlers[event.job_id]
                jobhandler.when_job_executed()
                print("%s executed at %s" % (event.job_id, time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))))
        except Exception:
            import traceback
            traceback.print_exc()

    def when_job_crashed(self, event):
        """EXECUTED|ERROR listener: record a crash when an exception is set."""
        try:
            if event.exception:
                job_id = event.job_id
                if job_id not in self._jobhandlers:
                    self._jobhandlers.setdefault(job_id, JobHandler(self._jobs[job_id]))
                jobhandler = self._jobhandlers[event.job_id]
                jobhandler.when_job_crashed()
                print("%s crashed at %s" % (event.job_id, time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))))
        except Exception:
            import traceback
            traceback.print_exc()

    def fresh_jobs(self):
        """Re-read the scheduler's job list into the local _jobs cache."""
        self._sched.pause()
        for _job in self._sched.get_jobs():
            job_id = self._get_job_attr(_job, "id")
            self._jobs.setdefault(job_id, _job)
        self._sched.resume()

    def sync_jobs(self, job_ids=None):
        """Reconcile _jobs and _jobhandlers for the given ids (all when empty).

        BUGFIX: the default argument was a shared mutable ``set()``; it now
        defaults to None. NOTE(review): both branches copy entries across the
        two dicts even though they hold different types (Job vs JobHandler) —
        confirm the intent before relying on the synced values.
        """
        if job_ids is None:
            job_ids = set()
        job_ids = (set(self._jobhandlers.keys()) | set(self._jobs.keys())) if len(job_ids) == 0 else job_ids
        for job_id in job_ids:
            if job_id in self._jobhandlers and job_id not in self._jobs:
                self._jobs[job_id] = self._jobhandlers[job_id]
            elif job_id not in self._jobhandlers and job_id in self._jobs:
                self._jobhandlers[job_id] = self._jobs[job_id]
        job_ids = set()
        return True

    def start(self):
        """Start the scheduler paused and load existing jobs; True on success."""
        try:
            self._sched.start(paused=True)
            self.fresh_jobs()
            return True
        except Exception:
            import traceback
            traceback.print_exc()
            return False
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,063
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi/ipinfo.py
|
# coding: utf-8
import urllib
import json
def ipinfo_sina(ip):
    """Resolve an IP to (country, province, city) via Sina's iplookup API.

    An empty ip yields ('unknown', 'unknown', 'unknown'); any network or
    parsing failure yields the Chinese placeholder tuple. The input may use
    underscores instead of dots and may carry surrounding double quotes.
    """
    if not ip:
        return ('unknown', 'unknown', 'unknown')
    ip = ip.replace("_", ".").strip('''"''')
    base_url = 'http://int.dpool.sina.com.cn/iplookup/iplookup.php?format=json&ip=%(ip)s'
    try:
        raw = urllib.urlopen(base_url % {"ip": ip}).read()
        payload = json.loads(raw)
        country = payload["country"]
        province = payload["province"]
        # Fall back to the province when the city field is empty.
        city = payload["city"] or province
        result = (country or u"未知", province or u"未知", city or u"未知")
    except:
        result = (u'未知', u'未知', u'未知')
    return result
if __name__ == '__main__':
    import time
    # Ad-hoc timing of a single lookup (Python 2 print statements).
    a = time.time()
    tmp = ipinfo_sina('111.44.144.176')
    print tmp[0], tmp[1], tmp[2]
    print time.time() - a
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,064
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi_clickhouse/CacheData.py
|
# -*- coding: utf-8 -*-
import __init__
from saasapi.CacheData import get_data
from saasapi.CacheData import save_data
if __name__ == "__main__":
    # Smoke test: write a dummy cache entry via the re-exported save_data.
    save_data("test", "test", "test", "test")
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
39,065
|
myzhongguo/OpenSaaSAPI
|
refs/heads/master
|
/saasapi_clickhouse/EventRemain.py
|
# -*- coding: utf-8 -*-
import __init__
import time
import datetime
import threading
from os import sys, path
import json
from ClickHouseClient.ClickHouseClient import ClickHouseClient
import logging
from Query import Query
from CacheDecorator import common_cache_decorator
from SaaSCommon.JHDecorator import fn_timer
# Log to logs/api.log next to this module, appending across runs.
logging.basicConfig(level=logging.INFO,
                    format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
                    datefmt='%a, %d %b %Y %H:%M:%S',
                    filename=path.dirname(path.abspath(__file__)) + "/logs/" + "api.log",
                    filemode='a')
# Module-level logger (named after this file's path).
logger = logging.getLogger(__file__)
def result_transform(data):
    """Index retention rows by date: [{tm, numbers, length}] -> {tm: [numbers, length]}.

    On duplicate ``tm`` keys the first row wins (setdefault semantics).
    """
    indexed = {}
    for row in data:
        indexed.setdefault(row["tm"], [row["numbers"], row["length"]])
    return indexed
def result_detransform(data):
    """Inverse of result_transform: {tm: [numbers, length]} -> [{tm, numbers, length}]."""
    return [
        {"tm": tm, "numbers": pair[0], "length": pair[1]}
        for tm, pair in data.items()
    ]
class EventRemain(Query):
    """Builds and runs ClickHouse retention ("remain") queries.

    ``data()`` fans out one query per cohort day in the requested range
    (one thread per day), collects per-day retention counts via ``submit``,
    pads missing values with zeros, and returns the rows sorted by date.
    """
    def __init__(self):
        pass
    def create_query_sql(self, db_name, start_day, end_day, events, attrs=None):
        '''
        Build the retention SQL for one cohort day.

        :param db_name: appkey/datatype (ClickHouse database name)
        :param start_day: cohort date, format: yyyy-mm-dd
        :param end_day: retention-window end date, format: yyyy-mm-dd
        :param events: event ids plus map attributes, format:
            [ [ [{id:...,attrs:[{id:...,op:...,val:...},{mapkey}]}, {OR relation}], [{AND relation}] ],
              [retention event (same format)] ]
        :param attrs: base attributes such as version (jhd_vr) or channel
            (jhd_pb), e.g. {"jhd_pb": "appstore", "jhd_vr": "1.0"}
        :return: the SQL string
        '''
        '''
        NOTE: the initial and conversion events do not implement the
        "AND" relation here.
        '''
        start_day_1 = (datetime.datetime.strptime(start_day, "%Y-%m-%d") + datetime.timedelta(days=1)).strftime("%Y-%m-%d")
        if attrs is None:
            attrs = {}
        # Template: left-join the day-0 cohort against activity between day 0
        # and end_day, then count distinct users per partition (day).
        sql_format = "select partition, count(distinct(userkey)) as uv from ( \
            select partition, userkey from \
            (select partition, jhd_userkey as userkey from %(db_name)s.userevent where partition = toDate('%(start_day)s') and %(map_cond_init)s%(_where)s group by partition, jhd_userkey) \
            all left join \
            (select partition, jhd_userkey as userkey from %(db_name)s.userevent where partition = toDate('%(start_day)s') or ((partition between toDate('%(start_day_1)s') and toDate('%(end_day)s')) and %(map_cond_conversion)s) group by partition, jhd_userkey) \
            using userkey) \
            group by partition \
            order by partition"
        _where = " and " + self.fragment_where(attrs)
        map_cond_init = self.map_conds(events[0])
        map_cond_conversion = self.map_conds(events[1])
        query = sql_format % {
            "db_name": db_name,
            "start_day": start_day,
            "start_day_1": start_day_1,
            # Clamp the window end to yesterday — future days have no data.
            "end_day": min(end_day, (datetime.datetime.today()-datetime.timedelta(days=1)).strftime("%Y-%m-%d")),
            "_where": _where if bool(attrs) else " ",
            "map_cond_init": map_cond_init,
            "map_cond_conversion": map_cond_conversion
        }
        logger.info(query)
        return query
    def map_conds(self, data):
        '''
        Render an event selection as a SQL boolean expression.

        :param data: format:
            [ [{id:...,attrs:[{id:...,op:...,val:...},{mapkey}]}, {OR relation}], [{AND relation}] ]
        :return: condition string ("... and ..." joining the OR groups)
        '''
        cond_eventid_format = "jhd_eventId = '%(event_id)s'"
        cond_map_format = "%(visit_params)s(jhd_map, '%(mapkey)s') %(operator)s"
        events_and = []
        for index, event_array in enumerate(data):
            events_or = []
            for event_data in event_array:
                event_solo_and = []
                event_id = event_data["id"]
                if event_id == "jhddg_every":
                    # "any event" sentinel: match every non-empty event id.
                    cond_eventid = "jhd_eventId != %(event_id)s" % {"event_id": "''"}
                    event_solo_and.append(cond_eventid)
                else:
                    event_solo_and.append(cond_eventid_format % {"event_id": event_id})
                if "attrs" in event_data and event_data["attrs"]:
                    # The event carries one or more mapkey constraints.
                    for map_item in event_data["attrs"]:
                        mapkey = map_item["id"]
                        op = map_item["op"]
                        mapvalue = map_item["val"]
                        visit_params, operator = self.query_operator(op, mapvalue)
                        cond_map = cond_map_format % {"visit_params": visit_params, "mapkey": mapkey, "operator": operator}
                        event_solo_and.append(cond_map)
                events_or.append("(" + " and ".join(event_solo_and) + ")")
            events_and.append(" or ".join(events_or))
        return " and ".join(events_and)
    @fn_timer
    # @common_cache_decorator("event_remain", result_transform, result_detransform, reverse=False)
    def data(self, datatype, params, interval = 0):
        """Run the retention query for every cohort day in [startDay, endDay].

        ``params`` must carry startDay/endDay (yyyy-mm-dd), remain (window
        length, 1..60) and events; ``interval`` throttles thread launches.
        Returns the row list sorted by date, or {"errinfo": ...} on failure.
        """
        result = []
        try:
            start_day = datetime.datetime.strptime(params["startDay"], "%Y-%m-%d")
            end_day = datetime.datetime.strptime(params["endDay"], "%Y-%m-%d")
            remain_num = int(params["remain"])
            events = params["events"]
            num = (end_day - start_day).days
            attrs = params.get("attrs", {})
            # Both the date span and the retention window are capped at 60 days.
            if num > 60 or num < 0:
                return {"errinfo": "日期跨度超出范围!"}
            if remain_num > 60 or remain_num <= 0:
                return {"errinfo": "窗口期超出范围!"}
        except:
            import sys
            import traceback
            exc_type, exc_value, exc_traceback = sys.exc_info()
            errinfo = traceback.format_exception(exc_type, exc_value, exc_traceback)
            logging.error(json.dumps(errinfo))
            return {"errinfo": "参数错误!"}
        try:
            threads = []
            while start_day <= end_day:
                # Python 2 str/unicode juggling: try a plain str() of the SQL
                # first, and fall back to an explicit decode/encode round-trip
                # when the query contains non-ASCII bytes.
                try:
                    query = str(self.create_query_sql(datatype, start_day.strftime("%Y-%m-%d"), (start_day + datetime.timedelta(days=remain_num)).strftime("%Y-%m-%d"), events, attrs=attrs).decode("utf-8"))
                except:
                    query = self.create_query_sql(datatype, start_day.strftime("%Y-%m-%d"), (start_day + datetime.timedelta(days=remain_num)).strftime("%Y-%m-%d"), events, attrs=attrs).decode("utf-8").encode("utf-8")
                # query = self.create_query_sql(datatype, start_day.strftime("%Y-%m-%d"), (start_day + datetime.timedelta(days=remain_num)).strftime("%Y-%m-%d"), events, attrs=attrs)
                # One worker thread per cohort day; submit() appends into result.
                t = threading.Thread(target=self.submit, args=(datatype, query, result, start_day.strftime("%Y-%m-%d")))
                t.start()
                threads.append(t)
                time.sleep(interval)
                start_day += datetime.timedelta(days=1)
            for _thread in threads:
                _thread.join()
        except:
            import sys
            import traceback
            exc_type, exc_value, exc_traceback = sys.exc_info()
            errinfo = traceback.format_exception(exc_type, exc_value, exc_traceback)
            logging.error(json.dumps(errinfo))
            return {"errinfo": "查询错误!"}
        # Pad "missing" slots: every row gets remain_num + 1 cells, and cells
        # for days that have already elapsed are zero-filled.
        for item in result:
            item["length"] = remain_num + 1
            item["numbers"] = (item["numbers"] if "numbers" in item else []) + ([""]*(remain_num + 1 - len(item["numbers"] if "numbers" in item else [])))
            day = datetime.datetime.strptime(item["tm"], "%Y-%m-%d")
            yesterday = datetime.datetime.today() - datetime.timedelta(days=1)
            import copy
            remain_form = ['']*((yesterday - day).days + 1)
            remain_result = copy.deepcopy(item["numbers"])
            # Only the first len(remain_form) cells lie in the past (zip stops
            # at the shorter list); future cells stay "" placeholders.
            for index, (value, form) in enumerate(zip(remain_result, remain_form)):
                if value == "":
                    item["numbers"][index] = 0
        # Sort ascending by date.
        result_sorted = sorted(result, key = lambda item: item["tm"])
        return result_sorted
    def submit(self, db_name, query, result, tm):
        """Worker: run one cohort day's query and append its row to ``result``.

        ``result`` is shared across threads; list.append is the only mutation.
        """
        client = ClickHouseClient()
        item = {"tm": tm}
        for row in client.select(db_name, query):
            tm = row.partition.strftime("%Y-%m-%d")
            item.setdefault("tm", tm)
            uv = row.uv
            item.setdefault("numbers", []).append(uv)
        # item always carries at least {"tm": ...}, so this append always runs.
        if bool(item):
            result.append(item)
if __name__ == "__main__":
    import os
    # NOTE(review): this bare attribute access has no effect — likely leftover.
    os.linesep
    tester = EventRemain()
    # data = {"remain":20,"endDay":"2017-02-04","events":[[[{"id":"ac36"}]],[[{"id":"jhddg_every"}]]],"startDay":"2017-02-01"}
    data = {"remain":7,"endDay":"2017-02-09","events":[[[{"id":"jhddg_every","name":"任意事件"}]],[[{"id":"jhddg_every","name":"任意事件"}]]],"startDay":"2017-02-02"}
    # Python 2 print statement; dumps the retention rows without escaping CJK.
    print json.dumps(tester.data("BIQU_ANDROID", data), ensure_ascii=False)
|
{"/saasapi/views.py": ["/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py", "/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventSummary.py"], "/saasapi_clickhouse/views.py": ["/saasapi_clickhouse/Funnel.py", "/saasapi_clickhouse/CrossEvent.py", "/saasapi_clickhouse/EventRemain.py", "/saasapi_clickhouse/RoundFlightInterval.py", "/saasapi_clickhouse/RoundAireLine.py"], "/saasapi/urls.py": ["/saasapi/__init__.py"], "/saasapi/CacheData.py": ["/saasapi/models.py"], "/saasapi_clickhouse/models.py": ["/saasapi/models.py"], "/saasapi_clickhouse/CacheData.py": ["/saasapi/CacheData.py"], "/dataservice/views.py": ["/IPList.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.