index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
26,920
|
benterris/chordstransposer
|
refs/heads/master
|
/chordstransposer/__init__.py
|
from .transposer import transpose, transpose_by
|
{"/chordstransposer/transposer.py": ["/chordstransposer/config.py"], "/chordstransposer/__init__.py": ["/chordstransposer/transposer.py"]}
|
26,946
|
eracle/instagram_private_api
|
refs/heads/master
|
/instagram_web_api/client.py
|
# Copyright (c) 2017 https://github.com/ping
#
# This software is released under the MIT License.
# https://opensource.org/licenses/MIT
# -*- coding: utf-8 -*-
import gzip
import hashlib
import json
import logging
import random
import re
import string
import time
import warnings
from functools import wraps
from io import BytesIO
from socket import timeout, error as SocketError
from ssl import SSLError
from .compat import (
compat_urllib_request, compat_urllib_parse,
compat_urllib_parse_urlparse, compat_urllib_error,
compat_http_client, compat_cookiejar
)
from .compatpatch import ClientCompatPatch
from .errors import (
ClientError, ClientLoginError, ClientCookieExpiredError,
ClientConnectionError, ClientBadRequestError,
ClientForbiddenError, ClientThrottledError,
)
# Py2/Py3 compat shim: ensure ConnectionError exists in this module's
# namespace so it can be imported from here on Python 2 as well.
try:  # Python 3:
    # Not a no-op, we're adding this to the namespace so it can be imported.
    ConnectionError = ConnectionError  # pylint: disable=redefined-builtin
except NameError:  # Python 2:
    class ConnectionError(Exception):
        pass
from .http import ClientCookieJar, MultipartFormDataEncoder
from .common import ClientDeprecationWarning

# Module-level logger; configuration (handlers/levels) is left to the caller.
logger = logging.getLogger(__name__)
# Always surface deprecation warnings raised by this client.
warnings.simplefilter('always', ClientDeprecationWarning)
def login_required(fn):
    """Decorator for Client methods that must only run on an
    authenticated client; raises ClientError(403) otherwise."""
    @wraps(fn)
    def wrapper(*args, **kwargs):
        client = args[0]
        if client.is_authenticated:
            return fn(*args, **kwargs)
        raise ClientError('Method requires authentication.', 403)
    return wrapper
class Client(object):
    """Main API client class for the web api."""

    # Legacy /query/ endpoint, used only by the obsolete user_info()/media_info().
    API_URL = 'https://www.instagram.com/query/'
    # GraphQL endpoint used by most feed/info methods.
    GRAPHQL_API_URL = 'https://www.instagram.com/graphql/query/'
    # Default desktop browser user agent string.
    USER_AGENT = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/11.1.2 Safari/605.1.15' # noqa
    # Mobile browser user agent, used by the photo upload endpoint.
    MOBILE_USER_AGENT = 'Mozilla/5.0 (iPhone; CPU iPhone OS 11_4 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/11.0 Mobile/15E148 Safari/604.1' # noqa
def __init__(self, user_agent=None, **kwargs):
    """
    :param user_agent: Override the default useragent string with your own
    :param kwargs: See below

    :Keyword Arguments:
        - **auto_patch**: Patch the api objects to match the public API. Default: False
        - **drop_incompat_key**: Remove api object keys that is not in the public API. Default: False
        - **timeout**: Timeout interval in seconds. Default: 10
        - **username**: Login username
        - **password**: Login password
        - **authenticate**: Do login on init
        - **cookie**: Saved cookie string from a previous session
        - **settings**: A dict of settings from a previous session
        - **on_login**: Callback after successful login
        - **proxy**: Specify a proxy ex: 'http://127.0.0.1:8888' (ALPHA)
        - **proxy_handler**: Specify your own proxy handler
    :return:
    """
    self.auto_patch = kwargs.pop('auto_patch', False)
    self.drop_incompat_keys = kwargs.pop('drop_incompat_keys', False)
    self.timeout = kwargs.pop('timeout', 10)
    self.username = kwargs.pop('username', None)
    self.password = kwargs.pop('password', None)
    self.authenticate = kwargs.pop('authenticate', False)
    self.on_login = kwargs.pop('on_login', None)
    user_settings = kwargs.pop('settings', None) or {}
    # Precedence: explicit argument > saved settings > class default.
    self.user_agent = user_agent or user_settings.get('user_agent') or self.USER_AGENT
    self.mobile_user_agent = (kwargs.pop('mobile_user_agent', None)
                              or user_settings.get('mobile_user_agent')
                              or self.MOBILE_USER_AGENT)
    # csrf token scraped from the init page html, used until a cookie provides one
    self.init_csrftoken = None
    self.rhx_gis = kwargs.pop('rhx_gis', None) or user_settings.get('rhx_gis')
    # '1' is a sentinel meaning "rollout hash not yet retrieved"; see _init_rollout_hash()
    self.rollout_hash = '1'
    cookie_string = kwargs.pop('cookie', None) or user_settings.get('cookie')
    cookie_jar = ClientCookieJar(cookie_string=cookie_string)
    # Refuse to reuse a saved session whose auth cookie has already expired.
    if cookie_string and cookie_jar.auth_expires and int(time.time()) >= cookie_jar.auth_expires:
        raise ClientCookieExpiredError('Cookie expired at {0!s}'.format(cookie_jar.auth_expires))
    cookie_handler = compat_urllib_request.HTTPCookieProcessor(cookie_jar)
    proxy_handler = kwargs.pop('proxy_handler', None)
    if not proxy_handler:
        proxy = kwargs.pop('proxy', None)
        if proxy:
            warnings.warn('Proxy support is alpha.', UserWarning)
            parsed_url = compat_urllib_parse_urlparse(proxy)
            if parsed_url.netloc and parsed_url.scheme:
                proxy_address = '{0!s}://{1!s}'.format(parsed_url.scheme, parsed_url.netloc)
                proxy_handler = compat_urllib_request.ProxyHandler({'https': proxy_address})
            else:
                raise ValueError('Invalid proxy argument: {0!s}'.format(proxy))
    handlers = []
    if proxy_handler:
        handlers.append(proxy_handler)
    custom_ssl_context = kwargs.pop('custom_ssl_context', None)
    try:
        https_handler = compat_urllib_request.HTTPSHandler(context=custom_ssl_context)
    except TypeError:
        # py version < 2.7.9: HTTPSHandler does not accept a context kwarg
        https_handler = compat_urllib_request.HTTPSHandler()
    handlers.extend([
        compat_urllib_request.HTTPHandler(),
        https_handler,
        cookie_handler])
    opener = compat_urllib_request.build_opener(*handlers)
    opener.cookie_jar = cookie_jar
    self.opener = opener
    self.logger = logger
    # Bootstrap csrf token / rhx_gis when the (possibly restored) jar has no csrf cookie.
    if not self.csrftoken:
        self.init()
    if not self.is_authenticated and self.authenticate and self.username and self.password:
        self.login()
@property
def cookie_jar(self):
    """The cookiejar attached to the urllib opener (set up in __init__)."""
    return self.opener.cookie_jar
def get_cookie_value(self, key):
    """Return the value of the first cookie whose name matches *key*
    (case-insensitively), or None when no such cookie exists."""
    wanted = key.lower()
    return next(
        (cookie.value for cookie in self.cookie_jar
         if cookie.name.lower() == wanted),
        None)
@property
def csrftoken(self):
    """The client's current csrf token: the csrftoken cookie when present,
    otherwise the token scraped during init()."""
    cookie_token = self.get_cookie_value('csrftoken')
    if cookie_token:
        return cookie_token
    return self.init_csrftoken
@property
def authenticated_user_id(self):
    """The current authenticated user id"""
    # Taken from the ds_user_id session cookie; None when not logged in.
    return self.get_cookie_value('ds_user_id')
@property
def authenticated_user_name(self):
    """The current authenticated user name. No longer available."""
    # The ds_user cookie is no longer set by the site; kept for
    # backward compatibility and flagged as deprecated.
    warnings.warn('No longer available.', DeprecationWarning)
    return self.get_cookie_value('ds_user')
@property
def is_authenticated(self):
    """True when an authenticated user id cookie is present."""
    return bool(self.authenticated_user_id)
@property
def settings(self):
    """Helper property that extracts the settings that you should cache
    in addition to username and password."""
    snapshot = {}
    snapshot['cookie'] = self.opener.cookie_jar.dump()
    snapshot['created_ts'] = int(time.time())
    snapshot['rhx_gis'] = self.rhx_gis
    snapshot['user_agent'] = self.user_agent
    return snapshot
@staticmethod
def _read_response(response):
"""
Extract the response body from a http response.
:param response:
:return:
"""
if response.info().get('Content-Encoding') == 'gzip':
buf = BytesIO(response.read())
res = gzip.GzipFile(fileobj=buf).read().decode('utf8')
else:
res = response.read().decode('utf8')
return res
def generate_request_signature(self, query, endpoint=None):
    """Compute the X-Instagram-GIS header value, md5("<rhx_gis>:<part>"),
    for graphql (signs the variables) or __a requests (signs the url path).
    Returns None when the request does not need signing or rhx_gis is unset."""
    if not self.rhx_gis:
        return None
    if query.get('query_hash') and query.get('variables'):
        signed_part = query.get('variables')
    elif '__a' in query and endpoint:
        signed_part = compat_urllib_parse_urlparse(endpoint).path
    else:
        return None
    digest = hashlib.md5()
    digest.update('{rhx_gis}:{variables}'.format(
        rhx_gis=self.rhx_gis,
        variables=signed_part
    ).encode('utf-8'))
    return digest.hexdigest()
def _make_request(self, url, params=None, headers=None, query=None,
                  return_response=False, get_method=None):
    """
    Calls the web API.
    :param url: fully formed api url
    :param params: post params
    :param headers: custom headers
    :param query: get url params
    :param return_response: bool flag to only return the http response object
    :param get_method: custom http method type
    :return:
    """
    if not headers:
        headers = {
            'User-Agent': self.user_agent,
            'Accept': '*/*',
            'Accept-Language': 'en-US',
            'Accept-Encoding': 'gzip, deflate',
            'Connection': 'close',
        }
        # params == '' still means "POST with an empty body", so the
        # ajax/POST headers are needed for it as well.
        if params or params == '':
            headers.update({
                'x-csrftoken': self.csrftoken,
                'x-requested-with': 'XMLHttpRequest',
                'x-instagram-ajax': self.rollout_hash,
                'Referer': 'https://www.instagram.com',
                'Authority': 'www.instagram.com',
                'Origin': 'https://www.instagram.com',
                'Content-Type': 'application/x-www-form-urlencoded'
            })
    if query:
        url += ('?' if '?' not in url else '&') + compat_urllib_parse.urlencode(query)
        # Sign the request (X-Instagram-GIS) when applicable.
        sig = self.generate_request_signature(query, url)
        if sig:
            headers['X-Instagram-GIS'] = sig
    req = compat_urllib_request.Request(url, headers=headers)
    if get_method:
        req.get_method = get_method
    data = None
    if params or params == '':
        if params == '':    # force post if empty string
            data = ''.encode('ascii')
        else:
            data = compat_urllib_parse.urlencode(params).encode('ascii')
    try:
        self.logger.debug('REQUEST: {0!s} {1!s}'.format(url, req.get_method()))
        self.logger.debug('REQ HEADERS: {0!s}'.format(
            ['{}: {}'.format(k, v) for k, v in headers.items()]
        ))
        self.logger.debug('REQ COOKIES: {0!s}'.format(
            ['{}: {}'.format(c.name, c.value) for c in self.cookie_jar]
        ))
        self.logger.debug('REQ DATA: {0!s}'.format(data))
        res = self.opener.open(req, data=data, timeout=self.timeout)
        self.logger.debug('RESPONSE: {0:d} {1!s}'.format(
            res.code, res.geturl()
        ))
        self.logger.debug('RES HEADERS: {0!s}'.format(
            [u'{}: {}'.format(k, v) for k, v in res.info().items()]
        ))
        if return_response:
            return res
        response_content = self._read_response(res)
        self.logger.debug('RES BODY: {0!s}'.format(response_content))
        return json.loads(response_content)
    except compat_urllib_error.HTTPError as e:
        # Map well-known http error codes onto the client exception hierarchy.
        msg = 'HTTPError "{0!s}" while opening {1!s}'.format(e.reason, url)
        if e.code == 400:
            raise ClientBadRequestError(msg, e.code)
        elif e.code == 403:
            raise ClientForbiddenError(msg, e.code)
        elif e.code == 429:
            raise ClientThrottledError(msg, e.code)
        raise ClientError(msg, e.code)
    except (SSLError, timeout, SocketError,
            compat_urllib_error.URLError,   # URLError is base of HTTPError
            compat_http_client.HTTPException,
            ConnectionError) as connection_error:
        raise ClientConnectionError('{} {}'.format(
            connection_error.__class__.__name__, str(connection_error)))
@staticmethod
def _sanitise_media_id(media_id):
"""The web API uses the numeric media ID only, and not the formatted one where it's XXXXX_YYY"""
if re.match(r'[0-9]+_[0-9]+', media_id): # endpoint uses the entirely numeric ID, not XXXX_YYY
media_id = media_id.split('_')[0]
return media_id
@staticmethod
def _extract_rhx_gis(html):
options = string.ascii_lowercase + string.digits
text = ''.join([random.choice(options) for _ in range(8)])
return hashlib.md5(text.encode())
@staticmethod
def _extract_csrftoken(html):
mobj = re.search(
r'"csrf_token":"(?P<csrf_token>[A-Za-z0-9]+)"', html, re.MULTILINE)
if mobj:
return mobj.group('csrf_token')
return None
@staticmethod
def _extract_rollout_hash(html):
mobj = re.search(
r'"rollout_hash":"(?P<rollout_hash>[A-Za-z0-9]+)"', html, re.MULTILINE)
if mobj:
return mobj.group('rollout_hash')
return None
def _init_rollout_hash(self):
    """Call before any POST call to make sure we get the rollout hash"""
    # '1' is the placeholder assigned in __init__.
    if self.rollout_hash == '1':
        # rollout hash not yet retrieved
        self.init()
def init(self):
    """Make a GET request to get the first csrf token and rhx_gis"""
    # try to emulate cookies consent
    self.cookie_jar.set_cookie(
        compat_cookiejar.Cookie(
            0, 'ig_cb', '1', None, False,
            'www.instagram.com', False, None, '/',
            False, False, None, True, None, None, {})
    )
    init_res = self._make_request(
        'https://www.instagram.com/', return_response=True, get_method=lambda: 'GET')
    init_res_content = self._read_response(init_res)
    self.logger.debug('RES BODY: {0!s}'.format(init_res_content))
    rhx_gis = self._extract_rhx_gis(init_res_content)
    self.rhx_gis = rhx_gis
    rollout_hash = self._extract_rollout_hash(init_res_content)
    if rollout_hash:
        self.rollout_hash = rollout_hash
    # Fall back to scraping the csrf token out of the page html
    # when the response set no csrftoken cookie.
    if not self.csrftoken:
        csrftoken = self._extract_csrftoken(init_res_content)
        self.init_csrftoken = csrftoken
    if not self.csrftoken:
        raise ClientError('Unable to get csrf from init request.')
    if not self.rhx_gis:
        raise ClientError('Unable to get rhx_gis from init request.')
    # required to avoid 403 when doing unauthenticated requests
    self.cookie_jar.set_cookie(
        compat_cookiejar.Cookie(
            0, 'ig_pr', '1', None, False,
            'www.instagram.com', False, None, '/',
            False, False, None, True, None, None, {})
    )
def login(self):
    """Login to the web site."""
    if not self.username or not self.password:
        raise ClientError('username/password is blank')
    self._init_rollout_hash()
    login_res = self._make_request(
        'https://www.instagram.com/accounts/login/ajax/',
        params={'username': self.username,
                'password': self.password,
                'queryParams': '{}'})
    if login_res.get('status', '') != 'ok' or not login_res.get('authenticated'):
        raise ClientLoginError('Unable to login')
    if self.on_login:
        self.on_login(self)
    return login_res
def user_info(self, user_id, **kwargs):    # pragma: no cover
    """
    OBSOLETE. Get user info.
    :param user_id: User id
    :param kwargs:
    :return:
    """
    warnings.warn(
        'This endpoint is obsolete. Do not use.', ClientDeprecationWarning)
    # Legacy ig_user() query against the old /query/ endpoint.
    params = {
        'q': 'ig_user({user_id}) {{id, username, full_name, profile_pic_url, '
             'biography, external_url, is_private, is_verified, '
             'media {{count}}, followed_by {{count}}, '
             'follows {{count}} }}'.format(**{'user_id': user_id}),
    }
    user = self._make_request(self.API_URL, params=params)
    if not user.get('id'):
        raise ClientError('Not Found', 404)
    if self.auto_patch:
        user = ClientCompatPatch.user(user, drop_incompat_keys=self.drop_incompat_keys)
    return user
def user_info2(self, user_name, **kwargs):
    """
    Get user info by user name (not the numeric ID).
    :param user_name: User name
    :return: the graphql user node
    """
    # For authed and unauthed clients, a "fresh" rhx_gis must be used
    endpoint = 'https://www.instagram.com/{username!s}/'.format(username=user_name)
    try:
        info = self._make_request(endpoint, query={'__a': '1'})
    except ClientError as ce:
        if ce.code != 403:
            raise
        # reinit to get a fresh rhx_gis, then retry once
        self.init()
        info = self._make_request(endpoint, query={'__a': '1'})
    user_node = info['graphql']['user']
    if self.auto_patch:
        ClientCompatPatch.user(user_node, drop_incompat_keys=self.drop_incompat_keys)
    return user_node
def user_feed(self, user_id, **kwargs):
    """
    Get a user's media feed.
    :param user_id:
    :param kwargs:
        - **count**: Number of items to return. Default: 12 (max 50)
        - **end_cursor**: For pagination (end_cursor of the previous page)
        - **extract**: bool. Return a simple list of media. Default: True
    :return:
    """
    count = kwargs.pop('count', 12)
    if count > 50:
        raise ValueError('count cannot be greater than 50')
    gql_vars = {
        'id': user_id,
        'first': int(count),
    }
    cursor = kwargs.pop('end_cursor', None) or kwargs.pop('max_id', None)
    if cursor:
        gql_vars['after'] = cursor
    info = self._make_request(self.GRAPHQL_API_URL, query={
        'query_hash': 'e7e2f4da4b02303f74f0841279e52d76',
        'variables': json.dumps(gql_vars, separators=(',', ':')),
    })
    user_data = info.get('data', {}).get('user') or {}
    timeline = user_data.get('edge_owner_to_timeline_media', {})
    if not user_data or not timeline.get('count', 0):
        # non-existent accounts do not return media at all
        # private accounts return media with just a count, no nodes
        raise ClientError('Not Found', 404)
    edges = timeline.get('edges', [])
    if self.auto_patch:
        for media in edges:
            ClientCompatPatch.media(media['node'], drop_incompat_keys=self.drop_incompat_keys)
    if kwargs.pop('extract', True):
        return edges
    return info
def media_info(self, short_code, **kwargs):    # pragma: no cover
    """
    OBSOLETE. Get media info. Does not properly extract carousel media.
    :param short_code: A media's shortcode
    :param kwargs:
    :return:
    """
    warnings.warn(
        'This endpoint is obsolete. Do not use.', ClientDeprecationWarning)
    # Legacy ig_shortcode() query against the old /query/ endpoint.
    params = {
        'q': 'ig_shortcode({media_code}) {{ caption, code, comments {{count}}, date, '
             'dimensions {{height, width}}, comments_disabled, '
             'usertags {{nodes {{x, y, user {{id, username, full_name, profile_pic_url}} }} }}, '
             'location {{id, name, lat, lng}}, display_src, id, is_video, is_ad, '
             'likes {{count}}, owner {{id, username, full_name, profile_pic_url, '
             'is_private, is_verified}}, __typename, '
             'thumbnail_src, video_views, video_url }}'.format(
                 **{'media_code': short_code})
    }
    media = self._make_request(self.API_URL, params=params)
    if not media.get('code'):
        raise ClientError('Not Found', 404)
    if self.auto_patch:
        media = ClientCompatPatch.media(media, drop_incompat_keys=self.drop_incompat_keys)
    return media
def media_info2(self, short_code):
    """
    Alternative method to get media info; this one also handles
    carousel media correctly.
    :param short_code: A media's shortcode
    :return: the shortcode_media node
    """
    request_headers = {
        'User-Agent': self.user_agent,
        'Accept': '*/*',
        'Accept-Language': 'en-US',
        'Accept-Encoding': 'gzip, deflate',
        'Connection': 'close',
        'Referer': 'https://www.instagram.com',
        'x-requested-with': 'XMLHttpRequest',
    }
    info = self._make_request(
        'https://www.instagram.com/p/{0!s}/'.format(short_code),
        query={'__a': '1', '__b': '1'},
        headers=request_headers)
    media = info.get('graphql', {}).get('shortcode_media', {})
    if not self.auto_patch:
        return media
    return ClientCompatPatch.media(media, drop_incompat_keys=self.drop_incompat_keys)
def media_comments(self, short_code, **kwargs):
    """
    Get media comments.
    :param short_code:
    :param kwargs:
        - **count**: Number of comments to return. Default: 16. Maximum: 50
        - **end_cursor**: For pagination
        - **extract**: bool. Return a simple list of comments. Default: True
    :return:
    """
    count = kwargs.pop('count', 16)
    if count > 50:
        raise ValueError('count cannot be greater than 50')
    gql_vars = {
        'shortcode': short_code,
        'first': int(count)
    }
    cursor = kwargs.pop('end_cursor', None)
    if cursor:
        gql_vars['after'] = cursor
    info = self._make_request(self.GRAPHQL_API_URL, query={
        'query_hash': 'f0986789a5c5d17c2400faebf16efd0d',
        'variables': json.dumps(gql_vars, separators=(',', ':')),
    })
    if not info.get('data', {}).get('shortcode_media'):
        # deleted media does not return 'comments' at all
        # media without comments will return comments, with counts = 0, nodes = [], etc
        raise ClientError('Not Found', 404)
    edges = info.get('data', {}).get('shortcode_media', {}).get(
        'edge_media_to_comment', {}).get('edges', [])
    if self.auto_patch:
        for edge in edges:
            ClientCompatPatch.comment(edge['node'], drop_incompat_keys=self.drop_incompat_keys)
    if kwargs.pop('extract', True):
        return [edge['node'] for edge in edges]
    return info
@login_required
def media_likers(self, short_code, **kwargs):
    """
    Get media likers. Login required.
    :param short_code:
    :param kwargs:
        - **count**: Number of likers. Default: 24 (first page) / 12 (paginated). Maximum: 50
        - **end_cursor**: For pagination
        - **extract**: bool. Return a simple list of likers. Default: True
    :return:
    """
    cursor = kwargs.pop('end_cursor', None)
    # request 24 by default for the first page
    count = kwargs.pop('count', 12 if cursor else 24)
    if count > 50:
        raise ValueError('count cannot be greater than 50')
    gql_vars = {
        'shortcode': short_code,
        'first': int(count)
    }
    if cursor:
        gql_vars['after'] = cursor
    info = self._make_request(self.GRAPHQL_API_URL, query={
        'query_hash': 'e0f59e4a1c8d78d0161873bc2ee7ec44',
        'variables': json.dumps(gql_vars, separators=(',', ':')),
    })
    if not info.get('data', {}).get('shortcode_media'):
        # deleted media does not return 'likers' at all
        # media without likes will return likes, with counts = 0, nodes = [], etc
        raise ClientError('Not Found', 404)
    if kwargs.pop('extract', True):
        return [edge['node'] for edge in info.get('data', {}).get('shortcode_media', {}).get(
            'edge_liked_by', {}).get('edges', [])]
    return info
@login_required
def user_following(self, user_id, **kwargs):
    """
    Get user's followings. Login required.
    :param user_id: User id of account
    :param kwargs:
        - **count**: Number of followings. Default: 10. Maximum: 50
        - **end_cursor**: For pagination
        - **extract**: bool. Return a simple list of users. Default: True
    :return:
    """
    count = kwargs.pop('count', 10)
    if count > 50:
        raise ValueError('count cannot be greater than 50')
    gql_vars = {
        'id': user_id,
        'first': int(count)
    }
    cursor = kwargs.pop('end_cursor', None)
    if cursor:
        gql_vars['after'] = cursor
    info = self._make_request(self.GRAPHQL_API_URL, query={
        'query_hash': 'c56ee0ae1f89cdbd1c89e2bc6b8f3d18',
        'variables': json.dumps(gql_vars, separators=(',', ':')),
    })
    edges = info.get('data', {}).get('user', {}).get(
        'edge_follow', {}).get('edges', [])
    if self.auto_patch:
        for edge in edges:
            ClientCompatPatch.list_user(edge['node'], drop_incompat_keys=self.drop_incompat_keys)
    if kwargs.pop('extract', True):
        return [edge['node'] for edge in edges]
    return info
@login_required
def user_followers(self, user_id, **kwargs):
    """
    Get a user's followers. Login required.
    :param user_id: User id of account
    :param kwargs:
        - **count**: Number of followers. Default: 10. Maximum: 50
        - **end_cursor**: For pagination
        - **extract**: bool. Return a simple list of users. Default: True
    :return:
    """
    count = kwargs.pop('count', 10)
    if count > 50:
        raise ValueError('count cannot be greater than 50')
    gql_vars = {
        'id': user_id,
        'first': int(count)
    }
    cursor = kwargs.pop('end_cursor', None)
    if cursor:
        gql_vars['after'] = cursor
    info = self._make_request(self.GRAPHQL_API_URL, query={
        'query_hash': '7dd9a7e2160524fd85f50317462cff9f',
        'variables': json.dumps(gql_vars, separators=(',', ':')),
    })
    edges = info.get('data', {}).get('user', {}).get(
        'edge_followed_by', {}).get('edges', [])
    if self.auto_patch:
        for edge in edges:
            ClientCompatPatch.list_user(edge['node'], drop_incompat_keys=self.drop_incompat_keys)
    if kwargs.pop('extract', True):
        return [edge['node'] for edge in edges]
    return info
@login_required
def post_like(self, media_id):
    """
    Like a media. Login required.
    :param media_id: Media id
    :return:
        .. code-block:: javascript

            {"status": "ok"}
    """
    endpoint = 'https://www.instagram.com/web/likes/{media_id!s}/like/'.format(
        media_id=self._sanitise_media_id(media_id))
    self._init_rollout_hash()
    return self._make_request(endpoint, params='')
@login_required
def delete_like(self, media_id):
    """
    Unlike a media. Login required.
    :param media_id: Media id
    :return:
        .. code-block:: javascript

            {"status": "ok"}
    """
    endpoint = 'https://www.instagram.com/web/likes/{media_id!s}/unlike/'.format(
        media_id=self._sanitise_media_id(media_id))
    self._init_rollout_hash()
    return self._make_request(endpoint, params='')
@login_required
def delete_media(self, media_id):
    """
    Delete a media post. Login required.
    :param media_id: Media id
    :return:
        .. code-block:: javascript

            {"did_delete": true, "status": "ok"}
    """
    endpoint = 'https://www.instagram.com/create/{media_id!s}/delete/'.format(
        media_id=self._sanitise_media_id(media_id))
    self._init_rollout_hash()
    return self._make_request(endpoint, params='')
@login_required
def friendships_create(self, user_id):
    """
    Follow a user. Login required.
    :param user_id: User id
    :return:
        .. code-block:: javascript

            {"status": "ok", "result": "following"}
    """
    endpoint = 'https://www.instagram.com/web/friendships/{user_id!s}/follow/'.format(
        user_id=user_id)
    self._init_rollout_hash()
    return self._make_request(endpoint, params='')
@login_required
def friendships_destroy(self, user_id):
    """
    Unfollow a user. Login required.
    :param user_id:
    :return:
        .. code-block:: javascript

            {"status": "ok"}
    """
    endpoint = 'https://www.instagram.com/web/friendships/{user_id!s}/unfollow/'.format(
        user_id=user_id)
    self._init_rollout_hash()
    return self._make_request(endpoint, params='')
@login_required
def post_comment(self, media_id, comment_text):
    """
    Post a new comment. Login required.
    :param media_id: Media id (all numeric format, without _userid)
    :param comment_text: Comment text
    :return: dict with the new comment's id, text, timestamp and author
    """
    # Mirror the web client's comment restrictions before hitting the API.
    if len(comment_text) > 300:
        raise ValueError('The total length of the comment cannot exceed 300 characters.')
    if re.search(r'[a-z]+', comment_text, re.IGNORECASE) and comment_text == comment_text.upper():
        raise ValueError('The comment cannot consist of all capital letters.')
    if len(re.findall(r'#[^#]+\b', comment_text, re.UNICODE | re.MULTILINE)) > 4:
        raise ValueError('The comment cannot contain more than 4 hashtags.')
    if len(re.findall(r'\bhttps?://\S+\.\S+', comment_text)) > 1:
        raise ValueError('The comment cannot contain more than 1 URL.')
    endpoint = 'https://www.instagram.com/web/comments/{media_id!s}/add/'.format(
        media_id=self._sanitise_media_id(media_id))
    self._init_rollout_hash()
    return self._make_request(endpoint, params={'comment_text': comment_text})
@login_required
def delete_comment(self, media_id, comment_id):
    """
    Delete a comment. Login required.
    :param media_id: Media id
    :param comment_id: Comment id
    :return:
        .. code-block:: javascript

            {"status": "ok"}
    """
    endpoint = 'https://www.instagram.com/web/comments/{media_id!s}/delete/{comment_id!s}/'.format(
        media_id=self._sanitise_media_id(media_id), comment_id=comment_id)
    self._init_rollout_hash()
    return self._make_request(endpoint, params='')
def search(self, query_text):
    """
    General search for users, tags and places.
    :param query_text: Search text
    :return:
    """
    res = self._make_request(
        'https://www.instagram.com/web/search/topsearch/',
        query={'query': query_text})
    if self.auto_patch:
        for user_entry in res.get('users', []):
            ClientCompatPatch.list_user(user_entry['user'])
    return res
@login_required
def post_photo(self, photo_data, caption=''):
    """
    Post a photo. Two-step flow: multipart upload, then a configure call.
    :param photo_data: byte string of the image
    :param caption: caption text
    """
    warnings.warn('This endpoint has not been fully tested.', UserWarning)
    self._init_rollout_hash()
    # Millisecond timestamp doubles as the upload id.
    upload_id = int(time.time() * 1000)
    boundary = '----WebKitFormBoundary{}'.format(
        ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(16)))
    fields = [
        ('upload_id', upload_id),
        ('media_type', 1),
    ]
    files = [
        ('photo', 'photo.jpg', 'application/octet-stream', photo_data)
    ]
    content_type, body = MultipartFormDataEncoder(boundary=boundary).encode(
        fields, files)
    # Upload uses the mobile UA; the web UA is rejected by this endpoint.
    headers = {
        'User-Agent': self.mobile_user_agent,
        'Accept': '*/*',
        'Accept-Language': 'en-US',
        'Accept-Encoding': 'gzip, deflate',
        'Connection': 'close',
        'x-csrftoken': self.csrftoken,
        'x-requested-with': 'XMLHttpRequest',
        'x-instagram-ajax': self.rollout_hash,
        'Origin': 'https://www.instagram.com',
        'Referer': 'https://www.instagram.com/create/crop/',
        'Content-Type': content_type,
        'Content-Length': len(body)
    }
    endpoint = 'https://www.instagram.com/create/upload/photo/'
    req = compat_urllib_request.Request(endpoint, body, headers=headers)
    self.logger.debug('REQUEST: {0!s}'.format(endpoint))
    try:
        res = self.opener.open(req, timeout=self.timeout)
        response_content = self._read_response(res)
        self.logger.debug('RESPONSE: {0!s}'.format(response_content))
        upload_res = json.loads(response_content)
        if upload_res.get('status', '') != 'ok':
            raise ClientError('Upload status: {}'.format(upload_res.get('status', '')))
        # Use the server-assigned upload id for the configure step.
        upload_id = upload_res['upload_id']
        # Second step: configure the uploaded photo (caption, etc) as a
        # regular form POST; drop the multipart-specific headers.
        headers['Referer'] = 'https://www.instagram.com/create/details/'
        headers['Content-Type'] = 'application/x-www-form-urlencoded'
        del headers['Content-Length']
        endpoint = 'https://www.instagram.com/create/configure/'
        res = self._make_request(
            endpoint, headers=headers,
            params={
                'upload_id': upload_id,
                'caption': caption,
                'retry_timeout': '',
                'custom_accessibility_caption': '',
            },
            get_method=lambda: 'POST')
        return res
    except compat_urllib_error.HTTPError as e:
        raise ClientError('HTTPError "{0!s}" while opening {1!s}'.format(e.reason, endpoint), e.code)
def tag_feed(self, tag, **kwargs):
    """
    Get a hashtag feed.
    :param tag:
    :param kwargs:
        - **count**: Number of records to return (max 50)
        - **end_cursor**: For pagination
    :return:
    """
    count = kwargs.pop('count', 16)
    if count > 50:
        raise ValueError('count cannot be greater than 50')
    gql_vars = {
        'tag_name': tag.lower(),
        'first': int(count),
        'show_ranked': False,
    }
    cursor = kwargs.pop('end_cursor', None) or kwargs.pop('max_id', None)
    if cursor:
        gql_vars['after'] = cursor
    return self._make_request(self.GRAPHQL_API_URL, query={
        'query_hash': 'f92f56d47dc7a55b606908374b43a314',
        'variables': json.dumps(gql_vars, separators=(',', ':')),
    })
def location_feed(self, location_id, **kwargs):
    """
    Get a location feed.
    :param location_id:
    :param kwargs:
        - **count**: Number of records to return (max 50)
        - **end_cursor**: For pagination
    :return:
    """
    count = kwargs.pop('count', 16)
    if count > 50:
        raise ValueError('count cannot be greater than 50')
    gql_vars = {
        'id': location_id,
        'first': int(count)
    }
    cursor = kwargs.pop('end_cursor', None) or kwargs.pop('max_id', None)
    if cursor:
        gql_vars['after'] = cursor
    return self._make_request(self.GRAPHQL_API_URL, query={
        'query_hash': '1b84447a4d8b6d6d0426fefb34514485',
        'variables': json.dumps(gql_vars, separators=(',', ':')),
    })
@login_required
def timeline_feed(self, **kwargs):
    """
    Get the logged-in user's timeline feed. Login required.
    :param kwargs:
        - **count**: Number of records to return (max 50)
        - **end_cursor**: For pagination
        - **fetch_comment_count** / **fetch_like** / **has_stories**:
          graphql tuning knobs with server-side defaults mirrored here
    """
    item_count = int(kwargs.pop('count', 12))
    if item_count > 50:
        raise ValueError('count cannot be greater than 50')
    gql_vars = {
        'fetch_media_item_count': item_count,
        'fetch_comment_count': int(kwargs.pop('fetch_comment_count', 4)),
        'fetch_like': int(kwargs.pop('fetch_like', 10)),
        'has_stories': bool(kwargs.pop('has_stories', False)),
    }
    cursor = kwargs.pop('end_cursor', None) or kwargs.pop('max_id', None)
    if cursor:
        gql_vars['fetch_media_item_cursor'] = cursor
    return self._make_request(self.GRAPHQL_API_URL, query={
        'query_hash': '3f01472fb28fb8aca9ad9dbc9d4578ff',
        'variables': json.dumps(gql_vars, separators=(',', ':')),
    })
@login_required
def reels_tray(self):
    """Get the logged-in user's reels tray. Login required."""
    tray_query = {
        'query_hash': '60b755363b5c230111347a7a4e242001',
        'variables': json.dumps({'only_stories': False}, separators=(',', ':'))
    }
    return self._make_request(self.GRAPHQL_API_URL, query=tray_query)
def _story_feed(self, reel_ids=None, tag_names=None, location_ids=None):
    """
    Fetch stories for the given reel user IDs, tag names and/or location IDs.

    :param reel_ids: list of reel user IDs (optional)
    :param tag_names: list of tag names (optional)
    :param location_ids: list of location IDs (optional)
    :return: graphql response dict
    """
    # Bug fix: the previous signature used mutable default arguments
    # ([]), which are shared across calls; None sentinels are used
    # instead and mapped to fresh empty lists here.
    variables = {
        'reel_ids': reel_ids if reel_ids is not None else [],
        'tag_names': tag_names if tag_names is not None else [],
        'location_ids': location_ids if location_ids is not None else [],
        'precomposed_overlay': False,
        'show_story_viewer_list': True,
        'story_viewer_fetch_count': 50,
        'story_viewer_cursor': '',
    }
    query = {
        'query_hash': 'eb1918431e946dd39bf8cf8fb870e426',
        'variables': json.dumps(variables, separators=(',', ':'))
    }
    return self._make_request(self.GRAPHQL_API_URL, query=query)
@login_required
def reels_feed(self, reel_ids, **kwargs):
    """
    Get the stories feed for the specified user IDs. Login required.
    :param reel_ids: List of reel user IDs
    """
    tag_names = kwargs.pop('tag_names', [])
    location_ids = kwargs.pop('location_ids', [])
    return self._story_feed(
        reel_ids=reel_ids, tag_names=tag_names, location_ids=location_ids)
@login_required
def highlight_reels(self, user_id):
    """
    Get the highlight reels for the specified user ID. Login required.
    :param user_id:
    """
    gql_vars = {
        'user_id': user_id,
        'include_chaining': True,
        'include_reel': True,
        'include_suggested_users': False,
        'include_logged_out_extras': False,
        'include_highlight_reels': True,
    }
    return self._make_request(self.GRAPHQL_API_URL, query={
        'query_hash': '7c16654f22c819fb63d1183034a5162f',
        'variables': json.dumps(gql_vars, separators=(',', ':')),
    })
def highlight_reel_media(self, highlight_reel_ids):
    """
    Get story media for the specified highlight reel IDs.

    :param highlight_reel_ids: list of highlight reel IDs
    """
    serialized_vars = json.dumps(
        {
            'highlight_reel_ids': highlight_reel_ids,
            'reel_ids': [],
            'location_ids': [],
            'precomposed_overlay': False,
        },
        separators=(',', ':'))
    return self._make_request(
        self.GRAPHQL_API_URL,
        query={
            'query_hash': '45246d3fe16ccc6577e0bd297a5db1ab',
            'variables': serialized_vars,
        })
def tagged_user_feed(self, user_id, **kwargs):
    """
    Get the "photos of you" (tagged) feed for the specified user ID.

    :param user_id: numeric user ID
    :param kwargs:
        - **count**: page size, at most 50 (default 12)
        - **end_cursor** (or legacy **max_id**): pagination cursor
    :raises ClientError: 404 when the account does not exist
    """
    page_size = kwargs.pop('count', 12)
    if page_size > 50:
        raise ValueError('count cannot be greater than 50')
    cursor = kwargs.pop('end_cursor', None) or kwargs.pop('max_id', None)
    variables = {
        'id': user_id,
        'first': int(page_size),
    }
    if cursor:
        variables['after'] = cursor
    info = self._make_request(
        self.GRAPHQL_API_URL,
        query={
            'query_hash': 'ff260833edf142911047af6024eb634a',
            'variables': json.dumps(variables, separators=(',', ':')),
        })
    if not info.get('data', {}).get('user'):
        # Non-existent accounts return no media at all; private accounts
        # return media with just a count and no nodes.
        raise ClientError('Not Found', 404)
    if self.auto_patch:
        edges = info.get('data', {}).get('user', {}).get(
            'edge_user_to_photos_of_you', {}).get('edges', [])
        for edge in edges:
            ClientCompatPatch.media(
                edge['node'], drop_incompat_keys=self.drop_incompat_keys)
    return info
def tag_story_feed(self, tag, **kwargs):
    """
    Get the stories feed for the specified hashtag.

    :param tag: hashtag name (without the leading '#')
    """
    # Fixed: the docstring previously documented ':param location_id:'
    # (copy-paste from location_story_feed); the parameter is the tag name.
    return self._story_feed(tag_names=[tag])
def location_story_feed(self, location_id, **kwargs):
    """
    Get the stories feed for the specified location ID.

    :param location_id: numeric location ID
    """
    return self._story_feed(location_ids=[location_id])
|
{"/instagram_web_api/client.py": ["/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/web/__init__.py": ["/tests/web/client.py", "/tests/web/media.py", "/tests/web/user.py", "/tests/web/upload.py", "/tests/web/feed.py", "/tests/web/unauthenticated.py", "/tests/web/compatpatch.py"], "/instagram_web_api/__init__.py": ["/instagram_web_api/client.py", "/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/test_web_api.py": ["/tests/web/__init__.py"]}
|
26,947
|
eracle/instagram_private_api
|
refs/heads/master
|
/tests/web/client.py
|
from ..common import (
WebApiTestBase, WebClientError as ClientError,
WebClientLoginError as ClientLoginError,
WebClient as Client,
compat_mock, compat_urllib_error
)
class ClientTests(WebApiTestBase):
    """Tests for client related functions."""

    @staticmethod
    def init_all(api):
        # Enumerate this suite's test cases; entries flagged with
        # 'require_auth' need a logged-in client and are skipped otherwise.
        return [
            {
                'name': 'test_search',
                'test': ClientTests('test_search', api),
            },
            {
                'name': 'test_client_properties',
                'test': ClientTests('test_client_properties', api),
                'require_auth': True,
            },
            {
                'name': 'test_client_errors',
                'test': ClientTests('test_client_errors', api)
            },
            {
                'name': 'test_client_init',
                'test': ClientTests('test_client_init', api)
            },
            {
                'name': 'test_login_mock',
                'test': ClientTests('test_login_mock', api)
            },
            {
                'name': 'test_unauthed_client',
                'test': ClientTests('test_unauthed_client', api)
            }
        ]

    @compat_mock.patch('instagram_web_api.Client._make_request')
    def test_login_mock(self, make_request):
        """login() succeeds on an ok/authenticated response and raises
        ClientLoginError on a failed one (mocked, no network)."""
        # First mocked response: successful login; second: failure.
        make_request.side_effect = [
            {'status': 'ok', 'authenticated': 'x'},
            {'status': 'fail'}
        ]
        # The on_login callback should receive a non-None client instance.
        self.api.on_login = lambda x: self.assertIsNotNone(x)
        self.api.login()
        self.api.on_login = None
        make_request.assert_called_with(
            'https://www.instagram.com/accounts/login/ajax/',
            params={
                'username': self.api.username,
                'password': self.api.password,
                'queryParams': '{}'})
        with self.assertRaises(ClientLoginError):
            self.api.login()

    def test_search(self):
        """search() returns 'users' and 'hashtags' result lists."""
        results = self.api.search('maru')
        self.assertGreaterEqual(len(results['users']), 0)
        self.assertGreaterEqual(len(results['hashtags']), 0)

    def test_client_properties(self):
        """An authenticated client exposes csrftoken, the logged-in user id,
        persisted settings and a dumpable cookie jar."""
        self.sleep_interval = 0
        self.assertIsNotNone(self.api.csrftoken)
        self.assertIsNotNone(self.api.authenticated_user_id)
        self.assertTrue(self.api.is_authenticated)
        settings = self.api.settings
        for k in ('cookie', 'created_ts'):
            self.assertIsNotNone(settings.get(k))
        self.assertIsNotNone(self.api.cookie_jar.dump())

    @compat_mock.patch('instagram_web_api.client.compat_urllib_request.OpenerDirector.open')
    def test_client_errors(self, open_mock):
        """HTTP errors and URL errors from the opener both surface as
        ClientError (mocked)."""
        self.sleep_interval = 0
        open_mock.side_effect = [
            compat_urllib_error.HTTPError('', 404, 'Not Found', None, None),
            compat_urllib_error.URLError('No route to host')]
        with self.assertRaises(ClientError):
            self.api.search('maru')
        with self.assertRaises(ClientError):
            self.api.search('maru')

    @compat_mock.patch('instagram_web_api.Client.csrftoken',
                       new_callable=compat_mock.PropertyMock, return_value=None)
    def test_client_init(self, csrftoken):
        """init() raises ClientError when no csrf token is available (mocked)."""
        with self.assertRaises(ClientError):
            self.api.init()

    def test_unauthed_client(self):
        """Authentication-required methods raise ClientError on a fresh,
        unauthenticated client."""
        api = Client()
        self.assertFalse(api.is_authenticated)
        with self.assertRaises(ClientError):
            # Test authenticated method
            api.user_following(self.test_user_id)
|
{"/instagram_web_api/client.py": ["/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/web/__init__.py": ["/tests/web/client.py", "/tests/web/media.py", "/tests/web/user.py", "/tests/web/upload.py", "/tests/web/feed.py", "/tests/web/unauthenticated.py", "/tests/web/compatpatch.py"], "/instagram_web_api/__init__.py": ["/instagram_web_api/client.py", "/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/test_web_api.py": ["/tests/web/__init__.py"]}
|
26,948
|
eracle/instagram_private_api
|
refs/heads/master
|
/tests/web/media.py
|
import unittest
from ..common import WebApiTestBase, WebClientError as ClientError, compat_mock
class MediaTests(WebApiTestBase):
    """Tests for media related functions."""

    @staticmethod
    def init_all(api):
        # Enumerate this suite's test cases; entries flagged with
        # 'require_auth' need a logged-in client and are skipped otherwise.
        return [
            {
                'name': 'test_media_info',
                'test': MediaTests('test_media_info', api),
            },
            {
                'name': 'test_notfound_media_info',
                'test': MediaTests('test_notfound_media_info', api)
            },
            {
                'name': 'test_media_comments',
                'test': MediaTests('test_media_comments', api),
            },
            {
                'name': 'test_notfound_media_comments',
                'test': MediaTests('test_notfound_media_comments', api)
            },
            {
                'name': 'test_media_comments_noextract',
                'test': MediaTests('test_media_comments_noextract', api)
            },
            {
                'name': 'test_post_comment',
                'test': MediaTests('test_post_comment', api),
                'require_auth': True,
            },
            {
                'name': 'test_post_comment_mock',
                'test': MediaTests('test_post_comment_mock', api),
                'require_auth': True,
            },
            {
                'name': 'test_del_comment',
                'test': MediaTests('test_del_comment', api),
                'require_auth': True,
            },
            {
                'name': 'test_del_comment_mock',
                'test': MediaTests('test_del_comment_mock', api),
                'require_auth': True,
            },
            {
                'name': 'test_post_like',
                'test': MediaTests('test_post_like', api),
                'require_auth': True,
            },
            {
                'name': 'test_post_like_mock',
                'test': MediaTests('test_post_like_mock', api),
            },
            {
                'name': 'test_delete_like',
                'test': MediaTests('test_delete_like', api),
                'require_auth': True,
            },
            {
                'name': 'test_delete_like_mock',
                'test': MediaTests('test_delete_like_mock', api),
            },
            {
                'name': 'test_carousel_media_info',
                'test': MediaTests('test_carousel_media_info', api),
            },
            {
                'name': 'test_post_comment_validation_mock',
                'test': MediaTests('test_post_comment_validation_mock', api),
            },
            {
                'name': 'test_media_likers',
                'test': MediaTests('test_media_likers', api),
            },
            {
                'name': 'test_notfound_media_likers',
                'test': MediaTests('test_notfound_media_likers', api),
            },
            {
                'name': 'test_media_likers_noextract',
                'test': MediaTests('test_media_likers_noextract', api),
            },
        ]

    @unittest.skip('Deprecated.')
    def test_media_info(self):
        """media_info() returns status/link/images for a shortcode (deprecated)."""
        results = self.api.media_info(self.test_media_shortcode)
        self.assertEqual(results.get('status'), 'ok')
        self.assertIsNotNone(results.get('link'))
        self.assertIsNotNone(results.get('images'))

    @unittest.skip('Deprecated.')
    def test_notfound_media_info(self):
        """media_info() raises ClientError on a non-existent shortcode (deprecated)."""
        self.assertRaises(ClientError, lambda: self.api.media_info('BSgmaRDg-xX'))

    def test_carousel_media_info(self):
        """media_info2() on a carousel post returns link/type/images and counts."""
        results = self.api.media_info2('BQ0eAlwhDrw')
        self.assertIsNotNone(results.get('link'))
        self.assertIsNotNone(results.get('type'))
        self.assertIsNotNone(results.get('images'))
        # Check like and comment counts are returned
        self.assertGreater(results.get('likes', {}).get('count', 0), 0)
        self.assertGreater(results.get('comments', {}).get('count', 0), 0)

    def test_media_comments(self):
        """media_comments() extracts comments into a list of dicts by default."""
        results = self.api.media_comments(self.test_media_shortcode, count=20)
        self.assertGreaterEqual(len(results), 0)
        self.assertIsInstance(results, list)
        self.assertIsInstance(results[0], dict)

    def test_notfound_media_comments(self):
        """media_comments() raises ClientError on a non-existent shortcode."""
        self.assertRaises(ClientError, lambda: self.api.media_comments('BSgmaRDg-xX'))

    def test_media_comments_noextract(self):
        """With extract=False media_comments() returns the raw response dict."""
        results = self.api.media_comments(self.test_media_shortcode, count=20, extract=False)
        self.assertIsInstance(results, dict)

    def test_media_likers(self):
        """media_likers() extracts likers into a list of dicts by default."""
        results = self.api.media_likers(self.test_media_shortcode, count=20)
        self.assertGreaterEqual(len(results), 0)
        self.assertIsInstance(results, list)
        self.assertIsInstance(results[0], dict)

    def test_notfound_media_likers(self):
        """media_likers() raises ClientError on a non-existent shortcode."""
        self.assertRaises(ClientError, lambda: self.api.media_likers('BSgmaRDg-xX'))

    def test_media_likers_noextract(self):
        """With extract=False media_likers() returns the raw response dict."""
        results = self.api.media_likers(self.test_media_shortcode, count=20, extract=False)
        self.assertIsInstance(results, dict)

    @unittest.skip('Modifies data.')
    def test_post_comment(self):
        """post_comment() returns ok status and the new comment id (live call)."""
        results = self.api.post_comment(self.test_media_id, '<3')
        self.assertEqual(results.get('status'), 'ok')
        self.assertIsNotNone(results.get('id'))

    @compat_mock.patch('instagram_web_api.Client._make_request')
    def test_post_comment_mock(self, make_request):
        """post_comment() posts to the add-comment endpoint and strips any
        '_<userid>' suffix from the media id (mocked)."""
        make_request.return_value = {'status': 'ok', 'id': '12345678'}
        self.api.post_comment(self.test_media_id + '_12345', '<3')    # test sanitise media id
        make_request.assert_called_with(
            'https://www.instagram.com/web/comments/{media_id!s}/add/'.format(
                **{'media_id': self.test_media_id}),
            params={'comment_text': '<3'})

    @unittest.skip('Modifies data / Needs actual data.')
    def test_del_comment(self):
        """delete_comment() returns ok status (live call)."""
        results = self.api.delete_comment(self.test_media_id, self.test_comment_id)
        self.assertEqual(results.get('status'), 'ok')

    @compat_mock.patch('instagram_web_api.Client._make_request')
    def test_del_comment_mock(self, make_request):
        """delete_comment() posts to the delete-comment endpoint (mocked)."""
        make_request.return_value = {'status': 'ok'}
        self.api.delete_comment(self.test_media_id, self.test_comment_id)
        make_request.assert_called_with(
            'https://www.instagram.com/web/comments/{media_id!s}/delete/{comment_id!s}/'.format(
                **{'media_id': self.test_media_id, 'comment_id': self.test_comment_id}),
            params='')

    @unittest.skip('Modifies data')
    def test_post_like(self):
        """post_like() returns ok status (live call)."""
        results = self.api.post_like(self.test_media_id)
        self.assertEqual(results.get('status'), 'ok')

    @compat_mock.patch('instagram_web_api.Client._make_request')
    def test_post_like_mock(self, make_request):
        """post_like() posts to the like endpoint (mocked)."""
        make_request.return_value = {'status': 'ok'}
        self.api.post_like(self.test_media_id)
        make_request.assert_called_with(
            'https://www.instagram.com/web/likes/{media_id!s}/like/'.format(
                **{'media_id': self.test_media_id}),
            params='')

    @unittest.skip('Modifies data')
    def test_delete_like(self):
        """delete_like() returns ok status (live call)."""
        results = self.api.delete_like(self.test_media_id)
        self.assertEqual(results.get('status'), 'ok')

    @compat_mock.patch('instagram_web_api.Client._make_request')
    def test_delete_like_mock(self, make_request):
        """delete_like() posts to the unlike endpoint (mocked)."""
        make_request.return_value = {'status': 'ok'}
        self.api.delete_like(self.test_media_id)
        make_request.assert_called_with(
            'https://www.instagram.com/web/likes/{media_id!s}/unlike/'.format(
                **{'media_id': self.test_media_id}),
            params='')

    @compat_mock.patch('instagram_web_api.Client._make_request')
    def test_post_comment_validation_mock(self, make_request):
        """post_comment() rejects comments that are too long, all caps,
        over-hashtagged or contain more than one URL (mocked)."""
        make_request.return_value = {'status': 'ok', 'id': '12345678'}
        with self.assertRaises(ValueError) as ve:
            self.api.post_comment(self.test_media_id, '.' * 400)
        self.assertEqual(str(ve.exception), 'The total length of the comment cannot exceed 300 characters.')
        with self.assertRaises(ValueError) as ve:
            self.api.post_comment(self.test_media_id, 'ABC DEFG.')
        self.assertEqual(str(ve.exception), 'The comment cannot consist of all capital letters.')
        with self.assertRaises(ValueError) as ve:
            self.api.post_comment(self.test_media_id, '#this #is #a #test #fail')
        self.assertEqual(str(ve.exception), 'The comment cannot contain more than 4 hashtags.')
        with self.assertRaises(ValueError) as ve:
            self.api.post_comment(self.test_media_id, 'https://google.com or http://instagram.com?')
        self.assertEqual(str(ve.exception), 'The comment cannot contain more than 1 URL.')
|
{"/instagram_web_api/client.py": ["/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/web/__init__.py": ["/tests/web/client.py", "/tests/web/media.py", "/tests/web/user.py", "/tests/web/upload.py", "/tests/web/feed.py", "/tests/web/unauthenticated.py", "/tests/web/compatpatch.py"], "/instagram_web_api/__init__.py": ["/instagram_web_api/client.py", "/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/test_web_api.py": ["/tests/web/__init__.py"]}
|
26,949
|
eracle/instagram_private_api
|
refs/heads/master
|
/tests/web/unauthenticated.py
|
from ..common import WebApiTestBase
class UnauthenticatedTests(WebApiTestBase):
    """Tests for endpoints that do not require authentication."""
    # Fixed: the docstring previously read "Tests for endpoints with
    # authentication", the opposite of what this class covers.

    @staticmethod
    def init_all(api):
        # None of these cases require a logged-in client, so there is no
        # 'require_auth' flag on any entry.
        test_names = [
            'test_unauthenticated_tag_feed',
            'test_unauthenticated_user_feed',
            'test_unauthenticated_location_feed',
            'test_unauthenticated_media_comments',
            'test_unauthenticated_media_comments_noextract',
            'test_unauthenticated_user_info2',
            'test_unauthenticated_tag_story_feed',
            'test_unauthenticated_location_story_feed',
        ]
        return [
            {'name': name, 'test': UnauthenticatedTests(name, api)}
            for name in test_names
        ]

    def test_unauthenticated_tag_feed(self):
        """Tag feed returns hashtag info with media and top posts."""
        results = self.api.tag_feed('catsofinstagram').get('data', {})
        self.assertIsNotNone(results.get('hashtag', {}).get('name'))
        self.assertGreater(
            len(results.get('hashtag', {}).get('edge_hashtag_to_media', {}).get('edges', [])), 0)
        self.assertGreater(
            len(results.get('hashtag', {}).get('edge_hashtag_to_top_posts', {}).get('edges', [])), 0)

    def test_unauthenticated_user_feed(self):
        """User feed returns a non-empty list of media dicts."""
        results = self.api.user_feed(self.test_user_id)
        self.assertGreater(len(results), 0)
        self.assertIsInstance(results, list)
        self.assertIsInstance(results[0], dict)

    def test_unauthenticated_location_feed(self):
        """Location feed returns location info with media and top posts."""
        results = self.api.location_feed('212988663').get('data', {})
        self.assertIsNotNone(results.get('location', {}).get('name'))
        self.assertGreater(
            len(results.get('location', {}).get('edge_location_to_media', {}).get('edges', [])), 0)
        self.assertGreater(
            len(results.get('location', {}).get('edge_location_to_top_posts', {}).get('edges', [])), 0)

    def test_unauthenticated_media_comments(self):
        """Media comments are extracted into a list of dicts by default."""
        results = self.api.media_comments(self.test_media_shortcode, count=20)
        self.assertGreaterEqual(len(results), 0)
        self.assertIsInstance(results, list)
        self.assertIsInstance(results[0], dict)

    def test_unauthenticated_media_comments_noextract(self):
        """With extract=False the raw response dict is returned."""
        results = self.api.media_comments(self.test_media_shortcode, count=20, extract=False)
        self.assertIsInstance(results, dict)

    def test_unauthenticated_user_info2(self):
        """user_info2() returns the user's id without authentication."""
        results = self.api.user_info2('instagram')
        self.assertIsNotNone(results.get('id'))

    def test_unauthenticated_tag_story_feed(self):
        """Tag story feed returns a reels_media collection."""
        results = self.api.tag_story_feed('catsofinstagram').get('data', {})
        self.assertTrue('reels_media' in results)

    def test_unauthenticated_location_story_feed(self):
        """Location story feed returns a reels_media collection."""
        results = self.api.location_story_feed('7226110').get('data', {})
        self.assertTrue('reels_media' in results)
|
{"/instagram_web_api/client.py": ["/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/web/__init__.py": ["/tests/web/client.py", "/tests/web/media.py", "/tests/web/user.py", "/tests/web/upload.py", "/tests/web/feed.py", "/tests/web/unauthenticated.py", "/tests/web/compatpatch.py"], "/instagram_web_api/__init__.py": ["/instagram_web_api/client.py", "/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/test_web_api.py": ["/tests/web/__init__.py"]}
|
26,950
|
eracle/instagram_private_api
|
refs/heads/master
|
/setup.py
|
from os import path
import io

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

try:
    # unittest.mock ships with Python 3.3+; older interpreters need the
    # third-party 'mock' backport to run the test suite.
    import unittest.mock  # noqa: F401
    has_mock = True
except ImportError:
    has_mock = False

__author__ = 'ping <lastmodified@gmail.com>'
__version__ = '1.6.0'

packages = [
    'instagram_private_api',
    'instagram_private_api.endpoints',
    'instagram_web_api'
]
test_reqs = [] if has_mock else ['mock']

with io.open(path.join(path.abspath(path.dirname(__file__)), 'README.md'), encoding='utf-8') as f:
    long_description = f.read()

setup(
    name='instagram_private_api',
    version=__version__,
    author='ping',
    author_email='lastmodified@gmail.com',
    license='MIT',
    url='https://github.com/ping/instagram_private_api/tree/master',
    install_requires=[],
    # Fixed: the setuptools keyword is 'tests_require'; the previous
    # 'test_requires' spelling is unknown to setup() and silently ignored.
    tests_require=test_reqs,
    keywords='instagram private api',
    description='A client interface for the private Instagram API.',
    long_description=long_description,
    long_description_content_type='text/markdown',
    packages=packages,
    platforms=['any'],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
    ]
)
|
{"/instagram_web_api/client.py": ["/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/web/__init__.py": ["/tests/web/client.py", "/tests/web/media.py", "/tests/web/user.py", "/tests/web/upload.py", "/tests/web/feed.py", "/tests/web/unauthenticated.py", "/tests/web/compatpatch.py"], "/instagram_web_api/__init__.py": ["/instagram_web_api/client.py", "/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/test_web_api.py": ["/tests/web/__init__.py"]}
|
26,951
|
eracle/instagram_private_api
|
refs/heads/master
|
/tests/web/compatpatch.py
|
import copy
import re
import time
from ..common import WebApiTestBase, WebClientCompatPatch as ClientCompatPatch
class CompatPatchTests(WebApiTestBase):
    """Tests for ClientCompatPatch."""

    @staticmethod
    def init_all(api):
        # Enumerate this suite's test cases; entries flagged with
        # 'require_auth' need a logged-in client and are skipped otherwise.
        return [
            {
                'name': 'test_compat_media',
                'test': CompatPatchTests('test_compat_media', api),
            },
            {
                'name': 'test_compat_comment',
                'test': CompatPatchTests('test_compat_comment', api),
            },
            {
                'name': 'test_compat_user',
                'test': CompatPatchTests('test_compat_user', api),
            },
            {
                'name': 'test_compat_user_list',
                'test': CompatPatchTests('test_compat_user_list', api),
                'require_auth': True,
            },
        ]

    def test_compat_media(self):
        """Patching a media object adds app-API keys (link/user/type/images/
        created_time); drop_incompat_keys removes web-only keys."""
        # Fetch with auto_patch off so we can compare raw vs patched copies.
        self.api.auto_patch = False
        media = self.api.media_info2(self.test_media_shortcode)
        media_patched = copy.deepcopy(media)
        ClientCompatPatch.media(media_patched)
        self.api.auto_patch = True
        self.assertIsNone(media.get('link'))
        self.assertIsNotNone(media_patched.get('link'))
        self.assertIsNone(media.get('user'))
        self.assertIsNotNone(media_patched.get('user'))
        self.assertIsNone(media.get('type'))
        self.assertIsNotNone(media_patched.get('type'))
        self.assertIsNone(media.get('images'))
        self.assertIsNotNone(media_patched.get('images'))
        self.assertIsNone(media.get('created_time'))
        self.assertIsNotNone(media_patched.get('created_time'))
        # Patched media ids take the app-API '<mediaid>_<userid>' form.
        self.assertIsNotNone(re.match(r'\d+_\d+', media_patched['id']))
        media_dropped = copy.deepcopy(media)
        ClientCompatPatch.media(media_dropped, drop_incompat_keys=True)
        self.assertIsNone(media_dropped.get('code'))
        self.assertIsNone(media_dropped.get('dimensions'))
        time.sleep(self.sleep_interval)
        # Test fix for Issue #20
        # https://github.com/ping/instagram_private_api/issues/20
        media2 = self.api.media_info2(self.test_media_shortcode2)
        ClientCompatPatch.media(media2)

    def test_compat_comment(self):
        """Patching a comment adds created_time/from; dropping removes
        the web-only created_at/user keys."""
        self.api.auto_patch = False
        comment = self.api.media_comments(self.test_media_shortcode, count=1)[0]
        comment_patched = copy.deepcopy(comment)
        self.api.auto_patch = True
        ClientCompatPatch.comment(comment_patched)
        self.assertIsNone(comment.get('created_time'))
        self.assertIsNotNone(comment_patched.get('created_time'))
        self.assertIsNone(comment.get('from'))
        self.assertIsNotNone(comment_patched.get('from'))
        comment_dropped = copy.deepcopy(comment)
        ClientCompatPatch.comment(comment_dropped, drop_incompat_keys=True)
        self.assertIsNone(comment_dropped.get('created_at'))
        self.assertIsNone(comment_dropped.get('user'))

    def test_compat_user(self):
        """Patching a user adds bio/profile_picture/counts; dropping removes
        the web-only biography/status keys."""
        self.api.auto_patch = False
        user = self.api.user_info2(self.test_user_name)
        user_patched = copy.deepcopy(user)
        ClientCompatPatch.user(user_patched)
        self.api.auto_patch = True
        self.assertIsNone(user.get('bio'))
        self.assertIsNotNone(user_patched.get('bio'))
        self.assertIsNone(user.get('profile_picture'))
        self.assertIsNotNone(user_patched.get('profile_picture'))
        self.assertIsNone(user.get('website'))
        # no bio link for test account
        # self.assertIsNotNone(user_patched.get('website'))
        self.assertIsNone(user.get('counts'))
        self.assertIsNotNone(user_patched.get('counts'))
        user_dropped = copy.deepcopy(user)
        ClientCompatPatch.user(user_dropped, drop_incompat_keys=True)
        self.assertIsNone(user_dropped.get('biography'))
        self.assertIsNone(user_dropped.get('status'))

    def test_compat_user_list(self):
        """Patching a list-user adds profile_picture; dropping removes the
        web-only viewer-relationship keys."""
        self.api.auto_patch = False
        user = self.api.user_followers(self.test_user_id)[0]
        user_patched = copy.deepcopy(user)
        ClientCompatPatch.list_user(user_patched)
        self.api.auto_patch = True
        self.assertIsNone(user.get('profile_picture'))
        self.assertIsNotNone(user_patched.get('profile_picture'))
        user_dropped = copy.deepcopy(user)
        ClientCompatPatch.list_user(user_dropped, drop_incompat_keys=True)
        self.assertIsNone(user_dropped.get('followed_by_viewer'))
        self.assertIsNone(user_dropped.get('requested_by_viewer'))
|
{"/instagram_web_api/client.py": ["/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/web/__init__.py": ["/tests/web/client.py", "/tests/web/media.py", "/tests/web/user.py", "/tests/web/upload.py", "/tests/web/feed.py", "/tests/web/unauthenticated.py", "/tests/web/compatpatch.py"], "/instagram_web_api/__init__.py": ["/instagram_web_api/client.py", "/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/test_web_api.py": ["/tests/web/__init__.py"]}
|
26,952
|
eracle/instagram_private_api
|
refs/heads/master
|
/tests/web/__init__.py
|
# flake8: noqa
from .client import ClientTests
from .media import MediaTests
from .user import UserTests
from .upload import UploadTests
from .feed import FeedTests
from .unauthenticated import UnauthenticatedTests
from .compatpatch import CompatPatchTests
|
{"/instagram_web_api/client.py": ["/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/web/__init__.py": ["/tests/web/client.py", "/tests/web/media.py", "/tests/web/user.py", "/tests/web/upload.py", "/tests/web/feed.py", "/tests/web/unauthenticated.py", "/tests/web/compatpatch.py"], "/instagram_web_api/__init__.py": ["/instagram_web_api/client.py", "/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/test_web_api.py": ["/tests/web/__init__.py"]}
|
26,953
|
eracle/instagram_private_api
|
refs/heads/master
|
/tests/private/__init__.py
|
# flake8: noqa
from .accounts import AccountTests
from .collections import CollectionsTests
from .discover import DiscoverTests
from .feed import FeedTests
from .friendships import FriendshipTests
from .live import LiveTests
from .locations import LocationTests
from .media import MediaTests
from .misc import MiscTests
from .tags import TagsTests
from .upload import UploadTests
from .users import UsersTests
from .usertags import UsertagsTests
from .highlights import HighlightsTests
from .igtv import IGTVTests
from .apiutils import ApiUtilsTests
from .client import ClientTests
from .compatpatch import CompatPatchTests
|
{"/instagram_web_api/client.py": ["/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/web/__init__.py": ["/tests/web/client.py", "/tests/web/media.py", "/tests/web/user.py", "/tests/web/upload.py", "/tests/web/feed.py", "/tests/web/unauthenticated.py", "/tests/web/compatpatch.py"], "/instagram_web_api/__init__.py": ["/instagram_web_api/client.py", "/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/test_web_api.py": ["/tests/web/__init__.py"]}
|
26,954
|
eracle/instagram_private_api
|
refs/heads/master
|
/tests/web/feed.py
|
from ..common import WebApiTestBase
class FeedTests(WebApiTestBase):
    """Tests for feed related functions."""
    # Fixed: the docstring previously read "Tests for media related
    # functions" — a copy-paste from MediaTests; this class covers feeds.

    @staticmethod
    def init_all(api):
        # None of these cases carry a 'require_auth' flag.
        test_names = [
            'test_tag_feed',
            'test_location_feed',
            'test_timeline_feed',
            'test_reels_tray',
            'test_reels_feed',
            'test_highlight_reels',
            'test_tagged_user_feed',
            'test_tag_story_feed',
            'test_location_story_feed',
        ]
        return [
            {'name': name, 'test': FeedTests(name, api)}
            for name in test_names
        ]

    def test_tag_feed(self):
        """Tag feed returns hashtag info with media and top posts."""
        results = self.api.tag_feed('catsofinstagram').get('data', {})
        self.assertIsNotNone(results.get('hashtag', {}).get('name'))
        self.assertGreater(
            len(results.get('hashtag', {}).get('edge_hashtag_to_media', {}).get('edges', [])), 0)
        self.assertGreater(
            len(results.get('hashtag', {}).get('edge_hashtag_to_top_posts', {}).get('edges', [])), 0)

    def test_location_feed(self):
        """Location feed returns location info with media and top posts."""
        results = self.api.location_feed('212988663').get('data', {})
        self.assertIsNotNone(results.get('location', {}).get('name'))
        self.assertGreater(
            len(results.get('location', {}).get('edge_location_to_media', {}).get('edges', [])), 0)
        self.assertGreater(
            len(results.get('location', {}).get('edge_location_to_top_posts', {}).get('edges', [])), 0)

    def test_timeline_feed(self):
        """Timeline feed returns the logged-in user and their timeline edges."""
        results = self.api.timeline_feed().get('data', {})
        self.assertIsNotNone(results.get('user', {}).get('username'))
        self.assertGreater(
            len(results.get('user', {}).get('edge_web_feed_timeline', {}).get('edges', [])), 0)

    def test_reels_tray(self):
        """Reels tray returns a non-empty edge_reels_tray_to_reel collection."""
        results = self.api.reels_tray().get('data', {})
        self.assertGreater(
            len(results.get('user', {}).get(
                'feed_reels_tray', {}).get(
                'edge_reels_tray_to_reel', {}).get('edges', [])), 0)

    def test_reels_feed(self):
        """Reels feed for specific user IDs returns reels_media."""
        results = self.api.reels_feed(['25025320']).get('data', {})
        self.assertTrue('reels_media' in results)

    def test_highlight_reels(self):
        """Highlight reels for a user return edge_highlight_reels."""
        results = self.api.highlight_reels('25025320').get('data', {}).get('user', {})
        self.assertTrue('edge_highlight_reels' in results)

    def test_tagged_user_feed(self):
        """Tagged feed for a user returns edge_user_to_photos_of_you."""
        results = self.api.tagged_user_feed('25025320').get('data', {}).get('user', {})
        self.assertTrue('edge_user_to_photos_of_you' in results)

    def test_tag_story_feed(self):
        """Tag story feed returns a reels_media collection."""
        results = self.api.tag_story_feed('catsofinstagram').get('data', {})
        self.assertTrue('reels_media' in results)

    def test_location_story_feed(self):
        """Location story feed returns a reels_media collection."""
        results = self.api.location_story_feed('7226110').get('data', {})
        self.assertTrue('reels_media' in results)
|
{"/instagram_web_api/client.py": ["/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/web/__init__.py": ["/tests/web/client.py", "/tests/web/media.py", "/tests/web/user.py", "/tests/web/upload.py", "/tests/web/feed.py", "/tests/web/unauthenticated.py", "/tests/web/compatpatch.py"], "/instagram_web_api/__init__.py": ["/instagram_web_api/client.py", "/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/test_web_api.py": ["/tests/web/__init__.py"]}
|
26,955
|
eracle/instagram_private_api
|
refs/heads/master
|
/instagram_web_api/compatpatch.py
|
# -*- coding: utf-8 -*-
import re
class ClientCompatPatch(object):
"""Utility to make entities from the private api similar to the ones
from the public one by adding the necessary properties, and if required,
remove any incompatible properties (to save storage space for example).
"""
IG_IMAGE_URL_EXPR = r'/((?P<crop>[a-z])[0-9]{3}x[0-9]{3}/)'
@classmethod
def _generate_image_url(cls, url, size, crop):
    """
    Derive an IG cropped/resized image url from *url*.

    :param url: source image url
    :param size: width/height of the image, e.g. '320'
    :param crop: crop mode, 'p' or 's'
    :return: rewritten url
    """
    match = re.search(cls.IG_IMAGE_URL_EXPR, url)
    if match:
        # A crop/size segment already exists: swap in the new size while
        # keeping the original crop mode when one was present.
        substitute = '/{crop!s}{size!s}x{size!s}/'.format(
            **{'crop': match.group('crop') or crop, 'size': size})
        return re.sub(cls.IG_IMAGE_URL_EXPR, substitute, url)
    # No crop segment: insert one right after the '/e<nn>/' path component.
    substitute = r'\g<eparam>{crop!s}{size!s}x{size!s}/'.format(
        **{'crop': crop, 'size': size})
    return re.sub(r'(?P<eparam>/e[0-9]+/)', substitute, url)
@staticmethod
def _drop_keys(obj, keys):
"""
Remove the specified keys from the object.
:param obj: target object
:param keys: list of keys
:return:
"""
if not obj:
return obj
for k in keys:
obj.pop(k, None)
@classmethod
def media(cls, media, drop_incompat_keys=False):
"""Patch a media object"""
media_shortcode = media.get('code') or media.get('shortcode') # for media_info2
media['link'] = 'https://www.instagram.com/p/{0!s}/'.format(media_shortcode)
try:
caption = (media.get('caption') or
media.get('edge_media_to_caption', {}).get('edges', [{}])[0].get(
'node', {}).get('text'))
except IndexError:
# no caption - edge_media_to_caption: { edges: [] }
caption = None
if not caption:
media['caption'] = None
else:
media['caption'] = {
'text': caption,
'from': media['owner'],
# generate a psuedo 12-char ID
'id': str(abs(hash(caption + media_shortcode)) % (10 ** 12)),
}
media['tags'] = []
media['filter'] = ''
media['attribution'] = None
media['user_has_liked'] = False
media_user = {
'id': media['owner']['id'],
}
if 'username' in media['owner']:
media_user['username'] = media['owner']['username']
if 'full_name' in media['owner']:
media_user['full_name'] = media['owner']['full_name']
if 'profile_pic_url' in media['owner']:
media_user['profile_picture'] = media['owner']['profile_pic_url']
media['user'] = media_user
media['type'] = 'video' if media['is_video'] else 'image'
display_src = media.get('display_src') or media.get('display_url') # for media_info2
images = {
'standard_resolution': {
'url': display_src,
'width': media['dimensions']['width'],
'height': media['dimensions']['height']},
'low_resolution': {'url': cls._generate_image_url(display_src, '320', 'p')},
'thumbnail': {'url': cls._generate_image_url(display_src, '150', 's')},
}
media['images'] = images
if media['is_video'] and media.get('video_url'):
videos = {
'standard_resolution': {
'url': media['video_url'],
'width': media['dimensions']['width'],
'height': media['dimensions']['height']},
'low_resolution': {'url': media['video_url']},
'low_bandwidth': {'url': media['video_url']},
}
media['videos'] = videos
media['likes'] = {
'count': (media.get('likes', {})
or media.get('edge_liked_by', {})
or media.get('edge_media_preview_like', {})).get('count', 0),
'data': []
}
media['comments'] = {
'count': (media.get('comments', {})
or media.get('edge_media_to_comment', {})).get('count', 0),
'data': []
}
# Try to preserve location even if there's no lat/lng
if 'location' not in media or not media['location']:
media['location'] = None
elif media.get('location', {}).get('lat') and media.get('location', {}).get('lng'):
media['location']['latitude'] = media['location']['lat']
media['location']['longitude'] = media['location']['lng']
media['id'] = '{0!s}_{1!s}'.format(media['id'], media['owner']['id'])
media['created_time'] = str(
media.get('date', '') or media.get('taken_at_timestamp', ''))
usertags = (
media.get('usertags', {}).get('nodes', []) or
[ut['node'] for ut in media.get('edge_media_to_tagged_user', {}).get('edges', [])])
if not usertags:
media['users_in_photo'] = []
else:
users_in_photo = [{
'position': {'y': ut['y'], 'x': ut['x']},
'user': ut['user']
} for ut in usertags]
media['users_in_photo'] = users_in_photo
# Try to make carousel_media for app api compat
if media.get('edge_sidecar_to_children', {}).get('edges', []):
carousel_media = []
edges = media.get('edge_sidecar_to_children', {}).get('edges', [])
for edge in edges:
node = edge.get('node', {})
images = {
'standard_resolution': {
'url': node['display_url'],
'width': node['dimensions']['width'],
'height': node['dimensions']['height']},
'low_resolution': {
'url': cls._generate_image_url(node['display_url'], '320', 'p')},
'thumbnail': {
'url': cls._generate_image_url(node['display_url'], '150', 's')},
}
node['images'] = images
node['type'] = 'image'
if node.get('is_video'):
videos = {
'standard_resolution': {
'url': node['video_url'],
'width': node['dimensions']['width'],
'height': node['dimensions']['height']},
'low_resolution': {'url': node['video_url']},
'low_bandwidth': {'url': node['video_url']},
}
node['videos'] = videos
node['type'] = 'video'
node['pk'] = node['id']
node['id'] = '{0!s}_{1!s}'.format(node['id'], media['owner']['id'])
node['original_width'] = node['dimensions']['width']
node['original_height'] = node['dimensions']['height']
carousel_media.append(node)
media['carousel_media'] = carousel_media
if drop_incompat_keys:
cls._drop_keys(
media, [
'__typename',
'code',
'comments_disabled',
'date',
'dimensions',
'display_src',
'edge_sidecar_to_children',
'is_ad',
'is_video',
'owner',
'thumbnail_src',
'usertags',
'video_url',
'video_views',
])
cls._drop_keys(
media.get('location'), ['lat', 'lng'])
return media
@classmethod
def comment(cls, comment, drop_incompat_keys=False):
"""Patch a comment object"""
comment['created_time'] = str(int(comment['created_at']))
comment_user = comment.get('user') or comment.get('owner')
from_user = {
'id': comment_user['id'],
'profile_picture': comment_user.get('profile_pic_url'),
'username': comment_user['username'],
'full_name': comment_user.get('full_name') or ''
}
comment['from'] = from_user
if drop_incompat_keys:
cls._drop_keys(comment, ['created_at', 'user'])
return comment
@classmethod
def user(cls, user, drop_incompat_keys=False):
"""Patch a user object"""
user['bio'] = user['biography']
user['profile_picture'] = user['profile_pic_url']
user['website'] = user['external_url']
counts = {
'media': (
user.get('media', {}).get('count')
or user.get('edge_owner_to_timeline_media', {}).get('count')),
'followed_by': (
user.get('followed_by', {}).get('count')
or user.get('edge_followed_by', {}).get('count')),
'follows': (
user.get('follows', {}).get('count')
or user.get('edge_follow', {}).get('count')),
}
user['counts'] = counts
if drop_incompat_keys:
cls._drop_keys(
user,
[
'biography',
'external_url',
'followed_by',
'follows',
'media',
'profile_pic_url',
'status',
]
)
return user
@classmethod
def list_user(cls, user, drop_incompat_keys=False):
"""Patch a user list object"""
user['profile_picture'] = user['profile_pic_url']
if drop_incompat_keys:
cls._drop_keys(
user,
[
'followed_by_viewer',
'is_verified',
'profile_pic_url',
'requested_by_viewer',
]
)
return user
|
{"/instagram_web_api/client.py": ["/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/web/__init__.py": ["/tests/web/client.py", "/tests/web/media.py", "/tests/web/user.py", "/tests/web/upload.py", "/tests/web/feed.py", "/tests/web/unauthenticated.py", "/tests/web/compatpatch.py"], "/instagram_web_api/__init__.py": ["/instagram_web_api/client.py", "/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/test_web_api.py": ["/tests/web/__init__.py"]}
|
26,956
|
eracle/instagram_private_api
|
refs/heads/master
|
/instagram_web_api/__init__.py
|
# flake8: noqa
from .client import Client
from .compatpatch import ClientCompatPatch
from .errors import (
ClientError, ClientLoginError, ClientCookieExpiredError,
ClientConnectionError, ClientForbiddenError,
ClientThrottledError,ClientBadRequestError,
)
from .common import ClientDeprecationWarning
__version__ = '1.6.0'
|
{"/instagram_web_api/client.py": ["/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/web/__init__.py": ["/tests/web/client.py", "/tests/web/media.py", "/tests/web/user.py", "/tests/web/upload.py", "/tests/web/feed.py", "/tests/web/unauthenticated.py", "/tests/web/compatpatch.py"], "/instagram_web_api/__init__.py": ["/instagram_web_api/client.py", "/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/test_web_api.py": ["/tests/web/__init__.py"]}
|
26,957
|
eracle/instagram_private_api
|
refs/heads/master
|
/tests/test_web_api.py
|
import unittest
import argparse
import os
import json
import sys
import logging
import re
import warnings
from .common import (
__webversion__ as __version__,
to_json, from_json,
WebClient as Client,
WebClientError as ClientError,
WebClientLoginError as ClientLoginError,
WebClientCookieExpiredError as ClientCookieExpiredError
)
from .web import (
ClientTests, MediaTests, UserTests,
CompatPatchTests, UploadTests,
FeedTests, UnauthenticatedTests,
)
if __name__ == '__main__':
    # Silence client warnings for the run; log to stdout so output interleaves
    # with the unittest runner's output.
    warnings.simplefilter('ignore', UserWarning)
    logging.basicConfig(format='%(name)s %(message)s', stream=sys.stdout)
    logger = logging.getLogger('instagram_web_api')
    logger.setLevel(logging.WARNING)

    # Example command:
    # python test_web_api.py -u "xxx" -p "xxx" -save -settings "web_settings.json"
    parser = argparse.ArgumentParser(description='Test instagram_web_api.py')
    parser.add_argument('-settings', '--settings', dest='settings_file_path', type=str, required=True)
    parser.add_argument('-u', '--username', dest='username', type=str)
    parser.add_argument('-p', '--password', dest='password', type=str)
    parser.add_argument('-save', '--save', action='store_true')
    parser.add_argument('-tests', '--tests', nargs='+')
    parser.add_argument('-debug', '--debug', action='store_true')

    args = parser.parse_args()
    if args.debug:
        logger.setLevel(logging.DEBUG)

    print('Client version: {0!s}'.format(__version__))

    # Load cached auth cookies when a settings file already exists.
    cached_auth = None
    if args.settings_file_path and os.path.isfile(args.settings_file_path):
        with open(args.settings_file_path) as file_data:
            cached_auth = json.load(file_data, object_hook=from_json)

    api = None
    if not cached_auth and args.username and args.password:
        # start afresh without existing auth
        try:
            print('New login.')
            api = Client(
                auto_patch=True, drop_incompat_keys=False,
                username=args.username, password=args.password, authenticate=True)
        except ClientLoginError:
            print('Login Error. Please check your username and password.')
            sys.exit(99)

        cached_auth = api.settings
        if args.save:
            # this auth cache can be re-used for up to 90 days
            with open(args.settings_file_path, 'w') as outfile:
                json.dump(cached_auth, outfile, default=to_json)
    elif cached_auth and args.username and args.password:
        # Re-use the cached cookies instead of logging in again.
        try:
            print('Reuse login.')
            api = Client(
                auto_patch=True, drop_incompat_keys=False,
                username=args.username,
                password=args.password,
                settings=cached_auth)
        except ClientCookieExpiredError:
            print('Cookie Expired. Please discard cached auth and login again.')
            sys.exit(99)
    else:
        # unauthenticated client instance
        print('Unauthenticated.')
        api = Client(auto_patch=True, drop_incompat_keys=False)

    if not api:
        raise Exception('Unable to initialise api.')

    # Build the full test registry; unauthenticated runs drop auth-required
    # entries below. UnauthenticatedTests always gets its own anonymous client.
    tests = []
    tests.extend(ClientTests.init_all(api))
    tests.extend(MediaTests.init_all(api))
    tests.extend(UserTests.init_all(api))
    tests.extend(CompatPatchTests.init_all(api))
    tests.extend(UploadTests.init_all(api))
    tests.extend(FeedTests.init_all(api))
    web_api = Client(auto_patch=True, drop_incompat_keys=False)
    tests.extend(UnauthenticatedTests.init_all(web_api))

    def match_regex(test_name):
        # True when any -tests pattern matches the test name.
        for test_re in args.tests:
            test_re = r'{0!s}'.format(test_re)
            if re.match(test_re, test_name):
                return True
        return False

    if args.tests:
        tests = filter(lambda x: match_regex(x['name']), tests)
    if not api.is_authenticated:
        tests = filter(lambda x: not x.get('require_auth', False), tests)

    try:
        suite = unittest.TestSuite()
        for test in tests:
            suite.addTest(test['test'])
        result = unittest.TextTestRunner(verbosity=2).run(suite)
        # Non-zero exit status when any test failed.
        sys.exit(not result.wasSuccessful())
    except ClientError as e:
        print('Unexpected ClientError {0!s} (Code: {1:d})'.format(e.msg, e.code))
|
{"/instagram_web_api/client.py": ["/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/web/__init__.py": ["/tests/web/client.py", "/tests/web/media.py", "/tests/web/user.py", "/tests/web/upload.py", "/tests/web/feed.py", "/tests/web/unauthenticated.py", "/tests/web/compatpatch.py"], "/instagram_web_api/__init__.py": ["/instagram_web_api/client.py", "/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/test_web_api.py": ["/tests/web/__init__.py"]}
|
26,958
|
eracle/instagram_private_api
|
refs/heads/master
|
/tests/web/user.py
|
import unittest
import time
from ..common import WebApiTestBase, WebClientError as ClientError, compat_mock
class UserTests(WebApiTestBase):
    """Tests for user related functions."""

    @staticmethod
    def init_all(api):
        # Declarative registry of tests; entries flagged require_auth are
        # filtered out by the runner when the client is unauthenticated.
        return [
            {
                'name': 'test_user_info',
                'test': UserTests('test_user_info', api),
            },
            {
                'name': 'test_user_info2',
                'test': UserTests('test_user_info2', api),
            },
            {
                'name': 'test_user_feed',
                'test': UserTests('test_user_feed', api),
            },
            {
                'name': 'test_notfound_user_feed',
                'test': UserTests('test_notfound_user_feed', api)
            },
            {
                'name': 'test_user_feed_noextract',
                'test': UserTests('test_user_feed_noextract', api)
            },
            {
                'name': 'test_user_followers',
                'test': UserTests('test_user_followers', api),
                'require_auth': True,
            },
            {
                'name': 'test_user_followers_noextract',
                'test': UserTests('test_user_followers_noextract', api),
                'require_auth': True,
            },
            {
                'name': 'test_user_following',
                'test': UserTests('test_user_following', api),
                'require_auth': True,
            },
            {
                'name': 'test_friendships_create',
                'test': UserTests('test_friendships_create', api),
                'require_auth': True,
            },
            {
                'name': 'test_friendships_create_mock',
                'test': UserTests('test_friendships_create_mock', api),
            },
            {
                'name': 'test_friendships_destroy',
                'test': UserTests('test_friendships_destroy', api),
                'require_auth': True,
            },
            {
                'name': 'test_friendships_destroy_mock',
                'test': UserTests('test_friendships_destroy_mock', api),
            },
        ]

    @unittest.skip('Deprecated.')
    def test_user_info(self):
        results = self.api.user_info(self.test_user_id)
        self.assertEqual(results.get('status'), 'ok')
        self.assertIsNotNone(results.get('profile_picture'))

    def test_user_info2(self):
        results = self.api.user_info2('instagram')
        self.assertIsNotNone(results.get('id'))

    def test_user_feed(self):
        results = self.api.user_feed(self.test_user_id)
        self.assertGreater(len(results), 0)
        self.assertIsInstance(results, list)
        self.assertIsInstance(results[0], dict)

    def test_notfound_user_feed(self):
        # '1' is not a valid user id; the client should surface a ClientError.
        self.assertRaises(ClientError, lambda: self.api.user_feed('1'))

    def test_user_feed_noextract(self):
        # Fix: dropped the leftover unused `extract=True` parameter — unittest
        # invokes test methods with `self` only, so it could never be passed.
        results = self.api.user_feed(self.test_user_id, extract=False)
        self.assertIsInstance(results, dict)
        nodes = [edge['node'] for edge in results.get('data', {}).get('user', {}).get(
            'edge_owner_to_timeline_media', {}).get('edges', [])]
        self.assertIsInstance(nodes, list)
        self.assertGreater(len(nodes), 0)
        first_code = nodes[0]['shortcode']
        # Page with the returned cursor and verify we got a different first item.
        end_cursor = results.get('data', {}).get('user', {}).get(
            'edge_owner_to_timeline_media', {}).get('page_info', {}).get('end_cursor')
        time.sleep(self.sleep_interval)
        results = self.api.user_feed(self.test_user_id, extract=False, end_cursor=end_cursor)
        self.assertNotEqual(first_code, results.get('data', {}).get('user', {}).get(
            'edge_owner_to_timeline_media', {}).get('edges', [])[0]['node']['shortcode'])

    def test_user_followers(self):
        results = self.api.user_followers(self.test_user_id)
        self.assertGreater(len(results), 0)
        self.assertIsInstance(results, list)
        self.assertIsInstance(results[0], dict)

    def test_user_followers_noextract(self):
        results = self.api.user_followers(self.test_user_id, extract=False)
        self.assertIsInstance(results, dict)
        nodes = results.get('data', {}).get('user', {}).get(
            'edge_followed_by', {}).get('edges')
        self.assertIsInstance(nodes, list)
        self.assertGreater(len(nodes or []), 0)
        first_user = nodes[0]['node']['username']
        # Page with the returned cursor and verify we got a different first user.
        end_cursor = results.get('data', {}).get('user', {}).get(
            'edge_followed_by', {}).get('page_info', {}).get('end_cursor')
        time.sleep(self.sleep_interval)
        results = self.api.user_followers(self.test_user_id, extract=False, end_cursor=end_cursor)
        self.assertNotEqual(first_user, results.get('data', {}).get('user', {}).get(
            'edge_followed_by', {}).get('edges')[0]['node']['username'])

    def test_user_following(self):
        results = self.api.user_following(self.test_user_id)
        self.assertGreater(len(results), 0)
        first_user = results[0]['username']
        time.sleep(self.sleep_interval)
        results = self.api.user_following(self.test_user_id, extract=False)
        end_cursor = results.get('follows', {}).get('page_info', {}).get('end_cursor')
        time.sleep(self.sleep_interval)
        results = self.api.user_following(self.test_user_id, extract=False, end_cursor=end_cursor)
        self.assertNotEqual(first_user, results.get('follows', {}).get('nodes', [{}])[0].get('username'))

    @unittest.skip('Modifies data')
    def test_friendships_create(self):
        results = self.api.friendships_create(self.test_user_id)
        self.assertEqual(results.get('status'), 'ok')

    @compat_mock.patch('instagram_web_api.Client._make_request')
    def test_friendships_create_mock(self, make_request):
        # Verify the follow endpoint URL without hitting the network.
        make_request.return_value = {'status': 'ok'}
        self.api.friendships_create(self.test_user_id)
        make_request.assert_called_with(
            'https://www.instagram.com/web/friendships/{user_id!s}/follow/'.format(**{'user_id': self.test_user_id}),
            params='')

    @unittest.skip('Modifies data')
    def test_friendships_destroy(self):
        results = self.api.friendships_destroy(self.test_user_id)
        self.assertEqual(results.get('status'), 'ok')

    @compat_mock.patch('instagram_web_api.Client._make_request')
    def test_friendships_destroy_mock(self, make_request):
        # Verify the unfollow endpoint URL without hitting the network.
        make_request.return_value = {'status': 'ok'}
        self.api.friendships_destroy(self.test_user_id)
        make_request.assert_called_with(
            'https://www.instagram.com/web/friendships/{user_id!s}/unfollow/'.format(**{'user_id': self.test_user_id}),
            params='')
|
{"/instagram_web_api/client.py": ["/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/web/__init__.py": ["/tests/web/client.py", "/tests/web/media.py", "/tests/web/user.py", "/tests/web/upload.py", "/tests/web/feed.py", "/tests/web/unauthenticated.py", "/tests/web/compatpatch.py"], "/instagram_web_api/__init__.py": ["/instagram_web_api/client.py", "/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/test_web_api.py": ["/tests/web/__init__.py"]}
|
26,959
|
eracle/instagram_private_api
|
refs/heads/master
|
/tests/web/upload.py
|
import unittest
import time
try:
# python 2.x
from urllib2 import urlopen
except ImportError:
# python 3.x
from urllib.request import urlopen
import json
from ..common import WebApiTestBase, MockResponse, compat_mock
class UploadTests(WebApiTestBase):
    """Tests for photo upload endpoints."""

    @staticmethod
    def init_all(api):
        # Declarative registry of tests consumed by the runner script.
        return [
            {
                'name': 'test_post_photo',
                'test': UploadTests('test_post_photo', api),
            },
            {
                'name': 'test_post_photo_mock',
                'test': UploadTests('test_post_photo_mock', api),
            },
        ]

    @unittest.skip('Modifies data')
    def test_post_photo(self):
        # Live test: downloads a sample image and actually posts it.
        sample_url = 'https://c1.staticflickr.com/5/4103/5059663679_85a7ec3f63_b.jpg'
        res = urlopen(sample_url)
        photo_data = res.read()
        results = self.api.post_photo(photo_data, caption='Feathers #feathers')
        self.assertEqual(results.get('status'), 'ok')
        self.assertIsNotNone(results.get('media'))

    @compat_mock.patch('instagram_web_api.Client._make_request')
    def test_post_photo_mock(self, make_request):
        # Mocked test: pins the exact multipart request the client must build.
        ts_now = time.time()
        make_request.return_value = {'status': 'ok', 'upload_id': '123456789'}
        # Freeze time and the boundary's random characters so the expected
        # request body below is deterministic.
        with compat_mock.patch(
                'instagram_web_api.client.compat_urllib_request.OpenerDirector.open') as opener, \
                compat_mock.patch('instagram_web_api.client.time.time') as time_mock, \
                compat_mock.patch('instagram_web_api.client.random.choice') as rand_choice, \
                compat_mock.patch('instagram_web_api.Client._read_response') as read_response, \
                compat_mock.patch(
                    'instagram_web_api.client.compat_urllib_request.Request') as request:
            opener.return_value = MockResponse()
            time_mock.return_value = ts_now
            rand_choice.return_value = 'x'
            # add rhx_gis so that we can reuse the same response for init and uploading
            read_response.return_value = json.dumps(
                {'status': 'ok', 'upload_id': '123456789', 'rhx_gis': '22aea71b163e335a0ad4479549b530d7'},
                separators=(',', ':')
            )
            self.api.post_photo('...'.encode('ascii'), caption='Test')
            headers = {
                'Accept-Language': 'en-US',
                'Accept-Encoding': 'gzip, deflate',
                'Origin': 'https://www.instagram.com',
                'x-csrftoken': self.api.csrftoken,
                'x-instagram-ajax': '1',
                'Accept': '*/*',
                'User-Agent': self.api.mobile_user_agent,
                'Referer': 'https://www.instagram.com/create/details/',
                'x-requested-with': 'XMLHttpRequest',
                'Connection': 'close',
                'Content-Type': 'application/x-www-form-urlencoded'}
            # Expected multipart/form-data payload, byte for byte.
            body = '--{boundary}\r\n' \
                   'Content-Disposition: form-data; name="upload_id"\r\n\r\n' \
                   '{upload_id}\r\n' \
                   '--{boundary}\r\n' \
                   'Content-Disposition: form-data; name="media_type"\r\n\r\n1\r\n' \
                   '--{boundary}\r\n' \
                   'Content-Disposition: form-data; name="photo"; filename="photo.jpg"\r\n' \
                   'Content-Type: application/octet-stream\r\n' \
                   'Content-Transfer-Encoding: binary\r\n\r\n...\r\n' \
                   '--{boundary}--\r\n'.format(
                       boundary='----WebKitFormBoundary{}'.format('x' * 16),
                       upload_id=int(ts_now * 1000))
            request.assert_called_with(
                'https://www.instagram.com/create/upload/photo/',
                body.encode('utf-8'), headers=headers)
|
{"/instagram_web_api/client.py": ["/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/web/__init__.py": ["/tests/web/client.py", "/tests/web/media.py", "/tests/web/user.py", "/tests/web/upload.py", "/tests/web/feed.py", "/tests/web/unauthenticated.py", "/tests/web/compatpatch.py"], "/instagram_web_api/__init__.py": ["/instagram_web_api/client.py", "/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/test_web_api.py": ["/tests/web/__init__.py"]}
|
26,960
|
eracle/instagram_private_api
|
refs/heads/master
|
/instagram_web_api/common.py
|
class ClientDeprecationWarning(DeprecationWarning):
    """Warning category for client features that are already deprecated."""
    pass


class ClientPendingDeprecationWarning(PendingDeprecationWarning):
    """Warning category for features slated for deprecation in a future release."""
    pass


class ClientExperimentalWarning(UserWarning):
    """Warning category for experimental, unstable client features."""
    pass
|
{"/instagram_web_api/client.py": ["/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/web/__init__.py": ["/tests/web/client.py", "/tests/web/media.py", "/tests/web/user.py", "/tests/web/upload.py", "/tests/web/feed.py", "/tests/web/unauthenticated.py", "/tests/web/compatpatch.py"], "/instagram_web_api/__init__.py": ["/instagram_web_api/client.py", "/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/test_web_api.py": ["/tests/web/__init__.py"]}
|
26,961
|
eracle/instagram_private_api
|
refs/heads/master
|
/misc/checkpoint.py
|
import re
import gzip
from io import BytesIO
try:
# python 2.x
from urllib2 import urlopen, Request
from urllib import urlencode, unquote_plus
except ImportError:
# python 3.x
from urllib.request import urlopen, Request
from urllib.parse import urlencode, unquote_plus
import sys
class Checkpoint:
    """OBSOLETE. No longer working or supported.

    Drives Instagram's logged-out checkpoint challenge for a user:
    trigger the verification email, then submit the emailed response code.
    """

    USER_AGENT = 'Mozilla/5.0 (iPhone; CPU iPhone OS 9_3_3 like Mac OS X) ' \
                 'AppleWebKit/601.1.46 (KHTML, like Gecko) Mobile/13G34 ' \
                 'Instagram 9.2.0 (iPhone7,2; iPhone OS 9_3_3; en_US; en-US; scale=2.00; 750x1334)'

    def __init__(self, user_id, **kwargs):
        """
        :param user_id: numeric IG user id
        :param kwargs: optional 'timeout' in seconds (default 15) for all requests
        """
        self.user_id = user_id
        self.csrftoken = ''
        self.cookie = ''
        self.endpoint = 'https://i.instagram.com/integrity/checkpoint/' \
                        'checkpoint_logged_out_main/%(user_id)s/?%(params)s' % \
                        {
                            'user_id': self.user_id,
                            'params': urlencode({'next': 'instagram://checkpoint/dismiss'})
                        }
        self.timeout = kwargs.pop('timeout', 15)

    @staticmethod
    def _read_content(res):
        # Shared helper: transparently inflate gzip-encoded response bodies.
        if res.info().get('Content-Encoding') == 'gzip':
            buf = BytesIO(res.read())
            return gzip.GzipFile(fileobj=buf).read().decode('utf-8')
        return res.read().decode('utf-8')

    def trigger_checkpoint(self):
        """Request that a verification email be sent.

        :return: True when the response page contains the code-entry form
        :raises Exception: if no csrf token can be extracted from the cookies
        """
        headers = {
            'User-Agent': self.USER_AGENT,
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
            'Accept-Language': 'en-US',
            'Accept-Encoding': 'gzip',
            'Connection': 'keep-alive',
        }
        req = Request(self.endpoint, headers=headers)
        # Bug fix: this call previously hard-coded timeout=15, ignoring the
        # configurable self.timeout used by every other request.
        res = urlopen(req, timeout=self.timeout)
        csrf_mobj = re.search(r'csrftoken=(?P<csrf>[^;]+?);', res.info().get('set-cookie') or '')
        if not csrf_mobj:
            raise Exception('Unable to retrieve csrf token.')
        csrf = csrf_mobj.group('csrf')
        self.csrftoken = csrf
        # Collect the session cookies needed to replay on the follow-up POST.
        cookie_val = res.info().get('set-cookie') or ''
        cookie = ''
        for c in ['sessionid', 'checkpoint_step', 'mid', 'csrftoken']:
            cookie_mobj = re.search(r'{0!s}=(?P<val>[^;]+?);'.format(c), cookie_val)
            if cookie_mobj:
                cookie += '{0!s}={1!s}; '.format(c, unquote_plus(cookie_mobj.group('val')))
        self.cookie = cookie
        data = {'csrfmiddlewaretoken': csrf, 'email': 'Verify by Email'}  # 'sms': 'Verify by SMS'
        headers['Referer'] = self.endpoint
        headers['Origin'] = 'https://i.instagram.com'
        headers['Content-Type'] = 'application/x-www-form-urlencoded'
        headers['Cookie'] = self.cookie
        req = Request(self.endpoint, headers=headers)
        res = urlopen(req, data=urlencode(data).encode('ascii'), timeout=self.timeout)
        content = self._read_content(res)
        # The code-entry form's input id marks a successfully triggered challenge.
        if 'id_response_code' in content:
            return True
        return False

    def respond_to_checkpoint(self, response_code):
        """Submit the emailed response code.

        :param response_code: 6-digit code from the verification email
        :return: tuple of (http status code, response page content)
        """
        headers = {
            'User-Agent': self.USER_AGENT,
            'Origin': 'https://i.instagram.com',
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
            'Accept-Language': 'en-US',
            'Accept-Encoding': 'gzip',
            'Referer': self.endpoint,
            'Cookie': self.cookie,
        }
        req = Request(self.endpoint, headers=headers)
        data = {'csrfmiddlewaretoken': self.csrftoken, 'response_code': response_code}
        res = urlopen(req, data=urlencode(data).encode('ascii'), timeout=self.timeout)
        return res.code, self._read_content(res)
if __name__ == '__main__':
    # Interactive driver: trigger the checkpoint email, then submit the code.
    print('------------------------------------')
    print('** THIS IS UNLIKELY TO BE WORKING **')
    print('------------------------------------')
    try:
        user_id = None
        while not user_id:
            user_id = input('User ID (numeric): ')
        client = Checkpoint(user_id)
        successful = client.trigger_checkpoint()
        if not successful:
            print('Unable to trigger checkpoint challenge.')
        response_code = None
        while not response_code:
            response_code = input('Response Code (6-digit numeric code): ')
        status_code, final_response = client.respond_to_checkpoint(response_code)
        # Success is signalled by a 200 page containing the verified message.
        if status_code != 200 or 'has been verified' not in final_response:
            print(final_response)
            print('-------------------------------\n[!] Unable to verify checkpoint.')
        else:
            print('[i] Checkpoint successfully verified.')
    except KeyboardInterrupt:
        # Ctrl-C exits quietly.
        sys.exit(0)
    except Exception as e:
        print('Unexpected error: {0!s}'.format(str(e)))
|
{"/instagram_web_api/client.py": ["/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/web/__init__.py": ["/tests/web/client.py", "/tests/web/media.py", "/tests/web/user.py", "/tests/web/upload.py", "/tests/web/feed.py", "/tests/web/unauthenticated.py", "/tests/web/compatpatch.py"], "/instagram_web_api/__init__.py": ["/instagram_web_api/client.py", "/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/test_web_api.py": ["/tests/web/__init__.py"]}
|
26,962
|
eracle/instagram_private_api
|
refs/heads/master
|
/instagram_web_api/errors.py
|
# -*- coding: utf-8 -*-
class ClientError(Exception):
    """Generic error class, catch-all for most client issues."""

    def __init__(self, msg, code=None):
        super(ClientError, self).__init__(msg)
        # Default to 0 when no HTTP status code is supplied.
        self.code = 0 if not code else code

    @property
    def msg(self):
        # The message is the first positional argument given to Exception.
        return self.args[0]
# Exception hierarchy: every concrete error subclasses ClientError, so callers
# can catch the single base type or a specific failure mode.
class ClientLoginError(ClientError):
    """Raised when login fails."""
    pass


class ClientCookieExpiredError(ClientError):
    """Raised when cookies have expired."""
    pass


class ClientConnectionError(ClientError):
    """Raised due to network connectivity-related issues"""
    pass


class ClientBadRequestError(ClientError):
    """Raised due to a HTTP 400 response"""
    pass


class ClientForbiddenError(ClientError):
    """Raised due to a HTTP 403 response"""
    pass


class ClientThrottledError(ClientError):
    """Raised due to a HTTP 429 response"""
    pass
|
{"/instagram_web_api/client.py": ["/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/web/__init__.py": ["/tests/web/client.py", "/tests/web/media.py", "/tests/web/user.py", "/tests/web/upload.py", "/tests/web/feed.py", "/tests/web/unauthenticated.py", "/tests/web/compatpatch.py"], "/instagram_web_api/__init__.py": ["/instagram_web_api/client.py", "/instagram_web_api/compatpatch.py", "/instagram_web_api/errors.py", "/instagram_web_api/common.py"], "/tests/test_web_api.py": ["/tests/web/__init__.py"]}
|
26,972
|
jkwaters/Robot-Rendezvous
|
refs/heads/master
|
/test.py
|
import shift

# Benchmark driver: runs many rendezvous simulations and prints averages.
tests = 100  # simulations per configuration
bigSteps = False # ROBOTS move 1 hop per iteration

# FULL RING OF ROBOTS
# for nodes in (10 ** exponent for exponent in range(1,4)):
#     totalIterations = 0
#     totalShifts = 0
#     numRobots = int(nodes/2)
#     for i in range(0,tests):
#         result = shift.rendezvouz(nodes, numRobots, bigSteps, output=False)
#         totalIterations += result[0]
#         totalShifts += result[1]
#     # print(robots, ' : ', totalIterations)
#     avgIterations = totalIterations / tests
#     avgShifts = totalShifts / tests
#     print(nodes, avgIterations, avgShifts)

# 2 to 20 robots on 20 node ring
# nodes = 20
# for numRobots in range(2,nodes+1):
#     totalIterations = 0
#     totalShifts = 0
#     for i in range(0,tests):
#         result = shift.rendezvouz(nodes, numRobots, bigSteps, output=False)
#         totalIterations += result[0]
#         totalShifts += result[1]
#     avgIterations = totalIterations / tests
#     avgShifts = int(totalShifts / tests)
#     print (numRobots, avgIterations, avgShifts, sep=', ')

# Active experiment: sweep robot counts (step 5) on a 200-node ring and print
# "numRobots, avg iterations, avg moves" per configuration.
nodes = 200
for numRobots in range(2,nodes+1,5):
    totalIterations = 0
    totalShifts = 0
    for i in range(0,tests):
        # rendezvouz returns (iterations, total individual moves).
        result = shift.rendezvouz(nodes, numRobots, bigSteps, output=False)
        totalIterations += result[0]
        totalShifts += result[1]
    avgIterations = totalIterations / tests
    avgShifts = int(totalShifts / tests)
    print (numRobots, avgIterations, avgShifts, sep=', ')

# nodes = 1000 # shift.inputNumber("How many nodes? ")
# # numRobots = 10 # shift.inputNumber("How many robots? ")
# # Ask movement specifics
# for numRobots in range(int(nodes/10),nodes+1,int(nodes/10)):
#     totalIterations = 0
#     totalShifts = 0
#     for i in range(0,tests):
#         result = shift.rendezvouz(nodes, numRobots)
#         totalIterations += result[0]
#         totalShifts += result[1]
#     # print(numRobots, ' : ', totalIterations)
#     avgIterations = totalIterations / tests
#     avgShifts = totalShifts / tests
#     print (numRobots, avgIterations, avgShifts)
|
{"/test.py": ["/shift.py"], "/main.py": ["/shift.py"]}
|
26,973
|
jkwaters/Robot-Rendezvous
|
refs/heads/master
|
/brainstorming/Robot.py
|
import random as r
class Robot:
    """A robot on a ring of nodes: tracks position, step size and direction."""
    # Class-wide count of Robot instances created so far.
    totalRobots = 0

    def __init__(self, position, movement=1, direction=1):
        self.position = position # zero to n
        self.movement = movement # how far
        self.direction = direction # either +1 or -1
        Robot.totalRobots += 1

    def displayCount(self):
        # NOTE: Python 2 print statement — this brainstorming module is py2-only.
        print "Total Robots %d" % Robot.totalRobots

    def setDirection(self):
        # Pick a new travel direction uniformly at random.
        self.direction = r.choice([1,-1])

    def move(self):
        # Advance one step in the current direction (no ring wrap-around here).
        self.position = (self.position + self.direction)

    def merge(self):
        # NOTE(review): no-op stub — the bare attribute access below has no
        # effect; merging was never implemented.
        self.position
|
{"/test.py": ["/shift.py"], "/main.py": ["/shift.py"]}
|
26,974
|
jkwaters/Robot-Rendezvous
|
refs/heads/master
|
/main.py
|
import shift
# Interactive entry point: gather the simulation parameters, run a single
# rendezvous simulation with live output, then report the totals.
nodes = shift.inputNumber("How many nodes? ")
numRobots = shift.inputNumber("How many robots? ")
bigSteps = shift.yes_or_no("Can the robots take bigger steps?")
verbose = shift.yes_or_no("verbose output?")

iterations, shifts = shift.rendezvouz(nodes, numRobots, bigSteps, output=True, verbose=verbose)
print ("\n\n\ntotal iterations: ", iterations, " total moves: ", shifts)
|
{"/test.py": ["/shift.py"], "/main.py": ["/shift.py"]}
|
26,975
|
jkwaters/Robot-Rendezvous
|
refs/heads/master
|
/shift.py
|
import random
def inputNumber(message):
while True:
try:
userInput = int(input(message))
except ValueError:
print("Not an integer! Try again.")
continue
else:
return userInput
break
def yes_or_no(question):
reply = str(input(question + ' (y/n): ')).lower().strip()
if reply[0] == 'y':
return True
if reply[0] == 'n':
return False
else:
return yes_or_no("Uhhhh... please enter a valid input")
def shift(position, distance, n):
    """Move `position` by `distance` around a ring of nodes numbered 1..n.

    Returns the wrapped 1-based node index.

    Fixed: the original wrap formulas (`n + position + distance` for
    underflow, `n - position + distance` for overflow) were only correct
    for |distance| == 1 -- e.g. shift(9, 5, 10) returned 6 instead of 4 --
    which matters once bigSteps allows multi-node moves.  Modular
    arithmetic handles any distance, positive or negative, and matches
    the old behavior for unit steps.
    """
    return ((position + distance - 1) % n) + 1
def rendezvouz(nodes, numRobots, bigSteps, output=False, verbose=False):
    """Simulate robots moving randomly on a ring until they all meet.

    nodes     -- number of ring positions (1..nodes).
    numRobots -- robots placed on distinct random starting positions.
    bigSteps  -- if True, step sizes are random in 1..nodes//<robots left>;
                 otherwise every step is +/-1.
    output    -- print progress (every round if verbose, else only rounds
                 where a merge happened).

    Returns (iterations, shifts): rounds simulated and single-robot moves.
    """
    iterations = 0
    shifts = 0
    # Distinct random starting positions (sets merge co-located robots).
    robots = set(random.sample(range(1, nodes+1), numRobots))
    # print("robots: ", robots)
    while len(robots) > 1:
        iterations += 1
        lenBefore = len(robots)
        robotsBefore = set(robots)
        robotsAfter = set()
        moves = list()
        while len(robotsBefore) > 0:
            move = random.choice([-1, 1])
            if bigSteps:
                move = move * random.randint(1,int(nodes/lenBefore))
            moves.append(move)
            # Robots landing on the same node collapse into one (set add).
            robotsAfter.add(shift(robotsBefore.pop(),move,nodes))
            shifts += 1
        # NOTE(review): robotsBefore has been fully popped by the loop
        # above, so this difference_update appears to be a no-op.
        robotsAfter.difference_update(robotsBefore)
        if output:
            if verbose:
                print("\nbefore: ", robots)
                print("move: ", moves)
                print("after: ", robotsAfter, ' : ', len(robotsAfter), ' : ', iterations)
            elif len(robotsAfter) != lenBefore:
                # Non-verbose: only report rounds where robots merged.
                print("\nbefore: ", robots)
                print("move: ", moves)
                print("after: ", robotsAfter, ' : ', len(robotsAfter), ' : ', iterations)
        robots = set(robotsAfter)
    return (iterations, shifts)
# test edge cases & basic sanity testing
# print(shift(1, -1, 10))
# print(shift(10, 1, 10))
# print(shift(1, 1, 10))
# print(shift(10, -1, 10))
|
{"/test.py": ["/shift.py"], "/main.py": ["/shift.py"]}
|
26,976
|
jkwaters/Robot-Rendezvous
|
refs/heads/master
|
/brainstorming/Cycle.py
|
import Robot
class Cycle:
"""Doubly linked list"""
totalRobots = 0
# nodes = 100
def __init__(self, nodes=100, robots):
self.nodes = nodes
self.cycle = [None] * nodes
self.startingRobots = robots
# initialize robots in cycle
def initRobots(self):
|
{"/test.py": ["/shift.py"], "/main.py": ["/shift.py"]}
|
26,984
|
yujianke100/Sub-Bipartite-Graph-Creator
|
refs/heads/main
|
/gui/Ui_design.py
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'c:\Users\shinshi\Desktop\Sub-Bipartite-Graph-Creator\gui\design.ui'
#
# Created by: PyQt5 UI code generator 5.15.4
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Form(object):
    """pyuic5-generated widget/layout definition for the SBGC main window.

    NOTE(review): generated from design.ui by PyQt5 uic 5.15.4; manual
    changes are lost on the next pyuic5 run, so only documentation is
    added here -- the construction code itself is untouched.
    """
    def setupUi(self, Form):
        """Instantiate every widget, layout and font on `Form`.

        Pure construction; signal wiring beyond connectSlotsByName is done
        elsewhere (see main_window.ui_init in main.py).
        """
        Form.setObjectName("Form")
        Form.setWindowModality(QtCore.Qt.ApplicationModal)
        Form.resize(939, 928)
        icon = QtGui.QIcon()
        # NOTE(review): absolute, machine-specific icon path baked in by
        # the .ui file -- breaks on other machines; fix belongs in design.ui.
        icon.addPixmap(QtGui.QPixmap("c:\\Users\\shinshi\\Desktop\\Sub-Bipartite-Graph-Creator\\gui\\icon.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        Form.setWindowIcon(icon)
        Form.setAutoFillBackground(True)
        self.gridLayout = QtWidgets.QGridLayout(Form)
        self.gridLayout.setObjectName("gridLayout")
        self.splitter = QtWidgets.QSplitter(Form)
        self.splitter.setOrientation(QtCore.Qt.Vertical)
        self.splitter.setObjectName("splitter")
        self.layoutWidget = QtWidgets.QWidget(self.splitter)
        self.layoutWidget.setObjectName("layoutWidget")
        self.verticalLayout = QtWidgets.QVBoxLayout(self.layoutWidget)
        self.verticalLayout.setContentsMargins(0, 0, 0, 0)
        self.verticalLayout.setObjectName("verticalLayout")
        # Title label.
        self.label = QtWidgets.QLabel(self.layoutWidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(12)
        self.label.setFont(font)
        self.label.setAlignment(QtCore.Qt.AlignCenter)
        self.label.setObjectName("label")
        self.verticalLayout.addWidget(self.label)
        # Top control row: Deselect button plus Gap/Min/Max spin boxes.
        self.horizontalLayout = QtWidgets.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.Deselect = QtWidgets.QPushButton(self.layoutWidget)
        self.Deselect.setObjectName("Deselect")
        self.horizontalLayout.addWidget(self.Deselect)
        self.label_2 = QtWidgets.QLabel(self.layoutWidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(12)
        self.label_2.setFont(font)
        self.label_2.setLayoutDirection(QtCore.Qt.LeftToRight)
        self.label_2.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.label_2.setObjectName("label_2")
        self.horizontalLayout.addWidget(self.label_2)
        self.gap_num = QtWidgets.QDoubleSpinBox(self.layoutWidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(12)
        self.gap_num.setFont(font)
        self.gap_num.setDecimals(3)
        self.gap_num.setMinimum(0.001)
        self.gap_num.setMaximum(0.999)
        self.gap_num.setSingleStep(0.001)
        self.gap_num.setProperty("value", 0.008)
        self.gap_num.setObjectName("gap_num")
        self.horizontalLayout.addWidget(self.gap_num)
        self.label_3 = QtWidgets.QLabel(self.layoutWidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(12)
        self.label_3.setFont(font)
        self.label_3.setFrameShape(QtWidgets.QFrame.NoFrame)
        self.label_3.setFrameShadow(QtWidgets.QFrame.Plain)
        self.label_3.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.label_3.setObjectName("label_3")
        self.horizontalLayout.addWidget(self.label_3)
        self.min_box = QtWidgets.QSpinBox(self.layoutWidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(12)
        self.min_box.setFont(font)
        self.min_box.setMinimum(2)
        self.min_box.setProperty("value", 2)
        self.min_box.setObjectName("min_box")
        self.horizontalLayout.addWidget(self.min_box)
        self.label_4 = QtWidgets.QLabel(self.layoutWidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(12)
        self.label_4.setFont(font)
        self.label_4.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.label_4.setObjectName("label_4")
        self.horizontalLayout.addWidget(self.label_4)
        self.max_box = QtWidgets.QSpinBox(self.layoutWidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(12)
        self.max_box.setFont(font)
        self.max_box.setMinimum(3)
        self.max_box.setMaximum(1000)
        self.max_box.setProperty("value", 200)
        self.max_box.setObjectName("max_box")
        self.horizontalLayout.addWidget(self.max_box)
        self.horizontalLayout.setStretch(0, 1)
        self.horizontalLayout.setStretch(1, 3)
        self.horizontalLayout.setStretch(2, 3)
        self.horizontalLayout.setStretch(3, 3)
        self.horizontalLayout.setStretch(4, 3)
        self.horizontalLayout.setStretch(5, 3)
        self.horizontalLayout.setStretch(6, 3)
        self.verticalLayout.addLayout(self.horizontalLayout)
        # Column-header row for the dataset table below.
        self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
        self.horizontalLayout_2.setObjectName("horizontalLayout_2")
        self.label_9 = QtWidgets.QLabel(self.layoutWidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(14)
        self.label_9.setFont(font)
        self.label_9.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
        self.label_9.setObjectName("label_9")
        self.horizontalLayout_2.addWidget(self.label_9)
        self.label_5 = QtWidgets.QLabel(self.layoutWidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(14)
        self.label_5.setFont(font)
        self.label_5.setText("")
        self.label_5.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
        self.label_5.setObjectName("label_5")
        self.horizontalLayout_2.addWidget(self.label_5)
        self.label_6 = QtWidgets.QLabel(self.layoutWidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(14)
        self.label_6.setFont(font)
        self.label_6.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
        self.label_6.setObjectName("label_6")
        self.horizontalLayout_2.addWidget(self.label_6)
        self.label_7 = QtWidgets.QLabel(self.layoutWidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(14)
        self.label_7.setFont(font)
        self.label_7.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
        self.label_7.setObjectName("label_7")
        self.horizontalLayout_2.addWidget(self.label_7)
        self.label_8 = QtWidgets.QLabel(self.layoutWidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(14)
        self.label_8.setFont(font)
        self.label_8.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
        self.label_8.setObjectName("label_8")
        self.horizontalLayout_2.addWidget(self.label_8)
        self.label_10 = QtWidgets.QLabel(self.layoutWidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(12)
        self.label_10.setFont(font)
        self.label_10.setText("")
        self.label_10.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
        self.label_10.setObjectName("label_10")
        self.horizontalLayout_2.addWidget(self.label_10)
        self.horizontalLayout_2.setStretch(0, 5)
        self.horizontalLayout_2.setStretch(1, 1)
        self.horizontalLayout_2.setStretch(2, 15)
        self.horizontalLayout_2.setStretch(3, 5)
        self.horizontalLayout_2.setStretch(4, 5)
        self.horizontalLayout_2.setStretch(5, 1)
        self.verticalLayout.addLayout(self.horizontalLayout_2)
        # Scrollable dataset list; content is injected at runtime
        # (see main_window.fresh_scroll in main.py).
        self.scrollArea = QtWidgets.QScrollArea(self.layoutWidget)
        font = QtGui.QFont()
        font.setFamily("Courier New")
        font.setPointSize(28)
        self.scrollArea.setFont(font)
        self.scrollArea.setFrameShape(QtWidgets.QFrame.Box)
        self.scrollArea.setFrameShadow(QtWidgets.QFrame.Plain)
        self.scrollArea.setSizeAdjustPolicy(QtWidgets.QAbstractScrollArea.AdjustToContents)
        self.scrollArea.setWidgetResizable(True)
        self.scrollArea.setObjectName("scrollArea")
        self.scrollAreaWidgetContents = QtWidgets.QWidget()
        self.scrollAreaWidgetContents.setGeometry(QtCore.QRect(0, 0, 913, 315))
        self.scrollAreaWidgetContents.setObjectName("scrollAreaWidgetContents")
        self.scrollArea.setWidget(self.scrollAreaWidgetContents)
        self.verticalLayout.addWidget(self.scrollArea)
        # Bottom button row: Quit / Generate.
        self.horizontalLayout_3 = QtWidgets.QHBoxLayout()
        self.horizontalLayout_3.setObjectName("horizontalLayout_3")
        self.Quit = QtWidgets.QPushButton(self.layoutWidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(13)
        self.Quit.setFont(font)
        self.Quit.setObjectName("Quit")
        self.horizontalLayout_3.addWidget(self.Quit)
        self.Generate = QtWidgets.QPushButton(self.layoutWidget)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(13)
        self.Generate.setFont(font)
        self.Generate.setObjectName("Generate")
        self.horizontalLayout_3.addWidget(self.Generate)
        self.verticalLayout.addLayout(self.horizontalLayout_3)
        # Log output pane (stdout/stderr are redirected here at runtime).
        self.textBrowser = QtWidgets.QTextBrowser(self.splitter)
        font = QtGui.QFont()
        font.setFamily("Courier New")
        font.setPointSize(14)
        self.textBrowser.setFont(font)
        self.textBrowser.setObjectName("textBrowser")
        self.gridLayout.addWidget(self.splitter, 0, 0, 1, 1)
        self.retranslateUi(Form)
        QtCore.QMetaObject.connectSlotsByName(Form)
        Form.setTabOrder(self.gap_num, self.min_box)
        Form.setTabOrder(self.min_box, self.max_box)
        Form.setTabOrder(self.max_box, self.Quit)
        Form.setTabOrder(self.Quit, self.Generate)
        Form.setTabOrder(self.Generate, self.scrollArea)
    def retranslateUi(self, Form):
        """Set every user-visible string on the form (single locale)."""
        _translate = QtCore.QCoreApplication.translate
        Form.setWindowTitle(_translate("Form", "SBGC"))
        self.label.setText(_translate("Form", "Sub-bipartite graph creator"))
        self.Deselect.setText(_translate("Form", "Deselect all"))
        self.label_2.setText(_translate("Form", "Gap:"))
        self.label_3.setText(_translate("Form", "Min:"))
        self.label_4.setText(_translate("Form", "Max:"))
        self.label_9.setText(_translate("Form", "  Index"))
        self.label_6.setText(_translate("Form", "Name"))
        self.label_7.setText(_translate("Form", "Nodes"))
        self.label_8.setText(_translate("Form", "Edges"))
        self.Quit.setText(_translate("Form", "Quit"))
        self.Generate.setText(_translate("Form", "Generate"))
|
{"/utils/data_cut.py": ["/utils/os_control.py"], "/utils/data_generate.py": ["/utils/os_control.py"], "/main.py": ["/gui/Ui_design.py", "/utils/crawler.py", "/utils/unpacker.py", "/utils/data_cut.py", "/utils/data_generate.py"], "/utils/crawler.py": ["/utils/os_control.py"]}
|
26,985
|
yujianke100/Sub-Bipartite-Graph-Creator
|
refs/heads/main
|
/utils/os_control.py
|
import os
from shutil import rmtree
def ensure_dir(path):
    """Create `path` (including parents) unless it already exists."""
    if os.path.exists(path):
        return
    os.makedirs(path)
def label_save(path, data):
    """Write one label per line (str()-converted) to `path`."""
    with open(path, 'w') as out:
        out.writelines(str(item) + '\n' for item in data)
def edge_save(path, data):
    """Write each edge pair as "u, v" on its own line at `path`."""
    with open(path, 'w') as out:
        out.writelines(
            str(pair[0]) + ', ' + str(pair[1]) + '\n' for pair in data)
def rmdir(path):
    """Recursively delete `path`, silently ignoring filesystem errors.

    Fixed: the bare `except:` swallowed every exception (including
    KeyboardInterrupt); only OSError (missing path, permissions) is
    ignored now.
    """
    try:
        rmtree(path)
    except OSError:
        pass
def rmfile(path):
    """Delete the file at `path`, silently ignoring a missing file.

    Fixed: the original called the undefined name `remove` (only `os` is
    imported at module level), and the bare `except:` masked the
    resulting NameError -- so the function never deleted anything.
    """
    try:
        os.remove(path)
    except OSError:
        pass
def find_file(dir):
    """Return the first entry of `dir` whose name starts with 'out'.

    The result is built as dir + entry, so `dir` is expected to end with
    a path separator -- presumably '/'; verify against callers.
    Prints a warning and returns the string 'None' when nothing matches.
    """
    for entry in listdir(dir):
        if entry.startswith('out'):
            return dir + entry
    print('not found datas!')
    return 'None'
def listdir(path):
    # Thin wrapper around os.listdir.
    return os.listdir(path)
def makedirs(path):
    # Thin wrapper around os.makedirs (raises if the path already exists).
    os.makedirs(path)
def exists(path):
    # Thin wrapper around os.path.exists.
    return os.path.exists(path)
|
{"/utils/data_cut.py": ["/utils/os_control.py"], "/utils/data_generate.py": ["/utils/os_control.py"], "/main.py": ["/gui/Ui_design.py", "/utils/crawler.py", "/utils/unpacker.py", "/utils/data_cut.py", "/utils/data_generate.py"], "/utils/crawler.py": ["/utils/os_control.py"]}
|
26,986
|
yujianke100/Sub-Bipartite-Graph-Creator
|
refs/heads/main
|
/utils/unpacker.py
|
# -*- coding: utf-8 -*-
# https://blog.csdn.net/luoye7422/article/details/41950937
from tarfile import open
# 数据解压
def unpacker(data):
    """Extract ./output/datas_tar/download.tsv.<data>.tar.bz2 into
    ./output/datas_origin/.

    Fixed: the archive handle is now closed even if extraction raises
    (the original leaked it on error).  Also imports tarfile locally and
    calls tarfile.open explicitly instead of relying on the module-level
    `from tarfile import open`, which shadows the builtin open.
    """
    import tarfile
    archive = tarfile.open(
        './output/datas_tar/download.tsv.{}.tar.bz2'.format(data), 'r:bz2')
    archive.debug = 0
    try:
        for tarinfo in archive:
            # NOTE(review): extract() trusts archive-supplied paths; fine
            # for konect.cc archives, unsafe for untrusted input.
            archive.extract(tarinfo, r'./output/datas_origin/')
    finally:
        archive.close()
|
{"/utils/data_cut.py": ["/utils/os_control.py"], "/utils/data_generate.py": ["/utils/os_control.py"], "/main.py": ["/gui/Ui_design.py", "/utils/crawler.py", "/utils/unpacker.py", "/utils/data_cut.py", "/utils/data_generate.py"], "/utils/crawler.py": ["/utils/os_control.py"]}
|
26,987
|
yujianke100/Sub-Bipartite-Graph-Creator
|
refs/heads/main
|
/utils/data_cut.py
|
# -*- coding: utf-8 -*-
from numpy import genfromtxt, array
from utils.os_control import *
import openpyxl
# 读入数据集(图)
def read_data(graph, root_dir):
    """Load the raw edge list for `graph`, sorted by timestamp column 3.

    Parses the dataset file tab-delimited first; if that fails (the
    resulting array is not 2-D, so the column indexing raises), falls
    back to space-delimited parsing.
    Fixed: narrowed the bare `except:` (which also swallowed
    KeyboardInterrupt/SystemExit) to Exception.
    """
    try:
        data = genfromtxt(find_file(root_dir + graph),
                          dtype=int, delimiter='\t', comments='%')
        data = data[data[:, 3].argsort()]
    except Exception:
        data = genfromtxt(find_file(root_dir + graph),
                          dtype=int, delimiter=' ', comments='%')
        data = data[data[:, 3].argsort()]
    return data
# 数据集(图)的处理,修剪数据集使其满足给定的限制条件
def cut_graph(graph, STEP_NUM, min_len, max_len, root_dir, target_dir):
    """Slice one temporal edge list into connected subgraphs and save them.

    graph      -- dataset sub-directory name (ends with '/').
    STEP_NUM   -- fraction (0..1) of the total time span per time window.
    min_len, max_len -- accepted edge-count range for a kept subgraph.
    root_dir   -- directory containing the raw downloaded edge lists.
    target_dir -- output directory; one .txt edge list per subgraph.

    Returns the number of subgraphs written.
    """
    print('-'*58)
    print('reading data {}...'.format(graph[:-1]), end='')
    data = read_data(graph, root_dir)
    print('finished')
    s_nodes = data[:, 0]
    t_nodes = data[:, 1]
    timestamps = data[:, 3]
    # Partition the (timestamp-sorted) edges into fixed-width time windows.
    time_min, time_max = min(timestamps), max(timestamps)
    time_len = time_max - time_min
    time_step = int(time_len * STEP_NUM)
    data_list_groups = []
    data_list = []
    tmp_time_max = time_step + time_min
    data_len = len(timestamps)
    print('classification datas...', end='')
    for tmp_idx in range(data_len):
        tmp_list = [s_nodes[tmp_idx], t_nodes[tmp_idx]]
        if(timestamps[tmp_idx] <= tmp_time_max):
            # Same window: collect the edge, de-duplicated.
            if(tmp_list not in data_list):
                data_list.append(tmp_list)
        else:
            # Window boundary crossed: keep the finished window if its
            # size is acceptable, then start the next one.
            tmp_time_max += time_step
            if(min_len <= len(data_list) <= max_len):
                data_list_groups.append(data_list)
            data_list = []
            if(timestamps[tmp_idx] <= tmp_time_max):
                if(tmp_list not in data_list):
                    data_list.append(tmp_list)
    # Flush the trailing window.
    if(min_len <= len(data_list) <= max_len):
        data_list_groups.append(data_list)
    target_graph_dir = target_dir + graph
    rmdir(target_graph_dir)
    ensure_dir(target_graph_dir)
    # Split each time window into vertex-connected edge groups; NOTE the
    # loop variable deliberately shadows the `graph` parameter (already
    # consumed above).
    data_list_groups_final = []
    for graph in data_list_groups:
        tmp_edge_list = []
        edge_set_list = []
        for edge in graph:
            s_node, t_node = edge[0], edge[1]
            out_of_graph = True
            for edge_set_idx in range(len(edge_set_list)):
                if(s_node in edge_set_list[edge_set_idx] or t_node in edge_set_list[edge_set_idx]):
                    edge_set_list[edge_set_idx].update(edge)
                    tmp_edge_list[edge_set_idx].append(edge)
                    out_of_graph = False
            if(out_of_graph):
                edge_set_list.append(set(edge))
                tmp_edge_list.append([edge])
        # Repeatedly merge groups sharing a vertex until no pair overlaps.
        while(1):
            len_of_list = len(edge_set_list)
            continue_flag = False
            for set1_idx in range(len_of_list):
                for set2_idx in range(set1_idx+1, len_of_list):
                    if(edge_set_list[set1_idx] & edge_set_list[set2_idx]):
                        edge_set_list[set1_idx] = edge_set_list[set1_idx] | edge_set_list[set2_idx]
                        tmp_edge_list[set1_idx].extend(tmp_edge_list[set2_idx])
                        del edge_set_list[set2_idx]
                        del tmp_edge_list[set2_idx]
                        continue_flag = True
                        break
                if(continue_flag):
                    break
            if(continue_flag):
                continue
            else:
                break
        # Keep connected groups whose size is strictly inside the range.
        for i in tmp_edge_list:
            if(min_len < len(i) < max_len):
                data_list_groups_final.append(i)
    print('finished')
    print('saving datas...', end='')
    data_list_group_len = len(data_list_groups_final)
    for idx_group in range(data_list_group_len):
        with open(target_graph_dir + '{}.txt'.format(idx_group), 'w') as f:
            for nodes in data_list_groups_final[idx_group]:
                f.write(str(nodes[0]) + '\t' + str(nodes[1]) + '\n')
    print('finished')
    return data_list_group_len
def data_cut(graph_box, gap, min_num, max_num, root_dir, target_dir):
    """Rebuild `target_dir` and cut every dataset in `graph_box`.

    Returns the per-dataset counts of generated subgraphs, in order.
    """
    rmdir(target_dir)
    ensure_dir(target_dir)
    return [
        cut_graph(name + '/', gap, min_num, max_num, root_dir, target_dir)
        for name in graph_box
    ]
# 计算图的相关数据:原始节点数,目标节点数,大小。
def cal_graph(graph_name, root_dir):
    """Return (#distinct source vertices, #distinct target vertices,
    #edges) for one raw dataset.

    Tries tab-delimited parsing first; the column extraction inside the
    try doubles as the failure trigger (a 1-D parse raises), falling back
    to space-delimited parsing.
    Fixed: narrowed the bare `except:` to Exception and dropped the
    unused `data = array([])` initializer (both branches assign `data`,
    and a failure in the fallback propagates regardless).
    """
    graph = graph_name + '/'
    try:
        data = genfromtxt(find_file(root_dir + graph),
                          dtype=int, delimiter='\t', comments='%')
        s_nodes = data[:, 0]
        t_nodes = data[:, 1]
    except Exception:
        data = genfromtxt(find_file(root_dir + graph),
                          dtype=int, delimiter=' ', comments='%')
        s_nodes = data[:, 0]
        t_nodes = data[:, 1]
    return len(set(s_nodes)), len(set(t_nodes)), len(data)
# 通过Excel表输出显示数据集(图)的各项指标
def data_cal(graph_box, gap, min_num, max_num, timestamp):
    """Cut the selected datasets and report statistics (console + Excel).

    graph_box -- dataset directory names selected by the user.
    gap, min_num, max_num -- cutting parameters (see cut_graph).
    timestamp -- run identifier used to build the output directory.
    """
    root_dir = './output/datas_origin/'
    ensure_dir('./output/output/')
    ensure_dir('./output/output/{}/'.format(timestamp))
    target_dir = './output/output/{}/datas/'.format(timestamp)
    excel_path = './output/output/{}/dataset_info.xlsx'.format(timestamp)
    graph_num_box = data_cut(graph_box, gap, min_num,
                             max_num, root_dir, target_dir)
    wb = openpyxl.Workbook()
    ws = wb.active
    idx = 1
    # Header row of the Excel summary sheet.
    ws['A{}'.format(idx)] = 'Data'
    ws['B{}'.format(idx)] = 'S Vertices'
    ws['C{}'.format(idx)] = 'T Vertices'
    ws['D{}'.format(idx)] = 'Edges'
    ws['E{}'.format(idx)] = 'Graphs'
    idx += 1
    print()
    print('='*58)
    print('||{:^54}||'.format('Generated dataset statistics'))
    print('||' + '='*54 + '||')
    print('||{:<30}|{:<5}|{:<5}|{:<6}|{:<4}||'.format(
        'Dataset', 'S', 'T', 'E', 'G'))
    # One row per dataset, mirrored to the console table and the sheet.
    for graph, num in zip(graph_box, graph_num_box):
        s_node_len, t_node_len, data_len = cal_graph(graph, root_dir)
        print('||' + '-'*54 + '||')
        print('||{:<30}|{:<5}|{:<5}|{:<6}|{:<4}||'.format(
            graph, s_node_len, t_node_len, data_len, num))
        ws['A{}'.format(idx)] = graph
        ws['B{}'.format(idx)] = s_node_len
        ws['C{}'.format(idx)] = t_node_len
        ws['D{}'.format(idx)] = data_len
        ws['E{}'.format(idx)] = num
        idx += 1
    print('='*58, '\n')
    while(1):
        try:
            # Saving fails while the user has the workbook open in Excel;
            # keep retrying after the user closes it.
            wb.save(excel_path)
            break
        except:
            input('please close the excel and try again!')
|
{"/utils/data_cut.py": ["/utils/os_control.py"], "/utils/data_generate.py": ["/utils/os_control.py"], "/main.py": ["/gui/Ui_design.py", "/utils/crawler.py", "/utils/unpacker.py", "/utils/data_cut.py", "/utils/data_generate.py"], "/utils/crawler.py": ["/utils/os_control.py"]}
|
26,988
|
yujianke100/Sub-Bipartite-Graph-Creator
|
refs/heads/main
|
/utils/data_generate.py
|
# -*- coding: utf-8 -*-
from numpy import genfromtxt, array, zeros
from utils.os_control import *
# 数据生成过程
def data_generate(timestamp):
    """Convert the cut subgraphs of one run into a single labelled dataset.

    Reads ./output/output/<timestamp>/datas/ (one directory per source
    graph, one edge-list file per subgraph) and writes, with globally
    renumbered contiguous 1-based node ids:
      BIPARTITE_A.txt               -- all edges,
      BIPARTITE_graph_indicator.txt -- node -> subgraph index,
      BIPARTITE_graph_labels.txt    -- subgraph -> source-graph index.
    """
    s_path = './output/output/{}/datas/'.format(timestamp)
    t_path = './output/output/{}/data/'.format(timestamp)
    data_name = 'BIPARTITE'
    graph_types = listdir(s_path)
    rmdir(t_path + data_name)
    edge_label = []
    node_label = []
    graph_label = []
    graph_indicator = []
    edges = []
    last_node_idx, graph_idx = 1, 1
    total_edge_num = 0
    fake_edge_num = 0
    for graph in graph_types:
        print('generate datasets of {}...'.format(graph), end='')
        type_idx = graph_types.index(graph)
        files = listdir(s_path + graph)
        for file in files:
            data = genfromtxt(
                s_path + '{}/{}'.format(graph, file), dtype=int, delimiter='\t', comments='%')
            edge_num = len(data)
            total_edge_num += edge_num
            s_nodes_array = data[:, 0]
            t_nodes_array = data[:, 1]
            s_nodes_set = set(s_nodes_array)
            t_nodes_set = set(t_nodes_array)
            s_nodes_list = list(s_nodes_set)
            t_nodes_list = list(t_nodes_set)
            # Renumber: source nodes first, then target nodes, continuing
            # from the previous subgraph's last id (contiguous, 1-based).
            s_nodes = [s_nodes_list.index(
                i) + last_node_idx for i in s_nodes_array]
            t_nodes_start_idx = max(s_nodes) + 1
            t_nodes = [t_nodes_list.index(
                i) + t_nodes_start_idx for i in t_nodes_array]
            data = array([list(i) for i in zip(s_nodes, t_nodes)])
            nodes_set = set(s_nodes)
            nodes_set.update(t_nodes)
            last_node_idx = max(t_nodes) + 1
            for node in set(nodes_set):
                graph_indicator.append(graph_idx)
                # node_label.append(0)
            for i in range(edge_num):
                edges.append([s_nodes[i], t_nodes[i]])
                # edge_label.append(0)
            # NOTE(review): node_matr and node_min are computed but never
            # used below -- presumably leftovers of an abandoned feature.
            nodes_len = len(nodes_set)
            node_matr = zeros([nodes_len, nodes_len])
            node_min = min(s_nodes)
            graph_label.append(type_idx)
            graph_idx += 1
        print('finished')
    ensure_dir(t_path)
    ensure_dir(t_path + '{}/'.format(data_name))
    data_path = t_path + '{}/{}/raw/'.format(data_name, data_name)
    ensure_dir(data_path)
    data_path = data_path + data_name
    # print('total edge num:{}'.format(total_edge_num))
    # label_save(data_path + '_edge_labels.txt', edge_label)
    label_save(data_path + '_graph_labels.txt', graph_label)
    # label_save(data_path + '_node_labels.txt', node_label)
    label_save(data_path + '_graph_indicator.txt', graph_indicator)
    edge_save(data_path + '_A.txt', edges)
    print('All datasets saved!')
    print('*'*58)
# if __name__ == '__main__':
# generate_data()
|
{"/utils/data_cut.py": ["/utils/os_control.py"], "/utils/data_generate.py": ["/utils/os_control.py"], "/main.py": ["/gui/Ui_design.py", "/utils/crawler.py", "/utils/unpacker.py", "/utils/data_cut.py", "/utils/data_generate.py"], "/utils/crawler.py": ["/utils/os_control.py"]}
|
26,989
|
yujianke100/Sub-Bipartite-Graph-Creator
|
refs/heads/main
|
/main.py
|
# -*- coding: utf-8 -*-
import sys
from PyQt5.QtWidgets import QApplication, QHBoxLayout, QVBoxLayout, QWidget, QCheckBox, QSplashScreen, QLabel, QDialog, QMessageBox
from PyQt5.QtGui import QPixmap, QFont, QTextCursor
from PyQt5.QtCore import QObject, pyqtSignal, QEventLoop, Qt, QTimer
from gui.Ui_design import Ui_Form
from utils.crawler import get_datasets, downloader
from utils.unpacker import unpacker
from utils.data_cut import data_cal
from utils.data_generate import data_generate
from time import strftime, localtime
splash_img = './gui/splash.png'
# 实现主界面友好的用户操作及显示功能
class EmittingStream(QObject):
    """File-like Qt object used to redirect stdout/stderr into the GUI."""
    # https://blog.csdn.net/william_munch/article/details/89425038
    textWritten = pyqtSignal(str)  # emitted once per written chunk
    def write(self, text):
        """Emit the text, then briefly pump Qt events so the log pane
        repaints even while the single-threaded pipeline is busy."""
        self.textWritten.emit(str(text))
        # Spin a local event loop for ~10 ms.
        loop = QEventLoop()
        QTimer.singleShot(10, loop.quit)
        loop.exec_()
class main_window(Ui_Form):
    """Main window logic: dataset table, parameters and the run pipeline."""
    def __init__(self, splash):
        super(main_window, self).__init__()
        self.data_list_len = 0  # number of datasets offered in the table
        self.data_num = 0       # datasets rendered so far (progress counter)
        self.splash = splash    # splash screen used for startup progress
    def change_init_status(self, info):
        """Show a startup progress message on the splash screen."""
        self.splash.showMessage(info, Qt.AlignHCenter |
                                Qt.AlignBottom, Qt.black)
    def outputWritten(self, text):
        """Append redirected stdout/stderr text to the log pane."""
        cursor = self.textBrowser.textCursor()
        cursor.movePosition(QTextCursor.End)
        cursor.insertText(text)
        self.textBrowser.setTextCursor(cursor)
        self.textBrowser.ensureCursorVisible()
    def ui_init(self):
        """Wire button signals, size the splitter, redirect stdout/stderr
        into the text browser and populate the dataset table."""
        self.Generate.clicked.connect(self.on_click_generate)
        self.Quit.clicked.connect(self.on_click_quit)
        self.Deselect.clicked.connect(self.on_click_deselect)
        self.splitter.setStretchFactor(0, 2)
        self.splitter.setStretchFactor(1, 1)
        # All prints (including the pipeline's) end up in the log pane.
        sys.stdout = EmittingStream(textWritten=self.outputWritten)
        sys.stderr = EmittingStream(textWritten=self.outputWritten)
        self.fresh_scroll()
    def element_switch(self, flag):
        """Enable (True) or disable (False) the controls around a run."""
        self.Generate.setEnabled(flag)
        self.Quit.setEnabled(flag)
        self.max_box.setEnabled(flag)
        self.min_box.setEnabled(flag)
        self.gap_num.setEnabled(flag)
    # Process the datasets the user selected.
    def run(self, selected_list):
        """Download, unpack, cut and convert every selected dataset."""
        timestamp = strftime('%Y-%m-%d_%H-%M-%S', localtime())
        for i in selected_list:
            print('downloading {}...'.format(i), end='')
            downloader(i)
            print('finished')
        for i in selected_list:
            print('unpacking {}...'.format(i), end='')
            unpacker(i)
            print('finished')
        data_cal(selected_list, self.gap_num.value(),
                 self.min_box.value(), self.max_box.value(), timestamp)
        data_generate(timestamp)
    # 'Generate' clicked: confirm the selection, then start generating.
    def on_click_generate(self):
        # Requires at least two selected datasets.
        if(len(self.selected_idx) <= 1):
            return
        self.element_switch(False)
        selected_list = []
        check_info = ''
        for i in self.selected_idx:
            selected_list.append(self.data_list[i][4])
            check_info += '{}:{}\n'.format(
                self.data_list[i][0], self.data_list[i][1])
        w = QWidget()
        reply = QMessageBox.question(w, 'Check', 'Selected datasets:\n{}'.format(
            check_info[:-1]), QMessageBox.Yes | QMessageBox.No)
        if reply == QMessageBox.No:
            self.element_switch(True)
            return
        self.scrollArea.setEnabled(False)
        self.run(selected_list)
        self.scrollArea.setEnabled(True)
        self.element_switch(True)
    # 'Quit' clicked: stop and exit the application.
    def on_click_quit(self):
        self.element_switch(False)
        sys.exit()
    # 'Deselect all' clicked: clear every checked dataset.
    def on_click_deselect(self):
        for i in range(len(self.check_box)):
            if(self.check_box[i].isChecked()):
                self.check_box[i].setChecked(False)
        self.selected_idx = set()
        self.Generate.setText('Generate')
    def generate_label(self, name, style):
        """Build one styled QLabel cell for the dataset table."""
        tmp_label = QLabel()
        tmp_label.setText(name)
        tmp_label.setAlignment(Qt.AlignVCenter | Qt.AlignLeft)
        tmp_label.setStyleSheet(style)
        return tmp_label
    def fresh_scroll(self):
        """(Re)build the scrollable dataset table from the crawler output."""
        lv = QVBoxLayout()
        lv.setSpacing(0)
        v = QWidget()
        v.setLayout(lv)
        self.check_box = []
        self.selected_idx = set()
        self.data_list = get_datasets(self)
        self.data_list_len = len(self.data_list)
        self.data_num = 0
        for i in self.data_list:
            # Alternate row background colours for readability.
            style = """padding:10px; font-size:28px; font-family:"Times New Roman";"""
            if(self.data_num % 2):
                style += 'background-color:rgb(240,240,240);'
            else:
                style += 'background-color:rgb(220,220,220);'
            lh = QHBoxLayout()
            lh.setSpacing(0)
            btn = QCheckBox('{:<5}'.format(i[0]))
            btn.setStyleSheet(style)
            self.check_box.append(btn)
            lh.addWidget(self.check_box[-1], stretch=1)
            self.check_box[-1].stateChanged.connect(self.check_box_select)
            # https://blog.csdn.net/Nin7a/article/details/104533138
            lh.addWidget(self.generate_label(i[1], style), stretch=3)
            lh.addWidget(self.generate_label(i[2], style), stretch=1)
            lh.addWidget(self.generate_label(i[3], style), stretch=1)
            lv.addLayout(lh)
            self.data_num += 1
            self.change_init_status(
                'Loading datasets...({}/{})'.format(self.data_num, self.data_list_len))
        self.scrollArea.setWidget(v)
    def check_box_select(self):
        """Recompute the selected-index set and the Generate caption."""
        for i in range(len(self.check_box)):
            if(self.check_box[i].isChecked()):
                self.selected_idx.update([i])
            else:
                try:
                    self.selected_idx.remove(i)
                except:
                    pass
        self.Generate.setText('Generate({})'.format(len(self.selected_idx)))
class MySplashScreen(QSplashScreen):
    """Splash screen that swallows mouse clicks -- presumably so clicking
    cannot dismiss it during startup; confirm against QSplashScreen docs."""
    def mousePressEvent(self, event):
        # Deliberately ignore the event (do not call the base handler).
        pass
def main():
    """Show the splash screen, build the main dialog and run the Qt loop."""
    # Splash-screen startup: https://blog.csdn.net/ye281842_/article/details/109637580
    app = QApplication(sys.argv)
    splash = MySplashScreen()
    splash.setPixmap(QPixmap(splash_img))  # set the background image
    splash.setFont(QFont('Times New Roman', 12))
    splash.show()
    app.processEvents()
    Dialog = QDialog()
    ui = main_window(splash)
    ui.setupUi(Dialog)
    ui.ui_init()
    Dialog.resize(1400,1024)
    Dialog.show()
    # Close the splash once the main dialog is visible.
    splash.finish(Dialog)
    splash.deleteLater()
    sys.exit(app.exec_())
if __name__ == '__main__':
    main()
|
{"/utils/data_cut.py": ["/utils/os_control.py"], "/utils/data_generate.py": ["/utils/os_control.py"], "/main.py": ["/gui/Ui_design.py", "/utils/crawler.py", "/utils/unpacker.py", "/utils/data_cut.py", "/utils/data_generate.py"], "/utils/crawler.py": ["/utils/os_control.py"]}
|
26,990
|
yujianke100/Sub-Bipartite-Graph-Creator
|
refs/heads/main
|
/utils/crawler.py
|
# -*- coding: utf-8 -*-
from requests_html import HTMLSession
from time import time
from requests import get
from utils.os_control import *
s_path = './output/datas_tar/'
t_path = './output/datas_origin/'
# 爬虫获取数据集
def get_datasets(ui):
    """Scrape konect.cc for usable datasets and return their metadata.

    ui -- object exposing change_init_status(str) for progress reporting.

    Returns a list of [row_index, display_name, node_count_text,
    edge_count_text, internal_name] for every dataset that is available
    for download, bipartite/undirected, unweighted with multiple edges,
    passed all tests and carries edge timestamps.
    """
    url = 'http://konect.cc/networks/'
    headers = {
        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36 Edg/89.0.774.76'}
    session = HTMLSession()
    ui.change_init_status('Preparing crawler...')
    r = session.get(url=url, headers=headers)
    # Render JavaScript so the full dataset table is present in the DOM.
    r.html.render()
    ui.change_init_status('Reading datasets\' names...')
    data_name = r.html.xpath(
        "//div[@id='page']/table[1]/tbody[1]/tr/td[2]")[1:]
    node_num = r.html.xpath("//div[@id='page']//table/tbody[1]/tr/td[4]")[1:]
    edge_num = r.html.xpath("//div[@id='page']//table/tbody[1]/tr/td[5]")[1:]
    ui.change_init_status('Reading datasets\' availability...')
    avaliable_img = r.html.xpath(
        "//div[@id='page']//table/tbody[1]/tr/td[3]/img[1]")
    ui.change_init_status('Reading datasets\' verifiability...')
    test_img = r.html.xpath(
        "//div[@id='page']//table/tbody[1]/tr/td[3]/img[2]")
    ui.change_init_status('Reading datasets\' type 1...')
    first_data_types_img = r.html.xpath(
        "//div[@id='page']//table/tbody[1]/tr/td[3]/img[3]")
    ui.change_init_status('Reading datasets\' type 2...')
    second_data_types_img = r.html.xpath(
        "//div[@id='page']//table/tbody[1]/tr/td[3]/img[4]")
    # Keep only the datasets that match our requirements.
    data_len = len(avaliable_img)
    data_list = []
    for i in range(data_len):
        if(avaliable_img[i].attrs['title'] == 'Dataset is available for download' and
           first_data_types_img[i].attrs['title'] == 'Bipartite, undirected' and
           second_data_types_img[i].attrs['title'] == 'Unweighted, multiple edges' and
           test_img[i].attrs['title'] == 'Dataset passed all tests'):
            try:
                # The timestamp badge (5th icon) is optional; when absent
                # the xpath lookup raises and the dataset is skipped.
                timestamp = r.html.xpath(
                    "//div[@id='page']//table/tbody[1]/tr[{}]/td[3]/img[5]".format(i+2))[0].attrs['title']
                if(timestamp == 'Edges are annotated with timestamps'):
                    data_sname = list(data_name[i].links)[0][:-1]
                    data_list.append(
                        [i, data_name[i].text, node_num[i].text, edge_num[i].text, data_sname])
            except:
                pass
        ui.change_init_status(
            'Screening of graph...({}/{})'.format(i, data_len))
    return data_list
# https://blog.csdn.net/dqy74568392/article/details/96479370
# 下载文件并显示网速和下载进度
def downloader(data):
    """Download `data`'s archive from konect.cc into ./output/datas_tar/,
    printing progress (%) and transfer speed roughly once per second.

    The download is skipped when the archive is already present.

    Fixed: `temp_size` was never updated after a report, so the printed
    "speed" was the cumulative byte count rather than bytes per second.
    Also hoisted the per-chunk progress computation into the once-a-second
    reporting branch, where its result is actually used.
    """
    if(not exists(s_path)):
        makedirs(s_path)
    if(not exists(t_path)):
        makedirs(t_path)
    data_name = 'download.tsv.{}.tar.bz2'.format(data)
    if(exists(s_path + data_name)):
        return
    url = 'http://konect.cc/files/{}'.format(data_name)
    # Stream the response so large archives are never held in memory.
    with get(url, stream=True) as r, open(s_path + data_name, 'wb') as file:
        total_size = int(r.headers['content-length'])
        content_size = 0
        start_time = time()
        temp_size = 0
        for content in r.iter_content(chunk_size=1024):
            file.write(content)
            # Track the downloaded size.
            content_size += len(content)
            # Report about once per second.
            if time() - start_time > 1:
                start_time = time()
                plan = '{:.4}'.format((content_size / total_size) * 100)
                speed = content_size - temp_size
                temp_size = content_size
                # KB-scale speed
                if 0 <= speed < (1024 ** 2):
                    print(plan, '%', speed / 1024, 'KB/s')
                # MB-scale speed
                elif (1024 ** 2) <= speed < (1024 ** 3):
                    print(plan, '%', speed / (1024 ** 2), 'MB/s')
                # GB-scale speed
                elif (1024 ** 3) <= speed < (1024 ** 4):
                    print(plan, '%', speed / (1024 ** 3), 'GB/s')
                # TB-scale speed
                else:
                    print(plan, '%', speed / (1024 ** 4), 'TB/s')
if __name__ == '__main__':
    # Fixed: the original called get_datasets() without the required `ui`
    # argument and unpacked three values from the single returned list,
    # so this self-test always crashed.
    print('test for getting detasets')

    class _ConsoleUI:
        """Minimal stand-in for the GUI progress reporter."""

        def change_init_status(self, info):
            print(info)

    data_list = get_datasets(_ConsoleUI())
    print(data_list[:3])
|
{"/utils/data_cut.py": ["/utils/os_control.py"], "/utils/data_generate.py": ["/utils/os_control.py"], "/main.py": ["/gui/Ui_design.py", "/utils/crawler.py", "/utils/unpacker.py", "/utils/data_cut.py", "/utils/data_generate.py"], "/utils/crawler.py": ["/utils/os_control.py"]}
|
26,991
|
zhihanyang2022/d4rl_evaluations
|
refs/heads/main
|
/qlearning_dataset_with_mc_return.py
|
import numpy as np
from tqdm import tqdm
from collections import deque
GAMMA = 0.99
def compute_mc_returns(rewards, gamma=None):
    """Compute discounted Monte-Carlo returns for one episode.

    rewards -- per-step rewards in time order.
    gamma   -- discount factor; defaults to the module-level GAMMA.

    Returns a list where entry t equals sum_{k>=t} gamma**(k-t)*rewards[k].
    Generalized: gamma is now an optional parameter (omitting it keeps the
    original behavior of using GAMMA), so callers can discount differently.
    """
    if gamma is None:
        gamma = GAMMA
    mc_return = 0
    mc_returns = deque()  # appendleft keeps results in time order cheaply
    for r in reversed(rewards):
        mc_return = r + gamma * mc_return
        mc_returns.appendleft(mc_return)
    return list(mc_returns)
def qlearning_dataset_with_mc_return(env, dataset=None, terminate_on_end=False, **kwargs):
    """Made minimal changes (to compute MC returns) from the original qlearning_dataset function from D4RL.

    Flattens a D4RL dataset into parallel (s, a, s', r, done) arrays and
    additionally stores the discounted Monte-Carlo return of every kept
    transition under the 'mc_returns' key (discounted with module-level GAMMA).

    Args:
        env: D4RL gym environment; source of the dataset and, for datasets
            without a 'timeouts' field, of _max_episode_steps.
        dataset: optional pre-loaded dataset dict; fetched from env if None.
        terminate_on_end: if False (our use case), transitions at episode
            timeouts are dropped because their next state is unavailable.
        **kwargs: forwarded to env.get_dataset().

    Returns:
        dict of equally-long np.ndarrays: 'observations', 'actions',
        'next_observations', 'rewards', 'terminals', 'mc_returns'.
    """
    if dataset is None:
        dataset = env.get_dataset(**kwargs)
    N = dataset['rewards'].shape[0]
    obs_ = []
    next_obs_ = []
    action_ = []
    reward_ = []
    done_ = []
    mc_returns_ = []
    # The newer version of the dataset adds an explicit
    # timeouts field. Keep old method for backwards compatability.
    use_timeouts = False
    if 'timeouts' in dataset:
        use_timeouts = True
    episode_step = 0
    rewards_of_current_episode = []
    # The author previously used range(N-1), which means that i is up to N-2, the second last index.
    # The problem is that, for the second last transition, both final_timestep and done_bool
    # are false. As a result, the MC returns for the final episode does not get calculated.
    #
    # I changed it so that i goes up to N-1, the last index. It turns out that for the last index timeout
    # is always True. This makes final_timestep true and allows MC returns to be calculated.
    for i in tqdm(range(N)):
        obs = dataset['observations'][i].astype(np.float32)
        if i + 1 > N - 1:  # N - 1 is the last timestep; so here we are asking, is i+1 an invalid index?
            new_obs = np.zeros_like(obs)
            # Reasoning on why this is the correct thing to do:
            # At the very end, there is only one possible scenario:
            # - final_timestep=True, last transition is ignored (so this full of zeros next state is not used)
        else:
            new_obs = dataset['observations'][i+1].astype(np.float32)
        action = dataset['actions'][i].astype(np.float32)
        reward = dataset['rewards'][i].astype(np.float32)
        done_bool = bool(dataset['terminals'][i])
        rewards_of_current_episode.append(reward)
        if use_timeouts:  # Always true for our use case.
            final_timestep = dataset['timeouts'][i]
        else:
            final_timestep = (episode_step == env._max_episode_steps - 1)
        # We are using terminate_on_end=False, so the following if statement is entered
        # whenever final_timestep=True. In this case, we ignore the final transition, because
        # the next state is not available, due to the "bad" design of rlkit.
        if (not terminate_on_end) and final_timestep:
            episode_step = 0
            # The last transition is not actually included in the dataset (no next state), but nevertheless
            # MC returns can consider it with no problem.
            # Essentially, [:-1] deal with the mis-matched length of mc_returns (include last transition) and
            # other stuff (do not include last transition).
            mc_returns = compute_mc_returns(rewards_of_current_episode)[:-1]
            mc_returns_.extend(mc_returns)
            rewards_of_current_episode = []
            continue
        # If we are here, it means that final_timestep=False.
        # The following if statement is entered if final_timestep=False (otherwise the previous if is entered)
        # and done_bool=True. In this case, we don't put a "continue" at the end because the invalid next state
        # will be ignored during bootstrapping with the help of the done flag.
        # Computing MC returns follows the exact same procedure as in the previous if.
        if done_bool or final_timestep:
            episode_step = 0
            mc_returns = compute_mc_returns(rewards_of_current_episode)
            mc_returns_.extend(mc_returns)
            rewards_of_current_episode = []
        obs_.append(obs)
        next_obs_.append(new_obs)
        action_.append(action)
        reward_.append(reward)
        done_.append(done_bool)
        episode_step += 1
    return {
        'observations': np.array(obs_),
        'actions': np.array(action_),
        'next_observations': np.array(next_obs_),
        'rewards': np.array(reward_),
        'terminals': np.array(done_),
        'mc_returns': np.array(mc_returns_)
    }
|
{"/brac/scripts/train_bc.py": ["/my_helper_functions.py"], "/bcq/scripts/run_script.py": ["/my_helper_functions.py"], "/cql/d4rl/examples/cql_mujoco_new.py": ["/my_helper_functions.py", "/qlearning_dataset_with_mc_return.py", "/qlearning_dataset_with_next_action.py"]}
|
26,992
|
zhihanyang2022/d4rl_evaluations
|
refs/heads/main
|
/qlearning_dataset_with_next_action.py
|
import collections
import numpy as np
import gym
import d4rl
Trajectory = collections.namedtuple('Trajectory', 'states actions rewards dones frames')
def _parse_v0(env_id):
    """Split a v0 D4RL dataset into Trajectory namedtuples.

    v0 datasets have no 'timeouts' field, and several of them start
    mid-episode or concatenate two buffers back-to-back, so per-dataset trim
    offsets (trim_first_T) and horizon tweaks are hard-coded below --
    presumably found by inspecting each dataset; TODO confirm against the
    exact dataset versions in use.
    """
    env = gym.make(env_id)
    dataset = env.get_dataset()
    obs, acs, rs, dones =\
        dataset['observations'], dataset['actions'], dataset['rewards'], dataset['terminals']
    def _parse(obs,actions,rewards,dones,trim_first_T,max_episode_steps):
        # Cut the flat arrays into trajectories at each terminal flag or after
        # max_episode_steps transitions, starting from index trim_first_T.
        trajs = []
        start = trim_first_T
        while start < len(dones):
            end = start
            # NOTE(review): the `end != 1000000 - 1` guard presumably stops a
            # trajectory at the 1M-transition buffer boundary -- confirm.
            while end != 1000000 - 1 and end < len(dones) - 1 and \
                    (not dones[end] and end - start + 1 < max_episode_steps):
                end += 1
            if dones[end]:
                # the trajectory ends normally.
                # since the next state will not be (should not be, actually) used by any algorithms,
                # we add null states (zero-states) at the end.
                traj = Trajectory(
                    states = np.concatenate([obs[start:end+1],np.zeros_like(obs[0])[None]],axis=0),
                    actions = actions[start:end+1],
                    rewards = rewards[start:end+1],
                    dones = dones[start:end+1].astype(np.bool_),
                    frames = None,
                )
                assert np.all(traj.dones[:-1] == False) and traj.dones[-1]
            else:
                # episodes end unintentionally (terminate due to timeout, cut-off when concateante two trajectories, or etc).
                # since the next-state is not available, it drops the last action.
                traj = Trajectory(
                    states = obs[start:end+1],
                    actions = actions[start:end],
                    rewards = rewards[start:end],
                    dones = dones[start:end].astype(np.bool_),
                    frames = None,
                )
                assert np.all(traj.dones == False)
            if len(traj.states) > 1: # some trajectories are extremely short in -medium-replay dataset (due to unexpected timeout caused by RLKIT); https://github.com/rail-berkeley/d4rl/issues/86#issuecomment-778566671
                trajs.append(traj)
            start = end + 1
        return trajs
    if env_id == 'halfcheetah-medium-replay-v0':
        trajs = _parse(obs,acs,rs,dones,0,env._max_episode_steps)
    elif env_id == 'halfcheetah-medium-v0':
        trajs = _parse(obs,acs,rs,dones,899,env._max_episode_steps-1) # why env._max_episode_stpes - 1? it is questionable, but it looks a valid thing to do.
    elif env_id == 'halfcheetah-expert-v0':
        trajs = _parse(obs,acs,rs,dones,996,env._max_episode_steps-1)
    elif env_id == 'halfcheetah-medium-expert-v0':
        trajs = _parse(obs[:1000000],acs[:1000000],rs[:1000000],dones[:1000000],899,env._max_episode_steps-1) + \
            _parse(obs[1000000:],acs[1000000:],rs[1000000:],dones[1000000:],996,env._max_episode_steps-1)
    elif env_id == 'hopper-medium-v0':
        trajs = _parse(obs,acs,rs,dones,211,env._max_episode_steps)
    elif env_id == 'hopper-expert-v0':
        trajs = _parse(obs,acs,rs,dones,309,env._max_episode_steps-1)
    elif env_id == 'hopper-medium-expert-v0': # actually, expert + mixed
        trajs = _parse(obs[:1000000],acs[:1000000],rs[:1000000],dones[:1000000],309,env._max_episode_steps-1) + \
            _parse(obs[1000000:],acs[1000000:],rs[1000000:],dones[1000000:],0,env._max_episode_steps-1)
    elif env_id == 'walker2d-medium-v0':
        trajs = _parse(obs,acs,rs,dones,644,env._max_episode_steps)
    elif env_id == 'walker2d-expert-v0':
        trajs = _parse(obs,acs,rs,dones,487,env._max_episode_steps-1)
    elif env_id == 'walker2d-medium-expert-v0': # actually, expert + mixed
        trajs = _parse(obs[:1000000],acs[:1000000],rs[:1000000],dones[:1000000],644,env._max_episode_steps) + \
            _parse(obs[1000000:],acs[1000000:],rs[1000000:],dones[1000000:],487,env._max_episode_steps-1)
    elif env_id in ['halfcheetah-random-v0', 'walker2d-random-v0', 'hopper-random-v0', 'walker2d-medium-replay-v0', 'hopper-medium-replay-v0']:
        trajs = _parse(obs,acs,rs,dones,0,env._max_episode_steps-1)
    elif env_id in ['pen-expert-v0', 'hammer-expert-v0', 'door-expert-v0', 'relocate-expert-v0']:
        trajs = _parse(obs,acs,rs,dones,0,env._max_episode_steps)
    elif env_id in ['door-human-v0','relocate-human-v0','hammer-human-v0']:
        trajs = _parse(obs,acs,rs,dones,0,1000)
        for traj in trajs:
            traj.dones[:] = False # this is philosophical decision; since its original env does not terminate, so 'done' in the human data does not meaning anything. I regard this information is given only as a trajectory separator.
    elif env_id in ['door-cloned-v0','relocate-cloned-v0','hammer-cloned-v0']:
        trajs = _parse(obs[:500000],acs[:500000],rs[:500000],dones[:500000],0,1000) + \
            _parse(obs[500000:],acs[500000:],rs[500000:],dones[500000:],0,env._max_episode_steps)
        for traj in trajs:
            traj.dones[:] = False # this is philosophical decision; since its original env does not terminate, so 'done' in the human data does not meaning anything. I regard this information is given only as a trajectory separator.
    elif env_id in ['pen-human-v0']:
        trajs = _parse(obs,acs,rs,np.zeros_like(dones),0,200)
        for traj in trajs:
            traj.dones[:] = False
    elif env_id in ['pen-cloned-v0']:
        trajs = _parse(obs[:250000],acs[:250000],rs[:250000],dones[:250000],0,200) + \
            _parse(obs[250000:],acs[250000:],rs[250000:],dones[250000:],0,env._max_episode_steps)
    else:
        trajs = _parse(obs,acs,rs,dones,0,env._max_episode_steps)
    return trajs
def parse_v2(env_id, drop_trailings=False):
    """Split a v2 D4RL dataset into Trajectory tuples using the explicit
    'timeouts' field.

    Trajectories cut off by a timeout (or by the end of the dataset, unless
    drop_trailings=True) lose their final action/reward because the next
    state is unavailable; trajectories that end in a true terminal get a
    zero "null" state appended so states[t + 1] stays a valid index.

    Fix: replaced ``np.bool`` (deprecated since NumPy 1.20, removed in 1.24,
    so it raises AttributeError on modern NumPy) with ``np.bool_``, matching
    ``_parse_v0`` above.
    """
    ## Parse the dataset into set of trajectories
    dataset = gym.make(env_id).get_dataset()
    obs, actions, rewards, terminals, timeouts = \
        dataset['observations'], \
        dataset['actions'], \
        dataset['rewards'], \
        dataset['terminals'], \
        dataset['timeouts']
    assert len(obs) == len(actions) == len(rewards) == len(terminals) == len(timeouts)
    N = len(obs)
    trajs = []
    start = 0
    while start < N:
        end = start
        while not (terminals[end] or timeouts[end]) and end < N - 1:
            end += 1
        if timeouts[end] or (end == N - 1 and not drop_trailings):
            # the trajectory ends due to some external cut-offs
            # since the next-state is not available, it drops the last action.
            traj = Trajectory(
                states=obs[start:end + 1],
                actions=actions[start:end],
                rewards=rewards[start:end],
                dones=terminals[start:end].astype(np.bool_),
                frames=None,
            )
            assert np.all(traj.dones == False)
        elif terminals[end]:
            # the trajectory ends normally.
            # since the next state will not be (should not be, actually) used by any algorithms,
            # we add null states (zero-states) at the end.
            traj = Trajectory(
                states=np.concatenate([obs[start:end + 1], np.zeros_like(obs[0])[None]], axis=0),
                actions=actions[start:end + 1],
                rewards=rewards[start:end + 1],
                dones=terminals[start:end + 1].astype(np.bool_),
                frames=None,
            )
            assert np.all(traj.dones[:-1] == False) and traj.dones[-1]
        elif end == N - 1 and drop_trailings:
            break
        else:
            assert False
        if len(traj.states) > 1:  # some trajectories are extremely short in -medium-replay dataset (due to unexpected timeout caused by RLKIT); https://github.com/rail-berkeley/d4rl/issues/86#issuecomment-778566671
            trajs.append(traj)
        start = end + 1
    return trajs
def parse_S_A_R_D_NS_from_trajs(trajs):
    """Flatten trajectories into parallel (s, a, r, done, s') transition lists.

    Each trajectory contributes len(traj.rewards) transitions; the next state
    for step t is traj.states[t + 1] (trajectories carry one extra state).
    """
    states, actions, rewards, dones, next_states = [], [], [], [], []
    for traj in trajs:
        horizon = len(traj.rewards)
        states.extend(traj.states[:horizon])
        actions.extend(traj.actions[:horizon])
        rewards.extend(traj.rewards[:horizon])
        dones.extend(traj.dones[:horizon])
        next_states.extend(traj.states[1:horizon + 1])
    return states, actions, rewards, dones, next_states
def parse_S_A_R_D_NS_NA_from_trajs(trajs):
    """Flatten trajectories into (s, a, r, done, s', a') transition lists.

    The final step of a trajectory gets special treatment: if it ends with
    done=True, a zero next action is stored (never bootstrapped thanks to the
    done flag); otherwise the next action is unknown and the step is dropped.
    """
    states, actions, rewards, dones, next_states, next_actions = [], [], [], [], [], []
    action_dim = len(trajs[0].actions[0])
    for traj in trajs:
        horizon = len(traj.rewards)
        for step in range(horizon):
            is_last = (step == horizon - 1)
            if is_last and not traj.dones[step]:
                break  # next action unavailable for a non-terminal final step
            if is_last:
                next_actions.append(np.zeros((action_dim, )))
            else:
                next_actions.append(traj.actions[step + 1])
            states.append(traj.states[step])
            actions.append(traj.actions[step])
            rewards.append(traj.rewards[step])
            dones.append(traj.dones[step])
            next_states.append(traj.states[step + 1])
    return states, actions, rewards, dones, next_states, next_actions
def qlearning_dataset_wonjoon(env_id):
    """Flat (s, a, r, done, s') arrays built with Wonjoon's v0 trajectory parser."""
    obs, acts, rews, terms, next_obs = parse_S_A_R_D_NS_from_trajs(_parse_v0(env_id))
    fields = {
        'observations': obs,
        'actions': acts,
        'rewards': rews,
        'terminals': terms,
        'next_observations': next_obs,
    }
    return {key: np.array(val) for key, val in fields.items()}
def qlearning_dataset_with_next_action_v0(env_id):
    """Like qlearning_dataset_wonjoon, plus a 'next_actions' field (v0 parser)."""
    keys = ('observations', 'actions', 'rewards', 'terminals',
            'next_observations', 'next_actions')
    fields = parse_S_A_R_D_NS_NA_from_trajs(_parse_v0(env_id))
    return {key: np.array(val) for key, val in zip(keys, fields)}
def qlearning_dataset_with_next_action_v2(env_id):
    """Like qlearning_dataset_with_next_action_v0, but using the v2 parser."""
    keys = ('observations', 'actions', 'rewards', 'terminals',
            'next_observations', 'next_actions')
    fields = parse_S_A_R_D_NS_NA_from_trajs(parse_v2(env_id))
    return {key: np.array(val) for key, val in zip(keys, fields)}
|
{"/brac/scripts/train_bc.py": ["/my_helper_functions.py"], "/bcq/scripts/run_script.py": ["/my_helper_functions.py"], "/cql/d4rl/examples/cql_mujoco_new.py": ["/my_helper_functions.py", "/qlearning_dataset_with_mc_return.py", "/qlearning_dataset_with_next_action.py"]}
|
26,993
|
zhihanyang2022/d4rl_evaluations
|
refs/heads/main
|
/my_helper_functions.py
|
import os
import numpy as np
import gym
import d4rl
def get_dataset_size(dataset_name: str) -> int:
    """Number of transitions in the D4RL dataset registered as *dataset_name*."""
    dataset = gym.make(dataset_name).get_dataset()
    return len(dataset['rewards'])
def get_log_dir(
    base_dir: str,
    algo_dir: str,
    env_dir: str,
    seed_dir: int
) -> str:
    """Build the nested log directory path <base>/<algo>/<env>/<seed>."""
    parts = (base_dir, algo_dir, env_dir, str(seed_dir))
    return os.path.join(*parts)
def get_agent_dir_for_brac(agent_name: str, value_penalty: int) -> str:
    """Directory name for a BRAC run: '<agent>_pr' for policy regularization
    only (value_penalty == 0), '<agent>_vp' when the value penalty is on."""
    suffix = 'pr' if value_penalty == 0 else 'vp'
    return f'{agent_name}_{suffix}'
|
{"/brac/scripts/train_bc.py": ["/my_helper_functions.py"], "/bcq/scripts/run_script.py": ["/my_helper_functions.py"], "/cql/d4rl/examples/cql_mujoco_new.py": ["/my_helper_functions.py", "/qlearning_dataset_with_mc_return.py", "/qlearning_dataset_with_next_action.py"]}
|
26,994
|
zhihanyang2022/d4rl_evaluations
|
refs/heads/main
|
/brac/scripts/train_bc.py
|
# coding=utf-8
# Copyright 2020 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Offline training binary."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
sys.path.append('brac')
sys.path.append('.')
import my_helper_functions as mhf # added/modified by Zhihan
import os
from absl import app
from absl import flags
from absl import logging
import gin
import tensorflow as tf0
import tensorflow.compat.v1 as tf
from behavior_regularized_offline_rl.brac import agents
from behavior_regularized_offline_rl.brac import train_eval_offline
from behavior_regularized_offline_rl.brac import utils
import d4rl
tf0.compat.v1.enable_v2_behavior()
# =========== parse arguments ==========
# All irrelevant arguments have been commented out.
# agent info
# flags.DEFINE_string('agent_name', 'brac_primal', 'agent name.')
# flags.DEFINE_integer('value_penalty', 0, '')
# flags.DEFINE_float('alpha', 1.0, '')
# env info
flags.DEFINE_string('env_name', 'halfcheetah-random-v0', 'env name.')
flags.DEFINE_integer('seed', 0, 'random seed, mainly for training samples.')
# training info
flags.DEFINE_integer('total_train_steps', int(5e5), '') # 500K grad steps; eval_freq is default 5000
flags.DEFINE_integer('n_eval_episodes', 10, '')
# logging info
flags.DEFINE_string('root_dir', 'results', '') # added/modified by Zhihan
flags.DEFINE_multi_string('gin_file', None, 'Paths to the gin-config files.')
flags.DEFINE_multi_string('gin_bindings', None, 'Gin binding parameters.')
# flags.DEFINE_string('sub_dir', 'auto', '')
# flags.DEFINE_integer('n_train', int(1e6), '')
# flags.DEFINE_integer('save_freq', 1000, '')
FLAGS = flags.FLAGS
# =====================================
def main(_):
    """Run offline behavioral-cloning training for one (env, seed) pair.

    Results are written under <root_dir>/BC/<env_name>/<seed>; checkpoint
    saving is effectively disabled by setting save_freq past the final step.
    """
    log_dir = os.path.join(
        FLAGS.root_dir,
        'BC',
        FLAGS.env_name,
        str(FLAGS.seed),
    )
    network_arch = ((200, 200),)
    optimizer_params = (('adam', 5e-4),)
    utils.maybe_makedirs(log_dir)
    train_eval_offline.train_eval_offline(
        log_dir=log_dir,
        data_file=None,
        agent_module=agents.AGENT_MODULES_DICT['bc'],
        env_name=FLAGS.env_name,
        n_train=mhf.get_dataset_size(FLAGS.env_name),  # train on the full dataset
        total_train_steps=FLAGS.total_train_steps,
        n_eval_episodes=FLAGS.n_eval_episodes,
        model_params=network_arch,
        optimizers=optimizer_params,
        save_freq=FLAGS.total_train_steps + 100  # > total steps => no checkpoints saved
    )
if __name__ == '__main__':
    # Entry point: delegate flag parsing and dispatch to absl's app.run.
    app.run(main)
|
{"/brac/scripts/train_bc.py": ["/my_helper_functions.py"], "/bcq/scripts/run_script.py": ["/my_helper_functions.py"], "/cql/d4rl/examples/cql_mujoco_new.py": ["/my_helper_functions.py", "/qlearning_dataset_with_mc_return.py", "/qlearning_dataset_with_next_action.py"]}
|
26,995
|
zhihanyang2022/d4rl_evaluations
|
refs/heads/main
|
/bcq/scripts/run_script.py
|
import argparse
import gym
import numpy as np
import os
import torch
import d4rl
import uuid
import json
import sys # added/modified by Zhihan
sys.path.append("bcq/")
import continuous_bcq.BCQ
import continuous_bcq.DDPG as DDPG
import continuous_bcq.utils as utils
sys.path.append(".")
import my_helper_functions as mhf
# Handles interactions with the environment, i.e. train behavioral or generate buffer
def interact_with_environment(env, state_dim, action_dim, max_action, device, args):
    """Collect experience online with a DDPG agent.

    Two mutually exclusive modes selected via *args*:
      - train_behavioral: trains DDPG from scratch (uniform random actions for
        the first start_timesteps steps), evaluating and checkpointing every
        eval_freq steps.
      - generate_buffer: loads a trained behavioral policy and fills a replay
        buffer with noisy/random actions, saving it to ./buffers at the end.
    """
    # For saving files
    setting = f"{args.env_name}_{args.seed}"
    buffer_name = f"{args.buffer_name}_{setting}"
    # Initialize and load policy
    policy = DDPG.DDPG(state_dim, action_dim, max_action, device)#, args.discount, args.tau)
    if args.generate_buffer: policy.load(f"./models/behavioral_{setting}")
    # Initialize buffer
    replay_buffer = utils.ReplayBuffer(state_dim, action_dim, device)
    evaluations = []
    state, done = env.reset(), False
    episode_reward = 0
    episode_timesteps = 0
    episode_num = 0
    # Interact with the environment for max_timesteps
    for t in range(int(args.max_timesteps)):
        episode_timesteps += 1
        # Select action with noise
        if (
            (args.generate_buffer and np.random.uniform(0, 1) < args.rand_action_p) or
            (args.train_behavioral and t < args.start_timesteps)
        ):
            action = env.action_space.sample()
        else:
            action = (
                policy.select_action(np.array(state))
                + np.random.normal(0, max_action * args.gaussian_std, size=action_dim)
            ).clip(-max_action, max_action)
        # Perform action
        next_state, reward, done, _ = env.step(action)
        # Timeout terminations are not true terminals: store done=0 for them.
        done_bool = float(done) if episode_timesteps < env._max_episode_steps else 0
        # Store data in replay buffer
        replay_buffer.add(state, action, next_state, reward, done_bool)
        state = next_state
        episode_reward += reward
        # Train agent after collecting sufficient data
        if args.train_behavioral and t >= args.start_timesteps:
            policy.train(replay_buffer, args.batch_size)
        if done:
            # +1 to account for 0 indexing. +0 on ep_timesteps since it will increment +1 even if done=True
            print(f"Total T: {t+1} Episode Num: {episode_num+1} Episode T: {episode_timesteps} Reward: {episode_reward:.3f}")
            # Reset environment
            state, done = env.reset(), False
            episode_reward = 0
            episode_timesteps = 0
            episode_num += 1
        # Evaluate episode
        if args.train_behavioral and (t + 1) % args.eval_freq == 0:
            evaluations.append(eval_policy(policy, args.env_name, args.seed))
            np.save(f"./results/behavioral_{setting}", evaluations)
            policy.save(f"./models/behavioral_{setting}")
    # Save final policy
    if args.train_behavioral:
        policy.save(f"./models/behavioral_{setting}")
    # Save final buffer and performance
    else:
        evaluations.append(eval_policy(policy, args.env_name, args.seed))
        np.save(f"./results/buffer_performance_{setting}", evaluations)
        replay_buffer.save(f"./buffers/{buffer_name}")
# Trains BCQ offline
def train_BCQ(env, state_dim, action_dim, max_action, device, output_dir, args):
    """Train BCQ offline on the D4RL dataset attached to *env*.

    Loads the whole dataset into a replay buffer (dropping transitions at
    episode timeouts, whose next state belongs to a different episode), then
    alternates eval_freq gradient steps with an evaluation; the evaluation
    scores are appended to <output_dir>/progress.npy after every round.

    Cleanup: removed dead locals (`done`, `episode_num`, and the unused
    `pol_vals` assignment) from the original.
    """
    # For saving files
    setting = f"{args.env_name}_{args.seed}"
    buffer_name = f"{args.buffer_name}_{setting}"
    # Initialize policy
    policy = continuous_bcq.BCQ.BCQ(state_dim, action_dim, max_action, device, args.discount, args.tau, args.lmbda, args.phi)
    # Load buffer
    dataset = env.get_dataset()
    N = dataset['rewards'].shape[0]
    replay_buffer = utils.ReplayBuffer(state_dim, action_dim, device, max_size=N)  # added/modified by Zhihan
    print(f'Loading buffer with size {N}!')
    episode_step = 0
    for i in range(N-1):
        obs = dataset['observations'][i]
        new_obs = dataset['observations'][i+1]
        action = dataset['actions'][i]
        reward = dataset['rewards'][i]
        done_bool = bool(dataset['terminals'][i])
        # Don't apply terminals on the last step of an episode
        if episode_step == env._max_episode_steps - 1:
            episode_step = 0
            continue
        if done_bool:
            episode_step = 0
        replay_buffer.add(obs, action, new_obs, reward, done_bool)
        episode_step += 1
    print('Loaded buffer')
    #replay_buffer.load(f"./buffers/{buffer_name}")
    evaluations = []
    training_iters = 0
    while training_iters < args.max_timesteps:
        print('Num grad steps this epoch:', int(args.eval_freq))
        policy.train(replay_buffer, iterations=int(args.eval_freq), batch_size=args.batch_size)
        evaluations.append(eval_policy(policy, args.env_name, args.seed))
        npy_path = os.path.join(output_dir, "progress.npy")
        np.save(npy_path, evaluations)  # added/modified by Zhihan
        # for example, npy_path could be ./results/BCQ/halfcheetah-random-v1/0/progress.npy
        training_iters += args.eval_freq
        print(f"Num grad steps done: {int(training_iters)} / {int(args.max_timesteps)} = {round(training_iters / args.max_timesteps * 100, 1)} %")
# Runs policy for X episodes and returns average reward
# A fixed seed is used for the eval environment
def eval_policy(policy, env_name, seed, eval_episodes=10):
    """Average undiscounted return of *policy* over eval_episodes rollouts.

    A fresh environment seeded with seed + 100 is used so evaluation rollouts
    never share randomness with training.
    """
    eval_env = gym.make(env_name)
    eval_env.seed(seed + 100)
    total_reward = 0.
    for _ in range(eval_episodes):
        state, done = eval_env.reset(), False
        while not done:
            chosen = policy.select_action(np.array(state))
            state, reward, done, _ = eval_env.step(chosen)
            total_reward += reward
    avg_reward = total_reward / eval_episodes
    print("---------------------------------------")
    print(f"Evaluation over {eval_episodes} episodes: {avg_reward:.3f}")
    print("---------------------------------------")
    return avg_reward
if __name__ == "__main__":
    # Command-line interface for the three run modes: train behavioral DDPG,
    # generate a buffer from it, or (default) train BCQ offline on D4RL data.
    parser = argparse.ArgumentParser()
    parser.add_argument("--env_name", default="halfcheetah-random-v0")  # OpenAI gym environment name
    parser.add_argument("--seed", default=0, type=int)              # Sets Gym, PyTorch and Numpy seeds
    parser.add_argument("--buffer_name", default="Robust")          # Prepends name to filename
    parser.add_argument("--eval_freq", default=5e3, type=float)     # How often (time steps) we evaluate
    parser.add_argument("--max_timesteps", default=5e5, type=int)   # Max time steps to run environment or train for (this defines buffer size)
    parser.add_argument("--start_timesteps", default=25e3, type=int)# Time steps initial random policy is used before training behavioral
    parser.add_argument("--rand_action_p", default=0.3, type=float) # Probability of selecting random action during batch generation
    parser.add_argument("--gaussian_std", default=0.3, type=float)  # Std of Gaussian exploration noise (Set to 0.1 if DDPG trains poorly)
    parser.add_argument("--batch_size", default=100, type=int)      # Mini batch size for networks
    parser.add_argument("--discount", default=0.99)                 # Discount factor
    parser.add_argument("--tau", default=0.005)                     # Target network update rate
    parser.add_argument("--lmbda", default=0.75)                    # Weighting for clipped double Q-learning in BCQ
    parser.add_argument("--phi", default=0.05)                      # Max perturbation hyper-parameter for BCQ
    parser.add_argument("--train_behavioral", action="store_true")  # If true, train behavioral (DDPG); warning: store_true means the default is false
    parser.add_argument("--generate_buffer", action="store_true")   # If true, generate buffer; warning: store_true means the default is false
    parser.add_argument("--output_dir", default="results")
    args = parser.parse_args()
    # NOTE(review): hard-coded user-specific dataset path -- confirm/adjust
    # before running on another machine.
    d4rl.set_dataset_path('/home/yangz2/.d4rl/datasets')  # added/modified by Zhihan
    print("---------------------------------------")
    if args.train_behavioral:
        print(f"Setting: Training behavioral, Env: {args.env_name}, Seed: {args.seed}")
    elif args.generate_buffer:
        print(f"Setting: Generating buffer, Env: {args.env_name}, Seed: {args.seed}")
    else:
        print(f"Setting: Training BCQ, Env: {args.env_name}, Seed: {args.seed}")
    print("---------------------------------------")
    # ========== construct the directory in which the npy file and the config file will be saved ==========
    # for example, results_dir could be ./results/BCQ/halfcheetah-random-v1/0
    results_dir = mhf.get_log_dir(
        base_dir=args.output_dir,
        algo_dir='BCQ',
        env_dir=args.env_name,
        seed_dir=args.seed
    )
    os.makedirs(results_dir, exist_ok=True)  # will overwrite
    # =====================================================================================================
    # Record the run configuration alongside the results.
    with open(os.path.join(results_dir, 'params.json'), 'w') as params_file:
        json.dump({
            'env_name': args.env_name,
            'seed': args.seed,
        }, params_file)
    if args.train_behavioral and args.generate_buffer:
        print("Train_behavioral and generate_buffer cannot both be true.")
        exit()
    # We will create these directories ourselves if necessary.
    # if not os.path.exists("./results"):
    #     os.makedirs("./results")
    #
    # if not os.path.exists("./models"):
    #     os.makedirs("./models")
    #
    # if not os.path.exists("./buffers"):
    #     os.makedirs("./buffers")
    env = gym.make(args.env_name)
    env.seed(args.seed)
    torch.manual_seed(args.seed)
    np.random.seed(args.seed)
    state_dim = env.observation_space.shape[0]
    action_dim = env.action_space.shape[0]
    max_action = float(env.action_space.high[0])
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    if args.train_behavioral or args.generate_buffer:
        interact_with_environment(env, state_dim, action_dim, max_action, device, args)
    else:
        train_BCQ(env, state_dim, action_dim, max_action, device, results_dir, args)
|
{"/brac/scripts/train_bc.py": ["/my_helper_functions.py"], "/bcq/scripts/run_script.py": ["/my_helper_functions.py"], "/cql/d4rl/examples/cql_mujoco_new.py": ["/my_helper_functions.py", "/qlearning_dataset_with_mc_return.py", "/qlearning_dataset_with_next_action.py"]}
|
26,996
|
zhihanyang2022/d4rl_evaluations
|
refs/heads/main
|
/cql/d4rl/examples/cql_mujoco_new.py
|
import sys
sys.path.append('.')
import my_helper_functions as mhf
from qlearning_dataset_with_mc_return import qlearning_dataset_with_mc_return
from qlearning_dataset_with_next_action import qlearning_dataset_wonjoon, qlearning_dataset_with_next_action_v0, qlearning_dataset_with_next_action_v2
sys.path.append('cql/d4rl')
import rlkit.torch.pytorch_util as ptu
from rlkit.data_management.env_replay_buffer import EnvReplayBuffer
from rlkit.data_management.env_replay_buffer_with_return import EnvReplayBufferWithReturn
from rlkit.data_management.env_replay_buffer_with_next_action import EnvReplayBufferWithNextAction
from rlkit.envs.wrappers import NormalizedBoxEnv
from rlkit.launchers.launcher_util import setup_logger
from rlkit.samplers.data_collector import MdpPathCollector, CustomMDPPathCollector
from rlkit.torch.sac.policies import TanhGaussianPolicy, MakeDeterministic
from rlkit.torch.sac.cql import CQLTrainer
from rlkit.torch.networks import FlattenMlp
from rlkit.torch.torch_rl_algorithm import TorchBatchRLAlgorithm
import argparse, os
import numpy as np
import torch
import h5py
import d4rl, gym
import shutil
def load_hdf5(dataset, replay_buffer):
    """Bulk-load a qlearning-style dataset dict into an rlkit replay buffer.

    Writes the buffer's private arrays directly; rewards and terminals are
    reshaped to column vectors (N, 1), and _size/_top mark the buffer full.
    """
    size = dataset['terminals'].shape[0]
    replay_buffer._observations = dataset['observations']
    replay_buffer._next_obs = dataset['next_observations']
    replay_buffer._actions = dataset['actions']
    replay_buffer._rewards = np.expand_dims(np.squeeze(dataset['rewards']), 1)
    replay_buffer._terminals = np.expand_dims(np.squeeze(dataset['terminals']), 1)
    replay_buffer._size = size
    print('Number of terminals on: ', replay_buffer._terminals.sum())
    replay_buffer._top = size
def load_hdf5_with_mc_return(dataset, replay_buffer):
    """Like load_hdf5, but also copies per-transition Monte-Carlo returns
    (key 'mc_returns', stored as a column vector in _mc_returns)."""
    size = dataset['terminals'].shape[0]
    replay_buffer._observations = dataset['observations']
    replay_buffer._next_obs = dataset['next_observations']
    replay_buffer._actions = dataset['actions']
    as_column = lambda key: np.expand_dims(np.squeeze(dataset[key]), 1)
    replay_buffer._rewards = as_column('rewards')
    replay_buffer._terminals = as_column('terminals')
    replay_buffer._mc_returns = as_column('mc_returns')
    replay_buffer._size = size
    print('Number of terminals on: ', replay_buffer._terminals.sum())
    replay_buffer._top = size
def load_hdf5_with_next_action(dataset, replay_buffer):
    """Like load_hdf5, but also copies the 'next_actions' array (stored
    as-is in _next_actions, same shape as actions)."""
    size = dataset['terminals'].shape[0]
    replay_buffer._observations = dataset['observations']
    replay_buffer._next_obs = dataset['next_observations']
    replay_buffer._actions = dataset['actions']
    replay_buffer._rewards = np.expand_dims(np.squeeze(dataset['rewards']), 1)
    replay_buffer._terminals = np.expand_dims(np.squeeze(dataset['terminals']), 1)
    replay_buffer._next_actions = dataset['next_actions']
    replay_buffer._size = size
    print('Number of terminals on: ', replay_buffer._terminals.sum())
    replay_buffer._top = size
def experiment(variant):
    """Build the CQL networks, load the selected offline dataset variant,
    train, and save the resulting policy/Q networks.

    The dataset section below chooses between: SIL data with MC returns
    (use_sil), "CQL beta" data with next actions (cql_beta, v0/v2 parser
    selected by the env-name suffix), the original CQL data pipeline
    (cql_original), and Wonjoon's v0 parser as the fallback.  Trained
    networks are written to variant['model_save_dir'].
    """
    eval_env = gym.make(variant['env_name'])
    expl_env = eval_env
    obs_dim = expl_env.observation_space.low.size
    action_dim = eval_env.action_space.low.size
    M = variant['layer_size']
    qf1 = FlattenMlp(
        input_size=obs_dim + action_dim,
        output_size=1,
        hidden_sizes=[M, M, M],
    )
    qf2 = FlattenMlp(
        input_size=obs_dim + action_dim,
        output_size=1,
        hidden_sizes=[M, M, M],
    )
    target_qf1 = FlattenMlp(
        input_size=obs_dim + action_dim,
        output_size=1,
        hidden_sizes=[M, M, M],
    )
    target_qf2 = FlattenMlp(
        input_size=obs_dim + action_dim,
        output_size=1,
        hidden_sizes=[M, M, M],
    )
    policy = TanhGaussianPolicy(
        obs_dim=obs_dim,
        action_dim=action_dim,
        hidden_sizes=[M, M, M],
    )
    eval_policy = MakeDeterministic(policy)
    eval_path_collector = MdpPathCollector(
        eval_env,
        eval_policy,
    )
    expl_path_collector = CustomMDPPathCollector(
        eval_env,
    )
    buffer_filename = None
    if variant['buffer_filename'] is not None:
        buffer_filename = variant['buffer_filename']
    # =========================================================
    # different dataset modifications
    if variant['use_sil']:
        print('Internal report: loading data for CQL SIL')
        replay_buffer = EnvReplayBufferWithReturn(
            variant['replay_buffer_size'],
            expl_env,
        )
        load_hdf5_with_mc_return(qlearning_dataset_with_mc_return(eval_env), replay_buffer)
    elif variant['cql_beta']:
        if variant['env_name'].endswith('v0'):
            print('Internal report: Loading data for CQL beta v0')
            replay_buffer = EnvReplayBufferWithNextAction(
                variant['replay_buffer_size'],
                expl_env,
            )
            load_hdf5_with_next_action(qlearning_dataset_with_next_action_v0(variant['env_name']), replay_buffer)
        elif variant['env_name'].endswith('v2'):
            print('Internal report: Loading data for CQL beta v2')
            replay_buffer = EnvReplayBufferWithNextAction(
                variant['replay_buffer_size'],
                expl_env,
            )
            load_hdf5_with_next_action(qlearning_dataset_with_next_action_v2(variant['env_name']), replay_buffer)
        else:
            raise NotImplementedError
    elif variant['cql_original']:
        print('Internal report: Loading data for CQL original')
        replay_buffer = EnvReplayBuffer(
            variant['replay_buffer_size'],
            expl_env,
        )
        if variant['load_buffer'] and buffer_filename is not None:
            replay_buffer.load_buffer(buffer_filename)
        elif 'random-expert' in variant['env_name']:
            load_hdf5(d4rl.basic_dataset(eval_env), replay_buffer)
        else:
            load_hdf5(d4rl.qlearning_dataset(eval_env), replay_buffer)
    else:
        replay_buffer = EnvReplayBuffer(
            variant['replay_buffer_size'],
            expl_env,
        )
        load_hdf5(qlearning_dataset_wonjoon(variant['env_name']), replay_buffer)
    # =========================================================
    trainer = CQLTrainer(
        env=eval_env,
        policy=policy,
        qf1=qf1,
        qf2=qf2,
        target_qf1=target_qf1,
        target_qf2=target_qf2,
        **variant['trainer_kwargs']
    )
    algorithm = TorchBatchRLAlgorithm(
        trainer=trainer,
        exploration_env=expl_env,
        evaluation_env=eval_env,
        exploration_data_collector=expl_path_collector,
        evaluation_data_collector=eval_path_collector,
        replay_buffer=replay_buffer,
        eval_both=False,  # added/modified by Zhihan
        batch_rl=variant['load_buffer'],
        **variant['algorithm_kwargs']
    )
    algorithm.to(ptu.device)
    algorithm.train()
    print('Saving networks.')
    DIR = variant['model_save_dir']
    torch.save(policy.state_dict(), os.path.join(DIR, 'policy.pth'))
    torch.save(qf1.state_dict(), os.path.join(DIR, 'qf1.pth'))
    torch.save(qf2.state_dict(), os.path.join(DIR, 'qf2.pth'))
    print('Done saving networks.')
def enable_gpus(gpu_str):
    """Restrict CUDA to the given comma-separated device id string.

    An empty string leaves CUDA_VISIBLE_DEVICES untouched.
    Fix: the original used `gpu_str is not ""` — an identity comparison
    with a literal (SyntaxWarning since Python 3.8); use `!=` instead.
    """
    if gpu_str != "":
        os.environ["CUDA_VISIBLE_DEVICES"] = gpu_str
if __name__ == "__main__":
    # noinspection PyTypeChecker
    # Base configuration; several entries are overridden from the CLI below.
    variant = dict(
        algorithm="CQL",
        version="normal",
        layer_size=256,
        replay_buffer_size=int(2E6),
        buffer_filename=None,
        load_buffer=None,
        env_name='Hopper-v2',
        sparse_reward=False,
        use_sil=False,  # added for the new SIL idea; default to be false
        algorithm_kwargs=dict(
            num_epochs=300,
            num_eval_steps_per_epoch=1000,
            num_trains_per_train_loop=1000,
            num_expl_steps_per_train_loop=1000,
            min_num_steps_before_training=1000,
            max_path_length=1000,
            batch_size=256,
        ),
        trainer_kwargs=dict(
            discount=0.99,
            soft_target_tau=5e-3,
            policy_lr=1E-4,
            qf_lr=3E-4,
            reward_scale=1,
            use_automatic_entropy_tuning=True,
            # Target nets/ policy vs Q-function update
            policy_eval_start=40000,
            num_qs=2,
            # min Q
            temp=1.0,
            min_q_version=3,
            min_q_weight=1.0,
            # lagrange
            with_lagrange=True,  # Defaults to true
            lagrange_thresh=10.0,
            # extra params
            num_random=10,
            max_q_backup=False,
            deterministic_backup=False,
        ),
    )
    # added/modified by Zhihan
    # Arguments that should be specified
    # env
    # According to instructions in the codebase, for Gym Mujoco tasks, we should use:
    # min_q_weight: 5.0 (different from default)
    # lagrange_thresh: -1.0 (different from default)
    # policy_lr: 1e-4
    # Variants:
    # min_q_version: 3 (CQL(H) default) vs 2 (CQL(rho))
    # For convenience, we should use
    # seed: 10 -> 0 (different from default)
    parser = argparse.ArgumentParser()
    parser.add_argument("--env", type=str, default='hopper-medium-v0')
    parser.add_argument("--gpu", default='0', type=str)
    parser.add_argument("--max_q_backup", type=str, default="False")  # if we want to try max_{a'} backups, set this to true
    parser.add_argument("--deterministic_backup", type=str, default="True")  # defaults to true, it does not backup entropy in the Q-function, as per Equation 3
    parser.add_argument("--policy_eval_start", default=10000, type=int)  # Defaulted to 20000 (40000 or 10000 work similarly)
    parser.add_argument('--min_q_weight', default=1.0, type=float)  # the value of alpha, set to 5.0 or 10.0 if not using lagrange
    parser.add_argument('--policy_lr', default=1e-4, type=float)  # Policy learning rate
    parser.add_argument('--min_q_version', default=3, type=int)  # min_q_version = 3 (CQL(H)), version = 2 (CQL(rho))
    parser.add_argument('--lagrange_thresh', default=5.0, type=float)  # the value of tau, corresponds to the CQL(lagrange) version
    parser.add_argument('--seed', default=10, type=int)
    parser.add_argument('--use_sil', default='False', type=str)  # added for the new idea
    parser.add_argument('--cql_beta', default='False', type=str)  # added for the new idea
    parser.add_argument('--cql_original', default='False', type=str)
    args = parser.parse_args()
    enable_gpus(args.gpu)
    # String-valued flags arrive as 'True'/'False'; a direct comparison
    # replaces the original `True if ... else False` ternaries.
    variant['use_sil'] = args.use_sil == 'True'
    variant['cql_beta'] = args.cql_beta == 'True'
    variant['cql_original'] = args.cql_original == 'True'
    assert not (variant['use_sil'] and variant['cql_beta']), "can't use these two together at this point"
    variant['trainer_kwargs']['max_q_backup'] = args.max_q_backup == 'True'
    variant['trainer_kwargs']['deterministic_backup'] = args.deterministic_backup == 'True'
    variant['trainer_kwargs']['min_q_weight'] = args.min_q_weight
    variant['trainer_kwargs']['policy_lr'] = args.policy_lr
    variant['trainer_kwargs']['min_q_version'] = args.min_q_version
    variant['trainer_kwargs']['policy_eval_start'] = args.policy_eval_start
    variant['trainer_kwargs']['lagrange_thresh'] = args.lagrange_thresh
    # A negative threshold disables the Lagrange version of CQL.
    if args.lagrange_thresh < 0.0:
        variant['trainer_kwargs']['with_lagrange'] = False
    variant['buffer_filename'] = None
    variant['load_buffer'] = True
    variant['env_name'] = args.env
    variant['seed'] = args.seed
    # added/modified by Zhihan: use entire buffer
    variant['replay_buffer_size'] = mhf.get_dataset_size(args.env)
    # added/modified by Zhihan: use 1M grad steps, report avg return across 10 episodes per 10K grad steps
    variant['algorithm_kwargs']['num_epochs'] = 100
    variant['algorithm_kwargs']['num_trains_per_train_loop'] = int(5e3)
    variant['algorithm_kwargs']['num_eval_steps_per_epoch'] = 10 * variant['algorithm_kwargs']['max_path_length']
    print('Epochs:', variant['algorithm_kwargs']['num_epochs'])
    print('Num trains per epoch:', variant['algorithm_kwargs']['num_trains_per_train_loop'])
    # added/modified by Zhihan: convenient log dir
    # The three mode flags are mutually exclusive (asserted above for the
    # sil/beta pair); the original trailing `elif ... pass` and the
    # unreachable `else: raise NotImplementedError` have been dropped.
    algo_name = 'CQL'
    if variant['use_sil']:
        algo_name += '_SIL'
    elif variant['cql_beta']:
        algo_name += '_BETA'
    elif variant['cql_original']:
        algo_name += '_ORIGINAL'
    log_dir = mhf.get_log_dir(
        base_dir='results',
        algo_dir=algo_name,
        env_dir=args.env,
        seed_dir=args.seed
    )
    print('Log dir:', log_dir)
    shutil.rmtree(log_dir, ignore_errors=True)  # overwrite any previous stuff written in here by deleting the directory
    # later on setup_logger would re-create it anyway
    setup_logger(
        log_dir=log_dir,
    )
    variant['model_save_dir'] = log_dir
    ptu.set_gpu_mode(True)
    experiment(variant)
|
{"/brac/scripts/train_bc.py": ["/my_helper_functions.py"], "/bcq/scripts/run_script.py": ["/my_helper_functions.py"], "/cql/d4rl/examples/cql_mujoco_new.py": ["/my_helper_functions.py", "/qlearning_dataset_with_mc_return.py", "/qlearning_dataset_with_next_action.py"]}
|
26,997
|
re0ah/Color-detector-openCV
|
refs/heads/main
|
/main.py
|
import cv2
import numpy as np
import json
import time
import telegram
def nothing(x):
    # No-op callback: cv2.createTrackbar requires an on-change callable.
    pass
class Color_cv:
    """HSV color-masking helper.

    Each instance owns a trackbar window for tuning lower/upper HSV
    bounds, can mask frames with those bounds, and persists the tuning
    to a per-color JSON file.
    """

    # Defaults used when no saved settings file exists.
    # Fix: OpenCV hue ranges over 0..179, so the upper-hue default is 179
    # (the original 255 exceeded the trackbar maximum).
    settings_default = {
        "low - hue": 0,
        "low - sat": 0,
        "low - val": 0,
        "upp - hue": 179,
        "upp - sat": 255,
        "upp - val": 255
    }

    # (trackbar name, maximum position); hue is capped at 179 in OpenCV HSV.
    _TRACKBARS = (
        ("low - hue", 179),
        ("low - sat", 255),
        ("low - val", 255),
        ("upp - hue", 179),
        ("upp - sat", 255),
        ("upp - val", 255),
    )

    def __init__(self, color_name: str):
        """Create the trackbar window used to tune the mask for *color_name*."""
        self.hsv_fname = f"hsv_{color_name}.json"
        self.w_name_trackbars = f"{color_name} trackbars"
        self.mask_name = f"{color_name} mask"
        self.recognized = False  # recognition flag
        hsv_json = self.load_settings()
        cv2.namedWindow(self.w_name_trackbars)
        for bar_name, max_val in self._TRACKBARS:
            cv2.createTrackbar(bar_name, self.w_name_trackbars,
                               hsv_json[bar_name], max_val, nothing)

    def get_color_diaposones(self) -> dict:
        """Read the current lower/upper HSV bounds from the trackbars."""
        lower = [cv2.getTrackbarPos(f"low - {chan}", self.w_name_trackbars)
                 for chan in ("hue", "sat", "val")]
        upper = [cv2.getTrackbarPos(f"upp - {chan}", self.w_name_trackbars)
                 for chan in ("hue", "sat", "val")]
        return {
            "lower": np.array(lower),
            "upper": np.array(upper)
        }

    def show_mask(self, frame):
        """Build the in-range mask for *frame* (expected HSV) and display it.

        The mask is kept on self.mask for later use by recognize() and the
        caller. (Dead code removed: an unused bitwise_and result and the
        unused locals `blocks`, `i`, `j` from the original.)
        """
        diaposones = self.get_color_diaposones()
        self.mask = cv2.inRange(frame, diaposones["lower"],
                                diaposones["upper"])
        cv2.imshow(self.mask_name, self.mask)
        # self.recognized = self.recognize(cv2.moments(self.mask, 1))

    def recognize(self, moments: dict) -> bool:
        """Return True when the current mask contains an object (area > 35).

        On success, stores the object's centroid in (x_center, y_center).
        NOTE(review): the *moments* argument is immediately shadowed by a
        fresh cv2.moments(self.mask, 1) call — exactly as in the original;
        the parameter is kept only to preserve the call signature.
        """
        moments = cv2.moments(self.mask, 1)
        dM01 = moments["m01"]
        dM10 = moments["m10"]
        dArea = moments["m00"]
        if dArea > 35:
            self.x_center = int(dM10 / dArea)
            self.y_center = int(dM01 / dArea)
            return True
        return False

    def save_settings(self):
        """Write the current trackbar positions to the per-color JSON file."""
        data = {name: cv2.getTrackbarPos(name, self.w_name_trackbars)
                for name, _ in self._TRACKBARS}
        with open(self.hsv_fname, "w") as write_file:
            json.dump(data, write_file)

    def load_settings(self) -> dict:
        """Load saved mask settings; fall back to defaults if absent."""
        try:
            with open(self.hsv_fname, "r") as read_file:
                return json.load(read_file)
        except FileNotFoundError:
            # Return a copy so callers cannot mutate the shared class default.
            return dict(self.settings_default)
def write_info(frame, text: str) -> "Frame":
    """Draw *text* in blue at a fixed position on *frame* and return it."""
    return cv2.putText(
        frame,
        text,
        (39, 28),                     # origin (x, y)
        cv2.FONT_HERSHEY_SIMPLEX,
        0.75,                         # font scale
        (255, 0, 0),                  # BGR color: blue
        2,                            # thickness
        cv2.LINE_AA,
    )
def calc_lenght(p1: tuple, p2: tuple) -> int:
    """Return the Euclidean distance between two points, rounded to int.

    :param p1: point 1 as (x, y)
    :param p2: point 2 as (x, y)
    :return: rounded distance between the points

    Simplified from the original max/min bookkeeping: squaring the raw
    coordinate differences already discards the sign, so no absolute
    values are needed.  (The name's "lenght" typo is kept because callers
    use it.)
    """
    dx = p1[0] - p2[0]
    dy = p1[1] - p2[1]
    return int(round((dx * dx + dy * dy) ** 0.5))
def show_information(frame):
    """Annotate *frame* with the recognition state of both tracked objects.

    Draws a circle on each recognized object.  When both are visible it
    also draws a connecting line and a status dot whose color encodes the
    distance band (green / orange / red), and drives the Telegram alert
    counter.  Otherwise it reports which object is missing.
    """
    # Mark each recognized object with a filled circle.
    if cv_yellow.recognized:
        cv2.circle(frame, (cv_yellow.x_center, cv_yellow.y_center),
                   10, (255, 255, 255), -1)
    if cv_red.recognized:
        cv2.circle(frame, (cv_red.x_center, cv_red.y_center),
                   10, (0, 0, 255), -1)
    if cv_yellow.recognized and cv_red.recognized:
        # Fix for clarity: the original cross-paired the y-coordinates of
        # the two points; the Euclidean distance is identical either way,
        # so each point now carries its own object's coordinates.
        lenght = calc_lenght((cv_yellow.x_center, cv_yellow.y_center),
                             (cv_red.x_center, cv_red.y_center))
        print(lenght)
        if lenght > 200:
            # Far apart: green status, re-arm the alert counter.
            text = f"Lenght: {lenght}, Green"
            cv2.line(frame, (cv_yellow.x_center, cv_yellow.y_center),
                     (cv_red.x_center, cv_red.y_center), (0, 255, 0), 3)
            cv2.circle(frame, (16, 20), 14, (0, 255, 0), -1)
            telegram.bot.mailing = -1
        elif lenght > 50:
            # Getting close: orange status; alert once, then re-alert
            # after the counter wraps at 150 frames.
            text = f"Lenght: {lenght}, Orange"
            cv2.line(frame, (cv_yellow.x_center, cv_yellow.y_center),
                     (cv_red.x_center, cv_red.y_center), (0, 165, 255), 3)
            cv2.circle(frame, (16, 20), 14, (0, 165, 255), -1)
            if telegram.bot.mailing == -1:
                telegram.bot.send_message(f"Внимание! До айсберга {lenght}")
            telegram.bot.mailing += 1
            if telegram.bot.mailing == 150:
                telegram.bot.mailing = -1
        else:
            # Dangerously close: red status, same alert cadence.
            text = f"Lenght: {lenght}, Red"
            cv2.line(frame, (cv_yellow.x_center, cv_yellow.y_center),
                     (cv_red.x_center, cv_red.y_center), (0, 0, 255), 3)
            cv2.circle(frame, (16, 20), 14, (0, 0, 255), -1)
            if telegram.bot.mailing == -1:
                telegram.bot.send_message(f"Внимание! До столкновения с айсбергом {lenght}")
            telegram.bot.mailing += 1
            if telegram.bot.mailing == 150:
                telegram.bot.mailing = -1
    else:
        # At least one object is missing; say which one.
        if not cv_yellow.recognized and cv_red.recognized:
            text = "white not recognized"
        elif cv_yellow.recognized and not cv_red.recognized:
            text = "red not recognized"
        else:
            text = "red & white not recognized"
    write_info(frame, text)
# Main capture loop: read frames, show both masks, overlay status, exit on ESC.
# Camera index 1 — presumably an external camera; confirm on the target machine.
cap = cv2.VideoCapture(1)
# NOTE(review): width/height are read here but never used afterwards.
cap_width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
cap_height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
# NOTE(review): despite its name, cv_yellow is constructed for "white"
# (the status texts elsewhere also say "white") — consider renaming.
cv_yellow = Color_cv("white")
cv_red = Color_cv("red")
while True:
    _, frame = cap.read()
    # Masking operates in HSV space.
    hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
    cv_yellow.show_mask(hsv)
    cv_red.show_mask(hsv)
    # Combine both masks over the original frame for the "Result" window.
    result_red = cv2.bitwise_and(frame, frame, mask=cv_yellow.mask)
    result = result_red
    result_white = cv2.bitwise_and(frame, frame, mask=cv_red.mask)
    result = cv2.bitwise_or(result, result_white)
    show_information(frame)
    cv2.imshow("Frame", frame)
    cv2.imshow("Result", result)
    key = cv2.waitKey(1)
    if key == 27:  # ESC exits the loop
        break
    time.sleep(1 / 30)  # throttle to roughly 30 FPS
# Persist the tuned trackbar settings before shutting down.
cv_yellow.save_settings()
cv_red.save_settings()
cap.release()
cv2.destroyAllWindows()
|
{"/main.py": ["/telegram.py"]}
|
26,998
|
re0ah/Color-detector-openCV
|
refs/heads/main
|
/telegram.py
|
import telebot
import json
from threading import Thread
def save_user_id(user_id):
    """Persist the subscriber's chat id to telegram_user_list.json."""
    payload = {"id": user_id}
    with open("telegram_user_list.json", "w") as fh:
        json.dump(payload, fh)
def load_user_id() -> dict:
    """Load the saved subscriber id from telegram_user_list.json.

    Returns the parsed dict, or ``{"id": 0}`` when the file is missing.
    Fix: the original returned the bare int ``0`` on FileNotFoundError,
    which contradicts the ``-> dict`` annotation and crashes the only
    caller (``load_user_id()["id"]``) with TypeError on first run.
    """
    try:
        with open("telegram_user_list.json", "r") as read_file:
            return json.load(read_file)
    except FileNotFoundError:
        return {"id": 0}
class Telegram_th(Thread):
    """Background thread running a Telegram bot that sends distance alerts
    and registers subscribers via the /start command."""
    def __init__(self):
        super().__init__()
        # Alert-counter states (English translation of the note below):
        #   -1     - waiting for counting to start
        #   0..150 - counting
        #   150    - send and reset
        """-1 - ожидание начала подсчета
        0..150 - подсчет
        150 - отправка и обнуление"""
        self.mailing = -1
        # NOTE(review): this raises TypeError if load_user_id() falls back
        # to a non-dict default (no settings file present) — ensure it
        # always returns a dict before first run.
        self.id = load_user_id()["id"]
        # NOTE(review): "TELEGRAM_TOKEN" looks like a placeholder; the real
        # token is presumably injected before deployment — confirm.
        self.bot = telebot.TeleBot("TELEGRAM_TOKEN")
        self.bot.remove_webhook()
        # Handler is defined inside __init__ so it can close over self.bot.
        @self.bot.message_handler(commands=["start"])
        def registration_command(message):
            self.bot.send_message(message.chat.id, "Вы добавлены в список рассылки!")
            save_user_id(message.chat.id)
    def run(self):
        # Thread entry point: block on Telegram long polling.
        self.bot.polling(none_stop=True)
    def send_message(self, message: str):
        # Push *message* to the registered subscriber's chat id.
        self.bot.send_message(self.id, message)
# Module-import side effect: the bot thread starts as soon as this module
# is imported (main.py does `import telegram` and uses telegram.bot).
bot = Telegram_th()
bot.start()
|
{"/main.py": ["/telegram.py"]}
|
26,999
|
rendaman0215/housesearch_backend
|
refs/heads/master
|
/house/migrations/0008_reviews_rate.py
|
# Generated by Django 3.1.2 on 2021-05-04 08:10
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add an integer ``rate`` field (default 5) to the Reviews model."""
    dependencies = [
        ('house', '0007_expense_landarea'),
    ]
    operations = [
        migrations.AddField(
            model_name='reviews',
            name='rate',
            field=models.IntegerField(default=5),
            preserve_default=False,
        ),
    ]
|
{"/housesearch_backend/urls.py": ["/house/urls.py"], "/house/admin.py": ["/house/models.py"], "/house/serializer.py": ["/house/models.py"], "/house/views.py": ["/house/models.py", "/house/serializer.py", "/house/permission.py"]}
|
27,000
|
rendaman0215/housesearch_backend
|
refs/heads/master
|
/house/migrations/0012_auto_20210507_1312.py
|
# Generated by Django 3.1.2 on 2021-05-07 04:12
from django.db import migrations, models
class Migration(migrations.Migration):
    """Rename Expense.comment to costdowncomment, add comment and date
    fields across Expense/MakerCard/Reviews, and move image uploads into
    model-specific subdirectories."""
    dependencies = [
        ('house', '0011_auto_20210505_1427'),
    ]
    operations = [
        migrations.RenameField(
            model_name='expense',
            old_name='comment',
            new_name='costdowncomment',
        ),
        migrations.AddField(
            model_name='expense',
            name='costupcomment',
            field=models.TextField(default='ok'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='expense',
            name='create_date',
            field=models.DateTimeField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='expense',
            name='gradecomment',
            field=models.TextField(default='ok'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='expense',
            name='update_date',
            field=models.DateTimeField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='makercard',
            name='create_date',
            field=models.DateTimeField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='makercard',
            name='update_date',
            field=models.DateTimeField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='reviews',
            name='create_date',
            field=models.DateTimeField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='reviews',
            name='update_date',
            field=models.DateTimeField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='expense',
            name='images',
            field=models.ImageField(blank=True, null=True, upload_to='expense'),
        ),
        migrations.AlterField(
            model_name='reviews',
            name='images',
            field=models.ImageField(blank=True, null=True, upload_to='review'),
        ),
    ]
|
{"/housesearch_backend/urls.py": ["/house/urls.py"], "/house/admin.py": ["/house/models.py"], "/house/serializer.py": ["/house/models.py"], "/house/views.py": ["/house/models.py", "/house/serializer.py", "/house/permission.py"]}
|
27,001
|
rendaman0215/housesearch_backend
|
refs/heads/master
|
/house/migrations/0028_auto_20210624_1229.py
|
# Generated by Django 3.1.2 on 2021-06-24 03:29
from django.db import migrations
class Migration(migrations.Migration):
    """Rename Expense.hidden to the shorter ``hid``."""
    dependencies = [
        ('house', '0027_expense_hidden'),
    ]
    operations = [
        migrations.RenameField(
            model_name='expense',
            old_name='hidden',
            new_name='hid',
        ),
    ]
|
{"/housesearch_backend/urls.py": ["/house/urls.py"], "/house/admin.py": ["/house/models.py"], "/house/serializer.py": ["/house/models.py"], "/house/views.py": ["/house/models.py", "/house/serializer.py", "/house/permission.py"]}
|
27,002
|
rendaman0215/housesearch_backend
|
refs/heads/master
|
/house/migrations/0026_auto_20210622_1603.py
|
# Generated by Django 3.1.2 on 2021-06-22 07:03
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    """Add MakerCard.description and switch the create_date fields on
    Expense/MakerCard/Reviews to auto_now timestamps.

    NOTE(review): ``auto_now=True`` updates the field on every save;
    for a creation timestamp ``auto_now_add=True`` is the usual choice —
    confirm this is intended.
    """
    dependencies = [
        ('house', '0025_auto_20210605_0321'),
    ]
    operations = [
        migrations.AddField(
            model_name='makercard',
            name='description',
            field=models.TextField(default='arund'),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='expense',
            name='create_date',
            field=models.DateTimeField(auto_now=True, default=django.utils.timezone.now),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='makercard',
            name='create_date',
            field=models.DateTimeField(auto_now=True, default=django.utils.timezone.now),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='reviews',
            name='create_date',
            field=models.DateTimeField(auto_now=True, default=django.utils.timezone.now),
            preserve_default=False,
        ),
    ]
|
{"/housesearch_backend/urls.py": ["/house/urls.py"], "/house/admin.py": ["/house/models.py"], "/house/serializer.py": ["/house/models.py"], "/house/views.py": ["/house/models.py", "/house/serializer.py", "/house/permission.py"]}
|
27,003
|
rendaman0215/housesearch_backend
|
refs/heads/master
|
/house/migrations/0019_auto_20210520_0023.py
|
# Generated by Django 3.1.2 on 2021-05-19 15:23
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add Expense.status and Reviews.title char fields."""
    dependencies = [
        ('house', '0018_auto_20210520_0013'),
    ]
    operations = [
        migrations.AddField(
            model_name='expense',
            name='status',
            field=models.CharField(default='見', max_length=100),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='reviews',
            name='title',
            field=models.CharField(default='悪くない', max_length=100),
            preserve_default=False,
        ),
    ]
|
{"/housesearch_backend/urls.py": ["/house/urls.py"], "/house/admin.py": ["/house/models.py"], "/house/serializer.py": ["/house/models.py"], "/house/views.py": ["/house/models.py", "/house/serializer.py", "/house/permission.py"]}
|
27,004
|
rendaman0215/housesearch_backend
|
refs/heads/master
|
/house/migrations/0009_auto_20210505_1346.py
|
# Generated by Django 3.1.2 on 2021-05-05 04:46
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
    """Replace the single Reviews.rate with per-category rate/comment
    pairs (cost, design, layout, spec, guarantee, sales, attach) plus a
    status field, all range-validated 0..5."""
    dependencies = [
        ('house', '0008_reviews_rate'),
    ]
    operations = [
        migrations.RenameField(
            model_name='reviews',
            old_name='comment',
            new_name='costcomment',
        ),
        migrations.RemoveField(
            model_name='reviews',
            name='rate',
        ),
        migrations.AddField(
            model_name='reviews',
            name='attachcomment',
            field=models.TextField(default='ok'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='reviews',
            name='attachrate',
            field=models.IntegerField(default=3.0, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(5)]),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='reviews',
            name='costrate',
            field=models.IntegerField(default=3.0, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(5)]),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='reviews',
            name='designcomment',
            field=models.TextField(default='ok'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='reviews',
            name='designrate',
            field=models.IntegerField(default=3.0, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(5)]),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='reviews',
            name='guaranteecomment',
            field=models.TextField(default='okok'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='reviews',
            name='guaranteerate',
            field=models.IntegerField(default=4.0, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(5)]),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='reviews',
            name='layoutcomment',
            field=models.TextField(default='daijoubu'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='reviews',
            name='layoutrate',
            field=models.IntegerField(default=2.0, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(5)]),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='reviews',
            name='salescomment',
            field=models.TextField(default='okokokok'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='reviews',
            name='salesrate',
            field=models.IntegerField(default=5.0, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(5)]),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='reviews',
            name='speccomment',
            field=models.TextField(default='warukunai'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='reviews',
            name='specrate',
            field=models.IntegerField(default=1.0, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(5)]),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='reviews',
            name='status',
            field=models.CharField(default='見積もり', max_length=100),
            preserve_default=False,
        ),
    ]
|
{"/housesearch_backend/urls.py": ["/house/urls.py"], "/house/admin.py": ["/house/models.py"], "/house/serializer.py": ["/house/models.py"], "/house/views.py": ["/house/models.py", "/house/serializer.py", "/house/permission.py"]}
|
27,005
|
rendaman0215/housesearch_backend
|
refs/heads/master
|
/house/migrations/0013_auto_20210507_1810.py
|
# Generated by Django 3.1.2 on 2021-05-07 09:10
from django.db import migrations
class Migration(migrations.Migration):
    """Rename the ``images`` field to ``image`` on Expense and Reviews."""
    dependencies = [
        ('house', '0012_auto_20210507_1312'),
    ]
    operations = [
        migrations.RenameField(
            model_name='expense',
            old_name='images',
            new_name='image',
        ),
        migrations.RenameField(
            model_name='reviews',
            old_name='images',
            new_name='image',
        ),
    ]
|
{"/housesearch_backend/urls.py": ["/house/urls.py"], "/house/admin.py": ["/house/models.py"], "/house/serializer.py": ["/house/models.py"], "/house/views.py": ["/house/models.py", "/house/serializer.py", "/house/permission.py"]}
|
27,006
|
rendaman0215/housesearch_backend
|
refs/heads/master
|
/house/migrations/0011_auto_20210505_1427.py
|
# Generated by Django 3.1.2 on 2021-05-05 05:27
from django.db import migrations, models
class Migration(migrations.Migration):
    """Convert Reviews.avgrate to a DecimalField (3 digits, 2 decimal places)."""
    dependencies = [
        ('house', '0010_reviews_avgrate'),
    ]
    operations = [
        migrations.AlterField(
            model_name='reviews',
            name='avgrate',
            field=models.DecimalField(decimal_places=2, max_digits=3),
        ),
    ]
|
{"/housesearch_backend/urls.py": ["/house/urls.py"], "/house/admin.py": ["/house/models.py"], "/house/serializer.py": ["/house/models.py"], "/house/views.py": ["/house/models.py", "/house/serializer.py", "/house/permission.py"]}
|
27,007
|
rendaman0215/housesearch_backend
|
refs/heads/master
|
/house/urls.py
|
from . import views
from django.urls import path
from rest_framework import routers
from rest_framework_jwt.views import obtain_jwt_token, refresh_jwt_token
# DRF router: CRUD endpoints for the three model viewsets under v1/.
router = routers.DefaultRouter()
router.register(r'v1/makers', views.MakerCardViewSet)
router.register(r'v1/reviews', views.ReviewViewSet)
router.register(r'v1/expense', views.ExpenseViewSet)
urlpatterns = [
    # Presumably reports whether the current user has already posted for
    # the given maker — confirm against views.isPosted.
    path('v1/isposted/<slug:targetmaker>/', views.isPosted.as_view()),
    path('v1/user/', views.PingViewSet.as_view()),
    # JWT login / token refresh endpoints.
    path('auth/', obtain_jwt_token),
    # NOTE(review): 'reresh' looks like a typo for 'refresh'; fixing it
    # changes the public URL, so coordinate with API consumers first.
    path('auth/reresh/', refresh_jwt_token)
]
|
{"/housesearch_backend/urls.py": ["/house/urls.py"], "/house/admin.py": ["/house/models.py"], "/house/serializer.py": ["/house/models.py"], "/house/views.py": ["/house/models.py", "/house/serializer.py", "/house/permission.py"]}
|
27,008
|
rendaman0215/housesearch_backend
|
refs/heads/master
|
/housesearch_backend/urls.py
|
from django.contrib import admin
from django.conf.urls import url
from django.urls import path,include
from django.conf import settings
from django.conf.urls.static import static
from house.urls import router as house_router
urlpatterns = [
    path('admin/', admin.site.urls),
    # Both the DRF router URLs and the hand-written house URLs are mounted
    # under the same 'api/' prefix.
    url('api/', include(house_router.urls),),
    url('api/', include('house.urls'),),
    # Serve uploaded media and static files (development-style serving).
]+ static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)+ static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
{"/housesearch_backend/urls.py": ["/house/urls.py"], "/house/admin.py": ["/house/models.py"], "/house/serializer.py": ["/house/models.py"], "/house/views.py": ["/house/models.py", "/house/serializer.py", "/house/permission.py"]}
|
27,009
|
rendaman0215/housesearch_backend
|
refs/heads/master
|
/house/permission.py
|
from rest_framework.permissions import BasePermission, SAFE_METHODS
class IsMeOrAdminOrGuestOrOthers(BasePermission):
    """Anyone may read; writes require login; object-level writes require
    being the record's author (compared as a username string) or a
    superuser."""
    def has_permission(self, request, view):
        # Only non-safe methods need an authenticated user.
        if request.method not in SAFE_METHODS:
            return request.user.is_authenticated
        return True
    def has_object_permission(self, request, view, obj):
        # Author match is string-based; superusers may modify anything.
        if request.method not in SAFE_METHODS:
            return obj.author == str(request.user) or request.user.is_superuser
        return True
class IsAdminOrReadOnly(BasePermission):
    """Read-only access for everybody; any write requires a superuser."""
    def has_permission(self, request, view):
        if request.method not in SAFE_METHODS:
            return request.user.is_superuser
        return True
    def has_object_permission(self, request, view, obj):
        if request.method not in SAFE_METHODS:
            return request.user.is_superuser
        return True
|
{"/housesearch_backend/urls.py": ["/house/urls.py"], "/house/admin.py": ["/house/models.py"], "/house/serializer.py": ["/house/models.py"], "/house/views.py": ["/house/models.py", "/house/serializer.py", "/house/permission.py"]}
|
27,010
|
rendaman0215/housesearch_backend
|
refs/heads/master
|
/house/migrations/0017_expense_maker_name.py
|
# Generated by Django 3.1.2 on 2021-05-19 11:40
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a ``maker_name`` char field to the Expense model."""
    dependencies = [
        ('house', '0016_reviews_maker_name'),
    ]
    operations = [
        migrations.AddField(
            model_name='expense',
            name='maker_name',
            field=models.CharField(default='sekisuiheim', max_length=100),
            preserve_default=False,
        ),
    ]
|
{"/housesearch_backend/urls.py": ["/house/urls.py"], "/house/admin.py": ["/house/models.py"], "/house/serializer.py": ["/house/models.py"], "/house/views.py": ["/house/models.py", "/house/serializer.py", "/house/permission.py"]}
|
27,011
|
rendaman0215/housesearch_backend
|
refs/heads/master
|
/house/admin.py
|
from django.contrib import admin
from .models import MakerCard, Reviews, Expense
# Expose the three house models in the Django admin site.
admin.site.register(MakerCard)
admin.site.register(Reviews)
admin.site.register(Expense)
|
{"/housesearch_backend/urls.py": ["/house/urls.py"], "/house/admin.py": ["/house/models.py"], "/house/serializer.py": ["/house/models.py"], "/house/views.py": ["/house/models.py", "/house/serializer.py", "/house/permission.py"]}
|
27,012
|
rendaman0215/housesearch_backend
|
refs/heads/master
|
/house/migrations/0016_reviews_maker_name.py
|
# Generated by Django 3.1.2 on 2021-05-12 14:39
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a ``maker_name`` char field to the Reviews model."""
    dependencies = [
        ('house', '0015_auto_20210508_1846'),
    ]
    operations = [
        migrations.AddField(
            model_name='reviews',
            name='maker_name',
            field=models.CharField(default='sekisuiheim', max_length=100),
            preserve_default=False,
        ),
    ]
|
{"/housesearch_backend/urls.py": ["/house/urls.py"], "/house/admin.py": ["/house/models.py"], "/house/serializer.py": ["/house/models.py"], "/house/views.py": ["/house/models.py", "/house/serializer.py", "/house/permission.py"]}
|
27,013
|
rendaman0215/housesearch_backend
|
refs/heads/master
|
/house/serializer.py
|
from rest_framework import serializers
from .models import MakerCard, Reviews, Expense
class MakerCardSerializer(serializers.ModelSerializer):
    """Serializer for MakerCard, exposing aggregate helper fields and an
    absolute URL for the maker's image."""
    image_url = serializers.SerializerMethodField()
    class Meta:
        model = MakerCard
        fields = ('pk', 'name', 'name_hira', 'name_kata', 'name_eng', 'image_url', 'get_review_count', 'get_expense_count', 'get_expense_avg', 'get_landarea_avg', 'get_rateavg', 'ratetostr', 'get_costavg', 'get_designavg', 'get_layoutavg', 'get_specavg', 'get_guaranteeavg', 'get_salesavg')
        read_only_fields = ('pk','created_at',)
    def get_image_url(self, maker):
        """Return the absolute URL of the maker's image, or '' when absent.

        Fix: guard on ``maker.images`` (consistent with ExpenseSerializer's
        image accessors) — accessing ``.url`` on an empty file field raises
        ValueError and would break the whole list response.
        """
        request = self.context.get('request')
        if maker.images:
            return request.build_absolute_uri(maker.images.url)
        return ""
class ReviewSerializer(serializers.ModelSerializer):
    """Flat serializer for Reviews: per-category rates and comments plus
    the stored and computed averages."""
    class Meta:
        model = Reviews
        fields = ('pk','author', 'status', 'costrate', 'costcomment', 'designrate', 'designcomment', 'layoutrate', 'layoutcomment', 'specrate', 'speccomment', 'guaranteerate', 'guaranteecomment', 'salesrate', 'salescomment', 'avgrate', 'get_rateavg', 'maker_name', 'create_date')
        # NOTE(review): 'created_at' does not appear in `fields` (the field
        # list uses 'create_date') — confirm which name is intended.
        read_only_fields = ('pk','created_at',)
class ExpenseSerializer(serializers.ModelSerializer):
    """Serializer for Expense with absolute URLs for its two images."""
    expimage_url = serializers.SerializerMethodField()
    layoutimage_url = serializers.SerializerMethodField()
    class Meta:
        model = Expense
        fields = ('pk','author', 'status', 'cost', 'landarea', 'gradecomment', 'costupcomment', 'costdowncomment', 'hid', 'expimage', 'layoutimage', 'expimage_url', 'layoutimage_url', 'maker_name', 'create_date')
        read_only_fields = ('pk','created_at','hid', )
    def get_expimage_url(self, expense):
        """Absolute URL of the expense image, or '' when none is attached."""
        request = self.context.get('request')
        if expense.expimage:
            return request.build_absolute_uri(expense.expimage.url)
        return ""
    def get_layoutimage_url(self, expense):
        """Absolute URL of the layout image, or '' when none is attached."""
        request = self.context.get('request')
        if expense.layoutimage:
            return request.build_absolute_uri(expense.layoutimage.url)
        return ""
|
{"/housesearch_backend/urls.py": ["/house/urls.py"], "/house/admin.py": ["/house/models.py"], "/house/serializer.py": ["/house/models.py"], "/house/views.py": ["/house/models.py", "/house/serializer.py", "/house/permission.py"]}
|
27,014
|
rendaman0215/housesearch_backend
|
refs/heads/master
|
/house/migrations/0032_auto_20210625_1638.py
|
# Generated by Django 3.1.2 on 2021-06-25 07:38
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add expimage/layoutimage uploads to Expense and delete the now
    unused Picture model."""
    dependencies = [
        ('house', '0031_remove_expense_layoutimage'),
    ]
    operations = [
        migrations.AddField(
            model_name='expense',
            name='expimage',
            field=models.ImageField(blank=True, null=True, upload_to='expense/'),
        ),
        migrations.AddField(
            model_name='expense',
            name='layoutimage',
            field=models.ImageField(blank=True, null=True, upload_to='expense/'),
        ),
        migrations.DeleteModel(
            name='Picture',
        ),
    ]
|
{"/housesearch_backend/urls.py": ["/house/urls.py"], "/house/admin.py": ["/house/models.py"], "/house/serializer.py": ["/house/models.py"], "/house/views.py": ["/house/models.py", "/house/serializer.py", "/house/permission.py"]}
|
27,015
|
rendaman0215/housesearch_backend
|
refs/heads/master
|
/house/migrations/0024_auto_20210605_0309.py
|
# Generated by Django 3.1.2 on 2021-06-04 18:09
from django.db import migrations, models
import uuid
class Migration(migrations.Migration):
    """Drop unused date/image/title fields and convert the Expense and
    Reviews primary keys to UUIDs."""
    dependencies = [
        ('house', '0023_auto_20210604_0256'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='expense',
            name='update_date',
        ),
        migrations.RemoveField(
            model_name='reviews',
            name='image',
        ),
        migrations.RemoveField(
            model_name='reviews',
            name='title',
        ),
        migrations.RemoveField(
            model_name='reviews',
            name='update_date',
        ),
        migrations.AlterField(
            model_name='expense',
            name='id',
            field=models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False),
        ),
        migrations.AlterField(
            model_name='reviews',
            name='id',
            field=models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False),
        ),
    ]
|
{"/housesearch_backend/urls.py": ["/house/urls.py"], "/house/admin.py": ["/house/models.py"], "/house/serializer.py": ["/house/models.py"], "/house/views.py": ["/house/models.py", "/house/serializer.py", "/house/permission.py"]}
|
27,016
|
rendaman0215/housesearch_backend
|
refs/heads/master
|
/house/migrations/0007_expense_landarea.py
|
# Generated by Django 3.1.2 on 2021-05-04 07:52
from django.db import migrations, models
# Auto-generated Django migration (recorded schema state) -- do not hand-edit.
class Migration(migrations.Migration):
    dependencies = [
        ('house', '0006_auto_20210429_1458'),
    ]
    # Adds the land-area column; default 30 backfills existing rows only
    # (preserve_default=False drops the default afterwards).
    operations = [
        migrations.AddField(
            model_name='expense',
            name='landarea',
            field=models.IntegerField(default=30),
            preserve_default=False,
        ),
    ]
|
{"/housesearch_backend/urls.py": ["/house/urls.py"], "/house/admin.py": ["/house/models.py"], "/house/serializer.py": ["/house/models.py"], "/house/views.py": ["/house/models.py", "/house/serializer.py", "/house/permission.py"]}
|
27,017
|
rendaman0215/housesearch_backend
|
refs/heads/master
|
/house/migrations/0029_auto_20210624_1253.py
|
# Generated by Django 3.1.2 on 2021-06-24 03:53
from django.db import migrations, models
# Auto-generated Django migration (recorded schema state) -- do not hand-edit.
class Migration(migrations.Migration):
    dependencies = [
        ('house', '0028_auto_20210624_1229'),
    ]
    # Removes the review "attach" fields and adds the expense layout image.
    operations = [
        migrations.RemoveField(
            model_name='reviews',
            name='attachcomment',
        ),
        migrations.RemoveField(
            model_name='reviews',
            name='attachrate',
        ),
        migrations.AddField(
            model_name='expense',
            name='layoutimage',
            field=models.ImageField(blank=True, null=True, upload_to='expense/'),
        ),
    ]
|
{"/housesearch_backend/urls.py": ["/house/urls.py"], "/house/admin.py": ["/house/models.py"], "/house/serializer.py": ["/house/models.py"], "/house/views.py": ["/house/models.py", "/house/serializer.py", "/house/permission.py"]}
|
27,018
|
rendaman0215/housesearch_backend
|
refs/heads/master
|
/house/migrations/0006_auto_20210429_1458.py
|
# Generated by Django 3.1.2 on 2021-04-29 05:58
from django.db import migrations
# Auto-generated Django migration (recorded schema state) -- do not hand-edit.
class Migration(migrations.Migration):
    dependencies = [
        ('house', '0005_auto_20210429_1419'),
    ]
    # Sets Japanese admin display names; no database schema change.
    operations = [
        migrations.AlterModelOptions(
            name='expense',
            options={'verbose_name': '費用明細'},
        ),
        migrations.AlterModelOptions(
            name='makercard',
            options={'verbose_name': 'メーカー'},
        ),
        migrations.AlterModelOptions(
            name='reviews',
            options={'verbose_name': '口コミ'},
        ),
    ]
|
{"/housesearch_backend/urls.py": ["/house/urls.py"], "/house/admin.py": ["/house/models.py"], "/house/serializer.py": ["/house/models.py"], "/house/views.py": ["/house/models.py", "/house/serializer.py", "/house/permission.py"]}
|
27,019
|
rendaman0215/housesearch_backend
|
refs/heads/master
|
/house/migrations/0014_auto_20210508_0057.py
|
# Generated by Django 3.1.2 on 2021-05-07 15:57
from django.db import migrations, models
# Auto-generated Django migration (recorded schema state) -- do not hand-edit.
class Migration(migrations.Migration):
    dependencies = [
        ('house', '0013_auto_20210507_1810'),
    ]
    # Makes both image fields optional and routes uploads to subdirectories.
    operations = [
        migrations.AlterField(
            model_name='expense',
            name='image',
            field=models.ImageField(blank=True, null=True, upload_to='expense/'),
        ),
        migrations.AlterField(
            model_name='reviews',
            name='image',
            field=models.ImageField(blank=True, null=True, upload_to='review/'),
        ),
    ]
|
{"/housesearch_backend/urls.py": ["/house/urls.py"], "/house/admin.py": ["/house/models.py"], "/house/serializer.py": ["/house/models.py"], "/house/views.py": ["/house/models.py", "/house/serializer.py", "/house/permission.py"]}
|
27,020
|
rendaman0215/housesearch_backend
|
refs/heads/master
|
/house/migrations/0015_auto_20210508_1846.py
|
# Generated by Django 3.1.2 on 2021-05-08 09:46
from django.db import migrations, models
# Auto-generated Django migration (recorded schema state) -- do not hand-edit.
class Migration(migrations.Migration):
    dependencies = [
        ('house', '0014_auto_20210508_0057'),
    ]
    # Adds romaji / hiragana / katakana maker-name variants; the defaults
    # backfill existing rows only (preserve_default=False).
    operations = [
        migrations.AddField(
            model_name='makercard',
            name='name_eng',
            field=models.CharField(default='home', max_length=100),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='makercard',
            name='name_hira',
            field=models.CharField(default='ホーム', max_length=100),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='makercard',
            name='name_kata',
            field=models.CharField(default='ホーム', max_length=100),
            preserve_default=False,
        ),
    ]
|
{"/housesearch_backend/urls.py": ["/house/urls.py"], "/house/admin.py": ["/house/models.py"], "/house/serializer.py": ["/house/models.py"], "/house/views.py": ["/house/models.py", "/house/serializer.py", "/house/permission.py"]}
|
27,021
|
rendaman0215/housesearch_backend
|
refs/heads/master
|
/house/migrations/0003_auto_20210429_1408.py
|
# Generated by Django 3.1.2 on 2021-04-29 05:08
from django.db import migrations, models
# Auto-generated Django migration (recorded schema state) -- do not hand-edit.
class Migration(migrations.Migration):
    dependencies = [
        ('house', '0002_expense_reviews'),
    ]
    # Renames 'content' to 'comment' on both models and adds the cost column.
    operations = [
        migrations.RenameField(
            model_name='expense',
            old_name='content',
            new_name='comment',
        ),
        migrations.RenameField(
            model_name='reviews',
            old_name='content',
            new_name='comment',
        ),
        migrations.AddField(
            model_name='expense',
            name='cost',
            field=models.IntegerField(default=10),
            preserve_default=False,
        ),
    ]
|
{"/housesearch_backend/urls.py": ["/house/urls.py"], "/house/admin.py": ["/house/models.py"], "/house/serializer.py": ["/house/models.py"], "/house/views.py": ["/house/models.py", "/house/serializer.py", "/house/permission.py"]}
|
27,022
|
rendaman0215/housesearch_backend
|
refs/heads/master
|
/house/migrations/0021_makercard_images.py
|
# Generated by Django 3.1.2 on 2021-05-19 15:43
from django.db import migrations, models
# Auto-generated Django migration (recorded schema state) -- do not hand-edit.
class Migration(migrations.Migration):
    dependencies = [
        ('house', '0020_auto_20210520_0042'),
    ]
    # Adds the maker image; placeholder default 'a' backfills existing rows.
    operations = [
        migrations.AddField(
            model_name='makercard',
            name='images',
            field=models.ImageField(default='a', upload_to=''),
            preserve_default=False,
        ),
    ]
|
{"/housesearch_backend/urls.py": ["/house/urls.py"], "/house/admin.py": ["/house/models.py"], "/house/serializer.py": ["/house/models.py"], "/house/views.py": ["/house/models.py", "/house/serializer.py", "/house/permission.py"]}
|
27,023
|
rendaman0215/housesearch_backend
|
refs/heads/master
|
/house/views.py
|
# Model, Form, Serializer, Permissionをimport
from .models import MakerCard, Reviews, Expense
from .serializer import MakerCardSerializer, ReviewSerializer, ExpenseSerializer
from .permission import IsAdminOrReadOnly, IsMeOrAdminOrGuestOrOthers
# REST FRAMEWORK系
from rest_framework import viewsets, status, generics
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.permissions import IsAuthenticated
from rest_framework.decorators import action
class isPosted(APIView):
    """Report whether the authenticated user already posted for a maker.

    GET returns ``{'RevPosted': <pks>, 'ExpPosted': <pks>}`` where each
    value is a comma-separated string of primary keys of the user's
    existing Reviews / Expense posts for the maker named by the URL
    kwarg ``targetmaker`` ("" when there are none).
    """
    # Only logged-in users may query their own posting state.
    permission_classes = [IsAuthenticated]

    @staticmethod
    def _posted_pks(queryset):
        """Comma-joined pk string for *queryset* ("" when empty)."""
        # Single values_list query; the original evaluated the same
        # filter twice (exists() followed by filter()).
        return ', '.join(map(str, queryset.values_list('pk', flat=True)))

    def get(self, request, format=None, **kwargs):
        author = str(request.user)
        maker = self.kwargs['targetmaker']
        return Response({
            'RevPosted': self._posted_pks(
                Reviews.objects.filter(author=author, maker_name=maker)),
            'ExpPosted': self._posted_pks(
                Expense.objects.filter(author=author, maker_name=maker)),
        })
class MakerCardViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for maker master records."""
    # Look up makers by English name in the URL instead of the numeric pk.
    lookup_field = "name_eng"
    # Model queryset
    queryset = MakerCard.objects.all()
    # Custom permission -- per its name, read for everyone, write for admins
    # only; confirm against house/permission.py.
    permission_classes = [IsAdminOrReadOnly]
    # Serializer
    serializer_class = MakerCardSerializer
class ReviewViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for maker reviews, newest first."""
    # Newest reviews first.
    queryset = Reviews.objects.order_by('-create_date')
    # Custom object-level permission (see house/permission.py).
    permission_classes = (IsMeOrAdminOrGuestOrOthers,)
    # Serializer
    serializer_class = ReviewSerializer
    # django-filter query-string filtering by maker and author.
    filter_fields = ('maker_name','author',)
class ExpenseViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for expense posts, newest first."""
    # Newest expense posts first.
    queryset = Expense.objects.order_by('-create_date')
    # Custom object-level permission (see house/permission.py).
    permission_classes = (IsMeOrAdminOrGuestOrOthers,)
    # Serializer
    serializer_class = ExpenseSerializer
    # django-filter query-string filtering by maker and author.
    filter_fields = ('maker_name','author',)
class PingViewSet(generics.GenericAPIView):
    """Return the caller's username -- presumably an auth "ping" the
    frontend uses to check the session; confirm against the client code."""
    # Requires a valid login; unauthenticated requests get 401/403.
    permission_classes = (IsAuthenticated,)
    def get(self, request, format=None):
        # Echo the authenticated username back to the caller.
        return Response(data={'username': request.user.username}, status=status.HTTP_200_OK)
|
{"/housesearch_backend/urls.py": ["/house/urls.py"], "/house/admin.py": ["/house/models.py"], "/house/serializer.py": ["/house/models.py"], "/house/views.py": ["/house/models.py", "/house/serializer.py", "/house/permission.py"]}
|
27,024
|
rendaman0215/housesearch_backend
|
refs/heads/master
|
/house/migrations/0027_expense_hidden.py
|
# Generated by Django 3.1.2 on 2021-06-23 16:45
from django.db import migrations, models
# Auto-generated Django migration (recorded schema state) -- do not hand-edit.
class Migration(migrations.Migration):
    dependencies = [
        ('house', '0026_auto_20210622_1603'),
    ]
    # Adds the visibility flag for expense posts.
    operations = [
        migrations.AddField(
            model_name='expense',
            name='hidden',
            field=models.BooleanField(default=False),
            preserve_default=False,
        ),
    ]
|
{"/housesearch_backend/urls.py": ["/house/urls.py"], "/house/admin.py": ["/house/models.py"], "/house/serializer.py": ["/house/models.py"], "/house/views.py": ["/house/models.py", "/house/serializer.py", "/house/permission.py"]}
|
27,025
|
rendaman0215/housesearch_backend
|
refs/heads/master
|
/house/migrations/0005_auto_20210429_1419.py
|
# Generated by Django 3.1.2 on 2021-04-29 05:19
from django.db import migrations, models
# Auto-generated Django migration (recorded schema state) -- do not hand-edit.
class Migration(migrations.Migration):
    dependencies = [
        ('house', '0004_auto_20210429_1418'),
    ]
    # Makes the expense image optional.
    operations = [
        migrations.AlterField(
            model_name='expense',
            name='images',
            field=models.ImageField(blank=True, null=True, upload_to=''),
        ),
    ]
|
{"/housesearch_backend/urls.py": ["/house/urls.py"], "/house/admin.py": ["/house/models.py"], "/house/serializer.py": ["/house/models.py"], "/house/views.py": ["/house/models.py", "/house/serializer.py", "/house/permission.py"]}
|
27,026
|
rendaman0215/housesearch_backend
|
refs/heads/master
|
/house/models.py
|
from django.db import models
from django.db.models import Avg
from django.core.validators import MaxValueValidator, MinValueValidator
class MakerCard(models.Model):
    """House-maker master record.

    The aggregate helpers compute statistics over the ``Reviews`` and
    ``Expense`` rows whose ``maker_name`` equals this maker's
    ``name_eng`` -- the models are linked by that string, not a FK.
    The original implementation repeated the same aggregate/round/None
    pattern eight times; it is consolidated into ``_rounded_avg``.
    """
    id = models.AutoField(primary_key=True)
    description = models.TextField()
    name = models.CharField(max_length=100)
    name_hira = models.CharField(max_length=100)
    name_kata = models.CharField(max_length=100)
    name_eng = models.CharField(max_length=100)
    images = models.ImageField(upload_to='')
    create_date = models.DateTimeField(auto_now=True)

    def __str__(self):
        return self.name

    # ---- internal helpers -------------------------------------------
    def _reviews(self):
        """Queryset of reviews posted for this maker."""
        return Reviews.objects.filter(maker_name=self.name_eng)

    def _expenses(self):
        """Queryset of expense posts for this maker."""
        return Expense.objects.filter(maker_name=self.name_eng)

    @staticmethod
    def _rounded_avg(queryset, field, places):
        """Average of *field* over *queryset*, rounded to *places*
        digits; 0.0 when the queryset is empty (Avg returns None)."""
        value = queryset.aggregate(Avg(field))['{}__avg'.format(field)]
        if value is None:
            return 0.0
        return round(value, places)

    # ---- public aggregates (names/behavior unchanged) ----------------
    def get_review_count(self):
        return self._reviews().count()

    def get_expense_count(self):
        return self._expenses().count()

    def get_expense_avg(self):
        """Average build cost (1 decimal place)."""
        return self._rounded_avg(self._expenses(), 'cost', 1)

    def get_landarea_avg(self):
        """Average land area (1 decimal place)."""
        return self._rounded_avg(self._expenses(), 'landarea', 1)

    def get_rateavg(self):
        """Average overall rating across reviews (2 decimal places)."""
        return self._rounded_avg(self._reviews(), 'avgrate', 2)

    def get_costavg(self):
        return self._rounded_avg(self._reviews(), 'costrate', 2)

    def get_designavg(self):
        return self._rounded_avg(self._reviews(), 'designrate', 2)

    def get_layoutavg(self):
        return self._rounded_avg(self._reviews(), 'layoutrate', 2)

    def get_specavg(self):
        return self._rounded_avg(self._reviews(), 'specrate', 2)

    def get_guaranteeavg(self):
        return self._rounded_avg(self._reviews(), 'guaranteerate', 2)

    def get_salesavg(self):
        return self._rounded_avg(self._reviews(), 'salesrate', 2)

    def ratetostr(self):
        """Render the average rating as five Bootstrap-icon stars
        (full / half / empty) for direct HTML embedding."""
        ratestr = ""
        rateavg = self.get_rateavg()
        for _ in range(int(rateavg)):
            ratestr += '<i class="bi bi-star-fill rateicon"></i>'
        frac = float(rateavg) - float(int(rateavg))
        # A fractional part up to .5 renders a half star, above .5 rounds
        # up to a full star; exact 0 and 5 skip both branches.
        if frac > 0 and frac <= 0.5 and rateavg != 5 and rateavg != 0:
            ratestr += '<i class="bi bi-star-half rateicon"></i>'
        elif frac >= 0.5:
            ratestr += '<i class="bi bi-star-fill rateicon"></i>'
        for _ in range(int(5.0 - float(rateavg))):
            ratestr += '<i class="bi bi-star rateicon"></i>'
        return ratestr

    class Meta:
        verbose_name = "メーカー"
class Reviews(models.Model):
    """User review of a house maker: six 0-5 category ratings, each with
    a free-text comment, plus a stored overall average."""
    id = models.AutoField(primary_key=True)
    # Poster's username stored as a plain string, not a FK.
    author = models.CharField(max_length=100)
    status = models.CharField(max_length=100)
    costrate = models.IntegerField(validators=[MinValueValidator(0), MaxValueValidator(5)])
    costcomment = models.TextField()
    designrate = models.IntegerField(validators=[MinValueValidator(0), MaxValueValidator(5)])
    designcomment = models.TextField()
    layoutrate = models.IntegerField(validators=[MinValueValidator(0), MaxValueValidator(5)])
    layoutcomment = models.TextField()
    specrate = models.IntegerField(validators=[MinValueValidator(0), MaxValueValidator(5)])
    speccomment = models.TextField()
    guaranteerate = models.IntegerField(validators=[MinValueValidator(0), MaxValueValidator(5)])
    guaranteecomment = models.TextField()
    salesrate = models.IntegerField(validators=[MinValueValidator(0), MaxValueValidator(5)])
    salescomment = models.TextField()
    # Stored overall average -- presumably populated by the caller from
    # get_rateavg(); verify against the serializer.
    avgrate = models.DecimalField(max_digits=3,decimal_places=2)
    # English name of the reviewed maker (matches MakerCard.name_eng).
    maker_name = models.CharField(max_length=100)
    # auto_now refreshes on every save, so this is "last modified".
    create_date = models.DateTimeField(auto_now=True)
    def get_rateavg(self):
        """Mean of the six category ratings, rounded to 2 decimals."""
        total = self.costrate+self.designrate+self.layoutrate+self.specrate+self.guaranteerate+self.salesrate
        avgrateavg = total / 6
        avgrateavg = round(avgrateavg,2)
        return avgrateavg
    def __str__(self):
        return self.maker_name + " : " + self.author
    class Meta:
        verbose_name = "口コミ"
class Expense(models.Model):
    """User-submitted cost breakdown for building with a given maker."""
    id = models.AutoField(primary_key=True)
    # Poster's username string (no FK), mirroring Reviews.author.
    author = models.CharField(max_length=100)
    status = models.CharField(max_length=100)
    # When True the post is marked hidden ("非表示") in __str__;
    # presumably also filtered for display -- confirm against the frontend.
    hid = models.BooleanField(default=True)
    cost = models.IntegerField()
    landarea = models.IntegerField()
    gradecomment = models.TextField()
    costupcomment = models.TextField()
    costdowncomment = models.TextField()
    expimage = models.ImageField(upload_to='expense/', blank=True, null=True)
    layoutimage = models.ImageField(upload_to='expense/', blank=True, null=True)
    # English name of the maker (matches MakerCard.name_eng).
    maker_name = models.CharField(max_length=100)
    # auto_now refreshes on every save, so this is "last modified".
    create_date = models.DateTimeField(auto_now=True)
    def __str__(self):
        # Hidden marker is appended directly, with no separator.
        sta = ""
        if self.hid:
            sta = "非表示"
        return self.maker_name + " : " + self.author + sta
    class Meta:
        verbose_name = "費用明細"
|
{"/housesearch_backend/urls.py": ["/house/urls.py"], "/house/admin.py": ["/house/models.py"], "/house/serializer.py": ["/house/models.py"], "/house/views.py": ["/house/models.py", "/house/serializer.py", "/house/permission.py"]}
|
27,031
|
yuanchen8911/serverless-chatbot
|
refs/heads/master
|
/tig_faas_bot/FaaS_Functions/Slack_Version/post-message-slack/handler.py
|
import os
import json
import sys
import logging
import requests
SLACK_BOT_TOKEN_SECRET_NAME = os.getenv("SLACK_BOT_TOKEN_SECRET_NAME", 'slack_bot_token')
SLACK_BOT_TOKEN = None
SLACK_POST_URL = os.getenv("SLACK_POST_URL", 'https://slack.com/api/chat.postMessage')
from .general_utility import load_faas_secret
from .general_utility import get_pretty_JSON
def handle(req):
    """FaaS entry point: post a message back to Slack as the bot.

    Called with requests originating from query-dialogflow-slack and
    get-user-selected-option-from-slack-slack.
    Arguments:
        req {str} -- json format request content
    """
    logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
    logging.info("Enter FaaS Function-[post-message-slack]")
    # Loads the bot token from the docker secret into the module global
    # used later by post_message().
    set_slack_bot_token()
    req_dict = json.loads(req)
    logging.debug("Req Content:{}".format(get_pretty_JSON(req_dict)))
    handle_req(req_dict)
def handle_req(req_dict):
    """Dispatch *req_dict* to the handler matching its 'source' field.

    Arguments:
        req_dict {dict} -- request content
    """
    # Guard clause: without a source we cannot route the request.
    if 'source' not in req_dict:
        logging.error("No source info, Can't handle this request")
        return
    source = req_dict['source']
    if source == 'bot-crud-slack':
        logging.info("POST Async Message Received from function:{}".format(source))
        handle_by_source_bot_crud(req_dict)
    elif source == 'query-dialogflow-slack':
        logging.info("POST Message from function:{}".format(source))
        hanlle_by_source_query_dialogflow_slack(req_dict)
    else:
        logging.error("Can't handle request from source: #{}#".format(source))
def handle_by_source_bot_crud(req_dict):
    """Post the messages returned by the bot-crud function to Slack.

    Arguments:
        req_dict {dict} -- req content
    """
    # Convert each returned message into a Slack attachment, then send
    # them all in one chat.postMessage call.
    slack_attachments = []
    for message in req_dict.get('return_message_list', []):
        append_message_to_attachments(slack_attachments, message)
    post_message({
        'channel': req_dict['channel'],
        'attachments': slack_attachments,
    })
def hanlle_by_source_query_dialogflow_slack(req_dict):
    """Handle a message coming from function [query-dialogflow-slack].

    Arguments:
        req_dict {dict} -- req content
    """
    # NOTE(review): the function name misspells "handle"; renaming it
    # would also require updating the dispatcher in handle_req().
    google_dialogflow_response = req_dict['data']
    channel = req_dict['channel']
    attachments = []
    # Order matters: prompt text first, then webhook return messages,
    # then the interactive option menu.
    append_google_dialogflow_prompt_message(attachments, google_dialogflow_response)
    append_google_dialogflow_return_message(attachments, google_dialogflow_response)
    append_google_dialogflow_option_list_message(attachments, google_dialogflow_response)
    message_dict = {
        'channel': channel,
        'attachments': attachments,
    }
    post_message(message_dict)
def append_google_dialogflow_return_message(attachments, google_dialogflow_response):
    """Append the webhook's return messages to *attachments*.

    Arguments:
        attachments {list} -- list of slack attachment
        google_dialogflow_response {dict} -- response content
    """
    result = google_dialogflow_response['result']
    # Only fulfillments carrying webhook data contribute messages.
    if 'fulfillment' not in result or 'data' not in result['fulfillment']:
        return
    webhook_data = result['fulfillment']['data']
    for message in webhook_data.get('return_message_list', []):
        append_message_to_attachments(attachments, message)
def append_google_dialogflow_option_list_message(attachments, google_dialogflow_response):
    """Append an interactive product-selection menu to *attachments*.

    Builds a Slack "select" attachment from the webhook's option_list,
    numbering each option's label (1-based).
    Arguments:
        attachments {list} -- list of attachments
        google_dialogflow_response {dict} -- response content dict
    """
    result = google_dialogflow_response['result']
    # Guard clauses replace the original nested if-pyramid.
    if 'fulfillment' not in result or 'data' not in result['fulfillment']:
        return
    webhook_data = result['fulfillment']['data']
    if 'option_list' not in webhook_data:
        return
    menu_options = [
        {'text': '{}-{}'.format(position + 1, option), 'value': option}
        for position, option in enumerate(webhook_data.get('option_list', []))
    ]
    attachments.append({
        'text': "choose a product",
        "attachment_type": "default",
        "fallback": "",
        "callback_id": "product_selection",
        'actions': [{
            'name': 'product_option_list',
            'text': "select a product",
            'type': 'select',
            'options': menu_options,
        }],
    })
def append_google_dialogflow_prompt_message(attachments, google_dialogflow_response):
    """Append Dialogflow's spoken prompt (``fulfillment.speech``) to
    *attachments* as a blue Slack attachment.

    Arguments:
        attachments {list} -- list of slack attachment
        google_dialogflow_response {dict} -- response content
    """
    result = google_dialogflow_response['result']
    fulfillment = result.get('fulfillment', {})
    # BUG FIX: the original condition used `or`, which is true whenever
    # the 'speech' key exists -- even for None or "" -- so empty prompts
    # were posted. Require a non-empty speech value instead.
    if fulfillment.get('speech'):
        attachments.append({
            "text": fulfillment['speech'],
            "color": get_color_val("blue"),
            "fallback": ""
        })
def append_message_to_attachments(attachments, msg):
    """Convert *msg* into a Slack attachment and append it.

    Arguments:
        attachments {list} -- slack message - attachments
        msg {dict} -- message object with 'content' and 'color' keys
    """
    attachments.append({
        "text": msg['content'],
        "color": get_color_val(msg['color']),
        "fallback": ''
    })
def get_color_val(color_name):
    """Convert a color name to its HEX value.

    Arguments:
        color_name {str} -- color name, 'red/blue..'
    Returns:
        str -- hex format color value (grey for unknown names)
    """
    palette = {
        'green': '#00cc00',
        'red': '#cc3300',
        'blue': '#0099ff',
    }
    # Anything unrecognised falls back to grey.
    return palette.get(color_name, '#808080')
def post_message(message_dict):
    """Post *message_dict* to Slack via chat.postMessage.

    Arguments:
        message_dict {dict} -- slack format message (channel + attachments)
    """
    # SLACK_BOT_TOKEN is the module global populated by
    # set_slack_bot_token() before this runs.
    headers = {
        'Content-Type': 'application/json',
        'Authorization': ('Bearer %s' % SLACK_BOT_TOKEN),
    }
    response = requests.post(SLACK_POST_URL, data = json.dumps(message_dict), headers = headers)
    logging.debug("Slack Response Code: {}".format(response.status_code))
    logging.debug("Slack POST Response Content: {}".format(get_pretty_JSON(json.loads(response.text))))
def set_slack_bot_token():
    """Populate the module-global SLACK_BOT_TOKEN from the docker secret.
    """
    logging.info("Get Slack Bot Token From Docker Secret")
    # Mutates the module-level token later read by post_message().
    global SLACK_BOT_TOKEN
    SLACK_BOT_TOKEN = load_faas_secret(SLACK_BOT_TOKEN_SECRET_NAME)
|
{"/tig_faas_bot/FaaS_Functions/Slack_Version/bot-crud-slack/handler.py": ["/tig_faas_bot/FaaS_Functions/Slack_Version/bot-crud-slack/eve_mongo_utility.py"], "/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/handler.py": ["/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/faas_function_call_utility.py", "/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/general_utility.py"]}
|
27,032
|
yuanchen8911/serverless-chatbot
|
refs/heads/master
|
/tig_faas_bot/FaaS_Functions/Slack_Version/bot-crud-slack/eve_mongo_utility.py
|
"""
Utility for CRUD operation of the python-eve.
Build with python requests.
"""
import os
import sys
import json
import requests
import logging
# Entry point for access from docker container to host machine.
# Setting Python-Eve: app.run(host='0.0.0.0'), then use one of the following entry point.{docker0/docker_gwbridge/enp0s3}
# Using "ifconfig" to get the address of {docker0/docker_gwbridge/enp0s3}
# We use 'docker0' as the config
ENTRY_POINT = os.getenv("Eve_Mongo_Entry_POINT", 'http://172.17.0.1:5000')
def endpoint(resource):
    """Build the full python-eve URL for *resource*.

    Arguments:
        resource {str} -- [name of the resource]
    Returns:
        str -- endpoint of the resource
    """
    # ENTRY_POINT is the module-level eve base URL.
    return ENTRY_POINT + "/" + resource
def get_query_res_list(r):
    """Extract the list of documents from an eve query response.

    Arguments:
        r {Response-request} -- [respnose of request]
    Returns:
        list -- list of query result items
    """
    query_res_list = []
    # Eve wraps the documents in the '_items' field of the JSON body.
    for position, document in enumerate(r.json().get('_items'), start=1):
        query_res_list.append(document)
        logging.debug("Res id-{}:{}".format(position, document['_id']))
    logging.info("{} item returned by the query".format(len(query_res_list)))
    return query_res_list
def perform_document_query(resource, query_params):
    """Query documents from the specified resource(collection) according to the query_params
    Arguments:
        resource {str} -- [name of the collection in the mongodb]
        query_params {python-dict} -- [conditions used in the mongodb query]
    Returns:
        [r-requests] -- [handle of the requests]
    """
    # Create new payload for python-eve.
    # Example of query_params:
    # query_params = {
    #     "last_name" : "Jing",
    #     'username': "jd",
    # }
    # or we could simply pass a empty query_param dict object, which will query all the object
    # query_params = {
    # }
    # Sample query result
    # {
    #     "_id":ObjectId("5b244594291a071d22e3dea4"),
    #     "username":"jd",
    #     "first_name":"Dong",
    #     "last_name":"Jing",
    #     "role":"root",
    #     "user_uuid":"d8852848b54f4ea99e4ab0f00e199042"
    # }
    logging.info("Perform Document Query")
    # Eve expects the Mongo filter JSON-encoded under the "where" query arg.
    new_payload = {"where": json.dumps(query_params)}
    r = requests.get(endpoint(resource), params = new_payload)
    logging.debug("Query Request URL: {}".format(r.url))
    logging.debug("Query Response Status Code: {}".format(r.status_code))
    return r
def perform_document_insert(resource, data):
    """POST *data* into the given eve resource (mongo collection).

    Arguments:
        resource {str} -- name of the resource / collection
        data {Python dict} -- data want to be inserted into the collection.
    Returns:
        response -- handle of the request's response
    """
    logging.info("Perform Document Insert")
    r = requests.post(
        endpoint(resource),
        data=json.dumps(data),
        headers={'Content-Type': 'application/json'},
    )
    logging.debug("Insert Request URL: {}".format(r.url))
    logging.debug("Insert Response Status Code: {}".format(r.status_code))
    return r
def perform_document_delete(collection_name, document_id, document_ETag):
    """delete document
    Arguments:
        collection_name {str} -- name of the collection the document belonged to
        document_id {str} -- object id of the document
        document_ETag {str} -- Latest ETag of the current file. Must offer the latest one, or you will get 428 error.
    Returns:
        response of requests -- delete response
    """
    # BUG FIX: this previously logged "Perform Document Insert"
    # (copy-paste from perform_document_insert).
    logging.info("Perform Document Delete")
    headers = {
        "Content-Type": "application/json",
        "If-Match": document_ETag,
    }
    resource = "{}/{}".format(collection_name, document_id)
    r = requests.delete(endpoint(resource), headers = headers)
    logging.debug("DELETE Request URL: {}".format(r.url))
    logging.debug("ETag of deleted item: {}".format(document_ETag))
    logging.debug("DELETE Document Response Status Code: {}".format(r.status_code))
    return r
def perform_document_update(collection_name, document_id, data, document_ETag):
    """PATCH a document: update existing fields, add new ones.

    Arguments:
        collection_name {str} -- name of the collection that document belongs to
        document_id {str} -- object id of the document
        data {python dict} -- dict of the fields that want to update
        document_ETag {str} -- latest ETag offered by the MongoDB
    Returns:
        Response of request -- response of the PATCH request.
    """
    logging.info("Perform Document Update")
    item_url = endpoint("{}/{}".format(collection_name, document_id))
    request_headers = {
        "Content-Type": "application/json",
        "If-Match": document_ETag,
    }
    r = requests.patch(item_url, data=json.dumps(data), headers=request_headers)
    logging.debug("Update Request URL: {}".format(r.url))
    logging.debug("ETag of updated item: {}".format(document_ETag))
    logging.debug("Update Response Status Code: {}".format(r.status_code))
    return r
def perform_document_replace(collection_name, document_id, data, document_ETag):
    """PUT a document: replace the old document with *data* entirely.

    Arguments:
        collection_name {str} -- name of the collection that document belongs to
        document_id {str} -- object id of the document
        data {python dict} -- full replacement document
        document_ETag {str} -- latest ETag offered by the MongoDB
    Returns:
        Response of request -- response of the PUT request.
    """
    logging.info("Perform Document Replace")
    item_url = endpoint("{}/{}".format(collection_name, document_id))
    request_headers = {
        "Content-Type": "application/json",
        "If-Match": document_ETag,
    }
    r = requests.put(item_url, data=json.dumps(data), headers=request_headers)
    logging.debug("Replace Request URL: {}".format(r.url))
    logging.debug("ETag of updated item: {}".format(document_ETag))
    logging.debug("Replace Response Status Code: {}".format(r.status_code))
    return r
def get_internal_collection_name(external_collection_name):
    """Map an external collection alias to the internal resource name.

    Arguments:
        external_collection_name {str} -- [collection name used outside of the database]
    Returns:
        str -- [internal resource name]
    """
    name_map = {
        'DB_PRODUCT': 'product_info',
        'DB_SESSION': 'session_info',
        'DB_TEST': 'test_db',
    }
    # Unknown aliases yield a sentinel rather than raising.
    return name_map.get(external_collection_name, 'ERROR_COLLECTION_NAME')
def main():
    """Entry point for manual testing of this utility module.
    """
    # Only configures verbose logging; the CRUD helpers above are meant
    # to be imported, not run directly.
    logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
if __name__ == '__main__':
    main()
|
{"/tig_faas_bot/FaaS_Functions/Slack_Version/bot-crud-slack/handler.py": ["/tig_faas_bot/FaaS_Functions/Slack_Version/bot-crud-slack/eve_mongo_utility.py"], "/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/handler.py": ["/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/faas_function_call_utility.py", "/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/general_utility.py"]}
|
27,033
|
yuanchen8911/serverless-chatbot
|
refs/heads/master
|
/tig_faas_bot/FaaS_Functions/Slack_Version/script_build_deploy_bot_functions.py
|
"""Script for Build and Deploy Functions
"""
import os
import sys
import logging
import subprocess
def execute_and_print_result(command):
    """Run a shell command and print its combined stdout/stderr.

    Arguments:
        command {str} -- bash command
    """
    # stderr is folded into stdout so one stream carries everything.
    process = subprocess.Popen(
        command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    for raw_line in process.stdout.readlines():
        print(str(raw_line, "utf-8"))
def build_and_run_function(function_name):
    """Build then deploy a FaaS function from its <name>.yml stack file.

    Arguments:
        function_name {str} -- name of function
    """
    stack_file = '{}.yml'.format(function_name)
    # Same faas-cli invocation for both steps; only the verb differs.
    for action in ('Build', 'Deploy'):
        logging.info("{} Function:{}".format(action, function_name))
        execute_and_print_result(
            'faas-cli {} -f ./{}'.format(action.lower(), stack_file))
def remove_function(function_name):
    """Remove a deployed FaaS function's docker service.
    Arguments:
        function_name {str} -- name of function (doubles as the service name)
    """
    logging.info("RM Function:{}".format(function_name))
    execute_and_print_result('docker service rm {}'.format(function_name))
def main():
    """Build and deploy every Slack-bot FaaS function in sequence."""
    # Each name must have a matching <name>.yml stack file in the cwd.
    function_name_list = [
        'bot-crud-slack',
        'get-option-list-slack',
        'get-user-selected-option-from-slack-slack',
        'nlp-webhook-slack',
        'post-message-slack',
        'query-dialogflow-slack',
        'slack-event-webhook-slack',
    ]
    # Uncomment to tear the services down before redeploying:
    # for function_name in function_name_list:
    #     remove_function(function_name)
    for cur_function_name in function_name_list:
        build_and_run_function(cur_function_name)
if __name__ == '__main__':
    logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
    main()
|
{"/tig_faas_bot/FaaS_Functions/Slack_Version/bot-crud-slack/handler.py": ["/tig_faas_bot/FaaS_Functions/Slack_Version/bot-crud-slack/eve_mongo_utility.py"], "/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/handler.py": ["/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/faas_function_call_utility.py", "/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/general_utility.py"]}
|
27,034
|
yuanchen8911/serverless-chatbot
|
refs/heads/master
|
/tig_faas_bot/Eve_MongoDB/settings.py
|
"""
Setting for the python-eve.
"""
# MongoDB connection used by python-eve.
MONGO_HOST = 'localhost'
MONGO_PORT = 27017
MONGO_DBNAME = 'db_faas_bot'
# ALLOW_UNKNOWN = True because we don't want to define a schema for each
# resource below; eve will accept any document shape.
ALLOW_UNKNOWN = True
# Enable reads (GET), inserts (POST) and DELETE for resources/collections
# (if you omit this line, the API will default to ['GET'] and provide
# read-only access to the endpoint).
RESOURCE_METHODS = ['GET', 'POST', 'DELETE']
# Enable reads (GET), edits (PATCH), replacements (PUT) and deletes of
# individual items (defaults to read-only item access).
ITEM_METHODS = ['GET', 'PATCH', 'PUT', 'DELETE']
# Collections exposed without any schema constraint / extra settings.
DOMAIN = {
    'product_info': {},
    'session_info': {},
}
|
{"/tig_faas_bot/FaaS_Functions/Slack_Version/bot-crud-slack/handler.py": ["/tig_faas_bot/FaaS_Functions/Slack_Version/bot-crud-slack/eve_mongo_utility.py"], "/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/handler.py": ["/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/faas_function_call_utility.py", "/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/general_utility.py"]}
|
27,035
|
yuanchen8911/serverless-chatbot
|
refs/heads/master
|
/tig_faas_bot/Utility/product_info_utility.py
|
"""
Utility for product info related operartions
"""
import sys
import uuid
import logging
from pymongo import MongoClient
def generate_uuid_str():
    """Return a freshly generated UUID4 in its canonical string form.

    Returns:
        str -- 36-character hyphenated UUID4 string
    """
    fresh_id = uuid.uuid4()
    return str(fresh_id)
def read_product_info_from_csv_file(file_path):
    """Read product info rows from a CSV file.

    Each non-empty line is 'name,price'; every row gets a fresh UUID.
    Blank lines are now skipped -- the original raised IndexError on them.

    Arguments:
        file_path {str} -- path to the product-info CSV file

    Returns:
        list -- list of dicts with keys product_uuid, product_name,
                price_per_unit
    """
    product_info_list = []
    with open(file_path, 'r', encoding='utf-8') as f:
        for line in f:
            stripped = line.rstrip()
            if not stripped:
                # Tolerate blank lines instead of crashing on parts[1].
                continue
            parts = stripped.split(',')
            product_info_list.append({
                "product_uuid": generate_uuid_str(),
                "product_name": parts[0],
                "price_per_unit": float(parts[1]),
            })
    logging.info("Read {} product_info from csv file".format(len(product_info_list)))
    return product_info_list
def insert_product_info_mongo(file_path, MongoDB_Host, MongoDB_Port):
    """Load product info from a CSV file and insert each row into MongoDB.

    Arguments:
        file_path {str} -- path of product info
        MongoDB_Host {str} -- Host of MongoDB
        MongoDB_Port {int} -- Port of MongoDB
    """
    logging.info("MongoDB HOST:{}, Port:{}".format(MongoDB_Host, MongoDB_Port))
    client = MongoClient(MongoDB_Host, MongoDB_Port)
    # Target the db_faas_bot.product_info collection.
    collection = client.db_faas_bot.product_info
    rows = read_product_info_from_csv_file(file_path)
    total = len(rows)
    for idx, row in enumerate(rows):
        logging.info("No.({}/{}), Insert {} to db".format(idx + 1, total, row))
        collection.insert_one(row)
    client.close()
    logging.info("Inserted {} product_info into the database".format(total))
def main():
    """CLI entry point: 'INSERT_PRODUCT_INFO <csv_path>' loads products into MongoDB.

    Reads the command and file path from sys.argv; does nothing for other
    commands. Returns None in all cases.
    """
    argvs = sys.argv
    logging.info("Len{}, {}".format(len(argvs), argvs))
    if len(argvs) < 3:
        logging.error("Parameters missing")
        return
    operate_command = argvs[1]
    file_path = argvs[2]
    if operate_command == 'INSERT_PRODUCT_INFO':
        try:
            logging.info("Insert Product Info into MongoDB")
            MongoDB_Port = 27017
            MongoDB_Host = 'localhost'
            insert_product_info_mongo(file_path, MongoDB_Host, MongoDB_Port)
        except Exception:
            # Was a bare 'except:' -- narrowed, and the traceback is now logged.
            logging.exception("It seems there exist error in your command paramenters")
        else:
            logging.info("Sucessfully product info into MongoDB")
    else:
        logging.info("Do Nothing")


if __name__ == "__main__":
    logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
    main()
|
{"/tig_faas_bot/FaaS_Functions/Slack_Version/bot-crud-slack/handler.py": ["/tig_faas_bot/FaaS_Functions/Slack_Version/bot-crud-slack/eve_mongo_utility.py"], "/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/handler.py": ["/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/faas_function_call_utility.py", "/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/general_utility.py"]}
|
27,036
|
yuanchen8911/serverless-chatbot
|
refs/heads/master
|
/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/faas_function_call_utility.py
|
"""
Utility for Asynchronous / Synchronous call a FaaS function
"""
import sys
import json
import logging
import requests
def get_function_url(is_async, base_url, function_name):
    """Build the HTTP endpoint URL for a FaaS function.

    Arguments:
        is_async {bool} -- route through the async gateway path when True
        base_url {str} -- FaaS gateway address
        function_name {str} -- name of the FaaS function

    Returns:
        str -- function URL address,
        e.g. 'http://127.0.0.1:8080/async-function/function_name'
    """
    route = "async-function" if is_async is True else "function"
    function_url = "{}/{}/{}".format(base_url, route, function_name)
    logging.debug("Get Function URL. Is Async: {}, Function URL: {}".format(is_async, function_url))
    return function_url
def synchronous_call_function(function_url, request_data_body_dict, custom_header_dict=None):
    """Synchronously call a FaaS function over HTTP GET.

    Arguments:
        function_url {str} -- URL of FaaS function
        request_data_body_dict {dict} -- dict of function HTTP call body

    Keyword Arguments:
        custom_header_dict {dict} -- custom HTTP header info (default: None)

    Returns:
        response -- response of HTTP request
    """
    logging.info("Sync Call FaaS function: {}".format(function_url))
    # Mutable default argument ({}) replaced with the None sentinel to avoid
    # one shared dict across calls; callers' behavior is unchanged.
    headers = dict(custom_header_dict) if custom_header_dict else {}
    response = requests.get(function_url, data=json.dumps(request_data_body_dict), headers=headers)
    logging.info("Sync Call Status: {}".format(response.status_code))
    return response
def asynchronous_call_function(function_url, request_data_body_dict, call_back_url=None, custom_header_dict=None):
    """Asynchronously call a FaaS function over HTTP POST.

    Arguments:
        function_url {str} -- URL of FaaS function
        request_data_body_dict {dict} -- dict of function HTTP call body

    Keyword Arguments:
        call_back_url {str} -- call back url, sent as X-Callback-Url (default: None)
        custom_header_dict {dict} -- custom HTTP header info (default: None)

    Returns:
        response -- response of HTTP request
    """
    logging.info("Async Call FaaS function: {}".format(function_url))
    headers = {
        'X-Callback-Url': call_back_url,
    }
    # Mutable default argument ({}) replaced with the None sentinel; custom
    # headers still override X-Callback-Url if a caller supplies one.
    if custom_header_dict:
        headers.update(custom_header_dict)
    response = requests.post(function_url, data=json.dumps(request_data_body_dict), headers=headers)
    logging.info("Async Call Status: {}".format(response.status_code))
    return response
def call_faas_function(base_url, function_name, request_data_body_dict, is_async=False, call_back_url=None, custom_header_dict=None):
    """Call a FaaS Function, either synchronously or asynchronously.

    Arguments:
        base_url {str} -- Address where FaaS is deployed.
        function_name {str} -- Name of the FaaS Function
        request_data_body_dict {dict} -- dict of function HTTP call body

    Keyword Arguments:
        is_async {bool} -- is asynchronous (default: False)
        call_back_url {str} -- url of callback (default: None)
        custom_header_dict {dict} -- custom HTTP header info (default: None)

    Returns:
        request response -- response of request
    """
    logging.info("Call FaaS function: {}".format(function_name))
    function_url = get_function_url(is_async, base_url, function_name)
    # Mutable default {} replaced by None; a real dict is passed downstream so
    # the helpers' header handling works regardless of which default they use.
    headers = custom_header_dict if custom_header_dict is not None else {}
    if is_async is True:
        logging.info("Async Call")
        return asynchronous_call_function(function_url, request_data_body_dict, call_back_url, headers)
    logging.info("Sync Call")
    return synchronous_call_function(function_url, request_data_body_dict, headers)
def main():
    """Ad-hoc smoke test: async-call a demo function and dump the response."""
    gateway = "http://127.0.0.1:8080"
    callee = 'faas-async-callee'
    callback = "{}/function/{}".format("http://gateway:8080", 'faas-async-callback-receiver')
    logging.debug("TEST Initial Call Res:")
    res = call_faas_function(gateway, callee, {}, True, callback)
    logging.debug("TEST RES")
    logging.debug(res.url)
    logging.debug(res.status_code)
    logging.debug(res.text)


if __name__ == '__main__':
    logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
    main()
|
{"/tig_faas_bot/FaaS_Functions/Slack_Version/bot-crud-slack/handler.py": ["/tig_faas_bot/FaaS_Functions/Slack_Version/bot-crud-slack/eve_mongo_utility.py"], "/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/handler.py": ["/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/faas_function_call_utility.py", "/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/general_utility.py"]}
|
27,037
|
yuanchen8911/serverless-chatbot
|
refs/heads/master
|
/tig_faas_bot/FaaS_Functions/Slack_Version/slack-event-webhook-slack/handler.py
|
import os
import sys
import logging
import json
from .faas_function_call_utility import call_faas_function
from .general_utility import get_pretty_JSON
# Gateway address and the Dialogflow-query function name; both env-overridable.
FAAS_GATEWAY_URL = os.getenv('FAAS_GATEWAY_URL', 'http://gateway:8080')
FUNCTION_NAME_QUERY_DIALOGFLOW_SLACK = os.getenv('FUNCTION_NAME_QUERY_DIALOGFLOW_SLACK', 'query-dialogflow-slack')
# NOTE(review): CALLBACK_URL is empty and not referenced in this module --
# confirm it is dead before removing.
CALLBACK_URL = ''
def handle(req):
    """Entry point for incoming Slack events.

    Arguments:
        req {str} -- json format slack event content

    Returns:
        str -- response for slack event
    """
    logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
    logging.info("FaaS-[slack-event-webhook-slack]")
    event_payload = json.loads(req)
    logging.debug("===We Received the following Request Content From Slack Event====")
    logging.debug(get_pretty_JSON(event_payload))
    # All real work is delegated to the per-type dispatcher.
    return handle_by_req_type(event_payload)
def handle_by_req_type(slack_req_dict):
    """Dispatch a Slack event payload by its 'type' field.

    Arguments:
        slack_req_dict {dict} -- slack event content

    Returns:
        str -- event return message
    """
    retry_header = os.getenv("Http_X_Slack_Retry_Num", '0')
    logging.info("Header Retry_Num[Http_X_Slack_Retry_Num]: {}".format(retry_header))
    # Slack re-delivers events it believes failed; only handle the first one.
    if int(retry_header) >= 1:
        return "We have received such one"
    logging.info("Handle First One")
    req_type = slack_req_dict['type']
    if req_type == 'url_verification':
        # Endpoint handshake: echo the challenge token back to Slack.
        logging.info("Event URL VERIFICATION HANDSHAKE")
        return slack_req_dict.get('challenge', '')
    if req_type == 'event_callback':
        logging.info("Async Handle Slack Req")
        event = slack_req_dict['event']
        if 'subtype' in event:
            # Messages with a subtype come from bots; ignore to avoid loops.
            logging.info("Message From Bot, Ignore it")
        else:
            logging.info("Handle Message From User")
            call_faas_function(FAAS_GATEWAY_URL, FUNCTION_NAME_QUERY_DIALOGFLOW_SLACK, slack_req_dict, is_async=True)
        return "Return Message, Wait Event Handle Response"
    return "Empty Message, Wrong Slack Request"
|
{"/tig_faas_bot/FaaS_Functions/Slack_Version/bot-crud-slack/handler.py": ["/tig_faas_bot/FaaS_Functions/Slack_Version/bot-crud-slack/eve_mongo_utility.py"], "/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/handler.py": ["/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/faas_function_call_utility.py", "/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/general_utility.py"]}
|
27,038
|
yuanchen8911/serverless-chatbot
|
refs/heads/master
|
/tig_faas_bot/FaaS_Functions/Slack_Version/bot-crud-slack/handler.py
|
"""
FaaS function for operating database - MongoDB
"""
import sys
import logging
import json
from .eve_mongo_utility import perform_document_insert
from .eve_mongo_utility import perform_document_query
from .eve_mongo_utility import perform_document_update
from .eve_mongo_utility import perform_document_delete
from .eve_mongo_utility import perform_document_replace
from .eve_mongo_utility import get_internal_collection_name
from .eve_mongo_utility import get_query_res_list
from .general_utility import get_pretty_JSON
# External (request-facing) collection identifiers; mapped to real Mongo
# collection names via get_internal_collection_name() at operation time.
COLLECTION_NAME_SESSION = 'DB_SESSION'
COLLECTION_NAME_PRODUCT = 'DB_PRODUCT'
def handle(req):
    """FaaS entry point for database (MongoDB) cart operations.

    Arguments:
        req {str} -- json format request body str

    Returns:
        str -- json format response body str
    """
    logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
    logging.info("Enter function: [bot-crud-slack]")
    operation_request = json.loads(req)
    logging.debug("DB Opeartion Dict:{}".format(get_pretty_JSON(operation_request)))
    # Response skeleton; cart_info and messages are filled in by the dispatcher.
    response_body = {
        'channel': operation_request.get('channel', ''),
        'source': 'bot-crud-slack',
        'cart_info': None,
        'return_message_list': [],
    }
    handle_database_operation_request_by_action(operation_request, response_body)
    logging.debug("Exit function: [bot-crud-slack]")
    return json.dumps(response_body)
def handle_database_operation_request_by_action(database_operation_request_dict, function_response_body_dict):
    """Dispatch a cart database operation by its action type.

    Loads (or initializes) the user's shopping cart, applies the requested
    mutation, renders the cart contents into the response's message list,
    persists the cart, and attaches it to the response.

    Arguments:
        database_operation_request_dict {dict} -- dict of db operation dict
        function_response_body_dict {dict} -- dict of response body (mutated in place)
    """
    database_action = database_operation_request_dict['database_action']
    logging.info("DB Operate Action Type:{}".format(database_action))
    # Existing unfinished cart, or a fresh one for this user.
    cart_info = get_shopping_cart(database_operation_request_dict)
    parameters = database_operation_request_dict['parameters']
    return_message_list = function_response_body_dict['return_message_list']
    if database_action == 'add_item_number':
        product_name_list = [parameters['product_name'], ]
        product_number_list = [int(parameters['number']), ]
        # Price/id must be looked up before items can be added to the cart.
        item_info_dict = get_product_info_dict(product_name_list)
        add_items_to_cart(cart_info, return_message_list, item_info_dict, product_name_list, product_number_list)
    elif database_action == 'reduce_item_number':
        product_name_list = [parameters['product_name'], ]
        product_number_list = [int(parameters['number']), ]
        edit_items_in_cart(cart_info, return_message_list, product_name_list, product_number_list)
    elif database_action == 'delete_single_item':
        product_name_list = [parameters['product_name'], ]
        delete_items_in_cart(cart_info, return_message_list, product_name_list)
    elif database_action == 'delete_all_item':
        remove_all_items_in_cart(cart_info, return_message_list)
    elif database_action == 'check_out':
        append_message_object_to_return_message_list(return_message_list, "Thanks for shopping with us!", 'blue')
        # Marking FINISHED means the next request starts a new cart session.
        cart_info['session_status'] = "FINISHED"
    elif database_action == 'query_cart_info':
        # Do Nothing -- the unconditional get_cart_info below renders the cart.
        pass
    # Render the (possibly mutated) cart, then persist and return it.
    get_cart_info(cart_info, return_message_list)
    save_shopping_cart(cart_info)
    function_response_body_dict['cart_info'] = cart_info
def get_cart_info(cart_info, return_message_list):
    """Render the cart contents as aligned text rows plus a total line.

    Arguments:
        cart_info {dict} -- dict of cart_info parameters
        return_message_list {list} -- message returned as part of response for display.
    """
    logging.info("Get Cart Info")
    append_message_object_to_return_message_list(return_message_list, "Name".ljust(40) + "Quantity".ljust(20) + "Price".ljust(20), 'green')
    append_message_object_to_return_message_list(return_message_list, "-" * 65, 'green')
    grand_total = 0.0
    for item in cart_info['shopping_cart'].values():
        name = item['product_name']
        quantity = item['product_number']
        line_total = float(quantity) * float(item['price_per_unit'])
        grand_total += line_total
        append_message_object_to_return_message_list(return_message_list, name.ljust(40) + str(quantity).ljust(20) + "{:.2f}".format(line_total).ljust(20), 'green')
    append_message_object_to_return_message_list(return_message_list, "-" * 65, 'green')
    append_message_object_to_return_message_list(return_message_list, "Total Amount: ${:.2f}".format(grand_total), 'green')
    logging.info(" Customer: {} {}({}). Total Amount: ${}".format(cart_info['first_name'], cart_info['last_name'], cart_info['username'], grand_total))
def delete_items_in_cart(cart_info, return_message_list, product_name_list):
    """Remove each named product entirely from the cart.

    Arguments:
        cart_info {dict} -- dict of cart_info parameters
        return_message_list {list} -- message returned as part of response for display.
        product_name_list {list} -- list of product name
    """
    logging.info("Delete Item From the Cart")
    shopping_cart = cart_info['shopping_cart']
    for name in product_name_list:
        if name in shopping_cart:
            del shopping_cart[name]
            logging.info("Remove all the [{}] from cart.".format(name))
            append_message_object_to_return_message_list(return_message_list, "Successfully remove all the {} from cart.".format(name), 'blue')
        else:
            # Names not present in the cart are reported but otherwise ignored.
            logging.warning("[{}] is not in the cart.".format(name))
            append_message_object_to_return_message_list(return_message_list, '{} not in the shopping cart.'.format(name), 'red')
def edit_items_in_cart(cart_info, return_message_list, product_name_list, product_number_list):
    """Reduce the quantity of each named product in the cart.

    If the reduction meets or exceeds the stored quantity, the product entry
    is removed entirely; otherwise the stored quantity is decreased.

    Arguments:
        cart_info {dict} -- dict of cart_info parameters
        return_message_list {list} -- message returned as part of response for display.
        product_name_list {list} -- list of product name
        product_number_list {list} -- list of product number (amount to remove)
    """
    logging.info("Edit Item Num to Cart")
    shopping_cart = cart_info['shopping_cart']
    for i in range(len(product_name_list)):
        cur_product_name = product_name_list[i]
        cur_product_modify_num = product_number_list[i]
        # if the item is not in the cart, ignore it.
        if cur_product_name not in shopping_cart:
            logging.warning("[{}] is not in the cart.".format(cur_product_name))
            append_message_object_to_return_message_list(return_message_list, '{} not in the shopping cart.'.format(cur_product_name), 'red')
        else:
            cur_product_in_cart = shopping_cart[cur_product_name]
            cur_product_num_in_cart = cur_product_in_cart['product_number']
            if cur_product_modify_num < cur_product_num_in_cart:
                # Partial reduction: keep the entry with the decreased count.
                cur_product_in_cart['product_number'] = cur_product_num_in_cart - cur_product_modify_num
                logging.info("Remove {} {} from cart.".format(cur_product_modify_num, cur_product_name))
                logging.info("Product: {}, Original Num: {}, Modify Num: {}, New Num: {}".format(cur_product_name, cur_product_num_in_cart, cur_product_modify_num, cur_product_in_cart['product_number']))
            # if over the limit, just remove all.
            else:
                shopping_cart.pop(cur_product_name, None)
                logging.info("Remove all the [{}] from cart.".format(cur_product_name))
                logging.info("Product: {}, Original Num: {}, Modify Num: {}, New Num: {}".format(cur_product_name, cur_product_num_in_cart, cur_product_modify_num, "None"))
            # min(...) reports the amount actually removed in either branch.
            append_message_object_to_return_message_list(return_message_list, "Successfully remove {} {} from cart.".format(min(cur_product_num_in_cart, cur_product_modify_num), cur_product_name), 'blue')
def remove_all_items_in_cart(cart_info, return_message_list):
    """Empty the shopping cart, reporting one removal message per item.

    Arguments:
        cart_info {dict} -- dict of cart_info parameters
        return_message_list {list} -- message returned as part of response for display.
    """
    logging.info("Delete all the item in cart")
    shopping_cart = cart_info['shopping_cart']
    # The original accumulated removed names into an unused list; dropped.
    for key in shopping_cart:
        logging.info("Remove [{}] from cart".format(key))
        append_message_object_to_return_message_list(return_message_list, "Successfully remove {} from cart!".format(key), 'blue')
    shopping_cart.clear()
def add_items_to_cart(cart_info, return_message_list, item_info_dict, product_name_list, product_number_list):
    """Add the requested quantity of each product to the shopping cart.

    Arguments:
        cart_info {dict} -- dict of cart_info parameters
        return_message_list {list} -- message returned as part of response for display.
        item_info_dict {dict} -- product info keyed by product name.
        product_name_list {list} -- list of product name
        product_number_list {list} -- list of product number
    """
    logging.info("Add Items to Cart")
    shopping_cart = cart_info['shopping_cart']
    for name, quantity in zip(product_name_list, product_number_list):
        # Info lookup happens before the quantity guard, matching the
        # original's behavior for unknown product names.
        info = item_info_dict[name]
        if quantity <= 0:
            logging.warning("Modify Num <= 0")
            continue
        if name in shopping_cart:
            # Already present: bump the stored quantity.
            entry = shopping_cart[name]
            entry['product_number'] = entry['product_number'] + quantity
        else:
            # New product: copy id/name/price from the lookup and set quantity.
            shopping_cart[name] = {
                "product_id": info['product_id'],
                "product_name": info['product_name'],
                "product_number": quantity,
                "price_per_unit": info['price_per_unit'],
            }
        append_message_object_to_return_message_list(return_message_list, "Successfully add {} unit {} to cart!".format(quantity, name), 'blue')
        logging.info("Add [{}] unit [{}] to cart!".format(quantity, name))
def append_message_object_to_return_message_list(return_message_list, message_content, message_color):
    """Append a colored message object to the return-message list.

    Arguments:
        return_message_list {list} -- return message list (mutated in place)
        message_content {str} -- content of message
        message_color {str} -- color of message
    """
    message_object = {'content': message_content, 'color': message_color}
    return_message_list.append(message_object)
def get_product_info_dict(product_name_list):
    """Query product records for the given names.

    Arguments:
        product_name_list {list} -- list of product names

    Returns:
        dict -- product info keyed by product name; each value has
        product_id, product_name, price_per_unit.
    """
    logging.info("Get Product Info")
    database_operation_dict = {
        'database_operation': 'read',
        'collection_name': COLLECTION_NAME_PRODUCT,
        'data_body': {'product_name': {'$in': product_name_list}},
    }
    response = perform_database_operation(database_operation_dict)
    raw_rows = get_query_res_list(response)
    result = {
        row['product_name']: {
            'product_id': row['_id'],
            'product_name': row['product_name'],
            'price_per_unit': row['price_per_unit'],
        }
        for row in raw_rows
    }
    logging.info("Get {} product info".format(len(raw_rows)))
    return result
def save_shopping_cart(cart_info):
    """Persist the cart: replace the stored document when it was loaded from
    the database, otherwise create a new one.

    Arguments:
        cart_info {dict} -- dict of cart_info parameters

    Returns:
        response -- response of the database save request.
    """
    cur_cart = cart_info
    # .get() replaces the original's explicit key-presence check.
    has_saved_before = cur_cart.get('has_saved_before', False)
    database_operation_dict = {
        'database_operation': None,
        'collection_name': COLLECTION_NAME_SESSION,
        'data_body': cur_cart,
    }
    logging.debug("Save Session, Content:{}".format(json.dumps(cur_cart)))
    if has_saved_before:
        # Replace needs the Eve document id/etag captured at load time; those
        # bookkeeping fields must not be written back into the document body.
        database_operation_dict['document_etag'] = cur_cart['document_etag']
        database_operation_dict['document_id'] = cur_cart['document_id']
        database_operation_dict['database_operation'] = 'replace'
        remove_cart_info_by_command(cur_cart, "REMOVE_BEFORE_SAVE")
    else:
        database_operation_dict['database_operation'] = 'create'
    return perform_database_operation(database_operation_dict)
def get_shopping_cart(database_operation_request_dict):
    """Load the user's unfinished cart from MongoDB, or initialize a new one.

    Arguments:
        database_operation_request_dict {dict} -- dict of db operation dict

    Returns:
        dict -- shopping cart dict.
    """
    cart_id = database_operation_request_dict['cart_id']
    has_exist_cart, unfinished_cart = get_exist_unfinished_cart_info(cart_id)
    if not has_exist_cart:
        # No unfinished session for this user: start from the template.
        return initialize_shopping_cart(database_operation_request_dict)
    # Reuse the stored cart; keep Eve's id/etag for the eventual replace,
    # then strip the raw Mongo bookkeeping fields from the document body.
    cur_cart = unfinished_cart
    cur_cart['document_etag'] = unfinished_cart['_etag']
    cur_cart['document_id'] = unfinished_cart['_id']
    remove_cart_info_by_command(cur_cart, 'REMOVE_NAIVE_MONGO_INFO')
    cur_cart['has_saved_before'] = True
    logging.info("Get Cart. Use exist one from DATABASE")
    return cur_cart
def initialize_shopping_cart(database_operation_request_dict):
    """Build a fresh, empty cart dict from the request's user fields.

    Arguments:
        database_operation_request_dict {dict} -- dict of db operation dict

    Returns:
        dict -- initialized session dict.
    """
    logging.info("Initialize a New Shopping Cart")
    req = database_operation_request_dict
    return {
        "cart_id": req['cart_id'],
        "username": req['username'],
        "first_name": req['first_name'],
        "last_name": req['last_name'],
        "shopping_cart": {},
        "session_status": "UNFINISHED",
        "has_saved_before": False,
    }
def get_exist_unfinished_cart_info(cart_id):
    """Look up an UNFINISHED cart session for the given cart id.

    Arguments:
        cart_id {str} -- id of shopping cart

    Returns:
        boolean -- is exist unfinished cart
        dict -- unfinished cart info (None when absent)
    """
    logging.info("Check if exist unfinished cart")
    database_operation_dict = {
        'database_operation': 'read',
        'collection_name': COLLECTION_NAME_SESSION,
        'data_body': {'cart_id': cart_id, 'session_status': "UNFINISHED"},
    }
    response = perform_database_operation(database_operation_dict)
    matches = get_query_res_list(response)
    if matches:
        logging.info("Exist Unfinished Cart")
        logging.debug("Exist Cart Info:{}".format(matches[0]))
        return True, matches[0]
    logging.info("Don't exist unfinished cart")
    return False, None
def remove_cart_info_by_command(cur_cart, remove_cmd):
    """Strip bookkeeping key/value pairs from the cart dict in place.

    Arguments:
        cur_cart {dict} -- dict of cart parameters (mutated in place)
        remove_cmd {str} -- remove command type.
    """
    logging.debug("Remove CMD: {}".format(remove_cmd))
    if remove_cmd in ('REMOVE_NAIVE_MONGO_INFO', 'REMOVE_BEFORE_SAVE'):
        logging.info("Remove field: (_id, _updated, _created, _links, _etag)")
        for field in ('_id', '_updated', '_created', '_links', '_etag'):
            cur_cart.pop(field, None)
    if remove_cmd == 'REMOVE_BEFORE_SAVE':
        logging.info("Remove field: (document_etag, document_id)")
        for field in ('document_etag', 'document_id'):
            cur_cart.pop(field, None)
def perform_database_operation(database_operation_dict):
    """Dispatch a CRUD request to the matching eve_mongo_utility helper.

    Arguments:
        database_operation_dict {dict} -- dict of info about database operation.
            Expected keys: 'collection_name', 'database_operation'
            ('create'|'read'|'replace'|'update'|'delete'), 'data_body', and for
            replace/update/delete also 'document_id' and 'document_etag'.

    Returns:
        response -- response of database operation request, or None for an
        unrecognized operation type.
    """
    logging.debug("Enter function: [perform_database_operation]")
    # Get internal collection name representation
    external_collection_name = database_operation_dict['collection_name']
    collection_name = get_internal_collection_name(external_collection_name)
    # Get opreation type
    operation_type = database_operation_dict['database_operation']
    logging.info("Database Operation Type: {}".format(operation_type))
    # Perform corresponding operation C/R/U/D
    if operation_type == 'create':
        data = database_operation_dict['data_body']
        database_operation_request_response = perform_document_insert(collection_name, data)
    elif operation_type == 'read':
        query_params = database_operation_dict['data_body']
        database_operation_request_response = perform_document_query(collection_name, query_params)
    elif operation_type == 'replace':
        # Replace/update/delete pass the Eve document id plus its etag --
        # presumably for Eve's If-Match concurrency check; confirm in
        # eve_mongo_utility.
        document_id = database_operation_dict['document_id']
        document_etag = database_operation_dict['document_etag']
        data = database_operation_dict['data_body']
        database_operation_request_response = perform_document_replace(collection_name, document_id, data, document_etag)
    elif operation_type == 'update':
        document_id = database_operation_dict['document_id']
        document_etag = database_operation_dict['document_etag']
        data = database_operation_dict['data_body']
        database_operation_request_response = perform_document_update(collection_name, document_id, data, document_etag)
    elif operation_type == 'delete':
        document_id = database_operation_dict['document_id']
        document_etag = database_operation_dict['document_etag']
        database_operation_request_response = perform_document_delete(collection_name, document_id, document_etag)
    else:
        logging.error("Illegal Operation")
        database_operation_request_response = None
    # Only successful dispatches carry a status code worth logging.
    if database_operation_request_response is not None:
        logging.debug("Operation [{}] on collection [{}] Finished. Status Code: {}.".format(operation_type, collection_name, database_operation_request_response.status_code))
    return database_operation_request_response
|
{"/tig_faas_bot/FaaS_Functions/Slack_Version/bot-crud-slack/handler.py": ["/tig_faas_bot/FaaS_Functions/Slack_Version/bot-crud-slack/eve_mongo_utility.py"], "/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/handler.py": ["/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/faas_function_call_utility.py", "/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/general_utility.py"]}
|
27,039
|
yuanchen8911/serverless-chatbot
|
refs/heads/master
|
/tig_faas_bot/Eve_MongoDB/mongo_eve.py
|
"""
Script for starting the simplest python-eve.
No auth.
"""
import sys
import logging
from eve import Eve
app = Eve()
def main():
    """Configure DEBUG logging and start the Eve server on all interfaces."""
    logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
    logging.info("Start Python-Eve...")
    app.run(host='0.0.0.0')


if __name__ == '__main__':
    main()
|
{"/tig_faas_bot/FaaS_Functions/Slack_Version/bot-crud-slack/handler.py": ["/tig_faas_bot/FaaS_Functions/Slack_Version/bot-crud-slack/eve_mongo_utility.py"], "/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/handler.py": ["/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/faas_function_call_utility.py", "/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/general_utility.py"]}
|
27,040
|
yuanchen8911/serverless-chatbot
|
refs/heads/master
|
/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/handler.py
|
"""
Webhook for the NLP module(Google's Dialogflow).
"""
import os
import sys
import json
import logging
import requests
from .faas_function_call_utility import call_faas_function
from .general_utility import get_pretty_JSON
FAAS_GATEWAY_URL = os.getenv("FAAS_GATEWAY_URL", 'http://gateway:8080')
FUNCTION_NAME_GET_OPTION_LIST_SLACK = os.getenv("FUNCTION_NAME_GET_OPTION_LIST_SLACK", "get-option-list-slack")
FUNCTION_NAME_BOT_CRUD_SLACK = os.getenv("FUNCTION_NAME_BOT_CRUD_SLACK", 'bot-crud-slack')
FUNCTION_NAME_POST_MESSAGE_SLACK = os.getenv("FUNCTION_NAME_POST_MESSAGE_SLACK", "post-message-slack")
DB_CRUD_CALLBACK_URL = "{}/function/{}".format(FAAS_GATEWAY_URL, FUNCTION_NAME_POST_MESSAGE_SLACK)
def handle(req):
    """Handle a Dialogflow webhook request.

    Arguments:
        req {str} -- dialogflow request body

    Returns:
        str -- dialogflow webhook response
    """
    logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
    logging.debug("Enter FaaS function [nlp-webhook-slack]")
    # Parse once -- the original decoded the payload twice (log + handling).
    req_dict = json.loads(req)
    logging.debug("req Content: ")
    logging.debug(get_pretty_JSON(req_dict))
    return handle_request(req_dict)
def handle_request(req_dict):
    """Build the Dialogflow webhook response for a parsed request.

    Arguments:
        req_dict {dict} -- dialogflow request dict

    Returns:
        str -- dialogflow webhook response (JSON string)
    """
    # Response skeleton; filled in by the intent dispatcher.
    response_dict = {
        "displayText": "",
        "data": {},
        "contextOut": None,
        "source": "jd_faas_webhook",
    }
    handle_dialogflow_request_by_intent(req_dict, response_dict)
    logging.debug("Dialogflow webhook response Content:")
    logging.debug(get_pretty_JSON(response_dict))
    logging.debug("End-handle_request_by_source")
    return json.dumps(response_dict)
def handle_dialogflow_request_by_intent(dialogflow_request_dict, dialogflow_webhook_response_dict):
    """Route a Dialogflow request to the matching cart action by intent name.

    Mutates dialogflow_webhook_response_dict in place: appends user-facing
    messages, and for intents that touch the cart fires an asynchronous
    bot-crud-slack call whose result is posted back via DB_CRUD_CALLBACK_URL.

    Fix: the synonym checks used `is not ''` (identity comparison, which is
    implementation-defined for strings and a SyntaxWarning on Python 3.8+);
    they now use `!= ''`. The repeated dispatch/option boilerplate is
    factored into the private helpers below.

    Arguments:
        dialogflow_request_dict {dict} -- dialogflow request content
        dialogflow_webhook_response_dict {dict} -- webhook response (mutated)
    """
    intentName, metadata, parameters = get_intent_metadata_parameters_from_dialogflow_response(dialogflow_request_dict)
    context_set_command = "CLEAR_ALL"
    if intentName == 'greeting':
        append_message_to_return_message_list_by_category('Greeting', dialogflow_webhook_response_dict)
    elif intentName == 'addToCart':
        # A synonym may map to several concrete products; ask the user to
        # choose when ambiguous, otherwise update the cart directly.
        option_list = get_product_item_option_list(parameters)
        if len(option_list) > 1:
            context_set_command = "KEEE_ORIGIN"
            _offer_option_selection(option_list, dialogflow_webhook_response_dict)
        elif len(option_list) == 1:
            _dispatch_db_operation(dialogflow_request_dict, dialogflow_webhook_response_dict,
                                   'add_item_number', intentName,
                                   {'product_name': option_list[0], 'number': parameters['number-integer']})
    elif intentName == 'addToCart - select.number':
        # Follow-up: the user picked an option; the quantity lives in the context.
        product_name = get_product_name_from_option_list(dialogflow_request_dict, parameters)
        product_num = get_product_number_from_context(dialogflow_request_dict, "addtocart-followup")
        _dispatch_db_operation(dialogflow_request_dict, dialogflow_webhook_response_dict,
                               'add_item_number', intentName,
                               {'product_name': product_name, 'number': product_num})
    elif intentName == 'removeFromCart':
        if ("product_name_synonyms" in parameters) and (parameters['product_name_synonyms'] != ''):
            option_list = get_product_item_option_list(parameters)
            if len(option_list) > 1:
                context_set_command = "KEEE_ORIGIN"
                _offer_option_selection(option_list, dialogflow_webhook_response_dict)
            elif len(option_list) == 1:
                _dispatch_db_operation(dialogflow_request_dict, dialogflow_webhook_response_dict,
                                       'reduce_item_number', intentName,
                                       {'product_name': option_list[0], 'number': parameters['number-integer']})
    elif intentName == 'removeFromCart - select.number':
        product_name = get_product_name_from_option_list(dialogflow_request_dict, parameters)
        product_num = get_product_number_from_context(dialogflow_request_dict, "removefromcart-followup")
        _dispatch_db_operation(dialogflow_request_dict, dialogflow_webhook_response_dict,
                               'reduce_item_number', intentName,
                               {'product_name': product_name, 'number': product_num})
    elif intentName == 'remove_all':
        if ("product_name_synonyms" in parameters) and (parameters['product_name_synonyms'] != ''):
            option_list = get_product_item_option_list(parameters)
            if len(option_list) > 1:
                context_set_command = "KEEE_ORIGIN"
                _offer_option_selection(option_list, dialogflow_webhook_response_dict)
            elif len(option_list) == 1:
                _dispatch_db_operation(dialogflow_request_dict, dialogflow_webhook_response_dict,
                                       'delete_single_item', intentName,
                                       {'product_name': option_list[0]})
        else:
            # No product named: empty the whole cart.
            _dispatch_db_operation(dialogflow_request_dict, dialogflow_webhook_response_dict,
                                   'delete_all_item', intentName)
    elif intentName == 'remove_all - select.number':
        product_name = get_product_name_from_option_list(dialogflow_request_dict, parameters)
        _dispatch_db_operation(dialogflow_request_dict, dialogflow_webhook_response_dict,
                               'delete_single_item', intentName,
                               {'product_name': product_name})
    elif intentName == 'checkOut':
        _dispatch_db_operation(dialogflow_request_dict, dialogflow_webhook_response_dict,
                               'check_out', intentName)
    elif intentName == 'end':
        append_message_to_return_message_list_by_category('Exit', dialogflow_webhook_response_dict)
        append_message_to_return_message_list_by_category('End', dialogflow_webhook_response_dict)
    elif intentName == 'show_cart':
        _dispatch_db_operation(dialogflow_request_dict, dialogflow_webhook_response_dict,
                               'query_cart_info', intentName)
    elif intentName == 'help':
        append_message_to_return_message_list_by_category('Help', dialogflow_webhook_response_dict)
    else:
        append_message_to_return_message_list_by_category('Error', dialogflow_webhook_response_dict)
    set_dialogflow_context_by_command_and_intent(context_set_command, intentName, dialogflow_request_dict, dialogflow_webhook_response_dict)


def _offer_option_selection(option_list, dialogflow_webhook_response_dict):
    """Store the ambiguous options on the response and prompt the user to pick one."""
    set_custom_data(dialogflow_webhook_response_dict, 'option_list', option_list)
    append_message_to_return_message_list_by_category('Select_option', dialogflow_webhook_response_dict)


def _dispatch_db_operation(dialogflow_request_dict, dialogflow_webhook_response_dict, database_action, intentName, parameters=None):
    """Queue an asynchronous bot-crud-slack call and tell the user to wait.

    The DB function posts its own result back through DB_CRUD_CALLBACK_URL,
    so this webhook only answers with a 'Processing' message.
    """
    database_operation_request_dict = get_database_operation_request_dict(dialogflow_request_dict, database_action, intentName, parameters)
    append_message_to_return_message_list_by_category('Processing', dialogflow_webhook_response_dict)
    call_faas_function(FAAS_GATEWAY_URL, FUNCTION_NAME_BOT_CRUD_SLACK, database_operation_request_dict, True, DB_CRUD_CALLBACK_URL)
def append_faas_db_response_to_return_message_list(dialogflow_webhook_response_dict, faas_db_response):
    """Merge the DB function's return messages into the webhook response.

    Fix: creates the 'data'/'return_message_list' slots when missing instead
    of raising KeyError, and uses list.extend instead of a manual append loop.

    Arguments:
        dialogflow_webhook_response_dict {dict} -- dict of webhook response (mutated)
        faas_db_response {requests.Response-like} -- DB operation HTTP response
            whose .text is a JSON body containing 'return_message_list'
    """
    faas_db_response_dict = json.loads(faas_db_response.text)
    faas_db_return_message_list = faas_db_response_dict['return_message_list']
    data = dialogflow_webhook_response_dict.setdefault('data', {})
    data.setdefault('return_message_list', []).extend(faas_db_return_message_list)
    return
def set_custom_data(dialogflow_webhook_response_dict, key, value):
    """Store a key/value pair under the response's 'data' section.

    Arguments:
        dialogflow_webhook_response_dict {dict} -- webhook response dict (mutated)
        key {str} -- key for custom data
        value {any type} -- val of the data
    """
    if 'data' not in dialogflow_webhook_response_dict:
        dialogflow_webhook_response_dict['data'] = {}
    dialogflow_webhook_response_dict['data'][key] = value
    return
def get_product_number_from_context(dialogflow_request_dict, context_name):
    """For a follow-up intent, get the product quantity from the context.

    - I want 10 juice (intent-addToCart)
    - We have juice-1, juice-2, juice-3, which do you want?
    - The second one (intent-addToCart-Follow-up)
    For the follow-up intent we need the number (10) from the context so we
    can perform the real operation: add 10 juice-2 to the cart.

    Fix: the 'parameters' default was '' (a str), so a context without
    parameters crashed with AttributeError on .get(); it now defaults to {}.

    Arguments:
        dialogflow_request_dict {dict} -- dialogflow request content
        context_name {str} -- name of context

    Returns:
        int -- quantity of product the user wants to buy/delete (1 when absent)
    """
    contexts = dialogflow_request_dict.get("result", {}).get('contexts', {})
    for context in contexts:
        if context.get('name', '') == context_name:
            parameters = context.get('parameters', {})
            return parameters.get('number-integer', 1)
    return 1
def get_product_name_from_option_list(dialogflow_request_dict, dialogflow_parameters):
    """Get the option the user selected from the previously-offered list.

    Arguments:
        dialogflow_request_dict {dict} -- dialogflow request content
        dialogflow_parameters {dict} -- dialogflow parameters

    Returns:
        str -- user selected product name
    """
    option_list = dialogflow_request_dict['originalRequest']['data']['option_list']
    # The ordinal is 1-based ("I want the first one" / "Number 1 please").
    # Fix: fall back to the first option when it is out of range in EITHER
    # direction — the original only checked the upper bound, so an ordinal
    # of 0 silently indexed -1 and picked the LAST option.
    option_ordinal_number = int(dialogflow_parameters.get('number', '1'))
    if not 1 <= option_ordinal_number <= len(option_list):
        option_ordinal_number = 1
    return option_list[option_ordinal_number - 1]
def get_product_item_option_list(dialogflow_parameters):
    """Resolve a product-name synonym to the list of concrete product options.

    Arguments:
        dialogflow_parameters {dict} -- dialogflow parameters

    Returns:
        list -- list of possible options
    """
    request_payload = {
        'product_synonym': dialogflow_parameters.get('product_name_synonyms', '')
    }
    response = call_faas_function(FAAS_GATEWAY_URL, FUNCTION_NAME_GET_OPTION_LIST_SLACK, request_payload)
    response_dict = json.loads(response.text)
    return response_dict['product_option_list']
def get_database_operation_request_dict(dialogflow_request_dict, database_action, intentName, parameters = None):
    """Build the request dict sent to the bot-crud-slack database function.

    Arguments:
        dialogflow_request_dict {dict} -- dialogflow request content
        database_action {str} -- database action type
        intentName {str} -- name of intent

    Keyword Arguments:
        parameters {dict} -- dict of db operation parameters (default: {None})

    Returns:
        dict -- dict of database operation
    """
    sessionId = dialogflow_request_dict['sessionId']
    user_data = dialogflow_request_dict['originalRequest']['data']
    username = user_data['username']
    # The cart is keyed by user AND session so concurrent sessions don't collide.
    return {
        "username": username,
        "first_name": user_data['first_name'],
        "last_name": user_data['last_name'],
        "channel": user_data['channel'],
        "cart_id": "{}#{}".format(username, sessionId),
        "database_action": database_action,
        "parameters": parameters,
        "intentName": intentName,
    }
def append_message_object_to_return_message_list(return_message_list, message_content, message_color):
    """Append one message object (content + color) to the return message list.

    Arguments:
        return_message_list {list} -- list of return message objects (mutated)
        message_content {str} -- message content
        message_color {str} -- color of message
    """
    message_object = {
        'content': message_content,
        'color': message_color,
    }
    return_message_list.append(message_object)
    return
def append_message_to_return_message_list_by_category(message_category, dialogflow_webhook_response_dict):
    """Append the canned messages for a category to the response message list.

    Fix: the original fetched 'data' with `.get('data', dict())` but never
    wrote the fallback dict back, so when the response had no 'data' key every
    appended message was silently discarded; `setdefault` now attaches the
    slots to the response. The if/elif chain is replaced by a lookup table.

    Arguments:
        message_category {str} -- category of the message (unknown categories
            append nothing)
        dialogflow_webhook_response_dict {dict} -- response dict to be returned (mutated)
    """
    category_messages = {
        'Greeting': [
            ('Hi! Welcome to our grocery store! ', 'blue'),
            ('You can always type "help" to get more information about our system!', 'blue'),
        ],
        'Help': [
            ('Want to put items in shopping cart? -- Try "add three cookie to my cart"', 'blue'),
            ('Want to remove items from shopping cart? -- Try "remove two cookie"', 'blue'),
            ('Want check what\'s in your shopping cart? -- Try "Show me the cart?"', 'blue'),
            ('Want to checkout? -- Try "Check out"', 'blue'),
            ('Want to exit? -- Try "exit"', 'blue'),
        ],
        'End': [('Thanks for shopping with us! Have a nice day!', 'blue')],
        'Error': [('Sorry, I don\'t understand. Could you say that again?', 'red')],
        'NoItem': [('Sorry, we don\'t have the item you want to add/remove.', 'red')],
        'Exit': [('We have saved items in your cart. See you later!', 'blue')],
        'Processing': [('Wait a moment', 'blue')],
        'Select_option': [('We have following choices, select one', 'blue')],
    }
    data = dialogflow_webhook_response_dict.setdefault('data', {})
    return_message_list = data.setdefault('return_message_list', [])
    for content, color in category_messages.get(message_category, []):
        return_message_list.append({'content': content, 'color': color})
def get_input_contexts(dialogflow_request_dict):
    """Get the contexts from the incoming dialogflow request.

    Arguments:
        dialogflow_request_dict {dict} -- dialogflow request content

    Returns:
        dict -- input context ({} when absent)
    """
    return dialogflow_request_dict.get('result', {}).get('contexts', {})
def set_dialogflow_context_by_command_and_intent(context_set_command, intentName, dialogflow_request_dict, dialogflow_webhook_response_dict):
    """Copy the input contexts to the response, expiring them all on CLEAR_ALL.

    Arguments:
        context_set_command {str} -- command of set context
        intentName {str} -- name of intent (currently unused by the logic)
        dialogflow_request_dict {dict} -- dialogflow request content
        dialogflow_webhook_response_dict {dict} -- dialogflow response dict (mutated)
    """
    contexts = dialogflow_request_dict.get('result', {}).get('contexts', {})
    if context_set_command == 'CLEAR_ALL':
        # Lifespan 0 tells Dialogflow to drop the context immediately.
        for context in contexts:
            context['lifespan'] = 0
    dialogflow_webhook_response_dict['contextOut'] = contexts
    return
def get_intent_metadata_parameters_from_dialogflow_response(dialogflow_request_dict):
    """Extract metadata, parameters, and intent name from a dialogflow request.

    Fix: 'metadata' previously defaulted to None, so a request without
    metadata crashed with AttributeError on the .get() below; it now defaults
    to {} (callers only read intentName/parameters, so this is compatible).
    Also fixes the 'pararmeters' typo.

    Arguments:
        dialogflow_request_dict {dict} -- dialogflow request content

    Returns:
        str -- name of intent (None when absent)
        dict -- metadata ({} when absent)
        dict -- parameters (None when absent)
    """
    logging.info("Extract [intentName, metadata, parameters]")
    result = dialogflow_request_dict.get('result', {})
    parameters = result.get('parameters', None)
    metadata = result.get('metadata', {})
    intentName = metadata.get('intentName', None)
    return intentName, metadata, parameters
|
{"/tig_faas_bot/FaaS_Functions/Slack_Version/bot-crud-slack/handler.py": ["/tig_faas_bot/FaaS_Functions/Slack_Version/bot-crud-slack/eve_mongo_utility.py"], "/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/handler.py": ["/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/faas_function_call_utility.py", "/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/general_utility.py"]}
|
27,041
|
yuanchen8911/serverless-chatbot
|
refs/heads/master
|
/tig_faas_bot/Utility/__init__.py
|
from . import eve_mongo_utility
from . import general_utility
|
{"/tig_faas_bot/FaaS_Functions/Slack_Version/bot-crud-slack/handler.py": ["/tig_faas_bot/FaaS_Functions/Slack_Version/bot-crud-slack/eve_mongo_utility.py"], "/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/handler.py": ["/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/faas_function_call_utility.py", "/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/general_utility.py"]}
|
27,042
|
yuanchen8911/serverless-chatbot
|
refs/heads/master
|
/tig_faas_bot/FaaS_Functions/Slack_Version/query-dialogflow-slack/handler.py
|
import os
import sys
import logging
import json
import requests
from .faas_function_call_utility import call_faas_function
from .general_utility import get_pretty_JSON
from .general_utility import load_faas_secret
FAAS_GATEWAY_URL = os.getenv('FAAS_GATEWAY_URL', 'http://gateway:8080')
FUNCTION_NAME_POST_MESSAGE_SLACK = os.getenv('FUNCTION_NAME_POST_MESSAGE_SLACK', 'post-message-slack')
SLACK_POST_URL = os.getenv('SLACK_POST_URL', 'https://slack.com/api/chat.postMessage')
DIALOGFLOW_CLIENT_TOKEN_SECRET_NAME = os.getenv('DIALOGFLOW_CLIENT_TOKEN_SECRET_NAME', 'dialogflow_client_token')
DIALOGFLOW_BASE_URL_V1 = os.getenv('DIALOGFLOW_BASE_URL_V1','https://api.dialogflow.com/v1')
DIALOGFLOW_PROTOCOL_VERSION = os.getenv('DIALOGFLOW_PROTOCOL_VERSION', '20150910')
DIALOGFLOW_QUERY = 'query'
DIALOGFLOW_CLIENT_TOKEN = None
def handle(req):
    """handle a request to the function

    Args:
        req (str): request body (JSON text)

    Returns:
        str: short status message from handle_req_message
    """
    logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
    logging.info("FaaS-[query-dialogflow-slack]")
    # Load the Dialogflow client token from the docker secret before querying.
    set_dialogflow_token()
    parsed_request = json.loads(req)
    logging.debug("Req Content: {}".format(get_pretty_JSON(parsed_request)))
    return handle_req_message(parsed_request)
def handle_req_message(slack_req_dict):
    """Route a Slack (or synthetic) request to Dialogflow and post the reply.

    For 'event_callback' requests the raw user text is sent to Dialogflow;
    for the synthetic 'user_select_product_option[_error]' requests (emitted
    by the get-user-selected-option function) a canned utterance is sent
    instead. In both cases the Dialogflow response is forwarded
    asynchronously to the post-message-slack function.

    Fix: `user_input == None` replaced with the identity check `is None`.

    Arguments:
        slack_req_dict {dict} -- request dict

    Returns:
        str -- return message represent different case.
    """
    req_type = slack_req_dict['type']
    if req_type == 'event_callback':
        logging.info("Async Handle Slack Event Req")
        event = slack_req_dict.get('event', {})
        user_input = event.get('text', None)
        if user_input is None or user_input == '':
            logging.warning("Empty User Input")
            return "Empty UserInput"
        username = event.get('user', 'default_user')
        session_id = username
        logging.debug("User Input:#{}#".format(user_input))
        custom_payload = initialize_custom_payload_via_slack_event(event)
        channel = event.get('channel', '')
        google_dialogflow_response = query_google_dialogflow_v1(user_input, custom_payload, session_id)
        bot_message_sender_payload = {
            "source": 'query-dialogflow-slack',
            'channel': channel,
            "data": google_dialogflow_response,
        }
        logging.info("Send Google Response to slack bot message poster")
        call_faas_function(FAAS_GATEWAY_URL, FUNCTION_NAME_POST_MESSAGE_SLACK, bot_message_sender_payload, is_async = True)
        return "Wait for Bot to Post Message"
    elif req_type in ('user_select_product_option', 'user_select_product_option_error'):
        logging.debug("Handle Fake Slack Event:User select option, reqType:{}".format(req_type))
        custom_payload = initialize_custom_payload_from_function_get_user_selected_option(slack_req_dict)
        if req_type == 'user_select_product_option':
            # Echo the originally-offered options back so the webhook can map
            # "number N" onto a concrete product.
            custom_payload['data']['option_list'] = slack_req_dict['original_options_list']
        user_input = slack_req_dict.get('user_input', '')
        session_id = slack_req_dict.get('user', '')
        channel = slack_req_dict.get('channel', 'ss')
        google_dialogflow_response = query_google_dialogflow_v1(user_input, custom_payload, session_id)
        bot_message_sender_payload = {
            "source": 'query-dialogflow-slack',
            'channel': channel,
            "data": google_dialogflow_response,
        }
        logging.info("Send Google Response to slack bot message sender")
        call_faas_function(FAAS_GATEWAY_URL, FUNCTION_NAME_POST_MESSAGE_SLACK, bot_message_sender_payload, is_async = True)
        return "Handler user selected option"
    else:
        return "Wrong Slack Req Type"
def initialize_custom_payload_via_slack_event(slack_event):
    """Build the Dialogflow 'originalRequest' payload from a Slack event.

    Arguments:
        slack_event {dict} -- slack event parameter

    Returns:
        dict -- custom payload for dialogflow's query
    """
    username = slack_event.get('user', 'default_user')
    # first/last name are synthesized from the username — Slack events don't
    # carry real names at this point.
    return {
        "source": "query-dialogflow-slack",
        "data": {
            "username": username,
            "first_name": 'first_name_{}'.format(username),
            "last_name": 'last_name_{}'.format(username),
            "session_id": username,
            "channel": slack_event.get('channel', ''),
        }
    }
def initialize_custom_payload_from_function_get_user_selected_option(slack_req_dict):
    """Build the Dialogflow payload for a request coming from get-user-selected-option.

    Arguments:
        slack_req_dict {dict} -- request parameter content

    Returns:
        dict -- custom payload dict for dialogflow's query
    """
    username = slack_req_dict['user']
    payload_data = {
        "username": username,
        "first_name": 'first_name_{}'.format(username),
        "last_name": 'last_name_{}'.format(username),
        "session_id": username,
        "channel": slack_req_dict['channel'],
    }
    return {
        "source": "query-dialogflow-slack",
        "data": payload_data,
    }
def query_google_dialogflow_v1(user_input, custom_payload, sessionID):
    """Send user input to NLU module Google Dialogflow's V1 api.

    Arguments:
        user_input {str} -- user raw input
        custom_payload {dict} -- 'originalRequest' payload forwarded to Dialogflow
        sessionID {str} -- conversation session identifier

    Returns:
        dict -- dict of dialogflow response that contains metadata and parameters.
    """
    endpoint = "{}/{}?v={}".format(DIALOGFLOW_BASE_URL_V1, DIALOGFLOW_QUERY, DIALOGFLOW_PROTOCOL_VERSION)
    headers = {
        'Content-Type': 'application/json',
        'Authorization': ('Bearer %s' % DIALOGFLOW_CLIENT_TOKEN),
    }
    request_body = {
        'lang': 'en',
        'query': user_input,
        'sessionId': sessionID,
        'originalRequest': custom_payload,
    }
    response = requests.post(endpoint, json = request_body, headers = headers)
    return json.loads(response.text)
def set_dialogflow_token():
    """Set Global Varible DIALOGFLOW_CLIENT_TOKEN, get token from docker secret.

    Reads the secret named by DIALOGFLOW_CLIENT_TOKEN_SECRET_NAME via
    load_faas_secret and caches it in the module-level global, so the token
    is never baked into the image or the environment. Called once per
    invocation from handle() before any Dialogflow query.
    """
    logging.info("Get DIALOGFLOW_CLIENT_TOKEN From Docker Secret")
    global DIALOGFLOW_CLIENT_TOKEN
    DIALOGFLOW_CLIENT_TOKEN = load_faas_secret(DIALOGFLOW_CLIENT_TOKEN_SECRET_NAME)
|
{"/tig_faas_bot/FaaS_Functions/Slack_Version/bot-crud-slack/handler.py": ["/tig_faas_bot/FaaS_Functions/Slack_Version/bot-crud-slack/eve_mongo_utility.py"], "/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/handler.py": ["/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/faas_function_call_utility.py", "/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/general_utility.py"]}
|
27,043
|
yuanchen8911/serverless-chatbot
|
refs/heads/master
|
/tig_faas_bot/FaaS_Functions/Slack_Version/get-user-selected-option-from-slack-slack/handler.py
|
import os
import sys
import logging
import json
import requests
from urllib.parse import parse_qs
from .faas_function_call_utility import call_faas_function
from .general_utility import get_pretty_JSON
FAAS_GATEWAY_URL = os.getenv("FAAS_GATEWAY_URL", 'http://gateway:8080')
FUNCTION_NAME_QUERY_DIALOGFLOW_SLACK = os.getenv('FUNCTION_NAME_QUERY_DIALOGFLOW_SLACK', 'query-dialogflow-slack')
def handle(req):
    """Function for handle user option in slack

    Arguments:
        req {str} -- select option payload

    Returns:
        str -- return prompt message
    """
    logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
    logging.info("Enter FaaS Function - [get-user-selected-option-from-slack-slack]")
    # Slack interactive messages arrive as application/x-www-form-urlencoded.
    content_type = os.getenv('Http_Content_Type', 'Default Content Type')
    logging.debug("HTTP Content Type:{}".format(content_type))
    return handle_payload(decode_req_body(req))
def handle_payload(payload_dict):
    """handle payload by callback_id

    Arguments:
        payload_dict {dict} -- request payload dict

    Returns:
        str -- return message post by bot
    """
    logging.info("Handle payload by Callback type")
    callback_id = payload_dict.get('callback_id', 'Default_callback_id')
    logging.info("Callback id:{}".format(callback_id))
    if callback_id == 'product_selection':
        selected_option_val = payload_dict['actions'][0]['selected_options'][0]['value']
        selected_option_idx, original_options_list = get_original_options_list(payload_dict, selected_option_val)
        # Synthesize a natural-language utterance so Dialogflow can resolve
        # the follow-up intent ("I choose number N").
        selection_message = {
            'selected_option_idx': selected_option_idx,
            'original_options_list': original_options_list,
            'user': payload_dict['user']['id'],
            'channel': payload_dict['channel']['id'],
            'user_input': 'I choose number {}'.format(selected_option_idx + 1),
            'type': 'user_select_product_option',
        }
        call_faas_function(FAAS_GATEWAY_URL, FUNCTION_NAME_QUERY_DIALOGFLOW_SLACK, selection_message, is_async = True)
        return "You selected: {}".format(selected_option_val)
    # Unknown callback (e.g. a timed-out interaction): ask for help on the
    # user's behalf so the bot still responds with something useful.
    logging.error("Wrong Callback_id")
    channel = 'Default_Channel'
    if ('channel' in payload_dict) and ('id' in payload_dict['channel']):
        channel = payload_dict['channel']['id']
    user = "Default_User"
    if ('user' in payload_dict) and ('id' in payload_dict['user']):
        user = payload_dict['user']['id']
    error_message = {
        'channel': channel,
        'user': user,
        'user_input': 'I need help',
        'type': 'user_select_product_option_error',
    }
    call_faas_function(FAAS_GATEWAY_URL, FUNCTION_NAME_QUERY_DIALOGFLOW_SLACK, error_message, is_async = True)
    return "Sorry we can't handle your selection"
def get_original_options_list(payload_dict, selected_option_val):
    """Recover the full option list shown to the user plus the chosen index.

    Arguments:
        payload_dict {dict} -- payload dict
        selected_option_val {str} -- option selected by user

    Returns:
        int -- index of selected option (-1 when not found)
        list -- list of option values (empty when not found)
    """
    logging.info("Get Options List from payload")
    for attachment in payload_dict['original_message']['attachments']:
        if attachment.get('callback_id') != 'product_selection':
            continue
        for action in attachment['actions']:
            if action.get('name', 'default_name') != 'product_option_list':
                continue
            option_values = []
            chosen_idx = -1
            for idx, option in enumerate(action['options']):
                option_values.append(option['value'])
                if option['value'] == selected_option_val:
                    chosen_idx = idx
            logging.debug("Idx:{}, list len: {}. Content: {}".format(chosen_idx, len(option_values), option_values))
            return chosen_idx, option_values
    logging.error("Can't get options")
    return -1, []
def decode_req_body(req):
    """Decode req body (HTTP Content Type:application/x-www-form-urlencoded) to a dict.

    Arguments:
        req {str} -- application/x-www-form-urlencoded str

    Returns:
        dict -- dict of request payload
    """
    logging.info("Decode Request Body")
    # Slack sends the interactive-message body as a single form field named
    # 'payload' whose value is a JSON document.
    payload_str = parse_qs(req)['payload'][0]
    payload_dict = json.loads(payload_str)
    logging.debug("Payload Type:{}".format(type(payload_dict)))
    return payload_dict
|
{"/tig_faas_bot/FaaS_Functions/Slack_Version/bot-crud-slack/handler.py": ["/tig_faas_bot/FaaS_Functions/Slack_Version/bot-crud-slack/eve_mongo_utility.py"], "/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/handler.py": ["/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/faas_function_call_utility.py", "/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/general_utility.py"]}
|
27,044
|
yuanchen8911/serverless-chatbot
|
refs/heads/master
|
/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/general_utility.py
|
"""
General Utility for FaaS functions
"""
import sys
import logging
import json
def get_pretty_JSON(json_dict):
    """Serialize a dict to an indented, human-readable JSON string.

    Arguments:
        json_dict {dict} -- json dict

    Returns:
        str -- json str after pretty
    """
    return json.dumps(json_dict, indent=4)
def load_faas_secret(secret_name):
    """Load secret stored in the docker swarm.

    For more info about secret, check these
    - https://github.com/openfaas/faas/blob/master/guide/secure_secret_management.md
    - https://docs.openfaas.com/reference/secrets/

    Tries the OpenFaaS mount point first and falls back to the classic
    docker swarm mount point.

    Arguments:
        secret_name {str} -- name of the secret in the 'docker secret ls'

    Returns:
        str -- content of the secret
    """
    try:
        secret_path = "/var/openfaas/secrets/{}".format(secret_name)
        with open(secret_path, 'r') as f:
            secret_content = f.read()
    except FileNotFoundError:
        secret_path = "/run/secrets/{}".format(secret_name)
        with open(secret_path, 'r') as f:
            secret_content = f.read()
    # Security fix: never write the secret value itself to the logs — the
    # original debug line leaked the full token into the log stream.
    logging.debug("Loaded secret: {} from {}".format(secret_name, secret_path))
    return secret_content
def main():
    """Entry point placeholder; this module is used as a library."""
    pass


if __name__ == '__main__':
    # Route debug logging to stderr when run as a script.
    logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
    main()
|
{"/tig_faas_bot/FaaS_Functions/Slack_Version/bot-crud-slack/handler.py": ["/tig_faas_bot/FaaS_Functions/Slack_Version/bot-crud-slack/eve_mongo_utility.py"], "/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/handler.py": ["/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/faas_function_call_utility.py", "/tig_faas_bot/FaaS_Functions/Slack_Version/nlp-webhook-slack/general_utility.py"]}
|
27,045
|
mgieger/Titanic-survival-analysis
|
refs/heads/master
|
/forestclf/main.py
|
from preprocessor import Preprocessor
from sklearn.ensemble import RandomForestClassifier
from sklearn.ensemble import GradientBoostingClassifier
from forestclf.ensemblerunner import EnsembleRunner
import pprint
import itertools as iter
import sys
def main():
    """Train and compare ensemble classifiers on the Titanic data set.

    Optional command-line flags (read from sys.argv):
        -brute   exhaustively evaluate every classifier against every
                 feature subset and report the best combinations
        -failed  print the failed predictions collected while running
        -show    print the raw prediction results of the last experiment
    """
    full_file = '../titanic_full.csv'
    feature_list = ['sex', 'age', 'ticket', 'fare', 'pclass', 'name', 'sibsp', 'parch', 'embarked']
    survived = 'survived'
    # Candidate classifiers; fixed random_state keeps runs reproducible.
    default_rfc = RandomForestClassifier(random_state=0)
    random_rfc = RandomForestClassifier(random_state=1)
    est_rfc = RandomForestClassifier(n_estimators=10, random_state=0)
    depth_rfc = RandomForestClassifier(max_depth=3, random_state=0)
    gradient_boost = GradientBoostingClassifier(n_estimators=1, learning_rate=.1)
    combo_1_rfc = RandomForestClassifier(n_estimators=9, random_state=0, max_depth=3)
    extreme_rand_rfc = RandomForestClassifier(max_depth=3, min_samples_split=10, random_state=0, n_estimators=9)
    classifier_dict = {
        "default_rfc": default_rfc,
        "random_rfc": random_rfc,
        "est_rfc": est_rfc,
        "depth_rfc": depth_rfc,
        "gradient_boost": gradient_boost,
        "combo_1_rfc": combo_1_rfc,
        "extreme_rand_rfc": extreme_rand_rfc
    }
    # Best accuracy seen so far during the brute-force sweep.
    max_dict = {
        "value": 0,
        "feature_list": "",
        "classifier": ""
    }
    preprocessor = Preprocessor(full_file)
    high_performers = dict()
    high_perf_count = 0
    # BUG FIX: this list used to be indexed with an (unhashable) dict value
    # (experiment_failed_results[high_performers[...]] = ...), which raised
    # TypeError on the first high performer. Failed results are now appended
    # in the same order that high_performers entries are recorded.
    experiment_failed_results = []
    if "-brute" in sys.argv:
        for k in classifier_dict:
            for i in range(len(feature_list)):
                if i < 1:
                    continue
                for j in iter.combinations(feature_list, i):
                    # TODO: hash (features, classifier) to skip permutations of
                    # feature sets already evaluated (see get_hash).
                    j = list(j)
                    j.append(survived)
                    data = preprocessor.get_matrix_split(list(j))
                    pred_results, acc, failed_results = run_experiment(data, classifier_dict[k])
                    if acc > max_dict["value"]:
                        max_dict["value"] = acc
                        max_dict["feature_list"] = j
                        max_dict["classifier"] = k
                    if acc > 0.77:
                        high_performers[high_perf_count] = {
                            "value": acc,
                            "feature_list": j,
                            "classifier": k
                        }
                        experiment_failed_results.append(failed_results)
                        high_perf_count += 1
        print("MAX:")
        print(max_dict)
        print(" # of High performers: ", high_perf_count)
        pprint.pprint(high_performers)
    print("Default random forest classifier - feature set ['sex', 'age', 'pclass', 'parch', 'survived'] ")
    # non-biased feature list -- 77.21% accuracy -- classifier default rfc
    nb_feature_list = preprocessor.get_matrix_split(['sex', 'age', 'pclass', 'parch', 'survived'])
    pred_results, acc, failed_results = run_experiment(nb_feature_list, default_rfc)
    experiment_failed_results.append(failed_results)
    # 80.15% accuracy -- boost
    print("Default gradient boosting classifier - feature set['name', 'embarked', 'survived']")
    opt_feature_list = preprocessor.get_matrix_split(['name', 'embarked', 'survived'])
    pred_results, acc, failed_results = run_experiment(opt_feature_list, default_rfc)
    experiment_failed_results.append(failed_results)
    print("Default random forest classifier with full feature set")
    feature_list = preprocessor.get_matrix_split(['sex', 'age', 'ticket', 'fare', 'pclass', 'name', 'sibsp', 'parch',
                                                  'embarked', 'survived'])
    pred_results, acc, failed_results = run_experiment(feature_list,
                                                       default_rfc,
                                                       create_graph=False,
                                                       print_confusion_matrix=True)
    experiment_failed_results.append(failed_results)
    if "-failed" in sys.argv:
        pprint.pprint(experiment_failed_results)
    if "-show" in sys.argv:
        pprint.pprint(pred_results)
def run_experiment(data_perm, rfc, create_graph=False, print_confusion_matrix=False):
    """Fit *rfc* on the given train/test split and report its performance.

    :param data_perm: dict with "training"/"test" matrices (label in last column)
    :param rfc: sklearn-style classifier to evaluate
    :param create_graph: when True, export a tree graph of the ensemble
    :param print_confusion_matrix: when True, print the confusion matrix
    :return: (prediction_results, accuracy, failed_predictions)
    """
    runner = EnsembleRunner(data_perm, rfc)
    print("running experiment for ", rfc)
    runner.run()
    runner.print_feature_importance()
    runner.print_accuracy()
    if create_graph is True:
        runner.graph_results()
    if print_confusion_matrix is True:
        print(runner.confusion_matrix)
    print("\n")
    return runner.prediction_results, runner.accuracy, runner.failed_predictions
def get_hash(feature_list, k):
    """Compute an order-insensitive hash key for a (feature set, classifier) pair.

    :param feature_list: iterable of feature names
    :param k: name of the classifier
    :return: int -- sum of the hashes of all features plus the hash of the
        classifier name; permutations of the same feature set map to the
        same key, so callers can skip duplicate experiments
    """
    # Summing individual hashes (instead of hashing the joined sequence)
    # makes the key independent of feature ordering. The original index
    # loop, stray semicolon and debug print (marked "TODO: remove") are gone.
    return sum(hash(feature) for feature in feature_list) + hash(k)


if __name__ == '__main__':
    main()
|
{"/forestclf/main.py": ["/forestclf/ensemblerunner.py"]}
|
27,046
|
mgieger/Titanic-survival-analysis
|
refs/heads/master
|
/DNN/main.py
|
import json
import math
import os
import time
from preprocessor import Preprocessor
from keras import optimizers
from keras.layers import Dense, Activation, Dropout
from keras.models import Sequential
import numpy as np
import pandas as pd
from sklearn.metrics import classification_report, confusion_matrix
def save_data(directory, file_name, results, file_type='.json'):
    """Persist experiment results to disk.

    directory (str): folder created if absent.
        NOTE(review): only ensured to exist -- the output path is built from
        *file_name* alone, so *file_name* must already include the directory
        if the file should land inside it; confirm with callers.
    file_name (str): output path without extension.
    results (dict | pd.DataFrame): dict for '.json', DataFrame for '.csv'.
    file_type (str): '.json' or '.csv'; any other value is silently ignored.
    """
    if not os.path.exists(directory):
        os.makedirs(directory)
    target = file_name + file_type
    if file_type == '.json':
        with open(target, 'w') as out:
            json.dump(results, out)
    elif file_type == '.csv':
        results.to_csv(target, index=False)
def main():
    """
    Reads in the data, preprocesses it, and uses the training and test data sets to train
    the three neural networks. Then each neural network analyzes the data set to predict
    the survival outcome of each passenger.

    Models are built/seeded in a fixed order; do not reorder these sections,
    as np.random.seed() calls and layer construction order determine results.
    """
    full_file = '../titanic_full.csv'
    columns = [
        'pclass',
        'name',
        'sex',
        'age',
        'sibsp',
        'parch',
        'ticket',
        'fare',
        'cabin',
        'embarked',
        'survived'
    ]
    preprocessor = Preprocessor(full_file)
    data = preprocessor.get_matrix_scaled(columns)
    # 70/30 train/test split on the scaled matrix; last column is the label.
    TRAIN_SIZE = math.ceil(data.shape[0]*.70)
    # NOTE(review): train labels sliced with -1: (2-D column) but test labels
    # with -1 (1-D vector) -- presumably intentional for Keras fit() vs. the
    # sklearn metrics below; confirm.
    train_data, train_labels = data[:TRAIN_SIZE,:-1], data[:TRAIN_SIZE,-1:]
    test_data, test_labels = data[TRAIN_SIZE:,:-1], data[TRAIN_SIZE:,-1]
    # Sequence of creating neural networks to analyze Titanic dataset.
    # We tested various models of:
    # 1-4 layers
    # 4, 8, and 10 nodes per layer
    # With and without dropout; using dropout values of 0.2, 0.3, 0.4
    # For the sake of expediting the script run time only the best sequences
    # we found for 1-3 layers were included.
    # All 4 layer models would always predict the passenger to perish,
    # so these models were not kept, as they were not useful.
    target_names = ['Class 0: Perished', 'Class 1: Survived']
    sgd = optimizers.SGD(lr=0.05, momentum=0.9)
    ## Create the 1 layer model.
    # Reseed before each model so every network trains from the same state.
    np.random.seed(79)
    model1 = Sequential()
    model1.add(Dense(units = 10, input_dim = 10, activation='sigmoid', use_bias=True))
    model1.add(Dropout(0.30, seed=None))
    model1.add(Dense(1, activation='sigmoid'))
    #Create visualization overview to verify model structure.
    print(model1.summary())
    #Compile the model.
    model1.compile(optimizer = sgd, loss = 'mean_squared_error', metrics = ['accuracy'])
    #Train and Evaluate the model.
    model1.fit(train_data, train_labels, epochs = 5, batch_size = 1, verbose = 2, shuffle = True)
    score1 = model1.evaluate(test_data, test_labels, batch_size = 1, verbose=0)
    print(model1.metrics_names)
    print(score1, '\n')
    test_predictions1 = model1.predict_classes(test_data, batch_size = 1)
    print(classification_report(test_labels, test_predictions1, target_names=target_names))
    print(confusion_matrix(test_labels, test_predictions1))
    ##Create 2 layer model.
    np.random.seed(79)
    model2 = Sequential()
    model2.add(Dense(units = 10, input_dim = 10, activation='sigmoid', use_bias=True))
    model2.add(Dropout(0.312))
    model2.add(Dense(4, activation='sigmoid', use_bias=True))
    model2.add(Dense(1, activation='sigmoid'))
    #Create visualization overview to verify model structure.
    print(model2.summary())
    #Compile the model.
    model2.compile(optimizer = sgd, loss = 'mean_squared_error', metrics = ['accuracy'])
    #Train and Evaluate the model.
    model2.fit(train_data, train_labels, epochs = 5, batch_size = 1, verbose = 2, shuffle = True)
    score2 = model2.evaluate(test_data, test_labels, batch_size = 1, verbose=0)
    print(model2.metrics_names)
    print(score2, '\n')
    test_predictions2 = model2.predict_classes(test_data, batch_size = 1)
    print(classification_report(test_labels, test_predictions2, target_names=target_names))
    print(confusion_matrix(test_labels, test_predictions2))
    ##Create 3 layer model.
    np.random.seed(79)
    model3 = Sequential()
    model3.add(Dense(units = 10, input_dim = 10, activation='sigmoid', use_bias=True))
    model3.add(Dropout(0.31075, seed=None))
    model3.add(Dense(8, activation='sigmoid', use_bias=True))
    model3.add(Dropout(0.2, seed=None))
    model3.add(Dense(4, activation='sigmoid', use_bias=True))
    model3.add(Dense(1, activation='sigmoid'))
    #Create visualization overview to verify model structure.
    print(model3.summary())
    #Compile the model.
    model3.compile(optimizer = sgd, loss = 'mean_squared_error', metrics = ['accuracy'])
    #Train and Evaluate the model.
    model3.fit(train_data, train_labels, epochs = 5, batch_size = 1, verbose = 2, shuffle = True)
    score3 = model3.evaluate(test_data, test_labels, batch_size = 1, verbose=0)
    print(model3.metrics_names)
    print(score3, '\n')
    test_predictions3 = model3.predict_classes(test_data, batch_size = 1)
    print(classification_report(test_labels, test_predictions3, target_names=target_names))
    print(confusion_matrix(test_labels, test_predictions3))


if __name__ == '__main__':
    main()
|
{"/forestclf/main.py": ["/forestclf/ensemblerunner.py"]}
|
27,047
|
mgieger/Titanic-survival-analysis
|
refs/heads/master
|
/forestclf/ensemblerunner.py
|
import numpy as np
import preprocessor
from sklearn.externals.six import StringIO
from sklearn.tree import export_graphviz
import pydotplus
#import matplotlib.pyplot as plt
import os
class EnsembleRunner(object):
    """Fit a classifier on a train/test split and record per-row predictions.

    ``data`` is a dict with "training" and "test" numpy matrices whose last
    column is the label; ``clf`` is any sklearn-style classifier.
    """
    def __init__(self, data, clf):
        training_data = data["training"]
        testing_data = data["test"]
        # Features are normalized via the project's preprocessor; labels raw.
        self.train = preprocessor.normalize(training_data[:, :-1])
        self.train_labels = training_data[:, -1:].reshape(training_data.shape[0], )
        # print(self.train_labels.shape)
        self.test = preprocessor.normalize(testing_data[:, :-1])
        self.test_labels = testing_data[:, -1:]
        # Indexed as [predicted][actual] -- note this is transposed relative
        # to sklearn's [actual][predicted] convention.
        self.confusion_matrix = np.zeros((2, 2))
        self.clf = clf
        self.accuracy = 0
        self.prediction_results = dict()
        self.failed_predictions = dict()

    def run(self):
        """Fit the classifier, then evaluate it row-by-row on the test set."""
        wrong_prediction_count = 0
        self.clf.fit(self.train, self.train_labels)
        for i in range(self.test_labels.shape[0]):
            test_data = self.test[i].reshape(1, -1)  # TODO: reshape all data set instead of individual
            predicted = int(self.clf.predict(test_data))
            # log_prob = int(self.clf.predict_log_proba(test_data[i]))
            actual = int(self.test_labels[i])
            self.confusion_matrix[predicted][actual] += 1
            self.prediction_results[i] = {
                'predicted_survival': predicted,
                'actual': actual
            }
            # 'predicted_probability_survival': log_prob, #TODO: fix this
            if predicted != actual:
                self.failed_predictions[wrong_prediction_count] = {
                    'predicted_survival': predicted,
                    'actual': actual,
                    'data': test_data
                }
                wrong_prediction_count += 1
        # Only defined after run() has been called.
        self.wrong_prediction_count = wrong_prediction_count
        # trace / total gives overall accuracy regardless of index convention.
        self.accuracy = np.trace(self.confusion_matrix) / np.sum(self.confusion_matrix)

    def graph_results(self):
        """Export the first tree of the ensemble as dot markup.

        NOTE(review): the dot output is discarded -- the pydotplus pipeline
        below is commented out, so this is effectively a no-op.
        """
        #dot_data = StringIO()
        export_graphviz(self.clf.estimators_[0], filled=True, rounded=True, special_characters=True)
        #graph = pydotplus.graph_from_dot_data(dot_data.getvalue())
        # Image(graph.create_png())
        #print(self.clf.estimators_)
        #os.system('dot -Tpng tree.dot -o tree.png')

    def score(self):
        # Classifier's own accuracy on the held-out split.
        return self.clf.score(self.test, self.test_labels)  # TODO: test

    def print_feature_importance(self):
        print(self.clf.feature_importances_)

    def print_accuracy(self):
        # Meaningful only after run(); prints 0 before that.
        print(self.accuracy)
|
{"/forestclf/main.py": ["/forestclf/ensemblerunner.py"]}
|
27,048
|
xiaoyanguoke/klb
|
refs/heads/master
|
/webadmin/models.py
|
# -*- coding:utf-8 -*-
from django.db import models
class ebusiness_members(models.Model):
    """Channel-partner (reseller) account.

    Stores credentials, referral code, login statistics and an optional
    parent link for multi-level channels. Legacy Python 2 / old Django model.
    """
    # Self-referencing FK: the parent channel in the referral hierarchy.
    father = models.ForeignKey('self', default='', null=True, blank=True, verbose_name='父级渠道')
    username = models.CharField(max_length=255, default='', verbose_name='渠道方的用户名')
    # NOTE(review): password stored in a plain CharField -- confirm hashing
    # happens before save (see forms using make_password elsewhere).
    passwd = models.CharField(max_length=255, default='', verbose_name='渠道方密码')
    reg_time = models.DateTimeField(verbose_name='生成时间', auto_now_add=True, auto_now=False)
    code = models.CharField(max_length=255, default='', verbose_name='渠道代码')
    # NOTE(review): max_length is ignored on IntegerField in Django.
    status = models.IntegerField(default=0, max_length=2, null=True, blank=True, verbose_name='是否被屏蔽')
    last_login = models.CharField(max_length=255, default='', null=True, blank=True, verbose_name='最后登录时间')
    # IPAddressField is deprecated in modern Django (GenericIPAddressField).
    last_ip = models.IPAddressField(verbose_name='最后登录IP', null=True, blank=True)
    sys = models.CharField(max_length=255, default='', null=True, blank=True, verbose_name='登录操作系统')
    login_num = models.IntegerField(default=0, max_length=2, null=True, blank=True, verbose_name='是否被屏蔽')
    rebate = models.CharField(max_length=255, default='50', null=True, blank=True, verbose_name='返现金额')
    ischildren = models.IntegerField(default=0,max_length=2,verbose_name='是否有下线功能')

    class Meta:
        verbose_name = '渠道方用户表'
        verbose_name_plural = '渠道方用户表'

    def delete(self, *args, **kwargs):
        # Detach or clean related rows before the actual delete so non-nullable
        # reverse relations do not block it.
        self.clear_nullable_related()
        super(ebusiness_members, self).delete(*args, **kwargs)

    def clear_nullable_related(self):
        """Clear nullable reverse relations; recurse into non-nullable ones."""
        for related in self._meta.get_all_related_objects():
            accessor = related.get_accessor_name()
            related_set = getattr(self, accessor)
            if related.field.null:
                related_set.clear()
            else:
                for related_object in related_set.all():
                    related_object.clear_nullable_related()

    def __unicode__(self):
        # Python 2 string representation (shown in the Django admin).
        return self.username
class flow_analytics(models.Model):
    """Per-visitor traffic record attributed to a channel partner."""
    ebusiness = models.ForeignKey("ebusiness_members", default='', null=True, blank=True, verbose_name='渠道代码')
    ip = models.CharField(max_length=255, default='', null=True, blank=True, verbose_name='IP')
    browser = models.CharField(max_length=255, default='', null=True, blank=True, verbose_name='浏览器')
    os = models.CharField(max_length=255, default='', null=True, blank=True, verbose_name='操作系统')
    intime = models.DateTimeField(verbose_name='开始访问时间', auto_now_add=True, auto_now=False)
    # auto_now=True: refreshed on every save -> acts as a last-seen timestamp.
    endtime = models.DateTimeField(verbose_name='最后访问时间', auto_now_add=True, auto_now=True)
    inurl = models.CharField(max_length=255, default='', null=True, blank=True, verbose_name='第一次打开的网址')
    endurl = models.CharField(max_length=255, default='', null=True, blank=True, verbose_name='最后一次访问的网址')
    # NOTE(review): max_length is ignored on IntegerField in Django.
    num = models.IntegerField(max_length=11, default=1, verbose_name='日访问次数')

    class Meta:
        verbose_name = '访问统计表'
        verbose_name_plural = '访问统计表'
|
{"/members/urls.py": ["/members/views.py"], "/ebusiness/urls.py": ["/ebusiness/views.py"], "/ClientAdmin/forms.py": ["/webadmin/models.py"], "/bxservice/ZhongHuaViews.py": ["/bxservice/zhonghua.py", "/bxservice/ansheng.py", "/bxservice/yangguang.py"], "/bxservice/urls.py": ["/bxservice/ZhongHuaViews.py", "/bxservice/views.py"], "/ClientAdmin/views.py": ["/ClientAdmin/forms.py"], "/LYZ/urls.py": ["/members/urls.py", "/klbapp/urls.py", "/web/urls.py", "/wechat/urls.py", "/webadmin/urls.py", "/bxservice/urls.py", "/ClientAdmin/urls.py", "/ebusiness/urls.py"], "/ebusiness/forms.py": ["/bxservice/models.py", "/LYZ/settings.py", "/ebusiness/models.py"], "/webadmin/forms.py": ["/webadmin/models.py"], "/klbapp/urls.py": ["/klbapp/views.py"], "/web/urls.py": ["/web/views.py"], "/webadmin/admin.py": ["/webadmin/models.py"], "/web/views.py": ["/LYZ/common.py", "/LYZ/klb_class.py", "/klbapp/appclass.py", "/bxservice/models.py", "/bxservice/common.py"], "/ClientAdmin/urls.py": ["/ClientAdmin/views.py"], "/ebusiness/templatetags/EbusinessTag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/webadmin/models.py"], "/members/models.py": ["/LYZ/settings.py", "/bxservice/models.py"], "/web/templatetags/webtag.py": ["/LYZ/klb_class.py"], "/wechat/templatetags/wechattag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/LYZ/settings.py"], "/webadmin/urls.py": ["/webadmin/views.py"], "/wechat/urls.py": ["/wechat/views.py"], "/members/admin.py": ["/members/models.py"]}
|
27,049
|
xiaoyanguoke/klb
|
refs/heads/master
|
/members/urls.py
|
# -*- coding:utf-8 -*-
from django.conf.urls import patterns,include,url
from members.views import *
# Route table for the member-facing site (legacy django.conf.urls.patterns()).
urlpatterns = patterns('',
    url(r'^$',Index,name="index"),
    url(r'^login/$',Login,name="login"),
    # NOTE(review): missing leading ^ anchor -- matches any path ending in login/wechat/
    url(r'login/wechat/$',WechatLogin,name="WechatLogin"),
    url(r'^reg/$',Reg,name="reg"),
    url(r'^logout/$',Logout_View,name="logout"),
    url(r'^order_details/$',OrderDetails,name="order_details"),
    url(r'^order_details/(?P<t>\w+)/$', OrderDetails),
    url(r'^setting/$', Setting),
    url(r'^setting/(?P<action>\w+)/$', Setting),
    url(r'^help/$',Help,name="help"),
    url(r'^getvcode/$',GetVcode,name="getvcode"),
    url(r'^validcode/$',ValidCode,name="validcode"),
    url(r'^qrcode/$', GenerateQrcode, name='qrcode'),
    url(r'^imgupload/$', ImgUpload, name='ImgUpload'),
    url(r'^updatetovip/$', UpDateToVip, name='UpDateToVip'),# upgrade to VIP
    url(r'^mycar/$', MyCar, name='MyCar'),# my car
    url(r'^recall/$', CarRecall, name='CarRecall'),# my car: recall check
    url(r'^tjcode/$', TuiJianMa, name='TuiJianMa'),# referral code
    url(r'^pay/$', pay, name='pay'),# payment
    url(r'^payResult/$', payResult, name='payResult'),# payment result page
    url(r'^NotifyURL/$', NotifyURL, name='NotifyURL'),# payment async notify callback
    url(r'^GetPayMD5Info/$', GetPayMD5Info, name='GetPayMD5Info'),# returns MD5-signed payload
)
|
{"/members/urls.py": ["/members/views.py"], "/ebusiness/urls.py": ["/ebusiness/views.py"], "/ClientAdmin/forms.py": ["/webadmin/models.py"], "/bxservice/ZhongHuaViews.py": ["/bxservice/zhonghua.py", "/bxservice/ansheng.py", "/bxservice/yangguang.py"], "/bxservice/urls.py": ["/bxservice/ZhongHuaViews.py", "/bxservice/views.py"], "/ClientAdmin/views.py": ["/ClientAdmin/forms.py"], "/LYZ/urls.py": ["/members/urls.py", "/klbapp/urls.py", "/web/urls.py", "/wechat/urls.py", "/webadmin/urls.py", "/bxservice/urls.py", "/ClientAdmin/urls.py", "/ebusiness/urls.py"], "/ebusiness/forms.py": ["/bxservice/models.py", "/LYZ/settings.py", "/ebusiness/models.py"], "/webadmin/forms.py": ["/webadmin/models.py"], "/klbapp/urls.py": ["/klbapp/views.py"], "/web/urls.py": ["/web/views.py"], "/webadmin/admin.py": ["/webadmin/models.py"], "/web/views.py": ["/LYZ/common.py", "/LYZ/klb_class.py", "/klbapp/appclass.py", "/bxservice/models.py", "/bxservice/common.py"], "/ClientAdmin/urls.py": ["/ClientAdmin/views.py"], "/ebusiness/templatetags/EbusinessTag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/webadmin/models.py"], "/members/models.py": ["/LYZ/settings.py", "/bxservice/models.py"], "/web/templatetags/webtag.py": ["/LYZ/klb_class.py"], "/wechat/templatetags/wechattag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/LYZ/settings.py"], "/webadmin/urls.py": ["/webadmin/views.py"], "/wechat/urls.py": ["/wechat/views.py"], "/members/admin.py": ["/members/models.py"]}
|
27,050
|
xiaoyanguoke/klb
|
refs/heads/master
|
/klbapp/appclass.py
|
# -*- coding:utf-8 -*-
from LYZ.settings import *
from datetime import datetime
import time,urllib,json,sys,httplib,urllib2,re
# String obfuscation: reversible encode/decode via XOR with a key (Python 2).
class AppBaseStr(object):
    """Reversible string scrambler.

    Each character is mapped to a zero-padded 3-digit ASCII code; the
    concatenated digit string is XOR-ed with the digit-encoding of the key.
    NOTE(review): this is obfuscation, not cryptography -- do not rely on it
    for secrecy.
    """
    def __init__(self, key=SECRET_KEY):
        self.__src_key = key
        # Numeric form of the key, used as the XOR mask.
        self.__key = self.__get_strascii(self.__src_key, True)

    def encode(self, value):
        # Digit-encode the value, XOR with the key, return as a decimal string.
        return "%d" % (self.__get_strascii(value, True) ^ self.__key)

    def decode(self, pwd):
        # Returns False (not an exception) when pwd is not numeric.
        if self.is_number(pwd):
            return self.__get_strascii((int(pwd)) ^ self.__key, False)
        else:
            return False

    def reset_key(self, key):
        self.__src_key = key
        self.__key = self.__get_strascii(self.__src_key, True)

    def __get_strascii(self, value, bFlag):
        # bFlag True: string -> number; False: number -> string.
        if bFlag:
            return self.__get_str2ascii(value)
        else:
            return self.__get_ascii2str(value)

    def __get_str2ascii(self, value):
        ls = []
        for i in value:
            ls.append(self.__get_char2ascii(i))
        return long("".join(ls))

    def __get_char2ascii(self, char):
        try:
            # Zero-padded 3-digit code so decoding can split on fixed width.
            return "%03.d" % ord(char)
        except (TypeError, ValueError):
            print "key error."
            exit(1)

    def __get_ascii2char(self, ascii):
        if self.is_ascii_range(ascii):
            return chr(ascii)
        else:
            print "ascii error(%d)" % ascii
            exit(1)

    def __get_ascii2str(self, n_chars):
        # Split the decimal string back into 3-digit codes; a shorter leading
        # group (length % 3) is consumed first.
        ls = []
        s = "%s" % n_chars
        n, p = divmod(len(s), 3)
        if p > 0:
            nRet = int(s[0: p])
            ls.append(self.__get_ascii2char(nRet))
        pTmp = p
        while pTmp < len(s):
            ls.append(self.__get_ascii2char(int(s[pTmp: pTmp + 3])))
            pTmp += 3
        return "".join(ls)

    def is_number(self, value):
        try:
            int(value)
            return True
        except (TypeError, ValueError):
            pass
        return False

    def is_ascii_range(self, n):
        return 0 <= n < 256

    def is_custom_ascii_range(self, n):
        return 33 <= n < 48 or 58 <= n < 126
# Emit JSON API responses with a fixed envelope.
class PrintJson(object):
    """Build the standard JSON envelope {time, error, msg, data, url, len}."""
    def __init__(self):
        self._time = str(datetime.now())
        self._error = 0
        self._msg = None
        self._data = {}
        self._url = ''
        self._jsonvalue = ''
        self._jsonstr = ''
        self._len = 0

    def echo(self, msg=None, data=None, error=0, url=None, _len=None):
        # Each field is only overwritten when a non-empty value was supplied
        # (<> is the Python 2 spelling of !=).
        if error <> None and error <> "":
            self._error = error
        if msg <> None and msg <> "":
            self._msg = msg
        if data <> None and data <> "":
            self._data = data
        if url <> None and url <> "":
            self._url = url
        if _len <> None and _len <> "":
            self._len = _len
        self._jsonvalue = {"time": self._time, "error": self._error, "msg": self._msg, "data": self._data,
                           "url": self._url, "len": self._len}
        self._jsonstr = json.dumps(self._jsonvalue)
        return self._jsonstr
'''
验证
'''
class AppCheck(object):
    """Input validators: phone number, email address, username, password."""
    # Validate a mainland-China mobile number: 11 digits with a known prefix.
    def phonecheck(self,s):
        msg = False
        phoneprefix = ['130', '131', '132', '133', '134', '135', '136', '137', '138', '139', '150', '151', '152', '153',
                       '156', '158', '159', '170', '183', '182', '185', '186', '188', '189', '177']
        if len(s) <> 11:
            msg = False
        else:
            if s.isdigit():
                if s[:3] in phoneprefix:
                    msg = True
                else:
                    msg = False
            else:
                msg = False
        return msg
    # Validate an email address (rough pattern; requires length > 7).
    def validateEmail(self,email):
        if len(email) > 7:
            if re.match("^.+\\@(\\[?)[a-zA-Z0-9\\-\\.]+\\.([a-zA-Z]{2,3}|[0-9]{1,3})(\\]?)$", email) <> None:
                return True
        return False
    def UserCheck(self,s):
        # Username: a letter first, then 2-9 word characters (3-10 total).
        # NOTE(review): "[a-zA-z]" also matches [\]^_` -- likely meant [a-zA-Z].
        if re.match("^[a-zA-z][a-zA-Z0-9_]{2,9}$",s) <> None:
            return True
        else:
            return False
    def PwdCheck(self,s):
        # Password: 6-20 non-whitespace characters.
        if re.match("^[^\s]{6,20}$",s)<> None:
            return True
        else:
            return False
class SendMessage(object):
    """Send an SMS verification code through the configured gateway."""
    def send(self,phone,n):
        """POST the code *n* to *phone* via the SMS gateway; returns the raw
        gateway response body. SMS_USER/SMS_PWD/SMS_URL come from settings."""
        para = {
            'name': SMS_USER,
            'pwd': SMS_PWD,
            # 'content': '(8888)卡来宝注册验证码。',
            # 'content': '尊敬的用户您好!您的验证码是8888',
            'content': '您好!欢迎您注册成为卡来宝会员,您的验证码是%s'%(n),
            'mobile': phone,
            'stime': '',
            'sign': '卡来宝',
            'type': 'pt',
            'extno': '',
        }
        postData = urllib.urlencode(para)
        req = urllib2.Request(SMS_URL, postData)
        resp = urllib2.urlopen(req).read()
        return resp
# Encode/decode an opaque user session key.
class UserKey(object):
    """Pack uid, mobile and a millisecond timestamp into one obfuscated token."""
    def __init__(self):
        self.BaseCode = AppBaseStr()
    def encode(self,uid="",mobile=""):
        # Token payload format: "<uid>||<mobile>||<epoch ms>".
        Str = str(uid)+"||"+str(mobile)+"||"+str(time.time()*1000)
        return self.BaseCode.encode(Str)
    def decode(self,s):
        # Returns False for malformed tokens (see AppBaseStr.decode).
        return self.BaseCode.decode(s)
|
{"/members/urls.py": ["/members/views.py"], "/ebusiness/urls.py": ["/ebusiness/views.py"], "/ClientAdmin/forms.py": ["/webadmin/models.py"], "/bxservice/ZhongHuaViews.py": ["/bxservice/zhonghua.py", "/bxservice/ansheng.py", "/bxservice/yangguang.py"], "/bxservice/urls.py": ["/bxservice/ZhongHuaViews.py", "/bxservice/views.py"], "/ClientAdmin/views.py": ["/ClientAdmin/forms.py"], "/LYZ/urls.py": ["/members/urls.py", "/klbapp/urls.py", "/web/urls.py", "/wechat/urls.py", "/webadmin/urls.py", "/bxservice/urls.py", "/ClientAdmin/urls.py", "/ebusiness/urls.py"], "/ebusiness/forms.py": ["/bxservice/models.py", "/LYZ/settings.py", "/ebusiness/models.py"], "/webadmin/forms.py": ["/webadmin/models.py"], "/klbapp/urls.py": ["/klbapp/views.py"], "/web/urls.py": ["/web/views.py"], "/webadmin/admin.py": ["/webadmin/models.py"], "/web/views.py": ["/LYZ/common.py", "/LYZ/klb_class.py", "/klbapp/appclass.py", "/bxservice/models.py", "/bxservice/common.py"], "/ClientAdmin/urls.py": ["/ClientAdmin/views.py"], "/ebusiness/templatetags/EbusinessTag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/webadmin/models.py"], "/members/models.py": ["/LYZ/settings.py", "/bxservice/models.py"], "/web/templatetags/webtag.py": ["/LYZ/klb_class.py"], "/wechat/templatetags/wechattag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/LYZ/settings.py"], "/webadmin/urls.py": ["/webadmin/views.py"], "/wechat/urls.py": ["/wechat/views.py"], "/members/admin.py": ["/members/models.py"]}
|
27,051
|
xiaoyanguoke/klb
|
refs/heads/master
|
/ebusiness/urls.py
|
# -*- coding:utf-8 -*-
from django.conf.urls import patterns, include, url
from ebusiness.views import *
# Route table for the e-business (channel partner) vehicle-insurance flow.
urlpatterns = patterns('',
    url(r'^$', initVehicleBaseInfo, name="index"),
    url(r'^initVehicleBaseInfo/$', initVehicleBaseInfo, name="initVehicleBaseInfo"),
    url(r'^selectCerList/$', selectCerList, name="selectCerList"),
    url(r'^initQuotation/$', initQuotation, name="initQuotation"),
    url(r'^editInfo/$', editInfo, name="editInfo"),
    url(r'^ConfirmInsure/$', ConfirmInsure, name="ConfirmInsure"),
    url(r'^auto/$', auto, name="auto"),
    url(r'^GetVIN/$', GetVIN, name="GetVIN"),
    url(r'^UserCenter/$', UserCenter, name="UserCenter"),
)
|
{"/members/urls.py": ["/members/views.py"], "/ebusiness/urls.py": ["/ebusiness/views.py"], "/ClientAdmin/forms.py": ["/webadmin/models.py"], "/bxservice/ZhongHuaViews.py": ["/bxservice/zhonghua.py", "/bxservice/ansheng.py", "/bxservice/yangguang.py"], "/bxservice/urls.py": ["/bxservice/ZhongHuaViews.py", "/bxservice/views.py"], "/ClientAdmin/views.py": ["/ClientAdmin/forms.py"], "/LYZ/urls.py": ["/members/urls.py", "/klbapp/urls.py", "/web/urls.py", "/wechat/urls.py", "/webadmin/urls.py", "/bxservice/urls.py", "/ClientAdmin/urls.py", "/ebusiness/urls.py"], "/ebusiness/forms.py": ["/bxservice/models.py", "/LYZ/settings.py", "/ebusiness/models.py"], "/webadmin/forms.py": ["/webadmin/models.py"], "/klbapp/urls.py": ["/klbapp/views.py"], "/web/urls.py": ["/web/views.py"], "/webadmin/admin.py": ["/webadmin/models.py"], "/web/views.py": ["/LYZ/common.py", "/LYZ/klb_class.py", "/klbapp/appclass.py", "/bxservice/models.py", "/bxservice/common.py"], "/ClientAdmin/urls.py": ["/ClientAdmin/views.py"], "/ebusiness/templatetags/EbusinessTag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/webadmin/models.py"], "/members/models.py": ["/LYZ/settings.py", "/bxservice/models.py"], "/web/templatetags/webtag.py": ["/LYZ/klb_class.py"], "/wechat/templatetags/wechattag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/LYZ/settings.py"], "/webadmin/urls.py": ["/webadmin/views.py"], "/wechat/urls.py": ["/wechat/views.py"], "/members/admin.py": ["/members/models.py"]}
|
27,052
|
xiaoyanguoke/klb
|
refs/heads/master
|
/webadmin/views.py
|
# -*- coding:utf-8 -*-
from django.shortcuts import *
from LYZ.common import *
from webadmin.forms import UserForms, UserAuth,DefineCodeForms,AutoCreateUser
from webadmin.models import ebusiness_members
from django.core.paginator import Paginator, InvalidPage, EmptyPage, PageNotAnInteger
from LYZ.klb_class import KLBCode
import time
def Index(request):
    """Channel-admin dashboard; redirects to login when unauthenticated."""
    Auth = UserAuth(request)
    if Auth.isLogin():
        return render_to_response('ebusiness/Member/admin-index.html', {}, context_instance=RequestContext(request))
    else:
        return HttpResponsePermanentRedirect("/webadmin/Login/")
def Login(request):
    """Channel-admin login endpoint.

    POST: throttled to one attempt per 10 seconds per session; on success the
    member's id/code/ischildren are stored in the session and a JSON payload
    {accessGranted, url|errors} is returned. GET: renders the login form.
    """
    import json
    TempData = {}
    if request.method == "POST":
        # Throttle: compare now against the timestamp of the previous attempt.
        reqtime = request.session.get("reqtime", "")
        request.session['reqtime'] = time.time()
        xztime = time.time()
        try:
            TimeInterval = int(xztime - reqtime)
        except:
            # First attempt: reqtime is "" and the subtraction raises.
            TimeInterval = 0
        request.session['reqtime'] = time.time()
        if 0 < TimeInterval < 10:
            TempData.update({"accessGranted": "", "errors": "操作频繁,请%s秒后再试......" % str(TimeInterval)})
        else:
            # Credential checking is delegated to UserForms validation.
            forms = UserForms(request.POST)
            if forms.is_valid():
                username = request.REQUEST.get("username", "")
                # NOTE(review): assumes the username exists -- an empty
                # queryset would raise IndexError here; presumably the form
                # guarantees existence. Confirm in webadmin.forms.UserForms.
                UserMember = ebusiness_members.objects.filter(username=username).values()[0]
                request.session['ebusiness_username'] = username
                request.session['ebusiness_user_id'] = UserMember['id']
                request.session['ebusiness_code'] = UserMember['code']
                request.session['ebusiness_ischildren'] = UserMember['ischildren']
                TempData.update({"url": "/webadmin/", "accessGranted": "ok", })
            else:
                TempData.update({"accessGranted": "", "errors": forms.errors['username'].as_text()})
        TempJSON = json.dumps(TempData)
        return HttpResponse(TempJSON, content_type="application/json")
    else:
        forms = UserForms()
        TempData.update({"forms": forms})
        return render_to_response('ebusiness/Member/login-admin.html', TempData,
                                  context_instance=RequestContext(request))
def Analytics(request):
    """Paginated traffic ("flow") / purchase ("buy") statistics for the
    logged-in channel member."""
    TempData = dict(request.REQUEST.items())
    SessionData = dict(request.session.items())
    Auth = UserAuth(request)
    if Auth.isLogin():
        page_size = 20
        # Window of page links shown around the current page.
        after_range_num = 5
        before_range_num = 6
        Action = TempData.get("action")
        # Default to "flow" when action is the empty string.
        # NOTE(review): a missing "action" key yields None (not ""), so the
        # default does not kick in and TEMP below stays unbound -- confirm
        # callers always pass ?action=.
        Action = (Action == "") and "flow" or Action
        if Action == "flow":
            eUser = ebusiness_members.objects.get(id=SessionData.get("ebusiness_user_id"))
            eUser_Flow = eUser.flow_analytics_set.order_by('-endtime').all()
            paginator = Paginator(eUser_Flow, page_size)
        elif Action == "buy":
            # Purchase statistics are not implemented yet.
            paginator = Paginator([], page_size)
        else:
            paginator = Paginator([], page_size)
        try:
            try:
                page = int(TempData.get("page", ""))
                page = page < 1 and 1 or page
            except ValueError:
                page = 1
            try:
                DATA = paginator.page(page)
            except(EmptyPage, InvalidPage, PageNotAnInteger):
                DATA = paginator.page(1)
            # Build the sliding page-number window.
            if page >= after_range_num:
                page_range = paginator.page_range[page - after_range_num:page + before_range_num]
            else:
                page_range = paginator.page_range[0:int(page) + before_range_num]
            TempData.update({"DATA": DATA, "page_range": page_range})
            if Action == "flow":
                TEMP = 'ebusiness/Member/admin-analytics.html'
            if Action == 'buy':
                TEMP = 'ebusiness/Member/admin-buy.html'
            return render_to_response(TEMP, TempData, context_instance=RequestContext(request))
        except AttributeError:
            return HttpResponsePermanentRedirect("/webadmin/")
    else:
        return HttpResponsePermanentRedirect("/webadmin/Login/")
'''
推广中心
'''
def Share(request):
    """Promotion center: shows the member's referral code and lets them
    define a custom one (POST action=define_code)."""
    Auth = UserAuth(request)
    if Auth.isLogin():
        TempData = dict(request.REQUEST.items())
        Action = TempData.get("action","")
        if request.method == "POST":
            CodeForms = DefineCodeForms(request.POST)
            if Action == "define_code" and CodeForms.is_valid():
                TempData.update({"DefineCodeStatus":"1"})
                # Persist the newly chosen code for the rest of the session.
                request.session['ebusiness_code'] = TempData.get("code","")
        else:
            CodeForms = DefineCodeForms(initial={"code":request.session.get("ebusiness_code",""),"uid":request.session.get("ebusiness_user_id","")})
        GetUser = ebusiness_members.objects.get(id=request.session.get("ebusiness_user_id",""))
        TempData.update({"CodeForms":CodeForms,"user":GetUser})
        return render_to_response("ebusiness/Member/admin-share.html", TempData, context_instance=RequestContext(request))
    else:
        return HttpResponsePermanentRedirect("/webadmin/Login/")
def ShareChildren(request, code=None):
    """Sub-channel sign-up landing page reached via an obfuscated referral link.

    *code* is decoded with KLBCode; a falsy/invalid code renders an inline
    error page that bounces the visitor to the public site.
    """
    KCode = KLBCode()
    TempData = dict(request.REQUEST.items())
    code = KCode.decode(code)
    if code <> "":
        if request.method == "POST":
            forms = AutoCreateUser(request.POST)
            if forms.is_valid():
                # Create/attach the child account under parent channel *code*.
                In,IsSet = forms.CheckAuthenticate(fcode=code)
                TempData.update({"isok":True,"forms":forms,"User":In})
        else:
            forms = AutoCreateUser()
        TempData.update({"forms":forms})
        return render_to_response("ebusiness/Member/admin-share-children.html", TempData,context_instance=RequestContext(request))
    else:
        # Minimal inline page: alert "link error" (链接错误) then redirect.
        html = '' \
               '<!DOCTYPE html>' \
               '<html lang="en">' \
               '<head>' \
               '<meta charset="UTF-8">' \
               '<title></title>' \
               '</head><body>' \
               '<body>' \
               '<script>' \
               'alert("链接错误!");' \
               'window.location.href="http://www.kalaibao.com/ebusiness/?ShowBanner=1";' \
               '</script>' \
               '</body>' \
               '</html>'
        return HttpResponse(html)
'''
用户退出登录
'''
def Logout(request):
    """Clear every session key and redirect the user to the admin login page.

    Fix: the original iterated ``request.session.items()`` while deleting
    keys, which is only safe on Python 2 (where ``items()`` returns a copy)
    and bound an unused ``v``.  Iterating a snapshot of the keys is safe on
    every Python version.
    """
    for k in list(request.session.keys()):
        del request.session[k]
    return HttpResponsePermanentRedirect("/webadmin/Login/")
|
{"/members/urls.py": ["/members/views.py"], "/ebusiness/urls.py": ["/ebusiness/views.py"], "/ClientAdmin/forms.py": ["/webadmin/models.py"], "/bxservice/ZhongHuaViews.py": ["/bxservice/zhonghua.py", "/bxservice/ansheng.py", "/bxservice/yangguang.py"], "/bxservice/urls.py": ["/bxservice/ZhongHuaViews.py", "/bxservice/views.py"], "/ClientAdmin/views.py": ["/ClientAdmin/forms.py"], "/LYZ/urls.py": ["/members/urls.py", "/klbapp/urls.py", "/web/urls.py", "/wechat/urls.py", "/webadmin/urls.py", "/bxservice/urls.py", "/ClientAdmin/urls.py", "/ebusiness/urls.py"], "/ebusiness/forms.py": ["/bxservice/models.py", "/LYZ/settings.py", "/ebusiness/models.py"], "/webadmin/forms.py": ["/webadmin/models.py"], "/klbapp/urls.py": ["/klbapp/views.py"], "/web/urls.py": ["/web/views.py"], "/webadmin/admin.py": ["/webadmin/models.py"], "/web/views.py": ["/LYZ/common.py", "/LYZ/klb_class.py", "/klbapp/appclass.py", "/bxservice/models.py", "/bxservice/common.py"], "/ClientAdmin/urls.py": ["/ClientAdmin/views.py"], "/ebusiness/templatetags/EbusinessTag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/webadmin/models.py"], "/members/models.py": ["/LYZ/settings.py", "/bxservice/models.py"], "/web/templatetags/webtag.py": ["/LYZ/klb_class.py"], "/wechat/templatetags/wechattag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/LYZ/settings.py"], "/webadmin/urls.py": ["/webadmin/views.py"], "/wechat/urls.py": ["/wechat/views.py"], "/members/admin.py": ["/members/models.py"]}
|
27,053
|
xiaoyanguoke/klb
|
refs/heads/master
|
/ClientAdmin/forms.py
|
# -*- coding:utf-8 -*-
from django import forms
from django.contrib.auth.hashers import make_password, check_password
from webadmin.models import ebusiness_members
STATUS_OPTIONS = (("0", "激活"),("1", "未激活"))
class AddUserForms(forms.Form):
    """Admin form for creating a channel (渠道) member account.

    Renders bootstrap-styled widgets; validates that the two password fields
    match and that the username is unique, then persists the account through
    CheckAuthenticate().
    """
    username = forms.CharField(
        required=True,
        max_length=40,
        error_messages={'invalid': '用户名必须输入', 'required': '用户名必须输入'},
        label='渠道登录用户名:',
        widget=forms.TextInput(
            attrs={"class":"form-control rounded","id":"username","placeholder":"用户名"}
        )
    )
    passwd = forms.CharField(
        required=True,
        error_messages={'invalid': '密码格式不正确', 'required': '密码格式不正确'},
        label='密码:',
        widget=forms.PasswordInput(
            attrs={"class":"form-control rounded","id":"passwd","placeholder":"请输入密码"}
        )
    )
    repasswd = forms.CharField(
        required=True,
        error_messages={'invalid': '密码格式不正确', 'required': '密码格式不正确'},
        label='确认密码:',
        widget=forms.PasswordInput(
            attrs={"class":"form-control rounded","id":"repasswd","placeholder":"请再次输入密码"}
        )
    )
    status = forms.ChoiceField(
        choices=STATUS_OPTIONS,
        required=False,
        label='用户状态:',
        initial='激活',
        widget=forms.Select(
            attrs={"class":"form-control rounded","id":"status"}
        )
    )
    rebate = forms.CharField(
        required=True,
        error_messages={'invalid': '用户返利格式不正确', 'required': '用户返利格式不正确'},
        label='用户返利:',
        widget=forms.TextInput(
            attrs={"class":"form-control rounded","id":"rebate","placeholder":"用户返利"}
        )
    )
    def clean_repasswd(self):
        """Reject the form when the two password fields differ."""
        passwd = self.cleaned_data.get("passwd")
        repasswd = self.cleaned_data.get("repasswd")
        if passwd and repasswd and passwd != repasswd:
            raise forms.ValidationError('两次密码输入不一致')
        return repasswd
    def clean_username(self):
        """Reject the form when the username is already taken."""
        username = self.cleaned_data.get("username")
        if ebusiness_members.objects.filter(username=username).exists():
            raise forms.ValidationError('用户名存在,请更换用户名')
        return username
    def CheckAuthenticate(self):
        """Create (or fetch) the member row from the validated form data.

        Returns the (instance, created) pair from get_or_create.
        Call only after is_valid().
        """
        username = self.cleaned_data.get("username")
        passwd = self.cleaned_data.get("passwd")
        status = self.cleaned_data.get("status")
        rebate = self.cleaned_data.get("rebate")
        passwd = make_password(passwd)
        # Referral code derived from the current member count (count*100+888).
        code = str((int(ebusiness_members.objects.count())*100)+888)
        CreateUser,isset = ebusiness_members.objects.get_or_create(username=username,passwd=passwd,status=status,rebate=rebate,code=code)
        return CreateUser,isset
|
{"/members/urls.py": ["/members/views.py"], "/ebusiness/urls.py": ["/ebusiness/views.py"], "/ClientAdmin/forms.py": ["/webadmin/models.py"], "/bxservice/ZhongHuaViews.py": ["/bxservice/zhonghua.py", "/bxservice/ansheng.py", "/bxservice/yangguang.py"], "/bxservice/urls.py": ["/bxservice/ZhongHuaViews.py", "/bxservice/views.py"], "/ClientAdmin/views.py": ["/ClientAdmin/forms.py"], "/LYZ/urls.py": ["/members/urls.py", "/klbapp/urls.py", "/web/urls.py", "/wechat/urls.py", "/webadmin/urls.py", "/bxservice/urls.py", "/ClientAdmin/urls.py", "/ebusiness/urls.py"], "/ebusiness/forms.py": ["/bxservice/models.py", "/LYZ/settings.py", "/ebusiness/models.py"], "/webadmin/forms.py": ["/webadmin/models.py"], "/klbapp/urls.py": ["/klbapp/views.py"], "/web/urls.py": ["/web/views.py"], "/webadmin/admin.py": ["/webadmin/models.py"], "/web/views.py": ["/LYZ/common.py", "/LYZ/klb_class.py", "/klbapp/appclass.py", "/bxservice/models.py", "/bxservice/common.py"], "/ClientAdmin/urls.py": ["/ClientAdmin/views.py"], "/ebusiness/templatetags/EbusinessTag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/webadmin/models.py"], "/members/models.py": ["/LYZ/settings.py", "/bxservice/models.py"], "/web/templatetags/webtag.py": ["/LYZ/klb_class.py"], "/wechat/templatetags/wechattag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/LYZ/settings.py"], "/webadmin/urls.py": ["/webadmin/views.py"], "/wechat/urls.py": ["/wechat/views.py"], "/members/admin.py": ["/members/models.py"]}
|
27,054
|
xiaoyanguoke/klb
|
refs/heads/master
|
/bxservice/common.py
|
# -*-coding:utf-8 -*-
import cookielib
import random
import string
import urllib
import urllib2
import re
import time
import json
import datetime
import base64
import os
import httplib
from django.core.exceptions import ObjectDoesNotExist
from bxservice.models import bxcarvin
from bxservice.models import bxcarinfo
from bxservice.models import bxpriceinfo
from bxservice.models import bxpayinfo
from bxservice.models import citycode
from bxservice.models import bxygpriceinfo
from bxservice.models import bxyghebaoinfo
from bxservice.models import bxzhcallbackinfo
from bxservice.models import bxashebaoinfo
from bxservice.models import bxaspriceinfo
from bxservice.models import bxascallbackinfo
from bxservice.models import bxygcallbackinfo
from bxservice.models import bxzhisread
import hashlib, socket
from Crypto.Cipher import AES
from urllib import unwrap, toBytes, quote, splittype, splithost, splituser, unquote, addinfourl
from bxservice.models import callbaklog
# 获取vin和发动机号
class Yo(object):
    """Scrape sinosig's online quote form to recover a (Beijing) car's VIN and
    engine number from its licence plate and owner name.

    The form requires a contact phone/e-mail, so plausible-looking random
    ones are generated for every instance.
    """
    def __init__(self, plate='', name=''):
        # VIN check-digit transliteration values (letters I/O/Q excluded).
        self.__VIN_VALUE = {'0': 0, '1': 1, '2': 2, '3': 3, '4': 4, '5': 5, '6': 6, '7': 7, '8': 8, '9': 9,
                            'A': 1, 'B': 2, 'C': 3, 'D': 4, 'E': 5, 'F': 6, 'G': 7, 'H': 8, 'J': 1, 'K': 2,
                            'L': 3, 'M': 4, 'N': 5, 'P': 7, 'R': 9, 'S': 2, 'T': 3, 'U': 4, 'V': 5, 'W': 6,
                            'X': 7, 'Y': 8, 'Z': 9,
                            }
        # Positional weights; position 9 (the check digit itself) is omitted.
        self.__VIN_WEIGHT = {1: 8, 2: 7, 3: 6, 4: 5, 5: 4, 6: 3, 7: 2, 8: 10, 10: 9, 11: 8, 12: 7, 13: 6,
                             14: 5, 15: 4, 16: 3, 17: 2,
                             }
        self.__VIN_NO_CONTAIN = ['I', 'O', 'Q']
        # Prefix pool for generating a random (fake) mobile number.
        self.__PHONE_HEAD = ['133', '153', '180', '181', '189', '177',
                             '130', '131', '132', '155', '156', '145',
                             '185', '186', '176', '178', '188', '147',
                             '134', '135', '136', '137', '138', '139',
                             '150', '151', '152', '157', '158', '159',
                             '182', '183', '184', '187',
                             ]
        self.__PHONE_BASE = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9']
        # Pools for generating a random (fake) e-mail address.
        self.__MAIL_ORG = ['@qq.com', '@126.com', '@163.com', '@sina.com', '@yahoo.com.cn', '@hotmail.com',
                           '@gmail.com', '@souhu.com']
        self.__MAIL_BASE = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',
                            'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 'd', 't', 'u', 'v', 'w', 'x', 'y', 'z',
                            'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R',
                            'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '_', '-']
        # Scrape target and POST payload for the sinosig quote form.
        self.url = 'http://chexian.sinosig.com/Net/Human_exact.action'
        self.post = {'paraMap.token': "2696",
                     'paraMap.agentCode': "W00110002",
                     'paraMap.orgID': "01682900",
                     'paraMap.spsource': "NET",
                     'paraMap.redirectControl': "1",
                     'paraMap.pageId': "2",
                     'paraMap.isRegist': "1",
                     'paraMap.orgName': "北京市",
                     'initLicence': "京",
                     'paraMap.licence': plate,
                     'paraMap.contactor': name,
                     'paraMap.phone': self.random_phone(),
                     'paraMap.email': self.random_mail(),
                     'paraMap.premiumType': "1",
                     'paraMap.count': "0",
                     'paraMap.integralShow': "1",
                     }
        self.vin = ''
        self.engine = ''
        self.plate = plate
        self.name = name
    def sure(self, repeat=2, delay=1):
        """Try get_vin_engine() up to ``repeat`` times, sleeping ``delay``
        seconds between attempts.  Returns (ok, vin, engine); falls through
        with an implicit None when every attempt fails without raising."""
        try:
            r = int(repeat)
            t = delay
            while r > 0:
                r -= 1
                e, v, en = self.get_vin_engine()
                if e:
                    return e, v, en
                else:
                    time.sleep(t)
        except Exception, e:
            return False, '', ''
    def random_phone(self, digit=8):
        """Return a plausible random mobile number: prefix + ``digit`` digits.

        NOTE(review): random.sample draws without replacement, so the digits
        never repeat — presumably good enough for form filling.
        """
        head = self.__PHONE_HEAD[random.randrange(0, len(self.__PHONE_HEAD))]
        return head + string.join(random.sample(self.__PHONE_BASE, digit)).replace(" ", "")
    def random_mail(self):
        """Return a random e-mail address with a 1-32 character local part."""
        digit = random.randint(1, 32)
        tail = self.__MAIL_ORG[random.randrange(0, len(self.__MAIL_ORG))]
        return string.join(random.sample(self.__MAIL_BASE, digit)).replace(" ", "") + tail
    def get_vin_engine(self):
        """POST the quote form once and regex the VIN / engine number out of
        the returned HTML.  Returns (ok, vin, engine)."""
        plate = self.plate
        name = self.name
        # Input validation (disabled in the original):
        # if plate == '' or name == '' or plate[0:3] != '\xe4\xba\xac':
        # return False, self.vin, self.engine
        # self.post.setdefault('paraMap.licence', plate)
        # self.post.setdefault('paraMap.contactor', name)
        try:
            # Cookie jar so the scrape session survives the POST.
            cj = cookielib.CookieJar()
            # Build an opener around the cookie processor (debug handlers on).
            httpHandler = urllib2.HTTPHandler(debuglevel=1)
            httpsHandler = urllib2.HTTPSHandler(debuglevel=1)
            opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj), httpHandler, httpsHandler)
            # opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
            # Masquerade as a normal desktop browser.
            opener.addheaders = [('User-agent',
                                  'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.115 Safari/537.36')]
            data = urllib.urlencode(self.post)
            op = opener.open(self.url, data)
            da = op.read()
            # The frameNo form field value carries the 17-character VIN.
            rev = re.search(r'name="paraMap.frameNo" value=[^i]{18}', da)
            if rev is None:
                return False, '', ''
            sv = rev.group()
            self.vin = sv[-17:]
            ree = re.search(r'name="paraMap.engineNo" value=(.*)["]', da)
            if ree is None:
                return False, '', ''
            # Successive narrowing regexes peel quotes off the engine number.
            b = re.search(r'value=([^ ]*)', ree.group())
            if b is None:
                return False, '', ''
            c = re.search(r'="(.*)[^"]', b.group())
            if c is None:
                return False, '', ''
            d = re.search(r'[^=^"](.*)', c.group())
            if d is None:
                return False, '', ''
            self.engine = d.group()
            # print self.vin, self.engine
            if self.verify(self.vin):
                return True, self.vin.upper(), self.engine
            else:
                return False, self.vin, self.engine
        except Exception, e:
            # print e
            return False, self.vin, self.engine
    def verify(self, v=''):
        """
        Validate a VIN; returns True on success.
        Rejects I/O/Q characters, non-17-char input, and a wrong position-9
        check digit.
        """
        vv = str(v).upper()
        for C in self.__VIN_NO_CONTAIN:
            if C in vv:
                return False
        if vv in ('', None):
            return False
        if type(vv) != str:
            return False
        if len(vv) != 17:
            return False
        # Recompute the position-9 check digit and compare.
        count = 0
        ki = 0
        for i in vv:
            ki += 1
            if ki == 9:
                continue
            count += self.__VIN_VALUE[i] * self.__VIN_WEIGHT[ki]
        count %= 11
        if count == 10:
            count = 'X'
        else:
            count = str(count)
        if count == vv[8]:
            return True
        else:
            return False
# 输出JSON数据
class PrintJson(object):
    """Build the standard JSON response envelope used by the API views.

    Fields: ``time`` (creation timestamp), ``error`` (int code, 0 = ok),
    ``msg``, ``data``, ``url`` and ``len``.  ``echo`` merges overrides into
    the envelope and returns it serialized.

    Fix: the Python-2-only ``<>`` operator is replaced by the equivalent
    ``!=`` (same semantics, forward-compatible with Python 3).
    """
    def __init__(self):
        self._time = str(datetime.datetime.now())
        self._error = 0
        self._msg = None
        self._data = {}
        self._url = ''
        self._jsonvalue = ''
        self._jsonstr = ''
        self._len = 0
    def echo(self, msg=None, data=None, error=0, url=None, _len=None):
        """Apply non-None / non-empty overrides and return the JSON string.

        Note: falsy-but-meaningful values such as 0 still pass the
        ``!= None and != ""`` guard, so an explicit ``error=0`` is kept.
        """
        if error != None and error != "":
            self._error = error
        if msg != None and msg != "":
            self._msg = msg
        if data != None and data != "":
            self._data = data
        if url != None and url != "":
            self._url = url
        if _len != None and _len != "":
            self._len = _len
        self._jsonvalue = {"time": self._time, "error": self._error, "msg": self._msg, "data": self._data,
                           "url": self._url, "len": self._len}
        self._jsonstr = json.dumps(self._jsonvalue)
        return self._jsonstr
class GetCarInfo(object):
    '''
    Look up vehicle model data from insurer web services (sinosig/阳光 and
    AXA/安盛), caching hits into the local bxcarvin / bxcarinfo tables.

    Value      -- a VIN, or a Chinese model-name string
    searchType -- 1 to search by VIN, 0 to search by model name
    '''
    def __init__(self, Value="", searchType=1):
        self.searchType = searchType
        self.Value = Value
        # JSONP endpoint for the sinosig model lookup; the timestamp doubles
        # as the callback name.
        self.GstUrl_YG = "http://chexian.sinosig.com/Partner/netVehicleModel.action?searchCode=%s&searchType=%s&encoding=utf-8&isSeats=1&pageSize=1&&callback=%s" % (
            self.Value, self.searchType, str(int(time.time())))
        # print(self.GstUrl_YG)
    # Fetch the sinosig model info over the network, preferring the DB cache.
    def GetInfo_YG(self):
        # Serve from the local cache when this vehicle was looked up before.
        ROWS = self.GetDBSelect(type="sinosig")
        if ROWS:
            return ROWS
        try:
            CarInfoList = urllib.urlopen(self.GstUrl_YG).read()
            # print(CarInfoList)
            # Strip the JSONP wrapper down to bare JSON.
            CarInfoList = re.sub(r'([a-zA-Z_0-9\.]*\()|(\);?$)', '', CarInfoList)
            CarInfoJson = json.loads(CarInfoList)
            if len(CarInfoJson['rows']) < 1:
                ROWS = False
            else:
                ROWS = CarInfoJson['rows'][0]
        except Exception as e:
            # print(e)
            # Retry once with an explicit browser User-Agent and a timeout.
            i_headers = {
                "User-Agent": "Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN; rv:1.9.1) Gecko/20090624 Firefox/3.5",
                "Accept": "text/plain"
            }
            # CarInfoList = urllib2.urlopen(self.GstUrl_YG, timeout=20)
            CarInfoList = urllib2.Request(self.GstUrl_YG, headers=i_headers)
            CarInfoList = urllib2.urlopen(CarInfoList, timeout=20).read()
            CarInfoList = re.sub(r'([a-zA-Z_0-9\.]*\()|(\);?$)', '', CarInfoList)
            CarInfoJson = json.loads(CarInfoList)
            if len(CarInfoJson['rows']) < 1:
                ROWS = False
            else:
                ROWS = CarInfoJson['rows'][0]
        return ROWS
    # Fetch AXA model info via the legacy selectCarInfo endpoint.
    def GetAnshengCarInfo(self, cityCode="340100", pageSize="1", str=False):
        # NOTE(review): the ``str`` parameter shadows the builtin; when given
        # it supplies the fgw search code directly.
        if str:
            V = str
        else:
            CarYG = self.GetInfo_YG()
            # print(CarYG)
            if CarYG == False:
                return False
            V = CarYG['vehiclefgwcode']
        try:
            # Normalise the fgw code: drop ')' and anything after '.' or ' '.
            V = V.replace(")", "")
            V = V.split(".")
            V = V[0]
            V = V.split(" ")
            V = V[0]
            # print(V)
            AnshengUrl = "http://chexian.axatp.com/selectCarInfo.do?cityCode=%s&searchCode=%s&pageSize=%s&encoding=utf-8&page=1&callback=json" % (
                cityCode, V, pageSize)
            CarInfoList = urllib.urlopen(AnshengUrl).read()
            CarInfoList = re.sub(r'([a-zA-Z_0-9\.]*\()|(\);?$)', '', CarInfoList)
            CarInfoJson = json.loads(CarInfoList.decode("GBK"))
            # print(AnshengUrl)
            if len(CarInfoJson['rows']) < 1:
                # No hit: progressively shorten the search code and retry.
                for i in range(len(V)):
                    V = V[:-i-1]
                    AnshengUrl = "http://chexian.axatp.com/selectCarInfo.do?cityCode=%s&searchCode=%s&pageSize=%s&encoding=utf-8&page=1&callback=json" % (
                        cityCode, V, pageSize)
                    # print(AnshengUrl)
                    CarInfoList = urllib.urlopen(AnshengUrl).read()
                    CarInfoList = re.sub(r'([a-zA-Z_0-9\.]*\()|(\);?$)', '', CarInfoList)
                    CarInfoJson = json.loads(CarInfoList.decode("GBK"))
                    if len(CarInfoJson['rows']) > 0:
                        # Cache the hit against the VIN before returning.
                        if self.searchType == 1:
                            GetCarArr = bxcarvin.objects.filter(vin=self.Value)
                            if GetCarArr.count() > 0:
                                if GetCarArr[0].bxcarinfo_set.filter(key=CarInfoJson['rows'][0]['key'],
                                                                     value=CarInfoJson['rows'][0]['value'],
                                                                     bxtype="axatp").count() < 1:
                                    AddInfo = GetCarArr[0].bxcarinfo_set.create(key=CarInfoJson['rows'][0]['key'],
                                                                                value=CarInfoJson['rows'][0]['value'],
                                                                                bxtype="axatp", vehiclefgwcode=V)
                                    AddInfo.save()
                        return CarInfoJson['rows'][0]
            else:
                if self.searchType == 1:
                    GetCarArr = bxcarvin.objects.filter(vin=self.Value)
                    if GetCarArr.count() > 0:
                        if GetCarArr[0].bxcarinfo_set.filter(key=CarInfoJson['rows'][0]['key'],
                                                             value=CarInfoJson['rows'][0]['value'],
                                                             bxtype="axatp").count() < 1:
                            AddInfo = GetCarArr[0].bxcarinfo_set.create(key=CarInfoJson['rows'][0]['key'],
                                                                        value=CarInfoJson['rows'][0]['value'],
                                                                        bxtype="axatp", vehiclefgwcode=V)
                            AddInfo.save()
                return CarInfoJson['rows'][0]
        except:
            return False
    # Fetch AXA model info via the newer ajaxCarBrandSelect endpoint,
    # matching entries on the sinosig row's key/rbCode.
    def GetAnshengCarInfoNEW(self, cityCode="340100", pageSize="1", str=False):
        # NOTE(review): when ``str`` is given, D is never assigned, so the
        # match loop raises NameError which the bare except turns into False.
        if str:
            V = str
        else:
            CarYG = self.GetInfo_YG()
            # print(CarYG)
            if CarYG == False:
                return False
            V = CarYG['vehiclefgwcode']
            # print(V)
            D = CarYG['key']
            # print(D)
        try:
            # V = V.replace(")", "")
            # V = V.split(".")
            # V = V[0]
            # V = V.split(" ")
            # V = V[0]
            # print(V)
            # AnshengUrl = "http://chexian.axatp.com/selectCarInfo.do?cityCode=%s&searchCode=%s&pageSize=%s&encoding=utf-8&page=1&callback=json" % (
            # cityCode, V, pageSize)
            AnshengUrl = "http://chexian.axatp.com/ajaxCarBrandSelect.do?operation=configByInput&searchCode=%s&cityCode=%s&firstRegisterDate=&ecInsureId=F9A4168E5B47C8FABEE62B61F8D561FA7C91A135ABAB76F5&isRenewal=&isAgent=0&localProvinceCode=&planDefineId=3&rt=" % (
                V,cityCode)
            CarInfoList = urllib.urlopen(AnshengUrl).read()
            # CarInfoList = re.sub(r'([a-zA-Z_0-9\.]*\()|(\);?$)', '', CarInfoList)
            CarInfoJson = json.loads(CarInfoList.decode("GBK"))
            # print(AnshengUrl)
            # print(D)
            # print(CarInfoJson['data'])
            if len(CarInfoJson['data']) < 1:
                # No hit: progressively shorten the search code and retry.
                for i in range(len(V)):
                    V = V[:-i-1]
                    # AnshengUrl = "http://chexian.axatp.com/selectCarInfo.do?cityCode=%s&searchCode=%s&pageSize=%s&encoding=utf-8&page=1&callback=json" % (
                    # cityCode, V, pageSize)
                    AnshengUrl = "http://chexian.axatp.com/ajaxCarBrandSelect.do?operation=configByInput&searchCode=%s&cityCode=%s&firstRegisterDate=&ecInsureId=F9A4168E5B47C8FABEE62B61F8D561FA7C91A135ABAB76F5&isRenewal=&isAgent=0&localProvinceCode=&planDefineId=3&rt=" % (
                        V,cityCode)
                    print(AnshengUrl)
                    CarInfoList = urllib.urlopen(AnshengUrl).read()
                    CarInfoList = re.sub(r'([a-zA-Z_0-9\.]*\()|(\);?$)', '', CarInfoList)
                    CarInfoJson = json.loads(CarInfoList.decode("GBK"))
                    if len(CarInfoJson['data']) > 0:
                        if self.searchType == 1:
                            GetCarArr = bxcarvin.objects.filter(vin=self.Value)
                            if GetCarArr.count() > 0:
                                for i in range(len(CarInfoJson['data'])):
                                    if CarInfoJson['data'][i]['rbCode'] == D:
                                        if GetCarArr[0].bxcarinfo_set.filter(key=CarInfoJson['data'][i]['id'],
                                                                             value=CarInfoJson['data'][i]['name'],
                                                                             bxtype="axatp").count() < 1:
                                            AddInfo = GetCarArr[0].bxcarinfo_set.create(key=CarInfoJson['data'][i]['id'],
                                                                                        value=CarInfoJson['data'][i]['name'],
                                                                                        bxtype="axatp", vehiclefgwcode=V)
                                            AddInfo.save()
                                        return CarInfoJson['data'][i]
            else:
                if self.searchType == 1:
                    GetCarArr = bxcarvin.objects.filter(vin=self.Value)
                    if GetCarArr.count() > 0:
                        for i in range(len(CarInfoJson['data'])):
                            if CarInfoJson['data'][i]['rbCode'] == D:
                                if GetCarArr[0].bxcarinfo_set.filter(key=CarInfoJson['data'][i]['id'],
                                                                     value=CarInfoJson['data'][i]['name'],
                                                                     bxtype="axatp").count() < 1:
                                    AddInfo = GetCarArr[0].bxcarinfo_set.create(key=CarInfoJson['data'][i]['id'],
                                                                                value=CarInfoJson['data'][i]['name'],
                                                                                bxtype="axatp", vehiclefgwcode=V)
                                    AddInfo.save()
                                return CarInfoJson['data'][i]
        except:
            return False
    # Read cached model info straight from the database.
    def GetDBSelect(self, type=""):
        # NOTE(review): ``type`` shadows the builtin; it is the insurer tag
        # ('sinosig' / 'axatp').
        GetCarArr = bxcarvin.objects.filter(vin=self.Value)
        if GetCarArr.count() < 1:
            return False
        Info = GetCarArr[0].bxcarinfo_set.filter(bxtype=type)
        if Info.count() < 1:
            return False
        else:
            CarInfo = Info.values()[0]
            return CarInfo
class FengChaoCrypt(object):
    """Password-based AES-CBC helper, hex- or base64-armoured.

    Key derivation: 1337 rounds of sha256(key + salt).  The IV and salt are
    appended to the ciphertext so AESdecrypt can recover them.
    """
    def AESencrypt(self, plaintext='', password='fengchao', base64=False):
        # Returns ciphertext || iv || salt, hex-encoded (or base64 when asked).
        SALT_LENGTH = 32
        DERIVATION_ROUNDS = 1337
        BLOCK_SIZE = 16
        KEY_SIZE = 32
        MODE = AES.MODE_CBC
        salt = os.urandom(SALT_LENGTH)
        iv = os.urandom(BLOCK_SIZE)
        # PKCS#7-style padding to a 16-byte boundary.
        paddingLength = 16 - (len(plaintext) % 16)
        paddedPlaintext = plaintext + chr(paddingLength) * paddingLength
        derivedKey = password
        for i in range(0, DERIVATION_ROUNDS):
            derivedKey = hashlib.sha256(derivedKey + salt).digest()
        derivedKey = derivedKey[:KEY_SIZE]
        cipherSpec = AES.new(derivedKey, MODE, iv)
        ciphertext = cipherSpec.encrypt(paddedPlaintext)
        ciphertext = ciphertext + iv + salt
        if base64:
            # NOTE(review): this local import rebinds the boolean parameter
            # name ``base64`` to the stdlib module — works, but confusing.
            import base64
            return base64.b64encode(ciphertext)
        else:
            return ciphertext.encode("hex")
    def AESdecrypt(self, ciphertext='', password='fengchao', base64=False):
        # Inverse of AESencrypt: split off iv/salt, re-derive the key, unpad.
        import hashlib
        from Crypto.Cipher import AES
        SALT_LENGTH = 32
        DERIVATION_ROUNDS = 1337
        BLOCK_SIZE = 16
        KEY_SIZE = 32
        MODE = AES.MODE_CBC
        if base64:
            # NOTE(review): same parameter/module shadowing as in AESencrypt.
            import base64
            decodedCiphertext = base64.b64decode(ciphertext)
        else:
            decodedCiphertext = ciphertext.decode("hex")
        startIv = len(decodedCiphertext) - BLOCK_SIZE - SALT_LENGTH
        startSalt = len(decodedCiphertext) - SALT_LENGTH
        data, iv, salt = decodedCiphertext[:startIv], decodedCiphertext[startIv:startSalt], decodedCiphertext[
            startSalt:]
        derivedKey = password
        for i in range(0, DERIVATION_ROUNDS):
            derivedKey = hashlib.sha256(derivedKey + salt).digest()
        derivedKey = derivedKey[:KEY_SIZE]
        cipherSpec = AES.new(derivedKey, MODE, iv)
        plaintextWithPadding = cipherSpec.decrypt(data)
        # Last padding byte encodes the padding length.
        paddingLength = ord(plaintextWithPadding[-1])
        plaintext = plaintextWithPadding[:-paddingLength]
        return plaintext
# 操作数据库
class BXDBAction(object):
    """Stateless data-access helper around the bxservice insurance models
    (cars, quotes, underwriting and payment-callback records)."""
    def __init__(self):
        pass
# 保存计算价格需要的信息
def CreateCarVin(self, licenseno="", ownername="", citycode="", vin="", engine="", user_id=''):
InDB = {"licenseno": licenseno,
"ownername": ownername,
"vin": vin,
"engine": engine,
"citycode": citycode}
AA,BB = bxcarvin.objects.get_or_create(**InDB)
return AA.id
# 车型信息
def CreateCarInfo(self, user_id="", car_id="", key="", vehiclefgwcode="", value="", bxtype=""):
if bxcarinfo.objects.filter(key=key, car_id=car_id).count() < 1:
InDB = {"car_id": car_id,
"key": key,
"vehiclefgwcode": vehiclefgwcode,
"value": value,
"bxtype": bxtype
}
if user_id and user_id <> "":
InDB['user_id'] = user_id
print(InDB)
print(1111)
CreateInfo = bxcarinfo.objects.create(**InDB)
if user_id and user_id <> "":
CreateInfo.user_id = user_id
CreateInfo.save()
return CreateInfo.id
else:
return False
# 判断是否已经存在车辆信息
def IsSet(self, licenseno="", citycode="", ownername="",id=False):
if id:
try:
R = bxcarvin.objects.get(id=id)
except ObjectDoesNotExist:
R = False
return R
else:
Car = bxcarvin.objects.filter(licenseno=licenseno, citycode=citycode, ownername=ownername)
if Car.exists():
CarDict = Car.values()[0]
return CarDict
else:
return False
def ReCarInfo(self, cid, bx="sinosig"):
GetInfoArr = bxcarinfo.objects.filter(car_id=cid, bxtype=bx)
if GetInfoArr.exists():
GetInfoArr = GetInfoArr.values()[0]
return GetInfoArr
else:
return False
# 将车辆投保算价所需信息写入数据库
def CreateTBSJinfo(
self,
licenseno="",
order_id="",
biz_begin_date="",
biz_end_date="",
traff_begin_date="",
traff_end_date="",
cs_traff_amt="",
dq_traff_amt="",
zr_traff_amt="",
first_register_date="",
bxtype="cic",
idcode=""):
# 查询是否已有这辆车,如果有,拿出车辆id
GetCarVin = bxcarvin.objects.filter(licenseno=licenseno)
if GetCarVin.count() > 0:
car_id = GetCarVin.values()[0]['id']
# 判断信息是否已经存在
IsSetVal = bxpriceinfo.objects.filter(car_id=car_id, bxtype=bxtype)
if IsSetVal.count() < 1:
InUPCarVINbase = bxcarvin.objects.get(id=car_id)
InUPCarVINbase.idcode = idcode
InUPCarVINbase.save()
CreateInfo = bxpriceinfo.objects.create(
car_id=car_id,
order_id=order_id,
biz_begin_date=biz_begin_date,
biz_end_date=biz_end_date,
traff_begin_date=traff_begin_date,
traff_end_date=traff_end_date,
cs_traff_amt=cs_traff_amt,
dq_traff_amt=dq_traff_amt,
zr_traff_amt=zr_traff_amt,
first_register_date=first_register_date,
bxtype=bxtype)
CreateInfo.save()
return CreateInfo.id
else:
return True
# 如果没有查询到车辆,则返回False
else:
# print("建立投保信息时没有找到车辆")
return False
# 查询投保算价所需要的信息
def GetTBSJinfo(self, licenseno="", bxtype=""):
GetCarVin = bxcarvin.objects.filter(licenseno=licenseno)
if GetCarVin.count() < 1:
return False
else:
CarID = GetCarVin.values()[0]['id']
GetInfo = bxpriceinfo.objects.filter(car_id=CarID, bxtype=bxtype)
if GetInfo.count() < 1:
return False
else:
GetInfo = GetInfo.values()[0]
GetCar = GetCarVin.values()[0]
return GetCar, GetInfo
# 中华报价信息
def CreatPriceinfo_zh(self,**pam):
GetCar = bxcarvin.objects.get(vin=pam['vin'],ownername=pam['ownername'])
IsSet= GetCar.bxzhpriceinfo_set.exists()
if IsSet:
GetCar.bxzhpriceinfo_set.update(
biztotalpremium = pam['biztotalpremium'],
vehicletaxpremium = pam['vehicletaxpremium'],
forcepremium = pam['forcepremium'],
bizbegindate = pam['bizbegindate'],
forcebegindate = pam['forcebegindate'],
kind_030004 = pam['kind_030004'],
kind_030006 = pam['kind_030006'],
kind_030012 = pam['kind_030012'],
kind_030018 = pam['kind_030018'],
kind_030025 = pam['kind_030025'],
kind_030059 = pam['kind_030059'],
kind_030065 = pam['kind_030065'],
kind_030070 = pam['kind_030070'],
kind_030072 = pam['kind_030072'],
kind_030106 = pam['kind_030106'],
kind_030125 = pam['kind_030125'],
kind_031901 = pam['kind_031901'],
kind_031902 = pam['kind_031902'],
kind_031903 = pam['kind_031903'],
kind_031911 = pam['kind_031911'],
kind_033531 = pam['kind_033531'],
kind_033532 = pam['kind_033532'],
kind_033535 = pam['kind_033535'],
)
else:
CreatINFO = GetCar.bxzhpriceinfo_set.create(
biztotalpremium = pam['biztotalpremium'],
vehicletaxpremium = pam['vehicletaxpremium'],
forcepremium = pam['forcepremium'],
bizbegindate = pam['bizbegindate'],
forcebegindate = pam['forcebegindate'],
kind_030004 = pam['kind_030004'],
kind_030006 = pam['kind_030006'],
kind_030012 = pam['kind_030012'],
kind_030018 = pam['kind_030018'],
kind_030025 = pam['kind_030025'],
kind_030059 = pam['kind_030059'],
kind_030065 = pam['kind_030065'],
kind_030070 = pam['kind_030070'],
kind_030072 = pam['kind_030072'],
kind_030106 = pam['kind_030106'],
kind_030125 = pam['kind_030125'],
kind_031901 = pam['kind_031901'],
kind_031902 = pam['kind_031902'],
kind_031903 = pam['kind_031903'],
kind_031911 = pam['kind_031911'],
kind_033531 = pam['kind_033531'],
kind_033532 = pam['kind_033532'],
kind_033535 = pam['kind_033535'],
)
CreatINFO.save()
# 回调浮动告知单
def IsRead(self,flag,orderno='0',businesscode=''):
GetInfo = bxpayinfo.objects.get(order_id=orderno)
Is = GetInfo.bxzhisread_set.exists()
if Is:
if businesscode == '11':
GetInfo.bxzhisread_set.update(
orderno=orderno,
businesscode_biz = "11",
businesscode_force = ""
)
if businesscode == '12':
GetInfo.bxzhisread_set.update(
orderno=orderno,
businesscode_force = "12"
)
else:
if businesscode == '11':
IsReadIN = GetInfo.bxzhisread_set.create(
orderno=orderno,
flag = flag,
businesscode_biz = businesscode,
businesscode_force = ""
)
IsReadIN.save()
if businesscode == '12':
IsReadIN = GetInfo.bxzhisread_set.create(
orderno=orderno,
flag = flag,
businesscode_biz = "",
businesscode_force = businesscode
)
IsReadIN.save()
# 读取浮动告知单信息
def GetRead(self,orderno,M):
try:
GetInfo = bxzhisread.objects.get(orderno=orderno)
except:
RE = {'error':'1','msg':'请阅读商业险浮动告知单或交强险浮动告知单并点击确认'}
return RE
if M == "11":
if GetInfo.businesscode_biz == "11":
RE = {'error':'0','msg':'1'}
return RE
else:
RE = {'error':'1','msg':'请阅读商业险浮动告知单并点击确认'}
return RE
if M == '12':
if GetInfo.businesscode_force == "12":
RE = {'error':'0','msg':'1'}
return RE
else:
RE = {'error':'1','msg':'请阅读交强险浮动告知单并点击确认'}
return RE
else:
RE = {'error':'1','msg':'请阅读商业险浮动告知单或交强险浮动告知单并点击确认'}
return RE
# 中华核保信息
def CreatePayInfo(self, **pam):
try:
GetCar = bxcarvin.objects.get(vin=pam['vin'],ownername=pam['C_IDET_NAME'])
if pam['flag']:
CreateInfoIsSet = GetCar.bxpayinfo_set.filter(app_name=pam['C_APP_NAME']).exists()
if CreateInfoIsSet:
GetCar.bxpayinfo_set.update(
session_id=pam['Session_ID'],
order_id=pam['ORDER_ID'],
app_name=pam['C_APP_NAME'],
app_ident_no=pam['C_APP_IDENT_NO'],
app_tel=pam['C_APP_TEL'],
app_addr=pam['C_APP_ADDR'],
app_email=pam['C_APP_EMAIL'],
insrnt_name=pam['C_INSRNT_NME'],
insrnt_ident_no=pam['C_INSRNT_IDENT_NO'],
insrnt_tel=pam['C_INSRNT_TEL'],
insrnt_addr=pam['C_INSRNT_ADDR'],
insrnt_email=pam['C_INSRNT_EMAIL'],
contact_name=pam['C_CONTACT_NAME'],
address=pam['C_ADDRESS'],
contact_tel=pam['C_CONTACT_TEL'],
idet_name=pam['C_IDET_NAME'],
ident_no=pam['C_IDENT_NO'],
delivery_province=pam['C_DELIVERY_PROVINCE'],
delivery_city=pam['C_DELIVERY_CITY'],
delivery_district=pam['C_DELIVERY_DISTRICT'],
bxgs_type=pam['bxgs_type'],
status=pam['status'],
)
else:
CreateInfo = GetCar.bxpayinfo_set.create(
session_id=pam['Session_ID'],
order_id=pam['ORDER_ID'],
app_name=pam['C_APP_NAME'],
app_ident_no=pam['C_APP_IDENT_NO'],
app_tel=pam['C_APP_TEL'],
app_addr=pam['C_APP_ADDR'],
app_email=pam['C_APP_EMAIL'],
insrnt_name=pam['C_INSRNT_NME'],
insrnt_ident_no=pam['C_INSRNT_IDENT_NO'],
insrnt_tel=pam['C_INSRNT_TEL'],
insrnt_addr=pam['C_INSRNT_ADDR'],
insrnt_email=pam['C_INSRNT_EMAIL'],
address=pam['C_ADDRESS'],
contact_tel=pam['C_CONTACT_TEL'],
idet_name=pam['C_IDET_NAME'],
ident_no=pam['C_IDENT_NO'],
delivery_province=pam['C_DELIVERY_PROVINCE'],
delivery_city=pam['C_DELIVERY_CITY'],
delivery_district=pam['C_DELIVERY_DISTRICT'],
businesscode=pam['businesscode'],
bxgs_type=pam['bxgs_type'],
)
CreateInfo.save()
else:
IsSet = GetCar.bxpayinfo_set.exists()
if IsSet:
GetCar.bxpayinfo_set.update(
c_proposal_no_biz = pam['c_proposal_no_biz'],
c_proposal_no_force = pam['c_proposal_no_force']
)
else:
return False
except ObjectDoesNotExist:
return False
# 中华回调信息
def CreateCallback(self, **pam):
GetInfo = bxpayinfo.objects.get(order_id=pam['ORDER_ID'])
print(GetInfo.app_name)
InfoIs = GetInfo.bxzhcallbackinfo_set.exists()
if InfoIs:
GetInfo.bxzhcallbackinfo_set.update(
chengbao_staus=pam['C_STAUS'],
message=pam['C_MESSAGE'],
biz_policy_no=pam['C_POLICY_NO_BIZ'],
force_policy_no=pam['C_POLICY_NO_FORCE'],
)
else:
CreateInfo = GetInfo.bxzhcallbackinfo_set.create(
order_id=pam['ORDER_ID'],
pay_transn = pam['C_PAY_TRANSNO'],
pay_amt=pam['C_PAY_AMT'],
pay_staus=pam['C_PAY_STAUS'],
pay_desc=pam['C_DESC'],
chengbao_staus=pam['C_STAUS'],
message=pam['C_MESSAGE'],
biz_policy_no=pam['C_POLICY_NO_BIZ'],
force_policy_no=pam['C_POLICY_NO_FORCE'],
businesscode=pam['BusinessCode']
)
CreateInfo.save()
def SelectPayInfo(self, vin=""):
ENCODE = FengChaoCrypt()
try:
vin = ENCODE.AESdecrypt(vin)
except:
vin = False
if vin:
GetCar = bxcarvin.objects.get(vin=vin)
PayInfoArr = GetCar.bxpayinfo_set
if PayInfoArr.count() > 0:
PayInfo = PayInfoArr.values()[0]
return PayInfo
else:
return False
# 查看订单状态
# 返回1是可以去支付,0是不能支付,False订单不存在
def OrderIDStatus(self, orderid=""):
try:
S = bxpayinfo.objects.get(order_id=orderid)
return S.status
except ObjectDoesNotExist:
return False
# 安盛核保信息
def CreatPayinfo_AS(self, **pam):
if pam['ID'] <> "" and pam['ID'] <> None:
GetCar = bxcarvin.objects.get(id=pam['ID'])
else:
GetCar = bxcarvin.objects.get(vin=pam['vin'],ownername=pam['ownername'])
CreateInfoIsSet = GetCar.bxashebaoinfo_set.exists()
if CreateInfoIsSet:
GetCar.bxashebaoinfo_set.update(
tborder_id=pam['tborder_id'],
item_id=pam['item_id'],
insuredname=pam['insuredname'],
insuredidno=pam['insuredidno'],
insuredmobile=pam['insuredmobile'],
insuredidtype=pam['insuredidtype'],
insuredgender=pam['insuredgender'],
insuredbirthday=pam['insuredbirthday'],
ownername=pam['ownername'],
owneridno=pam['owneridno'],
ownermobile=pam['ownermobile'],
owneremail=pam['owneremail'],
owneridtype=pam['owneridtype'],
ownergender=pam['ownergender'],
ownerbirthday=pam['ownerbirthday'],
ownerage=pam['ownerage'],
addresseename=pam['addresseename'],
addresseemobile=pam['addresseemobile'],
addresseeprovince=pam['addresseeprovince'],
addresseecity=pam['addresseecity'],
addresseetown=pam['addresseetown'],
addresseedetails=pam['addresseedetails'],
applicantname=pam['applicantname'],
applicantidno=pam['applicantidno'],
applicantmobile=pam['applicantmobile'],
applicantemail=pam['applicantemail'],
applicantbirthday=pam['applicantbirthday'],
applicantgender=pam['applicantgender'],
applicantidtype=pam['applicantidtype'],
bxgs_type=pam['bxgs_type'],
status = pam['status'],
session_id = pam['session_id'],
proposalno_biz=pam['proposalno_biz'],
proposalno_force=pam['proposalno_force']
)
else:
CreateInfo = GetCar.bxashebaoinfo_set.create(
tborder_id=pam['tborder_id'],
item_id=pam['item_id'],
insuredname=pam['insuredname'],
insuredidno=pam['insuredidno'],
insuredmobile=pam['insuredmobile'],
insuredidtype=pam['insuredidtype'],
insuredgender=pam['insuredgender'],
insuredbirthday=pam['insuredbirthday'],
ownername=pam['ownername'],
owneridno=pam['owneridno'],
ownermobile=pam['ownermobile'],
owneremail=pam['owneremail'],
owneridtype=pam['owneridtype'],
ownergender=pam['ownergender'],
ownerbirthday=pam['ownerbirthday'],
ownerage=pam['ownerage'],
addresseename=pam['addresseename'],
addresseemobile=pam['addresseemobile'],
addresseeprovince=pam['addresseeprovince'],
addresseecity=pam['addresseecity'],
addresseetown=pam['addresseetown'],
addresseedetails=pam['addresseedetails'],
applicantname=pam['applicantname'],
applicantidno=pam['applicantidno'],
applicantmobile=pam['applicantmobile'],
applicantemail=pam['applicantemail'],
applicantbirthday=pam['applicantbirthday'],
applicantgender=pam['applicantgender'],
applicantidtype=pam['applicantidtype'],
bxgs_type=pam['bxgs_type'],
status = pam['status'],
session_id = pam['session_id'],
proposalno_biz=pam['proposalno_biz'],
proposalno_force=pam['proposalno_force']
)
CreateInfo.save()
# 安盛报价信息
def CraetPriceinfo_as(self, **pam):
for n, v in pam.iteritems():
if n <> "BizFlag" and n <> "FroceFlag" and n <>"bizbegindate" and n <>"forcebegindate" \
and n <>"vin" and n <>"biztotalpremium" and n <>"totalpremium" and n <>"standardpremium" \
and n <>"forcepremium" and n <>"cov_600" and n <>"cov_701" and n <>"cov_702"\
and n <>"vehicletaxpremium" and n <>"forcepremium_f" and n <>"session_id" and n<> "ownername":
if float(v) > float(0):
pam[n] = 1
GetCar = bxcarvin.objects.get(vin=pam['vin'],ownername=pam['ownername'])
CreateInfoIsSet = GetCar.bxaspriceinfo_set.exists()
if CreateInfoIsSet:
GetCar.bxaspriceinfo_set.update(
bizflag=pam['BizFlag'],
forceflag=pam['FroceFlag'],
cov_200=pam['cov_200'],
cov_600=pam['cov_600'],
cov_701=pam['cov_701'],
cov_702=pam['cov_702'],
cov_500=pam['cov_500'],
cov_290=pam['cov_290'],
cov_231=pam['cov_231'],
cov_210=pam['cov_210'],
cov_310=pam['cov_310'],
cov_900=pam['cov_900'],
cov_910=pam['cov_910'],
cov_911=pam['cov_911'],
cov_912=pam['cov_912'],
cov_921=pam['cov_921'],
cov_922=pam['cov_922'],
cov_923=pam['cov_923'],
cov_924=pam['cov_924'],
cov_928=pam['cov_928'],
cov_929=pam['cov_929'],
cov_930=pam['cov_930'],
cov_931=pam['cov_931'],
biztotalpremium=pam['biztotalpremium'],
totalpremium=pam['totalpremium'],
standardpremium=pam['standardpremium'],
forcepremium=pam['forcepremium'],
bizbegindate=pam['bizbegindate'],
forcebegindate=pam['forcebegindate'],
forcepremium_f=pam['forcepremium_f'],
vehicletaxpremium = pam['vehicletaxpremium'],
session_id= pam['session_id']
)
else:
CreateInfo = GetCar.bxaspriceinfo_set.create(
bizflag=pam['BizFlag'],
forceflag=pam['FroceFlag'],
cov_200=pam['cov_200'],
cov_600=pam['cov_600'],
cov_701=pam['cov_701'],
cov_702=pam['cov_702'],
cov_500=pam['cov_500'],
cov_290=pam['cov_290'],
cov_231=pam['cov_231'],
cov_210=pam['cov_210'],
cov_310=pam['cov_310'],
cov_900=pam['cov_900'],
cov_910=pam['cov_910'],
cov_911=pam['cov_911'],
cov_912=pam['cov_912'],
cov_921=pam['cov_921'],
cov_922=pam['cov_922'],
cov_923=pam['cov_923'],
cov_924=pam['cov_924'],
cov_928=pam['cov_928'],
cov_929=pam['cov_929'],
cov_930=pam['cov_930'],
cov_931=pam['cov_931'],
biztotalpremium=pam['biztotalpremium'],
totalpremium=pam['totalpremium'],
standardpremium=pam['standardpremium'],
bizbegindate=pam['bizbegindate'],
forcebegindate=pam['forcebegindate'],
forcepremium = pam['forcepremium'],
forcepremium_f=pam['forcepremium_f'],
vehicletaxpremium = pam['vehicletaxpremium'],
session_id= pam['session_id']
)
CreateInfo.save()
def GetPriceinfo_as(self, vin="",ownername='',ID=""):
try:
if ID <> "" and ID <> None:
GetCar = bxcarvin.objects.get(id=ID)
else:
GetCar = bxcarvin.objects.get(vin=vin,ownername=ownername)
except:
return False
PriceinfoIsSet = GetCar.bxaspriceinfo_set.exists()
if PriceinfoIsSet:
CarInfo = GetCar.bxaspriceinfo_set.get()
return CarInfo
else:
return False
# 安盛回调信息存储
def CreatCallBack_as(self,**pam):
GetInfo = bxashebaoinfo.objects.get(session_id=pam['sessionid'])
CreateInfoIsSet = GetInfo.bxascallbackinfo_set.exists()
if CreateInfoIsSet:
GetInfo.bxascallbackinfo_set.update(
sessionid=pam['sessionid'],
requesttype=pam['requesttype'],
tborderid=pam['tborderid'],
premium=pam['premium'],
itemid=pam['itemid'],
bizpremium=pam['bizpremium'],
bizproposalno=pam['bizproposalno'],
bizpolicyno=pam['bizpolicyno'],
forcepremium=pam['forcepremium'],
forceproposalno = pam['forceproposalno'],
forcepolicyno = pam['forcepolicyno'],
status = pam['status']
)
else:
CrestInfo= GetInfo.bxascallbackinfo_set.create(
sessionid=pam['sessionid'],
requesttype=pam['requesttype'],
tborderid=pam['tborderid'],
premium=pam['premium'],
itemid=pam['itemid'],
bizpremium=pam['bizpremium'],
bizproposalno=pam['bizproposalno'],
bizpolicyno=pam['bizpolicyno'],
forcepremium=pam['forcepremium'],
forceproposalno = pam['forceproposalno'],
forcepolicyno = pam['forcepolicyno'],
status = pam['status']
)
CrestInfo.save()
def CreatPriceinfo_yg(self,**pam):
GetCar = bxcarvin.objects.get(vin=pam['vin'],ownername=pam['ownername'])
CreateInfoIsSet = GetCar.bxygpriceinfo_set.exists()
if CreateInfoIsSet:
GetCar.bxygpriceinfo_set.update(
forceflag=pam['forceflag'],
cov_200=pam['cov_200'],
cov_600=pam['cov_600'],
cov_701=pam['cov_701'],
cov_702=pam['cov_702'],
cov_500=pam['cov_500'],
cov_291=pam['cov_291'],
cov_231=pam['cov_231'],
cov_210=pam['cov_210'],
cov_310=pam['cov_310'],
cov_390=pam['cov_390'],
cov_640=pam['cov_640'],
cov_911=pam['cov_911'],
cov_912=pam['cov_912'],
cov_921=pam['cov_921'],
cov_922=pam['cov_922'],
cov_928=pam['cov_928'],
cov_929=pam['cov_929'],
biztotalpremium=pam['biztotalpremium'], # 商业保费
totalpremium=pam['totalpremium'], # 网购价
standardpremium=pam['standardpremium'], # 市场价
forcepremium=pam['forcepremium'], # 交强险
bizbegindate=pam['bizbegindate'], # 商业险起期
forcebegindate=pam['forcebegindate'], # 交强险起期
forceotalpremium=pam['forceotalpremium'], # 交强总保费
vehicletaxpremium=pam['vehicletaxpremium'],
session_id=pam['session_id']
)
else:
CreateInfo = GetCar.bxygpriceinfo_set.create(
forceflag=pam['forceflag'],
cov_200=pam['cov_200'],
cov_600=pam['cov_600'],
cov_701=pam['cov_701'],
cov_702=pam['cov_702'],
cov_500=pam['cov_500'],
cov_291=pam['cov_291'],
cov_231=pam['cov_231'],
cov_210=pam['cov_210'],
cov_310=pam['cov_310'],
cov_390=pam['cov_390'],
cov_640=pam['cov_640'],
cov_911=pam['cov_911'],
cov_912=pam['cov_912'],
cov_921=pam['cov_921'],
cov_922=pam['cov_922'],
cov_928=pam['cov_928'],
cov_929=pam['cov_929'],
biztotalpremium=pam['biztotalpremium'], # 商业保费
totalpremium=pam['totalpremium'], # 网购价
standardpremium=pam['standardpremium'], # 市场价
forcepremium=pam['forcepremium'], # 交强险
bizbegindate=pam['bizbegindate'], # 商业险起期
forcebegindate=pam['forcebegindate'], # 交强险起期
forceotalpremium=pam['forceotalpremium'], # 交强总保费
vehicletaxpremium=pam['vehicletaxpremium'],
session_id=pam['session_id']
)
CreateInfo.save()
def CreatPayinfo_yg(self,**pam):
GetCar = bxcarvin.objects.get(vin=pam['vin'],ownername=pam['ownername'])
CreateInfoIsSet = GetCar.bxyghebaoinfo_set.exists()
if CreateInfoIsSet:
GetCar.bxyghebaoinfo_set.update(
tborder_id=pam['tborder_id'],
item_id=pam['item_id'],
insuredname=pam['insuredname'],
insuredidno=pam['insuredidno'],
insuredmobile=pam['insuredmobile'],
insuredemail = pam['insuredemail'],
ownername=pam['ownername'],
owneridno=pam['owneridno'],
ownermobile=pam['ownermobile'],
owneremail=pam['owneremail'],
addresseename=pam['addresseename'],
addresseemobile=pam['addresseemobile'],
senddate=pam['senddate'],
addresseeprovince=pam['addresseeprovince'],
addresseecity=pam['addresseecity'],
addresseetown=pam['addresseetown'],
addresseedetails=pam['addresseedetails'],
applicantname=pam['applicantname'],
applicantidno=pam['applicantidno'],
applicantmobile=pam['applicantmobile'],
applicantemail=pam['applicantemail'],
insuredaddresseeDetails = pam['insuredaddresseeDetails'], # 被保险人身份证地址
bxgs_type=pam['bxgs_type'],
status=pam['status'],
session_id=pam['session_id'],
proposalno_biz=pam['proposalno_biz'],
proposalno_force=pam['proposalno_force']
)
else:
CreateInfo = GetCar.bxyghebaoinfo_set.create(
tborder_id=pam['tborder_id'],
item_id=pam['item_id'],
insuredname=pam['insuredname'],
insuredidno=pam['insuredidno'],
insuredmobile=pam['insuredmobile'],
insuredemail = pam['insuredemail'],
ownername=pam['ownername'],
owneridno=pam['owneridno'],
ownermobile=pam['ownermobile'],
owneremail=pam['owneremail'],
addresseename=pam['addresseename'],
addresseemobile=pam['addresseemobile'],
senddate="",
addresseeprovince=pam['addresseeprovince'],
addresseecity=pam['addresseecity'],
addresseetown=pam['addresseetown'],
addresseedetails=pam['addresseedetails'],
applicantname=pam['applicantname'],
applicantidno=pam['applicantidno'],
applicantmobile=pam['applicantmobile'],
applicantemail=pam['applicantemail'],
insuredaddresseeDetails = pam['insuredaddresseeDetails'], # 被保险人身份证地址
bxgs_type=pam['bxgs_type'],
status=pam['status'],
session_id=pam['session_id'],
proposalno_biz=pam['proposalno_biz'],
proposalno_force=pam['proposalno_force']
)
CreateInfo.save()
def GetYgInfo(self,vin='',ownername='',ID=''):
try:
if ID <> '' and ID <> None:
GetCar = bxcarvin.objects.get(id=ID)
else:
GetCar = bxcarvin.objects.get(vin=vin,ownername=ownername)
except:
return False
PriceinfoIsSet = GetCar.bxygpriceinfo_set.exists()
if PriceinfoIsSet:
YgInfo = GetCar.bxygpriceinfo_set.get()
return YgInfo
else:
return False
def GetYgHeboInfo(self,vin='',ownername='',ID=""):
try:
if ID <> '' and ID <>None:
GetCar = bxcarvin.objects.get(id=ID)
else:
GetCar = bxcarvin.objects.get(vin=vin,ownername=ownername)
except:
return False
PriceinfoIsSet = GetCar.bxyghebaoinfo_set.exists()
if PriceinfoIsSet:
YgInfo = GetCar.bxyghebaoinfo_set.get()
return YgInfo
else:
return False
def CreatCallBack_yg(self,**pam):
Gethebao = bxyghebaoinfo.objects.get(session_id=pam['session_id'])
IsSet = Gethebao.bxygcallbackinfo_set.filter().exists()
if IsSet:
Gethebao.bxygcallbackinfo_set.update(
session_id=pam['session_id'],
usercode = pam['usercode'],
orderno_biz = pam['orderno_biz'],
orderno_force = pam['orderno_force'],
proposalno_biz = pam['proposalno_biz'],
policyno_biz = pam['policyno_biz'],
proposalno_force = pam['proposalno_force'],
policyno_force = pam['policyno_force'],
startdate = pam['startdate'],
enddate = pam['enddate'],
forcepremium = pam['forcepremium'],
vehicletaxpremium = pam['vehicletaxpremium'],
paytime = pam['paytime'],
bizpremium = pam['bizpremium']
)
else:
GetInfo = Gethebao.bxygcallbackinfo_set.create(
session_id=pam['session_id'],
usercode = pam['usercode'],
orderno_biz = pam['orderno_biz'],
orderno_force = pam['orderno_force'],
proposalno_biz = pam['proposalno_biz'],
policyno_biz = pam['policyno_biz'],
proposalno_force = pam['proposalno_force'],
policyno_force = pam['policyno_force'],
startdate = pam['startdate'],
enddate = pam['enddate'],
forcepremium = pam['forcepremium'],
vehicletaxpremium = pam['vehicletaxpremium'],
paytime = pam['paytime'],
bizpremium = pam['bizpremium']
)
GetInfo.save()
def GetCityName(self,CityCode):
try:
IsGet=citycode.objects.filter(mid=CityCode)
if IsGet[0].citycode_yg == "" and IsGet[0].citycode_yg == None:
return IsGet[0].name,IsGet[0].citycode_yg
else:
return IsGet[0].name,IsGet[0].citycode_yg
except:
print(CityCode)
IsGet=citycode.objects.get(mid=CityCode)
if IsGet.citycode_yg == "" and IsGet.citycode_yg == None:
return IsGet.name,IsGet.citycode_yg
else:
return IsGet.name,IsGet.citycode_yg
def CreatCitycode(self,CityCode,citycode_yg):
try:
IsGet=citycode.objects.filter(mid=CityCode)
if IsGet:
IsGet[0].citycode_yg=citycode_yg
IsGet[0].save()
return True
else:
return False
except:
IsGet=citycode.objects.get(mid=CityCode)
if IsGet:
IsGet.citycode_yg=citycode_yg
IsGet.save()
return True
else:
return False
def CreatCallBackLog(self,xml,bxgs,interface_type):
SHIJIAN = time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time()))
loginfo = callbaklog.objects.create(
log = xml,
addtime = SHIJIAN,
bxgs_type = bxgs,
interface_type = interface_type
)
loginfo.save()
def GetCallBackInfo(self,sessionid='',bxgs=''):
if sessionid <> "" and bxgs <> "" :
if bxgs == 'zh':
GetInfo = bxpayinfo.objects.get(session_id=sessionid)
if GetInfo:
try:
Getorderno = GetInfo.bxzhcallbackinfo_set.get()
RE = {'error':'0','orderno':Getorderno.order_id,"bxgs":'zh'}
return RE
except:
RE = {'error':'1','orderno':""}
return RE
else:
RE = {'error':'1','orderno':""}
return RE
if bxgs == 'as':
try:
GetInfo = bxascallbackinfo.objects.get(sessionid=sessionid)
RE = {'error':'0','orderno':GetInfo.tborderid,"bxgs":'as'}
return RE
except:
RE = {'error':'1','orderno':""}
return RE
if bxgs == "yg":
try:
GetInfo = bxygcallbackinfo.objects.get(session_id=sessionid)
GetInfo = bxyghebaoinfo.objects.get(id=GetInfo.hebao_id)
RE = {'error':'0','orderno':GetInfo.tborder_id,"bxgs":'yg'}
return RE
except:
RE = {'error':'1','orderno':""}
return RE
def GetHeBaoInfo(self,order='',bxgs=''):
if order == '' or bxgs == "":
return False
if bxgs == "zh":
HeBaoInfo = bxpayinfo.objects.filter(order_id=order)
return HeBaoInfo[0]
if bxgs == 'as':
HeBaoInfo = bxashebaoinfo.objects.filter(tborder_id=order)
return HeBaoInfo[0]
if bxgs == 'yg':
HeBaoInfo = bxyghebaoinfo.objects.filter(tborder_id=order)
return HeBaoInfo[0]
class IsTerminal():
    """Helpers bound to a request's User-Agent string.

    ``IsTer``  — True for mobile-ish agents (or unknown), False for Windows.
    ``IsNull`` — validate a sequence of (field_name, value) form pairs.
    """

    # Fields that must be non-empty 18-character ID-card numbers.
    ID_FIELDS = ('C_IDENT_NO', 'C_APP_IDENT_NO', 'C_INSRNT_IDENT_NO')
    # Fields that only need to be non-empty.
    REQUIRED_FIELDS = (
        'C_INSRNT_EMAIL', 'C_APP_EMAIL', 'C_INSRNT_ADDR', 'C_INSRNT_TEL',
        'C_APP_ADDR', 'C_CONTACT_TEL', 'C_APP_NAME', 'C_IDET_NAME',
        'C_INSRNT_NME', 'C_ADDRESS', 'C_APP_TEL', 'C_CONTACT_NAME',
    )

    def __init__(self, agent):
        # Keep only ASCII letters; enough for the substring checks below.
        self.agent = ''.join(re.findall(r'[a-zA-Z]', agent))

    def IsTer(self):
        """Return True for mobile agents (Android/iPhone/CFNetwork) and for
        unknown/empty agents; False only for desktop Windows."""
        for marker in ('Android', 'iPhone', 'CFNetwork'):
            if marker in self.agent:
                print(self.agent)
                return True
        if 'Windows' in self.agent:
            print(self.agent)
            return False
        # Unknown or empty UA: both original fall-through branches
        # returned True, so the dead empty-string check is collapsed.
        return True

    def IsNull(self, INFO=''):
        """Return True if INFO is '' or any known field is missing/invalid.

        ID-number fields must be exactly 18 characters; the other known
        fields must simply be non-empty.  The first failing pair wins,
        matching the original per-field if-chain.  Unknown field names are
        ignored, and an empty *list* still returns False (only the ''
        default short-circuits), as before.
        """
        if INFO == "":
            return True
        for item in INFO:
            name, value = item[0], item[1]
            if name in self.ID_FIELDS:
                if value == '' or len(value) != 18:
                    return True
            elif name in self.REQUIRED_FIELDS:
                if value == '':
                    return True
        return False
# Fetch the vehicle register date (scraped from ecpic.com.cn's quote flow).
class GetregisterDate():
    """Scraper that asks CPIC's public quote flow for a vehicle's
    registration date, using a randomly generated contact identity."""

    def GetregisterDate(self, licenseno, vin, engineNo, CityCode='110100'):
        """Return the registerDate string for (licenseno, vin, engineNo).

        CityCode resolves the province code from the local citycode table;
        Beijing (110100) and Tianjin (120100) are special-cased because
        they report themselves at province level.  Raises on network
        errors or unexpected responses.
        """
        prefixes = ['133', '153', '180', '181', '189', '177',
                    '130', '131', '132', '155', '156', '145',
                    '185', '186', '176', '178', '188', '147',
                    '134', '135', '136', '137', '138', '139',
                    '150', '151', '152', '157', '158', '159',
                    '182', '183', '184', '187']
        # BUG FIX: random.randint's upper bound is inclusive, so
        # randint(0, len(seq)) can index out of range.  The original
        # papered over that with try/except for the phone list and left it
        # unguarded (IndexError) for the birthday list; random.choice is
        # the correct idiom for both.
        self.phone = random.choice(prefixes) + str(random.randint(10000000, 99999999))
        birthdays = ['0909', '0304', '0903', '0806', '0902', '0103', '0203']
        bri = random.choice(birthdays)
        P = citycode.objects.get(mid=CityCode)
        citycodeNEW = P.pid
        if CityCode == '110100':
            citycodeNEW = '110000'
        if CityCode == '120100':
            citycodeNEW = '120000'
        url = "http://www.ecpic.com.cn/cpiccar/sales/businessCollect/submitVehicleBaseInfo"
        body = {"VehicleInfo": {"driveArea": 2, "plateNo": licenseno},
                "Opportunity": {"licenseOwner": "一口价", "mobile": self.phone, "email": "", "giftInfo": "",
                                "extCustomerInfo": "", "externalOrderNo": "", "buyerNick": "", "buyerId": "",
                                "bannerPath": ""},
                "PolicyBaseInfo": {"provinceCode": citycodeNEW, "cityCode": CityCode, "branchCode": "",
                                   "orgdeptCode": "", "otherSource": "", "cmpId": ""},
                "productList": '', "hideUserName": "", "userId": "", "registNo": "", "zjhm": bri}
        # First call registers the enquiry and hands back a random token
        # plus order number required by the follow-up query.
        REDICT = self.SendRE(url=url, body=body)
        bodyNEW = {"VehicleInfo": {"driveArea": 2, "carVIN": vin, "engineNo": engineNo, "plateNo": licenseno},
                   "PolicyBaseInfo": {"provinceCode": citycodeNEW, "cityCode": CityCode},
                   "random": REDICT['random'], "orderNo": REDICT['orderNo']}
        urlNEW = 'http://www.ecpic.com.cn/cpiccar/sale/businessCollect/queryVehicleModelByVinAndEngineNo'
        REDICT = self.SendRE(url=urlNEW, body=bodyNEW)
        return REDICT['registerDate']

    def SendRE(self, url, body):
        """POST *body* as JSON to *url* with browser-like headers and
        return the parsed JSON response."""
        req = urllib2.Request(url)
        req.add_header('Content-Type', 'application/json')
        req.add_header('X-Requested-With', 'XMLHttpRequest')
        req.add_header('Referer', 'http://www.ecpic.com.cn/cpiccar/sales/businessCollect/initVehicleBaseInfo')
        req.add_header('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; rv:41.0) Gecko/20100101 Firefox/41.0')
        req.add_header('Host', 'www.ecpic.com.cn')
        postBody = json.dumps(body)
        response = urllib2.urlopen(req, postBody)
        response = response.read()
        REDICT = json.loads(response)
        return REDICT
class ConfirmRate():
    """Confirm the premium "float notice" pages on e.cic.cn for an order.

    ``M`` selects the notice type ('11' or '12' — presumably commercial vs
    compulsory; TODO confirm against the remote API).  Each confirmation
    issues two POSTs, mirroring the site's own JS flow.
    """

    def __init__(self, Order_id):
        self.OrderID = Order_id
        # Referer pages shown before confirming.
        self.url_biz = "http://e.cic.cn/nsp/vehicle/initFloatNotice.do?orderNo=%s&businessCode=11" % self.OrderID
        self.url_force = "http://e.cic.cn/nsp/vehicle/initFloatNotice.do?orderNo=%s&businessCode=12" % self.OrderID
        # Confirmation endpoints (flag=True hits the "Folat" typo endpoint
        # exactly as the remote service spells it).
        self.url_b1 = "http://e.cic.cn/nsp/vehicle/confirmFolatNotice.do?orderNo=%s&businessCode=11" % self.OrderID
        self.url_f1 = "http://e.cic.cn/nsp/vehicle/confirmFolatNotice.do?orderNo=%s&businessCode=12" % self.OrderID
        self.url_b = "http://e.cic.cn/nsp/vehicle/confirmFloatNotifyPage.do?cooperateCode=001501&orderNo=%s&businessCode=11" % self.OrderID
        self.url_f = "http://e.cic.cn/nsp/vehicle/confirmFloatNotifyPage.do?cooperateCode=001501&orderNo=%s&businessCode=12" % self.OrderID

    def Confirm(self, M='11'):
        """Fire both confirmation POSTs; return {'error': '0'} on success,
        {'error': '1'} on any failure."""
        try:
            # The original bound both responses to the same unused name;
            # only the side effects matter.
            self.Post(M=M, flag=True)
            self.Post(M=M, flag=False)
            return {"error": "0", "msg": "0"}
        except Exception:  # network failure or bad response
            return {'error': '1', 'msg': "1"}

    def Post(self, flag=False, M='11'):
        """POST the confirmation payload; ``flag`` picks which endpoint of
        the pair is hit.  An M outside {'11','12'} leaves ``url`` unbound
        and raises NameError, matching the original."""
        if M == '11':
            url = self.url_b1 if flag else self.url_b
        if M == '12':
            url = self.url_f1 if flag else self.url_f
        req = urllib2.Request(url)
        req.add_header('Content-Type', 'application/json')
        req.add_header('X-Requested-With', 'XMLHttpRequest')
        if M == "11":
            req.add_header('Referer', self.url_biz)
        if M == "12":
            req.add_header('Referer', self.url_force)
        req.add_header('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; rv:41.0) Gecko/20100101 Firefox/41.0')
        req.add_header('Host', 'e.cic.cn')
        if flag:
            body = {"businessCode": M, "orderNo": self.OrderID}
        else:
            body = {"businessCode": M, "cooperateCode": "001501", "orderNo": self.OrderID}
        postBody = json.dumps(body)
        response = urllib2.urlopen(req, postBody)
        return response
class MyURLOpener(urllib.FancyURLopener):
    # NOTE(review): near-verbatim copy of Python 2's
    # urllib.URLopener.open_http/.open, extended with an optional `method`
    # argument so callers can issue HTTP verbs other than GET/POST.
    # Python 2-only syntax (`raise E, args`, `except T, name`) — do not
    # run under Python 3.
    def open_http(self, url, data=None, method=None):
        """Use HTTP protocol."""
        import httplib
        user_passwd = None
        proxy_passwd = None
        if isinstance(url, str):
            host, selector = splithost(url)
            if host:
                user_passwd, host = splituser(host)
                host = unquote(host)
            realhost = host
        else:
            host, selector = url
            # check whether the proxy contains authorization information
            proxy_passwd, host = splituser(host)
            # now we proceed with the url we want to obtain
            urltype, rest = splittype(selector)
            url = rest
            user_passwd = None
            if urltype.lower() != 'http':
                realhost = None
            else:
                realhost, rest = splithost(rest)
                if realhost:
                    user_passwd, realhost = splituser(realhost)
                if user_passwd:
                    selector = "%s://%s%s" % (urltype, realhost, rest)
                if proxy_bypass(realhost):
                    host = realhost
            # print "proxy via http:", host, selector
        if not host: raise IOError, ('http error', 'no host given')
        if proxy_passwd:
            import base64
            proxy_auth = base64.b64encode(proxy_passwd).strip()
        else:
            proxy_auth = None
        if user_passwd:
            import base64
            auth = base64.b64encode(user_passwd).strip()
        else:
            auth = None
        h = httplib.HTTP(host)
        # Extension over the stdlib copy: honor an explicit HTTP verb.
        if method is not None:
            h.putrequest(method, selector)
        else:
            h.putrequest('GET', selector)
        if data is not None:
            # h.putrequest('POST', selector)
            h.putheader('Content-Type', 'application/x-www-form-urlencoded')
            h.putheader('Content-Length', '%d' % len(data))
        if proxy_auth: h.putheader('Proxy-Authorization', 'Basic %s' % proxy_auth)
        if auth: h.putheader('Authorization', 'Basic %s' % auth)
        if realhost: h.putheader('Host', realhost)
        for args in self.addheaders: h.putheader(*args)
        h.endheaders(data)
        errcode, errmsg, headers = h.getreply()
        fp = h.getfile()
        if errcode == -1:
            if fp: fp.close()
            # something went wrong with the HTTP status line
            raise IOError, ('http protocol error', 0,
                            'got a bad status line', None)
        # According to RFC 2616, "2xx" code indicates that the client's
        # request was successfully received, understood, and accepted.
        if (200 <= errcode < 300):
            return addinfourl(fp, headers, "http:" + url, errcode)
        else:
            if data is None:
                return self.http_error(url, fp, errcode, errmsg, headers)
            else:
                return self.http_error(url, fp, errcode, errmsg, headers, data)
    def open(self, fullurl, data=None, method=None):
        """Use URLopener().open(file) instead of open(file, 'r')."""
        fullurl = unwrap(toBytes(fullurl))
        # percent encode url, fixing lame server errors for e.g, like space
        # within url paths.
        fullurl = quote(fullurl, safe="%/:=&?~#+!$,;'@()*[]|")
        if self.tempcache and fullurl in self.tempcache:
            filename, headers = self.tempcache[fullurl]
            fp = open(filename, 'rb')
            return addinfourl(fp, headers, fullurl)
        urltype, url = splittype(fullurl)
        if not urltype:
            urltype = 'file'
        if urltype in self.proxies:
            proxy = self.proxies[urltype]
            urltype, proxyhost = splittype(proxy)
            host, selector = splithost(proxyhost)
            url = (host, fullurl) # Signal special case to open_*()
        else:
            proxy = None
        name = 'open_' + urltype
        self.type = urltype
        name = name.replace('-', '_')
        if not hasattr(self, name):
            if proxy:
                return self.open_unknown_proxy(proxy, fullurl, data)
            else:
                return self.open_unknown(fullurl, data)
        try:
            # Extension over the stdlib copy: thread `method` through to
            # the scheme handler (only open_http above accepts it).
            return getattr(self, name)(url, data, method)
        except socket.error, msg:
            raise IOError, ('socket error', msg), sys.exc_info()[2]
|
{"/members/urls.py": ["/members/views.py"], "/ebusiness/urls.py": ["/ebusiness/views.py"], "/ClientAdmin/forms.py": ["/webadmin/models.py"], "/bxservice/ZhongHuaViews.py": ["/bxservice/zhonghua.py", "/bxservice/ansheng.py", "/bxservice/yangguang.py"], "/bxservice/urls.py": ["/bxservice/ZhongHuaViews.py", "/bxservice/views.py"], "/ClientAdmin/views.py": ["/ClientAdmin/forms.py"], "/LYZ/urls.py": ["/members/urls.py", "/klbapp/urls.py", "/web/urls.py", "/wechat/urls.py", "/webadmin/urls.py", "/bxservice/urls.py", "/ClientAdmin/urls.py", "/ebusiness/urls.py"], "/ebusiness/forms.py": ["/bxservice/models.py", "/LYZ/settings.py", "/ebusiness/models.py"], "/webadmin/forms.py": ["/webadmin/models.py"], "/klbapp/urls.py": ["/klbapp/views.py"], "/web/urls.py": ["/web/views.py"], "/webadmin/admin.py": ["/webadmin/models.py"], "/web/views.py": ["/LYZ/common.py", "/LYZ/klb_class.py", "/klbapp/appclass.py", "/bxservice/models.py", "/bxservice/common.py"], "/ClientAdmin/urls.py": ["/ClientAdmin/views.py"], "/ebusiness/templatetags/EbusinessTag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/webadmin/models.py"], "/members/models.py": ["/LYZ/settings.py", "/bxservice/models.py"], "/web/templatetags/webtag.py": ["/LYZ/klb_class.py"], "/wechat/templatetags/wechattag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/LYZ/settings.py"], "/webadmin/urls.py": ["/webadmin/views.py"], "/wechat/urls.py": ["/wechat/views.py"], "/members/admin.py": ["/members/models.py"]}
|
27,055
|
xiaoyanguoke/klb
|
refs/heads/master
|
/ebusiness/views.py
|
# -*- coding:utf-8 -*-
from django.shortcuts import *
from ebusiness.forms import initVehicleBaseInfoForm,EditInfoForm
from LYZ.klb_class import KLBCode,PingAn,GetCarVin,UrlCode,KLBOAuth
from ebusiness.models import vin_as_car_yg
from bxservice.ZhongHuaAction import *
from bxservice.ansheng import *
from bxservice.yangguang import *
from django.contrib.auth.decorators import login_required
from django.core.exceptions import ObjectDoesNotExist
from django.contrib.auth.hashers import make_password, check_password
from django.contrib.auth import authenticate, login, logout
import re
'''
$名称:录入车辆信息
$参数:
'''
def initVehicleBaseInfo(request):
    """Entry view: collect the vehicle's base info (owner, plate, VIN).

    GET renders the empty form.  POST either redirects straight to the
    quote page (vehicle already known) or renders the model-selection
    list; validation errors re-render the form with messages.
    """
    TempDict = {}
    Code = KLBCode()
    # Display style and sales-channel code passed through the funnel.
    style = request.REQUEST.get("style", "")
    sn = request.REQUEST.get("sn", "")
    urldata = dict(request.GET.items())
    if request.method == "POST":
        URL = UrlCode()
        initForms = initVehicleBaseInfoForm(request.POST)
        if initForms.is_valid():
            # Vehicle already in DB: jump directly to the comparison page.
            IsSet = initForms.IsSet()
            if IsSet:
                car = Code.encode(str(IsSet['id']))
                urldata.update({"car": car})
                GoUrl = "/ebusiness/initQuotation/?" + URL.Encode(urldata)
                return HttpResponsePermanentRedirect(GoUrl)
            else:
                CarInfoList = initForms.GetCarInfo()
                cityCode = initForms.GetCityCode()  # resolve city from the plate
                if cityCode == "":
                    TempDict.update({"licenseNo_error": "车牌号码不正确或该地区暂不支持"})
                elif CarInfoList:
                    # BUG FIX: the original `(cityCode=="") and cityCode or
                    # Code.encode(...)` and/or-ternary misfires for falsy
                    # middle operands; cityCode is known non-empty in this
                    # branch, so encode it directly.
                    cityCode = Code.encode(str(cityCode))
                    ownerName = Code.encode(str(initForms.cleaned_data["ownerName"]))  # owner name
                    licenseNo = Code.encode(str(initForms.cleaned_data["licenseNo"]))  # plate number
                    engine = Code.encode(str(initForms.cleaned_data["engine"]))        # engine number
                    vin = Code.encode(str(initForms.cleaned_data["vin"]))              # VIN
                    InDB = {
                        "CarList": CarInfoList,
                        "cityCode": cityCode,
                        "ownerName": ownerName,
                        "licenseNo": licenseNo,
                        "engine": engine,
                        "vin": vin,
                        "style": style,
                        "sn": sn
                    }
                    TempDict.update(InDB)
                    return render_to_response('ebusiness/selectCarList.html', TempDict, context_instance=RequestContext(request))
                else:
                    TempDict.update({"vin_error": "车架号不正确,请更换车架号重试!"})
        TempDict.update({"forms": initForms})
    else:
        initForms = initVehicleBaseInfoForm()
        TempDict.update({"forms": initForms})
    return render_to_response('ebusiness/initVehicleBaseInfo.html', TempDict, context_instance=RequestContext(request))
'''
$名称:选择车辆品牌型号
$参数:
'''
def selectCerList(request):
    """POST handler for the model-selection list: persist the chosen
    vehicle/model pair, then redirect to the quote page.  Non-POST
    requests are bounced back via a history.go(-1) snippet."""
    from bxservice.common import BXDBAction
    coder = KLBCode()
    db_action = BXDBAction()
    url_helper = UrlCode()
    # Funnel parameters travel KLB-encoded in the query string.
    style = request.REQUEST.get("style", "")      # display style
    cityCode = request.REQUEST.get("cityCode", "")  # city code
    sn = request.REQUEST.get("sn", "")            # sales-channel code
    ownerName = request.REQUEST.get("ownerName", "")  # owner name
    licenseNo = request.REQUEST.get("licenseNo", "")  # plate number
    engine = request.REQUEST.get("engine", "")    # engine number
    vin = request.REQUEST.get("vin", "")          # VIN
    car = request.REQUEST.get("car", "")          # chosen model row id
    href = request.REQUEST.get("href", "")        # referrer (unused)
    urldata = {"style": style, "sn": sn}
    if request.method != "POST":
        return HttpResponse("<script>window.history.go(-1);</script>")
    # Persist the vehicle itself, decoded back to plain text.
    new_car_id = db_action.CreateCarVin(
        licenseno=coder.decode(licenseNo),
        ownername=coder.decode(ownerName),
        citycode=coder.decode(cityCode),
        vin=coder.decode(vin),
        engine=coder.decode(engine),
        user_id="",
    )
    # Attach the chosen make/model record to the new vehicle.
    chosen = vin_as_car_yg.objects.get(id=coder.decode(car))
    db_action.CreateCarInfo(
        user_id="",
        car_id=new_car_id,
        key=chosen.key,
        vehiclefgwcode=chosen.vehicleFgwCode,
        value=chosen.value,
        bxtype="sinosig",
    )
    urldata.update({"car": coder.encode(str(new_car_id))})
    return HttpResponsePermanentRedirect("/ebusiness/initQuotation/?" + url_helper.Encode(urldata))
'''
$名称:保费试算
$参数:
'''
def initQuotation(request):
    """Quote/comparison page; requires a KLB-encoded vehicle id (`car`).

    Without `car` the visitor is redirected back to the entry form with
    the original query string preserved.
    """
    KCode = KLBCode()
    URL = UrlCode()
    TempDictArr = dict(request.REQUEST.items())
    car = TempDictArr.get("car", "")      # encoded vehicle id
    style = TempDictArr.get("style", "")  # display style
    sn = TempDictArr.get("sn", "")        # sales-channel code
    if car == "" or car is None:  # `is None` instead of `== None`
        return HttpResponsePermanentRedirect("/ebusiness/?%s" % URL.Encode(TempDictArr))
    TempDict = {"id": KCode.decode(str(car)), "style": style, "sn": sn}
    return render_to_response('ebusiness/initQuotation1.html', TempDict, context_instance=RequestContext(request))
'''
$名称:填写投保资料
$参数:
'''
def editInfo(request):
    """Render the policy-holder data-entry form for a chosen insurer.

    ``company`` ('zh'/'as'/'yg') and the vehicle id arrive KLB-encoded
    in the query string.  For 'zh' the float-rate notices are confirmed
    up front and the payment info is preloaded into the template context.
    """
    KCode = KLBCode()
    TempDict = {}
    company = request.REQUEST.get("company", "")  # insurer code (encoded)
    id = request.REQUEST.get("car", "")           # vehicle id (encoded)
    sn = request.REQUEST.get("sn", "")
    CarInfo = bxcarvin.objects.get(id=KCode.decode(str(id)))
    revin = KCode.encode(CarInfo.vin)
    editForms = EditInfoForm()
    bxgs = KCode.decode(str(company))
    if bxgs == "zh":
        if id != "" and id is not None:
            PayInfo = CarInfo.bxpayinfo_set.values()[0]
            # Confirm both float-rate notices (businessCode 11 and 12)
            # before the user reaches the payment step.
            Confirm = ConfirmRate(Order_id=PayInfo['order_id'])
            Confirm.Confirm(M='11')
            Confirm.Confirm(M='12')
            TempDict.update({"PayInfo": PayInfo})
            TempDict.update({"company": "zh", "sn": sn, "openwin": "1", "orderNo": PayInfo['order_id'],
                             "businessCode": PayInfo['businesscode'], "vin": revin})
    if bxgs == "as":
        TempDict.update({"bxgs": "as", "openwin": "", "sn": sn, "vin": revin})
    if bxgs == "yg":
        TempDict.update({"bxgs": "yg", "openwin": "", "sn": sn, "vin": revin})
    TempDict.update({"forms": editForms})
    TempDict.update({'id': id})
    return render_to_response('ebusiness/editInfo.html', TempDict, context_instance=RequestContext(request))
def ConfirmInsure(request):
    """Submit the final underwriting data to the chosen insurer.

    company = 'as' | 'zh' | 'yg' (KLB-encoded).  On success the user is
    redirected to the insurer's payment URL; 'yg' may additionally
    require a captcha round-trip (action == 'yzm').
    """
    KCode = KLBCode()
    TempDict = {}
    agent = request.META.get('HTTP_USER_AGENT', "")
    bxgs = request.REQUEST.get("company", "")
    id = request.REQUEST.get("id", "")
    C_APP_NAME = request.REQUEST.get("C_APP_NAME", "")
    C_APP_IDENT_NO = request.REQUEST.get("C_APP_IDENT_NO", "")
    C_APP_TEL = request.REQUEST.get("C_APP_TEL", "")
    C_APP_ADDR = request.REQUEST.get("C_APP_ADDR", "")
    C_APP_EMAIL = request.REQUEST.get("C_APP_EMAIL", "")
    C_CONTACT_TEL = request.REQUEST.get("C_CONTACT_TEL", "")
    C_CONTACT_NAME = request.REQUEST.get("C_CONTACT_NAME", "")
    C_ADDRESS = request.REQUEST.get("C_ADDRESS", "")
    vin = request.REQUEST.get("vin", "")
    Session_ID = request.REQUEST.get("Session_ID", "")
    ORDER_ID = request.REQUEST.get("ORDER_ID", "")
    Verify = request.REQUEST.get("yzm", "")
    # Detect whether the visitor is on a mobile device.
    Ister = IsTerminal(agent=agent)
    IsterInfo = Ister.IsTer()
    VinNew = KCode.decode(vin)
    bxgs = KCode.decode(str(bxgs))
    ID = KCode.decode(id)
    if request.method == "POST":
        # Captcha round-trip (Sunshine only).
        if request.REQUEST.get("action", '') == 'yzm':
            YG = YangGuang()
            if Verify != "":
                Result, REDICT, PAY_URL, ErrorMessage = YG.Get_126(vin=VinNew, ownername=KCode.decode(C_APP_NAME), IsterInfo=IsterInfo, Verify=Verify, ID=ID)
                # `'error' not in d` replaces py2-only d.has_key('error').
                if 'error' not in REDICT:
                    TempDict.update({"URL": PAY_URL})
                    return HttpResponsePermanentRedirect(PAY_URL)
                else:
                    return render_to_response('ebusiness/error.html')
            else:
                TempDict.update({"yzm": "1"})
                TempDict.update({"company": 'yg', "vin": vin})
                return render_to_response('ebusiness/editInfo.html', TempDict,
                                          context_instance=RequestContext(request))
        editForms = EditInfoForm(request.POST)
        if editForms.is_valid():
            if bxgs == "zh":
                REDICT = {"Session_ID": KCode.decode(str(Session_ID)),
                          "ORDER_ID": KCode.decode(str(ORDER_ID)),
                          "vin": VinNew,
                          "C_APP_NAME": C_APP_NAME,              # applicant name
                          "C_APP_IDENT_NO": C_APP_IDENT_NO,      # applicant ID number
                          "C_APP_TEL": C_APP_TEL,                # applicant phone
                          "C_APP_ADDR": C_APP_ADDR,              # applicant address
                          "C_APP_EMAIL": C_APP_EMAIL,            # applicant email
                          "C_INSRNT_NME": C_APP_NAME,            # insured name
                          "C_INSRNT_IDENT_NO": C_APP_IDENT_NO,   # insured ID number
                          "C_INSRNT_TEL": C_APP_TEL,             # insured phone
                          "C_INSRNT_ADDR": C_APP_ADDR,           # insured address
                          "C_INSRNT_EMAIL": C_APP_EMAIL,         # insured email
                          "C_DELIVERY_PROVINCE": "",             # delivery province code
                          "C_DELIVERY_CITY": "",                 # delivery city code
                          "C_DELIVERY_DISTRICT": "",             # district code
                          "C_CONTACT_NAME": C_CONTACT_NAME,
                          "C_CONTACT_TEL": C_CONTACT_TEL,
                          "C_ADDRESS": C_ADDRESS,                # shipping address
                          "C_IDET_NAME": C_APP_NAME,             # owner name
                          "C_IDENT_NO": C_APP_IDENT_NO,          # owner ID number
                          "IsterInfo": IsterInfo,
                          "ID": ID
                          }
                ZH = ZhongHuaAction()
                ERR, REDICT, PAY_URL = ZH.Get_1038(**REDICT)
            if bxgs == "as":
                REDICT = {
                    "vin": VinNew,
                    "applicantname": C_APP_NAME,        # applicant name
                    "applicantidno": C_APP_IDENT_NO,    # applicant ID number
                    "applicantmobile": C_APP_TEL,       # applicant phone
                    "applicantemail": C_APP_EMAIL,      # applicant email
                    "insuredname": C_APP_NAME,          # insured name
                    "insuredidno": C_APP_IDENT_NO,      # insured ID number
                    "insuredmobile": C_APP_TEL,         # insured phone
                    "addresseeprovince": "",            # delivery province code
                    "addresseecity": "",                # delivery city code
                    "addresseetown": "",                # district code
                    "addresseename": C_CONTACT_NAME,
                    "addresseemobile": C_CONTACT_TEL,
                    "addresseedetails": C_ADDRESS,      # shipping address
                    "ownername": C_APP_NAME,            # owner name
                    "owneridno": C_APP_IDENT_NO,        # owner ID number
                    "IsterInfo": IsterInfo,
                    "ID": ID
                }
                AS = AnSheng()
                X, REDICT, ErrorMessage, PAY_URL = AS.Get_115(**REDICT)
            if bxgs == "yg":
                REDICT = {
                    "vin": VinNew,
                    "applicantname": C_APP_NAME,                 # applicant name
                    "applicantidno": C_APP_IDENT_NO,             # applicant ID number
                    "applicantmobile": C_APP_TEL,                # applicant phone
                    "applicantemail": C_APP_EMAIL,               # applicant email
                    "insuredname": C_APP_NAME,                   # insured name
                    "insuredidno": C_APP_IDENT_NO,               # insured ID number
                    "insuredmobile": C_APP_TEL,                  # insured phone
                    "insuredaddresseeDetails": C_APP_ADDR,       # insured address
                    "insuredEmail": C_APP_EMAIL,                 # insured email
                    "addresseeprovince": "",                     # delivery province code
                    "addresseecity": "",                         # delivery city code
                    "addresseetown": "",                         # district code
                    "addresseename": C_CONTACT_NAME,
                    "addresseemobile": C_CONTACT_TEL,
                    "addresseedetails": C_ADDRESS,               # shipping address
                    "ownername": C_APP_NAME,                     # owner name
                    "owneridno": C_APP_IDENT_NO,                 # owner ID number
                    "IsterInfo": IsterInfo,
                    "ID": ID
                }
                YG = YangGuang()
                Result, REDICT, PAY_URL, ErrorMessage = YG.Get_120(**REDICT)
            # NOTE(review): if bxgs is none of zh/as/yg, REDICT is unbound
            # here and a NameError is raised — preserved from the original.
            if 'error' not in REDICT:
                TempDict.update({"URL": PAY_URL})
                return HttpResponsePermanentRedirect(PAY_URL)
            elif REDICT['error'] == '2':
                # Sunshine requested a captcha: re-render the form with it.
                TempDict = REDICT
                TempDict.update({"yzm": "1"})
                TempDict.update({"company": 'yg', "vin": vin, 'ownername': C_APP_NAME})
                return render_to_response('ebusiness/editInfo.html', TempDict,
                                          context_instance=RequestContext(request))
            else:
                print(REDICT['msg'])
                return render_to_response('ebusiness/error.html')
        else:
            TempDict.update({"forms": editForms})
            TempDict.update({'id': id})
            return render_to_response('ebusiness/editInfo.html', TempDict, context_instance=RequestContext(request))
    # NOTE(review): non-POST requests fall through returning None,
    # preserved from the original (Django will reject a None response).
def auto(request):
    """Return one random sample vehicle record as JSON (for form auto-fill).

    Picks one of a fixed set of ``bxcarvin`` primary keys at random and
    falls back to the first id when the chosen row cannot be loaded.
    """
    KLBJSON = PrintJson()
    ID = ['9', '16', '41', '53', '60', '76', '17']
    try:
        # BUG FIX: the original used random.randint(0, len(ID)), which is
        # inclusive at BOTH ends and could therefore produce len(ID) — an
        # IndexError that was silently masked by a bare except, skewing the
        # sample toward ID[0].  random.choice never goes out of range.
        CarIN = bxcarvin.objects.get(id=random.choice(ID))
    except Exception:
        # Best-effort fallback if the randomly chosen row is missing.
        CarIN = bxcarvin.objects.get(id=ID[0])
    db = {"licenseNo": CarIN.licenseno,
          "engine": CarIN.engine,
          "ownerName": CarIN.ownername,
          "vin": CarIN.vin}
    J = KLBJSON.echo(msg="数据返回", error=0, data=db)
    return HttpResponse(J, content_type="application/json")
def GetVIN(request):
    """Look up a vehicle's VIN and engine number by plate number and owner.

    Reads ``n1`` (plate number), ``n2`` (owner name) and ``a`` (action)
    from the request and answers with a JSON payload.  Action ``a1``
    queries the YangGuang (Sinosig) service when the local DB has no hit.
    """
    plate = request.REQUEST.get("n1", "")
    owner = request.REQUEST.get("n2", "")
    action = request.REQUEST.get("a", "")
    plate_ok = re.match(u"^[\u4e00-\u9fa5]{1}[A-Z0-9]{6}$", plate)
    owner_ok = re.match(u"^[\u4e00-\u9fa5]{2,5}$", owner)
    responder = PrintJson()
    if not (plate_ok and owner_ok):
        # Malformed plate number or owner name.
        payload = responder.echo(msg="格式错误", error=1)
        return HttpResponse(payload, content_type="application/json")
    lookup = GetCarVin(licenseNo=plate, ownerName=owner)
    cached = lookup.isInDB()
    if cached:
        # Served straight from the local database.
        payload = responder.echo(msg="数据返回", error=0,
                                 data={"n1": cached['vin'], "n2": cached['engine']})
        return HttpResponse(payload, content_type="application/json")
    # Action "a1" means: fall back to the YangGuang remote lookup.
    if action == "a1":
        remote = lookup.GetYangGuang()
        if remote:
            payload = responder.echo(msg="数据返回", error=0,
                                     data={"n1": remote['vin'], "n2": remote['engine']})
        else:
            payload = responder.echo(msg="格式错误", error=1)
        return HttpResponse(payload, content_type="application/json")
    # NOTE(review): when the VIN is not cached and no recognised action is
    # given, this falls through and returns None — same as the original.
'''
User personal-centre (member profile) views.
'''
@login_required(login_url="/ebusiness/")
def UserCenter(request):
    """Render the member's personal-centre page (login required)."""
    context = {}
    return render_to_response('ebusiness/UserCenter.html', context,
                              context_instance=RequestContext(request))
|
{"/members/urls.py": ["/members/views.py"], "/ebusiness/urls.py": ["/ebusiness/views.py"], "/ClientAdmin/forms.py": ["/webadmin/models.py"], "/bxservice/ZhongHuaViews.py": ["/bxservice/zhonghua.py", "/bxservice/ansheng.py", "/bxservice/yangguang.py"], "/bxservice/urls.py": ["/bxservice/ZhongHuaViews.py", "/bxservice/views.py"], "/ClientAdmin/views.py": ["/ClientAdmin/forms.py"], "/LYZ/urls.py": ["/members/urls.py", "/klbapp/urls.py", "/web/urls.py", "/wechat/urls.py", "/webadmin/urls.py", "/bxservice/urls.py", "/ClientAdmin/urls.py", "/ebusiness/urls.py"], "/ebusiness/forms.py": ["/bxservice/models.py", "/LYZ/settings.py", "/ebusiness/models.py"], "/webadmin/forms.py": ["/webadmin/models.py"], "/klbapp/urls.py": ["/klbapp/views.py"], "/web/urls.py": ["/web/views.py"], "/webadmin/admin.py": ["/webadmin/models.py"], "/web/views.py": ["/LYZ/common.py", "/LYZ/klb_class.py", "/klbapp/appclass.py", "/bxservice/models.py", "/bxservice/common.py"], "/ClientAdmin/urls.py": ["/ClientAdmin/views.py"], "/ebusiness/templatetags/EbusinessTag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/webadmin/models.py"], "/members/models.py": ["/LYZ/settings.py", "/bxservice/models.py"], "/web/templatetags/webtag.py": ["/LYZ/klb_class.py"], "/wechat/templatetags/wechattag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/LYZ/settings.py"], "/webadmin/urls.py": ["/webadmin/views.py"], "/wechat/urls.py": ["/wechat/views.py"], "/members/admin.py": ["/members/models.py"]}
|
27,056
|
xiaoyanguoke/klb
|
refs/heads/master
|
/members/templatetags/__init__.py
|
__author__ = 'fengdaye'
|
{"/members/urls.py": ["/members/views.py"], "/ebusiness/urls.py": ["/ebusiness/views.py"], "/ClientAdmin/forms.py": ["/webadmin/models.py"], "/bxservice/ZhongHuaViews.py": ["/bxservice/zhonghua.py", "/bxservice/ansheng.py", "/bxservice/yangguang.py"], "/bxservice/urls.py": ["/bxservice/ZhongHuaViews.py", "/bxservice/views.py"], "/ClientAdmin/views.py": ["/ClientAdmin/forms.py"], "/LYZ/urls.py": ["/members/urls.py", "/klbapp/urls.py", "/web/urls.py", "/wechat/urls.py", "/webadmin/urls.py", "/bxservice/urls.py", "/ClientAdmin/urls.py", "/ebusiness/urls.py"], "/ebusiness/forms.py": ["/bxservice/models.py", "/LYZ/settings.py", "/ebusiness/models.py"], "/webadmin/forms.py": ["/webadmin/models.py"], "/klbapp/urls.py": ["/klbapp/views.py"], "/web/urls.py": ["/web/views.py"], "/webadmin/admin.py": ["/webadmin/models.py"], "/web/views.py": ["/LYZ/common.py", "/LYZ/klb_class.py", "/klbapp/appclass.py", "/bxservice/models.py", "/bxservice/common.py"], "/ClientAdmin/urls.py": ["/ClientAdmin/views.py"], "/ebusiness/templatetags/EbusinessTag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/webadmin/models.py"], "/members/models.py": ["/LYZ/settings.py", "/bxservice/models.py"], "/web/templatetags/webtag.py": ["/LYZ/klb_class.py"], "/wechat/templatetags/wechattag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/LYZ/settings.py"], "/webadmin/urls.py": ["/webadmin/views.py"], "/wechat/urls.py": ["/wechat/views.py"], "/members/admin.py": ["/members/models.py"]}
|
27,057
|
xiaoyanguoke/klb
|
refs/heads/master
|
/LYZ/settings.py
|
# -*- coding:utf-8 -*-
"""Django project settings for LYZ (the kalaibao insurance portal)."""
import os
# Project root: two levels above this settings module.
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Forward-slash normalised paths (the replace() calls target Windows hosts).
WEB_ROOT = BASE_DIR.replace('\\','/')
PUBLIC_DIR=os.path.join(WEB_ROOT,'public/').replace('\\','/')
FONTS_DIR=os.path.join(WEB_ROOT,'fonts/').replace('\\','/')
# Template directories
TEMPLATE_DIRS = (
    os.path.join(WEB_ROOT,'templates/').replace('\\','/'),
)
# SECURITY NOTE(review): secret key is hard-coded and committed to source
# control — move it to an environment variable before deployment.
SECRET_KEY = 'g^&l4+od9edl%%8%%_q3&jikt72+=&v%%k!ag3+b%6wa_1sauj'
# SECURITY NOTE(review): DEBUG is on and ALLOWED_HOSTS is '*' — unsafe for
# production; confirm this configuration is development-only.
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = ['*']
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'tokenapi',
    'members',
    'klbapp',
    'bxservice',
    'web',
    'wechat',
    'webadmin',
    'ClientAdmin',
    'ebusiness',
)
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    # NOTE(review): CSRF protection is disabled — confirm this is intentional.
    # 'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'django.middleware.locale.LocaleMiddleware',
)
# Standard model-backed auth plus tokenapi token auth.
AUTHENTICATION_BACKENDS =(
    'django.contrib.auth.backends.ModelBackend',
    'tokenapi.backends.TokenBackend',
)
TOKEN_TIMEOUT_DAYS = 7
TOKEN_CHECK_ACTIVE_USER = True
ROOT_URLCONF = 'LYZ.urls'
WSGI_APPLICATION = 'LYZ.wsgi.application'
# SECURITY NOTE(review): production database host and password are committed
# in plain text — rotate the password and load credentials from the
# environment.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'bxweb',
        'HOST': '101.200.1.153',
        'PORT': 3306,
        'USER': 'root',
        'PASSWORD': 'klb139726845!@#$%^&*()',
    }
}
TEMPLATE_CONTEXT_PROCESSORS = (
    "django.contrib.auth.context_processors.auth",
    "django.core.context_processors.request",
)
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = False
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(WEB_ROOT,'static/').replace('\\', '/')
# STATIC_ROOT = os.path.join(os.path.join(os.path.abspath(os.path.dirname(__file__)),'..'),'static')
STATICFILES_DIRS = (
    ("stylesheets", os.path.join(STATIC_ROOT,'stylesheets')),
    ("javascripts", os.path.join(STATIC_ROOT,'javascripts')),
    ("images", os.path.join(STATIC_ROOT,'images')),
    ("wechat", os.path.join(STATIC_ROOT,'wechat')),
    ("webadmin", os.path.join(STATIC_ROOT,'webadmin')),
    ("ClientAdmin", os.path.join(STATIC_ROOT,'ClientAdmin')),
    ("ebusiness", os.path.join(STATIC_ROOT,'ebusiness')),
)
# STATIC_ROOT = os.path.join(WEB_ROOT,'static/').replace('\\', '/')
MEDIA_ROOT = os.path.join(WEB_ROOT,'media/').replace('\\', '/')
MEDIA_URL = '/media/'
# Application identifiers (web and mobile).
SYS_APP = "klb_bxweb"
SYS_MOBILE = "klb_bxapp"
# Login URL
LOGIN_URL = '/members/login/'
# SMS gateway credentials
# SECURITY NOTE(review): credentials committed in plain text.
SMS_USER = "18629158186"
SMS_PWD = "9F9EF20F84F2B93330A71ACEF4DB"
SMS_URL = "http://web.1xinxi.cn/asmx/smsservice.aspx"
# WeChat official-account settings
# SECURITY NOTE(review): app secret committed in plain text.
WECHAT_APPID = "wxe3aebe66d444b72b"
WECHAT_APPSECRET = "485bae4e950f7b4d85440084a3b56e72"
WECHAT_TOKEN = "weiphp"
WECHAT_URL = "http://web.kalaibao.com"
# WeChat open-platform settings
OPEN_WECHAT_APPID = "wxc7eadf63b02d39bd"
OPEN_WECHAT_APPSECRET = "00d33900f6500980b553c55db1e24363"
OPEN_WECHAT_URL = "http://www.kalaibao.com"
# Outgoing e-mail (SMTP)
# EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = 'smtp.exmail.qq.com'
EMAIL_HOST_USER = 'kalaibao@kalaibao.com'
# SECURITY NOTE(review): mailbox password committed in plain text.
EMAIL_HOST_PASSWORD = 'tdf1618'
|
{"/members/urls.py": ["/members/views.py"], "/ebusiness/urls.py": ["/ebusiness/views.py"], "/ClientAdmin/forms.py": ["/webadmin/models.py"], "/bxservice/ZhongHuaViews.py": ["/bxservice/zhonghua.py", "/bxservice/ansheng.py", "/bxservice/yangguang.py"], "/bxservice/urls.py": ["/bxservice/ZhongHuaViews.py", "/bxservice/views.py"], "/ClientAdmin/views.py": ["/ClientAdmin/forms.py"], "/LYZ/urls.py": ["/members/urls.py", "/klbapp/urls.py", "/web/urls.py", "/wechat/urls.py", "/webadmin/urls.py", "/bxservice/urls.py", "/ClientAdmin/urls.py", "/ebusiness/urls.py"], "/ebusiness/forms.py": ["/bxservice/models.py", "/LYZ/settings.py", "/ebusiness/models.py"], "/webadmin/forms.py": ["/webadmin/models.py"], "/klbapp/urls.py": ["/klbapp/views.py"], "/web/urls.py": ["/web/views.py"], "/webadmin/admin.py": ["/webadmin/models.py"], "/web/views.py": ["/LYZ/common.py", "/LYZ/klb_class.py", "/klbapp/appclass.py", "/bxservice/models.py", "/bxservice/common.py"], "/ClientAdmin/urls.py": ["/ClientAdmin/views.py"], "/ebusiness/templatetags/EbusinessTag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/webadmin/models.py"], "/members/models.py": ["/LYZ/settings.py", "/bxservice/models.py"], "/web/templatetags/webtag.py": ["/LYZ/klb_class.py"], "/wechat/templatetags/wechattag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/LYZ/settings.py"], "/webadmin/urls.py": ["/webadmin/views.py"], "/wechat/urls.py": ["/wechat/views.py"], "/members/admin.py": ["/members/models.py"]}
|
27,058
|
xiaoyanguoke/klb
|
refs/heads/master
|
/bxservice/ZhongHuaViews.py
|
# -*- coding:utf-8 -*-
from django.shortcuts import *
from bxservice.zhonghua import *
from bxservice.ansheng import *
from bxservice.yangguang import *
import dicttoxml
def ZhongHuaIndex(request):
    """Debug endpoint: invoke the YangGuang service and echo its raw reply."""
    service = YangGuang()
    reply = service.Send()
    return HttpResponse(reply)
|
{"/members/urls.py": ["/members/views.py"], "/ebusiness/urls.py": ["/ebusiness/views.py"], "/ClientAdmin/forms.py": ["/webadmin/models.py"], "/bxservice/ZhongHuaViews.py": ["/bxservice/zhonghua.py", "/bxservice/ansheng.py", "/bxservice/yangguang.py"], "/bxservice/urls.py": ["/bxservice/ZhongHuaViews.py", "/bxservice/views.py"], "/ClientAdmin/views.py": ["/ClientAdmin/forms.py"], "/LYZ/urls.py": ["/members/urls.py", "/klbapp/urls.py", "/web/urls.py", "/wechat/urls.py", "/webadmin/urls.py", "/bxservice/urls.py", "/ClientAdmin/urls.py", "/ebusiness/urls.py"], "/ebusiness/forms.py": ["/bxservice/models.py", "/LYZ/settings.py", "/ebusiness/models.py"], "/webadmin/forms.py": ["/webadmin/models.py"], "/klbapp/urls.py": ["/klbapp/views.py"], "/web/urls.py": ["/web/views.py"], "/webadmin/admin.py": ["/webadmin/models.py"], "/web/views.py": ["/LYZ/common.py", "/LYZ/klb_class.py", "/klbapp/appclass.py", "/bxservice/models.py", "/bxservice/common.py"], "/ClientAdmin/urls.py": ["/ClientAdmin/views.py"], "/ebusiness/templatetags/EbusinessTag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/webadmin/models.py"], "/members/models.py": ["/LYZ/settings.py", "/bxservice/models.py"], "/web/templatetags/webtag.py": ["/LYZ/klb_class.py"], "/wechat/templatetags/wechattag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/LYZ/settings.py"], "/webadmin/urls.py": ["/webadmin/views.py"], "/wechat/urls.py": ["/wechat/views.py"], "/members/admin.py": ["/members/models.py"]}
|
27,059
|
xiaoyanguoke/klb
|
refs/heads/master
|
/bxservice/urls.py
|
# -*- coding:utf-8 -*-
from django.conf.urls import patterns,include,url
from bxservice.ZhongHuaViews import *
from bxservice.views import *
# URL routes for the bxservice app (Django 1.x ``patterns`` style).
# Consistency fix: every regex is now a raw string — the last six entries
# were plain strings.  Behaviour is identical here (no backslashes occur),
# but raw strings are the project/Django convention for URL regexes.
urlpatterns = patterns('',
    url(r'^PriceList/$', PriceList, name="PriceList"),
    url(r'^zh/$', ZhongHuaIndex, name="ZhongHuaIndex"),
    url(r'^getvin/$', GetVIN, name="GetVIN"),
    url(r'^createvin/$', CerateCarVin, name="CerateCarVin"),
    url(r'^VINIsSet/$', VINIsSet, name="VINIsSet"),
    url(r'^ConfirmTouBao/$', ConfirmTouBao, name="ConfirmTouBao"),
    url(r'^IsReadCallback/$', IsReadCallback, name="IsReadCallback"),
    url(r'^GetRead/$', GetRead, name="GetRead"),
    url(r'^GetCallBack/$', GetCallBack, name="GetCallBack"),
    url(r'^ConfirmFeiLv/$', ConfirmFeiLv, name="ConfirmFeiLv"),
)
|
{"/members/urls.py": ["/members/views.py"], "/ebusiness/urls.py": ["/ebusiness/views.py"], "/ClientAdmin/forms.py": ["/webadmin/models.py"], "/bxservice/ZhongHuaViews.py": ["/bxservice/zhonghua.py", "/bxservice/ansheng.py", "/bxservice/yangguang.py"], "/bxservice/urls.py": ["/bxservice/ZhongHuaViews.py", "/bxservice/views.py"], "/ClientAdmin/views.py": ["/ClientAdmin/forms.py"], "/LYZ/urls.py": ["/members/urls.py", "/klbapp/urls.py", "/web/urls.py", "/wechat/urls.py", "/webadmin/urls.py", "/bxservice/urls.py", "/ClientAdmin/urls.py", "/ebusiness/urls.py"], "/ebusiness/forms.py": ["/bxservice/models.py", "/LYZ/settings.py", "/ebusiness/models.py"], "/webadmin/forms.py": ["/webadmin/models.py"], "/klbapp/urls.py": ["/klbapp/views.py"], "/web/urls.py": ["/web/views.py"], "/webadmin/admin.py": ["/webadmin/models.py"], "/web/views.py": ["/LYZ/common.py", "/LYZ/klb_class.py", "/klbapp/appclass.py", "/bxservice/models.py", "/bxservice/common.py"], "/ClientAdmin/urls.py": ["/ClientAdmin/views.py"], "/ebusiness/templatetags/EbusinessTag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/webadmin/models.py"], "/members/models.py": ["/LYZ/settings.py", "/bxservice/models.py"], "/web/templatetags/webtag.py": ["/LYZ/klb_class.py"], "/wechat/templatetags/wechattag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/LYZ/settings.py"], "/webadmin/urls.py": ["/webadmin/views.py"], "/wechat/urls.py": ["/wechat/views.py"], "/members/admin.py": ["/members/models.py"]}
|
27,060
|
xiaoyanguoke/klb
|
refs/heads/master
|
/bxservice/zhonghua.py
|
# -*- coding:utf-8 -*-
from LYZ.common import makeNew
from common import *
import datetime, random, urllib, urllib2, time
from LYZ.settings import *
from suds.client import *
import sys, xmltodict
import json
import re
import dicttoxml
reload(sys)
sys.setdefaultencoding('utf-8')
# ZhongHua (China United) insurance web-service client
class ZhongHua(object):
    """SOAP client for the ZhongHua (China United) insurance NSP service.

    Wraps the numbered NSP interfaces (1030-1038): vehicle lookup, quote,
    validation and purchase.  Each request body is built by %-substituting
    a positional tuple into an XML template under ``bxxml/zhonghua/``, so
    tuple order is load-bearing throughout this class.
    """
    # Test (sandbox) WSDL endpoint
    TEST_URL = "http://220.171.28.152:9080/nsp/services/NetSalePlatform?wsdl"
    # Test payment page; the order number is appended to this URL
    TEST_PAY_URL = "http://220.171.28.152:9080/nsp/payment/payment.do?orderNo="
    # YangGuang (Sinosig) vehicle-model search URL
    YG_SEACH_URL = "http://chexian.sinosig.com/Partner/netVehicleModel.action?page=1&pageSize=6&searchCode=&searchType=1&encoding=utf-8&isSeats=1&callback=jQuery111206209229775800245_1441631982195&_=1441631982199"
    # Service user name
    USER_NAME = "ECUser"
    # Service password
    USER_PSW = "EC100"
    # Channel code
    CHANNELCODE = "001501"
    def __init__(self,
                 citycode,
                 licenseNo,
                 ownerName,
                 vin,
                 engineNo,
                 vehicleModelName,
                 ):
        """Capture vehicle identity and prepare session/date defaults.

        Args:
            citycode: region code used by the NSP interfaces.
            licenseNo: plate number.
            ownerName: registered owner name.
            vin: frame (VIN) number.
            engineNo: engine number.
            vehicleModelName: brand/model description.
        """
        self.citycode = citycode
        self.licenseNo = licenseNo
        self.ownerName = ownerName
        self.vin = vin
        self.engineNo = engineNo
        self.vehicleModelName = vehicleModelName
        # Vehicle first-registration date — hard-coded to two years ago today
        self.firstRegisterDate = str((datetime.date.today() + datetime.timedelta(days=-365 * 2)).strftime("%Y-%m-%d"))
        # Commercial-insurance policy start date (tomorrow, 00:00:00)
        self.bizBeginDate = str((datetime.date.today() + datetime.timedelta(days=1)).strftime("%Y-%m-%d 00:00:00"))
        # Compulsory-insurance start date (tomorrow, 00:00:00)
        self.forceBeginDate = str((datetime.date.today() + datetime.timedelta(days=1)).strftime("%Y-%m-%d 00:00:00"))
        # Session id: current timestamp followed by an 18-digit random number
        self.SessionID = str(datetime.datetime.now().strftime("%Y%m%d%H%M%S")) + str(
            random.randint(100000000000000000, 999999999999999999))
        self.AddTime = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        self.client = Client(self.TEST_URL, location=self.TEST_URL, cache=None)
        self.my1030 = None  # values submitted to interface 1030
        self.my1031 = None  # dict parsed from the 1031 response
        self.my1031s = None  # values submitted to interface 1031
        self.my1032 = None  # dict parsed from the 1032 response
        self.my1033 = None  # dict parsed from the 1033 response (order no, premium)
        self.my1038 = None  # 1038 response: proposal (application) number
        self.my1033bx = None  # coverage values submitted to interface 1033
        self.ownerid = makeNew()  # auto-generated id-card number
    # Interface 1030: basic-information entry
    def Get_1030(self):
        # cdq20150907 modified
        SendVal = (
            self.citycode,
            self.licenseNo,
            '0'
        )
        X = self.Send(Interface=1030, SendVal=SendVal)  # NOTE(review): result unused — 1030 is posted twice
        self.my1030 = SendVal
        return self.Send(Interface=1030, SendVal=SendVal)
    # Interface 1031: vehicle-model query
    def Get_1031(self, **pam):
        """
        Vehicle-model query interface.
        Args:
            pam (dict): extra parameters (currently unused)
        Returns:
            str: raw XML response from the service
        """
        SendVal = (
            self.citycode,  # region code
            self.licenseNo,  # plate number
            self.ownerName,  # registered owner
            self.engineNo,  # engine number
            self.vin,  # frame (VIN) number
            self.vehicleModelName,  # brand/model description
            self.firstRegisterDate,  # first registration date
        )
        self.my1031s = SendVal
        self.RT = self.Send(Interface=1031, SendVal=SendVal)
        self.my1031 = xmltodict.parse(self.RT, encoding='utf-8')
        return self.RT
    # Interface 1032: underwriting-plan information
    def Get_1032(self, **pam):
        # cdq20150907 modified
        DBAction = BXDBAction()
        try:
            # Normal case: the 1031 reply carries a LIST of candidate models;
            # take the first one.
            SendVal = (
                self.my1031s[0],  # region code
                self.bizBeginDate,  # commercial policy start date
                time.strftime('%Y-%m-%d'),  # entry date
                '',  # service agent
                '',  # organisation code
                '',  # business source
                '',  # service code
                self.my1031s[1],  # plate number
                self.my1030[2],  # whether the car is new
                self.my1031s[2],  # owner name
                '',  #
                self.my1031s[3],  # engine number
                self.my1031s[4],  # frame (VIN) number
                self.my1031s[6],  # first registration date
                self.my1031['INSUREQRET']['VHLMODEL_LIST']['VHLMODEL'][0]['C_VEHICLE_CODE'],  # model code
                self.my1031['INSUREQRET']['VHLMODEL_LIST']['VHLMODEL'][0]['C_MODEL_DESC'],  # model description
                self.my1031['INSUREQRET']['VHLMODEL_LIST']['VHLMODEL'][0]['C_VEHICLE_BRAND'],  # brand name
                self.my1031['INSUREQRET']['VHLMODEL_LIST']['VHLMODEL'][0]['C_VEHICLE_NAME'],  # model name
                self.my1031['INSUREQRET']['VHLMODEL_LIST']['VHLMODEL'][0]['C_VEHICLE_FAMILY'],  # series name
                self.my1031['INSUREQRET']['VHLMODEL_LIST']['VHLMODEL'][0]['C_IMPORT_FLAG'],  # category (domestic/imported/joint venture)
                self.my1031['INSUREQRET']['VHLMODEL_LIST']['VHLMODEL'][0]['N_LIMIT_LOAD_PERSON'],  # approved passenger capacity
                self.my1031['INSUREQRET']['VHLMODEL_LIST']['VHLMODEL'][0]['C_WHOLE_WEIGHT'],  # curb weight
                self.my1031['INSUREQRET']['VHLMODEL_LIST']['VHLMODEL'][0]['N_VEHICLE_TONNAGE'],  # load capacity
                self.my1031['INSUREQRET']['VHLMODEL_LIST']['VHLMODEL'][0]['C_EXT_MSR'],  # engine displacement
                self.my1031['INSUREQRET']['VHLMODEL_LIST']['VHLMODEL'][0]['C_MARKET_TIMESTAMP'],  # market launch year
                self.my1031['INSUREQRET']['VHLMODEL_LIST']['VHLMODEL'][0]['N_VEHICLE_PRICE'],  # new-car purchase price
            )
        except KeyError:
            # Single-match case: xmltodict yields a plain dict (no list index).
            SendVal = (
                self.my1031s[0],  # region code
                self.bizBeginDate,  # commercial policy start date
                time.strftime('%Y-%m-%d'),  # entry date
                '',  # service agent
                '',  # organisation code
                '',  # business source
                '',  # service code
                self.my1031s[1],  # plate number
                self.my1030[2],  # whether the car is new
                self.my1031s[2],  # owner name
                '',  #
                self.my1031s[3],  # engine number
                self.my1031s[4],  # frame (VIN) number
                self.my1031s[6],  # first registration date
                self.my1031['INSUREQRET']['VHLMODEL_LIST']['VHLMODEL']['C_VEHICLE_CODE'],  # model code
                self.my1031['INSUREQRET']['VHLMODEL_LIST']['VHLMODEL']['C_MODEL_DESC'],  # model description
                self.my1031['INSUREQRET']['VHLMODEL_LIST']['VHLMODEL']['C_VEHICLE_BRAND'],  # brand name
                self.my1031['INSUREQRET']['VHLMODEL_LIST']['VHLMODEL']['C_VEHICLE_NAME'],  # model name
                self.my1031['INSUREQRET']['VHLMODEL_LIST']['VHLMODEL']['C_VEHICLE_FAMILY'],  # series name
                self.my1031['INSUREQRET']['VHLMODEL_LIST']['VHLMODEL']['C_IMPORT_FLAG'],  # category (domestic/imported/joint venture)
                self.my1031['INSUREQRET']['VHLMODEL_LIST']['VHLMODEL']['N_LIMIT_LOAD_PERSON'],  # approved passenger capacity
                self.my1031['INSUREQRET']['VHLMODEL_LIST']['VHLMODEL']['C_WHOLE_WEIGHT'],  # curb weight
                self.my1031['INSUREQRET']['VHLMODEL_LIST']['VHLMODEL']['N_VEHICLE_TONNAGE'],  # load capacity
                self.my1031['INSUREQRET']['VHLMODEL_LIST']['VHLMODEL']['C_EXT_MSR'],  # engine displacement
                self.my1031['INSUREQRET']['VHLMODEL_LIST']['VHLMODEL']['C_MARKET_TIMESTAMP'],  # market launch year
                self.my1031['INSUREQRET']['VHLMODEL_LIST']['VHLMODEL']['N_VEHICLE_PRICE'],  # new-car purchase price
            )
        finally:
            try:
                self.X = self.Send(Interface=1032, SendVal=SendVal)
                self.my1032 = xmltodict.parse(self.X, encoding='utf-8')
                # Persist key quote figures for later interfaces to reuse.
                DBAction.CreateTBSJinfo(licenseno=self.licenseNo,
                                        order_id=self.my1032['INSUREQRET']['BASE']['C_ORDER_NO'],
                                        biz_begin_date=self.bizBeginDate,
                                        biz_end_date='',
                                        traff_begin_date='',
                                        traff_end_date='',
                                        cs_traff_amt=self.my1032['INSUREQRET']['KIND_LIST']['KIND'][0]['N_DEFAULT_AMT'],
                                        dq_traff_amt=self.my1032['INSUREQRET']['KIND_LIST']['KIND'][4]['N_DEFAULT_AMT'],
                                        zr_traff_amt=self.my1032['INSUREQRET']['KIND_LIST']['KIND'][11]['N_DEFAULT_AMT'],
                                        first_register_date=self.firstRegisterDate
                                        )
                return self.X
            except:
                # Message means "the problem occurred in interface 1031";
                # printed GBK-encoded for a Chinese Windows console.
                a = ('问题出现在1031接口')
                print(a.encode('GBK') + (self.RT).encode('GBK'))
    # Interface 1033: vehicle quote
    def Get_1033(self,
                 licenseNo='',
                 CHESHUN='',  # own-damage (vehicle loss) cover flag
                 SANZHE='1',  # third-party liability flag
                 SHIJI='',  # driver liability flag
                 CHENGKE='',  # passenger liability flag
                 DAOQIANG='',  # theft cover flag
                 ZIRAN='',  # spontaneous-combustion cover flag
                 BOLI='',  # glass-breakage cover flag
                 DAOQIANG_BJ='',  # deductible waiver: theft
                 CHESHUN_BJ='',  # deductible waiver: own-damage
                 SANZHE_BJ='',  # deductible waiver: third-party
                 SHIJI_BJ='',  # deductible waiver: driver
                 CHENGKE_BJ='',  # deductible waiver: passenger
                 BAOE_SZ='50000',  # third-party sum insured
                 BAOE_CK='',  # passenger-liability sum insured
                 BAOE_SJ='',  # driver-liability sum insured
                 HUAHEN='',  # scratch cover flag
                 SHESHUI='',  # water-wading cover flag
                 FUJIA_BJ =''  # additional deductible waiver
                 ):
        """Build the coverage tuple for interface 1033 and return the quote.

        Flags arrive as '0'/'1' strings; '0' is normalised to '' (not
        selected).  Kind codes (030006, 030018, ...) are the insurer's
        product codes for each cover.  Returns the dict built by ReArr().
        """
        DBAction = BXDBAction()
        CARINFO,TBSJINFO = DBAction.GetTBSJinfo(licenseno=licenseNo, bxtype="cic")
        # Normalise '0' flags to '' and derive per-cover sums insured.
        HUAHEN = (HUAHEN == '0') and ''or HUAHEN
        SHESHUI = (SHESHUI == '0') and ''or SHESHUI
        CHESHUN = (CHESHUN == '0') and '' or CHESHUN
        BAOE_CHESUN = (CHESHUN == '1') and TBSJINFO['cs_traff_amt'] or ''
        SANZHE = (SANZHE == '0') and '' or SANZHE
        SHIJI = (SHIJI == '0') and '' or SHIJI
        CHENGKE = (CHENGKE == '0') and '' or CHENGKE
        DAOQIANG = (DAOQIANG == '0') and '' or DAOQIANG
        BAOE_DABQIANG = (DAOQIANG == '1') and TBSJINFO['dq_traff_amt'] or ''
        ZIRAN = (ZIRAN == '0') and '' or ZIRAN
        BAOE_ZIRAN = (ZIRAN == '1') and TBSJINFO['zr_traff_amt'] or ''
        BOLI = (BOLI == '0') and '' or BOLI
        DAOQIANG_BJ = (DAOQIANG_BJ == '0') and '' or DAOQIANG_BJ
        CHESHUN_BJ = (CHESHUN_BJ == '0') and '' or CHESHUN_BJ
        SANZHE_BJ = (SANZHE_BJ == '0') and '' or SANZHE_BJ
        SHIJI_BJ = (SHIJI_BJ == '0') and '' or SHIJI_BJ
        CHENGKE_BJ = (CHENGKE_BJ == '0') and '' or CHENGKE_BJ
        BAOE_BJ_CHESUN = (CHESHUN_BJ == '1') and '0' or ''
        BAOE_BJ_CHENGKE = (CHENGKE_BJ == '1') and '0' or ''
        BAOE_BJ_SIJI = (SHIJI_BJ == '1') and '0' or ''
        BAOE_BJ_DAOQIANG = (DAOQIANG_BJ == '1') and '0' or ''
        BAOE_BJ_SANZHE = (SANZHE_BJ == '1') and '0' or ''
        BAOE_BOLI = (BOLI == '1') and '0' or ''
        BAOE_CK = (CHENGKE == '1') and BAOE_CK or ''  # passenger sum insured
        BAOE_SZ = (SANZHE == '1') and BAOE_SZ or ''  # third-party sum insured
        BAOE_SJ = (SHIJI == '1') and BAOE_SJ or ''  # driver sum insured
        # Insurer kind codes per selected cover.
        CODE_CHESHUN = (CHESHUN == '1') and '030006' or ''  # own-damage
        CODE_ZERENSANZHE = (SANZHE == '1') and '030018' or ''  # third-party
        CODE_DAOQIANG = (DAOQIANG == '1') and '030059' or ''  # theft
        CODE_ZERENSIJI = (SHIJI == '1') and '030070' or ''  # driver liability
        CODE_ZERENCK = (CHENGKE == '1') and '030072' or ''  # passenger liability
        CODE_BOLI = (BOLI == '1') and '030004' or ''  # glass breakage
        CODE_ZIRAN = (ZIRAN == '1') and '030012' or ''  # spontaneous combustion
        CODE_BJ_CHESHUN = (CHESHUN_BJ == '1') and '031901' or ''  # waiver: own-damage
        CODE_BJ_SHANZHE = (SANZHE_BJ == '1') and '031902' or ''  # waiver: third-party
        CODE_BJ_DAOQIANG = (DAOQIANG_BJ == '1') and '030106' or ''  # waiver: theft
        CODE_BJ_SIJI = (SHIJI_BJ == '1') and '033531' or ''  # waiver: driver
        CODE_BJ_CHENGKE = (CHENGKE_BJ == '1') and '030072' or ''  # waiver: passenger — NOTE(review): 030072 duplicates the passenger-liability code; ReArr expects 033532 — confirm
        SendVal = (
            CARINFO['citycode'],  # region code
            TBSJINFO['order_id'],  # order number
            TBSJINFO['biz_begin_date'],  # commercial policy start
            '',  # commercial policy end
            '',  # compulsory insurance start
            '',  # compulsory insurance end
            '1',  # whether province-agreed
            '1',
            CODE_CHESHUN,  # own-damage cover
            CHESHUN,  # whether insured
            BAOE_CHESUN,
            '2',
            CODE_ZERENCK,  # passenger liability
            BAOE_CK,  # sum insured / limit (yuan)
            '3',
            CODE_ZERENSIJI,  # driver liability
            BAOE_SJ,  # sum insured / limit (yuan)
            '4',
            CODE_ZERENSANZHE,  # third-party liability
            BAOE_SZ,  # sum insured
            '5',
            CODE_BJ_SHANZHE,  # waiver: third-party
            SANZHE_BJ,  # whether insured
            BAOE_BJ_SANZHE,  #
            '6',
            CODE_DAOQIANG,  # vehicle theft cover
            DAOQIANG,
            BAOE_DABQIANG,  # insured limit
            '7',
            CODE_BJ_DAOQIANG,  # waiver: theft
            DAOQIANG_BJ,
            BAOE_BJ_DAOQIANG,
            '8',
            CODE_BJ_SIJI,  # waiver: driver
            SHIJI_BJ,
            BAOE_BJ_SIJI,
            '9',
            CODE_BJ_CHENGKE,  # waiver: passenger
            CHENGKE_BJ,
            BAOE_BJ_CHENGKE,
            '10',
            CODE_BOLI,  # glass-breakage cover
            BOLI,
            BAOE_BOLI,  # glass sum insured
            '11',
            CODE_ZIRAN,  # spontaneous-combustion cover
            ZIRAN,
            BAOE_ZIRAN,
            '12',
            CODE_BJ_CHESHUN,  # waiver: own-damage
            CHESHUN,  # NOTE(review): looks like this should be CHESHUN_BJ (the waiver flag, to match slots 5-9) — confirm against the 1033 template
            BAOE_BJ_CHESUN,
            self.ownerid,  # owner id-card number
            '',  # purchase-invoice issue date
            '',  # vehicle-origin certificate type
            '',  # vehicle-origin certificate number
            '',  # vehicle-origin certificate issue date
            CARINFO['ownername'],  # driver name
            self.ownerid,  # driving-licence number (id number auto-generated by makeNew())
            TBSJINFO['first_register_date']  # first licensing date
        )
        self.my1033bx = SendVal
        X = self.Send(Interface=1033, SendVal=SendVal)
        print(X)
        self.my1033 = xmltodict.parse(X, encoding='utf-8')
        Q = self.ReArr(X)
        return Q
    # Interface 1036: proposal-information validation
    def Get_1036(self, licenseno='',**pam):
        # cdq 20150906 modified
        DBAction = BXDBAction()
        CARINFO,TBSJINFO = DBAction.GetTBSJinfo(licenseno=licenseno, bxtype="cic")
        SendVal = (
            TBSJINFO['order_id'],  # order number
            CARINFO['ownername'],  # applicant name
            pam['C_APP_SEX'],  # applicant sex
            pam['C_APP_IDENT_TYPE'],  # applicant id-document type
            pam['C_APP_IDENT_NO'],  # applicant id-document number
            pam['C_APP_TEL'],  # applicant phone
            pam['C_APP_ADDR'],  # applicant address
            pam['C_APP_EMAIL'],  # applicant e-mail
            pam['C_APP_ZIPCODE'],  # applicant postcode
            pam['C_INSRNT_NME'],  # insured-person name
            pam['C_INSRNT_SEX'],  # insured-person sex
            pam['C_INSRNT_IDENT_TYPE'],  # insured-person id-document type
            pam['C_INSRNT_IDENT_NO'],  # insured-person id-document number
            pam['C_INSRNT_TEL'],  # insured-person phone
            pam['C_INSRNT_ADDR'],  # insured-person address
            pam['C_INSRNT_EMAIL'],  # insured-person e-mail
            pam['C_INSRNT_ZIPCODE'],  # insured-person postcode
            pam['C_CONTACT_NAME'],  # contact name
            pam['C_CONTACT_TEL'],  # contact phone
            pam['C_CONTACT_EMAIL'],  # contact e-mail
            pam['C_DELIVERY_PROVINCE'],  # delivery province code
            pam['C_DELIVERY_CITY'],  # delivery city code
            pam['C_DELIVERY_DISTRICT'],  # delivery district code
            pam['C_ADDRESS'],  # delivery address
            CARINFO['ownername'],  # registered owner
            pam['C_IDENT_TYPE'],  # id-document type
            pam['C_IDENT_NO']  # id-document number
        )
        X = self.Send(Interface=1036, SendVal=SendVal)
        return X
    # Interface 1037: proposal confirmation
    def Get_1037(self, licenseno=''):
        # cdq 20150906 modified
        DBAction = BXDBAction()
        CARINFO,TBSJINFO = DBAction.GetTBSJinfo(licenseno=licenseno, bxtype="cic")
        SendVal = (
            TBSJINFO['order_id'],  # order number
            ''  # arbitrary padding field (without it the tuple is a unicode value)
        )
        X = self.Send(Interface=1037, SendVal=SendVal)
        print X
        return X
    # Interface 1038: purchase application
    def Get_1038(self, licenseno=''):
        # cdq 20150906 modified
        DBAction = BXDBAction()
        CARINFO,TBSJINFO = DBAction.GetTBSJinfo(licenseno=licenseno, bxtype="cic")
        SendVal = (
            TBSJINFO['order_id'],  # order number
            ''  # arbitrary padding field (without it the tuple is a unicode value and Send errors)
        )
        X = self.Send(Interface=1038, SendVal=SendVal)
        self.my1038 = xmltodict.parse(X, encoding='utf-8')
        return (X)
    def Send(self, Interface, SendVal):
        """Fill the XML template for *Interface* with SendVal and post it.

        Header-authentication values are prepended to SendVal, the combined
        tuple is %-substituted into ``bxxml/zhonghua/<Interface>.xml``, and
        the result is posted through the suds SOAP client.
        """
        # Header-authentication values
        InVal = (self.USER_NAME,
                 self.USER_PSW,
                 self.SessionID,
                 self.AddTime,
                 self.CHANNELCODE,
                 )
        # Combine the substitution fields
        SendVal = InVal + SendVal
        # Open the template file
        file = WEB_ROOT + "/bxxml/zhonghua/" + str(Interface) + ".xml"
        Val = open(file).read()
        # Substitute the variables
        XMLVal = Val % SendVal
        print(XMLVal)
        # Post the XML
        response = self.client.service.getRequest(content=XMLVal)
        return response
    # Extract the quote figures from the returned XML and build a quote dict
    # cdq 20150910 modified
    def ReArr(self, data):
        Data = data.replace("\n", "")
        # Collapse whitespace between adjacent tags
        result, number = re.subn(">(\s{1,})<", "><", Data)
        TotalPremium = re.findall('<N_REAL_PRM>(.*?)</N_REAL_PRM>', result.encode('UTF-8'))
        VehicleLoss = re.findall(
            '<C_KIND_CDE>030006</C_KIND_CDE><C_KIND_NAME>机动车损失保险</C_KIND_NAME><N_AMT>(.*?)</N_AMT><N_PRM>(.*?)</N_PRM><N_BEN_PRM>(.*?)</N_BEN_PRM>',
            result.encode('UTF-8'))
        ZeRenCK = re.findall(
            '<C_KIND_CDE>030072</C_KIND_CDE><C_KIND_NAME>机动车车上人员责任保险(乘客)</C_KIND_NAME><N_SEAT_NUM>(.*?)</N_SEAT_NUM><N_AMT>(.*?)</N_AMT><N_PRM>(.*?)</N_PRM><N_BEN_PRM>(.*?)</N_BEN_PRM>',
            result.encode('UTF-8'))
        ZeRenSJ = re.findall(
            '<C_KIND_CDE>030070</C_KIND_CDE><C_KIND_NAME>机动车车上人员责任保险(司机)</C_KIND_NAME><N_SEAT_NUM>1</N_SEAT_NUM><N_AMT>(.*?)</N_AMT><N_PRM>(.*?)</N_PRM><N_BEN_PRM>(.*?)</N_BEN_PRM>',
            result.encode('UTF-8'))
        SanZhe = re.findall(
            '<C_KIND_CDE>030018</C_KIND_CDE><C_KIND_NAME>机动车第三者责任保险</C_KIND_NAME><N_AMT>(.*?)</N_AMT><N_PRM>(.*?)</N_PRM><N_BEN_PRM>(.*?)</N_BEN_PRM>',
            result.encode('UTF-8'))
        BuJiSZ = re.findall(
            '<C_KIND_CDE>031902</C_KIND_CDE><C_KIND_NAME>不计免赔特约条款(三者险)</C_KIND_NAME><N_AMT>(.*?)</N_AMT><N_PRM>(.*?)</N_PRM><N_BEN_PRM>(.*?)</N_BEN_PRM>',
            result.encode('UTF-8'))
        DaoQiang = re.findall(
            '<C_KIND_CDE>030059</C_KIND_CDE><C_KIND_NAME>机动车全车盗窃保险</C_KIND_NAME><N_AMT>(.*?)</N_AMT><N_PRM>(.*?)</N_PRM><N_BEN_PRM>(.*?)</N_BEN_PRM>',
            result.encode('UTF-8'))
        BuJiDQ = re.findall(
            '<C_KIND_CDE>030106</C_KIND_CDE><C_KIND_NAME>不计免赔特约条款(盗抢险)</C_KIND_NAME><N_AMT>(.*?)</N_AMT><N_PRM>(.*?)</N_PRM><N_BEN_PRM>(.*?)</N_BEN_PRM>',
            result.encode('UTF-8'))
        BuJiSJ = re.findall(
            '<C_KIND_CDE>033531</C_KIND_CDE><C_KIND_NAME>不计免赔特约条款(车上人员司机)</C_KIND_NAME><N_AMT>(.*?)</N_AMT><N_PRM>(.*?)</N_PRM><N_BEN_PRM>(.*?)</N_BEN_PRM>',
            result.encode('UTF-8'))
        BuJiCK = re.findall(
            '<C_KIND_CDE>033532</C_KIND_CDE><C_KIND_NAME>不计免赔特约条款(车上人员乘客)</C_KIND_NAME><N_AMT>(.*?)</N_AMT><N_PRM>(.*?)</N_PRM><N_BEN_PRM>(.*?)</N_BEN_PRM>',
            result.encode('UTF-8'))
        BUJiCS = re.findall(
            '<C_KIND_CDE>031901</C_KIND_CDE><C_KIND_NAME>不计免赔特约条款(车损险)</C_KIND_NAME><N_AMT>(.*?)</N_AMT><N_PRM>(.*?)</N_PRM><N_BEN_PRM>(.*?)</N_BEN_PRM>',
            result.encode('UTF-8'))
        BoLiPS = re.findall(
            '<C_KIND_CDE>030004</C_KIND_CDE><C_KIND_NAME>玻璃单独破碎险</C_KIND_NAME><N_AMT>(.*?)</N_AMT><N_PRM>(.*?)</N_PRM><N_BEN_PRM>(.*?)</N_BEN_PRM>',
            result.encode('UTF-8'))
        ZiRan = re.findall(
            '<C_KIND_CDE>030012</C_KIND_CDE><C_KIND_NAME>自燃损失险</C_KIND_NAME><N_AMT>(.*?)</N_AMT><N_PRM>(.*?)</N_PRM><N_BEN_PRM>(.*?)</N_BEN_PRM>',
            result.encode('UTF-8'))
        # Default quote dict; Chinese placeholder values mean
        # "no gift yet" (暂无礼品) and "not yet supported" (暂不支持).
        ReDict = {'TotalPremium': ' 0',  # total premium
                  'BizPremium': ' 0',  # commercial-insurance total
                  'ForePremium': '0',  # compulsory-insurance total
                  'InsuranceGift': '暂无礼品',  # insurer's gift
                  'klbGift': '暂无礼品',  # kalaibao gift
                  'bizPremium': '0',  # commercial-insurance total
                  'VehicleLoss': '0',  # own-damage cover
                  'SanZhe': '0',  # third-party liability
                  'DaoQiang': '0',  # theft cover
                  'ZeRenSJ': '0',  # driver liability
                  'ZeRenCK': '0',  # passenger liability
                  'BoLiPS': '0',  # glass-breakage cover
                  'HuaHen': '暂不支持',  # scratch cover
                  'ZiRan': '0',  # spontaneous-combustion cover
                  'SheShui': '0',  # water-wading cover
                  'BuJiMPZJ': '0',  # deductible-waiver total
                  'BUJiCS': '0',  # waiver: own-damage
                  'BuJiSZ': '0',  # waiver: third-party
                  'BuJiDQ': '0',  # waiver: theft
                  'BuJiSJ': '0',  # waiver: driver
                  'BuJiCK': '0',  # waiver: passenger
                  'forcePremium': '暂不支持',  # compulsory total
                  'forcePre': '暂不支持',  # compulsory insurance
                  'VehTaxPremium': '暂不支持',  # vehicle-and-vessel tax
                  'totalPremium': '0',  # grand total
                  'ORDER_ID': '0'  # order number
                  }
        # Each field is filled best-effort; missing matches keep the default.
        try:
            ReDict['ORDER_ID'] = self.my1032['INSUREQRET']['BASE']['C_ORDER_NO']
        except:
            pass
        try:
            ReDict['DaoQiang'] = DaoQiang[0][2]
        except:
            pass
        try:
            ReDict['BuJiSZ'] = BuJiSZ[0][2]
        except:
            pass
        try:
            ReDict['BuJiDQ'] = BuJiDQ[0][2]
        except:
            pass
        try:
            ReDict['BuJiSJ'] = BuJiSJ[0][2]
        except:
            pass
        try:
            ReDict['BuJiCK'] = BuJiCK[0][2]
        except:
            pass
        try:
            ReDict['BUJiCS'] = BUJiCS[0][2]
        except:
            pass
        try:
            ReDict['BuJiMPZJ'] = float(BuJiSZ[0][2]) + float(BuJiDQ[0][2]) + float(BuJiSJ[0][2]) + float(
                BuJiCK[0][2]) + float(BUJiCS[0][2])
        except:
            pass
        try:
            ReDict['BoLiPS'] = BoLiPS[0][2]
        except:
            pass
        try:
            ReDict['ZiRan'] = ZiRan[0][2]
        except:
            pass
        try:
            ReDict['TotalPremium'] = TotalPremium
        except:
            pass
        try:
            ReDict['BizPremium'] = TotalPremium
        except:
            pass
        try:
            ReDict['VehicleLoss'] = VehicleLoss[0][2]
        except:
            pass
        try:
            ReDict['ZeRenCK'] = ZeRenCK[0][3]
        except:
            pass
        try:
            ReDict['ZeRenSJ'] = ZeRenSJ[0][2]
        except:
            pass
        try:
            ReDict['SanZhe'] = SanZhe[0][2]
        except:
            pass
        try:
            ReDict['bizPremium'] = TotalPremium
        except:
            pass
        try:
            ReDict['totalPremium'] = TotalPremium
        except:
            pass
        return ReDict
# Helper for querying the Sinosig (YangGuang) public vehicle-model endpoints.
# BUG FIX: the base class was the undefined name ``Object`` (capital O),
# which raises NameError the moment this module-level class statement runs;
# the builtin ``object`` is the intended base.
class KLBNet(object):
    GetCarInfoUrl = "http://chexian.sinosig.com/Net/vehicleStandard.action"
    GetCarInfoUrl1 = "http://chexian.sinosig.com/Net/vehicleModel.action"
    def GetCarInfo(self, q="", t=0, v=1):
        """POST a vehicle query and return the raw response body.

        Args:
            q: query string — plate/frame number when t == 0, otherwise an
               FGW vehicle code.
            t: 0 selects the standard-vehicle search endpoint; any other
               value selects the model-lookup endpoint.
            v: passed through as ``isGetValue`` for the model lookup.
        Returns:
            str: the raw HTTP response body.
        """
        t = int(t)
        if t == 0:
            para = {
                "limit": "0",
                "timestamp": int(time.time()) * 1000,  # epoch milliseconds
                "queryVehicle": q.upper(),
                "frameNo": "",
                "id": "",
                "frameNoFlag": "true",
            }
            postData = urllib.urlencode(para)
            req = urllib2.Request(self.GetCarInfoUrl, postData)
        else:
            postData = urllib.urlencode({"vehicleFgwCode": q, "isGetValue": v})
            req = urllib2.Request(self.GetCarInfoUrl1, postData)
        resp = urllib2.urlopen(req).read()
        return resp
|
{"/members/urls.py": ["/members/views.py"], "/ebusiness/urls.py": ["/ebusiness/views.py"], "/ClientAdmin/forms.py": ["/webadmin/models.py"], "/bxservice/ZhongHuaViews.py": ["/bxservice/zhonghua.py", "/bxservice/ansheng.py", "/bxservice/yangguang.py"], "/bxservice/urls.py": ["/bxservice/ZhongHuaViews.py", "/bxservice/views.py"], "/ClientAdmin/views.py": ["/ClientAdmin/forms.py"], "/LYZ/urls.py": ["/members/urls.py", "/klbapp/urls.py", "/web/urls.py", "/wechat/urls.py", "/webadmin/urls.py", "/bxservice/urls.py", "/ClientAdmin/urls.py", "/ebusiness/urls.py"], "/ebusiness/forms.py": ["/bxservice/models.py", "/LYZ/settings.py", "/ebusiness/models.py"], "/webadmin/forms.py": ["/webadmin/models.py"], "/klbapp/urls.py": ["/klbapp/views.py"], "/web/urls.py": ["/web/views.py"], "/webadmin/admin.py": ["/webadmin/models.py"], "/web/views.py": ["/LYZ/common.py", "/LYZ/klb_class.py", "/klbapp/appclass.py", "/bxservice/models.py", "/bxservice/common.py"], "/ClientAdmin/urls.py": ["/ClientAdmin/views.py"], "/ebusiness/templatetags/EbusinessTag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/webadmin/models.py"], "/members/models.py": ["/LYZ/settings.py", "/bxservice/models.py"], "/web/templatetags/webtag.py": ["/LYZ/klb_class.py"], "/wechat/templatetags/wechattag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/LYZ/settings.py"], "/webadmin/urls.py": ["/webadmin/views.py"], "/wechat/urls.py": ["/wechat/views.py"], "/members/admin.py": ["/members/models.py"]}
|
27,061
|
xiaoyanguoke/klb
|
refs/heads/master
|
/ClientAdmin/views.py
|
# -*- coding:utf-8 -*-
from django.shortcuts import *
from ClientAdmin.forms import AddUserForms
from django.contrib.auth.hashers import make_password, check_password
def Index(request):
return render_to_response('ClientAdmin/Index.html', {}, context_instance=RequestContext(request))
def Login(request):
return HttpResponse("")
def Logout(request):
return HttpResponse("")
def AddUser(request):
TempData = {}
if request.method == "POST":
forms = AddUserForms(request.POST)
if forms.is_valid():
In,IsSet = forms.CheckAuthenticate()
TempData.update({"isok":True,"forms":forms,"User":In})
else:
TempData.update({"forms":forms,"isok":False})
else:
forms = AddUserForms()
TempData.update({"forms":forms})
return render_to_response('ClientAdmin/AddUser.html', TempData, context_instance=RequestContext(request))
|
{"/members/urls.py": ["/members/views.py"], "/ebusiness/urls.py": ["/ebusiness/views.py"], "/ClientAdmin/forms.py": ["/webadmin/models.py"], "/bxservice/ZhongHuaViews.py": ["/bxservice/zhonghua.py", "/bxservice/ansheng.py", "/bxservice/yangguang.py"], "/bxservice/urls.py": ["/bxservice/ZhongHuaViews.py", "/bxservice/views.py"], "/ClientAdmin/views.py": ["/ClientAdmin/forms.py"], "/LYZ/urls.py": ["/members/urls.py", "/klbapp/urls.py", "/web/urls.py", "/wechat/urls.py", "/webadmin/urls.py", "/bxservice/urls.py", "/ClientAdmin/urls.py", "/ebusiness/urls.py"], "/ebusiness/forms.py": ["/bxservice/models.py", "/LYZ/settings.py", "/ebusiness/models.py"], "/webadmin/forms.py": ["/webadmin/models.py"], "/klbapp/urls.py": ["/klbapp/views.py"], "/web/urls.py": ["/web/views.py"], "/webadmin/admin.py": ["/webadmin/models.py"], "/web/views.py": ["/LYZ/common.py", "/LYZ/klb_class.py", "/klbapp/appclass.py", "/bxservice/models.py", "/bxservice/common.py"], "/ClientAdmin/urls.py": ["/ClientAdmin/views.py"], "/ebusiness/templatetags/EbusinessTag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/webadmin/models.py"], "/members/models.py": ["/LYZ/settings.py", "/bxservice/models.py"], "/web/templatetags/webtag.py": ["/LYZ/klb_class.py"], "/wechat/templatetags/wechattag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/LYZ/settings.py"], "/webadmin/urls.py": ["/webadmin/views.py"], "/wechat/urls.py": ["/wechat/views.py"], "/members/admin.py": ["/members/models.py"]}
|
27,062
|
xiaoyanguoke/klb
|
refs/heads/master
|
/ClientAdmin/templatetags/function.py
|
# -*- coding:utf-8 -*-
import re
'''
$函数名:CheckStrFormat
$介绍:检查字符串格式
$参数:str(string)
$返回:int 1邮件2手机3中文
'''
def CheckStrFormat(str):
# 判断用户登录方式:手机号,邮件地址,用户名
PhoneRegex = "^(13[0-9]|15[012356789]|17[678]|18[0-9]|14[57])[0-9]{8}$" # 手机号正则
EmailRegex = "^([a-zA-Z0-9_\.\-])+\@(([a-zA-Z0-9\-])+\.)+([a-zA-Z0-9]{2,4})+$" # 邮件正则
StrRegex = "^[\u4e00-\u9fa5]+" # 中文名普通正则
|
{"/members/urls.py": ["/members/views.py"], "/ebusiness/urls.py": ["/ebusiness/views.py"], "/ClientAdmin/forms.py": ["/webadmin/models.py"], "/bxservice/ZhongHuaViews.py": ["/bxservice/zhonghua.py", "/bxservice/ansheng.py", "/bxservice/yangguang.py"], "/bxservice/urls.py": ["/bxservice/ZhongHuaViews.py", "/bxservice/views.py"], "/ClientAdmin/views.py": ["/ClientAdmin/forms.py"], "/LYZ/urls.py": ["/members/urls.py", "/klbapp/urls.py", "/web/urls.py", "/wechat/urls.py", "/webadmin/urls.py", "/bxservice/urls.py", "/ClientAdmin/urls.py", "/ebusiness/urls.py"], "/ebusiness/forms.py": ["/bxservice/models.py", "/LYZ/settings.py", "/ebusiness/models.py"], "/webadmin/forms.py": ["/webadmin/models.py"], "/klbapp/urls.py": ["/klbapp/views.py"], "/web/urls.py": ["/web/views.py"], "/webadmin/admin.py": ["/webadmin/models.py"], "/web/views.py": ["/LYZ/common.py", "/LYZ/klb_class.py", "/klbapp/appclass.py", "/bxservice/models.py", "/bxservice/common.py"], "/ClientAdmin/urls.py": ["/ClientAdmin/views.py"], "/ebusiness/templatetags/EbusinessTag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/webadmin/models.py"], "/members/models.py": ["/LYZ/settings.py", "/bxservice/models.py"], "/web/templatetags/webtag.py": ["/LYZ/klb_class.py"], "/wechat/templatetags/wechattag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/LYZ/settings.py"], "/webadmin/urls.py": ["/webadmin/views.py"], "/wechat/urls.py": ["/wechat/views.py"], "/members/admin.py": ["/members/models.py"]}
|
27,063
|
xiaoyanguoke/klb
|
refs/heads/master
|
/members/views.py
|
# -*- coding:utf-8 -*-
from django.shortcuts import *
from LYZ.settings import *
from django.contrib.auth.hashers import make_password, check_password
from django.contrib.auth.decorators import login_required
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from LYZ.common import *
from LYZ.klb_class import *
from klbapp.appclass import *
from models import *
import random, datetime, time
from DjangoCaptcha import Captcha
import qrcode
import hashlib
from cStringIO import StringIO
import urllib, urllib2, json
def Default_Index(request):
return HttpResponse("☀")
'''
@用户中心首页
'''
def Index(request):
# m = make_password("haha",None,"unsalted_md5")
# print(m)
# print(check_password("haha",m))
if not request.user.is_authenticated():
return HttpResponseRedirect("/members/login/")
return render_to_response('members/member_index.html', {}, context_instance=RequestContext(request))
'''
@用户登录
'''
def Login(request):
Next = request.REQUEST.get("next", "")
print(Next)
if request.user.is_authenticated():
return HttpResponseRedirect("/members/")
if request.method == 'POST':
UserName = request.REQUEST.get("username", "")
Password = request.REQUEST.get("password", "")
user = authenticate(username=UserName, password=Password)
if user is not None:
login(request, user)
if validateEmail(UserName):
UserName = UserName.split("@")[0]
request.session['klb_username'] = UserName
print(request.user.id)
print(Next)
J = Json_Code(data='', msg="登录成功", error=0, url=Next)
return HttpResponse(J)
else:
J = Json_Code(data='', msg="登录失败,请检查输入是否有误", error=1)
return HttpResponse(J)
else:
return render_to_response('members/member_login.html', {}, context_instance=RequestContext(request))
'''
@用户注册
'''
def Reg(request):
if request.user.is_authenticated():
return HttpResponseRedirect("/members/")
if request.method == 'POST':
Action = request.REQUEST.get("action", "")
# 手机注册
if Action == "mobile":
Mobile = request.REQUEST.get("Mobile", "")
ValidatedCode = request.REQUEST.get("ValidatedCode", "")
Password = request.REQUEST.get("Password", "")
InputConfirmPassword = request.REQUEST.get("InputConfirmPassword", "")
RecomCode = request.REQUEST.get("u", "")
# 判断手机
if phonecheck(Mobile) == False:
J = Json_Code(data='', msg="手机号不能为空", error=1)
return HttpResponse(J)
# 判断密码
if Password == "" or len(Password) < 6 or Password <> InputConfirmPassword:
J = Json_Code(data='', msg="密码不能少于6位,且两次输入必须一致", error=1)
return HttpResponse(J)
# 检查验证码
if _CheckVcode(Mobile, ValidatedCode) == -1:
J = Json_Code(data='', msg="验证码不正确", error=1)
return HttpResponse(J)
else:
MCode = _CheckVcode(Mobile, ValidatedCode)
if MCode == 1:
J = Json_Code(data='', msg="验证码已经使用过", error=1)
return HttpResponse(J)
# 检查推荐码
if RecomCode <> "":
Is_recomcode = recomcode.objects.filter(code=RecomCode).exists()
if Is_recomcode == False:
J = Json_Code(data='', msg="推荐码不存在", error=1)
return HttpResponse(J)
if User.objects.filter(phone=Mobile).exists():
J = Json_Code(data='', msg="该手机已经被注册,请不要重复注册", error=1)
return HttpResponse(J)
CreateUser = User.objects.create_user(username=Mobile,
password=Password,
phone=Mobile,
nick_name=Mobile,
)
CreateUser.save()
request.session['klb_username'] = Mobile
user = authenticate(username=Mobile, password=Password)
login(request, user)
_InSetRecomCode(CreateUser.id)
J = Json_Code(data='', msg="注册成功", error=0)
return HttpResponse(J)
# 邮箱注册
if Action == "email":
Email = request.REQUEST.get("Email", "")
Password = request.REQUEST.get("Password", "")
InputConfirmPassword = request.REQUEST.get("InputConfirmPassword", "")
RecomCode = request.REQUEST.get("RecomCode", "")
ValidCode = request.REQUEST.get("ValidCode", "")
if validateEmail(Email) == False:
J = Json_Code(data='', msg="邮件地址不正确", error=1)
return HttpResponse(J)
IS_EmailSet = User.objects.filter(email=Email).exists()
if IS_EmailSet:
J = Json_Code(data='', msg="该邮件地址已经存在,请更换邮件地址", error=1)
return HttpResponse(J)
if len(Password) < 6 or len(Password) > 50:
J = Json_Code(data='', msg="密码长度不能少于6位", error=1)
return HttpResponse(J)
if Password <> InputConfirmPassword:
J = Json_Code(data='', msg="两次密码输入不一致", error=1)
return HttpResponse(J)
if RecomCode <> "":
Is_recomcode = recomcode.objects.filter(code=RecomCode).exists()
if Is_recomcode == False:
J = Json_Code(data='', msg="推荐码不存在", error=1)
return HttpResponse(J)
CreateUser = User.objects.create_user(username=Email,
password=Password,
email=Email,
nick_name=Email,
)
CreateUser.save()
user = authenticate(username=Email, password=Password)
login(request, user)
if validateEmail(Email):
Email = Email.split("@")[0]
request.session['klb_username'] = Email
_InSetRecomCode(CreateUser.id)
J = Json_Code(data='', msg="注册成功", error=0)
return HttpResponse(J)
else:
return render_to_response('members/member_reg.html', {}, context_instance=RequestContext(request))
'''
@用户退出
'''
def Logout_View(request):
logout(request)
request.session["klb_username"] = ""
return HttpResponseRedirect("/members/login/")
'''
@订单详情
'''
def OrderDetails(request, t=''):
if t == 'paid':
return render_to_response('members/member_orderdetails.html', {}, context_instance=RequestContext(request))
elif t == 'non_payment':
return render_to_response('members/member_orderdetails.html', {}, context_instance=RequestContext(request))
else:
return render_to_response('members/member_orderdetails.html', {}, context_instance=RequestContext(request))
'''
@用户帮助
'''
def Help(request):
return HttpResponse("help")
'''
@用户基本资料设置
'''
@login_required
def Setting(request, action=''):
UClass = UserClass()
db = {}
if action == 'info':
ErrorMsg = {}
if request.method == "POST":
uid = request.user.id
username = request.REQUEST.get("username", "")
real_name = request.REQUEST.get("real_name", "")
sex = request.REQUEST.get("sex", "")
email = request.REQUEST.get("email", "")
idcard = request.REQUEST.get("idcard", "")
phone = request.REQUEST.get("phone", "")
ver_code = request.REQUEST.get("ver_code", "")
addr = request.REQUEST.get("addr", "")
if phone <> "" and ver_code <> "":
CV = _CheckVcode(phone, ver_code)
if CV == -1:
ErrorMsg = {"phone": "手机验证码不正确"}
elif CV == 1:
ErrorMsg = {"phone": "手机验证码已经使用"}
elif email <> "":
pass
else:
UClass.SetUser(
uid=uid,
username=username,
real_name=real_name,
sex=sex,
email=email,
phone=phone,
idcard=idcard,
addr=addr
)
db.update(ErrorMsg)
return render_to_response('members/member_setting_info.html', {}, context_instance=RequestContext(request))
elif action == 'security':
return render_to_response('members/member_setting_security.html', {}, context_instance=RequestContext(request))
elif action == 'message':
return render_to_response('members/member_setting_message.html', {}, context_instance=RequestContext(request))
else:
return render_to_response('members/member_setting_info.html', {}, context_instance=RequestContext(request))
'''
@用户推广中心
'''
def DiffuseCente(request):
pass
'''
'''
def TuiJianMa(request):
action = request.REQUEST.get("a", "")
if not request.user.is_authenticated():
J = Json_Code(data='', msg="没有权限", error=1)
return HttpResponse(J)
else:
if action <> "set":
try:
# 推荐码存在
Rcode = recomcode.objects.get(user_id=request.user.id)
C = Rcode.code
except ObjectDoesNotExist:
# 推荐码不存在
C = random.randint(100000, 999999)
In = recomcode(user_id=request.user.id, code=C)
In.save()
J = Json_Code(data={"code": C}, msg="推荐码", error=0)
return HttpResponse(J)
else:
time.sleep(1)
NewCode = request.REQUEST.get("code", "")
if len(NewCode) < 6 or len(NewCode) > 60:
J = Json_Code(data='', msg="推荐码不能少于6位,大于20位", error=1)
return HttpResponse(J)
elif recomcode.objects.filter(code=NewCode).exists():
J = Json_Code(data='', msg="推荐码已经被使用", error=1)
return HttpResponse(J)
else:
print(NewCode)
In = recomcode.objects.get(user_id=request.user.id)
In.code = NewCode
In.save()
# 更新短网址
klbcode = KLBCode()
Curl = "http://" + request.get_host() + "/members/reg/?u=" + klbcode.encode(NewCode)
print(Curl)
para = {
"url": Curl,
"alias": "klb_" + NewCode
}
postData = urllib.urlencode(para)
req = urllib2.Request("http://dwz.cn/create.php", postData)
resp = urllib2.urlopen(req).read()
M = json.loads(resp)
if M['status'] <> 0:
J = Json_Code(data='', msg=M["err_msg"], error=1)
return HttpResponse(J)
else:
In = recomcode.objects.get(user_id=request.user.id)
In.dwz = M['tinyurl']
In.save()
J = Json_Code(data={"code": NewCode, "url": M['tinyurl']}, msg="设置成功", error=0)
return HttpResponse(J)
'''
我的车
'''
@login_required
def MyCar(request):
EchoJson = PrintJson()
MC = MyCarClass()
uid = request.user.id
if request.method == 'POST':
# 判断执行动作是否为绑定
action = request.REQUEST.get("a", "")
if action == "bind":
chepai = request.REQUEST.get("chepai", "")
carusername = request.REQUEST.get("carusername", "")
vin = request.REQUEST.get("vin", "")
fadongji = request.REQUEST.get("fadongji", "")
if chepai == "" or carusername == "" or vin == "" or fadongji == "":
J = EchoJson.echo(msg="请输入完整的信息", error=1)
return HttpResponse(J)
if MC.CarIsSet(c=chepai, vin=vin):
J = EchoJson.echo(msg="该车已经绑定", error=1)
return HttpResponse(J)
NewId = MC.CreateCar(
uid=uid,
chepai=chepai,
carusername=carusername,
vin=vin,
fadongji=fadongji,
)
if NewId == False:
J = EchoJson.echo(msg="VIN号码解析错误", error=1)
return HttpResponse(J)
else:
J = EchoJson.echo(msg="添加成功", error=0, data={"id": NewId})
return HttpResponse(J)
else:
J = EchoJson.echo(msg="参数不正确", error=1)
return HttpResponse(J)
else:
C = MC.GetCar(uid=uid)
db = {"car": C}
return render_to_response('members/member_mycar.html', db, context_instance=RequestContext(request))
'''
汽车召回
'''
@login_required
def CarRecall(request):
EchoJson = PrintJson()
MC = MyCarClass()
Log = MC.ReCallLog(uid=request.user.id)
db = {"Log": Log}
if request.method == "POST":
vin = request.REQUEST.get("vin", "")
year = request.REQUEST.get("year", "")
if vin == "" or year == "":
pass
else:
ReCall = MC.ReCall(vin=vin, year=year, uid=request.user.id)
db.update({"ReCall": ReCall})
GetMycar = {"MyCar": MC.GetCar(uid=request.user.id)}
db.update(GetMycar)
return render_to_response('members/member_recall.html', db, context_instance=RequestContext(request))
'''
@会员升级
'''
def UpDateToVip(request):
return render_to_response('members/member_updatetovip.html', {}, context_instance=RequestContext(request))
'''
@头像上传
'''
def ImgUpload(request):
if not request.user.is_authenticated():
J = Json_Code(data='', msg="没有权限", error=1)
return HttpResponse(J)
else:
if request.method == 'POST':
img = request.FILES.get("img", "")
print(img.size)
uid = request.user.id
print(uid)
upimg = photo(user_id=uid, image=img)
upimg.save()
print(upimg.image)
print(upimg.thumbnail)
print("http://" + request.get_host() + "/media/" + str(upimg.image))
data = {"image": str(upimg.image), "thumbnail": str(upimg.thumbnail)}
J = Json_Code(data=data, msg="上传成功", error=0)
return HttpResponse(J)
else:
return render_to_response('members/member_upload.html', {}, context_instance=RequestContext(request))
'''
@获取验证码
'''
def GetVcode(request):
phone = request.REQUEST.get("phone", "")
IP = _getIP(request)
if phonecheck(phone) == False:
J = Json_Code(data='', msg="手机号码错误", error=1)
return HttpResponse(J)
else:
Code = random.randint(1000, 9999)
Is_set = sendsms.objects.filter(phone=phone).exists()
if Is_set:
Ycode = sendsms.objects.filter(phone=phone)
if Ycode.values()[0]["is_active"] == 1:
J = Json_Code(data='', msg="该手机已经注册,请更换手机号!", error=1)
return HttpResponse(J)
if Ycode.values()[0]["sendnum"] > 5:
J = Json_Code(data='', msg="该手机号已经超过发送次数,请联系管理员", error=1)
return HttpResponse(J)
Xtime = datetime.datetime.now()
Ytime = Ycode.values()[0]["addtime"]
Stime = (Xtime - Ytime).seconds
if Stime < 180:
Msg = "请%s秒后重试" % (180 - Stime)
J = Json_Code(data='', msg=Msg, error=1)
return HttpResponse(J)
else:
N = sendsms.objects.get(id=Ycode.values()[0]["id"])
N.validated_code = Code
N.sendnum = N.sendnum + 1
N.save()
else:
CodeCreate = sendsms(phone=phone, validated_code=Code, sendip=IP)
CodeCreate.save()
sms(phone, Code)
J = Json_Code(data='', msg="验证码发送成功", error=0)
return HttpResponse(J)
'''
'''
def ValidCode(request):
mod = request.REQUEST.get("a", "")
if mod <> "check":
ca = Captcha(request)
# ca.words = ['hello','world','helloworld']
ca.type = 'number'
# ca.type = 'word'
ca.img_width = 140
ca.img_height = 30
return ca.display()
else:
_code = request.GET.get('code')
ca = Captcha(request)
if ca.check(_code):
J = Json_Code(data='', msg="验证成功", error=0)
else:
J = Json_Code(data='', msg="验证失败", error=1)
return HttpResponse(J)
'''
@获取IP
'''
def _getIP(request):
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
if x_forwarded_for:
ip = x_forwarded_for.split(',')[-1].strip()
else:
ip = request.META.get('REMOTE_ADDR')
return ip
'''
@检查验证码
'''
def _CheckVcode(p, c):
CH = sendsms.objects.filter(phone=p, validated_code=c).exists()
if CH:
CHDATA = sendsms.objects.filter(phone=p, validated_code=c).values()
Code = CHDATA[0]['is_active']
return Code
else:
return -1
def _InSetRecomCode(uid):
C = random.randint(100000, 999999)
if recomcode.objects.filter(user_id=uid, code=C).exists():
C = _InSetRecomCode(uid)
else:
In = recomcode(user_id=uid, code=C)
In.save()
return C
def GenerateQrcode(request):
a = request.REQUEST.get("a","")
d = request.REQUEST.get("d","")
data = request.REQUEST.get("s", "")
if a == "encode":
if data <>"":
try:
Code = KLBCode()
data = Code.decode(data)
except:
pass
img = qrcode.make(data)
buf = StringIO()
img.save(buf)
image_stream = buf.getvalue()
if d=="download":
response = HttpResponse(image_stream,mimetype='application/octet-stream')
response['Content-Disposition'] = 'attachment; filename=%s' %str(int(time.time()))+'.png'
else:
response = HttpResponse(image_stream, content_type="image/png")
return response
# 微信登录
def WechatLogin(request):
OA = KLBOAuth()
code = request.REQUEST.get("code", "")
a = request.REQUEST.get("a","")
KCode = KLBCode()
Action = KCode.decode(a)
if code == "" or code == None:
#如果没有code,判断
if Action=="ebusiness":
return HttpResponse("<script>alert('操作错误!');history.back();</script>")
else:
return render_to_response('members/member_login.html', {}, context_instance=RequestContext(request))
params = urllib.urlencode(
{'appid': OPEN_WECHAT_APPID, 'secret': OPEN_WECHAT_APPSECRET, "code": code, "grant_type": "authorization_code"})
Req_Url = "https://api.weixin.qq.com/sns/oauth2/access_token?%s" % params
wechat_json = urllib.urlopen(Req_Url).read()
wechat_array = json.loads(wechat_json)
access_token = wechat_array['access_token']
openid = wechat_array['openid']
params = urllib.urlencode({'access_token': access_token, 'openid': openid})
UserInfoUrl = "https://api.weixin.qq.com/sns/userinfo?%s" % (params)
wechat_json = urllib.urlopen(UserInfoUrl).read()
wechat_array = json.loads(wechat_json)
try:
GetUser = wechat.objects.get(openid=openid)
UserInfo = User.objects.get(id=GetUser.user_id)
user = authenticate(username=UserInfo.username,password="klb@weixin")
login(request, user)
except ObjectDoesNotExist:
OA.CreateUser_Wechat(
request=request,
openid=openid,
nickname=wechat_array['nickname'],
sex=wechat_array['sex'],
language=wechat_array['language'],
city=wechat_array['city'],
country=wechat_array['country'],
province=wechat_array['province'],
headimgurl=wechat_array['headimgurl'],
unionid=wechat_array['unionid'],
wechat=True,
reopenid=False
)
if Action=="ebusiness":
ReURL= dict(request.REQUEST.items())
ReGo = "/ebusiness/editInfo/?"+urllib.urlencode(ReURL)
return HttpResponsePermanentRedirect(ReGo)
else:
return HttpResponsePermanentRedirect("/members/")
@login_required
def pay(request):
BANK = (
"ABC", "BCCB", "BCM", "BOCSH", "BOS", "BRCB", "CCB", "CEB", "CIB", "CMB", "CMBC", "CNCB", "GDB", "HXB", "HZB", "NBCB",
"PAB", "PSBC", "SPBD", "SRCB", "OTHER")
uid = request.user.id
MerNo = "183871" # 商户ID
MD5KEY = "_zxtVhUK"
ReturnURL = "http://www.kalaibao.com/members/payResult/"
NotifyURL = "http://www.kalaibao.com/members/NotifyURL/"
BillNo = str(datetime.datetime.now().strftime("%Y%m%d%H%M%S")) # 订单号
db = {
"bank": BANK,
"MerNo":MerNo,
"MD5KEY":MD5KEY,
"ReturnURL":ReturnURL,
"BillNo":BillNo,
"NotifyURL":NotifyURL
}
return render_to_response('members/member_pay.html', db, context_instance=RequestContext(request))
@login_required
def GetPayMD5Info(request):
uid = request.user.id
Amount = request.REQUEST.get("Amount", "") #支付金额
BillNo = request.REQUEST.get("BillNo","")# 订单号
Amount = Amount if True else "100"
MerNo = "183871" # 商户ID
MD5KEY = "_zxtVhUK"
ReturnURL = "http://www.kalaibao.com/members/payResult/"
print(hashlib.new("md5", "BYAABD0057").hexdigest())
MD5KEY_MD5 = hashlib.new("md5", MD5KEY).hexdigest().upper()
MStr = "Amount=%s&BillNo=%s&MerNo=%s&ReturnURL=%s&%s"%(Amount,BillNo,MerNo,ReturnURL,MD5KEY_MD5)
MD5info = hashlib.new("md5", MStr).hexdigest().upper()
issetpay = order_pay.objects.filter(order_number=BillNo)
if not issetpay.exists():
createpay = order_pay.objects.create(user_id=uid,order_number=BillNo,order_sum=Amount)
createpay.save()
db = {"MD5info":MD5info}
J = Json_Code(data=db, msg="ok", error=0)
return HttpResponse(J,content_type="application/json")
def payResult(request):
Ret = dict(request.REQUEST.items())
J = Json_Code(data=Ret,msg="ok", error=0)
return HttpResponse(J,content_type="application/json")
def NotifyURL(request):
Ret = dict(request.REQUEST.items())
J = Json_Code(data=Ret,msg="ok", error=0)
return HttpResponse(J,content_type="application/json")
|
{"/members/urls.py": ["/members/views.py"], "/ebusiness/urls.py": ["/ebusiness/views.py"], "/ClientAdmin/forms.py": ["/webadmin/models.py"], "/bxservice/ZhongHuaViews.py": ["/bxservice/zhonghua.py", "/bxservice/ansheng.py", "/bxservice/yangguang.py"], "/bxservice/urls.py": ["/bxservice/ZhongHuaViews.py", "/bxservice/views.py"], "/ClientAdmin/views.py": ["/ClientAdmin/forms.py"], "/LYZ/urls.py": ["/members/urls.py", "/klbapp/urls.py", "/web/urls.py", "/wechat/urls.py", "/webadmin/urls.py", "/bxservice/urls.py", "/ClientAdmin/urls.py", "/ebusiness/urls.py"], "/ebusiness/forms.py": ["/bxservice/models.py", "/LYZ/settings.py", "/ebusiness/models.py"], "/webadmin/forms.py": ["/webadmin/models.py"], "/klbapp/urls.py": ["/klbapp/views.py"], "/web/urls.py": ["/web/views.py"], "/webadmin/admin.py": ["/webadmin/models.py"], "/web/views.py": ["/LYZ/common.py", "/LYZ/klb_class.py", "/klbapp/appclass.py", "/bxservice/models.py", "/bxservice/common.py"], "/ClientAdmin/urls.py": ["/ClientAdmin/views.py"], "/ebusiness/templatetags/EbusinessTag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/webadmin/models.py"], "/members/models.py": ["/LYZ/settings.py", "/bxservice/models.py"], "/web/templatetags/webtag.py": ["/LYZ/klb_class.py"], "/wechat/templatetags/wechattag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/LYZ/settings.py"], "/webadmin/urls.py": ["/webadmin/views.py"], "/wechat/urls.py": ["/wechat/views.py"], "/members/admin.py": ["/members/models.py"]}
|
27,064
|
xiaoyanguoke/klb
|
refs/heads/master
|
/ebusiness/models.py
|
# -*- coding:utf-8 -*-
from django.db import models
class vin_as_car_yg(models.Model):
vehicleFgwCode = models.CharField(max_length=255,default="",blank=True,null="")
value = models.CharField(max_length=255,default="",blank=True,null="")
key = models.CharField(max_length=255,default="",blank=True,null="")
vin = models.CharField(max_length=255,default="",blank=True,null="")
class code(models.Model):
pass
|
{"/members/urls.py": ["/members/views.py"], "/ebusiness/urls.py": ["/ebusiness/views.py"], "/ClientAdmin/forms.py": ["/webadmin/models.py"], "/bxservice/ZhongHuaViews.py": ["/bxservice/zhonghua.py", "/bxservice/ansheng.py", "/bxservice/yangguang.py"], "/bxservice/urls.py": ["/bxservice/ZhongHuaViews.py", "/bxservice/views.py"], "/ClientAdmin/views.py": ["/ClientAdmin/forms.py"], "/LYZ/urls.py": ["/members/urls.py", "/klbapp/urls.py", "/web/urls.py", "/wechat/urls.py", "/webadmin/urls.py", "/bxservice/urls.py", "/ClientAdmin/urls.py", "/ebusiness/urls.py"], "/ebusiness/forms.py": ["/bxservice/models.py", "/LYZ/settings.py", "/ebusiness/models.py"], "/webadmin/forms.py": ["/webadmin/models.py"], "/klbapp/urls.py": ["/klbapp/views.py"], "/web/urls.py": ["/web/views.py"], "/webadmin/admin.py": ["/webadmin/models.py"], "/web/views.py": ["/LYZ/common.py", "/LYZ/klb_class.py", "/klbapp/appclass.py", "/bxservice/models.py", "/bxservice/common.py"], "/ClientAdmin/urls.py": ["/ClientAdmin/views.py"], "/ebusiness/templatetags/EbusinessTag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/webadmin/models.py"], "/members/models.py": ["/LYZ/settings.py", "/bxservice/models.py"], "/web/templatetags/webtag.py": ["/LYZ/klb_class.py"], "/wechat/templatetags/wechattag.py": ["/LYZ/klb_class.py", "/bxservice/models.py", "/LYZ/settings.py"], "/webadmin/urls.py": ["/webadmin/views.py"], "/wechat/urls.py": ["/wechat/views.py"], "/members/admin.py": ["/members/models.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.