index int64 | repo_name string | branch_name string | path string | content string | import_graph string |
|---|---|---|---|---|---|
56,658 | spawn3/python-util | refs/heads/master | /ask/clss/obj.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import types
import math
from pprint import pprint
# All Python objects have the folling three characteristics: an identity, a type and a value.
def test_1():
o = object()
pprint(dir(o))
# [
# '__doc__',
# '__class__',
# '__new__', '__init__',
# '__format__', '__str__', '__repr__',
# '__hash__',
# '__setattr__', '__getattribute__', '__delattr__',
# '__sizeof__',
# '__reduce__', '__reduce_ex__',
# '__subclasshook__',
# ]
class A(object):
#def __new__(cls, *args, **kwargs):
# print args
# print kwargs
def __init__(self, radius):
self.radius = radius
@property
def area(self):
return math.pi * self.radius ** 2
@property
def perimeter(self):
return 2 * math.pi * self.radius
@staticmethod
def sm(*args, **kwargs):
print args
print kwargs
@classmethod
def cm(cls, *args, **kwargs):
print cls
print args
print kwargs
class B(A):
def __init__(self, *args, **kwargs):
super(B, self).__init__(*args, **kwargs)
class C(object):
# __slots__ = ['ice', 'cream']
number = 0
def __init__(self, number):
# super(C, self).__init__()
C.number = number
def test_metaclass():
class Foo(object):
bar = True
type('Foo', (), {'bar': True})
# pprint(globals())
if __name__ == '__main__':
a = A(1)
print a.area
| {"/setup.py": ["/ask/__init__.py"]} |
56,659 | spawn3/python-util | refs/heads/master | /lich/test_pool.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from lich.umptypes import UmpPath
import utils
from base import TestBase
class TestAll(TestBase):
def setUp(self):
super(TestAll, self).setUp()
self.pool_path = UmpPath('pool.a')
self.volume_path = UmpPath('pool.a/b')
self.size = utils.GB(1)
self.del_volume(self.volume_path)
self.del_pool(self.pool_path)
def tearDown(self):
# self._del_volume(self.volume_path)
# self._del_pool(self.pool_path)
pass
@unittest.skipIf(False, 'skip this')
def test_delete(self):
self.create_pool(self.pool_path)
self.del_pool(self.pool_path)
def test_delete_if_volume(self):
self.create_pool(self.pool_path)
self.create_volume(self.volume_path, self.size)
# 2: No such file or directory
# 39: Directory not empty
self.del_pool(self.pool_path, status_code=39)
self.del_volume(self.volume_path)
self.del_pool(self.pool_path)
def test_stat(self):
self.create_pool(self.pool_path)
self.stat_pool(self.pool_path)
self.del_pool(self.pool_path)
self.stat_pool(self.pool_path)
def test_list(self):
self.create_pool(self.pool_path)
pools = self.list_pools()
self.assertIn(self.pool_path.long_pool_name, pools)
self.del_pool(self.pool_path)
pools = self.list_pools()
self.assertNotIn(self.pool_path.long_pool_name, pools)
def suite():
s = unittest.TestSuite()
load_from = unittest.defaultTestLoader.loadTestsFromTestCase
s.addTests(load_from(TestAll))
return s
if __name__ == '__main__':
unittest.main()
| {"/setup.py": ["/ask/__init__.py"]} |
56,660 | spawn3/python-util | refs/heads/master | /spider/gdc/model/data.py | #!/usr/bin/env python
# coding=utf-8
import datetime
from setting import baseorm, dataorm
class MarkModel(dataorm.Model):
create_time = dataorm.DatetimeField(ddl='datetime', updatable=False)
update_time = dataorm.DatetimeField(ddl='timestamp')
tid = baseorm.IdField(unique='data', updatable=False)
def __init__(self, **attributes):
# self.__mappings__['create_time'] = dataorm.DatetimeField(ddl='datetime')
# self.__mappings__['update_time'] = dataorm.DatetimeField(ddl='datetime')
# self.__mappings__['tid'] = baseorm.IdField(unique='data', updatable=False)
attributes['create_time'] = attributes.get('create_time', datetime.datetime.now())
attributes['update_time'] = attributes.get('update_time', datetime.datetime.now())
for key in self.__mappings__:
if not key in attributes:
raise Exception('Need field %s. ' % key)
attributes[key] = self.__mappings__[key].check_value(attributes[key])
super(MarkModel, self).__init__(**attributes)
def __setstate__(self, state):
self.__dict__ = state
def __getstate__(self):
return self.__dict__
'''
@comment('代理数据')
'''
class Proxy(MarkModel):
__table__ = 'grab_proxy'
ip = dataorm.StrField(ddl='varchar', max_length=20, unique='data', updatable=False)
port = dataorm.IntField(ddl='int', max_length=10, unique='data', updatable=False)
location = dataorm.StrField(ddl='varchar', max_length=30)
safetype = dataorm.StrField(ddl='varchar', max_length=30)
protocol = dataorm.StrField(ddl='varchar', max_length=30)
refspeed = dataorm.FloatField(ddl='float')
usespeed = dataorm.FloatField(ddl='float')
usenum = dataorm.IntField(ddl='int', max_length=10)
status = dataorm.IntField(ddl='int', max_length=1)
extra = dataorm.StrField(ddl='varchar', max_length=300)
creator = dataorm.IdField()
updator = dataorm.IdField()
'''
@comment('视频数据')
'''
class Video(MarkModel):
__table__ = 'video'
cat = dataorm.ListField(ddl='list', comment='资源分类')
url = dataorm.StrField(ddl='str', comment='资源地址')
format = dataorm.StrField(ddl='str', comment='资源格式')
size = dataorm.IntField(ddl='int', comment='资源大小')
during = dataorm.IntField(ddl='int', comment='资源时常')
tag = dataorm.ListField(ddl='list', comment='资源标签')
name = dataorm.StrField(ddl='str', comment='资源名称')
desc = dataorm.StrField(ddl='str', comment='资源描述')
cover = dataorm.StrField(ddl='str', comment='资源封面')
author = dataorm.StrField(ddl='str', comment='资源作者')
owner = dataorm.DictField(ddl='dict', comment='资源拥有者')
snum = dataorm.IntField(ddl='int', comment='资源序号')
src = dataorm.StrField(ddl='str', comment='资源来源')
host = dataorm.StrField(ddl='str', comment='资源域名')
page_url = dataorm.StrField(ddl='str', comment='资源原页面地址')
page_id = dataorm.IntField(ddl='int', unique='data', updatable=False, comment='资源页面id')
parent_page_id = dataorm.IntField(ddl='int', comment='资源父页面id')
atime = dataorm.StrField(ddl='str', comment='资源来源时间')
'''
@comment('音频数据')
'''
class Audio(MarkModel):
__table__ = 'audio'
cat = dataorm.ListField(ddl='list', comment='资源分类')
url = dataorm.StrField(ddl='str', comment='资源地址')
format = dataorm.StrField(ddl='str', comment='资源格式')
size = dataorm.IntField(ddl='int', comment='资源大小')
during = dataorm.IntField(ddl='int', comment='资源时长')
tag = dataorm.ListField(ddl='list', comment='资源标签')
name = dataorm.StrField(ddl='str', comment='资源名称')
desc = dataorm.StrField(ddl='str', comment='资源描述')
cover = dataorm.StrField(ddl='str', comment='资源封面')
singer = dataorm.StrField(ddl='str', comment='资源歌手')
snum = dataorm.IntField(ddl='int', comment='资源序号')
src = dataorm.StrField(ddl='str', comment='资源来源')
host = dataorm.StrField(ddl='str', comment='资源域名')
page_url = dataorm.StrField(ddl='str', comment='资源原页面地址')
page_id = dataorm.IntField(ddl='int', unique='data', updatable=False, comment='资源页面id')
parent_page_id = dataorm.IntField(ddl='int', comment='资源父页面id')
atime = dataorm.DatetimeField(ddl='datetime', comment='资源来源时间')
'''
@comment('漫画数据')
'''
class Comic(MarkModel):
__table__ = 'comic'
cat = dataorm.ListField(ddl='list', comment='资源分类')
url = dataorm.StrField(ddl='str', comment='资源地址')
tag = dataorm.ListField(ddl='list', comment='资源标签')
name = dataorm.StrField(ddl='str', comment='资源名称')
desc = dataorm.StrField(ddl='str', comment='资源描述')
cover = dataorm.StrField(ddl='str', comment='资源封面')
author = dataorm.StrField(ddl='str', comment='资源作者')
owner = dataorm.DictField(ddl='dict', comment='资源拥有者')
snum = dataorm.IntField(ddl='int', comment='资源序号')
src = dataorm.StrField(ddl='str', comment='资源来源')
host = dataorm.StrField(ddl='str', comment='资源域名')
language = dataorm.StrField(ddl='str', comment='语言')
parody = dataorm.StrField(ddl='str', comment='出自')
relate_page = dataorm.DictField(ddl='dict', comment='相关资源页面id:url')
page_url = dataorm.StrField(ddl='str', comment='资源原页面地址')
page_id = dataorm.IntField(ddl='int', unique='data', updatable=False, comment='资源页面id')
parent_page_id = dataorm.IntField(ddl='int', comment='资源父页面id')
atime = dataorm.DatetimeField(ddl='datetime', comment='资源来源时间')
download = dataorm.BoolField(ddl='bool', comment='是否下载', default=False)
if __name__ == '__main__':
pass
| {"/setup.py": ["/ask/__init__.py"]} |
56,661 | spawn3/python-util | refs/heads/master | /lich/lich/config.py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
host_ip = '192.168.120.211'
| {"/setup.py": ["/ask/__init__.py"]} |
56,662 | spawn3/python-util | refs/heads/master | /spider/gdc/task/video/videospider.py | #!/usr/bin/env python
# coding=utf-8
from webcrawl.spider import SpiderOrigin
from model.data import Video as Data
TIMEOUT = 120
class SpiderVideoOrigin(SpiderOrigin):
def __del__(self):
pass
def __init__(self, queuetype='P', timeout=-1, worknum=6, worktype='COROUTINE', tid=0):
super(SpiderVideoOrigin, self).__init__(queuetype=queuetype, timeout=timeout, worknum=worknum, worktype=worktype, tid=tid)
if __name__ == "__main__":
pass
| {"/setup.py": ["/ask/__init__.py"]} |
56,663 | spawn3/python-util | refs/heads/master | /lich/lich/pool.py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
from base import LichBase
from runner import local_runner
def _create_exc_handler(e, *args, **kw):
if str(e).find("File exists") != -1:
path = args[0]
is_with_volume = kw.get('is_with_volume', False)
if not is_with_volume :
raise Exception("存储池%s已存在" % path)
else:
raise Exception(e)
def _delete_exc_handler(e, *args, **kw):
if str(e).find('No such file or directory') != -1:
pass
else:
raise Exception(e)
def _remove_attr_exc_handler(e, *args, **kw):
ignoreNoKey = kw.get('ignoreNoKey')
if ignoreNoKey and "Required key not available" in str(e):
pass
else:
raise Exception(e)
class LichPool(LichBase):
@local_runner(exc_handler=_create_exc_handler)
def create(self, path, pdomain=None):
if not pdomain:
cmd = '%s mkpool %s -p %s' % (self.lichbd, path.long_pool_name, path.protocol)
else:
cmd = '%s mkpool %s -p %s -A %s' % (self.lichbd, path.long_pool_name, path.protocol, pdomain)
return cmd
@local_runner(exc_handler=_delete_exc_handler)
def delete(self, path):
cmd = '%s rmpool %s -p %s' % (self.lichbd, path.long_pool_name, path.protocol)
return cmd
@local_runner()
def _list(self, path):
cmd = '%s lspools -p %s' % (self.lichbd, path.protocol)
return cmd
def list(self, path):
rc, lines = self._list(path)
if rc != 0:
return rc, lines
pools = []
for line in lines:
l = line.split(' ')
if l:
pools.append(l[len(l)-1])
return rc, pools
def stat(self, path):
rc, pools = self.list(path)
if rc == 0:
for pool in pools:
if pool == path.long_pool_name:
return rc, True
return rc, False
def exists(self, path):
rc, res = self.stat(path)
return res
class LichCreatePool(LichBase):
def __init__(self, path):
super(LichCreatePool, self).__init__()
self.path = path
def do(self):
pass
def undo(self):
pass
if __name__ == '__main__':
from umptypes import UmpPath
path = UmpPath('pool1')
vol = LichPool()
vol.create(path)
print vol.list(path)
vol.delete(path)
print vol.list(path)
| {"/setup.py": ["/ask/__init__.py"]} |
56,664 | spawn3/python-util | refs/heads/master | /ask/params.py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
from pyvalid import accepts, returns
@accepts(int, int)
def calc(x, y):
return x+y
@returns(bool)
def rb():
return 1
if __name__ == '__main__':
calc(1, 1.0)
rb()
| {"/setup.py": ["/ask/__init__.py"]} |
56,665 | spawn3/python-util | refs/heads/master | /spider/gdc/model/log.py | #!/usr/bin/env python
# coding=utf-8
from setting import baseorm, dataorm
class Log(dataorm.Model):
__table__ = 'grab_log'
gsid = baseorm.IdField(updatable=False)
sname = dataorm.StrField(ddl='varchar', max_length=20)
succ = dataorm.IntField(ddl='int', max_length=10)
fail = dataorm.IntField(ddl='int', max_length=10)
timeout = dataorm.IntField(ddl='int', max_length=10)
create_time = dataorm.DatetimeField(ddl='datetime', updatable=False)
class ProxyStatistics(dataorm.Model):
__table__ = 'grab_proxy_statistics'
pid = baseorm.IdField(updatable=False)
avg_elapse = dataorm.FloatField(ddl='float')
total_elapse = dataorm.FloatField(ddl='float')
start_time = dataorm.DatetimeField(ddl='datetime', updatable=False)
end_time = dataorm.DatetimeField(ddl='datetime', updatable=False)
create_time = dataorm.DatetimeField(ddl='datetime', updatable=False)
update_time = dataorm.DatetimeField(ddl='timestamp')
class Statistics(dataorm.Model):
__table__ = 'grab_statistics'
tid = baseorm.IdField(updatable=False)
succ = dataorm.IntField(ddl='int', max_length=10)
fail = dataorm.IntField(ddl='int', max_length=10)
timeout = dataorm.IntField(ddl='int', max_length=10)
elapse = dataorm.FloatField(ddl='float')
create_time = dataorm.DatetimeField(ddl='datetime', updatable=False)
class ProxyLog(dataorm.Model):
__table__ = 'grab_proxy_log'
pid = dataorm.IdField()
elapse = dataorm.FloatField(ddl='float')
create_time = dataorm.DatetimeField(ddl='datetime')
class RunLog(dataorm.Model):
__table__ = 'grab_runlog'
tid = baseorm.IdField(updatable=False)
type = dataorm.StrField(ddl='varchar', max_length=15)
status = dataorm.IntField(ddl='int', max_length=1)
sid = dataorm.StrField(ddl='varchar', max_length=30)
sname = dataorm.StrField(ddl='varchar', max_length=20)
priority = dataorm.IntField(ddl='int', max_length=5)
times = dataorm.IntField(ddl='int', max_length=2)
args = dataorm.StrField(ddl='varchar', max_length=20)
kwargs = dataorm.StrField(ddl='varchar', max_length=20)
txt = dataorm.StrField(ddl='varchar', max_length=20)
create_time = dataorm.DatetimeField(ddl='datetime')
if __name__ == '__main__':
pass
| {"/setup.py": ["/ask/__init__.py"]} |
56,666 | spawn3/python-util | refs/heads/master | /spider/gdc/update_resource.py | #!/usr/bin/python
# coding=utf-8
import pymongo, requests
from bson.objectid import ObjectId
# from task.celery import app
from adesk.db.connection import create_connection
from webcrawl.handleRequest import requGet, getHtmlNodeContent
replSet = 'adesk01'
masters = ['jxqcta','jxqctb','jxqctd','jxqcte','dscnca','dscncb','dscncc']
cluster_mc = create_connection(masters, replSet=replSet)
cluster_ddj = cluster_mc['dandan_jiang']
jxqctm_mc = pymongo.MongoClient('dscncg')
jxqctm_ddj = jxqctm_mc['dandan-jiang']
TIMEOUT = 30
# @app.task
def update_resource(vid):
video = jxqctm_ddj['video'].find_one({'_id':ObjectId(vid)})
funs = {'哔哩哔哩':update_bili, 'Acfun':update_acfun}
if str(video['src']) in funs and video:
funs[str(video['src'])](video)
def update_bili(video):
url = video['page_url']
outid = url[url.rindex('/')+1:url.rindex('.')].replace('av', '')
if '#page' in url:
pagenum = url.split('page=')[-1]
url = 'http://www.bilibili.com/m/html5?aid=%s&page=%s' % (outid, pagenum)
else:
url = 'http://www.bilibili.com/m/html5?aid=%s' % outid
headers = {"Accept":"application/json, text/javascript, */*; q=0.01",
"Accept-Encoding":"gzip, deflate, sdch",
"Accept-Language":"en-US,en;q=0.8",
"Cache-Control":"max-age=0",
"Connection":"keep-alive",
"Host":"www.bilibili.com",
"Referer":"http//www.bilibili.com/mobile/video/av%s.html?tg" % outid,
"User-Agent":"Mozilla/5.0 (Linux; Android 5.1.1; Nexus 6 Build/LYZ28E) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.20 Mobile Safari/537.36",
"X-Requested-With":"XMLHttpRequest"}
test = requests.head(video['url'], headers=headers)
if test.status_code == 503 or test.status_code == 200:
print '=========aaaa'
return
data_result = requGet(url, headers=headers, timeout=TIMEOUT, format='JSON')
url = data_result['src']
jxqctm_ddj['video'].update({'_id':video['_id']}, {'$set':{'url':url}})
cluster_ddj['resource'].update({'_id':video['_id']}, {'$set':{'url':url}})
def update_acfun(video):
url = video['page_url']
headers = {
"deviceType":"2",
"Referer":"http://m.acfun.tv/player?date=undefined",
"User-Agent":"Mozilla/5.0 (iPhone; CPU iPhone OS 8_0 like Mac OS X) AppleWebKit/600.1.3 (KHTML, like Gecko) Version/8.0 Mobile/12A4345d Safari/600.1.4"
}
outid = video['page_url'].split('#')[-1]
url = 'http://api.aixifan.com/plays/%s/realSource' % outid
print 'cccc', video['page_url'], url
url_result = requGet(url, headers=headers, format='JSON')
url = url_result['data']['files'][0]['url']
if type(url) == list:
url.append('')
url = url[0]
print 'dddd', url
jxqctm_ddj['video'].update({'_id':video['_id']}, {'$set':{'url':url}})
cluster_ddj['resource'].update({'_id':video['_id']}, {'$set':{'url':url}})
if __name__ == '__main__':
for one in jxqctm_ddj['video'].find({'src':'哔哩哔哩'}).limit(1):
print '====start bili', one['_id'], one['url']
update_resource(one['_id'])
one = jxqctm_ddj['video'].find_one({'_id':one['_id']})
print '====end', one['_id'], one['url']
for one in jxqctm_ddj['video'].find({'_id':ObjectId('568013c4d011361b96feb6a0')}).limit(1):
print '====start ac solo', one['_id'], one['url']
one['page_url'] = one['page_url'] + '#2931291'
update_acfun(one)
one = jxqctm_ddj['video'].find_one({'_id':one['_id']})
print '====end', one['_id'], one['url']
for one in jxqctm_ddj['video'].find({'_id':ObjectId("5680709dd011361b9600c049")}).limit(1):
print '====start ac solo', one['_id'], one['url']
one['page_url'] = one['page_url'] + '#2722991'
update_acfun(one)
one = jxqctm_ddj['video'].find_one({'_id':one['_id']})
print '====end', one['_id'], one['url']
| {"/setup.py": ["/ask/__init__.py"]} |
56,667 | spawn3/python-util | refs/heads/master | /lich/test_snapshot.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from lich.umptypes import UmpPath
from base import TestBase
class TestAll(TestBase):
def setUp(self):
super(TestAll, self).setUp()
self.pname_01 = UmpPath('snap.a')
self.vname_01 = UmpPath('snap.a/b')
self.sname_01 = UmpPath('snap.a/b@s1')
self.sname_02 = UmpPath('snap.a/b@s2')
self.sname_03 = UmpPath('snap.a/b@s3')
self.sname_04 = UmpPath('snap.a/b@s4')
self.clone_01 = UmpPath('snap.a/d')
self.size = 1024*1024*1024
self.new_size = 2 * self.size
# clean
self.del_volume(self.clone_01)
self.del_snapshot(self.sname_01)
self.del_snapshot(self.sname_02)
self.del_snapshot(self.sname_03)
self.del_snapshot(self.sname_04)
self.del_volume(self.vname_01)
self.del_pool(self.pname_01)
self.create_pool(self.pname_01)
self.create_volume(self.vname_01, self.size)
def tearDown(self):
# self.del_volume(self.clone_01)
self.del_volume(self.vname_01)
self.del_pool(self.pname_01)
@unittest.skipIf(False, 'skip me')
def test_create(self):
self.create_snapshot(self.sname_01)
rc, res = self.lich_snapshot.stat(self.sname_01)
self.assertIsNotNone(res)
self.del_snapshot(self.sname_01)
def _assertSnap(self, snap):
rc, res = self.lich_snapshot.stat(snap)
self.assertIsNotNone(res)
@unittest.skipIf(False, 'skip me')
def test_multi_create(self):
"""
s1 -> s2 -> s3
:return:
"""
snaps = [self.sname_01, self.sname_02, self.sname_03]
for snap in snaps:
self.create_snapshot(snap)
rc, res = self.lich_snapshot.stat(snap)
self.assertIsNotNone(res)
for snap in snaps:
self.del_snapshot(snap)
rc, res = self.lich_snapshot.stat(snap)
self.assertIsNone(res)
@unittest.skipIf(False, 'skip me')
def test_rollback(self):
"""
s1 -> s2 -> s3
| -> s4
:return:
"""
self.create_snapshot(self.sname_01)
self.create_snapshot(self.sname_02)
self.create_snapshot(self.sname_03)
self.lich_snapshot.rollback(self.sname_02)
self.create_snapshot(self.sname_04)
self._assertSnap(self.sname_01)
self._assertSnap(self.sname_02)
self._assertSnap(self.sname_03)
self._assertSnap(self.sname_04)
child = self.lich_snapshot.children(self.sname_01)
self.assertListEqual(child, [self.sname_02.snap_name])
child = self.lich_snapshot.children(self.sname_02)
self.assertListEqual(child, [self.sname_03.snap_name, self.sname_04.snap_name])
self.del_snapshot(self.sname_01)
self.del_snapshot(self.sname_02)
self.del_snapshot(self.sname_03)
self.del_snapshot(self.sname_04)
def suite():
s = unittest.TestSuite()
load_from = unittest.defaultTestLoader.loadTestsFromTestCase
for tc in [TestAll]:
s.addTests(load_from(tc))
return s
if __name__ == '__main__':
unittest.main()
| {"/setup.py": ["/ask/__init__.py"]} |
56,668 | spawn3/python-util | refs/heads/master | /lich/lich/cgsnapshot.py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
import utils
from base import LichBase, RemoteLocation
from runner import http_runner
from Ump.objs.root import init_root
def _delete_exc_handler(e, *args, **kw):
if str(e).find("Required key not available") != -1:
pass
else:
raise Exception(e)
class LichCGSnapshotParam(RemoteLocation):
def __init__(self, host_ip=None, vols=None, snap_name=None, protocol='iscsi', cluster_id=1):
"""
vols: [pool1/vol11, pool1/vol12]
"""
super(LichCGSnapshotParam, self).__init__(host_ip, cluster_id=cluster_id)
cluster = init_root().cluster
self.protocol = protocol
self.protocol_root = cluster.protocol_root(protocol)
self.vols = vols
self.snap_name = snap_name
def expand_vols(self, sep=',', vols=None):
if isinstance(vols, list) and vols:
tmp = vols
else:
tmp = self.vols
vols = ['/%s/%s' % (self.protocol_root, vol) for vol in tmp]
return sep.join(vols)
class LichCGSnapshot(LichBase):
@http_runner()
def create(self, param):
vol_path = param.expand_vols()
return "%s --create %s@%s" % (self.lich_snapshot, vol_path, param.snap_name)
@http_runner(exc_handler=_delete_exc_handler)
def delete(self, param):
vol_path = param.expand_vols()
return "%s --remove %s@%s" % (self.lich_snapshot, vol_path, param.snap_name)
def remove(self, param):
return self.delete(param)
@http_runner()
def rollback(self, param):
vol_path = param.expand_vols()
return "%s --rollback %s@%s" % (self.lich_snapshot, vol_path, param.snap_name)
@http_runner()
def clone(self, param, new_vols=None):
vol_path = param.expand_vols()
new_vol_path = param.expand_vols(vols=new_vols)
return "%s --clone %s@%s %s" % (self.lich_snapshot, vol_path, param.snap_name, new_vol_path)
@http_runner()
def protect(self, param, on=True):
vol_path = param.expand_vols()
cmd = 'protect' if on else 'unprotect'
return "%s --%s %s@%s" % (self.lich_snapshot, cmd, vol_path, param.snap_name)
@http_runner()
def unprotect(self, param):
return self.protect(param, on=False)
@http_runner()
def _list(self, param):
vol_path = param.expand_vols()
return "%s --list %s" % (self.lich_snapshot, vol_path)
def list(self, param):
res = self._list(param)
return utils.split_lines(res)
@http_runner()
def flat(self, param):
vol_path = param.expand_vols()
return "%s --flat %s@%s" % (self.lich_snapshot, vol_path, param.snap_name)
if __name__ == '__main__':
param = LichCGSnapshotParam(host_ip='192.168.120.211',
vols=['pool1/vol11', 'pool1/vol12'],
snap_name='snap01',
protocol='iscsi')
group = LichCGSnapshot()
| {"/setup.py": ["/ask/__init__.py"]} |
56,669 | spawn3/python-util | refs/heads/master | /lich/lich/myinspect.py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
from base import LichBase, RemoteLocation
from runner import http_runner
class LichInspectParam(RemoteLocation):
def __init__(self, host_ip=None):
super(LichInspectParam, self).__init__(host_ip)
class LichInspect(LichBase):
@http_runner()
def list(self, param):
cmd = '%s --disk_list' % (self.lich_node)
return cmd
if __name__ == '__main__':
param = LichInspectParam(host_ip='192.168.120.211')
obj = LichInspect()
obj.list(param)
# print disk.add(param)
| {"/setup.py": ["/ask/__init__.py"]} |
56,670 | spawn3/python-util | refs/heads/master | /lich/lich/shell.py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
from Ump.common.utils import inspect_func
from base import RemoteLocation, LichBase
from runner import ssh_runner, http_runner
import utils
class LichShellParam(RemoteLocation):
def __init__(self, host_ip=None, cmd='echo OK', password='mdsmds', is_http=False):
super(LichShellParam, self).__init__(host_ip, password=password)
self.is_http = is_http
self.cmd = cmd
class LichShell(LichBase):
@ssh_runner()
def ssh_run(self, param):
return param.cmd
@http_runner()
def http_run(self, param):
return param.cmd
def run(self, param):
if param.is_http:
return self.http_run(param)
else:
return self.ssh_run(param)
def exists_base(self, param, name, _type):
FLAG = 'noprogram'
param.cmd = "if [ ! -%s %s ]; then echo '%s'; fi" % (_type, name, FLAG)
res = self.run(param)
l = utils.split_lines(res)
return len(l) != 1 or l[0] != FLAG
@inspect_func
def dir_exists(self, param, dname):
return self.exists_base(param, dname, 'd')
@inspect_func
def file_exists(self, param, fname):
return self.exists_base(param, fname, 'f')
@inspect_func
def rm(self, param, fname):
param.cmd = "if [ -f %s ]; then rm -rf %s; fi" % (fname, fname)
res = self.run(param)
return res
def cat_file(self, param, fname):
param.cmd = "cat %s" % (fname)
res = self.run(param)
return res
@inspect_func
def dump_config(self, param):
param.cmd = '%s configdump' % (self.lich)
res = self.run(param)
res = utils.parse_global_config(res)
return res
@inspect_func
def read_etc_hosts(self, param, host_ip='0.0.0.0'):
if not host_ip:
host_ip = param.host_ip
param.cmd = '%s etc_hosts --host %s' % (self.lich_syncump, host_ip)
res = self.run(param)
res = utils.parse_etc_hosts(res)
return res
def node_start(self, param):
param.cmd = '%s --start' % self.lich_node
res = self.run(param)
return res
def node_stop(self, param):
param.cmd = '%s --stop' % self.lich_node
res = self.run(param)
return res
@inspect_func
def node_list(self, param):
param.cmd = '%s listnode' % self.lich
res = self.run(param)
return res
@inspect_func
def sethosts(self, param, host, hostname):
param.cmd = "%s --sethosts %s %s" % (self.lich_node, host, hostname)
res = self.run(param)
return res
@inspect_func
def cluster_create(self, param, hosts):
param.cmd = "%s create %s" % (self.lich, hosts)
res = self.run(param)
return res
@inspect_func
def addnode(self, param, hosts):
param.cmd = "%s addnode %s" % (self.lich, hosts)
res = self.run(param)
return res
@inspect_func
def sshkey(self, param, sshkey_hosts=None, password=None):
'''
sshkey_hosts: create the ssh keys; type list or string
'''
sshkey_hosts = " ".join(sshkey_hosts) if isinstance(sshkey_hosts, list) else sshkey_hosts
param.cmd = '''
expect -c 'set timeout 2;
spawn %s sshkey %s;
expect "input password:" {send "%s\r"};
interact'
''' % (self.lich, sshkey_hosts, password)
res = self.run(param)
return True
if __name__ == '__main__':
param = LichShellParam(host_ip='192.168.120.71', password='mdsmds')
obj = LichShell()
obj.file_exists(param, 'lich/admin/cluster.py')
obj.dump_config(param)
obj.read_etc_hosts(param)
obj.node_list(param)
| {"/setup.py": ["/ask/__init__.py"]} |
56,671 | spawn3/python-util | refs/heads/master | /spider/gdc/task/comic/spiderManjia.py | #!/usr/bin/env python
# coding=utf-8
import os, re, copy, json, time
from pymongo import MongoClient
from datetime import timedelta
from datetime import datetime
from webcrawl.request import requGet
from webcrawl.request import requPost
from webcrawl.request import getHtmlNodeContent
from webcrawl.request import getXmlNodeContent
from webcrawl.task import retry
from webcrawl.task import index
from webcrawl.task import initflow
from webcrawl.request import getJsonNodeContent
from webcrawl.task import store
from webcrawl.task import timelimit
from webcrawl.task import next
from webcrawl.request import ensureurl
from webcrawl.request import parturl
from model.setting import withData, datacfg
from comicspider import Data
from comicspider import TIMEOUT
from comicspider import SpiderComicOrigin
#_print, logger = logprint(modulename(__file__), modulepath(__file__))
dmzj_re = re.compile('initIntroData\(\[.*\]\);')
class SpiderDmzj(SpiderComicOrigin):
"""
哔哩官网 数据爬虫
"""
def __init__(self, worknum=6, queuetype='P', worktype='COROUTINE', timeout=-1, tid=0):
super(SpiderDmzj, self).__init__(worknum=worknum, queuetype=queuetype, worktype=worktype, timeout=timeout, tid=tid)
self.clsname = self.__class__.__name__
self.headers = {}
self.end = datetime.now()
self.begin = self.end - timedelta(days=7)
@store(withData(datacfg.W), Data.insert, update=True, method='MANY')
@timelimit(3)
def fetchDetail(self, url, additions={}, timeout=TIMEOUT, implementor=None):
cat = additions['cat']
tag = additions['tag']
name = additions['name']
desc = additions['desc']
cover = additions['cover']
author = additions['author']
atime = additions['atime']
owner = {}
snum = 0
src = '漫画之家'
host = 'www.dmzj.com'
wap_result = requGet(url, timeout=TIMEOUT, format='HTML')
page_url = url
url = ''
format = 'h5'
page_id = hash(page_url)
parent_page_id = hash(page_url)
owner['name'] = getHtmlNodeContent(wap_result.find('.//a[@class="pd introName"]'), 'TEXT')
owner['url'] = 'http://m.dmzj.com%s' % getHtmlNodeContent(wap_result.find('.//a[@class="pd introName"]'), {'ATTR':'href'})
pages = ''.join(getHtmlNodeContent(one, 'TEXT') for one in wap_result.findall('.//script'))
try:
pages = json.loads(dmzj_re.search(pages).group().replace("initIntroData([", "").replace("]);", "")).get('data', [])
pages = sorted(pages, key=lambda v:v['chapter_order'])
except:
pages = []
for index, chapter in enumerate(pages):
url = 'http://m.dmzj.com/view/%s/%s.html' % (chapter['comic_id'], chapter['id'])
page_url = url
page_id = hash(page_url)
snum = index + 1
page_data = Data(cat=cat, url=url, tag=tag, name=name,
desc=desc, cover=cover, author=author,
owner=owner, snum=snum,
src=src, host=host, page_url=page_url,
page_id=page_id, parent_page_id=parent_page_id,
atime=atime, tid=self.tid)
yield page_data
@next(fetchDetail)
@timelimit(20)
@index('url')
def fetchList(self, url, additions={}, timeout=TIMEOUT, implementor=None):
result = requGet(url, timeout=timeout, format='JSON')
if len(result) < 15:
nextpage = None
else:
index = url.split('-')
sub_index = index[-1].split('.')
sub_index[0] = int(sub_index[0]) + 1
# if sub_index[0] >5:
# nextpage = None
# else:
# sub_index[0] = str(sub_index[0])
# index[-1] = '.'.join(sub_index)
# nextpage = '-'.join(index)
sub_index[0] = str(sub_index[0])
index[-1] = '.'.join(sub_index)
nextpage = '-'.join(index)
yield nextpage
for one in result:
additions = {
'cat':additions['cat'],
'name':one['name'],
'tag':one['types'].split('/'),
'desc':one['description'] or one['introduction'],
'author':one['authors'],
'cover':'http://images.dmzj.com/%s' % one['cover'],
'atime':time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(one['last_updatetime']))
}
tags = one['types'].split('/')
yield {'url': 'http://m.dmzj.com/info/%s.html' % str(one['id']), 'additions':additions}
@next(fetchList)
@timelimit(20)
@initflow('www')
def fetchCat(self, url, additions={}, timeout=TIMEOUT, implementor=None):
result = requGet(url, timeout=timeout, format='HTML')
print len(result.findall('.//div[@id="classCon"]//ul'))
result = result.find('.//div[@id="classCon"]//ul')
for cat in result.findall('.//a'):
cid = getHtmlNodeContent(cat, {'ATTR':'onclick'}).replace('itemClickAction(0,', '').replace(')', '').replace(' ', '')
if int(cid) > 0:
yield {'url':'http://m.dmzj.com/classify/%s-0-0-0-0-0.json' % cid, 'additions':{'cat':['漫画', getHtmlNodeContent(cat, 'TEXT')]}}
@next(fetchDetail)
@timelimit(20)
@initflow('spec')
def fetchSpec(self, additions={}, timeout=TIMEOUT, implementor=None):
yield {'url': 'http://m.dmzj.com/info/2125.html', 'additions': {'cat':['漫画', '青春', '校园'], 'name':'妄想学生会', 'tag':['欢乐向', '校园', '节操', '搞笑'], 'desc':'动漫之家手机漫画提供妄想学生会359在线漫画,是国内妄想学生会漫画最全更新最快的手机漫画网。妄想学生会漫画介绍:创校已有50年,原本是女校的私立樱才学园,因受到少子化的影响,于今年起变更为男女合校。因为是第一年,男生人数还相当稀少,全校男女生的比率为女生524人,男生28人。对男生来说,正常应该像是后宫一般(?)………但是!以主角津田隆利的立场来看,恐怕完全没这...', 'author':'氏家卜全', 'cover':'http://images.dmzj.com/webpic/7/1204wangxiangxueshenghuifml.jpg', 'atime':datetime.now()}}
if __name__ == '__main__':
print 'start'
spider = SpiderDmzj(worknum=6, queuetype='P', worktype='COROUTINE')
spider.fetchDatas('www', 0, 'http://m.dmzj.com/classify.html')
spider.statistic()
print 'end'
| {"/setup.py": ["/ask/__init__.py"]} |
56,672 | spawn3/python-util | refs/heads/master | /spider/gdc/dbskit/mongo/handler.py | #!/usr/bin/python
# coding=utf-8
import pymongo as dblib
class DBHandler(object):
    """Thin CRUD facade over a pymongo connection.

    @param markname: logical tag identifying this handler instance
    @param conn: an open pymongo client (indexable as conn[db][collection])
    @param check: hook applied to results by ``check()``; defaults to identity
    @param resutype: result shape, 'TUPLE' or 'DICT' (anything else raises KeyError)
    @param autocommit: accepted for interface parity with SQL handlers; unused here
    @param db: default database name used when a call omits ``db``
    """

    def __init__(self, markname, conn, check=(lambda tips, data:data), resutype='DICT', autocommit=False, db=''):
        self._markname = markname
        self._conn = conn
        self._check = check
        # Fail fast (KeyError) on unsupported result types.
        self._resutype = {'TUPLE':'TUPLE', 'DICT':'DICT'}[resutype]
        self.db = db

    @classmethod
    def wrap(cls, tpl):
        """Normalize ``tpl`` into {'collection': ..., 'tips': ...} form."""
        # BUG FIX: the original tested `isinstance(doc, dict)` -- `doc` is
        # undefined in this scope, so every dict input raised NameError.
        if isinstance(tpl, dict):
            tpl['tips'] = tpl.get('tips')
        else:
            tpl = {'collection':tpl, 'tips':None}
        return tpl

    def check(self, tips, data):
        """Apply the user-supplied post-processing hook to ``data``."""
        return self._check(tips, data)

    def queryAll(self, spec, db=None, collection=None, **kwargs):
        """Return a cursor over every document matching ``spec``."""
        db = db or self.db
        return self.query(spec, db=db, collection=collection, qt='all', **kwargs)

    def queryOne(self, spec, db=None, collection=None, **kwargs):
        """Return one document matching ``spec``, or None."""
        db = db or self.db
        return self.query(spec, db=db, collection=collection, qt='one', **kwargs)

    def query(self, spec, db=None, collection=None, qt='all', **kwargs):
        """Dispatch to find_one (qt='one') or find (anything else)."""
        db = db or self.db
        if qt.lower() == 'one':
            return self._conn[db][collection].find_one(spec, **kwargs)
        else:
            return self._conn[db][collection].find(spec, **kwargs)

    def update(self, spec, doc, db=None, collection=None, upsert=False, method='SINGLE'):
        """Update matching documents; any method other than 'SINGLE' updates all."""
        db = db or self.db
        multi = not method.upper() == 'SINGLE'
        return self._conn[db][collection].update(spec, doc, upsert=upsert, multi=multi)

    def delete(self, spec, db=None, collection=None, method='SINGLE'):
        """Remove matching documents; any method other than 'SINGLE' removes all."""
        db = db or self.db
        multi = not method.upper() == 'SINGLE'
        return self._conn[db][collection].remove(spec, multi=multi)

    def insert(self, doc, db=None, collection=None, method='SINGLE', lastid=None):
        """Insert one dict (method='SINGLE') or a list of dicts (otherwise).

        @return: inserted_id for single inserts, inserted_ids for bulk inserts
        @raises TypeError: when ``doc`` has the wrong shape for ``method``
        """
        db = db or self.db
        if method == 'SINGLE':
            if not isinstance(doc, dict):
                # BUG FIX: `raise "..."` (a string exception) is itself a
                # TypeError at runtime; raise a proper exception instead.
                raise TypeError("Single insert document must be dict type.")
            return self._conn[db][collection].insert_one(doc).inserted_id
        else:
            if not isinstance(doc, list):
                raise TypeError("Bulk insert document must be list type.")
            return self._conn[db][collection].insert_many(doc).inserted_ids

    def showColumns(self, table):
        """Infer a collection's 'columns' from one sampled document.

        (The dead MySQL information_schema query left over from the SQL
        handler this class mirrors has been removed.)

        @param table: collection name
        @return: dict mapping field name -> Python type of the sampled value
        """
        columns = {}
        sample = self._conn[self.db][table].find_one()
        if sample:
            for key, val in sample.items():
                columns[key] = type(val)
        return columns
class ExampleDBHandler(DBHandler):
    """Placeholder subclass showing where project-specific handlers plug in."""
    pass
| {"/setup.py": ["/ask/__init__.py"]} |
56,673 | spawn3/python-util | refs/heads/master | /spider/gdc/webcrawl/__init__.py | #!/usr/bin/python
# coding=utf8
# Declare `webcrawl` as a pkg_resources namespace package so it can be
# split across multiple distributions.
__import__('pkg_resources').declare_namespace(__name__)
__version__ = '1.0.3'
__author__ = 'hk'
class MyLocal(object):
    """Simple attribute bag: keyword arguments become instance attributes."""

    def __init__(self, **kwargs):
        for key, value in kwargs.items():
            setattr(self, key, value)

    def update(self, **kwargs):
        """Merge more keyword arguments into the instance namespace."""
        for key, value in kwargs.items():
            setattr(self, key, value)
| {"/setup.py": ["/ask/__init__.py"]} |
56,674 | spawn3/python-util | refs/heads/master | /ask/right/b.py | #!/usr/bin/env python
class BA(object):
    """Empty placeholder class (no behaviour of its own)."""
    pass
| {"/setup.py": ["/ask/__init__.py"]} |
56,675 | spawn3/python-util | refs/heads/master | /ask/mc.py | #!/usr/bin/env python
class Meta(type):
    """Metaclass that stamps a default ``_name`` attribute onto every class it creates."""

    def __init__(cls, *args, **kwargs):
        super(Meta, cls).__init__(*args, **kwargs)
        cls._name = 'value'
class A(Exception):
    """Exception whose message is derived from ``_name`` (injected by ``Meta``).

    NOTE(review): ``__metaclass__`` is the Python 2 metaclass hook; under
    Python 3 it is ignored and ``self._name`` would not exist.
    """
    __metaclass__ = Meta

    def __init__(self, **kw):
        # BUG FIX: this module never imports json, so the kw branch raised
        # NameError; import locally to keep the module's import list unchanged.
        import json
        if kw:
            name = '%s: %s' % (self._name, json.dumps(kw))
        else:
            name = self._name
        super(A, self).__init__(name)
if __name__ == '__main__':
    a = A()    # message comes from the Meta-injected `_name`
    # Raising the class itself: Python instantiates it with no arguments.
    raise A
| {"/setup.py": ["/ask/__init__.py"]} |
56,676 | spawn3/python-util | refs/heads/master | /spider/gdc/task/role/rolespider.py | #!/usr/bin/env python
# coding=utf-8
from webcrawl.spider import SpiderOrigin
from model.data import Role as Data
from model.setting import withData, RDB, WDB
TIMEOUT = 120
class SpiderRoleOrigin(SpiderOrigin):
    """Role-data spider; all crawling behaviour comes from SpiderOrigin."""

    def __init__(self, queuetype='P', timeout=-1, worknum=6, worktype='COROUTINE', tid=0):
        super(SpiderRoleOrigin, self).__init__(
            queuetype=queuetype, timeout=timeout, worknum=worknum,
            worktype=worktype, tid=tid)

    def __del__(self):
        pass
if __name__ == "__main__":
    # No standalone entry point; this module only defines the spider class.
    pass
| {"/setup.py": ["/ask/__init__.py"]} |
56,677 | spawn3/python-util | refs/heads/master | /lich/lich/volume.py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
import utils
from umptypes import UmpPath
from base import LichBase
from runner import local_runner
def _remove_attr_exc_handler(e, *args, **kw):
ignoreNoKey = kw.get('ignoreNoKey')
if ignoreNoKey and "Required key not available" in str(e):
pass
else:
raise Exception(e)
class LichVolume(LichBase):
    """Wrapper around the `lichbd`/`lichfs` CLIs for volume operations.

    Methods decorated with @local_runner return only the command string;
    the decorator executes it and yields (retcode, output) -- see runner.
    """

    @local_runner()
    def create(self, path, size):
        # `size` is in bytes; the B suffix is appended here.
        cmd = '%s create %s --size %sB -p %s' % (self.lichbd, path.long_volume_name, size, path.protocol)
        return cmd

    @local_runner()
    def delete(self, path):
        cmd = '%s rm %s -p %s' % (self.lichbd, path.long_volume_name, path.protocol)
        return cmd

    @local_runner()
    def stat(self, path):
        cmd = '%s info %s -p %s' % (self.lichbd, path.long_volume_name, path.protocol)
        return cmd

    def exists(self, path):
        # Existence is inferred from `stat` succeeding (retcode 0).
        retcode, msg = self.stat(path)
        return retcode == 0

    @local_runner()
    def list(self, path):
        # Lists volumes under the pool component of `path`.
        cmd = '%s ls %s -p %s' % (self.lichbd, path.long_pool_name, path.protocol)
        return cmd

    @local_runner()
    def resize(self, path, size):
        cmd = '%s resize %s --size %sB -p %s' % (self.lichbd, path.long_volume_name, size, path.protocol)
        return cmd

    @local_runner()
    def rename(self, path, new_name):
        # The new name stays inside the same pool.
        cmd = '%s rename %s %s/%s -p %s' % (self.lichbd, path.long_volume_name, path.long_pool_name, new_name, path.protocol)
        return cmd

    @local_runner()
    def set_attr(self, path, attr, value):
        return "%s --attrset %s %s '%s'" % (self.lichfs, path.volume_path, attr, value)

    @local_runner()
    def get_attr(self, path, attr):
        return "%s --attrget %s %s" % (self.lichfs, path.volume_path, attr)

    @local_runner(exc_handler=_remove_attr_exc_handler)
    def remove_attr(self, path, attr, ignoreNoKey=False):
        # `ignoreNoKey` is consumed by _remove_attr_exc_handler via the runner.
        return "%s --attrremove %s %s" % (self.lichfs, path.volume_path, attr)

    @local_runner()
    def _iscsi_connection(self, path):
        cmd = '%s --connection %s' % (self.lich_inspect, path.volume_path)
        return cmd

    def iscsi_connection(self, path):
        # Raw CLI output parsed into a structured connection description.
        res = self._iscsi_connection(path)
        return utils.parse_connection(res)

    def list_snapshots(self, path):
        raise NotImplementedError

    @local_runner()
    def flatten(self, path):
        return "%s --flat %s" % (self.lich_snapshot, path)
if __name__ == '__main__':
path = UmpPath('pool1/vol15')
vol = LichVolume()
vol.create(path, 1)
vol.set_attr(path, 'attr1', 'value1')
vol.get_attr(path, 'attr1')
vol.remove_attr(path, 'attr1')
print vol.list(path)
print vol.info(path)
vol.resize(path, 2)
print vol.info(path)
vol.delete(path)
| {"/setup.py": ["/ask/__init__.py"]} |
56,678 | spawn3/python-util | refs/heads/master | /ask/util.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
import sys
from json import JSONEncoder
from repoze.lru import lru_cache
reload(sys)
sys.setdefaultencoding('utf-8')
# relative import
def parts(l, n):
    """Split list `l` into consecutive chunks of at most `n` items.

    `n` is clamped to a minimum of 1; the final chunk may be shorter.
    """
    size = max(1, n)
    return [l[start:start + size] for start in range(0, len(l), size)]
def fibonacci():
    """Yield the Fibonacci numbers 1, 1, 2, 3, 5, ... indefinitely."""
    prev, cur = 0, 1
    while True:
        yield cur
        prev, cur = cur, prev + cur
class A(object):
    """Empty placeholder class (no behaviour of its own)."""
    pass
# pprint(globals())
# Demonstrates that assigning into globals() creates a real module-level name.
globals()['z'] = 7
print 'z =', z
class MongoEncoder(JSONEncoder):
    """JSONEncoder that serializes datetime objects as ISO-8601 strings."""

    def default(self, obj, **kwargs):
        if isinstance(obj, datetime.datetime):
            return obj.isoformat()
        # BUG FIX: the original called JSONEncoder.default(obj, **kwargs),
        # passing `obj` as `self`; delegate properly so unsupported types
        # raise the standard TypeError from the base class.
        return JSONEncoder.default(self, obj, **kwargs)
| {"/setup.py": ["/ask/__init__.py"]} |
56,679 | spawn3/python-util | refs/heads/master | /spider/gdc/godhand.py | #!/usr/bin/env python
# coding=utf-8
import time
DISTINCT = True
def treeWeight(tree, node):
    """Recursively compute tree[node]['#w'] = 1 plus the weights of every
    node linked via '#params', '#children', '#pre', '#decorators', '#next'."""
    info = tree[node]
    info["#w"] = 1
    # Keep the original traversal order; missing keys still raise KeyError.
    for link in ('#params', '#children', '#pre', '#decorators', '#next'):
        for child in (info[link] or ()):
            info["#w"] += treeWeight(tree, child)
    return info["#w"]
def treeSpace(material, tree, node, num):
    """Assign the indentation width tree[node]['#s'] used when rendering code,
    recursing through children/pre/decorator/next links.

    '#routed' records the strongest link kind already used to reach a node
    (3=child, 2=pre/decorator, 1=next) so weaker links never re-route it.
    """
    # class/function bodies indent one level (4 spaces) deeper than their header.
    if material.get(node, {"stype":""})["stype"] in ('class', 'function', '') and not material.get(node, {"method":""})['method'] == '@':
        tree[node]["#s"] = num + 4
    else:
        tree[node]["#s"] = num
    if material.get(node, {"stype":""})["stype"] == 'list':
        childnum = tree[node]["#s"] + 4
    else:
        childnum = tree[node]["#s"]
    if len(tree[node]["#children"]) > 0:
        for one in tree[node]["#children"]:
            if tree[one]['#routed'] and tree[one]['#routed'] > 3:
                continue
            tree[one]['#routed'] = 3
            treeSpace(material, tree, one, childnum)
    if tree[node]['#pre']:
        for one in tree[node]['#pre']:
            if tree[one]['#routed'] and tree[one]['#routed'] > 2:
                continue
            tree[one]['#routed'] = 2
            treeSpace(material, tree, one, childnum)
    if tree[node]['#decorators']:
        for one in tree[node]['#decorators']:
            if tree[one]['#routed'] and tree[one]['#routed'] > 2:
                continue
            tree[one]['#routed'] = 2
            treeSpace(material, tree, one, childnum)
    if tree[node]['#next']:
        for one in tree[node]['#next']:
            if tree[one]['#routed'] and tree[one]['#routed'] > 1:
                continue
            tree[one]['#routed'] = 1
            treeSpace(material, tree, one, childnum)
            # NOTE(review): the node is walked a second time below with a
            # possibly different indent; this double traversal looks
            # suspicious -- confirm it is intentional before changing.
            if material[one]['datatype'] in ('function', 'execute'):
                treeSpace(material, tree, one, childnum-4)
            else:
                treeSpace(material, tree, one, childnum)
def initParams(material, tree):
    """Attach every 'params' node in `material` to its owner's '#params' list.

    NOTE(review): indexes `tree` with the raw sid (not str(sid)); assumes the
    sid values of params nodes are already the string keys used by `tree`.
    """
    for key, info in material.items():
        if info['datatype'] != 'params':
            continue
        tree[info['sid']].setdefault('#params', []).append(key)
def findParams(material, tree, node, route='', parents=[], params=[]):
    """Collect rendered parameter texts for `node`'s call/def into `params`.

    `route` accumulates the visited node ids so each parameter can be
    attributed (`belong`) to the parent class it was reached through.
    NOTE(review): mutable defaults (`parents`, `params`) are shared between
    calls; every call site passes fresh lists, but this is fragile.
    """
    route = route + '%s,' % node
    if parents:
        # For __init__ rendering, also climb into parent classes.
        if material[node]['method'] == 'params' and material[node]['pid'] and material[material[node]['pid']]['datatype'] == 'class':
            findParams(material, tree, material[node]['pid'], route, parents, params)
        if material[node]['method'] == 'init' and material[node]['stype'] == 'class':
            findParams(material, tree, str(material[node]['sid']), route, parents, params)
    if tree[node]:
        for one in tree[node]['#params']:
            if material[one]['datatype'] == 'class':
                route = route + '%s,' % one
                findParams(material, tree, one, route, parents, params)
            else:
                route = route + node
                # Attribute the parameter to the first parent found on the route.
                for p in parents:
                    if ('%s,' % p) in route:
                        belong = p
                        break
                else:
                    belong = None
                if material[one]['datatype'] == 'execute':
                    params.append({'name':material[one]['name'], 'belong':belong, 'txt':'%s=%s(%s%s)' % (material[one]['name'], material[str(material[material[one]['pid']]['sid'])]['name'], material[material[material[one]['pid']]['pid']]['name'], material[material[one]['pid']]['xpath'])})
                elif material[one]['datatype'] == 'method':
                    params.append({'name':material[one]['name'], 'belong':belong, 'txt':'%s=%s%s' % (material[one]['name'], material[material[one]['pid']]['name'], material[one]['xpath'])})
                elif material[one]['default'] is None:
                    # Required parameters go to the front of the list.
                    if material[one]['stype'] == 'execute':
                        params.insert(0, {'name':material[one]['name'], 'belong':belong, 'txt':'%s=%s' % (material[one]['name'], material[one]['name'])})
                    else:
                        params.insert(0, {'name':material[one]['name'], 'belong':belong, 'txt':'%s' % material[one]['name']})
                else:
                    # Defaulted parameters keep their literal (quoted for str).
                    params.append({'name':material[one]['name'], 'belong':belong, 'txt':"%s='%s'" % (material[one]['name'], material[one]['default']) if material[one]['datatype'] == 'str' else '%s=%s' % (material[one]['name'], material[one]['default'])})
def nodeDecorate(material, tree, node):
    """Render one decorator line ('@name(arg)'); returns '' when the node
    does not produce a textual decorator."""
    if material[node]['default'] is not None:
        # String defaults are quoted; other literals are emitted verbatim.
        txt = '\n%s@%s("%s")' % (' ' * tree[node]['#s'], material[node]['name'], material[node]['default']) if material[node]['datatype'] == 'str' else '\n%s@%s(%s)' % (' ' * tree[node]['#s'], material[node]['name'], material[node]['default'])
        return txt
    if material[node]['name'] == 'next' and material[node]['pid']:
        # @next(<target function>)
        txt = '\n%s@%s(%s)' % (' ' * tree[node]['#s'], material[node]['name'], material[material[node]['pid']]['name'])
        return txt
    if material[node]['name'] == 'store':
        if material[node]['datatype'] == 'execute':
            params = []
            findParams(material, tree, node, params=params)
            txt = '\n%s@%s(%s)' % (' ' * tree[node]['#s'], material[node]['name'], ', '.join(one['txt'] for one in params))
            return txt
    return ''
def nodeClass(material, tree, node):
    """Render a `class` statement; bases come from the node's '#params'
    (defaulting to `object`), with a `pass` body when the class is empty."""
    txt = '\n'
    if tree[node]['#params']:
        b = '(%s):' % ', '.join([material[one]['name'] for one in tree[node]['#params']])
    else:
        b = '(object):'
    txt += '%s%s%s%s' % (' ' * tree[node]['#s'], 'class ', material[node]['name'], b)
    if not tree[node]['#hastxt']:
        # BUG FIX: the original indexed material[node]['#s'] -- the '#s'
        # indent lives in `tree`, so this always raised KeyError -- and it
        # omitted the newline before `pass` (cf. nodeFunction), which would
        # have produced invalid generated source.
        txt += '\n%s%s' % (' ' * (tree[node]['#s'] + 4), 'pass')
    return txt + '\n'
def nodeFunction(material, tree, node):
    """Render a `def` statement; for __init__ also render the matching
    super().__init__ / Parent.__init__ delegation line(s)."""
    txt = '\n'
    params = []
    if material[node]['name'] == '':
        return ''
    if material[node]['name'] == '__init__':
        # Parent classes are discovered through the enclosing class's params.
        parents = []
        for one in tree[str(material[node]['sid'])]['#params']:
            try:
                if material[one]['pid'] and material[material[one]['pid']]['datatype'] == 'class':
                    parents.append(material[one]['pid'])
            except:
                # Debug aid kept from the original author (Python 2 prints).
                print one
                print material[one]['pid']
                raise
        findParams(material, tree, node, parents=parents, params=params)
    else:
        findParams(material, tree, node, params=params)
    if material[node]['stype'] == 'class':
        # Methods get an explicit leading `self`.
        params.insert(0, {'name':'self', 'belong':None, 'txt':'self'})
    txt += '%s%s%s%s' % (' ' * tree[node]['#s'], 'def ', material[node]['name'], '(%s):' % ', '.join([one['txt'] for one in params]))
    if material[node]['name'] == '__init__' and len(set([one['belong'] for one in params if one['belong'] is not None])) > 0:
        if len(parents) == 1:
            # Single inheritance: delegate through super().
            txt += '\n%ssuper(%s, self).__init__(%s)' % (' ' * (tree[node]['#s'] + 4), material[str(material[node]['sid'])]['name'], ', '.join(['%s=%s' % (one['name'], one['name']) for one in params if one['belong'] is not None]))
        else:
            # Multiple inheritance: call each parent's __init__ directly.
            for p in parents:
                txt += '%s%s.__init__(self, %s)' % (' ' * (tree[node]['#s'] + 4), material[p]['name'], ', '.join(['%s=%s' % (one['name'], one['name']) for one in params if one['belong'] == p]))
    elif not tree[node]['#hastxt']:
        txt += '\n%s%s' % (' ' * (tree[node]['#s'] + 4), 'pass')
    return txt + '\n'
def nodeImport(material, tree, node):
    """Render an import line; emits an `as` alias when name differs from content."""
    info = material[node]
    indent = ' ' * tree[node]['#s']
    if info['content'] == info['name']:
        pieces = [info['xpath'], info['content']]
    else:
        pieces = [info['xpath'], info['content'], 'as', info['name']]
    return '%s%s' % (indent, ' '.join(pieces)) + '\n'
def findIndex(material, node):
    """Walk up the sid chain while nodes carry an 'index', collecting the
    indexed nodes (outermost first) and returning (root_node, index_nodes)."""
    chain = []
    while material[node]['index']:
        chain.insert(0, node)
        node = str(material[node]['sid'])
    return node, chain
def nodeAssignleft(material, tree, node):
    """Render the left-hand side of an assignment: the root name plus any
    ["key"]/[i] index chain, prefixed with `self.` inside class bodies or
    __init__ scopes."""
    root, indexs = findIndex(material, node)
    # dict parents get quoted string keys, everything else a bare index.
    name = '%s%s' % (material[root]['name'], ''.join(['["%s"]' % material[one]['index'] if material[str(material[one]['sid'])]['datatype'] == 'dict' else '[%s]' % material[one]['index'] for one in indexs]))
    if material[str(material[node]['sid'])]['datatype'] == 'class' or material[str(material[node]['sid'])]['name'] == '__init__':
        return 'self.%s' % name
    else:
        return name
def nodeAssignright(material, tree, node):
    """Render the right-hand side of an assignment for the node's method kind:
    a call, a .find/.findall extraction, a %-format, an attribute/path access,
    or a literal default."""
    if material[node]['default'] is None:
        if material[node]['datatype'] == 'execute':
            # Function call with rendered parameter list.
            params = []
            findParams(material, tree, node, params=params)
            txt = '%s(%s)' % (material[material[node]['pid']]['name'], ', '.join([one['txt'] for one in params]))
            return txt
        elif material[node]['method'] == '.find':
            # Walk up to the nearest object/execute ancestor to name the base
            # variable and the format helper (get<Fmt>NodeContent).
            pid = material[node]['pid']
            bid = None
            while True:
                if bid is None and material[pid]['datatype'] in ('object', 'execute'):
                    bid = material[node]['pid']
                if material[pid]['datatype'] == 'execute':
                    break
                else:
                    pid = material[pid]['pid']
            if material[node]['xpath'] is None:
                return 'get%sNodeContent(%s, %s)' % (tree[pid]['#format'], material[bid]['name'], '"TEXT"' if material[node]['content'] == 'TEXT' else material[node]['content'])
            else:
                return "get%sNodeContent(%s.find('%s'), %s)" % (tree[pid]['#format'], material[bid]['name'], material[node]['xpath'], '"TEXT"' if material[node]['content'] == 'TEXT' else material[node]['content'])
        elif material[node]['method'] == '.findall':
            return "%s.findall('%s')" % (material[material[node]['pid']]['name'], material[node]['xpath'])
        elif material[node]['method'] == '%':
            # String interpolation: "<xpath>" % <source>, with self./index forms.
            if material[material[node]['pid']]['datatype'] == 'class' or material[str(material[material[node]['pid']]['sid'])]['name'] == '__init__':
                return '"%s" %s self.%s' % (material[node]['xpath'], material[node]['method'], material[material[node]['pid']]['name'])
            elif material[material[node]['pid']]['name']:
                return '"%s" %s %s' % (material[node]['xpath'], material[node]['method'], material[material[node]['pid']]['name'])
            else:
                return '"%s" %s %s["%s"]' % (material[node]['xpath'], material[node]['method'], material[str(material[material[node]['pid']]['sid'])]['name'], material[material[node]['pid']]['index'])
        else:
            # Plain attribute / path access on the parent value.
            if material[material[node]['pid']]['datatype'] == 'class' or material[str(material[material[node]['pid']]['sid'])]['name'] == '__init__':
                return 'self.%s' % material[material[node]['pid']]['name'] + ('' if material[node]['xpath'] is None else material[node]['xpath'])
            else:
                return material[material[node]['pid']]['name'] + ('' if material[node]['xpath'] is None else material[node]['xpath'])
    else:
        # Literal default; strings are quoted.
        if material[node]['datatype'] == 'str':
            return "'%s'" % material[node]['default']
        else:
            return material[node]['default']
def nodeCommon(material, tree, node):
    """Render an indented `lhs = rhs` assignment line for the node."""
    return '%s%s = %s\n' % (' ' * tree[node]['#s'], nodeAssignleft(material, tree, node), nodeAssignright(material, tree, node))
def nodeExecute(material, tree, node):
    """Render a function call statement, optionally assigning its result
    when the node carries a name."""
    params = []
    findParams(material, tree, node, params=params)
    if material[node]['name'] == '':
        return '%s%s(%s)' % (' ' * tree[node]['#s'], material[material[node]['pid']]['name'], ', '.join(['%s=%s'%(one['name'], one['name']) for one in params])) + '\n'
    else:
        return '%s%s = %s(%s)' % (' ' * tree[node]['#s'], material[node]['name'], material[material[node]['pid']]['name'], ', '.join(['%s=%s'%(one['name'], one['name']) for one in params])) + '\n'
def nodeFor(material, tree, node):
    """Render an indented `for <name> in <parent>:` header line."""
    indent = ' ' * tree[node]['#s']
    target = material[node]['name']
    source = material[material[node]['pid']]['name']
    return '%sfor %s in %s:\n' % (indent, target, source)
def nodeYield(material, tree, node):
    """Render a `yield` line: a constructor call built from the node's '#pre'
    links for 'object' nodes, otherwise a bare `yield <name>`."""
    indent = ' ' * tree[node]['#s']
    if material[node]['datatype'] == 'object':
        ctor = material[material[node]['pid']]['name']
        args = ', '.join(['%s=%s' % (material[one]['name'], material[one]['name']) for one in tree[node]['#pre']])
        return '%syield %s(%s)\n' % (indent, ctor, args)
    return '%syield %s\n' % (indent, material[node]['name'])
def nodeTxt(material, tree, node):
    """Dispatch on the node's method/datatype and render its source text;
    unknown nodes render as '' and plain ones as an assignment."""
    if not node in material:
        return ''
    # NOTE(review): `in ('params')` is a substring test on the string
    # 'params' (no trailing comma => not a tuple); for the method values
    # observed it behaves like equality, but '' also matches -- confirm.
    if material[node]['method'] in ('params'):
        return ''
    elif material[node]['method'] == 'import':
        return nodeImport(material, tree, node)
    elif material[node]['method'] == 'init':
        txt = '\n'
        if material[node]['datatype'] == 'module':
            return ''
        elif material[node]['datatype'] == 'class':
            return nodeClass(material, tree, node)
        elif material[node]['datatype'] == 'function':
            return nodeFunction(material, tree, node)
        elif material[node]['datatype'] == 'execute':
            return nodeExecute(material, tree, node)
        else:
            return ''
    elif material[node]['method'] == '@':
        return nodeDecorate(material, tree, node)
    elif material[node]['method'] == 'in':
        return nodeFor(material, tree, node)
    elif material[node]['method'] == 'yield':
        return nodeYield(material, tree, node)
    else:
        return nodeCommon(material, tree, node)
def treeTxt(material, tree, node):
    """Emit source text for `node` and its subtree: '#next', '#pre' and
    '#decorators' subtrees first, then the node itself (at most once when the
    module-level DISTINCT flag is set), then '#children' -- each group in
    ascending '#w' weight order."""
    txt = ''
    if tree[node]['#next']:
        order = [(one, tree[one]["#w"]) for one in tree[node]["#next"]]
        order.sort(key=lambda m:m[-1])
        for one in order:
            txt += treeTxt(material, tree, one[0])
    if tree[node]['#pre']:
        order = [(one, tree[one]["#w"]) for one in tree[node]["#pre"]]
        order.sort(key=lambda m:m[-1])
        for one in order:
            txt += treeTxt(material, tree, one[0])
    if tree[node]['#decorators']:
        order = [(one, tree[one]["#w"]) for one in tree[node]["#decorators"]]
        order.sort(key=lambda m:m[-1])
        for one in order:
            txt += treeTxt(material, tree, one[0])
    if DISTINCT:
        # '#used' guards against rendering a node reachable via several links twice.
        if not tree[node]['#used']:
            txt += nodeTxt(material, tree, node)
            tree[node]['#used'] = True
    else:
        txt += nodeTxt(material, tree, node)
    if len(tree[node]["#children"]) > 0:
        order = [(one, tree[one]["#w"]) for one in tree[node]["#children"]]
        order.sort(key=lambda m:m[-1])
        for one in order:
            txt += treeTxt(material, tree, one[0])
    return txt
def initTree(material):
    """Build the render tree (link lists and bookkeeping flags) from the flat
    `material` node table, sweeping repeatedly until every node is attached
    under its (string) sid parent.

    NOTE(review): if some node's sid chain never resolves, ``total == num``
    is never reached and this loops forever (with 0.1s sleeps) -- confirm
    inputs are always well-formed.
    """
    tree = {'0':{'#w':0, '#s':0,
                 '#children':[],
                 '#params':[],
                 '#decorators':[],
                 '#next':[], '#pre':[], '#store':None,
                 '#used':False,
                 '#routed':None
                 }}
    pieces = material.items()
    total = len(pieces)
    num = 0
    while True:
        for key, val in pieces:
            if key in tree:
                continue
            # A node can only be attached once its structural parent exists.
            if str(val['sid']) in tree:
                num += 1
                tree[key] = {'#w':0, '#s':0, '#children':[], '#hastxt':False, '#params':[], '#decorators':[], '#next':[], '#pre':[], '#store':None, '#used':False, '#routed':None}
                if val['method'] in ['params']:
                    tree[str(val['sid'])]["#params"].append(key)
                elif val['method'] in ['='] and material.get(str(val['sid']), {"datatype":""})["datatype"] in ['execute']:
                    # Assignment feeding an execute node acts as both a
                    # parameter and a predecessor of that call.
                    tree[str(val['sid'])]["#params"].append(key)
                    # tree[str(material[str(val['sid'])]['sid'])]["#children"].append(key)
                    tree[str(val['sid'])]["#next"].append(key)
                elif val['method'] == '@':
                    tree[str(val['sid'])]['#decorators'].append(key)
                    if val['name'] == 'next' and val['pid']:
                        tree[str(val['sid'])]["#next"].append(val['pid'])
                    if val['name'] == 'store':
                        tree[str(val['sid'])]['#store'] = key
                elif material.get(str(val['sid']), {"name":"None"})['name'] == "":
                    tree[str(val['sid'])]['#pre'].append(key)
                    if val['pid']:
                        tree[str(val['sid'])]['#next'].append(val['pid'])
                        tree[str(val['sid'])]['#next'] = list(set(tree[str(val['sid'])]['#next']))
                elif val['method'] == '.findall':
                    tree[key]['#pre'].append(val['pid'])
                    tree[str(val['sid'])]['#children'].append(key)
                    tree[str(val['sid'])]['#hastxt'] = True
                else:
                    if val['method'] == 'yield' and val['name']:
                        tree[key]['#next'].append(val['pid'])
                    tree[str(val['sid'])]['#children'].append(key)
                    tree[str(val['sid'])]['#hastxt'] = True
                if val['name'] == 'format' and material[str(val['sid'])]['datatype'] == 'execute':
                    # Remember the content format so .find renders get<Fmt>NodeContent.
                    tree[str(val['sid'])]['#format'] = val['default'].capitalize()
        time.sleep(0.1)
        if total == num:
            break
    return tree
def cook(material):
    """Generate a complete module's source text from the `material` node table."""
    header = '#!/usr/bin/env python\n# coding=utf-8\n\n'
    footer = '\nif __name__ == "__main__":\n    pass\n\n'
    tree = initTree(material)
    treeWeight(tree, '0')
    treeSpace(material, tree, '0', -8)
    initParams(material, tree)
    return header + treeTxt(material, tree, '0') + footer
if __name__ == '__main__':
pass | {"/setup.py": ["/ask/__init__.py"]} |
56,680 | spawn3/python-util | refs/heads/master | /lich/lich/exc.py | #!/usr/bin/env python2
#-*- coding: utf-8 -*-
import sys
import datetime
import json
reload(sys)
sys.setdefaultencoding('utf-8')
# Numeric status-code -> localized (zh_CN) message table.
# Ranges: 0-99 generic, 100 database, 2xx physical/cluster,
# 3xx pool, 4xx volume, 5xx snapshot.
TipDict = {
    # NORMAL
    0: '异常',
    1: '名字已存在',
    2: '参数错误',
    100: '数据库操作失败',
    # Physical
    200: '集群不存在',
    201: '集群中没有节点',
    202: '集群中没有可连接节点',
    # Logical
    300: '存储池不存在',
    301: '存储池非空',
    400: '卷不存在',
    500: '快照不存在',
    501: '快照处于保护模式',
}
# Class-name -> localized (zh_CN) message prefix used by UmpException.
TIPS = {
    'InvalidParameter': '无效参数',
    'DBError': '数据库错误',
    'NotFound': '没有发现资源',
    'PoolNotFound': '存储池不存在',
    'PoolNotEmpty': '存储池非空',
}


class UmpException(Exception):
    """Base class for UMP errors.

    The message starts with the localized text for the concrete class name
    (looked up in TIPS, falling back to the class name itself); the first
    positional argument and/or the JSON-encoded keyword arguments are
    appended when present.
    """

    def __init__(self, *args, **kw):
        cls_name = type(self).__name__
        msg = TIPS.get(cls_name, cls_name)
        if args:
            msg = '%s: %s' % (msg, args[0])
        if kw:
            msg = '%s: %s' % (msg, json.dumps(kw))
        super(UmpException, self).__init__(msg)
# Broad failure families; each inherits UmpException's message construction
# (localized prefix from TIPS keyed by the concrete class name).
class CheckFunNotDefined(UmpException):
    pass


class DBError(UmpException):
    pass


class NameExists(UmpException):
    pass


class NotFound(UmpException):
    pass


class AlreadyExists(UmpException):
    pass


class ReferencedError(UmpException):
    pass


class InvalidParameter(UmpException):
    pass


class StatusError(UmpException):
    pass


class QuotaError(UmpException):
    pass
## ---------------------------------------------------------
# Token / lich / auth / host / image / resource failures.
class TokenNotfound(NotFound):
    pass


class TokenExpire(UmpException):
    pass


class TokenError(UmpException):
    pass


class LichFault(Exception):
    # NOTE(review): deliberately NOT a UmpException subclass -- confirm
    # callers distinguish lich faults from UMP errors before changing.
    pass


class LichLiscenFault(LichFault):
    pass


class AuthenticationFailed(UmpException):
    pass


class PermissionDenied(UmpException):
    pass


class IpRangeError(UmpException):
    pass


class HostUnable(UmpException):
    pass


class HostNotEmpty(UmpException):
    pass


class HostnameDuplica(UmpException):
    pass


class ImageUnable(UmpException):
    pass


class ResourcesOver(UmpException):
    '''Resource spans multiple clusters.'''
    pass


class ResourcesNotFound(UmpException):
    pass


class ResourcesTypeError(UmpException):
    '''Wrong resource type.'''
    pass


class ResourcesInuse(UmpException):
    '''Resource is currently in use.'''
    pass


class ResourcesConditionDeficit(UmpException):
    '''Resource preconditions are not satisfied.'''
    pass


class OperateReject(UmpException):
    '''Operation was rejected.'''
    pass


class UnknownParameter(UmpException):
    '''Parameter that cannot be handled.'''
    pass


class DeleteError(UmpException):
    pass


class Duplica(UmpException):
    pass


class InvalidPath(InvalidParameter):
    pass
# Storage-object specific failures, grouped by entity.
# CLUSTER
class ClusterNotFound(NotFound):
    pass


class ClusterDuplica(UmpException):
    pass


class ClusterDoubleIp(UmpException):
    pass


class ClusterNotEmpty(UmpException):
    pass


# Protection Domain
class PDomainNotFound(NotFound):
    pass


# Protocol
class ProtocolNotSupported(NotFound):
    pass


# USER
class UserNotFound(NotFound):
    pass


class UserPasswordError(UmpException):
    pass


# POOL
class VpoolNotEmpty(UmpException):
    pass


class VpoolUnable(UmpException):
    pass


class VpoolConditionDeficit(UmpException):
    pass


class PoolNotFound(NotFound):
    pass


class PoolFound(AlreadyExists):
    pass


class PoolQuotaError(QuotaError):
    pass


class PoolNotEmpty(UmpException):
    pass


# VOLUME
class VolumeNotFound(NotFound):
    pass


class VolumeFound(AlreadyExists):
    pass


class VolumeInuse(UmpException):
    pass


class VolumesTooMany(UmpException):
    pass


class VolumeLocked(UmpException):
    pass


class VolumeError(UmpException):
    pass


class VolumeReferenced(ReferencedError):
    pass


# SNAPSHOT
class SnapshotNotFound(NotFound):
    pass


class SnapshotFound(AlreadyExists):
    pass


class SnapshotProtected(StatusError):
    pass


class SnapshotReferenced(ReferencedError):
    pass


# VGROUP
class VGroupNotFound(NotFound):
    pass


class VGroupReferenced(ReferencedError):
    pass


# CGSNAPSHOT
class CGSnapshotNotFound(NotFound):
    pass
if __name__ == '__main__':
    # Demo: the printed traceback message includes the JSON-encoded kwargs.
    import traceback
    try:
        raise NameExists(a=1)
    except Exception, e:  # Python 2 except syntax, kept for file consistency
        traceback.print_exc()
| {"/setup.py": ["/ask/__init__.py"]} |
56,681 | spawn3/python-util | refs/heads/master | /ask/algo/cn.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
def mul(m, n):
    """Return the product m * (m+1) * ... * n over the inclusive range."""
    res = 1
    for x in range(m, n+1):
        res = res * x
    return res


def comp(m, n):
    """Binomial coefficient C(m, n) = m! / (n! * (m-n)!).

    BUG FIX: uses floor division so the result stays an exact int under
    Python 3 (true division returned a float there). The quotient is always
    exact for binomial coefficients, so Python 2 results are unchanged.
    """
    return mul(m - n + 1, m) // mul(1, n)
if __name__ == '__main__':
    # C(m, 2) sanity checks: expect 3, 15, 45.
    print comp(3, 2)
    print comp(6, 2)
    print comp(10, 2)
    # For each r, compare n * C(m, r) across (group-size m, group-count n)
    # splittings of 72 items -- presumably a combinatorial comparison table.
    for r in [1,2,3]:
        for m, n in [(72, 1), (36, 2), (24, 3), (18, 4), (9, 8)]:
            print 'N=%d, R=%d' % (m, r), n * comp(m, r)
        print
| {"/setup.py": ["/ask/__init__.py"]} |
56,682 | spawn3/python-util | refs/heads/master | /spider/gdc/task/proxy/spiderXici.py | #!/usr/bin/env python
# coding=utf-8
from pymongo import MongoClient
from datetime import timedelta
from datetime import datetime
from webcrawl.request import requGet
from webcrawl.request import requPost
from webcrawl.request import getHtmlNodeContent
from webcrawl.request import getXmlNodeContent
from webcrawl.task import retry
from webcrawl.task import index
from webcrawl.task import initflow
from webcrawl.request import getJsonNodeContent
from webcrawl.task import store
from webcrawl.task import timelimit
from webcrawl.task import next
from webcrawl.request import ensureurl
from webcrawl.request import parturl
from model.setting import withData, datacfg
from proxyspider import Data
from proxyspider import TIMEOUT
from proxyspider import SpiderProxyOrigin
class SpiderXicidaili(SpiderProxyOrigin):
    """
    Spider for the xicidaili.com free proxy listings.
    """
    def __init__(self, worknum=30, queuetype='P', worktype='COROUTINE', timeout=-1, tid=0):
        super(SpiderXicidaili, self).__init__(worknum=worknum, queuetype=queuetype, worktype=worktype, timeout=timeout, tid=tid)
        self.dt = datetime.now()
        # Desktop UA; the site rejects the default urllib agent.
        self.headers = {
            'User-Agent':'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.93 Safari/537.36'
        }

    @store(withData(datacfg.W), Data.insert, update=True, method='MANY')
    @timelimit(3)
    def fetchDetail(self, proxy, additions={}, timeout=TIMEOUT, implementor=None):
        """Turn one parsed proxy tuple into a Data row for storage."""
        ip, port, location, safetype, usetype, refspeed, status, update_time = proxy
        data = Data(ip=ip, port=port, location=location, safetype=safetype, usetype=usetype, refspeed=refspeed, usespeed=0, usenum=0, status=status, update_time=update_time, tid=self.tid)
        yield data

    @next(fetchDetail)
    @timelimit(3)
    @index('url')
    @initflow('www')
    def fetchList(self, url, additions={}, timeout=TIMEOUT, implementor=None):
        """Parse one listing page; yields the next-page link slot (currently
        disabled, always None) followed by one proxy tuple per table row."""
        result = requGet(url, headers=self.headers, timeout=timeout, format='HTML')
        proxys = result.findall('.//table[@id="ip_list"]//tr')
        if len(proxys) < 100:
            nextpage = None
        else:
            # Listing URLs end with the page number: .../nn/<page>.
            index = url.split('/')
            index[-1] = str(int(index[-1]) + 1)
            nextpage = '/'.join(index)
        # yield nextpage
        yield None
        for one in proxys:
            detail = one.findall('.//td')
            if len(detail) < 6:
                # Header / separator rows carry fewer cells.
                continue
            ip = getHtmlNodeContent(detail[2], 'TEXT')
            port = int(getHtmlNodeContent(detail[3], 'TEXT') or 0)
            location = getHtmlNodeContent(detail[4], 'TEXT')
            safetype = getHtmlNodeContent(detail[5], 'TEXT')
            usetype = getHtmlNodeContent(detail[6], 'TEXT')
            # Speed is carried in the bar's title attribute, e.g. "0.123秒".
            refspeed = float(getHtmlNodeContent(detail[7].find('.//div[@class="bar"]'), {'ATTR':'title'}).replace('秒', ''))
            status = 1
            update_time = datetime.strptime('20'+getHtmlNodeContent(detail[9], 'TEXT')+':00', '%Y-%m-%d %H:%M:%S')
            yield {'proxy':(ip, port, location, safetype, usetype, refspeed, status, update_time)}
if __name__ == '__main__':
print 'start'
spider = SpiderXici(worknum=6, queuetype='P', worktype='THREAD')
spider.fetchDatas('www', 0, 'http://www.xicidaili.com/nn/1')
spider.statistic()
print 'end'
| {"/setup.py": ["/ask/__init__.py"]} |
56,683 | spawn3/python-util | refs/heads/master | /lich/lich/disk.py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
from base import LichBase, RemoteLocation
from runner import http_runner
class LichDiskParam(RemoteLocation):
    """Location descriptor for disk operations on one cluster node."""

    def __init__(self, host_ip=None, cluster_id=1, dev_name=None):
        super(LichDiskParam, self).__init__(host_ip=host_ip, cluster_id=cluster_id)
        self.dev_name = dev_name

    def get_names(self, sep=' '):
        """Join a list of device names with `sep`; pass scalars through unchanged."""
        names = self.dev_name
        if isinstance(names, list):
            return sep.join(names)
        return names
class LichDisk(LichBase):
    """Wrapper around the lich node CLI for per-node disk management.

    Methods decorated with @http_runner return only the command string;
    the decorator ships it to `param`'s node for execution -- see runner.
    """

    @http_runner()
    def add(self, param, is_force=False):
        cmd = '%s --disk_add %s ' % (self.lich_node, param.dev_name)
        if is_force:
            cmd += '--force'
        return cmd

    @http_runner()
    def delete(self, param):
        cmd = '%s --disk_del %s ' % (self.lich_node, param.dev_name)
        return cmd

    @http_runner()
    def list(self, param):
        cmd = '%s --disk_list --json' % (self.lich_node)
        return cmd

    @http_runner()
    def add_raid(self, param):
        # NOTE(review): issues --disk_list, same as list/delete_raid --
        # looks like an unimplemented stub; confirm before relying on it.
        cmd = '%s --disk_list ' % (self.lich_node)
        return cmd

    @http_runner()
    def delete_raid(self, param):
        # NOTE(review): same --disk_list stub as add_raid; confirm.
        cmd = '%s --disk_list ' % (self.lich_node)
        return cmd

    @http_runner()
    def raid_missing(self, param):
        cmd = '%s --raid_miss' % (self.lich_node)
        return cmd

    @http_runner()
    def light(self, param, op, device, serial_number=None):
        # Serial number, when given, takes precedence over the device path.
        cmd = '%s --disk_light %s %s' % (self.lich_node, op, device)
        if serial_number:
            cmd = '%s --disk_light %s %s' % (self.lich_node, op, serial_number)
        return cmd
if __name__ == '__main__':
    # Manual smoke test against a specific node; requires lich tooling.
    param = LichDiskParam(host_ip='192.168.120.211', dev_name='/dev/vda')
    disk = LichDisk()
    print disk.list(param)
    # print disk.add(param)
| {"/setup.py": ["/ask/__init__.py"]} |
56,684 | spawn3/python-util | refs/heads/master | /ask/algo/dct.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from collections import defaultdict
def dict_append(d, k, v):
    """Append `v` to the list stored under `d[k]`, creating the list if needed."""
    d.setdefault(k, []).append(v)
    return d
def dict_inc(d, k, v):
    """Add `v` to the counter stored under `d[k]` (missing keys start at 0)."""
    d[k] = d.get(k, 0) + v
    return d
def test_defaultdict():
    """defaultdict(list) groups values by key without explicit initialization."""
    pairs = [('yellow', 1), ('blue', 2), ('yellow', 3), ('blue', 4), ('red', 1)]
    grouped = defaultdict(list)  # also works with int, set factories
    for colour, value in pairs:
        grouped[colour].append(value)
    assert(len(grouped) == 3)
def add(*args):
    """Sum the positional arguments (0 for no arguments).

    BUG FIX: the original doctest claimed ``add(1, 2)`` returns 4, so the
    module's ``doctest.testmod()`` run always reported a failure.

    >>> add(1, 2)
    3
    """
    return sum(args)
if __name__ == '__main__':
    # Run the doctests embedded in this module (see `add`).
    import doctest
    doctest.testmod()
| {"/setup.py": ["/ask/__init__.py"]} |
56,685 | spawn3/python-util | refs/heads/master | /spider/gdc/task/audio/spiderMengfou.py | #!/usr/bin/python
# coding=utf-8
import copy, time
from pymongo import MongoClient
from datetime import timedelta
from datetime import datetime
from webcrawl.request import requGet
from webcrawl.request import requPost
from webcrawl.request import getHtmlNodeContent
from webcrawl.request import getXmlNodeContent
from webcrawl.task import retry
from webcrawl.task import index
from webcrawl.task import initflow
from webcrawl.request import getJsonNodeContent
from webcrawl.task import store
from webcrawl.task import timelimit
from webcrawl.task import next
from webcrawl.request import ensureurl
from webcrawl.request import parturl
from model.setting import withData, datacfg
from audiospider import Data
from audiospider import TIMEOUT
from audiospider import SpiderAudioOrigin
#_print, logger = logprint(modulename(__file__), modulepath(__file__))
def seconds(tl):
    """Convert a list of time components into a total number of seconds.

    `tl` holds the most-significant component first; elements may be
    strings or ints: ['45'] -> 45, ['3', '25'] -> 205 (mm:ss),
    ['1', '0', '0'] -> 3600 (hh:mm:ss).

    The original bound was `len(tl) < 3`, which raised AssertionError for
    any track longer than an hour even though the positional accumulation
    below already handles an hours component; the limit is relaxed to
    three components.
    """
    assert len(tl) <= 3
    total = 0
    # walk components least-significant first: 60**0, 60**1, 60**2
    for idx, part in enumerate(tl[::-1]):
        total += pow(60, idx) * int(part)
    return total
class SpiderMoe(SpiderAudioOrigin):
    """
    Spider for the moefou.org / moe.fm audio site (original docstring:
    "萌否官网 数据爬虫").

    Flow 'www': fetchList pages through the music wiki listing and yields
    one subs-API URL per wiki entry; fetchDetail turns each entry into
    per-track Data records that the @store decorator persists.
    """

    def __init__(self, worknum=6, queuetype='P', worktype='COROUTINE', timeout=-1, tid=0):
        super(SpiderMoe, self).__init__(worknum=worknum, queuetype=queuetype, worktype=worktype, timeout=timeout, tid=tid)
        self.clsname = self.__class__.__name__
        # API key interpolated into every moefou.org request URL below
        self.api_key = '3e304078c769743445311c894eb221d90566aa33b'

    @store(withData(datacfg.W), Data.insert, update=True, method='MANY')
    @timelimit(3)
    def fetchDetail(self, url, additions={}, timeout=TIMEOUT, implementor=None):
        """Resolve one wiki entry (album) into per-track Data rows (generator)."""
        result = requGet(url, timeout=timeout, format='JSON')
        outid = url.split('=')[-1]
        url = 'http://api.moefou.org/music/detail.json?wiki_id=%s&api_key={{api_key}}'.replace('{{api_key}}', self.api_key) % outid
        # fall back to an empty wiki skeleton when the detail call fails
        album_result = requGet(url, timeout=timeout, format='JSON') or {'response':{'wiki':{'wiki_meta':[], 'wiki_cover':{'large':''}}}}
        # NOTE(review): `singer` is reassigned on every meta entry, so only the
        # LAST entry counts; if the last meta_key is not 艺术家 the artist is
        # blanked even when an earlier entry held it — looks unintended, confirm.
        for one in album_result['response']['wiki']['wiki_meta']:
            if one['meta_key'] == '艺术家':
                singer = one['meta_value']
            else:
                singer = ''
        pages = result['response']['subs']
        if pages:
            # all tracks share the first track's page hash as their parent id
            parent_page_id = hash('http://moe.fm/listen/h5?song=%s' % str(pages[0]['sub_id']))
            for one in pages:
                if one['sub_upload']:
                    url = one['sub_upload'][0]['up_url']
                    format = 'mp3'
                    # filesize is in bytes; stored as MB
                    size = one['sub_upload'][0]['up_data']['filesize']/float(1024*1024)
                    during = seconds(one['sub_upload'][0]['up_data']['time'].split(':'))
                    tag = []
                    name = one['sub_title']
                    desc = one['sub_about']
                    cover = album_result['response']['wiki']['wiki_cover']['large']
                    snum = int(one['sub_order'])
                    src = '萌否'
                    host = 'moe.fm'
                    page_url = 'http://moe.fm/listen/h5?song=%s' % str(one['sub_id'])
                    page_id = hash(page_url)
                    parent_page_id = parent_page_id
                    # epoch seconds -> naive datetime via a strftime round-trip
                    atime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(float(one['sub_upload'][0]['up_date'])))
                    atime = datetime.strptime(atime, '%Y-%m-%d %H:%M:%S')
                    data = Data(url=url, format=format,
                                size=size, during=during, tag=tag, name=name,
                                desc=desc, cover=cover, snum=snum, singer=singer,
                                src=src, host=host, page_url=page_url,
                                page_id=page_id, parent_page_id=parent_page_id,
                                atime=atime, tid=self.tid)
                    yield data

    @next(fetchDetail)
    @initflow('www')
    @timelimit(20)
    @index('url')
    def fetchList(self, url, additions={}, timeout=TIMEOUT, implementor=None):
        """Walk the paginated wiki listing: yield the next-page URL first
        (None when done), then one subs-API URL dict per wiki entry."""
        url = url.replace('{{api_key}}', self.api_key)
        result = requGet(url, timeout=timeout, format='JSON')
        audios = result['response']['wikis']
        if len(audios) < 20:
            # short page means last page
            nextpage = None
        else:
            index = url.split('=')
            index[-1] = int(index[-1]) + 1
            if index[-1] > 5:
                # hard cap: never crawl past page 5
                nextpage = None
            else:
                index[-1] = str(index[-1])
                nextpage = '='.join(index)
        yield nextpage
        for one in audios:
            yield {'url': 'http://api.moefou.org/music/subs.json?sub_type=song,ep&api_key={{api_key}}&wiki_id=%s'.replace('{{api_key}}', self.api_key) % str(one['wiki_id'])}
if __name__ == '__main__':
print 'start'
spider = SpiderMoefou(worknum=6, queuetype='P', worktype='THREAD')
spider.fetchDatas('www', 0, 'http://api.moefou.org/wikis.json?wiki_type=music&initial=&tag=&wiki_id=&api_key={{api_key}}&page=1')
spider.statistic()
print 'end'
| {"/setup.py": ["/ask/__init__.py"]} |
56,686 | spawn3/python-util | refs/heads/master | /ask/testmode.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
# app path
# Absolute directory containing this module; default base directory when
# looking for the '__test__' marker file.
CURRENT_PATH = os.path.dirname(os.path.abspath(__file__))
def is_testmode(base=CURRENT_PATH):
fname = os.path.join(base, '__test__')
res = os.path.isfile(fname)
if __debug__:
# python -O scripy.py to disable this!!!
print '.. checking filename', fname
if res:
print '== Warning: Run in test mode!'
else:
print '== Warning: Run in normal mode!'
return res
# app start status
# Evaluated once at import time; importers read TEST_MODE to pick between
# test and normal resources.
TEST_MODE = is_testmode()
if TEST_MODE:
    # placeholder: test-mode initialisation (none yet)
    pass
else:
    # placeholder: normal-mode initialisation (none yet)
    pass
# redis connection
# mongodb connection
| {"/setup.py": ["/ask/__init__.py"]} |
56,687 | spawn3/python-util | refs/heads/master | /spider/gdc/updateacfun.py | #!/usr/bin/python
# coding=utf-8
import pymongo
from webcrawl.handleRequest import requGet
if __name__ == '__main__':
    # One-off cleanup script: walk all Acfun video records and drop duplicates
    # whose page_url differs only by a trailing '#<videoId>' fragment.
    # Removed rows are preserved in the video_rubbish collection.
    mc = pymongo.MongoClient('localhost')
    ddj = mc['dandan-jiang']
    for one in ddj.video.find({'src':'Acfun'}):
        if one['parent_page_id'] is None:
            # stand-alone (solo) video
            if not '#' in one['page_url']:
                print '---solo---'
                continue
            # try:
            #     info_result = requGet(one['page_url'], timeout=30, format='JSON')
            #     page_url = one['page_url'] + '#' + str(info_result['data']['fullContent']['videos'][0]['videoId'])
            # except:
            #     print 'error url', one['page_url']
            #     continue
            # if ddj.video.find_one({'page_url':page_url}):
            #     print '===remove solo', one['_id']
            #     ddj.video.remove({'_id':one['_id']})
            #     ddj.video_rubbish.insert(one)
            # else:
            #     print '===solo', page_url
            #     ddj.video.update({'_id':one['_id']}, {'$set':{'page_url':page_url, 'page_id':hash(page_url)}})
            else:
                # NOTE(review): this removes the fragment-less record and keeps
                # the '#'-suffixed one — confirm that is the intended direction.
                page_url = one['page_url'][:one['page_url'].rindex('#')]
                exist = ddj.video.find_one({'page_url':page_url})
                if exist is not None:
                    ddj.video.remove({'_id':exist['_id']})
                    ddj.video_rubbish.insert(exist)
        else:
            # episode that belongs to an album (bangumi)
            if not '#' in one['page_url']:
                print '---album---'
                continue
            # url = 'http://www.acfun.tv/bangumi/bangumi/info?id=%s' % one['page_url'][one['page_url'].rindex('/')+1:one['page_url'].rindex('_')].replace('ab', '')
            # info_result = requGet(url, timeout=30, format='JSON')
            # pages = info_result['data']['videos']
            # pages = sorted(pages, key=lambda v:v['sort'])
            # albums = list(ddj.video.find({'parent_page_id':one['parent_page_id']}).sort([('snum', 1), ]))
            # album_urls = [str(one['page_url'][:one['page_url'].rindex('#')+1]) for one in albums if '#' in one['page_url']]
            # print album_urls
            # for album_one in albums:
            #     if not '#' in album_one['page_url']:
            #         if '%s%s' % (album_one['page_url'], '#') in album_urls:
            #             print '===remove album', album_one['_id']
            #             ddj.video.remove({'_id':album_one['_id']})
            #             ddj.video_rubbish.insert(one)
            #         else:
            #             page_url = album_one['page_url'] + '#' + pages[album_one['snum'] - 1]['danmakuId']
            #             print '===album', page_url
            #             ddj.video.update({'_id':one['_id']}, {'$set':{'page_url':page_url, 'page_id':hash(page_url)}})
            else:
                # same de-duplication as the solo branch
                page_url = one['page_url'][:one['page_url'].rindex('#')]
                exist = ddj.video.find_one({'page_url':page_url})
                if exist is not None:
                    ddj.video.remove({'_id':exist['_id']})
                    ddj.video_rubbish.insert(exist)
56,688 | spawn3/python-util | refs/heads/master | /spider/gdc/task/comic/comicspider.py | #!/usr/bin/env python
# coding=utf-8
from webcrawl.spider import SpiderOrigin
from model.data import Comic as Data
TIMEOUT = 120
class SpiderComicOrigin(SpiderOrigin):
    """Base spider for comic sources; all setup is delegated to SpiderOrigin."""

    def __init__(self, queuetype='P', timeout=-1, worknum=6, worktype='COROUTINE', tid=0):
        super(SpiderComicOrigin, self).__init__(queuetype=queuetype, timeout=timeout, worknum=worknum, worktype=worktype, tid=tid)

    def __del__(self):
        # no cleanup required
        pass
if __name__ == "__main__":
pass
| {"/setup.py": ["/ask/__init__.py"]} |
56,689 | spawn3/python-util | refs/heads/master | /lich/lich/cluster.py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
from base import LichBase, RemoteLocation
from runner import http_runner
import utils
import config
class LichClusterParam(RemoteLocation):
    """Remote-location parameters for cluster-level operations."""

    def __init__(self, host_ip=None, cluster_id=1):
        location = {'host_ip': host_ip, 'cluster_id': cluster_id}
        super(LichClusterParam, self).__init__(**location)
class LichCluster(LichBase):
    """Cluster-wide operations: health check, fs stats, config dump, node list.

    `_`-prefixed methods build and run the raw CLI command via @http_runner;
    their public counterparts parse the textual output with utils helpers.
    """

    def create(self, param):
        # cluster creation is not implemented
        pass

    def delete(self, param):
        # cluster deletion is not implemented
        pass

    @http_runner()
    def _health(self, param):
        """Raw `lich health` command line."""
        return '%s health' % (self.lich)

    def health(self, param):
        """Run the health command and return its parsed result."""
        return utils.parse_health(self._health(param))

    @http_runner()
    def fsstat(self, param):
        """Filesystem statistics command line."""
        return '%s --fsstat' % (self.lichfs)

    @http_runner()
    def _dump_config(self, param):
        """Raw config-dump command line."""
        return '%s --configdump' % (self.lich_admin)

    def dump_config(self, param):
        """Run the config dump and return the parsed global configuration."""
        return utils.parse_global_config(self._dump_config(param))

    @http_runner()
    def _listnode(self, param):
        """Raw listnode command line."""
        return '%s listnode' % self.lich

    def listnode(self, param):
        """Run listnode and return the parsed node list."""
        return utils.parse_listnode(self._listnode(param))
if __name__ == '__main__':
    # smoke test: query and print cluster health of the configured host
    param = LichClusterParam(host_ip=config.host_ip)
    obj = LichCluster()
    res = obj.health(param)
    print
    print res
| {"/setup.py": ["/ask/__init__.py"]} |
56,690 | spawn3/python-util | refs/heads/master | /spider/gdc/hawkeye.py | #!/usr/bin/env python
# coding=utf-8
import time, datetime
import os, sys, json
from webcrawl.request import requPost
from godhand import cook
from setting import USER, SECRET, HOST
import task
LIMIT = 20
# def ensure(filepath):
# if filepath.startswith('/'):
# if not os.path.exists(os.path.split(filepath)[0]):
# os.mkdir(os.path.split(filepath)[0])
# else:
# filepath = os.path.join(os.path.split(os.path.abspath(__file__))[0], filepath)
# if not os.path.exists(os.path.split(filepath)[0]):
# os.mkdir(os.path.split(filepath)[0])
# return filepath
# @withBase(WDB, resutype='DICT')
# def getUnit(uid):
# config = Config.queryOne({'key':'task'}, projection={'val':1})
# unit = Unit.queryOne({'_id':uid}, projection={'dmid':1, 'name':1, 'filepath':1})
# dirfile = ''.join([config['val'], unit['filepath']])
# material = {}
# for one in Datapath.queryAll({'btype':'unit', '$or':[{'bid':uid}, {'bid':0}]}):
# material[str(one['_id'])] = one
# fi = open(ensure(dirfile), 'w')
# fi.write(cook(material))
# fi.close()
# @withBase(WDB, resutype='DICT')
# def getArticle(aid, flow):
# config = Config.queryOne({'type':'ROOT', 'key':'dir'}, projection={'val':1})
# article = Article.queryOne({'username':USER, 'secret':SECRET}, {'_id':aid}, projection={'filepath':1, 'uid':1})
# unit = Unit.queryOne({'_id':article['uid']}, projection={'dmid':1, 'name':1})
# dirfile = ''.join([config['val'], article['filepath']])
# material = {}
# sids = [str(one['_id']) for one in Section.queryAll({'username':USER, 'secret':SECRET}, {'aid':aid, 'flow':flow}, projection={'_id':1})]
# for one in Datapath.queryAll({'$or':[{'btype':'article', '$or':[{'bid':baseorm.IdField.verify(aid)}, {'bid':0}]}, {'btype':'section', 'bid':{'$in':sids}}]}):
# material[str(one['_id'])] = one
# fi = open(ensure(dirfile), 'w')
# fi.write(cook(material))
# fi.close()
# @withBase(WDB, resutype='DICT')
# def initScript():
# for unit in Unit.queryAll():
# getUnit(unit['_id'])
# for article in Article.queryAll({'username':USER, 'secret':SECRET}, {'uid':unit['_id']}):
# for flow in set([section['flow'] for section in Section.queryAll({'username':USER, 'secret':SECRET}, {'aid':article['aid']}, projection={'flow':1})]):
# getArticle(article['_id'], flow)
def setModel(filepath, fileupdate=False):
    """Register the data models declared in *filepath* with the GDS service.

    Scans the file text for '@comment(...)'-decorated class definitions; each
    model not yet known to the service is POSTed as a new datamodel record.
    The file itself is uploaded whenever at least one model was found or
    `fileupdate` is True.
    """
    fi = open(filepath, 'r')
    # normalise to a path relative to the model/ package root
    filepath = 'model/%s' % filepath.split('/model/')[-1]
    data = fi.read()
    fi.close()
    # crude source scan: '@' splits the text at decorator boundaries
    for txt in data.split('@'):
        if 'comment(' in txt:
            comment = None
            model = None
            for line in txt.split('\n'):
                if 'comment' in line:
                    # pull the single-quoted comment text out of comment('...')
                    info = line.replace('(', '').replace(')', '').split("'")
                    comment = info[1]
                if 'class' in line:
                    # model name = lower-cased class name
                    info = line.replace('(', ' ').replace(')', '').replace(',', '').split(' ')
                    model = info[1].lower()
                    break
            if model is not None:
                fileupdate = True
                datamodel = requPost('%sgds/api/datamodel' % HOST, {'condition':json.dumps({'name':model}), 'limit':'one', 'projection':json.dumps({'_id':1})}, format='JSON')
                datamodel = datamodel['datamodel']
                if datamodel:
                    # already registered; skip
                    continue
                # NOTE: rebinds `data` (the file text) to a dict — safe only
                # because data.split('@') was already materialised above
                data = {
                    "name": model,
                    "table": model,
                    "filepath": filepath,
                    "comment": comment,
                }
                print requPost('%sgds/api/datamodel' % HOST, {'data':json.dumps(data)}, format='JSON')
    if fileupdate:
        print requPost('%sgds/api/datamodel' % HOST, files={'file':(filepath, open(filepath, 'rb'))}, format='JSON')
def setUnit(filepath):
    """Register the spider unit defined by *filepath* with the GDS service.

    The unit name is the filename with its 'spider.py' suffix stripped.  On
    first registration the linked data model id is resolved from the
    'import <Model> as Data' line inside the file, then the unit record and
    the file are POSTed together.
    """
    name = filepath[filepath.rindex('/')+1:].replace('spider.py', '')
    unit = requPost('%sgds/api/unit' % HOST, {'condition':json.dumps({'name':name}), 'limit':'one', 'projection':json.dumps({'_id':1})}, format='JSON')
    unit = unit['unit']
    if not unit:
        dmid = None
        fi = open(filepath, 'r')
        for one in fi.readlines():
            if 'as Data' in one:
                # model name = lower-cased imported class name
                model = one.replace('as Data', '').split('import')[-1].strip().lower()
                datamodel = requPost('%sgds/api/datamodel' % HOST, {'condition':json.dumps({'name':model}), 'limit':'one', 'projection':json.dumps({'_id':1})}, format='JSON')
                datamodel = datamodel['datamodel']
                dmid = datamodel['_id']
                break
        fi.close()
        short_filepath = '%s/%s' % (name, filepath[filepath.rindex('/')+1:])
        # NOTE(review): create_time is never used below
        create_time = datetime.datetime.now()
        data = {
            "dmid": dmid,
            "name": name,
            "filepath": short_filepath,
            "desc": '',
        }
        print requPost('%sgds/api/unit' % HOST, {'data':json.dumps(data)}, files={'file':('task/%s' % short_filepath, open(filepath, 'rb'))}, format='JSON')
    else:
        print 'Unit %s has been set.' % name
def setArticle(filepath, fileupdate=False):
    """Register a spider 'article' (one crawl script) and its flow sections.

    Parses the script text for the decorator stack (@next / @index / @retry /
    @timelimit / @store / @initflow) attached to each def, reconstructs the
    per-function section metadata, POSTs the article record when missing and
    then registers every flow's section chain via setSection().
    """
    unit = None
    # the owning unit is identified by the sibling '<name>spider.py' file
    for one in os.listdir(os.path.dirname(filepath)):
        if one.endswith('spider.py'):
            unit = requPost('%sgds/api/unit' % HOST, {'condition':json.dumps({'name':one.replace('spider.py', '')}), 'limit':'one', 'projection':json.dumps({'_id':1})}, format='JSON')
            unit = unit['unit']
            break
    if not unit:
        print 'Please set unit firstly.'
        return
    name = filepath[filepath.rindex('/')+1:filepath.rindex('.')]
    clsname = ''
    article = requPost('%sgds/api/article' % HOST, {'condition':json.dumps({'name':name, 'uid':str(unit['_id'])}), 'limit':'one', 'projection':json.dumps({'_id':1})}, format='JSON')
    article = article['article']
    lines = []
    flows = []
    fi = open(filepath, 'r')
    flag = False
    # pass 1: collect decorator lines plus the def line each stack attaches to
    for line in fi.readlines():
        if line.startswith('class '):
            clsname = line.replace('class ', '').split('(')[0]
        if '@' in line and not 'find' in line and not 'findall' in line:
            lines.append(line.replace('\n', '').replace(' ', ''))
            flag = True
        if 'def ' in line and flag:
            lines.append(line.replace('\n', '').replace(' ', ''))
            flag = False
        if 'initflow' in line and not 'import' in line:
            lines.append(line.replace('\n', '').replace(' ', ''))
            flows.append(line.replace('\n', '').replace(' ', '').replace('@initflow(', '').replace(')', '').replace('"', '').replace("'", ''))
    fi.close()
    sections = {}
    section = {}
    # pass 2: fold collected lines into one metadata dict per function;
    # a 'def' line terminates the current section
    for index, one in enumerate(lines):
        if 'next' in one:
            section['next'] = one.replace('@next(', '').replace(')', '')
        if 'index' in one:
            section['index'] = one.replace('@index(', '').replace(')', '').replace('"', '').replace("'", "")
        if 'retry' in one:
            section['retry'] = one.replace('@retry(', '').replace(')', '')
        if 'timelimit' in one:
            section['timelimit'] = one.replace('@timelimit(', '').replace(')', '')
        if 'store' in one:
            section['store'] = 1
        if 'initflow' in one and not 'import' in one:
            section['flow'] = one.replace('\n', '').replace(' ', '').replace('@initflow(', '').replace(')', '').replace('"', '').replace("'", '')
        if 'def ' in one:
            section['name'] = one.replace('def ', '').split('(')[0]
            sections[section['name']]=section
            section = {}
    if article:
        print 'Article %s has been set.' % name
        if fileupdate:
            print requPost('%sgds/api/article' % HOST, files={'file': (filepath, open(filepath, 'rb'))}, format='JSON')
    else:
        short_filepath = filepath.split('/task/')[-1]
        data = {
            "uid": unit['_id'],
            "name": name,
            "clsname": clsname,
            "filepath": short_filepath,
        }
        print requPost('%sgds/api/article' % HOST, {'data':json.dumps(data)}, files={'file': ('task/%s' % short_filepath, open(filepath, 'rb'))}, format='JSON')
    # NOTE(review): when the article did not exist, `article` is still the
    # falsy lookup result here, so article['_id'] below will raise — the id
    # from the POST response above probably ought to be used instead. Confirm.
    for section_name, section in sections.items():
        if section.get('flow') is None:
            continue
        # NOTE(review): flow_section is never used
        flow_section = section
        step = 1
        setSection(section['flow'], step, section['name'], sections, article['_id'])
def setSection(flow, step, section_name, sections, article_id):
    """Recursively register `section_name` and its @next successors for `flow`.

    Successors are registered first (depth-first) so the current record can
    link to them via next_id.  Returns the section's id (existing or newly
    created).
    """
    data = sections.get(section_name)
    next = data.get('next')
    if next is not None:
        # register the downstream section first to obtain its id
        data['next_id'] = setSection(flow, step+1, next, sections, article_id)
    section = requPost('%sgds/api/section' % HOST, {'condition':json.dumps({'name':section_name, 'aid':str(article_id), 'flow':flow}), 'limit':'one', 'projection':json.dumps({'_id':1})}, format='JSON')
    section = section['section']
    if section:
        print 'Section %s %s has been set.' % (flow, section_name)
        return section['_id']
    else:
        data = {
            "aid": article_id,
            "next_id": data.get('next_id'),
            "name": section_name,
            "flow": flow,
            "step": step,
            "index": data.get('index'),
            "retry": data.get('retry', 0),
            "timelimit": data.get('timelimit', 30),
            "store": data.get('store', 0)
        }
        section = requPost('%sgds/api/section' % HOST, {'data':json.dumps(data)}, format='JSON')
        return section['sid']
if __name__ == '__main__':
    # registration helpers are invoked from elsewhere; nothing runs directly
    pass
| {"/setup.py": ["/ask/__init__.py"]} |
56,691 | spawn3/python-util | refs/heads/master | /lich/lich/license.py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
from base import LichBase, RemoteLocation
from runner import http_runner
class LichLicenseParam(RemoteLocation):
    """Parameter object for license operations; only carries the target host."""

    def __init__(self, host_ip=None):
        super(LichLicenseParam, self).__init__(host_ip)
class LichLicense(LichBase):
    """License-related operations exposed through the HTTP runner."""

    @http_runner()
    def sniffer(self, param):
        """Command line that runs the license sniffer module."""
        return '%s -m sniffer' % (self.lich_license)
if __name__ == '__main__':
    # no standalone behaviour
    pass
| {"/setup.py": ["/ask/__init__.py"]} |
56,692 | spawn3/python-util | refs/heads/master | /spider/gdc/create_index.py | #!/usr/bin/python
#-*- coding:utf-8 -*-
from pymongo import (
MongoReplicaSetClient,
MongoClient,
read_preferences
)
# Shared MongoDB handles; assumes a mongod reachable on localhost.
mc = MongoClient(host='localhost')
grab = mc['dandan-jiang']
tb_raw_video = grab['video']
def main():
    """Build the (parent_page_id, snum) compound index on the video collection.

    Switched from ensure_index to create_index: ensure_index has been
    deprecated since PyMongo 3.0 and removed in PyMongo 4, while
    create_index is its drop-in replacement (also a no-op server-side when
    the index already exists).  background=True keeps writes unblocked
    while the index builds.
    """
    tb_raw_video.create_index([('parent_page_id', 1), ('snum', 1)], background=True)
if __name__ == '__main__':
    # create the indexes when run as a script
    main()
56,693 | spawn3/python-util | refs/heads/master | /ask/exception.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
class MyException(Exception):
    """Application error carrying an error number and a human-readable message.

    `args` is (errno, msg), matching the base Exception convention.
    """

    def __init__(self, errno, msg='MyException'):
        # Exception.__init__ stores its arguments as self.args = (errno, msg)
        super(MyException, self).__init__(errno, msg)
        self.errno = errno
        self.msg = msg
class DataException(MyException):
    """Data-layer error; inherits MyException's (errno, msg) contract."""
    pass
| {"/setup.py": ["/ask/__init__.py"]} |
56,694 | spawn3/python-util | refs/heads/master | /spider/gdc/log.py | #!/usr/bin/env python
# coding=utf-8
import os, sys, redis, time, datetime
import cPickle as pickle
from kokolog import KokologHandler, logging
from kokolog.prettyprint import CFG
from model.log import RunLog
from webcrawl.daemon import Daemon
from model.setting import baseorm
from model.setting import withData, datacfg, LOGNUM, LOGSTATUS, LOGQUEUE
from threading import Thread
path = os.path.abspath('.')
class Producer(KokologHandler):
    """Logging handler that pushes structured run-log records onto a Redis list.

    Only records whose status equals LOGSTATUS are enqueued; Consumer threads
    drain the list and persist the entries through the ORM.
    """

    def __init__(self, **config):
        super(Producer, self).__init__()
        self._name = 'koko'
        # Redis list name acting as the queue ("tube")
        self.tube = config['tube']
        self.q = redis.StrictRedis(host=config['host'], port=config['port'], db=config['db'])

    def emit(self, record):
        # Flatten the kwargs attached to the log record into the RunLog schema.
        data = {'tid':baseorm.IdField.verify(record.kwargs['tid']),
                'sid':record.kwargs['sid'],
                'type':record.kwargs['type'],
                'status':record.kwargs['status'],
                'sname':record.kwargs['sname'],
                'priority':record.kwargs['priority'],
                'times':record.kwargs['times'],
                'args':record.kwargs['args'],
                'kwargs':record.kwargs['kwargs'],
                'txt':record.kwargs['txt'],
                }
        if data['status'] == LOGSTATUS:
            # serialized with pickle; unpickled by Consumer.run
            self.q.rpush(self.tube, pickle.dumps(data))
# Install the Redis-backed handler into the shared kokolog configuration
# at import time.
hdr = Producer(**LOGQUEUE)
frt = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
hdr.setFormatter(frt)
CFG.handlers.append(hdr)
@withData(datacfg.W, autocommit=True)
def record(data):
    # Persist one dequeued log entry through the ORM (write connection).
    RunLog.insert(RunLog(**data))
class Consumer(Thread):
    """Worker thread that drains the Redis log queue into the database."""

    def __init__(self, **config):
        super(Consumer, self).__init__()
        self.tube = config['tube']
        self.q = redis.StrictRedis(host=config['host'], port=config['port'], db=config['db'])

    def run(self):
        # Poll forever: back off 10s when the queue is empty, otherwise pop
        # one pickled entry, timestamp it and store it.
        while True:
            if self.q.llen(self.tube) == 0:
                time.sleep(10)
            else:
                data = self.q.lpop(self.tube)
                if data is None:
                    # raced with another consumer; try again
                    continue
                # SECURITY: pickle.loads executes arbitrary code if the Redis
                # instance is writable by untrusted parties — keep it private.
                data = pickle.loads(data)
                data['atime'] = datetime.datetime.now()
                record(data)
class LogMonitor(Daemon):
    """Daemon whose payload is LOGNUM Consumer threads draining the log queue."""

    def _run(self):
        for k in range(LOGNUM):
            c = Consumer(**LOGQUEUE)
            # non-daemon threads keep the process alive while consumers run
            c.setDaemon(False)
            c.start()
def main():
    """Toggle the log-monitor daemon.

    A pid file under ./log indicates a running daemon: when present the
    daemon is stopped, otherwise it is started.  (The success messages are
    printed before the action completes.)
    """
    lmoni = LogMonitor(os.path.join(path, 'log', 'lmoni.pid'), stdout=os.path.join(
        path, 'log', 'lmoni.out'), stderr=os.path.join(path, 'log', 'lmoni.err'))
    if os.path.exists(os.path.join(path, 'log', 'lmoni.pid')):
        print "LogMonitor stop successfully."
        lmoni.stop()
    else:
        print "LogMonitor start successfully."
        lmoni.start()
if __name__ == '__main__':
    main()
| {"/setup.py": ["/ask/__init__.py"]} |
56,695 | spawn3/python-util | refs/heads/master | /ask/decorator.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import functools
from pprint import pprint
def decorator(func):
    """Experimental tracing decorator: prints call and return info around func.

    `_state` is a dict captured by the closure — one private instance per
    decorated function.
    """
    _state = {}
    @functools.wraps(func)
    def wrapper(*args, **kw):
        print 'input', func, args, kw
        res = func(*args, **kw)
        print 'after calling', func
        print func.__name__
        #print func.a
        pprint(_state)
        #pprint(dir(func))
        #pprint(func.__dict__)
        #pprint(func.__name__)
        print 'output', res
        return res
    #set_f_attr(wrapper, 'a', 2)
    return wrapper
def set_f_attr(f, k, v):
    """Attach attribute k=v to function object f (function objects accept
    arbitrary attributes)."""
    print f, 'set %s.%s to %s' % (f.__name__, k, v)
    setattr(f, k, v)
@decorator
def f(*args, **kw):
    # Inside the body the global name `f` resolves to the decorated wrapper,
    # so the attribute below lands on the wrapper, not this function.
    #_state['a'] = 1
    pprint(dir(f))
    set_f_attr(f, 'a', 2)
    return 1
def g():
    # demonstrates setting an attribute on the function object at call time
    g.a = 1
    #setattr(g, 'a', 1)
    return 1
def h(func):
    """Decorator that prints func.a after each call (func must carry an `a`
    attribute by the time it is invoked)."""
    def wrapper(*args, **kw):
        res = func(*args, **kw)
        print func.a
        return res
    return wrapper
# registry filled by the @register class decorator below
CLIST = []

def register(cls):
    """Class decorator: record cls in CLIST and hand it back unchanged."""
    CLIST.append(cls)
    return cls
@register
class A(object):
    # added to CLIST at import time via @register
    pass
@register
class B(object):
    # added to CLIST at import time via @register
    pass
#g.a = 2
#pprint(g.__dict__)
if __name__ == '__main__':
    # print every class collected by @register
    for x in CLIST:
        print x
    #f = decorator(f)
    # f()
    # print f
    #pprint(dir(f))
    #pprint(dir(g))
    #print f.a
    #h(g)()
| {"/setup.py": ["/ask/__init__.py"]} |
56,696 | spawn3/python-util | refs/heads/master | /spider/gdc/task/video/spiderAcfun.py | #!/usr/bin/python
# coding=utf-8
import os, re, copy, time
from pymongo import MongoClient
from datetime import timedelta
from datetime import datetime
from webcrawl.request import requGet
from webcrawl.request import requPost
from webcrawl.request import getHtmlNodeContent
from webcrawl.request import getXmlNodeContent
from webcrawl.task import retry
from webcrawl.task import index
from webcrawl.task import initflow
from webcrawl.request import getJsonNodeContent
from webcrawl.task import store
from webcrawl.task import timelimit
from webcrawl.task import next
from webcrawl.request import ensureurl
from webcrawl.request import parturl
from model.setting import withData, datacfg
from videospider import Data
from videospider import TIMEOUT
from videospider import SpiderVideoOrigin
#_print, logger = logprint(modulename(__file__), modulepath(__file__))
class SpiderAcfun(SpiderVideoOrigin):
    """
    Acfun (www.acfun.tv) video spider.

    (The original docstring said "哔哩官网 数据爬虫" — i.e. bilibili — but
    every URL below targets Acfun; likely copied from a sibling spider.)

    Two flows: 'solo' crawls stand-alone videos per channel category,
    'album' crawls bangumi series episode by episode.  Each flow is a
    Cat -> List -> Detail chain wired through the @next decorators.
    """

    def __init__(self, worknum=6, queuetype='P', worktype='COROUTINE', timeout=-1, tid=0):
        super(SpiderAcfun, self).__init__(worknum=worknum, queuetype=queuetype, worktype=worktype, timeout=timeout, tid=tid)
        self.clsname = self.__class__.__name__
        # NOTE(review): these headers carry a bilibili Host and are not used by
        # the fetch methods below, which build their own header dicts.
        self.headers = {"Accept":"application/json, text/javascript, */*; q=0.01",
                        "Accept-Encoding":"gzip, deflate, sdch",
                        "Accept-Language":"en-US,en;q=0.8",
                        "Cache-Control":"max-age=0",
                        "Connection":"keep-alive",
                        "Host":"www.bilibili.com",
                        "User-Agent":"Mozilla/5.0 (Linux; Android 5.1.1; Nexus 6 Build/LYZ28E) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.20 Mobile Safari/537.36",
                        "X-Requested-With":"XMLHttpRequest"}
        # 7-day window; not referenced in the visible methods
        self.end = datetime.now()
        self.begin = self.end - timedelta(days=7)

    @store(withData(datacfg.W), Data.insert, update=True, method='MANY')
    @timelimit(3)
    def fetchSoloDetail(self, url, additions={}, timeout=TIMEOUT, implementor=None):
        """Resolve one stand-alone video page into a single Data row (generator)."""
        cat = additions['cat']
        # content id: trailing path segment without the 'ac' prefix
        outid = url[url.rindex('/')+1:].replace('ac', '')
        headers = {
            "deviceType":"2",
            "Referer":"http://m.acfun.tv/player?date=undefined",
            "User-Agent":"Mozilla/5.0 (iPhone; CPU iPhone OS 8_0 like Mac OS X) AppleWebKit/600.1.3 (KHTML, like Gecko) Version/8.0 Mobile/12A4345d Safari/600.1.4"
        }
        url = 'http://api.acfun.tv/apiserver/content/info?contentId=%s' % outid
        info_result = requGet(url, timeout=30, format='JSON')
        page_url = url
        url = 'http://api.aixifan.com/plays/%s/realSource' % str(info_result['data']['fullContent']['videos'][0]['videoId'])
        # page identity = info URL + '#<videoId>' fragment
        page_url = page_url + '#' + str(info_result['data']['fullContent']['videos'][0]['videoId'])
        url_result = requGet(url, headers=headers, format='JSON')
        url = url_result['data']['files'][0]['url']
        if type(url) == list:
            # some responses return a list of mirrors; keep the first
            url.append('')
            url = url[0]
        format = 'mp4'
        size = 0
        during = info_result['data']['fullContent']['videos'][0]['time']
        tag = info_result['data']['fullContent']['tags']
        name = info_result['data']['fullContent']['videos'][0]['name']
        desc = info_result['data']['fullContent']['description']
        cover = info_result['data']['fullContent']['cover']
        author = ''
        owner = {'avatar':'', 'name':'', 'url':''}
        owner['avatar'] = info_result['data']['fullContent']['user']['userImg']
        owner['name'] = info_result['data']['fullContent']['user']['username']
        owner['url'] = 'http://www.acfun.tv/u/%s.aspx' % str(info_result['data']['fullContent']['user']['username'])
        snum = 0
        src = 'Acfun'
        host = 'www.acfun.tv'
        page_id = hash(page_url)
        parent_page_id = None  # solo videos have no album parent
        # releaseDate is epoch milliseconds
        atime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(info_result['data']['fullContent']['releaseDate']/1000.0))
        data = Data(cat=cat, url=url, format=format,
                    size=size, during=during, tag=tag, name=name,
                    desc=desc, cover=cover, author=author,
                    owner=owner, snum=snum,
                    src=src, host=host, page_url=page_url,
                    page_id=page_id, parent_page_id=parent_page_id,
                    atime=atime, tid=self.tid)
        yield data

    @next(fetchSoloDetail)
    @timelimit(20)
    @index('url')
    def fetchSoloList(self, url, additions={}, timeout=TIMEOUT, implementor=None):
        """Page through a channel's video list: yield the next-page URL
        (None when done, capped at page 5), then one detail URL per video."""
        info_result = requGet(url, timeout=180, format='HTML')
        videos = info_result.xpath('//div[starts-with(@class,"unit unit")]')
        if len(videos) < additions['pagesize']:
            nextpage = None
        else:
            page = int(url[url.rindex('/')+1:url.rindex('.')])+1
            if page > 5:
                nextpage = None
            else:
                nextpage = url[:url.rindex('/')+1] + str(page) + url[url.rindex('.'):]
        yield nextpage
        for one in videos:
            yield {'url': 'http://www.acfun.tv%s' % getHtmlNodeContent(one.find('.//a[@class="title"]'), {'ATTR':'href'}), 'additions': {"cat":additions['cat']}}

    @next(fetchSoloList)
    @timelimit(20)
    @initflow('solo')
    def fetchSoloCat(self, url, additions={}, timeout=TIMEOUT, implementor=None):
        """Scrape the Acfun front page for channel categories and yield one
        channel listing URL (with category breadcrumbs) per sub-channel."""
        info_result = requGet('http://www.acfun.tv/', timeout=timeout, format='HTML')
        cats = {}
        pagesize = 20
        # top-level categories keyed by their data-channel attribute
        for upcat in info_result.findall('.//div[@id="guide-bar"]//a'):
            url = getHtmlNodeContent(upcat, {'ATTR':'href'})
            cats[getHtmlNodeContent(upcat, {'ATTR':'data-channel'})] = {'url':url, 'txt':getHtmlNodeContent(upcat, 'TEXT')}
        for downcat in info_result.findall('.//div[@id="sub-guide-inner"]//div'):
            channel = getHtmlNodeContent(downcat, {'ATTR':'class'})
            channel = channel.split(' ')[1].replace('channel-', '')
            cat = [cats[channel]['txt']] if channel in cats else []
            for e in downcat.findall('.//a'):
                channel_id = getHtmlNodeContent(e, {'ATTR':'href'}).split('/')[2].replace('list', '')
                if channel_id.isdigit():
                    yield {'url':'http://www.acfun.tv/dynamic/channel/1.aspx?channelId=%s&orderBy=0&pageSize=%d' % (channel_id, pagesize), 'additions':{'pagesize':pagesize, 'cat':[getHtmlNodeContent(e, 'TEXT')] + cat}}

    @store(withData(datacfg.W), Data.insert, update=True, method='MANY')
    @timelimit(3)
    def fetchAlbumDetail(self, url, additions={}, timeout=TIMEOUT, implementor=None):
        """Resolve one bangumi (album) page into one Data row per episode
        (generator), ordered by the episodes' sort field."""
        cat = additions['cat']
        outid = url[url.rindex('/')+1:].replace('ab', '')
        headers = {
            "deviceType":"2",
            "Referer":"http://m.acfun.tv/player?date=undefined",
            "User-Agent":"Mozilla/5.0 (iPhone; CPU iPhone OS 8_0 like Mac OS X) AppleWebKit/600.1.3 (KHTML, like Gecko) Version/8.0 Mobile/12A4345d Safari/600.1.4"
        }
        url = 'http://www.acfun.tv/bangumi/bangumi/info?id=%s' % outid
        info_result = requGet(url, timeout=30, format='JSON')
        pages = info_result['data']['videos']
        pages = sorted(pages, key=lambda v:v['sort'])
        if pages:
            first = pages[0]
            # every episode points back to the first episode's page hash
            parent_page_id = hash('http://www.acfun.tv/v/ab%s_1' % first['bangumiId'])
            tag = [one['name'] for one in info_result['data']['tags']]
            for index, one in enumerate(pages):
                page_url = 'http://www.acfun.tv/v/ab%s_%d#%s' % (first['bangumiId'], index+1, one['danmakuId'])
                url = 'http://api.aixifan.com/plays/%s/realSource' % str(one['danmakuId'])
                url_result = requGet(url, headers=headers, format='JSON')
                try:
                    url = url_result['data']['files'][0]['url']
                    if type(url) == list:
                        url.append('')
                        url = url[0]
                except:
                    # source resolution failed; keep the record with empty url
                    url = ''
                format = 'mp4'
                size = 0
                during = one['time']
                name = ''.join([info_result['data']['title'], '-', one['title']])
                desc = info_result['data']['intro']
                cover = info_result['data']['cover']
                author = ''
                owner = {'avatar':'', 'name':'', 'url':''}
                owner['avatar'] = info_result['data']['contributors'][0]['avatar']
                owner['name'] = info_result['data']['contributors'][0]['name']
                owner['url'] = 'http://www.acfun.tv/u/%s.aspx' % str(info_result['data']['contributors'][0]['id'])
                snum = index + 1  # 1-based episode number
                src = 'Acfun'
                host = 'www.acfun.tv'
                page_id = hash(page_url)
                # lastUpdateTime is epoch milliseconds
                atime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(info_result['data']['lastUpdateTime']/1000.0))
                data = Data(cat=cat, url=url, format=format,
                            size=size, during=during, tag=tag, name=name,
                            desc=desc, cover=cover, author=author,
                            owner=owner, snum=snum,
                            src=src, host=host, page_url=page_url,
                            page_id=page_id, parent_page_id=parent_page_id,
                            atime=atime, tid=self.tid)
                yield data

    @next(fetchAlbumDetail)
    @timelimit(20)
    @index('url')
    def fetchAlbumList(self, url, additions={}, timeout=TIMEOUT, implementor=None):
        """Page through the bangumi list API: yield the next-page URL
        (None when done, capped at page 5), then one album URL per series."""
        info_result = requGet(url, timeout=180, format='JSON')
        videos = info_result['data']['list']
        if len(videos) < additions['pagesize']:
            nextpage = None
        else:
            # pageNo is the last '='-separated field of the URL
            index = url.split('=')
            index[-1] = int(index[-1]) + 1
            if index[-1] > 5:
                nextpage = None
            else:
                index[-1] = str(index[-1])
                nextpage = '='.join(index)
        yield nextpage
        for one in videos:
            yield {'url': 'http://www.acfun.tv/v/ab%s' % str(one['id']), 'additions': {"cat":additions['cat']}}

    @next(fetchAlbumList)
    @timelimit(20)
    @initflow('album')
    def fetchAlbumCat(self, url, additions={}, timeout=TIMEOUT, implementor=None):
        """Find the bangumi category (list144) on the front page and yield one
        paged bangumi-list API URL per filter type."""
        info_result = requGet('http://www.acfun.tv/', timeout=timeout, format='HTML')
        pagesize = 42
        for upcat in info_result.findall('.//div[@id="guide-bar"]//a'):
            url = getHtmlNodeContent(upcat, {'ATTR':'href'})
            cat_txt = getHtmlNodeContent(upcat, 'TEXT')
            if 'list144' in url:
                cat_result = requGet('http://www.acfun.tv%s' % url, timeout=timeout, format='HTML')
                for downcat in cat_result.findall('.//ul[@id="filter-type"]//li'):
                    cat_type = getHtmlNodeContent(downcat, {'ATTR':'data-value'})
                    url = 'http://www.acfun.tv/bangumi/bangumi/page?pageSize=%d&isWeb=1&&sort=1&type=%s&pageNo=1' % (pagesize, cat_type)
                    yield {'url':url, 'additions':{'pagesize':pagesize, 'cat':[cat_txt, getHtmlNodeContent(downcat, 'TEXT')]}}
if __name__ == '__main__':
    print 'start'
    # crawl the 'album' flow starting from the Acfun front page
    spider = SpiderAcfun(worknum=6, queuetype='P', worktype='THREAD')
    spider.fetchDatas('album', 0, 'http://www.acfun.tv/')
    spider.statistic()
    print 'end'
56,697 | spawn3/python-util | refs/heads/master | /spider/gdc/webcrawl/spider.py | #!/usr/bin/python
# coding=utf-8
import weakref
import time
import datetime
import sys
import traceback
import functools
import threading
from threading import Thread
from task import Workflows
# Defaults for worker-pool construction (see SpiderOrigin.__init__).
WORKNUM = 30
QUEUETYPE = 'P'
WORKTYPE = 'COROUTINE'
class SpiderOrigin(Workflows):
    """Base class for all spiders.

    Drives a Workflows flow to completion (optionally under a wall-clock
    timeout enforced by a watchdog thread) and aggregates per-step
    succ/fail/timeout statistics.
    """

    # class-wide monotonic timestamp shared by uniquetime()
    __lasttime = datetime.datetime.now()
    __lock = threading.Lock()

    def __init__(self, worknum=WORKNUM, queuetype=QUEUETYPE, worktype=WORKTYPE, timeout=-1, tid=0):
        super(SpiderOrigin, self).__init__(worknum=worknum, queuetype=queuetype, worktype=worktype, tid=tid)
        # Workflows.__init__(self, worknum=worknum, queuetype=queuetype, worktype=worktype)
        # Keeper.__init__(self)
        self.timeout = timeout  # -1 means "no time limit"
        self.dones = set()      # names of flows that ran to completion
        self.extractFlow()

    def fetchDatas(self, flow, step=0, *args, **kwargs):
        """Fire `flow` at `step`, wait for completion and flush buffered stores.

        Returns True on success, False when any exception escaped.
        """
        try:
            start = time.time()
            self.fire(flow, step, *args, **kwargs)
            if self.timeout > -1:
                # watchdog: hard-exit the workflow when the deadline passes
                def check(self, timeout):
                    time.sleep(timeout)
                    self.exit()
                    print 'Time out of %s. ' % str(self.timeout)
                watcher = Thread(
                    target=check, args=(self, self.timeout - (time.time() - start)))
                watcher.setDaemon(True)
                watcher.start()
            self.waitComplete()
            # walk the step chain and force-flush any buffered store()
            it = self.tinder(flow)
            while True:
                if hasattr(it, 'store'):
                    try:
                        it.store(None, forcexe=True)
                    except:
                        t, v, b = sys.exc_info()
                        err_messages = traceback.format_exception(t, v, b)
                        print(': %s, %s \n' % (str(args), str(kwargs)),
                              ','.join(err_messages), '\n')
                if hasattr(it, 'next'):
                    it = it.next
                else:
                    break
            self.dones.add(flow)
            end = time.time()
            self.totaltime = end - start
            return True
        except:
            # NOTE(review): bare except swallows every error silently,
            # including KeyboardInterrupt — consider narrowing.
            return False

    def clearDataOne(self, one):
        # hook for subclasses
        pass

    def implementDataone(self, *args, **kwargs):
        # hook for subclasses
        pass

    @classmethod
    def uniquetime(cls, timespan=1, lasttime=None):
        """Hand out strictly increasing class-wide timestamps, `timespan`
        seconds apart.

        NOTE(review): the `lasttime` branch updates the shared clock without
        taking the lock and returns None — confirm both are intentional.
        """
        if lasttime is None:
            with cls.__lock:
                cls.__lasttime = cls.__lasttime + \
                    datetime.timedelta(seconds=timespan)
                return cls.__lasttime
        else:
            cls.__lasttime = max(cls.__lasttime, lasttime)

    def statistic(self):
        """Aggregate succ/fail/timeout counters for every completed flow.

        NOTE(review): self.stat is re-initialised at the top of each loop
        iteration, so after multiple completed flows only the LAST flow's
        numbers survive — confirm whether totals across flows were intended.
        """
        for flow in self.dones:
            it = self.tinder(flow)
            self.stat = {'total': {'succ': 0, 'fail': 0, 'timeout': 0}}
            self.stat[it.__name__] = {}
            self.stat[it.__name__]['succ'] = it.succ
            self.stat[it.__name__]['fail'] = it.fail
            self.stat[it.__name__]['timeout'] = it.timeout
            self.stat['total']['succ'] += it.succ
            self.stat['total']['fail'] += it.fail
            self.stat['total']['timeout'] += it.timeout
            if hasattr(it, 'store'):
                self.stat['total']['succ'] += it.store.succ
                self.stat['total']['fail'] += it.store.fail
                self.stat['total']['timeout'] += it.store.timeout
                # print it.store.__name__, 'succ: ', it.store.succ
                # print it.store.__name__, 'fail: ', it.store.fail
                # print it.store.__name__, 'timeout: ', it.store.timeout
            while hasattr(it, 'next'):
                self.stat[it.next.__name__] = {}
                self.stat[it.next.__name__]['succ'] = it.next.succ
                self.stat[it.next.__name__]['fail'] = it.next.fail
                self.stat[it.next.__name__]['timeout'] = it.next.timeout
                self.stat['total']['succ'] += it.next.succ
                self.stat['total']['fail'] += it.next.fail
                self.stat['total']['timeout'] += it.next.timeout
                if hasattr(it.next, 'store'):
                    self.stat['total']['succ'] += it.next.store.succ
                    self.stat['total']['fail'] += it.next.store.fail
                    self.stat['total']['timeout'] += it.next.store.timeout
                    # print it.next.store.__name__, 'succ: ', it.next.store.succ
                    # print it.next.store.__name__, 'fail: ', it.next.store.fail
                    # print it.next.store.__name__, 'timeout: ', it.next.store.timeout
                it = it.next

    def now():
        # NOTE(review): defined at class level but takes no `self` — calling
        # it on an instance raises TypeError; looks like it belongs at module
        # level. Confirm against callers before moving.
        return datetime.datetime.now()

    def __del__(self):
        pass
if __name__ == '__main__':
    # Smoke test for the shared uniquetime() timestamp: ten threads at
    # different rates must still observe strictly increasing values.
    from threading import Thread, currentThread
    class AB(SpiderOrigin):
        def __init__(self, worknum=WORKNUM, queuetype=QUEUETYPE, worktype=WORKTYPE, timeout=-1):
            super(AB, self).__init__(
                worknum=worknum, queuetype=queuetype, worktype=worktype)
    class CD(object):
        def __init__(self):
            pass
        def run(self, name, nums, times):
            # Pull *nums* timestamps, sleeping *times* seconds between pulls.
            for k in range(nums):
                time.sleep(times)
                print name, AB.uniquetime()
    cd = CD()
    cdts = []
    for k in range(10):
        cdt = Thread(
            target=cd.run, args=('thread%d' % k, k + 1, (10 - k) * 0.1))
        cdts.append(cdt)
        cdt.start()
    for cdt in cdts:
        cdt.join()
| {"/setup.py": ["/ask/__init__.py"]} |
56,698 | spawn3/python-util | refs/heads/master | /ask/right/exceptions.py | #!/usr/bin/env python
class AError(Exception):
    """Demo exception type A (used by the exercises in this package)."""
    pass
class BError(Exception):
    """Demo exception type B (used by the exercises in this package)."""
    pass
| {"/setup.py": ["/ask/__init__.py"]} |
56,699 | spawn3/python-util | refs/heads/master | /lich/lich/snapshot.py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
import json
import utils
from umptypes import UmpPath
from base import LichBase
from runner import local_runner
def _delete_exc_handler(e, *args, **kw):
if str(e).find("Required key not available") != -1:
pass
else:
raise Exception(e)
class LichSnapshot(LichBase):
    """Wrapper around the ``lich.snapshot`` CLI.

    Methods decorated with @local_runner return the command-line string;
    presumably the decorator executes it locally and returns (rc, output) --
    TODO confirm against runner.local_runner.
    """
    @local_runner()
    def create(self, path):
        return "%s --create %s" % (self.lich_snapshot, path.snap_path)
    @local_runner(exc_handler=_delete_exc_handler)
    def delete(self, path):
        # "already deleted" errors are swallowed by _delete_exc_handler.
        return "%s --remove %s" % (self.lich_snapshot, path.snap_path)
    def remove(self, path):
        # Alias for delete().
        return self.delete(path)
    @local_runner()
    def _list(self, path):
        return "%s --list %s -n" % (self.lich_snapshot, path.volume_path)
    def list(self, path):
        """Return (rc, tree) where tree is the parsed JSON snapshot listing
        of the volume, or {} when the command failed / returned nothing."""
        rc, res = self._list(path)
        if rc == 0 and res:
            res = json.loads(res[0])
        else:
            res = {}
        return rc, res
    def stat(self, path):
        """Return (rc, info) for the single snapshot named in *path*."""
        rc, snaptree = self.list(path)
        snaptree = snaptree.get('snapshot', {})
        return rc, self._find_snap(snaptree, path.snap_name)
    def children(self, path):
        # Names of clones derived from this snapshot, if any.
        rc, info = self.stat(path)
        if info and 'child' in info:
            return info.get('child', {}).keys()
        return []
    def _find_snap(self, snaptree, snap):
        # Depth-first search of the nested snapshot tree for key == snap.
        # NOTE: iteritems() is Python-2-only.
        for k, v in snaptree.iteritems():
            if k == snap:
                return v
            elif 'child' in v:
                info = self._find_snap(v['child'], snap)
                if info:
                    return info
        return None
    def exists(self, path):
        # Truthy when the snapshot was found with a clean return code.
        rc, info = self.stat(path)
        return rc == 0 and info
    @local_runner()
    def rollback(self, path):
        return "%s --rollback %s" % (self.lich_snapshot, path.snap_path)
    @local_runner()
    def clone(self, path, new_vol_path):
        return "%s --clone %s %s" % (self.lich_snapshot, path.snap_path, new_vol_path)
    @local_runner()
    def flat(self, path):
        # NOTE(review): takes the raw path, not path.snap_path like siblings.
        return "%s --flat %s" % (self.lich_snapshot, path)
    @local_runner()
    def protect(self, path, on=True):
        """Run ``lich.snapshot --protect snap_path 0|1``.

        :param path: UmpPath of the snapshot
        :param on: True to protect, False to unprotect
        :return: the command string (executed by @local_runner)
        """
        cmd = 'protect'
        status = 1 if on else 0
        return "%s --%s %s %s" % (self.lich_snapshot, cmd, path.snap_path, status)
    # @local_runner()
    def unprotect(self, path):
        # Delegates to protect(); already decorated there.
        return self.protect(path, on=False)
if __name__ == '__main__':
    # Ad-hoc smoke test against a fixed snapshot path; mutating calls are
    # deliberately left commented out.
    path = UmpPath('pool1/volume1@snap01')
    snap = LichSnapshot()
    print snap.list(path)
    # snap.create(path)
    # snap.protect(path)
    # snap.unprotect(path)
    # snap.remove(path)
| {"/setup.py": ["/ask/__init__.py"]} |
56,700 | spawn3/python-util | refs/heads/master | /lich/lich/node.py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
from base import LichBase, RemoteLocation
from runner import http_runner
class LichNodeParam(RemoteLocation):
    """Parameters for node-level disk operations: host, cluster, device name(s)."""

    def __init__(self, host_ip=None, cluster_id=1, dev_name=None):
        super(LichNodeParam, self).__init__(host_ip=host_ip, cluster_id=cluster_id)
        # dev_name may be a single device string or a list of devices.
        self.dev_name = dev_name

    def get_names(self, sep=' '):
        """Join a list of device names with *sep*; pass a scalar through as-is."""
        names = self.dev_name
        if not isinstance(names, list):
            return names
        return sep.join(names)
class LichNode(LichBase):
    """Wrapper around the ``lich.node`` CLI.

    Methods decorated with @http_runner return the command string;
    presumably the decorator ships it to param's host for execution --
    TODO confirm against runner.http_runner.
    """
    @http_runner()
    def start(self, param):
        cmd = '%s --start ' % (self.lich_node)
        return cmd
    @http_runner()
    def delete(self, param):
        # Remove the disk named in param.dev_name from the node.
        cmd = '%s --disk_del %s ' % (self.lich_node, param.dev_name)
        return cmd
    @http_runner()
    def list(self, param):
        cmd = '%s --disk_list ' % (self.lich_node)
        return cmd
    @http_runner()
    def add_raid(self, param):
        # NOTE(review): builds --disk_list, identical to list(); looks like a
        # copy/paste leftover -- confirm the intended raid-add flag.
        cmd = '%s --disk_list ' % (self.lich_node)
        return cmd
    @http_runner()
    def delete_raid(self, param):
        # NOTE(review): same --disk_list copy/paste concern as add_raid.
        cmd = '%s --disk_list ' % (self.lich_node)
        return cmd
if __name__ == '__main__':
    # Ad-hoc smoke test; the actual call remains commented out.
    param = LichNodeParam(host_ip='192.168.120.211', dev_name='/dev/vda')
    node = LichNode()
    # print disk.add(param)
| {"/setup.py": ["/ask/__init__.py"]} |
56,701 | spawn3/python-util | refs/heads/master | /ask/func/closure.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
def generate_generator():
    """Build an independent counter: successive calls return 1, 2, 3, ...

    Demonstrates mutating an enclosed container to emulate ``nonlocal``
    on Python 2 (rebinding a closed-over name is not possible).
    """
    cell = {'y': 0}

    def next_id():
        cell['y'] += 1
        return cell['y']

    return next_id
def generate_generator2():
    """Counter factory: returns a closure yielding 1, 2, 3, ...

    Variant that keeps the mutable state in a throwaway class attribute
    instead of a container -- attribute assignment avoids the Python 2
    ``nonlocal`` limitation.
    """
    class Box:
        value = 0

    def next_id():
        Box.value += 1
        return Box.value

    return next_id
class Nonlocals(object):
    """Attribute bag: ``Nonlocals(a=1).a == 1``.

    Used by the closure examples below to hold mutable state that inner
    functions can update via attribute assignment.
    """
    def __init__(self, **kwargs):
        for name, value in kwargs.items():
            setattr(self, name, value)
def generate_generator3():
    """Counter factory: returns a closure yielding 1, 2, 3, ...

    State lives in a Nonlocals attribute bag; attribute mutation works
    where rebinding a closed-over name would not (pre-``nonlocal``).
    """
    nl = Nonlocals(y=0)
    def next_id():
        nl.y += 1
        return nl.y
    return next_id
def generate_generator4():
    """Counter factory using a function attribute as the mutable state.

    The returned closure increments and returns its own ``y`` attribute,
    so successive calls yield 1, 2, 3, ...  The counter is also readable
    externally as ``fn.y``.
    """
    def next_id():
        next_id.y += 1
        return next_id.y
    next_id.y = 0
    return next_id
| {"/setup.py": ["/ask/__init__.py"]} |
56,702 | spawn3/python-util | refs/heads/master | /stock/lib/tjts_network.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os, sys
import argparse
from prettytable import PrettyTable
def stock_list(begin_price, end_price, factor=0.04, partition=4, money=20000):
    """Print a geometric ladder of buy prices from begin_price to end_price.

    Each rung is *factor* (fractional step, e.g. 0.04 == 4%) above the
    previous one; for every rung the table shows the per-step delta, the
    share count affordable for *money*, and that count rounded down to
    whole lots of 100.  ``partition`` is currently unused and kept only for
    interface compatibility.

    Bug fixes: the *factor* argument used to be overwritten with a
    hard-coded 0.01 (debug leftover), and the lot rounding used true
    division (``/``), which is a no-op under Python 3; it now uses floor
    division, which is correct on both Python 2 and 3.
    """
    pt = PrettyTable(['IDX', 'Buy', 'Diff', '/4', 'Money', 'Stock1', "Stock2"])
    pt.hrules = 1
    # Build the geometric price ladder.
    price_list = []
    price = begin_price
    while price < end_price:
        price_list.append(price)
        price = price * (1 + factor)
    for i, price in enumerate(price_list):
        stock = int(money / price)
        xs = []
        for j in [4]:
            # Every j-th rung, show the price gained over the previous j rungs.
            x = '{}:{:.3f}'.format(j, 0.0 if i == 0 else (price - price_list[i - j])) if i % j == 0 else ''
            xs.append(x)
        lst = [i,
               u'TJ-{:.3f}'.format(price),
               u'{:.3f}'.format(price * factor)
               ]
        lst.extend(xs)
        lst.extend([money,
                    stock,
                    stock // 100 * 100])  # round down to whole lots of 100
        pt.add_row(lst)
    print(pt)
def calc_price(number1, price1, number2, price2):
    """Print and return the volume-weighted average price of two fills.

    Improvement: the computed price is now also returned (it used to be
    print-only), which is backward compatible for existing callers.
    """
    price = (number1 * price1 + number2 * price2) / (number1 + number2)
    print(price)
    return price
def calc_kelly(p, b):
    """Print and return the Kelly fraction f* = p - (1 - p) / b.

    :param p: win probability
    :param b: net odds received on a win
    Improvement: the fraction is now also returned (was print-only),
    backward compatible for existing callers.
    """
    r = p - (1 - p) / b
    print('{:.3f}'.format(r))
    return r
if __name__ == '__main__':
    # CLI with three subcommands: stock (price ladder), price (VWAP),
    # kelly (Kelly fraction).  Note: --factor is given in percent and
    # converted to a fraction below.
    # NOTE(review): with no subcommand given, args.func is unset and the
    # final call raises AttributeError on Python 3 -- confirm intended.
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers()
    def _stock_list(args):
        stock_list(args.begin, args.end, factor=args.factor * 0.01, partition=args.partition, money=args.money)
    parser_stock_List = subparsers.add_parser('stock', help='list stock')
    parser_stock_List.add_argument('-b', '--begin', required=False, type=float, default=1, help="begin_price")
    parser_stock_List.add_argument('-e', '--end', required=False, type=float, default=30, help="end_price")
    parser_stock_List.add_argument('-f', '--factor', required=False, type=float, default=4, help="factor")
    parser_stock_List.add_argument('-p', '--partition', required=False, type=int, default=4, help="partition")
    parser_stock_List.add_argument('-m', '--money', required=False, type=float, default=20000, help="money")
    parser_stock_List.set_defaults(func=_stock_list)
    def _calc_price(args):
        calc_price(args.number1, args.price1, args.number2, args.price2)
    parser_calc_price = subparsers.add_parser('price', help='calc price')
    parser_calc_price.add_argument('-m', '--number1', required=True, type=int, help="number1")
    parser_calc_price.add_argument('-p', '--price1', required=True, type=float, help="price1")
    parser_calc_price.add_argument('-n', '--number2', required=True, type=int, help="number2")
    parser_calc_price.add_argument('-q', '--price2', required=True, type=float, help="price2")
    parser_calc_price.set_defaults(func=_calc_price)
    def _calc_kelly(args):
        calc_kelly(args.p, args.b)
    parser_kelly = subparsers.add_parser('kelly', help='kelly')
    parser_kelly.add_argument('-p', '--p', required=True, type=float, default=0.5, help="p")
    parser_kelly.add_argument('-b', '--b', required=True, type=float, default=3, help="b")
    parser_kelly.set_defaults(func=_calc_kelly)
    args = parser.parse_args()
    args.func(args)
| {"/setup.py": ["/ask/__init__.py"]} |
56,703 | spawn3/python-util | refs/heads/master | /spider/gdc/task/video/spiderYouku.py | #!/usr/bin/env python
# coding=utf-8
import os, re, copy
from pymongo import MongoClient
from datetime import timedelta
from datetime import datetime
from webcrawl.request import requGet
from webcrawl.request import requPost
from webcrawl.request import getHtmlNodeContent
from webcrawl.request import getXmlNodeContent
from webcrawl.task import retry
from webcrawl.task import index
from webcrawl.task import initflow
from webcrawl.request import getJsonNodeContent
from webcrawl.task import store
from webcrawl.task import timelimit
from webcrawl.task import next
from webcrawl.request import ensureurl
from webcrawl.request import parturl
from model.setting import withData, datacfg
from videospider import Data
from videospider import TIMEOUT
from videospider import SpiderVideoOrigin
#_print, logger = logprint(modulename(__file__), modulepath(__file__))
bili_re = re.compile('duration: *\'.*\'')
def seconds(tl):
    """Convert a list of time components (least-significant last, e.g.
    ['2', '30'] for mm:ss or ['1', '0', '0'] for hh:mm:ss) to total seconds.

    Generalised from the original, which asserted ``len(tl) < 3`` and so
    rejected hh:mm:ss durations; any number of base-60 components is now
    accepted (an empty list yields 0).
    """
    total = 0
    for power, component in enumerate(reversed(tl)):
        total += (60 ** power) * int(component)
    return total
class SpiderYouku(SpiderVideoOrigin):
    """
    Data spider for the Youku official site (www.youku.com).

    Flow 'www': category list -> show listings -> per-episode records.
    Flow 'spec': a few hand-picked albums fed straight to fetchDetail.
    """
    def __init__(self, worknum=6, queuetype='P', worktype='COROUTINE', timeout=-1, tid=0):
        super(SpiderYouku, self).__init__(worknum=worknum, queuetype=queuetype, worktype=worktype, timeout=timeout, tid=tid)
        self.clsname = self.__class__.__name__
        # Window of interest: roughly the last 7 days.
        self.end = datetime.now()
        self.begin = self.end - timedelta(days=7)
    @store(withData(datacfg.W), Data.insert, update=True, method='MANY')
    @timelimit(3)
    @index('url')
    def fetchDetail(self, url, additions={}, timeout=TIMEOUT, implementor=None):
        """Yield the next listing-page URL first, then one Data record per
        episode found on the page.

        Tries the HTML show_point listing; on any failure the bare except
        falls back to the JSON showlistnew payload.
        NOTE(review): mutable default ``additions``; the local names
        ``index`` and ``format`` shadow builtins inside the loops.
        """
        cat = additions['cat']
        try:
            if 'show_page' in url:
                # Normalise a show page URL to its paginated show_point form.
                url = url.split('?')[0]
                outid = url[url.rindex('/')+1:url.rindex('.')]
                url = 'http://www.youku.com/show_point/%s.html?dt=json&tab=0&divid=point_reload_1' % outid
            page_result = requGet(url, timeout=TIMEOUT, format='HTML')
            pages = page_result.findall('.//div[@class="item"]')
            if len(pages) < 20:
                nextpage = None
            else:
                # Pagination advances in steps of 20 in the URL's last "_" field.
                index = url.split('_')
                index[-1] = str(int(index[-1]) + 20)
                nextpage = '_'.join(index)
            yield nextpage
            # Parent id comes from the first episode's page on page one and is
            # propagated through additions for subsequent pages.
            parent_page_id = additions.get('parent_page_id')
            if additions.get('parent_page_id') is None:
                parent_page_url = getHtmlNodeContent(pages[0].find('.//div[@class="link"]//a'), {'ATTR':'href'}).split('?')[0]
                parent_page_id = hash(parent_page_url)
                additions['parent_page_id'] = parent_page_id
            base = int(url.split('_')[-1])
            for index, one in enumerate(pages):
                page_url = getHtmlNodeContent(one.find('.//div[@class="link"]//a'), {'ATTR':'href'}).split('?')[0]
                url = ''
                format = 'mp4'
                size = 0
                try:
                    during = seconds(getHtmlNodeContent(one.find('.//div[@class="time"]//span[@class="num"]'), 'TEXT').split(":"))
                except:
                    during = 0
                tag = []
                name = getHtmlNodeContent(one.find('.//div[@class="link"]//a'), {'ATTR':'title'})
                desc = getHtmlNodeContent(one.find('.//div[@class="desc"]'), 'TEXT')
                cover = getHtmlNodeContent(one.find('.//div[@class="thumb"]//img'), {'ATTR':'alt'})
                author = ''
                owner = {'avatar':'', 'name':'', 'url':''}
                snum = base + index
                src = '优酷'
                host = 'www.youku.com'
                page_id = hash(page_url)
                atime = datetime.now()
                data = Data(cat=cat, url=url, format=format,
                            size=size, during=during, tag=tag, name=name,
                            desc=desc, cover=cover, author=author,
                            owner=owner, snum=snum,
                            src=src, host=host, page_url=page_url,
                            page_id=page_id, parent_page_id=parent_page_id,
                            atime=atime, tid=self.tid)
                yield data
        except:
            # Fallback: the JSON API returns all episodes at once (no paging).
            page_result = requGet(url, timeout=TIMEOUT, format='JSON')
            pages = page_result['showlistnew']['items']
            yield None
            parent_page_url = 'http://v.youku.com/v_show/id_%s.html' % pages[0]['videoid']
            parent_page_id = hash(parent_page_url)
            for index, one in enumerate(pages):
                page_url = 'http://v.youku.com/v_show/id_%s.html' % one['videoid']
                url = ''
                format = 'mp4'
                size = 0
                during = int(float(one['seconds']))
                tag = []
                name = one['title']
                desc = ''
                cover = one['thumburl']
                author = ''
                owner = {'avatar':'', 'name':'', 'url':''}
                snum = index + 1
                src = '优酷'
                host = 'www.youku.com'
                page_id = hash(page_url)
                atime = datetime.now()
                data = Data(cat=cat, url=url, format=format,
                            size=size, during=during, tag=tag, name=name,
                            desc=desc, cover=cover, author=author,
                            owner=owner, snum=snum,
                            src=src, host=host, page_url=page_url,
                            page_id=page_id, parent_page_id=parent_page_id,
                            atime=atime, tid=self.tid)
                yield data
    @next(fetchDetail)
    @timelimit(20)
    @index('url')
    def fetchList(self, url, additions={}, timeout=TIMEOUT, implementor=None):
        """Yield the next category-page URL (at most 5 pages), then one
        show_point URL per show found on the page."""
        result = requGet(url, timeout=timeout, format='HTML')
        videos = result.findall('.//div[@class="yk-row yk-v-80"]//div[@class="yk-col3"]')
        if len(videos) < 42:
            nextpage = None
        else:
            # Page number is the first dot-field of the URL's last "_" field.
            index = url.split('_')
            sub_index = index[-1].split('.')
            sub_index[0] = int(sub_index[0]) + 1
            if sub_index[0] >5:
                nextpage = None
            else:
                sub_index[0] = str(sub_index[0])
                index[-1] = '.'.join(sub_index)
                nextpage = '_'.join(index)
        #     sub_index[0] = str(sub_index[0])
        #     index[-1] = '.'.join(sub_index)
        #     nextpage = '_'.join(index)
        yield nextpage
        for one in videos:
            vid = getHtmlNodeContent(one.find('.//div[@class="p-meta-title"]//a'), {'ATTR':'href'})
            vid = vid[vid.rindex('/')+1:vid.rindex('.')]
            yield {'url': 'http://www.youku.com/show_point/%s.html?dt=json&tab=0&divid=point_reload_1' % vid, 'additions': {'cat':additions['cat']+[getHtmlNodeContent(one.find('.//div[@class="p-meta-title"]//a'), 'TEXT')]}}
    @next(fetchList)
    @timelimit(20)
    @initflow('www')
    def fetchCat(self, additions={}, timeout=TIMEOUT, implementor=None):
        """Entry step of the 'www' flow: yield one listing URL per anime genre."""
        cats = ['热血', '格斗', '恋爱', '美少女', '校园', '搞笑', 'LOLI', '神魔', '机战', '科幻', '真人', '青春', '魔法', '神话', '冒险', '运动', '竞技', '童话', '亲子', '教育', '励志', '剧情', '社会', '历史', '战争']
        for cat in cats:
            url = 'http://www.youku.com/v_olist/c_100_g_%s_a__sg__mt__lg__q__s_1_r_0_u_0_pt_1_av_0_ag_0_sg__pr__h__d_1_p_1.html' % cat
            yield {'url':url, 'additions':{'cat':['动漫', cat]}}
    @next(fetchDetail)
    @timelimit(20)
    @initflow('spec')
    def fetchSpec(self, additions={}, timeout=TIMEOUT, implementor=None):
        """Entry step of the 'spec' flow: hand-picked album API URLs."""
        albums = [
            'http://v.youku.com/x_getAjaxData?md=showlistnew&vid=107240046',
            'http://v.youku.com/x_getAjaxData?md=showlistnew&vid=107121098',
            'http://v.youku.com/x_getAjaxData?md=showlistnew&vid=338000877',
        ]
        for index, one in enumerate(albums):
            yield {'url': one, 'additions': {'cat':['漫画', '美少女', '摇曳百合第%d季' % (index+1)]}}
if __name__ == '__main__':
    # Run the 'www' flow with 6 worker threads, then dump run statistics.
    print 'start'
    spider = SpiderYouku(worknum=6, queuetype='P', worktype='THREAD')
    spider.fetchDatas('www', 0)
    spider.statistic()
    print 'end'
| {"/setup.py": ["/ask/__init__.py"]} |
56,704 | spawn3/python-util | refs/heads/master | /spider/gdc/run.py | #!/usr/bin/env python
# coding=utf-8
import logging
import os
from webcrawl.daemon import Daemon
from grab import task
path = os.path.abspath('.')
class PeriodMonitor(Daemon):
    """Daemonised crawler: runs the grab.task() entry point when started."""
    def _run(self):
        # Invoked by the Daemon base class in the detached process.
        task()
def main():
    """Toggle the crawler daemon: stop it when its pid file exists, else start."""
    pmoni = PeriodMonitor(os.path.join(path, 'log', 'pmoni.pid'), stdout=os.path.join(
        path, 'log', 'pmoni.out'), stderr=os.path.join(path, 'log', 'pmoni.err'))
    if os.path.exists(os.path.join(path, 'log', 'pmoni.pid')):
        # A pid file means a daemon is (presumably) running -- stop it.
        print "PeriodMonitor stop successfully."
        pmoni.stop()
    else:
        print "PeriodMonitor start successfully."
        pmoni.start()
if __name__ == '__main__':
    # Script entry point: start/stop the daemon.
    main()
# print '====start moefou'
# spider = SpiderMoefou(worknum=6, queuetype='P', worktype='THREAD')
# spider.fetchDatas('www', **{'url':'http://api.moefou.org/wikis.json?wiki_type=music&initial=&tag=&wiki_id=&api_key={{api_key}}&page=1'})
# spider.statistic()
# print '====end moefou'
# print '====start bili'
# from task.video.spiderBili import SpiderBilibili
# spider = SpiderBilibili(worknum=6, queuetype='P', worktype='THREAD')
# spider.fetchDatas('www', 'http://www.bilibili.com/html/js/types.json')
# spider.statistic()
# print '====end bili'
# print 'start'
# spider = SpiderXicidaili(worknum=6, queuetype='P', worktype='THREAD')
# spider.fetchDatas('www', **{'url':'http://www.xicidaili.com/nn/1'})
# spider.statistic()
# print 'end'
| {"/setup.py": ["/ask/__init__.py"]} |
56,705 | spawn3/python-util | refs/heads/master | /spider/gdc/task/audio/spiderWangyi.py | #!/usr/bin/env python
# coding=utf-8
import copy, time, json
from pymongo import MongoClient
from datetime import timedelta
from datetime import datetime
from webcrawl.request import requGet
from webcrawl.request import requPost
from webcrawl.request import getHtmlNodeContent
from webcrawl.request import getXmlNodeContent
from webcrawl.task import retry
from webcrawl.task import index
from webcrawl.task import initflow
from webcrawl.request import getJsonNodeContent
from webcrawl.task import store
from webcrawl.task import timelimit
from webcrawl.task import next
from webcrawl.request import ensureurl
from webcrawl.request import parturl
from model.setting import withData, datacfg
from audiospider import Data
from audiospider import TIMEOUT
from audiospider import SpiderAudioOrigin
from task.util.wangyi import encrypt_163
#_print, logger = logprint(modulename(__file__), modulepath(__file__))
class Spider163(SpiderAudioOrigin):
    """
    Data spider for the NetEase Cloud Music official site (music.163.com).

    Flow 'www': genre list -> hot playlists -> per-track records.
    """
    def __init__(self, worknum=6, queuetype='P', worktype='COROUTINE', timeout=-1, tid=0):
        super(Spider163, self).__init__(worknum=worknum, queuetype=queuetype, worktype=worktype, timeout=timeout, tid=tid)
        self.clsname = self.__class__.__name__
    @store(withData(datacfg.W), Data.insert, update=True, method='MANY')
    @timelimit(3)
    def fetchDetail(self, url, additions={}, timeout=TIMEOUT, implementor=None):
        """Yield one Data record per track of the playlist page at *url*.

        Fix: removed a duplicate, unused requGet of the same URL (the page
        was fetched twice) and a no-op self-assignment of parent_page_id.
        """
        album_result = requGet(url, timeout=timeout, format='HTML')
        tag = [getHtmlNodeContent(one, 'TEXT') for one in album_result.findall('.//div[@class="tags f-cb"]//a')]
        # The track list is embedded as JSON in a hidden textarea.
        pages = json.loads(getHtmlNodeContent(album_result.find('.//textarea[@style="display:none;"]'), 'TEXT') or '[]')
        cat = additions['cat']
        if pages:
            parent_page_id = hash('http://music.163.com/outchain/player?type=2&id=%s' % str(pages[0]['id']))
            for index, one in enumerate(pages):
                # Per-track detail call (encrypted payload) to resolve the mp3 URL.
                url_result = requPost('http://music.163.com/weapi/song/detail/', encrypt_163('{"id":"%s","ids":"[%s]","csrf_token":""}' % (str(one['id']), str(one['id']))), format='JSON')
                url = url_result['songs'][0]['mp3Url']
                format = 'mp3'
                size = 0
                during = one['duration']/1000.0
                name = one['name']
                desc = getHtmlNodeContent(album_result.find('.//p[@id="album-desc-more"]'), 'TEXT') if index == 0 else ''
                cover = one['album']['picUrl']
                snum = index + 1
                singer = one['artists'][0]['name']
                src = '网易'
                host = 'music.163.com'
                page_url = 'http://music.163.com/outchain/player?type=2&id=%s' % str(one['id'])
                page_id = hash(page_url)
                atime = datetime.now()
                data = Data(cat=cat, url=url, format=format,
                            size=size, during=during, tag=tag, name=name,
                            desc=desc, cover=cover, snum=snum, singer=singer,
                            src=src, host=host, page_url=page_url,
                            page_id=page_id, parent_page_id=parent_page_id,
                            atime=atime, tid=self.tid)
                yield data
    @next(fetchDetail)
    @timelimit(20)
    @index('url')
    def fetchList(self, url, additions={}, timeout=TIMEOUT, implementor=None):
        """Yield the next hot-playlist page URL (at most 5 pages), then one
        playlist URL per entry on the page.

        Bug fix: the loop used ``audios[0]`` instead of the loop variable
        ``one``, so the first playlist's URL was yielded for every entry.
        """
        result = requGet(url, timeout=timeout, format='HTML')
        audios = result.findall('.//ul[@id="m-pl-container"]//li')
        if len(audios) < additions['pagesize']:
            nextpage = None
        else:
            index = url.split('=')
            index[-1] = int(index[-1]) + 1
            if index[-1] > 5:
                nextpage = None
            else:
                index[-1] = str(index[-1])
                nextpage = '='.join(index)
        yield nextpage
        for one in audios:
            album_url = getHtmlNodeContent(one.find('.//p[@class="dec"]//a'), {'ATTR':'href'})
            yield {'url': 'http://music.163.com/playlist?id=%s' % album_url.split('=')[-1], 'additions':{'cat':additions['cat']}}
    @next(fetchList)
    @initflow('www')
    @timelimit(20)
    def fetchCat(self, additions={}, timeout=TIMEOUT, implementor=None):
        """Entry step of the 'www' flow: one hot-playlist listing per genre.

        (Removed a dead ``cats = [...]`` assignment that was immediately
        overwritten by the full list below.)
        """
        cats = ['华语', '欧美', '日语', '韩语', '粤语', '小语种', '风格', '流行', '摇滚', '民谣', '电子', '舞曲', '说唱', '轻音乐', '爵士', '乡村', 'R&B/Soul', '古典', '民族', '英伦', '金属', '朋克', '蓝调', '雷鬼', '世界音乐', '拉丁', '另类/独立', 'New Age', '古风', '后摇', 'Bossa Nova', '场景', '清晨', '夜晚', '学习', '工作', '午休', '下午茶', '地铁', '驾车', '运动', '旅行', '散步', '酒吧', '情感', '怀旧', '清新', '浪漫', '性感', '伤感', '治愈', '放松', '孤独', '感动', '兴奋', '快乐', '安静', '思念', '主题', '影视原声', 'ACG', '校园', '游戏', '70后', '80后', '90后', '网络歌曲', 'KTV', '经典', '翻唱', '吉他', '钢琴', '器乐', '儿童', '榜单', '00后']
        pagesize = 35
        for one in cats:
            yield {'url':'http://music.163.com/discover/playlist/?order=hot&cat=%s&limit=%d&offset=0' % (one, pagesize), 'additions':{'cat':[one,], 'pagesize':pagesize}}
if __name__ == '__main__':
print 'start'
spider = Spider163fou(worknum=6, queuetype='P', worktype='THREAD')
spider.fetchDatas('www', 0)
spider.statistic()
print 'end' | {"/setup.py": ["/ask/__init__.py"]} |
56,706 | spawn3/python-util | refs/heads/master | /spider/gdc/webcrawl/task.py | #!/usr/bin/python
# coding=utf-8
import json
import threading
import types
import copy
import sys
import traceback
import time
import weakref
import gevent
import functools
import ctypes
from . import MyLocal
MTID = threading._get_ident() # id of main thread
from gevent import monkey, Timeout
from pjq import RedisQueue, BeanstalkdQueue, LocalQueue
from exception import TimeoutError
def patch_thread(threading=True, _threading_local=True, Queue=True, Event=False):
    """Replace the standard :mod:`thread` module to make it greenlet-based.
    If *threading* is true (the default), also patch ``threading``.
    If *_threading_local* is true (the default), also patch ``_threading_local.local``.

    NOTE: the parameters deliberately shadow the module names; the modules
    are re-imported via __import__ after patching.
    """
    monkey.patch_module('thread')
    if threading:
        monkey.patch_module('threading')
        threading = __import__('threading')
        if Event:
            from gevent.event import Event
            threading.Event = Event
        if Queue:
            from gevent import queue
            threading.queue = queue
    if _threading_local:
        _threading_local = __import__('_threading_local')
        from gevent.local import local
        _threading_local.local = local
# Install our variant in place of gevent's own monkey.patch_thread.
monkey.patch_thread = patch_thread
# Default connection settings for the distributed task-queue backends;
# 'tube' is the queue/channel name shared by both.
DataQueue = MyLocal(
    redis={
        'host':'localhost',
        'port':6379,
        'db':0,
        'tube':'pholcus-task',
    },
    beanstalkd={
        'host':'localhost',
        'port':11300,
        'tube':'pholcus-task',
    }
)
# Optional dependency: use kokolog's structured logger when available,
# otherwise fall back to no-op helpers and a print-based logger.
try:
    from kokolog.aboutfile import modulename, modulepath
    from kokolog.prettyprint import logprint
except:
    def modulename(n):
        # Fallback: no module-name resolution.
        return None
    def modulepath(p):
        return None
    def logprint(n, p):
        # Mimic kokolog's (printer, logger) return shape.
        def _wraper(*args, **kwargs):
            print(' '.join(args))
        return _wraper, None
RETRY = 0
TIMELIMIT = 0
_continuous = True
def callpath(fun):
    """Return the queue-addressable name "<clspath>.<name>" for step *fun*."""
    return '%s.%s' % (fun.clspath, fun.__name__)
def initflow(which):
    """Decorator factory: tag the decorated function as the entry step of
    the workflow named *which* (exposed as its ``label`` attribute)."""
    def wrap(fun):
        fun.label = which
        return functools.wraps(fun)(lambda *a, **kw: fun(*a, **kw))
    return wrap
def index(key):
    """Decorator factory: record *key* (an argument position or keyword
    name) as the step's pagination slot via the ``index`` attribute."""
    def wrap(fun):
        fun.index = key
        @functools.wraps(fun)
        def relay(*args, **kwargs):
            return fun(*args, **kwargs)
        return relay
    return wrap
def store(db=None, way=None, update=None, method=None):
    """Decorator factory: attach a persistence callable as ``fun.store``.

    *way* is the storage function (e.g. Data.insert); when *db* is given,
    *way* is wrapped by it with *update*/*method* pre-bound.  The attached
    store gets its own retry/timelimit/priority and counters so the queue
    can schedule it like any other step.
    NOTE(review): ``way.im_self`` is the Python-2 bound-method attribute.
    """
    def wrap(fun):
        if way is not None:
            if db is None:
                fun.store = way
                fun.store.__name__ = way.__name__
            else:
                fun.store = functools.partial(db(way), update=update, method=method)
                fun.store.__name__ = 'store' + way.im_self.__name__
            fun.store.retry = RETRY
            fun.store.timelimit = TIMELIMIT
            fun.store.priority = 0
            fun.store.succ = 0
            fun.store.fail = 0
            fun.store.timeout = 0
        @functools.wraps(fun)
        def wrapped(*args, **kwargs):
            return fun(*args, **kwargs)
        return wrapped
    return wrap
def hashweb():
    """Decorator factory that currently just passes calls through unchanged
    (placeholder kept for interface compatibility)."""
    def wrap(fun):
        @functools.wraps(fun)
        def relay(*args, **kwargs):
            return fun(*args, **kwargs)
        return relay
    return wrap
def retry(num=1):
    """Decorator factory: allow the step to be requeued up to *num* times
    on failure (exposed as the ``retry`` attribute)."""
    def wrap(fun):
        fun.retry = num
        @functools.wraps(fun)
        def forward(*args, **kwargs):
            return fun(*args, **kwargs)
        return forward
    return wrap
def next(method, *args, **kwargs):
    """Decorator factory: chain *method* as the downstream step of the
    decorated function (exposed as ``fun.next``).

    NOTE: shadows the builtin ``next`` at module level.
    """
    def wrap(fun):
        try:
            # Plain functions: annotate directly; keep only a weak proxy so
            # the step chain does not create reference cycles.
            method.args = args
            method.kwargs = kwargs
            fun.next = weakref.proxy(method)
        except:
            # Bound methods (py2) reject attribute writes; go via __func__
            # and keep a strong reference instead.
            method.__func__.args = args
            # method.__func__.args = tuple((str(fun).split('at')[0].split('function')[-1].replace(' ', '') + ',' + ','.join(args)).split(','))
            method.__func__.kwargs = kwargs
            fun.next = method
        @functools.wraps(fun)
        def wrapped(*args, **kwargs):
            return fun(*args, **kwargs)
        return wrapped
    return wrap
def dispatch(flag=False):
    """Decorator factory: set the step's ``dispatch`` flag (read during
    flow extraction by Workflows)."""
    def wrap(fun):
        fun.dispatch = flag
        return functools.wraps(fun)(lambda *a, **kw: fun(*a, **kw))
    return wrap
def timelimit(seconds=TIMELIMIT):
    """Decorator factory: cap a single invocation of the step at *seconds*
    (enforced with a gevent Timeout inside geventwork; 0 disables)."""
    def wrap(fun):
        fun.timelimit = seconds
        @functools.wraps(fun)
        def wrapped(*args, **kwargs):
            return fun(*args, **kwargs)
        return wrapped
    return wrap
def priority(level=0):
    """Decorator factory: set the step's queue priority (``priority``
    attribute) used when jobs are enqueued."""
    def wrap(fun):
        fun.priority = level
        @functools.wraps(fun)
        def forward(*args, **kwargs):
            return fun(*args, **kwargs)
        return forward
    return wrap
def assure(method):
    """Reset the per-step counters and guarantee that retry/timelimit/
    priority attributes exist, falling back to the module defaults."""
    method.succ = 0
    method.fail = 0
    method.timeout = 0
    if not hasattr(method, 'retry'):
        method.retry = RETRY
    if not hasattr(method, 'timelimit'):
        method.timelimit = TIMELIMIT
    if not hasattr(method, 'priority'):
        method.priority = None
class Nevertimeout(object):
    """Null stand-in for a gevent Timeout, used when a step has no time
    limit so that geventwork can call .cancel() unconditionally."""
    def __init__(self):
        pass
    def cancel(self):
        # Nothing to cancel.
        pass
def handleIndex(workqueue, result, method, args, kwargs, priority, methodId, methodName, times, tid):
    """Consume the first yielded value of a step's generator (the "next
    page" index) and, on the first attempt only, requeue the step with that
    index substituted into its positional or keyword arguments."""
    index = result.next()  # py2 generator protocol
    if index and times == 0:
        if type(method.index) == int:
            # Positional slot: replace args[method.index].
            indexargs = list(args)
            indexargs[method.index] = index
            indexargs = tuple(indexargs)
            indexkwargs = dict(kwargs, **{})
        elif type(method.index) == str:
            # Keyword slot: override kwargs[method.index].
            indexargs = tuple(list(args))
            indexkwargs = dict(
                kwargs, **{method.index: index})
        else:
            # NOTE(review): raising a string only "works" on old Python 2.
            raise "Incorrect arguments."
        workqueue.put((priority, methodId, methodName, 0, indexargs, indexkwargs, tid))
def handleNextStore(workqueue, retvar, method, tid, hasnext=False, hasstore=False):
    """Route one value produced by a step to its chained next step and/or
    its store callable, normalising by the value's shape:
    dict -> kwargs (store gets only retvar['obj']); tuple -> args (store
    gets only the first element); anything else -> single argument."""
    if retvar is None:
        pass
    elif type(retvar) == dict:
        hasnext and workqueue.put(
            (method.next.priority, id(method.next), callpath(method.next), 0, (), retvar, tid))
        hasstore and workqueue.put(
            (method.store.priority, id(method.store), callpath(method.store), 0, (), {'obj': retvar['obj']}, tid))
    elif type(retvar) == tuple:
        hasnext and workqueue.put(
            (method.next.priority, id(method.next), callpath(method.next), 0, retvar, {}, tid))
        hasstore and workqueue.put(
            (method.store.priority, id(method.store), callpath(method.store), 0, (retvar[0],), {}, tid))
    else:
        hasnext and workqueue.put(
            (method.next.priority, id(method.next), callpath(method.next), 0, (retvar,), {}, tid))
        hasstore and workqueue.put(
            (method.store.priority, id(method.store), callpath(method.store), 0, (retvar,), {}, tid))
    # raise "Incorrect result for next function."
def handleExcept(workqueue, method, args, kwargs, priority, methodId, methodName, times, tid, sid, count='fail'):
    """Requeue a failed step until its retry budget is spent; then bump the
    method's *count* counter ('fail' or 'timeout') and log the traceback of
    the exception currently being handled."""
    if times < method.retry:
        times = times + 1
        workqueue.put((priority, methodId, methodName, times, args, kwargs, tid))
    else:
        setattr(method, count, getattr(method, count)+1)
        t, v, b = sys.exc_info()
        err_messages = traceback.format_exception(t, v, b)
        txt = ','.join(err_messages)
        _print('', tid=tid, sid=sid, type=count.upper(), status=0, sname=methodName, priority=priority, times=times, args='(%s)' % ', '.join([str(one) for one in args]), kwargs=json.dumps(kwargs, ensure_ascii=False), txt=txt)
def geventwork(workqueue):
    """Worker loop: pop (job, sid) items off *workqueue* and execute them.

    A job is (priority, methodId, methodName, times, args, kwargs, tid);
    the callable is recovered from its id via ctypes.  Generator results
    are drained and each yielded value routed through handleNextStore;
    failures/timeouts go through handleExcept for retry or accounting.
    NOTE: ``sleep`` and ``_continuous`` are module globals (``sleep`` is
    bound in Workflows.prepare to time.sleep or gevent.sleep).
    """
    while _continuous:
        if workqueue.empty():
            sleep(0.1)
        else:
            timer = Nevertimeout()
            item = workqueue.get(timeout=10)
            if item is None:
                continue
            stxt, sid = item
            priority, methodId, methodName, times, args, kwargs, tid = stxt
            # Recover the callable from its id (jobs carry ids, not objects).
            method = ctypes.cast(methodId, ctypes.py_object).value
            try:
                if method.timelimit > 0:
                    # Per-invocation deadline enforced with a gevent Timeout.
                    timer = Timeout(method.timelimit, TimeoutError)
                    timer.start()
                result = method(*args, **kwargs)
                if result is None:
                    method.succ = method.succ + 1
                elif isinstance(result, types.GeneratorType):
                    try:
                        hasattr(method, 'index') and handleIndex(
                            workqueue, result, method, args, kwargs, priority, methodId, methodName, times, tid)
                        for retvar in result:
                            handleNextStore(
                                workqueue, retvar, method, tid, hasattr(method, 'next'), hasattr(method, 'store'))
                        method.succ = method.succ + 1
                    except TimeoutError:
                        handleExcept(
                            workqueue, method, args, kwargs, priority, methodId, methodName, times, tid, sid, 'timeout')
                    except:
                        handleExcept(
                            workqueue, method, args, kwargs, priority, methodId, methodName, times, tid, sid, 'fail')
                else:
                    handleNextStore(
                        workqueue, result, method, tid, hasattr(method, 'next'), hasattr(method, 'store'))
                    method.succ = method.succ + 1
            except TimeoutError:
                handleExcept(
                    workqueue, method, args, kwargs, priority, methodId, methodName, times, tid, sid, 'timeout')
            except:
                handleExcept(
                    workqueue, method, args, kwargs, priority, methodId, methodName, times, tid, sid, 'fail')
            finally:
                workqueue.task_done((tid, methodName, priority, times, args, kwargs, sid))
                timer.cancel()
                del timer
class Foreverworker(threading.Thread):
    """
    Worker thread that consumes the work queue until _continuous goes False.
    """
    def __init__(self, workqueue):
        """
        Initialise with the queue whose jobs this thread will execute.
        @param workqueue: the job queue
        """
        super(Foreverworker, self).__init__()
        self.__workqueue = workqueue
    def run(self):
        """
        Thread body: delegate to the geventwork consumer loop.
        """
        geventwork(self.__workqueue)
    def __del__(self):
        del self.__workqueue
class Workflows(object):
    """
    Task-flow engine (docstring translated; original: "任务流").

    Discovers chains of decorated step methods on the instance, wires them
    together with functools.partial, and executes them through a pluggable
    queue (process-local 'P', beanstalkd 'B', otherwise redis) using either
    OS threads or gevent coroutines.
    """
    def __init__(self, worknum, queuetype, worktype, tid=0):
        # worknum:   number of workers to create in prepare()
        # queuetype: 'P' / 'B' / anything else -> redis
        # worktype:  'COROUTINE' -> gevent monkey-patching + greenlets,
        #            otherwise plain Foreverworker threads
        if worktype == 'COROUTINE':
            monkey.patch_all(Event=True)
            # Re-register the main thread record under the greenlet ident so
            # the threading module stays consistent after monkey-patching.
            gid = threading._get_ident()
            thread = threading._active.get(MTID)
            if thread:
                threading._active[gid] = thread
        self.__worknum = worknum
        self.__queuetype = queuetype
        self.__worktype = worktype
        # 'inner' collects labels of flows declared on this class; guards
        # extractFlow() against being run twice.
        self.__flowcount = {'inner': set(), 'outer': set()}
        self.__flows = {}
        if not hasattr(self, 'clsname'):
            # "<class 'pkg.Name'>" -> "Name"
            self.clsname = str(self.__class__).split(".")[-1].replace("'>", "")
        self.queue = None
        self.workers = []
        self.tid = tid

    def prepare(self, flow=None):
        """Create the task queue and the (not yet started) worker pool."""
        self.workers = []
        weight = []
        tube = {}
        if flow:
            weight = self.weight(flow)
            # Isolate this instance's tasks in a dedicated tube/queue name.
            tube['tube'] = str(id(self))
        try:
            if self.__queuetype == 'P':
                self.queue = LocalQueue()()
            elif self.__queuetype == 'B':
                self.queue = BeanstalkdQueue(**dict(DataQueue.beanstalkd, **tube))
            else:
                self.queue = RedisQueue(weight=weight, **dict(DataQueue.redis, **tube))
        except:
            print 'Wrong type of queue, please choose P or B or start your beanstalkd service.'
        # Rebind the module-level sleep to the gevent- or time-based variant.
        global sleep
        if self.__worktype == 'COROUTINE':
            from gevent import sleep
            for k in range(self.__worknum):
                if self.__queuetype == 'P':
                    worker = functools.partial(geventwork, self.queue)
                elif self.__queuetype == 'B':
                    # Each coroutine worker gets its own broker connection.
                    worker = functools.partial(
                        geventwork, BeanstalkdQueue(**dict(DataQueue.beanstalkd, **tube)))
                else:
                    worker = functools.partial(
                        geventwork, RedisQueue(weight=weight, **dict(DataQueue.redis, **tube)))
                self.workers.append(worker)
        else:
            from time import sleep
            for k in range(self.__worknum):
                if self.__queuetype == 'P':
                    worker = Foreverworker(self.queue)
                elif self.__queuetype == 'B':
                    worker = Foreverworker(BeanstalkdQueue(**dict(DataQueue.beanstalkd, **tube)))
                else:
                    worker = Foreverworker(RedisQueue(weight=weight, **dict(DataQueue.redis, **tube)))
                self.workers.append(worker)

    def tinder(self, flow):
        # First step (ignition point) of the flow.
        return self.__flows[flow]['tinder']

    def section(self, flow, step=0):
        """Return the callable at position *step* of *flow* (0 = first step)."""
        if step == 0:
            return self.tinder(flow)
        else:
            it = self.__flows.get(flow, {'tinder':None})['tinder']
            for k in range(step):
                it = it.next
            return it

    def terminator(self, flow):
        # Last step of the flow.
        return self.__flows[flow]['terminator']

    def extractFlow(self):
        """Scan the instance for labeled step methods and build the flows.

        Each step is wrapped in a functools.partial; the wrappers are linked
        via a .next attribute, mirroring the .next chain the decorators put
        on the original methods.
        """
        def imitate(p, b):
            # Copy bookkeeping attributes from the original step p onto the
            # partial wrapper b (counters, retry/timeout policy, priority).
            if not hasattr(b, '__name__'):
                b.__name__ = str(p).split(' at ')[0].split(' of ')[0].split(
                    '<function ')[-1].split('.')[-1].replace(' ', '').replace('>', '')
            b.succ = 0
            b.fail = 0
            b.timeout = 0
            hasattr(p, 'index') and setattr(b, 'index', p.index)
            setattr(b, 'clspath', str(self))
            hasattr(p, 'store') and setattr(b, 'store', p.store)
            hasattr(p, 'store') and setattr(b.store, 'clspath', str(self))
            b.retry = (hasattr(p, 'retry') and p.retry) or RETRY
            b.timelimit = (hasattr(p, 'timelimit') and p.timelimit) or TIMELIMIT
            b.priority = (hasattr(p, 'priority') and p.priority) or None
        if self.__flowcount['inner']:
            print "Inner workflow can be set once and has been set."
        else:
            # Any attribute carrying a 'label' marker starts a flow.
            for it in dir(self):
                it = getattr(self, it)
                if hasattr(it, 'label'):
                    self.__flows[it.label] = {'tinder': it, 'terminator': it, 'weight':{'num':0, 'levels':[]}}
            for label, flow in self.__flows.items():
                # hasprior stays True only if every step declares a priority.
                flow['hasprior'] = True
                flow['steps'] = 1
                p = flow['tinder']
                b = functools.partial(p)
                imitate(p, b)
                flow['hasprior'] = flow['hasprior'] and (
                    b.priority is not None)
                self.__flows[label]['weight']['levels'].append(b.priority)
                flow['tinder'] = b
                self.__flowcount['inner'].add(p.label)
                # Walk the decorator-built .next chain and wrap each step.
                while hasattr(p, 'next') and hasattr(p.next, 'args') and hasattr(p.next, 'kwargs'):
                    p = p.next
                    flow['steps'] = flow['steps'] + 1
                    if hasattr(p, 'dispatch') and p.dispatch:
                        b.next = p(self, *p.args, **p.kwargs)
                    else:
                        b.next = functools.partial(
                            p, self, *p.args, **p.kwargs)
                    b = b.next
                    imitate(p, b)
                    flow['hasprior'] = flow['hasprior'] and (
                        b.priority is not None)
                    self.__flows[label]['weight']['levels'].append(b.priority)
                flow['terminator'] = b
            for label, flow in self.__flows.items():
                if not flow['hasprior']:
                    # No explicit priorities anywhere: synthesize descending
                    # priorities (earlier steps get higher numbers).
                    self.__flows[label]['weight']['levels'] = []
                    it = flow['tinder']
                    num = 0
                    it.priority = flow['steps'] - num
                    self.__flows[label]['weight']['levels'].append(it.priority)
                    while hasattr(it, 'next'):
                        it = it.next
                        num = num + 1
                        it.priority = flow['steps'] - num
                        self.__flows[label]['weight']['levels'].append(it.priority)
                    flow['hasprior'] = True
                self.__flows[label]['weight']['levels'].append(0)
            print "Inner workflow is set."

    def fire(self, flow, step=0, *args, **kwargs):
        """Enqueue step *step* of *flow* with *args*/*kwargs* and start workers."""
        it = self.__flows.get(flow, {'tinder':None})['tinder']
        if it is not None:
            self.prepare(flow)
            try:
                for k in range(step):
                    it = it.next
            except:
                print 'Flow %s has no %d steps.' % (flow, step)
            else:
                self.queue.put((it.priority, id(it), callpath(it), 0, args, kwargs, str(self.tid)))
                for worker in self.workers:
                    if self.__worktype == 'COROUTINE':
                        gevent.spawn(worker)
                    else:
                        worker.setDaemon(True)
                        worker.start()
        else:
            print 'There is no work flow.'

    def exit(self):
        # force=True unblocks join() even with outstanding tasks.
        self.queue.task_done(None, force=True)

    def waitComplete(self):
        # Block until every queued task has been marked done.
        self.queue.join()

    def weight(self, flow, once=False):
        """Return the flow's priority levels (reversed); with once=True only
        the first call yields them, later calls return []."""
        if once:
            self.__flows[flow]['weight']['num'] = self.__flows[flow]['weight']['num'] + 1
        if once and self.__flows[flow]['weight']['num'] > 1:
            return []
        else:
            return self.__flows[flow]['weight']['levels'][::-1]

    def start(self):
        """Prepare a flow-less queue and start all workers."""
        self.prepare()
        for worker in self.workers:
            if self.__worktype == 'COROUTINE':
                gevent.spawn(worker)
            else:
                worker.setDaemon(True)
                worker.start()

    def task(self, weight, section, tid, *args, **kwargs):
        """Re-rank the queue and enqueue one invocation of *section*."""
        self.queue.rank(weight)
        self.queue.put((section.priority, id(section), callpath(section), 0, args, kwargs, str(tid)))

    def __str__(self):
        # "<package.Class object at 0x...>" -> "package.Class"
        desc = object.__str__(self)
        return desc.replace("<", "").split(" ")[0]

    def __del__(self):
        # Release queue resources and undo the main-thread re-registration
        # done in __init__ for the COROUTINE case.
        if self.queue is not None:
            self.queue.collect()
            del self.queue
        del self.workers
        if threading._active[MTID]:
            del threading._active[MTID]
# Library module: nothing to demonstrate when run directly.
if __name__ == '__main__':
    pass
| {"/setup.py": ["/ask/__init__.py"]} |
56,707 | spawn3/python-util | refs/heads/master | /lich/lich/umptypes.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import exc
# Path-type discriminators stored in UmpPath.type.
POOL = 'pool'
VOLUME = 'volume'
SNAPSHOT = 'snapshot'


class UmpPath(object):
    """Parsed representation of a storage path.

    Accepted spellings:
        "pool"              -> POOL
        "pool/volume"       -> VOLUME
        "pool/volume@snap"  -> SNAPSHOT
    """

    def __init__(self, path, protocol='iscsi', username='cinder'):
        self.protocol = protocol
        self.username = username
        self.protocol_root = protocol
        self.path = path
        self._parse()

    def _reset(self):
        # Forget everything learned from a previous parse.
        self.type = None
        self.pool_name = None
        self.vol_name = None
        self.snap_name = None

    def __repr__(self):
        if self.type == POOL:
            return self.pool_path
        if self.type == VOLUME:
            return self.volume_path
        if self.type == SNAPSHOT:
            return self.snap_path
        return 'InvalidPath: %s' % self.path

    def _parse(self, path=None):
        """Classify *path* (or self.path) and fill in the name fields."""
        self._reset()
        if path:
            self.path = path
        else:
            path = self.path
        stripped = path.strip()
        if '@' in stripped:
            self.type = SNAPSHOT
            vol_part, snap = self._check_snapshot(stripped)
            pool, vol = self._check_volume(vol_part)
            self.pool_name = pool
            self.vol_name = vol
            self.snap_name = snap
        elif '/' in stripped:
            self.type = VOLUME
            pool, vol = self._check_volume(stripped)
            self.pool_name = pool
            self.vol_name = vol
        else:
            self.type = POOL
            self.pool_name = stripped

    def _check_snapshot(self, path):
        parts = path.split('@')
        if len(parts) != 2:
            raise exc.InvalidPath(path)
        return parts[0], parts[1]

    def _check_volume(self, path):
        parts = path.split('/')
        if len(parts) != 2:
            raise exc.InvalidPath(path)
        return parts[0], parts[1]

    def check_protocol(self, protocol='iscsi'):
        """Reject anything but the supported access protocols."""
        if protocol not in ('iscsi', 'nbd'):
            raise exc.ProtocolNotSupported(protocol)
        return True

    @property
    def long_pool_name(self):
        return '{0}:{1}'.format(self.username, self.pool_name)

    @property
    def long_volume_name(self):
        return '{0}/{1}'.format(self.long_pool_name, self.vol_name)

    @property
    def pool_path(self):
        return '/{0}/{1}'.format(self.protocol_root, self.long_pool_name)

    @property
    def volume_path(self):
        return '/{0}/{1}'.format(self.protocol_root, self.long_volume_name)

    @property
    def snap_path(self):
        return '/{0}/{1}@{2}'.format(self.protocol_root, self.long_volume_name, self.snap_name)

    def ensure(self, t):
        """Raise unless this path is at least as specific as type *t*."""
        acceptable = {
            SNAPSHOT: (SNAPSHOT,),
            VOLUME: (SNAPSHOT, VOLUME),
            POOL: (SNAPSHOT, VOLUME, POOL),
        }
        if t not in acceptable:
            raise exc.InvalidParameter(t)
        if self.type not in acceptable[t]:
            raise exc.InvalidPath(self.path, t)

    def is_pool(self):
        return self.type == POOL

    def is_volume(self):
        return self.type == VOLUME

    def is_snapshot(self):
        return self.type == SNAPSHOT
# Self-test: parse a snapshot path and print every rendered form.
if __name__ == '__main__':
    path = UmpPath('a/b@c')
    # a snapshot path satisfies every ensure() level
    path.ensure(POOL)
    path.ensure(VOLUME)
    path.ensure(SNAPSHOT)
    print path
    print path.long_pool_name
    print path.long_volume_name
    print path.pool_path
    print path.volume_path
    print path.snap_path
| {"/setup.py": ["/ask/__init__.py"]} |
56,708 | spawn3/python-util | refs/heads/master | /ask/right/a.py | #!/usr/bin/env python
from pprint import pprint
from base import register, register2
# Public names re-exported on "from a import *".
__all__ = ['A', 'B']

# Alternative: register into an explicit namespace dict (kept for reference).
#@register(dct=__dict__)
@register2(name=__name__)
class A(object):
    """Empty demo class; register2 also publishes a ``theA`` instance on this module."""
    pass

#@register(dct=globals())
@register2(name=__name__)
class B(object):
    """Empty demo class; register2 also publishes a ``theB`` instance on this module."""
    pass

# pprint(globals())
| {"/setup.py": ["/ask/__init__.py"]} |
56,709 | spawn3/python-util | refs/heads/master | /spider/gdc/task/audio/audiospider.py | #!/usr/bin/env python
# coding=utf-8
from webcrawl.spider import SpiderOrigin
from model.data import Audio as Data
TIMEOUT = 120
class SpiderAudioOrigin(SpiderOrigin):
    """Base spider for audio sources; all behavior comes from SpiderOrigin."""
    def __del__(self):
        # No-op destructor; presumably suppresses SpiderOrigin's cleanup —
        # TODO(review): confirm against SpiderOrigin.__del__.
        pass

    def __init__(self, queuetype='P', timeout=-1, worknum=6, worktype='COROUTINE', tid=0):
        # Forward all tuning knobs unchanged to the generic spider base.
        super(SpiderAudioOrigin, self).__init__(queuetype=queuetype, timeout=timeout, worknum=worknum, worktype=worktype, tid=tid)

if __name__ == "__main__":
    pass
| {"/setup.py": ["/ask/__init__.py"]} |
56,710 | spawn3/python-util | refs/heads/master | /ask/right/base.py | #!/usr/bin/env python
import functools
import sys
__all__ = ['register']
def register(dct=None):
    """Class-decorator factory: record a class and a default instance in *dct*.

    After ``@register(dct=ns)`` on class ``C``, ``ns['C']`` is the class and
    ``ns['theC']`` is a freshly constructed instance; the class itself is
    returned unchanged.

    Fix: the original used the mutable default ``dct={}``, which is shared
    across every no-argument call of register().
    """
    if dct is None:
        dct = {}
    # @functools.wraps(cls)
    def wrapper(cls):
        # parenthesized so the trace works identically on Python 2 and 3
        print('registering %s: %s' % (cls.__name__, cls))
        dct[cls.__name__] = cls
        dct['the%s' % cls.__name__] = cls()
        return cls
    return wrapper
def register2(name=__name__):
    """Class-decorator factory: publish a class (and a default instance) as
    attributes of the already-imported module *name*.

    ``@register2(name=__name__)`` on class ``C`` sets ``module.C`` and
    ``module.theC`` (a new instance), then returns the class unchanged.

    Note: the default for *name* is evaluated once, at definition time, so
    it is always *this* module's name unless the caller overrides it.
    """
    m = sys.modules[name]
    def wrapper(cls):
        # parenthesized so the trace works identically on Python 2 and 3
        print('registering %s: %s' % (cls.__name__, cls))
        setattr(m, cls.__name__, cls)
        setattr(m, 'the%s' % cls.__name__, cls())
        return cls
    return wrapper
| {"/setup.py": ["/ask/__init__.py"]} |
56,711 | spawn3/python-util | refs/heads/master | /spider/gdc/task/video/spiderAiqiyi.py | #!/usr/bin/python
# coding=utf-8
import os, re, copy, time
from pymongo import MongoClient
from datetime import timedelta
from datetime import datetime
from webcrawl.request import requGet
from webcrawl.request import requPost
from webcrawl.request import getHtmlNodeContent
from webcrawl.request import getXmlNodeContent
from webcrawl.task import retry
from webcrawl.task import index
from webcrawl.task import initflow
from webcrawl.request import getJsonNodeContent
from webcrawl.request import Fakerequest
from webcrawl.task import store
from webcrawl.task import timelimit
from webcrawl.task import next
from webcrawl.request import ensureurl
from webcrawl.request import parturl
from model.setting import withData, datacfg
from videospider import Data
from videospider import TIMEOUT
from videospider import SpiderVideoOrigin
#_print, logger = logprint(modulename(__file__), modulepath(__file__))
iqiyi_re = re.compile('aid.*"')
def seconds(tl):
assert len(tl) < 3
num = 0
for index, one in enumerate(tl[::-1]):
num += pow(60, index) * int(one)
return num
class SpiderIqiyi(SpiderVideoOrigin):
    """
    Data crawler for the iqiyi.com official site
    (docstring translated; original: "爱奇艺官网 数据爬虫").
    """
    def __init__(self, worknum=6, queuetype='P', worktype='COROUTINE', timeout=-1, tid=0):
        super(SpiderIqiyi, self).__init__(worknum=worknum, queuetype=queuetype, worktype=worktype, timeout=timeout, tid=tid)
        self.clsname = self.__class__.__name__
        # Mobile-browser headers so iqiyi serves the lightweight m.iqiyi.com markup.
        self.headers = {"Accept":"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
                        "Accept-Language":"en-US,en;q=0.8",
                        "Cache-Control":"max-age=0",
                        "Connection":"keep-alive",
                        "User-Agent":"Mozilla/5.0 (Linux; Android 4.2.2; GT-I9505 Build/JDQ39) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.59 Mobile Safari/537.36"
                        }
        # Default crawl window: the last 7 days.
        self.end = datetime.now()
        self.begin = self.end - timedelta(days=7)
        # JS snippet injected into a fake browser session to resolve the
        # actual video src attribute (used by the commented-out path below).
        self.script = 'function(){Q.video.auth({uid: $.parseJSON($.cookie.get("P00002") || "{}").uid, platForm: "h5", rate: 1, tvid: Q.PageInfo.playInfo.tvid, vid: Q.PageInfo.playInfo.vid, cupid: Q.PageInfo.playInfo.ADPlayerID, type: Q.PageInfo.playInfo.videoFormat, qyid: $.cookie.get("QC006"), nolimit: $.cookie.get("QC004") === "0" ? 1 : 0, agenttype: Zepto.os.ios ? 12 : 13 }, { "complete":function(data){}, "success":function(data){ $("#video").attr("src", data.src); }}); return $("#video").attr("src");}'

    @store(withData(datacfg.W), Data.insert, update=True, method='MANY')
    @timelimit(3)
    @index('url')
    def fetchDetail(self, url, additions={}, timeout=TIMEOUT, implementor=None):
        """Terminal step: fetch one avlist page and yield Data records.

        Generator protocol: the FIRST yield is the next-page URL (or None);
        subsequent yields are Data rows — presumably consumed that way by
        the webcrawl framework (TODO(review): confirm against webcrawl.task).
        NOTE(review): mutable default ``additions={}`` is shared across calls.
        """
        cat = additions['cat']
        tag = additions['tag']
        if 'cache.video.iqiyi.com' in url:
            # Already an avlist URL: the album id is the last path component.
            outid = url[url.rindex('/')+1:url.rindex('.')].replace('av', '')
        else:
            # Album page: read the id from the up/down-vote widget, then
            # build the avlist API URL ourselves.
            result = requGet(url, format='HTML')
            outid = getHtmlNodeContent(result.find('.//div[@id="upDownWrap"]'), {'ATTR':'data-qpaid'})
            url = 'http://cache.video.iqiyi.com/jp/avlist/%s/1/' % outid
        browse = Fakerequest('http://localhost:12306', javascript={'end':self.script}, wait=2)
        # Response is JSONP prefixed with "var tvInfoJs=", stripped via dirtys.
        page_result = requGet(url, dirtys=[('var tvInfoJs=', '')], format='JSON')
        if int(page_result['data']['pg']) < page_result['data']['pgt']:
            # More pages: bump the page number embedded in the URL path.
            # (local name shadows the imported `index` decorator)
            index = url.split('/')
            index[-2] = str(int(index[-2]) + 1)
            nextpage = '/'.join(index)
        else:
            nextpage = None
        yield nextpage
        # All episodes of one album share the first episode's page hash.
        parent_page_id = additions.get('parent_page_id')
        if additions.get('parent_page_id') is None:
            parent_page_id = hash(page_result['data']['vlist'][0]['vurl'])
            additions['parent_page_id'] = parent_page_id
        format = 'mp4'
        size = 0
        for one in page_result['data']['vlist']:
            # Direct-src resolution via the fake browser is disabled; the
            # record is stored with an empty url and the mobile page link.
            # data_result = requGet(one['vurl'], headers=self.headers, timeout=10, dirtys=[('\r\n', ''),('\n', ''),('\\n', ''),('\\', ''),('\x1b', ''),('\x16', '')], format='HTML', browse=browse)
            # url = getHtmlNodeContent(data_result.find('.//video[@id="video"]'), {'ATTR':'src'})
            # if 'data.video.qiyi.com/videos/other' in url:
            #     url = ''
            url = ''
            page_url = one['vurl'].replace('www.iqiyi.com', 'm.iqiyi.com')
            during = one['timeLength']
            name = one['shortTitle']
            desc = one['vt']
            cover = one['vpic']
            author = ''
            owner = {}
            owner['avatar'] = 'http://www.qiyipic.com/common/fix/index_images/logo110x36_new.png'
            owner['name'] = '爱奇艺'
            owner['url'] = 'http://www.iqiyi.com'
            snum = one['pd']
            src = '爱奇艺'
            host = 'www.iqiyi.com'
            page_id = hash(page_url)
            # publishTime is in milliseconds since the epoch.
            atime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(one['publishTime']/1000))
            data = Data(cat=cat, url=url, format=format,
                        size=size, during=during, tag=tag, name=name,
                        desc=desc, cover=cover, author=author,
                        owner=owner, snum=snum,
                        src=src, host=host, page_url=page_url,
                        page_id=page_id, parent_page_id=parent_page_id,
                        atime=atime, tid=self.tid)
            yield data

    @next(fetchDetail)
    @timelimit(20)
    @index('url')
    def fetchList(self, url, additions={}, timeout=TIMEOUT, implementor=None):
        """Middle step: walk a category listing page, yield work for fetchDetail.

        First yield is the next listing page (None when fewer than a full
        page of 30 entries remains); later yields are task dicts.
        """
        result = requGet(url, headers=self.headers, timeout=timeout, format='HTML')
        videos = result.findall('.//div[@class="wrapper-piclist"]//li')
        if len(videos) < 30:
            nextpage = None
        else:
            # Page number is the 5th-from-last '-'-separated URL component.
            index = url.split('-')
            index[-5] = str(int(index[-5]) + 1)
            # if index[-5] >5:
            #     nextpage = None
            # else:
            #     index[-5] = str(index[-5])
            nextpage = '-'.join(index)
        yield nextpage
        for one in videos:
            url = getHtmlNodeContent(one.find('.//div[@class="mod-listTitle_left"]//a'), {'ATTR':'href'}).replace('www.iqiyi.com', 'm.iqiyi.com')
            url_result = requGet(url, headers=self.headers, timeout=timeout, format='HTML')
            # The album id is only present inside inline <script> blocks.
            aidtxt = ''.join(getHtmlNodeContent(one, 'TEXT') for one in url_result.findall('.//script'))
            aid = iqiyi_re.search(aidtxt).group().replace(" ", "").replace('"', '').replace(':', '').replace('aid', '')
            url = 'http://cache.video.iqiyi.com/jp/avlist/%s/1/' % aid
            name = getHtmlNodeContent(one.find('.//div[@class="mod-listTitle_left"]//a'), 'TEXT')
            tag = [getHtmlNodeContent(one.find('.//div[@class="role_info"]'), 'TEXT').replace('\n', '').replace(' ', '')]
            additions['cat'] = ''
            yield {'url': url, 'additions': {'cat':additions['cat'], 'name':name, 'tag':tag}}

    @next(fetchList)
    @timelimit(20)
    @initflow('www')
    def fetchCat(self, url, additions={}, timeout=TIMEOUT, implementor=None):
        """Entry step of the 'www' flow: enumerate category links."""
        result = requGet(url, headers=self.headers, timeout=timeout, format='HTML')
        for cat in result.findall('.//ul[@class="mod_category_item"]//li'):
            cat_name = getHtmlNodeContent(cat, 'TEXT')
            # skip the catch-all "全部" ("all") pseudo-category
            if cat_name == '全部':
                continue
            else:
                url = 'http://list.iqiyi.com%s' % getHtmlNodeContent(cat.find('.//a'), {'ATTR':'href'})
                yield {'url':url, 'additions':{'cat':['动漫', cat_name]}}

    @next(fetchDetail)
    @timelimit(20)
    @initflow('spec')
    def fetchSpec(self, additions={}, timeout=TIMEOUT, implementor=None):
        """Entry step of the 'spec' flow: hand-picked special albums."""
        albums = [
            'http://v.youku.com/x_getAjaxData?md=showlistnew&vid=107240046',
            'http://v.youku.com/x_getAjaxData?md=showlistnew&vid=107121098',
            'http://v.youku.com/x_getAjaxData?md=showlistnew&vid=338000877',
        ]
        # for index, one in enumerate(albums):
        #     yield {'url': one, 'additions': {'cat':['漫画', '美少女', '摇曳百合第%d季' % (index+1)]}}
        yield {'url':'http://www.iqiyi.com/a_19rrgi7t01.html', 'additions': {'cat':['动漫', '中国', '鬼神'], 'name':'中国惊奇先生', 'tag':['原创', '侦探', '都市', '讽刺', '时事']}}
# Manual smoke run: crawl the cartoon listing with 6 plain threads.
if __name__ == '__main__':
    print 'start'
    spider = SpiderIqiyi(worknum=6, queuetype='P', worktype='THREAD')
    spider.fetchDatas('www', 0, 'http://list.iqiyi.com/www/4/-------------4-1-1-iqiyi--.html')
    spider.statistic()
    print 'end'
| {"/setup.py": ["/ask/__init__.py"]} |
56,712 | spawn3/python-util | refs/heads/master | /ask/gbase/parse_yml.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from pprint import pprint
import yaml
# pip install PyYAML
def parse_gbase_yml(fname='gbase.yml'):
    """Parse a YAML file and pretty-print the resulting object.

    Generalized: the file name is now a parameter; the default keeps the
    original hard-coded 'gbase.yml' behavior.
    """
    with open(fname) as f:
        # safe_load: bare yaml.load without an explicit Loader is deprecated
        # and can construct arbitrary Python objects from untrusted input.
        pprint(yaml.safe_load(f))

if __name__ == '__main__':
    parse_gbase_yml()
| {"/setup.py": ["/ask/__init__.py"]} |
56,713 | spawn3/python-util | refs/heads/master | /ask/gbase/gbase.py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
import json
def read_config(fname='gbase_config.json'):
    """Read *fname* as JSON and return the parsed object.

    Fix: the parsed data used to be printed and then dropped (implicit
    None return), although the callers below bind its result.
    """
    with open(fname) as f:
        data = json.load(f)
        # debug trace; same output as the old py2 "print type(data), data"
        print('%s %s' % (type(data), data))
        return data

def init():
    """Initialize GBase from the parsed config (stub)."""
    conf = read_config()  # fix: was read_conf(), an undefined name
    pass

def resize():
    """Resize the GBase cluster from the parsed config (stub)."""
    conf = read_config()  # fix: was read_conf(), an undefined name
    pass

def check_ready():
    """Check cluster readiness from the parsed config (stub)."""
    conf = read_config()  # fix: was read_conf(), an undefined name
    pass

if __name__ == '__main__':
    read_config()
| {"/setup.py": ["/ask/__init__.py"]} |
56,714 | spawn3/python-util | refs/heads/master | /spider/gdc/test.py | #!/usr/bin/env python
# coding=utf-8
from task.audio.spiderMengfou import SpiderMoe
from task.audio.spiderWangyi import Spider163
from task.comic.spiderManjia import SpiderDmzj
from task.video.spiderAcfun import SpiderAcfun
from task.video.spiderAiqiyi import SpiderIqiyi
from task.video.spiderBili import SpiderBilibili
from task.video.spiderYouku import SpiderYouku
# Manual driver: run one spider at a time; the commented blocks below are
# ready-made invocations for the other sources, kept for quick switching.
if __name__ == '__main__':
    spider = SpiderMoe(worknum=2, queuetype='R', worktype='THREAD')
    spider.fetchDatas('www', 0, 'http://api.moefou.org/wikis.json?wiki_type=music&initial=&tag=&wiki_id=&api_key={{api_key}}&page=1')
    spider.statistic()
    # spider = Spider163(worknum=2, queuetype='R', worktype='THREAD')
    # spider.fetchDatas('www', 0)
    # spider.statistic()
    # spider = SpiderDmzj(worknum=2, queuetype='R', worktype='THREAD')
    # spider.fetchDatas('www', 0, 'http://m.dmzj.com/classify.html')
    # spider.statistic()
    # spider = SpiderAcfun(worknum=2, queuetype='R', worktype='THREAD')
    # spider.fetchDatas('album', 0, 'http://www.acfun.tv/')
    # spider.statistic()
    # spider = SpiderIqiyi(worknum=2, queuetype='R', worktype='THREAD')
    # spider.fetchDatas('www', 0, 'http://list.iqiyi.com/www/4/-------------4-1-1-iqiyi--.html')
    # spider.statistic()
    # spider = SpiderBilibili(worknum=2, queuetype='R', worktype='THREAD')
    # spider.fetchDatas('www', 0, 'http://www.bilibili.com/html/js/types.json')
    # spider.statistic()
    # spider = SpiderYouku(worknum=2, queuetype='R', worktype='THREAD')
    # spider.fetchDatas('www', 0)
    # spider.statistic()
56,715 | spawn3/python-util | refs/heads/master | /ask/exc.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
# Maps exception class name -> human-readable description used as the base
# of the exception message.
DEFS = {
    'A': 'A desc',
    'B': 'B desc',
}

class A(Exception):
    """Base demo exception.

    The message is the DEFS entry for the concrete class's name; any keyword
    arguments are appended as a JSON object ("A desc: {...}").
    """
    def __init__(self, **kw):
        cls_name = self.__class__.__name__
        # parenthesized so the trace works identically on Python 2 and 3
        print('Enter %s.__init__' % cls_name)
        # Fix: the original re-used one variable `name` for both the class
        # name and the looked-up description, obscuring the flow.
        desc = DEFS.get(cls_name, '')
        if kw:
            desc = '%s: %s' % (desc, json.dumps(kw))
        super(A, self).__init__(desc)

class B(A):
    """Demo subclass: inherits A's DEFS-based message ('B desc')."""
    pass

if __name__ == '__main__':
    raise A(m=1, n=2)
    # NOTE(review): unreachable — the raise above always throws, so B is
    # never exercised here.
    raise B(m=1, n=2)
| {"/setup.py": ["/ask/__init__.py"]} |
56,716 | marcosborges/pyctuator | refs/heads/master | /examples/tornado/tornado_example_app.py | import datetime
import logging
import random
from tornado import ioloop
from tornado.httpserver import HTTPServer
from tornado.web import Application, RequestHandler
from pyctuator.pyctuator import Pyctuator
my_logger = logging.getLogger("example")
class HomeHandler(RequestHandler):
    """Root endpoint: logs a debug line and returns a fixed greeting."""
    def get(self):
        # Log a timestamp plus a random 0-100 value so SBA's log viewer
        # always has fresh content to display.
        my_logger.debug(f"{datetime.datetime.now()} - {str(random.randint(0, 100))}")
        self.write("Hello World!")

app = Application(
    [
        (r"/", HomeHandler)
    ],
    debug=False
)

# host.docker.internal lets a Spring Boot Admin server running inside Docker
# reach this app on the host; the SBA server itself is addressed via localhost.
example_app_address = "host.docker.internal"
example_sba_address = "localhost"

# Attach the actuator endpoints and register with Spring Boot Admin.
Pyctuator(
    app,
    "Tornado Pyctuator",
    app_url=f"http://{example_app_address}:5000",
    pyctuator_endpoint_url=f"http://{example_app_address}:5000/pyctuator",
    registration_url=f"http://{example_sba_address}:8080/instances",
    app_description="Demonstrate Spring Boot Admin Integration with Tornado",
)

# decompress_request=True transparently inflates gzip-compressed request bodies.
http_server = HTTPServer(app, decompress_request=True)
http_server.listen(5000)
ioloop.IOLoop.current().start()
| {"/tests/httptrace/test_http_header_scrubber.py": ["/pyctuator/httptrace/http_header_scrubber.py"], "/pyctuator/httptrace/http_tracer.py": ["/pyctuator/httptrace/http_header_scrubber.py"]} |
56,717 | marcosborges/pyctuator | refs/heads/master | /tests/httptrace/test_http_header_scrubber.py | import pytest
from pyctuator.httptrace.http_header_scrubber import scrub_header_value
@pytest.mark.parametrize("key,value", [
("Authorization", "Bearer 123"),
("authorization", "Bearer 123"),
("X-Csrf-Token", "foo"),
("authentication", "secret123"),
("COOKIE", "my-logged-in-session")
])
def test_scrubbing(key: str, value: str) -> None:
assert scrub_header_value(key, value) == "******"
@pytest.mark.parametrize("key,value", [("Host", "example.org"), ("Content-Length", "2000")])
def test_non_scrubbing(key: str, value: str) -> None:
assert scrub_header_value(key, value) == value
| {"/tests/httptrace/test_http_header_scrubber.py": ["/pyctuator/httptrace/http_header_scrubber.py"], "/pyctuator/httptrace/http_tracer.py": ["/pyctuator/httptrace/http_header_scrubber.py"]} |
56,718 | marcosborges/pyctuator | refs/heads/master | /pyctuator/httptrace/http_tracer.py | import collections
from typing import List, Mapping
from pyctuator.httptrace.http_header_scrubber import scrub_header_value
from pyctuator.httptrace import Traces, TraceRecord
class HttpTracer:
    """Keeps a bounded, in-memory ring of the most recent HTTP traces."""

    def __init__(self) -> None:
        # deque(maxlen=100): appending the 101st trace silently drops the oldest.
        self.traces_list: collections.deque = collections.deque(maxlen=100)

    def get_httptrace(self) -> Traces:
        """Snapshot the current ring as a Traces value."""
        snapshot = list(self.traces_list)
        return Traces(snapshot)

    def add_record(self, record: TraceRecord) -> None:
        """Scrub sensitive header values on both sides, then retain the record."""
        for message in (record.request, record.response):
            message.headers = self._scrub_and_normalize_headers(message.headers)
        self.traces_list.append(record)

    def _scrub_and_normalize_headers(self, headers: Mapping[str, List[str]]) -> Mapping[str, List[str]]:
        scrubbed = {}
        for name, values in headers.items():
            scrubbed[name] = [scrub_header_value(name, value) for value in values]
        return scrubbed
| {"/tests/httptrace/test_http_header_scrubber.py": ["/pyctuator/httptrace/http_header_scrubber.py"], "/pyctuator/httptrace/http_tracer.py": ["/pyctuator/httptrace/http_header_scrubber.py"]} |
56,719 | marcosborges/pyctuator | refs/heads/master | /tests/httptrace/test_tornado_pyctuator.py | from tornado.httputil import HTTPHeaders
from pyctuator.impl.tornado_pyctuator import get_headers
def test_get_headers() -> None:
    """Repeated headers (Set-Cookie) must be grouped into one list per name,
    with names normalized to lower case."""
    tornado_headers = HTTPHeaders({"content-type": "text/html"})
    tornado_headers.add("Set-Cookie", "A=B")
    tornado_headers.add("Set-Cookie", "C=D")
    assert get_headers(tornado_headers) == {
        "content-type": ["text/html"],
        "set-cookie": ["A=B", "C=D"]
    }
| {"/tests/httptrace/test_http_header_scrubber.py": ["/pyctuator/httptrace/http_header_scrubber.py"], "/pyctuator/httptrace/http_tracer.py": ["/pyctuator/httptrace/http_header_scrubber.py"]} |
56,720 | marcosborges/pyctuator | refs/heads/master | /pyctuator/httptrace/http_header_scrubber.py | import re
# Compiled once at import time; matches header names whose values must never
# be echoed back (credentials, sessions, secrets).  The first alternative
# matches "key" only as a standalone word (non-letter boundaries).
_keys_to_scrub = re.compile(
    "^(.*[^A-Za-z])?key([^A-Za-z].*)?$|"
    ".*secret.*|"
    ".*password.*|"
    ".*token.*|"
    ".*authorization.*|"
    ".*authentication.*|"
    ".*cookie.*",
    re.IGNORECASE
)


def scrub_header_value(key: str, value: str) -> str:
    """Return *value* unchanged, or the mask "******" when *key* is sensitive."""
    is_sensitive = _keys_to_scrub.match(key) is not None
    return "******" if is_sensitive else value
| {"/tests/httptrace/test_http_header_scrubber.py": ["/pyctuator/httptrace/http_header_scrubber.py"], "/pyctuator/httptrace/http_tracer.py": ["/pyctuator/httptrace/http_header_scrubber.py"]} |
56,725 | Nurchik/tamchy | refs/heads/master | /Buffer.py | # -*- coding:utf-8 -*-
from time import time
from struct import pack,unpack
import io,logging,sqlite3,os
from os.path import exists,isfile,isdir
from collections import deque
from threading import Lock
# Max entries held in StreamBuffer's in-memory dict before spilling to storage.
CACHING_TIME = 10
# Max entries kept in secondary storage before FIFO eviction.
# NOTE(review): the old comment said "10 minutes" but 5 * 60 is 300 s (5 min).
BUFFERING_TIME = 5 * 60
MIN_SIZE = 512
# positions wrap here because struct's max for "!H" (unsigned short) = 65535
# (the old comment said "!I", but get_pos packs with '!H')
COUNTER = 2 ** 16 - 1
#class storage:
# def __init__(self):
# self.temp = []
# self.store = []
#
# def put(self,el):
# store = self.store
# if el not in store:
# if not store:
# store.append(el)
# store.sort()
# return
# # последовательное значение
# if ((pos - 1) == store[-1]) or ((pos + 1) == store[0]):
# store.append(el)
# store.sort()
# else:
# self.temp.append(el)
#
# def get(self,el):
# pass
#
# def __len__(self):
# pass
'''
This is a simple DB-backed temporary-file implementation.
Why a DB? Because I'm lazy :)
Seriously, the temporary file could be implemented on top of almost anything
(a plain file, memcached, cloud storage, ...) in the future, but this
implementation was the easiest to develop and work with, so I chose it for
the first version.
'''
class DBStorage:
    """sqlite3-backed temporary key/value store for stream pieces.

    Keys are integer stream positions, values are raw piece data.  A deque of
    known positions is kept in memory so membership tests (__contains__),
    __len__ and FIFO eviction (get_min) never have to touch the database.

    Fixes over the original: every operation opened a sqlite3 connection and
    never closed it (leak); bare ``except:`` clauses narrowed; no connection
    is opened any more when the membership pre-check already answers.

    NOTE(review): the table name is interpolated into the SQL text — safe
    only while content_id is an internal, trusted identifier.
    """

    def __init__(self, content_id, close_function):
        self.c_id = content_id
        # invoked when the backing file cannot be (re)created
        self.close_function = close_function
        self.positions = deque()
        self.lock = Lock()
        # create (or recreate) the backing table
        self._init()

    def _connect(self):
        """Open a connection configured to pass byte strings through unchanged."""
        db = sqlite3.connect(self.c_id)
        # by default sqlite3 refuses non-unicode text parameters
        db.text_factory = str
        return db

    def _init(self):
        # A stale file from a previous run can confuse sqlite3 -> remove it;
        # if removal fails we cannot operate, so shut the container down.
        if exists(self.c_id):
            try:
                os.remove(self.c_id)
            except Exception:
                self.close_function()
        db = sqlite3.connect(self.c_id)
        try:
            try:
                db.execute('create table "{0}" (pos int primary key,data text) '.format(self.c_id))
            except Exception:
                # table survived somehow: rebuild it from scratch
                db.execute('drop table "{0}"'.format(self.c_id))
                db.execute('create table "{0}" (pos int primary key,data text) '.format(self.c_id))
            db.commit()
        finally:
            # the original leaked this connection; close it explicitly
            db.close()

    def get_min(self):
        """Pop and return the oldest stored position, or None when empty.

        Used to pick the piece to evict first (FIFO order of insertion).
        """
        with self.lock:
            try:
                return self.positions.popleft()
            except IndexError:
                return None

    def __setitem__(self, key, value):
        db = self._connect()
        try:
            if key not in self.positions:
                # new position -> remember it and insert the row
                with self.lock:
                    self.positions.append(key)
                db.execute('insert into "{0}" values (?,?)'.format(self.c_id), (key, value))
            else:
                # known position -> overwrite its data
                db.execute('update "{0}" set data=? where pos=?'.format(self.c_id), (value, key))
            db.commit()
        finally:
            db.close()

    def __getitem__(self, key):
        # unknown positions read as the empty string, never KeyError
        if key not in self.positions:
            return ''
        db = self._connect()
        try:
            return db.execute('select data from "{0}" where pos=?'.format(self.c_id), (key,)).fetchone()[0]
        finally:
            db.close()

    def __delitem__(self, key):
        # deleting an unknown position is a silent no-op
        if key not in self.positions:
            return
        db = self._connect()
        try:
            db.execute('delete from "{0}" where pos=?'.format(self.c_id), (key,))
            db.commit()
        finally:
            db.close()
        with self.lock:
            try:
                # may already be gone if get_min() raced us
                self.positions.remove(key)
            except ValueError:
                pass

    def __len__(self):
        return len(self.positions)

    def __contains__(self, item):
        return item in self.positions

    def close(self):
        """Delete the backing temp file (best effort)."""
        try:
            os.remove(self.c_id)
        except OSError:
            pass
class StreamBuffer:
    """Two-tier buffer for a live stream: a small in-memory dict of the
    hottest pieces backed by a DBStorage spill area, plus generators that
    feed the local player."""

    def __init__(self, content_id, close_function, tell_have):
        self.logger = logging.getLogger('tamchy.Buffer')
        # current playback/stream position (wraps at COUNTER)
        self.pos = 0
        # callback used to announce newly received pieces to peers
        self.tell_have = tell_have
        self.buffer = {}
        self.inited = False
        # There can be any storage
        self.storage = DBStorage(content_id, close_function)
        # in this dict will be stored pos and % of completion
        # we will need it later when piece will not be done yet for streaming with get_stream()
        self.pieces = {}

    def get_pos(self):
        # position serialized as a 2-byte big-endian unsigned short
        return pack('!H', self.pos)

    def got_pos(self):
        # setting position is a signal to start buffering stream and to stream it on our machine
        if not self.inited:
            self.inited = True

    def put(self, t, data):
        """Store piece *t*; when the hot cache is full, spill its smallest
        key to storage first, then announce the piece via tell_have."""
        if len(self.buffer) == CACHING_TIME:
            # NOTE(review): min() eviction ignores COUNTER wraparound —
            # confirm intended behavior near position 65535.
            t1 = min(self.buffer.keys())
            d = self.buffer.pop(t1)
            self._put(t1, d)
        try:
            self.buffer[t] = data
        except:
            pass
        self.tell_have(t)

    def get(self, t):
        # hot cache first, then the spill storage ('' when absent there too)
        if t not in self.buffer.keys():
            return self._get(t)
        return self.buffer[t]

    def _put(self, t, data):
        # spill to storage; keep the storage bounded by FIFO eviction
        self.storage[t] = data
        if len(self.storage) == BUFFERING_TIME:
            pos = self.storage.get_min()
            if pos is not None:
                del self.storage[pos]

    def _get(self, t):
        return self.storage[t]

    def __contains__(self, pos):
        # NOTE(review): only consults the spill storage, not the hot
        # buffer — confirm callers expect that.
        return pos in self.storage

    '''
    buf_sec -> number of seconds to buffer before yelding data of stream
    but until buf_sec not reached, this function will send to consumer information about percentage of buffered data
    and after reaching -> continious data of stream until program's closing
    When new piece is put to buffer -> this function will be informated about it
    '''
    def get_stream_future(self, buf_sec):
        """Generator: status strings while connecting/prebuffering, then an
        endless sequence of piece data (with 'Buffering: N%' gap fillers)."""
        buf_sec = float(buf_sec)
        while not self.inited:
            yield 'Connecting to Peers'
        pos = self.pos
        while True:
            p = int((self.pos - pos) / buf_sec * 100)
            if p >= 100:
                break
            # number is a percent of completion of prebuffering
            yield 'Prebuffering: {0}%'.format(p)
        while True:
            d = self.get(pos)
            if not d:
                # buffering percentage
                # if piece not in self.pieces -> return 0.0
                yield 'Buffering: {0}%'.format(self.pieces.get(pos, 0))
                continue
            # else 0, because we need to reset counter if everything is OK -> just add up 1
            pos = pos + 1 if pos < COUNTER else 0
            yield d

    def get_stream(self, buf_sec):
        """Like get_stream_future but silent: yields None instead of status
        strings until real piece data is available."""
        buf_sec = float(buf_sec)
        while not self.inited:
            yield None
        pos = self.pos
        while True:
            d = self.get(pos)
            if not d:
                yield None
                continue
            # else 0, because we need to reset counter if everything is OK -> just add up 1
            pos = pos + 1 if pos < COUNTER else 0
            yield d

    def close(self):
        # drop the hot cache and delete the spill file
        self.buffer = {}
        self.storage.close()
        self.logger.info('File buffer flushed and closed')
# Testing
# Testing
def test_storage():
    """Exercise DBStorage CRUD against a direct sqlite3 connection."""
    s = DBStorage('content_id1', lambda: 'i')
    db = sqlite3.connect('content_id1')
    assert not s.positions
    data = pack('!III', 1, 2, 3)
    s[1] = data
    data1 = pack('!III', 2, 2, 3)
    data2 = pack('!III', 3, 2, 3)
    data3 = pack('!III', 4, 2, 3)
    s[2] = data1
    s[3] = data2
    s[4] = data3
    assert 1 in s.positions
    # the row must be visible through an independent connection
    d = db.execute('select data from "content_id1" where pos=1').fetchone()[0]
    assert d == data
    assert s[1] == data
    data = pack('!III', 4, 2, 1)
    assert len(s) == 4
    # overwriting an existing position must not grow the store
    s[1] = data
    assert len(s) == 4
    assert s[1] == data
    # unknown positions read as '' and delete as a no-op
    assert s[5] == ''
    del s[5]
    assert len(s) == 4
    del s[1]
    assert len(s) == 3
    d = db.execute('select data from "content_id1" where pos=1').fetchone()
    assert d == None
    # FIFO order: 1 was removed, so the oldest remaining position is 2
    assert s.get_min() == 2

def test():
    """Hot-cache behavior: the in-memory buffer is capped at CACHING_TIME
    entries and evicts its smallest key on overflow."""
    s = StreamBuffer('iaudan', 'close', lambda x: x)
    for i in range(10):
        if i != 2:
            s.put(i, 'data' + str(i))
        else:
            s.put(i, 'longdata2')
    assert len(s.buffer) == 10
    s.put(10, 'data10')
    assert len(s.buffer) == 10
    assert 0 not in s.buffer.keys()
#def test_get_stream():
# s = StreamBuffer('test_c_id')
# h = s.get_stream(4)
# assert h.send(None) == 'Connecting to Peers'
# assert h.send(None) == 'Connecting to Peers'
# assert h.send(None) == 'Connecting to Peers'
# s.inited = True
# s.pos = 4
# assert h.send(None) == 'Prebuffering: 0%'
# s.pos = 6
# assert h.send(None) == 'Prebuffering: 50%'
# s.pos = 8
# s.buffer = {4:'4data4',5:'5data5',8:'8data8'}
# s.pieces = {7:35}
# assert h.send(None) == '4data4'
# assert h.send(None) == '5data5'
# assert h.send(None) == 'Buffering: 0%'
# assert h.send(None) == 'Buffering: 0%'
# s.buffer[6] = '6data6'
# assert h.send(None) == '6data6'
# assert h.send(None) == 'Buffering: 35%'
# s.buffer[7] = '7data7'
# assert h.send(None) == '7data7'
# assert h.send(None) == '8data8'
# assert h.send(None) == 'Buffering: 0%'
# assert h.send(None) == 'Buffering: 0%'
# s.pieces[9] = 34
# assert h.send(None) == 'Buffering: 34%'
# s.pieces[9] = 68
# assert h.send(None) == 'Buffering: 68%'
# s.buffer[9] = '9data9'
# assert h.send(None) == '9data9'
| {"/Container.py": ["/Buffer.py", "/Peer.py", "/Server.py", "/messages.py", "/Input.py", "/Request.py"], "/Server.py": ["/Peer.py"], "/Peer.py": ["/Request.py"], "/Client.py": ["/messages.py", "/Reactor.py", "/Container.py", "/Server.py", "/upnp.py"], "/Request.py": ["/Peer.py"], "/tamchy.py": ["/Client.py"]} |
56,726 | Nurchik/tamchy | refs/heads/master | /markupsafe/models.py | from django.db import models
class UP(models.Model):
    """Username/password credential record."""
    # NOTE(review): password is stored as plain CharField text — consider
    # Django's auth framework / password hashers instead.
    username = models.CharField(max_length=30, unique=True, blank=False)
    password = models.CharField(max_length=30, blank=False)

class URLS(models.Model):
    """A URL registered by a user, valid until *expire*."""
    user = models.ForeignKey(UP)
    url = models.URLField(unique=True)
    expire = models.DateTimeField()
# email = models.EmailField(max_length=50, unique=True)
# password = models.CharField(max_length=30)
#
#class Profile(models.Model):
# username = models.ForeignKey('User')
# | {"/Container.py": ["/Buffer.py", "/Peer.py", "/Server.py", "/messages.py", "/Input.py", "/Request.py"], "/Server.py": ["/Peer.py"], "/Peer.py": ["/Request.py"], "/Client.py": ["/messages.py", "/Reactor.py", "/Container.py", "/Server.py", "/upnp.py"], "/Request.py": ["/Peer.py"], "/tamchy.py": ["/Client.py"]} |
56,727 | Nurchik/tamchy | refs/heads/master | /Container.py | # -*- coding:utf-8 -*-
import sqlite3
import threading
import urllib2
import socket
import logging
import select
import pickle
from Buffer import StreamBuffer
from Peer import Peer
from Server import Server,encode,decode
import messages
from time import time,sleep
from struct import pack,unpack
from StringIO import StringIO
import Input
from Request import Request
DEFAULT_BUFFERING_SIZE = 128 * 1024
WRITE_WAIT_TIMEOUT = 15.0
READ_WAIT_TIMEOUT = 15.0
SOCKET_TIMEOUT = 20.0
STREAM_PIECE_SIZE = 16384
COUNTER = (2 ** 16) - 1
'''
:source is a http source for video to stream
'''
class StreamContainer:
    """Owns one stream (keyed by ``content_id``): wires together the stream
    buffer, the shared peer storage, the HTTP source (server mode) and the
    request-scheduling main loop, and mirrors known peers into a SQLite table.

    NOTE(review): several methods call ``struct.pack``/``struct.unpack`` but
    the file only does ``from struct import pack,unpack`` — those call sites
    would raise NameError at runtime; confirm against the original repo.
    """
    def __init__(self,Client,PStorage,info,port,max_connections=50,is_server=False,source='',bitrate=0,ext_ip='',debug=False):
        """
        :param Client: owning client; notified via close_container() on close
        :param PStorage: shared peer storage (add / can_add_peer / get_peers)
        :param info: stream descriptor dict (content_id, name, chunk_length,
                     ip/port of the server, optional nodes list)
        :param port: local listening port advertised in handshakes
        :param is_server: True when this node originates the stream itself
        :param source: HTTP source URL (server mode only)
        :param debug: skip all network setup (used by tests)
        """
        self.logger = logging.getLogger('tamchy.StreamContainer')
        #threading.Thread.__init__(self)
        #self.daemon = True
        self.work = True
        self.paused = False
        self.info = info
        self.ext_ip = ext_ip
        self.port = port
        self.max_connections = max_connections
        self.is_server = is_server
        self.bitrate = bitrate
        self.Client = Client
        self.PStorage = PStorage
        self.debug = debug
        # stream buffer; it calls back into close()/tell_have() on events
        self.B = StreamBuffer(info['content_id'],self.close,self.tell_have)
        self.lock = threading.Lock()
        self.name = info['name']
        self.content_id = info['content_id']
        self.chunk_length = info['chunk_length']
        self.peers = []
        # Requests that were handed back (timeout) and await re-dispatch.
        self.requests = []
        self.logger.debug('StreamContainer (%s) created' % (self.content_id))
        # after receiving peers list, connection with server will be used in requesting stream
        # but program will not send any statistics to the server any more
        self.add_new_stream()
        if not debug:
            if not is_server:
                self.source = ''
                server = self.connect_server(info['ip'],info['port'])
                if server is not None:
                    # adopt the server's stream position and piece size
                    self.pos = server.pos
                    self.piece_length = server.piece_length
                    self.PStorage.add(server)
                else:
                    self.close()
                    raise Exception('Cannot Connect to Server')
            else:
                # this is source's initialisation -> we should check if it works
                self.source = self.select_for(source,self.chunk_length)
                self.pos = 0
                self.piece_length = self.source.piece_length

    #def get_piece_length(self):
    #    return struct.pack('!I',self.piece_length)

    def build_handshake(self):
        """Build the length-prefixed 'Salamatsyzby' handshake; the server
        variant additionally carries its current position and piece length."""
        if self.is_server:
            # NOTE(review): struct.pack here vs the bare pack() import — see class note
            handshake = 'Salamatsyzby' + self.content_id + struct.pack('!H',self.port) + pack('!HI',self.pos,self.piece_length)
        else:
            handshake = 'Salamatsyzby' + self.content_id + struct.pack('!H',self.port)
        handshake = struct.pack('!I',len(handshake)) + handshake
        return handshake

    def connect_server(self,ip,port,node=False):
        """Open a Peer connection to the stream's server (or a relay node),
        perform the handshake and the initial peer-list request.

        Returns the connected Peer or None on any failure.
        NOTE(review): ``self.peer_id`` is never assigned in this class, and
        ``self.Buffer`` looks like it should be ``self.B`` — confirm upstream.
        """
        handshake = 'Salamatsyzby' + self.content_id + self.peer_id + struct.pack('!H',self.port)
        handshake = pack('!I',len(handshake)) + handshake
        stream_data = {'content_id':self.content_id,'chunk_length':0,'piece_length':0, 'handshake':handshake}
        p = Peer(stream_data,self,self.Buffer,ip=ip,port=port,server=True,node=node)
        if p.socket is not None:
            # wait until the socket becomes writable, then push our request
            r,w,e = select.select([p],[p],[p],SOCKET_TIMEOUT)
            if not w:
                self.logger.error('Server (%s:%s) is not available' % (ip,port))
                return
            p.request_peers()
            p.handle_write()
            # wait for the server's handshake/peer-list response
            r,w,e = select.select([p],[p],[p],SOCKET_TIMEOUT)
            if not r:
                self.logger.error('Server (%s:%s) does not respond' % (ip,port))
                return
            p.handle_read()
            if not p.handshaked:
                self.logger.error('Could not connect to Server (%s:%s)' % (ip,port))
                return
            self.logger.info('Connection with Server (%s:%s) established' % (ip,port))
            return p
        else:
            self.logger.error('Server (%s:%s) is not available' % (ip,port))
            return
        #s = socket.socket()
        #s.settimeout(SOCKET_TIMEOUT)
        #try:
        #    s.connect((ip,port))
        #except (socket.error,socket.timeout) as e:
        #    self.logger.error('%s - %s' % (e.errno,e.message))
        #    return
        #handshake = 'Salamatsyzby' + self.content_id + self.peer_id + struct.pack('!H',self.port) + pack('!HI',0,0)
        #handshake = pack('!I',len(handshake)) + handshake
        #msg = GET_PEERS + pack('!B',35)
        #msg = pack('!I',len(msg)) + msg
        #s.send(handshake)
        #s.send(msg)
        #try:
        #    data = s.read(4096)
        #except (socket.error,socket.timeout) as e:
        #    self.logger.error('%s - %s' % (e.errno,e.message))
        #    return
        #while data:
        #    length = unpack('!I',data[:4])[0]
        #    if length >

    def select_for(self,source,chunk_length):
        """Pick an Input implementation from the source URL scheme.

        Only 'http' is supported; raises and closes the container otherwise.
        """
        s = source.split(':')[0]
        if s == 'http':
            i = Input.HTTPInput(source,chunk_length)
            if i.con == None:
                self.close()
                raise Exception('Cannot connect to source')
            # we must close connection to source until we need it later to prevent source's buffer overflow
            i.con.close()
            return i
        else:
            self.close()
            raise Exception('Cannot Recognize Input source')

    def prepare_peers(self,peers):
        """Decode a packed peer list (6 bytes each: 4 ip + 2 port) and open a
        connection to every entry that is not ourselves, respecting limits."""
        for i in xrange(len(peers)/6):
            raw = peers[0+i*6:6+i*6]
            ip,port = decode(ip=raw[0:4],port=raw[4:6])
            # skip our own advertised endpoint
            if ip == self.ext_ip and port == self.port:
                continue
            if self.PStorage.can_add_peer(content_id=self.content_id):
                self.prepare_peer(ip=ip,port=port)

    def prepare_peer(self,ip,port,sock=None,buf=''):
        """Create a Peer for (ip, port) — reusing an accepted socket when
        given — record it in the DB and register it in PStorage."""
        stream_data = {'content_id':self.content_id,'chunk_length':self.chunk_length,'piece_length':self.piece_length, 'handshake':self.build_handshake()}
        if sock is None:
            #ip = '.'.join([str(x) for x in unpack('!BBBB',ip)])
            #port = unpack('!H',port)[0]
            p = Peer(stream_data,self,self.B,ip=ip,port=port,buf=buf)
        else:
            p = Peer(stream_data,self,self.B,ip=ip,port=port,sock=sock,buf=buf)
        self.add_new_peer(ip,port)
        self.PStorage.add(p)
        #if self.paused:
        #    t = threading.Thread(target=self.run)
        #    t.daemon = True
        #    t.start()
        #    self.paused = False

    def get_file(self):
        """Return the pickled stream descriptor as a file-like object."""
        return StringIO(pickle.dumps(self.info))

    def get_seconds(self,peer):
        """Map a peer's upload speed to how many seconds of stream to request
        from it at once (faster peers get larger batches)."""
        # for connection to the server we need request not more than 3 seconds at once,
        # to eliminate server overloading
        if peer.server:
            return 3
        speed = peer.upload_speed
        if speed < 100:
            return 3
        if speed >= 100 and speed < 300:
            return 5
        if speed >= 300 and speed < 500:
            return 8
        if speed >= 500 and speed < 750:
            return 12
        if speed >= 750 and speed < 1024:
            return 15
        if speed >= 1024 and speed < 2048:
            return 20
        if speed >= 2048 and speed < 3072:
            return 25
        if speed >= 3072 and speed < 4096:
            return 30
        if speed >= 4096 and speed < 5120:
            return 40
        if speed >= 5120:
            return 60

    def connect_nodes(self):
        """Connect to every relay node listed in the stream descriptor."""
        for ip,port in self.info['nodes']:
            node = self.connect_server(ip,port,node=True)
            if node:
                self.PStorage.add(node)

    def run(self):
        """Main loop.  Server mode: pump pieces from the HTTP source into the
        buffer (reconnecting up to 3 times).  Client mode: schedule stream
        requests across peers ordered by upload speed, retrying timed-out
        requests and sending keep-alives."""
        if self.is_server:
            source = self.source
            # This is done to be able to watch http-stream in server machine
            # maybe it's not necessary but will be good )
            self.B.inited = True
            recon_tried = 0
            source.connect()
            #i = 0
            while self.work:
                d = source.read()
                # there is a problem with source
                if not d:
                    # we must reconnect 3 times before closing Container
                    if recon_tried < 3:
                        source.reconnect()
                        recon_tried += 1
                        continue
                    else:
                        self.close()
                        self.logger.error('Cannot fetch data from source')
                        break
                # why len(d) + 4 ? Because we must to take into account 4 bytes of length
                # and if we didn't do that, everytime a piece transferred, trailing 4 bytes of data
                # will not be recieved by peer
                #d = pack('!I',len(d) + 4) + d
                self.B.put(self.pos,d)
                # i.e. 65535
                if self.pos == COUNTER:
                    self.pos = 0
                else:
                    self.pos += 1
            # exiting
            source.close()
        else:
            # starting connection to nodes
            if self.info.get('nodes',[]):
                t = threading.Thread(target=self.connect_nodes)
                t.daemon = True
                t.start()
            self.logger.info('Started main loop')
            pos = self.pos
            while self.work:
                # selecting only handshaked peers
                peers = self.PStorage.get_peers(self.content_id)
                # if there are no peers - pause working
                # the work will be resumed by self.prepare_peer
                #if not peers:
                #    self.paused = True
                #    return
                for peer in sorted(peers,key=lambda x : x.upload_speed,reverse=True):
                    if peer.need_keep_alive:
                        peer.send_keep_alive()
                    # peer doesn't have pending requests
                    elif peer.can_request:
                        c = True
                        # selecting only idle requests
                        for request in self.requests:
                            # if peer have first piece we will request that piece
                            # in hope that other pieces will arrive to the peer while sending
                            # first one
                            if peer.have(request.pos):
                                self.logger.debug('Retry Request %s' % (request))
                                self.requests.remove(request)
                                peer.request_stream(request)
                                c = False
                                break
                        # because we have to request only one "request"
                        if not c:
                            continue
                        # if there are no appropriate request in self.requests
                        # or there are no requests in self.requests => try to request stream by position
                        elif peer.have(pos):
                            s = self.get_seconds(peer)
                            request = Request(self.B,self.piece_length,self.chunk_length,pos,s)
                            peer.request_stream(request)
                            # peer.request_stream(pos,STREAM_PIECE_SIZE,s)
                            pos = pos + s
                            # because we reset our counter after COUNTER =>
                            # if we have pos = 65536 and COUNTER = 65536 => new pos will be
                            # equal to pos = 0, because pos = 65536 will not be appropriate for struct
                            # format '!I' (max is 65535, you can check it!)
                            if pos >= COUNTER:
                                pos = pos - COUNTER
                    else:
                        # peer has job in queue, but there is a timeout
                        if peer.request_timeout:
                            self.logger.debug('Request timeout of peer (%s) has reached. Retrying request' % (peer))
                            self.requests.extend(peer.return_requests())
                if self.debug:
                    break
            # NOTE(review): nesting inferred as while/else (runs only when the
            # loop ends without break) — confirm against the original file.
            else:
                self.close()
                self.logger.error('Cannot connect to server')
            self.logger.debug('Main loop stopped')

    def return_reqs(self,reqs):
        """Take back requests from a dying peer for later re-dispatch."""
        self.requests.extend(reqs)

    #def set(self,pos,piece_length):
    #    if self.pos is None:
    #        #self.start_pos = pos
    #        self.B.got_pos()
    #        self.pos = pos
    #        self.piece_length = piece_length
    #        # starting work
    #        t = threading.Thread(target=self.run)
    #        t.daemon = True
    #        t.start()

    def tell_have(self,t):
        """Broadcast a HAVE notification to every handshaked peer."""
        for peer in self.PStorage.get_peers(self.content_id):
            # don't send HAVE to server because it won't need this
            if peer.handshaked and not peer.server:
                peer.send_have(t)

    #----------------------------------- DB methods ----------------------------
    def add_new_stream(self):
        """(Re)create this stream's peer table in the SQLite DB."""
        db = sqlite3.connect('DataBase.db')
        try:
            db.execute('create table \"'+self.content_id+'\" (id integer primary key,ip text unique,port int)')
            db.commit()
            self.logger.debug('Table for stream (' + self.content_id + ') created in DB')
        except sqlite3.OperationalError:
            # table already exists from a previous run — start fresh
            db.execute('drop table \"'+self.content_id+'\"')
            self.logger.debug('Previous table for stream (' + self.content_id + ') removed from DB')
            db.execute('create table \"'+self.content_id+'\" (id integer primary key,ip text unique,port int)')
            self.logger.debug('Table for stream (' + self.content_id + ') created in DB')
        db.close()

    def remove_stream(self):
        """Drop this stream's peer table (best-effort)."""
        db = sqlite3.connect('DataBase.db')
        try:
            # if exists
            db.execute('drop table \"'+self.content_id+'\"')
        except:
            pass
        self.logger.debug('Table for stream (' + self.content_id + ') removed from DB')
        db.close()

    def add_new_peer(self,ip,port):
        """Store an encoded (ip, port) pair in the stream's peer table.

        NOTE(review): ``self.encode`` does not exist on this class — encode()
        is a module-level import from Server; confirm upstream.
        """
        db = sqlite3.connect('DataBase.db')
        db.text_factory = str
        ip,port = self.encode(ip=ip,port=port)
        try:
            db.execute('insert into \"'+self.content_id+'\" values(null,?,?)',(ip,port))
            db.commit()
            self.logger.info('Peer added to DB')
            self.logger.debug('Peer (%s:%s) added to DB' % (ip,port))
        # maybe ip is exist
        except sqlite3.IntegrityError:
            self.logger.info('Peer already exists')
        db.close()

    def delete_peer(self,ip):
        """Remove a peer row by its encoded ip (best-effort).

        NOTE(review): same ``self.encode`` concern as in add_new_peer().
        """
        db = sqlite3.connect('DataBase.db')
        db.text_factory = str
        ip = self.encode(ip=ip)
        try:
            db.execute('delete from \"'+self.content_id+'\" where ip=?',(ip,))
            db.commit()
            self.logger.info('Peer removed from DB')
            self.logger.debug('Peer (%s) removed from DB' % (ip))
        except sqlite3.OperationalError:
            self.logger.info('Peer does not exist')
        db.close()

    def get_peers(self,qty):
        """Return up to *qty* stored peers concatenated into one packed string.

        NOTE(review): ``ip + port`` concatenates a TEXT column with an INT
        column, which raises TypeError — confirm the intended port encoding.
        """
        db = sqlite3.connect('DataBase.db')
        db.text_factory = str
        # (Kyrgyz, translated): fetch the peers_list for this Content ID from
        # the database; syntax: [(peer_ip, port)]
        """
        Берилмелер Базасынан Content ID-ге ылайык peers_list тизмесин БЕРYY.
        syntax: [(peer_ip,port)]
        """
        try:
            peers = [(ip + port) for id,ip,port in db.execute('select * from \"'+self.content_id+'\"').fetchmany(size=qty)]
        except sqlite3.OperationalError:
            peers=[]
        peers_list = ''.join(peers)
        db.close()
        return peers_list

    #-------------------------------------------------------------------------------------------
    def close(self):
        """Stop the loop, drop DB state, notify peers, buffer and client."""
        self.work = False
        self.remove_stream()
        for peer in self.peers:
            peer.notify_closing()
        self.B.close()
        self.Client.close_container(self)
        self.logger.debug('StreamContainer (' + self.content_id + ') terminated')
# Testing
class PeeR:
    """In-memory stand-in for Peer used by test_run(): every piece of network
    behaviour is reduced to plain flags the test can toggle and inspect."""

    def __init__(self, id):
        # identity / addressing
        self.id = id
        self.ip = 'ip'
        self.port = 345
        # connection-state flags
        self.handshaked = False
        self.server = False
        self.closed = False
        # scheduling state
        self.upload_speed = 0
        self.req = []       # queued Request objects
        self.h = []         # HAVE bookkeeping (unused by the stub logic)
        self.p = []         # positions this peer pretends to have
        # behaviour switches the test flips directly
        self.need = False   # drives need_keep_alive
        self.ka = False     # records that a keep-alive was sent
        self.can = True     # drives can_request
        self.t = False      # drives timeout / request_timeout

    @property
    def timeout(self):
        return self.t

    @property
    def request_timeout(self):
        return self.t

    @property
    def can_request(self):
        return self.can

    @property
    def need_keep_alive(self):
        return self.need

    def send_keep_alive(self):
        self.ka = True

    def handle_close(self):
        self.closed = True

    def have(self, pos):
        return pos in self.p

    def request_stream(self, request):
        self.req.append(request)
        self.can = False

    def return_requests(self):
        pending, self.req = self.req, []
        return pending
class PStorage:
    """Stub peer storage for tests: serves one shared peer list and ignores
    the requested content id entirely."""

    def __init__(self):
        self.peers = []

    def get_peers(self, content_id):
        # content_id is deliberately ignored by the stub
        return self.peers
def g(i, p):
    """Test stub: accept any (i, p) pair and unconditionally report success."""
    return True
def test_run():
    """Exercise StreamContainer.run() in debug mode against PeeR stubs.

    Builds a PStorage full of stub peers with varying speeds/flags, runs one
    scheduler pass (debug=True makes run() break after a single loop), then
    checks which peers received requests, keep-alives and retries.
    """
    ps = PStorage()
    s = StreamContainer('client',ps,{'content_id':'content_id','ip':'123','port':'233','name':'wewf','chunk_length':12},6590,debug=True)
    #s.connect_server = g
    #s.start_pos = 65534
    # start near the 16-bit wrap point so position wrap-around is exercised
    s.pos = 65534
    s.piece_length = 17
    # p1: slow, timed out, no queued requests
    p1 = PeeR(1)
    p1.handshaked = True
    p1.upload_speed = 2
    p1.t = True
    # p2: busy with a queued request and timed out -> request gets returned
    p2 = PeeR(2)
    p2.handshaked = True
    p2.upload_speed = 6
    p2.can = False
    p2.req = [Request('buffer',16,2,2,12)]
    p2.t = True
    p3 = PeeR(3)
    p3.handshaked = True
    p3.p = [65535,65536,0,1,2]
    p4 = PeeR(4)
    p4.handshaked = True
    p4.upload_speed = 5
    p4.p = [2]
    # p5/p6: idle peers that need a keep-alive
    p5 = PeeR(5)
    p5.handshaked = True
    p5.upload_speed = 1
    p5.can = False
    p5.need = True
    p6 = PeeR(6)
    p6.handshaked = True
    p6.upload_speed = 7
    p6.can = False
    p6.need = True
    p7 = PeeR(7)
    p7.handshaked = True
    p7.p = [2]
    p7.upload_speed = 4
    p8 = PeeR(8)
    p8.handshaked = True
    p8.upload_speed = 8
    p8.p = [2]
    p9 = PeeR(9)
    p9.handshaked = True
    p9.upload_speed = 3
    p9.can = False
    p9.t = True
    # p10/p11: never handshaked -> must be ignored entirely
    p10 = PeeR(10)
    p11 = PeeR(11)
    # p12: fastest peer holding the current position 65534
    p12 = PeeR(12)
    p12.handshaked = True
    p12.upload_speed = 9
    p12.p = [65534]
    ps.peers = [p1,p2,p3,p4,p5,p6,p7,p8,p9,p10,p11,p12]
    s.run()
    assert p12.req[0].info == '(65534, -1, 3)'
    assert not p8.closed
    assert p8.req[0].info == '(2, -1, 3)'
    assert p6.ka
    assert not p2.req
    assert p4.req[0].info == '(2, -1, 12)'
    assert not p7.ka
    assert not p9.closed
    assert p5.ka
| {"/Container.py": ["/Buffer.py", "/Peer.py", "/Server.py", "/messages.py", "/Input.py", "/Request.py"], "/Server.py": ["/Peer.py"], "/Peer.py": ["/Request.py"], "/Client.py": ["/messages.py", "/Reactor.py", "/Container.py", "/Server.py", "/upnp.py"], "/Request.py": ["/Peer.py"], "/tamchy.py": ["/Client.py"]} |
# 56,728 | Nurchik/tamchy | refs/heads/master | /Reactor.py
import threading,select,logging
TIMEOUT = 0.5
class Reactor(threading.Thread):
    """Select-based event loop running on a daemon thread.

    Each iteration pulls the current peer list from PStorage, multiplexes it
    with select() and dispatches handle_read/handle_write/handle_close.
    """

    def __init__(self,PStorage):
        """:param PStorage: shared peer storage exposing a ``peers`` list."""
        self.logger = logging.getLogger('tamchy.Reactor')
        threading.Thread.__init__(self)
        self.daemon = True
        self.PStorage = PStorage
        # Fix: this list was never initialised (only a commented-out
        # assignment existed), so raw_add/raw_remove raised AttributeError.
        self.peers = []
        self.lock = threading.Lock()
        self.work = True

    # raw_methods is created just to avoid logging (when making tests)
    def raw_add(self,peer):
        self.peers.append(peer)

    def raw_remove(self,peer):
        self.peers.remove(peer)

    def run(self):
        """Loop until close(): select() over PStorage.peers and dispatch."""
        self.logger.info('Reactor started')
        while self.work:
            peers = self.PStorage.peers
            try:
                r,w,e = select.select(peers,peers,peers,TIMEOUT)
            except:
                # a peer's fileno() can fail while it is being torn down;
                # retry with a refreshed peer list
                continue
            for peer in r:
                peer.handle_read()
            for peer in w:
                peer.handle_write()
            for peer in e:
                peer.handle_close()

    def close(self):
        """Stop the loop; the thread exits after the current iteration."""
        self.work = False
        self.logger.info('Reactor terminated')
| {"/Container.py": ["/Buffer.py", "/Peer.py", "/Server.py", "/messages.py", "/Input.py", "/Request.py"], "/Server.py": ["/Peer.py"], "/Peer.py": ["/Request.py"], "/Client.py": ["/messages.py", "/Reactor.py", "/Container.py", "/Server.py", "/upnp.py"], "/Request.py": ["/Peer.py"], "/tamchy.py": ["/Client.py"]} |
# 56,729 | Nurchik/tamchy | refs/heads/master | /Input.py
from time import time
import urllib2
class Input:
    """Base class for stream sources.

    Subclasses implement connect()/read(); this class samples the source
    bitrate once at construction time to derive a piece length.
    """

    def __init__(self,source,chunk_length):
        """
        :param source: source address understood by the subclass
        :param chunk_length: size of one chunk; the piece length is always a
                             whole multiple of it (see calculate_piece_length)
        """
        self.source = source
        self.chunk_length = chunk_length
        self.con = self.connect()
        self.piece_length = self.calculate_piece_length()

    def connect(self):
        # subclass hook: return a connection object, or None on failure
        pass

    def read(self):
        # subclass hook: return one piece of data ('' on failure)
        pass

    def reconnect(self):
        """Drop the current connection and establish a fresh one."""
        con = self.connect()
        self.con = con

    def calculate_piece_length(self):
        """Sample the source for ~3 seconds and derive the piece length.

        The piece length is the largest 1-second sample rounded up to a
        multiple of 8 chunks, so the per-piece bitfield needs no padding bits.
        """
        lengthes = []
        for i in range(3):
            d = ''
            t = time()
            while time() - t < 1:
                d += self.con.read(4096)
            lengthes.append(len(d))
        # take the biggest 1-second sample as the bandwidth estimate
        length = max(lengthes)
        # '//' keeps this integer arithmetic correct on both Python 2 and 3
        number_of_chunks = length // self.chunk_length
        length = 8 * ((number_of_chunks // 8) + (1 if number_of_chunks % 8 else 0)) * self.chunk_length
        return length

    def close(self):
        """Close the underlying connection, if any."""
        if self.con:
            self.con.close()


class HTTPInput(Input):
    """Input implementation that pulls the stream over plain HTTP."""

    def connect(self):
        # return None instead of raising so callers can test self.con
        try:
            con = urllib2.urlopen(self.source)
        except:
            return
        return con

    def read(self):
        """Read one piece (up to self.piece_length bytes) from the source.

        Returns '' when there is no connection and a short piece on EOF.
        Fix: the original compared the *string* ``d`` against the integer
        piece length (so the refill loop never ran); we now compare lengths
        and stop when the source stops producing data, which also prevents a
        busy loop at end-of-stream.
        """
        source = self.con
        if source is None:
            return ''
        d = source.read(self.piece_length)
        while len(d) < self.piece_length:
            chunk = source.read(self.piece_length - len(d))
            if not chunk:
                break
            d += chunk
        return d
| {"/Container.py": ["/Buffer.py", "/Peer.py", "/Server.py", "/messages.py", "/Input.py", "/Request.py"], "/Server.py": ["/Peer.py"], "/Peer.py": ["/Request.py"], "/Client.py": ["/messages.py", "/Reactor.py", "/Container.py", "/Server.py", "/upnp.py"], "/Request.py": ["/Peer.py"], "/tamchy.py": ["/Client.py"]} |
# 56,730 | Nurchik/tamchy | refs/heads/master | /upnp.py
import urllib2
import socket
import logging
from xml.etree import ElementTree as ET
DESCRIPTION_NS = 'urn:schemas-upnp-org:device-1-0'
ENVELOPE_NS = 'http://schemas.xmlsoap.org/soap/envelope/'
#class Response:
# def __init__(self,msg):
# self.msg = msg
# self.headers = {}
# self.body = {}
# self.parse()
#
# def parse(self):
# m = self.msg.splitlines()
# body = []
class UPNP:
    """Minimal UPnP IGD client: discovers the gateway via SSDP, reads its
    service description and issues SOAP requests for port mapping and
    external-IP queries.

    NOTE(review): several XPath lookups below use '{ns}BODY' — ElementTree
    tag matching is case-sensitive and the SOAP body element is 'Body', so
    those lookups likely never match; confirm against real device responses.
    """
    def __init__(self):
        self.logger = logging.getLogger('tamchy.UPNP')
        # serviceType suffix -> controlURL, filled by init()
        self.controlUrls = {}
        self.description_xml = ''
        self.gateway_addr = ''
        self.service_available = ''
        self.connection_established = False
        self.init()

    def init(self):
        """Discover an InternetGatewayDevice via SSDP multicast and cache its
        control URLs; leaves connection_established False on any failure."""
        s = socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
        # header for searching InternetGatewayDevice -> routers,modems
        R = '''M-SEARCH * HTTP/1.1\r\nHOST: 239.255.255.250:1900\r\nMAN: ssdp:discover\r\nMX: 10\r\nST: urn:schemas-upnp-org:device:InternetGatewayDevice:1\r\n\r\n'''
        s.sendto(R,('239.255.255.250',1900))
        s.settimeout(5)
        try:
            data,addr = s.recvfrom(4096)
        except socket.timeout:
            return
        response = {}
        for line in data.splitlines():
            # extracting http-headers
            if not line:
                break
            header, value = line.split(':',1) if ':' in line else (line,'')
            response[header.lower()] = value
        # split('//') == splitting http:// -> split('/') == splitting everything after ip:port -> split(':') == splitting ip and port value
        gateway = response['location'].split('//')[1].split('/')[0]
        self.gateway_ip, self.gateway_port = gateway.split(':') if ':' in gateway else (gateway,'80')
        self.gateway_addr = 'http://%s:%s' % (self.gateway_ip,self.gateway_port)
        try:
            self.description_xml = urllib2.urlopen(response['location']).read()
        except:
            self.logger.debug('Cannot get Device Description XML from IGD. Response form IGD --> ' + data)
            return
        xml = ET.fromstring(self.description_xml)
        # collect every advertised service's control URL, keyed by the
        # short serviceType suffix (e.g. 'WANIPConnection:1')
        for s in xml.iter('{%s}service' % (DESCRIPTION_NS)):
            service = s.find('{%s}serviceType' % (DESCRIPTION_NS)).text.split(':',3)[-1]
            controlUrl = s.find('{%s}controlURL' % (DESCRIPTION_NS)).text
            self.controlUrls[service] = controlUrl
        # ADSL modems expose WANPPPConnection; routers expose WANIPConnection
        self.service_available = 'WANPPPConnection:1' if ('WANPPPConnection:1' in self.controlUrls) else 'WANIPConnection:1'
        self.connection_established = True

    def get_envelope(self,service,action,**kw):
        """Build a SOAP envelope for *action* on *service*; each keyword
        argument becomes a child element of the action node."""
        return """<?xml version="1.0"?>
<s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/" s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/">
    <s:Body>
        <u:%s xmlns:u="urn:schemas-upnp-org:service:%s">""" % (action, service) +\
        "\n".join(["<%s>%s</%s>"%(k,v,k) for k,v in kw.items()]) +\
        """        </u:%s>
    </s:Body>
</s:Envelope>""" % action

    def request(self,service,action,**kw):
        """POST a SOAP request to the service's control URL.

        Returns the urllib2 response object, or None when discovery never
        succeeded or the HTTP request failed.
        """
        if not self.connection_established:
            return
        req = urllib2.Request(self.gateway_addr + self.controlUrls[service])
        envelope = self.get_envelope(service,action,**kw)
        req.add_header("content-type",'text/xml; charset="utf-8"')
        req.add_header("SOAPACTION", '"urn:schemas-upnp-org:service:%s#%s"' % (service, action))
        # NOTE(review): leftover debug print
        print(envelope)
        req.add_data(envelope)
        try:
            response = urllib2.build_opener().open(req)
        except:
            return
        return response

    def get_external_ip(self):
        """Return the gateway's external IP address, or '' on failure.

        NOTE(review): find('.//NewExternalIPAddress') ignores namespaces —
        confirm devices return this element unqualified.
        """
        response = self.request('WANIPConnection:1','GetExternalIPAddress')
        if response and response.getcode() == 200:
            xml = response.read()
            if xml:
                root = ET.fromstring(xml)
                ext_ip = root.find('.//NewExternalIPAddress')
                # if tag is founded
                if ext_ip is not None:
                    return ext_ip.text
        return ''

    def get_generic_port_mapping_entry(self,index):
        """Fetch the port-mapping entry at *index*; returns a (possibly
        empty) dict of response element tag -> text."""
        service_available = self.service_available
        response = self.request(service_available,'GetGenericPortMappingEntry',NewPortMappingIndex=index)
        d = {}
        if response and response.getcode() == 200:
            xml = response.read()
            # NOTE(review): leftover debug print
            print('xml --> ' + xml)
            if xml:
                root = ET.fromstring(xml)
                # NOTE(review): 'BODY' casing — see class docstring
                body = root.find('{%s}BODY' % (ENVELOPE_NS))
                if body is not None:
                    resp = body.find('{%s}GetGenericPortMappingEntryResponse' % ('urn:schemas-upnp-org:service:' + service_available))
                    if resp is not None:
                        for el in list(resp):
                            d[el.tag] = el.text
        return d

    def get_specific_port_mapping_entry(self,external_port,remote_host='',protocol='TCP'):
        """Look up the mapping for (remote_host, external_port, protocol);
        returns a (possibly empty) dict of response element tag -> text."""
        service_available = self.service_available
        response = self.request(service_available,'GetSpecificPortMappingEntry',NewRemoteHost=remote_host,
                                NewExternalPort=external_port,
                                NewProtocol=protocol)
        d = {}
        # NOTE(review): leftover debug prints; also crash if response is None
        print(response.getcode())
        print(response)
        if response and response.getcode() == 200:
            xml = response.read()
            print('xml --> ' + xml)
            if xml:
                root = ET.fromstring(xml)
                # NOTE(review): 'BODY' casing — see class docstring
                body = root.find('{%s}BODY' % (ENVELOPE_NS))
                if body is not None:
                    resp = body.find('{%s}GetSpecificPortMappingEntryResponse' % ('urn:schemas-upnp-org:service:' + service_available))
                    if resp is not None:
                        for el in list(resp):
                            d[el.tag] = el.text
        return d

    def add_port_mapping(self,external_port,internal_port,protocol='TCP',duration=0,remote_host='',description='tamchy_port_mapping'):
        """Create a NAT port mapping from external_port to this host's
        internal_port; returns True on success."""
        # we will try to use WANPPP first because in ADSL modems this service must be used -> if it's a router we'll use WANIP
        service_available = self.service_available
        # getting our internal ip from IGD
        s = socket.socket()
        try:s.connect((self.gateway_ip,80))
        except: return False
        this_host = s.getsockname()[0]
        s.close()
        response = self.request(service_available,'AddPortMapping',NewRemoteHost=remote_host, NewExternalPort=external_port,
                                NewInternalPort=internal_port,
                                NewInternalClient=this_host, NewProtocol=protocol,
                                NewPortMappingDescription=description, NewEnabled='1',
                                NewLeaseDuration=duration)
        if response and response.getcode() == 200:
            # NOTE(review): leftover debug print
            print(response.read())
            return True
        # otherwise
        return False

    def delete_port_mapping(self,external_port,protocol='TCP',remote_host=''):
        """Delete the NAT mapping for external_port; returns True on success."""
        service_available = self.service_available
        response = self.request(service_available,'DeletePortMapping',NewRemoteHost=remote_host, NewExternalPort=external_port,
                                NewProtocol=protocol)
        if response and response.getcode() == 200:
            return True
        return False
if __name__ == '__main__':
    # Manual smoke test: discover an IGD on the LAN and print its external IP.
    client = UPNP()
    external_ip = client.get_external_ip()
    print(external_ip)
| {"/Container.py": ["/Buffer.py", "/Peer.py", "/Server.py", "/messages.py", "/Input.py", "/Request.py"], "/Server.py": ["/Peer.py"], "/Peer.py": ["/Request.py"], "/Client.py": ["/messages.py", "/Reactor.py", "/Container.py", "/Server.py", "/upnp.py"], "/Request.py": ["/Peer.py"], "/tamchy.py": ["/Client.py"]} |
# 56,731 | Nurchik/tamchy | refs/heads/master | /StreamBuffer.py
# -*- coding: utf-8 -*-
import Queue
import urllib2
import io
import socket
import pickle
import StringIO
class StreamBuffer:
    """Circular stream buffer modelled on the Kyrgyz game Toguz Korgool.

    The buffer has two sides ('b'ottom and 't'op) of 9 pits, each pit holding
    9 slots.  A position is a 3-char string 'PNs': pit digit, slot digit and
    side letter, e.g. '00b'.  Positions advance bottom->top->bottom so the
    structure behaves as one ring of 162 slots.
    """

    def __init__(self,is_server=False,unit_size=256*1024):
        """
        :param is_server: server mode — put() then writes at the current
                          position and auto-advances it
        :param unit_size: nominal size of one buffered unit (bytes)
        """
        # 2 sides x 9 pits x 9 slots, every slot initially 0 (empty)
        self.stream_buffer = {
            'b': tuple([0] * 9 for _ in range(9)),
            't': tuple([0] * 9 for _ in range(9)),
        }
        self.run = True
        self.current_pos = '00b'
        self.bitRate = None
        self.unit_size = unit_size
        # Fix: put() reads self.is_server but the original never stored it,
        # raising AttributeError on the first call.
        self.is_server = is_server

    def get_pos(self):
        """Serialize current_pos into 3 network-order bytes (pit, slot, side
        where side is 0 for 'b' and 1 for 't')."""
        # Fix: the original looped over ``i`` but tested an undefined ``x``,
        # and ``x == ('b' or 't')`` only ever compared against 'b'.
        pos = b''
        for ch in self.current_pos:
            if ch in ('b', 't'):
                pos += struct.pack('!B', 1 if ch == 't' else 0)
            else:
                pos += struct.pack('!B', int(ch))
        return pos

    def set_pos(self,pos):
        """Inverse of get_pos(): decode 3 packed bytes into current_pos."""
        # Fix: the original concatenated the unpacked *int* onto a string
        # (TypeError); slice-indexing also works on both py2 str and py3 bytes.
        decoded = ''
        for idx in range(3):
            val = struct.unpack('!B', pos[idx:idx + 1])[0]
            if idx == 2:
                decoded += 'b' if val == 0 else 't'
            else:
                decoded += str(val)
        self.current_pos = decoded

    def get(self,pos,offset,length):
        """Read *length* bytes at *offset* from the slot addressed by *pos*."""
        pit,num,side = int(pos[0]),int(pos[1]),('b' if (int(pos[2]) == 0) else 't')
        s = self.stream_buffer[side][pit][num]
        s.seek(offset)
        data = s.read(length)
        return data

    def put(self,data,pos=0,offset=0):
        """Write *data* into the buffer.

        In server mode the write goes to current_pos (which then advances by
        one); otherwise the caller supplies pos/offset explicitly.
        NOTE(review): the server branch both stores the raw data and falls
        through to the StringIO write below — confirm the intended semantics.
        """
        if self.is_server:
            pos = self.current_pos
            self.stream_buffer[pos[2]][int(pos[0])][int(pos[1])] = data
            self.current_pos = self.get_plused_position(pos,1)
        pit,num,side = int(pos[0]),int(pos[1]),('b' if (int(pos[2]) == 0) else 't')
        if not self.stream_buffer[side][pit][num]:
            # lazily create the slot's byte store on first write
            self.stream_buffer[side][pit][num] = StringIO.StringIO()
        self.stream_buffer[side][pit][num].seek(offset)
        self.stream_buffer[side][pit][num].write(data)

    def get_plused_position(self,pos,offset):
        """Return the position *offset* slots after *pos*, wrapping pits and
        flipping sides ('b' <-> 't') as the ring is traversed."""
        pit = int(pos[0])
        num = int(pos[1])
        side = pos[2]
        # '//' keeps this integer arithmetic correct on both Python 2 and 3
        x = int(offset) // 9
        y = int(offset) - x*9
        for i in range(x):
            # each full batch of 9 advances one pit
            if pit+1 == 9:
                side = ('t' if side == 'b' else 'b')
                pit = 0
            else:
                pit = pit+1
        if num+y >= 9:
            # remaining slots spill into the next pit
            if pit+1 == 9:
                side = ('t' if side == 'b' else 'b')
                pit = 0
                num = num+y-9
            else:
                pit = pit+1
                num = num+y-9
        else:
            num = num+y
        return (str(pit)+str(num)+side)

    def get_minused_position(self,pos,offset):
        """Return the position *offset* slots before *pos* (inverse of
        get_plused_position)."""
        pit = int(pos[0])
        num = int(pos[1])
        side = pos[2]
        x = int(offset) // 9
        y = int(offset) - x*9
        for i in range(x):
            if pit-1 == -1:
                side = ('b' if side == 't' else 't')
                pit = 8
            else:
                pit = pit-1
        if num-y <= -1:
            if pit-1 == -1:
                side = ('b' if side == 't' else 't')
                pit = 8
                num = num-y+9
            else:
                pit = pit-1
                num = num-y+9
        else:
            num = num-y
        return (str(pit)+str(num)+side)
| {"/Container.py": ["/Buffer.py", "/Peer.py", "/Server.py", "/messages.py", "/Input.py", "/Request.py"], "/Server.py": ["/Peer.py"], "/Peer.py": ["/Request.py"], "/Client.py": ["/messages.py", "/Reactor.py", "/Container.py", "/Server.py", "/upnp.py"], "/Request.py": ["/Peer.py"], "/tamchy.py": ["/Client.py"]} |
# 56,732 | Nurchik/tamchy | refs/heads/master | /Server.py
import socket,struct,pickle,logging,time
from Peer import Peer
KEEP_ALIVE = struct.pack('!B',0)
POSITION = struct.pack('!B',1)
GET_PEERS = struct.pack('!B',2)
GET_STREAM = struct.pack('!B',3)
STOP = struct.pack('!B',4)
CLOSE = struct.pack('!B',5)
ERROR = struct.pack('!B',6)
HAVE = struct.pack('!B',7)
CONNECTION_TIMEOUT = 180
def encode(ip=None,port=None):
    """Pack a dotted-quad *ip* and/or an integer *port* into network-order bytes.

    Returns the single packed value when only one argument is supplied,
    otherwise a ``[packed_ip, packed_port]`` list (``[]`` when neither is).

    Fixes: the original referenced the undefined ``self.ip``, and its
    if/elif chain both ignored ``port`` whenever ``ip`` was given and always
    fell through without returning (i.e. returned None).
    """
    result = []
    if ip:
        # '1.2.3.4' -> b'\x01\x02\x03\x04'
        result.append(b''.join(struct.pack('!B', int(x)) for x in ip.split('.')))
    if port:
        result.append(struct.pack('!H', port))
    if len(result) == 1:
        return result[0]
    return result
def decode(ip=None,port=None):
    """Inverse of encode(): unpack 4 ip bytes to a dotted-quad string and/or
    2 port bytes to an int.

    Returns the single decoded value when only one argument is supplied,
    otherwise an ``[ip, port]`` list — matching the two call styles used in
    this project (``ip,port = decode(ip=..., port=...)`` and
    ``port = decode(port=...)``).

    Fixes: the original's if/elif chain never reached its return statements
    (it always returned None) and appended ``ip`` instead of the decoded port.
    """
    result = []
    if ip:
        result.append('.'.join(str(x) for x in struct.unpack('!BBBB', ip)))
    if port:
        result.append(struct.unpack('!H', port)[0])
    if len(result) == 1:
        return result[0]
    return result
class TempPeer(Peer):
    """Placeholder peer for a freshly-accepted connection.

    It only waits for the handshake, validates the content_id against the
    server's registered streams and then hands the socket over to the right
    StreamContainer via Server.accept().
    """
    def __init__(self,ip,sock,Server):
        """
        :param ip: remote address of the accepted connection
        :param sock: the already-accepted socket
        :param Server: owning MultiServer (provides .streams and .accept())
        """
        self.logger = logging.getLogger('tamchy.Server.TempPeer')
        self.socket = sock
        self.closed = False
        self.content_id = None
        # handshaked will be always False because we need to TempPeer's handle_read
        # to stop after process_handshake()
        self.handshaked = False
        self.Server = Server
        self.read_buffer = ''
        self.ip = ip
        self.port = 'TEMP'
        # creation timestamp, used to expire idle TempPeers
        self.time = time.time()

    def process_handshake(self,msg):
        """Validate the handshake and promote or drop the connection.

        Wire layout: 12-byte magic 'Salamatsyzby' + 32-byte content_id +
        2-byte big-endian listen port.  Unknown content ids get an ERROR
        message (best-effort) before the socket is closed.
        """
        if (msg[:12]).lower() == 'salamatsyzby':
            content_id = msg[12:44]
            #ip = ''.join([struct.pack('!B',int(x)) for x in self.ip.split('.')])
            #port = msg[44:46]
            ip = self.ip
            port = decode(port=msg[44:46])
            if content_id not in self.Server.streams:
                # try to send message, but we must close the connection anyway -> whether error or not
                try:
                    self.socket.send(struct.pack('!I',19) + ERROR + pickle.dumps('Invalid Content ID'))
                except:
                    pass
                self.logger.debug('Peer (%s) disconnected' % (self))
                return self.handle_close()
            else:
                # Everything is good with this peer => we must add this peer to the Container's peers list
                self.logger.debug('Peer (%s) successfully connected' % (self))
                self.Server.accept(self.socket,ip,port,content_id,self.read_buffer,self)
        else:
            # wrong magic — not one of ours
            self.logger.debug('Peer (%s) disconnected' % (self))
            return self.handle_close()

    def handle_close(self):
        """Close the socket and mark this temp peer dead."""
        self.socket.close()
        self.closed = True
#class TempPeer:
# def __init__(self,ip,sock,Server):
# self.logger = logging.getLogger('tamchy.Server.TempPeer')
# self.socket = sock
# self.closed = False
# self.content_id = None
# self.Server = Server
# self.read_buffer = ''
# self.ip = ip
# self.time = time.time()
#
# def fileno(self):
# try:
# s = self.socket.fileno()
# return s
# except:
# return self.handle_close()
#
# def handle_read(self):
# message = ''
# while True:
# try:
# m = self.socket.recv(8192)
# if not m:
# return self.handle_close()
# message += m
# except:
# break
#
# if not message:
# return self.handle_close()
#
# self.time = time.time()
#
# self.read_buffer += message
# length = self.read_buffer[:4]
#
# if len(length) < 4:
# # this is not entire message => wait for remaining part
# return
# length = struct.unpack('!I',length[:4])[0]
# if length > 32*1024:
# return self.handle_close()
# msg = self.read_buffer[4:4 + length]
# if len(msg) < length:
# # this is not entire message => wait for remaining part
# return
#
# self.read_buffer = self.read_buffer[4 + length:]
# #
# # Start of main logic to handle messages from peer
# #
# if (msg[:12]).lower() == 'salamatsyzby':
# content_id = msg[12:44]
# ip = ''.join([struct.pack('!B',int(x)) for x in self.ip.split('.')])
# port = msg[44:46]
# if content_id not in self.Server.streams:
# # try to send message, but we must close the connection anyway -> whether error or not
# try:
# self.socket.send(struct.pack('!I',19) + ERROR + pickle.dumps('Invalid Content ID'))
# except:
# pass
# self.logger.debug('Peer (%s) disconnected' % (self.ip))
# return self.handle_close()
# else:
# # Everything is good with this peer => we must add this peer to the Container's peers list
# self.logger.debug('Peer (%s) successfully connected' % (self.ip))
# self.Server.accept(self.socket,ip,port,content_id,self.read_buffer,self)
# else:
# self.logger.debug('Peer (%s) disconnected' % (self.ip))
# return self.handle_close()
#
# @property
# def timeout(self):
# if time.time() - self.time >= CONNECTION_TIMEOUT:
# return True
# return False
#
# def handle_write(self):
# pass
#
# def handle_close(self):
# self.socket.close()
# self.closed = True
class Server:
    """Base incoming-connection handler; concrete servers build on this."""

    def __init__(self):
        pass

    def fileno(self):
        # Delegate to the listening socket so select() can poll us.
        return self.socket.fileno()

    def handle_read(self):
        conn, address = self.socket.accept()
        self.logger.debug('Got connection from new peer (%s)' % (address[0]))
        if not self.C.can_add_peer():
            # Capacity reached: tell the newcomer why, then drop it.
            conn.send(self.build_message('\x07', pickle.dumps('Reached Peers Limit')))
            conn.close()
            self.logger.debug('Rejected connection of new peer (%s)' % (address[0]))
        else:
            self.C.prepare_peer(sock=conn)

    def build_message(self, id, data=''):
        # Wire format: 4-byte big-endian length prefix, then id + payload.
        payload = id + data
        return struct.pack('!I', len(payload)) + payload

    def close(self):
        #self.Reactor.close()
        self.socket.close()
class MultiServer(Server):
    """One listening socket shared by every registered stream container.

    New connections are parked as TempPeer objects; once a peer handshakes
    with a known content_id, accept() hands it over to that stream.
    """

    def __init__(self, port, PStorage, debug=False):
        self.logger = logging.getLogger('tamchy.Server')
        # !!! self.socket = sock
        #self.Reactor = Reactor(self)
        self.PStorage = PStorage
        self.work = True
        self.ip = 'SERVER'
        self.port = port
        self.closed = False
        self.timeout = False
        self.content_id = 'SERVER'
        # In debug mode no real socket is created (tests drive us directly).
        if not debug:
            self.socket = self.create_socket(port)
        #self.Reactor.start()
        # 'content_id':StreamContainer instance
        self.streams = {}
        self.logger.info('Server on port %s started' % (port))

    def create_socket(self, port):
        # Non-blocking listener with address reuse enabled.
        listener = socket.socket()
        listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        listener.bind(('', port))
        listener.setblocking(0)
        listener.listen(5)
        return listener

    def handle_read(self):
        conn, address = self.socket.accept()
        self.logger.debug('Got connection from new peer (%s)' % (address[0]))
        # Hold the connection as a TempPeer until it identifies its stream.
        self.PStorage.add(TempPeer(address[0], conn, self))

    def accept(self, sock, ip, port, content_id, buf, peer):
        # Promote a handshaked TempPeer into its stream container.
        container = self.streams[content_id]
        if not self.PStorage.can_add_peer(content_id):
            # Per-stream connection limit reached.
            peer.handle_close()
            return
        self.PStorage.remove(peer)
        container.prepare_peer(ip, port, sock=sock, buf=buf)

    def register_stream(self, container):
        self.streams[container.content_id] = container
        self.logger.debug('Stream Container ({0}) registered'.format(container.content_id,))

    def unregister_stream(self, container):
        try:
            del self.streams[container.content_id]
            self.logger.debug('Stream Container ({0}) unregistered'.format(container.content_id,))
        except:
            pass

    def close(self):
        self.closed = True
        self.socket.close()
# Testing
class sock:
    """In-memory stand-in for a socket, used by the tests below."""

    def __init__(self):
        self.closed = False
        self.buffer = []
        self.s_buf = ''   # everything "sent" through this socket
        self.r_buf = []   # queue of canned recv() results

    def close(self):
        self.closed = True

    def fileno(self):
        return 0

    def accept(self):
        return (sock(), ('127.0.0.1', 7654))

    def getpeername(self):
        return ('0.0.0.0', 123)

    def send(self, data):
        self.s_buf = self.s_buf + data
        return len(data)

    def recv(self, num):
        item = self.r_buf.pop(0)
        if item == 'except':
            # Sentinel value: simulate a failing socket.
            raise Exception
        return item
class C:
    """Fake StreamContainer recording which peers were prepared."""

    def __init__(self, c_id):
        self.content_id = c_id
        self.prepared = []
        self.can = True   # flips the can_add_peer() answer

    def can_add_peer(self):
        return self.can

    def prepare_peer(self, ip, port, sock=None, buf=''):
        self.prepared.append((ip, port))
class PeeR:
    """Fake peer that only remembers whether it was closed."""

    def __init__(self):
        self.closed = False

    def handle_close(self):
        # Flag checked by the assertions in test_server().
        self.closed = True
class PStorage:
    """Fake peer storage with a switchable capacity flag."""

    def __init__(self):
        self.can = True
        self.peers = []

    def add(self, peer):
        self.peers.append(peer)

    def can_add_peer(self, content_id):
        return self.can

    def remove(self, peer):
        # Removing an unknown peer is a silent no-op.
        if peer in self.peers:
            self.peers.remove(peer)
class SeRver:
    """Fake server recording the sockets passed to accept()."""

    def __init__(self):
        self.accepted = []
        self.streams = {}

    def remove(self, peer):
        pass

    def accept(self, sock, ip, port, content_id, buf, peer):
        self.accepted.append(sock)
def test_server():
    """Exercises MultiServer stream (un)registration and peer acceptance."""
    ps = PStorage()
    s = MultiServer(7668,ps,debug=True)
    c1 = C('content_id1')
    c2 = C('content_id2')
    c3 = C('content_id3')
    c4 = C('content_id4')
    # Register four containers, then unregister two of them.
    s.register_stream(c1)
    s.register_stream(c2)
    s.register_stream(c3)
    s.register_stream(c4)
    assert len(s.streams) == 4
    s.unregister_stream(c3)
    assert len(s.streams) == 3
    assert 'content_id2' in s.streams
    assert 'content_id3' not in s.streams
    s.unregister_stream(c1)
    assert len(s.streams) == 2
    assert 'content_id1' not in s.streams
    # test_accept
    sct = sock()
    assert not c2.prepared
    # Accepted peers must be handed to the matching container.
    s.accept(sct,'127.0.0.1',7665,'content_id2','',PeeR())
    assert c2.prepared
    assert c2.prepared[0] == ('127.0.0.1',7665)
    p = PeeR()
    s.accept(sct,'127.0.0.1',7667,'content_id2','',p)
    s.accept(sct,'127.0.0.1',7668,'content_id4','',PeeR())
    assert not p.closed
    assert c2.prepared[1] == ('127.0.0.1',7667)
    assert c4.prepared[0] == ('127.0.0.1',7668)
    p1 = PeeR()
    p2 = PeeR()
    # Once the storage reports no capacity, new peers are closed instead.
    ps.can = False
    assert len(c2.prepared) == 2
    assert not p.closed
    s.accept(sct,'127.0.0.1',7669,'content_id2','',p1)
    s.accept(sct,'127.0.0.1',7678,'content_id2','',p2)
    assert len(c2.prepared) == 2
    assert p1.closed
    assert p2.closed
def test_temp_peer():
    """Exercises TempPeer framing and handshake validation against a fake socket."""
    server = SeRver()
    s = sock()
    p = TempPeer('127.0.0.1',s,server)
    # A socket error during recv() must close the connection.
    s.r_buf.append('except')
    p.handle_read()
    assert s.closed
    s.closed = False
    # Partial data is buffered, not treated as an error.
    s.r_buf = ['abc', 'except']
    p.handle_read()
    assert p.read_buffer == 'abc'
    s.closed = False
    # An empty read means the remote side went away.
    s.r_buf = ['', 'except']
    p.handle_read()
    assert s.closed
    s.closed = False
    # Messages longer than 32 KiB are rejected.
    s.r_buf = [struct.pack('!I',33*1024),'except']
    p.handle_read()
    assert s.closed
    s.closed = False
    p.read_buffer = ''
    # Incomplete message (4 bytes announced, only 3 present) stays buffered.
    s.r_buf = [struct.pack('!I',4)+'abc','except']
    p.handle_read()
    assert len(p.read_buffer) == 7
    assert not s.closed
    # Complete message that is not a handshake -> disconnect.
    s.r_buf = [struct.pack('!I',4)+'abcd','except']
    p.handle_read()
    assert s.closed
    s.closed = False
    p.send_buffer = ''
    p = TempPeer('127.0.0.1',s,server)
    # Handshake with a truncated/unknown tail -> disconnect.
    s.r_buf = [struct.pack('!I',23)+'Salamatsyzby'+'_content_id','except']
    p.handle_read()
    assert s.closed
    server.streams = {'content_id1234567890123456789014':'StreamContainer'}
    s = sock()
    p = TempPeer('127.0.0.1',s,server)
    # Valid handshake but unknown content_id -> error reply, then disconnect.
    s.r_buf = [struct.pack('!I',68)+'Salamatsyzby' + 'content_id1234567890123456789012' + 'peeeeeerrrrrr_iiiidd' + '1234','except']
    p.handle_read()
    assert s.closed
    assert s.s_buf[7:25] == 'Invalid Content ID'
    s = sock()
    p = TempPeer('127.0.0.1',s,server)
    s.r_buf = [struct.pack('!I',68)+'Salamatsyzby' + 'content_id1234567890123456789012' + 'peeeeeerrrrrr_iiiidd12' + '34',\
        struct.pack('!I',2) ,'except']
    assert not s.s_buf
    p.handle_read()
    assert s.closed
    assert s.s_buf
    assert not server.accepted
    s = sock()
    p = TempPeer('127.0.0.1',s,server)
    # Valid handshake for a registered content_id -> handed to server.accept().
    s.r_buf = [struct.pack('!I',68)+'Salamatsyzby' + 'content_id1234567890123456789014' + 'peeeeeerrrrrr_iiiidd12' + '45',\
        struct.pack('!I',2) ,'except']
    p.handle_read()
    assert not s.closed
    assert not s.s_buf
    assert server.accepted == [s]
| {"/Container.py": ["/Buffer.py", "/Peer.py", "/Server.py", "/messages.py", "/Input.py", "/Request.py"], "/Server.py": ["/Peer.py"], "/Peer.py": ["/Request.py"], "/Client.py": ["/messages.py", "/Reactor.py", "/Container.py", "/Server.py", "/upnp.py"], "/Request.py": ["/Peer.py"], "/tamchy.py": ["/Client.py"]} |
56,733 | Nurchik/tamchy | refs/heads/master | /Peer.py | # -*- coding:utf-8 -*-
import socket,struct,time,logging,sqlite3
from StringIO import StringIO
from threading import Lock
import pickle
import random
from bitstring import BitStream
import Request
# Single-byte message ids of the wire protocol (each frame is a 4-byte
# big-endian length prefix, one of these ids, then the payload).
KEEP_ALIVE = struct.pack('!B',0)
POSITION = struct.pack('!B',1)
GET_PEERS = struct.pack('!B',2)
GET_STREAM = struct.pack('!B',3)
STOP = struct.pack('!B',4)
CLOSE = struct.pack('!B',5)
ERROR = struct.pack('!B',6)
HAVE = struct.pack('!B',7)
BITFIELD = struct.pack('!B',8)

# Human-readable names for logging in Peer.process_message().
MSG = {KEEP_ALIVE:'KEEP_ALIVE',POSITION:'POSITION',GET_PEERS:'GET_PEERS',
GET_STREAM:'GET_STREAM',STOP:'STOP',CLOSE:'CLOSE',ERROR:'ERROR',HAVE:'HAVE'}

# Timeouts, in seconds: select() poll interval, pending-request expiry,
# keep-alive send interval, and the silence threshold after which a peer
# is considered dead.
TIMEOUT = 20.0
REQUEST_TIMEOUT = 10.0
KEEP_ALIVE_INTERVAL = 120.0
ALIVE_TIMEOUT = 150.0

# Upper bound on remembered HAVE positions per peer (see class store).
LIMIT = 60
class store:
    """Thread-safe, bounded record of piece positions a peer claims to HAVE.

    Kept as a list (not a set) so the oldest position can be evicted once
    the LIMIT is exceeded.
    """

    def __init__(self):
        self.lock = Lock()
        self.s = []

    def put(self, t):
        """Remember position ``t`` (no-op if already known)."""
        # BUGFIX: the membership test and the trim used to run outside the
        # lock, so two threads could interleave between check and append;
        # the whole read-modify-write now happens under the lock.
        with self.lock:
            if t not in self.s:
                self.s.append(t)
                # we need to clear self.s from overloading with positions,
                # so LIMIT is optimal number to do it
                if len(self.s) > LIMIT:
                    self.s.pop(0)

    def have(self, t):
        """Return True if position ``t`` was recorded."""
        with self.lock:
            return (t in self.s)
class Peer:
    """One remote peer of a stream.

    Frames and parses the length-prefixed wire protocol, serves stream data
    through per-request generators (``streamer``) and tracks outstanding
    download requests (``requested``).  Every outgoing message is staged in
    ``send_buffer`` and flushed by handle_write(); incoming bytes accumulate
    in ``read_buffer`` until a full frame is available.
    """

    def __init__(self,stream_data,Container,Buffer,ip=None,port=None,sock=None,buf='',server=False,node=False):
        self.logger = logging.getLogger('tamchy.Peer')
        self.store = store()
        self.lock = Lock()
        self.last_message_recv = time.time()
        self.last_message_sent = time.time()
        self.content_id = stream_data['content_id']
        self.handshake = stream_data['handshake']
        self.chunk_length = stream_data['chunk_length']
        self.piece_length = stream_data['piece_length']
        self.ip = ip
        self.port = port
        self.requested = []
        self.send_buffer = ''
        self.read_buffer = ''
        # if we have previously recieved data from peer -> add to the read_buffer
        self.read_buffer += buf
        self.upload_speed = 0
        self.download_speed = 0
        # request -> (pos,offset,length,seconds):generator
        self.streamer = {}
        self.temp = {}
        self.B = Buffer
        self.C = Container
        self.closed = False
        self.handshaked = False
        self.server = server
        self.node = node
        self.logger.info('Created new peer instance')
        self.handle_connect(sock,ip,port)

    def __str__(self):
        return '%s:%s' % (self.ip, self.port)

    def handle_connect(self,sock=None,ip=None,port=None):
        """Adopt an already-accepted socket, or start a non-blocking connect."""
        self.socket = None
        if sock is not None:
            # Incoming connection: the socket is already connected.
            self.socket = sock
            self.socket.setblocking(0)
            self.send_handshake()
            self.handshaked = True
        else:
            import errno  # local import: the module header does not import errno
            sock = socket.socket()
            sock.setblocking(0)
            try:
                sock.connect((ip,port))
            except socket.error as e:
                # A non-blocking connect normally "fails" with EINPROGRESS.
                # BUGFIX: the original compared against the literal 36, which
                # is EINPROGRESS only on BSD/macOS (Linux uses 115).
                if e.errno not in (errno.EINPROGRESS, errno.EWOULDBLOCK):
                    return
            # BUGFIX: the original kept the socket only when connect() raised
            # EINPROGRESS, so an immediately successful connect was dropped.
            self.socket = sock
            self.send_handshake()
        self.logger.debug('Wrote to send buffer handshake-message')

    def fileno(self):
        try:
            return self.socket.fileno()
        except:
            # Socket already gone -> tear the peer down.
            self.handle_close()

    def handle_write(self):
        """Emit at most one queued stream chunk, then flush the send buffer."""
        while self.streamer:
            # One message per call, always from the first generator.
            key = next(iter(self.streamer))
            try:
                data = next(self.streamer[key])
            except StopIteration:
                self.logger.debug('Stream generator finished work')
                del self.streamer[key]
                continue
            if data:
                self.send_message(GET_STREAM,data)
            # data = '' => no piece in Buffer at this moment
            break
        t = time.time()
        if self.send_buffer:
            try:
                sent = self.socket.send(self.send_buffer)
            except:
                return self.handle_close()
            self.last_message_sent = time.time()
            # NOTE(review): this measures bytes *sent*, so it is arguably the
            # upload speed; the attribute name is kept to preserve behaviour.
            self.download_speed = sent / (time.time() - t)
            self.send_buffer = self.send_buffer[sent:]

    def recv(self):
        """Drain the non-blocking socket; returns '' when nothing was read."""
        message = ''
        t = time.time()
        sock = self.socket
        while True:
            try:
                m = sock.recv(8192)
            except:
                break
            # BUGFIX: a remotely-closed socket returns '' without raising,
            # which made the original loop spin forever.
            if not m:
                break
            message += m
        self.last_message_recv = time.time()
        self.upload_speed = len(message) / (self.last_message_recv - t)
        return message

    def parse_messages(self,msg):
        """Split buffered bytes into complete frames (length prefix stripped)."""
        messages = []
        self.read_buffer += msg
        while self.read_buffer:
            length = self.read_buffer[:4]
            if len(length) < 4:
                # this is not entire message => wait for remaining part
                break
            length = struct.unpack('!I',self.read_buffer[:4])[0]
            if length > 32*1024:
                # Oversized frame: protocol violation.
                self.handle_close()
                break
            msg = self.read_buffer[4:4 + length]
            if len(msg) < length:
                # this is not entire message => wait for remaining part
                break
            self.read_buffer = self.read_buffer[4 + length:]
            messages.append(msg)
        return messages

    def send_handshake(self):
        # The pre-built handshake blob comes from stream_data (see messages.py).
        self.send_buffer += self.handshake

    def process_handshake(self,msg):
        """Validate the peer's handshake; on failure close the connection."""
        if (msg[:12]).lower() == 'salamatsyzby':
            if msg[12:44] == self.content_id:
                if self.server and not self.node:
                    # Non-node clients report position and piece length in
                    # the handshake tail.
                    self.pos, self.piece_length = struct.unpack('!HI',msg[46:52])
                    #self.C.set(pos,piece_length)
                self.handshaked = True
                self.logger.debug('Connection with peer %s established' % (self))
                return
        elif msg[0] == ERROR:
            # NOTE(review): unpickling peer-supplied data is unsafe with
            # untrusted peers.
            try:
                error = pickle.loads(msg[1:])
            except:
                error = ''
            self.logger.debug('Connection with peer not established --> ' + error)
            return self.handle_close()
        self.logger.debug('Connection with peer not established --> Incorrect handshake-message')
        return self.handle_close()

    def process_message(self,msg):
        """Dispatch one complete frame; returns False when the peer was closed."""
        id = msg[0]
        payload = msg[1:]
        self.logger.debug('Got message %s from peer %s' % (MSG.get(id,'UNKNOWN'),self))
        if id == KEEP_ALIVE:
            pass
        elif id == GET_PEERS:
            if len(payload) == 1:
                # One byte -> a request for that many peers.
                self.send_message(GET_PEERS,self.C.get_peers(struct.unpack('!B',payload)[0]))
            else:
                # Otherwise it is a peer list sent to us.
                self.C.prepare_peers(payload)
        elif id == GET_STREAM:
            pos,offset = struct.unpack('!Hh',payload[:4])
            payload = payload[4:]
            #This is a request from the peer
            if offset == -1:
                # BUGFIX: struct.unpack() returns a tuple; the original kept
                # the tuple itself, which broke range(seconds) inside
                # stream() and made STOP's (pos,offset,seconds) key never
                # match the stored one.
                seconds = struct.unpack('!B',payload[:1])[0]
                payload = payload[1:]
                # there are bitfields
                if payload:
                    bitfields = self.decode_bitfields(payload)
                else:
                    bitfields = {}
                self.logger.debug('Get Stream request ' + str((pos,offset,seconds)))
                self.add_stream(pos,offset,seconds,bitfields)
            else:
                # Otherwise it is stream data answering one of our requests.
                self.put(pos,offset,payload)
        elif id == STOP:
            pos,offset,seconds = struct.unpack('!HhB',payload)
            try:
                del self.streamer[(pos,offset,seconds)]
            except:
                pass
            self.logger.debug('Stopped streaming ' + str((pos,offset,seconds)))
        elif id == HAVE:
            self.store.put(struct.unpack('!I',payload)[0])
        elif id == CLOSE:
            #self.DBEngine.delete_peer(self.content_id,self.ip)
            self.handle_close()
            return False
        else:
            # Unknown message id: drop the peer.
            self.handle_close()
            return False
        return True

    def decode_bitfields(self,bitfields,chunk_length=None):
        """Decode concatenated (pos, length, bitfield) records into a dict.

        BUGFIX: renamed from the misspelled ``decode_bitfileds`` so the call
        in process_message() resolves; ``chunk_length`` was never used in the
        body and is now optional for compatibility.
        """
        bfs = {}
        while bitfields:
            pos,bitfield_length = struct.unpack('!HH',bitfields[ : 4])
            if bitfield_length == 0:
                # Zero length marks an already-completed piece.
                bitfield = ''
            else:
                # BUGFIX: the module imports ``from bitstring import
                # BitStream``; the original referenced ``bitstring.BitStream``
                # which raised NameError.
                bitfield = BitStream(bytes=bitfields[4 : 4 + bitfield_length])
            bitfields = bitfields[4 + bitfield_length : ]
            bfs[pos] = bitfield
        return bfs

    # Backwards-compatible alias for the original (misspelled) method name.
    decode_bitfileds = decode_bitfields

    def handle_read(self):
        """Read, frame and dispatch everything currently on the socket."""
        message = self.recv()
        if not message:
            return self.handle_close()
        messages = self.parse_messages(message)
        for msg in messages:
            if not self.handshaked:
                self.process_handshake(msg)
                # handshake error
                if not self.handshaked:
                    break
                else:
                    continue
            p = self.process_message(msg)
            if not p:
                break

    def add_stream(self,pos,offset,seconds,bitfields):
        """Register a streaming generator for a GET_STREAM request we received."""
        if pos in self.B:
            self.streamer[(pos,offset,seconds)] = self.stream(pos,seconds,bitfields)
            self.logger.debug('Added stream generator to store for request ' + str((pos,seconds)))

    def stream(self,pos,seconds,bitfields):
        """Yield chunk messages for ``seconds`` consecutive pieces from ``pos``.

        Yields '' while a piece is not yet in the Buffer so handle_write()
        can come back later; skips chunks the requester already has
        according to its bitfield.
        """
        self.logger.debug('Started generator for ' + str((pos,seconds)))
        p = pos
        for i in range(seconds):
            pos = p + i
            # Positions are 16-bit ('!H'), so wrap around at 65536.
            if pos >= 65536:
                # BUGFIX: the original computed 65536 - pos, which is
                # negative for pos > 65536 instead of wrapping.
                pos = pos - 65536
            bitfield = bitfields.get(pos,None)
            # piece for pos is already completed
            if bitfield == '':
                continue
            while True:
                data = self.B.get(pos)
                if data:
                    break
                # there is no piece in Buffer at this moment
                yield ''
            chunk_length = self.chunk_length
            # ``//`` keeps the Python 2 integer-division semantics explicit.
            for k in range(self.piece_length // self.chunk_length):
                if bitfield is not None:
                    # this chunk of piece we already have downloaded so skip it
                    if bitfield[k] == True:
                        continue
                offset = k * chunk_length
                # we will use k as offset because we don't need real offset
                d = struct.pack('!Hh',pos,k) + data[0+offset : chunk_length+offset]
                self.logger.debug('Sent data for ' + str((pos,k)))
                yield d

    def put(self,pos,offset,data):
        """Route a received chunk into whichever pending Request wants it."""
        self.logger.debug('Put data to %s, length -> %s' % (str((pos,offset)),len(data)))
        with self.lock:
            for req in self.requested:
                if pos in req:
                    req.put(pos,offset,data)
                    if req.completed:
                        self.logger.debug('Request %s completed' % (req))
                        self.requested.remove(req)
                    break

    def return_requests(self):
        """Detach and return all pending requests (for re-queueing elsewhere)."""
        r = [i for i in self.requested]
        self.requested = []
        return r

    @property
    def timeout(self):
        # True once the peer has been silent for longer than ALIVE_TIMEOUT.
        if time.time() - self.last_message_recv >= ALIVE_TIMEOUT:
            return True
        return False

    @property
    def request_timeout(self):
        # True when the oldest outstanding request exceeded REQUEST_TIMEOUT.
        if self.requested:
            if time.time() - min(self.requested, key=lambda x : x.t).t > REQUEST_TIMEOUT:
                return True
        return False

    def have(self,t):
        # Servers are assumed to have everything.
        if self.server:
            return True
        return self.store.have(t)

    @property
    def can_request(self):
        # because we can request only one 'request' at one time until previous doesn't complete
        if self.requested:
            return False
        return True

    @property
    def need_keep_alive(self):
        return time.time() - self.last_message_sent > KEEP_ALIVE_INTERVAL

    def request_stream(self,request):
        """Queue a Request object's wire message and start tracking it."""
        self.send_buffer += request.get_request()
        self.requested.append(request)
        self.logger.debug('%s requested' % (request))

    def request_peers(self,num=35):
        self.send_message(GET_PEERS,struct.pack('!B',num))
        self.logger.debug('List of peers requested')

    def send_have(self,t):
        self.send_message(HAVE,struct.pack('!I',t))
        self.logger.debug('Sended HAVE -> %s to %s' % (t,self))

    def send_stop(self,p,o,s):
        """Ask the peer to stop streaming request (pos, offset, seconds).

        BUGFIX: the original was missing ``self`` and packed '!HIH', which
        the receiving side (process_message, STOP) could not unpack as
        '!HhB'.
        """
        self.send_message(STOP,struct.pack('!HhB',p,o,s))
        self.logger.debug('Sended STOP for ' + str((p,o,s)))

    def send_message(self,id,data=''):
        # Frame = 4-byte length of (id + payload), then id, then payload.
        length = struct.pack('!I',len(id+data))
        self.send_buffer += (length+id+data)

    def send_keep_alive(self):
        try :
            # Sent directly, bypassing send_buffer, to keep the link alive.
            self.socket.send(struct.pack('!I',1) + KEEP_ALIVE)
            self.last_message_sent = time.time()
            self.logger.debug('Sended KEEP-ALIVE')
        except :
            return self.handle_close()

    # This method for safe closing our program
    def notify_closing(self):
        self.socket.send(struct.pack('!I',1) + CLOSE)
        self.handle_close()
        self.logger.info('Gracefully closing our program ... )')

    def handle_close(self):
        """Tear down the connection and hand pending requests back."""
        self.streamer = {}
        self.read_buffer = ''
        self.send_buffer = ''
        r = self.return_requests()
        if r:
            self.C.return_reqs(r)
        self.C.delete_peer(self.ip)
        self.socket.close()
        self.closed = True
        self.logger.info('Closing connection with peer')
'''
We will implement different types of Peer by inheriting Peer class ->
Just need to reimplement next methods if needed:
handle_connect, handle_read, handle_write, send_message, send_keep_alive, notify_closing
'''
class EncryptedPeer(Peer):
    # Placeholder subclass: intended to reimplement the connection/IO
    # methods of Peer with an encrypted transport.  Not implemented yet.
    pass
class UDPPeer(Peer):
    # Placeholder subclass: intended to carry the same protocol over UDP.
    # Not implemented yet.
    pass
# and so on .... :)
# Testing
class sock:
    """Minimal fake socket used by the Peer tests below."""

    def __init__(self):
        self.closed = False
        self.s_buf = ''   # collects everything send() was given
        self.r_buf = []   # queue of canned recv() results

    def setblocking(self, num):
        pass

    def getpeername(self):
        return ('0.0.0.0', 123)

    def fileno(self):
        return 0

    def send(self, data):
        self.s_buf = self.s_buf + data
        return len(data)

    def recv(self, num):
        item = self.r_buf.pop(0)
        if item == 'except':
            # Sentinel value: simulate a failing socket.
            raise Exception
        return item

    def close(self):
        self.closed = True
class ContaineR:
    """Fake StreamContainer recording the calls Peer makes on it."""

    def __init__(self):
        self.start_pos = 0
        self.deleted = False    # set once delete_peer() was called
        self.prepared = ''      # last peer list given to prepare_peers()
        self.h = []             # positions announced via tell_have()

    def add_new_peer(self, ip, port):
        pass

    def set_pos(self, pos):
        self.start_pos = pos

    def delete_peer(self, ip):
        self.deleted = True

    def prepare_peer(self, ip, port):
        pass

    def prepare_peers(self, peers):
        self.prepared = peers

    def can_add_peer(self):
        return True

    def remove(self, peer):
        pass

    def tell_have(self, pos):
        self.h.append(pos)

    def return_reqs(self, reqs):
        pass

    def get_peers(self, num):
        return 'peers_list'
class BuffeR:
    """Fake piece buffer backed by a plain dict."""

    def __init__(self):
        self.pos = 0
        self.pieces = []
        self.buffer = {}

    def put(self, pos, data):
        self.buffer[pos] = data

    def get(self, pos):
        # Missing pieces read back as an empty string.
        return self.buffer.get(pos, '')

    def get_pos(self):
        return struct.pack('!H', self.pos)

    def __contains__(self, pos):
        return pos in self.pieces
def test_read():
    """Exercises Peer.handle_read: handshakes, GET_PEERS, GET_STREAM, STOP, HAVE, CLOSE."""
    s = sock()
    c = ContaineR()
    b = BuffeR()
    s_data = {'content_id':'content_id1234567890123456789012','handshake':'handshake','piece_length':0,'chunk_length':0}
    # Short handshake (no pos/piece_length tail) from a non-server peer.
    p = Peer(s_data,c,b,sock=s,ip='127.0.0.1',port=7668)
    p.send_buffer = ''
    p.handshaked = False
    s.r_buf = [struct.pack('!I',46)+'Salamatsyzby' + 'content_id1234567890123456789012' + struct.pack('!H',78), 'except']
    p.handle_read()
    #assert c.start_pos == 0
    assert not s.closed
    assert not p.streamer
    # Server peer: pos and piece_length must be parsed from the handshake tail.
    p = Peer(s_data,c,b,sock=s,ip='127.0.0.1',port=7668,server=True)
    p.send_buffer = ''
    p.handshaked = False
    s.r_buf = [struct.pack('!I',52)+'Salamatsyzby' + 'content_id1234567890123456789012' + struct.pack('!H',78)\
        + struct.pack('!HI',32,16384), 'except']
    assert p.piece_length == 0
    assert not hasattr(p,'pos')
    p.handle_read()
    #assert c.start_pos == 0
    assert not s.closed
    assert not p.streamer
    assert p.pos == 32
    assert p.piece_length == 16384
    # Server+node peer: the tail must be ignored.
    p = Peer(s_data,c,b,sock=s,ip='127.0.0.1',port=7668,server=True,node=True)
    p.send_buffer = ''
    p.handshaked = False
    s.r_buf = [struct.pack('!I',52)+'Salamatsyzby' + 'content_id1234567890123456789012' + struct.pack('!H',78)\
        + struct.pack('!HI',32,16384), 'except']
    assert p.piece_length == 0
    assert not hasattr(p,'pos')
    p.handle_read()
    #assert c.start_pos == 0
    assert not s.closed
    assert not p.streamer
    assert not hasattr(p,'pos')
    assert p.piece_length == 0
    # Plain client: the tail must also be ignored.
    p = Peer(s_data,c,b,sock=s,ip='127.0.0.1',port=7668)
    p.send_buffer = ''
    p.handshaked = False
    s.r_buf = [struct.pack('!I',52)+'Salamatsyzby' + 'content_id1234567890123456789012' + struct.pack('!H',78)\
        + struct.pack('!HI',32,16384), 'except']
    assert p.piece_length == 0
    assert not hasattr(p,'pos')
    p.handle_read()
    #assert c.start_pos == 0
    assert not s.closed
    assert p.handshaked
    assert not p.streamer
    assert not hasattr(p,'pos')
    assert p.piece_length == 0
    p = Peer(s_data,c,b,sock=s,ip='127.0.0.1',port=7668,server=True)
    p.send_buffer = ''
    p.handshaked = False
    s.r_buf = [struct.pack('!I',52)+'Salamatsyzby' + 'content_id1234567890123456789012' + struct.pack('!H',78)\
        + struct.pack('!HI',32,16384), 'except']
    p.handle_read()
    assert not s.closed
    assert not p.streamer
    p.send_buffer = ''
    p.requested = [Request.Request(b,24,6,15,2)]
    # GET_PEERS: 1-byte payload asks us for peers; longer payload is a peer list.
    s.r_buf = [struct.pack('!I',2) + GET_PEERS + struct.pack('!B',12), struct.pack('!I',12) + GET_PEERS + 'peers_list1','except']
    p.handle_read()
    assert p.send_buffer[5:] == 'peers_list'
    assert c.prepared == 'peers_list1'
    assert not p.closed
    p = Peer(s_data,c,b,sock=s,ip='127.0.0.1',port=7668,server=True)
    p.send_buffer = ''
    p.handshaked = True
    s.r_buf = [struct.pack('!I',13) + GET_STREAM + struct.pack('!HIIH',1,3,45,3),'except']
    assert len(p.streamer) == 0
    p = Peer(s_data,c,b,sock=s,ip='127.0.0.1',port=7668,server=True)
    p.send_buffer = ''
    p.handshaked = True
    p.requested = [Request.Request(b,24,8,2,2)]
    b.pieces.append(1)
    # offset == -1 is a stream request; other offsets carry stream data.
    s.r_buf = [struct.pack('!I',6) + GET_STREAM + struct.pack('!HhB',1,-1,3),struct.pack('!I',13) + GET_STREAM + struct.pack('!Hh',2,0)+ \
        'payload0', struct.pack('!I',13) + GET_STREAM + struct.pack('!Hh',3,0) + 'payload1', struct.pack('!I',13) + GET_STREAM +\
        struct.pack('!Hh',4,0) + 'payload2','except']
    p.handle_read()
    assert len(p.streamer) == 1
    assert len(p.requested[0].buffer) == 2
    assert not p.store.have(23)
    assert not p.closed
    # STOP removes the generator, HAVE is recorded, CLOSE tears us down.
    s.r_buf = [struct.pack('!I',6) + STOP + struct.pack('!HhB',1,-1,3), struct.pack('!I',5) + HAVE + struct.pack('!I',23),struct.pack('!I',1) + CLOSE, 'except']
    p.handle_read()
    assert not p.streamer
    assert p.store.have(23)
    assert c.deleted
def test_write():
    """Exercises Peer.handle_write: buffer flushing and one-chunk-per-call streaming."""
    s_data = {'content_id':'content_id1234567890123456789012','handshake':'handshake','piece_length':0,'chunk_length':0}
    def g(n):
        # Stand-in stream generator yielding '0', '1', ...
        for i in range(n):
            yield str(i)
    c = ContaineR()
    b = BuffeR()
    s = sock()
    p = Peer(s_data,c,b,sock=s,ip='127.0.0.1',port=struct.pack('!H',7668))
    # First write flushes the handshake staged by __init__.
    p.handle_write()
    assert s.s_buf
    p.send_buffer = 'datadata'
    p.handle_write()
    assert not p.send_buffer
    assert s.s_buf == 'handshakedatadata'
    # With generators registered, each call emits exactly one chunk message.
    p.streamer = {0:g(2), 1:g(3)}
    s.s_buf = ''
    p.send_buffer = 'data12'
    p.handle_write()
    assert s.s_buf == 'data12' + struct.pack('!I',2) + GET_STREAM + '0'
    s.s_buf = ''
    assert len(p.streamer) == 2
    p.handle_write()
    assert s.s_buf == struct.pack('!I',2) + GET_STREAM + '1'
    s.s_buf = ''
    p.handle_write()
    # First generator exhausted -> removed; second one takes over.
    assert len(p.streamer) == 1
    p.handle_write()
    p.handle_write()
    assert s.s_buf == struct.pack('!I',2) + GET_STREAM + '0' + struct.pack('!I',2) + GET_STREAM + '1' +\
        struct.pack('!I',2) + GET_STREAM + '2'
    p.handle_write()
    assert s.s_buf == struct.pack('!I',2) + GET_STREAM + '0' + struct.pack('!I',2) + GET_STREAM + '1' +\
        struct.pack('!I',2) + GET_STREAM + '2'
    assert not p.streamer
##def test_close():
## pass
##
##def test_add_stream():
## pass
##
def test_stream():
    """Exercises Peer.stream(): missing pieces yield '', bitfields skip chunks."""
    s_data = {'content_id':'content_id1234567890123456789012','handshake':'handshake','piece_length':16,'chunk_length':2}
    c = ContaineR()
    b = BuffeR()
    s = sock()
    p = Peer(s_data,c,b,sock=s,ip='127.0.0.1',port=struct.pack('!H',7668))
    b.buffer = {1:'thisissimpletext', 2:'bvcgdfhsgtrfdvcg', 3:'thisissimpletext', 4:'testingpurpose45',5:'loremipsumdolors',6:'bulbulyrdasajurg'}
    # Piece 7 is not buffered -> the generator must yield '' (come back later).
    f = p.stream(7,1,{})
    try : d = f.next()
    except StopIteration : d = ''
    assert not d
    # Piece 3: bits set in the bitfield are skipped; piece 4 ('' bitfield) is
    # already complete and skipped entirely; piece 5 streams in full.
    f = p.stream(3,3,{3:BitStream('0b10100100'),4:''})
    assert f.next() == struct.pack('!Hh',3,1) + 'is'
    assert f.next() == struct.pack('!Hh',3,3) + 'si'
    assert f.next() == struct.pack('!Hh',3,4) + 'mp'
    assert f.next() == struct.pack('!Hh',3,6) + 'te'
    assert f.next() == struct.pack('!Hh',3,7) + 'xt'
    assert f.next() == struct.pack('!Hh',5,0) + 'lo'
    assert f.next() == struct.pack('!Hh',5,1) + 're'
    assert f.next() == struct.pack('!Hh',5,2) + 'mi'
    assert f.next() == struct.pack('!Hh',5,3) + 'ps'
    assert f.next() == struct.pack('!Hh',5,4) + 'um'
    assert f.next() == struct.pack('!Hh',5,5) + 'do'
    assert f.next() == struct.pack('!Hh',5,6) + 'lo'
    assert f.next() == struct.pack('!Hh',5,7) + 'rs'
    try : n = f.next()
    except StopIteration : n = ''
    assert not n
def test_put():
    """Exercises Peer.put(): chunk routing into Requests and completion handling."""
    s_data = {'content_id':'content_id1234567890123456789012','handshake':'handshake','piece_length':16,'chunk_length':2}
    c = ContaineR()
    b = BuffeR()
    s = sock()
    p = Peer(s_data,c,b,sock=s,ip='127.0.0.1',port=struct.pack('!H',7668))
    # With no pending requests, data is dropped.
    p.put(1,0,'data')
    assert not p.requested
    p.requested = [Request.Request(b,8,1,15,2)]
    p.put(15,0,'b')
    p.put(15,1,'u')
    assert p.requested[0].buffer[15][1] == 'u'
    # A duplicate offset must not overwrite the first chunk.
    p.put(15,1,'e')
    assert p.requested[0].buffer[15][1] == 'u'
    p.put(15,2,'l')
    p.put(15,3,'b')
    # Position 17 is not part of the request -> ignored.
    p.put(17,0,'t')
    assert len(p.requested[0].buffer) == 1
    p.put(15,4,'u')
    p.put(15,5,'l')
    p.put(15,6,'y')
    # not finished
    assert not p.requested[0].is_completed(15)
    assert not b.buffer
    assert 15 in p.requested[0].to_do
    p.put(15,7,'r')
    # finished
    assert len(b.buffer) == 1
    assert 15 not in p.requested[0].to_do
    assert 15 in b.buffer
    assert p.requested[0].is_completed(15)
    # Pre-filled bitfield: only the missing chunks are needed to finish.
    p.requested[0].pieces[16] = BitStream('0b01011111')
    p.requested[0].buffer[16] = {}
    p.put(16,0,'e')
    assert 16 in p.requested[0].buffer
    p.put(16,2,'t')
    assert 16 in b.buffer
    assert not p.requested
##
#def test_return():
# c = ContaineR()
# b = BuffeR()
# s = sock()
# p = Peer('content_id','handshake',c,b,sock=s,ip='127.0.0.1',port=struct.pack('!H',7668))
# r = Request(c,b,1,0,15,3)
# p.requested = [r]
# ret = p.return_requests()
# assert ret == [(1,0,15,3)]
# class data:
# def __init__(self,l):
# self.len = l
#
# r = Request(c,b,1,0,15,3)
# r.buffer[1] = {'data':data(10)}
# p.requested = [r]
# ret = p.return_requests()
# assert ret == [(1,10,15,3)]
# r = Request(c,b,1,0,15,3)
# r.c[1] = True
# r.buffer[1] = {'data':data(10)}
# r.buffer[2] = {'data':data(8)}
# p.requested = [r]
# ret = p.return_requests()
# assert ret == [(2,8,15,2)]
# p.requested = []
# ret = p.return_requests()
# assert ret == []
# r = Request(c,b,1,0,15,1)
# r.buffer[1] = {'data':data(10)}
# p.requested = [r]
# ret = p.return_requests()
# assert ret == [(1,10,15,1)]
##
##def test_timeout():
## pass
##
#def test():
# c = ContaineR()
# b = BuffeR()
# s = sock()
# p = Peer('content_id','handshake',c,b,sock=s,ip='127.0.0.1',port=struct.pack('!H',7668))
# p.handshaked = True
# p.request_peers()
# assert p.send_buffer == 'handshake' + struct.pack('!H',0) + struct.pack('!I',2) + GET_PEERS + struct.pack('!B',35)
# p.send_buffer = ''
# p.request_position()
# assert p.send_buffer == struct.pack('!I',1) + POSITION
# p.send_buffer = ''
# p.send_message(KEEP_ALIVE)
# assert p.send_buffer == struct.pack('!I',1) + KEEP_ALIVE
#
#
#
#
#
#
#
#
#
#
#
#
#
#
#
#
#
#
#
#
#
# | {"/Container.py": ["/Buffer.py", "/Peer.py", "/Server.py", "/messages.py", "/Input.py", "/Request.py"], "/Server.py": ["/Peer.py"], "/Peer.py": ["/Request.py"], "/Client.py": ["/messages.py", "/Reactor.py", "/Container.py", "/Server.py", "/upnp.py"], "/Request.py": ["/Peer.py"], "/tamchy.py": ["/Client.py"]} |
56,734 | Nurchik/tamchy | refs/heads/master | /messages.py | # -*- coding:utf-8 -*-
import struct
from string import ascii_lowercase as al
from string import digits
from random import choice
# Single-byte protocol message ids (must stay in sync with Peer.py).
KEEP_ALIVE = struct.pack('!B',0)
POSITION = struct.pack('!B',1)
GET_PEERS = struct.pack('!B',2)
GET_STREAM = struct.pack('!B',3)
STOP = struct.pack('!B',4)
CLOSE = struct.pack('!B',5)
ERROR = struct.pack('!B',6)
HAVE = struct.pack('!B',7)
BITFIELD = struct.pack('!B',8)
def construct_handshake(content_id,peer_id,port):
    ''' Structure of handshake
        length|Salamatsyzby|content_id|peer_id|port
        peer_id - 20 bytes, content_id - 32 bytes
        The length prefix is the fixed value 68 (12 + 32 + 20 + 2 + 2).
    '''
    body = ''.join(['Salamatsyzby', content_id, peer_id]) + struct.pack('!H',port)
    return struct.pack('!I',68) + body
def generate_content_id(length=32):
    """Return a random id of ``length`` lowercase-ASCII/digit characters.

    Uses range() instead of the Python-2-only xrange() (identical behaviour
    under Python 2, works under Python 3).  NOTE(review): ``random.choice``
    is not cryptographically secure; use the ``secrets`` module if ids must
    be unguessable.
    """
    return ''.join([choice(al + digits) for i in range(length)])
def generate_peer_id(length=20):
    # Peer ids use the same alphabet as content ids, just a 20-byte default
    # (the handshake reserves 20 bytes for the peer id).
    return generate_content_id(length)
56,735 | Nurchik/tamchy | refs/heads/master | /Client.py | # -*- coding:utf-8 -*-
import multiprocessing,io,pickle,sqlite3,socket,logging,sys,random,re
import messages
from StringIO import StringIO
from Reactor import Reactor
from Container import StreamContainer
from Server import MultiServer as Server
import messages
from pickle import dump
from urllib2 import urlopen
from string import ascii_lowercase as al
from string import digits
from random import choice
import threading
import cherrypy
from jinja2 import Environment, FileSystemLoader
from cherrypy.lib.static import serve_fileobj
import os.path
from upnp import UPNP
from Server import MultiServer
# cherrypy static-file configuration: serve css/js/images from ./static.
STATIC_CONFIG = {
'/' : {'tools.staticdir.root' : os.path.abspath('static')},
'/css' : {'tools.staticdir.on' : True , 'tools.staticdir.dir' : 'css'},
'/js' : {'tools.staticdir.on' : True , 'tools.staticdir.dir' : 'js'},
'/images' : {'tools.staticdir.on' : True , 'tools.staticdir.dir' : 'images'}
}

# Content ids are exactly 32 alphanumeric characters (see messages.py).
pattern = re.compile(r'[a-zA-Z0-9]{32}')
class PeerStorage:
    """Owns every live peer connection and runs their select() loop.

    Peers of all streams live in one flat list; get_peers() filters by
    content_id and prunes closed/timed-out connections as a side effect.
    """

    # Seconds select() may block per iteration; mirrors TIMEOUT in Peer.py.
    SELECT_TIMEOUT = 20.0

    def __init__(self,max_con = 200,max_con_by_stream=50):
        self.peers = []
        self.MC = max_con          # global connection cap
        self.MCBS = max_con_by_stream  # per-stream connection cap
        self.work = True
        self.logger = logging.getLogger('tamchy.PeerStorage')

    def get_peers(self,content_id):
        """Yield live peers of this stream; drop closed/timed-out ones."""
        to_remove = []
        for peer in self.peers:
            if peer.closed:
                to_remove.append(peer)
                continue
            elif peer.timeout:
                peer.handle_close()
                to_remove.append(peer)
                self.logger.debug('Connection with peer (%s) timed out' % (peer))
                continue
            else:
                if peer.content_id == content_id:
                    yield peer
        # Prune after iteration so the list is not mutated mid-loop.
        for peer in to_remove:
            self.remove(peer)

    def add(self,peer):
        # if it's first peer's connection -> start working
        self.peers.append(peer)
        self.logger.debug('Peer (%s) added' % (peer))

    def remove(self,peer):
        try:
            self.peers.remove(peer)
        except:
            pass
        self.logger.debug('Peer (%s) removed' % (peer))

    def can_add_peer(self,content_id=None):
        """Global cap when content_id is None, per-stream cap otherwise."""
        # if content_id is not specified we will limit number of peers by mx_con
        # else - by max_con_by_stream
        if not content_id:
            return len(self.peers) < self.MC
        else:
            # List comprehension instead of len(filter(...)): identical in
            # Python 2, and filter objects have no len() in Python 3.
            return len([x for x in self.peers if x.content_id == content_id]) < self.MCBS

    @property
    def connected_peers(self):
        return len(self.peers)

    def run(self):
        """select() loop dispatching read/write/error events to every peer.

        BUGFIX: the original referenced the ``select`` module and a TIMEOUT
        constant, neither of which exists in this file (select was never
        imported and TIMEOUT lives in Peer.py); select is imported locally
        and SELECT_TIMEOUT is used instead.
        """
        import select  # not in the module header
        self.logger.info('PeerStorage Reactor started')
        while self.work:
            peers = self.peers
            try:
                r,w,e = select.select(peers,peers,peers,self.SELECT_TIMEOUT)
            except:
                # e.g. an empty peer list or a stale descriptor -> retry.
                continue
            for peer in r:
                peer.handle_read()
            for peer in w:
                peer.handle_write()
            for peer in e:
                peer.handle_close()

    def start_serving(self):
        # Run the loop in a daemon thread so it never blocks shutdown.
        t = threading.Thread(target=self.run)
        t.daemon = True
        t.start()

    def close(self):
        self.work = False
        self.logger.info('PeerStorage Reactor terminated')
class ConfigLoader:
    '''
    Configuration file must be stored in tamchy.conf file in same folder as tamchy.py
    format must be -> KEY = VALUE <- and each key,value must be separated with newline(\n)
    '''

    def __init__(self):
        self.default_config = {
            'INCOMING_PORT' : 7890,
            'HTTP_HOST' : '127.0.0.1',
            'HTTP_PORT' : 8001,
            'BUFFERING_SECONDS' : 30,
            'IP_CHECKER' : 'http://wtfismyip.com/text',
            'DEBUG' : 0,
            'LOG_FILE' : 'tamchy.log',
        }
        self.config = self.parse_config()

    def parse_config(self):
        """Read tamchy.conf; fall back to the defaults when unavailable.

        NOTE(review): parsed values stay strings, while the defaults are
        typed (ints); callers must cope with both — confirm before relying
        on numeric config values from file.
        """
        try:
            f = io.open('tamchy.conf','r')
        # maybe conf-file does not exist or it's not accessible
        except:
            return self.default_config
        c = {}
        for line in f:
            # BUGFIX: strip the trailing newline too — the original only
            # removed spaces, so every value kept a '\n' attached.
            line = line.replace(' ','').strip()
            # why if? maybe line is empty (last line)
            if line:
                # split once so values may themselves contain '='.
                key,value = line.split('=',1)
                c[key.upper()] = value
        f.close()
        return c

    def __setitem__(self,key,value):
        self.config[key.upper()] = value

    def __getitem__(self,key):
        if key in self.config:
            return self.config[key]
        # BUGFIX: the original computed the default lookup but dropped the
        # result (missing ``return``), so unknown keys always yielded None.
        return self.default_config.get(key,'')

    def __delitem__(self,key):
        if key in self.config:
            del self.config[key]
class Client:
    """Top-level application object.

    Owns the stream containers, the per-port Servers, the HTTP UI
    (HTTPEngine) and the peer reactor (PeerStorage).
    """
    def __init__(self,debug=False):
        '''
        buffering_units -> This is a number of video stream units for buffering
        all peers list is saved in DB
        '''
        self.config = ConfigLoader()
        self.logger = logging.getLogger('tamchy')
        self.logger.setLevel(self.config['DEBUG'])
        f = logging.FileHandler(self.config['LOG_FILE'])
        f.setLevel(self.config['DEBUG'])
        formatter = logging.Formatter('%(asctime)s -- %(name)s ( %(filename)s : %(lineno)d) -- %(message)s')
        f.setFormatter(formatter)
        self.logger.addHandler(f)
        #self.peer_id = messages.generate_peer_id()
        self.work = True
        # content_id : Stream Container
        self._streams = {}
        # this dict will hold port:Server instance for this port
        self.ports = {}
        self.debug = debug
        self.logger.info('Client started')
        # getting our external ip
        self.ip = self.get_ext_ip()
        self.http = HTTPEngine(self)
        self.PStorage = PeerStorage()
        #self.Reactor = Reactor(self.PStorage)
        # Debug mode skips the HTTP server, UPnP mapping and the reactor
        # thread so tests can construct a Client without touching sockets.
        if not debug:
            self.http.start_http_server()
            u = UPNP()
            port = self.config['INCOMING_PORT']
            # we will try to map same external port to internal port
            u.add_port_mapping(port,port)
            self.PStorage.start_serving()

    def get_ext_ip(self):
        """Return the external IP: stub in debug, else UPnP, else web checker."""
        if self.debug:
            return 'ip'
        u = UPNP()
        ip = u.get_external_ip()
        if not ip:
            try:
                ip = urlopen(self.config['IP_CHECKER']).read().strip()
            except:
                self.logger.error('Cannot obtain external ip')
                raise Exception("Cannot obtain external ip")
        self.logger.debug('Obtained external IP - ' + ip)
        return ip

    def validate(self,c_id):
        # A valid content id is exactly 32 chars matching the module-level
        # ``pattern`` regex.
        if (len(c_id) == 32) and pattern.match(c_id):
            return True
        return False

    def check(self,source,port,nodes):
        """Collect human-readable validation errors; '' means everything is OK."""
        errors = ''
        if port not in xrange(0,65536):
            errors += 'Incorrect incoming port\n'
        return errors

    def create_stream(self,name,source,content_id='',bitrate=0,port=7889,nodes=[],chunk_length=16384):
        '''
        content_id, ip,port (additional ip,port -> [(ip1,port1),(ip2,port2)])

        Build the stream payload, reuse or create the Server for *port*,
        then register a new StreamContainer under *content_id*.
        '''
        errors = self.check(source,port,nodes)
        if errors:
            raise Exception(errors)
        payload = {}
        # keep a caller-provided valid id; otherwise generate a fresh one
        content_id = (content_id if self.validate(content_id) else generate_c_id())
        payload['name'] = name
        payload['content_id'] = content_id
        payload['ip'] = self.ip
        payload['port'] = port
        payload['nodes'] = nodes
        payload['chunk_length'] = chunk_length
        # reuse an existing Server already listening on this port, if any
        server = self.ports.get(port,'')
        if not server:
            try:
                server = Server(port,self.PStorage)
                self.PStorage.add(server)
            # we cannot create socket on given port
            except:
                raise Exception('Cannot Create Server on given port. Give another port')
            self.ports[port] = server
        s = StreamContainer(self,self.PStorage,payload,port,source=source,is_server=True,ext_ip=self.ip)
        # if exception was raised the code below will not run
        self._streams[content_id] = s
        server.register_stream(s)
        self.logger.debug('New StreamContainer (' + content_id + ') added to streams')

    def open_stream(self,file,port=7889):
        """Load a pickled .tamchy info dict from *file* and register a stream."""
        #try:
        #    file = open(file,'rb')
        try:
            info = pickle.load(file)
            self.logger.info('Successfully loaded tamchy-file')
        except:
            raise Exception('Error. Corrupted tamchy-file')
        #except:
        #    self.logger.error('Cannot open file')
        #    raise Exception('Cannot open tamchy-file')
        server = self.ports.get(port,'')
        if not server:
            try:
                server = Server(port,self.PStorage)
                self.PStorage.add(server)
            # we cannot create socket on given port
            except:
                raise Exception('Cannot Create Server on given port. Give another port')
            self.ports[port] = server
        s = StreamContainer(self,self.PStorage,info,port,ext_ip=self.ip)
        self._streams[info['content_id']] = s
        server.register_stream(s)
        self.logger.debug('New StreamContainer (' + info['content_id'] + ') added to streams')

    def close_container(self,container):
        """Tear down one stream; close its Server too when it was the last user."""
        server = container.Server
        server.unregister_stream(container)
        container.close()
        # maybe container is not created
        try:
            del self._streams[container.content_id]
        except:
            pass
        # if there are no more StreamContainers
        if not server.streams:
            # closing incoming port
            server.close()
            del self.ports[server.port]
        self.logger.debug('StreamContainer (%s) closed.' % (container.content_id))

    def close(self):
        """Shut everything down: all streams, the HTTP server, the reactor."""
        for i in self._streams.values():
            i.close()
        self.http.stop_http_server()
        self.PStorage.close()
        self.work = False
        self.logger.info('Client terminated')

    def __contains__(self,stream_id):
        return stream_id in self._streams

    def get(self,id):
        # None (not KeyError) for unknown ids; HTTP handlers rely on this
        return self._streams.get(id,None)

    def get_stream(self,stream_id,buf_seconds):
        return self._streams[stream_id].B.get_stream(buf_seconds)

    def get_list_of_streams(self):
        """Return [(content_id, name), ...] for the UI stream listing."""
        d = []
        for id,container in self._streams.items():
            d.append((id,container.name))
        return d
class HTTPEngine:
    """CherryPy-backed web UI bound to a Client instance.

    Pages: index, stream listing/detail, open/create stream forms,
    .tamchy/.m3u downloads and the raw media stream endpoint.
    """
    def __init__(self,Client):
        self.Client = Client
        self.config = Client.config
        self.logger = logging.getLogger('tamchy.HTTPEngine')
        self.logger.setLevel(self.config['DEBUG'])
        f = logging.FileHandler(self.config['LOG_FILE'])
        f.setLevel(self.config['DEBUG'])
        formatter = logging.Formatter('%(asctime)s -- %(name)s ( %(filename)s : %(lineno)d) -- %(message)s')
        f.setFormatter(formatter)
        self.logger.addHandler(f)
        self.logger.info('HTTPEngine started')
        # Jinja2 environment; templates are looked up under ./templates
        self.env = Environment(loader=FileSystemLoader('templates'))

    def start_http_server(self):
        """Mount this object at '/' and start CherryPy on the configured host/port."""
        cherrypy.config.update({'server.socket_host': self.config['HTTP_HOST'],'server.socket_port': self.config['HTTP_PORT'],'environment': 'production'})
        cherrypy.tree.mount(self,'/',config=STATIC_CONFIG)
        cherrypy.engine.start()

    def stop_http_server(self):
        cherrypy.engine.exit()

    # -------------------------- CherryPy HTTP methods ---------------------------------------
    @cherrypy.expose
    def index(self):
        tmpl = self.env.get_template('tamchy/index.html')
        return tmpl.render(errors=[])

    @cherrypy.expose
    def streams(self,id=None):
        """Without *id*: list all streams; with *id*: show that stream or 404."""
        tmpl = self.env.get_template('tamchy/streams.html')
        if id is None:
            return tmpl.render(streams=self.Client.get_list_of_streams(),errors=[])
        stream = self.Client.get(id)
        if stream is None:
            raise cherrypy.HTTPError(404,'No matching stream')
        tmpl = self.env.get_template('tamchy/stream.html')
        return tmpl.render(stream=stream,errors=[])

    @cherrypy.expose
    def open_stream(self,stream_file=None):
        """GET: show the upload form; POST: load the uploaded .tamchy file."""
        tmpl = self.env.get_template('tamchy/open_stream.html')
        if cherrypy.request.method == 'GET':
            return tmpl.render(errors=[],success=False)
        # POST
        else:
            if stream_file is not None:
                # checking extension of a file
                if stream_file.filename.split('.')[-1] != 'tamchy':
                    return tmpl.render(errors=['Unknown type of the file. Please check the extension'],success=False)
                try:
                    self.Client.open_stream(stream_file.file)
                except Exception as e:
                    # e.message is Python-2-only
                    return tmpl.render(errors=[e.message],success=False)
                return tmpl.render(errors=[],success=True)
            else:
                return tmpl.render(errors=['Please select tamchy-file'],success=False)

    @cherrypy.expose
    def create_stream(self,name=None,source=None,content_id='',port=7889):
        """GET: show the creation form; POST: create a stream from the fields."""
        tmpl = self.env.get_template('tamchy/create_stream.html')
        if cherrypy.request.method == 'GET':
            return tmpl.render(errors=[],success=False)
        else:
            # form-filling check!
            for arg in (name,source):
                if not arg:
                    return tmpl.render(errors=['Please fill all of the fields'],success=False)
            try:
                self.Client.create_stream(name,source,content_id,int(port))
            except Exception as e:
                return tmpl.render(errors=[e.message],success=False)
            return tmpl.render(errors=[],success=True)

    @cherrypy.expose
    def exit(self):
        # tears down the whole Client and flips Client.work to False
        self.Client.close()
        #return 'Goodbye!'

    @cherrypy.expose
    def delete(self,id):
        tmpl = self.env.get_template('tamchy/delete.html')
        container = self.Client.get(id)
        if container is None:
            raise cherrypy.HTTPError(404,'No matching stream')
        # NOTE(review): Client defines close_container(), not delete_stream();
        # this call looks stale and would raise AttributeError -- confirm.
        self.Client.delete_stream(container)
        return tmpl.render(errors=[])

    @cherrypy.expose
    def file(self,id=None,fmt='tamchy'):
        """Serve the stream's .tamchy file, or an .m3u playlist pointing at /stream/<id>."""
        stream = self.Client.get(id)
        if stream is None:
            raise cherrypy.HTTPError(404,'No matching stream')
        if fmt == 'tamchy':
            return serve_fileobj(stream.get_file(), "application/x-download", disposition='attachment',name=stream.name + '.tamchy')
        if fmt == 'playlist':
            playlist = '''#EXTM3U\n#EXTINF:-1, {0}\n{1}'''.format(stream.name,self.config['HTTP_HOST'] + ':' + str(self.config['HTTP_PORT']) + '/stream/' + id)
            return serve_fileobj(StringIO(playlist), "application/x-download", disposition='attachment',name=stream.name + '.m3u')

    @cherrypy.expose
    def stream(self,id):
        """Stream the media to the player as a chunked octet-stream."""
        cherrypy.response.headers['Content-Type'] = 'application/octet-stream'
        cherrypy.response.stream = True
        if id not in self.Client:
            raise cherrypy.HTTPError(404,'No matching stream')
        stream = self.Client.get_stream(id,BUFFERING_SECONDS)
        # the loop body executes once; the returned object does the actual
        # streaming under cherrypy.response.stream = True
        while True:
            # this is http video stream
            return stream
    # -*-*-*-*-*-*-*-*-*-*-*-*-* CherryPy HTTP methods -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-
def generate_c_id(length=32):
    """Return a random content id of *length* chars drawn from al + digits.

    The alphabet is built once instead of being re-concatenated on every
    iteration as the original did.
    """
    alphabet = al + digits
    return ''.join(choice(alphabet) for _ in xrange(length))
# ---------------------------------------- Testing ----------------------------------------------------
class SC:
    """Stand-in for StreamContainer used by the unit tests below.

    Records the content id and Client, and simulates a constructor
    failure whenever payload['name'] is not 'success'.
    """
    def __init__(self, Client, Ps, payload, port, source='', bitrate=0,
                 max_connections=50, is_server=True, ext_ip=''):
        self.content_id = payload['content_id']
        self.Client = Client
        #self.Server = server
        name = payload['name']
        if name != 'success':
            raise Exception('Error')

    def close(self):
        pass
class ClienT:
    """Minimal Client stand-in: exposes only the attribute the tests touch."""
    def __init__(self):
        self.streams = {}
class PeeR:
    """Scriptable fake peer for the PeerStorage tests: its closed/timeout
    flags can be flipped by hand and handle_close() marks it closed."""
    def __init__(self, id, c_id='sssss'):
        self.id = id
        self.content_id = c_id
        # both flags start cleared; tests flip them directly
        self.closed = False
        self.timeout = False

    def handle_close(self):
        self.closed = True
def test():
    """Exercise Client's stream/port bookkeeping using the SC stub."""
    # this is a little hack to replace imported StreamContainer with another class to make tests
    global StreamContainer
    StreamContainer = SC
    c = Client(debug=True)
    assert not c.ports
    assert not c._streams
    c.create_stream('success','source')
    assert c._streams
    assert len(c.ports) == 1
    c.create_stream('success','source1')
    assert len(c._streams) == 2
    assert len(c.ports) == 1
    c.create_stream('success','source2')
    assert len(c._streams) == 3
    assert len(c.ports) == 1
    # a new port means a second Server instance
    c.create_stream('success','source3',port=5463)
    assert len(c._streams) == 4
    assert len(c.ports) == 2
    # SC raises for name != 'success': the stream must not be registered ...
    try:
        c.create_stream('fail','source3',port=5463)
    except:
        pass
    assert len(c._streams) == 4
    assert len(c.ports) == 2
    # ... but a Server created before the failure stays in c.ports
    try:
        c.create_stream('fail','source3',port=5465)
    except:
        pass
    assert len(c._streams) == 4
    assert len(c.ports) == 3
    err = False
    # an out-of-range port is rejected by Client.check before any side effect
    try:
        c.create_stream('success','source3',port=5465435345)
    except:
        err = True
    assert len(c._streams) == 4
    assert len(c.ports) == 3
    assert err
    print(c.ports[7889].streams)
    s = c.ports[7889].streams.keys()
    s1 = c.ports[7889].streams[s[0]]
def test_PeerStorage():
    """Check PeerStorage capacity limits and get_peers' lazy cleanup."""
    ps = PeerStorage(max_con=15,max_con_by_stream=5)
    peers = []
    for i in xrange(8):
        peer = PeeR(i)
        peers.append(peer)
        ps.add(peer)
    assert ps.can_add_peer()
    assert ps.can_add_peer(content_id='content_id')
    # retag peers 0..3 to the same stream
    for peer in peers:
        peer.content_id = 'content_id'
        if peer.id == 3:
            break
    assert ps.can_add_peer()
    assert ps.can_add_peer(content_id='content_id')
    assert not list(ps.get_peers('buuhin'))
    # a fifth peer on the stream reaches the per-stream cap (5)
    peers[4].content_id = 'content_id'
    assert ps.can_add_peer()
    assert not ps.can_add_peer(content_id='content_id')
    assert len(ps.peers) == 8
    v = list(ps.get_peers('content_id'))
    assert len(v) == 5
    # closed peers are skipped and removed by get_peers
    peers[2].closed = True
    peers[4].closed = True
    v = list(ps.get_peers('content_id'))
    assert len(v) == 3
    assert len(ps.peers) == 6
    # a timed-out peer gets handle_close()d and removed as well
    peers[0].timeout = True
    assert not peers[0].closed
    v = list(ps.get_peers('content_id'))
    assert len(v) == 2
    assert len(ps.peers) == 5
    assert v[0].id == 1
    assert v[1].id == 3
    assert peers[0].closed
# -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-* Testing -*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-*-
if __name__ == '__main__':
    # Manual smoke test: `python <file> debug s` creates a stream,
    # `python <file> debug <anything>` opens umut.tamchy.
    mode = sys.argv[1]
    if mode == 'debug':
        t = sys.argv[2]
        c=Client(debug=True)
        if t == 's':
            # NOTE(review): Client defines create_stream/open_stream, not
            # _create_stream/_open_stream -- these calls look stale; confirm.
            c._create_stream('test','http://127.0.0.1:8080',content_id='w5vi59e7iysc3uu60pn7gasxkwf3hecc')
        else:
            file = open('umut.tamchy','rb')
            c._open_stream(file,port=6590)
        # keep the process alive while the stream runs
        while True:
            pass
    #t = sys.argv[1]
    #c=Client(debug=True)
    #if t == 's':
    #    c._create_stream('test','http://127.0.0.1:8080',content_id='w5vi59e7iysc3uu60pn7gasxkwf3hecc')
    #else:
    #    file = open('umut.tamchy','rb')
    #    c._open_stream(file,port=6590)
    #while True:
    #    pass
56,736 | Nurchik/tamchy | refs/heads/master | /Request.py | import struct
import bitstring
import Peer
class Request:
    """One outstanding GET_STREAM request: *seconds* pieces starting at *pos*.

    Chunks arriving from peers are collected per piece in self.buffer;
    once every chunk of a piece is present the piece is assembled in
    offset order and handed to the Buffer.
    """
    def __init__(self,Buffer,piece_length,chunk_length,pos,seconds):
        self.Buffer = Buffer
        #self.piece_length = piece_length
        self.pos = pos
        self.seconds = seconds
        # one past the last requested piece position
        self.ps = pos + seconds
        self.chunk_length = chunk_length
        # in all requests offset = -1
        offset = -1
        self.info = str((pos,offset,seconds))
        self.request = Peer.GET_STREAM + struct.pack('!HhB',pos,offset,seconds)
        # self.buffer maps piece pos -> {offset: chunk data}
        self.buffer = {}
        # piece pos -> bitfield of received offsets
        self.pieces = {}
        # chunks per piece; '//' is what the Py2 '/' did for ints, and
        # stays correct under Python 3
        self.num = piece_length // chunk_length
        self.to_do = self._prepare(pos,seconds)

    def __contains__(self,item):
        # membership = "piece position falls inside the requested window"
        return (item >= self.pos and item < self.ps)

    def __str__(self):
        return self.info

    def _prepare(self,pos,seconds):
        """Return the list of piece positions still to be downloaded."""
        d = []
        for i in xrange(pos,pos + seconds):
            d.append(i)
        return d

    def put(self,pos,offset,data):
        """Store one chunk; flush the piece to the Buffer when it completes."""
        if not self.check(pos,offset,data):
            return
        if pos not in self.pieces:
            self.pieces[pos] = self.construct_bitfield()
            self.buffer[pos] = {}
        self.pieces[pos][offset] = True
        self.buffer[pos][offset] = data
        if self.is_completed(pos):
            # Assemble the chunks in offset order.  The original joined
            # dict values in raw iteration order, which is only
            # accidentally correct for small int keys in CPython 2.
            D = ''.join(chunk for _, chunk in sorted(self.buffer[pos].items()))
            # free the per-chunk copy: the assembled piece now lives in Buffer
            del self.buffer[pos]
            self.Buffer.put(pos,D)
            self.to_do.remove(pos)

    def check(self,pos,offset,data):
        """Reject duplicate offsets, out-of-range offsets and wrong-size chunks."""
        # we already downloaded a data
        # get(pos,{}) -> because if we do not yet have pos in self.buffer
        # we would get an error while checking offset
        if offset in self.buffer.get(pos,{}) or (offset > self.num - 1):
            return False
        if len(data) != self.chunk_length:
            ## last chunk => it can have any size
            #if offset != self.num - 1:
            return False
        return True

    @property
    def completed(self):
        # if self.to_do is not empty we have not completed => not [...] == False
        return not self.to_do

    def is_completed(self,pos):
        """True when every chunk bit of piece *pos* is set."""
        for i in self.pieces[pos]:
            # if data for given offset is not received from peers
            if not i:
                return False
        return True

    def construct_bitfield(self):
        # self.num zero bits, one per chunk of a piece
        bitfield = bitstring.BitStream(self.num)
        return bitfield

    def get_request(self):
        """Serialize: length prefix + GET_STREAM header + one
        (pos, bitfield_length, bitfield) entry per known piece.

        Completed pieces advertise a zero-length bitfield.
        """
        bitfields = ''
        for pos,bitfield in self.pieces.items():
            if self.is_completed(pos):
                # if piece completed, we will tell that bitfield's length = 0
                bitfield = ''
            else:
                # bitstring exposes .bytes as a property; the original
                # called it (TypeError at runtime).  tobytes() -- as in the
                # author's commented-out line -- returns the padded bytes.
                bitfield = bitfield.tobytes()
            bitfields += struct.pack('!HH',pos,len(bitfield)) + bitfield
        msg = self.request + bitfields
        msg_length = struct.pack('!I',len(msg))
        return msg_length + msg
#Testing
class BuffeR:
    """Trivial Buffer stand-in: records each piece by its position."""
    def __init__(self):
        self.b = {}

    def put(self, pos, data):
        self.b[pos] = data
def test():
    """Walk a Request (piece=16, chunk=2, pos=3, seconds=2) through chunk
    arrival, duplicate/garbage rejection and piece completion."""
    b = BuffeR()
    r = Request(b,16,2,3,2)
    assert r.num == 8
    assert r.to_do == [3,4]
    r.put(3,2,'da')
    assert r.buffer[3][2] == 'da'
    assert r.pieces[3][2]
    assert len(r.buffer) == 1
    assert len(r.buffer[3]) == 1
    # a duplicate offset is ignored, the first chunk wins
    r.put(3,2,'tt')
    assert r.buffer[3][2] == 'da'
    assert len(r.buffer) == 1
    assert len(r.buffer[3]) == 1
    # an offset beyond the last chunk (num-1 == 7) is ignored
    r.put(3,8,'tt')
    assert len(r.buffer[3]) == 1
    assert r.pieces[3].bin == '00100000'
    # a wrong-sized chunk is ignored
    r.put(3,7,'ttf')
    assert len(r.buffer[3]) == 1
    assert r.pieces[3].bin == '00100000'
    r.put(3,7,'ts')
    assert len(r.buffer[3]) == 2
    assert len(r.buffer) == 1
    assert r.pieces[3].bin == '00100001'
    assert not r.is_completed(3)
    assert not r.completed
    r.put(3,0,'th')
    r.put(3,1,'is')
    r.put(3,3,'ta')
    r.put(3,4,'fo')
    r.put(3,5,'rt')
    assert r.pieces[3].bin == '11111101'
    assert not r.is_completed(3)
    # the final chunk completes piece 3: assembled and flushed to the Buffer
    r.put(3,6,'es')
    assert not r.buffer
    assert b.b[3] == 'thisdatafortests'
    assert r.is_completed(3)
    assert not r.completed
    assert r.pieces[3].bin == '11111111'
    r.put(4,0,'th')
    r.put(4,1,'is')
    assert len(r.buffer[4]) == 2
    assert not r.is_completed(4)
    r.put(4,2,'da')
    r.put(4,3,'ta')
    r.put(4,4,'fo')
    r.put(4,5,'rt')
    r.put(4,6,'es')
    r.put(4,7,'ts')
    assert r.pieces[4].bin == '11111111'
    assert not r.buffer
    assert b.b[4] == 'thisdatafortests'
    assert r.is_completed(4)
    # both pieces done -> whole request completed
    assert not r.to_do
    assert r.completed
| {"/Container.py": ["/Buffer.py", "/Peer.py", "/Server.py", "/messages.py", "/Input.py", "/Request.py"], "/Server.py": ["/Peer.py"], "/Peer.py": ["/Request.py"], "/Client.py": ["/messages.py", "/Reactor.py", "/Container.py", "/Server.py", "/upnp.py"], "/Request.py": ["/Peer.py"], "/tamchy.py": ["/Client.py"]} |
56,737 | Nurchik/tamchy | refs/heads/master | /tamchy.py | from Client import Client
import os
if __name__ == '__main__':
    c = Client()
    # busy-wait until the HTTP UI's exit handler sets c.work to False
    while c.work:
        pass
    # user clicked exit button
    c.close()
    # 1 --> SIGHUP
    os.kill(os.getppid(),1)
56,738 | wikimedia/labs-tools-github-pr-closer | refs/heads/master | /app.py | import os
from flask import Flask, render_template
from webhook_handler import webhook
app = Flask(__name__)
app.register_blueprint(webhook)
@app.route("/")
def index():
    """Render the landing page, passing the GitHub App id for display.

    Fix: the env var holding the app id is named "GHPRC_APP_ID"
    (github.GITHUB_APP_ID_ENVVAR); the old code looked up the literal
    name "GITHUB_APP_ID_ENVVAR", which is never set, so app_id was
    always None.
    """
    return render_template("index.html", app_id=os.environ.get("GHPRC_APP_ID"))
if __name__ == "__main__":
    # Local development entry point; production serves the app via WSGI.
    app.run()
| {"/app.py": ["/webhook_handler.py"], "/webhook_handler.py": ["/github.py"]} |
56,739 | wikimedia/labs-tools-github-pr-closer | refs/heads/master | /github.py | import jwt
import time
import os
import requests
GITHUB_PRIVATE_KEY_ENVVAR = "GHPRC_JWT_SIGNING_KEY"
GITHUB_APP_ID_ENVVAR = "GHPRC_APP_ID"
GITHUB_APP_SECRET_ENVVAR = "GHPRC_APP_SECRET"
def get_jwt() -> str:
    """Build a short-lived (10 minute) RS256 app JWT for the GitHub API.

    Reads the signing key and app id from the environment (see the
    *_ENVVAR constants above).
    """
    signing_key = os.environ.get(GITHUB_PRIVATE_KEY_ENVVAR)
    app_id = os.environ.get(GITHUB_APP_ID_ENVVAR)
    # Read the clock once so iat/exp are exactly 10 minutes apart; two
    # separate time.time() calls could straddle a second boundary.
    now = int(time.time())
    payload = {
        "iat": now,
        "exp": now + (10 * 60),
        "iss": app_id,
    }
    return jwt.encode(payload, signing_key, algorithm="RS256")
def get_message_template() -> str:
    """Read message_template.md from the directory containing this module."""
    directory = os.path.dirname(__file__)
    with open(os.path.join(directory, "message_template.md"), "rt") as handle:
        return handle.read()
def get_install_id(jwt_token, api_type, name):
    """Return the installation id for /{api_type}/{name} (e.g. repos/owner/x)."""
    url = "https://api.github.com/" + api_type + "/" + name + "/installation"
    headers = {
        "Authorization": "Bearer " + jwt_token,
        "Accept": "application/vnd.github.machine-man-preview+json",
    }
    response = requests.get(url, headers=headers)
    return response.json()["id"]
def get_access_token(jwt_token, install_id, additional_args=None):
    """Create an installation access token limited to the permissions
    this app needs (read metadata/single file, write pull requests).

    *additional_args* may carry extra restrictions such as repository_ids;
    its 'permissions' key, if any, is overridden.
    """
    extra = {} if additional_args is None else additional_args
    params = dict(extra)
    params["permissions"] = {
        "metadata": "read",
        "pull_requests": "write",
        "single_file": "read",
    }
    response = requests.post(
        f"https://api.github.com/app/installations/{install_id}/access_tokens",
        json=params,
        headers={
            "Authorization": "Bearer " + jwt_token,
            "Accept": "application/vnd.github.machine-man-preview+json",
        },
    )
    return response.json()["token"]
class Repo:
    """One installed repository, with a lazily fetched installation token."""
    def __init__(self, repo_name, repo_id):
        self.repo_name = repo_name
        self.repo_id = repo_id
        self.access_token = None  # fetched on first use

    def set_access_token(self, token):
        self.access_token = token

    def fetch_access_token(self):
        """Obtain a token scoped to just this repository and cache it."""
        jwt_token = get_jwt()
        install_id = get_install_id(jwt_token, "repos", self.repo_name)
        self.set_access_token(
            get_access_token(jwt_token, install_id, {"repository_ids": [self.repo_id]})
        )

    def get_access_token(self):
        # lazy fetch-and-cache
        if self.access_token is None:
            self.fetch_access_token()
        return self.access_token

    def does_file_exist(self, file_name):
        """Return True if *file_name* exists in the repo (contents API -> 200)."""
        return (
            requests.get(
                "https://api.github.com/repos/"
                + self.repo_name
                + "/contents/"
                + file_name,
                headers={
                    "Accept": "application/vnd.github.v3+json",
                    "Authorization": f"token {self.get_access_token()}",
                },
            ).status_code
            == 200
        )

    def should_close(self, author):
        """Close PRs on Gerrit-mirrored repos (a .gitreview file exists),
        except dependabot PRs handled by a dependabot-gerrit workflow."""
        if author == "dependabot[bot]" and self.does_file_exist(
            ".github/workflows/dependabot-gerrit.yml"
        ):
            return False
        return self.does_file_exist(".gitreview")

    def comment_and_close(self, pull_request):
        """Post the templated explanation comment, then close the PR."""
        comment_url = (
            "https://api.github.com/repos/"
            + self.repo_name
            + "/issues/"
            + str(pull_request["number"])
            + "/comments"
        )
        pr_edit_url = (
            "https://api.github.com/repos/"
            + self.repo_name
            + "/pulls/"
            + str(pull_request["number"])
        )
        # fill the PR author into the message template
        message = get_message_template().replace(
            "{{author}}", pull_request["user"]["login"]
        )
        requests.post(
            comment_url,
            json={
                "body": message,
            },
            headers={
                "Accept": "application/vnd.github.v3+json",
                "Authorization": f"token {self.get_access_token()}",
            },
        )
        requests.patch(
            pr_edit_url,
            json={
                "state": "closed",
            },
            headers={
                "Accept": "application/vnd.github.v3+json",
                "Authorization": f"token {self.get_access_token()}",
            },
        )
| {"/app.py": ["/webhook_handler.py"], "/webhook_handler.py": ["/github.py"]} |
56,740 | wikimedia/labs-tools-github-pr-closer | refs/heads/master | /webhook_handler.py | import hmac
import os
from flask import request, Blueprint, jsonify
import github
webhook = Blueprint("webhook", __name__, url_prefix="")
@webhook.route("/github", methods=["POST"])
def handle_github_hook():
    """Entry point for GitHub webhook.

    Verifies the HMAC signature, then closes newly opened/reopened pull
    requests on repositories where Repo.should_close says so.
    """
    # X-Hub-Signature has the form "sha1=<hexdigest>"
    signature = request.headers.get("X-Hub-Signature")
    sha, signature = signature.split("=")
    secret = os.environ.get(github.GITHUB_APP_SECRET_ENVVAR).encode("utf-8")
    hashhex = hmac.new(secret, request.data, digestmod="sha1").hexdigest()
    if not hmac.compare_digest(hashhex, signature):
        # silently accept forged deliveries (200) so nothing is leaked
        # and GitHub does not retry
        return jsonify({}), 200
    # only act on freshly opened or reopened pull requests
    if request.json["action"] not in ("opened", "reopened"):
        return jsonify({}), 200
    repo = github.Repo(
        request.json["repository"]["full_name"], request.json["repository"]["id"]
    )
    if not repo.should_close(request.json["pull_request"]["user"]["login"]):
        return jsonify({}), 200
    repo.comment_and_close(request.json["pull_request"])
    print(
        "Closed pr #"
        + str(request.json["pull_request"]["number"])
        + " on repository "
        + repo.repo_name,
        " made by ",
        request.json["pull_request"]["user"]["login"],
    )
    return jsonify({}), 200
| {"/app.py": ["/webhook_handler.py"], "/webhook_handler.py": ["/github.py"]} |
56,743 | ManuelLizama/Arriendo_cletas | refs/heads/master | /clientes/urls.py | from django.conf.urls import url
from clientes import views
# URL routes for the clientes app; both target views are @login_required.
urlpatterns = [
    url(r'^nuevocliente/$', views.nuevo_cliente_view, name='nuevo_cliente'),
    url(r'^lista_clientes/$', views.lista_cliente, name='lista_cliente'),
]
| {"/clientes/views.py": ["/clientes/models.py"], "/reserva/forms.py": ["/reserva/models.py"], "/reserva/views.py": ["/reserva/forms.py", "/reserva/models.py"], "/reserva/models.py": ["/clientes/models.py"]} |
56,744 | ManuelLizama/Arriendo_cletas | refs/heads/master | /clientes/views.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.contrib import messages
from .forms import ClienteForm
from clientes.models import Cliente
# Create your views here.
@login_required
def nuevo_cliente_view(request):
    """Create a Cliente from POST data and render the creation page.

    Fix: the original rebuilt an empty ClienteForm after every request,
    so a bound form's validation errors were silently discarded.  Now
    the form is only reset after a successful save.
    """
    form = ClienteForm()
    if request.POST:
        form = ClienteForm(request.POST)
        if form.is_valid():
            # 'foto' arrives as a plain POST value (not a form field),
            # so it is attached to the instance after the initial save.
            foto = request.POST.get('foto')
            cl = form.save()
            cl.foto = foto
            cl.save()
            messages.success(request, 'Cliente creado correctamente.')
            form = ClienteForm()  # fresh form only after a successful save
    clientes = Cliente.objects.all()
    return render(request, 'clientes/nuevo_cliente.html', {
        'form': form,
        'clientes': clientes,
    })
@login_required
def lista_cliente(request):
    """Render the page listing every registered client."""
    context = {'clientes': Cliente.objects.all()}
    return render(request, 'clientes/lista_clientes.html', context)
| {"/clientes/views.py": ["/clientes/models.py"], "/reserva/forms.py": ["/reserva/models.py"], "/reserva/views.py": ["/reserva/forms.py", "/reserva/models.py"], "/reserva/models.py": ["/clientes/models.py"]} |
56,745 | ManuelLizama/Arriendo_cletas | refs/heads/master | /reserva/migrations/0001_initial.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-11-20 02:00
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated initial migration (Django 1.11.5): creates the
    # Bicicleta and Reserva tables.  Applied migrations should not be
    # hand-edited.
    initial = True
    dependencies = [
        ('clientes', '__first__'),
    ]
    operations = [
        migrations.CreateModel(
            name='Bicicleta',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nombre', models.CharField(help_text='Nombre Producto', max_length=70, verbose_name='nombre')),
                ('descripcion', models.CharField(blank=True, help_text='Descripcion Producto', max_length=100, null=True, verbose_name='descripcion')),
                ('valor', models.IntegerField(help_text='Valor del Producto', verbose_name='valor')),
                ('activo', models.BooleanField(default=False)),
            ],
        ),
        migrations.CreateModel(
            name='Reserva',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('boleta', models.IntegerField(help_text='Numero de Boleta', verbose_name='boleta')),
                ('bici', models.ForeignKey(default=True, on_delete=django.db.models.deletion.CASCADE, to='reserva.Bicicleta', verbose_name='bicicleta')),
                ('cliente', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='clientes.Cliente', verbose_name='cliente')),
            ],
        ),
    ]
| {"/clientes/views.py": ["/clientes/models.py"], "/reserva/forms.py": ["/reserva/models.py"], "/reserva/views.py": ["/reserva/forms.py", "/reserva/models.py"], "/reserva/models.py": ["/clientes/models.py"]} |
56,746 | ManuelLizama/Arriendo_cletas | refs/heads/master | /reserva/forms.py | from django import forms
from .models import Reserva
class ReservaForm ( forms.ModelForm ):
    """
    Reservation form: exposes every field of the Reserva model.
    """
    # nombre = forms.CharField( required = True, label = "Nombre", widget = forms.TextInput( attrs = {"class" : "form-control"} ) )
    class Meta:
        model = Reserva
        fields = '__all__'
56,747 | ManuelLizama/Arriendo_cletas | refs/heads/master | /reserva/urls.py | from django.conf.urls import url
from . import views
# URL routes for the reserva app; the view is @login_required.
urlpatterns = [
    url(r'^nueva_reserva/$', views.nueva_reserva_view, name='nueva_reserva'),
]
56,748 | ManuelLizama/Arriendo_cletas | refs/heads/master | /reserva/views.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import render
from django.http import HttpResponse
from django.contrib.auth.decorators import login_required
from django.contrib import messages
from django.template import loader
from .forms import ReservaForm
from .models import Reserva
# Create your views here.
@login_required
def nueva_reserva_view(request):
    """Create a Reserva from POST data and render the creation page.

    Fix: the original rebuilt an empty ReservaForm after every request,
    so a bound form's validation errors were silently discarded.  Now
    the form is only reset after a successful save.
    """
    form = ReservaForm()
    if request.POST:
        form = ReservaForm(request.POST)
        if form.is_valid():
            form.save()
            messages.success(request, 'Reserva creada correctamente.')
            form = ReservaForm()  # fresh form only after a successful save
    reservas = Reserva.objects.all()
    return render(request, 'reserva/nueva_reserva.html', {
        'form': form,
        'reserva': reservas,
    })
| {"/clientes/views.py": ["/clientes/models.py"], "/reserva/forms.py": ["/reserva/models.py"], "/reserva/views.py": ["/reserva/forms.py", "/reserva/models.py"], "/reserva/models.py": ["/clientes/models.py"]} |
56,749 | ManuelLizama/Arriendo_cletas | refs/heads/master | /reserva/models.py | # -*- coding: utf-8 -*-
from django.db import models
from clientes.models import Cliente
from django.utils import timezone
# Create your models here.
class Bicicleta(models.Model):
nombre = models.CharField( max_length = 70, verbose_name = u'nombre', help_text = "Nombre Producto" )
descripcion = models.CharField( blank = True, null = True, max_length = 100, verbose_name = u'descripcion', help_text = "Descripcion Producto" )
valor = models.IntegerField( verbose_name = u'valor', help_text = "Valor del Producto" )
activo = models.BooleanField( default = False)
fecha_creacion = models.DateField( auto_now_add = True, blank=True )
"""docstring for Bicicleta"""
class Reserva(models.Model):
    """A rental record linking a Cliente to a Bicicleta via a receipt number."""
    # NOTE(review): ForeignKey without on_delete only works on Django < 2.0;
    # the generated migration records CASCADE for both relations.
    cliente = models.ForeignKey( Cliente, verbose_name = u'cliente' )
    boleta = models.IntegerField( verbose_name = u'boleta', help_text = "Numero de Boleta" )
    bici = models.ForeignKey( Bicicleta, verbose_name = u'bicicleta', default = True)
    fecha_creacion = models.DateField( auto_now_add = True , blank=True)
    fecha_modificacion = models.DateField( auto_now = True , )
| {"/clientes/views.py": ["/clientes/models.py"], "/reserva/forms.py": ["/reserva/models.py"], "/reserva/views.py": ["/reserva/forms.py", "/reserva/models.py"], "/reserva/models.py": ["/clientes/models.py"]} |
56,750 | ManuelLizama/Arriendo_cletas | refs/heads/master | /clientes/models.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
# Create your models here.
class Cliente(models.Model):
    """A rental customer."""
    nombre = models.CharField( max_length = 50 )
    # contact details are optional
    telefono = models.CharField( max_length = 20, null = True, blank = True)
    correo = models.CharField( max_length = 25, null =True, blank = True)
    activo = models.BooleanField(default = True)
    fecha_creacion = models.DateTimeField( auto_now_add = True, blank=True )
    class Meta:
        verbose_name = "Cliente"
        verbose_name_plural = "Clientes"
    # def __unicode__(self):
    #     return self.nombre
| {"/clientes/views.py": ["/clientes/models.py"], "/reserva/forms.py": ["/reserva/models.py"], "/reserva/views.py": ["/reserva/forms.py", "/reserva/models.py"], "/reserva/models.py": ["/clientes/models.py"]} |
56,752 | WeslleyBorges/gestao-produtos | refs/heads/master | /produtos/forms.py | from .models import Produto, Categoria
from django.forms import ModelForm
class ProdutoForm(ModelForm):
    """Form for creating and updating Produto records."""
    class Meta:
        model = Produto
        fields = ['codigo', 'descricao', 'preco', 'categoria']
class CategoriaForm(ModelForm):
    """Form for creating Categoria records."""
    class Meta:
        model = Categoria
        fields = ['codigo', 'descricao']
56,753 | WeslleyBorges/gestao-produtos | refs/heads/master | /produtos/migrations/0002_auto_20190403_1842.py | # Generated by Django 2.2 on 2019-04-03 18:42
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated (Django 2.2): adds the Categoria model plus the
    # Produto.preco and Produto.categoria fields.  Applied migrations
    # should not be hand-edited.
    dependencies = [
        ('produtos', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='Categoria',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('codigo', models.CharField(max_length=15)),
                ('descricao', models.CharField(max_length=30)),
            ],
        ),
        migrations.AddField(
            model_name='produto',
            name='preco',
            field=models.DecimalField(decimal_places=2, max_digits=7, null=True),
        ),
        migrations.AddField(
            model_name='produto',
            name='categoria',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='produtos.Categoria'),
        ),
    ]
| {"/produtos/forms.py": ["/produtos/models.py"], "/produtos/views.py": ["/produtos/models.py", "/produtos/forms.py"], "/produtos/context_processors.py": ["/produtos/models.py"], "/produtos/urls.py": ["/produtos/views.py"]} |
56,754 | WeslleyBorges/gestao-produtos | refs/heads/master | /produtos/views.py | from django.shortcuts import render, redirect, get_object_or_404
from .models import Produto
from .forms import ProdutoForm, CategoriaForm
from django.contrib.auth.decorators import login_required
from django.http import FileResponse
from reportlab.pdfgen import canvas
# Create your views here.
@login_required
def lista_produtos(request):
    """Render the page listing every registered product."""
    todos = Produto.objects.all()
    return render(request, 'produtos.html', {'produtos': todos})
@login_required
def novo_produto(request):
    """Create a product via ProdutoForm; redirect to the list on success."""
    form = ProdutoForm(request.POST or None, request.FILES or None)
    if not form.is_valid():
        # unbound form on GET, or bound form with its validation errors
        return render(request, 'form-produto.html', {'form': form})
    form.save()
    return redirect('lista_produtos')
@login_required
def atualizar_produto(request, id):
    """Edit the Produto with primary key ``id`` (404 when it does not exist)."""
    alvo = get_object_or_404(Produto, pk=id)
    formulario = ProdutoForm(request.POST or None, request.FILES or None, instance=alvo)
    if not formulario.is_valid():
        # GET, or a POST that failed validation: re-render the bound form.
        return render(request, 'form-produto.html', {'form': formulario})
    formulario.save()
    return redirect('lista_produtos')
@login_required
def excluir_produto(request, id):
    """Delete the Produto with primary key ``id`` and return to the list."""
    alvo = get_object_or_404(Produto, pk=id)
    alvo.delete()
    return redirect('lista_produtos')
@login_required
def nova_categoria(request):
    """Show the category form; persist and redirect on a valid POST.

    Fix: every other mutating view in this module is guarded by
    ``@login_required``; this one was missing it, so anonymous users could
    create categories. The decorator is added for consistency and safety.
    """
    form = CategoriaForm(request.POST or None, request.FILES or None)
    if form.is_valid():
        form.save()
        return redirect('lista_produtos')
    return render(request, 'form-categoria.html', {'form': form})
56,755 | WeslleyBorges/gestao-produtos | refs/heads/master | /produtos/context_processors.py | from .models import Produto
def produtos_dropdown(request):
    """Context processor exposing every Produto row as ``produtos``."""
    contexto = {'produtos': Produto.objects.all()}
    return contexto
56,756 | WeslleyBorges/gestao-produtos | refs/heads/master | /produtos/urls.py | from django.contrib import admin
from django.urls import path, include
from .views import lista_produtos
from .views import novo_produto, nova_categoria
from .views import atualizar_produto
from .views import excluir_produto
urlpatterns = [
    # Read-only listing of every product.
    path('lista', lista_produtos, name='lista_produtos'),
    # Create a new product (empty form on GET, save on valid POST).
    path('form-produto', novo_produto, name='novo_produto'),
    # Edit the product with the given primary key.
    path('form-produto/<int:id>', atualizar_produto,
         name='atualizar_produto'),
    # Delete the product with the given primary key, then redirect.
    path('excluir-produto/<int:id>', excluir_produto, name='excluir_produto'),
    # Create a new category.
    path('form-categoria', nova_categoria, name='nova_categoria')
]
| {"/produtos/forms.py": ["/produtos/models.py"], "/produtos/views.py": ["/produtos/models.py", "/produtos/forms.py"], "/produtos/context_processors.py": ["/produtos/models.py"], "/produtos/urls.py": ["/produtos/views.py"]} |
56,757 | WeslleyBorges/gestao-produtos | refs/heads/master | /produtos/models.py | from django.db import models
# Create your models here.
class Categoria(models.Model):
    """Product category: a short identifying code plus a display description."""
    codigo = models.CharField(max_length=15)
    descricao = models.CharField(max_length=30)
    def __str__(self):
        # Shown wherever a Categoria is rendered as text (admin, selects).
        return self.descricao
class Produto(models.Model):
    """A product with an optional price and an optional category."""
    codigo = models.CharField(max_length=15)
    descricao = models.CharField(max_length=50)
    # null=True because these two columns were added after rows already
    # existed (see migration 0002 in this app).
    preco = models.DecimalField(max_digits=7, decimal_places=2, null=True)
    categoria = models.ForeignKey(Categoria, null=True, on_delete=models.CASCADE)
    def __str__(self):
        # Shown wherever a Produto is rendered as text (admin, selects).
        return self.descricao
56,781 | fairoz-khan/Django-forms | refs/heads/master | /myform/views.py | from django.shortcuts import render
from django.http import HttpResponse
# Create your views here.
def Base(request):
    """Render the shared base layout page."""
    template_name = 'Base.html'
    return render(request, template_name)
def form(request):
    """Render the hand-written HTML form; echo the submitted fields on POST."""
    if request.method != 'POST':
        return render(request, 'myform/form.html')
    obtener = request.POST.get
    email = obtener('mail')
    password = obtener('pswrd')
    fname = obtener('fname')
    lname = obtener('lname')
    gender = obtener('gender')
    return HttpResponse(f"<h3>email:{email}<br>password:{password}<br>first-name:{fname}<br>Last-name:{lname}<br>Gender:{gender}<br></h3>")
def multiselect(request):
    """Render the multi-select form; echo the chosen option lists on POST."""
    if request.method != "POST":
        return render(request, 'myform/multiselect.html')
    languages = request.POST.getlist('language')
    Framework = request.POST.getlist('framework')
    return HttpResponse(f"<h2>{languages}<br>{Framework}</h2>")
# Uploading and displaying the uploaded image
from django.core.files.storage import FileSystemStorage
# import subprocess
# subprocess.call("/Users/apple/Documents/Jspider/django/projects/form/media")
def mediaUpload(request):
    """Save an uploaded image (form field ``img``) to default storage and
    re-render the page with its public URL as ``file_url``."""
    file_url = ''
    if request.method == 'POST' and request.FILES:
        image = request.FILES['img']
        fs = FileSystemStorage()
        # save() may rename on collision; use the returned name for the URL.
        file = fs.save(image.name, image)
        file_url = fs.url(file)
        # NOTE(review): indentation was lost in this dump; this debug print
        # may originally have sat outside the if-block — confirm intent.
        print(file_url)
    return render(request, 'myform/mediaup.html', {'file_url': file_url})
from .forms import SampleForm
def djangoform(request):
    """Render the page driven by the declarative SampleForm (unbound)."""
    contexto = {'form': SampleForm()}
    return render(request, 'myform/dforms.html', contexto)
56,782 | fairoz-khan/Django-forms | refs/heads/master | /myform/forms.py | from django import forms
# Month abbreviations indexed 0-11.
# Fix: October was misspelled 'act' (previously rendered a bogus month label).
months = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul', 'aug', 'sep', 'oct', 'nov', 'dec']
# (value, label) pairs for ChoiceField <select> widgets.
# Fix: the day range's upper bound was 31 (exclusive), silently dropping day 31.
choice_day = [(str(i), str(i)) for i in range(1, 32)]
choice_month = [(str(i+1), months[i]) for i in range(len(months))]
choice_year = [(str(i), str(i)) for i in range(1980, 2021)]
class SampleForm(forms.Form):
    """Registration form: name, contact details, password and birth date.

    Fix: the ``lastname`` field's label was a copy-paste of 'First name:'.
    """
    firstname = forms.CharField(max_length=20, label='First name:', required=True)
    lastname = forms.CharField(max_length=20, label='Last name:', required=False)
    email = forms.EmailField(max_length=100, label='Email address', required=True)
    # Constrained to 10-11 digit values starting at 6000000000.
    phonenum = forms.IntegerField(max_value=99999999999, min_value=6000000000, required=True)
    pswrd = forms.CharField(max_length=20, label='Password', widget=forms.PasswordInput)
    # Birth date split across three dropdowns built from the module constants.
    birthday = forms.ChoiceField(choices=choice_day, required=True, label='Birth day')
    bmonth = forms.ChoiceField(choices=choice_month, required=True, label='birth month')
    byear = forms.ChoiceField(choices=choice_year, required=True, label='birth year')
56,783 | fairoz-khan/Django-forms | refs/heads/master | /myform/urls.py | from django.urls import path
from . import views
urlpatterns = [
    # Shared base layout demo page.
    path('base', views.Base, name='base'),
    # Hand-written HTML form that echoes the submitted fields.
    path('form', views.form, name='form'),
    # Multi-select demo (languages / frameworks).
    path('multisel', views.multiselect, name='multisel'),
    # Image upload via FileSystemStorage.
    path('media_up', views.mediaUpload, name='media_up'),
    # Page rendered from the declarative SampleForm.
    path('django_form', views.djangoform, name='django_form'),
]
56,796 | crinfo/operacionesConFecha | refs/heads/master | /funcionFechaChile.py | from datetime import datetime
def formatoFecha(fecha):  # e.g. "2020-09-22"
    """Return a Spanish long-form date string for an ISO ``YYYY-MM-DD`` input.

    Example: "2020-09-22" -> "Martes 22 de Septiembre del 2020".
    The day and year substrings are taken verbatim from the input, so a
    zero-padded day stays zero-padded in the output.

    Improvement: the two long if/elif chains (7 weekday branches and 12 month
    branches) are replaced by index lookups into name tables; behavior for
    valid dates is unchanged.
    """
    dias = ('Lunes', 'Martes', 'Miercoles', 'Jueves', 'Viernes',
            'Sabado', 'Domingo')
    meses = ('Enero', 'Febrero', 'Marzo', 'Abril', 'Mayo', 'Junio',
             'Julio', 'Agosto', 'Septiembre', 'Octubre', 'Noviembre',
             'Diciembre')
    anio, mes_num, dia_num = fecha.split('-')
    objeto_datetime = datetime.strptime(fecha, "%Y-%m-%d")
    # datetime.weekday(): 0 = Monday ... 6 = Sunday.
    dia = dias[objeto_datetime.weekday()]
    mes = meses[int(mes_num) - 1]
    return f'{dia} {dia_num} de {mes} del {anio}'
#cambio1 git | {"/fechaHora.py": ["/funcionFechaChile.py"]} |
56,797 | crinfo/operacionesConFecha | refs/heads/master | /fechaHora.py | from datetime import datetime, date, time, timedelta
from funcionFechaChile import formatoFecha
# Demo script exercising the datetime module; prints to stdout on import/run.
# Current local date/time (naive — no timezone attached).
ahora = datetime.now()
#print(ahora)
#print(ahora.utcnow()) # formato UTC
#print(f"{ahora.day}-{ahora.month}-{ahora.year}\{ahora.hour}:{ahora.minute}:{ahora.second}")
#---------------- creating a date with explicit format parameters -------------------#
hoy = datetime.today()
print(hoy)
# strftime/strptime format strings reused across the demos below.
formato_date_anio = "%Y-%m-%d"
formato_date_dia = "%d/%m/%Y"
formato_dateTime_anio = "%Y-%m-%d %H:%M:%S"
formato_dateTime_dia = "%d/%m/%Y\%H:%M:%S"
formato_time = "%H:%M:%S"
print(f"fecha con formato date {hoy.strftime(formato_date_anio)}")
print(f"fecha y hora con formato datetime {hoy.strftime(formato_dateTime_anio)}")
#---------- converting a string into a datetime object -----------------------#
objeto_dateTime = datetime.strptime("2020-08-04",formato_date_anio) # parses the string; the format must match the input's field order
print(objeto_dateTime)
#------------------ date arithmetic (weeks, days, hours, minutes, seconds, milliseconds) -------------------------#
fecha = date.today()
dia_anterior = fecha-timedelta(days=1)
dia_siguiente = fecha+timedelta(days=1)
dos_meses_siguiente = fecha+timedelta(weeks=8)  # 8 weeks ~= two months
# print(f"fecha menos un dia formato año: {dia_anterior}")
# print(f"fecha menos un dia formato dia: {dia_anterior.strftime(formato_date_dia)}")
# print(f"fecha mas un dia formato año: {dia_siguiente}")
# print(f"fecha mas un dia formato dia: {dia_siguiente.strftime(formato_date_dia)}")
#print(f"fecha mas dos meses formato año: {dos_meses_siguiente}")
print(f"fecha mas dos meses formato dia: {dos_meses_siguiente.strftime(formato_date_dia)}")
#----------------- difference between two dates ----------------------#
fecha1 = date.today()
fecha_hora1 = datetime.now()
fecha2 = date(2019, 9, 22 )
fecha_hora2 = datetime(2019, 9, 22, 0, 0, 0)
# Subtracting date/datetime objects yields a timedelta.
diferencia_fecha = fecha1-fecha2
diferencia_fechaHora = fecha_hora1-fecha_hora2
#print(f"hay {diferencia_fecha.days} dias de diferencia en fecha")
print(f"hay {diferencia_fechaHora.days} dias de diferencia en fecha hora")
#------- day of the week: Python numbers them 0 = Monday, 1 = Tuesday, 2 = Wednesday, etc ----#
print(f"dia de la semana {datetime.weekday(fecha_hora1)}")
#-------- converting to and from a Unix timestamp -------------------#
print(f"fecha Hora = {fecha_hora1} | timestamp de fecha hora = {datetime.timestamp(fecha_hora1)} | Convertir de timestamp a datetime = {datetime.fromtimestamp(1600825273.584699)}")
print(formatoFecha("2020-07-26"))
print(f"{formatoFecha(hoy.strftime(formato_date_anio))} con hora {hoy.strftime(formato_time)}")
# edited from github
| {"/fechaHora.py": ["/funcionFechaChile.py"]} |
56,802 | tornadoyi/rl-lab | refs/heads/master | /rllab/algorithms/__init__.py |
from . import deepq | {"/rllab/torchlab/core/device.py": ["/rllab/torchlab/__init__.py"], "/rllab/rl/profiling/__init__.py": ["/rllab/rl/profiling/profiling.py"], "/rllab/rl/features/mlp.py": ["/rllab/rl/features/features.py"], "/rllab/rl/features/__init__.py": ["/rllab/rl/features/features.py"], "/rllab/torchlab/nn/modules/__init__.py": ["/rllab/torchlab/nn/modules/tensor.py", "/rllab/torchlab/nn/modules/conv.py"], "/rllab/envs/__init__.py": ["/rllab/envs/wrapper/__init__.py"], "/rllab/algorithms/deepq/__init__.py": ["/rllab/algorithms/deepq/trainer.py"], "/rllab/torchlab/__init__.py": ["/rllab/torchlab/core/__init__.py"], "/rllab/torchlab/distributed/__init__.py": ["/rllab/torchlab/distributed/launcher.py", "/rllab/torchlab/distributed/optimizer.py"], "/rllab/algorithms/deepq/deepq.py": ["/rllab/torchlab/__init__.py", "/rllab/torchlab/nn/__init__.py", "/rllab/rl/profiling/__init__.py", "/rllab/algorithms/deepq/network.py"], "/rllab/envs/wrapper/__init__.py": ["/rllab/envs/wrapper/profiling.py", "/rllab/envs/wrapper/reward_ratio.py"], "/rllab/envs/race/race.py": ["/rllab/envs/race/runway.py"], "/rllab/torchlab/profiling/profiling.py": ["/rllab/torchlab/profiling/__init__.py"], "/rllab/torchlab/profiling/__init__.py": ["/rllab/torchlab/profiling/profiling.py", "/rllab/torchlab/profiling/indicator.py"], "/rllab/torchlab/optim/__init__.py": ["/rllab/torchlab/optim/optim.py"], "/rllab/envs/race/__init__.py": ["/rllab/envs/race/race.py", "/rllab/envs/race/shuttle_run.py"], "/rllab/rl/profiling/profiling.py": ["/rllab/torchlab/__init__.py", "/rllab/torchlab/profiling/__init__.py"], "/rllab/torchlab/cuda/cuda.py": ["/rllab/torchlab/utils/__init__.py"], "/rllab/envs/race/render.py": ["/rllab/envs/__init__.py"], "/rllab/torchlab/cuda/__init__.py": ["/rllab/torchlab/cuda/cuda.py"], "/rllab/envs/wrapper/profiling.py": ["/rllab/envs/wrapper/utils.py"], "/rllab/torchlab/profiling/indicator.py": ["/rllab/torchlab/profiling/__init__.py"], "/rllab/torchlab/core/__init__.py": 
["/rllab/torchlab/core/device.py"], "/rllab/rl/features/cnn.py": ["/rllab/torchlab/__init__.py", "/rllab/rl/features/features.py"], "/rllab/envs/race/shuttle_run.py": ["/rllab/envs/race/runway.py"], "/rllab/algorithms/deepq/network.py": ["/rllab/torchlab/__init__.py"], "/rllab/envs/render/__init__.py": ["/rllab/envs/render/render.py"], "/rllab/envs/race/runway.py": ["/rllab/envs/race/render.py"], "/rllab/algorithms/deepq/trainer.py": ["/rllab/torchlab/__init__.py", "/rllab/rl/profiling/__init__.py", "/rllab/algorithms/deepq/__init__.py", "/rllab/algorithms/deepq/deepq.py"], "/rllab/torchlab/nn/__init__.py": ["/rllab/torchlab/nn/modules/__init__.py"]} |
56,803 | tornadoyi/rl-lab | refs/heads/master | /rllab/torchlab/core/device.py | import torch
from rllab.torchlab import cuda
def select_device(device=None, cuda_first=True, gpu_field='memory.free', field_sorted_reversed=True):
    """
    Deprecated

    Resolve a torch.device from an optional device string ('cpu', 'cuda',
    or 'cuda:N'); when unspecified, pick automatically, preferring CUDA
    when ``cuda_first`` is set and CUDA is available. Auto-selection ranks
    GPUs by ``gpu_field`` via nvidia-smi.
    """
    def _best_cuda():
        # Rank GPUs by the requested nvidia-smi field and take the top one.
        gpu_id = cuda.nvsmi_sort(gpu_field, field_sorted_reversed)[0]
        return torch.device('cuda:{}'.format(gpu_id))

    if device == 'cpu':
        return torch.device('cpu')
    if device is not None:
        if device[:4] != 'cuda':
            raise Exception('Invalid device {}'.format(device))
        if not cuda.detect_available():
            raise Exception('cuda is not available')
        if device[5:]:
            # An explicit index like 'cuda:1' was given; honor it verbatim.
            return torch.device(device)
        # Bare 'cuda': choose the best GPU automatically.
        return _best_cuda()
    # No device requested: prefer CUDA only when allowed and available.
    if cuda_first and cuda.detect_available():
        return _best_cuda()
    return torch.device('cpu')
| {"/rllab/torchlab/core/device.py": ["/rllab/torchlab/__init__.py"], "/rllab/rl/profiling/__init__.py": ["/rllab/rl/profiling/profiling.py"], "/rllab/rl/features/mlp.py": ["/rllab/rl/features/features.py"], "/rllab/rl/features/__init__.py": ["/rllab/rl/features/features.py"], "/rllab/torchlab/nn/modules/__init__.py": ["/rllab/torchlab/nn/modules/tensor.py", "/rllab/torchlab/nn/modules/conv.py"], "/rllab/envs/__init__.py": ["/rllab/envs/wrapper/__init__.py"], "/rllab/algorithms/deepq/__init__.py": ["/rllab/algorithms/deepq/trainer.py"], "/rllab/torchlab/__init__.py": ["/rllab/torchlab/core/__init__.py"], "/rllab/torchlab/distributed/__init__.py": ["/rllab/torchlab/distributed/launcher.py", "/rllab/torchlab/distributed/optimizer.py"], "/rllab/algorithms/deepq/deepq.py": ["/rllab/torchlab/__init__.py", "/rllab/torchlab/nn/__init__.py", "/rllab/rl/profiling/__init__.py", "/rllab/algorithms/deepq/network.py"], "/rllab/envs/wrapper/__init__.py": ["/rllab/envs/wrapper/profiling.py", "/rllab/envs/wrapper/reward_ratio.py"], "/rllab/envs/race/race.py": ["/rllab/envs/race/runway.py"], "/rllab/torchlab/profiling/profiling.py": ["/rllab/torchlab/profiling/__init__.py"], "/rllab/torchlab/profiling/__init__.py": ["/rllab/torchlab/profiling/profiling.py", "/rllab/torchlab/profiling/indicator.py"], "/rllab/torchlab/optim/__init__.py": ["/rllab/torchlab/optim/optim.py"], "/rllab/envs/race/__init__.py": ["/rllab/envs/race/race.py", "/rllab/envs/race/shuttle_run.py"], "/rllab/rl/profiling/profiling.py": ["/rllab/torchlab/__init__.py", "/rllab/torchlab/profiling/__init__.py"], "/rllab/torchlab/cuda/cuda.py": ["/rllab/torchlab/utils/__init__.py"], "/rllab/envs/race/render.py": ["/rllab/envs/__init__.py"], "/rllab/torchlab/cuda/__init__.py": ["/rllab/torchlab/cuda/cuda.py"], "/rllab/envs/wrapper/profiling.py": ["/rllab/envs/wrapper/utils.py"], "/rllab/torchlab/profiling/indicator.py": ["/rllab/torchlab/profiling/__init__.py"], "/rllab/torchlab/core/__init__.py": 
["/rllab/torchlab/core/device.py"], "/rllab/rl/features/cnn.py": ["/rllab/torchlab/__init__.py", "/rllab/rl/features/features.py"], "/rllab/envs/race/shuttle_run.py": ["/rllab/envs/race/runway.py"], "/rllab/algorithms/deepq/network.py": ["/rllab/torchlab/__init__.py"], "/rllab/envs/render/__init__.py": ["/rllab/envs/render/render.py"], "/rllab/envs/race/runway.py": ["/rllab/envs/race/render.py"], "/rllab/algorithms/deepq/trainer.py": ["/rllab/torchlab/__init__.py", "/rllab/rl/profiling/__init__.py", "/rllab/algorithms/deepq/__init__.py", "/rllab/algorithms/deepq/deepq.py"], "/rllab/torchlab/nn/__init__.py": ["/rllab/torchlab/nn/modules/__init__.py"]} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.