index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
12,501
|
theparadoxer02/koinex
|
refs/heads/master
|
/accounts/models.py
|
from django.db import models
from django.contrib.auth.models import User
from django import forms
class Account(models.Model):
    """Profile data attached one-to-one to the built-in auth User."""
    # Manual-verification workflow states for an account.
    status_option = (
        ('Submitted', 'Submitted'),
        ('Verified', 'Verified',),
        ('Rejected', 'Rejected'),
    )
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    first_name = models.CharField(max_length=40, default='', null=False)
    last_name = models.CharField(max_length=40, default='', null=False)
    email = models.EmailField(null=False, max_length=254)
    # External identifier; nullable until assigned, unique once set.
    asknbid_id = models.CharField(max_length=30, unique=True, null=True)
    account_status = models.CharField(max_length=20, choices=status_option)
    # NOTE(review): field name is a typo ("accont") and auto_now=True
    # rewrites the date on EVERY save -- auto_now_add=True was probably
    # intended. Renaming/changing requires a DB migration, so left as-is.
    accont_created_on = models.DateField(auto_now=True, auto_now_add=False)
    valid = models.BooleanField(default=False)

    def __str__(self):
        return self.first_name
class KYC(models.Model):
    """Know-Your-Customer details submitted for a verified Account."""
    # Verification workflow states for a KYC submission.
    kyc_status = (
        ('Submitted', 'Submitted'),
        ('Pending', 'Pending'),
        ('Verified', 'Verified'),
    )
    Account = models.ForeignKey(Account, on_delete=models.CASCADE)
    dob = models.DateField(auto_now=False, auto_now_add=False)
    full_name = models.CharField(max_length=100, null=False)
    pan_number = models.CharField(max_length=10, default='')
    adhaar_no = models.CharField(max_length=12, default='')
    gross_annual_income = models.CharField(max_length=12, default='')
    residential_status = models.CharField(max_length=200, default='')
    street_address = models.CharField(max_length=30, default='')
    city = models.CharField(max_length=30, default='')
    state = models.CharField(max_length=30, default='')
    country = models.CharField(max_length=30, default='')
    pin_code = models.CharField(max_length=10, default='')
    # NOTE: the field deliberately shadows the choices tuple above; the
    # tuple is evaluated as `choices` before the name is rebound.
    kyc_status = models.CharField(max_length=30, choices=kyc_status)
    valid = models.BooleanField(default=False)

    def __str__(self):
        return self.full_name

    def save(self, *args, **kwargs):
        """Refuse to persist KYC data until the owning Account is verified.

        Raises forms.ValidationError when the Account is not yet valid.
        """
        if self.Account.valid is False:
            raise forms.ValidationError('The User is not Verified yet')
        # BUG FIX: was super(KYC_Document, self) -- wrong class; since
        # self is a KYC instance, super() raised TypeError and the row
        # could never be saved.
        super(KYC, self).save(*args, **kwargs)
class KYC_Document(models.Model):
    """Uploaded identity documents backing a KYC submission."""
    Account = models.ForeignKey(Account, on_delete=models.CASCADE)
    pan_card = models.ImageField(upload_to='media/upload/pancard')
    adhaar_card = models.ImageField(upload_to='media/upload/adhaarcard')
    adhaar_back = models.ImageField(upload_to='media/upload/adhaarback')
    photograph = models.ImageField(upload_to='media/upload/photograph')
    valid = models.BooleanField(default=False)

    def __str__(self):
        return str(self.Account)

    def save(self, *args, **kwargs):
        """Persist only when the owning Account has been verified."""
        if self.Account.valid is False:
            raise forms.ValidationError('The User is not Verified yet')
        super(KYC_Document, self).save(*args, **kwargs)
class BankDetail(models.Model):
    """Bank and activity details recorded for a verified Account."""
    Account = models.ForeignKey(Account, on_delete=models.CASCADE)
    ifsc_code = models.CharField(max_length=10, default='', null=False)
    time = models.TimeField(auto_now=False, auto_now_add=True)
    # Not sure about it
    source = models.CharField(max_length=100, default='')
    ip = models.GenericIPAddressField()
    activity = models.TextField()
    status = models.BooleanField(default=False)
    valid = models.BooleanField(default=False)

    def __str__(self):
        return str(self.Account)

    def save(self, *args, **kwargs):
        """Persist only when the owning Account has been verified.

        Raises forms.ValidationError when the Account is not yet valid.
        """
        if self.Account.valid is False:
            raise forms.ValidationError('The User is not Verified yet')
        # BUG FIX: was super(KYC_Document, self) -- wrong class; self is a
        # BankDetail instance so super() raised TypeError on every save.
        super(BankDetail, self).save(*args, **kwargs)
|
{"/accounts/admin.py": ["/accounts/models.py"], "/accounts/api/serializers.py": ["/accounts/models.py"], "/accounts/api/views.py": ["/accounts/api/serializers.py"], "/accounts/api/urls.py": ["/accounts/api/views.py"]}
|
12,510
|
igpg/htcap
|
refs/heads/master
|
/scripts/htmlreport.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
HTCAP - beta 1
Author: filippo.cavallarin@wearesegment.com
This program is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option) any later
version.
"""
import sys
import os
import sqlite3
import json
from urlparse import urlsplit
reload(sys)
sys.setdefaultencoding('utf8')
def dict_from_row(row):
    """Convert a mapping-like DB row (e.g. sqlite3.Row) into a plain dict."""
    return {key: value for key, value in zip(row.keys(), row)}
def get_report(cur):
report = []
qry = """
SELECT r.type,r.id,r.url,r.method,r.data,r.http_auth,r.referer,r.out_of_scope, ri.trigger, r.crawler_errors,
(ri.id is not null) AS has_requests, ri.type AS req_type,ri.method AS req_method,ri.url AS req_url,ri.data AS req_data
FROM request r
LEFT JOIN request_child rc ON r.id=rc.id_request
LEFT JOIN request ri ON ri.id = rc.id_child
WHERE
r.type IN ('link', 'redirect','form')
and (has_requests=0 OR req_type IN ('xhr','form','websocket') OR (req_type='jsonp' AND ri.trigger <> ''))
"""
try:
cur.execute(qry)
for r in cur.fetchall():
report.append(dict_from_row(r))
except Exception as e:
print str(e)
return report
def get_assessment_vulnerabilities(cur, id_request):
report = []
qry = """
SELECT type, description FROM vulnerability WHERE id_request IN (
SELECT id FROM request WHERE (
id=? AND type IN ('link','redirect')) OR
(id_parent=? AND type IN ('xhr','jsonp','form','websocket')
)
)
"""
try:
cur.execute(qry, (id_request,id_request))
for r in cur.fetchall():
report.append(json.dumps({"type":r['type'], "description":r['description']}))
except Exception as e:
print str(e)
return report
def get_crawl_info(cur):
crawl = None
qry = """
SELECT *,
(SELECT htcap_version FROM crawl_info) AS htcap_version,
(SELECT COUNT(*) FROM request WHERE crawled=1) AS pages_crawled
FROM crawl_info
"""
try:
cur.execute(qry)
crawl = dict_from_row(cur.fetchone())
except Exception as e:
print str(e)
return crawl
def get_request_cmp_tuple(row):
    """Deduplication key for a request row; http_auth is already embedded
    in the url so it is not part of the key."""
    return tuple(row[field] for field in ('url', 'method', 'data'))
def add_http_auth(url, auth):
    """Return url with the given 'user:pass' credentials in the netloc."""
    parts = urlsplit(url)
    with_auth = parts._replace(netloc="%s@%s" % (auth, parts.netloc))
    return with_auth.geturl()
def get_json(cur):
    """Build the report json consumed by report.html: crawl infos plus a
    deduplicated list of results with their child requests attached."""
    report = get_report(cur)
    infos= get_crawl_info(cur)
    ret = dict(
        infos= infos,
        results = []
    )
    for row in report:
        if row['http_auth']:
            # embed credentials so the report shows the full fetchable url
            row['url'] = add_http_auth(row['url'], row['http_auth'])
        # skip rows already collected (same url/method/data key)
        if get_request_cmp_tuple(row) in [get_request_cmp_tuple(r) for r in ret['results']]: continue
        d = dict(
            url = row['url'],
            method = row['method'],
            data = row['data'],
            referer = row['referer'],
            xhr = [],
            jsonp = [],
            websockets = [],
            forms = [],
            errors = json.loads(row['crawler_errors']) if row['crawler_errors'] else [],
            vulnerabilities = get_assessment_vulnerabilities(cur, row['id'])
        )
        if row['out_of_scope']: d['out_of_scope'] = True
        if row['has_requests']:
            # the joined query yields one row per child request; gather all
            # children sharing this parent id into the proper bucket
            for r in report:
                if r['id'] != row['id']: continue
                req_obj = {}
                trigger = json.loads(r['trigger']) if 'trigger' in r and r['trigger'] else None # {'event':'ready','element':'[document]'}
                req_obj['trigger'] = "%s.%s()" % (trigger['element'], trigger['event']) if trigger else ""
                if r['req_type']=='xhr':
                    data = " data: %s" % r['req_data'] if r['req_data'] else ""
                    req_obj['request'] = "%s %s%s" % (r['req_method'], r['req_url'], data)
                    d['xhr'].append(req_obj)
                elif r['req_type']=='jsonp':
                    req_obj['request'] = r['req_url']
                    d['jsonp'].append(req_obj)
                elif r['req_type']=='websocket':
                    req_obj['request'] = r['req_url']
                    d['websockets'].append(req_obj)
                elif r['req_type']=='form':
                    req_obj['request'] = "%s %s data:%s" % (r['req_method'], r['req_url'], r['req_data'])
                    d['forms'].append(req_obj)
        # keep only rows that carry some information for the report
        if row['has_requests'] or row['out_of_scope'] or len(d['errors']) > 0 or len(d['vulnerabilities']) > 0:
            ret['results'].append(d)
    return json.dumps(ret)
if __name__ == "__main__":
    # directory holding the report.html / report.js / style.css templates
    base_dir = os.path.dirname(os.path.realpath(__file__)) + os.sep + "htmlreport" + os.sep
    if len(sys.argv) < 3:
        print "usage: %s <dbfile> <outfile>" % sys.argv[0]
        sys.exit(1)
    dbfile = sys.argv[1]
    outfile = sys.argv[2]
    if not os.path.exists(dbfile):
        print "No such file: %s" % dbfile
        sys.exit(1)
    if os.path.exists(outfile):
        # interactive overwrite confirmation: anything but "y" aborts
        sys.stdout.write("File %s already exists. Overwrite [y/N]: " % outfile)
        if sys.stdin.read(1) != "y":
            sys.exit(1)
        os.remove(outfile)
    conn = sqlite3.connect(dbfile)
    conn.row_factory = sqlite3.Row  # rows addressable by column name
    cur = conn.cursor()
    # %s slots filled below: css, report json, report js, html body
    base_html = (
        "<html>\n"
        "<head>\n"
        "<meta http-equiv='Content-Type' content='text/html; charset=utf-8' />\n"
        "<style>\n%s\n</style>\n"
        "<script>\n%s\n%s\n</script>\n"
        "</head>\n"
        "%s\n"
        "</html>\n"
    )
    jsn = "var report = %s;\n" % get_json(cur)
    with open("%sreport.html" % base_dir) as html, open("%sreport.js" % base_dir) as js, open("%sstyle.css" % base_dir) as css:
        html = base_html % (css.read(), jsn, js.read(), html.read())
    with open(outfile,'w') as out:
        out.write(html)
    print "Report saved to %s" % outfile
|
{"/core/crawl/lib/probe.py": ["/core/lib/request.py"]}
|
12,511
|
igpg/htcap
|
refs/heads/master
|
/htcap.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
HTCAP - beta 1
Author: filippo.cavallarin@wearesegment.com
This program is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option) any later
version.
"""
from __future__ import unicode_literals
import sys
import os
import datetime
import time
import getopt
from core.lib.utils import *
from core.crawl.crawler import Crawler
from core.scan.scanner import Scanner
reload(sys)
sys.setdefaultencoding('utf8')
def usage():
    """Print the version banner and the list of htcap commands."""
    infos = get_program_infos()
    banner = (
        "htcap ver " + infos['version'] + "\n"
        "usage: htcap <command>\n"
        "Commands: \n"
        " crawl run crawler\n"
        " scan run scanner\n"
    )
    print (banner)
if __name__ == '__main__':
    # Dispatch the first CLI argument to the matching subcommand.
    if len(sys.argv) < 2:
        usage()
        sys.exit(1)
    command = sys.argv[1]
    if command == "crawl":
        Crawler(sys.argv[2:])
    elif command == "scan":
        Scanner(sys.argv[2:])
    else:
        usage()
        sys.exit(1)
    sys.exit(0)
|
{"/core/crawl/lib/probe.py": ["/core/lib/request.py"]}
|
12,512
|
igpg/htcap
|
refs/heads/master
|
/core/crawl/crawler_thread.py
|
# -*- coding: utf-8 -*-
"""
HTCAP - beta 1
Author: filippo.cavallarin@wearesegment.com
This program is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option) any later
version.
"""
from __future__ import unicode_literals
import time
import re
import json
import urllib
import cookielib
import threading
import base64
import tempfile
import os
import uuid
from urlparse import urlparse, urlsplit, urljoin, parse_qsl
from core.lib.exception import *
from core.crawl.lib.shared import *
from core.crawl.lib.probe import Probe
from core.lib.http_get import HttpGet
from core.lib.cookie import Cookie
from core.lib.shell import CommandExecutor
from core.lib.request import Request
from core.lib.utils import *
from core.constants import *
from lib.utils import *
from lib.crawl_result import *
class CrawlerThread(threading.Thread):
    """Worker thread: pulls Requests from the shared queue, probes each url
    with the external probe process (phantomjs) and hands CrawlResults back
    to the main thread through Shared.main_condition."""

    def __init__(self):
        threading.Thread.__init__(self)
        self.thread_uuid = uuid.uuid4()
        # number of times a failed probe run is retried before giving up
        self.process_retries = 2
        self.process_retries_interval = 0.5
        self.status = THSTAT_RUNNING
        self.exit = False
        # per-thread temp file used to pass cookies to the probe process
        self.cookie_file = "%s%shtcap_cookiefile-%s.json" % (tempfile.gettempdir(), os.sep, self.thread_uuid)

    def run(self):
        self.crawl()

    def wait_request(self):
        """Block until a request is available and return it.

        Raises ThreadExitRequestException once self.exit has been set.
        All queue access happens under Shared.th_condition.
        """
        request = None
        Shared.th_condition.acquire()
        while True:
            if self.exit == True:
                # wake siblings so they can also observe the exit flag
                Shared.th_condition.notifyAll()
                Shared.th_condition.release()
                raise ThreadExitRequestException("exit request received")
            if Shared.requests_index >= len(Shared.requests):
                self.status = THSTAT_WAITING
                Shared.th_condition.wait() # The wait method releases the lock, blocks the current thread until another thread calls notify
                continue
            request = Shared.requests[Shared.requests_index]
            Shared.requests_index += 1
            break
        Shared.th_condition.release()
        self.status = THSTAT_RUNNING
        return request

    def load_probe_json(self, jsn):
        """Parse the probe's json output, closing a truncated array first.

        A truncated array means the probe died mid-output; a synthetic
        "ok"/partialcontent element is appended so the data is usable.
        """
        jsn = jsn.strip()
        if not jsn: jsn = "["
        if jsn[-1] != "]":
            jsn += '{"status":"ok", "partialcontent":true}]'
        try:
            return json.loads(jsn)
        except Exception:
            #print "-- JSON DECODE ERROR %s" % jsn
            raise

    def send_probe(self, request, errors):
        """Run the probe command for `request`, retrying on failure.

        Error codes are appended to `errors`; returns a Probe instance or
        None when all retries failed.
        """
        url = request.url
        jsn = None
        probe = None
        retries = self.process_retries
        params = []
        cookies = []
        if request.method == "POST":
            params.append("-P")
            if request.data:
                params.extend(("-D", request.data))
        if len(request.cookies) > 0:
            for cookie in request.cookies:
                cookies.append(cookie.get_dict())
            # cookies are handed to the probe via a temp json file
            with open(self.cookie_file,'w') as fil:
                fil.write(json.dumps(cookies))
            params.extend(("-c", self.cookie_file))
        if request.http_auth:
            params.extend(("-p" ,request.http_auth))
        if Shared.options['set_referer'] and request.referer:
            params.extend(("-r", request.referer))
        params.extend(("-i", str(request.db_id)))
        params.append(url)
        while retries:
            #while False:
            # print cmd_to_str(Shared.probe_cmd + params)
            # print ""
            cmd = CommandExecutor(Shared.probe_cmd + params)
            # allow the probe a little extra time beyond its own timeout
            jsn = cmd.execute(Shared.options['process_timeout'] + 2)
            if jsn == None:
                errors.append(ERROR_PROBEKILLED)
                time.sleep(self.process_retries_interval) # ... ???
                retries -= 1
                continue
            # try to decode json also after an exception .. sometimes phantom crashes BUT returns a valid json ..
            try:
                if jsn and type(jsn) is not str:
                    jsn = jsn[0]
                probeArray = self.load_probe_json(jsn)
            except Exception as e:
                raise
            if probeArray:
                probe = Probe(probeArray, request)
                if probe.status == "ok":
                    break
                errors.append(probe.errcode)
                # content-type and probe-timeout errors are not retryable
                if probe.errcode in (ERROR_CONTENTTYPE, ERROR_PROBE_TO):
                    break
            time.sleep(self.process_retries_interval)
            retries -= 1
        return probe

    def crawl(self):
        """Main worker loop: probe each queued request and publish results."""
        while True:
            url = None
            cookies = []
            requests = []
            requests_to_crawl = []
            redirects = 0
            errors = []
            try:
                request = self.wait_request()
            except ThreadExitRequestException:
                # clean up the per-thread cookie temp file on shutdown
                if os.path.exists(self.cookie_file):
                    os.remove(self.cookie_file)
                return
            except Exception as e:
                print "-->"+str(e)
                continue
            url = request.url
            purl = urlsplit(url)
            probe = None
            probe = self.send_probe(request, errors)
            if probe:
                # a probe timeout still yields usable partial results
                if probe.status == "ok" or probe.errcode == ERROR_PROBE_TO:
                    requests = probe.requests
                    if probe.html:
                        request.html = probe.html
            else :
                errors.append(ERROR_PROBEFAILURE)
                # get urls with python to continue crawling
                if Shared.options['use_urllib_onerror'] == False:
                    continue
                try:
                    hr = HttpGet(request, Shared.options['process_timeout'], self.process_retries, Shared.options['useragent'], Shared.options['proxy'])
                    requests = hr.get_requests()
                except Exception as e:
                    errors.append(str(e))
            # set out_of_scope, apply user-supplied filters to urls (ie group_qs)
            adjust_requests(requests)
            Shared.main_condition.acquire()
            res = CrawlResult(request, requests, errors)
            Shared.crawl_results.append(res)
            Shared.main_condition.notify()
            Shared.main_condition.release()
|
{"/core/crawl/lib/probe.py": ["/core/lib/request.py"]}
|
12,513
|
igpg/htcap
|
refs/heads/master
|
/core/crawl/lib/probe.py
|
# -*- coding: utf-8 -*-
"""
HTCAP - beta 1
Author: filippo.cavallarin@wearesegment.com
This program is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option) any later
version.
"""
from core.lib.request import Request
from core.lib.cookie import Cookie
from core.constants import *
class Probe:
    """Parsed output of one probe run: status, cookies, html and the child
    requests discovered on the page."""

    def __init__(self, data, parent):
        self.status = "ok"
        self.requests = []
        self.cookies = []
        self.redirect = None
        # True when the probe reported no error BUT its json was truncated
        self.partialcontent = False
        self.html = None

        # the trailing element of the array is the status record
        status = data.pop()
        if status['status'] == "error":
            self.status = "error"
            self.errcode = status['code']
        if "partialcontent" in status:
            self.partialcontent = status['partialcontent']

        # collect cookies first: the requests built below inherit them
        for key, val in data:
            if key == "cookies":
                for raw_cookie in val:
                    self.cookies.append(Cookie(raw_cookie, parent.url))

        if "redirect" in status:
            self.redirect = status['redirect']
            redirect_req = Request(REQTYPE_REDIRECT, "GET", self.redirect, parent=parent, set_cookie=self.cookies, parent_db_id=parent.db_id)
            self.requests.append(redirect_req)

        for key, val in data:
            if key == "request":
                trigger = val.get('trigger')
                child = Request(val['type'], val['method'], val['url'], parent=parent, set_cookie=self.cookies, data=val['data'], trigger=trigger, parent_db_id=parent.db_id)
                self.requests.append(child)
            elif key == "html":
                self.html = val

    # @TODO handle cookies set by ajax (in probe too)
|
{"/core/crawl/lib/probe.py": ["/core/lib/request.py"]}
|
12,514
|
igpg/htcap
|
refs/heads/master
|
/scripts/ajax.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import sqlite3
import json
import getopt
import os
reload(sys)
sys.setdefaultencoding('utf8')
# Join each crawled page with its child XHR requests; the caller-supplied
# WHERE tail is spliced into %s.
qry = """
SELECT r.id, r.url as page, r.referer, a.method, a.url,a.data,a.trigger
FROM request r inner join request a on r.id=a.id_parent
WHERE (a.type='xhr')
AND
%s
"""
try:
    opts, args = getopt.getopt(sys.argv[1:], 'd')
except getopt.GetoptError as err:
    print str(err)
    sys.exit(1)
if len(args) < 1:
    print (
        "usage: %s <dbfile> [<final_part_of_query>]\n"
        " Options:\n -d print POST data\n\n"
        " Base query: %s" % (sys.argv[0], qry)
    )
    sys.exit(1)
print_post_data = False
for o, v in opts:
    if o == '-d':
        print_post_data = True
dbfile = args[0]
if not os.path.exists(dbfile):
    print "No such file %s" % dbfile
    sys.exit(1)
# optional WHERE tail; default matches every row
where = args[1] if len(args) > 1 else "1=1"
conn = sqlite3.connect(dbfile)
conn.row_factory = sqlite3.Row
cur = conn.cursor()
cur.execute(qry % where)
# group ajax-request descriptions by their owning page (id, url, referer)
pages = {}
for res in cur.fetchall():
    page = (res['id'], res['url'], res['referer'])
    trigger = json.loads(res['trigger']) if res['trigger'] else None
    trigger_str = "%s.%s() -> " % (trigger['element'], trigger['event']) if trigger else ""
    data = " data: %s" % (res['data']) if print_post_data and res['data'] else ""
    descr = " %s%s %s%s" % (trigger_str, res['method'], res['url'], data)
    if page in pages:
        pages[page].append(descr)
    else:
        pages[page] = [descr]
for page,ajax in pages.items():
    print "Request ID: %s\nPage URL: %s\nReferer: %s\nAjax requests:" % page
    for aj in ajax:
        print aj
    print "- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \n"
|
{"/core/crawl/lib/probe.py": ["/core/lib/request.py"]}
|
12,515
|
igpg/htcap
|
refs/heads/master
|
/scripts/vulns.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import sqlite3
import json
reload(sys)
sys.setdefaultencoding('utf8')
qry = """
SELECT scanner,start_date,end_date,id_request,type,description FROM assessment a
INNER JOIN vulnerability av ON a.id=av.id_assessment
WHERE
%s
"""
if len(sys.argv) < 2:
print "usage: %s <dbfile> [<final_part_of_query>]\n base query: %s" % (sys.argv[0], qry)
sys.exit(1)
dbfile = sys.argv[1]
where = sys.argv[2] if len(sys.argv) > 2 else "1=1"
conn = sqlite3.connect(dbfile)
conn.row_factory = sqlite3.Row
cur = conn.cursor()
cur.execute(qry % where)
for vuln in cur.fetchall():
print vuln['description']
print "- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - "
|
{"/core/crawl/lib/probe.py": ["/core/lib/request.py"]}
|
12,516
|
igpg/htcap
|
refs/heads/master
|
/core/lib/request.py
|
# -*- coding: utf-8 -*-
"""
HTCAP - beta 1
Author: filippo.cavallarin@wearesegment.com
This program is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option) any later
version.
"""
from urlparse import urljoin
from core.lib.cookie import Cookie
from core.lib.utils import *
import json
from core.lib.thirdparty.simhash import Simhash
class Request(object):
    """A single crawl request (link, redirect, form, xhr, ...).

    Urls are decoded, made absolute against `parent` and normalized;
    cookies are inherited from the parent request and filtered by url.
    """

    def __init__(self, type, method, url, parent = None, referer = None, data = None, trigger=None, json_cookies = None, set_cookie = None, http_auth=None, db_id = None, parent_db_id = None, out_of_scope = None):
        self.type = type
        self.method = method
        self._html = None
        self._html_hash = None
        url = url.strip()
        # decode raw url bytes; latin-1 is the permissive fallback
        try:
            url = url.decode("utf-8")
        except:
            try:
                url = url.decode("latin-1")
            except Exception as e:
                raise AssertionError("unable to decode " + url)
        if type != REQTYPE_UNKNOWN:
            # extract http auth if present in url
            # if credentials are present in url, the url IS absolute so we can do this before urljoin
            # (foo:bar@example.local is NOT A VALID URL)
            auth, nurl = extract_http_auth(url)
            if auth:
                if not http_auth:
                    http_auth = auth
                url = nurl
            self.url = normalize_url( urljoin(parent.url, url) if parent else url )
        else:
            self.url = url
        # parent is the parent request that can be a redirect, referer is the referer page (ahead of redirects)
        self._parent = parent
        self.data = data if data else ""
        self.trigger = trigger
        self.db_id = db_id
        self.parent_db_id = parent_db_id
        self.out_of_scope = out_of_scope
        self.cookies = []
        # inherit auth from the parent unless explicitly given
        self.http_auth = parent.http_auth if not http_auth and parent else http_auth
        self.redirects = parent.redirects + 1 if type == REQTYPE_REDIRECT and parent else 0
        if not referer and parent:
            # redirects keep the original referer, other types use the parent page
            self.referer = parent.url if type != REQTYPE_REDIRECT else parent.referer
        else:
            self.referer = referer
        if json_cookies:
            self.all_cookies = self.cookies_from_json(json_cookies)
        else:
            set_cookie = set_cookie if set_cookie else []
            self.all_cookies = self.merge_cookies(set_cookie, parent.all_cookies) if parent else set_cookie
        # only cookies whose domain/path match this url are actually sent
        self.cookies = [c for c in self.all_cookies if c.is_valid_for_url(self.url)]

    @property
    def parent(self):
        if not self._parent and self.parent_db_id:
            # fetch from db
            pass
        return self._parent

    @parent.setter
    def parent(self, value):
        self._parent = value

    @property
    def html(self):
        return self._html

    @html.setter
    def html(self, value):
        # keep a simhash of the page to allow cheap similarity checks
        self._html = value
        self._html_hash = Simhash(value)

    def get_dict(self):
        """Serializable view of this request (used for db storage)."""
        return dict(
            type = self.type,
            method = self.method,
            url = self.url,
            referer = self.referer,
            data = self.data,
            trigger = self.trigger,
            cookies = self.cookies,
            db_id = self.db_id,
            parent_db_id = self.parent_db_id,
            out_of_scope = self.out_of_scope
        )

    def cookies_from_json(self, cookies):
        # create Cookie without "setter" because cookies loaded from db are always valid (no domain restrictions)
        # see Cookie.py
        return [Cookie(c) for c in json.loads(cookies)]

    def get_cookies_as_json(self):
        """Serialize this request's valid cookies to a json string."""
        cookies = [c.get_dict() for c in self.cookies]
        return json.dumps(cookies)

    def merge_cookies(self, cookies1, cookies2):
        """Merge two cookie lists; cookies1 entries update matching ones."""
        cookies = list(cookies2)
        for parent_cookie in cookies1:
            if parent_cookie not in cookies:
                cookies.append(parent_cookie)
            else:
                for cookie in cookies:
                    if parent_cookie == cookie:
                        cookie.update(parent_cookie.__dict__)
        return cookies

    def get_full_url(self):
        """Return the url with http credentials embedded in the netloc."""
        if not self.http_auth:
            return self.url
        # FIX: urlsplit was never imported at module level (only urljoin),
        # so this path raised NameError whenever http_auth was set.
        from urlparse import urlsplit
        purl = urlsplit(self.url)
        netloc = "%s@%s" % (self.http_auth, purl.netloc)
        purl = purl._replace(netloc=netloc)
        return purl.geturl()

    # UNUSED
    def tokenize_request(self, request):
        """Return an array of url components (scheme, netloc, path parts,
        query/data key-value tokens)."""
        # FIX: neither `re` nor `urlsplit` is imported at module level;
        # import locally so this (unused) helper works if ever called.
        import re
        from urlparse import urlsplit
        purl = urlsplit(request.url)
        tokens = [purl.scheme, purl.netloc]
        if purl.path:
            tokens.extend(purl.path.split("/"))
        data = [purl.query] if purl.query else []
        if request.data:
            data.append(request.data)
        for d in data:
            # NOTE(review): both alternatives are "&" -- possibly meant
            # r'(?:&|&amp;)'; kept as-is to preserve behavior.
            qtokens = re.split(r'(?:&|&)', d)
            for qt in qtokens:
                tokens.extend(qt.split("=",1))
        return tokens

    # UNUSED
    def compare_html(self, other):
        """True when both pages' simhashes are within distance 2."""
        if not other: return False
        if not self.html and not other.html: return True
        if self.html and other.html:
            return self._html_hash.distance(other._html_hash) <= 2
        return False

    # UNUSED
    def is_similar(self, other):
        """True when the two requests differ in at most one url/data token."""
        # is equal .. so not similar
        if self == other: return False
        ot = self.tokenize_request(other)
        st = self.tokenize_request(self)
        if len(ot) != len(st): return False
        diff = 0
        for i in range(0, len(st)):
            if st[i] != ot[i]: diff += 1
            if diff > 1: return False
        return True

    def __eq__(self, other):
        if other == None: return False
        data = self.data
        odata = other.data
        if self.method == "POST":
            # ignore anti-csrf tokens when comparing POST bodies
            data = remove_tokens(data)
            odata = remove_tokens(odata)
        return (self.method, self.url, self.http_auth, data) == (other.method, other.url, other.http_auth, odata)

    def __repr__(self):
        # BUG FIX: __repr__ must return a string; the original printed the
        # text and returned None, making repr() raise TypeError.
        return "DEBUG" + self.__str__()

    def __str__(self):
        return "%s %s %s %s" % (self.type, self.method, self.get_full_url(), self.data)
|
{"/core/crawl/lib/probe.py": ["/core/lib/request.py"]}
|
12,517
|
igpg/htcap
|
refs/heads/master
|
/scripts/curl.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import sqlite3
import json
reload(sys)
sys.setdefaultencoding('utf8')
qry = "SELECT method, url, data, referer, cookies FROM request WHERE %s"
if len(sys.argv) < 2:
print "usage: %s <dbfile> [<final_part_of_query>]\n base query: %s" % (sys.argv[0], qry)
sys.exit(1)
dbfile = sys.argv[1]
where = sys.argv[2] if len(sys.argv) > 2 else "1=1"
conn = sqlite3.connect(dbfile)
conn.row_factory = sqlite3.Row
cur = conn.cursor()
cur.execute(qry % where)
for req in cur.fetchall():
cookies = ["%s=%s" % (c['name'],c['value']) for c in json.loads(req['cookies'])]
cookies_str = " -H 'Cookie: %s'" % " ;".join(cookies) if len(cookies) > 0 else ""
method = " -X POST" if req['method'] == "POST" else ""
referer = " -H 'Referer: %s'" % req['referer'] if req['referer'] else ""
data = " --data '%s'" % req['data'] if req['data'] else ""
print "%s%s%s%s '%s'" % (method, referer, cookies_str, data,req['url'])
|
{"/core/crawl/lib/probe.py": ["/core/lib/request.py"]}
|
12,518
|
Py-Lambdas/office-hours-django
|
refs/heads/main
|
/tasks/migrations/0001_initial.py
|
# Generated by Django 3.1.3 on 2020-11-03 04:19
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
    # Auto-generated initial schema: creates the Project and Task tables.
    # NOTE: generated by Django 3.1.3 -- keep in sync with tasks/models.py,
    # do not hand-edit field definitions.

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Project',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('name', models.TextField()),
            ],
        ),
        migrations.CreateModel(
            name='Task',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('name', models.CharField(max_length=255)),
                ('description', models.TextField()),
                ('complete', models.BooleanField(default=False)),
                ('priority', models.CharField(choices=[('U', 'Urgent'), ('H', 'High'), ('M', 'Medium'), ('L', 'Low')], default='L', max_length=1)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('project', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='tasks', to='tasks.project')),
            ],
        ),
    ]
|
{"/tasks/admin.py": ["/tasks/models.py"]}
|
12,519
|
Py-Lambdas/office-hours-django
|
refs/heads/main
|
/tasks/admin.py
|
from django.contrib import admin
from .models import Task, Project
# Make Task and Project manageable from the Django admin.
for model in (Task, Project):
    admin.site.register(model)
|
{"/tasks/admin.py": ["/tasks/models.py"]}
|
12,520
|
Py-Lambdas/office-hours-django
|
refs/heads/main
|
/tasks/models.py
|
import uuid
from django.db import models
class Task(models.Model):
    """A single to-do item, optionally attached to a Project."""

    class TaskPriority(models.TextChoices):
        # stored as a single character, displayed with the human label
        URGENT = "U", "Urgent"
        HIGH = "H", "High"
        MEDIUM = "M", "Medium"
        LOW = "L", "Low"

    id = models.UUIDField(primary_key=True, editable=False, default=uuid.uuid4)
    name = models.CharField(max_length=255)
    description = models.TextField()
    complete = models.BooleanField(default=False)
    priority = models.CharField(
        max_length=1, choices=TaskPriority.choices, default=TaskPriority.LOW
    )
    # nullable so a task can exist without a project; deleting a project
    # cascades to its tasks (reverse accessor: project.tasks)
    project = models.ForeignKey(
        "Project", on_delete=models.CASCADE, related_name="tasks", null=True
    )
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    def __str__(self):
        return f"{self.name} <{self.priority}>"
class Project(models.Model):
    """Groups Tasks via the reverse `tasks` relation."""
    id = models.UUIDField(primary_key=True, editable=False, default=uuid.uuid4)
    name = models.TextField()

    def __str__(self):
        # NOTE(review): self.tasks is a RelatedManager, so this renders the
        # manager's repr rather than the task list -- probably wants
        # self.tasks.count(); left unchanged to preserve behavior.
        return f"{self.name} <Tasks: {self.tasks}>"
|
{"/tasks/admin.py": ["/tasks/models.py"]}
|
12,556
|
n-miyamoto/multvariable_lstm
|
refs/heads/master
|
/train.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from make_data import *
from stacked_lstm import *
import numpy as np
from chainer import optimizers, cuda
import time
import sys
import _pickle as cPickle
IN_UNITS = 5
HIDDEN_UNITS_L1 = 80
HIDDEN_UNITS_L2 = 80
OUT_UNITS = 5
TRAINING_EPOCHS = 4000
DISPLAY_EPOCH = 10
MINI_BATCH_SIZE = 100
LENGTH_OF_SEQUENCE = 100
STEPS_PER_CYCLE = 50
NUMBER_OF_CYCLES = 100
xp = cuda.cupy
def compute_loss(model, sequences):
    """Accumulate the model's next-step prediction loss over a batch.

    `sequences` has shape (num, rows, cols); each time step feeds column i
    as input and column i+1 as the target.
    """
    total_loss = 0
    num, rows, cols = sequences.shape
    for step in range(cols - 1):
        current = chainer.Variable(
            xp.asarray(
                [[sequences[k, j, step] for k in range(num)] for j in range(rows)],
                dtype=np.float32
            )
        )
        target = chainer.Variable(
            xp.asarray(
                [[sequences[k, j, step + 1] for k in range(num)] for j in range(rows)],
                dtype=np.float32
            )
        )
        total_loss += model(current, target)
    return total_loss
if __name__ == "__main__":
    # make training data
    data_maker = DataMaker(steps_per_cycle=STEPS_PER_CYCLE, number_of_cycles=NUMBER_OF_CYCLES)
    train_data = data_maker.make()
    # setup model with small uniform random initial weights
    model = LSTM(IN_UNITS, HIDDEN_UNITS_L1, HIDDEN_UNITS_L2 ,OUT_UNITS)
    for param in model.params():
        data = param.data
        data[:] = np.random.uniform(-0.1, 0.1, data.shape)
    model.to_gpu(0)
    # setup optimizer
    optimizer = optimizers.Adam()
    optimizer.setup(model)
    start = time.time()
    cur_start = start
    for epoch in range(TRAINING_EPOCHS):
        sequences = data_maker.make_mini_batch(train_data, mini_batch_size=MINI_BATCH_SIZE, length_of_sequence=LENGTH_OF_SEQUENCE)
        model.reset_state()
        model.zerograds()
        loss = compute_loss(model, sequences)
        loss.backward()
        optimizer.update()
        if epoch != 0 and epoch % DISPLAY_EPOCH == 0:
            cur_end = time.time()
            # display loss
            # NOTE(review): normalizes by shape[1]-1 (batch axis) while
            # compute_loss iterates shape[2]-1 steps; both are 100 in this
            # config so the values coincide -- confirm intended axis.
            print(
                "[{j}]training loss:\t{i}\t{k}[sec/epoch]".format(
                    j=epoch,
                    i=loss.data/(sequences.shape[1] - 1),
                    k=(cur_end - cur_start)/DISPLAY_EPOCH
                )
            )
            cur_start = time.time()
        sys.stdout.flush()
    end = time.time()
    # save model
    # NOTE(review): pickling a chainer model ties the file to this exact
    # class definition; chainer's serializers would be more robust.
    cPickle.dump(model, open("./model.pkl", "wb"))
    print("{}[sec]".format(end - start))
|
{"/train.py": ["/make_data.py"], "/predict.py": ["/make_data.py"]}
|
12,557
|
n-miyamoto/multvariable_lstm
|
refs/heads/master
|
/make_data.py
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import numpy as np
import math
import random
import pandas as pd
ROW_SIZE = 5
TRAINING_START = 0
TRAINING_SIGNAL_LENGTH = 100000
random.seed(0)
class DataMaker(object):
    """Loads the 5-sine-wave CSV and slices random training mini-batches.

    The constructor arguments are kept for interface compatibility with the
    original synthetic data maker; data now comes from "5sins.csv".
    """

    def __init__(self, steps_per_cycle, number_of_cycles):
        # steps_per_cycle / number_of_cycles intentionally unused (see docstring)
        self.df = pd.read_csv("5sins.csv",encoding="shift_jis")

    def make(self):
        """Return the full training DataFrame."""
        return self.df

    def make_mini_batch(self, data, mini_batch_size, length_of_sequence):
        """Sample `mini_batch_size` random windows of `length_of_sequence`
        samples from each of the ROW_SIZE signal columns.

        Returns a float32 ndarray of shape
        (ROW_SIZE, mini_batch_size, length_of_sequence).
        """
        sequences = np.ndarray((ROW_SIZE, mini_batch_size, length_of_sequence), dtype=np.float32)
        for j in range(ROW_SIZE):
            # FIX: DataFrame.ix was removed in pandas 1.0; use positional
            # .iloc (column j+1 skips the leading index/time column).
            # NOTE(review): .ix sliced an integer index label-inclusively,
            # so .iloc may drop one trailing row relative to the old
            # behavior -- negligible for random window sampling.
            signal = self.df.iloc[TRAINING_START:TRAINING_SIGNAL_LENGTH, j + 1]
            for i in range(mini_batch_size):
                index = random.randint(0, len(signal) - length_of_sequence)
                sequences[j][i] = signal[index:index + length_of_sequence]
        return sequences
|
{"/train.py": ["/make_data.py"], "/predict.py": ["/make_data.py"]}
|
12,558
|
n-miyamoto/multvariable_lstm
|
refs/heads/master
|
/predict.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import _pickle as cPickle
import numpy as np
from chainer import optimizers, cuda
import chainer
from make_data import *
MODEL_PATH = "./model.pkl"
PREDICTION_LENGTH = 75
PREDICTION_PATH = "./prediction.txt"
INITIAL_PATH = "./initial.txt"
MINI_BATCH_SIZE = 100
LENGTH_OF_SEQUENCE = 100
STEPS_PER_CYCLE = 50
NUMBER_OF_CYCLES = 100
xp = cuda.cupy
def predict_sequence(model, input_seq, output_seq, dummy):
    """Feed input_seq through the model one step at a time and return the
    final prediction copied to the CPU.

    `output_seq` is unused here; `dummy` is a placeholder target variable.
    """
    model.reset_state()
    for idx in range(len(input_seq)):
        step_input = chainer.Variable(xp.asarray(input_seq[idx:idx+1], dtype=np.float32)[:, np.newaxis])
        future = model(step_input, dummy)
        cpu_future = chainer.cuda.to_cpu(future.data)
    return cpu_future
def predict(seq, model, pre_length, initial_path, prediction_path):
    """Roll the model forward `pre_length` steps from the first quarter of
    `seq`, writing the predictions and the original signal to text files."""
    warmup = np.array(seq[:seq.shape[0]//4])
    # seed the output with the last observed value of the warm-up window
    predicted = np.append(np.empty(0), warmup[-1])
    model.train = False
    dummy = chainer.Variable(xp.asarray([0], dtype=np.float32)[:, np.newaxis])
    for _ in range(pre_length):
        nxt = predict_sequence(model, warmup, predicted, dummy)
        # slide the window: drop the oldest sample, append the prediction
        warmup = np.append(np.delete(warmup, 0), nxt)
        predicted = np.append(predicted, nxt)
    with open(prediction_path, "w") as f:
        # predictions are numbered starting one step before the window end
        offset = warmup.shape[0] - 1
        for step, value in enumerate(predicted.tolist(), start=offset):
            f.write("{i} {v}\n".format(i=step, v=value))
    with open(initial_path, "w") as f:
        for step, value in enumerate(seq.tolist()):
            f.write("{i} {v}\n".format(i=step, v=value))
if __name__ == "__main__":
    # load model
    # NOTE(review): unpickling executes arbitrary code; only load model.pkl
    # from a trusted source.
    model = cPickle.load(open(MODEL_PATH, 'rb'))
    # make data (the DataMaker constructor args are legacy; data comes from CSV)
    data_maker = DataMaker(steps_per_cycle=STEPS_PER_CYCLE, number_of_cycles=NUMBER_OF_CYCLES)
    data = data_maker.make()
    sequences = data_maker.make_mini_batch(data, mini_batch_size=MINI_BATCH_SIZE, length_of_sequence=LENGTH_OF_SEQUENCE)
    # BUG FIX: make_mini_batch returns shape (ROW_SIZE, batch, length), so
    # the original `sequences[45]` indexed the first axis (size ROW_SIZE=5)
    # and raised IndexError.  Select a signal row first, then the batch
    # sample.  TODO confirm which signal row was intended.
    signal_row = 0
    sample_index = 45
    predict(sequences[signal_row][sample_index], model, PREDICTION_LENGTH, INITIAL_PATH, PREDICTION_PATH)
|
{"/train.py": ["/make_data.py"], "/predict.py": ["/make_data.py"]}
|
12,581
|
gazeti/aleph
|
refs/heads/master
|
/aleph/logic/permissions.py
|
import logging
from flask import render_template
from aleph.core import db, app_url, app_title
from aleph.notify import notify_role
from aleph.model import Permission
log = logging.getLogger(__name__)
def update_permission(role, collection, read, write):
    """Update a roles permission to access a given collection."""
    # capture the permission as it stood before the change so the
    # notification email can show the difference
    previous = Permission.by_collection_role(collection.id, role)
    updated = Permission.grant_collection(collection.id, role, read, write)
    db.session.commit()
    try:
        collection_url = '%scollections/%s' % (app_url, collection.id)
        body = render_template('email/permission.html',
                               role=role,
                               url=collection_url,
                               collection=collection,
                               pre=previous,
                               post=updated,
                               app_url=app_url,
                               app_title=app_title)
        notify_role(role, collection.label, body)
    except Exception as ex:
        # a notification failure must not undo the permission change
        log.exception(ex)
    return updated
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,582
|
gazeti/aleph
|
refs/heads/master
|
/aleph/tests/test_models.py
|
from aleph.core import db
from aleph.model import Collection, Entity
from aleph.logic import delete_pending
from aleph.tests.factories.models import EntityFactory, CollectionFactory
from aleph.tests.util import TestCase
class EntityModelTest(TestCase):
    """Exercises entity lifecycle behaviour, in particular delete_pending()."""
    def setUp(self):
        super(EntityModelTest, self).setUp()
        self.pending_col = CollectionFactory.create()
        self.col = CollectionFactory.create()
        db.session.flush()
        self.pending_ent = EntityFactory.create(state=Entity.STATE_PENDING)
        self.pending_ent.collections = [self.pending_col]
        self.ent = EntityFactory.create(state=Entity.STATE_ACTIVE)
        # BUG FIX: the original line `self.ent = [self.col]` overwrote the
        # entity with a plain list; it should link the entity's collections.
        self.ent.collections = [self.col]
        db.session.flush()
    def test_delete_pending_entities(self):
        # delete_pending() must remove only the pending entity and must
        # leave collections untouched
        self.assertEqual(Entity.query.count(), 2)
        self.assertEqual(Collection.query.count(), 2)
        delete_pending()
        self.assertEqual(Entity.query.count(), 1)
        self.assertEqual(Collection.query.count(), 2)
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,583
|
gazeti/aleph
|
refs/heads/master
|
/aleph/analyze/polyglot_entity.py
|
from __future__ import absolute_import
import logging
from polyglot.text import Text
from aleph.analyze.analyzer import Analyzer
from aleph.model import DocumentTag, DocumentTagCollector
log = logging.getLogger(__name__)
class PolyglotEntityAnalyzer(Analyzer):
    """Extracts named entities (people, organisations, locations) from
    document text using polyglot's NER tagger."""
    ORIGIN = 'polyglot'
    MIN_LENGTH = 100
    TYPES = {
        'I-PER': DocumentTag.TYPE_PERSON,
        'I-ORG': DocumentTag.TYPE_ORGANIZATION,
        'I-LOC': DocumentTag.TYPE_LOCATION
    }
    def prepare(self):
        # only plain-text documents are analysed
        self.disabled = self.document.type != self.document.TYPE_TEXT
        self.collector = DocumentTagCollector(self.document, self.ORIGIN)
    def on_text(self, text):
        if text is None or len(text) <= self.MIN_LENGTH:
            return
        try:
            hint_language_code = None
            if len(self.document.languages) == 1:
                hint_language_code = self.document.languages[0]
            tagged = Text(text, hint_language_code=hint_language_code)
            for span in tagged.entities:
                # locations and single-token entities are too noisy
                if span.tag == 'I-LOC' or len(span) == 1:
                    continue
                label = ' '.join(span)
                if len(label) < 4 or len(label) > 200:
                    continue
                self.collector.emit(label, self.TYPES.get(span.tag))
        except ValueError as ve:
            log.info('NER value error: %r', ve)
        except Exception as ex:
            log.warning('NER failed: %r', ex)
    def finalize(self):
        log.info('Polyglot extracted %s entities.', len(self.collector))
        self.collector.save()
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,584
|
gazeti/aleph
|
refs/heads/master
|
/aleph/search/leads.py
|
import logging
from aleph.core import es, es_index
from aleph.index import TYPE_LEAD, TYPE_ENTITY
from aleph.search.util import execute_basic
from aleph.search.fragments import filter_query, authz_filter, aggregate
from aleph.search.entities import facet_collections
from aleph.search.facet import parse_facet_result
log = logging.getLogger(__name__)
def leads_query(collection_id, state):
    """Search the leads of a collection, attaching to each hit the entity
    and match-entity documents it references."""
    base = {'term': {'entity_collection_id': collection_id}}
    base = authz_filter(base, state.authz, roles=True)
    aggs = {'scoped': {'global': {}, 'aggs': {}}}
    facets = list(state.facet_names)
    if 'collections' in facets:
        # collection facet needs a global (unscoped) aggregation
        aggs = facet_collections(state, base, aggs)
        facets.remove('collections')
    aggs = aggregate(state, base, aggs, facets)
    lead_body = {
        'sort': [{'judgement': 'asc'}, {'score': 'desc'}, {'match_id': 'asc'}],
        'query': filter_query(base, state.filters),
        'aggregations': aggs,
        'size': state.limit,
        'from': state.offset
    }
    result, hits, output = execute_basic(TYPE_LEAD, lead_body)
    output['facets'] = parse_facet_result(state, result)
    entity_ids = set([])
    for doc in hits.get('hits', []):
        lead = doc.get('_source')
        lead['id'] = doc.get('_id')
        entity_ids.add(lead.get('entity_id'))
        entity_ids.add(lead.get('match_id'))
        output['results'].append(lead)
    # second round-trip: fetch every referenced entity in a single query
    entity_body = {
        'query': {'terms': {'_id': list(entity_ids)}},
        'size': len(entity_ids) + 2
    }
    _, hits, _ = execute_basic(TYPE_ENTITY, entity_body)
    for doc in hits.get('hits', []):
        entity = doc.get('_source')
        entity['id'] = doc.get('_id')
        for lead in output['results']:
            if lead.get('match_id') == entity['id']:
                lead['match'] = entity
            if lead.get('entity_id') == entity['id']:
                lead['entity'] = entity
    return output
def lead_count(collection_id):
    """Inaccurate, as it does not reflect auth."""
    body = {
        'size': 0,
        'query': {'term': {'entity_collection_id': collection_id}},
    }
    result = es.search(index=es_index, doc_type=TYPE_LEAD, body=body)
    return result.get('hits', {}).get('total', 0)
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,585
|
gazeti/aleph
|
refs/heads/master
|
/aleph/views/search_api.py
|
from flask import Blueprint, request
from apikit import jsonify
from aleph.core import url_for
from aleph.views.cache import enable_cache
from aleph.events import log_event
from aleph.search import QueryState
from aleph.search import documents_query
from aleph.search.util import next_params
blueprint = Blueprint('search_api', __name__)
@blueprint.route('/api/1/query')
def query():
    """Run a document search and return cache-enabled, paginated results."""
    enable_cache(vary_user=True)
    state = QueryState(request.args, request.authz)
    result = documents_query(state)
    log_event(request)
    # attach a 'next' link when more pages are available
    next_page = next_params(request.args, result)
    if next_page is not None:
        result['next'] = url_for('search_api.query', **next_page)
    return jsonify(result)
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,586
|
gazeti/aleph
|
refs/heads/master
|
/aleph/model/validate.py
|
import os
import json
from dalet import is_country_code, is_partial_date, is_language_code
from dalet import is_domain, is_url
from jsonschema import Draft4Validator, FormatChecker, RefResolver
from aleph.core import get_config
# Resolver pre-loaded with every JSON schema found under validation/,
# registered under each schema's declared 'id' so $ref lookups resolve.
resolver = RefResolver('core.json#', {})
SCHEMA_DIR = os.path.join(os.path.dirname(__file__), 'validation')
for (root, dirs, files) in os.walk(SCHEMA_DIR):
    for schema_file in files:
        with open(os.path.join(root, schema_file), 'r') as fh:
            schema = json.load(fh)
            resolver.store[schema['id']] = schema
# Custom string formats referenced by the schemas; the individual
# validator functions come from the dalet package.
format_checker = FormatChecker()
format_checker.checks('country-code')(is_country_code)
format_checker.checks('partial-date')(is_partial_date)
format_checker.checks('language-code')(is_language_code)
format_checker.checks('url')(is_url)
format_checker.checks('domain')(is_domain)
@format_checker.checks('collection-category')
def is_collection_category(cat):
    """Check a category key against the configured collection categories."""
    return cat in get_config('COLLECTION_CATEGORIES', {})
def validate(data, schema):
    """Resolve `schema` by reference and validate `data` against it,
    raising jsonschema's ValidationError on failure."""
    _, resolved = resolver.resolve(schema)
    checker = Draft4Validator(resolved, resolver=resolver,
                              format_checker=format_checker)
    return checker.validate(data, resolved)
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,587
|
gazeti/aleph
|
refs/heads/master
|
/aleph/views/crawlers_api.py
|
from flask import Blueprint, request
from apikit import request_data, jsonify, Pager
from aleph.crawlers import get_exposed_crawlers, execute_crawler
blueprint = Blueprint('crawlers_api', __name__)
@blueprint.route('/api/1/crawlers', methods=['GET'])
def index():
    """List all exposed crawlers, sorted by name; admin-only."""
    request.authz.require(request.authz.is_admin)
    crawlers = sorted(get_exposed_crawlers(), key=lambda c: c.CRAWLER_NAME)
    return jsonify(Pager(crawlers, limit=20))
@blueprint.route('/api/1/crawlers', methods=['POST', 'PUT'])
def queue():
    """Queue an asynchronous run of a crawler by id; admin-only."""
    request.authz.require(request.authz.session_write())
    request.authz.require(request.authz.is_admin)
    data = request_data()
    crawler_id = data.get('crawler_id')
    for cls in get_exposed_crawlers():
        if cls.get_id() != crawler_id:
            continue
        incremental = bool(data.get('incremental', False))
        execute_crawler.delay(crawler_id, incremental=incremental)
        return jsonify({'status': 'queued'})
    return jsonify({'status': 'error', 'message': 'No such crawler'},
                   status=400)
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,588
|
gazeti/aleph
|
refs/heads/master
|
/aleph/views/entities_api.py
|
from flask import Blueprint, request
from werkzeug.exceptions import BadRequest
from apikit import obj_or_404, jsonify, request_data, arg_bool
from aleph.core import db, schemata
from aleph.model import Entity, Collection
from aleph.logic import update_entity, delete_entity, combined_entity
from aleph.events import log_event
from aleph.search import QueryState
from aleph.search import entities_query, links_query, entity_documents
from aleph.search import suggest_entities, similar_entities
from aleph.views.util import get_entity
from aleph.views.cache import enable_cache
blueprint = Blueprint('entities_api', __name__)
@blueprint.route('/api/1/entities', methods=['GET'])
def index():
    """Search entities; optionally include per-entity document counts."""
    enable_cache(vary_user=True)
    state = QueryState(request.args, request.authz)
    with_counts = state.getbool('doc_counts')
    return jsonify(entities_query(state, doc_counts=with_counts))
@blueprint.route('/api/1/entities/_all', methods=['GET'])
def all():
    """Return the ids of all active, undeleted entities in the readable
    subset of the requested collections."""
    requested = request.args.getlist('collection_id')
    readable = request.authz.collections_intersect(request.authz.READ, requested)  # noqa
    q = Entity.all_ids()
    q = q.filter(Entity.state == Entity.STATE_ACTIVE)
    q = q.filter(Entity.deleted_at == None)  # noqa
    q = q.filter(Entity.collection_id.in_(readable))
    return jsonify({'results': [entity_id for (entity_id,) in q]})
@blueprint.route('/api/1/entities/_suggest', methods=['GET'])
def suggest():
    """Prefix-based entity name suggestions (typeahead)."""
    enable_cache(vary_user=True, server_side=False)
    min_count = int(request.args.get('min_count', 0))
    prefix = request.args.get('prefix')
    matches = suggest_entities(prefix, request.authz, min_count)
    return jsonify(matches)
@blueprint.route('/api/1/entities', methods=['POST', 'PUT'])
def create():
    """Create a new entity inside a collection the user can write to."""
    data = request_data()
    collection_id = data.get('collection_id')
    try:
        collection_id = int(collection_id)
    except (ValueError, TypeError):
        raise BadRequest("Invalid collection_id")
    collection = obj_or_404(Collection.by_id(collection_id))
    request.authz.require(request.authz.collection_write(collection.id))
    try:
        entity = Entity.save(data, collection)
    except (ValueError, TypeError) as ve:
        # BUG FIX: exceptions have no `.message` attribute on Python 3;
        # str(ve) is correct on both Python 2 and 3.
        raise BadRequest(str(ve))
    entity.collection.touch()
    db.session.commit()
    log_event(request, entity_id=entity.id)
    update_entity(entity)
    return view(entity.id)
@blueprint.route('/api/1/entities/<id>', methods=['GET'])
def view(id):
    """Fetch a single entity by id; requires read access."""
    entity, _ = get_entity(id, request.authz.READ)
    log_event(request, entity_id=id)
    return jsonify(entity)
@blueprint.route('/api/1/entities/<id>/links', methods=['GET'])
def links(id):
    """List graph links attached to an entity; requires read access."""
    state = QueryState(request.args, request.authz)
    entity, _ = get_entity(id, request.authz.READ)
    return jsonify(links_query(entity, state))
@blueprint.route('/api/1/entities/<id>/similar', methods=['GET'])
def similar(id):
    """Find entities similar to the given one; only schemata flagged as
    fuzzy participate in similarity matching."""
    entity, _ = get_entity(id, request.authz.READ)
    schema = schemata.get(entity.get('schema'))
    if not schema.fuzzy:
        # non-fuzzy schemata never yield matches; short-circuit
        empty = {'status': 'ignore', 'results': [], 'total': 0}
        return jsonify(empty)
    state = QueryState(request.args, request.authz)
    combined = combined_entity(entity)
    return jsonify(similar_entities(combined, state))
@blueprint.route('/api/1/entities/<id>/documents', methods=['GET'])
def documents(id):
    """List documents referencing an entity (merged across identities)."""
    entity, _ = get_entity(id, request.authz.READ)
    combined = combined_entity(entity)
    state = QueryState(request.args, request.authz)
    return jsonify(entity_documents(combined, state))
@blueprint.route('/api/1/entities/<id>', methods=['POST', 'PUT'])
def update(id):
    """Update an existing entity; requires write access."""
    _, entity = get_entity(id, request.authz.WRITE)
    try:
        entity = Entity.save(request_data(), entity.collection,
                             merge=arg_bool('merge'))
    except (ValueError, TypeError) as ve:
        # BUG FIX: `.message` does not exist on Python 3 exceptions.
        raise BadRequest(str(ve))
    entity.collection.touch()
    db.session.commit()
    log_event(request, entity_id=entity.id)
    update_entity(entity)
    return view(entity.id)
@blueprint.route('/api/1/entities/<id>/merge/<other_id>', methods=['DELETE'])
def merge(id, other_id):
    """Merge `other_id` into `id`; requires write access to both."""
    _, entity = get_entity(id, request.authz.WRITE)
    _, other = get_entity(other_id, request.authz.WRITE)
    try:
        entity.merge(other)
    except ValueError as ve:
        # BUG FIX: `.message` does not exist on Python 3 exceptions.
        raise BadRequest(str(ve))
    db.session.commit()
    log_event(request, entity_id=entity.id)
    # both entities changed: re-index the survivor and the merged-away one
    update_entity(entity)
    update_entity(other)
    return view(entity.id)
@blueprint.route('/api/1/entities/<id>', methods=['DELETE'])
def delete(id):
    """Soft-delete an entity; requires write access."""
    _, entity = get_entity(id, request.authz.WRITE)
    delete_entity(entity)
    db.session.commit()
    log_event(request, entity_id=entity.id)
    return jsonify({'status': 'ok'})
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,589
|
gazeti/aleph
|
refs/heads/master
|
/aleph/model/entity_identity.py
|
import logging
from aleph.core import db
from aleph.model.common import IdModel, DatedModel
log = logging.getLogger(__name__)
class EntityIdentity(db.Model, IdModel, DatedModel):
    """Records a judgement linking an entity to a potential duplicate."""
    CONFIRMED = 1
    REJECTED = 2
    UNDECIDED = 3
    JUDGEMENTS = [1, 2, 3]
    entity_id = db.Column(db.String(32), db.ForeignKey('entity.id'), index=True)  # noqa
    entity = db.relationship('Entity', backref=db.backref('identities', lazy='dynamic'))  # noqa
    match_id = db.Column(db.String(254), index=True, nullable=False)
    judgement = db.Column(db.Integer(), nullable=False)
    judge_id = db.Column(db.Integer, db.ForeignKey('role.id'), nullable=True)
    @classmethod
    def judgements_by_entity(cls, entity_id):
        """Map match_id -> judgement for all identities of an entity."""
        rows = db.session.query(cls.match_id, cls.judgement)
        rows = rows.filter(cls.entity_id == entity_id)
        return dict(rows.all())
    @classmethod
    def entity_ids(cls, entity_id):
        """The entity's own id followed by all confirmed match ids."""
        confirmed = db.session.query(cls.match_id)
        confirmed = confirmed.filter(cls.entity_id == entity_id)
        confirmed = confirmed.filter(cls.judgement == cls.CONFIRMED)
        return [entity_id] + [match_id for (match_id,) in confirmed.all()]
    @classmethod
    def by_entity_match(cls, entity_id, match_id):
        """Fetch the identity row for a given (entity, match) pair, if any."""
        rows = db.session.query(cls)
        rows = rows.filter(cls.entity_id == entity_id)
        rows = rows.filter(cls.match_id == match_id)
        return rows.first()
    @classmethod
    def save(cls, entity_id, match_id, judgement, judge=None):
        """Create or update the judgement for an (entity, match) pair."""
        obj = cls.by_entity_match(entity_id, match_id)
        if obj is None:
            obj = cls()
        obj.entity_id = entity_id
        obj.match_id = match_id
        obj.judgement = judgement
        # NOTE(review): no `judge` relationship is declared on this model;
        # presumably a backref on Role provides it — confirm it persists
        # via judge_id.
        obj.judge = judge
        db.session.add(obj)
        return obj
    def __repr__(self):
        return 'EntityIdentity(%r, %r, %r)' % (self.entity_id, self.match_id,
                                               self.judgement)
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,590
|
gazeti/aleph
|
refs/heads/master
|
/aleph/model/entity.py
|
import logging
from datetime import datetime
from sqlalchemy import func
from sqlalchemy.orm import joinedload
from sqlalchemy.dialects.postgresql import JSONB, ARRAY
from aleph.core import db, schemata
from aleph.text import match_form, string_value
from aleph.util import ensure_list
from aleph.model.collection import Collection
from aleph.model.reference import Reference
from aleph.model.entity_identity import EntityIdentity
from aleph.model.common import SoftDeleteModel, UuidModel
from aleph.model.common import make_textid, merge_data
log = logging.getLogger(__name__)
class Entity(db.Model, UuidModel, SoftDeleteModel):
    """A named thing of interest (person, company, ...) within a collection.

    Rows are soft-deleted: ``delete`` stamps ``deleted_at`` (via
    SoftDeleteModel) rather than removing the row.
    """

    # Lifecycle states stored in the ``state`` column.
    STATE_ACTIVE = 'active'
    STATE_PENDING = 'pending'
    STATE_DELETED = 'deleted'

    name = db.Column(db.Unicode)
    # Name of the schema describing this entity (see the ``schema`` property).
    type = db.Column(db.String(255), index=True)
    state = db.Column(db.String(128), nullable=True, default=STATE_ACTIVE, index=True)  # noqa
    # Identifiers used by source datasets for this entity.
    foreign_ids = db.Column(ARRAY(db.Unicode()))
    # Schema-validated property data (JSONB blob).
    data = db.Column('data', JSONB)
    collection_id = db.Column(db.Integer, db.ForeignKey('collection.id'), index=True)  # noqa
    collection = db.relationship(Collection, backref=db.backref('entities', lazy='dynamic'))  # noqa

    def delete_references(self, origin=None):
        """Hard-delete Reference rows pointing at this entity.

        When ``origin`` is given, only references from that origin are
        removed.
        """
        pq = db.session.query(Reference)
        pq = pq.filter(Reference.entity_id == self.id)
        if origin is not None:
            pq = pq.filter(Reference.origin == origin)
        pq.delete(synchronize_session='fetch')
        db.session.refresh(self)

    def delete_identities(self):
        """Hard-delete all EntityIdentity rows involving this entity."""
        pq = db.session.query(EntityIdentity)
        pq = pq.filter(EntityIdentity.entity_id == self.id)
        pq.delete(synchronize_session='fetch')
        db.session.refresh(self)

    def delete(self, deleted_at=None):
        """Soft-delete the entity, cascading to its references, identities
        and alerts."""
        self.delete_references()
        self.delete_identities()
        deleted_at = deleted_at or datetime.utcnow()
        for alert in self.alerts:
            alert.delete(deleted_at=deleted_at)
        self.state = self.STATE_DELETED
        super(Entity, self).delete(deleted_at=deleted_at)

    @classmethod
    def delete_dangling(cls, collection_id):
        """Delete dangling entities.

        Entities can dangle in pending state while they have no references
        pointing to them, thus making it impossible to enable them. This is
        a routine cleanup function.
        """
        q = db.session.query(cls)
        q = q.filter(cls.collection_id == collection_id)
        q = q.filter(cls.state == cls.STATE_PENDING)
        q = q.outerjoin(Reference)
        q = q.group_by(cls)
        # Keep only entities with zero references.
        q = q.having(func.count(Reference.id) == 0)
        for entity in q.all():
            entity.delete()

    def merge(self, other):
        """Fold ``other`` into this entity, then delete ``other``.

        Merges property data and foreign ids, re-points alerts and
        references, and commits the session. Both entities must belong to
        the same collection.
        """
        if self.id == other.id:
            raise ValueError("Cannot merge an entity with itself.")
        if self.collection_id != other.collection_id:
            raise ValueError("Cannot merge entities from different collections.")  # noqa
        data = merge_data(self.data, other.data)
        if self.name.lower() != other.name.lower():
            # Keep the other entity's name retrievable as an alias.
            data = merge_data(data, {'alias': [other.name]})
        self.data = data
        self.state = self.STATE_ACTIVE
        self.foreign_ids = self.foreign_ids or []
        self.foreign_ids += other.foreign_ids or []
        self.created_at = min((self.created_at, other.created_at))
        self.updated_at = datetime.utcnow()
        # update alerts
        from aleph.model.alert import Alert
        q = db.session.query(Alert).filter(Alert.entity_id == other.id)
        q.update({'entity_id': self.id})
        # update document references
        from aleph.model.reference import Reference
        q = db.session.query(Reference).filter(Reference.entity_id == other.id)
        q.update({'entity_id': self.id})
        # delete source entities
        other.delete()
        db.session.add(self)
        db.session.commit()
        db.session.refresh(other)

    def update(self, entity):
        """Apply a payload dict: validate against the schema and refresh
        name, foreign ids and state.

        NOTE(review): pops 'state' from the caller's payload dict.
        """
        data = entity.get('data') or {}
        data['name'] = entity.get('name')
        self.data = self.schema.validate(data)
        # 'name' lives in its own column, not inside the data blob.
        self.name = self.data.pop('name')
        fid = [string_value(f) for f in entity.get('foreign_ids') or []]
        self.foreign_ids = list(set([f for f in fid if f is not None]))
        self.state = entity.pop('state', self.STATE_ACTIVE)
        self.updated_at = datetime.utcnow()
        db.session.add(self)

    @classmethod
    def save(cls, data, collection, merge=False):
        """Create or update an entity from a payload dict.

        Raises ValueError when a new entity has no schema, or when no
        collection is given.
        """
        ent = cls.by_id(data.get('id'))
        if ent is None:
            ent = cls()
            ent.type = data.pop('schema', None)
            if ent.type is None:
                raise ValueError("No schema provided.")
            ent.id = make_textid()
        if merge:
            data = merge_data(data, ent.to_dict())
        if collection is None:
            raise ValueError("No collection specified.")
        ent.collection = collection
        ent.update(data)
        return ent

    @classmethod
    def filter_collections(cls, q, collections=None):
        """Restrict query ``q`` to the given collections (objects or ids)."""
        if collections is None:
            return q
        collection_ids = []
        for collection in collections:
            if isinstance(collection, Collection):
                collection = collection.id
            collection_ids.append(collection)
        q = q.filter(Entity.collection_id.in_(collection_ids))
        return q

    @classmethod
    def by_id_set(cls, ids, collections=None):
        """Fetch several entities at once as an id -> entity mapping."""
        if not len(ids):
            return {}
        q = cls.all()
        q = cls.filter_collections(q, collections=collections)
        q = q.options(joinedload('collection'))
        q = q.filter(cls.id.in_(ids))
        entities = {}
        for ent in q:
            entities[ent.id] = ent
        return entities

    @classmethod
    def by_foreign_id(cls, foreign_id, collection_id, deleted=False):
        """Find an entity in a collection via a source dataset identifier."""
        foreign_id = string_value(foreign_id)
        if foreign_id is None:
            return None
        q = cls.all(deleted=deleted)
        q = q.filter(Entity.collection_id == collection_id)
        # ARRAY containment needs the probe cast to an array type.
        foreign_id = func.cast([foreign_id], ARRAY(db.Unicode()))
        q = q.filter(cls.foreign_ids.contains(foreign_id))
        # Prefer live rows over soft-deleted ones.
        q = q.order_by(Entity.deleted_at.desc().nullsfirst())
        return q.first()

    @classmethod
    def latest(cls):
        """Return the most recent update timestamp of any active entity."""
        q = db.session.query(func.max(cls.updated_at))
        q = q.filter(cls.state == cls.STATE_ACTIVE)
        return q.scalar()

    @property
    def schema(self):
        # Resolve the schema object for this entity's ``type`` name.
        return schemata.get(self.type)

    @property
    def terms(self):
        """All name forms of the entity: its name plus non-empty aliases."""
        terms = set([self.name])
        for alias in ensure_list(self.data.get('alias')):
            if alias is not None and len(alias):
                terms.add(alias)
        return terms

    @property
    def regex_terms(self):
        """Normalized name forms suitable for regex matching (4-120 chars,
        with no term containing another)."""
        # This is to find the shortest possible regex for each entity.
        # If, for example, and entity matches both "Al Qaeda" and
        # "Al Qaeda in Iraq, Syria and the Levant", it is useless to
        # search for the latter.
        terms = set([match_form(t) for t in self.terms])
        regex_terms = set()
        for term in terms:
            if term is None or len(term) < 4 or len(term) > 120:
                continue
            contained = False
            for other in terms:
                if other is None or other == term:
                    continue
                if other in term:
                    contained = True
            if not contained:
                regex_terms.add(term)
        return regex_terms

    def to_dict(self):
        """Serialize to an API-friendly dict."""
        data = super(Entity, self).to_dict()
        data.update({
            'schema': self.type,
            'name': self.name,
            'state': self.state,
            'data': self.data,
            'foreign_ids': self.foreign_ids or [],
            'collection_id': self.collection_id
        })
        return data

    def to_index(self):
        """Serialize for the search index, flattening data into a
        ``properties`` mapping of value lists."""
        entity = self.to_dict()
        entity['properties'] = {'name': [self.name]}
        for k, v in self.data.items():
            v = ensure_list(v)
            if len(v):
                entity['properties'][k] = v
        return entity

    def to_ref(self):
        """A minimal reference stub for embedding in other documents."""
        return {
            'id': self.id,
            'label': self.name,
            'schema': self.type,
            'collection_id': self.collection_id
        }

    def __unicode__(self):
        return self.name

    def __repr__(self):
        return '<Entity(%r, %r)>' % (self.id, self.name)
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,591
|
gazeti/aleph
|
refs/heads/master
|
/aleph/datasets/mapper.py
|
import logging
import fingerprints
from hashlib import sha1
from pprint import pprint # noqa
from aleph.core import schemata
from aleph.schema import Schema
from aleph.util import dict_list, unique_list
from aleph.text import string_value
from aleph.datasets.formatting import Formatter
from aleph.datasets.util import finalize_index
log = logging.getLogger(__name__)
class MapperProperty(object):
    """One mapped property of a dataset query.

    Extracts raw values from a source record (columns, literals or a
    template), cleans them through the schema type and optionally joins
    them into a single string.
    """

    def __init__(self, mapper, name, data, schema):
        self.mapper = mapper
        self.name = name
        self.data = data
        self.schema = schema
        self.refs = dict_list(data, 'column', 'columns')
        self.literals = dict_list(data, 'literal', 'literals')
        self.join = data.get('join')
        # Hacky: templates embed column references, so harvest those
        # refs from the compiled formatter as well.
        self.template = data.get('template')
        if self.template is not None:
            self.formatter = Formatter(self.template)
            self.refs.extend(self.formatter.refs)

    def get_values(self, record):
        """Return cleaned, de-duplicated values for ``record``."""
        if self.template is not None:
            raw = [self.formatter.apply(record)]
        else:
            raw = [record.get(ref) for ref in self.refs]
        raw = raw + list(self.literals)
        cleaned = [self.schema.type.clean(v, record, self.data) for v in raw]
        cleaned = [v for v in cleaned if v is not None]
        if self.join is not None:
            cleaned = [self.join.join(cleaned)]
        return unique_list(cleaned)

    def __repr__(self):
        return '<MapperProperty(%r, %r, %r)>' % (self.mapper, self.name,
                                                 self.schema)
class Mapper(object):
    """Base class for entity/link mappers: turns source records into
    documents for the search index."""

    def __init__(self, query, data):
        self.query = query
        self.data = data
        self.keys = dict_list(data, 'keys', 'key')
        self.key_fingerprint = data.get('key_fingerprint', False)
        self.schema = schemata.get(data.get('schema'))
        if self.schema is None or self.schema.section != self.section:
            raise TypeError("Invalid schema: %r" % data.get('schema'))
        self.properties = [
            MapperProperty(self, name, prop, self.schema.get(name))
            for name, prop in data.get('properties', {}).items()
        ]

    @property
    def refs(self):
        """Yield every source column referenced by keys or properties."""
        for key in self.keys:
            yield key
        for prop in self.properties:
            for ref in prop.refs:
                yield ref

    def compute_properties(self, record):
        """Map each property name to its value list for ``record``."""
        return {prop.name: prop.get_values(record) for prop in self.properties}

    def compute_key(self, record):
        """Derive a stable SHA1 id from the dataset name plus key columns.

        Returns None when no key column yields a usable value.
        """
        digest = sha1(self.query.dataset.name.encode('utf-8'))
        seen_value = False
        for key in self.keys:
            raw = record.get(key)
            if self.key_fingerprint:
                part = fingerprints.generate(raw)
            else:
                part = string_value(raw)
            if part is None:
                continue
            digest.update(part.encode('utf-8'))
            seen_value = True
        if seen_value:
            return digest.hexdigest()

    def to_index(self, record):
        """Base index document: dataset name, roles, computed properties."""
        return {
            'dataset': self.query.dataset.name,
            'roles': self.query.dataset.roles,
            'properties': self.compute_properties(record)
        }

    def __repr__(self):
        return '<Mapper(%r)>' % self.query
class EntityMapper(Mapper):
    """Maps source records onto entity documents."""

    section = Schema.ENTITY

    def __init__(self, query, name, data):
        self.name = name
        super(EntityMapper, self).__init__(query, data)
        if not len(self.keys):
            log.warning("No key criteria defined: %r", data)

    def to_index(self, record):
        """Build an index document; None when the record yields no key."""
        doc = super(EntityMapper, self).to_index(record)
        doc['id'] = self.compute_key(record)
        if doc['id'] is None:
            return None
        return finalize_index(doc, self.schema)
class LinkMapper(Mapper):
    """Maps a source record onto a link (edge) between two entities."""

    section = Schema.LINK

    def __init__(self, query, data):
        super(LinkMapper, self).__init__(query, data)

    def to_index(self, record, entities, inverted=False):
        """Build a link index document from ``record``.

        ``entities`` maps mapper names to already-computed entity docs;
        if either endpoint is missing, no link is emitted. ``inverted``
        swaps source and target.
        """
        doc = super(LinkMapper, self).to_index(record)
        doc['inverted'] = inverted
        source = self.data.get('source')
        target = self.data.get('target')
        origin = entities.get(source)
        remote = entities.get(target)
        if inverted:
            origin, remote = remote, origin
        if origin is None or remote is None:
            # If data was missing for either the source or target entity
            # they will be None, and we should not create a link.
            return None
        # We don't need to index the entity here, since it's already known
        # in the simplest case (entity profile pages).
        doc['origin'] = {
            'id': origin.get('id'),
            'fingerprints': origin.get('fingerprints'),
        }
        # this is expanded post entity indexing.
        doc['remote'] = remote.get('id')
        # Derive a stable link id from direction, both endpoints and the
        # optional key columns (update order matters for stability).
        digest = sha1()
        digest.update(str(inverted))
        digest.update(origin['id'])
        digest.update(remote['id'])
        key_digest = self.compute_key(record)
        if key_digest is not None:
            digest.update(key_digest)
        doc['id'] = digest.hexdigest()
        return finalize_index(doc, self.schema)
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,592
|
gazeti/aleph
|
refs/heads/master
|
/aleph/model/cache.py
|
from aleph.core import db
class Cache(db.Model):
    """Store OCR computation results.

    A simple key/value table used to memoize expensive text extraction.
    """
    __tablename__ = 'cache'

    id = db.Column(db.BigInteger, primary_key=True)
    key = db.Column(db.Unicode, index=True)
    value = db.Column(db.Unicode)

    @classmethod
    def get_cache(cls, key):
        """Return the cached value for ``key``, or None when absent."""
        q = db.session.query(cls.value)
        q = q.filter_by(key=key)
        cobj = q.first()
        if cobj is not None:
            return cobj.value

    @classmethod
    def set_cache(cls, key, value):
        """Store ``value`` under ``key``.

        Uses a dedicated session — presumably so the write commits
        independently of the request-scoped session; confirm before
        changing this to ``db.session``.
        """
        session = db.sessionmaker(bind=db.engine)()
        try:
            cobj = cls()
            cobj.key = key
            cobj.value = value
            session.add(cobj)
            session.commit()
        finally:
            # BUGFIX: the ad-hoc session is not managed by the scoped
            # registry; without an explicit close, every call leaked a
            # session (and its checked-out connection).
            session.close()

    def __repr__(self):
        return '<Cache(%r)>' % self.key

    def __unicode__(self):
        return self.key
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,593
|
gazeti/aleph
|
refs/heads/master
|
/aleph/logic/leads.py
|
# Lead generator
import logging
from aleph.authz import Authz
from aleph.core import db
from aleph.index import delete_entity_leads, index_lead
from aleph.search import QueryState
from aleph.search.entities import load_entity, similar_entities
from aleph.model import EntityIdentity
from aleph.logic.distance import entity_distance
log = logging.getLogger(__name__)
def generate_leads(entity_id):
    """Compute likely duplicates of a given entity and index these leads."""
    # Get rid of everything, also for deleted entities etc.
    delete_entity_leads(entity_id)
    entity = load_entity(entity_id)
    if entity is None:
        # log.warning("[%r] not indexed, skip lead generation.", entity_id)
        return
    if not entity.get('collection_id'):
        # log.warning("[%r] is not in a collecton, skip lead generation.", entity_id) # noqa
        return
    log.debug("Generating leads for [%(id)s]: %(name)s", entity)
    # override=True: presumably searches across all collections regardless
    # of caller rights — confirm against Authz.
    authz = Authz(override=True)
    # Existing human judgements, so they survive re-generation below.
    judgements = EntityIdentity.judgements_by_entity(entity_id)
    state = QueryState({}, authz, limit=100)
    result = similar_entities(entity, state)
    for other in result.get('results', []):
        score = entity_distance(entity, other)
        log.debug(" -[%.2f]-> %s", score, other.get('name'))
        # TODO: implement some cut-off
        index_lead({
            'entity_id': entity.get('id'),
            'entity_collection_id': entity.get('collection_id'),
            'score': score,
            'judgement': judgements.get(other.get('id'), 0),
            'match_id': other.get('id'),
            'schema': other.get('schema'),
            'schemata': other.get('schemata'),
            'collection_id': other.get('collection_id'),
            'dataset': other.get('dataset'),
            'roles': other.get('roles')
        })
def update_lead(entity, match, judgement, judge=None):
    """Persist a judgement for an (entity, match) pair and re-index the
    corresponding lead."""
    EntityIdentity.save(entity.get('id'), match.get('id'),
                        judgement, judge=judge)
    db.session.commit()
    lead = {
        'entity_id': entity.get('id'),
        'entity_collection_id': entity.get('collection_id'),
        'score': entity_distance(entity, match),
        'judgement': judgement,
        'match_id': match.get('id'),
        'schema': match.get('schema'),
        'schemata': match.get('schemata'),
        'collection_id': match.get('collection_id'),
        'dataset': match.get('dataset'),
        'roles': match.get('roles')
    }
    index_lead(lead)
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,594
|
gazeti/aleph
|
refs/heads/master
|
/aleph/ingest/result.py
|
import logging
from collections import OrderedDict
from ingestors import Result
from normality import stringify
from aleph.core import db
from aleph.model import Document, DocumentRecord
from aleph.model import DocumentTag, DocumentTagCollector
log = logging.getLogger(__name__)
class DocumentResult(Result):
"""Wrapper to link a Document to an ingestor result object."""
def __init__(self, manager, document, file_path=None, user_queue=False):
self.manager = manager
self.user_queue = user_queue
self.document = document
self.pdf_hash = document.pdf_version
self.columns = OrderedDict()
bind = super(DocumentResult, self)
bind.__init__(id=document.foreign_id,
checksum=document.content_hash,
file_path=file_path,
title=document.meta.get('title'),
summary=document.meta.get('summary'),
author=document.meta.get('author'),
keywords=document.meta.get('keywords', []),
file_name=document.meta.get('file_name'),
mime_type=document.meta.get('mime_type'),
encoding=document.meta.get('encoding'),
languages=document.meta.get('languages', []),
headers=document.meta.get('headers'),
size=document.meta.get('file_size'))
def emit_page(self, index, text):
"""Emit a plain text page."""
self.document.type = Document.TYPE_TEXT
record = DocumentRecord()
record.document_id = self.document.id
record.text = text
record.index = index
db.session.add(record)
def _emit_iterator_rows(self, iterator):
for row in iterator:
for column in row.keys():
self.columns[column] = None
yield row
def emit_rows(self, iterator):
"""Emit rows of a tabular iterator."""
# TODO: also generate a tabular rep for the metadata
self.document.type = Document.TYPE_TABULAR
self.document.insert_records(0, self._emit_iterator_rows(iterator))
def emit_pdf_alternative(self, file_path):
self.pdf_hash = self.manager.archive.archive_file(file_path)
def update(self):
"""Apply the outcome of the result to the document."""
if self.status == self.STATUS_SUCCESS:
self.document.status = Document.STATUS_SUCCESS
self.document.error_message = None
else:
self.document.status = Document.STATUS_FAIL
self.document.type = Document.TYPE_OTHER
self.document.error_message = self.error_message
self.document.foreign_id = stringify(self.id)
if self.checksum:
self.document.content_hash = self.checksum
self.document.file_size = self.size
self.document.file_name = self.file_name
self.document.title = stringify(self.title)
self.document.summary = stringify(self.summary)
self.document.author = stringify(self.author)
self.document.keywords = self.keywords
self.document.mime_type = stringify(self.mime_type)
self.document.encoding = self.encoding
self.document.languages = self.languages
self.document.headers = self.headers
self.document.pdf_version = self.pdf_hash
self.document.columns = self.columns.keys()
collector = DocumentTagCollector(self.document, 'ingestors')
for entity in self.entities:
collector.emit(entity, DocumentTag.TYPE_PERSON)
for email in self.emails:
collector.emit(email, DocumentTag.TYPE_EMAIL)
collector.save()
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,595
|
gazeti/aleph
|
refs/heads/master
|
/aleph/logic/alerts.py
|
import logging
from urllib import quote_plus
from flask import render_template, current_app
from aleph.authz import Authz
from aleph.core import app_title, app_url, db, celery
from aleph.model import Role, Alert, Collection
from aleph.notify import notify_role
from aleph.search import QueryState, documents_query
log = logging.getLogger(__name__)
@celery.task()
def check_alerts():
    """Go through all users and execute their alerts."""
    for role_id, in Role.notifiable():
        # A request context is pushed per role — presumably so template/URL
        # helpers used while rendering notification e-mails work; confirm.
        with current_app.test_request_context('/'):
            role = Role.by_id(role_id)
            authz = Authz(role=role)
            check_role_alerts(authz)
def format_results(alert, results):
    """Prepare search results for the alert notification template.

    Resolves each result's collection, builds a highlight-enabled document
    URL and collects text snippets for preview. Results without a
    collection id are skipped.
    """
    # used to activate highlighting in results pages:
    dq = alert.query_text or ''
    qs = 'dq=%s' % quote_plus(dq.encode('utf-8'))
    output = []
    for result in results['results']:
        collection_id = result.pop('collection_id', None)
        if not collection_id:
            continue
        # NOTE: one DB lookup per result (N+1); acceptable for <= 50 hits.
        result['collection'] = Collection.by_id(collection_id)
        # generate document URL:
        if 'tabular' == result.get('type'):
            result['url'] = '%stabular/%s/0?%s' % (app_url, result['id'], qs)
        else:
            result['url'] = '%stext/%s?%s' % (app_url, result['id'], qs)
        # preview snippets:
        result['snippets'] = []
        # BUGFIX: hits without a 'records' sub-result used to raise
        # KeyError; fall back to an empty mapping instead.
        records = result.get('records') or {}
        for record in records.get('results', []):
            result['snippets'].append(record['text'])
        output.append(result)
    return output
def check_role_alerts(authz):
    """Execute all alerts of one role and e-mail any new results."""
    alerts = Alert.by_role(authz.role).all()
    if not len(alerts):
        return
    log.info('Alerting %r, %d alerts...', authz.role, len(alerts))
    for alert in alerts:
        args = {
            'q': alert.query_text,
            'filter:entities.id': alert.entity_id,
            'limit': 50
        }
        state = QueryState(args, authz)
        # Only results newer than the last notification are considered.
        results = documents_query(state, since=alert.notified_at)
        if results['total'] == 0:
            continue
        log.info('Found %d new results for: %r', results['total'], alert.label)
        # alert.update() presumably advances notified_at — confirm in model.
        alert.update()
        try:
            subject = '%s (%s new results)' % (alert.label, results['total'])
            html = render_template('email/alert.html',
                                   alert=alert,
                                   role=authz.role,
                                   total=results.get('total'),
                                   results=format_results(alert, results),
                                   app_title=app_title,
                                   app_url=app_url)
            notify_role(authz.role, subject, html)
        except Exception as ex:
            # A failing e-mail must not block the remaining alerts.
            log.exception(ex)
    # Single commit persists all alert updates from this run.
    db.session.commit()
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,596
|
gazeti/aleph
|
refs/heads/master
|
/aleph/schema/__init__.py
|
from aleph.util import dict_list, ensure_list
from aleph.text import string_value
from aleph.schema.types import resolve_type
class SchemaValidationException(Exception):
    """Schema validation errors will be caught by the API."""

    def __init__(self, errors):
        # Keep the per-property error mapping accessible to handlers.
        self.errors = errors
        message = repr(errors)
        super(SchemaValidationException, self).__init__(message)
class SchemaProperty(object):
    """A single named property of a schema, with its type and flags."""

    def __init__(self, schema, name, data):
        self.schema = schema
        self.name = name.strip()
        self.data = data
        self.label = data.get('label', name)
        self.hidden = data.get('hidden', False)
        self.is_multiple = data.get('multiple', False)
        # The 'name' property doubles as the entity label and is mandatory.
        self.is_label = name == 'name'
        cls = resolve_type(data.get('type', 'string'))
        self.type = cls()

    def validate(self, data):
        """Validate that the data should be stored.

        Since the types system doesn't really have validation, this currently
        tries to normalize the value to see if it passes strict parsing.
        """
        error = None
        cleaned = []
        for raw in ensure_list(data):
            text = string_value(raw)
            if text is None:
                continue
            text = text.strip()
            if self.type.normalize_value(text) is None:
                error = "Invalid value"
            cleaned.append(text)
        if self.is_multiple:
            value = list(set(cleaned))
        else:
            value = cleaned[0] if len(cleaned) else None
        if self.is_label and (value is None or not len(value)):
            error = "Field is required."
        return value, error

    def to_dict(self):
        return {
            'name': self.name,
            'label': self.label,
            'hidden': self.hidden,
            'type': self.type.name
        }

    def __repr__(self):
        return '<SchemaProperty(%r, %r)>' % (self.schema, self.name)
class Schema(object):
    """Defines the abstract data model.

    Schema items define the entities and links available in the model.
    """

    # Section names under which schemata are grouped in the model config.
    ENTITY = 'entities'
    LINK = 'links'
    SECTIONS = [ENTITY, LINK]

    def __init__(self, schemata, section, name, data):
        assert section in self.SECTIONS, section
        self._schemata = schemata
        self.section = section
        self.name = name
        self.data = data
        self.label = data.get('label', name)
        self.plural = data.get('plural', self.label)
        self.icon = data.get('icon')
        # Do not show in listings:
        self.hidden = data.get('hidden', False)
        # Try to perform fuzzy matching. Fuzzy similarity search does not
        # make sense for entities which have a lot of similar names, such
        # as land plots, assets etc.
        self.fuzzy = data.get('fuzzy', True)
        self._extends = dict_list(data, 'extends')
        self._own_properties = []
        for name, prop in data.get('properties', {}).items():
            self._own_properties.append(SchemaProperty(self, name, prop))
        # Directional labels, serialized only for link schemata (to_dict).
        self.forward = data.get('forward', self.label)
        self.reverse = data.get('reverse', self.label)

    @property
    def extends(self):
        """Return the inherited schemata."""
        for base in self._extends:
            yield self._schemata.get(base)

    @property
    def schemata(self):
        """Return the full inheritance chain."""
        yield self
        for base in self.extends:
            for schema in base.schemata:
                yield schema

    @property
    def properties(self):
        """Return properties, those defined locally and in ancestors."""
        names = set()
        for prop in self._own_properties:
            names.add(prop.name)
            yield prop
        for schema in self.extends:
            for prop in schema.properties:
                if prop.name in names:
                    # Local definitions shadow inherited ones.
                    continue
                names.add(prop.name)
                yield prop

    def get(self, name):
        """Return the named property; raise ValueError when unknown."""
        for prop in self.properties:
            if prop.name == name:
                return prop
        raise ValueError("[%r] missing property: %s" % (self, name))

    def validate(self, data):
        """Validate a dataset against the given schema.

        This will also drop keys which are not present as properties.
        """
        result = {}
        errors = {}
        for prop in self.properties:
            value = data.get(prop.name)
            value, error = prop.validate(value)
            if error is not None:
                errors[prop.name] = error
            elif value is not None:
                result[prop.name] = value
        if len(errors):
            # All per-property errors are raised together.
            raise SchemaValidationException(errors)
        return result

    def to_dict(self):
        """Serialize the schema definition for API consumers."""
        data = {
            'type': self.section,
            'label': self.label,
            'plural': self.plural,
            'icon': self.icon,
            'hidden': self.hidden,
            'fuzzy': self.fuzzy,
            'properties': list(self.properties)
        }
        if self.section == Schema.LINK:
            data['forward'] = self.forward
            data['reverse'] = self.reverse
        return data

    def __repr__(self):
        return '<Schema(%r)>' % self.name
class SchemaSet(object):
    """A collection of schemata."""

    def __init__(self, data):
        self.schemata = {}
        for section in Schema.SECTIONS:
            for name, sconfig in data.get(section, {}).items():
                if name in self.schemata:
                    raise TypeError("Duplicate schema name: %r" % name)
                self.schemata[name] = Schema(self, section, name, sconfig)

    def get(self, name):
        """Look up a schema by name; raise TypeError when unknown."""
        schema = self.schemata.get(name)
        if schema is None:
            raise TypeError("No such schema: %r" % name)
        return schema

    def merge_entity_schema(self, left, right):
        """Select the most narrow of two schemata.

        When indexing data from a dataset, an entity may be declared as a
        LegalEntity in one query, and as a Person in another. This function
        will select the most specific of two schemata offered. In the example,
        that would be Person.
        """
        if left == right:
            return left
        left_chain = [s.name for s in self.get(left).schemata]
        if right in left_chain:
            return left
        right_chain = [s.name for s in self.get(right).schemata]
        if left in right_chain:
            return right
        # Otherwise: the first common ancestor, in left-chain order.
        for candidate in left_chain:
            if candidate in right_chain:
                return candidate

    def to_dict(self):
        """Serialize all visible (non-hidden) schemata."""
        return {name: schema
                for name, schema in self.schemata.items()
                if not schema.hidden}

    def __iter__(self):
        return iter(self.schemata.values())

    def __repr__(self):
        return '<SchemaSet(%r)>' % self.schemata
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,597
|
gazeti/aleph
|
refs/heads/master
|
/aleph/logic/collections.py
|
import logging
from datetime import datetime
from aleph.core import db, celery
from aleph.model import Collection, Entity
from aleph.index.collections import delete_collection as index_delete
from aleph.analyze import analyze_documents
from aleph.logic.entities import delete_entity
from aleph.logic.entities import update_entity_full
from aleph.logic.documents import delete_document
log = logging.getLogger(__name__)
def update_collection(collection):
    """Create or update a collection.

    Placeholder: the body is intentionally empty; indexing/update logic
    has not been implemented yet.
    """
    pass
@celery.task()
def analyze_collection(collection_id):
    """Re-analyze the elements of this collection, documents and entities.

    :param collection_id: primary key of the collection to re-process.
    Logs an error and does nothing when the collection does not exist.
    """
    Entity.delete_dangling(collection_id)
    db.session.commit()
    q = db.session.query(Collection).filter(Collection.id == collection_id)
    collection = q.first()
    if collection is None:
        log.error("No collection with ID: %r", collection_id)
        # FIX: bail out here; the original fell through and dereferenced
        # `collection.id` on None, raising AttributeError.
        return
    # re-process the documents
    analyze_documents(collection.id)
    # re-process entities
    for entity in collection.entities:
        update_entity_full(entity.id)
@celery.task()
def delete_collection(collection_id):
    """Fully delete a collection with its entities and documents.

    Deleting a collection affects many associated objects and requires
    checks, so this is done manually and in detail here.
    """
    query = db.session.query(Collection)
    query = query.filter(Collection.id == collection_id)
    collection = query.first()
    if collection is None:
        log.error("No collection with ID: %r", collection_id)
        return
    log.info("Deleting collection [%r]: %r", collection.id, collection.label)
    index_delete(collection_id)
    # A single timestamp marks every object removed by this run.
    when = datetime.utcnow()
    for entity in collection.entities:
        # TODO: consider hard-deleting entities because the polyglot tagger
        # cannot tell if a deleted match on a tagged term on a revived
        # collection means not to tag this entity any more.
        log.info("Delete entity: %r", entity)
        delete_entity(entity, deleted_at=when)
    for document in collection.documents:
        log.info("Delete document: %r", document)
        delete_document(document, deleted_at=when)
    db.session.refresh(collection)
    collection.delete(deleted_at=when)
    db.session.commit()
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,598
|
gazeti/aleph
|
refs/heads/master
|
/aleph/datasets/formatting.py
|
import re
import six
from normality import collapse_spaces
FORMAT_PATTERN = re.compile('{{([^(}})]*)}}')


class Formatter(object):
    """Expand '{{column}}' placeholders in a template from record values."""

    def __init__(self, template):
        self.template = six.text_type(template)
        self.refs = []
        self.replacements = {}
        # Pre-compute the placeholder -> reference mapping once.
        for ref in FORMAT_PATTERN.findall(self.template):
            placeholder = '{{%s}}' % ref
            self.refs.append(ref)
            self.replacements[placeholder] = ref

    def apply(self, record):
        """Render the template against *record*, collapsing whitespace."""
        rendered = six.text_type(self.template)
        for placeholder, ref in self.replacements.items():
            substitute = six.text_type(record.get(ref) or '')
            rendered = rendered.replace(placeholder, substitute)
        return collapse_spaces(rendered).strip()
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,599
|
gazeti/aleph
|
refs/heads/master
|
/aleph/tests/test_role_model.py
|
import ldap
from flexmock import flexmock
from aleph.core import db
from aleph.model import Role
from aleph.tests.factories.models import RoleFactory
from aleph.core import get_config
from aleph.model.role import LDAPException
from aleph.tests.util import TestCase
class RoleModelTest(TestCase):
    """Unit tests for the Role model: passwords, LDAP auth, lookups."""

    def setUp(self):
        super(RoleModelTest, self).setUp()
        self.role = RoleFactory.create()
        db.session.commit()

    def test_password(self):
        password = self.fake.password()
        subject = RoleFactory.create()
        # An unset password never matches.
        self.assertFalse(subject.check_password(password))
        subject.set_password(password)
        self.assertTrue(subject.check_password(password))
        # Setting a new password invalidates the previous one.
        subject.set_password(self.fake.password())
        self.assertFalse(subject.check_password(password))

    def test_authenticate_using_ldap_with_blank_password(self):
        blank = ''
        outcome = Role.authenticate_using_ldap(self.role.email, blank)
        self.assertIsNone(outcome)

    def test_authenticate_using_ldap_with_bad_user_pass(self):
        secret = self.fake.password()
        email = self.fake.email()
        bind_dn = get_config('LDAP_BASE_DN').format(email)
        conn = flexmock(set_option=lambda x, y: x)
        # Simulate the LDAP server rejecting the bind exactly once.
        (flexmock(conn)
            .should_receive('simple_bind_s')
            .with_args(bind_dn, secret)
            .and_raise(LDAPException('Failed auth.'))
            .times(1))
        (flexmock(ldap)
            .should_receive('initialize')
            .and_return(conn))
        self.assertIsNone(Role.authenticate_using_ldap(email, secret))

    def test_authenticate_using_ldap_with_good_user_pass(self):
        secret = self.fake.password()
        email = self.fake.email()
        bind_dn = get_config('LDAP_BASE_DN').format(email)
        conn = flexmock(set_option=lambda x, y: x)
        # Simulate a successful bind followed by a clean unbind.
        (flexmock(conn)
            .should_receive('simple_bind_s')
            .with_args(bind_dn, secret)
            .and_return(None)
            .times(1))
        (flexmock(conn)
            .should_receive('unbind_s')
            .and_return(None)
            .times(1))
        (flexmock(ldap)
            .should_receive('initialize')
            .and_return(conn))
        role = Role.authenticate_using_ldap(email, secret)
        self.assertIsInstance(role, Role)
        self.assertEqual(role.email, email)

    def test_by_email_when_blank_email(self):
        self.assertIsNone(Role.by_email(None))

    def test_by_email_does_not_match(self):
        self.assertIsNone(Role.by_email(self.fake.email()).first())

    def test_by_email_matches(self):
        self.assertEqual(Role.by_email(self.role.email).first(), self.role)

    def test_load_or_create_role_exists(self):
        existing = Role.load_or_create(
            foreign_id=self.role.foreign_id,
            type=self.role.type,
            name=self.role.name,
            email=self.role.email,
        )
        self.assertEqual(existing, self.role)
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,600
|
gazeti/aleph
|
refs/heads/master
|
/aleph/tests/test_sessions_api.py
|
from aleph.tests.util import TestCase
from aleph.tests.factories.models import RoleFactory
class SessionsApiTestCase(TestCase):
    """API tests for the /api/1/sessions endpoints."""

    def setUp(self):
        super(SessionsApiTestCase, self).setUp()
        self.role = RoleFactory.create()

    def test_status_get_with_password_registration_enabled(self):
        res = self.client.get('/api/1/sessions')
        assert res.status_code == 200, res
        providers = res.json['providers']
        assert len(providers) == 1, res
        assert providers[0]['name'] == 'password', res
        assert providers[0]['registration'] == True, res  # noqa

    def test_status_get_with_password_registration_disabled(self):
        self.app.config['PASSWORD_REGISTRATION'] = False
        res = self.client.get('/api/1/sessions')
        assert res.status_code == 200, res
        providers = res.json['providers']
        assert len(providers) == 1, res
        assert providers[0]['name'] == 'password', res
        assert providers[0]['registration'] == False, res  # noqa

    def test_status_get_without_password_login(self):
        self.app.config['PASSWORD_LOGIN'] = False
        res = self.client.get('/api/1/sessions')
        assert res.status_code == 200, res
        assert len(res.json['providers']) == 0, res

    def test_password_login_get(self):
        # Login endpoint only accepts POST.
        res = self.client.get('/api/1/sessions/login/password')
        assert res.status_code == 404, res

    def test_password_login_post_no_data(self):
        res = self.client.post('/api/1/sessions/login/password')
        assert res.status_code == 404, res

    def test_password_login_post_good_email_and_password(self):
        secret = self.fake.password()
        self.role.set_password(secret)
        payload = dict(email=self.role.email, password=secret)
        res = self.client.post('/api/1/sessions/login/password',
                               data=payload)
        assert res.status_code == 200, res
        assert res.json['role']['id'] == self.role.id, res
        assert res.json['api_key'] == self.role.api_key, res
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,601
|
gazeti/aleph
|
refs/heads/master
|
/aleph/views/datasets_api.py
|
import logging
from collections import defaultdict
from werkzeug.exceptions import NotFound
from flask import Blueprint, request
from apikit import jsonify
from dalet import COUNTRY_NAMES
from aleph.core import datasets, get_config
from aleph.search import QueryState, entities_query
from aleph.views.cache import enable_cache
log = logging.getLogger(__name__)
blueprint = Blueprint('datasets_api', __name__)
@blueprint.route('/api/1/datasets', methods=['GET'])
def index():
    """List datasets visible to the current user, with facet counts.

    Honours 'filter:countries' and 'filter:category' query parameters and
    annotates each dataset with its entity count from the search index.
    """
    enable_cache(vary_user=True)
    results = [d for d in datasets if request.authz.check_roles(d.roles)]
    state = QueryState({
        'filter:dataset': [d.name for d in results],
        'facet': 'dataset',
        'limit': 0
    }, request.authz)
    res = entities_query(state)
    values = res.get('facets', {}).get('dataset', {}).get('values', [])
    counts = {v.get('id'): v.get('count') for v in values}
    countries_facet = defaultdict(int)
    category_facet = defaultdict(int)
    countries_filter = set(request.args.getlist('filter:countries'))
    category_filter = set(request.args.getlist('filter:category'))
    filtered = []
    for dataset in results:
        dataset.entities_count = counts.get(dataset.name)
        if len(category_filter) and dataset.category not in category_filter:
            continue
        if len(countries_filter) and \
                not len(countries_filter.intersection(dataset.countries)):
            continue
        for country in dataset.countries:
            countries_facet[country] += 1
        category_facet[dataset.category] += 1
        filtered.append(dataset)
    filtered = sorted(filtered, key=lambda d: d.entities_count, reverse=True)
    facets = {'countries': {'values': []}, 'category': {'values': []}}
    categories = get_config('COLLECTION_CATEGORIES', {})
    # FIX: `lambda (k, c): c` is Python-2-only tuple-unpacking syntax
    # (removed by PEP 3113); use a portable key function instead.
    countries_sorted = sorted(countries_facet.items(), key=lambda kv: kv[1])
    for key, count in countries_sorted[::-1]:
        facets['countries']['values'].append({
            'id': key,
            'count': count,
            'label': COUNTRY_NAMES.get(key, key)
        })
    category_sorted = sorted(category_facet.items(), key=lambda kv: kv[1])
    for key, count in category_sorted[::-1]:
        if key is None:
            continue
        facets['category']['values'].append({
            'id': key,
            'count': count,
            'label': categories.get(key, key)
        })
    return jsonify({
        'results': filtered,
        'facets': facets,
        'total': len(filtered),
        'total_entities_count': res.get('total')
    })
@blueprint.route('/api/1/datasets/<name>')
def view(name):
    """Return metadata plus facet summaries for a single dataset."""
    enable_cache(vary_user=True)
    try:
        dataset = datasets.get(name)
    except NameError:
        # Unknown dataset names surface as HTTP 404.
        raise NotFound()
    request.authz.require(request.authz.check_roles(dataset.roles))
    query = QueryState({
        'filter:dataset': dataset.name,
        'facet': ['schema', 'countries'],
        'limit': 0
    }, request.authz)
    response = entities_query(query)
    payload = dataset.to_dict()
    payload['facets'] = response.get('facets', {})
    payload['doc_count'] = response.get('total')
    return jsonify(payload)
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,602
|
gazeti/aleph
|
refs/heads/master
|
/aleph/index/records.py
|
import six
import time
import logging
from elasticsearch.helpers import BulkIndexError
from aleph.core import es_index, db
from aleph.index.mapping import TYPE_RECORD
from aleph.model import DocumentRecord
from aleph.index.util import bulk_op, query_delete
from aleph.text import index_form
log = logging.getLogger(__name__)
def clear_records(document_id):
    """Delete all records associated with the given document."""
    query_delete({'term': {'document_id': document_id}},
                 doc_type=TYPE_RECORD)
def generate_records(document):
    """Generate index records, based on document rows or pages."""
    records = db.session.query(DocumentRecord)
    records = records.filter(DocumentRecord.document_id == document.id)
    for record in records.yield_per(1000):
        # Index the record text together with any tabular cell values.
        texts = [record.text]
        if record.data is not None:
            texts.extend(record.data.values())
        source = {
            'document_id': document.id,
            'collection_id': document.collection_id,
            'index': record.index,
            'sheet': record.sheet,
            'text': index_form(texts)
        }
        yield {
            '_id': record.id,
            '_type': TYPE_RECORD,
            '_index': six.text_type(es_index),
            '_source': source
        }
def index_records(document):
    """(Re-)index all records of a document, retrying on bulk errors."""
    clear_records(document.id)
    done = False
    while not done:
        try:
            bulk_op(generate_records(document))
            done = True
        except BulkIndexError as exc:
            # Transient indexing failure: back off and retry indefinitely.
            log.warning('Indexing error: %s', exc)
            time.sleep(10)
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,603
|
gazeti/aleph
|
refs/heads/master
|
/aleph/index/leads.py
|
from __future__ import absolute_import
import logging
from hashlib import sha1
from aleph.core import es, es_index
from aleph.index.mapping import TYPE_LEAD
from aleph.index.util import query_delete
log = logging.getLogger(__name__)
def delete_entity_leads(entity_id):
    """Delete all entity-related leads from the index."""
    # Match leads where the entity appears on either side of the pair.
    matches = [
        {'term': {'entity_id': entity_id}},
        {'term': {'match_id': entity_id}},
    ]
    query_delete({'bool': {'should': matches}}, doc_type=TYPE_LEAD)
def index_lead(lead):
    """Index a lead, keyed by a hash of its entity and match IDs."""
    hash_sum = sha1()
    # FIX: encode explicitly — sha1.update() requires bytes on Python 3,
    # and non-ASCII unicode IDs would fail implicit coercion on Python 2.
    hash_sum.update((lead.get('entity_id') or '').encode('utf-8'))
    hash_sum.update((lead.get('match_id') or '').encode('utf-8'))
    lead_id = hash_sum.hexdigest()
    es.index(index=es_index, doc_type=TYPE_LEAD, id=lead_id, body=lead)
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,604
|
gazeti/aleph
|
refs/heads/master
|
/aleph/views/leads_api.py
|
from flask import Blueprint, request
from apikit import obj_or_404, jsonify, request_data
from werkzeug.exceptions import BadRequest
from aleph.model import Collection, EntityIdentity
from aleph.search import QueryState
from aleph.search.leads import leads_query
from aleph.logic import update_entity, update_lead
from aleph.events import log_event
from aleph.views.util import get_entity
blueprint = Blueprint('leads_api', __name__)
@blueprint.route('/api/1/collections/<int:collection_id>/leads',
                 methods=['GET'])
def index(collection_id):
    """List leads for a collection the user is allowed to read."""
    collection = obj_or_404(Collection.by_id(collection_id))
    request.authz.require(request.authz.collection_read(collection))
    query_state = QueryState(request.args, request.authz)
    return jsonify(leads_query(collection_id, query_state))
@blueprint.route('/api/1/collections/<int:collection_id>/leads',
                 methods=['POST', 'PUT'])
def update(collection_id):
    """Record a judgement on a lead (entity/match pair) in a collection."""
    collection = obj_or_404(Collection.by_id(collection_id))
    request.authz.require(request.authz.collection_write(collection))
    payload = request_data()
    # The entity being judged must be writable and live in this collection.
    entity, obj = get_entity(payload.get('entity_id'), request.authz.WRITE)
    if obj.collection_id != collection_id:
        raise BadRequest("Entity does not belong to collection.")
    match, _ = get_entity(payload.get('match_id'), request.authz.READ)
    judgement = payload.get('judgement')
    if judgement not in EntityIdentity.JUDGEMENTS:
        raise BadRequest("Invalid judgement.")
    update_lead(entity, match, judgement, judge=request.authz.role)
    log_event(request)
    update_entity(obj)
    return jsonify({'status': 'ok'})
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,605
|
gazeti/aleph
|
refs/heads/master
|
/aleph/oauth.py
|
import jwt
import logging
from flask_oauthlib.client import OAuth
from flask import session
from aleph import signals
oauth = OAuth()
log = logging.getLogger(__name__)
def get_oauth_token():
    """Token getter for flask-oauthlib: (access_token, '') or None."""
    if 'oauth' not in session:
        return None
    sig = session.get('oauth')
    return (sig.get('access_token'), '')
def setup_providers(app):
    """Register all configured OAuth providers on the global client."""
    # Reset the remote apps first!
    oauth.remote_apps = {}
    providers = app.config.get('OAUTH', [])
    if isinstance(providers, dict):
        # A single provider may be configured as a bare mapping.
        providers = [providers]
    for config in providers:
        # OAUTH providers from the config MUST have a name entry
        name = config.get('name')
        label = config.pop('label', name.capitalize())
        remote = oauth.remote_app(**config)
        remote.label = label
        remote.tokengetter(get_oauth_token)
def configure_oauth(app):
    """Initialise OAuth for *app*; provider setup is skipped under tests."""
    testing = app.config.get('TESTING')
    if not testing:
        setup_providers(app)
    oauth.init_app(app)
    return oauth
@signals.handle_oauth_session.connect
def handle_google_oauth(sender, provider=None, session=None):
    """Create or load an aleph Role from a Google OAuth session."""
    from aleph.model import Role
    # If you wish to use another OAuth provider with your installation of
    # aleph, you can create a Python extension package and include a
    # custom oauth handler like this, which will create roles and state
    # for your session.
    if 'googleapis.com' not in provider.base_url:
        return
    userinfo = provider.get('userinfo').data
    foreign_id = 'google:%s' % userinfo.get('id')
    role = Role.load_or_create(foreign_id, Role.USER, userinfo.get('name'),
                               email=userinfo.get('email'))
    session['user'] = role.id
@signals.handle_oauth_session.connect
def handle_facebook_oauth(sender, provider=None, session=None):
    """Create or load an aleph Role from a Facebook OAuth session."""
    from aleph.model import Role
    if 'facebook.com' not in provider.base_url:
        return
    profile = provider.get('me?fields=id,name,email').data
    foreign_id = 'facebook:%s' % profile.get('id')
    role = Role.load_or_create(foreign_id, Role.USER, profile.get('name'),
                               email=profile.get('email'))
    session['user'] = role.id
@signals.handle_oauth_session.connect
def handle_keycloak_oauth(sender, provider=None, session=None):
    """Map a Keycloak (secure.occrp.org) OAuth session onto aleph roles."""
    from aleph.model import Role
    superuser_role = 'superuser'
    if 'secure.occrp.org' not in provider.base_url:
        return
    access_token = session.get('oauth', {}).get('access_token')
    # NOTE(review): signature verification is disabled (verify=False) —
    # the JWT is trusted exactly as received; confirm this is intended.
    access_token = jwt.decode(access_token, verify=False)
    clients = access_token.get('resource_access', {})
    client = clients.get(provider.consumer_key, {})
    roles = set(client.get('roles', []))
    user_id = 'kc:%s' % access_token.get('email')
    # presumably an older dashboard account ID takes precedence — verify.
    if access_token.get('idashboard'):
        user_id = 'idashboard:user:%s' % access_token.get('idashboard')
    role = Role.load_or_create(user_id, Role.USER,
                               access_token.get('name'),
                               email=access_token.get('email'),
                               is_admin=superuser_role in roles)
    # Group memberships are rebuilt from scratch on every login.
    role.clear_roles()
    for role_name in roles:
        if role_name == superuser_role:
            continue
        group_role = Role.load_or_create('kc:%s' % role_name,
                                         Role.GROUP,
                                         role_name)
        role.add_role(group_role)
        log.debug("User %r is member of %r", role, group_role)
    session['user'] = role.id
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,606
|
gazeti/aleph
|
refs/heads/master
|
/aleph/datasets/__init__.py
|
import six
import logging
from aleph.authz import get_public_roles
from aleph.util import dict_list
from aleph.model import Role
from aleph.datasets.query import DBQuery, CSVQuery
log = logging.getLogger(__name__)
class Dataset(object):
    """One configured set of source data that can be loaded into aleph."""

    def __init__(self, name, data):
        self.name = six.text_type(name)
        self.data = data
        self.label = data.get('label', name)
        self.info_url = data.get('info_url')
        self.category = data.get('category')
        self.entities_count = None
        self.public = False
        self.roles = []
        for role_ref in dict_list(data, 'roles', 'role'):
            role_id = Role.load_id(role_ref)
            if role_id is None:
                log.warning("Could not find role: %s", role_ref)
            else:
                self.roles.append(role_id)
            # A dataset granted to a guest/system role is public.
            if role_id in get_public_roles():
                self.public = True
        if not len(self.roles):
            raise ValueError("No roles for dataset: %s" % self.name)
        self._queries = dict_list(data, 'queries', 'query')

    @property
    def countries(self):
        # Cached once per process lifetime -- a crude memoization. A
        # timeout-based cache (or explicit configuration) would be nicer.
        if not hasattr(self, '_countries'):
            from aleph.search.entities import get_dataset_countries
            self._countries = get_dataset_countries(self.name)
        return self._countries

    @property
    def queries(self):
        """Yield a query object for each configured query spec."""
        for spec in self._queries:
            if 'database' in spec or 'databases' in spec:
                yield DBQuery(self, spec)
            else:
                yield CSVQuery(self, spec)

    def to_dict(self):
        return {
            'name': self.name,
            'label': self.label,
            'info_url': self.info_url,
            'roles': self.roles,
            'public': self.public,
            'category': self.category,
            'countries': self.countries,
            'entities_count': self.entities_count
        }

    def __repr__(self):
        return '<Dataset(%r, %r)>' % (self.name, self.label)
class DatasetSet(object):
    """Container for all datasets defined in the configuration file."""

    def __init__(self, datasets):
        config = datasets.get('datasets', {})
        self.datasets = [Dataset(name, spec) for name, spec in config.items()]

    def get(self, name):
        """Return the dataset with the given name, or raise NameError."""
        for candidate in self.datasets:
            if candidate.name == name:
                return candidate
        raise NameError("No such dataset: %s" % name)

    def __iter__(self):
        return iter(self.datasets)

    def __repr__(self):
        return '<DatasetSet(%r)>' % self.datasets
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,607
|
gazeti/aleph
|
refs/heads/master
|
/aleph/search/links.py
|
from pprint import pprint # noqa
from aleph.index import TYPE_LINK
from aleph.search.util import execute_basic
from aleph.search.fragments import match_all, filter_query, authz_filter
from aleph.search.fragments import add_filter, aggregate
from aleph.search.facet import parse_facet_result
# Fields returned for each link hit; limits the _source payload size.
DEFAULT_FIELDS = ['roles', 'remote', 'origin', 'inverted', 'schema',
                  'schemata', 'properties']
def links_query(origin, state):
    """Parse a user query string, compose and execute a link query."""
    if state.has_text:
        query = {
            "query_string": {
                "query": state.text,
                "fields": ['name^5', 'names^2', 'text'],
                "default_operator": "AND",
                "use_dis_max": True
            }
        }
    else:
        query = match_all()
    # Restrict to links attached to the given origin entity (or entities).
    ids = origin.get('ids') or [origin.get('id')]
    query = add_filter(query, {'terms': {'origin.id': ids}})
    query = authz_filter(query, state.authz, roles=True)
    aggs = aggregate(state, query, {'scoped': {'global': {}, 'aggs': {}}},
                     state.facet_names)
    if state.sort == 'score':
        sort = ['_score']
    else:
        sort = [{'properties.start_date': 'desc'},
                {'properties.end_date': 'desc'}]
    body = {
        'sort': sort,
        'query': filter_query(query, state.filters),
        'aggregations': aggs,
        'size': state.limit,
        'from': state.offset,
        '_source': DEFAULT_FIELDS
    }
    result, hits, output = execute_basic(TYPE_LINK, body)
    output['facets'] = parse_facet_result(state, result)
    for hit in hits.get('hits', []):
        link = hit.get('_source')
        link['id'] = hit.get('_id')
        link['score'] = hit.get('_score')
        output['results'].append(link)
    return output
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,608
|
gazeti/aleph
|
refs/heads/master
|
/aleph/analyze/regex.py
|
import re
import logging
from dalet import parse_phone
from aleph.analyze.analyzer import Analyzer
from aleph.model import DocumentTag, DocumentTagCollector
log = logging.getLogger(__name__)
# URLs:
# https://gist.github.com/uogbuji/705383
# REGEX = ur'(?i)\b((?:https?://|www\d{0,3}[.]|[a-z0-9.\-]+[.][a-z]{2,4}/)(?:[^\s()<>]+|\(([^\s()<>]+|(\([^\s()<>]+\)))*\))+(?:\(([^\s()<>]+|(\([^\s()<>]+\)))*\)|[^\s`!()\[\]{};:\'".,<>?\xab\xbb\u201c\u201d\u2018\u2019]))' # noqa
class RegexAnalyzer(Analyzer):
    """Base class for analyzers that tag documents via a regular expression.

    Subclasses must define REGEX, FLAG and ORIGIN, and implement
    ``on_match``.
    """

    REGEX = None
    FLAG = None

    def prepare(self):
        # TODO: re-think this.
        self.disabled = self.document.type == self.document.TYPE_TABULAR
        self.collector = DocumentTagCollector(self.document, self.ORIGIN)
        self.regex = re.compile(self.REGEX, self.FLAG)

    def on_text(self, text):
        if self.disabled:
            return
        for match in self.regex.finditer(text):
            self.on_match(match)

    def finalize(self):
        self.collector.save()
class EMailAnalyzer(RegexAnalyzer):
    """Tag e-mail addresses found in document text."""

    # Raw string: '\.' in a normal string literal is an invalid escape
    # sequence (DeprecationWarning since 3.6, SyntaxWarning in 3.12); the
    # pattern itself is unchanged.
    REGEX = r'[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]{2,}'
    FLAG = re.IGNORECASE
    ORIGIN = 'regex:email'

    def on_match(self, match):
        """Emit the matched address as an e-mail document tag."""
        text = match.group(0)
        self.collector.emit(text, DocumentTag.TYPE_EMAIL)
class PhoneNumberAnalyzer(RegexAnalyzer):
    """Tag phone numbers found in document text."""

    REGEX = r'(\+?[\d\-\(\)\/\s]{5,})'
    CHARS = '+0123456789'
    FLAG = re.IGNORECASE
    ORIGIN = 'regex:phones'

    def on_match(self, match):
        # Keep only digits and a possible leading plus sign.
        candidate = ''.join(c for c in match.group(0) if c in self.CHARS)
        if len(candidate) < 5:
            return
        # Try parsing without a country hint first, then with each country
        # associated with the document.
        for country in [None] + self.document.countries:
            number = parse_phone(candidate, country=country)
            if number is None:
                continue
            self.collector.emit(number, DocumentTag.TYPE_PHONE)
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,609
|
gazeti/aleph
|
refs/heads/master
|
/aleph/views/documents_api.py
|
import logging
from werkzeug.exceptions import BadRequest, NotFound
from flask import Blueprint, redirect, send_file, request
from apikit import jsonify, Pager, request_data
from aleph.core import archive, url_for, db
from aleph.model import Document, DocumentRecord, Entity, Reference
from aleph.logic import update_document
from aleph.events import log_event
from aleph.views.cache import enable_cache
from aleph.search import QueryState
from aleph.search import records_query, execute_records_query
from aleph.search.util import next_params
from aleph.views.util import get_document
from aleph.util import PDF_MIME
log = logging.getLogger(__name__)
blueprint = Blueprint('documents_api', __name__)
@blueprint.route('/api/1/documents', methods=['GET'])
def index():
    """List readable documents, optionally filtered by collection/hash."""
    authz = request.authz
    requested = request.args.getlist('collection')
    collections = authz.collections_intersect(authz.READ, requested)
    q = Document.all().filter(Document.collection_id.in_(collections))
    hashes = request.args.getlist('content_hash')
    if len(hashes):
        q = q.filter(Document.content_hash.in_(hashes))
    return jsonify(Pager(q))
@blueprint.route('/api/1/documents/<int:document_id>')
def view(document_id):
    """Return the metadata for a single document."""
    doc = get_document(document_id)
    enable_cache()
    data = doc.to_dict()
    parent = doc.parent
    if parent is not None:
        data['parent'] = parent.to_dict()
    log_event(request, document_id=doc.id)
    # Prefer a direct archive URL; fall back to the local file endpoint.
    data['data_url'] = archive.generate_url(doc.content_hash)
    if data['data_url'] is None:
        data['data_url'] = url_for('documents_api.file',
                                   document_id=document_id)
    if doc.pdf_version:
        data['pdf_url'] = url_for('documents_api.pdf',
                                  document_id=document_id)
    return jsonify(data)
@blueprint.route('/api/1/documents/<int:document_id>', methods=['POST', 'PUT'])
def update(document_id):
    """Update the user-editable metadata of a document."""
    document = get_document(document_id, action=request.authz.WRITE)
    document.update(request_data())
    db.session.commit()
    log_event(request, document_id=document.id)
    # Re-index so search reflects the new metadata.
    update_document(document)
    return view(document_id)
@blueprint.route('/api/1/documents/<int:document_id>/references')
def references(document_id):
    """List active entities referenced by this document via regex tags."""
    doc = get_document(document_id)
    q = db.session.query(Reference) \
        .filter(Reference.document_id == doc.id) \
        .filter(Reference.origin == 'regex') \
        .join(Entity) \
        .filter(Entity.state == Entity.STATE_ACTIVE) \
        .filter(Entity.collection_id.in_(request.authz.collections_read)) \
        .order_by(Reference.weight.desc())
    return jsonify(Pager(q, document_id=document_id))
@blueprint.route('/api/1/documents/<int:document_id>/file')
def file(document_id):
    """Send the original source file, from the archive or a local copy."""
    document = get_document(document_id)
    enable_cache(server_side=True)
    log_event(request, document_id=document.id)
    remote_url = archive.generate_url(document.content_hash,
                                      file_name=document.file_name,
                                      mime_type=document.mime_type)
    if remote_url is not None:
        # The archive can serve the blob itself.
        return redirect(remote_url)
    local_path = archive.load_file(document.content_hash,
                                   file_name=document.file_name)
    if local_path is None:
        raise NotFound("File does not exist.")
    handle = open(local_path, 'rb')
    return send_file(handle,
                     as_attachment=True,
                     attachment_filename=document.file_name,
                     mimetype=document.mime_type)
@blueprint.route('/api/1/documents/<int:document_id>/pdf')
def pdf(document_id):
    """Send the PDF rendition of a text document."""
    document = get_document(document_id)
    enable_cache(server_side=True)
    log_event(request, document_id=document.id)
    if document.type != Document.TYPE_TEXT:
        raise BadRequest("PDF is only available for text documents")
    remote_url = archive.generate_url(document.pdf_version,
                                      mime_type=PDF_MIME)
    if remote_url is not None:
        return redirect(remote_url)
    local_path = archive.load_file(document.pdf_version,
                                   file_name=document.file_name)
    if local_path is None:
        raise NotFound("Missing PDF file.")
    return send_file(open(local_path, 'rb'), mimetype=PDF_MIME)
@blueprint.route('/api/1/documents/<int:document_id>/tables/<int:table_id>')
def table(document_id, table_id):
    """Return the metadata for one table sheet of a tabular document."""
    document = get_document(document_id)
    enable_cache(vary_user=True)
    try:
        return jsonify(document.tables[table_id])
    except IndexError:
        # table_id beyond the available sheets.
        raise NotFound("No such table: %s" % table_id)
@blueprint.route('/api/1/documents/<int:document_id>/records')
def records(document_id):
    """Search the records (rows/pages) of a single document."""
    document = get_document(document_id)
    enable_cache(vary_user=True)
    state = QueryState(request.args, request.authz)
    query = records_query(document.id, state)
    result = execute_records_query(document.id, state, query)
    # Provide a pagination link when there are more results.
    params = next_params(request.args, result)
    if params is not None:
        result['next'] = url_for('documents_api.records',
                                 document_id=document_id, **params)
    return jsonify(result)
@blueprint.route('/api/1/documents/<int:document_id>/records/<int:index>')
def record(document_id, index):
    """Fetch a single record of a document by its index."""
    document = get_document(document_id)
    query = db.session.query(DocumentRecord) \
        .filter(DocumentRecord.document_id == document.id) \
        .filter(DocumentRecord.index == index)
    row = query.first()
    if row is None:
        raise NotFound("No such page: %s" % index)
    enable_cache(server_side=True)
    return jsonify(row)
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,610
|
gazeti/aleph
|
refs/heads/master
|
/aleph/search/__init__.py
|
import logging
from aleph.index.mapping import TYPE_DOCUMENT, TYPE_RECORD # noqa
from aleph.search.query import QueryState # noqa
from aleph.search.documents import documents_query, documents_iter # noqa
from aleph.search.documents import entity_documents # noqa
from aleph.search.entities import entities_query # noqa
from aleph.search.entities import suggest_entities, similar_entities # noqa
from aleph.search.entities import load_entity # noqa
from aleph.search.links import links_query # noqa
from aleph.search.leads import leads_query, lead_count # noqa
from aleph.search.records import records_query, execute_records_query # noqa
from aleph.search.util import scan_iter # noqa
log = logging.getLogger(__name__)
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,611
|
gazeti/aleph
|
refs/heads/master
|
/aleph/authz.py
|
from werkzeug.exceptions import Forbidden
from aleph.core import db, get_config
from aleph.model import Collection, Role, Permission
from aleph.util import ensure_list
def get_public_roles():
    """Role IDs which make a collection be considered public."""
    return [Role.load_id(Role.SYSTEM_GUEST),
            Role.load_id(Role.SYSTEM_USER)]
class Authz(object):
    """Hold the authorization information for a user.

    This is usually attached to a request, but can also be used separately,
    e.g. in the context of notifications.
    """

    READ = 'read'
    WRITE = 'write'
    PUBLIC = 'public'

    def __init__(self, role=None, override=False):
        # Everybody is at least a guest.
        self.roles = set([Role.load_id(Role.SYSTEM_GUEST)])
        self.role = role
        self.logged_in = role is not None
        self.override = self.is_admin = override
        self.in_maintenance = get_config('MAINTENANCE')
        if self.logged_in:
            self.is_admin = role.is_admin
            self.roles.add(role.id)
            self.roles.add(Role.load_id(Role.SYSTEM_USER))
            for group in role.roles:
                self.roles.add(group.id)

        # Pre-load collection authorisation info and cache the result.
        # This is the core authorisation function, and is called at least once
        # per request. It will query and cache the ID for all collections the
        # current user is authorised to read or write.
        self.collections = {
            self.READ: set(),
            self.WRITE: set(),
            self.PUBLIC: set()
        }
        q = db.session.query(Permission.collection_id,
                             Permission.role_id,
                             Permission.read,
                             Permission.write)
        q = q.filter(Permission.deleted_at == None)  # noqa
        q = q.filter(Permission.role_id.in_(self.roles))
        q = q.filter(Permission.collection_id != None)  # noqa
        for collection_id, role_id, read, write in q:
            if read or write:
                self.collections[self.READ].add(collection_id)
                if role_id in get_public_roles():
                    self.collections[self.PUBLIC].add(collection_id)
            if write and self.logged_in:
                self.collections[self.WRITE].add(collection_id)
        if self.is_admin:
            # Admins can read and write everything that is not deleted.
            q = Collection.all_ids().filter(Collection.deleted_at == None)  # noqa
            for collection_id, in q:
                self.collections[self.READ].add(collection_id)
                self.collections[self.WRITE].add(collection_id)

        # Disable all writes in maintenance mode.
        if self.in_maintenance:
            self.collections[self.WRITE] = set()
        self.collections_read = list(self.collections[self.READ])
        self.collections_write = list(self.collections[self.WRITE])

    def _collection_check(self, collection, action):
        """Check whether ``collection`` (an ID or model) allows ``action``."""
        if isinstance(collection, Collection):
            collection = collection.id
        try:
            return int(collection) in self.collections.get(action)
        except (TypeError, ValueError):
            # Not interpretable as a collection ID (e.g. None or junk input);
            # narrowed from a bare except so real errors propagate.
            return False

    def collection_read(self, collection):
        """Check if a given collection can be read."""
        return self._collection_check(collection, self.READ)

    def collection_write(self, collection):
        """Check if a given collection can be written."""
        return self._collection_check(collection, self.WRITE)

    def collection_public(self, collection):
        """Check if a given collection is publicly visible."""
        return self._collection_check(collection, self.PUBLIC)

    def collections_intersect(self, action, colls, default_all=True):
        """Intersect the given and the available set of collections.

        This will return all available collections if the given set is empty
        and the ``default_all`` argument is ``True``.
        """
        available = self.collections.get(action)
        intersect = set()
        for collection_id in colls:
            try:
                if isinstance(collection_id, dict):
                    collection_id = collection_id.get('id')
                collection_id = int(collection_id)
                if collection_id in available:
                    intersect.add(collection_id)
            except (TypeError, ValueError):
                # Skip entries that do not carry a usable collection ID;
                # narrowed from a bare except so real errors propagate.
                pass
        if not len(intersect) and default_all:
            return available
        return list(intersect)

    def session_write(self):
        """Whether the current session may perform any writes at all."""
        if self.in_maintenance:
            return False
        return self.logged_in

    def check_roles(self, roles):
        """Check whether the user holds any of the given roles."""
        # if self.in_maintenance:
        #     return False
        if self.is_admin:
            return True
        isect = self.roles.intersection(ensure_list(roles))
        return len(isect) > 0

    def require(self, pred):
        """Raise ``Forbidden`` unless ``pred`` is truthy."""
        if not pred:
            raise Forbidden("Sorry, you're not permitted to do this!")

    def __repr__(self):
        return '<Authz(%s)>' % self.role
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,612
|
gazeti/aleph
|
refs/heads/master
|
/aleph/logic/datasets.py
|
import logging
from aleph.index import index_items
log = logging.getLogger(__name__)
PAGE = 1000
def load_rows(dataset, query, rows):
    """Index a single batch of rows produced by the given query."""
    entities = {}
    links = []
    for row in rows:
        entity_map = {}
        for entity in query.entities:
            data = entity.to_index(row)
            if data is None:
                continue
            entity_map[entity.name] = data
            entities[data['id']] = data
        for link in query.links:
            # Each link is indexed twice: once per direction.
            for inverted in (False, True):
                data = link.to_index(row, entity_map, inverted=inverted)
                if data is not None:
                    links.append(data)
    index_items(entities, links)
    log.info("[%s] Indexed %s rows as %s entities, %s links...",
             dataset.name, len(rows), len(entities), len(links))
def load_dataset(dataset):
    """Index all the entities and links in a given dataset."""
    for query in dataset.queries:
        batch = []
        for row_idx, row in enumerate(query.iterrows(), 1):
            batch.append(row)
            # Flush a full page of rows to the index.
            if len(batch) >= PAGE:
                log.info("[%s] Tasked %s rows...", dataset.name, row_idx)
                load_rows(dataset, query, batch)
                batch = []
        if len(batch):
            load_rows(dataset, query, batch)
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,613
|
gazeti/aleph
|
refs/heads/master
|
/aleph/model/document.py
|
import logging
from datetime import datetime, timedelta
from normality import ascii_text
from sqlalchemy import func
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.orm.attributes import flag_modified
from aleph.core import db
from aleph.model.metadata import Metadata
from aleph.model.validate import validate
from aleph.model.collection import Collection
from aleph.model.reference import Reference
from aleph.model.common import DatedModel
from aleph.model.document_record import DocumentRecord
from aleph.model.document_tag import DocumentTag
from aleph.text import index_form
log = logging.getLogger(__name__)
class Document(db.Model, DatedModel, Metadata):
_schema = 'document.json#'
SCHEMA = 'Document'
TYPE_TEXT = 'text'
TYPE_TABULAR = 'tabular'
TYPE_OTHER = 'other'
STATUS_PENDING = 'pending'
STATUS_SUCCESS = 'success'
STATUS_FAIL = 'fail'
id = db.Column(db.BigInteger, primary_key=True)
content_hash = db.Column(db.Unicode(65), nullable=True, index=True)
foreign_id = db.Column(db.Unicode, unique=False, nullable=True)
type = db.Column(db.Unicode(10), nullable=False, index=True)
status = db.Column(db.Unicode(10), nullable=True, index=True)
meta = db.Column(JSONB, default={})
crawler = db.Column(db.Unicode(), index=True)
crawler_run = db.Column(db.Unicode())
error_type = db.Column(db.Unicode(), nullable=True)
error_message = db.Column(db.Unicode(), nullable=True)
parent_id = db.Column(db.BigInteger, db.ForeignKey('document.id'), nullable=True) # noqa
children = db.relationship('Document', backref=db.backref('parent', uselist=False, remote_side=[id])) # noqa
collection_id = db.Column(db.Integer, db.ForeignKey('collection.id'), nullable=False, index=True) # noqa
collection = db.relationship(Collection, backref=db.backref('documents', lazy='dynamic')) # noqa
def __init__(self, **kw):
self.meta = {}
super(Document, self).__init__(**kw)
def update(self, data):
validate(data, self._schema)
self.title = data.get('title')
self.summary = data.get('summary')
self.languages = data.get('languages')
self.countries = data.get('countries')
db.session.add(self)
def update_meta(self):
flag_modified(self, 'meta')
def delete_records(self):
pq = db.session.query(DocumentRecord)
pq = pq.filter(DocumentRecord.document_id == self.id)
# pq.delete(synchronize_session='fetch')
pq.delete()
db.session.flush()
def delete_tags(self):
pq = db.session.query(DocumentTag)
pq = pq.filter(DocumentTag.document_id == self.id)
# pq.delete(synchronize_session='fetch')
pq.delete()
db.session.flush()
def delete_references(self, origin=None):
pq = db.session.query(Reference)
pq = pq.filter(Reference.document_id == self.id)
if origin is not None:
pq = pq.filter(Reference.origin == origin)
# pq.delete(synchronize_session='fetch')
pq.delete()
db.session.flush()
def delete(self, deleted_at=None):
self.delete_references()
self.delete_records()
db.session.delete(self)
def insert_records(self, sheet, iterable, chunk_size=1000):
chunk = []
for index, data in enumerate(iterable):
chunk.append({
'document_id': self.id,
'index': index,
'sheet': sheet,
'data': data
})
if len(chunk) >= chunk_size:
db.session.bulk_insert_mappings(DocumentRecord, chunk)
chunk = []
if len(chunk):
db.session.bulk_insert_mappings(DocumentRecord, chunk)
def text_parts(self):
pq = db.session.query(DocumentRecord)
pq = pq.filter(DocumentRecord.document_id == self.id)
for record in pq.yield_per(1000):
for text in record.text_parts():
yield text
@classmethod
def crawler_last_run(cls, crawler_id):
q = db.session.query(func.max(cls.updated_at))
q = q.filter(cls.crawler == crawler_id)
return q.scalar()
@classmethod
def is_crawler_active(cls, crawler_id):
# TODO: add a function to see if a particular crawl is still running
# this should be defined as having "pending" documents.
last_run_time = cls.crawler_last_run(crawler_id)
if last_run_time is None:
return False
return last_run_time > (datetime.utcnow() - timedelta(hours=1))
@classmethod
def crawler_stats(cls, crawler_id):
# Check if the crawler was active very recently, if so, don't
# allow the user to execute a new run right now.
stats = {
'updated': cls.crawler_last_run(crawler_id),
'running': cls.is_crawler_active(crawler_id)
}
q = db.session.query(cls.status, func.count(cls.id))
q = q.filter(cls.crawler == crawler_id)
q = q.group_by(cls.status)
for (status, count) in q.all():
stats[status] = count
return stats
@classmethod
def by_keys(cls, parent_id=None, collection=None, foreign_id=None,
            content_hash=None):
    """Try and find a document by various criteria.

    Looks the document up by ``foreign_id`` (preferred) or
    ``content_hash``, optionally scoped by collection and parent. If no
    match exists, a new pending document carrying those keys is created
    and added to the session (not committed).

    Raises ValueError when neither ``foreign_id`` nor ``content_hash``
    is provided, since no unique lookup is possible.
    """
    q = cls.all()
    if collection is not None:
        q = q.filter(Document.collection_id == collection.id)
    if parent_id is not None:
        q = q.filter(Document.parent_id == parent_id)
    if foreign_id is not None:
        q = q.filter(Document.foreign_id == foreign_id)
    elif content_hash is not None:
        q = q.filter(Document.content_hash == content_hash)
    else:
        raise ValueError("No unique criterion for document.")
    document = q.first()
    if document is None:
        document = cls()
        document.type = cls.TYPE_OTHER
        # NOTE(review): assumes ``collection`` is not None on the create
        # path — both visible call sites pass one; confirm before reuse.
        document.collection_id = collection.id
        document.collection = collection
        document.parent_id = parent_id
        document.foreign_id = foreign_id
        document.content_hash = content_hash
        document.status = document.STATUS_PENDING
        db.session.add(document)
    return document
def to_dict(self):
    """Return a JSON-serializable view of the document's metadata and
    bookkeeping fields."""
    data = self.to_meta_dict()
    try:
        from flask import request  # noqa
        data['public'] = request.authz.collection_public(self.collection_id)  # noqa
    except Exception:
        # Outside a request context (e.g. in background workers) there is
        # no authz to consult, so visibility is unknown. Previously this
        # was a bare ``except:`` which also swallowed SystemExit and
        # KeyboardInterrupt; catch Exception only.
        data['public'] = None
    data.update({
        'id': self.id,
        'type': self.type,
        'status': self.status,
        'parent_id': self.parent_id,
        'foreign_id': self.foreign_id,
        'content_hash': self.content_hash,
        'crawler': self.crawler,
        'crawler_run': self.crawler_run,
        'error_type': self.error_type,
        'error_message': self.error_message,
        'collection_id': self.collection_id,
        'created_at': self.created_at,
        'updated_at': self.updated_at
    })
    return data
def to_index_dict(self):
    """Return the representation of the document used for search indexing."""
    data = self.to_dict()
    data['text'] = index_form(self.text_parts())
    data['schema'] = self.SCHEMA
    data['schemata'] = [self.SCHEMA]
    data['name_sort'] = ascii_text(data.get('title'))
    data['title_latin'] = ascii_text(data.get('title'))
    data['summary_latin'] = ascii_text(data.get('summary'))
    # Tables are indexed as individual records, not on the document body.
    # Use a default so a metadata dict without 'tables' cannot raise
    # KeyError (the previous bare pop did).
    data.pop('tables', None)
    return data
def __repr__(self):
    """Debug representation showing id, type and title."""
    fields = (self.id, self.type, self.title)
    return '<Document(%r,%r,%r)>' % fields
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,614
|
gazeti/aleph
|
refs/heads/master
|
/aleph/index/collections.py
|
from aleph.index.util import query_delete
def delete_collection(collection_id):
    """Delete all documents from a particular collection."""
    # Both document-level and entity-level index entries reference the
    # collection; purge each in turn.
    for field in ('collection_id', 'entity_collection_id'):
        query_delete({'term': {field: collection_id}})
query_delete({'term': {'entity_collection_id': collection_id}})
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,615
|
gazeti/aleph
|
refs/heads/master
|
/aleph/tests/test_view_util.py
|
from flask import Request
from aleph.views.util import extract_next_url
from aleph.tests.util import TestCase
class ViewUtilTest(TestCase):
    """Tests for the ``next`` URL extraction helper used after login."""

    def setUp(self):
        super(ViewUtilTest, self).setUp()

    def test_extract_next_url_blank(self):
        # No 'next' parameter falls back to the site root.
        req = Request.from_values('')
        self.assertEqual('/', extract_next_url(req))

    def test_extract_next_url_unsafe(self):
        # External/absolute URLs must be rejected (open-redirect guard).
        req = Request.from_values('/?next={}'.format(self.fake.url()))
        self.assertEqual('/', extract_next_url(req))

    def test_extract_next_url_safe(self):
        # Relative application paths are allowed through unchanged.
        req = Request.from_values('/?next=/help')
        self.assertEqual('/help', extract_next_url(req))
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,616
|
gazeti/aleph
|
refs/heads/master
|
/aleph/views/ingest_api.py
|
import os
import json
from flask import Blueprint, request
from werkzeug import secure_filename
from werkzeug.exceptions import BadRequest
from apikit import obj_or_404, jsonify
from aleph.core import upload_folder
from aleph.events import log_event
from aleph.ingest import ingest_document
from aleph.model import Collection, Document
from aleph.model.common import make_textid
from aleph.model.validate import validate
from aleph.util import checksum
blueprint = Blueprint('ingest_api', __name__)
@blueprint.route('/api/1/collections/<int:collection_id>/ingest',
                 methods=['POST', 'PUT'])
def ingest_upload(collection_id):
    """Upload one or more files into a collection and queue them for ingest.

    Expects multipart file fields plus an optional ``meta`` form field
    holding a JSON metadata object that is applied to every uploaded file.
    Returns the created/updated documents.
    """
    collection = obj_or_404(Collection.by_id(collection_id))
    request.authz.require(request.authz.collection_write(collection.id))
    log_event(request)
    crawler_run = make_textid()
    # Parse and validate the shared metadata exactly once, before any file
    # is written to disk. Previously the JSON was parsed twice (the first
    # result discarded) and validated again inside the loop for each file.
    try:
        meta = json.loads(request.form.get('meta', '{}'))
        validate(meta, 'metadata.json#')
    except Exception as ex:
        raise BadRequest(unicode(ex))
    documents = []
    for storage in request.files.values():
        sec_fn = os.path.join(upload_folder, secure_filename(storage.filename))
        storage.save(sec_fn)
        content_hash = checksum(sec_fn)
        document = Document.by_keys(collection=collection,
                                    content_hash=content_hash)
        document.crawler = 'user_upload:%s' % request.authz.role.id
        document.crawler_run = crawler_run
        document.mime_type = storage.mimetype
        document.file_name = storage.filename
        document.meta.update(meta)
        ingest_document(document, sec_fn, user_queue=True)
        # The ingest pipeline has archived the file; remove the upload copy.
        os.unlink(sec_fn)
        documents.append(document)
    return jsonify({'status': 'ok', 'documents': documents})
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,617
|
gazeti/aleph
|
refs/heads/master
|
/aleph/views/collections_api.py
|
from flask import Blueprint, request
from apikit import obj_or_404, jsonify, Pager, request_data
from normality import ascii_text
from dalet import COUNTRY_NAMES
from aleph.core import USER_QUEUE, USER_ROUTING_KEY, get_config, db
from aleph.model import Collection
from aleph.search import QueryState, lead_count
from aleph.events import log_event
from aleph.logic import delete_collection, update_collection
from aleph.logic import analyze_collection
blueprint = Blueprint('collections_api', __name__)
@blueprint.route('/api/1/collections', methods=['GET'])
def index():
    """List collections visible to the current user.

    Supports filtering by ``permission`` (read/write), ``label``,
    ``managed``, plus generic QueryState filters, optional per-item
    counts (``counts``) and ``countries``/``category`` facets.
    """
    # allow to filter for writeable collections only, needed
    # in some UI scenarios:
    state = QueryState(request.args, request.authz)
    permission = request.args.get('permission')
    if permission not in [request.authz.READ, request.authz.WRITE]:
        permission = request.authz.READ
    collections = request.authz.collections[permission]
    # Other filters for navigation
    label = request.args.get('label')
    managed = state.getbool('managed', None)
    # Include counts (of entities, documents) in list view?
    counts = state.getbool('counts', False)

    def converter(colls):
        # Serializer handed to the pager for the current result page.
        return [c.to_dict(counts=counts) for c in colls]
    facet = [f.lower().strip() for f in request.args.getlist('facet')]
    q = Collection.find(label=label,
                        countries=state.getfilter('countries'),
                        category=state.getfilter('category'),
                        collection_id=collections,
                        managed=managed)
    data = Pager(q).to_dict(results_converter=converter)
    facets = {}
    if 'countries' in facet:
        facets['countries'] = {
            'values': Collection.facet_by(q, Collection.countries,
                                          mapping=COUNTRY_NAMES)
        }
    if 'category' in facet:
        # Category display labels come from deployment configuration.
        mapping = get_config('COLLECTION_CATEGORIES', {})
        facets['category'] = {
            'values': Collection.facet_by(q, Collection.category,
                                          mapping=mapping)
        }
    data['facets'] = facets
    return jsonify(data)
@blueprint.route('/api/1/collections', methods=['POST', 'PUT'])
def create():
    """Create a new, user-managed collection owned by the current role."""
    request.authz.require(request.authz.logged_in)
    payload = request_data()
    # Collections created via the API are never crawler-managed.
    payload['managed'] = False
    collection = Collection.create(payload, request.authz.role)
    db.session.commit()
    update_collection(collection)
    log_event(request)
    return jsonify(collection)
@blueprint.route('/api/1/collections/<int:id>', methods=['GET'])
def view(id):
    """Return a single collection with counts and its lead count."""
    coll = obj_or_404(Collection.by_id(id))
    request.authz.require(request.authz.collection_read(coll))
    result = coll.to_dict(counts=True)
    result['lead_count'] = lead_count(id)
    return jsonify(result)
@blueprint.route('/api/1/collections/<int:id>', methods=['POST', 'PUT'])
def update(id):
    """Apply posted fields to a collection, persist and re-index it."""
    coll = obj_or_404(Collection.by_id(id))
    request.authz.require(request.authz.collection_write(coll))
    coll.update(request_data())
    db.session.add(coll)
    db.session.commit()
    update_collection(coll)
    log_event(request)
    # Respond with the freshly updated representation.
    return view(id)
@blueprint.route('/api/1/collections/<int:id>/process',
                 methods=['POST', 'PUT'])
def process(id):
    """Queue an asynchronous re-analysis of the collection's documents."""
    coll = obj_or_404(Collection.by_id(id))
    request.authz.require(request.authz.collection_write(coll))
    analyze_collection.apply_async([coll.id], queue=USER_QUEUE,
                                   routing_key=USER_ROUTING_KEY)
    log_event(request)
    return jsonify({'status': 'ok'})
@blueprint.route('/api/1/collections/<int:id>/pending', methods=['GET'])
def pending(id):
    """List up to 30 entities awaiting review in this collection."""
    coll = obj_or_404(Collection.by_id(id))
    request.authz.require(request.authz.collection_read(coll))
    results = []
    for entity in coll.pending_entities().limit(30).all():
        row = entity.to_dict()
        # Provide an ASCII transliteration alongside the raw name.
        row['name_latin'] = ascii_text(entity.name)
        results.append(row)
    return jsonify({'results': results, 'total': len(results)})
@blueprint.route('/api/1/collections/<int:id>', methods=['DELETE'])
def delete(id):
    """Queue asynchronous deletion of a collection and its contents."""
    coll = obj_or_404(Collection.by_id(id))
    request.authz.require(request.authz.collection_write(coll))
    delete_collection.apply_async([coll.id], queue=USER_QUEUE,
                                  routing_key=USER_ROUTING_KEY)
    log_event(request)
    return jsonify({'status': 'ok'})
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,618
|
gazeti/aleph
|
refs/heads/master
|
/aleph/search/records.py
|
from elasticsearch.helpers import scan
from aleph.core import es, es_index
from aleph.index import TYPE_RECORD
from aleph.util import ensure_list
from aleph.search.fragments import text_query_string
from aleph.search.util import execute_basic
from aleph.search.fragments import match_all, filter_query
from aleph.model import DocumentRecord
SNIPPET_SIZE = 100
def records_query(document_id, state):
    """Build the Elasticsearch query for rows of a single document.

    ``state`` carries the parsed request parameters; explicitly requested
    row indexes (repeated ``row`` params) are boosted so they rank first.
    """
    try:
        rows = [int(r) for r in state.getlist('row')]
    except Exception:
        # Malformed row parameters are ignored rather than failing the
        # request (narrowed from a bare except).
        rows = []
    # Relevance ordering only matters with a text query or an explicit row
    # selection; otherwise sort purely by position.
    score_query = state.has_text or len(rows)
    shoulds = records_query_shoulds(state)
    if not len(shoulds):
        shoulds = [match_all()]
    if len(rows):
        # Selected rows always match and receive a very large boost.
        shoulds.append({
            "constant_score": {
                "filter": {'terms': {'index': rows}},
                "boost": 1000
            }
        })
    query = records_query_internal(document_id, shoulds, size=state.limit)
    query['query'] = filter_query(query['query'], state.filters)
    query['from'] = state.offset
    sort = [{'index': 'asc'}, {'page': 'asc'}]
    if score_query:
        sort.insert(0, '_score')
    # BUG FIX: the sort order was computed but never attached to the query
    # body, so ES ignored it and results ignored relevance entirely.
    query['sort'] = sort
    return query
def records_query_shoulds(state):
    """Collect 'should' clauses for the user's text plus highlight terms."""
    clauses = []
    if state.has_text:
        clauses.append(text_query_string(state.text))
    clauses.extend(text_query_string(term) for term in state.highlight_terms)
    return clauses
def records_query_internal(document_id, shoulds, size=5):
    """Assemble the base ES request body for records of one document."""
    bool_clause = {
        'minimum_should_match': 1,
        'should': shoulds,
        'filter': [{'term': {'document_id': document_id}}],
    }
    highlight = {
        'fields': {
            'text': {
                'fragment_size': SNIPPET_SIZE,
                'number_of_fragments': 1,
            }
        }
    }
    return {
        'size': size,
        'query': {'bool': bool_clause},
        'highlight': highlight,
        '_source': ['document_id', 'sheet', 'index'],
    }
def scan_entity_mentions(entity):
    """Find mentions of a given entity in all records.

    Yields ``(document_id, text)`` tuples for every record text matching
    one of the entity's regex terms.
    """
    shoulds = []
    for term in entity.regex_terms:
        shoulds.append(text_query_string(term))
    query = {
        'query': {
            'bool': {
                'should': shoulds,
                'minimum_should_match': 1
            }
        },
        # Descending document order groups all mentions of one document.
        'sort': [{'document_id': 'desc'}],
        '_source': ['document_id', 'text']
    }
    for res in scan(es, query=query, index=es_index, doc_type=[TYPE_RECORD]):
        # 'text' may be a scalar or a list; normalise to a list.
        for text in ensure_list(res.get('_source').get('text')):
            yield (res.get('_source').get('document_id'), text)
def execute_records_query(document_id, state, query):
    """Execute a query against records and return a set of results.

    Highlighted snippets come from Elasticsearch; full row data and text
    are then loaded from the database and merged into matching results.
    """
    _, hits, output = execute_basic(TYPE_RECORD, query)
    ids = []
    by_id = {}
    for rec in hits.get('hits', []):
        record = rec.get('_source')
        record['score'] = rec.get('_score')
        record['id'] = int(rec.get('_id'))
        ids.append(rec.get('_id'))
        by_id[record['id']] = record
        for text in rec.get('highlight', {}).get('text', []):
            record['text'] = text
        output['results'].append(record)
    # Merge stored row data into the ES hits. A dict lookup replaces the
    # previous nested scan over all results for every DB record (O(n*m)),
    # which also shadowed the outer 'result' variable.
    for db_record in DocumentRecord.find_records(document_id, ids):
        match = by_id.get(db_record.id)
        if match is not None:
            match['data'] = db_record.data
            match['text'] = db_record.text
    return output
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,619
|
gazeti/aleph
|
refs/heads/master
|
/aleph/ingest/manager.py
|
import os
import logging
from ingestors import Manager
from ingestors.util import decode_path
from aleph.core import db
from aleph.model import Document, Cache
from aleph.analyze import analyze_document
from aleph.ingest.result import DocumentResult
from aleph.util import checksum
log = logging.getLogger(__name__)
class DocumentManager(Manager):
    """Handle the process of ingesting documents.

    This includes creating and flushing records, setting document state and
    dispatching child ingestors as needed.
    """
    RESULT_CLASS = DocumentResult

    def __init__(self, config, archive):
        super(DocumentManager, self).__init__(config)
        # Blob storage used to fetch source files and clean up local copies.
        self.archive = archive

    def before(self, result):
        """Reset the document before (re-)ingest: pending status, no rows."""
        db.session.flush()
        result.document.status = Document.STATUS_PENDING
        result.document.delete_records()

    def after(self, result):
        """Persist the ingest outcome, log it, and trigger analysis."""
        result.update()
        db.session.commit()
        if result.error_message:
            log.warn('Error [%r]: %s', result, result.error_message)
        else:
            log.debug('Ingested: %r', result.document)
        analyze_document(result.document)

    def get_cache(self, key):
        # Ingestor-framework cache hook, backed by the Cache model.
        return Cache.get_cache(key)

    def set_cache(self, key, value):
        # Counterpart of get_cache for the ingestor framework.
        Cache.set_cache(key, value)

    def handle_child(self, parent, file_path, title=None, mime_type=None,
                     id=None, file_name=None):
        """Create or refresh a child document for a file extracted from
        ``parent`` (e.g. an archive member) and queue it for ingest."""
        file_path = decode_path(file_path)
        file_name = decode_path(file_name) or os.path.basename(file_path)
        content_hash = None
        # Directories have no content hash; only hash real files.
        if not os.path.isdir(file_path):
            content_hash = checksum(file_path)
        document = Document.by_keys(parent_id=parent.document.id,
                                    collection=parent.document.collection,
                                    foreign_id=id, content_hash=content_hash)
        document.title = title or document.meta.get('title')
        document.file_name = file_name or document.meta.get('file_name')
        document.mime_type = mime_type or document.meta.get('mime_type')
        # Imported late to avoid a circular import with aleph.ingest.
        from aleph.ingest import ingest_document
        ingest_document(document, file_path, user_queue=parent.user_queue)

    def ingest_document(self, document, file_path=None, user_queue=False):
        """Ingest a database-backed document.

        First retrieve it's data and then call the actual ingestor.
        """
        if file_path is None:
            file_path = self.archive.load_file(document.content_hash,
                                               file_name=document.file_name)
        if file_path is None:
            # TODO: save this to the document?
            log.error("Cannot load data: %r", document)
            return
        try:
            # Inherit language/country hints from the collection when the
            # document itself carries none.
            if not len(document.languages) and document.collection is not None:
                document.languages = document.collection.languages or []
            if not len(document.countries) and document.collection is not None:
                document.countries = document.collection.countries or []
            result = DocumentResult(self, document,
                                    file_path=file_path,
                                    user_queue=user_queue)
            self.ingest(file_path, result=result)
        finally:
            # Always remove the local copy fetched from the archive.
            self.archive.cleanup_file(document.content_hash)
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,620
|
gazeti/aleph
|
refs/heads/master
|
/aleph/tests/test_export_api.py
|
from aleph.tests.util import TestCase
class ExportApiTestCase(TestCase):
    """Smoke test for the spreadsheet export endpoint."""

    def setUp(self):
        super(ExportApiTestCase, self).setUp()
        self.load_fixtures('docs.yaml')

    def test_smoke_comes_out(self):
        self.login(is_admin=True)
        res = self.client.get('/api/1/query/export')
        assert res.status_code == 200, res
        # An XLSX response carries the OOXML mime type.
        assert 'openxmlformats' in res.content_type, res.content_type
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,621
|
gazeti/aleph
|
refs/heads/master
|
/aleph/text.py
|
# coding: utf-8
import six
import logging
from normality import normalize, stringify, latinize_text, collapse_spaces
from normality import slugify # noqa
from normality.cleaning import decompose_nfkd, remove_control_chars
log = logging.getLogger(__name__)
INDEX_MAX_LEN = 1024 * 1024 * 100
def index_form(texts):
    """Turn a set of strings into the appropriate form for indexing.

    Returns a list with each cleaned text plus, where it differs, a
    latinized variant. Total stored length is capped at INDEX_MAX_LEN:
    once exceeded, duplicates are dropped, and if still over the cap the
    remaining input is skipped.
    """
    results = []
    total_len = 0
    for text in texts:
        # We don't want to store more than INDEX_MAX_LEN of text per doc
        if total_len > INDEX_MAX_LEN:
            # TODO: there might be nicer techniques for dealing with overly
            # long text buffers?
            results = list(set(results))
            total_len = sum((len(t) for t in results))
            if total_len > INDEX_MAX_LEN:
                break
        text = stringify(text)
        if text is None:
            continue
        text = collapse_spaces(text)
        # XXX: is NFKD a great idea?
        text = decompose_nfkd(text)
        total_len += len(text)
        results.append(text)
        # Make latinized text version
        latin = latinize_text(text)
        latin = stringify(latin)
        # Skip the latin form when transliteration adds nothing new.
        if latin is None or latin == text:
            continue
        total_len += len(latin)
        results.append(latin)
    return results
def match_form(text):
    """Turn a string into a form appropriate for name matching.

    The output is not meant to stay readable; it is a lower-cased,
    ASCII-folded form suitable for comparisons and machine analysis.
    """
    return normalize(text, ascii=True, lowercase=True)
def string_value(value, encoding=None):
    """Decode *value* to text (UTF-8 by default) and strip control chars."""
    text = stringify(value, encoding=encoding, encoding_default='utf-8')
    return remove_control_chars(text)
def encoded_value(text):
    """Return *text* as UTF-8 encoded bytes; byte strings pass through."""
    if not isinstance(text, six.binary_type):
        text = six.text_type(text).encode('utf-8')
    return text
def has_value(value):
    """Check a given value is not empty.

    None and whitespace-only strings count as empty; everything else
    (including 0 and empty containers) counts as a value.
    """
    if value is None:
        return False
    if isinstance(value, six.string_types) and not value.strip():
        return False
    return True
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,622
|
gazeti/aleph
|
refs/heads/master
|
/aleph/index/__init__.py
|
from aleph.index.admin import init_search, upgrade_search # noqa
from aleph.index.admin import delete_index, flush_index # noqa
from aleph.index.entities import index_entity, delete_entity # noqa
from aleph.index.documents import index_document, index_document_id # noqa
from aleph.index.documents import delete_document # noqa
from aleph.index.records import index_records # noqa
from aleph.index.datasets import index_items, delete_dataset # noqa
from aleph.index.leads import index_lead, delete_entity_leads # noqa
from aleph.index.mapping import TYPE_DOCUMENT, TYPE_RECORD, TYPE_ENTITY # noqa
from aleph.index.mapping import TYPE_LINK, TYPE_LEAD # noqa
from aleph.index.mapping import DOCUMENT_MAPPING, RECORD_MAPPING # noqa
from aleph.index.mapping import( # noqa
ENTITY_MAPPING, LINK_MAPPING, LEAD_MAPPING)
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,623
|
gazeti/aleph
|
refs/heads/master
|
/aleph/logic/distance.py
|
from itertools import product
from Levenshtein import jaro_winkler
from pprint import pprint # noqa
# Hand-tuned feature weights used by entity_distance(); the name is
# self-deprecating — these are heuristics, not learned coefficients.
# NOTE(review): 'dates' has a weight but entity_distance() computes no
# 'dates' feature, so that entry is currently unused — confirm.
SUPER_SCIENTIFIC_WEIGHTINGS = {
    'names': 0.3,
    'fp_distance': 0.3,
    'fp_tokens': 0.2,
    'countries': 0.1,
    'dates': 0.1,
    'addresses_distance': 0.1,
    'addresses_tokens': 0.1,
    'emails': 0.3,
    'phones': 0.3,
    'identifiers': 0.4,
}
def pred_best_jw(a, b, field):
    """Best (maximum) Jaro-Winkler similarity between any pair of values
    of ``field`` taken from the two entity dicts; 0.0 when either side
    has no values."""
    top = 0.0
    for left, right in product(a.get(field, []), b.get(field, [])):
        score = jaro_winkler(left.lower(), right.lower())
        if score > top:
            top = score
    return top
def pred_matching_elem(a, b, field):
    """Return 1.0 if any value of ``field`` matches exactly
    (case-insensitively) between ``a`` and ``b``, else 0.0.

    The previous docstring ("closest jaro-winkler match") was a
    copy-paste error: this predicate is an exact-match test.
    """
    for (ak, bk) in product(a.get(field, []), b.get(field, [])):
        if ak.lower() == bk.lower():
            return 1.0
    return 0.0
def pred_token_overlap(a, b, field):
    """Best whitespace-token overlap ratio (0.0..1.0) between any pair of
    ``field`` values: |tokens(x) & tokens(y)| / max(|tokens(x)|, |tokens(y)|).

    Fixes: docstring was copy-pasted from the jaro-winkler predicate, and
    an empty-string value (zero tokens on both sides) used to raise
    ZeroDivisionError — such pairs now simply score 0.
    """
    best = 0.0
    tokens_a = [set(value.split()) for value in a.get(field, [])]
    tokens_b = [set(value.split()) for value in b.get(field, [])]
    for (ta, tb) in product(tokens_a, tokens_b):
        denom = max(len(ta), len(tb))
        if denom == 0:
            continue  # both values were empty/whitespace-only
        overlap = len(ta.intersection(tb)) / float(denom)
        best = max(overlap, best)
    return best
def entity_distance(entity, other):
    """Weighted similarity score in [0.0, 1.0] between two entity dicts.

    Each feature predicate scores one field; the scores are combined with
    SUPER_SCIENTIFIC_WEIGHTINGS and capped at 1.0.

    Fix: the original wrote ``other['names']`` back into the caller's
    dict; we now modify a shallow copy instead (the returned score is
    unchanged).
    """
    # once we have enough training data, this should use a regression model
    # of some sort to calculate a multi-attribute based similarity metric.
    # cf. https://github.com/datamade/rlr
    # http://scikit-learn.org/stable/auto_examples/linear_model/plot_ols.html
    if 'names' not in other:
        other = dict(other)
        other['names'] = [other['name']]
    features = {
        'names': pred_best_jw(entity, other, 'names'),
        'fp_distance': pred_best_jw(entity, other, 'fingerprints'),
        'fp_tokens': pred_token_overlap(entity, other, 'fingerprints'),
        'countries': pred_best_jw(entity, other, 'countries'),
        'addresses_distance': pred_best_jw(entity, other, 'addresses'),
        'addresses_tokens': pred_token_overlap(entity, other, 'addresses'),
        'emails': pred_best_jw(entity, other, 'emails'),
        'phones': pred_best_jw(entity, other, 'phones'),
        'identifiers': pred_best_jw(entity, other, 'identifiers'),
    }
    # pprint(features)
    score = 0.0
    for field, value in features.items():
        score += value * SUPER_SCIENTIFIC_WEIGHTINGS[field]
    return min(1.0, score)
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,624
|
gazeti/aleph
|
refs/heads/master
|
/aleph/schema/types.py
|
import re
import fingerprints
from normality import ascii_text, stringify, collapse_spaces
from dalet import is_partial_date, parse_date
from dalet import parse_phone, parse_country, parse_email
from aleph.util import ensure_list
class StringProperty(object):
    """Base property type: cleaning, normalisation and fingerprinting."""
    index_invert = None

    def __init__(self):
        # e.g. NameProperty -> 'name'
        self.name = type(self).__name__.lower().replace('property', '')

    def clean(self, value, record, config):
        """Coerce to text and collapse whitespace; None when empty."""
        text = stringify(value)
        if text is None:
            return None
        return collapse_spaces(text)

    def normalize(self, values):
        """Set of normalised forms for all given values."""
        normalized = set()
        for item in values:
            normalized.update(ensure_list(self.normalize_value(item)))
        return normalized

    def normalize_value(self, value):
        return self.clean(value, {}, {})

    def fingerprint(self, values):
        # Base type has no fingerprint representation.
        return []
class NameProperty(StringProperty):
    """Person/company names; normalised both raw and ASCII-folded."""
    index_invert = 'names'

    def normalize_value(self, value):
        collapsed = collapse_spaces(value)
        return collapsed, ascii_text(collapsed)

    def fingerprint(self, values):
        # TODO: this should not be a property thing, so that fingerprints
        # can include dates etc.
        generated = (fingerprints.generate(value) for value in values)
        return [fp for fp in generated if fp is not None]
class URLProperty(StringProperty):
    # URLs are kept verbatim and not added to any inverted index.
    index_invert = None
class DateProperty(StringProperty):
    """Dates, parsed via dalet with an optional mapping-supplied format."""
    index_invert = 'dates'

    def clean(self, value, record, config):
        text = super(DateProperty, self).clean(value, record, config)
        return parse_date(text, date_format=config.get('format'))

    def normalize_value(self, value):
        # Keep only values dalet recognises as (partial) ISO dates;
        # anything else yields None implicitly.
        if is_partial_date(value):
            return value
class CountryProperty(StringProperty):
    """Country values, mapped to country codes where recognised."""
    index_invert = 'countries'

    def clean(self, value, record, config):
        text = super(CountryProperty, self).clean(value, record, config)
        # Fall back to the raw text when no country is recognised.
        return parse_country(text) or text

    def normalize_value(self, value):
        return parse_country(value)
class AddressProperty(StringProperty):
    # Postal addresses; normalised by fingerprinting (may yield None).
    index_invert = 'addresses'
    def normalize_value(self, value):
        return fingerprints.generate(value)
class PhoneProperty(StringProperty):
    """Phone numbers, parsed with an optional country hint from the mapping."""
    index_invert = 'phones'

    def clean(self, value, record, config):
        text = super(PhoneProperty, self).clean(value, record, config)
        parsed = parse_phone(text, config.get('country'))
        # Keep the raw text when the number cannot be parsed.
        return parsed or text
class EmailProperty(StringProperty):
    """E-mail addresses; raw text is kept when parsing fails."""
    index_invert = 'emails'

    def clean(self, value, record, config):
        text = super(EmailProperty, self).clean(value, record, config)
        return parse_email(text) or text

    def normalize_value(self, value):
        return parse_email(value)
class IdentiferProperty(StringProperty):
    """Official identifiers (registration numbers etc.).

    The historic misspelling of the class name is kept — callers and
    resolve_type() refer to it as-is.
    """
    index_invert = 'identifiers'
    clean_re = re.compile('[^a-zA-Z0-9]*')

    def normalize_value(self, value):
        text = stringify(value)
        if text is None:
            return None
        # The trailing stringify() turns an all-punctuation value
        # ('' after stripping) back into None.
        return stringify(self.clean_re.sub('', text).upper())
def resolve_type(name):
    """Look up a property type class by its mapping name.

    ``name`` is case-insensitive and surrounding whitespace is ignored.
    Raises TypeError for unknown names.  (The original docstring was
    garbled: "configerty type".)
    """
    types = {
        'string': StringProperty,
        'name': NameProperty,
        'date': DateProperty,
        'country': CountryProperty,
        'address': AddressProperty,
        'phone': PhoneProperty,
        'email': EmailProperty,
        'url': URLProperty,
        'uri': URLProperty,
        'identifier': IdentiferProperty
    }
    type_ = types.get(name.strip().lower())
    if type_ is None:
        raise TypeError("No such type: %s" % name)
    return type_
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,625
|
gazeti/aleph
|
refs/heads/master
|
/aleph/tests/test_base_api.py
|
from aleph.tests.util import TestCase
class BaseApiTestCase(TestCase):
    """Smoke tests for the index page and the public metadata endpoint."""

    def setUp(self):
        super(BaseApiTestCase, self).setUp()

    def test_index(self):
        # NOTE(review): on Python 3 Werkzeug, res.data is *bytes*, so the
        # `'<title>' in res.data` checks assume a Python 2 test client —
        # confirm which runtime this suite targets.
        res = self.client.get('/')
        assert res.status_code == 200, res
        assert '<title>' in res.data, res.data
        assert 'ng-view' in res.data, res.data

    def test_metadata(self):
        # The metadata endpoint must expose the country-code table.
        res = self.client.get('/api/1/metadata')
        assert res.status_code == 200, res
        assert 'countries' in res.json, res.json
        countries = res.json['countries']
        assert 'ar' in countries, countries
        assert countries['ar'] == 'Argentina', countries
|
{"/aleph/search/leads.py": ["/aleph/index/__init__.py"], "/aleph/views/search_api.py": ["/aleph/search/__init__.py"], "/aleph/views/entities_api.py": ["/aleph/search/__init__.py"], "/aleph/model/entity.py": ["/aleph/text.py", "/aleph/model/entity_identity.py"], "/aleph/datasets/mapper.py": ["/aleph/schema/__init__.py", "/aleph/text.py", "/aleph/datasets/formatting.py"], "/aleph/logic/leads.py": ["/aleph/authz.py", "/aleph/index/__init__.py", "/aleph/search/__init__.py", "/aleph/logic/distance.py"], "/aleph/logic/alerts.py": ["/aleph/authz.py", "/aleph/search/__init__.py"], "/aleph/schema/__init__.py": ["/aleph/text.py", "/aleph/schema/types.py"], "/aleph/logic/collections.py": ["/aleph/index/collections.py"], "/aleph/index/records.py": ["/aleph/text.py"], "/aleph/views/leads_api.py": ["/aleph/search/__init__.py", "/aleph/search/leads.py"], "/aleph/datasets/__init__.py": ["/aleph/authz.py"], "/aleph/search/links.py": ["/aleph/index/__init__.py"], "/aleph/views/documents_api.py": ["/aleph/search/__init__.py"], "/aleph/search/__init__.py": ["/aleph/search/links.py", "/aleph/search/leads.py", "/aleph/search/records.py"], "/aleph/logic/datasets.py": ["/aleph/index/__init__.py"], "/aleph/model/document.py": ["/aleph/model/validate.py", "/aleph/text.py"], "/aleph/views/ingest_api.py": ["/aleph/model/validate.py"], "/aleph/views/collections_api.py": ["/aleph/search/__init__.py"], "/aleph/search/records.py": ["/aleph/index/__init__.py"], "/aleph/ingest/manager.py": ["/aleph/ingest/result.py"], "/aleph/index/__init__.py": ["/aleph/index/records.py", "/aleph/index/leads.py"]}
|
12,649
|
yenlt6/learningpython
|
refs/heads/main
|
/Day18UnitTest2310/testing/test_todos.py
|
import requests
from unittest.mock import Mock, patch
from services import get_todos, get_uncompleted_todos
# 3
def test_request_response():
    """Hit the real API once to confirm the request/response cycle works."""
    # 1
    response = requests.get('http://jsonplaceholder.typicode.com/todos')
    # Confirm that the request-response cycle completed successfully.
    # Fix: the original `assert response.ok, True` used True as the assert
    # *message* (meaningless); a plain truthiness check was intended.
    assert response.ok
# 4
@patch('services.requests.get')
def test_getting_todos(mock_get):
    """get_todos() returns a response object when the HTTP call succeeds."""
    # Configure the mock to return a response with an OK status code.
    mock_get.return_value.ok = True
    # Call the service, which will send a request to the server.
    response = get_todos()
    # Fix: PEP 8 — compare against None with `is not`, never `!=`.
    assert response is not None
# 5
def test_another_getting_todos():
    """Same check as above, demonstrating manual start()/stop() patching."""
    mock_get_patcher = patch('services.requests.get')
    # Start patching `requests.get`.
    mock_get = mock_get_patcher.start()
    mock_get.return_value.ok = True
    try:
        response = get_todos()
    finally:
        # Fix: always undo the patch, even if get_todos() raises —
        # the original leaked the active patch on failure.
        mock_get_patcher.stop()
    # Fix: `is not None` instead of `!= None`.
    assert response is not None
# 7
@patch('services.requests.get')
def test_getting_todos_when_response_is_ok(mock_get):
    """When the server answers OK, get_todos() exposes the JSON payload."""
    todos = [{
        'userId': 1,
        'id': 1,
        'title': 'Make the bed',
        'completed': False
    }]
    # requests.get() returns a Response object; fake one with a Mock.
    fake_response = Mock(ok=True)
    fake_response.json.return_value = todos
    mock_get.return_value = fake_response
    # Call the service, which sends the (mocked) request.
    response = get_todos()
    assert response.json() == todos
@patch('services.requests.get')
def test_getting_todos_when_response_is_not_ok(mock_get):
    """A non-OK response makes get_todos() return None."""
    mock_get.return_value.ok = False
    assert get_todos() is None
# 8
@patch('services.get_todos')
def test_getting_uncompleted_todos_when_todos_is_not_none(mock_get_todos):
    """get_uncompleted_todos() filters out completed items."""
    todo1 = {
        'userId': 1,
        'id': 1,
        'title': 'Make the bed',
        'completed': False
    }
    todo2 = {
        'userId': 1,
        'id': 2,
        'title': 'Walk the dog',
        'completed': True
    }
    # Configure mock to return a response with a JSON-serialized list of todos.
    mock_get_todos.return_value = Mock()
    mock_get_todos.return_value.json.return_value = [todo1, todo2]
    uncompleted_todos = get_uncompleted_todos()
    # Fix: `assert x, True` used True as the failure message — drop it.
    assert mock_get_todos.called
    # Confirm that the expected filtered list of todos was returned.
    assert uncompleted_todos == [todo1]
@patch('services.get_todos')
def test_getting_uncompleted_todos_when_todos_is_none(mock_get_todos):
    """When get_todos() fails (None), the filter returns an empty list."""
    mock_get_todos.return_value = None
    uncompleted_todos = get_uncompleted_todos()
    # Fix: drop the meaningless `, True` assert message.
    assert mock_get_todos.called
    # Confirm that an empty list was returned.
    assert uncompleted_todos == []
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,650
|
yenlt6/learningpython
|
refs/heads/main
|
/Day16Exam2_TEMP/card_game/player.py
|
import card
class Player(card):
'''
Class đại diện cho mỗi người chơi
Người chơi chỉ cần lưu tên, và các lá bài người chơi có
'''
def __init__(self,name,card):
self.name=name
self.card=card # mỗi người chơi được 3 quân bài, có nghĩa là nó phải chứa kiểu list Card?
def __str__(self) -> str:
return f'Player: {self.name}, {self.card}'
@property
def point(self): # trung bình
'''Tính điểm cho bộ bài của người chơi,
Y mỗi người chơi có 3 lá bài'''
var_1=0
self.Car
@property
def point(self): # trung bình
'''Tính điểm cho bộ bài của người chơi,
Y mỗi người chơi có 3 lá bài'''
var_1=0
switcher={
'A': 1,
1:1,
2:2,
3:3,
4:4,
5:5,
6:6,
7:7,
8:8,
9:9,
}
switcher.get(self.Card.rank)
@property
def biggest_card(self):
'''
Tìm lá bài lớn nhất
Trong trường hợp điểm bằng nhau, sẽ so sánh lá bài lớn nhất để tìm ra người chiến thắng
'''
switcher={
'G': 4,
'H':3,
'J':2,
'k':1
}
pass
def add_card(self):
'''Thêm một lá bài vào bộ (rút từ bộ bài)'''
pass
def remove_card(self):
'''Reset bộ bài khi chơi game mới'''
def flip_card(self):
'''Lật bài, hiển thị các lá bài'''
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,651
|
yenlt6/learningpython
|
refs/heads/main
|
/Day16Exam2_TEMP/card_game/deck copy.py
|
from card import Card
import random
class Deck:
    """A 36-card deck: ranks ('A', 2..9) x suits ('G', 'H', 'J', 'K')."""

    def __init__(self):
        self.list_card = self.build()

    def build(self):
        """Build the deck: one Card per (rank, suit) pair — 9 * 4 = 36."""
        list_card = []
        list_rank = ['A', 2, 3, 4, 5, 6, 7, 8, 9]
        list_suit = ['G', 'H', 'J', 'K']
        for rank in list_rank:
            for suit in list_suit:
                list_card.append(Card(rank, suit))
        return list_card

    def shuffle_card(self):
        """Shuffle the deck in place and return the shuffled list.

        Fix: random.shuffle() returns None; the original returned that
        None, which broke callers iterating the result.
        """
        random.shuffle(self.list_card)
        return self.list_card

    def deal_card(self, card_item):
        """Remove ``card_item`` from the deck (deal it to a player).

        Fix: the original called ``self.remove``, which does not exist —
        the cards live in ``self.list_card``.
        """
        return self.list_card.remove(card_item)
'''Rút một lá bài từ bộ bài: lấy random 1 lá bài từ bộ bài, tức là lấy random 1 phần tử trong list ra'''
# Smoke test: build a deck, shuffle it, print every card.
deck = Deck()
print(deck)
# Fix: the original iterated ``deck.shuffle_card`` — the bound method
# object itself (missing call parentheses) — which raises TypeError.
deck.shuffle_card()
for card_item in deck.list_card:
    print(card_item)
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,652
|
yenlt6/learningpython
|
refs/heads/main
|
/day_8_temp/homework8.py
|
#Bài 3: Regex - ApacheLog
import sys
import re
def readfile_regex_image(filename):
    """Print an absolute URL for every unique .jpg GET request in an
    Apache log file; the host is derived from the filename (get_domain).
    """
    # Fix: `with` guarantees the handle is closed (the original leaked it).
    with open(filename, encoding='utf-8-sig') as f:
        content = f.read()
    data_find = re.findall(r"GET (.*\.jpg)", content, re.MULTILINE)
    if data_find:
        domain = get_domain(filename)
        # Deduplicate repeated image paths.
        data = set(data_find)
        print("Danh sách ảnh trong file: ")
        for inc, path in enumerate(data, start=1):
            print(f"{inc}. {domain}{path}")
    else:
        print("Không có link ảnh trong file")
def get_domain(filename):
    """Derive ``http://<host>`` from a log filename such as
    ``access.google.com.vn``; return '' when no host suffix is found.
    The pattern accepts one- or two-part TLDs (``.com``, ``.com.vn``).
    """
    match = re.search("[\.](\w*(\.[a-z]{2,6}){1,2})$", filename)
    if match is None:
        return ""
    return "http://" + match.groups()[0]
###
def main():
    """CLI entry point: ``./homework8.py <logfile>``."""
    if len(sys.argv) != 2:
        print('usage: ./homework8.py file')
        sys.exit(1)
    filename = sys.argv[1]
    readfile_regex_image(filename)
    # Fix: exit 0 on success — the original exited with status 1 even
    # when everything worked.
    sys.exit(0)


if __name__ == '__main__':
    main()
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,653
|
yenlt6/learningpython
|
refs/heads/main
|
/Day20FileHandling0611/file_handling/file_handling/writeCV.py
|
from docx import Document
def xin_tang_luong(ho_ten, luong_hien_tai):
    """Write a salary-raise request letter to ``don_xin_tang_luong.docx``."""
    letter = Document()
    letter.add_heading('Don xin tang luong', level=0)
    letter.add_paragraph(f'Xin chao chi quan ly, em ten la {ho_ten}, em muon tang luong, vi luong hien tai {luong_hien_tai} nhu nay la qua thap')
    letter.save('don_xin_tang_luong.docx')
if __name__ == "__main__":
    # Collect the two inputs interactively, then generate the letter.
    entered_name = input("Ho ten: ")
    entered_salary = input("Luong hien tai: ")
    xin_tang_luong(entered_name, entered_salary)
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,654
|
yenlt6/learningpython
|
refs/heads/main
|
/Date16Exam2Date1610/game.py
|
import deck as dec
import player as pla
import card as ca
class Game:
    """Core game controller: holds the player list and the deck.

    (Docstring translated from the Vietnamese original.)
    """
    def __init__(self,lisNguoi=[]):
        # NOTE(review): mutable default argument — callers that omit
        # ``lisNguoi`` share one list across instances; setup() rebinds
        # it, but confirm no caller relies on the default.
        self.lisNguoi=lisNguoi
        self.boBai= dec.Deck()
    def setup(self):
        # Prompt for the number of players (max 5) and their names.
        print("chào mưng bạn đến với game bài 3 cây")
        print("Nhập số lượng người chơi:")
        n= int(input())
        while(n>5):
            print("chỉ cho tối đa 5 người chơi")
            n = int(input("nhập lại số lượng người chơi:"))
        i = 1
        self.lisNguoi = []
        while(n!=0):
            name=input(f"Tên người chơi thứ {i}: ")
            i=i+1
            pla.Player(name)  # NOTE(review): discarded instance — likely dead code
            self.lisNguoi.append(pla.Player(name))
            n=n-1
    def list_players(self):
        # Print an ID / Name table of the current players.
        print(f"| {'ID':9} | {'Name':15}")
        for i in range (0,len(self.lisNguoi)):
            print(f"| {i:9} | {self.lisNguoi[i].name:15}")
    # Add a new player to the game.
    def add_player(self):
        name = input(f"Tên người chơi tiếp theo:")
        # NOTE(review): `<= 5` admits a 6th player when 5 are already
        # seated — confirm the intended limit (likely `< 5`).
        if len(self.lisNguoi)<=5:
            self.lisNguoi.append(pla.Player(name))
        else:
            print("số người chơi đã đủ")
    # Remove one player by list index.
    def remove_player(self,id):
        del self.lisNguoi[id]
    # Shuffle, then deal three cards to every player.
    def deal_card(self):
        self.boBai.shuffle_card()
        for i in range(0,len(self.lisNguoi)):
            self.lisNguoi[i].add_card(self.boBai.deal_card())
            self.lisNguoi[i].add_card(self.boBai.deal_card())
            self.lisNguoi[i].add_card(self.boBai.deal_card())
        # NOTE(review): ``liscard`` — the Deck implementations seen
        # elsewhere expose ``list_card``; confirm this attribute name.
        for i in self.boBai.liscard:
            i.__str__()
    # Flip everyone's cards and announce the winner.
    def flip_card(self):
        diem=[]
        for i in range(0,len(self.lisNguoi)):
            self.lisNguoi[i].flip_card()
            diem.append(self.lisNguoi[i].point)
        # Collect every player tied on the maximum score.
        lismax=[]
        #print(max(diem))
        for i in range(0,len(self.lisNguoi)):
            if self.lisNguoi[i].point==max(diem):
                lismax.append(self.lisNguoi[i])
        one=lismax[0]
        if len(lismax)==1:
            print(f'người chơi chiến thắng:',end='')
            lismax[0].flip_card()
        else:
            # Tie on points: compare each tied player's biggest card.
            # (Card.__gt__ is assumed to return the greater card object
            # — TODO confirm against the Card class.)
            for i in lismax:
                car= ca.Card(i.biggest_card.rank,i.biggest_card.suit)
                car1=ca.Card(one.biggest_card.rank,one.biggest_card.suit)
                if car.__gt__(car1) == car:
                    one = i
            print(f'người chơi chiến thắng:', end='')
            one.flip_card()
# ga=Game()
# ga.setup()
# ga.deal_card()
# ga.flip_card()
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,655
|
yenlt6/learningpython
|
refs/heads/main
|
/python4testers_student/code_along/vingroup/vinmec.py
|
def time_city():
print('[vinmec] time_city()')
class VinMec:
pass
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,656
|
yenlt6/learningpython
|
refs/heads/main
|
/Day102509/bank_account_property.py
|
class BankAccount:
    """A simple bank account enforcing a non-negative balance."""

    def __init__(self, account_number, account_name, balance=0):
        self._account_number = account_number
        self._account_name = account_name
        # Route through the property setter so the >= 0 check applies.
        self.balance = balance

    @property
    def account_number(self):
        return self._account_number

    @property
    def account_name(self):
        return self._account_name

    @property
    def balance(self):
        return self._balance

    @balance.setter
    def balance(self, balance):
        # Guard clause: reject negative balances outright.
        if balance < 0:
            raise ValueError("Số dư phải lớn hơn 0")
        self._balance = balance

    def display(self):
        """Print number, name and balance on one line."""
        print(self.account_number, self.account_name, self.balance, "₫")

    def withdraw(self, amount):
        """Withdraw a positive amount not exceeding the balance."""
        if not 0 < amount <= self.balance:
            raise ValueError(
                "Số tiền phải lớn hơn 0 và không được vượt quá số dư hiện tại")
        self.balance -= amount

    def deposit(self, amount):
        """Deposit a strictly positive amount."""
        if amount <= 0:
            raise ValueError("Số tiền phải lớn hơn 0")
        self.balance += amount
# Demo run: create an account, deposit, then withdraw and display twice.
my_account = BankAccount(1, "Ba", 1_000_000_000_000_000_000)
my_account.deposit(1_000_000_000_000_000_000)
my_account.display()
my_account.withdraw(100_000_000)  # tiny compared to the balance :)
my_account.display()
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,657
|
yenlt6/learningpython
|
refs/heads/main
|
/Date16Exam2Date1610/player.py
|
import card as ca
class Player:
    """A player: a name plus the list of cards currently held.

    (Docstrings translated from the Vietnamese originals.)
    """

    def __init__(self, name):
        self.name = name
        self.listBai = []

    @property
    def point(self):
        """Score of the first three cards on the 1..10 'three-card' scale:
        totals <= 10 count as-is, exact multiples of 10 count as 10,
        otherwise the last digit counts."""
        total = self.listBai[0].rank + self.listBai[1].rank + self.listBai[2].rank
        if total <= 10:
            return total
        if total % 10 == 0:
            return 10
        return total % 10

    @property
    def biggest_card(self):
        """Largest of the three cards — tie-breaker for equal points.
        Card.__gt__ is assumed to return the greater card object —
        TODO confirm against the Card class."""
        best = self.listBai[0]
        for contender in (self.listBai[1], self.listBai[2]):
            if contender.__gt__(best) == contender:
                best = contender
        return best

    def add_card(self, labai):
        """Append a freshly dealt card to the hand."""
        self.listBai.append(labai)

    def remove_card(self):
        """Reset the hand for a new game."""
        self.listBai = []

    def flip_card(self):
        """Reveal the hand: cards, score and the biggest card."""
        print(f'{self.name} {self.listBai[0].rank}{self.listBai[0].suit} {self.listBai[1].rank}{self.listBai[1].suit} {self.listBai[2].rank}{self.listBai[2].suit} Điểm:{self.point} Lá bài lớn nhất:',end='')
        self.biggest_card.__str__()
# pl=Player("lan")
# pl.listBai.append(ca.Card(9,"♣"))
# pl.listBai.append(ca.Card(8,"♣"))
# pl.listBai.append(ca.Card(6,"♣"))
# print(pl.point)
#
#
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,658
|
yenlt6/learningpython
|
refs/heads/main
|
/python4testers_student/code_along/exception/app.py
|
from random import randint
class Error(Exception):
    """Base class for all game-specific errors."""
    pass
class ValueOutOfRangeError(Error):
    """Raised when the guess falls outside the allowed range."""
    pass
class SmallValueError(Error):
    """Raised when the guess is lower than the answer."""
    pass
class LargeValueError(Error):
    """Raised when the guess is higher than the answer."""
    pass
# Number-guessing game: the player has 3 attempts to find a random
# number in 0..50, with directional hints after each wrong guess.
number = randint(0, 50)
count = 3
while count > 0:
    try:
        guess = int(input("Đoán một số (nguyên) bất kỳ trong khoảng 0 - 50: "))
        count -= 1
        if guess < 0 or guess > 50:
            raise ValueOutOfRangeError
        if guess < number:
            raise SmallValueError
        elif guess > number:
            raise LargeValueError
        else:
            print("Chúc mừng, số chính xác là:", number)
            break
    except ValueError:
        # int() failed on non-numeric input; this attempt was not counted
        # against `count` only when int() raised before the decrement.
        print("Nhập một số hợp lệ!")
    except ValueOutOfRangeError:
        print("Con số chính xác nằm trong khoảng 0-50")
    except SmallValueError:
        print("Số đoán nhỏ hơn con số chính xác")
    except LargeValueError:
        print("Số đoán lớn hơn con số chính xác")
    except Exception as e:
        print(e)
else:
    # while/else: runs only when the loop exhausts without `break`,
    # i.e. the player used all attempts without guessing correctly.
    print("Đã hết số lượt đoán")
    print("Con số chính xác là:", number)
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,659
|
yenlt6/learningpython
|
refs/heads/main
|
/python4testers_student/code_along/card_game/db.py
|
from pymysql import connect, cursors, Error
import sys
# MySQL connection settings; the schema is created from game_log.sql and
# the server must be running before the app starts.
config = {
    'host': 'localhost',
    'user': 'root',
    'password': '123456',
    'database': 'game_log',
    'cursorclass': cursors.DictCursor
}
# Connect once at import time; abort the whole process if MySQL is down.
try:
    cnx = connect(**config)
    cur = cnx.cursor()
except Error as e:
    print('Lỗi kết nối đến MySQL Server, không thể khởi chạy ứng dụng')
    print(e.args[1])
    sys.exit()
def log(winner, players):
    """Persist one finished game: the winner, plus one row per player.

    ``players`` must be a sequence of dicts with keys player/cards/
    point/biggest_card, matching the named placeholders below.
    """
    sql = '''INSERT INTO games (winner) VALUES (%s)'''
    cur.execute(sql, winner)
    # lastrowid of the games insert links the per-player log rows.
    game_id = cur.lastrowid
    sql = f'''
    INSERT INTO logs (game_id, player, cards, point, biggest_card)
    VALUES ({game_id}, %(player)s, %(cards)s, %(point)s, %(biggest_card)s)
    '''
    cur.executemany(sql, players)
    cnx.commit()
def get_last_game():
    """Return ``(game, players)`` for the most recently played game.

    Raises Exception when no games have been logged yet.
    """
    # NOTE(review): no LIMIT 1 — fetchone() takes the first row of the
    # fully ordered result; correct, but fetches more than needed.
    sql = '''
    SELECT *
    FROM games AS g
    ORDER BY g.play_at DESC
    '''
    cur.execute(sql)
    game = cur.fetchone()
    if not game:
        raise Exception('Không có lịch sử game\nChơi vài game vui vẻ đi 😉\n')
    # game_id is an integer produced by our own insert, so interpolating
    # it into the SQL string is not an injection vector here.
    sql = f'''
    SELECT *
    FROM logs
    WHERE game_id = {game['game_id']}
    '''
    cur.execute(sql)
    players = cur.fetchall()
    return game, players
def history():
    """Return ``(total_game, records)`` — today's win counts per player,
    most wins first.

    Raises Exception when no games were played today.
    """
    sql = '''
    SELECT
    winner as player,
    COUNT(*) AS game_won
    FROM games AS g
    WHERE DATE(g.play_at) = CURDATE()
    GROUP BY player
    ORDER BY game_won DESC
    '''
    cur.execute(sql)
    records = cur.fetchall()
    if not records:
        raise Exception('Không có lịch sử game\nChơi vài game vui vẻ đi 😉\n')
    # Total games today = sum of per-player win counts.
    total_game = sum([r['game_won'] for r in records])
    return total_game, records
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,660
|
yenlt6/learningpython
|
refs/heads/main
|
/python4testers_student/code_along/vingroup/payments/__init__.py
|
# Runs on package import; demonstrates package initialisation order.
print(f'Gọi __init__.py cho {__name__}')
# Example package-level data, importable as ``payments.A``.
A = ['quux', 'corge', 'grault']
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,661
|
yenlt6/learningpython
|
refs/heads/main
|
/Day6Date1409/wordcount.py
|
#!/usr/bin/python -tt
# Copyright 2010 Google Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
# Quang Le - Techmaster.vn - 09/2021
########
"""Wordcount exercise
Hàm main() đã được định nghĩa hoàn chỉnh ở dưới. Bạn phải viết hàm print_words()
và print_top() mà sẽ được gọi từ main().
1. Với đối số --count, viết hàm print_words(filename) đếm số lần xuất hiện của mỗi từ
trong file đầu vào và in ra theo định dạng sau:
word1 count1
word2 count2
...
In danh sách trên theo thứ tự từ điển các từ (python sẽ sắp xếp dấu câu đứng trước
các chữ cái nên cũng không thành vấn đề). Lưu tất cả các từ dưới dạng chữ thường,
vì vậy 'The' và 'the' được tính là cùng một từ.
2. Với đối số --topcount, viết hàm print_top(filename) tương tự như print_words()
nhưng chỉ in ra 20 từ thông dụng nhất sắp xếp theo từ thông dụng nhất ở trên cùng.
Tùy chọn: định nghĩa một hàm helper để tránh lặp lại code trong các hàm
print_words() và print_top().
"""
import sys
# +++your code here+++
def _count_words(filename):
    """Helper: map each lower-cased whitespace-separated word in the file
    to its occurrence count (shared by print_words and print_top)."""
    counts = {}
    with open(filename) as f:
        for word in f.read().lower().split():
            counts[word] = counts.get(word, 0) + 1
    return counts


def print_words(filename):
    """Print every word and its count ("word count"), one per line,
    in lexicographic word order — per the module docstring's spec."""
    counts = _count_words(filename)
    for word in sorted(counts):
        print(word, counts[word])


def print_top(filename):
    """Print the 20 most common words, most frequent first."""
    counts = _count_words(filename)
    ranked = sorted(counts.items(), key=lambda kv: kv[1], reverse=True)
    for word, count in ranked[:20]:
        print(word, count)
###
# This basic command line argument parsing code is provided and
# calls the print_words() and print_top() functions which you must define.
def main():
    """Dispatch --count / --topcount to the matching printer."""
    if len(sys.argv) != 3:
        print('usage: ./wordcount.py {--count | --topcount} file')
        sys.exit(1)
    option, filename = sys.argv[1], sys.argv[2]
    if option == '--count':
        print_words(filename)
    elif option == '--topcount':
        print_top(filename)
    else:
        print('unknown option: ' + option)
        sys.exit(1)


if __name__ == '__main__':
    main()
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,662
|
yenlt6/learningpython
|
refs/heads/main
|
/python4testers_student/code_along/oop/solutions/bank_account_advance.py
|
from datetime import datetime
class Customer:
    """Bank customer record built from simple string fields."""

    def __init__(self, name, dob, email, phone):
        self.name = name
        # dob arrives as 'dd/mm/YYYY' and is stored as a datetime.date.
        self.dob = datetime.strptime(dob, "%d/%m/%Y").date()
        self.email = email
        self.phone = phone

    def get_info(self):
        """Print the customer's details, one field per line."""
        print("Customer name:", self.name)
        print("Date of birth:", self.dob)
        print("Email:", self.email)
        print("Phone:", self.phone)
class BankAccount:
    """An account owned by a customer, with a non-negative balance."""

    def __init__(self, account_number, owner, balance=0):
        self._account_number = account_number
        # BUG FIX: the original did ``self._owner = Customer()``, which
        # both ignored the ``owner`` argument and raised TypeError
        # (Customer requires 4 arguments). Store the passed-in owner.
        self._owner = owner
        self.balance = balance  # routed through @balance.setter for validation

    @property
    def account_number(self):
        return self._account_number

    @property
    def owner(self):
        return self._owner

    @property
    def balance(self):
        return self._balance

    @balance.setter
    def balance(self, balance):
        if balance >= 0:
            self._balance = balance
        else:
            raise ValueError("Số dư phải lớn hơn 0")

    def display(self):
        """Print account number, owner details and balance."""
        print("Account info")
        print("Account number:", self.account_number)
        self.owner.get_info()
        print("Balance:", self.balance, "₫")

    def withdraw(self, amount):
        """Withdraw a positive amount not exceeding the balance."""
        if 0 < amount <= self.balance:
            self.balance -= amount
        else:
            raise ValueError(
                "Số tiền phải lớn hơn 0 và không được vượt quá số dư hiện tại")

    def deposit(self, amount):
        """Deposit a strictly positive amount."""
        if amount > 0:
            self.balance += amount
        else:
            raise ValueError("Số tiền phải lớn hơn 0")
class SavingAccount(BankAccount):
    """Account variant that earns monthly interest."""
    # Flat monthly rate: 0.5%.
    monthly_interest_rate = 0.005

    def calculate_interest(self):
        """One month's interest on the current balance."""
        return self.monthly_interest_rate * self.balance
# Demo: create a customer-owned saving account, show it and its interest.
# NOTE(review): with the original BankAccount.__init__ (which called
# ``Customer()`` with no arguments) this demo raises TypeError before
# printing anything — it only works once that constructor bug is fixed.
ba = Customer("Ba Nguyễn", "24/05/1992", "ba@techmaster.vn", "09xx")
my_account = SavingAccount("1", ba, 1_000_000_000)
my_account.display()
print(my_account.calculate_interest())
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,663
|
yenlt6/learningpython
|
refs/heads/main
|
/Day18UnitTest2310/testing/tests/test_real_server.py
|
from unittest import skipIf
# Local imports...
from constants import SKIP_TAGS
from services import get_users
@skipIf('real' in SKIP_TAGS, 'Skipping tests that hit the real API server.')
def test_request_response():
    """Integration check against the real API server (skippable via
    the SKIP_TAGS environment variable)."""
    response = get_users()
    # Idiom fix: membership tests against a mapping need no .keys().
    assert 'Content-Type' in response.headers
    assert response.headers['Content-Type'] == 'application/json; charset=utf-8'
    # Fix: `assert response.ok, True` used True as the (useless) assert
    # message; a plain truthiness check is what was meant.
    assert response.ok
    assert isinstance(response.json(), list)
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,664
|
yenlt6/learningpython
|
refs/heads/main
|
/Day16Exam2_TEMP/card_game/deck.py
|
from Day16Exam2Date1510.card_game.card import Card
class Deck(Card):
    '''
    Represents the 36-card deck.
    NOTE(review): inheriting from Card is questionable — a deck *contains*
    cards; composition would model this better. Kept as-is (all methods are
    unimplemented stubs).
    '''
    def build(self):
        '''Build the deck: a list of 9 ranks x 4 suits = 36 Card objects (stub).'''
        pass
    def shuffle_card(self):
        '''Shuffle the deck in place, e.g. with random.shuffle (stub).'''
        pass
    def deal_card(self):
        '''Draw one random card (one random list element) from the deck (stub).'''
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,665
|
yenlt6/learningpython
|
refs/heads/main
|
/Day6Date1409/hello.py
|
# Minimal hello-world style check.
print("yenle")
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,666
|
yenlt6/learningpython
|
refs/heads/main
|
/python4testers_student/testing/constants.py
|
import os
# Base URL of the fake REST API used by the exercises.
BASE_URL = 'http://jsonplaceholder.typicode.com'
# NOTE(review): os.getenv returns a *string* whenever the variable is set, so
# any non-empty value (even "0" or "False") is truthy — confirm that callers
# only test this flag for truthiness deliberately.
SKIP_REAL = os.getenv('SKIP_REAL', False)  # default: do not skip
# Space-separated list of test tags to skip, e.g. SKIP_TAGS="real slow".
SKIP_TAGS = os.getenv('SKIP_TAGS', '').split()
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,667
|
yenlt6/learningpython
|
refs/heads/main
|
/python4testers_student/code_along/card_game/player.py
|
class Player:
    """A player with an auto-assigned id, a hand of cards and a 3-card score."""
    count = 1   # next id to hand out
    auto = 1    # counter used to label players who gave no name
    def __init__(self, name):
        if not name:
            name = f'Nghiện nhưng còn ngại {Player.auto} 🦊'
            Player.auto += 1
        self._id = Player.count
        Player.count += 1
        self._name = name
        self._cards = []
    @property
    def id(self):
        return self._id
    @property
    def name(self):
        return self._name
    @property
    def cards(self):
        return self._cards
    @property
    def info(self):
        """Width-2 id followed by the player's name."""
        return '{:2} {}'.format(self.id, self.name)
    @property
    def point(self):
        """Hand score: sum of card values mod 10, where a remainder of 0 scores 10."""
        total = sum(int(card) for card in self.cards) % 10
        return total or 10
    @property
    def biggest_card(self):
        return max(self.cards)
    def add_card(self, card):
        self.cards.append(card)
    def remove_cards(self):
        self.cards.clear()
    def flip_cards(self):
        """Render the hand as a space-separated string."""
        return ' '.join(str(card) for card in self.cards)
    def __gt__(self, other):
        # Higher point wins; on a tie, the single biggest card decides.
        if self.point != other.point:
            return self.point > other.point
        return self.biggest_card > other.biggest_card
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,668
|
yenlt6/learningpython
|
refs/heads/main
|
/Day13Exception0510/factorial.py
|
class Error(Exception):
    """Base class for the exceptions raised by this module."""
    pass
class NotIntegerError(Error):
    """Raised when the input value is not an integer."""
    def __init__(self, value):
        message = f"Không phải số nguyên: {value}"
        # Pass the message to Exception so str(e) and tracebacks show it too
        # (previously super().__init__ was never called, so str(e) was empty).
        super().__init__(message)
        self.value = value
        self.message = message
class NegativeError(Error):
    """Raised when the input value is a negative number."""
    def __init__(self, value):
        message = f"Không phải số nguyên dương: {value}"
        # Pass the message to Exception so str(e) and tracebacks show it too
        # (previously super().__init__ was never called, so str(e) was empty).
        super().__init__(message)
        self.value = value
        self.message = message
def factorial(number):
    """Return number!; raise NotIntegerError / NegativeError for bad input."""
    # type() (not isinstance) deliberately rejects bool as well.
    if type(number) != int:
        raise NotIntegerError(number)
    if number < 0:
        raise NegativeError(number)
    if number == 0:
        return 1
    product = 1
    for factor in range(2, number + 1):
        product *= factor
    return product
# int(input())
# print(factorial("abc"))
# Demo: compute factorials for a mixed list; the custom exceptions raised by
# factorial() for non-integer or negative items are caught and reported below.
list_ = ["hello", 0, 1, -2, 3,6]
for value in list_:
    try:
        print(f"{value}! =", factorial(value))
    except (NotIntegerError,NegativeError) as e:
        print("lay ra thuoc tinh message của exception: ", e.message,"\nvalue của exception có có ý nghĩa sử dụng:-------value:",e.value )
        print()
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,669
|
yenlt6/learningpython
|
refs/heads/main
|
/Day20FileHandling0611/file_handling/file_handling/app.py
|
from docx import Document  # third-party python-docx: create/edit Word documents
from docx.shared import Cm
doc = Document()  # create a brand-new, empty document
# doc = docx.document.Document
# doc.add_picture('avatar.jpg', width=Cm(4))
doc.add_paragraph("Ba Nguyễn")
doc.add_paragraph("Mô tả bản thân...")
doc.add_paragraph("Thông tin liên hệ...")
doc.add_heading('Thông tin bản thân...', level=2)
# Build one paragraph from several runs so individual runs can be styled.
para = doc.add_paragraph("Tui là ")
para.add_run("Ba").bold = True
para.add_run(". Tui đến từ ")
para.add_run("Nam Định").italic = True
# (position, detail) rows rendered into the experience table below.
experiences = (
    ('Developer', 'Developer....'),
    ('Teacher', 'Teacher....'),
)
doc.add_heading("Kinh nghiệm làm việc...", level=2)
table = doc.add_table(rows=1, cols=2)
# First row acts as the header.
position, detail = table.rows[0].cells
position.text = 'Vị trí'
detail.text = 'Chi tiết'
for p, d in experiences:
    position, detail = table.add_row().cells
    position.text = p
    detail.text = d
doc.save('cv.docx')
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,670
|
yenlt6/learningpython
|
refs/heads/main
|
/Date16Exam2Date1610/card.py
|
class Card:
    '''
    Represents a single playing card.
    A card has a rank ('A', 2..9) and a suit ('♠', '♣', '♦', '♥').
    The unconventional get*/set* property names are kept for backward
    compatibility with existing callers.
    '''
    def __init__(self, rank, suit):
        self.setRank = rank          # validates and stores self.rank ('A' -> 1)
        self.suit = suit
        self.setsuitNumber = suit    # stores self.suitNumber (tie-break value)
        self.setRankA = rank         # stores self.rankA ('A' or None)
    @property
    def getRank(self):
        return self.rank
    @getRank.setter
    def setRank(self, rank):
        if rank == 'A':
            self.rank = 1
        # Bug fix: the original condition was `rank <= 2 and rank >= 9`,
        # which is always False, so out-of-range ranks were silently accepted.
        elif rank < 2 or rank > 9:
            print("Rank khong hop le")
        else:
            self.rank = rank
    @property
    def getRankA(self):
        return self.rankA
    # Bug fix: this setter was decorated with @getRank.setter, which gave
    # setRankA getRank's getter (reading self.rank instead of self.rankA).
    @getRankA.setter
    def setRankA(self, rank):
        if rank == 'A':
            self.rankA = 'A'
        else:
            self.rankA = None
    @property
    def getSuitNumber(self):
        return self.suitNumber
    @getSuitNumber.setter
    def setsuitNumber(self, suit):
        # Suit strength used to break rank ties: ♠=1 < ♣=2 < ♥=3 < ♦=4.
        if suit == '♠':
            self.suitNumber = 1
        elif suit == '♣':
            self.suitNumber = 2
        elif suit == '♦':
            self.suitNumber = 4
        elif suit == '♥':
            self.suitNumber = 3
    def __str__(self):
        # Bug fix: __str__ previously print()ed and returned None, so
        # str(card) / print(card) raised TypeError; return the string instead.
        if self.rankA == 'A':
            return f'{self.rankA}{self.suit}'
        return f'{self.rank}{self.suit}'
    def __gt__(self, other):
        '''Return the stronger card, or 0 when both rank and suit are equal.'''
        if self.rank > other.rank:
            return self
        elif self.rank == other.rank:
            if self.suitNumber > other.suitNumber:
                return self
            elif self.suitNumber == other.suitNumber:
                return 0
            else:
                return other
        else:
            return other
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,671
|
yenlt6/learningpython
|
refs/heads/main
|
/python4testers_student/code_along/card_game/game.py
|
import os
import sys
import db
import error
from deck import Deck
from player import Player
class Game:
    """Console controller for the 3-card game: owns the deck, the players and the menu loop."""
    min_players = 2
    max_players = 12
    cards_per_player = 3
    def __init__(self):
        # Round state flags: currently in a round / cards dealt / cards revealed.
        self.is_playing = False
        self.is_dealt = False
        self.is_flipped = False
        self._deck = Deck()
        self._players = []
        # Menu dispatch table: menu choice -> bound handler.
        self.choices = {
            '1': self.list_players,
            '2': self.add_player,
            '3': self.remove_player,
            '4': self.dealing_card,
            '5': self.flip_cards,
            '6': self.last_game,
            '7': self.history,
            '8': self.quit
        }
    @property
    def deck(self):
        return self._deck
    @property
    def players(self):
        return self._players
    def cls(self):
        """Clear the terminal (Windows `cls`, otherwise `clear`)."""
        os.system('cls' if os.name == 'nt' else 'clear')
    def setup(self):
        """Greet the user and keep prompting until a valid player count is entered."""
        self.cls()
        print('Welcome!!!')
        print('Chào mừng đến với game đánh bài 3 cây (vui thôi nha)')
        print('Có bao nhiêu người muốn chơi?')
        while True:
            try:
                num_of_players = int(input('> '))
                if num_of_players < Game.min_players:
                    raise error.MinimumPlayerError(
                        f'Tối thiểu {Game.min_players} người chơi')
                elif num_of_players > Game.max_players:
                    raise error.MaximumPlayerError(
                        f'Tối đa {Game.max_players} người chơi')
                else:
                    for i in range(num_of_players):
                        self.add_player()
                    break
            except error.Error as e:
                print(e.message)
            except ValueError as e:
                print('Có muốn chơi không zị?')
                print('Nhập một số đi :)')
    def menu(self):
        """Print the numbered feature menu."""
        num_of_player = len(self.players)
        print(f'1. Danh sách người chơi ({num_of_player})')
        print('2. Thêm người chơi')
        print('3. Loại người chơi')
        print('4. Chia bài')
        print('5. Lật bài')
        print('6. Xem lại game vừa chơi')
        print('7. Xem lịch sử chơi hôm nay')
        print('8. Công an tới, tốc biến :)')
    def list_players(self):
        """Print a table of player ids and names."""
        print('{:2} {}'.format('ID', 'Tên'))
        for player in self.players:
            print(player.info)
    def add_player(self):
        """Prompt for a name (truncated to 6 chars) and append a new Player; blocked mid-round or at the cap."""
        if self.is_playing:
            raise error.PlayingError()
        elif len(self.players) >= Game.max_players:
            raise error.MaximumPlayerError()
        else:
            name = input(
                f'Tên người chơi {len(self.players) + 1}: ').strip()[0:6]
            self.players.append(Player(name))
    def remove_player(self):
        """Remove a player by their 1-based id; blocked mid-round or at the minimum count."""
        if self.is_playing:
            raise error.PlayingError()
        elif len(self.players) <= Game.min_players:
            raise error.MinimumPlayerError()
        else:
            self.list_players()
            print()
            id = int(input('Nhập ID người chơi: '))
            print(id)
            self.cls()
            try:
                player = self.players[id - 1]
                self.players.remove(player)
                print('Một thanh niên đã nhẵn túi :)')
            except IndexError as e:
                raise error.PlayerDoesNotExistsError()
    def dealing_card(self):
        """Clear old hands, rebuild and shuffle the deck, then deal 3 cards round-robin."""
        if self.is_dealt:
            raise error.DealtError()
        else:
            for player in self.players:
                player.remove_cards()
            self.deck.build()
            self.deck.shuffle_cards()
            for i in range(Game.cards_per_player):
                for player in self.players:
                    card = self.deck.deal_card()
                    player.add_card(card)
            self.is_dealt = True
            self.is_flipped = False
            self.is_playing = True
            print('Bài đã chia :)\nXuống tiền đi nào')
    def flip_cards(self):
        """Reveal every hand, announce the winner and log the round to the database."""
        if not self.is_dealt:
            raise error.NotDealtError()
        if self.is_flipped:
            raise error.FlippedError()
        else:
            # max() uses Player.__gt__: highest point, ties broken by biggest card.
            self.winner = max(self.players)
            for player in self._players:
                print(f'Tay chơi: {player.name}')
                print(
                    f'Bộ bài: {player.flip_cards()} Điểm: {player.point:2} Lá lớn nhất: {player.biggest_card}')
                print()
            print(f'🏆 Chúc mừng tay chơi {self.winner.name} có xiền :)\n')
            self.is_dealt = False
            self.is_flipped = True
            self.is_playing = False
            players = [{'player': p.name, 'cards': p.flip_cards(
            ), 'point': p.point, 'biggest_card': p.biggest_card} for p in self.players]
            db.log(self.winner.name, players)
    def last_game(self):
        """Replay the most recent round from the database; blocked mid-round."""
        if self.is_playing:
            raise error.PlayingError()
        else:
            last_game, players = db.get_last_game()
            print(last_game['play_at'])
            print()
            for p in players:
                print(f'Tay chơi: {p["player"]}')
                print(
                    f'Bộ bài: {p["cards"]} Điểm: {p["point"]} Lá bài lớn nhất: {p["biggest_card"]}')
                print()
            print(f'🏆 Tay chơi chiến thắng: {last_game["winner"]} :)')
    def history(self):
        """Print today's totals: games played and wins per player; blocked mid-round."""
        if self.is_playing:
            raise error.PlayingError()
        else:
            total_game, records = db.history()
            print(f'Hôm nay đã chơi: {total_game} ván bài 🤣\n')
            for r in records:
                print(f'{r["player"]:6} thắng {r["game_won"]} ván')
    def run(self):
        """Main loop: set up the players, then show the menu and dispatch choices forever."""
        self.setup()
        self.cls()
        while True:
            self.menu()
            try:
                c = input("> ")
                choice = self.choices.get(c)
                self.cls()
                if choice:
                    choice()
                    print()
                else:
                    raise error.FunctionDoesNotExists()
            except ValueError as e:
                raise error.FunctionDoesNotExists()
            except error.Error as e:
                print(e.message)
    def quit(self):
        """Say goodbye and exit the process."""
        print("Have fun :)")
        sys.exit()
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,672
|
yenlt6/learningpython
|
refs/heads/main
|
/Day16Exam2_TEMP/card_game/main.py
|
import card as c
import game as g
import deck as d
import player as p
def main():
    """Initialise the game, greet the players and run the (in-progress) card demo."""
    list_player = []  # TODO: collect Player objects here
    print("Wellcome!!!\n chào mừng đến với game đánh bài 3 cây ( vui thôi nha)")
    print(" Có bao nhiêu người muốn chơi:")
    try:
        # Bug fix: int(input(...)) was *outside* the try block, so a
        # non-numeric player count crashed instead of being caught — the
        # except ValueError below only wrapped plain input() calls, which
        # never raise ValueError.
        number_player = int(input("Nhập số người chơi:"))
        name_1 = input("Tên người chơi 1: ")
        name_2 = input("Tên người chơi 2: ")
    except ValueError as e:
        print(e)
    # NOTE(review): card_function is defined as a method on the Card class in
    # card.py, not as a module-level function — confirm this call resolves.
    c.card_function()
if __name__ == '__main__':
    main()
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,673
|
yenlt6/learningpython
|
refs/heads/main
|
/Day1Date2808/main.py
|
print("helo world")
print(4.5//3)
print(True + True)
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,674
|
yenlt6/learningpython
|
refs/heads/main
|
/Day16Exam2_TEMP/card_game/db.py
|
'''Kết nối CSDL'''
# Kết nối đến MySQL
# from pymysql import connect, cursors
# config = {
# 'host': 'remotemysql.com',
# 'user': 'UyMDXcxEoz',
# 'password': 'lFJmWnNbEC',
# 'database': 'UyMDXcxEoz'
# }
# conn = connect(**config)
# conn.close()
# import the time module
# import the time module
import time
from datetime import date
from datetime import datetime
from pymysql import connect, cursors
# Connection settings for the game-log database.
# NOTE(review): credentials are hard-coded in source — consider environment
# variables or a config file.
config = {
    "host": "10.124.60.67",
    "user": "omd_qc",
    "password": "omd_qc",
    "database": "game_log",
}
# Smoke-test the connection at import time, then close it immediately.
conn = connect(**config)
conn.close()
today = datetime.now()
def log():
    '''
    Write game information into the games and logs tables.
    The games table stores the winner's name; the logs table stores each
    player's hand, points and biggest card for the game.
    Note: after INSERTing into games, the new game id is available via
    cursor.lastrowid. (Not implemented yet.)
    '''
    pass
def get_last_game():
    '''Fetch the most recent game from both the games and logs tables (not implemented yet).'''
    pass
def history():
    '''
    Fetch the play history: total games played and games won per player
    (GROUP BY with aggregate functions). Not implemented yet.
    '''
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,675
|
yenlt6/learningpython
|
refs/heads/main
|
/python4testers_student/testing/wallet/test_wallet.py
|
# test_wallet.py
import pytest
from wallet import Wallet, InsufficientAmount
@pytest.mark.ini
def test_default_initial_amount():
    """A wallet created without arguments starts empty."""
    assert Wallet().balance == 0
@pytest.mark.ini
def test_setting_initial_amount():
    """A wallet remembers the opening balance it was given."""
    assert Wallet(100).balance == 100
def test_wallet_add_cash():
    """Adding cash increases the balance by that amount."""
    w = Wallet(10)
    w.add_cash(90)
    assert w.balance == 100
def test_wallet_spend_cash():
    """Spending cash decreases the balance by that amount."""
    w = Wallet(100)
    w.spend_cash(10)
    assert w.balance == 90
# def test_wallet_spend_cash_again():
# wallet100 = Wallet(100)
# wallet.spend_cash(10)
# assert wallet.balance == 90
def test_wallet_spend_cash_raises_exception_on_insufficient_amount():
    """Spending more than the balance raises InsufficientAmount."""
    w = Wallet()
    with pytest.raises(InsufficientAmount):
        w.spend_cash(100)
# @pytest.fixture
# def empty_wallet():
# '''Returns a Wallet instance with a zero balance'''
# return Wallet()
# @pytest.fixture
# def wallet20():
# '''Returns a Wallet instance with a balance of 20'''
# return Wallet(20)
# @pytest.mark.parametrize("earned,spent,expected", [
# (30, 10, 20),
# (20, 2, 18),
# ])
# def test_params(earned, spent, expected):
# my_wallet = Wallet()
# my_wallet.add_cash(earned)
# my_wallet.spend_cash(spent)
# assert my_wallet.balance == expected
# Kết hợp fixture và parametrize
# @pytest.mark.parametrize("earned,spent,expected", [
# (30, 10, 120),
# (20, 2, 118),
# ])
# def test_transactions(wallet100, earned, spent, expected):
# wallet100.add_cash(earned)
# wallet100.spend_cash(spent)
# assert wallet100.balance == expected
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,676
|
yenlt6/learningpython
|
refs/heads/main
|
/Day18UnitTest2310/testing/tests/test_todos_extra.py
|
from unittest.mock import Mock, patch
from services import get_todos, get_uncompleted_todos
# 11
from unittest import skipIf
from constants import SKIP_REAL
class TestTodosExtra(object):
    """Unit tests for get_todos with services.requests.get patched for the whole class."""
    @classmethod
    def setup_class(cls):
        # Patch requests.get where it is *used* (inside the services module).
        cls.mock_get_patcher = patch('services.requests.get')
        cls.mock_get = cls.mock_get_patcher.start()
    @classmethod
    def teardown_class(cls):
        cls.mock_get_patcher.stop()
    def test_getting_todos_when_response_is_ok(self):
        """get_todos returns the response when the HTTP call succeeds."""
        # Configure the mock to return a response with an OK status code.
        self.mock_get.return_value.ok = True
        todos = [{
            'userId': 1,
            'id': 1,
            'title': 'Make the bed',
            'completed': False
        }]
        # NOTE(review): this replaces return_value with a fresh Mock, discarding
        # the ok=True set above; a bare Mock attribute is still truthy, so the
        # test passes anyway — confirm this is intentional.
        self.mock_get.return_value = Mock()
        self.mock_get.return_value.json.return_value = todos
        # Call the service, which will send a request to the server.
        response = get_todos()
        # If the request is sent successfully, then I expect a response to be returned.
        assert response.json() == todos
    def test_getting_todos_when_response_is_not_ok(self):
        """get_todos returns None when the HTTP call fails."""
        # Configure the mock to not return a response with an OK status code.
        self.mock_get.return_value.ok = False
        # Call the service, which will send a request to the server.
        response = get_todos()
        # If the response contains an error, I should get no todos.
        assert response is None
class TestUncompletedTodos(object):
    """Unit tests for get_uncompleted_todos with services.get_todos patched for the class."""
    @classmethod
    def setup_class(cls):
        # Patch the already-tested get_todos so these tests only cover filtering.
        cls.mock_get_todos_patcher = patch('services.get_todos')
        cls.mock_get_todos = cls.mock_get_todos_patcher.start()
    @classmethod
    def teardown_class(cls):
        cls.mock_get_todos_patcher.stop()
    def test_getting_uncompleted_todos_when_todos_is_not_none(self):
        """Only todos with completed == False survive the filter."""
        todo1 = {
            'userId': 1,
            'id': 1,
            'title': 'Make the bed',
            'completed': False
        }
        todo2 = {
            'userId': 2,
            'id': 2,
            'title': 'Walk the dog',
            'completed': True
        }
        # Configure mock to return a response with a JSON-serialized list of todos.
        self.mock_get_todos.return_value = Mock()
        self.mock_get_todos.return_value.json.return_value = [todo1, todo2]
        # Call the service, which will get a list of todos filtered on completed.
        uncompleted_todos = get_uncompleted_todos()
        # Confirm that the mock was called.
        assert self.mock_get_todos.called == True
        # Confirm that the expected filtered list of todos was returned.
        assert uncompleted_todos == [todo1]
    def test_getting_uncompleted_todos_when_todos_is_none(self):
        """When get_todos yields nothing, the filter returns an empty list."""
        # Configure mock to return None.
        self.mock_get_todos.return_value = None
        # Call the service, which will return an empty list.
        uncompleted_todos = get_uncompleted_todos()
        # Confirm that the mock was called.
        assert self.mock_get_todos.called == True
        # Confirm that an empty list was returned.
        assert uncompleted_todos == []
# 11
@skipIf(SKIP_REAL, 'Skipping tests that hit the real API server.')
# 10
def test_integration_contract():
    """Contract test: the real API's todo keys must match the mocked payload's keys."""
    # Call the service to hit the actual API.
    actual = get_todos()
    actual_keys = actual.json().pop().keys()
    # Call the service to hit the mocked API.
    with patch('services.requests.get') as mock_get:
        mock_get.return_value.ok = True
        mock_get.return_value.json.return_value = [{
            'userId': 1,
            'id': 1,
            'title': 'Make the bed',
            'completed': False
        }]
        mocked = get_todos()
        mocked_keys = mocked.json().pop().keys()
    # An object from the actual API and an object from the mocked API should have
    # the same data structure.
    assert list(actual_keys) == list(mocked_keys)
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,677
|
yenlt6/learningpython
|
refs/heads/main
|
/Day16Exam2_TEMP/card_game/card.py
|
class Card:
    '''
    Represents a single playing card.
    A card has a rank ('A', 2, 3, 4, 5, 6, 7, 8, 9) and a suit
    ('♠', '♣', '♦', '♥'); alternative suit labels: ('G', 'H', 'J', 'K').
    '''
    def __init__(self, rank, suit):
        self.rank = rank
        self.suit = suit
    def __str__(self):
        '''Render the card for display.'''
        return f'Card({self.rank},{self.suit})'
    def __gt__(self, other):
        '''Compare two cards: higher rank wins; equal ranks fall back to the suit.'''
        # Bug fix: __gt__ was defined twice (a docstring-only stub followed by
        # a second def that shadowed it) and carried unused locals val1/val2;
        # merged into the single implementation that actually ran.
        if self.rank > other.rank:
            return True
        elif self.rank == other.rank and self.suit > other.suit:
            return True
        else:
            return False
    def card_function(self):
        # Placeholder hook used by main.py while the game is under construction.
        print("Yeeennn")
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,678
|
yenlt6/learningpython
|
refs/heads/main
|
/python4testers_student/code_along/card_game/error.py
|
class Error(Exception):
    '''Base class for all game errors'''
    pass
class MaximumPlayerError(Error):
    '''Raised when the number of players exceeds the allowed maximum'''
    message = 'Số lượng người chơi quá đông và nguy hiểm rồi :)\n'
    def __init__(self, message=message):
        self.message = message
class MinimumPlayerError(Error):
    '''Raised when the number of players is below the required minimum'''
    message = 'Còn quá ít người chơi, rủ thêm đi cho vui :)\n'
    def __init__(self, message=message):
        self.message = message
class PlayerDoesNotExistsError(Error):
    '''Raised when the requested player id does not exist'''
    message = '''ID người chơi không tồn tại\n'''
    def __init__(self, message=message):
        self.message = message
class PlayingError(Error):
    '''Raised for operations that are not allowed while a round is in progress'''
    message = 'Game đang chơi, kết thúc ván đã nhé :)\n'
    def __init__(self, message=message):
        self.message = message
class DealtError(Error):
    '''Raised when dealing is attempted more than once in the same round'''
    message = 'Bài đã chia rồi nhé :)\nXuống tiền đi nào :)\n'
    def __init__(self, message=message):
        self.message = message
class NotDealtError(Error):
    '''Raised when cards are flipped before they have been dealt'''
    message = 'Chưa chia bài mà :)\nGọi thêm người chơi cho vui :)\n'
    def __init__(self, message=message):
        self.message = message
class FlippedError(Error):
    '''Raised when the cards are flipped more than once in the same round'''
    message = 'Bài đã lật rồi mà :)\nCay cú thì làm ván mới gỡ x2 nhé :)\n'
    def __init__(self, message=message):
        self.message = message
class FunctionDoesNotExists(Error):
    '''Raised when a non-existent menu option is chosen'''
    message = 'Chức năng không tồn tại\n'
    def __init__(self, message=message):
        self.message = message
class CancelError(Error):
    '''Raised when a feature is cancelled, e.g. cancelling add/remove player'''
    def __init__(self, message=''):
        self.message = message
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,679
|
yenlt6/learningpython
|
refs/heads/main
|
/Date16Exam2Date1610/deck.py
|
import card as ca
import random
class Deck:
    '''
    Represents the 36-card deck (9 ranks x 4 suits).
    '''
    def __init__(self):
        self.liscard = self.build()
    def build(self):
        '''Build and return the list of all 36 cards.'''
        ranks = ["A", 2, 3, 4, 5, 6, 7, 8, 9]
        # Bug fix: the suits were stored in a *set*, so build order was
        # nondeterministic between interpreter runs; a list keeps the deck
        # reproducible before shuffling.
        suits = ['♠', '♣', '♦', '♥']
        return [ca.Card(rank, suit) for rank in ranks for suit in suits]
    # Shuffle the deck
    def shuffle_card(self):
        '''Shuffle the deck in place.'''
        random.shuffle(self.liscard)
    # Draw one card from the deck
    def deal_card(self):
        '''Deal (pop) the top card of the deck.'''
        return self.liscard.pop()
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,680
|
yenlt6/learningpython
|
refs/heads/main
|
/python4testers_student/code_along/vingroup/payments/vinid.py
|
def id():
    """Demo of intra-package relative imports: print the sibling products package, then run lux_sa()."""
    # NOTE: this function name shadows the builtin id() within this module.
    print("[vinid] id()")
    from .. import products
    print(products)
    from ..products.vinfast import lux_sa
    lux_sa()
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,681
|
yenlt6/learningpython
|
refs/heads/main
|
/Day7Date1609/babynames_mshanh_thamkhao.py
|
#!/usr/bin/python
# Copyright 2010 Google Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
# Python for Tester - OneMount Class
# Quang Le - quangdle@gmail.com - 09/2021
import sys
import re
import io
"""Baby Names exercise
Định nghĩa hàm extract_names() dưới đây và gọi từ hàm main().
Cấu trúc các tag html trong các file baby.html như sau:
...
<h3 align="center">Popularity in 1990</h3>
....
<tr align="right"><td>1</td><td>Michael</td><td>Jessica</td>
<tr align="right"><td>2</td><td>Christopher</td><td>Ashley</td>
<tr align="right"><td>3</td><td>Matthew</td><td>Brittany</td>
...
Các bước nên làm tuần tự:
-Trích xuất năm
-Lấy và in ra tên và thứ hạng phổ biến
-Xây danh sách [year, 'name rank', ... ] và in ra
-Sửa hàm main() để dùng hàm extract_names.
"""
def extract_names(filename):
    """
    Given a baby-names .html file, return a list starting with the year,
    followed by 'name rank' strings in alphabetical order, e.g.
    ['2006', 'Aaliyah 91', 'Aaron 57', 'Abagail 895', ...].
    """
    # Fixes vs original: regex patterns are raw strings (the non-raw "\d"
    # escapes are deprecated), named match groups read via group(1), and the
    # stray semicolon after sort() is gone. Logic is unchanged.
    year = 0
    list_data = []
    with open(filename, "r", encoding='utf-8-sig') as file:
        for line in file:
            # Capture the year from the first "Popularity in YYYY" heading only.
            if year == 0:
                re_year = re.search(r"Popularity in (\d+)", line)
                if re_year:
                    year = re_year.group(1)
            re_name = re.findall(r"<td>(\d+)</td><td>([a-zA-Z]+)</td><td>([a-zA-Z]+)</td>", line)
            if re_name:
                rank, boy_name, girl_name = re_name[0]
                list_data.append(boy_name + " " + rank)
                list_data.append(girl_name + " " + rank)
    list_data.sort()
    list_data.insert(0, year)
    return list_data
def main():
    """CLI entry point: extract names from each file, print them or write summary.txt."""
    # This program accepts one or more file names as command-line arguments.
    args = sys.argv[1:]
    if not args:
        print('usage: [--summaryfile] file [file ...]')
        sys.exit(1)
    # Notice the summary flag and remove it from args if it is present.
    summary = False
    if args[0] == '--summaryfile':
        summary = True
        del args[0]
    # +++your code here+++
    # For each file name, call extract_names above and print the result to
    # stdout, or write it to a summary file (when --summaryfile was given).
    list_data_name = []
    for filename in args:
        list_data_name += extract_names(filename)
    if(summary):
        with open('summary.txt', 'w') as file:
            file.write("\n".join(list_data_name))
        print("Vui lòng mở file summary.txt để xem kết quả")
    else:
        print(list_data_name)
if __name__ == '__main__':
    main()
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,682
|
yenlt6/learningpython
|
refs/heads/main
|
/Day8Test1809/hackathon1_midterm/babynames.py
|
import sys
import re
import io
def extract_names(filename):
    """
    Given a babyXXXX.html file, return a list starting with the year,
    followed by 'name rank' strings sorted alphabetically, e.g.
    ['1998', 'Aaliyah 91', 'Aaron 57', ...].
    """
    # Fixes vs original: the file handle was never closed (now a with-block),
    # and the patterns were non-raw strings with needless \/ escapes
    # (invalid escape sequences). The year is captured with a group instead
    # of slicing the whole match at [14:].
    with open(filename, encoding='utf-8-sig') as f:
        content = f.read()
    year = re.findall(r"Popularity in (\d+)", content)[0]
    name = re.findall(r'tr align="right"><td>(\d+)</td><td>(\w+)</td><td>(\w+)<', content)
    rank_1 = []
    rank_2 = []
    for i in name:
        rank_1.append(f"{i[1]} {i[0]}")
        rank_2.append(f"{i[2]} {i[0]}")
    rank_1.extend(rank_2)
    rank_1.sort()
    rank_1.insert(0, year)
    # print(rank_1)
    return rank_1
#Run: python babynames.py baby1998.html
def main():
    """CLI entry point: extract names from each file, print them or write summary.txt."""
    # This program accepts one or more file names as command-line arguments.
    args = sys.argv[1:]
    if not args:
        print('usage: [--summaryfile] file [file ...]')
        sys.exit(1)
    # Notice the summary flag and remove it from args if it is present.
    summary = False
    if args[0] == '--summaryfile':
        summary = True
        del args[0]
    # +++your code here+++
    # For each file name, call extract_names above and print the result to
    # stdout, or write it to a summary file (when --summaryfile was given).
    list_name = []
    for filename in args:
        list_name += extract_names(filename)
    if(summary):
        with open('summary.txt', 'w') as file:
            file.write("\n".join(list_name))
        print("Vui lòng mở file summary.txt để xem kết quả")
    else:
        print(list_name)
if __name__ == '__main__':
    main()
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,683
|
yenlt6/learningpython
|
refs/heads/main
|
/Day6Date1409/thamkhao_mis_hanh.py
|
#Bài 1: Đảo ngược chuỗi trong python
def convert_string(s):
    """Reverse the word order of s, then invert the case of every letter."""
    reversed_words = " ".join(reversed(s.split(" ")))
    swapped = []
    for ch in reversed_words:
        if ch.isupper():
            swapped.append(ch.lower())
        elif ch.islower():
            swapped.append(ch.upper())
        else:
            swapped.append(ch)
    return "".join(swapped)
# Quick demo of convert_string.
s = "tHE fOX iS cOMING fOR tHE cHICKEN"
print(convert_string (s) )
#Bài 2: Sorting - Sắp xếp điểm thi
def sort_list_last(list_a):
    """Return list_a sorted ascending by each tuple's third element."""
    return sorted(list_a, key=lambda item: item[2])
# Demo data: tuples ordered by their third element by sort_list_last.
A = [(1, 2, 5), (9, 1, 2), (6, 4, 4), (3, 2, 3), (10, 2, 1)]
print("Danh sách sau khi sắp xếp: ", sort_list_last(A))
#Bài 3: Bài tập tổng hợp phần Basic - Đếm từ
import sys
# Đếm từ
def print_words(filename):
    """Print each distinct word in the file with its count, in alphabetical order."""
    words_count = readfile_count_words(filename)
    # Bug fix: the original sorted with key=lambda k: k[0], i.e. by the FIRST
    # character only, leaving words that share an initial letter in arbitrary
    # dict-insertion order; sort the full words instead.
    for word in sorted(words_count):
        print(word, ":", words_count[word])
def print_top(filename):
    """Print the 20 most frequent words in the file, most frequent first."""
    words_count = readfile_count_words(filename)
    ranked = sorted(words_count, key=words_count.get, reverse=True)
    for position, word in enumerate(ranked[:20], start=1):
        print(f"{position}. {word}: {words_count[word]}")
def readfile_count_words(filename):
    """Read the file and return a {lower-cased word: occurrence count} dict."""
    # Bug fix: the file handle was opened but never closed; use a with-block.
    with open(filename, encoding='utf-8-sig') as f:
        content = remove_special_char(f.read())
    dict_count = {}
    for word in content.split(" "):
        # Repeated separators produce empty tokens; skip them.
        if word == '':
            continue
        word = word.lower()
        dict_count[word] = dict_count.get(word, 0) + 1
    return dict_count
def remove_special_char(text):
    """Return text with punctuation/bracket tokens replaced by single spaces."""
    # Fix: the parameter was named `str`, shadowing the builtin; renamed.
    special_char = ['.', ',', '?', '!', '\n', '\r', '(', ')', '[', ']', ':', '--', ';', '`', "' ", '"']
    for token in special_char:
        text = text.replace(token, " ")
    return text
###
# This basic command line argument parsing code is provided and
# calls the print_words() and print_top() functions which you must define.
def main():
    """Dispatch on --count / --topcount and run the matching word-count report."""
    if len(sys.argv) != 3:
        print('usage: ./wordcount.py {--count | --topcount} file')
        sys.exit(1)
    option = sys.argv[1]
    filename = sys.argv[2]
    if option == '--count':
        print_words(filename)
    elif option == '--topcount':
        print_top(filename)
    else:
        print('unknown option: ' + option)
        sys.exit(1)
if __name__ == '__main__':
    main()
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,684
|
yenlt6/learningpython
|
refs/heads/main
|
/Day17_API_1910/main.py
|
import sys
from typing import Optional
from fastapi import FastAPI
from pydantic import BaseModel
from pymysql import connect, cursors, Error
class Todo(BaseModel):
    """Request/response schema for a todo item."""
    id: Optional[int] = None            # assigned by the database on insert
    title: str
    completed: Optional[bool] = False   # new todos start uncompleted
# MySQL connection settings; DictCursor makes rows serialize to JSON directly.
# NOTE(review): credentials are hard-coded in source — consider env variables.
config = {
    "host": "10.124.60.67",
    "user": "root",
    "password": "root",
    'database': 'todo',
    'cursorclass': cursors.DictCursor
}
# config = {
#     'host': 'localhost',
#     'user': 'root',
#     'password': '123456',
#     'database': 'todo',
#     'cursorclass': cursors.DictCursor
# }
# Open one module-level connection/cursor shared by all routes; abort on failure.
try:
    cnx = connect(**config)
    cur = cnx.cursor()
except Error as e:
    print(e)
    sys.exit()
app = FastAPI()
@app.get("/api/todos", status_code=200)
def get_all_todos():
    """Return every row of the todolist table."""
    sql = '''SELECT * FROM todolist'''
    cur.execute(sql)
    todos = cur.fetchall()
    return todos
@app.get("/api/todos/{id}", status_code=200)
def get_todo_by_id(id):
    """Return the single todo with the given id (None body if it doesn't exist)."""
    # id is bound as a query parameter, not interpolated into the SQL string.
    sql = '''SELECT * FROM todolist Where id=%s'''
    cur.execute(sql,id)
    todos = cur.fetchone()
    return todos
@app.get("/api/todos/not-completed", status_code=200)
def get_not_completed_todos():
    """Return all todos that are not completed yet.

    Bug fixes: the function duplicated the name get_todo_by_id (the second
    def shadowed the first at module level) and declared an `id` parameter
    the route never supplies.
    NOTE(review): FastAPI matches routes in registration order, so the
    /api/todos/{id} route registered above still captures 'not-completed'
    first; this route should be registered before the {id} route (cannot be
    fixed within this block alone).
    """
    sql = '''SELECT * FROM todolist Where completed=0'''
    cur.execute(sql)
    todos = cur.fetchall()
    return todos
@app.post("/api/todos", status_code=201)
def create_todo(todo: Todo):
    """Insert a new todo (title only) and return it with its generated id."""
    sql = '''INSERT INTO todolist (title) VALUES (%s)'''
    cur.execute(sql, todo.title)
    cnx.commit()
    id = cur.lastrowid  # id generated by the INSERT; hand it back to the client
    todo.id = id
    return todo
@app.put("/api/todos/{id}", status_code=200)
def update_todo(id: int, todo: Todo):
    """Update a todo's title and completed flag; return the updated todo."""
    # Security fix: `id` was interpolated into the SQL with an f-string;
    # bind it as a named parameter instead (parameterized queries prevent
    # SQL injection and quoting bugs).
    sql = '''
        UPDATE todolist
        SET title = %(title)s,
            completed = %(completed)s
        WHERE id = %(id)s
    '''
    params = dict(todo)
    params['id'] = id  # override any id the client sent in the body
    cur.execute(sql, params)
    cnx.commit()
    todo.id = id
    return todo
@app.delete("/api/todos/{id}", status_code=200)
def delete_todo(id):
    """Delete the todo with the given id."""
    # Security fix: the original interpolated `id` into the SQL with an
    # f-string; since this parameter is untyped, arbitrary strings reached
    # the query — bind it as a parameter instead.
    sql = '''DELETE FROM todolist WHERE id = %s'''
    cur.execute(sql, id)
    cnx.commit()
    return
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,685
|
yenlt6/learningpython
|
refs/heads/main
|
/python4testers_student/code_along/oop/solutions/fraction.py
|
def hcf(x, y):
    """
    Return the highest common factor of x and y (works for ints and for
    floats that share an exact common divisor), or None when no positive
    HCF exists.
    """
    x, y = abs(x), abs(y)
    # Bug fix: when exactly one argument is 0 the HCF is the other argument
    # (gcd(0, n) == n); the original returned None for any zero input,
    # leaving fractions like 0/4 unreduced.
    if x == 0 or y == 0:
        n = x + y
        return n if n > 0 else None
    hcf = x if x < y else y
    while hcf > 0:
        if x % hcf == 0 and y % hcf == 0:
            break
        hcf -= 1
    return hcf if hcf > 0 else None
class Fraction:
    """A rational number nr/dr kept in lowest terms with a positive denominator."""
    def __init__(self, nr, dr=1):
        """Build nr/dr; raises ZeroDivisionError when dr == 0."""
        if dr == 0:
            raise ZeroDivisionError("Mẫu số phải khác 0")
        # Normalise the sign so the denominator is always positive.
        if dr < 0:
            self.nr = nr * -1
            self.dr = dr * -1
        else:
            self.nr = nr
            self.dr = dr
        self._reduce()
    def __repr__(self):
        # "0" for a zero numerator, "n" for whole numbers, otherwise "nr/dr".
        return "0" if self.nr == 0 else str(self.nr) if self.dr == 1 else f"{self.nr}/{self.dr}"
    def __add__(self, other):
        # int/float operands are promoted to a Fraction over this denominator.
        # type() (not isinstance) deliberately leaves bool unpromoted.
        if type(other) == int or type(other) == float:
            other = Fraction(other * self.dr, self.dr)
        return Fraction((self.nr * other.dr) + (other.nr * self.dr), self.dr * other.dr)
    def __sub__(self, other):
        if type(other) == int or type(other) == float:
            other = Fraction(other * self.dr, self.dr)
        return Fraction((self.nr * other.dr) - (other.nr * self.dr), self.dr * other.dr)
    def __mul__(self, other):
        if type(other) == int or type(other) == float:
            other = Fraction(other * self.dr, self.dr)
        return Fraction(self.nr * other.nr, self.dr * other.dr)
    def __truediv__(self, other):
        if type(other) == int or type(other) == float:
            other = Fraction(other * self.dr, self.dr)
        return Fraction(self.nr * other.dr, other.nr * self.dr)
    def _reduce(self):
        # Divide numerator and denominator by their highest common factor.
        # NOTE(review): hcf may return None (e.g. when nr == 0), in which case
        # the fraction is left unreduced — __repr__ still shows "0" for it.
        n = hcf(self.nr, self.dr)
        if n:
            self.nr = int(self.nr / n)
            self.dr = int(self.dr / n)
# --- Demo 1: arithmetic between two fractions -----------------------------
fr = Fraction(1, 2)
other = Fraction(1.5, -3)
print(fr, other)
print()
for combined in (fr + other, fr - other, fr * other, fr / other):
    print(combined)
print()

# --- Demo 2: mixing a fraction with plain int/float operands --------------
fr = Fraction(1, 2)
for combined in (fr + 1, fr - 1.5, fr * 2, fr / 2):
    print(combined)
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,686
|
yenlt6/learningpython
|
refs/heads/main
|
/python4testers_student/testing/wallet/conftest.py
|
import pytest
from wallet import Wallet, InsufficientAmount
@pytest.fixture
def wallet100():
    """Provide a Wallet pre-funded with a balance of 100 for tests."""
    funded_wallet = Wallet(100)
    return funded_wallet
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,687
|
yenlt6/learningpython
|
refs/heads/main
|
/python4testers_student/code_along/oop/solutions/bank_account_data_hidding.py
|
class BankAccount:
    """A bank account whose balance is validated to stay non-negative.

    All balance mutations funnel through set_balance() so the validation
    lives in exactly one place.
    """

    def __init__(self, account_number, account_name, balance=0):
        self._account_number = account_number
        self._account_name = account_name
        # Route the opening balance through the setter so it is validated
        # the same way as every later mutation.
        self.set_balance(balance)

    def get_account_number(self):
        """Return the account number."""
        return self._account_number

    def get_account_name(self):
        """Return the account holder's name."""
        return self._account_name

    def get_balance(self):
        """Return the current balance."""
        return self._balance

    def set_balance(self, balance):
        """Store *balance*; reject negative values with ValueError."""
        if balance < 0:
            raise ValueError("Số dư phải lớn hơn 0")
        self._balance = balance

    def display(self):
        """Print a one-line summary of the account state.

        Reads go through the getters, matching the class convention of
        never touching the underscore attributes directly.
        """
        print(self.get_account_number(),
              self.get_account_name(), self.get_balance(), "₫")

    def withdraw(self, amount):
        """Subtract *amount*; it must be positive and at most the balance."""
        if not 0 < amount <= self.get_balance():
            raise ValueError(
                "Số tiền phải lớn hơn 0 và không được vượt quá số dư hiện tại")
        self.set_balance(self.get_balance() - amount)

    def deposit(self, amount):
        """Add a strictly positive *amount* to the balance."""
        if amount <= 0:
            raise ValueError("Số tiền phải lớn hơn 0")
        self.set_balance(self.get_balance() + amount)
# Demo: open an account with a huge balance, then exercise deposit/withdraw
# and print the state after each mutation.
my_account = BankAccount(1, "Ba", 1_000_000_000_000_000_000)
my_account.deposit(1_000_000_000_000_000_000)
my_account.display()
my_account.withdraw(100_000_000)  # tiny compared to the balance 😊
my_account.display()
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
12,688
|
yenlt6/learningpython
|
refs/heads/main
|
/python4testers_student/testing/wallet/test_newwallet.py
|
import pytest
def test_assertFalse(wallet100):
print(wallet100)
assert wallet100.balance == 100
def assertTrue():
    """Trivial truthiness check (1 compares equal to True).

    NOTE(review): the name lacks pytest's default ``test`` prefix, so this
    function is presumably never collected as a test — confirm intent.
    """
    assert True == 1
def testAssertTrue():
    """Sanity check that a basic integer comparison holds."""
    assert 3 > 2
|
{"/python4testers_student/code_along/vingroup/payments/vinid.py": ["/python4testers_student/code_along/vingroup/products/vinfast.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.