repo stringlengths 7 55 | path stringlengths 4 127 | func_name stringlengths 1 88 | original_string stringlengths 75 19.8k | language stringclasses 1
value | code stringlengths 75 19.8k | code_tokens listlengths 20 707 | docstring stringlengths 3 17.3k | docstring_tokens listlengths 3 222 | sha stringlengths 40 40 | url stringlengths 87 242 | partition stringclasses 1
value | idx int64 0 252k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
jazzband/django-simple-menu | menu/menu.py | MenuItem.match_url | def match_url(self, request):
"""
match url determines if this is selected
"""
matched = False
if self.exact_url:
if re.match("%s$" % (self.url,), request.path):
matched = True
elif re.match("%s" % self.url, request.path):
matched = True
return matched | python | def match_url(self, request):
"""
match url determines if this is selected
"""
matched = False
if self.exact_url:
if re.match("%s$" % (self.url,), request.path):
matched = True
elif re.match("%s" % self.url, request.path):
matched = True
return matched | [
"def",
"match_url",
"(",
"self",
",",
"request",
")",
":",
"matched",
"=",
"False",
"if",
"self",
".",
"exact_url",
":",
"if",
"re",
".",
"match",
"(",
"\"%s$\"",
"%",
"(",
"self",
".",
"url",
",",
")",
",",
"request",
".",
"path",
")",
":",
"mat... | match url determines if this is selected | [
"match",
"url",
"determines",
"if",
"this",
"is",
"selected"
] | c9d8c4f1246655a7f9763555f7c96b88dd770791 | https://github.com/jazzband/django-simple-menu/blob/c9d8c4f1246655a7f9763555f7c96b88dd770791/menu/menu.py#L252-L262 | train | 212,100 |
twisted/txmongo | txmongo/connection.py | _Connection.configure | def configure(self, proto):
"""
Configures the protocol using the information gathered from the
remote Mongo instance. Such information may contain the max
BSON document size, replica set configuration, and the master
status of the instance.
"""
if not proto:
defer.returnValue(None)
reply = yield self.__send_ismaster(proto, timeout=self.initialDelay)
# Handle the reply from the "ismaster" query. The reply contains
# configuration information about the peer.
# Make sure we got a result document.
if len(reply.documents) != 1:
raise OperationFailure("TxMongo: invalid document length.")
# Get the configuration document from the reply.
config = reply.documents[0].decode()
# Make sure the command was successful.
if not config.get("ok"):
code = config.get("code")
msg = "TxMongo: " + config.get("err", "Unknown error")
raise OperationFailure(msg, code)
# Check that the replicaSet matches.
set_name = config.get("setName")
expected_set_name = self.uri["options"].get("replicaset")
if expected_set_name and (expected_set_name != set_name):
# Log the invalid replica set failure.
msg = "TxMongo: Mongo instance does not match requested replicaSet."
raise ConfigurationError(msg)
# Track max bson object size limit.
proto.max_bson_size = config.get("maxBsonObjectSize", DEFAULT_MAX_BSON_SIZE)
proto.max_write_batch_size = config.get("maxWriteBatchSize", DEFAULT_MAX_WRITE_BATCH_SIZE)
proto.set_wire_versions(config.get("minWireVersion", 0),
config.get("maxWireVersion", 0))
# Track the other hosts in the replica set.
hosts = config.get("hosts")
if isinstance(hosts, list) and hosts:
for host in hosts:
if ':' not in host:
host = (host, 27017)
else:
host = host.split(':', 1)
host[1] = int(host[1])
host = tuple(host)
if host not in self.__allnodes:
self.__allnodes.append(host)
# Check if this node is the master.
ismaster = config.get("ismaster")
if not ismaster:
msg = "TxMongo: MongoDB host `%s` is not master." % config.get('me')
raise AutoReconnect(msg) | python | def configure(self, proto):
"""
Configures the protocol using the information gathered from the
remote Mongo instance. Such information may contain the max
BSON document size, replica set configuration, and the master
status of the instance.
"""
if not proto:
defer.returnValue(None)
reply = yield self.__send_ismaster(proto, timeout=self.initialDelay)
# Handle the reply from the "ismaster" query. The reply contains
# configuration information about the peer.
# Make sure we got a result document.
if len(reply.documents) != 1:
raise OperationFailure("TxMongo: invalid document length.")
# Get the configuration document from the reply.
config = reply.documents[0].decode()
# Make sure the command was successful.
if not config.get("ok"):
code = config.get("code")
msg = "TxMongo: " + config.get("err", "Unknown error")
raise OperationFailure(msg, code)
# Check that the replicaSet matches.
set_name = config.get("setName")
expected_set_name = self.uri["options"].get("replicaset")
if expected_set_name and (expected_set_name != set_name):
# Log the invalid replica set failure.
msg = "TxMongo: Mongo instance does not match requested replicaSet."
raise ConfigurationError(msg)
# Track max bson object size limit.
proto.max_bson_size = config.get("maxBsonObjectSize", DEFAULT_MAX_BSON_SIZE)
proto.max_write_batch_size = config.get("maxWriteBatchSize", DEFAULT_MAX_WRITE_BATCH_SIZE)
proto.set_wire_versions(config.get("minWireVersion", 0),
config.get("maxWireVersion", 0))
# Track the other hosts in the replica set.
hosts = config.get("hosts")
if isinstance(hosts, list) and hosts:
for host in hosts:
if ':' not in host:
host = (host, 27017)
else:
host = host.split(':', 1)
host[1] = int(host[1])
host = tuple(host)
if host not in self.__allnodes:
self.__allnodes.append(host)
# Check if this node is the master.
ismaster = config.get("ismaster")
if not ismaster:
msg = "TxMongo: MongoDB host `%s` is not master." % config.get('me')
raise AutoReconnect(msg) | [
"def",
"configure",
"(",
"self",
",",
"proto",
")",
":",
"if",
"not",
"proto",
":",
"defer",
".",
"returnValue",
"(",
"None",
")",
"reply",
"=",
"yield",
"self",
".",
"__send_ismaster",
"(",
"proto",
",",
"timeout",
"=",
"self",
".",
"initialDelay",
")... | Configures the protocol using the information gathered from the
remote Mongo instance. Such information may contain the max
BSON document size, replica set configuration, and the master
status of the instance. | [
"Configures",
"the",
"protocol",
"using",
"the",
"information",
"gathered",
"from",
"the",
"remote",
"Mongo",
"instance",
".",
"Such",
"information",
"may",
"contain",
"the",
"max",
"BSON",
"document",
"size",
"replica",
"set",
"configuration",
"and",
"the",
"ma... | a788c27649a0c62e11f84af0de9754fda01a38c0 | https://github.com/twisted/txmongo/blob/a788c27649a0c62e11f84af0de9754fda01a38c0/txmongo/connection.py#L80-L141 | train | 212,101 |
twisted/txmongo | txmongo/connection.py | _Connection.notifyReady | def notifyReady(self):
"""
Returns a deferred that will fire when the factory has created a
protocol that can be used to communicate with a Mongo server.
Note that this will not fire until we have connected to a Mongo
master, unless slaveOk was specified in the Mongo URI connection
options.
"""
if self.instance:
return defer.succeed(self.instance)
def on_cancel(d):
self.__notify_ready.remove(d)
df = defer.Deferred(on_cancel)
self.__notify_ready.append(df)
return df | python | def notifyReady(self):
"""
Returns a deferred that will fire when the factory has created a
protocol that can be used to communicate with a Mongo server.
Note that this will not fire until we have connected to a Mongo
master, unless slaveOk was specified in the Mongo URI connection
options.
"""
if self.instance:
return defer.succeed(self.instance)
def on_cancel(d):
self.__notify_ready.remove(d)
df = defer.Deferred(on_cancel)
self.__notify_ready.append(df)
return df | [
"def",
"notifyReady",
"(",
"self",
")",
":",
"if",
"self",
".",
"instance",
":",
"return",
"defer",
".",
"succeed",
"(",
"self",
".",
"instance",
")",
"def",
"on_cancel",
"(",
"d",
")",
":",
"self",
".",
"__notify_ready",
".",
"remove",
"(",
"d",
")"... | Returns a deferred that will fire when the factory has created a
protocol that can be used to communicate with a Mongo server.
Note that this will not fire until we have connected to a Mongo
master, unless slaveOk was specified in the Mongo URI connection
options. | [
"Returns",
"a",
"deferred",
"that",
"will",
"fire",
"when",
"the",
"factory",
"has",
"created",
"a",
"protocol",
"that",
"can",
"be",
"used",
"to",
"communicate",
"with",
"a",
"Mongo",
"server",
"."
] | a788c27649a0c62e11f84af0de9754fda01a38c0 | https://github.com/twisted/txmongo/blob/a788c27649a0c62e11f84af0de9754fda01a38c0/txmongo/connection.py#L155-L172 | train | 212,102 |
twisted/txmongo | txmongo/connection.py | _Connection.retryNextHost | def retryNextHost(self, connector=None):
"""
Have this connector connect again, to the next host in the
configured list of hosts.
"""
if not self.continueTrying:
msg = "TxMongo: Abandoning {0} on explicit request.".format(connector)
log.msg(msg)
return
if connector is None:
if self.connector is None:
raise ValueError("TxMongo: No additional connector to retry.")
else:
connector = self.connector
delay = False
self.__index += 1
if self.__index >= len(self.__allnodes):
self.__index = 0
delay = True
connector.host, connector.port = self.__allnodes[self.__index]
if delay:
self.retry(connector)
else:
connector.connect() | python | def retryNextHost(self, connector=None):
"""
Have this connector connect again, to the next host in the
configured list of hosts.
"""
if not self.continueTrying:
msg = "TxMongo: Abandoning {0} on explicit request.".format(connector)
log.msg(msg)
return
if connector is None:
if self.connector is None:
raise ValueError("TxMongo: No additional connector to retry.")
else:
connector = self.connector
delay = False
self.__index += 1
if self.__index >= len(self.__allnodes):
self.__index = 0
delay = True
connector.host, connector.port = self.__allnodes[self.__index]
if delay:
self.retry(connector)
else:
connector.connect() | [
"def",
"retryNextHost",
"(",
"self",
",",
"connector",
"=",
"None",
")",
":",
"if",
"not",
"self",
".",
"continueTrying",
":",
"msg",
"=",
"\"TxMongo: Abandoning {0} on explicit request.\"",
".",
"format",
"(",
"connector",
")",
"log",
".",
"msg",
"(",
"msg",
... | Have this connector connect again, to the next host in the
configured list of hosts. | [
"Have",
"this",
"connector",
"connect",
"again",
"to",
"the",
"next",
"host",
"in",
"the",
"configured",
"list",
"of",
"hosts",
"."
] | a788c27649a0c62e11f84af0de9754fda01a38c0 | https://github.com/twisted/txmongo/blob/a788c27649a0c62e11f84af0de9754fda01a38c0/txmongo/connection.py#L174-L202 | train | 212,103 |
twisted/txmongo | txmongo/_gridfs/__init__.py | GridFS.indexes_created | def indexes_created(self):
"""Returns a defer on the creation of this GridFS instance's indexes
"""
d = defer.Deferred()
self.__indexes_created_defer.chainDeferred(d)
return d | python | def indexes_created(self):
"""Returns a defer on the creation of this GridFS instance's indexes
"""
d = defer.Deferred()
self.__indexes_created_defer.chainDeferred(d)
return d | [
"def",
"indexes_created",
"(",
"self",
")",
":",
"d",
"=",
"defer",
".",
"Deferred",
"(",
")",
"self",
".",
"__indexes_created_defer",
".",
"chainDeferred",
"(",
"d",
")",
"return",
"d"
] | Returns a defer on the creation of this GridFS instance's indexes | [
"Returns",
"a",
"defer",
"on",
"the",
"creation",
"of",
"this",
"GridFS",
"instance",
"s",
"indexes"
] | a788c27649a0c62e11f84af0de9754fda01a38c0 | https://github.com/twisted/txmongo/blob/a788c27649a0c62e11f84af0de9754fda01a38c0/txmongo/_gridfs/__init__.py#L69-L74 | train | 212,104 |
twisted/txmongo | txmongo/_gridfs/__init__.py | GridFS.get_last_version | def get_last_version(self, filename):
"""Get a file from GridFS by ``"filename"``.
Returns the most recently uploaded file in GridFS with the
name `filename` as an instance of
:class:`~gridfs.grid_file.GridOut`. Raises
:class:`~gridfs.errors.NoFile` if no such file exists.
An index on ``{filename: 1, uploadDate: -1}`` will
automatically be created when this method is called the first
time.
:Parameters:
- `filename`: ``"filename"`` of the file to get
.. versionadded:: 1.6
"""
def ok(doc):
if doc is None:
raise NoFile("TxMongo: no file in gridfs with filename {0}".format(repr(filename)))
return GridOut(self.__collection, doc)
return self.__files.find_one({"filename": filename},
filter = filter.sort(DESCENDING("uploadDate"))).addCallback(ok) | python | def get_last_version(self, filename):
"""Get a file from GridFS by ``"filename"``.
Returns the most recently uploaded file in GridFS with the
name `filename` as an instance of
:class:`~gridfs.grid_file.GridOut`. Raises
:class:`~gridfs.errors.NoFile` if no such file exists.
An index on ``{filename: 1, uploadDate: -1}`` will
automatically be created when this method is called the first
time.
:Parameters:
- `filename`: ``"filename"`` of the file to get
.. versionadded:: 1.6
"""
def ok(doc):
if doc is None:
raise NoFile("TxMongo: no file in gridfs with filename {0}".format(repr(filename)))
return GridOut(self.__collection, doc)
return self.__files.find_one({"filename": filename},
filter = filter.sort(DESCENDING("uploadDate"))).addCallback(ok) | [
"def",
"get_last_version",
"(",
"self",
",",
"filename",
")",
":",
"def",
"ok",
"(",
"doc",
")",
":",
"if",
"doc",
"is",
"None",
":",
"raise",
"NoFile",
"(",
"\"TxMongo: no file in gridfs with filename {0}\"",
".",
"format",
"(",
"repr",
"(",
"filename",
")"... | Get a file from GridFS by ``"filename"``.
Returns the most recently uploaded file in GridFS with the
name `filename` as an instance of
:class:`~gridfs.grid_file.GridOut`. Raises
:class:`~gridfs.errors.NoFile` if no such file exists.
An index on ``{filename: 1, uploadDate: -1}`` will
automatically be created when this method is called the first
time.
:Parameters:
- `filename`: ``"filename"`` of the file to get
.. versionadded:: 1.6 | [
"Get",
"a",
"file",
"from",
"GridFS",
"by",
"filename",
"."
] | a788c27649a0c62e11f84af0de9754fda01a38c0 | https://github.com/twisted/txmongo/blob/a788c27649a0c62e11f84af0de9754fda01a38c0/txmongo/_gridfs/__init__.py#L189-L213 | train | 212,105 |
twisted/txmongo | txmongo/database.py | Database.authenticate | def authenticate(self, name, password, mechanism="DEFAULT"):
"""
Send an authentication command for this database.
mostly stolen from pymongo
"""
if not isinstance(name, (bytes, unicode)):
raise TypeError("TxMongo: name must be an instance of basestring.")
if not isinstance(password, (bytes, unicode)):
raise TypeError("TxMongo: password must be an instance of basestring.")
"""
Authenticating
"""
return self.connection.authenticate(self, name, password, mechanism) | python | def authenticate(self, name, password, mechanism="DEFAULT"):
"""
Send an authentication command for this database.
mostly stolen from pymongo
"""
if not isinstance(name, (bytes, unicode)):
raise TypeError("TxMongo: name must be an instance of basestring.")
if not isinstance(password, (bytes, unicode)):
raise TypeError("TxMongo: password must be an instance of basestring.")
"""
Authenticating
"""
return self.connection.authenticate(self, name, password, mechanism) | [
"def",
"authenticate",
"(",
"self",
",",
"name",
",",
"password",
",",
"mechanism",
"=",
"\"DEFAULT\"",
")",
":",
"if",
"not",
"isinstance",
"(",
"name",
",",
"(",
"bytes",
",",
"unicode",
")",
")",
":",
"raise",
"TypeError",
"(",
"\"TxMongo: name must be ... | Send an authentication command for this database.
mostly stolen from pymongo | [
"Send",
"an",
"authentication",
"command",
"for",
"this",
"database",
".",
"mostly",
"stolen",
"from",
"pymongo"
] | a788c27649a0c62e11f84af0de9754fda01a38c0 | https://github.com/twisted/txmongo/blob/a788c27649a0c62e11f84af0de9754fda01a38c0/txmongo/database.py#L119-L132 | train | 212,106 |
twisted/txmongo | txmongo/utils/__init__.py | timeout | def timeout(func):
"""Decorator to add timeout to Deferred calls"""
@wraps(func)
def _timeout(*args, **kwargs):
now = time()
deadline = kwargs.pop("deadline", None)
seconds = kwargs.pop("timeout", None)
if deadline is None and seconds is not None:
deadline = now + seconds
if deadline is not None and deadline < now:
raise TimeExceeded("TxMongo: run time exceeded by {0}s.".format(now-deadline))
kwargs['_deadline'] = deadline
raw_d = func(*args, **kwargs)
if deadline is None:
return raw_d
if seconds is None and deadline is not None and deadline - now > 0:
seconds = deadline - now
timeout_d = defer.Deferred()
times_up = reactor.callLater(seconds, timeout_d.callback, None)
def on_ok(result):
if timeout_d.called:
raw_d.cancel()
raise TimeExceeded("TxMongo: run time of {0}s exceeded.".format(seconds))
else:
times_up.cancel()
return result[0]
def on_fail(failure):
failure.trap(defer.FirstError)
assert failure.value.index == 0
times_up.cancel()
failure.value.subFailure.raiseException()
return defer.DeferredList([raw_d, timeout_d], fireOnOneCallback=True,
fireOnOneErrback=True, consumeErrors=True).addCallbacks(on_ok, on_fail)
return _timeout | python | def timeout(func):
"""Decorator to add timeout to Deferred calls"""
@wraps(func)
def _timeout(*args, **kwargs):
now = time()
deadline = kwargs.pop("deadline", None)
seconds = kwargs.pop("timeout", None)
if deadline is None and seconds is not None:
deadline = now + seconds
if deadline is not None and deadline < now:
raise TimeExceeded("TxMongo: run time exceeded by {0}s.".format(now-deadline))
kwargs['_deadline'] = deadline
raw_d = func(*args, **kwargs)
if deadline is None:
return raw_d
if seconds is None and deadline is not None and deadline - now > 0:
seconds = deadline - now
timeout_d = defer.Deferred()
times_up = reactor.callLater(seconds, timeout_d.callback, None)
def on_ok(result):
if timeout_d.called:
raw_d.cancel()
raise TimeExceeded("TxMongo: run time of {0}s exceeded.".format(seconds))
else:
times_up.cancel()
return result[0]
def on_fail(failure):
failure.trap(defer.FirstError)
assert failure.value.index == 0
times_up.cancel()
failure.value.subFailure.raiseException()
return defer.DeferredList([raw_d, timeout_d], fireOnOneCallback=True,
fireOnOneErrback=True, consumeErrors=True).addCallbacks(on_ok, on_fail)
return _timeout | [
"def",
"timeout",
"(",
"func",
")",
":",
"@",
"wraps",
"(",
"func",
")",
"def",
"_timeout",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"now",
"=",
"time",
"(",
")",
"deadline",
"=",
"kwargs",
".",
"pop",
"(",
"\"deadline\"",
",",
"None"... | Decorator to add timeout to Deferred calls | [
"Decorator",
"to",
"add",
"timeout",
"to",
"Deferred",
"calls"
] | a788c27649a0c62e11f84af0de9754fda01a38c0 | https://github.com/twisted/txmongo/blob/a788c27649a0c62e11f84af0de9754fda01a38c0/txmongo/utils/__init__.py#L7-L52 | train | 212,107 |
twisted/txmongo | txmongo/_gridfs/grid_file.py | GridIn.writelines | def writelines(self, sequence):
"""Write a sequence of strings to the file.
Does not add separators.
"""
iterator = iter(sequence)
def iterate(_=None):
try:
return self.write(next(iterator)).addCallback(iterate)
except StopIteration:
return
return defer.maybeDeferred(iterate) | python | def writelines(self, sequence):
"""Write a sequence of strings to the file.
Does not add separators.
"""
iterator = iter(sequence)
def iterate(_=None):
try:
return self.write(next(iterator)).addCallback(iterate)
except StopIteration:
return
return defer.maybeDeferred(iterate) | [
"def",
"writelines",
"(",
"self",
",",
"sequence",
")",
":",
"iterator",
"=",
"iter",
"(",
"sequence",
")",
"def",
"iterate",
"(",
"_",
"=",
"None",
")",
":",
"try",
":",
"return",
"self",
".",
"write",
"(",
"next",
"(",
"iterator",
")",
")",
".",
... | Write a sequence of strings to the file.
Does not add separators. | [
"Write",
"a",
"sequence",
"of",
"strings",
"to",
"the",
"file",
"."
] | a788c27649a0c62e11f84af0de9754fda01a38c0 | https://github.com/twisted/txmongo/blob/a788c27649a0c62e11f84af0de9754fda01a38c0/txmongo/_gridfs/grid_file.py#L257-L269 | train | 212,108 |
tijme/not-your-average-web-crawler | nyawc/http/Handler.py | Handler.get_new_requests | def get_new_requests(self):
"""Retrieve all the new request that were found in this request.
Returns:
list(:class:`nyawc.http.Request`): A list of request objects.
"""
content_type = self.__queue_item.response.headers.get('content-type')
scrapers = self.__get_all_scrapers()
new_requests = []
for scraper in scrapers:
instance = scraper(self.__options, self.__queue_item)
if self.__content_type_matches(content_type, instance.content_types):
new_requests.extend(instance.get_requests())
return new_requests | python | def get_new_requests(self):
"""Retrieve all the new request that were found in this request.
Returns:
list(:class:`nyawc.http.Request`): A list of request objects.
"""
content_type = self.__queue_item.response.headers.get('content-type')
scrapers = self.__get_all_scrapers()
new_requests = []
for scraper in scrapers:
instance = scraper(self.__options, self.__queue_item)
if self.__content_type_matches(content_type, instance.content_types):
new_requests.extend(instance.get_requests())
return new_requests | [
"def",
"get_new_requests",
"(",
"self",
")",
":",
"content_type",
"=",
"self",
".",
"__queue_item",
".",
"response",
".",
"headers",
".",
"get",
"(",
"'content-type'",
")",
"scrapers",
"=",
"self",
".",
"__get_all_scrapers",
"(",
")",
"new_requests",
"=",
"[... | Retrieve all the new request that were found in this request.
Returns:
list(:class:`nyawc.http.Request`): A list of request objects. | [
"Retrieve",
"all",
"the",
"new",
"request",
"that",
"were",
"found",
"in",
"this",
"request",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/http/Handler.py#L66-L83 | train | 212,109 |
tijme/not-your-average-web-crawler | nyawc/http/Handler.py | Handler.__make_request | def __make_request(self, url, method, data, auth, cookies, headers, proxies, timeout, verify):
"""Execute a request with the given data.
Args:
url (str): The URL to call.
method (str): The method (e.g. `get` or `post`).
data (str): The data to call the URL with.
auth (obj): The authentication class.
cookies (obj): The cookie dict.
headers (obj): The header dict.
proxies (obj): The proxies dict.
timeout (int): The request timeout in seconds.
verify (mixed): SSL verification.
Returns:
obj: The response object.
"""
request_by_method = getattr(requests, method)
return request_by_method(
url=url,
data=data,
auth=auth,
cookies=cookies,
headers=headers,
proxies=proxies,
timeout=timeout,
verify=verify,
allow_redirects=True,
stream=False
) | python | def __make_request(self, url, method, data, auth, cookies, headers, proxies, timeout, verify):
"""Execute a request with the given data.
Args:
url (str): The URL to call.
method (str): The method (e.g. `get` or `post`).
data (str): The data to call the URL with.
auth (obj): The authentication class.
cookies (obj): The cookie dict.
headers (obj): The header dict.
proxies (obj): The proxies dict.
timeout (int): The request timeout in seconds.
verify (mixed): SSL verification.
Returns:
obj: The response object.
"""
request_by_method = getattr(requests, method)
return request_by_method(
url=url,
data=data,
auth=auth,
cookies=cookies,
headers=headers,
proxies=proxies,
timeout=timeout,
verify=verify,
allow_redirects=True,
stream=False
) | [
"def",
"__make_request",
"(",
"self",
",",
"url",
",",
"method",
",",
"data",
",",
"auth",
",",
"cookies",
",",
"headers",
",",
"proxies",
",",
"timeout",
",",
"verify",
")",
":",
"request_by_method",
"=",
"getattr",
"(",
"requests",
",",
"method",
")",
... | Execute a request with the given data.
Args:
url (str): The URL to call.
method (str): The method (e.g. `get` or `post`).
data (str): The data to call the URL with.
auth (obj): The authentication class.
cookies (obj): The cookie dict.
headers (obj): The header dict.
proxies (obj): The proxies dict.
timeout (int): The request timeout in seconds.
verify (mixed): SSL verification.
Returns:
obj: The response object. | [
"Execute",
"a",
"request",
"with",
"the",
"given",
"data",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/http/Handler.py#L85-L116 | train | 212,110 |
tijme/not-your-average-web-crawler | nyawc/http/Handler.py | Handler.__get_all_scrapers | def __get_all_scrapers(self):
"""Find all available scraper references.
Returns:
list(obj): The scraper references.
"""
modules_strings = self.__get_all_scrapers_modules()
modules = []
for module_string in modules_strings:
module = importlib.import_module("nyawc.scrapers." + module_string)
modules.append(getattr(module, module_string))
return modules | python | def __get_all_scrapers(self):
"""Find all available scraper references.
Returns:
list(obj): The scraper references.
"""
modules_strings = self.__get_all_scrapers_modules()
modules = []
for module_string in modules_strings:
module = importlib.import_module("nyawc.scrapers." + module_string)
modules.append(getattr(module, module_string))
return modules | [
"def",
"__get_all_scrapers",
"(",
"self",
")",
":",
"modules_strings",
"=",
"self",
".",
"__get_all_scrapers_modules",
"(",
")",
"modules",
"=",
"[",
"]",
"for",
"module_string",
"in",
"modules_strings",
":",
"module",
"=",
"importlib",
".",
"import_module",
"("... | Find all available scraper references.
Returns:
list(obj): The scraper references. | [
"Find",
"all",
"available",
"scraper",
"references",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/http/Handler.py#L118-L133 | train | 212,111 |
tijme/not-your-average-web-crawler | nyawc/http/Handler.py | Handler.__get_all_scrapers_modules | def __get_all_scrapers_modules(self):
"""Find all available scraper modules.
Returns:
list(obj): The scraper modules.
"""
modules = []
file = os.path.realpath(__file__)
folder = os.path.dirname(file)
for filename in os.listdir(folder + "/../scrapers"):
if filename.endswith("Scraper.py") and not filename.startswith("Base"):
modules.append(filename[:-3])
return modules | python | def __get_all_scrapers_modules(self):
"""Find all available scraper modules.
Returns:
list(obj): The scraper modules.
"""
modules = []
file = os.path.realpath(__file__)
folder = os.path.dirname(file)
for filename in os.listdir(folder + "/../scrapers"):
if filename.endswith("Scraper.py") and not filename.startswith("Base"):
modules.append(filename[:-3])
return modules | [
"def",
"__get_all_scrapers_modules",
"(",
"self",
")",
":",
"modules",
"=",
"[",
"]",
"file",
"=",
"os",
".",
"path",
".",
"realpath",
"(",
"__file__",
")",
"folder",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"file",
")",
"for",
"filename",
"in",
... | Find all available scraper modules.
Returns:
list(obj): The scraper modules. | [
"Find",
"all",
"available",
"scraper",
"modules",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/http/Handler.py#L135-L152 | train | 212,112 |
tijme/not-your-average-web-crawler | nyawc/http/Handler.py | Handler.__content_type_matches | def __content_type_matches(self, content_type, available_content_types):
"""Check if the given content type matches one of the available content types.
Args:
content_type (str): The given content type.
available_content_types list(str): All the available content types.
Returns:
bool: True if a match was found, False otherwise.
"""
if content_type is None:
return False
if content_type in available_content_types:
return True
for available_content_type in available_content_types:
if available_content_type in content_type:
return True
return False | python | def __content_type_matches(self, content_type, available_content_types):
"""Check if the given content type matches one of the available content types.
Args:
content_type (str): The given content type.
available_content_types list(str): All the available content types.
Returns:
bool: True if a match was found, False otherwise.
"""
if content_type is None:
return False
if content_type in available_content_types:
return True
for available_content_type in available_content_types:
if available_content_type in content_type:
return True
return False | [
"def",
"__content_type_matches",
"(",
"self",
",",
"content_type",
",",
"available_content_types",
")",
":",
"if",
"content_type",
"is",
"None",
":",
"return",
"False",
"if",
"content_type",
"in",
"available_content_types",
":",
"return",
"True",
"for",
"available_c... | Check if the given content type matches one of the available content types.
Args:
content_type (str): The given content type.
available_content_types list(str): All the available content types.
Returns:
bool: True if a match was found, False otherwise. | [
"Check",
"if",
"the",
"given",
"content",
"type",
"matches",
"one",
"of",
"the",
"available",
"content",
"types",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/http/Handler.py#L154-L176 | train | 212,113 |
tijme/not-your-average-web-crawler | nyawc/Routing.py | Routing.increase_route_count | def increase_route_count(self, crawled_request):
"""Increase the count that determines how many times a URL of a certain route has been crawled.
Args:
crawled_request (:class:`nyawc.http.Request`): The request that possibly matches a route.
"""
for route in self.__routing_options.routes:
if re.compile(route).match(crawled_request.url):
count_key = str(route) + crawled_request.method
if count_key in self.__routing_count.keys():
self.__routing_count[count_key] += 1
else:
self.__routing_count[count_key] = 1
break | python | def increase_route_count(self, crawled_request):
"""Increase the count that determines how many times a URL of a certain route has been crawled.
Args:
crawled_request (:class:`nyawc.http.Request`): The request that possibly matches a route.
"""
for route in self.__routing_options.routes:
if re.compile(route).match(crawled_request.url):
count_key = str(route) + crawled_request.method
if count_key in self.__routing_count.keys():
self.__routing_count[count_key] += 1
else:
self.__routing_count[count_key] = 1
break | [
"def",
"increase_route_count",
"(",
"self",
",",
"crawled_request",
")",
":",
"for",
"route",
"in",
"self",
".",
"__routing_options",
".",
"routes",
":",
"if",
"re",
".",
"compile",
"(",
"route",
")",
".",
"match",
"(",
"crawled_request",
".",
"url",
")",
... | Increase the count that determines how many times a URL of a certain route has been crawled.
Args:
crawled_request (:class:`nyawc.http.Request`): The request that possibly matches a route. | [
"Increase",
"the",
"count",
"that",
"determines",
"how",
"many",
"times",
"a",
"URL",
"of",
"a",
"certain",
"route",
"has",
"been",
"crawled",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/Routing.py#L47-L64 | train | 212,114 |
tijme/not-your-average-web-crawler | nyawc/Routing.py | Routing.is_treshold_reached | def is_treshold_reached(self, scraped_request):
"""Check if similar requests to the given requests have already been crawled X times. Where X is the
minimum treshold amount from the options.
Args:
scraped_request (:class:`nyawc.http.Request`): The request that possibly reached the minimum treshold.
Returns:
bool: True if treshold reached, false otherwise.
"""
for route in self.__routing_options.routes:
if re.compile(route).match(scraped_request.url):
count_key = str(route) + scraped_request.method
if count_key in self.__routing_count.keys():
return self.__routing_count[count_key] >= self.__routing_options.minimum_threshold
return False | python | def is_treshold_reached(self, scraped_request):
"""Check if similar requests to the given requests have already been crawled X times. Where X is the
minimum treshold amount from the options.
Args:
scraped_request (:class:`nyawc.http.Request`): The request that possibly reached the minimum treshold.
Returns:
bool: True if treshold reached, false otherwise.
"""
for route in self.__routing_options.routes:
if re.compile(route).match(scraped_request.url):
count_key = str(route) + scraped_request.method
if count_key in self.__routing_count.keys():
return self.__routing_count[count_key] >= self.__routing_options.minimum_threshold
return False | [
"def",
"is_treshold_reached",
"(",
"self",
",",
"scraped_request",
")",
":",
"for",
"route",
"in",
"self",
".",
"__routing_options",
".",
"routes",
":",
"if",
"re",
".",
"compile",
"(",
"route",
")",
".",
"match",
"(",
"scraped_request",
".",
"url",
")",
... | Check if similar requests to the given requests have already been crawled X times. Where X is the
minimum treshold amount from the options.
Args:
scraped_request (:class:`nyawc.http.Request`): The request that possibly reached the minimum treshold.
Returns:
bool: True if treshold reached, false otherwise. | [
"Check",
"if",
"similar",
"requests",
"to",
"the",
"given",
"requests",
"have",
"already",
"been",
"crawled",
"X",
"times",
".",
"Where",
"X",
"is",
"the",
"minimum",
"treshold",
"amount",
"from",
"the",
"options",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/Routing.py#L66-L85 | train | 212,115 |
tijme/not-your-average-web-crawler | nyawc/Queue.py | Queue.add_request | def add_request(self, request):
"""Add a request to the queue.
Args:
request (:class:`nyawc.http.Request`): The request to add.
Returns:
:class:`nyawc.QueueItem`: The created queue item.
"""
queue_item = QueueItem(request, Response(request.url))
self.add(queue_item)
return queue_item | python | def add_request(self, request):
"""Add a request to the queue.
Args:
request (:class:`nyawc.http.Request`): The request to add.
Returns:
:class:`nyawc.QueueItem`: The created queue item.
"""
queue_item = QueueItem(request, Response(request.url))
self.add(queue_item)
return queue_item | [
"def",
"add_request",
"(",
"self",
",",
"request",
")",
":",
"queue_item",
"=",
"QueueItem",
"(",
"request",
",",
"Response",
"(",
"request",
".",
"url",
")",
")",
"self",
".",
"add",
"(",
"queue_item",
")",
"return",
"queue_item"
] | Add a request to the queue.
Args:
request (:class:`nyawc.http.Request`): The request to add.
Returns:
:class:`nyawc.QueueItem`: The created queue item. | [
"Add",
"a",
"request",
"to",
"the",
"queue",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/Queue.py#L64-L77 | train | 212,116 |
tijme/not-your-average-web-crawler | nyawc/Queue.py | Queue.has_request | def has_request(self, request):
"""Check if the given request already exists in the queue.
Args:
request (:class:`nyawc.http.Request`): The request to check.
Returns:
bool: True if already exists, False otherwise.
"""
queue_item = QueueItem(request, Response(request.url))
key = queue_item.get_hash()
for status in QueueItem.STATUSES:
if key in self.__get_var("items_" + status).keys():
return True
return False | python | def has_request(self, request):
"""Check if the given request already exists in the queue.
Args:
request (:class:`nyawc.http.Request`): The request to check.
Returns:
bool: True if already exists, False otherwise.
"""
queue_item = QueueItem(request, Response(request.url))
key = queue_item.get_hash()
for status in QueueItem.STATUSES:
if key in self.__get_var("items_" + status).keys():
return True
return False | [
"def",
"has_request",
"(",
"self",
",",
"request",
")",
":",
"queue_item",
"=",
"QueueItem",
"(",
"request",
",",
"Response",
"(",
"request",
".",
"url",
")",
")",
"key",
"=",
"queue_item",
".",
"get_hash",
"(",
")",
"for",
"status",
"in",
"QueueItem",
... | Check if the given request already exists in the queue.
Args:
request (:class:`nyawc.http.Request`): The request to check.
Returns:
bool: True if already exists, False otherwise. | [
"Check",
"if",
"the",
"given",
"request",
"already",
"exists",
"in",
"the",
"queue",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/Queue.py#L79-L97 | train | 212,117 |
tijme/not-your-average-web-crawler | nyawc/Queue.py | Queue.get_first | def get_first(self, status):
"""Get the first item in the queue that has the given status.
Args:
status (str): return the first item with this status.
Returns:
:class:`nyawc.QueueItem`: The first queue item with the given status.
"""
items = self.get_all(status)
if items:
return list(items.items())[0][1]
return None | python | def get_first(self, status):
"""Get the first item in the queue that has the given status.
Args:
status (str): return the first item with this status.
Returns:
:class:`nyawc.QueueItem`: The first queue item with the given status.
"""
items = self.get_all(status)
if items:
return list(items.items())[0][1]
return None | [
"def",
"get_first",
"(",
"self",
",",
"status",
")",
":",
"items",
"=",
"self",
".",
"get_all",
"(",
"status",
")",
"if",
"items",
":",
"return",
"list",
"(",
"items",
".",
"items",
"(",
")",
")",
"[",
"0",
"]",
"[",
"1",
"]",
"return",
"None"
] | Get the first item in the queue that has the given status.
Args:
status (str): return the first item with this status.
Returns:
:class:`nyawc.QueueItem`: The first queue item with the given status. | [
"Get",
"the",
"first",
"item",
"in",
"the",
"queue",
"that",
"has",
"the",
"given",
"status",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/Queue.py#L150-L166 | train | 212,118 |
tijme/not-your-average-web-crawler | nyawc/helpers/RandomInputHelper.py | RandomInputHelper.get_for_type | def get_for_type(input_type="text"):
"""Get a random string for the given html input type
Args:
input_type (str): The input type (e.g. email).
Returns:
str: The (cached) random value.
"""
if input_type in RandomInputHelper.cache:
return RandomInputHelper.cache[input_type]
types = {
"text": RandomInputHelper.get_random_value,
"hidden": RandomInputHelper.get_random_value,
"search": RandomInputHelper.get_random_value,
"color": RandomInputHelper.get_random_color,
"week": {"function": RandomInputHelper.get_random_value, "params": [2, ["1234"]]},
"password": RandomInputHelper.get_random_password,
"number": RandomInputHelper.get_random_number,
"tel": RandomInputHelper.get_random_telephonenumber,
"url": RandomInputHelper.get_random_url,
"textarea": RandomInputHelper.get_random_text,
"email": RandomInputHelper.get_random_email
}
if types.get(input_type) is None:
return ""
if type(types.get(input_type)) is dict:
generator = types.get(input_type)
value = generator.get("function")(*generator.get("params"))
else:
value = types.get(input_type)()
RandomInputHelper.cache[input_type] = value
return value | python | def get_for_type(input_type="text"):
"""Get a random string for the given html input type
Args:
input_type (str): The input type (e.g. email).
Returns:
str: The (cached) random value.
"""
if input_type in RandomInputHelper.cache:
return RandomInputHelper.cache[input_type]
types = {
"text": RandomInputHelper.get_random_value,
"hidden": RandomInputHelper.get_random_value,
"search": RandomInputHelper.get_random_value,
"color": RandomInputHelper.get_random_color,
"week": {"function": RandomInputHelper.get_random_value, "params": [2, ["1234"]]},
"password": RandomInputHelper.get_random_password,
"number": RandomInputHelper.get_random_number,
"tel": RandomInputHelper.get_random_telephonenumber,
"url": RandomInputHelper.get_random_url,
"textarea": RandomInputHelper.get_random_text,
"email": RandomInputHelper.get_random_email
}
if types.get(input_type) is None:
return ""
if type(types.get(input_type)) is dict:
generator = types.get(input_type)
value = generator.get("function")(*generator.get("params"))
else:
value = types.get(input_type)()
RandomInputHelper.cache[input_type] = value
return value | [
"def",
"get_for_type",
"(",
"input_type",
"=",
"\"text\"",
")",
":",
"if",
"input_type",
"in",
"RandomInputHelper",
".",
"cache",
":",
"return",
"RandomInputHelper",
".",
"cache",
"[",
"input_type",
"]",
"types",
"=",
"{",
"\"text\"",
":",
"RandomInputHelper",
... | Get a random string for the given html input type
Args:
input_type (str): The input type (e.g. email).
Returns:
str: The (cached) random value. | [
"Get",
"a",
"random",
"string",
"for",
"the",
"given",
"html",
"input",
"type"
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/helpers/RandomInputHelper.py#L46-L85 | train | 212,119 |
tijme/not-your-average-web-crawler | nyawc/helpers/RandomInputHelper.py | RandomInputHelper.get_random_value | def get_random_value(length=10, character_sets=[string.ascii_uppercase, string.ascii_lowercase]):
"""Get a random string with the given length.
Args:
length (int): The length of the string to return.
character_sets list(str): The caracter sets to use.
Returns:
str: The random string.
"""
return "".join(random.choice("".join(character_sets)) for i in range(length)) | python | def get_random_value(length=10, character_sets=[string.ascii_uppercase, string.ascii_lowercase]):
"""Get a random string with the given length.
Args:
length (int): The length of the string to return.
character_sets list(str): The caracter sets to use.
Returns:
str: The random string.
"""
return "".join(random.choice("".join(character_sets)) for i in range(length)) | [
"def",
"get_random_value",
"(",
"length",
"=",
"10",
",",
"character_sets",
"=",
"[",
"string",
".",
"ascii_uppercase",
",",
"string",
".",
"ascii_lowercase",
"]",
")",
":",
"return",
"\"\"",
".",
"join",
"(",
"random",
".",
"choice",
"(",
"\"\"",
".",
"... | Get a random string with the given length.
Args:
length (int): The length of the string to return.
character_sets list(str): The caracter sets to use.
Returns:
str: The random string. | [
"Get",
"a",
"random",
"string",
"with",
"the",
"given",
"length",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/helpers/RandomInputHelper.py#L88-L100 | train | 212,120 |
tijme/not-your-average-web-crawler | nyawc/helpers/RandomInputHelper.py | RandomInputHelper.get_random_email | def get_random_email(ltd="com"):
"""Get a random email address with the given ltd.
Args:
ltd (str): The ltd to use (e.g. com).
Returns:
str: The random email.
"""
email = [
RandomInputHelper.get_random_value(6, [string.ascii_lowercase]),
"@",
RandomInputHelper.get_random_value(6, [string.ascii_lowercase]),
".",
ltd
]
return "".join(email) | python | def get_random_email(ltd="com"):
"""Get a random email address with the given ltd.
Args:
ltd (str): The ltd to use (e.g. com).
Returns:
str: The random email.
"""
email = [
RandomInputHelper.get_random_value(6, [string.ascii_lowercase]),
"@",
RandomInputHelper.get_random_value(6, [string.ascii_lowercase]),
".",
ltd
]
return "".join(email) | [
"def",
"get_random_email",
"(",
"ltd",
"=",
"\"com\"",
")",
":",
"email",
"=",
"[",
"RandomInputHelper",
".",
"get_random_value",
"(",
"6",
",",
"[",
"string",
".",
"ascii_lowercase",
"]",
")",
",",
"\"@\"",
",",
"RandomInputHelper",
".",
"get_random_value",
... | Get a random email address with the given ltd.
Args:
ltd (str): The ltd to use (e.g. com).
Returns:
str: The random email. | [
"Get",
"a",
"random",
"email",
"address",
"with",
"the",
"given",
"ltd",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/helpers/RandomInputHelper.py#L142-L161 | train | 212,121 |
tijme/not-your-average-web-crawler | nyawc/helpers/RandomInputHelper.py | RandomInputHelper.get_random_password | def get_random_password():
"""Get a random password that complies with most of the requirements.
Note:
This random password is not strong and not "really" random, and should only be
used for testing purposes.
Returns:
str: The random password.
"""
password = []
password.append(RandomInputHelper.get_random_value(4, [string.ascii_lowercase]))
password.append(RandomInputHelper.get_random_value(2, [string.digits]))
password.append(RandomInputHelper.get_random_value(2, ["$&*@!"]))
password.append(RandomInputHelper.get_random_value(4, [string.ascii_uppercase]))
return "".join(password) | python | def get_random_password():
"""Get a random password that complies with most of the requirements.
Note:
This random password is not strong and not "really" random, and should only be
used for testing purposes.
Returns:
str: The random password.
"""
password = []
password.append(RandomInputHelper.get_random_value(4, [string.ascii_lowercase]))
password.append(RandomInputHelper.get_random_value(2, [string.digits]))
password.append(RandomInputHelper.get_random_value(2, ["$&*@!"]))
password.append(RandomInputHelper.get_random_value(4, [string.ascii_uppercase]))
return "".join(password) | [
"def",
"get_random_password",
"(",
")",
":",
"password",
"=",
"[",
"]",
"password",
".",
"append",
"(",
"RandomInputHelper",
".",
"get_random_value",
"(",
"4",
",",
"[",
"string",
".",
"ascii_lowercase",
"]",
")",
")",
"password",
".",
"append",
"(",
"Rand... | Get a random password that complies with most of the requirements.
Note:
This random password is not strong and not "really" random, and should only be
used for testing purposes.
Returns:
str: The random password. | [
"Get",
"a",
"random",
"password",
"that",
"complies",
"with",
"most",
"of",
"the",
"requirements",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/helpers/RandomInputHelper.py#L164-L183 | train | 212,122 |
tijme/not-your-average-web-crawler | nyawc/helpers/RandomInputHelper.py | RandomInputHelper.get_random_url | def get_random_url(ltd="com"):
"""Get a random url with the given ltd.
Args:
ltd (str): The ltd to use (e.g. com).
Returns:
str: The random url.
"""
url = [
"https://",
RandomInputHelper.get_random_value(8, [string.ascii_lowercase]),
".",
ltd
]
return "".join(url) | python | def get_random_url(ltd="com"):
"""Get a random url with the given ltd.
Args:
ltd (str): The ltd to use (e.g. com).
Returns:
str: The random url.
"""
url = [
"https://",
RandomInputHelper.get_random_value(8, [string.ascii_lowercase]),
".",
ltd
]
return "".join(url) | [
"def",
"get_random_url",
"(",
"ltd",
"=",
"\"com\"",
")",
":",
"url",
"=",
"[",
"\"https://\"",
",",
"RandomInputHelper",
".",
"get_random_value",
"(",
"8",
",",
"[",
"string",
".",
"ascii_lowercase",
"]",
")",
",",
"\".\"",
",",
"ltd",
"]",
"return",
"\... | Get a random url with the given ltd.
Args:
ltd (str): The ltd to use (e.g. com).
Returns:
str: The random url. | [
"Get",
"a",
"random",
"url",
"with",
"the",
"given",
"ltd",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/helpers/RandomInputHelper.py#L186-L204 | train | 212,123 |
tijme/not-your-average-web-crawler | nyawc/helpers/RandomInputHelper.py | RandomInputHelper.get_random_telephonenumber | def get_random_telephonenumber():
"""Get a random 10 digit phone number that complies with most of the requirements.
Returns:
str: The random telephone number.
"""
phone = [
RandomInputHelper.get_random_value(3, "123456789"),
RandomInputHelper.get_random_value(3, "12345678"),
"".join(map(str, random.sample(range(10), 4)))
]
return "-".join(phone) | python | def get_random_telephonenumber():
"""Get a random 10 digit phone number that complies with most of the requirements.
Returns:
str: The random telephone number.
"""
phone = [
RandomInputHelper.get_random_value(3, "123456789"),
RandomInputHelper.get_random_value(3, "12345678"),
"".join(map(str, random.sample(range(10), 4)))
]
return "-".join(phone) | [
"def",
"get_random_telephonenumber",
"(",
")",
":",
"phone",
"=",
"[",
"RandomInputHelper",
".",
"get_random_value",
"(",
"3",
",",
"\"123456789\"",
")",
",",
"RandomInputHelper",
".",
"get_random_value",
"(",
"3",
",",
"\"12345678\"",
")",
",",
"\"\"",
".",
"... | Get a random 10 digit phone number that complies with most of the requirements.
Returns:
str: The random telephone number. | [
"Get",
"a",
"random",
"10",
"digit",
"phone",
"number",
"that",
"complies",
"with",
"most",
"of",
"the",
"requirements",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/helpers/RandomInputHelper.py#L207-L221 | train | 212,124 |
tijme/not-your-average-web-crawler | nyawc/helpers/HTTPRequestHelper.py | HTTPRequestHelper.complies_with_scope | def complies_with_scope(queue_item, new_request, scope):
"""Check if the new request complies with the crawling scope.
Args:
queue_item (:class:`nyawc.QueueItem`): The parent queue item of the new request.
new_request (:class:`nyawc.http.Request`): The request to check.
scope (:class:`nyawc.Options.OptionsScope`): The scope to check.
Returns:
bool: True if it complies, False otherwise.
"""
if not URLHelper.is_parsable(queue_item.request.url):
return False
if not URLHelper.is_parsable(new_request.url):
return False
if scope.request_methods:
if not queue_item.request.method in scope.request_methods:
return False
if scope.protocol_must_match:
if URLHelper.get_protocol(queue_item.request.url) != URLHelper.get_protocol(new_request.url):
return False
if scope.subdomain_must_match:
current_subdomain = URLHelper.get_subdomain(queue_item.request.url)
new_subdomain = URLHelper.get_subdomain(new_request.url)
www_matches = False
if current_subdomain == "www" and new_subdomain == "":
www_matches = True
if new_subdomain == "www" and current_subdomain == "":
www_matches = True
if not www_matches and current_subdomain != new_subdomain:
return False
if scope.hostname_must_match:
if URLHelper.get_hostname(queue_item.request.url) != URLHelper.get_hostname(new_request.url):
return False
if scope.tld_must_match:
if URLHelper.get_tld(queue_item.request.url) != URLHelper.get_tld(new_request.url):
return False
return True | python | def complies_with_scope(queue_item, new_request, scope):
"""Check if the new request complies with the crawling scope.
Args:
queue_item (:class:`nyawc.QueueItem`): The parent queue item of the new request.
new_request (:class:`nyawc.http.Request`): The request to check.
scope (:class:`nyawc.Options.OptionsScope`): The scope to check.
Returns:
bool: True if it complies, False otherwise.
"""
if not URLHelper.is_parsable(queue_item.request.url):
return False
if not URLHelper.is_parsable(new_request.url):
return False
if scope.request_methods:
if not queue_item.request.method in scope.request_methods:
return False
if scope.protocol_must_match:
if URLHelper.get_protocol(queue_item.request.url) != URLHelper.get_protocol(new_request.url):
return False
if scope.subdomain_must_match:
current_subdomain = URLHelper.get_subdomain(queue_item.request.url)
new_subdomain = URLHelper.get_subdomain(new_request.url)
www_matches = False
if current_subdomain == "www" and new_subdomain == "":
www_matches = True
if new_subdomain == "www" and current_subdomain == "":
www_matches = True
if not www_matches and current_subdomain != new_subdomain:
return False
if scope.hostname_must_match:
if URLHelper.get_hostname(queue_item.request.url) != URLHelper.get_hostname(new_request.url):
return False
if scope.tld_must_match:
if URLHelper.get_tld(queue_item.request.url) != URLHelper.get_tld(new_request.url):
return False
return True | [
"def",
"complies_with_scope",
"(",
"queue_item",
",",
"new_request",
",",
"scope",
")",
":",
"if",
"not",
"URLHelper",
".",
"is_parsable",
"(",
"queue_item",
".",
"request",
".",
"url",
")",
":",
"return",
"False",
"if",
"not",
"URLHelper",
".",
"is_parsable... | Check if the new request complies with the crawling scope.
Args:
queue_item (:class:`nyawc.QueueItem`): The parent queue item of the new request.
new_request (:class:`nyawc.http.Request`): The request to check.
scope (:class:`nyawc.Options.OptionsScope`): The scope to check.
Returns:
bool: True if it complies, False otherwise. | [
"Check",
"if",
"the",
"new",
"request",
"complies",
"with",
"the",
"crawling",
"scope",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/helpers/HTTPRequestHelper.py#L62-L112 | train | 212,125 |
tijme/not-your-average-web-crawler | nyawc/helpers/HTTPRequestHelper.py | HTTPRequestHelper.get_cookie_header | def get_cookie_header(queue_item):
"""Convert a requests cookie jar to a HTTP request cookie header value.
Args:
queue_item (:class:`nyawc.QueueItem`): The parent queue item of the new request.
Returns:
str: The HTTP cookie header value.
"""
header = []
path = URLHelper.get_path(queue_item.request.url)
for cookie in queue_item.request.cookies:
root_path = cookie.path == "" or cookie.path == "/"
if path.startswith(cookie.path) or root_path:
header.append(cookie.name + "=" + cookie.value)
return "&".join(header) | python | def get_cookie_header(queue_item):
"""Convert a requests cookie jar to a HTTP request cookie header value.
Args:
queue_item (:class:`nyawc.QueueItem`): The parent queue item of the new request.
Returns:
str: The HTTP cookie header value.
"""
header = []
path = URLHelper.get_path(queue_item.request.url)
for cookie in queue_item.request.cookies:
root_path = cookie.path == "" or cookie.path == "/"
if path.startswith(cookie.path) or root_path:
header.append(cookie.name + "=" + cookie.value)
return "&".join(header) | [
"def",
"get_cookie_header",
"(",
"queue_item",
")",
":",
"header",
"=",
"[",
"]",
"path",
"=",
"URLHelper",
".",
"get_path",
"(",
"queue_item",
".",
"request",
".",
"url",
")",
"for",
"cookie",
"in",
"queue_item",
".",
"request",
".",
"cookies",
":",
"ro... | Convert a requests cookie jar to a HTTP request cookie header value.
Args:
queue_item (:class:`nyawc.QueueItem`): The parent queue item of the new request.
Returns:
str: The HTTP cookie header value. | [
"Convert",
"a",
"requests",
"cookie",
"jar",
"to",
"a",
"HTTP",
"request",
"cookie",
"header",
"value",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/helpers/HTTPRequestHelper.py#L115-L134 | train | 212,126 |
tijme/not-your-average-web-crawler | nyawc/QueueItem.py | QueueItem.get_soup_response | def get_soup_response(self):
"""Get the response as a cached BeautifulSoup container.
Returns:
obj: The BeautifulSoup container.
"""
if self.response is not None:
if self.__response_soup is None:
result = BeautifulSoup(self.response.text, "lxml")
if self.decomposed:
return result
else:
self.__response_soup = BeautifulSoup(self.response.text, "lxml")
return self.__response_soup | python | def get_soup_response(self):
"""Get the response as a cached BeautifulSoup container.
Returns:
obj: The BeautifulSoup container.
"""
if self.response is not None:
if self.__response_soup is None:
result = BeautifulSoup(self.response.text, "lxml")
if self.decomposed:
return result
else:
self.__response_soup = BeautifulSoup(self.response.text, "lxml")
return self.__response_soup | [
"def",
"get_soup_response",
"(",
"self",
")",
":",
"if",
"self",
".",
"response",
"is",
"not",
"None",
":",
"if",
"self",
".",
"__response_soup",
"is",
"None",
":",
"result",
"=",
"BeautifulSoup",
"(",
"self",
".",
"response",
".",
"text",
",",
"\"lxml\"... | Get the response as a cached BeautifulSoup container.
Returns:
obj: The BeautifulSoup container. | [
"Get",
"the",
"response",
"as",
"a",
"cached",
"BeautifulSoup",
"container",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/QueueItem.py#L86-L103 | train | 212,127 |
tijme/not-your-average-web-crawler | nyawc/QueueItem.py | QueueItem.get_hash | def get_hash(self):
"""Generate and return the dict index hash of the given queue item.
Note:
Cookies should not be included in the hash calculation because
otherwise requests are crawled multiple times with e.g. different
session keys, causing infinite crawling recursion.
Note:
At this moment the keys do not actually get hashed since it works perfectly without and
since hashing the keys requires us to built hash collision management.
Returns:
str: The hash of the given queue item.
"""
if self.__index_hash:
return self.__index_hash
key = self.request.method
key += URLHelper.get_protocol(self.request.url)
key += URLHelper.get_subdomain(self.request.url)
key += URLHelper.get_hostname(self.request.url)
key += URLHelper.get_tld(self.request.url)
key += URLHelper.get_path(self.request.url)
key += str(URLHelper.get_ordered_params(self.request.url))
if self.request.data is not None:
key += str(self.request.data.keys())
self.__index_hash = key
return self.__index_hash | python | def get_hash(self):
"""Generate and return the dict index hash of the given queue item.
Note:
Cookies should not be included in the hash calculation because
otherwise requests are crawled multiple times with e.g. different
session keys, causing infinite crawling recursion.
Note:
At this moment the keys do not actually get hashed since it works perfectly without and
since hashing the keys requires us to built hash collision management.
Returns:
str: The hash of the given queue item.
"""
if self.__index_hash:
return self.__index_hash
key = self.request.method
key += URLHelper.get_protocol(self.request.url)
key += URLHelper.get_subdomain(self.request.url)
key += URLHelper.get_hostname(self.request.url)
key += URLHelper.get_tld(self.request.url)
key += URLHelper.get_path(self.request.url)
key += str(URLHelper.get_ordered_params(self.request.url))
if self.request.data is not None:
key += str(self.request.data.keys())
self.__index_hash = key
return self.__index_hash | [
"def",
"get_hash",
"(",
"self",
")",
":",
"if",
"self",
".",
"__index_hash",
":",
"return",
"self",
".",
"__index_hash",
"key",
"=",
"self",
".",
"request",
".",
"method",
"key",
"+=",
"URLHelper",
".",
"get_protocol",
"(",
"self",
".",
"request",
".",
... | Generate and return the dict index hash of the given queue item.
Note:
Cookies should not be included in the hash calculation because
otherwise requests are crawled multiple times with e.g. different
session keys, causing infinite crawling recursion.
Note:
At this moment the keys do not actually get hashed since it works perfectly without and
since hashing the keys requires us to built hash collision management.
Returns:
str: The hash of the given queue item. | [
"Generate",
"and",
"return",
"the",
"dict",
"index",
"hash",
"of",
"the",
"given",
"queue",
"item",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/QueueItem.py#L118-L152 | train | 212,128 |
tijme/not-your-average-web-crawler | nyawc/scrapers/HTMLSoupFormScraper.py | HTMLSoupFormScraper.__get_request | def __get_request(self, host, soup):
"""Build a request from the given soup form.
Args:
host str: The URL of the current queue item.
soup (obj): The BeautifulSoup form.
Returns:
:class:`nyawc.http.Request`: The new Request.
"""
url = URLHelper.make_absolute(host, self.__trim_grave_accent(soup["action"])) if soup.has_attr("action") else host
method_original = soup["method"] if soup.has_attr("method") else "get"
method = "post" if method_original.lower() == "post" else "get"
data = self.__get_form_data(soup)
return Request(url, method, data) | python | def __get_request(self, host, soup):
"""Build a request from the given soup form.
Args:
host str: The URL of the current queue item.
soup (obj): The BeautifulSoup form.
Returns:
:class:`nyawc.http.Request`: The new Request.
"""
url = URLHelper.make_absolute(host, self.__trim_grave_accent(soup["action"])) if soup.has_attr("action") else host
method_original = soup["method"] if soup.has_attr("method") else "get"
method = "post" if method_original.lower() == "post" else "get"
data = self.__get_form_data(soup)
return Request(url, method, data) | [
"def",
"__get_request",
"(",
"self",
",",
"host",
",",
"soup",
")",
":",
"url",
"=",
"URLHelper",
".",
"make_absolute",
"(",
"host",
",",
"self",
".",
"__trim_grave_accent",
"(",
"soup",
"[",
"\"action\"",
"]",
")",
")",
"if",
"soup",
".",
"has_attr",
... | Build a request from the given soup form.
Args:
host str: The URL of the current queue item.
soup (obj): The BeautifulSoup form.
Returns:
:class:`nyawc.http.Request`: The new Request. | [
"Build",
"a",
"request",
"from",
"the",
"given",
"soup",
"form",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/scrapers/HTMLSoupFormScraper.py#L63-L80 | train | 212,129 |
tijme/not-your-average-web-crawler | nyawc/scrapers/HTMLSoupFormScraper.py | HTMLSoupFormScraper.__get_form_data | def __get_form_data(self, soup):
"""Build a form data dict from the given form.
Args:
soup (obj): The BeautifulSoup form.
Returns:
obj: The form data (key/value).
"""
elements = self.__get_valid_form_data_elements(soup)
form_data = self.__get_default_form_data_input(elements)
callback = self.options.callbacks.form_before_autofill
action = callback(self.queue_item, elements, form_data)
if action == CrawlerActions.DO_AUTOFILL_FORM:
self.__autofill_form_data(form_data, elements)
return form_data | python | def __get_form_data(self, soup):
"""Build a form data dict from the given form.
Args:
soup (obj): The BeautifulSoup form.
Returns:
obj: The form data (key/value).
"""
elements = self.__get_valid_form_data_elements(soup)
form_data = self.__get_default_form_data_input(elements)
callback = self.options.callbacks.form_before_autofill
action = callback(self.queue_item, elements, form_data)
if action == CrawlerActions.DO_AUTOFILL_FORM:
self.__autofill_form_data(form_data, elements)
return form_data | [
"def",
"__get_form_data",
"(",
"self",
",",
"soup",
")",
":",
"elements",
"=",
"self",
".",
"__get_valid_form_data_elements",
"(",
"soup",
")",
"form_data",
"=",
"self",
".",
"__get_default_form_data_input",
"(",
"elements",
")",
"callback",
"=",
"self",
".",
... | Build a form data dict from the given form.
Args:
soup (obj): The BeautifulSoup form.
Returns:
obj: The form data (key/value). | [
"Build",
"a",
"form",
"data",
"dict",
"from",
"the",
"given",
"form",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/scrapers/HTMLSoupFormScraper.py#L102-L121 | train | 212,130 |
tijme/not-your-average-web-crawler | nyawc/scrapers/HTMLSoupFormScraper.py | HTMLSoupFormScraper.__get_valid_form_data_elements | def __get_valid_form_data_elements(self, soup):
"""Get all valid form input elements.
Note:
An element is valid when the value can be updated client-side
and the element has a name attribute.
Args:
soup (obj): The BeautifulSoup form.
Returns:
list(obj): Soup elements.
"""
elements = []
for element in soup.find_all(["input", "button", "textarea", "select"]):
if element.has_attr("name"):
elements.append(element)
return elements | python | def __get_valid_form_data_elements(self, soup):
"""Get all valid form input elements.
Note:
An element is valid when the value can be updated client-side
and the element has a name attribute.
Args:
soup (obj): The BeautifulSoup form.
Returns:
list(obj): Soup elements.
"""
elements = []
for element in soup.find_all(["input", "button", "textarea", "select"]):
if element.has_attr("name"):
elements.append(element)
return elements | [
"def",
"__get_valid_form_data_elements",
"(",
"self",
",",
"soup",
")",
":",
"elements",
"=",
"[",
"]",
"for",
"element",
"in",
"soup",
".",
"find_all",
"(",
"[",
"\"input\"",
",",
"\"button\"",
",",
"\"textarea\"",
",",
"\"select\"",
"]",
")",
":",
"if",
... | Get all valid form input elements.
Note:
An element is valid when the value can be updated client-side
and the element has a name attribute.
Args:
soup (obj): The BeautifulSoup form.
Returns:
list(obj): Soup elements. | [
"Get",
"all",
"valid",
"form",
"input",
"elements",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/scrapers/HTMLSoupFormScraper.py#L123-L144 | train | 212,131 |
tijme/not-your-average-web-crawler | nyawc/scrapers/HTMLSoupFormScraper.py | HTMLSoupFormScraper.__autofill_form_data | def __autofill_form_data(self, form_data, elements):
"""Autofill empty form data with random data.
Args:
form_data (obj): The {key: value} form data
elements list(obj): Soup elements.
Returns:
obj: The {key: value}
"""
for element in elements:
if not element["name"] in form_data:
continue
if not len(form_data[element["name"]]) is 0:
continue
if element.name == "textarea":
form_data[element["name"]] = RandomInputHelper.get_for_type("textarea")
continue
if element.has_attr("type"):
form_data[element["name"]] = RandomInputHelper.get_for_type(element["type"]) | python | def __autofill_form_data(self, form_data, elements):
"""Autofill empty form data with random data.
Args:
form_data (obj): The {key: value} form data
elements list(obj): Soup elements.
Returns:
obj: The {key: value}
"""
for element in elements:
if not element["name"] in form_data:
continue
if not len(form_data[element["name"]]) is 0:
continue
if element.name == "textarea":
form_data[element["name"]] = RandomInputHelper.get_for_type("textarea")
continue
if element.has_attr("type"):
form_data[element["name"]] = RandomInputHelper.get_for_type(element["type"]) | [
"def",
"__autofill_form_data",
"(",
"self",
",",
"form_data",
",",
"elements",
")",
":",
"for",
"element",
"in",
"elements",
":",
"if",
"not",
"element",
"[",
"\"name\"",
"]",
"in",
"form_data",
":",
"continue",
"if",
"not",
"len",
"(",
"form_data",
"[",
... | Autofill empty form data with random data.
Args:
form_data (obj): The {key: value} form data
elements list(obj): Soup elements.
Returns:
obj: The {key: value} | [
"Autofill",
"empty",
"form",
"data",
"with",
"random",
"data",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/scrapers/HTMLSoupFormScraper.py#L169-L193 | train | 212,132 |
tijme/not-your-average-web-crawler | nyawc/scrapers/HTMLSoupFormScraper.py | HTMLSoupFormScraper.__get_default_value_from_element | def __get_default_value_from_element(self, element):
"""Get the default value of a form element
Args:
elements (obj): The soup element.
Returns:
str: The default value
"""
if element.name == "select":
options = element.find_all("option")
is_multiple = element.has_attr("multiple")
selected_options = [
option for option in options
if option.has_attr("selected")
]
if not selected_options and options:
selected_options = [options[0]]
selected_values = []
if is_multiple:
for option in selected_options:
value = option["value"] if option.has_attr("value") else option.string
selected_values.append(value)
return selected_values
elif len(selected_options) >= 1:
if selected_options[0].has_attr("value"):
return selected_options[0]["value"]
else:
return selected_options[0].string
return ""
if element.name == "textarea":
return element.string if element.string is not None else ""
if element.name == "input" and element.has_attr("type"):
if element["type"] in ("checkbox", "radio"):
if not element.has_attr("checked"):
return False
if element.has_attr("value"):
return element["value"]
else:
return "on"
if element.has_attr("value"):
return element["value"]
return "" | python | def __get_default_value_from_element(self, element):
"""Get the default value of a form element
Args:
elements (obj): The soup element.
Returns:
str: The default value
"""
if element.name == "select":
options = element.find_all("option")
is_multiple = element.has_attr("multiple")
selected_options = [
option for option in options
if option.has_attr("selected")
]
if not selected_options and options:
selected_options = [options[0]]
selected_values = []
if is_multiple:
for option in selected_options:
value = option["value"] if option.has_attr("value") else option.string
selected_values.append(value)
return selected_values
elif len(selected_options) >= 1:
if selected_options[0].has_attr("value"):
return selected_options[0]["value"]
else:
return selected_options[0].string
return ""
if element.name == "textarea":
return element.string if element.string is not None else ""
if element.name == "input" and element.has_attr("type"):
if element["type"] in ("checkbox", "radio"):
if not element.has_attr("checked"):
return False
if element.has_attr("value"):
return element["value"]
else:
return "on"
if element.has_attr("value"):
return element["value"]
return "" | [
"def",
"__get_default_value_from_element",
"(",
"self",
",",
"element",
")",
":",
"if",
"element",
".",
"name",
"==",
"\"select\"",
":",
"options",
"=",
"element",
".",
"find_all",
"(",
"\"option\"",
")",
"is_multiple",
"=",
"element",
".",
"has_attr",
"(",
... | Get the default value of a form element
Args:
elements (obj): The soup element.
Returns:
str: The default value | [
"Get",
"the",
"default",
"value",
"of",
"a",
"form",
"element"
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/scrapers/HTMLSoupFormScraper.py#L195-L250 | train | 212,133 |
tijme/not-your-average-web-crawler | nyawc/helpers/URLHelper.py | URLHelper.append_with_data | def append_with_data(url, data):
"""Append the given URL with the given data OrderedDict.
Args:
url (str): The URL to append.
data (obj): The key value OrderedDict to append to the URL.
Returns:
str: The new URL.
"""
if data is None:
return url
url_parts = list(urlparse(url))
query = OrderedDict(parse_qsl(url_parts[4], keep_blank_values=True))
query.update(data)
url_parts[4] = URLHelper.query_dict_to_string(query)
return urlunparse(url_parts) | python | def append_with_data(url, data):
"""Append the given URL with the given data OrderedDict.
Args:
url (str): The URL to append.
data (obj): The key value OrderedDict to append to the URL.
Returns:
str: The new URL.
"""
if data is None:
return url
url_parts = list(urlparse(url))
query = OrderedDict(parse_qsl(url_parts[4], keep_blank_values=True))
query.update(data)
url_parts[4] = URLHelper.query_dict_to_string(query)
return urlunparse(url_parts) | [
"def",
"append_with_data",
"(",
"url",
",",
"data",
")",
":",
"if",
"data",
"is",
"None",
":",
"return",
"url",
"url_parts",
"=",
"list",
"(",
"urlparse",
"(",
"url",
")",
")",
"query",
"=",
"OrderedDict",
"(",
"parse_qsl",
"(",
"url_parts",
"[",
"4",
... | Append the given URL with the given data OrderedDict.
Args:
url (str): The URL to append.
data (obj): The key value OrderedDict to append to the URL.
Returns:
str: The new URL. | [
"Append",
"the",
"given",
"URL",
"with",
"the",
"given",
"data",
"OrderedDict",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/helpers/URLHelper.py#L69-L91 | train | 212,134 |
tijme/not-your-average-web-crawler | nyawc/helpers/URLHelper.py | URLHelper.get_subdomain | def get_subdomain(url):
"""Get the subdomain of the given URL.
Args:
url (str): The URL to get the subdomain from.
Returns:
str: The subdomain(s)
"""
if url not in URLHelper.__cache:
URLHelper.__cache[url] = urlparse(url)
return ".".join(URLHelper.__cache[url].netloc.split(".")[:-2]) | python | def get_subdomain(url):
"""Get the subdomain of the given URL.
Args:
url (str): The URL to get the subdomain from.
Returns:
str: The subdomain(s)
"""
if url not in URLHelper.__cache:
URLHelper.__cache[url] = urlparse(url)
return ".".join(URLHelper.__cache[url].netloc.split(".")[:-2]) | [
"def",
"get_subdomain",
"(",
"url",
")",
":",
"if",
"url",
"not",
"in",
"URLHelper",
".",
"__cache",
":",
"URLHelper",
".",
"__cache",
"[",
"url",
"]",
"=",
"urlparse",
"(",
"url",
")",
"return",
"\".\"",
".",
"join",
"(",
"URLHelper",
".",
"__cache",
... | Get the subdomain of the given URL.
Args:
url (str): The URL to get the subdomain from.
Returns:
str: The subdomain(s) | [
"Get",
"the",
"subdomain",
"of",
"the",
"given",
"URL",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/helpers/URLHelper.py#L144-L158 | train | 212,135 |
tijme/not-your-average-web-crawler | nyawc/helpers/URLHelper.py | URLHelper.get_hostname | def get_hostname(url):
"""Get the hostname of the given URL.
Args:
url (str): The URL to get the hostname from.
Returns:
str: The hostname
"""
if url not in URLHelper.__cache:
URLHelper.__cache[url] = urlparse(url)
parts = URLHelper.__cache[url].netloc.split(".")
if len(parts) == 1:
return parts[0]
else:
return ".".join(parts[-2:-1]) | python | def get_hostname(url):
"""Get the hostname of the given URL.
Args:
url (str): The URL to get the hostname from.
Returns:
str: The hostname
"""
if url not in URLHelper.__cache:
URLHelper.__cache[url] = urlparse(url)
parts = URLHelper.__cache[url].netloc.split(".")
if len(parts) == 1:
return parts[0]
else:
return ".".join(parts[-2:-1]) | [
"def",
"get_hostname",
"(",
"url",
")",
":",
"if",
"url",
"not",
"in",
"URLHelper",
".",
"__cache",
":",
"URLHelper",
".",
"__cache",
"[",
"url",
"]",
"=",
"urlparse",
"(",
"url",
")",
"parts",
"=",
"URLHelper",
".",
"__cache",
"[",
"url",
"]",
".",
... | Get the hostname of the given URL.
Args:
url (str): The URL to get the hostname from.
Returns:
str: The hostname | [
"Get",
"the",
"hostname",
"of",
"the",
"given",
"URL",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/helpers/URLHelper.py#L161-L180 | train | 212,136 |
tijme/not-your-average-web-crawler | nyawc/helpers/URLHelper.py | URLHelper.get_tld | def get_tld(url):
"""Get the tld of the given URL.
Args:
url (str): The URL to get the tld from.
Returns:
str: The tld
"""
if url not in URLHelper.__cache:
URLHelper.__cache[url] = urlparse(url)
parts = URLHelper.__cache[url].netloc.split(".")
if len(parts) == 1:
return ""
else:
return parts[-1] | python | def get_tld(url):
"""Get the tld of the given URL.
Args:
url (str): The URL to get the tld from.
Returns:
str: The tld
"""
if url not in URLHelper.__cache:
URLHelper.__cache[url] = urlparse(url)
parts = URLHelper.__cache[url].netloc.split(".")
if len(parts) == 1:
return ""
else:
return parts[-1] | [
"def",
"get_tld",
"(",
"url",
")",
":",
"if",
"url",
"not",
"in",
"URLHelper",
".",
"__cache",
":",
"URLHelper",
".",
"__cache",
"[",
"url",
"]",
"=",
"urlparse",
"(",
"url",
")",
"parts",
"=",
"URLHelper",
".",
"__cache",
"[",
"url",
"]",
".",
"ne... | Get the tld of the given URL.
Args:
url (str): The URL to get the tld from.
Returns:
str: The tld | [
"Get",
"the",
"tld",
"of",
"the",
"given",
"URL",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/helpers/URLHelper.py#L183-L202 | train | 212,137 |
tijme/not-your-average-web-crawler | nyawc/helpers/URLHelper.py | URLHelper.get_ordered_params | def get_ordered_params(url):
"""Get the query parameters of the given URL in alphabetical order.
Args:
url (str): The URL to get the query parameters from.
Returns:
str: The query parameters
"""
if url not in URLHelper.__cache:
URLHelper.__cache[url] = urlparse(url)
params = URLHelper.query_string_to_dict(URLHelper.__cache[url].query)
return OrderedDict(sorted(params.items())) | python | def get_ordered_params(url):
"""Get the query parameters of the given URL in alphabetical order.
Args:
url (str): The URL to get the query parameters from.
Returns:
str: The query parameters
"""
if url not in URLHelper.__cache:
URLHelper.__cache[url] = urlparse(url)
params = URLHelper.query_string_to_dict(URLHelper.__cache[url].query)
return OrderedDict(sorted(params.items())) | [
"def",
"get_ordered_params",
"(",
"url",
")",
":",
"if",
"url",
"not",
"in",
"URLHelper",
".",
"__cache",
":",
"URLHelper",
".",
"__cache",
"[",
"url",
"]",
"=",
"urlparse",
"(",
"url",
")",
"params",
"=",
"URLHelper",
".",
"query_string_to_dict",
"(",
"... | Get the query parameters of the given URL in alphabetical order.
Args:
url (str): The URL to get the query parameters from.
Returns:
str: The query parameters | [
"Get",
"the",
"query",
"parameters",
"of",
"the",
"given",
"URL",
"in",
"alphabetical",
"order",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/helpers/URLHelper.py#L222-L238 | train | 212,138 |
tijme/not-your-average-web-crawler | nyawc/helpers/URLHelper.py | URLHelper.query_dict_to_string | def query_dict_to_string(query):
"""Convert an OrderedDict to a query string.
Args:
query (obj): The key value object with query params.
Returns:
str: The query string.
Note:
This method does the same as urllib.parse.urlencode except
that it doesn't actually encode the values.
"""
query_params = []
for key, value in query.items():
query_params.append(key + "=" + value)
return "&".join(query_params) | python | def query_dict_to_string(query):
"""Convert an OrderedDict to a query string.
Args:
query (obj): The key value object with query params.
Returns:
str: The query string.
Note:
This method does the same as urllib.parse.urlencode except
that it doesn't actually encode the values.
"""
query_params = []
for key, value in query.items():
query_params.append(key + "=" + value)
return "&".join(query_params) | [
"def",
"query_dict_to_string",
"(",
"query",
")",
":",
"query_params",
"=",
"[",
"]",
"for",
"key",
",",
"value",
"in",
"query",
".",
"items",
"(",
")",
":",
"query_params",
".",
"append",
"(",
"key",
"+",
"\"=\"",
"+",
"value",
")",
"return",
"\"&\"",... | Convert an OrderedDict to a query string.
Args:
query (obj): The key value object with query params.
Returns:
str: The query string.
Note:
This method does the same as urllib.parse.urlencode except
that it doesn't actually encode the values. | [
"Convert",
"an",
"OrderedDict",
"to",
"a",
"query",
"string",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/helpers/URLHelper.py#L255-L275 | train | 212,139 |
tijme/not-your-average-web-crawler | nyawc/helpers/URLHelper.py | URLHelper.query_string_to_dict | def query_string_to_dict(query):
"""Convert a string to a query dict.
Args:
query (str): The query string.
Returns:
obj: The key value object with query params.
Note:
This method does the same as urllib.parse.parse_qsl except
that it doesn't actually decode the values.
"""
query_params = {}
for key_value in query.split("&"):
key_value_pair = key_value.split("=", 1)
key = key_value_pair[0] if len(key_value_pair) >= 1 else ""
value = key_value_pair[1] if len(key_value_pair) == 2 else ""
query_params[key] = value
return query_params | python | def query_string_to_dict(query):
"""Convert a string to a query dict.
Args:
query (str): The query string.
Returns:
obj: The key value object with query params.
Note:
This method does the same as urllib.parse.parse_qsl except
that it doesn't actually decode the values.
"""
query_params = {}
for key_value in query.split("&"):
key_value_pair = key_value.split("=", 1)
key = key_value_pair[0] if len(key_value_pair) >= 1 else ""
value = key_value_pair[1] if len(key_value_pair) == 2 else ""
query_params[key] = value
return query_params | [
"def",
"query_string_to_dict",
"(",
"query",
")",
":",
"query_params",
"=",
"{",
"}",
"for",
"key_value",
"in",
"query",
".",
"split",
"(",
"\"&\"",
")",
":",
"key_value_pair",
"=",
"key_value",
".",
"split",
"(",
"\"=\"",
",",
"1",
")",
"key",
"=",
"k... | Convert a string to a query dict.
Args:
query (str): The query string.
Returns:
obj: The key value object with query params.
Note:
This method does the same as urllib.parse.parse_qsl except
that it doesn't actually decode the values. | [
"Convert",
"a",
"string",
"to",
"a",
"query",
"dict",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/helpers/URLHelper.py#L278-L303 | train | 212,140 |
tijme/not-your-average-web-crawler | nyawc/helpers/PackageHelper.py | PackageHelper.get_version | def get_version():
"""Get the version number of this package.
Returns:
str: The version number (marjor.minor.patch).
Note:
When this package is installed, the version number will be available through the
package resource details. Otherwise this method will look for a ``.semver`` file.
Note:
In rare cases corrupt installs can cause the version number to be unknown. In this case
the version number will be set to the string "Unknown".
"""
if PackageHelper.__version:
return PackageHelper.__version
PackageHelper.__version = "Unknown"
# If this is a GIT clone without install, use the ``.semver`` file.
file = os.path.realpath(__file__)
folder = os.path.dirname(file)
try:
semver = open(folder + "/../../.semver", "r")
PackageHelper.__version = semver.read().rstrip()
semver.close()
return PackageHelper.__version
except:
pass
# If the package was installed, get the version number via Python's distribution details.
try:
distribution = pkg_resources.get_distribution(PackageHelper.get_alias())
if distribution.version:
PackageHelper.__version = distribution.version
return PackageHelper.__version
except:
pass
return PackageHelper.__version | python | def get_version():
"""Get the version number of this package.
Returns:
str: The version number (marjor.minor.patch).
Note:
When this package is installed, the version number will be available through the
package resource details. Otherwise this method will look for a ``.semver`` file.
Note:
In rare cases corrupt installs can cause the version number to be unknown. In this case
the version number will be set to the string "Unknown".
"""
if PackageHelper.__version:
return PackageHelper.__version
PackageHelper.__version = "Unknown"
# If this is a GIT clone without install, use the ``.semver`` file.
file = os.path.realpath(__file__)
folder = os.path.dirname(file)
try:
semver = open(folder + "/../../.semver", "r")
PackageHelper.__version = semver.read().rstrip()
semver.close()
return PackageHelper.__version
except:
pass
# If the package was installed, get the version number via Python's distribution details.
try:
distribution = pkg_resources.get_distribution(PackageHelper.get_alias())
if distribution.version:
PackageHelper.__version = distribution.version
return PackageHelper.__version
except:
pass
return PackageHelper.__version | [
"def",
"get_version",
"(",
")",
":",
"if",
"PackageHelper",
".",
"__version",
":",
"return",
"PackageHelper",
".",
"__version",
"PackageHelper",
".",
"__version",
"=",
"\"Unknown\"",
"# If this is a GIT clone without install, use the ``.semver`` file.",
"file",
"=",
"os",... | Get the version number of this package.
Returns:
str: The version number (marjor.minor.patch).
Note:
When this package is installed, the version number will be available through the
package resource details. Otherwise this method will look for a ``.semver`` file.
Note:
In rare cases corrupt installs can cause the version number to be unknown. In this case
the version number will be set to the string "Unknown". | [
"Get",
"the",
"version",
"number",
"of",
"this",
"package",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/helpers/PackageHelper.py#L82-L124 | train | 212,141 |
tijme/not-your-average-web-crawler | nyawc/CrawlerThread.py | CrawlerThread.run | def run(self):
"""Executes the HTTP call.
Note:
If this and the parent handler raised an error, the queue item status
will be set to errored instead of finished. This is to prevent e.g. 404
recursion.
"""
try:
self.__options.callbacks.request_in_thread_before_start(self.__queue_item)
except Exception as e:
print(e)
new_requests = []
failed = False
try:
handler = Handler(self.__options, self.__queue_item)
new_requests = handler.get_new_requests()
try:
self.__queue_item.response.raise_for_status()
except Exception:
if self.__queue_item.request.parent_raised_error:
failed = True
else:
for new_request in new_requests:
new_request.parent_raised_error = True
except Exception as e:
failed = True
error_message = "Setting status of '{}' to '{}' because of an HTTP error.".format(
self.__queue_item.request.url,
QueueItem.STATUS_ERRORED
)
DebugHelper.output(self.__options, error_message)
DebugHelper.output(self.__options, e)
try:
self.__options.callbacks.request_on_error(self.__queue_item, str(e))
except Exception as e:
print(e)
for new_request in new_requests:
new_request.parent_url = self.__queue_item.request.url
try:
self.__options.callbacks.request_in_thread_after_finish(self.__queue_item)
except Exception as e:
print(e)
with self.__callback_lock:
self.__callback(self.__queue_item, new_requests, failed) | python | def run(self):
"""Executes the HTTP call.
Note:
If this and the parent handler raised an error, the queue item status
will be set to errored instead of finished. This is to prevent e.g. 404
recursion.
"""
try:
self.__options.callbacks.request_in_thread_before_start(self.__queue_item)
except Exception as e:
print(e)
new_requests = []
failed = False
try:
handler = Handler(self.__options, self.__queue_item)
new_requests = handler.get_new_requests()
try:
self.__queue_item.response.raise_for_status()
except Exception:
if self.__queue_item.request.parent_raised_error:
failed = True
else:
for new_request in new_requests:
new_request.parent_raised_error = True
except Exception as e:
failed = True
error_message = "Setting status of '{}' to '{}' because of an HTTP error.".format(
self.__queue_item.request.url,
QueueItem.STATUS_ERRORED
)
DebugHelper.output(self.__options, error_message)
DebugHelper.output(self.__options, e)
try:
self.__options.callbacks.request_on_error(self.__queue_item, str(e))
except Exception as e:
print(e)
for new_request in new_requests:
new_request.parent_url = self.__queue_item.request.url
try:
self.__options.callbacks.request_in_thread_after_finish(self.__queue_item)
except Exception as e:
print(e)
with self.__callback_lock:
self.__callback(self.__queue_item, new_requests, failed) | [
"def",
"run",
"(",
"self",
")",
":",
"try",
":",
"self",
".",
"__options",
".",
"callbacks",
".",
"request_in_thread_before_start",
"(",
"self",
".",
"__queue_item",
")",
"except",
"Exception",
"as",
"e",
":",
"print",
"(",
"e",
")",
"new_requests",
"=",
... | Executes the HTTP call.
Note:
If this and the parent handler raised an error, the queue item status
will be set to errored instead of finished. This is to prevent e.g. 404
recursion. | [
"Executes",
"the",
"HTTP",
"call",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/CrawlerThread.py#L60-L116 | train | 212,142 |
tijme/not-your-average-web-crawler | nyawc/Crawler.py | Crawler.start_with | def start_with(self, request):
"""Start the crawler using the given request.
Args:
request (:class:`nyawc.http.Request`): The startpoint for the crawler.
"""
HTTPRequestHelper.patch_with_options(request, self.__options)
self.queue.add_request(request)
self.__crawler_start() | python | def start_with(self, request):
"""Start the crawler using the given request.
Args:
request (:class:`nyawc.http.Request`): The startpoint for the crawler.
"""
HTTPRequestHelper.patch_with_options(request, self.__options)
self.queue.add_request(request)
self.__crawler_start() | [
"def",
"start_with",
"(",
"self",
",",
"request",
")",
":",
"HTTPRequestHelper",
".",
"patch_with_options",
"(",
"request",
",",
"self",
".",
"__options",
")",
"self",
".",
"queue",
".",
"add_request",
"(",
"request",
")",
"self",
".",
"__crawler_start",
"("... | Start the crawler using the given request.
Args:
request (:class:`nyawc.http.Request`): The startpoint for the crawler. | [
"Start",
"the",
"crawler",
"using",
"the",
"given",
"request",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/Crawler.py#L87-L98 | train | 212,143 |
tijme/not-your-average-web-crawler | nyawc/Crawler.py | Crawler.__spawn_new_requests | def __spawn_new_requests(self):
"""Spawn new requests until the max threads option value is reached.
Note:
If no new requests were spawned and there are no requests in progress
the crawler will stop crawling.
"""
self.__should_spawn_new_requests = False
in_progress_count = len(self.queue.get_all(QueueItem.STATUS_IN_PROGRESS))
while in_progress_count < self.__options.performance.max_threads:
if self.__spawn_new_request():
in_progress_count += 1
else:
break
if in_progress_count == 0:
self.__crawler_stop() | python | def __spawn_new_requests(self):
"""Spawn new requests until the max threads option value is reached.
Note:
If no new requests were spawned and there are no requests in progress
the crawler will stop crawling.
"""
self.__should_spawn_new_requests = False
in_progress_count = len(self.queue.get_all(QueueItem.STATUS_IN_PROGRESS))
while in_progress_count < self.__options.performance.max_threads:
if self.__spawn_new_request():
in_progress_count += 1
else:
break
if in_progress_count == 0:
self.__crawler_stop() | [
"def",
"__spawn_new_requests",
"(",
"self",
")",
":",
"self",
".",
"__should_spawn_new_requests",
"=",
"False",
"in_progress_count",
"=",
"len",
"(",
"self",
".",
"queue",
".",
"get_all",
"(",
"QueueItem",
".",
"STATUS_IN_PROGRESS",
")",
")",
"while",
"in_progre... | Spawn new requests until the max threads option value is reached.
Note:
If no new requests were spawned and there are no requests in progress
the crawler will stop crawling. | [
"Spawn",
"new",
"requests",
"until",
"the",
"max",
"threads",
"option",
"value",
"is",
"reached",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/Crawler.py#L100-L120 | train | 212,144 |
tijme/not-your-average-web-crawler | nyawc/Crawler.py | Crawler.__spawn_new_request | def __spawn_new_request(self):
"""Spawn the first queued request if there is one available.
Returns:
bool: True if a new request was spawned, false otherwise.
"""
first_in_line = self.queue.get_first(QueueItem.STATUS_QUEUED)
if first_in_line is None:
return False
while self.routing.is_treshold_reached(first_in_line.request):
self.queue.move(first_in_line, QueueItem.STATUS_CANCELLED)
first_in_line = self.queue.get_first(QueueItem.STATUS_QUEUED)
if first_in_line is None:
return False
self.__request_start(first_in_line)
return True | python | def __spawn_new_request(self):
"""Spawn the first queued request if there is one available.
Returns:
bool: True if a new request was spawned, false otherwise.
"""
first_in_line = self.queue.get_first(QueueItem.STATUS_QUEUED)
if first_in_line is None:
return False
while self.routing.is_treshold_reached(first_in_line.request):
self.queue.move(first_in_line, QueueItem.STATUS_CANCELLED)
first_in_line = self.queue.get_first(QueueItem.STATUS_QUEUED)
if first_in_line is None:
return False
self.__request_start(first_in_line)
return True | [
"def",
"__spawn_new_request",
"(",
"self",
")",
":",
"first_in_line",
"=",
"self",
".",
"queue",
".",
"get_first",
"(",
"QueueItem",
".",
"STATUS_QUEUED",
")",
"if",
"first_in_line",
"is",
"None",
":",
"return",
"False",
"while",
"self",
".",
"routing",
".",... | Spawn the first queued request if there is one available.
Returns:
bool: True if a new request was spawned, false otherwise. | [
"Spawn",
"the",
"first",
"queued",
"request",
"if",
"there",
"is",
"one",
"available",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/Crawler.py#L122-L143 | train | 212,145 |
tijme/not-your-average-web-crawler | nyawc/Crawler.py | Crawler.__crawler_start | def __crawler_start(self):
"""Spawn the first X queued request, where X is the max threads option.
Note:
The main thread will sleep until the crawler is finished. This enables
quiting the application using sigints (see http://stackoverflow.com/a/11816038/2491049).
Note:
`__crawler_stop()` and `__spawn_new_requests()` are called here on the main thread to
prevent thread recursion and deadlocks.
"""
try:
self.__options.callbacks.crawler_before_start()
except Exception as e:
print(e)
print(traceback.format_exc())
self.__spawn_new_requests()
while not self.__stopped:
if self.__should_stop:
self.__crawler_stop()
if self.__should_spawn_new_requests:
self.__spawn_new_requests()
time.sleep(0.1) | python | def __crawler_start(self):
"""Spawn the first X queued request, where X is the max threads option.
Note:
The main thread will sleep until the crawler is finished. This enables
quiting the application using sigints (see http://stackoverflow.com/a/11816038/2491049).
Note:
`__crawler_stop()` and `__spawn_new_requests()` are called here on the main thread to
prevent thread recursion and deadlocks.
"""
try:
self.__options.callbacks.crawler_before_start()
except Exception as e:
print(e)
print(traceback.format_exc())
self.__spawn_new_requests()
while not self.__stopped:
if self.__should_stop:
self.__crawler_stop()
if self.__should_spawn_new_requests:
self.__spawn_new_requests()
time.sleep(0.1) | [
"def",
"__crawler_start",
"(",
"self",
")",
":",
"try",
":",
"self",
".",
"__options",
".",
"callbacks",
".",
"crawler_before_start",
"(",
")",
"except",
"Exception",
"as",
"e",
":",
"print",
"(",
"e",
")",
"print",
"(",
"traceback",
".",
"format_exc",
"... | Spawn the first X queued request, where X is the max threads option.
Note:
The main thread will sleep until the crawler is finished. This enables
quiting the application using sigints (see http://stackoverflow.com/a/11816038/2491049).
Note:
`__crawler_stop()` and `__spawn_new_requests()` are called here on the main thread to
prevent thread recursion and deadlocks. | [
"Spawn",
"the",
"first",
"X",
"queued",
"request",
"where",
"X",
"is",
"the",
"max",
"threads",
"option",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/Crawler.py#L151-L179 | train | 212,146 |
tijme/not-your-average-web-crawler | nyawc/Crawler.py | Crawler.__crawler_stop | def __crawler_stop(self):
"""Mark the crawler as stopped.
Note:
If :attr:`__stopped` is True, the main thread will be stopped. Every piece of code that gets
executed after :attr:`__stopped` is True could cause Thread exceptions and or race conditions.
"""
if self.__stopping:
return
self.__stopping = True
self.__wait_for_current_threads()
self.queue.move_bulk([
QueueItem.STATUS_QUEUED,
QueueItem.STATUS_IN_PROGRESS
], QueueItem.STATUS_CANCELLED)
self.__crawler_finish()
self.__stopped = True | python | def __crawler_stop(self):
"""Mark the crawler as stopped.
Note:
If :attr:`__stopped` is True, the main thread will be stopped. Every piece of code that gets
executed after :attr:`__stopped` is True could cause Thread exceptions and or race conditions.
"""
if self.__stopping:
return
self.__stopping = True
self.__wait_for_current_threads()
self.queue.move_bulk([
QueueItem.STATUS_QUEUED,
QueueItem.STATUS_IN_PROGRESS
], QueueItem.STATUS_CANCELLED)
self.__crawler_finish()
self.__stopped = True | [
"def",
"__crawler_stop",
"(",
"self",
")",
":",
"if",
"self",
".",
"__stopping",
":",
"return",
"self",
".",
"__stopping",
"=",
"True",
"self",
".",
"__wait_for_current_threads",
"(",
")",
"self",
".",
"queue",
".",
"move_bulk",
"(",
"[",
"QueueItem",
".",... | Mark the crawler as stopped.
Note:
If :attr:`__stopped` is True, the main thread will be stopped. Every piece of code that gets
executed after :attr:`__stopped` is True could cause Thread exceptions and or race conditions. | [
"Mark",
"the",
"crawler",
"as",
"stopped",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/Crawler.py#L181-L202 | train | 212,147 |
tijme/not-your-average-web-crawler | nyawc/Crawler.py | Crawler.__crawler_finish | def __crawler_finish(self):
"""Called when the crawler is finished because there are no queued requests left or it was stopped."""
try:
self.__options.callbacks.crawler_after_finish(self.queue)
except Exception as e:
print(e)
print(traceback.format_exc()) | python | def __crawler_finish(self):
"""Called when the crawler is finished because there are no queued requests left or it was stopped."""
try:
self.__options.callbacks.crawler_after_finish(self.queue)
except Exception as e:
print(e)
print(traceback.format_exc()) | [
"def",
"__crawler_finish",
"(",
"self",
")",
":",
"try",
":",
"self",
".",
"__options",
".",
"callbacks",
".",
"crawler_after_finish",
"(",
"self",
".",
"queue",
")",
"except",
"Exception",
"as",
"e",
":",
"print",
"(",
"e",
")",
"print",
"(",
"traceback... | Called when the crawler is finished because there are no queued requests left or it was stopped. | [
"Called",
"when",
"the",
"crawler",
"is",
"finished",
"because",
"there",
"are",
"no",
"queued",
"requests",
"left",
"or",
"it",
"was",
"stopped",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/Crawler.py#L204-L211 | train | 212,148 |
tijme/not-your-average-web-crawler | nyawc/Crawler.py | Crawler.__request_start | def __request_start(self, queue_item):
"""Execute the request in given queue item.
Args:
queue_item (:class:`nyawc.QueueItem`): The request/response pair to scrape.
"""
try:
action = self.__options.callbacks.request_before_start(self.queue, queue_item)
except Exception as e:
action = None
print(e)
print(traceback.format_exc())
if action == CrawlerActions.DO_STOP_CRAWLING:
self.__should_stop = True
if action == CrawlerActions.DO_SKIP_TO_NEXT:
self.queue.move(queue_item, QueueItem.STATUS_FINISHED)
self.__should_spawn_new_requests = True
if action == CrawlerActions.DO_CONTINUE_CRAWLING or action is None:
self.queue.move(queue_item, QueueItem.STATUS_IN_PROGRESS)
thread = CrawlerThread(self.__request_finish, self.__lock, self.__options, queue_item)
self.__threads[queue_item.get_hash()] = thread
thread.daemon = True
thread.start() | python | def __request_start(self, queue_item):
"""Execute the request in given queue item.
Args:
queue_item (:class:`nyawc.QueueItem`): The request/response pair to scrape.
"""
try:
action = self.__options.callbacks.request_before_start(self.queue, queue_item)
except Exception as e:
action = None
print(e)
print(traceback.format_exc())
if action == CrawlerActions.DO_STOP_CRAWLING:
self.__should_stop = True
if action == CrawlerActions.DO_SKIP_TO_NEXT:
self.queue.move(queue_item, QueueItem.STATUS_FINISHED)
self.__should_spawn_new_requests = True
if action == CrawlerActions.DO_CONTINUE_CRAWLING or action is None:
self.queue.move(queue_item, QueueItem.STATUS_IN_PROGRESS)
thread = CrawlerThread(self.__request_finish, self.__lock, self.__options, queue_item)
self.__threads[queue_item.get_hash()] = thread
thread.daemon = True
thread.start() | [
"def",
"__request_start",
"(",
"self",
",",
"queue_item",
")",
":",
"try",
":",
"action",
"=",
"self",
".",
"__options",
".",
"callbacks",
".",
"request_before_start",
"(",
"self",
".",
"queue",
",",
"queue_item",
")",
"except",
"Exception",
"as",
"e",
":"... | Execute the request in given queue item.
Args:
queue_item (:class:`nyawc.QueueItem`): The request/response pair to scrape. | [
"Execute",
"the",
"request",
"in",
"given",
"queue",
"item",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/Crawler.py#L213-L241 | train | 212,149 |
tijme/not-your-average-web-crawler | nyawc/Crawler.py | Crawler.__request_finish | def __request_finish(self, queue_item, new_requests, request_failed=False):
"""Called when the crawler finished the given queue item.
Args:
queue_item (:class:`nyawc.QueueItem`): The request/response pair that finished.
new_requests list(:class:`nyawc.http.Request`): All the requests that were found during this request.
request_failed (bool): True if the request failed (if needs to be moved to errored).
"""
if self.__stopping:
return
del self.__threads[queue_item.get_hash()]
if request_failed:
new_queue_items = []
self.queue.move(queue_item, QueueItem.STATUS_ERRORED)
else:
self.routing.increase_route_count(queue_item.request)
new_queue_items = self.__add_scraped_requests_to_queue(queue_item, new_requests)
self.queue.move(queue_item, QueueItem.STATUS_FINISHED)
try:
action = self.__options.callbacks.request_after_finish(self.queue, queue_item, new_queue_items)
except Exception as e:
action = None
print(e)
print(traceback.format_exc())
queue_item.decompose()
if action == CrawlerActions.DO_STOP_CRAWLING:
self.__should_stop = True
if action == CrawlerActions.DO_CONTINUE_CRAWLING or action is None:
self.__should_spawn_new_requests = True | python | def __request_finish(self, queue_item, new_requests, request_failed=False):
"""Called when the crawler finished the given queue item.
Args:
queue_item (:class:`nyawc.QueueItem`): The request/response pair that finished.
new_requests list(:class:`nyawc.http.Request`): All the requests that were found during this request.
request_failed (bool): True if the request failed (if needs to be moved to errored).
"""
if self.__stopping:
return
del self.__threads[queue_item.get_hash()]
if request_failed:
new_queue_items = []
self.queue.move(queue_item, QueueItem.STATUS_ERRORED)
else:
self.routing.increase_route_count(queue_item.request)
new_queue_items = self.__add_scraped_requests_to_queue(queue_item, new_requests)
self.queue.move(queue_item, QueueItem.STATUS_FINISHED)
try:
action = self.__options.callbacks.request_after_finish(self.queue, queue_item, new_queue_items)
except Exception as e:
action = None
print(e)
print(traceback.format_exc())
queue_item.decompose()
if action == CrawlerActions.DO_STOP_CRAWLING:
self.__should_stop = True
if action == CrawlerActions.DO_CONTINUE_CRAWLING or action is None:
self.__should_spawn_new_requests = True | [
"def",
"__request_finish",
"(",
"self",
",",
"queue_item",
",",
"new_requests",
",",
"request_failed",
"=",
"False",
")",
":",
"if",
"self",
".",
"__stopping",
":",
"return",
"del",
"self",
".",
"__threads",
"[",
"queue_item",
".",
"get_hash",
"(",
")",
"]... | Called when the crawler finished the given queue item.
Args:
queue_item (:class:`nyawc.QueueItem`): The request/response pair that finished.
new_requests list(:class:`nyawc.http.Request`): All the requests that were found during this request.
request_failed (bool): True if the request failed (if needs to be moved to errored). | [
"Called",
"when",
"the",
"crawler",
"finished",
"the",
"given",
"queue",
"item",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/Crawler.py#L243-L279 | train | 212,150 |
tijme/not-your-average-web-crawler | nyawc/Crawler.py | Crawler.__add_scraped_requests_to_queue | def __add_scraped_requests_to_queue(self, queue_item, scraped_requests):
"""Convert the scraped requests to queue items, return them and also add them to the queue.
Args:
queue_item (:class:`nyawc.QueueItem`): The request/response pair that finished.
new_requests list(:class:`nyawc.http.Request`): All the requests that were found during this request.
Returns:
list(:class:`nyawc.QueueItem`): The new queue items.
"""
new_queue_items = []
for scraped_request in scraped_requests:
HTTPRequestHelper.patch_with_options(scraped_request, self.__options, queue_item)
if not HTTPRequestHelper.complies_with_scope(queue_item, scraped_request, self.__options.scope):
continue
if self.queue.has_request(scraped_request):
continue
scraped_request.depth = queue_item.request.depth + 1
if self.__options.scope.max_depth is not None:
if scraped_request.depth > self.__options.scope.max_depth:
continue
new_queue_item = self.queue.add_request(scraped_request)
new_queue_items.append(new_queue_item)
return new_queue_items | python | def __add_scraped_requests_to_queue(self, queue_item, scraped_requests):
"""Convert the scraped requests to queue items, return them and also add them to the queue.
Args:
queue_item (:class:`nyawc.QueueItem`): The request/response pair that finished.
new_requests list(:class:`nyawc.http.Request`): All the requests that were found during this request.
Returns:
list(:class:`nyawc.QueueItem`): The new queue items.
"""
new_queue_items = []
for scraped_request in scraped_requests:
HTTPRequestHelper.patch_with_options(scraped_request, self.__options, queue_item)
if not HTTPRequestHelper.complies_with_scope(queue_item, scraped_request, self.__options.scope):
continue
if self.queue.has_request(scraped_request):
continue
scraped_request.depth = queue_item.request.depth + 1
if self.__options.scope.max_depth is not None:
if scraped_request.depth > self.__options.scope.max_depth:
continue
new_queue_item = self.queue.add_request(scraped_request)
new_queue_items.append(new_queue_item)
return new_queue_items | [
"def",
"__add_scraped_requests_to_queue",
"(",
"self",
",",
"queue_item",
",",
"scraped_requests",
")",
":",
"new_queue_items",
"=",
"[",
"]",
"for",
"scraped_request",
"in",
"scraped_requests",
":",
"HTTPRequestHelper",
".",
"patch_with_options",
"(",
"scraped_request"... | Convert the scraped requests to queue items, return them and also add them to the queue.
Args:
queue_item (:class:`nyawc.QueueItem`): The request/response pair that finished.
new_requests list(:class:`nyawc.http.Request`): All the requests that were found during this request.
Returns:
list(:class:`nyawc.QueueItem`): The new queue items. | [
"Convert",
"the",
"scraped",
"requests",
"to",
"queue",
"items",
"return",
"them",
"and",
"also",
"add",
"them",
"to",
"the",
"queue",
"."
] | d77c14e1616c541bb3980f649a7e6f8ed02761fb | https://github.com/tijme/not-your-average-web-crawler/blob/d77c14e1616c541bb3980f649a7e6f8ed02761fb/nyawc/Crawler.py#L281-L312 | train | 212,151 |
blackecho/Deep-Learning-TensorFlow | yadlt/models/recurrent/lstm.py | LSTM.fit | def fit(self, train_set, test_set):
"""Fit the model to the given data.
:param train_set: training data
:param test_set: test data
"""
with tf.Graph().as_default(), tf.Session() as self.tf_session:
self.build_model()
tf.global_variables_initializer().run()
third = self.num_epochs // 3
for i in range(self.num_epochs):
lr_decay = self.lr_decay ** max(i - third, 0.0)
self.tf_session.run(
tf.assign(self.lr_var, tf.multiply(self.learning_rate, lr_decay)))
train_perplexity = self._run_train_step(train_set, 'train')
print("Epoch: %d Train Perplexity: %.3f"
% (i + 1, train_perplexity))
test_perplexity = self._run_train_step(test_set, 'test')
print("Test Perplexity: %.3f" % test_perplexity) | python | def fit(self, train_set, test_set):
"""Fit the model to the given data.
:param train_set: training data
:param test_set: test data
"""
with tf.Graph().as_default(), tf.Session() as self.tf_session:
self.build_model()
tf.global_variables_initializer().run()
third = self.num_epochs // 3
for i in range(self.num_epochs):
lr_decay = self.lr_decay ** max(i - third, 0.0)
self.tf_session.run(
tf.assign(self.lr_var, tf.multiply(self.learning_rate, lr_decay)))
train_perplexity = self._run_train_step(train_set, 'train')
print("Epoch: %d Train Perplexity: %.3f"
% (i + 1, train_perplexity))
test_perplexity = self._run_train_step(test_set, 'test')
print("Test Perplexity: %.3f" % test_perplexity) | [
"def",
"fit",
"(",
"self",
",",
"train_set",
",",
"test_set",
")",
":",
"with",
"tf",
".",
"Graph",
"(",
")",
".",
"as_default",
"(",
")",
",",
"tf",
".",
"Session",
"(",
")",
"as",
"self",
".",
"tf_session",
":",
"self",
".",
"build_model",
"(",
... | Fit the model to the given data.
:param train_set: training data
:param test_set: test data | [
"Fit",
"the",
"model",
"to",
"the",
"given",
"data",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/models/recurrent/lstm.py#L52-L73 | train | 212,152 |
blackecho/Deep-Learning-TensorFlow | yadlt/models/recurrent/lstm.py | LSTM._run_train_step | def _run_train_step(self, data, mode='train'):
"""Run a single training step.
:param data: input data
:param mode: 'train' or 'test'.
"""
epoch_size = ((len(data) // self.batch_size) - 1) // self.num_steps
costs = 0.0
iters = 0
step = 0
state = self._init_state.eval()
op = self._train_op if mode == 'train' else tf.no_op()
for step, (x, y) in enumerate(
utilities.seq_data_iterator(
data, self.batch_size, self.num_steps)):
cost, state, _ = self.tf_session.run(
[self.cost, self.final_state, op],
{self.input_data: x,
self.input_labels: y,
self._init_state: state})
costs += cost
iters += self.num_steps
if step % (epoch_size // 10) == 10:
print("%.3f perplexity" % (step * 1.0 / epoch_size))
return np.exp(costs / iters) | python | def _run_train_step(self, data, mode='train'):
"""Run a single training step.
:param data: input data
:param mode: 'train' or 'test'.
"""
epoch_size = ((len(data) // self.batch_size) - 1) // self.num_steps
costs = 0.0
iters = 0
step = 0
state = self._init_state.eval()
op = self._train_op if mode == 'train' else tf.no_op()
for step, (x, y) in enumerate(
utilities.seq_data_iterator(
data, self.batch_size, self.num_steps)):
cost, state, _ = self.tf_session.run(
[self.cost, self.final_state, op],
{self.input_data: x,
self.input_labels: y,
self._init_state: state})
costs += cost
iters += self.num_steps
if step % (epoch_size // 10) == 10:
print("%.3f perplexity" % (step * 1.0 / epoch_size))
return np.exp(costs / iters) | [
"def",
"_run_train_step",
"(",
"self",
",",
"data",
",",
"mode",
"=",
"'train'",
")",
":",
"epoch_size",
"=",
"(",
"(",
"len",
"(",
"data",
")",
"//",
"self",
".",
"batch_size",
")",
"-",
"1",
")",
"//",
"self",
".",
"num_steps",
"costs",
"=",
"0.0... | Run a single training step.
:param data: input data
:param mode: 'train' or 'test'. | [
"Run",
"a",
"single",
"training",
"step",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/models/recurrent/lstm.py#L75-L103 | train | 212,153 |
blackecho/Deep-Learning-TensorFlow | yadlt/models/recurrent/lstm.py | LSTM.build_model | def build_model(self):
"""Build the model's computational graph."""
with tf.variable_scope(
"model", reuse=None, initializer=self.initializer):
self._create_placeholders()
self._create_rnn_cells()
self._create_initstate_and_embeddings()
self._create_rnn_architecture()
self._create_optimizer_node() | python | def build_model(self):
"""Build the model's computational graph."""
with tf.variable_scope(
"model", reuse=None, initializer=self.initializer):
self._create_placeholders()
self._create_rnn_cells()
self._create_initstate_and_embeddings()
self._create_rnn_architecture()
self._create_optimizer_node() | [
"def",
"build_model",
"(",
"self",
")",
":",
"with",
"tf",
".",
"variable_scope",
"(",
"\"model\"",
",",
"reuse",
"=",
"None",
",",
"initializer",
"=",
"self",
".",
"initializer",
")",
":",
"self",
".",
"_create_placeholders",
"(",
")",
"self",
".",
"_cr... | Build the model's computational graph. | [
"Build",
"the",
"model",
"s",
"computational",
"graph",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/models/recurrent/lstm.py#L105-L113 | train | 212,154 |
blackecho/Deep-Learning-TensorFlow | yadlt/models/recurrent/lstm.py | LSTM._create_placeholders | def _create_placeholders(self):
"""Create the computational graph's placeholders."""
self.input_data = tf.placeholder(
tf.int32, [self.batch_size, self.num_steps])
self.input_labels = tf.placeholder(
tf.int32, [self.batch_size, self.num_steps]) | python | def _create_placeholders(self):
"""Create the computational graph's placeholders."""
self.input_data = tf.placeholder(
tf.int32, [self.batch_size, self.num_steps])
self.input_labels = tf.placeholder(
tf.int32, [self.batch_size, self.num_steps]) | [
"def",
"_create_placeholders",
"(",
"self",
")",
":",
"self",
".",
"input_data",
"=",
"tf",
".",
"placeholder",
"(",
"tf",
".",
"int32",
",",
"[",
"self",
".",
"batch_size",
",",
"self",
".",
"num_steps",
"]",
")",
"self",
".",
"input_labels",
"=",
"tf... | Create the computational graph's placeholders. | [
"Create",
"the",
"computational",
"graph",
"s",
"placeholders",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/models/recurrent/lstm.py#L115-L120 | train | 212,155 |
blackecho/Deep-Learning-TensorFlow | yadlt/models/recurrent/lstm.py | LSTM._create_rnn_cells | def _create_rnn_cells(self):
"""Create the LSTM cells."""
lstm_cell = tf.nn.rnn_cell.LSTMCell(
self.num_hidden, forget_bias=0.0)
lstm_cell = tf.nn.rnn_cell.DropoutWrapper(
lstm_cell, output_keep_prob=self.dropout)
self.cell = tf.nn.rnn_cell.MultiRNNCell(
[lstm_cell] * self.num_layers) | python | def _create_rnn_cells(self):
"""Create the LSTM cells."""
lstm_cell = tf.nn.rnn_cell.LSTMCell(
self.num_hidden, forget_bias=0.0)
lstm_cell = tf.nn.rnn_cell.DropoutWrapper(
lstm_cell, output_keep_prob=self.dropout)
self.cell = tf.nn.rnn_cell.MultiRNNCell(
[lstm_cell] * self.num_layers) | [
"def",
"_create_rnn_cells",
"(",
"self",
")",
":",
"lstm_cell",
"=",
"tf",
".",
"nn",
".",
"rnn_cell",
".",
"LSTMCell",
"(",
"self",
".",
"num_hidden",
",",
"forget_bias",
"=",
"0.0",
")",
"lstm_cell",
"=",
"tf",
".",
"nn",
".",
"rnn_cell",
".",
"Dropo... | Create the LSTM cells. | [
"Create",
"the",
"LSTM",
"cells",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/models/recurrent/lstm.py#L122-L129 | train | 212,156 |
blackecho/Deep-Learning-TensorFlow | yadlt/models/recurrent/lstm.py | LSTM._create_initstate_and_embeddings | def _create_initstate_and_embeddings(self):
"""Create the initial state for the cell and the data embeddings."""
self._init_state = self.cell.zero_state(self.batch_size, tf.float32)
embedding = tf.get_variable(
"embedding", [self.vocab_size, self.num_hidden])
inputs = tf.nn.embedding_lookup(embedding, self.input_data)
self.inputs = tf.nn.dropout(inputs, self.dropout) | python | def _create_initstate_and_embeddings(self):
"""Create the initial state for the cell and the data embeddings."""
self._init_state = self.cell.zero_state(self.batch_size, tf.float32)
embedding = tf.get_variable(
"embedding", [self.vocab_size, self.num_hidden])
inputs = tf.nn.embedding_lookup(embedding, self.input_data)
self.inputs = tf.nn.dropout(inputs, self.dropout) | [
"def",
"_create_initstate_and_embeddings",
"(",
"self",
")",
":",
"self",
".",
"_init_state",
"=",
"self",
".",
"cell",
".",
"zero_state",
"(",
"self",
".",
"batch_size",
",",
"tf",
".",
"float32",
")",
"embedding",
"=",
"tf",
".",
"get_variable",
"(",
"\"... | Create the initial state for the cell and the data embeddings. | [
"Create",
"the",
"initial",
"state",
"for",
"the",
"cell",
"and",
"the",
"data",
"embeddings",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/models/recurrent/lstm.py#L131-L137 | train | 212,157 |
blackecho/Deep-Learning-TensorFlow | yadlt/models/recurrent/lstm.py | LSTM._create_rnn_architecture | def _create_rnn_architecture(self):
"""Create the training architecture and the last layer of the LSTM."""
self.inputs = [tf.squeeze(i, [1]) for i in tf.split(
axis=1, num_or_size_splits=self.num_steps, value=self.inputs)]
outputs, state = tf.nn.rnn(
self.cell, self.inputs, initial_state=self._init_state)
output = tf.reshape(tf.concat(axis=1, values=outputs), [-1, self.num_hidden])
softmax_w = tf.get_variable(
"softmax_w", [self.num_hidden, self.vocab_size])
softmax_b = tf.get_variable("softmax_b", [self.vocab_size])
logits = tf.add(tf.matmul(output, softmax_w), softmax_b)
loss = tf.nn.seq2seq.sequence_loss_by_example(
[logits],
[tf.reshape(self.input_labels, [-1])],
[tf.ones([self.batch_size * self.num_steps])])
self.cost = tf.div(tf.reduce_sum(loss), self.batch_size)
self.final_state = state | python | def _create_rnn_architecture(self):
"""Create the training architecture and the last layer of the LSTM."""
self.inputs = [tf.squeeze(i, [1]) for i in tf.split(
axis=1, num_or_size_splits=self.num_steps, value=self.inputs)]
outputs, state = tf.nn.rnn(
self.cell, self.inputs, initial_state=self._init_state)
output = tf.reshape(tf.concat(axis=1, values=outputs), [-1, self.num_hidden])
softmax_w = tf.get_variable(
"softmax_w", [self.num_hidden, self.vocab_size])
softmax_b = tf.get_variable("softmax_b", [self.vocab_size])
logits = tf.add(tf.matmul(output, softmax_w), softmax_b)
loss = tf.nn.seq2seq.sequence_loss_by_example(
[logits],
[tf.reshape(self.input_labels, [-1])],
[tf.ones([self.batch_size * self.num_steps])])
self.cost = tf.div(tf.reduce_sum(loss), self.batch_size)
self.final_state = state | [
"def",
"_create_rnn_architecture",
"(",
"self",
")",
":",
"self",
".",
"inputs",
"=",
"[",
"tf",
".",
"squeeze",
"(",
"i",
",",
"[",
"1",
"]",
")",
"for",
"i",
"in",
"tf",
".",
"split",
"(",
"axis",
"=",
"1",
",",
"num_or_size_splits",
"=",
"self",... | Create the training architecture and the last layer of the LSTM. | [
"Create",
"the",
"training",
"architecture",
"and",
"the",
"last",
"layer",
"of",
"the",
"LSTM",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/models/recurrent/lstm.py#L139-L157 | train | 212,158 |
blackecho/Deep-Learning-TensorFlow | yadlt/models/recurrent/lstm.py | LSTM._create_optimizer_node | def _create_optimizer_node(self):
"""Create the optimizer node of the graph."""
self.lr_var = tf.Variable(0.0, trainable=False)
tvars = tf.trainable_variables()
grads, _ = tf.clip_by_global_norm(tf.gradients(self.cost, tvars),
self.max_grad_norm)
optimizer = tf.train.GradientDescentOptimizer(self.lr_var)
self._train_op = optimizer.apply_gradients(zip(grads, tvars)) | python | def _create_optimizer_node(self):
"""Create the optimizer node of the graph."""
self.lr_var = tf.Variable(0.0, trainable=False)
tvars = tf.trainable_variables()
grads, _ = tf.clip_by_global_norm(tf.gradients(self.cost, tvars),
self.max_grad_norm)
optimizer = tf.train.GradientDescentOptimizer(self.lr_var)
self._train_op = optimizer.apply_gradients(zip(grads, tvars)) | [
"def",
"_create_optimizer_node",
"(",
"self",
")",
":",
"self",
".",
"lr_var",
"=",
"tf",
".",
"Variable",
"(",
"0.0",
",",
"trainable",
"=",
"False",
")",
"tvars",
"=",
"tf",
".",
"trainable_variables",
"(",
")",
"grads",
",",
"_",
"=",
"tf",
".",
"... | Create the optimizer node of the graph. | [
"Create",
"the",
"optimizer",
"node",
"of",
"the",
"graph",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/models/recurrent/lstm.py#L159-L166 | train | 212,159 |
blackecho/Deep-Learning-TensorFlow | yadlt/models/boltzmann/deep_autoencoder.py | DeepAutoencoder._create_encoding_layers | def _create_encoding_layers(self):
"""Create the encoding layers for supervised finetuning.
:return: output of the final encoding layer.
"""
next_train = self.input_data
self.layer_nodes = []
for l, layer in enumerate(self.layers):
with tf.name_scope("encode-{}".format(l)):
y_act = tf.add(
tf.matmul(next_train, self.encoding_w_[l]),
self.encoding_b_[l]
)
if self.finetune_enc_act_func[l] is not None:
layer_y = self.finetune_enc_act_func[l](y_act)
else:
layer_y = None
# the input to the next layer is the output of this layer
next_train = tf.nn.dropout(layer_y, self.keep_prob)
self.layer_nodes.append(next_train)
self.encode = next_train | python | def _create_encoding_layers(self):
"""Create the encoding layers for supervised finetuning.
:return: output of the final encoding layer.
"""
next_train = self.input_data
self.layer_nodes = []
for l, layer in enumerate(self.layers):
with tf.name_scope("encode-{}".format(l)):
y_act = tf.add(
tf.matmul(next_train, self.encoding_w_[l]),
self.encoding_b_[l]
)
if self.finetune_enc_act_func[l] is not None:
layer_y = self.finetune_enc_act_func[l](y_act)
else:
layer_y = None
# the input to the next layer is the output of this layer
next_train = tf.nn.dropout(layer_y, self.keep_prob)
self.layer_nodes.append(next_train)
self.encode = next_train | [
"def",
"_create_encoding_layers",
"(",
"self",
")",
":",
"next_train",
"=",
"self",
".",
"input_data",
"self",
".",
"layer_nodes",
"=",
"[",
"]",
"for",
"l",
",",
"layer",
"in",
"enumerate",
"(",
"self",
".",
"layers",
")",
":",
"with",
"tf",
".",
"nam... | Create the encoding layers for supervised finetuning.
:return: output of the final encoding layer. | [
"Create",
"the",
"encoding",
"layers",
"for",
"supervised",
"finetuning",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/models/boltzmann/deep_autoencoder.py#L248-L276 | train | 212,160 |
blackecho/Deep-Learning-TensorFlow | yadlt/models/boltzmann/deep_autoencoder.py | DeepAutoencoder._create_decoding_layers | def _create_decoding_layers(self):
"""Create the decoding layers for reconstruction finetuning.
:return: output of the final encoding layer.
"""
next_decode = self.encode
for l, layer in reversed(list(enumerate(self.layers))):
with tf.name_scope("decode-{}".format(l)):
# Create decoding variables
if self.tied_weights:
dec_w = tf.transpose(self.encoding_w_[l])
else:
dec_w = tf.Variable(tf.transpose(
self.encoding_w_[l].initialized_value()))
dec_b = tf.Variable(tf.constant(
0.1, shape=[dec_w.get_shape().dims[1].value]))
self.decoding_w.append(dec_w)
self.decoding_b.append(dec_b)
y_act = tf.add(
tf.matmul(next_decode, dec_w),
dec_b
)
if self.finetune_dec_act_func[l] is not None:
layer_y = self.finetune_dec_act_func[l](y_act)
else:
layer_y = None
# the input to the next layer is the output of this layer
next_decode = tf.nn.dropout(layer_y, self.keep_prob)
self.layer_nodes.append(next_decode)
self.reconstruction = next_decode | python | def _create_decoding_layers(self):
"""Create the decoding layers for reconstruction finetuning.
:return: output of the final encoding layer.
"""
next_decode = self.encode
for l, layer in reversed(list(enumerate(self.layers))):
with tf.name_scope("decode-{}".format(l)):
# Create decoding variables
if self.tied_weights:
dec_w = tf.transpose(self.encoding_w_[l])
else:
dec_w = tf.Variable(tf.transpose(
self.encoding_w_[l].initialized_value()))
dec_b = tf.Variable(tf.constant(
0.1, shape=[dec_w.get_shape().dims[1].value]))
self.decoding_w.append(dec_w)
self.decoding_b.append(dec_b)
y_act = tf.add(
tf.matmul(next_decode, dec_w),
dec_b
)
if self.finetune_dec_act_func[l] is not None:
layer_y = self.finetune_dec_act_func[l](y_act)
else:
layer_y = None
# the input to the next layer is the output of this layer
next_decode = tf.nn.dropout(layer_y, self.keep_prob)
self.layer_nodes.append(next_decode)
self.reconstruction = next_decode | [
"def",
"_create_decoding_layers",
"(",
"self",
")",
":",
"next_decode",
"=",
"self",
".",
"encode",
"for",
"l",
",",
"layer",
"in",
"reversed",
"(",
"list",
"(",
"enumerate",
"(",
"self",
".",
"layers",
")",
")",
")",
":",
"with",
"tf",
".",
"name_scop... | Create the decoding layers for reconstruction finetuning.
:return: output of the final encoding layer. | [
"Create",
"the",
"decoding",
"layers",
"for",
"reconstruction",
"finetuning",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/models/boltzmann/deep_autoencoder.py#L278-L317 | train | 212,161 |
blackecho/Deep-Learning-TensorFlow | yadlt/utils/datasets.py | load_mnist_dataset | def load_mnist_dataset(mode='supervised', one_hot=True):
"""Load the MNIST handwritten digits dataset.
:param mode: 'supervised' or 'unsupervised' mode
:param one_hot: whether to get one hot encoded labels
:return: train, validation, test data:
for (X, y) if 'supervised',
for (X) if 'unsupervised'
"""
mnist = input_data.read_data_sets("MNIST_data/", one_hot=one_hot)
# Training set
trX = mnist.train.images
trY = mnist.train.labels
# Validation set
vlX = mnist.validation.images
vlY = mnist.validation.labels
# Test set
teX = mnist.test.images
teY = mnist.test.labels
if mode == 'supervised':
return trX, trY, vlX, vlY, teX, teY
elif mode == 'unsupervised':
return trX, vlX, teX | python | def load_mnist_dataset(mode='supervised', one_hot=True):
"""Load the MNIST handwritten digits dataset.
:param mode: 'supervised' or 'unsupervised' mode
:param one_hot: whether to get one hot encoded labels
:return: train, validation, test data:
for (X, y) if 'supervised',
for (X) if 'unsupervised'
"""
mnist = input_data.read_data_sets("MNIST_data/", one_hot=one_hot)
# Training set
trX = mnist.train.images
trY = mnist.train.labels
# Validation set
vlX = mnist.validation.images
vlY = mnist.validation.labels
# Test set
teX = mnist.test.images
teY = mnist.test.labels
if mode == 'supervised':
return trX, trY, vlX, vlY, teX, teY
elif mode == 'unsupervised':
return trX, vlX, teX | [
"def",
"load_mnist_dataset",
"(",
"mode",
"=",
"'supervised'",
",",
"one_hot",
"=",
"True",
")",
":",
"mnist",
"=",
"input_data",
".",
"read_data_sets",
"(",
"\"MNIST_data/\"",
",",
"one_hot",
"=",
"one_hot",
")",
"# Training set",
"trX",
"=",
"mnist",
".",
... | Load the MNIST handwritten digits dataset.
:param mode: 'supervised' or 'unsupervised' mode
:param one_hot: whether to get one hot encoded labels
:return: train, validation, test data:
for (X, y) if 'supervised',
for (X) if 'unsupervised' | [
"Load",
"the",
"MNIST",
"handwritten",
"digits",
"dataset",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/utils/datasets.py#L18-L45 | train | 212,162 |
def load_cifar10_dataset(cifar_dir, mode='supervised'):
    """Load the cifar10 dataset.

    :param cifar_dir: path to the dataset directory
        (cPickle format from: https://www.cs.toronto.edu/~kriz/cifar.html)
    :param mode: 'supervised' or 'unsupervised' mode
    :return: train, test data:
        for (X, y) if 'supervised',
        for (X) if 'unsupervised'
    """
    # Training set
    trX = None
    trY = np.array([])

    # Test set
    teX = np.array([])
    teY = np.array([])

    for fn in os.listdir(cifar_dir):

        # Skip the 'batches.meta' and 'readme' files; everything else is a
        # pickled batch dict with 'data' and 'labels' keys.
        if not fn.startswith('batches') and not fn.startswith('readme'):
            # Context manager closes the handle even if pickle.load raises
            # (the original opened/closed manually and leaked on error).
            with open(os.path.join(cifar_dir, fn), 'rb') as fo:
                data_batch = pickle.load(fo)

            if fn.startswith('data'):

                if trX is None:
                    trX = data_batch['data']
                    trY = data_batch['labels']
                else:
                    trX = np.concatenate((trX, data_batch['data']), axis=0)
                    trY = np.concatenate((trY, data_batch['labels']), axis=0)

            if fn.startswith('test'):
                teX = data_batch['data']
                teY = data_batch['labels']

    # Normalize pixel values from [0, 255] to [0, 1].
    trX = trX.astype(np.float32) / 255.
    teX = teX.astype(np.float32) / 255.

    if mode == 'supervised':
        return trX, trY, teX, teY

    elif mode == 'unsupervised':
        return trX, teX
"""Load the cifar10 dataset.
:param cifar_dir: path to the dataset directory
(cPicle format from: https://www.cs.toronto.edu/~kriz/cifar.html)
:param mode: 'supervised' or 'unsupervised' mode
:return: train, test data:
for (X, y) if 'supervised',
for (X) if 'unsupervised'
"""
# Training set
trX = None
trY = np.array([])
# Test set
teX = np.array([])
teY = np.array([])
for fn in os.listdir(cifar_dir):
if not fn.startswith('batches') and not fn.startswith('readme'):
fo = open(os.path.join(cifar_dir, fn), 'rb')
data_batch = pickle.load(fo)
fo.close()
if fn.startswith('data'):
if trX is None:
trX = data_batch['data']
trY = data_batch['labels']
else:
trX = np.concatenate((trX, data_batch['data']), axis=0)
trY = np.concatenate((trY, data_batch['labels']), axis=0)
if fn.startswith('test'):
teX = data_batch['data']
teY = data_batch['labels']
trX = trX.astype(np.float32) / 255.
teX = teX.astype(np.float32) / 255.
if mode == 'supervised':
return trX, trY, teX, teY
elif mode == 'unsupervised':
return trX, teX | [
"def",
"load_cifar10_dataset",
"(",
"cifar_dir",
",",
"mode",
"=",
"'supervised'",
")",
":",
"# Training set",
"trX",
"=",
"None",
"trY",
"=",
"np",
".",
"array",
"(",
"[",
"]",
")",
"# Test set",
"teX",
"=",
"np",
".",
"array",
"(",
"[",
"]",
")",
"... | Load the cifar10 dataset.
:param cifar_dir: path to the dataset directory
(cPicle format from: https://www.cs.toronto.edu/~kriz/cifar.html)
:param mode: 'supervised' or 'unsupervised' mode
:return: train, test data:
for (X, y) if 'supervised',
for (X) if 'unsupervised' | [
"Load",
"the",
"cifar10",
"dataset",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/utils/datasets.py#L48-L94 | train | 212,163 |
def linear(prev_layer, out_dim, name="linear"):
    """Create a linear fully-connected layer.

    Parameters
    ----------
    prev_layer : tf.Tensor
        Last layer's output tensor.
    out_dim : int
        Number of output units.
    name : str, optional (default = "linear")
        Name scope for the layer's ops.

    Returns
    -------
    tuple (
        tf.Tensor : Linear output tensor
        tf.Tensor : Linear weights variable
        tf.Tensor : Linear biases variable
    )
    """
    with tf.name_scope(name):
        # Infer the input dimensionality from the previous layer's shape.
        in_dim = prev_layer.get_shape()[1].value
        weights = tf.Variable(
            tf.truncated_normal([in_dim, out_dim], stddev=0.1))
        biases = tf.Variable(tf.constant(0.1, shape=[out_dim]))
        # out = prev_layer @ W + b
        projection = tf.matmul(prev_layer, weights)
        out = tf.add(projection, biases)
        return (out, weights, biases)
"""Create a linear fully-connected layer.
Parameters
----------
prev_layer : tf.Tensor
Last layer's output tensor.
out_dim : int
Number of output units.
Returns
-------
tuple (
tf.Tensor : Linear output tensor
tf.Tensor : Linear weights variable
tf.Tensor : Linear biases variable
)
"""
with tf.name_scope(name):
in_dim = prev_layer.get_shape()[1].value
W = tf.Variable(tf.truncated_normal([in_dim, out_dim], stddev=0.1))
b = tf.Variable(tf.constant(0.1, shape=[out_dim]))
out = tf.add(tf.matmul(prev_layer, W), b)
return (out, W, b) | [
"def",
"linear",
"(",
"prev_layer",
",",
"out_dim",
",",
"name",
"=",
"\"linear\"",
")",
":",
"with",
"tf",
".",
"name_scope",
"(",
"name",
")",
":",
"in_dim",
"=",
"prev_layer",
".",
"get_shape",
"(",
")",
"[",
"1",
"]",
".",
"value",
"W",
"=",
"t... | Create a linear fully-connected layer.
Parameters
----------
prev_layer : tf.Tensor
Last layer's output tensor.
out_dim : int
Number of output units.
Returns
-------
tuple (
tf.Tensor : Linear output tensor
tf.Tensor : Linear weights variable
tf.Tensor : Linear biases variable
) | [
"Create",
"a",
"linear",
"fully",
"-",
"connected",
"layer",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/core/layers.py#L10-L36 | train | 212,164 |
def regularization(variables, regtype, regcoef, name="regularization"):
    """Compute the regularization tensor.

    Parameters
    ----------
    variables : list of tf.Variable
        List of model variables.
    regtype : str
        Type of regularization. Can be ["none", "l1", "l2"]
    regcoef : float
        Regularization coefficient.
    name : str, optional (default = "regularization")
        Name for the regularization op.

    Returns
    -------
    tf.Tensor : Regularization tensor, or None when regtype is "none".
    """
    with tf.name_scope(name):
        if regtype == 'none':
            # No regularization requested.
            return None
        penalty = tf.constant(0.0)
        for var in variables:
            if regtype == 'l2':
                penalty = tf.add(penalty, tf.nn.l2_loss(var))
            elif regtype == 'l1':
                penalty = tf.add(penalty, tf.reduce_sum(tf.abs(var)))
        return tf.multiply(regcoef, penalty)
"""Compute the regularization tensor.
Parameters
----------
variables : list of tf.Variable
List of model variables.
regtype : str
Type of regularization. Can be ["none", "l1", "l2"]
regcoef : float,
Regularization coefficient.
name : str, optional (default = "regularization")
Name for the regularization op.
Returns
-------
tf.Tensor : Regularization tensor.
"""
with tf.name_scope(name):
if regtype != 'none':
regs = tf.constant(0.0)
for v in variables:
if regtype == 'l2':
regs = tf.add(regs, tf.nn.l2_loss(v))
elif regtype == 'l1':
regs = tf.add(regs, tf.reduce_sum(tf.abs(v)))
return tf.multiply(regcoef, regs)
else:
return None | [
"def",
"regularization",
"(",
"variables",
",",
"regtype",
",",
"regcoef",
",",
"name",
"=",
"\"regularization\"",
")",
":",
"with",
"tf",
".",
"name_scope",
"(",
"name",
")",
":",
"if",
"regtype",
"!=",
"'none'",
":",
"regs",
"=",
"tf",
".",
"constant",... | Compute the regularization tensor.
Parameters
----------
variables : list of tf.Variable
List of model variables.
regtype : str
Type of regularization. Can be ["none", "l1", "l2"]
regcoef : float,
Regularization coefficient.
name : str, optional (default = "regularization")
Name for the regularization op.
Returns
-------
tf.Tensor : Regularization tensor. | [
"Compute",
"the",
"regularization",
"tensor",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/core/layers.py#L39-L73 | train | 212,165 |
def accuracy(mod_y, ref_y, summary=True, name="accuracy"):
    """Accuracy computation op.

    Parameters
    ----------
    mod_y : tf.Tensor
        Model output tensor.
    ref_y : tf.Tensor
        Reference input tensor.
    summary : bool, optional (default = True)
        Whether to save tf summary for the op.
    name : str, optional (default = "accuracy")
        Name scope for the op.

    Returns
    -------
    tf.Tensor : accuracy op. tensor
    """
    with tf.name_scope(name):
        # Compare the predicted class against the reference class.
        predicted = tf.argmax(mod_y, 1)
        expected = tf.argmax(ref_y, 1)
        hits = tf.equal(predicted, expected)
        acc = tf.reduce_mean(tf.cast(hits, tf.float32))
        if summary:
            tf.summary.scalar('accuracy', acc)
        return acc
"""Accuracy computation op.
Parameters
----------
mod_y : tf.Tensor
Model output tensor.
ref_y : tf.Tensor
Reference input tensor.
summary : bool, optional (default = True)
Whether to save tf summary for the op.
Returns
-------
tf.Tensor : accuracy op. tensor
"""
with tf.name_scope(name):
mod_pred = tf.argmax(mod_y, 1)
correct_pred = tf.equal(mod_pred, tf.argmax(ref_y, 1))
accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))
if summary:
tf.summary.scalar('accuracy', accuracy)
return accuracy | [
"def",
"accuracy",
"(",
"mod_y",
",",
"ref_y",
",",
"summary",
"=",
"True",
",",
"name",
"=",
"\"accuracy\"",
")",
":",
"with",
"tf",
".",
"name_scope",
"(",
"name",
")",
":",
"mod_pred",
"=",
"tf",
".",
"argmax",
"(",
"mod_y",
",",
"1",
")",
"corr... | Accuracy computation op.
Parameters
----------
mod_y : tf.Tensor
Model output tensor.
ref_y : tf.Tensor
Reference input tensor.
summary : bool, optional (default = True)
Whether to save tf summary for the op.
Returns
-------
tf.Tensor : accuracy op. tensor | [
"Accuracy",
"computation",
"op",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/core/layers.py#L80-L106 | train | 212,166 |
def pretrain_procedure(self, layer_objs, layer_graphs, set_params_func,
                       train_set, validation_set=None):
    """Perform unsupervised pretraining of the model.

    :param layer_objs: list of model objects (autoencoders or rbms)
    :param layer_graphs: list of model tf.Graph objects
    :param set_params_func: function used to set the parameters after
        pretraining
    :param train_set: training set
    :param validation_set: validation set
    :return: return data encoded by the last layer
    """
    # Feed each layer with the (encoded) output of the previous one.
    current_train, current_valid = train_set, validation_set
    for idx, layer in enumerate(layer_objs):
        print('Training layer {}...'.format(idx + 1))
        current_train, current_valid = self._pretrain_layer_and_gen_feed(
            layer, set_params_func, current_train, current_valid,
            layer_graphs[idx])
    return current_train, current_valid
train_set, validation_set=None):
"""Perform unsupervised pretraining of the model.
:param layer_objs: list of model objects (autoencoders or rbms)
:param layer_graphs: list of model tf.Graph objects
:param set_params_func: function used to set the parameters after
pretraining
:param train_set: training set
:param validation_set: validation set
:return: return data encoded by the last layer
"""
next_train = train_set
next_valid = validation_set
for l, layer_obj in enumerate(layer_objs):
print('Training layer {}...'.format(l + 1))
next_train, next_valid = self._pretrain_layer_and_gen_feed(
layer_obj, set_params_func, next_train, next_valid,
layer_graphs[l])
return next_train, next_valid | [
"def",
"pretrain_procedure",
"(",
"self",
",",
"layer_objs",
",",
"layer_graphs",
",",
"set_params_func",
",",
"train_set",
",",
"validation_set",
"=",
"None",
")",
":",
"next_train",
"=",
"train_set",
"next_valid",
"=",
"validation_set",
"for",
"l",
",",
"layer... | Perform unsupervised pretraining of the model.
:param layer_objs: list of model objects (autoencoders or rbms)
:param layer_graphs: list of model tf.Graph objects
:param set_params_func: function used to set the parameters after
pretraining
:param train_set: training set
:param validation_set: validation set
:return: return data encoded by the last layer | [
"Perform",
"unsupervised",
"pretraining",
"of",
"the",
"model",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/core/model.py#L38-L59 | train | 212,167 |
def _pretrain_layer_and_gen_feed(self, layer_obj, set_params_func,
                                 train_set, validation_set, graph):
    """Pretrain a single autoencoder and encode the data for the next layer.

    :param layer_obj: layer model
    :param set_params_func: function used to set the parameters after
        pretraining
    :param train_set: training set
    :param validation_set: validation set
    :param graph: tf object for the rbm
    :return: encoded train data, encoded validation data
    """
    # Unsupervised fit: the layer reconstructs its own input.
    layer_obj.fit(train_set, train_set,
                  validation_set, validation_set, graph=graph)

    with graph.as_default():
        # Store the learned parameters on the outer model.
        set_params_func(layer_obj, graph)
        encoded_train = layer_obj.transform(train_set, graph=graph)
        encoded_valid = (
            layer_obj.transform(validation_set, graph=graph)
            if validation_set is not None else None)

    return encoded_train, encoded_valid
train_set, validation_set, graph):
"""Pretrain a single autoencoder and encode the data for the next layer.
:param layer_obj: layer model
:param set_params_func: function used to set the parameters after
pretraining
:param train_set: training set
:param validation_set: validation set
:param graph: tf object for the rbm
:return: encoded train data, encoded validation data
"""
layer_obj.fit(train_set, train_set,
validation_set, validation_set, graph=graph)
with graph.as_default():
set_params_func(layer_obj, graph)
next_train = layer_obj.transform(train_set, graph=graph)
if validation_set is not None:
next_valid = layer_obj.transform(validation_set, graph=graph)
else:
next_valid = None
return next_train, next_valid | [
"def",
"_pretrain_layer_and_gen_feed",
"(",
"self",
",",
"layer_obj",
",",
"set_params_func",
",",
"train_set",
",",
"validation_set",
",",
"graph",
")",
":",
"layer_obj",
".",
"fit",
"(",
"train_set",
",",
"train_set",
",",
"validation_set",
",",
"validation_set"... | Pretrain a single autoencoder and encode the data for the next layer.
:param layer_obj: layer model
:param set_params_func: function used to set the parameters after
pretraining
:param train_set: training set
:param validation_set: validation set
:param graph: tf object for the rbm
:return: encoded train data, encoded validation data | [
"Pretrain",
"a",
"single",
"autoencoder",
"and",
"encode",
"the",
"data",
"for",
"the",
"next",
"layer",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/core/model.py#L61-L85 | train | 212,168 |
def get_layers_output(self, dataset):
    """Get output from each layer of the network.

    :param dataset: input data
    :return: list of np array, element i is the output of layer i
    :raises Exception: if the model does not expose any layer nodes
    """
    # Fail fast: the original restored the full model inside a tf.Session
    # and only then discovered there was nothing to evaluate.
    if not self.layer_nodes:
        raise Exception("This method is not implemented for this model")

    layers_out = []
    with self.tf_graph.as_default():
        with tf.Session() as self.tf_session:
            self.tf_saver.restore(self.tf_session, self.model_path)
            for l in self.layer_nodes:
                # keep_prob forced to 1: no dropout at inference time.
                layers_out.append(l.eval({self.input_data: dataset,
                                          self.keep_prob: 1}))
    return layers_out
"""Get output from each layer of the network.
:param dataset: input data
:return: list of np array, element i is the output of layer i
"""
layers_out = []
with self.tf_graph.as_default():
with tf.Session() as self.tf_session:
self.tf_saver.restore(self.tf_session, self.model_path)
for l in self.layer_nodes:
layers_out.append(l.eval({self.input_data: dataset,
self.keep_prob: 1}))
if layers_out == []:
raise Exception("This method is not implemented for this model")
else:
return layers_out | [
"def",
"get_layers_output",
"(",
"self",
",",
"dataset",
")",
":",
"layers_out",
"=",
"[",
"]",
"with",
"self",
".",
"tf_graph",
".",
"as_default",
"(",
")",
":",
"with",
"tf",
".",
"Session",
"(",
")",
"as",
"self",
".",
"tf_session",
":",
"self",
"... | Get output from each layer of the network.
:param dataset: input data
:return: list of np array, element i is the output of layer i | [
"Get",
"output",
"from",
"each",
"layer",
"of",
"the",
"network",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/core/model.py#L87-L105 | train | 212,169 |
def get_parameters(self, params, graph=None):
    """Get the parameters of the model.

    :param params: dictionary of keys (str names) and values (tensors,
        or lists of tensors).
    :param graph: tf.Graph, optional (default = None). Graph to evaluate
        in; falls back to the model's own graph.
    :return: dict of evaluated tensors in params; list-valued entries are
        flattened to "<name>-<i>" keys (1-based index).
    """
    g = graph if graph is not None else self.tf_graph

    with g.as_default():
        with tf.Session() as self.tf_session:
            self.tf_saver.restore(self.tf_session, self.model_path)
            out = {}
            for par in params:
                # isinstance instead of type() == list: idiomatic type
                # check that also accepts list subclasses.
                if isinstance(params[par], list):
                    for i, p in enumerate(params[par]):
                        out[par + '-' + str(i + 1)] = p.eval()
                else:
                    out[par] = params[par].eval()
            return out
"""Get the parameters of the model.
:param params: dictionary of keys (str names) and values (tensors).
:return: evaluated tensors in params
"""
g = graph if graph is not None else self.tf_graph
with g.as_default():
with tf.Session() as self.tf_session:
self.tf_saver.restore(self.tf_session, self.model_path)
out = {}
for par in params:
if type(params[par]) == list:
for i, p in enumerate(params[par]):
out[par + '-' + str(i+1)] = p.eval()
else:
out[par] = params[par].eval()
return out | [
"def",
"get_parameters",
"(",
"self",
",",
"params",
",",
"graph",
"=",
"None",
")",
":",
"g",
"=",
"graph",
"if",
"graph",
"is",
"not",
"None",
"else",
"self",
".",
"tf_graph",
"with",
"g",
".",
"as_default",
"(",
")",
":",
"with",
"tf",
".",
"Ses... | Get the parameters of the model.
:param params: dictionary of keys (str names) and values (tensors).
:return: evaluated tensors in params | [
"Get",
"the",
"parameters",
"of",
"the",
"model",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/core/model.py#L107-L125 | train | 212,170 |
def fit(self, train_X, train_Y, val_X=None, val_Y=None, graph=None):
    """Fit the model to the data.

    Parameters
    ----------
    train_X : array_like, shape (n_samples, n_features)
        Training data.
    train_Y : array_like, shape (n_samples, n_classes)
        Training labels (one-hot encoded).
    val_X : array_like, shape (N, n_features) optional, (default = None).
        Validation data.
    val_Y : array_like, shape (N, n_classes) optional, (default = None).
        Validation labels.
    graph : tf.Graph, optional (default = None)
        Tensorflow Graph object.

    Returns
    -------
    """
    # Labels must be one-hot encoded (2D): the class count comes from
    # the second dimension.
    if len(train_Y.shape) == 1:
        raise Exception("Please convert the labels with one-hot encoding.")
    num_classes = train_Y.shape[1]

    g = graph if graph is not None else self.tf_graph

    with g.as_default():
        # Build model
        self.build_model(train_X.shape[1], num_classes)
        with tf.Session() as self.tf_session:
            # Initialize tf stuff
            merged, writer, saver = tf_utils.init_tf_ops(self.tf_session)
            self.tf_merged_summaries = merged
            self.tf_summary_writer = writer
            self.tf_saver = saver
            # Train model
            self._train_model(train_X, train_Y, val_X, val_Y)
            # Save model
            self.tf_saver.save(self.tf_session, self.model_path)
"""Fit the model to the data.
Parameters
----------
train_X : array_like, shape (n_samples, n_features)
Training data.
train_Y : array_like, shape (n_samples, n_classes)
Training labels.
val_X : array_like, shape (N, n_features) optional, (default = None).
Validation data.
val_Y : array_like, shape (N, n_classes) optional, (default = None).
Validation labels.
graph : tf.Graph, optional (default = None)
Tensorflow Graph object.
Returns
-------
"""
if len(train_Y.shape) != 1:
num_classes = train_Y.shape[1]
else:
raise Exception("Please convert the labels with one-hot encoding.")
g = graph if graph is not None else self.tf_graph
with g.as_default():
# Build model
self.build_model(train_X.shape[1], num_classes)
with tf.Session() as self.tf_session:
# Initialize tf stuff
summary_objs = tf_utils.init_tf_ops(self.tf_session)
self.tf_merged_summaries = summary_objs[0]
self.tf_summary_writer = summary_objs[1]
self.tf_saver = summary_objs[2]
# Train model
self._train_model(train_X, train_Y, val_X, val_Y)
# Save model
self.tf_saver.save(self.tf_session, self.model_path) | [
"def",
"fit",
"(",
"self",
",",
"train_X",
",",
"train_Y",
",",
"val_X",
"=",
"None",
",",
"val_Y",
"=",
"None",
",",
"graph",
"=",
"None",
")",
":",
"if",
"len",
"(",
"train_Y",
".",
"shape",
")",
"!=",
"1",
":",
"num_classes",
"=",
"train_Y",
"... | Fit the model to the data.
Parameters
----------
train_X : array_like, shape (n_samples, n_features)
Training data.
train_Y : array_like, shape (n_samples, n_classes)
Training labels.
val_X : array_like, shape (N, n_features) optional, (default = None).
Validation data.
val_Y : array_like, shape (N, n_classes) optional, (default = None).
Validation labels.
graph : tf.Graph, optional (default = None)
Tensorflow Graph object.
Returns
------- | [
"Fit",
"the",
"model",
"to",
"the",
"data",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/core/supervised_model.py#L29-L72 | train | 212,171 |
def predict(self, test_X):
    """Predict the labels for the test set.

    Parameters
    ----------
    test_X : array_like, shape (n_samples, n_features)
        Test data.

    Returns
    -------
    array_like, shape (n_samples,) : predicted labels.
    """
    with self.tf_graph.as_default():
        with tf.Session() as self.tf_session:
            # Restore the trained parameters before running inference.
            self.tf_saver.restore(self.tf_session, self.model_path)
            # keep_prob forced to 1: no dropout at inference time.
            feed = {self.input_data: test_X, self.keep_prob: 1}
            return self.mod_y.eval(feed)
"""Predict the labels for the test set.
Parameters
----------
test_X : array_like, shape (n_samples, n_features)
Test data.
Returns
-------
array_like, shape (n_samples,) : predicted labels.
"""
with self.tf_graph.as_default():
with tf.Session() as self.tf_session:
self.tf_saver.restore(self.tf_session, self.model_path)
feed = {
self.input_data: test_X,
self.keep_prob: 1
}
return self.mod_y.eval(feed) | [
"def",
"predict",
"(",
"self",
",",
"test_X",
")",
":",
"with",
"self",
".",
"tf_graph",
".",
"as_default",
"(",
")",
":",
"with",
"tf",
".",
"Session",
"(",
")",
"as",
"self",
".",
"tf_session",
":",
"self",
".",
"tf_saver",
".",
"restore",
"(",
"... | Predict the labels for the test set.
Parameters
----------
test_X : array_like, shape (n_samples, n_features)
Test data.
Returns
-------
array_like, shape (n_samples,) : predicted labels. | [
"Predict",
"the",
"labels",
"for",
"the",
"test",
"set",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/core/supervised_model.py#L74-L95 | train | 212,172 |
def score(self, test_X, test_Y):
    """Compute the mean accuracy over the test set.

    Parameters
    ----------
    test_X : array_like, shape (n_samples, n_features)
        Test data.
    test_Y : array_like, shape (n_samples, n_features)
        Test labels.

    Returns
    -------
    float : mean accuracy over the test set
    """
    with self.tf_graph.as_default():
        with tf.Session() as self.tf_session:
            # Restore the trained parameters before evaluating.
            self.tf_saver.restore(self.tf_session, self.model_path)
            # keep_prob forced to 1: no dropout at evaluation time.
            feed = {
                self.input_data: test_X,
                self.input_labels: test_Y,
                self.keep_prob: 1,
            }
            return self.accuracy.eval(feed)
"""Compute the mean accuracy over the test set.
Parameters
----------
test_X : array_like, shape (n_samples, n_features)
Test data.
test_Y : array_like, shape (n_samples, n_features)
Test labels.
Returns
-------
float : mean accuracy over the test set
"""
with self.tf_graph.as_default():
with tf.Session() as self.tf_session:
self.tf_saver.restore(self.tf_session, self.model_path)
feed = {
self.input_data: test_X,
self.input_labels: test_Y,
self.keep_prob: 1
}
return self.accuracy.eval(feed) | [
"def",
"score",
"(",
"self",
",",
"test_X",
",",
"test_Y",
")",
":",
"with",
"self",
".",
"tf_graph",
".",
"as_default",
"(",
")",
":",
"with",
"tf",
".",
"Session",
"(",
")",
"as",
"self",
".",
"tf_session",
":",
"self",
".",
"tf_saver",
".",
"res... | Compute the mean accuracy over the test set.
Parameters
----------
test_X : array_like, shape (n_samples, n_features)
Test data.
test_Y : array_like, shape (n_samples, n_features)
Test labels.
Returns
-------
float : mean accuracy over the test set | [
"Compute",
"the",
"mean",
"accuracy",
"over",
"the",
"test",
"set",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/core/supervised_model.py#L97-L122 | train | 212,173 |
def pretrain(self, train_set, validation_set=None):
    """Perform Unsupervised pretraining of the autoencoder."""
    self.do_pretrain = True

    def set_params_func(autoenc, autoencgraph):
        # Collect each trained autoencoder's encoding parameters.
        layer_params = autoenc.get_parameters(graph=autoencgraph)
        self.encoding_w_.append(layer_params['enc_w'])
        self.encoding_b_.append(layer_params['enc_b'])

    return SupervisedModel.pretrain_procedure(
        self, self.autoencoders, self.autoencoder_graphs,
        set_params_func=set_params_func, train_set=train_set,
        validation_set=validation_set)
"""Perform Unsupervised pretraining of the autoencoder."""
self.do_pretrain = True
def set_params_func(autoenc, autoencgraph):
params = autoenc.get_parameters(graph=autoencgraph)
self.encoding_w_.append(params['enc_w'])
self.encoding_b_.append(params['enc_b'])
return SupervisedModel.pretrain_procedure(
self, self.autoencoders, self.autoencoder_graphs,
set_params_func=set_params_func, train_set=train_set,
validation_set=validation_set) | [
"def",
"pretrain",
"(",
"self",
",",
"train_set",
",",
"validation_set",
"=",
"None",
")",
":",
"self",
".",
"do_pretrain",
"=",
"True",
"def",
"set_params_func",
"(",
"autoenc",
",",
"autoencgraph",
")",
":",
"params",
"=",
"autoenc",
".",
"get_parameters",... | Perform Unsupervised pretraining of the autoencoder. | [
"Perform",
"Unsupervised",
"pretraining",
"of",
"the",
"autoencoder",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/models/autoencoders/stacked_denoising_autoencoder.py#L112-L124 | train | 212,174 |
def init_tf_ops(sess):
    """Initialize TensorFlow operations.

    This function initialize the following tensorflow ops:
        * init variables ops
        * summary ops
        * create model saver

    Parameters
    ----------
    sess : object
        Tensorflow `Session` object

    Returns
    -------
    tuple : (summary_merged, summary_writer)
        * tf merged summaries object
        * tf summary writer object
        * tf saver object
    """
    summary_merged = tf.summary.merge_all()
    init_op = tf.global_variables_initializer()
    saver = tf.train.Saver()

    sess.run(init_op)

    # Retrieve run identifier: one past the highest existing "runN" dir.
    run_id = 0
    for entry in os.listdir(Config().logs_dir):
        if entry[:3] == 'run':
            run_id = max(run_id, int(entry[3:]))
    run_id += 1

    run_dir = os.path.join(Config().logs_dir, 'run' + str(run_id))
    print('Tensorboard logs dir for this run is %s' % (run_dir))
    summary_writer = tf.summary.FileWriter(run_dir, sess.graph)

    return (summary_merged, summary_writer, saver)
"""Initialize TensorFlow operations.
This function initialize the following tensorflow ops:
* init variables ops
* summary ops
* create model saver
Parameters
----------
sess : object
Tensorflow `Session` object
Returns
-------
tuple : (summary_merged, summary_writer)
* tf merged summaries object
* tf summary writer object
* tf saver object
"""
summary_merged = tf.summary.merge_all()
init_op = tf.global_variables_initializer()
saver = tf.train.Saver()
sess.run(init_op)
# Retrieve run identifier
run_id = 0
for e in os.listdir(Config().logs_dir):
if e[:3] == 'run':
r = int(e[3:])
if r > run_id:
run_id = r
run_id += 1
run_dir = os.path.join(Config().logs_dir, 'run' + str(run_id))
print('Tensorboard logs dir for this run is %s' % (run_dir))
summary_writer = tf.summary.FileWriter(run_dir, sess.graph)
return (summary_merged, summary_writer, saver) | [
"def",
"init_tf_ops",
"(",
"sess",
")",
":",
"summary_merged",
"=",
"tf",
".",
"summary",
".",
"merge_all",
"(",
")",
"init_op",
"=",
"tf",
".",
"global_variables_initializer",
"(",
")",
"saver",
"=",
"tf",
".",
"train",
".",
"Saver",
"(",
")",
"sess",
... | Initialize TensorFlow operations.
This function initialize the following tensorflow ops:
* init variables ops
* summary ops
* create model saver
Parameters
----------
sess : object
Tensorflow `Session` object
Returns
-------
tuple : (summary_merged, summary_writer)
* tf merged summaries object
* tf summary writer object
* tf saver object | [
"Initialize",
"TensorFlow",
"operations",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/utils/tf_utils.py#L9-L50 | train | 212,175 |
def run_summaries(
        sess, merged_summaries, summary_writer, epoch, feed, tens):
    """Run the summaries and error computation on the validation set.

    Parameters
    ----------
    sess : tf.Session
        Tensorflow session object.
    merged_summaries : tf obj
        Tensorflow merged summaries obj.
    summary_writer : tf.summary.FileWriter
        Tensorflow summary writer obj.
    epoch : int
        Current training epoch.
    feed : dict
        Validation feed dict.
    tens : tf.Tensor
        Tensor to display and evaluate during training.
        Can be self.accuracy for SupervisedModel or self.cost for
        UnsupervisedModel.

    Returns
    -------
    err : float, mean error over the validation set.
    """
    try:
        summary_str, out = sess.run([merged_summaries, tens], feed_dict=feed)
        summary_writer.add_summary(summary_str, epoch)
    except tf.errors.InvalidArgumentError:
        # No summaries defined: fall back to evaluating the tensor alone.
        out = sess.run(tens, feed_dict=feed)
    return out
sess, merged_summaries, summary_writer, epoch, feed, tens):
"""Run the summaries and error computation on the validation set.
Parameters
----------
sess : tf.Session
Tensorflow session object.
merged_summaries : tf obj
Tensorflow merged summaries obj.
summary_writer : tf.summary.FileWriter
Tensorflow summary writer obj.
epoch : int
Current training epoch.
feed : dict
Validation feed dict.
tens : tf.Tensor
Tensor to display and evaluate during training.
Can be self.accuracy for SupervisedModel or self.cost for
UnsupervisedModel.
Returns
-------
err : float, mean error over the validation set.
"""
try:
result = sess.run([merged_summaries, tens], feed_dict=feed)
summary_str = result[0]
out = result[1]
summary_writer.add_summary(summary_str, epoch)
except tf.errors.InvalidArgumentError:
out = sess.run(tens, feed_dict=feed)
return out | [
"def",
"run_summaries",
"(",
"sess",
",",
"merged_summaries",
",",
"summary_writer",
",",
"epoch",
",",
"feed",
",",
"tens",
")",
":",
"try",
":",
"result",
"=",
"sess",
".",
"run",
"(",
"[",
"merged_summaries",
",",
"tens",
"]",
",",
"feed_dict",
"=",
... | Run the summaries and error computation on the validation set.
Parameters
----------
sess : tf.Session
Tensorflow session object.
merged_summaries : tf obj
Tensorflow merged summaries obj.
summary_writer : tf.summary.FileWriter
Tensorflow summary writer obj.
epoch : int
Current training epoch.
feed : dict
Validation feed dict.
tens : tf.Tensor
Tensor to display and evaluate during training.
Can be self.accuracy for SupervisedModel or self.cost for
UnsupervisedModel.
Returns
-------
err : float, mean error over the validation set. | [
"Run",
"the",
"summaries",
"and",
"error",
"computation",
"on",
"the",
"validation",
"set",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/utils/tf_utils.py#L53-L93 | train | 212,176 |
def pretrain(self, train_set, validation_set=None):
    """Perform Unsupervised pretraining of the DBN."""
    self.do_pretrain = True

    def set_params_func(rbmmachine, rbmgraph):
        # Keep each trained RBM's weights and hidden biases.
        rbm_params = rbmmachine.get_parameters(graph=rbmgraph)
        self.encoding_w_.append(rbm_params['W'])
        self.encoding_b_.append(rbm_params['bh_'])

    return SupervisedModel.pretrain_procedure(
        self, self.rbms, self.rbm_graphs, set_params_func=set_params_func,
        train_set=train_set, validation_set=validation_set)
"""Perform Unsupervised pretraining of the DBN."""
self.do_pretrain = True
def set_params_func(rbmmachine, rbmgraph):
params = rbmmachine.get_parameters(graph=rbmgraph)
self.encoding_w_.append(params['W'])
self.encoding_b_.append(params['bh_'])
return SupervisedModel.pretrain_procedure(
self, self.rbms, self.rbm_graphs, set_params_func=set_params_func,
train_set=train_set, validation_set=validation_set) | [
"def",
"pretrain",
"(",
"self",
",",
"train_set",
",",
"validation_set",
"=",
"None",
")",
":",
"self",
".",
"do_pretrain",
"=",
"True",
"def",
"set_params_func",
"(",
"rbmmachine",
",",
"rbmgraph",
")",
":",
"params",
"=",
"rbmmachine",
".",
"get_parameters... | Perform Unsupervised pretraining of the DBN. | [
"Perform",
"Unsupervised",
"pretraining",
"of",
"the",
"DBN",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/models/boltzmann/dbn.py#L113-L124 | train | 212,177 |
blackecho/Deep-Learning-TensorFlow | yadlt/models/autoencoders/denoising_autoencoder.py | DenoisingAutoencoder._create_encode_layer | def _create_encode_layer(self):
"""Create the encoding layer of the network.
Returns
-------
self
"""
with tf.name_scope("encoder"):
activation = tf.add(
tf.matmul(self.input_data, self.W_),
self.bh_
)
if self.enc_act_func:
self.encode = self.enc_act_func(activation)
else:
self.encode = activation
return self | python | def _create_encode_layer(self):
"""Create the encoding layer of the network.
Returns
-------
self
"""
with tf.name_scope("encoder"):
activation = tf.add(
tf.matmul(self.input_data, self.W_),
self.bh_
)
if self.enc_act_func:
self.encode = self.enc_act_func(activation)
else:
self.encode = activation
return self | [
"def",
"_create_encode_layer",
"(",
"self",
")",
":",
"with",
"tf",
".",
"name_scope",
"(",
"\"encoder\"",
")",
":",
"activation",
"=",
"tf",
".",
"add",
"(",
"tf",
".",
"matmul",
"(",
"self",
".",
"input_data",
",",
"self",
".",
"W_",
")",
",",
"sel... | Create the encoding layer of the network.
Returns
-------
self | [
"Create",
"the",
"encoding",
"layer",
"of",
"the",
"network",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/models/autoencoders/denoising_autoencoder.py#L247-L267 | train | 212,178 |
blackecho/Deep-Learning-TensorFlow | yadlt/models/autoencoders/denoising_autoencoder.py | DenoisingAutoencoder._create_decode_layer | def _create_decode_layer(self):
"""Create the decoding layer of the network.
Returns
-------
self
"""
with tf.name_scope("decoder"):
activation = tf.add(
tf.matmul(self.encode, tf.transpose(self.W_)),
self.bv_
)
if self.dec_act_func:
self.reconstruction = self.dec_act_func(activation)
else:
self.reconstruction = activation
return self | python | def _create_decode_layer(self):
"""Create the decoding layer of the network.
Returns
-------
self
"""
with tf.name_scope("decoder"):
activation = tf.add(
tf.matmul(self.encode, tf.transpose(self.W_)),
self.bv_
)
if self.dec_act_func:
self.reconstruction = self.dec_act_func(activation)
else:
self.reconstruction = activation
return self | [
"def",
"_create_decode_layer",
"(",
"self",
")",
":",
"with",
"tf",
".",
"name_scope",
"(",
"\"decoder\"",
")",
":",
"activation",
"=",
"tf",
".",
"add",
"(",
"tf",
".",
"matmul",
"(",
"self",
".",
"encode",
",",
"tf",
".",
"transpose",
"(",
"self",
... | Create the decoding layer of the network.
Returns
-------
self | [
"Create",
"the",
"decoding",
"layer",
"of",
"the",
"network",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/models/autoencoders/denoising_autoencoder.py#L269-L289 | train | 212,179 |
blackecho/Deep-Learning-TensorFlow | yadlt/utils/utilities.py | sample_prob | def sample_prob(probs, rand):
"""Get samples from a tensor of probabilities.
:param probs: tensor of probabilities
:param rand: tensor (of the same shape as probs) of random values
:return: binary sample of probabilities
"""
return tf.nn.relu(tf.sign(probs - rand)) | python | def sample_prob(probs, rand):
"""Get samples from a tensor of probabilities.
:param probs: tensor of probabilities
:param rand: tensor (of the same shape as probs) of random values
:return: binary sample of probabilities
"""
return tf.nn.relu(tf.sign(probs - rand)) | [
"def",
"sample_prob",
"(",
"probs",
",",
"rand",
")",
":",
"return",
"tf",
".",
"nn",
".",
"relu",
"(",
"tf",
".",
"sign",
"(",
"probs",
"-",
"rand",
")",
")"
] | Get samples from a tensor of probabilities.
:param probs: tensor of probabilities
:param rand: tensor (of the same shape as probs) of random values
:return: binary sample of probabilities | [
"Get",
"samples",
"from",
"a",
"tensor",
"of",
"probabilities",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/utils/utilities.py#L16-L23 | train | 212,180 |
blackecho/Deep-Learning-TensorFlow | yadlt/utils/utilities.py | corrupt_input | def corrupt_input(data, sess, corrtype, corrfrac):
"""Corrupt a fraction of data according to the chosen noise method.
:return: corrupted data
"""
corruption_ratio = np.round(corrfrac * data.shape[1]).astype(np.int)
if corrtype == 'none':
return np.copy(data)
if corrfrac > 0.0:
if corrtype == 'masking':
return masking_noise(data, sess, corrfrac)
elif corrtype == 'salt_and_pepper':
return salt_and_pepper_noise(data, corruption_ratio)
else:
return np.copy(data) | python | def corrupt_input(data, sess, corrtype, corrfrac):
"""Corrupt a fraction of data according to the chosen noise method.
:return: corrupted data
"""
corruption_ratio = np.round(corrfrac * data.shape[1]).astype(np.int)
if corrtype == 'none':
return np.copy(data)
if corrfrac > 0.0:
if corrtype == 'masking':
return masking_noise(data, sess, corrfrac)
elif corrtype == 'salt_and_pepper':
return salt_and_pepper_noise(data, corruption_ratio)
else:
return np.copy(data) | [
"def",
"corrupt_input",
"(",
"data",
",",
"sess",
",",
"corrtype",
",",
"corrfrac",
")",
":",
"corruption_ratio",
"=",
"np",
".",
"round",
"(",
"corrfrac",
"*",
"data",
".",
"shape",
"[",
"1",
"]",
")",
".",
"astype",
"(",
"np",
".",
"int",
")",
"i... | Corrupt a fraction of data according to the chosen noise method.
:return: corrupted data | [
"Corrupt",
"a",
"fraction",
"of",
"data",
"according",
"to",
"the",
"chosen",
"noise",
"method",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/utils/utilities.py#L26-L43 | train | 212,181 |
blackecho/Deep-Learning-TensorFlow | yadlt/utils/utilities.py | xavier_init | def xavier_init(fan_in, fan_out, const=1):
"""Xavier initialization of network weights.
https://stackoverflow.com/questions/33640581/how-to-do-xavier-initialization-on-tensorflow
:param fan_in: fan in of the network (n_features)
:param fan_out: fan out of the network (n_components)
:param const: multiplicative constant
"""
low = -const * np.sqrt(6.0 / (fan_in + fan_out))
high = const * np.sqrt(6.0 / (fan_in + fan_out))
return tf.random_uniform((fan_in, fan_out), minval=low, maxval=high) | python | def xavier_init(fan_in, fan_out, const=1):
"""Xavier initialization of network weights.
https://stackoverflow.com/questions/33640581/how-to-do-xavier-initialization-on-tensorflow
:param fan_in: fan in of the network (n_features)
:param fan_out: fan out of the network (n_components)
:param const: multiplicative constant
"""
low = -const * np.sqrt(6.0 / (fan_in + fan_out))
high = const * np.sqrt(6.0 / (fan_in + fan_out))
return tf.random_uniform((fan_in, fan_out), minval=low, maxval=high) | [
"def",
"xavier_init",
"(",
"fan_in",
",",
"fan_out",
",",
"const",
"=",
"1",
")",
":",
"low",
"=",
"-",
"const",
"*",
"np",
".",
"sqrt",
"(",
"6.0",
"/",
"(",
"fan_in",
"+",
"fan_out",
")",
")",
"high",
"=",
"const",
"*",
"np",
".",
"sqrt",
"("... | Xavier initialization of network weights.
https://stackoverflow.com/questions/33640581/how-to-do-xavier-initialization-on-tensorflow
:param fan_in: fan in of the network (n_features)
:param fan_out: fan out of the network (n_components)
:param const: multiplicative constant | [
"Xavier",
"initialization",
"of",
"network",
"weights",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/utils/utilities.py#L46-L57 | train | 212,182 |
blackecho/Deep-Learning-TensorFlow | yadlt/utils/utilities.py | gen_batches | def gen_batches(data, batch_size):
"""Divide input data into batches.
:param data: input data
:param batch_size: size of each batch
:return: data divided into batches
"""
data = np.array(data)
for i in range(0, data.shape[0], batch_size):
yield data[i:i + batch_size] | python | def gen_batches(data, batch_size):
"""Divide input data into batches.
:param data: input data
:param batch_size: size of each batch
:return: data divided into batches
"""
data = np.array(data)
for i in range(0, data.shape[0], batch_size):
yield data[i:i + batch_size] | [
"def",
"gen_batches",
"(",
"data",
",",
"batch_size",
")",
":",
"data",
"=",
"np",
".",
"array",
"(",
"data",
")",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"data",
".",
"shape",
"[",
"0",
"]",
",",
"batch_size",
")",
":",
"yield",
"data",
"[",
... | Divide input data into batches.
:param data: input data
:param batch_size: size of each batch
:return: data divided into batches | [
"Divide",
"input",
"data",
"into",
"batches",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/utils/utilities.py#L89-L99 | train | 212,183 |
blackecho/Deep-Learning-TensorFlow | yadlt/utils/utilities.py | to_one_hot | def to_one_hot(dataY):
"""Convert the vector of labels dataY into one-hot encoding.
:param dataY: vector of labels
:return: one-hot encoded labels
"""
nc = 1 + np.max(dataY)
onehot = [np.zeros(nc, dtype=np.int8) for _ in dataY]
for i, j in enumerate(dataY):
onehot[i][j] = 1
return onehot | python | def to_one_hot(dataY):
"""Convert the vector of labels dataY into one-hot encoding.
:param dataY: vector of labels
:return: one-hot encoded labels
"""
nc = 1 + np.max(dataY)
onehot = [np.zeros(nc, dtype=np.int8) for _ in dataY]
for i, j in enumerate(dataY):
onehot[i][j] = 1
return onehot | [
"def",
"to_one_hot",
"(",
"dataY",
")",
":",
"nc",
"=",
"1",
"+",
"np",
".",
"max",
"(",
"dataY",
")",
"onehot",
"=",
"[",
"np",
".",
"zeros",
"(",
"nc",
",",
"dtype",
"=",
"np",
".",
"int8",
")",
"for",
"_",
"in",
"dataY",
"]",
"for",
"i",
... | Convert the vector of labels dataY into one-hot encoding.
:param dataY: vector of labels
:return: one-hot encoded labels | [
"Convert",
"the",
"vector",
"of",
"labels",
"dataY",
"into",
"one",
"-",
"hot",
"encoding",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/utils/utilities.py#L102-L112 | train | 212,184 |
blackecho/Deep-Learning-TensorFlow | yadlt/utils/utilities.py | conv2bin | def conv2bin(data):
"""Convert a matrix of probabilities into binary values.
If the matrix has values <= 0 or >= 1, the values are
normalized to be in [0, 1].
:type data: numpy array
:param data: input matrix
:return: converted binary matrix
"""
if data.min() < 0 or data.max() > 1:
data = normalize(data)
out_data = data.copy()
for i, sample in enumerate(out_data):
for j, val in enumerate(sample):
if np.random.random() <= val:
out_data[i][j] = 1
else:
out_data[i][j] = 0
return out_data | python | def conv2bin(data):
"""Convert a matrix of probabilities into binary values.
If the matrix has values <= 0 or >= 1, the values are
normalized to be in [0, 1].
:type data: numpy array
:param data: input matrix
:return: converted binary matrix
"""
if data.min() < 0 or data.max() > 1:
data = normalize(data)
out_data = data.copy()
for i, sample in enumerate(out_data):
for j, val in enumerate(sample):
if np.random.random() <= val:
out_data[i][j] = 1
else:
out_data[i][j] = 0
return out_data | [
"def",
"conv2bin",
"(",
"data",
")",
":",
"if",
"data",
".",
"min",
"(",
")",
"<",
"0",
"or",
"data",
".",
"max",
"(",
")",
">",
"1",
":",
"data",
"=",
"normalize",
"(",
"data",
")",
"out_data",
"=",
"data",
".",
"copy",
"(",
")",
"for",
"i",... | Convert a matrix of probabilities into binary values.
If the matrix has values <= 0 or >= 1, the values are
normalized to be in [0, 1].
:type data: numpy array
:param data: input matrix
:return: converted binary matrix | [
"Convert",
"a",
"matrix",
"of",
"probabilities",
"into",
"binary",
"values",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/utils/utilities.py#L115-L139 | train | 212,185 |
blackecho/Deep-Learning-TensorFlow | yadlt/utils/utilities.py | masking_noise | def masking_noise(data, sess, v):
"""Apply masking noise to data in X.
In other words a fraction v of elements of X
(chosen at random) is forced to zero.
:param data: array_like, Input data
:param sess: TensorFlow session
:param v: fraction of elements to distort, float
:return: transformed data
"""
data_noise = data.copy()
rand = tf.random_uniform(data.shape)
data_noise[sess.run(tf.nn.relu(tf.sign(v - rand))).astype(np.bool)] = 0
return data_noise | python | def masking_noise(data, sess, v):
"""Apply masking noise to data in X.
In other words a fraction v of elements of X
(chosen at random) is forced to zero.
:param data: array_like, Input data
:param sess: TensorFlow session
:param v: fraction of elements to distort, float
:return: transformed data
"""
data_noise = data.copy()
rand = tf.random_uniform(data.shape)
data_noise[sess.run(tf.nn.relu(tf.sign(v - rand))).astype(np.bool)] = 0
return data_noise | [
"def",
"masking_noise",
"(",
"data",
",",
"sess",
",",
"v",
")",
":",
"data_noise",
"=",
"data",
".",
"copy",
"(",
")",
"rand",
"=",
"tf",
".",
"random_uniform",
"(",
"data",
".",
"shape",
")",
"data_noise",
"[",
"sess",
".",
"run",
"(",
"tf",
".",... | Apply masking noise to data in X.
In other words a fraction v of elements of X
(chosen at random) is forced to zero.
:param data: array_like, Input data
:param sess: TensorFlow session
:param v: fraction of elements to distort, float
:return: transformed data | [
"Apply",
"masking",
"noise",
"to",
"data",
"in",
"X",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/utils/utilities.py#L156-L170 | train | 212,186 |
blackecho/Deep-Learning-TensorFlow | yadlt/utils/utilities.py | salt_and_pepper_noise | def salt_and_pepper_noise(X, v):
"""Apply salt and pepper noise to data in X.
In other words a fraction v of elements of X
(chosen at random) is set to its maximum or minimum value according to a
fair coin flip.
If minimum or maximum are not given, the min (max) value in X is taken.
:param X: array_like, Input data
:param v: int, fraction of elements to distort
:return: transformed data
"""
X_noise = X.copy()
n_features = X.shape[1]
mn = X.min()
mx = X.max()
for i, sample in enumerate(X):
mask = np.random.randint(0, n_features, v)
for m in mask:
if np.random.random() < 0.5:
X_noise[i][m] = mn
else:
X_noise[i][m] = mx
return X_noise | python | def salt_and_pepper_noise(X, v):
"""Apply salt and pepper noise to data in X.
In other words a fraction v of elements of X
(chosen at random) is set to its maximum or minimum value according to a
fair coin flip.
If minimum or maximum are not given, the min (max) value in X is taken.
:param X: array_like, Input data
:param v: int, fraction of elements to distort
:return: transformed data
"""
X_noise = X.copy()
n_features = X.shape[1]
mn = X.min()
mx = X.max()
for i, sample in enumerate(X):
mask = np.random.randint(0, n_features, v)
for m in mask:
if np.random.random() < 0.5:
X_noise[i][m] = mn
else:
X_noise[i][m] = mx
return X_noise | [
"def",
"salt_and_pepper_noise",
"(",
"X",
",",
"v",
")",
":",
"X_noise",
"=",
"X",
".",
"copy",
"(",
")",
"n_features",
"=",
"X",
".",
"shape",
"[",
"1",
"]",
"mn",
"=",
"X",
".",
"min",
"(",
")",
"mx",
"=",
"X",
".",
"max",
"(",
")",
"for",
... | Apply salt and pepper noise to data in X.
In other words a fraction v of elements of X
(chosen at random) is set to its maximum or minimum value according to a
fair coin flip.
If minimum or maximum are not given, the min (max) value in X is taken.
:param X: array_like, Input data
:param v: int, fraction of elements to distort
:return: transformed data | [
"Apply",
"salt",
"and",
"pepper",
"noise",
"to",
"data",
"in",
"X",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/utils/utilities.py#L173-L200 | train | 212,187 |
blackecho/Deep-Learning-TensorFlow | yadlt/utils/utilities.py | expand_args | def expand_args(**args_to_expand):
"""Expand the given lists into the length of the layers.
This is used as a convenience so that the user does not need to specify the
complete list of parameters for model initialization.
IE the user can just specify one parameter and this function will expand it
"""
layers = args_to_expand['layers']
try:
items = args_to_expand.iteritems()
except AttributeError:
items = args_to_expand.items()
for key, val in items:
if isinstance(val, list) and len(val) != len(layers):
args_to_expand[key] = [val[0] for _ in layers]
return args_to_expand | python | def expand_args(**args_to_expand):
"""Expand the given lists into the length of the layers.
This is used as a convenience so that the user does not need to specify the
complete list of parameters for model initialization.
IE the user can just specify one parameter and this function will expand it
"""
layers = args_to_expand['layers']
try:
items = args_to_expand.iteritems()
except AttributeError:
items = args_to_expand.items()
for key, val in items:
if isinstance(val, list) and len(val) != len(layers):
args_to_expand[key] = [val[0] for _ in layers]
return args_to_expand | [
"def",
"expand_args",
"(",
"*",
"*",
"args_to_expand",
")",
":",
"layers",
"=",
"args_to_expand",
"[",
"'layers'",
"]",
"try",
":",
"items",
"=",
"args_to_expand",
".",
"iteritems",
"(",
")",
"except",
"AttributeError",
":",
"items",
"=",
"args_to_expand",
"... | Expand the given lists into the length of the layers.
This is used as a convenience so that the user does not need to specify the
complete list of parameters for model initialization.
IE the user can just specify one parameter and this function will expand it | [
"Expand",
"the",
"given",
"lists",
"into",
"the",
"length",
"of",
"the",
"layers",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/utils/utilities.py#L207-L224 | train | 212,188 |
blackecho/Deep-Learning-TensorFlow | yadlt/utils/utilities.py | flag_to_list | def flag_to_list(flagval, flagtype):
"""Convert a string of comma-separated tf flags to a list of values."""
if flagtype == 'int':
return [int(_) for _ in flagval.split(',') if _]
elif flagtype == 'float':
return [float(_) for _ in flagval.split(',') if _]
elif flagtype == 'str':
return [_ for _ in flagval.split(',') if _]
else:
raise Exception("incorrect type") | python | def flag_to_list(flagval, flagtype):
"""Convert a string of comma-separated tf flags to a list of values."""
if flagtype == 'int':
return [int(_) for _ in flagval.split(',') if _]
elif flagtype == 'float':
return [float(_) for _ in flagval.split(',') if _]
elif flagtype == 'str':
return [_ for _ in flagval.split(',') if _]
else:
raise Exception("incorrect type") | [
"def",
"flag_to_list",
"(",
"flagval",
",",
"flagtype",
")",
":",
"if",
"flagtype",
"==",
"'int'",
":",
"return",
"[",
"int",
"(",
"_",
")",
"for",
"_",
"in",
"flagval",
".",
"split",
"(",
"','",
")",
"if",
"_",
"]",
"elif",
"flagtype",
"==",
"'flo... | Convert a string of comma-separated tf flags to a list of values. | [
"Convert",
"a",
"string",
"of",
"comma",
"-",
"separated",
"tf",
"flags",
"to",
"a",
"list",
"of",
"values",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/utils/utilities.py#L227-L239 | train | 212,189 |
blackecho/Deep-Learning-TensorFlow | yadlt/utils/utilities.py | str2actfunc | def str2actfunc(act_func):
"""Convert activation function name to tf function."""
if act_func == 'sigmoid':
return tf.nn.sigmoid
elif act_func == 'tanh':
return tf.nn.tanh
elif act_func == 'relu':
return tf.nn.relu | python | def str2actfunc(act_func):
"""Convert activation function name to tf function."""
if act_func == 'sigmoid':
return tf.nn.sigmoid
elif act_func == 'tanh':
return tf.nn.tanh
elif act_func == 'relu':
return tf.nn.relu | [
"def",
"str2actfunc",
"(",
"act_func",
")",
":",
"if",
"act_func",
"==",
"'sigmoid'",
":",
"return",
"tf",
".",
"nn",
".",
"sigmoid",
"elif",
"act_func",
"==",
"'tanh'",
":",
"return",
"tf",
".",
"nn",
".",
"tanh",
"elif",
"act_func",
"==",
"'relu'",
"... | Convert activation function name to tf function. | [
"Convert",
"activation",
"function",
"name",
"to",
"tf",
"function",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/utils/utilities.py#L242-L251 | train | 212,190 |
blackecho/Deep-Learning-TensorFlow | yadlt/utils/utilities.py | random_seed_np_tf | def random_seed_np_tf(seed):
"""Seed numpy and tensorflow random number generators.
:param seed: seed parameter
"""
if seed >= 0:
np.random.seed(seed)
tf.set_random_seed(seed)
return True
else:
return False | python | def random_seed_np_tf(seed):
"""Seed numpy and tensorflow random number generators.
:param seed: seed parameter
"""
if seed >= 0:
np.random.seed(seed)
tf.set_random_seed(seed)
return True
else:
return False | [
"def",
"random_seed_np_tf",
"(",
"seed",
")",
":",
"if",
"seed",
">=",
"0",
":",
"np",
".",
"random",
".",
"seed",
"(",
"seed",
")",
"tf",
".",
"set_random_seed",
"(",
"seed",
")",
"return",
"True",
"else",
":",
"return",
"False"
] | Seed numpy and tensorflow random number generators.
:param seed: seed parameter | [
"Seed",
"numpy",
"and",
"tensorflow",
"random",
"number",
"generators",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/utils/utilities.py#L254-L264 | train | 212,191 |
blackecho/Deep-Learning-TensorFlow | yadlt/utils/utilities.py | gen_image | def gen_image(img, width, height, outfile, img_type='grey'):
"""Save an image with the given parameters."""
assert len(img) == width * height or len(img) == width * height * 3
if img_type == 'grey':
misc.imsave(outfile, img.reshape(width, height))
elif img_type == 'color':
misc.imsave(outfile, img.reshape(3, width, height)) | python | def gen_image(img, width, height, outfile, img_type='grey'):
"""Save an image with the given parameters."""
assert len(img) == width * height or len(img) == width * height * 3
if img_type == 'grey':
misc.imsave(outfile, img.reshape(width, height))
elif img_type == 'color':
misc.imsave(outfile, img.reshape(3, width, height)) | [
"def",
"gen_image",
"(",
"img",
",",
"width",
",",
"height",
",",
"outfile",
",",
"img_type",
"=",
"'grey'",
")",
":",
"assert",
"len",
"(",
"img",
")",
"==",
"width",
"*",
"height",
"or",
"len",
"(",
"img",
")",
"==",
"width",
"*",
"height",
"*",
... | Save an image with the given parameters. | [
"Save",
"an",
"image",
"with",
"the",
"given",
"parameters",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/utils/utilities.py#L267-L275 | train | 212,192 |
blackecho/Deep-Learning-TensorFlow | yadlt/models/convolutional/conv_net.py | ConvolutionalNetwork.build_model | def build_model(self, n_features, n_classes):
"""Create the computational graph of the model.
:param n_features: Number of features.
:param n_classes: number of classes.
:return: self
"""
self._create_placeholders(n_features, n_classes)
self._create_layers(n_classes)
self.cost = self.loss.compile(self.mod_y, self.input_labels)
self.train_step = self.trainer.compile(self.cost)
self.accuracy = Evaluation.accuracy(self.mod_y, self.input_labels) | python | def build_model(self, n_features, n_classes):
"""Create the computational graph of the model.
:param n_features: Number of features.
:param n_classes: number of classes.
:return: self
"""
self._create_placeholders(n_features, n_classes)
self._create_layers(n_classes)
self.cost = self.loss.compile(self.mod_y, self.input_labels)
self.train_step = self.trainer.compile(self.cost)
self.accuracy = Evaluation.accuracy(self.mod_y, self.input_labels) | [
"def",
"build_model",
"(",
"self",
",",
"n_features",
",",
"n_classes",
")",
":",
"self",
".",
"_create_placeholders",
"(",
"n_features",
",",
"n_classes",
")",
"self",
".",
"_create_layers",
"(",
"n_classes",
")",
"self",
".",
"cost",
"=",
"self",
".",
"l... | Create the computational graph of the model.
:param n_features: Number of features.
:param n_classes: number of classes.
:return: self | [
"Create",
"the",
"computational",
"graph",
"of",
"the",
"model",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/models/convolutional/conv_net.py#L103-L115 | train | 212,193 |
blackecho/Deep-Learning-TensorFlow | yadlt/models/convolutional/conv_net.py | ConvolutionalNetwork.max_pool | def max_pool(x, dim):
"""Max pooling operation."""
return tf.nn.max_pool(
x, ksize=[1, dim, dim, 1], strides=[1, dim, dim, 1],
padding='SAME') | python | def max_pool(x, dim):
"""Max pooling operation."""
return tf.nn.max_pool(
x, ksize=[1, dim, dim, 1], strides=[1, dim, dim, 1],
padding='SAME') | [
"def",
"max_pool",
"(",
"x",
",",
"dim",
")",
":",
"return",
"tf",
".",
"nn",
".",
"max_pool",
"(",
"x",
",",
"ksize",
"=",
"[",
"1",
",",
"dim",
",",
"dim",
",",
"1",
"]",
",",
"strides",
"=",
"[",
"1",
",",
"dim",
",",
"dim",
",",
"1",
... | Max pooling operation. | [
"Max",
"pooling",
"operation",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/models/convolutional/conv_net.py#L279-L283 | train | 212,194 |
blackecho/Deep-Learning-TensorFlow | yadlt/core/unsupervised_model.py | UnsupervisedModel.reconstruct | def reconstruct(self, data, graph=None):
"""Reconstruct data according to the model.
Parameters
----------
data : array_like, shape (n_samples, n_features)
Data to transform.
graph : tf.Graph, optional (default = None)
Tensorflow Graph object
Returns
-------
array_like, transformed data
"""
g = graph if graph is not None else self.tf_graph
with g.as_default():
with tf.Session() as self.tf_session:
self.tf_saver.restore(self.tf_session, self.model_path)
feed = {self.input_data: data, self.keep_prob: 1}
return self.reconstruction.eval(feed) | python | def reconstruct(self, data, graph=None):
"""Reconstruct data according to the model.
Parameters
----------
data : array_like, shape (n_samples, n_features)
Data to transform.
graph : tf.Graph, optional (default = None)
Tensorflow Graph object
Returns
-------
array_like, transformed data
"""
g = graph if graph is not None else self.tf_graph
with g.as_default():
with tf.Session() as self.tf_session:
self.tf_saver.restore(self.tf_session, self.model_path)
feed = {self.input_data: data, self.keep_prob: 1}
return self.reconstruction.eval(feed) | [
"def",
"reconstruct",
"(",
"self",
",",
"data",
",",
"graph",
"=",
"None",
")",
":",
"g",
"=",
"graph",
"if",
"graph",
"is",
"not",
"None",
"else",
"self",
".",
"tf_graph",
"with",
"g",
".",
"as_default",
"(",
")",
":",
"with",
"tf",
".",
"Session"... | Reconstruct data according to the model.
Parameters
----------
data : array_like, shape (n_samples, n_features)
Data to transform.
graph : tf.Graph, optional (default = None)
Tensorflow Graph object
Returns
-------
array_like, transformed data | [
"Reconstruct",
"data",
"according",
"to",
"the",
"model",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/core/unsupervised_model.py#L94-L116 | train | 212,195 |
blackecho/Deep-Learning-TensorFlow | yadlt/core/unsupervised_model.py | UnsupervisedModel.score | def score(self, data, data_ref, graph=None):
"""Compute the reconstruction loss over the test set.
Parameters
----------
data : array_like
Data to reconstruct.
data_ref : array_like
Reference data.
Returns
-------
float: Mean error.
"""
g = graph if graph is not None else self.tf_graph
with g.as_default():
with tf.Session() as self.tf_session:
self.tf_saver.restore(self.tf_session, self.model_path)
feed = {
self.input_data: data,
self.input_labels: data_ref,
self.keep_prob: 1
}
return self.cost.eval(feed) | python | def score(self, data, data_ref, graph=None):
"""Compute the reconstruction loss over the test set.
Parameters
----------
data : array_like
Data to reconstruct.
data_ref : array_like
Reference data.
Returns
-------
float: Mean error.
"""
g = graph if graph is not None else self.tf_graph
with g.as_default():
with tf.Session() as self.tf_session:
self.tf_saver.restore(self.tf_session, self.model_path)
feed = {
self.input_data: data,
self.input_labels: data_ref,
self.keep_prob: 1
}
return self.cost.eval(feed) | [
"def",
"score",
"(",
"self",
",",
"data",
",",
"data_ref",
",",
"graph",
"=",
"None",
")",
":",
"g",
"=",
"graph",
"if",
"graph",
"is",
"not",
"None",
"else",
"self",
".",
"tf_graph",
"with",
"g",
".",
"as_default",
"(",
")",
":",
"with",
"tf",
"... | Compute the reconstruction loss over the test set.
Parameters
----------
data : array_like
Data to reconstruct.
data_ref : array_like
Reference data.
Returns
-------
float: Mean error. | [
"Compute",
"the",
"reconstruction",
"loss",
"over",
"the",
"test",
"set",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/core/unsupervised_model.py#L118-L145 | train | 212,196 |
blackecho/Deep-Learning-TensorFlow | yadlt/core/trainers.py | Trainer.compile | def compile(self, cost, name_scope="train"):
"""Compile the optimizer with the given training parameters.
Parameters
----------
cost : Tensor
A Tensor containing the value to minimize.
name_scope : str , optional (default="train")
Optional name scope for the optimizer graph ops.
"""
with tf.name_scope(name_scope):
return self.opt_.minimize(cost) | python | def compile(self, cost, name_scope="train"):
"""Compile the optimizer with the given training parameters.
Parameters
----------
cost : Tensor
A Tensor containing the value to minimize.
name_scope : str , optional (default="train")
Optional name scope for the optimizer graph ops.
"""
with tf.name_scope(name_scope):
return self.opt_.minimize(cost) | [
"def",
"compile",
"(",
"self",
",",
"cost",
",",
"name_scope",
"=",
"\"train\"",
")",
":",
"with",
"tf",
".",
"name_scope",
"(",
"name_scope",
")",
":",
"return",
"self",
".",
"opt_",
".",
"minimize",
"(",
"cost",
")"
] | Compile the optimizer with the given training parameters.
Parameters
----------
cost : Tensor
A Tensor containing the value to minimize.
name_scope : str , optional (default="train")
Optional name scope for the optimizer graph ops. | [
"Compile",
"the",
"optimizer",
"with",
"the",
"given",
"training",
"parameters",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/core/trainers.py#L53-L64 | train | 212,197 |
blackecho/Deep-Learning-TensorFlow | yadlt/core/trainers.py | Loss.compile | def compile(self, mod_y, ref_y, regterm=None):
"""Compute the loss function tensor.
Parameters
----------
mode_y : tf.Tensor
model output tensor
ref_y : tf.Tensor
reference input tensor
regterm : tf.Tensor, optional (default = None)
Regularization term tensor
Returns
-------
Loss function tensor.
"""
with tf.name_scope(self.name):
if self.lfunc == 'cross_entropy':
clip_inf = tf.clip_by_value(mod_y, 1e-10, float('inf'))
clip_sup = tf.clip_by_value(1 - mod_y, 1e-10, float('inf'))
cost = - tf.reduce_mean(tf.add(
tf.multiply(ref_y, tf.log(clip_inf)),
tf.multiply(tf.subtract(1.0, ref_y), tf.log(clip_sup))))
elif self.lfunc == 'softmax_cross_entropy':
cost = tf.losses.softmax_cross_entropy(ref_y, mod_y)
elif self.lfunc == 'mse':
cost = tf.sqrt(tf.reduce_mean(
tf.square(tf.subtract(ref_y, mod_y))))
else:
cost = None
if cost is not None:
cost = cost + regterm if regterm is not None else cost
tf.summary.scalar(self.lfunc, cost)
else:
cost = None
return cost | python | def compile(self, mod_y, ref_y, regterm=None):
"""Compute the loss function tensor.
Parameters
----------
mode_y : tf.Tensor
model output tensor
ref_y : tf.Tensor
reference input tensor
regterm : tf.Tensor, optional (default = None)
Regularization term tensor
Returns
-------
Loss function tensor.
"""
with tf.name_scope(self.name):
if self.lfunc == 'cross_entropy':
clip_inf = tf.clip_by_value(mod_y, 1e-10, float('inf'))
clip_sup = tf.clip_by_value(1 - mod_y, 1e-10, float('inf'))
cost = - tf.reduce_mean(tf.add(
tf.multiply(ref_y, tf.log(clip_inf)),
tf.multiply(tf.subtract(1.0, ref_y), tf.log(clip_sup))))
elif self.lfunc == 'softmax_cross_entropy':
cost = tf.losses.softmax_cross_entropy(ref_y, mod_y)
elif self.lfunc == 'mse':
cost = tf.sqrt(tf.reduce_mean(
tf.square(tf.subtract(ref_y, mod_y))))
else:
cost = None
if cost is not None:
cost = cost + regterm if regterm is not None else cost
tf.summary.scalar(self.lfunc, cost)
else:
cost = None
return cost | [
"def",
"compile",
"(",
"self",
",",
"mod_y",
",",
"ref_y",
",",
"regterm",
"=",
"None",
")",
":",
"with",
"tf",
".",
"name_scope",
"(",
"self",
".",
"name",
")",
":",
"if",
"self",
".",
"lfunc",
"==",
"'cross_entropy'",
":",
"clip_inf",
"=",
"tf",
... | Compute the loss function tensor.
Parameters
----------
mode_y : tf.Tensor
model output tensor
ref_y : tf.Tensor
reference input tensor
regterm : tf.Tensor, optional (default = None)
Regularization term tensor
Returns
-------
Loss function tensor. | [
"Compute",
"the",
"loss",
"function",
"tensor",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/core/trainers.py#L94-L139 | train | 212,198 |
blackecho/Deep-Learning-TensorFlow | yadlt/models/autoencoders/deep_autoencoder.py | DeepAutoencoder.build_model | def build_model(self, n_features, encoding_w=None, encoding_b=None):
"""Create the computational graph for the reconstruction task.
:param n_features: Number of features
:param encoding_w: list of weights for the encoding layers.
:param encoding_b: list of biases for the encoding layers.
:return: self
"""
self._create_placeholders(n_features, n_features)
if encoding_w and encoding_b:
self.encoding_w_ = encoding_w
self.encoding_b_ = encoding_b
else:
self._create_variables(n_features)
self._create_encoding_layers()
self._create_decoding_layers()
variables = []
variables.extend(self.encoding_w_)
variables.extend(self.encoding_b_)
regterm = Layers.regularization(variables, self.regtype, self.regcoef)
self.cost = self.loss.compile(
self.reconstruction, self.input_labels, regterm=regterm)
self.train_step = self.trainer.compile(self.cost) | python | def build_model(self, n_features, encoding_w=None, encoding_b=None):
"""Create the computational graph for the reconstruction task.
:param n_features: Number of features
:param encoding_w: list of weights for the encoding layers.
:param encoding_b: list of biases for the encoding layers.
:return: self
"""
self._create_placeholders(n_features, n_features)
if encoding_w and encoding_b:
self.encoding_w_ = encoding_w
self.encoding_b_ = encoding_b
else:
self._create_variables(n_features)
self._create_encoding_layers()
self._create_decoding_layers()
variables = []
variables.extend(self.encoding_w_)
variables.extend(self.encoding_b_)
regterm = Layers.regularization(variables, self.regtype, self.regcoef)
self.cost = self.loss.compile(
self.reconstruction, self.input_labels, regterm=regterm)
self.train_step = self.trainer.compile(self.cost) | [
"def",
"build_model",
"(",
"self",
",",
"n_features",
",",
"encoding_w",
"=",
"None",
",",
"encoding_b",
"=",
"None",
")",
":",
"self",
".",
"_create_placeholders",
"(",
"n_features",
",",
"n_features",
")",
"if",
"encoding_w",
"and",
"encoding_b",
":",
"sel... | Create the computational graph for the reconstruction task.
:param n_features: Number of features
:param encoding_w: list of weights for the encoding layers.
:param encoding_b: list of biases for the encoding layers.
:return: self | [
"Create",
"the",
"computational",
"graph",
"for",
"the",
"reconstruction",
"task",
"."
] | ddeb1f2848da7b7bee166ad2152b4afc46bb2086 | https://github.com/blackecho/Deep-Learning-TensorFlow/blob/ddeb1f2848da7b7bee166ad2152b4afc46bb2086/yadlt/models/autoencoders/deep_autoencoder.py#L180-L206 | train | 212,199 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.